From 585436d1ba555088a194de89662330ea5e4c63b4 Mon Sep 17 00:00:00 2001
From: terencecho <3916587+terencecho@users.noreply.github.com>
Date: Mon, 20 Nov 2023 20:43:09 -0800
Subject: [PATCH] Set more configs to be sensitive and update to latest spec
(#55)
* Set more configs to be sensitive (e.g. keys, passwords) and update to the latest spec
* run go generate
---
.gitattributes | 0
README.md | 10 +-
USAGE.md | 6 -
airbyte.yaml | 27207 +++++++++++-----
docs/data-sources/connection.md | 14 +-
docs/data-sources/destination_aws_datalake.md | 127 +-
.../destination_azure_blob_storage.md | 63 +-
docs/data-sources/destination_bigquery.md | 110 +-
.../destination_bigquery_denormalized.md | 137 -
docs/data-sources/destination_clickhouse.md | 99 +-
docs/data-sources/destination_convex.md | 13 +-
docs/data-sources/destination_cumulio.md | 14 +-
docs/data-sources/destination_databend.md | 17 +-
docs/data-sources/destination_databricks.md | 102 +-
docs/data-sources/destination_dev_null.md | 35 +-
docs/data-sources/destination_duckdb.md | 36 +
docs/data-sources/destination_dynamodb.md | 17 +-
.../data-sources/destination_elasticsearch.md | 64 +-
docs/data-sources/destination_firebolt.md | 67 +-
docs/data-sources/destination_firestore.md | 13 +-
docs/data-sources/destination_gcs.md | 377 +-
.../data-sources/destination_google_sheets.md | 22 +-
docs/data-sources/destination_keen.md | 14 +-
docs/data-sources/destination_kinesis.md | 17 +-
docs/data-sources/destination_langchain.md | 141 +-
docs/data-sources/destination_milvus.md | 191 +-
docs/data-sources/destination_mongodb.md | 214 +-
docs/data-sources/destination_mssql.md | 146 +-
docs/data-sources/destination_mysql.md | 99 +-
docs/data-sources/destination_oracle.md | 100 +-
docs/data-sources/destination_pinecone.md | 99 +-
docs/data-sources/destination_postgres.md | 235 +-
docs/data-sources/destination_pubsub.md | 19 +-
docs/data-sources/destination_qdrant.md | 36 +
docs/data-sources/destination_redis.md | 153 +-
docs/data-sources/destination_redshift.md | 216 +-
docs/data-sources/destination_s3.md | 356 +-
docs/data-sources/destination_s3_glue.md | 101 +-
docs/data-sources/destination_sftp_json.md | 16 +-
docs/data-sources/destination_snowflake.md | 93 +-
docs/data-sources/destination_timeplus.md | 13 +-
docs/data-sources/destination_typesense.md | 16 +-
docs/data-sources/destination_vertica.md | 100 +-
docs/data-sources/destination_weaviate.md | 36 +
docs/data-sources/destination_xata.md | 13 +-
docs/data-sources/source_aha.md | 18 +-
docs/data-sources/source_aircall.md | 19 +-
docs/data-sources/source_airtable.md | 70 +-
docs/data-sources/source_alloydb.md | 372 +-
docs/data-sources/source_amazon_ads.md | 28 +-
.../source_amazon_seller_partner.md | 33 +-
docs/data-sources/source_amazon_sqs.md | 26 +-
docs/data-sources/source_amplitude.md | 22 +-
docs/data-sources/source_apify_dataset.md | 19 +-
docs/data-sources/source_appfollow.md | 17 +-
docs/data-sources/source_asana.md | 70 +-
docs/data-sources/source_auth0.md | 68 +-
docs/data-sources/source_aws_cloudtrail.md | 20 +-
.../data-sources/source_azure_blob_storage.md | 46 +-
docs/data-sources/source_azure_table.md | 19 +-
docs/data-sources/source_bamboo_hr.md | 20 +-
docs/data-sources/source_bigcommerce.md | 49 -
docs/data-sources/source_bigquery.md | 19 +-
docs/data-sources/source_bing_ads.md | 24 +-
docs/data-sources/source_braintree.md | 22 +-
docs/data-sources/source_braze.md | 19 +-
docs/data-sources/source_cart.md | 36 +
docs/data-sources/source_chargebee.md | 21 +-
docs/data-sources/source_chartmogul.md | 20 +-
docs/data-sources/source_clickhouse.md | 103 +-
docs/data-sources/source_clickup_api.md | 22 +-
docs/data-sources/source_clockify.md | 19 +-
docs/data-sources/source_close_com.md | 18 +-
docs/data-sources/source_coda.md | 17 +-
docs/data-sources/source_coin_api.md | 29 +-
docs/data-sources/source_coinmarketcap.md | 20 +-
docs/data-sources/source_configcat.md | 18 +-
docs/data-sources/source_confluence.md | 19 +-
docs/data-sources/source_convex.md | 18 +-
docs/data-sources/source_datascope.md | 18 +-
docs/data-sources/source_delighted.md | 18 +-
docs/data-sources/source_dixa.md | 19 +-
docs/data-sources/source_dockerhub.md | 17 +-
docs/data-sources/source_dremio.md | 18 +-
docs/data-sources/source_dynamodb.md | 22 +-
docs/data-sources/source_e2e_test_cloud.md | 100 -
docs/data-sources/source_emailoctopus.md | 17 +-
docs/data-sources/source_exchange_rates.md | 20 +-
.../data-sources/source_facebook_marketing.md | 47 +-
docs/data-sources/source_facebook_pages.md | 18 +-
docs/data-sources/source_faker.md | 21 +-
docs/data-sources/source_fauna.md | 89 +-
docs/data-sources/source_file.md | 36 +
docs/data-sources/source_file_secure.md | 221 -
docs/data-sources/source_firebolt.md | 22 +-
docs/data-sources/source_freshcaller.md | 21 +-
docs/data-sources/source_freshdesk.md | 20 +-
docs/data-sources/source_freshsales.md | 18 +-
docs/data-sources/source_gainsight_px.md | 17 +-
docs/data-sources/source_gcs.md | 19 +-
docs/data-sources/source_getlago.md | 17 +-
docs/data-sources/source_github.md | 70 +-
docs/data-sources/source_gitlab.md | 74 +-
docs/data-sources/source_glassfrog.md | 17 +-
docs/data-sources/source_gnews.md | 58 +-
docs/data-sources/source_google_ads.md | 43 +-
.../source_google_analytics_data_api.md | 72 +-
.../source_google_analytics_v4.md | 102 -
docs/data-sources/source_google_directory.md | 72 +-
docs/data-sources/source_google_drive.md | 36 +
.../source_google_pagespeed_insights.md | 20 +-
.../source_google_search_console.md | 87 +-
docs/data-sources/source_google_sheets.md | 68 +-
docs/data-sources/source_google_webfonts.md | 20 +-
.../source_google_workspace_admin_reports.md | 19 +-
docs/data-sources/source_greenhouse.md | 17 +-
docs/data-sources/source_gridly.md | 18 +-
docs/data-sources/source_harvest.md | 85 +-
docs/data-sources/source_hubplanner.md | 17 +-
docs/data-sources/source_hubspot.md | 71 +-
docs/data-sources/source_insightly.md | 18 +-
docs/data-sources/source_instagram.md | 20 +-
docs/data-sources/source_instatus.md | 17 +-
docs/data-sources/source_intercom.md | 20 +-
docs/data-sources/source_ip2whois.md | 18 +-
docs/data-sources/source_iterable.md | 18 +-
docs/data-sources/source_jira.md | 24 +-
docs/data-sources/source_k6_cloud.md | 17 +-
docs/data-sources/source_klarna.md | 21 +-
docs/data-sources/source_klaviyo.md | 18 +-
docs/data-sources/source_kustomer_singer.md | 18 +-
docs/data-sources/source_kyve.md | 21 +-
docs/data-sources/source_launchdarkly.md | 17 +-
docs/data-sources/source_lemlist.md | 17 +-
docs/data-sources/source_lever_hiring.md | 69 +-
docs/data-sources/source_linkedin_ads.md | 81 +-
docs/data-sources/source_linkedin_pages.md | 67 +-
docs/data-sources/source_linnworks.md | 20 +-
docs/data-sources/source_lokalise.md | 18 +-
docs/data-sources/source_mailchimp.md | 67 +-
docs/data-sources/source_mailgun.md | 19 +-
docs/data-sources/source_mailjet_sms.md | 19 +-
docs/data-sources/source_marketo.md | 20 +-
docs/data-sources/source_metabase.md | 25 +-
docs/data-sources/source_microsoft_teams.md | 73 +-
docs/data-sources/source_mixpanel.md | 73 +-
docs/data-sources/source_monday.md | 68 +-
docs/data-sources/source_mongodb.md | 128 -
.../source_mongodb_internal_poc.md | 21 +-
docs/data-sources/source_mongodb_v2.md | 36 +
docs/data-sources/source_mssql.md | 205 +-
docs/data-sources/source_my_hours.md | 20 +-
docs/data-sources/source_mysql.md | 248 +-
docs/data-sources/source_netsuite.md | 24 +-
docs/data-sources/source_notion.md | 67 +-
docs/data-sources/source_nytimes.md | 23 +-
docs/data-sources/source_okta.md | 68 +-
docs/data-sources/source_omnisend.md | 17 +-
docs/data-sources/source_onesignal.md | 29 +-
docs/data-sources/source_oracle.md | 205 +-
docs/data-sources/source_orb.md | 23 +-
docs/data-sources/source_orbit.md | 19 +-
docs/data-sources/source_outbrain_amplify.md | 70 +-
docs/data-sources/source_outreach.md | 21 +-
.../data-sources/source_paypal_transaction.md | 21 +-
docs/data-sources/source_paystack.md | 19 +-
docs/data-sources/source_pendo.md | 17 +-
docs/data-sources/source_persistiq.md | 17 +-
docs/data-sources/source_pexels_api.md | 22 +-
docs/data-sources/source_pinterest.md | 68 +-
docs/data-sources/source_pipedrive.md | 26 +-
docs/data-sources/source_pocket.md | 31 +-
docs/data-sources/source_pokeapi.md | 17 +-
docs/data-sources/source_polygon_stock_api.md | 25 +-
docs/data-sources/source_postgres.md | 372 +-
docs/data-sources/source_posthog.md | 20 +-
docs/data-sources/source_postmarkapp.md | 18 +-
docs/data-sources/source_prestashop.md | 19 +-
docs/data-sources/source_punk_api.md | 19 +-
docs/data-sources/source_pypi.md | 18 +-
docs/data-sources/source_qualaroo.md | 20 +-
docs/data-sources/source_quickbooks.md | 54 +-
docs/data-sources/source_railz.md | 19 +-
docs/data-sources/source_recharge.md | 18 +-
docs/data-sources/source_recreation.md | 18 +-
docs/data-sources/source_recruitee.md | 18 +-
docs/data-sources/source_recurly.md | 19 +-
docs/data-sources/source_redshift.md | 23 +-
docs/data-sources/source_retently.md | 82 +-
docs/data-sources/source_rki_covid.md | 17 +-
docs/data-sources/source_rss.md | 17 +-
docs/data-sources/source_s3.md | 350 +-
docs/data-sources/source_salesforce.md | 32 +-
docs/data-sources/source_salesloft.md | 71 +-
docs/data-sources/source_sap_fieldglass.md | 17 +-
docs/data-sources/source_secoda.md | 17 +-
docs/data-sources/source_sendgrid.md | 18 +-
docs/data-sources/source_sendinblue.md | 17 +-
docs/data-sources/source_senseforce.md | 21 +-
docs/data-sources/source_sentry.md | 21 +-
docs/data-sources/source_sftp.md | 72 +-
docs/data-sources/source_sftp_bulk.md | 29 +-
docs/data-sources/source_shopify.md | 68 +-
docs/data-sources/source_shortio.md | 19 +-
docs/data-sources/source_slack.md | 70 +-
docs/data-sources/source_smaily.md | 19 +-
docs/data-sources/source_smartengage.md | 17 +-
docs/data-sources/source_smartsheets.md | 73 +-
.../data-sources/source_snapchat_marketing.md | 21 +-
docs/data-sources/source_snowflake.md | 76 +-
docs/data-sources/source_sonar_cloud.md | 21 +-
docs/data-sources/source_spacex_api.md | 18 +-
docs/data-sources/source_square.md | 69 +-
docs/data-sources/source_strava.md | 22 +-
docs/data-sources/source_stripe.md | 21 +-
docs/data-sources/source_survey_sparrow.md | 60 +-
docs/data-sources/source_surveymonkey.md | 31 +-
docs/data-sources/source_tempo.md | 17 +-
docs/data-sources/source_the_guardian_api.md | 22 +-
docs/data-sources/source_tiktok_marketing.md | 74 +-
docs/data-sources/source_todoist.md | 17 +-
docs/data-sources/source_trello.md | 20 +-
docs/data-sources/source_trustpilot.md | 72 +-
docs/data-sources/source_tvmaze_schedule.md | 22 +-
docs/data-sources/source_twilio.md | 20 +-
docs/data-sources/source_twilio_taskrouter.md | 18 +-
docs/data-sources/source_twitter.md | 20 +-
docs/data-sources/source_typeform.md | 72 +-
docs/data-sources/source_us_census.md | 19 +-
docs/data-sources/source_vantage.md | 17 +-
docs/data-sources/source_webflow.md | 18 +-
docs/data-sources/source_whisky_hunter.md | 16 +-
.../source_wikipedia_pageviews.md | 23 +-
docs/data-sources/source_woocommerce.md | 20 +-
docs/data-sources/source_xero.md | 60 -
docs/data-sources/source_xkcd.md | 16 +-
docs/data-sources/source_yandex_metrica.md | 20 +-
docs/data-sources/source_yotpo.md | 20 +-
docs/data-sources/source_younium.md | 50 -
docs/data-sources/source_youtube_analytics.md | 30 +-
docs/data-sources/source_zendesk_chat.md | 70 +-
docs/data-sources/source_zendesk_sell.md | 36 +
docs/data-sources/source_zendesk_sunshine.md | 70 +-
docs/data-sources/source_zendesk_support.md | 87 +-
docs/data-sources/source_zendesk_talk.md | 86 +-
docs/data-sources/source_zenloop.md | 20 +-
docs/data-sources/source_zoho_crm.md | 26 +-
docs/data-sources/source_zoom.md | 17 +-
docs/data-sources/source_zuora.md | 24 +-
docs/data-sources/workspace.md | 4 +-
docs/index.md | 2 +-
docs/resources/connection.md | 25 +-
docs/resources/destination_aws_datalake.md | 139 +-
.../destination_azure_blob_storage.md | 70 +-
docs/resources/destination_bigquery.md | 120 +-
.../destination_bigquery_denormalized.md | 172 -
docs/resources/destination_clickhouse.md | 109 +-
docs/resources/destination_convex.md | 19 +-
docs/resources/destination_cumulio.md | 29 +-
docs/resources/destination_databend.md | 33 +-
docs/resources/destination_databricks.md | 113 +-
docs/resources/destination_dev_null.md | 36 +-
docs/resources/destination_duckdb.md | 58 +
docs/resources/destination_dynamodb.md | 26 +-
docs/resources/destination_elasticsearch.md | 63 +-
docs/resources/destination_firebolt.md | 69 +-
docs/resources/destination_firestore.md | 13 +-
docs/resources/destination_gcs.md | 400 +-
docs/resources/destination_google_sheets.md | 17 +-
docs/resources/destination_keen.md | 24 +-
docs/resources/destination_kinesis.md | 34 +-
docs/resources/destination_langchain.md | 145 +-
docs/resources/destination_milvus.md | 233 +-
docs/resources/destination_mongodb.md | 204 +-
docs/resources/destination_mssql.md | 160 +-
docs/resources/destination_mysql.md | 109 +-
docs/resources/destination_oracle.md | 114 +-
docs/resources/destination_pinecone.md | 155 +-
docs/resources/destination_postgres.md | 244 +-
docs/resources/destination_pubsub.md | 30 +-
docs/resources/destination_qdrant.md | 283 +
docs/resources/destination_redis.md | 166 +-
docs/resources/destination_redshift.md | 249 +-
docs/resources/destination_s3.md | 381 +-
docs/resources/destination_s3_glue.md | 115 +-
docs/resources/destination_sftp_json.md | 20 +-
docs/resources/destination_snowflake.md | 114 +-
docs/resources/destination_timeplus.md | 25 +-
docs/resources/destination_typesense.md | 25 +-
docs/resources/destination_vertica.md | 111 +-
docs/resources/destination_weaviate.md | 289 +
docs/resources/destination_xata.md | 19 +-
docs/resources/source_aha.md | 18 +-
docs/resources/source_aircall.md | 20 +-
docs/resources/source_airtable.md | 72 +-
docs/resources/source_alloydb.md | 334 +-
docs/resources/source_amazon_ads.md | 33 +-
.../resources/source_amazon_seller_partner.md | 46 +-
docs/resources/source_amazon_sqs.md | 23 +-
docs/resources/source_amplitude.md | 23 +-
docs/resources/source_apify_dataset.md | 25 +-
docs/resources/source_appfollow.md | 17 +-
docs/resources/source_asana.md | 77 +-
docs/resources/source_auth0.md | 58 +-
docs/resources/source_aws_cloudtrail.md | 22 +-
docs/resources/source_azure_blob_storage.md | 172 +-
docs/resources/source_azure_table.md | 19 +-
docs/resources/source_bamboo_hr.md | 20 +-
docs/resources/source_bigcommerce.md | 57 -
docs/resources/source_bigquery.md | 12 +-
docs/resources/source_bing_ads.md | 63 +-
docs/resources/source_braintree.md | 18 +-
docs/resources/source_braze.md | 20 +-
docs/resources/source_cart.md | 90 +
docs/resources/source_chargebee.md | 16 +-
docs/resources/source_chartmogul.md | 21 +-
docs/resources/source_clickhouse.md | 106 +-
docs/resources/source_clickup_api.md | 17 +-
docs/resources/source_clockify.md | 17 +-
docs/resources/source_close_com.md | 21 +-
docs/resources/source_coda.md | 16 +-
docs/resources/source_coin_api.md | 23 +-
docs/resources/source_coinmarketcap.md | 18 +-
docs/resources/source_configcat.md | 18 +-
docs/resources/source_confluence.md | 14 +-
docs/resources/source_convex.md | 14 +-
docs/resources/source_datascope.md | 18 +-
docs/resources/source_delighted.md | 18 +-
docs/resources/source_dixa.md | 23 +-
docs/resources/source_dockerhub.md | 12 +-
docs/resources/source_dremio.md | 24 +-
docs/resources/source_dynamodb.md | 23 +-
docs/resources/source_e2e_test_cloud.md | 124 -
docs/resources/source_emailoctopus.md | 16 +-
docs/resources/source_exchange_rates.md | 21 +-
docs/resources/source_facebook_marketing.md | 63 +-
docs/resources/source_facebook_pages.md | 14 +-
docs/resources/source_faker.md | 38 +-
docs/resources/source_fauna.md | 92 +-
docs/resources/source_file.md | 164 +
docs/resources/source_file_secure.md | 283 -
docs/resources/source_firebolt.md | 26 +-
docs/resources/source_freshcaller.md | 20 +-
docs/resources/source_freshdesk.md | 16 +-
docs/resources/source_freshsales.md | 14 +-
docs/resources/source_gainsight_px.md | 16 +-
docs/resources/source_gcs.md | 148 +-
docs/resources/source_getlago.md | 22 +-
docs/resources/source_github.md | 85 +-
docs/resources/source_gitlab.md | 100 +-
docs/resources/source_glassfrog.md | 16 +-
docs/resources/source_gnews.md | 28 +-
docs/resources/source_google_ads.md | 21 +-
.../source_google_analytics_data_api.md | 1285 +-
docs/resources/source_google_analytics_v4.md | 135 -
docs/resources/source_google_directory.md | 77 +-
docs/resources/source_google_drive.md | 226 +
.../source_google_pagespeed_insights.md | 16 +-
.../resources/source_google_search_console.md | 73 +-
docs/resources/source_google_sheets.md | 56 +-
docs/resources/source_google_webfonts.md | 14 +-
.../source_google_workspace_admin_reports.md | 16 +-
docs/resources/source_greenhouse.md | 16 +-
docs/resources/source_gridly.md | 18 +-
docs/resources/source_harvest.md | 70 +-
docs/resources/source_hubplanner.md | 16 +-
docs/resources/source_hubspot.md | 67 +-
docs/resources/source_insightly.md | 18 +-
docs/resources/source_instagram.md | 16 +-
docs/resources/source_instatus.md | 16 +-
docs/resources/source_intercom.md | 14 +-
docs/resources/source_ip2whois.md | 18 +-
docs/resources/source_iterable.md | 18 +-
docs/resources/source_jira.md | 36 +-
docs/resources/source_k6_cloud.md | 16 +-
docs/resources/source_klarna.md | 28 +-
docs/resources/source_klaviyo.md | 23 +-
docs/resources/source_kustomer_singer.md | 18 +-
docs/resources/source_kyve.md | 28 +-
docs/resources/source_launchdarkly.md | 14 +-
docs/resources/source_lemlist.md | 16 +-
docs/resources/source_lever_hiring.md | 70 +-
docs/resources/source_linkedin_ads.md | 75 +-
docs/resources/source_linkedin_pages.md | 69 +-
docs/resources/source_linnworks.md | 16 +-
docs/resources/source_lokalise.md | 18 +-
docs/resources/source_mailchimp.md | 63 +-
docs/resources/source_mailgun.md | 17 +-
docs/resources/source_mailjet_sms.md | 20 +-
docs/resources/source_marketo.md | 12 +-
docs/resources/source_metabase.md | 18 +-
docs/resources/source_microsoft_teams.md | 70 +-
docs/resources/source_mixpanel.md | 90 +-
docs/resources/source_monday.md | 65 +-
docs/resources/source_mongodb.md | 152 -
docs/resources/source_mongodb_internal_poc.md | 20 +-
docs/resources/source_mongodb_v2.md | 115 +
docs/resources/source_mssql.md | 201 +-
docs/resources/source_my_hours.md | 19 +-
docs/resources/source_mysql.md | 245 +-
docs/resources/source_netsuite.md | 23 +-
docs/resources/source_notion.md | 67 +-
docs/resources/source_nytimes.md | 24 +-
docs/resources/source_okta.md | 62 +-
docs/resources/source_omnisend.md | 16 +-
docs/resources/source_onesignal.md | 16 +-
docs/resources/source_oracle.md | 205 +-
docs/resources/source_orb.md | 29 +-
docs/resources/source_orbit.md | 20 +-
docs/resources/source_outbrain_amplify.md | 58 +-
docs/resources/source_outreach.md | 14 +-
docs/resources/source_paypal_transaction.md | 22 +-
docs/resources/source_paystack.md | 19 +-
docs/resources/source_pendo.md | 16 +-
docs/resources/source_persistiq.md | 16 +-
docs/resources/source_pexels_api.md | 18 +-
docs/resources/source_pinterest.md | 98 +-
docs/resources/source_pipedrive.md | 32 +-
docs/resources/source_pocket.md | 21 +-
docs/resources/source_pokeapi.md | 17 +-
docs/resources/source_polygon_stock_api.md | 20 +-
docs/resources/source_postgres.md | 332 +-
docs/resources/source_posthog.md | 22 +-
docs/resources/source_postmarkapp.md | 12 +-
docs/resources/source_prestashop.md | 20 +-
docs/resources/source_punk_api.md | 14 +-
docs/resources/source_pypi.md | 12 +-
docs/resources/source_qualaroo.md | 20 +-
docs/resources/source_quickbooks.md | 59 +-
docs/resources/source_railz.md | 20 +-
docs/resources/source_recharge.md | 14 +-
docs/resources/source_recreation.md | 14 +-
docs/resources/source_recruitee.md | 18 +-
docs/resources/source_recurly.md | 20 +-
docs/resources/source_redshift.md | 19 +-
docs/resources/source_retently.md | 70 +-
docs/resources/source_rki_covid.md | 14 +-
docs/resources/source_rss.md | 14 +-
docs/resources/source_s3.md | 370 +-
docs/resources/source_salesforce.md | 31 +-
docs/resources/source_salesloft.md | 65 +-
docs/resources/source_sap_fieldglass.md | 16 +-
docs/resources/source_secoda.md | 16 +-
docs/resources/source_sendgrid.md | 18 +-
docs/resources/source_sendinblue.md | 16 +-
docs/resources/source_senseforce.md | 19 +-
docs/resources/source_sentry.md | 19 +-
docs/resources/source_sftp.md | 71 +-
docs/resources/source_sftp_bulk.md | 37 +-
docs/resources/source_shopify.md | 68 +-
docs/resources/source_shortio.md | 20 +-
docs/resources/source_slack.md | 67 +-
docs/resources/source_smaily.md | 14 +-
docs/resources/source_smartengage.md | 16 +-
docs/resources/source_smartsheets.md | 80 +-
docs/resources/source_snapchat_marketing.md | 17 +-
docs/resources/source_snowflake.md | 62 +-
docs/resources/source_sonar_cloud.md | 16 +-
docs/resources/source_spacex_api.md | 16 +-
docs/resources/source_square.md | 73 +-
docs/resources/source_strava.md | 19 +-
docs/resources/source_stripe.md | 30 +-
docs/resources/source_survey_sparrow.md | 56 +-
docs/resources/source_surveymonkey.md | 22 +-
docs/resources/source_tempo.md | 16 +-
docs/resources/source_the_guardian_api.md | 26 +-
docs/resources/source_tiktok_marketing.md | 81 +-
docs/resources/source_todoist.md | 16 +-
docs/resources/source_trello.md | 24 +-
docs/resources/source_trustpilot.md | 73 +-
docs/resources/source_tvmaze_schedule.md | 14 +-
docs/resources/source_twilio.md | 17 +-
docs/resources/source_twilio_taskrouter.md | 14 +-
docs/resources/source_twitter.md | 22 +-
docs/resources/source_typeform.md | 77 +-
docs/resources/source_us_census.md | 16 +-
docs/resources/source_vantage.md | 14 +-
docs/resources/source_webflow.md | 18 +-
docs/resources/source_whisky_hunter.md | 18 +-
docs/resources/source_wikipedia_pageviews.md | 26 +-
docs/resources/source_woocommerce.md | 22 +-
docs/resources/source_xero.md | 74 -
docs/resources/source_xkcd.md | 18 +-
docs/resources/source_yandex_metrica.md | 22 +-
docs/resources/source_yotpo.md | 24 +-
docs/resources/source_younium.md | 62 -
docs/resources/source_youtube_analytics.md | 21 +-
docs/resources/source_zendesk_chat.md | 71 +-
docs/resources/source_zendesk_sell.md | 53 +
docs/resources/source_zendesk_sunshine.md | 64 +-
docs/resources/source_zendesk_support.md | 74 +-
docs/resources/source_zendesk_talk.md | 73 +-
docs/resources/source_zenloop.md | 14 +-
docs/resources/source_zoho_crm.md | 26 +-
docs/resources/source_zoom.md | 16 +-
docs/resources/source_zuora.md | 25 +-
docs/resources/workspace.md | 4 +-
examples/README.md | 0
.../airbyte_connection/data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 3 -
.../data-source.tf | 0
.../airbyte_destination_convex/data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../airbyte_destination_duckdb/data-source.tf | 3 +
.../data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../airbyte_destination_gcs/data-source.tf | 0
.../data-source.tf | 0
.../airbyte_destination_keen/data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../airbyte_destination_milvus/data-source.tf | 0
.../data-source.tf | 0
.../airbyte_destination_mssql/data-source.tf | 0
.../airbyte_destination_mysql/data-source.tf | 0
.../airbyte_destination_oracle/data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../airbyte_destination_pubsub/data-source.tf | 0
.../airbyte_destination_qdrant/data-source.tf | 3 +
.../airbyte_destination_redis/data-source.tf | 0
.../data-source.tf | 0
.../airbyte_destination_s3/data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 0
.../data-source.tf | 3 +
.../airbyte_destination_xata/data-source.tf | 0
.../airbyte_source_aha/data-source.tf | 1 -
.../airbyte_source_aircall/data-source.tf | 1 -
.../airbyte_source_airtable/data-source.tf | 1 -
.../airbyte_source_alloydb/data-source.tf | 1 -
.../airbyte_source_amazon_ads/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_amazon_sqs/data-source.tf | 1 -
.../airbyte_source_amplitude/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_appfollow/data-source.tf | 1 -
.../airbyte_source_asana/data-source.tf | 1 -
.../airbyte_source_auth0/data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_azure_table/data-source.tf | 1 -
.../airbyte_source_bamboo_hr/data-source.tf | 1 -
.../airbyte_source_bigcommerce/data-source.tf | 4 -
.../airbyte_source_bigquery/data-source.tf | 1 -
.../airbyte_source_bing_ads/data-source.tf | 1 -
.../airbyte_source_braintree/data-source.tf | 1 -
.../airbyte_source_braze/data-source.tf | 1 -
.../airbyte_source_cart/data-source.tf | 3 +
.../airbyte_source_chargebee/data-source.tf | 1 -
.../airbyte_source_chartmogul/data-source.tf | 1 -
.../airbyte_source_clickhouse/data-source.tf | 1 -
.../airbyte_source_clickup_api/data-source.tf | 1 -
.../airbyte_source_clockify/data-source.tf | 1 -
.../airbyte_source_close_com/data-source.tf | 1 -
.../airbyte_source_coda/data-source.tf | 1 -
.../airbyte_source_coin_api/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_configcat/data-source.tf | 1 -
.../airbyte_source_confluence/data-source.tf | 1 -
.../airbyte_source_convex/data-source.tf | 1 -
.../airbyte_source_datascope/data-source.tf | 1 -
.../airbyte_source_delighted/data-source.tf | 1 -
.../airbyte_source_dixa/data-source.tf | 1 -
.../airbyte_source_dockerhub/data-source.tf | 1 -
.../airbyte_source_dremio/data-source.tf | 1 -
.../airbyte_source_dynamodb/data-source.tf | 1 -
.../data-source.tf | 4 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_faker/data-source.tf | 1 -
.../airbyte_source_fauna/data-source.tf | 1 -
.../airbyte_source_file/data-source.tf | 3 +
.../airbyte_source_file_secure/data-source.tf | 4 -
.../airbyte_source_firebolt/data-source.tf | 1 -
.../airbyte_source_freshcaller/data-source.tf | 1 -
.../airbyte_source_freshdesk/data-source.tf | 1 -
.../airbyte_source_freshsales/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_gcs/data-source.tf | 1 -
.../airbyte_source_getlago/data-source.tf | 1 -
.../airbyte_source_github/data-source.tf | 1 -
.../airbyte_source_gitlab/data-source.tf | 1 -
.../airbyte_source_glassfrog/data-source.tf | 1 -
.../airbyte_source_gnews/data-source.tf | 1 -
.../airbyte_source_google_ads/data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 4 -
.../data-source.tf | 1 -
.../data-source.tf | 3 +
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_greenhouse/data-source.tf | 1 -
.../airbyte_source_gridly/data-source.tf | 1 -
.../airbyte_source_harvest/data-source.tf | 1 -
.../airbyte_source_hubplanner/data-source.tf | 1 -
.../airbyte_source_hubspot/data-source.tf | 1 -
.../airbyte_source_insightly/data-source.tf | 1 -
.../airbyte_source_instagram/data-source.tf | 1 -
.../airbyte_source_instatus/data-source.tf | 1 -
.../airbyte_source_intercom/data-source.tf | 1 -
.../airbyte_source_ip2whois/data-source.tf | 1 -
.../airbyte_source_iterable/data-source.tf | 1 -
.../airbyte_source_jira/data-source.tf | 1 -
.../airbyte_source_k6_cloud/data-source.tf | 1 -
.../airbyte_source_klarna/data-source.tf | 1 -
.../airbyte_source_klaviyo/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_kyve/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_lemlist/data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_linnworks/data-source.tf | 1 -
.../airbyte_source_lokalise/data-source.tf | 1 -
.../airbyte_source_mailchimp/data-source.tf | 1 -
.../airbyte_source_mailgun/data-source.tf | 1 -
.../airbyte_source_mailjet_sms/data-source.tf | 1 -
.../airbyte_source_marketo/data-source.tf | 1 -
.../airbyte_source_metabase/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_mixpanel/data-source.tf | 1 -
.../airbyte_source_monday/data-source.tf | 1 -
.../airbyte_source_mongodb/data-source.tf | 4 -
.../data-source.tf | 1 -
.../airbyte_source_mongodb_v2/data-source.tf | 3 +
.../airbyte_source_mssql/data-source.tf | 1 -
.../airbyte_source_my_hours/data-source.tf | 1 -
.../airbyte_source_mysql/data-source.tf | 1 -
.../airbyte_source_netsuite/data-source.tf | 1 -
.../airbyte_source_notion/data-source.tf | 1 -
.../airbyte_source_nytimes/data-source.tf | 1 -
.../airbyte_source_okta/data-source.tf | 1 -
.../airbyte_source_omnisend/data-source.tf | 1 -
.../airbyte_source_onesignal/data-source.tf | 1 -
.../airbyte_source_oracle/data-source.tf | 1 -
.../airbyte_source_orb/data-source.tf | 1 -
.../airbyte_source_orbit/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_outreach/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_paystack/data-source.tf | 1 -
.../airbyte_source_pendo/data-source.tf | 1 -
.../airbyte_source_persistiq/data-source.tf | 1 -
.../airbyte_source_pexels_api/data-source.tf | 1 -
.../airbyte_source_pinterest/data-source.tf | 1 -
.../airbyte_source_pipedrive/data-source.tf | 1 -
.../airbyte_source_pocket/data-source.tf | 1 -
.../airbyte_source_pokeapi/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_postgres/data-source.tf | 1 -
.../airbyte_source_posthog/data-source.tf | 1 -
.../airbyte_source_postmarkapp/data-source.tf | 1 -
.../airbyte_source_prestashop/data-source.tf | 1 -
.../airbyte_source_punk_api/data-source.tf | 1 -
.../airbyte_source_pypi/data-source.tf | 1 -
.../airbyte_source_qualaroo/data-source.tf | 1 -
.../airbyte_source_quickbooks/data-source.tf | 1 -
.../airbyte_source_railz/data-source.tf | 1 -
.../airbyte_source_recharge/data-source.tf | 1 -
.../airbyte_source_recreation/data-source.tf | 1 -
.../airbyte_source_recruitee/data-source.tf | 1 -
.../airbyte_source_recurly/data-source.tf | 1 -
.../airbyte_source_redshift/data-source.tf | 1 -
.../airbyte_source_retently/data-source.tf | 1 -
.../airbyte_source_rki_covid/data-source.tf | 1 -
.../airbyte_source_rss/data-source.tf | 1 -
.../airbyte_source_s3/data-source.tf | 1 -
.../airbyte_source_salesforce/data-source.tf | 1 -
.../airbyte_source_salesloft/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_secoda/data-source.tf | 1 -
.../airbyte_source_sendgrid/data-source.tf | 1 -
.../airbyte_source_sendinblue/data-source.tf | 1 -
.../airbyte_source_senseforce/data-source.tf | 1 -
.../airbyte_source_sentry/data-source.tf | 1 -
.../airbyte_source_sftp/data-source.tf | 1 -
.../airbyte_source_sftp_bulk/data-source.tf | 1 -
.../airbyte_source_shopify/data-source.tf | 1 -
.../airbyte_source_shortio/data-source.tf | 1 -
.../airbyte_source_slack/data-source.tf | 1 -
.../airbyte_source_smaily/data-source.tf | 1 -
.../airbyte_source_smartengage/data-source.tf | 1 -
.../airbyte_source_smartsheets/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_snowflake/data-source.tf | 1 -
.../airbyte_source_sonar_cloud/data-source.tf | 1 -
.../airbyte_source_spacex_api/data-source.tf | 1 -
.../airbyte_source_square/data-source.tf | 1 -
.../airbyte_source_strava/data-source.tf | 1 -
.../airbyte_source_stripe/data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_tempo/data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_todoist/data-source.tf | 1 -
.../airbyte_source_trello/data-source.tf | 1 -
.../airbyte_source_trustpilot/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_twilio/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_twitter/data-source.tf | 1 -
.../airbyte_source_typeform/data-source.tf | 1 -
.../airbyte_source_us_census/data-source.tf | 1 -
.../airbyte_source_vantage/data-source.tf | 1 -
.../airbyte_source_webflow/data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_woocommerce/data-source.tf | 1 -
.../airbyte_source_xero/data-source.tf | 4 -
.../airbyte_source_xkcd/data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_yotpo/data-source.tf | 1 -
.../airbyte_source_younium/data-source.tf | 4 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 3 +
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../data-source.tf | 1 -
.../airbyte_source_zenloop/data-source.tf | 1 -
.../airbyte_source_zoho_crm/data-source.tf | 1 -
.../airbyte_source_zoom/data-source.tf | 1 -
.../airbyte_source_zuora/data-source.tf | 1 -
.../airbyte_workspace/data-source.tf | 0
examples/provider/provider.tf | 2 +-
.../resources/airbyte_connection/resource.tf | 16 +-
.../resource.tf | 21 +-
.../resource.tf | 11 +-
.../airbyte_destination_bigquery/resource.tf | 18 +-
.../resource.tf | 28 -
.../resource.tf | 22 +-
.../airbyte_destination_convex/resource.tf | 10 +-
.../airbyte_destination_cumulio/resource.tf | 12 +-
.../airbyte_destination_databend/resource.tf | 18 +-
.../resource.tf | 10 +-
.../airbyte_destination_dev_null/resource.tf | 10 +-
.../airbyte_destination_duckdb/resource.tf | 10 +
.../airbyte_destination_dynamodb/resource.tf | 8 +-
.../resource.tf | 15 +-
.../airbyte_destination_firebolt/resource.tf | 17 +-
.../airbyte_destination_firestore/resource.tf | 6 +-
.../airbyte_destination_gcs/resource.tf | 12 +-
.../resource.tf | 8 +-
.../airbyte_destination_keen/resource.tf | 12 +-
.../airbyte_destination_kinesis/resource.tf | 18 +-
.../airbyte_destination_langchain/resource.tf | 17 +-
.../airbyte_destination_milvus/resource.tf | 33 +-
.../airbyte_destination_mongodb/resource.tf | 21 +-
.../airbyte_destination_mssql/resource.tf | 28 +-
.../airbyte_destination_mysql/resource.tf | 22 +-
.../airbyte_destination_oracle/resource.tf | 24 +-
.../airbyte_destination_pinecone/resource.tf | 30 +-
.../airbyte_destination_postgres/resource.tf | 28 +-
.../airbyte_destination_pubsub/resource.tf | 8 +-
.../airbyte_destination_qdrant/resource.tf | 49 +
.../airbyte_destination_redis/resource.tf | 26 +-
.../airbyte_destination_redshift/resource.tf | 36 +-
.../airbyte_destination_s3/resource.tf | 14 +-
.../airbyte_destination_s3_glue/resource.tf | 18 +-
.../airbyte_destination_sftp_json/resource.tf | 8 +-
.../airbyte_destination_snowflake/resource.tf | 26 +-
.../airbyte_destination_timeplus/resource.tf | 10 +-
.../airbyte_destination_typesense/resource.tf | 16 +-
.../airbyte_destination_vertica/resource.tf | 24 +-
.../airbyte_destination_weaviate/resource.tf | 52 +
.../airbyte_destination_xata/resource.tf | 10 +-
.../resources/airbyte_source_aha/resource.tf | 12 +-
.../airbyte_source_aircall/resource.tf | 14 +-
.../airbyte_source_airtable/resource.tf | 13 +-
.../airbyte_source_alloydb/resource.tf | 26 +-
.../airbyte_source_amazon_ads/resource.tf | 21 +-
.../resource.tf | 22 +-
.../airbyte_source_amazon_sqs/resource.tf | 12 +-
.../airbyte_source_amplitude/resource.tf | 10 +-
.../airbyte_source_apify_dataset/resource.tf | 13 +-
.../airbyte_source_appfollow/resource.tf | 10 +-
.../airbyte_source_asana/resource.tf | 15 +-
.../airbyte_source_auth0/resource.tf | 13 +-
.../airbyte_source_aws_cloudtrail/resource.tf | 8 +-
.../resource.tf | 43 +-
.../airbyte_source_azure_table/resource.tf | 10 +-
.../airbyte_source_bamboo_hr/resource.tf | 8 +-
.../airbyte_source_bigcommerce/resource.tf | 11 -
.../airbyte_source_bigquery/resource.tf | 8 +-
.../airbyte_source_bing_ads/resource.tf | 27 +-
.../airbyte_source_braintree/resource.tf | 10 +-
.../airbyte_source_braze/resource.tf | 14 +-
.../resources/airbyte_source_cart/resource.tf | 16 +
.../airbyte_source_chargebee/resource.tf | 10 +-
.../airbyte_source_chartmogul/resource.tf | 13 +-
.../airbyte_source_clickhouse/resource.tf | 22 +-
.../airbyte_source_clickup_api/resource.tf | 8 +-
.../airbyte_source_clockify/resource.tf | 8 +-
.../airbyte_source_close_com/resource.tf | 12 +-
.../resources/airbyte_source_coda/resource.tf | 10 +-
.../airbyte_source_coin_api/resource.tf | 10 +-
.../airbyte_source_coinmarketcap/resource.tf | 12 +-
.../airbyte_source_configcat/resource.tf | 12 +-
.../airbyte_source_confluence/resource.tf | 8 +-
.../airbyte_source_convex/resource.tf | 8 +-
.../airbyte_source_datascope/resource.tf | 12 +-
.../airbyte_source_delighted/resource.tf | 12 +-
.../resources/airbyte_source_dixa/resource.tf | 14 +-
.../airbyte_source_dockerhub/resource.tf | 8 +-
.../airbyte_source_dremio/resource.tf | 12 +-
.../airbyte_source_dynamodb/resource.tf | 10 +-
.../airbyte_source_e2e_test_cloud/resource.tf | 18 -
.../airbyte_source_emailoctopus/resource.tf | 10 +-
.../airbyte_source_exchange_rates/resource.tf | 12 +-
.../resource.tf | 27 +-
.../airbyte_source_facebook_pages/resource.tf | 8 +-
.../airbyte_source_faker/resource.tf | 16 +-
.../airbyte_source_fauna/resource.tf | 22 +-
.../resources/airbyte_source_file/resource.tf | 19 +
.../airbyte_source_file_secure/resource.tf | 20 -
.../airbyte_source_firebolt/resource.tf | 20 +-
.../airbyte_source_freshcaller/resource.tf | 12 +-
.../airbyte_source_freshdesk/resource.tf | 10 +-
.../airbyte_source_freshsales/resource.tf | 8 +-
.../airbyte_source_gainsight_px/resource.tf | 10 +-
.../resources/airbyte_source_gcs/resource.tf | 53 +-
.../airbyte_source_getlago/resource.tf | 11 +-
.../airbyte_source_github/resource.tf | 20 +-
.../airbyte_source_gitlab/resource.tf | 27 +-
.../airbyte_source_glassfrog/resource.tf | 10 +-
.../airbyte_source_gnews/resource.tf | 22 +-
.../airbyte_source_google_ads/resource.tf | 8 +-
.../resource.tf | 75 +-
.../resource.tf | 21 -
.../resource.tf | 15 +-
.../airbyte_source_google_drive/resource.tf | 35 +
.../resource.tf | 10 +-
.../resource.tf | 17 +-
.../airbyte_source_google_sheets/resource.tf | 11 +-
.../resource.tf | 8 +-
.../resource.tf | 12 +-
.../airbyte_source_greenhouse/resource.tf | 10 +-
.../airbyte_source_gridly/resource.tf | 12 +-
.../airbyte_source_harvest/resource.tf | 18 +-
.../airbyte_source_hubplanner/resource.tf | 10 +-
.../airbyte_source_hubspot/resource.tf | 19 +-
.../airbyte_source_insightly/resource.tf | 12 +-
.../airbyte_source_instagram/resource.tf | 8 +-
.../airbyte_source_instatus/resource.tf | 10 +-
.../airbyte_source_intercom/resource.tf | 8 +-
.../airbyte_source_ip2whois/resource.tf | 12 +-
.../airbyte_source_iterable/resource.tf | 12 +-
.../resources/airbyte_source_jira/resource.tf | 18 +-
.../airbyte_source_k6_cloud/resource.tf | 10 +-
.../airbyte_source_klarna/resource.tf | 16 +-
.../airbyte_source_klaviyo/resource.tf | 12 +-
.../resource.tf | 12 +-
.../resources/airbyte_source_kyve/resource.tf | 18 +-
.../airbyte_source_launchdarkly/resource.tf | 8 +-
.../airbyte_source_lemlist/resource.tf | 10 +-
.../airbyte_source_lever_hiring/resource.tf | 15 +-
.../airbyte_source_linkedin_ads/resource.tf | 19 +-
.../airbyte_source_linkedin_pages/resource.tf | 13 +-
.../airbyte_source_linnworks/resource.tf | 10 +-
.../airbyte_source_lokalise/resource.tf | 12 +-
.../airbyte_source_mailchimp/resource.tf | 13 +-
.../airbyte_source_mailgun/resource.tf | 8 +-
.../airbyte_source_mailjet_sms/resource.tf | 14 +-
.../airbyte_source_marketo/resource.tf | 8 +-
.../airbyte_source_metabase/resource.tf | 10 +-
.../resource.tf | 13 +-
.../airbyte_source_mixpanel/resource.tf | 18 +-
.../airbyte_source_monday/resource.tf | 11 +-
.../airbyte_source_mongodb/resource.tf | 18 -
.../resource.tf | 8 +-
.../airbyte_source_mongodb_v2/resource.tf | 21 +
.../airbyte_source_mssql/resource.tf | 25 +-
.../airbyte_source_my_hours/resource.tf | 10 +-
.../airbyte_source_mysql/resource.tf | 23 +-
.../airbyte_source_netsuite/resource.tf | 10 +-
.../airbyte_source_notion/resource.tf | 15 +-
.../airbyte_source_nytimes/resource.tf | 18 +-
.../resources/airbyte_source_okta/resource.tf | 15 +-
.../airbyte_source_omnisend/resource.tf | 10 +-
.../airbyte_source_onesignal/resource.tf | 8 +-
.../airbyte_source_oracle/resource.tf | 26 +-
.../resources/airbyte_source_orb/resource.tf | 14 +-
.../airbyte_source_orbit/resource.tf | 14 +-
.../resource.tf | 15 +-
.../airbyte_source_outreach/resource.tf | 8 +-
.../resource.tf | 9 +-
.../airbyte_source_paystack/resource.tf | 10 +-
.../airbyte_source_pendo/resource.tf | 10 +-
.../airbyte_source_persistiq/resource.tf | 10 +-
.../airbyte_source_pexels_api/resource.tf | 12 +-
.../airbyte_source_pinterest/resource.tf | 35 +-
.../airbyte_source_pipedrive/resource.tf | 15 +-
.../airbyte_source_pocket/resource.tf | 10 +-
.../airbyte_source_pokeapi/resource.tf | 10 +-
.../resource.tf | 16 +-
.../airbyte_source_postgres/resource.tf | 22 +-
.../airbyte_source_posthog/resource.tf | 10 +-
.../airbyte_source_postmarkapp/resource.tf | 8 +-
.../airbyte_source_prestashop/resource.tf | 14 +-
.../airbyte_source_punk_api/resource.tf | 10 +-
.../resources/airbyte_source_pypi/resource.tf | 8 +-
.../airbyte_source_qualaroo/resource.tf | 12 +-
.../airbyte_source_quickbooks/resource.tf | 17 +-
.../airbyte_source_railz/resource.tf | 14 +-
.../airbyte_source_recharge/resource.tf | 8 +-
.../airbyte_source_recreation/resource.tf | 8 +-
.../airbyte_source_recruitee/resource.tf | 12 +-
.../airbyte_source_recurly/resource.tf | 14 +-
.../airbyte_source_redshift/resource.tf | 10 +-
.../airbyte_source_retently/resource.tf | 18 +-
.../airbyte_source_rki_covid/resource.tf | 10 +-
.../resources/airbyte_source_rss/resource.tf | 10 +-
.../resources/airbyte_source_s3/resource.tf | 28 +-
.../airbyte_source_salesforce/resource.tf | 13 +-
.../airbyte_source_salesloft/resource.tf | 15 +-
.../airbyte_source_sap_fieldglass/resource.tf | 10 +-
.../airbyte_source_secoda/resource.tf | 10 +-
.../airbyte_source_sendgrid/resource.tf | 12 +-
.../airbyte_source_sendinblue/resource.tf | 10 +-
.../airbyte_source_senseforce/resource.tf | 10 +-
.../airbyte_source_sentry/resource.tf | 10 +-
.../resources/airbyte_source_sftp/resource.tf | 13 +-
.../airbyte_source_sftp_bulk/resource.tf | 12 +-
.../airbyte_source_shopify/resource.tf | 15 +-
.../airbyte_source_shortio/resource.tf | 14 +-
.../airbyte_source_slack/resource.tf | 17 +-
.../airbyte_source_smaily/resource.tf | 8 +-
.../airbyte_source_smartengage/resource.tf | 10 +-
.../airbyte_source_smartsheets/resource.tf | 15 +-
.../resource.tf | 8 +-
.../airbyte_source_snowflake/resource.tf | 11 +-
.../airbyte_source_sonar_cloud/resource.tf | 8 +-
.../airbyte_source_spacex_api/resource.tf | 12 +-
.../airbyte_source_square/resource.tf | 17 +-
.../airbyte_source_strava/resource.tf | 9 +-
.../airbyte_source_stripe/resource.tf | 14 +-
.../airbyte_source_survey_sparrow/resource.tf | 12 +-
.../airbyte_source_surveymonkey/resource.tf | 13 +-
.../airbyte_source_tempo/resource.tf | 10 +-
.../resource.tf | 20 +-
.../resource.tf | 17 +-
.../airbyte_source_todoist/resource.tf | 10 +-
.../airbyte_source_trello/resource.tf | 14 +-
.../airbyte_source_trustpilot/resource.tf | 13 +-
.../resource.tf | 10 +-
.../airbyte_source_twilio/resource.tf | 8 +-
.../resource.tf | 8 +-
.../airbyte_source_twitter/resource.tf | 16 +-
.../airbyte_source_typeform/resource.tf | 15 +-
.../airbyte_source_us_census/resource.tf | 10 +-
.../airbyte_source_vantage/resource.tf | 8 +-
.../airbyte_source_webflow/resource.tf | 12 +-
.../airbyte_source_whisky_hunter/resource.tf | 11 +-
.../resource.tf | 22 +-
.../airbyte_source_woocommerce/resource.tf | 16 +-
.../resources/airbyte_source_xero/resource.tf | 17 -
.../resources/airbyte_source_xkcd/resource.tf | 11 +-
.../airbyte_source_yandex_metrica/resource.tf | 16 +-
.../airbyte_source_yotpo/resource.tf | 10 +-
.../airbyte_source_younium/resource.tf | 12 -
.../resource.tf | 15 +-
.../airbyte_source_zendesk_chat/resource.tf | 15 +-
.../airbyte_source_zendesk_sell/resource.tf | 9 +
.../resource.tf | 19 +-
.../resource.tf | 18 +-
.../airbyte_source_zendesk_talk/resource.tf | 20 +-
.../airbyte_source_zenloop/resource.tf | 8 +-
.../airbyte_source_zoho_crm/resource.tf | 16 +-
.../resources/airbyte_source_zoom/resource.tf | 10 +-
.../airbyte_source_zuora/resource.tf | 14 +-
.../resources/airbyte_workspace/resource.tf | 2 +-
files.gen | 1364 +-
gen.yaml | 24 +-
go.mod | 4 +-
go.sum | 4 +
.../boolplanmodifier/suppress_diff.go | 2 +-
.../float64planmodifier/suppress_diff.go | 2 +-
.../int64planmodifier/suppress_diff.go | 2 +-
.../listplanmodifier/suppress_diff.go | 2 +-
.../mapplanmodifier/suppress_diff.go | 2 +-
.../numberplanmodifier/suppress_diff.go | 2 +-
.../objectplanmodifier/suppress_diff.go | 2 +-
.../setplanmodifier/suppress_diff.go | 2 +-
.../stringplanmodifier/suppress_diff.go | 2 +-
internal/planmodifiers/utils/state_check.go | 0
internal/provider/connection_data_source.go | 100 +-
.../provider/connection_data_source_sdk.go | 27 +-
internal/provider/connection_resource.go | 44 +-
internal/provider/connection_resource_sdk.go | 23 +-
.../destination_awsdatalake_data_source.go | 331 +-
...destination_awsdatalake_data_source_sdk.go | 6 +-
.../destination_awsdatalake_resource.go | 200 +-
.../destination_awsdatalake_resource_sdk.go | 197 +-
...estination_azureblobstorage_data_source.go | 149 +-
...nation_azureblobstorage_data_source_sdk.go | 6 +-
.../destination_azureblobstorage_resource.go | 123 +-
...stination_azureblobstorage_resource_sdk.go | 81 +-
.../destination_bigquery_data_source.go | 293 +-
.../destination_bigquery_data_source_sdk.go | 6 +-
.../provider/destination_bigquery_resource.go | 194 +-
.../destination_bigquery_resource_sdk.go | 175 +-
...nation_bigquerydenormalized_data_source.go | 389 -
...on_bigquerydenormalized_data_source_sdk.go | 14 -
...stination_bigquerydenormalized_resource.go | 555 -
...ation_bigquerydenormalized_resource_sdk.go | 235 -
.../destination_clickhouse_data_source.go | 227 +-
.../destination_clickhouse_data_source_sdk.go | 6 +-
.../destination_clickhouse_resource.go | 175 +-
.../destination_clickhouse_resource_sdk.go | 189 +-
.../destination_convex_data_source.go | 41 +-
.../destination_convex_data_source_sdk.go | 6 +-
.../provider/destination_convex_resource.go | 34 +-
.../destination_convex_resource_sdk.go | 15 +-
.../destination_cumulio_data_source.go | 45 +-
.../destination_cumulio_data_source_sdk.go | 6 +-
.../provider/destination_cumulio_resource.go | 40 +-
.../destination_cumulio_resource_sdk.go | 31 +-
.../destination_databend_data_source.go | 57 +-
.../destination_databend_data_source_sdk.go | 6 +-
.../provider/destination_databend_resource.go | 44 +-
.../destination_databend_resource_sdk.go | 23 +-
.../destination_databricks_data_source.go | 311 +-
.../destination_databricks_data_source_sdk.go | 6 +-
.../destination_databricks_resource.go | 227 +-
.../destination_databricks_resource_sdk.go | 165 +-
.../destination_devnull_data_source.go | 73 +-
.../destination_devnull_data_source_sdk.go | 6 +-
.../provider/destination_devnull_resource.go | 67 +-
.../destination_devnull_resource_sdk.go | 41 +-
.../destination_duckdb_data_source.go | 137 +
.../destination_duckdb_data_source_sdk.go | 18 +
...urce.go => destination_duckdb_resource.go} | 167 +-
.../destination_duckdb_resource_sdk.go | 94 +
.../destination_dynamodb_data_source.go | 84 +-
.../destination_dynamodb_data_source_sdk.go | 6 +-
.../provider/destination_dynamodb_resource.go | 44 +-
.../destination_dynamodb_resource_sdk.go | 25 +-
.../destination_elasticsearch_data_source.go | 147 +-
...stination_elasticsearch_data_source_sdk.go | 6 +-
.../destination_elasticsearch_resource.go | 110 +-
.../destination_elasticsearch_resource_sdk.go | 83 +-
.../destination_firebolt_data_source.go | 159 +-
.../destination_firebolt_data_source_sdk.go | 6 +-
.../provider/destination_firebolt_resource.go | 112 +-
.../destination_firebolt_resource_sdk.go | 91 +-
.../destination_firestore_data_source.go | 41 +-
.../destination_firestore_data_source_sdk.go | 6 +-
.../destination_firestore_resource.go | 33 +-
.../destination_firestore_resource_sdk.go | 11 +-
.../provider/destination_gcs_data_source.go | 750 +-
.../destination_gcs_data_source_sdk.go | 6 +-
internal/provider/destination_gcs_resource.go | 511 +-
.../provider/destination_gcs_resource_sdk.go | 685 +-
.../destination_googlesheets_data_source.go | 55 +-
...estination_googlesheets_data_source_sdk.go | 6 +-
.../destination_googlesheets_resource.go | 34 +-
.../destination_googlesheets_resource_sdk.go | 17 +-
.../provider/destination_keen_data_source.go | 45 +-
.../destination_keen_data_source_sdk.go | 6 +-
.../provider/destination_keen_resource.go | 39 +-
.../provider/destination_keen_resource_sdk.go | 17 +-
.../destination_kinesis_data_source.go | 57 +-
.../destination_kinesis_data_source_sdk.go | 6 +-
.../provider/destination_kinesis_resource.go | 43 +-
.../destination_kinesis_resource_sdk.go | 51 +-
.../destination_langchain_data_source.go | 272 +-
.../destination_langchain_data_source_sdk.go | 6 +-
.../destination_langchain_resource.go | 213 +-
.../destination_langchain_resource_sdk.go | 237 +-
.../destination_milvus_data_source.go | 363 +-
.../destination_milvus_data_source_sdk.go | 6 +-
.../provider/destination_milvus_resource.go | 379 +-
.../destination_milvus_resource_sdk.go | 529 +-
.../destination_mongodb_data_source.go | 431 +-
.../destination_mongodb_data_source_sdk.go | 6 +-
.../provider/destination_mongodb_resource.go | 324 +-
.../destination_mongodb_resource_sdk.go | 383 +-
.../provider/destination_mssql_data_source.go | 308 +-
.../destination_mssql_data_source_sdk.go | 6 +-
.../provider/destination_mssql_resource.go | 243 +-
.../destination_mssql_resource_sdk.go | 263 +-
.../provider/destination_mysql_data_source.go | 227 +-
.../destination_mysql_data_source_sdk.go | 6 +-
.../provider/destination_mysql_resource.go | 185 +-
.../destination_mysql_resource_sdk.go | 181 +-
.../destination_oracle_data_source.go | 231 +-
.../destination_oracle_data_source_sdk.go | 6 +-
.../provider/destination_oracle_resource.go | 180 +-
.../destination_oracle_resource_sdk.go | 183 +-
.../destination_pinecone_data_source.go | 185 +-
.../destination_pinecone_data_source_sdk.go | 6 +-
.../provider/destination_pinecone_resource.go | 245 +-
.../destination_pinecone_resource_sdk.go | 351 +-
.../destination_postgres_data_source.go | 475 +-
.../destination_postgres_data_source_sdk.go | 6 +-
.../provider/destination_postgres_resource.go | 389 +-
.../destination_postgres_resource_sdk.go | 397 +-
.../destination_pubsub_data_source.go | 65 +-
.../destination_pubsub_data_source_sdk.go | 6 +-
.../provider/destination_pubsub_resource.go | 58 +-
.../destination_pubsub_resource_sdk.go | 39 +-
.../destination_qdrant_data_source.go | 137 +
.../destination_qdrant_data_source_sdk.go | 18 +
.../provider/destination_qdrant_resource.go | 583 +
.../destination_qdrant_resource_sdk.go | 576 +
.../provider/destination_redis_data_source.go | 335 +-
.../destination_redis_data_source_sdk.go | 6 +-
.../provider/destination_redis_resource.go | 264 +-
.../destination_redis_resource_sdk.go | 267 +-
.../destination_redshift_data_source.go | 508 +-
.../destination_redshift_data_source_sdk.go | 6 +-
.../provider/destination_redshift_resource.go | 394 +-
.../destination_redshift_resource_sdk.go | 383 +-
.../provider/destination_s3_data_source.go | 728 +-
.../destination_s3_data_source_sdk.go | 6 +-
internal/provider/destination_s3_resource.go | 504 +-
.../provider/destination_s3_resource_sdk.go | 697 +-
.../destination_s3glue_data_source.go | 251 +-
.../destination_s3glue_data_source_sdk.go | 6 +-
.../provider/destination_s3glue_resource.go | 141 +-
.../destination_s3glue_resource_sdk.go | 165 +-
.../destination_sftpjson_data_source.go | 53 +-
.../destination_sftpjson_data_source_sdk.go | 6 +-
.../provider/destination_sftpjson_resource.go | 39 +-
.../destination_sftpjson_resource_sdk.go | 11 +-
.../destination_snowflake_data_source.go | 214 +-
.../destination_snowflake_data_source_sdk.go | 6 +-
.../destination_snowflake_resource.go | 147 +-
.../destination_snowflake_resource_sdk.go | 211 +-
.../destination_timeplus_data_source.go | 41 +-
.../destination_timeplus_data_source_sdk.go | 6 +-
.../provider/destination_timeplus_resource.go | 39 +-
.../destination_timeplus_resource_sdk.go | 29 +-
.../destination_typesense_data_source.go | 53 +-
.../destination_typesense_data_source_sdk.go | 6 +-
.../destination_typesense_resource.go | 34 +-
.../destination_typesense_resource_sdk.go | 21 +-
.../destination_vertica_data_source.go | 231 +-
.../destination_vertica_data_source_sdk.go | 6 +-
.../provider/destination_vertica_resource.go | 175 +-
.../destination_vertica_resource_sdk.go | 183 +-
.../destination_weaviate_data_source.go | 137 +
.../destination_weaviate_data_source_sdk.go | 18 +
.../provider/destination_weaviate_resource.go | 607 +
.../destination_weaviate_resource_sdk.go | 586 +
.../provider/destination_xata_data_source.go | 41 +-
.../destination_xata_data_source_sdk.go | 6 +-
.../provider/destination_xata_resource.go | 34 +-
.../provider/destination_xata_resource_sdk.go | 15 +-
internal/provider/provider.go | 36 +-
internal/provider/reflect/diags.go | 0
internal/provider/reflect/doc.go | 0
.../provider/reflect/generic_attr_value.go | 0
internal/provider/reflect/helpers.go | 0
internal/provider/reflect/interfaces.go | 0
internal/provider/reflect/into.go | 0
internal/provider/reflect/map.go | 0
internal/provider/reflect/number.go | 2 +-
internal/provider/reflect/options.go | 0
internal/provider/reflect/outof.go | 0
internal/provider/reflect/pointer.go | 0
internal/provider/reflect/primitive.go | 0
internal/provider/reflect/slice.go | 0
internal/provider/reflect/struct.go | 0
internal/provider/source_aha_data_source.go | 40 +-
.../provider/source_aha_data_source_sdk.go | 6 +-
internal/provider/source_aha_resource.go | 37 +-
internal/provider/source_aha_resource_sdk.go | 15 +-
.../provider/source_aircall_data_source.go | 54 +-
.../source_aircall_data_source_sdk.go | 6 +-
internal/provider/source_aircall_resource.go | 42 +-
.../provider/source_aircall_resource_sdk.go | 17 +-
.../provider/source_airtable_data_source.go | 157 +-
.../source_airtable_data_source_sdk.go | 6 +-
internal/provider/source_airtable_resource.go | 120 +-
.../provider/source_airtable_resource_sdk.go | 120 +-
.../provider/source_alloydb_data_source.go | 762 +-
.../source_alloydb_data_source_sdk.go | 6 +-
internal/provider/source_alloydb_resource.go | 595 +-
.../provider/source_alloydb_resource_sdk.go | 591 +-
.../provider/source_amazonads_data_source.go | 99 +-
.../source_amazonads_data_source_sdk.go | 6 +-
.../provider/source_amazonads_resource.go | 57 +-
.../provider/source_amazonads_resource_sdk.go | 46 +-
.../source_amazonsellerpartner_data_source.go | 137 +-
...rce_amazonsellerpartner_data_source_sdk.go | 6 +-
.../source_amazonsellerpartner_resource.go | 87 +-
...source_amazonsellerpartner_resource_sdk.go | 128 +-
.../provider/source_amazonsqs_data_source.go | 104 +-
.../source_amazonsqs_data_source_sdk.go | 6 +-
.../provider/source_amazonsqs_resource.go | 45 +-
.../provider/source_amazonsqs_resource_sdk.go | 25 +-
.../provider/source_amplitude_data_source.go | 65 +-
.../source_amplitude_data_source_sdk.go | 6 +-
.../provider/source_amplitude_resource.go | 49 +-
.../provider/source_amplitude_resource_sdk.go | 20 +-
.../source_apifydataset_data_source.go | 50 +-
.../source_apifydataset_data_source_sdk.go | 6 +-
.../provider/source_apifydataset_resource.go | 47 +-
.../source_apifydataset_resource_sdk.go | 43 +-
.../provider/source_appfollow_data_source.go | 42 +-
.../source_appfollow_data_source_sdk.go | 6 +-
.../provider/source_appfollow_resource.go | 36 +-
.../provider/source_appfollow_resource_sdk.go | 13 +-
internal/provider/source_asana_data_source.go | 132 +-
.../provider/source_asana_data_source_sdk.go | 6 +-
internal/provider/source_asana_resource.go | 121 +-
.../provider/source_asana_resource_sdk.go | 135 +-
internal/provider/source_auth0_data_source.go | 137 +-
.../provider/source_auth0_data_source_sdk.go | 6 +-
internal/provider/source_auth0_resource.go | 109 +-
.../provider/source_auth0_resource_sdk.go | 75 +-
.../source_awscloudtrail_data_source.go | 58 +-
.../source_awscloudtrail_data_source_sdk.go | 6 +-
.../provider/source_awscloudtrail_resource.go | 44 +-
.../source_awscloudtrail_resource_sdk.go | 27 +-
.../source_azureblobstorage_data_source.go | 102 +-
...source_azureblobstorage_data_source_sdk.go | 6 +-
.../source_azureblobstorage_resource.go | 271 +-
.../source_azureblobstorage_resource_sdk.go | 570 +-
.../provider/source_azuretable_data_source.go | 50 +-
.../source_azuretable_data_source_sdk.go | 6 +-
.../provider/source_azuretable_resource.go | 42 +-
.../source_azuretable_resource_sdk.go | 11 +-
.../provider/source_bamboohr_data_source.go | 54 +-
.../source_bamboohr_data_source_sdk.go | 6 +-
internal/provider/source_bamboohr_resource.go | 47 +-
.../provider/source_bamboohr_resource_sdk.go | 11 +-
.../source_bigcommerce_data_source_sdk.go | 14 -
.../source_bigcommerce_resource_sdk.go | 76 -
.../provider/source_bigquery_data_source.go | 50 +-
.../source_bigquery_data_source_sdk.go | 6 +-
internal/provider/source_bigquery_resource.go | 36 +-
.../provider/source_bigquery_resource_sdk.go | 11 +-
.../provider/source_bingads_data_source.go | 79 +-
.../source_bingads_data_source_sdk.go | 6 +-
internal/provider/source_bingads_resource.go | 131 +-
.../provider/source_bingads_resource_sdk.go | 93 +-
.../provider/source_braintree_data_source.go | 71 +-
.../source_braintree_data_source_sdk.go | 6 +-
.../provider/source_braintree_resource.go | 46 +-
.../provider/source_braintree_resource_sdk.go | 11 +-
internal/provider/source_braze_data_source.go | 48 +-
.../provider/source_braze_data_source_sdk.go | 6 +-
internal/provider/source_braze_resource.go | 42 +-
.../provider/source_braze_resource_sdk.go | 19 +-
internal/provider/source_cart_data_source.go | 137 +
.../provider/source_cart_data_source_sdk.go | 18 +
...ro_resource.go => source_cart_resource.go} | 166 +-
internal/provider/source_cart_resource_sdk.go | 141 +
.../provider/source_chargebee_data_source.go | 65 +-
.../source_chargebee_data_source_sdk.go | 6 +-
.../provider/source_chargebee_resource.go | 45 +-
.../provider/source_chargebee_resource_sdk.go | 13 +-
.../provider/source_chartmogul_data_source.go | 63 +-
.../source_chartmogul_data_source_sdk.go | 6 +-
.../provider/source_chartmogul_resource.go | 55 +-
.../source_chartmogul_resource_sdk.go | 19 +-
.../provider/source_clickhouse_data_source.go | 228 +-
.../source_clickhouse_data_source_sdk.go | 6 +-
.../provider/source_clickhouse_resource.go | 178 +-
.../source_clickhouse_resource_sdk.go | 167 +-
.../provider/source_clickupapi_data_source.go | 62 +-
.../source_clickupapi_data_source_sdk.go | 6 +-
.../provider/source_clickupapi_resource.go | 42 +-
.../source_clickupapi_resource_sdk.go | 11 +-
.../provider/source_clockify_data_source.go | 50 +-
.../source_clockify_data_source_sdk.go | 6 +-
internal/provider/source_clockify_resource.go | 42 +-
.../provider/source_clockify_resource_sdk.go | 11 +-
.../provider/source_closecom_data_source.go | 50 +-
.../source_closecom_data_source_sdk.go | 6 +-
internal/provider/source_closecom_resource.go | 41 +-
.../provider/source_closecom_resource_sdk.go | 17 +-
internal/provider/source_coda_data_source.go | 36 +-
.../provider/source_coda_data_source_sdk.go | 6 +-
internal/provider/source_coda_resource.go | 37 +-
internal/provider/source_coda_resource_sdk.go | 13 +-
.../provider/source_coinapi_data_source.go | 82 +-
.../source_coinapi_data_source_sdk.go | 6 +-
internal/provider/source_coinapi_resource.go | 46 +-
.../provider/source_coinapi_resource_sdk.go | 25 +-
.../source_coinmarketcap_data_source.go | 58 +-
.../source_coinmarketcap_data_source_sdk.go | 6 +-
.../provider/source_coinmarketcap_resource.go | 39 +-
.../source_coinmarketcap_resource_sdk.go | 19 +-
.../provider/source_configcat_data_source.go | 46 +-
.../source_configcat_data_source_sdk.go | 6 +-
.../provider/source_configcat_resource.go | 49 +-
.../provider/source_configcat_resource_sdk.go | 15 +-
.../provider/source_confluence_data_source.go | 50 +-
.../source_confluence_data_source_sdk.go | 6 +-
.../provider/source_confluence_resource.go | 37 +-
.../source_confluence_resource_sdk.go | 11 +-
.../provider/source_convex_data_source.go | 39 +-
.../provider/source_convex_data_source_sdk.go | 6 +-
internal/provider/source_convex_resource.go | 49 +-
.../provider/source_convex_resource_sdk.go | 11 +-
.../provider/source_datascope_data_source.go | 46 +-
.../source_datascope_data_source_sdk.go | 6 +-
.../provider/source_datascope_resource.go | 37 +-
.../provider/source_datascope_resource_sdk.go | 15 +-
.../provider/source_delighted_data_source.go | 50 +-
.../source_delighted_data_source_sdk.go | 6 +-
.../provider/source_delighted_resource.go | 42 +-
.../provider/source_delighted_resource_sdk.go | 15 +-
internal/provider/source_dixa_data_source.go | 44 +-
.../provider/source_dixa_data_source_sdk.go | 6 +-
internal/provider/source_dixa_resource.go | 45 +-
internal/provider/source_dixa_resource_sdk.go | 22 +-
.../provider/source_dockerhub_data_source.go | 42 +-
.../source_dockerhub_data_source_sdk.go | 6 +-
.../provider/source_dockerhub_resource.go | 36 +-
.../provider/source_dockerhub_resource_sdk.go | 11 +-
.../provider/source_dremio_data_source.go | 40 +-
.../provider/source_dremio_data_source_sdk.go | 6 +-
internal/provider/source_dremio_resource.go | 42 +-
.../provider/source_dremio_resource_sdk.go | 29 +-
.../provider/source_dynamodb_data_source.go | 89 +-
.../source_dynamodb_data_source_sdk.go | 6 +-
internal/provider/source_dynamodb_resource.go | 45 +-
.../provider/source_dynamodb_resource_sdk.go | 11 +-
.../source_e2etestcloud_data_source.go | 271 -
.../source_e2etestcloud_data_source_sdk.go | 14 -
.../provider/source_e2etestcloud_resource.go | 437 -
.../source_e2etestcloud_resource_sdk.go | 188 -
.../source_emailoctopus_data_source.go | 42 +-
.../source_emailoctopus_data_source_sdk.go | 6 +-
.../provider/source_emailoctopus_resource.go | 49 +-
.../source_emailoctopus_resource_sdk.go | 13 +-
.../source_exchangerates_data_source.go | 58 +-
.../source_exchangerates_data_source_sdk.go | 6 +-
.../provider/source_exchangerates_resource.go | 47 +-
.../source_exchangerates_resource_sdk.go | 13 +-
.../source_facebookmarketing_data_source.go | 167 +-
...ource_facebookmarketing_data_source_sdk.go | 6 +-
.../source_facebookmarketing_resource.go | 102 +-
.../source_facebookmarketing_resource_sdk.go | 83 +-
.../source_facebookpages_data_source.go | 46 +-
.../source_facebookpages_data_source_sdk.go | 6 +-
.../provider/source_facebookpages_resource.go | 37 +-
.../source_facebookpages_resource_sdk.go | 11 +-
internal/provider/source_faker_data_source.go | 52 +-
.../provider/source_faker_data_source_sdk.go | 6 +-
internal/provider/source_faker_resource.go | 61 +-
.../provider/source_faker_resource_sdk.go | 25 +-
internal/provider/source_fauna_data_source.go | 153 +-
.../provider/source_fauna_data_source_sdk.go | 6 +-
internal/provider/source_fauna_resource.go | 135 +-
.../provider/source_fauna_resource_sdk.go | 151 +-
internal/provider/source_file_data_source.go | 137 +
.../provider/source_file_data_source_sdk.go | 18 +
internal/provider/source_file_resource.go | 475 +
internal/provider/source_file_resource_sdk.go | 445 +
.../provider/source_filesecure_data_source.go | 526 -
.../source_filesecure_data_source_sdk.go | 14 -
.../provider/source_filesecure_resource.go | 692 -
.../source_filesecure_resource_sdk.go | 458 -
.../provider/source_firebolt_data_source.go | 62 +-
.../source_firebolt_data_source_sdk.go | 6 +-
internal/provider/source_firebolt_resource.go | 37 +-
.../provider/source_firebolt_resource_sdk.go | 23 +-
.../source_freshcaller_data_source.go | 62 +-
.../source_freshcaller_data_source_sdk.go | 6 +-
.../provider/source_freshcaller_resource.go | 42 +-
.../source_freshcaller_resource_sdk.go | 25 +-
.../provider/source_freshdesk_data_source.go | 58 +-
.../source_freshdesk_data_source_sdk.go | 6 +-
.../provider/source_freshdesk_resource.go | 42 +-
.../provider/source_freshdesk_resource_sdk.go | 11 +-
.../provider/source_freshsales_data_source.go | 46 +-
.../source_freshsales_data_source_sdk.go | 6 +-
.../provider/source_freshsales_resource.go | 37 +-
.../source_freshsales_resource_sdk.go | 11 +-
.../source_gainsightpx_data_source.go | 42 +-
.../source_gainsightpx_data_source_sdk.go | 6 +-
.../provider/source_gainsightpx_resource.go | 49 +-
.../source_gainsightpx_resource_sdk.go | 13 +-
internal/provider/source_gcs_data_source.go | 44 +-
.../provider/source_gcs_data_source_sdk.go | 6 +-
internal/provider/source_gcs_resource.go | 209 +-
internal/provider/source_gcs_resource_sdk.go | 410 +-
.../provider/source_getlago_data_source.go | 42 +-
.../source_getlago_data_source_sdk.go | 6 +-
internal/provider/source_getlago_resource.go | 40 +-
.../provider/source_getlago_resource_sdk.go | 27 +-
.../provider/source_github_data_source.go | 153 +-
.../provider/source_github_data_source_sdk.go | 6 +-
internal/provider/source_github_resource.go | 136 +-
.../provider/source_github_resource_sdk.go | 241 +-
.../provider/source_gitlab_data_source.go | 170 +-
.../provider/source_gitlab_data_source_sdk.go | 6 +-
internal/provider/source_gitlab_resource.go | 141 +-
.../provider/source_gitlab_resource_sdk.go | 157 +-
.../provider/source_glassfrog_data_source.go | 42 +-
.../source_glassfrog_data_source_sdk.go | 6 +-
.../provider/source_glassfrog_resource.go | 49 +-
.../provider/source_glassfrog_resource_sdk.go | 13 +-
internal/provider/source_gnews_data_source.go | 188 +-
.../provider/source_gnews_data_source_sdk.go | 6 +-
internal/provider/source_gnews_resource.go | 51 +-
.../provider/source_gnews_resource_sdk.go | 35 +-
.../provider/source_googleads_data_source.go | 105 +-
.../source_googleads_data_source_sdk.go | 6 +-
.../provider/source_googleads_resource.go | 53 +-
.../provider/source_googleads_resource_sdk.go | 19 +-
...urce_googleanalyticsdataapi_data_source.go | 167 +-
..._googleanalyticsdataapi_data_source_sdk.go | 6 +-
.../source_googleanalyticsdataapi_resource.go | 1237 +-
...rce_googleanalyticsdataapi_resource_sdk.go | 2885 +-
.../source_googleanalyticsv4_data_source.go | 278 -
...ource_googleanalyticsv4_data_source_sdk.go | 14 -
.../source_googleanalyticsv4_resource.go | 444 -
.../source_googleanalyticsv4_resource_sdk.go | 207 -
.../source_googledirectory_data_source.go | 152 +-
.../source_googledirectory_data_source_sdk.go | 6 +-
.../source_googledirectory_resource.go | 116 +-
.../source_googledirectory_resource_sdk.go | 119 +-
...e.go => source_googledrive_data_source.go} | 84 +-
.../source_googledrive_data_source_sdk.go | 18 +
.../provider/source_googledrive_resource.go | 556 +
.../source_googledrive_resource_sdk.go | 604 +
...rce_googlepagespeedinsights_data_source.go | 57 +-
...googlepagespeedinsights_data_source_sdk.go | 6 +-
...source_googlepagespeedinsights_resource.go | 37 +-
...ce_googlepagespeedinsights_resource_sdk.go | 19 +-
.../source_googlesearchconsole_data_source.go | 202 +-
...rce_googlesearchconsole_data_source_sdk.go | 6 +-
.../source_googlesearchconsole_resource.go | 125 +-
...source_googlesearchconsole_resource_sdk.go | 107 +-
.../source_googlesheets_data_source.go | 148 +-
.../source_googlesheets_data_source_sdk.go | 6 +-
.../provider/source_googlesheets_resource.go | 113 +-
.../source_googlesheets_resource_sdk.go | 75 +-
.../source_googlewebfonts_data_source.go | 54 +-
.../source_googlewebfonts_data_source_sdk.go | 6 +-
.../source_googlewebfonts_resource.go | 37 +-
.../source_googlewebfonts_resource_sdk.go | 11 +-
...googleworkspaceadminreports_data_source.go | 50 +-
...leworkspaceadminreports_data_source_sdk.go | 6 +-
...ce_googleworkspaceadminreports_resource.go | 36 +-
...oogleworkspaceadminreports_resource_sdk.go | 11 +-
.../provider/source_greenhouse_data_source.go | 42 +-
.../source_greenhouse_data_source_sdk.go | 6 +-
.../provider/source_greenhouse_resource.go | 49 +-
.../source_greenhouse_resource_sdk.go | 13 +-
.../provider/source_gridly_data_source.go | 39 +-
.../provider/source_gridly_data_source_sdk.go | 6 +-
internal/provider/source_gridly_resource.go | 39 +-
.../provider/source_gridly_resource_sdk.go | 15 +-
.../provider/source_harvest_data_source.go | 186 +-
.../source_harvest_data_source_sdk.go | 6 +-
internal/provider/source_harvest_resource.go | 141 +-
.../provider/source_harvest_resource_sdk.go | 119 +-
.../provider/source_hubplanner_data_source.go | 42 +-
.../source_hubplanner_data_source_sdk.go | 6 +-
.../provider/source_hubplanner_resource.go | 49 +-
.../source_hubplanner_resource_sdk.go | 13 +-
.../provider/source_hubspot_data_source.go | 151 +-
.../source_hubspot_data_source_sdk.go | 6 +-
internal/provider/source_hubspot_resource.go | 117 +-
.../provider/source_hubspot_resource_sdk.go | 91 +-
.../provider/source_insightly_data_source.go | 46 +-
.../source_insightly_data_source_sdk.go | 6 +-
.../provider/source_insightly_resource.go | 37 +-
.../provider/source_insightly_resource_sdk.go | 15 +-
.../provider/source_instagram_data_source.go | 58 +-
.../source_instagram_data_source_sdk.go | 6 +-
.../provider/source_instagram_resource.go | 42 +-
.../provider/source_instagram_resource_sdk.go | 25 +-
.../provider/source_instatus_data_source.go | 42 +-
.../source_instatus_data_source_sdk.go | 6 +-
internal/provider/source_instatus_resource.go | 49 +-
.../provider/source_instatus_resource_sdk.go | 13 +-
.../provider/source_intercom_data_source.go | 58 +-
.../source_intercom_data_source_sdk.go | 6 +-
internal/provider/source_intercom_resource.go | 42 +-
.../provider/source_intercom_resource_sdk.go | 11 +-
.../provider/source_ip2whois_data_source.go | 46 +-
.../source_ip2whois_data_source_sdk.go | 6 +-
internal/provider/source_ip2whois_resource.go | 37 +-
.../provider/source_ip2whois_resource_sdk.go | 20 +-
.../provider/source_iterable_data_source.go | 50 +-
.../source_iterable_data_source_sdk.go | 6 +-
internal/provider/source_iterable_resource.go | 54 +-
.../provider/source_iterable_resource_sdk.go | 15 +-
internal/provider/source_jira_data_source.go | 69 +-
.../provider/source_jira_data_source_sdk.go | 6 +-
internal/provider/source_jira_resource.go | 65 +-
internal/provider/source_jira_resource_sdk.go | 35 +-
.../provider/source_k6cloud_data_source.go | 42 +-
.../source_k6cloud_data_source_sdk.go | 6 +-
internal/provider/source_k6cloud_resource.go | 37 +-
.../provider/source_k6cloud_resource_sdk.go | 13 +-
.../provider/source_klarna_data_source.go | 56 +-
.../provider/source_klarna_data_source_sdk.go | 6 +-
internal/provider/source_klarna_resource.go | 44 +-
.../provider/source_klarna_resource_sdk.go | 25 +-
.../provider/source_klaviyo_data_source.go | 50 +-
.../source_klaviyo_data_source_sdk.go | 6 +-
internal/provider/source_klaviyo_resource.go | 42 +-
.../provider/source_klaviyo_resource_sdk.go | 29 +-
.../source_kustomersinger_data_source.go | 46 +-
.../source_kustomersinger_data_source_sdk.go | 6 +-
.../source_kustomersinger_resource.go | 37 +-
.../source_kustomersinger_resource_sdk.go | 15 +-
internal/provider/source_kyve_data_source.go | 52 +-
.../provider/source_kyve_data_source_sdk.go | 6 +-
internal/provider/source_kyve_resource.go | 46 +-
internal/provider/source_kyve_resource_sdk.go | 21 +-
.../source_launchdarkly_data_source.go | 42 +-
.../source_launchdarkly_data_source_sdk.go | 6 +-
.../provider/source_launchdarkly_resource.go | 49 +-
.../source_launchdarkly_resource_sdk.go | 11 +-
.../provider/source_lemlist_data_source.go | 42 +-
.../source_lemlist_data_source_sdk.go | 6 +-
internal/provider/source_lemlist_resource.go | 49 +-
.../provider/source_lemlist_resource_sdk.go | 13 +-
.../source_leverhiring_data_source.go | 155 +-
.../source_leverhiring_data_source_sdk.go | 6 +-
.../provider/source_leverhiring_resource.go | 112 +-
.../source_leverhiring_resource_sdk.go | 107 +-
.../source_linkedinads_data_source.go | 201 +-
.../source_linkedinads_data_source_sdk.go | 6 +-
.../provider/source_linkedinads_resource.go | 116 +-
.../source_linkedinads_resource_sdk.go | 111 +-
.../source_linkedinpages_data_source.go | 139 +-
.../source_linkedinpages_data_source_sdk.go | 6 +-
.../provider/source_linkedinpages_resource.go | 105 +-
.../source_linkedinpages_resource_sdk.go | 95 +-
.../provider/source_linnworks_data_source.go | 57 +-
.../source_linnworks_data_source_sdk.go | 6 +-
.../provider/source_linnworks_resource.go | 44 +-
.../provider/source_linnworks_resource_sdk.go | 11 +-
.../provider/source_lokalise_data_source.go | 46 +-
.../source_lokalise_data_source_sdk.go | 6 +-
internal/provider/source_lokalise_resource.go | 37 +-
.../provider/source_lokalise_resource_sdk.go | 15 +-
.../provider/source_mailchimp_data_source.go | 138 +-
.../source_mailchimp_data_source_sdk.go | 6 +-
.../provider/source_mailchimp_resource.go | 105 +-
.../provider/source_mailchimp_resource_sdk.go | 87 +-
.../provider/source_mailgun_data_source.go | 54 +-
.../source_mailgun_data_source_sdk.go | 6 +-
internal/provider/source_mailgun_resource.go | 47 +-
.../provider/source_mailgun_resource_sdk.go | 11 +-
.../provider/source_mailjetsms_data_source.go | 50 +-
.../source_mailjetsms_data_source_sdk.go | 6 +-
.../provider/source_mailjetsms_resource.go | 37 +-
.../source_mailjetsms_resource_sdk.go | 17 +-
.../provider/source_marketo_data_source.go | 58 +-
.../source_marketo_data_source_sdk.go | 6 +-
internal/provider/source_marketo_resource.go | 41 +-
.../provider/source_marketo_resource_sdk.go | 11 +-
.../provider/source_metabase_data_source.go | 57 +-
.../source_metabase_data_source_sdk.go | 6 +-
internal/provider/source_metabase_resource.go | 42 +-
.../provider/source_metabase_resource_sdk.go | 11 +-
.../source_microsoftteams_data_source.go | 168 +-
.../source_microsoftteams_data_source_sdk.go | 6 +-
.../source_microsoftteams_resource.go | 120 +-
.../source_microsoftteams_resource_sdk.go | 107 +-
.../provider/source_mixpanel_data_source.go | 177 +-
.../source_mixpanel_data_source_sdk.go | 6 +-
internal/provider/source_mixpanel_resource.go | 138 +-
.../provider/source_mixpanel_resource_sdk.go | 156 +-
.../provider/source_monday_data_source.go | 137 +-
.../provider/source_monday_data_source_sdk.go | 6 +-
internal/provider/source_monday_resource.go | 114 +-
.../provider/source_monday_resource_sdk.go | 85 +-
.../provider/source_mongodb_data_source.go | 319 -
.../source_mongodb_data_source_sdk.go | 14 -
internal/provider/source_mongodb_resource.go | 485 -
.../provider/source_mongodb_resource_sdk.go | 231 -
.../source_mongodbinternalpoc_data_source.go | 58 +-
...urce_mongodbinternalpoc_data_source_sdk.go | 6 +-
.../source_mongodbinternalpoc_resource.go | 42 +-
.../source_mongodbinternalpoc_resource_sdk.go | 11 +-
.../provider/source_mongodbv2_data_source.go | 137 +
.../source_mongodbv2_data_source_sdk.go | 18 +
.../provider/source_mongodbv2_resource.go | 413 +
.../provider/source_mongodbv2_resource_sdk.go | 256 +
internal/provider/source_mssql_data_source.go | 429 +-
.../provider/source_mssql_data_source_sdk.go | 6 +-
internal/provider/source_mssql_resource.go | 331 +-
.../provider/source_mssql_resource_sdk.go | 299 +-
.../provider/source_myhours_data_source.go | 54 +-
.../source_myhours_data_source_sdk.go | 6 +-
internal/provider/source_myhours_resource.go | 42 +-
.../provider/source_myhours_resource_sdk.go | 11 +-
internal/provider/source_mysql_data_source.go | 504 +-
.../provider/source_mysql_data_source_sdk.go | 6 +-
internal/provider/source_mysql_resource.go | 402 +-
.../provider/source_mysql_resource_sdk.go | 389 +-
.../provider/source_netsuite_data_source.go | 71 +-
.../source_netsuite_data_source_sdk.go | 6 +-
internal/provider/source_netsuite_resource.go | 44 +-
.../provider/source_netsuite_resource_sdk.go | 11 +-
.../provider/source_notion_data_source.go | 141 +-
.../provider/source_notion_data_source_sdk.go | 6 +-
internal/provider/source_notion_resource.go | 127 +-
.../provider/source_notion_resource_sdk.go | 133 +-
.../provider/source_nytimes_data_source.go | 82 +-
.../source_nytimes_data_source_sdk.go | 6 +-
internal/provider/source_nytimes_resource.go | 51 +-
.../provider/source_nytimes_resource_sdk.go | 29 +-
internal/provider/source_okta_data_source.go | 137 +-
.../provider/source_okta_data_source_sdk.go | 6 +-
internal/provider/source_okta_resource.go | 105 +-
internal/provider/source_okta_resource_sdk.go | 75 +-
.../provider/source_omnisend_data_source.go | 42 +-
.../source_omnisend_data_source_sdk.go | 6 +-
internal/provider/source_omnisend_resource.go | 49 +-
.../provider/source_omnisend_resource_sdk.go | 13 +-
.../provider/source_onesignal_data_source.go | 71 +-
.../source_onesignal_data_source_sdk.go | 6 +-
.../provider/source_onesignal_resource.go | 45 +-
.../provider/source_onesignal_resource_sdk.go | 15 +-
.../provider/source_oracle_data_source.go | 412 +-
.../provider/source_oracle_data_source_sdk.go | 6 +-
internal/provider/source_oracle_resource.go | 309 +-
.../provider/source_oracle_resource_sdk.go | 331 +-
internal/provider/source_orb_data_source.go | 62 +-
.../provider/source_orb_data_source_sdk.go | 6 +-
internal/provider/source_orb_resource.go | 45 +-
internal/provider/source_orb_resource_sdk.go | 11 +-
internal/provider/source_orbit_data_source.go | 44 +-
.../provider/source_orbit_data_source_sdk.go | 6 +-
internal/provider/source_orbit_resource.go | 37 +-
.../provider/source_orbit_resource_sdk.go | 17 +-
.../source_outbrainamplify_data_source.go | 164 +-
.../source_outbrainamplify_data_source_sdk.go | 6 +-
.../source_outbrainamplify_resource.go | 112 +-
.../source_outbrainamplify_resource_sdk.go | 79 +-
.../provider/source_outreach_data_source.go | 58 +-
.../source_outreach_data_source_sdk.go | 6 +-
internal/provider/source_outreach_resource.go | 37 +-
.../provider/source_outreach_resource_sdk.go | 11 +-
.../source_paypaltransaction_data_source.go | 62 +-
...ource_paypaltransaction_data_source_sdk.go | 6 +-
.../source_paypaltransaction_resource.go | 52 +-
.../source_paypaltransaction_resource_sdk.go | 39 +-
.../provider/source_paystack_data_source.go | 54 +-
.../source_paystack_data_source_sdk.go | 6 +-
internal/provider/source_paystack_resource.go | 47 +-
.../provider/source_paystack_resource_sdk.go | 11 +-
internal/provider/source_pendo_data_source.go | 35 +-
.../provider/source_pendo_data_source_sdk.go | 6 +-
internal/provider/source_pendo_resource.go | 41 +-
.../provider/source_pendo_resource_sdk.go | 13 +-
.../provider/source_persistiq_data_source.go | 42 +-
.../source_persistiq_data_source_sdk.go | 6 +-
.../provider/source_persistiq_resource.go | 49 +-
.../provider/source_persistiq_resource_sdk.go | 13 +-
.../provider/source_pexelsapi_data_source.go | 62 +-
.../source_pexelsapi_data_source_sdk.go | 6 +-
.../provider/source_pexelsapi_resource.go | 37 +-
.../provider/source_pexelsapi_resource_sdk.go | 11 +-
.../provider/source_pinterest_data_source.go | 147 +-
.../source_pinterest_data_source_sdk.go | 6 +-
.../provider/source_pinterest_resource.go | 229 +-
.../provider/source_pinterest_resource_sdk.go | 249 +-
.../provider/source_pipedrive_data_source.go | 64 +-
.../source_pipedrive_data_source_sdk.go | 6 +-
.../provider/source_pipedrive_resource.go | 63 +-
.../provider/source_pipedrive_resource_sdk.go | 40 +-
.../provider/source_pocket_data_source.go | 108 +-
.../provider/source_pocket_data_source_sdk.go | 6 +-
internal/provider/source_pocket_resource.go | 57 +-
.../provider/source_pocket_resource_sdk.go | 23 +-
.../provider/source_pokeapi_data_source.go | 42 +-
.../source_pokeapi_data_source_sdk.go | 6 +-
internal/provider/source_pokeapi_resource.go | 931 +-
.../provider/source_pokeapi_resource_sdk.go | 15 +-
.../source_polygonstockapi_data_source.go | 81 +-
.../source_polygonstockapi_data_source_sdk.go | 6 +-
.../source_polygonstockapi_resource.go | 45 +-
.../source_polygonstockapi_resource_sdk.go | 13 +-
.../provider/source_postgres_data_source.go | 762 +-
.../source_postgres_data_source_sdk.go | 6 +-
internal/provider/source_postgres_resource.go | 595 +-
.../provider/source_postgres_resource_sdk.go | 583 +-
.../provider/source_posthog_data_source.go | 58 +-
.../source_posthog_data_source_sdk.go | 6 +-
internal/provider/source_posthog_resource.go | 52 +-
.../provider/source_posthog_resource_sdk.go | 11 +-
.../source_postmarkapp_data_source.go | 46 +-
.../source_postmarkapp_data_source_sdk.go | 6 +-
.../provider/source_postmarkapp_resource.go | 36 +-
.../source_postmarkapp_resource_sdk.go | 11 +-
.../provider/source_prestashop_data_source.go | 54 +-
.../source_prestashop_data_source_sdk.go | 6 +-
.../provider/source_prestashop_resource.go | 42 +-
.../source_prestashop_resource_sdk.go | 19 +-
.../provider/source_punkapi_data_source.go | 50 +-
.../source_punkapi_data_source_sdk.go | 6 +-
internal/provider/source_punkapi_resource.go | 36 +-
.../provider/source_punkapi_resource_sdk.go | 11 +-
internal/provider/source_pypi_data_source.go | 40 +-
.../provider/source_pypi_data_source_sdk.go | 6 +-
internal/provider/source_pypi_resource.go | 36 +-
internal/provider/source_pypi_resource_sdk.go | 11 +-
.../provider/source_qualaroo_data_source.go | 55 +-
.../source_qualaroo_data_source_sdk.go | 6 +-
internal/provider/source_qualaroo_resource.go | 38 +-
.../provider/source_qualaroo_resource_sdk.go | 19 +-
.../provider/source_quickbooks_data_source.go | 140 +-
.../source_quickbooks_data_source_sdk.go | 6 +-
.../provider/source_quickbooks_resource.go | 103 +-
.../source_quickbooks_resource_sdk.go | 83 +-
internal/provider/source_railz_data_source.go | 44 +-
.../provider/source_railz_data_source_sdk.go | 6 +-
internal/provider/source_railz_resource.go | 37 +-
.../provider/source_railz_resource_sdk.go | 17 +-
.../provider/source_recharge_data_source.go | 50 +-
.../source_recharge_data_source_sdk.go | 6 +-
internal/provider/source_recharge_resource.go | 42 +-
.../provider/source_recharge_resource_sdk.go | 11 +-
.../provider/source_recreation_data_source.go | 45 +-
.../source_recreation_data_source_sdk.go | 6 +-
.../provider/source_recreation_resource.go | 37 +-
.../source_recreation_resource_sdk.go | 11 +-
.../provider/source_recruitee_data_source.go | 46 +-
.../source_recruitee_data_source_sdk.go | 6 +-
.../provider/source_recruitee_resource.go | 37 +-
.../provider/source_recruitee_resource_sdk.go | 15 +-
.../provider/source_recurly_data_source.go | 50 +-
.../source_recurly_data_source_sdk.go | 6 +-
internal/provider/source_recurly_resource.go | 37 +-
.../provider/source_recurly_resource_sdk.go | 17 +-
.../provider/source_redshift_data_source.go | 67 +-
.../source_redshift_data_source_sdk.go | 6 +-
internal/provider/source_redshift_resource.go | 42 +-
.../provider/source_redshift_resource_sdk.go | 25 +-
.../provider/source_retently_data_source.go | 168 +-
.../source_retently_data_source_sdk.go | 6 +-
internal/provider/source_retently_resource.go | 133 +-
.../provider/source_retently_resource_sdk.go | 124 +-
.../provider/source_rkicovid_data_source.go | 42 +-
.../source_rkicovid_data_source_sdk.go | 6 +-
internal/provider/source_rkicovid_resource.go | 36 +-
.../provider/source_rkicovid_resource_sdk.go | 13 +-
internal/provider/source_rss_data_source.go | 36 +-
.../provider/source_rss_data_source_sdk.go | 6 +-
internal/provider/source_rss_resource.go | 36 +-
internal/provider/source_rss_resource_sdk.go | 13 +-
internal/provider/source_s3_data_source.go | 810 +-
.../provider/source_s3_data_source_sdk.go | 6 +-
internal/provider/source_s3_resource.go | 651 +-
internal/provider/source_s3_resource_sdk.go | 1043 +-
.../provider/source_salesforce_data_source.go | 102 +-
.../source_salesforce_data_source_sdk.go | 6 +-
.../provider/source_salesforce_resource.go | 64 +-
.../source_salesforce_resource_sdk.go | 43 +-
.../provider/source_salesloft_data_source.go | 164 +-
.../source_salesloft_data_source_sdk.go | 6 +-
.../provider/source_salesloft_resource.go | 124 +-
.../provider/source_salesloft_resource_sdk.go | 87 +-
.../source_sapfieldglass_data_source.go | 42 +-
.../source_sapfieldglass_data_source_sdk.go | 6 +-
.../provider/source_sapfieldglass_resource.go | 49 +-
.../source_sapfieldglass_resource_sdk.go | 13 +-
.../provider/source_secoda_data_source.go | 36 +-
.../provider/source_secoda_data_source_sdk.go | 6 +-
internal/provider/source_secoda_resource.go | 39 +-
.../provider/source_secoda_resource_sdk.go | 13 +-
.../provider/source_sendgrid_data_source.go | 50 +-
.../source_sendgrid_data_source_sdk.go | 6 +-
internal/provider/source_sendgrid_resource.go | 42 +-
.../provider/source_sendgrid_resource_sdk.go | 15 +-
.../provider/source_sendinblue_data_source.go | 42 +-
.../source_sendinblue_data_source_sdk.go | 6 +-
.../provider/source_sendinblue_resource.go | 49 +-
.../source_sendinblue_resource_sdk.go | 13 +-
.../provider/source_senseforce_data_source.go | 62 +-
.../source_senseforce_data_source_sdk.go | 6 +-
.../provider/source_senseforce_resource.go | 47 +-
.../source_senseforce_resource_sdk.go | 13 +-
.../provider/source_sentry_data_source.go | 58 +-
.../provider/source_sentry_data_source_sdk.go | 6 +-
internal/provider/source_sentry_resource.go | 45 +-
.../provider/source_sentry_resource_sdk.go | 11 +-
internal/provider/source_sftp_data_source.go | 146 +-
.../provider/source_sftp_data_source_sdk.go | 6 +-
internal/provider/source_sftp_resource.go | 125 +-
internal/provider/source_sftp_resource_sdk.go | 81 +-
.../provider/source_sftpbulk_data_source.go | 97 +-
.../source_sftpbulk_data_source_sdk.go | 6 +-
internal/provider/source_sftpbulk_resource.go | 71 +-
.../provider/source_sftpbulk_resource_sdk.go | 43 +-
.../provider/source_shopify_data_source.go | 151 +-
.../source_shopify_data_source_sdk.go | 6 +-
internal/provider/source_shopify_resource.go | 112 +-
.../provider/source_shopify_resource_sdk.go | 97 +-
.../provider/source_shortio_data_source.go | 49 +-
.../source_shortio_data_source_sdk.go | 6 +-
internal/provider/source_shortio_resource.go | 37 +-
.../provider/source_shortio_resource_sdk.go | 17 +-
internal/provider/source_slack_data_source.go | 154 +-
.../provider/source_slack_data_source_sdk.go | 6 +-
internal/provider/source_slack_resource.go | 123 +-
.../provider/source_slack_resource_sdk.go | 107 +-
.../provider/source_smaily_data_source.go | 44 +-
.../provider/source_smaily_data_source_sdk.go | 6 +-
internal/provider/source_smaily_resource.go | 37 +-
.../provider/source_smaily_resource_sdk.go | 11 +-
.../source_smartengage_data_source.go | 42 +-
.../source_smartengage_data_source_sdk.go | 6 +-
.../provider/source_smartengage_resource.go | 49 +-
.../source_smartengage_resource_sdk.go | 13 +-
.../source_smartsheets_data_source.go | 173 +-
.../source_smartsheets_data_source_sdk.go | 6 +-
.../provider/source_smartsheets_resource.go | 123 +-
.../source_smartsheets_resource_sdk.go | 107 +-
.../source_snapchatmarketing_data_source.go | 65 +-
...ource_snapchatmarketing_data_source_sdk.go | 6 +-
.../source_snapchatmarketing_resource.go | 45 +-
.../source_snapchatmarketing_resource_sdk.go | 13 +-
.../provider/source_snowflake_data_source.go | 175 +-
.../source_snowflake_data_source_sdk.go | 6 +-
.../provider/source_snowflake_resource.go | 114 +-
.../provider/source_snowflake_resource_sdk.go | 91 +-
.../provider/source_sonarcloud_data_source.go | 70 +-
.../source_sonarcloud_data_source_sdk.go | 6 +-
.../provider/source_sonarcloud_resource.go | 49 +-
.../source_sonarcloud_resource_sdk.go | 13 +-
.../provider/source_spacexapi_data_source.go | 44 +-
.../source_spacexapi_data_source_sdk.go | 6 +-
.../provider/source_spacexapi_resource.go | 36 +-
.../provider/source_spacexapi_resource_sdk.go | 20 +-
.../provider/source_square_data_source.go | 149 +-
.../provider/source_square_data_source_sdk.go | 6 +-
internal/provider/source_square_resource.go | 122 +-
.../provider/source_square_resource_sdk.go | 95 +-
.../provider/source_strava_data_source.go | 65 +-
.../provider/source_strava_data_source_sdk.go | 6 +-
internal/provider/source_strava_resource.go | 51 +-
.../provider/source_strava_resource_sdk.go | 25 +-
.../provider/source_stripe_data_source.go | 56 +-
.../provider/source_stripe_data_source_sdk.go | 6 +-
internal/provider/source_stripe_resource.go | 59 +-
.../provider/source_stripe_resource_sdk.go | 39 +-
.../source_surveymonkey_data_source.go | 90 +-
.../source_surveymonkey_data_source_sdk.go | 6 +-
.../provider/source_surveymonkey_resource.go | 54 +-
.../source_surveymonkey_resource_sdk.go | 23 +-
.../source_surveysparrow_data_source.go | 121 +-
.../source_surveysparrow_data_source_sdk.go | 6 +-
.../provider/source_surveysparrow_resource.go | 104 +-
.../source_surveysparrow_resource_sdk.go | 89 +-
internal/provider/source_tempo_data_source.go | 36 +-
.../provider/source_tempo_data_source_sdk.go | 6 +-
internal/provider/source_tempo_resource.go | 49 +-
.../provider/source_tempo_resource_sdk.go | 13 +-
.../source_theguardianapi_data_source.go | 62 +-
.../source_theguardianapi_data_source_sdk.go | 6 +-
.../source_theguardianapi_resource.go | 37 +-
.../source_theguardianapi_resource_sdk.go | 23 +-
.../source_tiktokmarketing_data_source.go | 178 +-
.../source_tiktokmarketing_data_source_sdk.go | 6 +-
.../source_tiktokmarketing_resource.go | 132 +-
.../source_tiktokmarketing_resource_sdk.go | 114 +-
.../provider/source_todoist_data_source.go | 42 +-
.../source_todoist_data_source_sdk.go | 6 +-
internal/provider/source_todoist_resource.go | 49 +-
.../provider/source_todoist_resource_sdk.go | 13 +-
.../provider/source_trello_data_source.go | 53 +-
.../provider/source_trello_data_source_sdk.go | 6 +-
internal/provider/source_trello_resource.go | 45 +-
.../provider/source_trello_resource_sdk.go | 19 +-
.../provider/source_trustpilot_data_source.go | 168 +-
.../source_trustpilot_data_source_sdk.go | 6 +-
.../provider/source_trustpilot_resource.go | 120 +-
.../source_trustpilot_resource_sdk.go | 103 +-
.../source_tvmazeschedule_data_source.go | 58 +-
.../source_tvmazeschedule_data_source_sdk.go | 6 +-
.../source_tvmazeschedule_resource.go | 36 +-
.../source_tvmazeschedule_resource_sdk.go | 11 +-
.../provider/source_twilio_data_source.go | 52 +-
.../provider/source_twilio_data_source_sdk.go | 6 +-
internal/provider/source_twilio_resource.go | 47 +-
.../provider/source_twilio_resource_sdk.go | 11 +-
.../source_twiliotaskrouter_data_source.go | 46 +-
...source_twiliotaskrouter_data_source_sdk.go | 6 +-
.../source_twiliotaskrouter_resource.go | 37 +-
.../source_twiliotaskrouter_resource_sdk.go | 11 +-
.../provider/source_twitter_data_source.go | 61 +-
.../source_twitter_data_source_sdk.go | 6 +-
internal/provider/source_twitter_resource.go | 46 +-
.../provider/source_twitter_resource_sdk.go | 19 +-
.../provider/source_typeform_data_source.go | 169 +-
.../source_typeform_data_source_sdk.go | 6 +-
internal/provider/source_typeform_resource.go | 124 +-
.../provider/source_typeform_resource_sdk.go | 103 +-
.../provider/source_uscensus_data_source.go | 50 +-
.../source_uscensus_data_source_sdk.go | 6 +-
internal/provider/source_uscensus_resource.go | 37 +-
.../provider/source_uscensus_resource_sdk.go | 11 +-
.../provider/source_vantage_data_source.go | 42 +-
.../source_vantage_data_source_sdk.go | 6 +-
internal/provider/source_vantage_resource.go | 49 +-
.../provider/source_vantage_resource_sdk.go | 11 +-
.../provider/source_webflow_data_source.go | 46 +-
.../source_webflow_data_source_sdk.go | 6 +-
internal/provider/source_webflow_resource.go | 37 +-
.../provider/source_webflow_resource_sdk.go | 15 +-
.../source_whiskyhunter_data_source.go | 38 +-
.../source_whiskyhunter_data_source_sdk.go | 6 +-
.../provider/source_whiskyhunter_resource.go | 51 +-
.../source_whiskyhunter_resource_sdk.go | 15 +-
.../source_wikipediapageviews_data_source.go | 66 +-
...urce_wikipediapageviews_data_source_sdk.go | 6 +-
.../source_wikipediapageviews_resource.go | 36 +-
.../source_wikipediapageviews_resource_sdk.go | 25 +-
.../source_woocommerce_data_source.go | 58 +-
.../source_woocommerce_data_source_sdk.go | 6 +-
.../provider/source_woocommerce_resource.go | 42 +-
.../source_woocommerce_resource_sdk.go | 21 +-
internal/provider/source_xero_data_source.go | 186 -
.../provider/source_xero_data_source_sdk.go | 14 -
internal/provider/source_xero_resource_sdk.go | 99 -
internal/provider/source_xkcd_data_source.go | 32 +-
.../provider/source_xkcd_data_source_sdk.go | 6 +-
internal/provider/source_xkcd_resource.go | 51 +-
internal/provider/source_xkcd_resource_sdk.go | 15 +-
.../source_yandexmetrica_data_source.go | 61 +-
.../source_yandexmetrica_data_source_sdk.go | 6 +-
.../provider/source_yandexmetrica_resource.go | 46 +-
.../source_yandexmetrica_resource_sdk.go | 21 +-
internal/provider/source_yotpo_data_source.go | 52 +-
.../provider/source_yotpo_data_source_sdk.go | 6 +-
internal/provider/source_yotpo_resource.go | 48 +-
.../provider/source_yotpo_resource_sdk.go | 25 +-
.../provider/source_younium_data_source.go | 165 -
.../source_younium_data_source_sdk.go | 14 -
.../provider/source_younium_resource_sdk.go | 90 -
.../source_youtubeanalytics_data_source.go | 63 +-
...source_youtubeanalytics_data_source_sdk.go | 6 +-
.../source_youtubeanalytics_resource.go | 52 +-
.../source_youtubeanalytics_resource_sdk.go | 29 +-
.../source_zendeskchat_data_source.go | 154 +-
.../source_zendeskchat_data_source_sdk.go | 6 +-
.../provider/source_zendeskchat_resource.go | 119 +-
.../source_zendeskchat_resource_sdk.go | 95 +-
.../source_zendesksell_data_source.go | 137 +
.../source_zendesksell_data_source_sdk.go | 18 +
...urce.go => source_zendesksell_resource.go} | 127 +-
.../source_zendesksell_resource_sdk.go | 73 +
.../source_zendesksunshine_data_source.go | 154 +-
.../source_zendesksunshine_data_source_sdk.go | 6 +-
.../source_zendesksunshine_resource.go | 113 +-
.../source_zendesksunshine_resource_sdk.go | 87 +-
.../source_zendesksupport_data_source.go | 191 +-
.../source_zendesksupport_data_source_sdk.go | 6 +-
.../source_zendesksupport_resource.go | 152 +-
.../source_zendesksupport_resource_sdk.go | 139 +-
.../source_zendesktalk_data_source.go | 187 +-
.../source_zendesktalk_data_source_sdk.go | 6 +-
.../provider/source_zendesktalk_resource.go | 141 +-
.../source_zendesktalk_resource_sdk.go | 135 +-
.../provider/source_zenloop_data_source.go | 54 +-
.../source_zenloop_data_source_sdk.go | 6 +-
internal/provider/source_zenloop_resource.go | 37 +-
.../provider/source_zenloop_resource_sdk.go | 11 +-
.../provider/source_zohocrm_data_source.go | 99 +-
.../source_zohocrm_data_source_sdk.go | 6 +-
internal/provider/source_zohocrm_resource.go | 55 +-
.../provider/source_zohocrm_resource_sdk.go | 27 +-
internal/provider/source_zoom_data_source.go | 36 +-
.../provider/source_zoom_data_source_sdk.go | 6 +-
internal/provider/source_zoom_resource.go | 37 +-
internal/provider/source_zoom_resource_sdk.go | 13 +-
internal/provider/source_zuora_data_source.go | 77 +-
.../provider/source_zuora_data_source_sdk.go | 6 +-
internal/provider/source_zuora_resource.go | 49 +-
.../provider/source_zuora_resource_sdk.go | 27 +-
...type_ad_analytics_report_configuration.go} | 2 +-
....go => type_aescbc_envelope_encryption.go} | 4 +-
..._source_amazon_s3.go => type_amazon_s3.go} | 3 +-
internal/provider/type_and_group.go | 7 +
.../{type_source_xkcd.go => type_api_key.go} | 4 +-
...modes_required.go => type_api_key_auth.go} | 4 +-
...i_key_secret.go => type_api_key_secret.go} | 3 +-
...odes_preferred.go => type_api_password.go} | 4 +-
...l_applications.go => type_applications.go} | 2 +-
... => type_authenticate_via_google_oauth.go} | 3 +-
...> type_authenticate_via_harvest_o_auth.go} | 4 +-
... => type_authenticate_via_lever_o_auth.go} | 3 +-
....go => type_authenticate_via_microsoft.go} | 3 +-
...pe_authenticate_via_microsoft_o_auth20.go} | 4 +-
...go => type_authenticate_with_api_token.go} | 4 +-
...uthenticate_with_personal_access_token.go} | 3 +-
internal/provider/type_avro_apache_avro.go | 10 +
...ile_format_avro.go => type_avro_format.go} | 4 +-
....go => type_az_blob_azure_blob_storage.go} | 3 +-
internal/provider/type_azure_open_ai.go | 11 +
internal/provider/type_by_markdown_header.go | 9 +
.../provider/type_by_programming_language.go | 9 +
...thod_no_tunnel.go => type_by_separator.go} | 5 +-
...mpression_codec_bzip2.go => type_bzip2.go} | 2 +-
internal/provider/type_central_api_router.go | 11 +
...ce.go => type_chroma_local_persistance.go} | 3 +-
...vus_embedding_cohere.go => type_cohere.go} | 3 +-
internal/provider/type_connection_schedule.go | 0
.../type_connection_schedule_response.go | 11 +
...urce_s3_file_format_csv.go => type_csv.go} | 3 +-
....go => type_csv_comma_separated_values.go} | 4 +-
...ormat_csv_format.go => type_csv_format.go} | 29 +-
...stom_queries.go => type_custom_queries.go} | 2 +-
.../provider/type_custom_report_config.go | 12 +
...ompression_codec_xz.go => type_deflate.go} | 2 +-
.../provider/type_destination_aws_datalake.go | 1 -
.../type_destination_aws_datalake1.go | 21 -
...nation_aws_datalake_authentication_mode.go | 6 +-
...s_datalake_authentication_mode_iam_role.go | 10 -
...ion_aws_datalake_output_format_wildcard.go | 6 +-
.../type_destination_azure_blob_storage.go | 1 -
...orage_json_lines_newline_delimited_json.go | 6 +
...nation_azure_blob_storage_output_format.go | 6 +-
...utput_format_csv_comma_separated_values.go | 10 -
.../provider/type_destination_bigquery.go | 2 +-
.../type_destination_bigquery_credential.go | 7 +
.../type_destination_bigquery_denormalized.go | 15 -
...on_bigquery_denormalized_loading_method.go | 10 -
...denormalized_loading_method_gcs_staging.go | 14 -
...d_loading_method_gcs_staging_credential.go | 7 -
...lized_update_loading_method_gcs_staging.go | 14 -
...e_loading_method_gcs_staging_credential.go | 7 -
.../type_destination_bigquery_hmac_key.go | 10 +
...ype_destination_bigquery_loading_method.go | 6 +-
...ion_bigquery_loading_method_gcs_staging.go | 14 -
...y_loading_method_gcs_staging_credential.go | 7 -
...query_update_loading_method_gcs_staging.go | 14 -
...e_loading_method_gcs_staging_credential.go | 7 -
.../provider/type_destination_clickhouse.go | 15 +-
...estination_clickhouse_ssh_tunnel_method.go | 9 +-
...sh_tunnel_method_ssh_key_authentication.go | 13 -
internal/provider/type_destination_convex.go | 5 +-
internal/provider/type_destination_cumulio.go | 7 +-
.../provider/type_destination_databend.go | 13 +-
.../provider/type_destination_databricks.go | 1 -
.../provider/type_destination_databricks1.go | 19 -
...tination_databricks_azure_blob_storage.go} | 3 +-
...ype_destination_databricks_data_source1.go | 9 +-
...ype_destination_databricks_data_source2.go | 12 -
...ation_databricks_data_source_amazon_s31.go | 15 -
...databricks_update_data_source_amazon_s3.go | 15 -
...atabricks_update_data_source_amazon_s31.go | 15 -
.../provider/type_destination_dev_null.go | 3 -
...e_destination_dev_null_test_destination.go | 3 +-
internal/provider/type_destination_duckdb.go | 11 +
.../provider/type_destination_dynamodb.go | 1 -
.../provider/type_destination_dynamodb1.go | 14 -
.../type_destination_elasticsearch.go | 1 -
...ion_elasticsearch_authentication_method.go | 6 +-
...authentication_method_username_password.go | 11 -
.../provider/type_destination_firebolt.go | 15 +-
...ype_destination_firebolt_loading_method.go | 6 +-
.../provider/type_destination_firestore.go | 1 -
internal/provider/type_destination_gcs.go | 1 -
.../type_destination_gcs_authentication.go | 3 +-
.../type_destination_gcs_compression.go | 8 +
.../type_destination_gcs_compression_codec.go | 12 +
...tination_gcs_csv_comma_separated_values.go | 11 +
...n_gcs_json_lines_newline_delimited_json.go | 10 +
.../type_destination_gcs_output_format.go | 12 +-
...tion_gcs_output_format_avro_apache_avro.go | 10 -
...rmat_avro_apache_avro_compression_codec.go | 12 -
...utput_format_csv_comma_separated_values.go | 11 -
..._csv_comma_separated_values_compression.go | 8 -
...rated_values_compression_no_compression.go | 9 -
...ormat_json_lines_newline_delimited_json.go | 10 -
...ines_newline_delimited_json_compression.go | 8 -
...limited_json_compression_no_compression.go | 9 -
..._output_format_parquet_columnar_storage.go | 15 -
...stination_gcs_parquet_columnar_storage.go} | 2 +-
..._destination_gcs_update_no_compression.go} | 2 +-
...s_update_output_format_avro_apache_avro.go | 10 -
...rmat_avro_apache_avro_compression_codec.go | 12 -
...utput_format_csv_comma_separated_values.go | 11 -
..._csv_comma_separated_values_compression.go | 8 -
...ormat_json_lines_newline_delimited_json.go | 10 -
...ines_newline_delimited_json_compression.go | 8 -
.../type_destination_google_sheets.go | 5 +-
...sheets_authentication_via_google_o_auth.go | 0
internal/provider/type_destination_keen.go | 7 +-
internal/provider/type_destination_kinesis.go | 13 +-
.../provider/type_destination_langchain.go | 9 +-
.../type_destination_langchain_embedding.go | 6 +-
.../type_destination_langchain_indexing.go | 9 +-
...destination_langchain_indexing_pinecone.go | 12 -
...=> type_destination_langchain_pinecone.go} | 2 +-
...ation_langchain_processing_config_model.go | 0
internal/provider/type_destination_milvus.go | 9 +-
...o => type_destination_milvus_api_token.go} | 3 +-
.../type_destination_milvus_authentication.go | 9 +
.../type_destination_milvus_embedding.go | 14 +-
.../type_destination_milvus_indexing.go | 12 +-
...tination_milvus_indexing_authentication.go | 12 -
...tination_milvus_processing_config_model.go | 10 +-
.../type_destination_milvus_text_splitter.go | 9 +
internal/provider/type_destination_mongodb.go | 9 +-
..._destination_mongodb_authorization_type.go | 6 +-
...ngodb_authorization_type_login_password.go | 11 -
...ination_mongodb_authorization_type_none.go | 9 -
...tination_mongodb_mongo_db_instance_type.go | 9 +-
...godb_mongo_db_instance_type_replica_set.go | 11 -
...tance_type_standalone_mongo_db_instance.go | 11 -
...e_destination_mongodb_ssh_tunnel_method.go | 12 -
internal/provider/type_destination_mssql.go | 19 +-
...ype_destination_mssql_ssh_tunnel_method.go | 12 -
.../type_destination_mssql_ssl_method.go | 6 +-
...thod_encrypted_trust_server_certificate.go | 9 -
internal/provider/type_destination_mysql.go | 16 -
...ype_destination_mysql_ssh_tunnel_method.go | 12 -
internal/provider/type_destination_oracle.go | 17 +-
...pe_destination_oracle_ssh_tunnel_method.go | 12 -
.../provider/type_destination_pinecone.go | 9 +-
.../type_destination_pinecone_embedding.go | 11 +-
.../provider/type_destination_postgres.go | 19 +-
..._destination_postgres_ssh_tunnel_method.go | 12 -
.../type_destination_postgres_ssl_modes.go | 18 +-
..._destination_postgres_ssl_modes_disable.go | 9 -
...e_destination_postgres_ssl_modes_prefer.go | 9 -
..._destination_postgres_ssl_modes_require.go | 9 -
...tination_postgres_ssl_modes_verify_full.go | 13 -
internal/provider/type_destination_pubsub.go | 1 -
internal/provider/type_destination_qdrant.go | 9 +
...estination_qdrant_authentication_method.go | 8 +
...type_destination_qdrant_distance_metric.go | 9 +
.../type_destination_qdrant_indexing.go | 14 +
internal/provider/type_destination_redis.go | 17 +-
...ype_destination_redis_ssh_tunnel_method.go | 12 -
.../type_destination_redis_ssl_modes.go | 6 +-
.../provider/type_destination_redshift.go | 19 +-
.../provider/type_destination_redshift1.go | 18 -
.../type_destination_redshift_encryption.go | 8 +
..._destination_redshift_ssh_tunnel_method.go | 12 -
...hift_update_uploading_method_s3_staging.go | 18 -
...ift_update_uploading_method_s3_staging1.go | 18 -
..._uploading_method_s3_staging_encryption.go | 8 -
...e_destination_redshift_uploading_method.go | 6 +-
..._destination_redshift_uploading_method1.go | 10 -
...on_redshift_uploading_method_s3_staging.go | 18 -
...n_redshift_uploading_method_s3_staging1.go | 18 -
..._uploading_method_s3_staging_encryption.go | 8 -
...g_encryption_aescbc_envelope_encryption.go | 10 -
...hod_s3_staging_encryption_no_encryption.go | 9 -
...tion_redshift_uploading_method_standard.go | 9 -
internal/provider/type_destination_s3.go | 1 -
internal/provider/type_destination_s31.go | 18 -
internal/provider/type_destination_s3_glue.go | 1 -
.../provider/type_destination_s3_glue1.go | 20 -
.../type_destination_s3_glue_output_format.go | 3 +-
...ormat_json_lines_newline_delimited_json.go | 11 -
...ines_newline_delimited_json_compression.go | 8 -
...ormat_json_lines_newline_delimited_json.go | 11 -
...ines_newline_delimited_json_compression.go | 8 -
...on_s3_json_lines_newline_delimited_json.go | 11 +
.../type_destination_s3_output_format.go | 12 +-
...ation_s3_output_format_avro_apache_avro.go | 10 -
...rmat_avro_apache_avro_compression_codec.go | 12 -
...utput_format_csv_comma_separated_values.go | 11 -
..._csv_comma_separated_values_compression.go | 8 -
...ormat_json_lines_newline_delimited_json.go | 11 -
...ines_newline_delimited_json_compression.go | 8 -
...3_update_output_format_avro_apache_avro.go | 10 -
...rmat_avro_apache_avro_compression_codec.go | 12 -
...utput_format_csv_comma_separated_values.go | 11 -
..._csv_comma_separated_values_compression.go | 8 -
...ormat_json_lines_newline_delimited_json.go | 11 -
...ines_newline_delimited_json_compression.go | 8 -
.../provider/type_destination_sftp_json.go | 1 -
.../provider/type_destination_snowflake.go | 20 +-
...tination_snowflake_authorization_method.go | 9 +-
...horization_method_username_and_password.go | 10 -
.../provider/type_destination_timeplus.go | 5 +-
.../provider/type_destination_typesense.go | 11 +-
internal/provider/type_destination_vertica.go | 17 +-
...e_destination_vertica_ssh_tunnel_method.go | 12 -
.../provider/type_destination_weaviate.go | 9 +
...ype_destination_weaviate_authentication.go | 9 +
.../type_destination_weaviate_embedding.go | 13 +
.../type_destination_weaviate_indexing.go | 14 +
internal/provider/type_destination_xata.go | 5 +-
...earch.go => type_doc_array_hnsw_search.go} | 3 +-
...document_file_type_format_experimental.go} | 4 +-
internal/provider/type_enabled.go | 9 +
...o => type_encrypted_verify_certificate.go} | 3 +-
internal/provider/type_expression.go | 10 +
...ia_s3.go => type_external_table_via_s3.go} | 3 +-
...> type_field_name_mapping_config_model.go} | 5 +-
.../provider/type_file_based_stream_config.go | 17 +
...dding_from_field.go => type_from_field.go} | 3 +-
...ge.go => type_gcs_google_cloud_storage.go} | 3 +-
internal/provider/type_gcs_staging.go | 12 +
..._json_compression_gzip.go => type_gzip.go} | 2 +-
internal/provider/type_header.go | 10 +
...redential_hmac_key.go => type_hmac_key.go} | 2 +-
internal/provider/type_https_public_web.go | 9 +
internal/provider/type_iam_role.go | 9 +
...mazon_web_services.go => type_iam_user.go} | 3 +-
...sight_config.go => type_insight_config.go} | 2 +-
...type_json_lines_newline_delimited_json.go} | 2 +-
..._s3_file_format_jsonl.go => type_jsonl.go} | 3 +-
...ion.go => type_key_pair_authentication.go} | 3 +-
...cdc.go => type_logical_replication_cdc.go} | 5 +-
...ngo_db_atlas.go => type_mongo_db_atlas.go} | 2 +-
.../type_mongo_db_atlas_replica_set.go | 14 +
... => type_native_network_encryption_nne.go} | 3 +-
...codec_snappy.go => type_no_compression.go} | 2 +-
internal/provider/type_not_expression.go | 7 +
...uthentication_o_auth.go => type_o_auth.go} | 3 +-
...on_method_o_auth20.go => type_o_auth20.go} | 3 +-
...antage.go => type_o_auth2_access_token.go} | 3 +-
... type_o_auth2_confidential_application.go} | 3 +-
...n_embedding_open_ai.go => type_open_ai.go} | 3 +-
...ss_token.go => type_open_ai_compatible.go} | 6 +-
...file_format_parquet.go => type_parquet.go} | 3 +-
...on.go => type_parquet_columnar_storage.go} | 2 +-
...bedding_fake.go => type_parquet_format.go} | 4 +-
...ion.go => type_password_authentication.go} | 3 +-
internal/provider/type_project_secret.go | 9 +
...type_read_changes_using_binary_log_cdc.go} | 3 +-
..._changes_using_change_data_capture_cdc.go} | 3 +-
...ype_replica_set.go => type_replica_set.go} | 2 +-
internal/provider/type_report_config.go | 18 +
internal/provider/type_s3_staging.go | 17 +
..._token.go => type_sandbox_access_token.go} | 3 +-
...ll.go => type_scp_secure_copy_protocol.go} | 3 +-
.../provider/type_self_managed_replica_set.go | 14 +
internal/provider/type_service_account.go | 11 +
...ication.go => type_service_account_key.go} | 4 +-
...ype_service_account_key_authentication.go} | 4 +-
internal/provider/type_service_name.go | 9 +
...y.go => type_single_store_access_token.go} | 4 +-
...codec_no_compression.go => type_snappy.go} | 2 +-
internal/provider/type_source_aha.go | 5 +-
internal/provider/type_source_aircall.go | 7 +-
internal/provider/type_source_airtable.go | 3 -
.../type_source_airtable_authentication.go | 6 +-
...th.go => type_source_airtable_o_auth20.go} | 2 +-
internal/provider/type_source_alloydb.go | 21 +-
internal/provider/type_source_alloydb1.go | 19 -
..._allow.go => type_source_alloydb_allow.go} | 3 +-
.../type_source_alloydb_replication_method.go | 9 +-
...type_source_alloydb_replication_method1.go | 12 -
...ication_method_logical_replication_cdc1.go | 16 -
...rce_alloydb_replication_method_standard.go | 9 -
...lloydb_replication_method_standard_xmin.go | 9 -
.../type_source_alloydb_ssh_tunnel_method.go | 12 -
.../provider/type_source_alloydb_ssl_modes.go | 18 +-
.../type_source_alloydb_ssl_modes1.go | 18 -
.../type_source_alloydb_ssl_modes_disable.go | 10 -
.../type_source_alloydb_ssl_modes_disable1.go | 10 -
.../type_source_alloydb_ssl_modes_prefer.go | 10 -
.../type_source_alloydb_ssl_modes_prefer1.go | 10 -
.../type_source_alloydb_ssl_modes_require.go | 10 -
.../type_source_alloydb_ssl_modes_require1.go | 10 -
...ype_source_alloydb_ssl_modes_verify_ca1.go | 14 -
...pe_source_alloydb_ssl_modes_verify_full.go | 14 -
...e_source_alloydb_ssl_modes_verify_full1.go | 14 -
...lication_method_logical_replication_cdc.go | 16 -
...ication_method_logical_replication_cdc1.go | 16 -
...e_source_alloydb_update_ssl_modes_allow.go | 10 -
..._source_alloydb_update_ssl_modes_allow1.go | 10 -
...source_alloydb_update_ssl_modes_disable.go | 10 -
...ource_alloydb_update_ssl_modes_disable1.go | 10 -
..._source_alloydb_update_ssl_modes_prefer.go | 10 -
...source_alloydb_update_ssl_modes_prefer1.go | 10 -
...source_alloydb_update_ssl_modes_require.go | 10 -
...ource_alloydb_update_ssl_modes_require1.go | 10 -
...urce_alloydb_update_ssl_modes_verify_ca.go | 14 -
...rce_alloydb_update_ssl_modes_verify_ca1.go | 14 -
...ce_alloydb_update_ssl_modes_verify_full.go | 14 -
...e_alloydb_update_ssl_modes_verify_full1.go | 14 -
...ca.go => type_source_alloydb_verify_ca.go} | 5 +-
internal/provider/type_source_amazon_ads.go | 2 -
.../type_source_amazon_seller_partner.go | 7 +-
internal/provider/type_source_amazon_sqs.go | 1 -
internal/provider/type_source_amplitude.go | 1 -
.../provider/type_source_apify_dataset.go | 6 +-
internal/provider/type_source_appfollow.go | 3 +-
internal/provider/type_source_asana.go | 5 +-
...e_source_asana_authentication_mechanism.go | 6 +-
..._mechanism_authenticate_via_asana_oauth.go | 12 -
...authenticate_with_personal_access_token.go | 10 -
internal/provider/type_source_auth0.go | 1 -
...type_source_auth0_authentication_method.go | 6 +-
...hentication_method_o_auth2_access_token.go | 10 -
.../provider/type_source_aws_cloudtrail.go | 1 -
.../type_source_azure_blob_storage.go | 14 +-
...zure_blob_storage_csv_header_definition.go | 9 +
.../type_source_azure_blob_storage_format.go | 11 +
..._source_azure_blob_storage_input_format.go | 8 -
...ormat_json_lines_newline_delimited_json.go | 9 -
internal/provider/type_source_azure_table.go | 1 -
internal/provider/type_source_bamboo_hr.go | 1 -
internal/provider/type_source_bigcommerce.go | 12 -
internal/provider/type_source_bigquery.go | 1 -
internal/provider/type_source_bing_ads.go | 17 +-
internal/provider/type_source_braintree.go | 1 -
internal/provider/type_source_braze.go | 7 +-
...tination_silent.go => type_source_cart.go} | 5 +-
.../type_source_cart_authorization_method.go | 8 +
internal/provider/type_source_chargebee.go | 1 -
internal/provider/type_source_chartmogul.go | 6 +-
internal/provider/type_source_clickhouse.go | 13 +-
...ype_source_clickhouse_ssh_tunnel_method.go | 12 -
internal/provider/type_source_clickup_api.go | 1 -
internal/provider/type_source_clockify.go | 1 -
internal/provider/type_source_close_com.go | 5 +-
internal/provider/type_source_coda.go | 3 +-
internal/provider/type_source_coin_api.go | 1 -
.../provider/type_source_coinmarketcap.go | 7 +-
internal/provider/type_source_configcat.go | 11 -
internal/provider/type_source_confluence.go | 1 -
internal/provider/type_source_convex.go | 11 -
internal/provider/type_source_datascope.go | 5 +-
internal/provider/type_source_delighted.go | 5 +-
internal/provider/type_source_dixa.go | 7 +-
internal/provider/type_source_dockerhub.go | 1 -
internal/provider/type_source_dremio.go | 5 +-
internal/provider/type_source_dynamodb.go | 1 -
internal/provider/type_source_dynamodb1.go | 14 -
.../provider/type_source_e2e_test_cloud.go | 14 -
...type_source_e2e_test_cloud_mock_catalog.go | 10 -
...2e_test_cloud_mock_catalog_multi_schema.go | 10 -
...e_test_cloud_mock_catalog_single_schema.go | 12 -
internal/provider/type_source_emailoctopus.go | 10 -
.../provider/type_source_exchange_rates.go | 1 -
.../type_source_facebook_marketing.go | 26 +-
.../provider/type_source_facebook_pages.go | 1 -
internal/provider/type_source_faker.go | 11 +-
internal/provider/type_source_fauna.go | 1 -
.../provider/type_source_fauna_collection.go | 4 +-
...e_source_fauna_collection_deletion_mode.go | 10 -
...fauna_collection_deletion_mode_disabled.go | 9 -
..._fauna_collection_deletion_mode_enabled.go | 10 -
.../type_source_fauna_deletion_mode.go | 8 +
internal/provider/type_source_file.go | 13 +
...ype_source_file_s3_amazon_web_services.go} | 3 +-
internal/provider/type_source_file_secure.go | 14 -
...ype_source_file_secure_storage_provider.go | 20 -
...ecure_storage_provider_https_public_web.go | 10 -
...orage_provider_scp_secure_copy_protocol.go | 13 -
...ider_sftp_secure_file_transfer_protocol.go | 13 -
.../type_source_file_storage_provider.go | 13 +
internal/provider/type_source_firebolt.go | 13 +-
internal/provider/type_source_freshcaller.go | 1 -
internal/provider/type_source_freshdesk.go | 1 -
internal/provider/type_source_freshsales.go | 1 -
internal/provider/type_source_gainsight_px.go | 10 -
internal/provider/type_source_gcs.go | 8 +-
internal/provider/type_source_gcs_format.go | 7 +
.../provider/type_source_gcs_stream_config.go | 17 +
internal/provider/type_source_getlago.go | 4 +-
internal/provider/type_source_github.go | 14 +-
.../type_source_github_authentication.go | 6 +-
internal/provider/type_source_gitlab.go | 13 +-
...type_source_gitlab_authorization_method.go | 6 +-
...ce_gitlab_authorization_method_o_auth20.go | 14 -
...tlab_authorization_method_private_token.go | 10 -
...th20.go => type_source_gitlab_o_auth20.go} | 3 +-
internal/provider/type_source_glassfrog.go | 10 -
internal/provider/type_source_gnews.go | 1 -
internal/provider/type_source_google_ads.go | 3 +-
...pe_source_google_ads_google_credentials.go | 0
.../type_source_google_analytics_data_api.go | 11 +-
...e_google_analytics_data_api_credentials.go | 6 +-
...edentials_authenticate_via_google_oauth.go | 13 -
...analytics_data_api_custom_report_config.go | 13 +
...le_analytics_data_api_dimensions_filter.go | 10 +
...nsions_filter1_expressions_double_value.go | 9 +
...ensions_filter1_expressions_int64_value.go | 9 +
...m_reports_array_dimension_filter_filter.go | 10 +
...ports_array_dimension_filter_from_value.go | 8 +
...mas_custom_reports_array_between_filter.go | 8 +
...mas_custom_reports_array_in_list_filter.go | 10 +
...mas_custom_reports_array_numeric_filter.go | 10 +
...emas_custom_reports_array_string_filter.go | 11 +
.../type_source_google_analytics_v4.go | 14 -
..._source_google_analytics_v4_credentials.go | 10 -
.../provider/type_source_google_directory.go | 3 -
...rce_google_directory_google_credentials.go | 6 +-
..._google_credentials_service_account_key.go | 11 -
...e_credentials_sign_in_via_google_o_auth.go | 12 -
internal/provider/type_source_google_drive.go | 12 +
...type_source_google_drive_authentication.go | 8 +
.../type_source_google_drive_csv_format.go | 20 +
...e_google_drive_file_based_stream_config.go | 16 +
.../type_source_google_drive_format.go | 11 +
...ive_service_account_key_authentication.go} | 3 +-
.../type_source_google_pagespeed_insights.go | 1 -
.../type_source_google_search_console.go | 1 -
...ogle_search_console_authentication_type.go | 6 +-
...gle_search_console_custom_report_config.go | 0
...ole_service_account_key_authentication.go} | 3 +-
.../provider/type_source_google_sheets.go | 7 +-
...ype_source_google_sheets_authentication.go | 10 -
...tication_authenticate_via_google_o_auth.go | 12 -
.../provider/type_source_google_webfonts.go | 1 -
...e_source_google_workspace_admin_reports.go | 1 -
internal/provider/type_source_greenhouse.go | 10 -
internal/provider/type_source_gridly.go | 5 +-
internal/provider/type_source_harvest.go | 1 -
internal/provider/type_source_harvest1.go | 13 -
...uthenticate_with_personal_access_token.go} | 5 +-
...source_harvest_authentication_mechanism.go | 6 +-
...ource_harvest_authentication_mechanism1.go | 10 -
...chanism_authenticate_via_harvest_o_auth.go | 13 -
...hanism_authenticate_via_harvest_o_auth1.go | 13 -
...uthenticate_with_personal_access_token1.go | 11 -
...chanism_authenticate_via_harvest_o_auth.go | 13 -
...hanism_authenticate_via_harvest_o_auth1.go | 13 -
...authenticate_with_personal_access_token.go | 11 -
...uthenticate_with_personal_access_token1.go | 11 -
internal/provider/type_source_hubplanner.go | 10 -
internal/provider/type_source_hubspot.go | 1 -
.../type_source_hubspot_authentication.go | 6 +-
...pe_source_hubspot_authentication_o_auth.go | 12 -
...urce_hubspot_authentication_private_app.go | 10 -
internal/provider/type_source_insightly.go | 5 +-
internal/provider/type_source_instagram.go | 1 -
internal/provider/type_source_instatus.go | 10 -
internal/provider/type_source_intercom.go | 1 -
internal/provider/type_source_ip2whois.go | 5 +-
internal/provider/type_source_iterable.go | 11 -
internal/provider/type_source_jira.go | 3 +-
internal/provider/type_source_k6_cloud.go | 3 +-
internal/provider/type_source_klarna.go | 1 -
internal/provider/type_source_klaviyo.go | 5 +-
.../provider/type_source_kustomer_singer.go | 5 +-
internal/provider/type_source_kyve.go | 11 +-
internal/provider/type_source_lemlist.go | 10 -
internal/provider/type_source_lever_hiring.go | 1 -
...e_lever_hiring_authentication_mechanism.go | 6 +-
...echanism_authenticate_via_lever_api_key.go | 10 -
...mechanism_authenticate_via_lever_o_auth.go | 12 -
internal/provider/type_source_linkedin_ads.go | 9 +-
...type_source_linkedin_ads_authentication.go | 6 +-
...inkedin_ads_authentication_access_token.go | 10 -
.../provider/type_source_linkedin_pages.go | 5 +-
...pe_source_linkedin_pages_authentication.go | 10 -
internal/provider/type_source_linnworks.go | 1 -
internal/provider/type_source_lokalise.go | 5 +-
internal/provider/type_source_mailchimp.go | 1 -
.../type_source_mailchimp_authentication.go | 6 +-
...source_mailchimp_authentication_api_key.go | 10 -
internal/provider/type_source_mailgun.go | 1 -
internal/provider/type_source_mailjet_sms.go | 7 +-
internal/provider/type_source_marketo.go | 1 -
internal/provider/type_source_metabase.go | 1 -
.../provider/type_source_microsoft_teams.go | 1 -
...icrosoft_teams_authentication_mechanism.go | 6 +-
...ism_authenticate_via_microsoft_o_auth20.go | 13 -
internal/provider/type_source_mixpanel.go | 18 +-
...source_mixpanel_authentication_wildcard.go | 6 +-
..._authentication_wildcard_project_secret.go | 10 -
...authentication_wildcard_service_account.go | 11 -
internal/provider/type_source_monday.go | 3 -
...type_source_monday_authorization_method.go | 6 +-
...e_monday_authorization_method_api_token.go | 10 -
...th20.go => type_source_monday_o_auth20.go} | 3 +-
internal/provider/type_source_mongodb.go | 14 -
internal/provider/type_source_mongodb1.go | 14 -
.../type_source_mongodb_internal_poc.go | 1 -
...e_source_mongodb_mongo_db_instance_type.go | 12 -
..._source_mongodb_mongo_db_instance_type1.go | 12 -
...b_mongo_db_instance_type_mongo_db_atlas.go | 11 -
..._mongo_db_instance_type_mongo_db_atlas1.go | 11 -
...e_mongo_db_instance_type_mongo_db_atlas.go | 11 -
..._mongo_db_instance_type_mongo_db_atlas1.go | 11 -
internal/provider/type_source_mongodb_v2.go | 12 +
.../type_source_mongodb_v2_cluster_type.go | 8 +
internal/provider/type_source_mssql.go | 21 +-
.../type_source_mssql_ssh_tunnel_method.go | 12 -
.../provider/type_source_mssql_ssl_method.go | 10 -
.../type_source_mssql_update_method.go | 6 +-
...d_scan_changes_with_user_defined_cursor.go | 9 -
internal/provider/type_source_my_hours.go | 1 -
internal/provider/type_source_mysql.go | 19 +-
.../type_source_mysql_ssh_tunnel_method.go | 12 -
.../provider/type_source_mysql_ssl_modes.go | 12 +-
.../type_source_mysql_update_method.go | 6 +-
...y_ca.go => type_source_mysql_verify_ca.go} | 3 +-
internal/provider/type_source_netsuite.go | 1 -
internal/provider/type_source_notion.go | 5 +-
.../type_source_notion_authenticate_using.go | 10 -
..._notion_authenticate_using_access_token.go | 10 -
...ype_source_notion_authentication_method.go | 8 +
...th20.go => type_source_notion_o_auth20.go} | 3 +-
internal/provider/type_source_nytimes.go | 11 +-
internal/provider/type_source_okta.go | 1 -
.../type_source_okta_authorization_method.go | 6 +-
internal/provider/type_source_omnisend.go | 10 -
internal/provider/type_source_onesignal.go | 9 +-
internal/provider/type_source_oracle.go | 19 +-
.../provider/type_source_oracle_connect_by.go | 6 +-
...e_source_oracle_connect_by_service_name.go | 10 -
...e_source_oracle_connect_by_system_idsid.go | 10 -
.../provider/type_source_oracle_encryption.go | 6 +-
...yption_tls_encrypted_verify_certificate.go | 10 -
.../type_source_oracle_ssh_tunnel_method.go | 12 -
internal/provider/type_source_orb.go | 1 -
internal/provider/type_source_orbit.go | 7 +-
.../provider/type_source_outbrain_amplify.go | 1 -
..._outbrain_amplify_authentication_method.go | 6 +-
...lify_authentication_method_access_token.go | 10 -
...authentication_method_username_password.go | 11 -
internal/provider/type_source_outreach.go | 1 -
.../type_source_paypal_transaction.go | 2 +-
internal/provider/type_source_paystack.go | 1 -
internal/provider/type_source_pendo.go | 10 -
internal/provider/type_source_persistiq.go | 10 -
internal/provider/type_source_pexels_api.go | 1 -
internal/provider/type_source_pinterest.go | 8 +-
...e_source_pinterest_authorization_method.go | 5 +-
...pinterest_authorization_method_o_auth20.go | 12 -
internal/provider/type_source_pipedrive.go | 5 +-
internal/provider/type_source_pocket.go | 1 -
internal/provider/type_source_pokeapi.go | 1 -
.../provider/type_source_polygon_stock_api.go | 1 -
internal/provider/type_source_postgres.go | 21 +-
internal/provider/type_source_postgres1.go | 19 -
.../type_source_postgres_ssh_tunnel_method.go | 12 -
.../type_source_postgres_ssl_modes.go | 18 -
.../type_source_postgres_ssl_modes1.go | 18 -
.../type_source_postgres_ssl_modes_allow.go | 10 -
.../type_source_postgres_ssl_modes_allow1.go | 10 -
.../type_source_postgres_ssl_modes_disable.go | 10 -
...type_source_postgres_ssl_modes_disable1.go | 10 -
.../type_source_postgres_ssl_modes_prefer.go | 10 -
.../type_source_postgres_ssl_modes_prefer1.go | 10 -
.../type_source_postgres_ssl_modes_require.go | 10 -
...type_source_postgres_ssl_modes_require1.go | 10 -
...ype_source_postgres_ssl_modes_verify_ca.go | 14 -
...pe_source_postgres_ssl_modes_verify_ca1.go | 14 -
...e_source_postgres_ssl_modes_verify_full.go | 14 -
..._source_postgres_ssl_modes_verify_full1.go | 14 -
.../type_source_postgres_update_method.go | 9 +-
.../type_source_postgres_update_method1.go | 12 -
..._detect_changes_with_xmin_system_column.go | 9 -
..._read_changes_using_write_ahead_log_cdc.go | 16 -
...read_changes_using_write_ahead_log_cdc1.go | 16 -
...d_scan_changes_with_user_defined_cursor.go | 9 -
..._source_postgres_update_ssl_modes_allow.go | 10 -
...source_postgres_update_ssl_modes_allow1.go | 10 -
...ource_postgres_update_ssl_modes_disable.go | 10 -
...urce_postgres_update_ssl_modes_disable1.go | 10 -
...source_postgres_update_ssl_modes_prefer.go | 10 -
...ource_postgres_update_ssl_modes_prefer1.go | 10 -
...ource_postgres_update_ssl_modes_require.go | 10 -
...urce_postgres_update_ssl_modes_require1.go | 10 -
...rce_postgres_update_ssl_modes_verify_ca.go | 14 -
...ce_postgres_update_ssl_modes_verify_ca1.go | 14 -
...e_postgres_update_ssl_modes_verify_full.go | 14 -
..._postgres_update_ssl_modes_verify_full1.go | 14 -
..._read_changes_using_write_ahead_log_cdc.go | 16 -
...read_changes_using_write_ahead_log_cdc1.go | 16 -
internal/provider/type_source_posthog.go | 1 -
internal/provider/type_source_postmarkapp.go | 1 -
internal/provider/type_source_prestashop.go | 7 +-
internal/provider/type_source_punk_api.go | 1 -
internal/provider/type_source_pypi.go | 1 -
internal/provider/type_source_qualaroo.go | 9 +-
internal/provider/type_source_quickbooks.go | 1 -
..._source_quickbooks_authorization_method.go | 3 +-
....go => type_source_quickbooks_o_auth20.go} | 3 +-
internal/provider/type_source_railz.go | 7 +-
internal/provider/type_source_recharge.go | 1 -
internal/provider/type_source_recreation.go | 1 -
internal/provider/type_source_recruitee.go | 5 +-
internal/provider/type_source_recurly.go | 7 +-
internal/provider/type_source_redshift.go | 1 -
internal/provider/type_source_retently.go | 3 -
internal/provider/type_source_retently1.go | 10 -
...ource_retently_authentication_mechanism.go | 6 +-
...urce_retently_authentication_mechanism1.go | 10 -
...hanism_authenticate_via_retently_o_auth.go | 13 -
...anism_authenticate_via_retently_o_auth1.go | 13 -
...n_mechanism_authenticate_with_api_token.go | 11 -
..._mechanism_authenticate_with_api_token1.go | 11 -
...hanism_authenticate_via_retently_o_auth.go | 13 -
...anism_authenticate_via_retently_o_auth1.go | 13 -
...n_mechanism_authenticate_with_api_token.go | 11 -
..._mechanism_authenticate_with_api_token1.go | 11 -
internal/provider/type_source_rki_covid.go | 3 +-
internal/provider/type_source_rss.go | 3 +-
internal/provider/type_source_s3.go | 23 +-
...type_source_s3_file_based_stream_config.go | 18 -
...urce_s3_file_based_stream_config_format.go | 14 -
..._based_stream_config_format_avro_format.go | 10 -
...format_csv_format_csv_header_definition.go | 9 -
...mat_csv_header_definition_autogenerated.go | 9 -
...v_format_csv_header_definition_from_csv.go | 9 -
...mat_csv_header_definition_user_provided.go | 10 -
...based_stream_config_format_jsonl_format.go | 9 -
...sed_stream_config_format_parquet_format.go | 10 -
.../provider/type_source_s3_file_format.go | 12 +-
.../type_source_s3_s3_amazon_web_services.go | 0
...e_based_stream_config_format_csv_format.go | 22 -
...format_csv_format_csv_header_definition.go | 9 -
internal/provider/type_source_salesforce.go | 16 +-
internal/provider/type_source_salesloft.go | 1 -
.../type_source_salesloft_credentials.go | 6 +-
...ft_credentials_authenticate_via_api_key.go | 10 -
.../provider/type_source_sap_fieldglass.go | 10 -
internal/provider/type_source_secoda.go | 10 -
internal/provider/type_source_sendgrid.go | 5 +-
internal/provider/type_source_sendinblue.go | 10 -
internal/provider/type_source_senseforce.go | 1 -
internal/provider/type_source_sentry.go | 1 -
internal/provider/type_source_sftp.go | 1 -
...ype_source_sftp_authentication_wildcard.go | 6 +-
internal/provider/type_source_sftp_bulk.go | 1 -
...pe_source_sftp_password_authentication.go} | 3 +-
...ype_source_sftp_ssh_key_authentication.go} | 3 +-
internal/provider/type_source_shopify.go | 1 -
...h20.go => type_source_shopify_o_auth20.go} | 3 +-
...ce_shopify_shopify_authorization_method.go | 6 +-
...opify_authorization_method_api_password.go | 10 -
...y_shopify_authorization_method_o_auth20.go | 12 -
internal/provider/type_source_shortio.go | 7 +-
internal/provider/type_source_slack.go | 1 -
...e_source_slack_authentication_mechanism.go | 6 +-
...lack_authentication_mechanism_api_token.go | 10 -
...tion_mechanism_sign_in_via_slack_o_auth.go | 12 -
internal/provider/type_source_smaily.go | 1 -
internal/provider/type_source_smartengage.go | 10 -
internal/provider/type_source_smartsheets.go | 1 -
...source_smartsheets_authorization_method.go | 6 +-
.../type_source_snapchat_marketing.go | 1 -
internal/provider/type_source_snowflake.go | 1 -
...e_source_snowflake_authorization_method.go | 6 +-
...horization_method_username_and_password.go | 11 -
...0.go => type_source_snowflake_o_auth20.go} | 3 +-
internal/provider/type_source_sonar_cloud.go | 1 -
internal/provider/type_source_spacex_api.go | 5 +-
internal/provider/type_source_square.go | 1 -
.../type_source_square_authentication.go | 6 +-
...pe_source_square_authentication_api_key.go | 10 -
...are_authentication_oauth_authentication.go | 12 -
internal/provider/type_source_strava.go | 2 -
internal/provider/type_source_stripe.go | 3 +-
.../provider/type_source_survey_sparrow.go | 1 -
.../type_source_survey_sparrow_base_url.go | 6 +-
..._survey_sparrow_base_url_global_account.go | 9 -
...survey_sparrow_base_urleu_based_account.go | 9 -
internal/provider/type_source_surveymonkey.go | 9 +-
...nkey_survey_monkey_authorization_method.go | 12 -
internal/provider/type_source_tempo.go | 10 -
.../provider/type_source_the_guardian_api.go | 13 +-
.../provider/type_source_tiktok_marketing.go | 1 -
..._tiktok_marketing_authentication_method.go | 6 +-
... type_source_tiktok_marketing_o_auth20.go} | 3 +-
internal/provider/type_source_todoist.go | 10 -
internal/provider/type_source_trello.go | 9 +-
internal/provider/type_source_trustpilot.go | 1 -
...y.go => type_source_trustpilot_api_key.go} | 3 +-
..._source_trustpilot_authorization_method.go | 6 +-
.../provider/type_source_tvmaze_schedule.go | 1 -
internal/provider/type_source_twilio.go | 1 -
.../provider/type_source_twilio_taskrouter.go | 1 -
internal/provider/type_source_twitter.go | 9 +-
internal/provider/type_source_typeform.go | 7 +-
...pe_source_typeform_authorization_method.go | 10 -
internal/provider/type_source_us_census.go | 1 -
internal/provider/type_source_webflow.go | 5 +-
.../provider/type_source_whisky_hunter.go | 9 -
.../type_source_wikipedia_pageviews.go | 15 +-
internal/provider/type_source_woocommerce.go | 9 +-
internal/provider/type_source_xero.go | 12 -
.../provider/type_source_yandex_metrica.go | 9 +-
internal/provider/type_source_yotpo.go | 1 -
internal/provider/type_source_younium.go | 13 -
.../provider/type_source_youtube_analytics.go | 5 +-
.../type_source_youtube_analytics1.go | 10 -
...be_analytics_authenticate_via_o_auth201.go | 12 -
internal/provider/type_source_zendesk_chat.go | 1 -
...ource_zendesk_chat_authorization_method.go | 6 +-
..._chat_authorization_method_access_token.go | 10 -
...o => type_source_zendesk_chat_o_auth20.go} | 3 +-
.../provider/type_source_zendesk_sunshine.go | 1 -
...type_source_zendesk_sunshine_api_token.go} | 4 +-
...e_zendesk_sunshine_authorization_method.go | 6 +-
...sunshine_authorization_method_api_token.go | 11 -
..._sunshine_authorization_method_o_auth20.go | 12 -
.../provider/type_source_zendesk_support.go | 1 -
.../provider/type_source_zendesk_support1.go | 13 -
... type_source_zendesk_support_api_token.go} | 5 +-
...e_source_zendesk_support_authentication.go | 6 +-
..._source_zendesk_support_authentication1.go | 10 -
...endesk_support_authentication_api_token.go | 12 -
...ndesk_support_authentication_api_token1.go | 12 -
...zendesk_support_authentication_o_auth20.go | 13 -
...endesk_support_authentication_o_auth201.go | 13 -
...> type_source_zendesk_support_o_auth20.go} | 5 +-
...support_update_authentication_api_token.go | 12 -
...upport_update_authentication_api_token1.go | 12 -
..._support_update_authentication_o_auth20.go | 13 -
...support_update_authentication_o_auth201.go | 13 -
internal/provider/type_source_zendesk_talk.go | 7 +-
.../provider/type_source_zendesk_talk1.go | 12 -
...type_source_zendesk_talk_authentication.go | 10 -
...ype_source_zendesk_talk_authentication1.go | 10 -
..._zendesk_talk_authentication_api_token1.go | 12 -
...e_zendesk_talk_authentication_o_auth201.go | 13 -
...sk_talk_update_authentication_api_token.go | 12 -
...k_talk_update_authentication_api_token1.go | 12 -
...esk_talk_update_authentication_o_auth20.go | 13 -
...sk_talk_update_authentication_o_auth201.go | 13 -
internal/provider/type_source_zenloop.go | 1 -
internal/provider/type_source_zoho_crm.go | 1 -
internal/provider/type_source_zoom.go | 3 +-
internal/provider/type_source_zuora.go | 1 -
.../provider/type_ssh_key_authentication.go | 12 +
...o => type_standalone_mongo_db_instance.go} | 2 +-
.../provider/type_stream_configuration.go | 0
.../provider/type_stream_configurations.go | 0
...s_criteria.go => type_streams_criteria.go} | 2 +-
internal/provider/type_system_idsid.go | 9 +
... type_tls_encrypted_verify_certificate.go} | 4 +-
internal/provider/type_user_provided.go | 9 +
.../provider/type_username_and_password.go | 9 +
..._password.go => type_username_password.go} | 3 +-
...l_modes_verify_ca.go => type_verify_ca.go} | 3 +-
...verify_identity.go => type_verify_full.go} | 3 +-
...ompression_codec_deflate.go => type_xz.go} | 2 +-
...n_codec_zstandard.go => type_zstandard.go} | 2 +-
internal/provider/utils.go | 2 +-
internal/provider/workspace_data_source.go | 20 +-
.../provider/workspace_data_source_sdk.go | 8 +-
internal/provider/workspace_resource.go | 10 +-
internal/provider/workspace_resource_sdk.go | 8 +-
internal/sdk/connections.go | 77 +-
internal/sdk/destinations.go | 2332 +-
internal/sdk/jobs.go | 71 +-
.../sdk/pkg/models/operations/canceljob.go | 42 +-
.../pkg/models/operations/createconnection.go | 39 +-
.../models/operations/createdestination.go | 37 +-
.../createdestinationawsdatalake.go | 37 +-
.../createdestinationazureblobstorage.go | 37 +-
.../operations/createdestinationbigquery.go | 37 +-
.../createdestinationbigquerydenormalized.go | 16 -
.../operations/createdestinationclickhouse.go | 37 +-
.../operations/createdestinationconvex.go | 37 +-
.../operations/createdestinationcumulio.go | 37 +-
.../operations/createdestinationdatabend.go | 37 +-
.../operations/createdestinationdatabricks.go | 37 +-
.../operations/createdestinationdevnull.go | 37 +-
.../operations/createdestinationduckdb.go | 47 +
.../operations/createdestinationdynamodb.go | 37 +-
.../createdestinationelasticsearch.go | 37 +-
.../operations/createdestinationfirebolt.go | 37 +-
.../operations/createdestinationfirestore.go | 37 +-
.../models/operations/createdestinationgcs.go | 37 +-
.../createdestinationgooglesheets.go | 37 +-
.../operations/createdestinationkeen.go | 37 +-
.../operations/createdestinationkinesis.go | 37 +-
.../operations/createdestinationlangchain.go | 37 +-
.../operations/createdestinationmilvus.go | 37 +-
.../operations/createdestinationmongodb.go | 37 +-
.../operations/createdestinationmssql.go | 37 +-
.../operations/createdestinationmysql.go | 37 +-
.../operations/createdestinationoracle.go | 37 +-
.../operations/createdestinationpinecone.go | 37 +-
.../operations/createdestinationpostgres.go | 37 +-
.../operations/createdestinationpubsub.go | 37 +-
.../operations/createdestinationqdrant.go | 47 +
.../operations/createdestinationredis.go | 37 +-
.../operations/createdestinationredshift.go | 37 +-
.../models/operations/createdestinations3.go | 37 +-
.../operations/createdestinations3glue.go | 37 +-
.../operations/createdestinationsftpjson.go | 37 +-
.../operations/createdestinationsnowflake.go | 37 +-
.../operations/createdestinationtimeplus.go | 37 +-
.../operations/createdestinationtypesense.go | 37 +-
.../operations/createdestinationvertica.go | 37 +-
.../operations/createdestinationweaviate.go | 47 +
.../operations/createdestinationxata.go | 37 +-
.../sdk/pkg/models/operations/createjob.go | 35 +-
...createorupdateworkspaceoauthcredentials.go | 42 +-
.../sdk/pkg/models/operations/createsource.go | 37 +-
.../pkg/models/operations/createsourceaha.go | 37 +-
.../models/operations/createsourceaircall.go | 37 +-
.../models/operations/createsourceairtable.go | 37 +-
.../models/operations/createsourcealloydb.go | 37 +-
.../operations/createsourceamazonads.go | 37 +-
.../createsourceamazonsellerpartner.go | 37 +-
.../operations/createsourceamazonsqs.go | 37 +-
.../operations/createsourceamplitude.go | 37 +-
.../operations/createsourceapifydataset.go | 37 +-
.../operations/createsourceappfollow.go | 37 +-
.../models/operations/createsourceasana.go | 37 +-
.../models/operations/createsourceauth0.go | 37 +-
.../operations/createsourceawscloudtrail.go | 37 +-
.../createsourceazureblobstorage.go | 37 +-
.../operations/createsourceazuretable.go | 37 +-
.../models/operations/createsourcebamboohr.go | 37 +-
.../operations/createsourcebigcommerce.go | 16 -
.../models/operations/createsourcebigquery.go | 37 +-
.../models/operations/createsourcebingads.go | 37 +-
.../operations/createsourcebraintree.go | 37 +-
.../models/operations/createsourcebraze.go | 37 +-
.../pkg/models/operations/createsourcecart.go | 47 +
.../operations/createsourcechargebee.go | 37 +-
.../operations/createsourcechartmogul.go | 37 +-
.../operations/createsourceclickhouse.go | 37 +-
.../operations/createsourceclickupapi.go | 37 +-
.../models/operations/createsourceclockify.go | 37 +-
.../models/operations/createsourceclosecom.go | 37 +-
.../pkg/models/operations/createsourcecoda.go | 37 +-
.../models/operations/createsourcecoinapi.go | 37 +-
.../operations/createsourcecoinmarketcap.go | 37 +-
.../operations/createsourceconfigcat.go | 37 +-
.../operations/createsourceconfluence.go | 37 +-
.../models/operations/createsourceconvex.go | 37 +-
.../operations/createsourcedatascope.go | 37 +-
.../operations/createsourcedelighted.go | 37 +-
.../pkg/models/operations/createsourcedixa.go | 37 +-
.../operations/createsourcedockerhub.go | 37 +-
.../models/operations/createsourcedremio.go | 37 +-
.../models/operations/createsourcedynamodb.go | 37 +-
.../operations/createsourcee2etestcloud.go | 16 -
.../operations/createsourceemailoctopus.go | 37 +-
.../operations/createsourceexchangerates.go | 37 +-
.../createsourcefacebookmarketing.go | 37 +-
.../operations/createsourcefacebookpages.go | 37 +-
.../models/operations/createsourcefaker.go | 37 +-
.../models/operations/createsourcefauna.go | 37 +-
.../pkg/models/operations/createsourcefile.go | 47 +
.../operations/createsourcefilesecure.go | 16 -
.../models/operations/createsourcefirebolt.go | 37 +-
.../operations/createsourcefreshcaller.go | 37 +-
.../operations/createsourcefreshdesk.go | 37 +-
.../operations/createsourcefreshsales.go | 37 +-
.../operations/createsourcegainsightpx.go | 37 +-
.../pkg/models/operations/createsourcegcs.go | 37 +-
.../models/operations/createsourcegetlago.go | 37 +-
.../models/operations/createsourcegithub.go | 37 +-
.../models/operations/createsourcegitlab.go | 37 +-
.../operations/createsourceglassfrog.go | 37 +-
.../models/operations/createsourcegnews.go | 37 +-
.../operations/createsourcegoogleads.go | 37 +-
.../createsourcegoogleanalyticsdataapi.go | 37 +-
.../createsourcegoogleanalyticsv4.go | 16 -
.../operations/createsourcegoogledirectory.go | 37 +-
.../operations/createsourcegoogledrive.go | 47 +
.../createsourcegooglepagespeedinsights.go | 37 +-
.../createsourcegooglesearchconsole.go | 37 +-
.../operations/createsourcegooglesheets.go | 37 +-
.../operations/createsourcegooglewebfonts.go | 37 +-
...createsourcegoogleworkspaceadminreports.go | 37 +-
.../operations/createsourcegreenhouse.go | 37 +-
.../models/operations/createsourcegridly.go | 37 +-
.../models/operations/createsourceharvest.go | 37 +-
.../operations/createsourcehubplanner.go | 37 +-
.../models/operations/createsourcehubspot.go | 37 +-
.../operations/createsourceinsightly.go | 37 +-
.../operations/createsourceinstagram.go | 37 +-
.../models/operations/createsourceinstatus.go | 37 +-
.../models/operations/createsourceintercom.go | 37 +-
.../models/operations/createsourceip2whois.go | 37 +-
.../models/operations/createsourceiterable.go | 37 +-
.../pkg/models/operations/createsourcejira.go | 37 +-
.../models/operations/createsourcek6cloud.go | 37 +-
.../models/operations/createsourceklarna.go | 37 +-
.../models/operations/createsourceklaviyo.go | 37 +-
.../operations/createsourcekustomersinger.go | 37 +-
.../pkg/models/operations/createsourcekyve.go | 37 +-
.../operations/createsourcelaunchdarkly.go | 37 +-
.../models/operations/createsourcelemlist.go | 37 +-
.../operations/createsourceleverhiring.go | 37 +-
.../operations/createsourcelinkedinads.go | 37 +-
.../operations/createsourcelinkedinpages.go | 37 +-
.../operations/createsourcelinnworks.go | 37 +-
.../models/operations/createsourcelokalise.go | 37 +-
.../operations/createsourcemailchimp.go | 37 +-
.../models/operations/createsourcemailgun.go | 37 +-
.../operations/createsourcemailjetsms.go | 37 +-
.../models/operations/createsourcemarketo.go | 37 +-
.../models/operations/createsourcemetabase.go | 37 +-
.../operations/createsourcemicrosoftteams.go | 37 +-
.../models/operations/createsourcemixpanel.go | 37 +-
.../models/operations/createsourcemonday.go | 37 +-
.../models/operations/createsourcemongodb.go | 16 -
.../createsourcemongodbinternalpoc.go | 37 +-
.../operations/createsourcemongodbv2.go | 47 +
.../models/operations/createsourcemssql.go | 37 +-
.../models/operations/createsourcemyhours.go | 37 +-
.../models/operations/createsourcemysql.go | 37 +-
.../models/operations/createsourcenetsuite.go | 37 +-
.../models/operations/createsourcenotion.go | 37 +-
.../models/operations/createsourcenytimes.go | 37 +-
.../pkg/models/operations/createsourceokta.go | 37 +-
.../models/operations/createsourceomnisend.go | 37 +-
.../operations/createsourceonesignal.go | 37 +-
.../models/operations/createsourceoracle.go | 37 +-
.../pkg/models/operations/createsourceorb.go | 37 +-
.../models/operations/createsourceorbit.go | 37 +-
.../operations/createsourceoutbrainamplify.go | 37 +-
.../models/operations/createsourceoutreach.go | 37 +-
.../createsourcepaypaltransaction.go | 37 +-
.../models/operations/createsourcepaystack.go | 37 +-
.../models/operations/createsourcependo.go | 37 +-
.../operations/createsourcepersistiq.go | 37 +-
.../operations/createsourcepexelsapi.go | 37 +-
.../operations/createsourcepinterest.go | 37 +-
.../operations/createsourcepipedrive.go | 37 +-
.../models/operations/createsourcepocket.go | 37 +-
.../models/operations/createsourcepokeapi.go | 37 +-
.../operations/createsourcepolygonstockapi.go | 37 +-
.../models/operations/createsourcepostgres.go | 37 +-
.../models/operations/createsourceposthog.go | 37 +-
.../operations/createsourcepostmarkapp.go | 37 +-
.../operations/createsourceprestashop.go | 37 +-
.../models/operations/createsourcepunkapi.go | 37 +-
.../pkg/models/operations/createsourcepypi.go | 37 +-
.../models/operations/createsourcequalaroo.go | 37 +-
.../operations/createsourcequickbooks.go | 37 +-
.../models/operations/createsourcerailz.go | 37 +-
.../models/operations/createsourcerecharge.go | 37 +-
.../operations/createsourcerecreation.go | 37 +-
.../operations/createsourcerecruitee.go | 37 +-
.../models/operations/createsourcerecurly.go | 37 +-
.../models/operations/createsourceredshift.go | 37 +-
.../models/operations/createsourceretently.go | 37 +-
.../models/operations/createsourcerkicovid.go | 37 +-
.../pkg/models/operations/createsourcerss.go | 37 +-
.../pkg/models/operations/createsources3.go | 37 +-
.../operations/createsourcesalesforce.go | 37 +-
.../operations/createsourcesalesloft.go | 37 +-
.../operations/createsourcesapfieldglass.go | 37 +-
.../models/operations/createsourcesecoda.go | 37 +-
.../models/operations/createsourcesendgrid.go | 37 +-
.../operations/createsourcesendinblue.go | 37 +-
.../operations/createsourcesenseforce.go | 37 +-
.../models/operations/createsourcesentry.go | 37 +-
.../pkg/models/operations/createsourcesftp.go | 37 +-
.../models/operations/createsourcesftpbulk.go | 37 +-
.../models/operations/createsourceshopify.go | 37 +-
.../models/operations/createsourceshortio.go | 37 +-
.../models/operations/createsourceslack.go | 37 +-
.../models/operations/createsourcesmaily.go | 37 +-
.../operations/createsourcesmartengage.go | 37 +-
.../operations/createsourcesmartsheets.go | 37 +-
.../createsourcesnapchatmarketing.go | 37 +-
.../operations/createsourcesnowflake.go | 37 +-
.../operations/createsourcesonarcloud.go | 37 +-
.../operations/createsourcespacexapi.go | 37 +-
.../models/operations/createsourcesquare.go | 37 +-
.../models/operations/createsourcestrava.go | 37 +-
.../models/operations/createsourcestripe.go | 37 +-
.../operations/createsourcesurveymonkey.go | 37 +-
.../operations/createsourcesurveysparrow.go | 37 +-
.../models/operations/createsourcetempo.go | 37 +-
.../operations/createsourcetheguardianapi.go | 37 +-
.../operations/createsourcetiktokmarketing.go | 37 +-
.../models/operations/createsourcetodoist.go | 37 +-
.../models/operations/createsourcetrello.go | 37 +-
.../operations/createsourcetrustpilot.go | 37 +-
.../operations/createsourcetvmazeschedule.go | 37 +-
.../models/operations/createsourcetwilio.go | 37 +-
.../createsourcetwiliotaskrouter.go | 37 +-
.../models/operations/createsourcetwitter.go | 37 +-
.../models/operations/createsourcetypeform.go | 37 +-
.../models/operations/createsourceuscensus.go | 37 +-
.../models/operations/createsourcevantage.go | 37 +-
.../models/operations/createsourcewebflow.go | 37 +-
.../operations/createsourcewhiskyhunter.go | 37 +-
.../createsourcewikipediapageviews.go | 37 +-
.../operations/createsourcewoocommerce.go | 37 +-
.../pkg/models/operations/createsourcexero.go | 16 -
.../pkg/models/operations/createsourcexkcd.go | 37 +-
.../operations/createsourceyandexmetrica.go | 37 +-
.../models/operations/createsourceyotpo.go | 37 +-
.../models/operations/createsourceyounium.go | 16 -
.../createsourceyoutubeanalytics.go | 37 +-
.../operations/createsourcezendeskchat.go | 37 +-
.../operations/createsourcezendesksell.go | 47 +
.../operations/createsourcezendesksunshine.go | 37 +-
.../operations/createsourcezendesksupport.go | 37 +-
.../operations/createsourcezendesktalk.go | 37 +-
.../models/operations/createsourcezenloop.go | 37 +-
.../models/operations/createsourcezohocrm.go | 37 +-
.../pkg/models/operations/createsourcezoom.go | 37 +-
.../models/operations/createsourcezuora.go | 37 +-
.../pkg/models/operations/createworkspace.go | 35 +-
.../pkg/models/operations/deleteconnection.go | 33 +-
.../models/operations/deletedestination.go | 33 +-
.../deletedestinationawsdatalake.go | 33 +-
.../deletedestinationazureblobstorage.go | 33 +-
.../operations/deletedestinationbigquery.go | 33 +-
.../deletedestinationbigquerydenormalized.go | 17 -
.../operations/deletedestinationclickhouse.go | 33 +-
.../operations/deletedestinationconvex.go | 33 +-
.../operations/deletedestinationcumulio.go | 33 +-
.../operations/deletedestinationdatabend.go | 33 +-
.../operations/deletedestinationdatabricks.go | 33 +-
.../operations/deletedestinationdevnull.go | 33 +-
.../operations/deletedestinationduckdb.go | 48 +
.../operations/deletedestinationdynamodb.go | 33 +-
.../deletedestinationelasticsearch.go | 33 +-
.../operations/deletedestinationfirebolt.go | 33 +-
.../operations/deletedestinationfirestore.go | 33 +-
.../models/operations/deletedestinationgcs.go | 33 +-
.../deletedestinationgooglesheets.go | 33 +-
.../operations/deletedestinationkeen.go | 33 +-
.../operations/deletedestinationkinesis.go | 33 +-
.../operations/deletedestinationlangchain.go | 33 +-
.../operations/deletedestinationmilvus.go | 33 +-
.../operations/deletedestinationmongodb.go | 33 +-
.../operations/deletedestinationmssql.go | 33 +-
.../operations/deletedestinationmysql.go | 33 +-
.../operations/deletedestinationoracle.go | 33 +-
.../operations/deletedestinationpinecone.go | 33 +-
.../operations/deletedestinationpostgres.go | 33 +-
.../operations/deletedestinationpubsub.go | 33 +-
.../operations/deletedestinationqdrant.go | 48 +
.../operations/deletedestinationredis.go | 33 +-
.../operations/deletedestinationredshift.go | 33 +-
.../models/operations/deletedestinations3.go | 33 +-
.../operations/deletedestinations3glue.go | 33 +-
.../operations/deletedestinationsftpjson.go | 33 +-
.../operations/deletedestinationsnowflake.go | 33 +-
.../operations/deletedestinationtimeplus.go | 33 +-
.../operations/deletedestinationtypesense.go | 33 +-
.../operations/deletedestinationvertica.go | 33 +-
.../operations/deletedestinationweaviate.go | 48 +
.../operations/deletedestinationxata.go | 33 +-
.../sdk/pkg/models/operations/deletesource.go | 33 +-
.../pkg/models/operations/deletesourceaha.go | 33 +-
.../models/operations/deletesourceaircall.go | 33 +-
.../models/operations/deletesourceairtable.go | 33 +-
.../models/operations/deletesourcealloydb.go | 33 +-
.../operations/deletesourceamazonads.go | 33 +-
.../deletesourceamazonsellerpartner.go | 33 +-
.../operations/deletesourceamazonsqs.go | 33 +-
.../operations/deletesourceamplitude.go | 33 +-
.../operations/deletesourceapifydataset.go | 33 +-
.../operations/deletesourceappfollow.go | 33 +-
.../models/operations/deletesourceasana.go | 33 +-
.../models/operations/deletesourceauth0.go | 33 +-
.../operations/deletesourceawscloudtrail.go | 33 +-
.../deletesourceazureblobstorage.go | 33 +-
.../operations/deletesourceazuretable.go | 33 +-
.../models/operations/deletesourcebamboohr.go | 33 +-
.../operations/deletesourcebigcommerce.go | 17 -
.../models/operations/deletesourcebigquery.go | 33 +-
.../models/operations/deletesourcebingads.go | 33 +-
.../operations/deletesourcebraintree.go | 33 +-
.../models/operations/deletesourcebraze.go | 33 +-
.../pkg/models/operations/deletesourcecart.go | 48 +
.../operations/deletesourcechargebee.go | 33 +-
.../operations/deletesourcechartmogul.go | 33 +-
.../operations/deletesourceclickhouse.go | 33 +-
.../operations/deletesourceclickupapi.go | 33 +-
.../models/operations/deletesourceclockify.go | 33 +-
.../models/operations/deletesourceclosecom.go | 33 +-
.../pkg/models/operations/deletesourcecoda.go | 33 +-
.../models/operations/deletesourcecoinapi.go | 33 +-
.../operations/deletesourcecoinmarketcap.go | 33 +-
.../operations/deletesourceconfigcat.go | 33 +-
.../operations/deletesourceconfluence.go | 33 +-
.../models/operations/deletesourceconvex.go | 33 +-
.../operations/deletesourcedatascope.go | 33 +-
.../operations/deletesourcedelighted.go | 33 +-
.../pkg/models/operations/deletesourcedixa.go | 33 +-
.../operations/deletesourcedockerhub.go | 33 +-
.../models/operations/deletesourcedremio.go | 33 +-
.../models/operations/deletesourcedynamodb.go | 33 +-
.../operations/deletesourcee2etestcloud.go | 17 -
.../operations/deletesourceemailoctopus.go | 33 +-
.../operations/deletesourceexchangerates.go | 33 +-
.../deletesourcefacebookmarketing.go | 33 +-
.../operations/deletesourcefacebookpages.go | 33 +-
.../models/operations/deletesourcefaker.go | 33 +-
.../models/operations/deletesourcefauna.go | 33 +-
.../pkg/models/operations/deletesourcefile.go | 48 +
.../operations/deletesourcefilesecure.go | 17 -
.../models/operations/deletesourcefirebolt.go | 33 +-
.../operations/deletesourcefreshcaller.go | 33 +-
.../operations/deletesourcefreshdesk.go | 33 +-
.../operations/deletesourcefreshsales.go | 33 +-
.../operations/deletesourcegainsightpx.go | 33 +-
.../pkg/models/operations/deletesourcegcs.go | 33 +-
.../models/operations/deletesourcegetlago.go | 33 +-
.../models/operations/deletesourcegithub.go | 33 +-
.../models/operations/deletesourcegitlab.go | 33 +-
.../operations/deletesourceglassfrog.go | 33 +-
.../models/operations/deletesourcegnews.go | 33 +-
.../operations/deletesourcegoogleads.go | 33 +-
.../deletesourcegoogleanalyticsdataapi.go | 33 +-
.../deletesourcegoogleanalyticsv4.go | 17 -
.../operations/deletesourcegoogledirectory.go | 33 +-
.../operations/deletesourcegoogledrive.go | 48 +
.../deletesourcegooglepagespeedinsights.go | 33 +-
.../deletesourcegooglesearchconsole.go | 33 +-
.../operations/deletesourcegooglesheets.go | 33 +-
.../operations/deletesourcegooglewebfonts.go | 33 +-
...deletesourcegoogleworkspaceadminreports.go | 33 +-
.../operations/deletesourcegreenhouse.go | 33 +-
.../models/operations/deletesourcegridly.go | 33 +-
.../models/operations/deletesourceharvest.go | 33 +-
.../operations/deletesourcehubplanner.go | 33 +-
.../models/operations/deletesourcehubspot.go | 33 +-
.../operations/deletesourceinsightly.go | 33 +-
.../operations/deletesourceinstagram.go | 33 +-
.../models/operations/deletesourceinstatus.go | 33 +-
.../models/operations/deletesourceintercom.go | 33 +-
.../models/operations/deletesourceip2whois.go | 33 +-
.../models/operations/deletesourceiterable.go | 33 +-
.../pkg/models/operations/deletesourcejira.go | 33 +-
.../models/operations/deletesourcek6cloud.go | 33 +-
.../models/operations/deletesourceklarna.go | 33 +-
.../models/operations/deletesourceklaviyo.go | 33 +-
.../operations/deletesourcekustomersinger.go | 33 +-
.../pkg/models/operations/deletesourcekyve.go | 33 +-
.../operations/deletesourcelaunchdarkly.go | 33 +-
.../models/operations/deletesourcelemlist.go | 33 +-
.../operations/deletesourceleverhiring.go | 33 +-
.../operations/deletesourcelinkedinads.go | 33 +-
.../operations/deletesourcelinkedinpages.go | 33 +-
.../operations/deletesourcelinnworks.go | 33 +-
.../models/operations/deletesourcelokalise.go | 33 +-
.../operations/deletesourcemailchimp.go | 33 +-
.../models/operations/deletesourcemailgun.go | 33 +-
.../operations/deletesourcemailjetsms.go | 33 +-
.../models/operations/deletesourcemarketo.go | 33 +-
.../models/operations/deletesourcemetabase.go | 33 +-
.../operations/deletesourcemicrosoftteams.go | 33 +-
.../models/operations/deletesourcemixpanel.go | 33 +-
.../models/operations/deletesourcemonday.go | 33 +-
.../models/operations/deletesourcemongodb.go | 17 -
.../deletesourcemongodbinternalpoc.go | 33 +-
.../operations/deletesourcemongodbv2.go | 48 +
.../models/operations/deletesourcemssql.go | 33 +-
.../models/operations/deletesourcemyhours.go | 33 +-
.../models/operations/deletesourcemysql.go | 33 +-
.../models/operations/deletesourcenetsuite.go | 33 +-
.../models/operations/deletesourcenotion.go | 33 +-
.../models/operations/deletesourcenytimes.go | 33 +-
.../pkg/models/operations/deletesourceokta.go | 33 +-
.../models/operations/deletesourceomnisend.go | 33 +-
.../operations/deletesourceonesignal.go | 33 +-
.../models/operations/deletesourceoracle.go | 33 +-
.../pkg/models/operations/deletesourceorb.go | 33 +-
.../models/operations/deletesourceorbit.go | 33 +-
.../operations/deletesourceoutbrainamplify.go | 33 +-
.../models/operations/deletesourceoutreach.go | 33 +-
.../deletesourcepaypaltransaction.go | 33 +-
.../models/operations/deletesourcepaystack.go | 33 +-
.../models/operations/deletesourcependo.go | 33 +-
.../operations/deletesourcepersistiq.go | 33 +-
.../operations/deletesourcepexelsapi.go | 33 +-
.../operations/deletesourcepinterest.go | 33 +-
.../operations/deletesourcepipedrive.go | 33 +-
.../models/operations/deletesourcepocket.go | 33 +-
.../models/operations/deletesourcepokeapi.go | 33 +-
.../operations/deletesourcepolygonstockapi.go | 33 +-
.../models/operations/deletesourcepostgres.go | 33 +-
.../models/operations/deletesourceposthog.go | 33 +-
.../operations/deletesourcepostmarkapp.go | 33 +-
.../operations/deletesourceprestashop.go | 33 +-
.../models/operations/deletesourcepunkapi.go | 33 +-
.../pkg/models/operations/deletesourcepypi.go | 33 +-
.../models/operations/deletesourcequalaroo.go | 33 +-
.../operations/deletesourcequickbooks.go | 33 +-
.../models/operations/deletesourcerailz.go | 33 +-
.../models/operations/deletesourcerecharge.go | 33 +-
.../operations/deletesourcerecreation.go | 33 +-
.../operations/deletesourcerecruitee.go | 33 +-
.../models/operations/deletesourcerecurly.go | 33 +-
.../models/operations/deletesourceredshift.go | 33 +-
.../models/operations/deletesourceretently.go | 33 +-
.../models/operations/deletesourcerkicovid.go | 33 +-
.../pkg/models/operations/deletesourcerss.go | 33 +-
.../pkg/models/operations/deletesources3.go | 33 +-
.../operations/deletesourcesalesforce.go | 33 +-
.../operations/deletesourcesalesloft.go | 33 +-
.../operations/deletesourcesapfieldglass.go | 33 +-
.../models/operations/deletesourcesecoda.go | 33 +-
.../models/operations/deletesourcesendgrid.go | 33 +-
.../operations/deletesourcesendinblue.go | 33 +-
.../operations/deletesourcesenseforce.go | 33 +-
.../models/operations/deletesourcesentry.go | 33 +-
.../pkg/models/operations/deletesourcesftp.go | 33 +-
.../models/operations/deletesourcesftpbulk.go | 33 +-
.../models/operations/deletesourceshopify.go | 33 +-
.../models/operations/deletesourceshortio.go | 33 +-
.../models/operations/deletesourceslack.go | 33 +-
.../models/operations/deletesourcesmaily.go | 33 +-
.../operations/deletesourcesmartengage.go | 33 +-
.../operations/deletesourcesmartsheets.go | 33 +-
.../deletesourcesnapchatmarketing.go | 33 +-
.../operations/deletesourcesnowflake.go | 33 +-
.../operations/deletesourcesonarcloud.go | 33 +-
.../operations/deletesourcespacexapi.go | 33 +-
.../models/operations/deletesourcesquare.go | 33 +-
.../models/operations/deletesourcestrava.go | 33 +-
.../models/operations/deletesourcestripe.go | 33 +-
.../operations/deletesourcesurveymonkey.go | 33 +-
.../operations/deletesourcesurveysparrow.go | 33 +-
.../models/operations/deletesourcetempo.go | 33 +-
.../operations/deletesourcetheguardianapi.go | 33 +-
.../operations/deletesourcetiktokmarketing.go | 33 +-
.../models/operations/deletesourcetodoist.go | 33 +-
.../models/operations/deletesourcetrello.go | 33 +-
.../operations/deletesourcetrustpilot.go | 33 +-
.../operations/deletesourcetvmazeschedule.go | 33 +-
.../models/operations/deletesourcetwilio.go | 33 +-
.../deletesourcetwiliotaskrouter.go | 33 +-
.../models/operations/deletesourcetwitter.go | 33 +-
.../models/operations/deletesourcetypeform.go | 33 +-
.../models/operations/deletesourceuscensus.go | 33 +-
.../models/operations/deletesourcevantage.go | 33 +-
.../models/operations/deletesourcewebflow.go | 33 +-
.../operations/deletesourcewhiskyhunter.go | 33 +-
.../deletesourcewikipediapageviews.go | 33 +-
.../operations/deletesourcewoocommerce.go | 33 +-
.../pkg/models/operations/deletesourcexero.go | 17 -
.../pkg/models/operations/deletesourcexkcd.go | 33 +-
.../operations/deletesourceyandexmetrica.go | 33 +-
.../models/operations/deletesourceyotpo.go | 33 +-
.../models/operations/deletesourceyounium.go | 17 -
.../deletesourceyoutubeanalytics.go | 33 +-
.../operations/deletesourcezendeskchat.go | 33 +-
.../operations/deletesourcezendesksell.go | 48 +
.../operations/deletesourcezendesksunshine.go | 33 +-
.../operations/deletesourcezendesksupport.go | 33 +-
.../operations/deletesourcezendesktalk.go | 33 +-
.../models/operations/deletesourcezenloop.go | 33 +-
.../models/operations/deletesourcezohocrm.go | 33 +-
.../pkg/models/operations/deletesourcezoom.go | 33 +-
.../models/operations/deletesourcezuora.go | 33 +-
.../pkg/models/operations/deleteworkspace.go | 33 +-
.../pkg/models/operations/getconnection.go | 46 +-
.../pkg/models/operations/getdestination.go | 44 +-
.../operations/getdestinationawsdatalake.go | 44 +-
.../getdestinationazureblobstorage.go | 44 +-
.../operations/getdestinationbigquery.go | 44 +-
.../getdestinationbigquerydenormalized.go | 20 -
.../operations/getdestinationclickhouse.go | 44 +-
.../models/operations/getdestinationconvex.go | 44 +-
.../operations/getdestinationcumulio.go | 44 +-
.../operations/getdestinationdatabend.go | 44 +-
.../operations/getdestinationdatabricks.go | 44 +-
.../operations/getdestinationdevnull.go | 44 +-
.../models/operations/getdestinationduckdb.go | 58 +
.../operations/getdestinationdynamodb.go | 44 +-
.../operations/getdestinationelasticsearch.go | 44 +-
.../operations/getdestinationfirebolt.go | 44 +-
.../operations/getdestinationfirestore.go | 44 +-
.../models/operations/getdestinationgcs.go | 44 +-
.../operations/getdestinationgooglesheets.go | 44 +-
.../models/operations/getdestinationkeen.go | 44 +-
.../operations/getdestinationkinesis.go | 44 +-
.../operations/getdestinationlangchain.go | 44 +-
.../models/operations/getdestinationmilvus.go | 44 +-
.../operations/getdestinationmongodb.go | 44 +-
.../models/operations/getdestinationmssql.go | 44 +-
.../models/operations/getdestinationmysql.go | 44 +-
.../models/operations/getdestinationoracle.go | 44 +-
.../operations/getdestinationpinecone.go | 44 +-
.../operations/getdestinationpostgres.go | 44 +-
.../models/operations/getdestinationpubsub.go | 44 +-
.../models/operations/getdestinationqdrant.go | 58 +
.../models/operations/getdestinationredis.go | 44 +-
.../operations/getdestinationredshift.go | 44 +-
.../pkg/models/operations/getdestinations3.go | 44 +-
.../models/operations/getdestinations3glue.go | 44 +-
.../operations/getdestinationsftpjson.go | 44 +-
.../operations/getdestinationsnowflake.go | 44 +-
.../operations/getdestinationtimeplus.go | 44 +-
.../operations/getdestinationtypesense.go | 44 +-
.../operations/getdestinationvertica.go | 44 +-
.../operations/getdestinationweaviate.go | 58 +
.../models/operations/getdestinationxata.go | 44 +-
internal/sdk/pkg/models/operations/getjob.go | 42 +-
.../sdk/pkg/models/operations/getsource.go | 44 +-
.../sdk/pkg/models/operations/getsourceaha.go | 44 +-
.../pkg/models/operations/getsourceaircall.go | 44 +-
.../models/operations/getsourceairtable.go | 44 +-
.../pkg/models/operations/getsourcealloydb.go | 44 +-
.../models/operations/getsourceamazonads.go | 44 +-
.../getsourceamazonsellerpartner.go | 44 +-
.../models/operations/getsourceamazonsqs.go | 44 +-
.../models/operations/getsourceamplitude.go | 44 +-
.../operations/getsourceapifydataset.go | 44 +-
.../models/operations/getsourceappfollow.go | 44 +-
.../pkg/models/operations/getsourceasana.go | 44 +-
.../pkg/models/operations/getsourceauth0.go | 44 +-
.../operations/getsourceawscloudtrail.go | 44 +-
.../operations/getsourceazureblobstorage.go | 44 +-
.../models/operations/getsourceazuretable.go | 44 +-
.../models/operations/getsourcebamboohr.go | 44 +-
.../models/operations/getsourcebigcommerce.go | 20 -
.../models/operations/getsourcebigquery.go | 44 +-
.../pkg/models/operations/getsourcebingads.go | 44 +-
.../models/operations/getsourcebraintree.go | 44 +-
.../pkg/models/operations/getsourcebraze.go | 44 +-
.../pkg/models/operations/getsourcecart.go | 58 +
.../models/operations/getsourcechargebee.go | 44 +-
.../models/operations/getsourcechartmogul.go | 44 +-
.../models/operations/getsourceclickhouse.go | 44 +-
.../models/operations/getsourceclickupapi.go | 44 +-
.../models/operations/getsourceclockify.go | 44 +-
.../models/operations/getsourceclosecom.go | 44 +-
.../pkg/models/operations/getsourcecoda.go | 44 +-
.../pkg/models/operations/getsourcecoinapi.go | 44 +-
.../operations/getsourcecoinmarketcap.go | 44 +-
.../models/operations/getsourceconfigcat.go | 44 +-
.../models/operations/getsourceconfluence.go | 44 +-
.../pkg/models/operations/getsourceconvex.go | 44 +-
.../models/operations/getsourcedatascope.go | 44 +-
.../models/operations/getsourcedelighted.go | 44 +-
.../pkg/models/operations/getsourcedixa.go | 44 +-
.../models/operations/getsourcedockerhub.go | 44 +-
.../pkg/models/operations/getsourcedremio.go | 44 +-
.../models/operations/getsourcedynamodb.go | 44 +-
.../operations/getsourcee2etestcloud.go | 20 -
.../operations/getsourceemailoctopus.go | 44 +-
.../operations/getsourceexchangerates.go | 44 +-
.../operations/getsourcefacebookmarketing.go | 44 +-
.../operations/getsourcefacebookpages.go | 44 +-
.../pkg/models/operations/getsourcefaker.go | 44 +-
.../pkg/models/operations/getsourcefauna.go | 44 +-
.../pkg/models/operations/getsourcefile.go | 58 +
.../models/operations/getsourcefilesecure.go | 20 -
.../models/operations/getsourcefirebolt.go | 44 +-
.../models/operations/getsourcefreshcaller.go | 44 +-
.../models/operations/getsourcefreshdesk.go | 44 +-
.../models/operations/getsourcefreshsales.go | 44 +-
.../models/operations/getsourcegainsightpx.go | 44 +-
.../sdk/pkg/models/operations/getsourcegcs.go | 44 +-
.../pkg/models/operations/getsourcegetlago.go | 44 +-
.../pkg/models/operations/getsourcegithub.go | 44 +-
.../pkg/models/operations/getsourcegitlab.go | 44 +-
.../models/operations/getsourceglassfrog.go | 44 +-
.../pkg/models/operations/getsourcegnews.go | 44 +-
.../models/operations/getsourcegoogleads.go | 44 +-
.../getsourcegoogleanalyticsdataapi.go | 44 +-
.../operations/getsourcegoogleanalyticsv4.go | 20 -
.../operations/getsourcegoogledirectory.go | 44 +-
.../models/operations/getsourcegoogledrive.go | 58 +
.../getsourcegooglepagespeedinsights.go | 44 +-
.../getsourcegooglesearchconsole.go | 44 +-
.../operations/getsourcegooglesheets.go | 44 +-
.../operations/getsourcegooglewebfonts.go | 44 +-
.../getsourcegoogleworkspaceadminreports.go | 44 +-
.../models/operations/getsourcegreenhouse.go | 44 +-
.../pkg/models/operations/getsourcegridly.go | 44 +-
.../pkg/models/operations/getsourceharvest.go | 44 +-
.../models/operations/getsourcehubplanner.go | 44 +-
.../pkg/models/operations/getsourcehubspot.go | 44 +-
.../models/operations/getsourceinsightly.go | 44 +-
.../models/operations/getsourceinstagram.go | 44 +-
.../models/operations/getsourceinstatus.go | 44 +-
.../models/operations/getsourceintercom.go | 44 +-
.../models/operations/getsourceip2whois.go | 44 +-
.../models/operations/getsourceiterable.go | 44 +-
.../pkg/models/operations/getsourcejira.go | 44 +-
.../pkg/models/operations/getsourcek6cloud.go | 44 +-
.../pkg/models/operations/getsourceklarna.go | 44 +-
.../pkg/models/operations/getsourceklaviyo.go | 44 +-
.../operations/getsourcekustomersinger.go | 44 +-
.../pkg/models/operations/getsourcekyve.go | 44 +-
.../operations/getsourcelaunchdarkly.go | 44 +-
.../pkg/models/operations/getsourcelemlist.go | 44 +-
.../models/operations/getsourceleverhiring.go | 44 +-
.../models/operations/getsourcelinkedinads.go | 44 +-
.../operations/getsourcelinkedinpages.go | 44 +-
.../models/operations/getsourcelinnworks.go | 44 +-
.../models/operations/getsourcelokalise.go | 44 +-
.../models/operations/getsourcemailchimp.go | 44 +-
.../pkg/models/operations/getsourcemailgun.go | 44 +-
.../models/operations/getsourcemailjetsms.go | 44 +-
.../pkg/models/operations/getsourcemarketo.go | 44 +-
.../models/operations/getsourcemetabase.go | 44 +-
.../operations/getsourcemicrosoftteams.go | 44 +-
.../models/operations/getsourcemixpanel.go | 44 +-
.../pkg/models/operations/getsourcemonday.go | 44 +-
.../pkg/models/operations/getsourcemongodb.go | 20 -
.../operations/getsourcemongodbinternalpoc.go | 44 +-
.../models/operations/getsourcemongodbv2.go | 58 +
.../pkg/models/operations/getsourcemssql.go | 44 +-
.../pkg/models/operations/getsourcemyhours.go | 44 +-
.../pkg/models/operations/getsourcemysql.go | 44 +-
.../models/operations/getsourcenetsuite.go | 44 +-
.../pkg/models/operations/getsourcenotion.go | 44 +-
.../pkg/models/operations/getsourcenytimes.go | 44 +-
.../pkg/models/operations/getsourceokta.go | 44 +-
.../models/operations/getsourceomnisend.go | 44 +-
.../models/operations/getsourceonesignal.go | 44 +-
.../pkg/models/operations/getsourceoracle.go | 44 +-
.../sdk/pkg/models/operations/getsourceorb.go | 44 +-
.../pkg/models/operations/getsourceorbit.go | 44 +-
.../operations/getsourceoutbrainamplify.go | 44 +-
.../models/operations/getsourceoutreach.go | 44 +-
.../operations/getsourcepaypaltransaction.go | 44 +-
.../models/operations/getsourcepaystack.go | 44 +-
.../pkg/models/operations/getsourcependo.go | 44 +-
.../models/operations/getsourcepersistiq.go | 44 +-
.../models/operations/getsourcepexelsapi.go | 44 +-
.../models/operations/getsourcepinterest.go | 44 +-
.../models/operations/getsourcepipedrive.go | 44 +-
.../pkg/models/operations/getsourcepocket.go | 44 +-
.../pkg/models/operations/getsourcepokeapi.go | 44 +-
.../operations/getsourcepolygonstockapi.go | 44 +-
.../models/operations/getsourcepostgres.go | 44 +-
.../pkg/models/operations/getsourceposthog.go | 44 +-
.../models/operations/getsourcepostmarkapp.go | 44 +-
.../models/operations/getsourceprestashop.go | 44 +-
.../pkg/models/operations/getsourcepunkapi.go | 44 +-
.../pkg/models/operations/getsourcepypi.go | 44 +-
.../models/operations/getsourcequalaroo.go | 44 +-
.../models/operations/getsourcequickbooks.go | 44 +-
.../pkg/models/operations/getsourcerailz.go | 44 +-
.../models/operations/getsourcerecharge.go | 44 +-
.../models/operations/getsourcerecreation.go | 44 +-
.../models/operations/getsourcerecruitee.go | 44 +-
.../pkg/models/operations/getsourcerecurly.go | 44 +-
.../models/operations/getsourceredshift.go | 44 +-
.../models/operations/getsourceretently.go | 44 +-
.../models/operations/getsourcerkicovid.go | 44 +-
.../sdk/pkg/models/operations/getsourcerss.go | 44 +-
.../sdk/pkg/models/operations/getsources3.go | 44 +-
.../models/operations/getsourcesalesforce.go | 44 +-
.../models/operations/getsourcesalesloft.go | 44 +-
.../operations/getsourcesapfieldglass.go | 44 +-
.../pkg/models/operations/getsourcesecoda.go | 44 +-
.../models/operations/getsourcesendgrid.go | 44 +-
.../models/operations/getsourcesendinblue.go | 44 +-
.../models/operations/getsourcesenseforce.go | 44 +-
.../pkg/models/operations/getsourcesentry.go | 44 +-
.../pkg/models/operations/getsourcesftp.go | 44 +-
.../models/operations/getsourcesftpbulk.go | 44 +-
.../pkg/models/operations/getsourceshopify.go | 44 +-
.../pkg/models/operations/getsourceshortio.go | 44 +-
.../pkg/models/operations/getsourceslack.go | 44 +-
.../pkg/models/operations/getsourcesmaily.go | 44 +-
.../models/operations/getsourcesmartengage.go | 44 +-
.../models/operations/getsourcesmartsheets.go | 44 +-
.../operations/getsourcesnapchatmarketing.go | 44 +-
.../models/operations/getsourcesnowflake.go | 44 +-
.../models/operations/getsourcesonarcloud.go | 44 +-
.../models/operations/getsourcespacexapi.go | 44 +-
.../pkg/models/operations/getsourcesquare.go | 44 +-
.../pkg/models/operations/getsourcestrava.go | 44 +-
.../pkg/models/operations/getsourcestripe.go | 44 +-
.../operations/getsourcesurveymonkey.go | 44 +-
.../operations/getsourcesurveysparrow.go | 44 +-
.../pkg/models/operations/getsourcetempo.go | 44 +-
.../operations/getsourcetheguardianapi.go | 44 +-
.../operations/getsourcetiktokmarketing.go | 44 +-
.../pkg/models/operations/getsourcetodoist.go | 44 +-
.../pkg/models/operations/getsourcetrello.go | 44 +-
.../models/operations/getsourcetrustpilot.go | 44 +-
.../operations/getsourcetvmazeschedule.go | 44 +-
.../pkg/models/operations/getsourcetwilio.go | 44 +-
.../operations/getsourcetwiliotaskrouter.go | 44 +-
.../pkg/models/operations/getsourcetwitter.go | 44 +-
.../models/operations/getsourcetypeform.go | 44 +-
.../models/operations/getsourceuscensus.go | 44 +-
.../pkg/models/operations/getsourcevantage.go | 44 +-
.../pkg/models/operations/getsourcewebflow.go | 44 +-
.../operations/getsourcewhiskyhunter.go | 44 +-
.../operations/getsourcewikipediapageviews.go | 44 +-
.../models/operations/getsourcewoocommerce.go | 44 +-
.../pkg/models/operations/getsourcexero.go | 20 -
.../pkg/models/operations/getsourcexkcd.go | 44 +-
.../operations/getsourceyandexmetrica.go | 44 +-
.../pkg/models/operations/getsourceyotpo.go | 44 +-
.../pkg/models/operations/getsourceyounium.go | 20 -
.../operations/getsourceyoutubeanalytics.go | 44 +-
.../models/operations/getsourcezendeskchat.go | 44 +-
.../models/operations/getsourcezendesksell.go | 58 +
.../operations/getsourcezendesksunshine.go | 44 +-
.../operations/getsourcezendesksupport.go | 44 +-
.../models/operations/getsourcezendesktalk.go | 44 +-
.../pkg/models/operations/getsourcezenloop.go | 44 +-
.../pkg/models/operations/getsourcezohocrm.go | 44 +-
.../pkg/models/operations/getsourcezoom.go | 44 +-
.../pkg/models/operations/getsourcezuora.go | 44 +-
.../models/operations/getstreamproperties.go | 70 +-
.../sdk/pkg/models/operations/getworkspace.go | 42 +-
.../pkg/models/operations/initiateoauth.go | 26 +-
.../pkg/models/operations/listconnections.go | 85 +-
.../pkg/models/operations/listdestinations.go | 83 +-
.../sdk/pkg/models/operations/listjobs.go | 130 +-
.../sdk/pkg/models/operations/listsources.go | 83 +-
.../pkg/models/operations/listworkspaces.go | 81 +-
.../pkg/models/operations/patchconnection.go | 53 +-
.../pkg/models/operations/patchdestination.go | 51 +-
.../sdk/pkg/models/operations/patchsource.go | 51 +-
.../pkg/models/operations/putdestination.go | 51 +-
.../operations/putdestinationawsdatalake.go | 42 +-
.../putdestinationazureblobstorage.go | 42 +-
.../operations/putdestinationbigquery.go | 42 +-
.../putdestinationbigquerydenormalized.go | 19 -
.../operations/putdestinationclickhouse.go | 42 +-
.../models/operations/putdestinationconvex.go | 42 +-
.../operations/putdestinationcumulio.go | 42 +-
.../operations/putdestinationdatabend.go | 42 +-
.../operations/putdestinationdatabricks.go | 42 +-
.../operations/putdestinationdevnull.go | 42 +-
.../models/operations/putdestinationduckdb.go | 57 +
.../operations/putdestinationdynamodb.go | 42 +-
.../operations/putdestinationelasticsearch.go | 42 +-
.../operations/putdestinationfirebolt.go | 42 +-
.../operations/putdestinationfirestore.go | 42 +-
.../models/operations/putdestinationgcs.go | 42 +-
.../operations/putdestinationgooglesheets.go | 42 +-
.../models/operations/putdestinationkeen.go | 42 +-
.../operations/putdestinationkinesis.go | 42 +-
.../operations/putdestinationlangchain.go | 42 +-
.../models/operations/putdestinationmilvus.go | 42 +-
.../operations/putdestinationmongodb.go | 42 +-
.../models/operations/putdestinationmssql.go | 42 +-
.../models/operations/putdestinationmysql.go | 42 +-
.../models/operations/putdestinationoracle.go | 42 +-
.../operations/putdestinationpinecone.go | 42 +-
.../operations/putdestinationpostgres.go | 42 +-
.../models/operations/putdestinationpubsub.go | 42 +-
.../models/operations/putdestinationqdrant.go | 57 +
.../models/operations/putdestinationredis.go | 42 +-
.../operations/putdestinationredshift.go | 42 +-
.../pkg/models/operations/putdestinations3.go | 42 +-
.../models/operations/putdestinations3glue.go | 42 +-
.../operations/putdestinationsftpjson.go | 42 +-
.../operations/putdestinationsnowflake.go | 42 +-
.../operations/putdestinationtimeplus.go | 42 +-
.../operations/putdestinationtypesense.go | 42 +-
.../operations/putdestinationvertica.go | 42 +-
.../operations/putdestinationweaviate.go | 57 +
.../models/operations/putdestinationxata.go | 42 +-
.../sdk/pkg/models/operations/putsource.go | 51 +-
.../sdk/pkg/models/operations/putsourceaha.go | 42 +-
.../pkg/models/operations/putsourceaircall.go | 42 +-
.../models/operations/putsourceairtable.go | 42 +-
.../pkg/models/operations/putsourcealloydb.go | 42 +-
.../models/operations/putsourceamazonads.go | 42 +-
.../putsourceamazonsellerpartner.go | 42 +-
.../models/operations/putsourceamazonsqs.go | 42 +-
.../models/operations/putsourceamplitude.go | 42 +-
.../operations/putsourceapifydataset.go | 42 +-
.../models/operations/putsourceappfollow.go | 42 +-
.../pkg/models/operations/putsourceasana.go | 42 +-
.../pkg/models/operations/putsourceauth0.go | 42 +-
.../operations/putsourceawscloudtrail.go | 42 +-
.../operations/putsourceazureblobstorage.go | 42 +-
.../models/operations/putsourceazuretable.go | 42 +-
.../models/operations/putsourcebamboohr.go | 42 +-
.../models/operations/putsourcebigcommerce.go | 19 -
.../models/operations/putsourcebigquery.go | 42 +-
.../pkg/models/operations/putsourcebingads.go | 42 +-
.../models/operations/putsourcebraintree.go | 42 +-
.../pkg/models/operations/putsourcebraze.go | 42 +-
.../pkg/models/operations/putsourcecart.go | 57 +
.../models/operations/putsourcechargebee.go | 42 +-
.../models/operations/putsourcechartmogul.go | 42 +-
.../models/operations/putsourceclickhouse.go | 42 +-
.../models/operations/putsourceclickupapi.go | 42 +-
.../models/operations/putsourceclockify.go | 42 +-
.../models/operations/putsourceclosecom.go | 42 +-
.../pkg/models/operations/putsourcecoda.go | 42 +-
.../pkg/models/operations/putsourcecoinapi.go | 42 +-
.../operations/putsourcecoinmarketcap.go | 42 +-
.../models/operations/putsourceconfigcat.go | 42 +-
.../models/operations/putsourceconfluence.go | 42 +-
.../pkg/models/operations/putsourceconvex.go | 42 +-
.../models/operations/putsourcedatascope.go | 42 +-
.../models/operations/putsourcedelighted.go | 42 +-
.../pkg/models/operations/putsourcedixa.go | 42 +-
.../models/operations/putsourcedockerhub.go | 42 +-
.../pkg/models/operations/putsourcedremio.go | 42 +-
.../models/operations/putsourcedynamodb.go | 42 +-
.../operations/putsourcee2etestcloud.go | 19 -
.../operations/putsourceemailoctopus.go | 42 +-
.../operations/putsourceexchangerates.go | 42 +-
.../operations/putsourcefacebookmarketing.go | 42 +-
.../operations/putsourcefacebookpages.go | 42 +-
.../pkg/models/operations/putsourcefaker.go | 42 +-
.../pkg/models/operations/putsourcefauna.go | 42 +-
.../pkg/models/operations/putsourcefile.go | 57 +
.../models/operations/putsourcefilesecure.go | 19 -
.../models/operations/putsourcefirebolt.go | 42 +-
.../models/operations/putsourcefreshcaller.go | 42 +-
.../models/operations/putsourcefreshdesk.go | 42 +-
.../models/operations/putsourcefreshsales.go | 42 +-
.../models/operations/putsourcegainsightpx.go | 42 +-
.../sdk/pkg/models/operations/putsourcegcs.go | 42 +-
.../pkg/models/operations/putsourcegetlago.go | 42 +-
.../pkg/models/operations/putsourcegithub.go | 42 +-
.../pkg/models/operations/putsourcegitlab.go | 42 +-
.../models/operations/putsourceglassfrog.go | 42 +-
.../pkg/models/operations/putsourcegnews.go | 42 +-
.../models/operations/putsourcegoogleads.go | 42 +-
.../putsourcegoogleanalyticsdataapi.go | 42 +-
.../operations/putsourcegoogleanalyticsv4.go | 19 -
.../operations/putsourcegoogledirectory.go | 42 +-
.../models/operations/putsourcegoogledrive.go | 57 +
.../putsourcegooglepagespeedinsights.go | 42 +-
.../putsourcegooglesearchconsole.go | 42 +-
.../operations/putsourcegooglesheets.go | 42 +-
.../operations/putsourcegooglewebfonts.go | 42 +-
.../putsourcegoogleworkspaceadminreports.go | 42 +-
.../models/operations/putsourcegreenhouse.go | 42 +-
.../pkg/models/operations/putsourcegridly.go | 42 +-
.../pkg/models/operations/putsourceharvest.go | 42 +-
.../models/operations/putsourcehubplanner.go | 42 +-
.../pkg/models/operations/putsourcehubspot.go | 42 +-
.../models/operations/putsourceinsightly.go | 42 +-
.../models/operations/putsourceinstagram.go | 42 +-
.../models/operations/putsourceinstatus.go | 42 +-
.../models/operations/putsourceintercom.go | 42 +-
.../models/operations/putsourceip2whois.go | 42 +-
.../models/operations/putsourceiterable.go | 42 +-
.../pkg/models/operations/putsourcejira.go | 42 +-
.../pkg/models/operations/putsourcek6cloud.go | 42 +-
.../pkg/models/operations/putsourceklarna.go | 42 +-
.../pkg/models/operations/putsourceklaviyo.go | 42 +-
.../operations/putsourcekustomersinger.go | 42 +-
.../pkg/models/operations/putsourcekyve.go | 42 +-
.../operations/putsourcelaunchdarkly.go | 42 +-
.../pkg/models/operations/putsourcelemlist.go | 42 +-
.../models/operations/putsourceleverhiring.go | 42 +-
.../models/operations/putsourcelinkedinads.go | 42 +-
.../operations/putsourcelinkedinpages.go | 42 +-
.../models/operations/putsourcelinnworks.go | 42 +-
.../models/operations/putsourcelokalise.go | 42 +-
.../models/operations/putsourcemailchimp.go | 42 +-
.../pkg/models/operations/putsourcemailgun.go | 42 +-
.../models/operations/putsourcemailjetsms.go | 42 +-
.../pkg/models/operations/putsourcemarketo.go | 42 +-
.../models/operations/putsourcemetabase.go | 42 +-
.../operations/putsourcemicrosoftteams.go | 42 +-
.../models/operations/putsourcemixpanel.go | 42 +-
.../pkg/models/operations/putsourcemonday.go | 42 +-
.../pkg/models/operations/putsourcemongodb.go | 19 -
.../operations/putsourcemongodbinternalpoc.go | 42 +-
.../models/operations/putsourcemongodbv2.go | 57 +
.../pkg/models/operations/putsourcemssql.go | 42 +-
.../pkg/models/operations/putsourcemyhours.go | 42 +-
.../pkg/models/operations/putsourcemysql.go | 42 +-
.../models/operations/putsourcenetsuite.go | 42 +-
.../pkg/models/operations/putsourcenotion.go | 42 +-
.../pkg/models/operations/putsourcenytimes.go | 42 +-
.../pkg/models/operations/putsourceokta.go | 42 +-
.../models/operations/putsourceomnisend.go | 42 +-
.../models/operations/putsourceonesignal.go | 42 +-
.../pkg/models/operations/putsourceoracle.go | 42 +-
.../sdk/pkg/models/operations/putsourceorb.go | 42 +-
.../pkg/models/operations/putsourceorbit.go | 42 +-
.../operations/putsourceoutbrainamplify.go | 42 +-
.../models/operations/putsourceoutreach.go | 42 +-
.../operations/putsourcepaypaltransaction.go | 42 +-
.../models/operations/putsourcepaystack.go | 42 +-
.../pkg/models/operations/putsourcependo.go | 42 +-
.../models/operations/putsourcepersistiq.go | 42 +-
.../models/operations/putsourcepexelsapi.go | 42 +-
.../models/operations/putsourcepinterest.go | 42 +-
.../models/operations/putsourcepipedrive.go | 42 +-
.../pkg/models/operations/putsourcepocket.go | 42 +-
.../pkg/models/operations/putsourcepokeapi.go | 42 +-
.../operations/putsourcepolygonstockapi.go | 42 +-
.../models/operations/putsourcepostgres.go | 42 +-
.../pkg/models/operations/putsourceposthog.go | 42 +-
.../models/operations/putsourcepostmarkapp.go | 42 +-
.../models/operations/putsourceprestashop.go | 42 +-
.../pkg/models/operations/putsourcepunkapi.go | 42 +-
.../pkg/models/operations/putsourcepypi.go | 42 +-
.../models/operations/putsourcequalaroo.go | 42 +-
.../models/operations/putsourcequickbooks.go | 42 +-
.../pkg/models/operations/putsourcerailz.go | 42 +-
.../models/operations/putsourcerecharge.go | 42 +-
.../models/operations/putsourcerecreation.go | 42 +-
.../models/operations/putsourcerecruitee.go | 42 +-
.../pkg/models/operations/putsourcerecurly.go | 42 +-
.../models/operations/putsourceredshift.go | 42 +-
.../models/operations/putsourceretently.go | 42 +-
.../models/operations/putsourcerkicovid.go | 42 +-
.../sdk/pkg/models/operations/putsourcerss.go | 42 +-
.../sdk/pkg/models/operations/putsources3.go | 42 +-
.../models/operations/putsourcesalesforce.go | 42 +-
.../models/operations/putsourcesalesloft.go | 42 +-
.../operations/putsourcesapfieldglass.go | 42 +-
.../pkg/models/operations/putsourcesecoda.go | 42 +-
.../models/operations/putsourcesendgrid.go | 42 +-
.../models/operations/putsourcesendinblue.go | 42 +-
.../models/operations/putsourcesenseforce.go | 42 +-
.../pkg/models/operations/putsourcesentry.go | 42 +-
.../pkg/models/operations/putsourcesftp.go | 42 +-
.../models/operations/putsourcesftpbulk.go | 42 +-
.../pkg/models/operations/putsourceshopify.go | 42 +-
.../pkg/models/operations/putsourceshortio.go | 42 +-
.../pkg/models/operations/putsourceslack.go | 42 +-
.../pkg/models/operations/putsourcesmaily.go | 42 +-
.../models/operations/putsourcesmartengage.go | 42 +-
.../models/operations/putsourcesmartsheets.go | 42 +-
.../operations/putsourcesnapchatmarketing.go | 42 +-
.../models/operations/putsourcesnowflake.go | 42 +-
.../models/operations/putsourcesonarcloud.go | 42 +-
.../models/operations/putsourcespacexapi.go | 42 +-
.../pkg/models/operations/putsourcesquare.go | 42 +-
.../pkg/models/operations/putsourcestrava.go | 42 +-
.../pkg/models/operations/putsourcestripe.go | 42 +-
.../operations/putsourcesurveymonkey.go | 42 +-
.../operations/putsourcesurveysparrow.go | 42 +-
.../pkg/models/operations/putsourcetempo.go | 42 +-
.../operations/putsourcetheguardianapi.go | 42 +-
.../operations/putsourcetiktokmarketing.go | 42 +-
.../pkg/models/operations/putsourcetodoist.go | 42 +-
.../pkg/models/operations/putsourcetrello.go | 42 +-
.../models/operations/putsourcetrustpilot.go | 42 +-
.../operations/putsourcetvmazeschedule.go | 42 +-
.../pkg/models/operations/putsourcetwilio.go | 42 +-
.../operations/putsourcetwiliotaskrouter.go | 42 +-
.../pkg/models/operations/putsourcetwitter.go | 42 +-
.../models/operations/putsourcetypeform.go | 42 +-
.../models/operations/putsourceuscensus.go | 42 +-
.../pkg/models/operations/putsourcevantage.go | 42 +-
.../pkg/models/operations/putsourcewebflow.go | 42 +-
.../operations/putsourcewhiskyhunter.go | 42 +-
.../operations/putsourcewikipediapageviews.go | 42 +-
.../models/operations/putsourcewoocommerce.go | 42 +-
.../pkg/models/operations/putsourcexero.go | 19 -
.../pkg/models/operations/putsourcexkcd.go | 42 +-
.../operations/putsourceyandexmetrica.go | 42 +-
.../pkg/models/operations/putsourceyotpo.go | 42 +-
.../pkg/models/operations/putsourceyounium.go | 19 -
.../operations/putsourceyoutubeanalytics.go | 42 +-
.../models/operations/putsourcezendeskchat.go | 42 +-
.../models/operations/putsourcezendesksell.go | 57 +
.../operations/putsourcezendesksunshine.go | 42 +-
.../operations/putsourcezendesksupport.go | 42 +-
.../models/operations/putsourcezendesktalk.go | 42 +-
.../pkg/models/operations/putsourcezenloop.go | 42 +-
.../pkg/models/operations/putsourcezohocrm.go | 42 +-
.../pkg/models/operations/putsourcezoom.go | 42 +-
.../pkg/models/operations/putsourcezuora.go | 42 +-
.../pkg/models/operations/updateworkspace.go | 49 +-
internal/sdk/pkg/models/sdkerrors/sdkerror.go | 35 +
.../sdk/pkg/models/shared/actortypeenum.go | 0
.../models/shared/connectioncreaterequest.go | 100 +-
.../models/shared/connectionpatchrequest.go | 80 +-
.../pkg/models/shared/connectionresponse.go | 112 +-
.../pkg/models/shared/connectionschedule.go | 14 +
.../shared/connectionscheduleresponse.go | 21 +
.../pkg/models/shared/connectionsresponse.go | 21 +
.../pkg/models/shared/connectionstatusenum.go | 0
.../models/shared/connectionsyncmodeenum.go | 0
.../models/shared/destinationawsdatalake.go | 450 +-
.../destinationawsdatalakecreaterequest.go | 35 +-
.../destinationawsdatalakeputrequest.go | 21 +
.../shared/destinationawsdatalakeupdate.go | 554 +-
.../shared/destinationazureblobstorage.go | 230 +-
...estinationazureblobstoragecreaterequest.go | 35 +-
.../destinationazureblobstorageputrequest.go | 21 +
.../destinationazureblobstorageupdate.go | 228 +-
.../pkg/models/shared/destinationbigquery.go | 395 +-
.../destinationbigquerycreaterequest.go | 35 +-
.../shared/destinationbigquerydenormalized.go | 433 -
...nationbigquerydenormalizedcreaterequest.go | 9 -
...stinationbigquerydenormalizedputrequest.go | 9 -
.../destinationbigquerydenormalizedupdate.go | 408 -
.../shared/destinationbigqueryputrequest.go | 21 +
.../shared/destinationbigqueryupdate.go | 501 +-
.../models/shared/destinationclickhouse.go | 328 +-
.../destinationclickhousecreaterequest.go | 35 +-
.../shared/destinationclickhouseputrequest.go | 21 +
.../shared/destinationclickhouseupdate.go | 328 +-
.../pkg/models/shared/destinationconvex.go | 46 +-
.../shared/destinationconvexcreaterequest.go | 35 +-
.../shared/destinationconvexputrequest.go | 21 +
.../models/shared/destinationconvexupdate.go | 14 +
.../models/shared/destinationcreaterequest.go | 35 +-
.../pkg/models/shared/destinationcumulio.go | 55 +-
.../shared/destinationcumuliocreaterequest.go | 35 +-
.../shared/destinationcumulioputrequest.go | 21 +
.../models/shared/destinationcumulioupdate.go | 38 +-
.../pkg/models/shared/destinationdatabend.go | 78 +-
.../destinationdatabendcreaterequest.go | 35 +-
.../shared/destinationdatabendputrequest.go | 21 +
.../shared/destinationdatabendupdate.go | 61 +-
.../models/shared/destinationdatabricks.go | 437 +-
.../destinationdatabrickscreaterequest.go | 35 +-
.../shared/destinationdatabricksputrequest.go | 21 +
.../shared/destinationdatabricksupdate.go | 429 +-
.../pkg/models/shared/destinationdevnull.go | 106 +-
.../shared/destinationdevnullcreaterequest.go | 35 +-
.../shared/destinationdevnullputrequest.go | 21 +
.../models/shared/destinationdevnullupdate.go | 85 +-
.../pkg/models/shared/destinationduckdb.go | 79 +
.../shared/destinationduckdbcreaterequest.go | 40 +
.../shared/destinationduckdbputrequest.go | 30 +
.../models/shared/destinationduckdbupdate.go | 33 +
.../pkg/models/shared/destinationdynamodb.go | 71 +-
.../destinationdynamodbcreaterequest.go | 35 +-
.../shared/destinationdynamodbputrequest.go | 21 +
.../shared/destinationdynamodbupdate.go | 115 +-
.../models/shared/destinationelasticsearch.go | 216 +-
.../destinationelasticsearchcreaterequest.go | 35 +-
.../destinationelasticsearchputrequest.go | 21 +
.../shared/destinationelasticsearchupdate.go | 210 +-
.../pkg/models/shared/destinationfirebolt.go | 235 +-
.../destinationfireboltcreaterequest.go | 35 +-
.../shared/destinationfireboltputrequest.go | 21 +
.../shared/destinationfireboltupdate.go | 204 +-
.../pkg/models/shared/destinationfirestore.go | 46 +-
.../destinationfirestorecreaterequest.go | 35 +-
.../shared/destinationfirestoreputrequest.go | 21 +
.../shared/destinationfirestoreupdate.go | 14 +
.../sdk/pkg/models/shared/destinationgcs.go | 1247 +-
.../shared/destinationgcscreaterequest.go | 35 +-
.../models/shared/destinationgcsputrequest.go | 21 +
.../pkg/models/shared/destinationgcsupdate.go | 1315 +-
.../models/shared/destinationgooglesheets.go | 65 +-
.../destinationgooglesheetscreaterequest.go | 35 +-
.../destinationgooglesheetsputrequest.go | 21 +
.../shared/destinationgooglesheetsupdate.go | 41 +-
.../sdk/pkg/models/shared/destinationkeen.go | 55 +-
.../shared/destinationkeencreaterequest.go | 35 +-
.../shared/destinationkeenputrequest.go | 21 +
.../models/shared/destinationkeenupdate.go | 38 +-
.../pkg/models/shared/destinationkinesis.go | 76 +-
.../shared/destinationkinesiscreaterequest.go | 35 +-
.../shared/destinationkinesisputrequest.go | 21 +
.../models/shared/destinationkinesisupdate.go | 61 +-
.../pkg/models/shared/destinationlangchain.go | 448 +-
.../destinationlangchaincreaterequest.go | 35 +-
.../shared/destinationlangchainputrequest.go | 21 +
.../shared/destinationlangchainupdate.go | 455 +-
.../pkg/models/shared/destinationmilvus.go | 1200 +-
.../shared/destinationmilvuscreaterequest.go | 35 +-
.../shared/destinationmilvusputrequest.go | 21 +
.../models/shared/destinationmilvusupdate.go | 1171 +-
.../pkg/models/shared/destinationmongodb.go | 676 +-
.../shared/destinationmongodbcreaterequest.go | 35 +-
.../shared/destinationmongodbputrequest.go | 21 +
.../models/shared/destinationmongodbupdate.go | 671 +-
.../sdk/pkg/models/shared/destinationmssql.go | 476 +-
.../shared/destinationmssqlcreaterequest.go | 35 +-
.../shared/destinationmssqlputrequest.go | 21 +
.../models/shared/destinationmssqlupdate.go | 470 +-
.../sdk/pkg/models/shared/destinationmysql.go | 332 +-
.../shared/destinationmysqlcreaterequest.go | 35 +-
.../shared/destinationmysqlputrequest.go | 21 +
.../models/shared/destinationmysqlupdate.go | 308 +-
.../pkg/models/shared/destinationoracle.go | 339 +-
.../shared/destinationoraclecreaterequest.go | 35 +-
.../shared/destinationoracleputrequest.go | 21 +
.../models/shared/destinationoracleupdate.go | 317 +-
.../models/shared/destinationpatchrequest.go | 14 +
.../pkg/models/shared/destinationpinecone.go | 875 +-
.../destinationpineconecreaterequest.go | 35 +-
.../shared/destinationpineconeputrequest.go | 21 +
.../shared/destinationpineconeupdate.go | 846 +-
.../pkg/models/shared/destinationpostgres.go | 751 +-
.../destinationpostgrescreaterequest.go | 35 +-
.../shared/destinationpostgresputrequest.go | 21 +
.../shared/destinationpostgresupdate.go | 751 +-
.../pkg/models/shared/destinationpubsub.go | 98 +-
.../shared/destinationpubsubcreaterequest.go | 35 +-
.../shared/destinationpubsubputrequest.go | 21 +
.../models/shared/destinationpubsubupdate.go | 81 +-
.../models/shared/destinationputrequest.go | 14 +
.../pkg/models/shared/destinationqdrant.go | 1448 +
.../shared/destinationqdrantcreaterequest.go | 40 +
.../shared/destinationqdrantputrequest.go | 30 +
.../models/shared/destinationqdrantupdate.go | 1408 +
.../sdk/pkg/models/shared/destinationredis.go | 494 +-
.../shared/destinationrediscreaterequest.go | 35 +-
.../shared/destinationredisputrequest.go | 21 +
.../models/shared/destinationredisupdate.go | 486 +-
.../pkg/models/shared/destinationredshift.go | 766 +-
.../destinationredshiftcreaterequest.go | 35 +-
.../shared/destinationredshiftputrequest.go | 21 +
.../shared/destinationredshiftupdate.go | 760 +-
.../pkg/models/shared/destinationresponse.go | 35 +
.../sdk/pkg/models/shared/destinations3.go | 1221 +-
.../shared/destinations3createrequest.go | 35 +-
.../pkg/models/shared/destinations3glue.go | 356 +-
.../shared/destinations3gluecreaterequest.go | 35 +-
.../shared/destinations3glueputrequest.go | 21 +
.../models/shared/destinations3glueupdate.go | 352 +-
.../models/shared/destinations3putrequest.go | 21 +
.../pkg/models/shared/destinations3update.go | 1195 +-
.../pkg/models/shared/destinationsftpjson.go | 67 +-
.../destinationsftpjsoncreaterequest.go | 35 +-
.../shared/destinationsftpjsonputrequest.go | 21 +
.../shared/destinationsftpjsonupdate.go | 52 +-
.../pkg/models/shared/destinationsnowflake.go | 332 +-
.../destinationsnowflakecreaterequest.go | 35 +-
.../shared/destinationsnowflakeputrequest.go | 21 +
.../shared/destinationsnowflakeupdate.go | 330 +-
.../pkg/models/shared/destinationsresponse.go | 21 +
.../pkg/models/shared/destinationtimeplus.go | 48 +-
.../destinationtimepluscreaterequest.go | 35 +-
.../shared/destinationtimeplusputrequest.go | 21 +
.../shared/destinationtimeplusupdate.go | 31 +-
.../pkg/models/shared/destinationtypesense.go | 67 +-
.../destinationtypesensecreaterequest.go | 35 +-
.../shared/destinationtypesenseputrequest.go | 21 +
.../shared/destinationtypesenseupdate.go | 35 +
.../pkg/models/shared/destinationvertica.go | 339 +-
.../shared/destinationverticacreaterequest.go | 35 +-
.../shared/destinationverticaputrequest.go | 21 +
.../models/shared/destinationverticaupdate.go | 315 +-
.../pkg/models/shared/destinationweaviate.go | 1446 +
.../destinationweaviatecreaterequest.go | 40 +
.../shared/destinationweaviateputrequest.go | 30 +
.../shared/destinationweaviateupdate.go | 1406 +
.../sdk/pkg/models/shared/destinationxata.go | 46 +-
.../shared/destinationxatacreaterequest.go | 35 +-
.../shared/destinationxataputrequest.go | 21 +
.../models/shared/destinationxataupdate.go | 14 +
.../sdk/pkg/models/shared/geographyenum.go | 0
.../models/shared/geographyenumnodefault.go | 0
.../pkg/models/shared/initiateoauthrequest.go | 41 +-
.../sdk/pkg/models/shared/jobcreaterequest.go | 14 +
internal/sdk/pkg/models/shared/jobresponse.go | 63 +
.../sdk/pkg/models/shared/jobsresponse.go | 21 +
.../sdk/pkg/models/shared/jobstatusenum.go | 0
internal/sdk/pkg/models/shared/jobtypeenum.go | 0
.../models/shared/namespacedefinitionenum.go | 0
.../namespacedefinitionenumnodefault.go | 0
.../nonbreakingschemaupdatesbehaviorenum.go | 0
...akingschemaupdatesbehaviorenumnodefault.go | 0
.../shared/oauthcredentialsconfiguration.go | 0
.../models/shared/oauthinputconfiguration.go | 0
.../sdk/pkg/models/shared/scheduletypeenum.go | 0
.../shared/scheduletypewithbasicenum.go | 0
.../sdk/pkg/models/shared/schemebasicauth.go | 22 +
internal/sdk/pkg/models/shared/security.go | 19 +-
internal/sdk/pkg/models/shared/sourceaha.go | 46 +-
.../models/shared/sourceahacreaterequest.go | 40 +-
.../pkg/models/shared/sourceahaputrequest.go | 21 +
.../sdk/pkg/models/shared/sourceahaupdate.go | 14 +
.../sdk/pkg/models/shared/sourceaircall.go | 53 +-
.../shared/sourceaircallcreaterequest.go | 40 +-
.../models/shared/sourceaircallputrequest.go | 21 +
.../pkg/models/shared/sourceaircallupdate.go | 33 +
.../sdk/pkg/models/shared/sourceairtable.go | 203 +-
.../shared/sourceairtablecreaterequest.go | 40 +-
.../models/shared/sourceairtableputrequest.go | 21 +
.../pkg/models/shared/sourceairtableupdate.go | 174 +-
.../sdk/pkg/models/shared/sourcealloydb.go | 1240 +-
.../shared/sourcealloydbcreaterequest.go | 40 +-
.../models/shared/sourcealloydbputrequest.go | 21 +
.../pkg/models/shared/sourcealloydbupdate.go | 1234 +-
.../sdk/pkg/models/shared/sourceamazonads.go | 113 +-
.../shared/sourceamazonadscreaterequest.go | 40 +-
.../shared/sourceamazonadsputrequest.go | 21 +
.../models/shared/sourceamazonadsupdate.go | 159 +-
.../shared/sourceamazonsellerpartner.go | 164 +-
.../sourceamazonsellerpartnercreaterequest.go | 40 +-
.../sourceamazonsellerpartnerputrequest.go | 21 +
.../shared/sourceamazonsellerpartnerupdate.go | 216 +-
.../sdk/pkg/models/shared/sourceamazonsqs.go | 97 +-
.../shared/sourceamazonsqscreaterequest.go | 40 +-
.../shared/sourceamazonsqsputrequest.go | 21 +
.../models/shared/sourceamazonsqsupdate.go | 77 +-
.../sdk/pkg/models/shared/sourceamplitude.go | 74 +-
.../shared/sourceamplitudecreaterequest.go | 40 +-
.../shared/sourceamplitudeputrequest.go | 21 +
.../models/shared/sourceamplitudeupdate.go | 70 +-
.../pkg/models/shared/sourceapifydataset.go | 52 +-
.../shared/sourceapifydatasetcreaterequest.go | 40 +-
.../shared/sourceapifydatasetputrequest.go | 21 +
.../models/shared/sourceapifydatasetupdate.go | 22 +-
.../sdk/pkg/models/shared/sourceappfollow.go | 39 +-
.../shared/sourceappfollowcreaterequest.go | 40 +-
.../shared/sourceappfollowputrequest.go | 21 +
.../models/shared/sourceappfollowupdate.go | 7 +
internal/sdk/pkg/models/shared/sourceasana.go | 211 +-
.../models/shared/sourceasanacreaterequest.go | 40 +-
.../models/shared/sourceasanaputrequest.go | 21 +
.../pkg/models/shared/sourceasanaupdate.go | 198 +-
internal/sdk/pkg/models/shared/sourceauth0.go | 205 +-
.../models/shared/sourceauth0createrequest.go | 40 +-
.../models/shared/sourceauth0putrequest.go | 21 +
.../pkg/models/shared/sourceauth0update.go | 187 +-
.../pkg/models/shared/sourceawscloudtrail.go | 64 +-
.../sourceawscloudtrailcreaterequest.go | 40 +-
.../shared/sourceawscloudtrailputrequest.go | 21 +
.../shared/sourceawscloudtrailupdate.go | 44 +-
.../models/shared/sourceazureblobstorage.go | 952 +-
.../sourceazureblobstoragecreaterequest.go | 42 +-
.../sourceazureblobstorageputrequest.go | 23 +
.../shared/sourceazureblobstorageupdate.go | 932 +-
.../sdk/pkg/models/shared/sourceazuretable.go | 53 +-
.../shared/sourceazuretablecreaterequest.go | 40 +-
.../shared/sourceazuretableputrequest.go | 21 +
.../models/shared/sourceazuretableupdate.go | 38 +-
.../sdk/pkg/models/shared/sourcebamboohr.go | 62 +-
.../shared/sourcebamboohrcreaterequest.go | 40 +-
.../models/shared/sourcebamboohrputrequest.go | 21 +
.../pkg/models/shared/sourcebamboohrupdate.go | 47 +-
.../pkg/models/shared/sourcebigcommerce.go | 42 -
.../shared/sourcebigcommercecreaterequest.go | 11 -
.../shared/sourcebigcommerceputrequest.go | 9 -
.../models/shared/sourcebigcommerceupdate.go | 12 -
.../sdk/pkg/models/shared/sourcebigquery.go | 39 +-
.../shared/sourcebigquerycreaterequest.go | 40 +-
.../models/shared/sourcebigqueryputrequest.go | 21 +
.../pkg/models/shared/sourcebigqueryupdate.go | 21 +
.../sdk/pkg/models/shared/sourcebingads.go | 282 +-
.../shared/sourcebingadscreaterequest.go | 40 +-
.../models/shared/sourcebingadsputrequest.go | 21 +
.../pkg/models/shared/sourcebingadsupdate.go | 276 +-
.../sdk/pkg/models/shared/sourcebraintree.go | 67 +-
.../shared/sourcebraintreecreaterequest.go | 40 +-
.../shared/sourcebraintreeputrequest.go | 21 +
.../models/shared/sourcebraintreeupdate.go | 47 +
internal/sdk/pkg/models/shared/sourcebraze.go | 55 +-
.../models/shared/sourcebrazecreaterequest.go | 40 +-
.../models/shared/sourcebrazeputrequest.go | 21 +
.../pkg/models/shared/sourcebrazeupdate.go | 35 +-
internal/sdk/pkg/models/shared/sourcecart.go | 264 +
.../models/shared/sourcecartcreaterequest.go | 49 +
.../pkg/models/shared/sourcecartputrequest.go | 30 +
.../sdk/pkg/models/shared/sourcecartupdate.go | 224 +
.../sdk/pkg/models/shared/sourcechargebee.go | 60 +-
.../shared/sourcechargebeecreaterequest.go | 40 +-
.../shared/sourcechargebeeputrequest.go | 21 +
.../models/shared/sourcechargebeeupdate.go | 58 +-
.../sdk/pkg/models/shared/sourcechartmogul.go | 76 +-
.../shared/sourcechartmogulcreaterequest.go | 40 +-
.../shared/sourcechartmogulputrequest.go | 21 +
.../models/shared/sourcechartmogulupdate.go | 58 +-
.../sdk/pkg/models/shared/sourceclickhouse.go | 311 +-
.../shared/sourceclickhousecreaterequest.go | 40 +-
.../shared/sourceclickhouseputrequest.go | 21 +
.../models/shared/sourceclickhouseupdate.go | 301 +-
.../sdk/pkg/models/shared/sourceclickupapi.go | 76 +-
.../shared/sourceclickupapicreaterequest.go | 40 +-
.../shared/sourceclickupapiputrequest.go | 21 +
.../models/shared/sourceclickupapiupdate.go | 59 +-
.../sdk/pkg/models/shared/sourceclockify.go | 53 +-
.../shared/sourceclockifycreaterequest.go | 40 +-
.../models/shared/sourceclockifyputrequest.go | 21 +
.../pkg/models/shared/sourceclockifyupdate.go | 38 +-
.../sdk/pkg/models/shared/sourceclosecom.go | 50 +-
.../shared/sourceclosecomcreaterequest.go | 40 +-
.../models/shared/sourceclosecomputrequest.go | 21 +
.../pkg/models/shared/sourceclosecomupdate.go | 30 +-
internal/sdk/pkg/models/shared/sourcecoda.go | 39 +-
.../models/shared/sourcecodacreaterequest.go | 40 +-
.../pkg/models/shared/sourcecodaputrequest.go | 21 +
.../sdk/pkg/models/shared/sourcecodaupdate.go | 7 +
.../sdk/pkg/models/shared/sourcecoinapi.go | 85 +-
.../shared/sourcecoinapicreaterequest.go | 40 +-
.../models/shared/sourcecoinapiputrequest.go | 21 +
.../pkg/models/shared/sourcecoinapiupdate.go | 81 +-
.../pkg/models/shared/sourcecoinmarketcap.go | 53 +-
.../sourcecoinmarketcapcreaterequest.go | 40 +-
.../shared/sourcecoinmarketcapputrequest.go | 21 +
.../shared/sourcecoinmarketcapupdate.go | 39 +-
.../sdk/pkg/models/shared/sourceconfigcat.go | 46 +-
.../shared/sourceconfigcatcreaterequest.go | 40 +-
.../shared/sourceconfigcatputrequest.go | 21 +
.../models/shared/sourceconfigcatupdate.go | 14 +
.../sdk/pkg/models/shared/sourceconfluence.go | 53 +-
.../shared/sourceconfluencecreaterequest.go | 40 +-
.../shared/sourceconfluenceputrequest.go | 21 +
.../models/shared/sourceconfluenceupdate.go | 21 +
.../sdk/pkg/models/shared/sourceconvex.go | 32 +-
.../shared/sourceconvexcreaterequest.go | 40 +-
.../models/shared/sourceconvexputrequest.go | 21 +
.../pkg/models/shared/sourceconvexupdate.go | 14 +
.../pkg/models/shared/sourcecreaterequest.go | 40 +-
.../sdk/pkg/models/shared/sourcedatascope.go | 46 +-
.../shared/sourcedatascopecreaterequest.go | 40 +-
.../shared/sourcedatascopeputrequest.go | 21 +
.../models/shared/sourcedatascopeupdate.go | 14 +
.../sdk/pkg/models/shared/sourcedelighted.go | 46 +-
.../shared/sourcedelightedcreaterequest.go | 40 +-
.../shared/sourcedelightedputrequest.go | 21 +
.../models/shared/sourcedelightedupdate.go | 26 +
internal/sdk/pkg/models/shared/sourcedixa.go | 56 +-
.../models/shared/sourcedixacreaterequest.go | 40 +-
.../pkg/models/shared/sourcedixaputrequest.go | 21 +
.../sdk/pkg/models/shared/sourcedixaupdate.go | 41 +-
.../sdk/pkg/models/shared/sourcedockerhub.go | 39 +-
.../shared/sourcedockerhubcreaterequest.go | 40 +-
.../shared/sourcedockerhubputrequest.go | 21 +
.../models/shared/sourcedockerhubupdate.go | 7 +
.../sdk/pkg/models/shared/sourcedremio.go | 46 +-
.../shared/sourcedremiocreaterequest.go | 40 +-
.../models/shared/sourcedremioputrequest.go | 21 +
.../pkg/models/shared/sourcedremioupdate.go | 31 +-
.../sdk/pkg/models/shared/sourcedynamodb.go | 57 +-
.../shared/sourcedynamodbcreaterequest.go | 40 +-
.../models/shared/sourcedynamodbputrequest.go | 21 +
.../pkg/models/shared/sourcedynamodbupdate.go | 51 +-
.../pkg/models/shared/sourcee2etestcloud.go | 204 -
.../shared/sourcee2etestcloudcreaterequest.go | 11 -
.../shared/sourcee2etestcloudputrequest.go | 9 -
.../models/shared/sourcee2etestcloudupdate.go | 179 -
.../pkg/models/shared/sourceemailoctopus.go | 39 +-
.../shared/sourceemailoctopuscreaterequest.go | 40 +-
.../shared/sourceemailoctopusputrequest.go | 21 +
.../models/shared/sourceemailoctopusupdate.go | 7 +
.../pkg/models/shared/sourceexchangerates.go | 62 +-
.../sourceexchangeratescreaterequest.go | 40 +-
.../shared/sourceexchangeratesputrequest.go | 21 +
.../shared/sourceexchangeratesupdate.go | 44 +-
.../models/shared/sourcefacebookmarketing.go | 649 +-
.../sourcefacebookmarketingcreaterequest.go | 40 +-
.../sourcefacebookmarketingputrequest.go | 21 +
.../shared/sourcefacebookmarketingupdate.go | 637 +-
.../pkg/models/shared/sourcefacebookpages.go | 46 +-
.../sourcefacebookpagescreaterequest.go | 40 +-
.../shared/sourcefacebookpagesputrequest.go | 21 +
.../shared/sourcefacebookpagesupdate.go | 14 +
internal/sdk/pkg/models/shared/sourcefaker.go | 75 +-
.../models/shared/sourcefakercreaterequest.go | 40 +-
.../models/shared/sourcefakerputrequest.go | 21 +
.../pkg/models/shared/sourcefakerupdate.go | 60 +-
internal/sdk/pkg/models/shared/sourcefauna.go | 247 +-
.../models/shared/sourcefaunacreaterequest.go | 40 +-
.../models/shared/sourcefaunaputrequest.go | 21 +
.../pkg/models/shared/sourcefaunaupdate.go | 233 +-
internal/sdk/pkg/models/shared/sourcefile.go | 784 +
.../models/shared/sourcefilecreaterequest.go | 49 +
.../pkg/models/shared/sourcefileputrequest.go | 30 +
.../sdk/pkg/models/shared/sourcefilesecure.go | 511 -
.../shared/sourcefilesecurecreaterequest.go | 11 -
.../shared/sourcefilesecureputrequest.go | 9 -
.../models/shared/sourcefilesecureupdate.go | 486 -
.../sdk/pkg/models/shared/sourcefileupdate.go | 755 +
.../sdk/pkg/models/shared/sourcefirebolt.go | 60 +-
.../shared/sourcefireboltcreaterequest.go | 40 +-
.../models/shared/sourcefireboltputrequest.go | 21 +
.../pkg/models/shared/sourcefireboltupdate.go | 42 +
.../pkg/models/shared/sourcefreshcaller.go | 69 +-
.../shared/sourcefreshcallercreaterequest.go | 40 +-
.../shared/sourcefreshcallerputrequest.go | 21 +
.../models/shared/sourcefreshcallerupdate.go | 49 +-
.../sdk/pkg/models/shared/sourcefreshdesk.go | 60 +-
.../shared/sourcefreshdeskcreaterequest.go | 40 +-
.../shared/sourcefreshdeskputrequest.go | 21 +
.../models/shared/sourcefreshdeskupdate.go | 40 +
.../sdk/pkg/models/shared/sourcefreshsales.go | 46 +-
.../shared/sourcefreshsalescreaterequest.go | 40 +-
.../shared/sourcefreshsalesputrequest.go | 21 +
.../models/shared/sourcefreshsalesupdate.go | 14 +
.../pkg/models/shared/sourcegainsightpx.go | 39 +-
.../shared/sourcegainsightpxcreaterequest.go | 40 +-
.../shared/sourcegainsightpxputrequest.go | 21 +
.../models/shared/sourcegainsightpxupdate.go | 7 +
internal/sdk/pkg/models/shared/sourcegcs.go | 643 +-
.../models/shared/sourcegcscreaterequest.go | 43 +-
.../pkg/models/shared/sourcegcsputrequest.go | 24 +
.../sdk/pkg/models/shared/sourcegcsupdate.go | 642 +-
.../sdk/pkg/models/shared/sourcegetlago.go | 48 +-
.../shared/sourcegetlagocreaterequest.go | 40 +-
.../models/shared/sourcegetlagoputrequest.go | 21 +
.../pkg/models/shared/sourcegetlagoupdate.go | 31 +
.../sdk/pkg/models/shared/sourcegithub.go | 260 +-
.../shared/sourcegithubcreaterequest.go | 40 +-
.../models/shared/sourcegithubputrequest.go | 21 +
.../pkg/models/shared/sourcegithubupdate.go | 240 +-
.../sdk/pkg/models/shared/sourcegitlab.go | 259 +-
.../shared/sourcegitlabcreaterequest.go | 40 +-
.../models/shared/sourcegitlabputrequest.go | 21 +
.../pkg/models/shared/sourcegitlabupdate.go | 241 +-
.../sdk/pkg/models/shared/sourceglassfrog.go | 39 +-
.../shared/sourceglassfrogcreaterequest.go | 40 +-
.../shared/sourceglassfrogputrequest.go | 21 +
.../models/shared/sourceglassfrogupdate.go | 7 +
internal/sdk/pkg/models/shared/sourcegnews.go | 107 +-
.../models/shared/sourcegnewscreaterequest.go | 40 +-
.../models/shared/sourcegnewsputrequest.go | 21 +
.../pkg/models/shared/sourcegnewsupdate.go | 293 +-
.../sdk/pkg/models/shared/sourcegoogleads.go | 134 +-
.../shared/sourcegoogleadscreaterequest.go | 40 +-
.../shared/sourcegoogleadsputrequest.go | 21 +
.../models/shared/sourcegoogleadsupdate.go | 122 +-
.../shared/sourcegoogleanalyticsdataapi.go | 8470 ++++-
...urcegoogleanalyticsdataapicreaterequest.go | 40 +-
.../sourcegoogleanalyticsdataapiputrequest.go | 21 +
.../sourcegoogleanalyticsdataapiupdate.go | 8464 ++++-
.../models/shared/sourcegoogleanalyticsv4.go | 185 -
.../sourcegoogleanalyticsv4createrequest.go | 11 -
.../sourcegoogleanalyticsv4putrequest.go | 9 -
.../shared/sourcegoogleanalyticsv4update.go | 160 -
.../models/shared/sourcegoogledirectory.go | 198 +-
.../sourcegoogledirectorycreaterequest.go | 40 +-
.../shared/sourcegoogledirectoryputrequest.go | 21 +
.../shared/sourcegoogledirectoryupdate.go | 171 +-
.../pkg/models/shared/sourcegoogledrive.go | 1106 +
.../shared/sourcegoogledrivecreaterequest.go | 51 +
.../shared/sourcegoogledriveputrequest.go | 32 +
.../models/shared/sourcegoogledriveupdate.go | 1077 +
.../shared/sourcegooglepagespeedinsights.go | 60 +-
...rcegooglepagespeedinsightscreaterequest.go | 40 +-
...sourcegooglepagespeedinsightsputrequest.go | 21 +
.../sourcegooglepagespeedinsightsupdate.go | 66 +-
.../shared/sourcegooglesearchconsole.go | 289 +-
.../sourcegooglesearchconsolecreaterequest.go | 40 +-
.../sourcegooglesearchconsoleputrequest.go | 21 +
.../shared/sourcegooglesearchconsoleupdate.go | 299 +-
.../pkg/models/shared/sourcegooglesheets.go | 189 +-
.../shared/sourcegooglesheetscreaterequest.go | 40 +-
.../shared/sourcegooglesheetsputrequest.go | 21 +
.../models/shared/sourcegooglesheetsupdate.go | 183 +-
.../pkg/models/shared/sourcegooglewebfonts.go | 60 +-
.../sourcegooglewebfontscreaterequest.go | 40 +-
.../shared/sourcegooglewebfontsputrequest.go | 21 +
.../shared/sourcegooglewebfontsupdate.go | 28 +
.../sourcegoogleworkspaceadminreports.go | 53 +-
...oogleworkspaceadminreportscreaterequest.go | 40 +-
...cegoogleworkspaceadminreportsputrequest.go | 21 +
...sourcegoogleworkspaceadminreportsupdate.go | 21 +
.../sdk/pkg/models/shared/sourcegreenhouse.go | 39 +-
.../shared/sourcegreenhousecreaterequest.go | 40 +-
.../shared/sourcegreenhouseputrequest.go | 21 +
.../models/shared/sourcegreenhouseupdate.go | 7 +
.../sdk/pkg/models/shared/sourcegridly.go | 46 +-
.../shared/sourcegridlycreaterequest.go | 40 +-
.../models/shared/sourcegridlyputrequest.go | 21 +
.../pkg/models/shared/sourcegridlyupdate.go | 14 +
.../sdk/pkg/models/shared/sourceharvest.go | 270 +-
.../shared/sourceharvestcreaterequest.go | 40 +-
.../models/shared/sourceharvestputrequest.go | 21 +
.../pkg/models/shared/sourceharvestupdate.go | 250 +-
.../sdk/pkg/models/shared/sourcehubplanner.go | 39 +-
.../shared/sourcehubplannercreaterequest.go | 40 +-
.../shared/sourcehubplannerputrequest.go | 21 +
.../models/shared/sourcehubplannerupdate.go | 7 +
.../sdk/pkg/models/shared/sourcehubspot.go | 200 +-
.../shared/sourcehubspotcreaterequest.go | 40 +-
.../models/shared/sourcehubspotputrequest.go | 21 +
.../pkg/models/shared/sourcehubspotupdate.go | 182 +-
.../sdk/pkg/models/shared/sourceinsightly.go | 44 +-
.../shared/sourceinsightlycreaterequest.go | 40 +-
.../shared/sourceinsightlyputrequest.go | 21 +
.../models/shared/sourceinsightlyupdate.go | 14 +
.../sdk/pkg/models/shared/sourceinstagram.go | 64 +-
.../shared/sourceinstagramcreaterequest.go | 40 +-
.../shared/sourceinstagramputrequest.go | 21 +
.../models/shared/sourceinstagramupdate.go | 44 +-
.../sdk/pkg/models/shared/sourceinstatus.go | 39 +-
.../shared/sourceinstatuscreaterequest.go | 40 +-
.../models/shared/sourceinstatusputrequest.go | 21 +
.../pkg/models/shared/sourceinstatusupdate.go | 7 +
.../sdk/pkg/models/shared/sourceintercom.go | 60 +-
.../shared/sourceintercomcreaterequest.go | 40 +-
.../models/shared/sourceintercomputrequest.go | 21 +
.../pkg/models/shared/sourceintercomupdate.go | 40 +
.../sdk/pkg/models/shared/sourceip2whois.go | 46 +-
.../shared/sourceip2whoiscreaterequest.go | 40 +-
.../models/shared/sourceip2whoisputrequest.go | 21 +
.../pkg/models/shared/sourceip2whoisupdate.go | 14 +
.../sdk/pkg/models/shared/sourceiterable.go | 46 +-
.../shared/sourceiterablecreaterequest.go | 40 +-
.../models/shared/sourceiterableputrequest.go | 21 +
.../pkg/models/shared/sourceiterableupdate.go | 26 +
internal/sdk/pkg/models/shared/sourcejira.go | 144 +-
.../models/shared/sourcejiracreaterequest.go | 40 +-
.../pkg/models/shared/sourcejiraputrequest.go | 21 +
.../sdk/pkg/models/shared/sourcejiraupdate.go | 128 +-
.../sdk/pkg/models/shared/sourcek6cloud.go | 39 +-
.../shared/sourcek6cloudcreaterequest.go | 40 +-
.../models/shared/sourcek6cloudputrequest.go | 21 +
.../pkg/models/shared/sourcek6cloudupdate.go | 7 +
.../sdk/pkg/models/shared/sourceklarna.go | 60 +-
.../shared/sourceklarnacreaterequest.go | 40 +-
.../models/shared/sourceklarnaputrequest.go | 21 +
.../pkg/models/shared/sourceklarnaupdate.go | 42 +-
.../sdk/pkg/models/shared/sourceklaviyo.go | 50 +-
.../shared/sourceklaviyocreaterequest.go | 40 +-
.../models/shared/sourceklaviyoputrequest.go | 21 +
.../pkg/models/shared/sourceklaviyoupdate.go | 30 +-
.../pkg/models/shared/sourcekustomersinger.go | 46 +-
.../sourcekustomersingercreaterequest.go | 40 +-
.../shared/sourcekustomersingerputrequest.go | 21 +
.../shared/sourcekustomersingerupdate.go | 14 +
internal/sdk/pkg/models/shared/sourcekyve.go | 71 +-
.../models/shared/sourcekyvecreaterequest.go | 40 +-
.../pkg/models/shared/sourcekyveputrequest.go | 21 +
.../sdk/pkg/models/shared/sourcekyveupdate.go | 54 +-
.../pkg/models/shared/sourcelaunchdarkly.go | 39 +-
.../shared/sourcelaunchdarklycreaterequest.go | 40 +-
.../shared/sourcelaunchdarklyputrequest.go | 21 +
.../models/shared/sourcelaunchdarklyupdate.go | 7 +
.../sdk/pkg/models/shared/sourcelemlist.go | 39 +-
.../shared/sourcelemlistcreaterequest.go | 40 +-
.../models/shared/sourcelemlistputrequest.go | 21 +
.../pkg/models/shared/sourcelemlistupdate.go | 7 +
.../pkg/models/shared/sourceleverhiring.go | 203 +-
.../shared/sourceleverhiringcreaterequest.go | 40 +-
.../shared/sourceleverhiringputrequest.go | 21 +
.../models/shared/sourceleverhiringupdate.go | 189 +-
.../pkg/models/shared/sourcelinkedinads.go | 309 +-
.../shared/sourcelinkedinadscreaterequest.go | 40 +-
.../shared/sourcelinkedinadsputrequest.go | 21 +
.../models/shared/sourcelinkedinadsupdate.go | 301 +-
.../pkg/models/shared/sourcelinkedinpages.go | 196 +-
.../sourcelinkedinpagescreaterequest.go | 40 +-
.../shared/sourcelinkedinpagesputrequest.go | 21 +
.../shared/sourcelinkedinpagesupdate.go | 165 +-
.../sdk/pkg/models/shared/sourcelinnworks.go | 60 +-
.../shared/sourcelinnworkscreaterequest.go | 40 +-
.../shared/sourcelinnworksputrequest.go | 21 +
.../models/shared/sourcelinnworksupdate.go | 40 +
.../sdk/pkg/models/shared/sourcelokalise.go | 46 +-
.../shared/sourcelokalisecreaterequest.go | 40 +-
.../models/shared/sourcelokaliseputrequest.go | 21 +
.../pkg/models/shared/sourcelokaliseupdate.go | 14 +
.../sdk/pkg/models/shared/sourcemailchimp.go | 196 +-
.../shared/sourcemailchimpcreaterequest.go | 40 +-
.../shared/sourcemailchimpputrequest.go | 21 +
.../models/shared/sourcemailchimpupdate.go | 167 +-
.../sdk/pkg/models/shared/sourcemailgun.go | 55 +-
.../shared/sourcemailguncreaterequest.go | 40 +-
.../models/shared/sourcemailgunputrequest.go | 21 +
.../pkg/models/shared/sourcemailgunupdate.go | 35 +-
.../sdk/pkg/models/shared/sourcemailjetsms.go | 53 +-
.../shared/sourcemailjetsmscreaterequest.go | 40 +-
.../shared/sourcemailjetsmsputrequest.go | 21 +
.../models/shared/sourcemailjetsmsupdate.go | 21 +
.../sdk/pkg/models/shared/sourcemarketo.go | 60 +-
.../shared/sourcemarketocreaterequest.go | 40 +-
.../models/shared/sourcemarketoputrequest.go | 21 +
.../pkg/models/shared/sourcemarketoupdate.go | 40 +
.../sdk/pkg/models/shared/sourcemetabase.go | 62 +-
.../shared/sourcemetabasecreaterequest.go | 40 +-
.../models/shared/sourcemetabaseputrequest.go | 21 +
.../pkg/models/shared/sourcemetabaseupdate.go | 28 +
.../pkg/models/shared/sourcemicrosoftteams.go | 215 +-
.../sourcemicrosoftteamscreaterequest.go | 40 +-
.../shared/sourcemicrosoftteamsputrequest.go | 21 +
.../shared/sourcemicrosoftteamsupdate.go | 188 +-
.../sdk/pkg/models/shared/sourcemixpanel.go | 262 +-
.../shared/sourcemixpanelcreaterequest.go | 40 +-
.../models/shared/sourcemixpanelputrequest.go | 21 +
.../pkg/models/shared/sourcemixpanelupdate.go | 256 +-
.../sdk/pkg/models/shared/sourcemonday.go | 198 +-
.../shared/sourcemondaycreaterequest.go | 40 +-
.../models/shared/sourcemondayputrequest.go | 21 +
.../pkg/models/shared/sourcemondayupdate.go | 169 +-
.../sdk/pkg/models/shared/sourcemongodb.go | 284 -
.../shared/sourcemongodbcreaterequest.go | 11 -
.../models/shared/sourcemongodbinternalpoc.go | 69 +-
.../sourcemongodbinternalpoccreaterequest.go | 40 +-
.../sourcemongodbinternalpocputrequest.go | 21 +
.../shared/sourcemongodbinternalpocupdate.go | 52 +-
.../models/shared/sourcemongodbputrequest.go | 9 -
.../pkg/models/shared/sourcemongodbupdate.go | 259 -
.../sdk/pkg/models/shared/sourcemongodbv2.go | 346 +
.../shared/sourcemongodbv2createrequest.go | 49 +
.../shared/sourcemongodbv2putrequest.go | 30 +
.../models/shared/sourcemongodbv2update.go | 317 +
internal/sdk/pkg/models/shared/sourcemssql.go | 645 +-
.../models/shared/sourcemssqlcreaterequest.go | 40 +-
.../models/shared/sourcemssqlputrequest.go | 21 +
.../pkg/models/shared/sourcemssqlupdate.go | 644 +-
.../sdk/pkg/models/shared/sourcemyhours.go | 62 +-
.../shared/sourcemyhourscreaterequest.go | 40 +-
.../models/shared/sourcemyhoursputrequest.go | 21 +
.../pkg/models/shared/sourcemyhoursupdate.go | 45 +-
internal/sdk/pkg/models/shared/sourcemysql.go | 762 +-
.../models/shared/sourcemysqlcreaterequest.go | 40 +-
.../models/shared/sourcemysqlputrequest.go | 21 +
.../pkg/models/shared/sourcemysqlupdate.go | 756 +-
.../sdk/pkg/models/shared/sourcenetsuite.go | 90 +-
.../shared/sourcenetsuitecreaterequest.go | 40 +-
.../models/shared/sourcenetsuiteputrequest.go | 21 +
.../pkg/models/shared/sourcenetsuiteupdate.go | 73 +-
.../sdk/pkg/models/shared/sourcenotion.go | 226 +-
.../shared/sourcenotioncreaterequest.go | 40 +-
.../models/shared/sourcenotionputrequest.go | 21 +
.../pkg/models/shared/sourcenotionupdate.go | 208 +-
.../sdk/pkg/models/shared/sourcenytimes.go | 67 +-
.../shared/sourcenytimescreaterequest.go | 40 +-
.../models/shared/sourcenytimesputrequest.go | 21 +
.../pkg/models/shared/sourcenytimesupdate.go | 85 +-
internal/sdk/pkg/models/shared/sourceokta.go | 203 +-
.../models/shared/sourceoktacreaterequest.go | 40 +-
.../pkg/models/shared/sourceoktaputrequest.go | 21 +
.../sdk/pkg/models/shared/sourceoktaupdate.go | 172 +-
.../sdk/pkg/models/shared/sourceomnisend.go | 39 +-
.../shared/sourceomnisendcreaterequest.go | 40 +-
.../models/shared/sourceomnisendputrequest.go | 21 +
.../pkg/models/shared/sourceomnisendupdate.go | 7 +
.../sdk/pkg/models/shared/sourceonesignal.go | 81 +-
.../shared/sourceonesignalcreaterequest.go | 40 +-
.../shared/sourceonesignalputrequest.go | 21 +
.../models/shared/sourceonesignalupdate.go | 65 +-
.../sdk/pkg/models/shared/sourceoracle.go | 628 +-
.../shared/sourceoraclecreaterequest.go | 40 +-
.../models/shared/sourceoracleputrequest.go | 21 +
.../pkg/models/shared/sourceoracleupdate.go | 652 +-
internal/sdk/pkg/models/shared/sourceorb.go | 83 +-
.../models/shared/sourceorbcreaterequest.go | 40 +-
internal/sdk/pkg/models/shared/sourceorbit.go | 53 +-
.../models/shared/sourceorbitcreaterequest.go | 40 +-
.../models/shared/sourceorbitputrequest.go | 21 +
.../pkg/models/shared/sourceorbitupdate.go | 21 +
.../pkg/models/shared/sourceorbputrequest.go | 21 +
.../sdk/pkg/models/shared/sourceorbupdate.go | 66 +-
.../models/shared/sourceoutbrainamplify.go | 214 +-
.../sourceoutbrainamplifycreaterequest.go | 40 +-
.../shared/sourceoutbrainamplifyputrequest.go | 21 +
.../shared/sourceoutbrainamplifyupdate.go | 223 +-
.../sdk/pkg/models/shared/sourceoutreach.go | 67 +-
.../shared/sourceoutreachcreaterequest.go | 40 +-
.../models/shared/sourceoutreachputrequest.go | 21 +
.../pkg/models/shared/sourceoutreachupdate.go | 35 +
.../pkg/models/shared/sourcepatchrequest.go | 28 +
.../models/shared/sourcepaypaltransaction.go | 80 +-
.../sourcepaypaltransactioncreaterequest.go | 40 +-
.../sourcepaypaltransactionputrequest.go | 21 +
.../shared/sourcepaypaltransactionupdate.go | 60 +-
.../sdk/pkg/models/shared/sourcepaystack.go | 55 +-
.../shared/sourcepaystackcreaterequest.go | 40 +-
.../models/shared/sourcepaystackputrequest.go | 21 +
.../pkg/models/shared/sourcepaystackupdate.go | 35 +-
internal/sdk/pkg/models/shared/sourcependo.go | 39 +-
.../models/shared/sourcependocreaterequest.go | 40 +-
.../models/shared/sourcependoputrequest.go | 21 +
.../pkg/models/shared/sourcependoupdate.go | 7 +
.../sdk/pkg/models/shared/sourcepersistiq.go | 39 +-
.../shared/sourcepersistiqcreaterequest.go | 40 +-
.../shared/sourcepersistiqputrequest.go | 21 +
.../models/shared/sourcepersistiqupdate.go | 7 +
.../sdk/pkg/models/shared/sourcepexelsapi.go | 74 +-
.../shared/sourcepexelsapicreaterequest.go | 40 +-
.../shared/sourcepexelsapiputrequest.go | 21 +
.../models/shared/sourcepexelsapiupdate.go | 42 +
.../sdk/pkg/models/shared/sourcepinterest.go | 906 +-
.../shared/sourcepinterestcreaterequest.go | 40 +-
.../shared/sourcepinterestputrequest.go | 21 +
.../models/shared/sourcepinterestupdate.go | 906 +-
.../sdk/pkg/models/shared/sourcepipedrive.go | 66 +-
.../shared/sourcepipedrivecreaterequest.go | 40 +-
.../shared/sourcepipedriveputrequest.go | 21 +
.../models/shared/sourcepipedriveupdate.go | 49 +-
.../sdk/pkg/models/shared/sourcepocket.go | 109 +-
.../shared/sourcepocketcreaterequest.go | 40 +-
.../models/shared/sourcepocketputrequest.go | 21 +
.../pkg/models/shared/sourcepocketupdate.go | 149 +-
.../sdk/pkg/models/shared/sourcepokeapi.go | 2755 +-
.../shared/sourcepokeapicreaterequest.go | 40 +-
.../models/shared/sourcepokeapiputrequest.go | 21 +
.../pkg/models/shared/sourcepokeapiupdate.go | 2730 +-
.../models/shared/sourcepolygonstockapi.go | 97 +-
.../sourcepolygonstockapicreaterequest.go | 40 +-
.../shared/sourcepolygonstockapiputrequest.go | 21 +
.../shared/sourcepolygonstockapiupdate.go | 77 +-
.../sdk/pkg/models/shared/sourcepostgres.go | 1222 +-
.../shared/sourcepostgrescreaterequest.go | 40 +-
.../models/shared/sourcepostgresputrequest.go | 21 +
.../pkg/models/shared/sourcepostgresupdate.go | 1218 +-
.../sdk/pkg/models/shared/sourceposthog.go | 62 +-
.../shared/sourceposthogcreaterequest.go | 40 +-
.../models/shared/sourceposthogputrequest.go | 21 +
.../pkg/models/shared/sourceposthogupdate.go | 44 +-
.../pkg/models/shared/sourcepostmarkapp.go | 46 +-
.../shared/sourcepostmarkappcreaterequest.go | 40 +-
.../shared/sourcepostmarkappputrequest.go | 21 +
.../models/shared/sourcepostmarkappupdate.go | 14 +
.../sdk/pkg/models/shared/sourceprestashop.go | 55 +-
.../shared/sourceprestashopcreaterequest.go | 40 +-
.../shared/sourceprestashopputrequest.go | 21 +
.../models/shared/sourceprestashopupdate.go | 35 +-
.../sdk/pkg/models/shared/sourcepunkapi.go | 53 +-
.../shared/sourcepunkapicreaterequest.go | 40 +-
.../models/shared/sourcepunkapiputrequest.go | 21 +
.../pkg/models/shared/sourcepunkapiupdate.go | 21 +
.../sdk/pkg/models/shared/sourceputrequest.go | 14 +
internal/sdk/pkg/models/shared/sourcepypi.go | 46 +-
.../models/shared/sourcepypicreaterequest.go | 40 +-
.../pkg/models/shared/sourcepypiputrequest.go | 21 +
.../sdk/pkg/models/shared/sourcepypiupdate.go | 14 +
.../sdk/pkg/models/shared/sourcequalaroo.go | 60 +-
.../shared/sourcequalaroocreaterequest.go | 40 +-
.../models/shared/sourcequalarooputrequest.go | 21 +
.../pkg/models/shared/sourcequalarooupdate.go | 28 +
.../sdk/pkg/models/shared/sourcequickbooks.go | 160 +-
.../shared/sourcequickbookscreaterequest.go | 40 +-
.../shared/sourcequickbooksputrequest.go | 21 +
.../models/shared/sourcequickbooksupdate.go | 142 +-
internal/sdk/pkg/models/shared/sourcerailz.go | 53 +-
.../models/shared/sourcerailzcreaterequest.go | 40 +-
.../models/shared/sourcerailzputrequest.go | 21 +
.../pkg/models/shared/sourcerailzupdate.go | 21 +
.../sdk/pkg/models/shared/sourcerecharge.go | 46 +-
.../shared/sourcerechargecreaterequest.go | 40 +-
.../models/shared/sourcerechargeputrequest.go | 21 +
.../pkg/models/shared/sourcerechargeupdate.go | 26 +
.../sdk/pkg/models/shared/sourcerecreation.go | 48 +-
.../shared/sourcerecreationcreaterequest.go | 40 +-
.../shared/sourcerecreationputrequest.go | 21 +
.../models/shared/sourcerecreationupdate.go | 14 +
.../sdk/pkg/models/shared/sourcerecruitee.go | 46 +-
.../shared/sourcerecruiteecreaterequest.go | 40 +-
.../shared/sourcerecruiteeputrequest.go | 21 +
.../models/shared/sourcerecruiteeupdate.go | 14 +
.../sdk/pkg/models/shared/sourcerecurly.go | 53 +-
.../shared/sourcerecurlycreaterequest.go | 40 +-
.../models/shared/sourcerecurlyputrequest.go | 21 +
.../pkg/models/shared/sourcerecurlyupdate.go | 21 +
.../sdk/pkg/models/shared/sourceredshift.go | 69 +-
.../shared/sourceredshiftcreaterequest.go | 40 +-
.../models/shared/sourceredshiftputrequest.go | 21 +
.../pkg/models/shared/sourceredshiftupdate.go | 66 +-
.../sdk/pkg/models/shared/sourceresponse.go | 35 +
.../sdk/pkg/models/shared/sourceretently.go | 247 +-
.../shared/sourceretentlycreaterequest.go | 40 +-
.../models/shared/sourceretentlyputrequest.go | 21 +
.../pkg/models/shared/sourceretentlyupdate.go | 220 +-
.../sdk/pkg/models/shared/sourcerkicovid.go | 37 +-
.../shared/sourcerkicovidcreaterequest.go | 40 +-
.../models/shared/sourcerkicovidputrequest.go | 21 +
.../pkg/models/shared/sourcerkicovidupdate.go | 7 +
internal/sdk/pkg/models/shared/sourcerss.go | 37 +-
.../models/shared/sourcersscreaterequest.go | 40 +-
.../pkg/models/shared/sourcerssputrequest.go | 21 +
.../sdk/pkg/models/shared/sourcerssupdate.go | 7 +
internal/sdk/pkg/models/shared/sources3.go | 1338 +-
.../models/shared/sources3createrequest.go | 40 +-
.../pkg/models/shared/sources3putrequest.go | 21 +
.../sdk/pkg/models/shared/sources3update.go | 1338 +-
.../sdk/pkg/models/shared/sourcesalesforce.go | 146 +-
.../shared/sourcesalesforcecreaterequest.go | 40 +-
.../shared/sourcesalesforceputrequest.go | 21 +
.../models/shared/sourcesalesforceupdate.go | 142 +-
.../sdk/pkg/models/shared/sourcesalesloft.go | 210 +-
.../shared/sourcesalesloftcreaterequest.go | 40 +-
.../shared/sourcesalesloftputrequest.go | 21 +
.../models/shared/sourcesalesloftupdate.go | 192 +-
.../pkg/models/shared/sourcesapfieldglass.go | 39 +-
.../sourcesapfieldglasscreaterequest.go | 40 +-
.../shared/sourcesapfieldglassputrequest.go | 21 +
.../shared/sourcesapfieldglassupdate.go | 7 +
.../sdk/pkg/models/shared/sourcesecoda.go | 39 +-
.../shared/sourcesecodacreaterequest.go | 40 +-
.../models/shared/sourcesecodaputrequest.go | 21 +
.../pkg/models/shared/sourcesecodaupdate.go | 7 +
.../sdk/pkg/models/shared/sourcesendgrid.go | 46 +-
.../shared/sourcesendgridcreaterequest.go | 40 +-
.../models/shared/sourcesendgridputrequest.go | 21 +
.../pkg/models/shared/sourcesendgridupdate.go | 26 +
.../sdk/pkg/models/shared/sourcesendinblue.go | 39 +-
.../shared/sourcesendinbluecreaterequest.go | 40 +-
.../shared/sourcesendinblueputrequest.go | 21 +
.../models/shared/sourcesendinblueupdate.go | 7 +
.../sdk/pkg/models/shared/sourcesenseforce.go | 69 +-
.../shared/sourcesenseforcecreaterequest.go | 40 +-
.../shared/sourcesenseforceputrequest.go | 21 +
.../models/shared/sourcesenseforceupdate.go | 51 +-
.../sdk/pkg/models/shared/sourcesentry.go | 69 +-
.../shared/sourcesentrycreaterequest.go | 40 +-
.../models/shared/sourcesentryputrequest.go | 21 +
.../pkg/models/shared/sourcesentryupdate.go | 52 +-
internal/sdk/pkg/models/shared/sourcesftp.go | 229 +-
.../sdk/pkg/models/shared/sourcesftpbulk.go | 126 +-
.../shared/sourcesftpbulkcreaterequest.go | 40 +-
.../models/shared/sourcesftpbulkputrequest.go | 21 +
.../pkg/models/shared/sourcesftpbulkupdate.go | 124 +-
.../models/shared/sourcesftpcreaterequest.go | 40 +-
.../pkg/models/shared/sourcesftpputrequest.go | 21 +
.../sdk/pkg/models/shared/sourcesftpupdate.go | 211 +-
.../sdk/pkg/models/shared/sourceshopify.go | 213 +-
.../shared/sourceshopifycreaterequest.go | 40 +-
.../models/shared/sourceshopifyputrequest.go | 21 +
.../pkg/models/shared/sourceshopifyupdate.go | 207 +-
.../sdk/pkg/models/shared/sourceshortio.go | 53 +-
.../shared/sourceshortiocreaterequest.go | 40 +-
.../models/shared/sourceshortioputrequest.go | 21 +
.../pkg/models/shared/sourceshortioupdate.go | 21 +
internal/sdk/pkg/models/shared/sourceslack.go | 225 +-
.../models/shared/sourceslackcreaterequest.go | 40 +-
.../models/shared/sourceslackputrequest.go | 21 +
.../pkg/models/shared/sourceslackupdate.go | 207 +-
.../sdk/pkg/models/shared/sourcesmaily.go | 53 +-
.../shared/sourcesmailycreaterequest.go | 40 +-
.../models/shared/sourcesmailyputrequest.go | 21 +
.../pkg/models/shared/sourcesmailyupdate.go | 21 +
.../pkg/models/shared/sourcesmartengage.go | 39 +-
.../shared/sourcesmartengagecreaterequest.go | 40 +-
.../shared/sourcesmartengageputrequest.go | 21 +
.../models/shared/sourcesmartengageupdate.go | 7 +
.../pkg/models/shared/sourcesmartsheets.go | 226 +-
.../shared/sourcesmartsheetscreaterequest.go | 40 +-
.../shared/sourcesmartsheetsputrequest.go | 21 +
.../models/shared/sourcesmartsheetsupdate.go | 254 +-
.../models/shared/sourcesnapchatmarketing.go | 71 +-
.../sourcesnapchatmarketingcreaterequest.go | 40 +-
.../sourcesnapchatmarketingputrequest.go | 21 +
.../shared/sourcesnapchatmarketingupdate.go | 51 +-
.../sdk/pkg/models/shared/sourcesnowflake.go | 231 +-
.../shared/sourcesnowflakecreaterequest.go | 40 +-
.../shared/sourcesnowflakeputrequest.go | 21 +
.../models/shared/sourcesnowflakeupdate.go | 214 +-
.../sdk/pkg/models/shared/sourcesonarcloud.go | 69 +-
.../shared/sourcesonarcloudcreaterequest.go | 40 +-
.../shared/sourcesonarcloudputrequest.go | 21 +
.../models/shared/sourcesonarcloudupdate.go | 49 +-
.../sdk/pkg/models/shared/sourcespacexapi.go | 48 +-
.../shared/sourcespacexapicreaterequest.go | 40 +-
.../shared/sourcespacexapiputrequest.go | 21 +
.../models/shared/sourcespacexapiupdate.go | 14 +
.../sdk/pkg/models/shared/sourcesquare.go | 220 +-
.../shared/sourcesquarecreaterequest.go | 40 +-
.../models/shared/sourcesquareputrequest.go | 21 +
.../pkg/models/shared/sourcesquareupdate.go | 202 +-
.../sdk/pkg/models/shared/sourcesresponse.go | 21 +
.../sdk/pkg/models/shared/sourcestrava.go | 73 +-
.../shared/sourcestravacreaterequest.go | 40 +-
.../models/shared/sourcestravaputrequest.go | 21 +
.../pkg/models/shared/sourcestravaupdate.go | 53 +-
.../sdk/pkg/models/shared/sourcestripe.go | 91 +-
.../shared/sourcestripecreaterequest.go | 40 +-
.../models/shared/sourcestripeputrequest.go | 21 +
.../pkg/models/shared/sourcestripeupdate.go | 73 +-
.../pkg/models/shared/sourcesurveymonkey.go | 112 +-
.../shared/sourcesurveymonkeycreaterequest.go | 40 +-
.../shared/sourcesurveymonkeyputrequest.go | 21 +
.../models/shared/sourcesurveymonkeyupdate.go | 118 +-
.../pkg/models/shared/sourcesurveysparrow.go | 177 +-
.../sourcesurveysparrowcreaterequest.go | 40 +-
.../shared/sourcesurveysparrowputrequest.go | 21 +
.../shared/sourcesurveysparrowupdate.go | 160 +-
internal/sdk/pkg/models/shared/sourcetempo.go | 39 +-
.../models/shared/sourcetempocreaterequest.go | 40 +-
.../models/shared/sourcetempoputrequest.go | 21 +
.../pkg/models/shared/sourcetempoupdate.go | 7 +
.../pkg/models/shared/sourcetheguardianapi.go | 74 +-
.../sourcetheguardianapicreaterequest.go | 40 +-
.../shared/sourcetheguardianapiputrequest.go | 21 +
.../shared/sourcetheguardianapiupdate.go | 42 +
.../models/shared/sourcetiktokmarketing.go | 243 +-
.../sourcetiktokmarketingcreaterequest.go | 40 +-
.../shared/sourcetiktokmarketingputrequest.go | 21 +
.../shared/sourcetiktokmarketingupdate.go | 225 +-
.../sdk/pkg/models/shared/sourcetodoist.go | 37 +-
.../shared/sourcetodoistcreaterequest.go | 40 +-
.../models/shared/sourcetodoistputrequest.go | 21 +
.../pkg/models/shared/sourcetodoistupdate.go | 7 +
.../sdk/pkg/models/shared/sourcetrello.go | 62 +-
.../shared/sourcetrellocreaterequest.go | 40 +-
.../models/shared/sourcetrelloputrequest.go | 21 +
.../pkg/models/shared/sourcetrelloupdate.go | 42 +-
.../sdk/pkg/models/shared/sourcetrustpilot.go | 217 +-
.../shared/sourcetrustpilotcreaterequest.go | 40 +-
.../shared/sourcetrustpilotputrequest.go | 21 +
.../models/shared/sourcetrustpilotupdate.go | 188 +-
.../pkg/models/shared/sourcetvmazeschedule.go | 60 +-
.../sourcetvmazeschedulecreaterequest.go | 40 +-
.../shared/sourcetvmazescheduleputrequest.go | 21 +
.../shared/sourcetvmazescheduleupdate.go | 28 +
.../sdk/pkg/models/shared/sourcetwilio.go | 60 +-
.../shared/sourcetwiliocreaterequest.go | 40 +-
.../models/shared/sourcetwilioputrequest.go | 21 +
.../models/shared/sourcetwiliotaskrouter.go | 46 +-
.../sourcetwiliotaskroutercreaterequest.go | 40 +-
.../sourcetwiliotaskrouterputrequest.go | 21 +
.../shared/sourcetwiliotaskrouterupdate.go | 14 +
.../pkg/models/shared/sourcetwilioupdate.go | 42 +-
.../sdk/pkg/models/shared/sourcetwitter.go | 60 +-
.../shared/sourcetwittercreaterequest.go | 40 +-
.../models/shared/sourcetwitterputrequest.go | 21 +
.../pkg/models/shared/sourcetwitterupdate.go | 40 +
.../sdk/pkg/models/shared/sourcetypeform.go | 219 +-
.../shared/sourcetypeformcreaterequest.go | 40 +-
.../models/shared/sourcetypeformputrequest.go | 21 +
.../pkg/models/shared/sourcetypeformupdate.go | 199 +-
.../sdk/pkg/models/shared/sourceuscensus.go | 53 +-
.../shared/sourceuscensuscreaterequest.go | 40 +-
.../models/shared/sourceuscensusputrequest.go | 21 +
.../pkg/models/shared/sourceuscensusupdate.go | 21 +
.../sdk/pkg/models/shared/sourcevantage.go | 39 +-
.../shared/sourcevantagecreaterequest.go | 40 +-
.../models/shared/sourcevantageputrequest.go | 21 +
.../pkg/models/shared/sourcevantageupdate.go | 7 +
.../sdk/pkg/models/shared/sourcewebflow.go | 46 +-
.../shared/sourcewebflowcreaterequest.go | 40 +-
.../models/shared/sourcewebflowputrequest.go | 21 +
.../pkg/models/shared/sourcewebflowupdate.go | 14 +
.../pkg/models/shared/sourcewhiskyhunter.go | 30 +-
.../shared/sourcewhiskyhuntercreaterequest.go | 40 +-
.../shared/sourcewhiskyhunterputrequest.go | 21 +
.../models/shared/sourcewhiskyhunterupdate.go | 0
.../models/shared/sourcewikipediapageviews.go | 81 +-
.../sourcewikipediapageviewscreaterequest.go | 40 +-
.../sourcewikipediapageviewsputrequest.go | 21 +
.../shared/sourcewikipediapageviewsupdate.go | 49 +
.../pkg/models/shared/sourcewoocommerce.go | 62 +-
.../shared/sourcewoocommercecreaterequest.go | 40 +-
.../shared/sourcewoocommerceputrequest.go | 21 +
.../models/shared/sourcewoocommerceupdate.go | 42 +-
internal/sdk/pkg/models/shared/sourcexero.go | 55 -
.../models/shared/sourcexerocreaterequest.go | 11 -
.../pkg/models/shared/sourcexeroputrequest.go | 9 -
.../sdk/pkg/models/shared/sourcexeroupdate.go | 28 -
internal/sdk/pkg/models/shared/sourcexkcd.go | 30 +-
.../models/shared/sourcexkcdcreaterequest.go | 40 +-
.../pkg/models/shared/sourcexkcdputrequest.go | 21 +
.../sdk/pkg/models/shared/sourcexkcdupdate.go | 0
.../pkg/models/shared/sourceyandexmetrica.go | 62 +-
.../sourceyandexmetricacreaterequest.go | 40 +-
.../shared/sourceyandexmetricaputrequest.go | 21 +
.../shared/sourceyandexmetricaupdate.go | 42 +-
internal/sdk/pkg/models/shared/sourceyotpo.go | 60 +-
.../models/shared/sourceyotpocreaterequest.go | 40 +-
.../models/shared/sourceyotpoputrequest.go | 21 +
.../pkg/models/shared/sourceyotpoupdate.go | 42 +-
.../sdk/pkg/models/shared/sourceyounium.go | 44 -
.../shared/sourceyouniumcreaterequest.go | 11 -
.../models/shared/sourceyouniumputrequest.go | 9 -
.../pkg/models/shared/sourceyouniumupdate.go | 14 -
.../models/shared/sourceyoutubeanalytics.go | 95 +-
.../sourceyoutubeanalyticscreaterequest.go | 40 +-
.../sourceyoutubeanalyticsputrequest.go | 21 +
.../shared/sourceyoutubeanalyticsupdate.go | 71 +-
.../pkg/models/shared/sourcezendeskchat.go | 212 +-
.../shared/sourcezendeskchatcreaterequest.go | 40 +-
.../shared/sourcezendeskchatputrequest.go | 21 +
.../models/shared/sourcezendeskchatupdate.go | 194 +-
.../pkg/models/shared/sourcezendesksell.go | 61 +
.../shared/sourcezendesksellcreaterequest.go | 49 +
.../shared/sourcezendesksellputrequest.go | 30 +
.../models/shared/sourcezendesksellupdate.go | 15 +
.../models/shared/sourcezendesksunshine.go | 210 +-
.../sourcezendesksunshinecreaterequest.go | 40 +-
.../shared/sourcezendesksunshineputrequest.go | 21 +
.../shared/sourcezendesksunshineupdate.go | 192 +-
.../pkg/models/shared/sourcezendesksupport.go | 276 +-
.../sourcezendesksupportcreaterequest.go | 40 +-
.../shared/sourcezendesksupportputrequest.go | 21 +
.../shared/sourcezendesksupportupdate.go | 258 +-
.../pkg/models/shared/sourcezendesktalk.go | 269 +-
.../shared/sourcezendesktalkcreaterequest.go | 40 +-
.../shared/sourcezendesktalkputrequest.go | 21 +
.../models/shared/sourcezendesktalkupdate.go | 251 +-
.../sdk/pkg/models/shared/sourcezenloop.go | 60 +-
.../shared/sourcezenloopcreaterequest.go | 40 +-
.../models/shared/sourcezenloopputrequest.go | 21 +
.../pkg/models/shared/sourcezenloopupdate.go | 28 +
.../sdk/pkg/models/shared/sourcezohocrm.go | 83 +-
.../shared/sourcezohocrmcreaterequest.go | 40 +-
.../models/shared/sourcezohocrmputrequest.go | 21 +
.../pkg/models/shared/sourcezohocrmupdate.go | 111 +-
internal/sdk/pkg/models/shared/sourcezoom.go | 39 +-
.../models/shared/sourcezoomcreaterequest.go | 40 +-
.../pkg/models/shared/sourcezoomputrequest.go | 21 +
.../sdk/pkg/models/shared/sourcezoomupdate.go | 7 +
internal/sdk/pkg/models/shared/sourcezuora.go | 76 +-
.../models/shared/sourcezuoracreaterequest.go | 40 +-
.../models/shared/sourcezuoraputrequest.go | 21 +
.../pkg/models/shared/sourcezuoraupdate.go | 106 +-
.../pkg/models/shared/streamconfiguration.go | 28 +
.../pkg/models/shared/streamconfigurations.go | 7 +
.../sdk/pkg/models/shared/streamproperties.go | 42 +
.../models/shared/streampropertiesresponse.go | 7 +
.../models/shared/workspacecreaterequest.go | 7 +
.../workspaceoauthcredentialsrequest.go | 21 +
.../pkg/models/shared/workspaceresponse.go | 42 +-
.../pkg/models/shared/workspacesresponse.go | 21 +
.../models/shared/workspaceupdaterequest.go | 7 +
internal/sdk/pkg/types/bigint.go | 41 +-
internal/sdk/pkg/types/date.go | 4 +
internal/sdk/pkg/types/datetime.go | 0
internal/sdk/pkg/types/decimal.go | 20 +
internal/sdk/pkg/types/pointers.go | 10 +
internal/sdk/pkg/utils/contenttype.go | 0
internal/sdk/pkg/utils/form.go | 8 +-
internal/sdk/pkg/utils/headers.go | 0
internal/sdk/pkg/utils/json.go | 579 +
internal/sdk/pkg/utils/pathparams.go | 57 +-
internal/sdk/pkg/utils/queryparams.go | 0
internal/sdk/pkg/utils/requestbody.go | 45 +-
internal/sdk/pkg/utils/retries.go | 0
internal/sdk/pkg/utils/security.go | 108 +-
internal/sdk/pkg/utils/utils.go | 12 +-
internal/sdk/sdk.go | 51 +-
internal/sdk/sources.go | 9272 +++---
internal/sdk/streams.go | 27 +-
internal/sdk/workspaces.go | 83 +-
internal/validators/DateValidator.go | 2 +-
internal/validators/ExactlyOneChild.go | 0
internal/validators/JSONParseValidator.go | 0
internal/validators/RFC3339Validator.go | 10 +-
main.go | 8 +-
terraform-registry-manifest.json | 0
tools/tools.go | 0
4812 files changed, 213192 insertions(+), 118545 deletions(-)
mode change 100755 => 100644 .gitattributes
mode change 100755 => 100644 USAGE.md
delete mode 100644 docs/data-sources/destination_bigquery_denormalized.md
create mode 100644 docs/data-sources/destination_duckdb.md
create mode 100644 docs/data-sources/destination_qdrant.md
create mode 100644 docs/data-sources/destination_weaviate.md
delete mode 100644 docs/data-sources/source_bigcommerce.md
create mode 100644 docs/data-sources/source_cart.md
delete mode 100644 docs/data-sources/source_e2e_test_cloud.md
create mode 100644 docs/data-sources/source_file.md
delete mode 100644 docs/data-sources/source_file_secure.md
delete mode 100644 docs/data-sources/source_google_analytics_v4.md
create mode 100644 docs/data-sources/source_google_drive.md
delete mode 100644 docs/data-sources/source_mongodb.md
create mode 100644 docs/data-sources/source_mongodb_v2.md
delete mode 100644 docs/data-sources/source_xero.md
delete mode 100644 docs/data-sources/source_younium.md
create mode 100644 docs/data-sources/source_zendesk_sell.md
delete mode 100644 docs/resources/destination_bigquery_denormalized.md
create mode 100644 docs/resources/destination_duckdb.md
create mode 100644 docs/resources/destination_qdrant.md
create mode 100644 docs/resources/destination_weaviate.md
delete mode 100644 docs/resources/source_bigcommerce.md
create mode 100644 docs/resources/source_cart.md
delete mode 100644 docs/resources/source_e2e_test_cloud.md
create mode 100644 docs/resources/source_file.md
delete mode 100644 docs/resources/source_file_secure.md
delete mode 100644 docs/resources/source_google_analytics_v4.md
create mode 100644 docs/resources/source_google_drive.md
delete mode 100644 docs/resources/source_mongodb.md
create mode 100644 docs/resources/source_mongodb_v2.md
delete mode 100644 docs/resources/source_xero.md
delete mode 100644 docs/resources/source_younium.md
create mode 100644 docs/resources/source_zendesk_sell.md
mode change 100755 => 100644 examples/README.md
mode change 100755 => 100644 examples/data-sources/airbyte_connection/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_aws_datalake/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_azure_blob_storage/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_bigquery/data-source.tf
delete mode 100755 examples/data-sources/airbyte_destination_bigquery_denormalized/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_clickhouse/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_convex/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_cumulio/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_databend/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_databricks/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_dev_null/data-source.tf
create mode 100644 examples/data-sources/airbyte_destination_duckdb/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_dynamodb/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_elasticsearch/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_firebolt/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_firestore/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_gcs/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_google_sheets/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_keen/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_kinesis/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_langchain/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_milvus/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_mongodb/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_mssql/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_mysql/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_oracle/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_pinecone/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_postgres/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_pubsub/data-source.tf
create mode 100644 examples/data-sources/airbyte_destination_qdrant/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_redis/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_redshift/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_s3/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_s3_glue/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_sftp_json/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_snowflake/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_timeplus/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_typesense/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_vertica/data-source.tf
create mode 100644 examples/data-sources/airbyte_destination_weaviate/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_destination_xata/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_aha/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_aircall/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_airtable/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_alloydb/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_amazon_ads/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_amazon_seller_partner/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_amazon_sqs/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_amplitude/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_apify_dataset/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_appfollow/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_asana/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_auth0/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_aws_cloudtrail/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_azure_blob_storage/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_azure_table/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_bamboo_hr/data-source.tf
delete mode 100755 examples/data-sources/airbyte_source_bigcommerce/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_bigquery/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_bing_ads/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_braintree/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_braze/data-source.tf
create mode 100644 examples/data-sources/airbyte_source_cart/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_chargebee/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_chartmogul/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_clickhouse/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_clickup_api/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_clockify/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_close_com/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_coda/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_coin_api/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_coinmarketcap/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_configcat/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_confluence/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_convex/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_datascope/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_delighted/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_dixa/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_dockerhub/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_dremio/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_dynamodb/data-source.tf
delete mode 100755 examples/data-sources/airbyte_source_e2e_test_cloud/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_emailoctopus/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_exchange_rates/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_facebook_marketing/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_facebook_pages/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_faker/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_fauna/data-source.tf
create mode 100644 examples/data-sources/airbyte_source_file/data-source.tf
delete mode 100755 examples/data-sources/airbyte_source_file_secure/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_firebolt/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_freshcaller/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_freshdesk/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_freshsales/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_gainsight_px/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_gcs/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_getlago/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_github/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_gitlab/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_glassfrog/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_gnews/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_google_ads/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_google_analytics_data_api/data-source.tf
delete mode 100755 examples/data-sources/airbyte_source_google_analytics_v4/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_google_directory/data-source.tf
create mode 100644 examples/data-sources/airbyte_source_google_drive/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_google_pagespeed_insights/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_google_search_console/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_google_sheets/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_google_webfonts/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_google_workspace_admin_reports/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_greenhouse/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_gridly/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_harvest/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_hubplanner/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_hubspot/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_insightly/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_instagram/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_instatus/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_intercom/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_ip2whois/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_iterable/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_jira/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_k6_cloud/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_klarna/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_klaviyo/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_kustomer_singer/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_kyve/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_launchdarkly/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_lemlist/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_lever_hiring/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_linkedin_ads/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_linkedin_pages/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_linnworks/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_lokalise/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_mailchimp/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_mailgun/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_mailjet_sms/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_marketo/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_metabase/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_microsoft_teams/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_mixpanel/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_monday/data-source.tf
delete mode 100755 examples/data-sources/airbyte_source_mongodb/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_mongodb_internal_poc/data-source.tf
create mode 100644 examples/data-sources/airbyte_source_mongodb_v2/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_mssql/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_my_hours/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_mysql/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_netsuite/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_notion/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_nytimes/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_okta/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_omnisend/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_onesignal/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_oracle/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_orb/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_orbit/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_outbrain_amplify/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_outreach/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_paypal_transaction/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_paystack/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_pendo/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_persistiq/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_pexels_api/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_pinterest/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_pipedrive/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_pocket/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_pokeapi/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_polygon_stock_api/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_postgres/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_posthog/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_postmarkapp/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_prestashop/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_punk_api/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_pypi/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_qualaroo/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_quickbooks/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_railz/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_recharge/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_recreation/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_recruitee/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_recurly/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_redshift/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_retently/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_rki_covid/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_rss/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_s3/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_salesforce/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_salesloft/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_sap_fieldglass/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_secoda/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_sendgrid/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_sendinblue/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_senseforce/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_sentry/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_sftp/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_sftp_bulk/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_shopify/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_shortio/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_slack/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_smaily/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_smartengage/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_smartsheets/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_snapchat_marketing/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_snowflake/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_sonar_cloud/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_spacex_api/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_square/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_strava/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_stripe/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_survey_sparrow/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_surveymonkey/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_tempo/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_the_guardian_api/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_tiktok_marketing/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_todoist/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_trello/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_trustpilot/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_tvmaze_schedule/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_twilio/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_twilio_taskrouter/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_twitter/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_typeform/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_us_census/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_vantage/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_webflow/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_whisky_hunter/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_wikipedia_pageviews/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_woocommerce/data-source.tf
delete mode 100755 examples/data-sources/airbyte_source_xero/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_xkcd/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_yandex_metrica/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_yotpo/data-source.tf
delete mode 100755 examples/data-sources/airbyte_source_younium/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_youtube_analytics/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_zendesk_chat/data-source.tf
create mode 100644 examples/data-sources/airbyte_source_zendesk_sell/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_zendesk_sunshine/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_zendesk_support/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_zendesk_talk/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_zenloop/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_zoho_crm/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_zoom/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_source_zuora/data-source.tf
mode change 100755 => 100644 examples/data-sources/airbyte_workspace/data-source.tf
mode change 100755 => 100644 examples/provider/provider.tf
mode change 100755 => 100644 examples/resources/airbyte_connection/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_aws_datalake/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_azure_blob_storage/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_bigquery/resource.tf
delete mode 100755 examples/resources/airbyte_destination_bigquery_denormalized/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_clickhouse/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_convex/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_cumulio/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_databend/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_databricks/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_dev_null/resource.tf
create mode 100644 examples/resources/airbyte_destination_duckdb/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_dynamodb/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_elasticsearch/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_firebolt/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_firestore/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_gcs/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_google_sheets/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_keen/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_kinesis/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_langchain/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_milvus/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_mongodb/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_mssql/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_mysql/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_oracle/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_pinecone/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_postgres/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_pubsub/resource.tf
create mode 100644 examples/resources/airbyte_destination_qdrant/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_redis/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_redshift/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_s3/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_s3_glue/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_sftp_json/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_snowflake/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_timeplus/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_typesense/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_vertica/resource.tf
create mode 100644 examples/resources/airbyte_destination_weaviate/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_destination_xata/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_aha/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_aircall/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_airtable/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_alloydb/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_amazon_ads/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_amazon_seller_partner/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_amazon_sqs/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_amplitude/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_apify_dataset/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_appfollow/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_asana/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_auth0/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_aws_cloudtrail/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_azure_blob_storage/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_azure_table/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_bamboo_hr/resource.tf
delete mode 100755 examples/resources/airbyte_source_bigcommerce/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_bigquery/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_bing_ads/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_braintree/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_braze/resource.tf
create mode 100644 examples/resources/airbyte_source_cart/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_chargebee/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_chartmogul/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_clickhouse/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_clickup_api/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_clockify/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_close_com/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_coda/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_coin_api/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_coinmarketcap/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_configcat/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_confluence/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_convex/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_datascope/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_delighted/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_dixa/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_dockerhub/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_dremio/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_dynamodb/resource.tf
delete mode 100755 examples/resources/airbyte_source_e2e_test_cloud/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_emailoctopus/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_exchange_rates/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_facebook_marketing/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_facebook_pages/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_faker/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_fauna/resource.tf
create mode 100644 examples/resources/airbyte_source_file/resource.tf
delete mode 100755 examples/resources/airbyte_source_file_secure/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_firebolt/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_freshcaller/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_freshdesk/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_freshsales/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_gainsight_px/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_gcs/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_getlago/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_github/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_gitlab/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_glassfrog/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_gnews/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_google_ads/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_google_analytics_data_api/resource.tf
delete mode 100755 examples/resources/airbyte_source_google_analytics_v4/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_google_directory/resource.tf
create mode 100644 examples/resources/airbyte_source_google_drive/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_google_pagespeed_insights/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_google_search_console/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_google_sheets/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_google_webfonts/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_google_workspace_admin_reports/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_greenhouse/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_gridly/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_harvest/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_hubplanner/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_hubspot/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_insightly/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_instagram/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_instatus/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_intercom/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_ip2whois/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_iterable/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_jira/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_k6_cloud/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_klarna/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_klaviyo/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_kustomer_singer/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_kyve/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_launchdarkly/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_lemlist/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_lever_hiring/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_linkedin_ads/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_linkedin_pages/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_linnworks/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_lokalise/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_mailchimp/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_mailgun/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_mailjet_sms/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_marketo/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_metabase/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_microsoft_teams/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_mixpanel/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_monday/resource.tf
delete mode 100755 examples/resources/airbyte_source_mongodb/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_mongodb_internal_poc/resource.tf
create mode 100644 examples/resources/airbyte_source_mongodb_v2/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_mssql/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_my_hours/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_mysql/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_netsuite/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_notion/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_nytimes/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_okta/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_omnisend/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_onesignal/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_oracle/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_orb/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_orbit/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_outbrain_amplify/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_outreach/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_paypal_transaction/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_paystack/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_pendo/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_persistiq/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_pexels_api/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_pinterest/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_pipedrive/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_pocket/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_pokeapi/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_polygon_stock_api/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_postgres/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_posthog/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_postmarkapp/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_prestashop/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_punk_api/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_pypi/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_qualaroo/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_quickbooks/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_railz/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_recharge/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_recreation/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_recruitee/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_recurly/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_redshift/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_retently/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_rki_covid/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_rss/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_s3/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_salesforce/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_salesloft/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_sap_fieldglass/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_secoda/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_sendgrid/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_sendinblue/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_senseforce/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_sentry/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_sftp/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_sftp_bulk/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_shopify/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_shortio/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_slack/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_smaily/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_smartengage/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_smartsheets/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_snapchat_marketing/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_snowflake/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_sonar_cloud/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_spacex_api/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_square/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_strava/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_stripe/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_survey_sparrow/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_surveymonkey/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_tempo/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_the_guardian_api/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_tiktok_marketing/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_todoist/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_trello/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_trustpilot/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_tvmaze_schedule/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_twilio/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_twilio_taskrouter/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_twitter/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_typeform/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_us_census/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_vantage/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_webflow/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_whisky_hunter/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_wikipedia_pageviews/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_woocommerce/resource.tf
delete mode 100755 examples/resources/airbyte_source_xero/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_xkcd/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_yandex_metrica/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_yotpo/resource.tf
delete mode 100755 examples/resources/airbyte_source_younium/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_youtube_analytics/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_zendesk_chat/resource.tf
create mode 100644 examples/resources/airbyte_source_zendesk_sell/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_zendesk_sunshine/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_zendesk_support/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_zendesk_talk/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_zenloop/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_zoho_crm/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_zoom/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_source_zuora/resource.tf
mode change 100755 => 100644 examples/resources/airbyte_workspace/resource.tf
mode change 100755 => 100644 go.mod
mode change 100755 => 100644 go.sum
mode change 100755 => 100644 internal/planmodifiers/boolplanmodifier/suppress_diff.go
mode change 100755 => 100644 internal/planmodifiers/float64planmodifier/suppress_diff.go
mode change 100755 => 100644 internal/planmodifiers/int64planmodifier/suppress_diff.go
mode change 100755 => 100644 internal/planmodifiers/listplanmodifier/suppress_diff.go
mode change 100755 => 100644 internal/planmodifiers/mapplanmodifier/suppress_diff.go
mode change 100755 => 100644 internal/planmodifiers/numberplanmodifier/suppress_diff.go
mode change 100755 => 100644 internal/planmodifiers/objectplanmodifier/suppress_diff.go
mode change 100755 => 100644 internal/planmodifiers/setplanmodifier/suppress_diff.go
mode change 100755 => 100644 internal/planmodifiers/stringplanmodifier/suppress_diff.go
mode change 100755 => 100644 internal/planmodifiers/utils/state_check.go
mode change 100755 => 100644 internal/provider/connection_data_source.go
mode change 100755 => 100644 internal/provider/connection_data_source_sdk.go
mode change 100755 => 100644 internal/provider/connection_resource.go
mode change 100755 => 100644 internal/provider/connection_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_awsdatalake_data_source.go
mode change 100755 => 100644 internal/provider/destination_awsdatalake_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_awsdatalake_resource.go
mode change 100755 => 100644 internal/provider/destination_awsdatalake_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_azureblobstorage_data_source.go
mode change 100755 => 100644 internal/provider/destination_azureblobstorage_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_azureblobstorage_resource.go
mode change 100755 => 100644 internal/provider/destination_azureblobstorage_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_bigquery_data_source.go
mode change 100755 => 100644 internal/provider/destination_bigquery_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_bigquery_resource.go
mode change 100755 => 100644 internal/provider/destination_bigquery_resource_sdk.go
delete mode 100755 internal/provider/destination_bigquerydenormalized_data_source.go
delete mode 100755 internal/provider/destination_bigquerydenormalized_data_source_sdk.go
delete mode 100755 internal/provider/destination_bigquerydenormalized_resource.go
delete mode 100755 internal/provider/destination_bigquerydenormalized_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_clickhouse_data_source.go
mode change 100755 => 100644 internal/provider/destination_clickhouse_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_clickhouse_resource.go
mode change 100755 => 100644 internal/provider/destination_clickhouse_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_convex_data_source.go
mode change 100755 => 100644 internal/provider/destination_convex_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_convex_resource.go
mode change 100755 => 100644 internal/provider/destination_convex_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_cumulio_data_source.go
mode change 100755 => 100644 internal/provider/destination_cumulio_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_cumulio_resource.go
mode change 100755 => 100644 internal/provider/destination_cumulio_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_databend_data_source.go
mode change 100755 => 100644 internal/provider/destination_databend_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_databend_resource.go
mode change 100755 => 100644 internal/provider/destination_databend_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_databricks_data_source.go
mode change 100755 => 100644 internal/provider/destination_databricks_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_databricks_resource.go
mode change 100755 => 100644 internal/provider/destination_databricks_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_devnull_data_source.go
mode change 100755 => 100644 internal/provider/destination_devnull_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_devnull_resource.go
mode change 100755 => 100644 internal/provider/destination_devnull_resource_sdk.go
create mode 100644 internal/provider/destination_duckdb_data_source.go
create mode 100644 internal/provider/destination_duckdb_data_source_sdk.go
rename internal/provider/{source_younium_resource.go => destination_duckdb_resource.go} (56%)
mode change 100755 => 100644
create mode 100644 internal/provider/destination_duckdb_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_dynamodb_data_source.go
mode change 100755 => 100644 internal/provider/destination_dynamodb_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_dynamodb_resource.go
mode change 100755 => 100644 internal/provider/destination_dynamodb_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_elasticsearch_data_source.go
mode change 100755 => 100644 internal/provider/destination_elasticsearch_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_elasticsearch_resource.go
mode change 100755 => 100644 internal/provider/destination_elasticsearch_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_firebolt_data_source.go
mode change 100755 => 100644 internal/provider/destination_firebolt_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_firebolt_resource.go
mode change 100755 => 100644 internal/provider/destination_firebolt_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_firestore_data_source.go
mode change 100755 => 100644 internal/provider/destination_firestore_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_firestore_resource.go
mode change 100755 => 100644 internal/provider/destination_firestore_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_gcs_data_source.go
mode change 100755 => 100644 internal/provider/destination_gcs_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_gcs_resource.go
mode change 100755 => 100644 internal/provider/destination_gcs_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_googlesheets_data_source.go
mode change 100755 => 100644 internal/provider/destination_googlesheets_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_googlesheets_resource.go
mode change 100755 => 100644 internal/provider/destination_googlesheets_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_keen_data_source.go
mode change 100755 => 100644 internal/provider/destination_keen_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_keen_resource.go
mode change 100755 => 100644 internal/provider/destination_keen_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_kinesis_data_source.go
mode change 100755 => 100644 internal/provider/destination_kinesis_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_kinesis_resource.go
mode change 100755 => 100644 internal/provider/destination_kinesis_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_langchain_data_source.go
mode change 100755 => 100644 internal/provider/destination_langchain_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_langchain_resource.go
mode change 100755 => 100644 internal/provider/destination_langchain_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_milvus_data_source.go
mode change 100755 => 100644 internal/provider/destination_milvus_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_milvus_resource.go
mode change 100755 => 100644 internal/provider/destination_milvus_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_mongodb_data_source.go
mode change 100755 => 100644 internal/provider/destination_mongodb_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_mongodb_resource.go
mode change 100755 => 100644 internal/provider/destination_mongodb_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_mssql_data_source.go
mode change 100755 => 100644 internal/provider/destination_mssql_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_mssql_resource.go
mode change 100755 => 100644 internal/provider/destination_mssql_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_mysql_data_source.go
mode change 100755 => 100644 internal/provider/destination_mysql_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_mysql_resource.go
mode change 100755 => 100644 internal/provider/destination_mysql_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_oracle_data_source.go
mode change 100755 => 100644 internal/provider/destination_oracle_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_oracle_resource.go
mode change 100755 => 100644 internal/provider/destination_oracle_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_pinecone_data_source.go
mode change 100755 => 100644 internal/provider/destination_pinecone_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_pinecone_resource.go
mode change 100755 => 100644 internal/provider/destination_pinecone_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_postgres_data_source.go
mode change 100755 => 100644 internal/provider/destination_postgres_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_postgres_resource.go
mode change 100755 => 100644 internal/provider/destination_postgres_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_pubsub_data_source.go
mode change 100755 => 100644 internal/provider/destination_pubsub_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_pubsub_resource.go
mode change 100755 => 100644 internal/provider/destination_pubsub_resource_sdk.go
create mode 100644 internal/provider/destination_qdrant_data_source.go
create mode 100644 internal/provider/destination_qdrant_data_source_sdk.go
create mode 100644 internal/provider/destination_qdrant_resource.go
create mode 100644 internal/provider/destination_qdrant_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_redis_data_source.go
mode change 100755 => 100644 internal/provider/destination_redis_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_redis_resource.go
mode change 100755 => 100644 internal/provider/destination_redis_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_redshift_data_source.go
mode change 100755 => 100644 internal/provider/destination_redshift_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_redshift_resource.go
mode change 100755 => 100644 internal/provider/destination_redshift_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_s3_data_source.go
mode change 100755 => 100644 internal/provider/destination_s3_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_s3_resource.go
mode change 100755 => 100644 internal/provider/destination_s3_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_s3glue_data_source.go
mode change 100755 => 100644 internal/provider/destination_s3glue_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_s3glue_resource.go
mode change 100755 => 100644 internal/provider/destination_s3glue_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_sftpjson_data_source.go
mode change 100755 => 100644 internal/provider/destination_sftpjson_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_sftpjson_resource.go
mode change 100755 => 100644 internal/provider/destination_sftpjson_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_snowflake_data_source.go
mode change 100755 => 100644 internal/provider/destination_snowflake_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_snowflake_resource.go
mode change 100755 => 100644 internal/provider/destination_snowflake_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_timeplus_data_source.go
mode change 100755 => 100644 internal/provider/destination_timeplus_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_timeplus_resource.go
mode change 100755 => 100644 internal/provider/destination_timeplus_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_typesense_data_source.go
mode change 100755 => 100644 internal/provider/destination_typesense_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_typesense_resource.go
mode change 100755 => 100644 internal/provider/destination_typesense_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_vertica_data_source.go
mode change 100755 => 100644 internal/provider/destination_vertica_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_vertica_resource.go
mode change 100755 => 100644 internal/provider/destination_vertica_resource_sdk.go
create mode 100644 internal/provider/destination_weaviate_data_source.go
create mode 100644 internal/provider/destination_weaviate_data_source_sdk.go
create mode 100644 internal/provider/destination_weaviate_resource.go
create mode 100644 internal/provider/destination_weaviate_resource_sdk.go
mode change 100755 => 100644 internal/provider/destination_xata_data_source.go
mode change 100755 => 100644 internal/provider/destination_xata_data_source_sdk.go
mode change 100755 => 100644 internal/provider/destination_xata_resource.go
mode change 100755 => 100644 internal/provider/destination_xata_resource_sdk.go
mode change 100755 => 100644 internal/provider/provider.go
mode change 100755 => 100644 internal/provider/reflect/diags.go
mode change 100755 => 100644 internal/provider/reflect/doc.go
mode change 100755 => 100644 internal/provider/reflect/generic_attr_value.go
mode change 100755 => 100644 internal/provider/reflect/helpers.go
mode change 100755 => 100644 internal/provider/reflect/interfaces.go
mode change 100755 => 100644 internal/provider/reflect/into.go
mode change 100755 => 100644 internal/provider/reflect/map.go
mode change 100755 => 100644 internal/provider/reflect/number.go
mode change 100755 => 100644 internal/provider/reflect/options.go
mode change 100755 => 100644 internal/provider/reflect/outof.go
mode change 100755 => 100644 internal/provider/reflect/pointer.go
mode change 100755 => 100644 internal/provider/reflect/primitive.go
mode change 100755 => 100644 internal/provider/reflect/slice.go
mode change 100755 => 100644 internal/provider/reflect/struct.go
mode change 100755 => 100644 internal/provider/source_aha_data_source.go
mode change 100755 => 100644 internal/provider/source_aha_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_aha_resource.go
mode change 100755 => 100644 internal/provider/source_aha_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_aircall_data_source.go
mode change 100755 => 100644 internal/provider/source_aircall_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_aircall_resource.go
mode change 100755 => 100644 internal/provider/source_aircall_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_airtable_data_source.go
mode change 100755 => 100644 internal/provider/source_airtable_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_airtable_resource.go
mode change 100755 => 100644 internal/provider/source_airtable_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_alloydb_data_source.go
mode change 100755 => 100644 internal/provider/source_alloydb_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_alloydb_resource.go
mode change 100755 => 100644 internal/provider/source_alloydb_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_amazonads_data_source.go
mode change 100755 => 100644 internal/provider/source_amazonads_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_amazonads_resource.go
mode change 100755 => 100644 internal/provider/source_amazonads_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_amazonsellerpartner_data_source.go
mode change 100755 => 100644 internal/provider/source_amazonsellerpartner_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_amazonsellerpartner_resource.go
mode change 100755 => 100644 internal/provider/source_amazonsellerpartner_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_amazonsqs_data_source.go
mode change 100755 => 100644 internal/provider/source_amazonsqs_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_amazonsqs_resource.go
mode change 100755 => 100644 internal/provider/source_amazonsqs_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_amplitude_data_source.go
mode change 100755 => 100644 internal/provider/source_amplitude_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_amplitude_resource.go
mode change 100755 => 100644 internal/provider/source_amplitude_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_apifydataset_data_source.go
mode change 100755 => 100644 internal/provider/source_apifydataset_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_apifydataset_resource.go
mode change 100755 => 100644 internal/provider/source_apifydataset_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_appfollow_data_source.go
mode change 100755 => 100644 internal/provider/source_appfollow_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_appfollow_resource.go
mode change 100755 => 100644 internal/provider/source_appfollow_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_asana_data_source.go
mode change 100755 => 100644 internal/provider/source_asana_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_asana_resource.go
mode change 100755 => 100644 internal/provider/source_asana_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_auth0_data_source.go
mode change 100755 => 100644 internal/provider/source_auth0_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_auth0_resource.go
mode change 100755 => 100644 internal/provider/source_auth0_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_awscloudtrail_data_source.go
mode change 100755 => 100644 internal/provider/source_awscloudtrail_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_awscloudtrail_resource.go
mode change 100755 => 100644 internal/provider/source_awscloudtrail_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_azureblobstorage_data_source.go
mode change 100755 => 100644 internal/provider/source_azureblobstorage_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_azureblobstorage_resource.go
mode change 100755 => 100644 internal/provider/source_azureblobstorage_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_azuretable_data_source.go
mode change 100755 => 100644 internal/provider/source_azuretable_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_azuretable_resource.go
mode change 100755 => 100644 internal/provider/source_azuretable_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_bamboohr_data_source.go
mode change 100755 => 100644 internal/provider/source_bamboohr_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_bamboohr_resource.go
mode change 100755 => 100644 internal/provider/source_bamboohr_resource_sdk.go
delete mode 100755 internal/provider/source_bigcommerce_data_source_sdk.go
delete mode 100755 internal/provider/source_bigcommerce_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_bigquery_data_source.go
mode change 100755 => 100644 internal/provider/source_bigquery_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_bigquery_resource.go
mode change 100755 => 100644 internal/provider/source_bigquery_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_bingads_data_source.go
mode change 100755 => 100644 internal/provider/source_bingads_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_bingads_resource.go
mode change 100755 => 100644 internal/provider/source_bingads_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_braintree_data_source.go
mode change 100755 => 100644 internal/provider/source_braintree_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_braintree_resource.go
mode change 100755 => 100644 internal/provider/source_braintree_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_braze_data_source.go
mode change 100755 => 100644 internal/provider/source_braze_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_braze_resource.go
mode change 100755 => 100644 internal/provider/source_braze_resource_sdk.go
create mode 100644 internal/provider/source_cart_data_source.go
create mode 100644 internal/provider/source_cart_data_source_sdk.go
rename internal/provider/{source_xero_resource.go => source_cart_resource.go} (64%)
mode change 100755 => 100644
create mode 100644 internal/provider/source_cart_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_chargebee_data_source.go
mode change 100755 => 100644 internal/provider/source_chargebee_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_chargebee_resource.go
mode change 100755 => 100644 internal/provider/source_chargebee_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_chartmogul_data_source.go
mode change 100755 => 100644 internal/provider/source_chartmogul_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_chartmogul_resource.go
mode change 100755 => 100644 internal/provider/source_chartmogul_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_clickhouse_data_source.go
mode change 100755 => 100644 internal/provider/source_clickhouse_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_clickhouse_resource.go
mode change 100755 => 100644 internal/provider/source_clickhouse_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_clickupapi_data_source.go
mode change 100755 => 100644 internal/provider/source_clickupapi_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_clickupapi_resource.go
mode change 100755 => 100644 internal/provider/source_clickupapi_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_clockify_data_source.go
mode change 100755 => 100644 internal/provider/source_clockify_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_clockify_resource.go
mode change 100755 => 100644 internal/provider/source_clockify_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_closecom_data_source.go
mode change 100755 => 100644 internal/provider/source_closecom_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_closecom_resource.go
mode change 100755 => 100644 internal/provider/source_closecom_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_coda_data_source.go
mode change 100755 => 100644 internal/provider/source_coda_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_coda_resource.go
mode change 100755 => 100644 internal/provider/source_coda_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_coinapi_data_source.go
mode change 100755 => 100644 internal/provider/source_coinapi_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_coinapi_resource.go
mode change 100755 => 100644 internal/provider/source_coinapi_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_coinmarketcap_data_source.go
mode change 100755 => 100644 internal/provider/source_coinmarketcap_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_coinmarketcap_resource.go
mode change 100755 => 100644 internal/provider/source_coinmarketcap_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_configcat_data_source.go
mode change 100755 => 100644 internal/provider/source_configcat_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_configcat_resource.go
mode change 100755 => 100644 internal/provider/source_configcat_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_confluence_data_source.go
mode change 100755 => 100644 internal/provider/source_confluence_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_confluence_resource.go
mode change 100755 => 100644 internal/provider/source_confluence_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_convex_data_source.go
mode change 100755 => 100644 internal/provider/source_convex_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_convex_resource.go
mode change 100755 => 100644 internal/provider/source_convex_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_datascope_data_source.go
mode change 100755 => 100644 internal/provider/source_datascope_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_datascope_resource.go
mode change 100755 => 100644 internal/provider/source_datascope_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_delighted_data_source.go
mode change 100755 => 100644 internal/provider/source_delighted_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_delighted_resource.go
mode change 100755 => 100644 internal/provider/source_delighted_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_dixa_data_source.go
mode change 100755 => 100644 internal/provider/source_dixa_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_dixa_resource.go
mode change 100755 => 100644 internal/provider/source_dixa_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_dockerhub_data_source.go
mode change 100755 => 100644 internal/provider/source_dockerhub_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_dockerhub_resource.go
mode change 100755 => 100644 internal/provider/source_dockerhub_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_dremio_data_source.go
mode change 100755 => 100644 internal/provider/source_dremio_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_dremio_resource.go
mode change 100755 => 100644 internal/provider/source_dremio_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_dynamodb_data_source.go
mode change 100755 => 100644 internal/provider/source_dynamodb_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_dynamodb_resource.go
mode change 100755 => 100644 internal/provider/source_dynamodb_resource_sdk.go
delete mode 100755 internal/provider/source_e2etestcloud_data_source.go
delete mode 100755 internal/provider/source_e2etestcloud_data_source_sdk.go
delete mode 100755 internal/provider/source_e2etestcloud_resource.go
delete mode 100755 internal/provider/source_e2etestcloud_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_emailoctopus_data_source.go
mode change 100755 => 100644 internal/provider/source_emailoctopus_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_emailoctopus_resource.go
mode change 100755 => 100644 internal/provider/source_emailoctopus_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_exchangerates_data_source.go
mode change 100755 => 100644 internal/provider/source_exchangerates_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_exchangerates_resource.go
mode change 100755 => 100644 internal/provider/source_exchangerates_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_facebookmarketing_data_source.go
mode change 100755 => 100644 internal/provider/source_facebookmarketing_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_facebookmarketing_resource.go
mode change 100755 => 100644 internal/provider/source_facebookmarketing_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_facebookpages_data_source.go
mode change 100755 => 100644 internal/provider/source_facebookpages_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_facebookpages_resource.go
mode change 100755 => 100644 internal/provider/source_facebookpages_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_faker_data_source.go
mode change 100755 => 100644 internal/provider/source_faker_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_faker_resource.go
mode change 100755 => 100644 internal/provider/source_faker_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_fauna_data_source.go
mode change 100755 => 100644 internal/provider/source_fauna_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_fauna_resource.go
mode change 100755 => 100644 internal/provider/source_fauna_resource_sdk.go
create mode 100644 internal/provider/source_file_data_source.go
create mode 100644 internal/provider/source_file_data_source_sdk.go
create mode 100644 internal/provider/source_file_resource.go
create mode 100644 internal/provider/source_file_resource_sdk.go
delete mode 100755 internal/provider/source_filesecure_data_source.go
delete mode 100755 internal/provider/source_filesecure_data_source_sdk.go
delete mode 100755 internal/provider/source_filesecure_resource.go
delete mode 100755 internal/provider/source_filesecure_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_firebolt_data_source.go
mode change 100755 => 100644 internal/provider/source_firebolt_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_firebolt_resource.go
mode change 100755 => 100644 internal/provider/source_firebolt_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_freshcaller_data_source.go
mode change 100755 => 100644 internal/provider/source_freshcaller_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_freshcaller_resource.go
mode change 100755 => 100644 internal/provider/source_freshcaller_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_freshdesk_data_source.go
mode change 100755 => 100644 internal/provider/source_freshdesk_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_freshdesk_resource.go
mode change 100755 => 100644 internal/provider/source_freshdesk_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_freshsales_data_source.go
mode change 100755 => 100644 internal/provider/source_freshsales_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_freshsales_resource.go
mode change 100755 => 100644 internal/provider/source_freshsales_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_gainsightpx_data_source.go
mode change 100755 => 100644 internal/provider/source_gainsightpx_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_gainsightpx_resource.go
mode change 100755 => 100644 internal/provider/source_gainsightpx_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_gcs_data_source.go
mode change 100755 => 100644 internal/provider/source_gcs_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_gcs_resource.go
mode change 100755 => 100644 internal/provider/source_gcs_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_getlago_data_source.go
mode change 100755 => 100644 internal/provider/source_getlago_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_getlago_resource.go
mode change 100755 => 100644 internal/provider/source_getlago_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_github_data_source.go
mode change 100755 => 100644 internal/provider/source_github_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_github_resource.go
mode change 100755 => 100644 internal/provider/source_github_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_gitlab_data_source.go
mode change 100755 => 100644 internal/provider/source_gitlab_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_gitlab_resource.go
mode change 100755 => 100644 internal/provider/source_gitlab_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_glassfrog_data_source.go
mode change 100755 => 100644 internal/provider/source_glassfrog_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_glassfrog_resource.go
mode change 100755 => 100644 internal/provider/source_glassfrog_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_gnews_data_source.go
mode change 100755 => 100644 internal/provider/source_gnews_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_gnews_resource.go
mode change 100755 => 100644 internal/provider/source_gnews_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_googleads_data_source.go
mode change 100755 => 100644 internal/provider/source_googleads_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_googleads_resource.go
mode change 100755 => 100644 internal/provider/source_googleads_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_googleanalyticsdataapi_data_source.go
mode change 100755 => 100644 internal/provider/source_googleanalyticsdataapi_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_googleanalyticsdataapi_resource.go
mode change 100755 => 100644 internal/provider/source_googleanalyticsdataapi_resource_sdk.go
delete mode 100755 internal/provider/source_googleanalyticsv4_data_source.go
delete mode 100755 internal/provider/source_googleanalyticsv4_data_source_sdk.go
delete mode 100755 internal/provider/source_googleanalyticsv4_resource.go
delete mode 100755 internal/provider/source_googleanalyticsv4_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_googledirectory_data_source.go
mode change 100755 => 100644 internal/provider/source_googledirectory_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_googledirectory_resource.go
mode change 100755 => 100644 internal/provider/source_googledirectory_resource_sdk.go
rename internal/provider/{source_bigcommerce_data_source.go => source_googledrive_data_source.go} (53%)
mode change 100755 => 100644
create mode 100644 internal/provider/source_googledrive_data_source_sdk.go
create mode 100644 internal/provider/source_googledrive_resource.go
create mode 100644 internal/provider/source_googledrive_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_googlepagespeedinsights_data_source.go
mode change 100755 => 100644 internal/provider/source_googlepagespeedinsights_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_googlepagespeedinsights_resource.go
mode change 100755 => 100644 internal/provider/source_googlepagespeedinsights_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_googlesearchconsole_data_source.go
mode change 100755 => 100644 internal/provider/source_googlesearchconsole_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_googlesearchconsole_resource.go
mode change 100755 => 100644 internal/provider/source_googlesearchconsole_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_googlesheets_data_source.go
mode change 100755 => 100644 internal/provider/source_googlesheets_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_googlesheets_resource.go
mode change 100755 => 100644 internal/provider/source_googlesheets_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_googlewebfonts_data_source.go
mode change 100755 => 100644 internal/provider/source_googlewebfonts_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_googlewebfonts_resource.go
mode change 100755 => 100644 internal/provider/source_googlewebfonts_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_googleworkspaceadminreports_data_source.go
mode change 100755 => 100644 internal/provider/source_googleworkspaceadminreports_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_googleworkspaceadminreports_resource.go
mode change 100755 => 100644 internal/provider/source_googleworkspaceadminreports_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_greenhouse_data_source.go
mode change 100755 => 100644 internal/provider/source_greenhouse_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_greenhouse_resource.go
mode change 100755 => 100644 internal/provider/source_greenhouse_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_gridly_data_source.go
mode change 100755 => 100644 internal/provider/source_gridly_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_gridly_resource.go
mode change 100755 => 100644 internal/provider/source_gridly_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_harvest_data_source.go
mode change 100755 => 100644 internal/provider/source_harvest_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_harvest_resource.go
mode change 100755 => 100644 internal/provider/source_harvest_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_hubplanner_data_source.go
mode change 100755 => 100644 internal/provider/source_hubplanner_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_hubplanner_resource.go
mode change 100755 => 100644 internal/provider/source_hubplanner_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_hubspot_data_source.go
mode change 100755 => 100644 internal/provider/source_hubspot_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_hubspot_resource.go
mode change 100755 => 100644 internal/provider/source_hubspot_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_insightly_data_source.go
mode change 100755 => 100644 internal/provider/source_insightly_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_insightly_resource.go
mode change 100755 => 100644 internal/provider/source_insightly_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_instagram_data_source.go
mode change 100755 => 100644 internal/provider/source_instagram_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_instagram_resource.go
mode change 100755 => 100644 internal/provider/source_instagram_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_instatus_data_source.go
mode change 100755 => 100644 internal/provider/source_instatus_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_instatus_resource.go
mode change 100755 => 100644 internal/provider/source_instatus_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_intercom_data_source.go
mode change 100755 => 100644 internal/provider/source_intercom_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_intercom_resource.go
mode change 100755 => 100644 internal/provider/source_intercom_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_ip2whois_data_source.go
mode change 100755 => 100644 internal/provider/source_ip2whois_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_ip2whois_resource.go
mode change 100755 => 100644 internal/provider/source_ip2whois_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_iterable_data_source.go
mode change 100755 => 100644 internal/provider/source_iterable_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_iterable_resource.go
mode change 100755 => 100644 internal/provider/source_iterable_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_jira_data_source.go
mode change 100755 => 100644 internal/provider/source_jira_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_jira_resource.go
mode change 100755 => 100644 internal/provider/source_jira_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_k6cloud_data_source.go
mode change 100755 => 100644 internal/provider/source_k6cloud_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_k6cloud_resource.go
mode change 100755 => 100644 internal/provider/source_k6cloud_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_klarna_data_source.go
mode change 100755 => 100644 internal/provider/source_klarna_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_klarna_resource.go
mode change 100755 => 100644 internal/provider/source_klarna_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_klaviyo_data_source.go
mode change 100755 => 100644 internal/provider/source_klaviyo_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_klaviyo_resource.go
mode change 100755 => 100644 internal/provider/source_klaviyo_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_kustomersinger_data_source.go
mode change 100755 => 100644 internal/provider/source_kustomersinger_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_kustomersinger_resource.go
mode change 100755 => 100644 internal/provider/source_kustomersinger_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_kyve_data_source.go
mode change 100755 => 100644 internal/provider/source_kyve_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_kyve_resource.go
mode change 100755 => 100644 internal/provider/source_kyve_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_launchdarkly_data_source.go
mode change 100755 => 100644 internal/provider/source_launchdarkly_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_launchdarkly_resource.go
mode change 100755 => 100644 internal/provider/source_launchdarkly_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_lemlist_data_source.go
mode change 100755 => 100644 internal/provider/source_lemlist_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_lemlist_resource.go
mode change 100755 => 100644 internal/provider/source_lemlist_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_leverhiring_data_source.go
mode change 100755 => 100644 internal/provider/source_leverhiring_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_leverhiring_resource.go
mode change 100755 => 100644 internal/provider/source_leverhiring_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_linkedinads_data_source.go
mode change 100755 => 100644 internal/provider/source_linkedinads_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_linkedinads_resource.go
mode change 100755 => 100644 internal/provider/source_linkedinads_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_linkedinpages_data_source.go
mode change 100755 => 100644 internal/provider/source_linkedinpages_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_linkedinpages_resource.go
mode change 100755 => 100644 internal/provider/source_linkedinpages_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_linnworks_data_source.go
mode change 100755 => 100644 internal/provider/source_linnworks_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_linnworks_resource.go
mode change 100755 => 100644 internal/provider/source_linnworks_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_lokalise_data_source.go
mode change 100755 => 100644 internal/provider/source_lokalise_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_lokalise_resource.go
mode change 100755 => 100644 internal/provider/source_lokalise_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_mailchimp_data_source.go
mode change 100755 => 100644 internal/provider/source_mailchimp_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_mailchimp_resource.go
mode change 100755 => 100644 internal/provider/source_mailchimp_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_mailgun_data_source.go
mode change 100755 => 100644 internal/provider/source_mailgun_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_mailgun_resource.go
mode change 100755 => 100644 internal/provider/source_mailgun_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_mailjetsms_data_source.go
mode change 100755 => 100644 internal/provider/source_mailjetsms_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_mailjetsms_resource.go
mode change 100755 => 100644 internal/provider/source_mailjetsms_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_marketo_data_source.go
mode change 100755 => 100644 internal/provider/source_marketo_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_marketo_resource.go
mode change 100755 => 100644 internal/provider/source_marketo_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_metabase_data_source.go
mode change 100755 => 100644 internal/provider/source_metabase_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_metabase_resource.go
mode change 100755 => 100644 internal/provider/source_metabase_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_microsoftteams_data_source.go
mode change 100755 => 100644 internal/provider/source_microsoftteams_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_microsoftteams_resource.go
mode change 100755 => 100644 internal/provider/source_microsoftteams_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_mixpanel_data_source.go
mode change 100755 => 100644 internal/provider/source_mixpanel_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_mixpanel_resource.go
mode change 100755 => 100644 internal/provider/source_mixpanel_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_monday_data_source.go
mode change 100755 => 100644 internal/provider/source_monday_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_monday_resource.go
mode change 100755 => 100644 internal/provider/source_monday_resource_sdk.go
delete mode 100755 internal/provider/source_mongodb_data_source.go
delete mode 100755 internal/provider/source_mongodb_data_source_sdk.go
delete mode 100755 internal/provider/source_mongodb_resource.go
delete mode 100755 internal/provider/source_mongodb_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_mongodbinternalpoc_data_source.go
mode change 100755 => 100644 internal/provider/source_mongodbinternalpoc_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_mongodbinternalpoc_resource.go
mode change 100755 => 100644 internal/provider/source_mongodbinternalpoc_resource_sdk.go
create mode 100644 internal/provider/source_mongodbv2_data_source.go
create mode 100644 internal/provider/source_mongodbv2_data_source_sdk.go
create mode 100644 internal/provider/source_mongodbv2_resource.go
create mode 100644 internal/provider/source_mongodbv2_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_mssql_data_source.go
mode change 100755 => 100644 internal/provider/source_mssql_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_mssql_resource.go
mode change 100755 => 100644 internal/provider/source_mssql_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_myhours_data_source.go
mode change 100755 => 100644 internal/provider/source_myhours_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_myhours_resource.go
mode change 100755 => 100644 internal/provider/source_myhours_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_mysql_data_source.go
mode change 100755 => 100644 internal/provider/source_mysql_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_mysql_resource.go
mode change 100755 => 100644 internal/provider/source_mysql_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_netsuite_data_source.go
mode change 100755 => 100644 internal/provider/source_netsuite_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_netsuite_resource.go
mode change 100755 => 100644 internal/provider/source_netsuite_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_notion_data_source.go
mode change 100755 => 100644 internal/provider/source_notion_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_notion_resource.go
mode change 100755 => 100644 internal/provider/source_notion_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_nytimes_data_source.go
mode change 100755 => 100644 internal/provider/source_nytimes_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_nytimes_resource.go
mode change 100755 => 100644 internal/provider/source_nytimes_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_okta_data_source.go
mode change 100755 => 100644 internal/provider/source_okta_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_okta_resource.go
mode change 100755 => 100644 internal/provider/source_okta_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_omnisend_data_source.go
mode change 100755 => 100644 internal/provider/source_omnisend_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_omnisend_resource.go
mode change 100755 => 100644 internal/provider/source_omnisend_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_onesignal_data_source.go
mode change 100755 => 100644 internal/provider/source_onesignal_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_onesignal_resource.go
mode change 100755 => 100644 internal/provider/source_onesignal_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_oracle_data_source.go
mode change 100755 => 100644 internal/provider/source_oracle_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_oracle_resource.go
mode change 100755 => 100644 internal/provider/source_oracle_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_orb_data_source.go
mode change 100755 => 100644 internal/provider/source_orb_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_orb_resource.go
mode change 100755 => 100644 internal/provider/source_orb_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_orbit_data_source.go
mode change 100755 => 100644 internal/provider/source_orbit_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_orbit_resource.go
mode change 100755 => 100644 internal/provider/source_orbit_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_outbrainamplify_data_source.go
mode change 100755 => 100644 internal/provider/source_outbrainamplify_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_outbrainamplify_resource.go
mode change 100755 => 100644 internal/provider/source_outbrainamplify_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_outreach_data_source.go
mode change 100755 => 100644 internal/provider/source_outreach_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_outreach_resource.go
mode change 100755 => 100644 internal/provider/source_outreach_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_paypaltransaction_data_source.go
mode change 100755 => 100644 internal/provider/source_paypaltransaction_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_paypaltransaction_resource.go
mode change 100755 => 100644 internal/provider/source_paypaltransaction_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_paystack_data_source.go
mode change 100755 => 100644 internal/provider/source_paystack_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_paystack_resource.go
mode change 100755 => 100644 internal/provider/source_paystack_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_pendo_data_source.go
mode change 100755 => 100644 internal/provider/source_pendo_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_pendo_resource.go
mode change 100755 => 100644 internal/provider/source_pendo_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_persistiq_data_source.go
mode change 100755 => 100644 internal/provider/source_persistiq_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_persistiq_resource.go
mode change 100755 => 100644 internal/provider/source_persistiq_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_pexelsapi_data_source.go
mode change 100755 => 100644 internal/provider/source_pexelsapi_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_pexelsapi_resource.go
mode change 100755 => 100644 internal/provider/source_pexelsapi_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_pinterest_data_source.go
mode change 100755 => 100644 internal/provider/source_pinterest_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_pinterest_resource.go
mode change 100755 => 100644 internal/provider/source_pinterest_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_pipedrive_data_source.go
mode change 100755 => 100644 internal/provider/source_pipedrive_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_pipedrive_resource.go
mode change 100755 => 100644 internal/provider/source_pipedrive_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_pocket_data_source.go
mode change 100755 => 100644 internal/provider/source_pocket_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_pocket_resource.go
mode change 100755 => 100644 internal/provider/source_pocket_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_pokeapi_data_source.go
mode change 100755 => 100644 internal/provider/source_pokeapi_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_pokeapi_resource.go
mode change 100755 => 100644 internal/provider/source_pokeapi_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_polygonstockapi_data_source.go
mode change 100755 => 100644 internal/provider/source_polygonstockapi_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_polygonstockapi_resource.go
mode change 100755 => 100644 internal/provider/source_polygonstockapi_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_postgres_data_source.go
mode change 100755 => 100644 internal/provider/source_postgres_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_postgres_resource.go
mode change 100755 => 100644 internal/provider/source_postgres_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_posthog_data_source.go
mode change 100755 => 100644 internal/provider/source_posthog_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_posthog_resource.go
mode change 100755 => 100644 internal/provider/source_posthog_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_postmarkapp_data_source.go
mode change 100755 => 100644 internal/provider/source_postmarkapp_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_postmarkapp_resource.go
mode change 100755 => 100644 internal/provider/source_postmarkapp_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_prestashop_data_source.go
mode change 100755 => 100644 internal/provider/source_prestashop_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_prestashop_resource.go
mode change 100755 => 100644 internal/provider/source_prestashop_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_punkapi_data_source.go
mode change 100755 => 100644 internal/provider/source_punkapi_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_punkapi_resource.go
mode change 100755 => 100644 internal/provider/source_punkapi_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_pypi_data_source.go
mode change 100755 => 100644 internal/provider/source_pypi_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_pypi_resource.go
mode change 100755 => 100644 internal/provider/source_pypi_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_qualaroo_data_source.go
mode change 100755 => 100644 internal/provider/source_qualaroo_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_qualaroo_resource.go
mode change 100755 => 100644 internal/provider/source_qualaroo_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_quickbooks_data_source.go
mode change 100755 => 100644 internal/provider/source_quickbooks_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_quickbooks_resource.go
mode change 100755 => 100644 internal/provider/source_quickbooks_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_railz_data_source.go
mode change 100755 => 100644 internal/provider/source_railz_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_railz_resource.go
mode change 100755 => 100644 internal/provider/source_railz_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_recharge_data_source.go
mode change 100755 => 100644 internal/provider/source_recharge_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_recharge_resource.go
mode change 100755 => 100644 internal/provider/source_recharge_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_recreation_data_source.go
mode change 100755 => 100644 internal/provider/source_recreation_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_recreation_resource.go
mode change 100755 => 100644 internal/provider/source_recreation_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_recruitee_data_source.go
mode change 100755 => 100644 internal/provider/source_recruitee_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_recruitee_resource.go
mode change 100755 => 100644 internal/provider/source_recruitee_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_recurly_data_source.go
mode change 100755 => 100644 internal/provider/source_recurly_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_recurly_resource.go
mode change 100755 => 100644 internal/provider/source_recurly_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_redshift_data_source.go
mode change 100755 => 100644 internal/provider/source_redshift_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_redshift_resource.go
mode change 100755 => 100644 internal/provider/source_redshift_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_retently_data_source.go
mode change 100755 => 100644 internal/provider/source_retently_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_retently_resource.go
mode change 100755 => 100644 internal/provider/source_retently_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_rkicovid_data_source.go
mode change 100755 => 100644 internal/provider/source_rkicovid_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_rkicovid_resource.go
mode change 100755 => 100644 internal/provider/source_rkicovid_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_rss_data_source.go
mode change 100755 => 100644 internal/provider/source_rss_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_rss_resource.go
mode change 100755 => 100644 internal/provider/source_rss_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_s3_data_source.go
mode change 100755 => 100644 internal/provider/source_s3_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_s3_resource.go
mode change 100755 => 100644 internal/provider/source_s3_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_salesforce_data_source.go
mode change 100755 => 100644 internal/provider/source_salesforce_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_salesforce_resource.go
mode change 100755 => 100644 internal/provider/source_salesforce_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_salesloft_data_source.go
mode change 100755 => 100644 internal/provider/source_salesloft_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_salesloft_resource.go
mode change 100755 => 100644 internal/provider/source_salesloft_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_sapfieldglass_data_source.go
mode change 100755 => 100644 internal/provider/source_sapfieldglass_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_sapfieldglass_resource.go
mode change 100755 => 100644 internal/provider/source_sapfieldglass_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_secoda_data_source.go
mode change 100755 => 100644 internal/provider/source_secoda_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_secoda_resource.go
mode change 100755 => 100644 internal/provider/source_secoda_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_sendgrid_data_source.go
mode change 100755 => 100644 internal/provider/source_sendgrid_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_sendgrid_resource.go
mode change 100755 => 100644 internal/provider/source_sendgrid_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_sendinblue_data_source.go
mode change 100755 => 100644 internal/provider/source_sendinblue_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_sendinblue_resource.go
mode change 100755 => 100644 internal/provider/source_sendinblue_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_senseforce_data_source.go
mode change 100755 => 100644 internal/provider/source_senseforce_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_senseforce_resource.go
mode change 100755 => 100644 internal/provider/source_senseforce_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_sentry_data_source.go
mode change 100755 => 100644 internal/provider/source_sentry_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_sentry_resource.go
mode change 100755 => 100644 internal/provider/source_sentry_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_sftp_data_source.go
mode change 100755 => 100644 internal/provider/source_sftp_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_sftp_resource.go
mode change 100755 => 100644 internal/provider/source_sftp_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_sftpbulk_data_source.go
mode change 100755 => 100644 internal/provider/source_sftpbulk_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_sftpbulk_resource.go
mode change 100755 => 100644 internal/provider/source_sftpbulk_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_shopify_data_source.go
mode change 100755 => 100644 internal/provider/source_shopify_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_shopify_resource.go
mode change 100755 => 100644 internal/provider/source_shopify_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_shortio_data_source.go
mode change 100755 => 100644 internal/provider/source_shortio_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_shortio_resource.go
mode change 100755 => 100644 internal/provider/source_shortio_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_slack_data_source.go
mode change 100755 => 100644 internal/provider/source_slack_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_slack_resource.go
mode change 100755 => 100644 internal/provider/source_slack_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_smaily_data_source.go
mode change 100755 => 100644 internal/provider/source_smaily_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_smaily_resource.go
mode change 100755 => 100644 internal/provider/source_smaily_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_smartengage_data_source.go
mode change 100755 => 100644 internal/provider/source_smartengage_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_smartengage_resource.go
mode change 100755 => 100644 internal/provider/source_smartengage_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_smartsheets_data_source.go
mode change 100755 => 100644 internal/provider/source_smartsheets_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_smartsheets_resource.go
mode change 100755 => 100644 internal/provider/source_smartsheets_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_snapchatmarketing_data_source.go
mode change 100755 => 100644 internal/provider/source_snapchatmarketing_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_snapchatmarketing_resource.go
mode change 100755 => 100644 internal/provider/source_snapchatmarketing_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_snowflake_data_source.go
mode change 100755 => 100644 internal/provider/source_snowflake_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_snowflake_resource.go
mode change 100755 => 100644 internal/provider/source_snowflake_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_sonarcloud_data_source.go
mode change 100755 => 100644 internal/provider/source_sonarcloud_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_sonarcloud_resource.go
mode change 100755 => 100644 internal/provider/source_sonarcloud_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_spacexapi_data_source.go
mode change 100755 => 100644 internal/provider/source_spacexapi_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_spacexapi_resource.go
mode change 100755 => 100644 internal/provider/source_spacexapi_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_square_data_source.go
mode change 100755 => 100644 internal/provider/source_square_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_square_resource.go
mode change 100755 => 100644 internal/provider/source_square_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_strava_data_source.go
mode change 100755 => 100644 internal/provider/source_strava_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_strava_resource.go
mode change 100755 => 100644 internal/provider/source_strava_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_stripe_data_source.go
mode change 100755 => 100644 internal/provider/source_stripe_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_stripe_resource.go
mode change 100755 => 100644 internal/provider/source_stripe_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_surveymonkey_data_source.go
mode change 100755 => 100644 internal/provider/source_surveymonkey_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_surveymonkey_resource.go
mode change 100755 => 100644 internal/provider/source_surveymonkey_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_surveysparrow_data_source.go
mode change 100755 => 100644 internal/provider/source_surveysparrow_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_surveysparrow_resource.go
mode change 100755 => 100644 internal/provider/source_surveysparrow_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_tempo_data_source.go
mode change 100755 => 100644 internal/provider/source_tempo_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_tempo_resource.go
mode change 100755 => 100644 internal/provider/source_tempo_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_theguardianapi_data_source.go
mode change 100755 => 100644 internal/provider/source_theguardianapi_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_theguardianapi_resource.go
mode change 100755 => 100644 internal/provider/source_theguardianapi_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_tiktokmarketing_data_source.go
mode change 100755 => 100644 internal/provider/source_tiktokmarketing_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_tiktokmarketing_resource.go
mode change 100755 => 100644 internal/provider/source_tiktokmarketing_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_todoist_data_source.go
mode change 100755 => 100644 internal/provider/source_todoist_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_todoist_resource.go
mode change 100755 => 100644 internal/provider/source_todoist_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_trello_data_source.go
mode change 100755 => 100644 internal/provider/source_trello_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_trello_resource.go
mode change 100755 => 100644 internal/provider/source_trello_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_trustpilot_data_source.go
mode change 100755 => 100644 internal/provider/source_trustpilot_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_trustpilot_resource.go
mode change 100755 => 100644 internal/provider/source_trustpilot_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_tvmazeschedule_data_source.go
mode change 100755 => 100644 internal/provider/source_tvmazeschedule_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_tvmazeschedule_resource.go
mode change 100755 => 100644 internal/provider/source_tvmazeschedule_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_twilio_data_source.go
mode change 100755 => 100644 internal/provider/source_twilio_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_twilio_resource.go
mode change 100755 => 100644 internal/provider/source_twilio_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_twiliotaskrouter_data_source.go
mode change 100755 => 100644 internal/provider/source_twiliotaskrouter_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_twiliotaskrouter_resource.go
mode change 100755 => 100644 internal/provider/source_twiliotaskrouter_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_twitter_data_source.go
mode change 100755 => 100644 internal/provider/source_twitter_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_twitter_resource.go
mode change 100755 => 100644 internal/provider/source_twitter_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_typeform_data_source.go
mode change 100755 => 100644 internal/provider/source_typeform_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_typeform_resource.go
mode change 100755 => 100644 internal/provider/source_typeform_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_uscensus_data_source.go
mode change 100755 => 100644 internal/provider/source_uscensus_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_uscensus_resource.go
mode change 100755 => 100644 internal/provider/source_uscensus_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_vantage_data_source.go
mode change 100755 => 100644 internal/provider/source_vantage_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_vantage_resource.go
mode change 100755 => 100644 internal/provider/source_vantage_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_webflow_data_source.go
mode change 100755 => 100644 internal/provider/source_webflow_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_webflow_resource.go
mode change 100755 => 100644 internal/provider/source_webflow_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_whiskyhunter_data_source.go
mode change 100755 => 100644 internal/provider/source_whiskyhunter_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_whiskyhunter_resource.go
mode change 100755 => 100644 internal/provider/source_whiskyhunter_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_wikipediapageviews_data_source.go
mode change 100755 => 100644 internal/provider/source_wikipediapageviews_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_wikipediapageviews_resource.go
mode change 100755 => 100644 internal/provider/source_wikipediapageviews_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_woocommerce_data_source.go
mode change 100755 => 100644 internal/provider/source_woocommerce_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_woocommerce_resource.go
mode change 100755 => 100644 internal/provider/source_woocommerce_resource_sdk.go
delete mode 100755 internal/provider/source_xero_data_source.go
delete mode 100755 internal/provider/source_xero_data_source_sdk.go
delete mode 100755 internal/provider/source_xero_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_xkcd_data_source.go
mode change 100755 => 100644 internal/provider/source_xkcd_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_xkcd_resource.go
mode change 100755 => 100644 internal/provider/source_xkcd_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_yandexmetrica_data_source.go
mode change 100755 => 100644 internal/provider/source_yandexmetrica_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_yandexmetrica_resource.go
mode change 100755 => 100644 internal/provider/source_yandexmetrica_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_yotpo_data_source.go
mode change 100755 => 100644 internal/provider/source_yotpo_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_yotpo_resource.go
mode change 100755 => 100644 internal/provider/source_yotpo_resource_sdk.go
delete mode 100755 internal/provider/source_younium_data_source.go
delete mode 100755 internal/provider/source_younium_data_source_sdk.go
delete mode 100755 internal/provider/source_younium_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_youtubeanalytics_data_source.go
mode change 100755 => 100644 internal/provider/source_youtubeanalytics_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_youtubeanalytics_resource.go
mode change 100755 => 100644 internal/provider/source_youtubeanalytics_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_zendeskchat_data_source.go
mode change 100755 => 100644 internal/provider/source_zendeskchat_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_zendeskchat_resource.go
mode change 100755 => 100644 internal/provider/source_zendeskchat_resource_sdk.go
create mode 100644 internal/provider/source_zendesksell_data_source.go
create mode 100644 internal/provider/source_zendesksell_data_source_sdk.go
rename internal/provider/{source_bigcommerce_resource.go => source_zendesksell_resource.go} (69%)
mode change 100755 => 100644
create mode 100644 internal/provider/source_zendesksell_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_zendesksunshine_data_source.go
mode change 100755 => 100644 internal/provider/source_zendesksunshine_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_zendesksunshine_resource.go
mode change 100755 => 100644 internal/provider/source_zendesksunshine_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_zendesksupport_data_source.go
mode change 100755 => 100644 internal/provider/source_zendesksupport_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_zendesksupport_resource.go
mode change 100755 => 100644 internal/provider/source_zendesksupport_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_zendesktalk_data_source.go
mode change 100755 => 100644 internal/provider/source_zendesktalk_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_zendesktalk_resource.go
mode change 100755 => 100644 internal/provider/source_zendesktalk_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_zenloop_data_source.go
mode change 100755 => 100644 internal/provider/source_zenloop_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_zenloop_resource.go
mode change 100755 => 100644 internal/provider/source_zenloop_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_zohocrm_data_source.go
mode change 100755 => 100644 internal/provider/source_zohocrm_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_zohocrm_resource.go
mode change 100755 => 100644 internal/provider/source_zohocrm_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_zoom_data_source.go
mode change 100755 => 100644 internal/provider/source_zoom_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_zoom_resource.go
mode change 100755 => 100644 internal/provider/source_zoom_resource_sdk.go
mode change 100755 => 100644 internal/provider/source_zuora_data_source.go
mode change 100755 => 100644 internal/provider/source_zuora_data_source_sdk.go
mode change 100755 => 100644 internal/provider/source_zuora_resource.go
mode change 100755 => 100644 internal/provider/source_zuora_resource_sdk.go
rename internal/provider/{type_source_linkedin_ads_ad_analytics_report_configuration.go => type_ad_analytics_report_configuration.go} (83%)
mode change 100755 => 100644
rename internal/provider/{type_destination_firebolt_loading_method_sql_inserts.go => type_aescbc_envelope_encryption.go} (61%)
mode change 100755 => 100644
rename internal/provider/{type_destination_databricks_data_source_amazon_s3.go => type_amazon_s3.go} (81%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_and_group.go
rename internal/provider/{type_source_xkcd.go => type_api_key.go} (68%)
mode change 100755 => 100644
rename internal/provider/{type_source_mysql_ssl_modes_required.go => type_api_key_auth.go} (67%)
mode change 100755 => 100644
rename internal/provider/{type_destination_elasticsearch_authentication_method_api_key_secret.go => type_api_key_secret.go} (69%)
mode change 100755 => 100644
rename internal/provider/{type_source_mysql_ssl_modes_preferred.go => type_api_password.go} (67%)
mode change 100755 => 100644
rename internal/provider/{type_source_onesignal_applications.go => type_applications.go} (87%)
mode change 100755 => 100644
rename internal/provider/{type_source_snowflake_authorization_method_o_auth20.go => type_authenticate_via_google_oauth.go} (77%)
mode change 100755 => 100644
rename internal/provider/{type_source_youtube_analytics_authenticate_via_o_auth20.go => type_authenticate_via_harvest_o_auth.go} (87%)
mode change 100755 => 100644
rename internal/provider/{type_source_okta_authorization_method_o_auth20.go => type_authenticate_via_lever_o_auth.go} (75%)
mode change 100755 => 100644
rename internal/provider/{type_source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft.go => type_authenticate_via_microsoft.go} (70%)
mode change 100755 => 100644
rename internal/provider/{type_source_linkedin_ads_authentication_o_auth20.go => type_authenticate_via_microsoft_o_auth20.go} (75%)
mode change 100755 => 100644
rename internal/provider/{type_source_alloydb_ssl_modes_allow1.go => type_authenticate_with_api_token.go} (70%)
mode change 100755 => 100644
rename internal/provider/{type_source_github_authentication_personal_access_token.go => type_authenticate_with_personal_access_token.go} (65%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_avro_apache_avro.go
rename internal/provider/{type_source_s3_file_format_avro.go => type_avro_format.go} (66%)
mode change 100755 => 100644
rename internal/provider/{type_source_file_secure_storage_provider_az_blob_azure_blob_storage.go => type_az_blob_azure_blob_storage.go} (73%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_azure_open_ai.go
create mode 100644 internal/provider/type_by_markdown_header.go
create mode 100644 internal/provider/type_by_programming_language.go
rename internal/provider/{type_destination_clickhouse_ssh_tunnel_method_no_tunnel.go => type_by_separator.go} (54%)
mode change 100755 => 100644
rename internal/provider/{type_destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2.go => type_bzip2.go} (71%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_central_api_router.go
rename internal/provider/{type_destination_langchain_indexing_chroma_local_persistance.go => type_chroma_local_persistance.go} (71%)
mode change 100755 => 100644
rename internal/provider/{type_destination_milvus_embedding_cohere.go => type_cohere.go} (70%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_connection_schedule.go
create mode 100644 internal/provider/type_connection_schedule_response.go
rename internal/provider/{type_source_s3_file_format_csv.go => type_csv.go} (89%)
mode change 100755 => 100644
rename internal/provider/{type_destination_postgres_ssl_modes_allow.go => type_csv_comma_separated_values.go} (65%)
mode change 100755 => 100644
rename internal/provider/{type_source_s3_file_based_stream_config_format_csv_format.go => type_csv_format.go} (52%)
mode change 100755 => 100644
rename internal/provider/{type_source_google_ads_custom_queries.go => type_custom_queries.go} (84%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_custom_report_config.go
rename internal/provider/{type_destination_gcs_output_format_avro_apache_avro_compression_codec_xz.go => type_deflate.go} (78%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_destination_aws_datalake.go
delete mode 100755 internal/provider/type_destination_aws_datalake1.go
mode change 100755 => 100644 internal/provider/type_destination_aws_datalake_authentication_mode.go
delete mode 100755 internal/provider/type_destination_aws_datalake_authentication_mode_iam_role.go
mode change 100755 => 100644 internal/provider/type_destination_aws_datalake_output_format_wildcard.go
mode change 100755 => 100644 internal/provider/type_destination_azure_blob_storage.go
create mode 100644 internal/provider/type_destination_azure_blob_storage_json_lines_newline_delimited_json.go
mode change 100755 => 100644 internal/provider/type_destination_azure_blob_storage_output_format.go
delete mode 100755 internal/provider/type_destination_azure_blob_storage_output_format_csv_comma_separated_values.go
mode change 100755 => 100644 internal/provider/type_destination_bigquery.go
create mode 100644 internal/provider/type_destination_bigquery_credential.go
delete mode 100755 internal/provider/type_destination_bigquery_denormalized.go
delete mode 100755 internal/provider/type_destination_bigquery_denormalized_loading_method.go
delete mode 100755 internal/provider/type_destination_bigquery_denormalized_loading_method_gcs_staging.go
delete mode 100755 internal/provider/type_destination_bigquery_denormalized_loading_method_gcs_staging_credential.go
delete mode 100755 internal/provider/type_destination_bigquery_denormalized_update_loading_method_gcs_staging.go
delete mode 100755 internal/provider/type_destination_bigquery_denormalized_update_loading_method_gcs_staging_credential.go
create mode 100644 internal/provider/type_destination_bigquery_hmac_key.go
mode change 100755 => 100644 internal/provider/type_destination_bigquery_loading_method.go
delete mode 100755 internal/provider/type_destination_bigquery_loading_method_gcs_staging.go
delete mode 100755 internal/provider/type_destination_bigquery_loading_method_gcs_staging_credential.go
delete mode 100755 internal/provider/type_destination_bigquery_update_loading_method_gcs_staging.go
delete mode 100755 internal/provider/type_destination_bigquery_update_loading_method_gcs_staging_credential.go
mode change 100755 => 100644 internal/provider/type_destination_clickhouse.go
mode change 100755 => 100644 internal/provider/type_destination_clickhouse_ssh_tunnel_method.go
delete mode 100755 internal/provider/type_destination_clickhouse_ssh_tunnel_method_ssh_key_authentication.go
mode change 100755 => 100644 internal/provider/type_destination_convex.go
mode change 100755 => 100644 internal/provider/type_destination_cumulio.go
mode change 100755 => 100644 internal/provider/type_destination_databend.go
mode change 100755 => 100644 internal/provider/type_destination_databricks.go
delete mode 100755 internal/provider/type_destination_databricks1.go
rename internal/provider/{type_destination_databricks_data_source_azure_blob_storage.go => type_destination_databricks_azure_blob_storage.go} (79%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_destination_databricks_data_source1.go
delete mode 100755 internal/provider/type_destination_databricks_data_source2.go
delete mode 100755 internal/provider/type_destination_databricks_data_source_amazon_s31.go
delete mode 100755 internal/provider/type_destination_databricks_update_data_source_amazon_s3.go
delete mode 100755 internal/provider/type_destination_databricks_update_data_source_amazon_s31.go
mode change 100755 => 100644 internal/provider/type_destination_dev_null.go
mode change 100755 => 100644 internal/provider/type_destination_dev_null_test_destination.go
create mode 100644 internal/provider/type_destination_duckdb.go
mode change 100755 => 100644 internal/provider/type_destination_dynamodb.go
delete mode 100755 internal/provider/type_destination_dynamodb1.go
mode change 100755 => 100644 internal/provider/type_destination_elasticsearch.go
mode change 100755 => 100644 internal/provider/type_destination_elasticsearch_authentication_method.go
delete mode 100755 internal/provider/type_destination_elasticsearch_authentication_method_username_password.go
mode change 100755 => 100644 internal/provider/type_destination_firebolt.go
mode change 100755 => 100644 internal/provider/type_destination_firebolt_loading_method.go
mode change 100755 => 100644 internal/provider/type_destination_firestore.go
mode change 100755 => 100644 internal/provider/type_destination_gcs.go
mode change 100755 => 100644 internal/provider/type_destination_gcs_authentication.go
create mode 100644 internal/provider/type_destination_gcs_compression.go
create mode 100644 internal/provider/type_destination_gcs_compression_codec.go
create mode 100644 internal/provider/type_destination_gcs_csv_comma_separated_values.go
create mode 100644 internal/provider/type_destination_gcs_json_lines_newline_delimited_json.go
mode change 100755 => 100644 internal/provider/type_destination_gcs_output_format.go
delete mode 100755 internal/provider/type_destination_gcs_output_format_avro_apache_avro.go
delete mode 100755 internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec.go
delete mode 100755 internal/provider/type_destination_gcs_output_format_csv_comma_separated_values.go
delete mode 100755 internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression.go
delete mode 100755 internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression_no_compression.go
delete mode 100755 internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json.go
delete mode 100755 internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression.go
delete mode 100755 internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression.go
delete mode 100755 internal/provider/type_destination_gcs_output_format_parquet_columnar_storage.go
rename internal/provider/{type_destination_s3_output_format_parquet_columnar_storage.go => type_destination_gcs_parquet_columnar_storage.go} (90%)
mode change 100755 => 100644
rename internal/provider/{type_destination_gcs_output_format_csv_comma_separated_values_compression_gzip.go => type_destination_gcs_update_no_compression.go} (73%)
mode change 100755 => 100644
delete mode 100755 internal/provider/type_destination_gcs_update_output_format_avro_apache_avro.go
delete mode 100755 internal/provider/type_destination_gcs_update_output_format_avro_apache_avro_compression_codec.go
delete mode 100755 internal/provider/type_destination_gcs_update_output_format_csv_comma_separated_values.go
delete mode 100755 internal/provider/type_destination_gcs_update_output_format_csv_comma_separated_values_compression.go
delete mode 100755 internal/provider/type_destination_gcs_update_output_format_json_lines_newline_delimited_json.go
delete mode 100755 internal/provider/type_destination_gcs_update_output_format_json_lines_newline_delimited_json_compression.go
mode change 100755 => 100644 internal/provider/type_destination_google_sheets.go
mode change 100755 => 100644 internal/provider/type_destination_google_sheets_authentication_via_google_o_auth.go
mode change 100755 => 100644 internal/provider/type_destination_keen.go
mode change 100755 => 100644 internal/provider/type_destination_kinesis.go
mode change 100755 => 100644 internal/provider/type_destination_langchain.go
mode change 100755 => 100644 internal/provider/type_destination_langchain_embedding.go
mode change 100755 => 100644 internal/provider/type_destination_langchain_indexing.go
delete mode 100755 internal/provider/type_destination_langchain_indexing_pinecone.go
rename internal/provider/{type_destination_pinecone_indexing.go => type_destination_langchain_pinecone.go} (88%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_destination_langchain_processing_config_model.go
mode change 100755 => 100644 internal/provider/type_destination_milvus.go
rename internal/provider/{type_destination_milvus_indexing_authentication_api_token.go => type_destination_milvus_api_token.go} (66%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_destination_milvus_authentication.go
mode change 100755 => 100644 internal/provider/type_destination_milvus_embedding.go
mode change 100755 => 100644 internal/provider/type_destination_milvus_indexing.go
delete mode 100755 internal/provider/type_destination_milvus_indexing_authentication.go
mode change 100755 => 100644 internal/provider/type_destination_milvus_processing_config_model.go
create mode 100644 internal/provider/type_destination_milvus_text_splitter.go
mode change 100755 => 100644 internal/provider/type_destination_mongodb.go
mode change 100755 => 100644 internal/provider/type_destination_mongodb_authorization_type.go
delete mode 100755 internal/provider/type_destination_mongodb_authorization_type_login_password.go
delete mode 100755 internal/provider/type_destination_mongodb_authorization_type_none.go
mode change 100755 => 100644 internal/provider/type_destination_mongodb_mongo_db_instance_type.go
delete mode 100755 internal/provider/type_destination_mongodb_mongo_db_instance_type_replica_set.go
delete mode 100755 internal/provider/type_destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance.go
delete mode 100755 internal/provider/type_destination_mongodb_ssh_tunnel_method.go
mode change 100755 => 100644 internal/provider/type_destination_mssql.go
delete mode 100755 internal/provider/type_destination_mssql_ssh_tunnel_method.go
mode change 100755 => 100644 internal/provider/type_destination_mssql_ssl_method.go
delete mode 100755 internal/provider/type_destination_mssql_ssl_method_encrypted_trust_server_certificate.go
delete mode 100755 internal/provider/type_destination_mysql.go
delete mode 100755 internal/provider/type_destination_mysql_ssh_tunnel_method.go
mode change 100755 => 100644 internal/provider/type_destination_oracle.go
delete mode 100755 internal/provider/type_destination_oracle_ssh_tunnel_method.go
mode change 100755 => 100644 internal/provider/type_destination_pinecone.go
mode change 100755 => 100644 internal/provider/type_destination_pinecone_embedding.go
mode change 100755 => 100644 internal/provider/type_destination_postgres.go
delete mode 100755 internal/provider/type_destination_postgres_ssh_tunnel_method.go
mode change 100755 => 100644 internal/provider/type_destination_postgres_ssl_modes.go
delete mode 100755 internal/provider/type_destination_postgres_ssl_modes_disable.go
delete mode 100755 internal/provider/type_destination_postgres_ssl_modes_prefer.go
delete mode 100755 internal/provider/type_destination_postgres_ssl_modes_require.go
delete mode 100755 internal/provider/type_destination_postgres_ssl_modes_verify_full.go
mode change 100755 => 100644 internal/provider/type_destination_pubsub.go
create mode 100644 internal/provider/type_destination_qdrant.go
create mode 100644 internal/provider/type_destination_qdrant_authentication_method.go
create mode 100644 internal/provider/type_destination_qdrant_distance_metric.go
create mode 100644 internal/provider/type_destination_qdrant_indexing.go
mode change 100755 => 100644 internal/provider/type_destination_redis.go
delete mode 100755 internal/provider/type_destination_redis_ssh_tunnel_method.go
mode change 100755 => 100644 internal/provider/type_destination_redis_ssl_modes.go
mode change 100755 => 100644 internal/provider/type_destination_redshift.go
delete mode 100755 internal/provider/type_destination_redshift1.go
create mode 100644 internal/provider/type_destination_redshift_encryption.go
delete mode 100755 internal/provider/type_destination_redshift_ssh_tunnel_method.go
delete mode 100755 internal/provider/type_destination_redshift_update_uploading_method_s3_staging.go
delete mode 100755 internal/provider/type_destination_redshift_update_uploading_method_s3_staging1.go
delete mode 100755 internal/provider/type_destination_redshift_update_uploading_method_s3_staging_encryption.go
mode change 100755 => 100644 internal/provider/type_destination_redshift_uploading_method.go
delete mode 100755 internal/provider/type_destination_redshift_uploading_method1.go
delete mode 100755 internal/provider/type_destination_redshift_uploading_method_s3_staging.go
delete mode 100755 internal/provider/type_destination_redshift_uploading_method_s3_staging1.go
delete mode 100755 internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption.go
delete mode 100755 internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption_aescbc_envelope_encryption.go
delete mode 100755 internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption_no_encryption.go
delete mode 100755 internal/provider/type_destination_redshift_uploading_method_standard.go
mode change 100755 => 100644 internal/provider/type_destination_s3.go
delete mode 100755 internal/provider/type_destination_s31.go
mode change 100755 => 100644 internal/provider/type_destination_s3_glue.go
delete mode 100755 internal/provider/type_destination_s3_glue1.go
mode change 100755 => 100644 internal/provider/type_destination_s3_glue_output_format.go
delete mode 100755 internal/provider/type_destination_s3_glue_output_format_json_lines_newline_delimited_json.go
delete mode 100755 internal/provider/type_destination_s3_glue_output_format_json_lines_newline_delimited_json_compression.go
delete mode 100755 internal/provider/type_destination_s3_glue_update_output_format_json_lines_newline_delimited_json.go
delete mode 100755 internal/provider/type_destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression.go
create mode 100644 internal/provider/type_destination_s3_json_lines_newline_delimited_json.go
mode change 100755 => 100644 internal/provider/type_destination_s3_output_format.go
delete mode 100755 internal/provider/type_destination_s3_output_format_avro_apache_avro.go
delete mode 100755 internal/provider/type_destination_s3_output_format_avro_apache_avro_compression_codec.go
delete mode 100755 internal/provider/type_destination_s3_output_format_csv_comma_separated_values.go
delete mode 100755 internal/provider/type_destination_s3_output_format_csv_comma_separated_values_compression.go
delete mode 100755 internal/provider/type_destination_s3_output_format_json_lines_newline_delimited_json.go
delete mode 100755 internal/provider/type_destination_s3_output_format_json_lines_newline_delimited_json_compression.go
delete mode 100755 internal/provider/type_destination_s3_update_output_format_avro_apache_avro.go
delete mode 100755 internal/provider/type_destination_s3_update_output_format_avro_apache_avro_compression_codec.go
delete mode 100755 internal/provider/type_destination_s3_update_output_format_csv_comma_separated_values.go
delete mode 100755 internal/provider/type_destination_s3_update_output_format_csv_comma_separated_values_compression.go
delete mode 100755 internal/provider/type_destination_s3_update_output_format_json_lines_newline_delimited_json.go
delete mode 100755 internal/provider/type_destination_s3_update_output_format_json_lines_newline_delimited_json_compression.go
mode change 100755 => 100644 internal/provider/type_destination_sftp_json.go
mode change 100755 => 100644 internal/provider/type_destination_snowflake.go
mode change 100755 => 100644 internal/provider/type_destination_snowflake_authorization_method.go
delete mode 100755 internal/provider/type_destination_snowflake_authorization_method_username_and_password.go
mode change 100755 => 100644 internal/provider/type_destination_timeplus.go
mode change 100755 => 100644 internal/provider/type_destination_typesense.go
mode change 100755 => 100644 internal/provider/type_destination_vertica.go
delete mode 100755 internal/provider/type_destination_vertica_ssh_tunnel_method.go
create mode 100644 internal/provider/type_destination_weaviate.go
create mode 100644 internal/provider/type_destination_weaviate_authentication.go
create mode 100644 internal/provider/type_destination_weaviate_embedding.go
create mode 100644 internal/provider/type_destination_weaviate_indexing.go
mode change 100755 => 100644 internal/provider/type_destination_xata.go
rename internal/provider/{type_destination_langchain_indexing_doc_array_hnsw_search.go => type_doc_array_hnsw_search.go} (66%)
mode change 100755 => 100644
rename internal/provider/{type_destination_databricks_data_source_recommended_managed_tables.go => type_document_file_type_format_experimental.go} (55%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_enabled.go
rename internal/provider/{type_destination_mssql_ssl_method_encrypted_verify_certificate.go => type_encrypted_verify_certificate.go} (64%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_expression.go
rename internal/provider/{type_destination_firebolt_loading_method_external_table_via_s3.go => type_external_table_via_s3.go} (76%)
mode change 100755 => 100644
rename internal/provider/{type_destination_azure_blob_storage_output_format_json_lines_newline_delimited_json.go => type_field_name_mapping_config_model.go} (54%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_file_based_stream_config.go
rename internal/provider/{type_destination_milvus_embedding_from_field.go => type_from_field.go} (73%)
mode change 100755 => 100644
rename internal/provider/{type_source_file_secure_storage_provider_gcs_google_cloud_storage.go => type_gcs_google_cloud_storage.go} (65%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_gcs_staging.go
rename internal/provider/{type_destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip.go => type_gzip.go} (71%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_header.go
rename internal/provider/{type_destination_bigquery_loading_method_gcs_staging_credential_hmac_key.go => type_hmac_key.go} (81%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_https_public_web.go
create mode 100644 internal/provider/type_iam_role.go
rename internal/provider/{type_source_file_secure_storage_provider_s3_amazon_web_services.go => type_iam_user.go} (70%)
mode change 100755 => 100644
rename internal/provider/{type_source_facebook_marketing_insight_config.go => type_insight_config.go} (93%)
mode change 100755 => 100644
rename internal/provider/{type_destination_aws_datalake_output_format_wildcard_parquet_columnar_storage.go => type_json_lines_newline_delimited_json.go} (77%)
mode change 100755 => 100644
rename internal/provider/{type_source_s3_file_format_jsonl.go => type_jsonl.go} (78%)
mode change 100755 => 100644
rename internal/provider/{type_destination_snowflake_authorization_method_key_pair_authentication.go => type_key_pair_authentication.go} (68%)
mode change 100755 => 100644
rename internal/provider/{type_source_alloydb_replication_method_logical_replication_cdc.go => type_logical_replication_cdc.go} (83%)
mode change 100755 => 100644
rename internal/provider/{type_destination_mongodb_mongo_db_instance_type_mongo_db_atlas.go => type_mongo_db_atlas.go} (79%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_mongo_db_atlas_replica_set.go
rename internal/provider/{type_source_oracle_encryption_native_network_encryption_nne.go => type_native_network_encryption_nne.go} (63%)
mode change 100755 => 100644
rename internal/provider/{type_destination_gcs_output_format_avro_apache_avro_compression_codec_snappy.go => type_no_compression.go} (71%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_not_expression.go
rename internal/provider/{type_source_github_authentication_o_auth.go => type_o_auth.go} (76%)
mode change 100755 => 100644
rename internal/provider/{type_source_zendesk_chat_authorization_method_o_auth20.go => type_o_auth20.go} (76%)
mode change 100755 => 100644
rename internal/provider/{type_source_vantage.go => type_o_auth2_access_token.go} (73%)
mode change 100755 => 100644
rename internal/provider/{type_source_auth0_authentication_method_o_auth2_confidential_application.go => type_o_auth2_confidential_application.go} (71%)
mode change 100755 => 100644
rename internal/provider/{type_destination_langchain_embedding_open_ai.go => type_open_ai.go} (69%)
mode change 100755 => 100644
rename internal/provider/{type_source_airtable_authentication_personal_access_token.go => type_open_ai_compatible.go} (54%)
mode change 100755 => 100644
rename internal/provider/{type_source_s3_file_format_parquet.go => type_parquet.go} (77%)
mode change 100755 => 100644
rename internal/provider/{type_destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json.go => type_parquet_columnar_storage.go} (75%)
mode change 100755 => 100644
rename internal/provider/{type_destination_langchain_embedding_fake.go => type_parquet_format.go} (65%)
mode change 100755 => 100644
rename internal/provider/{type_destination_clickhouse_ssh_tunnel_method_password_authentication.go => type_password_authentication.go} (74%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_project_secret.go
rename internal/provider/{type_source_mysql_update_method_read_changes_using_binary_log_cdc.go => type_read_changes_using_binary_log_cdc.go} (70%)
mode change 100755 => 100644
rename internal/provider/{type_source_mssql_update_method_read_changes_using_change_data_capture_cdc.go => type_read_changes_using_change_data_capture_cdc.go} (73%)
mode change 100755 => 100644
rename internal/provider/{type_source_mongodb_mongo_db_instance_type_replica_set.go => type_replica_set.go} (84%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_report_config.go
create mode 100644 internal/provider/type_s3_staging.go
rename internal/provider/{type_source_tiktok_marketing_authentication_method_sandbox_access_token.go => type_sandbox_access_token.go} (68%)
mode change 100755 => 100644
rename internal/provider/{type_source_file_secure_storage_provider_ssh_secure_shell.go => type_scp_secure_copy_protocol.go} (75%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_self_managed_replica_set.go
create mode 100644 internal/provider/type_service_account.go
rename internal/provider/{type_source_google_analytics_data_api_credentials_service_account_key_authentication.go => type_service_account_key.go} (61%)
mode change 100755 => 100644
rename internal/provider/{type_destination_bigquery_loading_method_standard_inserts.go => type_service_account_key_authentication.go} (60%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_service_name.go
rename internal/provider/{type_source_launchdarkly.go => type_single_store_access_token.go} (71%)
mode change 100755 => 100644
rename internal/provider/{type_destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression.go => type_snappy.go} (69%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_aha.go
mode change 100755 => 100644 internal/provider/type_source_aircall.go
mode change 100755 => 100644 internal/provider/type_source_airtable.go
mode change 100755 => 100644 internal/provider/type_source_airtable_authentication.go
rename internal/provider/{type_source_xero_authenticate_via_xero_o_auth.go => type_source_airtable_o_auth20.go} (89%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_alloydb.go
delete mode 100755 internal/provider/type_source_alloydb1.go
rename internal/provider/{type_source_alloydb_ssl_modes_allow.go => type_source_alloydb_allow.go} (71%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_alloydb_replication_method.go
delete mode 100755 internal/provider/type_source_alloydb_replication_method1.go
delete mode 100755 internal/provider/type_source_alloydb_replication_method_logical_replication_cdc1.go
delete mode 100755 internal/provider/type_source_alloydb_replication_method_standard.go
delete mode 100755 internal/provider/type_source_alloydb_replication_method_standard_xmin.go
delete mode 100755 internal/provider/type_source_alloydb_ssh_tunnel_method.go
mode change 100755 => 100644 internal/provider/type_source_alloydb_ssl_modes.go
delete mode 100755 internal/provider/type_source_alloydb_ssl_modes1.go
delete mode 100755 internal/provider/type_source_alloydb_ssl_modes_disable.go
delete mode 100755 internal/provider/type_source_alloydb_ssl_modes_disable1.go
delete mode 100755 internal/provider/type_source_alloydb_ssl_modes_prefer.go
delete mode 100755 internal/provider/type_source_alloydb_ssl_modes_prefer1.go
delete mode 100755 internal/provider/type_source_alloydb_ssl_modes_require.go
delete mode 100755 internal/provider/type_source_alloydb_ssl_modes_require1.go
delete mode 100755 internal/provider/type_source_alloydb_ssl_modes_verify_ca1.go
delete mode 100755 internal/provider/type_source_alloydb_ssl_modes_verify_full.go
delete mode 100755 internal/provider/type_source_alloydb_ssl_modes_verify_full1.go
delete mode 100755 internal/provider/type_source_alloydb_update_replication_method_logical_replication_cdc.go
delete mode 100755 internal/provider/type_source_alloydb_update_replication_method_logical_replication_cdc1.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_allow.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_allow1.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_disable.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_disable1.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_prefer.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_prefer1.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_require.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_require1.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_verify_ca.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_verify_ca1.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_verify_full.go
delete mode 100755 internal/provider/type_source_alloydb_update_ssl_modes_verify_full1.go
rename internal/provider/{type_source_alloydb_ssl_modes_verify_ca.go => type_source_alloydb_verify_ca.go} (83%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_amazon_ads.go
mode change 100755 => 100644 internal/provider/type_source_amazon_seller_partner.go
mode change 100755 => 100644 internal/provider/type_source_amazon_sqs.go
mode change 100755 => 100644 internal/provider/type_source_amplitude.go
mode change 100755 => 100644 internal/provider/type_source_apify_dataset.go
mode change 100755 => 100644 internal/provider/type_source_appfollow.go
mode change 100755 => 100644 internal/provider/type_source_asana.go
mode change 100755 => 100644 internal/provider/type_source_asana_authentication_mechanism.go
delete mode 100755 internal/provider/type_source_asana_authentication_mechanism_authenticate_via_asana_oauth.go
delete mode 100755 internal/provider/type_source_asana_authentication_mechanism_authenticate_with_personal_access_token.go
mode change 100755 => 100644 internal/provider/type_source_auth0.go
mode change 100755 => 100644 internal/provider/type_source_auth0_authentication_method.go
delete mode 100755 internal/provider/type_source_auth0_authentication_method_o_auth2_access_token.go
mode change 100755 => 100644 internal/provider/type_source_aws_cloudtrail.go
mode change 100755 => 100644 internal/provider/type_source_azure_blob_storage.go
create mode 100644 internal/provider/type_source_azure_blob_storage_csv_header_definition.go
create mode 100644 internal/provider/type_source_azure_blob_storage_format.go
delete mode 100755 internal/provider/type_source_azure_blob_storage_input_format.go
delete mode 100755 internal/provider/type_source_azure_blob_storage_input_format_json_lines_newline_delimited_json.go
mode change 100755 => 100644 internal/provider/type_source_azure_table.go
mode change 100755 => 100644 internal/provider/type_source_bamboo_hr.go
delete mode 100755 internal/provider/type_source_bigcommerce.go
mode change 100755 => 100644 internal/provider/type_source_bigquery.go
mode change 100755 => 100644 internal/provider/type_source_bing_ads.go
mode change 100755 => 100644 internal/provider/type_source_braintree.go
mode change 100755 => 100644 internal/provider/type_source_braze.go
rename internal/provider/{type_destination_dev_null_test_destination_silent.go => type_source_cart.go} (50%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_source_cart_authorization_method.go
mode change 100755 => 100644 internal/provider/type_source_chargebee.go
mode change 100755 => 100644 internal/provider/type_source_chartmogul.go
mode change 100755 => 100644 internal/provider/type_source_clickhouse.go
delete mode 100755 internal/provider/type_source_clickhouse_ssh_tunnel_method.go
mode change 100755 => 100644 internal/provider/type_source_clickup_api.go
mode change 100755 => 100644 internal/provider/type_source_clockify.go
mode change 100755 => 100644 internal/provider/type_source_close_com.go
mode change 100755 => 100644 internal/provider/type_source_coda.go
mode change 100755 => 100644 internal/provider/type_source_coin_api.go
mode change 100755 => 100644 internal/provider/type_source_coinmarketcap.go
delete mode 100755 internal/provider/type_source_configcat.go
mode change 100755 => 100644 internal/provider/type_source_confluence.go
delete mode 100755 internal/provider/type_source_convex.go
mode change 100755 => 100644 internal/provider/type_source_datascope.go
mode change 100755 => 100644 internal/provider/type_source_delighted.go
mode change 100755 => 100644 internal/provider/type_source_dixa.go
mode change 100755 => 100644 internal/provider/type_source_dockerhub.go
mode change 100755 => 100644 internal/provider/type_source_dremio.go
mode change 100755 => 100644 internal/provider/type_source_dynamodb.go
delete mode 100755 internal/provider/type_source_dynamodb1.go
delete mode 100755 internal/provider/type_source_e2e_test_cloud.go
delete mode 100755 internal/provider/type_source_e2e_test_cloud_mock_catalog.go
delete mode 100755 internal/provider/type_source_e2e_test_cloud_mock_catalog_multi_schema.go
delete mode 100755 internal/provider/type_source_e2e_test_cloud_mock_catalog_single_schema.go
delete mode 100755 internal/provider/type_source_emailoctopus.go
mode change 100755 => 100644 internal/provider/type_source_exchange_rates.go
mode change 100755 => 100644 internal/provider/type_source_facebook_marketing.go
mode change 100755 => 100644 internal/provider/type_source_facebook_pages.go
mode change 100755 => 100644 internal/provider/type_source_faker.go
mode change 100755 => 100644 internal/provider/type_source_fauna.go
mode change 100755 => 100644 internal/provider/type_source_fauna_collection.go
delete mode 100755 internal/provider/type_source_fauna_collection_deletion_mode.go
delete mode 100755 internal/provider/type_source_fauna_collection_deletion_mode_disabled.go
delete mode 100755 internal/provider/type_source_fauna_collection_deletion_mode_enabled.go
create mode 100644 internal/provider/type_source_fauna_deletion_mode.go
create mode 100644 internal/provider/type_source_file.go
rename internal/provider/{type_destination_aws_datalake_authentication_mode_iam_user.go => type_source_file_s3_amazon_web_services.go} (69%)
mode change 100755 => 100644
delete mode 100755 internal/provider/type_source_file_secure.go
delete mode 100755 internal/provider/type_source_file_secure_storage_provider.go
delete mode 100755 internal/provider/type_source_file_secure_storage_provider_https_public_web.go
delete mode 100755 internal/provider/type_source_file_secure_storage_provider_scp_secure_copy_protocol.go
delete mode 100755 internal/provider/type_source_file_secure_storage_provider_sftp_secure_file_transfer_protocol.go
create mode 100644 internal/provider/type_source_file_storage_provider.go
mode change 100755 => 100644 internal/provider/type_source_firebolt.go
mode change 100755 => 100644 internal/provider/type_source_freshcaller.go
mode change 100755 => 100644 internal/provider/type_source_freshdesk.go
mode change 100755 => 100644 internal/provider/type_source_freshsales.go
delete mode 100755 internal/provider/type_source_gainsight_px.go
mode change 100755 => 100644 internal/provider/type_source_gcs.go
create mode 100644 internal/provider/type_source_gcs_format.go
create mode 100644 internal/provider/type_source_gcs_stream_config.go
mode change 100755 => 100644 internal/provider/type_source_getlago.go
mode change 100755 => 100644 internal/provider/type_source_github.go
mode change 100755 => 100644 internal/provider/type_source_github_authentication.go
mode change 100755 => 100644 internal/provider/type_source_gitlab.go
mode change 100755 => 100644 internal/provider/type_source_gitlab_authorization_method.go
delete mode 100755 internal/provider/type_source_gitlab_authorization_method_o_auth20.go
delete mode 100755 internal/provider/type_source_gitlab_authorization_method_private_token.go
rename internal/provider/{type_source_airtable_authentication_o_auth20.go => type_source_gitlab_o_auth20.go} (80%)
mode change 100755 => 100644
delete mode 100755 internal/provider/type_source_glassfrog.go
mode change 100755 => 100644 internal/provider/type_source_gnews.go
mode change 100755 => 100644 internal/provider/type_source_google_ads.go
mode change 100755 => 100644 internal/provider/type_source_google_ads_google_credentials.go
mode change 100755 => 100644 internal/provider/type_source_google_analytics_data_api.go
mode change 100755 => 100644 internal/provider/type_source_google_analytics_data_api_credentials.go
delete mode 100755 internal/provider/type_source_google_analytics_data_api_credentials_authenticate_via_google_oauth.go
create mode 100644 internal/provider/type_source_google_analytics_data_api_custom_report_config.go
create mode 100644 internal/provider/type_source_google_analytics_data_api_dimensions_filter.go
create mode 100644 internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter1_expressions_double_value.go
create mode 100644 internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter1_expressions_int64_value.go
create mode 100644 internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_filter.go
create mode 100644 internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_from_value.go
create mode 100644 internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_between_filter.go
create mode 100644 internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_in_list_filter.go
create mode 100644 internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_numeric_filter.go
create mode 100644 internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_string_filter.go
delete mode 100755 internal/provider/type_source_google_analytics_v4.go
delete mode 100755 internal/provider/type_source_google_analytics_v4_credentials.go
mode change 100755 => 100644 internal/provider/type_source_google_directory.go
mode change 100755 => 100644 internal/provider/type_source_google_directory_google_credentials.go
delete mode 100755 internal/provider/type_source_google_directory_google_credentials_service_account_key.go
delete mode 100755 internal/provider/type_source_google_directory_google_credentials_sign_in_via_google_o_auth.go
create mode 100644 internal/provider/type_source_google_drive.go
create mode 100644 internal/provider/type_source_google_drive_authentication.go
create mode 100644 internal/provider/type_source_google_drive_csv_format.go
create mode 100644 internal/provider/type_source_google_drive_file_based_stream_config.go
create mode 100644 internal/provider/type_source_google_drive_format.go
rename internal/provider/{type_source_google_sheets_authentication_service_account_key_authentication.go => type_source_google_drive_service_account_key_authentication.go} (62%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_google_pagespeed_insights.go
mode change 100755 => 100644 internal/provider/type_source_google_search_console.go
mode change 100755 => 100644 internal/provider/type_source_google_search_console_authentication_type.go
mode change 100755 => 100644 internal/provider/type_source_google_search_console_custom_report_config.go
rename internal/provider/{type_source_google_search_console_authentication_type_service_account_key_authentication.go => type_source_google_search_console_service_account_key_authentication.go} (65%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_google_sheets.go
delete mode 100755 internal/provider/type_source_google_sheets_authentication.go
delete mode 100755 internal/provider/type_source_google_sheets_authentication_authenticate_via_google_o_auth.go
mode change 100755 => 100644 internal/provider/type_source_google_webfonts.go
mode change 100755 => 100644 internal/provider/type_source_google_workspace_admin_reports.go
delete mode 100755 internal/provider/type_source_greenhouse.go
mode change 100755 => 100644 internal/provider/type_source_gridly.go
mode change 100755 => 100644 internal/provider/type_source_harvest.go
delete mode 100755 internal/provider/type_source_harvest1.go
rename internal/provider/{type_source_harvest_authentication_mechanism_authenticate_with_personal_access_token.go => type_source_harvest_authenticate_with_personal_access_token.go} (66%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_harvest_authentication_mechanism.go
delete mode 100755 internal/provider/type_source_harvest_authentication_mechanism1.go
delete mode 100755 internal/provider/type_source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth.go
delete mode 100755 internal/provider/type_source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth1.go
delete mode 100755 internal/provider/type_source_harvest_authentication_mechanism_authenticate_with_personal_access_token1.go
delete mode 100755 internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth.go
delete mode 100755 internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth1.go
delete mode 100755 internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token.go
delete mode 100755 internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token1.go
delete mode 100755 internal/provider/type_source_hubplanner.go
mode change 100755 => 100644 internal/provider/type_source_hubspot.go
mode change 100755 => 100644 internal/provider/type_source_hubspot_authentication.go
delete mode 100755 internal/provider/type_source_hubspot_authentication_o_auth.go
delete mode 100755 internal/provider/type_source_hubspot_authentication_private_app.go
mode change 100755 => 100644 internal/provider/type_source_insightly.go
mode change 100755 => 100644 internal/provider/type_source_instagram.go
delete mode 100755 internal/provider/type_source_instatus.go
mode change 100755 => 100644 internal/provider/type_source_intercom.go
mode change 100755 => 100644 internal/provider/type_source_ip2whois.go
delete mode 100755 internal/provider/type_source_iterable.go
mode change 100755 => 100644 internal/provider/type_source_jira.go
mode change 100755 => 100644 internal/provider/type_source_k6_cloud.go
mode change 100755 => 100644 internal/provider/type_source_klarna.go
mode change 100755 => 100644 internal/provider/type_source_klaviyo.go
mode change 100755 => 100644 internal/provider/type_source_kustomer_singer.go
mode change 100755 => 100644 internal/provider/type_source_kyve.go
delete mode 100755 internal/provider/type_source_lemlist.go
mode change 100755 => 100644 internal/provider/type_source_lever_hiring.go
mode change 100755 => 100644 internal/provider/type_source_lever_hiring_authentication_mechanism.go
delete mode 100755 internal/provider/type_source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key.go
delete mode 100755 internal/provider/type_source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth.go
mode change 100755 => 100644 internal/provider/type_source_linkedin_ads.go
mode change 100755 => 100644 internal/provider/type_source_linkedin_ads_authentication.go
delete mode 100755 internal/provider/type_source_linkedin_ads_authentication_access_token.go
mode change 100755 => 100644 internal/provider/type_source_linkedin_pages.go
delete mode 100755 internal/provider/type_source_linkedin_pages_authentication.go
mode change 100755 => 100644 internal/provider/type_source_linnworks.go
mode change 100755 => 100644 internal/provider/type_source_lokalise.go
mode change 100755 => 100644 internal/provider/type_source_mailchimp.go
mode change 100755 => 100644 internal/provider/type_source_mailchimp_authentication.go
delete mode 100755 internal/provider/type_source_mailchimp_authentication_api_key.go
mode change 100755 => 100644 internal/provider/type_source_mailgun.go
mode change 100755 => 100644 internal/provider/type_source_mailjet_sms.go
mode change 100755 => 100644 internal/provider/type_source_marketo.go
mode change 100755 => 100644 internal/provider/type_source_metabase.go
mode change 100755 => 100644 internal/provider/type_source_microsoft_teams.go
mode change 100755 => 100644 internal/provider/type_source_microsoft_teams_authentication_mechanism.go
delete mode 100755 internal/provider/type_source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth20.go
mode change 100755 => 100644 internal/provider/type_source_mixpanel.go
mode change 100755 => 100644 internal/provider/type_source_mixpanel_authentication_wildcard.go
delete mode 100755 internal/provider/type_source_mixpanel_authentication_wildcard_project_secret.go
delete mode 100755 internal/provider/type_source_mixpanel_authentication_wildcard_service_account.go
mode change 100755 => 100644 internal/provider/type_source_monday.go
mode change 100755 => 100644 internal/provider/type_source_monday_authorization_method.go
delete mode 100755 internal/provider/type_source_monday_authorization_method_api_token.go
rename internal/provider/{type_source_monday_authorization_method_o_auth20.go => type_source_monday_o_auth20.go} (77%)
mode change 100755 => 100644
delete mode 100755 internal/provider/type_source_mongodb.go
delete mode 100755 internal/provider/type_source_mongodb1.go
mode change 100755 => 100644 internal/provider/type_source_mongodb_internal_poc.go
delete mode 100755 internal/provider/type_source_mongodb_mongo_db_instance_type.go
delete mode 100755 internal/provider/type_source_mongodb_mongo_db_instance_type1.go
delete mode 100755 internal/provider/type_source_mongodb_mongo_db_instance_type_mongo_db_atlas.go
delete mode 100755 internal/provider/type_source_mongodb_mongo_db_instance_type_mongo_db_atlas1.go
delete mode 100755 internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atlas.go
delete mode 100755 internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atlas1.go
create mode 100644 internal/provider/type_source_mongodb_v2.go
create mode 100644 internal/provider/type_source_mongodb_v2_cluster_type.go
mode change 100755 => 100644 internal/provider/type_source_mssql.go
delete mode 100755 internal/provider/type_source_mssql_ssh_tunnel_method.go
delete mode 100755 internal/provider/type_source_mssql_ssl_method.go
mode change 100755 => 100644 internal/provider/type_source_mssql_update_method.go
delete mode 100755 internal/provider/type_source_mssql_update_method_scan_changes_with_user_defined_cursor.go
mode change 100755 => 100644 internal/provider/type_source_my_hours.go
mode change 100755 => 100644 internal/provider/type_source_mysql.go
delete mode 100755 internal/provider/type_source_mysql_ssh_tunnel_method.go
mode change 100755 => 100644 internal/provider/type_source_mysql_ssl_modes.go
mode change 100755 => 100644 internal/provider/type_source_mysql_update_method.go
rename internal/provider/{type_source_mysql_ssl_modes_verify_ca.go => type_source_mysql_verify_ca.go} (81%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_netsuite.go
mode change 100755 => 100644 internal/provider/type_source_notion.go
delete mode 100755 internal/provider/type_source_notion_authenticate_using.go
delete mode 100755 internal/provider/type_source_notion_authenticate_using_access_token.go
create mode 100644 internal/provider/type_source_notion_authentication_method.go
rename internal/provider/{type_source_notion_authenticate_using_o_auth20.go => type_source_notion_o_auth20.go} (75%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_nytimes.go
mode change 100755 => 100644 internal/provider/type_source_okta.go
mode change 100755 => 100644 internal/provider/type_source_okta_authorization_method.go
delete mode 100755 internal/provider/type_source_omnisend.go
mode change 100755 => 100644 internal/provider/type_source_onesignal.go
mode change 100755 => 100644 internal/provider/type_source_oracle.go
mode change 100755 => 100644 internal/provider/type_source_oracle_connect_by.go
delete mode 100755 internal/provider/type_source_oracle_connect_by_service_name.go
delete mode 100755 internal/provider/type_source_oracle_connect_by_system_idsid.go
mode change 100755 => 100644 internal/provider/type_source_oracle_encryption.go
delete mode 100755 internal/provider/type_source_oracle_encryption_tls_encrypted_verify_certificate.go
delete mode 100755 internal/provider/type_source_oracle_ssh_tunnel_method.go
mode change 100755 => 100644 internal/provider/type_source_orb.go
mode change 100755 => 100644 internal/provider/type_source_orbit.go
mode change 100755 => 100644 internal/provider/type_source_outbrain_amplify.go
mode change 100755 => 100644 internal/provider/type_source_outbrain_amplify_authentication_method.go
delete mode 100755 internal/provider/type_source_outbrain_amplify_authentication_method_access_token.go
delete mode 100755 internal/provider/type_source_outbrain_amplify_authentication_method_username_password.go
mode change 100755 => 100644 internal/provider/type_source_outreach.go
mode change 100755 => 100644 internal/provider/type_source_paypal_transaction.go
mode change 100755 => 100644 internal/provider/type_source_paystack.go
delete mode 100755 internal/provider/type_source_pendo.go
delete mode 100755 internal/provider/type_source_persistiq.go
mode change 100755 => 100644 internal/provider/type_source_pexels_api.go
mode change 100755 => 100644 internal/provider/type_source_pinterest.go
mode change 100755 => 100644 internal/provider/type_source_pinterest_authorization_method.go
delete mode 100755 internal/provider/type_source_pinterest_authorization_method_o_auth20.go
mode change 100755 => 100644 internal/provider/type_source_pipedrive.go
mode change 100755 => 100644 internal/provider/type_source_pocket.go
mode change 100755 => 100644 internal/provider/type_source_pokeapi.go
mode change 100755 => 100644 internal/provider/type_source_polygon_stock_api.go
mode change 100755 => 100644 internal/provider/type_source_postgres.go
delete mode 100755 internal/provider/type_source_postgres1.go
delete mode 100755 internal/provider/type_source_postgres_ssh_tunnel_method.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes1.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_allow.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_allow1.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_disable.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_disable1.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_prefer.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_prefer1.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_require.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_require1.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_verify_ca.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_verify_ca1.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_verify_full.go
delete mode 100755 internal/provider/type_source_postgres_ssl_modes_verify_full1.go
mode change 100755 => 100644 internal/provider/type_source_postgres_update_method.go
delete mode 100755 internal/provider/type_source_postgres_update_method1.go
delete mode 100755 internal/provider/type_source_postgres_update_method_detect_changes_with_xmin_system_column.go
delete mode 100755 internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc.go
delete mode 100755 internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc1.go
delete mode 100755 internal/provider/type_source_postgres_update_method_scan_changes_with_user_defined_cursor.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_allow.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_allow1.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_disable.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_disable1.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_prefer.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_prefer1.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_require.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_require1.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_verify_ca.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_verify_ca1.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_verify_full.go
delete mode 100755 internal/provider/type_source_postgres_update_ssl_modes_verify_full1.go
delete mode 100755 internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc.go
delete mode 100755 internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc1.go
mode change 100755 => 100644 internal/provider/type_source_posthog.go
mode change 100755 => 100644 internal/provider/type_source_postmarkapp.go
mode change 100755 => 100644 internal/provider/type_source_prestashop.go
mode change 100755 => 100644 internal/provider/type_source_punk_api.go
mode change 100755 => 100644 internal/provider/type_source_pypi.go
mode change 100755 => 100644 internal/provider/type_source_qualaroo.go
mode change 100755 => 100644 internal/provider/type_source_quickbooks.go
mode change 100755 => 100644 internal/provider/type_source_quickbooks_authorization_method.go
rename internal/provider/{type_source_quickbooks_authorization_method_o_auth20.go => type_source_quickbooks_o_auth20.go} (81%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_railz.go
mode change 100755 => 100644 internal/provider/type_source_recharge.go
mode change 100755 => 100644 internal/provider/type_source_recreation.go
mode change 100755 => 100644 internal/provider/type_source_recruitee.go
mode change 100755 => 100644 internal/provider/type_source_recurly.go
mode change 100755 => 100644 internal/provider/type_source_redshift.go
mode change 100755 => 100644 internal/provider/type_source_retently.go
delete mode 100755 internal/provider/type_source_retently1.go
mode change 100755 => 100644 internal/provider/type_source_retently_authentication_mechanism.go
delete mode 100755 internal/provider/type_source_retently_authentication_mechanism1.go
delete mode 100755 internal/provider/type_source_retently_authentication_mechanism_authenticate_via_retently_o_auth.go
delete mode 100755 internal/provider/type_source_retently_authentication_mechanism_authenticate_via_retently_o_auth1.go
delete mode 100755 internal/provider/type_source_retently_authentication_mechanism_authenticate_with_api_token.go
delete mode 100755 internal/provider/type_source_retently_authentication_mechanism_authenticate_with_api_token1.go
delete mode 100755 internal/provider/type_source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth.go
delete mode 100755 internal/provider/type_source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth1.go
delete mode 100755 internal/provider/type_source_retently_update_authentication_mechanism_authenticate_with_api_token.go
delete mode 100755 internal/provider/type_source_retently_update_authentication_mechanism_authenticate_with_api_token1.go
mode change 100755 => 100644 internal/provider/type_source_rki_covid.go
mode change 100755 => 100644 internal/provider/type_source_rss.go
mode change 100755 => 100644 internal/provider/type_source_s3.go
delete mode 100755 internal/provider/type_source_s3_file_based_stream_config.go
delete mode 100755 internal/provider/type_source_s3_file_based_stream_config_format.go
delete mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_avro_format.go
delete mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition.go
delete mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated.go
delete mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv.go
delete mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided.go
delete mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_jsonl_format.go
delete mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_parquet_format.go
mode change 100755 => 100644 internal/provider/type_source_s3_file_format.go
mode change 100755 => 100644 internal/provider/type_source_s3_s3_amazon_web_services.go
delete mode 100755 internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format.go
delete mode 100755 internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition.go
mode change 100755 => 100644 internal/provider/type_source_salesforce.go
mode change 100755 => 100644 internal/provider/type_source_salesloft.go
mode change 100755 => 100644 internal/provider/type_source_salesloft_credentials.go
delete mode 100755 internal/provider/type_source_salesloft_credentials_authenticate_via_api_key.go
delete mode 100755 internal/provider/type_source_sap_fieldglass.go
delete mode 100755 internal/provider/type_source_secoda.go
mode change 100755 => 100644 internal/provider/type_source_sendgrid.go
delete mode 100755 internal/provider/type_source_sendinblue.go
mode change 100755 => 100644 internal/provider/type_source_senseforce.go
mode change 100755 => 100644 internal/provider/type_source_sentry.go
mode change 100755 => 100644 internal/provider/type_source_sftp.go
mode change 100755 => 100644 internal/provider/type_source_sftp_authentication_wildcard.go
mode change 100755 => 100644 internal/provider/type_source_sftp_bulk.go
rename internal/provider/{type_source_sftp_authentication_wildcard_password_authentication.go => type_source_sftp_password_authentication.go} (64%)
mode change 100755 => 100644
rename internal/provider/{type_source_sftp_authentication_wildcard_ssh_key_authentication.go => type_source_sftp_ssh_key_authentication.go} (64%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_shopify.go
rename internal/provider/{type_source_mailchimp_authentication_o_auth20.go => type_source_shopify_o_auth20.go} (75%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_shopify_shopify_authorization_method.go
delete mode 100755 internal/provider/type_source_shopify_shopify_authorization_method_api_password.go
delete mode 100755 internal/provider/type_source_shopify_shopify_authorization_method_o_auth20.go
mode change 100755 => 100644 internal/provider/type_source_shortio.go
mode change 100755 => 100644 internal/provider/type_source_slack.go
mode change 100755 => 100644 internal/provider/type_source_slack_authentication_mechanism.go
delete mode 100755 internal/provider/type_source_slack_authentication_mechanism_api_token.go
delete mode 100755 internal/provider/type_source_slack_authentication_mechanism_sign_in_via_slack_o_auth.go
mode change 100755 => 100644 internal/provider/type_source_smaily.go
delete mode 100755 internal/provider/type_source_smartengage.go
mode change 100755 => 100644 internal/provider/type_source_smartsheets.go
mode change 100755 => 100644 internal/provider/type_source_smartsheets_authorization_method.go
mode change 100755 => 100644 internal/provider/type_source_snapchat_marketing.go
mode change 100755 => 100644 internal/provider/type_source_snowflake.go
mode change 100755 => 100644 internal/provider/type_source_snowflake_authorization_method.go
delete mode 100755 internal/provider/type_source_snowflake_authorization_method_username_and_password.go
rename internal/provider/{type_destination_snowflake_authorization_method_o_auth20.go => type_source_snowflake_o_auth20.go} (76%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_sonar_cloud.go
mode change 100755 => 100644 internal/provider/type_source_spacex_api.go
mode change 100755 => 100644 internal/provider/type_source_square.go
mode change 100755 => 100644 internal/provider/type_source_square_authentication.go
delete mode 100755 internal/provider/type_source_square_authentication_api_key.go
delete mode 100755 internal/provider/type_source_square_authentication_oauth_authentication.go
mode change 100755 => 100644 internal/provider/type_source_strava.go
mode change 100755 => 100644 internal/provider/type_source_stripe.go
mode change 100755 => 100644 internal/provider/type_source_survey_sparrow.go
mode change 100755 => 100644 internal/provider/type_source_survey_sparrow_base_url.go
delete mode 100755 internal/provider/type_source_survey_sparrow_base_url_global_account.go
delete mode 100755 internal/provider/type_source_survey_sparrow_base_urleu_based_account.go
mode change 100755 => 100644 internal/provider/type_source_surveymonkey.go
delete mode 100755 internal/provider/type_source_surveymonkey_survey_monkey_authorization_method.go
delete mode 100755 internal/provider/type_source_tempo.go
mode change 100755 => 100644 internal/provider/type_source_the_guardian_api.go
mode change 100755 => 100644 internal/provider/type_source_tiktok_marketing.go
mode change 100755 => 100644 internal/provider/type_source_tiktok_marketing_authentication_method.go
rename internal/provider/{type_source_tiktok_marketing_authentication_method_o_auth20.go => type_source_tiktok_marketing_o_auth20.go} (75%)
mode change 100755 => 100644
delete mode 100755 internal/provider/type_source_todoist.go
mode change 100755 => 100644 internal/provider/type_source_trello.go
mode change 100755 => 100644 internal/provider/type_source_trustpilot.go
rename internal/provider/{type_source_trustpilot_authorization_method_api_key.go => type_source_trustpilot_api_key.go} (66%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_trustpilot_authorization_method.go
mode change 100755 => 100644 internal/provider/type_source_tvmaze_schedule.go
mode change 100755 => 100644 internal/provider/type_source_twilio.go
mode change 100755 => 100644 internal/provider/type_source_twilio_taskrouter.go
mode change 100755 => 100644 internal/provider/type_source_twitter.go
mode change 100755 => 100644 internal/provider/type_source_typeform.go
delete mode 100755 internal/provider/type_source_typeform_authorization_method.go
mode change 100755 => 100644 internal/provider/type_source_us_census.go
mode change 100755 => 100644 internal/provider/type_source_webflow.go
delete mode 100755 internal/provider/type_source_whisky_hunter.go
mode change 100755 => 100644 internal/provider/type_source_wikipedia_pageviews.go
mode change 100755 => 100644 internal/provider/type_source_woocommerce.go
delete mode 100755 internal/provider/type_source_xero.go
mode change 100755 => 100644 internal/provider/type_source_yandex_metrica.go
mode change 100755 => 100644 internal/provider/type_source_yotpo.go
delete mode 100755 internal/provider/type_source_younium.go
mode change 100755 => 100644 internal/provider/type_source_youtube_analytics.go
delete mode 100755 internal/provider/type_source_youtube_analytics1.go
delete mode 100755 internal/provider/type_source_youtube_analytics_authenticate_via_o_auth201.go
mode change 100755 => 100644 internal/provider/type_source_zendesk_chat.go
mode change 100755 => 100644 internal/provider/type_source_zendesk_chat_authorization_method.go
delete mode 100755 internal/provider/type_source_zendesk_chat_authorization_method_access_token.go
rename internal/provider/{type_source_google_search_console_authentication_type_o_auth.go => type_source_zendesk_chat_o_auth20.go} (76%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_zendesk_sunshine.go
rename internal/provider/{type_source_pipedrive_api_key_authentication.go => type_source_zendesk_sunshine_api_token.go} (68%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_zendesk_sunshine_authorization_method.go
delete mode 100755 internal/provider/type_source_zendesk_sunshine_authorization_method_api_token.go
delete mode 100755 internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth20.go
mode change 100755 => 100644 internal/provider/type_source_zendesk_support.go
delete mode 100755 internal/provider/type_source_zendesk_support1.go
rename internal/provider/{type_source_zendesk_talk_authentication_api_token.go => type_source_zendesk_support_api_token.go} (75%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_source_zendesk_support_authentication.go
delete mode 100755 internal/provider/type_source_zendesk_support_authentication1.go
delete mode 100755 internal/provider/type_source_zendesk_support_authentication_api_token.go
delete mode 100755 internal/provider/type_source_zendesk_support_authentication_api_token1.go
delete mode 100755 internal/provider/type_source_zendesk_support_authentication_o_auth20.go
delete mode 100755 internal/provider/type_source_zendesk_support_authentication_o_auth201.go
rename internal/provider/{type_source_zendesk_talk_authentication_o_auth20.go => type_source_zendesk_support_o_auth20.go} (78%)
mode change 100755 => 100644
delete mode 100755 internal/provider/type_source_zendesk_support_update_authentication_api_token.go
delete mode 100755 internal/provider/type_source_zendesk_support_update_authentication_api_token1.go
delete mode 100755 internal/provider/type_source_zendesk_support_update_authentication_o_auth20.go
delete mode 100755 internal/provider/type_source_zendesk_support_update_authentication_o_auth201.go
mode change 100755 => 100644 internal/provider/type_source_zendesk_talk.go
delete mode 100755 internal/provider/type_source_zendesk_talk1.go
delete mode 100755 internal/provider/type_source_zendesk_talk_authentication.go
delete mode 100755 internal/provider/type_source_zendesk_talk_authentication1.go
delete mode 100755 internal/provider/type_source_zendesk_talk_authentication_api_token1.go
delete mode 100755 internal/provider/type_source_zendesk_talk_authentication_o_auth201.go
delete mode 100755 internal/provider/type_source_zendesk_talk_update_authentication_api_token.go
delete mode 100755 internal/provider/type_source_zendesk_talk_update_authentication_api_token1.go
delete mode 100755 internal/provider/type_source_zendesk_talk_update_authentication_o_auth20.go
delete mode 100755 internal/provider/type_source_zendesk_talk_update_authentication_o_auth201.go
mode change 100755 => 100644 internal/provider/type_source_zenloop.go
mode change 100755 => 100644 internal/provider/type_source_zoho_crm.go
mode change 100755 => 100644 internal/provider/type_source_zoom.go
mode change 100755 => 100644 internal/provider/type_source_zuora.go
create mode 100644 internal/provider/type_ssh_key_authentication.go
rename internal/provider/{type_source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance.go => type_standalone_mongo_db_instance.go} (79%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/type_stream_configuration.go
mode change 100755 => 100644 internal/provider/type_stream_configurations.go
rename internal/provider/{type_source_salesforce_streams_criteria.go => type_streams_criteria.go} (83%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_system_idsid.go
rename internal/provider/{type_destination_milvus_indexing_authentication_no_auth.go => type_tls_encrypted_verify_certificate.go} (61%)
mode change 100755 => 100644
create mode 100644 internal/provider/type_user_provided.go
create mode 100644 internal/provider/type_username_and_password.go
rename internal/provider/{type_destination_milvus_indexing_authentication_username_password.go => type_username_password.go} (69%)
mode change 100755 => 100644
rename internal/provider/{type_destination_postgres_ssl_modes_verify_ca.go => type_verify_ca.go} (73%)
mode change 100755 => 100644
rename internal/provider/{type_source_mysql_ssl_modes_verify_identity.go => type_verify_full.go} (80%)
mode change 100755 => 100644
rename internal/provider/{type_destination_gcs_output_format_avro_apache_avro_compression_codec_deflate.go => type_xz.go} (77%)
mode change 100755 => 100644
rename internal/provider/{type_destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard.go => type_zstandard.go} (80%)
mode change 100755 => 100644
mode change 100755 => 100644 internal/provider/utils.go
mode change 100755 => 100644 internal/provider/workspace_data_source.go
mode change 100755 => 100644 internal/provider/workspace_data_source_sdk.go
mode change 100755 => 100644 internal/provider/workspace_resource.go
mode change 100755 => 100644 internal/provider/workspace_resource_sdk.go
mode change 100755 => 100644 internal/sdk/connections.go
mode change 100755 => 100644 internal/sdk/destinations.go
mode change 100755 => 100644 internal/sdk/jobs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/canceljob.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createconnection.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestination.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationawsdatalake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationazureblobstorage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationbigquery.go
delete mode 100755 internal/sdk/pkg/models/operations/createdestinationbigquerydenormalized.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationclickhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationconvex.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationcumulio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationdatabend.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationdatabricks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationdevnull.go
create mode 100644 internal/sdk/pkg/models/operations/createdestinationduckdb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationdynamodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationelasticsearch.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationfirebolt.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationfirestore.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationgcs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationgooglesheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationkeen.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationkinesis.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationlangchain.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationmilvus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationmongodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationmssql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationmysql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationoracle.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationpinecone.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationpostgres.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationpubsub.go
create mode 100644 internal/sdk/pkg/models/operations/createdestinationqdrant.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationredis.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationredshift.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinations3.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinations3glue.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationsftpjson.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationsnowflake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationtimeplus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationtypesense.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationvertica.go
create mode 100644 internal/sdk/pkg/models/operations/createdestinationweaviate.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createdestinationxata.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createjob.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createorupdateworkspaceoauthcredentials.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsource.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceaha.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceaircall.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceairtable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcealloydb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceamazonads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceamazonsellerpartner.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceamazonsqs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceamplitude.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceapifydataset.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceappfollow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceasana.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceauth0.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceawscloudtrail.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceazureblobstorage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceazuretable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcebamboohr.go
delete mode 100755 internal/sdk/pkg/models/operations/createsourcebigcommerce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcebigquery.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcebingads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcebraintree.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcebraze.go
create mode 100644 internal/sdk/pkg/models/operations/createsourcecart.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcechargebee.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcechartmogul.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceclickhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceclickupapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceclockify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceclosecom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcecoda.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcecoinapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcecoinmarketcap.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceconfigcat.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceconfluence.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceconvex.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcedatascope.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcedelighted.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcedixa.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcedockerhub.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcedremio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcedynamodb.go
delete mode 100755 internal/sdk/pkg/models/operations/createsourcee2etestcloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceemailoctopus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceexchangerates.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcefacebookmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcefacebookpages.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcefaker.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcefauna.go
create mode 100644 internal/sdk/pkg/models/operations/createsourcefile.go
delete mode 100755 internal/sdk/pkg/models/operations/createsourcefilesecure.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcefirebolt.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcefreshcaller.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcefreshdesk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcefreshsales.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegainsightpx.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegcs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegetlago.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegithub.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegitlab.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceglassfrog.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegnews.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegoogleads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegoogleanalyticsdataapi.go
delete mode 100755 internal/sdk/pkg/models/operations/createsourcegoogleanalyticsv4.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegoogledirectory.go
create mode 100644 internal/sdk/pkg/models/operations/createsourcegoogledrive.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegooglepagespeedinsights.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegooglesearchconsole.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegooglesheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegooglewebfonts.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegoogleworkspaceadminreports.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegreenhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcegridly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceharvest.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcehubplanner.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcehubspot.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceinsightly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceinstagram.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceinstatus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceintercom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceip2whois.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceiterable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcejira.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcek6cloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceklarna.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceklaviyo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcekustomersinger.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcekyve.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcelaunchdarkly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcelemlist.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceleverhiring.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcelinkedinads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcelinkedinpages.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcelinnworks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcelokalise.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemailchimp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemailgun.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemailjetsms.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemarketo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemetabase.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemicrosoftteams.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemixpanel.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemonday.go
delete mode 100755 internal/sdk/pkg/models/operations/createsourcemongodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemongodbinternalpoc.go
create mode 100644 internal/sdk/pkg/models/operations/createsourcemongodbv2.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemssql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemyhours.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcemysql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcenetsuite.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcenotion.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcenytimes.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceokta.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceomnisend.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceonesignal.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceoracle.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceorb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceorbit.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceoutbrainamplify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceoutreach.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepaypaltransaction.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepaystack.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcependo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepersistiq.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepexelsapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepinterest.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepipedrive.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepocket.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepokeapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepolygonstockapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepostgres.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceposthog.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepostmarkapp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceprestashop.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepunkapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcepypi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcequalaroo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcequickbooks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcerailz.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcerecharge.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcerecreation.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcerecruitee.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcerecurly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceredshift.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceretently.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcerkicovid.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcerss.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsources3.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesalesforce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesalesloft.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesapfieldglass.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesecoda.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesendgrid.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesendinblue.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesenseforce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesentry.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesftp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesftpbulk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceshopify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceshortio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceslack.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesmaily.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesmartengage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesmartsheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesnapchatmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesnowflake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesonarcloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcespacexapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesquare.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcestrava.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcestripe.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesurveymonkey.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcesurveysparrow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcetempo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcetheguardianapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcetiktokmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcetodoist.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcetrello.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcetrustpilot.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcetvmazeschedule.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcetwilio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcetwiliotaskrouter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcetwitter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcetypeform.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceuscensus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcevantage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcewebflow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcewhiskyhunter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcewikipediapageviews.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcewoocommerce.go
delete mode 100755 internal/sdk/pkg/models/operations/createsourcexero.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcexkcd.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceyandexmetrica.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceyotpo.go
delete mode 100755 internal/sdk/pkg/models/operations/createsourceyounium.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourceyoutubeanalytics.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcezendeskchat.go
create mode 100644 internal/sdk/pkg/models/operations/createsourcezendesksell.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcezendesksunshine.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcezendesksupport.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcezendesktalk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcezenloop.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcezohocrm.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcezoom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createsourcezuora.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/createworkspace.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deleteconnection.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestination.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationawsdatalake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationazureblobstorage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationbigquery.go
delete mode 100755 internal/sdk/pkg/models/operations/deletedestinationbigquerydenormalized.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationclickhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationconvex.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationcumulio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationdatabend.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationdatabricks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationdevnull.go
create mode 100644 internal/sdk/pkg/models/operations/deletedestinationduckdb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationdynamodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationelasticsearch.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationfirebolt.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationfirestore.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationgcs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationgooglesheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationkeen.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationkinesis.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationlangchain.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationmilvus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationmongodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationmssql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationmysql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationoracle.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationpinecone.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationpostgres.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationpubsub.go
create mode 100644 internal/sdk/pkg/models/operations/deletedestinationqdrant.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationredis.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationredshift.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinations3.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinations3glue.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationsftpjson.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationsnowflake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationtimeplus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationtypesense.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationvertica.go
create mode 100644 internal/sdk/pkg/models/operations/deletedestinationweaviate.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletedestinationxata.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesource.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceaha.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceaircall.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceairtable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcealloydb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceamazonads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceamazonsellerpartner.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceamazonsqs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceamplitude.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceapifydataset.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceappfollow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceasana.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceauth0.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceawscloudtrail.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceazureblobstorage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceazuretable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcebamboohr.go
delete mode 100755 internal/sdk/pkg/models/operations/deletesourcebigcommerce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcebigquery.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcebingads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcebraintree.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcebraze.go
create mode 100644 internal/sdk/pkg/models/operations/deletesourcecart.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcechargebee.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcechartmogul.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceclickhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceclickupapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceclockify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceclosecom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcecoda.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcecoinapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcecoinmarketcap.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceconfigcat.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceconfluence.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceconvex.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcedatascope.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcedelighted.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcedixa.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcedockerhub.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcedremio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcedynamodb.go
delete mode 100755 internal/sdk/pkg/models/operations/deletesourcee2etestcloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceemailoctopus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceexchangerates.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcefacebookmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcefacebookpages.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcefaker.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcefauna.go
create mode 100644 internal/sdk/pkg/models/operations/deletesourcefile.go
delete mode 100755 internal/sdk/pkg/models/operations/deletesourcefilesecure.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcefirebolt.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcefreshcaller.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcefreshdesk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcefreshsales.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegainsightpx.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegcs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegetlago.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegithub.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegitlab.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceglassfrog.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegnews.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegoogleads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegoogleanalyticsdataapi.go
delete mode 100755 internal/sdk/pkg/models/operations/deletesourcegoogleanalyticsv4.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegoogledirectory.go
create mode 100644 internal/sdk/pkg/models/operations/deletesourcegoogledrive.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegooglepagespeedinsights.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegooglesearchconsole.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegooglesheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegooglewebfonts.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegoogleworkspaceadminreports.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegreenhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcegridly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceharvest.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcehubplanner.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcehubspot.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceinsightly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceinstagram.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceinstatus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceintercom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceip2whois.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceiterable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcejira.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcek6cloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceklarna.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceklaviyo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcekustomersinger.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcekyve.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcelaunchdarkly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcelemlist.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceleverhiring.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcelinkedinads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcelinkedinpages.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcelinnworks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcelokalise.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemailchimp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemailgun.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemailjetsms.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemarketo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemetabase.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemicrosoftteams.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemixpanel.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemonday.go
delete mode 100755 internal/sdk/pkg/models/operations/deletesourcemongodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemongodbinternalpoc.go
create mode 100644 internal/sdk/pkg/models/operations/deletesourcemongodbv2.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemssql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemyhours.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcemysql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcenetsuite.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcenotion.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcenytimes.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceokta.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceomnisend.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceonesignal.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceoracle.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceorb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceorbit.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceoutbrainamplify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceoutreach.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepaypaltransaction.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepaystack.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcependo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepersistiq.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepexelsapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepinterest.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepipedrive.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepocket.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepokeapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepolygonstockapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepostgres.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceposthog.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepostmarkapp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceprestashop.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepunkapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcepypi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcequalaroo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcequickbooks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcerailz.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcerecharge.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcerecreation.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcerecruitee.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcerecurly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceredshift.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceretently.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcerkicovid.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcerss.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesources3.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesalesforce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesalesloft.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesapfieldglass.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesecoda.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesendgrid.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesendinblue.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesenseforce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesentry.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesftp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesftpbulk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceshopify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceshortio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceslack.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesmaily.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesmartengage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesmartsheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesnapchatmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesnowflake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesonarcloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcespacexapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesquare.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcestrava.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcestripe.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesurveymonkey.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcesurveysparrow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcetempo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcetheguardianapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcetiktokmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcetodoist.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcetrello.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcetrustpilot.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcetvmazeschedule.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcetwilio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcetwiliotaskrouter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcetwitter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcetypeform.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceuscensus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcevantage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcewebflow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcewhiskyhunter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcewikipediapageviews.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcewoocommerce.go
delete mode 100755 internal/sdk/pkg/models/operations/deletesourcexero.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcexkcd.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceyandexmetrica.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceyotpo.go
delete mode 100755 internal/sdk/pkg/models/operations/deletesourceyounium.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourceyoutubeanalytics.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcezendeskchat.go
create mode 100644 internal/sdk/pkg/models/operations/deletesourcezendesksell.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcezendesksunshine.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcezendesksupport.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcezendesktalk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcezenloop.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcezohocrm.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcezoom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deletesourcezuora.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/deleteworkspace.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getconnection.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestination.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationawsdatalake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationazureblobstorage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationbigquery.go
delete mode 100755 internal/sdk/pkg/models/operations/getdestinationbigquerydenormalized.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationclickhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationconvex.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationcumulio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationdatabend.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationdatabricks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationdevnull.go
create mode 100644 internal/sdk/pkg/models/operations/getdestinationduckdb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationdynamodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationelasticsearch.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationfirebolt.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationfirestore.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationgcs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationgooglesheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationkeen.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationkinesis.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationlangchain.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationmilvus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationmongodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationmssql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationmysql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationoracle.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationpinecone.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationpostgres.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationpubsub.go
create mode 100644 internal/sdk/pkg/models/operations/getdestinationqdrant.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationredis.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationredshift.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinations3.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinations3glue.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationsftpjson.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationsnowflake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationtimeplus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationtypesense.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationvertica.go
create mode 100644 internal/sdk/pkg/models/operations/getdestinationweaviate.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getdestinationxata.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getjob.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsource.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceaha.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceaircall.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceairtable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcealloydb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceamazonads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceamazonsellerpartner.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceamazonsqs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceamplitude.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceapifydataset.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceappfollow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceasana.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceauth0.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceawscloudtrail.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceazureblobstorage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceazuretable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcebamboohr.go
delete mode 100755 internal/sdk/pkg/models/operations/getsourcebigcommerce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcebigquery.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcebingads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcebraintree.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcebraze.go
create mode 100644 internal/sdk/pkg/models/operations/getsourcecart.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcechargebee.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcechartmogul.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceclickhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceclickupapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceclockify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceclosecom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcecoda.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcecoinapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcecoinmarketcap.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceconfigcat.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceconfluence.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceconvex.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcedatascope.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcedelighted.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcedixa.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcedockerhub.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcedremio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcedynamodb.go
delete mode 100755 internal/sdk/pkg/models/operations/getsourcee2etestcloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceemailoctopus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceexchangerates.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcefacebookmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcefacebookpages.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcefaker.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcefauna.go
create mode 100644 internal/sdk/pkg/models/operations/getsourcefile.go
delete mode 100755 internal/sdk/pkg/models/operations/getsourcefilesecure.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcefirebolt.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcefreshcaller.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcefreshdesk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcefreshsales.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegainsightpx.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegcs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegetlago.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegithub.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegitlab.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceglassfrog.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegnews.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegoogleads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegoogleanalyticsdataapi.go
delete mode 100755 internal/sdk/pkg/models/operations/getsourcegoogleanalyticsv4.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegoogledirectory.go
create mode 100644 internal/sdk/pkg/models/operations/getsourcegoogledrive.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegooglepagespeedinsights.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegooglesearchconsole.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegooglesheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegooglewebfonts.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegoogleworkspaceadminreports.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegreenhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcegridly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceharvest.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcehubplanner.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcehubspot.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceinsightly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceinstagram.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceinstatus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceintercom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceip2whois.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceiterable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcejira.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcek6cloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceklarna.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceklaviyo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcekustomersinger.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcekyve.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcelaunchdarkly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcelemlist.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceleverhiring.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcelinkedinads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcelinkedinpages.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcelinnworks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcelokalise.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemailchimp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemailgun.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemailjetsms.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemarketo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemetabase.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemicrosoftteams.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemixpanel.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemonday.go
delete mode 100755 internal/sdk/pkg/models/operations/getsourcemongodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemongodbinternalpoc.go
create mode 100644 internal/sdk/pkg/models/operations/getsourcemongodbv2.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemssql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemyhours.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcemysql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcenetsuite.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcenotion.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcenytimes.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceokta.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceomnisend.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceonesignal.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceoracle.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceorb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceorbit.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceoutbrainamplify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceoutreach.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepaypaltransaction.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepaystack.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcependo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepersistiq.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepexelsapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepinterest.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepipedrive.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepocket.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepokeapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepolygonstockapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepostgres.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceposthog.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepostmarkapp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceprestashop.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepunkapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcepypi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcequalaroo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcequickbooks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcerailz.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcerecharge.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcerecreation.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcerecruitee.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcerecurly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceredshift.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceretently.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcerkicovid.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcerss.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsources3.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesalesforce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesalesloft.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesapfieldglass.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesecoda.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesendgrid.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesendinblue.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesenseforce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesentry.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesftp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesftpbulk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceshopify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceshortio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceslack.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesmaily.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesmartengage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesmartsheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesnapchatmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesnowflake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesonarcloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcespacexapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesquare.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcestrava.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcestripe.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesurveymonkey.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcesurveysparrow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcetempo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcetheguardianapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcetiktokmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcetodoist.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcetrello.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcetrustpilot.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcetvmazeschedule.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcetwilio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcetwiliotaskrouter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcetwitter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcetypeform.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceuscensus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcevantage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcewebflow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcewhiskyhunter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcewikipediapageviews.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcewoocommerce.go
delete mode 100755 internal/sdk/pkg/models/operations/getsourcexero.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcexkcd.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceyandexmetrica.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceyotpo.go
delete mode 100755 internal/sdk/pkg/models/operations/getsourceyounium.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourceyoutubeanalytics.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcezendeskchat.go
create mode 100644 internal/sdk/pkg/models/operations/getsourcezendesksell.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcezendesksunshine.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcezendesksupport.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcezendesktalk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcezenloop.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcezohocrm.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcezoom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getsourcezuora.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getstreamproperties.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/getworkspace.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/initiateoauth.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/listconnections.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/listdestinations.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/listjobs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/listsources.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/listworkspaces.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/patchconnection.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/patchdestination.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/patchsource.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestination.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationawsdatalake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationazureblobstorage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationbigquery.go
delete mode 100755 internal/sdk/pkg/models/operations/putdestinationbigquerydenormalized.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationclickhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationconvex.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationcumulio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationdatabend.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationdatabricks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationdevnull.go
create mode 100644 internal/sdk/pkg/models/operations/putdestinationduckdb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationdynamodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationelasticsearch.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationfirebolt.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationfirestore.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationgcs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationgooglesheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationkeen.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationkinesis.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationlangchain.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationmilvus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationmongodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationmssql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationmysql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationoracle.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationpinecone.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationpostgres.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationpubsub.go
create mode 100644 internal/sdk/pkg/models/operations/putdestinationqdrant.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationredis.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationredshift.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinations3.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinations3glue.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationsftpjson.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationsnowflake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationtimeplus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationtypesense.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationvertica.go
create mode 100644 internal/sdk/pkg/models/operations/putdestinationweaviate.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putdestinationxata.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsource.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceaha.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceaircall.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceairtable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcealloydb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceamazonads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceamazonsellerpartner.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceamazonsqs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceamplitude.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceapifydataset.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceappfollow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceasana.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceauth0.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceawscloudtrail.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceazureblobstorage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceazuretable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcebamboohr.go
delete mode 100755 internal/sdk/pkg/models/operations/putsourcebigcommerce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcebigquery.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcebingads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcebraintree.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcebraze.go
create mode 100644 internal/sdk/pkg/models/operations/putsourcecart.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcechargebee.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcechartmogul.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceclickhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceclickupapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceclockify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceclosecom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcecoda.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcecoinapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcecoinmarketcap.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceconfigcat.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceconfluence.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceconvex.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcedatascope.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcedelighted.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcedixa.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcedockerhub.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcedremio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcedynamodb.go
delete mode 100755 internal/sdk/pkg/models/operations/putsourcee2etestcloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceemailoctopus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceexchangerates.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcefacebookmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcefacebookpages.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcefaker.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcefauna.go
create mode 100644 internal/sdk/pkg/models/operations/putsourcefile.go
delete mode 100755 internal/sdk/pkg/models/operations/putsourcefilesecure.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcefirebolt.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcefreshcaller.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcefreshdesk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcefreshsales.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegainsightpx.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegcs.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegetlago.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegithub.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegitlab.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceglassfrog.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegnews.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegoogleads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegoogleanalyticsdataapi.go
delete mode 100755 internal/sdk/pkg/models/operations/putsourcegoogleanalyticsv4.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegoogledirectory.go
create mode 100644 internal/sdk/pkg/models/operations/putsourcegoogledrive.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegooglepagespeedinsights.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegooglesearchconsole.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegooglesheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegooglewebfonts.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegoogleworkspaceadminreports.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegreenhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcegridly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceharvest.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcehubplanner.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcehubspot.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceinsightly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceinstagram.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceinstatus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceintercom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceip2whois.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceiterable.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcejira.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcek6cloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceklarna.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceklaviyo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcekustomersinger.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcekyve.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcelaunchdarkly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcelemlist.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceleverhiring.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcelinkedinads.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcelinkedinpages.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcelinnworks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcelokalise.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemailchimp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemailgun.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemailjetsms.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemarketo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemetabase.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemicrosoftteams.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemixpanel.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemonday.go
delete mode 100755 internal/sdk/pkg/models/operations/putsourcemongodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemongodbinternalpoc.go
create mode 100644 internal/sdk/pkg/models/operations/putsourcemongodbv2.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemssql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemyhours.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcemysql.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcenetsuite.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcenotion.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcenytimes.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceokta.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceomnisend.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceonesignal.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceoracle.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceorb.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceorbit.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceoutbrainamplify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceoutreach.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepaypaltransaction.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepaystack.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcependo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepersistiq.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepexelsapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepinterest.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepipedrive.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepocket.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepokeapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepolygonstockapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepostgres.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceposthog.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepostmarkapp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceprestashop.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepunkapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcepypi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcequalaroo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcequickbooks.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcerailz.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcerecharge.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcerecreation.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcerecruitee.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcerecurly.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceredshift.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceretently.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcerkicovid.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcerss.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsources3.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesalesforce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesalesloft.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesapfieldglass.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesecoda.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesendgrid.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesendinblue.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesenseforce.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesentry.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesftp.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesftpbulk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceshopify.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceshortio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceslack.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesmaily.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesmartengage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesmartsheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesnapchatmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesnowflake.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesonarcloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcespacexapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesquare.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcestrava.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcestripe.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesurveymonkey.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcesurveysparrow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcetempo.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcetheguardianapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcetiktokmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcetodoist.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcetrello.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcetrustpilot.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcetvmazeschedule.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcetwilio.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcetwiliotaskrouter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcetwitter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcetypeform.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceuscensus.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcevantage.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcewebflow.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcewhiskyhunter.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcewikipediapageviews.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcewoocommerce.go
delete mode 100755 internal/sdk/pkg/models/operations/putsourcexero.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcexkcd.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceyandexmetrica.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceyotpo.go
delete mode 100755 internal/sdk/pkg/models/operations/putsourceyounium.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourceyoutubeanalytics.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcezendeskchat.go
create mode 100644 internal/sdk/pkg/models/operations/putsourcezendesksell.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcezendesksunshine.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcezendesksupport.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcezendesktalk.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcezenloop.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcezohocrm.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcezoom.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/putsourcezuora.go
mode change 100755 => 100644 internal/sdk/pkg/models/operations/updateworkspace.go
create mode 100644 internal/sdk/pkg/models/sdkerrors/sdkerror.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/actortypeenum.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/connectioncreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/connectionpatchrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/connectionresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/connectionschedule.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/connectionscheduleresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/connectionsresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/connectionstatusenum.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/connectionsyncmodeenum.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationawsdatalake.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationawsdatalakecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationawsdatalakeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationawsdatalakeupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationazureblobstorage.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationazureblobstoragecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationazureblobstorageputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationazureblobstorageupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationbigquery.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationbigquerycreaterequest.go
delete mode 100755 internal/sdk/pkg/models/shared/destinationbigquerydenormalized.go
delete mode 100755 internal/sdk/pkg/models/shared/destinationbigquerydenormalizedcreaterequest.go
delete mode 100755 internal/sdk/pkg/models/shared/destinationbigquerydenormalizedputrequest.go
delete mode 100755 internal/sdk/pkg/models/shared/destinationbigquerydenormalizedupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationbigqueryputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationbigqueryupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationclickhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationclickhousecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationclickhouseputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationclickhouseupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationconvex.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationconvexcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationconvexputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationconvexupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationcumulio.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationcumuliocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationcumulioputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationcumulioupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdatabend.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdatabendcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdatabendputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdatabendupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdatabricks.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdatabrickscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdatabricksputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdatabricksupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdevnull.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdevnullcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdevnullputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdevnullupdate.go
create mode 100644 internal/sdk/pkg/models/shared/destinationduckdb.go
create mode 100644 internal/sdk/pkg/models/shared/destinationduckdbcreaterequest.go
create mode 100644 internal/sdk/pkg/models/shared/destinationduckdbputrequest.go
create mode 100644 internal/sdk/pkg/models/shared/destinationduckdbupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdynamodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdynamodbcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdynamodbputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationdynamodbupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationelasticsearch.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationelasticsearchcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationelasticsearchputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationelasticsearchupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationfirebolt.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationfireboltcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationfireboltputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationfireboltupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationfirestore.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationfirestorecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationfirestoreputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationfirestoreupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationgcs.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationgcscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationgcsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationgcsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationgooglesheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationgooglesheetscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationgooglesheetsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationgooglesheetsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationkeen.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationkeencreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationkeenputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationkeenupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationkinesis.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationkinesiscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationkinesisputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationkinesisupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationlangchain.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationlangchaincreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationlangchainputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationlangchainupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmilvus.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmilvuscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmilvusputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmilvusupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmongodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmongodbcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmongodbputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmongodbupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmssql.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmssqlcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmssqlputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmssqlupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmysql.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmysqlcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmysqlputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationmysqlupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationoracle.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationoraclecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationoracleputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationoracleupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpatchrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpinecone.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpineconecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpineconeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpineconeupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpostgres.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpostgrescreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpostgresputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpostgresupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpubsub.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpubsubcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpubsubputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationpubsubupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationputrequest.go
create mode 100644 internal/sdk/pkg/models/shared/destinationqdrant.go
create mode 100644 internal/sdk/pkg/models/shared/destinationqdrantcreaterequest.go
create mode 100644 internal/sdk/pkg/models/shared/destinationqdrantputrequest.go
create mode 100644 internal/sdk/pkg/models/shared/destinationqdrantupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationredis.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationrediscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationredisputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationredisupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationredshift.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationredshiftcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationredshiftputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationredshiftupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinations3.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinations3createrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinations3glue.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinations3gluecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinations3glueputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinations3glueupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinations3putrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinations3update.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationsftpjson.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationsftpjsoncreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationsftpjsonputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationsftpjsonupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationsnowflake.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationsnowflakecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationsnowflakeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationsnowflakeupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationsresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationtimeplus.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationtimepluscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationtimeplusputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationtimeplusupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationtypesense.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationtypesensecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationtypesenseputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationtypesenseupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationvertica.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationverticacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationverticaputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationverticaupdate.go
create mode 100644 internal/sdk/pkg/models/shared/destinationweaviate.go
create mode 100644 internal/sdk/pkg/models/shared/destinationweaviatecreaterequest.go
create mode 100644 internal/sdk/pkg/models/shared/destinationweaviateputrequest.go
create mode 100644 internal/sdk/pkg/models/shared/destinationweaviateupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationxata.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationxatacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationxataputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/destinationxataupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/geographyenum.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/geographyenumnodefault.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/initiateoauthrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/jobcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/jobresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/jobsresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/jobstatusenum.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/jobtypeenum.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/namespacedefinitionenum.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/namespacedefinitionenumnodefault.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/nonbreakingschemaupdatesbehaviorenum.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/nonbreakingschemaupdatesbehaviorenumnodefault.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/oauthcredentialsconfiguration.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/oauthinputconfiguration.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/scheduletypeenum.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/scheduletypewithbasicenum.go
create mode 100644 internal/sdk/pkg/models/shared/schemebasicauth.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/security.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceaha.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceahacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceahaputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceahaupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceaircall.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceaircallcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceaircallputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceaircallupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceairtable.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceairtablecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceairtableputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceairtableupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcealloydb.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcealloydbcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcealloydbputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcealloydbupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonads.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonadscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonadsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonadsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonsellerpartner.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonsellerpartnercreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonsellerpartnerputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonsellerpartnerupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonsqs.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonsqscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonsqsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamazonsqsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamplitude.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamplitudecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamplitudeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceamplitudeupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceapifydataset.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceapifydatasetcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceapifydatasetputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceapifydatasetupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceappfollow.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceappfollowcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceappfollowputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceappfollowupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceasana.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceasanacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceasanaputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceasanaupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceauth0.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceauth0createrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceauth0putrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceauth0update.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceawscloudtrail.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceawscloudtrailcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceawscloudtrailputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceawscloudtrailupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceazureblobstorage.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceazureblobstoragecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceazureblobstorageputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceazureblobstorageupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceazuretable.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceazuretablecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceazuretableputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceazuretableupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebamboohr.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebamboohrcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebamboohrputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebamboohrupdate.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcebigcommerce.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcebigcommercecreaterequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcebigcommerceputrequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcebigcommerceupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebigquery.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebigquerycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebigqueryputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebigqueryupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebingads.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebingadscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebingadsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebingadsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebraintree.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebraintreecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebraintreeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebraintreeupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebraze.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebrazecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebrazeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcebrazeupdate.go
create mode 100644 internal/sdk/pkg/models/shared/sourcecart.go
create mode 100644 internal/sdk/pkg/models/shared/sourcecartcreaterequest.go
create mode 100644 internal/sdk/pkg/models/shared/sourcecartputrequest.go
create mode 100644 internal/sdk/pkg/models/shared/sourcecartupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcechargebee.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcechargebeecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcechargebeeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcechargebeeupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcechartmogul.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcechartmogulcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcechartmogulputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcechartmogulupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclickhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclickhousecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclickhouseputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclickhouseupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclickupapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclickupapicreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclickupapiputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclickupapiupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclockify.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclockifycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclockifyputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclockifyupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclosecom.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclosecomcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclosecomputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceclosecomupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecoda.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecodacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecodaputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecodaupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecoinapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecoinapicreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecoinapiputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecoinapiupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecoinmarketcap.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecoinmarketcapcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecoinmarketcapputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecoinmarketcapupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconfigcat.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconfigcatcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconfigcatputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconfigcatupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconfluence.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconfluencecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconfluenceputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconfluenceupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconvex.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconvexcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconvexputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceconvexupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedatascope.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedatascopecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedatascopeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedatascopeupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedelighted.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedelightedcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedelightedputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedelightedupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedixa.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedixacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedixaputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedixaupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedockerhub.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedockerhubcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedockerhubputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedockerhubupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedremio.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedremiocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedremioputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedremioupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedynamodb.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedynamodbcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedynamodbputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcedynamodbupdate.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcee2etestcloud.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcee2etestcloudcreaterequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcee2etestcloudputrequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcee2etestcloudupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceemailoctopus.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceemailoctopuscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceemailoctopusputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceemailoctopusupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceexchangerates.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceexchangeratescreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceexchangeratesputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceexchangeratesupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefacebookmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefacebookmarketingcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefacebookmarketingputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefacebookmarketingupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefacebookpages.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefacebookpagescreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefacebookpagesputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefacebookpagesupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefaker.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefakercreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefakerputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefakerupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefauna.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefaunacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefaunaputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefaunaupdate.go
create mode 100644 internal/sdk/pkg/models/shared/sourcefile.go
create mode 100644 internal/sdk/pkg/models/shared/sourcefilecreaterequest.go
create mode 100644 internal/sdk/pkg/models/shared/sourcefileputrequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcefilesecure.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcefilesecurecreaterequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcefilesecureputrequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcefilesecureupdate.go
create mode 100644 internal/sdk/pkg/models/shared/sourcefileupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefirebolt.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefireboltcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefireboltputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefireboltupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshcaller.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshcallercreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshcallerputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshcallerupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshdesk.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshdeskcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshdeskputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshdeskupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshsales.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshsalescreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshsalesputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcefreshsalesupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegainsightpx.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegainsightpxcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegainsightpxputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegainsightpxupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegcs.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegcscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegcsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegcsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegetlago.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegetlagocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegetlagoputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegetlagoupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegithub.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegithubcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegithubputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegithubupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegitlab.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegitlabcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegitlabputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegitlabupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceglassfrog.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceglassfrogcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceglassfrogputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceglassfrogupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegnews.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegnewscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegnewsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegnewsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleads.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleadscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleadsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleadsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapicreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiupdate.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4createrequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4putrequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4update.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogledirectory.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogledirectorycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogledirectoryputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogledirectoryupdate.go
create mode 100644 internal/sdk/pkg/models/shared/sourcegoogledrive.go
create mode 100644 internal/sdk/pkg/models/shared/sourcegoogledrivecreaterequest.go
create mode 100644 internal/sdk/pkg/models/shared/sourcegoogledriveputrequest.go
create mode 100644 internal/sdk/pkg/models/shared/sourcegoogledriveupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglepagespeedinsights.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglesearchconsole.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglesearchconsolecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglesearchconsoleputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglesearchconsoleupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglesheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglesheetscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglesheetsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglesheetsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglewebfonts.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglewebfontscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglewebfontsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegooglewebfontsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreports.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegreenhouse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegreenhousecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegreenhouseputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegreenhouseupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegridly.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegridlycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegridlyputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcegridlyupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceharvest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceharvestcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceharvestputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceharvestupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcehubplanner.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcehubplannercreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcehubplannerputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcehubplannerupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcehubspot.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcehubspotcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcehubspotputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcehubspotupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinsightly.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinsightlycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinsightlyputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinsightlyupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinstagram.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinstagramcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinstagramputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinstagramupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinstatus.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinstatuscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinstatusputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceinstatusupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceintercom.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceintercomcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceintercomputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceintercomupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceip2whois.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceip2whoiscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceip2whoisputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceip2whoisupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceiterable.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceiterablecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceiterableputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceiterableupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcejira.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcejiracreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcejiraputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcejiraupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcek6cloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcek6cloudcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcek6cloudputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcek6cloudupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceklarna.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceklarnacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceklarnaputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceklarnaupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceklaviyo.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceklaviyocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceklaviyoputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceklaviyoupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcekustomersinger.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcekustomersingercreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcekustomersingerputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcekustomersingerupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcekyve.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcekyvecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcekyveputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcekyveupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelaunchdarkly.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelaunchdarklycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelaunchdarklyputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelaunchdarklyupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelemlist.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelemlistcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelemlistputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelemlistupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceleverhiring.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceleverhiringcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceleverhiringputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceleverhiringupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinkedinads.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinkedinadscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinkedinadsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinkedinadsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinkedinpages.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinkedinpagescreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinkedinpagesputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinkedinpagesupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinnworks.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinnworkscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinnworksputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelinnworksupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelokalise.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelokalisecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelokaliseputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcelokaliseupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailchimp.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailchimpcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailchimpputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailchimpupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailgun.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailguncreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailgunputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailgunupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailjetsms.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailjetsmscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailjetsmsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemailjetsmsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemarketo.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemarketocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemarketoputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemarketoupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemetabase.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemetabasecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemetabaseputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemetabaseupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemicrosoftteams.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemicrosoftteamscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemicrosoftteamsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemicrosoftteamsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemixpanel.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemixpanelcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemixpanelputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemixpanelupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemonday.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemondaycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemondayputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemondayupdate.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcemongodb.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcemongodbcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemongodbinternalpoc.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemongodbinternalpoccreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemongodbinternalpocputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemongodbinternalpocupdate.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcemongodbputrequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcemongodbupdate.go
create mode 100644 internal/sdk/pkg/models/shared/sourcemongodbv2.go
create mode 100644 internal/sdk/pkg/models/shared/sourcemongodbv2createrequest.go
create mode 100644 internal/sdk/pkg/models/shared/sourcemongodbv2putrequest.go
create mode 100644 internal/sdk/pkg/models/shared/sourcemongodbv2update.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemssql.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemssqlcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemssqlputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemssqlupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemyhours.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemyhourscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemyhoursputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemyhoursupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemysql.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemysqlcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemysqlputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcemysqlupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenetsuite.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenetsuitecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenetsuiteputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenetsuiteupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenotion.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenotioncreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenotionputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenotionupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenytimes.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenytimescreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenytimesputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcenytimesupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceokta.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoktacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoktaputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoktaupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceomnisend.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceomnisendcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceomnisendputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceomnisendupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceonesignal.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceonesignalcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceonesignalputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceonesignalupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoracle.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoraclecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoracleputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoracleupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceorb.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceorbcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceorbit.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceorbitcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceorbitputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceorbitupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceorbputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceorbupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoutbrainamplify.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoutbrainamplifycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoutbrainamplifyputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoutbrainamplifyupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoutreach.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoutreachcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoutreachputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceoutreachupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepatchrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepaypaltransaction.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepaypaltransactioncreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepaypaltransactionputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepaypaltransactionupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepaystack.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepaystackcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepaystackputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepaystackupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcependo.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcependocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcependoputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcependoupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepersistiq.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepersistiqcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepersistiqputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepersistiqupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepexelsapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepexelsapicreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepexelsapiputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepexelsapiupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepinterest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepinterestcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepinterestputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepinterestupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepipedrive.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepipedrivecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepipedriveputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepipedriveupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepocket.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepocketcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepocketputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepocketupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepokeapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepokeapicreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepokeapiputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepokeapiupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepolygonstockapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepolygonstockapicreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepolygonstockapiputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepolygonstockapiupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepostgres.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepostgrescreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepostgresputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepostgresupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceposthog.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceposthogcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceposthogputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceposthogupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepostmarkapp.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepostmarkappcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepostmarkappputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepostmarkappupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceprestashop.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceprestashopcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceprestashopputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceprestashopupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepunkapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepunkapicreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepunkapiputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepunkapiupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepypi.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepypicreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepypiputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcepypiupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcequalaroo.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcequalaroocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcequalarooputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcequalarooupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcequickbooks.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcequickbookscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcequickbooksputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcequickbooksupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerailz.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerailzcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerailzputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerailzupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecharge.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerechargecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerechargeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerechargeupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecreation.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecreationcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecreationputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecreationupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecruitee.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecruiteecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecruiteeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecruiteeupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecurly.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecurlycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecurlyputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerecurlyupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceredshift.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceredshiftcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceredshiftputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceredshiftupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceretently.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceretentlycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceretentlyputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceretentlyupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerkicovid.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerkicovidcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerkicovidputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerkicovidupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerss.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcersscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerssputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcerssupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sources3.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sources3createrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sources3putrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sources3update.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesalesforce.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesalesforcecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesalesforceputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesalesforceupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesalesloft.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesalesloftcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesalesloftputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesalesloftupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesapfieldglass.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesapfieldglasscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesapfieldglassputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesapfieldglassupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesecoda.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesecodacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesecodaputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesecodaupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesendgrid.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesendgridcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesendgridputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesendgridupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesendinblue.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesendinbluecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesendinblueputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesendinblueupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesenseforce.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesenseforcecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesenseforceputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesenseforceupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesentry.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesentrycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesentryputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesentryupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesftp.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesftpbulk.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesftpbulkcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesftpbulkputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesftpbulkupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesftpcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesftpputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesftpupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceshopify.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceshopifycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceshopifyputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceshopifyupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceshortio.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceshortiocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceshortioputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceshortioupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceslack.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceslackcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceslackputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceslackupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmaily.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmailycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmailyputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmailyupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmartengage.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmartengagecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmartengageputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmartengageupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmartsheets.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmartsheetscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmartsheetsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesmartsheetsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesnapchatmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesnapchatmarketingcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesnapchatmarketingputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesnapchatmarketingupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesnowflake.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesnowflakecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesnowflakeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesnowflakeupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesonarcloud.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesonarcloudcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesonarcloudputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesonarcloudupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcespacexapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcespacexapicreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcespacexapiputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcespacexapiupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesquare.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesquarecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesquareputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesquareupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcestrava.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcestravacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcestravaputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcestravaupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcestripe.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcestripecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcestripeputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcestripeupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesurveymonkey.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesurveymonkeycreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesurveymonkeyputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesurveymonkeyupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesurveysparrow.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesurveysparrowcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesurveysparrowputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcesurveysparrowupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetempo.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetempocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetempoputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetempoupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetheguardianapi.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetheguardianapicreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetheguardianapiputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetheguardianapiupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetiktokmarketing.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetiktokmarketingcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetiktokmarketingputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetiktokmarketingupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetodoist.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetodoistcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetodoistputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetodoistupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetrello.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetrellocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetrelloputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetrelloupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetrustpilot.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetrustpilotcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetrustpilotputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetrustpilotupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetvmazeschedule.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetvmazeschedulecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetvmazescheduleputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetvmazescheduleupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwilio.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwiliocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwilioputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwiliotaskrouter.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwiliotaskroutercreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwiliotaskrouterputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwiliotaskrouterupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwilioupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwitter.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwittercreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwitterputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetwitterupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetypeform.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetypeformcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetypeformputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcetypeformupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceuscensus.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceuscensuscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceuscensusputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceuscensusupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcevantage.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcevantagecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcevantageputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcevantageupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewebflow.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewebflowcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewebflowputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewebflowupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewhiskyhunter.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewhiskyhuntercreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewhiskyhunterputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewhiskyhunterupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewikipediapageviews.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewikipediapageviewscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewikipediapageviewsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewikipediapageviewsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewoocommerce.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewoocommercecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewoocommerceputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcewoocommerceupdate.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcexero.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcexerocreaterequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcexeroputrequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourcexeroupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcexkcd.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcexkcdcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcexkcdputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcexkcdupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyandexmetrica.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyandexmetricacreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyandexmetricaputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyandexmetricaupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyotpo.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyotpocreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyotpoputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyotpoupdate.go
delete mode 100755 internal/sdk/pkg/models/shared/sourceyounium.go
delete mode 100755 internal/sdk/pkg/models/shared/sourceyouniumcreaterequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourceyouniumputrequest.go
delete mode 100755 internal/sdk/pkg/models/shared/sourceyouniumupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyoutubeanalytics.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyoutubeanalyticscreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyoutubeanalyticsputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourceyoutubeanalyticsupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendeskchat.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendeskchatcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendeskchatputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendeskchatupdate.go
create mode 100644 internal/sdk/pkg/models/shared/sourcezendesksell.go
create mode 100644 internal/sdk/pkg/models/shared/sourcezendesksellcreaterequest.go
create mode 100644 internal/sdk/pkg/models/shared/sourcezendesksellputrequest.go
create mode 100644 internal/sdk/pkg/models/shared/sourcezendesksellupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesksunshine.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesksunshinecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesksunshineputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesksunshineupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesksupport.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesksupportcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesksupportputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesksupportupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesktalk.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesktalkcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesktalkputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezendesktalkupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezenloop.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezenloopcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezenloopputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezenloopupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezohocrm.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezohocrmcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezohocrmputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezohocrmupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezoom.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezoomcreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezoomputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezoomupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezuora.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezuoracreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezuoraputrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/sourcezuoraupdate.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/streamconfiguration.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/streamconfigurations.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/streamproperties.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/streampropertiesresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/workspacecreaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/workspaceoauthcredentialsrequest.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/workspaceresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/workspacesresponse.go
mode change 100755 => 100644 internal/sdk/pkg/models/shared/workspaceupdaterequest.go
mode change 100755 => 100644 internal/sdk/pkg/types/bigint.go
mode change 100755 => 100644 internal/sdk/pkg/types/date.go
mode change 100755 => 100644 internal/sdk/pkg/types/datetime.go
create mode 100644 internal/sdk/pkg/types/decimal.go
create mode 100644 internal/sdk/pkg/types/pointers.go
mode change 100755 => 100644 internal/sdk/pkg/utils/contenttype.go
mode change 100755 => 100644 internal/sdk/pkg/utils/form.go
mode change 100755 => 100644 internal/sdk/pkg/utils/headers.go
create mode 100644 internal/sdk/pkg/utils/json.go
mode change 100755 => 100644 internal/sdk/pkg/utils/pathparams.go
mode change 100755 => 100644 internal/sdk/pkg/utils/queryparams.go
mode change 100755 => 100644 internal/sdk/pkg/utils/requestbody.go
mode change 100755 => 100644 internal/sdk/pkg/utils/retries.go
mode change 100755 => 100644 internal/sdk/pkg/utils/security.go
mode change 100755 => 100644 internal/sdk/pkg/utils/utils.go
mode change 100755 => 100644 internal/sdk/sdk.go
mode change 100755 => 100644 internal/sdk/sources.go
mode change 100755 => 100644 internal/sdk/streams.go
mode change 100755 => 100644 internal/sdk/workspaces.go
mode change 100755 => 100644 internal/validators/DateValidator.go
mode change 100755 => 100644 internal/validators/ExactlyOneChild.go
mode change 100755 => 100644 internal/validators/JSONParseValidator.go
mode change 100755 => 100644 internal/validators/RFC3339Validator.go
mode change 100755 => 100644 main.go
mode change 100755 => 100644 terraform-registry-manifest.json
mode change 100755 => 100644 tools/tools.go
diff --git a/.gitattributes b/.gitattributes
old mode 100755
new mode 100644
diff --git a/README.md b/README.md
index edacfb928..a5fed2bef 100755
--- a/README.md
+++ b/README.md
@@ -24,7 +24,7 @@ terraform {
required_providers {
airbyte = {
source = "airbytehq/airbyte"
- version = "0.3.4"
+ version = "0.3.5"
}
}
}
@@ -36,12 +36,14 @@ provider "airbyte" {
-## Testing the provider locally
+### Testing the provider locally
Should you want to validate a change locally, the `--debug` flag allows you to execute the provider against a terraform instance locally.
This also allows for debuggers (e.g. delve) to be attached to the provider.
+### Example
+
```sh
go run main.go --debug
# Copy the TF_REATTACH_PROVIDERS env var
@@ -56,6 +58,10 @@ TF_REATTACH_PROVIDERS=... terraform apply
+
+
+
+
Terraform allows you to use local provider builds by setting a `dev_overrides` block in a configuration file called `.terraformrc`. This block overrides all other configured installation methods.
Terraform searches for the `.terraformrc` file in your home directory and applies any configuration settings you set.
diff --git a/USAGE.md b/USAGE.md
old mode 100755
new mode 100644
index d45e8dc2b..2ffc222a8
--- a/USAGE.md
+++ b/USAGE.md
@@ -1,10 +1,4 @@
-## Testing the provider locally
-
-Should you want to validate a change locally, the `--debug` flag allows you to execute the provider against a terraform instance locally.
-
-This also allows for debuggers (e.g. delve) to be attached to the provider.
-
```sh
go run main.go --debug
# Copy the TF_REATTACH_PROVIDERS env var
diff --git a/airbyte.yaml b/airbyte.yaml
index ba26f4a2c..6070730b6 100644
--- a/airbyte.yaml
+++ b/airbyte.yaml
@@ -2484,13 +2484,13 @@ paths:
type: "string"
in: "path"
required: true
- /sources#Bigcommerce:
+ /sources#Bigquery:
post:
requestBody:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceBigcommerceCreateRequest"
+ $ref: "#/components/schemas/SourceBigqueryCreateRequest"
tags:
- "Sources"
responses:
@@ -2504,14 +2504,14 @@ paths:
description: "Invalid data"
"403":
description: "Not allowed"
- operationId: "createSourceBigcommerce"
+ operationId: "createSourceBigquery"
summary: "Create a source"
description:
"Creates a source given a name, workspace id, and a json blob containing\
\ the configuration for the source."
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Bigcommerce#create
- /sources/{sourceId}#Bigcommerce:
+ x-speakeasy-entity-operation: Source_Bigquery#create
+ /sources/{sourceId}#Bigquery:
get:
tags:
- "Sources"
@@ -2526,10 +2526,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "getSourceBigcommerce"
+ operationId: "getSourceBigquery"
summary: "Get Source details"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Bigcommerce#read
+ x-speakeasy-entity-operation: Source_Bigquery#read
put:
tags:
- "Sources"
@@ -2537,7 +2537,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceBigcommercePutRequest"
+ $ref: "#/components/schemas/SourceBigqueryPutRequest"
responses:
"2XX":
description: "The resource was updated successfully"
@@ -2545,10 +2545,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "putSourceBigcommerce"
+ operationId: "putSourceBigquery"
summary: "Update a Source fully"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Bigcommerce#update
+ x-speakeasy-entity-operation: Source_Bigquery#update
delete:
tags:
- "Sources"
@@ -2559,10 +2559,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "deleteSourceBigcommerce"
+ operationId: "deleteSourceBigquery"
summary: "Delete a Source"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Bigcommerce#delete
+ x-speakeasy-entity-operation: Source_Bigquery#delete
parameters:
- name: "sourceId"
schema:
@@ -2570,13 +2570,13 @@ paths:
type: "string"
in: "path"
required: true
- /sources#Bigquery:
+ /sources#BingAds:
post:
requestBody:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceBigqueryCreateRequest"
+ $ref: "#/components/schemas/SourceBingAdsCreateRequest"
tags:
- "Sources"
responses:
@@ -2590,14 +2590,14 @@ paths:
description: "Invalid data"
"403":
description: "Not allowed"
- operationId: "createSourceBigquery"
+ operationId: "createSourceBingAds"
summary: "Create a source"
description:
"Creates a source given a name, workspace id, and a json blob containing\
\ the configuration for the source."
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Bigquery#create
- /sources/{sourceId}#Bigquery:
+ x-speakeasy-entity-operation: Source_BingAds#create
+ /sources/{sourceId}#BingAds:
get:
tags:
- "Sources"
@@ -2612,10 +2612,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "getSourceBigquery"
+ operationId: "getSourceBingAds"
summary: "Get Source details"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Bigquery#read
+ x-speakeasy-entity-operation: Source_BingAds#read
put:
tags:
- "Sources"
@@ -2623,7 +2623,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceBigqueryPutRequest"
+ $ref: "#/components/schemas/SourceBingAdsPutRequest"
responses:
"2XX":
description: "The resource was updated successfully"
@@ -2631,10 +2631,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "putSourceBigquery"
+ operationId: "putSourceBingAds"
summary: "Update a Source fully"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Bigquery#update
+ x-speakeasy-entity-operation: Source_BingAds#update
delete:
tags:
- "Sources"
@@ -2645,10 +2645,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "deleteSourceBigquery"
+ operationId: "deleteSourceBingAds"
summary: "Delete a Source"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Bigquery#delete
+ x-speakeasy-entity-operation: Source_BingAds#delete
parameters:
- name: "sourceId"
schema:
@@ -2656,13 +2656,13 @@ paths:
type: "string"
in: "path"
required: true
- /sources#BingAds:
+ /sources#Braintree:
post:
requestBody:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceBingAdsCreateRequest"
+ $ref: "#/components/schemas/SourceBraintreeCreateRequest"
tags:
- "Sources"
responses:
@@ -2676,14 +2676,14 @@ paths:
description: "Invalid data"
"403":
description: "Not allowed"
- operationId: "createSourceBingAds"
+ operationId: "createSourceBraintree"
summary: "Create a source"
description:
"Creates a source given a name, workspace id, and a json blob containing\
\ the configuration for the source."
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_BingAds#create
- /sources/{sourceId}#BingAds:
+ x-speakeasy-entity-operation: Source_Braintree#create
+ /sources/{sourceId}#Braintree:
get:
tags:
- "Sources"
@@ -2698,10 +2698,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "getSourceBingAds"
+ operationId: "getSourceBraintree"
summary: "Get Source details"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_BingAds#read
+ x-speakeasy-entity-operation: Source_Braintree#read
put:
tags:
- "Sources"
@@ -2709,7 +2709,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceBingAdsPutRequest"
+ $ref: "#/components/schemas/SourceBraintreePutRequest"
responses:
"2XX":
description: "The resource was updated successfully"
@@ -2717,10 +2717,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "putSourceBingAds"
+ operationId: "putSourceBraintree"
summary: "Update a Source fully"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_BingAds#update
+ x-speakeasy-entity-operation: Source_Braintree#update
delete:
tags:
- "Sources"
@@ -2731,10 +2731,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "deleteSourceBingAds"
+ operationId: "deleteSourceBraintree"
summary: "Delete a Source"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_BingAds#delete
+ x-speakeasy-entity-operation: Source_Braintree#delete
parameters:
- name: "sourceId"
schema:
@@ -2742,13 +2742,13 @@ paths:
type: "string"
in: "path"
required: true
- /sources#Braintree:
+ /sources#Braze:
post:
requestBody:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceBraintreeCreateRequest"
+ $ref: "#/components/schemas/SourceBrazeCreateRequest"
tags:
- "Sources"
responses:
@@ -2762,14 +2762,14 @@ paths:
description: "Invalid data"
"403":
description: "Not allowed"
- operationId: "createSourceBraintree"
+ operationId: "createSourceBraze"
summary: "Create a source"
description:
"Creates a source given a name, workspace id, and a json blob containing\
\ the configuration for the source."
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Braintree#create
- /sources/{sourceId}#Braintree:
+ x-speakeasy-entity-operation: Source_Braze#create
+ /sources/{sourceId}#Braze:
get:
tags:
- "Sources"
@@ -2784,10 +2784,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "getSourceBraintree"
+ operationId: "getSourceBraze"
summary: "Get Source details"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Braintree#read
+ x-speakeasy-entity-operation: Source_Braze#read
put:
tags:
- "Sources"
@@ -2795,7 +2795,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceBraintreePutRequest"
+ $ref: "#/components/schemas/SourceBrazePutRequest"
responses:
"2XX":
description: "The resource was updated successfully"
@@ -2803,10 +2803,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "putSourceBraintree"
+ operationId: "putSourceBraze"
summary: "Update a Source fully"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Braintree#update
+ x-speakeasy-entity-operation: Source_Braze#update
delete:
tags:
- "Sources"
@@ -2817,10 +2817,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "deleteSourceBraintree"
+ operationId: "deleteSourceBraze"
summary: "Delete a Source"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Braintree#delete
+ x-speakeasy-entity-operation: Source_Braze#delete
parameters:
- name: "sourceId"
schema:
@@ -2828,13 +2828,13 @@ paths:
type: "string"
in: "path"
required: true
- /sources#Braze:
+ /sources#Cart:
post:
requestBody:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceBrazeCreateRequest"
+ $ref: "#/components/schemas/SourceCartCreateRequest"
tags:
- "Sources"
responses:
@@ -2848,14 +2848,14 @@ paths:
description: "Invalid data"
"403":
description: "Not allowed"
- operationId: "createSourceBraze"
+ operationId: "createSourceCart"
summary: "Create a source"
description:
"Creates a source given a name, workspace id, and a json blob containing\
\ the configuration for the source."
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Braze#create
- /sources/{sourceId}#Braze:
+ x-speakeasy-entity-operation: Source_Cart#create
+ /sources/{sourceId}#Cart:
get:
tags:
- "Sources"
@@ -2870,10 +2870,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "getSourceBraze"
+ operationId: "getSourceCart"
summary: "Get Source details"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Braze#read
+ x-speakeasy-entity-operation: Source_Cart#read
put:
tags:
- "Sources"
@@ -2881,7 +2881,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceBrazePutRequest"
+ $ref: "#/components/schemas/SourceCartPutRequest"
responses:
"2XX":
description: "The resource was updated successfully"
@@ -2889,10 +2889,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "putSourceBraze"
+ operationId: "putSourceCart"
summary: "Update a Source fully"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Braze#update
+ x-speakeasy-entity-operation: Source_Cart#update
delete:
tags:
- "Sources"
@@ -2903,10 +2903,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "deleteSourceBraze"
+ operationId: "deleteSourceCart"
summary: "Delete a Source"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Braze#delete
+ x-speakeasy-entity-operation: Source_Cart#delete
parameters:
- name: "sourceId"
schema:
@@ -4462,92 +4462,6 @@ paths:
type: "string"
in: "path"
required: true
- /sources#E2eTestCloud:
- post:
- requestBody:
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceE2eTestCloudCreateRequest"
- tags:
- - "Sources"
- responses:
- "200":
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceResponse"
- description: "Successful operation"
- "400":
- description: "Invalid data"
- "403":
- description: "Not allowed"
- operationId: "createSourceE2eTestCloud"
- summary: "Create a source"
- description:
- "Creates a source given a name, workspace id, and a json blob containing\
- \ the configuration for the source."
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_E2eTestCloud#create
- /sources/{sourceId}#E2eTestCloud:
- get:
- tags:
- - "Sources"
- responses:
- "200":
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceResponse"
- description: "Get a Source by the id in the path."
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "getSourceE2eTestCloud"
- summary: "Get Source details"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_E2eTestCloud#read
- put:
- tags:
- - "Sources"
- requestBody:
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceE2eTestCloudPutRequest"
- responses:
- "2XX":
- description: "The resource was updated successfully"
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "putSourceE2eTestCloud"
- summary: "Update a Source fully"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_E2eTestCloud#update
- delete:
- tags:
- - "Sources"
- responses:
- "2XX":
- description: "The resource was deleted successfully"
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "deleteSourceE2eTestCloud"
- summary: "Delete a Source"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_E2eTestCloud#delete
- parameters:
- - name: "sourceId"
- schema:
- format: "UUID"
- type: "string"
- in: "path"
- required: true
/sources#Emailoctopus:
post:
requestBody:
@@ -5064,13 +4978,13 @@ paths:
type: "string"
in: "path"
required: true
- /sources#FileSecure:
+ /sources#File:
post:
requestBody:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceFileSecureCreateRequest"
+ $ref: "#/components/schemas/SourceFileCreateRequest"
tags:
- "Sources"
responses:
@@ -5084,14 +4998,14 @@ paths:
description: "Invalid data"
"403":
description: "Not allowed"
- operationId: "createSourceFileSecure"
+ operationId: "createSourceFile"
summary: "Create a source"
description:
"Creates a source given a name, workspace id, and a json blob containing\
\ the configuration for the source."
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_FileSecure#create
- /sources/{sourceId}#FileSecure:
+ x-speakeasy-entity-operation: Source_File#create
+ /sources/{sourceId}#File:
get:
tags:
- "Sources"
@@ -5106,10 +5020,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "getSourceFileSecure"
+ operationId: "getSourceFile"
summary: "Get Source details"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_FileSecure#read
+ x-speakeasy-entity-operation: Source_File#read
put:
tags:
- "Sources"
@@ -5117,7 +5031,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceFileSecurePutRequest"
+ $ref: "#/components/schemas/SourceFilePutRequest"
responses:
"2XX":
description: "The resource was updated successfully"
@@ -5125,10 +5039,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "putSourceFileSecure"
+ operationId: "putSourceFile"
summary: "Update a Source fully"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_FileSecure#update
+ x-speakeasy-entity-operation: Source_File#update
delete:
tags:
- "Sources"
@@ -5139,10 +5053,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "deleteSourceFileSecure"
+ operationId: "deleteSourceFile"
summary: "Delete a Source"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_FileSecure#delete
+ x-speakeasy-entity-operation: Source_File#delete
parameters:
- name: "sourceId"
schema:
@@ -6268,13 +6182,13 @@ paths:
type: "string"
in: "path"
required: true
- /sources#GoogleAnalyticsV4:
+ /sources#GoogleDirectory:
post:
requestBody:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceGoogleAnalyticsV4CreateRequest"
+ $ref: "#/components/schemas/SourceGoogleDirectoryCreateRequest"
tags:
- "Sources"
responses:
@@ -6288,14 +6202,14 @@ paths:
description: "Invalid data"
"403":
description: "Not allowed"
- operationId: "createSourceGoogleAnalyticsV4"
+ operationId: "createSourceGoogleDirectory"
summary: "Create a source"
description:
"Creates a source given a name, workspace id, and a json blob containing\
\ the configuration for the source."
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_GoogleAnalyticsV4#create
- /sources/{sourceId}#GoogleAnalyticsV4:
+ x-speakeasy-entity-operation: Source_GoogleDirectory#create
+ /sources/{sourceId}#GoogleDirectory:
get:
tags:
- "Sources"
@@ -6310,10 +6224,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "getSourceGoogleAnalyticsV4"
+ operationId: "getSourceGoogleDirectory"
summary: "Get Source details"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_GoogleAnalyticsV4#read
+ x-speakeasy-entity-operation: Source_GoogleDirectory#read
put:
tags:
- "Sources"
@@ -6321,7 +6235,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceGoogleAnalyticsV4PutRequest"
+ $ref: "#/components/schemas/SourceGoogleDirectoryPutRequest"
responses:
"2XX":
description: "The resource was updated successfully"
@@ -6329,10 +6243,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "putSourceGoogleAnalyticsV4"
+ operationId: "putSourceGoogleDirectory"
summary: "Update a Source fully"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_GoogleAnalyticsV4#update
+ x-speakeasy-entity-operation: Source_GoogleDirectory#update
delete:
tags:
- "Sources"
@@ -6343,10 +6257,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "deleteSourceGoogleAnalyticsV4"
+ operationId: "deleteSourceGoogleDirectory"
summary: "Delete a Source"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_GoogleAnalyticsV4#delete
+ x-speakeasy-entity-operation: Source_GoogleDirectory#delete
parameters:
- name: "sourceId"
schema:
@@ -6354,13 +6268,13 @@ paths:
type: "string"
in: "path"
required: true
- /sources#GoogleDirectory:
+ /sources#GoogleDrive:
post:
requestBody:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceGoogleDirectoryCreateRequest"
+ $ref: "#/components/schemas/SourceGoogleDriveCreateRequest"
tags:
- "Sources"
responses:
@@ -6374,14 +6288,14 @@ paths:
description: "Invalid data"
"403":
description: "Not allowed"
- operationId: "createSourceGoogleDirectory"
+ operationId: "createSourceGoogleDrive"
summary: "Create a source"
description:
"Creates a source given a name, workspace id, and a json blob containing\
\ the configuration for the source."
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_GoogleDirectory#create
- /sources/{sourceId}#GoogleDirectory:
+ x-speakeasy-entity-operation: Source_GoogleDrive#create
+ /sources/{sourceId}#GoogleDrive:
get:
tags:
- "Sources"
@@ -6396,10 +6310,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "getSourceGoogleDirectory"
+ operationId: "getSourceGoogleDrive"
summary: "Get Source details"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_GoogleDirectory#read
+ x-speakeasy-entity-operation: Source_GoogleDrive#read
put:
tags:
- "Sources"
@@ -6407,7 +6321,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceGoogleDirectoryPutRequest"
+ $ref: "#/components/schemas/SourceGoogleDrivePutRequest"
responses:
"2XX":
description: "The resource was updated successfully"
@@ -6415,10 +6329,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "putSourceGoogleDirectory"
+ operationId: "putSourceGoogleDrive"
summary: "Update a Source fully"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_GoogleDirectory#update
+ x-speakeasy-entity-operation: Source_GoogleDrive#update
delete:
tags:
- "Sources"
@@ -6429,10 +6343,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "deleteSourceGoogleDirectory"
+ operationId: "deleteSourceGoogleDrive"
summary: "Delete a Source"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_GoogleDirectory#delete
+ x-speakeasy-entity-operation: Source_GoogleDrive#delete
parameters:
- name: "sourceId"
schema:
@@ -9622,92 +9536,6 @@ paths:
type: "string"
in: "path"
required: true
- /sources#Mongodb:
- post:
- requestBody:
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceMongodbCreateRequest"
- tags:
- - "Sources"
- responses:
- "200":
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceResponse"
- description: "Successful operation"
- "400":
- description: "Invalid data"
- "403":
- description: "Not allowed"
- operationId: "createSourceMongodb"
- summary: "Create a source"
- description:
- "Creates a source given a name, workspace id, and a json blob containing\
- \ the configuration for the source."
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Mongodb#create
- /sources/{sourceId}#Mongodb:
- get:
- tags:
- - "Sources"
- responses:
- "200":
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceResponse"
- description: "Get a Source by the id in the path."
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "getSourceMongodb"
- summary: "Get Source details"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Mongodb#read
- put:
- tags:
- - "Sources"
- requestBody:
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceMongodbPutRequest"
- responses:
- "2XX":
- description: "The resource was updated successfully"
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "putSourceMongodb"
- summary: "Update a Source fully"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Mongodb#update
- delete:
- tags:
- - "Sources"
- responses:
- "2XX":
- description: "The resource was deleted successfully"
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "deleteSourceMongodb"
- summary: "Delete a Source"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Mongodb#delete
- parameters:
- - name: "sourceId"
- schema:
- format: "UUID"
- type: "string"
- in: "path"
- required: true
/sources#MongodbInternalPoc:
post:
requestBody:
@@ -9794,6 +9622,92 @@ paths:
type: "string"
in: "path"
required: true
+ /sources#MongodbV2:
+ post:
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/SourceMongodbV2CreateRequest"
+ tags:
+ - "Sources"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/SourceResponse"
+ description: "Successful operation"
+ "400":
+ description: "Invalid data"
+ "403":
+ description: "Not allowed"
+ operationId: "createSourceMongodbV2"
+ summary: "Create a source"
+ description:
+ "Creates a source given a name, workspace id, and a json blob containing\
+ \ the configuration for the source."
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Source_MongodbV2#create
+ /sources/{sourceId}#MongodbV2:
+ get:
+ tags:
+ - "Sources"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/SourceResponse"
+ description: "Get a Source by the id in the path."
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "getSourceMongodbV2"
+ summary: "Get Source details"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Source_MongodbV2#read
+ put:
+ tags:
+ - "Sources"
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/SourceMongodbV2PutRequest"
+ responses:
+ "2XX":
+ description: "The resource was updated successfully"
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "putSourceMongodbV2"
+ summary: "Update a Source fully"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Source_MongodbV2#update
+ delete:
+ tags:
+ - "Sources"
+ responses:
+ "2XX":
+ description: "The resource was deleted successfully"
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "deleteSourceMongodbV2"
+ summary: "Delete a Source"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Source_MongodbV2#delete
+ parameters:
+ - name: "sourceId"
+ schema:
+ format: "UUID"
+ type: "string"
+ in: "path"
+ required: true
/sources#Mssql:
post:
requestBody:
@@ -17018,92 +16932,6 @@ paths:
type: "string"
in: "path"
required: true
- /sources#Xero:
- post:
- requestBody:
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceXeroCreateRequest"
- tags:
- - "Sources"
- responses:
- "200":
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceResponse"
- description: "Successful operation"
- "400":
- description: "Invalid data"
- "403":
- description: "Not allowed"
- operationId: "createSourceXero"
- summary: "Create a source"
- description:
- "Creates a source given a name, workspace id, and a json blob containing\
- \ the configuration for the source."
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Xero#create
- /sources/{sourceId}#Xero:
- get:
- tags:
- - "Sources"
- responses:
- "200":
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceResponse"
- description: "Get a Source by the id in the path."
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "getSourceXero"
- summary: "Get Source details"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Xero#read
- put:
- tags:
- - "Sources"
- requestBody:
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/SourceXeroPutRequest"
- responses:
- "2XX":
- description: "The resource was updated successfully"
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "putSourceXero"
- summary: "Update a Source fully"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Xero#update
- delete:
- tags:
- - "Sources"
- responses:
- "2XX":
- description: "The resource was deleted successfully"
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "deleteSourceXero"
- summary: "Delete a Source"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Xero#delete
- parameters:
- - name: "sourceId"
- schema:
- format: "UUID"
- type: "string"
- in: "path"
- required: true
/sources#Xkcd:
post:
requestBody:
@@ -17362,13 +17190,13 @@ paths:
type: "string"
in: "path"
required: true
- /sources#Younium:
+ /sources#YoutubeAnalytics:
post:
requestBody:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceYouniumCreateRequest"
+ $ref: "#/components/schemas/SourceYoutubeAnalyticsCreateRequest"
tags:
- "Sources"
responses:
@@ -17382,14 +17210,14 @@ paths:
description: "Invalid data"
"403":
description: "Not allowed"
- operationId: "createSourceYounium"
+ operationId: "createSourceYoutubeAnalytics"
summary: "Create a source"
description:
"Creates a source given a name, workspace id, and a json blob containing\
\ the configuration for the source."
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Younium#create
- /sources/{sourceId}#Younium:
+ x-speakeasy-entity-operation: Source_YoutubeAnalytics#create
+ /sources/{sourceId}#YoutubeAnalytics:
get:
tags:
- "Sources"
@@ -17404,10 +17232,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "getSourceYounium"
+ operationId: "getSourceYoutubeAnalytics"
summary: "Get Source details"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Younium#read
+ x-speakeasy-entity-operation: Source_YoutubeAnalytics#read
put:
tags:
- "Sources"
@@ -17415,7 +17243,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceYouniumPutRequest"
+ $ref: "#/components/schemas/SourceYoutubeAnalyticsPutRequest"
responses:
"2XX":
description: "The resource was updated successfully"
@@ -17423,10 +17251,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "putSourceYounium"
+ operationId: "putSourceYoutubeAnalytics"
summary: "Update a Source fully"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Younium#update
+ x-speakeasy-entity-operation: Source_YoutubeAnalytics#update
delete:
tags:
- "Sources"
@@ -17437,10 +17265,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "deleteSourceYounium"
+ operationId: "deleteSourceYoutubeAnalytics"
summary: "Delete a Source"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_Younium#delete
+ x-speakeasy-entity-operation: Source_YoutubeAnalytics#delete
parameters:
- name: "sourceId"
schema:
@@ -17448,13 +17276,13 @@ paths:
type: "string"
in: "path"
required: true
- /sources#YoutubeAnalytics:
+ /sources#ZendeskChat:
post:
requestBody:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceYoutubeAnalyticsCreateRequest"
+ $ref: "#/components/schemas/SourceZendeskChatCreateRequest"
tags:
- "Sources"
responses:
@@ -17468,14 +17296,14 @@ paths:
description: "Invalid data"
"403":
description: "Not allowed"
- operationId: "createSourceYoutubeAnalytics"
+ operationId: "createSourceZendeskChat"
summary: "Create a source"
description:
"Creates a source given a name, workspace id, and a json blob containing\
\ the configuration for the source."
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_YoutubeAnalytics#create
- /sources/{sourceId}#YoutubeAnalytics:
+ x-speakeasy-entity-operation: Source_ZendeskChat#create
+ /sources/{sourceId}#ZendeskChat:
get:
tags:
- "Sources"
@@ -17490,10 +17318,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "getSourceYoutubeAnalytics"
+ operationId: "getSourceZendeskChat"
summary: "Get Source details"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_YoutubeAnalytics#read
+ x-speakeasy-entity-operation: Source_ZendeskChat#read
put:
tags:
- "Sources"
@@ -17501,7 +17329,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceYoutubeAnalyticsPutRequest"
+ $ref: "#/components/schemas/SourceZendeskChatPutRequest"
responses:
"2XX":
description: "The resource was updated successfully"
@@ -17509,10 +17337,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "putSourceYoutubeAnalytics"
+ operationId: "putSourceZendeskChat"
summary: "Update a Source fully"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_YoutubeAnalytics#update
+ x-speakeasy-entity-operation: Source_ZendeskChat#update
delete:
tags:
- "Sources"
@@ -17523,10 +17351,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "deleteSourceYoutubeAnalytics"
+ operationId: "deleteSourceZendeskChat"
summary: "Delete a Source"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_YoutubeAnalytics#delete
+ x-speakeasy-entity-operation: Source_ZendeskChat#delete
parameters:
- name: "sourceId"
schema:
@@ -17534,13 +17362,13 @@ paths:
type: "string"
in: "path"
required: true
- /sources#ZendeskChat:
+ /sources#ZendeskSell:
post:
requestBody:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceZendeskChatCreateRequest"
+ $ref: "#/components/schemas/SourceZendeskSellCreateRequest"
tags:
- "Sources"
responses:
@@ -17554,14 +17382,14 @@ paths:
description: "Invalid data"
"403":
description: "Not allowed"
- operationId: "createSourceZendeskChat"
+ operationId: "createSourceZendeskSell"
summary: "Create a source"
description:
"Creates a source given a name, workspace id, and a json blob containing\
\ the configuration for the source."
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_ZendeskChat#create
- /sources/{sourceId}#ZendeskChat:
+ x-speakeasy-entity-operation: Source_ZendeskSell#create
+ /sources/{sourceId}#ZendeskSell:
get:
tags:
- "Sources"
@@ -17576,10 +17404,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "getSourceZendeskChat"
+ operationId: "getSourceZendeskSell"
summary: "Get Source details"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_ZendeskChat#read
+ x-speakeasy-entity-operation: Source_ZendeskSell#read
put:
tags:
- "Sources"
@@ -17587,7 +17415,7 @@ paths:
content:
application/json:
schema:
- $ref: "#/components/schemas/SourceZendeskChatPutRequest"
+ $ref: "#/components/schemas/SourceZendeskSellPutRequest"
responses:
"2XX":
description: "The resource was updated successfully"
@@ -17595,10 +17423,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "putSourceZendeskChat"
+ operationId: "putSourceZendeskSell"
summary: "Update a Source fully"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_ZendeskChat#update
+ x-speakeasy-entity-operation: Source_ZendeskSell#update
delete:
tags:
- "Sources"
@@ -17609,10 +17437,10 @@ paths:
description: "Not allowed"
"404":
description: "Not found"
- operationId: "deleteSourceZendeskChat"
+ operationId: "deleteSourceZendeskSell"
summary: "Delete a Source"
x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Source_ZendeskChat#delete
+ x-speakeasy-entity-operation: Source_ZendeskSell#delete
parameters:
- name: "sourceId"
schema:
@@ -18480,92 +18308,6 @@ paths:
type: "string"
in: "path"
required: true
- /destinations#BigqueryDenormalized:
- post:
- requestBody:
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/DestinationBigqueryDenormalizedCreateRequest"
- tags:
- - "Destinations"
- responses:
- "200":
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/DestinationResponse"
- description: "Successful operation"
- "400":
- description: "Invalid data"
- "403":
- description: "Not allowed"
- operationId: "createDestinationBigqueryDenormalized"
- summary: "Create a destination"
- description:
- "Creates a destination given a name, workspace id, and a json blob containing\
- \ the configuration for the destination."
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Destination_BigqueryDenormalized#create
- /destinations/{destinationId}#BigqueryDenormalized:
- get:
- tags:
- - "Destinations"
- responses:
- "200":
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/DestinationResponse"
- description: "Get a Destination by the id in the path."
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "getDestinationBigqueryDenormalized"
- summary: "Get Destination details"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Destination_BigqueryDenormalized#read
- put:
- tags:
- - "Destinations"
- requestBody:
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/DestinationBigqueryDenormalizedPutRequest"
- responses:
- "2XX":
- description: "The resource was updated successfully"
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "putDestinationBigqueryDenormalized"
- summary: "Update a Destination fully"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Destination_BigqueryDenormalized#update
- delete:
- tags:
- - "Destinations"
- responses:
- "2XX":
- description: "The resource was deleted successfully"
- "403":
- description: "Not allowed"
- "404":
- description: "Not found"
- operationId: "deleteDestinationBigqueryDenormalized"
- summary: "Delete a Destination"
- x-use-speakeasy-middleware: true
- x-speakeasy-entity-operation: Destination_BigqueryDenormalized#delete
- parameters:
- - name: "destinationId"
- schema:
- format: "UUID"
- type: "string"
- in: "path"
- required: true
/destinations#Clickhouse:
post:
requestBody:
@@ -19082,6 +18824,92 @@ paths:
type: "string"
in: "path"
required: true
+ /destinations#Duckdb:
+ post:
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationDuckdbCreateRequest"
+ tags:
+ - "Destinations"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationResponse"
+ description: "Successful operation"
+ "400":
+ description: "Invalid data"
+ "403":
+ description: "Not allowed"
+ operationId: "createDestinationDuckdb"
+ summary: "Create a destination"
+ description:
+ "Creates a destination given a name, workspace id, and a json blob containing\
+ \ the configuration for the destination."
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Duckdb#create
+ /destinations/{destinationId}#Duckdb:
+ get:
+ tags:
+ - "Destinations"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationResponse"
+ description: "Get a Destination by the id in the path."
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "getDestinationDuckdb"
+ summary: "Get Destination details"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Duckdb#read
+ put:
+ tags:
+ - "Destinations"
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationDuckdbPutRequest"
+ responses:
+ "2XX":
+ description: "The resource was updated successfully"
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "putDestinationDuckdb"
+ summary: "Update a Destination fully"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Duckdb#update
+ delete:
+ tags:
+ - "Destinations"
+ responses:
+ "2XX":
+ description: "The resource was deleted successfully"
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "deleteDestinationDuckdb"
+ summary: "Delete a Destination"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Duckdb#delete
+ parameters:
+ - name: "destinationId"
+ schema:
+ format: "UUID"
+ type: "string"
+ in: "path"
+ required: true
/destinations#Dynamodb:
post:
requestBody:
@@ -20544,6 +20372,92 @@ paths:
type: "string"
in: "path"
required: true
+ /destinations#Qdrant:
+ post:
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationQdrantCreateRequest"
+ tags:
+ - "Destinations"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationResponse"
+ description: "Successful operation"
+ "400":
+ description: "Invalid data"
+ "403":
+ description: "Not allowed"
+ operationId: "createDestinationQdrant"
+ summary: "Create a destination"
+ description:
+ "Creates a destination given a name, workspace id, and a json blob containing\
+ \ the configuration for the destination."
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Qdrant#create
+ /destinations/{destinationId}#Qdrant:
+ get:
+ tags:
+ - "Destinations"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationResponse"
+ description: "Get a Destination by the id in the path."
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "getDestinationQdrant"
+ summary: "Get Destination details"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Qdrant#read
+ put:
+ tags:
+ - "Destinations"
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationQdrantPutRequest"
+ responses:
+ "2XX":
+ description: "The resource was updated successfully"
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "putDestinationQdrant"
+ summary: "Update a Destination fully"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Qdrant#update
+ delete:
+ tags:
+ - "Destinations"
+ responses:
+ "2XX":
+ description: "The resource was deleted successfully"
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "deleteDestinationQdrant"
+ summary: "Delete a Destination"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Qdrant#delete
+ parameters:
+ - name: "destinationId"
+ schema:
+ format: "UUID"
+ type: "string"
+ in: "path"
+ required: true
/destinations#Redis:
post:
requestBody:
@@ -21318,6 +21232,92 @@ paths:
type: "string"
in: "path"
required: true
+ /destinations#Weaviate:
+ post:
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationWeaviateCreateRequest"
+ tags:
+ - "Destinations"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationResponse"
+ description: "Successful operation"
+ "400":
+ description: "Invalid data"
+ "403":
+ description: "Not allowed"
+ operationId: "createDestinationWeaviate"
+ summary: "Create a destination"
+ description:
+ "Creates a destination given a name, workspace id, and a json blob containing\
+ \ the configuration for the destination."
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Weaviate#create
+ /destinations/{destinationId}#Weaviate:
+ get:
+ tags:
+ - "Destinations"
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationResponse"
+ description: "Get a Destination by the id in the path."
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "getDestinationWeaviate"
+ summary: "Get Destination details"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Weaviate#read
+ put:
+ tags:
+ - "Destinations"
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/DestinationWeaviatePutRequest"
+ responses:
+ "2XX":
+ description: "The resource was updated successfully"
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "putDestinationWeaviate"
+ summary: "Update a Destination fully"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Weaviate#update
+ delete:
+ tags:
+ - "Destinations"
+ responses:
+ "2XX":
+ description: "The resource was deleted successfully"
+ "403":
+ description: "Not allowed"
+ "404":
+ description: "Not found"
+ operationId: "deleteDestinationWeaviate"
+ summary: "Delete a Destination"
+ x-use-speakeasy-middleware: true
+ x-speakeasy-entity-operation: Destination_Weaviate#delete
+ parameters:
+ - name: "destinationId"
+ schema:
+ format: "UUID"
+ type: "string"
+ in: "path"
+ required: true
/destinations#Xata:
post:
requestBody:
@@ -21600,6 +21600,12 @@ components:
type: "object"
properties:
name:
+ description: "Name of the source e.g. dev-mysql-instance."
+ type: "string"
+ definitionId:
+ description: "The UUID of the connector definition. One of configuration.sourceType\
+ \ or definitionId must be provided."
+ format: "uuid"
type: "string"
workspaceId:
format: "uuid"
@@ -21650,10 +21656,13 @@ components:
required:
- "redirectUrl"
- "workspaceId"
- - "name"
type: "object"
properties:
name:
+ description: "The name of the source to authenticate to. Deprecated - use\
+ \ sourceType instead."
+ type: "string"
+ sourceType:
description: "The name of the source to authenticate to"
type: "string"
redirectUrl:
@@ -21901,6 +21910,12 @@ components:
type: "object"
properties:
name:
+ description: "Name of the destination e.g. dev-mysql-instance."
+ type: "string"
+ definitionId:
+ description: "The UUID of the connector definition. One of configuration.destinationType\
+ \ or definitionId must be provided."
+ format: "uuid"
type: "string"
workspaceId:
format: "uuid"
@@ -22149,7 +22164,6 @@ components:
- "source"
- "destination"
source-trello:
- title: "Trello Spec"
type: "object"
required:
- "key"
@@ -22164,6 +22178,7 @@ components:
>docs for instructions on how to generate it."
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
token:
type: "string"
title: "API token"
@@ -22171,6 +22186,7 @@ components:
>docs for instructions on how to generate it."
airbyte_secret: true
order: 1
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Start Date"
@@ -22188,7 +22204,10 @@ components:
pattern: "^[0-9a-fA-F]{24}$"
title: "Trello Board IDs"
description: "IDs of the boards to replicate data from. If left empty, data\
- \ from all boards to which you have access will be replicated."
+ \ from all boards to which you have access will be replicated. Please\
+ \ note that this is not the 8-character ID in the board's shortLink (URL\
+ \ of the board). Rather, what is required here is the 24-character ID\
+ \ usually returned by the API"
order: 3
sourceType:
title: "trello"
@@ -22198,7 +22217,6 @@ components:
order: 0
type: "string"
source-trello-update:
- title: "Trello Spec"
type: "object"
required:
- "key"
@@ -22236,7 +22254,10 @@ components:
pattern: "^[0-9a-fA-F]{24}$"
title: "Trello Board IDs"
description: "IDs of the boards to replicate data from. If left empty, data\
- \ from all boards to which you have access will be replicated."
+ \ from all boards to which you have access will be replicated. Please\
+ \ note that this is not the 8-character ID in the board's shortLink (URL\
+ \ of the board). Rather, what is required here is the 24-character ID\
+ \ usually returned by the API"
order: 3
source-the-guardian-api:
title: "The Guardian Api Spec"
@@ -22252,6 +22273,7 @@ components:
description: "Your API Key. See here. The key is case sensitive."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
title: "Start Date"
type: "string"
@@ -22444,6 +22466,7 @@ components:
type: "string"
description: "Refresh Token to renew the expired Access Token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "Authenticate with Personal Access Token"
required:
@@ -22462,6 +22485,7 @@ components:
> personal access token."
type: "string"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "harvest"
const: "harvest"
@@ -22572,10 +22596,12 @@ components:
description: "Access token recieved as a result of API call to https://api.yotpo.com/oauth/token\
\ (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
app_key:
title: "App Key"
type: "string"
description: "App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)"
+ x-speakeasy-param-sensitive: true
start_date:
title: "Date-From Filter"
type: "string"
@@ -22646,6 +22672,7 @@ components:
> the docs for info on how to obtain this."
order: 0
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
url:
type: "string"
title: "Shop URL"
@@ -22721,6 +22748,7 @@ components:
description: "Consumer key associated with your integration"
order: 1
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
consumer_secret:
type: "string"
title: "Consumer Secret"
@@ -22733,12 +22761,14 @@ components:
description: "Access token key"
order: 3
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
token_secret:
type: "string"
title: "Token Secret"
description: "Access token secret"
order: 4
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
object_types:
type: "array"
title: "Object Types"
@@ -22867,6 +22897,7 @@ components:
title: "Access Key"
description: "API access key used to retrieve data from Convex."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "convex"
const: "convex"
@@ -22906,6 +22937,7 @@ components:
description: "Recurly API Key. See the docs for more information on how to generate this key."
order: 1
+ x-speakeasy-param-sensitive: true
begin_time:
type: "string"
description: "ISO8601 timestamp from which the replication from Recurly\
@@ -23010,12 +23042,14 @@ components:
title: "Access Token"
description: "Access Token for making authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
refresh_token:
type: "string"
title: "Refresh Token"
description: "Refresh Token to obtain new Access Token, when it's\
\ expired."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "Access Token"
required:
@@ -23033,6 +23067,7 @@ components:
title: "Access Token"
description: "The Access Token to make authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "zendesk-chat"
const: "zendesk-chat"
@@ -23133,6 +23168,7 @@ components:
type: "string"
description: "The password associated to the username"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
title: "Start Date"
description: "Start date for collecting time logs"
@@ -23192,6 +23228,43 @@ components:
minimum: 1
maximum: 365
default: 30
+ source-zendesk-sell:
+ title: "Source Zendesk Sell Spec"
+ type: "object"
+ required:
+ - "api_token"
+ - "sourceType"
+ properties:
+ api_token:
+ title: "API token"
+ type: "string"
+ description: "The API token for authenticating to Zendesk Sell"
+ examples:
+ - "f23yhd630otl94y85a8bf384958473pto95847fd006da49382716or937ruw059"
+ airbyte_secret: true
+ order: 1
+ x-speakeasy-param-sensitive: true
+ sourceType:
+ title: "zendesk-sell"
+ const: "zendesk-sell"
+ enum:
+ - "zendesk-sell"
+ order: 0
+ type: "string"
+ source-zendesk-sell-update:
+ title: "Source Zendesk Sell Spec"
+ type: "object"
+ required:
+ - "api_token"
+ properties:
+ api_token:
+ title: "API token"
+ type: "string"
+ description: "The API token for authenticating to Zendesk Sell"
+ examples:
+ - "f23yhd630otl94y85a8bf384958473pto95847fd006da49382716or937ruw059"
+ airbyte_secret: true
+ order: 1
source-klaviyo:
title: "Klaviyo Spec"
type: "object"
@@ -23202,15 +23275,19 @@ components:
>docs if you need help finding this key."
airbyte_secret: true
type: "string"
+ order: 0
+ x-speakeasy-param-sensitive: true
start_date:
title: "Start Date"
description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
- \ data before this date will not be replicated."
+ \ data before this date will not be replicated. This field is optional\
+ \ - if not provided, all data will be replicated."
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- "2017-01-25T00:00:00Z"
type: "string"
format: "date-time"
+ order: 1
sourceType:
title: "klaviyo"
const: "klaviyo"
@@ -23220,7 +23297,6 @@ components:
type: "string"
required:
- "api_key"
- - "start_date"
- "sourceType"
source-klaviyo-update:
title: "Klaviyo Spec"
@@ -23232,18 +23308,20 @@ components:
>docs if you need help finding this key."
airbyte_secret: true
type: "string"
+ order: 0
start_date:
title: "Start Date"
description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
- \ data before this date will not be replicated."
+ \ data before this date will not be replicated. This field is optional\
+ \ - if not provided, all data will be replicated."
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- "2017-01-25T00:00:00Z"
type: "string"
format: "date-time"
+ order: 1
required:
- "api_key"
- - "start_date"
source-quickbooks:
title: "Source QuickBooks Spec"
type: "object"
@@ -23292,16 +23370,19 @@ components:
title: "Refresh Token"
type: "string"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
access_token:
description: "Access token fot making authenticated requests."
title: "Access Token"
type: "string"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
token_expiry_date:
type: "string"
title: "Token Expiry Date"
description: "The date-time when the access token should be refreshed."
format: "date-time"
+ x-speakeasy-param-sensitive: true
realm_id:
description: "Labeled Company ID. The Make API Calls panel is populated\
\ with the realm id and the current access token."
@@ -23470,6 +23551,7 @@ components:
example: "a very long hex sequence"
order: 1
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "webflow"
const: "webflow"
@@ -23690,6 +23772,7 @@ components:
type: "string"
description: "The Token for obtaining a new access token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "Service Account Key"
description: "For these scenario user should obtain service account's\
\ credentials from the Google API Console and provide delegated email."
@@ -23810,6 +23893,7 @@ components:
type: "string"
description: "API Key"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "smartengage"
const: "smartengage"
@@ -23857,6 +23941,7 @@ components:
type: "string"
description: "Access Token for making authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
required:
- "type"
- "access_token"
@@ -23877,6 +23962,7 @@ components:
type: "string"
description: "Add Password for authentication."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
required:
- "type"
- "username"
@@ -24015,6 +24101,7 @@ components:
description: "Your API Token. See here. The key is case sensitive."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "k6-cloud"
const: "k6-cloud"
@@ -24094,6 +24181,7 @@ components:
order: 5
group: "auth"
always_show: true
+ x-speakeasy-param-sensitive: true
jdbc_url_params:
description: "Additional properties to pass to the JDBC URL string when\
\ connecting to the database formatted as 'key=value' pairs separated\
@@ -24201,6 +24289,7 @@ components:
multiline: true
order: 3
always_show: true
+ x-speakeasy-param-sensitive: true
client_key_password:
type: "string"
title: "Client key password"
@@ -24208,6 +24297,7 @@ components:
\ password will be generated automatically."
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "verify-full"
additionalProperties: true
description: "This is the most secure mode. Always require encryption\
@@ -24245,6 +24335,7 @@ components:
multiline: true
order: 3
always_show: true
+ x-speakeasy-param-sensitive: true
client_key_password:
type: "string"
title: "Client key password"
@@ -24252,6 +24343,7 @@ components:
\ password will be generated automatically."
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
default: "require"
replication_method:
type: "object"
@@ -24327,7 +24419,7 @@ components:
lsn_commit_behaviour:
type: "string"
title: "LSN commit behaviour"
- description: "Determines when Airbtye should flush the LSN of processed\
+ description: "Determines when Airbyte should flush the LSN of processed\
\ WAL logs in the source database. `After loading Data in the destination`\
\ is default. If `While reading Data` is selected, in case of a\
\ downstream failure (while loading data into the destination),\
@@ -24427,6 +24519,7 @@ components:
airbyte_secret: true
multiline: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Password Authentication"
required:
- "tunnel_method"
@@ -24471,6 +24564,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
group: "security"
sourceType:
title: "postgres"
@@ -24778,7 +24872,7 @@ components:
lsn_commit_behaviour:
type: "string"
title: "LSN commit behaviour"
- description: "Determines when Airbtye should flush the LSN of processed\
+ description: "Determines when Airbyte should flush the LSN of processed\
\ WAL logs in the source database. `After loading Data in the destination`\
\ is default. If `While reading Data` is selected, in case of a\
\ downstream failure (while loading data into the destination),\
@@ -24942,6 +25036,7 @@ components:
description: "Your API Token. See here. The token is case sensitive."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "todoist"
const: "todoist"
@@ -25175,6 +25270,7 @@ components:
airbyte_secret: true
type: "string"
order: 2
+ x-speakeasy-param-sensitive: true
start_date:
title: "Replication Start Date"
description: "UTC date and time in the format 2020-10-01T00:00:00Z. Any\
@@ -25256,6 +25352,7 @@ components:
description: "API Key, use admin to generate this key."
order: 0
+ x-speakeasy-param-sensitive: true
start_time:
title: "Start time"
type: "string"
@@ -25312,6 +25409,7 @@ components:
description: "API Key"
order: 0
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
query:
type: "string"
order: 1
@@ -25776,6 +25874,7 @@ components:
\ href=\"https://docs.airbyte.com/integrations/sources/google-ads#setup-guide\"\
>documentation."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
client_id:
type: "string"
title: "Client ID"
@@ -25801,6 +25900,7 @@ components:
\ instructions on finding this value, refer to our documentation."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
access_token:
type: "string"
title: "Access Token"
@@ -25810,6 +25910,7 @@ components:
https://docs.airbyte.com/integrations/sources/google-ads#setup-guide\"\
>documentation."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
customer_id:
title: "Customer ID(s)"
type: "string"
@@ -26145,6 +26246,7 @@ components:
\ more here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
refresh_token:
title: "Refresh Token"
type: "string"
@@ -26152,6 +26254,7 @@ components:
\ here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "Service Account Key Authentication"
required:
@@ -26204,7 +26307,11 @@ components:
type: "string"
dimensions:
title: "Dimensions"
- description: "A list of dimensions (country, date, device, page, query)"
+ description: "A list of available dimensions. Please note, that for\
+ \ technical reasons `date` is the default dimension which will be\
+ \ included in your query whether you specify it or not. Primary\
+ \ key will consist of your custom dimensions and the default dimension\
+ \ along with `site_url` and `search_type`."
type: "array"
items:
title: "ValidEnums"
@@ -26215,7 +26322,9 @@ components:
- "device"
- "page"
- "query"
- minItems: 1
+ default:
+ - "date"
+ minItems: 0
required:
- "name"
- "dimensions"
@@ -26386,7 +26495,11 @@ components:
type: "string"
dimensions:
title: "Dimensions"
- description: "A list of dimensions (country, date, device, page, query)"
+ description: "A list of available dimensions. Please note, that for\
+ \ technical reasons `date` is the default dimension which will be\
+ \ included in your query whether you specify it or not. Primary\
+ \ key will consist of your custom dimensions and the default dimension\
+ \ along with `site_url` and `search_type`."
type: "array"
items:
title: "ValidEnums"
@@ -26397,7 +26510,9 @@ components:
- "device"
- "page"
- "query"
- minItems: 1
+ default:
+ - "date"
+ minItems: 0
required:
- "name"
- "dimensions"
@@ -26563,6 +26678,7 @@ components:
- "fc6243f283e51f6ca989aab298b17da125496f50"
airbyte_secret: true
order: 2
+ x-speakeasy-param-sensitive: true
athlete_id:
type: "integer"
description: "The Athlete ID of your Strava developer application."
@@ -26666,6 +26782,7 @@ components:
title: "API User Password"
description: "API user password. See https://smaily.com/help/api/general/create-api-user/"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "smaily"
const: "smaily"
@@ -26708,6 +26825,7 @@ components:
description: "Kustomer API Token. See the docs on how to obtain this"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
title: "Start Date"
type: "string"
@@ -26956,6 +27074,7 @@ components:
description: "The Access Token for making authenticated requests."
airbyte_secret: true
order: 3
+ x-speakeasy-param-sensitive: true
- title: "API Password"
description: "API Password Auth"
type: "object"
@@ -26976,6 +27095,7 @@ components:
\ `Shopify` store."
airbyte_secret: true
order: 1
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Replication Start Date"
@@ -27086,6 +27206,7 @@ components:
type: "string"
description: "API Key"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "omnisend"
const: "omnisend"
@@ -27104,220 +27225,318 @@ components:
type: "string"
description: "API Key"
airbyte_secret: true
- source-mongodb:
+ source-mongodb-v2:
title: "MongoDb Source Spec"
type: "object"
required:
- - "database"
+ - "database_config"
- "sourceType"
properties:
- instance_type:
+ database_config:
type: "object"
- title: "MongoDb Instance Type"
- description: "The MongoDb instance to connect to. For MongoDB Atlas and\
- \ Replica Set TLS connection is used by default."
- order: 0
+ title: "Cluster Type"
+ description: "Configures the MongoDB cluster type."
+ order: 1
+ group: "connection"
+ display_type: "radio"
oneOf:
- - title: "Standalone MongoDb Instance"
+ - title: "MongoDB Atlas Replica Set"
+ description: "MongoDB Atlas-hosted cluster configured as a replica set"
required:
- - "instance"
- - "host"
- - "port"
+ - "cluster_type"
+ - "connection_string"
+ - "database"
+ - "username"
+ - "password"
+ - "auth_source"
+ additionalProperties: true
properties:
- instance:
+ cluster_type:
type: "string"
- const: "standalone"
+ const: "ATLAS_REPLICA_SET"
+ order: 1
enum:
- - "standalone"
- host:
- title: "Host"
+ - "ATLAS_REPLICA_SET"
+ connection_string:
+ title: "Connection String"
type: "string"
- description: "The host name of the Mongo database."
- order: 0
- port:
- title: "Port"
- type: "integer"
- description: "The port of the Mongo database."
- minimum: 0
- maximum: 65536
- default: 27017
+ description: "The connection string of the cluster that you want to\
+ \ replicate."
examples:
- - "27017"
- order: 1
- - title: "Replica Set"
+ - "mongodb+srv://cluster0.abcd1.mongodb.net/"
+ order: 2
+ database:
+ title: "Database Name"
+ type: "string"
+ description: "The name of the MongoDB database that contains the collection(s)\
+ \ to replicate."
+ order: 3
+ username:
+ title: "Username"
+ type: "string"
+ description: "The username which is used to access the database."
+ order: 4
+ password:
+ title: "Password"
+ type: "string"
+ description: "The password associated with this username."
+ airbyte_secret: true
+ order: 5
+ x-speakeasy-param-sensitive: true
+ auth_source:
+ title: "Authentication Source"
+ type: "string"
+ description: "The authentication source where the user information\
+ \ is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource\
+ \ for more details."
+ default: "admin"
+ examples:
+ - "admin"
+ order: 6
+ - title: "Self-Managed Replica Set"
+ description: "MongoDB self-hosted cluster configured as a replica set"
required:
- - "instance"
- - "server_addresses"
+ - "cluster_type"
+ - "connection_string"
+ - "database"
+ additionalProperties: true
properties:
- instance:
+ cluster_type:
type: "string"
- const: "replica"
+ const: "SELF_MANAGED_REPLICA_SET"
+ order: 1
enum:
- - "replica"
- server_addresses:
- title: "Server Addresses"
+ - "SELF_MANAGED_REPLICA_SET"
+ connection_string:
+ title: "Connection String"
type: "string"
- description: "The members of a replica set. Please specify `host`:`port`\
- \ of each member separated by comma."
+ description: "The connection string of the cluster that you want to\
+ \ replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string\
+ \ for more information."
examples:
- - "host1:27017,host2:27017,host3:27017"
- order: 0
- replica_set:
- title: "Replica Set"
+ - "mongodb://example1.host.com:27017,example2.host.com:27017,example3.host.com:27017/"
+ - "mongodb://example.host.com:27017/"
+ order: 2
+ database:
+ title: "Database Name"
type: "string"
- description: "A replica set in MongoDB is a group of mongod processes\
- \ that maintain the same data set."
- order: 1
- - title: "MongoDB Atlas"
- additionalProperties: true
- required:
- - "instance"
- - "cluster_url"
- properties:
- instance:
+ description: "The name of the MongoDB database that contains the collection(s)\
+ \ to replicate."
+ order: 3
+ username:
+ title: "Username"
type: "string"
- const: "atlas"
- enum:
- - "atlas"
- cluster_url:
- title: "Cluster URL"
+ description: "The username which is used to access the database."
+ order: 4
+ password:
+ title: "Password"
type: "string"
- description: "The URL of a cluster to connect to."
- order: 0
- database:
- title: "Database Name"
- type: "string"
- description: "The database you want to replicate."
- order: 1
- user:
- title: "User"
- type: "string"
- description: "The username which is used to access the database."
- order: 2
- password:
- title: "Password"
- type: "string"
- description: "The password associated with this username."
- airbyte_secret: true
- order: 3
- auth_source:
- title: "Authentication Source"
- type: "string"
- description: "The authentication source where the user information is stored."
- default: "admin"
- examples:
- - "admin"
- order: 4
+ description: "The password associated with this username."
+ airbyte_secret: true
+ order: 5
+ x-speakeasy-param-sensitive: true
+ auth_source:
+ title: "Authentication Source"
+ type: "string"
+ description: "The authentication source where the user information\
+ \ is stored."
+ default: "admin"
+ examples:
+ - "admin"
+ order: 6
+ initial_waiting_seconds:
+ type: "integer"
+ title: "Initial Waiting Time in Seconds (Advanced)"
+ description: "The amount of time the connector will wait when it launches\
+ \ to determine if there is new data to sync or not. Defaults to 300 seconds.\
+ \ Valid range: 120 seconds to 1200 seconds."
+ default: 300
+ order: 7
+ min: 120
+ max: 1200
+ group: "advanced"
+ queue_size:
+ type: "integer"
+ title: "Size of the queue (Advanced)"
+ description: "The size of the internal queue. This may interfere with memory\
+ \ consumption and efficiency of the connector, please be careful."
+ default: 10000
+ order: 8
+ min: 1000
+ max: 10000
+ group: "advanced"
+ discover_sample_size:
+ type: "integer"
+ title: "Document discovery sample size (Advanced)"
+ description: "The maximum number of documents to sample when attempting\
+ \ to discover the unique fields for a collection."
+ default: 10000
+ order: 9
+ minimum: 1000
+ maximum: 100000
+ group: "advanced"
sourceType:
- title: "mongodb"
- const: "mongodb"
+ title: "mongodb-v2"
+ const: "mongodb-v2"
enum:
- - "mongodb"
+ - "mongodb-v2"
order: 0
type: "string"
- source-mongodb-update:
+ groups:
+ - id: "connection"
+ - id: "advanced"
+ title: "Advanced"
+ source-mongodb-v2-update:
title: "MongoDb Source Spec"
type: "object"
required:
- - "database"
+ - "database_config"
properties:
- instance_type:
+ database_config:
type: "object"
- title: "MongoDb Instance Type"
- description: "The MongoDb instance to connect to. For MongoDB Atlas and\
- \ Replica Set TLS connection is used by default."
- order: 0
+ title: "Cluster Type"
+ description: "Configures the MongoDB cluster type."
+ order: 1
+ group: "connection"
+ display_type: "radio"
oneOf:
- - title: "Standalone MongoDb Instance"
+ - title: "MongoDB Atlas Replica Set"
+ description: "MongoDB Atlas-hosted cluster configured as a replica set"
required:
- - "instance"
- - "host"
- - "port"
+ - "cluster_type"
+ - "connection_string"
+ - "database"
+ - "username"
+ - "password"
+ - "auth_source"
+ additionalProperties: true
properties:
- instance:
+ cluster_type:
type: "string"
- const: "standalone"
+ const: "ATLAS_REPLICA_SET"
+ order: 1
enum:
- - "standalone"
- host:
- title: "Host"
+ - "ATLAS_REPLICA_SET"
+ connection_string:
+ title: "Connection String"
type: "string"
- description: "The host name of the Mongo database."
- order: 0
- port:
- title: "Port"
- type: "integer"
- description: "The port of the Mongo database."
- minimum: 0
- maximum: 65536
- default: 27017
+ description: "The connection string of the cluster that you want to\
+ \ replicate."
examples:
- - "27017"
- order: 1
- - title: "Replica Set"
+ - "mongodb+srv://cluster0.abcd1.mongodb.net/"
+ order: 2
+ database:
+ title: "Database Name"
+ type: "string"
+ description: "The name of the MongoDB database that contains the collection(s)\
+ \ to replicate."
+ order: 3
+ username:
+ title: "Username"
+ type: "string"
+ description: "The username which is used to access the database."
+ order: 4
+ password:
+ title: "Password"
+ type: "string"
+ description: "The password associated with this username."
+ airbyte_secret: true
+ order: 5
+ auth_source:
+ title: "Authentication Source"
+ type: "string"
+ description: "The authentication source where the user information\
+ \ is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource\
+ \ for more details."
+ default: "admin"
+ examples:
+ - "admin"
+ order: 6
+ - title: "Self-Managed Replica Set"
+ description: "MongoDB self-hosted cluster configured as a replica set"
required:
- - "instance"
- - "server_addresses"
+ - "cluster_type"
+ - "connection_string"
+ - "database"
+ additionalProperties: true
properties:
- instance:
+ cluster_type:
type: "string"
- const: "replica"
+ const: "SELF_MANAGED_REPLICA_SET"
+ order: 1
enum:
- - "replica"
- server_addresses:
- title: "Server Addresses"
+ - "SELF_MANAGED_REPLICA_SET"
+ connection_string:
+ title: "Connection String"
type: "string"
- description: "The members of a replica set. Please specify `host`:`port`\
- \ of each member separated by comma."
+ description: "The connection string of the cluster that you want to\
+ \ replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string\
+ \ for more information."
examples:
- - "host1:27017,host2:27017,host3:27017"
- order: 0
- replica_set:
- title: "Replica Set"
+ - "mongodb://example1.host.com:27017,example2.host.com:27017,example3.host.com:27017/"
+ - "mongodb://example.host.com:27017/"
+ order: 2
+ database:
+ title: "Database Name"
type: "string"
- description: "A replica set in MongoDB is a group of mongod processes\
- \ that maintain the same data set."
- order: 1
- - title: "MongoDB Atlas"
- additionalProperties: true
- required:
- - "instance"
- - "cluster_url"
- properties:
- instance:
+ description: "The name of the MongoDB database that contains the collection(s)\
+ \ to replicate."
+ order: 3
+ username:
+ title: "Username"
type: "string"
- const: "atlas"
- enum:
- - "atlas"
- cluster_url:
- title: "Cluster URL"
+ description: "The username which is used to access the database."
+ order: 4
+ password:
+ title: "Password"
type: "string"
- description: "The URL of a cluster to connect to."
- order: 0
- database:
- title: "Database Name"
- type: "string"
- description: "The database you want to replicate."
- order: 1
- user:
- title: "User"
- type: "string"
- description: "The username which is used to access the database."
- order: 2
- password:
- title: "Password"
- type: "string"
- description: "The password associated with this username."
- airbyte_secret: true
- order: 3
- auth_source:
- title: "Authentication Source"
- type: "string"
- description: "The authentication source where the user information is stored."
- default: "admin"
- examples:
- - "admin"
- order: 4
+ description: "The password associated with this username."
+ airbyte_secret: true
+ order: 5
+ auth_source:
+ title: "Authentication Source"
+ type: "string"
+ description: "The authentication source where the user information\
+ \ is stored."
+ default: "admin"
+ examples:
+ - "admin"
+ order: 6
+ initial_waiting_seconds:
+ type: "integer"
+ title: "Initial Waiting Time in Seconds (Advanced)"
+ description: "The amount of time the connector will wait when it launches\
+ \ to determine if there is new data to sync or not. Defaults to 300 seconds.\
+ \ Valid range: 120 seconds to 1200 seconds."
+ default: 300
+ order: 7
+ min: 120
+ max: 1200
+ group: "advanced"
+ queue_size:
+ type: "integer"
+ title: "Size of the queue (Advanced)"
+ description: "The size of the internal queue. This may interfere with memory\
+ \ consumption and efficiency of the connector, please be careful."
+ default: 10000
+ order: 8
+ min: 1000
+ max: 10000
+ group: "advanced"
+ discover_sample_size:
+ type: "integer"
+ title: "Document discovery sample size (Advanced)"
+ description: "The maximum number of documents to sample when attempting\
+ \ to discover the unique fields for a collection."
+ default: 10000
+ order: 9
+ minimum: 1000
+ maximum: 100000
+ group: "advanced"
+ groups:
+ - id: "connection"
+ - id: "advanced"
+ title: "Advanced"
source-retently:
title: "Retently Api Spec"
type: "object"
@@ -27356,6 +27575,7 @@ components:
description: "Retently Refresh Token which can be used to fetch new\
\ Bearer Tokens when the current one expires."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "Authenticate with API Token"
required:
@@ -27374,6 +27594,7 @@ components:
>docs for more information on how to obtain this key."
type: "string"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "retently"
const: "retently"
@@ -27450,6 +27671,7 @@ components:
description: "Bearer token"
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
sourceType:
title: "coda"
const: "coda"
@@ -27479,6 +27701,7 @@ components:
type: "string"
title: "API Key"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "pendo"
const: "pendo"
@@ -27503,7 +27726,8 @@ components:
title: "Start Date"
description: "The date from which you'd like to replicate data for User\
\ Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after\
- \ this date will be replicated."
+ \ this date will be replicated. If left blank, the start date will be\
+ \ set to 2 years before the present date."
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- "2017-01-25T00:00:00Z"
@@ -27517,6 +27741,7 @@ components:
>docs for more information"
airbyte_secret: true
type: "string"
+ x-speakeasy-param-sensitive: true
client_id:
title: "Client Id"
description: "The Client ID for your Oauth application"
@@ -27537,7 +27762,6 @@ components:
order: 0
type: "string"
required:
- - "start_date"
- "access_token"
- "sourceType"
source-instagram-update:
@@ -27548,7 +27772,8 @@ components:
title: "Start Date"
description: "The date from which you'd like to replicate data for User\
\ Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after\
- \ this date will be replicated."
+ \ this date will be replicated. If left blank, the start date will be\
+ \ set to 2 years before the present date."
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- "2017-01-25T00:00:00Z"
@@ -27575,7 +27800,6 @@ components:
airbyte_hidden: true
type: "string"
required:
- - "start_date"
- "access_token"
source-s3:
title: "Config"
@@ -27612,16 +27836,15 @@ components:
title: "Name"
description: "The name of the stream."
type: "string"
- file_type:
- title: "File Type"
- description: "The data file type that is being extracted for a stream."
- type: "string"
globs:
title: "Globs"
description: "The pattern used to specify which files should be selected\
\ from the file system. For more information on glob pattern matching\
\ look here."
+ default:
+ - "**"
+ order: 1
type: "array"
items:
type: "string"
@@ -27651,6 +27874,7 @@ components:
description: "The column or columns (for a composite key) that serves\
\ as the unique identifier of a record."
type: "string"
+ x-speakeasy-param-sensitive: true
days_to_sync_if_history_is_full:
title: "Days To Sync If History Is Full"
description: "When the state history of the file store is full, syncs\
@@ -27682,6 +27906,8 @@ components:
\ handling floating point numbers."
default: false
type: "boolean"
+ required:
+ - "filetype"
- title: "CSV Format"
type: "object"
properties:
@@ -27778,6 +28004,8 @@ components:
type: "string"
enum:
- "From CSV"
+ required:
+ - "header_definition_type"
- title: "Autogenerated"
type: "object"
properties:
@@ -27788,6 +28016,8 @@ components:
type: "string"
enum:
- "Autogenerated"
+ required:
+ - "header_definition_type"
- title: "User Provided"
type: "object"
properties:
@@ -27807,6 +28037,7 @@ components:
type: "string"
required:
- "column_names"
+ - "header_definition_type"
type: "object"
true_values:
title: "True Values"
@@ -27847,6 +28078,8 @@ components:
enum:
- "None"
- "Primitive Types Only"
+ required:
+ - "filetype"
- title: "Jsonl Format"
type: "object"
properties:
@@ -27857,6 +28090,8 @@ components:
type: "string"
enum:
- "jsonl"
+ required:
+ - "filetype"
- title: "Parquet Format"
type: "object"
properties:
@@ -27874,6 +28109,31 @@ components:
\ so this is not recommended."
default: false
type: "boolean"
+ required:
+ - "filetype"
+ - title: "Document File Type Format (Experimental)"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "unstructured"
+ const: "unstructured"
+ type: "string"
+ enum:
+ - "unstructured"
+ skip_unprocessable_file_types:
+ title: "Skip Unprocessable File Types"
+ description: "If true, skip files that cannot be parsed because\
+ \ of their file type and log a warning. If false, fail the\
+ \ sync. Corrupted files with valid file types will still result\
+ \ in a failed sync."
+ default: true
+ always_show: true
+ type: "boolean"
+ description: "Extract text from document formats (.pdf, .docx, .md,\
+ \ .pptx) and emit as one record per file."
+ required:
+ - "filetype"
schemaless:
title: "Schemaless"
description: "When enabled, syncs will not validate or structure records\
@@ -27882,7 +28142,7 @@ components:
type: "boolean"
required:
- "name"
- - "file_type"
+ - "format"
bucket:
title: "Bucket"
description: "Name of the S3 bucket where the file(s) exist."
@@ -27896,6 +28156,7 @@ components:
airbyte_secret: true
order: 2
type: "string"
+ x-speakeasy-param-sensitive: true
aws_secret_access_key:
title: "AWS Secret Access Key"
description: "In order to access private Buckets stored on AWS S3, this\
@@ -27904,12 +28165,18 @@ components:
airbyte_secret: true
order: 3
type: "string"
+ x-speakeasy-param-sensitive: true
endpoint:
title: "Endpoint"
- description: "Endpoint to an S3 compatible service. Leave empty to use AWS."
+ description: "Endpoint to an S3 compatible service. Leave empty to use AWS.\
+ \ The custom endpoint must be secure, but the 'https' prefix is not required."
default: ""
+ examples:
+ - "my-s3-endpoint.com"
+ - "https://my-s3-endpoint.com"
order: 4
type: "string"
+ pattern: "^(?!http://).*$"
dataset:
title: "Output Stream Name"
description: "Deprecated and will be removed soon. Please do not use this\
@@ -28172,16 +28439,20 @@ components:
\ connector requires credentials with the proper permissions. If accessing\
\ publicly available data, this field is not necessary."
airbyte_secret: true
+ always_show: true
order: 1
type: "string"
+ x-speakeasy-param-sensitive: true
aws_secret_access_key:
title: "AWS Secret Access Key"
description: "In order to access private Buckets stored on AWS S3, this\
\ connector requires credentials with the proper permissions. If accessing\
\ publicly available data, this field is not necessary."
airbyte_secret: true
+ always_show: true
order: 2
type: "string"
+ x-speakeasy-param-sensitive: true
path_prefix:
title: "Path Prefix"
description: "By providing a path-like prefix (e.g. myFolder/thisTable/)\
@@ -28261,16 +28532,15 @@ components:
title: "Name"
description: "The name of the stream."
type: "string"
- file_type:
- title: "File Type"
- description: "The data file type that is being extracted for a stream."
- type: "string"
globs:
title: "Globs"
description: "The pattern used to specify which files should be selected\
\ from the file system. For more information on glob pattern matching\
\ look here."
+ default:
+ - "**"
+ order: 1
type: "array"
items:
type: "string"
@@ -28331,6 +28601,8 @@ components:
\ handling floating point numbers."
default: false
type: "boolean"
+ required:
+ - "filetype"
- title: "CSV Format"
type: "object"
properties:
@@ -28427,6 +28699,8 @@ components:
type: "string"
enum:
- "From CSV"
+ required:
+ - "header_definition_type"
- title: "Autogenerated"
type: "object"
properties:
@@ -28437,6 +28711,8 @@ components:
type: "string"
enum:
- "Autogenerated"
+ required:
+ - "header_definition_type"
- title: "User Provided"
type: "object"
properties:
@@ -28456,6 +28732,7 @@ components:
type: "string"
required:
- "column_names"
+ - "header_definition_type"
type: "object"
true_values:
title: "True Values"
@@ -28496,6 +28773,8 @@ components:
enum:
- "None"
- "Primitive Types Only"
+ required:
+ - "filetype"
- title: "Jsonl Format"
type: "object"
properties:
@@ -28506,6 +28785,8 @@ components:
type: "string"
enum:
- "jsonl"
+ required:
+ - "filetype"
- title: "Parquet Format"
type: "object"
properties:
@@ -28523,6 +28804,31 @@ components:
\ so this is not recommended."
default: false
type: "boolean"
+ required:
+ - "filetype"
+ - title: "Document File Type Format (Experimental)"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "unstructured"
+ const: "unstructured"
+ type: "string"
+ enum:
+ - "unstructured"
+ skip_unprocessable_file_types:
+ title: "Skip Unprocessable File Types"
+ description: "If true, skip files that cannot be parsed because\
+ \ of their file type and log a warning. If false, fail the\
+ \ sync. Corrupted files with valid file types will still result\
+ \ in a failed sync."
+ default: true
+ always_show: true
+ type: "boolean"
+ description: "Extract text from document formats (.pdf, .docx, .md,\
+ \ .pptx) and emit as one record per file."
+ required:
+ - "filetype"
schemaless:
title: "Schemaless"
description: "When enabled, syncs will not validate or structure records\
@@ -28531,7 +28837,7 @@ components:
type: "boolean"
required:
- "name"
- - "file_type"
+ - "format"
bucket:
title: "Bucket"
description: "Name of the S3 bucket where the file(s) exist."
@@ -28555,10 +28861,15 @@ components:
type: "string"
endpoint:
title: "Endpoint"
- description: "Endpoint to an S3 compatible service. Leave empty to use AWS."
+ description: "Endpoint to an S3 compatible service. Leave empty to use AWS.\
+ \ The custom endpoint must be secure, but the 'https' prefix is not required."
default: ""
+ examples:
+ - "my-s3-endpoint.com"
+ - "https://my-s3-endpoint.com"
order: 4
type: "string"
+ pattern: "^(?!http://).*$"
dataset:
title: "Output Stream Name"
description: "Deprecated and will be removed soon. Please do not use this\
@@ -28821,6 +29132,7 @@ components:
\ connector requires credentials with the proper permissions. If accessing\
\ publicly available data, this field is not necessary."
airbyte_secret: true
+ always_show: true
order: 1
type: "string"
aws_secret_access_key:
@@ -28829,6 +29141,7 @@ components:
\ connector requires credentials with the proper permissions. If accessing\
\ publicly available data, this field is not necessary."
airbyte_secret: true
+ always_show: true
order: 2
type: "string"
path_prefix:
@@ -28868,217 +29181,848 @@ components:
- "streams"
- "bucket"
source-azure-blob-storage:
- title: "AzureBlobStorage Source Spec"
+ title: "Config"
+ description: "NOTE: When this Spec is changed, legacy_config_transformer.py\
+ \ must also be modified to uptake the changes\nbecause it is responsible for\
+ \ converting legacy Azure Blob Storage v0 configs into v1 configs using the\
+ \ File-Based CDK."
type: "object"
- required:
- - "azure_blob_storage_account_name"
- - "azure_blob_storage_account_key"
- - "azure_blob_storage_container_name"
- - "format"
- - "sourceType"
properties:
- azure_blob_storage_endpoint:
- title: "Endpoint Domain Name"
- type: "string"
- default: "blob.core.windows.net"
- description: "This is Azure Blob Storage endpoint domain name. Leave default\
- \ value (or leave it empty if run container from command line) to use\
- \ Microsoft native from example."
+ start_date:
+ title: "Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\
+ \ Any file modified before this date will not be replicated."
examples:
- - "blob.core.windows.net"
- azure_blob_storage_container_name:
- title: "Azure blob storage container (Bucket) Name"
+ - "2021-01-01T00:00:00.000000Z"
+ format: "date-time"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$"
+ pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ"
+ order: 1
type: "string"
- description: "The name of the Azure blob storage container."
- examples:
- - "airbytetescontainername"
+ streams:
+ title: "The list of streams to sync"
+ description: "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\
+ \ format, and how they should be parsed and validated. When sending data\
+ \ to warehouse destination such as Snowflake or BigQuery, each stream\
+ \ is a separate table."
+ order: 10
+ type: "array"
+ items:
+ title: "FileBasedStreamConfig"
+ type: "object"
+ properties:
+ name:
+ title: "Name"
+ description: "The name of the stream."
+ type: "string"
+ globs:
+ title: "Globs"
+ description: "The pattern used to specify which files should be selected\
+ \ from the file system. For more information on glob pattern matching\
+ \ look here."
+ default:
+ - "**"
+ order: 1
+ type: "array"
+ items:
+ type: "string"
+ legacy_prefix:
+ title: "Legacy Prefix"
+ description: "The path prefix configured in v3 versions of the S3\
+ \ connector. This option is deprecated in favor of a single glob."
+ airbyte_hidden: true
+ type: "string"
+ validation_policy:
+ title: "Validation Policy"
+ description: "The name of the validation policy that dictates sync\
+ \ behavior when a record does not adhere to the stream schema."
+ default: "Emit Record"
+ enum:
+ - "Emit Record"
+ - "Skip Record"
+ - "Wait for Discover"
+ input_schema:
+ title: "Input Schema"
+ description: "The schema that will be used to validate records extracted\
+ \ from the file. This will override the stream schema that is auto-detected\
+ \ from incoming files."
+ type: "string"
+ primary_key:
+ title: "Primary Key"
+ description: "The column or columns (for a composite key) that serves\
+ \ as the unique identifier of a record."
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ days_to_sync_if_history_is_full:
+ title: "Days To Sync If History Is Full"
+ description: "When the state history of the file store is full, syncs\
+ \ will only read files that were last modified in the provided day\
+ \ range."
+ default: 3
+ type: "integer"
+ format:
+ title: "Format"
+ description: "The configuration options that are used to alter how\
+ \ to read incoming files that deviate from the standard formatting."
+ type: "object"
+ oneOf:
+ - title: "Avro Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "avro"
+ const: "avro"
+ type: "string"
+ enum:
+ - "avro"
+ double_as_string:
+ title: "Convert Double Fields to Strings"
+ description: "Whether to convert double fields to strings. This\
+ \ is recommended if you have decimal numbers with a high degree\
+ \ of precision because there can be a loss of precision when\
+ \ handling floating point numbers."
+ default: false
+ type: "boolean"
+ required:
+ - "filetype"
+ - title: "CSV Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "csv"
+ const: "csv"
+ type: "string"
+ enum:
+ - "csv"
+ delimiter:
+ title: "Delimiter"
+ description: "The character delimiting individual cells in the\
+ \ CSV data. This may only be a 1-character string. For tab-delimited\
+ \ data enter '\\t'."
+ default: ","
+ type: "string"
+ quote_char:
+ title: "Quote Character"
+ description: "The character used for quoting CSV values. To\
+ \ disallow quoting, make this field blank."
+ default: "\""
+ type: "string"
+ escape_char:
+ title: "Escape Character"
+ description: "The character used for escaping special characters.\
+ \ To disallow escaping, leave this field blank."
+ type: "string"
+ encoding:
+ title: "Encoding"
+ description: "The character encoding of the CSV data. Leave\
+ \ blank to default to UTF8. See list of python encodings for allowable\
+ \ options."
+ default: "utf8"
+ type: "string"
+ double_quote:
+ title: "Double Quote"
+ description: "Whether two quotes in a quoted CSV value denote\
+ \ a single quote in the data."
+ default: true
+ type: "boolean"
+ null_values:
+ title: "Null Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as null values. For example, if the value 'NA'\
+ \ should be interpreted as null, enter 'NA' in this field."
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ strings_can_be_null:
+ title: "Strings Can Be Null"
+ description: "Whether strings can be interpreted as null values.\
+ \ If true, strings that match the null_values set will be\
+ \ interpreted as null. If false, strings that match the null_values\
+ \ set will be interpreted as the string itself."
+ default: true
+ type: "boolean"
+ skip_rows_before_header:
+ title: "Skip Rows Before Header"
+ description: "The number of rows to skip before the header row.\
+ \ For example, if the header row is on the 3rd row, enter\
+ \ 2 in this field."
+ default: 0
+ type: "integer"
+ skip_rows_after_header:
+ title: "Skip Rows After Header"
+ description: "The number of rows to skip after the header row."
+ default: 0
+ type: "integer"
+ header_definition:
+ title: "CSV Header Definition"
+ description: "How headers will be defined. `User Provided` assumes\
+ \ the CSV does not have a header row and uses the headers\
+ \ provided and `Autogenerated` assumes the CSV does not have\
+ \ a header row and the CDK will generate headers using\
+ \ `f{i}` where `i` is the index starting from 0. Else, the\
+ \ default behavior is to use the header from the CSV file.\
+ \ If a user wants to autogenerate or provide column names\
+ \ for a CSV having headers, they can skip rows."
+ default:
+ header_definition_type: "From CSV"
+ oneOf:
+ - title: "From CSV"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "From CSV"
+ const: "From CSV"
+ type: "string"
+ enum:
+ - "From CSV"
+ required:
+ - "header_definition_type"
+ - title: "Autogenerated"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "Autogenerated"
+ const: "Autogenerated"
+ type: "string"
+ enum:
+ - "Autogenerated"
+ required:
+ - "header_definition_type"
+ - title: "User Provided"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "User Provided"
+ const: "User Provided"
+ type: "string"
+ enum:
+ - "User Provided"
+ column_names:
+ title: "Column Names"
+ description: "The column names that will be used while\
+ \ emitting the CSV records"
+ type: "array"
+ items:
+ type: "string"
+ required:
+ - "column_names"
+ - "header_definition_type"
+ type: "object"
+ true_values:
+ title: "True Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as true values."
+ default:
+ - "y"
+ - "yes"
+ - "t"
+ - "true"
+ - "on"
+ - "1"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ false_values:
+ title: "False Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as false values."
+ default:
+ - "n"
+ - "no"
+ - "f"
+ - "false"
+ - "off"
+ - "0"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ inference_type:
+ title: "Inference Type"
+ description: "How to infer the types of the columns. If none,\
+ \ inference defaults to strings."
+ default: "None"
+ airbyte_hidden: true
+ enum:
+ - "None"
+ - "Primitive Types Only"
+ required:
+ - "filetype"
+ - title: "Jsonl Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "jsonl"
+ const: "jsonl"
+ type: "string"
+ enum:
+ - "jsonl"
+ required:
+ - "filetype"
+ - title: "Parquet Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "parquet"
+ const: "parquet"
+ type: "string"
+ enum:
+ - "parquet"
+ decimal_as_float:
+ title: "Convert Decimal Fields to Floats"
+ description: "Whether to convert decimal fields to floats. There\
+ \ is a loss of precision when converting decimals to floats,\
+ \ so this is not recommended."
+ default: false
+ type: "boolean"
+ required:
+ - "filetype"
+ - title: "Document File Type Format (Experimental)"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "unstructured"
+ const: "unstructured"
+ type: "string"
+ enum:
+ - "unstructured"
+ skip_unprocessable_file_types:
+ title: "Skip Unprocessable File Types"
+ description: "If true, skip files that cannot be parsed because\
+ \ of their file type and log a warning. If false, fail the\
+ \ sync. Corrupted files with valid file types will still result\
+ \ in a failed sync."
+ default: true
+ always_show: true
+ type: "boolean"
+ description: "Extract text from document formats (.pdf, .docx, .md,\
+ \ .pptx) and emit as one record per file."
+ required:
+ - "filetype"
+ schemaless:
+ title: "Schemaless"
+ description: "When enabled, syncs will not validate or structure records\
+ \ against the stream's schema."
+ default: false
+ type: "boolean"
+ required:
+ - "name"
+ - "format"
azure_blob_storage_account_name:
title: "Azure Blob Storage account name"
- type: "string"
description: "The account's name of the Azure Blob Storage."
examples:
- "airbyte5storage"
+ order: 2
+ type: "string"
azure_blob_storage_account_key:
title: "Azure Blob Storage account key"
description: "The Azure blob storage account key."
airbyte_secret: true
- type: "string"
examples:
- "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="
- azure_blob_storage_blobs_prefix:
- title: "Azure Blob Storage blobs prefix"
- description: "The Azure blob storage prefix to be applied"
+ order: 3
type: "string"
+ x-speakeasy-param-sensitive: true
+ azure_blob_storage_container_name:
+ title: "Azure blob storage container (Bucket) Name"
+ description: "The name of the Azure blob storage container."
examples:
- - "FolderA/FolderB/"
- azure_blob_storage_schema_inference_limit:
- title: "Azure Blob Storage schema inference limit"
- description: "The Azure blob storage blobs to scan for inferring the schema,\
- \ useful on large amounts of data with consistent structure"
- type: "integer"
- examples:
- - "500"
- format:
- title: "Input Format"
- type: "object"
- description: "Input data format"
- oneOf:
- - title: "JSON Lines: newline-delimited JSON"
- required:
- - "format_type"
- properties:
- format_type:
- type: "string"
- const: "JSONL"
- enum:
- - "JSONL"
- sourceType:
- title: "azure-blob-storage"
- const: "azure-blob-storage"
- enum:
- - "azure-blob-storage"
- order: 0
+ - "airbytetescontainername"
+ order: 4
type: "string"
- source-azure-blob-storage-update:
- title: "AzureBlobStorage Source Spec"
- type: "object"
- required:
- - "azure_blob_storage_account_name"
- - "azure_blob_storage_account_key"
- - "azure_blob_storage_container_name"
- - "format"
- properties:
azure_blob_storage_endpoint:
title: "Endpoint Domain Name"
- type: "string"
- default: "blob.core.windows.net"
description: "This is Azure Blob Storage endpoint domain name. Leave default\
\ value (or leave it empty if run container from command line) to use\
\ Microsoft native from example."
examples:
- "blob.core.windows.net"
- azure_blob_storage_container_name:
- title: "Azure blob storage container (Bucket) Name"
- type: "string"
- description: "The name of the Azure blob storage container."
- examples:
- - "airbytetescontainername"
- azure_blob_storage_account_name:
- title: "Azure Blob Storage account name"
- type: "string"
- description: "The account's name of the Azure Blob Storage."
- examples:
- - "airbyte5storage"
- azure_blob_storage_account_key:
- title: "Azure Blob Storage account key"
- description: "The Azure blob storage account key."
- airbyte_secret: true
- type: "string"
- examples:
- - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="
- azure_blob_storage_blobs_prefix:
- title: "Azure Blob Storage blobs prefix"
- description: "The Azure blob storage prefix to be applied"
- type: "string"
- examples:
- - "FolderA/FolderB/"
- azure_blob_storage_schema_inference_limit:
- title: "Azure Blob Storage schema inference limit"
- description: "The Azure blob storage blobs to scan for inferring the schema,\
- \ useful on large amounts of data with consistent structure"
- type: "integer"
- examples:
- - "500"
- format:
- title: "Input Format"
- type: "object"
- description: "Input data format"
- oneOf:
- - title: "JSON Lines: newline-delimited JSON"
- required:
- - "format_type"
- properties:
- format_type:
- type: "string"
- const: "JSONL"
- enum:
- - "JSONL"
- source-close-com:
- title: "Close.com Spec"
- type: "object"
- required:
- - "api_key"
- - "sourceType"
- properties:
- api_key:
- title: "API Key"
- type: "string"
- description: "Close.com API key (usually starts with 'api_'; find yours\
- \ here)."
- airbyte_secret: true
- start_date:
- title: "Replication Start Date"
+ order: 11
type: "string"
- description: "The start date to sync data; all data after this date will\
- \ be replicated. Leave blank to retrieve all the data available in the\
- \ account. Format: YYYY-MM-DD."
- examples:
- - "2021-01-01"
- default: "2021-01-01"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
- format: "date"
sourceType:
- title: "close-com"
- const: "close-com"
+ title: "azure-blob-storage"
+ const: "azure-blob-storage"
enum:
- - "close-com"
+ - "azure-blob-storage"
order: 0
type: "string"
- source-close-com-update:
- title: "Close.com Spec"
- type: "object"
- required:
- - "api_key"
- properties:
- api_key:
- title: "API Key"
- type: "string"
- description: "Close.com API key (usually starts with 'api_'; find yours\
- \ here)."
- airbyte_secret: true
- start_date:
- title: "Replication Start Date"
- type: "string"
- description: "The start date to sync data; all data after this date will\
- \ be replicated. Leave blank to retrieve all the data available in the\
- \ account. Format: YYYY-MM-DD."
- examples:
- - "2021-01-01"
- default: "2021-01-01"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
- format: "date"
- source-zendesk-sunshine:
- type: "object"
required:
- - "start_date"
- - "subdomain"
+ - "streams"
+ - "azure_blob_storage_account_name"
+ - "azure_blob_storage_account_key"
+ - "azure_blob_storage_container_name"
- "sourceType"
+ source-azure-blob-storage-update:
+ title: "Config"
+ description: "NOTE: When this Spec is changed, legacy_config_transformer.py\
+ \ must also be modified to uptake the changes\nbecause it is responsible for\
+ \ converting legacy Azure Blob Storage v0 configs into v1 configs using the\
+ \ File-Based CDK."
+ type: "object"
properties:
- subdomain:
- type: "string"
- order: 0
- title: "Subdomain"
- description: "The subdomain for your Zendesk Account."
start_date:
- type: "string"
- title: "Start date"
- format: "date-time"
- description: "The date from which you'd like to replicate data for Zendesk\
- \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z."
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ title: "Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\
+ \ Any file modified before this date will not be replicated."
examples:
- - "2021-01-01T00:00:00Z"
- order: 1
+ - "2021-01-01T00:00:00.000000Z"
+ format: "date-time"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$"
+ pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ"
+ order: 1
+ type: "string"
+ streams:
+ title: "The list of streams to sync"
+ description: "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\
+ \ format, and how they should be parsed and validated. When sending data\
+ \ to warehouse destination such as Snowflake or BigQuery, each stream\
+ \ is a separate table."
+ order: 10
+ type: "array"
+ items:
+ title: "FileBasedStreamConfig"
+ type: "object"
+ properties:
+ name:
+ title: "Name"
+ description: "The name of the stream."
+ type: "string"
+ globs:
+ title: "Globs"
+ description: "The pattern used to specify which files should be selected\
+ \ from the file system. For more information on glob pattern matching\
+ \ look here."
+ default:
+ - "**"
+ order: 1
+ type: "array"
+ items:
+ type: "string"
+ legacy_prefix:
+ title: "Legacy Prefix"
+ description: "The path prefix configured in v3 versions of the S3\
+ \ connector. This option is deprecated in favor of a single glob."
+ airbyte_hidden: true
+ type: "string"
+ validation_policy:
+ title: "Validation Policy"
+ description: "The name of the validation policy that dictates sync\
+ \ behavior when a record does not adhere to the stream schema."
+ default: "Emit Record"
+ enum:
+ - "Emit Record"
+ - "Skip Record"
+ - "Wait for Discover"
+ input_schema:
+ title: "Input Schema"
+ description: "The schema that will be used to validate records extracted\
+ \ from the file. This will override the stream schema that is auto-detected\
+ \ from incoming files."
+ type: "string"
+ primary_key:
+ title: "Primary Key"
+ description: "The column or columns (for a composite key) that serves\
+ \ as the unique identifier of a record."
+ type: "string"
+ days_to_sync_if_history_is_full:
+ title: "Days To Sync If History Is Full"
+ description: "When the state history of the file store is full, syncs\
+ \ will only read files that were last modified in the provided day\
+ \ range."
+ default: 3
+ type: "integer"
+ format:
+ title: "Format"
+ description: "The configuration options that are used to alter how\
+ \ to read incoming files that deviate from the standard formatting."
+ type: "object"
+ oneOf:
+ - title: "Avro Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "avro"
+ const: "avro"
+ type: "string"
+ enum:
+ - "avro"
+ double_as_string:
+ title: "Convert Double Fields to Strings"
+ description: "Whether to convert double fields to strings. This\
+ \ is recommended if you have decimal numbers with a high degree\
+ \ of precision because there can be a loss of precision when\
+ \ handling floating point numbers."
+ default: false
+ type: "boolean"
+ required:
+ - "filetype"
+ - title: "CSV Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "csv"
+ const: "csv"
+ type: "string"
+ enum:
+ - "csv"
+ delimiter:
+ title: "Delimiter"
+ description: "The character delimiting individual cells in the\
+ \ CSV data. This may only be a 1-character string. For tab-delimited\
+ \ data enter '\\t'."
+ default: ","
+ type: "string"
+ quote_char:
+ title: "Quote Character"
+ description: "The character used for quoting CSV values. To\
+ \ disallow quoting, make this field blank."
+ default: "\""
+ type: "string"
+ escape_char:
+ title: "Escape Character"
+ description: "The character used for escaping special characters.\
+ \ To disallow escaping, leave this field blank."
+ type: "string"
+ encoding:
+ title: "Encoding"
+ description: "The character encoding of the CSV data. Leave\
+ \ blank to default to UTF8. See list of python encodings for allowable\
+ \ options."
+ default: "utf8"
+ type: "string"
+ double_quote:
+ title: "Double Quote"
+ description: "Whether two quotes in a quoted CSV value denote\
+ \ a single quote in the data."
+ default: true
+ type: "boolean"
+ null_values:
+ title: "Null Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as null values. For example, if the value 'NA'\
+ \ should be interpreted as null, enter 'NA' in this field."
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ strings_can_be_null:
+ title: "Strings Can Be Null"
+ description: "Whether strings can be interpreted as null values.\
+ \ If true, strings that match the null_values set will be\
+ \ interpreted as null. If false, strings that match the null_values\
+ \ set will be interpreted as the string itself."
+ default: true
+ type: "boolean"
+ skip_rows_before_header:
+ title: "Skip Rows Before Header"
+ description: "The number of rows to skip before the header row.\
+ \ For example, if the header row is on the 3rd row, enter\
+ \ 2 in this field."
+ default: 0
+ type: "integer"
+ skip_rows_after_header:
+ title: "Skip Rows After Header"
+ description: "The number of rows to skip after the header row."
+ default: 0
+ type: "integer"
+ header_definition:
+ title: "CSV Header Definition"
+ description: "How headers will be defined. `User Provided` assumes\
+ \ the CSV does not have a header row and uses the headers\
+ \ provided and `Autogenerated` assumes the CSV does not have\
+ \ a header row and the CDK will generate headers using\
+ \ `f{i}` where `i` is the index starting from 0. Else, the\
+ \ default behavior is to use the header from the CSV file.\
+ \ If a user wants to autogenerate or provide column names\
+ \ for a CSV having headers, they can skip rows."
+ default:
+ header_definition_type: "From CSV"
+ oneOf:
+ - title: "From CSV"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "From CSV"
+ const: "From CSV"
+ type: "string"
+ enum:
+ - "From CSV"
+ required:
+ - "header_definition_type"
+ - title: "Autogenerated"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "Autogenerated"
+ const: "Autogenerated"
+ type: "string"
+ enum:
+ - "Autogenerated"
+ required:
+ - "header_definition_type"
+ - title: "User Provided"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "User Provided"
+ const: "User Provided"
+ type: "string"
+ enum:
+ - "User Provided"
+ column_names:
+ title: "Column Names"
+ description: "The column names that will be used while\
+ \ emitting the CSV records"
+ type: "array"
+ items:
+ type: "string"
+ required:
+ - "column_names"
+ - "header_definition_type"
+ type: "object"
+ true_values:
+ title: "True Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as true values."
+ default:
+ - "y"
+ - "yes"
+ - "t"
+ - "true"
+ - "on"
+ - "1"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ false_values:
+ title: "False Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as false values."
+ default:
+ - "n"
+ - "no"
+ - "f"
+ - "false"
+ - "off"
+ - "0"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ inference_type:
+ title: "Inference Type"
+ description: "How to infer the types of the columns. If none,\
+ \ inference defaults to strings."
+ default: "None"
+ airbyte_hidden: true
+ enum:
+ - "None"
+ - "Primitive Types Only"
+ required:
+ - "filetype"
+ - title: "Jsonl Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "jsonl"
+ const: "jsonl"
+ type: "string"
+ enum:
+ - "jsonl"
+ required:
+ - "filetype"
+ - title: "Parquet Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "parquet"
+ const: "parquet"
+ type: "string"
+ enum:
+ - "parquet"
+ decimal_as_float:
+ title: "Convert Decimal Fields to Floats"
+ description: "Whether to convert decimal fields to floats. There\
+ \ is a loss of precision when converting decimals to floats,\
+ \ so this is not recommended."
+ default: false
+ type: "boolean"
+ required:
+ - "filetype"
+ - title: "Document File Type Format (Experimental)"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "unstructured"
+ const: "unstructured"
+ type: "string"
+ enum:
+ - "unstructured"
+ skip_unprocessable_file_types:
+ title: "Skip Unprocessable File Types"
+ description: "If true, skip files that cannot be parsed because\
+ \ of their file type and log a warning. If false, fail the\
+ \ sync. Corrupted files with valid file types will still result\
+ \ in a failed sync."
+ default: true
+ always_show: true
+ type: "boolean"
+ description: "Extract text from document formats (.pdf, .docx, .md,\
+ \ .pptx) and emit as one record per file."
+ required:
+ - "filetype"
+ schemaless:
+ title: "Schemaless"
+ description: "When enabled, syncs will not validate or structure records\
+ \ against the stream's schema."
+ default: false
+ type: "boolean"
+ required:
+ - "name"
+ - "format"
+ azure_blob_storage_account_name:
+ title: "Azure Blob Storage account name"
+ description: "The account's name of the Azure Blob Storage."
+ examples:
+ - "airbyte5storage"
+ order: 2
+ type: "string"
+ azure_blob_storage_account_key:
+ title: "Azure Blob Storage account key"
+ description: "The Azure blob storage account key."
+ airbyte_secret: true
+ examples:
+ - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="
+ order: 3
+ type: "string"
+ azure_blob_storage_container_name:
+ title: "Azure blob storage container (Bucket) Name"
+ description: "The name of the Azure blob storage container."
+ examples:
+ - "airbytetescontainername"
+ order: 4
+ type: "string"
+ azure_blob_storage_endpoint:
+ title: "Endpoint Domain Name"
+ description: "This is Azure Blob Storage endpoint domain name. Leave default\
+ \ value (or leave it empty if run container from command line) to use\
+ \ Microsoft native from example."
+ examples:
+ - "blob.core.windows.net"
+ order: 11
+ type: "string"
+ required:
+ - "streams"
+ - "azure_blob_storage_account_name"
+ - "azure_blob_storage_account_key"
+ - "azure_blob_storage_container_name"
+ source-close-com:
+ title: "Close.com Spec"
+ type: "object"
+ required:
+ - "api_key"
+ - "sourceType"
+ properties:
+ api_key:
+ title: "API Key"
+ type: "string"
+ description: "Close.com API key (usually starts with 'api_'; find yours\
+ \ here)."
+ airbyte_secret: true
+ x-speakeasy-param-sensitive: true
+ start_date:
+ title: "Replication Start Date"
+ type: "string"
+ description: "The start date to sync data; all data after this date will\
+ \ be replicated. Leave blank to retrieve all the data available in the\
+ \ account. Format: YYYY-MM-DD."
+ examples:
+ - "2021-01-01"
+ default: "2021-01-01"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ format: "date"
+ sourceType:
+ title: "close-com"
+ const: "close-com"
+ enum:
+ - "close-com"
+ order: 0
+ type: "string"
+ source-close-com-update:
+ title: "Close.com Spec"
+ type: "object"
+ required:
+ - "api_key"
+ properties:
+ api_key:
+ title: "API Key"
+ type: "string"
+ description: "Close.com API key (usually starts with 'api_'; find yours\
+ \ here)."
+ airbyte_secret: true
+ start_date:
+ title: "Replication Start Date"
+ type: "string"
+ description: "The start date to sync data; all data after this date will\
+ \ be replicated. Leave blank to retrieve all the data available in the\
+ \ account. Format: YYYY-MM-DD."
+ examples:
+ - "2021-01-01"
+ default: "2021-01-01"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ format: "date"
+ source-zendesk-sunshine:
+ type: "object"
+ required:
+ - "start_date"
+ - "subdomain"
+ - "sourceType"
+ properties:
+ subdomain:
+ type: "string"
+ order: 0
+ title: "Subdomain"
+ description: "The subdomain for your Zendesk Account."
+ start_date:
+ type: "string"
+ title: "Start date"
+ format: "date-time"
+ description: "The date from which you'd like to replicate data for Zendesk\
+ \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z."
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2021-01-01T00:00:00Z"
+ order: 1
credentials:
title: "Authorization Method"
type: "object"
@@ -29113,6 +30057,7 @@ components:
title: "Access Token"
description: "Long-term access Token for making authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "API Token"
required:
@@ -29133,6 +30078,7 @@ components:
description: "API Token. See the docs for information on how to generate this key."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
email:
type: "string"
title: "Email"
@@ -29243,6 +30189,7 @@ components:
description: "Your API Key. See here. The key is case sensitive."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
base:
type: "string"
description: "ISO reference currency. See docs to obtain yours."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "Access token"
type: "object"
required:
@@ -29499,6 +30386,7 @@ components:
\ OAuth Token Tools. See the docs to obtain yours."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "linkedin-pages"
const: "linkedin-pages"
@@ -29699,15 +30587,18 @@ components:
type: "string"
description: "Access Token for making authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
token_expiry_date:
type: "string"
description: "The date-time when the access token should be refreshed."
format: "date-time"
+ x-speakeasy-param-sensitive: true
refresh_token:
type: "string"
title: "Refresh Token"
description: "The token for obtaining a new access token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "Authenticate via API Key"
type: "object"
required:
@@ -29726,6 +30617,7 @@ components:
description: "API Key for making authenticated requests. More instruction\
\ on how to find this value in our docs"
+ x-speakeasy-param-sensitive: true
start_date:
order: 1
type: "string"
@@ -29841,6 +30733,7 @@ components:
title: "Secret Key"
description: "Short.io Secret Key"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Start Date"
@@ -29894,6 +30787,7 @@ components:
title: "Rest API Key"
airbyte_secret: true
description: "Instatus REST API key"
+ x-speakeasy-param-sensitive: true
sourceType:
title: "instatus"
const: "instatus"
@@ -29927,6 +30821,7 @@ components:
description: "Your Yandex Metrica API access token"
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
counter_id:
type: "string"
title: "Counter ID"
@@ -30019,6 +30914,7 @@ components:
type: "string"
description: "App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
title: "Date-From Filter"
type: "string"
@@ -30075,6 +30971,7 @@ components:
\ is your personal API token. See here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
team_id:
type: "string"
description: "The ID of your team in ClickUp. Retrieve it from the `/team`\
@@ -30175,6 +31072,7 @@ components:
description: "Refresh Token to renew the expired Access Token."
airbyte_secret: true
order: 2
+ x-speakeasy-param-sensitive: true
start_date:
title: "Start Date"
type: "string"
@@ -30254,7 +31152,6 @@ components:
title: "Source Gitlab Spec"
type: "object"
required:
- - "start_date"
- "credentials"
- "sourceType"
properties:
@@ -30289,14 +31186,17 @@ components:
type: "string"
description: "Access Token for making authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
token_expiry_date:
type: "string"
description: "The date-time when the access token should be refreshed."
format: "date-time"
+ x-speakeasy-param-sensitive: true
refresh_token:
type: "string"
description: "The key to refresh the expired access_token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "Private Token"
type: "object"
required:
@@ -30313,12 +31213,13 @@ components:
description: "Log into your Gitlab account and then generate a personal\
\ Access Token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Start Date"
description: "The date from which you'd like to replicate data for GitLab\
- \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\
- \ date will be replicated."
+ \ API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data\
+ \ will be replicated. All data generated after this date will be replicated."
examples:
- "2021-03-01T00:00:00Z"
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
@@ -30339,13 +31240,32 @@ components:
examples:
- "airbyte.io"
title: "Groups"
- description: "Space-delimited list of groups. e.g. airbyte.io."
+ description: "[DEPRECATED] Space-delimited list of groups. e.g. airbyte.io."
+ airbyte_hidden: true
+ groups_list:
+ type: "array"
+ items:
+ type: "string"
+ examples:
+ - "airbyte.io"
+ title: "Groups"
+ description: "List of groups. e.g. airbyte.io."
order: 3
projects:
type: "string"
title: "Projects"
examples:
- "airbyte.io/documentation"
+ description: "[DEPRECATED] Space-delimited list of projects. e.g. airbyte.io/documentation\
+ \ meltano/tap-gitlab."
+ airbyte_hidden: true
+ projects_list:
+ type: "array"
+ items:
+ type: "string"
+ title: "Projects"
+ examples:
+ - "airbyte.io/documentation"
description: "Space-delimited list of projects. e.g. airbyte.io/documentation\
\ meltano/tap-gitlab."
order: 4
@@ -30360,7 +31280,6 @@ components:
title: "Source Gitlab Spec"
type: "object"
required:
- - "start_date"
- "credentials"
properties:
credentials:
@@ -30422,8 +31341,8 @@ components:
type: "string"
title: "Start Date"
description: "The date from which you'd like to replicate data for GitLab\
- \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\
- \ date will be replicated."
+ \ API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data\
+ \ will be replicated. All data generated after this date will be replicated."
examples:
- "2021-03-01T00:00:00Z"
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
@@ -30444,13 +31363,32 @@ components:
examples:
- "airbyte.io"
title: "Groups"
- description: "Space-delimited list of groups. e.g. airbyte.io."
+ description: "[DEPRECATED] Space-delimited list of groups. e.g. airbyte.io."
+ airbyte_hidden: true
+ groups_list:
+ type: "array"
+ items:
+ type: "string"
+ examples:
+ - "airbyte.io"
+ title: "Groups"
+ description: "List of groups. e.g. airbyte.io."
order: 3
projects:
type: "string"
title: "Projects"
examples:
- "airbyte.io/documentation"
+ description: "[DEPRECATED] Space-delimited list of projects. e.g. airbyte.io/documentation\
+ \ meltano/tap-gitlab."
+ airbyte_hidden: true
+ projects_list:
+ type: "array"
+ items:
+ type: "string"
+ title: "Projects"
+ examples:
+ - "airbyte.io/documentation"
description: "Space-delimited list of projects. e.g. airbyte.io/documentation\
\ meltano/tap-gitlab."
order: 4
@@ -30467,6 +31405,7 @@ components:
description: "Your Access token. See here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "launchdarkly"
const: "launchdarkly"
@@ -30532,12 +31471,14 @@ components:
description: "Access Token for making authenticated requests."
airbyte_secret: true
order: 3
+ x-speakeasy-param-sensitive: true
refresh_token:
type: "string"
title: "Refresh Token"
description: "Refresh Token for making authenticated requests."
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Username and Password"
type: "object"
required:
@@ -30566,6 +31507,7 @@ components:
airbyte_secret: true
title: "Password"
order: 2
+ x-speakeasy-param-sensitive: true
order: 0
host:
description: "The host domain of the snowflake instance (must include the\
@@ -30824,6 +31766,7 @@ components:
\ knowns as scopes."
type: "string"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Start Date"
@@ -30952,6 +31895,7 @@ components:
token:
title: "API Token"
type: "string"
+ x-speakeasy-param-sensitive: true
start_date:
title: "Start Date"
description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
@@ -31086,6 +32030,7 @@ components:
- "xxxxxHRNxxx3TBxxxxxx"
airbyte_secret: true
order: 7
+ x-speakeasy-param-sensitive: true
secret_key:
title: "AWS IAM Secret Key"
description: "The Secret Key of the AWS IAM Role to use for pulling messages"
@@ -31094,6 +32039,7 @@ components:
- "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz"
airbyte_secret: true
order: 8
+ x-speakeasy-param-sensitive: true
sourceType:
title: "amazon-sqs"
const: "amazon-sqs"
@@ -31217,6 +32163,7 @@ components:
description: "Your User Token. See here. The token is case sensitive."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
organization:
title: "Organization"
type: "string"
@@ -31231,6 +32178,7 @@ components:
examples:
- "airbyte-ws-order"
- "airbyte-ws-checkout"
+ x-speakeasy-param-sensitive: true
start_date:
title: "Start date"
type: "string"
@@ -31316,6 +32264,7 @@ components:
>here This API is Case Sensitive."
type: "string"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
api_url:
title: "API Url"
description: "The URL for the Clockify API. This should only need to be\
@@ -31462,12 +32411,14 @@ components:
description: "Your application's Consumer Key."
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
access_token:
type: "string"
title: "Access Token"
description: "The user's Pocket access token."
airbyte_secret: true
order: 1
+ x-speakeasy-param-sensitive: true
state:
type: "string"
title: "State"
@@ -31667,14 +32618,17 @@ components:
type: "string"
description: "Access Token for making authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
token_expiry_date:
type: "string"
description: "The date-time when the access token should be refreshed."
format: "date-time"
+ x-speakeasy-param-sensitive: true
refresh_token:
type: "string"
description: "The key to refresh the expired access_token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "API Access Token"
type: "object"
required:
@@ -31695,6 +32649,7 @@ components:
\ > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
spreadsheet_id:
title: "Sheet ID"
description: "The spreadsheet ID. Find it by opening the spreadsheet then\
@@ -31869,6 +32824,7 @@ components:
>docs for more information on how to obtain this key."
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Start Date"
@@ -31955,6 +32911,7 @@ components:
airbyte_secret: true
order: 4
always_show: true
+ x-speakeasy-param-sensitive: true
jdbc_url_params:
description: "Additional properties to pass to the JDBC URL string when\
\ connecting to the database formatted as 'key=value' pairs separated\
@@ -32036,6 +32993,7 @@ components:
multiline: true
order: 3
always_show: true
+ x-speakeasy-param-sensitive: true
client_key_password:
type: "string"
title: "Client key password"
@@ -32043,6 +33001,7 @@ components:
\ you do not add it - the password will be generated automatically."
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Verify Identity"
description: "Always connect with SSL. Verify both CA and Hostname."
required:
@@ -32082,6 +33041,7 @@ components:
multiline: true
order: 3
always_show: true
+ x-speakeasy-param-sensitive: true
client_key_password:
type: "string"
title: "Client key password"
@@ -32089,6 +33049,7 @@ components:
\ you do not add it - the password will be generated automatically."
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
default: "required"
replication_method:
type: "object"
@@ -32209,6 +33170,7 @@ components:
airbyte_secret: true
multiline: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Password Authentication"
required:
- "tunnel_method"
@@ -32253,6 +33215,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
sourceType:
title: "mysql"
const: "mysql"
@@ -32613,6 +33576,7 @@ components:
description: "EmailOctopus API Key. See the docs for information on how to generate this key."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "emailoctopus"
const: "emailoctopus"
@@ -32652,6 +33616,7 @@ components:
description: "Secret key (secret_key)"
order: 1
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Start date"
@@ -32705,6 +33670,7 @@ components:
description: "Authorizes you to work with Orbit workspaces associated with\
\ the token."
order: 0
+ x-speakeasy-param-sensitive: true
workspace:
type: "string"
title: "Workspace"
@@ -32757,27 +33723,31 @@ components:
type: "object"
required:
- "token"
+ - "dataset_id"
- "sourceType"
properties:
token:
- title: "Personal API tokens"
- description: "Your application's Client Secret. You can find this value\
- \ on the console\
- \ integrations tab after you login."
type: "string"
+ title: "API token"
+ description: "Personal API token of your Apify account. In Apify Console,\
+ \ you can find your API token in the Settings section under the Integrations tab after you login. See\
+ \ the Apify Docs for more information."
examples:
- - "Personal API tokens"
+ - "apify_api_PbVwb1cBbuvbfg2jRmAIHZKgx3NQyfEMG7uk"
airbyte_secret: true
- datasetId:
+ x-speakeasy-param-sensitive: true
+ dataset_id:
type: "string"
title: "Dataset ID"
- description: "ID of the dataset you would like to load to Airbyte."
- clean:
- type: "boolean"
- title: "Clean"
- description: "If set to true, only clean items will be downloaded from the\
- \ dataset. See description of what clean means in Apify API docs. If not sure, set clean to false."
+ description: "ID of the dataset you would like to load to Airbyte. In Apify\
+ \ Console, you can view your datasets in the Storage section under the Datasets tab after you login. See the Apify Docs\
+ \ for more information."
+ examples:
+ - "rHuMdwm6xCFt6WiGU"
sourceType:
title: "apify-dataset"
const: "apify-dataset"
@@ -32790,26 +33760,29 @@ components:
type: "object"
required:
- "token"
+ - "dataset_id"
properties:
token:
- title: "Personal API tokens"
- description: "Your application's Client Secret. You can find this value\
- \ on the console\
- \ integrations tab after you login."
type: "string"
+ title: "API token"
+ description: "Personal API token of your Apify account. In Apify Console,\
+ \ you can find your API token in the Settings section under the Integrations tab after you login. See\
+ \ the Apify Docs for more information."
examples:
- - "Personal API tokens"
+ - "apify_api_PbVwb1cBbuvbfg2jRmAIHZKgx3NQyfEMG7uk"
airbyte_secret: true
- datasetId:
+ dataset_id:
type: "string"
title: "Dataset ID"
- description: "ID of the dataset you would like to load to Airbyte."
- clean:
- type: "boolean"
- title: "Clean"
- description: "If set to true, only clean items will be downloaded from the\
- \ dataset. See description of what clean means in Apify API docs. If not sure, set clean to false."
+ description: "ID of the dataset you would like to load to Airbyte. In Apify\
+ \ Console, you can view your datasets in the Storage section under the Datasets tab after you login. See the Apify Docs\
+ \ for more information."
+ examples:
+ - "rHuMdwm6xCFt6WiGU"
source-confluence:
type: "object"
required:
@@ -32833,6 +33806,7 @@ components:
>generating an API token."
airbyte_secret: true
order: 1
+ x-speakeasy-param-sensitive: true
domain_name:
title: "Domain name"
description: "Your Confluence domain name"
@@ -32888,6 +33862,7 @@ components:
description: "API Key"
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
environment:
type: "string"
description: "The environment to use. Either sandbox or production.\n"
@@ -33006,6 +33981,7 @@ components:
description: "Orb API Key, issued from the Orb admin console."
airbyte_secret: true
order: 1
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Start Date"
@@ -33033,6 +34009,7 @@ components:
description: "Property key names to extract from all events, in order to\
\ enrich ledger entries corresponding to an event deduction."
order: 4
+ x-speakeasy-param-sensitive: true
numeric_event_properties_keys:
type: "array"
items:
@@ -33041,10 +34018,12 @@ components:
description: "Property key names to extract from all events, in order to\
\ enrich ledger entries corresponding to an event deduction."
order: 5
+ x-speakeasy-param-sensitive: true
subscription_usage_grouping_key:
type: "string"
title: "Subscription usage grouping key (string value)"
description: "Property key name to group subscription usage by."
+ x-speakeasy-param-sensitive: true
plan_id:
type: "string"
title: "Orb Plan ID for Subscription Usage (string value)"
@@ -33130,6 +34109,7 @@ components:
>create authentication tokens.For self-hosted, you can find or create\
\ authentication tokens by visiting \"{instance_url_prefix}/settings/account/api/auth-tokens/\""
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
hostname:
type: "string"
title: "Host Name"
@@ -33194,21 +34174,26 @@ components:
title: "Notion Source Spec"
type: "object"
required:
- - "start_date"
+ - "credentials"
- "sourceType"
properties:
start_date:
title: "Start Date"
- description: "UTC date and time in the format 2017-01-25T00:00:00.000Z.\
- \ Any data before this date will not be replicated."
+ description: "UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z.\
+ \ During incremental sync, any data generated before this date will not\
+ \ be replicated. If left blank, the start date will be set to 2 years\
+ \ before the present date."
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$"
+ pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z"
examples:
- "2020-11-16T00:00:00.000Z"
type: "string"
format: "date-time"
credentials:
- title: "Authenticate using"
- description: "Pick an authentication method."
+ title: "Authentication Method"
+ description: "Choose either OAuth (recommended for Airbyte Cloud) or Access\
+ \ Token. See our docs\
+ \ for more information."
type: "object"
order: 1
oneOf:
@@ -33228,19 +34213,25 @@ components:
client_id:
title: "Client ID"
type: "string"
- description: "The ClientID of your Notion integration."
+ description: "The Client ID of your Notion integration. See our docs\
+ \ for more information."
airbyte_secret: true
client_secret:
title: "Client Secret"
type: "string"
- description: "The ClientSecret of your Notion integration."
+ description: "The Client Secret of your Notion integration. See our\
+ \ docs\
+ \ for more information."
airbyte_secret: true
access_token:
title: "Access Token"
type: "string"
- description: "Access Token is a token you received by complete the\
- \ OauthWebFlow of Notion."
+ description: "The Access Token received by completing the OAuth flow\
+ \ for your Notion integration. See our docs\
+ \ for more information."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "Access Token"
required:
@@ -33254,10 +34245,12 @@ components:
- "token"
token:
title: "Access Token"
- description: "Notion API access token, see the docs for more information on how to obtain this token."
+ description: "The Access Token for your private Notion integration.\
+ \ See the docs\
+ \ for more information on how to obtain this token."
type: "string"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "notion"
const: "notion"
@@ -33269,20 +34262,25 @@ components:
title: "Notion Source Spec"
type: "object"
required:
- - "start_date"
+ - "credentials"
properties:
start_date:
title: "Start Date"
- description: "UTC date and time in the format 2017-01-25T00:00:00.000Z.\
- \ Any data before this date will not be replicated."
+ description: "UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z.\
+ \ During incremental sync, any data generated before this date will not\
+ \ be replicated. If left blank, the start date will be set to 2 years\
+ \ before the present date."
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$"
+ pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z"
examples:
- "2020-11-16T00:00:00.000Z"
type: "string"
format: "date-time"
credentials:
- title: "Authenticate using"
- description: "Pick an authentication method."
+ title: "Authentication Method"
+ description: "Choose either OAuth (recommended for Airbyte Cloud) or Access\
+ \ Token. See our docs\
+ \ for more information."
type: "object"
order: 1
oneOf:
@@ -33302,18 +34300,23 @@ components:
client_id:
title: "Client ID"
type: "string"
- description: "The ClientID of your Notion integration."
+ description: "The Client ID of your Notion integration. See our docs\
+ \ for more information."
airbyte_secret: true
client_secret:
title: "Client Secret"
type: "string"
- description: "The ClientSecret of your Notion integration."
+ description: "The Client Secret of your Notion integration. See our\
+ \ docs\
+ \ for more information."
airbyte_secret: true
access_token:
title: "Access Token"
type: "string"
- description: "Access Token is a token you received by complete the\
- \ OauthWebFlow of Notion."
+ description: "The Access Token received by completing the OAuth flow\
+ \ for your Notion integration. See our docs\
+ \ for more information."
airbyte_secret: true
- type: "object"
title: "Access Token"
@@ -33328,8 +34331,9 @@ components:
- "token"
token:
title: "Access Token"
- description: "Notion API access token, see the docs for more information on how to obtain this token."
+ description: "The Access Token for your private Notion integration.\
+ \ See the docs\
+ \ for more information on how to obtain this token."
type: "string"
airbyte_secret: true
source-trustpilot:
@@ -33377,16 +34381,19 @@ components:
title: "Access Token"
description: "Access Token for making authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
token_expiry_date:
type: "string"
title: "Token expiry date time"
description: "The date-time when the access token should be refreshed."
format: "date-time"
+ x-speakeasy-param-sensitive: true
refresh_token:
type: "string"
title: "Refresh token"
description: "The key to refresh the expired access_token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "API Key"
description: "The API key authentication method gives you access to only\
@@ -33534,6 +34541,7 @@ components:
description: "An API key is required to access Google APIs. To get yours,\
\ go to the Google console and generate an API key for Webfonts"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sort:
type: "string"
description: "Optional, to find how to sort"
@@ -33701,6 +34709,7 @@ components:
description: "Slack access_token. See our docs if you need help generating the token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
order: 0
- type: "object"
title: "API Token"
@@ -33719,6 +34728,7 @@ components:
description: "A Slack bot token. See the docs for instructions on how to generate it."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
order: 1
sourceType:
title: "slack"
@@ -33828,7 +34838,7 @@ components:
>docs for instructions on how to generate it."
airbyte_secret: true
order: 1
- source-file-secure:
+ source-file:
title: "File Source Spec"
type: "object"
required:
@@ -33934,6 +34944,7 @@ components:
description: "In order to access private Buckets stored on AWS S3,\
\ this connector would need credentials with the proper permissions.\
\ If accessing publicly available data, this field is not necessary."
+ x-speakeasy-param-sensitive: true
aws_secret_access_key:
type: "string"
title: "AWS Secret Access Key"
@@ -33941,6 +34952,7 @@ components:
\ this connector would need credentials with the proper permissions.\
\ If accessing publicly available data, this field is not necessary."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "AzBlob: Azure Blob Storage"
required:
- "storage"
@@ -33966,6 +34978,7 @@ components:
\ Access Signature) token. If accessing publicly available data,\
\ this field is not necessary."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
shared_key:
type: "string"
title: "Shared Key"
@@ -33974,6 +34987,7 @@ components:
\ account shared key (aka account key or access key). If accessing\
\ publicly available data, this field is not necessary."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "SSH: Secure Shell"
required:
- "storage"
@@ -33995,6 +35009,7 @@ components:
title: "Password"
description: ""
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
host:
type: "string"
title: "Host"
@@ -34025,6 +35040,7 @@ components:
title: "Password"
description: ""
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
host:
type: "string"
title: "Host"
@@ -34055,6 +35071,7 @@ components:
title: "Password"
description: ""
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
host:
type: "string"
title: "Host"
@@ -34065,13 +35082,13 @@ components:
default: "22"
description: ""
sourceType:
- title: "file-secure"
- const: "file-secure"
+ title: "file"
+ const: "file"
enum:
- - "file-secure"
+ - "file"
order: 0
type: "string"
- source-file-secure-update:
+ source-file-update:
title: "File Source Spec"
type: "object"
required:
@@ -34321,6 +35338,7 @@ components:
\ > API tokens. See here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
project_id:
title: "Project Id"
type: "string"
@@ -34377,6 +35395,7 @@ components:
title: "Refresh Token"
description: "OAuth2.0 Refresh Token"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
dc_region:
title: "Data Center Location"
type: "string"
@@ -34515,6 +35534,7 @@ components:
description: "The Aptrinsic API Key which is received from the dashboard\
\ settings (ref - https://app.aptrinsic.com/settings/api-keys)"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "gainsight-px"
const: "gainsight-px"
@@ -34573,10 +35593,10 @@ components:
minimum: 0
description: "When set, the connector will always re-export data from the\
\ past N days, where N is the value set here. This is useful if your data\
- \ is frequently updated after creation. Applies only to streams that do\
- \ not support event-based incremental syncs: CheckoutSessionLineItems,\
- \ Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks.\
- \ More info here"
order: 3
slice_range:
@@ -34596,6 +35616,30 @@ components:
\ be made and faster the sync will be. On the other hand, the more seldom\
\ the state is persisted."
order: 4
+ num_workers:
+ type: "integer"
+ title: "Number of concurrent workers"
+ minimum: 1
+ maximum: 20
+ default: 10
+ examples:
+ - 1
+ - 2
+ - 3
+ description: "The number of worker threads to use for the sync. The performance\
+ \ upper boundary depends on the call_rate_limit setting and type of account."
+ order: 5
+ call_rate_limit:
+ type: "integer"
+ title: "Max number of API calls per second"
+ examples:
+ - 25
+ - 100
+ description: "The number of API calls per second that you allow the connector\
+ \ to make. This value cannot be bigger than the real API call rate limit\
+ \ (https://stripe.com/docs/rate-limits). If not specified, the default\
+ \ maximum is 25 and 100 calls per second for test and production tokens\
+ \ respectively."
sourceType:
title: "stripe"
const: "stripe"
@@ -34641,10 +35685,10 @@ components:
minimum: 0
description: "When set, the connector will always re-export data from the\
\ past N days, where N is the value set here. This is useful if your data\
- \ is frequently updated after creation. Applies only to streams that do\
- \ not support event-based incremental syncs: CheckoutSessionLineItems,\
- \ Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks.\
- \ More info here"
order: 3
slice_range:
@@ -34664,6 +35708,30 @@ components:
\ be made and faster the sync will be. On the other hand, the more seldom\
\ the state is persisted."
order: 4
+ num_workers:
+ type: "integer"
+ title: "Number of concurrent workers"
+ minimum: 1
+ maximum: 20
+ default: 10
+ examples:
+ - 1
+ - 2
+ - 3
+ description: "The number of worker threads to use for the sync. The performance\
+ \ upper boundary depends on the call_rate_limit setting and type of account."
+ order: 5
+ call_rate_limit:
+ type: "integer"
+ title: "Max number of API calls per second"
+ examples:
+ - 25
+ - 100
+ description: "The number of API calls per second that you allow the connector\
+ \ to make. This value cannot be bigger than the real API call rate limit\
+ \ (https://stripe.com/docs/rate-limits). If not specified, the default\
+ \ maximum is 25 and 100 calls per second for test and production tokens\
+ \ respectively."
source-youtube-analytics:
title: "YouTube Analytics Spec"
type: "object"
@@ -34696,6 +35764,7 @@ components:
description: "A refresh token generated using the above client ID and\
\ secret"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "youtube-analytics"
const: "youtube-analytics"
@@ -34795,6 +35864,7 @@ components:
\ href='https://developers.google.com/identity/protocols/oauth2'>Google's\
\ documentation for more information."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "Service Account Key Authentication"
type: "object"
required:
@@ -34949,6 +36019,7 @@ components:
https://docs.airbyte.com/integrations/sources/zendesk-talk\">docs\
\ for more information."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "OAuth2.0"
type: "object"
required:
@@ -34968,6 +36039,7 @@ components:
https://docs.airbyte.com/integrations/sources/zendesk-talk\">docs\
\ for more information."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
client_id:
type: "string"
title: "Client ID"
@@ -35101,6 +36173,7 @@ components:
description: "Freshdesk API Key. See the docs for more information on how to obtain this key."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
requests_per_minute:
title: "Requests per minute"
type: "integer"
@@ -35158,246 +36231,6 @@ components:
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- "2020-12-01T00:00:00Z"
- source-google-analytics-v4:
- title: "Google Analytics (V4) Spec"
- type: "object"
- required:
- - "view_id"
- - "start_date"
- - "sourceType"
- properties:
- credentials:
- order: 0
- type: "object"
- title: "Credentials"
- description: "Credentials for the service"
- oneOf:
- - title: "Authenticate via Google (Oauth)"
- type: "object"
- required:
- - "client_id"
- - "client_secret"
- - "refresh_token"
- properties:
- auth_type:
- type: "string"
- const: "Client"
- order: 0
- enum:
- - "Client"
- client_id:
- title: "Client ID"
- type: "string"
- description: "The Client ID of your Google Analytics developer application."
- airbyte_secret: true
- order: 1
- client_secret:
- title: "Client Secret"
- type: "string"
- description: "The Client Secret of your Google Analytics developer\
- \ application."
- airbyte_secret: true
- order: 2
- refresh_token:
- title: "Refresh Token"
- type: "string"
- description: "The token for obtaining a new access token."
- airbyte_secret: true
- order: 3
- access_token:
- title: "Access Token"
- type: "string"
- description: "Access Token for making authenticated requests."
- airbyte_secret: true
- order: 4
- - type: "object"
- title: "Service Account Key Authentication"
- required:
- - "credentials_json"
- properties:
- auth_type:
- type: "string"
- const: "Service"
- order: 0
- enum:
- - "Service"
- credentials_json:
- title: "Service Account JSON Key"
- type: "string"
- description: "The JSON key of the service account to use for authorization"
- examples:
- - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\
- \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }"
- airbyte_secret: true
- start_date:
- order: 1
- type: "string"
- title: "Replication Start Date"
- description: "The date in the format YYYY-MM-DD. Any data before this date\
- \ will not be replicated."
- examples:
- - "2020-06-01"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$|^$|[\\s\\S]+$"
- format: "date"
- view_id:
- order: 2
- type: "string"
- title: "View ID"
- description: "The ID for the Google Analytics View you want to fetch data\
- \ from. This can be found from the Google Analytics Account Explorer."
- custom_reports:
- order: 3
- type: "string"
- title: "Custom Reports"
- description: "A JSON array describing the custom reports you want to sync\
- \ from Google Analytics. See the docs for more information about the exact format you can use\
- \ to fill out this field."
- window_in_days:
- type: "integer"
- title: "Data request time increment in days"
- description: "The time increment used by the connector when requesting data\
- \ from the Google Analytics API. More information is available in the\
- \ the docs. The bigger this value is, the faster the sync will be,\
- \ but the more likely that sampling will be applied to your data, potentially\
- \ causing inaccuracies in the returned results. We recommend setting this\
- \ to 1 unless you have a hard requirement to make the sync faster at the\
- \ expense of accuracy. The minimum allowed value for this field is 1,\
- \ and the maximum is 364. "
- examples:
- - 30
- - 60
- - 90
- - 120
- - 200
- - 364
- default: 1
- order: 4
- sourceType:
- title: "google-analytics-v4"
- const: "google-analytics-v4"
- enum:
- - "google-analytics-v4"
- order: 0
- type: "string"
- source-google-analytics-v4-update:
- title: "Google Analytics (V4) Spec"
- type: "object"
- required:
- - "view_id"
- - "start_date"
- properties:
- credentials:
- order: 0
- type: "object"
- title: "Credentials"
- description: "Credentials for the service"
- oneOf:
- - title: "Authenticate via Google (Oauth)"
- type: "object"
- required:
- - "client_id"
- - "client_secret"
- - "refresh_token"
- properties:
- auth_type:
- type: "string"
- const: "Client"
- order: 0
- enum:
- - "Client"
- client_id:
- title: "Client ID"
- type: "string"
- description: "The Client ID of your Google Analytics developer application."
- airbyte_secret: true
- order: 1
- client_secret:
- title: "Client Secret"
- type: "string"
- description: "The Client Secret of your Google Analytics developer\
- \ application."
- airbyte_secret: true
- order: 2
- refresh_token:
- title: "Refresh Token"
- type: "string"
- description: "The token for obtaining a new access token."
- airbyte_secret: true
- order: 3
- access_token:
- title: "Access Token"
- type: "string"
- description: "Access Token for making authenticated requests."
- airbyte_secret: true
- order: 4
- - type: "object"
- title: "Service Account Key Authentication"
- required:
- - "credentials_json"
- properties:
- auth_type:
- type: "string"
- const: "Service"
- order: 0
- enum:
- - "Service"
- credentials_json:
- title: "Service Account JSON Key"
- type: "string"
- description: "The JSON key of the service account to use for authorization"
- examples:
- - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\
- \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }"
- airbyte_secret: true
- start_date:
- order: 1
- type: "string"
- title: "Replication Start Date"
- description: "The date in the format YYYY-MM-DD. Any data before this date\
- \ will not be replicated."
- examples:
- - "2020-06-01"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$|^$|[\\s\\S]+$"
- format: "date"
- view_id:
- order: 2
- type: "string"
- title: "View ID"
- description: "The ID for the Google Analytics View you want to fetch data\
- \ from. This can be found from the Google Analytics Account Explorer."
- custom_reports:
- order: 3
- type: "string"
- title: "Custom Reports"
- description: "A JSON array describing the custom reports you want to sync\
- \ from Google Analytics. See the docs for more information about the exact format you can use\
- \ to fill out this field."
- window_in_days:
- type: "integer"
- title: "Data request time increment in days"
- description: "The time increment used by the connector when requesting data\
- \ from the Google Analytics API. More information is available in the\
- \ the docs. The bigger this value is, the faster the sync will be,\
- \ but the more likely that sampling will be applied to your data, potentially\
- \ causing inaccuracies in the returned results. We recommend setting this\
- \ to 1 unless you have a hard requirement to make the sync faster at the\
- \ expense of accuracy. The minimum allowed value for this field is 1,\
- \ and the maximum is 364. "
- examples:
- - 30
- - 60
- - 90
- - 120
- - 200
- - 364
- default: 1
- order: 4
source-asana:
title: "Asana Spec"
type: "object"
@@ -35436,6 +36269,7 @@ components:
title: ""
description: ""
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "Authenticate with Personal Access Token"
required:
@@ -35454,6 +36288,18 @@ components:
description: "Asana Personal Access Token (generate yours here)."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
+ test_mode:
+ type: "boolean"
+ title: "Test Mode"
+ description: "This flag is used for testing purposes for certain streams\
+ \ that return a lot of data. This flag is not meant to be enabled for\
+ \ prod."
+ airbyte_hidden: true
+ organization_export_ids:
+ title: "Organization Export IDs"
+ description: "Globally unique identifiers for the organization exports"
+ type: "array"
sourceType:
title: "asana"
const: "asana"
@@ -35517,6 +36363,17 @@ components:
description: "Asana Personal Access Token (generate yours here)."
airbyte_secret: true
+ test_mode:
+ type: "boolean"
+ title: "Test Mode"
+ description: "This flag is used for testing purposes for certain streams\
+ \ that return a lot of data. This flag is not meant to be enabled for\
+ \ prod."
+ airbyte_hidden: true
+ organization_export_ids:
+ title: "Organization Export IDs"
+ description: "Globally unique identifiers for the organization exports"
+ type: "array"
source-posthog:
title: "PostHog Spec"
type: "object"
@@ -35540,6 +36397,7 @@ components:
title: "API Key"
description: "API Key. See the docs for information on how to generate this key."
+ x-speakeasy-param-sensitive: true
base_url:
type: "string"
default: "https://app.posthog.com"
@@ -35610,18 +36468,24 @@ components:
- 10
- 5
source-getlago:
- title: "Getlago Spec"
+ title: "Lago Spec"
type: "object"
required:
- "api_key"
- "sourceType"
properties:
+ api_url:
+ title: "API Url"
+ type: "string"
+ description: "Your Lago API URL"
+ default: "https://api.getlago.com/api/v1"
api_key:
title: "API Key"
type: "string"
description: "Your API Key. See here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "getlago"
const: "getlago"
@@ -35630,11 +36494,16 @@ components:
order: 0
type: "string"
source-getlago-update:
- title: "Getlago Spec"
+ title: "Lago Spec"
type: "object"
required:
- "api_key"
properties:
+ api_url:
+ title: "API Url"
+ type: "string"
+ description: "Your Lago API URL"
+ default: "https://api.getlago.com/api/v1"
api_key:
title: "API Key"
type: "string"
@@ -35653,6 +36522,7 @@ components:
type: "string"
title: "API Key"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
grid_id:
type: "string"
title: "Grid ID"
@@ -35738,6 +36608,7 @@ components:
type: "string"
description: "A Refresh Token to renew the expired Access Token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "Authenticate via Microsoft"
required:
@@ -35877,6 +36748,7 @@ components:
required:
- "aws_environment"
- "region"
+ - "account_type"
- "lwa_app_id"
- "lwa_client_secret"
- "refresh_token"
@@ -35928,46 +36800,35 @@ components:
default: "US"
type: "string"
order: 2
- aws_access_key:
- title: "AWS Access Key"
- description: "Specifies the AWS access key used as part of the credentials\
- \ to authenticate the user."
- airbyte_secret: true
- order: 3
- type: "string"
- aws_secret_key:
- title: "AWS Secret Access Key"
- description: "Specifies the AWS secret key used as part of the credentials\
- \ to authenticate the user."
- airbyte_secret: true
- order: 4
- type: "string"
- role_arn:
- title: "Role ARN"
- description: "Specifies the Amazon Resource Name (ARN) of an IAM role that\
- \ you want to use to perform operations requested using this profile.\
- \ (Needs permission to 'Assume Role' STS)."
- airbyte_secret: true
- order: 5
+ account_type:
+ title: "AWS Seller Partner Account Type"
+ description: "Type of the Account you're going to authorize the Airbyte\
+ \ application by"
+ enum:
+ - "Seller"
+ - "Vendor"
+ default: "Seller"
type: "string"
+ order: 3
lwa_app_id:
title: "LWA Client Id"
description: "Your Login with Amazon Client ID."
- order: 6
+ order: 4
airbyte_secret: true
type: "string"
lwa_client_secret:
title: "LWA Client Secret"
description: "Your Login with Amazon Client Secret."
airbyte_secret: true
- order: 7
+ order: 5
type: "string"
refresh_token:
title: "Refresh Token"
description: "The Refresh Token obtained via OAuth flow authorization."
airbyte_secret: true
- order: 8
+ order: 6
type: "string"
+ x-speakeasy-param-sensitive: true
replication_start_date:
title: "Start Date"
description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
@@ -35975,8 +36836,9 @@ components:
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- "2017-01-25T00:00:00Z"
- order: 9
+ order: 7
type: "string"
+ format: "date-time"
replication_end_date:
title: "End Date"
description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
@@ -35984,8 +36846,9 @@ components:
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$|^$"
examples:
- "2017-01-25T00:00:00Z"
- order: 10
+ order: 8
type: "string"
+ format: "date-time"
period_in_days:
title: "Period In Days"
type: "integer"
@@ -35993,7 +36856,8 @@ components:
\ when no updated state is present for reports that support sliced incremental\
\ sync."
default: 90
- order: 11
+ minimum: 1
+ order: 9
report_options:
title: "Report Options"
description: "Additional information passed to reports. This varies by report\
@@ -36002,18 +36866,8 @@ components:
- "{\"GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT\": {\"reportPeriod\": \"WEEK\"\
}}"
- "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}"
- order: 12
+ order: 10
type: "string"
- max_wait_seconds:
- title: "Max wait time for reports (in seconds)"
- description: "Sometimes report can take up to 30 minutes to generate. This\
- \ will set the limit for how long to wait for a successful report."
- default: 500
- examples:
- - "500"
- - "1980"
- order: 13
- type: "integer"
advanced_stream_options:
title: "Advanced Stream Options"
description: "Additional information to configure report options. This varies\
@@ -36022,7 +36876,7 @@ components:
examples:
- "{\"GET_SALES_AND_TRAFFIC_REPORT\": {\"availability_sla_days\": 3}}"
- "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}"
- order: 14
+ order: 12
type: "string"
sourceType:
title: "amazon-seller-partner"
@@ -36037,6 +36891,7 @@ components:
required:
- "aws_environment"
- "region"
+ - "account_type"
- "lwa_app_id"
- "lwa_client_secret"
- "refresh_token"
@@ -36087,45 +36942,33 @@ components:
default: "US"
type: "string"
order: 2
- aws_access_key:
- title: "AWS Access Key"
- description: "Specifies the AWS access key used as part of the credentials\
- \ to authenticate the user."
- airbyte_secret: true
- order: 3
- type: "string"
- aws_secret_key:
- title: "AWS Secret Access Key"
- description: "Specifies the AWS secret key used as part of the credentials\
- \ to authenticate the user."
- airbyte_secret: true
- order: 4
- type: "string"
- role_arn:
- title: "Role ARN"
- description: "Specifies the Amazon Resource Name (ARN) of an IAM role that\
- \ you want to use to perform operations requested using this profile.\
- \ (Needs permission to 'Assume Role' STS)."
- airbyte_secret: true
- order: 5
+ account_type:
+ title: "AWS Seller Partner Account Type"
+ description: "Type of the Account you're going to authorize the Airbyte\
+ \ application by"
+ enum:
+ - "Seller"
+ - "Vendor"
+ default: "Seller"
type: "string"
+ order: 3
lwa_app_id:
title: "LWA Client Id"
description: "Your Login with Amazon Client ID."
- order: 6
+ order: 4
airbyte_secret: true
type: "string"
lwa_client_secret:
title: "LWA Client Secret"
description: "Your Login with Amazon Client Secret."
airbyte_secret: true
- order: 7
+ order: 5
type: "string"
refresh_token:
title: "Refresh Token"
description: "The Refresh Token obtained via OAuth flow authorization."
airbyte_secret: true
- order: 8
+ order: 6
type: "string"
replication_start_date:
title: "Start Date"
@@ -36134,8 +36977,9 @@ components:
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- "2017-01-25T00:00:00Z"
- order: 9
+ order: 7
type: "string"
+ format: "date-time"
replication_end_date:
title: "End Date"
description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
@@ -36143,8 +36987,9 @@ components:
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$|^$"
examples:
- "2017-01-25T00:00:00Z"
- order: 10
+ order: 8
type: "string"
+ format: "date-time"
period_in_days:
title: "Period In Days"
type: "integer"
@@ -36152,7 +36997,8 @@ components:
\ when no updated state is present for reports that support sliced incremental\
\ sync."
default: 90
- order: 11
+ minimum: 1
+ order: 9
report_options:
title: "Report Options"
description: "Additional information passed to reports. This varies by report\
@@ -36161,18 +37007,8 @@ components:
- "{\"GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT\": {\"reportPeriod\": \"WEEK\"\
}}"
- "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}"
- order: 12
+ order: 10
type: "string"
- max_wait_seconds:
- title: "Max wait time for reports (in seconds)"
- description: "Sometimes report can take up to 30 minutes to generate. This\
- \ will set the limit for how long to wait for a successful report."
- default: 500
- examples:
- - "500"
- - "1980"
- order: 13
- type: "integer"
advanced_stream_options:
title: "Advanced Stream Options"
description: "Additional information to configure report options. This varies\
@@ -36181,66 +37017,8 @@ components:
examples:
- "{\"GET_SALES_AND_TRAFFIC_REPORT\": {\"availability_sla_days\": 3}}"
- "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}"
- order: 14
- type: "string"
- source-bigcommerce:
- title: "BigCommerce Source CDK Specifications"
- type: "object"
- required:
- - "start_date"
- - "store_hash"
- - "access_token"
- - "sourceType"
- properties:
- start_date:
- type: "string"
- title: "Start Date"
- description: "The date you would like to replicate data. Format: YYYY-MM-DD."
- examples:
- - "2021-01-01"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
- store_hash:
- type: "string"
- title: "Store Hash"
- description: "The hash code of the store. For https://api.bigcommerce.com/stores/HASH_CODE/v3/,\
- \ The store's hash code is 'HASH_CODE'."
- access_token:
- type: "string"
- title: "Access Token"
- description: "Access Token for making authenticated requests."
- airbyte_secret: true
- sourceType:
- title: "bigcommerce"
- const: "bigcommerce"
- enum:
- - "bigcommerce"
- order: 0
- type: "string"
- source-bigcommerce-update:
- title: "BigCommerce Source CDK Specifications"
- type: "object"
- required:
- - "start_date"
- - "store_hash"
- - "access_token"
- properties:
- start_date:
- type: "string"
- title: "Start Date"
- description: "The date you would like to replicate data. Format: YYYY-MM-DD."
- examples:
- - "2021-01-01"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
- store_hash:
- type: "string"
- title: "Store Hash"
- description: "The hash code of the store. For https://api.bigcommerce.com/stores/HASH_CODE/v3/,\
- \ The store's hash code is 'HASH_CODE'."
- access_token:
+ order: 12
type: "string"
- title: "Access Token"
- description: "Access Token for making authenticated requests."
- airbyte_secret: true
source-recreation:
title: "Recreation Spec"
type: "object"
@@ -36253,6 +37031,7 @@ components:
type: "string"
description: "API Key"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
query_campsites:
title: "Query Campsite"
type: "string"
@@ -36321,6 +37100,7 @@ components:
\ our documentation\
\ for more information."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "Access Token"
type: "object"
required:
@@ -36338,6 +37118,7 @@ components:
\ Refer to our documentation\
\ for more information."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Start Date"
@@ -36598,6 +37379,7 @@ components:
description: "Your API Key. Get your key here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "us-census"
const: "us-census"
@@ -36636,9 +37418,6 @@ components:
source-pinterest:
title: "Pinterest Spec"
type: "object"
- required:
- - "start_date"
- - "sourceType"
properties:
start_date:
type: "string"
@@ -36696,23 +37475,263 @@ components:
description: "Refresh Token to obtain new Access Token, when it's\
\ expired."
airbyte_secret: true
- - type: "object"
- title: "Access Token"
+ x-speakeasy-param-sensitive: true
+ custom_reports:
+ title: "Custom Reports"
+ description: "A list which contains ad statistics entries, each entry must\
+ \ have a name and can contains fields, breakdowns or action_breakdowns.\
+ \ Click on \"add\" to fill this field."
+ type: "array"
+ items:
+ title: "ReportConfig"
+ description: "Config for custom report"
+ type: "object"
required:
- - "auth_method"
- - "access_token"
+ - "name"
+ - "level"
+ - "granularity"
+ - "columns"
properties:
- auth_method:
+ name:
+ title: "Name"
+ description: "The name value of report"
type: "string"
- const: "access_token"
order: 0
+ level:
+ title: "Level"
+ description: "Chosen level for API"
+ default: "ADVERTISER"
enum:
- - "access_token"
- access_token:
+ - "ADVERTISER"
+ - "ADVERTISER_TARGETING"
+ - "CAMPAIGN"
+ - "CAMPAIGN_TARGETING"
+ - "AD_GROUP"
+ - "AD_GROUP_TARGETING"
+ - "PIN_PROMOTION"
+ - "PIN_PROMOTION_TARGETING"
+ - "KEYWORD"
+ - "PRODUCT_GROUP"
+ - "PRODUCT_GROUP_TARGETING"
+ - "PRODUCT_ITEM"
type: "string"
- title: "Access Token"
- description: "The Access Token to make authenticated requests."
- airbyte_secret: true
+ order: 1
+ granularity:
+ title: "Granularity"
+ description: "Chosen granularity for API"
+ default: "TOTAL"
+ enum:
+ - "TOTAL"
+ - "DAY"
+ - "HOUR"
+ - "WEEK"
+ - "MONTH"
+ type: "string"
+ order: 2
+ columns:
+ title: "Columns"
+ description: "A list of chosen columns"
+ default: []
+ type: "array"
+ order: 3
+ items:
+ title: "ValidEnums"
+ description: "An enumeration."
+ enum:
+ - "ADVERTISER_ID"
+ - "AD_ACCOUNT_ID"
+ - "AD_GROUP_ENTITY_STATUS"
+ - "AD_GROUP_ID"
+ - "AD_ID"
+ - "CAMPAIGN_DAILY_SPEND_CAP"
+ - "CAMPAIGN_ENTITY_STATUS"
+ - "CAMPAIGN_ID"
+ - "CAMPAIGN_LIFETIME_SPEND_CAP"
+ - "CAMPAIGN_NAME"
+ - "CHECKOUT_ROAS"
+ - "CLICKTHROUGH_1"
+ - "CLICKTHROUGH_1_GROSS"
+ - "CLICKTHROUGH_2"
+ - "CPC_IN_MICRO_DOLLAR"
+ - "CPM_IN_DOLLAR"
+ - "CPM_IN_MICRO_DOLLAR"
+ - "CTR"
+ - "CTR_2"
+ - "ECPCV_IN_DOLLAR"
+ - "ECPCV_P95_IN_DOLLAR"
+ - "ECPC_IN_DOLLAR"
+ - "ECPC_IN_MICRO_DOLLAR"
+ - "ECPE_IN_DOLLAR"
+ - "ECPM_IN_MICRO_DOLLAR"
+ - "ECPV_IN_DOLLAR"
+ - "ECTR"
+ - "EENGAGEMENT_RATE"
+ - "ENGAGEMENT_1"
+ - "ENGAGEMENT_2"
+ - "ENGAGEMENT_RATE"
+ - "IDEA_PIN_PRODUCT_TAG_VISIT_1"
+ - "IDEA_PIN_PRODUCT_TAG_VISIT_2"
+ - "IMPRESSION_1"
+ - "IMPRESSION_1_GROSS"
+ - "IMPRESSION_2"
+ - "INAPP_CHECKOUT_COST_PER_ACTION"
+ - "OUTBOUND_CLICK_1"
+ - "OUTBOUND_CLICK_2"
+ - "PAGE_VISIT_COST_PER_ACTION"
+ - "PAGE_VISIT_ROAS"
+ - "PAID_IMPRESSION"
+ - "PIN_ID"
+ - "PIN_PROMOTION_ID"
+ - "REPIN_1"
+ - "REPIN_2"
+ - "REPIN_RATE"
+ - "SPEND_IN_DOLLAR"
+ - "SPEND_IN_MICRO_DOLLAR"
+ - "TOTAL_CHECKOUT"
+ - "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_CLICKTHROUGH"
+ - "TOTAL_CLICK_ADD_TO_CART"
+ - "TOTAL_CLICK_CHECKOUT"
+ - "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_CLICK_LEAD"
+ - "TOTAL_CLICK_SIGNUP"
+ - "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_CONVERSIONS"
+ - "TOTAL_CUSTOM"
+ - "TOTAL_ENGAGEMENT"
+ - "TOTAL_ENGAGEMENT_CHECKOUT"
+ - "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_ENGAGEMENT_LEAD"
+ - "TOTAL_ENGAGEMENT_SIGNUP"
+ - "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT"
+ - "TOTAL_IMPRESSION_FREQUENCY"
+ - "TOTAL_IMPRESSION_USER"
+ - "TOTAL_LEAD"
+ - "TOTAL_OFFLINE_CHECKOUT"
+ - "TOTAL_PAGE_VISIT"
+ - "TOTAL_REPIN_RATE"
+ - "TOTAL_SIGNUP"
+ - "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_VIDEO_3SEC_VIEWS"
+ - "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND"
+ - "TOTAL_VIDEO_MRC_VIEWS"
+ - "TOTAL_VIDEO_P0_COMBINED"
+ - "TOTAL_VIDEO_P100_COMPLETE"
+ - "TOTAL_VIDEO_P25_COMBINED"
+ - "TOTAL_VIDEO_P50_COMBINED"
+ - "TOTAL_VIDEO_P75_COMBINED"
+ - "TOTAL_VIDEO_P95_COMBINED"
+ - "TOTAL_VIEW_ADD_TO_CART"
+ - "TOTAL_VIEW_CHECKOUT"
+ - "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_VIEW_LEAD"
+ - "TOTAL_VIEW_SIGNUP"
+ - "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_WEB_CHECKOUT"
+ - "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_WEB_CLICK_CHECKOUT"
+ - "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_WEB_ENGAGEMENT_CHECKOUT"
+ - "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_WEB_SESSIONS"
+ - "TOTAL_WEB_VIEW_CHECKOUT"
+ - "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "VIDEO_3SEC_VIEWS_2"
+ - "VIDEO_LENGTH"
+ - "VIDEO_MRC_VIEWS_2"
+ - "VIDEO_P0_COMBINED_2"
+ - "VIDEO_P100_COMPLETE_2"
+ - "VIDEO_P25_COMBINED_2"
+ - "VIDEO_P50_COMBINED_2"
+ - "VIDEO_P75_COMBINED_2"
+ - "VIDEO_P95_COMBINED_2"
+ - "WEB_CHECKOUT_COST_PER_ACTION"
+ - "WEB_CHECKOUT_ROAS"
+ - "WEB_SESSIONS_1"
+ - "WEB_SESSIONS_2"
+ click_window_days:
+ title: "Click window days"
+ description: "Number of days to use as the conversion attribution\
+ \ window for a pin click action."
+ default: 30
+ enum:
+ - 0
+ - 1
+ - 7
+ - 14
+ - 30
+ - 60
+ type: "integer"
+ order: 4
+ engagement_window_days:
+ title: "Engagement window days"
+ description: "Number of days to use as the conversion attribution\
+ \ window for an engagement action."
+ default:
+ - 30
+ enum:
+ - 0
+ - 1
+ - 7
+ - 14
+ - 30
+ - 60
+ type: "integer"
+ order: 5
+ view_window_days:
+ title: "View window days"
+ description: "Number of days to use as the conversion attribution\
+ \ window for a view action."
+ default:
+ - 30
+ enum:
+ - 0
+ - 1
+ - 7
+ - 14
+ - 30
+ - 60
+ type: "integer"
+ order: 6
+ conversion_report_time:
+ title: "Conversion report time"
+ description: "The date by which the conversion metrics returned from\
+ \ this endpoint will be reported. There are two dates associated\
+ \ with a conversion event: the date that the user interacted with\
+ \ the ad, and the date that the user completed a conversion event.."
+ default: "TIME_OF_AD_ACTION"
+ enum:
+ - "TIME_OF_AD_ACTION"
+ - "TIME_OF_CONVERSION"
+ type: "string"
+ order: 7
+ attribution_types:
+ title: "Attribution types"
+ description: "List of types of attribution for the conversion report"
+ default:
+ - "INDIVIDUAL"
+ - "HOUSEHOLD"
+ type: "array"
+ items:
+ title: "ValidEnums"
+ description: "An enumeration."
+ enum:
+ - "INDIVIDUAL"
+ - "HOUSEHOLD"
+ order: 8
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description: "A date in the format YYYY-MM-DD. If you have not set\
+ \ a date, it would be defaulted to latest allowed date by report\
+ \ api (913 days from today)."
+ format: "date"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ pattern_descriptor: "YYYY-MM-DD"
+ examples:
+ - "2022-07-28"
+ order: 9
sourceType:
title: "pinterest"
const: "pinterest"
@@ -36723,8 +37742,6 @@ components:
source-pinterest-update:
title: "Pinterest Spec"
type: "object"
- required:
- - "start_date"
properties:
start_date:
type: "string"
@@ -36782,36 +37799,278 @@ components:
description: "Refresh Token to obtain new Access Token, when it's\
\ expired."
airbyte_secret: true
- - type: "object"
- title: "Access Token"
+ custom_reports:
+ title: "Custom Reports"
+ description: "A list which contains ad statistics entries, each entry must\
+ \ have a name and can contains fields, breakdowns or action_breakdowns.\
+ \ Click on \"add\" to fill this field."
+ type: "array"
+ items:
+ title: "ReportConfig"
+ description: "Config for custom report"
+ type: "object"
required:
- - "auth_method"
- - "access_token"
+ - "name"
+ - "level"
+ - "granularity"
+ - "columns"
properties:
- auth_method:
+ name:
+ title: "Name"
+ description: "The name value of report"
type: "string"
- const: "access_token"
order: 0
+ level:
+ title: "Level"
+ description: "Chosen level for API"
+ default: "ADVERTISER"
enum:
- - "access_token"
- access_token:
+ - "ADVERTISER"
+ - "ADVERTISER_TARGETING"
+ - "CAMPAIGN"
+ - "CAMPAIGN_TARGETING"
+ - "AD_GROUP"
+ - "AD_GROUP_TARGETING"
+ - "PIN_PROMOTION"
+ - "PIN_PROMOTION_TARGETING"
+ - "KEYWORD"
+ - "PRODUCT_GROUP"
+ - "PRODUCT_GROUP_TARGETING"
+ - "PRODUCT_ITEM"
type: "string"
- title: "Access Token"
- description: "The Access Token to make authenticated requests."
- airbyte_secret: true
+ order: 1
+ granularity:
+ title: "Granularity"
+ description: "Chosen granularity for API"
+ default: "TOTAL"
+ enum:
+ - "TOTAL"
+ - "DAY"
+ - "HOUR"
+ - "WEEK"
+ - "MONTH"
+ type: "string"
+ order: 2
+ columns:
+ title: "Columns"
+ description: "A list of chosen columns"
+ default: []
+ type: "array"
+ order: 3
+ items:
+ title: "ValidEnums"
+ description: "An enumeration."
+ enum:
+ - "ADVERTISER_ID"
+ - "AD_ACCOUNT_ID"
+ - "AD_GROUP_ENTITY_STATUS"
+ - "AD_GROUP_ID"
+ - "AD_ID"
+ - "CAMPAIGN_DAILY_SPEND_CAP"
+ - "CAMPAIGN_ENTITY_STATUS"
+ - "CAMPAIGN_ID"
+ - "CAMPAIGN_LIFETIME_SPEND_CAP"
+ - "CAMPAIGN_NAME"
+ - "CHECKOUT_ROAS"
+ - "CLICKTHROUGH_1"
+ - "CLICKTHROUGH_1_GROSS"
+ - "CLICKTHROUGH_2"
+ - "CPC_IN_MICRO_DOLLAR"
+ - "CPM_IN_DOLLAR"
+ - "CPM_IN_MICRO_DOLLAR"
+ - "CTR"
+ - "CTR_2"
+ - "ECPCV_IN_DOLLAR"
+ - "ECPCV_P95_IN_DOLLAR"
+ - "ECPC_IN_DOLLAR"
+ - "ECPC_IN_MICRO_DOLLAR"
+ - "ECPE_IN_DOLLAR"
+ - "ECPM_IN_MICRO_DOLLAR"
+ - "ECPV_IN_DOLLAR"
+ - "ECTR"
+ - "EENGAGEMENT_RATE"
+ - "ENGAGEMENT_1"
+ - "ENGAGEMENT_2"
+ - "ENGAGEMENT_RATE"
+ - "IDEA_PIN_PRODUCT_TAG_VISIT_1"
+ - "IDEA_PIN_PRODUCT_TAG_VISIT_2"
+ - "IMPRESSION_1"
+ - "IMPRESSION_1_GROSS"
+ - "IMPRESSION_2"
+ - "INAPP_CHECKOUT_COST_PER_ACTION"
+ - "OUTBOUND_CLICK_1"
+ - "OUTBOUND_CLICK_2"
+ - "PAGE_VISIT_COST_PER_ACTION"
+ - "PAGE_VISIT_ROAS"
+ - "PAID_IMPRESSION"
+ - "PIN_ID"
+ - "PIN_PROMOTION_ID"
+ - "REPIN_1"
+ - "REPIN_2"
+ - "REPIN_RATE"
+ - "SPEND_IN_DOLLAR"
+ - "SPEND_IN_MICRO_DOLLAR"
+ - "TOTAL_CHECKOUT"
+ - "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_CLICKTHROUGH"
+ - "TOTAL_CLICK_ADD_TO_CART"
+ - "TOTAL_CLICK_CHECKOUT"
+ - "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_CLICK_LEAD"
+ - "TOTAL_CLICK_SIGNUP"
+ - "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_CONVERSIONS"
+ - "TOTAL_CUSTOM"
+ - "TOTAL_ENGAGEMENT"
+ - "TOTAL_ENGAGEMENT_CHECKOUT"
+ - "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_ENGAGEMENT_LEAD"
+ - "TOTAL_ENGAGEMENT_SIGNUP"
+ - "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT"
+ - "TOTAL_IMPRESSION_FREQUENCY"
+ - "TOTAL_IMPRESSION_USER"
+ - "TOTAL_LEAD"
+ - "TOTAL_OFFLINE_CHECKOUT"
+ - "TOTAL_PAGE_VISIT"
+ - "TOTAL_REPIN_RATE"
+ - "TOTAL_SIGNUP"
+ - "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_VIDEO_3SEC_VIEWS"
+ - "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND"
+ - "TOTAL_VIDEO_MRC_VIEWS"
+ - "TOTAL_VIDEO_P0_COMBINED"
+ - "TOTAL_VIDEO_P100_COMPLETE"
+ - "TOTAL_VIDEO_P25_COMBINED"
+ - "TOTAL_VIDEO_P50_COMBINED"
+ - "TOTAL_VIDEO_P75_COMBINED"
+ - "TOTAL_VIDEO_P95_COMBINED"
+ - "TOTAL_VIEW_ADD_TO_CART"
+ - "TOTAL_VIEW_CHECKOUT"
+ - "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_VIEW_LEAD"
+ - "TOTAL_VIEW_SIGNUP"
+ - "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_WEB_CHECKOUT"
+ - "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_WEB_CLICK_CHECKOUT"
+ - "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_WEB_ENGAGEMENT_CHECKOUT"
+ - "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "TOTAL_WEB_SESSIONS"
+ - "TOTAL_WEB_VIEW_CHECKOUT"
+ - "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ - "VIDEO_3SEC_VIEWS_2"
+ - "VIDEO_LENGTH"
+ - "VIDEO_MRC_VIEWS_2"
+ - "VIDEO_P0_COMBINED_2"
+ - "VIDEO_P100_COMPLETE_2"
+ - "VIDEO_P25_COMBINED_2"
+ - "VIDEO_P50_COMBINED_2"
+ - "VIDEO_P75_COMBINED_2"
+ - "VIDEO_P95_COMBINED_2"
+ - "WEB_CHECKOUT_COST_PER_ACTION"
+ - "WEB_CHECKOUT_ROAS"
+ - "WEB_SESSIONS_1"
+ - "WEB_SESSIONS_2"
+ click_window_days:
+ title: "Click window days"
+ description: "Number of days to use as the conversion attribution\
+ \ window for a pin click action."
+ default: 30
+ enum:
+ - 0
+ - 1
+ - 7
+ - 14
+ - 30
+ - 60
+ type: "integer"
+ order: 4
+ engagement_window_days:
+ title: "Engagement window days"
+ description: "Number of days to use as the conversion attribution\
+ \ window for an engagement action."
+ default:
+ - 30
+ enum:
+ - 0
+ - 1
+ - 7
+ - 14
+ - 30
+ - 60
+ type: "integer"
+ order: 5
+ view_window_days:
+ title: "View window days"
+ description: "Number of days to use as the conversion attribution\
+ \ window for a view action."
+ default:
+ - 30
+ enum:
+ - 0
+ - 1
+ - 7
+ - 14
+ - 30
+ - 60
+ type: "integer"
+ order: 6
+ conversion_report_time:
+ title: "Conversion report time"
+ description: "The date by which the conversion metrics returned from\
+ \ this endpoint will be reported. There are two dates associated\
+ \ with a conversion event: the date that the user interacted with\
+ \ the ad, and the date that the user completed a conversion event.."
+ default: "TIME_OF_AD_ACTION"
+ enum:
+ - "TIME_OF_AD_ACTION"
+ - "TIME_OF_CONVERSION"
+ type: "string"
+ order: 7
+ attribution_types:
+ title: "Attribution types"
+ description: "List of types of attribution for the conversion report"
+ default:
+ - "INDIVIDUAL"
+ - "HOUSEHOLD"
+ type: "array"
+ items:
+ title: "ValidEnums"
+ description: "An enumeration."
+ enum:
+ - "INDIVIDUAL"
+ - "HOUSEHOLD"
+ order: 8
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description: "A date in the format YYYY-MM-DD. If you have not set\
+ \ a date, it would be defaulted to latest allowed date by report\
+ \ api (913 days from today)."
+ format: "date"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ pattern_descriptor: "YYYY-MM-DD"
+ examples:
+ - "2022-07-28"
+ order: 9
source-spacex-api:
- title: "Spacex Api Spec"
type: "object"
+ required:
+ - "sourceType"
properties:
id:
title: "Unique ID for specific source target"
type: "string"
desciption: "Optional, For a specific ID"
+ order: 0
options:
title: "Configuration options for endpoints"
type: "string"
desciption: "Optional, Possible values for an endpoint. Example values for\
\ launches-latest, upcoming, past"
+ order: 1
sourceType:
title: "spacex-api"
const: "spacex-api"
@@ -36820,18 +38079,20 @@ components:
order: 0
type: "string"
source-spacex-api-update:
- title: "Spacex Api Spec"
type: "object"
+ required: []
properties:
id:
title: "Unique ID for specific source target"
type: "string"
desciption: "Optional, For a specific ID"
+ order: 0
options:
title: "Configuration options for endpoints"
type: "string"
desciption: "Optional, Possible values for an endpoint. Example values for\
\ launches-latest, upcoming, past"
+ order: 1
source-bamboo-hr:
title: "Bamboo HR Spec"
type: "object"
@@ -36847,6 +38108,7 @@ components:
type: "string"
description: "Api key of bamboo hr"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
custom_reports_fields:
type: "string"
default: ""
@@ -36940,6 +38202,7 @@ components:
description: "Refresh Token to obtain new Access Token, when it's\
\ expired."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "API Token"
required:
@@ -36958,6 +38221,7 @@ components:
description: "An Okta token. See the docs for instructions on how to generate it."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "okta"
const: "okta"
@@ -37038,6 +38302,9 @@ components:
airbyte_secret: true
source-mixpanel:
title: "Source Mixpanel Spec"
+ required:
+ - "credentials"
+ - "sourceType"
type: "object"
properties:
credentials:
@@ -37051,6 +38318,7 @@ components:
required:
- "username"
- "secret"
+ - "project_id"
properties:
option_title:
type: "string"
@@ -37073,6 +38341,12 @@ components:
https://developer.mixpanel.com/reference/service-accounts\">docs\
\ for more information on how to obtain this."
airbyte_secret: true
+ project_id:
+ order: 3
+ title: "Project ID"
+ description: "Your project ID number. See the docs for more information on how to obtain this."
+ type: "integer"
- type: "object"
title: "Project Secret"
required:
@@ -37091,19 +38365,14 @@ components:
description: "Mixpanel project secret. See the docs for more information on how to obtain this."
airbyte_secret: true
- project_id:
- order: 1
- title: "Project ID"
- description: "Your project ID number. See the docs for more information on how to obtain this."
- type: "integer"
attribution_window:
order: 2
title: "Attribution Window"
type: "integer"
- description: " A period of time for attributing results to ads and the lookback\
+ description: "A period of time for attributing results to ads and the lookback\
\ period after those actions occur during which ad results are counted.\
- \ Default attribution window is 5 days."
+ \ Default attribution window is 5 days. (This value should be non-negative\
+ \ integer)"
default: 5
project_timezone:
order: 3
@@ -37159,7 +38428,7 @@ components:
title: "Date slicing window"
description: "Defines window size in days, that used to slice through data.\
\ You can reduce it, if amount of data in each window is too big for your\
- \ environment."
+ \ environment. (This value should be positive integer)"
type: "integer"
minimum: 1
default: 30
@@ -37172,6 +38441,8 @@ components:
type: "string"
source-mixpanel-update:
title: "Source Mixpanel Spec"
+ required:
+ - "credentials"
type: "object"
properties:
credentials:
@@ -37185,6 +38456,7 @@ components:
required:
- "username"
- "secret"
+ - "project_id"
properties:
option_title:
type: "string"
@@ -37207,6 +38479,12 @@ components:
https://developer.mixpanel.com/reference/service-accounts\">docs\
\ for more information on how to obtain this."
airbyte_secret: true
+ project_id:
+ order: 3
+ title: "Project ID"
+ description: "Your project ID number. See the docs for more information on how to obtain this."
+ type: "integer"
- type: "object"
title: "Project Secret"
required:
@@ -37225,19 +38503,14 @@ components:
description: "Mixpanel project secret. See the docs for more information on how to obtain this."
airbyte_secret: true
- project_id:
- order: 1
- title: "Project ID"
- description: "Your project ID number. See the docs for more information on how to obtain this."
- type: "integer"
attribution_window:
order: 2
title: "Attribution Window"
type: "integer"
- description: " A period of time for attributing results to ads and the lookback\
+ description: "A period of time for attributing results to ads and the lookback\
\ period after those actions occur during which ad results are counted.\
- \ Default attribution window is 5 days."
+ \ Default attribution window is 5 days. (This value should be non-negative\
+ \ integer)"
default: 5
project_timezone:
order: 3
@@ -37293,7 +38566,7 @@ components:
title: "Date slicing window"
description: "Defines window size in days, that used to slice through data.\
\ You can reduce it, if amount of data in each window is too big for your\
- \ environment."
+ \ environment. (This value should be positive integer)"
type: "integer"
minimum: 1
default: 30
@@ -37307,6 +38580,7 @@ components:
description: "Your API Key. See here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
domain:
title: "Domain"
type: "string"
@@ -37355,6 +38629,7 @@ components:
>docs for more information on how to obtain this token."
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
query:
type: "string"
title: "Search Query"
@@ -37441,12 +38716,14 @@ components:
type: "string"
airbyte_secret: true
order: 1
+ x-speakeasy-param-sensitive: true
private_key:
title: "Private key"
description: "The private key"
type: "string"
multiline: true
order: 2
+ x-speakeasy-param-sensitive: true
host:
title: "Host Address"
description: "The server host address"
@@ -37694,6 +38971,7 @@ components:
description: "The OAuth access token. See the Zendesk docs for more information on generating this token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
client_id:
type: "string"
title: "Client ID"
@@ -37731,6 +39009,7 @@ components:
>full documentation for more information on generating this\
\ token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
ignore_pagination:
type: "boolean"
default: false
@@ -37902,128 +39181,6 @@ components:
pattern: "^[0-9]{2}-[0-9]{4}$"
examples:
- "MM-YYYY"
- source-xero:
- title: "Xero Spec"
- type: "object"
- required:
- - "authentication"
- - "tenant_id"
- - "start_date"
- - "sourceType"
- properties:
- authentication:
- type: "object"
- title: "Authenticate via Xero (OAuth)"
- required:
- - "client_id"
- - "client_secret"
- - "refresh_token"
- - "access_token"
- - "token_expiry_date"
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "Enter your Xero application's Client ID"
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "Enter your Xero application's Client Secret"
- airbyte_secret: true
- refresh_token:
- type: "string"
- title: "Refresh Token"
- description: "Enter your Xero application's refresh token"
- airbyte_secret: true
- access_token:
- type: "string"
- title: "Access Token"
- description: "Enter your Xero application's access token"
- airbyte_secret: true
- token_expiry_date:
- type: "string"
- description: "The date-time when the access token should be refreshed"
- order: 0
- tenant_id:
- title: "Tenant ID"
- type: "string"
- description: "Enter your Xero organization's Tenant ID"
- airbyte_secret: true
- order: 1
- start_date:
- type: "string"
- title: "Start Date"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
- description: "UTC date and time in the format YYYY-MM-DDTHH:mm:ssZ. Any\
- \ data with created_at before this data will not be synced."
- examples:
- - "2022-03-01T00:00:00Z"
- format: "date-time"
- order: 2
- sourceType:
- title: "xero"
- const: "xero"
- enum:
- - "xero"
- order: 0
- type: "string"
- source-xero-update:
- title: "Xero Spec"
- type: "object"
- required:
- - "authentication"
- - "tenant_id"
- - "start_date"
- properties:
- authentication:
- type: "object"
- title: "Authenticate via Xero (OAuth)"
- required:
- - "client_id"
- - "client_secret"
- - "refresh_token"
- - "access_token"
- - "token_expiry_date"
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "Enter your Xero application's Client ID"
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "Enter your Xero application's Client Secret"
- airbyte_secret: true
- refresh_token:
- type: "string"
- title: "Refresh Token"
- description: "Enter your Xero application's refresh token"
- airbyte_secret: true
- access_token:
- type: "string"
- title: "Access Token"
- description: "Enter your Xero application's access token"
- airbyte_secret: true
- token_expiry_date:
- type: "string"
- description: "The date-time when the access token should be refreshed"
- order: 0
- tenant_id:
- title: "Tenant ID"
- type: "string"
- description: "Enter your Xero organization's Tenant ID"
- airbyte_secret: true
- order: 1
- start_date:
- type: "string"
- title: "Start Date"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
- description: "UTC date and time in the format YYYY-MM-DDTHH:mm:ssZ. Any\
- \ data with created_at before this data will not be synced."
- examples:
- - "2022-03-01T00:00:00Z"
- format: "date-time"
- order: 2
source-tiktok-marketing:
title: "TikTok Marketing Source Spec"
type: "object"
@@ -38060,6 +39217,7 @@ components:
description: "Long-term Authorized Access Token."
airbyte_secret: true
type: "string"
+ x-speakeasy-param-sensitive: true
advertiser_id:
title: "Advertiser ID"
description: "The Advertiser ID to filter reports and streams. Let\
@@ -38089,6 +39247,7 @@ components:
description: "The long-term authorized access token."
airbyte_secret: true
type: "string"
+ x-speakeasy-param-sensitive: true
required:
- "advertiser_id"
- "access_token"
@@ -38251,12 +39410,14 @@ components:
description: "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
aws_secret_key:
type: "string"
title: "Secret Key"
description: "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
aws_region_name:
type: "string"
title: "Region Name"
@@ -38334,6 +39495,7 @@ components:
\ is used for Authorization to your account by BasicAuth."
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
domain:
type: "string"
title: "Domain"
@@ -38379,16 +39541,35 @@ components:
expand_issue_changelog:
type: "boolean"
title: "Expand Issue Changelog"
- description: "Expand the changelog when replicating issues."
+ airbyte_hidden: true
+ description: "(DEPRECATED) Expand the changelog when replicating issues."
default: false
- order: 5
render_fields:
type: "boolean"
title: "Render Issue Fields"
- description: "Render issue fields in HTML format in addition to Jira JSON-like\
- \ format."
+ airbyte_hidden: true
+ description: "(DEPRECATED) Render issue fields in HTML format in addition\
+ \ to Jira JSON-like format."
default: false
- order: 6
+ expand_issue_transition:
+ type: "boolean"
+ title: "Expand Issue Transitions"
+ airbyte_hidden: true
+ description: "(DEPRECATED) Expand the transitions when replicating issues."
+ default: false
+ issues_stream_expand_with:
+ type: "array"
+ items:
+ type: "string"
+ enum:
+ - "renderedFields"
+ - "transitions"
+ - "changelog"
+ title: "Expand Issues stream"
+ airbyte_hidden: true
+ description: "Select fields to Expand the `Issues` stream when replicating\
+ \ with: "
+ default: []
enable_experimental_streams:
type: "boolean"
title: "Enable Experimental Streams"
@@ -38396,7 +39577,7 @@ components:
\ Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables\
\ for more info."
default: false
- order: 7
+ order: 5
sourceType:
title: "jira"
const: "jira"
@@ -38465,16 +39646,35 @@ components:
expand_issue_changelog:
type: "boolean"
title: "Expand Issue Changelog"
- description: "Expand the changelog when replicating issues."
+ airbyte_hidden: true
+ description: "(DEPRECATED) Expand the changelog when replicating issues."
default: false
- order: 5
render_fields:
type: "boolean"
title: "Render Issue Fields"
- description: "Render issue fields in HTML format in addition to Jira JSON-like\
- \ format."
+ airbyte_hidden: true
+ description: "(DEPRECATED) Render issue fields in HTML format in addition\
+ \ to Jira JSON-like format."
default: false
- order: 6
+ expand_issue_transition:
+ type: "boolean"
+ title: "Expand Issue Transitions"
+ airbyte_hidden: true
+ description: "(DEPRECATED) Expand the transitions when replicating issues."
+ default: false
+ issues_stream_expand_with:
+ type: "array"
+ items:
+ type: "string"
+ enum:
+ - "renderedFields"
+ - "transitions"
+ - "changelog"
+ title: "Expand Issues stream"
+ airbyte_hidden: true
+        description: "Select fields to expand when replicating the `Issues`\
+          \ stream."
+ default: []
enable_experimental_streams:
type: "boolean"
title: "Enable Experimental Streams"
@@ -38482,7 +39682,7 @@ components:
\ Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables\
\ for more info."
default: false
- order: 7
+ order: 5
source-hubspot:
title: "HubSpot Source Spec"
type: "object"
@@ -38547,6 +39747,7 @@ components:
examples:
- "refresh_token"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "Private App"
required:
@@ -38567,6 +39768,7 @@ components:
>Hubspot docs if you need help finding this token."
type: "string"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "hubspot"
const: "hubspot"
@@ -38694,6 +39896,7 @@ components:
type: "string"
description: "API Key"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "sap-fieldglass"
const: "sap-fieldglass"
@@ -38729,6 +39932,7 @@ components:
description: "Twilio Auth Token"
airbyte_secret: true
title: "Auth Token"
+ x-speakeasy-param-sensitive: true
sourceType:
title: "twilio-taskrouter"
const: "twilio-taskrouter"
@@ -38780,6 +39984,7 @@ components:
description: "Zenloop API Token. You can get the API token in settings page\
\ here "
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
date_from:
type: "string"
description: "Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24.\
@@ -38845,6 +40050,7 @@ components:
description: "Tempo API Token. Go to Tempo>Settings, scroll down to Data\
\ Access and select API integration."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "tempo"
const: "tempo"
@@ -38881,6 +40087,7 @@ components:
>docs for more information on how to obtain this key."
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
site:
type: "string"
title: "Site"
@@ -38975,6 +40182,7 @@ components:
>docs for more information on how to obtain this key."
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
applications:
type: "array"
title: "Applications"
@@ -38997,6 +40205,7 @@ components:
title: "REST API Key"
order: 2
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
required:
- "app_id"
- "app_api_key"
@@ -39094,8 +40303,7 @@ components:
title: "Google Analytics (Data API) Spec"
type: "object"
required:
- - "property_id"
- - "date_ranges_start_date"
+ - "property_ids"
- "sourceType"
properties:
credentials:
@@ -39135,12 +40343,14 @@ components:
description: "The token for obtaining a new access token."
airbyte_secret: true
order: 3
+ x-speakeasy-param-sensitive: true
access_token:
title: "Access Token"
type: "string"
description: "Access Token for making authenticated requests."
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "Service Account Key Authentication"
required:
@@ -39164,20 +40374,21 @@ components:
\ \"private_key_id\": YOUR_PRIVATE_KEY, ... }"
airbyte_secret: true
order: 1
- property_id:
- type: "string"
- title: "Property ID"
- description: "The Property ID is a unique number assigned to each property\
- \ in Google Analytics, found in your GA4 property URL. This ID allows\
- \ the connector to track the specific events associated with your property.\
- \ Refer to the Google\
+ property_ids:
+ title: "Property IDs"
+ description: "A list of your Property IDs. The Property ID is a unique number\
+ \ assigned to each property in Google Analytics, found in your GA4 property\
+ \ URL. This ID allows the connector to track the specific events associated\
+ \ with your property. Refer to the Google\
\ Analytics documentation to locate your property ID."
- pattern: "^[0-9]*$"
- pattern_descriptor: "123..."
- examples:
- - "1738294"
- - "5729978930"
order: 1
+ type: "array"
+ items:
+ type: "string"
+ pattern: "^[0-9]*$"
+ examples:
+ - - "1738294"
+ - "5729978930"
date_ranges_start_date:
type: "string"
title: "Start Date"
@@ -39190,14 +40401,1763 @@ components:
examples:
- "2021-01-01"
order: 2
- custom_reports:
- order: 3
- type: "string"
+ custom_reports_array:
title: "Custom Reports"
- description: "A JSON array describing the custom reports you want to sync\
- \ from Google Analytics. See the documentation for more information about the exact format you\
- \ can use to fill out this field."
+ description: "You can add your Custom Analytics report by creating one."
+ order: 4
+ type: "array"
+ items:
+ title: "Custom Report Config"
+ type: "object"
+ properties:
+ name:
+ title: "Name"
+ description: "The name of the custom report, this name would be used\
+ \ as stream name."
+ type: "string"
+ order: 0
+ dimensions:
+ title: "Dimensions"
+ description: "A list of dimensions."
+ type: "array"
+ items:
+ type: "string"
+ minItems: 1
+ order: 1
+ metrics:
+ title: "Metrics"
+ description: "A list of metrics."
+ type: "array"
+ items:
+ type: "string"
+ minItems: 1
+ order: 2
+ dimensionFilter:
+ title: "Dimensions filter"
+ description: "Dimensions filter"
+ type: "object"
+ order: 3
+ oneOf:
+ - title: "andGroup"
+ description: "The FilterExpressions in andGroup have an AND relationship."
+ type: "object"
+ properties:
+ filter_type:
+ type: "string"
+ const: "andGroup"
+ order: 0
+ enum:
+ - "andGroup"
+ expressions:
+ title: "Expressions"
+ type: "array"
+ order: 1
+ items:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+ tittle: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+ tittle: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+ tittle: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+ tittle: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+ tittle: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ required:
+ - "filter_type"
+ - "expressions"
+ - title: "orGroup"
+ type: "object"
+ description: "The FilterExpressions in orGroup have an OR relationship."
+ properties:
+ filter_type:
+ type: "string"
+ const: "orGroup"
+ order: 0
+ enum:
+ - "orGroup"
+ expressions:
+ title: "Expressions"
+ type: "array"
+ order: 1
+ items:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+ tittle: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+ tittle: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+ tittle: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+ tittle: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+ tittle: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ required:
+ - "filter_type"
+ - "expressions"
+ - title: "notExpression"
+ type: "object"
+ description: "The FilterExpression is NOT of notExpression."
+ properties:
+ filter_type:
+ type: "string"
+ const: "notExpression"
+ order: 0
+ enum:
+ - "notExpression"
+ expression:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+ tittle: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+ tittle: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+ tittle: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+ tittle: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+ tittle: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ - title: "filter"
+ type: "object"
+                    description: "A primitive filter. In the same FilterExpression,\
+                      \ all of the filter's field names need to be all dimensions."
+ properties:
+ filter_type:
+ type: "string"
+ const: "filter"
+ order: 0
+ enum:
+ - "filter"
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+ tittle: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+ tittle: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+ tittle: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+ tittle: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+ tittle: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ metricFilter:
+ title: "Metrics filter"
+ description: "Metrics filter"
+ type: "object"
+ order: 4
+ oneOf:
+ - title: "andGroup"
+ description: "The FilterExpressions in andGroup have an AND relationship."
+ type: "object"
+ properties:
+ filter_type:
+ type: "string"
+ const: "andGroup"
+ order: 0
+ enum:
+ - "andGroup"
+ expressions:
+ title: "Expressions"
+ type: "array"
+ order: 1
+ items:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+ tittle: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+ tittle: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+ tittle: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+ tittle: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+ tittle: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ required:
+ - "filter_type"
+ - "expressions"
+ - title: "orGroup"
+ type: "object"
+ description: "The FilterExpressions in orGroup have an OR relationship."
+ properties:
+ filter_type:
+ type: "string"
+ const: "orGroup"
+ order: 0
+ enum:
+ - "orGroup"
+ expressions:
+ title: "Expressions"
+ type: "array"
+ order: 1
+ items:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+ tittle: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+ tittle: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+ tittle: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+ tittle: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+ tittle: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ required:
+ - "filter_type"
+ - "expressions"
+ - title: "notExpression"
+ type: "object"
+ description: "The FilterExpression is NOT of notExpression."
+ properties:
+ filter_type:
+ type: "string"
+ const: "notExpression"
+ order: 0
+ enum:
+ - "notExpression"
+ expression:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+ tittle: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+ tittle: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+ tittle: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+ tittle: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+ tittle: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ - title: "filter"
+ type: "object"
+                      description: "A primitive filter. In the same FilterExpression,\
+                        \ all of the filter's field names need to be all metrics."
+ properties:
+ filter_type:
+ type: "string"
+ const: "filter"
+ order: 0
+ enum:
+ - "filter"
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+ tittle: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+ tittle: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+ tittle: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+ tittle: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+ tittle: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+ tittle: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ required:
+ - "name"
+ - "dimensions"
+ - "metrics"
window_in_days:
type: "integer"
title: "Data Request Interval (Days)"
@@ -39219,7 +42179,7 @@ components:
minimum: 1
maximum: 364
default: 1
- order: 4
+ order: 5
sourceType:
title: "google-analytics-data-api"
const: "google-analytics-data-api"
@@ -39231,8 +42191,7 @@ components:
title: "Google Analytics (Data API) Spec"
type: "object"
required:
- - "property_id"
- - "date_ranges_start_date"
+ - "property_ids"
properties:
credentials:
order: 0
@@ -39300,20 +42259,21 @@ components:
\ \"private_key_id\": YOUR_PRIVATE_KEY, ... }"
airbyte_secret: true
order: 1
- property_id:
- type: "string"
- title: "Property ID"
- description: "The Property ID is a unique number assigned to each property\
- \ in Google Analytics, found in your GA4 property URL. This ID allows\
- \ the connector to track the specific events associated with your property.\
- \ Refer to the Google\
+ property_ids:
+ title: "Property IDs"
+ description: "A list of your Property IDs. The Property ID is a unique number\
+ \ assigned to each property in Google Analytics, found in your GA4 property\
+ \ URL. This ID allows the connector to track the specific events associated\
+ \ with your property. Refer to the Google\
\ Analytics documentation to locate your property ID."
- pattern: "^[0-9]*$"
- pattern_descriptor: "123..."
- examples:
- - "1738294"
- - "5729978930"
order: 1
+ type: "array"
+ items:
+ type: "string"
+ pattern: "^[0-9]*$"
+ examples:
+ - - "1738294"
+ - "5729978930"
date_ranges_start_date:
type: "string"
title: "Start Date"
@@ -39326,14 +42286,1763 @@ components:
examples:
- "2021-01-01"
order: 2
- custom_reports:
- order: 3
- type: "string"
+ custom_reports_array:
title: "Custom Reports"
- description: "A JSON array describing the custom reports you want to sync\
- \ from Google Analytics. See the documentation for more information about the exact format you\
- \ can use to fill out this field."
+ description: "You can add your Custom Analytics report by creating one."
+ order: 4
+ type: "array"
+ items:
+ title: "Custom Report Config"
+ type: "object"
+ properties:
+ name:
+ title: "Name"
+ description: "The name of the custom report, this name would be used\
+ \ as stream name."
+ type: "string"
+ order: 0
+ dimensions:
+ title: "Dimensions"
+ description: "A list of dimensions."
+ type: "array"
+ items:
+ type: "string"
+ minItems: 1
+ order: 1
+ metrics:
+ title: "Metrics"
+ description: "A list of metrics."
+ type: "array"
+ items:
+ type: "string"
+ minItems: 1
+ order: 2
+ dimensionFilter:
+ title: "Dimensions filter"
+ description: "Dimensions filter"
+ type: "object"
+ order: 3
+ oneOf:
+ - title: "andGroup"
+ description: "The FilterExpressions in andGroup have an AND relationship."
+ type: "object"
+ properties:
+ filter_type:
+ type: "string"
+ const: "andGroup"
+ order: 0
+ enum:
+ - "andGroup"
+ expressions:
+ title: "Expressions"
+ type: "array"
+ order: 1
+ items:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+                                title: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+                                title: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+                                title: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+                                title: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+                                title: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+                                title: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+                                title: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ required:
+ - "filter_type"
+ - "expressions"
+ - title: "orGroup"
+ type: "object"
+ description: "The FilterExpressions in orGroup have an OR relationship."
+ properties:
+ filter_type:
+ type: "string"
+ const: "orGroup"
+ order: 0
+ enum:
+ - "orGroup"
+ expressions:
+ title: "Expressions"
+ type: "array"
+ order: 1
+ items:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+                                title: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+                                title: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+                                title: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+                                title: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+                                title: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+                                title: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+                                title: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ required:
+ - "filter_type"
+ - "expressions"
+ - title: "notExpression"
+ type: "object"
+                description: "The FilterExpression is the logical NOT of the inner expression."
+ properties:
+ filter_type:
+ type: "string"
+ const: "notExpression"
+ order: 0
+ enum:
+ - "notExpression"
+ expression:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+                              title: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+                              title: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+                              title: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+                              title: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+                              title: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+                              title: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+                              title: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ - title: "filter"
+ type: "object"
+                      description: "A primitive filter. In the same FilterExpression,\
+                        \ all of the filter's field names need to be all dimensions."
+ properties:
+ filter_type:
+ type: "string"
+ const: "filter"
+ order: 0
+ enum:
+ - "filter"
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+                            title: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+                            title: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+                            title: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+                            title: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+                            title: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+                            title: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+                            title: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ metricFilter:
+ title: "Metrics filter"
+ description: "Metrics filter"
+ type: "object"
+ order: 4
+ oneOf:
+ - title: "andGroup"
+ description: "The FilterExpressions in andGroup have an AND relationship."
+ type: "object"
+ properties:
+ filter_type:
+ type: "string"
+ const: "andGroup"
+ order: 0
+ enum:
+ - "andGroup"
+ expressions:
+ title: "Expressions"
+ type: "array"
+ order: 1
+ items:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+                                title: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+                                title: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+                                title: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+                                title: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+                                title: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+                                title: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+                                title: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ required:
+ - "filter_type"
+ - "expressions"
+ - title: "orGroup"
+ type: "object"
+ description: "The FilterExpressions in orGroup have an OR relationship."
+ properties:
+ filter_type:
+ type: "string"
+ const: "orGroup"
+ order: 0
+ enum:
+ - "orGroup"
+ expressions:
+ title: "Expressions"
+ type: "array"
+ order: 1
+ items:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+                                title: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+                                title: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+                                title: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+                                title: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+                                title: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+                                title: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+                                title: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ required:
+ - "filter_type"
+ - "expressions"
+ - title: "notExpression"
+ type: "object"
+                description: "The FilterExpression is the logical NOT of the inner expression."
+ properties:
+ filter_type:
+ type: "string"
+ const: "notExpression"
+ order: 0
+ enum:
+ - "notExpression"
+ expression:
+ title: "Expression"
+ type: "object"
+ properties:
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+                              title: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+                              title: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+                              title: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+                              title: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+                              title: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+                              title: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+                              title: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ - title: "filter"
+ type: "object"
+                      description: "A primitive filter. In the same FilterExpression,\
+                        \ all of the filter's field names need to be all metrics."
+ properties:
+ filter_type:
+ type: "string"
+ const: "filter"
+ order: 0
+ enum:
+ - "filter"
+ field_name:
+ title: "fieldName"
+ type: "string"
+ order: 1
+ filter:
+ title: "filter"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "stringFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "stringFilter"
+ enum:
+ - "stringFilter"
+ matchType:
+ title: "matchType"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "MATCH_TYPE_UNSPECIFIED"
+ - "EXACT"
+ - "BEGINS_WITH"
+ - "ENDS_WITH"
+ - "CONTAINS"
+ - "FULL_REGEXP"
+ - "PARTIAL_REGEXP"
+ value:
+                            title: "value"
+ type: "string"
+ order: 0
+ caseSensitive:
+                            title: "caseSensitive"
+ type: "boolean"
+ order: 2
+ required:
+ - "filter_name"
+ - "value"
+ - title: "inListFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "inListFilter"
+ enum:
+ - "inListFilter"
+ values:
+                            title: "values"
+ type: "array"
+ minItems: 1
+ order: 0
+ items:
+ type: "string"
+ caseSensitive:
+                            title: "caseSensitive"
+ type: "boolean"
+ order: 1
+ required:
+ - "filter_name"
+ - "values"
+ - title: "numericFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "numericFilter"
+ enum:
+ - "numericFilter"
+ operation:
+ title: "operation"
+ type: "array"
+ order: 1
+ items:
+ title: "ValidEnums"
+ enum:
+ - "OPERATION_UNSPECIFIED"
+ - "EQUAL"
+ - "LESS_THAN"
+ - "LESS_THAN_OR_EQUAL"
+ - "GREATER_THAN"
+ - "GREATER_THAN_OR_EQUAL"
+ value:
+                            title: "value"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "operation"
+ - "value"
+ - title: "betweenFilter"
+ type: "object"
+ properties:
+ filter_name:
+ type: "string"
+ const: "betweenFilter"
+ enum:
+ - "betweenFilter"
+ fromValue:
+                            title: "fromValue"
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ toValue:
+                            title: "toValue"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "int64Value"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "int64Value"
+ enum:
+ - "int64Value"
+ value:
+ type: "string"
+ required:
+ - "value_type"
+ - "value"
+ - title: "doubleValue"
+ type: "object"
+ properties:
+ value_type:
+ type: "string"
+ const: "doubleValue"
+ enum:
+ - "doubleValue"
+ value:
+ type: "number"
+ required:
+ - "value_type"
+ - "value"
+ required:
+ - "filter_name"
+ - "fromValue"
+ - "toValue"
+ required:
+ - "field_name"
+ - "filter"
+ required:
+ - "name"
+ - "dimensions"
+ - "metrics"
window_in_days:
type: "integer"
title: "Data Request Interval (Days)"
@@ -39355,7 +44064,7 @@ components:
minimum: 1
maximum: 364
default: 1
- order: 4
+ order: 5
source-mailgun:
title: "Source Mailgun Spec"
type: "object"
@@ -39368,6 +44077,7 @@ components:
airbyte_secret: true
description: "Primary account API key to access your Mailgun data."
title: "Private API Key"
+ x-speakeasy-param-sensitive: true
domain_region:
type: "string"
description: "Domain region code. 'EU' or 'US' are possible values. The\
@@ -39442,6 +44152,7 @@ components:
\ href=\"https://developers.intercom.com/building-apps/docs/authentication-types#how-to-get-your-access-token\"\
>Intercom docs for more information."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
client_id:
title: "Client Id"
type: "string"
@@ -39537,6 +44248,7 @@ components:
description: "Your API Access Key. See here. The key is case sensitive."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "secoda"
const: "secoda"
@@ -39567,6 +44279,7 @@ components:
type: "string"
description: "JWT Token"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "zoom"
const: "zoom"
@@ -39598,6 +44311,7 @@ components:
description: "A Delighted API key."
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
since:
title: "Replication Start Date"
type: "string"
@@ -39674,6 +44388,7 @@ components:
description: "A string which is associated with your Merchant ID and is\
\ used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "klarna"
const: "klarna"
@@ -39717,7 +44432,6 @@ components:
\ used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)"
airbyte_secret: true
source-typeform:
- title: "Source Typeform Spec"
type: "object"
required:
- "credentials"
@@ -39754,14 +44468,17 @@ components:
type: "string"
description: "Access Token for making authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
token_expiry_date:
type: "string"
description: "The date-time when the access token should be refreshed."
format: "date-time"
+ x-speakeasy-param-sensitive: true
refresh_token:
type: "string"
description: "The key to refresh the expired access_token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "Private Token"
type: "object"
required:
@@ -39778,6 +44495,7 @@ components:
description: "Log into your Typeform account and then generate a personal\
\ Access Token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Start Date"
@@ -39809,7 +44527,6 @@ components:
order: 0
type: "string"
source-typeform-update:
- title: "Source Typeform Spec"
type: "object"
required:
- "credentials"
@@ -39905,6 +44622,7 @@ components:
description: "API Key that is generated when you authenticate to Dremio\
\ API"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
base_url:
type: "string"
description: "URL of your Dremio instance"
@@ -39933,7 +44651,6 @@ components:
description: "URL of your Dremio instance"
default: "https://app.dremio.cloud"
source-paypal-transaction:
- title: "Paypal Transaction Search"
type: "object"
required:
- "client_id"
@@ -39947,32 +44664,44 @@ components:
title: "Client ID"
description: "The Client ID of your Paypal developer application."
airbyte_secret: true
+ order: 0
client_secret:
type: "string"
title: "Client secret"
description: "The Client Secret of your Paypal developer application."
airbyte_secret: true
- refresh_token:
- type: "string"
- title: "Refresh token"
- description: "The key to refresh the expired access token."
- airbyte_secret: true
+ order: 1
start_date:
- type: "string"
title: "Start Date"
- description: "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\
+ description: "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\
\ present time."
+ type: "string"
examples:
- "2021-06-11T23:59:59"
- "2021-06-11T23:59:59+00:00"
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$"
format: "date-time"
+ order: 2
is_sandbox:
title: "Sandbox"
description: "Determines whether to use the sandbox or production environment."
type: "boolean"
default: false
+ refresh_token:
+ type: "string"
+ title: "Refresh token"
+ description: "The key to refresh the expired access token."
+ airbyte_secret: true
+ x-speakeasy-param-sensitive: true
+ time_window:
+ type: "integer"
+ title: "Number of days per request"
+ description: "The number of days per request. Must be a number between 1\
+ \ and 31."
+ default: 7
+ minimum: 1
+ maximum: 31
sourceType:
title: "paypal-transaction"
const: "paypal-transaction"
@@ -39981,7 +44710,6 @@ components:
order: 0
type: "string"
source-paypal-transaction-update:
- title: "Paypal Transaction Search"
type: "object"
required:
- "client_id"
@@ -39994,32 +44722,43 @@ components:
title: "Client ID"
description: "The Client ID of your Paypal developer application."
airbyte_secret: true
+ order: 0
client_secret:
type: "string"
title: "Client secret"
description: "The Client Secret of your Paypal developer application."
airbyte_secret: true
- refresh_token:
- type: "string"
- title: "Refresh token"
- description: "The key to refresh the expired access token."
- airbyte_secret: true
+ order: 1
start_date:
- type: "string"
title: "Start Date"
- description: "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\
+ description: "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\
\ present time."
+ type: "string"
examples:
- "2021-06-11T23:59:59"
- "2021-06-11T23:59:59+00:00"
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$"
format: "date-time"
+ order: 2
is_sandbox:
title: "Sandbox"
description: "Determines whether to use the sandbox or production environment."
type: "boolean"
default: false
+ refresh_token:
+ type: "string"
+ title: "Refresh token"
+ description: "The key to refresh the expired access token."
+ airbyte_secret: true
+ time_window:
+ type: "integer"
+ title: "Number of days per request"
+ description: "The number of days per request. Must be a number between 1\
+ \ and 31."
+ default: 7
+ minimum: 1
+ maximum: 31
source-lemlist:
title: "Lemlist Spec"
type: "object"
@@ -40032,6 +44771,7 @@ components:
title": "API key"
description: "Lemlist API key,"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "lemlist"
const: "lemlist"
@@ -40065,6 +44805,7 @@ components:
\ goto https://www.pexels.com/api/documentation and create account for\
\ free."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
query:
title: "Specific query for the search"
type: "string"
@@ -40185,6 +44926,7 @@ components:
type: "string"
description: "API key provided by Glassfrog"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "glassfrog"
const: "glassfrog"
@@ -40207,11 +44949,11 @@ components:
type: "object"
properties:
account_id:
- title: "Account ID"
+ title: "Ad Account ID"
description: "The Facebook Ad account ID to use when pulling data from the\
- \ Facebook Marketing API. Open your Meta Ads Manager. The Ad account ID\
- \ number is in the account dropdown menu or in your browser's address\
- \ bar. See the Meta Ads Manager. See the docs for more information."
order: 0
pattern: "^[0-9]+$"
@@ -40219,12 +44961,24 @@ components:
examples:
- "111111111111111"
type: "string"
+ access_token:
+ title: "Access Token"
+ description: "The value of the generated access token. From your App’s Dashboard,\
+ \ click on \"Marketing API\" then \"Tools\". Select permissions ads_management,\
+ \ ads_read, read_insights, business_management. Then click on \"Get\
+ \ token\". See the docs for more information."
+ order: 1
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
start_date:
title: "Start Date"
description: "The date from which you'd like to replicate data for all incremental\
- \ streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after\
- \ this date will be replicated."
- order: 1
+ \ streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data\
+ \ will be replicated for usual streams and only last 2 years for insight\
+ \ streams."
+ order: 2
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- "2017-01-25T00:00:00Z"
@@ -40236,22 +44990,12 @@ components:
\ incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated\
\ between the start date and this end date will be replicated. Not setting\
\ this option will result in always syncing the latest data."
- order: 2
+ order: 3
pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- "2017-01-26T00:00:00Z"
type: "string"
format: "date-time"
- access_token:
- title: "Access Token"
- description: "The value of the generated access token. From your App’s Dashboard,\
- \ click on \"Marketing API\" then \"Tools\". Select permissions ads_management,\
- \ ads_read, read_insights, business_management. Then click on \"Get\
- \ token\". See the docs for more information."
- order: 3
- airbyte_secret: true
- type: "string"
include_deleted:
title: "Include Deleted Campaigns, Ads, and AdSets"
description: "Set to active if you want to include data from deleted Campaigns,\
@@ -40574,16 +45318,6 @@ components:
mininum: 1
exclusiveMinimum: 0
type: "integer"
- max_batch_size:
- title: "Maximum size of Batched Requests"
- description: "Maximum batch size used when sending batch requests to Facebook\
- \ API. Most users do not need to set this field unless they specifically\
- \ need to tune the connector to address specific issues or use cases."
- default: 50
- exclusiveMinimum: 0
- maximum: 50
- order: 9
- type: "integer"
action_breakdowns_allow_empty:
title: "Action Breakdowns Allow Empty"
description: "Allows action_breakdowns to be an empty list"
@@ -40611,7 +45345,6 @@ components:
type: "string"
required:
- "account_id"
- - "start_date"
- "access_token"
- "sourceType"
source-facebook-marketing-update:
@@ -40619,11 +45352,11 @@ components:
type: "object"
properties:
account_id:
- title: "Account ID"
+ title: "Ad Account ID"
description: "The Facebook Ad account ID to use when pulling data from the\
- \ Facebook Marketing API. Open your Meta Ads Manager. The Ad account ID\
- \ number is in the account dropdown menu or in your browser's address\
- \ bar. See the Meta Ads Manager. See the docs for more information."
order: 0
pattern: "^[0-9]+$"
@@ -40631,12 +45364,23 @@ components:
examples:
- "111111111111111"
type: "string"
+ access_token:
+ title: "Access Token"
+ description: "The value of the generated access token. From your App’s Dashboard,\
+ \ click on \"Marketing API\" then \"Tools\". Select permissions ads_management,\
+ \ ads_read, read_insights, business_management. Then click on \"Get\
+ \ token\". See the docs for more information."
+ order: 1
+ airbyte_secret: true
+ type: "string"
start_date:
title: "Start Date"
description: "The date from which you'd like to replicate data for all incremental\
- \ streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after\
- \ this date will be replicated."
- order: 1
+ \ streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data\
+ \ will be replicated for usual streams and only last 2 years for insight\
+ \ streams."
+ order: 2
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- "2017-01-25T00:00:00Z"
@@ -40648,22 +45392,12 @@ components:
\ incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated\
\ between the start date and this end date will be replicated. Not setting\
\ this option will result in always syncing the latest data."
- order: 2
+ order: 3
pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- "2017-01-26T00:00:00Z"
type: "string"
format: "date-time"
- access_token:
- title: "Access Token"
- description: "The value of the generated access token. From your App’s Dashboard,\
- \ click on \"Marketing API\" then \"Tools\". Select permissions ads_management,\
- \ ads_read, read_insights, business_management. Then click on \"Get\
- \ token\". See the docs for more information."
- order: 3
- airbyte_secret: true
- type: "string"
include_deleted:
title: "Include Deleted Campaigns, Ads, and AdSets"
description: "Set to active if you want to include data from deleted Campaigns,\
@@ -40986,16 +45720,6 @@ components:
mininum: 1
exclusiveMinimum: 0
type: "integer"
- max_batch_size:
- title: "Maximum size of Batched Requests"
- description: "Maximum batch size used when sending batch requests to Facebook\
- \ API. Most users do not need to set this field unless they specifically\
- \ need to tune the connector to address specific issues or use cases."
- default: 50
- exclusiveMinimum: 0
- maximum: 50
- order: 9
- type: "integer"
action_breakdowns_allow_empty:
title: "Action Breakdowns Allow Empty"
description: "Allows action_breakdowns to be an empty list"
@@ -41016,7 +45740,6 @@ components:
type: "string"
required:
- "account_id"
- - "start_date"
- "access_token"
source-facebook-pages:
title: "Facebook Pages Spec"
@@ -41032,6 +45755,7 @@ components:
description: "Facebook Page Access Token"
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
page_id:
type: "string"
title: "Page ID"
@@ -41076,6 +45800,7 @@ components:
description: "Recruitee API Key. See here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
company_id:
title: "Company ID"
type: "integer"
@@ -41121,6 +45846,7 @@ components:
>here. The key is case sensitive."
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
survey_id:
type: "array"
description: "A List of your survey ids for survey-specific stream"
@@ -41233,6 +45959,7 @@ components:
>docs for more information on how to obtain this key."
order: 1
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
storage_endpoint_suffix:
title: "Endpoint Suffix"
type: "string"
@@ -41337,6 +46064,7 @@ components:
description: "Access Token for making authenticated requests. See the\
\ docs for information on how to generate this key."
+ x-speakeasy-param-sensitive: true
start_date:
title: "Start Date"
order: 3
@@ -41448,6 +46176,7 @@ components:
description: "PersistIq API Key. See the docs for more information on where to find that key."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "persistiq"
const: "persistiq"
@@ -41485,6 +46214,7 @@ components:
description: "Basic auth password. See here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "configcat"
const: "configcat"
@@ -41525,6 +46255,7 @@ components:
title: "API Token"
description: "Your Insightly API token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
type:
- "string"
@@ -41568,262 +46299,434 @@ components:
examples:
- "2021-03-01T00:00:00Z"
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
- source-oracle:
- title: "Oracle Source Spec"
+ source-cart:
+ title: "Cart.com Spec"
type: "object"
required:
- - "host"
- - "port"
- - "username"
- - "encryption"
+ - "start_date"
- "sourceType"
properties:
- host:
- title: "Host"
- description: "Hostname of the database."
- type: "string"
- order: 1
- port:
- title: "Port"
- description: "Port of the database.\nOracle Corporations recommends the\
- \ following port numbers:\n1521 - Default listening port for client connections\
- \ to the listener. \n2484 - Recommended and officially registered listening\
- \ port for client connections to the listener using TCP/IP with SSL"
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 1521
- order: 2
- connection_data:
- title: "Connect by"
+ credentials:
+ title: "Authorization Method"
+ description: ""
type: "object"
- description: "Connect data that will be used for DB connection"
- order: 3
oneOf:
- - title: "Service name"
- description: "Use service name"
+ - title: "Central API Router"
+ type: "object"
+ order: 0
required:
- - "service_name"
+ - "auth_type"
+ - "user_name"
+ - "user_secret"
+ - "site_id"
properties:
- connection_type:
+ auth_type:
type: "string"
- const: "service_name"
- default: "service_name"
+ const: "CENTRAL_API_ROUTER"
order: 0
enum:
- - "service_name"
- service_name:
- title: "Service name"
+ - "CENTRAL_API_ROUTER"
+ user_name:
type: "string"
+ title: "User Name"
+ description: "Enter your application's User Name"
+ airbyte_secret: true
order: 1
- - title: "System ID (SID)"
- description: "Use SID (Oracle System Identifier)"
+ user_secret:
+ type: "string"
+ title: "User Secret"
+ description: "Enter your application's User Secret"
+ airbyte_secret: true
+ order: 2
+ site_id:
+ type: "string"
+ title: "Site ID"
+ description: "You can determine a site provisioning site Id by hitting\
+ \ https://site.com/store/sitemonitor.aspx and reading the response\
+ \ param PSID"
+ airbyte_secret: true
+ order: 3
+ - title: "Single Store Access Token"
+ type: "object"
+ order: 1
required:
- - "sid"
+ - "auth_type"
+ - "access_token"
+ - "store_name"
properties:
- connection_type:
+ auth_type:
type: "string"
- const: "sid"
- default: "sid"
+ const: "SINGLE_STORE_ACCESS_TOKEN"
order: 0
enum:
- - "sid"
- sid:
- title: "System ID (SID)"
+ - "SINGLE_STORE_ACCESS_TOKEN"
+ access_token:
type: "string"
+ title: "Access Token"
+ airbyte_secret: true
order: 1
- username:
- title: "User"
- description: "The username which is used to access the database."
- type: "string"
- order: 4
- password:
- title: "Password"
- description: "The password associated with the username."
+ description: "Access Token for making authenticated requests."
+ x-speakeasy-param-sensitive: true
+ store_name:
+ type: "string"
+ title: "Store Name"
+ order: 2
+ description: "The name of Cart.com Online Store. All API URLs start\
+ \ with https://[mystorename.com]/api/v1/, where [mystorename.com]\
+ \ is the domain name of your store."
+ start_date:
+ title: "Start Date"
type: "string"
- airbyte_secret: true
- order: 5
- schemas:
- title: "Schemas"
- description: "The list of schemas to sync from. Defaults to user. Case sensitive."
- type: "array"
- items:
- type: "string"
- minItems: 1
- uniqueItems: true
- order: 6
- jdbc_url_params:
- title: "JDBC URL Params"
- description: "Additional properties to pass to the JDBC URL string when\
- \ connecting to the database formatted as 'key=value' pairs separated\
- \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
+ description: "The date from which you'd like to replicate the data"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2021-01-01T00:00:00Z"
+ sourceType:
+ title: "cart"
+ const: "cart"
+ enum:
+ - "cart"
+ order: 0
type: "string"
- order: 7
- encryption:
- title: "Encryption"
- type: "object"
- description: "The encryption method with is used when communicating with\
- \ the database."
- order: 8
- oneOf:
- - title: "Native Network Encryption (NNE)"
- description: "The native network encryption gives you the ability to encrypt\
- \ database connections, without the configuration overhead of TCP/IP\
- \ and SSL/TLS and without the need to open and listen on different ports."
- required:
- - "encryption_method"
- properties:
- encryption_method:
- type: "string"
- const: "client_nne"
- enum:
- - "client_nne"
- default: "client_nne"
- encryption_algorithm:
- type: "string"
- description: "This parameter defines what encryption algorithm is\
- \ used."
- title: "Encryption Algorithm"
- default: "AES256"
- enum:
- - "AES256"
- - "RC4_56"
- - "3DES168"
- - title: "TLS Encrypted (verify certificate)"
- description: "Verify and use the certificate provided by the server."
- required:
- - "encryption_method"
- - "ssl_certificate"
- properties:
- encryption_method:
- type: "string"
- const: "encrypted_verify_certificate"
- enum:
- - "encrypted_verify_certificate"
- default: "encrypted_verify_certificate"
- ssl_certificate:
- title: "SSL PEM File"
- description: "Privacy Enhanced Mail (PEM) files are concatenated certificate\
- \ containers frequently used in certificate installations."
- type: "string"
- airbyte_secret: true
- multiline: true
- order: 4
- tunnel_method:
+ source-cart-update:
+ title: "Cart.com Spec"
+ type: "object"
+ required:
+ - "start_date"
+ properties:
+ credentials:
+ title: "Authorization Method"
+ description: ""
type: "object"
- title: "SSH Tunnel Method"
- description: "Whether to initiate an SSH tunnel before connecting to the\
- \ database, and if so, which kind of authentication to use."
oneOf:
- - title: "No Tunnel"
- required:
- - "tunnel_method"
- properties:
- tunnel_method:
- description: "No ssh tunnel needed to connect to database"
- type: "string"
- const: "NO_TUNNEL"
- order: 0
- enum:
- - "NO_TUNNEL"
- - title: "SSH Key Authentication"
+ - title: "Central API Router"
+ type: "object"
+ order: 0
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "ssh_key"
+ - "auth_type"
+ - "user_name"
+ - "user_secret"
+ - "site_id"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and ssh key"
+ auth_type:
type: "string"
- const: "SSH_KEY_AUTH"
+ const: "CENTRAL_API_ROUTER"
order: 0
enum:
- - "SSH_KEY_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
+ - "CENTRAL_API_ROUTER"
+ user_name:
type: "string"
+ title: "User Name"
+ description: "Enter your application's User Name"
+ airbyte_secret: true
order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
- examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host."
+ user_secret:
type: "string"
- order: 3
- ssh_key:
- title: "SSH Private Key"
- description: "OS-level user account ssh key credentials in RSA PEM\
- \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ title: "User Secret"
+ description: "Enter your application's User Secret"
+ airbyte_secret: true
+ order: 2
+ site_id:
type: "string"
+ title: "Site ID"
+ description: "You can determine a site provisioning site Id by hitting\
+ \ https://site.com/store/sitemonitor.aspx and reading the response\
+ \ param PSID"
airbyte_secret: true
- multiline: true
- order: 4
- - title: "Password Authentication"
+ order: 3
+ - title: "Single Store Access Token"
+ type: "object"
+ order: 1
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "tunnel_user_password"
+ - "auth_type"
+ - "access_token"
+ - "store_name"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and password authentication"
+ auth_type:
type: "string"
- const: "SSH_PASSWORD_AUTH"
+ const: "SINGLE_STORE_ACCESS_TOKEN"
order: 0
enum:
- - "SSH_PASSWORD_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
+ - "SINGLE_STORE_ACCESS_TOKEN"
+ access_token:
type: "string"
+ title: "Access Token"
+ airbyte_secret: true
order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
- examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host"
- type: "string"
- order: 3
- tunnel_user_password:
- title: "Password"
- description: "OS-level password for logging into the jump server host"
+ description: "Access Token for making authenticated requests."
+ store_name:
type: "string"
- airbyte_secret: true
- order: 4
- sourceType:
- title: "oracle"
- const: "oracle"
- enum:
- - "oracle"
- order: 0
+ title: "Store Name"
+ order: 2
+ description: "The name of Cart.com Online Store. All API URLs start\
+ \ with https://[mystorename.com]/api/v1/, where [mystorename.com]\
+ \ is the domain name of your store."
+ start_date:
+ title: "Start Date"
type: "string"
- source-oracle-update:
+ description: "The date from which you'd like to replicate the data"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ examples:
+ - "2021-01-01T00:00:00Z"
+ source-oracle:
+ title: "Oracle Source Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "username"
+ - "encryption"
+ - "sourceType"
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 1
+ port:
+ title: "Port"
+ description: "Port of the database.\nOracle Corporations recommends the\
+ \ following port numbers:\n1521 - Default listening port for client connections\
+ \ to the listener. \n2484 - Recommended and officially registered listening\
+ \ port for client connections to the listener using TCP/IP with SSL"
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 1521
+ order: 2
+ connection_data:
+ title: "Connect by"
+ type: "object"
+ description: "Connect data that will be used for DB connection"
+ order: 3
+ oneOf:
+ - title: "Service name"
+ description: "Use service name"
+ required:
+ - "service_name"
+ properties:
+ connection_type:
+ type: "string"
+ const: "service_name"
+ default: "service_name"
+ order: 0
+ enum:
+ - "service_name"
+ service_name:
+ title: "Service name"
+ type: "string"
+ order: 1
+ - title: "System ID (SID)"
+ description: "Use SID (Oracle System Identifier)"
+ required:
+ - "sid"
+ properties:
+ connection_type:
+ type: "string"
+ const: "sid"
+ default: "sid"
+ order: 0
+ enum:
+ - "sid"
+ sid:
+ title: "System ID (SID)"
+ type: "string"
+ order: 1
+ username:
+ title: "User"
+ description: "The username which is used to access the database."
+ type: "string"
+ order: 4
+ password:
+ title: "Password"
+ description: "The password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 5
+ x-speakeasy-param-sensitive: true
+ schemas:
+ title: "Schemas"
+ description: "The list of schemas to sync from. Defaults to user. Case sensitive."
+ type: "array"
+ items:
+ type: "string"
+ minItems: 1
+ uniqueItems: true
+ order: 6
+ jdbc_url_params:
+ title: "JDBC URL Params"
+ description: "Additional properties to pass to the JDBC URL string when\
+ \ connecting to the database formatted as 'key=value' pairs separated\
+ \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
+ type: "string"
+ order: 7
+ encryption:
+ title: "Encryption"
+ type: "object"
+ description: "The encryption method with is used when communicating with\
+ \ the database."
+ order: 8
+ oneOf:
+ - title: "Native Network Encryption (NNE)"
+ description: "The native network encryption gives you the ability to encrypt\
+ \ database connections, without the configuration overhead of TCP/IP\
+ \ and SSL/TLS and without the need to open and listen on different ports."
+ required:
+ - "encryption_method"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "client_nne"
+ enum:
+ - "client_nne"
+ default: "client_nne"
+ encryption_algorithm:
+ type: "string"
+ description: "This parameter defines what encryption algorithm is\
+ \ used."
+ title: "Encryption Algorithm"
+ default: "AES256"
+ enum:
+ - "AES256"
+ - "RC4_56"
+ - "3DES168"
+ - title: "TLS Encrypted (verify certificate)"
+ description: "Verify and use the certificate provided by the server."
+ required:
+ - "encryption_method"
+ - "ssl_certificate"
+ properties:
+ encryption_method:
+ type: "string"
+ const: "encrypted_verify_certificate"
+ enum:
+ - "encrypted_verify_certificate"
+ default: "encrypted_verify_certificate"
+ ssl_certificate:
+ title: "SSL PEM File"
+ description: "Privacy Enhanced Mail (PEM) files are concatenated certificate\
+ \ containers frequently used in certificate installations."
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ enum:
+ - "NO_TUNNEL"
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ enum:
+ - "SSH_KEY_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ x-speakeasy-param-sensitive: true
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ enum:
+ - "SSH_PASSWORD_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ x-speakeasy-param-sensitive: true
+ sourceType:
+ title: "oracle"
+ const: "oracle"
+ enum:
+ - "oracle"
+ order: 0
+ type: "string"
+ source-oracle-update:
title: "Oracle Source Spec"
type: "object"
required:
@@ -42106,7 +47009,6 @@ components:
required:
- "api_key"
- "start_date"
- - "interval"
- "sourceType"
properties:
api_key:
@@ -42116,6 +47018,7 @@ components:
> the docs for info on how to obtain this."
airbyte_secret: true
order: 0
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Start date"
@@ -42126,18 +47029,6 @@ components:
- "2017-01-25T00:00:00Z"
order: 1
format: "date-time"
- interval:
- type: "string"
- title: "Interval"
- description: "Some APIs such as Metrics require intervals to cluster data."
- enum:
- - "day"
- - "week"
- - "month"
- - "quarter"
- default: "month"
- order: 2
sourceType:
title: "chartmogul"
const: "chartmogul"
@@ -42151,7 +47042,6 @@ components:
required:
- "api_key"
- "start_date"
- - "interval"
properties:
api_key:
type: "string"
@@ -42170,18 +47060,6 @@ components:
- "2017-01-25T00:00:00Z"
order: 1
format: "date-time"
- interval:
- type: "string"
- title: "Interval"
- description: "Some APIs such as Metrics require intervals to cluster data."
- enum:
- - "day"
- - "week"
- - "month"
- - "quarter"
- default: "month"
- order: 2
source-coinmarketcap:
title: "Coinmarketcap Spec"
type: "object"
@@ -42196,6 +47074,7 @@ components:
description: "Your API Key. See here. The token is case sensitive."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
data_type:
title: "Data type"
type: "string"
@@ -42257,7 +47136,6 @@ components:
- "AVAX"
- "BTC"
source-dixa:
- title: "Dixa Spec"
type: "object"
required:
- "api_token"
@@ -42268,12 +47146,8 @@ components:
type: "string"
description: "Dixa API token"
airbyte_secret: true
- start_date:
- type: "string"
- description: "The connector pulls records updated from this date onwards."
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
- examples:
- - "YYYY-MM-DD"
+ order: 1
+ x-speakeasy-param-sensitive: true
batch_size:
type: "integer"
description: "Number of days to batch into one request. Max 31."
@@ -42282,6 +47156,15 @@ components:
- 1
- 31
default: 31
+ order: 2
+ start_date:
+ type: "string"
+ title: "Start date"
+ format: "date-time"
+ description: "The connector pulls records updated from this date onwards."
+ examples:
+ - "YYYY-MM-DD"
+ order: 3
sourceType:
title: "dixa"
const: "dixa"
@@ -42290,7 +47173,6 @@ components:
order: 0
type: "string"
source-dixa-update:
- title: "Dixa Spec"
type: "object"
required:
- "api_token"
@@ -42300,12 +47182,7 @@ components:
type: "string"
description: "Dixa API token"
airbyte_secret: true
- start_date:
- type: "string"
- description: "The connector pulls records updated from this date onwards."
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
- examples:
- - "YYYY-MM-DD"
+ order: 1
batch_size:
type: "integer"
description: "Number of days to batch into one request. Max 31."
@@ -42314,13 +47191,21 @@ components:
- 1
- 31
default: 31
+ order: 2
+ start_date:
+ type: "string"
+ title: "Start date"
+ format: "date-time"
+ description: "The connector pulls records updated from this date onwards."
+ examples:
+ - "YYYY-MM-DD"
+ order: 3
source-freshcaller:
title: "Freshcaller Spec"
type: "object"
required:
- "domain"
- "api_key"
- - "start_date"
- "sourceType"
properties:
domain:
@@ -42335,6 +47220,7 @@ components:
description: "Freshcaller API Key. See the docs for more information on how to obtain this key."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
requests_per_minute:
title: "Requests per minute"
type: "integer"
@@ -42368,7 +47254,6 @@ components:
required:
- "domain"
- "api_key"
- - "start_date"
properties:
domain:
type: "string"
@@ -42427,6 +47312,7 @@ components:
https://docs.airbyte.com/integrations/sources/recharge\">docs for\
\ more information."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "recharge"
const: "recharge"
@@ -42471,6 +47357,7 @@ components:
description: "API Key"
title: "API Bearer Token"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
url:
type: "string"
description: "URL"
@@ -42518,6 +47405,7 @@ components:
type: "string"
description: "API Key"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "datascope"
const: "datascope"
@@ -42712,6 +47600,7 @@ components:
type: "string"
airbyte_secret: true
order: 2
+ x-speakeasy-param-sensitive: true
session_token:
type: "string"
description: "To generate your session token, you need to run the following\
@@ -42722,6 +47611,7 @@ components:
\ by default, sessions are good for 14 days and needs to be regenerated."
airbyte_secret: true
order: 3
+ x-speakeasy-param-sensitive: true
sourceType:
title: "metabase"
const: "metabase"
@@ -42767,7 +47657,6 @@ components:
- "developer_token"
- "client_id"
- "refresh_token"
- - "reports_start_date"
- "sourceType"
properties:
auth_method:
@@ -42803,6 +47692,7 @@ components:
description: "Refresh Token to renew the expired Access Token."
airbyte_secret: true
order: 3
+ x-speakeasy-param-sensitive: true
developer_token:
type: "string"
title: "Developer Token"
@@ -42811,26 +47701,120 @@ components:
> in the docs."
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
reports_start_date:
type: "string"
title: "Reports replication start date"
format: "date"
- default: "2020-01-01"
description: "The start date from which to begin replicating report data.\
\ Any data generated before this date will not be replicated in reports.\
- \ This is a UTC date in YYYY-MM-DD format."
+ \ This is a UTC date in YYYY-MM-DD format. If not set, data from previous\
+ \ and current calendar year will be replicated."
order: 5
lookback_window:
title: "Lookback window"
description: "Also known as attribution or conversion window. How far into\
\ the past to look for records (in days). If your conversion window has\
\ an hours/minutes granularity, round it up to the number of days exceeding.\
- \ Used only for performance report streams in incremental mode."
+ \ Used only for performance report streams in incremental mode without\
+ \ specified Reports Start Date."
type: "integer"
default: 0
minimum: 0
maximum: 90
order: 6
+ custom_reports:
+ title: "Custom Reports"
+ description: "You can add your Custom Bing Ads report by creating one."
+ order: 7
+ type: "array"
+ items:
+ title: "Custom Report Config"
+ type: "object"
+ properties:
+ name:
+ title: "Report Name"
+ description: "The name of the custom report, this name would be used\
+ \ as stream name"
+ type: "string"
+ examples:
+ - "Account Performance"
+ - "AdDynamicTextPerformanceReport"
+ - "custom report"
+ reporting_object:
+ title: "Reporting Data Object"
+              description: "The name of the object derives from the ReportRequest\
+ \ object. You can find it in Bing Ads Api docs - Reporting API -\
+ \ Reporting Data Objects."
+ type: "string"
+ enum:
+ - "AccountPerformanceReportRequest"
+ - "AdDynamicTextPerformanceReportRequest"
+ - "AdExtensionByAdReportRequest"
+ - "AdExtensionByKeywordReportRequest"
+ - "AdExtensionDetailReportRequest"
+ - "AdGroupPerformanceReportRequest"
+ - "AdPerformanceReportRequest"
+ - "AgeGenderAudienceReportRequest"
+ - "AudiencePerformanceReportRequest"
+ - "CallDetailReportRequest"
+ - "CampaignPerformanceReportRequest"
+ - "ConversionPerformanceReportRequest"
+ - "DestinationUrlPerformanceReportRequest"
+ - "DSAAutoTargetPerformanceReportRequest"
+ - "DSACategoryPerformanceReportRequest"
+ - "DSASearchQueryPerformanceReportRequest"
+ - "GeographicPerformanceReportRequest"
+ - "GoalsAndFunnelsReportRequest"
+ - "HotelDimensionPerformanceReportRequest"
+ - "HotelGroupPerformanceReportRequest"
+ - "KeywordPerformanceReportRequest"
+ - "NegativeKeywordConflictReportRequest"
+ - "ProductDimensionPerformanceReportRequest"
+ - "ProductMatchCountReportRequest"
+ - "ProductNegativeKeywordConflictReportRequest"
+ - "ProductPartitionPerformanceReportRequest"
+ - "ProductPartitionUnitPerformanceReportRequest"
+ - "ProductSearchQueryPerformanceReportRequest"
+ - "ProfessionalDemographicsAudienceReportRequest"
+ - "PublisherUsagePerformanceReportRequest"
+ - "SearchCampaignChangeHistoryReportRequest"
+ - "SearchQueryPerformanceReportRequest"
+ - "ShareOfVoiceReportRequest"
+ - "UserLocationPerformanceReportRequest"
+ report_columns:
+ title: "Columns"
+ description: "A list of available report object columns. You can find\
+ \ it in description of reporting object that you want to add to\
+ \ custom report."
+ type: "array"
+ items:
+ description: "Name of report column."
+ type: "string"
+ minItems: 1
+ report_aggregation:
+ title: "Aggregation"
+ description: "A list of available aggregations."
+ type: "string"
+ items:
+ title: "ValidEnums"
+ description: "An enumeration of aggregations."
+ enum:
+ - "Hourly"
+ - "Daily"
+ - "Weekly"
+ - "Monthly"
+ - "DayOfWeek"
+ - "HourOfDay"
+ - "WeeklyStartingMonday"
+ - "Summary"
+ default:
+ - "Hourly"
+ required:
+ - "name"
+ - "reporting_object"
+ - "report_columns"
+ - "report_aggregation"
sourceType:
title: "bing-ads"
const: "bing-ads"
@@ -42845,7 +47829,6 @@ components:
- "developer_token"
- "client_id"
- "refresh_token"
- - "reports_start_date"
properties:
auth_method:
type: "string"
@@ -42892,252 +47875,358 @@ components:
type: "string"
title: "Reports replication start date"
format: "date"
- default: "2020-01-01"
description: "The start date from which to begin replicating report data.\
\ Any data generated before this date will not be replicated in reports.\
- \ This is a UTC date in YYYY-MM-DD format."
+ \ This is a UTC date in YYYY-MM-DD format. If not set, data from previous\
+ \ and current calendar year will be replicated."
order: 5
lookback_window:
title: "Lookback window"
description: "Also known as attribution or conversion window. How far into\
\ the past to look for records (in days). If your conversion window has\
\ an hours/minutes granularity, round it up to the number of days exceeding.\
- \ Used only for performance report streams in incremental mode."
+ \ Used only for performance report streams in incremental mode without\
+ \ specified Reports Start Date."
type: "integer"
default: 0
minimum: 0
maximum: 90
order: 6
- source-e2e-test-cloud:
- title: "Cloud E2E Test Source Spec"
- type: "object"
- required:
- - "max_messages"
- - "mock_catalog"
- - "sourceType"
- properties:
- type:
- type: "string"
- const: "CONTINUOUS_FEED"
- default: "CONTINUOUS_FEED"
- order: 10
- enum:
- - "CONTINUOUS_FEED"
- max_messages:
- title: "Max Records"
- description: "Number of records to emit per stream. Min 1. Max 100 billion."
- type: "integer"
- default: 100
- min: 1
- max: 100000000000
- order: 20
- seed:
- title: "Random Seed"
- description: "When the seed is unspecified, the current time millis will\
- \ be used as the seed. Range: [0, 1000000]."
- type: "integer"
- default: 0
- examples:
- - 42
- min: 0
- max: 1000000
- order: 30
- message_interval_ms:
- title: "Message Interval (ms)"
- description: "Interval between messages in ms. Min 0 ms. Max 60000 ms (1\
- \ minute)."
- type: "integer"
- min: 0
- max: 60000
- default: 0
- order: 40
- mock_catalog:
- title: "Mock Catalog"
- type: "object"
- order: 50
- oneOf:
- - title: "Single Schema"
- description: "A catalog with one or multiple streams that share the same\
- \ schema."
- required:
- - "type"
- - "stream_name"
- - "stream_schema"
+ custom_reports:
+ title: "Custom Reports"
+ description: "You can add your Custom Bing Ads report by creating one."
+ order: 7
+ type: "array"
+ items:
+ title: "Custom Report Config"
+ type: "object"
properties:
- type:
+ name:
+ title: "Report Name"
+ description: "The name of the custom report, this name would be used\
+ \ as stream name"
type: "string"
- const: "SINGLE_STREAM"
- default: "SINGLE_STREAM"
- enum:
- - "SINGLE_STREAM"
- stream_name:
- title: "Stream Name"
- description: "Name of the data stream."
- type: "string"
- default: "data_stream"
- stream_schema:
- title: "Stream Schema"
- description: "A Json schema for the stream. The schema should be compatible\
- \ with draft-07. See this doc for examples."
- type: "string"
- default: "{ \"type\": \"object\", \"properties\": { \"column1\": {\
- \ \"type\": \"string\" } } }"
- stream_duplication:
- title: "Duplicate the stream N times"
- description: "Duplicate the stream for easy load testing. Each stream\
- \ name will have a number suffix. For example, if the stream name\
- \ is \"ds\", the duplicated streams will be \"ds_0\", \"ds_1\",\
- \ etc."
- type: "integer"
- default: 1
- min: 1
- max: 10000
- - title: "Multi Schema"
- description: "A catalog with multiple data streams, each with a different\
- \ schema."
- required:
- - "type"
- - "stream_schemas"
- properties:
- type:
+ examples:
+ - "Account Performance"
+ - "AdDynamicTextPerformanceReport"
+ - "custom report"
+ reporting_object:
+ title: "Reporting Data Object"
+            description: "The name of the object derives from the ReportRequest\
+ \ object. You can find it in Bing Ads Api docs - Reporting API -\
+ \ Reporting Data Objects."
+ type: "string"
+ enum:
+ - "AccountPerformanceReportRequest"
+ - "AdDynamicTextPerformanceReportRequest"
+ - "AdExtensionByAdReportRequest"
+ - "AdExtensionByKeywordReportRequest"
+ - "AdExtensionDetailReportRequest"
+ - "AdGroupPerformanceReportRequest"
+ - "AdPerformanceReportRequest"
+ - "AgeGenderAudienceReportRequest"
+ - "AudiencePerformanceReportRequest"
+ - "CallDetailReportRequest"
+ - "CampaignPerformanceReportRequest"
+ - "ConversionPerformanceReportRequest"
+ - "DestinationUrlPerformanceReportRequest"
+ - "DSAAutoTargetPerformanceReportRequest"
+ - "DSACategoryPerformanceReportRequest"
+ - "DSASearchQueryPerformanceReportRequest"
+ - "GeographicPerformanceReportRequest"
+ - "GoalsAndFunnelsReportRequest"
+ - "HotelDimensionPerformanceReportRequest"
+ - "HotelGroupPerformanceReportRequest"
+ - "KeywordPerformanceReportRequest"
+ - "NegativeKeywordConflictReportRequest"
+ - "ProductDimensionPerformanceReportRequest"
+ - "ProductMatchCountReportRequest"
+ - "ProductNegativeKeywordConflictReportRequest"
+ - "ProductPartitionPerformanceReportRequest"
+ - "ProductPartitionUnitPerformanceReportRequest"
+ - "ProductSearchQueryPerformanceReportRequest"
+ - "ProfessionalDemographicsAudienceReportRequest"
+ - "PublisherUsagePerformanceReportRequest"
+ - "SearchCampaignChangeHistoryReportRequest"
+ - "SearchQueryPerformanceReportRequest"
+ - "ShareOfVoiceReportRequest"
+ - "UserLocationPerformanceReportRequest"
+ report_columns:
+ title: "Columns"
+ description: "A list of available report object columns. You can find\
+ \ it in description of reporting object that you want to add to\
+ \ custom report."
+ type: "array"
+ items:
+ description: "Name of report column."
+ type: "string"
+ minItems: 1
+ report_aggregation:
+ title: "Aggregation"
+ description: "A list of available aggregations."
type: "string"
- const: "MULTI_STREAM"
- default: "MULTI_STREAM"
- enum:
- - "MULTI_STREAM"
- stream_schemas:
- title: "Streams and Schemas"
- description: "A Json object specifying multiple data streams and their\
- \ schemas. Each key in this object is one stream name. Each value\
- \ is the schema for that stream. The schema should be compatible\
- \ with draft-07. See this doc for examples."
- type: "string"
- default: "{ \"stream1\": { \"type\": \"object\", \"properties\": {\
- \ \"field1\": { \"type\": \"string\" } } }, \"stream2\": { \"type\"\
- : \"object\", \"properties\": { \"field1\": { \"type\": \"boolean\"\
- \ } } } }"
- sourceType:
- title: "e2e-test-cloud"
- const: "e2e-test-cloud"
- enum:
- - "e2e-test-cloud"
- order: 0
- type: "string"
+ items:
+ title: "ValidEnums"
+ description: "An enumeration of aggregations."
+ enum:
+ - "Hourly"
+ - "Daily"
+ - "Weekly"
+ - "Monthly"
+ - "DayOfWeek"
+ - "HourOfDay"
+ - "WeeklyStartingMonday"
+ - "Summary"
+ default:
+ - "Hourly"
+ required:
+ - "name"
+ - "reporting_object"
+ - "report_columns"
+ - "report_aggregation"
+ source-e2e-test-cloud:
+ title: "Cloud E2E Test Source Spec"
+ type: "object"
+ oneOf:
+ - title: "Continuous Feed"
+ type: "object"
+ required:
+ - "type"
+ - "max_messages"
+ - "mock_catalog"
+ additionalProperties: true
+ properties:
+ type:
+ type: "string"
+ const: "CONTINUOUS_FEED"
+ default: "CONTINUOUS_FEED"
+ order: 10
+ enum:
+ - "CONTINUOUS_FEED"
+ max_messages:
+ title: "Max Records"
+ description: "Number of records to emit per stream. Min 1. Max 100 billion."
+ type: "integer"
+ default: 100
+ min: 1
+ max: 100000000000
+ order: 20
+ seed:
+ title: "Random Seed"
+ description: "When the seed is unspecified, the current time millis will\
+ \ be used as the seed. Range: [0, 1000000]."
+ type: "integer"
+ default: 0
+ examples:
+ - 42
+ min: 0
+ max: 1000000
+ order: 30
+ message_interval_ms:
+ title: "Message Interval (ms)"
+ description: "Interval between messages in ms. Min 0 ms. Max 60000 ms\
+ \ (1 minute)."
+ type: "integer"
+ min: 0
+ max: 60000
+ default: 0
+ order: 40
+ mock_catalog:
+ title: "Mock Catalog"
+ type: "object"
+ order: 50
+ oneOf:
+ - title: "Single Schema"
+ description: "A catalog with one or multiple streams that share the\
+ \ same schema."
+ type: "object"
+ required:
+ - "type"
+ - "stream_name"
+ - "stream_schema"
+ properties:
+ type:
+ type: "string"
+ const: "SINGLE_STREAM"
+ default: "SINGLE_STREAM"
+ enum:
+ - "SINGLE_STREAM"
+ stream_name:
+ title: "Stream Name"
+ description: "Name of the data stream."
+ type: "string"
+ default: "data_stream"
+ stream_schema:
+ title: "Stream Schema"
+ description: "A Json schema for the stream. The schema should be\
+ \ compatible with draft-07. See this doc for examples."
+ type: "string"
+ default: "{ \"type\": \"object\", \"properties\": { \"column1\"\
+ : { \"type\": \"string\" } } }"
+ stream_duplication:
+ title: "Duplicate the stream N times"
+ description: "Duplicate the stream for easy load testing. Each stream\
+ \ name will have a number suffix. For example, if the stream name\
+ \ is \"ds\", the duplicated streams will be \"ds_0\", \"ds_1\"\
+ , etc."
+ type: "integer"
+ default: 1
+ min: 1
+ max: 10000
+ - title: "Multi Schema"
+ type: "object"
+ description: "A catalog with multiple data streams, each with a different\
+ \ schema."
+ required:
+ - "type"
+ - "stream_schemas"
+ properties:
+ type:
+ type: "string"
+ const: "MULTI_STREAM"
+ default: "MULTI_STREAM"
+ enum:
+ - "MULTI_STREAM"
+ stream_schemas:
+ title: "Streams and Schemas"
+ description: "A Json object specifying multiple data streams and\
+ \ their schemas. Each key in this object is one stream name. Each\
+ \ value is the schema for that stream. The schema should be compatible\
+ \ with draft-07. See this doc for examples."
+ type: "string"
+ default: "{ \"stream1\": { \"type\": \"object\", \"properties\"\
+ : { \"field1\": { \"type\": \"string\" } } }, \"stream2\": { \"\
+ type\": \"object\", \"properties\": { \"field1\": { \"type\":\
+ \ \"boolean\" } } } }"
+ sourceType:
+ title: "e2e-test-cloud"
+ const: "e2e-test-cloud"
+ enum:
+ - "e2e-test-cloud"
+ order: 0
+ type: "string"
source-e2e-test-cloud-update:
title: "Cloud E2E Test Source Spec"
type: "object"
- required:
- - "max_messages"
- - "mock_catalog"
- properties:
- type:
- type: "string"
- const: "CONTINUOUS_FEED"
- default: "CONTINUOUS_FEED"
- order: 10
- enum:
- - "CONTINUOUS_FEED"
- max_messages:
- title: "Max Records"
- description: "Number of records to emit per stream. Min 1. Max 100 billion."
- type: "integer"
- default: 100
- min: 1
- max: 100000000000
- order: 20
- seed:
- title: "Random Seed"
- description: "When the seed is unspecified, the current time millis will\
- \ be used as the seed. Range: [0, 1000000]."
- type: "integer"
- default: 0
- examples:
- - 42
- min: 0
- max: 1000000
- order: 30
- message_interval_ms:
- title: "Message Interval (ms)"
- description: "Interval between messages in ms. Min 0 ms. Max 60000 ms (1\
- \ minute)."
- type: "integer"
- min: 0
- max: 60000
- default: 0
- order: 40
- mock_catalog:
- title: "Mock Catalog"
- type: "object"
- order: 50
- oneOf:
- - title: "Single Schema"
- description: "A catalog with one or multiple streams that share the same\
- \ schema."
- required:
- - "type"
- - "stream_name"
- - "stream_schema"
- properties:
- type:
- type: "string"
- const: "SINGLE_STREAM"
- default: "SINGLE_STREAM"
- enum:
- - "SINGLE_STREAM"
- stream_name:
- title: "Stream Name"
- description: "Name of the data stream."
- type: "string"
- default: "data_stream"
- stream_schema:
- title: "Stream Schema"
- description: "A Json schema for the stream. The schema should be compatible\
- \ with draft-07. See this doc for examples."
- type: "string"
- default: "{ \"type\": \"object\", \"properties\": { \"column1\": {\
- \ \"type\": \"string\" } } }"
- stream_duplication:
- title: "Duplicate the stream N times"
- description: "Duplicate the stream for easy load testing. Each stream\
- \ name will have a number suffix. For example, if the stream name\
- \ is \"ds\", the duplicated streams will be \"ds_0\", \"ds_1\",\
- \ etc."
- type: "integer"
- default: 1
- min: 1
- max: 10000
- - title: "Multi Schema"
- description: "A catalog with multiple data streams, each with a different\
- \ schema."
- required:
- - "type"
- - "stream_schemas"
- properties:
- type:
- type: "string"
- const: "MULTI_STREAM"
- default: "MULTI_STREAM"
- enum:
- - "MULTI_STREAM"
- stream_schemas:
- title: "Streams and Schemas"
- description: "A Json object specifying multiple data streams and their\
- \ schemas. Each key in this object is one stream name. Each value\
- \ is the schema for that stream. The schema should be compatible\
- \ with draft-07. See this doc for examples."
- type: "string"
- default: "{ \"stream1\": { \"type\": \"object\", \"properties\": {\
- \ \"field1\": { \"type\": \"string\" } } }, \"stream2\": { \"type\"\
- : \"object\", \"properties\": { \"field1\": { \"type\": \"boolean\"\
- \ } } } }"
+ oneOf:
+ - title: "Continuous Feed"
+ type: "object"
+ required:
+ - "type"
+ - "max_messages"
+ - "mock_catalog"
+ additionalProperties: true
+ properties:
+ type:
+ type: "string"
+ const: "CONTINUOUS_FEED"
+ default: "CONTINUOUS_FEED"
+ order: 10
+ enum:
+ - "CONTINUOUS_FEED"
+ max_messages:
+ title: "Max Records"
+ description: "Number of records to emit per stream. Min 1. Max 100 billion."
+ type: "integer"
+ default: 100
+ min: 1
+ max: 100000000000
+ order: 20
+ seed:
+ title: "Random Seed"
+ description: "When the seed is unspecified, the current time millis will\
+ \ be used as the seed. Range: [0, 1000000]."
+ type: "integer"
+ default: 0
+ examples:
+ - 42
+ min: 0
+ max: 1000000
+ order: 30
+ message_interval_ms:
+ title: "Message Interval (ms)"
+ description: "Interval between messages in ms. Min 0 ms. Max 60000 ms\
+ \ (1 minute)."
+ type: "integer"
+ min: 0
+ max: 60000
+ default: 0
+ order: 40
+ mock_catalog:
+ title: "Mock Catalog"
+ type: "object"
+ order: 50
+ oneOf:
+ - title: "Single Schema"
+ description: "A catalog with one or multiple streams that share the\
+ \ same schema."
+ type: "object"
+ required:
+ - "type"
+ - "stream_name"
+ - "stream_schema"
+ properties:
+ type:
+ type: "string"
+ const: "SINGLE_STREAM"
+ default: "SINGLE_STREAM"
+ enum:
+ - "SINGLE_STREAM"
+ stream_name:
+ title: "Stream Name"
+ description: "Name of the data stream."
+ type: "string"
+ default: "data_stream"
+ stream_schema:
+ title: "Stream Schema"
+ description: "A Json schema for the stream. The schema should be\
+ \ compatible with draft-07. See this doc for examples."
+ type: "string"
+ default: "{ \"type\": \"object\", \"properties\": { \"column1\"\
+ : { \"type\": \"string\" } } }"
+ stream_duplication:
+ title: "Duplicate the stream N times"
+ description: "Duplicate the stream for easy load testing. Each stream\
+ \ name will have a number suffix. For example, if the stream name\
+ \ is \"ds\", the duplicated streams will be \"ds_0\", \"ds_1\"\
+ , etc."
+ type: "integer"
+ default: 1
+ min: 1
+ max: 10000
+ - title: "Multi Schema"
+ type: "object"
+ description: "A catalog with multiple data streams, each with a different\
+ \ schema."
+ required:
+ - "type"
+ - "stream_schemas"
+ properties:
+ type:
+ type: "string"
+ const: "MULTI_STREAM"
+ default: "MULTI_STREAM"
+ enum:
+ - "MULTI_STREAM"
+ stream_schemas:
+ title: "Streams and Schemas"
+ description: "A Json object specifying multiple data streams and\
+ \ their schemas. Each key in this object is one stream name. Each\
+ \ value is the schema for that stream. The schema should be compatible\
+ \ with draft-07. See this doc for examples."
+ type: "string"
+ default: "{ \"stream1\": { \"type\": \"object\", \"properties\"\
+ : { \"field1\": { \"type\": \"string\" } } }, \"stream2\": { \"\
+ type\": \"object\", \"properties\": { \"field1\": { \"type\":\
+ \ \"boolean\" } } } }"
source-monday:
title: "Monday Spec"
type: "object"
@@ -43184,6 +48273,7 @@ components:
title: "Access Token"
description: "Access Token for making authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "API Token"
required:
@@ -43201,6 +48291,7 @@ components:
title: "Personal API Token"
description: "API Token for making authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "monday"
const: "monday"
@@ -43293,14 +48384,17 @@ components:
description: "Amplitude API Key. See the setup guide for more information on how to obtain this key."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
secret_key:
type: "string"
title: "Secret Key"
description: "Amplitude Secret Key. See the setup guide for more information on how to obtain this key."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
+ format: "date-time"
title: "Replication Start Date"
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
description: "UTC date and time in the format 2021-01-25T00:00:00Z. Any\
@@ -43353,6 +48447,7 @@ components:
airbyte_secret: true
start_date:
type: "string"
+ format: "date-time"
title: "Replication Start Date"
pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
description: "UTC date and time in the format 2021-01-25T00:00:00Z. Any\
@@ -43385,6 +48480,7 @@ components:
\ when using without API Key. Creating and using the API key therefore\
\ is recommended. The key is case sensitive."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
urls:
type: "array"
items:
@@ -43483,36 +48579,23 @@ components:
type: "object"
required:
- "replication_start_date"
+ - "api_token"
- "sourceType"
properties:
- authorization:
- type: "object"
- title: "API Key Authentication"
- required:
- - "auth_type"
- - "api_token"
- properties:
- auth_type:
- type: "string"
- const: "Token"
- order: 0
- enum:
- - "Token"
- api_token:
- title: "API Token"
- type: "string"
- description: "The Pipedrive API Token."
- airbyte_secret: true
+ api_token:
+ title: "API Token"
+ type: "string"
+ description: "The Pipedrive API Token."
+ airbyte_secret: true
+ x-speakeasy-param-sensitive: true
replication_start_date:
title: "Start Date"
description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
\ data before this date will not be replicated. When specified and not\
\ None, then stream will behave as incremental"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- - "2017-01-25T00:00:00Z"
+ - "2017-01-25 00:00:00Z"
type: "string"
- format: "date-time"
sourceType:
title: "pipedrive"
const: "pipedrive"
@@ -43525,35 +48608,21 @@ components:
type: "object"
required:
- "replication_start_date"
+ - "api_token"
properties:
- authorization:
- type: "object"
- title: "API Key Authentication"
- required:
- - "auth_type"
- - "api_token"
- properties:
- auth_type:
- type: "string"
- const: "Token"
- order: 0
- enum:
- - "Token"
- api_token:
- title: "API Token"
- type: "string"
- description: "The Pipedrive API Token."
- airbyte_secret: true
+ api_token:
+ title: "API Token"
+ type: "string"
+ description: "The Pipedrive API Token."
+ airbyte_secret: true
replication_start_date:
title: "Start Date"
description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
\ data before this date will not be replicated. When specified and not\
\ None, then stream will behave as incremental"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
examples:
- - "2017-01-25T00:00:00Z"
+ - "2017-01-25 00:00:00Z"
type: "string"
- format: "date-time"
source-amazon-ads:
title: "Amazon Ads Spec"
type: "object"
@@ -43572,6 +48641,7 @@ components:
>docs for more information."
order: 1
type: "string"
+ airbyte_secret: true
client_secret:
title: "Client Secret"
description: "The client secret of your Amazon Ads developer application.\
@@ -43587,6 +48657,7 @@ components:
airbyte_secret: true
order: 3
type: "string"
+ x-speakeasy-param-sensitive: true
region:
title: "Region"
description: "Region to pull data from (EU/NA/FE). See docs for more information."
order: 1
type: "string"
+ airbyte_secret: true
client_secret:
title: "Client Secret"
description: "The client secret of your Amazon Ads developer application.\
@@ -43731,6 +48805,8 @@ components:
title: "Start Date"
description: "The Start date for collecting reports, should not be more\
\ than 60 days in the past. In YYYY-MM-DD format"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ format: "date"
examples:
- "2022-10-10"
- "2022-10-22"
@@ -43817,6 +48893,7 @@ components:
description: "Your API Key. See here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "sendinblue"
const: "sendinblue"
@@ -43840,8 +48917,8 @@ components:
title: "GitHub Source Spec"
type: "object"
required:
- - "start_date"
- - "repository"
+ - "credentials"
+ - "repositories"
- "sourceType"
properties:
credentials:
@@ -43867,6 +48944,7 @@ components:
title: "Access Token"
description: "OAuth access token"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
client_id:
type: "string"
title: "Client Id"
@@ -43896,20 +48974,7 @@ components:
\ across multiple API tokens, input multiple tokens separated with\
\ \",\""
airbyte_secret: true
- start_date:
- type: "string"
- title: "Start date"
- description: "The date from which you'd like to replicate data from GitHub\
- \ in the format YYYY-MM-DDT00:00:00Z. For the streams which support this\
- \ configuration, only data generated on or after the start date will be\
- \ replicated. This field doesn't apply to all streams, see the docs for more\
- \ info"
- examples:
- - "2021-03-01T00:00:00Z"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
- order: 1
- format: "date-time"
+ x-speakeasy-param-sensitive: true
repository:
type: "string"
examples:
@@ -43917,23 +48982,77 @@ components:
- "airbytehq/*"
- "airbytehq/airbyte"
title: "GitHub Repositories"
- description: "Space-delimited list of GitHub organizations/repositories,\
+          description: "(DEPRECATED) Space-delimited list of GitHub organizations/repositories,\
\ e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get\
\ all repositories from organization and `airbytehq/airbyte airbytehq/another-repo`\
\ for multiple repositories."
- order: 2
+ airbyte_hidden: true
pattern: "^([\\w.-]+/(\\*|[\\w.-]+(?docs for more info"
+ examples:
+ - "2021-03-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ"
+ order: 2
+ format: "date-time"
+ api_url:
+ type: "string"
+ examples:
+ - "https://github.com"
+ - "https://github.company.org"
+ title: "API URL"
+ default: "https://api.github.com/"
+ description: "Please enter your basic URL from self-hosted GitHub instance\
+ \ or leave it empty to use GitHub."
+ order: 3
branch:
type: "string"
title: "Branch"
examples:
- "airbytehq/airbyte/master airbytehq/airbyte/my-branch"
- description: "Space-delimited list of GitHub repository branches to pull\
- \ commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified\
- \ for a repository, the default branch will be pulled."
- order: 3
+          description: "(DEPRECATED) Space-delimited list of GitHub repository branches\
+ \ to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches\
+ \ are specified for a repository, the default branch will be pulled."
+ airbyte_hidden: true
+ pattern_descriptor: "org/repo/branch1 org/repo/branch2"
+ branches:
+ type: "array"
+ items:
+ type: "string"
+ title: "Branches"
+ examples:
+ - "airbytehq/airbyte/master airbytehq/airbyte/my-branch"
+ description: "List of GitHub repository branches to pull commits for, e.g.\
+ \ `airbytehq/airbyte/master`. If no branches are specified for a repository,\
+ \ the default branch will be pulled."
+ order: 4
pattern_descriptor: "org/repo/branch1 org/repo/branch2"
requests_per_hour:
type: "integer"
@@ -43942,7 +49061,7 @@ components:
\ (15000 for Github Enterprise). You can specify a lower value to limit\
\ your use of the API quota."
minimum: 1
- order: 4
+ order: 5
sourceType:
title: "github"
const: "github"
@@ -43954,8 +49073,8 @@ components:
title: "GitHub Source Spec"
type: "object"
required:
- - "start_date"
- - "repository"
+ - "credentials"
+ - "repositories"
properties:
credentials:
title: "Authentication"
@@ -44009,20 +49128,6 @@ components:
\ across multiple API tokens, input multiple tokens separated with\
\ \",\""
airbyte_secret: true
- start_date:
- type: "string"
- title: "Start date"
- description: "The date from which you'd like to replicate data from GitHub\
- \ in the format YYYY-MM-DDT00:00:00Z. For the streams which support this\
- \ configuration, only data generated on or after the start date will be\
- \ replicated. This field doesn't apply to all streams, see the docs for more\
- \ info"
- examples:
- - "2021-03-01T00:00:00Z"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
- order: 1
- format: "date-time"
repository:
type: "string"
examples:
@@ -44030,23 +49135,77 @@ components:
- "airbytehq/*"
- "airbytehq/airbyte"
title: "GitHub Repositories"
- description: "Space-delimited list of GitHub organizations/repositories,\
+      description: "(DEPRECATED) Space-delimited list of GitHub organizations/repositories,\
\ e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get\
\ all repositories from organization and `airbytehq/airbyte airbytehq/another-repo`\
\ for multiple repositories."
- order: 2
+ airbyte_hidden: true
pattern: "^([\\w.-]+/(\\*|[\\w.-]+(?docs for more info"
+ examples:
+ - "2021-03-01T00:00:00Z"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$"
+ pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ"
+ order: 2
+ format: "date-time"
+ api_url:
+ type: "string"
+ examples:
+ - "https://github.com"
+ - "https://github.company.org"
+ title: "API URL"
+ default: "https://api.github.com/"
+ description: "Please enter your basic URL from self-hosted GitHub instance\
+ \ or leave it empty to use GitHub."
+ order: 3
branch:
type: "string"
title: "Branch"
examples:
- "airbytehq/airbyte/master airbytehq/airbyte/my-branch"
- description: "Space-delimited list of GitHub repository branches to pull\
- \ commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified\
- \ for a repository, the default branch will be pulled."
- order: 3
+      description: "(DEPRECATED) Space-delimited list of GitHub repository branches\
+ \ to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches\
+ \ are specified for a repository, the default branch will be pulled."
+ airbyte_hidden: true
+ pattern_descriptor: "org/repo/branch1 org/repo/branch2"
+ branches:
+ type: "array"
+ items:
+ type: "string"
+ title: "Branches"
+ examples:
+ - "airbytehq/airbyte/master airbytehq/airbyte/my-branch"
+ description: "List of GitHub repository branches to pull commits for, e.g.\
+ \ `airbytehq/airbyte/master`. If no branches are specified for a repository,\
+ \ the default branch will be pulled."
+ order: 4
pattern_descriptor: "org/repo/branch1 org/repo/branch2"
requests_per_hour:
type: "integer"
@@ -44055,7 +49214,7 @@ components:
\ (15000 for Github Enterprise). You can specify a lower value to limit\
\ your use of the API quota."
minimum: 1
- order: 4
+ order: 5
source-bigquery:
title: "BigQuery Source Spec"
type: "object"
@@ -44127,6 +49286,7 @@ components:
description: "Your API Access token. See here."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "vantage"
const: "vantage"
@@ -44166,6 +49326,7 @@ components:
title: "Password"
description: "Firebolt password."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
account:
type: "string"
title: "Account"
@@ -44307,6 +49468,7 @@ components:
title: "Refresh Token"
description: "The token for obtaining the new access token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
redirect_uri:
type: "string"
title: "Redirect URI"
@@ -44428,6 +49590,7 @@ components:
order: 5
group: "auth"
always_show: true
+ x-speakeasy-param-sensitive: true
jdbc_url_params:
description: "Additional properties to pass to the JDBC URL string when\
\ connecting to the database formatted as 'key=value' pairs separated\
@@ -44535,6 +49698,7 @@ components:
multiline: true
order: 3
always_show: true
+ x-speakeasy-param-sensitive: true
client_key_password:
type: "string"
title: "Client key password"
@@ -44542,6 +49706,7 @@ components:
\ password will be generated automatically."
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "verify-full"
additionalProperties: true
description: "This is the most secure mode. Always require encryption\
@@ -44579,6 +49744,7 @@ components:
multiline: true
order: 3
always_show: true
+ x-speakeasy-param-sensitive: true
client_key_password:
type: "string"
title: "Client key password"
@@ -44586,6 +49752,7 @@ components:
\ password will be generated automatically."
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
default: "require"
replication_method:
type: "object"
@@ -44757,6 +49924,7 @@ components:
airbyte_secret: true
multiline: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Password Authentication"
required:
- "tunnel_method"
@@ -44801,6 +49969,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
group: "security"
sourceType:
title: "alloydb"
@@ -45268,6 +50437,905 @@ components:
title: "Pokemon Name"
description: "Pokemon requested from the API."
pattern: "^[a-z0-9_\\-]+$"
+ enum:
+ - "bulbasaur"
+ - "ivysaur"
+ - "venusaur"
+ - "charmander"
+ - "charmeleon"
+ - "charizard"
+ - "squirtle"
+ - "wartortle"
+ - "blastoise"
+ - "caterpie"
+ - "metapod"
+ - "butterfree"
+ - "weedle"
+ - "kakuna"
+ - "beedrill"
+ - "pidgey"
+ - "pidgeotto"
+ - "pidgeot"
+ - "rattata"
+ - "raticate"
+ - "spearow"
+ - "fearow"
+ - "ekans"
+ - "arbok"
+ - "pikachu"
+ - "raichu"
+ - "sandshrew"
+ - "sandslash"
+ - "nidoranf"
+ - "nidorina"
+ - "nidoqueen"
+ - "nidoranm"
+ - "nidorino"
+ - "nidoking"
+ - "clefairy"
+ - "clefable"
+ - "vulpix"
+ - "ninetales"
+ - "jigglypuff"
+ - "wigglytuff"
+ - "zubat"
+ - "golbat"
+ - "oddish"
+ - "gloom"
+ - "vileplume"
+ - "paras"
+ - "parasect"
+ - "venonat"
+ - "venomoth"
+ - "diglett"
+ - "dugtrio"
+ - "meowth"
+ - "persian"
+ - "psyduck"
+ - "golduck"
+ - "mankey"
+ - "primeape"
+ - "growlithe"
+ - "arcanine"
+ - "poliwag"
+ - "poliwhirl"
+ - "poliwrath"
+ - "abra"
+ - "kadabra"
+ - "alakazam"
+ - "machop"
+ - "machoke"
+ - "machamp"
+ - "bellsprout"
+ - "weepinbell"
+ - "victreebel"
+ - "tentacool"
+ - "tentacruel"
+ - "geodude"
+ - "graveler"
+ - "golem"
+ - "ponyta"
+ - "rapidash"
+ - "slowpoke"
+ - "slowbro"
+ - "magnemite"
+ - "magneton"
+ - "farfetchd"
+ - "doduo"
+ - "dodrio"
+ - "seel"
+ - "dewgong"
+ - "grimer"
+ - "muk"
+ - "shellder"
+ - "cloyster"
+ - "gastly"
+ - "haunter"
+ - "gengar"
+ - "onix"
+ - "drowzee"
+ - "hypno"
+ - "krabby"
+ - "kingler"
+ - "voltorb"
+ - "electrode"
+ - "exeggcute"
+ - "exeggutor"
+ - "cubone"
+ - "marowak"
+ - "hitmonlee"
+ - "hitmonchan"
+ - "lickitung"
+ - "koffing"
+ - "weezing"
+ - "rhyhorn"
+ - "rhydon"
+ - "chansey"
+ - "tangela"
+ - "kangaskhan"
+ - "horsea"
+ - "seadra"
+ - "goldeen"
+ - "seaking"
+ - "staryu"
+ - "starmie"
+ - "mrmime"
+ - "scyther"
+ - "jynx"
+ - "electabuzz"
+ - "magmar"
+ - "pinsir"
+ - "tauros"
+ - "magikarp"
+ - "gyarados"
+ - "lapras"
+ - "ditto"
+ - "eevee"
+ - "vaporeon"
+ - "jolteon"
+ - "flareon"
+ - "porygon"
+ - "omanyte"
+ - "omastar"
+ - "kabuto"
+ - "kabutops"
+ - "aerodactyl"
+ - "snorlax"
+ - "articuno"
+ - "zapdos"
+ - "moltres"
+ - "dratini"
+ - "dragonair"
+ - "dragonite"
+ - "mewtwo"
+ - "mew"
+ - "chikorita"
+ - "bayleef"
+ - "meganium"
+ - "cyndaquil"
+ - "quilava"
+ - "typhlosion"
+ - "totodile"
+ - "croconaw"
+ - "feraligatr"
+ - "sentret"
+ - "furret"
+ - "hoothoot"
+ - "noctowl"
+ - "ledyba"
+ - "ledian"
+ - "spinarak"
+ - "ariados"
+ - "crobat"
+ - "chinchou"
+ - "lanturn"
+ - "pichu"
+ - "cleffa"
+ - "igglybuff"
+ - "togepi"
+ - "togetic"
+ - "natu"
+ - "xatu"
+ - "mareep"
+ - "flaaffy"
+ - "ampharos"
+ - "bellossom"
+ - "marill"
+ - "azumarill"
+ - "sudowoodo"
+ - "politoed"
+ - "hoppip"
+ - "skiploom"
+ - "jumpluff"
+ - "aipom"
+ - "sunkern"
+ - "sunflora"
+ - "yanma"
+ - "wooper"
+ - "quagsire"
+ - "espeon"
+ - "umbreon"
+ - "murkrow"
+ - "slowking"
+ - "misdreavus"
+ - "unown"
+ - "wobbuffet"
+ - "girafarig"
+ - "pineco"
+ - "forretress"
+ - "dunsparce"
+ - "gligar"
+ - "steelix"
+ - "snubbull"
+ - "granbull"
+ - "qwilfish"
+ - "scizor"
+ - "shuckle"
+ - "heracross"
+ - "sneasel"
+ - "teddiursa"
+ - "ursaring"
+ - "slugma"
+ - "magcargo"
+ - "swinub"
+ - "piloswine"
+ - "corsola"
+ - "remoraid"
+ - "octillery"
+ - "delibird"
+ - "mantine"
+ - "skarmory"
+ - "houndour"
+ - "houndoom"
+ - "kingdra"
+ - "phanpy"
+ - "donphan"
+ - "porygon2"
+ - "stantler"
+ - "smeargle"
+ - "tyrogue"
+ - "hitmontop"
+ - "smoochum"
+ - "elekid"
+ - "magby"
+ - "miltank"
+ - "blissey"
+ - "raikou"
+ - "entei"
+ - "suicune"
+ - "larvitar"
+ - "pupitar"
+ - "tyranitar"
+ - "lugia"
+ - "ho-oh"
+ - "celebi"
+ - "treecko"
+ - "grovyle"
+ - "sceptile"
+ - "torchic"
+ - "combusken"
+ - "blaziken"
+ - "mudkip"
+ - "marshtomp"
+ - "swampert"
+ - "poochyena"
+ - "mightyena"
+ - "zigzagoon"
+ - "linoone"
+ - "wurmple"
+ - "silcoon"
+ - "beautifly"
+ - "cascoon"
+ - "dustox"
+ - "lotad"
+ - "lombre"
+ - "ludicolo"
+ - "seedot"
+ - "nuzleaf"
+ - "shiftry"
+ - "taillow"
+ - "swellow"
+ - "wingull"
+ - "pelipper"
+ - "ralts"
+ - "kirlia"
+ - "gardevoir"
+ - "surskit"
+ - "masquerain"
+ - "shroomish"
+ - "breloom"
+ - "slakoth"
+ - "vigoroth"
+ - "slaking"
+ - "nincada"
+ - "ninjask"
+ - "shedinja"
+ - "whismur"
+ - "loudred"
+ - "exploud"
+ - "makuhita"
+ - "hariyama"
+ - "azurill"
+ - "nosepass"
+ - "skitty"
+ - "delcatty"
+ - "sableye"
+ - "mawile"
+ - "aron"
+ - "lairon"
+ - "aggron"
+ - "meditite"
+ - "medicham"
+ - "electrike"
+ - "manectric"
+ - "plusle"
+ - "minun"
+ - "volbeat"
+ - "illumise"
+ - "roselia"
+ - "gulpin"
+ - "swalot"
+ - "carvanha"
+ - "sharpedo"
+ - "wailmer"
+ - "wailord"
+ - "numel"
+ - "camerupt"
+ - "torkoal"
+ - "spoink"
+ - "grumpig"
+ - "spinda"
+ - "trapinch"
+ - "vibrava"
+ - "flygon"
+ - "cacnea"
+ - "cacturne"
+ - "swablu"
+ - "altaria"
+ - "zangoose"
+ - "seviper"
+ - "lunatone"
+ - "solrock"
+ - "barboach"
+ - "whiscash"
+ - "corphish"
+ - "crawdaunt"
+ - "baltoy"
+ - "claydol"
+ - "lileep"
+ - "cradily"
+ - "anorith"
+ - "armaldo"
+ - "feebas"
+ - "milotic"
+ - "castform"
+ - "kecleon"
+ - "shuppet"
+ - "banette"
+ - "duskull"
+ - "dusclops"
+ - "tropius"
+ - "chimecho"
+ - "absol"
+ - "wynaut"
+ - "snorunt"
+ - "glalie"
+ - "spheal"
+ - "sealeo"
+ - "walrein"
+ - "clamperl"
+ - "huntail"
+ - "gorebyss"
+ - "relicanth"
+ - "luvdisc"
+ - "bagon"
+ - "shelgon"
+ - "salamence"
+ - "beldum"
+ - "metang"
+ - "metagross"
+ - "regirock"
+ - "regice"
+ - "registeel"
+ - "latias"
+ - "latios"
+ - "kyogre"
+ - "groudon"
+ - "rayquaza"
+ - "jirachi"
+ - "deoxys"
+ - "turtwig"
+ - "grotle"
+ - "torterra"
+ - "chimchar"
+ - "monferno"
+ - "infernape"
+ - "piplup"
+ - "prinplup"
+ - "empoleon"
+ - "starly"
+ - "staravia"
+ - "staraptor"
+ - "bidoof"
+ - "bibarel"
+ - "kricketot"
+ - "kricketune"
+ - "shinx"
+ - "luxio"
+ - "luxray"
+ - "budew"
+ - "roserade"
+ - "cranidos"
+ - "rampardos"
+ - "shieldon"
+ - "bastiodon"
+ - "burmy"
+ - "wormadam"
+ - "mothim"
+ - "combee"
+ - "vespiquen"
+ - "pachirisu"
+ - "buizel"
+ - "floatzel"
+ - "cherubi"
+ - "cherrim"
+ - "shellos"
+ - "gastrodon"
+ - "ambipom"
+ - "drifloon"
+ - "drifblim"
+ - "buneary"
+ - "lopunny"
+ - "mismagius"
+ - "honchkrow"
+ - "glameow"
+ - "purugly"
+ - "chingling"
+ - "stunky"
+ - "skuntank"
+ - "bronzor"
+ - "bronzong"
+ - "bonsly"
+ - "mimejr"
+ - "happiny"
+ - "chatot"
+ - "spiritomb"
+ - "gible"
+ - "gabite"
+ - "garchomp"
+ - "munchlax"
+ - "riolu"
+ - "lucario"
+ - "hippopotas"
+ - "hippowdon"
+ - "skorupi"
+ - "drapion"
+ - "croagunk"
+ - "toxicroak"
+ - "carnivine"
+ - "finneon"
+ - "lumineon"
+ - "mantyke"
+ - "snover"
+ - "abomasnow"
+ - "weavile"
+ - "magnezone"
+ - "lickilicky"
+ - "rhyperior"
+ - "tangrowth"
+ - "electivire"
+ - "magmortar"
+ - "togekiss"
+ - "yanmega"
+ - "leafeon"
+ - "glaceon"
+ - "gliscor"
+ - "mamoswine"
+ - "porygon-z"
+ - "gallade"
+ - "probopass"
+ - "dusknoir"
+ - "froslass"
+ - "rotom"
+ - "uxie"
+ - "mesprit"
+ - "azelf"
+ - "dialga"
+ - "palkia"
+ - "heatran"
+ - "regigigas"
+ - "giratina"
+ - "cresselia"
+ - "phione"
+ - "manaphy"
+ - "darkrai"
+ - "shaymin"
+ - "arceus"
+ - "victini"
+ - "snivy"
+ - "servine"
+ - "serperior"
+ - "tepig"
+ - "pignite"
+ - "emboar"
+ - "oshawott"
+ - "dewott"
+ - "samurott"
+ - "patrat"
+ - "watchog"
+ - "lillipup"
+ - "herdier"
+ - "stoutland"
+ - "purrloin"
+ - "liepard"
+ - "pansage"
+ - "simisage"
+ - "pansear"
+ - "simisear"
+ - "panpour"
+ - "simipour"
+ - "munna"
+ - "musharna"
+ - "pidove"
+ - "tranquill"
+ - "unfezant"
+ - "blitzle"
+ - "zebstrika"
+ - "roggenrola"
+ - "boldore"
+ - "gigalith"
+ - "woobat"
+ - "swoobat"
+ - "drilbur"
+ - "excadrill"
+ - "audino"
+ - "timburr"
+ - "gurdurr"
+ - "conkeldurr"
+ - "tympole"
+ - "palpitoad"
+ - "seismitoad"
+ - "throh"
+ - "sawk"
+ - "sewaddle"
+ - "swadloon"
+ - "leavanny"
+ - "venipede"
+ - "whirlipede"
+ - "scolipede"
+ - "cottonee"
+ - "whimsicott"
+ - "petilil"
+ - "lilligant"
+ - "basculin"
+ - "sandile"
+ - "krokorok"
+ - "krookodile"
+ - "darumaka"
+ - "darmanitan"
+ - "maractus"
+ - "dwebble"
+ - "crustle"
+ - "scraggy"
+ - "scrafty"
+ - "sigilyph"
+ - "yamask"
+ - "cofagrigus"
+ - "tirtouga"
+ - "carracosta"
+ - "archen"
+ - "archeops"
+ - "trubbish"
+ - "garbodor"
+ - "zorua"
+ - "zoroark"
+ - "minccino"
+ - "cinccino"
+ - "gothita"
+ - "gothorita"
+ - "gothitelle"
+ - "solosis"
+ - "duosion"
+ - "reuniclus"
+ - "ducklett"
+ - "swanna"
+ - "vanillite"
+ - "vanillish"
+ - "vanilluxe"
+ - "deerling"
+ - "sawsbuck"
+ - "emolga"
+ - "karrablast"
+ - "escavalier"
+ - "foongus"
+ - "amoonguss"
+ - "frillish"
+ - "jellicent"
+ - "alomomola"
+ - "joltik"
+ - "galvantula"
+ - "ferroseed"
+ - "ferrothorn"
+ - "klink"
+ - "klang"
+ - "klinklang"
+ - "tynamo"
+ - "eelektrik"
+ - "eelektross"
+ - "elgyem"
+ - "beheeyem"
+ - "litwick"
+ - "lampent"
+ - "chandelure"
+ - "axew"
+ - "fraxure"
+ - "haxorus"
+ - "cubchoo"
+ - "beartic"
+ - "cryogonal"
+ - "shelmet"
+ - "accelgor"
+ - "stunfisk"
+ - "mienfoo"
+ - "mienshao"
+ - "druddigon"
+ - "golett"
+ - "golurk"
+ - "pawniard"
+ - "bisharp"
+ - "bouffalant"
+ - "rufflet"
+ - "braviary"
+ - "vullaby"
+ - "mandibuzz"
+ - "heatmor"
+ - "durant"
+ - "deino"
+ - "zweilous"
+ - "hydreigon"
+ - "larvesta"
+ - "volcarona"
+ - "cobalion"
+ - "terrakion"
+ - "virizion"
+ - "tornadus"
+ - "thundurus"
+ - "reshiram"
+ - "zekrom"
+ - "landorus"
+ - "kyurem"
+ - "keldeo"
+ - "meloetta"
+ - "genesect"
+ - "chespin"
+ - "quilladin"
+ - "chesnaught"
+ - "fennekin"
+ - "braixen"
+ - "delphox"
+ - "froakie"
+ - "frogadier"
+ - "greninja"
+ - "bunnelby"
+ - "diggersby"
+ - "fletchling"
+ - "fletchinder"
+ - "talonflame"
+ - "scatterbug"
+ - "spewpa"
+ - "vivillon"
+ - "litleo"
+ - "pyroar"
+ - "flabebe"
+ - "floette"
+ - "florges"
+ - "skiddo"
+ - "gogoat"
+ - "pancham"
+ - "pangoro"
+ - "furfrou"
+ - "espurr"
+ - "meowstic"
+ - "honedge"
+ - "doublade"
+ - "aegislash"
+ - "spritzee"
+ - "aromatisse"
+ - "swirlix"
+ - "slurpuff"
+ - "inkay"
+ - "malamar"
+ - "binacle"
+ - "barbaracle"
+ - "skrelp"
+ - "dragalge"
+ - "clauncher"
+ - "clawitzer"
+ - "helioptile"
+ - "heliolisk"
+ - "tyrunt"
+ - "tyrantrum"
+ - "amaura"
+ - "aurorus"
+ - "sylveon"
+ - "hawlucha"
+ - "dedenne"
+ - "carbink"
+ - "goomy"
+ - "sliggoo"
+ - "goodra"
+ - "klefki"
+ - "phantump"
+ - "trevenant"
+ - "pumpkaboo"
+ - "gourgeist"
+ - "bergmite"
+ - "avalugg"
+ - "noibat"
+ - "noivern"
+ - "xerneas"
+ - "yveltal"
+ - "zygarde"
+ - "diancie"
+ - "hoopa"
+ - "volcanion"
+ - "rowlet"
+ - "dartrix"
+ - "decidueye"
+ - "litten"
+ - "torracat"
+ - "incineroar"
+ - "popplio"
+ - "brionne"
+ - "primarina"
+ - "pikipek"
+ - "trumbeak"
+ - "toucannon"
+ - "yungoos"
+ - "gumshoos"
+ - "grubbin"
+ - "charjabug"
+ - "vikavolt"
+ - "crabrawler"
+ - "crabominable"
+ - "oricorio"
+ - "cutiefly"
+ - "ribombee"
+ - "rockruff"
+ - "lycanroc"
+ - "wishiwashi"
+ - "mareanie"
+ - "toxapex"
+ - "mudbray"
+ - "mudsdale"
+ - "dewpider"
+ - "araquanid"
+ - "fomantis"
+ - "lurantis"
+ - "morelull"
+ - "shiinotic"
+ - "salandit"
+ - "salazzle"
+ - "stufful"
+ - "bewear"
+ - "bounsweet"
+ - "steenee"
+ - "tsareena"
+ - "comfey"
+ - "oranguru"
+ - "passimian"
+ - "wimpod"
+ - "golisopod"
+ - "sandygast"
+ - "palossand"
+ - "pyukumuku"
+ - "typenull"
+ - "silvally"
+ - "minior"
+ - "komala"
+ - "turtonator"
+ - "togedemaru"
+ - "mimikyu"
+ - "bruxish"
+ - "drampa"
+ - "dhelmise"
+ - "jangmo-o"
+ - "hakamo-o"
+ - "kommo-o"
+ - "tapukoko"
+ - "tapulele"
+ - "tapubulu"
+ - "tapufini"
+ - "cosmog"
+ - "cosmoem"
+ - "solgaleo"
+ - "lunala"
+ - "nihilego"
+ - "buzzwole"
+ - "pheromosa"
+ - "xurkitree"
+ - "celesteela"
+ - "kartana"
+ - "guzzlord"
+ - "necrozma"
+ - "magearna"
+ - "marshadow"
+ - "poipole"
+ - "naganadel"
+ - "stakataka"
+ - "blacephalon"
+ - "zeraora"
+ - "meltan"
+ - "melmetal"
+ - "grookey"
+ - "thwackey"
+ - "rillaboom"
+ - "scorbunny"
+ - "raboot"
+ - "cinderace"
+ - "sobble"
+ - "drizzile"
+ - "inteleon"
+ - "skwovet"
+ - "greedent"
+ - "rookidee"
+ - "corvisquire"
+ - "corviknight"
+ - "blipbug"
+ - "dottler"
+ - "orbeetle"
+ - "nickit"
+ - "thievul"
+ - "gossifleur"
+ - "eldegoss"
+ - "wooloo"
+ - "dubwool"
+ - "chewtle"
+ - "drednaw"
+ - "yamper"
+ - "boltund"
+ - "rolycoly"
+ - "carkol"
+ - "coalossal"
+ - "applin"
+ - "flapple"
+ - "appletun"
+ - "silicobra"
+ - "sandaconda"
+ - "cramorant"
+ - "arrokuda"
+ - "barraskewda"
+ - "toxel"
+ - "toxtricity"
+ - "sizzlipede"
+ - "centiskorch"
+ - "clobbopus"
+ - "grapploct"
+ - "sinistea"
+ - "polteageist"
+ - "hatenna"
+ - "hattrem"
+ - "hatterene"
+ - "impidimp"
+ - "morgrem"
+ - "grimmsnarl"
+ - "obstagoon"
+ - "perrserker"
+ - "cursola"
+ - "sirfetchd"
+ - "mrrime"
+ - "runerigus"
+ - "milcery"
+ - "alcremie"
+ - "falinks"
+ - "pincurchin"
+ - "snom"
+ - "frosmoth"
+ - "stonjourner"
+ - "eiscue"
+ - "indeedee"
+ - "morpeko"
+ - "cufant"
+ - "copperajah"
+ - "dracozolt"
+ - "arctozolt"
+ - "dracovish"
+ - "arctovish"
+ - "duraludon"
+ - "dreepy"
+ - "drakloak"
+ - "dragapult"
+ - "zacian"
+ - "zamazenta"
+ - "eternatus"
+ - "kubfu"
+ - "urshifu"
+ - "zarude"
+ - "regieleki"
+ - "regidrago"
+ - "glastrier"
+ - "spectrier"
+ - "calyrex"
examples:
- "ditto"
- "luxray"
@@ -45290,6 +51358,905 @@ components:
title: "Pokemon Name"
description: "Pokemon requested from the API."
pattern: "^[a-z0-9_\\-]+$"
+ enum:
+ - "bulbasaur"
+ - "ivysaur"
+ - "venusaur"
+ - "charmander"
+ - "charmeleon"
+ - "charizard"
+ - "squirtle"
+ - "wartortle"
+ - "blastoise"
+ - "caterpie"
+ - "metapod"
+ - "butterfree"
+ - "weedle"
+ - "kakuna"
+ - "beedrill"
+ - "pidgey"
+ - "pidgeotto"
+ - "pidgeot"
+ - "rattata"
+ - "raticate"
+ - "spearow"
+ - "fearow"
+ - "ekans"
+ - "arbok"
+ - "pikachu"
+ - "raichu"
+ - "sandshrew"
+ - "sandslash"
+ - "nidoranf"
+ - "nidorina"
+ - "nidoqueen"
+ - "nidoranm"
+ - "nidorino"
+ - "nidoking"
+ - "clefairy"
+ - "clefable"
+ - "vulpix"
+ - "ninetales"
+ - "jigglypuff"
+ - "wigglytuff"
+ - "zubat"
+ - "golbat"
+ - "oddish"
+ - "gloom"
+ - "vileplume"
+ - "paras"
+ - "parasect"
+ - "venonat"
+ - "venomoth"
+ - "diglett"
+ - "dugtrio"
+ - "meowth"
+ - "persian"
+ - "psyduck"
+ - "golduck"
+ - "mankey"
+ - "primeape"
+ - "growlithe"
+ - "arcanine"
+ - "poliwag"
+ - "poliwhirl"
+ - "poliwrath"
+ - "abra"
+ - "kadabra"
+ - "alakazam"
+ - "machop"
+ - "machoke"
+ - "machamp"
+ - "bellsprout"
+ - "weepinbell"
+ - "victreebel"
+ - "tentacool"
+ - "tentacruel"
+ - "geodude"
+ - "graveler"
+ - "golem"
+ - "ponyta"
+ - "rapidash"
+ - "slowpoke"
+ - "slowbro"
+ - "magnemite"
+ - "magneton"
+ - "farfetchd"
+ - "doduo"
+ - "dodrio"
+ - "seel"
+ - "dewgong"
+ - "grimer"
+ - "muk"
+ - "shellder"
+ - "cloyster"
+ - "gastly"
+ - "haunter"
+ - "gengar"
+ - "onix"
+ - "drowzee"
+ - "hypno"
+ - "krabby"
+ - "kingler"
+ - "voltorb"
+ - "electrode"
+ - "exeggcute"
+ - "exeggutor"
+ - "cubone"
+ - "marowak"
+ - "hitmonlee"
+ - "hitmonchan"
+ - "lickitung"
+ - "koffing"
+ - "weezing"
+ - "rhyhorn"
+ - "rhydon"
+ - "chansey"
+ - "tangela"
+ - "kangaskhan"
+ - "horsea"
+ - "seadra"
+ - "goldeen"
+ - "seaking"
+ - "staryu"
+ - "starmie"
+ - "mrmime"
+ - "scyther"
+ - "jynx"
+ - "electabuzz"
+ - "magmar"
+ - "pinsir"
+ - "tauros"
+ - "magikarp"
+ - "gyarados"
+ - "lapras"
+ - "ditto"
+ - "eevee"
+ - "vaporeon"
+ - "jolteon"
+ - "flareon"
+ - "porygon"
+ - "omanyte"
+ - "omastar"
+ - "kabuto"
+ - "kabutops"
+ - "aerodactyl"
+ - "snorlax"
+ - "articuno"
+ - "zapdos"
+ - "moltres"
+ - "dratini"
+ - "dragonair"
+ - "dragonite"
+ - "mewtwo"
+ - "mew"
+ - "chikorita"
+ - "bayleef"
+ - "meganium"
+ - "cyndaquil"
+ - "quilava"
+ - "typhlosion"
+ - "totodile"
+ - "croconaw"
+ - "feraligatr"
+ - "sentret"
+ - "furret"
+ - "hoothoot"
+ - "noctowl"
+ - "ledyba"
+ - "ledian"
+ - "spinarak"
+ - "ariados"
+ - "crobat"
+ - "chinchou"
+ - "lanturn"
+ - "pichu"
+ - "cleffa"
+ - "igglybuff"
+ - "togepi"
+ - "togetic"
+ - "natu"
+ - "xatu"
+ - "mareep"
+ - "flaaffy"
+ - "ampharos"
+ - "bellossom"
+ - "marill"
+ - "azumarill"
+ - "sudowoodo"
+ - "politoed"
+ - "hoppip"
+ - "skiploom"
+ - "jumpluff"
+ - "aipom"
+ - "sunkern"
+ - "sunflora"
+ - "yanma"
+ - "wooper"
+ - "quagsire"
+ - "espeon"
+ - "umbreon"
+ - "murkrow"
+ - "slowking"
+ - "misdreavus"
+ - "unown"
+ - "wobbuffet"
+ - "girafarig"
+ - "pineco"
+ - "forretress"
+ - "dunsparce"
+ - "gligar"
+ - "steelix"
+ - "snubbull"
+ - "granbull"
+ - "qwilfish"
+ - "scizor"
+ - "shuckle"
+ - "heracross"
+ - "sneasel"
+ - "teddiursa"
+ - "ursaring"
+ - "slugma"
+ - "magcargo"
+ - "swinub"
+ - "piloswine"
+ - "corsola"
+ - "remoraid"
+ - "octillery"
+ - "delibird"
+ - "mantine"
+ - "skarmory"
+ - "houndour"
+ - "houndoom"
+ - "kingdra"
+ - "phanpy"
+ - "donphan"
+ - "porygon2"
+ - "stantler"
+ - "smeargle"
+ - "tyrogue"
+ - "hitmontop"
+ - "smoochum"
+ - "elekid"
+ - "magby"
+ - "miltank"
+ - "blissey"
+ - "raikou"
+ - "entei"
+ - "suicune"
+ - "larvitar"
+ - "pupitar"
+ - "tyranitar"
+ - "lugia"
+ - "ho-oh"
+ - "celebi"
+ - "treecko"
+ - "grovyle"
+ - "sceptile"
+ - "torchic"
+ - "combusken"
+ - "blaziken"
+ - "mudkip"
+ - "marshtomp"
+ - "swampert"
+ - "poochyena"
+ - "mightyena"
+ - "zigzagoon"
+ - "linoone"
+ - "wurmple"
+ - "silcoon"
+ - "beautifly"
+ - "cascoon"
+ - "dustox"
+ - "lotad"
+ - "lombre"
+ - "ludicolo"
+ - "seedot"
+ - "nuzleaf"
+ - "shiftry"
+ - "taillow"
+ - "swellow"
+ - "wingull"
+ - "pelipper"
+ - "ralts"
+ - "kirlia"
+ - "gardevoir"
+ - "surskit"
+ - "masquerain"
+ - "shroomish"
+ - "breloom"
+ - "slakoth"
+ - "vigoroth"
+ - "slaking"
+ - "nincada"
+ - "ninjask"
+ - "shedinja"
+ - "whismur"
+ - "loudred"
+ - "exploud"
+ - "makuhita"
+ - "hariyama"
+ - "azurill"
+ - "nosepass"
+ - "skitty"
+ - "delcatty"
+ - "sableye"
+ - "mawile"
+ - "aron"
+ - "lairon"
+ - "aggron"
+ - "meditite"
+ - "medicham"
+ - "electrike"
+ - "manectric"
+ - "plusle"
+ - "minun"
+ - "volbeat"
+ - "illumise"
+ - "roselia"
+ - "gulpin"
+ - "swalot"
+ - "carvanha"
+ - "sharpedo"
+ - "wailmer"
+ - "wailord"
+ - "numel"
+ - "camerupt"
+ - "torkoal"
+ - "spoink"
+ - "grumpig"
+ - "spinda"
+ - "trapinch"
+ - "vibrava"
+ - "flygon"
+ - "cacnea"
+ - "cacturne"
+ - "swablu"
+ - "altaria"
+ - "zangoose"
+ - "seviper"
+ - "lunatone"
+ - "solrock"
+ - "barboach"
+ - "whiscash"
+ - "corphish"
+ - "crawdaunt"
+ - "baltoy"
+ - "claydol"
+ - "lileep"
+ - "cradily"
+ - "anorith"
+ - "armaldo"
+ - "feebas"
+ - "milotic"
+ - "castform"
+ - "kecleon"
+ - "shuppet"
+ - "banette"
+ - "duskull"
+ - "dusclops"
+ - "tropius"
+ - "chimecho"
+ - "absol"
+ - "wynaut"
+ - "snorunt"
+ - "glalie"
+ - "spheal"
+ - "sealeo"
+ - "walrein"
+ - "clamperl"
+ - "huntail"
+ - "gorebyss"
+ - "relicanth"
+ - "luvdisc"
+ - "bagon"
+ - "shelgon"
+ - "salamence"
+ - "beldum"
+ - "metang"
+ - "metagross"
+ - "regirock"
+ - "regice"
+ - "registeel"
+ - "latias"
+ - "latios"
+ - "kyogre"
+ - "groudon"
+ - "rayquaza"
+ - "jirachi"
+ - "deoxys"
+ - "turtwig"
+ - "grotle"
+ - "torterra"
+ - "chimchar"
+ - "monferno"
+ - "infernape"
+ - "piplup"
+ - "prinplup"
+ - "empoleon"
+ - "starly"
+ - "staravia"
+ - "staraptor"
+ - "bidoof"
+ - "bibarel"
+ - "kricketot"
+ - "kricketune"
+ - "shinx"
+ - "luxio"
+ - "luxray"
+ - "budew"
+ - "roserade"
+ - "cranidos"
+ - "rampardos"
+ - "shieldon"
+ - "bastiodon"
+ - "burmy"
+ - "wormadam"
+ - "mothim"
+ - "combee"
+ - "vespiquen"
+ - "pachirisu"
+ - "buizel"
+ - "floatzel"
+ - "cherubi"
+ - "cherrim"
+ - "shellos"
+ - "gastrodon"
+ - "ambipom"
+ - "drifloon"
+ - "drifblim"
+ - "buneary"
+ - "lopunny"
+ - "mismagius"
+ - "honchkrow"
+ - "glameow"
+ - "purugly"
+ - "chingling"
+ - "stunky"
+ - "skuntank"
+ - "bronzor"
+ - "bronzong"
+ - "bonsly"
+ - "mimejr"
+ - "happiny"
+ - "chatot"
+ - "spiritomb"
+ - "gible"
+ - "gabite"
+ - "garchomp"
+ - "munchlax"
+ - "riolu"
+ - "lucario"
+ - "hippopotas"
+ - "hippowdon"
+ - "skorupi"
+ - "drapion"
+ - "croagunk"
+ - "toxicroak"
+ - "carnivine"
+ - "finneon"
+ - "lumineon"
+ - "mantyke"
+ - "snover"
+ - "abomasnow"
+ - "weavile"
+ - "magnezone"
+ - "lickilicky"
+ - "rhyperior"
+ - "tangrowth"
+ - "electivire"
+ - "magmortar"
+ - "togekiss"
+ - "yanmega"
+ - "leafeon"
+ - "glaceon"
+ - "gliscor"
+ - "mamoswine"
+ - "porygon-z"
+ - "gallade"
+ - "probopass"
+ - "dusknoir"
+ - "froslass"
+ - "rotom"
+ - "uxie"
+ - "mesprit"
+ - "azelf"
+ - "dialga"
+ - "palkia"
+ - "heatran"
+ - "regigigas"
+ - "giratina"
+ - "cresselia"
+ - "phione"
+ - "manaphy"
+ - "darkrai"
+ - "shaymin"
+ - "arceus"
+ - "victini"
+ - "snivy"
+ - "servine"
+ - "serperior"
+ - "tepig"
+ - "pignite"
+ - "emboar"
+ - "oshawott"
+ - "dewott"
+ - "samurott"
+ - "patrat"
+ - "watchog"
+ - "lillipup"
+ - "herdier"
+ - "stoutland"
+ - "purrloin"
+ - "liepard"
+ - "pansage"
+ - "simisage"
+ - "pansear"
+ - "simisear"
+ - "panpour"
+ - "simipour"
+ - "munna"
+ - "musharna"
+ - "pidove"
+ - "tranquill"
+ - "unfezant"
+ - "blitzle"
+ - "zebstrika"
+ - "roggenrola"
+ - "boldore"
+ - "gigalith"
+ - "woobat"
+ - "swoobat"
+ - "drilbur"
+ - "excadrill"
+ - "audino"
+ - "timburr"
+ - "gurdurr"
+ - "conkeldurr"
+ - "tympole"
+ - "palpitoad"
+ - "seismitoad"
+ - "throh"
+ - "sawk"
+ - "sewaddle"
+ - "swadloon"
+ - "leavanny"
+ - "venipede"
+ - "whirlipede"
+ - "scolipede"
+ - "cottonee"
+ - "whimsicott"
+ - "petilil"
+ - "lilligant"
+ - "basculin"
+ - "sandile"
+ - "krokorok"
+ - "krookodile"
+ - "darumaka"
+ - "darmanitan"
+ - "maractus"
+ - "dwebble"
+ - "crustle"
+ - "scraggy"
+ - "scrafty"
+ - "sigilyph"
+ - "yamask"
+ - "cofagrigus"
+ - "tirtouga"
+ - "carracosta"
+ - "archen"
+ - "archeops"
+ - "trubbish"
+ - "garbodor"
+ - "zorua"
+ - "zoroark"
+ - "minccino"
+ - "cinccino"
+ - "gothita"
+ - "gothorita"
+ - "gothitelle"
+ - "solosis"
+ - "duosion"
+ - "reuniclus"
+ - "ducklett"
+ - "swanna"
+ - "vanillite"
+ - "vanillish"
+ - "vanilluxe"
+ - "deerling"
+ - "sawsbuck"
+ - "emolga"
+ - "karrablast"
+ - "escavalier"
+ - "foongus"
+ - "amoonguss"
+ - "frillish"
+ - "jellicent"
+ - "alomomola"
+ - "joltik"
+ - "galvantula"
+ - "ferroseed"
+ - "ferrothorn"
+ - "klink"
+ - "klang"
+ - "klinklang"
+ - "tynamo"
+ - "eelektrik"
+ - "eelektross"
+ - "elgyem"
+ - "beheeyem"
+ - "litwick"
+ - "lampent"
+ - "chandelure"
+ - "axew"
+ - "fraxure"
+ - "haxorus"
+ - "cubchoo"
+ - "beartic"
+ - "cryogonal"
+ - "shelmet"
+ - "accelgor"
+ - "stunfisk"
+ - "mienfoo"
+ - "mienshao"
+ - "druddigon"
+ - "golett"
+ - "golurk"
+ - "pawniard"
+ - "bisharp"
+ - "bouffalant"
+ - "rufflet"
+ - "braviary"
+ - "vullaby"
+ - "mandibuzz"
+ - "heatmor"
+ - "durant"
+ - "deino"
+ - "zweilous"
+ - "hydreigon"
+ - "larvesta"
+ - "volcarona"
+ - "cobalion"
+ - "terrakion"
+ - "virizion"
+ - "tornadus"
+ - "thundurus"
+ - "reshiram"
+ - "zekrom"
+ - "landorus"
+ - "kyurem"
+ - "keldeo"
+ - "meloetta"
+ - "genesect"
+ - "chespin"
+ - "quilladin"
+ - "chesnaught"
+ - "fennekin"
+ - "braixen"
+ - "delphox"
+ - "froakie"
+ - "frogadier"
+ - "greninja"
+ - "bunnelby"
+ - "diggersby"
+ - "fletchling"
+ - "fletchinder"
+ - "talonflame"
+ - "scatterbug"
+ - "spewpa"
+ - "vivillon"
+ - "litleo"
+ - "pyroar"
+ - "flabebe"
+ - "floette"
+ - "florges"
+ - "skiddo"
+ - "gogoat"
+ - "pancham"
+ - "pangoro"
+ - "furfrou"
+ - "espurr"
+ - "meowstic"
+ - "honedge"
+ - "doublade"
+ - "aegislash"
+ - "spritzee"
+ - "aromatisse"
+ - "swirlix"
+ - "slurpuff"
+ - "inkay"
+ - "malamar"
+ - "binacle"
+ - "barbaracle"
+ - "skrelp"
+ - "dragalge"
+ - "clauncher"
+ - "clawitzer"
+ - "helioptile"
+ - "heliolisk"
+ - "tyrunt"
+ - "tyrantrum"
+ - "amaura"
+ - "aurorus"
+ - "sylveon"
+ - "hawlucha"
+ - "dedenne"
+ - "carbink"
+ - "goomy"
+ - "sliggoo"
+ - "goodra"
+ - "klefki"
+ - "phantump"
+ - "trevenant"
+ - "pumpkaboo"
+ - "gourgeist"
+ - "bergmite"
+ - "avalugg"
+ - "noibat"
+ - "noivern"
+ - "xerneas"
+ - "yveltal"
+ - "zygarde"
+ - "diancie"
+ - "hoopa"
+ - "volcanion"
+ - "rowlet"
+ - "dartrix"
+ - "decidueye"
+ - "litten"
+ - "torracat"
+ - "incineroar"
+ - "popplio"
+ - "brionne"
+ - "primarina"
+ - "pikipek"
+ - "trumbeak"
+ - "toucannon"
+ - "yungoos"
+ - "gumshoos"
+ - "grubbin"
+ - "charjabug"
+ - "vikavolt"
+ - "crabrawler"
+ - "crabominable"
+ - "oricorio"
+ - "cutiefly"
+ - "ribombee"
+ - "rockruff"
+ - "lycanroc"
+ - "wishiwashi"
+ - "mareanie"
+ - "toxapex"
+ - "mudbray"
+ - "mudsdale"
+ - "dewpider"
+ - "araquanid"
+ - "fomantis"
+ - "lurantis"
+ - "morelull"
+ - "shiinotic"
+ - "salandit"
+ - "salazzle"
+ - "stufful"
+ - "bewear"
+ - "bounsweet"
+ - "steenee"
+ - "tsareena"
+ - "comfey"
+ - "oranguru"
+ - "passimian"
+ - "wimpod"
+ - "golisopod"
+ - "sandygast"
+ - "palossand"
+ - "pyukumuku"
+ - "typenull"
+ - "silvally"
+ - "minior"
+ - "komala"
+ - "turtonator"
+ - "togedemaru"
+ - "mimikyu"
+ - "bruxish"
+ - "drampa"
+ - "dhelmise"
+ - "jangmo-o"
+ - "hakamo-o"
+ - "kommo-o"
+ - "tapukoko"
+ - "tapulele"
+ - "tapubulu"
+ - "tapufini"
+ - "cosmog"
+ - "cosmoem"
+ - "solgaleo"
+ - "lunala"
+ - "nihilego"
+ - "buzzwole"
+ - "pheromosa"
+ - "xurkitree"
+ - "celesteela"
+ - "kartana"
+ - "guzzlord"
+ - "necrozma"
+ - "magearna"
+ - "marshadow"
+ - "poipole"
+ - "naganadel"
+ - "stakataka"
+ - "blacephalon"
+ - "zeraora"
+ - "meltan"
+ - "melmetal"
+ - "grookey"
+ - "thwackey"
+ - "rillaboom"
+ - "scorbunny"
+ - "raboot"
+ - "cinderace"
+ - "sobble"
+ - "drizzile"
+ - "inteleon"
+ - "skwovet"
+ - "greedent"
+ - "rookidee"
+ - "corvisquire"
+ - "corviknight"
+ - "blipbug"
+ - "dottler"
+ - "orbeetle"
+ - "nickit"
+ - "thievul"
+ - "gossifleur"
+ - "eldegoss"
+ - "wooloo"
+ - "dubwool"
+ - "chewtle"
+ - "drednaw"
+ - "yamper"
+ - "boltund"
+ - "rolycoly"
+ - "carkol"
+ - "coalossal"
+ - "applin"
+ - "flapple"
+ - "appletun"
+ - "silicobra"
+ - "sandaconda"
+ - "cramorant"
+ - "arrokuda"
+ - "barraskewda"
+ - "toxel"
+ - "toxtricity"
+ - "sizzlipede"
+ - "centiskorch"
+ - "clobbopus"
+ - "grapploct"
+ - "sinistea"
+ - "polteageist"
+ - "hatenna"
+ - "hattrem"
+ - "hatterene"
+ - "impidimp"
+ - "morgrem"
+ - "grimmsnarl"
+ - "obstagoon"
+ - "perrserker"
+ - "cursola"
+ - "sirfetchd"
+ - "mrrime"
+ - "runerigus"
+ - "milcery"
+ - "alcremie"
+ - "falinks"
+ - "pincurchin"
+ - "snom"
+ - "frosmoth"
+ - "stonjourner"
+ - "eiscue"
+ - "indeedee"
+ - "morpeko"
+ - "cufant"
+ - "copperajah"
+ - "dracozolt"
+ - "arctozolt"
+ - "dracovish"
+ - "arctovish"
+ - "duraludon"
+ - "dreepy"
+ - "drakloak"
+ - "dragapult"
+ - "zacian"
+ - "zamazenta"
+ - "eternatus"
+ - "kubfu"
+ - "urshifu"
+ - "zarude"
+ - "regieleki"
+ - "regidrago"
+ - "glastrier"
+ - "spectrier"
+ - "calyrex"
examples:
- "ditto"
- "luxray"
@@ -45310,6 +52277,7 @@ components:
      description: "Your API access token. See here. The token is case sensitive."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
backend_url:
type: "string"
title: "Senseforce backend URL"
@@ -45441,8 +52409,8 @@ components:
\ are a lot of data per day, set this setting to 1. If there is only very\
\ little data per day, you might change the setting to 10 or more."
source-freshsales:
- title: "Freshsales Spec"
type: "object"
+ title: "Freshsales Spec"
required:
- "domain_name"
- "api_key"
@@ -45450,16 +52418,19 @@ components:
properties:
domain_name:
type: "string"
+ order: 0
title: "Domain Name"
description: "The Name of your Freshsales domain"
examples:
- "mydomain.myfreshworks.com"
api_key:
type: "string"
+ order: 1
title: "API Key"
description: "Freshsales API Key. See here. The key is case sensitive."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "freshsales"
const: "freshsales"
@@ -45468,20 +52439,22 @@ components:
order: 0
type: "string"
source-freshsales-update:
- title: "Freshsales Spec"
type: "object"
+ title: "Freshsales Spec"
required:
- "domain_name"
- "api_key"
properties:
domain_name:
type: "string"
+ order: 0
title: "Domain Name"
description: "The Name of your Freshsales domain"
examples:
- "mydomain.myfreshworks.com"
api_key:
type: "string"
+ order: 1
title: "API Key"
description: "Freshsales API Key. See here. The key is case sensitive."
@@ -45498,6 +52471,7 @@ components:
description: "Hubplanner API key. See https://github.com/hubplanner/API#authentication\
\ for more details."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
sourceType:
title: "hubplanner"
const: "hubplanner"
@@ -45516,88 +52490,6 @@ components:
description: "Hubplanner API key. See https://github.com/hubplanner/API#authentication\
\ for more details."
airbyte_secret: true
- source-qualaroo:
- title: "Qualaroo Spec"
- type: "object"
- required:
- - "token"
- - "key"
- - "start_date"
- - "sourceType"
- properties:
- token:
- type: "string"
- title: "API token"
- description: "A Qualaroo token. See the docs for instructions on how to generate it."
- airbyte_secret: true
- key:
- type: "string"
- title: "API key"
- description: "A Qualaroo token. See the docs for instructions on how to generate it."
- airbyte_secret: true
- start_date:
- type: "string"
- title: "Start Date"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$"
- description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
- \ data before this date will not be replicated."
- examples:
- - "2021-03-01T00:00:00.000Z"
- survey_ids:
- type: "array"
- items:
- type: "string"
- pattern: "^[0-9]{1,8}$"
- title: "Qualaroo survey IDs"
- description: "IDs of the surveys from which you'd like to replicate data.\
- \ If left empty, data from all surveys to which you have access will be\
- \ replicated."
- sourceType:
- title: "qualaroo"
- const: "qualaroo"
- enum:
- - "qualaroo"
- order: 0
- type: "string"
- source-qualaroo-update:
- title: "Qualaroo Spec"
- type: "object"
- required:
- - "token"
- - "key"
- - "start_date"
- properties:
- token:
- type: "string"
- title: "API token"
- description: "A Qualaroo token. See the docs for instructions on how to generate it."
- airbyte_secret: true
- key:
- type: "string"
- title: "API key"
- description: "A Qualaroo token. See the docs for instructions on how to generate it."
- airbyte_secret: true
- start_date:
- type: "string"
- title: "Start Date"
- pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$"
- description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
- \ data before this date will not be replicated."
- examples:
- - "2021-03-01T00:00:00.000Z"
- survey_ids:
- type: "array"
- items:
- type: "string"
- pattern: "^[0-9]{1,8}$"
- title: "Qualaroo survey IDs"
- description: "IDs of the surveys from which you'd like to replicate data.\
- \ If left empty, data from all surveys to which you have access will be\
- \ replicated."
source-square:
title: "Square Spec"
type: "object"
@@ -45641,6 +52533,7 @@ components:
description: "A refresh token generated using the above client ID\
\ and secret"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "API key"
type: "object"
required:
@@ -45658,6 +52551,7 @@ components:
title: "API key token"
description: "The API key for a Square application"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
is_sandbox:
type: "boolean"
description: "Determines whether to use the sandbox or production environment."
@@ -45784,6 +52678,7 @@ components:
\ yours here)."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
title: "Start Date"
@@ -45897,6 +52792,7 @@ components:
type: "string"
airbyte_secret: true
order: 6
+ x-speakeasy-param-sensitive: true
jdbc_url_params:
title: "JDBC URL Params"
description: "Additional properties to pass to the JDBC URL string when\
@@ -46001,6 +52897,7 @@ components:
description: "The password associated with this username."
airbyte_secret: true
order: 3
+ x-speakeasy-param-sensitive: true
auth_source:
title: "Authentication Source"
type: "string"
@@ -46077,6 +52974,7 @@ components:
>docs for more information on how to obtain this key."
name: "Public Key"
type: "string"
+ x-speakeasy-param-sensitive: true
private_key:
title: "Private Key"
description: "Braintree Private Key. See the docs for information on how to generate this key."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
campaign_id:
type: "string"
title: "ID of a campaign to sync email activities"
@@ -46338,15 +53239,18 @@ components:
type: "string"
description: "Access Token for making authenticated requests."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
token_expiry_date:
type: "string"
description: "The date-time when the access token should be refreshed."
format: "date-time"
+ x-speakeasy-param-sensitive: true
refresh_token:
type: "string"
title: "Refresh token"
description: "The key to refresh the expired access token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "Personal Access Token"
type: "object"
required:
@@ -46366,6 +53270,7 @@ components:
airbyte_secret: true
examples:
- "key1234567890"
+ x-speakeasy-param-sensitive: true
sourceType:
title: "airtable"
const: "airtable"
@@ -46488,6 +53393,7 @@ components:
type: "string"
airbyte_secret: true
order: 5
+ x-speakeasy-param-sensitive: true
jdbc_url_params:
title: "JDBC URL Params"
description: "Additional properties to pass to the JDBC URL string when\
@@ -46666,6 +53572,7 @@ components:
airbyte_secret: true
multiline: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Password Authentication"
required:
- "tunnel_method"
@@ -46710,6 +53617,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
sourceType:
title: "mssql"
const: "mssql"
@@ -47046,6 +53954,7 @@ components:
airbyte_secret: true
examples:
- "A012345678910EXAMPLE"
+ x-speakeasy-param-sensitive: true
secret_access_key:
title: "Dynamodb Access Key"
type: "string"
@@ -47053,6 +53962,7 @@ components:
airbyte_secret: true
examples:
- "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
+ x-speakeasy-param-sensitive: true
reserved_attribute_names:
title: "Reserved attribute names"
type: "string"
@@ -47177,6 +54087,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
start_date:
title: "Start Date"
description: "Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ\
@@ -47373,6 +54284,7 @@ components:
title: "Password"
type: "string"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
tunnel_method:
type: "object"
title: "SSH Tunnel Method"
@@ -47436,6 +54348,7 @@ components:
airbyte_secret: true
multiline: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Password Authentication"
required:
- "tunnel_method"
@@ -47480,6 +54393,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
sourceType:
title: "clickhouse"
const: "clickhouse"
@@ -47809,6 +54723,7 @@ components:
title: "Refresh Token"
description: "The token for obtaining new access token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- type: "object"
title: "Authenticate via Lever (Api Key)"
required:
@@ -47826,6 +54741,7 @@ components:
description: "The Api Key of your Lever Hiring account."
airbyte_secret: true
order: 1
+ x-speakeasy-param-sensitive: true
start_date:
order: 0
type: "string"
@@ -47945,6 +54861,7 @@ components:
title: "Rest API Key"
airbyte_secret: true
description: "Braze REST API key"
+ x-speakeasy-param-sensitive: true
start_date:
type: "string"
format: "date"
@@ -48033,6 +54950,7 @@ components:
type: "string"
airbyte_secret: true
order: 1
+ x-speakeasy-param-sensitive: true
- title: "SSH Key Authentication"
required:
- "auth_method"
@@ -48053,6 +54971,7 @@ components:
airbyte_secret: true
multiline: true
order: 1
+ x-speakeasy-param-sensitive: true
file_types:
title: "File types"
description: "Coma separated file types. Currently only 'csv' and 'json'\
@@ -48187,586 +55106,1982 @@ components:
examples:
- "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`"
order: 6
- source-mailjet-sms:
- title: "Mailjet Sms Spec"
+ source-google-drive:
+ title: "Google Drive Source Spec"
+ description: "Used during spec; allows the developer to configure the cloud\
+ \ provider specific options\nthat are needed when users configure a file-based\
+ \ source."
type: "object"
- required:
- - "token"
- - "sourceType"
properties:
- token:
- title: "Access Token"
- type: "string"
- description: "Your access token. See here."
- airbyte_secret: true
start_date:
- title: "Start date"
- type: "integer"
- description: "Retrieve SMS messages created after the specified timestamp.\
- \ Required format - Unix timestamp."
- pattern: "^[0-9]*$"
- examples:
- - 1666261656
- end_date:
- title: "End date"
- type: "integer"
- description: "Retrieve SMS messages created before the specified timestamp.\
- \ Required format - Unix timestamp."
- pattern: "^[0-9]*$"
+ title: "Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\
+ \ Any file modified before this date will not be replicated."
examples:
- - 1666281656
+ - "2021-01-01T00:00:00.000000Z"
+ format: "date-time"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$"
+ pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ"
+ order: 1
+ type: "string"
+ streams:
+ title: "The list of streams to sync"
+ description: "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\
+ \ format, and how they should be parsed and validated. When sending data\
+ \ to warehouse destination such as Snowflake or BigQuery, each stream\
+ \ is a separate table."
+ order: 10
+ type: "array"
+ items:
+ title: "FileBasedStreamConfig"
+ type: "object"
+ properties:
+ name:
+ title: "Name"
+ description: "The name of the stream."
+ type: "string"
+ globs:
+ title: "Globs"
+ description: "The pattern used to specify which files should be selected\
+ \ from the file system. For more information on glob pattern matching\
+ \ look here."
+ default:
+ - "**"
+ order: 1
+ type: "array"
+ items:
+ type: "string"
+ validation_policy:
+ title: "Validation Policy"
+ description: "The name of the validation policy that dictates sync\
+ \ behavior when a record does not adhere to the stream schema."
+ default: "Emit Record"
+ enum:
+ - "Emit Record"
+ - "Skip Record"
+ - "Wait for Discover"
+ input_schema:
+ title: "Input Schema"
+ description: "The schema that will be used to validate records extracted\
+ \ from the file. This will override the stream schema that is auto-detected\
+ \ from incoming files."
+ type: "string"
+ primary_key:
+ title: "Primary Key"
+ description: "The column or columns (for a composite key) that serves\
+ \ as the unique identifier of a record."
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ days_to_sync_if_history_is_full:
+ title: "Days To Sync If History Is Full"
+ description: "When the state history of the file store is full, syncs\
+ \ will only read files that were last modified in the provided day\
+ \ range."
+ default: 3
+ type: "integer"
+ format:
+ title: "Format"
+ description: "The configuration options that are used to alter how\
+ \ to read incoming files that deviate from the standard formatting."
+ type: "object"
+ oneOf:
+ - title: "Avro Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "avro"
+ const: "avro"
+ type: "string"
+ enum:
+ - "avro"
+ double_as_string:
+ title: "Convert Double Fields to Strings"
+ description: "Whether to convert double fields to strings. This\
+ \ is recommended if you have decimal numbers with a high degree\
+ \ of precision because there can be a loss precision when\
+ \ handling floating point numbers."
+ default: false
+ type: "boolean"
+ required:
+ - "filetype"
+ - title: "CSV Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "csv"
+ const: "csv"
+ type: "string"
+ enum:
+ - "csv"
+ delimiter:
+ title: "Delimiter"
+ description: "The character delimiting individual cells in the\
+ \ CSV data. This may only be a 1-character string. For tab-delimited\
+ \ data enter '\\t'."
+ default: ","
+ type: "string"
+ quote_char:
+ title: "Quote Character"
+ description: "The character used for quoting CSV values. To\
+ \ disallow quoting, make this field blank."
+ default: "\""
+ type: "string"
+ escape_char:
+ title: "Escape Character"
+ description: "The character used for escaping special characters.\
+ \ To disallow escaping, leave this field blank."
+ type: "string"
+ encoding:
+ title: "Encoding"
+ description: "The character encoding of the CSV data. Leave\
+ \ blank to default to UTF8. See list of python encodings for allowable\
+ \ options."
+ default: "utf8"
+ type: "string"
+ double_quote:
+ title: "Double Quote"
+ description: "Whether two quotes in a quoted CSV value denote\
+ \ a single quote in the data."
+ default: true
+ type: "boolean"
+ null_values:
+ title: "Null Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as null values. For example, if the value 'NA'\
+ \ should be interpreted as null, enter 'NA' in this field."
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ strings_can_be_null:
+ title: "Strings Can Be Null"
+ description: "Whether strings can be interpreted as null values.\
+ \ If true, strings that match the null_values set will be\
+ \ interpreted as null. If false, strings that match the null_values\
+ \ set will be interpreted as the string itself."
+ default: true
+ type: "boolean"
+ skip_rows_before_header:
+ title: "Skip Rows Before Header"
+ description: "The number of rows to skip before the header row.\
+ \ For example, if the header row is on the 3rd row, enter\
+ \ 2 in this field."
+ default: 0
+ type: "integer"
+ skip_rows_after_header:
+ title: "Skip Rows After Header"
+ description: "The number of rows to skip after the header row."
+ default: 0
+ type: "integer"
+ header_definition:
+ title: "CSV Header Definition"
+ description: "How headers will be defined. `User Provided` assumes\
+ \ the CSV does not have a header row and uses the headers\
+ \ provided and `Autogenerated` assumes the CSV does not have\
+ \ a header row and the CDK will generate headers using for\
+ \ `f{i}` where `i` is the index starting from 0. Else, the\
+ \ default behavior is to use the header from the CSV file.\
+ \ If a user wants to autogenerate or provide column names\
+ \ for a CSV having headers, they can skip rows."
+ default:
+ header_definition_type: "From CSV"
+ oneOf:
+ - title: "From CSV"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "From CSV"
+ const: "From CSV"
+ type: "string"
+ enum:
+ - "From CSV"
+ required:
+ - "header_definition_type"
+ - title: "Autogenerated"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "Autogenerated"
+ const: "Autogenerated"
+ type: "string"
+ enum:
+ - "Autogenerated"
+ required:
+ - "header_definition_type"
+ - title: "User Provided"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "User Provided"
+ const: "User Provided"
+ type: "string"
+ enum:
+ - "User Provided"
+ column_names:
+ title: "Column Names"
+ description: "The column names that will be used while\
+ \ emitting the CSV records"
+ type: "array"
+ items:
+ type: "string"
+ required:
+ - "column_names"
+ - "header_definition_type"
+ type: "object"
+ true_values:
+ title: "True Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as true values."
+ default:
+ - "y"
+ - "yes"
+ - "t"
+ - "true"
+ - "on"
+ - "1"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ false_values:
+ title: "False Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as false values."
+ default:
+ - "n"
+ - "no"
+ - "f"
+ - "false"
+ - "off"
+ - "0"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ required:
+ - "filetype"
+ - title: "Jsonl Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "jsonl"
+ const: "jsonl"
+ type: "string"
+ enum:
+ - "jsonl"
+ required:
+ - "filetype"
+ - title: "Parquet Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "parquet"
+ const: "parquet"
+ type: "string"
+ enum:
+ - "parquet"
+ decimal_as_float:
+ title: "Convert Decimal Fields to Floats"
+ description: "Whether to convert decimal fields to floats. There\
+ \ is a loss of precision when converting decimals to floats,\
+ \ so this is not recommended."
+ default: false
+ type: "boolean"
+ required:
+ - "filetype"
+ - title: "Document File Type Format (Experimental)"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "unstructured"
+ const: "unstructured"
+ type: "string"
+ enum:
+ - "unstructured"
+ skip_unprocessable_file_types:
+ title: "Skip Unprocessable File Types"
+ description: "If true, skip files that cannot be parsed because\
+ \ of their file type and log a warning. If false, fail the\
+ \ sync. Corrupted files with valid file types will still result\
+ \ in a failed sync."
+ default: true
+ always_show: true
+ type: "boolean"
+ description: "Extract text from document formats (.pdf, .docx, .md,\
+ \ .pptx) and emit as one record per file."
+ required:
+ - "filetype"
+ schemaless:
+ title: "Schemaless"
+ description: "When enabled, syncs will not validate or structure records\
+ \ against the stream's schema."
+ default: false
+ type: "boolean"
+ required:
+ - "name"
+ - "format"
+ folder_url:
+ title: "Folder Url"
+ description: "URL for the folder you want to sync. Using individual streams\
+ \ and glob patterns, it's possible to only sync a subset of all files\
+ \ located in the folder."
+ examples:
+ - "https://drive.google.com/drive/folders/1Xaz0vXXXX2enKnNYU5qSt9NS70gvMyYn"
+ order: 0
+ pattern: "^https://drive.google.com/.+"
+ pattern_descriptor: "https://drive.google.com/drive/folders/MY-FOLDER-ID"
+ type: "string"
+ credentials:
+ title: "Authentication"
+ description: "Credentials for connecting to the Google Drive API"
+ type: "object"
+ oneOf:
+ - title: "Authenticate via Google (OAuth)"
+ type: "object"
+ properties:
+ auth_type:
+ title: "Auth Type"
+ default: "Client"
+ const: "Client"
+ enum:
+ - "Client"
+ type: "string"
+ client_id:
+ title: "Client ID"
+ description: "Client ID for the Google Drive API"
+ airbyte_secret: true
+ type: "string"
+ client_secret:
+ title: "Client Secret"
+ description: "Client Secret for the Google Drive API"
+ airbyte_secret: true
+ type: "string"
+ refresh_token:
+ title: "Refresh Token"
+ description: "Refresh Token for the Google Drive API"
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ - "auth_type"
+ - title: "Service Account Key Authentication"
+ type: "object"
+ properties:
+ auth_type:
+ title: "Auth Type"
+ default: "Service"
+ const: "Service"
+ enum:
+ - "Service"
+ type: "string"
+ service_account_info:
+ title: "Service Account Information"
+ description: "The JSON key of the service account to use for authorization.\
+ \ Read more here."
+ airbyte_secret: true
+ type: "string"
+ required:
+ - "service_account_info"
+ - "auth_type"
sourceType:
- title: "mailjet-sms"
- const: "mailjet-sms"
+ title: "google-drive"
+ const: "google-drive"
enum:
- - "mailjet-sms"
+ - "google-drive"
order: 0
type: "string"
- source-mailjet-sms-update:
- title: "Mailjet Sms Spec"
- type: "object"
required:
- - "token"
+ - "streams"
+ - "folder_url"
+ - "credentials"
+ - "sourceType"
+ source-google-drive-update:
+ title: "Google Drive Source Spec"
+ description: "Used during spec; allows the developer to configure the cloud\
+ \ provider specific options\nthat are needed when users configure a file-based\
+ \ source."
+ type: "object"
properties:
- token:
- title: "Access Token"
- type: "string"
- description: "Your access token. See here."
- airbyte_secret: true
start_date:
- title: "Start date"
- type: "integer"
- description: "Retrieve SMS messages created after the specified timestamp.\
- \ Required format - Unix timestamp."
- pattern: "^[0-9]*$"
- examples:
- - 1666261656
- end_date:
- title: "End date"
- type: "integer"
- description: "Retrieve SMS messages created before the specified timestamp.\
- \ Required format - Unix timestamp."
- pattern: "^[0-9]*$"
+ title: "Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\
+ \ Any file modified before this date will not be replicated."
examples:
- - 1666281656
- source-gcs:
- title: "Gcs Spec"
- type: "object"
- required:
- - "gcs_bucket"
- - "gcs_path"
- - "service_account"
- - "sourceType"
- properties:
- gcs_bucket:
- type: "string"
- title: "GCS bucket"
- description: "GCS bucket name"
- gcs_path:
- type: "string"
- title: "GCS Path"
- description: "GCS path to data"
- service_account:
- type: "string"
- title: "Service Account Information."
- description: "Enter your Google Cloud service account key in JSON format"
- airbyte_secret: true
- examples:
- - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID, \"\
- private_key_id\": YOUR_PRIVATE_KEY, ... }"
- sourceType:
- title: "gcs"
- const: "gcs"
- enum:
- - "gcs"
+ - "2021-01-01T00:00:00.000000Z"
+ format: "date-time"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$"
+ pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ"
+ order: 1
+ type: "string"
+ streams:
+ title: "The list of streams to sync"
+ description: "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\
+ \ format, and how they should be parsed and validated. When sending data\
+ \ to warehouse destination such as Snowflake or BigQuery, each stream\
+ \ is a separate table."
+ order: 10
+ type: "array"
+ items:
+ title: "FileBasedStreamConfig"
+ type: "object"
+ properties:
+ name:
+ title: "Name"
+ description: "The name of the stream."
+ type: "string"
+ globs:
+ title: "Globs"
+ description: "The pattern used to specify which files should be selected\
+ \ from the file system. For more information on glob pattern matching\
+ \ look here."
+ default:
+ - "**"
+ order: 1
+ type: "array"
+ items:
+ type: "string"
+ validation_policy:
+ title: "Validation Policy"
+ description: "The name of the validation policy that dictates sync\
+ \ behavior when a record does not adhere to the stream schema."
+ default: "Emit Record"
+ enum:
+ - "Emit Record"
+ - "Skip Record"
+ - "Wait for Discover"
+ input_schema:
+ title: "Input Schema"
+ description: "The schema that will be used to validate records extracted\
+ \ from the file. This will override the stream schema that is auto-detected\
+ \ from incoming files."
+ type: "string"
+ primary_key:
+ title: "Primary Key"
+ description: "The column or columns (for a composite key) that serves\
+ \ as the unique identifier of a record."
+ type: "string"
+ days_to_sync_if_history_is_full:
+ title: "Days To Sync If History Is Full"
+ description: "When the state history of the file store is full, syncs\
+ \ will only read files that were last modified in the provided day\
+ \ range."
+ default: 3
+ type: "integer"
+ format:
+ title: "Format"
+ description: "The configuration options that are used to alter how\
+ \ to read incoming files that deviate from the standard formatting."
+ type: "object"
+ oneOf:
+ - title: "Avro Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "avro"
+ const: "avro"
+ type: "string"
+ enum:
+ - "avro"
+ double_as_string:
+ title: "Convert Double Fields to Strings"
+ description: "Whether to convert double fields to strings. This\
+ \ is recommended if you have decimal numbers with a high degree\
+ \ of precision because there can be a loss precision when\
+ \ handling floating point numbers."
+ default: false
+ type: "boolean"
+ required:
+ - "filetype"
+ - title: "CSV Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "csv"
+ const: "csv"
+ type: "string"
+ enum:
+ - "csv"
+ delimiter:
+ title: "Delimiter"
+ description: "The character delimiting individual cells in the\
+ \ CSV data. This may only be a 1-character string. For tab-delimited\
+ \ data enter '\\t'."
+ default: ","
+ type: "string"
+ quote_char:
+ title: "Quote Character"
+ description: "The character used for quoting CSV values. To\
+ \ disallow quoting, make this field blank."
+ default: "\""
+ type: "string"
+ escape_char:
+ title: "Escape Character"
+ description: "The character used for escaping special characters.\
+ \ To disallow escaping, leave this field blank."
+ type: "string"
+ encoding:
+ title: "Encoding"
+ description: "The character encoding of the CSV data. Leave\
+ \ blank to default to UTF8. See list of python encodings for allowable\
+ \ options."
+ default: "utf8"
+ type: "string"
+ double_quote:
+ title: "Double Quote"
+ description: "Whether two quotes in a quoted CSV value denote\
+ \ a single quote in the data."
+ default: true
+ type: "boolean"
+ null_values:
+ title: "Null Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as null values. For example, if the value 'NA'\
+ \ should be interpreted as null, enter 'NA' in this field."
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ strings_can_be_null:
+ title: "Strings Can Be Null"
+ description: "Whether strings can be interpreted as null values.\
+ \ If true, strings that match the null_values set will be\
+ \ interpreted as null. If false, strings that match the null_values\
+ \ set will be interpreted as the string itself."
+ default: true
+ type: "boolean"
+ skip_rows_before_header:
+ title: "Skip Rows Before Header"
+ description: "The number of rows to skip before the header row.\
+ \ For example, if the header row is on the 3rd row, enter\
+ \ 2 in this field."
+ default: 0
+ type: "integer"
+ skip_rows_after_header:
+ title: "Skip Rows After Header"
+ description: "The number of rows to skip after the header row."
+ default: 0
+ type: "integer"
+ header_definition:
+ title: "CSV Header Definition"
+ description: "How headers will be defined. `User Provided` assumes\
+ \ the CSV does not have a header row and uses the headers\
+ \ provided and `Autogenerated` assumes the CSV does not have\
+ \ a header row and the CDK will generate headers using for\
+ \ `f{i}` where `i` is the index starting from 0. Else, the\
+ \ default behavior is to use the header from the CSV file.\
+ \ If a user wants to autogenerate or provide column names\
+ \ for a CSV having headers, they can skip rows."
+ default:
+ header_definition_type: "From CSV"
+ oneOf:
+ - title: "From CSV"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "From CSV"
+ const: "From CSV"
+ type: "string"
+ enum:
+ - "From CSV"
+ required:
+ - "header_definition_type"
+ - title: "Autogenerated"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "Autogenerated"
+ const: "Autogenerated"
+ type: "string"
+ enum:
+ - "Autogenerated"
+ required:
+ - "header_definition_type"
+ - title: "User Provided"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "User Provided"
+ const: "User Provided"
+ type: "string"
+ enum:
+ - "User Provided"
+ column_names:
+ title: "Column Names"
+ description: "The column names that will be used while\
+ \ emitting the CSV records"
+ type: "array"
+ items:
+ type: "string"
+ required:
+ - "column_names"
+ - "header_definition_type"
+ type: "object"
+ true_values:
+ title: "True Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as true values."
+ default:
+ - "y"
+ - "yes"
+ - "t"
+ - "true"
+ - "on"
+ - "1"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ false_values:
+ title: "False Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as false values."
+ default:
+ - "n"
+ - "no"
+ - "f"
+ - "false"
+ - "off"
+ - "0"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ required:
+ - "filetype"
+ - title: "Jsonl Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "jsonl"
+ const: "jsonl"
+ type: "string"
+ enum:
+ - "jsonl"
+ required:
+ - "filetype"
+ - title: "Parquet Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "parquet"
+ const: "parquet"
+ type: "string"
+ enum:
+ - "parquet"
+ decimal_as_float:
+ title: "Convert Decimal Fields to Floats"
+ description: "Whether to convert decimal fields to floats. There\
+ \ is a loss of precision when converting decimals to floats,\
+ \ so this is not recommended."
+ default: false
+ type: "boolean"
+ required:
+ - "filetype"
+ - title: "Document File Type Format (Experimental)"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "unstructured"
+ const: "unstructured"
+ type: "string"
+ enum:
+ - "unstructured"
+ skip_unprocessable_file_types:
+ title: "Skip Unprocessable File Types"
+ description: "If true, skip files that cannot be parsed because\
+ \ of their file type and log a warning. If false, fail the\
+ \ sync. Corrupted files with valid file types will still result\
+ \ in a failed sync."
+ default: true
+ always_show: true
+ type: "boolean"
+ description: "Extract text from document formats (.pdf, .docx, .md,\
+ \ .pptx) and emit as one record per file."
+ required:
+ - "filetype"
+ schemaless:
+ title: "Schemaless"
+ description: "When enabled, syncs will not validate or structure records\
+ \ against the stream's schema."
+ default: false
+ type: "boolean"
+ required:
+ - "name"
+ - "format"
+ folder_url:
+ title: "Folder Url"
+ description: "URL for the folder you want to sync. Using individual streams\
+ \ and glob patterns, it's possible to only sync a subset of all files\
+ \ located in the folder."
+ examples:
+ - "https://drive.google.com/drive/folders/1Xaz0vXXXX2enKnNYU5qSt9NS70gvMyYn"
order: 0
+ pattern: "^https://drive.google.com/.+"
+ pattern_descriptor: "https://drive.google.com/drive/folders/MY-FOLDER-ID"
type: "string"
- source-gcs-update:
- title: "Gcs Spec"
- type: "object"
+ credentials:
+ title: "Authentication"
+ description: "Credentials for connecting to the Google Drive API"
+ type: "object"
+ oneOf:
+ - title: "Authenticate via Google (OAuth)"
+ type: "object"
+ properties:
+ auth_type:
+ title: "Auth Type"
+ default: "Client"
+ const: "Client"
+ enum:
+ - "Client"
+ type: "string"
+ client_id:
+ title: "Client ID"
+ description: "Client ID for the Google Drive API"
+ airbyte_secret: true
+ type: "string"
+ client_secret:
+ title: "Client Secret"
+ description: "Client Secret for the Google Drive API"
+ airbyte_secret: true
+ type: "string"
+ refresh_token:
+ title: "Refresh Token"
+ description: "Refresh Token for the Google Drive API"
+ airbyte_secret: true
+ type: "string"
+ required:
+ - "client_id"
+ - "client_secret"
+ - "refresh_token"
+ - "auth_type"
+ - title: "Service Account Key Authentication"
+ type: "object"
+ properties:
+ auth_type:
+ title: "Auth Type"
+ default: "Service"
+ const: "Service"
+ enum:
+ - "Service"
+ type: "string"
+ service_account_info:
+ title: "Service Account Information"
+ description: "The JSON key of the service account to use for authorization.\
+ \ Read more here."
+ airbyte_secret: true
+ type: "string"
+ required:
+ - "service_account_info"
+ - "auth_type"
required:
- - "gcs_bucket"
- - "gcs_path"
- - "service_account"
- properties:
- gcs_bucket:
- type: "string"
- title: "GCS bucket"
- description: "GCS bucket name"
- gcs_path:
- type: "string"
- title: "GCS Path"
- description: "GCS path to data"
- service_account:
- type: "string"
- title: "Service Account Information."
- description: "Enter your Google Cloud service account key in JSON format"
- airbyte_secret: true
- examples:
- - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID, \"\
- private_key_id\": YOUR_PRIVATE_KEY, ... }"
- source-nytimes:
- title: "Nytimes Spec"
+ - "streams"
+ - "folder_url"
+ - "credentials"
+ source-mailjet-sms:
+ title: "Mailjet Sms Spec"
type: "object"
required:
- - "api_key"
- - "start_date"
- - "period"
+ - "token"
- "sourceType"
properties:
- api_key:
+ token:
+ title: "Access Token"
type: "string"
- title: "API Key"
- description: "API Key"
+ description: "Your access token. See here."
airbyte_secret: true
- order: 0
+ x-speakeasy-param-sensitive: true
start_date:
- type: "string"
- title: "Start Date"
- description: "Start date to begin the article retrieval (format YYYY-MM)"
- pattern: "^[0-9]{4}-[0-9]{2}$"
+ title: "Start date"
+ type: "integer"
+ description: "Retrieve SMS messages created after the specified timestamp.\
+ \ Required format - Unix timestamp."
+ pattern: "^[0-9]*$"
examples:
- - "2022-08"
- - "1851-01"
- format: "date"
- order: 1
+ - 1666261656
end_date:
- type: "string"
- title: "End Date"
- description: "End date to stop the article retrieval (format YYYY-MM)"
- pattern: "^[0-9]{4}-[0-9]{2}$"
- examples:
- - "2022-08"
- - "1851-01"
- format: "date"
- order: 2
- period:
+ title: "End date"
type: "integer"
- title: "Period (used for Most Popular streams)"
- description: "Period of time (in days)"
- order: 3
- enum:
- - 1
- - 7
- - 30
- share_type:
- type: "string"
- title: "Share Type (used for Most Popular Shared stream)"
- description: "Share Type"
- order: 4
- enum:
- - "facebook"
+ description: "Retrieve SMS messages created before the specified timestamp.\
+ \ Required format - Unix timestamp."
+ pattern: "^[0-9]*$"
+ examples:
+ - 1666281656
sourceType:
- title: "nytimes"
- const: "nytimes"
+ title: "mailjet-sms"
+ const: "mailjet-sms"
enum:
- - "nytimes"
+ - "mailjet-sms"
order: 0
type: "string"
- source-nytimes-update:
- title: "Nytimes Spec"
+ source-mailjet-sms-update:
+ title: "Mailjet Sms Spec"
type: "object"
required:
- - "api_key"
- - "start_date"
- - "period"
+ - "token"
properties:
- api_key:
+ token:
+ title: "Access Token"
type: "string"
- title: "API Key"
- description: "API Key"
+ description: "Your access token. See here."
airbyte_secret: true
- order: 0
start_date:
- type: "string"
- title: "Start Date"
- description: "Start date to begin the article retrieval (format YYYY-MM)"
- pattern: "^[0-9]{4}-[0-9]{2}$"
+ title: "Start date"
+ type: "integer"
+ description: "Retrieve SMS messages created after the specified timestamp.\
+ \ Required format - Unix timestamp."
+ pattern: "^[0-9]*$"
examples:
- - "2022-08"
- - "1851-01"
- format: "date"
- order: 1
+ - 1666261656
end_date:
- type: "string"
- title: "End Date"
- description: "End date to stop the article retrieval (format YYYY-MM)"
- pattern: "^[0-9]{4}-[0-9]{2}$"
- examples:
- - "2022-08"
- - "1851-01"
- format: "date"
- order: 2
- period:
+ title: "End date"
type: "integer"
- title: "Period (used for Most Popular streams)"
- description: "Period of time (in days)"
- order: 3
- enum:
- - 1
- - 7
- - 30
- share_type:
- type: "string"
- title: "Share Type (used for Most Popular Shared stream)"
- description: "Share Type"
- order: 4
- enum:
- - "facebook"
- source-greenhouse:
- title: "Greenhouse Spec"
+ description: "Retrieve SMS messages created before the specified timestamp.\
+ \ Required format - Unix timestamp."
+ pattern: "^[0-9]*$"
+ examples:
+ - 1666281656
+ source-gcs:
+ title: "Config"
+ description: "NOTE: When this Spec is changed, legacy_config_transformer.py\
+ \ must also be\nmodified to uptake the changes because it is responsible for\
+ \ converting\nlegacy GCS configs into file based configs using the File-Based\
+ \ CDK."
type: "object"
- required:
- - "api_key"
- - "sourceType"
properties:
- api_key:
- title: "API Key"
+ start_date:
+ title: "Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\
+ \ Any file modified before this date will not be replicated."
+ examples:
+ - "2021-01-01T00:00:00.000000Z"
+ format: "date-time"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$"
+ pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ"
+ order: 1
type: "string"
- description: "Greenhouse API Key. See the docs for more information on how to generate this key."
- airbyte_secret: true
+ streams:
+ title: "The list of streams to sync"
+ description: "Each instance of this configuration defines a stream.\
+ \ Use this to define which files belong in the stream, their format, and\
+ \ how they should be parsed and validated. When sending data to warehouse\
+ \ destination such as Snowflake or BigQuery, each stream is a separate\
+ \ table."
+ order: 3
+ type: "array"
+ items:
+ title: "SourceGCSStreamConfig"
+ type: "object"
+ properties:
+ name:
+ title: "Name"
+ description: "The name of the stream."
+ order: 0
+ type: "string"
+ globs:
+ title: "Globs"
+ description: "The pattern used to specify which files should be selected\
+ \ from the file system. For more information on glob pattern matching\
+ \ look here."
+ order: 1
+ type: "array"
+ items:
+ type: "string"
+ legacy_prefix:
+ title: "Legacy Prefix"
+ description: "The path prefix configured in previous versions of the\
+ \ GCS connector. This option is deprecated in favor of a single\
+ \ glob."
+ airbyte_hidden: true
+ type: "string"
+ validation_policy:
+ title: "Validation Policy"
+ description: "The name of the validation policy that dictates sync\
+ \ behavior when a record does not adhere to the stream schema."
+ default: "Emit Record"
+ enum:
+ - "Emit Record"
+ - "Skip Record"
+ - "Wait for Discover"
+ input_schema:
+ title: "Input Schema"
+ description: "The schema that will be used to validate records extracted\
+ \ from the file. This will override the stream schema that is auto-detected\
+ \ from incoming files."
+ type: "string"
+ primary_key:
+ title: "Primary Key"
+ description: "The column or columns (for a composite key) that serves\
+ \ as the unique identifier of a record."
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ days_to_sync_if_history_is_full:
+ title: "Days To Sync If History Is Full"
+ description: "When the state history of the file store is full, syncs\
+ \ will only read files that were last modified in the provided day\
+ \ range."
+ default: 3
+ type: "integer"
+ format:
+ title: "Format"
+ description: "The configuration options that are used to alter how\
+ \ to read incoming files that deviate from the standard formatting."
+ order: 2
+ type: "object"
+ oneOf:
+ - title: "CSV Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "csv"
+ const: "csv"
+ type: "string"
+ enum:
+ - "csv"
+ delimiter:
+ title: "Delimiter"
+ description: "The character delimiting individual cells in the\
+ \ CSV data. This may only be a 1-character string. For tab-delimited\
+ \ data enter '\\t'."
+ default: ","
+ type: "string"
+ quote_char:
+ title: "Quote Character"
+ description: "The character used for quoting CSV values. To\
+ \ disallow quoting, make this field blank."
+ default: "\""
+ type: "string"
+ escape_char:
+ title: "Escape Character"
+ description: "The character used for escaping special characters.\
+ \ To disallow escaping, leave this field blank."
+ type: "string"
+ encoding:
+ title: "Encoding"
+ description: "The character encoding of the CSV data. Leave\
+ \ blank to default to UTF8. See list of python encodings for allowable\
+ \ options."
+ default: "utf8"
+ type: "string"
+ double_quote:
+ title: "Double Quote"
+ description: "Whether two quotes in a quoted CSV value denote\
+ \ a single quote in the data."
+ default: true
+ type: "boolean"
+ null_values:
+ title: "Null Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as null values. For example, if the value 'NA'\
+ \ should be interpreted as null, enter 'NA' in this field."
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ strings_can_be_null:
+ title: "Strings Can Be Null"
+ description: "Whether strings can be interpreted as null values.\
+ \ If true, strings that match the null_values set will be\
+ \ interpreted as null. If false, strings that match the null_values\
+ \ set will be interpreted as the string itself."
+ default: true
+ type: "boolean"
+ skip_rows_before_header:
+ title: "Skip Rows Before Header"
+ description: "The number of rows to skip before the header row.\
+ \ For example, if the header row is on the 3rd row, enter\
+ \ 2 in this field."
+ default: 0
+ type: "integer"
+ skip_rows_after_header:
+ title: "Skip Rows After Header"
+ description: "The number of rows to skip after the header row."
+ default: 0
+ type: "integer"
+ header_definition:
+ title: "CSV Header Definition"
+ description: "How headers will be defined. `User Provided` assumes\
+ \ the CSV does not have a header row and uses the headers\
+ \ provided and `Autogenerated` assumes the CSV does not have\
+ \ a header row and the CDK will generate headers using for\
+ \ `f{i}` where `i` is the index starting from 0. Else, the\
+ \ default behavior is to use the header from the CSV file.\
+ \ If a user wants to autogenerate or provide column names\
+ \ for a CSV having headers, they can skip rows."
+ default:
+ header_definition_type: "From CSV"
+ oneOf:
+ - title: "From CSV"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "From CSV"
+ const: "From CSV"
+ type: "string"
+ enum:
+ - "From CSV"
+ required:
+ - "header_definition_type"
+ - title: "Autogenerated"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "Autogenerated"
+ const: "Autogenerated"
+ type: "string"
+ enum:
+ - "Autogenerated"
+ required:
+ - "header_definition_type"
+ - title: "User Provided"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "User Provided"
+ const: "User Provided"
+ type: "string"
+ enum:
+ - "User Provided"
+ column_names:
+ title: "Column Names"
+ description: "The column names that will be used while\
+ \ emitting the CSV records"
+ type: "array"
+ items:
+ type: "string"
+ required:
+ - "column_names"
+ - "header_definition_type"
+ type: "object"
+ true_values:
+ title: "True Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as true values."
+ default:
+ - "y"
+ - "yes"
+ - "t"
+ - "true"
+ - "on"
+ - "1"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ false_values:
+ title: "False Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as false values."
+ default:
+ - "n"
+ - "no"
+ - "f"
+ - "false"
+ - "off"
+ - "0"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ inference_type:
+ title: "Inference Type"
+ description: "How to infer the types of the columns. If none,\
+ \ inference default to strings."
+ default: "None"
+ airbyte_hidden: true
+ enum:
+ - "None"
+ - "Primitive Types Only"
+ required:
+ - "filetype"
+ schemaless:
+ title: "Schemaless"
+ description: "When enabled, syncs will not validate or structure records\
+ \ against the stream's schema."
+ default: false
+ type: "boolean"
+ required:
+ - "name"
+ - "format"
+ service_account:
+ title: "Service Account Information"
+ description: "Enter your Google Cloud service account key in JSON format"
+ airbyte_secret: true
order: 0
+ type: "string"
+ bucket:
+ title: "Bucket"
+ description: "Name of the GCS bucket where the file(s) exist."
+ order: 2
+ type: "string"
sourceType:
- title: "greenhouse"
- const: "greenhouse"
+ title: "gcs"
+ const: "gcs"
enum:
- - "greenhouse"
+ - "gcs"
order: 0
type: "string"
- source-greenhouse-update:
- title: "Greenhouse Spec"
- type: "object"
required:
- - "api_key"
- properties:
- api_key:
- title: "API Key"
- type: "string"
- description: "Greenhouse API Key. See the docs for more information on how to generate this key."
- airbyte_secret: true
- order: 0
- trello:
- title: "Trello Spec"
- harvest:
- properties:
- credentials:
- properties:
- client_id:
- title: "Client ID"
- type: "string"
- description: "The Client ID of your Harvest developer application."
- client_secret:
- title: "Client Secret"
- type: "string"
- description: "The Client Secret of your Harvest developer application."
- airbyte_secret: true
- title: "Harvest Spec"
- zendesk-chat:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "The Client ID of your OAuth application"
- airbyte_secret: true
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "The Client Secret of your OAuth application."
- airbyte_secret: true
- title: "Zendesk Chat Spec"
- google-ads:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- order: 1
- description: "The Client ID of your Google Ads developer application.\
- \ For detailed instructions on finding this value, refer to our documentation."
- client_secret:
- type: "string"
- title: "Client Secret"
- order: 2
- description: "The Client Secret of your Google Ads developer application.\
- \ For detailed instructions on finding this value, refer to our documentation."
- airbyte_secret: true
- developer_token:
- type: "string"
- title: "Developer Token"
- order: 0
- description: "The Developer Token granted by Google to use their APIs.\
- \ For detailed instructions on finding this value, refer to our documentation."
- airbyte_secret: true
- title: "Google Ads Spec"
- google-search-console:
- properties:
- authorization:
- properties:
- client_id:
- title: "Client ID"
- type: "string"
- description: "The client ID of your Google Search Console developer\
- \ application. Read more here."
- airbyte_secret: true
- client_secret:
- title: "Client Secret"
- type: "string"
- description: "The client secret of your Google Search Console developer\
- \ application. Read more here."
- airbyte_secret: true
- title: "Google Search Console Spec"
- strava:
+ - "streams"
+ - "service_account"
+ - "bucket"
+ - "sourceType"
+ source-gcs-update:
+ title: "Config"
+ description: "NOTE: When this Spec is changed, legacy_config_transformer.py\
+ \ must also be\nmodified to uptake the changes because it is responsible for\
+ \ converting\nlegacy GCS configs into file based configs using the File-Based\
+ \ CDK."
+ type: "object"
properties:
- client_id:
- type: "string"
- description: "The Client ID of your Strava developer application."
- title: "Client ID"
- pattern: "^[0-9_\\-]+$"
- examples:
- - "12345"
- order: 0
- client_secret:
- type: "string"
- description: "The Client Secret of your Strava developer application."
- title: "Client Secret"
- pattern: "^[0-9a-fA-F]+$"
+ start_date:
+ title: "Start Date"
+ description: "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\
+ \ Any file modified before this date will not be replicated."
examples:
- - "fc6243f283e51f6ca989aab298b17da125496f50"
- airbyte_secret: true
+ - "2021-01-01T00:00:00.000000Z"
+ format: "date-time"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$"
+ pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ"
order: 1
- title: "Strava Spec"
- shopify:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "The Client ID of the Shopify developer application."
- airbyte_secret: true
- order: 1
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "The Client Secret of the Shopify developer application."
- airbyte_secret: true
- order: 2
- title: "Shopify Source CDK Specifications"
- retently:
- properties:
- credentials:
- properties:
- client_id:
- title: "Client ID"
- type: "string"
- description: "The Client ID of your Retently developer application."
- client_secret:
- title: "Client Secret"
- type: "string"
- description: "The Client Secret of your Retently developer application."
- airbyte_secret: true
- title: "Retently Api Spec"
- instagram:
- properties:
- client_id:
- title: "Client Id"
- description: "The Client ID for your Oauth application"
- airbyte_secret: true
- airbyte_hidden: true
- type: "string"
- client_secret:
- title: "Client Secret"
- description: "The Client Secret for your Oauth application"
- airbyte_secret: true
- airbyte_hidden: true
- type: "string"
- title: "Source Instagram"
- zendesk-sunshine:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "The Client ID of your OAuth application."
- airbyte_secret: true
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "The Client Secret of your OAuth application."
- airbyte_secret: true
- title: null
- snapchat-marketing:
- properties:
- client_id:
- title: "Client ID"
- type: "string"
- description: "The Client ID of your Snapchat developer application."
- airbyte_secret: true
- order: 0
- client_secret:
- title: "Client Secret"
type: "string"
- description: "The Client Secret of your Snapchat developer application."
- airbyte_secret: true
- order: 1
- title: "Snapchat Marketing Spec"
- gitlab:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- description: "The API ID of the Gitlab developer application."
- airbyte_secret: true
- client_secret:
- type: "string"
- description: "The API Secret the Gitlab developer application."
- airbyte_secret: true
- title: "Source Gitlab Spec"
- snowflake:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "The Client ID of your Snowflake developer application."
- airbyte_secret: true
- order: 1
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "The Client Secret of your Snowflake developer application."
- airbyte_secret: true
- order: 2
- title: "Snowflake Source Spec"
- smartsheets:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- description: "The API ID of the SmartSheets developer application."
- airbyte_secret: true
- client_secret:
- type: "string"
- description: "The API Secret the SmartSheets developer application."
- airbyte_secret: true
- title: "Smartsheets Source Spec"
- notion:
- properties:
- credentials:
- properties:
- client_id:
- title: "Client ID"
- type: "string"
- description: "The ClientID of your Notion integration."
- airbyte_secret: true
- client_secret:
- title: "Client Secret"
- type: "string"
- description: "The ClientSecret of your Notion integration."
- airbyte_secret: true
- title: "Notion Source Spec"
- slack:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "Slack client_id. See our docs if you need help finding this id."
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "Slack client_secret. See our docs if you need help finding this secret."
- airbyte_secret: true
- title: "Slack Spec"
- youtube-analytics:
+ streams:
+ title: "The list of streams to sync"
+ description: "Each instance of this configuration defines a stream.\
+ \ Use this to define which files belong in the stream, their format, and\
+ \ how they should be parsed and validated. When sending data to warehouse\
+ \ destination such as Snowflake or BigQuery, each stream is a separate\
+ \ table."
+ order: 3
+ type: "array"
+ items:
+ title: "SourceGCSStreamConfig"
+ type: "object"
+ properties:
+ name:
+ title: "Name"
+ description: "The name of the stream."
+ order: 0
+ type: "string"
+ globs:
+ title: "Globs"
+ description: "The pattern used to specify which files should be selected\
+ \ from the file system. For more information on glob pattern matching\
+ \ look here."
+ order: 1
+ type: "array"
+ items:
+ type: "string"
+ legacy_prefix:
+ title: "Legacy Prefix"
+ description: "The path prefix configured in previous versions of the\
+ \ GCS connector. This option is deprecated in favor of a single\
+ \ glob."
+ airbyte_hidden: true
+ type: "string"
+ validation_policy:
+ title: "Validation Policy"
+ description: "The name of the validation policy that dictates sync\
+ \ behavior when a record does not adhere to the stream schema."
+ default: "Emit Record"
+ enum:
+ - "Emit Record"
+ - "Skip Record"
+ - "Wait for Discover"
+ input_schema:
+ title: "Input Schema"
+ description: "The schema that will be used to validate records extracted\
+ \ from the file. This will override the stream schema that is auto-detected\
+ \ from incoming files."
+ type: "string"
+ primary_key:
+ title: "Primary Key"
+ description: "The column or columns (for a composite key) that serves\
+ \ as the unique identifier of a record."
+ type: "string"
+ days_to_sync_if_history_is_full:
+ title: "Days To Sync If History Is Full"
+ description: "When the state history of the file store is full, syncs\
+ \ will only read files that were last modified in the provided day\
+ \ range."
+ default: 3
+ type: "integer"
+ format:
+ title: "Format"
+ description: "The configuration options that are used to alter how\
+ \ to read incoming files that deviate from the standard formatting."
+ order: 2
+ type: "object"
+ oneOf:
+ - title: "CSV Format"
+ type: "object"
+ properties:
+ filetype:
+ title: "Filetype"
+ default: "csv"
+ const: "csv"
+ type: "string"
+ enum:
+ - "csv"
+ delimiter:
+ title: "Delimiter"
+ description: "The character delimiting individual cells in the\
+ \ CSV data. This may only be a 1-character string. For tab-delimited\
+ \ data enter '\\t'."
+ default: ","
+ type: "string"
+ quote_char:
+ title: "Quote Character"
+ description: "The character used for quoting CSV values. To\
+ \ disallow quoting, make this field blank."
+ default: "\""
+ type: "string"
+ escape_char:
+ title: "Escape Character"
+ description: "The character used for escaping special characters.\
+ \ To disallow escaping, leave this field blank."
+ type: "string"
+ encoding:
+ title: "Encoding"
+ description: "The character encoding of the CSV data. Leave\
+ \ blank to default to UTF8. See list of python encodings for allowable\
+ \ options."
+ default: "utf8"
+ type: "string"
+ double_quote:
+ title: "Double Quote"
+ description: "Whether two quotes in a quoted CSV value denote\
+ \ a single quote in the data."
+ default: true
+ type: "boolean"
+ null_values:
+ title: "Null Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as null values. For example, if the value 'NA'\
+ \ should be interpreted as null, enter 'NA' in this field."
+ default: []
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ strings_can_be_null:
+ title: "Strings Can Be Null"
+ description: "Whether strings can be interpreted as null values.\
+ \ If true, strings that match the null_values set will be\
+ \ interpreted as null. If false, strings that match the null_values\
+ \ set will be interpreted as the string itself."
+ default: true
+ type: "boolean"
+ skip_rows_before_header:
+ title: "Skip Rows Before Header"
+ description: "The number of rows to skip before the header row.\
+ \ For example, if the header row is on the 3rd row, enter\
+ \ 2 in this field."
+ default: 0
+ type: "integer"
+ skip_rows_after_header:
+ title: "Skip Rows After Header"
+ description: "The number of rows to skip after the header row."
+ default: 0
+ type: "integer"
+ header_definition:
+ title: "CSV Header Definition"
+ description: "How headers will be defined. `User Provided` assumes\
+ \ the CSV does not have a header row and uses the headers\
+ \ provided and `Autogenerated` assumes the CSV does not have\
+ \ a header row and the CDK will generate headers using for\
+ \ `f{i}` where `i` is the index starting from 0. Else, the\
+ \ default behavior is to use the header from the CSV file.\
+ \ If a user wants to autogenerate or provide column names\
+ \ for a CSV having headers, they can skip rows."
+ default:
+ header_definition_type: "From CSV"
+ oneOf:
+ - title: "From CSV"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "From CSV"
+ const: "From CSV"
+ type: "string"
+ enum:
+ - "From CSV"
+ required:
+ - "header_definition_type"
+ - title: "Autogenerated"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "Autogenerated"
+ const: "Autogenerated"
+ type: "string"
+ enum:
+ - "Autogenerated"
+ required:
+ - "header_definition_type"
+ - title: "User Provided"
+ type: "object"
+ properties:
+ header_definition_type:
+ title: "Header Definition Type"
+ default: "User Provided"
+ const: "User Provided"
+ type: "string"
+ enum:
+ - "User Provided"
+ column_names:
+ title: "Column Names"
+ description: "The column names that will be used while\
+ \ emitting the CSV records"
+ type: "array"
+ items:
+ type: "string"
+ required:
+ - "column_names"
+ - "header_definition_type"
+ type: "object"
+ true_values:
+ title: "True Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as true values."
+ default:
+ - "y"
+ - "yes"
+ - "t"
+ - "true"
+ - "on"
+ - "1"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ false_values:
+ title: "False Values"
+ description: "A set of case-sensitive strings that should be\
+ \ interpreted as false values."
+ default:
+ - "n"
+ - "no"
+ - "f"
+ - "false"
+ - "off"
+ - "0"
+ type: "array"
+ items:
+ type: "string"
+ uniqueItems: true
+ inference_type:
+ title: "Inference Type"
+ description: "How to infer the types of the columns. If none,\
+ \ inference default to strings."
+ default: "None"
+ airbyte_hidden: true
+ enum:
+ - "None"
+ - "Primitive Types Only"
+ required:
+ - "filetype"
+ schemaless:
+ title: "Schemaless"
+ description: "When enabled, syncs will not validate or structure records\
+ \ against the stream's schema."
+ default: false
+ type: "boolean"
+ required:
+ - "name"
+ - "format"
+ service_account:
+ title: "Service Account Information"
+ description: "Enter your Google Cloud service account key in JSON format"
+ airbyte_secret: true
+ order: 0
+ type: "string"
+ bucket:
+ title: "Bucket"
+ description: "Name of the GCS bucket where the file(s) exist."
+ order: 2
+ type: "string"
+ required:
+ - "streams"
+ - "service_account"
+ - "bucket"
+ source-qualaroo:
+ title: "Qualaroo Spec"
+ type: "object"
+ required:
+ - "token"
+ - "key"
+ - "start_date"
+ - "sourceType"
+ properties:
+ token:
+ type: "string"
+ title: "API token"
+ description: "A Qualaroo token. See the docs for instructions on how to generate it."
+ airbyte_secret: true
+ x-speakeasy-param-sensitive: true
+ key:
+ type: "string"
+ title: "API key"
+        description: "A Qualaroo key. See the docs for instructions on how to generate it."
+ airbyte_secret: true
+ x-speakeasy-param-sensitive: true
+ start_date:
+ type: "string"
+ title: "Start Date"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2021-03-01T00:00:00.000Z"
+ survey_ids:
+ type: "array"
+ items:
+ type: "string"
+ pattern: "^[0-9]{1,8}$"
+ title: "Qualaroo survey IDs"
+ description: "IDs of the surveys from which you'd like to replicate data.\
+ \ If left empty, data from all surveys to which you have access will be\
+ \ replicated."
+ sourceType:
+ title: "qualaroo"
+ const: "qualaroo"
+ enum:
+ - "qualaroo"
+ order: 0
+ type: "string"
+ source-qualaroo-update:
+ title: "Qualaroo Spec"
+ type: "object"
+ required:
+ - "token"
+ - "key"
+ - "start_date"
+ properties:
+ token:
+ type: "string"
+ title: "API token"
+ description: "A Qualaroo token. See the docs for instructions on how to generate it."
+ airbyte_secret: true
+ key:
+ type: "string"
+ title: "API key"
+        description: "A Qualaroo key. See the docs for instructions on how to generate it."
+ airbyte_secret: true
+ start_date:
+ type: "string"
+ title: "Start Date"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$"
+ description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\
+ \ data before this date will not be replicated."
+ examples:
+ - "2021-03-01T00:00:00.000Z"
+ survey_ids:
+ type: "array"
+ items:
+ type: "string"
+ pattern: "^[0-9]{1,8}$"
+ title: "Qualaroo survey IDs"
+ description: "IDs of the surveys from which you'd like to replicate data.\
+ \ If left empty, data from all surveys to which you have access will be\
+ \ replicated."
+ source-nytimes:
+ title: "Nytimes Spec"
+ type: "object"
+ required:
+ - "api_key"
+ - "start_date"
+ - "period"
+ - "sourceType"
+ properties:
+ api_key:
+ type: "string"
+ title: "API Key"
+ description: "API Key"
+ airbyte_secret: true
+ order: 0
+ x-speakeasy-param-sensitive: true
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description: "Start date to begin the article retrieval (format YYYY-MM)"
+ pattern: "^[0-9]{4}-[0-9]{2}$"
+ examples:
+ - "2022-08"
+ - "1851-01"
+ format: "date"
+ order: 1
+ end_date:
+ type: "string"
+ title: "End Date"
+ description: "End date to stop the article retrieval (format YYYY-MM)"
+ pattern: "^[0-9]{4}-[0-9]{2}$"
+ examples:
+ - "2022-08"
+ - "1851-01"
+ format: "date"
+ order: 2
+ period:
+ type: "integer"
+ title: "Period (used for Most Popular streams)"
+ description: "Period of time (in days)"
+ order: 3
+ enum:
+ - 1
+ - 7
+ - 30
+ share_type:
+ type: "string"
+ title: "Share Type (used for Most Popular Shared stream)"
+ description: "Share Type"
+ order: 4
+ enum:
+ - "facebook"
+ sourceType:
+ title: "nytimes"
+ const: "nytimes"
+ enum:
+ - "nytimes"
+ order: 0
+ type: "string"
+ source-nytimes-update:
+ title: "Nytimes Spec"
+ type: "object"
+ required:
+ - "api_key"
+ - "start_date"
+ - "period"
+ properties:
+ api_key:
+ type: "string"
+ title: "API Key"
+ description: "API Key"
+ airbyte_secret: true
+ order: 0
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description: "Start date to begin the article retrieval (format YYYY-MM)"
+ pattern: "^[0-9]{4}-[0-9]{2}$"
+ examples:
+ - "2022-08"
+ - "1851-01"
+ format: "date"
+ order: 1
+ end_date:
+ type: "string"
+ title: "End Date"
+ description: "End date to stop the article retrieval (format YYYY-MM)"
+ pattern: "^[0-9]{4}-[0-9]{2}$"
+ examples:
+ - "2022-08"
+ - "1851-01"
+ format: "date"
+ order: 2
+ period:
+ type: "integer"
+ title: "Period (used for Most Popular streams)"
+ description: "Period of time (in days)"
+ order: 3
+ enum:
+ - 1
+ - 7
+ - 30
+ share_type:
+ type: "string"
+ title: "Share Type (used for Most Popular Shared stream)"
+ description: "Share Type"
+ order: 4
+ enum:
+ - "facebook"
+ source-greenhouse:
+ title: "Greenhouse Spec"
+ type: "object"
+ required:
+ - "api_key"
+ - "sourceType"
+ properties:
+ api_key:
+ title: "API Key"
+ type: "string"
+ description: "Greenhouse API Key. See the docs for more information on how to generate this key."
+ airbyte_secret: true
+ order: 0
+ x-speakeasy-param-sensitive: true
+ sourceType:
+ title: "greenhouse"
+ const: "greenhouse"
+ enum:
+ - "greenhouse"
+ order: 0
+ type: "string"
+ source-greenhouse-update:
+ title: "Greenhouse Spec"
+ type: "object"
+ required:
+ - "api_key"
+ properties:
+ api_key:
+ title: "API Key"
+ type: "string"
+ description: "Greenhouse API Key. See the docs for more information on how to generate this key."
+ airbyte_secret: true
+ order: 0
+ trello:
+ title: null
+ harvest:
properties:
credentials:
properties:
client_id:
title: "Client ID"
type: "string"
- description: "The Client ID of your developer application"
- airbyte_secret: true
+ description: "The Client ID of your Harvest developer application."
client_secret:
title: "Client Secret"
type: "string"
- description: "The client secret of your developer application"
+ description: "The Client Secret of your Harvest developer application."
airbyte_secret: true
- title: "YouTube Analytics Spec"
- google-sheets:
+ title: "Harvest Spec"
+ zendesk-chat:
properties:
credentials:
properties:
client_id:
- title: "Client ID"
type: "string"
- description: "Enter your Google application's Client ID. See Google's\
- \ documentation for more information."
+ title: "Client ID"
+ description: "The Client ID of your OAuth application"
airbyte_secret: true
client_secret:
- title: "Client Secret"
type: "string"
- description: "Enter your Google application's Client Secret. See Google's\
- \ documentation for more information."
+ title: "Client Secret"
+ description: "The Client Secret of your OAuth application."
airbyte_secret: true
- title: "Google Sheets Source Spec"
- zendesk-talk:
- properties:
+ title: "Zendesk Chat Spec"
+ google-ads:
+ properties:
credentials:
properties:
client_id:
type: "string"
title: "Client ID"
- description: "Client ID"
+ order: 1
+ description: "The Client ID of your Google Ads developer application.\
+ \ For detailed instructions on finding this value, refer to our documentation."
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ order: 2
+ description: "The Client Secret of your Google Ads developer application.\
+ \ For detailed instructions on finding this value, refer to our documentation."
+ airbyte_secret: true
+ developer_token:
+ type: "string"
+ title: "Developer Token"
+ order: 0
+ description: "The Developer Token granted by Google to use their APIs.\
+ \ For detailed instructions on finding this value, refer to our documentation."
+ airbyte_secret: true
+ title: "Google Ads Spec"
+ google-search-console:
+ properties:
+ authorization:
+ properties:
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The client ID of your Google Search Console developer\
+ \ application. Read more here."
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The client secret of your Google Search Console developer\
+ \ application. Read more here."
+ airbyte_secret: true
+ title: "Google Search Console Spec"
+ strava:
+ properties:
+ client_id:
+ type: "string"
+ description: "The Client ID of your Strava developer application."
+ title: "Client ID"
+ pattern: "^[0-9_\\-]+$"
+ examples:
+ - "12345"
+ order: 0
+ client_secret:
+ type: "string"
+ description: "The Client Secret of your Strava developer application."
+ title: "Client Secret"
+ pattern: "^[0-9a-fA-F]+$"
+ examples:
+ - "fc6243f283e51f6ca989aab298b17da125496f50"
+ airbyte_secret: true
+ order: 1
+ title: "Strava Spec"
+ shopify:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "The Client ID of the Shopify developer application."
airbyte_secret: true
+ order: 1
client_secret:
type: "string"
title: "Client Secret"
- description: "Client Secret"
+ description: "The Client Secret of the Shopify developer application."
airbyte_secret: true
- title: "Source Zendesk Talk Spec"
- google-analytics-v4:
+ order: 2
+ title: "Shopify Source CDK Specifications"
+ retently:
properties:
credentials:
properties:
client_id:
title: "Client ID"
type: "string"
- description: "The Client ID of your Google Analytics developer application."
+ description: "The Client ID of your Retently developer application."
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The Client Secret of your Retently developer application."
+ airbyte_secret: true
+ title: "Retently Api Spec"
+ instagram:
+ properties:
+ client_id:
+ title: "Client Id"
+ description: "The Client ID for your Oauth application"
+ airbyte_secret: true
+ airbyte_hidden: true
+ type: "string"
+ client_secret:
+ title: "Client Secret"
+ description: "The Client Secret for your Oauth application"
+ airbyte_secret: true
+ airbyte_hidden: true
+ type: "string"
+ title: "Source Instagram"
+ zendesk-sunshine:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "The Client ID of your OAuth application."
airbyte_secret: true
- order: 1
client_secret:
+ type: "string"
title: "Client Secret"
+ description: "The Client Secret of your OAuth application."
+ airbyte_secret: true
+ title: null
+ snapchat-marketing:
+ properties:
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID of your Snapchat developer application."
+ airbyte_secret: true
+ order: 0
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The Client Secret of your Snapchat developer application."
+ airbyte_secret: true
+ order: 1
+ title: "Snapchat Marketing Spec"
+ gitlab:
+ properties:
+ credentials:
+ properties:
+ client_id:
type: "string"
- description: "The Client Secret of your Google Analytics developer application."
+ description: "The API ID of the Gitlab developer application."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ description: "The API Secret of the Gitlab developer application."
+ airbyte_secret: true
+ title: "Source Gitlab Spec"
+ snowflake:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "The Client ID of your Snowflake developer application."
+ airbyte_secret: true
+ order: 1
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "The Client Secret of your Snowflake developer application."
airbyte_secret: true
order: 2
- title: "Google Analytics (V4) Spec"
+ title: "Snowflake Source Spec"
+ smartsheets:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ description: "The API ID of the SmartSheets developer application."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ description: "The API Secret of the SmartSheets developer application."
+ airbyte_secret: true
+ title: "Smartsheets Source Spec"
+ notion:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID of your Notion integration. See our docs\
+ \ for more information."
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The Client Secret of your Notion integration. See our\
+ \ docs\
+ \ for more information."
+ airbyte_secret: true
+ title: "Notion Source Spec"
+ slack:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "Slack client_id. See our docs if you need help finding this id."
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "Slack client_secret. See our docs if you need help finding this secret."
+ airbyte_secret: true
+ title: "Slack Spec"
+ youtube-analytics:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID of your developer application"
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The client secret of your developer application"
+ airbyte_secret: true
+ title: "YouTube Analytics Spec"
+ google-sheets:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "Enter your Google application's Client ID. See Google's\
+ \ documentation for more information."
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "Enter your Google application's Client Secret. See Google's\
+ \ documentation for more information."
+ airbyte_secret: true
+ title: "Google Sheets Source Spec"
+ zendesk-talk:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "Client ID"
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "Client Secret"
+ airbyte_secret: true
+ title: "Source Zendesk Talk Spec"
asana:
properties:
credentials:
@@ -48798,1135 +57113,1880 @@ components:
title: "Microsoft Teams Spec"
amazon-seller-partner:
properties:
- lwa_app_id:
- title: "LWA Client Id"
- description: "Your Login with Amazon Client ID."
- order: 6
- airbyte_secret: true
+ lwa_app_id:
+ title: "LWA Client Id"
+ description: "Your Login with Amazon Client ID."
+ order: 4
+ airbyte_secret: true
+ type: "string"
+ lwa_client_secret:
+ title: "LWA Client Secret"
+ description: "Your Login with Amazon Client Secret."
+ airbyte_secret: true
+ order: 5
+ type: "string"
+ title: "Amazon Seller Partner Spec"
+ linkedin-ads:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "The client ID of your developer application. Refer to\
+ \ our documentation\
+ \ for more information."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "The client secret of your developer application. Refer\
+ \ to our documentation\
+ \ for more information."
+ airbyte_secret: true
+ title: "Linkedin Ads Spec"
+ pinterest:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "The Client ID of your OAuth application"
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "The Client Secret of your OAuth application."
+ airbyte_secret: true
+ title: "Pinterest Spec"
+ zendesk-support:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "The OAuth client's ID. See this guide for more information."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "The OAuth client secret. See this guide for more information."
+ airbyte_secret: true
+ title: "Source Zendesk Support Spec"
+ tiktok-marketing:
+ properties:
+ credentials:
+ properties:
+ app_id:
+ title: "App ID"
+ description: "The Developer Application App ID."
+ airbyte_secret: true
+ type: "string"
+ secret:
+ title: "Secret"
+ description: "The Developer Application Secret."
+ airbyte_secret: true
+ type: "string"
+ title: "TikTok Marketing Source Spec"
+ hubspot:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ title: "Client ID"
+ description: "The Client ID of your HubSpot developer application. See\
+ \ the Hubspot docs if you need help finding this ID."
+ type: "string"
+ examples:
+ - "123456789000"
+ client_secret:
+ title: "Client Secret"
+ description: "The client secret for your HubSpot developer application.\
+ \ See the Hubspot docs if you need help finding this secret."
+ type: "string"
+ examples:
+ - "secret"
+ airbyte_secret: true
+ title: "HubSpot Source Spec"
+ google-analytics-data-api:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID of your Google Analytics developer application."
+ order: 1
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The Client Secret of your Google Analytics developer application."
+ airbyte_secret: true
+ order: 2
+ title: "Google Analytics (Data API) Spec"
+ intercom:
+ properties:
+ client_id:
+ title: "Client Id"
+ type: "string"
+ description: "Client Id for your Intercom application."
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "Client Secret for your Intercom application."
+ airbyte_secret: true
+ title: "Source Intercom Spec"
+ typeform:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ description: "The Client ID of the Typeform developer application."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ description: "The Client Secret of the Typeform developer application."
+ airbyte_secret: true
+ title: null
+ facebook-marketing:
+ properties:
+ client_id:
+ title: "Client Id"
+ description: "The Client Id for your OAuth app"
+ airbyte_secret: true
+ airbyte_hidden: true
+ type: "string"
+ client_secret:
+ title: "Client Secret"
+ description: "The Client Secret for your OAuth app"
+ airbyte_secret: true
+ airbyte_hidden: true
+ type: "string"
+ title: "Source Facebook Marketing"
+ facebook-pages:
+ title: "Facebook Pages Spec"
+ surveymonkey:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "The Client ID of the SurveyMonkey developer application."
+ airbyte_secret: true
+ order: 1
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "The Client Secret of the SurveyMonkey developer application."
+ airbyte_secret: true
+ order: 2
+ title: "SurveyMonkey Spec"
+ bing-ads:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "The Client ID of your Microsoft Advertising developer application."
+ airbyte_secret: true
+ order: 1
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "The Client Secret of your Microsoft Advertising developer\
+ \ application."
+ default: ""
+ airbyte_secret: true
+ order: 2
+ title: "Bing Ads Spec"
+ monday:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "The Client ID of your OAuth application."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "The Client Secret of your OAuth application."
+ airbyte_secret: true
+ title: "Monday Spec"
+ amazon-ads:
+ properties:
+ client_id:
+ title: "Client ID"
+ description: "The client ID of your Amazon Ads developer application. See\
+ \ the docs for more information."
+ order: 1
+ type: "string"
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ description: "The client secret of your Amazon Ads developer application.\
+ \ See the docs for more information."
+ airbyte_secret: true
+ order: 2
+ type: "string"
+ title: "Amazon Ads Spec"
+ github:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client Id"
+ description: "OAuth Client Id"
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "OAuth Client secret"
+ airbyte_secret: true
+ title: "GitHub Source Spec"
+ square:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "The Square-issued ID of your application"
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ title: "Client Secret"
+ description: "The Square-issued application secret for your application"
+ airbyte_secret: true
+ title: "Square Spec"
+ mailchimp:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID of your OAuth application."
+ airbyte_secret: true
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The Client Secret of your OAuth application."
+ airbyte_secret: true
+ title: "Mailchimp Spec"
+ airtable:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ type: "string"
+ title: "Client ID"
+ description: "The client ID of the Airtable developer application."
+ airbyte_secret: true
+ client_secret:
+ type: "string"
+ title: "Client secret"
+ description: "The client secret of the Airtable developer application."
+ airbyte_secret: true
+ title: "Airtable Source Spec"
+ salesforce:
+ properties:
+ client_id:
+ title: "Client ID"
+ description: "Enter your Salesforce developer application's Client ID"
+ type: "string"
+ order: 2
+ client_secret:
+ title: "Client Secret"
+ description: "Enter your Salesforce developer application's Client secret"
+ type: "string"
+ airbyte_secret: true
+ order: 3
+ title: "Salesforce Source Spec"
+ lever-hiring:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ title: "Client ID"
+ type: "string"
+ description: "The Client ID of your Lever Hiring developer application."
+ client_secret:
+ title: "Client Secret"
+ type: "string"
+ description: "The Client Secret of your Lever Hiring developer application."
+ airbyte_secret: true
+ title: "Lever Hiring Source Spec"
+ google-drive:
+ properties:
+ credentials:
+ properties:
+ client_id:
+ title: "Client ID"
+ description: "Client ID for the Google Drive API"
+ airbyte_secret: true
+ type: "string"
+ client_secret:
+ title: "Client Secret"
+ description: "Client Secret for the Google Drive API"
+ airbyte_secret: true
+ type: "string"
+ title: "Google Drive Source Spec"
+ destination-gcs:
+ title: "GCS Destination Spec"
+ type: "object"
+ required:
+ - "gcs_bucket_name"
+ - "gcs_bucket_path"
+ - "credential"
+ - "format"
+ - "destinationType"
+ properties:
+ gcs_bucket_name:
+ title: "GCS Bucket Name"
+ order: 1
+ type: "string"
+ description: "You can find the bucket name in the App Engine Admin console\
+ \ Application Settings page, under the label Google Cloud Storage Bucket.\
+ \ Read more here."
+ examples:
+ - "airbyte_sync"
+ gcs_bucket_path:
+ title: "GCS Bucket Path"
+ description: "GCS Bucket Path string Subdirectory under the above bucket\
+ \ to sync the data into."
+ order: 2
+ type: "string"
+ examples:
+ - "data_sync/test"
+ gcs_bucket_region:
+ title: "GCS Bucket Region"
+ type: "string"
+ order: 3
+ default: "us"
+ description: "Select a Region of the GCS Bucket. Read more here."
+ enum:
+ - "northamerica-northeast1"
+ - "northamerica-northeast2"
+ - "us-central1"
+ - "us-east1"
+ - "us-east4"
+ - "us-west1"
+ - "us-west2"
+ - "us-west3"
+ - "us-west4"
+ - "southamerica-east1"
+ - "southamerica-west1"
+ - "europe-central2"
+ - "europe-north1"
+ - "europe-west1"
+ - "europe-west2"
+ - "europe-west3"
+ - "europe-west4"
+ - "europe-west6"
+ - "asia-east1"
+ - "asia-east2"
+ - "asia-northeast1"
+ - "asia-northeast2"
+ - "asia-northeast3"
+ - "asia-south1"
+ - "asia-south2"
+ - "asia-southeast1"
+ - "asia-southeast2"
+ - "australia-southeast1"
+ - "australia-southeast2"
+ - "asia"
+ - "eu"
+ - "us"
+ - "asia1"
+ - "eur4"
+ - "nam4"
+ credential:
+ title: "Authentication"
+ description: "An HMAC key is a type of credential and can be associated\
+ \ with a service account or a user account in Cloud Storage. Read more\
+ \ here."
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "HMAC Key"
+ required:
+ - "credential_type"
+ - "hmac_key_access_id"
+ - "hmac_key_secret"
+ properties:
+ credential_type:
+ type: "string"
+ enum:
+ - "HMAC_KEY"
+ default: "HMAC_KEY"
+ hmac_key_access_id:
+ type: "string"
+ description: "When linked to a service account, this ID is 61 characters\
+ \ long; when linked to a user account, it is 24 characters long.\
+ \ Read more here."
+ title: "Access ID"
+ airbyte_secret: true
+ order: 0
+ examples:
+ - "1234567890abcdefghij1234"
+ x-speakeasy-param-sensitive: true
+ hmac_key_secret:
+ type: "string"
+ description: "The corresponding secret for the access ID. It is a\
+ \ 40-character base-64 encoded string. Read more here."
+ title: "Secret"
+ airbyte_secret: true
+ order: 1
+ examples:
+ - "1234567890abcdefghij1234567890ABCDEFGHIJ"
+ x-speakeasy-param-sensitive: true
+ format:
+ title: "Output Format"
+ type: "object"
+ description: "Output data format. One of the following formats must be selected\
+ \ - AVRO format, PARQUET format, CSV format, or JSONL format."
+ order: 4
+ oneOf:
+ - title: "Avro: Apache Avro"
+ required:
+ - "format_type"
+ - "compression_codec"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "Avro"
+ default: "Avro"
+ compression_codec:
+ title: "Compression Codec"
+ description: "The compression algorithm used to compress data. Default\
+ \ to no compression."
+ type: "object"
+ oneOf:
+ - title: "No Compression"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "no compression"
+ default: "no compression"
+ - title: "Deflate"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "Deflate"
+ default: "Deflate"
+ compression_level:
+ title: "Deflate level"
+ description: "0: no compression & fastest, 9: best compression\
+ \ & slowest."
+ type: "integer"
+ default: 0
+ minimum: 0
+ maximum: 9
+ - title: "bzip2"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "bzip2"
+ default: "bzip2"
+ - title: "xz"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "xz"
+ default: "xz"
+ compression_level:
+ title: "Compression Level"
+ description: "The presets 0-3 are fast presets with medium compression.\
+ \ The presets 4-6 are fairly slow presets with high compression.\
+ \ The default preset is 6. The presets 7-9 are like the preset\
+ \ 6 but use bigger dictionaries and have higher compressor\
+ \ and decompressor memory requirements. Unless the uncompressed\
+ \ size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is\
+ \ waste of memory to use the presets 7, 8, or 9, respectively.\
+ \ Read more here for details."
+ type: "integer"
+ default: 6
+ minimum: 0
+ maximum: 9
+ - title: "zstandard"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "zstandard"
+ default: "zstandard"
+ compression_level:
+ title: "Compression Level"
+ description: "Negative levels are 'fast' modes akin to lz4 or\
+ \ snappy, levels above 9 are generally for archival purposes,\
+ \ and levels above 18 use a lot of memory."
+ type: "integer"
+ default: 3
+ minimum: -5
+ maximum: 22
+ include_checksum:
+ title: "Include Checksum"
+ description: "If true, include a checksum with each data block."
+ type: "boolean"
+ default: false
+ - title: "snappy"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "snappy"
+ default: "snappy"
+ - title: "CSV: Comma-Separated Values"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "CSV"
+ default: "CSV"
+ flattening:
+ type: "string"
+ title: "Normalization"
+ description: "Whether the input JSON data should be normalized (flattened)\
+ \ in the output CSV. Please refer to docs for details."
+ default: "No flattening"
+ enum:
+ - "No flattening"
+ - "Root level flattening"
+ compression:
+ title: "Compression"
+ type: "object"
+ description: "Whether the output files should be compressed. If compression\
+ \ is selected, the output filename will have an extra extension\
+ \ (GZIP: \".csv.gz\")."
+ oneOf:
+ - title: "No Compression"
+ required:
+ - "compression_type"
+ properties:
+ compression_type:
+ type: "string"
+ enum:
+ - "No Compression"
+ default: "No Compression"
+ - title: "GZIP"
+ required:
+ - "compression_type"
+ properties:
+ compression_type:
+ type: "string"
+ enum:
+ - "GZIP"
+ default: "GZIP"
+ - title: "JSON Lines: newline-delimited JSON"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "JSONL"
+ default: "JSONL"
+ compression:
+ title: "Compression"
+ type: "object"
+ description: "Whether the output files should be compressed. If compression\
+ \ is selected, the output filename will have an extra extension\
+ \ (GZIP: \".jsonl.gz\")."
+ oneOf:
+ - title: "No Compression"
+ required:
+ - "compression_type"
+ properties:
+ compression_type:
+ type: "string"
+ enum:
+ - "No Compression"
+ default: "No Compression"
+ - title: "GZIP"
+ required:
+ - "compression_type"
+ properties:
+ compression_type:
+ type: "string"
+ enum:
+ - "GZIP"
+ default: "GZIP"
+ - title: "Parquet: Columnar Storage"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "Parquet"
+ default: "Parquet"
+ compression_codec:
+ title: "Compression Codec"
+ description: "The compression algorithm used to compress data pages."
+ type: "string"
+ default: "UNCOMPRESSED"
+ enum:
+ - "UNCOMPRESSED"
+ - "SNAPPY"
+ - "GZIP"
+ - "LZO"
+ - "BROTLI"
+ - "LZ4"
+ - "ZSTD"
+ block_size_mb:
+ title: "Block Size (Row Group Size) (MB)"
+ description: "This is the size of a row group being buffered in memory.\
+ \ It limits the memory usage when writing. Larger values will improve\
+ \ the IO when reading, but consume more memory when writing. Default:\
+ \ 128 MB."
+ type: "integer"
+ default: 128
+ examples:
+ - 128
+ max_padding_size_mb:
+ title: "Max Padding Size (MB)"
+ description: "Maximum size allowed as padding to align row groups.\
+ \ This is also the minimum size of a row group. Default: 8 MB."
+ type: "integer"
+ default: 8
+ examples:
+ - 8
+ page_size_kb:
+ title: "Page Size (KB)"
+ description: "The page size is for compression. A block is composed\
+ \ of pages. A page is the smallest unit that must be read fully\
+ \ to access a single record. If this value is too small, the compression\
+ \ will deteriorate. Default: 1024 KB."
+ type: "integer"
+ default: 1024
+ examples:
+ - 1024
+ dictionary_page_size_kb:
+ title: "Dictionary Page Size (KB)"
+ description: "There is one dictionary page per column per row group\
+ \ when dictionary encoding is used. The dictionary page size works\
+ \ like the page size but for dictionary. Default: 1024 KB."
+ type: "integer"
+ default: 1024
+ examples:
+ - 1024
+ dictionary_encoding:
+ title: "Dictionary Encoding"
+ description: "Default: true."
+ type: "boolean"
+ default: true
+ destinationType:
+ title: "gcs"
+ const: "gcs"
+ enum:
+ - "gcs"
+ order: 0
+ type: "string"
+ destination-gcs-update:
+ title: "GCS Destination Spec"
+ type: "object"
+ required:
+ - "gcs_bucket_name"
+ - "gcs_bucket_path"
+ - "credential"
+ - "format"
+ properties:
+ gcs_bucket_name:
+ title: "GCS Bucket Name"
+ order: 1
+ type: "string"
+ description: "You can find the bucket name in the App Engine Admin console\
+ \ Application Settings page, under the label Google Cloud Storage Bucket.\
+ \ Read more here."
+ examples:
+ - "airbyte_sync"
+ gcs_bucket_path:
+ title: "GCS Bucket Path"
+ description: "GCS Bucket Path string Subdirectory under the above bucket\
+ \ to sync the data into."
+ order: 2
+ type: "string"
+ examples:
+ - "data_sync/test"
+ gcs_bucket_region:
+ title: "GCS Bucket Region"
+ type: "string"
+ order: 3
+ default: "us"
+ description: "Select a Region of the GCS Bucket. Read more here."
+ enum:
+ - "northamerica-northeast1"
+ - "northamerica-northeast2"
+ - "us-central1"
+ - "us-east1"
+ - "us-east4"
+ - "us-west1"
+ - "us-west2"
+ - "us-west3"
+ - "us-west4"
+ - "southamerica-east1"
+ - "southamerica-west1"
+ - "europe-central2"
+ - "europe-north1"
+ - "europe-west1"
+ - "europe-west2"
+ - "europe-west3"
+ - "europe-west4"
+ - "europe-west6"
+ - "asia-east1"
+ - "asia-east2"
+ - "asia-northeast1"
+ - "asia-northeast2"
+ - "asia-northeast3"
+ - "asia-south1"
+ - "asia-south2"
+ - "asia-southeast1"
+ - "asia-southeast2"
+ - "australia-southeast1"
+ - "australia-southeast2"
+ - "asia"
+ - "eu"
+ - "us"
+ - "asia1"
+ - "eur4"
+ - "nam4"
+ credential:
+ title: "Authentication"
+ description: "An HMAC key is a type of credential and can be associated\
+ \ with a service account or a user account in Cloud Storage. Read more\
+ \ here."
+ type: "object"
+ order: 0
+ oneOf:
+ - title: "HMAC Key"
+ required:
+ - "credential_type"
+ - "hmac_key_access_id"
+ - "hmac_key_secret"
+ properties:
+ credential_type:
+ type: "string"
+ enum:
+ - "HMAC_KEY"
+ default: "HMAC_KEY"
+ hmac_key_access_id:
+ type: "string"
+ description: "When linked to a service account, this ID is 61 characters\
+ \ long; when linked to a user account, it is 24 characters long.\
+ \ Read more here."
+ title: "Access ID"
+ airbyte_secret: true
+ order: 0
+ examples:
+ - "1234567890abcdefghij1234"
+ hmac_key_secret:
+ type: "string"
+ description: "The corresponding secret for the access ID. It is a\
+ \ 40-character base-64 encoded string. Read more here."
+ title: "Secret"
+ airbyte_secret: true
+ order: 1
+ examples:
+ - "1234567890abcdefghij1234567890ABCDEFGHIJ"
+ format:
+ title: "Output Format"
+ type: "object"
+ description: "Output data format. One of the following formats must be selected\
+ \ - AVRO format, PARQUET format, CSV format, or JSONL format."
+ order: 4
+ oneOf:
+ - title: "Avro: Apache Avro"
+ required:
+ - "format_type"
+ - "compression_codec"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "Avro"
+ default: "Avro"
+ compression_codec:
+ title: "Compression Codec"
+ description: "The compression algorithm used to compress data. Default\
+ \ to no compression."
+ type: "object"
+ oneOf:
+ - title: "No Compression"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "no compression"
+ default: "no compression"
+ - title: "Deflate"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "Deflate"
+ default: "Deflate"
+ compression_level:
+ title: "Deflate level"
+ description: "0: no compression & fastest, 9: best compression\
+ \ & slowest."
+ type: "integer"
+ default: 0
+ minimum: 0
+ maximum: 9
+ - title: "bzip2"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "bzip2"
+ default: "bzip2"
+ - title: "xz"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "xz"
+ default: "xz"
+ compression_level:
+ title: "Compression Level"
+ description: "The presets 0-3 are fast presets with medium compression.\
+ \ The presets 4-6 are fairly slow presets with high compression.\
+ \ The default preset is 6. The presets 7-9 are like the preset\
+ \ 6 but use bigger dictionaries and have higher compressor\
+ \ and decompressor memory requirements. Unless the uncompressed\
+ \ size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is\
+ \ waste of memory to use the presets 7, 8, or 9, respectively.\
+ \ Read more here for details."
+ type: "integer"
+ default: 6
+ minimum: 0
+ maximum: 9
+ - title: "zstandard"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "zstandard"
+ default: "zstandard"
+ compression_level:
+ title: "Compression Level"
+ description: "Negative levels are 'fast' modes akin to lz4 or\
+ \ snappy, levels above 9 are generally for archival purposes,\
+ \ and levels above 18 use a lot of memory."
+ type: "integer"
+ default: 3
+ minimum: -5
+ maximum: 22
+ include_checksum:
+ title: "Include Checksum"
+ description: "If true, include a checksum with each data block."
+ type: "boolean"
+ default: false
+ - title: "snappy"
+ required:
+ - "codec"
+ properties:
+ codec:
+ type: "string"
+ enum:
+ - "snappy"
+ default: "snappy"
+ - title: "CSV: Comma-Separated Values"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "CSV"
+ default: "CSV"
+ flattening:
+ type: "string"
+ title: "Normalization"
+ description: "Whether the input JSON data should be normalized (flattened)\
+ \ in the output CSV. Please refer to docs for details."
+ default: "No flattening"
+ enum:
+ - "No flattening"
+ - "Root level flattening"
+ compression:
+ title: "Compression"
+ type: "object"
+ description: "Whether the output files should be compressed. If compression\
+ \ is selected, the output filename will have an extra extension\
+ \ (GZIP: \".csv.gz\")."
+ oneOf:
+ - title: "No Compression"
+ required:
+ - "compression_type"
+ properties:
+ compression_type:
+ type: "string"
+ enum:
+ - "No Compression"
+ default: "No Compression"
+ - title: "GZIP"
+ required:
+ - "compression_type"
+ properties:
+ compression_type:
+ type: "string"
+ enum:
+ - "GZIP"
+ default: "GZIP"
+ - title: "JSON Lines: newline-delimited JSON"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "JSONL"
+ default: "JSONL"
+ compression:
+ title: "Compression"
+ type: "object"
+ description: "Whether the output files should be compressed. If compression\
+ \ is selected, the output filename will have an extra extension\
+ \ (GZIP: \".jsonl.gz\")."
+ oneOf:
+ - title: "No Compression"
+ required:
+ - "compression_type"
+ properties:
+ compression_type:
+ type: "string"
+ enum:
+ - "No Compression"
+ default: "No Compression"
+ - title: "GZIP"
+ required:
+ - "compression_type"
+ properties:
+ compression_type:
+ type: "string"
+ enum:
+ - "GZIP"
+ default: "GZIP"
+ - title: "Parquet: Columnar Storage"
+ required:
+ - "format_type"
+ properties:
+ format_type:
+ type: "string"
+ enum:
+ - "Parquet"
+ default: "Parquet"
+ compression_codec:
+ title: "Compression Codec"
+ description: "The compression algorithm used to compress data pages."
+ type: "string"
+ default: "UNCOMPRESSED"
+ enum:
+ - "UNCOMPRESSED"
+ - "SNAPPY"
+ - "GZIP"
+ - "LZO"
+ - "BROTLI"
+ - "LZ4"
+ - "ZSTD"
+ block_size_mb:
+ title: "Block Size (Row Group Size) (MB)"
+ description: "This is the size of a row group being buffered in memory.\
+ \ It limits the memory usage when writing. Larger values will improve\
+ \ the IO when reading, but consume more memory when writing. Default:\
+ \ 128 MB."
+ type: "integer"
+ default: 128
+ examples:
+ - 128
+ max_padding_size_mb:
+ title: "Max Padding Size (MB)"
+ description: "Maximum size allowed as padding to align row groups.\
+ \ This is also the minimum size of a row group. Default: 8 MB."
+ type: "integer"
+ default: 8
+ examples:
+ - 8
+ page_size_kb:
+ title: "Page Size (KB)"
+ description: "The page size is for compression. A block is composed\
+ \ of pages. A page is the smallest unit that must be read fully\
+ \ to access a single record. If this value is too small, the compression\
+ \ will deteriorate. Default: 1024 KB."
+ type: "integer"
+ default: 1024
+ examples:
+ - 1024
+ dictionary_page_size_kb:
+ title: "Dictionary Page Size (KB)"
+ description: "There is one dictionary page per column per row group\
+ \ when dictionary encoding is used. The dictionary page size works\
+ \ like the page size but for dictionary. Default: 1024 KB."
+ type: "integer"
+ default: 1024
+ examples:
+ - 1024
+ dictionary_encoding:
+ title: "Dictionary Encoding"
+ description: "Default: true."
+ type: "boolean"
+ default: true
+ destination-xata:
+ title: "Destination Xata"
+ type: "object"
+ required:
+ - "api_key"
+ - "db_url"
+ - "destinationType"
+ properties:
+ api_key:
+ title: "API Key"
+ description: "API Key to connect."
type: "string"
- lwa_client_secret:
- title: "LWA Client Secret"
- description: "Your Login with Amazon Client Secret."
+ order: 0
airbyte_secret: true
- order: 7
- type: "string"
- title: "Amazon Seller Partner Spec"
- linkedin-ads:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "The client ID of your developer application. Refer to\
- \ our documentation\
- \ for more information."
- airbyte_secret: true
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "The client secret of your developer application. Refer\
- \ to our documentation\
- \ for more information."
- airbyte_secret: true
- title: "Linkedin Ads Spec"
- pinterest:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "The Client ID of your OAuth application"
- airbyte_secret: true
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "The Client Secret of your OAuth application."
- airbyte_secret: true
- title: "Pinterest Spec"
- zendesk-support:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "The OAuth client's ID. See this guide for more information."
- airbyte_secret: true
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "The OAuth client secret. See this guide for more information."
- airbyte_secret: true
- title: "Source Zendesk Support Spec"
- xero:
- properties:
- authentication:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "Enter your Xero application's Client ID"
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "Enter your Xero application's Client Secret"
- airbyte_secret: true
- title: "Xero Spec"
- tiktok-marketing:
- properties:
- credentials:
- properties:
- app_id:
- title: "App ID"
- description: "The Developer Application App ID."
- airbyte_secret: true
- type: "string"
- secret:
- title: "Secret"
- description: "The Developer Application Secret."
- airbyte_secret: true
- type: "string"
- title: "TikTok Marketing Source Spec"
- hubspot:
- properties:
- credentials:
- properties:
- client_id:
- title: "Client ID"
- description: "The Client ID of your HubSpot developer application. See\
- \ the Hubspot docs if you need help finding this ID."
- type: "string"
- examples:
- - "123456789000"
- client_secret:
- title: "Client Secret"
- description: "The client secret for your HubSpot developer application.\
- \ See the Hubspot docs if you need help finding this secret."
- type: "string"
- examples:
- - "secret"
- airbyte_secret: true
- title: "HubSpot Source Spec"
- google-analytics-data-api:
- properties:
- credentials:
- properties:
- client_id:
- title: "Client ID"
- type: "string"
- description: "The Client ID of your Google Analytics developer application."
- order: 1
- client_secret:
- title: "Client Secret"
- type: "string"
- description: "The Client Secret of your Google Analytics developer application."
- airbyte_secret: true
- order: 2
- title: "Google Analytics (Data API) Spec"
- intercom:
- properties:
- client_id:
- title: "Client Id"
+ x-speakeasy-param-sensitive: true
+ db_url:
+ title: "Database URL"
+ description: "URL pointing to your workspace."
type: "string"
- description: "Client Id for your Intercom application."
- airbyte_secret: true
- client_secret:
- title: "Client Secret"
+ order: 1
+ example: "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main"
+ destinationType:
+ title: "xata"
+ const: "xata"
+ enum:
+ - "xata"
+ order: 0
type: "string"
- description: "Client Secret for your Intercom application."
- airbyte_secret: true
- title: "Source Intercom Spec"
- typeform:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- description: "The Client ID of the Typeform developer application."
- airbyte_secret: true
- client_secret:
- type: "string"
- description: "The Client Secret the Typeform developer application."
- airbyte_secret: true
- title: "Source Typeform Spec"
- facebook-marketing:
+ destination-xata-update:
+ title: "Destination Xata"
+ type: "object"
+ required:
+ - "api_key"
+ - "db_url"
properties:
- client_id:
- title: "Client Id"
- description: "The Client Id for your OAuth app"
- airbyte_secret: true
- airbyte_hidden: true
+ api_key:
+ title: "API Key"
+ description: "API Key to connect."
type: "string"
- client_secret:
- title: "Client Secret"
- description: "The Client Secret for your OAuth app"
+ order: 0
airbyte_secret: true
- airbyte_hidden: true
+ db_url:
+ title: "Database URL"
+ description: "URL pointing to your workspace."
type: "string"
- title: "Source Facebook Marketing"
- facebook-pages:
- title: "Facebook Pages Spec"
- surveymonkey:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "The Client ID of the SurveyMonkey developer application."
- airbyte_secret: true
- order: 1
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "The Client Secret of the SurveyMonkey developer application."
- airbyte_secret: true
- order: 2
- title: "SurveyMonkey Spec"
- bing-ads:
+ order: 1
+ example: "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main"
+ destination-clickhouse:
+ title: "ClickHouse Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ - "destinationType"
properties:
- client_id:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
type: "string"
- title: "Client ID"
- description: "The Client ID of your Microsoft Advertising developer application."
- airbyte_secret: true
+ order: 0
+ port:
+ title: "Port"
+ description: "HTTP port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 8123
+ examples:
+ - "8123"
order: 1
- client_secret:
+ database:
+ title: "DB Name"
+ description: "Name of the database."
type: "string"
- title: "Client Secret"
- description: "The Client Secret of your Microsoft Advertising developer\
- \ application."
- default: ""
- airbyte_secret: true
order: 2
- title: "Bing Ads Spec"
- monday:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "The Client ID of your OAuth application."
- airbyte_secret: true
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "The Client Secret of your OAuth application."
- airbyte_secret: true
- title: "Monday Spec"
- amazon-ads:
- properties:
- client_id:
- title: "Client ID"
- description: "The client ID of your Amazon Ads developer application. See\
- \ the docs for more information."
- order: 1
+ username:
+ title: "User"
+ description: "Username to use to access the database."
type: "string"
- client_secret:
- title: "Client Secret"
- description: "The client secret of your Amazon Ads developer application.\
- \ See the docs for more information."
- airbyte_secret: true
- order: 2
+ order: 3
+ password:
+ title: "Password"
+ description: "Password associated with the username."
type: "string"
- title: "Amazon Ads Spec"
- github:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client Id"
- description: "OAuth Client Id"
- airbyte_secret: true
- client_secret:
- type: "string"
- title: "Client ssecret"
- description: "OAuth Client secret"
- airbyte_secret: true
- title: "GitHub Source Spec"
- square:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "The Square-issued ID of your application"
- airbyte_secret: true
- client_secret:
- type: "string"
- title: "Client Secret"
- description: "The Square-issued application secret for your application"
- airbyte_secret: true
- title: "Square Spec"
- mailchimp:
- properties:
- credentials:
- properties:
- client_id:
- title: "Client ID"
- type: "string"
- description: "The Client ID of your OAuth application."
- airbyte_secret: true
- client_secret:
- title: "Client Secret"
- type: "string"
- description: "The Client Secret of your OAuth application."
- airbyte_secret: true
- title: "Mailchimp Spec"
- airtable:
- properties:
- credentials:
- properties:
- client_id:
- type: "string"
- title: "Client ID"
- description: "The client ID of the Airtable developer application."
- airbyte_secret: true
- client_secret:
- type: "string"
- title: "Client secret"
- description: "The client secret the Airtable developer application."
- airbyte_secret: true
- title: "Airtable Source Spec"
- salesforce:
- properties:
- client_id:
- title: "Client ID"
- description: "Enter your Salesforce developer application's Client ID"
+ airbyte_secret: true
+ order: 4
+ x-speakeasy-param-sensitive: true
+ jdbc_url_params:
+ description: "Additional properties to pass to the JDBC URL string when\
+ \ connecting to the database formatted as 'key=value' pairs separated\
+ \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
+ title: "JDBC URL Params"
type: "string"
- order: 2
- client_secret:
- title: "Client Secret"
- description: "Enter your Salesforce developer application's Client secret"
+ order: 5
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ enum:
+ - "NO_TUNNEL"
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ enum:
+ - "SSH_KEY_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ x-speakeasy-param-sensitive: true
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ enum:
+ - "SSH_PASSWORD_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ x-speakeasy-param-sensitive: true
+ destinationType:
+ title: "clickhouse"
+ const: "clickhouse"
+ enum:
+ - "clickhouse"
+ order: 0
type: "string"
- airbyte_secret: true
- order: 3
- title: "Salesforce Source Spec"
- lever-hiring:
- properties:
- credentials:
- properties:
- client_id:
- title: "Client ID"
- type: "string"
- description: "The Client ID of your Lever Hiring developer application."
- client_secret:
- title: "Client Secret"
- type: "string"
- description: "The Client Secret of your Lever Hiring developer application."
- airbyte_secret: true
- title: "Lever Hiring Source Spec"
- destination-gcs:
- title: "GCS Destination Spec"
+ destination-clickhouse-update:
+ title: "ClickHouse Destination Spec"
type: "object"
required:
- - "gcs_bucket_name"
- - "gcs_bucket_path"
- - "credential"
- - "format"
- - "destinationType"
+ - "host"
+ - "port"
+ - "database"
+ - "username"
properties:
- gcs_bucket_name:
- title: "GCS Bucket Name"
- order: 1
+ host:
+ title: "Host"
+ description: "Hostname of the database."
type: "string"
- description: "You can find the bucket name in the App Engine Admin console\
- \ Application Settings page, under the label Google Cloud Storage Bucket.\
- \ Read more here."
+ order: 0
+ port:
+ title: "Port"
+ description: "HTTP port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 8123
examples:
- - "airbyte_sync"
- gcs_bucket_path:
- title: "GCS Bucket Path"
- description: "GCS Bucket Path string Subdirectory under the above bucket\
- \ to sync the data into."
- order: 2
+ - "8123"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "Name of the database."
type: "string"
- examples:
- - "data_sync/test"
- gcs_bucket_region:
- title: "GCS Bucket Region"
+ order: 2
+ username:
+ title: "User"
+ description: "Username to use to access the database."
type: "string"
order: 3
- default: "us"
- description: "Select a Region of the GCS Bucket. Read more here."
- enum:
- - "northamerica-northeast1"
- - "northamerica-northeast2"
- - "us-central1"
- - "us-east1"
- - "us-east4"
- - "us-west1"
- - "us-west2"
- - "us-west3"
- - "us-west4"
- - "southamerica-east1"
- - "southamerica-west1"
- - "europe-central2"
- - "europe-north1"
- - "europe-west1"
- - "europe-west2"
- - "europe-west3"
- - "europe-west4"
- - "europe-west6"
- - "asia-east1"
- - "asia-east2"
- - "asia-northeast1"
- - "asia-northeast2"
- - "asia-northeast3"
- - "asia-south1"
- - "asia-south2"
- - "asia-southeast1"
- - "asia-southeast2"
- - "australia-southeast1"
- - "australia-southeast2"
- - "asia"
- - "eu"
- - "us"
- - "asia1"
- - "eur4"
- - "nam4"
- credential:
- title: "Authentication"
- description: "An HMAC key is a type of credential and can be associated\
- \ with a service account or a user account in Cloud Storage. Read more\
- \ here."
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ jdbc_url_params:
+ description: "Additional properties to pass to the JDBC URL string when\
+ \ connecting to the database formatted as 'key=value' pairs separated\
+ \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
+ title: "JDBC URL Params"
+ type: "string"
+ order: 5
+ tunnel_method:
type: "object"
- order: 0
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
oneOf:
- - title: "HMAC Key"
+ - title: "No Tunnel"
required:
- - "credential_type"
- - "hmac_key_access_id"
- - "hmac_key_secret"
+ - "tunnel_method"
properties:
- credential_type:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
type: "string"
+ const: "NO_TUNNEL"
+ order: 0
enum:
- - "HMAC_KEY"
- default: "HMAC_KEY"
- hmac_key_access_id:
+ - "NO_TUNNEL"
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
type: "string"
- description: "When linked to a service account, this ID is 61 characters\
- \ long; when linked to a user account, it is 24 characters long.\
- \ Read more here."
- title: "Access ID"
- airbyte_secret: true
+ const: "SSH_KEY_AUTH"
order: 0
+ enum:
+ - "SSH_KEY_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
examples:
- - "1234567890abcdefghij1234"
- hmac_key_secret:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
type: "string"
- description: "The corresponding secret for the access ID. It is a\
- \ 40-character base-64 encoded string. Read more here."
- title: "Secret"
airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ enum:
+ - "SSH_PASSWORD_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
examples:
- - "1234567890abcdefghij1234567890ABCDEFGHIJ"
- format:
- title: "Output Format"
- type: "object"
- description: "Output data format. One of the following formats must be selected\
- \ - AVRO format, PARQUET format, CSV format, or JSONL format."
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ destination-mssql:
+ title: "MS SQL Server Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "username"
+ - "database"
+ - "schema"
+ - "destinationType"
+ properties:
+ host:
+ title: "Host"
+ description: "The host name of the MSSQL database."
+ type: "string"
+ order: 0
+ port:
+ title: "Port"
+ description: "The port of the MSSQL database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 1433
+ examples:
+ - "1433"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "The name of the MSSQL database."
+ type: "string"
+ order: 2
+ schema:
+ title: "Default Schema"
+ description: "The default schema tables are written to if the source does\
+ \ not specify a namespace. The usual value for this field is \"public\"\
+ ."
+ type: "string"
+ examples:
+ - "public"
+ default: "public"
+ order: 3
+ username:
+ title: "User"
+ description: "The username which is used to access the database."
+ type: "string"
order: 4
+ password:
+ title: "Password"
+ description: "The password associated with this username."
+ type: "string"
+ airbyte_secret: true
+ order: 5
+ x-speakeasy-param-sensitive: true
+ jdbc_url_params:
+ title: "JDBC URL Params"
+ description: "Additional properties to pass to the JDBC URL string when\
+ \ connecting to the database formatted as 'key=value' pairs separated\
+ \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
+ type: "string"
+ order: 6
+ ssl_method:
+ title: "SSL Method"
+ type: "object"
+ description: "The encryption method which is used to communicate with the\
+ \ database."
+ order: 7
oneOf:
- - title: "Avro: Apache Avro"
+ - title: "Encrypted (trust server certificate)"
+ description: "Use the certificate provided by the server without verification.\
+ \ (For testing purposes only!)"
required:
- - "format_type"
- - "compression_codec"
+ - "ssl_method"
+ type: "object"
properties:
- format_type:
+ ssl_method:
type: "string"
+ const: "encrypted_trust_server_certificate"
enum:
- - "Avro"
- default: "Avro"
- compression_codec:
- title: "Compression Codec"
- description: "The compression algorithm used to compress data. Default\
- \ to no compression."
- type: "object"
- oneOf:
- - title: "No Compression"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "no compression"
- default: "no compression"
- - title: "Deflate"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "Deflate"
- default: "Deflate"
- compression_level:
- title: "Deflate level"
- description: "0: no compression & fastest, 9: best compression\
- \ & slowest."
- type: "integer"
- default: 0
- minimum: 0
- maximum: 9
- - title: "bzip2"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "bzip2"
- default: "bzip2"
- - title: "xz"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "xz"
- default: "xz"
- compression_level:
- title: "Compression Level"
- description: "The presets 0-3 are fast presets with medium compression.\
- \ The presets 4-6 are fairly slow presets with high compression.\
- \ The default preset is 6. The presets 7-9 are like the preset\
- \ 6 but use bigger dictionaries and have higher compressor\
- \ and decompressor memory requirements. Unless the uncompressed\
- \ size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is\
- \ waste of memory to use the presets 7, 8, or 9, respectively.\
- \ Read more here for details."
- type: "integer"
- default: 6
- minimum: 0
- maximum: 9
- - title: "zstandard"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "zstandard"
- default: "zstandard"
- compression_level:
- title: "Compression Level"
- description: "Negative levels are 'fast' modes akin to lz4 or\
- \ snappy, levels above 9 are generally for archival purposes,\
- \ and levels above 18 use a lot of memory."
- type: "integer"
- default: 3
- minimum: -5
- maximum: 22
- include_checksum:
- title: "Include Checksum"
- description: "If true, include a checksum with each data block."
- type: "boolean"
- default: false
- - title: "snappy"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "snappy"
- default: "snappy"
- - title: "CSV: Comma-Separated Values"
+ - "encrypted_trust_server_certificate"
+ default: "encrypted_trust_server_certificate"
+ - title: "Encrypted (verify certificate)"
+ description: "Verify and use the certificate provided by the server."
required:
- - "format_type"
+ - "ssl_method"
+ - "trustStoreName"
+ - "trustStorePassword"
+ type: "object"
properties:
- format_type:
+ ssl_method:
type: "string"
+ const: "encrypted_verify_certificate"
enum:
- - "CSV"
- default: "CSV"
- flattening:
+ - "encrypted_verify_certificate"
+ default: "encrypted_verify_certificate"
+ hostNameInCertificate:
+ title: "Host Name In Certificate"
type: "string"
- title: "Normalization"
- description: "Whether the input JSON data should be normalized (flattened)\
- \ in the output CSV. Please refer to docs for details."
- default: "No flattening"
- enum:
- - "No flattening"
- - "Root level flattening"
- compression:
- title: "Compression"
- type: "object"
- description: "Whether the output files should be compressed. If compression\
- \ is selected, the output filename will have an extra extension\
- \ (GZIP: \".csv.gz\")."
- oneOf:
- - title: "No Compression"
- requires:
- - "compression_type"
- properties:
- compression_type:
- type: "string"
- enum:
- - "No Compression"
- default: "No Compression"
- - title: "GZIP"
- requires:
- - "compression_type"
- properties:
- compression_type:
- type: "string"
- enum:
- - "GZIP"
- default: "GZIP"
- - title: "JSON Lines: newline-delimited JSON"
+ description: "Specifies the host name of the server. The value of\
+ \ this property must match the subject property of the certificate."
+ order: 8
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
required:
- - "format_type"
+ - "tunnel_method"
properties:
- format_type:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
type: "string"
+ const: "NO_TUNNEL"
+ order: 0
enum:
- - "JSONL"
- default: "JSONL"
- compression:
- title: "Compression"
- type: "object"
- description: "Whether the output files should be compressed. If compression\
- \ is selected, the output filename will have an extra extension\
- \ (GZIP: \".jsonl.gz\")."
- oneOf:
- - title: "No Compression"
- requires: "compression_type"
- properties:
- compression_type:
- type: "string"
- enum:
- - "No Compression"
- default: "No Compression"
- - title: "GZIP"
- requires: "compression_type"
- properties:
- compression_type:
- type: "string"
- enum:
- - "GZIP"
- default: "GZIP"
- - title: "Parquet: Columnar Storage"
+ - "NO_TUNNEL"
+ - title: "SSH Key Authentication"
required:
- - "format_type"
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
properties:
- format_type:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
enum:
- - "Parquet"
- default: "Parquet"
- compression_codec:
- title: "Compression Codec"
- description: "The compression algorithm used to compress data pages."
+ - "SSH_KEY_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
type: "string"
- default: "UNCOMPRESSED"
- enum:
- - "UNCOMPRESSED"
- - "SNAPPY"
- - "GZIP"
- - "LZO"
- - "BROTLI"
- - "LZ4"
- - "ZSTD"
- block_size_mb:
- title: "Block Size (Row Group Size) (MB)"
- description: "This is the size of a row group being buffered in memory.\
- \ It limits the memory usage when writing. Larger values will improve\
- \ the IO when reading, but consume more memory when writing. Default:\
- \ 128 MB."
- type: "integer"
- default: 128
- examples:
- - 128
- max_padding_size_mb:
- title: "Max Padding Size (MB)"
- description: "Maximum size allowed as padding to align row groups.\
- \ This is also the minimum size of a row group. Default: 8 MB."
- type: "integer"
- default: 8
- examples:
- - 8
- page_size_kb:
- title: "Page Size (KB)"
- description: "The page size is for compression. A block is composed\
- \ of pages. A page is the smallest unit that must be read fully\
- \ to access a single record. If this value is too small, the compression\
- \ will deteriorate. Default: 1024 KB."
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
type: "integer"
- default: 1024
+ minimum: 0
+ maximum: 65536
+ default: 22
examples:
- - 1024
- dictionary_page_size_kb:
- title: "Dictionary Page Size (KB)"
- description: "There is one dictionary page per column per row group\
- \ when dictionary encoding is used. The dictionary page size works\
- \ like the page size but for dictionary. Default: 1024 KB."
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ x-speakeasy-param-sensitive: true
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ enum:
+ - "SSH_PASSWORD_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
type: "integer"
- default: 1024
+ minimum: 0
+ maximum: 65536
+ default: 22
examples:
- - 1024
- dictionary_encoding:
- title: "Dictionary Encoding"
- description: "Default: true."
- type: "boolean"
- default: true
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ x-speakeasy-param-sensitive: true
destinationType:
- title: "gcs"
- const: "gcs"
+ title: "mssql"
+ const: "mssql"
enum:
- - "gcs"
+ - "mssql"
order: 0
type: "string"
- destination-gcs-update:
- title: "GCS Destination Spec"
+ destination-mssql-update:
+ title: "MS SQL Server Destination Spec"
type: "object"
required:
- - "gcs_bucket_name"
- - "gcs_bucket_path"
- - "credential"
- - "format"
+ - "host"
+ - "port"
+ - "username"
+ - "database"
+ - "schema"
properties:
- gcs_bucket_name:
- title: "GCS Bucket Name"
- order: 1
+ host:
+ title: "Host"
+ description: "The host name of the MSSQL database."
type: "string"
- description: "You can find the bucket name in the App Engine Admin console\
- \ Application Settings page, under the label Google Cloud Storage Bucket.\
- \ Read more here."
+ order: 0
+ port:
+ title: "Port"
+ description: "The port of the MSSQL database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 1433
examples:
- - "airbyte_sync"
- gcs_bucket_path:
- title: "GCS Bucket Path"
- description: "GCS Bucket Path string Subdirectory under the above bucket\
- \ to sync the data into."
+ - "1433"
+ order: 1
+ database:
+ title: "DB Name"
+ description: "The name of the MSSQL database."
+ type: "string"
order: 2
+ schema:
+ title: "Default Schema"
+ description: "The default schema tables are written to if the source does\
+ \ not specify a namespace. The usual value for this field is \"public\"\
+ ."
type: "string"
examples:
- - "data_sync/test"
- gcs_bucket_region:
- title: "GCS Bucket Region"
- type: "string"
+ - "public"
+ default: "public"
order: 3
- default: "us"
- description: "Select a Region of the GCS Bucket. Read more here."
- enum:
- - "northamerica-northeast1"
- - "northamerica-northeast2"
- - "us-central1"
- - "us-east1"
- - "us-east4"
- - "us-west1"
- - "us-west2"
- - "us-west3"
- - "us-west4"
- - "southamerica-east1"
- - "southamerica-west1"
- - "europe-central2"
- - "europe-north1"
- - "europe-west1"
- - "europe-west2"
- - "europe-west3"
- - "europe-west4"
- - "europe-west6"
- - "asia-east1"
- - "asia-east2"
- - "asia-northeast1"
- - "asia-northeast2"
- - "asia-northeast3"
- - "asia-south1"
- - "asia-south2"
- - "asia-southeast1"
- - "asia-southeast2"
- - "australia-southeast1"
- - "australia-southeast2"
- - "asia"
- - "eu"
- - "us"
- - "asia1"
- - "eur4"
- - "nam4"
- credential:
- title: "Authentication"
- description: "An HMAC key is a type of credential and can be associated\
- \ with a service account or a user account in Cloud Storage. Read more\
- \ here."
- type: "object"
- order: 0
- oneOf:
- - title: "HMAC Key"
- required:
- - "credential_type"
- - "hmac_key_access_id"
- - "hmac_key_secret"
- properties:
- credential_type:
- type: "string"
- enum:
- - "HMAC_KEY"
- default: "HMAC_KEY"
- hmac_key_access_id:
- type: "string"
- description: "When linked to a service account, this ID is 61 characters\
- \ long; when linked to a user account, it is 24 characters long.\
- \ Read more here."
- title: "Access ID"
- airbyte_secret: true
- order: 0
- examples:
- - "1234567890abcdefghij1234"
- hmac_key_secret:
- type: "string"
- description: "The corresponding secret for the access ID. It is a\
- \ 40-character base-64 encoded string. Read more here."
- title: "Secret"
- airbyte_secret: true
- order: 1
- examples:
- - "1234567890abcdefghij1234567890ABCDEFGHIJ"
- format:
- title: "Output Format"
- type: "object"
- description: "Output data format. One of the following formats must be selected\
- \ - AVRO format, PARQUET format, CSV format, or JSONL format."
+ username:
+ title: "User"
+ description: "The username which is used to access the database."
+ type: "string"
order: 4
+ password:
+ title: "Password"
+ description: "The password associated with this username."
+ type: "string"
+ airbyte_secret: true
+ order: 5
+ jdbc_url_params:
+ title: "JDBC URL Params"
+ description: "Additional properties to pass to the JDBC URL string when\
+ \ connecting to the database formatted as 'key=value' pairs separated\
+ \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
+ type: "string"
+ order: 6
+ ssl_method:
+ title: "SSL Method"
+ type: "object"
+ description: "The encryption method which is used to communicate with the\
+ \ database."
+ order: 7
oneOf:
- - title: "Avro: Apache Avro"
+ - title: "Encrypted (trust server certificate)"
+ description: "Use the certificate provided by the server without verification.\
+ \ (For testing purposes only!)"
required:
- - "format_type"
- - "compression_codec"
+ - "ssl_method"
+ type: "object"
properties:
- format_type:
+ ssl_method:
type: "string"
+ const: "encrypted_trust_server_certificate"
enum:
- - "Avro"
- default: "Avro"
- compression_codec:
- title: "Compression Codec"
- description: "The compression algorithm used to compress data. Default\
- \ to no compression."
- type: "object"
- oneOf:
- - title: "No Compression"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "no compression"
- default: "no compression"
- - title: "Deflate"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "Deflate"
- default: "Deflate"
- compression_level:
- title: "Deflate level"
- description: "0: no compression & fastest, 9: best compression\
- \ & slowest."
- type: "integer"
- default: 0
- minimum: 0
- maximum: 9
- - title: "bzip2"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "bzip2"
- default: "bzip2"
- - title: "xz"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "xz"
- default: "xz"
- compression_level:
- title: "Compression Level"
- description: "The presets 0-3 are fast presets with medium compression.\
- \ The presets 4-6 are fairly slow presets with high compression.\
- \ The default preset is 6. The presets 7-9 are like the preset\
- \ 6 but use bigger dictionaries and have higher compressor\
- \ and decompressor memory requirements. Unless the uncompressed\
- \ size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is\
- \ waste of memory to use the presets 7, 8, or 9, respectively.\
- \ Read more here for details."
- type: "integer"
- default: 6
- minimum: 0
- maximum: 9
- - title: "zstandard"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "zstandard"
- default: "zstandard"
- compression_level:
- title: "Compression Level"
- description: "Negative levels are 'fast' modes akin to lz4 or\
- \ snappy, levels above 9 are generally for archival purposes,\
- \ and levels above 18 use a lot of memory."
- type: "integer"
- default: 3
- minimum: -5
- maximum: 22
- include_checksum:
- title: "Include Checksum"
- description: "If true, include a checksum with each data block."
- type: "boolean"
- default: false
- - title: "snappy"
- required:
- - "codec"
- properties:
- codec:
- type: "string"
- enum:
- - "snappy"
- default: "snappy"
- - title: "CSV: Comma-Separated Values"
+ - "encrypted_trust_server_certificate"
+ default: "encrypted_trust_server_certificate"
+ - title: "Encrypted (verify certificate)"
+ description: "Verify and use the certificate provided by the server."
required:
- - "format_type"
+ - "ssl_method"
+ - "trustStoreName"
+ - "trustStorePassword"
+ type: "object"
properties:
- format_type:
+ ssl_method:
type: "string"
+ const: "encrypted_verify_certificate"
enum:
- - "CSV"
- default: "CSV"
- flattening:
+ - "encrypted_verify_certificate"
+ default: "encrypted_verify_certificate"
+ hostNameInCertificate:
+ title: "Host Name In Certificate"
type: "string"
- title: "Normalization"
- description: "Whether the input JSON data should be normalized (flattened)\
- \ in the output CSV. Please refer to docs for details."
- default: "No flattening"
- enum:
- - "No flattening"
- - "Root level flattening"
- compression:
- title: "Compression"
- type: "object"
- description: "Whether the output files should be compressed. If compression\
- \ is selected, the output filename will have an extra extension\
- \ (GZIP: \".csv.gz\")."
- oneOf:
- - title: "No Compression"
- requires:
- - "compression_type"
- properties:
- compression_type:
- type: "string"
- enum:
- - "No Compression"
- default: "No Compression"
- - title: "GZIP"
- requires:
- - "compression_type"
- properties:
- compression_type:
- type: "string"
- enum:
- - "GZIP"
- default: "GZIP"
- - title: "JSON Lines: newline-delimited JSON"
+ description: "Specifies the host name of the server. The value of\
+ \ this property must match the subject property of the certificate."
+ order: 8
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
required:
- - "format_type"
+ - "tunnel_method"
properties:
- format_type:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
type: "string"
+ const: "NO_TUNNEL"
+ order: 0
enum:
- - "JSONL"
- default: "JSONL"
- compression:
- title: "Compression"
- type: "object"
- description: "Whether the output files should be compressed. If compression\
- \ is selected, the output filename will have an extra extension\
- \ (GZIP: \".jsonl.gz\")."
- oneOf:
- - title: "No Compression"
- requires: "compression_type"
- properties:
- compression_type:
- type: "string"
- enum:
- - "No Compression"
- default: "No Compression"
- - title: "GZIP"
- requires: "compression_type"
- properties:
- compression_type:
- type: "string"
- enum:
- - "GZIP"
- default: "GZIP"
- - title: "Parquet: Columnar Storage"
+ - "NO_TUNNEL"
+ - title: "SSH Key Authentication"
required:
- - "format_type"
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
properties:
- format_type:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
enum:
- - "Parquet"
- default: "Parquet"
- compression_codec:
- title: "Compression Codec"
- description: "The compression algorithm used to compress data pages."
+ - "SSH_KEY_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
type: "string"
- default: "UNCOMPRESSED"
- enum:
- - "UNCOMPRESSED"
- - "SNAPPY"
- - "GZIP"
- - "LZO"
- - "BROTLI"
- - "LZ4"
- - "ZSTD"
- block_size_mb:
- title: "Block Size (Row Group Size) (MB)"
- description: "This is the size of a row group being buffered in memory.\
- \ It limits the memory usage when writing. Larger values will improve\
- \ the IO when reading, but consume more memory when writing. Default:\
- \ 128 MB."
- type: "integer"
- default: 128
- examples:
- - 128
- max_padding_size_mb:
- title: "Max Padding Size (MB)"
- description: "Maximum size allowed as padding to align row groups.\
- \ This is also the minimum size of a row group. Default: 8 MB."
- type: "integer"
- default: 8
- examples:
- - 8
- page_size_kb:
- title: "Page Size (KB)"
- description: "The page size is for compression. A block is composed\
- \ of pages. A page is the smallest unit that must be read fully\
- \ to access a single record. If this value is too small, the compression\
- \ will deteriorate. Default: 1024 KB."
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
type: "integer"
- default: 1024
+ minimum: 0
+ maximum: 65536
+ default: 22
examples:
- - 1024
- dictionary_page_size_kb:
- title: "Dictionary Page Size (KB)"
- description: "There is one dictionary page per column per row group\
- \ when dictionary encoding is used. The dictionary page size works\
- \ like the page size but for dictionary. Default: 1024 KB."
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ enum:
+ - "SSH_PASSWORD_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
type: "integer"
- default: 1024
+ minimum: 0
+ maximum: 65536
+ default: 22
examples:
- - 1024
- dictionary_encoding:
- title: "Dictionary Encoding"
- description: "Default: true."
- type: "boolean"
- default: true
- destination-xata:
- title: "Destination Xata"
- type: "object"
- required:
- - "api_key"
- - "db_url"
- - "destinationType"
- properties:
- api_key:
- title: "API Key"
- description: "API Key to connect."
- type: "string"
- order: 0
- airbyte_secret: true
- db_url:
- title: "Database URL"
- description: "URL pointing to your workspace."
- type: "string"
- order: 1
- example: "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main"
- destinationType:
- title: "xata"
- const: "xata"
- enum:
- - "xata"
- order: 0
- type: "string"
- destination-xata-update:
- title: "Destination Xata"
- type: "object"
- required:
- - "api_key"
- - "db_url"
- properties:
- api_key:
- title: "API Key"
- description: "API Key to connect."
- type: "string"
- order: 0
- airbyte_secret: true
- db_url:
- title: "Database URL"
- description: "URL pointing to your workspace."
- type: "string"
- order: 1
- example: "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main"
- destination-clickhouse:
- title: "ClickHouse Destination Spec"
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ destination-mysql:
+ title: "MySQL Destination Spec"
type: "object"
required:
- "host"
- "port"
- - "database"
- "username"
+ - "database"
- "destinationType"
properties:
host:
@@ -49936,13 +58996,13 @@ components:
order: 0
port:
title: "Port"
- description: "HTTP port of the database."
+ description: "Port of the database."
type: "integer"
minimum: 0
maximum: 65536
- default: 8123
+ default: 3306
examples:
- - "8123"
+ - "3306"
order: 1
database:
title: "DB Name"
@@ -49960,13 +59020,14 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
jdbc_url_params:
description: "Additional properties to pass to the JDBC URL string when\
\ connecting to the database formatted as 'key=value' pairs separated\
\ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
title: "JDBC URL Params"
type: "string"
- order: 5
+ order: 6
tunnel_method:
type: "object"
title: "SSH Tunnel Method"
@@ -50030,6 +59091,7 @@ components:
airbyte_secret: true
multiline: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Password Authentication"
required:
- "tunnel_method"
@@ -50074,21 +59136,22 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
destinationType:
- title: "clickhouse"
- const: "clickhouse"
+ title: "mysql"
+ const: "mysql"
enum:
- - "clickhouse"
+ - "mysql"
order: 0
type: "string"
- destination-clickhouse-update:
- title: "ClickHouse Destination Spec"
+ destination-mysql-update:
+ title: "MySQL Destination Spec"
type: "object"
required:
- "host"
- "port"
- - "database"
- "username"
+ - "database"
properties:
host:
title: "Host"
@@ -50097,13 +59160,13 @@ components:
order: 0
port:
title: "Port"
- description: "HTTP port of the database."
+ description: "Port of the database."
type: "integer"
minimum: 0
maximum: 65536
- default: 8123
+ default: 3306
examples:
- - "8123"
+ - "3306"
order: 1
database:
title: "DB Name"
@@ -50127,7 +59190,7 @@ components:
\ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
title: "JDBC URL Params"
type: "string"
- order: 5
+ order: 6
tunnel_method:
type: "object"
title: "SSH Tunnel Method"
@@ -50203,892 +59266,1191 @@ components:
description: "Connect through a jump server tunnel host using username\
\ and password authentication"
type: "string"
- const: "SSH_PASSWORD_AUTH"
- order: 0
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ enum:
+ - "SSH_PASSWORD_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ destination-pubsub:
+ title: "Google PubSub Destination Spec"
+ type: "object"
+ required:
+ - "project_id"
+ - "topic_id"
+ - "credentials_json"
+ - "ordering_enabled"
+ - "batching_enabled"
+ - "destinationType"
+ properties:
+ project_id:
+ type: "string"
+ description: "The GCP project ID for the project containing the target PubSub."
+ title: "Project ID"
+ topic_id:
+ type: "string"
+ description: "The PubSub topic ID in the given GCP project ID."
+ title: "PubSub Topic ID"
+ credentials_json:
+ type: "string"
+ description: "The contents of the JSON service account key. Check out the\
+ \ docs if you need help generating this key."
+ title: "Credentials JSON"
+ airbyte_secret: true
+ ordering_enabled:
+ title: "Message Ordering Enabled"
+ description: "If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key\
+ \ of stream"
+ type: "boolean"
+ default: false
+ batching_enabled:
+ type: "boolean"
+ title: "Message Batching Enabled"
+ description: "If TRUE messages will be buffered instead of sending them\
+ \ one by one"
+ default: false
+ batching_delay_threshold:
+ type: "integer"
+ title: "Message Batching: Delay Threshold"
+ description: "Number of ms before the buffer is flushed"
+ default: 1
+ minimum: 1
+ batching_element_count_threshold:
+ type: "integer"
+ title: "Message Batching: Element Count Threshold"
+ description: "Number of messages before the buffer is flushed"
+ default: 1
+ minimum: 1
+ batching_request_bytes_threshold:
+ type: "integer"
+ title: "Message Batching: Request Bytes Threshold"
+ description: "Number of bytes before the buffer is flushed"
+ default: 1
+ minimum: 1
+ destinationType:
+ title: "pubsub"
+ const: "pubsub"
+ enum:
+ - "pubsub"
+ order: 0
+ type: "string"
+ destination-pubsub-update:
+ title: "Google PubSub Destination Spec"
+ type: "object"
+ required:
+ - "project_id"
+ - "topic_id"
+ - "credentials_json"
+ - "ordering_enabled"
+ - "batching_enabled"
+ properties:
+ project_id:
+ type: "string"
+ description: "The GCP project ID for the project containing the target PubSub."
+ title: "Project ID"
+ topic_id:
+ type: "string"
+ description: "The PubSub topic ID in the given GCP project ID."
+ title: "PubSub Topic ID"
+ credentials_json:
+ type: "string"
+ description: "The contents of the JSON service account key. Check out the\
+ \ docs if you need help generating this key."
+ title: "Credentials JSON"
+ airbyte_secret: true
+ ordering_enabled:
+ title: "Message Ordering Enabled"
+ description: "If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key\
+ \ of stream"
+ type: "boolean"
+ default: false
+ batching_enabled:
+ type: "boolean"
+ title: "Message Batching Enabled"
+ description: "If TRUE messages will be buffered instead of sending them\
+ \ one by one"
+ default: false
+ batching_delay_threshold:
+ type: "integer"
+ title: "Message Batching: Delay Threshold"
+ description: "Number of ms before the buffer is flushed"
+ default: 1
+ minimum: 1
+ batching_element_count_threshold:
+ type: "integer"
+ title: "Message Batching: Element Count Threshold"
+ description: "Number of messages before the buffer is flushed"
+ default: 1
+ minimum: 1
+ batching_request_bytes_threshold:
+ type: "integer"
+ title: "Message Batching: Request Bytes Threshold"
+ description: "Number of bytes before the buffer is flushed"
+ default: 1
+ minimum: 1
+ destination-weaviate:
+ title: "Weaviate Destination Config"
+ type: "object"
+ properties:
+ processing:
+ title: "ProcessingConfigModel"
+ type: "object"
+ properties:
+ chunk_size:
+ title: "Chunk size"
+ description: "Size of chunks in tokens to store in vector store (make\
+ \ sure it is not too big for the context if your LLM)"
+ maximum: 8191
+ minimum: 1
+ type: "integer"
+ chunk_overlap:
+ title: "Chunk overlap"
+ description: "Size of overlap between chunks in tokens to store in vector\
+ \ store to better capture relevant context"
+ default: 0
+ type: "integer"
+ text_fields:
+ title: "Text fields to embed"
+ description: "List of fields in the record that should be used to calculate\
+ \ the embedding. The field list is applied to all streams in the same\
+ \ way and non-existing fields are ignored. If none are defined, all\
+ \ fields are considered text fields. When specifying text fields,\
+ \ you can access nested fields in the record by using dot notation,\
+ \ e.g. `user.name` will access the `name` field in the `user` object.\
+ \ It's also possible to use wildcards to access all fields in an object,\
+ \ e.g. `users.*.name` will access all `names` fields in all entries\
+ \ of the `users` array."
+ default: []
+ always_show: true
+ examples:
+ - "text"
+ - "user.name"
+ - "users.*.name"
+ type: "array"
+ items:
+ type: "string"
+ metadata_fields:
+ title: "Fields to store as metadata"
+ description: "List of fields in the record that should be stored as\
+ \ metadata. The field list is applied to all streams in the same way\
+ \ and non-existing fields are ignored. If none are defined, all fields\
+ \ are considered metadata fields. When specifying text fields, you\
+ \ can access nested fields in the record by using dot notation, e.g.\
+ \ `user.name` will access the `name` field in the `user` object. It's\
+ \ also possible to use wildcards to access all fields in an object,\
+ \ e.g. `users.*.name` will access all `names` fields in all entries\
+ \ of the `users` array. When specifying nested paths, all matching\
+ \ values are flattened into an array set to a field named by the path."
+ default: []
+ always_show: true
+ examples:
+ - "age"
+ - "user"
+ - "user.name"
+ type: "array"
+ items:
+ type: "string"
+ text_splitter:
+ title: "Text splitter"
+ description: "Split text fields into chunks based on the specified method."
+ type: "object"
+ oneOf:
+ - title: "By Separator"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "separator"
+ const: "separator"
+ enum:
+ - "separator"
+ type: "string"
+ separators:
+ title: "Separators"
+ description: "List of separator strings to split text fields by.\
+ \ The separator itself needs to be wrapped in double quotes,\
+ \ e.g. to split by the dot character, use \".\". To split by\
+ \ a newline, use \"\\n\"."
+ default:
+ - "\"\\n\\n\""
+ - "\"\\n\""
+ - "\" \""
+ - "\"\""
+ type: "array"
+ items:
+ type: "string"
+ keep_separator:
+ title: "Keep separator"
+ description: "Whether to keep the separator in the resulting chunks"
+ default: false
+ type: "boolean"
+ description: "Split the text by the list of separators until the chunk\
+ \ size is reached, using the earlier mentioned separators where\
+ \ possible. This is useful for splitting text fields by paragraphs,\
+ \ sentences, words, etc."
+ required:
+ - "mode"
+ - title: "By Markdown header"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "markdown"
+ const: "markdown"
+ enum:
+ - "markdown"
+ type: "string"
+ split_level:
+ title: "Split level"
+ description: "Level of markdown headers to split text fields by.\
+ \ Headings down to the specified level will be used as split\
+ \ points"
+ default: 1
+ minimum: 1
+ maximum: 6
+ type: "integer"
+ description: "Split the text by Markdown headers down to the specified\
+ \ header level. If the chunk size fits multiple sections, they will\
+ \ be combined into a single chunk."
+ required:
+ - "mode"
+ - title: "By Programming Language"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "code"
+ const: "code"
+ enum:
+ - "code"
+ type: "string"
+ language:
+ title: "Language"
+ description: "Split code in suitable places based on the programming\
+ \ language"
+ enum:
+ - "cpp"
+ - "go"
+ - "java"
+ - "js"
+ - "php"
+ - "proto"
+ - "python"
+ - "rst"
+ - "ruby"
+ - "rust"
+ - "scala"
+ - "swift"
+ - "markdown"
+ - "latex"
+ - "html"
+ - "sol"
+ type: "string"
+ required:
+ - "language"
+ - "mode"
+ description: "Split the text by suitable delimiters based on the programming\
+ \ language. This is useful for splitting code into chunks."
+ field_name_mappings:
+ title: "Field name mappings"
+ description: "List of fields to rename. Not applicable for nested fields,\
+ \ but can be used to rename fields already flattened via dot notation."
+ default: []
+ type: "array"
+ items:
+ title: "FieldNameMappingConfigModel"
+ type: "object"
+ properties:
+ from_field:
+ title: "From field name"
+ description: "The field name in the source"
+ type: "string"
+ to_field:
+ title: "To field name"
+ description: "The field name to use in the destination"
+ type: "string"
+ required:
+ - "from_field"
+ - "to_field"
+ required:
+ - "chunk_size"
+ group: "processing"
+ embedding:
+ title: "Embedding"
+ description: "Embedding configuration"
+ group: "embedding"
+ type: "object"
+ oneOf:
+ - title: "No external embedding"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "no_embedding"
+ const: "no_embedding"
+ enum:
+ - "no_embedding"
+ type: "string"
+ description: "Do not calculate and pass embeddings to Weaviate. Suitable\
+ \ for clusters with configured vectorizers to calculate embeddings within\
+ \ Weaviate or for classes that should only support regular text search."
+ required:
+ - "mode"
+ - title: "Azure OpenAI"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "azure_openai"
+ const: "azure_openai"
enum:
- - "SSH_PASSWORD_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
+ - "azure_openai"
type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
+ openai_key:
+ title: "Azure OpenAI API key"
+ description: "The API key for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ api_base:
+ title: "Resource base URL"
+ description: "The base URL for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host"
+ - "https://your-resource-name.openai.azure.com"
type: "string"
- order: 3
- tunnel_user_password:
- title: "Password"
- description: "OS-level password for logging into the jump server host"
+ deployment:
+ title: "Deployment"
+ description: "The deployment for your Azure OpenAI resource. You\
+ \ can find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "your-resource-name"
type: "string"
- airbyte_secret: true
- order: 4
- destination-mssql:
- title: "MS SQL Server Destination Spec"
- type: "object"
- required:
- - "host"
- - "port"
- - "username"
- - "database"
- - "schema"
- - "destinationType"
- properties:
- host:
- title: "Host"
- description: "The host name of the MSSQL database."
- type: "string"
- order: 0
- port:
- title: "Port"
- description: "The port of the MSSQL database."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 1433
- examples:
- - "1433"
- order: 1
- database:
- title: "DB Name"
- description: "The name of the MSSQL database."
- type: "string"
- order: 2
- schema:
- title: "Default Schema"
- description: "The default schema tables are written to if the source does\
- \ not specify a namespace. The usual value for this field is \"public\"\
- ."
- type: "string"
- examples:
- - "public"
- default: "public"
- order: 3
- username:
- title: "User"
- description: "The username which is used to access the database."
- type: "string"
- order: 4
- password:
- title: "Password"
- description: "The password associated with this username."
- type: "string"
- airbyte_secret: true
- order: 5
- jdbc_url_params:
- title: "JDBC URL Params"
- description: "Additional properties to pass to the JDBC URL string when\
- \ connecting to the database formatted as 'key=value' pairs separated\
- \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
- type: "string"
- order: 6
- ssl_method:
- title: "SSL Method"
- type: "object"
- description: "The encryption method which is used to communicate with the\
- \ database."
- order: 7
- oneOf:
- - title: "Encrypted (trust server certificate)"
- description: "Use the certificate provided by the server without verification.\
- \ (For testing purposes only!)"
required:
- - "ssl_method"
+ - "openai_key"
+ - "api_base"
+ - "deployment"
+ - "mode"
+ description: "Use the Azure-hosted OpenAI API to embed text. This option\
+ \ is using the text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "OpenAI"
type: "object"
properties:
- ssl_method:
- type: "string"
- const: "encrypted_trust_server_certificate"
+ mode:
+ title: "Mode"
+ default: "openai"
+ const: "openai"
enum:
- - "encrypted_trust_server_certificate"
- default: "encrypted_trust_server_certificate"
- - title: "Encrypted (verify certificate)"
- description: "Verify and use the certificate provided by the server."
+ - "openai"
+ type: "string"
+ openai_key:
+ title: "OpenAI API key"
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
required:
- - "ssl_method"
- - "trustStoreName"
- - "trustStorePassword"
+ - "openai_key"
+ - "mode"
+ description: "Use the OpenAI API to embed text. This option is using the\
+ \ text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "Cohere"
type: "object"
properties:
- ssl_method:
- type: "string"
- const: "encrypted_verify_certificate"
+ mode:
+ title: "Mode"
+ default: "cohere"
+ const: "cohere"
enum:
- - "encrypted_verify_certificate"
- default: "encrypted_verify_certificate"
- hostNameInCertificate:
- title: "Host Name In Certificate"
+ - "cohere"
type: "string"
- description: "Specifies the host name of the server. The value of\
- \ this property must match the subject property of the certificate."
- order: 8
- tunnel_method:
- type: "object"
- title: "SSH Tunnel Method"
- description: "Whether to initiate an SSH tunnel before connecting to the\
- \ database, and if so, which kind of authentication to use."
- oneOf:
- - title: "No Tunnel"
- required:
- - "tunnel_method"
- properties:
- tunnel_method:
- description: "No ssh tunnel needed to connect to database"
+ cohere_key:
+ title: "Cohere API key"
+ airbyte_secret: true
type: "string"
- const: "NO_TUNNEL"
- order: 0
- enum:
- - "NO_TUNNEL"
- - title: "SSH Key Authentication"
+ x-speakeasy-param-sensitive: true
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "ssh_key"
+ - "cohere_key"
+ - "mode"
+ description: "Use the Cohere API to embed text."
+ - title: "From Field"
+ type: "object"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and ssh key"
- type: "string"
- const: "SSH_KEY_AUTH"
- order: 0
+ mode:
+ title: "Mode"
+ default: "from_field"
+ const: "from_field"
enum:
- - "SSH_KEY_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
+ - "from_field"
type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
+ field_name:
+ title: "Field name"
+ description: "Name of the field in the record that contains the embedding"
examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host."
- type: "string"
- order: 3
- ssh_key:
- title: "SSH Private Key"
- description: "OS-level user account ssh key credentials in RSA PEM\
- \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ - "embedding"
+ - "vector"
type: "string"
- airbyte_secret: true
- multiline: true
- order: 4
- - title: "Password Authentication"
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "tunnel_user_password"
+ - "field_name"
+ - "dimensions"
+ - "mode"
+ description: "Use a field in the record as the embedding. This is useful\
+ \ if you already have an embedding for your data and want to store it\
+ \ in the vector store."
+ - title: "Fake"
+ type: "object"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and password authentication"
+ mode:
+ title: "Mode"
+ default: "fake"
+ const: "fake"
+ enum:
+ - "fake"
type: "string"
- const: "SSH_PASSWORD_AUTH"
- order: 0
+ description: "Use a fake embedding made out of random vectors with 1536\
+ \ embedding dimensions. This is useful for testing the data pipeline\
+ \ without incurring any costs."
+ required:
+ - "mode"
+ - title: "OpenAI-compatible"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "openai_compatible"
+ const: "openai_compatible"
enum:
- - "SSH_PASSWORD_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
+ - "openai_compatible"
type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
+ api_key:
+ title: "API key"
+ default: ""
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ base_url:
+ title: "Base URL"
+ description: "The base URL for your OpenAI-compatible service"
examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host"
+ - "https://your-service-name.com"
type: "string"
- order: 3
- tunnel_user_password:
- title: "Password"
- description: "OS-level password for logging into the jump server host"
+ model_name:
+ title: "Model name"
+ description: "The name of the model to use for embedding"
+ default: "text-embedding-ada-002"
+ examples:
+ - "text-embedding-ada-002"
type: "string"
- airbyte_secret: true
- order: 4
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
+ required:
+ - "base_url"
+ - "dimensions"
+ - "mode"
+ description: "Use a service that's compatible with the OpenAI API to embed\
+ \ text."
+ indexing:
+ title: "Indexing"
+ type: "object"
+ properties:
+ host:
+ title: "Public Endpoint"
+ description: "The public endpoint of the Weaviate cluster."
+ order: 1
+ examples:
+ - "https://my-cluster.weaviate.network"
+ type: "string"
+ auth:
+ title: "Authentication"
+ description: "Authentication method"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "API Token"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "token"
+ const: "token"
+ enum:
+ - "token"
+ type: "string"
+ token:
+ title: "API Token"
+ description: "API Token for the Weaviate instance"
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ required:
+ - "token"
+ - "mode"
+ description: "Authenticate using an API token (suitable for Weaviate\
+ \ Cloud)"
+ - title: "Username/Password"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "username_password"
+ const: "username_password"
+ enum:
+ - "username_password"
+ type: "string"
+ username:
+ title: "Username"
+ description: "Username for the Weaviate cluster"
+ order: 1
+ type: "string"
+ password:
+ title: "Password"
+ description: "Password for the Weaviate cluster"
+ airbyte_secret: true
+ order: 2
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ required:
+ - "username"
+ - "password"
+ - "mode"
+ description: "Authenticate using username and password (suitable for\
+ \ self-managed Weaviate clusters)"
+ - title: "No Authentication"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "no_auth"
+ const: "no_auth"
+ enum:
+ - "no_auth"
+ type: "string"
+ description: "Do not authenticate (suitable for locally running test\
+ \ clusters, do not use for clusters with public IP addresses)"
+ required:
+ - "mode"
+ batch_size:
+ title: "Batch Size"
+ description: "The number of records to send to Weaviate in each batch"
+ default: 128
+ type: "integer"
+ text_field:
+ title: "Text Field"
+ description: "The field in the object that contains the embedded text"
+ default: "text"
+ type: "string"
+ default_vectorizer:
+ title: "Default Vectorizer"
+ description: "The vectorizer to use if new classes need to be created"
+ default: "none"
+ enum:
+ - "none"
+ - "text2vec-cohere"
+ - "text2vec-huggingface"
+ - "text2vec-openai"
+ - "text2vec-palm"
+ - "text2vec-contextionary"
+ - "text2vec-transformers"
+ - "text2vec-gpt4all"
+ type: "string"
+ additional_headers:
+ title: "Additional headers"
+ description: "Additional HTTP headers to send with every request."
+ default: []
+ examples:
+ - header_key: "X-OpenAI-Api-Key"
+ value: "my-openai-api-key"
+ type: "array"
+ items:
+ title: "Header"
+ type: "object"
+ properties:
+ header_key:
+ title: "Header Key"
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ value:
+ title: "Header Value"
+ airbyte_secret: true
+ type: "string"
+ required:
+ - "header_key"
+ - "value"
+ required:
+ - "host"
+ - "auth"
+ group: "indexing"
+ description: "Indexing configuration"
destinationType:
- title: "mssql"
- const: "mssql"
+ title: "weaviate"
+ const: "weaviate"
enum:
- - "mssql"
+ - "weaviate"
order: 0
type: "string"
- destination-mssql-update:
- title: "MS SQL Server Destination Spec"
- type: "object"
required:
- - "host"
- - "port"
- - "username"
- - "database"
- - "schema"
+ - "processing"
+ - "embedding"
+ - "indexing"
+ - "destinationType"
+ groups:
+ - id: "processing"
+ title: "Processing"
+ - id: "embedding"
+ title: "Embedding"
+ - id: "indexing"
+ title: "Indexing"
+ destination-weaviate-update:
+ title: "Weaviate Destination Config"
+ type: "object"
properties:
- host:
- title: "Host"
- description: "The host name of the MSSQL database."
- type: "string"
- order: 0
- port:
- title: "Port"
- description: "The port of the MSSQL database."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 1433
- examples:
- - "1433"
- order: 1
- database:
- title: "DB Name"
- description: "The name of the MSSQL database."
- type: "string"
- order: 2
- schema:
- title: "Default Schema"
- description: "The default schema tables are written to if the source does\
- \ not specify a namespace. The usual value for this field is \"public\"\
- ."
- type: "string"
- examples:
- - "public"
- default: "public"
- order: 3
- username:
- title: "User"
- description: "The username which is used to access the database."
- type: "string"
- order: 4
- password:
- title: "Password"
- description: "The password associated with this username."
- type: "string"
- airbyte_secret: true
- order: 5
- jdbc_url_params:
- title: "JDBC URL Params"
- description: "Additional properties to pass to the JDBC URL string when\
- \ connecting to the database formatted as 'key=value' pairs separated\
- \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
- type: "string"
- order: 6
- ssl_method:
- title: "SSL Method"
+ processing:
+ title: "ProcessingConfigModel"
+ type: "object"
+ properties:
+ chunk_size:
+ title: "Chunk size"
+ description: "Size of chunks in tokens to store in vector store (make\
+ \ sure it is not too big for the context if your LLM)"
+ maximum: 8191
+ minimum: 1
+ type: "integer"
+ chunk_overlap:
+ title: "Chunk overlap"
+ description: "Size of overlap between chunks in tokens to store in vector\
+ \ store to better capture relevant context"
+ default: 0
+ type: "integer"
+ text_fields:
+ title: "Text fields to embed"
+ description: "List of fields in the record that should be used to calculate\
+ \ the embedding. The field list is applied to all streams in the same\
+ \ way and non-existing fields are ignored. If none are defined, all\
+ \ fields are considered text fields. When specifying text fields,\
+ \ you can access nested fields in the record by using dot notation,\
+ \ e.g. `user.name` will access the `name` field in the `user` object.\
+ \ It's also possible to use wildcards to access all fields in an object,\
+ \ e.g. `users.*.name` will access all `names` fields in all entries\
+ \ of the `users` array."
+ default: []
+ always_show: true
+ examples:
+ - "text"
+ - "user.name"
+ - "users.*.name"
+ type: "array"
+ items:
+ type: "string"
+ metadata_fields:
+ title: "Fields to store as metadata"
+ description: "List of fields in the record that should be stored as\
+ \ metadata. The field list is applied to all streams in the same way\
+ \ and non-existing fields are ignored. If none are defined, all fields\
+ \ are considered metadata fields. When specifying text fields, you\
+ \ can access nested fields in the record by using dot notation, e.g.\
+ \ `user.name` will access the `name` field in the `user` object. It's\
+ \ also possible to use wildcards to access all fields in an object,\
+ \ e.g. `users.*.name` will access all `names` fields in all entries\
+ \ of the `users` array. When specifying nested paths, all matching\
+ \ values are flattened into an array set to a field named by the path."
+ default: []
+ always_show: true
+ examples:
+ - "age"
+ - "user"
+ - "user.name"
+ type: "array"
+ items:
+ type: "string"
+ text_splitter:
+ title: "Text splitter"
+ description: "Split text fields into chunks based on the specified method."
+ type: "object"
+ oneOf:
+ - title: "By Separator"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "separator"
+ const: "separator"
+ enum:
+ - "separator"
+ type: "string"
+ separators:
+ title: "Separators"
+ description: "List of separator strings to split text fields by.\
+ \ The separator itself needs to be wrapped in double quotes,\
+ \ e.g. to split by the dot character, use \".\". To split by\
+ \ a newline, use \"\\n\"."
+ default:
+ - "\"\\n\\n\""
+ - "\"\\n\""
+ - "\" \""
+ - "\"\""
+ type: "array"
+ items:
+ type: "string"
+ keep_separator:
+ title: "Keep separator"
+ description: "Whether to keep the separator in the resulting chunks"
+ default: false
+ type: "boolean"
+ description: "Split the text by the list of separators until the chunk\
+ \ size is reached, using the earlier mentioned separators where\
+ \ possible. This is useful for splitting text fields by paragraphs,\
+ \ sentences, words, etc."
+ required:
+ - "mode"
+ - title: "By Markdown header"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "markdown"
+ const: "markdown"
+ enum:
+ - "markdown"
+ type: "string"
+ split_level:
+ title: "Split level"
+ description: "Level of markdown headers to split text fields by.\
+ \ Headings down to the specified level will be used as split\
+ \ points"
+ default: 1
+ minimum: 1
+ maximum: 6
+ type: "integer"
+ description: "Split the text by Markdown headers down to the specified\
+ \ header level. If the chunk size fits multiple sections, they will\
+ \ be combined into a single chunk."
+ required:
+ - "mode"
+ - title: "By Programming Language"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "code"
+ const: "code"
+ enum:
+ - "code"
+ type: "string"
+ language:
+ title: "Language"
+ description: "Split code in suitable places based on the programming\
+ \ language"
+ enum:
+ - "cpp"
+ - "go"
+ - "java"
+ - "js"
+ - "php"
+ - "proto"
+ - "python"
+ - "rst"
+ - "ruby"
+ - "rust"
+ - "scala"
+ - "swift"
+ - "markdown"
+ - "latex"
+ - "html"
+ - "sol"
+ type: "string"
+ required:
+ - "language"
+ - "mode"
+ description: "Split the text by suitable delimiters based on the programming\
+ \ language. This is useful for splitting code into chunks."
+ field_name_mappings:
+ title: "Field name mappings"
+ description: "List of fields to rename. Not applicable for nested fields,\
+ \ but can be used to rename fields already flattened via dot notation."
+ default: []
+ type: "array"
+ items:
+ title: "FieldNameMappingConfigModel"
+ type: "object"
+ properties:
+ from_field:
+ title: "From field name"
+ description: "The field name in the source"
+ type: "string"
+ to_field:
+ title: "To field name"
+ description: "The field name to use in the destination"
+ type: "string"
+ required:
+ - "from_field"
+ - "to_field"
+ required:
+ - "chunk_size"
+ group: "processing"
+ embedding:
+ title: "Embedding"
+ description: "Embedding configuration"
+ group: "embedding"
type: "object"
- description: "The encryption method which is used to communicate with the\
- \ database."
- order: 7
oneOf:
- - title: "Encrypted (trust server certificate)"
- description: "Use the certificate provided by the server without verification.\
- \ (For testing purposes only!)"
- required:
- - "ssl_method"
- type: "object"
- properties:
- ssl_method:
- type: "string"
- const: "encrypted_trust_server_certificate"
- enum:
- - "encrypted_trust_server_certificate"
- default: "encrypted_trust_server_certificate"
- - title: "Encrypted (verify certificate)"
- description: "Verify and use the certificate provided by the server."
- required:
- - "ssl_method"
- - "trustStoreName"
- - "trustStorePassword"
+ - title: "No external embedding"
type: "object"
properties:
- ssl_method:
- type: "string"
- const: "encrypted_verify_certificate"
+ mode:
+ title: "Mode"
+ default: "no_embedding"
+ const: "no_embedding"
enum:
- - "encrypted_verify_certificate"
- default: "encrypted_verify_certificate"
- hostNameInCertificate:
- title: "Host Name In Certificate"
+ - "no_embedding"
type: "string"
- description: "Specifies the host name of the server. The value of\
- \ this property must match the subject property of the certificate."
- order: 8
- tunnel_method:
- type: "object"
- title: "SSH Tunnel Method"
- description: "Whether to initiate an SSH tunnel before connecting to the\
- \ database, and if so, which kind of authentication to use."
- oneOf:
- - title: "No Tunnel"
+ description: "Do not calculate and pass embeddings to Weaviate. Suitable\
+ \ for clusters with configured vectorizers to calculate embeddings within\
+ \ Weaviate or for classes that should only support regular text search."
required:
- - "tunnel_method"
+ - "mode"
+ - title: "Azure OpenAI"
+ type: "object"
properties:
- tunnel_method:
- description: "No ssh tunnel needed to connect to database"
- type: "string"
- const: "NO_TUNNEL"
- order: 0
+ mode:
+ title: "Mode"
+ default: "azure_openai"
+ const: "azure_openai"
enum:
- - "NO_TUNNEL"
- - title: "SSH Key Authentication"
- required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "ssh_key"
- properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and ssh key"
+ - "azure_openai"
type: "string"
- const: "SSH_KEY_AUTH"
- order: 0
- enum:
- - "SSH_KEY_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
+ openai_key:
+ title: "Azure OpenAI API key"
+ description: "The API key for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ airbyte_secret: true
type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
+ api_base:
+ title: "Resource base URL"
+ description: "The base URL for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host."
+ - "https://your-resource-name.openai.azure.com"
type: "string"
- order: 3
- ssh_key:
- title: "SSH Private Key"
- description: "OS-level user account ssh key credentials in RSA PEM\
- \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ deployment:
+ title: "Deployment"
+ description: "The deployment for your Azure OpenAI resource. You\
+ \ can find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "your-resource-name"
type: "string"
- airbyte_secret: true
- multiline: true
- order: 4
- - title: "Password Authentication"
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "tunnel_user_password"
+ - "openai_key"
+ - "api_base"
+ - "deployment"
+ - "mode"
+ description: "Use the Azure-hosted OpenAI API to embed text. This option\
+ \ is using the text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "OpenAI"
+ type: "object"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and password authentication"
- type: "string"
- const: "SSH_PASSWORD_AUTH"
- order: 0
+ mode:
+ title: "Mode"
+ default: "openai"
+ const: "openai"
enum:
- - "SSH_PASSWORD_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
- type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
- examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host"
- type: "string"
- order: 3
- tunnel_user_password:
- title: "Password"
- description: "OS-level password for logging into the jump server host"
+ - "openai"
type: "string"
+ openai_key:
+ title: "OpenAI API key"
airbyte_secret: true
- order: 4
- destination-mysql:
- title: "MySQL Destination Spec"
- type: "object"
- required:
- - "host"
- - "port"
- - "username"
- - "database"
- - "destinationType"
- properties:
- host:
- title: "Host"
- description: "Hostname of the database."
- type: "string"
- order: 0
- port:
- title: "Port"
- description: "Port of the database."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 3306
- examples:
- - "3306"
- order: 1
- database:
- title: "DB Name"
- description: "Name of the database."
- type: "string"
- order: 2
- username:
- title: "User"
- description: "Username to use to access the database."
- type: "string"
- order: 3
- password:
- title: "Password"
- description: "Password associated with the username."
- type: "string"
- airbyte_secret: true
- order: 4
- jdbc_url_params:
- description: "Additional properties to pass to the JDBC URL string when\
- \ connecting to the database formatted as 'key=value' pairs separated\
- \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
- title: "JDBC URL Params"
- type: "string"
- order: 6
- tunnel_method:
- type: "object"
- title: "SSH Tunnel Method"
- description: "Whether to initiate an SSH tunnel before connecting to the\
- \ database, and if so, which kind of authentication to use."
- oneOf:
- - title: "No Tunnel"
- required:
- - "tunnel_method"
- properties:
- tunnel_method:
- description: "No ssh tunnel needed to connect to database"
type: "string"
- const: "NO_TUNNEL"
- order: 0
- enum:
- - "NO_TUNNEL"
- - title: "SSH Key Authentication"
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "ssh_key"
+ - "openai_key"
+ - "mode"
+ description: "Use the OpenAI API to embed text. This option is using the\
+ \ text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "Cohere"
+ type: "object"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and ssh key"
- type: "string"
- const: "SSH_KEY_AUTH"
- order: 0
+ mode:
+ title: "Mode"
+ default: "cohere"
+ const: "cohere"
enum:
- - "SSH_KEY_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
- type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
- examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host."
- type: "string"
- order: 3
- ssh_key:
- title: "SSH Private Key"
- description: "OS-level user account ssh key credentials in RSA PEM\
- \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ - "cohere"
type: "string"
+ cohere_key:
+ title: "Cohere API key"
airbyte_secret: true
- multiline: true
- order: 4
- - title: "Password Authentication"
+ type: "string"
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "tunnel_user_password"
+ - "cohere_key"
+ - "mode"
+ description: "Use the Cohere API to embed text."
+ - title: "From Field"
+ type: "object"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and password authentication"
- type: "string"
- const: "SSH_PASSWORD_AUTH"
- order: 0
+ mode:
+ title: "Mode"
+ default: "from_field"
+ const: "from_field"
enum:
- - "SSH_PASSWORD_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
+ - "from_field"
type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
+ field_name:
+ title: "Field name"
+ description: "Name of the field in the record that contains the embedding"
examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host"
- type: "string"
- order: 3
- tunnel_user_password:
- title: "Password"
- description: "OS-level password for logging into the jump server host"
- type: "string"
- airbyte_secret: true
- order: 4
- destinationType:
- title: "mysql"
- const: "mysql"
- enum:
- - "mysql"
- order: 0
- type: "string"
- destination-mysql-update:
- title: "MySQL Destination Spec"
- type: "object"
- required:
- - "host"
- - "port"
- - "username"
- - "database"
- properties:
- host:
- title: "Host"
- description: "Hostname of the database."
- type: "string"
- order: 0
- port:
- title: "Port"
- description: "Port of the database."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 3306
- examples:
- - "3306"
- order: 1
- database:
- title: "DB Name"
- description: "Name of the database."
- type: "string"
- order: 2
- username:
- title: "User"
- description: "Username to use to access the database."
- type: "string"
- order: 3
- password:
- title: "Password"
- description: "Password associated with the username."
- type: "string"
- airbyte_secret: true
- order: 4
- jdbc_url_params:
- description: "Additional properties to pass to the JDBC URL string when\
- \ connecting to the database formatted as 'key=value' pairs separated\
- \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
- title: "JDBC URL Params"
- type: "string"
- order: 6
- tunnel_method:
- type: "object"
- title: "SSH Tunnel Method"
- description: "Whether to initiate an SSH tunnel before connecting to the\
- \ database, and if so, which kind of authentication to use."
- oneOf:
- - title: "No Tunnel"
- required:
- - "tunnel_method"
- properties:
- tunnel_method:
- description: "No ssh tunnel needed to connect to database"
+ - "embedding"
+ - "vector"
type: "string"
- const: "NO_TUNNEL"
- order: 0
- enum:
- - "NO_TUNNEL"
- - title: "SSH Key Authentication"
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "ssh_key"
+ - "field_name"
+ - "dimensions"
+ - "mode"
+ description: "Use a field in the record as the embedding. This is useful\
+ \ if you already have an embedding for your data and want to store it\
+ \ in the vector store."
+ - title: "Fake"
+ type: "object"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and ssh key"
- type: "string"
- const: "SSH_KEY_AUTH"
- order: 0
+ mode:
+ title: "Mode"
+ default: "fake"
+ const: "fake"
enum:
- - "SSH_KEY_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
- type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
- examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host."
- type: "string"
- order: 3
- ssh_key:
- title: "SSH Private Key"
- description: "OS-level user account ssh key credentials in RSA PEM\
- \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ - "fake"
type: "string"
- airbyte_secret: true
- multiline: true
- order: 4
- - title: "Password Authentication"
+ description: "Use a fake embedding made out of random vectors with 1536\
+ \ embedding dimensions. This is useful for testing the data pipeline\
+ \ without incurring any costs."
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "tunnel_user_password"
+ - "mode"
+ - title: "OpenAI-compatible"
+ type: "object"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and password authentication"
- type: "string"
- const: "SSH_PASSWORD_AUTH"
- order: 0
+ mode:
+ title: "Mode"
+ default: "openai_compatible"
+ const: "openai_compatible"
enum:
- - "SSH_PASSWORD_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
+ - "openai_compatible"
type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
+ api_key:
+ title: "API key"
+ default: ""
+ airbyte_secret: true
+ type: "string"
+ base_url:
+ title: "Base URL"
+ description: "The base URL for your OpenAI-compatible service"
examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host"
+ - "https://your-service-name.com"
type: "string"
- order: 3
- tunnel_user_password:
- title: "Password"
- description: "OS-level password for logging into the jump server host"
+ model_name:
+ title: "Model name"
+ description: "The name of the model to use for embedding"
+ default: "text-embedding-ada-002"
+ examples:
+ - "text-embedding-ada-002"
type: "string"
- airbyte_secret: true
- order: 4
- destination-pubsub:
- title: "Google PubSub Destination Spec"
- type: "object"
- required:
- - "project_id"
- - "topic_id"
- - "credentials_json"
- - "ordering_enabled"
- - "batching_enabled"
- - "destinationType"
- properties:
- project_id:
- type: "string"
- description: "The GCP project ID for the project containing the target PubSub."
- title: "Project ID"
- topic_id:
- type: "string"
- description: "The PubSub topic ID in the given GCP project ID."
- title: "PubSub Topic ID"
- credentials_json:
- type: "string"
- description: "The contents of the JSON service account key. Check out the\
- \ docs if you need help generating this key."
- title: "Credentials JSON"
- airbyte_secret: true
- ordering_enabled:
- title: "Message Ordering Enabled"
- description: "If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key\
- \ of stream"
- type: "boolean"
- default: false
- batching_enabled:
- type: "boolean"
- title: "Message Batching Enabled"
- description: "If TRUE messages will be buffered instead of sending them\
- \ one by one"
- default: false
- batching_delay_threshold:
- type: "integer"
- title: "Message Batching: Delay Threshold"
- description: "Number of ms before the buffer is flushed"
- default: 1
- minimum: 1
- batching_element_count_threshold:
- type: "integer"
- title: "Message Batching: Element Count Threshold"
- description: "Number of messages before the buffer is flushed"
- default: 1
- minimum: 1
- batching_request_bytes_threshold:
- type: "integer"
- title: "Message Batching: Request Bytes Threshold"
- description: "Number of bytes before the buffer is flushed"
- default: 1
- minimum: 1
- destinationType:
- title: "pubsub"
- const: "pubsub"
- enum:
- - "pubsub"
- order: 0
- type: "string"
- destination-pubsub-update:
- title: "Google PubSub Destination Spec"
- type: "object"
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
+ required:
+ - "base_url"
+ - "dimensions"
+ - "mode"
+ description: "Use a service that's compatible with the OpenAI API to embed\
+ \ text."
+ indexing:
+ title: "Indexing"
+ type: "object"
+ properties:
+ host:
+ title: "Public Endpoint"
+ description: "The public endpoint of the Weaviate cluster."
+ order: 1
+ examples:
+ - "https://my-cluster.weaviate.network"
+ type: "string"
+ auth:
+ title: "Authentication"
+ description: "Authentication method"
+ type: "object"
+ order: 2
+ oneOf:
+ - title: "API Token"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "token"
+ const: "token"
+ enum:
+ - "token"
+ type: "string"
+ token:
+ title: "API Token"
+ description: "API Token for the Weaviate instance"
+ airbyte_secret: true
+ type: "string"
+ required:
+ - "token"
+ - "mode"
+ description: "Authenticate using an API token (suitable for Weaviate\
+ \ Cloud)"
+ - title: "Username/Password"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "username_password"
+ const: "username_password"
+ enum:
+ - "username_password"
+ type: "string"
+ username:
+ title: "Username"
+ description: "Username for the Weaviate cluster"
+ order: 1
+ type: "string"
+ password:
+ title: "Password"
+ description: "Password for the Weaviate cluster"
+ airbyte_secret: true
+ order: 2
+ type: "string"
+ required:
+ - "username"
+ - "password"
+ - "mode"
+ description: "Authenticate using username and password (suitable for\
+ \ self-managed Weaviate clusters)"
+ - title: "No Authentication"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "no_auth"
+ const: "no_auth"
+ enum:
+ - "no_auth"
+ type: "string"
+ description: "Do not authenticate (suitable for locally running test\
+ \ clusters, do not use for clusters with public IP addresses)"
+ required:
+ - "mode"
+ batch_size:
+ title: "Batch Size"
+ description: "The number of records to send to Weaviate in each batch"
+ default: 128
+ type: "integer"
+ text_field:
+ title: "Text Field"
+ description: "The field in the object that contains the embedded text"
+ default: "text"
+ type: "string"
+ default_vectorizer:
+ title: "Default Vectorizer"
+ description: "The vectorizer to use if new classes need to be created"
+ default: "none"
+ enum:
+ - "none"
+ - "text2vec-cohere"
+ - "text2vec-huggingface"
+ - "text2vec-openai"
+ - "text2vec-palm"
+ - "text2vec-contextionary"
+ - "text2vec-transformers"
+ - "text2vec-gpt4all"
+ type: "string"
+ additional_headers:
+ title: "Additional headers"
+ description: "Additional HTTP headers to send with every request."
+ default: []
+ examples:
+ - header_key: "X-OpenAI-Api-Key"
+ value: "my-openai-api-key"
+ type: "array"
+ items:
+ title: "Header"
+ type: "object"
+ properties:
+ header_key:
+ title: "Header Key"
+ type: "string"
+ value:
+ title: "Header Value"
+ airbyte_secret: true
+ type: "string"
+ required:
+ - "header_key"
+ - "value"
+ required:
+ - "host"
+ - "auth"
+ group: "indexing"
+ description: "Indexing configuration"
required:
- - "project_id"
- - "topic_id"
- - "credentials_json"
- - "ordering_enabled"
- - "batching_enabled"
- properties:
- project_id:
- type: "string"
- description: "The GCP project ID for the project containing the target PubSub."
- title: "Project ID"
- topic_id:
- type: "string"
- description: "The PubSub topic ID in the given GCP project ID."
- title: "PubSub Topic ID"
- credentials_json:
- type: "string"
- description: "The contents of the JSON service account key. Check out the\
- \ docs if you need help generating this key."
- title: "Credentials JSON"
- airbyte_secret: true
- ordering_enabled:
- title: "Message Ordering Enabled"
- description: "If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key\
- \ of stream"
- type: "boolean"
- default: false
- batching_enabled:
- type: "boolean"
- title: "Message Batching Enabled"
- description: "If TRUE messages will be buffered instead of sending them\
- \ one by one"
- default: false
- batching_delay_threshold:
- type: "integer"
- title: "Message Batching: Delay Threshold"
- description: "Number of ms before the buffer is flushed"
- default: 1
- minimum: 1
- batching_element_count_threshold:
- type: "integer"
- title: "Message Batching: Element Count Threshold"
- description: "Number of messages before the buffer is flushed"
- default: 1
- minimum: 1
- batching_request_bytes_threshold:
- type: "integer"
- title: "Message Batching: Request Bytes Threshold"
- description: "Number of bytes before the buffer is flushed"
- default: 1
- minimum: 1
+ - "processing"
+ - "embedding"
+ - "indexing"
+ groups:
+ - id: "processing"
+ title: "Processing"
+ - id: "embedding"
+ title: "Embedding"
+ - id: "indexing"
+ title: "Indexing"
destination-keen:
title: "Keen Spec"
type: "object"
@@ -51112,6 +60474,7 @@ components:
examples:
- "ABCDEFGHIJKLMNOPRSTUWXYZ"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
infer_timestamp:
title: "Infer Timestamp"
description: "Allow connector to guess keen.timestamp value based on the\
@@ -51277,6 +60640,7 @@ components:
type: "string"
airbyte_secret: true
order: 2
+ x-speakeasy-param-sensitive: true
tunnel_method:
type: "object"
title: "SSH Tunnel Method"
@@ -51340,6 +60704,7 @@ components:
airbyte_secret: true
multiline: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Password Authentication"
required:
- "tunnel_method"
@@ -51384,6 +60749,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
destinationType:
title: "mongodb"
const: "mongodb"
@@ -51644,6 +61010,7 @@ components:
examples:
- "A012345678910EXAMPLE"
order: 0
+ x-speakeasy-param-sensitive: true
secret_access_key:
type: "string"
description: "The corresponding secret to the access key ID. Read more docs if you need help generating this key. Default credentials will\
+ \ be used if this field is left empty."
+ title: "Credentials JSON"
+ airbyte_secret: true
+ destination-redshift:
+ title: "Redshift Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "port"
+ - "database"
+ - "username"
+ - "password"
+ - "schema"
+ - "destinationType"
+ properties:
+ host:
+ description: "Host Endpoint of the Redshift Cluster (must include the cluster-id,\
+ \ region and end with .redshift.amazonaws.com)"
+ type: "string"
+ title: "Host"
+ group: "connection"
+ order: 1
+ port:
+ description: "Port of the database."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 5439
+ examples:
+ - "5439"
+ title: "Port"
+ group: "connection"
+ order: 2
+ username:
+ description: "Username to use to access the database."
+ type: "string"
+ title: "Username"
+ group: "connection"
+ order: 3
+ password:
+ description: "Password associated with the username."
+ type: "string"
+ airbyte_secret: true
+ title: "Password"
+ group: "connection"
+ order: 4
+ x-speakeasy-param-sensitive: true
+ database:
+ description: "Name of the database."
+ type: "string"
+ title: "Database"
+ group: "connection"
+ order: 5
+ schema:
+ description: "The default schema tables are written to if the source does\
+ \ not specify a namespace. Unless specifically configured, the usual value\
+ \ for this field is \"public\"."
+ type: "string"
+ examples:
+ - "public"
+ default: "public"
+ group: "connection"
+ title: "Default Schema"
+ order: 6
+ jdbc_url_params:
+ title: "JDBC URL Params"
+ description: "Additional properties to pass to the JDBC URL string when\
+ \ connecting to the database formatted as 'key=value' pairs separated\
+ \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
+ type: "string"
+ group: "connection"
+ order: 7
+ uploading_method:
+ title: "Uploading Method"
+ type: "object"
+ description: "The way data will be uploaded to Redshift."
+ group: "connection"
+ order: 8
+ display_type: "radio"
+ oneOf:
+ - title: "S3 Staging"
+ description: "(recommended) Uploads data to S3 and then uses a\
+ \ COPY to insert the data into Redshift. COPY is recommended for production\
+ \ workloads for better speed and scalability. See AWS docs for more details."
+ required:
+ - "method"
+ - "s3_bucket_name"
+ - "s3_bucket_region"
+ - "access_key_id"
+ - "secret_access_key"
+ properties:
+ method:
+ type: "string"
+ const: "S3 Staging"
+ enum:
+ - "S3 Staging"
+ s3_bucket_name:
+ title: "S3 Bucket Name"
+ type: "string"
+ description: "The name of the staging S3 bucket."
+ examples:
+ - "airbyte.staging"
+ s3_bucket_path:
+ title: "S3 Bucket Path"
+ type: "string"
+ description: "The directory under the S3 bucket where data will be\
+ \ written. If not provided, then defaults to the root directory.\
+ \ See path's name recommendations for more details."
+ examples:
+ - "data_sync/test"
+ s3_bucket_region:
+ title: "S3 Bucket Region"
+ type: "string"
+ default: ""
+ description: "The region of the S3 staging bucket."
+ enum:
+ - ""
+ - "us-east-1"
+ - "us-east-2"
+ - "us-west-1"
+ - "us-west-2"
+ - "af-south-1"
+ - "ap-east-1"
+ - "ap-south-1"
+ - "ap-northeast-1"
+ - "ap-northeast-2"
+ - "ap-northeast-3"
+ - "ap-southeast-1"
+ - "ap-southeast-2"
+ - "ca-central-1"
+ - "cn-north-1"
+ - "cn-northwest-1"
+ - "eu-central-1"
+ - "eu-north-1"
+ - "eu-south-1"
+ - "eu-west-1"
+ - "eu-west-2"
+ - "eu-west-3"
+ - "sa-east-1"
+ - "me-south-1"
+ file_name_pattern:
+ type: "string"
+ description: "The pattern allows you to set the file-name format for\
+ \ the S3 staging file(s)"
+ title: "S3 Filename pattern"
+ examples:
+ - "{date}"
+ - "{date:yyyy_MM}"
+ - "{timestamp}"
+ - "{part_number}"
+ - "{sync_id}"
+ order: 8
+ access_key_id:
+ type: "string"
+ description: "This ID grants access to the above S3 staging bucket.\
+ \ Airbyte requires Read and Write permissions to the given bucket.\
+ \ See AWS docs on how to generate an access key ID and secret access\
+ \ key."
+ title: "S3 Key Id"
+ airbyte_secret: true
+ x-speakeasy-param-sensitive: true
+ secret_access_key:
+ type: "string"
+ description: "The corresponding secret to the above access key id.\
+ \ See AWS docs on how to generate an access key ID and secret access\
+ \ key."
+ title: "S3 Access Key"
+ airbyte_secret: true
+ x-speakeasy-param-sensitive: true
+ purge_staging_data:
+ title: "Purge Staging Files and Tables"
+ type: "boolean"
+ description: "Whether to delete the staging files from S3 after completing\
+ \ the sync. See docs for details."
+ default: true
+ encryption:
+ title: "Encryption"
+ type: "object"
+ description: "How to encrypt the staging data"
+ default:
+ encryption_type: "none"
+ oneOf:
+ - title: "No encryption"
+ description: "Staging data will be stored in plaintext."
+ type: "object"
+ required:
+ - "encryption_type"
+ properties:
+ encryption_type:
+ type: "string"
+ const: "none"
+ enum:
+ - "none"
+ default: "none"
+ - title: "AES-CBC envelope encryption"
+ description: "Staging data will be encrypted using AES-CBC envelope\
+ \ encryption."
+ type: "object"
+ required:
+ - "encryption_type"
+ properties:
+ encryption_type:
+ type: "string"
+ const: "aes_cbc_envelope"
+ enum:
+ - "aes_cbc_envelope"
+ default: "aes_cbc_envelope"
+ key_encrypting_key:
+ type: "string"
+ title: "Key"
+ description: "The key, base64-encoded. Must be either 128, 192,\
+ \ or 256 bits. Leave blank to have Airbyte generate an ephemeral\
+ \ key for each sync."
+ airbyte_secret: true
+ x-speakeasy-param-sensitive: true
+ file_buffer_count:
+ title: "File Buffer Count"
+ type: "integer"
+ minimum: 10
+ maximum: 50
+ default: 10
+ description: "Number of file buffers allocated for writing data. Increasing\
+ \ this number is beneficial for connections using Change Data Capture\
+ \ (CDC) and up to the number of streams within a connection. Increasing\
+ \ the number of file buffers past the maximum number of streams\
+ \ has deteriorating effects"
+ examples:
+ - "10"
+ - title: "Standard"
+ required:
+ - "method"
+ description: "(not recommended) Direct loading using SQL INSERT\
+ \ statements. This method is extremely inefficient and provided only\
+ \ for quick testing. In all other cases, you should use S3 uploading."
+ properties:
+ method:
+ type: "string"
+ const: "Standard"
+ enum:
+ - "Standard"
+ tunnel_method:
+ type: "object"
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ enum:
+ - "NO_TUNNEL"
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ enum:
+ - "SSH_KEY_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ x-speakeasy-param-sensitive: true
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ enum:
+ - "SSH_PASSWORD_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ x-speakeasy-param-sensitive: true
+ destinationType:
+ title: "redshift"
+ const: "redshift"
enum:
- - "firestore"
+ - "redshift"
order: 0
type: "string"
- destination-firestore-update:
- title: "Destination Google Firestore"
- type: "object"
- required:
- - "project_id"
- properties:
- project_id:
- type: "string"
- description: "The GCP project ID for the project containing the target BigQuery\
- \ dataset."
- title: "Project ID"
- credentials_json:
- type: "string"
- description: "The contents of the JSON service account key. Check out the\
- \ docs if you need help generating this key. Default credentials will\
- \ be used if this field is left empty."
- title: "Credentials JSON"
- airbyte_secret: true
- destination-redshift:
+ groups:
+ - id: "connection"
+ title: "Connection"
+ destination-redshift-update:
title: "Redshift Destination Spec"
type: "object"
required:
@@ -52187,13 +61918,13 @@ components:
- "username"
- "password"
- "schema"
- - "destinationType"
properties:
host:
description: "Host Endpoint of the Redshift Cluster (must include the cluster-id,\
\ region and end with .redshift.amazonaws.com)"
type: "string"
title: "Host"
+ group: "connection"
order: 1
port:
description: "Port of the database."
@@ -52204,22 +61935,26 @@ components:
examples:
- "5439"
title: "Port"
+ group: "connection"
order: 2
username:
description: "Username to use to access the database."
type: "string"
title: "Username"
+ group: "connection"
order: 3
password:
description: "Password associated with the username."
type: "string"
airbyte_secret: true
title: "Password"
+ group: "connection"
order: 4
database:
description: "Name of the database."
type: "string"
title: "Database"
+ group: "connection"
order: 5
schema:
description: "The default schema tables are written to if the source does\
@@ -52229,6 +61964,7 @@ components:
examples:
- "public"
default: "public"
+ group: "connection"
title: "Default Schema"
order: 6
jdbc_url_params:
@@ -52237,23 +61973,21 @@ components:
\ connecting to the database formatted as 'key=value' pairs separated\
\ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
type: "string"
+ group: "connection"
order: 7
uploading_method:
title: "Uploading Method"
type: "object"
- description: "The method how the data will be uploaded to the database."
+ description: "The way data will be uploaded to Redshift."
+ group: "connection"
order: 8
+ display_type: "radio"
oneOf:
- - title: "Standard"
- required:
- - "method"
- properties:
- method:
- type: "string"
- const: "Standard"
- enum:
- - "Standard"
- title: "S3 Staging"
+ description: "(recommended) Uploads data to S3 and then uses a\
+ \ COPY to insert the data into Redshift. COPY is recommended for production\
+ \ workloads for better speed and scalability. See AWS docs for more details."
required:
- "method"
- "s3_bucket_name"
@@ -52269,10 +62003,7 @@ components:
s3_bucket_name:
title: "S3 Bucket Name"
type: "string"
- description: "The name of the staging S3 bucket to use if utilising\
- \ a COPY strategy. COPY is recommended for production workloads\
- \ for better speed and scalability. See AWS docs for more details."
+ description: "The name of the staging S3 bucket."
examples:
- "airbyte.staging"
s3_bucket_path:
@@ -52288,9 +62019,7 @@ components:
title: "S3 Bucket Region"
type: "string"
default: ""
- description: "The region of the S3 staging bucket to use if utilising\
- \ a COPY strategy. See AWS docs for details."
+ description: "The region of the S3 staging bucket."
enum:
- ""
- "us-east-1"
@@ -52404,601 +62133,1223 @@ components:
\ has deteriorating effects"
examples:
- "10"
+ - title: "Standard"
+ required:
+ - "method"
+ description: "(not recommended) Direct loading using SQL INSERT\
+ \ statements. This method is extremely inefficient and provided only\
+ \ for quick testing. In all other cases, you should use S3 uploading."
+ properties:
+ method:
+ type: "string"
+ const: "Standard"
+ enum:
+ - "Standard"
tunnel_method:
type: "object"
- title: "SSH Tunnel Method"
- description: "Whether to initiate an SSH tunnel before connecting to the\
- \ database, and if so, which kind of authentication to use."
+ title: "SSH Tunnel Method"
+ description: "Whether to initiate an SSH tunnel before connecting to the\
+ \ database, and if so, which kind of authentication to use."
+ oneOf:
+ - title: "No Tunnel"
+ required:
+ - "tunnel_method"
+ properties:
+ tunnel_method:
+ description: "No ssh tunnel needed to connect to database"
+ type: "string"
+ const: "NO_TUNNEL"
+ order: 0
+ enum:
+ - "NO_TUNNEL"
+ - title: "SSH Key Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "ssh_key"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and ssh key"
+ type: "string"
+ const: "SSH_KEY_AUTH"
+ order: 0
+ enum:
+ - "SSH_KEY_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host."
+ type: "string"
+ order: 3
+ ssh_key:
+ title: "SSH Private Key"
+ description: "OS-level user account ssh key credentials in RSA PEM\
+ \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ type: "string"
+ airbyte_secret: true
+ multiline: true
+ order: 4
+ - title: "Password Authentication"
+ required:
+ - "tunnel_method"
+ - "tunnel_host"
+ - "tunnel_port"
+ - "tunnel_user"
+ - "tunnel_user_password"
+ properties:
+ tunnel_method:
+ description: "Connect through a jump server tunnel host using username\
+ \ and password authentication"
+ type: "string"
+ const: "SSH_PASSWORD_AUTH"
+ order: 0
+ enum:
+ - "SSH_PASSWORD_AUTH"
+ tunnel_host:
+ title: "SSH Tunnel Jump Server Host"
+ description: "Hostname of the jump server host that allows inbound\
+ \ ssh tunnel."
+ type: "string"
+ order: 1
+ tunnel_port:
+ title: "SSH Connection Port"
+ description: "Port on the proxy/jump server that accepts inbound ssh\
+ \ connections."
+ type: "integer"
+ minimum: 0
+ maximum: 65536
+ default: 22
+ examples:
+ - "22"
+ order: 2
+ tunnel_user:
+ title: "SSH Login Username"
+ description: "OS-level username for logging into the jump server host"
+ type: "string"
+ order: 3
+ tunnel_user_password:
+ title: "Password"
+ description: "OS-level password for logging into the jump server host"
+ type: "string"
+ airbyte_secret: true
+ order: 4
+ groups:
+ - id: "connection"
+ title: "Connection"
+ destination-dynamodb:
+ title: "DynamoDB Destination Spec"
+ type: "object"
+ required:
+ - "dynamodb_table_name_prefix"
+ - "dynamodb_region"
+ - "access_key_id"
+ - "secret_access_key"
+ - "destinationType"
+ properties:
+ dynamodb_endpoint:
+ title: "Endpoint"
+ type: "string"
+ default: ""
+ description: "This is your DynamoDB endpoint url. (if you are working with\
+ \ AWS DynamoDB, just leave empty)."
+ examples:
+ - "http://localhost:9000"
+ dynamodb_table_name_prefix:
+ title: "Table name prefix"
+ type: "string"
+ description: "The prefix to use when naming DynamoDB tables."
+ examples:
+ - "airbyte_sync"
+ dynamodb_region:
+ title: "DynamoDB Region"
+ type: "string"
+ default: ""
+ description: "The region of the DynamoDB."
+ enum:
+ - ""
+ - "us-east-1"
+ - "us-east-2"
+ - "us-west-1"
+ - "us-west-2"
+ - "af-south-1"
+ - "ap-east-1"
+ - "ap-south-1"
+ - "ap-northeast-1"
+ - "ap-northeast-2"
+ - "ap-northeast-3"
+ - "ap-southeast-1"
+ - "ap-southeast-2"
+ - "ca-central-1"
+ - "cn-north-1"
+ - "cn-northwest-1"
+ - "eu-central-1"
+ - "eu-north-1"
+ - "eu-south-1"
+ - "eu-west-1"
+ - "eu-west-2"
+ - "eu-west-3"
+ - "sa-east-1"
+ - "me-south-1"
+ - "us-gov-east-1"
+ - "us-gov-west-1"
+ access_key_id:
+ type: "string"
+ description: "The access key id to access the DynamoDB. Airbyte requires\
+ \ Read and Write permissions to the DynamoDB."
+ title: "DynamoDB Key Id"
+ airbyte_secret: true
+ examples:
+ - "A012345678910EXAMPLE"
+ x-speakeasy-param-sensitive: true
+ secret_access_key:
+ type: "string"
+ description: "The corresponding secret to the access key id."
+ title: "DynamoDB Access Key"
+ airbyte_secret: true
+ examples:
+ - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
+ x-speakeasy-param-sensitive: true
+ destinationType:
+ title: "dynamodb"
+ const: "dynamodb"
+ enum:
+ - "dynamodb"
+ order: 0
+ type: "string"
+ destination-dynamodb-update:
+ title: "DynamoDB Destination Spec"
+ type: "object"
+ required:
+ - "dynamodb_table_name_prefix"
+ - "dynamodb_region"
+ - "access_key_id"
+ - "secret_access_key"
+ properties:
+ dynamodb_endpoint:
+ title: "Endpoint"
+ type: "string"
+ default: ""
+ description: "This is your DynamoDB endpoint url. (if you are working with\
+ \ AWS DynamoDB, just leave empty)."
+ examples:
+ - "http://localhost:9000"
+ dynamodb_table_name_prefix:
+ title: "Table name prefix"
+ type: "string"
+ description: "The prefix to use when naming DynamoDB tables."
+ examples:
+ - "airbyte_sync"
+ dynamodb_region:
+ title: "DynamoDB Region"
+ type: "string"
+ default: ""
+ description: "The region of the DynamoDB."
+ enum:
+ - ""
+ - "us-east-1"
+ - "us-east-2"
+ - "us-west-1"
+ - "us-west-2"
+ - "af-south-1"
+ - "ap-east-1"
+ - "ap-south-1"
+ - "ap-northeast-1"
+ - "ap-northeast-2"
+ - "ap-northeast-3"
+ - "ap-southeast-1"
+ - "ap-southeast-2"
+ - "ca-central-1"
+ - "cn-north-1"
+ - "cn-northwest-1"
+ - "eu-central-1"
+ - "eu-north-1"
+ - "eu-south-1"
+ - "eu-west-1"
+ - "eu-west-2"
+ - "eu-west-3"
+ - "sa-east-1"
+ - "me-south-1"
+ - "us-gov-east-1"
+ - "us-gov-west-1"
+ access_key_id:
+ type: "string"
+ description: "The access key id to access the DynamoDB. Airbyte requires\
+ \ Read and Write permissions to the DynamoDB."
+ title: "DynamoDB Key Id"
+ airbyte_secret: true
+ examples:
+ - "A012345678910EXAMPLE"
+ secret_access_key:
+ type: "string"
+ description: "The corresponding secret to the access key id."
+ title: "DynamoDB Access Key"
+ airbyte_secret: true
+ examples:
+ - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
+ destination-qdrant:
+ title: "Qdrant Destination Config"
+ type: "object"
+ properties:
+ processing:
+ title: "ProcessingConfigModel"
+ type: "object"
+ properties:
+ chunk_size:
+ title: "Chunk size"
+ description: "Size of chunks in tokens to store in vector store (make\
+ \ sure it is not too big for the context of your LLM)"
+ maximum: 8191
+ minimum: 1
+ type: "integer"
+ chunk_overlap:
+ title: "Chunk overlap"
+ description: "Size of overlap between chunks in tokens to store in vector\
+ \ store to better capture relevant context"
+ default: 0
+ type: "integer"
+ text_fields:
+ title: "Text fields to embed"
+ description: "List of fields in the record that should be used to calculate\
+ \ the embedding. The field list is applied to all streams in the same\
+ \ way and non-existing fields are ignored. If none are defined, all\
+ \ fields are considered text fields. When specifying text fields,\
+ \ you can access nested fields in the record by using dot notation,\
+ \ e.g. `user.name` will access the `name` field in the `user` object.\
+ \ It's also possible to use wildcards to access all fields in an object,\
+ \ e.g. `users.*.name` will access all `names` fields in all entries\
+ \ of the `users` array."
+ default: []
+ always_show: true
+ examples:
+ - "text"
+ - "user.name"
+ - "users.*.name"
+ type: "array"
+ items:
+ type: "string"
+ metadata_fields:
+ title: "Fields to store as metadata"
+ description: "List of fields in the record that should be stored as\
+ \ metadata. The field list is applied to all streams in the same way\
+ \ and non-existing fields are ignored. If none are defined, all fields\
+ \ are considered metadata fields. When specifying text fields, you\
+ \ can access nested fields in the record by using dot notation, e.g.\
+ \ `user.name` will access the `name` field in the `user` object. It's\
+ \ also possible to use wildcards to access all fields in an object,\
+ \ e.g. `users.*.name` will access all `names` fields in all entries\
+ \ of the `users` array. When specifying nested paths, all matching\
+ \ values are flattened into an array set to a field named by the path."
+ default: []
+ always_show: true
+ examples:
+ - "age"
+ - "user"
+ - "user.name"
+ type: "array"
+ items:
+ type: "string"
+ text_splitter:
+ title: "Text splitter"
+ description: "Split text fields into chunks based on the specified method."
+ type: "object"
+ oneOf:
+ - title: "By Separator"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "separator"
+ const: "separator"
+ enum:
+ - "separator"
+ type: "string"
+ separators:
+ title: "Separators"
+ description: "List of separator strings to split text fields by.\
+ \ The separator itself needs to be wrapped in double quotes,\
+ \ e.g. to split by the dot character, use \".\". To split by\
+ \ a newline, use \"\\n\"."
+ default:
+ - "\"\\n\\n\""
+ - "\"\\n\""
+ - "\" \""
+ - "\"\""
+ type: "array"
+ items:
+ type: "string"
+ keep_separator:
+ title: "Keep separator"
+ description: "Whether to keep the separator in the resulting chunks"
+ default: false
+ type: "boolean"
+ description: "Split the text by the list of separators until the chunk\
+ \ size is reached, using the earlier mentioned separators where\
+ \ possible. This is useful for splitting text fields by paragraphs,\
+ \ sentences, words, etc."
+ required:
+ - "mode"
+ - title: "By Markdown header"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "markdown"
+ const: "markdown"
+ enum:
+ - "markdown"
+ type: "string"
+ split_level:
+ title: "Split level"
+ description: "Level of markdown headers to split text fields by.\
+ \ Headings down to the specified level will be used as split\
+ \ points"
+ default: 1
+ minimum: 1
+ maximum: 6
+ type: "integer"
+ description: "Split the text by Markdown headers down to the specified\
+ \ header level. If the chunk size fits multiple sections, they will\
+ \ be combined into a single chunk."
+ required:
+ - "mode"
+ - title: "By Programming Language"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "code"
+ const: "code"
+ enum:
+ - "code"
+ type: "string"
+ language:
+ title: "Language"
+ description: "Split code in suitable places based on the programming\
+ \ language"
+ enum:
+ - "cpp"
+ - "go"
+ - "java"
+ - "js"
+ - "php"
+ - "proto"
+ - "python"
+ - "rst"
+ - "ruby"
+ - "rust"
+ - "scala"
+ - "swift"
+ - "markdown"
+ - "latex"
+ - "html"
+ - "sol"
+ type: "string"
+ required:
+ - "language"
+ - "mode"
+ description: "Split the text by suitable delimiters based on the programming\
+ \ language. This is useful for splitting code into chunks."
+ field_name_mappings:
+ title: "Field name mappings"
+ description: "List of fields to rename. Not applicable for nested fields,\
+ \ but can be used to rename fields already flattened via dot notation."
+ default: []
+ type: "array"
+ items:
+ title: "FieldNameMappingConfigModel"
+ type: "object"
+ properties:
+ from_field:
+ title: "From field name"
+ description: "The field name in the source"
+ type: "string"
+ to_field:
+ title: "To field name"
+ description: "The field name to use in the destination"
+ type: "string"
+ required:
+ - "from_field"
+ - "to_field"
+ required:
+ - "chunk_size"
+ group: "processing"
+ embedding:
+ title: "Embedding"
+ description: "Embedding configuration"
+ group: "embedding"
+ type: "object"
oneOf:
- - title: "No Tunnel"
- required:
- - "tunnel_method"
+ - title: "OpenAI"
+ type: "object"
properties:
- tunnel_method:
- description: "No ssh tunnel needed to connect to database"
- type: "string"
- const: "NO_TUNNEL"
- order: 0
+ mode:
+ title: "Mode"
+ default: "openai"
+ const: "openai"
enum:
- - "NO_TUNNEL"
- - title: "SSH Key Authentication"
+ - "openai"
+ type: "string"
+ openai_key:
+ title: "OpenAI API key"
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "ssh_key"
+ - "openai_key"
+ - "mode"
+ description: "Use the OpenAI API to embed text. This option is using the\
+ \ text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "Cohere"
+ type: "object"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and ssh key"
- type: "string"
- const: "SSH_KEY_AUTH"
- order: 0
+ mode:
+ title: "Mode"
+ default: "cohere"
+ const: "cohere"
enum:
- - "SSH_KEY_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
- type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
- examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host."
- type: "string"
- order: 3
- ssh_key:
- title: "SSH Private Key"
- description: "OS-level user account ssh key credentials in RSA PEM\
- \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ - "cohere"
type: "string"
+ cohere_key:
+ title: "Cohere API key"
airbyte_secret: true
- multiline: true
- order: 4
- - title: "Password Authentication"
+ type: "string"
+ x-speakeasy-param-sensitive: true
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "tunnel_user_password"
+ - "cohere_key"
+ - "mode"
+ description: "Use the Cohere API to embed text."
+ - title: "Fake"
+ type: "object"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and password authentication"
+ mode:
+ title: "Mode"
+ default: "fake"
+ const: "fake"
+ enum:
+ - "fake"
type: "string"
- const: "SSH_PASSWORD_AUTH"
- order: 0
+ description: "Use a fake embedding made out of random vectors with 1536\
+ \ embedding dimensions. This is useful for testing the data pipeline\
+ \ without incurring any costs."
+ required:
+ - "mode"
+ - title: "From Field"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "from_field"
+ const: "from_field"
enum:
- - "SSH_PASSWORD_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
+ - "from_field"
type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
+ field_name:
+ title: "Field name"
+ description: "Name of the field in the record that contains the embedding"
examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host"
- type: "string"
- order: 3
- tunnel_user_password:
- title: "Password"
- description: "OS-level password for logging into the jump server host"
+ - "embedding"
+ - "vector"
type: "string"
- airbyte_secret: true
- order: 4
- destinationType:
- title: "redshift"
- const: "redshift"
- enum:
- - "redshift"
- order: 0
- type: "string"
- destination-redshift-update:
- title: "Redshift Destination Spec"
- type: "object"
- required:
- - "host"
- - "port"
- - "database"
- - "username"
- - "password"
- - "schema"
- properties:
- host:
- description: "Host Endpoint of the Redshift Cluster (must include the cluster-id,\
- \ region and end with .redshift.amazonaws.com)"
- type: "string"
- title: "Host"
- order: 1
- port:
- description: "Port of the database."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 5439
- examples:
- - "5439"
- title: "Port"
- order: 2
- username:
- description: "Username to use to access the database."
- type: "string"
- title: "Username"
- order: 3
- password:
- description: "Password associated with the username."
- type: "string"
- airbyte_secret: true
- title: "Password"
- order: 4
- database:
- description: "Name of the database."
- type: "string"
- title: "Database"
- order: 5
- schema:
- description: "The default schema tables are written to if the source does\
- \ not specify a namespace. Unless specifically configured, the usual value\
- \ for this field is \"public\"."
- type: "string"
- examples:
- - "public"
- default: "public"
- title: "Default Schema"
- order: 6
- jdbc_url_params:
- title: "JDBC URL Params"
- description: "Additional properties to pass to the JDBC URL string when\
- \ connecting to the database formatted as 'key=value' pairs separated\
- \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."
- type: "string"
- order: 7
- uploading_method:
- title: "Uploading Method"
- type: "object"
- description: "The method how the data will be uploaded to the database."
- order: 8
- oneOf:
- - title: "Standard"
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
required:
- - "method"
+ - "field_name"
+ - "dimensions"
+ - "mode"
+ description: "Use a field in the record as the embedding. This is useful\
+ \ if you already have an embedding for your data and want to store it\
+ \ in the vector store."
+ - title: "Azure OpenAI"
+ type: "object"
properties:
- method:
- type: "string"
- const: "Standard"
+ mode:
+ title: "Mode"
+ default: "azure_openai"
+ const: "azure_openai"
enum:
- - "Standard"
- - title: "S3 Staging"
- required:
- - "method"
- - "s3_bucket_name"
- - "s3_bucket_region"
- - "access_key_id"
- - "secret_access_key"
- properties:
- method:
+ - "azure_openai"
type: "string"
- const: "S3 Staging"
- enum:
- - "S3 Staging"
- s3_bucket_name:
- title: "S3 Bucket Name"
+ openai_key:
+ title: "Azure OpenAI API key"
+ description: "The API key for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ airbyte_secret: true
type: "string"
- description: "The name of the staging S3 bucket to use if utilising\
- \ a COPY strategy. COPY is recommended for production workloads\
- \ for better speed and scalability. See AWS docs for more details."
+ x-speakeasy-param-sensitive: true
+ api_base:
+ title: "Resource base URL"
+ description: "The base URL for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
examples:
- - "airbyte.staging"
- s3_bucket_path:
- title: "S3 Bucket Path"
+ - "https://your-resource-name.openai.azure.com"
type: "string"
- description: "The directory under the S3 bucket where data will be\
- \ written. If not provided, then defaults to the root directory.\
- \ See path's name recommendations for more details."
+ deployment:
+ title: "Deployment"
+ description: "The deployment for your Azure OpenAI resource. You\
+ \ can find this in the Azure portal under your Azure OpenAI resource"
examples:
- - "data_sync/test"
- s3_bucket_region:
- title: "S3 Bucket Region"
+ - "your-resource-name"
type: "string"
- default: ""
- description: "The region of the S3 staging bucket to use if utilising\
- \ a COPY strategy. See AWS docs for details."
+ required:
+ - "openai_key"
+ - "api_base"
+ - "deployment"
+ - "mode"
+ description: "Use the Azure-hosted OpenAI API to embed text. This option\
+ \ is using the text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "OpenAI-compatible"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "openai_compatible"
+ const: "openai_compatible"
enum:
- - ""
- - "us-east-1"
- - "us-east-2"
- - "us-west-1"
- - "us-west-2"
- - "af-south-1"
- - "ap-east-1"
- - "ap-south-1"
- - "ap-northeast-1"
- - "ap-northeast-2"
- - "ap-northeast-3"
- - "ap-southeast-1"
- - "ap-southeast-2"
- - "ca-central-1"
- - "cn-north-1"
- - "cn-northwest-1"
- - "eu-central-1"
- - "eu-north-1"
- - "eu-south-1"
- - "eu-west-1"
- - "eu-west-2"
- - "eu-west-3"
- - "sa-east-1"
- - "me-south-1"
- file_name_pattern:
+ - "openai_compatible"
type: "string"
- description: "The pattern allows you to set the file-name format for\
- \ the S3 staging file(s)"
- title: "S3 Filename pattern"
+ api_key:
+ title: "API key"
+ default: ""
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ base_url:
+ title: "Base URL"
+ description: "The base URL for your OpenAI-compatible service"
examples:
- - "{date}"
- - "{date:yyyy_MM}"
- - "{timestamp}"
- - "{part_number}"
- - "{sync_id}"
- order: 8
- access_key_id:
+ - "https://your-service-name.com"
type: "string"
- description: "This ID grants access to the above S3 staging bucket.\
- \ Airbyte requires Read and Write permissions to the given bucket.\
- \ See AWS docs on how to generate an access key ID and secret access\
- \ key."
- title: "S3 Key Id"
- airbyte_secret: true
- secret_access_key:
+ model_name:
+ title: "Model name"
+ description: "The name of the model to use for embedding"
+ default: "text-embedding-ada-002"
+ examples:
+ - "text-embedding-ada-002"
type: "string"
- description: "The corresponding secret to the above access key id.\
- \ See AWS docs on how to generate an access key ID and secret access\
- \ key."
- title: "S3 Access Key"
- airbyte_secret: true
- purge_staging_data:
- title: "Purge Staging Files and Tables"
- type: "boolean"
- description: "Whether to delete the staging files from S3 after completing\
- \ the sync. See docs for details."
- default: true
- encryption:
- title: "Encryption"
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
+ required:
+ - "base_url"
+ - "dimensions"
+ - "mode"
+ description: "Use a service that's compatible with the OpenAI API to embed\
+ \ text."
+ indexing:
+ title: "Indexing"
+ type: "object"
+ properties:
+ url:
+ title: "Public Endpoint"
+            description: "Public Endpoint of the Qdrant cluster"
+ order: 0
+ type: "string"
+ auth_method:
+ title: "Authentication Method"
+ description: "Method to authenticate with the Qdrant Instance"
+ default: "api_key_auth"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "ApiKeyAuth"
type: "object"
- description: "How to encrypt the staging data"
- default:
- encryption_type: "none"
- oneOf:
- - title: "No encryption"
- description: "Staging data will be stored in plaintext."
- type: "object"
- required:
- - "encryption_type"
- properties:
- encryption_type:
- type: "string"
- const: "none"
- enum:
- - "none"
- default: "none"
- - title: "AES-CBC envelope encryption"
- description: "Staging data will be encrypted using AES-CBC envelope\
- \ encryption."
- type: "object"
- required:
- - "encryption_type"
- properties:
- encryption_type:
- type: "string"
- const: "aes_cbc_envelope"
- enum:
- - "aes_cbc_envelope"
- default: "aes_cbc_envelope"
- key_encrypting_key:
+ properties:
+ mode:
+ title: "Mode"
+ default: "api_key_auth"
+ const: "api_key_auth"
+ enum:
+ - "api_key_auth"
+ type: "string"
+ api_key:
+ title: "API Key"
+ description: "API Key for the Qdrant instance"
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ required:
+ - "api_key"
+ - title: "NoAuth"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "no_auth"
+ const: "no_auth"
+ enum:
+ - "no_auth"
+ type: "string"
+ prefer_grpc:
+ title: "Prefer gRPC"
+ description: "Whether to prefer gRPC over HTTP. Set to true for Qdrant\
+ \ cloud clusters"
+ default: true
+ type: "boolean"
+ collection:
+ title: "Collection Name"
+ description: "The collection to load data into"
+ order: 2
+ type: "string"
+ distance_metric:
+ title: "Distance Metric"
+ description: "The Distance metric used to measure similarities among\
+ \ vectors. This field is only used if the collection defined in the\
+            \ config does not exist yet and is created automatically by the connector."
+ default: "cos"
+ oneOf:
+ - title: "dot"
+ type: "object"
+ properties:
+ distance_metric:
+ title: "distance_metric"
+ default: "dot"
+ const: "dot"
+ enum:
+ - "dot"
+ type: "string"
+ - title: "cos"
+ type: "object"
+ properties:
+ distance_metric:
+ title: "distance_metric"
+ default: "cos"
+ const: "cos"
+ enum:
+ - "cos"
+ type: "string"
+ - title: "euc"
+ type: "object"
+ properties:
+ distance_metric:
+ title: "distance_metric"
+ default: "euc"
+ const: "euc"
+ enum:
+ - "euc"
+ type: "string"
+ text_field:
+ title: "Text Field"
+ description: "The field in the payload that contains the embedded text"
+ default: "text"
+ type: "string"
+ required:
+ - "url"
+ - "collection"
+ group: "Indexing"
+ description: "Indexing configuration"
+ destinationType:
+ title: "qdrant"
+ const: "qdrant"
+ enum:
+ - "qdrant"
+ order: 0
+ type: "string"
+ required:
+ - "processing"
+ - "embedding"
+ - "indexing"
+ - "destinationType"
+ groups:
+ - id: "processing"
+ title: "Processing"
+ - id: "embedding"
+ title: "Embedding"
+ - id: "indexing"
+ title: "Indexing"
+ destination-qdrant-update:
+ title: "Qdrant Destination Config"
+ type: "object"
+ properties:
+ processing:
+ title: "ProcessingConfigModel"
+ type: "object"
+ properties:
+ chunk_size:
+ title: "Chunk size"
+ description: "Size of chunks in tokens to store in vector store (make\
+          \ sure it is not too big for the context of your LLM)"
+ maximum: 8191
+ minimum: 1
+ type: "integer"
+ chunk_overlap:
+ title: "Chunk overlap"
+ description: "Size of overlap between chunks in tokens to store in vector\
+ \ store to better capture relevant context"
+ default: 0
+ type: "integer"
+ text_fields:
+ title: "Text fields to embed"
+ description: "List of fields in the record that should be used to calculate\
+ \ the embedding. The field list is applied to all streams in the same\
+ \ way and non-existing fields are ignored. If none are defined, all\
+ \ fields are considered text fields. When specifying text fields,\
+ \ you can access nested fields in the record by using dot notation,\
+ \ e.g. `user.name` will access the `name` field in the `user` object.\
+ \ It's also possible to use wildcards to access all fields in an object,\
+ \ e.g. `users.*.name` will access all `names` fields in all entries\
+ \ of the `users` array."
+ default: []
+ always_show: true
+ examples:
+ - "text"
+ - "user.name"
+ - "users.*.name"
+ type: "array"
+ items:
+ type: "string"
+ metadata_fields:
+ title: "Fields to store as metadata"
+ description: "List of fields in the record that should be stored as\
+ \ metadata. The field list is applied to all streams in the same way\
+ \ and non-existing fields are ignored. If none are defined, all fields\
+ \ are considered metadata fields. When specifying text fields, you\
+ \ can access nested fields in the record by using dot notation, e.g.\
+ \ `user.name` will access the `name` field in the `user` object. It's\
+ \ also possible to use wildcards to access all fields in an object,\
+ \ e.g. `users.*.name` will access all `names` fields in all entries\
+ \ of the `users` array. When specifying nested paths, all matching\
+ \ values are flattened into an array set to a field named by the path."
+ default: []
+ always_show: true
+ examples:
+ - "age"
+ - "user"
+ - "user.name"
+ type: "array"
+ items:
+ type: "string"
+ text_splitter:
+ title: "Text splitter"
+ description: "Split text fields into chunks based on the specified method."
+ type: "object"
+ oneOf:
+ - title: "By Separator"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "separator"
+ const: "separator"
+ enum:
+ - "separator"
+ type: "string"
+ separators:
+ title: "Separators"
+ description: "List of separator strings to split text fields by.\
+ \ The separator itself needs to be wrapped in double quotes,\
+ \ e.g. to split by the dot character, use \".\". To split by\
+ \ a newline, use \"\\n\"."
+ default:
+ - "\"\\n\\n\""
+ - "\"\\n\""
+ - "\" \""
+ - "\"\""
+ type: "array"
+ items:
type: "string"
- title: "Key"
- description: "The key, base64-encoded. Must be either 128, 192,\
- \ or 256 bits. Leave blank to have Airbyte generate an ephemeral\
- \ key for each sync."
- airbyte_secret: true
- file_buffer_count:
- title: "File Buffer Count"
- type: "integer"
- minimum: 10
- maximum: 50
- default: 10
- description: "Number of file buffers allocated for writing data. Increasing\
- \ this number is beneficial for connections using Change Data Capture\
- \ (CDC) and up to the number of streams within a connection. Increasing\
- \ the number of file buffers past the maximum number of streams\
- \ has deteriorating effects"
- examples:
- - "10"
- tunnel_method:
+ keep_separator:
+ title: "Keep separator"
+ description: "Whether to keep the separator in the resulting chunks"
+ default: false
+ type: "boolean"
+ description: "Split the text by the list of separators until the chunk\
+ \ size is reached, using the earlier mentioned separators where\
+ \ possible. This is useful for splitting text fields by paragraphs,\
+ \ sentences, words, etc."
+ required:
+ - "mode"
+ - title: "By Markdown header"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "markdown"
+ const: "markdown"
+ enum:
+ - "markdown"
+ type: "string"
+ split_level:
+ title: "Split level"
+ description: "Level of markdown headers to split text fields by.\
+ \ Headings down to the specified level will be used as split\
+ \ points"
+ default: 1
+ minimum: 1
+ maximum: 6
+ type: "integer"
+ description: "Split the text by Markdown headers down to the specified\
+ \ header level. If the chunk size fits multiple sections, they will\
+ \ be combined into a single chunk."
+ required:
+ - "mode"
+ - title: "By Programming Language"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "code"
+ const: "code"
+ enum:
+ - "code"
+ type: "string"
+ language:
+ title: "Language"
+ description: "Split code in suitable places based on the programming\
+ \ language"
+ enum:
+ - "cpp"
+ - "go"
+ - "java"
+ - "js"
+ - "php"
+ - "proto"
+ - "python"
+ - "rst"
+ - "ruby"
+ - "rust"
+ - "scala"
+ - "swift"
+ - "markdown"
+ - "latex"
+ - "html"
+ - "sol"
+ type: "string"
+ required:
+ - "language"
+ - "mode"
+ description: "Split the text by suitable delimiters based on the programming\
+ \ language. This is useful for splitting code into chunks."
+ field_name_mappings:
+ title: "Field name mappings"
+ description: "List of fields to rename. Not applicable for nested fields,\
+ \ but can be used to rename fields already flattened via dot notation."
+ default: []
+ type: "array"
+ items:
+ title: "FieldNameMappingConfigModel"
+ type: "object"
+ properties:
+ from_field:
+ title: "From field name"
+ description: "The field name in the source"
+ type: "string"
+ to_field:
+ title: "To field name"
+ description: "The field name to use in the destination"
+ type: "string"
+ required:
+ - "from_field"
+ - "to_field"
+ required:
+ - "chunk_size"
+ group: "processing"
+ embedding:
+ title: "Embedding"
+ description: "Embedding configuration"
+ group: "embedding"
type: "object"
- title: "SSH Tunnel Method"
- description: "Whether to initiate an SSH tunnel before connecting to the\
- \ database, and if so, which kind of authentication to use."
oneOf:
- - title: "No Tunnel"
- required:
- - "tunnel_method"
+ - title: "OpenAI"
+ type: "object"
properties:
- tunnel_method:
- description: "No ssh tunnel needed to connect to database"
+ mode:
+ title: "Mode"
+ default: "openai"
+ const: "openai"
+ enum:
+ - "openai"
type: "string"
- const: "NO_TUNNEL"
- order: 0
+ openai_key:
+ title: "OpenAI API key"
+ airbyte_secret: true
+ type: "string"
+ required:
+ - "openai_key"
+ - "mode"
+ description: "Use the OpenAI API to embed text. This option is using the\
+ \ text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "Cohere"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "cohere"
+ const: "cohere"
enum:
- - "NO_TUNNEL"
- - title: "SSH Key Authentication"
+ - "cohere"
+ type: "string"
+ cohere_key:
+ title: "Cohere API key"
+ airbyte_secret: true
+ type: "string"
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "ssh_key"
+ - "cohere_key"
+ - "mode"
+ description: "Use the Cohere API to embed text."
+ - title: "Fake"
+ type: "object"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and ssh key"
+ mode:
+ title: "Mode"
+ default: "fake"
+ const: "fake"
+ enum:
+ - "fake"
type: "string"
- const: "SSH_KEY_AUTH"
- order: 0
+ description: "Use a fake embedding made out of random vectors with 1536\
+ \ embedding dimensions. This is useful for testing the data pipeline\
+ \ without incurring any costs."
+ required:
+ - "mode"
+ - title: "From Field"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "from_field"
+ const: "from_field"
enum:
- - "SSH_KEY_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
+ - "from_field"
type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
+ field_name:
+ title: "Field name"
+ description: "Name of the field in the record that contains the embedding"
examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host."
+ - "embedding"
+ - "vector"
type: "string"
- order: 3
- ssh_key:
- title: "SSH Private Key"
- description: "OS-level user account ssh key credentials in RSA PEM\
- \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
+ required:
+ - "field_name"
+ - "dimensions"
+ - "mode"
+ description: "Use a field in the record as the embedding. This is useful\
+ \ if you already have an embedding for your data and want to store it\
+ \ in the vector store."
+ - title: "Azure OpenAI"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "azure_openai"
+ const: "azure_openai"
+ enum:
+ - "azure_openai"
type: "string"
+ openai_key:
+ title: "Azure OpenAI API key"
+ description: "The API key for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
airbyte_secret: true
- multiline: true
- order: 4
- - title: "Password Authentication"
+ type: "string"
+ api_base:
+ title: "Resource base URL"
+ description: "The base URL for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "https://your-resource-name.openai.azure.com"
+ type: "string"
+ deployment:
+ title: "Deployment"
+ description: "The deployment for your Azure OpenAI resource. You\
+ \ can find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "your-resource-name"
+ type: "string"
required:
- - "tunnel_method"
- - "tunnel_host"
- - "tunnel_port"
- - "tunnel_user"
- - "tunnel_user_password"
+ - "openai_key"
+ - "api_base"
+ - "deployment"
+ - "mode"
+ description: "Use the Azure-hosted OpenAI API to embed text. This option\
+ \ is using the text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "OpenAI-compatible"
+ type: "object"
properties:
- tunnel_method:
- description: "Connect through a jump server tunnel host using username\
- \ and password authentication"
- type: "string"
- const: "SSH_PASSWORD_AUTH"
- order: 0
+ mode:
+ title: "Mode"
+ default: "openai_compatible"
+ const: "openai_compatible"
enum:
- - "SSH_PASSWORD_AUTH"
- tunnel_host:
- title: "SSH Tunnel Jump Server Host"
- description: "Hostname of the jump server host that allows inbound\
- \ ssh tunnel."
+ - "openai_compatible"
type: "string"
- order: 1
- tunnel_port:
- title: "SSH Connection Port"
- description: "Port on the proxy/jump server that accepts inbound ssh\
- \ connections."
- type: "integer"
- minimum: 0
- maximum: 65536
- default: 22
+ api_key:
+ title: "API key"
+ default: ""
+ airbyte_secret: true
+ type: "string"
+ base_url:
+ title: "Base URL"
+ description: "The base URL for your OpenAI-compatible service"
examples:
- - "22"
- order: 2
- tunnel_user:
- title: "SSH Login Username"
- description: "OS-level username for logging into the jump server host"
+ - "https://your-service-name.com"
type: "string"
- order: 3
- tunnel_user_password:
- title: "Password"
- description: "OS-level password for logging into the jump server host"
+ model_name:
+ title: "Model name"
+ description: "The name of the model to use for embedding"
+ default: "text-embedding-ada-002"
+ examples:
+ - "text-embedding-ada-002"
type: "string"
- airbyte_secret: true
- order: 4
- destination-dynamodb:
- title: "DynamoDB Destination Spec"
- type: "object"
- required:
- - "dynamodb_table_name_prefix"
- - "dynamodb_region"
- - "access_key_id"
- - "secret_access_key"
- - "destinationType"
- properties:
- dynamodb_endpoint:
- title: "Endpoint"
- type: "string"
- default: ""
- description: "This is your DynamoDB endpoint url.(if you are working with\
- \ AWS DynamoDB, just leave empty)."
- examples:
- - "http://localhost:9000"
- dynamodb_table_name_prefix:
- title: "Table name prefix"
- type: "string"
- description: "The prefix to use when naming DynamoDB tables."
- examples:
- - "airbyte_sync"
- dynamodb_region:
- title: "DynamoDB Region"
- type: "string"
- default: ""
- description: "The region of the DynamoDB."
- enum:
- - ""
- - "us-east-1"
- - "us-east-2"
- - "us-west-1"
- - "us-west-2"
- - "af-south-1"
- - "ap-east-1"
- - "ap-south-1"
- - "ap-northeast-1"
- - "ap-northeast-2"
- - "ap-northeast-3"
- - "ap-southeast-1"
- - "ap-southeast-2"
- - "ca-central-1"
- - "cn-north-1"
- - "cn-northwest-1"
- - "eu-central-1"
- - "eu-north-1"
- - "eu-south-1"
- - "eu-west-1"
- - "eu-west-2"
- - "eu-west-3"
- - "sa-east-1"
- - "me-south-1"
- - "us-gov-east-1"
- - "us-gov-west-1"
- access_key_id:
- type: "string"
- description: "The access key id to access the DynamoDB. Airbyte requires\
- \ Read and Write permissions to the DynamoDB."
- title: "DynamoDB Key Id"
- airbyte_secret: true
- examples:
- - "A012345678910EXAMPLE"
- secret_access_key:
- type: "string"
- description: "The corresponding secret to the access key id."
- title: "DynamoDB Access Key"
- airbyte_secret: true
- examples:
- - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
- destinationType:
- title: "dynamodb"
- const: "dynamodb"
- enum:
- - "dynamodb"
- order: 0
- type: "string"
- destination-dynamodb-update:
- title: "DynamoDB Destination Spec"
- type: "object"
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
+ required:
+ - "base_url"
+ - "dimensions"
+ - "mode"
+ description: "Use a service that's compatible with the OpenAI API to embed\
+ \ text."
+ indexing:
+ title: "Indexing"
+ type: "object"
+ properties:
+ url:
+ title: "Public Endpoint"
+            description: "Public Endpoint of the Qdrant cluster"
+ order: 0
+ type: "string"
+ auth_method:
+ title: "Authentication Method"
+ description: "Method to authenticate with the Qdrant Instance"
+ default: "api_key_auth"
+ type: "object"
+ order: 1
+ oneOf:
+ - title: "ApiKeyAuth"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "api_key_auth"
+ const: "api_key_auth"
+ enum:
+ - "api_key_auth"
+ type: "string"
+ api_key:
+ title: "API Key"
+ description: "API Key for the Qdrant instance"
+ airbyte_secret: true
+ type: "string"
+ required:
+ - "api_key"
+ - title: "NoAuth"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "no_auth"
+ const: "no_auth"
+ enum:
+ - "no_auth"
+ type: "string"
+ prefer_grpc:
+ title: "Prefer gRPC"
+ description: "Whether to prefer gRPC over HTTP. Set to true for Qdrant\
+ \ cloud clusters"
+ default: true
+ type: "boolean"
+ collection:
+ title: "Collection Name"
+ description: "The collection to load data into"
+ order: 2
+ type: "string"
+ distance_metric:
+ title: "Distance Metric"
+ description: "The Distance metric used to measure similarities among\
+ \ vectors. This field is only used if the collection defined in the\
+            \ config does not exist yet and is created automatically by the connector."
+ default: "cos"
+ oneOf:
+ - title: "dot"
+ type: "object"
+ properties:
+ distance_metric:
+ title: "distance_metric"
+ default: "dot"
+ const: "dot"
+ enum:
+ - "dot"
+ type: "string"
+ - title: "cos"
+ type: "object"
+ properties:
+ distance_metric:
+ title: "distance_metric"
+ default: "cos"
+ const: "cos"
+ enum:
+ - "cos"
+ type: "string"
+ - title: "euc"
+ type: "object"
+ properties:
+ distance_metric:
+ title: "distance_metric"
+ default: "euc"
+ const: "euc"
+ enum:
+ - "euc"
+ type: "string"
+ text_field:
+ title: "Text Field"
+ description: "The field in the payload that contains the embedded text"
+ default: "text"
+ type: "string"
+ required:
+ - "url"
+ - "collection"
+ group: "Indexing"
+ description: "Indexing configuration"
required:
- - "dynamodb_table_name_prefix"
- - "dynamodb_region"
- - "access_key_id"
- - "secret_access_key"
- properties:
- dynamodb_endpoint:
- title: "Endpoint"
- type: "string"
- default: ""
- description: "This is your DynamoDB endpoint url.(if you are working with\
- \ AWS DynamoDB, just leave empty)."
- examples:
- - "http://localhost:9000"
- dynamodb_table_name_prefix:
- title: "Table name prefix"
- type: "string"
- description: "The prefix to use when naming DynamoDB tables."
- examples:
- - "airbyte_sync"
- dynamodb_region:
- title: "DynamoDB Region"
- type: "string"
- default: ""
- description: "The region of the DynamoDB."
- enum:
- - ""
- - "us-east-1"
- - "us-east-2"
- - "us-west-1"
- - "us-west-2"
- - "af-south-1"
- - "ap-east-1"
- - "ap-south-1"
- - "ap-northeast-1"
- - "ap-northeast-2"
- - "ap-northeast-3"
- - "ap-southeast-1"
- - "ap-southeast-2"
- - "ca-central-1"
- - "cn-north-1"
- - "cn-northwest-1"
- - "eu-central-1"
- - "eu-north-1"
- - "eu-south-1"
- - "eu-west-1"
- - "eu-west-2"
- - "eu-west-3"
- - "sa-east-1"
- - "me-south-1"
- - "us-gov-east-1"
- - "us-gov-west-1"
- access_key_id:
- type: "string"
- description: "The access key id to access the DynamoDB. Airbyte requires\
- \ Read and Write permissions to the DynamoDB."
- title: "DynamoDB Key Id"
- airbyte_secret: true
- examples:
- - "A012345678910EXAMPLE"
- secret_access_key:
- type: "string"
- description: "The corresponding secret to the access key id."
- title: "DynamoDB Access Key"
- airbyte_secret: true
- examples:
- - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
+ - "processing"
+ - "embedding"
+ - "indexing"
+ groups:
+ - id: "processing"
+ title: "Processing"
+ - id: "embedding"
+ title: "Embedding"
+ - id: "indexing"
+ title: "Indexing"
destination-snowflake:
title: "Snowflake Destination Spec"
type: "object"
@@ -53096,11 +63447,13 @@ components:
title: "Access Token"
description: "Enter you application's Access Token"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
refresh_token:
type: "string"
title: "Refresh Token"
description: "Enter your application's Refresh Token"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "Key Pair Authentication"
type: "object"
order: 1
@@ -53122,11 +63475,13 @@ components:
>docs for more information on how to obtain this key."
multiline: true
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
private_key_password:
type: "string"
title: "Passphrase"
description: "Passphrase for private key"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
- title: "Username and Password"
type: "object"
required:
@@ -53146,6 +63501,7 @@ components:
airbyte_secret: true
title: "Password"
order: 1
+ x-speakeasy-param-sensitive: true
order: 6
jdbc_url_params:
description: "Enter the additional properties to pass to the JDBC URL string\
@@ -53156,9 +63512,18 @@ components:
order: 7
raw_data_schema:
type: "string"
- description: "The schema to write raw tables into"
- title: "Destinations V2 Raw Table Schema"
+ description: "The schema to write raw tables into (default: airbyte_internal)"
+ title: "Raw Table Schema Name"
order: 10
+ disable_type_dedupe:
+ type: "boolean"
+ default: false
+ description: "Disable Writing Final Tables. WARNING! The data format in\
+ \ _airbyte_data is likely stable but there are no guarantees that other\
+ \ metadata columns will remain the same in future versions"
+ title: "Disable Final Tables. (WARNING! Unstable option; Columns in raw\
+ \ table schema might change between versions)"
+ order: 11
destinationType:
title: "snowflake"
const: "snowflake"
@@ -53322,9 +63687,18 @@ components:
order: 7
raw_data_schema:
type: "string"
- description: "The schema to write raw tables into"
- title: "Destinations V2 Raw Table Schema"
+ description: "The schema to write raw tables into (default: airbyte_internal)"
+ title: "Raw Table Schema Name"
order: 10
+ disable_type_dedupe:
+ type: "boolean"
+ default: false
+ description: "Disable Writing Final Tables. WARNING! The data format in\
+ \ _airbyte_data is likely stable but there are no guarantees that other\
+ \ metadata columns will remain the same in future versions"
+ title: "Disable Final Tables. (WARNING! Unstable option; Columns in raw\
+ \ table schema might change between versions)"
+ order: 11
destination-databricks:
title: "Databricks Lakehouse Destination Spec"
type: "object"
@@ -53374,6 +63748,7 @@ components:
- "dapi0123456789abcdefghij0123456789AB"
airbyte_secret: true
order: 5
+ x-speakeasy-param-sensitive: true
database:
title: "Databricks catalog"
description: "The name of the catalog. If not specified otherwise, the \"\
@@ -53488,6 +63863,7 @@ components:
- "A012345678910EXAMPLE"
airbyte_secret: true
order: 5
+ x-speakeasy-param-sensitive: true
s3_secret_access_key:
title: "S3 Secret Access Key"
type: "string"
@@ -53496,6 +63872,7 @@ components:
- "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
airbyte_secret: true
order: 6
+ x-speakeasy-param-sensitive: true
file_name_pattern:
type: "string"
description: "The pattern allows you to set the file-name format for\
@@ -53554,6 +63931,7 @@ components:
examples:
- "?sv=2016-05-31&ss=b&srt=sco&sp=rwdl&se=2018-06-27T10:05:50Z&st=2017-06-27T02:05:50Z&spr=https,http&sig=bgqQwoXwxzuD2GJfagRg7VOS8hzNr3QLT7rhS8OFRLQ%3D"
order: 4
+ x-speakeasy-param-sensitive: true
purge_staging_data:
title: "Purge Staging Files and Tables"
type: "boolean"
@@ -53844,6 +64222,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
jdbc_url_params:
description: "Additional properties to pass to the JDBC URL string when\
\ connecting to the database formatted as 'key=value' pairs separated\
@@ -53926,6 +64305,7 @@ components:
airbyte_secret: true
multiline: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Password Authentication"
required:
- "tunnel_method"
@@ -53970,6 +64350,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
destinationType:
title: "oracle"
const: "oracle"
@@ -54208,11 +64589,13 @@ components:
type: "string"
description: "AWS User Access Key Id"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
aws_secret_access_key:
title: "Secret Access Key"
type: "string"
description: "Secret Access Key"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
order: 2
region:
title: "S3 Bucket Region"
@@ -54272,6 +64655,7 @@ components:
- "pii_level"
type: "string"
order: 7
+ x-speakeasy-param-sensitive: true
lakeformation_database_default_tag_values:
title: "Lake Formation Database Tag Values"
description: "Add default values for the `Tag Key` to databases created\
@@ -54578,6 +64962,7 @@ components:
description: "Size of chunks in tokens to store in vector store (make\
\ sure it is not too big for the context if your LLM)"
maximum: 8191
+ minimum: 1
type: "integer"
chunk_overlap:
title: "Chunk overlap"
@@ -54626,6 +65011,128 @@ components:
type: "array"
items:
type: "string"
+ text_splitter:
+ title: "Text splitter"
+ description: "Split text fields into chunks based on the specified method."
+ type: "object"
+ oneOf:
+ - title: "By Separator"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "separator"
+ const: "separator"
+ enum:
+ - "separator"
+ type: "string"
+ separators:
+ title: "Separators"
+ description: "List of separator strings to split text fields by.\
+ \ The separator itself needs to be wrapped in double quotes,\
+ \ e.g. to split by the dot character, use \".\". To split by\
+ \ a newline, use \"\\n\"."
+ default:
+ - "\"\\n\\n\""
+ - "\"\\n\""
+ - "\" \""
+ - "\"\""
+ type: "array"
+ items:
+ type: "string"
+ keep_separator:
+ title: "Keep separator"
+ description: "Whether to keep the separator in the resulting chunks"
+ default: false
+ type: "boolean"
+ description: "Split the text by the list of separators until the chunk\
+ \ size is reached, using the earlier mentioned separators where\
+ \ possible. This is useful for splitting text fields by paragraphs,\
+ \ sentences, words, etc."
+ required:
+ - "mode"
+ - title: "By Markdown header"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "markdown"
+ const: "markdown"
+ enum:
+ - "markdown"
+ type: "string"
+ split_level:
+ title: "Split level"
+ description: "Level of markdown headers to split text fields by.\
+ \ Headings down to the specified level will be used as split\
+ \ points"
+ default: 1
+ minimum: 1
+ maximum: 6
+ type: "integer"
+ description: "Split the text by Markdown headers down to the specified\
+ \ header level. If the chunk size fits multiple sections, they will\
+ \ be combined into a single chunk."
+ required:
+ - "mode"
+ - title: "By Programming Language"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "code"
+ const: "code"
+ enum:
+ - "code"
+ type: "string"
+ language:
+ title: "Language"
+ description: "Split code in suitable places based on the programming\
+ \ language"
+ enum:
+ - "cpp"
+ - "go"
+ - "java"
+ - "js"
+ - "php"
+ - "proto"
+ - "python"
+ - "rst"
+ - "ruby"
+ - "rust"
+ - "scala"
+ - "swift"
+ - "markdown"
+ - "latex"
+ - "html"
+ - "sol"
+ type: "string"
+ required:
+ - "language"
+ - "mode"
+ description: "Split the text by suitable delimiters based on the programming\
+ \ language. This is useful for splitting code into chunks."
+ field_name_mappings:
+ title: "Field name mappings"
+ description: "List of fields to rename. Not applicable for nested fields,\
+ \ but can be used to rename fields already flattened via dot notation."
+ default: []
+ type: "array"
+ items:
+ title: "FieldNameMappingConfigModel"
+ type: "object"
+ properties:
+ from_field:
+ title: "From field name"
+ description: "The field name in the source"
+ type: "string"
+ to_field:
+ title: "To field name"
+ description: "The field name to use in the destination"
+ type: "string"
+ required:
+ - "from_field"
+ - "to_field"
required:
- "chunk_size"
group: "processing"
@@ -54649,8 +65156,10 @@ components:
title: "OpenAI API key"
airbyte_secret: true
type: "string"
+ x-speakeasy-param-sensitive: true
required:
- "openai_key"
+ - "mode"
description: "Use the OpenAI API to embed text. This option is using the\
\ text-embedding-ada-002 model with 1536 embedding dimensions."
- title: "Cohere"
@@ -54667,8 +65176,10 @@ components:
title: "Cohere API key"
airbyte_secret: true
type: "string"
+ x-speakeasy-param-sensitive: true
required:
- "cohere_key"
+ - "mode"
description: "Use the Cohere API to embed text."
- title: "Fake"
type: "object"
@@ -54683,6 +65194,8 @@ components:
description: "Use a fake embedding made out of random vectors with 1536\
\ embedding dimensions. This is useful for testing the data pipeline\
\ without incurring any costs."
+ required:
+ - "mode"
- title: "From Field"
type: "object"
properties:
@@ -54710,9 +65223,90 @@ components:
required:
- "field_name"
- "dimensions"
+ - "mode"
description: "Use a field in the record as the embedding. This is useful\
\ if you already have an embedding for your data and want to store it\
\ in the vector store."
+ - title: "Azure OpenAI"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "azure_openai"
+ const: "azure_openai"
+ enum:
+ - "azure_openai"
+ type: "string"
+ openai_key:
+ title: "Azure OpenAI API key"
+ description: "The API key for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ api_base:
+ title: "Resource base URL"
+ description: "The base URL for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "https://your-resource-name.openai.azure.com"
+ type: "string"
+ deployment:
+ title: "Deployment"
+ description: "The deployment for your Azure OpenAI resource. You\
+ \ can find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "your-resource-name"
+ type: "string"
+ required:
+ - "openai_key"
+ - "api_base"
+ - "deployment"
+ - "mode"
+ description: "Use the Azure-hosted OpenAI API to embed text. This option\
+ \ is using the text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "OpenAI-compatible"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "openai_compatible"
+ const: "openai_compatible"
+ enum:
+ - "openai_compatible"
+ type: "string"
+ api_key:
+ title: "API key"
+ default: ""
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ base_url:
+ title: "Base URL"
+ description: "The base URL for your OpenAI-compatible service"
+ examples:
+ - "https://your-service-name.com"
+ type: "string"
+ model_name:
+ title: "Model name"
+ description: "The name of the model to use for embedding"
+ default: "text-embedding-ada-002"
+ examples:
+ - "text-embedding-ada-002"
+ type: "string"
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
+ required:
+ - "base_url"
+ - "dimensions"
+ - "mode"
+ description: "Use a service that's compatible with the OpenAI API to embed\
+ \ text."
indexing:
title: "Indexing"
type: "object"
@@ -54757,8 +65351,10 @@ components:
description: "API Token for the Milvus instance"
airbyte_secret: true
type: "string"
+ x-speakeasy-param-sensitive: true
required:
- "token"
+ - "mode"
description: "Authenticate using an API token (suitable for Zilliz\
\ Cloud)"
- title: "Username/Password"
@@ -54782,9 +65378,11 @@ components:
airbyte_secret: true
order: 2
type: "string"
+ x-speakeasy-param-sensitive: true
required:
- "username"
- "password"
+ - "mode"
description: "Authenticate using username and password (suitable for\
\ self-managed Milvus clusters)"
- title: "No auth"
@@ -54799,6 +65397,8 @@ components:
type: "string"
description: "Do not authenticate (suitable for locally running test\
\ clusters, do not use for clusters with public IP addresses)"
+ required:
+ - "mode"
vector_field:
title: "Vector Field"
description: "The field in the entity that contains the vector"
@@ -54847,6 +65447,7 @@ components:
description: "Size of chunks in tokens to store in vector store (make\
\ sure it is not too big for the context if your LLM)"
maximum: 8191
+ minimum: 1
type: "integer"
chunk_overlap:
title: "Chunk overlap"
@@ -54895,6 +65496,128 @@ components:
type: "array"
items:
type: "string"
+ text_splitter:
+ title: "Text splitter"
+ description: "Split text fields into chunks based on the specified method."
+ type: "object"
+ oneOf:
+ - title: "By Separator"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "separator"
+ const: "separator"
+ enum:
+ - "separator"
+ type: "string"
+ separators:
+ title: "Separators"
+ description: "List of separator strings to split text fields by.\
+ \ The separator itself needs to be wrapped in double quotes,\
+ \ e.g. to split by the dot character, use \".\". To split by\
+ \ a newline, use \"\\n\"."
+ default:
+ - "\"\\n\\n\""
+ - "\"\\n\""
+ - "\" \""
+ - "\"\""
+ type: "array"
+ items:
+ type: "string"
+ keep_separator:
+ title: "Keep separator"
+ description: "Whether to keep the separator in the resulting chunks"
+ default: false
+ type: "boolean"
+ description: "Split the text by the list of separators until the chunk\
+ \ size is reached, using the earlier mentioned separators where\
+ \ possible. This is useful for splitting text fields by paragraphs,\
+ \ sentences, words, etc."
+ required:
+ - "mode"
+ - title: "By Markdown header"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "markdown"
+ const: "markdown"
+ enum:
+ - "markdown"
+ type: "string"
+ split_level:
+ title: "Split level"
+ description: "Level of markdown headers to split text fields by.\
+ \ Headings down to the specified level will be used as split\
+ \ points"
+ default: 1
+ minimum: 1
+ maximum: 6
+ type: "integer"
+ description: "Split the text by Markdown headers down to the specified\
+ \ header level. If the chunk size fits multiple sections, they will\
+ \ be combined into a single chunk."
+ required:
+ - "mode"
+ - title: "By Programming Language"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "code"
+ const: "code"
+ enum:
+ - "code"
+ type: "string"
+ language:
+ title: "Language"
+ description: "Split code in suitable places based on the programming\
+ \ language"
+ enum:
+ - "cpp"
+ - "go"
+ - "java"
+ - "js"
+ - "php"
+ - "proto"
+ - "python"
+ - "rst"
+ - "ruby"
+ - "rust"
+ - "scala"
+ - "swift"
+ - "markdown"
+ - "latex"
+ - "html"
+ - "sol"
+ type: "string"
+ required:
+ - "language"
+ - "mode"
+ description: "Split the text by suitable delimiters based on the programming\
+ \ language. This is useful for splitting code into chunks."
+ field_name_mappings:
+ title: "Field name mappings"
+ description: "List of fields to rename. Not applicable for nested fields,\
+ \ but can be used to rename fields already flattened via dot notation."
+ default: []
+ type: "array"
+ items:
+ title: "FieldNameMappingConfigModel"
+ type: "object"
+ properties:
+ from_field:
+ title: "From field name"
+ description: "The field name in the source"
+ type: "string"
+ to_field:
+ title: "To field name"
+ description: "The field name to use in the destination"
+ type: "string"
+ required:
+ - "from_field"
+ - "to_field"
required:
- "chunk_size"
group: "processing"
@@ -54920,6 +65643,7 @@ components:
type: "string"
required:
- "openai_key"
+ - "mode"
description: "Use the OpenAI API to embed text. This option is using the\
\ text-embedding-ada-002 model with 1536 embedding dimensions."
- title: "Cohere"
@@ -54938,6 +65662,7 @@ components:
type: "string"
required:
- "cohere_key"
+ - "mode"
description: "Use the Cohere API to embed text."
- title: "Fake"
type: "object"
@@ -54952,6 +65677,8 @@ components:
description: "Use a fake embedding made out of random vectors with 1536\
\ embedding dimensions. This is useful for testing the data pipeline\
\ without incurring any costs."
+ required:
+ - "mode"
- title: "From Field"
type: "object"
properties:
@@ -54979,9 +65706,88 @@ components:
required:
- "field_name"
- "dimensions"
+ - "mode"
description: "Use a field in the record as the embedding. This is useful\
\ if you already have an embedding for your data and want to store it\
\ in the vector store."
+ - title: "Azure OpenAI"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "azure_openai"
+ const: "azure_openai"
+ enum:
+ - "azure_openai"
+ type: "string"
+ openai_key:
+ title: "Azure OpenAI API key"
+ description: "The API key for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ airbyte_secret: true
+ type: "string"
+ api_base:
+ title: "Resource base URL"
+ description: "The base URL for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "https://your-resource-name.openai.azure.com"
+ type: "string"
+ deployment:
+ title: "Deployment"
+ description: "The deployment for your Azure OpenAI resource. You\
+ \ can find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "your-resource-name"
+ type: "string"
+ required:
+ - "openai_key"
+ - "api_base"
+ - "deployment"
+ - "mode"
+ description: "Use the Azure-hosted OpenAI API to embed text. This option\
+ \ is using the text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "OpenAI-compatible"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "openai_compatible"
+ const: "openai_compatible"
+ enum:
+ - "openai_compatible"
+ type: "string"
+ api_key:
+ title: "API key"
+ default: ""
+ airbyte_secret: true
+ type: "string"
+ base_url:
+ title: "Base URL"
+ description: "The base URL for your OpenAI-compatible service"
+ examples:
+ - "https://your-service-name.com"
+ type: "string"
+ model_name:
+ title: "Model name"
+ description: "The name of the model to use for embedding"
+ default: "text-embedding-ada-002"
+ examples:
+ - "text-embedding-ada-002"
+ type: "string"
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
+ required:
+ - "base_url"
+ - "dimensions"
+ - "mode"
+ description: "Use a service that's compatible with the OpenAI API to embed\
+ \ text."
indexing:
title: "Indexing"
type: "object"
@@ -55028,6 +65834,7 @@ components:
type: "string"
required:
- "token"
+ - "mode"
description: "Authenticate using an API token (suitable for Zilliz\
\ Cloud)"
- title: "Username/Password"
@@ -55054,6 +65861,7 @@ components:
required:
- "username"
- "password"
+ - "mode"
description: "Authenticate using username and password (suitable for\
\ self-managed Milvus clusters)"
- title: "No auth"
@@ -55068,6 +65876,8 @@ components:
type: "string"
description: "Do not authenticate (suitable for locally running test\
\ clusters, do not use for clusters with public IP addresses)"
+ required:
+ - "mode"
vector_field:
title: "Vector Field"
description: "The field in the entity that contains the vector"
@@ -55117,6 +65927,7 @@ components:
description: "Firebolt password."
airbyte_secret: true
order: 1
+ x-speakeasy-param-sensitive: true
account:
type: "string"
title: "Account"
@@ -55180,11 +65991,13 @@ components:
title: "AWS Key ID"
airbyte_secret: true
description: "AWS access key granting read and write access to S3."
+ x-speakeasy-param-sensitive: true
aws_key_secret:
type: "string"
title: "AWS Key Secret"
airbyte_secret: true
description: "Corresponding secret part of the AWS Key"
+ x-speakeasy-param-sensitive: true
destinationType:
title: "firebolt"
const: "firebolt"
@@ -55321,6 +66134,7 @@ components:
type: "string"
description: "The token for obtaining new access token."
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
destinationType:
title: "google-sheets"
const: "google-sheets"
@@ -55415,6 +66229,7 @@ components:
type: "string"
airbyte_secret: true
order: 6
+ x-speakeasy-param-sensitive: true
destinationType:
title: "databend"
const: "databend"
@@ -55479,15 +66294,21 @@ components:
properties:
pinecone_key:
title: "Pinecone API key"
+ description: "The Pinecone API key to use matching the environment (copy\
+ \ from Pinecone console)"
airbyte_secret: true
type: "string"
+ x-speakeasy-param-sensitive: true
pinecone_environment:
- title: "Pinecone environment"
- description: "Pinecone environment to use"
+ title: "Pinecone Environment"
+ description: "Pinecone Cloud environment to use"
+ examples:
+ - "us-west1-gcp"
+ - "gcp-starter"
type: "string"
index:
title: "Index"
- description: "Pinecone index to use"
+ description: "Pinecone index in your project to load data into"
type: "string"
required:
- "pinecone_key"
@@ -55516,8 +66337,10 @@ components:
title: "OpenAI API key"
airbyte_secret: true
type: "string"
+ x-speakeasy-param-sensitive: true
required:
- "openai_key"
+ - "mode"
description: "Use the OpenAI API to embed text. This option is using the\
\ text-embedding-ada-002 model with 1536 embedding dimensions."
- title: "Cohere"
@@ -55534,8 +66357,10 @@ components:
title: "Cohere API key"
airbyte_secret: true
type: "string"
+ x-speakeasy-param-sensitive: true
required:
- "cohere_key"
+ - "mode"
description: "Use the Cohere API to embed text."
- title: "Fake"
type: "object"
@@ -55550,6 +66375,88 @@ components:
description: "Use a fake embedding made out of random vectors with 1536\
\ embedding dimensions. This is useful for testing the data pipeline\
\ without incurring any costs."
+ required:
+ - "mode"
+ - title: "Azure OpenAI"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "azure_openai"
+ const: "azure_openai"
+ enum:
+ - "azure_openai"
+ type: "string"
+ openai_key:
+ title: "Azure OpenAI API key"
+ description: "The API key for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ api_base:
+ title: "Resource base URL"
+ description: "The base URL for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "https://your-resource-name.openai.azure.com"
+ type: "string"
+ deployment:
+ title: "Deployment"
+ description: "The deployment for your Azure OpenAI resource. You\
+ \ can find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "your-resource-name"
+ type: "string"
+ required:
+ - "openai_key"
+ - "api_base"
+ - "deployment"
+ - "mode"
+ description: "Use the Azure-hosted OpenAI API to embed text. This option\
+ \ is using the text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "OpenAI-compatible"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "openai_compatible"
+ const: "openai_compatible"
+ enum:
+ - "openai_compatible"
+ type: "string"
+ api_key:
+ title: "API key"
+ default: ""
+ airbyte_secret: true
+ type: "string"
+ x-speakeasy-param-sensitive: true
+ base_url:
+ title: "Base URL"
+ description: "The base URL for your OpenAI-compatible service"
+ examples:
+ - "https://your-service-name.com"
+ type: "string"
+ model_name:
+ title: "Model name"
+ description: "The name of the model to use for embedding"
+ default: "text-embedding-ada-002"
+ examples:
+ - "text-embedding-ada-002"
+ type: "string"
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
+ required:
+ - "base_url"
+ - "dimensions"
+ - "mode"
+ description: "Use a service that's compatible with the OpenAI API to embed\
+ \ text."
processing:
title: "ProcessingConfigModel"
type: "object"
@@ -55559,6 +66466,7 @@ components:
description: "Size of chunks in tokens to store in vector store (make\
\ sure it is not too big for the context if your LLM)"
maximum: 8191
+ minimum: 1
type: "integer"
chunk_overlap:
title: "Chunk overlap"
@@ -55607,6 +66515,128 @@ components:
type: "array"
items:
type: "string"
+ text_splitter:
+ title: "Text splitter"
+ description: "Split text fields into chunks based on the specified method."
+ type: "object"
+ oneOf:
+ - title: "By Separator"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "separator"
+ const: "separator"
+ enum:
+ - "separator"
+ type: "string"
+ separators:
+ title: "Separators"
+ description: "List of separator strings to split text fields by.\
+ \ The separator itself needs to be wrapped in double quotes,\
+ \ e.g. to split by the dot character, use \".\". To split by\
+ \ a newline, use \"\\n\"."
+ default:
+ - "\"\\n\\n\""
+ - "\"\\n\""
+ - "\" \""
+ - "\"\""
+ type: "array"
+ items:
+ type: "string"
+ keep_separator:
+ title: "Keep separator"
+ description: "Whether to keep the separator in the resulting chunks"
+ default: false
+ type: "boolean"
+ description: "Split the text by the list of separators until the chunk\
+ \ size is reached, using the earlier mentioned separators where\
+ \ possible. This is useful for splitting text fields by paragraphs,\
+ \ sentences, words, etc."
+ required:
+ - "mode"
+ - title: "By Markdown header"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "markdown"
+ const: "markdown"
+ enum:
+ - "markdown"
+ type: "string"
+ split_level:
+ title: "Split level"
+ description: "Level of markdown headers to split text fields by.\
+ \ Headings down to the specified level will be used as split\
+ \ points"
+ default: 1
+ minimum: 1
+ maximum: 6
+ type: "integer"
+ description: "Split the text by Markdown headers down to the specified\
+ \ header level. If the chunk size fits multiple sections, they will\
+ \ be combined into a single chunk."
+ required:
+ - "mode"
+ - title: "By Programming Language"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "code"
+ const: "code"
+ enum:
+ - "code"
+ type: "string"
+ language:
+ title: "Language"
+ description: "Split code in suitable places based on the programming\
+ \ language"
+ enum:
+ - "cpp"
+ - "go"
+ - "java"
+ - "js"
+ - "php"
+ - "proto"
+ - "python"
+ - "rst"
+ - "ruby"
+ - "rust"
+ - "scala"
+ - "swift"
+ - "markdown"
+ - "latex"
+ - "html"
+ - "sol"
+ type: "string"
+ required:
+ - "language"
+ - "mode"
+ description: "Split the text by suitable delimiters based on the programming\
+ \ language. This is useful for splitting code into chunks."
+ field_name_mappings:
+ title: "Field name mappings"
+ description: "List of fields to rename. Not applicable for nested fields,\
+ \ but can be used to rename fields already flattened via dot notation."
+ default: []
+ type: "array"
+ items:
+ title: "FieldNameMappingConfigModel"
+ type: "object"
+ properties:
+ from_field:
+ title: "From field name"
+ description: "The field name in the source"
+ type: "string"
+ to_field:
+ title: "To field name"
+ description: "The field name to use in the destination"
+ type: "string"
+ required:
+ - "from_field"
+ - "to_field"
required:
- "chunk_size"
group: "processing"
@@ -55639,15 +66669,20 @@ components:
properties:
pinecone_key:
title: "Pinecone API key"
+ description: "The Pinecone API key to use matching the environment (copy\
+ \ from Pinecone console)"
airbyte_secret: true
type: "string"
pinecone_environment:
- title: "Pinecone environment"
- description: "Pinecone environment to use"
+ title: "Pinecone Environment"
+ description: "Pinecone Cloud environment to use"
+ examples:
+ - "us-west1-gcp"
+ - "gcp-starter"
type: "string"
index:
title: "Index"
- description: "Pinecone index to use"
+ description: "Pinecone index in your project to load data into"
type: "string"
required:
- "pinecone_key"
@@ -55678,6 +66713,7 @@ components:
type: "string"
required:
- "openai_key"
+ - "mode"
description: "Use the OpenAI API to embed text. This option is using the\
\ text-embedding-ada-002 model with 1536 embedding dimensions."
- title: "Cohere"
@@ -55696,6 +66732,7 @@ components:
type: "string"
required:
- "cohere_key"
+ - "mode"
description: "Use the Cohere API to embed text."
- title: "Fake"
type: "object"
@@ -55710,6 +66747,86 @@ components:
description: "Use a fake embedding made out of random vectors with 1536\
\ embedding dimensions. This is useful for testing the data pipeline\
\ without incurring any costs."
+ required:
+ - "mode"
+ - title: "Azure OpenAI"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "azure_openai"
+ const: "azure_openai"
+ enum:
+ - "azure_openai"
+ type: "string"
+ openai_key:
+ title: "Azure OpenAI API key"
+ description: "The API key for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ airbyte_secret: true
+ type: "string"
+ api_base:
+ title: "Resource base URL"
+ description: "The base URL for your Azure OpenAI resource. You can\
+ \ find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "https://your-resource-name.openai.azure.com"
+ type: "string"
+ deployment:
+ title: "Deployment"
+ description: "The deployment for your Azure OpenAI resource. You\
+ \ can find this in the Azure portal under your Azure OpenAI resource"
+ examples:
+ - "your-resource-name"
+ type: "string"
+ required:
+ - "openai_key"
+ - "api_base"
+ - "deployment"
+ - "mode"
+ description: "Use the Azure-hosted OpenAI API to embed text. This option\
+ \ is using the text-embedding-ada-002 model with 1536 embedding dimensions."
+ - title: "OpenAI-compatible"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "openai_compatible"
+ const: "openai_compatible"
+ enum:
+ - "openai_compatible"
+ type: "string"
+ api_key:
+ title: "API key"
+ default: ""
+ airbyte_secret: true
+ type: "string"
+ base_url:
+ title: "Base URL"
+ description: "The base URL for your OpenAI-compatible service"
+ examples:
+ - "https://your-service-name.com"
+ type: "string"
+ model_name:
+ title: "Model name"
+ description: "The name of the model to use for embedding"
+ default: "text-embedding-ada-002"
+ examples:
+ - "text-embedding-ada-002"
+ type: "string"
+ dimensions:
+ title: "Embedding dimensions"
+ description: "The number of dimensions the embedding model is generating"
+ examples:
+ - 1536
+ - 384
+ type: "integer"
+ required:
+ - "base_url"
+ - "dimensions"
+ - "mode"
+ description: "Use a service that's compatible with the OpenAI API to embed\
+ \ text."
processing:
title: "ProcessingConfigModel"
type: "object"
@@ -55719,6 +66836,7 @@ components:
description: "Size of chunks in tokens to store in vector store (make\
\ sure it is not too big for the context if your LLM)"
maximum: 8191
+ minimum: 1
type: "integer"
chunk_overlap:
title: "Chunk overlap"
@@ -55759,14 +66877,136 @@ components:
\ of the `users` array. When specifying nested paths, all matching\
\ values are flattened into an array set to a field named by the path."
default: []
- always_show: true
- examples:
- - "age"
- - "user"
- - "user.name"
+ always_show: true
+ examples:
+ - "age"
+ - "user"
+ - "user.name"
+ type: "array"
+ items:
+ type: "string"
+ text_splitter:
+ title: "Text splitter"
+ description: "Split text fields into chunks based on the specified method."
+ type: "object"
+ oneOf:
+ - title: "By Separator"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "separator"
+ const: "separator"
+ enum:
+ - "separator"
+ type: "string"
+ separators:
+ title: "Separators"
+ description: "List of separator strings to split text fields by.\
+ \ The separator itself needs to be wrapped in double quotes,\
+ \ e.g. to split by the dot character, use \".\". To split by\
+ \ a newline, use \"\\n\"."
+ default:
+ - "\"\\n\\n\""
+ - "\"\\n\""
+ - "\" \""
+ - "\"\""
+ type: "array"
+ items:
+ type: "string"
+ keep_separator:
+ title: "Keep separator"
+ description: "Whether to keep the separator in the resulting chunks"
+ default: false
+ type: "boolean"
+ description: "Split the text by the list of separators until the chunk\
+ \ size is reached, using the earlier mentioned separators where\
+ \ possible. This is useful for splitting text fields by paragraphs,\
+ \ sentences, words, etc."
+ required:
+ - "mode"
+ - title: "By Markdown header"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "markdown"
+ const: "markdown"
+ enum:
+ - "markdown"
+ type: "string"
+ split_level:
+ title: "Split level"
+ description: "Level of markdown headers to split text fields by.\
+ \ Headings down to the specified level will be used as split\
+ \ points"
+ default: 1
+ minimum: 1
+ maximum: 6
+ type: "integer"
+ description: "Split the text by Markdown headers down to the specified\
+ \ header level. If the chunk size fits multiple sections, they will\
+ \ be combined into a single chunk."
+ required:
+ - "mode"
+ - title: "By Programming Language"
+ type: "object"
+ properties:
+ mode:
+ title: "Mode"
+ default: "code"
+ const: "code"
+ enum:
+ - "code"
+ type: "string"
+ language:
+ title: "Language"
+ description: "Split code in suitable places based on the programming\
+ \ language"
+ enum:
+ - "cpp"
+ - "go"
+ - "java"
+ - "js"
+ - "php"
+ - "proto"
+ - "python"
+ - "rst"
+ - "ruby"
+ - "rust"
+ - "scala"
+ - "swift"
+ - "markdown"
+ - "latex"
+ - "html"
+ - "sol"
+ type: "string"
+ required:
+ - "language"
+ - "mode"
+ description: "Split the text by suitable delimiters based on the programming\
+ \ language. This is useful for splitting code into chunks."
+ field_name_mappings:
+ title: "Field name mappings"
+ description: "List of fields to rename. Not applicable for nested fields,\
+ \ but can be used to rename fields already flattened via dot notation."
+ default: []
type: "array"
items:
- type: "string"
+ title: "FieldNameMappingConfigModel"
+ type: "object"
+ properties:
+ from_field:
+ title: "From field name"
+ description: "The field name in the source"
+ type: "string"
+ to_field:
+ title: "To field name"
+ description: "The field name to use in the destination"
+ type: "string"
+ required:
+ - "from_field"
+ - "to_field"
required:
- "chunk_size"
group: "processing"
@@ -55781,448 +67021,69 @@ components:
title: "Embedding"
- id: "indexing"
title: "Indexing"
- destination-bigquery-denormalized:
- title: "BigQuery Denormalized Typed Struct Destination Spec"
+ destination-duckdb:
+ title: "Destination Duckdb"
type: "object"
required:
- - "project_id"
- - "dataset_id"
+ - "destination_path"
- "destinationType"
properties:
- project_id:
- type: "string"
- description: "The GCP project ID for the project containing the target BigQuery\
- \ dataset. Read more here."
- title: "Project ID"
- order: 0
- dataset_id:
- type: "string"
- description: "The default BigQuery Dataset ID that tables are replicated\
- \ to if the source does not specify a namespace. Read more here."
- title: "Default Dataset ID"
- order: 1
- loading_method:
- type: "object"
- title: "Loading Method"
- description: "Loading method used to send select the way data will be uploaded\
- \ to BigQuery.
Standard Inserts - Direct uploading using SQL\
- \ INSERT statements. This method is extremely inefficient and provided\
- \ only for quick testing. In almost all cases, you should use staging.\
- \
GCS Staging - Writes large batches of records to a file,\
- \ uploads the file to GCS, then uses COPY INTO table to upload\
- \ the file. Recommended for most workloads for better speed and scalability.\
- \ Read more about GCS Staging here."
- order: 2
- oneOf:
- - title: "Standard Inserts"
- required:
- - "method"
- properties:
- method:
- type: "string"
- const: "Standard"
- enum:
- - "Standard"
- - title: "GCS Staging"
- type: "object"
- required:
- - "method"
- - "gcs_bucket_name"
- - "gcs_bucket_path"
- - "credential"
- properties:
- method:
- type: "string"
- const: "GCS Staging"
- order: 0
- enum:
- - "GCS Staging"
- credential:
- title: "Credential"
- description: "An HMAC key is a type of credential and can be associated\
- \ with a service account or a user account in Cloud Storage. Read\
- \ more here."
- type: "object"
- order: 1
- oneOf:
- - title: "HMAC key"
- order: 0
- required:
- - "credential_type"
- - "hmac_key_access_id"
- - "hmac_key_secret"
- properties:
- credential_type:
- type: "string"
- const: "HMAC_KEY"
- order: 0
- enum:
- - "HMAC_KEY"
- hmac_key_access_id:
- type: "string"
- description: "HMAC key access ID. When linked to a service account,\
- \ this ID is 61 characters long; when linked to a user account,\
- \ it is 24 characters long."
- title: "HMAC Key Access ID"
- airbyte_secret: true
- examples:
- - "1234567890abcdefghij1234"
- order: 1
- hmac_key_secret:
- type: "string"
- description: "The corresponding secret for the access ID. It\
- \ is a 40-character base-64 encoded string."
- title: "HMAC Key Secret"
- airbyte_secret: true
- examples:
- - "1234567890abcdefghij1234567890ABCDEFGHIJ"
- order: 2
- gcs_bucket_name:
- title: "GCS Bucket Name"
- type: "string"
- description: "The name of the GCS bucket. Read more here."
- examples:
- - "airbyte_sync"
- order: 2
- gcs_bucket_path:
- title: "GCS Bucket Path"
- description: "Directory under the GCS bucket where data will be written.\
- \ Read more here."
- type: "string"
- examples:
- - "data_sync/test"
- order: 3
- keep_files_in_gcs-bucket:
- type: "string"
- description: "This upload method is supposed to temporary store records\
- \ in GCS bucket. By this select you can chose if these records should\
- \ be removed from GCS when migration has finished. The default \"\
- Delete all tmp files from GCS\" value is used if not set explicitly."
- title: "GCS Tmp Files Afterward Processing"
- default: "Delete all tmp files from GCS"
- enum:
- - "Delete all tmp files from GCS"
- - "Keep all tmp files in GCS"
- order: 4
- file_buffer_count:
- title: "File Buffer Count"
- type: "integer"
- minimum: 10
- maximum: 50
- default: 10
- description: "Number of file buffers allocated for writing data. Increasing\
- \ this number is beneficial for connections using Change Data Capture\
- \ (CDC) and up to the number of streams within a connection. Increasing\
- \ the number of file buffers past the maximum number of streams\
- \ has deteriorating effects"
- examples:
- - "10"
- order: 5
- credentials_json:
+ motherduck_api_key:
+ title: "MotherDuck API Key"
type: "string"
- description: "The contents of the JSON service account key. Check out the\
- \ docs if you need help generating this key. Default credentials will\
- \ be used if this field is left empty."
- title: "Service Account Key JSON (Required for cloud, optional for open-source)"
+ description: "API key to use for authentication to a MotherDuck database."
airbyte_secret: true
- order: 3
- always_show: true
- dataset_location:
+ x-speakeasy-param-sensitive: true
+ destination_path:
+ title: "Destination DB"
type: "string"
- description: "The location of the dataset. Warning: Changes made after creation\
- \ will not be applied. The default \"US\" value is used if not set explicitly.\
- \ Read more here."
- title: "Dataset Location"
- default: "US"
- order: 4
- enum:
- - "US"
- - "EU"
- - "asia-east1"
- - "asia-east2"
- - "asia-northeast1"
- - "asia-northeast2"
- - "asia-northeast3"
- - "asia-south1"
- - "asia-south2"
- - "asia-southeast1"
- - "asia-southeast2"
- - "australia-southeast1"
- - "australia-southeast2"
- - "europe-central1"
- - "europe-central2"
- - "europe-north1"
- - "europe-southwest1"
- - "europe-west1"
- - "europe-west2"
- - "europe-west3"
- - "europe-west4"
- - "europe-west6"
- - "europe-west7"
- - "europe-west8"
- - "europe-west9"
- - "me-west1"
- - "northamerica-northeast1"
- - "northamerica-northeast2"
- - "southamerica-east1"
- - "southamerica-west1"
- - "us-central1"
- - "us-east1"
- - "us-east2"
- - "us-east3"
- - "us-east4"
- - "us-east5"
- - "us-west1"
- - "us-west2"
- - "us-west3"
- - "us-west4"
- big_query_client_buffer_size_mb:
- title: "Google BigQuery Client Chunk Size"
- description: "Google BigQuery client's chunk (buffer) size (MIN=1, MAX =\
- \ 15) for each table. The size that will be written by a single RPC. Written\
- \ data will be buffered and only flushed upon reaching this size or closing\
- \ the channel. The default 15MB value is used if not set explicitly. Read\
- \ more here."
- type: "integer"
- minimum: 1
- maximum: 15
- default: 15
+ description: "Path to the .duckdb file, or the text 'md:' to connect to\
+ \ MotherDuck. The file will be placed inside that local mount. For more\
+ \ information check out our docs"
examples:
- - "15"
- order: 5
+ - "/local/destination.duckdb"
+ - "md:"
+ - "motherduck:"
+ schema:
+ title: "Destination Schema"
+ type: "string"
+ description: "Database schema name, default for duckdb is 'main'."
+ example: "main"
destinationType:
- title: "bigquery-denormalized"
- const: "bigquery-denormalized"
+ title: "duckdb"
+ const: "duckdb"
enum:
- - "bigquery-denormalized"
+ - "duckdb"
order: 0
type: "string"
- destination-bigquery-denormalized-update:
- title: "BigQuery Denormalized Typed Struct Destination Spec"
+ destination-duckdb-update:
+ title: "Destination Duckdb"
type: "object"
required:
- - "project_id"
- - "dataset_id"
+ - "destination_path"
properties:
- project_id:
- type: "string"
- description: "The GCP project ID for the project containing the target BigQuery\
- \ dataset. Read more here."
- title: "Project ID"
- order: 0
- dataset_id:
- type: "string"
- description: "The default BigQuery Dataset ID that tables are replicated\
- \ to if the source does not specify a namespace. Read more here."
- title: "Default Dataset ID"
- order: 1
- loading_method:
- type: "object"
- title: "Loading Method"
- description: "Loading method used to send select the way data will be uploaded\
- \ to BigQuery.
Standard Inserts - Direct uploading using SQL\
- \ INSERT statements. This method is extremely inefficient and provided\
- \ only for quick testing. In almost all cases, you should use staging.\
- \
GCS Staging - Writes large batches of records to a file,\
- \ uploads the file to GCS, then uses COPY INTO table to upload\
- \ the file. Recommended for most workloads for better speed and scalability.\
- \ Read more about GCS Staging here."
- order: 2
- oneOf:
- - title: "Standard Inserts"
- required:
- - "method"
- properties:
- method:
- type: "string"
- const: "Standard"
- enum:
- - "Standard"
- - title: "GCS Staging"
- type: "object"
- required:
- - "method"
- - "gcs_bucket_name"
- - "gcs_bucket_path"
- - "credential"
- properties:
- method:
- type: "string"
- const: "GCS Staging"
- order: 0
- enum:
- - "GCS Staging"
- credential:
- title: "Credential"
- description: "An HMAC key is a type of credential and can be associated\
- \ with a service account or a user account in Cloud Storage. Read\
- \ more here."
- type: "object"
- order: 1
- oneOf:
- - title: "HMAC key"
- order: 0
- required:
- - "credential_type"
- - "hmac_key_access_id"
- - "hmac_key_secret"
- properties:
- credential_type:
- type: "string"
- const: "HMAC_KEY"
- order: 0
- enum:
- - "HMAC_KEY"
- hmac_key_access_id:
- type: "string"
- description: "HMAC key access ID. When linked to a service account,\
- \ this ID is 61 characters long; when linked to a user account,\
- \ it is 24 characters long."
- title: "HMAC Key Access ID"
- airbyte_secret: true
- examples:
- - "1234567890abcdefghij1234"
- order: 1
- hmac_key_secret:
- type: "string"
- description: "The corresponding secret for the access ID. It\
- \ is a 40-character base-64 encoded string."
- title: "HMAC Key Secret"
- airbyte_secret: true
- examples:
- - "1234567890abcdefghij1234567890ABCDEFGHIJ"
- order: 2
- gcs_bucket_name:
- title: "GCS Bucket Name"
- type: "string"
- description: "The name of the GCS bucket. Read more here."
- examples:
- - "airbyte_sync"
- order: 2
- gcs_bucket_path:
- title: "GCS Bucket Path"
- description: "Directory under the GCS bucket where data will be written.\
- \ Read more here."
- type: "string"
- examples:
- - "data_sync/test"
- order: 3
- keep_files_in_gcs-bucket:
- type: "string"
- description: "This upload method is supposed to temporary store records\
- \ in GCS bucket. By this select you can chose if these records should\
- \ be removed from GCS when migration has finished. The default \"\
- Delete all tmp files from GCS\" value is used if not set explicitly."
- title: "GCS Tmp Files Afterward Processing"
- default: "Delete all tmp files from GCS"
- enum:
- - "Delete all tmp files from GCS"
- - "Keep all tmp files in GCS"
- order: 4
- file_buffer_count:
- title: "File Buffer Count"
- type: "integer"
- minimum: 10
- maximum: 50
- default: 10
- description: "Number of file buffers allocated for writing data. Increasing\
- \ this number is beneficial for connections using Change Data Capture\
- \ (CDC) and up to the number of streams within a connection. Increasing\
- \ the number of file buffers past the maximum number of streams\
- \ has deteriorating effects"
- examples:
- - "10"
- order: 5
- credentials_json:
+ motherduck_api_key:
+ title: "MotherDuck API Key"
type: "string"
- description: "The contents of the JSON service account key. Check out the\
- \ docs if you need help generating this key. Default credentials will\
- \ be used if this field is left empty."
- title: "Service Account Key JSON (Required for cloud, optional for open-source)"
+ description: "API key to use for authentication to a MotherDuck database."
airbyte_secret: true
- order: 3
- always_show: true
- dataset_location:
+ destination_path:
+ title: "Destination DB"
type: "string"
- description: "The location of the dataset. Warning: Changes made after creation\
- \ will not be applied. The default \"US\" value is used if not set explicitly.\
- \ Read more here."
- title: "Dataset Location"
- default: "US"
- order: 4
- enum:
- - "US"
- - "EU"
- - "asia-east1"
- - "asia-east2"
- - "asia-northeast1"
- - "asia-northeast2"
- - "asia-northeast3"
- - "asia-south1"
- - "asia-south2"
- - "asia-southeast1"
- - "asia-southeast2"
- - "australia-southeast1"
- - "australia-southeast2"
- - "europe-central1"
- - "europe-central2"
- - "europe-north1"
- - "europe-southwest1"
- - "europe-west1"
- - "europe-west2"
- - "europe-west3"
- - "europe-west4"
- - "europe-west6"
- - "europe-west7"
- - "europe-west8"
- - "europe-west9"
- - "me-west1"
- - "northamerica-northeast1"
- - "northamerica-northeast2"
- - "southamerica-east1"
- - "southamerica-west1"
- - "us-central1"
- - "us-east1"
- - "us-east2"
- - "us-east3"
- - "us-east4"
- - "us-east5"
- - "us-west1"
- - "us-west2"
- - "us-west3"
- - "us-west4"
- big_query_client_buffer_size_mb:
- title: "Google BigQuery Client Chunk Size"
- description: "Google BigQuery client's chunk (buffer) size (MIN=1, MAX =\
- \ 15) for each table. The size that will be written by a single RPC. Written\
- \ data will be buffered and only flushed upon reaching this size or closing\
- \ the channel. The default 15MB value is used if not set explicitly. Read\
- \ more here."
- type: "integer"
- minimum: 1
- maximum: 15
- default: 15
+ description: "Path to the .duckdb file, or the text 'md:' to connect to\
+ \ MotherDuck. The file will be placed inside that local mount. For more\
+ \ information check out our docs"
examples:
- - "15"
- order: 5
+ - "/local/destination.duckdb"
+ - "md:"
+ - "motherduck:"
+ schema:
+ title: "Destination Schema"
+ type: "string"
+ description: "Database schema name, default for duckdb is 'main'."
+ example: "main"
destination-sftp-json:
title: "Destination SFTP JSON"
type: "object"
@@ -56259,6 +67120,7 @@ components:
type: "string"
airbyte_secret: true
order: 3
+ x-speakeasy-param-sensitive: true
destination_path:
title: "Destination path"
type: "string"
@@ -56452,6 +67314,7 @@ components:
examples:
- "A012345678910EXAMPLE"
order: 0
+ x-speakeasy-param-sensitive: true
secret_access_key:
type: "string"
description: "The corresponding secret to the access key ID. Read more here."
title: "Project ID"
+ group: "connection"
order: 0
dataset_location:
type: "string"
@@ -57741,6 +68612,7 @@ components:
\ will not be applied. Read more here."
title: "Dataset Location"
+ group: "connection"
order: 1
enum:
- "US"
@@ -57768,6 +68640,9 @@ components:
- "europe-west7"
- "europe-west8"
- "europe-west9"
+ - "europe-west12"
+ - "me-central1"
+ - "me-central2"
- "me-west1"
- "northamerica-northeast1"
- "northamerica-northeast2"
@@ -57779,6 +68654,7 @@ components:
- "us-east3"
- "us-east4"
- "us-east5"
+ - "us-south1"
- "us-west1"
- "us-west2"
- "us-west3"
@@ -57789,31 +68665,22 @@ components:
\ to if the source does not specify a namespace. Read more here."
title: "Default Dataset ID"
+ group: "connection"
order: 2
loading_method:
type: "object"
title: "Loading Method"
- description: "Loading method used to send select the way data will be uploaded\
- \ to BigQuery.
Standard Inserts - Direct uploading using SQL\
- \ INSERT statements. This method is extremely inefficient and provided\
- \ only for quick testing. In almost all cases, you should use staging.\
- \
GCS Staging - Writes large batches of records to a file,\
- \ uploads the file to GCS, then uses COPY INTO table to upload\
- \ the file. Recommended for most workloads for better speed and scalability.\
- \ Read more about GCS Staging here."
+ description: "The way data will be uploaded to BigQuery."
+ display_type: "radio"
+ group: "connection"
order: 3
oneOf:
- - title: "Standard Inserts"
- required:
- - "method"
- properties:
- method:
- type: "string"
- const: "Standard"
- enum:
- - "Standard"
- title: "GCS Staging"
+ description: "(recommended) Writes large batches of records to\
+ \ a file, uploads the file to GCS, then uses COPY INTO to load your\
+ \ data into BigQuery. Provides best-in-class speed, reliability and\
+ \ scalability. Read more about GCS Staging here."
required:
- "method"
- "gcs_bucket_name"
@@ -57857,6 +68724,7 @@ components:
examples:
- "1234567890abcdefghij1234"
order: 1
+ x-speakeasy-param-sensitive: true
hmac_key_secret:
type: "string"
description: "The corresponding secret for the access ID. It\
@@ -57866,6 +68734,7 @@ components:
examples:
- "1234567890abcdefghij1234567890ABCDEFGHIJ"
order: 2
+ x-speakeasy-param-sensitive: true
gcs_bucket_name:
title: "GCS Bucket Name"
type: "string"
@@ -57893,20 +68762,18 @@ components:
- "Delete all tmp files from GCS"
- "Keep all tmp files in GCS"
order: 4
- file_buffer_count:
- title: "File Buffer Count"
- type: "integer"
- minimum: 10
- maximum: 50
- default: 10
- description: "Number of file buffers allocated for writing data. Increasing\
- \ this number is beneficial for connections using Change Data Capture\
- \ (CDC) and up to the number of streams within a connection. Increasing\
- \ the number of file buffers past the maximum number of streams\
- \ has deteriorating effects"
- examples:
- - "10"
- order: 5
+ - title: "Standard Inserts"
+ required:
+ - "method"
+ description: "(not recommended) Direct loading using SQL INSERT\
+ \ statements. This method is extremely inefficient and provided only\
+ \ for quick testing. In all other cases, you should use GCS staging."
+ properties:
+ method:
+ type: "string"
+ const: "Standard"
+ enum:
+ - "Standard"
credentials_json:
type: "string"
description: "The contents of the JSON service account key. Check out the\
@@ -57915,6 +68782,7 @@ components:
\ be used if this field is left empty."
title: "Service Account Key JSON (Required for cloud, optional for open-source)"
airbyte_secret: true
+ group: "connection"
order: 4
always_show: true
transformation_priority:
@@ -57933,6 +68801,7 @@ components:
- "interactive"
- "batch"
order: 5
+ group: "advanced"
big_query_client_buffer_size_mb:
title: "Google BigQuery Client Chunk Size"
description: "Google BigQuery client's chunk (buffer) size (MIN=1, MAX =\
@@ -57948,11 +68817,23 @@ components:
examples:
- "15"
order: 6
+ group: "advanced"
raw_data_dataset:
type: "string"
- description: "The dataset to write raw tables into"
- title: "Destinations V2 Raw Table Dataset"
+ description: "The dataset to write raw tables into (default: airbyte_internal)"
+ title: "Raw Table Dataset Name"
order: 7
+ group: "advanced"
+ disable_type_dedupe:
+ type: "boolean"
+ default: false
+ description: "Disable Writing Final Tables. WARNING! The data format in\
+ \ _airbyte_data is likely stable but there are no guarantees that other\
+ \ metadata columns will remain the same in future versions"
+ title: "Disable Final Tables. (WARNING! Unstable option; Columns in raw\
+ \ table schema might change between versions)"
+ order: 8
+ group: "advanced"
destinationType:
title: "bigquery"
const: "bigquery"
@@ -57960,6 +68841,11 @@ components:
- "bigquery"
order: 0
type: "string"
+ groups:
+ - id: "connection"
+ title: "Connection"
+ - id: "advanced"
+ title: "Advanced"
destination-bigquery-update:
title: "BigQuery Destination Spec"
type: "object"
@@ -57974,6 +68860,7 @@ components:
\ dataset. Read more here."
title: "Project ID"
+ group: "connection"
order: 0
dataset_location:
type: "string"
@@ -57981,6 +68868,7 @@ components:
\ will not be applied. Read more here."
title: "Dataset Location"
+ group: "connection"
order: 1
enum:
- "US"
@@ -58008,6 +68896,9 @@ components:
- "europe-west7"
- "europe-west8"
- "europe-west9"
+ - "europe-west12"
+ - "me-central1"
+ - "me-central2"
- "me-west1"
- "northamerica-northeast1"
- "northamerica-northeast2"
@@ -58019,6 +68910,7 @@ components:
- "us-east3"
- "us-east4"
- "us-east5"
+ - "us-south1"
- "us-west1"
- "us-west2"
- "us-west3"
@@ -58029,31 +68921,22 @@ components:
\ to if the source does not specify a namespace. Read more here."
title: "Default Dataset ID"
+ group: "connection"
order: 2
loading_method:
type: "object"
title: "Loading Method"
- description: "Loading method used to send select the way data will be uploaded\
- \ to BigQuery.
Standard Inserts - Direct uploading using SQL\
- \ INSERT statements. This method is extremely inefficient and provided\
- \ only for quick testing. In almost all cases, you should use staging.\
- \
GCS Staging - Writes large batches of records to a file,\
- \ uploads the file to GCS, then uses COPY INTO table to upload\
- \ the file. Recommended for most workloads for better speed and scalability.\
- \ Read more about GCS Staging here."
+ description: "The way data will be uploaded to BigQuery."
+ display_type: "radio"
+ group: "connection"
order: 3
oneOf:
- - title: "Standard Inserts"
- required:
- - "method"
- properties:
- method:
- type: "string"
- const: "Standard"
- enum:
- - "Standard"
- title: "GCS Staging"
+ description: "(recommended) Writes large batches of records to\
+ \ a file, uploads the file to GCS, then uses COPY INTO to load your\
+ \ data into BigQuery. Provides best-in-class speed, reliability and\
+ \ scalability. Read more about GCS Staging here."
required:
- "method"
- "gcs_bucket_name"
@@ -58133,20 +69016,18 @@ components:
- "Delete all tmp files from GCS"
- "Keep all tmp files in GCS"
order: 4
- file_buffer_count:
- title: "File Buffer Count"
- type: "integer"
- minimum: 10
- maximum: 50
- default: 10
- description: "Number of file buffers allocated for writing data. Increasing\
- \ this number is beneficial for connections using Change Data Capture\
- \ (CDC) and up to the number of streams within a connection. Increasing\
- \ the number of file buffers past the maximum number of streams\
- \ has deteriorating effects"
- examples:
- - "10"
- order: 5
+ - title: "Standard Inserts"
+ required:
+ - "method"
+ description: "(not recommended) Direct loading using SQL INSERT\
+ \ statements. This method is extremely inefficient and provided only\
+ \ for quick testing. In all other cases, you should use GCS staging."
+ properties:
+ method:
+ type: "string"
+ const: "Standard"
+ enum:
+ - "Standard"
credentials_json:
type: "string"
description: "The contents of the JSON service account key. Check out the\
@@ -58155,6 +69036,7 @@ components:
\ be used if this field is left empty."
title: "Service Account Key JSON (Required for cloud, optional for open-source)"
airbyte_secret: true
+ group: "connection"
order: 4
always_show: true
transformation_priority:
@@ -58173,6 +69055,7 @@ components:
- "interactive"
- "batch"
order: 5
+ group: "advanced"
big_query_client_buffer_size_mb:
title: "Google BigQuery Client Chunk Size"
description: "Google BigQuery client's chunk (buffer) size (MIN=1, MAX =\
@@ -58188,11 +69071,28 @@ components:
examples:
- "15"
order: 6
+ group: "advanced"
raw_data_dataset:
type: "string"
- description: "The dataset to write raw tables into"
- title: "Destinations V2 Raw Table Dataset"
+ description: "The dataset to write raw tables into (default: airbyte_internal)"
+ title: "Raw Table Dataset Name"
order: 7
+ group: "advanced"
+ disable_type_dedupe:
+ type: "boolean"
+ default: false
+ description: "Disable Writing Final Tables. WARNING! The data format in\
+ \ _airbyte_data is likely stable but there are no guarantees that other\
+ \ metadata columns will remain the same in future versions"
+ title: "Disable Final Tables. (WARNING! Unstable option; Columns in raw\
+ \ table schema might change between versions)"
+ order: 8
+ group: "advanced"
+ groups:
+ - id: "connection"
+ title: "Connection"
+ - id: "advanced"
+ title: "Advanced"
destination-vertica:
title: "Vertica Destination Spec"
type: "object"
@@ -58235,6 +69135,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
jdbc_url_params:
description: "Additional properties to pass to the JDBC URL string when\
\ connecting to the database formatted as 'key=value' pairs separated\
@@ -58310,6 +69211,7 @@ components:
airbyte_secret: true
multiline: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Password Authentication"
required:
- "tunnel_method"
@@ -58354,6 +69256,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
destinationType:
title: "vertica"
const: "vertica"
@@ -58597,6 +69500,7 @@ components:
\ server"
type: "string"
airbyte_secret: true
+ x-speakeasy-param-sensitive: true
destinationType:
title: "elasticsearch"
const: "elasticsearch"
@@ -58718,6 +69622,7 @@ components:
type: "string"
examples:
- "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="
+ x-speakeasy-param-sensitive: true
azure_blob_storage_output_buffer_size:
title: "Azure Blob Storage output buffer size (Megabytes)"
type: "integer"
@@ -58932,6 +69837,7 @@ components:
title: "OpenAI API key"
airbyte_secret: true
type: "string"
+ x-speakeasy-param-sensitive: true
required:
- "openai_key"
description: "Use the OpenAI API to embed text. This option is using the\
@@ -58969,6 +69875,7 @@ components:
title: "Pinecone API key"
airbyte_secret: true
type: "string"
+ x-speakeasy-param-sensitive: true
pinecone_environment:
title: "Pinecone environment"
description: "Pinecone environment to use"
@@ -59256,6 +70163,7 @@ components:
type: "string"
airbyte_secret: true
order: 1
+ x-speakeasy-param-sensitive: true
api_token:
title: "Cumul.io API Token"
description: "The corresponding API token generated in Cumul.io's platform\
@@ -59263,6 +70171,7 @@ components:
type: "string"
airbyte_secret: true
order: 2
+ x-speakeasy-param-sensitive: true
destinationType:
title: "cumulio"
const: "cumulio"
@@ -59351,6 +70260,7 @@ components:
type: "string"
airbyte_secret: true
order: 5
+ x-speakeasy-param-sensitive: true
ssl_mode:
title: "SSL modes"
description: "SSL connection modes. \n disable - Chose this mode\
@@ -59450,6 +70360,7 @@ components:
\ you do not add it - the password will be generated automatically."
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "verify-full"
additionalProperties: false
description: "Verify-full SSL mode."
@@ -59487,6 +70398,7 @@ components:
airbyte_secret: true
multiline: true
order: 3
+ x-speakeasy-param-sensitive: true
client_key_password:
type: "string"
title: "Client key password"
@@ -59494,6 +70406,7 @@ components:
\ you do not add it - the password will be generated automatically."
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
jdbc_url_params:
description: "Additional properties to pass to the JDBC URL string when\
\ connecting to the database formatted as 'key=value' pairs separated\
@@ -59564,6 +70477,7 @@ components:
airbyte_secret: true
multiline: true
order: 4
+ x-speakeasy-param-sensitive: true
- title: "Password Authentication"
required:
- "tunnel_method"
@@ -59608,6 +70522,7 @@ components:
type: "string"
airbyte_secret: true
order: 4
+ x-speakeasy-param-sensitive: true
destinationType:
title: "postgres"
const: "postgres"
@@ -59925,16 +70840,21 @@ components:
order: 4
SourceAhaCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-aha"
secretId:
@@ -59946,16 +70866,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAircallCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-aircall"
secretId:
@@ -59967,16 +70892,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAirtableCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-airtable"
secretId:
@@ -59988,16 +70918,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAlloydbCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-alloydb"
secretId:
@@ -60009,16 +70944,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAmazonAdsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-amazon-ads"
secretId:
@@ -60030,16 +70970,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAmazonSellerPartnerCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-amazon-seller-partner"
secretId:
@@ -60051,16 +70996,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAmazonSqsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-amazon-sqs"
secretId:
@@ -60072,16 +71022,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAmplitudeCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-amplitude"
secretId:
@@ -60093,16 +71048,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceApifyDatasetCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-apify-dataset"
secretId:
@@ -60114,16 +71074,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAppfollowCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-appfollow"
secretId:
@@ -60135,16 +71100,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAsanaCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-asana"
secretId:
@@ -60156,16 +71126,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAuth0CreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-auth0"
secretId:
@@ -60177,16 +71152,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAwsCloudtrailCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-aws-cloudtrail"
secretId:
@@ -60198,16 +71178,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAzureBlobStorageCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-azure-blob-storage"
secretId:
@@ -60219,16 +71204,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceAzureTableCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-azure-table"
secretId:
@@ -60240,16 +71230,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceBambooHrCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-bamboo-hr"
secretId:
@@ -60259,39 +71254,23 @@ components:
type: "string"
x-speakeasy-entity: Source_BambooHr
x-speakeasy-param-suppress-computed-diff: true
- SourceBigcommerceCreateRequest:
- required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
- properties:
- name:
- type: "string"
- workspaceId:
- format: "uuid"
- type: "string"
- configuration:
- $ref: "#/components/schemas/source-bigcommerce"
- secretId:
- description:
- "Optional secretID obtained through the public API OAuth redirect\
- \ flow."
- type: "string"
- x-speakeasy-entity: Source_Bigcommerce
- x-speakeasy-param-suppress-computed-diff: true
SourceBigqueryCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-bigquery"
secretId:
@@ -60303,16 +71282,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceBingAdsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-bing-ads"
secretId:
@@ -60324,16 +71308,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceBraintreeCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-braintree"
secretId:
@@ -60345,16 +71334,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceBrazeCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-braze"
secretId:
@@ -60364,18 +71358,49 @@ components:
type: "string"
x-speakeasy-entity: Source_Braze
x-speakeasy-param-suppress-computed-diff: true
- SourceChargebeeCreateRequest:
+ SourceCartCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
+ format: uuid
+ type: string
+ configuration:
+ $ref: "#/components/schemas/source-cart"
+ secretId:
+ description:
+ "Optional secretID obtained through the public API OAuth redirect\
+ \ flow."
type: "string"
+ x-speakeasy-entity: Source_Cart
+ x-speakeasy-param-suppress-computed-diff: true
+ SourceChargebeeCreateRequest:
+ required:
+ - name
+ - workspaceId
+ - configuration
+ type: object
+ properties:
+ name:
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
+ workspaceId:
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-chargebee"
secretId:
@@ -60387,16 +71412,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceChartmogulCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-chartmogul"
secretId:
@@ -60408,16 +71438,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceClickhouseCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-clickhouse"
secretId:
@@ -60429,16 +71464,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceClickupApiCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-clickup-api"
secretId:
@@ -60450,16 +71490,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceClockifyCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-clockify"
secretId:
@@ -60471,16 +71516,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceCloseComCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-close-com"
secretId:
@@ -60492,16 +71542,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceCodaCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-coda"
secretId:
@@ -60513,16 +71568,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceCoinApiCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-coin-api"
secretId:
@@ -60534,16 +71594,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceCoinmarketcapCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-coinmarketcap"
secretId:
@@ -60555,16 +71620,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceConfigcatCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-configcat"
secretId:
@@ -60576,16 +71646,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceConfluenceCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-confluence"
secretId:
@@ -60597,16 +71672,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceConvexCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-convex"
secretId:
@@ -60618,16 +71698,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceDatascopeCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-datascope"
secretId:
@@ -60639,16 +71724,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceDelightedCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-delighted"
secretId:
@@ -60660,16 +71750,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceDixaCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-dixa"
secretId:
@@ -60681,16 +71776,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceDockerhubCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-dockerhub"
secretId:
@@ -60702,16 +71802,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceDremioCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-dremio"
secretId:
@@ -60723,16 +71828,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceDynamodbCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-dynamodb"
secretId:
@@ -60742,39 +71852,23 @@ components:
type: "string"
x-speakeasy-entity: Source_Dynamodb
x-speakeasy-param-suppress-computed-diff: true
- SourceE2eTestCloudCreateRequest:
- required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
- properties:
- name:
- type: "string"
- workspaceId:
- format: "uuid"
- type: "string"
- configuration:
- $ref: "#/components/schemas/source-e2e-test-cloud"
- secretId:
- description:
- "Optional secretID obtained through the public API OAuth redirect\
- \ flow."
- type: "string"
- x-speakeasy-entity: Source_E2eTestCloud
- x-speakeasy-param-suppress-computed-diff: true
SourceEmailoctopusCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-emailoctopus"
secretId:
@@ -60786,16 +71880,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceExchangeRatesCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-exchange-rates"
secretId:
@@ -60807,16 +71906,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceFacebookMarketingCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-facebook-marketing"
secretId:
@@ -60826,18 +71930,23 @@ components:
type: "string"
x-speakeasy-entity: Source_FacebookMarketing
x-speakeasy-param-suppress-computed-diff: true
- SourceFacebookPagesCreateRequest:
- required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ SourceFacebookPagesCreateRequest:
+ required:
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-facebook-pages"
secretId:
@@ -60849,16 +71958,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceFakerCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-faker"
secretId:
@@ -60870,16 +71984,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceFaunaCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-fauna"
secretId:
@@ -60889,39 +72008,49 @@ components:
type: "string"
x-speakeasy-entity: Source_Fauna
x-speakeasy-param-suppress-computed-diff: true
- SourceFileSecureCreateRequest:
+ SourceFileCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
- $ref: "#/components/schemas/source-file-secure"
+ $ref: "#/components/schemas/source-file"
secretId:
description:
"Optional secretID obtained through the public API OAuth redirect\
\ flow."
type: "string"
- x-speakeasy-entity: Source_FileSecure
+ x-speakeasy-entity: Source_File
x-speakeasy-param-suppress-computed-diff: true
SourceFireboltCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-firebolt"
secretId:
@@ -60933,16 +72062,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceFreshcallerCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-freshcaller"
secretId:
@@ -60954,16 +72088,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceFreshdeskCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-freshdesk"
secretId:
@@ -60975,16 +72114,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceFreshsalesCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-freshsales"
secretId:
@@ -60996,16 +72140,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGainsightPxCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-gainsight-px"
secretId:
@@ -61017,16 +72166,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGcsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-gcs"
secretId:
@@ -61038,16 +72192,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGetlagoCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-getlago"
secretId:
@@ -61059,16 +72218,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGithubCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-github"
secretId:
@@ -61080,16 +72244,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGitlabCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-gitlab"
secretId:
@@ -61101,16 +72270,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGlassfrogCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-glassfrog"
secretId:
@@ -61122,16 +72296,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGnewsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-gnews"
secretId:
@@ -61143,16 +72322,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGoogleAdsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-google-ads"
secretId:
@@ -61164,16 +72348,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGoogleAnalyticsDataApiCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-google-analytics-data-api"
secretId:
@@ -61183,60 +72372,75 @@ components:
type: "string"
x-speakeasy-entity: Source_GoogleAnalyticsDataApi
x-speakeasy-param-suppress-computed-diff: true
- SourceGoogleAnalyticsV4CreateRequest:
+ SourceGoogleDirectoryCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
- $ref: "#/components/schemas/source-google-analytics-v4"
+ $ref: "#/components/schemas/source-google-directory"
secretId:
description:
"Optional secretID obtained through the public API OAuth redirect\
\ flow."
type: "string"
- x-speakeasy-entity: Source_GoogleAnalyticsV4
+ x-speakeasy-entity: Source_GoogleDirectory
x-speakeasy-param-suppress-computed-diff: true
- SourceGoogleDirectoryCreateRequest:
+ SourceGoogleDriveCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
- $ref: "#/components/schemas/source-google-directory"
+ $ref: "#/components/schemas/source-google-drive"
secretId:
description:
"Optional secretID obtained through the public API OAuth redirect\
\ flow."
type: "string"
- x-speakeasy-entity: Source_GoogleDirectory
+ x-speakeasy-entity: Source_GoogleDrive
x-speakeasy-param-suppress-computed-diff: true
SourceGooglePagespeedInsightsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-google-pagespeed-insights"
secretId:
@@ -61248,16 +72452,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGoogleSearchConsoleCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-google-search-console"
secretId:
@@ -61269,16 +72478,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGoogleSheetsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-google-sheets"
secretId:
@@ -61290,16 +72504,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGoogleWebfontsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-google-webfonts"
secretId:
@@ -61311,16 +72530,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGoogleWorkspaceAdminReportsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-google-workspace-admin-reports"
secretId:
@@ -61332,16 +72556,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGreenhouseCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-greenhouse"
secretId:
@@ -61353,16 +72582,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceGridlyCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-gridly"
secretId:
@@ -61374,16 +72608,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceHarvestCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-harvest"
secretId:
@@ -61395,16 +72634,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceHubplannerCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-hubplanner"
secretId:
@@ -61416,16 +72660,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceHubspotCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-hubspot"
secretId:
@@ -61437,16 +72686,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceInsightlyCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-insightly"
secretId:
@@ -61458,16 +72712,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceInstagramCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-instagram"
secretId:
@@ -61479,16 +72738,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceInstatusCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-instatus"
secretId:
@@ -61500,16 +72764,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceIntercomCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-intercom"
secretId:
@@ -61521,16 +72790,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceIp2whoisCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-ip2whois"
secretId:
@@ -61542,16 +72816,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceIterableCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-iterable"
secretId:
@@ -61563,16 +72842,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceJiraCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-jira"
secretId:
@@ -61584,16 +72868,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceK6CloudCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-k6-cloud"
secretId:
@@ -61605,16 +72894,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceKlarnaCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-klarna"
secretId:
@@ -61626,16 +72920,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceKlaviyoCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-klaviyo"
secretId:
@@ -61647,16 +72946,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceKustomerSingerCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-kustomer-singer"
secretId:
@@ -61668,16 +72972,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceKyveCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-kyve"
secretId:
@@ -61689,16 +72998,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceLaunchdarklyCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-launchdarkly"
secretId:
@@ -61710,16 +73024,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceLemlistCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-lemlist"
secretId:
@@ -61731,16 +73050,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceLeverHiringCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-lever-hiring"
secretId:
@@ -61752,16 +73076,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceLinkedinAdsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-linkedin-ads"
secretId:
@@ -61773,16 +73102,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceLinkedinPagesCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-linkedin-pages"
secretId:
@@ -61794,16 +73128,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceLinnworksCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-linnworks"
secretId:
@@ -61815,16 +73154,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceLokaliseCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-lokalise"
secretId:
@@ -61836,16 +73180,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceMailchimpCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-mailchimp"
secretId:
@@ -61857,16 +73206,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceMailgunCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-mailgun"
secretId:
@@ -61878,16 +73232,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceMailjetSmsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-mailjet-sms"
secretId:
@@ -61899,16 +73258,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceMarketoCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-marketo"
secretId:
@@ -61920,16 +73284,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceMetabaseCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-metabase"
secretId:
@@ -61941,16 +73310,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceMicrosoftTeamsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-microsoft-teams"
secretId:
@@ -61962,16 +73336,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceMixpanelCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-mixpanel"
secretId:
@@ -61983,16 +73362,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceMondayCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-monday"
secretId:
@@ -62002,60 +73386,75 @@ components:
type: "string"
x-speakeasy-entity: Source_Monday
x-speakeasy-param-suppress-computed-diff: true
- SourceMongodbCreateRequest:
+ SourceMongodbInternalPocCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
- $ref: "#/components/schemas/source-mongodb"
+ $ref: "#/components/schemas/source-mongodb-internal-poc"
secretId:
description:
"Optional secretID obtained through the public API OAuth redirect\
\ flow."
type: "string"
- x-speakeasy-entity: Source_Mongodb
+ x-speakeasy-entity: Source_MongodbInternalPoc
x-speakeasy-param-suppress-computed-diff: true
- SourceMongodbInternalPocCreateRequest:
+ SourceMongodbV2CreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
- $ref: "#/components/schemas/source-mongodb-internal-poc"
+ $ref: "#/components/schemas/source-mongodb-v2"
secretId:
description:
"Optional secretID obtained through the public API OAuth redirect\
\ flow."
type: "string"
- x-speakeasy-entity: Source_MongodbInternalPoc
+ x-speakeasy-entity: Source_MongodbV2
x-speakeasy-param-suppress-computed-diff: true
SourceMssqlCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-mssql"
secretId:
@@ -62067,16 +73466,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceMyHoursCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-my-hours"
secretId:
@@ -62088,16 +73492,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceMysqlCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-mysql"
secretId:
@@ -62109,16 +73518,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceNetsuiteCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-netsuite"
secretId:
@@ -62130,16 +73544,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceNotionCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-notion"
secretId:
@@ -62151,16 +73570,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceNytimesCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-nytimes"
secretId:
@@ -62172,16 +73596,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceOktaCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-okta"
secretId:
@@ -62193,16 +73622,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceOmnisendCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-omnisend"
secretId:
@@ -62214,16 +73648,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceOnesignalCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-onesignal"
secretId:
@@ -62235,16 +73674,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceOracleCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-oracle"
secretId:
@@ -62256,16 +73700,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceOrbCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-orb"
secretId:
@@ -62277,16 +73726,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceOrbitCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-orbit"
secretId:
@@ -62298,16 +73752,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceOutbrainAmplifyCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-outbrain-amplify"
secretId:
@@ -62318,17 +73777,22 @@ components:
x-speakeasy-entity: Source_OutbrainAmplify
x-speakeasy-param-suppress-computed-diff: true
SourceOutreachCreateRequest:
- required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ required:
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-outreach"
secretId:
@@ -62340,16 +73804,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePaypalTransactionCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-paypal-transaction"
secretId:
@@ -62361,16 +73830,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePaystackCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-paystack"
secretId:
@@ -62382,16 +73856,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePendoCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-pendo"
secretId:
@@ -62403,16 +73882,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePersistiqCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-persistiq"
secretId:
@@ -62424,16 +73908,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePexelsApiCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-pexels-api"
secretId:
@@ -62445,16 +73934,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePinterestCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-pinterest"
secretId:
@@ -62466,16 +73960,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePipedriveCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-pipedrive"
secretId:
@@ -62487,16 +73986,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePocketCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-pocket"
secretId:
@@ -62508,16 +74012,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePokeapiCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-pokeapi"
secretId:
@@ -62529,16 +74038,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePolygonStockApiCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-polygon-stock-api"
secretId:
@@ -62550,16 +74064,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePostgresCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-postgres"
secretId:
@@ -62571,16 +74090,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePosthogCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-posthog"
secretId:
@@ -62592,16 +74116,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePostmarkappCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-postmarkapp"
secretId:
@@ -62613,16 +74142,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePrestashopCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-prestashop"
secretId:
@@ -62634,16 +74168,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePunkApiCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-punk-api"
secretId:
@@ -62655,16 +74194,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourcePypiCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-pypi"
secretId:
@@ -62676,16 +74220,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceQualarooCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-qualaroo"
secretId:
@@ -62697,16 +74246,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceQuickbooksCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-quickbooks"
secretId:
@@ -62718,16 +74272,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceRailzCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-railz"
secretId:
@@ -62739,16 +74298,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceRechargeCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-recharge"
secretId:
@@ -62760,16 +74324,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceRecreationCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-recreation"
secretId:
@@ -62781,16 +74350,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceRecruiteeCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-recruitee"
secretId:
@@ -62802,16 +74376,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceRecurlyCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-recurly"
secretId:
@@ -62823,16 +74402,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceRedshiftCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-redshift"
secretId:
@@ -62844,16 +74428,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceRetentlyCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-retently"
secretId:
@@ -62865,16 +74454,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceRkiCovidCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-rki-covid"
secretId:
@@ -62886,16 +74480,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceRssCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-rss"
secretId:
@@ -62907,16 +74506,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceS3CreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-s3"
secretId:
@@ -62928,16 +74532,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSalesforceCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-salesforce"
secretId:
@@ -62949,16 +74558,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSalesloftCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-salesloft"
secretId:
@@ -62970,16 +74584,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSapFieldglassCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-sap-fieldglass"
secretId:
@@ -62991,16 +74610,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSecodaCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-secoda"
secretId:
@@ -63012,16 +74636,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSendgridCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-sendgrid"
secretId:
@@ -63033,16 +74662,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSendinblueCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-sendinblue"
secretId:
@@ -63054,16 +74688,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSenseforceCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-senseforce"
secretId:
@@ -63075,16 +74714,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSentryCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-sentry"
secretId:
@@ -63096,16 +74740,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSftpCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-sftp"
secretId:
@@ -63117,16 +74766,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSftpBulkCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-sftp-bulk"
secretId:
@@ -63138,16 +74792,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceShopifyCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-shopify"
secretId:
@@ -63159,16 +74818,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceShortioCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-shortio"
secretId:
@@ -63180,16 +74844,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSlackCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-slack"
secretId:
@@ -63201,16 +74870,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSmailyCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-smaily"
secretId:
@@ -63222,16 +74896,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSmartengageCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-smartengage"
secretId:
@@ -63243,16 +74922,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSmartsheetsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-smartsheets"
secretId:
@@ -63264,16 +74948,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSnapchatMarketingCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-snapchat-marketing"
secretId:
@@ -63285,16 +74974,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSnowflakeCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-snowflake"
secretId:
@@ -63306,16 +75000,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSonarCloudCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-sonar-cloud"
secretId:
@@ -63327,16 +75026,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSpacexApiCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-spacex-api"
secretId:
@@ -63348,16 +75052,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSquareCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-square"
secretId:
@@ -63369,16 +75078,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceStravaCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-strava"
secretId:
@@ -63390,16 +75104,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceStripeCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-stripe"
secretId:
@@ -63411,16 +75130,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSurveySparrowCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-survey-sparrow"
secretId:
@@ -63432,16 +75156,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceSurveymonkeyCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-surveymonkey"
secretId:
@@ -63453,16 +75182,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceTempoCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-tempo"
secretId:
@@ -63474,16 +75208,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceTheGuardianApiCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-the-guardian-api"
secretId:
@@ -63495,16 +75234,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceTiktokMarketingCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-tiktok-marketing"
secretId:
@@ -63516,16 +75260,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceTodoistCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-todoist"
secretId:
@@ -63537,16 +75286,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceTrelloCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-trello"
secretId:
@@ -63558,16 +75312,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceTrustpilotCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-trustpilot"
secretId:
@@ -63579,16 +75338,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceTvmazeScheduleCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-tvmaze-schedule"
secretId:
@@ -63600,16 +75364,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceTwilioCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-twilio"
secretId:
@@ -63621,16 +75390,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceTwilioTaskrouterCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-twilio-taskrouter"
secretId:
@@ -63642,16 +75416,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceTwitterCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-twitter"
secretId:
@@ -63663,16 +75442,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceTypeformCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-typeform"
secretId:
@@ -63684,16 +75468,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceUsCensusCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-us-census"
secretId:
@@ -63705,16 +75494,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceVantageCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-vantage"
secretId:
@@ -63726,16 +75520,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceWebflowCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-webflow"
secretId:
@@ -63747,16 +75546,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceWhiskyHunterCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-whisky-hunter"
secretId:
@@ -63768,16 +75572,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceWikipediaPageviewsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-wikipedia-pageviews"
secretId:
@@ -63789,16 +75598,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceWoocommerceCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-woocommerce"
secretId:
@@ -63808,39 +75622,23 @@ components:
type: "string"
x-speakeasy-entity: Source_Woocommerce
x-speakeasy-param-suppress-computed-diff: true
- SourceXeroCreateRequest:
- required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
- properties:
- name:
- type: "string"
- workspaceId:
- format: "uuid"
- type: "string"
- configuration:
- $ref: "#/components/schemas/source-xero"
- secretId:
- description:
- "Optional secretID obtained through the public API OAuth redirect\
- \ flow."
- type: "string"
- x-speakeasy-entity: Source_Xero
- x-speakeasy-param-suppress-computed-diff: true
SourceXkcdCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-xkcd"
secretId:
@@ -63852,16 +75650,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceYandexMetricaCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-yandex-metrica"
secretId:
@@ -63873,16 +75676,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceYotpoCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-yotpo"
secretId:
@@ -63892,81 +75700,101 @@ components:
type: "string"
x-speakeasy-entity: Source_Yotpo
x-speakeasy-param-suppress-computed-diff: true
- SourceYouniumCreateRequest:
+ SourceYoutubeAnalyticsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
- $ref: "#/components/schemas/source-younium"
+ $ref: "#/components/schemas/source-youtube-analytics"
secretId:
description:
"Optional secretID obtained through the public API OAuth redirect\
\ flow."
type: "string"
- x-speakeasy-entity: Source_Younium
+ x-speakeasy-entity: Source_YoutubeAnalytics
x-speakeasy-param-suppress-computed-diff: true
- SourceYoutubeAnalyticsCreateRequest:
+ SourceZendeskChatCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
- $ref: "#/components/schemas/source-youtube-analytics"
+ $ref: "#/components/schemas/source-zendesk-chat"
secretId:
description:
"Optional secretID obtained through the public API OAuth redirect\
\ flow."
type: "string"
- x-speakeasy-entity: Source_YoutubeAnalytics
+ x-speakeasy-entity: Source_ZendeskChat
x-speakeasy-param-suppress-computed-diff: true
- SourceZendeskChatCreateRequest:
+ SourceZendeskSellCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
- $ref: "#/components/schemas/source-zendesk-chat"
+ $ref: "#/components/schemas/source-zendesk-sell"
secretId:
description:
"Optional secretID obtained through the public API OAuth redirect\
\ flow."
type: "string"
- x-speakeasy-entity: Source_ZendeskChat
+ x-speakeasy-entity: Source_ZendeskSell
x-speakeasy-param-suppress-computed-diff: true
SourceZendeskSunshineCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-zendesk-sunshine"
secretId:
@@ -63978,16 +75806,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceZendeskSupportCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-zendesk-support"
secretId:
@@ -63999,16 +75832,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceZendeskTalkCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-zendesk-talk"
secretId:
@@ -64020,16 +75858,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceZenloopCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-zenloop"
secretId:
@@ -64041,16 +75884,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceZohoCrmCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-zoho-crm"
secretId:
@@ -64062,16 +75910,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceZoomCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-zoom"
secretId:
@@ -64083,16 +75936,21 @@ components:
x-speakeasy-param-suppress-computed-diff: true
SourceZuoraCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the source e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/source-zuora"
secretId:
@@ -64104,592 +75962,819 @@ components:
x-speakeasy-param-suppress-computed-diff: true
DestinationAwsDatalakeCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-aws-datalake"
x-speakeasy-entity: Destination_AwsDatalake
x-speakeasy-param-suppress-computed-diff: true
DestinationAzureBlobStorageCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-azure-blob-storage"
x-speakeasy-entity: Destination_AzureBlobStorage
x-speakeasy-param-suppress-computed-diff: true
DestinationBigqueryCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-bigquery"
x-speakeasy-entity: Destination_Bigquery
x-speakeasy-param-suppress-computed-diff: true
- DestinationBigqueryDenormalizedCreateRequest:
- required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
- properties:
- name:
- type: "string"
- workspaceId:
- format: "uuid"
- type: "string"
- configuration:
- $ref: "#/components/schemas/destination-bigquery-denormalized"
- x-speakeasy-entity: Destination_BigqueryDenormalized
- x-speakeasy-param-suppress-computed-diff: true
DestinationClickhouseCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-clickhouse"
x-speakeasy-entity: Destination_Clickhouse
x-speakeasy-param-suppress-computed-diff: true
DestinationConvexCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-convex"
x-speakeasy-entity: Destination_Convex
x-speakeasy-param-suppress-computed-diff: true
DestinationCumulioCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-cumulio"
x-speakeasy-entity: Destination_Cumulio
x-speakeasy-param-suppress-computed-diff: true
DestinationDatabendCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-databend"
x-speakeasy-entity: Destination_Databend
x-speakeasy-param-suppress-computed-diff: true
DestinationDatabricksCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-databricks"
x-speakeasy-entity: Destination_Databricks
x-speakeasy-param-suppress-computed-diff: true
DestinationDevNullCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-dev-null"
x-speakeasy-entity: Destination_DevNull
x-speakeasy-param-suppress-computed-diff: true
+ DestinationDuckdbCreateRequest:
+ required:
+ - name
+ - workspaceId
+ - configuration
+ type: object
+ properties:
+ name:
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
+ workspaceId:
+ format: uuid
+ type: string
+ configuration:
+ $ref: "#/components/schemas/destination-duckdb"
+ x-speakeasy-entity: Destination_Duckdb
+ x-speakeasy-param-suppress-computed-diff: true
DestinationDynamodbCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-dynamodb"
x-speakeasy-entity: Destination_Dynamodb
x-speakeasy-param-suppress-computed-diff: true
DestinationElasticsearchCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-elasticsearch"
x-speakeasy-entity: Destination_Elasticsearch
x-speakeasy-param-suppress-computed-diff: true
DestinationFireboltCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-firebolt"
x-speakeasy-entity: Destination_Firebolt
x-speakeasy-param-suppress-computed-diff: true
DestinationFirestoreCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-firestore"
x-speakeasy-entity: Destination_Firestore
x-speakeasy-param-suppress-computed-diff: true
DestinationGcsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-gcs"
x-speakeasy-entity: Destination_Gcs
x-speakeasy-param-suppress-computed-diff: true
DestinationGoogleSheetsCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-google-sheets"
x-speakeasy-entity: Destination_GoogleSheets
x-speakeasy-param-suppress-computed-diff: true
DestinationKeenCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-keen"
x-speakeasy-entity: Destination_Keen
x-speakeasy-param-suppress-computed-diff: true
DestinationKinesisCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-kinesis"
x-speakeasy-entity: Destination_Kinesis
x-speakeasy-param-suppress-computed-diff: true
DestinationLangchainCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-langchain"
x-speakeasy-entity: Destination_Langchain
x-speakeasy-param-suppress-computed-diff: true
DestinationMilvusCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-milvus"
x-speakeasy-entity: Destination_Milvus
x-speakeasy-param-suppress-computed-diff: true
DestinationMongodbCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-mongodb"
x-speakeasy-entity: Destination_Mongodb
x-speakeasy-param-suppress-computed-diff: true
DestinationMssqlCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-mssql"
x-speakeasy-entity: Destination_Mssql
x-speakeasy-param-suppress-computed-diff: true
DestinationMysqlCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-mysql"
x-speakeasy-entity: Destination_Mysql
x-speakeasy-param-suppress-computed-diff: true
DestinationOracleCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-oracle"
x-speakeasy-entity: Destination_Oracle
x-speakeasy-param-suppress-computed-diff: true
DestinationPineconeCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-pinecone"
x-speakeasy-entity: Destination_Pinecone
x-speakeasy-param-suppress-computed-diff: true
DestinationPostgresCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-postgres"
x-speakeasy-entity: Destination_Postgres
x-speakeasy-param-suppress-computed-diff: true
DestinationPubsubCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-pubsub"
x-speakeasy-entity: Destination_Pubsub
x-speakeasy-param-suppress-computed-diff: true
+ DestinationQdrantCreateRequest:
+ required:
+ - name
+ - workspaceId
+ - configuration
+ type: object
+ properties:
+ name:
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
+ workspaceId:
+ format: uuid
+ type: string
+ configuration:
+ $ref: "#/components/schemas/destination-qdrant"
+ x-speakeasy-entity: Destination_Qdrant
+ x-speakeasy-param-suppress-computed-diff: true
DestinationRedisCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-redis"
x-speakeasy-entity: Destination_Redis
x-speakeasy-param-suppress-computed-diff: true
DestinationRedshiftCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-redshift"
x-speakeasy-entity: Destination_Redshift
x-speakeasy-param-suppress-computed-diff: true
DestinationS3CreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-s3"
x-speakeasy-entity: Destination_S3
x-speakeasy-param-suppress-computed-diff: true
DestinationS3GlueCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-s3-glue"
x-speakeasy-entity: Destination_S3Glue
x-speakeasy-param-suppress-computed-diff: true
DestinationSftpJsonCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-sftp-json"
x-speakeasy-entity: Destination_SftpJson
x-speakeasy-param-suppress-computed-diff: true
DestinationSnowflakeCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-snowflake"
x-speakeasy-entity: Destination_Snowflake
x-speakeasy-param-suppress-computed-diff: true
DestinationTimeplusCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-timeplus"
x-speakeasy-entity: Destination_Timeplus
x-speakeasy-param-suppress-computed-diff: true
DestinationTypesenseCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-typesense"
x-speakeasy-entity: Destination_Typesense
x-speakeasy-param-suppress-computed-diff: true
DestinationVerticaCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-vertica"
x-speakeasy-entity: Destination_Vertica
x-speakeasy-param-suppress-computed-diff: true
+ DestinationWeaviateCreateRequest:
+ required:
+ - name
+ - workspaceId
+ - configuration
+ type: object
+ properties:
+ name:
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
+ workspaceId:
+ format: uuid
+ type: string
+ configuration:
+ $ref: "#/components/schemas/destination-weaviate"
+ x-speakeasy-entity: Destination_Weaviate
+ x-speakeasy-param-suppress-computed-diff: true
DestinationXataCreateRequest:
required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
+ - name
+ - workspaceId
+ - configuration
+ type: object
properties:
name:
- type: "string"
+ description: Name of the destination e.g. dev-mysql-instance.
+ type: string
+ definitionId:
+ description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ format: uuid
+ type: string
workspaceId:
- format: "uuid"
- type: "string"
+ format: uuid
+ type: string
configuration:
$ref: "#/components/schemas/destination-xata"
x-speakeasy-entity: Destination_Xata
@@ -64950,7 +77035,7 @@ components:
$ref: "#/components/schemas/source-bamboo-hr-update"
x-speakeasy-entity: Source_BambooHr
x-speakeasy-param-suppress-computed-diff: true
- SourceBigcommercePutRequest:
+ SourceBigqueryPutRequest:
required:
- "name"
- "workspaceId"
@@ -64963,10 +77048,10 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-bigcommerce-update"
- x-speakeasy-entity: Source_Bigcommerce
+ $ref: "#/components/schemas/source-bigquery-update"
+ x-speakeasy-entity: Source_Bigquery
x-speakeasy-param-suppress-computed-diff: true
- SourceBigqueryPutRequest:
+ SourceBingAdsPutRequest:
required:
- "name"
- "workspaceId"
@@ -64979,10 +77064,10 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-bigquery-update"
- x-speakeasy-entity: Source_Bigquery
+ $ref: "#/components/schemas/source-bing-ads-update"
+ x-speakeasy-entity: Source_BingAds
x-speakeasy-param-suppress-computed-diff: true
- SourceBingAdsPutRequest:
+ SourceBraintreePutRequest:
required:
- "name"
- "workspaceId"
@@ -64995,10 +77080,10 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-bing-ads-update"
- x-speakeasy-entity: Source_BingAds
+ $ref: "#/components/schemas/source-braintree-update"
+ x-speakeasy-entity: Source_Braintree
x-speakeasy-param-suppress-computed-diff: true
- SourceBraintreePutRequest:
+ SourceBrazePutRequest:
required:
- "name"
- "workspaceId"
@@ -65011,10 +77096,10 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-braintree-update"
- x-speakeasy-entity: Source_Braintree
+ $ref: "#/components/schemas/source-braze-update"
+ x-speakeasy-entity: Source_Braze
x-speakeasy-param-suppress-computed-diff: true
- SourceBrazePutRequest:
+ SourceCartPutRequest:
required:
- "name"
- "workspaceId"
@@ -65027,8 +77112,8 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-braze-update"
- x-speakeasy-entity: Source_Braze
+ $ref: "#/components/schemas/source-cart-update"
+ x-speakeasy-entity: Source_Cart
x-speakeasy-param-suppress-computed-diff: true
SourceChargebeePutRequest:
required:
@@ -65318,22 +77403,6 @@ components:
$ref: "#/components/schemas/source-dynamodb-update"
x-speakeasy-entity: Source_Dynamodb
x-speakeasy-param-suppress-computed-diff: true
- SourceE2eTestCloudPutRequest:
- required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
- properties:
- name:
- type: "string"
- workspaceId:
- format: "uuid"
- type: "string"
- configuration:
- $ref: "#/components/schemas/source-e2e-test-cloud-update"
- x-speakeasy-entity: Source_E2eTestCloud
- x-speakeasy-param-suppress-computed-diff: true
SourceEmailoctopusPutRequest:
required:
- "name"
@@ -65430,7 +77499,7 @@ components:
$ref: "#/components/schemas/source-fauna-update"
x-speakeasy-entity: Source_Fauna
x-speakeasy-param-suppress-computed-diff: true
- SourceFileSecurePutRequest:
+ SourceFilePutRequest:
required:
- "name"
- "workspaceId"
@@ -65443,8 +77512,8 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-file-secure-update"
- x-speakeasy-entity: Source_FileSecure
+ $ref: "#/components/schemas/source-file-update"
+ x-speakeasy-entity: Source_File
x-speakeasy-param-suppress-computed-diff: true
SourceFireboltPutRequest:
required:
@@ -65654,7 +77723,7 @@ components:
$ref: "#/components/schemas/source-google-analytics-data-api-update"
x-speakeasy-entity: Source_GoogleAnalyticsDataApi
x-speakeasy-param-suppress-computed-diff: true
- SourceGoogleAnalyticsV4PutRequest:
+ SourceGoogleDirectoryPutRequest:
required:
- "name"
- "workspaceId"
@@ -65667,10 +77736,10 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-google-analytics-v4-update"
- x-speakeasy-entity: Source_GoogleAnalyticsV4
+ $ref: "#/components/schemas/source-google-directory-update"
+ x-speakeasy-entity: Source_GoogleDirectory
x-speakeasy-param-suppress-computed-diff: true
- SourceGoogleDirectoryPutRequest:
+ SourceGoogleDrivePutRequest:
required:
- "name"
- "workspaceId"
@@ -65683,8 +77752,8 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-google-directory-update"
- x-speakeasy-entity: Source_GoogleDirectory
+ $ref: "#/components/schemas/source-google-drive-update"
+ x-speakeasy-entity: Source_GoogleDrive
x-speakeasy-param-suppress-computed-diff: true
SourceGooglePagespeedInsightsPutRequest:
required:
@@ -66278,7 +78347,7 @@ components:
$ref: "#/components/schemas/source-monday-update"
x-speakeasy-entity: Source_Monday
x-speakeasy-param-suppress-computed-diff: true
- SourceMongodbPutRequest:
+ SourceMongodbInternalPocPutRequest:
required:
- "name"
- "workspaceId"
@@ -66291,10 +78360,10 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-mongodb-update"
- x-speakeasy-entity: Source_Mongodb
+ $ref: "#/components/schemas/source-mongodb-internal-poc-update"
+ x-speakeasy-entity: Source_MongodbInternalPoc
x-speakeasy-param-suppress-computed-diff: true
- SourceMongodbInternalPocPutRequest:
+ SourceMongodbV2PutRequest:
required:
- "name"
- "workspaceId"
@@ -66307,8 +78376,8 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-mongodb-internal-poc-update"
- x-speakeasy-entity: Source_MongodbInternalPoc
+ $ref: "#/components/schemas/source-mongodb-v2-update"
+ x-speakeasy-entity: Source_MongodbV2
x-speakeasy-param-suppress-computed-diff: true
SourceMssqlPutRequest:
required:
@@ -67654,22 +79723,6 @@ components:
$ref: "#/components/schemas/source-woocommerce-update"
x-speakeasy-entity: Source_Woocommerce
x-speakeasy-param-suppress-computed-diff: true
- SourceXeroPutRequest:
- required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
- properties:
- name:
- type: "string"
- workspaceId:
- format: "uuid"
- type: "string"
- configuration:
- $ref: "#/components/schemas/source-xero-update"
- x-speakeasy-entity: Source_Xero
- x-speakeasy-param-suppress-computed-diff: true
SourceXkcdPutRequest:
required:
- "name"
@@ -67718,7 +79771,7 @@ components:
$ref: "#/components/schemas/source-yotpo-update"
x-speakeasy-entity: Source_Yotpo
x-speakeasy-param-suppress-computed-diff: true
- SourceYouniumPutRequest:
+ SourceYoutubeAnalyticsPutRequest:
required:
- "name"
- "workspaceId"
@@ -67731,10 +79784,10 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-younium-update"
- x-speakeasy-entity: Source_Younium
+ $ref: "#/components/schemas/source-youtube-analytics-update"
+ x-speakeasy-entity: Source_YoutubeAnalytics
x-speakeasy-param-suppress-computed-diff: true
- SourceYoutubeAnalyticsPutRequest:
+ SourceZendeskChatPutRequest:
required:
- "name"
- "workspaceId"
@@ -67747,10 +79800,10 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-youtube-analytics-update"
- x-speakeasy-entity: Source_YoutubeAnalytics
+ $ref: "#/components/schemas/source-zendesk-chat-update"
+ x-speakeasy-entity: Source_ZendeskChat
x-speakeasy-param-suppress-computed-diff: true
- SourceZendeskChatPutRequest:
+ SourceZendeskSellPutRequest:
required:
- "name"
- "workspaceId"
@@ -67763,8 +79816,8 @@ components:
format: "uuid"
type: "string"
configuration:
- $ref: "#/components/schemas/source-zendesk-chat-update"
- x-speakeasy-entity: Source_ZendeskChat
+ $ref: "#/components/schemas/source-zendesk-sell-update"
+ x-speakeasy-entity: Source_ZendeskSell
x-speakeasy-param-suppress-computed-diff: true
SourceZendeskSunshinePutRequest:
required:
@@ -67926,22 +79979,6 @@ components:
$ref: "#/components/schemas/destination-bigquery-update"
x-speakeasy-entity: Destination_Bigquery
x-speakeasy-param-suppress-computed-diff: true
- DestinationBigqueryDenormalizedPutRequest:
- required:
- - "name"
- - "workspaceId"
- - "configuration"
- type: "object"
- properties:
- name:
- type: "string"
- workspaceId:
- format: "uuid"
- type: "string"
- configuration:
- $ref: "#/components/schemas/destination-bigquery-denormalized-update"
- x-speakeasy-entity: Destination_BigqueryDenormalized
- x-speakeasy-param-suppress-computed-diff: true
DestinationClickhousePutRequest:
required:
- "name"
@@ -68038,6 +80075,22 @@ components:
$ref: "#/components/schemas/destination-dev-null-update"
x-speakeasy-entity: Destination_DevNull
x-speakeasy-param-suppress-computed-diff: true
+ DestinationDuckdbPutRequest:
+ required:
+ - "name"
+ - "workspaceId"
+ - "configuration"
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ workspaceId:
+ format: "uuid"
+ type: "string"
+ configuration:
+ $ref: "#/components/schemas/destination-duckdb-update"
+ x-speakeasy-entity: Destination_Duckdb
+ x-speakeasy-param-suppress-computed-diff: true
DestinationDynamodbPutRequest:
required:
- "name"
@@ -68310,6 +80363,22 @@ components:
$ref: "#/components/schemas/destination-pubsub-update"
x-speakeasy-entity: Destination_Pubsub
x-speakeasy-param-suppress-computed-diff: true
+ DestinationQdrantPutRequest:
+ required:
+ - "name"
+ - "workspaceId"
+ - "configuration"
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ workspaceId:
+ format: "uuid"
+ type: "string"
+ configuration:
+ $ref: "#/components/schemas/destination-qdrant-update"
+ x-speakeasy-entity: Destination_Qdrant
+ x-speakeasy-param-suppress-computed-diff: true
DestinationRedisPutRequest:
required:
- "name"
@@ -68454,6 +80523,22 @@ components:
$ref: "#/components/schemas/destination-vertica-update"
x-speakeasy-entity: Destination_Vertica
x-speakeasy-param-suppress-computed-diff: true
+ DestinationWeaviatePutRequest:
+ required:
+ - "name"
+ - "workspaceId"
+ - "configuration"
+ type: "object"
+ properties:
+ name:
+ type: "string"
+ workspaceId:
+ format: "uuid"
+ type: "string"
+ configuration:
+ $ref: "#/components/schemas/destination-weaviate-update"
+ x-speakeasy-entity: Destination_Weaviate
+ x-speakeasy-param-suppress-computed-diff: true
DestinationXataPutRequest:
required:
- "name"
diff --git a/docs/data-sources/connection.md b/docs/data-sources/connection.md
index ad58cde69..5a9825acb 100644
--- a/docs/data-sources/connection.md
+++ b/docs/data-sources/connection.md
@@ -28,15 +28,15 @@ data "airbyte_connection" "my_connection" {
### Read-Only
- `configurations` (Attributes) A list of configured stream options for a connection. (see [below for nested schema](#nestedatt--configurations))
-- `data_residency` (String) must be one of ["auto", "us", "eu"]
+- `data_residency` (String) must be one of ["auto", "us", "eu"]; Default: "auto"
- `destination_id` (String)
-- `name` (String) Optional name of the connection
-- `namespace_definition` (String) must be one of ["source", "destination", "custom_format"]
+- `name` (String)
+- `namespace_definition` (String) must be one of ["source", "destination", "custom_format"]; Default: "destination"
Define the location where the data will be stored in the destination
-- `namespace_format` (String) Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
-- `non_breaking_schema_updates_behavior` (String) must be one of ["ignore", "disable_connection", "propagate_columns", "propagate_fully"]
+- `namespace_format` (String)
+- `non_breaking_schema_updates_behavior` (String) must be one of ["ignore", "disable_connection", "propagate_columns", "propagate_fully"]; Default: "ignore"
Set how Airbyte handles syncs when it detects a non-breaking schema change in the source
-- `prefix` (String) Prefix that will be prepended to the name of each stream when it is written to the destination (ex. “airbyte_” causes “projects” => “airbyte_projects”).
+- `prefix` (String)
- `schedule` (Attributes) schedule for when the the connection should run, per the schedule type (see [below for nested schema](#nestedatt--schedule))
- `source_id` (String)
- `status` (String) must be one of ["active", "inactive", "deprecated"]
@@ -68,6 +68,6 @@ Read-Only:
- `basic_timing` (String)
- `cron_expression` (String)
-- `schedule_type` (String) must be one of ["manual", "cron"]
+- `schedule_type` (String) must be one of ["manual", "cron", "basic"]
diff --git a/docs/data-sources/destination_aws_datalake.md b/docs/data-sources/destination_aws_datalake.md
index a74e088b9..126bc87cf 100644
--- a/docs/data-sources/destination_aws_datalake.md
+++ b/docs/data-sources/destination_aws_datalake.md
@@ -27,131 +27,10 @@ data "airbyte_destination_aws_datalake" "my_destination_awsdatalake" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `aws_account_id` (String) target aws account id
-- `bucket_name` (String) The name of the S3 bucket. Read more here.
-- `bucket_prefix` (String) S3 prefix
-- `credentials` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `destination_type` (String) must be one of ["aws-datalake"]
-- `format` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format))
-- `glue_catalog_float_as_decimal` (Boolean) Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source.
-- `lakeformation_database_default_tag_key` (String) Add a default tag key to databases created by this destination
-- `lakeformation_database_default_tag_values` (String) Add default values for the `Tag Key` to databases created by this destination. Comma separate for multiple values.
-- `lakeformation_database_name` (String) The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.
-- `lakeformation_governed_tables` (Boolean) Whether to create tables as LF governed tables.
-- `partitioning` (String) must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]
-Partition data by cursor fields when a cursor field is a date
-- `region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the S3 bucket. See here for all region codes.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `destination_aws_datalake_authentication_mode_iam_role` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials--destination_aws_datalake_authentication_mode_iam_role))
-- `destination_aws_datalake_authentication_mode_iam_user` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials--destination_aws_datalake_authentication_mode_iam_user))
-- `destination_aws_datalake_update_authentication_mode_iam_role` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials--destination_aws_datalake_update_authentication_mode_iam_role))
-- `destination_aws_datalake_update_authentication_mode_iam_user` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials--destination_aws_datalake_update_authentication_mode_iam_user))
-
-
-### Nested Schema for `configuration.credentials.destination_aws_datalake_authentication_mode_iam_role`
-
-Read-Only:
-
-- `credentials_title` (String) must be one of ["IAM Role"]
-Name of the credentials
-- `role_arn` (String) Will assume this role to write data to s3
-
-
-
-### Nested Schema for `configuration.credentials.destination_aws_datalake_authentication_mode_iam_user`
-
-Read-Only:
-
-- `aws_access_key_id` (String) AWS User Access Key Id
-- `aws_secret_access_key` (String) Secret Access Key
-- `credentials_title` (String) must be one of ["IAM User"]
-Name of the credentials
-
-
-
-### Nested Schema for `configuration.credentials.destination_aws_datalake_update_authentication_mode_iam_role`
-
-Read-Only:
-
-- `credentials_title` (String) must be one of ["IAM Role"]
-Name of the credentials
-- `role_arn` (String) Will assume this role to write data to s3
-
-
-
-### Nested Schema for `configuration.credentials.destination_aws_datalake_update_authentication_mode_iam_user`
-
-Read-Only:
-
-- `aws_access_key_id` (String) AWS User Access Key Id
-- `aws_secret_access_key` (String) Secret Access Key
-- `credentials_title` (String) must be one of ["IAM User"]
-Name of the credentials
-
-
-
-
-### Nested Schema for `configuration.format`
-
-Read-Only:
-
-- `destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format--destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json))
-- `destination_aws_datalake_output_format_wildcard_parquet_columnar_storage` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format--destination_aws_datalake_output_format_wildcard_parquet_columnar_storage))
-- `destination_aws_datalake_update_output_format_wildcard_json_lines_newline_delimited_json` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format--destination_aws_datalake_update_output_format_wildcard_json_lines_newline_delimited_json))
-- `destination_aws_datalake_update_output_format_wildcard_parquet_columnar_storage` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format--destination_aws_datalake_update_output_format_wildcard_parquet_columnar_storage))
-
-
-### Nested Schema for `configuration.format.destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "GZIP"]
-The compression algorithm used to compress data.
-- `format_type` (String) must be one of ["JSONL"]
-
-
-
-### Nested Schema for `configuration.format.destination_aws_datalake_output_format_wildcard_parquet_columnar_storage`
-
-Read-Only:
-
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]
-The compression algorithm used to compress data.
-- `format_type` (String) must be one of ["Parquet"]
-
-
-
-### Nested Schema for `configuration.format.destination_aws_datalake_update_output_format_wildcard_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "GZIP"]
-The compression algorithm used to compress data.
-- `format_type` (String) must be one of ["JSONL"]
-
-
-
-### Nested Schema for `configuration.format.destination_aws_datalake_update_output_format_wildcard_parquet_columnar_storage`
-
-Read-Only:
-
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]
-The compression algorithm used to compress data.
-- `format_type` (String) must be one of ["Parquet"]
-
diff --git a/docs/data-sources/destination_azure_blob_storage.md b/docs/data-sources/destination_azure_blob_storage.md
index 78b4dddf5..311a1e9a1 100644
--- a/docs/data-sources/destination_azure_blob_storage.md
+++ b/docs/data-sources/destination_azure_blob_storage.md
@@ -27,67 +27,10 @@ data "airbyte_destination_azure_blob_storage" "my_destination_azureblobstorage"
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `azure_blob_storage_account_key` (String) The Azure blob storage account key.
-- `azure_blob_storage_account_name` (String) The account's name of the Azure Blob Storage.
-- `azure_blob_storage_container_name` (String) The name of the Azure blob storage container. If not exists - will be created automatically. May be empty, then will be created automatically airbytecontainer+timestamp
-- `azure_blob_storage_endpoint_domain_name` (String) This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
-- `azure_blob_storage_output_buffer_size` (Number) The amount of megabytes to buffer for the output stream to Azure. This will impact memory footprint on workers, but may need adjustment for performance and appropriate block size in Azure.
-- `azure_blob_storage_spill_size` (Number) The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable
-- `destination_type` (String) must be one of ["azure-blob-storage"]
-- `format` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format))
-
-
-### Nested Schema for `configuration.format`
-
-Read-Only:
-
-- `destination_azure_blob_storage_output_format_csv_comma_separated_values` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format--destination_azure_blob_storage_output_format_csv_comma_separated_values))
-- `destination_azure_blob_storage_output_format_json_lines_newline_delimited_json` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format--destination_azure_blob_storage_output_format_json_lines_newline_delimited_json))
-- `destination_azure_blob_storage_update_output_format_csv_comma_separated_values` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format--destination_azure_blob_storage_update_output_format_csv_comma_separated_values))
-- `destination_azure_blob_storage_update_output_format_json_lines_newline_delimited_json` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format--destination_azure_blob_storage_update_output_format_json_lines_newline_delimited_json))
-
-
-### Nested Schema for `configuration.format.destination_azure_blob_storage_output_format_csv_comma_separated_values`
-
-Read-Only:
-
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-- `format_type` (String) must be one of ["CSV"]
-
-
-
-### Nested Schema for `configuration.format.destination_azure_blob_storage_output_format_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `format_type` (String) must be one of ["JSONL"]
-
-
-
-### Nested Schema for `configuration.format.destination_azure_blob_storage_update_output_format_csv_comma_separated_values`
-
-Read-Only:
-
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-- `format_type` (String) must be one of ["CSV"]
-
-
-
-### Nested Schema for `configuration.format.destination_azure_blob_storage_update_output_format_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `format_type` (String) must be one of ["JSONL"]
-
diff --git a/docs/data-sources/destination_bigquery.md b/docs/data-sources/destination_bigquery.md
index 74596f1dd..b2b0d7c2c 100644
--- a/docs/data-sources/destination_bigquery.md
+++ b/docs/data-sources/destination_bigquery.md
@@ -27,114 +27,10 @@ data "airbyte_destination_bigquery" "my_destination_bigquery" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `big_query_client_buffer_size_mb` (Number) Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.
-- `credentials_json` (String) The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
-- `dataset_id` (String) The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
-- `dataset_location` (String) must be one of ["US", "EU", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central1", "europe-central2", "europe-north1", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west7", "europe-west8", "europe-west9", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east2", "us-east3", "us-east4", "us-east5", "us-west1", "us-west2", "us-west3", "us-west4"]
-The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.
-- `destination_type` (String) must be one of ["bigquery"]
-- `loading_method` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method))
-- `project_id` (String) The GCP project ID for the project containing the target BigQuery dataset. Read more here.
-- `raw_data_dataset` (String) The dataset to write raw tables into
-- `transformation_priority` (String) must be one of ["interactive", "batch"]
-Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly.
-
-
-### Nested Schema for `configuration.loading_method`
-
-Read-Only:
-
-- `destination_bigquery_loading_method_gcs_staging` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_loading_method_gcs_staging))
-- `destination_bigquery_loading_method_standard_inserts` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_loading_method_standard_inserts))
-- `destination_bigquery_update_loading_method_gcs_staging` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_update_loading_method_gcs_staging))
-- `destination_bigquery_update_loading_method_standard_inserts` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_update_loading_method_standard_inserts))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_loading_method_gcs_staging`
-
-Read-Only:
-
-- `credential` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_loading_method_gcs_staging--credential))
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
-- `gcs_bucket_name` (String) The name of the GCS bucket. Read more here.
-- `gcs_bucket_path` (String) Directory under the GCS bucket where data will be written.
-- `keep_files_in_gcs_bucket` (String) must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
-This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-- `method` (String) must be one of ["GCS Staging"]
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_loading_method_gcs_staging.method`
-
-Read-Only:
-
-- `destination_bigquery_loading_method_gcs_staging_credential_hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_loading_method_gcs_staging--method--destination_bigquery_loading_method_gcs_staging_credential_hmac_key))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_loading_method_gcs_staging.method.destination_bigquery_loading_method_gcs_staging_credential_hmac_key`
-
-Read-Only:
-
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
-
-
-
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_loading_method_standard_inserts`
-
-Read-Only:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_update_loading_method_gcs_staging`
-
-Read-Only:
-
-- `credential` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_update_loading_method_gcs_staging--credential))
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
-- `gcs_bucket_name` (String) The name of the GCS bucket. Read more here.
-- `gcs_bucket_path` (String) Directory under the GCS bucket where data will be written.
-- `keep_files_in_gcs_bucket` (String) must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
-This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-- `method` (String) must be one of ["GCS Staging"]
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_update_loading_method_gcs_staging.method`
-
-Read-Only:
-
-- `destination_bigquery_update_loading_method_gcs_staging_credential_hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_update_loading_method_gcs_staging--method--destination_bigquery_update_loading_method_gcs_staging_credential_hmac_key))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_update_loading_method_gcs_staging.method.destination_bigquery_update_loading_method_gcs_staging_credential_hmac_key`
-
-Read-Only:
-
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
-
-
-
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_update_loading_method_standard_inserts`
-
-Read-Only:
-
-- `method` (String) must be one of ["Standard"]
-
diff --git a/docs/data-sources/destination_bigquery_denormalized.md b/docs/data-sources/destination_bigquery_denormalized.md
deleted file mode 100644
index 136f7db42..000000000
--- a/docs/data-sources/destination_bigquery_denormalized.md
+++ /dev/null
@@ -1,137 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_destination_bigquery_denormalized Data Source - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- DestinationBigqueryDenormalized DataSource
----
-
-# airbyte_destination_bigquery_denormalized (Data Source)
-
-DestinationBigqueryDenormalized DataSource
-
-## Example Usage
-
-```terraform
-data "airbyte_destination_bigquery_denormalized" "my_destination_bigquerydenormalized" {
- destination_id = "...my_destination_id..."
-}
-```
-
-
-## Schema
-
-### Required
-
-- `destination_id` (String)
-
-### Read-Only
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `big_query_client_buffer_size_mb` (Number) Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.
-- `credentials_json` (String) The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
-- `dataset_id` (String) The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
-- `dataset_location` (String) must be one of ["US", "EU", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central1", "europe-central2", "europe-north1", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west7", "europe-west8", "europe-west9", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east2", "us-east3", "us-east4", "us-east5", "us-west1", "us-west2", "us-west3", "us-west4"]
-The location of the dataset. Warning: Changes made after creation will not be applied. The default "US" value is used if not set explicitly. Read more here.
-- `destination_type` (String) must be one of ["bigquery-denormalized"]
-- `loading_method` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method))
-- `project_id` (String) The GCP project ID for the project containing the target BigQuery dataset. Read more here.
-
-
-### Nested Schema for `configuration.loading_method`
-
-Read-Only:
-
-- `destination_bigquery_denormalized_loading_method_gcs_staging` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_loading_method_gcs_staging))
-- `destination_bigquery_denormalized_loading_method_standard_inserts` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_loading_method_standard_inserts))
-- `destination_bigquery_denormalized_update_loading_method_gcs_staging` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_update_loading_method_gcs_staging))
-- `destination_bigquery_denormalized_update_loading_method_standard_inserts` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_update_loading_method_standard_inserts))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_loading_method_gcs_staging`
-
-Read-Only:
-
-- `credential` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_loading_method_gcs_staging--credential))
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
-- `gcs_bucket_name` (String) The name of the GCS bucket. Read more here.
-- `gcs_bucket_path` (String) Directory under the GCS bucket where data will be written. Read more here.
-- `keep_files_in_gcs_bucket` (String) must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
-This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-- `method` (String) must be one of ["GCS Staging"]
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_loading_method_gcs_staging.method`
-
-Read-Only:
-
-- `destination_bigquery_denormalized_loading_method_gcs_staging_credential_hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_loading_method_gcs_staging--method--destination_bigquery_denormalized_loading_method_gcs_staging_credential_hmac_key))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_loading_method_gcs_staging.method.destination_bigquery_denormalized_loading_method_gcs_staging_credential_hmac_key`
-
-Read-Only:
-
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
-
-
-
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_loading_method_standard_inserts`
-
-Read-Only:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_update_loading_method_gcs_staging`
-
-Read-Only:
-
-- `credential` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_update_loading_method_gcs_staging--credential))
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
-- `gcs_bucket_name` (String) The name of the GCS bucket. Read more here.
-- `gcs_bucket_path` (String) Directory under the GCS bucket where data will be written. Read more here.
-- `keep_files_in_gcs_bucket` (String) must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
-This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-- `method` (String) must be one of ["GCS Staging"]
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_update_loading_method_gcs_staging.method`
-
-Read-Only:
-
-- `destination_bigquery_denormalized_update_loading_method_gcs_staging_credential_hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_update_loading_method_gcs_staging--method--destination_bigquery_denormalized_update_loading_method_gcs_staging_credential_hmac_key))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_update_loading_method_gcs_staging.method.destination_bigquery_denormalized_update_loading_method_gcs_staging_credential_hmac_key`
-
-Read-Only:
-
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
-
-
-
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_update_loading_method_standard_inserts`
-
-Read-Only:
-
-- `method` (String) must be one of ["Standard"]
-
-
diff --git a/docs/data-sources/destination_clickhouse.md b/docs/data-sources/destination_clickhouse.md
index b80d91423..698295f37 100644
--- a/docs/data-sources/destination_clickhouse.md
+++ b/docs/data-sources/destination_clickhouse.md
@@ -27,103 +27,10 @@ data "airbyte_destination_clickhouse" "my_destination_clickhouse" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["clickhouse"]
-- `host` (String) Hostname of the database.
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) Password associated with the username.
-- `port` (Number) HTTP port of the database.
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) Username to use to access the database.
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `destination_clickhouse_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_ssh_tunnel_method_no_tunnel))
-- `destination_clickhouse_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_ssh_tunnel_method_password_authentication))
-- `destination_clickhouse_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_ssh_tunnel_method_ssh_key_authentication))
-- `destination_clickhouse_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_update_ssh_tunnel_method_no_tunnel))
-- `destination_clickhouse_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_update_ssh_tunnel_method_password_authentication))
-- `destination_clickhouse_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/destination_convex.md b/docs/data-sources/destination_convex.md
index 962b6c13b..1fcc34e35 100644
--- a/docs/data-sources/destination_convex.md
+++ b/docs/data-sources/destination_convex.md
@@ -27,17 +27,10 @@ data "airbyte_destination_convex" "my_destination_convex" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_key` (String) API access key used to send data to a Convex deployment.
-- `deployment_url` (String) URL of the Convex deployment that is the destination
-- `destination_type` (String) must be one of ["convex"]
-
diff --git a/docs/data-sources/destination_cumulio.md b/docs/data-sources/destination_cumulio.md
index a9e00b846..2b717a9c2 100644
--- a/docs/data-sources/destination_cumulio.md
+++ b/docs/data-sources/destination_cumulio.md
@@ -27,18 +27,10 @@ data "airbyte_destination_cumulio" "my_destination_cumulio" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_host` (String) URL of the Cumul.io API (e.g. 'https://api.cumul.io', 'https://api.us.cumul.io', or VPC-specific API url). Defaults to 'https://api.cumul.io'.
-- `api_key` (String) An API key generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).
-- `api_token` (String) The corresponding API token generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).
-- `destination_type` (String) must be one of ["cumulio"]
-
diff --git a/docs/data-sources/destination_databend.md b/docs/data-sources/destination_databend.md
index c7750edcb..3aee7b9d7 100644
--- a/docs/data-sources/destination_databend.md
+++ b/docs/data-sources/destination_databend.md
@@ -27,21 +27,10 @@ data "airbyte_destination_databend" "my_destination_databend" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["databend"]
-- `host` (String) Hostname of the database.
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the database.
-- `table` (String) The default table was written to.
-- `username` (String) Username to use to access the database.
-
diff --git a/docs/data-sources/destination_databricks.md b/docs/data-sources/destination_databricks.md
index 267cd6625..5ad8d456d 100644
--- a/docs/data-sources/destination_databricks.md
+++ b/docs/data-sources/destination_databricks.md
@@ -27,106 +27,10 @@ data "airbyte_destination_databricks" "my_destination_databricks" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `accept_terms` (Boolean) You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector.
-- `data_source` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source))
-- `database` (String) The name of the catalog. If not specified otherwise, the "hive_metastore" will be used.
-- `databricks_http_path` (String) Databricks Cluster HTTP Path.
-- `databricks_personal_access_token` (String) Databricks Personal Access Token for making authenticated requests.
-- `databricks_port` (String) Databricks Cluster Port.
-- `databricks_server_hostname` (String) Databricks Cluster Server Hostname.
-- `destination_type` (String) must be one of ["databricks"]
-- `enable_schema_evolution` (Boolean) Support schema evolution for all streams. If "false", the connector might fail when a stream's schema changes.
-- `purge_staging_data` (Boolean) Default to 'true'. Switch it to 'false' for debugging purpose.
-- `schema` (String) The default schema tables are written. If not specified otherwise, the "default" will be used.
-
-
-### Nested Schema for `configuration.data_source`
-
-Read-Only:
-
-- `destination_databricks_data_source_amazon_s3` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_data_source_amazon_s3))
-- `destination_databricks_data_source_azure_blob_storage` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_data_source_azure_blob_storage))
-- `destination_databricks_data_source_recommended_managed_tables` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_data_source_recommended_managed_tables))
-- `destination_databricks_update_data_source_amazon_s3` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_update_data_source_amazon_s3))
-- `destination_databricks_update_data_source_azure_blob_storage` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_update_data_source_azure_blob_storage))
-- `destination_databricks_update_data_source_recommended_managed_tables` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_update_data_source_recommended_managed_tables))
-
-
-### Nested Schema for `configuration.data_source.destination_databricks_data_source_amazon_s3`
-
-Read-Only:
-
-- `data_source_type` (String) must be one of ["S3_STORAGE"]
-- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-- `s3_access_key_id` (String) The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.
-- `s3_bucket_name` (String) The name of the S3 bucket to use for intermittent staging of the data.
-- `s3_bucket_path` (String) The directory under the S3 bucket where data will be written.
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the S3 staging bucket to use if utilising a copy strategy.
-- `s3_secret_access_key` (String) The corresponding secret to the above access key id.
-
-
-
-### Nested Schema for `configuration.data_source.destination_databricks_data_source_azure_blob_storage`
-
-Read-Only:
-
-- `azure_blob_storage_account_name` (String) The account's name of the Azure Blob Storage.
-- `azure_blob_storage_container_name` (String) The name of the Azure blob storage container.
-- `azure_blob_storage_endpoint_domain_name` (String) This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
-- `azure_blob_storage_sas_token` (String) Shared access signature (SAS) token to grant limited access to objects in your storage account.
-- `data_source_type` (String) must be one of ["AZURE_BLOB_STORAGE"]
-
-
-
-### Nested Schema for `configuration.data_source.destination_databricks_data_source_recommended_managed_tables`
-
-Read-Only:
-
-- `data_source_type` (String) must be one of ["MANAGED_TABLES_STORAGE"]
-
-
-
-### Nested Schema for `configuration.data_source.destination_databricks_update_data_source_amazon_s3`
-
-Read-Only:
-
-- `data_source_type` (String) must be one of ["S3_STORAGE"]
-- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-- `s3_access_key_id` (String) The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.
-- `s3_bucket_name` (String) The name of the S3 bucket to use for intermittent staging of the data.
-- `s3_bucket_path` (String) The directory under the S3 bucket where data will be written.
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the S3 staging bucket to use if utilising a copy strategy.
-- `s3_secret_access_key` (String) The corresponding secret to the above access key id.
-
-
-
-### Nested Schema for `configuration.data_source.destination_databricks_update_data_source_azure_blob_storage`
-
-Read-Only:
-
-- `azure_blob_storage_account_name` (String) The account's name of the Azure Blob Storage.
-- `azure_blob_storage_container_name` (String) The name of the Azure blob storage container.
-- `azure_blob_storage_endpoint_domain_name` (String) This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
-- `azure_blob_storage_sas_token` (String) Shared access signature (SAS) token to grant limited access to objects in your storage account.
-- `data_source_type` (String) must be one of ["AZURE_BLOB_STORAGE"]
-
-
-
-### Nested Schema for `configuration.data_source.destination_databricks_update_data_source_recommended_managed_tables`
-
-Read-Only:
-
-- `data_source_type` (String) must be one of ["MANAGED_TABLES_STORAGE"]
-
diff --git a/docs/data-sources/destination_dev_null.md b/docs/data-sources/destination_dev_null.md
index 63a8e6009..dce64add1 100644
--- a/docs/data-sources/destination_dev_null.md
+++ b/docs/data-sources/destination_dev_null.md
@@ -27,39 +27,10 @@ data "airbyte_destination_dev_null" "my_destination_devnull" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `destination_type` (String) must be one of ["dev-null"]
-- `test_destination` (Attributes) The type of destination to be used (see [below for nested schema](#nestedatt--configuration--test_destination))
-
-
-### Nested Schema for `configuration.test_destination`
-
-Read-Only:
-
-- `destination_dev_null_test_destination_silent` (Attributes) The type of destination to be used (see [below for nested schema](#nestedatt--configuration--test_destination--destination_dev_null_test_destination_silent))
-- `destination_dev_null_update_test_destination_silent` (Attributes) The type of destination to be used (see [below for nested schema](#nestedatt--configuration--test_destination--destination_dev_null_update_test_destination_silent))
-
-
-### Nested Schema for `configuration.test_destination.destination_dev_null_test_destination_silent`
-
-Read-Only:
-
-- `test_destination_type` (String) must be one of ["SILENT"]
-
-
-
-### Nested Schema for `configuration.test_destination.destination_dev_null_update_test_destination_silent`
-
-Read-Only:
-
-- `test_destination_type` (String) must be one of ["SILENT"]
-
diff --git a/docs/data-sources/destination_duckdb.md b/docs/data-sources/destination_duckdb.md
new file mode 100644
index 000000000..eb6f66030
--- /dev/null
+++ b/docs/data-sources/destination_duckdb.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_destination_duckdb Data Source - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ DestinationDuckdb DataSource
+---
+
+# airbyte_destination_duckdb (Data Source)
+
+DestinationDuckdb DataSource
+
+## Example Usage
+
+```terraform
+data "airbyte_destination_duckdb" "my_destination_duckdb" {
+ destination_id = "...my_destination_id..."
+}
+```
+
+
+## Schema
+
+### Required
+
+- `destination_id` (String)
+
+### Read-Only
+
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
+- `name` (String)
+- `workspace_id` (String)
+
+
diff --git a/docs/data-sources/destination_dynamodb.md b/docs/data-sources/destination_dynamodb.md
index ca158c7d8..78f7f8990 100644
--- a/docs/data-sources/destination_dynamodb.md
+++ b/docs/data-sources/destination_dynamodb.md
@@ -27,21 +27,10 @@ data "airbyte_destination_dynamodb" "my_destination_dynamodb" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_key_id` (String) The access key id to access the DynamoDB. Airbyte requires Read and Write permissions to the DynamoDB.
-- `destination_type` (String) must be one of ["dynamodb"]
-- `dynamodb_endpoint` (String) This is your DynamoDB endpoint url.(if you are working with AWS DynamoDB, just leave empty).
-- `dynamodb_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the DynamoDB.
-- `dynamodb_table_name_prefix` (String) The prefix to use when naming DynamoDB tables.
-- `secret_access_key` (String) The corresponding secret to the access key id.
-
diff --git a/docs/data-sources/destination_elasticsearch.md b/docs/data-sources/destination_elasticsearch.md
index a3aa42c53..574c8dd6f 100644
--- a/docs/data-sources/destination_elasticsearch.md
+++ b/docs/data-sources/destination_elasticsearch.md
@@ -27,68 +27,10 @@ data "airbyte_destination_elasticsearch" "my_destination_elasticsearch" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `authentication_method` (Attributes) The type of authentication to be used (see [below for nested schema](#nestedatt--configuration--authentication_method))
-- `ca_certificate` (String) CA certificate
-- `destination_type` (String) must be one of ["elasticsearch"]
-- `endpoint` (String) The full url of the Elasticsearch server
-- `upsert` (Boolean) If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys.
-
-
-### Nested Schema for `configuration.authentication_method`
-
-Read-Only:
-
-- `destination_elasticsearch_authentication_method_api_key_secret` (Attributes) Use a api key and secret combination to authenticate (see [below for nested schema](#nestedatt--configuration--authentication_method--destination_elasticsearch_authentication_method_api_key_secret))
-- `destination_elasticsearch_authentication_method_username_password` (Attributes) Basic auth header with a username and password (see [below for nested schema](#nestedatt--configuration--authentication_method--destination_elasticsearch_authentication_method_username_password))
-- `destination_elasticsearch_update_authentication_method_api_key_secret` (Attributes) Use a api key and secret combination to authenticate (see [below for nested schema](#nestedatt--configuration--authentication_method--destination_elasticsearch_update_authentication_method_api_key_secret))
-- `destination_elasticsearch_update_authentication_method_username_password` (Attributes) Basic auth header with a username and password (see [below for nested schema](#nestedatt--configuration--authentication_method--destination_elasticsearch_update_authentication_method_username_password))
-
-
-### Nested Schema for `configuration.authentication_method.destination_elasticsearch_authentication_method_api_key_secret`
-
-Read-Only:
-
-- `api_key_id` (String) The Key ID to used when accessing an enterprise Elasticsearch instance.
-- `api_key_secret` (String) The secret associated with the API Key ID.
-- `method` (String) must be one of ["secret"]
-
-
-
-### Nested Schema for `configuration.authentication_method.destination_elasticsearch_authentication_method_username_password`
-
-Read-Only:
-
-- `method` (String) must be one of ["basic"]
-- `password` (String) Basic auth password to access a secure Elasticsearch server
-- `username` (String) Basic auth username to access a secure Elasticsearch server
-
-
-
-### Nested Schema for `configuration.authentication_method.destination_elasticsearch_update_authentication_method_api_key_secret`
-
-Read-Only:
-
-- `api_key_id` (String) The Key ID to used when accessing an enterprise Elasticsearch instance.
-- `api_key_secret` (String) The secret associated with the API Key ID.
-- `method` (String) must be one of ["secret"]
-
-
-
-### Nested Schema for `configuration.authentication_method.destination_elasticsearch_update_authentication_method_username_password`
-
-Read-Only:
-
-- `method` (String) must be one of ["basic"]
-- `password` (String) Basic auth password to access a secure Elasticsearch server
-- `username` (String) Basic auth username to access a secure Elasticsearch server
-
diff --git a/docs/data-sources/destination_firebolt.md b/docs/data-sources/destination_firebolt.md
index 9860c7478..329a1f3e7 100644
--- a/docs/data-sources/destination_firebolt.md
+++ b/docs/data-sources/destination_firebolt.md
@@ -27,71 +27,10 @@ data "airbyte_destination_firebolt" "my_destination_firebolt" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `account` (String) Firebolt account to login.
-- `database` (String) The database to connect to.
-- `destination_type` (String) must be one of ["firebolt"]
-- `engine` (String) Engine name or url to connect to.
-- `host` (String) The host name of your Firebolt database.
-- `loading_method` (Attributes) Loading method used to select the way data will be uploaded to Firebolt (see [below for nested schema](#nestedatt--configuration--loading_method))
-- `password` (String) Firebolt password.
-- `username` (String) Firebolt email address you use to login.
-
-
-### Nested Schema for `configuration.loading_method`
-
-Read-Only:
-
-- `destination_firebolt_loading_method_external_table_via_s3` (Attributes) Loading method used to select the way data will be uploaded to Firebolt (see [below for nested schema](#nestedatt--configuration--loading_method--destination_firebolt_loading_method_external_table_via_s3))
-- `destination_firebolt_loading_method_sql_inserts` (Attributes) Loading method used to select the way data will be uploaded to Firebolt (see [below for nested schema](#nestedatt--configuration--loading_method--destination_firebolt_loading_method_sql_inserts))
-- `destination_firebolt_update_loading_method_external_table_via_s3` (Attributes) Loading method used to select the way data will be uploaded to Firebolt (see [below for nested schema](#nestedatt--configuration--loading_method--destination_firebolt_update_loading_method_external_table_via_s3))
-- `destination_firebolt_update_loading_method_sql_inserts` (Attributes) Loading method used to select the way data will be uploaded to Firebolt (see [below for nested schema](#nestedatt--configuration--loading_method--destination_firebolt_update_loading_method_sql_inserts))
-
-
-### Nested Schema for `configuration.loading_method.destination_firebolt_loading_method_external_table_via_s3`
-
-Read-Only:
-
-- `aws_key_id` (String) AWS access key granting read and write access to S3.
-- `aws_key_secret` (String) Corresponding secret part of the AWS Key
-- `method` (String) must be one of ["S3"]
-- `s3_bucket` (String) The name of the S3 bucket.
-- `s3_region` (String) Region name of the S3 bucket.
-
-
-
-### Nested Schema for `configuration.loading_method.destination_firebolt_loading_method_sql_inserts`
-
-Read-Only:
-
-- `method` (String) must be one of ["SQL"]
-
-
-
-### Nested Schema for `configuration.loading_method.destination_firebolt_update_loading_method_external_table_via_s3`
-
-Read-Only:
-
-- `aws_key_id` (String) AWS access key granting read and write access to S3.
-- `aws_key_secret` (String) Corresponding secret part of the AWS Key
-- `method` (String) must be one of ["S3"]
-- `s3_bucket` (String) The name of the S3 bucket.
-- `s3_region` (String) Region name of the S3 bucket.
-
-
-
-### Nested Schema for `configuration.loading_method.destination_firebolt_update_loading_method_sql_inserts`
-
-Read-Only:
-
-- `method` (String) must be one of ["SQL"]
-
diff --git a/docs/data-sources/destination_firestore.md b/docs/data-sources/destination_firestore.md
index f2b9f50a1..ed59960c5 100644
--- a/docs/data-sources/destination_firestore.md
+++ b/docs/data-sources/destination_firestore.md
@@ -27,17 +27,10 @@ data "airbyte_destination_firestore" "my_destination_firestore" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials_json` (String) The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
-- `destination_type` (String) must be one of ["firestore"]
-- `project_id` (String) The GCP project ID for the project containing the target BigQuery dataset.
-
diff --git a/docs/data-sources/destination_gcs.md b/docs/data-sources/destination_gcs.md
index 3423670b8..ada8298f5 100644
--- a/docs/data-sources/destination_gcs.md
+++ b/docs/data-sources/destination_gcs.md
@@ -27,381 +27,10 @@ data "airbyte_destination_gcs" "my_destination_gcs" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credential` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--credential))
-- `destination_type` (String) must be one of ["gcs"]
-- `format` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format))
-- `gcs_bucket_name` (String) You can find the bucket name in the App Engine Admin console Application Settings page, under the label Google Cloud Storage Bucket. Read more here.
-- `gcs_bucket_path` (String) GCS Bucket Path string Subdirectory under the above bucket to sync the data into.
-- `gcs_bucket_region` (String) must be one of ["northamerica-northeast1", "northamerica-northeast2", "us-central1", "us-east1", "us-east4", "us-west1", "us-west2", "us-west3", "us-west4", "southamerica-east1", "southamerica-west1", "europe-central2", "europe-north1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "asia", "eu", "us", "asia1", "eur4", "nam4"]
-Select a Region of the GCS Bucket. Read more here.
-
-
-### Nested Schema for `configuration.credential`
-
-Read-Only:
-
-- `destination_gcs_authentication_hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--credential--destination_gcs_authentication_hmac_key))
-- `destination_gcs_update_authentication_hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--credential--destination_gcs_update_authentication_hmac_key))
-
-
-### Nested Schema for `configuration.credential.destination_gcs_authentication_hmac_key`
-
-Read-Only:
-
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here.
-
-
-
-### Nested Schema for `configuration.credential.destination_gcs_update_authentication_hmac_key`
-
-Read-Only:
-
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here.
-
-
-
-
-### Nested Schema for `configuration.format`
-
-Read-Only:
-
-- `destination_gcs_output_format_avro_apache_avro` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro))
-- `destination_gcs_output_format_csv_comma_separated_values` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_csv_comma_separated_values))
-- `destination_gcs_output_format_json_lines_newline_delimited_json` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_json_lines_newline_delimited_json))
-- `destination_gcs_output_format_parquet_columnar_storage` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_parquet_columnar_storage))
-- `destination_gcs_update_output_format_avro_apache_avro` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro))
-- `destination_gcs_update_output_format_csv_comma_separated_values` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_csv_comma_separated_values))
-- `destination_gcs_update_output_format_json_lines_newline_delimited_json` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_json_lines_newline_delimited_json))
-- `destination_gcs_update_output_format_parquet_columnar_storage` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_parquet_columnar_storage))
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro`
-
-Read-Only:
-
-- `compression_codec` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--compression_codec))
-- `format_type` (String) must be one of ["Avro"]
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type`
-
-Read-Only:
-
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2))
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_deflate` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_deflate))
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression))
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_snappy` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_snappy))
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_xz` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_xz))
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard))
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2`
-
-Read-Only:
-
-- `codec` (String) must be one of ["bzip2"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_deflate`
-
-Read-Only:
-
-- `codec` (String) must be one of ["Deflate"]
-- `compression_level` (Number) 0: no compression & fastest, 9: best compression & slowest.
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression`
-
-Read-Only:
-
-- `codec` (String) must be one of ["no compression"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_snappy`
-
-Read-Only:
-
-- `codec` (String) must be one of ["snappy"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_xz`
-
-Read-Only:
-
-- `codec` (String) must be one of ["xz"]
-- `compression_level` (Number) The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard`
-
-Read-Only:
-
-- `codec` (String) must be one of ["zstandard"]
-- `compression_level` (Number) Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
-- `include_checksum` (Boolean) If true, include a checksum with each data block.
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_csv_comma_separated_values`
-
-Read-Only:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_csv_comma_separated_values--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-- `format_type` (String) must be one of ["CSV"]
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_csv_comma_separated_values.format_type`
-
-Read-Only:
-
-- `destination_gcs_output_format_csv_comma_separated_values_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_csv_comma_separated_values--format_type--destination_gcs_output_format_csv_comma_separated_values_compression_gzip))
-- `destination_gcs_output_format_csv_comma_separated_values_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_csv_comma_separated_values--format_type--destination_gcs_output_format_csv_comma_separated_values_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_csv_comma_separated_values.format_type.destination_gcs_output_format_csv_comma_separated_values_compression_gzip`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_csv_comma_separated_values.format_type.destination_gcs_output_format_csv_comma_separated_values_compression_no_compression`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_json_lines_newline_delimited_json--compression))
-- `format_type` (String) must be one of ["JSONL"]
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_json_lines_newline_delimited_json.format_type`
-
-Read-Only:
-
-- `destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_json_lines_newline_delimited_json--format_type--destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_json_lines_newline_delimited_json--format_type--destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_json_lines_newline_delimited_json.format_type.destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_json_lines_newline_delimited_json.format_type.destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_parquet_columnar_storage`
-
-Read-Only:
-
-- `block_size_mb` (Number) This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]
-The compression algorithm used to compress data pages.
-- `dictionary_encoding` (Boolean) Default: true.
-- `dictionary_page_size_kb` (Number) There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
-- `format_type` (String) must be one of ["Parquet"]
-- `max_padding_size_mb` (Number) Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
-- `page_size_kb` (Number) The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro`
-
-Read-Only:
-
-- `compression_codec` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--compression_codec))
-- `format_type` (String) must be one of ["Avro"]
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type`
-
-Read-Only:
-
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_bzip2` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_bzip2))
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_deflate` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_deflate))
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_no_compression` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_no_compression))
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_snappy` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_snappy))
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_xz` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_xz))
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_zstandard` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_zstandard))
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_bzip2`
-
-Read-Only:
-
-- `codec` (String) must be one of ["bzip2"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_deflate`
-
-Read-Only:
-
-- `codec` (String) must be one of ["Deflate"]
-- `compression_level` (Number) 0: no compression & fastest, 9: best compression & slowest.
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_no_compression`
-
-Read-Only:
-
-- `codec` (String) must be one of ["no compression"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_snappy`
-
-Read-Only:
-
-- `codec` (String) must be one of ["snappy"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_xz`
-
-Read-Only:
-
-- `codec` (String) must be one of ["xz"]
-- `compression_level` (Number) The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_zstandard`
-
-Read-Only:
-
-- `codec` (String) must be one of ["zstandard"]
-- `compression_level` (Number) Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
-- `include_checksum` (Boolean) If true, include a checksum with each data block.
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_csv_comma_separated_values`
-
-Read-Only:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_csv_comma_separated_values--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-- `format_type` (String) must be one of ["CSV"]
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_csv_comma_separated_values.format_type`
-
-Read-Only:
-
-- `destination_gcs_update_output_format_csv_comma_separated_values_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_csv_comma_separated_values--format_type--destination_gcs_update_output_format_csv_comma_separated_values_compression_gzip))
-- `destination_gcs_update_output_format_csv_comma_separated_values_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_csv_comma_separated_values--format_type--destination_gcs_update_output_format_csv_comma_separated_values_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_csv_comma_separated_values.format_type.destination_gcs_update_output_format_csv_comma_separated_values_compression_gzip`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_csv_comma_separated_values.format_type.destination_gcs_update_output_format_csv_comma_separated_values_compression_no_compression`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_json_lines_newline_delimited_json--compression))
-- `format_type` (String) must be one of ["JSONL"]
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_json_lines_newline_delimited_json.format_type`
-
-Read-Only:
-
-- `destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_json_lines_newline_delimited_json--format_type--destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_json_lines_newline_delimited_json--format_type--destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_json_lines_newline_delimited_json.format_type.destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_gzip`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_json_lines_newline_delimited_json.format_type.destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_no_compression`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_parquet_columnar_storage`
-
-Read-Only:
-
-- `block_size_mb` (Number) This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]
-The compression algorithm used to compress data pages.
-- `dictionary_encoding` (Boolean) Default: true.
-- `dictionary_page_size_kb` (Number) There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
-- `format_type` (String) must be one of ["Parquet"]
-- `max_padding_size_mb` (Number) Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
-- `page_size_kb` (Number) The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
-
diff --git a/docs/data-sources/destination_google_sheets.md b/docs/data-sources/destination_google_sheets.md
index 6513438a8..ce1340761 100644
--- a/docs/data-sources/destination_google_sheets.md
+++ b/docs/data-sources/destination_google_sheets.md
@@ -27,26 +27,10 @@ data "airbyte_destination_google_sheets" "my_destination_googlesheets" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Google API Credentials for connecting to Google Sheets and Google Drive APIs (see [below for nested schema](#nestedatt--configuration--credentials))
-- `destination_type` (String) must be one of ["google-sheets"]
-- `spreadsheet_id` (String) The link to your spreadsheet. See this guide for more details.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `client_id` (String) The Client ID of your Google Sheets developer application.
-- `client_secret` (String) The Client Secret of your Google Sheets developer application.
-- `refresh_token` (String) The token for obtaining new access token.
-
diff --git a/docs/data-sources/destination_keen.md b/docs/data-sources/destination_keen.md
index c8d1b3efc..1ee3a8435 100644
--- a/docs/data-sources/destination_keen.md
+++ b/docs/data-sources/destination_keen.md
@@ -27,18 +27,10 @@ data "airbyte_destination_keen" "my_destination_keen" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) To get Keen Master API Key, navigate to the Access tab from the left-hand, side panel and check the Project Details section.
-- `destination_type` (String) must be one of ["keen"]
-- `infer_timestamp` (Boolean) Allow connector to guess keen.timestamp value based on the streamed data.
-- `project_id` (String) To get Keen Project ID, navigate to the Access tab from the left-hand, side panel and check the Project Details section.
-
diff --git a/docs/data-sources/destination_kinesis.md b/docs/data-sources/destination_kinesis.md
index 505774cb9..f1efe960f 100644
--- a/docs/data-sources/destination_kinesis.md
+++ b/docs/data-sources/destination_kinesis.md
@@ -27,21 +27,10 @@ data "airbyte_destination_kinesis" "my_destination_kinesis" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_key` (String) Generate the AWS Access Key for current user.
-- `buffer_size` (Number) Buffer size for storing kinesis records before being batch streamed.
-- `destination_type` (String) must be one of ["kinesis"]
-- `endpoint` (String) AWS Kinesis endpoint.
-- `private_key` (String) The AWS Private Key - a string of numbers and letters that are unique for each account, also known as a "recovery phrase".
-- `region` (String) AWS region. Your account determines the Regions that are available to you.
-- `shard_count` (Number) Number of shards to which the data should be streamed.
-
diff --git a/docs/data-sources/destination_langchain.md b/docs/data-sources/destination_langchain.md
index d6c478c6e..464749589 100644
--- a/docs/data-sources/destination_langchain.md
+++ b/docs/data-sources/destination_langchain.md
@@ -27,145 +27,10 @@ data "airbyte_destination_langchain" "my_destination_langchain" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `destination_type` (String) must be one of ["langchain"]
-- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding))
-- `indexing` (Attributes) Indexing configuration (see [below for nested schema](#nestedatt--configuration--indexing))
-- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing))
-
-
-### Nested Schema for `configuration.embedding`
-
-Read-Only:
-
-- `destination_langchain_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_langchain_embedding_fake))
-- `destination_langchain_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_langchain_embedding_open_ai))
-- `destination_langchain_update_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_langchain_update_embedding_fake))
-- `destination_langchain_update_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_langchain_update_embedding_open_ai))
-
-
-### Nested Schema for `configuration.embedding.destination_langchain_embedding_fake`
-
-Read-Only:
-
-- `mode` (String) must be one of ["fake"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_langchain_embedding_open_ai`
-
-Read-Only:
-
-- `mode` (String) must be one of ["openai"]
-- `openai_key` (String)
-
-
-
-### Nested Schema for `configuration.embedding.destination_langchain_update_embedding_fake`
-
-Read-Only:
-
-- `mode` (String) must be one of ["fake"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_langchain_update_embedding_open_ai`
-
-Read-Only:
-
-- `mode` (String) must be one of ["openai"]
-- `openai_key` (String)
-
-
-
-
-### Nested Schema for `configuration.indexing`
-
-Read-Only:
-
-- `destination_langchain_indexing_chroma_local_persistance` (Attributes) Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_indexing_chroma_local_persistance))
-- `destination_langchain_indexing_doc_array_hnsw_search` (Attributes) DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_indexing_doc_array_hnsw_search))
-- `destination_langchain_indexing_pinecone` (Attributes) Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_indexing_pinecone))
-- `destination_langchain_update_indexing_chroma_local_persistance` (Attributes) Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_update_indexing_chroma_local_persistance))
-- `destination_langchain_update_indexing_doc_array_hnsw_search` (Attributes) DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_update_indexing_doc_array_hnsw_search))
-- `destination_langchain_update_indexing_pinecone` (Attributes) Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_update_indexing_pinecone))
-
-
-### Nested Schema for `configuration.indexing.destination_langchain_indexing_chroma_local_persistance`
-
-Read-Only:
-
-- `collection_name` (String) Name of the collection to use.
-- `destination_path` (String) Path to the directory where chroma files will be written. The files will be placed inside that local mount.
-- `mode` (String) must be one of ["chroma_local"]
-
-
-
-### Nested Schema for `configuration.indexing.destination_langchain_indexing_doc_array_hnsw_search`
-
-Read-Only:
-
-- `destination_path` (String) Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run.
-- `mode` (String) must be one of ["DocArrayHnswSearch"]
-
-
-
-### Nested Schema for `configuration.indexing.destination_langchain_indexing_pinecone`
-
-Read-Only:
-
-- `index` (String) Pinecone index to use
-- `mode` (String) must be one of ["pinecone"]
-- `pinecone_environment` (String) Pinecone environment to use
-- `pinecone_key` (String)
-
-
-
-### Nested Schema for `configuration.indexing.destination_langchain_update_indexing_chroma_local_persistance`
-
-Read-Only:
-
-- `collection_name` (String) Name of the collection to use.
-- `destination_path` (String) Path to the directory where chroma files will be written. The files will be placed inside that local mount.
-- `mode` (String) must be one of ["chroma_local"]
-
-
-
-### Nested Schema for `configuration.indexing.destination_langchain_update_indexing_doc_array_hnsw_search`
-
-Read-Only:
-
-- `destination_path` (String) Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run.
-- `mode` (String) must be one of ["DocArrayHnswSearch"]
-
-
-
-### Nested Schema for `configuration.indexing.destination_langchain_update_indexing_pinecone`
-
-Read-Only:
-
-- `index` (String) Pinecone index to use
-- `mode` (String) must be one of ["pinecone"]
-- `pinecone_environment` (String) Pinecone environment to use
-- `pinecone_key` (String)
-
-
-
-
-### Nested Schema for `configuration.processing`
-
-Read-Only:
-
-- `chunk_overlap` (Number) Size of overlap between chunks in tokens to store in vector store to better capture relevant context
-- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)
-- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. All other fields are passed along as meta fields. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
-
diff --git a/docs/data-sources/destination_milvus.md b/docs/data-sources/destination_milvus.md
index 515a84e8e..ce46b12e2 100644
--- a/docs/data-sources/destination_milvus.md
+++ b/docs/data-sources/destination_milvus.md
@@ -27,195 +27,10 @@ data "airbyte_destination_milvus" "my_destination_milvus" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `destination_type` (String) must be one of ["milvus"]
-- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding))
-- `indexing` (Attributes) Indexing configuration (see [below for nested schema](#nestedatt--configuration--indexing))
-- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing))
-
-
-### Nested Schema for `configuration.embedding`
-
-Read-Only:
-
-- `destination_milvus_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_cohere))
-- `destination_milvus_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_fake))
-- `destination_milvus_embedding_from_field` (Attributes) Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_from_field))
-- `destination_milvus_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_open_ai))
-- `destination_milvus_update_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_cohere))
-- `destination_milvus_update_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_fake))
-- `destination_milvus_update_embedding_from_field` (Attributes) Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_from_field))
-- `destination_milvus_update_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_open_ai))
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_embedding_cohere`
-
-Read-Only:
-
-- `cohere_key` (String)
-- `mode` (String) must be one of ["cohere"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_embedding_fake`
-
-Read-Only:
-
-- `mode` (String) must be one of ["fake"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_embedding_from_field`
-
-Read-Only:
-
-- `dimensions` (Number) The number of dimensions the embedding model is generating
-- `field_name` (String) Name of the field in the record that contains the embedding
-- `mode` (String) must be one of ["from_field"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_embedding_open_ai`
-
-Read-Only:
-
-- `mode` (String) must be one of ["openai"]
-- `openai_key` (String)
-
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_cohere`
-
-Read-Only:
-
-- `cohere_key` (String)
-- `mode` (String) must be one of ["cohere"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_fake`
-
-Read-Only:
-
-- `mode` (String) must be one of ["fake"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_from_field`
-
-Read-Only:
-
-- `dimensions` (Number) The number of dimensions the embedding model is generating
-- `field_name` (String) Name of the field in the record that contains the embedding
-- `mode` (String) must be one of ["from_field"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_open_ai`
-
-Read-Only:
-
-- `mode` (String) must be one of ["openai"]
-- `openai_key` (String)
-
-
-
-
-### Nested Schema for `configuration.indexing`
-
-Read-Only:
-
-- `auth` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--indexing--auth))
-- `collection` (String) The collection to load data into
-- `db` (String) The database to connect to
-- `host` (String) The public endpoint of the Milvus instance.
-- `text_field` (String) The field in the entity that contains the embedded text
-- `vector_field` (String) The field in the entity that contains the vector
-
-
-### Nested Schema for `configuration.indexing.auth`
-
-Read-Only:
-
-- `destination_milvus_indexing_authentication_api_token` (Attributes) Authenticate using an API token (suitable for Zilliz Cloud) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_api_token))
-- `destination_milvus_indexing_authentication_no_auth` (Attributes) Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_no_auth))
-- `destination_milvus_indexing_authentication_username_password` (Attributes) Authenticate using username and password (suitable for self-managed Milvus clusters) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_username_password))
-- `destination_milvus_update_indexing_authentication_api_token` (Attributes) Authenticate using an API token (suitable for Zilliz Cloud) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_api_token))
-- `destination_milvus_update_indexing_authentication_no_auth` (Attributes) Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_no_auth))
-- `destination_milvus_update_indexing_authentication_username_password` (Attributes) Authenticate using username and password (suitable for self-managed Milvus clusters) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_username_password))
-
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
-
-Read-Only:
-
-- `mode` (String) must be one of ["token"]
-- `token` (String) API Token for the Milvus instance
-
-
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
-
-Read-Only:
-
-- `mode` (String) must be one of ["no_auth"]
-
-
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
-
-Read-Only:
-
-- `mode` (String) must be one of ["username_password"]
-- `password` (String) Password for the Milvus instance
-- `username` (String) Username for the Milvus instance
-
-
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
-
-Read-Only:
-
-- `mode` (String) must be one of ["token"]
-- `token` (String) API Token for the Milvus instance
-
-
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
-
-Read-Only:
-
-- `mode` (String) must be one of ["no_auth"]
-
-
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
-
-Read-Only:
-
-- `mode` (String) must be one of ["username_password"]
-- `password` (String) Password for the Milvus instance
-- `username` (String) Username for the Milvus instance
-
-
-
-
-
-### Nested Schema for `configuration.processing`
-
-Read-Only:
-
-- `chunk_overlap` (Number) Size of overlap between chunks in tokens to store in vector store to better capture relevant context
-- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)
-- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
-- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
-
diff --git a/docs/data-sources/destination_mongodb.md b/docs/data-sources/destination_mongodb.md
index fcb6ba121..40f2b6dd2 100644
--- a/docs/data-sources/destination_mongodb.md
+++ b/docs/data-sources/destination_mongodb.md
@@ -27,218 +27,10 @@ data "airbyte_destination_mongodb" "my_destination_mongodb" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `auth_type` (Attributes) Authorization type. (see [below for nested schema](#nestedatt--configuration--auth_type))
-- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["mongodb"]
-- `instance_type` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type))
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-
-
-### Nested Schema for `configuration.auth_type`
-
-Read-Only:
-
-- `destination_mongodb_authorization_type_login_password` (Attributes) Login/Password. (see [below for nested schema](#nestedatt--configuration--auth_type--destination_mongodb_authorization_type_login_password))
-- `destination_mongodb_authorization_type_none` (Attributes) None. (see [below for nested schema](#nestedatt--configuration--auth_type--destination_mongodb_authorization_type_none))
-- `destination_mongodb_update_authorization_type_login_password` (Attributes) Login/Password. (see [below for nested schema](#nestedatt--configuration--auth_type--destination_mongodb_update_authorization_type_login_password))
-- `destination_mongodb_update_authorization_type_none` (Attributes) None. (see [below for nested schema](#nestedatt--configuration--auth_type--destination_mongodb_update_authorization_type_none))
-
-
-### Nested Schema for `configuration.auth_type.destination_mongodb_authorization_type_login_password`
-
-Read-Only:
-
-- `authorization` (String) must be one of ["login/password"]
-- `password` (String) Password associated with the username.
-- `username` (String) Username to use to access the database.
-
-
-
-### Nested Schema for `configuration.auth_type.destination_mongodb_authorization_type_none`
-
-Read-Only:
-
-- `authorization` (String) must be one of ["none"]
-
-
-
-### Nested Schema for `configuration.auth_type.destination_mongodb_update_authorization_type_login_password`
-
-Read-Only:
-
-- `authorization` (String) must be one of ["login/password"]
-- `password` (String) Password associated with the username.
-- `username` (String) Username to use to access the database.
-
-
-
-### Nested Schema for `configuration.auth_type.destination_mongodb_update_authorization_type_none`
-
-Read-Only:
-
-- `authorization` (String) must be one of ["none"]
-
-
-
-
-### Nested Schema for `configuration.instance_type`
-
-Read-Only:
-
-- `destination_mongodb_mongo_db_instance_type_mongo_db_atlas` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_mongo_db_instance_type_mongo_db_atlas))
-- `destination_mongodb_mongo_db_instance_type_replica_set` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_mongo_db_instance_type_replica_set))
-- `destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance))
-- `destination_mongodb_update_mongo_db_instance_type_mongo_db_atlas` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_update_mongo_db_instance_type_mongo_db_atlas))
-- `destination_mongodb_update_mongo_db_instance_type_replica_set` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_update_mongo_db_instance_type_replica_set))
-- `destination_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance))
-
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_mongo_db_instance_type_mongo_db_atlas`
-
-Read-Only:
-
-- `cluster_url` (String) URL of a cluster to connect to.
-- `instance` (String) must be one of ["atlas"]
-
-
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_mongo_db_instance_type_replica_set`
-
-Read-Only:
-
-- `instance` (String) must be one of ["replica"]
-- `replica_set` (String) A replica set name.
-- `server_addresses` (String) The members of a replica set. Please specify `host`:`port` of each member separated by comma.
-
-
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance`
-
-Read-Only:
-
-- `host` (String) The Host of a Mongo database to be replicated.
-- `instance` (String) must be one of ["standalone"]
-- `port` (Number) The Port of a Mongo database to be replicated.
-
-
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_update_mongo_db_instance_type_mongo_db_atlas`
-
-Read-Only:
-
-- `cluster_url` (String) URL of a cluster to connect to.
-- `instance` (String) must be one of ["atlas"]
-
-
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_update_mongo_db_instance_type_replica_set`
-
-Read-Only:
-
-- `instance` (String) must be one of ["replica"]
-- `replica_set` (String) A replica set name.
-- `server_addresses` (String) The members of a replica set. Please specify `host`:`port` of each member separated by comma.
-
-
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance`
-
-Read-Only:
-
-- `host` (String) The Host of a Mongo database to be replicated.
-- `instance` (String) must be one of ["standalone"]
-- `port` (Number) The Port of a Mongo database to be replicated.
-
-
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `destination_mongodb_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_ssh_tunnel_method_no_tunnel))
-- `destination_mongodb_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_ssh_tunnel_method_password_authentication))
-- `destination_mongodb_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_ssh_tunnel_method_ssh_key_authentication))
-- `destination_mongodb_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_update_ssh_tunnel_method_no_tunnel))
-- `destination_mongodb_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_update_ssh_tunnel_method_password_authentication))
-- `destination_mongodb_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/destination_mssql.md b/docs/data-sources/destination_mssql.md
index 35c20caac..fa77c0edf 100644
--- a/docs/data-sources/destination_mssql.md
+++ b/docs/data-sources/destination_mssql.md
@@ -27,150 +27,10 @@ data "airbyte_destination_mssql" "my_destination_mssql" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) The name of the MSSQL database.
-- `destination_type` (String) must be one of ["mssql"]
-- `host` (String) The host name of the MSSQL database.
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) The password associated with this username.
-- `port` (Number) The port of the MSSQL database.
-- `schema` (String) The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".
-- `ssl_method` (Attributes) The encryption method which is used to communicate with the database. (see [below for nested schema](#nestedatt--configuration--ssl_method))
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) The username which is used to access the database.
-
-
-### Nested Schema for `configuration.ssl_method`
-
-Read-Only:
-
-- `destination_mssql_ssl_method_encrypted_trust_server_certificate` (Attributes) Use the certificate provided by the server without verification. (For testing purposes only!) (see [below for nested schema](#nestedatt--configuration--ssl_method--destination_mssql_ssl_method_encrypted_trust_server_certificate))
-- `destination_mssql_ssl_method_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--ssl_method--destination_mssql_ssl_method_encrypted_verify_certificate))
-- `destination_mssql_update_ssl_method_encrypted_trust_server_certificate` (Attributes) Use the certificate provided by the server without verification. (For testing purposes only!) (see [below for nested schema](#nestedatt--configuration--ssl_method--destination_mssql_update_ssl_method_encrypted_trust_server_certificate))
-- `destination_mssql_update_ssl_method_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--ssl_method--destination_mssql_update_ssl_method_encrypted_verify_certificate))
-
-
-### Nested Schema for `configuration.ssl_method.destination_mssql_ssl_method_encrypted_trust_server_certificate`
-
-Read-Only:
-
-- `ssl_method` (String) must be one of ["encrypted_trust_server_certificate"]
-
-
-
-### Nested Schema for `configuration.ssl_method.destination_mssql_ssl_method_encrypted_verify_certificate`
-
-Read-Only:
-
-- `host_name_in_certificate` (String) Specifies the host name of the server. The value of this property must match the subject property of the certificate.
-- `ssl_method` (String) must be one of ["encrypted_verify_certificate"]
-
-
-
-### Nested Schema for `configuration.ssl_method.destination_mssql_update_ssl_method_encrypted_trust_server_certificate`
-
-Read-Only:
-
-- `ssl_method` (String) must be one of ["encrypted_trust_server_certificate"]
-
-
-
-### Nested Schema for `configuration.ssl_method.destination_mssql_update_ssl_method_encrypted_verify_certificate`
-
-Read-Only:
-
-- `host_name_in_certificate` (String) Specifies the host name of the server. The value of this property must match the subject property of the certificate.
-- `ssl_method` (String) must be one of ["encrypted_verify_certificate"]
-
-
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `destination_mssql_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_ssh_tunnel_method_no_tunnel))
-- `destination_mssql_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_ssh_tunnel_method_password_authentication))
-- `destination_mssql_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_ssh_tunnel_method_ssh_key_authentication))
-- `destination_mssql_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_update_ssh_tunnel_method_no_tunnel))
-- `destination_mssql_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_update_ssh_tunnel_method_password_authentication))
-- `destination_mssql_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/destination_mysql.md b/docs/data-sources/destination_mysql.md
index 01493670c..f1c051b1c 100644
--- a/docs/data-sources/destination_mysql.md
+++ b/docs/data-sources/destination_mysql.md
@@ -27,103 +27,10 @@ data "airbyte_destination_mysql" "my_destination_mysql" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["mysql"]
-- `host` (String) Hostname of the database.
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the database.
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) Username to use to access the database.
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `destination_mysql_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_ssh_tunnel_method_no_tunnel))
-- `destination_mysql_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_ssh_tunnel_method_password_authentication))
-- `destination_mysql_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_ssh_tunnel_method_ssh_key_authentication))
-- `destination_mysql_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_update_ssh_tunnel_method_no_tunnel))
-- `destination_mysql_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_update_ssh_tunnel_method_password_authentication))
-- `destination_mysql_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/destination_oracle.md b/docs/data-sources/destination_oracle.md
index b7287088b..d439323af 100644
--- a/docs/data-sources/destination_oracle.md
+++ b/docs/data-sources/destination_oracle.md
@@ -27,104 +27,10 @@ data "airbyte_destination_oracle" "my_destination_oracle" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `destination_type` (String) must be one of ["oracle"]
-- `host` (String) The hostname of the database.
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) The password associated with the username.
-- `port` (Number) The port of the database.
-- `schema` (String) The default schema is used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. The usual value for this field is "airbyte". In Oracle, schemas and users are the same thing, so the "user" parameter is used as the login credentials and this is used for the default Airbyte message schema.
-- `sid` (String) The System Identifier uniquely distinguishes the instance from any other instance on the same computer.
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) The username to access the database. This user must have CREATE USER privileges in the database.
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `destination_oracle_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_ssh_tunnel_method_no_tunnel))
-- `destination_oracle_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_ssh_tunnel_method_password_authentication))
-- `destination_oracle_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_ssh_tunnel_method_ssh_key_authentication))
-- `destination_oracle_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_update_ssh_tunnel_method_no_tunnel))
-- `destination_oracle_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_update_ssh_tunnel_method_password_authentication))
-- `destination_oracle_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/destination_pinecone.md b/docs/data-sources/destination_pinecone.md
index a8cbc6933..6bdd19ec0 100644
--- a/docs/data-sources/destination_pinecone.md
+++ b/docs/data-sources/destination_pinecone.md
@@ -27,103 +27,10 @@ data "airbyte_destination_pinecone" "my_destination_pinecone" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `destination_type` (String) must be one of ["pinecone"]
-- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding))
-- `indexing` (Attributes) Pinecone is a popular vector store that can be used to store and retrieve embeddings. (see [below for nested schema](#nestedatt--configuration--indexing))
-- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing))
-
-
-### Nested Schema for `configuration.embedding`
-
-Read-Only:
-
-- `destination_pinecone_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_cohere))
-- `destination_pinecone_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_fake))
-- `destination_pinecone_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_open_ai))
-- `destination_pinecone_update_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_cohere))
-- `destination_pinecone_update_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_fake))
-- `destination_pinecone_update_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_open_ai))
-
-
-### Nested Schema for `configuration.embedding.destination_pinecone_embedding_cohere`
-
-Read-Only:
-
-- `cohere_key` (String)
-- `mode` (String) must be one of ["cohere"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_pinecone_embedding_fake`
-
-Read-Only:
-
-- `mode` (String) must be one of ["fake"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_pinecone_embedding_open_ai`
-
-Read-Only:
-
-- `mode` (String) must be one of ["openai"]
-- `openai_key` (String)
-
-
-
-### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_cohere`
-
-Read-Only:
-
-- `cohere_key` (String)
-- `mode` (String) must be one of ["cohere"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_fake`
-
-Read-Only:
-
-- `mode` (String) must be one of ["fake"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_open_ai`
-
-Read-Only:
-
-- `mode` (String) must be one of ["openai"]
-- `openai_key` (String)
-
-
-
-
-### Nested Schema for `configuration.indexing`
-
-Read-Only:
-
-- `index` (String) Pinecone index to use
-- `pinecone_environment` (String) Pinecone environment to use
-- `pinecone_key` (String)
-
-
-
-### Nested Schema for `configuration.processing`
-
-Read-Only:
-
-- `chunk_overlap` (Number) Size of overlap between chunks in tokens to store in vector store to better capture relevant context
-- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
-- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
-- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array.
-
diff --git a/docs/data-sources/destination_postgres.md b/docs/data-sources/destination_postgres.md
index c61ff7598..4343cf009 100644
--- a/docs/data-sources/destination_postgres.md
+++ b/docs/data-sources/destination_postgres.md
@@ -27,239 +27,10 @@ data "airbyte_destination_postgres" "my_destination_postgres" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["postgres"]
-- `host` (String) Hostname of the database.
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the database.
-- `schema` (String) The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".
-- `ssl_mode` (Attributes) SSL connection modes.
- disable - Choose this mode to disable encryption of communication between Airbyte and destination database
- allow - Choose this mode to enable encryption only when required by the source database
- prefer - Choose this mode to allow unencrypted connection only if the source database does not support encryption
- require - Choose this mode to always require encryption. If the source database server does not support encryption, connection will fail
- verify-ca - Choose this mode to always require encryption and to verify that the source database server has a valid SSL certificate
- verify-full - This is the most secure mode. Choose this mode to always require encryption and to verify the identity of the source database server
- See more information - in the docs. (see [below for nested schema](#nestedatt--configuration--ssl_mode))
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) Username to use to access the database.
-
-
-### Nested Schema for `configuration.ssl_mode`
-
-Read-Only:
-
-- `destination_postgres_ssl_modes_allow` (Attributes) Allow SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_allow))
-- `destination_postgres_ssl_modes_disable` (Attributes) Disable SSL. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_disable))
-- `destination_postgres_ssl_modes_prefer` (Attributes) Prefer SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_prefer))
-- `destination_postgres_ssl_modes_require` (Attributes) Require SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_require))
-- `destination_postgres_ssl_modes_verify_ca` (Attributes) Verify-ca SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_verify_ca))
-- `destination_postgres_ssl_modes_verify_full` (Attributes) Verify-full SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_verify_full))
-- `destination_postgres_update_ssl_modes_allow` (Attributes) Allow SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_allow))
-- `destination_postgres_update_ssl_modes_disable` (Attributes) Disable SSL. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_disable))
-- `destination_postgres_update_ssl_modes_prefer` (Attributes) Prefer SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_prefer))
-- `destination_postgres_update_ssl_modes_require` (Attributes) Require SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_require))
-- `destination_postgres_update_ssl_modes_verify_ca` (Attributes) Verify-ca SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_verify_ca))
-- `destination_postgres_update_ssl_modes_verify_full` (Attributes) Verify-full SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_verify_full))
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_allow`
-
-Read-Only:
-
-- `mode` (String) must be one of ["allow"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_disable`
-
-Read-Only:
-
-- `mode` (String) must be one of ["disable"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_prefer`
-
-Read-Only:
-
-- `mode` (String) must be one of ["prefer"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_require`
-
-Read-Only:
-
-- `mode` (String) must be one of ["require"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_verify_ca`
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-ca"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_verify_full`
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-full"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_allow`
-
-Read-Only:
-
-- `mode` (String) must be one of ["allow"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_disable`
-
-Read-Only:
-
-- `mode` (String) must be one of ["disable"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_prefer`
-
-Read-Only:
-
-- `mode` (String) must be one of ["prefer"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_require`
-
-Read-Only:
-
-- `mode` (String) must be one of ["require"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_verify_ca`
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-ca"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_verify_full`
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-full"]
-
-
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `destination_postgres_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_ssh_tunnel_method_no_tunnel))
-- `destination_postgres_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_ssh_tunnel_method_password_authentication))
-- `destination_postgres_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_ssh_tunnel_method_ssh_key_authentication))
-- `destination_postgres_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_update_ssh_tunnel_method_no_tunnel))
-- `destination_postgres_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_update_ssh_tunnel_method_password_authentication))
-- `destination_postgres_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/destination_pubsub.md b/docs/data-sources/destination_pubsub.md
index 251e0d5d8..77918b67f 100644
--- a/docs/data-sources/destination_pubsub.md
+++ b/docs/data-sources/destination_pubsub.md
@@ -27,23 +27,10 @@ data "airbyte_destination_pubsub" "my_destination_pubsub" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `batching_delay_threshold` (Number) Number of ms before the buffer is flushed
-- `batching_element_count_threshold` (Number) Number of messages before the buffer is flushed
-- `batching_enabled` (Boolean) If TRUE messages will be buffered instead of sending them one by one
-- `batching_request_bytes_threshold` (Number) Number of bytes before the buffer is flushed
-- `credentials_json` (String) The contents of the JSON service account key. Check out the docs if you need help generating this key.
-- `destination_type` (String) must be one of ["pubsub"]
-- `ordering_enabled` (Boolean) If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key of stream
-- `project_id` (String) The GCP project ID for the project containing the target PubSub.
-- `topic_id` (String) The PubSub topic ID in the given GCP project ID.
-
diff --git a/docs/data-sources/destination_qdrant.md b/docs/data-sources/destination_qdrant.md
new file mode 100644
index 000000000..eedc79bd1
--- /dev/null
+++ b/docs/data-sources/destination_qdrant.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_destination_qdrant Data Source - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ DestinationQdrant DataSource
+---
+
+# airbyte_destination_qdrant (Data Source)
+
+DestinationQdrant DataSource
+
+## Example Usage
+
+```terraform
+data "airbyte_destination_qdrant" "my_destination_qdrant" {
+ destination_id = "...my_destination_id..."
+}
+```
+
+
+## Schema
+
+### Required
+
+- `destination_id` (String)
+
+### Read-Only
+
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
+- `name` (String)
+- `workspace_id` (String)
+
+
diff --git a/docs/data-sources/destination_redis.md b/docs/data-sources/destination_redis.md
index 8dce9b1fa..e7dc7a46c 100644
--- a/docs/data-sources/destination_redis.md
+++ b/docs/data-sources/destination_redis.md
@@ -27,157 +27,10 @@ data "airbyte_destination_redis" "my_destination_redis" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `cache_type` (String) must be one of ["hash"]
-Redis cache type to store data in.
-- `destination_type` (String) must be one of ["redis"]
-- `host` (String) Redis host to connect to.
-- `password` (String) Password associated with Redis.
-- `port` (Number) Port of Redis.
-- `ssl` (Boolean) Indicates whether SSL encryption protocol will be used to connect to Redis. It is recommended to use SSL connection if possible.
-- `ssl_mode` (Attributes) SSL connection modes.
-
verify-full - This is the most secure mode. Always require encryption and verifies the identity of the source database server (see [below for nested schema](#nestedatt--configuration--ssl_mode))
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) Username associated with Redis.
-
-
-### Nested Schema for `configuration.ssl_mode`
-
-Read-Only:
-
-- `destination_redis_ssl_modes_disable` (Attributes) Disable SSL. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_redis_ssl_modes_disable))
-- `destination_redis_ssl_modes_verify_full` (Attributes) Verify-full SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_redis_ssl_modes_verify_full))
-- `destination_redis_update_ssl_modes_disable` (Attributes) Disable SSL. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_redis_update_ssl_modes_disable))
-- `destination_redis_update_ssl_modes_verify_full` (Attributes) Verify-full SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_redis_update_ssl_modes_verify_full))
-
-
-### Nested Schema for `configuration.ssl_mode.destination_redis_ssl_modes_disable`
-
-Read-Only:
-
-- `mode` (String) must be one of ["disable"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_redis_ssl_modes_verify_full`
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-full"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_redis_update_ssl_modes_disable`
-
-Read-Only:
-
-- `mode` (String) must be one of ["disable"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_redis_update_ssl_modes_verify_full`
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-full"]
-
-
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `destination_redis_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_ssh_tunnel_method_no_tunnel))
-- `destination_redis_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_ssh_tunnel_method_password_authentication))
-- `destination_redis_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_ssh_tunnel_method_ssh_key_authentication))
-- `destination_redis_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_update_ssh_tunnel_method_no_tunnel))
-- `destination_redis_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_update_ssh_tunnel_method_password_authentication))
-- `destination_redis_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/destination_redshift.md b/docs/data-sources/destination_redshift.md
index b0775d232..11b98f6c8 100644
--- a/docs/data-sources/destination_redshift.md
+++ b/docs/data-sources/destination_redshift.md
@@ -27,220 +27,10 @@ data "airbyte_destination_redshift" "my_destination_redshift" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["redshift"]
-- `host` (String) Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the database.
-- `schema` (String) The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public".
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `uploading_method` (Attributes) The method how the data will be uploaded to the database. (see [below for nested schema](#nestedatt--configuration--uploading_method))
-- `username` (String) Username to use to access the database.
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `destination_redshift_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_ssh_tunnel_method_no_tunnel))
-- `destination_redshift_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_ssh_tunnel_method_password_authentication))
-- `destination_redshift_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_ssh_tunnel_method_ssh_key_authentication))
-- `destination_redshift_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_update_ssh_tunnel_method_no_tunnel))
-- `destination_redshift_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_update_ssh_tunnel_method_password_authentication))
-- `destination_redshift_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-
-### Nested Schema for `configuration.uploading_method`
-
-Read-Only:
-
-- `destination_redshift_update_uploading_method_s3_staging` (Attributes) The method how the data will be uploaded to the database. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_update_uploading_method_s3_staging))
-- `destination_redshift_update_uploading_method_standard` (Attributes) The method how the data will be uploaded to the database. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_update_uploading_method_standard))
-- `destination_redshift_uploading_method_s3_staging` (Attributes) The method how the data will be uploaded to the database. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_uploading_method_s3_staging))
-- `destination_redshift_uploading_method_standard` (Attributes) The method how the data will be uploaded to the database. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_uploading_method_standard))
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_update_uploading_method_s3_staging`
-
-Read-Only:
-
-- `access_key_id` (String) This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.
-- `encryption` (Attributes) How to encrypt the staging data (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_update_uploading_method_s3_staging--encryption))
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
-- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-- `method` (String) must be one of ["S3 Staging"]
-- `purge_staging_data` (Boolean) Whether to delete the staging files from S3 after completing the sync. See docs for details.
-- `s3_bucket_name` (String) The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.
-- `s3_bucket_path` (String) The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1"]
-The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.
-- `secret_access_key` (String) The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_update_uploading_method_s3_staging.secret_access_key`
-
-Read-Only:
-
-- `destination_redshift_update_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption` (Attributes) Staging data will be encrypted using AES-CBC envelope encryption. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_update_uploading_method_s3_staging--secret_access_key--destination_redshift_update_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption))
-- `destination_redshift_update_uploading_method_s3_staging_encryption_no_encryption` (Attributes) Staging data will be stored in plaintext. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_update_uploading_method_s3_staging--secret_access_key--destination_redshift_update_uploading_method_s3_staging_encryption_no_encryption))
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_update_uploading_method_s3_staging.secret_access_key.destination_redshift_update_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption`
-
-Read-Only:
-
-- `encryption_type` (String) must be one of ["aes_cbc_envelope"]
-- `key_encrypting_key` (String) The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.
-
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_update_uploading_method_s3_staging.secret_access_key.destination_redshift_update_uploading_method_s3_staging_encryption_no_encryption`
-
-Read-Only:
-
-- `encryption_type` (String) must be one of ["none"]
-
-
-
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_update_uploading_method_standard`
-
-Read-Only:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_uploading_method_s3_staging`
-
-Read-Only:
-
-- `access_key_id` (String) This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.
-- `encryption` (Attributes) How to encrypt the staging data (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_uploading_method_s3_staging--encryption))
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
-- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-- `method` (String) must be one of ["S3 Staging"]
-- `purge_staging_data` (Boolean) Whether to delete the staging files from S3 after completing the sync. See docs for details.
-- `s3_bucket_name` (String) The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.
-- `s3_bucket_path` (String) The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1"]
-The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.
-- `secret_access_key` (String) The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_uploading_method_s3_staging.secret_access_key`
-
-Read-Only:
-
-- `destination_redshift_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption` (Attributes) Staging data will be encrypted using AES-CBC envelope encryption. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_uploading_method_s3_staging--secret_access_key--destination_redshift_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption))
-- `destination_redshift_uploading_method_s3_staging_encryption_no_encryption` (Attributes) Staging data will be stored in plaintext. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_uploading_method_s3_staging--secret_access_key--destination_redshift_uploading_method_s3_staging_encryption_no_encryption))
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_uploading_method_s3_staging.secret_access_key.destination_redshift_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption`
-
-Read-Only:
-
-- `encryption_type` (String) must be one of ["aes_cbc_envelope"]
-- `key_encrypting_key` (String) The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.
-
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_uploading_method_s3_staging.secret_access_key.destination_redshift_uploading_method_s3_staging_encryption_no_encryption`
-
-Read-Only:
-
-- `encryption_type` (String) must be one of ["none"]
-
-
-
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_uploading_method_standard`
-
-Read-Only:
-
-- `method` (String) must be one of ["Standard"]
-
diff --git a/docs/data-sources/destination_s3.md b/docs/data-sources/destination_s3.md
index 16f8d2284..929bed92f 100644
--- a/docs/data-sources/destination_s3.md
+++ b/docs/data-sources/destination_s3.md
@@ -27,360 +27,10 @@ data "airbyte_destination_s3" "my_destination_s3" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_key_id` (String) The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.
-- `destination_type` (String) must be one of ["s3"]
-- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-- `format` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format))
-- `s3_bucket_name` (String) The name of the S3 bucket. Read more here.
-- `s3_bucket_path` (String) Directory under the S3 bucket where data will be written. Read more here
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the S3 bucket. See here for all region codes.
-- `s3_endpoint` (String) Your S3 endpoint url. Read more here
-- `s3_path_format` (String) Format string on how data will be organized inside the S3 bucket directory. Read more here
-- `secret_access_key` (String) The corresponding secret to the access key ID. Read more here
-
-
-### Nested Schema for `configuration.format`
-
-Read-Only:
-
-- `destination_s3_output_format_avro_apache_avro` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro))
-- `destination_s3_output_format_csv_comma_separated_values` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_csv_comma_separated_values))
-- `destination_s3_output_format_json_lines_newline_delimited_json` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_json_lines_newline_delimited_json))
-- `destination_s3_output_format_parquet_columnar_storage` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_parquet_columnar_storage))
-- `destination_s3_update_output_format_avro_apache_avro` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro))
-- `destination_s3_update_output_format_csv_comma_separated_values` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_csv_comma_separated_values))
-- `destination_s3_update_output_format_json_lines_newline_delimited_json` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_json_lines_newline_delimited_json))
-- `destination_s3_update_output_format_parquet_columnar_storage` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_parquet_columnar_storage))
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro`
-
-Read-Only:
-
-- `compression_codec` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--compression_codec))
-- `format_type` (String) must be one of ["Avro"]
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type`
-
-Read-Only:
-
-- `destination_s3_output_format_avro_apache_avro_compression_codec_bzip2` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_bzip2))
-- `destination_s3_output_format_avro_apache_avro_compression_codec_deflate` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_deflate))
-- `destination_s3_output_format_avro_apache_avro_compression_codec_no_compression` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_no_compression))
-- `destination_s3_output_format_avro_apache_avro_compression_codec_snappy` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_snappy))
-- `destination_s3_output_format_avro_apache_avro_compression_codec_xz` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_xz))
-- `destination_s3_output_format_avro_apache_avro_compression_codec_zstandard` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_zstandard))
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_bzip2`
-
-Read-Only:
-
-- `codec` (String) must be one of ["bzip2"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_deflate`
-
-Read-Only:
-
-- `codec` (String) must be one of ["Deflate"]
-- `compression_level` (Number) 0: no compression & fastest, 9: best compression & slowest.
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_no_compression`
-
-Read-Only:
-
-- `codec` (String) must be one of ["no compression"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_snappy`
-
-Read-Only:
-
-- `codec` (String) must be one of ["snappy"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_xz`
-
-Read-Only:
-
-- `codec` (String) must be one of ["xz"]
-- `compression_level` (Number) See here for details.
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_zstandard`
-
-Read-Only:
-
-- `codec` (String) must be one of ["zstandard"]
-- `compression_level` (Number) Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
-- `include_checksum` (Boolean) If true, include a checksum with each data block.
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_csv_comma_separated_values`
-
-Read-Only:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_csv_comma_separated_values--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-- `format_type` (String) must be one of ["CSV"]
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_csv_comma_separated_values.format_type`
-
-Read-Only:
-
-- `destination_s3_output_format_csv_comma_separated_values_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_csv_comma_separated_values--format_type--destination_s3_output_format_csv_comma_separated_values_compression_gzip))
-- `destination_s3_output_format_csv_comma_separated_values_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_csv_comma_separated_values--format_type--destination_s3_output_format_csv_comma_separated_values_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_csv_comma_separated_values.format_type.destination_s3_output_format_csv_comma_separated_values_compression_gzip`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_csv_comma_separated_values.format_type.destination_s3_output_format_csv_comma_separated_values_compression_no_compression`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_json_lines_newline_delimited_json--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
-- `format_type` (String) must be one of ["JSONL"]
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_json_lines_newline_delimited_json.format_type`
-
-Read-Only:
-
-- `destination_s3_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_json_lines_newline_delimited_json--format_type--destination_s3_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_s3_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_json_lines_newline_delimited_json--format_type--destination_s3_output_format_json_lines_newline_delimited_json_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_json_lines_newline_delimited_json.format_type.destination_s3_output_format_json_lines_newline_delimited_json_compression_gzip`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_json_lines_newline_delimited_json.format_type.destination_s3_output_format_json_lines_newline_delimited_json_compression_no_compression`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_parquet_columnar_storage`
-
-Read-Only:
-
-- `block_size_mb` (Number) This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]
-The compression algorithm used to compress data pages.
-- `dictionary_encoding` (Boolean) Default: true.
-- `dictionary_page_size_kb` (Number) There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
-- `format_type` (String) must be one of ["Parquet"]
-- `max_padding_size_mb` (Number) Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
-- `page_size_kb` (Number) The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro`
-
-Read-Only:
-
-- `compression_codec` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--compression_codec))
-- `format_type` (String) must be one of ["Avro"]
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type`
-
-Read-Only:
-
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_bzip2` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_bzip2))
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_deflate` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_deflate))
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_no_compression` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_no_compression))
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_snappy` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_snappy))
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_xz` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_xz))
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_zstandard` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_zstandard))
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_bzip2`
-
-Read-Only:
-
-- `codec` (String) must be one of ["bzip2"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_deflate`
-
-Read-Only:
-
-- `codec` (String) must be one of ["Deflate"]
-- `compression_level` (Number) 0: no compression & fastest, 9: best compression & slowest.
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_no_compression`
-
-Read-Only:
-
-- `codec` (String) must be one of ["no compression"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_snappy`
-
-Read-Only:
-
-- `codec` (String) must be one of ["snappy"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_xz`
-
-Read-Only:
-
-- `codec` (String) must be one of ["xz"]
-- `compression_level` (Number) See here for details.
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_zstandard`
-
-Read-Only:
-
-- `codec` (String) must be one of ["zstandard"]
-- `compression_level` (Number) Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
-- `include_checksum` (Boolean) If true, include a checksum with each data block.
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_csv_comma_separated_values`
-
-Read-Only:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_csv_comma_separated_values--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-- `format_type` (String) must be one of ["CSV"]
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_csv_comma_separated_values.format_type`
-
-Read-Only:
-
-- `destination_s3_update_output_format_csv_comma_separated_values_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_csv_comma_separated_values--format_type--destination_s3_update_output_format_csv_comma_separated_values_compression_gzip))
-- `destination_s3_update_output_format_csv_comma_separated_values_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_csv_comma_separated_values--format_type--destination_s3_update_output_format_csv_comma_separated_values_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_csv_comma_separated_values.format_type.destination_s3_update_output_format_csv_comma_separated_values_compression_gzip`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_csv_comma_separated_values.format_type.destination_s3_update_output_format_csv_comma_separated_values_compression_no_compression`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_json_lines_newline_delimited_json--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
-- `format_type` (String) must be one of ["JSONL"]
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_json_lines_newline_delimited_json.format_type`
-
-Read-Only:
-
-- `destination_s3_update_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_json_lines_newline_delimited_json--format_type--destination_s3_update_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_s3_update_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_json_lines_newline_delimited_json--format_type--destination_s3_update_output_format_json_lines_newline_delimited_json_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_json_lines_newline_delimited_json.format_type.destination_s3_update_output_format_json_lines_newline_delimited_json_compression_gzip`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_json_lines_newline_delimited_json.format_type.destination_s3_update_output_format_json_lines_newline_delimited_json_compression_no_compression`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_parquet_columnar_storage`
-
-Read-Only:
-
-- `block_size_mb` (Number) This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]
-The compression algorithm used to compress data pages.
-- `dictionary_encoding` (Boolean) Default: true.
-- `dictionary_page_size_kb` (Number) There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
-- `format_type` (String) must be one of ["Parquet"]
-- `max_padding_size_mb` (Number) Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
-- `page_size_kb` (Number) The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
-
diff --git a/docs/data-sources/destination_s3_glue.md b/docs/data-sources/destination_s3_glue.md
index a9268a9b1..f92b03ce8 100644
--- a/docs/data-sources/destination_s3_glue.md
+++ b/docs/data-sources/destination_s3_glue.md
@@ -27,105 +27,10 @@ data "airbyte_destination_s3_glue" "my_destination_s3glue" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_key_id` (String) The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.
-- `destination_type` (String) must be one of ["s3-glue"]
-- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-- `format` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format))
-- `glue_database` (String) Name of the glue database for creating the tables, leave blank if no integration
-- `glue_serialization_library` (String) must be one of ["org.openx.data.jsonserde.JsonSerDe", "org.apache.hive.hcatalog.data.JsonSerDe"]
-The library that your query engine will use for reading and writing data in your lake.
-- `s3_bucket_name` (String) The name of the S3 bucket. Read more here.
-- `s3_bucket_path` (String) Directory under the S3 bucket where data will be written. Read more here
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the S3 bucket. See here for all region codes.
-- `s3_endpoint` (String) Your S3 endpoint url. Read more here
-- `s3_path_format` (String) Format string on how data will be organized inside the S3 bucket directory. Read more here
-- `secret_access_key` (String) The corresponding secret to the access key ID. Read more here
-
-
-### Nested Schema for `configuration.format`
-
-Read-Only:
-
-- `destination_s3_glue_output_format_json_lines_newline_delimited_json` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_output_format_json_lines_newline_delimited_json))
-- `destination_s3_glue_update_output_format_json_lines_newline_delimited_json` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_update_output_format_json_lines_newline_delimited_json))
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_output_format_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_output_format_json_lines_newline_delimited_json--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
-- `format_type` (String) must be one of ["JSONL"]
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_output_format_json_lines_newline_delimited_json.format_type`
-
-Read-Only:
-
-- `destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_output_format_json_lines_newline_delimited_json--format_type--destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_output_format_json_lines_newline_delimited_json--format_type--destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_output_format_json_lines_newline_delimited_json.format_type.destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_gzip`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_output_format_json_lines_newline_delimited_json.format_type.destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_no_compression`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_update_output_format_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_update_output_format_json_lines_newline_delimited_json--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
-- `format_type` (String) must be one of ["JSONL"]
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_update_output_format_json_lines_newline_delimited_json.format_type`
-
-Read-Only:
-
-- `destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_update_output_format_json_lines_newline_delimited_json--format_type--destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_update_output_format_json_lines_newline_delimited_json--format_type--destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_update_output_format_json_lines_newline_delimited_json.format_type.destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_gzip`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_update_output_format_json_lines_newline_delimited_json.format_type.destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_no_compression`
-
-Read-Only:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
diff --git a/docs/data-sources/destination_sftp_json.md b/docs/data-sources/destination_sftp_json.md
index 8cf448197..385d13431 100644
--- a/docs/data-sources/destination_sftp_json.md
+++ b/docs/data-sources/destination_sftp_json.md
@@ -27,20 +27,10 @@ data "airbyte_destination_sftp_json" "my_destination_sftpjson" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `destination_path` (String) Path to the directory where json files will be written.
-- `destination_type` (String) must be one of ["sftp-json"]
-- `host` (String) Hostname of the SFTP server.
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the SFTP server.
-- `username` (String) Username to use to access the SFTP server.
-
diff --git a/docs/data-sources/destination_snowflake.md b/docs/data-sources/destination_snowflake.md
index b68969f18..571c6ec7b 100644
--- a/docs/data-sources/destination_snowflake.md
+++ b/docs/data-sources/destination_snowflake.md
@@ -27,97 +27,10 @@ data "airbyte_destination_snowflake" "my_destination_snowflake" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `database` (String) Enter the name of the database you want to sync data into
-- `destination_type` (String) must be one of ["snowflake"]
-- `host` (String) Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)
-- `jdbc_url_params` (String) Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
-- `raw_data_schema` (String) The schema to write raw tables into
-- `role` (String) Enter the role that you want to use to access Snowflake
-- `schema` (String) Enter the name of the default schema
-- `username` (String) Enter the name of the user you want to use to access the database
-- `warehouse` (String) Enter the name of the warehouse that you want to sync data into
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `destination_snowflake_authorization_method_key_pair_authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_authorization_method_key_pair_authentication))
-- `destination_snowflake_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_authorization_method_o_auth2_0))
-- `destination_snowflake_authorization_method_username_and_password` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_authorization_method_username_and_password))
-- `destination_snowflake_update_authorization_method_key_pair_authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_update_authorization_method_key_pair_authentication))
-- `destination_snowflake_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_update_authorization_method_o_auth2_0))
-- `destination_snowflake_update_authorization_method_username_and_password` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_update_authorization_method_username_and_password))
-
-
-### Nested Schema for `configuration.credentials.destination_snowflake_authorization_method_key_pair_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Key Pair Authentication"]
-- `private_key` (String) RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
-- `private_key_password` (String) Passphrase for private key
-
-
-
-### Nested Schema for `configuration.credentials.destination_snowflake_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Enter you application's Access Token
-- `auth_type` (String) must be one of ["OAuth2.0"]
-- `client_id` (String) Enter your application's Client ID
-- `client_secret` (String) Enter your application's Client secret
-- `refresh_token` (String) Enter your application's Refresh Token
-
-
-
-### Nested Schema for `configuration.credentials.destination_snowflake_authorization_method_username_and_password`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Username and Password"]
-- `password` (String) Enter the password associated with the username.
-
-
-
-### Nested Schema for `configuration.credentials.destination_snowflake_update_authorization_method_key_pair_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Key Pair Authentication"]
-- `private_key` (String) RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
-- `private_key_password` (String) Passphrase for private key
-
-
-
-### Nested Schema for `configuration.credentials.destination_snowflake_update_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Enter you application's Access Token
-- `auth_type` (String) must be one of ["OAuth2.0"]
-- `client_id` (String) Enter your application's Client ID
-- `client_secret` (String) Enter your application's Client secret
-- `refresh_token` (String) Enter your application's Refresh Token
-
-
-
-### Nested Schema for `configuration.credentials.destination_snowflake_update_authorization_method_username_and_password`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Username and Password"]
-- `password` (String) Enter the password associated with the username.
-
diff --git a/docs/data-sources/destination_timeplus.md b/docs/data-sources/destination_timeplus.md
index bcb2d32d3..ee545360c 100644
--- a/docs/data-sources/destination_timeplus.md
+++ b/docs/data-sources/destination_timeplus.md
@@ -27,17 +27,10 @@ data "airbyte_destination_timeplus" "my_destination_timeplus" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `apikey` (String) Personal API key
-- `destination_type` (String) must be one of ["timeplus"]
-- `endpoint` (String) Timeplus workspace endpoint
-
diff --git a/docs/data-sources/destination_typesense.md b/docs/data-sources/destination_typesense.md
index 11db436ef..6d9647d98 100644
--- a/docs/data-sources/destination_typesense.md
+++ b/docs/data-sources/destination_typesense.md
@@ -27,20 +27,10 @@ data "airbyte_destination_typesense" "my_destination_typesense" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Typesense API Key
-- `batch_size` (Number) How many documents should be imported together. Default 1000
-- `destination_type` (String) must be one of ["typesense"]
-- `host` (String) Hostname of the Typesense instance without protocol.
-- `port` (String) Port of the Typesense instance. Ex: 8108, 80, 443. Default is 443
-- `protocol` (String) Protocol of the Typesense instance. Ex: http or https. Default is https
-
diff --git a/docs/data-sources/destination_vertica.md b/docs/data-sources/destination_vertica.md
index 64ef4c6c5..d607b06fb 100644
--- a/docs/data-sources/destination_vertica.md
+++ b/docs/data-sources/destination_vertica.md
@@ -27,104 +27,10 @@ data "airbyte_destination_vertica" "my_destination_vertica" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["vertica"]
-- `host` (String) Hostname of the database.
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the database.
-- `schema` (String) Schema for vertica destination
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) Username to use to access the database.
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `destination_vertica_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_ssh_tunnel_method_no_tunnel))
-- `destination_vertica_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_ssh_tunnel_method_password_authentication))
-- `destination_vertica_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_ssh_tunnel_method_ssh_key_authentication))
-- `destination_vertica_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_update_ssh_tunnel_method_no_tunnel))
-- `destination_vertica_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_update_ssh_tunnel_method_password_authentication))
-- `destination_vertica_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/destination_weaviate.md b/docs/data-sources/destination_weaviate.md
new file mode 100644
index 000000000..f358d252c
--- /dev/null
+++ b/docs/data-sources/destination_weaviate.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_destination_weaviate Data Source - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ DestinationWeaviate DataSource
+---
+
+# airbyte_destination_weaviate (Data Source)
+
+DestinationWeaviate DataSource
+
+## Example Usage
+
+```terraform
+data "airbyte_destination_weaviate" "my_destination_weaviate" {
+ destination_id = "...my_destination_id..."
+}
+```
+
+
+## Schema
+
+### Required
+
+- `destination_id` (String)
+
+### Read-Only
+
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
+- `name` (String)
+- `workspace_id` (String)
+
+
diff --git a/docs/data-sources/destination_xata.md b/docs/data-sources/destination_xata.md
index f12a84d10..cdb0201fd 100644
--- a/docs/data-sources/destination_xata.md
+++ b/docs/data-sources/destination_xata.md
@@ -27,17 +27,10 @@ data "airbyte_destination_xata" "my_destination_xata" {
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the destination.
+- `destination_type` (String)
- `name` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API Key to connect.
-- `db_url` (String) URL pointing to your workspace.
-- `destination_type` (String) must be one of ["xata"]
-
diff --git a/docs/data-sources/source_aha.md b/docs/data-sources/source_aha.md
index a3e136563..c0d905f82 100644
--- a/docs/data-sources/source_aha.md
+++ b/docs/data-sources/source_aha.md
@@ -14,7 +14,6 @@ SourceAha DataSource
```terraform
data "airbyte_source_aha" "my_source_aha" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_aha" "my_source_aha" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API Key
-- `source_type` (String) must be one of ["aha"]
-- `url` (String) URL
-
diff --git a/docs/data-sources/source_aircall.md b/docs/data-sources/source_aircall.md
index 92a28f479..32b9df483 100644
--- a/docs/data-sources/source_aircall.md
+++ b/docs/data-sources/source_aircall.md
@@ -14,7 +14,6 @@ SourceAircall DataSource
```terraform
data "airbyte_source_aircall" "my_source_aircall" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_aircall" "my_source_aircall" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_id` (String) App ID found at settings https://dashboard.aircall.io/integrations/api-keys
-- `api_token` (String) App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)
-- `source_type` (String) must be one of ["aircall"]
-- `start_date` (String) Date time filter for incremental filter, Specify which date to extract from.
-
diff --git a/docs/data-sources/source_airtable.md b/docs/data-sources/source_airtable.md
index f8f218990..4e2b709ef 100644
--- a/docs/data-sources/source_airtable.md
+++ b/docs/data-sources/source_airtable.md
@@ -14,7 +14,6 @@ SourceAirtable DataSource
```terraform
data "airbyte_source_airtable" "my_source_airtable" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,75 +25,12 @@ data "airbyte_source_airtable" "my_source_airtable" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["airtable"]
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_airtable_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_airtable_authentication_o_auth2_0))
-- `source_airtable_authentication_personal_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_airtable_authentication_personal_access_token))
-- `source_airtable_update_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_airtable_update_authentication_o_auth2_0))
-- `source_airtable_update_authentication_personal_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_airtable_update_authentication_personal_access_token))
-
-
-### Nested Schema for `configuration.credentials.source_airtable_authentication_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The client ID of the Airtable developer application.
-- `client_secret` (String) The client secret the Airtable developer application.
-- `refresh_token` (String) The key to refresh the expired access token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_airtable_authentication_personal_access_token`
-
-Read-Only:
-
-- `api_key` (String) The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token.
-- `auth_method` (String) must be one of ["api_key"]
-
-
-
-### Nested Schema for `configuration.credentials.source_airtable_update_authentication_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The client ID of the Airtable developer application.
-- `client_secret` (String) The client secret the Airtable developer application.
-- `refresh_token` (String) The key to refresh the expired access token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_airtable_update_authentication_personal_access_token`
-
-Read-Only:
-
-- `api_key` (String) The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token.
-- `auth_method` (String) must be one of ["api_key"]
-
diff --git a/docs/data-sources/source_alloydb.md b/docs/data-sources/source_alloydb.md
index 05cae8890..02d984cef 100644
--- a/docs/data-sources/source_alloydb.md
+++ b/docs/data-sources/source_alloydb.md
@@ -14,7 +14,6 @@ SourceAlloydb DataSource
```terraform
data "airbyte_source_alloydb" "my_source_alloydb" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,377 +25,12 @@ data "airbyte_source_alloydb" "my_source_alloydb" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) Name of the database.
-- `host` (String) Hostname of the database.
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the database.
-- `replication_method` (Attributes) Replication method for extracting data from the database. (see [below for nested schema](#nestedatt--configuration--replication_method))
-- `schemas` (List of String) The list of schemas (case sensitive) to sync from. Defaults to public.
-- `source_type` (String) must be one of ["alloydb"]
-- `ssl_mode` (Attributes) SSL connection modes.
- Read more in the docs. (see [below for nested schema](#nestedatt--configuration--ssl_mode))
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) Username to access the database.
-
-
-### Nested Schema for `configuration.replication_method`
-
-Read-Only:
-
-- `source_alloydb_replication_method_logical_replication_cdc` (Attributes) Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_replication_method_logical_replication_cdc))
-- `source_alloydb_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_replication_method_standard))
-- `source_alloydb_replication_method_standard_xmin` (Attributes) Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_replication_method_standard_xmin))
-- `source_alloydb_update_replication_method_logical_replication_cdc` (Attributes) Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_update_replication_method_logical_replication_cdc))
-- `source_alloydb_update_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_update_replication_method_standard))
-- `source_alloydb_update_replication_method_standard_xmin` (Attributes) Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_update_replication_method_standard_xmin))
-
-
-### Nested Schema for `configuration.replication_method.source_alloydb_replication_method_logical_replication_cdc`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `lsn_commit_behaviour` (String) must be one of ["While reading Data", "After loading Data in the destination"]
-Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-- `method` (String) must be one of ["CDC"]
-- `plugin` (String) must be one of ["pgoutput"]
-A logical decoding plugin installed on the PostgreSQL server.
-- `publication` (String) A Postgres publication used for consuming changes. Read about publications and replication identities.
-- `queue_size` (Number) The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
-- `replication_slot` (String) A plugin logical replication slot. Read about replication slots.
-
-
-
-### Nested Schema for `configuration.replication_method.source_alloydb_replication_method_standard`
-
-Read-Only:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_alloydb_replication_method_standard_xmin`
-
-Read-Only:
-
-- `method` (String) must be one of ["Xmin"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_alloydb_update_replication_method_logical_replication_cdc`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `lsn_commit_behaviour` (String) must be one of ["While reading Data", "After loading Data in the destination"]
-Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-- `method` (String) must be one of ["CDC"]
-- `plugin` (String) must be one of ["pgoutput"]
-A logical decoding plugin installed on the PostgreSQL server.
-- `publication` (String) A Postgres publication used for consuming changes. Read about publications and replication identities.
-- `queue_size` (Number) The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
-- `replication_slot` (String) A plugin logical replication slot. Read about replication slots.
-
-
-
-### Nested Schema for `configuration.replication_method.source_alloydb_update_replication_method_standard`
-
-Read-Only:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_alloydb_update_replication_method_standard_xmin`
-
-Read-Only:
-
-- `method` (String) must be one of ["Xmin"]
-
-
-
-
-### Nested Schema for `configuration.ssl_mode`
-
-Read-Only:
-
-- `source_alloydb_ssl_modes_allow` (Attributes) Enables encryption only when required by the source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_allow))
-- `source_alloydb_ssl_modes_disable` (Attributes) Disables encryption of communication between Airbyte and source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_disable))
-- `source_alloydb_ssl_modes_prefer` (Attributes) Allows unencrypted connection only if the source database does not support encryption. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_prefer))
-- `source_alloydb_ssl_modes_require` (Attributes) Always require encryption. If the source database server does not support encryption, connection will fail. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_require))
-- `source_alloydb_ssl_modes_verify_ca` (Attributes) Always require encryption and verifies that the source database server has a valid SSL certificate. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_verify_ca))
-- `source_alloydb_ssl_modes_verify_full` (Attributes) This is the most secure mode. Always require encryption and verifies the identity of the source database server. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_verify_full))
-- `source_alloydb_update_ssl_modes_allow` (Attributes) Enables encryption only when required by the source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_allow))
-- `source_alloydb_update_ssl_modes_disable` (Attributes) Disables encryption of communication between Airbyte and source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_disable))
-- `source_alloydb_update_ssl_modes_prefer` (Attributes) Allows unencrypted connection only if the source database does not support encryption. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_prefer))
-- `source_alloydb_update_ssl_modes_require` (Attributes) Always require encryption. If the source database server does not support encryption, connection will fail. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_require))
-- `source_alloydb_update_ssl_modes_verify_ca` (Attributes) Always require encryption and verifies that the source database server has a valid SSL certificate. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_verify_ca))
-- `source_alloydb_update_ssl_modes_verify_full` (Attributes) This is the most secure mode. Always require encryption and verifies the identity of the source database server. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_verify_full))
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_allow`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["allow"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_disable`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["disable"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_prefer`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["prefer"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_require`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["require"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_verify_ca`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-ca"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_verify_full`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-full"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_allow`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["allow"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_disable`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["disable"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_prefer`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["prefer"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_require`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["require"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_verify_ca`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-ca"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_verify_full`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-full"]
-
-
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `source_alloydb_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_ssh_tunnel_method_no_tunnel))
-- `source_alloydb_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_ssh_tunnel_method_password_authentication))
-- `source_alloydb_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_ssh_tunnel_method_ssh_key_authentication))
-- `source_alloydb_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_update_ssh_tunnel_method_no_tunnel))
-- `source_alloydb_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_update_ssh_tunnel_method_password_authentication))
-- `source_alloydb_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/source_amazon_ads.md b/docs/data-sources/source_amazon_ads.md
index e320fca0e..2ebf2f996 100644
--- a/docs/data-sources/source_amazon_ads.md
+++ b/docs/data-sources/source_amazon_ads.md
@@ -14,7 +14,6 @@ SourceAmazonAds DataSource
```terraform
data "airbyte_source_amazon_ads" "my_source_amazonads" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,33 +25,12 @@ data "airbyte_source_amazon_ads" "my_source_amazonads" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The client ID of your Amazon Ads developer application. See the docs for more information.
-- `client_secret` (String) The client secret of your Amazon Ads developer application. See the docs for more information.
-- `look_back_window` (Number) The amount of days to go back in time to get the updated data from Amazon Ads
-- `marketplace_ids` (List of String) Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.
-- `profiles` (List of Number) Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.
-- `refresh_token` (String) Amazon Ads refresh token. See the docs for more information on how to obtain this token.
-- `region` (String) must be one of ["NA", "EU", "FE"]
-Region to pull data from (EU/NA/FE). See docs for more details.
-- `report_record_types` (List of String) Optional configuration which accepts an array of string of record types. Leave blank for default behaviour to pull all report types. Use this config option only if you want to pull specific report type(s). See docs for more details
-- `source_type` (String) must be one of ["amazon-ads"]
-- `start_date` (String) The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format
-- `state_filter` (List of String) Reflects the state of the Display, Product, and Brand Campaign streams as enabled, paused, or archived. If you do not populate this field, it will be ignored completely.
-
diff --git a/docs/data-sources/source_amazon_seller_partner.md b/docs/data-sources/source_amazon_seller_partner.md
index 282501471..132a75cf3 100644
--- a/docs/data-sources/source_amazon_seller_partner.md
+++ b/docs/data-sources/source_amazon_seller_partner.md
@@ -14,7 +14,6 @@ SourceAmazonSellerPartner DataSource
```terraform
data "airbyte_source_amazon_seller_partner" "my_source_amazonsellerpartner" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,38 +25,12 @@ data "airbyte_source_amazon_seller_partner" "my_source_amazonsellerpartner" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `advanced_stream_options` (String) Additional information to configure report options. This varies by report type, not every report implement this kind of feature. Must be a valid json string.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `aws_access_key` (String) Specifies the AWS access key used as part of the credentials to authenticate the user.
-- `aws_environment` (String) must be one of ["PRODUCTION", "SANDBOX"]
-Select the AWS Environment.
-- `aws_secret_key` (String) Specifies the AWS secret key used as part of the credentials to authenticate the user.
-- `lwa_app_id` (String) Your Login with Amazon Client ID.
-- `lwa_client_secret` (String) Your Login with Amazon Client Secret.
-- `max_wait_seconds` (Number) Sometimes report can take up to 30 minutes to generate. This will set the limit for how long to wait for a successful report.
-- `period_in_days` (Number) Will be used for stream slicing for initial full_refresh sync when no updated state is present for reports that support sliced incremental sync.
-- `refresh_token` (String) The Refresh Token obtained via OAuth flow authorization.
-- `region` (String) must be one of ["AE", "AU", "BE", "BR", "CA", "DE", "EG", "ES", "FR", "GB", "IN", "IT", "JP", "MX", "NL", "PL", "SA", "SE", "SG", "TR", "UK", "US"]
-Select the AWS Region.
-- `replication_end_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.
-- `replication_start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-- `report_options` (String) Additional information passed to reports. This varies by report type. Must be a valid json string.
-- `role_arn` (String) Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. (Needs permission to 'Assume Role' STS).
-- `source_type` (String) must be one of ["amazon-seller-partner"]
-
diff --git a/docs/data-sources/source_amazon_sqs.md b/docs/data-sources/source_amazon_sqs.md
index eac98b0d0..26ba47ff2 100644
--- a/docs/data-sources/source_amazon_sqs.md
+++ b/docs/data-sources/source_amazon_sqs.md
@@ -14,7 +14,6 @@ SourceAmazonSqs DataSource
```terraform
data "airbyte_source_amazon_sqs" "my_source_amazonsqs" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,31 +25,12 @@ data "airbyte_source_amazon_sqs" "my_source_amazonsqs" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_key` (String) The Access Key ID of the AWS IAM Role to use for pulling messages
-- `attributes_to_return` (String) Comma separated list of Mesage Attribute names to return
-- `delete_messages` (Boolean) If Enabled, messages will be deleted from the SQS Queue after being read. If Disabled, messages are left in the queue and can be read more than once. WARNING: Enabling this option can result in data loss in cases of failure, use with caution, see documentation for more detail.
-- `max_batch_size` (Number) Max amount of messages to get in one batch (10 max)
-- `max_wait_time` (Number) Max amount of time in seconds to wait for messages in a single poll (20 max)
-- `queue_url` (String) URL of the SQS Queue
-- `region` (String) must be one of ["us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-AWS Region of the SQS Queue
-- `secret_key` (String) The Secret Key of the AWS IAM Role to use for pulling messages
-- `source_type` (String) must be one of ["amazon-sqs"]
-- `visibility_timeout` (Number) Modify the Visibility Timeout of the individual message from the Queue's default (seconds).
-
diff --git a/docs/data-sources/source_amplitude.md b/docs/data-sources/source_amplitude.md
index 0df5b2607..6816123f4 100644
--- a/docs/data-sources/source_amplitude.md
+++ b/docs/data-sources/source_amplitude.md
@@ -14,7 +14,6 @@ SourceAmplitude DataSource
```terraform
data "airbyte_source_amplitude" "my_source_amplitude" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,27 +25,12 @@ data "airbyte_source_amplitude" "my_source_amplitude" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Amplitude API Key. See the setup guide for more information on how to obtain this key.
-- `data_region` (String) must be one of ["Standard Server", "EU Residency Server"]
-Amplitude data region server
-- `request_time_range` (Number) According to Considerations too big time range in request can cause a timeout error. In this case, set shorter time interval in hours.
-- `secret_key` (String) Amplitude Secret Key. See the setup guide for more information on how to obtain this key.
-- `source_type` (String) must be one of ["amplitude"]
-- `start_date` (String) UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_apify_dataset.md b/docs/data-sources/source_apify_dataset.md
index 3a3966b70..1cdc585f5 100644
--- a/docs/data-sources/source_apify_dataset.md
+++ b/docs/data-sources/source_apify_dataset.md
@@ -14,7 +14,6 @@ SourceApifyDataset DataSource
```terraform
data "airbyte_source_apify_dataset" "my_source_apifydataset" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_apify_dataset" "my_source_apifydataset" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `clean` (Boolean) If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false.
-- `dataset_id` (String) ID of the dataset you would like to load to Airbyte.
-- `source_type` (String) must be one of ["apify-dataset"]
-- `token` (String) Your application's Client Secret. You can find this value on the console integrations tab after you login.
-
diff --git a/docs/data-sources/source_appfollow.md b/docs/data-sources/source_appfollow.md
index f6692d321..4b84c082c 100644
--- a/docs/data-sources/source_appfollow.md
+++ b/docs/data-sources/source_appfollow.md
@@ -14,7 +14,6 @@ SourceAppfollow DataSource
```terraform
data "airbyte_source_appfollow" "my_source_appfollow" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_appfollow" "my_source_appfollow" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_secret` (String) API Key provided by Appfollow
-- `source_type` (String) must be one of ["appfollow"]
-
diff --git a/docs/data-sources/source_asana.md b/docs/data-sources/source_asana.md
index ce10ec949..9ef0ce9b2 100644
--- a/docs/data-sources/source_asana.md
+++ b/docs/data-sources/source_asana.md
@@ -14,7 +14,6 @@ SourceAsana DataSource
```terraform
data "airbyte_source_asana" "my_source_asana" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,75 +25,12 @@ data "airbyte_source_asana" "my_source_asana" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["asana"]
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_asana_authentication_mechanism_authenticate_via_asana_oauth` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials--source_asana_authentication_mechanism_authenticate_via_asana_oauth))
-- `source_asana_authentication_mechanism_authenticate_with_personal_access_token` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials--source_asana_authentication_mechanism_authenticate_with_personal_access_token))
-- `source_asana_update_authentication_mechanism_authenticate_via_asana_oauth` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials--source_asana_update_authentication_mechanism_authenticate_via_asana_oauth))
-- `source_asana_update_authentication_mechanism_authenticate_with_personal_access_token` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials--source_asana_update_authentication_mechanism_authenticate_with_personal_access_token))
-
-
-### Nested Schema for `configuration.credentials.source_asana_authentication_mechanism_authenticate_via_asana_oauth`
-
-Read-Only:
-
-- `client_id` (String)
-- `client_secret` (String)
-- `option_title` (String) must be one of ["OAuth Credentials"]
-OAuth Credentials
-- `refresh_token` (String)
-
-
-
-### Nested Schema for `configuration.credentials.source_asana_authentication_mechanism_authenticate_with_personal_access_token`
-
-Read-Only:
-
-- `option_title` (String) must be one of ["PAT Credentials"]
-PAT Credentials
-- `personal_access_token` (String) Asana Personal Access Token (generate yours here).
-
-
-
-### Nested Schema for `configuration.credentials.source_asana_update_authentication_mechanism_authenticate_via_asana_oauth`
-
-Read-Only:
-
-- `client_id` (String)
-- `client_secret` (String)
-- `option_title` (String) must be one of ["OAuth Credentials"]
-OAuth Credentials
-- `refresh_token` (String)
-
-
-
-### Nested Schema for `configuration.credentials.source_asana_update_authentication_mechanism_authenticate_with_personal_access_token`
-
-Read-Only:
-
-- `option_title` (String) must be one of ["PAT Credentials"]
-PAT Credentials
-- `personal_access_token` (String) Asana Personal Access Token (generate yours here).
-
diff --git a/docs/data-sources/source_auth0.md b/docs/data-sources/source_auth0.md
index c7ff792a7..b1871066e 100644
--- a/docs/data-sources/source_auth0.md
+++ b/docs/data-sources/source_auth0.md
@@ -14,7 +14,6 @@ SourceAuth0 DataSource
```terraform
data "airbyte_source_auth0" "my_source_auth0" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,73 +25,12 @@ data "airbyte_source_auth0" "my_source_auth0" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `base_url` (String) The Authentication API is served over HTTPS. All URLs referenced in the documentation have the following base `https://YOUR_DOMAIN`
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["auth0"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_auth0_authentication_method_o_auth2_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_auth0_authentication_method_o_auth2_access_token))
-- `source_auth0_authentication_method_o_auth2_confidential_application` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_auth0_authentication_method_o_auth2_confidential_application))
-- `source_auth0_update_authentication_method_o_auth2_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_auth0_update_authentication_method_o_auth2_access_token))
-- `source_auth0_update_authentication_method_o_auth2_confidential_application` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_auth0_update_authentication_method_o_auth2_confidential_application))
-
-
-### Nested Schema for `configuration.credentials.source_auth0_authentication_method_o_auth2_access_token`
-
-Read-Only:
-
-- `access_token` (String) Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes.
-- `auth_type` (String) must be one of ["oauth2_access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_auth0_authentication_method_o_auth2_confidential_application`
-
-Read-Only:
-
-- `audience` (String) The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab
-- `auth_type` (String) must be one of ["oauth2_confidential_application"]
-- `client_id` (String) Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal.
-- `client_secret` (String) Your application's Client Secret. You can find this value on the application's settings tab after you login the admin portal.
-
-
-
-### Nested Schema for `configuration.credentials.source_auth0_update_authentication_method_o_auth2_access_token`
-
-Read-Only:
-
-- `access_token` (String) Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes.
-- `auth_type` (String) must be one of ["oauth2_access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_auth0_update_authentication_method_o_auth2_confidential_application`
-
-Read-Only:
-
-- `audience` (String) The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab
-- `auth_type` (String) must be one of ["oauth2_confidential_application"]
-- `client_id` (String) Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal.
-- `client_secret` (String) Your application's Client Secret. You can find this value on the application's settings tab after you login the admin portal.
-
diff --git a/docs/data-sources/source_aws_cloudtrail.md b/docs/data-sources/source_aws_cloudtrail.md
index 6f81d4c8c..570fbf10c 100644
--- a/docs/data-sources/source_aws_cloudtrail.md
+++ b/docs/data-sources/source_aws_cloudtrail.md
@@ -14,7 +14,6 @@ SourceAwsCloudtrail DataSource
```terraform
data "airbyte_source_aws_cloudtrail" "my_source_awscloudtrail" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_aws_cloudtrail" "my_source_awscloudtrail" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `aws_key_id` (String) AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.
-- `aws_region_name` (String) The default AWS Region to use, for example, us-west-1 or us-west-2. When specifying a Region inline during client initialization, this property is named region_name.
-- `aws_secret_key` (String) AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.
-- `source_type` (String) must be one of ["aws-cloudtrail"]
-- `start_date` (String) The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD.
-
diff --git a/docs/data-sources/source_azure_blob_storage.md b/docs/data-sources/source_azure_blob_storage.md
index 44564f923..4daef1747 100644
--- a/docs/data-sources/source_azure_blob_storage.md
+++ b/docs/data-sources/source_azure_blob_storage.md
@@ -14,7 +14,6 @@ SourceAzureBlobStorage DataSource
```terraform
data "airbyte_source_azure_blob_storage" "my_source_azureblobstorage" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,51 +25,12 @@ data "airbyte_source_azure_blob_storage" "my_source_azureblobstorage" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `azure_blob_storage_account_key` (String) The Azure blob storage account key.
-- `azure_blob_storage_account_name` (String) The account's name of the Azure Blob Storage.
-- `azure_blob_storage_blobs_prefix` (String) The Azure blob storage prefix to be applied
-- `azure_blob_storage_container_name` (String) The name of the Azure blob storage container.
-- `azure_blob_storage_endpoint` (String) This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
-- `azure_blob_storage_schema_inference_limit` (Number) The Azure blob storage blobs to scan for inferring the schema, useful on large amounts of data with consistent structure
-- `format` (Attributes) Input data format (see [below for nested schema](#nestedatt--configuration--format))
-- `source_type` (String) must be one of ["azure-blob-storage"]
-
-
-### Nested Schema for `configuration.format`
-
-Read-Only:
-
-- `source_azure_blob_storage_input_format_json_lines_newline_delimited_json` (Attributes) Input data format (see [below for nested schema](#nestedatt--configuration--format--source_azure_blob_storage_input_format_json_lines_newline_delimited_json))
-- `source_azure_blob_storage_update_input_format_json_lines_newline_delimited_json` (Attributes) Input data format (see [below for nested schema](#nestedatt--configuration--format--source_azure_blob_storage_update_input_format_json_lines_newline_delimited_json))
-
-
-### Nested Schema for `configuration.format.source_azure_blob_storage_input_format_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `format_type` (String) must be one of ["JSONL"]
-
-
-
-### Nested Schema for `configuration.format.source_azure_blob_storage_update_input_format_json_lines_newline_delimited_json`
-
-Read-Only:
-
-- `format_type` (String) must be one of ["JSONL"]
-
diff --git a/docs/data-sources/source_azure_table.md b/docs/data-sources/source_azure_table.md
index ce642ddde..62a90aa80 100644
--- a/docs/data-sources/source_azure_table.md
+++ b/docs/data-sources/source_azure_table.md
@@ -14,7 +14,6 @@ SourceAzureTable DataSource
```terraform
data "airbyte_source_azure_table" "my_source_azuretable" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_azure_table" "my_source_azuretable" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `source_type` (String) must be one of ["azure-table"]
-- `storage_access_key` (String) Azure Table Storage Access Key. See the docs for more information on how to obtain this key.
-- `storage_account_name` (String) The name of your storage account.
-- `storage_endpoint_suffix` (String) Azure Table Storage service account URL suffix. See the docs for more information on how to obtain endpoint suffix
-
diff --git a/docs/data-sources/source_bamboo_hr.md b/docs/data-sources/source_bamboo_hr.md
index ce7015f56..896a58abd 100644
--- a/docs/data-sources/source_bamboo_hr.md
+++ b/docs/data-sources/source_bamboo_hr.md
@@ -14,7 +14,6 @@ SourceBambooHr DataSource
```terraform
data "airbyte_source_bamboo_hr" "my_source_bamboohr" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_bamboo_hr" "my_source_bamboohr" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Api key of bamboo hr
-- `custom_reports_fields` (String) Comma-separated list of fields to include in custom reports.
-- `custom_reports_include_default_fields` (Boolean) If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names.
-- `source_type` (String) must be one of ["bamboo-hr"]
-- `subdomain` (String) Sub Domain of bamboo hr
-
diff --git a/docs/data-sources/source_bigcommerce.md b/docs/data-sources/source_bigcommerce.md
deleted file mode 100644
index 7d420141a..000000000
--- a/docs/data-sources/source_bigcommerce.md
+++ /dev/null
@@ -1,49 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_bigcommerce Data Source - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceBigcommerce DataSource
----
-
-# airbyte_source_bigcommerce (Data Source)
-
-SourceBigcommerce DataSource
-
-## Example Usage
-
-```terraform
-data "airbyte_source_bigcommerce" "my_source_bigcommerce" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
-```
-
-
-## Schema
-
-### Required
-
-- `source_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `source_type` (String) must be one of ["bigcommerce"]
-- `start_date` (String) The date you would like to replicate data. Format: YYYY-MM-DD.
-- `store_hash` (String) The hash code of the store. For https://api.bigcommerce.com/stores/HASH_CODE/v3/, The store's hash code is 'HASH_CODE'.
-
-
diff --git a/docs/data-sources/source_bigquery.md b/docs/data-sources/source_bigquery.md
index 23d78ce2d..6cd2b04f0 100644
--- a/docs/data-sources/source_bigquery.md
+++ b/docs/data-sources/source_bigquery.md
@@ -14,7 +14,6 @@ SourceBigquery DataSource
```terraform
data "airbyte_source_bigquery" "my_source_bigquery" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_bigquery" "my_source_bigquery" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials_json` (String) The contents of your Service Account Key JSON file. See the docs for more information on how to obtain this key.
-- `dataset_id` (String) The dataset ID to search for tables and views. If you are only loading data from one dataset, setting this option could result in much faster schema discovery.
-- `project_id` (String) The GCP project ID for the project containing the target BigQuery dataset.
-- `source_type` (String) must be one of ["bigquery"]
-
diff --git a/docs/data-sources/source_bing_ads.md b/docs/data-sources/source_bing_ads.md
index fcc29989b..3acf0f55c 100644
--- a/docs/data-sources/source_bing_ads.md
+++ b/docs/data-sources/source_bing_ads.md
@@ -14,7 +14,6 @@ SourceBingAds DataSource
```terraform
data "airbyte_source_bing_ads" "my_source_bingads" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,29 +25,12 @@ data "airbyte_source_bing_ads" "my_source_bingads" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your Microsoft Advertising developer application.
-- `client_secret` (String) The Client Secret of your Microsoft Advertising developer application.
-- `developer_token` (String) Developer token associated with user. See more info in the docs.
-- `lookback_window` (Number) Also known as attribution or conversion window. How far into the past to look for records (in days). If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. Used only for performance report streams in incremental mode.
-- `refresh_token` (String) Refresh Token to renew the expired Access Token.
-- `reports_start_date` (String) The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format.
-- `source_type` (String) must be one of ["bing-ads"]
-- `tenant_id` (String) The Tenant ID of your Microsoft Advertising developer application. Set this to "common" unless you know you need a different value.
-
diff --git a/docs/data-sources/source_braintree.md b/docs/data-sources/source_braintree.md
index 3cdae9dbd..231ce5342 100644
--- a/docs/data-sources/source_braintree.md
+++ b/docs/data-sources/source_braintree.md
@@ -14,7 +14,6 @@ SourceBraintree DataSource
```terraform
data "airbyte_source_braintree" "my_source_braintree" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,27 +25,12 @@ data "airbyte_source_braintree" "my_source_braintree" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `environment` (String) must be one of ["Development", "Sandbox", "Qa", "Production"]
-Environment specifies where the data will come from.
-- `merchant_id` (String) The unique identifier for your entire gateway account. See the docs for more information on how to obtain this ID.
-- `private_key` (String) Braintree Private Key. See the docs for more information on how to obtain this key.
-- `public_key` (String) Braintree Public Key. See the docs for more information on how to obtain this key.
-- `source_type` (String) must be one of ["braintree"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_braze.md b/docs/data-sources/source_braze.md
index acc47cbcf..eaaad1364 100644
--- a/docs/data-sources/source_braze.md
+++ b/docs/data-sources/source_braze.md
@@ -14,7 +14,6 @@ SourceBraze DataSource
```terraform
data "airbyte_source_braze" "my_source_braze" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_braze" "my_source_braze" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Braze REST API key
-- `source_type` (String) must be one of ["braze"]
-- `start_date` (String) Rows after this date will be synced
-- `url` (String) Braze REST API endpoint
-
diff --git a/docs/data-sources/source_cart.md b/docs/data-sources/source_cart.md
new file mode 100644
index 000000000..157dc80f2
--- /dev/null
+++ b/docs/data-sources/source_cart.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_source_cart Data Source - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ SourceCart DataSource
+---
+
+# airbyte_source_cart (Data Source)
+
+SourceCart DataSource
+
+## Example Usage
+
+```terraform
+data "airbyte_source_cart" "my_source_cart" {
+ source_id = "...my_source_id..."
+}
+```
+
+
+## Schema
+
+### Required
+
+- `source_id` (String)
+
+### Read-Only
+
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
+- `name` (String)
+- `source_type` (String)
+- `workspace_id` (String)
+
+
diff --git a/docs/data-sources/source_chargebee.md b/docs/data-sources/source_chargebee.md
index 212dfba5d..19e65c8c1 100644
--- a/docs/data-sources/source_chargebee.md
+++ b/docs/data-sources/source_chargebee.md
@@ -14,7 +14,6 @@ SourceChargebee DataSource
```terraform
data "airbyte_source_chargebee" "my_source_chargebee" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_chargebee" "my_source_chargebee" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `product_catalog` (String) must be one of ["1.0", "2.0"]
-Product Catalog version of your Chargebee site. Instructions on how to find your version you may find here under `API Version` section.
-- `site` (String) The site prefix for your Chargebee instance.
-- `site_api_key` (String) Chargebee API Key. See the docs for more information on how to obtain this key.
-- `source_type` (String) must be one of ["chargebee"]
-- `start_date` (String) UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_chartmogul.md b/docs/data-sources/source_chartmogul.md
index 81ddb13bf..9edba9fa7 100644
--- a/docs/data-sources/source_chartmogul.md
+++ b/docs/data-sources/source_chartmogul.md
@@ -14,7 +14,6 @@ SourceChartmogul DataSource
```terraform
data "airbyte_source_chartmogul" "my_source_chartmogul" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_chartmogul" "my_source_chartmogul" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Your Chartmogul API key. See the docs for info on how to obtain this.
-- `interval` (String) must be one of ["day", "week", "month", "quarter"]
-Some APIs such as Metrics require intervals to cluster data.
-- `source_type` (String) must be one of ["chartmogul"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. When feasible, any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_clickhouse.md b/docs/data-sources/source_clickhouse.md
index ae49b9271..966cb3dde 100644
--- a/docs/data-sources/source_clickhouse.md
+++ b/docs/data-sources/source_clickhouse.md
@@ -14,7 +14,6 @@ SourceClickhouse DataSource
```terraform
data "airbyte_source_clickhouse" "my_source_clickhouse" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,108 +25,12 @@ data "airbyte_source_clickhouse" "my_source_clickhouse" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) The name of the database.
-- `host` (String) The host endpoint of the Clickhouse cluster.
-- `password` (String) The password associated with this username.
-- `port` (Number) The port of the database.
-- `source_type` (String) must be one of ["clickhouse"]
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) The username which is used to access the database.
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `source_clickhouse_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_ssh_tunnel_method_no_tunnel))
-- `source_clickhouse_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_ssh_tunnel_method_password_authentication))
-- `source_clickhouse_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_ssh_tunnel_method_ssh_key_authentication))
-- `source_clickhouse_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_update_ssh_tunnel_method_no_tunnel))
-- `source_clickhouse_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_update_ssh_tunnel_method_password_authentication))
-- `source_clickhouse_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/source_clickup_api.md b/docs/data-sources/source_clickup_api.md
index 9ebe039a3..7e0d708ab 100644
--- a/docs/data-sources/source_clickup_api.md
+++ b/docs/data-sources/source_clickup_api.md
@@ -14,7 +14,6 @@ SourceClickupAPI DataSource
```terraform
data "airbyte_source_clickup_api" "my_source_clickupapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,27 +25,12 @@ data "airbyte_source_clickup_api" "my_source_clickupapi" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_token` (String) Every ClickUp API call required authentication. This field is your personal API token. See here.
-- `folder_id` (String) The ID of your folder in your space. Retrieve it from the `/space/{space_id}/folder` of the ClickUp API. See here.
-- `include_closed_tasks` (Boolean) Include or exclude closed tasks. By default, they are excluded. See here.
-- `list_id` (String) The ID of your list in your folder. Retrieve it from the `/folder/{folder_id}/list` of the ClickUp API. See here.
-- `source_type` (String) must be one of ["clickup-api"]
-- `space_id` (String) The ID of your space in your workspace. Retrieve it from the `/team/{team_id}/space` of the ClickUp API. See here.
-- `team_id` (String) The ID of your team in ClickUp. Retrieve it from the `/team` of the ClickUp API. See here.
-
diff --git a/docs/data-sources/source_clockify.md b/docs/data-sources/source_clockify.md
index 7cd00cd81..8b791ebb6 100644
--- a/docs/data-sources/source_clockify.md
+++ b/docs/data-sources/source_clockify.md
@@ -14,7 +14,6 @@ SourceClockify DataSource
```terraform
data "airbyte_source_clockify" "my_source_clockify" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_clockify" "my_source_clockify" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) You can get your api access_key here This API is Case Sensitive.
-- `api_url` (String) The URL for the Clockify API. This should only need to be modified if connecting to an enterprise version of Clockify.
-- `source_type` (String) must be one of ["clockify"]
-- `workspace_id` (String) WorkSpace Id
-
diff --git a/docs/data-sources/source_close_com.md b/docs/data-sources/source_close_com.md
index 0da7ff227..703372f9e 100644
--- a/docs/data-sources/source_close_com.md
+++ b/docs/data-sources/source_close_com.md
@@ -14,7 +14,6 @@ SourceCloseCom DataSource
```terraform
data "airbyte_source_close_com" "my_source_closecom" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_close_com" "my_source_closecom" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Close.com API key (usually starts with 'api_'; find yours here).
-- `source_type` (String) must be one of ["close-com"]
-- `start_date` (String) The start date to sync data; all data after this date will be replicated. Leave blank to retrieve all the data available in the account. Format: YYYY-MM-DD.
-
diff --git a/docs/data-sources/source_coda.md b/docs/data-sources/source_coda.md
index 14ac64934..4958cd3d2 100644
--- a/docs/data-sources/source_coda.md
+++ b/docs/data-sources/source_coda.md
@@ -14,7 +14,6 @@ SourceCoda DataSource
```terraform
data "airbyte_source_coda" "my_source_coda" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_coda" "my_source_coda" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `auth_token` (String) Bearer token
-- `source_type` (String) must be one of ["coda"]
-
diff --git a/docs/data-sources/source_coin_api.md b/docs/data-sources/source_coin_api.md
index 85aabd87d..1181b4dac 100644
--- a/docs/data-sources/source_coin_api.md
+++ b/docs/data-sources/source_coin_api.md
@@ -14,7 +14,6 @@ SourceCoinAPI DataSource
```terraform
data "airbyte_source_coin_api" "my_source_coinapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,34 +25,12 @@ data "airbyte_source_coin_api" "my_source_coinapi" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API Key
-- `end_date` (String) The end date in ISO 8601 format. If not supplied, data will be returned
-from the start date to the current time, or when the count of result
-elements reaches its limit.
-- `environment` (String) must be one of ["sandbox", "production"]
-The environment to use. Either sandbox or production.
-- `limit` (Number) The maximum number of elements to return. If not supplied, the default
-is 100. For numbers larger than 100, each 100 items is counted as one
-request for pricing purposes. Maximum value is 100000.
-- `period` (String) The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get
-- `source_type` (String) must be one of ["coin-api"]
-- `start_date` (String) The start date in ISO 8601 format.
-- `symbol_id` (String) The symbol ID to use. See the documentation for a list.
-https://docs.coinapi.io/#list-all-symbols-get
-
diff --git a/docs/data-sources/source_coinmarketcap.md b/docs/data-sources/source_coinmarketcap.md
index 9427827f5..e724a2c96 100644
--- a/docs/data-sources/source_coinmarketcap.md
+++ b/docs/data-sources/source_coinmarketcap.md
@@ -14,7 +14,6 @@ SourceCoinmarketcap DataSource
```terraform
data "airbyte_source_coinmarketcap" "my_source_coinmarketcap" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_coinmarketcap" "my_source_coinmarketcap" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Your API Key. See here. The token is case sensitive.
-- `data_type` (String) must be one of ["latest", "historical"]
-/latest: Latest market ticker quotes and averages for cryptocurrencies and exchanges. /historical: Intervals of historic market data like OHLCV data or data for use in charting libraries. See here.
-- `source_type` (String) must be one of ["coinmarketcap"]
-- `symbols` (List of String) Cryptocurrency symbols. (only used for quotes stream)
-
diff --git a/docs/data-sources/source_configcat.md b/docs/data-sources/source_configcat.md
index c12e0b154..8963963ca 100644
--- a/docs/data-sources/source_configcat.md
+++ b/docs/data-sources/source_configcat.md
@@ -14,7 +14,6 @@ SourceConfigcat DataSource
```terraform
data "airbyte_source_configcat" "my_source_configcat" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_configcat" "my_source_configcat" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `password` (String) Basic auth password. See here.
-- `source_type` (String) must be one of ["configcat"]
-- `username` (String) Basic auth user name. See here.
-
diff --git a/docs/data-sources/source_confluence.md b/docs/data-sources/source_confluence.md
index 13d270644..91d2d8bae 100644
--- a/docs/data-sources/source_confluence.md
+++ b/docs/data-sources/source_confluence.md
@@ -14,7 +14,6 @@ SourceConfluence DataSource
```terraform
data "airbyte_source_confluence" "my_source_confluence" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_confluence" "my_source_confluence" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_token` (String) Please follow the Jira confluence for generating an API token: generating an API token.
-- `domain_name` (String) Your Confluence domain name
-- `email` (String) Your Confluence login email
-- `source_type` (String) must be one of ["confluence"]
-
diff --git a/docs/data-sources/source_convex.md b/docs/data-sources/source_convex.md
index 69ae8565c..3edacfa70 100644
--- a/docs/data-sources/source_convex.md
+++ b/docs/data-sources/source_convex.md
@@ -14,7 +14,6 @@ SourceConvex DataSource
```terraform
data "airbyte_source_convex" "my_source_convex" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_convex" "my_source_convex" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_key` (String) API access key used to retrieve data from Convex.
-- `deployment_url` (String)
-- `source_type` (String) must be one of ["convex"]
-
diff --git a/docs/data-sources/source_datascope.md b/docs/data-sources/source_datascope.md
index a0c83b327..373f8387e 100644
--- a/docs/data-sources/source_datascope.md
+++ b/docs/data-sources/source_datascope.md
@@ -14,7 +14,6 @@ SourceDatascope DataSource
```terraform
data "airbyte_source_datascope" "my_source_datascope" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_datascope" "my_source_datascope" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API Key
-- `source_type` (String) must be one of ["datascope"]
-- `start_date` (String) Start date for the data to be replicated
-
diff --git a/docs/data-sources/source_delighted.md b/docs/data-sources/source_delighted.md
index 8a9f5ad3b..501b7349c 100644
--- a/docs/data-sources/source_delighted.md
+++ b/docs/data-sources/source_delighted.md
@@ -14,7 +14,6 @@ SourceDelighted DataSource
```terraform
data "airbyte_source_delighted" "my_source_delighted" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_delighted" "my_source_delighted" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) A Delighted API key.
-- `since` (String) The date from which you'd like to replicate the data
-- `source_type` (String) must be one of ["delighted"]
-
diff --git a/docs/data-sources/source_dixa.md b/docs/data-sources/source_dixa.md
index 8e131050d..9436fca5f 100644
--- a/docs/data-sources/source_dixa.md
+++ b/docs/data-sources/source_dixa.md
@@ -14,7 +14,6 @@ SourceDixa DataSource
```terraform
data "airbyte_source_dixa" "my_source_dixa" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_dixa" "my_source_dixa" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_token` (String) Dixa API token
-- `batch_size` (Number) Number of days to batch into one request. Max 31.
-- `source_type` (String) must be one of ["dixa"]
-- `start_date` (String) The connector pulls records updated from this date onwards.
-
diff --git a/docs/data-sources/source_dockerhub.md b/docs/data-sources/source_dockerhub.md
index 089092ff2..cf44298f0 100644
--- a/docs/data-sources/source_dockerhub.md
+++ b/docs/data-sources/source_dockerhub.md
@@ -14,7 +14,6 @@ SourceDockerhub DataSource
```terraform
data "airbyte_source_dockerhub" "my_source_dockerhub" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_dockerhub" "my_source_dockerhub" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `docker_username` (String) Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/ API call)
-- `source_type` (String) must be one of ["dockerhub"]
-
diff --git a/docs/data-sources/source_dremio.md b/docs/data-sources/source_dremio.md
index e4e27aab9..86680e653 100644
--- a/docs/data-sources/source_dremio.md
+++ b/docs/data-sources/source_dremio.md
@@ -14,7 +14,6 @@ SourceDremio DataSource
```terraform
data "airbyte_source_dremio" "my_source_dremio" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_dremio" "my_source_dremio" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API Key that is generated when you authenticate to Dremio API
-- `base_url` (String) URL of your Dremio instance
-- `source_type` (String) must be one of ["dremio"]
-
diff --git a/docs/data-sources/source_dynamodb.md b/docs/data-sources/source_dynamodb.md
index fb4e21664..28f6e02ff 100644
--- a/docs/data-sources/source_dynamodb.md
+++ b/docs/data-sources/source_dynamodb.md
@@ -14,7 +14,6 @@ SourceDynamodb DataSource
```terraform
data "airbyte_source_dynamodb" "my_source_dynamodb" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,27 +25,12 @@ data "airbyte_source_dynamodb" "my_source_dynamodb" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_key_id` (String) The access key id to access Dynamodb. Airbyte requires read permissions to the database
-- `endpoint` (String) the URL of the Dynamodb database
-- `region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the Dynamodb database
-- `reserved_attribute_names` (String) Comma separated reserved attribute names present in your tables
-- `secret_access_key` (String) The corresponding secret to the access key id.
-- `source_type` (String) must be one of ["dynamodb"]
-
diff --git a/docs/data-sources/source_e2e_test_cloud.md b/docs/data-sources/source_e2e_test_cloud.md
deleted file mode 100644
index 9cda57220..000000000
--- a/docs/data-sources/source_e2e_test_cloud.md
+++ /dev/null
@@ -1,100 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_e2e_test_cloud Data Source - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceE2eTestCloud DataSource
----
-
-# airbyte_source_e2e_test_cloud (Data Source)
-
-SourceE2eTestCloud DataSource
-
-## Example Usage
-
-```terraform
-data "airbyte_source_e2e_test_cloud" "my_source_e2etestcloud" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
-```
-
-
-## Schema
-
-### Required
-
-- `source_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `max_messages` (Number) Number of records to emit per stream. Min 1. Max 100 billion.
-- `message_interval_ms` (Number) Interval between messages in ms. Min 0 ms. Max 60000 ms (1 minute).
-- `mock_catalog` (Attributes) (see [below for nested schema](#nestedatt--configuration--mock_catalog))
-- `seed` (Number) When the seed is unspecified, the current time millis will be used as the seed. Range: [0, 1000000].
-- `source_type` (String) must be one of ["e2e-test-cloud"]
-- `type` (String) must be one of ["CONTINUOUS_FEED"]
-
-
-### Nested Schema for `configuration.mock_catalog`
-
-Read-Only:
-
-- `source_e2e_test_cloud_mock_catalog_multi_schema` (Attributes) A catalog with multiple data streams, each with a different schema. (see [below for nested schema](#nestedatt--configuration--mock_catalog--source_e2e_test_cloud_mock_catalog_multi_schema))
-- `source_e2e_test_cloud_mock_catalog_single_schema` (Attributes) A catalog with one or multiple streams that share the same schema. (see [below for nested schema](#nestedatt--configuration--mock_catalog--source_e2e_test_cloud_mock_catalog_single_schema))
-- `source_e2e_test_cloud_update_mock_catalog_multi_schema` (Attributes) A catalog with multiple data streams, each with a different schema. (see [below for nested schema](#nestedatt--configuration--mock_catalog--source_e2e_test_cloud_update_mock_catalog_multi_schema))
-- `source_e2e_test_cloud_update_mock_catalog_single_schema` (Attributes) A catalog with one or multiple streams that share the same schema. (see [below for nested schema](#nestedatt--configuration--mock_catalog--source_e2e_test_cloud_update_mock_catalog_single_schema))
-
-
-### Nested Schema for `configuration.mock_catalog.source_e2e_test_cloud_mock_catalog_multi_schema`
-
-Read-Only:
-
-- `stream_schemas` (String) A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples.
-- `type` (String) must be one of ["MULTI_STREAM"]
-
-
-
-### Nested Schema for `configuration.mock_catalog.source_e2e_test_cloud_mock_catalog_single_schema`
-
-Read-Only:
-
-- `stream_duplication` (Number) Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is "ds", the duplicated streams will be "ds_0", "ds_1", etc.
-- `stream_name` (String) Name of the data stream.
-- `stream_schema` (String) A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples.
-- `type` (String) must be one of ["SINGLE_STREAM"]
-
-
-
-### Nested Schema for `configuration.mock_catalog.source_e2e_test_cloud_update_mock_catalog_multi_schema`
-
-Read-Only:
-
-- `stream_schemas` (String) A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples.
-- `type` (String) must be one of ["MULTI_STREAM"]
-
-
-
-### Nested Schema for `configuration.mock_catalog.source_e2e_test_cloud_update_mock_catalog_single_schema`
-
-Read-Only:
-
-- `stream_duplication` (Number) Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is "ds", the duplicated streams will be "ds_0", "ds_1", etc.
-- `stream_name` (String) Name of the data stream.
-- `stream_schema` (String) A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples.
-- `type` (String) must be one of ["SINGLE_STREAM"]
-
-
diff --git a/docs/data-sources/source_emailoctopus.md b/docs/data-sources/source_emailoctopus.md
index c06821419..6e4987515 100644
--- a/docs/data-sources/source_emailoctopus.md
+++ b/docs/data-sources/source_emailoctopus.md
@@ -14,7 +14,6 @@ SourceEmailoctopus DataSource
```terraform
data "airbyte_source_emailoctopus" "my_source_emailoctopus" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_emailoctopus" "my_source_emailoctopus" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) EmailOctopus API Key. See the docs for information on how to generate this key.
-- `source_type` (String) must be one of ["emailoctopus"]
-
diff --git a/docs/data-sources/source_exchange_rates.md b/docs/data-sources/source_exchange_rates.md
index 5656b0341..1e93acebc 100644
--- a/docs/data-sources/source_exchange_rates.md
+++ b/docs/data-sources/source_exchange_rates.md
@@ -14,7 +14,6 @@ SourceExchangeRates DataSource
```terraform
data "airbyte_source_exchange_rates" "my_source_exchangerates" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_exchange_rates" "my_source_exchangerates" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_key` (String) Your API Key. See here. The key is case sensitive.
-- `base` (String) ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default base currency is EUR
-- `ignore_weekends` (Boolean) Ignore weekends? (Exchanges don't run on weekends)
-- `source_type` (String) must be one of ["exchange-rates"]
-- `start_date` (String) Start getting data from that date.
-
diff --git a/docs/data-sources/source_facebook_marketing.md b/docs/data-sources/source_facebook_marketing.md
index 37aac5827..4ab085b0e 100644
--- a/docs/data-sources/source_facebook_marketing.md
+++ b/docs/data-sources/source_facebook_marketing.md
@@ -14,7 +14,6 @@ SourceFacebookMarketing DataSource
```terraform
data "airbyte_source_facebook_marketing" "my_source_facebookmarketing" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,52 +25,12 @@ data "airbyte_source_facebook_marketing" "my_source_facebookmarketing" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information.
-- `account_id` (String) The Facebook Ad account ID to use when pulling data from the Facebook Marketing API. Open your Meta Ads Manager. The Ad account ID number is in the account dropdown menu or in your browser's address bar. See the docs for more information.
-- `action_breakdowns_allow_empty` (Boolean) Allows action_breakdowns to be an empty list
-- `client_id` (String) The Client Id for your OAuth app
-- `client_secret` (String) The Client Secret for your OAuth app
-- `custom_insights` (Attributes List) A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on "add" to fill this field. (see [below for nested schema](#nestedatt--configuration--custom_insights))
-- `end_date` (String) The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.
-- `fetch_thumbnail_images` (Boolean) Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative.
-- `include_deleted` (Boolean) Set to active if you want to include data from deleted Campaigns, Ads, and AdSets.
-- `insights_lookback_window` (Number) The attribution window. Facebook freezes insight data 28 days after it was generated, which means that all data from the past 28 days may have changed since we last emitted it, so you can retrieve refreshed insights from the past by setting this parameter. If you set a custom lookback window value in Facebook account, please provide the same value here.
-- `max_batch_size` (Number) Maximum batch size used when sending batch requests to Facebook API. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.
-- `page_size` (Number) Page size used when sending requests to Facebook API to specify number of records per page when response has pagination. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.
-- `source_type` (String) must be one of ["facebook-marketing"]
-- `start_date` (String) The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-
-
-### Nested Schema for `configuration.custom_insights`
-
-Read-Only:
-
-- `action_breakdowns` (List of String) A list of chosen action_breakdowns for action_breakdowns
-- `action_report_time` (String) must be one of ["conversion", "impression", "mixed"]
-Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.
-- `breakdowns` (List of String) A list of chosen breakdowns for breakdowns
-- `end_date` (String) The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.
-- `fields` (List of String) A list of chosen fields for fields parameter
-- `insights_lookback_window` (Number) The attribution window
-- `level` (String) must be one of ["ad", "adset", "campaign", "account"]
-Chosen level for API
-- `name` (String) The name value of insight
-- `start_date` (String) The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z.
-- `time_increment` (Number) Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only).
-
diff --git a/docs/data-sources/source_facebook_pages.md b/docs/data-sources/source_facebook_pages.md
index ae7b9d1a9..e4ae98878 100644
--- a/docs/data-sources/source_facebook_pages.md
+++ b/docs/data-sources/source_facebook_pages.md
@@ -14,7 +14,6 @@ SourceFacebookPages DataSource
```terraform
data "airbyte_source_facebook_pages" "my_source_facebookpages" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_facebook_pages" "my_source_facebookpages" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) Facebook Page Access Token
-- `page_id` (String) Page ID
-- `source_type` (String) must be one of ["facebook-pages"]
-
diff --git a/docs/data-sources/source_faker.md b/docs/data-sources/source_faker.md
index 6abb65858..4f570faca 100644
--- a/docs/data-sources/source_faker.md
+++ b/docs/data-sources/source_faker.md
@@ -14,7 +14,6 @@ SourceFaker DataSource
```terraform
data "airbyte_source_faker" "my_source_faker" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_faker" "my_source_faker" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `always_updated` (Boolean) Should the updated_at values for every record be new each sync? Setting this to false will case the source to stop emitting records after COUNT records have been emitted.
-- `count` (Number) How many users should be generated in total. This setting does not apply to the purchases or products stream.
-- `parallelism` (Number) How many parallel workers should we use to generate fake data? Choose a value equal to the number of CPUs you will allocate to this source.
-- `records_per_slice` (Number) How many fake records will be in each page (stream slice), before a state message is emitted?
-- `seed` (Number) Manually control the faker random seed to return the same values on subsequent runs (leave -1 for random)
-- `source_type` (String) must be one of ["faker"]
-
diff --git a/docs/data-sources/source_fauna.md b/docs/data-sources/source_fauna.md
index bc859a89e..19924c450 100644
--- a/docs/data-sources/source_fauna.md
+++ b/docs/data-sources/source_fauna.md
@@ -14,7 +14,6 @@ SourceFauna DataSource
```terraform
data "airbyte_source_fauna" "my_source_fauna" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,94 +25,12 @@ data "airbyte_source_fauna" "my_source_fauna" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `collection` (Attributes) Settings for the Fauna Collection. (see [below for nested schema](#nestedatt--configuration--collection))
-- `domain` (String) Domain of Fauna to query. Defaults db.fauna.com. See the docs.
-- `port` (Number) Endpoint port.
-- `scheme` (String) URL scheme.
-- `secret` (String) Fauna secret, used when authenticating with the database.
-- `source_type` (String) must be one of ["fauna"]
-
-
-### Nested Schema for `configuration.collection`
-
-Read-Only:
-
-- `deletions` (Attributes) This only applies to incremental syncs.
-Enabling deletion mode informs your destination of deleted documents.
-Disabled - Leave this feature disabled, and ignore deleted documents.
-Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted. (see [below for nested schema](#nestedatt--configuration--collection--deletions))
-- `page_size` (Number) The page size used when reading documents from the database. The larger the page size, the faster the connector processes documents. However, if a page is too large, the connector may fail.
-Choose your page size based on how large the documents are.
-See the docs.
-
-
-### Nested Schema for `configuration.collection.deletions`
-
-Read-Only:
-
-- `source_fauna_collection_deletion_mode_disabled` (Attributes) This only applies to incremental syncs.
-Enabling deletion mode informs your destination of deleted documents.
-Disabled - Leave this feature disabled, and ignore deleted documents.
-Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted. (see [below for nested schema](#nestedatt--configuration--collection--deletions--source_fauna_collection_deletion_mode_disabled))
-- `source_fauna_collection_deletion_mode_enabled` (Attributes) This only applies to incremental syncs.
-Enabling deletion mode informs your destination of deleted documents.
-Disabled - Leave this feature disabled, and ignore deleted documents.
-Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted. (see [below for nested schema](#nestedatt--configuration--collection--deletions--source_fauna_collection_deletion_mode_enabled))
-- `source_fauna_update_collection_deletion_mode_disabled` (Attributes) This only applies to incremental syncs.
-Enabling deletion mode informs your destination of deleted documents.
-Disabled - Leave this feature disabled, and ignore deleted documents.
-Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted. (see [below for nested schema](#nestedatt--configuration--collection--deletions--source_fauna_update_collection_deletion_mode_disabled))
-- `source_fauna_update_collection_deletion_mode_enabled` (Attributes) This only applies to incremental syncs.
-Enabling deletion mode informs your destination of deleted documents.
-Disabled - Leave this feature disabled, and ignore deleted documents.
-Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted. (see [below for nested schema](#nestedatt--configuration--collection--deletions--source_fauna_update_collection_deletion_mode_enabled))
-
-
-### Nested Schema for `configuration.collection.deletions.source_fauna_update_collection_deletion_mode_enabled`
-
-Read-Only:
-
-- `deletion_mode` (String) must be one of ["ignore"]
-
-
-
-### Nested Schema for `configuration.collection.deletions.source_fauna_update_collection_deletion_mode_enabled`
-
-Read-Only:
-
-- `column` (String) Name of the "deleted at" column.
-- `deletion_mode` (String) must be one of ["deleted_field"]
-
-
-
-### Nested Schema for `configuration.collection.deletions.source_fauna_update_collection_deletion_mode_enabled`
-
-Read-Only:
-
-- `deletion_mode` (String) must be one of ["ignore"]
-
-
-
-### Nested Schema for `configuration.collection.deletions.source_fauna_update_collection_deletion_mode_enabled`
-
-Read-Only:
-
-- `column` (String) Name of the "deleted at" column.
-- `deletion_mode` (String) must be one of ["deleted_field"]
-
diff --git a/docs/data-sources/source_file.md b/docs/data-sources/source_file.md
new file mode 100644
index 000000000..75899efce
--- /dev/null
+++ b/docs/data-sources/source_file.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_source_file Data Source - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ SourceFile DataSource
+---
+
+# airbyte_source_file (Data Source)
+
+SourceFile DataSource
+
+## Example Usage
+
+```terraform
+data "airbyte_source_file" "my_source_file" {
+ source_id = "...my_source_id..."
+}
+```
+
+
+## Schema
+
+### Required
+
+- `source_id` (String)
+
+### Read-Only
+
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
+- `name` (String)
+- `source_type` (String)
+- `workspace_id` (String)
+
+
diff --git a/docs/data-sources/source_file_secure.md b/docs/data-sources/source_file_secure.md
deleted file mode 100644
index eb24b43b7..000000000
--- a/docs/data-sources/source_file_secure.md
+++ /dev/null
@@ -1,221 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_file_secure Data Source - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceFileSecure DataSource
----
-
-# airbyte_source_file_secure (Data Source)
-
-SourceFileSecure DataSource
-
-## Example Usage
-
-```terraform
-data "airbyte_source_file_secure" "my_source_filesecure" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
-```
-
-
-## Schema
-
-### Required
-
-- `source_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `dataset_name` (String) The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
-- `format` (String) must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "feather", "parquet", "yaml"]
-The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
-- `provider` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider))
-- `reader_options` (String) This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
-- `source_type` (String) must be one of ["file-secure"]
-- `url` (String) The URL path to access the file which should be replicated.
-
-
-### Nested Schema for `configuration.provider`
-
-Read-Only:
-
-- `source_file_secure_storage_provider_az_blob_azure_blob_storage` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_az_blob_azure_blob_storage))
-- `source_file_secure_storage_provider_gcs_google_cloud_storage` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_gcs_google_cloud_storage))
-- `source_file_secure_storage_provider_https_public_web` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_https_public_web))
-- `source_file_secure_storage_provider_s3_amazon_web_services` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_s3_amazon_web_services))
-- `source_file_secure_storage_provider_scp_secure_copy_protocol` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_scp_secure_copy_protocol))
-- `source_file_secure_storage_provider_sftp_secure_file_transfer_protocol` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_sftp_secure_file_transfer_protocol))
-- `source_file_secure_storage_provider_ssh_secure_shell` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_ssh_secure_shell))
-- `source_file_secure_update_storage_provider_az_blob_azure_blob_storage` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_az_blob_azure_blob_storage))
-- `source_file_secure_update_storage_provider_gcs_google_cloud_storage` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_gcs_google_cloud_storage))
-- `source_file_secure_update_storage_provider_https_public_web` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_https_public_web))
-- `source_file_secure_update_storage_provider_s3_amazon_web_services` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_s3_amazon_web_services))
-- `source_file_secure_update_storage_provider_scp_secure_copy_protocol` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_scp_secure_copy_protocol))
-- `source_file_secure_update_storage_provider_sftp_secure_file_transfer_protocol` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_sftp_secure_file_transfer_protocol))
-- `source_file_secure_update_storage_provider_ssh_secure_shell` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_ssh_secure_shell))
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_az_blob_azure_blob_storage`
-
-Read-Only:
-
-- `sas_token` (String) To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
-- `shared_key` (String) To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
-- `storage` (String) must be one of ["AzBlob"]
-- `storage_account` (String) The globally unique name of the storage account that the desired blob sits within. See here for more details.
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_gcs_google_cloud_storage`
-
-Read-Only:
-
-- `service_account_json` (String) In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.
-- `storage` (String) must be one of ["GCS"]
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_https_public_web`
-
-Read-Only:
-
-- `storage` (String) must be one of ["HTTPS"]
-- `user_agent` (Boolean) Add User-Agent to request
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_s3_amazon_web_services`
-
-Read-Only:
-
-- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `storage` (String) must be one of ["S3"]
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_scp_secure_copy_protocol`
-
-Read-Only:
-
-- `host` (String)
-- `password` (String)
-- `port` (String)
-- `storage` (String) must be one of ["SCP"]
-- `user` (String)
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_sftp_secure_file_transfer_protocol`
-
-Read-Only:
-
-- `host` (String)
-- `password` (String)
-- `port` (String)
-- `storage` (String) must be one of ["SFTP"]
-- `user` (String)
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_ssh_secure_shell`
-
-Read-Only:
-
-- `host` (String)
-- `password` (String)
-- `port` (String)
-- `storage` (String) must be one of ["SSH"]
-- `user` (String)
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_az_blob_azure_blob_storage`
-
-Read-Only:
-
-- `sas_token` (String) To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
-- `shared_key` (String) To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
-- `storage` (String) must be one of ["AzBlob"]
-- `storage_account` (String) The globally unique name of the storage account that the desired blob sits within. See here for more details.
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_gcs_google_cloud_storage`
-
-Read-Only:
-
-- `service_account_json` (String) In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.
-- `storage` (String) must be one of ["GCS"]
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_https_public_web`
-
-Read-Only:
-
-- `storage` (String) must be one of ["HTTPS"]
-- `user_agent` (Boolean) Add User-Agent to request
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_s3_amazon_web_services`
-
-Read-Only:
-
-- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `storage` (String) must be one of ["S3"]
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_scp_secure_copy_protocol`
-
-Read-Only:
-
-- `host` (String)
-- `password` (String)
-- `port` (String)
-- `storage` (String) must be one of ["SCP"]
-- `user` (String)
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_sftp_secure_file_transfer_protocol`
-
-Read-Only:
-
-- `host` (String)
-- `password` (String)
-- `port` (String)
-- `storage` (String) must be one of ["SFTP"]
-- `user` (String)
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_ssh_secure_shell`
-
-Read-Only:
-
-- `host` (String)
-- `password` (String)
-- `port` (String)
-- `storage` (String) must be one of ["SSH"]
-- `user` (String)
-
-
diff --git a/docs/data-sources/source_firebolt.md b/docs/data-sources/source_firebolt.md
index d2b817259..b6b325c85 100644
--- a/docs/data-sources/source_firebolt.md
+++ b/docs/data-sources/source_firebolt.md
@@ -14,7 +14,6 @@ SourceFirebolt DataSource
```terraform
data "airbyte_source_firebolt" "my_source_firebolt" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,27 +25,12 @@ data "airbyte_source_firebolt" "my_source_firebolt" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `account` (String) Firebolt account to login.
-- `database` (String) The database to connect to.
-- `engine` (String) Engine name or url to connect to.
-- `host` (String) The host name of your Firebolt database.
-- `password` (String) Firebolt password.
-- `source_type` (String) must be one of ["firebolt"]
-- `username` (String) Firebolt email address you use to login.
-
diff --git a/docs/data-sources/source_freshcaller.md b/docs/data-sources/source_freshcaller.md
index 68a9ae713..36bd63589 100644
--- a/docs/data-sources/source_freshcaller.md
+++ b/docs/data-sources/source_freshcaller.md
@@ -14,7 +14,6 @@ SourceFreshcaller DataSource
```terraform
data "airbyte_source_freshcaller" "my_source_freshcaller" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_freshcaller" "my_source_freshcaller" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Freshcaller API Key. See the docs for more information on how to obtain this key.
-- `domain` (String) Used to construct Base URL for the Freshcaller APIs
-- `requests_per_minute` (Number) The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account.
-- `source_type` (String) must be one of ["freshcaller"]
-- `start_date` (String) UTC date and time. Any data created after this date will be replicated.
-- `sync_lag_minutes` (Number) Lag in minutes for each sync, i.e., at time T, data for the time range [prev_sync_time, T-30] will be fetched
-
diff --git a/docs/data-sources/source_freshdesk.md b/docs/data-sources/source_freshdesk.md
index 88ab4b73e..fe6f18c3c 100644
--- a/docs/data-sources/source_freshdesk.md
+++ b/docs/data-sources/source_freshdesk.md
@@ -14,7 +14,6 @@ SourceFreshdesk DataSource
```terraform
data "airbyte_source_freshdesk" "my_source_freshdesk" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_freshdesk" "my_source_freshdesk" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Freshdesk API Key. See the docs for more information on how to obtain this key.
-- `domain` (String) Freshdesk domain
-- `requests_per_minute` (Number) The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account.
-- `source_type` (String) must be one of ["freshdesk"]
-- `start_date` (String) UTC date and time. Any data created after this date will be replicated. If this parameter is not set, all data will be replicated.
-
diff --git a/docs/data-sources/source_freshsales.md b/docs/data-sources/source_freshsales.md
index ceb870b50..14994a6ab 100644
--- a/docs/data-sources/source_freshsales.md
+++ b/docs/data-sources/source_freshsales.md
@@ -14,7 +14,6 @@ SourceFreshsales DataSource
```terraform
data "airbyte_source_freshsales" "my_source_freshsales" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_freshsales" "my_source_freshsales" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Freshsales API Key. See here. The key is case sensitive.
-- `domain_name` (String) The Name of your Freshsales domain
-- `source_type` (String) must be one of ["freshsales"]
-
diff --git a/docs/data-sources/source_gainsight_px.md b/docs/data-sources/source_gainsight_px.md
index f464387d7..054b08bd5 100644
--- a/docs/data-sources/source_gainsight_px.md
+++ b/docs/data-sources/source_gainsight_px.md
@@ -14,7 +14,6 @@ SourceGainsightPx DataSource
```terraform
data "airbyte_source_gainsight_px" "my_source_gainsightpx" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_gainsight_px" "my_source_gainsightpx" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) The Aptrinsic API Key which is recieved from the dashboard settings (ref - https://app.aptrinsic.com/settings/api-keys)
-- `source_type` (String) must be one of ["gainsight-px"]
-
diff --git a/docs/data-sources/source_gcs.md b/docs/data-sources/source_gcs.md
index df00cdc2a..6b73f71a8 100644
--- a/docs/data-sources/source_gcs.md
+++ b/docs/data-sources/source_gcs.md
@@ -14,7 +14,6 @@ SourceGcs DataSource
```terraform
data "airbyte_source_gcs" "my_source_gcs" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_gcs" "my_source_gcs" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `gcs_bucket` (String) GCS bucket name
-- `gcs_path` (String) GCS path to data
-- `service_account` (String) Enter your Google Cloud service account key in JSON format
-- `source_type` (String) must be one of ["gcs"]
-
diff --git a/docs/data-sources/source_getlago.md b/docs/data-sources/source_getlago.md
index 0d1b331d4..f7615a0aa 100644
--- a/docs/data-sources/source_getlago.md
+++ b/docs/data-sources/source_getlago.md
@@ -14,7 +14,6 @@ SourceGetlago DataSource
```terraform
data "airbyte_source_getlago" "my_source_getlago" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_getlago" "my_source_getlago" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Your API Key. See here.
-- `source_type` (String) must be one of ["getlago"]
-
diff --git a/docs/data-sources/source_github.md b/docs/data-sources/source_github.md
index 439abaa9d..2f460a7df 100644
--- a/docs/data-sources/source_github.md
+++ b/docs/data-sources/source_github.md
@@ -14,7 +14,6 @@ SourceGithub DataSource
```terraform
data "airbyte_source_github" "my_source_github" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,75 +25,12 @@ data "airbyte_source_github" "my_source_github" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `branch` (String) Space-delimited list of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.
-- `credentials` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials))
-- `repository` (String) Space-delimited list of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories.
-- `requests_per_hour` (Number) The GitHub API allows for a maximum of 5000 requests per hour (15000 for Github Enterprise). You can specify a lower value to limit your use of the API quota.
-- `source_type` (String) must be one of ["github"]
-- `start_date` (String) The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_github_authentication_o_auth` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials--source_github_authentication_o_auth))
-- `source_github_authentication_personal_access_token` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials--source_github_authentication_personal_access_token))
-- `source_github_update_authentication_o_auth` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials--source_github_update_authentication_o_auth))
-- `source_github_update_authentication_personal_access_token` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials--source_github_update_authentication_personal_access_token))
-
-
-### Nested Schema for `configuration.credentials.source_github_authentication_o_auth`
-
-Read-Only:
-
-- `access_token` (String) OAuth access token
-- `client_id` (String) OAuth Client Id
-- `client_secret` (String) OAuth Client secret
-- `option_title` (String) must be one of ["OAuth Credentials"]
-
-
-
-### Nested Schema for `configuration.credentials.source_github_authentication_personal_access_token`
-
-Read-Only:
-
-- `option_title` (String) must be one of ["PAT Credentials"]
-- `personal_access_token` (String) Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with ","
-
-
-
-### Nested Schema for `configuration.credentials.source_github_update_authentication_o_auth`
-
-Read-Only:
-
-- `access_token` (String) OAuth access token
-- `client_id` (String) OAuth Client Id
-- `client_secret` (String) OAuth Client secret
-- `option_title` (String) must be one of ["OAuth Credentials"]
-
-
-
-### Nested Schema for `configuration.credentials.source_github_update_authentication_personal_access_token`
-
-Read-Only:
-
-- `option_title` (String) must be one of ["PAT Credentials"]
-- `personal_access_token` (String) Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with ","
-
diff --git a/docs/data-sources/source_gitlab.md b/docs/data-sources/source_gitlab.md
index 74e3c6d91..d813cdf4d 100644
--- a/docs/data-sources/source_gitlab.md
+++ b/docs/data-sources/source_gitlab.md
@@ -14,7 +14,6 @@ SourceGitlab DataSource
```terraform
data "airbyte_source_gitlab" "my_source_gitlab" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,79 +25,12 @@ data "airbyte_source_gitlab" "my_source_gitlab" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_url` (String) Please enter your basic URL from GitLab instance.
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `groups` (String) Space-delimited list of groups. e.g. airbyte.io.
-- `projects` (String) Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.
-- `source_type` (String) must be one of ["gitlab"]
-- `start_date` (String) The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_gitlab_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_gitlab_authorization_method_o_auth2_0))
-- `source_gitlab_authorization_method_private_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_gitlab_authorization_method_private_token))
-- `source_gitlab_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_gitlab_update_authorization_method_o_auth2_0))
-- `source_gitlab_update_authorization_method_private_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_gitlab_update_authorization_method_private_token))
-
-
-### Nested Schema for `configuration.credentials.source_gitlab_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The API ID of the Gitlab developer application.
-- `client_secret` (String) The API Secret the Gitlab developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_gitlab_authorization_method_private_token`
-
-Read-Only:
-
-- `access_token` (String) Log into your Gitlab account and then generate a personal Access Token.
-- `auth_type` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_gitlab_update_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The API ID of the Gitlab developer application.
-- `client_secret` (String) The API Secret the Gitlab developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_gitlab_update_authorization_method_private_token`
-
-Read-Only:
-
-- `access_token` (String) Log into your Gitlab account and then generate a personal Access Token.
-- `auth_type` (String) must be one of ["access_token"]
-
diff --git a/docs/data-sources/source_glassfrog.md b/docs/data-sources/source_glassfrog.md
index 392ef9675..afee93207 100644
--- a/docs/data-sources/source_glassfrog.md
+++ b/docs/data-sources/source_glassfrog.md
@@ -14,7 +14,6 @@ SourceGlassfrog DataSource
```terraform
data "airbyte_source_glassfrog" "my_source_glassfrog" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_glassfrog" "my_source_glassfrog" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API key provided by Glassfrog
-- `source_type` (String) must be one of ["glassfrog"]
-
diff --git a/docs/data-sources/source_gnews.md b/docs/data-sources/source_gnews.md
index 301d66d2b..a81d0e57d 100644
--- a/docs/data-sources/source_gnews.md
+++ b/docs/data-sources/source_gnews.md
@@ -14,7 +14,6 @@ SourceGnews DataSource
```terraform
data "airbyte_source_gnews" "my_source_gnews" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,63 +25,12 @@ data "airbyte_source_gnews" "my_source_gnews" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API Key
-- `country` (String) must be one of ["au", "br", "ca", "cn", "eg", "fr", "de", "gr", "hk", "in", "ie", "il", "it", "jp", "nl", "no", "pk", "pe", "ph", "pt", "ro", "ru", "sg", "es", "se", "ch", "tw", "ua", "gb", "us"]
-This parameter allows you to specify the country where the news articles returned by the API were published, the contents of the articles are not necessarily related to the specified country. You have to set as value the 2 letters code of the country you want to filter.
-- `end_date` (String) This parameter allows you to filter the articles that have a publication date smaller than or equal to the specified value. The date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)
-- `in` (List of String) This parameter allows you to choose in which attributes the keywords are searched. The attributes that can be set are title, description and content. It is possible to combine several attributes.
-- `language` (String) must be one of ["ar", "zh", "nl", "en", "fr", "de", "el", "he", "hi", "it", "ja", "ml", "mr", "no", "pt", "ro", "ru", "es", "sv", "ta", "te", "uk"]
-- `nullable` (List of String) This parameter allows you to specify the attributes that you allow to return null values. The attributes that can be set are title, description and content. It is possible to combine several attributes
-- `query` (String) This parameter allows you to specify your search keywords to find the news articles you are looking for. The keywords will be used to return the most relevant articles. It is possible to use logical operators with keywords. - Phrase Search Operator: This operator allows you to make an exact search. Keywords surrounded by
- quotation marks are used to search for articles with the exact same keyword sequence.
- For example the query: "Apple iPhone" will return articles matching at least once this sequence of keywords.
-- Logical AND Operator: This operator allows you to make sure that several keywords are all used in the article
- search. By default the space character acts as an AND operator, it is possible to replace the space character
- by AND to obtain the same result. For example the query: Apple Microsoft is equivalent to Apple AND Microsoft
-- Logical OR Operator: This operator allows you to retrieve articles matching the keyword a or the keyword b.
- It is important to note that this operator has a higher precedence than the AND operator. For example the
- query: Apple OR Microsoft will return all articles matching the keyword Apple as well as all articles matching
- the keyword Microsoft
-- Logical NOT Operator: This operator allows you to remove from the results the articles corresponding to the
- specified keywords. To use it, you need to add NOT in front of each word or phrase surrounded by quotes.
- For example the query: Apple NOT iPhone will return all articles matching the keyword Apple but not the keyword
- iPhone
-- `sortby` (String) must be one of ["publishedAt", "relevance"]
-This parameter allows you to choose with which type of sorting the articles should be returned. Two values are possible:
- - publishedAt = sort by publication date, the articles with the most recent publication date are returned first
- - relevance = sort by best match to keywords, the articles with the best match are returned first
-- `source_type` (String) must be one of ["gnews"]
-- `start_date` (String) This parameter allows you to filter the articles that have a publication date greater than or equal to the specified value. The date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)
-- `top_headlines_query` (String) This parameter allows you to specify your search keywords to find the news articles you are looking for. The keywords will be used to return the most relevant articles. It is possible to use logical operators with keywords. - Phrase Search Operator: This operator allows you to make an exact search. Keywords surrounded by
- quotation marks are used to search for articles with the exact same keyword sequence.
- For example the query: "Apple iPhone" will return articles matching at least once this sequence of keywords.
-- Logical AND Operator: This operator allows you to make sure that several keywords are all used in the article
- search. By default the space character acts as an AND operator, it is possible to replace the space character
- by AND to obtain the same result. For example the query: Apple Microsoft is equivalent to Apple AND Microsoft
-- Logical OR Operator: This operator allows you to retrieve articles matching the keyword a or the keyword b.
- It is important to note that this operator has a higher precedence than the AND operator. For example the
- query: Apple OR Microsoft will return all articles matching the keyword Apple as well as all articles matching
- the keyword Microsoft
-- Logical NOT Operator: This operator allows you to remove from the results the articles corresponding to the
- specified keywords. To use it, you need to add NOT in front of each word or phrase surrounded by quotes.
- For example the query: Apple NOT iPhone will return all articles matching the keyword Apple but not the keyword
- iPhone
-- `top_headlines_topic` (String) must be one of ["breaking-news", "world", "nation", "business", "technology", "entertainment", "sports", "science", "health"]
-This parameter allows you to change the category for the request.
-
diff --git a/docs/data-sources/source_google_ads.md b/docs/data-sources/source_google_ads.md
index d69d22932..88919455a 100644
--- a/docs/data-sources/source_google_ads.md
+++ b/docs/data-sources/source_google_ads.md
@@ -14,7 +14,6 @@ SourceGoogleAds DataSource
```terraform
data "airbyte_source_google_ads" "my_source_googleads" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,48 +25,12 @@ data "airbyte_source_google_ads" "my_source_googleads" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `conversion_window_days` (Number) A conversion window is the number of days after an ad interaction (such as an ad click or video view) during which a conversion, such as a purchase, is recorded in Google Ads. For more information, see Google's documentation.
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `custom_queries` (Attributes List) (see [below for nested schema](#nestedatt--configuration--custom_queries))
-- `customer_id` (String) Comma-separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. For detailed instructions on finding this value, refer to our documentation.
-- `end_date` (String) UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set)
-- `login_customer_id` (String) If your access to the customer account is through a manager account, this field is required, and must be set to the 10-digit customer ID of the manager account. For more information about this field, refer to Google's documentation.
-- `source_type` (String) must be one of ["google-ads"]
-- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. (Default value of two years ago is used if not set)
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `access_token` (String) The Access Token for making authenticated requests. For detailed instructions on finding this value, refer to our documentation.
-- `client_id` (String) The Client ID of your Google Ads developer application. For detailed instructions on finding this value, refer to our documentation.
-- `client_secret` (String) The Client Secret of your Google Ads developer application. For detailed instructions on finding this value, refer to our documentation.
-- `developer_token` (String) The Developer Token granted by Google to use their APIs. For detailed instructions on finding this value, refer to our documentation.
-- `refresh_token` (String) The token used to obtain a new Access Token. For detailed instructions on finding this value, refer to our documentation.
-
-
-
-### Nested Schema for `configuration.custom_queries`
-
-Read-Only:
-
-- `query` (String) A custom defined GAQL query for building the report. Avoid including the segments.date field; wherever possible, Airbyte will automatically include it for incremental syncs. For more information, refer to Google's documentation.
-- `table_name` (String) The table name in your destination database for the chosen query.
-
diff --git a/docs/data-sources/source_google_analytics_data_api.md b/docs/data-sources/source_google_analytics_data_api.md
index c29393f3e..424e81348 100644
--- a/docs/data-sources/source_google_analytics_data_api.md
+++ b/docs/data-sources/source_google_analytics_data_api.md
@@ -14,7 +14,6 @@ SourceGoogleAnalyticsDataAPI DataSource
```terraform
data "airbyte_source_google_analytics_data_api" "my_source_googleanalyticsdataapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,77 +25,12 @@ data "airbyte_source_google_analytics_data_api" "my_source_googleanalyticsdataap
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials))
-- `custom_reports` (String) A JSON array describing the custom reports you want to sync from Google Analytics. See the documentation for more information about the exact format you can use to fill out this field.
-- `date_ranges_start_date` (String) The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports.
-- `property_id` (String) The Property ID is a unique number assigned to each property in Google Analytics, found in your GA4 property URL. This ID allows the connector to track the specific events associated with your property. Refer to the Google Analytics documentation to locate your property ID.
-- `source_type` (String) must be one of ["google-analytics-data-api"]
-- `window_in_days` (Number) The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. Does not apply to custom Cohort reports. More information is available in the documentation.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_google_analytics_data_api_credentials_authenticate_via_google_oauth` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_data_api_credentials_authenticate_via_google_oauth))
-- `source_google_analytics_data_api_credentials_service_account_key_authentication` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_data_api_credentials_service_account_key_authentication))
-- `source_google_analytics_data_api_update_credentials_authenticate_via_google_oauth` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_data_api_update_credentials_authenticate_via_google_oauth))
-- `source_google_analytics_data_api_update_credentials_service_account_key_authentication` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_data_api_update_credentials_service_account_key_authentication))
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_data_api_credentials_authenticate_via_google_oauth`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Google Analytics developer application.
-- `client_secret` (String) The Client Secret of your Google Analytics developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_data_api_credentials_service_account_key_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Service"]
-- `credentials_json` (String) The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_data_api_update_credentials_authenticate_via_google_oauth`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Google Analytics developer application.
-- `client_secret` (String) The Client Secret of your Google Analytics developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_data_api_update_credentials_service_account_key_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Service"]
-- `credentials_json` (String) The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide.
-
diff --git a/docs/data-sources/source_google_analytics_v4.md b/docs/data-sources/source_google_analytics_v4.md
deleted file mode 100644
index b17e2c30e..000000000
--- a/docs/data-sources/source_google_analytics_v4.md
+++ /dev/null
@@ -1,102 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_google_analytics_v4 Data Source - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceGoogleAnalyticsV4 DataSource
----
-
-# airbyte_source_google_analytics_v4 (Data Source)
-
-SourceGoogleAnalyticsV4 DataSource
-
-## Example Usage
-
-```terraform
-data "airbyte_source_google_analytics_v4" "my_source_googleanalyticsv4" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
-```
-
-
-## Schema
-
-### Required
-
-- `source_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials))
-- `custom_reports` (String) A JSON array describing the custom reports you want to sync from Google Analytics. See the docs for more information about the exact format you can use to fill out this field.
-- `source_type` (String) must be one of ["google-analytics-v4"]
-- `start_date` (String) The date in the format YYYY-MM-DD. Any data before this date will not be replicated.
-- `view_id` (String) The ID for the Google Analytics View you want to fetch data from. This can be found from the Google Analytics Account Explorer.
-- `window_in_days` (Number) The time increment used by the connector when requesting data from the Google Analytics API. More information is available in the the docs. The bigger this value is, the faster the sync will be, but the more likely that sampling will be applied to your data, potentially causing inaccuracies in the returned results. We recommend setting this to 1 unless you have a hard requirement to make the sync faster at the expense of accuracy. The minimum allowed value for this field is 1, and the maximum is 364.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_google_analytics_v4_credentials_authenticate_via_google_oauth` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_v4_credentials_authenticate_via_google_oauth))
-- `source_google_analytics_v4_credentials_service_account_key_authentication` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_v4_credentials_service_account_key_authentication))
-- `source_google_analytics_v4_update_credentials_authenticate_via_google_oauth` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_v4_update_credentials_authenticate_via_google_oauth))
-- `source_google_analytics_v4_update_credentials_service_account_key_authentication` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_v4_update_credentials_service_account_key_authentication))
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_v4_credentials_authenticate_via_google_oauth`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Google Analytics developer application.
-- `client_secret` (String) The Client Secret of your Google Analytics developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_v4_credentials_service_account_key_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Service"]
-- `credentials_json` (String) The JSON key of the service account to use for authorization
-
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_v4_update_credentials_authenticate_via_google_oauth`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Google Analytics developer application.
-- `client_secret` (String) The Client Secret of your Google Analytics developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_v4_update_credentials_service_account_key_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Service"]
-- `credentials_json` (String) The JSON key of the service account to use for authorization
-
-
diff --git a/docs/data-sources/source_google_directory.md b/docs/data-sources/source_google_directory.md
index 6119cc061..1c0cad5b4 100644
--- a/docs/data-sources/source_google_directory.md
+++ b/docs/data-sources/source_google_directory.md
@@ -14,7 +14,6 @@ SourceGoogleDirectory DataSource
```terraform
data "airbyte_source_google_directory" "my_source_googledirectory" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,77 +25,12 @@ data "airbyte_source_google_directory" "my_source_googledirectory" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Google APIs use the OAuth 2.0 protocol for authentication and authorization. The Source supports Web server application and Service accounts scenarios. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["google-directory"]
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_google_directory_google_credentials_service_account_key` (Attributes) For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email. (see [below for nested schema](#nestedatt--configuration--credentials--source_google_directory_google_credentials_service_account_key))
-- `source_google_directory_google_credentials_sign_in_via_google_o_auth` (Attributes) For these scenario user only needs to give permission to read Google Directory data. (see [below for nested schema](#nestedatt--configuration--credentials--source_google_directory_google_credentials_sign_in_via_google_o_auth))
-- `source_google_directory_update_google_credentials_service_account_key` (Attributes) For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email. (see [below for nested schema](#nestedatt--configuration--credentials--source_google_directory_update_google_credentials_service_account_key))
-- `source_google_directory_update_google_credentials_sign_in_via_google_o_auth` (Attributes) For these scenario user only needs to give permission to read Google Directory data. (see [below for nested schema](#nestedatt--configuration--credentials--source_google_directory_update_google_credentials_sign_in_via_google_o_auth))
-
-
-### Nested Schema for `configuration.credentials.source_google_directory_google_credentials_service_account_key`
-
-Read-Only:
-
-- `credentials_json` (String) The contents of the JSON service account key. See the docs for more information on how to generate this key.
-- `credentials_title` (String) must be one of ["Service accounts"]
-Authentication Scenario
-- `email` (String) The email of the user, which has permissions to access the Google Workspace Admin APIs.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_directory_google_credentials_sign_in_via_google_o_auth`
-
-Read-Only:
-
-- `client_id` (String) The Client ID of the developer application.
-- `client_secret` (String) The Client Secret of the developer application.
-- `credentials_title` (String) must be one of ["Web server app"]
-Authentication Scenario
-- `refresh_token` (String) The Token for obtaining a new access token.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_directory_update_google_credentials_service_account_key`
-
-Read-Only:
-
-- `credentials_json` (String) The contents of the JSON service account key. See the docs for more information on how to generate this key.
-- `credentials_title` (String) must be one of ["Service accounts"]
-Authentication Scenario
-- `email` (String) The email of the user, which has permissions to access the Google Workspace Admin APIs.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_directory_update_google_credentials_sign_in_via_google_o_auth`
-
-Read-Only:
-
-- `client_id` (String) The Client ID of the developer application.
-- `client_secret` (String) The Client Secret of the developer application.
-- `credentials_title` (String) must be one of ["Web server app"]
-Authentication Scenario
-- `refresh_token` (String) The Token for obtaining a new access token.
-
diff --git a/docs/data-sources/source_google_drive.md b/docs/data-sources/source_google_drive.md
new file mode 100644
index 000000000..b748389a3
--- /dev/null
+++ b/docs/data-sources/source_google_drive.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_source_google_drive Data Source - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ SourceGoogleDrive DataSource
+---
+
+# airbyte_source_google_drive (Data Source)
+
+SourceGoogleDrive DataSource
+
+## Example Usage
+
+```terraform
+data "airbyte_source_google_drive" "my_source_googledrive" {
+ source_id = "...my_source_id..."
+}
+```
+
+
+## Schema
+
+### Required
+
+- `source_id` (String)
+
+### Read-Only
+
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
+- `name` (String)
+- `source_type` (String)
+- `workspace_id` (String)
+
+
diff --git a/docs/data-sources/source_google_pagespeed_insights.md b/docs/data-sources/source_google_pagespeed_insights.md
index 722c1e312..f6b040f3a 100644
--- a/docs/data-sources/source_google_pagespeed_insights.md
+++ b/docs/data-sources/source_google_pagespeed_insights.md
@@ -14,7 +14,6 @@ SourceGooglePagespeedInsights DataSource
```terraform
data "airbyte_source_google_pagespeed_insights" "my_source_googlepagespeedinsights" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_google_pagespeed_insights" "my_source_googlepagespeedinsigh
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited when using without API Key. Creating and using the API key therefore is recommended. The key is case sensitive.
-- `categories` (List of String) Defines which Lighthouse category to run. One or many of: "accessibility", "best-practices", "performance", "pwa", "seo".
-- `source_type` (String) must be one of ["google-pagespeed-insights"]
-- `strategies` (List of String) The analyses strategy to use. Either "desktop" or "mobile".
-- `urls` (List of String) The URLs to retrieve pagespeed information from. The connector will attempt to sync PageSpeed reports for all the defined URLs. Format: https://(www.)url.domain
-
diff --git a/docs/data-sources/source_google_search_console.md b/docs/data-sources/source_google_search_console.md
index c13b41baa..3570fda5b 100644
--- a/docs/data-sources/source_google_search_console.md
+++ b/docs/data-sources/source_google_search_console.md
@@ -14,7 +14,6 @@ SourceGoogleSearchConsole DataSource
```terraform
data "airbyte_source_google_search_console" "my_source_googlesearchconsole" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,92 +25,12 @@ data "airbyte_source_google_search_console" "my_source_googlesearchconsole" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `authorization` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization))
-- `custom_reports` (String) (DEPRCATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports.
-- `custom_reports_array` (Attributes List) You can add your Custom Analytics report by creating one. (see [below for nested schema](#nestedatt--configuration--custom_reports_array))
-- `data_state` (String) must be one of ["final", "all"]
-If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.
-- `end_date` (String) UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward.
-- `site_urls` (List of String) The URLs of the website property attached to your GSC account. Learn more about properties here.
-- `source_type` (String) must be one of ["google-search-console"]
-- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.
-
-
-### Nested Schema for `configuration.authorization`
-
-Read-Only:
-
-- `source_google_search_console_authentication_type_o_auth` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization--source_google_search_console_authentication_type_o_auth))
-- `source_google_search_console_authentication_type_service_account_key_authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization--source_google_search_console_authentication_type_service_account_key_authentication))
-- `source_google_search_console_update_authentication_type_o_auth` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization--source_google_search_console_update_authentication_type_o_auth))
-- `source_google_search_console_update_authentication_type_service_account_key_authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization--source_google_search_console_update_authentication_type_service_account_key_authentication))
-
-
-### Nested Schema for `configuration.authorization.source_google_search_console_authentication_type_o_auth`
-
-Read-Only:
-
-- `access_token` (String) Access token for making authenticated requests. Read more here.
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The client ID of your Google Search Console developer application. Read more here.
-- `client_secret` (String) The client secret of your Google Search Console developer application. Read more here.
-- `refresh_token` (String) The token for obtaining a new access token. Read more here.
-
-
-
-### Nested Schema for `configuration.authorization.source_google_search_console_authentication_type_service_account_key_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Service"]
-- `email` (String) The email of the user which has permissions to access the Google Workspace Admin APIs.
-- `service_account_info` (String) The JSON key of the service account to use for authorization. Read more here.
-
-
-
-### Nested Schema for `configuration.authorization.source_google_search_console_update_authentication_type_o_auth`
-
-Read-Only:
-
-- `access_token` (String) Access token for making authenticated requests. Read more here.
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The client ID of your Google Search Console developer application. Read more here.
-- `client_secret` (String) The client secret of your Google Search Console developer application. Read more here.
-- `refresh_token` (String) The token for obtaining a new access token. Read more here.
-
-
-
-### Nested Schema for `configuration.authorization.source_google_search_console_update_authentication_type_service_account_key_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Service"]
-- `email` (String) The email of the user which has permissions to access the Google Workspace Admin APIs.
-- `service_account_info` (String) The JSON key of the service account to use for authorization. Read more here.
-
-
-
-
-### Nested Schema for `configuration.custom_reports_array`
-
-Read-Only:
-
-- `dimensions` (List of String) A list of dimensions (country, date, device, page, query)
-- `name` (String) The name of the custom report, this name would be used as stream name
-
diff --git a/docs/data-sources/source_google_sheets.md b/docs/data-sources/source_google_sheets.md
index 9a1e2595f..355a7059d 100644
--- a/docs/data-sources/source_google_sheets.md
+++ b/docs/data-sources/source_google_sheets.md
@@ -14,7 +14,6 @@ SourceGoogleSheets DataSource
```terraform
data "airbyte_source_google_sheets" "my_source_googlesheets" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,73 +25,12 @@ data "airbyte_source_google_sheets" "my_source_googlesheets" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials))
-- `names_conversion` (Boolean) Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based.
-- `source_type` (String) must be one of ["google-sheets"]
-- `spreadsheet_id` (String) Enter the link to the Google spreadsheet you want to sync. To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_google_sheets_authentication_authenticate_via_google_o_auth` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials--source_google_sheets_authentication_authenticate_via_google_o_auth))
-- `source_google_sheets_authentication_service_account_key_authentication` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials--source_google_sheets_authentication_service_account_key_authentication))
-- `source_google_sheets_update_authentication_authenticate_via_google_o_auth` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials--source_google_sheets_update_authentication_authenticate_via_google_o_auth))
-- `source_google_sheets_update_authentication_service_account_key_authentication` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials--source_google_sheets_update_authentication_service_account_key_authentication))
-
-
-### Nested Schema for `configuration.credentials.source_google_sheets_authentication_authenticate_via_google_o_auth`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) Enter your Google application's Client ID. See Google's documentation for more information.
-- `client_secret` (String) Enter your Google application's Client Secret. See Google's documentation for more information.
-- `refresh_token` (String) Enter your Google application's refresh token. See Google's documentation for more information.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_sheets_authentication_service_account_key_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Service"]
-- `service_account_info` (String) The JSON key of the service account to use for authorization. Read more here.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_sheets_update_authentication_authenticate_via_google_o_auth`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) Enter your Google application's Client ID. See Google's documentation for more information.
-- `client_secret` (String) Enter your Google application's Client Secret. See Google's documentation for more information.
-- `refresh_token` (String) Enter your Google application's refresh token. See Google's documentation for more information.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_sheets_update_authentication_service_account_key_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Service"]
-- `service_account_info` (String) The JSON key of the service account to use for authorization. Read more here.
-
diff --git a/docs/data-sources/source_google_webfonts.md b/docs/data-sources/source_google_webfonts.md
index 20e758d7f..76e3af3eb 100644
--- a/docs/data-sources/source_google_webfonts.md
+++ b/docs/data-sources/source_google_webfonts.md
@@ -14,7 +14,6 @@ SourceGoogleWebfonts DataSource
```terraform
data "airbyte_source_google_webfonts" "my_source_googlewebfonts" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_google_webfonts" "my_source_googlewebfonts" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `alt` (String) Optional, Available params- json, media, proto
-- `api_key` (String) API key is required to access google apis, For getting your's goto google console and generate api key for Webfonts
-- `pretty_print` (String) Optional, boolean type
-- `sort` (String) Optional, to find how to sort
-- `source_type` (String) must be one of ["google-webfonts"]
-
diff --git a/docs/data-sources/source_google_workspace_admin_reports.md b/docs/data-sources/source_google_workspace_admin_reports.md
index 5dc2f07f8..fa4febe8d 100644
--- a/docs/data-sources/source_google_workspace_admin_reports.md
+++ b/docs/data-sources/source_google_workspace_admin_reports.md
@@ -14,7 +14,6 @@ SourceGoogleWorkspaceAdminReports DataSource
```terraform
data "airbyte_source_google_workspace_admin_reports" "my_source_googleworkspaceadminreports" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_google_workspace_admin_reports" "my_source_googleworkspacea
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials_json` (String) The contents of the JSON service account key. See the docs for more information on how to generate this key.
-- `email` (String) The email of the user, which has permissions to access the Google Workspace Admin APIs.
-- `lookback` (Number) Sets the range of time shown in the report. Reports API allows from up to 180 days ago.
-- `source_type` (String) must be one of ["google-workspace-admin-reports"]
-
diff --git a/docs/data-sources/source_greenhouse.md b/docs/data-sources/source_greenhouse.md
index 8530aaa59..710b07762 100644
--- a/docs/data-sources/source_greenhouse.md
+++ b/docs/data-sources/source_greenhouse.md
@@ -14,7 +14,6 @@ SourceGreenhouse DataSource
```terraform
data "airbyte_source_greenhouse" "my_source_greenhouse" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_greenhouse" "my_source_greenhouse" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Greenhouse API Key. See the docs for more information on how to generate this key.
-- `source_type` (String) must be one of ["greenhouse"]
-
diff --git a/docs/data-sources/source_gridly.md b/docs/data-sources/source_gridly.md
index 7f10be2e7..976eb75fe 100644
--- a/docs/data-sources/source_gridly.md
+++ b/docs/data-sources/source_gridly.md
@@ -14,7 +14,6 @@ SourceGridly DataSource
```terraform
data "airbyte_source_gridly" "my_source_gridly" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_gridly" "my_source_gridly" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String)
-- `grid_id` (String) ID of a grid, or can be ID of a branch
-- `source_type` (String) must be one of ["gridly"]
-
diff --git a/docs/data-sources/source_harvest.md b/docs/data-sources/source_harvest.md
index db58945e7..cf421ee65 100644
--- a/docs/data-sources/source_harvest.md
+++ b/docs/data-sources/source_harvest.md
@@ -14,7 +14,6 @@ SourceHarvest DataSource
```terraform
data "airbyte_source_harvest" "my_source_harvest" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,90 +25,12 @@ data "airbyte_source_harvest" "my_source_harvest" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `account_id` (String) Harvest account ID. Required for all Harvest requests in pair with Personal Access Token
-- `credentials` (Attributes) Choose how to authenticate to Harvest. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `replication_end_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.
-- `replication_start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-- `source_type` (String) must be one of ["harvest"]
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth` (Attributes) Choose how to authenticate to Harvest. (see [below for nested schema](#nestedatt--configuration--credentials--source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth))
-- `source_harvest_authentication_mechanism_authenticate_with_personal_access_token` (Attributes) Choose how to authenticate to Harvest. (see [below for nested schema](#nestedatt--configuration--credentials--source_harvest_authentication_mechanism_authenticate_with_personal_access_token))
-- `source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth` (Attributes) Choose how to authenticate to Harvest. (see [below for nested schema](#nestedatt--configuration--credentials--source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth))
-- `source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token` (Attributes) Choose how to authenticate to Harvest. (see [below for nested schema](#nestedatt--configuration--credentials--source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token))
-
-
-### Nested Schema for `configuration.credentials.source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Harvest developer application.
-- `client_secret` (String) The Client Secret of your Harvest developer application.
-- `refresh_token` (String) Refresh Token to renew the expired Access Token.
-
-
-
-### Nested Schema for `configuration.credentials.source_harvest_authentication_mechanism_authenticate_with_personal_access_token`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `api_token` (String) Log into Harvest and then create new personal access token.
-- `auth_type` (String) must be one of ["Token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Harvest developer application.
-- `client_secret` (String) The Client Secret of your Harvest developer application.
-- `refresh_token` (String) Refresh Token to renew the expired Access Token.
-
-
-
-### Nested Schema for `configuration.credentials.source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `api_token` (String) Log into Harvest and then create new personal access token.
-- `auth_type` (String) must be one of ["Token"]
-
diff --git a/docs/data-sources/source_hubplanner.md b/docs/data-sources/source_hubplanner.md
index 88ec25a98..f96ecad7a 100644
--- a/docs/data-sources/source_hubplanner.md
+++ b/docs/data-sources/source_hubplanner.md
@@ -14,7 +14,6 @@ SourceHubplanner DataSource
```terraform
data "airbyte_source_hubplanner" "my_source_hubplanner" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_hubplanner" "my_source_hubplanner" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Hubplanner API key. See https://github.com/hubplanner/API#authentication for more details.
-- `source_type` (String) must be one of ["hubplanner"]
-
diff --git a/docs/data-sources/source_hubspot.md b/docs/data-sources/source_hubspot.md
index 46f3ebcf4..20793d6fc 100644
--- a/docs/data-sources/source_hubspot.md
+++ b/docs/data-sources/source_hubspot.md
@@ -14,7 +14,6 @@ SourceHubspot DataSource
```terraform
data "airbyte_source_hubspot" "my_source_hubspot" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,76 +25,12 @@ data "airbyte_source_hubspot" "my_source_hubspot" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["hubspot"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_hubspot_authentication_o_auth` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials--source_hubspot_authentication_o_auth))
-- `source_hubspot_authentication_private_app` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials--source_hubspot_authentication_private_app))
-- `source_hubspot_update_authentication_o_auth` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials--source_hubspot_update_authentication_o_auth))
-- `source_hubspot_update_authentication_private_app` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials--source_hubspot_update_authentication_private_app))
-
-
-### Nested Schema for `configuration.credentials.source_hubspot_authentication_o_auth`
-
-Read-Only:
-
-- `client_id` (String) The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID.
-- `client_secret` (String) The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret.
-- `credentials_title` (String) must be one of ["OAuth Credentials"]
-Name of the credentials
-- `refresh_token` (String) Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.
-
-
-
-### Nested Schema for `configuration.credentials.source_hubspot_authentication_private_app`
-
-Read-Only:
-
-- `access_token` (String) HubSpot Access token. See the Hubspot docs if you need help finding this token.
-- `credentials_title` (String) must be one of ["Private App Credentials"]
-Name of the credentials set
-
-
-
-### Nested Schema for `configuration.credentials.source_hubspot_update_authentication_o_auth`
-
-Read-Only:
-
-- `client_id` (String) The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID.
-- `client_secret` (String) The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret.
-- `credentials_title` (String) must be one of ["OAuth Credentials"]
-Name of the credentials
-- `refresh_token` (String) Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.
-
-
-
-### Nested Schema for `configuration.credentials.source_hubspot_update_authentication_private_app`
-
-Read-Only:
-
-- `access_token` (String) HubSpot Access token. See the Hubspot docs if you need help finding this token.
-- `credentials_title` (String) must be one of ["Private App Credentials"]
-Name of the credentials set
-
diff --git a/docs/data-sources/source_insightly.md b/docs/data-sources/source_insightly.md
index 11276454f..df18fbd22 100644
--- a/docs/data-sources/source_insightly.md
+++ b/docs/data-sources/source_insightly.md
@@ -14,7 +14,6 @@ SourceInsightly DataSource
```terraform
data "airbyte_source_insightly" "my_source_insightly" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_insightly" "my_source_insightly" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `source_type` (String) must be one of ["insightly"]
-- `start_date` (String) The date from which you'd like to replicate data for Insightly in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. Note that it will be used only for incremental streams.
-- `token` (String) Your Insightly API token.
-
diff --git a/docs/data-sources/source_instagram.md b/docs/data-sources/source_instagram.md
index 25108283f..441051576 100644
--- a/docs/data-sources/source_instagram.md
+++ b/docs/data-sources/source_instagram.md
@@ -14,7 +14,6 @@ SourceInstagram DataSource
```terraform
data "airbyte_source_instagram" "my_source_instagram" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_instagram" "my_source_instagram" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) The value of the access token generated with instagram_basic, instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram Public Content Access permissions. See the docs for more information
-- `client_id` (String) The Client ID for your Oauth application
-- `client_secret` (String) The Client Secret for your Oauth application
-- `source_type` (String) must be one of ["instagram"]
-- `start_date` (String) The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-
diff --git a/docs/data-sources/source_instatus.md b/docs/data-sources/source_instatus.md
index 63719d228..123f360b4 100644
--- a/docs/data-sources/source_instatus.md
+++ b/docs/data-sources/source_instatus.md
@@ -14,7 +14,6 @@ SourceInstatus DataSource
```terraform
data "airbyte_source_instatus" "my_source_instatus" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_instatus" "my_source_instatus" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Instatus REST API key
-- `source_type` (String) must be one of ["instatus"]
-
diff --git a/docs/data-sources/source_intercom.md b/docs/data-sources/source_intercom.md
index 4c16c3ba4..4eae4cbe9 100644
--- a/docs/data-sources/source_intercom.md
+++ b/docs/data-sources/source_intercom.md
@@ -14,7 +14,6 @@ SourceIntercom DataSource
```terraform
data "airbyte_source_intercom" "my_source_intercom" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_intercom" "my_source_intercom" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) Access token for making authenticated requests. See the Intercom docs for more information.
-- `client_id` (String) Client Id for your Intercom application.
-- `client_secret` (String) Client Secret for your Intercom application.
-- `source_type` (String) must be one of ["intercom"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_ip2whois.md b/docs/data-sources/source_ip2whois.md
index eec3c2c07..5f9333cc0 100644
--- a/docs/data-sources/source_ip2whois.md
+++ b/docs/data-sources/source_ip2whois.md
@@ -14,7 +14,6 @@ SourceIp2whois DataSource
```terraform
data "airbyte_source_ip2whois" "my_source_ip2whois" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_ip2whois" "my_source_ip2whois" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Your API Key. See here.
-- `domain` (String) Domain name. See here.
-- `source_type` (String) must be one of ["ip2whois"]
-
diff --git a/docs/data-sources/source_iterable.md b/docs/data-sources/source_iterable.md
index 468137bf6..c8dd782ea 100644
--- a/docs/data-sources/source_iterable.md
+++ b/docs/data-sources/source_iterable.md
@@ -14,7 +14,6 @@ SourceIterable DataSource
```terraform
data "airbyte_source_iterable" "my_source_iterable" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_iterable" "my_source_iterable" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Iterable API Key. See the docs for more information on how to obtain this key.
-- `source_type` (String) must be one of ["iterable"]
-- `start_date` (String) The date from which you'd like to replicate data for Iterable, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-
diff --git a/docs/data-sources/source_jira.md b/docs/data-sources/source_jira.md
index a7ea1f407..741ebb013 100644
--- a/docs/data-sources/source_jira.md
+++ b/docs/data-sources/source_jira.md
@@ -14,7 +14,6 @@ SourceJira DataSource
```terraform
data "airbyte_source_jira" "my_source_jira" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,29 +25,12 @@ data "airbyte_source_jira" "my_source_jira" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_token` (String) Jira API Token. See the docs for more information on how to generate this key. API Token is used for Authorization to your account by BasicAuth.
-- `domain` (String) The Domain for your Jira account, e.g. airbyteio.atlassian.net, airbyteio.jira.com, jira.your-domain.com
-- `email` (String) The user email for your Jira account which you used to generate the API token. This field is used for Authorization to your account by BasicAuth.
-- `enable_experimental_streams` (Boolean) Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info.
-- `expand_issue_changelog` (Boolean) Expand the changelog when replicating issues.
-- `projects` (List of String) List of Jira project keys to replicate data for, or leave it empty if you want to replicate data for all projects.
-- `render_fields` (Boolean) Render issue fields in HTML format in addition to Jira JSON-like format.
-- `source_type` (String) must be one of ["jira"]
-- `start_date` (String) The date from which you want to replicate data from Jira, use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies to certain streams, and only data generated on or after the start date will be replicated. Or leave it empty if you want to replicate all data. For more information, refer to the documentation.
-
diff --git a/docs/data-sources/source_k6_cloud.md b/docs/data-sources/source_k6_cloud.md
index f664b0a39..4e9987d57 100644
--- a/docs/data-sources/source_k6_cloud.md
+++ b/docs/data-sources/source_k6_cloud.md
@@ -14,7 +14,6 @@ SourceK6Cloud DataSource
```terraform
data "airbyte_source_k6_cloud" "my_source_k6cloud" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_k6_cloud" "my_source_k6cloud" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_token` (String) Your API Token. See here. The key is case sensitive.
-- `source_type` (String) must be one of ["k6-cloud"]
-
diff --git a/docs/data-sources/source_klarna.md b/docs/data-sources/source_klarna.md
index 6102e86a7..d0640b0e1 100644
--- a/docs/data-sources/source_klarna.md
+++ b/docs/data-sources/source_klarna.md
@@ -14,7 +14,6 @@ SourceKlarna DataSource
```terraform
data "airbyte_source_klarna" "my_source_klarna" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_klarna" "my_source_klarna" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `password` (String) A string which is associated with your Merchant ID and is used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)
-- `playground` (Boolean) Propertie defining if connector is used against playground or production environment
-- `region` (String) must be one of ["eu", "us", "oc"]
-Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'
-- `source_type` (String) must be one of ["klarna"]
-- `username` (String) Consists of your Merchant ID (eid) - a unique number that identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)
-
diff --git a/docs/data-sources/source_klaviyo.md b/docs/data-sources/source_klaviyo.md
index d4a7f5180..0a77a6582 100644
--- a/docs/data-sources/source_klaviyo.md
+++ b/docs/data-sources/source_klaviyo.md
@@ -14,7 +14,6 @@ SourceKlaviyo DataSource
```terraform
data "airbyte_source_klaviyo" "my_source_klaviyo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_klaviyo" "my_source_klaviyo" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Klaviyo API Key. See our docs if you need help finding this key.
-- `source_type` (String) must be one of ["klaviyo"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_kustomer_singer.md b/docs/data-sources/source_kustomer_singer.md
index 5a4537dca..8c54ff0a6 100644
--- a/docs/data-sources/source_kustomer_singer.md
+++ b/docs/data-sources/source_kustomer_singer.md
@@ -14,7 +14,6 @@ SourceKustomerSinger DataSource
```terraform
data "airbyte_source_kustomer_singer" "my_source_kustomersinger" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_kustomer_singer" "my_source_kustomersinger" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_token` (String) Kustomer API Token. See the docs on how to obtain this
-- `source_type` (String) must be one of ["kustomer-singer"]
-- `start_date` (String) The date from which you'd like to replicate the data
-
diff --git a/docs/data-sources/source_kyve.md b/docs/data-sources/source_kyve.md
index 304bbf03c..0f038d0c9 100644
--- a/docs/data-sources/source_kyve.md
+++ b/docs/data-sources/source_kyve.md
@@ -14,7 +14,6 @@ SourceKyve DataSource
```terraform
data "airbyte_source_kyve" "my_source_kyve" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_kyve" "my_source_kyve" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `max_pages` (Number) The maximum amount of pages to go trough. Set to 'null' for all pages.
-- `page_size` (Number) The pagesize for pagination, smaller numbers are used in integration tests.
-- `pool_ids` (String) The IDs of the KYVE storage pool you want to archive. (Comma separated)
-- `source_type` (String) must be one of ["kyve"]
-- `start_ids` (String) The start-id defines, from which bundle id the pipeline should start to extract the data (Comma separated)
-- `url_base` (String) URL to the KYVE Chain API.
-
diff --git a/docs/data-sources/source_launchdarkly.md b/docs/data-sources/source_launchdarkly.md
index 58e8a139a..8061b8517 100644
--- a/docs/data-sources/source_launchdarkly.md
+++ b/docs/data-sources/source_launchdarkly.md
@@ -14,7 +14,6 @@ SourceLaunchdarkly DataSource
```terraform
data "airbyte_source_launchdarkly" "my_source_launchdarkly" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_launchdarkly" "my_source_launchdarkly" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) Your Access token. See here.
-- `source_type` (String) must be one of ["launchdarkly"]
-
diff --git a/docs/data-sources/source_lemlist.md b/docs/data-sources/source_lemlist.md
index c08e60316..2816b7a34 100644
--- a/docs/data-sources/source_lemlist.md
+++ b/docs/data-sources/source_lemlist.md
@@ -14,7 +14,6 @@ SourceLemlist DataSource
```terraform
data "airbyte_source_lemlist" "my_source_lemlist" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_lemlist" "my_source_lemlist" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Lemlist API key,
-- `source_type` (String) must be one of ["lemlist"]
-
diff --git a/docs/data-sources/source_lever_hiring.md b/docs/data-sources/source_lever_hiring.md
index 1c78dc637..9b3f2b685 100644
--- a/docs/data-sources/source_lever_hiring.md
+++ b/docs/data-sources/source_lever_hiring.md
@@ -14,7 +14,6 @@ SourceLeverHiring DataSource
```terraform
data "airbyte_source_lever_hiring" "my_source_leverhiring" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,74 +25,12 @@ data "airbyte_source_lever_hiring" "my_source_leverhiring" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `environment` (String) must be one of ["Production", "Sandbox"]
-The environment in which you'd like to replicate data for Lever. This is used to determine which Lever API endpoint to use.
-- `source_type` (String) must be one of ["lever-hiring"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. Note that it will be used only in the following incremental streams: comments, commits, and issues.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials--source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key))
-- `source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials--source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth))
-- `source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_api_key` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials--source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_api_key))
-- `source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_o_auth` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials--source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_o_auth))
-
-
-### Nested Schema for `configuration.credentials.source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key`
-
-Read-Only:
-
-- `api_key` (String) The Api Key of your Lever Hiring account.
-- `auth_type` (String) must be one of ["Api Key"]
-
-
-
-### Nested Schema for `configuration.credentials.source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Lever Hiring developer application.
-- `client_secret` (String) The Client Secret of your Lever Hiring developer application.
-- `refresh_token` (String) The token for obtaining new access token.
-
-
-
-### Nested Schema for `configuration.credentials.source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_api_key`
-
-Read-Only:
-
-- `api_key` (String) The Api Key of your Lever Hiring account.
-- `auth_type` (String) must be one of ["Api Key"]
-
-
-
-### Nested Schema for `configuration.credentials.source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_o_auth`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Lever Hiring developer application.
-- `client_secret` (String) The Client Secret of your Lever Hiring developer application.
-- `refresh_token` (String) The token for obtaining new access token.
-
diff --git a/docs/data-sources/source_linkedin_ads.md b/docs/data-sources/source_linkedin_ads.md
index 2cb32b690..aaa556947 100644
--- a/docs/data-sources/source_linkedin_ads.md
+++ b/docs/data-sources/source_linkedin_ads.md
@@ -14,7 +14,6 @@ SourceLinkedinAds DataSource
```terraform
data "airbyte_source_linkedin_ads" "my_source_linkedinads" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,86 +25,12 @@ data "airbyte_source_linkedin_ads" "my_source_linkedinads" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `account_ids` (List of Number) Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs.
-- `ad_analytics_reports` (Attributes List) (see [below for nested schema](#nestedatt--configuration--ad_analytics_reports))
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["linkedin-ads"]
-- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.
-
-
-### Nested Schema for `configuration.ad_analytics_reports`
-
-Read-Only:
-
-- `name` (String) The name for the custom report.
-- `pivot_by` (String) must be one of ["COMPANY", "ACCOUNT", "SHARE", "CAMPAIGN", "CREATIVE", "CAMPAIGN_GROUP", "CONVERSION", "CONVERSATION_NODE", "CONVERSATION_NODE_OPTION_INDEX", "SERVING_LOCATION", "CARD_INDEX", "MEMBER_COMPANY_SIZE", "MEMBER_INDUSTRY", "MEMBER_SENIORITY", "MEMBER_JOB_TITLE ", "MEMBER_JOB_FUNCTION ", "MEMBER_COUNTRY_V2 ", "MEMBER_REGION_V2", "MEMBER_COMPANY", "PLACEMENT_NAME", "IMPRESSION_DEVICE_TYPE"]
-Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.
-- `time_granularity` (String) must be one of ["ALL", "DAILY", "MONTHLY", "YEARLY"]
-Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.
-
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_linkedin_ads_authentication_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_ads_authentication_access_token))
-- `source_linkedin_ads_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_ads_authentication_o_auth2_0))
-- `source_linkedin_ads_update_authentication_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_ads_update_authentication_access_token))
-- `source_linkedin_ads_update_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_ads_update_authentication_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_ads_authentication_access_token`
-
-Read-Only:
-
-- `access_token` (String) The access token generated for your developer application. Refer to our documentation for more information.
-- `auth_method` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_ads_authentication_o_auth2_0`
-
-Read-Only:
-
-- `auth_method` (String) must be one of ["oAuth2.0"]
-- `client_id` (String) The client ID of your developer application. Refer to our documentation for more information.
-- `client_secret` (String) The client secret of your developer application. Refer to our documentation for more information.
-- `refresh_token` (String) The key to refresh the expired access token. Refer to our documentation for more information.
-
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_ads_update_authentication_access_token`
-
-Read-Only:
-
-- `access_token` (String) The access token generated for your developer application. Refer to our documentation for more information.
-- `auth_method` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_ads_update_authentication_o_auth2_0`
-
-Read-Only:
-
-- `auth_method` (String) must be one of ["oAuth2.0"]
-- `client_id` (String) The client ID of your developer application. Refer to our documentation for more information.
-- `client_secret` (String) The client secret of your developer application. Refer to our documentation for more information.
-- `refresh_token` (String) The key to refresh the expired access token. Refer to our documentation for more information.
-
diff --git a/docs/data-sources/source_linkedin_pages.md b/docs/data-sources/source_linkedin_pages.md
index 83897879a..1bbe02716 100644
--- a/docs/data-sources/source_linkedin_pages.md
+++ b/docs/data-sources/source_linkedin_pages.md
@@ -14,7 +14,6 @@ SourceLinkedinPages DataSource
```terraform
data "airbyte_source_linkedin_pages" "my_source_linkedinpages" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,72 +25,12 @@ data "airbyte_source_linkedin_pages" "my_source_linkedinpages" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `org_id` (String) Specify the Organization ID
-- `source_type` (String) must be one of ["linkedin-pages"]
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_linkedin_pages_authentication_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_pages_authentication_access_token))
-- `source_linkedin_pages_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_pages_authentication_o_auth2_0))
-- `source_linkedin_pages_update_authentication_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_pages_update_authentication_access_token))
-- `source_linkedin_pages_update_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_pages_update_authentication_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_pages_authentication_access_token`
-
-Read-Only:
-
-- `access_token` (String) The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
-- `auth_method` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_pages_authentication_o_auth2_0`
-
-Read-Only:
-
-- `auth_method` (String) must be one of ["oAuth2.0"]
-- `client_id` (String) The client ID of the LinkedIn developer application.
-- `client_secret` (String) The client secret of the LinkedIn developer application.
-- `refresh_token` (String) The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
-
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_pages_update_authentication_access_token`
-
-Read-Only:
-
-- `access_token` (String) The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
-- `auth_method` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_pages_update_authentication_o_auth2_0`
-
-Read-Only:
-
-- `auth_method` (String) must be one of ["oAuth2.0"]
-- `client_id` (String) The client ID of the LinkedIn developer application.
-- `client_secret` (String) The client secret of the LinkedIn developer application.
-- `refresh_token` (String) The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
-
diff --git a/docs/data-sources/source_linnworks.md b/docs/data-sources/source_linnworks.md
index 4b1de1a3f..3a2bcfdfd 100644
--- a/docs/data-sources/source_linnworks.md
+++ b/docs/data-sources/source_linnworks.md
@@ -14,7 +14,6 @@ SourceLinnworks DataSource
```terraform
data "airbyte_source_linnworks" "my_source_linnworks" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_linnworks" "my_source_linnworks" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `application_id` (String) Linnworks Application ID
-- `application_secret` (String) Linnworks Application Secret
-- `source_type` (String) must be one of ["linnworks"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-- `token` (String)
-
diff --git a/docs/data-sources/source_lokalise.md b/docs/data-sources/source_lokalise.md
index df63115c4..f056d4f99 100644
--- a/docs/data-sources/source_lokalise.md
+++ b/docs/data-sources/source_lokalise.md
@@ -14,7 +14,6 @@ SourceLokalise DataSource
```terraform
data "airbyte_source_lokalise" "my_source_lokalise" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_lokalise" "my_source_lokalise" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Lokalise API Key with read-access. Available at Profile settings > API tokens. See here.
-- `project_id` (String) Lokalise project ID. Available at Project Settings > General.
-- `source_type` (String) must be one of ["lokalise"]
-
diff --git a/docs/data-sources/source_mailchimp.md b/docs/data-sources/source_mailchimp.md
index 30257bf41..c23f08560 100644
--- a/docs/data-sources/source_mailchimp.md
+++ b/docs/data-sources/source_mailchimp.md
@@ -14,7 +14,6 @@ SourceMailchimp DataSource
```terraform
data "airbyte_source_mailchimp" "my_source_mailchimp" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,72 +25,12 @@ data "airbyte_source_mailchimp" "my_source_mailchimp" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `campaign_id` (String)
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["mailchimp"]
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_mailchimp_authentication_api_key` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_mailchimp_authentication_api_key))
-- `source_mailchimp_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_mailchimp_authentication_o_auth2_0))
-- `source_mailchimp_update_authentication_api_key` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_mailchimp_update_authentication_api_key))
-- `source_mailchimp_update_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_mailchimp_update_authentication_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_mailchimp_authentication_api_key`
-
-Read-Only:
-
-- `apikey` (String) Mailchimp API Key. See the docs for information on how to generate this key.
-- `auth_type` (String) must be one of ["apikey"]
-
-
-
-### Nested Schema for `configuration.credentials.source_mailchimp_authentication_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) An access token generated using the above client ID and secret.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-
-
-
-### Nested Schema for `configuration.credentials.source_mailchimp_update_authentication_api_key`
-
-Read-Only:
-
-- `apikey` (String) Mailchimp API Key. See the docs for information on how to generate this key.
-- `auth_type` (String) must be one of ["apikey"]
-
-
-
-### Nested Schema for `configuration.credentials.source_mailchimp_update_authentication_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) An access token generated using the above client ID and secret.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-
diff --git a/docs/data-sources/source_mailgun.md b/docs/data-sources/source_mailgun.md
index dfb97bde8..7a66cc04c 100644
--- a/docs/data-sources/source_mailgun.md
+++ b/docs/data-sources/source_mailgun.md
@@ -14,7 +14,6 @@ SourceMailgun DataSource
```terraform
data "airbyte_source_mailgun" "my_source_mailgun" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_mailgun" "my_source_mailgun" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `domain_region` (String) Domain region code. 'EU' or 'US' are possible values. The default is 'US'.
-- `private_key` (String) Primary account API key to access your Mailgun data.
-- `source_type` (String) must be one of ["mailgun"]
-- `start_date` (String) UTC date and time in the format 2020-10-01 00:00:00. Any data before this date will not be replicated. If omitted, defaults to 3 days ago.
-
diff --git a/docs/data-sources/source_mailjet_sms.md b/docs/data-sources/source_mailjet_sms.md
index 826541e2d..9caabb4aa 100644
--- a/docs/data-sources/source_mailjet_sms.md
+++ b/docs/data-sources/source_mailjet_sms.md
@@ -14,7 +14,6 @@ SourceMailjetSms DataSource
```terraform
data "airbyte_source_mailjet_sms" "my_source_mailjetsms" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_mailjet_sms" "my_source_mailjetsms" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `end_date` (Number) Retrieve SMS messages created before the specified timestamp. Required format - Unix timestamp.
-- `source_type` (String) must be one of ["mailjet-sms"]
-- `start_date` (Number) Retrieve SMS messages created after the specified timestamp. Required format - Unix timestamp.
-- `token` (String) Your access token. See here.
-
diff --git a/docs/data-sources/source_marketo.md b/docs/data-sources/source_marketo.md
index 89cea25cd..a71adddc4 100644
--- a/docs/data-sources/source_marketo.md
+++ b/docs/data-sources/source_marketo.md
@@ -14,7 +14,6 @@ SourceMarketo DataSource
```terraform
data "airbyte_source_marketo" "my_source_marketo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_marketo" "my_source_marketo" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `client_id` (String) The Client ID of your Marketo developer application. See the docs for info on how to obtain this.
-- `client_secret` (String) The Client Secret of your Marketo developer application. See the docs for info on how to obtain this.
-- `domain_url` (String) Your Marketo Base URL. See the docs for info on how to obtain this.
-- `source_type` (String) must be one of ["marketo"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_metabase.md b/docs/data-sources/source_metabase.md
index ef27be2ed..bc269e410 100644
--- a/docs/data-sources/source_metabase.md
+++ b/docs/data-sources/source_metabase.md
@@ -14,7 +14,6 @@ SourceMetabase DataSource
```terraform
data "airbyte_source_metabase" "my_source_metabase" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,30 +25,12 @@ data "airbyte_source_metabase" "my_source_metabase" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `instance_api_url` (String) URL to your metabase instance API
-- `password` (String)
-- `session_token` (String) To generate your session token, you need to run the following command: ``` curl -X POST \
- -H "Content-Type: application/json" \
- -d '{"username": "person@metabase.com", "password": "fakepassword"}' \
- http://localhost:3000/api/session
-``` Then copy the value of the `id` field returned by a successful call to that API.
-Note that by default, sessions are good for 14 days and needs to be regenerated.
-- `source_type` (String) must be one of ["metabase"]
-- `username` (String)
-
diff --git a/docs/data-sources/source_microsoft_teams.md b/docs/data-sources/source_microsoft_teams.md
index a1e6174a4..546c27d16 100644
--- a/docs/data-sources/source_microsoft_teams.md
+++ b/docs/data-sources/source_microsoft_teams.md
@@ -14,7 +14,6 @@ SourceMicrosoftTeams DataSource
```terraform
data "airbyte_source_microsoft_teams" "my_source_microsoftteams" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,78 +25,12 @@ data "airbyte_source_microsoft_teams" "my_source_microsoftteams" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Choose how to authenticate to Microsoft (see [below for nested schema](#nestedatt--configuration--credentials))
-- `period` (String) Specifies the length of time over which the Team Device Report stream is aggregated. The supported values are: D7, D30, D90, and D180.
-- `source_type` (String) must be one of ["microsoft-teams"]
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft` (Attributes) Choose how to authenticate to Microsoft (see [below for nested schema](#nestedatt--configuration--credentials--source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft))
-- `source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0` (Attributes) Choose how to authenticate to Microsoft (see [below for nested schema](#nestedatt--configuration--credentials--source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0))
-- `source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft` (Attributes) Choose how to authenticate to Microsoft (see [below for nested schema](#nestedatt--configuration--credentials--source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft))
-- `source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0` (Attributes) Choose how to authenticate to Microsoft (see [below for nested schema](#nestedatt--configuration--credentials--source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0))
-
-
-### Nested Schema for `configuration.credentials.source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Token"]
-- `client_id` (String) The Client ID of your Microsoft Teams developer application.
-- `client_secret` (String) The Client Secret of your Microsoft Teams developer application.
-- `tenant_id` (String) A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL
-
-
-
-### Nested Schema for `configuration.credentials.source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Microsoft Teams developer application.
-- `client_secret` (String) The Client Secret of your Microsoft Teams developer application.
-- `refresh_token` (String) A Refresh Token to renew the expired Access Token.
-- `tenant_id` (String) A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL
-
-
-
-### Nested Schema for `configuration.credentials.source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Token"]
-- `client_id` (String) The Client ID of your Microsoft Teams developer application.
-- `client_secret` (String) The Client Secret of your Microsoft Teams developer application.
-- `tenant_id` (String) A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL
-
-
-
-### Nested Schema for `configuration.credentials.source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Microsoft Teams developer application.
-- `client_secret` (String) The Client Secret of your Microsoft Teams developer application.
-- `refresh_token` (String) A Refresh Token to renew the expired Access Token.
-- `tenant_id` (String) A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL
-
diff --git a/docs/data-sources/source_mixpanel.md b/docs/data-sources/source_mixpanel.md
index a607eabc2..db2ee2f16 100644
--- a/docs/data-sources/source_mixpanel.md
+++ b/docs/data-sources/source_mixpanel.md
@@ -14,7 +14,6 @@ SourceMixpanel DataSource
```terraform
data "airbyte_source_mixpanel" "my_source_mixpanel" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,78 +25,12 @@ data "airbyte_source_mixpanel" "my_source_mixpanel" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `attribution_window` (Number) A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days.
-- `credentials` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials))
-- `date_window_size` (Number) Defines window size in days, that used to slice through data. You can reduce it, if amount of data in each window is too big for your environment.
-- `end_date` (String) The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date
-- `project_id` (Number) Your project ID number. See the docs for more information on how to obtain this.
-- `project_timezone` (String) Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console.
-- `region` (String) must be one of ["US", "EU"]
-The region of mixpanel domain instance either US or EU.
-- `select_properties_by_default` (Boolean) Setting this config parameter to TRUE ensures that new properties on events and engage records are captured. Otherwise new properties will be ignored.
-- `source_type` (String) must be one of ["mixpanel"]
-- `start_date` (String) The date in the format YYYY-MM-DD. Any data before this date will not be replicated. If this option is not set, the connector will replicate data from up to one year ago by default.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_mixpanel_authentication_wildcard_project_secret` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials--source_mixpanel_authentication_wildcard_project_secret))
-- `source_mixpanel_authentication_wildcard_service_account` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials--source_mixpanel_authentication_wildcard_service_account))
-- `source_mixpanel_update_authentication_wildcard_project_secret` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials--source_mixpanel_update_authentication_wildcard_project_secret))
-- `source_mixpanel_update_authentication_wildcard_service_account` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials--source_mixpanel_update_authentication_wildcard_service_account))
-
-
-### Nested Schema for `configuration.credentials.source_mixpanel_authentication_wildcard_project_secret`
-
-Read-Only:
-
-- `api_secret` (String) Mixpanel project secret. See the docs for more information on how to obtain this.
-- `option_title` (String) must be one of ["Project Secret"]
-
-
-
-### Nested Schema for `configuration.credentials.source_mixpanel_authentication_wildcard_service_account`
-
-Read-Only:
-
-- `option_title` (String) must be one of ["Service Account"]
-- `secret` (String) Mixpanel Service Account Secret. See the docs for more information on how to obtain this.
-- `username` (String) Mixpanel Service Account Username. See the docs for more information on how to obtain this.
-
-
-
-### Nested Schema for `configuration.credentials.source_mixpanel_update_authentication_wildcard_project_secret`
-
-Read-Only:
-
-- `api_secret` (String) Mixpanel project secret. See the docs for more information on how to obtain this.
-- `option_title` (String) must be one of ["Project Secret"]
-
-
-
-### Nested Schema for `configuration.credentials.source_mixpanel_update_authentication_wildcard_service_account`
-
-Read-Only:
-
-- `option_title` (String) must be one of ["Service Account"]
-- `secret` (String) Mixpanel Service Account Secret. See the docs for more information on how to obtain this.
-- `username` (String) Mixpanel Service Account Username. See the docs for more information on how to obtain this.
-
diff --git a/docs/data-sources/source_monday.md b/docs/data-sources/source_monday.md
index bbc505251..b170ecd6b 100644
--- a/docs/data-sources/source_monday.md
+++ b/docs/data-sources/source_monday.md
@@ -14,7 +14,6 @@ SourceMonday DataSource
```terraform
data "airbyte_source_monday" "my_source_monday" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,73 +25,12 @@ data "airbyte_source_monday" "my_source_monday" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["monday"]
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_monday_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_monday_authorization_method_api_token))
-- `source_monday_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_monday_authorization_method_o_auth2_0))
-- `source_monday_update_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_monday_update_authorization_method_api_token))
-- `source_monday_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_monday_update_authorization_method_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_monday_authorization_method_api_token`
-
-Read-Only:
-
-- `api_token` (String) API Token for making authenticated requests.
-- `auth_type` (String) must be one of ["api_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_monday_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-- `subdomain` (String) Slug/subdomain of the account, or the first part of the URL that comes before .monday.com
-
-
-
-### Nested Schema for `configuration.credentials.source_monday_update_authorization_method_api_token`
-
-Read-Only:
-
-- `api_token` (String) API Token for making authenticated requests.
-- `auth_type` (String) must be one of ["api_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_monday_update_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-- `subdomain` (String) Slug/subdomain of the account, or the first part of the URL that comes before .monday.com
-
diff --git a/docs/data-sources/source_mongodb.md b/docs/data-sources/source_mongodb.md
deleted file mode 100644
index 61daff387..000000000
--- a/docs/data-sources/source_mongodb.md
+++ /dev/null
@@ -1,128 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_mongodb Data Source - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceMongodb DataSource
----
-
-# airbyte_source_mongodb (Data Source)
-
-SourceMongodb DataSource
-
-## Example Usage
-
-```terraform
-data "airbyte_source_mongodb" "my_source_mongodb" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
-```
-
-
-## Schema
-
-### Required
-
-- `source_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `auth_source` (String) The authentication source where the user information is stored.
-- `database` (String) The database you want to replicate.
-- `instance_type` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type))
-- `password` (String) The password associated with this username.
-- `source_type` (String) must be one of ["mongodb"]
-- `user` (String) The username which is used to access the database.
-
-
-### Nested Schema for `configuration.instance_type`
-
-Read-Only:
-
-- `source_mongodb_mongo_db_instance_type_mongo_db_atlas` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_mongo_db_instance_type_mongo_db_atlas))
-- `source_mongodb_mongo_db_instance_type_replica_set` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_mongo_db_instance_type_replica_set))
-- `source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance))
-- `source_mongodb_update_mongo_db_instance_type_mongo_db_atlas` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_update_mongo_db_instance_type_mongo_db_atlas))
-- `source_mongodb_update_mongo_db_instance_type_replica_set` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_update_mongo_db_instance_type_replica_set))
-- `source_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance))
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_mongo_db_instance_type_mongo_db_atlas`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `cluster_url` (String) The URL of a cluster to connect to.
-- `instance` (String) must be one of ["atlas"]
-
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_mongo_db_instance_type_replica_set`
-
-Read-Only:
-
-- `instance` (String) must be one of ["replica"]
-- `replica_set` (String) A replica set in MongoDB is a group of mongod processes that maintain the same data set.
-- `server_addresses` (String) The members of a replica set. Please specify `host`:`port` of each member separated by comma.
-
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance`
-
-Read-Only:
-
-- `host` (String) The host name of the Mongo database.
-- `instance` (String) must be one of ["standalone"]
-- `port` (Number) The port of the Mongo database.
-
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_update_mongo_db_instance_type_mongo_db_atlas`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `cluster_url` (String) The URL of a cluster to connect to.
-- `instance` (String) must be one of ["atlas"]
-
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_update_mongo_db_instance_type_replica_set`
-
-Read-Only:
-
-- `instance` (String) must be one of ["replica"]
-- `replica_set` (String) A replica set in MongoDB is a group of mongod processes that maintain the same data set.
-- `server_addresses` (String) The members of a replica set. Please specify `host`:`port` of each member separated by comma.
-
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance`
-
-Read-Only:
-
-- `host` (String) The host name of the Mongo database.
-- `instance` (String) must be one of ["standalone"]
-- `port` (Number) The port of the Mongo database.
-
-
diff --git a/docs/data-sources/source_mongodb_internal_poc.md b/docs/data-sources/source_mongodb_internal_poc.md
index c05ab8a42..11bf95c9e 100644
--- a/docs/data-sources/source_mongodb_internal_poc.md
+++ b/docs/data-sources/source_mongodb_internal_poc.md
@@ -14,7 +14,6 @@ SourceMongodbInternalPoc DataSource
```terraform
data "airbyte_source_mongodb_internal_poc" "my_source_mongodbinternalpoc" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_mongodb_internal_poc" "my_source_mongodbinternalpoc" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `auth_source` (String) The authentication source where the user information is stored.
-- `connection_string` (String) The connection string of the database that you want to replicate..
-- `password` (String) The password associated with this username.
-- `replica_set` (String) The name of the replica set to be replicated.
-- `source_type` (String) must be one of ["mongodb-internal-poc"]
-- `user` (String) The username which is used to access the database.
-
diff --git a/docs/data-sources/source_mongodb_v2.md b/docs/data-sources/source_mongodb_v2.md
new file mode 100644
index 000000000..453a12d49
--- /dev/null
+++ b/docs/data-sources/source_mongodb_v2.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_source_mongodb_v2 Data Source - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ SourceMongodbV2 DataSource
+---
+
+# airbyte_source_mongodb_v2 (Data Source)
+
+SourceMongodbV2 DataSource
+
+## Example Usage
+
+```terraform
+data "airbyte_source_mongodb_v2" "my_source_mongodbv2" {
+ source_id = "...my_source_id..."
+}
+```
+
+
+## Schema
+
+### Required
+
+- `source_id` (String)
+
+### Read-Only
+
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
+- `name` (String)
+- `source_type` (String)
+- `workspace_id` (String)
+
+
diff --git a/docs/data-sources/source_mssql.md b/docs/data-sources/source_mssql.md
index ded4cb2bc..13a783502 100644
--- a/docs/data-sources/source_mssql.md
+++ b/docs/data-sources/source_mssql.md
@@ -14,7 +14,6 @@ SourceMssql DataSource
```terraform
data "airbyte_source_mssql" "my_source_mssql" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,210 +25,12 @@ data "airbyte_source_mssql" "my_source_mssql" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) The name of the database.
-- `host` (String) The hostname of the database.
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) The password associated with the username.
-- `port` (Number) The port of the database.
-- `replication_method` (Attributes) Configures how data is extracted from the database. (see [below for nested schema](#nestedatt--configuration--replication_method))
-- `schemas` (List of String) The list of schemas to sync from. Defaults to user. Case sensitive.
-- `source_type` (String) must be one of ["mssql"]
-- `ssl_method` (Attributes) The encryption method which is used when communicating with the database. (see [below for nested schema](#nestedatt--configuration--ssl_method))
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) The username which is used to access the database.
-
-
-### Nested Schema for `configuration.replication_method`
-
-Read-Only:
-
-- `source_mssql_update_method_read_changes_using_change_data_capture_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_method_read_changes_using_change_data_capture_cdc))
-- `source_mssql_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_method_scan_changes_with_user_defined_cursor))
-- `source_mssql_update_update_method_read_changes_using_change_data_capture_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_update_method_read_changes_using_change_data_capture_cdc))
-- `source_mssql_update_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_update_method_scan_changes_with_user_defined_cursor))
-
-
-### Nested Schema for `configuration.replication_method.source_mssql_update_method_read_changes_using_change_data_capture_cdc`
-
-Read-Only:
-
-- `data_to_sync` (String) must be one of ["Existing and New", "New Changes Only"]
-What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `method` (String) must be one of ["CDC"]
-- `snapshot_isolation` (String) must be one of ["Snapshot", "Read Committed"]
-Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.
-
-
-
-### Nested Schema for `configuration.replication_method.source_mssql_update_method_scan_changes_with_user_defined_cursor`
-
-Read-Only:
-
-- `method` (String) must be one of ["STANDARD"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_mssql_update_update_method_read_changes_using_change_data_capture_cdc`
-
-Read-Only:
-
-- `data_to_sync` (String) must be one of ["Existing and New", "New Changes Only"]
-What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `method` (String) must be one of ["CDC"]
-- `snapshot_isolation` (String) must be one of ["Snapshot", "Read Committed"]
-Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.
-
-
-
-### Nested Schema for `configuration.replication_method.source_mssql_update_update_method_scan_changes_with_user_defined_cursor`
-
-Read-Only:
-
-- `method` (String) must be one of ["STANDARD"]
-
-
-
-
-### Nested Schema for `configuration.ssl_method`
-
-Read-Only:
-
-- `source_mssql_ssl_method_encrypted_trust_server_certificate` (Attributes) Use the certificate provided by the server without verification. (For testing purposes only!) (see [below for nested schema](#nestedatt--configuration--ssl_method--source_mssql_ssl_method_encrypted_trust_server_certificate))
-- `source_mssql_ssl_method_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--ssl_method--source_mssql_ssl_method_encrypted_verify_certificate))
-- `source_mssql_update_ssl_method_encrypted_trust_server_certificate` (Attributes) Use the certificate provided by the server without verification. (For testing purposes only!) (see [below for nested schema](#nestedatt--configuration--ssl_method--source_mssql_update_ssl_method_encrypted_trust_server_certificate))
-- `source_mssql_update_ssl_method_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--ssl_method--source_mssql_update_ssl_method_encrypted_verify_certificate))
-
-
-### Nested Schema for `configuration.ssl_method.source_mssql_ssl_method_encrypted_trust_server_certificate`
-
-Read-Only:
-
-- `ssl_method` (String) must be one of ["encrypted_trust_server_certificate"]
-
-
-
-### Nested Schema for `configuration.ssl_method.source_mssql_ssl_method_encrypted_verify_certificate`
-
-Read-Only:
-
-- `host_name_in_certificate` (String) Specifies the host name of the server. The value of this property must match the subject property of the certificate.
-- `ssl_method` (String) must be one of ["encrypted_verify_certificate"]
-
-
-
-### Nested Schema for `configuration.ssl_method.source_mssql_update_ssl_method_encrypted_trust_server_certificate`
-
-Read-Only:
-
-- `ssl_method` (String) must be one of ["encrypted_trust_server_certificate"]
-
-
-
-### Nested Schema for `configuration.ssl_method.source_mssql_update_ssl_method_encrypted_verify_certificate`
-
-Read-Only:
-
-- `host_name_in_certificate` (String) Specifies the host name of the server. The value of this property must match the subject property of the certificate.
-- `ssl_method` (String) must be one of ["encrypted_verify_certificate"]
-
-
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `source_mssql_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_ssh_tunnel_method_no_tunnel))
-- `source_mssql_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_ssh_tunnel_method_password_authentication))
-- `source_mssql_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_ssh_tunnel_method_ssh_key_authentication))
-- `source_mssql_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_update_ssh_tunnel_method_no_tunnel))
-- `source_mssql_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_update_ssh_tunnel_method_password_authentication))
-- `source_mssql_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/source_my_hours.md b/docs/data-sources/source_my_hours.md
index 6ea4adaff..2a13e704d 100644
--- a/docs/data-sources/source_my_hours.md
+++ b/docs/data-sources/source_my_hours.md
@@ -14,7 +14,6 @@ SourceMyHours DataSource
```terraform
data "airbyte_source_my_hours" "my_source_myhours" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_my_hours" "my_source_myhours" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `email` (String) Your My Hours username
-- `logs_batch_size` (Number) Pagination size used for retrieving logs in days
-- `password` (String) The password associated to the username
-- `source_type` (String) must be one of ["my-hours"]
-- `start_date` (String) Start date for collecting time logs
-
diff --git a/docs/data-sources/source_mysql.md b/docs/data-sources/source_mysql.md
index 3f367ecab..cf0600aa0 100644
--- a/docs/data-sources/source_mysql.md
+++ b/docs/data-sources/source_mysql.md
@@ -14,7 +14,6 @@ SourceMysql DataSource
```terraform
data "airbyte_source_mysql" "my_source_mysql" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,253 +25,12 @@ data "airbyte_source_mysql" "my_source_mysql" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) The database name.
-- `host` (String) The host name of the database.
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.
-- `password` (String) The password associated with the username.
-- `port` (Number) The port to connect to.
-- `replication_method` (Attributes) Configures how data is extracted from the database. (see [below for nested schema](#nestedatt--configuration--replication_method))
-- `source_type` (String) must be one of ["mysql"]
-- `ssl_mode` (Attributes) SSL connection modes. Read more in the docs. (see [below for nested schema](#nestedatt--configuration--ssl_mode))
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) The username which is used to access the database.
-
-
-### Nested Schema for `configuration.replication_method`
-
-Read-Only:
-
-- `source_mysql_update_method_read_changes_using_binary_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mysql_update_method_read_changes_using_binary_log_cdc))
-- `source_mysql_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mysql_update_method_scan_changes_with_user_defined_cursor))
-- `source_mysql_update_update_method_read_changes_using_binary_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mysql_update_update_method_read_changes_using_binary_log_cdc))
-- `source_mysql_update_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mysql_update_update_method_scan_changes_with_user_defined_cursor))
-
-
-### Nested Schema for `configuration.replication_method.source_mysql_update_method_read_changes_using_binary_log_cdc`
-
-Read-Only:
-
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `method` (String) must be one of ["CDC"]
-- `server_time_zone` (String) Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.
-
-
-
-### Nested Schema for `configuration.replication_method.source_mysql_update_method_scan_changes_with_user_defined_cursor`
-
-Read-Only:
-
-- `method` (String) must be one of ["STANDARD"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_mysql_update_update_method_read_changes_using_binary_log_cdc`
-
-Read-Only:
-
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `method` (String) must be one of ["CDC"]
-- `server_time_zone` (String) Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.
-
-
-
-### Nested Schema for `configuration.replication_method.source_mysql_update_update_method_scan_changes_with_user_defined_cursor`
-
-Read-Only:
-
-- `method` (String) must be one of ["STANDARD"]
-
-
-
-
-### Nested Schema for `configuration.ssl_mode`
-
-Read-Only:
-
-- `source_mysql_ssl_modes_preferred` (Attributes) Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_ssl_modes_preferred))
-- `source_mysql_ssl_modes_required` (Attributes) Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_ssl_modes_required))
-- `source_mysql_ssl_modes_verify_ca` (Attributes) Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_ssl_modes_verify_ca))
-- `source_mysql_ssl_modes_verify_identity` (Attributes) Always connect with SSL. Verify both CA and Hostname. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_ssl_modes_verify_identity))
-- `source_mysql_update_ssl_modes_preferred` (Attributes) Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_update_ssl_modes_preferred))
-- `source_mysql_update_ssl_modes_required` (Attributes) Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_update_ssl_modes_required))
-- `source_mysql_update_ssl_modes_verify_ca` (Attributes) Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_update_ssl_modes_verify_ca))
-- `source_mysql_update_ssl_modes_verify_identity` (Attributes) Always connect with SSL. Verify both CA and Hostname. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_update_ssl_modes_verify_identity))
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_ssl_modes_preferred`
-
-Read-Only:
-
-- `mode` (String) must be one of ["preferred"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_ssl_modes_required`
-
-Read-Only:
-
-- `mode` (String) must be one of ["required"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_ssl_modes_verify_ca`
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
-- `client_key` (String) Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify_ca"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_ssl_modes_verify_identity`
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
-- `client_key` (String) Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify_identity"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_update_ssl_modes_preferred`
-
-Read-Only:
-
-- `mode` (String) must be one of ["preferred"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_update_ssl_modes_required`
-
-Read-Only:
-
-- `mode` (String) must be one of ["required"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_update_ssl_modes_verify_ca`
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
-- `client_key` (String) Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify_ca"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_update_ssl_modes_verify_identity`
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
-- `client_key` (String) Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify_identity"]
-
-
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `source_mysql_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_ssh_tunnel_method_no_tunnel))
-- `source_mysql_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_ssh_tunnel_method_password_authentication))
-- `source_mysql_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_ssh_tunnel_method_ssh_key_authentication))
-- `source_mysql_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_update_ssh_tunnel_method_no_tunnel))
-- `source_mysql_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_update_ssh_tunnel_method_password_authentication))
-- `source_mysql_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/source_netsuite.md b/docs/data-sources/source_netsuite.md
index a472df8de..6b735119a 100644
--- a/docs/data-sources/source_netsuite.md
+++ b/docs/data-sources/source_netsuite.md
@@ -14,7 +14,6 @@ SourceNetsuite DataSource
```terraform
data "airbyte_source_netsuite" "my_source_netsuite" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,29 +25,12 @@ data "airbyte_source_netsuite" "my_source_netsuite" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `consumer_key` (String) Consumer key associated with your integration
-- `consumer_secret` (String) Consumer secret associated with your integration
-- `object_types` (List of String) The API names of the Netsuite objects you want to sync. Setting this speeds up the connection setup process by limiting the number of schemas that need to be retrieved from Netsuite.
-- `realm` (String) Netsuite realm e.g. 2344535, as for `production` or 2344535_SB1, as for the `sandbox`
-- `source_type` (String) must be one of ["netsuite"]
-- `start_datetime` (String) Starting point for your data replication, in format of "YYYY-MM-DDTHH:mm:ssZ"
-- `token_key` (String) Access token key
-- `token_secret` (String) Access token secret
-- `window_in_days` (Number) The amount of days used to query the data with date chunks. Set smaller value, if you have lots of data.
-
diff --git a/docs/data-sources/source_notion.md b/docs/data-sources/source_notion.md
index cc7539cd2..b07d51619 100644
--- a/docs/data-sources/source_notion.md
+++ b/docs/data-sources/source_notion.md
@@ -14,7 +14,6 @@ SourceNotion DataSource
```terraform
data "airbyte_source_notion" "my_source_notion" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,72 +25,12 @@ data "airbyte_source_notion" "my_source_notion" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Pick an authentication method. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["notion"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00.000Z. Any data before this date will not be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_notion_authenticate_using_access_token` (Attributes) Pick an authentication method. (see [below for nested schema](#nestedatt--configuration--credentials--source_notion_authenticate_using_access_token))
-- `source_notion_authenticate_using_o_auth2_0` (Attributes) Pick an authentication method. (see [below for nested schema](#nestedatt--configuration--credentials--source_notion_authenticate_using_o_auth2_0))
-- `source_notion_update_authenticate_using_access_token` (Attributes) Pick an authentication method. (see [below for nested schema](#nestedatt--configuration--credentials--source_notion_update_authenticate_using_access_token))
-- `source_notion_update_authenticate_using_o_auth2_0` (Attributes) Pick an authentication method. (see [below for nested schema](#nestedatt--configuration--credentials--source_notion_update_authenticate_using_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_notion_authenticate_using_access_token`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["token"]
-- `token` (String) Notion API access token, see the docs for more information on how to obtain this token.
-
-
-
-### Nested Schema for `configuration.credentials.source_notion_authenticate_using_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token is a token you received by complete the OauthWebFlow of Notion.
-- `auth_type` (String) must be one of ["OAuth2.0"]
-- `client_id` (String) The ClientID of your Notion integration.
-- `client_secret` (String) The ClientSecret of your Notion integration.
-
-
-
-### Nested Schema for `configuration.credentials.source_notion_update_authenticate_using_access_token`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["token"]
-- `token` (String) Notion API access token, see the docs for more information on how to obtain this token.
-
-
-
-### Nested Schema for `configuration.credentials.source_notion_update_authenticate_using_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token is a token you received by complete the OauthWebFlow of Notion.
-- `auth_type` (String) must be one of ["OAuth2.0"]
-- `client_id` (String) The ClientID of your Notion integration.
-- `client_secret` (String) The ClientSecret of your Notion integration.
-
diff --git a/docs/data-sources/source_nytimes.md b/docs/data-sources/source_nytimes.md
index 417c9f015..4f7e7b6da 100644
--- a/docs/data-sources/source_nytimes.md
+++ b/docs/data-sources/source_nytimes.md
@@ -14,7 +14,6 @@ SourceNytimes DataSource
```terraform
data "airbyte_source_nytimes" "my_source_nytimes" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,28 +25,12 @@ data "airbyte_source_nytimes" "my_source_nytimes" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API Key
-- `end_date` (String) End date to stop the article retrieval (format YYYY-MM)
-- `period` (Number) must be one of ["1", "7", "30"]
-Period of time (in days)
-- `share_type` (String) must be one of ["facebook"]
-Share Type
-- `source_type` (String) must be one of ["nytimes"]
-- `start_date` (String) Start date to begin the article retrieval (format YYYY-MM)
-
diff --git a/docs/data-sources/source_okta.md b/docs/data-sources/source_okta.md
index d0c4b8961..77498a19d 100644
--- a/docs/data-sources/source_okta.md
+++ b/docs/data-sources/source_okta.md
@@ -14,7 +14,6 @@ SourceOkta DataSource
```terraform
data "airbyte_source_okta" "my_source_okta" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,73 +25,12 @@ data "airbyte_source_okta" "my_source_okta" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `domain` (String) The Okta domain. See the docs for instructions on how to find it.
-- `source_type` (String) must be one of ["okta"]
-- `start_date` (String) UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. Any data before this date will not be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_okta_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_okta_authorization_method_api_token))
-- `source_okta_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_okta_authorization_method_o_auth2_0))
-- `source_okta_update_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_okta_update_authorization_method_api_token))
-- `source_okta_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_okta_update_authorization_method_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_okta_authorization_method_api_token`
-
-Read-Only:
-
-- `api_token` (String) An Okta token. See the docs for instructions on how to generate it.
-- `auth_type` (String) must be one of ["api_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_okta_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
-
-
-
-### Nested Schema for `configuration.credentials.source_okta_update_authorization_method_api_token`
-
-Read-Only:
-
-- `api_token` (String) An Okta token. See the docs for instructions on how to generate it.
-- `auth_type` (String) must be one of ["api_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_okta_update_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
-
diff --git a/docs/data-sources/source_omnisend.md b/docs/data-sources/source_omnisend.md
index f584043cc..390b92ab5 100644
--- a/docs/data-sources/source_omnisend.md
+++ b/docs/data-sources/source_omnisend.md
@@ -14,7 +14,6 @@ SourceOmnisend DataSource
```terraform
data "airbyte_source_omnisend" "my_source_omnisend" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_omnisend" "my_source_omnisend" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API Key
-- `source_type` (String) must be one of ["omnisend"]
-
diff --git a/docs/data-sources/source_onesignal.md b/docs/data-sources/source_onesignal.md
index b45b15d65..5af01e8fe 100644
--- a/docs/data-sources/source_onesignal.md
+++ b/docs/data-sources/source_onesignal.md
@@ -14,7 +14,6 @@ SourceOnesignal DataSource
```terraform
data "airbyte_source_onesignal" "my_source_onesignal" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,34 +25,12 @@ data "airbyte_source_onesignal" "my_source_onesignal" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `applications` (Attributes List) Applications keys, see the docs for more information on how to obtain this data (see [below for nested schema](#nestedatt--configuration--applications))
-- `outcome_names` (String) Comma-separated list of names and the value (sum/count) for the returned outcome data. See the docs for more details
-- `source_type` (String) must be one of ["onesignal"]
-- `start_date` (String) The date from which you'd like to replicate data for OneSignal API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-- `user_auth_key` (String) OneSignal User Auth Key, see the docs for more information on how to obtain this key.
-
-
-### Nested Schema for `configuration.applications`
-
-Read-Only:
-
-- `app_api_key` (String)
-- `app_id` (String)
-- `app_name` (String)
-
diff --git a/docs/data-sources/source_oracle.md b/docs/data-sources/source_oracle.md
index 639edae99..031f1cef1 100644
--- a/docs/data-sources/source_oracle.md
+++ b/docs/data-sources/source_oracle.md
@@ -14,7 +14,6 @@ SourceOracle DataSource
```terraform
data "airbyte_source_oracle" "my_source_oracle" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,210 +25,12 @@ data "airbyte_source_oracle" "my_source_oracle" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `connection_data` (Attributes) Connect data that will be used for DB connection (see [below for nested schema](#nestedatt--configuration--connection_data))
-- `encryption` (Attributes) The encryption method with is used when communicating with the database. (see [below for nested schema](#nestedatt--configuration--encryption))
-- `host` (String) Hostname of the database.
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) The password associated with the username.
-- `port` (Number) Port of the database.
-Oracle Corporations recommends the following port numbers:
-1521 - Default listening port for client connections to the listener.
-2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL
-- `schemas` (List of String) The list of schemas to sync from. Defaults to user. Case sensitive.
-- `source_type` (String) must be one of ["oracle"]
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) The username which is used to access the database.
-
-
-### Nested Schema for `configuration.connection_data`
-
-Read-Only:
-
-- `source_oracle_connect_by_service_name` (Attributes) Use service name (see [below for nested schema](#nestedatt--configuration--connection_data--source_oracle_connect_by_service_name))
-- `source_oracle_connect_by_system_id_sid` (Attributes) Use SID (Oracle System Identifier) (see [below for nested schema](#nestedatt--configuration--connection_data--source_oracle_connect_by_system_id_sid))
-- `source_oracle_update_connect_by_service_name` (Attributes) Use service name (see [below for nested schema](#nestedatt--configuration--connection_data--source_oracle_update_connect_by_service_name))
-- `source_oracle_update_connect_by_system_id_sid` (Attributes) Use SID (Oracle System Identifier) (see [below for nested schema](#nestedatt--configuration--connection_data--source_oracle_update_connect_by_system_id_sid))
-
-
-### Nested Schema for `configuration.connection_data.source_oracle_connect_by_service_name`
-
-Read-Only:
-
-- `connection_type` (String) must be one of ["service_name"]
-- `service_name` (String)
-
-
-
-### Nested Schema for `configuration.connection_data.source_oracle_connect_by_system_id_sid`
-
-Read-Only:
-
-- `connection_type` (String) must be one of ["sid"]
-- `sid` (String)
-
-
-
-### Nested Schema for `configuration.connection_data.source_oracle_update_connect_by_service_name`
-
-Read-Only:
-
-- `connection_type` (String) must be one of ["service_name"]
-- `service_name` (String)
-
-
-
-### Nested Schema for `configuration.connection_data.source_oracle_update_connect_by_system_id_sid`
-
-Read-Only:
-
-- `connection_type` (String) must be one of ["sid"]
-- `sid` (String)
-
-
-
-
-### Nested Schema for `configuration.encryption`
-
-Read-Only:
-
-- `source_oracle_encryption_native_network_encryption_nne` (Attributes) The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports. (see [below for nested schema](#nestedatt--configuration--encryption--source_oracle_encryption_native_network_encryption_nne))
-- `source_oracle_encryption_tls_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--encryption--source_oracle_encryption_tls_encrypted_verify_certificate))
-- `source_oracle_update_encryption_native_network_encryption_nne` (Attributes) The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports. (see [below for nested schema](#nestedatt--configuration--encryption--source_oracle_update_encryption_native_network_encryption_nne))
-- `source_oracle_update_encryption_tls_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--encryption--source_oracle_update_encryption_tls_encrypted_verify_certificate))
-
-
-### Nested Schema for `configuration.encryption.source_oracle_encryption_native_network_encryption_nne`
-
-Read-Only:
-
-- `encryption_algorithm` (String) must be one of ["AES256", "RC4_56", "3DES168"]
-This parameter defines what encryption algorithm is used.
-- `encryption_method` (String) must be one of ["client_nne"]
-
-
-
-### Nested Schema for `configuration.encryption.source_oracle_encryption_tls_encrypted_verify_certificate`
-
-Read-Only:
-
-- `encryption_method` (String) must be one of ["encrypted_verify_certificate"]
-- `ssl_certificate` (String) Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.
-
-
-
-### Nested Schema for `configuration.encryption.source_oracle_update_encryption_native_network_encryption_nne`
-
-Read-Only:
-
-- `encryption_algorithm` (String) must be one of ["AES256", "RC4_56", "3DES168"]
-This parameter defines what encryption algorithm is used.
-- `encryption_method` (String) must be one of ["client_nne"]
-
-
-
-### Nested Schema for `configuration.encryption.source_oracle_update_encryption_tls_encrypted_verify_certificate`
-
-Read-Only:
-
-- `encryption_method` (String) must be one of ["encrypted_verify_certificate"]
-- `ssl_certificate` (String) Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.
-
-
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `source_oracle_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_ssh_tunnel_method_no_tunnel))
-- `source_oracle_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_ssh_tunnel_method_password_authentication))
-- `source_oracle_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_ssh_tunnel_method_ssh_key_authentication))
-- `source_oracle_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_update_ssh_tunnel_method_no_tunnel))
-- `source_oracle_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_update_ssh_tunnel_method_password_authentication))
-- `source_oracle_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/source_orb.md b/docs/data-sources/source_orb.md
index 9e5783179..a1201af20 100644
--- a/docs/data-sources/source_orb.md
+++ b/docs/data-sources/source_orb.md
@@ -14,7 +14,6 @@ SourceOrb DataSource
```terraform
data "airbyte_source_orb" "my_source_orb" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,28 +25,12 @@ data "airbyte_source_orb" "my_source_orb" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Orb API Key, issued from the Orb admin console.
-- `lookback_window_days` (Number) When set to N, the connector will always refresh resources created within the past N days. By default, updated objects that are not newly created are not incrementally synced.
-- `numeric_event_properties_keys` (List of String) Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.
-- `plan_id` (String) Orb Plan ID to filter subscriptions that should have usage fetched.
-- `source_type` (String) must be one of ["orb"]
-- `start_date` (String) UTC date and time in the format 2022-03-01T00:00:00Z. Any data with created_at before this data will not be synced. For Subscription Usage, this becomes the `timeframe_start` API parameter.
-- `string_event_properties_keys` (List of String) Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.
-- `subscription_usage_grouping_key` (String) Property key name to group subscription usage by.
-
diff --git a/docs/data-sources/source_orbit.md b/docs/data-sources/source_orbit.md
index 9fa7ac3d1..db43f656a 100644
--- a/docs/data-sources/source_orbit.md
+++ b/docs/data-sources/source_orbit.md
@@ -14,7 +14,6 @@ SourceOrbit DataSource
```terraform
data "airbyte_source_orbit" "my_source_orbit" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_orbit" "my_source_orbit" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_token` (String) Authorizes you to work with Orbit workspaces associated with the token.
-- `source_type` (String) must be one of ["orbit"]
-- `start_date` (String) Date in the format 2022-06-26. Only load members whose last activities are after this date.
-- `workspace` (String) The unique name of the workspace that your API token is associated with.
-
diff --git a/docs/data-sources/source_outbrain_amplify.md b/docs/data-sources/source_outbrain_amplify.md
index b65430536..9f1843705 100644
--- a/docs/data-sources/source_outbrain_amplify.md
+++ b/docs/data-sources/source_outbrain_amplify.md
@@ -14,7 +14,6 @@ SourceOutbrainAmplify DataSource
```terraform
data "airbyte_source_outbrain_amplify" "my_source_outbrainamplify" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,75 +25,12 @@ data "airbyte_source_outbrain_amplify" "my_source_outbrainamplify" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Credentials for making authenticated requests requires either username/password or access_token. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `end_date` (String) Date in the format YYYY-MM-DD.
-- `geo_location_breakdown` (String) must be one of ["country", "region", "subregion"]
-The granularity used for geo location data in reports.
-- `report_granularity` (String) must be one of ["daily", "weekly", "monthly"]
-The granularity used for periodic data in reports. See the docs.
-- `source_type` (String) must be one of ["outbrain-amplify"]
-- `start_date` (String) Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before this date will not be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_outbrain_amplify_authentication_method_access_token` (Attributes) Credentials for making authenticated requests requires either username/password or access_token. (see [below for nested schema](#nestedatt--configuration--credentials--source_outbrain_amplify_authentication_method_access_token))
-- `source_outbrain_amplify_authentication_method_username_password` (Attributes) Credentials for making authenticated requests requires either username/password or access_token. (see [below for nested schema](#nestedatt--configuration--credentials--source_outbrain_amplify_authentication_method_username_password))
-- `source_outbrain_amplify_update_authentication_method_access_token` (Attributes) Credentials for making authenticated requests requires either username/password or access_token. (see [below for nested schema](#nestedatt--configuration--credentials--source_outbrain_amplify_update_authentication_method_access_token))
-- `source_outbrain_amplify_update_authentication_method_username_password` (Attributes) Credentials for making authenticated requests requires either username/password or access_token. (see [below for nested schema](#nestedatt--configuration--credentials--source_outbrain_amplify_update_authentication_method_username_password))
-
-
-### Nested Schema for `configuration.credentials.source_outbrain_amplify_authentication_method_access_token`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `type` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_outbrain_amplify_authentication_method_username_password`
-
-Read-Only:
-
-- `password` (String) Add Password for authentication.
-- `type` (String) must be one of ["username_password"]
-- `username` (String) Add Username for authentication.
-
-
-
-### Nested Schema for `configuration.credentials.source_outbrain_amplify_update_authentication_method_access_token`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `type` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_outbrain_amplify_update_authentication_method_username_password`
-
-Read-Only:
-
-- `password` (String) Add Password for authentication.
-- `type` (String) must be one of ["username_password"]
-- `username` (String) Add Username for authentication.
-
diff --git a/docs/data-sources/source_outreach.md b/docs/data-sources/source_outreach.md
index 59d661161..b0a493a85 100644
--- a/docs/data-sources/source_outreach.md
+++ b/docs/data-sources/source_outreach.md
@@ -14,7 +14,6 @@ SourceOutreach DataSource
```terraform
data "airbyte_source_outreach" "my_source_outreach" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_outreach" "my_source_outreach" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `client_id` (String) The Client ID of your Outreach developer application.
-- `client_secret` (String) The Client Secret of your Outreach developer application.
-- `redirect_uri` (String) A Redirect URI is the location where the authorization server sends the user once the app has been successfully authorized and granted an authorization code or access token.
-- `refresh_token` (String) The token for obtaining the new access token.
-- `source_type` (String) must be one of ["outreach"]
-- `start_date` (String) The date from which you'd like to replicate data for Outreach API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-
diff --git a/docs/data-sources/source_paypal_transaction.md b/docs/data-sources/source_paypal_transaction.md
index cffbcf512..efd8b2226 100644
--- a/docs/data-sources/source_paypal_transaction.md
+++ b/docs/data-sources/source_paypal_transaction.md
@@ -14,7 +14,6 @@ SourcePaypalTransaction DataSource
```terraform
data "airbyte_source_paypal_transaction" "my_source_paypaltransaction" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_paypal_transaction" "my_source_paypaltransaction" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `client_id` (String) The Client ID of your Paypal developer application.
-- `client_secret` (String) The Client Secret of your Paypal developer application.
-- `is_sandbox` (Boolean) Determines whether to use the sandbox or production environment.
-- `refresh_token` (String) The key to refresh the expired access token.
-- `source_type` (String) must be one of ["paypal-transaction"]
-- `start_date` (String) Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before present time.
-
diff --git a/docs/data-sources/source_paystack.md b/docs/data-sources/source_paystack.md
index 117f68e3a..3779820bd 100644
--- a/docs/data-sources/source_paystack.md
+++ b/docs/data-sources/source_paystack.md
@@ -14,7 +14,6 @@ SourcePaystack DataSource
```terraform
data "airbyte_source_paystack" "my_source_paystack" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_paystack" "my_source_paystack" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `lookback_window_days` (Number) When set, the connector will always reload data from the past N days, where N is the value set here. This is useful if your data is updated after creation.
-- `secret_key` (String) The Paystack API key (usually starts with 'sk_live_'; find yours here).
-- `source_type` (String) must be one of ["paystack"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_pendo.md b/docs/data-sources/source_pendo.md
index c6f4dc766..ab975dbb9 100644
--- a/docs/data-sources/source_pendo.md
+++ b/docs/data-sources/source_pendo.md
@@ -14,7 +14,6 @@ SourcePendo DataSource
```terraform
data "airbyte_source_pendo" "my_source_pendo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_pendo" "my_source_pendo" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String)
-- `source_type` (String) must be one of ["pendo"]
-
diff --git a/docs/data-sources/source_persistiq.md b/docs/data-sources/source_persistiq.md
index ddcc19899..f738cea0c 100644
--- a/docs/data-sources/source_persistiq.md
+++ b/docs/data-sources/source_persistiq.md
@@ -14,7 +14,6 @@ SourcePersistiq DataSource
```terraform
data "airbyte_source_persistiq" "my_source_persistiq" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_persistiq" "my_source_persistiq" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) PersistIq API Key. See the docs for more information on where to find that key.
-- `source_type` (String) must be one of ["persistiq"]
-
diff --git a/docs/data-sources/source_pexels_api.md b/docs/data-sources/source_pexels_api.md
index d1b8bb290..aebc7a2d9 100644
--- a/docs/data-sources/source_pexels_api.md
+++ b/docs/data-sources/source_pexels_api.md
@@ -14,7 +14,6 @@ SourcePexelsAPI DataSource
```terraform
data "airbyte_source_pexels_api" "my_source_pexelsapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,27 +25,12 @@ data "airbyte_source_pexels_api" "my_source_pexelsapi" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API key is required to access pexels api, For getting your's goto https://www.pexels.com/api/documentation and create account for free.
-- `color` (String) Optional, Desired photo color. Supported colors red, orange, yellow, green, turquoise, blue, violet, pink, brown, black, gray, white or any hexidecimal color code.
-- `locale` (String) Optional, The locale of the search you are performing. The current supported locales are 'en-US' 'pt-BR' 'es-ES' 'ca-ES' 'de-DE' 'it-IT' 'fr-FR' 'sv-SE' 'id-ID' 'pl-PL' 'ja-JP' 'zh-TW' 'zh-CN' 'ko-KR' 'th-TH' 'nl-NL' 'hu-HU' 'vi-VN' 'cs-CZ' 'da-DK' 'fi-FI' 'uk-UA' 'el-GR' 'ro-RO' 'nb-NO' 'sk-SK' 'tr-TR' 'ru-RU'.
-- `orientation` (String) Optional, Desired photo orientation. The current supported orientations are landscape, portrait or square
-- `query` (String) Optional, the search query, Example Ocean, Tigers, Pears, etc.
-- `size` (String) Optional, Minimum photo size. The current supported sizes are large(24MP), medium(12MP) or small(4MP).
-- `source_type` (String) must be one of ["pexels-api"]
-
diff --git a/docs/data-sources/source_pinterest.md b/docs/data-sources/source_pinterest.md
index 31f2e026c..8d9f7ab72 100644
--- a/docs/data-sources/source_pinterest.md
+++ b/docs/data-sources/source_pinterest.md
@@ -14,7 +14,6 @@ SourcePinterest DataSource
```terraform
data "airbyte_source_pinterest" "my_source_pinterest" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,73 +25,12 @@ data "airbyte_source_pinterest" "my_source_pinterest" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["pinterest"]
-- `start_date` (String) A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by api (89 days from today).
-- `status` (List of String) Entity statuses based off of campaigns, ad_groups, and ads. If you do not have a status set, it will be ignored completely.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_pinterest_authorization_method_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_pinterest_authorization_method_access_token))
-- `source_pinterest_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_pinterest_authorization_method_o_auth2_0))
-- `source_pinterest_update_authorization_method_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_pinterest_update_authorization_method_access_token))
-- `source_pinterest_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_pinterest_update_authorization_method_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_pinterest_authorization_method_access_token`
-
-Read-Only:
-
-- `access_token` (String) The Access Token to make authenticated requests.
-- `auth_method` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_pinterest_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application
-- `client_secret` (String) The Client Secret of your OAuth application.
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
-
-
-
-### Nested Schema for `configuration.credentials.source_pinterest_update_authorization_method_access_token`
-
-Read-Only:
-
-- `access_token` (String) The Access Token to make authenticated requests.
-- `auth_method` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_pinterest_update_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application
-- `client_secret` (String) The Client Secret of your OAuth application.
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
-
diff --git a/docs/data-sources/source_pipedrive.md b/docs/data-sources/source_pipedrive.md
index 0ebebedbd..f4539f829 100644
--- a/docs/data-sources/source_pipedrive.md
+++ b/docs/data-sources/source_pipedrive.md
@@ -14,7 +14,6 @@ SourcePipedrive DataSource
```terraform
data "airbyte_source_pipedrive" "my_source_pipedrive" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,31 +25,12 @@ data "airbyte_source_pipedrive" "my_source_pipedrive" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `authorization` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization))
-- `replication_start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. When specified and not None, then stream will behave as incremental
-- `source_type` (String) must be one of ["pipedrive"]
-
-
-### Nested Schema for `configuration.authorization`
-
-Read-Only:
-
-- `api_token` (String) The Pipedrive API Token.
-- `auth_type` (String) must be one of ["Token"]
-
diff --git a/docs/data-sources/source_pocket.md b/docs/data-sources/source_pocket.md
index c34b1f9eb..d054132d3 100644
--- a/docs/data-sources/source_pocket.md
+++ b/docs/data-sources/source_pocket.md
@@ -14,7 +14,6 @@ SourcePocket DataSource
```terraform
data "airbyte_source_pocket" "my_source_pocket" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,36 +25,12 @@ data "airbyte_source_pocket" "my_source_pocket" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) The user's Pocket access token.
-- `consumer_key` (String) Your application's Consumer Key.
-- `content_type` (String) must be one of ["article", "video", "image"]
-Select the content type of the items to retrieve.
-- `detail_type` (String) must be one of ["simple", "complete"]
-Select the granularity of the information about each item.
-- `domain` (String) Only return items from a particular `domain`.
-- `favorite` (Boolean) Retrieve only favorited items.
-- `search` (String) Only return items whose title or url contain the `search` string.
-- `since` (String) Only return items modified since the given timestamp.
-- `sort` (String) must be one of ["newest", "oldest", "title", "site"]
-Sort retrieved items by the given criteria.
-- `source_type` (String) must be one of ["pocket"]
-- `state` (String) must be one of ["unread", "archive", "all"]
-Select the state of the items to retrieve.
-- `tag` (String) Return only items tagged with this tag name. Use _untagged_ for retrieving only untagged items.
-
diff --git a/docs/data-sources/source_pokeapi.md b/docs/data-sources/source_pokeapi.md
index 99da2a445..aff4a4239 100644
--- a/docs/data-sources/source_pokeapi.md
+++ b/docs/data-sources/source_pokeapi.md
@@ -14,7 +14,6 @@ SourcePokeapi DataSource
```terraform
data "airbyte_source_pokeapi" "my_source_pokeapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_pokeapi" "my_source_pokeapi" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `pokemon_name` (String) Pokemon requested from the API.
-- `source_type` (String) must be one of ["pokeapi"]
-
diff --git a/docs/data-sources/source_polygon_stock_api.md b/docs/data-sources/source_polygon_stock_api.md
index a9ef45744..80af1f707 100644
--- a/docs/data-sources/source_polygon_stock_api.md
+++ b/docs/data-sources/source_polygon_stock_api.md
@@ -14,7 +14,6 @@ SourcePolygonStockAPI DataSource
```terraform
data "airbyte_source_polygon_stock_api" "my_source_polygonstockapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,30 +25,12 @@ data "airbyte_source_polygon_stock_api" "my_source_polygonstockapi" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `adjusted` (String) Determines whether or not the results are adjusted for splits. By default, results are adjusted and set to true. Set this to false to get results that are NOT adjusted for splits.
-- `api_key` (String) Your API ACCESS Key
-- `end_date` (String) The target date for the aggregate window.
-- `limit` (Number) The target date for the aggregate window.
-- `multiplier` (Number) The size of the timespan multiplier.
-- `sort` (String) Sort the results by timestamp. asc will return results in ascending order (oldest at the top), desc will return results in descending order (newest at the top).
-- `source_type` (String) must be one of ["polygon-stock-api"]
-- `start_date` (String) The beginning date for the aggregate window.
-- `stocks_ticker` (String) The exchange symbol that this item is traded under.
-- `timespan` (String) The size of the time window.
-
diff --git a/docs/data-sources/source_postgres.md b/docs/data-sources/source_postgres.md
index 54cf73891..529f28ad9 100644
--- a/docs/data-sources/source_postgres.md
+++ b/docs/data-sources/source_postgres.md
@@ -14,7 +14,6 @@ SourcePostgres DataSource
```terraform
data "airbyte_source_postgres" "my_source_postgres" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,377 +25,12 @@ data "airbyte_source_postgres" "my_source_postgres" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) Name of the database.
-- `host` (String) Hostname of the database.
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the database.
-- `replication_method` (Attributes) Configures how data is extracted from the database. (see [below for nested schema](#nestedatt--configuration--replication_method))
-- `schemas` (List of String) The list of schemas (case sensitive) to sync from. Defaults to public.
-- `source_type` (String) must be one of ["postgres"]
-- `ssl_mode` (Attributes) SSL connection modes.
- Read more in the docs. (see [below for nested schema](#nestedatt--configuration--ssl_mode))
-- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `username` (String) Username to access the database.
-
-
-### Nested Schema for `configuration.replication_method`
-
-Read-Only:
-
-- `source_postgres_update_method_detect_changes_with_xmin_system_column` (Attributes) Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_detect_changes_with_xmin_system_column))
-- `source_postgres_update_method_read_changes_using_write_ahead_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_read_changes_using_write_ahead_log_cdc))
-- `source_postgres_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_scan_changes_with_user_defined_cursor))
-- `source_postgres_update_update_method_detect_changes_with_xmin_system_column` (Attributes) Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_detect_changes_with_xmin_system_column))
-- `source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc))
-- `source_postgres_update_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_scan_changes_with_user_defined_cursor))
-
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_method_detect_changes_with_xmin_system_column`
-
-Read-Only:
-
-- `method` (String) must be one of ["Xmin"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_method_read_changes_using_write_ahead_log_cdc`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `lsn_commit_behaviour` (String) must be one of ["While reading Data", "After loading Data in the destination"]
-Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-- `method` (String) must be one of ["CDC"]
-- `plugin` (String) must be one of ["pgoutput"]
-A logical decoding plugin installed on the PostgreSQL server.
-- `publication` (String) A Postgres publication used for consuming changes. Read about publications and replication identities.
-- `queue_size` (Number) The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
-- `replication_slot` (String) A plugin logical replication slot. Read about replication slots.
-
-
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_method_scan_changes_with_user_defined_cursor`
-
-Read-Only:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_detect_changes_with_xmin_system_column`
-
-Read-Only:
-
-- `method` (String) must be one of ["Xmin"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `lsn_commit_behaviour` (String) must be one of ["While reading Data", "After loading Data in the destination"]
-Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-- `method` (String) must be one of ["CDC"]
-- `plugin` (String) must be one of ["pgoutput"]
-A logical decoding plugin installed on the PostgreSQL server.
-- `publication` (String) A Postgres publication used for consuming changes. Read about publications and replication identities.
-- `queue_size` (Number) The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
-- `replication_slot` (String) A plugin logical replication slot. Read about replication slots.
-
-
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_scan_changes_with_user_defined_cursor`
-
-Read-Only:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-
-### Nested Schema for `configuration.ssl_mode`
-
-Read-Only:
-
-- `source_postgres_ssl_modes_allow` (Attributes) Enables encryption only when required by the source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_allow))
-- `source_postgres_ssl_modes_disable` (Attributes) Disables encryption of communication between Airbyte and source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_disable))
-- `source_postgres_ssl_modes_prefer` (Attributes) Allows unencrypted connection only if the source database does not support encryption. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_prefer))
-- `source_postgres_ssl_modes_require` (Attributes) Always require encryption. If the source database server does not support encryption, connection will fail. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_require))
-- `source_postgres_ssl_modes_verify_ca` (Attributes) Always require encryption and verifies that the source database server has a valid SSL certificate. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_verify_ca))
-- `source_postgres_ssl_modes_verify_full` (Attributes) This is the most secure mode. Always require encryption and verifies the identity of the source database server. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_verify_full))
-- `source_postgres_update_ssl_modes_allow` (Attributes) Enables encryption only when required by the source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_allow))
-- `source_postgres_update_ssl_modes_disable` (Attributes) Disables encryption of communication between Airbyte and source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_disable))
-- `source_postgres_update_ssl_modes_prefer` (Attributes) Allows unencrypted connection only if the source database does not support encryption. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_prefer))
-- `source_postgres_update_ssl_modes_require` (Attributes) Always require encryption. If the source database server does not support encryption, connection will fail. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_require))
-- `source_postgres_update_ssl_modes_verify_ca` (Attributes) Always require encryption and verifies that the source database server has a valid SSL certificate. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_verify_ca))
-- `source_postgres_update_ssl_modes_verify_full` (Attributes) This is the most secure mode. Always require encryption and verifies the identity of the source database server. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_verify_full))
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_allow`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["allow"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_disable`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["disable"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_prefer`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["prefer"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_require`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["require"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_verify_ca`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-ca"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_verify_full`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-full"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_allow`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["allow"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_disable`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["disable"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_prefer`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["prefer"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_require`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `mode` (String) must be one of ["require"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_verify_ca`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-ca"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_verify_full`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-- `mode` (String) must be one of ["verify-full"]
-
-
-
-
-### Nested Schema for `configuration.tunnel_method`
-
-Read-Only:
-
-- `source_postgres_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_ssh_tunnel_method_no_tunnel))
-- `source_postgres_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_ssh_tunnel_method_password_authentication))
-- `source_postgres_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_ssh_tunnel_method_ssh_key_authentication))
-- `source_postgres_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_update_ssh_tunnel_method_no_tunnel))
-- `source_postgres_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_update_ssh_tunnel_method_password_authentication))
-- `source_postgres_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_update_ssh_tunnel_method_no_tunnel`
-
-Read-Only:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_update_ssh_tunnel_method_password_authentication`
-
-Read-Only:
-
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_update_ssh_tunnel_method_ssh_key_authentication`
-
-Read-Only:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
diff --git a/docs/data-sources/source_posthog.md b/docs/data-sources/source_posthog.md
index e076d3b63..8edb56f7c 100644
--- a/docs/data-sources/source_posthog.md
+++ b/docs/data-sources/source_posthog.md
@@ -14,7 +14,6 @@ SourcePosthog DataSource
```terraform
data "airbyte_source_posthog" "my_source_posthog" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_posthog" "my_source_posthog" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API Key. See the docs for information on how to generate this key.
-- `base_url` (String) Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com).
-- `events_time_step` (Number) Set lower value in case of failing long running sync of events stream.
-- `source_type` (String) must be one of ["posthog"]
-- `start_date` (String) The date from which you'd like to replicate the data. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_postmarkapp.md b/docs/data-sources/source_postmarkapp.md
index b1afac15d..551408386 100644
--- a/docs/data-sources/source_postmarkapp.md
+++ b/docs/data-sources/source_postmarkapp.md
@@ -14,7 +14,6 @@ SourcePostmarkapp DataSource
```terraform
data "airbyte_source_postmarkapp" "my_source_postmarkapp" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_postmarkapp" "my_source_postmarkapp" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `source_type` (String) must be one of ["postmarkapp"]
-- `x_postmark_account_token` (String) API Key for account
-- `x_postmark_server_token` (String) API Key for server
-
diff --git a/docs/data-sources/source_prestashop.md b/docs/data-sources/source_prestashop.md
index 3d3aca6cb..9330ec0ca 100644
--- a/docs/data-sources/source_prestashop.md
+++ b/docs/data-sources/source_prestashop.md
@@ -14,7 +14,6 @@ SourcePrestashop DataSource
```terraform
data "airbyte_source_prestashop" "my_source_prestashop" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_prestashop" "my_source_prestashop" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_key` (String) Your PrestaShop access key. See the docs for info on how to obtain this.
-- `source_type` (String) must be one of ["prestashop"]
-- `start_date` (String) The Start date in the format YYYY-MM-DD.
-- `url` (String) Shop URL without trailing slash.
-
diff --git a/docs/data-sources/source_punk_api.md b/docs/data-sources/source_punk_api.md
index dedfcb423..a6ff7d0ce 100644
--- a/docs/data-sources/source_punk_api.md
+++ b/docs/data-sources/source_punk_api.md
@@ -14,7 +14,6 @@ SourcePunkAPI DataSource
```terraform
data "airbyte_source_punk_api" "my_source_punkapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_punk_api" "my_source_punkapi" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `brewed_after` (String) To extract specific data with Unique ID
-- `brewed_before` (String) To extract specific data with Unique ID
-- `id` (String) To extract specific data with Unique ID
-- `source_type` (String) must be one of ["punk-api"]
-
diff --git a/docs/data-sources/source_pypi.md b/docs/data-sources/source_pypi.md
index ec211a78e..5a0466a9e 100644
--- a/docs/data-sources/source_pypi.md
+++ b/docs/data-sources/source_pypi.md
@@ -14,7 +14,6 @@ SourcePypi DataSource
```terraform
data "airbyte_source_pypi" "my_source_pypi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_pypi" "my_source_pypi" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `project_name` (String) Name of the project/package. Can only be in lowercase with hyphen. This is the name used using pip command for installing the package.
-- `source_type` (String) must be one of ["pypi"]
-- `version` (String) Version of the project/package. Use it to find a particular release instead of all releases.
-
diff --git a/docs/data-sources/source_qualaroo.md b/docs/data-sources/source_qualaroo.md
index c4eedcf28..5f72d5de5 100644
--- a/docs/data-sources/source_qualaroo.md
+++ b/docs/data-sources/source_qualaroo.md
@@ -14,7 +14,6 @@ SourceQualaroo DataSource
```terraform
data "airbyte_source_qualaroo" "my_source_qualaroo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_qualaroo" "my_source_qualaroo" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `key` (String) A Qualaroo token. See the docs for instructions on how to generate it.
-- `source_type` (String) must be one of ["qualaroo"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-- `survey_ids` (List of String) IDs of the surveys from which you'd like to replicate data. If left empty, data from all surveys to which you have access will be replicated.
-- `token` (String) A Qualaroo token. See the docs for instructions on how to generate it.
-
diff --git a/docs/data-sources/source_quickbooks.md b/docs/data-sources/source_quickbooks.md
index eeb81b122..4a783b4d3 100644
--- a/docs/data-sources/source_quickbooks.md
+++ b/docs/data-sources/source_quickbooks.md
@@ -14,7 +14,6 @@ SourceQuickbooks DataSource
```terraform
data "airbyte_source_quickbooks" "my_source_quickbooks" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,59 +25,12 @@ data "airbyte_source_quickbooks" "my_source_quickbooks" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `sandbox` (Boolean) Determines whether to use the sandbox or production environment.
-- `source_type` (String) must be one of ["quickbooks"]
-- `start_date` (String) The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this date will not be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_quickbooks_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_quickbooks_authorization_method_o_auth2_0))
-- `source_quickbooks_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_quickbooks_update_authorization_method_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_quickbooks_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access token fot making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
-- `client_secret` (String) Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
-- `realm_id` (String) Labeled Company ID. The Make API Calls panel is populated with the realm id and the current access token.
-- `refresh_token` (String) A token used when refreshing the access token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_quickbooks_update_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access token fot making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
-- `client_secret` (String) Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
-- `realm_id` (String) Labeled Company ID. The Make API Calls panel is populated with the realm id and the current access token.
-- `refresh_token` (String) A token used when refreshing the access token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
diff --git a/docs/data-sources/source_railz.md b/docs/data-sources/source_railz.md
index f5998c435..0db8ca0b8 100644
--- a/docs/data-sources/source_railz.md
+++ b/docs/data-sources/source_railz.md
@@ -14,7 +14,6 @@ SourceRailz DataSource
```terraform
data "airbyte_source_railz" "my_source_railz" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_railz" "my_source_railz" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `client_id` (String) Client ID (client_id)
-- `secret_key` (String) Secret key (secret_key)
-- `source_type` (String) must be one of ["railz"]
-- `start_date` (String) Start date
-
diff --git a/docs/data-sources/source_recharge.md b/docs/data-sources/source_recharge.md
index 2a6b96d10..3fdf04c83 100644
--- a/docs/data-sources/source_recharge.md
+++ b/docs/data-sources/source_recharge.md
@@ -14,7 +14,6 @@ SourceRecharge DataSource
```terraform
data "airbyte_source_recharge" "my_source_recharge" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_recharge" "my_source_recharge" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) The value of the Access Token generated. See the docs for more information.
-- `source_type` (String) must be one of ["recharge"]
-- `start_date` (String) The date from which you'd like to replicate data for Recharge API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_recreation.md b/docs/data-sources/source_recreation.md
index 522c2b5fb..6bd0c5bf1 100644
--- a/docs/data-sources/source_recreation.md
+++ b/docs/data-sources/source_recreation.md
@@ -14,7 +14,6 @@ SourceRecreation DataSource
```terraform
data "airbyte_source_recreation" "my_source_recreation" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_recreation" "my_source_recreation" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `apikey` (String) API Key
-- `query_campsites` (String)
-- `source_type` (String) must be one of ["recreation"]
-
diff --git a/docs/data-sources/source_recruitee.md b/docs/data-sources/source_recruitee.md
index 59bddb6f9..9831a0113 100644
--- a/docs/data-sources/source_recruitee.md
+++ b/docs/data-sources/source_recruitee.md
@@ -14,7 +14,6 @@ SourceRecruitee DataSource
```terraform
data "airbyte_source_recruitee" "my_source_recruitee" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_recruitee" "my_source_recruitee" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Recruitee API Key. See here.
-- `company_id` (Number) Recruitee Company ID. You can also find this ID on the Recruitee API tokens page.
-- `source_type` (String) must be one of ["recruitee"]
-
diff --git a/docs/data-sources/source_recurly.md b/docs/data-sources/source_recurly.md
index a1c7b17c2..38f97cef4 100644
--- a/docs/data-sources/source_recurly.md
+++ b/docs/data-sources/source_recurly.md
@@ -14,7 +14,6 @@ SourceRecurly DataSource
```terraform
data "airbyte_source_recurly" "my_source_recurly" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_recurly" "my_source_recurly" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Recurly API Key. See the docs for more information on how to generate this key.
-- `begin_time` (String) ISO8601 timestamp from which the replication from Recurly API will start from.
-- `end_time` (String) ISO8601 timestamp to which the replication from Recurly API will stop. Records after that date won't be imported.
-- `source_type` (String) must be one of ["recurly"]
-
diff --git a/docs/data-sources/source_redshift.md b/docs/data-sources/source_redshift.md
index ebae7c06a..1b3d3fdf3 100644
--- a/docs/data-sources/source_redshift.md
+++ b/docs/data-sources/source_redshift.md
@@ -14,7 +14,6 @@ SourceRedshift DataSource
```terraform
data "airbyte_source_redshift" "my_source_redshift" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,28 +25,12 @@ data "airbyte_source_redshift" "my_source_redshift" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `database` (String) Name of the database.
-- `host` (String) Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com).
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the database.
-- `schemas` (List of String) The list of schemas to sync from. Specify one or more explicitly or keep empty to process all schemas. Schema names are case sensitive.
-- `source_type` (String) must be one of ["redshift"]
-- `username` (String) Username to use to access the database.
-
diff --git a/docs/data-sources/source_retently.md b/docs/data-sources/source_retently.md
index 9724ec511..76168d2b1 100644
--- a/docs/data-sources/source_retently.md
+++ b/docs/data-sources/source_retently.md
@@ -14,7 +14,6 @@ SourceRetently DataSource
```terraform
data "airbyte_source_retently" "my_source_retently" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,87 +25,12 @@ data "airbyte_source_retently" "my_source_retently" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["retently"]
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_retently_authentication_mechanism_authenticate_via_retently_o_auth` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials--source_retently_authentication_mechanism_authenticate_via_retently_o_auth))
-- `source_retently_authentication_mechanism_authenticate_with_api_token` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials--source_retently_authentication_mechanism_authenticate_with_api_token))
-- `source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials--source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth))
-- `source_retently_update_authentication_mechanism_authenticate_with_api_token` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials--source_retently_update_authentication_mechanism_authenticate_with_api_token))
-
-
-### Nested Schema for `configuration.credentials.source_retently_authentication_mechanism_authenticate_via_retently_o_auth`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Retently developer application.
-- `client_secret` (String) The Client Secret of your Retently developer application.
-- `refresh_token` (String) Retently Refresh Token which can be used to fetch new Bearer Tokens when the current one expires.
-
-
-
-### Nested Schema for `configuration.credentials.source_retently_authentication_mechanism_authenticate_with_api_token`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `api_key` (String) Retently API Token. See the docs for more information on how to obtain this key.
-- `auth_type` (String) must be one of ["Token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Retently developer application.
-- `client_secret` (String) The Client Secret of your Retently developer application.
-- `refresh_token` (String) Retently Refresh Token which can be used to fetch new Bearer Tokens when the current one expires.
-
-
-
-### Nested Schema for `configuration.credentials.source_retently_update_authentication_mechanism_authenticate_with_api_token`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `api_key` (String) Retently API Token. See the docs for more information on how to obtain this key.
-- `auth_type` (String) must be one of ["Token"]
-
diff --git a/docs/data-sources/source_rki_covid.md b/docs/data-sources/source_rki_covid.md
index 2dddd1a2f..745a69b98 100644
--- a/docs/data-sources/source_rki_covid.md
+++ b/docs/data-sources/source_rki_covid.md
@@ -14,7 +14,6 @@ SourceRkiCovid DataSource
```terraform
data "airbyte_source_rki_covid" "my_source_rkicovid" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_rki_covid" "my_source_rkicovid" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `source_type` (String) must be one of ["rki-covid"]
-- `start_date` (String) UTC date in the format 2017-01-25. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_rss.md b/docs/data-sources/source_rss.md
index de1a555d2..98fe7fc1e 100644
--- a/docs/data-sources/source_rss.md
+++ b/docs/data-sources/source_rss.md
@@ -14,7 +14,6 @@ SourceRss DataSource
```terraform
data "airbyte_source_rss" "my_source_rss" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_rss" "my_source_rss" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `source_type` (String) must be one of ["rss"]
-- `url` (String) RSS Feed URL
-
diff --git a/docs/data-sources/source_s3.md b/docs/data-sources/source_s3.md
index 24db6a956..5ccf08821 100644
--- a/docs/data-sources/source_s3.md
+++ b/docs/data-sources/source_s3.md
@@ -14,7 +14,6 @@ SourceS3 DataSource
```terraform
data "airbyte_source_s3" "my_source_s3" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,355 +25,12 @@ data "airbyte_source_s3" "my_source_s3" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes
-because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK. (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `bucket` (String) Name of the S3 bucket where the file(s) exist.
-- `dataset` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores.
-- `endpoint` (String) Endpoint to an S3 compatible service. Leave empty to use AWS.
-- `format` (Attributes) Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate (see [below for nested schema](#nestedatt--configuration--format))
-- `path_pattern` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files.
-- `provider` (Attributes) Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services (see [below for nested schema](#nestedatt--configuration--provider))
-- `schema` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.
-- `source_type` (String) must be one of ["s3"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
-- `streams` (Attributes List) Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. (see [below for nested schema](#nestedatt--configuration--streams))
-
-
-### Nested Schema for `configuration.format`
-
-Read-Only:
-
-- `source_s3_file_format_avro` (Attributes) This connector utilises fastavro for Avro parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_file_format_avro))
-- `source_s3_file_format_csv` (Attributes) This connector utilises PyArrow (Apache Arrow) for CSV parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_file_format_csv))
-- `source_s3_file_format_jsonl` (Attributes) This connector uses PyArrow for JSON Lines (jsonl) file parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_file_format_jsonl))
-- `source_s3_file_format_parquet` (Attributes) This connector utilises PyArrow (Apache Arrow) for Parquet parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_file_format_parquet))
-- `source_s3_update_file_format_avro` (Attributes) This connector utilises fastavro for Avro parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_update_file_format_avro))
-- `source_s3_update_file_format_csv` (Attributes) This connector utilises PyArrow (Apache Arrow) for CSV parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_update_file_format_csv))
-- `source_s3_update_file_format_jsonl` (Attributes) This connector uses PyArrow for JSON Lines (jsonl) file parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_update_file_format_jsonl))
-- `source_s3_update_file_format_parquet` (Attributes) This connector utilises PyArrow (Apache Arrow) for Parquet parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_update_file_format_parquet))
-
-
-### Nested Schema for `configuration.format.source_s3_file_format_avro`
-
-Read-Only:
-
-- `filetype` (String) must be one of ["avro"]
-
-
-
-### Nested Schema for `configuration.format.source_s3_file_format_csv`
-
-Read-Only:
-
-- `additional_reader_options` (String) Optionally add a valid JSON string here to provide additional options to the csv reader. Mappings must correspond to options detailed here. 'column_types' is used internally to handle schema so overriding that would likely cause problems.
-- `advanced_options` (String) Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above.
-- `block_size` (Number) The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
-- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
-- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data.
-- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
-- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank.
-- `filetype` (String) must be one of ["csv"]
-- `infer_datatypes` (Boolean) Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings
-- `newlines_in_values` (Boolean) Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.
-- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank.
-
-
-
-### Nested Schema for `configuration.format.source_s3_file_format_jsonl`
-
-Read-Only:
-
-- `block_size` (Number) The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
-- `filetype` (String) must be one of ["jsonl"]
-- `newlines_in_values` (Boolean) Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.
-- `unexpected_field_behavior` (String) must be one of ["ignore", "infer", "error"]
-How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details
-
-
-
-### Nested Schema for `configuration.format.source_s3_file_format_parquet`
-
-Read-Only:
-
-- `batch_size` (Number) Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.
-- `buffer_size` (Number) Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.
-- `columns` (List of String) If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns.
-- `filetype` (String) must be one of ["parquet"]
-
-
-
-### Nested Schema for `configuration.format.source_s3_update_file_format_avro`
-
-Read-Only:
-
-- `filetype` (String) must be one of ["avro"]
-
-
-
-### Nested Schema for `configuration.format.source_s3_update_file_format_csv`
-
-Read-Only:
-
-- `additional_reader_options` (String) Optionally add a valid JSON string here to provide additional options to the csv reader. Mappings must correspond to options detailed here. 'column_types' is used internally to handle schema so overriding that would likely cause problems.
-- `advanced_options` (String) Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above.
-- `block_size` (Number) The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
-- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
-- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data.
-- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
-- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank.
-- `filetype` (String) must be one of ["csv"]
-- `infer_datatypes` (Boolean) Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings
-- `newlines_in_values` (Boolean) Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.
-- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank.
-
-
-
-### Nested Schema for `configuration.format.source_s3_update_file_format_jsonl`
-
-Read-Only:
-
-- `block_size` (Number) The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
-- `filetype` (String) must be one of ["jsonl"]
-- `newlines_in_values` (Boolean) Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.
-- `unexpected_field_behavior` (String) must be one of ["ignore", "infer", "error"]
-How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details
-
-
-
-### Nested Schema for `configuration.format.source_s3_update_file_format_parquet`
-
-Read-Only:
-
-- `batch_size` (Number) Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.
-- `buffer_size` (Number) Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.
-- `columns` (List of String) If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns.
-- `filetype` (String) must be one of ["parquet"]
-
-
-
-
-### Nested Schema for `configuration.provider`
-
-Read-Only:
-
-- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `bucket` (String) Name of the S3 bucket where the file(s) exist.
-- `endpoint` (String) Endpoint to an S3 compatible service. Leave empty to use AWS.
-- `path_prefix` (String) By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate.
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated.
-
-
-
-### Nested Schema for `configuration.streams`
-
-Read-Only:
-
-- `days_to_sync_if_history_is_full` (Number) When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
-- `file_type` (String) The data file type that is being extracted for a stream.
-- `format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format))
-- `globs` (List of String) The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
-- `input_schema` (String) The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
-- `legacy_prefix` (String) The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.
-- `name` (String) The name of the stream.
-- `primary_key` (String) The column or columns (for a composite key) that serves as the unique identifier of a record.
-- `schemaless` (Boolean) When enabled, syncs will not validate or structure records against the stream's schema.
-- `validation_policy` (String) must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
-The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
-
-
-### Nested Schema for `configuration.streams.format`
-
-Read-Only:
-
-- `source_s3_file_based_stream_config_format_avro_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_avro_format))
-- `source_s3_file_based_stream_config_format_csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_csv_format))
-- `source_s3_file_based_stream_config_format_jsonl_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_jsonl_format))
-- `source_s3_file_based_stream_config_format_parquet_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_parquet_format))
-- `source_s3_update_file_based_stream_config_format_avro_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_avro_format))
-- `source_s3_update_file_based_stream_config_format_csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_csv_format))
-- `source_s3_update_file_based_stream_config_format_jsonl_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_jsonl_format))
-- `source_s3_update_file_based_stream_config_format_parquet_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format))
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Read-Only:
-
-- `double_as_string` (Boolean) Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.
-- `filetype` (String) must be one of ["avro"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Read-Only:
-
-- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
-- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data.
-- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
-- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank.
-- `false_values` (List of String) A set of case-sensitive strings that should be interpreted as false values.
-- `filetype` (String) must be one of ["csv"]
-- `header_definition` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition))
-- `inference_type` (String) must be one of ["None", "Primitive Types Only"]
-How to infer the types of the columns. If none, inference defaults to strings.
-- `null_values` (List of String) A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
-- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank.
-- `skip_rows_after_header` (Number) The number of rows to skip after the header row.
-- `skip_rows_before_header` (Number) The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
-- `strings_can_be_null` (Boolean) Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
-- `true_values` (List of String) A set of case-sensitive strings that should be interpreted as true values.
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition`
-
-Read-Only:
-
-- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated))
-- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv))
-- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided))
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
-
-Read-Only:
-
-- `header_definition_type` (String) must be one of ["Autogenerated"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
-
-Read-Only:
-
-- `header_definition_type` (String) must be one of ["From CSV"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
-
-Read-Only:
-
-- `column_names` (List of String) The column names that will be used while emitting the CSV records
-- `header_definition_type` (String) must be one of ["User Provided"]
-
-
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Read-Only:
-
-- `filetype` (String) must be one of ["jsonl"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Read-Only:
-
-- `decimal_as_float` (Boolean) Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
-- `filetype` (String) must be one of ["parquet"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Read-Only:
-
-- `double_as_string` (Boolean) Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers.
-- `filetype` (String) must be one of ["avro"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Read-Only:
-
-- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
-- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data.
-- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
-- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank.
-- `false_values` (List of String) A set of case-sensitive strings that should be interpreted as false values.
-- `filetype` (String) must be one of ["csv"]
-- `header_definition` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition))
-- `inference_type` (String) must be one of ["None", "Primitive Types Only"]
-How to infer the types of the columns. If none, inference defaults to strings.
-- `null_values` (List of String) A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
-- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank.
-- `skip_rows_after_header` (Number) The number of rows to skip after the header row.
-- `skip_rows_before_header` (Number) The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
-- `strings_can_be_null` (Boolean) Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
-- `true_values` (List of String) A set of case-sensitive strings that should be interpreted as true values.
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition`
-
-Read-Only:
-
-- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated))
-- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv))
-- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided))
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
-
-Read-Only:
-
-- `header_definition_type` (String) must be one of ["Autogenerated"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
-
-Read-Only:
-
-- `header_definition_type` (String) must be one of ["From CSV"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
-
-Read-Only:
-
-- `column_names` (List of String) The column names that will be used while emitting the CSV records
-- `header_definition_type` (String) must be one of ["User Provided"]
-
-
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Read-Only:
-
-- `filetype` (String) must be one of ["jsonl"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Read-Only:
-
-- `decimal_as_float` (Boolean) Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
-- `filetype` (String) must be one of ["parquet"]
-
diff --git a/docs/data-sources/source_salesforce.md b/docs/data-sources/source_salesforce.md
index 108f31092..74ed7a3b6 100644
--- a/docs/data-sources/source_salesforce.md
+++ b/docs/data-sources/source_salesforce.md
@@ -14,7 +14,6 @@ SourceSalesforce DataSource
```terraform
data "airbyte_source_salesforce" "my_source_salesforce" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,37 +25,12 @@ data "airbyte_source_salesforce" "my_source_salesforce" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) Enter your Salesforce developer application's Client ID
-- `client_secret` (String) Enter your Salesforce developer application's Client secret
-- `force_use_bulk_api` (Boolean) Toggle to use Bulk API (this might cause empty fields for some streams)
-- `is_sandbox` (Boolean) Toggle if you're using a Salesforce Sandbox
-- `refresh_token` (String) Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account.
-- `source_type` (String) must be one of ["salesforce"]
-- `start_date` (String) Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ format. Airbyte will replicate the data updated on and after this date. If this field is blank, Airbyte will replicate the data for last two years.
-- `streams_criteria` (Attributes List) Add filters to select only required stream based on `SObject` name. Use this field to filter which tables are displayed by this connector. This is useful if your Salesforce account has a large number of tables (>1000), in which case you may find it easier to navigate the UI and speed up the connector's performance if you restrict the tables displayed by this connector. (see [below for nested schema](#nestedatt--configuration--streams_criteria))
-
-
-### Nested Schema for `configuration.streams_criteria`
-
-Read-Only:
-
-- `criteria` (String) must be one of ["starts with", "ends with", "contains", "exacts", "starts not with", "ends not with", "not contains", "not exacts"]
-- `value` (String)
-
diff --git a/docs/data-sources/source_salesloft.md b/docs/data-sources/source_salesloft.md
index c2029f97f..276a0880d 100644
--- a/docs/data-sources/source_salesloft.md
+++ b/docs/data-sources/source_salesloft.md
@@ -14,7 +14,6 @@ SourceSalesloft DataSource
```terraform
data "airbyte_source_salesloft" "my_source_salesloft" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,76 +25,12 @@ data "airbyte_source_salesloft" "my_source_salesloft" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["salesloft"]
-- `start_date` (String) The date from which you'd like to replicate data for Salesloft API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_salesloft_credentials_authenticate_via_api_key` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_salesloft_credentials_authenticate_via_api_key))
-- `source_salesloft_credentials_authenticate_via_o_auth` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_salesloft_credentials_authenticate_via_o_auth))
-- `source_salesloft_update_credentials_authenticate_via_api_key` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_salesloft_update_credentials_authenticate_via_api_key))
-- `source_salesloft_update_credentials_authenticate_via_o_auth` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_salesloft_update_credentials_authenticate_via_o_auth))
-
-
-### Nested Schema for `configuration.credentials.source_salesloft_credentials_authenticate_via_api_key`
-
-Read-Only:
-
-- `api_key` (String) API Key for making authenticated requests. More instruction on how to find this value in our docs
-- `auth_type` (String) must be one of ["api_key"]
-
-
-
-### Nested Schema for `configuration.credentials.source_salesloft_credentials_authenticate_via_o_auth`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your Salesloft developer application.
-- `client_secret` (String) The Client Secret of your Salesloft developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_salesloft_update_credentials_authenticate_via_api_key`
-
-Read-Only:
-
-- `api_key` (String) API Key for making authenticated requests. More instruction on how to find this value in our docs
-- `auth_type` (String) must be one of ["api_key"]
-
-
-
-### Nested Schema for `configuration.credentials.source_salesloft_update_credentials_authenticate_via_o_auth`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your Salesloft developer application.
-- `client_secret` (String) The Client Secret of your Salesloft developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
diff --git a/docs/data-sources/source_sap_fieldglass.md b/docs/data-sources/source_sap_fieldglass.md
index be138c261..ae8a2269f 100644
--- a/docs/data-sources/source_sap_fieldglass.md
+++ b/docs/data-sources/source_sap_fieldglass.md
@@ -14,7 +14,6 @@ SourceSapFieldglass DataSource
```terraform
data "airbyte_source_sap_fieldglass" "my_source_sapfieldglass" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_sap_fieldglass" "my_source_sapfieldglass" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API Key
-- `source_type` (String) must be one of ["sap-fieldglass"]
-
diff --git a/docs/data-sources/source_secoda.md b/docs/data-sources/source_secoda.md
index dcc538e3e..b133b589f 100644
--- a/docs/data-sources/source_secoda.md
+++ b/docs/data-sources/source_secoda.md
@@ -14,7 +14,6 @@ SourceSecoda DataSource
```terraform
data "airbyte_source_secoda" "my_source_secoda" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_secoda" "my_source_secoda" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Your API Access Key. See here. The key is case sensitive.
-- `source_type` (String) must be one of ["secoda"]
-
diff --git a/docs/data-sources/source_sendgrid.md b/docs/data-sources/source_sendgrid.md
index 9a2719f5a..4c535cdc3 100644
--- a/docs/data-sources/source_sendgrid.md
+++ b/docs/data-sources/source_sendgrid.md
@@ -14,7 +14,6 @@ SourceSendgrid DataSource
```terraform
data "airbyte_source_sendgrid" "my_source_sendgrid" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_sendgrid" "my_source_sendgrid" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `apikey` (String) API Key, use admin to generate this key.
-- `source_type` (String) must be one of ["sendgrid"]
-- `start_time` (String) Start time in ISO8601 format. Any data before this time point will not be replicated.
-
diff --git a/docs/data-sources/source_sendinblue.md b/docs/data-sources/source_sendinblue.md
index e36af0d59..ccc357e94 100644
--- a/docs/data-sources/source_sendinblue.md
+++ b/docs/data-sources/source_sendinblue.md
@@ -14,7 +14,6 @@ SourceSendinblue DataSource
```terraform
data "airbyte_source_sendinblue" "my_source_sendinblue" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_sendinblue" "my_source_sendinblue" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Your API Key. See here.
-- `source_type` (String) must be one of ["sendinblue"]
-
diff --git a/docs/data-sources/source_senseforce.md b/docs/data-sources/source_senseforce.md
index 16582c621..fd128aca6 100644
--- a/docs/data-sources/source_senseforce.md
+++ b/docs/data-sources/source_senseforce.md
@@ -14,7 +14,6 @@ SourceSenseforce DataSource
```terraform
data "airbyte_source_senseforce" "my_source_senseforce" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_senseforce" "my_source_senseforce" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) Your API access token. See here. The token is case sensitive.
-- `backend_url` (String) Your Senseforce API backend URL. This is the URL shown during the Login screen. See here for more details. (Note: Most Senseforce backend APIs have the term 'galaxy' in their URL)
-- `dataset_id` (String) The ID of the dataset you want to synchronize. The ID can be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to synchronize a specific dataset, each dataset you want to synchronize needs to be implemented as a separate airbyte source).
-- `slice_range` (Number) The time increment used by the connector when requesting data from the Senseforce API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted and the more likely one could run into rate limits. Furthermore, consider that large chunks of time might take a long time for the Senseforce query to return data - meaning it could take in effect longer than with more smaller time slices. If there are a lot of data per day, set this setting to 1. If there is only very little data per day, you might change the setting to 10 or more.
-- `source_type` (String) must be one of ["senseforce"]
-- `start_date` (String) UTC date and time in the format 2017-01-25. Only data with "Timestamp" after this date will be replicated. Important note: This start date must be set to the first day of where your dataset provides data. If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later
-
diff --git a/docs/data-sources/source_sentry.md b/docs/data-sources/source_sentry.md
index 2fdc3a11d..4e707b51b 100644
--- a/docs/data-sources/source_sentry.md
+++ b/docs/data-sources/source_sentry.md
@@ -14,7 +14,6 @@ SourceSentry DataSource
```terraform
data "airbyte_source_sentry" "my_source_sentry" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_sentry" "my_source_sentry" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `auth_token` (String) Log into Sentry and then create authentication tokens.For self-hosted, you can find or create authentication tokens by visiting "{instance_url_prefix}/settings/account/api/auth-tokens/"
-- `discover_fields` (List of String) Fields to retrieve when fetching discover events
-- `hostname` (String) Host name of Sentry API server.For self-hosted, specify your host name here. Otherwise, leave it empty.
-- `organization` (String) The slug of the organization the groups belong to.
-- `project` (String) The name (slug) of the Project you want to sync.
-- `source_type` (String) must be one of ["sentry"]
-
diff --git a/docs/data-sources/source_sftp.md b/docs/data-sources/source_sftp.md
index 943fa7510..bb02632ab 100644
--- a/docs/data-sources/source_sftp.md
+++ b/docs/data-sources/source_sftp.md
@@ -14,7 +14,6 @@ SourceSftp DataSource
```terraform
data "airbyte_source_sftp" "my_source_sftp" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,77 +25,12 @@ data "airbyte_source_sftp" "my_source_sftp" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials))
-- `file_pattern` (String) The regular expression to specify files for sync in a chosen Folder Path
-- `file_types` (String) Coma separated file types. Currently only 'csv' and 'json' types are supported.
-- `folder_path` (String) The directory to search files for sync
-- `host` (String) The server host address
-- `port` (Number) The server port
-- `source_type` (String) must be one of ["sftp"]
-- `user` (String) The server user
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_sftp_authentication_wildcard_password_authentication` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_sftp_authentication_wildcard_password_authentication))
-- `source_sftp_authentication_wildcard_ssh_key_authentication` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_sftp_authentication_wildcard_ssh_key_authentication))
-- `source_sftp_update_authentication_wildcard_password_authentication` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_sftp_update_authentication_wildcard_password_authentication))
-- `source_sftp_update_authentication_wildcard_ssh_key_authentication` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_sftp_update_authentication_wildcard_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.credentials.source_sftp_authentication_wildcard_password_authentication`
-
-Read-Only:
-
-- `auth_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through password authentication
-- `auth_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.credentials.source_sftp_authentication_wildcard_ssh_key_authentication`
-
-Read-Only:
-
-- `auth_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through ssh key
-- `auth_ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-
-
-
-### Nested Schema for `configuration.credentials.source_sftp_update_authentication_wildcard_password_authentication`
-
-Read-Only:
-
-- `auth_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through password authentication
-- `auth_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.credentials.source_sftp_update_authentication_wildcard_ssh_key_authentication`
-
-Read-Only:
-
-- `auth_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through ssh key
-- `auth_ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-
diff --git a/docs/data-sources/source_sftp_bulk.md b/docs/data-sources/source_sftp_bulk.md
index d0a7712ff..390535747 100644
--- a/docs/data-sources/source_sftp_bulk.md
+++ b/docs/data-sources/source_sftp_bulk.md
@@ -14,7 +14,6 @@ SourceSftpBulk DataSource
```terraform
data "airbyte_source_sftp_bulk" "my_source_sftpbulk" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,34 +25,12 @@ data "airbyte_source_sftp_bulk" "my_source_sftpbulk" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `file_most_recent` (Boolean) Sync only the most recent file for the configured folder path and file pattern
-- `file_pattern` (String) The regular expression to specify files for sync in a chosen Folder Path
-- `file_type` (String) must be one of ["csv", "json"]
-The file type you want to sync. Currently only 'csv' and 'json' files are supported.
-- `folder_path` (String) The directory to search files for sync
-- `host` (String) The server host address
-- `password` (String) OS-level password for logging into the jump server host
-- `port` (Number) The server port
-- `private_key` (String) The private key
-- `separator` (String) The separator used in the CSV files. Define None if you want to use the Sniffer functionality
-- `source_type` (String) must be one of ["sftp-bulk"]
-- `start_date` (String) The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-- `stream_name` (String) The name of the stream or table you want to create
-- `username` (String) The server user
-
diff --git a/docs/data-sources/source_shopify.md b/docs/data-sources/source_shopify.md
index 8cf60d496..0e0a77038 100644
--- a/docs/data-sources/source_shopify.md
+++ b/docs/data-sources/source_shopify.md
@@ -14,7 +14,6 @@ SourceShopify DataSource
```terraform
data "airbyte_source_shopify" "my_source_shopify" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,73 +25,12 @@ data "airbyte_source_shopify" "my_source_shopify" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) The authorization method to use to retrieve data from Shopify (see [below for nested schema](#nestedatt--configuration--credentials))
-- `shop` (String) The name of your Shopify store found in the URL. For example, if your URL was https://NAME.myshopify.com, then the name would be 'NAME' or 'NAME.myshopify.com'.
-- `source_type` (String) must be one of ["shopify"]
-- `start_date` (String) The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_shopify_shopify_authorization_method_api_password` (Attributes) API Password Auth (see [below for nested schema](#nestedatt--configuration--credentials--source_shopify_shopify_authorization_method_api_password))
-- `source_shopify_shopify_authorization_method_o_auth2_0` (Attributes) OAuth2.0 (see [below for nested schema](#nestedatt--configuration--credentials--source_shopify_shopify_authorization_method_o_auth2_0))
-- `source_shopify_update_shopify_authorization_method_api_password` (Attributes) API Password Auth (see [below for nested schema](#nestedatt--configuration--credentials--source_shopify_update_shopify_authorization_method_api_password))
-- `source_shopify_update_shopify_authorization_method_o_auth2_0` (Attributes) OAuth2.0 (see [below for nested schema](#nestedatt--configuration--credentials--source_shopify_update_shopify_authorization_method_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_shopify_shopify_authorization_method_api_password`
-
-Read-Only:
-
-- `api_password` (String) The API Password for your private application in the `Shopify` store.
-- `auth_method` (String) must be one of ["api_password"]
-
-
-
-### Nested Schema for `configuration.credentials.source_shopify_shopify_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) The Access Token for making authenticated requests.
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of the Shopify developer application.
-- `client_secret` (String) The Client Secret of the Shopify developer application.
-
-
-
-### Nested Schema for `configuration.credentials.source_shopify_update_shopify_authorization_method_api_password`
-
-Read-Only:
-
-- `api_password` (String) The API Password for your private application in the `Shopify` store.
-- `auth_method` (String) must be one of ["api_password"]
-
-
-
-### Nested Schema for `configuration.credentials.source_shopify_update_shopify_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) The Access Token for making authenticated requests.
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of the Shopify developer application.
-- `client_secret` (String) The Client Secret of the Shopify developer application.
-
diff --git a/docs/data-sources/source_shortio.md b/docs/data-sources/source_shortio.md
index 2b7a35624..dce7c1c72 100644
--- a/docs/data-sources/source_shortio.md
+++ b/docs/data-sources/source_shortio.md
@@ -14,7 +14,6 @@ SourceShortio DataSource
```terraform
data "airbyte_source_shortio" "my_source_shortio" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_shortio" "my_source_shortio" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `domain_id` (String)
-- `secret_key` (String) Short.io Secret Key
-- `source_type` (String) must be one of ["shortio"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_slack.md b/docs/data-sources/source_slack.md
index b08a5203b..d7bcbbc3e 100644
--- a/docs/data-sources/source_slack.md
+++ b/docs/data-sources/source_slack.md
@@ -14,7 +14,6 @@ SourceSlack DataSource
```terraform
data "airbyte_source_slack" "my_source_slack" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,75 +25,12 @@ data "airbyte_source_slack" "my_source_slack" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `channel_filter` (List of String) A channel name list (without leading '#' char) which limit the channels from which you'd like to sync. Empty list means no filter.
-- `credentials` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials))
-- `join_channels` (Boolean) Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages.
-- `lookback_window` (Number) How far into the past to look for messages in threads, default is 0 days
-- `source_type` (String) must be one of ["slack"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_slack_authentication_mechanism_api_token` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials--source_slack_authentication_mechanism_api_token))
-- `source_slack_authentication_mechanism_sign_in_via_slack_o_auth` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials--source_slack_authentication_mechanism_sign_in_via_slack_o_auth))
-- `source_slack_update_authentication_mechanism_api_token` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials--source_slack_update_authentication_mechanism_api_token))
-- `source_slack_update_authentication_mechanism_sign_in_via_slack_o_auth` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials--source_slack_update_authentication_mechanism_sign_in_via_slack_o_auth))
-
-
-### Nested Schema for `configuration.credentials.source_slack_authentication_mechanism_api_token`
-
-Read-Only:
-
-- `api_token` (String) A Slack bot token. See the docs for instructions on how to generate it.
-- `option_title` (String) must be one of ["API Token Credentials"]
-
-
-
-### Nested Schema for `configuration.credentials.source_slack_authentication_mechanism_sign_in_via_slack_o_auth`
-
-Read-Only:
-
-- `access_token` (String) Slack access_token. See our docs if you need help generating the token.
-- `client_id` (String) Slack client_id. See our docs if you need help finding this id.
-- `client_secret` (String) Slack client_secret. See our docs if you need help finding this secret.
-- `option_title` (String) must be one of ["Default OAuth2.0 authorization"]
-
-
-
-### Nested Schema for `configuration.credentials.source_slack_update_authentication_mechanism_api_token`
-
-Read-Only:
-
-- `api_token` (String) A Slack bot token. See the docs for instructions on how to generate it.
-- `option_title` (String) must be one of ["API Token Credentials"]
-
-
-
-### Nested Schema for `configuration.credentials.source_slack_update_authentication_mechanism_sign_in_via_slack_o_auth`
-
-Read-Only:
-
-- `access_token` (String) Slack access_token. See our docs if you need help generating the token.
-- `client_id` (String) Slack client_id. See our docs if you need help finding this id.
-- `client_secret` (String) Slack client_secret. See our docs if you need help finding this secret.
-- `option_title` (String) must be one of ["Default OAuth2.0 authorization"]
-
diff --git a/docs/data-sources/source_smaily.md b/docs/data-sources/source_smaily.md
index 18af44e6a..7e5e55dd0 100644
--- a/docs/data-sources/source_smaily.md
+++ b/docs/data-sources/source_smaily.md
@@ -14,7 +14,6 @@ SourceSmaily DataSource
```terraform
data "airbyte_source_smaily" "my_source_smaily" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_smaily" "my_source_smaily" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_password` (String) API user password. See https://smaily.com/help/api/general/create-api-user/
-- `api_subdomain` (String) API Subdomain. See https://smaily.com/help/api/general/create-api-user/
-- `api_username` (String) API user username. See https://smaily.com/help/api/general/create-api-user/
-- `source_type` (String) must be one of ["smaily"]
-
diff --git a/docs/data-sources/source_smartengage.md b/docs/data-sources/source_smartengage.md
index 06ed19ea9..84fe7388e 100644
--- a/docs/data-sources/source_smartengage.md
+++ b/docs/data-sources/source_smartengage.md
@@ -14,7 +14,6 @@ SourceSmartengage DataSource
```terraform
data "airbyte_source_smartengage" "my_source_smartengage" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_smartengage" "my_source_smartengage" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) API Key
-- `source_type` (String) must be one of ["smartengage"]
-
diff --git a/docs/data-sources/source_smartsheets.md b/docs/data-sources/source_smartsheets.md
index 7768c87cf..ad7dc3fa6 100644
--- a/docs/data-sources/source_smartsheets.md
+++ b/docs/data-sources/source_smartsheets.md
@@ -14,7 +14,6 @@ SourceSmartsheets DataSource
```terraform
data "airbyte_source_smartsheets" "my_source_smartsheets" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,78 +25,12 @@ data "airbyte_source_smartsheets" "my_source_smartsheets" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `metadata_fields` (List of String) A List of available columns which metadata can be pulled from.
-- `source_type` (String) must be one of ["smartsheets"]
-- `spreadsheet_id` (String) The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties
-- `start_datetime` (String) Only rows modified after this date/time will be replicated. This should be an ISO 8601 string, for instance: `2000-01-01T13:00:00`
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_smartsheets_authorization_method_api_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_smartsheets_authorization_method_api_access_token))
-- `source_smartsheets_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_smartsheets_authorization_method_o_auth2_0))
-- `source_smartsheets_update_authorization_method_api_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_smartsheets_update_authorization_method_api_access_token))
-- `source_smartsheets_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_smartsheets_update_authorization_method_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_smartsheets_authorization_method_api_access_token`
-
-Read-Only:
-
-- `access_token` (String) The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token.
-- `auth_type` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_smartsheets_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The API ID of the SmartSheets developer application.
-- `client_secret` (String) The API Secret the SmartSheets developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_smartsheets_update_authorization_method_api_access_token`
-
-Read-Only:
-
-- `access_token` (String) The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token.
-- `auth_type` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_smartsheets_update_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The API ID of the SmartSheets developer application.
-- `client_secret` (String) The API Secret the SmartSheets developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
diff --git a/docs/data-sources/source_snapchat_marketing.md b/docs/data-sources/source_snapchat_marketing.md
index 26e642b8d..b39594476 100644
--- a/docs/data-sources/source_snapchat_marketing.md
+++ b/docs/data-sources/source_snapchat_marketing.md
@@ -14,7 +14,6 @@ SourceSnapchatMarketing DataSource
```terraform
data "airbyte_source_snapchat_marketing" "my_source_snapchatmarketing" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_snapchat_marketing" "my_source_snapchatmarketing" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `client_id` (String) The Client ID of your Snapchat developer application.
-- `client_secret` (String) The Client Secret of your Snapchat developer application.
-- `end_date` (String) Date in the format 2017-01-25. Any data after this date will not be replicated.
-- `refresh_token` (String) Refresh Token to renew the expired Access Token.
-- `source_type` (String) must be one of ["snapchat-marketing"]
-- `start_date` (String) Date in the format 2022-01-01. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_snowflake.md b/docs/data-sources/source_snowflake.md
index fbd2c3c57..4a597384d 100644
--- a/docs/data-sources/source_snowflake.md
+++ b/docs/data-sources/source_snowflake.md
@@ -14,7 +14,6 @@ SourceSnowflake DataSource
```terraform
data "airbyte_source_snowflake" "my_source_snowflake" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,81 +25,12 @@ data "airbyte_source_snowflake" "my_source_snowflake" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `database` (String) The database you created for Airbyte to access data.
-- `host` (String) The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com).
-- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `role` (String) The role you created for Airbyte to access Snowflake.
-- `schema` (String) The source Snowflake schema tables. Leave empty to access tables from multiple schemas.
-- `source_type` (String) must be one of ["snowflake"]
-- `warehouse` (String) The warehouse you created for Airbyte to access data.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_snowflake_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_snowflake_authorization_method_o_auth2_0))
-- `source_snowflake_authorization_method_username_and_password` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_snowflake_authorization_method_username_and_password))
-- `source_snowflake_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_snowflake_update_authorization_method_o_auth2_0))
-- `source_snowflake_update_authorization_method_username_and_password` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_snowflake_update_authorization_method_username_and_password))
-
-
-### Nested Schema for `configuration.credentials.source_snowflake_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["OAuth"]
-- `client_id` (String) The Client ID of your Snowflake developer application.
-- `client_secret` (String) The Client Secret of your Snowflake developer application.
-- `refresh_token` (String) Refresh Token for making authenticated requests.
-
-
-
-### Nested Schema for `configuration.credentials.source_snowflake_authorization_method_username_and_password`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["username/password"]
-- `password` (String) The password associated with the username.
-- `username` (String) The username you created to allow Airbyte to access the database.
-
-
-
-### Nested Schema for `configuration.credentials.source_snowflake_update_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["OAuth"]
-- `client_id` (String) The Client ID of your Snowflake developer application.
-- `client_secret` (String) The Client Secret of your Snowflake developer application.
-- `refresh_token` (String) Refresh Token for making authenticated requests.
-
-
-
-### Nested Schema for `configuration.credentials.source_snowflake_update_authorization_method_username_and_password`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["username/password"]
-- `password` (String) The password associated with the username.
-- `username` (String) The username you created to allow Airbyte to access the database.
-
diff --git a/docs/data-sources/source_sonar_cloud.md b/docs/data-sources/source_sonar_cloud.md
index 1de5284fe..7948ba06a 100644
--- a/docs/data-sources/source_sonar_cloud.md
+++ b/docs/data-sources/source_sonar_cloud.md
@@ -14,7 +14,6 @@ SourceSonarCloud DataSource
```terraform
data "airbyte_source_sonar_cloud" "my_source_sonarcloud" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_sonar_cloud" "my_source_sonarcloud" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `component_keys` (List of String) Comma-separated list of component keys.
-- `end_date` (String) To retrieve issues created before the given date (inclusive).
-- `organization` (String) Organization key. See here.
-- `source_type` (String) must be one of ["sonar-cloud"]
-- `start_date` (String) To retrieve issues created after the given date (inclusive).
-- `user_token` (String) Your User Token. See here. The token is case sensitive.
-
diff --git a/docs/data-sources/source_spacex_api.md b/docs/data-sources/source_spacex_api.md
index db2ea8af5..5a256c803 100644
--- a/docs/data-sources/source_spacex_api.md
+++ b/docs/data-sources/source_spacex_api.md
@@ -14,7 +14,6 @@ SourceSpacexAPI DataSource
```terraform
data "airbyte_source_spacex_api" "my_source_spacexapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_spacex_api" "my_source_spacexapi" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `id` (String)
-- `options` (String)
-- `source_type` (String) must be one of ["spacex-api"]
-
diff --git a/docs/data-sources/source_square.md b/docs/data-sources/source_square.md
index fca8cd041..1078aab67 100644
--- a/docs/data-sources/source_square.md
+++ b/docs/data-sources/source_square.md
@@ -14,7 +14,6 @@ SourceSquare DataSource
```terraform
data "airbyte_source_square" "my_source_square" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,74 +25,12 @@ data "airbyte_source_square" "my_source_square" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `include_deleted_objects` (Boolean) In some streams there is an option to include deleted objects (Items, Categories, Discounts, Taxes)
-- `is_sandbox` (Boolean) Determines whether to use the sandbox or production environment.
-- `source_type` (String) must be one of ["square"]
-- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. If not set, all data will be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_square_authentication_api_key` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials--source_square_authentication_api_key))
-- `source_square_authentication_oauth_authentication` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials--source_square_authentication_oauth_authentication))
-- `source_square_update_authentication_api_key` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials--source_square_update_authentication_api_key))
-- `source_square_update_authentication_oauth_authentication` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials--source_square_update_authentication_oauth_authentication))
-
-
-### Nested Schema for `configuration.credentials.source_square_authentication_api_key`
-
-Read-Only:
-
-- `api_key` (String) The API key for a Square application
-- `auth_type` (String) must be one of ["API Key"]
-
-
-
-### Nested Schema for `configuration.credentials.source_square_authentication_oauth_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["OAuth"]
-- `client_id` (String) The Square-issued ID of your application
-- `client_secret` (String) The Square-issued application secret for your application
-- `refresh_token` (String) A refresh token generated using the above client ID and secret
-
-
-
-### Nested Schema for `configuration.credentials.source_square_update_authentication_api_key`
-
-Read-Only:
-
-- `api_key` (String) The API key for a Square application
-- `auth_type` (String) must be one of ["API Key"]
-
-
-
-### Nested Schema for `configuration.credentials.source_square_update_authentication_oauth_authentication`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["OAuth"]
-- `client_id` (String) The Square-issued ID of your application
-- `client_secret` (String) The Square-issued application secret for your application
-- `refresh_token` (String) A refresh token generated using the above client ID and secret
-
diff --git a/docs/data-sources/source_strava.md b/docs/data-sources/source_strava.md
index efa5437bb..f7bd57519 100644
--- a/docs/data-sources/source_strava.md
+++ b/docs/data-sources/source_strava.md
@@ -14,7 +14,6 @@ SourceStrava DataSource
```terraform
data "airbyte_source_strava" "my_source_strava" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,27 +25,12 @@ data "airbyte_source_strava" "my_source_strava" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `athlete_id` (Number) The Athlete ID of your Strava developer application.
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Strava developer application.
-- `client_secret` (String) The Client Secret of your Strava developer application.
-- `refresh_token` (String) The Refresh Token with the activity: read_all permissions.
-- `source_type` (String) must be one of ["strava"]
-- `start_date` (String) UTC date and time. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_stripe.md b/docs/data-sources/source_stripe.md
index a8b965c93..728ec1d65 100644
--- a/docs/data-sources/source_stripe.md
+++ b/docs/data-sources/source_stripe.md
@@ -14,7 +14,6 @@ SourceStripe DataSource
```terraform
data "airbyte_source_stripe" "my_source_stripe" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,26 +25,12 @@ data "airbyte_source_stripe" "my_source_stripe" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `account_id` (String) Your Stripe account ID (starts with 'acct_', find yours here).
-- `client_secret` (String) Stripe API key (usually starts with 'sk_live_'; find yours here).
-- `lookback_window_days` (Number) When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. More info here
-- `slice_range` (Number) The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted.
-- `source_type` (String) must be one of ["stripe"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated.
-
diff --git a/docs/data-sources/source_survey_sparrow.md b/docs/data-sources/source_survey_sparrow.md
index 77c24ca50..e383d3024 100644
--- a/docs/data-sources/source_survey_sparrow.md
+++ b/docs/data-sources/source_survey_sparrow.md
@@ -14,7 +14,6 @@ SourceSurveySparrow DataSource
```terraform
data "airbyte_source_survey_sparrow" "my_source_surveysparrow" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,65 +25,12 @@ data "airbyte_source_survey_sparrow" "my_source_surveysparrow" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) Your access token. See here. The key is case sensitive.
-- `region` (Attributes) Is your account location is EU based? If yes, the base url to retrieve data will be different. (see [below for nested schema](#nestedatt--configuration--region))
-- `source_type` (String) must be one of ["survey-sparrow"]
-- `survey_id` (List of String) A List of your survey ids for survey-specific stream
-
-
-### Nested Schema for `configuration.region`
-
-Read-Only:
-
-- `source_survey_sparrow_base_url_eu_based_account` (Attributes) Is your account location is EU based? If yes, the base url to retrieve data will be different. (see [below for nested schema](#nestedatt--configuration--region--source_survey_sparrow_base_url_eu_based_account))
-- `source_survey_sparrow_base_url_global_account` (Attributes) Is your account location is EU based? If yes, the base url to retrieve data will be different. (see [below for nested schema](#nestedatt--configuration--region--source_survey_sparrow_base_url_global_account))
-- `source_survey_sparrow_update_base_url_eu_based_account` (Attributes) Is your account location is EU based? If yes, the base url to retrieve data will be different. (see [below for nested schema](#nestedatt--configuration--region--source_survey_sparrow_update_base_url_eu_based_account))
-- `source_survey_sparrow_update_base_url_global_account` (Attributes) Is your account location is EU based? If yes, the base url to retrieve data will be different. (see [below for nested schema](#nestedatt--configuration--region--source_survey_sparrow_update_base_url_global_account))
-
-
-### Nested Schema for `configuration.region.source_survey_sparrow_base_url_eu_based_account`
-
-Read-Only:
-
-- `url_base` (String) must be one of ["https://eu-api.surveysparrow.com/v3"]
-
-
-
-### Nested Schema for `configuration.region.source_survey_sparrow_base_url_global_account`
-
-Read-Only:
-
-- `url_base` (String) must be one of ["https://api.surveysparrow.com/v3"]
-
-
-
-### Nested Schema for `configuration.region.source_survey_sparrow_update_base_url_eu_based_account`
-
-Read-Only:
-
-- `url_base` (String) must be one of ["https://eu-api.surveysparrow.com/v3"]
-
-
-
-### Nested Schema for `configuration.region.source_survey_sparrow_update_base_url_global_account`
-
-Read-Only:
-
-- `url_base` (String) must be one of ["https://api.surveysparrow.com/v3"]
-
diff --git a/docs/data-sources/source_surveymonkey.md b/docs/data-sources/source_surveymonkey.md
index 50d3595df..59bae368a 100644
--- a/docs/data-sources/source_surveymonkey.md
+++ b/docs/data-sources/source_surveymonkey.md
@@ -14,7 +14,6 @@ SourceSurveymonkey DataSource
```terraform
data "airbyte_source_surveymonkey" "my_source_surveymonkey" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,36 +25,12 @@ data "airbyte_source_surveymonkey" "my_source_surveymonkey" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) The authorization method to use to retrieve data from SurveyMonkey (see [below for nested schema](#nestedatt--configuration--credentials))
-- `origin` (String) must be one of ["USA", "Europe", "Canada"]
-Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different.
-- `source_type` (String) must be one of ["surveymonkey"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-- `survey_ids` (List of String) IDs of the surveys from which you'd like to replicate data. If left empty, data from all boards to which you have access will be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests. See the docs for information on how to generate this key.
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of the SurveyMonkey developer application.
-- `client_secret` (String) The Client Secret of the SurveyMonkey developer application.
-
diff --git a/docs/data-sources/source_tempo.md b/docs/data-sources/source_tempo.md
index 1e714ad7a..fab40e022 100644
--- a/docs/data-sources/source_tempo.md
+++ b/docs/data-sources/source_tempo.md
@@ -14,7 +14,6 @@ SourceTempo DataSource
```terraform
data "airbyte_source_tempo" "my_source_tempo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_tempo" "my_source_tempo" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_token` (String) Tempo API Token. Go to Tempo>Settings, scroll down to Data Access and select API integration.
-- `source_type` (String) must be one of ["tempo"]
-
diff --git a/docs/data-sources/source_the_guardian_api.md b/docs/data-sources/source_the_guardian_api.md
index 8c2359883..d40fe8275 100644
--- a/docs/data-sources/source_the_guardian_api.md
+++ b/docs/data-sources/source_the_guardian_api.md
@@ -14,7 +14,6 @@ SourceTheGuardianAPI DataSource
```terraform
data "airbyte_source_the_guardian_api" "my_source_theguardianapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,27 +25,12 @@ data "airbyte_source_the_guardian_api" "my_source_theguardianapi" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Your API Key. See here. The key is case sensitive.
-- `end_date` (String) (Optional) Use this to set the maximum date (YYYY-MM-DD) of the results. Results newer than the end_date will not be shown. Default is set to the current date (today) for incremental syncs.
-- `query` (String) (Optional) The query (q) parameter filters the results to only those that include that search term. The q parameter supports AND, OR and NOT operators.
-- `section` (String) (Optional) Use this to filter the results by a particular section. See here for a list of all sections, and here for the sections endpoint documentation.
-- `source_type` (String) must be one of ["the-guardian-api"]
-- `start_date` (String) Use this to set the minimum date (YYYY-MM-DD) of the results. Results older than the start_date will not be shown.
-- `tag` (String) (Optional) A tag is a piece of data that is used by The Guardian to categorise content. Use this parameter to filter results by showing only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation.
-
diff --git a/docs/data-sources/source_tiktok_marketing.md b/docs/data-sources/source_tiktok_marketing.md
index c023c19fb..da2ec6353 100644
--- a/docs/data-sources/source_tiktok_marketing.md
+++ b/docs/data-sources/source_tiktok_marketing.md
@@ -14,7 +14,6 @@ SourceTiktokMarketing DataSource
```terraform
data "airbyte_source_tiktok_marketing" "my_source_tiktokmarketing" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,79 +25,12 @@ data "airbyte_source_tiktok_marketing" "my_source_tiktokmarketing" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `attribution_window` (Number) The attribution window in days.
-- `credentials` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials))
-- `end_date` (String) The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DD. All data generated between start_date and this date will be replicated. Not setting this option will result in always syncing the data till the current date.
-- `include_deleted` (Boolean) Set to active if you want to include deleted data in reports.
-- `source_type` (String) must be one of ["tiktok-marketing"]
-- `start_date` (String) The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_tiktok_marketing_authentication_method_o_auth2_0` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_tiktok_marketing_authentication_method_o_auth2_0))
-- `source_tiktok_marketing_authentication_method_sandbox_access_token` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_tiktok_marketing_authentication_method_sandbox_access_token))
-- `source_tiktok_marketing_update_authentication_method_o_auth2_0` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_tiktok_marketing_update_authentication_method_o_auth2_0))
-- `source_tiktok_marketing_update_authentication_method_sandbox_access_token` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_tiktok_marketing_update_authentication_method_sandbox_access_token))
-
-
-### Nested Schema for `configuration.credentials.source_tiktok_marketing_authentication_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Long-term Authorized Access Token.
-- `advertiser_id` (String) The Advertiser ID to filter reports and streams. Let this empty to retrieve all.
-- `app_id` (String) The Developer Application App ID.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `secret` (String) The Developer Application Secret.
-
-
-
-### Nested Schema for `configuration.credentials.source_tiktok_marketing_authentication_method_sandbox_access_token`
-
-Read-Only:
-
-- `access_token` (String) The long-term authorized access token.
-- `advertiser_id` (String) The Advertiser ID which generated for the developer's Sandbox application.
-- `auth_type` (String) must be one of ["sandbox_access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_tiktok_marketing_update_authentication_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Long-term Authorized Access Token.
-- `advertiser_id` (String) The Advertiser ID to filter reports and streams. Let this empty to retrieve all.
-- `app_id` (String) The Developer Application App ID.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `secret` (String) The Developer Application Secret.
-
-
-
-### Nested Schema for `configuration.credentials.source_tiktok_marketing_update_authentication_method_sandbox_access_token`
-
-Read-Only:
-
-- `access_token` (String) The long-term authorized access token.
-- `advertiser_id` (String) The Advertiser ID which generated for the developer's Sandbox application.
-- `auth_type` (String) must be one of ["sandbox_access_token"]
-
diff --git a/docs/data-sources/source_todoist.md b/docs/data-sources/source_todoist.md
index 9dd87f1cc..06143db99 100644
--- a/docs/data-sources/source_todoist.md
+++ b/docs/data-sources/source_todoist.md
@@ -14,7 +14,6 @@ SourceTodoist DataSource
```terraform
data "airbyte_source_todoist" "my_source_todoist" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_todoist" "my_source_todoist" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `source_type` (String) must be one of ["todoist"]
-- `token` (String) Your API Token. See here. The token is case sensitive.
-
diff --git a/docs/data-sources/source_trello.md b/docs/data-sources/source_trello.md
index 7e782f778..f798e19d2 100644
--- a/docs/data-sources/source_trello.md
+++ b/docs/data-sources/source_trello.md
@@ -14,7 +14,6 @@ SourceTrello DataSource
```terraform
data "airbyte_source_trello" "my_source_trello" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_trello" "my_source_trello" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `board_ids` (List of String) IDs of the boards to replicate data from. If left empty, data from all boards to which you have access will be replicated.
-- `key` (String) Trello API key. See the docs for instructions on how to generate it.
-- `source_type` (String) must be one of ["trello"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-- `token` (String) Trello API token. See the docs for instructions on how to generate it.
-
diff --git a/docs/data-sources/source_trustpilot.md b/docs/data-sources/source_trustpilot.md
index 40768f864..ce38c9bc7 100644
--- a/docs/data-sources/source_trustpilot.md
+++ b/docs/data-sources/source_trustpilot.md
@@ -14,7 +14,6 @@ SourceTrustpilot DataSource
```terraform
data "airbyte_source_trustpilot" "my_source_trustpilot" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,77 +25,12 @@ data "airbyte_source_trustpilot" "my_source_trustpilot" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `business_units` (List of String) The names of business units which shall be synchronized. Some streams e.g. configured_business_units or private_reviews use this configuration.
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["trustpilot"]
-- `start_date` (String) For streams with sync. method incremental the start date time to be used
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_trustpilot_authorization_method_api_key` (Attributes) The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0. (see [below for nested schema](#nestedatt--configuration--credentials--source_trustpilot_authorization_method_api_key))
-- `source_trustpilot_authorization_method_o_auth_2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_trustpilot_authorization_method_o_auth_2_0))
-- `source_trustpilot_update_authorization_method_api_key` (Attributes) The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0. (see [below for nested schema](#nestedatt--configuration--credentials--source_trustpilot_update_authorization_method_api_key))
-- `source_trustpilot_update_authorization_method_o_auth_2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_trustpilot_update_authorization_method_o_auth_2_0))
-
-
-### Nested Schema for `configuration.credentials.source_trustpilot_authorization_method_api_key`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["apikey"]
-- `client_id` (String) The API key of the Trustpilot API application.
-
-
-
-### Nested Schema for `configuration.credentials.source_trustpilot_authorization_method_o_auth_2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The API key of the Trustpilot API application. (represents the OAuth Client ID)
-- `client_secret` (String) The Secret of the Trustpilot API application. (represents the OAuth Client Secret)
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_trustpilot_update_authorization_method_api_key`
-
-Read-Only:
-
-- `auth_type` (String) must be one of ["apikey"]
-- `client_id` (String) The API key of the Trustpilot API application.
-
-
-
-### Nested Schema for `configuration.credentials.source_trustpilot_update_authorization_method_o_auth_2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The API key of the Trustpilot API application. (represents the OAuth Client ID)
-- `client_secret` (String) The Secret of the Trustpilot API application. (represents the OAuth Client Secret)
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
diff --git a/docs/data-sources/source_tvmaze_schedule.md b/docs/data-sources/source_tvmaze_schedule.md
index e5fb9f150..04a2d1160 100644
--- a/docs/data-sources/source_tvmaze_schedule.md
+++ b/docs/data-sources/source_tvmaze_schedule.md
@@ -14,7 +14,6 @@ SourceTvmazeSchedule DataSource
```terraform
data "airbyte_source_tvmaze_schedule" "my_source_tvmazeschedule" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,27 +25,12 @@ data "airbyte_source_tvmaze_schedule" "my_source_tvmazeschedule" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `domestic_schedule_country_code` (String) Country code for domestic TV schedule retrieval.
-- `end_date` (String) End date for TV schedule retrieval. May be in the future. Optional.
-- `source_type` (String) must be one of ["tvmaze-schedule"]
-- `start_date` (String) Start date for TV schedule retrieval. May be in the future.
-- `web_schedule_country_code` (String) ISO 3166-1 country code for web TV schedule retrieval. Leave blank for
-all countries plus global web channels (e.g. Netflix). Alternatively,
-set to 'global' for just global web channels.
-
diff --git a/docs/data-sources/source_twilio.md b/docs/data-sources/source_twilio.md
index 9c1438cc8..932c5e0f3 100644
--- a/docs/data-sources/source_twilio.md
+++ b/docs/data-sources/source_twilio.md
@@ -14,7 +14,6 @@ SourceTwilio DataSource
```terraform
data "airbyte_source_twilio" "my_source_twilio" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_twilio" "my_source_twilio" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `account_sid` (String) Twilio account SID
-- `auth_token` (String) Twilio Auth Token.
-- `lookback_window` (Number) How far into the past to look for records. (in minutes)
-- `source_type` (String) must be one of ["twilio"]
-- `start_date` (String) UTC date and time in the format 2020-10-01T00:00:00Z. Any data before this date will not be replicated.
-
diff --git a/docs/data-sources/source_twilio_taskrouter.md b/docs/data-sources/source_twilio_taskrouter.md
index ed0927859..834fcdce4 100644
--- a/docs/data-sources/source_twilio_taskrouter.md
+++ b/docs/data-sources/source_twilio_taskrouter.md
@@ -14,7 +14,6 @@ SourceTwilioTaskrouter DataSource
```terraform
data "airbyte_source_twilio_taskrouter" "my_source_twiliotaskrouter" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_twilio_taskrouter" "my_source_twiliotaskrouter" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `account_sid` (String) Twilio Account ID
-- `auth_token` (String) Twilio Auth Token
-- `source_type` (String) must be one of ["twilio-taskrouter"]
-
diff --git a/docs/data-sources/source_twitter.md b/docs/data-sources/source_twitter.md
index 39e43977a..8dcc57d16 100644
--- a/docs/data-sources/source_twitter.md
+++ b/docs/data-sources/source_twitter.md
@@ -14,7 +14,6 @@ SourceTwitter DataSource
```terraform
data "airbyte_source_twitter" "my_source_twitter" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_twitter" "my_source_twitter" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) App only Bearer Token. See the docs for more information on how to obtain this token.
-- `end_date` (String) The end date for retrieving tweets must be a minimum of 10 seconds prior to the request time.
-- `query` (String) Query for matching Tweets. You can learn how to build this query by reading build a query guide .
-- `source_type` (String) must be one of ["twitter"]
-- `start_date` (String) The start date for retrieving tweets cannot be more than 7 days in the past.
-
diff --git a/docs/data-sources/source_typeform.md b/docs/data-sources/source_typeform.md
index fec7cb66f..8da3c5ab3 100644
--- a/docs/data-sources/source_typeform.md
+++ b/docs/data-sources/source_typeform.md
@@ -14,7 +14,6 @@ SourceTypeform DataSource
```terraform
data "airbyte_source_typeform" "my_source_typeform" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,77 +25,12 @@ data "airbyte_source_typeform" "my_source_typeform" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `form_ids` (List of String) When this parameter is set, the connector will replicate data only from the input forms. Otherwise, all forms in your Typeform account will be replicated. You can find form IDs in your form URLs. For example, in the URL "https://mysite.typeform.com/to/u6nXL7" the form_id is u6nXL7. You can find form URLs on Share panel
-- `source_type` (String) must be one of ["typeform"]
-- `start_date` (String) The date from which you'd like to replicate data for Typeform API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_typeform_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_typeform_authorization_method_o_auth2_0))
-- `source_typeform_authorization_method_private_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_typeform_authorization_method_private_token))
-- `source_typeform_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_typeform_update_authorization_method_o_auth2_0))
-- `source_typeform_update_authorization_method_private_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_typeform_update_authorization_method_private_token))
-
-
-### Nested Schema for `configuration.credentials.source_typeform_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of the Typeform developer application.
-- `client_secret` (String) The Client Secret the Typeform developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_typeform_authorization_method_private_token`
-
-Read-Only:
-
-- `access_token` (String) Log into your Typeform account and then generate a personal Access Token.
-- `auth_type` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_typeform_update_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of the Typeform developer application.
-- `client_secret` (String) The Client Secret the Typeform developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_typeform_update_authorization_method_private_token`
-
-Read-Only:
-
-- `access_token` (String) Log into your Typeform account and then generate a personal Access Token.
-- `auth_type` (String) must be one of ["access_token"]
-
diff --git a/docs/data-sources/source_us_census.md b/docs/data-sources/source_us_census.md
index b54bae126..0bfd24acf 100644
--- a/docs/data-sources/source_us_census.md
+++ b/docs/data-sources/source_us_census.md
@@ -14,7 +14,6 @@ SourceUsCensus DataSource
```terraform
data "airbyte_source_us_census" "my_source_uscensus" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,24 +25,12 @@ data "airbyte_source_us_census" "my_source_uscensus" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Your API Key. Get your key here.
-- `query_params` (String) The query parameters portion of the GET request, without the api key
-- `query_path` (String) The path portion of the GET request
-- `source_type` (String) must be one of ["us-census"]
-
diff --git a/docs/data-sources/source_vantage.md b/docs/data-sources/source_vantage.md
index a555ecf01..f76e6ff96 100644
--- a/docs/data-sources/source_vantage.md
+++ b/docs/data-sources/source_vantage.md
@@ -14,7 +14,6 @@ SourceVantage DataSource
```terraform
data "airbyte_source_vantage" "my_source_vantage" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_vantage" "my_source_vantage" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) Your API Access token. See here.
-- `source_type` (String) must be one of ["vantage"]
-
diff --git a/docs/data-sources/source_webflow.md b/docs/data-sources/source_webflow.md
index f842f597c..c50135773 100644
--- a/docs/data-sources/source_webflow.md
+++ b/docs/data-sources/source_webflow.md
@@ -14,7 +14,6 @@ SourceWebflow DataSource
```terraform
data "airbyte_source_webflow" "my_source_webflow" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,23 +25,12 @@ data "airbyte_source_webflow" "my_source_webflow" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) The API token for authenticating to Webflow. See https://university.webflow.com/lesson/intro-to-the-webflow-api
-- `site_id` (String) The id of the Webflow site you are requesting data from. See https://developers.webflow.com/#sites
-- `source_type` (String) must be one of ["webflow"]
-
diff --git a/docs/data-sources/source_whisky_hunter.md b/docs/data-sources/source_whisky_hunter.md
index 410fbfb32..c31769eeb 100644
--- a/docs/data-sources/source_whisky_hunter.md
+++ b/docs/data-sources/source_whisky_hunter.md
@@ -14,7 +14,6 @@ SourceWhiskyHunter DataSource
```terraform
data "airbyte_source_whisky_hunter" "my_source_whiskyhunter" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,21 +25,12 @@ data "airbyte_source_whisky_hunter" "my_source_whiskyhunter" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `source_type` (String) must be one of ["whisky-hunter"]
-
diff --git a/docs/data-sources/source_wikipedia_pageviews.md b/docs/data-sources/source_wikipedia_pageviews.md
index b08e98319..7ec5b9b6c 100644
--- a/docs/data-sources/source_wikipedia_pageviews.md
+++ b/docs/data-sources/source_wikipedia_pageviews.md
@@ -14,7 +14,6 @@ SourceWikipediaPageviews DataSource
```terraform
data "airbyte_source_wikipedia_pageviews" "my_source_wikipediapageviews" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,28 +25,12 @@ data "airbyte_source_wikipedia_pageviews" "my_source_wikipediapageviews" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access` (String) If you want to filter by access method, use one of desktop, mobile-app or mobile-web. If you are interested in pageviews regardless of access method, use all-access.
-- `agent` (String) If you want to filter by agent type, use one of user, automated or spider. If you are interested in pageviews regardless of agent type, use all-agents.
-- `article` (String) The title of any article in the specified project. Any spaces should be replaced with underscores. It also should be URI-encoded, so that non-URI-safe characters like %, / or ? are accepted.
-- `country` (String) The ISO 3166-1 alpha-2 code of a country for which to retrieve top articles.
-- `end` (String) The date of the last day to include, in YYYYMMDD or YYYYMMDDHH format.
-- `project` (String) If you want to filter by project, use the domain of any Wikimedia project.
-- `source_type` (String) must be one of ["wikipedia-pageviews"]
-- `start` (String) The date of the first day to include, in YYYYMMDD or YYYYMMDDHH format.
-
diff --git a/docs/data-sources/source_woocommerce.md b/docs/data-sources/source_woocommerce.md
index 771edd9af..8343df48b 100644
--- a/docs/data-sources/source_woocommerce.md
+++ b/docs/data-sources/source_woocommerce.md
@@ -14,7 +14,6 @@ SourceWoocommerce DataSource
```terraform
data "airbyte_source_woocommerce" "my_source_woocommerce" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_woocommerce" "my_source_woocommerce" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_key` (String) Customer Key for API in WooCommerce shop
-- `api_secret` (String) Customer Secret for API in WooCommerce shop
-- `shop` (String) The name of the store. For https://EXAMPLE.com, the shop name is 'EXAMPLE.com'.
-- `source_type` (String) must be one of ["woocommerce"]
-- `start_date` (String) The date you would like to replicate data from. Format: YYYY-MM-DD
-
diff --git a/docs/data-sources/source_xero.md b/docs/data-sources/source_xero.md
deleted file mode 100644
index f2e902a60..000000000
--- a/docs/data-sources/source_xero.md
+++ /dev/null
@@ -1,60 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_xero Data Source - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceXero DataSource
----
-
-# airbyte_source_xero (Data Source)
-
-SourceXero DataSource
-
-## Example Usage
-
-```terraform
-data "airbyte_source_xero" "my_source_xero" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
-```
-
-
-## Schema
-
-### Required
-
-- `source_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--authentication))
-- `source_type` (String) must be one of ["xero"]
-- `start_date` (String) UTC date and time in the format YYYY-MM-DDTHH:mm:ssZ. Any data with created_at before this data will not be synced.
-- `tenant_id` (String) Enter your Xero organization's Tenant ID
-
-
-### Nested Schema for `configuration.authentication`
-
-Read-Only:
-
-- `access_token` (String) Enter your Xero application's access token
-- `client_id` (String) Enter your Xero application's Client ID
-- `client_secret` (String) Enter your Xero application's Client Secret
-- `refresh_token` (String) Enter your Xero application's refresh token
-- `token_expiry_date` (String) The date-time when the access token should be refreshed
-
-
diff --git a/docs/data-sources/source_xkcd.md b/docs/data-sources/source_xkcd.md
index 8ff9e638d..a62fea152 100644
--- a/docs/data-sources/source_xkcd.md
+++ b/docs/data-sources/source_xkcd.md
@@ -14,7 +14,6 @@ SourceXkcd DataSource
```terraform
data "airbyte_source_xkcd" "my_source_xkcd" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,21 +25,12 @@ data "airbyte_source_xkcd" "my_source_xkcd" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `source_type` (String) must be one of ["xkcd"]
-
diff --git a/docs/data-sources/source_yandex_metrica.md b/docs/data-sources/source_yandex_metrica.md
index b92daa5f2..397c0bb29 100644
--- a/docs/data-sources/source_yandex_metrica.md
+++ b/docs/data-sources/source_yandex_metrica.md
@@ -14,7 +14,6 @@ SourceYandexMetrica DataSource
```terraform
data "airbyte_source_yandex_metrica" "my_source_yandexmetrica" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_yandex_metrica" "my_source_yandexmetrica" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `auth_token` (String) Your Yandex Metrica API access token
-- `counter_id` (String) Counter ID
-- `end_date` (String) Starting point for your data replication, in format of "YYYY-MM-DD". If not provided will sync till most recent date.
-- `source_type` (String) must be one of ["yandex-metrica"]
-- `start_date` (String) Starting point for your data replication, in format of "YYYY-MM-DD".
-
diff --git a/docs/data-sources/source_yotpo.md b/docs/data-sources/source_yotpo.md
index 38279d770..262fbb1a3 100644
--- a/docs/data-sources/source_yotpo.md
+++ b/docs/data-sources/source_yotpo.md
@@ -14,7 +14,6 @@ SourceYotpo DataSource
```terraform
data "airbyte_source_yotpo" "my_source_yotpo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_yotpo" "my_source_yotpo" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `access_token` (String) Access token recieved as a result of API call to https://api.yotpo.com/oauth/token (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)
-- `app_key` (String) App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)
-- `email` (String) Email address registered with yotpo.
-- `source_type` (String) must be one of ["yotpo"]
-- `start_date` (String) Date time filter for incremental filter, Specify which date to extract from.
-
diff --git a/docs/data-sources/source_younium.md b/docs/data-sources/source_younium.md
deleted file mode 100644
index 98f3eece0..000000000
--- a/docs/data-sources/source_younium.md
+++ /dev/null
@@ -1,50 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_younium Data Source - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceYounium DataSource
----
-
-# airbyte_source_younium (Data Source)
-
-SourceYounium DataSource
-
-## Example Usage
-
-```terraform
-data "airbyte_source_younium" "my_source_younium" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
-```
-
-
-## Schema
-
-### Required
-
-- `source_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `legal_entity` (String) Legal Entity that data should be pulled from
-- `password` (String) Account password for younium account API key
-- `playground` (Boolean) Property defining if connector is used against playground or production environment
-- `source_type` (String) must be one of ["younium"]
-- `username` (String) Username for Younium account
-
-
diff --git a/docs/data-sources/source_youtube_analytics.md b/docs/data-sources/source_youtube_analytics.md
index cb19bd3e7..a3b1e5153 100644
--- a/docs/data-sources/source_youtube_analytics.md
+++ b/docs/data-sources/source_youtube_analytics.md
@@ -14,7 +14,6 @@ SourceYoutubeAnalytics DataSource
```terraform
data "airbyte_source_youtube_analytics" "my_source_youtubeanalytics" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,35 +25,12 @@ data "airbyte_source_youtube_analytics" "my_source_youtubeanalytics" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["youtube-analytics"]
-
-
-### Nested Schema for `configuration.credentials`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `client_id` (String) The Client ID of your developer application
-- `client_secret` (String) The client secret of your developer application
-- `refresh_token` (String) A refresh token generated using the above client ID and secret
-
diff --git a/docs/data-sources/source_zendesk_chat.md b/docs/data-sources/source_zendesk_chat.md
index 6425e6c15..a8a08300f 100644
--- a/docs/data-sources/source_zendesk_chat.md
+++ b/docs/data-sources/source_zendesk_chat.md
@@ -14,7 +14,6 @@ SourceZendeskChat DataSource
```terraform
data "airbyte_source_zendesk_chat" "my_source_zendeskchat" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,75 +25,12 @@ data "airbyte_source_zendesk_chat" "my_source_zendeskchat" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["zendesk-chat"]
-- `start_date` (String) The date from which you'd like to replicate data for Zendesk Chat API, in the format YYYY-MM-DDT00:00:00Z.
-- `subdomain` (String) Required if you access Zendesk Chat from a Zendesk Support subdomain.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_zendesk_chat_authorization_method_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_chat_authorization_method_access_token))
-- `source_zendesk_chat_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_chat_authorization_method_o_auth2_0))
-- `source_zendesk_chat_update_authorization_method_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_chat_update_authorization_method_access_token))
-- `source_zendesk_chat_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_chat_update_authorization_method_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_chat_authorization_method_access_token`
-
-Read-Only:
-
-- `access_token` (String) The Access Token to make authenticated requests.
-- `credentials` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_chat_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `client_id` (String) The Client ID of your OAuth application
-- `client_secret` (String) The Client Secret of your OAuth application.
-- `credentials` (String) must be one of ["oauth2.0"]
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_chat_update_authorization_method_access_token`
-
-Read-Only:
-
-- `access_token` (String) The Access Token to make authenticated requests.
-- `credentials` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_chat_update_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `client_id` (String) The Client ID of your OAuth application
-- `client_secret` (String) The Client Secret of your OAuth application.
-- `credentials` (String) must be one of ["oauth2.0"]
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
-
diff --git a/docs/data-sources/source_zendesk_sell.md b/docs/data-sources/source_zendesk_sell.md
new file mode 100644
index 000000000..bd403d397
--- /dev/null
+++ b/docs/data-sources/source_zendesk_sell.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_source_zendesk_sell Data Source - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ SourceZendeskSell DataSource
+---
+
+# airbyte_source_zendesk_sell (Data Source)
+
+SourceZendeskSell DataSource
+
+## Example Usage
+
+```terraform
+data "airbyte_source_zendesk_sell" "my_source_zendesksell" {
+ source_id = "...my_source_id..."
+}
+```
+
+
+## Schema
+
+### Required
+
+- `source_id` (String)
+
+### Read-Only
+
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
+- `name` (String)
+- `source_type` (String)
+- `workspace_id` (String)
+
+
diff --git a/docs/data-sources/source_zendesk_sunshine.md b/docs/data-sources/source_zendesk_sunshine.md
index 20b70b156..b3554f5ea 100644
--- a/docs/data-sources/source_zendesk_sunshine.md
+++ b/docs/data-sources/source_zendesk_sunshine.md
@@ -14,7 +14,6 @@ SourceZendeskSunshine DataSource
```terraform
data "airbyte_source_zendesk_sunshine" "my_source_zendesksunshine" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,75 +25,12 @@ data "airbyte_source_zendesk_sunshine" "my_source_zendesksunshine" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["zendesk-sunshine"]
-- `start_date` (String) The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z.
-- `subdomain` (String) The subdomain for your Zendesk Account.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_zendesk_sunshine_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_sunshine_authorization_method_api_token))
-- `source_zendesk_sunshine_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_sunshine_authorization_method_o_auth2_0))
-- `source_zendesk_sunshine_update_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_sunshine_update_authorization_method_api_token))
-- `source_zendesk_sunshine_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_sunshine_update_authorization_method_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_sunshine_authorization_method_api_token`
-
-Read-Only:
-
-- `api_token` (String) API Token. See the docs for information on how to generate this key.
-- `auth_method` (String) must be one of ["api_token"]
-- `email` (String) The user email for your Zendesk account
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_sunshine_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Long-term access Token for making authenticated requests.
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_sunshine_update_authorization_method_api_token`
-
-Read-Only:
-
-- `api_token` (String) API Token. See the docs for information on how to generate this key.
-- `auth_method` (String) must be one of ["api_token"]
-- `email` (String) The user email for your Zendesk account
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_sunshine_update_authorization_method_o_auth2_0`
-
-Read-Only:
-
-- `access_token` (String) Long-term access Token for making authenticated requests.
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-
diff --git a/docs/data-sources/source_zendesk_support.md b/docs/data-sources/source_zendesk_support.md
index 9b4abef5d..b29e7588c 100644
--- a/docs/data-sources/source_zendesk_support.md
+++ b/docs/data-sources/source_zendesk_support.md
@@ -14,7 +14,6 @@ SourceZendeskSupport DataSource
```terraform
data "airbyte_source_zendesk_support" "my_source_zendesksupport" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,92 +25,12 @@ data "airbyte_source_zendesk_support" "my_source_zendesksupport" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `ignore_pagination` (Boolean) Makes each stream read a single page of data.
-- `source_type` (String) must be one of ["zendesk-support"]
-- `start_date` (String) The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-- `subdomain` (String) This is your unique Zendesk subdomain that can be found in your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/, MY_SUBDOMAIN is the value of your subdomain.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_zendesk_support_authentication_api_token` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_support_authentication_api_token))
-- `source_zendesk_support_authentication_o_auth2_0` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_support_authentication_o_auth2_0))
-- `source_zendesk_support_update_authentication_api_token` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_support_update_authentication_api_token))
-- `source_zendesk_support_update_authentication_o_auth2_0` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_support_update_authentication_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_support_authentication_api_token`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `api_token` (String) The value of the API token generated. See our full documentation for more information on generating this token.
-- `credentials` (String) must be one of ["api_token"]
-- `email` (String) The user email for your Zendesk account.
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_support_authentication_o_auth2_0`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `access_token` (String) The OAuth access token. See the Zendesk docs for more information on generating this token.
-- `client_id` (String) The OAuth client's ID. See this guide for more information.
-- `client_secret` (String) The OAuth client secret. See this guide for more information.
-- `credentials` (String) must be one of ["oauth2.0"]
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_support_update_authentication_api_token`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `api_token` (String) The value of the API token generated. See our full documentation for more information on generating this token.
-- `credentials` (String) must be one of ["api_token"]
-- `email` (String) The user email for your Zendesk account.
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_support_update_authentication_o_auth2_0`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `access_token` (String) The OAuth access token. See the Zendesk docs for more information on generating this token.
-- `client_id` (String) The OAuth client's ID. See this guide for more information.
-- `client_secret` (String) The OAuth client secret. See this guide for more information.
-- `credentials` (String) must be one of ["oauth2.0"]
-
diff --git a/docs/data-sources/source_zendesk_talk.md b/docs/data-sources/source_zendesk_talk.md
index 95a5dc86b..83e9ecfbe 100644
--- a/docs/data-sources/source_zendesk_talk.md
+++ b/docs/data-sources/source_zendesk_talk.md
@@ -14,7 +14,6 @@ SourceZendeskTalk DataSource
```terraform
data "airbyte_source_zendesk_talk" "my_source_zendesktalk" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,91 +25,12 @@ data "airbyte_source_zendesk_talk" "my_source_zendesktalk" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `credentials` (Attributes) Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["zendesk-talk"]
-- `start_date` (String) The date from which you'd like to replicate data for Zendesk Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-- `subdomain` (String) This is your Zendesk subdomain that can be found in your account URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN is the value of your subdomain.
-
-
-### Nested Schema for `configuration.credentials`
-
-Read-Only:
-
-- `source_zendesk_talk_authentication_api_token` (Attributes) Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_talk_authentication_api_token))
-- `source_zendesk_talk_authentication_o_auth2_0` (Attributes) Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_talk_authentication_o_auth2_0))
-- `source_zendesk_talk_update_authentication_api_token` (Attributes) Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_talk_update_authentication_api_token))
-- `source_zendesk_talk_update_authentication_o_auth2_0` (Attributes) Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_talk_update_authentication_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_talk_authentication_api_token`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `api_token` (String) The value of the API token generated. See the docs for more information.
-- `auth_type` (String) must be one of ["api_token"]
-- `email` (String) The user email for your Zendesk account.
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_talk_authentication_o_auth2_0`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `access_token` (String) The value of the API token generated. See the docs for more information.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) Client ID
-- `client_secret` (String) Client Secret
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_talk_update_authentication_api_token`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `api_token` (String) The value of the API token generated. See the docs for more information.
-- `auth_type` (String) must be one of ["api_token"]
-- `email` (String) The user email for your Zendesk account.
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_talk_update_authentication_o_auth2_0`
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-Read-Only:
-
-- `access_token` (String) The value of the API token generated. See the docs for more information.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) Client ID
-- `client_secret` (String) Client Secret
-
diff --git a/docs/data-sources/source_zenloop.md b/docs/data-sources/source_zenloop.md
index ea54a14ee..e6eeee995 100644
--- a/docs/data-sources/source_zenloop.md
+++ b/docs/data-sources/source_zenloop.md
@@ -14,7 +14,6 @@ SourceZenloop DataSource
```terraform
data "airbyte_source_zenloop" "my_source_zenloop" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,25 +25,12 @@ data "airbyte_source_zenloop" "my_source_zenloop" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `api_token` (String) Zenloop API Token. You can get the API token in settings page here
-- `date_from` (String) Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24. Leave empty if only data from current data should be synced
-- `source_type` (String) must be one of ["zenloop"]
-- `survey_group_id` (String) Zenloop Survey Group ID. Can be found by pulling All Survey Groups via SurveyGroups stream. Leave empty to pull answers from all survey groups
-- `survey_id` (String) Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys
-
diff --git a/docs/data-sources/source_zoho_crm.md b/docs/data-sources/source_zoho_crm.md
index 1c856c4d9..3d44996cb 100644
--- a/docs/data-sources/source_zoho_crm.md
+++ b/docs/data-sources/source_zoho_crm.md
@@ -14,7 +14,6 @@ SourceZohoCrm DataSource
```terraform
data "airbyte_source_zoho_crm" "my_source_zohocrm" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,31 +25,12 @@ data "airbyte_source_zoho_crm" "my_source_zohocrm" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `client_id` (String) OAuth2.0 Client ID
-- `client_secret` (String) OAuth2.0 Client Secret
-- `dc_region` (String) must be one of ["US", "AU", "EU", "IN", "CN", "JP"]
-Please choose the region of your Data Center location. More info by this Link
-- `edition` (String) must be one of ["Free", "Standard", "Professional", "Enterprise", "Ultimate"]
-Choose your Edition of Zoho CRM to determine API Concurrency Limits
-- `environment` (String) must be one of ["Production", "Developer", "Sandbox"]
-Please choose the environment
-- `refresh_token` (String) OAuth2.0 Refresh Token
-- `source_type` (String) must be one of ["zoho-crm"]
-- `start_datetime` (String) ISO 8601, for instance: `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS+HH:MM`
-
diff --git a/docs/data-sources/source_zoom.md b/docs/data-sources/source_zoom.md
index 8ad13c4cf..1450109ae 100644
--- a/docs/data-sources/source_zoom.md
+++ b/docs/data-sources/source_zoom.md
@@ -14,7 +14,6 @@ SourceZoom DataSource
```terraform
data "airbyte_source_zoom" "my_source_zoom" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,22 +25,12 @@ data "airbyte_source_zoom" "my_source_zoom" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `jwt_token` (String) JWT Token
-- `source_type` (String) must be one of ["zoom"]
-
diff --git a/docs/data-sources/source_zuora.md b/docs/data-sources/source_zuora.md
index 28e7259ce..b1f8ffca1 100644
--- a/docs/data-sources/source_zuora.md
+++ b/docs/data-sources/source_zuora.md
@@ -14,7 +14,6 @@ SourceZuora DataSource
```terraform
data "airbyte_source_zuora" "my_source_zuora" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
```
@@ -26,29 +25,12 @@ data "airbyte_source_zuora" "my_source_zuora" {
- `source_id` (String)
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
### Read-Only
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `configuration` (String) Parsed as JSON.
+The values required to configure the source.
- `name` (String)
+- `source_type` (String)
- `workspace_id` (String)
-
-### Nested Schema for `configuration`
-
-Read-Only:
-
-- `client_id` (String) Your OAuth user Client ID
-- `client_secret` (String) Your OAuth user Client Secret
-- `data_query` (String) must be one of ["Live", "Unlimited"]
-Choose between `Live`, or `Unlimited` - the optimized, replicated database at 12 hours freshness for high volume extraction Link
-- `source_type` (String) must be one of ["zuora"]
-- `start_date` (String) Start Date in format: YYYY-MM-DD
-- `tenant_endpoint` (String) must be one of ["US Production", "US Cloud Production", "US API Sandbox", "US Cloud API Sandbox", "US Central Sandbox", "US Performance Test", "EU Production", "EU API Sandbox", "EU Central Sandbox"]
-Please choose the right endpoint where your Tenant is located. More info by this Link
-- `window_in_days` (String) The amount of days for each data-chunk begining from start_date. Bigger the value - faster the fetch. (0.1 - as for couple of hours, 1 - as for a Day; 364 - as for a Year).
-
diff --git a/docs/data-sources/workspace.md b/docs/data-sources/workspace.md
index dc149db68..0d40ed212 100644
--- a/docs/data-sources/workspace.md
+++ b/docs/data-sources/workspace.md
@@ -27,7 +27,7 @@ data "airbyte_workspace" "my_workspace" {
### Read-Only
-- `data_residency` (String) must be one of ["auto", "us", "eu"]
-- `name` (String) Name of the workspace
+- `data_residency` (String) must be one of ["auto", "us", "eu"]; Default: "auto"
+- `name` (String)
diff --git a/docs/index.md b/docs/index.md
index be28658ed..8c1d8b88c 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -17,7 +17,7 @@ terraform {
required_providers {
airbyte = {
source = "airbytehq/airbyte"
- version = "0.3.4"
+ version = "0.3.5"
}
}
}
diff --git a/docs/resources/connection.md b/docs/resources/connection.md
index 83e738768..d7324a66c 100644
--- a/docs/resources/connection.md
+++ b/docs/resources/connection.md
@@ -20,7 +20,7 @@ resource "airbyte_connection" "my_connection" {
cursor_field = [
"...",
]
- name = "Terrence Rau"
+ name = "Cecil Johnson"
primary_key = [
[
"...",
@@ -30,19 +30,19 @@ resource "airbyte_connection" "my_connection" {
},
]
}
- data_residency = "us"
- destination_id = "d69a674e-0f46-47cc-8796-ed151a05dfc2"
- name = "Wilfred Wolff"
- namespace_definition = "custom_format"
+ data_residency = "auto"
+ destination_id = "e362083e-afc8-4559-94e0-a570f6dd427d"
+ name = "Melvin O'Connell"
+ namespace_definition = "source"
namespace_format = SOURCE_NAMESPACE
- non_breaking_schema_updates_behavior = "disable_connection"
+ non_breaking_schema_updates_behavior = "propagate_columns"
prefix = "...my_prefix..."
schedule = {
basic_timing = "...my_basic_timing..."
cron_expression = "...my_cron_expression..."
- schedule_type = "cron"
+ schedule_type = "manual"
}
- source_id = "ca1ba928-fc81-4674-acb7-39205929396f"
+ source_id = "78358423-25b6-4c7b-bfd2-fd307d60cb97"
status = "deprecated"
}
```
@@ -58,12 +58,13 @@ resource "airbyte_connection" "my_connection" {
### Optional
- `configurations` (Attributes) A list of configured stream options for a connection. (see [below for nested schema](#nestedatt--configurations))
-- `data_residency` (String) must be one of ["auto", "us", "eu"]
+- `data_residency` (String) must be one of ["auto", "us", "eu"]; Default: "auto"
- `name` (String) Optional name of the connection
-- `namespace_definition` (String) must be one of ["source", "destination", "custom_format"]
+- `namespace_definition` (String) must be one of ["source", "destination", "custom_format"]; Default: "destination"
Define the location where the data will be stored in the destination
-- `namespace_format` (String) Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
-- `non_breaking_schema_updates_behavior` (String) must be one of ["ignore", "disable_connection", "propagate_columns", "propagate_fully"]
+- `namespace_format` (String) Default: null
+Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
+- `non_breaking_schema_updates_behavior` (String) must be one of ["ignore", "disable_connection", "propagate_columns", "propagate_fully"]; Default: "ignore"
Set how Airbyte handles syncs when it detects a non-breaking schema change in the source
- `prefix` (String) Prefix that will be prepended to the name of each stream when it is written to the destination (ex. “airbyte_” causes “projects” => “airbyte_projects”).
- `schedule` (Attributes) schedule for when the the connection should run, per the schedule type (see [below for nested schema](#nestedatt--schedule))
diff --git a/docs/resources/destination_aws_datalake.md b/docs/resources/destination_aws_datalake.md
index a8eb09c1d..65cdffe7f 100644
--- a/docs/resources/destination_aws_datalake.md
+++ b/docs/resources/destination_aws_datalake.md
@@ -19,28 +19,27 @@ resource "airbyte_destination_aws_datalake" "my_destination_awsdatalake" {
bucket_name = "...my_bucket_name..."
bucket_prefix = "...my_bucket_prefix..."
credentials = {
- destination_aws_datalake_authentication_mode_iam_role = {
- credentials_title = "IAM Role"
- role_arn = "...my_role_arn..."
+ iam_role = {
+ role_arn = "...my_role_arn..."
}
}
- destination_type = "aws-datalake"
format = {
- destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json = {
+ json_lines_newline_delimited_json = {
compression_codec = "GZIP"
format_type = "JSONL"
}
}
- glue_catalog_float_as_decimal = true
+ glue_catalog_float_as_decimal = false
lakeformation_database_default_tag_key = "pii_level"
lakeformation_database_default_tag_values = "private,public"
lakeformation_database_name = "...my_lakeformation_database_name..."
- lakeformation_governed_tables = true
- partitioning = "DAY"
- region = "ap-southeast-1"
+ lakeformation_governed_tables = false
+ partitioning = "YEAR/MONTH/DAY"
+ region = "eu-west-1"
}
- name = "Dr. Rickey Boyle"
- workspace_id = "aa2352c5-9559-407a-bf1a-3a2fa9467739"
+ definition_id = "635b80f2-a9b0-4de1-897a-c8629f5a79ed"
+ name = "Blanche MacGyver"
+ workspace_id = "e76a2f8d-fb9a-4ea6-8f38-6615e68b5c3f"
}
```
@@ -50,9 +49,13 @@ resource "airbyte_destination_aws_datalake" "my_destination_awsdatalake" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -65,73 +68,47 @@ Required:
- `bucket_name` (String) The name of the S3 bucket. Read more here.
- `credentials` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `destination_type` (String) must be one of ["aws-datalake"]
- `lakeformation_database_name` (String) The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.
-- `region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the S3 bucket. See here for all region codes.
Optional:
- `aws_account_id` (String) target aws account id
- `bucket_prefix` (String) S3 prefix
- `format` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format))
-- `glue_catalog_float_as_decimal` (Boolean) Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source.
-- `lakeformation_database_default_tag_key` (String) Add a default tag key to databases created by this destination
+- `glue_catalog_float_as_decimal` (Boolean) Default: false
+Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source.
+- `lakeformation_database_default_tag_key` (String, Sensitive) Add a default tag key to databases created by this destination
- `lakeformation_database_default_tag_values` (String) Add default values for the `Tag Key` to databases created by this destination. Comma separate for multiple values.
-- `lakeformation_governed_tables` (Boolean) Whether to create tables as LF governed tables.
-- `partitioning` (String) must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]
+- `lakeformation_governed_tables` (Boolean) Default: false
+Whether to create tables as LF governed tables.
+- `partitioning` (String) must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]; Default: "NO PARTITIONING"
Partition data by cursor fields when a cursor field is a date
+- `region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""
+The region of the S3 bucket. See here for all region codes.
### Nested Schema for `configuration.credentials`
Optional:
-- `destination_aws_datalake_authentication_mode_iam_role` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials--destination_aws_datalake_authentication_mode_iam_role))
-- `destination_aws_datalake_authentication_mode_iam_user` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials--destination_aws_datalake_authentication_mode_iam_user))
-- `destination_aws_datalake_update_authentication_mode_iam_role` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials--destination_aws_datalake_update_authentication_mode_iam_role))
-- `destination_aws_datalake_update_authentication_mode_iam_user` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials--destination_aws_datalake_update_authentication_mode_iam_user))
-
-
-### Nested Schema for `configuration.credentials.destination_aws_datalake_authentication_mode_iam_role`
-
-Required:
-
-- `credentials_title` (String) must be one of ["IAM Role"]
-Name of the credentials
-- `role_arn` (String) Will assume this role to write data to s3
-
+- `iam_role` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials--iam_role))
+- `iam_user` (Attributes) Choose How to Authenticate to AWS. (see [below for nested schema](#nestedatt--configuration--credentials--iam_user))
-
-### Nested Schema for `configuration.credentials.destination_aws_datalake_authentication_mode_iam_user`
+
+### Nested Schema for `configuration.credentials.iam_role`
Required:
-- `aws_access_key_id` (String) AWS User Access Key Id
-- `aws_secret_access_key` (String) Secret Access Key
-- `credentials_title` (String) must be one of ["IAM User"]
-Name of the credentials
-
-
-
-### Nested Schema for `configuration.credentials.destination_aws_datalake_update_authentication_mode_iam_role`
-
-Required:
-
-- `credentials_title` (String) must be one of ["IAM Role"]
-Name of the credentials
- `role_arn` (String) Will assume this role to write data to s3
-
-### Nested Schema for `configuration.credentials.destination_aws_datalake_update_authentication_mode_iam_user`
+
+### Nested Schema for `configuration.credentials.iam_user`
Required:
-- `aws_access_key_id` (String) AWS User Access Key Id
-- `aws_secret_access_key` (String) Secret Access Key
-- `credentials_title` (String) must be one of ["IAM User"]
-Name of the credentials
+- `aws_access_key_id` (String, Sensitive) AWS User Access Key Id
+- `aws_secret_access_key` (String, Sensitive) Secret Access Key
@@ -140,60 +117,26 @@ Name of the credentials
Optional:
-- `destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format--destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json))
-- `destination_aws_datalake_output_format_wildcard_parquet_columnar_storage` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format--destination_aws_datalake_output_format_wildcard_parquet_columnar_storage))
-- `destination_aws_datalake_update_output_format_wildcard_json_lines_newline_delimited_json` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format--destination_aws_datalake_update_output_format_wildcard_json_lines_newline_delimited_json))
-- `destination_aws_datalake_update_output_format_wildcard_parquet_columnar_storage` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format--destination_aws_datalake_update_output_format_wildcard_parquet_columnar_storage))
+- `json_lines_newline_delimited_json` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json))
+- `parquet_columnar_storage` (Attributes) Format of the data output. (see [below for nested schema](#nestedatt--configuration--format--parquet_columnar_storage))
-
-### Nested Schema for `configuration.format.destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json`
-
-Required:
-
-- `format_type` (String) must be one of ["JSONL"]
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json`
Optional:
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "GZIP"]
+- `compression_codec` (String) must be one of ["UNCOMPRESSED", "GZIP"]; Default: "UNCOMPRESSED"
The compression algorithm used to compress data.
+- `format_type` (String) must be one of ["JSONL"]; Default: "JSONL"
-
-### Nested Schema for `configuration.format.destination_aws_datalake_output_format_wildcard_parquet_columnar_storage`
-
-Required:
-
-- `format_type` (String) must be one of ["Parquet"]
-
-Optional:
-
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]
-The compression algorithm used to compress data.
-
-
-
-### Nested Schema for `configuration.format.destination_aws_datalake_update_output_format_wildcard_json_lines_newline_delimited_json`
-
-Required:
-
-- `format_type` (String) must be one of ["JSONL"]
-
-Optional:
-
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "GZIP"]
-The compression algorithm used to compress data.
-
-
-
-### Nested Schema for `configuration.format.destination_aws_datalake_update_output_format_wildcard_parquet_columnar_storage`
-
-Required:
-
-- `format_type` (String) must be one of ["Parquet"]
+
+### Nested Schema for `configuration.format.parquet_columnar_storage`
Optional:
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]
+- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]; Default: "SNAPPY"
The compression algorithm used to compress data.
+- `format_type` (String) must be one of ["Parquet"]; Default: "Parquet"
diff --git a/docs/resources/destination_azure_blob_storage.md b/docs/resources/destination_azure_blob_storage.md
index 4fc8e3411..b9895f27e 100644
--- a/docs/resources/destination_azure_blob_storage.md
+++ b/docs/resources/destination_azure_blob_storage.md
@@ -21,16 +21,15 @@ resource "airbyte_destination_azure_blob_storage" "my_destination_azureblobstora
azure_blob_storage_endpoint_domain_name = "blob.core.windows.net"
azure_blob_storage_output_buffer_size = 5
azure_blob_storage_spill_size = 500
- destination_type = "azure-blob-storage"
format = {
- destination_azure_blob_storage_output_format_csv_comma_separated_values = {
- flattening = "No flattening"
- format_type = "CSV"
+ csv_comma_separated_values = {
+ flattening = "No flattening"
}
}
}
- name = "Matt Hamill"
- workspace_id = "3f5ad019-da1f-4fe7-8f09-7b0074f15471"
+ definition_id = "b38acf3b-23ea-44e3-abf4-ba0e7ac63cda"
+ name = "Rogelio Purdy"
+ workspace_id = "cd76c9fd-07c9-468d-acb9-cb44c87d9163"
}
```
@@ -40,9 +39,13 @@ resource "airbyte_destination_azure_blob_storage" "my_destination_azureblobstora
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -53,61 +56,38 @@ resource "airbyte_destination_azure_blob_storage" "my_destination_azureblobstora
Required:
-- `azure_blob_storage_account_key` (String) The Azure blob storage account key.
+- `azure_blob_storage_account_key` (String, Sensitive) The Azure blob storage account key.
- `azure_blob_storage_account_name` (String) The account's name of the Azure Blob Storage.
-- `destination_type` (String) must be one of ["azure-blob-storage"]
- `format` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format))
Optional:
- `azure_blob_storage_container_name` (String) The name of the Azure blob storage container. If not exists - will be created automatically. May be empty, then will be created automatically airbytecontainer+timestamp
-- `azure_blob_storage_endpoint_domain_name` (String) This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
-- `azure_blob_storage_output_buffer_size` (Number) The amount of megabytes to buffer for the output stream to Azure. This will impact memory footprint on workers, but may need adjustment for performance and appropriate block size in Azure.
-- `azure_blob_storage_spill_size` (Number) The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable
+- `azure_blob_storage_endpoint_domain_name` (String) Default: "blob.core.windows.net"
+This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
+- `azure_blob_storage_output_buffer_size` (Number) Default: 5
+The amount of megabytes to buffer for the output stream to Azure. This will impact memory footprint on workers, but may need adjustment for performance and appropriate block size in Azure.
+- `azure_blob_storage_spill_size` (Number) Default: 500
+The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable
### Nested Schema for `configuration.format`
Optional:
-- `destination_azure_blob_storage_output_format_csv_comma_separated_values` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format--destination_azure_blob_storage_output_format_csv_comma_separated_values))
-- `destination_azure_blob_storage_output_format_json_lines_newline_delimited_json` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format--destination_azure_blob_storage_output_format_json_lines_newline_delimited_json))
-- `destination_azure_blob_storage_update_output_format_csv_comma_separated_values` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format--destination_azure_blob_storage_update_output_format_csv_comma_separated_values))
-- `destination_azure_blob_storage_update_output_format_json_lines_newline_delimited_json` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format--destination_azure_blob_storage_update_output_format_json_lines_newline_delimited_json))
-
-
-### Nested Schema for `configuration.format.destination_azure_blob_storage_output_format_csv_comma_separated_values`
-
-Required:
-
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-- `format_type` (String) must be one of ["CSV"]
-
-
-
-### Nested Schema for `configuration.format.destination_azure_blob_storage_output_format_json_lines_newline_delimited_json`
-
-Required:
-
-- `format_type` (String) must be one of ["JSONL"]
+- `csv_comma_separated_values` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format--csv_comma_separated_values))
+- `json_lines_newline_delimited_json` (Attributes) Output data format (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json))
+
+### Nested Schema for `configuration.format.csv_comma_separated_values`
-
-### Nested Schema for `configuration.format.destination_azure_blob_storage_update_output_format_csv_comma_separated_values`
-
-Required:
+Optional:
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
+- `flattening` (String) must be one of ["No flattening", "Root level flattening"]; Default: "No flattening"
Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-- `format_type` (String) must be one of ["CSV"]
-
-
-### Nested Schema for `configuration.format.destination_azure_blob_storage_update_output_format_json_lines_newline_delimited_json`
-
-Required:
-- `format_type` (String) must be one of ["JSONL"]
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json`
diff --git a/docs/resources/destination_bigquery.md b/docs/resources/destination_bigquery.md
index aaf0fbe69..45f547141 100644
--- a/docs/resources/destination_bigquery.md
+++ b/docs/resources/destination_bigquery.md
@@ -18,30 +18,28 @@ resource "airbyte_destination_bigquery" "my_destination_bigquery" {
big_query_client_buffer_size_mb = 15
credentials_json = "...my_credentials_json..."
dataset_id = "...my_dataset_id..."
- dataset_location = "australia-southeast2"
- destination_type = "bigquery"
+ dataset_location = "me-central2"
+ disable_type_dedupe = true
loading_method = {
- destination_bigquery_loading_method_gcs_staging = {
+ gcs_staging = {
credential = {
- destination_bigquery_loading_method_gcs_staging_credential_hmac_key = {
- credential_type = "HMAC_KEY"
+ destination_bigquery_hmac_key = {
hmac_key_access_id = "1234567890abcdefghij1234"
hmac_key_secret = "1234567890abcdefghij1234567890ABCDEFGHIJ"
}
}
- file_buffer_count = 10
gcs_bucket_name = "airbyte_sync"
gcs_bucket_path = "data_sync/test"
- keep_files_in_gcs_bucket = "Delete all tmp files from GCS"
- method = "GCS Staging"
+ keep_files_in_gcs_bucket = "Keep all tmp files in GCS"
}
}
project_id = "...my_project_id..."
raw_data_dataset = "...my_raw_data_dataset..."
transformation_priority = "batch"
}
- name = "Edna Pouros"
- workspace_id = "d488e1e9-1e45-40ad-aabd-44269802d502"
+ definition_id = "2d142842-c5e9-475e-80d1-1a3c6d933cc0"
+ name = "Miss Celia Moore"
+ workspace_id = "2d2700dc-d43a-4c80-9ede-88b16b5e1575"
}
```
@@ -51,9 +49,13 @@ resource "airbyte_destination_bigquery" "my_destination_bigquery" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -65,18 +67,20 @@ resource "airbyte_destination_bigquery" "my_destination_bigquery" {
Required:
- `dataset_id` (String) The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
-- `dataset_location` (String) must be one of ["US", "EU", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central1", "europe-central2", "europe-north1", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west7", "europe-west8", "europe-west9", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east2", "us-east3", "us-east4", "us-east5", "us-west1", "us-west2", "us-west3", "us-west4"]
+- `dataset_location` (String) must be one of ["US", "EU", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central1", "europe-central2", "europe-north1", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west7", "europe-west8", "europe-west9", "europe-west12", "me-central1", "me-central2", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east2", "us-east3", "us-east4", "us-east5", "us-south1", "us-west1", "us-west2", "us-west3", "us-west4"]
The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.
-- `destination_type` (String) must be one of ["bigquery"]
- `project_id` (String) The GCP project ID for the project containing the target BigQuery dataset. Read more here.
Optional:
-- `big_query_client_buffer_size_mb` (Number) Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.
+- `big_query_client_buffer_size_mb` (Number) Default: 15
+Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.
- `credentials_json` (String) The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
-- `loading_method` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method))
-- `raw_data_dataset` (String) The dataset to write raw tables into
-- `transformation_priority` (String) must be one of ["interactive", "batch"]
+- `disable_type_dedupe` (Boolean) Default: false
+Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions
+- `loading_method` (Attributes) The way data will be uploaded to BigQuery. (see [below for nested schema](#nestedatt--configuration--loading_method))
+- `raw_data_dataset` (String) The dataset to write raw tables into (default: airbyte_internal)
+- `transformation_priority` (String) must be one of ["interactive", "batch"]; Default: "interactive"
Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly.
@@ -84,94 +88,42 @@ Interactive run type means that the query is executed as soon as possible, and t
Optional:
-- `destination_bigquery_loading_method_gcs_staging` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_loading_method_gcs_staging))
-- `destination_bigquery_loading_method_standard_inserts` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_loading_method_standard_inserts))
-- `destination_bigquery_update_loading_method_gcs_staging` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_update_loading_method_gcs_staging))
-- `destination_bigquery_update_loading_method_standard_inserts` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_update_loading_method_standard_inserts))
+- `gcs_staging` (Attributes) (recommended) Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO to load your data into BigQuery. Provides best-in-class speed, reliability and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--gcs_staging))
+- `standard_inserts` (Attributes) (not recommended) Direct loading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In all other cases, you should use GCS staging. (see [below for nested schema](#nestedatt--configuration--loading_method--standard_inserts))
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_loading_method_gcs_staging`
+
+### Nested Schema for `configuration.loading_method.gcs_staging`
Required:
-- `credential` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_loading_method_gcs_staging--credential))
+- `credential` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--gcs_staging--credential))
- `gcs_bucket_name` (String) The name of the GCS bucket. Read more here.
- `gcs_bucket_path` (String) Directory under the GCS bucket where data will be written.
-- `method` (String) must be one of ["GCS Staging"]
Optional:
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
-- `keep_files_in_gcs_bucket` (String) must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
+- `keep_files_in_gcs_bucket` (String) must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]; Default: "Delete all tmp files from GCS"
This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_loading_method_gcs_staging.keep_files_in_gcs_bucket`
+
+### Nested Schema for `configuration.loading_method.gcs_staging.keep_files_in_gcs_bucket`
Optional:
-- `destination_bigquery_loading_method_gcs_staging_credential_hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_loading_method_gcs_staging--keep_files_in_gcs_bucket--destination_bigquery_loading_method_gcs_staging_credential_hmac_key))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_loading_method_gcs_staging.keep_files_in_gcs_bucket.destination_bigquery_loading_method_gcs_staging_credential_hmac_key`
-
-Required:
-
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
-
-
+- `hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--gcs_staging--keep_files_in_gcs_bucket--hmac_key))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_loading_method_standard_inserts`
+
+### Nested Schema for `configuration.loading_method.gcs_staging.keep_files_in_gcs_bucket.hmac_key`
Required:
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_update_loading_method_gcs_staging`
-
-Required:
+- `hmac_key_access_id` (String, Sensitive) HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
+- `hmac_key_secret` (String, Sensitive) The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
-- `credential` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_update_loading_method_gcs_staging--credential))
-- `gcs_bucket_name` (String) The name of the GCS bucket. Read more here.
-- `gcs_bucket_path` (String) Directory under the GCS bucket where data will be written.
-- `method` (String) must be one of ["GCS Staging"]
-Optional:
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
-- `keep_files_in_gcs_bucket` (String) must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
-This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_update_loading_method_gcs_staging.keep_files_in_gcs_bucket`
-
-Optional:
-
-- `destination_bigquery_update_loading_method_gcs_staging_credential_hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_update_loading_method_gcs_staging--keep_files_in_gcs_bucket--destination_bigquery_update_loading_method_gcs_staging_credential_hmac_key))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_update_loading_method_gcs_staging.keep_files_in_gcs_bucket.destination_bigquery_update_loading_method_gcs_staging_credential_hmac_key`
-
-Required:
-
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
-
-
-
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_update_loading_method_standard_inserts`
-
-Required:
-- `method` (String) must be one of ["Standard"]
+
+### Nested Schema for `configuration.loading_method.standard_inserts`
diff --git a/docs/resources/destination_bigquery_denormalized.md b/docs/resources/destination_bigquery_denormalized.md
deleted file mode 100644
index 0c6bbd5fc..000000000
--- a/docs/resources/destination_bigquery_denormalized.md
+++ /dev/null
@@ -1,172 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_destination_bigquery_denormalized Resource - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- DestinationBigqueryDenormalized Resource
----
-
-# airbyte_destination_bigquery_denormalized (Resource)
-
-DestinationBigqueryDenormalized Resource
-
-## Example Usage
-
-```terraform
-resource "airbyte_destination_bigquery_denormalized" "my_destination_bigquerydenormalized" {
- configuration = {
- big_query_client_buffer_size_mb = 15
- credentials_json = "...my_credentials_json..."
- dataset_id = "...my_dataset_id..."
- dataset_location = "europe-west7"
- destination_type = "bigquery-denormalized"
- loading_method = {
- destination_bigquery_denormalized_loading_method_gcs_staging = {
- credential = {
- destination_bigquery_denormalized_loading_method_gcs_staging_credential_hmac_key = {
- credential_type = "HMAC_KEY"
- hmac_key_access_id = "1234567890abcdefghij1234"
- hmac_key_secret = "1234567890abcdefghij1234567890ABCDEFGHIJ"
- }
- }
- file_buffer_count = 10
- gcs_bucket_name = "airbyte_sync"
- gcs_bucket_path = "data_sync/test"
- keep_files_in_gcs_bucket = "Keep all tmp files in GCS"
- method = "GCS Staging"
- }
- }
- project_id = "...my_project_id..."
- }
- name = "Francisco Windler"
- workspace_id = "c969e9a3-efa7-47df-b14c-d66ae395efb9"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-### Read-Only
-
-- `destination_id` (String)
-- `destination_type` (String)
-
-
-### Nested Schema for `configuration`
-
-Required:
-
-- `dataset_id` (String) The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
-- `destination_type` (String) must be one of ["bigquery-denormalized"]
-- `project_id` (String) The GCP project ID for the project containing the target BigQuery dataset. Read more here.
-
-Optional:
-
-- `big_query_client_buffer_size_mb` (Number) Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.
-- `credentials_json` (String) The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
-- `dataset_location` (String) must be one of ["US", "EU", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central1", "europe-central2", "europe-north1", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west7", "europe-west8", "europe-west9", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east2", "us-east3", "us-east4", "us-east5", "us-west1", "us-west2", "us-west3", "us-west4"]
-The location of the dataset. Warning: Changes made after creation will not be applied. The default "US" value is used if not set explicitly. Read more here.
-- `loading_method` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method))
-
-
-### Nested Schema for `configuration.loading_method`
-
-Optional:
-
-- `destination_bigquery_denormalized_loading_method_gcs_staging` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_loading_method_gcs_staging))
-- `destination_bigquery_denormalized_loading_method_standard_inserts` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_loading_method_standard_inserts))
-- `destination_bigquery_denormalized_update_loading_method_gcs_staging` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_update_loading_method_gcs_staging))
-- `destination_bigquery_denormalized_update_loading_method_standard_inserts` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_update_loading_method_standard_inserts))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_loading_method_gcs_staging`
-
-Required:
-
-- `credential` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_loading_method_gcs_staging--credential))
-- `gcs_bucket_name` (String) The name of the GCS bucket. Read more here.
-- `gcs_bucket_path` (String) Directory under the GCS bucket where data will be written. Read more here.
-- `method` (String) must be one of ["GCS Staging"]
-
-Optional:
-
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
-- `keep_files_in_gcs_bucket` (String) must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
-This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_loading_method_gcs_staging.keep_files_in_gcs_bucket`
-
-Optional:
-
-- `destination_bigquery_denormalized_loading_method_gcs_staging_credential_hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_loading_method_gcs_staging--keep_files_in_gcs_bucket--destination_bigquery_denormalized_loading_method_gcs_staging_credential_hmac_key))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_loading_method_gcs_staging.keep_files_in_gcs_bucket.destination_bigquery_denormalized_loading_method_gcs_staging_credential_hmac_key`
-
-Required:
-
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
-
-
-
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_loading_method_standard_inserts`
-
-Required:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_update_loading_method_gcs_staging`
-
-Required:
-
-- `credential` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_update_loading_method_gcs_staging--credential))
-- `gcs_bucket_name` (String) The name of the GCS bucket. Read more here.
-- `gcs_bucket_path` (String) Directory under the GCS bucket where data will be written. Read more here.
-- `method` (String) must be one of ["GCS Staging"]
-
-Optional:
-
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
-- `keep_files_in_gcs_bucket` (String) must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
-This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_update_loading_method_gcs_staging.keep_files_in_gcs_bucket`
-
-Optional:
-
-- `destination_bigquery_denormalized_update_loading_method_gcs_staging_credential_hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--loading_method--destination_bigquery_denormalized_update_loading_method_gcs_staging--keep_files_in_gcs_bucket--destination_bigquery_denormalized_update_loading_method_gcs_staging_credential_hmac_key))
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_update_loading_method_gcs_staging.keep_files_in_gcs_bucket.destination_bigquery_denormalized_update_loading_method_gcs_staging_credential_hmac_key`
-
-Required:
-
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
-
-
-
-
-
-### Nested Schema for `configuration.loading_method.destination_bigquery_denormalized_update_loading_method_standard_inserts`
-
-Required:
-
-- `method` (String) must be one of ["Standard"]
-
-
diff --git a/docs/resources/destination_clickhouse.md b/docs/resources/destination_clickhouse.md
index 5dbe84b6c..5f5919893 100644
--- a/docs/resources/destination_clickhouse.md
+++ b/docs/resources/destination_clickhouse.md
@@ -15,21 +15,19 @@ DestinationClickhouse Resource
```terraform
resource "airbyte_destination_clickhouse" "my_destination_clickhouse" {
configuration = {
- database = "...my_database..."
- destination_type = "clickhouse"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 8123
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 8123
tunnel_method = {
- destination_clickhouse_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ no_tunnel = {}
}
- username = "Magdalena_Kuvalis"
+ username = "Rhianna_Leannon"
}
- name = "Sandy Huels"
- workspace_id = "97074ba4-469b-46e2-9419-59890afa563e"
+ definition_id = "2c276398-b468-48ad-b426-53c327fa18b5"
+ name = "Gerardo Corwin"
+ workspace_id = "4f41e22e-39b6-461a-89af-71290b2c6d65"
}
```
@@ -39,9 +37,13 @@ resource "airbyte_destination_clickhouse" "my_destination_clickhouse" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -53,15 +55,15 @@ resource "airbyte_destination_clickhouse" "my_destination_clickhouse" {
Required:
- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["clickhouse"]
- `host` (String) Hostname of the database.
-- `port` (Number) HTTP port of the database.
- `username` (String) Username to use to access the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) Password associated with the username.
+- `password` (String, Sensitive) Password associated with the username.
+- `port` (Number) Default: 8123
+HTTP port of the database.
- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
@@ -69,80 +71,41 @@ Optional:
Optional:
-- `destination_clickhouse_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_ssh_tunnel_method_no_tunnel))
-- `destination_clickhouse_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_ssh_tunnel_method_password_authentication))
-- `destination_clickhouse_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_ssh_tunnel_method_ssh_key_authentication))
-- `destination_clickhouse_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_update_ssh_tunnel_method_no_tunnel))
-- `destination_clickhouse_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_update_ssh_tunnel_method_password_authentication))
-- `destination_clickhouse_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_clickhouse_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_ssh_tunnel_method_no_tunnel`
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
+Optional:
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_update_ssh_tunnel_method_no_tunnel`
-
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_clickhouse_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/destination_convex.md b/docs/resources/destination_convex.md
index 680d10d2b..d592ddc92 100644
--- a/docs/resources/destination_convex.md
+++ b/docs/resources/destination_convex.md
@@ -15,12 +15,12 @@ DestinationConvex Resource
```terraform
resource "airbyte_destination_convex" "my_destination_convex" {
configuration = {
- access_key = "...my_access_key..."
- deployment_url = "https://murky-swan-635.convex.cloud"
- destination_type = "convex"
+ access_key = "...my_access_key..."
+ deployment_url = "https://cluttered-owl-337.convex.cloud"
}
- name = "Joyce Kertzmann"
- workspace_id = "4c8b711e-5b7f-4d2e-9028-921cddc69260"
+ definition_id = "335e03ab-ebb7-41b5-8e87-2ec68b6d2a9c"
+ name = "Patsy Powlowski"
+ workspace_id = "6941566f-22fd-430a-a8af-8c1d27b3e573"
}
```
@@ -30,9 +30,13 @@ resource "airbyte_destination_convex" "my_destination_convex" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -43,8 +47,7 @@ resource "airbyte_destination_convex" "my_destination_convex" {
Required:
-- `access_key` (String) API access key used to send data to a Convex deployment.
+- `access_key` (String, Sensitive) API access key used to send data to a Convex deployment.
- `deployment_url` (String) URL of the Convex deployment that is the destination
-- `destination_type` (String) must be one of ["convex"]
diff --git a/docs/resources/destination_cumulio.md b/docs/resources/destination_cumulio.md
index 29875b578..52ddf5562 100644
--- a/docs/resources/destination_cumulio.md
+++ b/docs/resources/destination_cumulio.md
@@ -15,13 +15,13 @@ DestinationCumulio Resource
```terraform
resource "airbyte_destination_cumulio" "my_destination_cumulio" {
configuration = {
- api_host = "...my_api_host..."
- api_key = "...my_api_key..."
- api_token = "...my_api_token..."
- destination_type = "cumulio"
+ api_host = "...my_api_host..."
+ api_key = "...my_api_key..."
+ api_token = "...my_api_token..."
}
- name = "Ebony Predovic"
- workspace_id = "6b0d5f0d-30c5-4fbb-a587-053202c73d5f"
+ definition_id = "c0eb8223-613d-423c-a875-293aec4aa100"
+ name = "Felipe Champlin"
+ workspace_id = "22581a88-452d-4e7c-b5eb-92a9e952da29"
}
```
@@ -31,9 +31,13 @@ resource "airbyte_destination_cumulio" "my_destination_cumulio" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -44,9 +48,12 @@ resource "airbyte_destination_cumulio" "my_destination_cumulio" {
Required:
-- `api_host` (String) URL of the Cumul.io API (e.g. 'https://api.cumul.io', 'https://api.us.cumul.io', or VPC-specific API url). Defaults to 'https://api.cumul.io'.
-- `api_key` (String) An API key generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).
-- `api_token` (String) The corresponding API token generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).
-- `destination_type` (String) must be one of ["cumulio"]
+- `api_key` (String, Sensitive) An API key generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).
+- `api_token` (String, Sensitive) The corresponding API token generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).
+
+Optional:
+
+- `api_host` (String) Default: "https://api.cumul.io"
+URL of the Cumul.io API (e.g. 'https://api.cumul.io', 'https://api.us.cumul.io', or VPC-specific API url). Defaults to 'https://api.cumul.io'.
diff --git a/docs/resources/destination_databend.md b/docs/resources/destination_databend.md
index 7cf563b9e..d6e3d0dda 100644
--- a/docs/resources/destination_databend.md
+++ b/docs/resources/destination_databend.md
@@ -15,16 +15,16 @@ DestinationDatabend Resource
```terraform
resource "airbyte_destination_databend" "my_destination_databend" {
configuration = {
- database = "...my_database..."
- destination_type = "databend"
- host = "...my_host..."
- password = "...my_password..."
- port = 443
- table = "default"
- username = "Leo.Purdy"
+ database = "...my_database..."
+ host = "...my_host..."
+ password = "...my_password..."
+ port = 443
+ table = "default"
+ username = "Kira78"
}
- name = "Bobby Kutch V"
- workspace_id = "b3fe49a8-d9cb-4f48-a333-23f9b77f3a41"
+ definition_id = "006aecee-7c88-4461-9655-998ae24eec56"
+ name = "Josefina Rosenbaum"
+ workspace_id = "48d71917-bd77-4158-87e0-4c579843cbfb"
}
```
@@ -34,9 +34,13 @@ resource "airbyte_destination_databend" "my_destination_databend" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -48,14 +52,15 @@ resource "airbyte_destination_databend" "my_destination_databend" {
Required:
- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["databend"]
- `host` (String) Hostname of the database.
- `username` (String) Username to use to access the database.
Optional:
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the database.
-- `table` (String) The default table was written to.
+- `password` (String, Sensitive) Password associated with the username.
+- `port` (Number) Default: 443
+Port of the database.
+- `table` (String) Default: "default"
+The default table was written to.
diff --git a/docs/resources/destination_databricks.md b/docs/resources/destination_databricks.md
index c8014b63a..bfea10ef6 100644
--- a/docs/resources/destination_databricks.md
+++ b/docs/resources/destination_databricks.md
@@ -17,22 +17,20 @@ resource "airbyte_destination_databricks" "my_destination_databricks" {
configuration = {
accept_terms = false
data_source = {
- destination_databricks_data_source_recommended_managed_tables = {
- data_source_type = "MANAGED_TABLES_STORAGE"
- }
+ recommended_managed_tables = {}
}
database = "...my_database..."
databricks_http_path = "sql/protocolvx/o/1234567489/0000-1111111-abcd90"
databricks_personal_access_token = "dapi0123456789abcdefghij0123456789AB"
databricks_port = "443"
databricks_server_hostname = "abc-12345678-wxyz.cloud.databricks.com"
- destination_type = "databricks"
enable_schema_evolution = true
purge_staging_data = false
schema = "default"
}
- name = "Bertha Thompson"
- workspace_id = "69280d1b-a77a-489e-bf73-7ae4203ce5e6"
+ definition_id = "05d7306c-fa6f-460b-bc11-e74f736d7a95"
+ name = "Meghan Mitchell"
+ workspace_id = "4c049945-edd6-4e95-a416-d119e802e071"
}
```
@@ -42,9 +40,13 @@ resource "airbyte_destination_databricks" "my_destination_databricks" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -55,112 +57,67 @@ resource "airbyte_destination_databricks" "my_destination_databricks" {
Required:
-- `accept_terms` (Boolean) You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector.
- `data_source` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source))
- `databricks_http_path` (String) Databricks Cluster HTTP Path.
-- `databricks_personal_access_token` (String) Databricks Personal Access Token for making authenticated requests.
+- `databricks_personal_access_token` (String, Sensitive) Databricks Personal Access Token for making authenticated requests.
- `databricks_server_hostname` (String) Databricks Cluster Server Hostname.
-- `destination_type` (String) must be one of ["databricks"]
Optional:
+- `accept_terms` (Boolean) Default: false
+You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector.
- `database` (String) The name of the catalog. If not specified otherwise, the "hive_metastore" will be used.
-- `databricks_port` (String) Databricks Cluster Port.
-- `enable_schema_evolution` (Boolean) Support schema evolution for all streams. If "false", the connector might fail when a stream's schema changes.
-- `purge_staging_data` (Boolean) Default to 'true'. Switch it to 'false' for debugging purpose.
-- `schema` (String) The default schema tables are written. If not specified otherwise, the "default" will be used.
+- `databricks_port` (String) Default: "443"
+Databricks Cluster Port.
+- `enable_schema_evolution` (Boolean) Default: false
+Support schema evolution for all streams. If "false", the connector might fail when a stream's schema changes.
+- `purge_staging_data` (Boolean) Default: true
+Default to 'true'. Switch it to 'false' for debugging purpose.
+- `schema` (String) Default: "default"
+The default schema tables are written. If not specified otherwise, the "default" will be used.
### Nested Schema for `configuration.data_source`
Optional:
-- `destination_databricks_data_source_amazon_s3` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_data_source_amazon_s3))
-- `destination_databricks_data_source_azure_blob_storage` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_data_source_azure_blob_storage))
-- `destination_databricks_data_source_recommended_managed_tables` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_data_source_recommended_managed_tables))
-- `destination_databricks_update_data_source_amazon_s3` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_update_data_source_amazon_s3))
-- `destination_databricks_update_data_source_azure_blob_storage` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_update_data_source_azure_blob_storage))
-- `destination_databricks_update_data_source_recommended_managed_tables` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--destination_databricks_update_data_source_recommended_managed_tables))
+- `amazon_s3` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--amazon_s3))
+- `azure_blob_storage` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--azure_blob_storage))
+- `recommended_managed_tables` (Attributes) Storage on which the delta lake is built. (see [below for nested schema](#nestedatt--configuration--data_source--recommended_managed_tables))
-
-### Nested Schema for `configuration.data_source.destination_databricks_data_source_amazon_s3`
+
+### Nested Schema for `configuration.data_source.amazon_s3`
Required:
-- `data_source_type` (String) must be one of ["S3_STORAGE"]
-- `s3_access_key_id` (String) The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.
+- `s3_access_key_id` (String, Sensitive) The Access Key Id granting access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.
- `s3_bucket_name` (String) The name of the S3 bucket to use for intermittent staging of the data.
- `s3_bucket_path` (String) The directory under the S3 bucket where data will be written.
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the S3 staging bucket to use if utilising a copy strategy.
-- `s3_secret_access_key` (String) The corresponding secret to the above access key id.
+- `s3_secret_access_key` (String, Sensitive) The corresponding secret to the above access key id.
Optional:
- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-
-
-
-### Nested Schema for `configuration.data_source.destination_databricks_data_source_azure_blob_storage`
-
-Required:
-
-- `azure_blob_storage_account_name` (String) The account's name of the Azure Blob Storage.
-- `azure_blob_storage_container_name` (String) The name of the Azure blob storage container.
-- `azure_blob_storage_sas_token` (String) Shared access signature (SAS) token to grant limited access to objects in your storage account.
-- `data_source_type` (String) must be one of ["AZURE_BLOB_STORAGE"]
-
-Optional:
-
-- `azure_blob_storage_endpoint_domain_name` (String) This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
-
-
-
-### Nested Schema for `configuration.data_source.destination_databricks_data_source_recommended_managed_tables`
-
-Required:
-
-- `data_source_type` (String) must be one of ["MANAGED_TABLES_STORAGE"]
-
-
-
-### Nested Schema for `configuration.data_source.destination_databricks_update_data_source_amazon_s3`
-
-Required:
-
-- `data_source_type` (String) must be one of ["S3_STORAGE"]
-- `s3_access_key_id` (String) The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.
-- `s3_bucket_name` (String) The name of the S3 bucket to use for intermittent staging of the data.
-- `s3_bucket_path` (String) The directory under the S3 bucket where data will be written.
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
+- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""
The region of the S3 staging bucket to use if utilising a copy strategy.
-- `s3_secret_access_key` (String) The corresponding secret to the above access key id.
-
-Optional:
-
-- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-
-### Nested Schema for `configuration.data_source.destination_databricks_update_data_source_azure_blob_storage`
+
+### Nested Schema for `configuration.data_source.azure_blob_storage`
Required:
- `azure_blob_storage_account_name` (String) The account's name of the Azure Blob Storage.
- `azure_blob_storage_container_name` (String) The name of the Azure blob storage container.
-- `azure_blob_storage_sas_token` (String) Shared access signature (SAS) token to grant limited access to objects in your storage account.
-- `data_source_type` (String) must be one of ["AZURE_BLOB_STORAGE"]
+- `azure_blob_storage_sas_token` (String, Sensitive) Shared access signature (SAS) token to grant limited access to objects in your storage account.
Optional:
-- `azure_blob_storage_endpoint_domain_name` (String) This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
-
+- `azure_blob_storage_endpoint_domain_name` (String) Default: "blob.core.windows.net"
+This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
-
-### Nested Schema for `configuration.data_source.destination_databricks_update_data_source_recommended_managed_tables`
-
-Required:
-- `data_source_type` (String) must be one of ["MANAGED_TABLES_STORAGE"]
+
+### Nested Schema for `configuration.data_source.recommended_managed_tables`
diff --git a/docs/resources/destination_dev_null.md b/docs/resources/destination_dev_null.md
index 4d2ed7135..da4a14f86 100644
--- a/docs/resources/destination_dev_null.md
+++ b/docs/resources/destination_dev_null.md
@@ -15,15 +15,13 @@ DestinationDevNull Resource
```terraform
resource "airbyte_destination_dev_null" "my_destination_devnull" {
configuration = {
- destination_type = "dev-null"
test_destination = {
- destination_dev_null_test_destination_silent = {
- test_destination_type = "SILENT"
- }
+ silent = {}
}
}
- name = "Rene Hane"
- workspace_id = "a0d446ce-2af7-4a73-8f3b-e453f870b326"
+ definition_id = "29d4644f-9dd3-4d54-87cf-b82ef1e01ef5"
+ name = "Megan King"
+ workspace_id = "9e2c85c9-04a2-403f-b157-a47112db1eec"
}
```
@@ -33,9 +31,13 @@ resource "airbyte_destination_dev_null" "my_destination_devnull" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -46,7 +48,6 @@ resource "airbyte_destination_dev_null" "my_destination_devnull" {
Required:
-- `destination_type` (String) must be one of ["dev-null"]
- `test_destination` (Attributes) The type of destination to be used (see [below for nested schema](#nestedatt--configuration--test_destination))
@@ -54,22 +55,9 @@ Required:
Optional:
-- `destination_dev_null_test_destination_silent` (Attributes) The type of destination to be used (see [below for nested schema](#nestedatt--configuration--test_destination--destination_dev_null_test_destination_silent))
-- `destination_dev_null_update_test_destination_silent` (Attributes) The type of destination to be used (see [below for nested schema](#nestedatt--configuration--test_destination--destination_dev_null_update_test_destination_silent))
-
-
-### Nested Schema for `configuration.test_destination.destination_dev_null_test_destination_silent`
-
-Required:
-
-- `test_destination_type` (String) must be one of ["SILENT"]
-
-
-
-### Nested Schema for `configuration.test_destination.destination_dev_null_update_test_destination_silent`
-
-Required:
+- `silent` (Attributes) The type of destination to be used (see [below for nested schema](#nestedatt--configuration--test_destination--silent))
-- `test_destination_type` (String) must be one of ["SILENT"]
+
+### Nested Schema for `configuration.test_destination.silent`
diff --git a/docs/resources/destination_duckdb.md b/docs/resources/destination_duckdb.md
new file mode 100644
index 000000000..52d86ef2e
--- /dev/null
+++ b/docs/resources/destination_duckdb.md
@@ -0,0 +1,58 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_destination_duckdb Resource - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ DestinationDuckdb Resource
+---
+
+# airbyte_destination_duckdb (Resource)
+
+DestinationDuckdb Resource
+
+## Example Usage
+
+```terraform
+resource "airbyte_destination_duckdb" "my_destination_duckdb" {
+ configuration = {
+ destination_path = "motherduck:"
+ motherduck_api_key = "...my_motherduck_api_key..."
+ schema = "main"
+ }
+ definition_id = "9f91eb58-c332-4574-9699-3f062684640d"
+ name = "Bobbie Lang"
+ workspace_id = "d52cbff0-1858-4935-bdfe-2750539f4b80"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
+- `workspace_id` (String)
+
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
+### Read-Only
+
+- `destination_id` (String)
+- `destination_type` (String)
+
+
+### Nested Schema for `configuration`
+
+Required:
+
+- `destination_path` (String) Path to the .duckdb file, or the text 'md:' to connect to MotherDuck. The file will be placed inside that local mount. For more information, check out our docs.
+
+Optional:
+
+- `motherduck_api_key` (String, Sensitive) API key to use for authentication to a MotherDuck database.
+- `schema` (String) Database schema name, default for duckdb is 'main'.
+
+
diff --git a/docs/resources/destination_dynamodb.md b/docs/resources/destination_dynamodb.md
index dc59cc20e..10aa98dd2 100644
--- a/docs/resources/destination_dynamodb.md
+++ b/docs/resources/destination_dynamodb.md
@@ -16,14 +16,14 @@ DestinationDynamodb Resource
resource "airbyte_destination_dynamodb" "my_destination_dynamodb" {
configuration = {
access_key_id = "A012345678910EXAMPLE"
- destination_type = "dynamodb"
dynamodb_endpoint = "http://localhost:9000"
- dynamodb_region = "eu-south-1"
+ dynamodb_region = "ap-southeast-1"
dynamodb_table_name_prefix = "airbyte_sync"
secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
}
- name = "Joanna Kohler"
- workspace_id = "29cdb1a8-422b-4b67-9d23-22715bf0cbb1"
+ definition_id = "f993efae-2dca-4f86-989d-ab1153f466f7"
+ name = "Ms. Larry Reynolds"
+ workspace_id = "5aa0db79-7942-4be7-a5f1-f78855663545"
}
```
@@ -33,9 +33,13 @@ resource "airbyte_destination_dynamodb" "my_destination_dynamodb" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -46,15 +50,15 @@ resource "airbyte_destination_dynamodb" "my_destination_dynamodb" {
Required:
-- `access_key_id` (String) The access key id to access the DynamoDB. Airbyte requires Read and Write permissions to the DynamoDB.
-- `destination_type` (String) must be one of ["dynamodb"]
-- `dynamodb_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the DynamoDB.
+- `access_key_id` (String, Sensitive) The access key id to access the DynamoDB. Airbyte requires Read and Write permissions to the DynamoDB.
- `dynamodb_table_name_prefix` (String) The prefix to use when naming DynamoDB tables.
-- `secret_access_key` (String) The corresponding secret to the access key id.
+- `secret_access_key` (String, Sensitive) The corresponding secret to the access key id.
Optional:
-- `dynamodb_endpoint` (String) This is your DynamoDB endpoint url.(if you are working with AWS DynamoDB, just leave empty).
+- `dynamodb_endpoint` (String) Default: ""
+This is your DynamoDB endpoint url.(if you are working with AWS DynamoDB, just leave empty).
+- `dynamodb_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""
+The region of the DynamoDB.
diff --git a/docs/resources/destination_elasticsearch.md b/docs/resources/destination_elasticsearch.md
index df6faf196..2718d954d 100644
--- a/docs/resources/destination_elasticsearch.md
+++ b/docs/resources/destination_elasticsearch.md
@@ -16,19 +16,18 @@ DestinationElasticsearch Resource
resource "airbyte_destination_elasticsearch" "my_destination_elasticsearch" {
configuration = {
authentication_method = {
- destination_elasticsearch_authentication_method_api_key_secret = {
+ api_key_secret = {
api_key_id = "...my_api_key_id..."
api_key_secret = "...my_api_key_secret..."
- method = "secret"
}
}
- ca_certificate = "...my_ca_certificate..."
- destination_type = "elasticsearch"
- endpoint = "...my_endpoint..."
- upsert = true
+ ca_certificate = "...my_ca_certificate..."
+ endpoint = "...my_endpoint..."
+ upsert = false
}
- name = "Carolyn Rohan"
- workspace_id = "90f3443a-1108-4e0a-9cf4-b921879fce95"
+ definition_id = "da65ed46-5e75-48af-92ad-38ed7ed0e5e2"
+ name = "Katherine Considine"
+ workspace_id = "7d0e4e50-95ed-494b-8ecb-397d064562ef"
}
```
@@ -38,9 +37,13 @@ resource "airbyte_destination_elasticsearch" "my_destination_elasticsearch" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -51,62 +54,38 @@ resource "airbyte_destination_elasticsearch" "my_destination_elasticsearch" {
Required:
-- `destination_type` (String) must be one of ["elasticsearch"]
- `endpoint` (String) The full url of the Elasticsearch server
Optional:
- `authentication_method` (Attributes) The type of authentication to be used (see [below for nested schema](#nestedatt--configuration--authentication_method))
- `ca_certificate` (String) CA certificate
-- `upsert` (Boolean) If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys.
+- `upsert` (Boolean) Default: true
+If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys.
### Nested Schema for `configuration.authentication_method`
Optional:
-- `destination_elasticsearch_authentication_method_api_key_secret` (Attributes) Use a api key and secret combination to authenticate (see [below for nested schema](#nestedatt--configuration--authentication_method--destination_elasticsearch_authentication_method_api_key_secret))
-- `destination_elasticsearch_authentication_method_username_password` (Attributes) Basic auth header with a username and password (see [below for nested schema](#nestedatt--configuration--authentication_method--destination_elasticsearch_authentication_method_username_password))
-- `destination_elasticsearch_update_authentication_method_api_key_secret` (Attributes) Use a api key and secret combination to authenticate (see [below for nested schema](#nestedatt--configuration--authentication_method--destination_elasticsearch_update_authentication_method_api_key_secret))
-- `destination_elasticsearch_update_authentication_method_username_password` (Attributes) Basic auth header with a username and password (see [below for nested schema](#nestedatt--configuration--authentication_method--destination_elasticsearch_update_authentication_method_username_password))
-
-
-### Nested Schema for `configuration.authentication_method.destination_elasticsearch_authentication_method_api_key_secret`
-
-Required:
-
-- `api_key_id` (String) The Key ID to used when accessing an enterprise Elasticsearch instance.
-- `api_key_secret` (String) The secret associated with the API Key ID.
-- `method` (String) must be one of ["secret"]
-
-
-
-### Nested Schema for `configuration.authentication_method.destination_elasticsearch_authentication_method_username_password`
-
-Required:
-
-- `method` (String) must be one of ["basic"]
-- `password` (String) Basic auth password to access a secure Elasticsearch server
-- `username` (String) Basic auth username to access a secure Elasticsearch server
-
+- `api_key_secret` (Attributes) Use an API key and secret combination to authenticate (see [below for nested schema](#nestedatt--configuration--authentication_method--api_key_secret))
+- `username_password` (Attributes) Basic auth header with a username and password (see [below for nested schema](#nestedatt--configuration--authentication_method--username_password))
-
-### Nested Schema for `configuration.authentication_method.destination_elasticsearch_update_authentication_method_api_key_secret`
+
+### Nested Schema for `configuration.authentication_method.api_key_secret`
Required:
- `api_key_id` (String) The Key ID to used when accessing an enterprise Elasticsearch instance.
- `api_key_secret` (String) The secret associated with the API Key ID.
-- `method` (String) must be one of ["secret"]
-
-### Nested Schema for `configuration.authentication_method.destination_elasticsearch_update_authentication_method_username_password`
+
+### Nested Schema for `configuration.authentication_method.username_password`
Required:
-- `method` (String) must be one of ["basic"]
-- `password` (String) Basic auth password to access a secure Elasticsearch server
+- `password` (String, Sensitive) Basic auth password to access a secure Elasticsearch server
- `username` (String) Basic auth username to access a secure Elasticsearch server
diff --git a/docs/resources/destination_firebolt.md b/docs/resources/destination_firebolt.md
index 6a7c63d78..f4dc30b3d 100644
--- a/docs/resources/destination_firebolt.md
+++ b/docs/resources/destination_firebolt.md
@@ -15,16 +15,14 @@ DestinationFirebolt Resource
```terraform
resource "airbyte_destination_firebolt" "my_destination_firebolt" {
configuration = {
- account = "...my_account..."
- database = "...my_database..."
- destination_type = "firebolt"
- engine = "...my_engine..."
- host = "api.app.firebolt.io"
+ account = "...my_account..."
+ database = "...my_database..."
+ engine = "...my_engine..."
+ host = "api.app.firebolt.io"
loading_method = {
- destination_firebolt_loading_method_external_table_via_s3 = {
+ external_table_via_s3 = {
aws_key_id = "...my_aws_key_id..."
aws_key_secret = "...my_aws_key_secret..."
- method = "S3"
s3_bucket = "...my_s3_bucket..."
s3_region = "us-east-1"
}
@@ -32,8 +30,9 @@ resource "airbyte_destination_firebolt" "my_destination_firebolt" {
password = "...my_password..."
username = "username@email.com"
}
- name = "Roman Kulas"
- workspace_id = "c7abd74d-d39c-40f5-92cf-f7c70a45626d"
+ definition_id = "d37ea6e5-cbc1-4c07-86ea-3ea494c42020"
+ name = "Jared Spencer"
+ workspace_id = "d1afa414-5a8e-4ad6-8436-1fa9c0130565"
}
```
@@ -43,9 +42,13 @@ resource "airbyte_destination_firebolt" "my_destination_firebolt" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -57,8 +60,7 @@ resource "airbyte_destination_firebolt" "my_destination_firebolt" {
Required:
- `database` (String) The database to connect to.
-- `destination_type` (String) must be one of ["firebolt"]
-- `password` (String) Firebolt password.
+- `password` (String, Sensitive) Firebolt password.
- `username` (String) Firebolt email address you use to login.
Optional:
@@ -73,48 +75,21 @@ Optional:
Optional:
-- `destination_firebolt_loading_method_external_table_via_s3` (Attributes) Loading method used to select the way data will be uploaded to Firebolt (see [below for nested schema](#nestedatt--configuration--loading_method--destination_firebolt_loading_method_external_table_via_s3))
-- `destination_firebolt_loading_method_sql_inserts` (Attributes) Loading method used to select the way data will be uploaded to Firebolt (see [below for nested schema](#nestedatt--configuration--loading_method--destination_firebolt_loading_method_sql_inserts))
-- `destination_firebolt_update_loading_method_external_table_via_s3` (Attributes) Loading method used to select the way data will be uploaded to Firebolt (see [below for nested schema](#nestedatt--configuration--loading_method--destination_firebolt_update_loading_method_external_table_via_s3))
-- `destination_firebolt_update_loading_method_sql_inserts` (Attributes) Loading method used to select the way data will be uploaded to Firebolt (see [below for nested schema](#nestedatt--configuration--loading_method--destination_firebolt_update_loading_method_sql_inserts))
+- `external_table_via_s3` (Attributes) Loading method used to select the way data will be uploaded to Firebolt (see [below for nested schema](#nestedatt--configuration--loading_method--external_table_via_s3))
+- `sql_inserts` (Attributes) Loading method used to select the way data will be uploaded to Firebolt (see [below for nested schema](#nestedatt--configuration--loading_method--sql_inserts))
-
-### Nested Schema for `configuration.loading_method.destination_firebolt_loading_method_external_table_via_s3`
+
+### Nested Schema for `configuration.loading_method.external_table_via_s3`
Required:
-- `aws_key_id` (String) AWS access key granting read and write access to S3.
-- `aws_key_secret` (String) Corresponding secret part of the AWS Key
-- `method` (String) must be one of ["S3"]
+- `aws_key_id` (String, Sensitive) AWS access key granting read and write access to S3.
+- `aws_key_secret` (String, Sensitive) Corresponding secret part of the AWS Key
- `s3_bucket` (String) The name of the S3 bucket.
- `s3_region` (String) Region name of the S3 bucket.
-
-### Nested Schema for `configuration.loading_method.destination_firebolt_loading_method_sql_inserts`
-
-Required:
-
-- `method` (String) must be one of ["SQL"]
-
-
-
-### Nested Schema for `configuration.loading_method.destination_firebolt_update_loading_method_external_table_via_s3`
-
-Required:
-
-- `aws_key_id` (String) AWS access key granting read and write access to S3.
-- `aws_key_secret` (String) Corresponding secret part of the AWS Key
-- `method` (String) must be one of ["S3"]
-- `s3_bucket` (String) The name of the S3 bucket.
-- `s3_region` (String) Region name of the S3 bucket.
-
-
-
-### Nested Schema for `configuration.loading_method.destination_firebolt_update_loading_method_sql_inserts`
-
-Required:
-
-- `method` (String) must be one of ["SQL"]
+
+### Nested Schema for `configuration.loading_method.sql_inserts`
diff --git a/docs/resources/destination_firestore.md b/docs/resources/destination_firestore.md
index 77652908d..1237d484f 100644
--- a/docs/resources/destination_firestore.md
+++ b/docs/resources/destination_firestore.md
@@ -16,11 +16,11 @@ DestinationFirestore Resource
resource "airbyte_destination_firestore" "my_destination_firestore" {
configuration = {
credentials_json = "...my_credentials_json..."
- destination_type = "firestore"
project_id = "...my_project_id..."
}
- name = "Paula Jacobs I"
- workspace_id = "f16d9f5f-ce6c-4556-946c-3e250fb008c4"
+ definition_id = "53a4e50c-dde3-4bcf-b11f-630fa923b2f8"
+ name = "Sheldon Bernhard"
+ workspace_id = "868bf037-297d-4cd6-abcb-9a13f0bea64a"
}
```
@@ -30,9 +30,13 @@ resource "airbyte_destination_firestore" "my_destination_firestore" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -43,7 +47,6 @@ resource "airbyte_destination_firestore" "my_destination_firestore" {
Required:
-- `destination_type` (String) must be one of ["firestore"]
- `project_id` (String) The GCP project ID for the project containing the target BigQuery dataset.
Optional:
diff --git a/docs/resources/destination_gcs.md b/docs/resources/destination_gcs.md
index e187baa31..c2e0564e8 100644
--- a/docs/resources/destination_gcs.md
+++ b/docs/resources/destination_gcs.md
@@ -16,17 +16,16 @@ DestinationGcs Resource
resource "airbyte_destination_gcs" "my_destination_gcs" {
configuration = {
credential = {
- destination_gcs_authentication_hmac_key = {
+ hmac_key = {
credential_type = "HMAC_KEY"
hmac_key_access_id = "1234567890abcdefghij1234"
hmac_key_secret = "1234567890abcdefghij1234567890ABCDEFGHIJ"
}
}
- destination_type = "gcs"
format = {
- destination_gcs_output_format_avro_apache_avro = {
+ avro_apache_avro = {
compression_codec = {
- destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2 = {
+ bzip2 = {
codec = "bzip2"
}
}
@@ -37,8 +36,9 @@ resource "airbyte_destination_gcs" "my_destination_gcs" {
gcs_bucket_path = "data_sync/test"
gcs_bucket_region = "us-west1"
}
- name = "Miss Dennis Friesen"
- workspace_id = "c366c8dd-6b14-4429-8747-4778a7bd466d"
+ definition_id = "37e4a59e-7bfd-41d4-96bd-14d08d4a7d5d"
+ name = "Opal D'Amore"
+ workspace_id = "153b42c3-2f48-4f6e-943a-0f0f39a6c151"
}
```
@@ -48,9 +48,13 @@ resource "airbyte_destination_gcs" "my_destination_gcs" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -62,14 +66,13 @@ resource "airbyte_destination_gcs" "my_destination_gcs" {
Required:
- `credential` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--credential))
-- `destination_type` (String) must be one of ["gcs"]
- `format` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format))
- `gcs_bucket_name` (String) You can find the bucket name in the App Engine Admin console Application Settings page, under the label Google Cloud Storage Bucket. Read more here.
- `gcs_bucket_path` (String) GCS Bucket Path string Subdirectory under the above bucket to sync the data into.
Optional:
-- `gcs_bucket_region` (String) must be one of ["northamerica-northeast1", "northamerica-northeast2", "us-central1", "us-east1", "us-east4", "us-west1", "us-west2", "us-west3", "us-west4", "southamerica-east1", "southamerica-west1", "europe-central2", "europe-north1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "asia", "eu", "us", "asia1", "eur4", "nam4"]
+- `gcs_bucket_region` (String) must be one of ["northamerica-northeast1", "northamerica-northeast2", "us-central1", "us-east1", "us-east4", "us-west1", "us-west2", "us-west3", "us-west4", "southamerica-east1", "southamerica-west1", "europe-central2", "europe-north1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "asia", "eu", "us", "asia1", "eur4", "nam4"]; Default: "us"
Select a Region of the GCS Bucket. Read more here.
@@ -77,27 +80,19 @@ Select a Region of the GCS Bucket. Read more here. (see [below for nested schema](#nestedatt--configuration--credential--destination_gcs_authentication_hmac_key))
-- `destination_gcs_update_authentication_hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--credential--destination_gcs_update_authentication_hmac_key))
+- `hmac_key` (Attributes) An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here. (see [below for nested schema](#nestedatt--configuration--credential--hmac_key))
-
-### Nested Schema for `configuration.credential.destination_gcs_authentication_hmac_key`
+
+### Nested Schema for `configuration.credential.hmac_key`
Required:
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here.
-
-
-
-### Nested Schema for `configuration.credential.destination_gcs_update_authentication_hmac_key`
+- `hmac_key_access_id` (String, Sensitive) When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here.
+- `hmac_key_secret` (String, Sensitive) The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here.
-Required:
+Optional:
-- `credential_type` (String) must be one of ["HMAC_KEY"]
-- `hmac_key_access_id` (String) When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here.
-- `hmac_key_secret` (String) The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here.
+- `credential_type` (String) must be one of ["HMAC_KEY"]; Default: "HMAC_KEY"
@@ -106,366 +101,179 @@ Required:
Optional:
-- `destination_gcs_output_format_avro_apache_avro` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro))
-- `destination_gcs_output_format_csv_comma_separated_values` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_csv_comma_separated_values))
-- `destination_gcs_output_format_json_lines_newline_delimited_json` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_json_lines_newline_delimited_json))
-- `destination_gcs_output_format_parquet_columnar_storage` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_parquet_columnar_storage))
-- `destination_gcs_update_output_format_avro_apache_avro` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro))
-- `destination_gcs_update_output_format_csv_comma_separated_values` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_csv_comma_separated_values))
-- `destination_gcs_update_output_format_json_lines_newline_delimited_json` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_json_lines_newline_delimited_json))
-- `destination_gcs_update_output_format_parquet_columnar_storage` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_parquet_columnar_storage))
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro`
-
-Required:
-
-- `compression_codec` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--compression_codec))
-- `format_type` (String) must be one of ["Avro"]
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type`
-
-Optional:
-
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2))
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_deflate` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_deflate))
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression))
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_snappy` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_snappy))
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_xz` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_xz))
-- `destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_avro_apache_avro--format_type--destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard))
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2`
-
-Required:
-
-- `codec` (String) must be one of ["bzip2"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_deflate`
-
-Required:
-
-- `codec` (String) must be one of ["Deflate"]
-
-Optional:
-
-- `compression_level` (Number) 0: no compression & fastest, 9: best compression & slowest.
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression`
-
-Required:
-
-- `codec` (String) must be one of ["no compression"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_snappy`
-
-Required:
-
-- `codec` (String) must be one of ["snappy"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_xz`
-
-Required:
-
-- `codec` (String) must be one of ["xz"]
-
-Optional:
-
-- `compression_level` (Number) The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_avro_apache_avro.format_type.destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard`
-
-Required:
-
-- `codec` (String) must be one of ["zstandard"]
-
-Optional:
-
-- `compression_level` (Number) Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
-- `include_checksum` (Boolean) If true, include a checksum with each data block.
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_csv_comma_separated_values`
-
-Required:
-
-- `format_type` (String) must be one of ["CSV"]
-
-Optional:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_csv_comma_separated_values--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_csv_comma_separated_values.flattening`
-
-Optional:
-
-- `destination_gcs_output_format_csv_comma_separated_values_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_csv_comma_separated_values--flattening--destination_gcs_output_format_csv_comma_separated_values_compression_gzip))
-- `destination_gcs_output_format_csv_comma_separated_values_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_csv_comma_separated_values--flattening--destination_gcs_output_format_csv_comma_separated_values_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_csv_comma_separated_values.flattening.destination_gcs_output_format_csv_comma_separated_values_compression_gzip`
-
-Optional:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_csv_comma_separated_values.flattening.destination_gcs_output_format_csv_comma_separated_values_compression_no_compression`
-
-Optional:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
-
-
+- `avro_apache_avro` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro))
+- `csv_comma_separated_values` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--csv_comma_separated_values))
+- `json_lines_newline_delimited_json` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json))
+- `parquet_columnar_storage` (Attributes) Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format. (see [below for nested schema](#nestedatt--configuration--format--parquet_columnar_storage))
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_json_lines_newline_delimited_json`
+
+### Nested Schema for `configuration.format.avro_apache_avro`
Required:
-- `format_type` (String) must be one of ["JSONL"]
+- `compression_codec` (Attributes) The compression algorithm used to compress data. Defaults to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--compression_codec))
Optional:
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_json_lines_newline_delimited_json--compression))
+- `format_type` (String) must be one of ["Avro"]; Default: "Avro"
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_json_lines_newline_delimited_json.compression`
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type`
Optional:
-- `destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_json_lines_newline_delimited_json--compression--destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_output_format_json_lines_newline_delimited_json--compression--destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression))
+- `bzip2` (Attributes) The compression algorithm used to compress data. Defaults to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--bzip2))
+- `deflate` (Attributes) The compression algorithm used to compress data. Defaults to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--deflate))
+- `no_compression` (Attributes) The compression algorithm used to compress data. Defaults to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--no_compression))
+- `snappy` (Attributes) The compression algorithm used to compress data. Defaults to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--snappy))
+- `xz` (Attributes) The compression algorithm used to compress data. Defaults to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--xz))
+- `zstandard` (Attributes) The compression algorithm used to compress data. Defaults to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--zstandard))
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_json_lines_newline_delimited_json.compression.destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip`
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.bzip2`
Optional:
-- `compression_type` (String) must be one of ["GZIP"]
+- `codec` (String) must be one of ["bzip2"]; Default: "bzip2"
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_json_lines_newline_delimited_json.compression.destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression`
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.deflate`
Optional:
-- `compression_type` (String) must be one of ["No Compression"]
+- `codec` (String) must be one of ["Deflate"]; Default: "Deflate"
+- `compression_level` (Number) Default: 0
+0: no compression & fastest, 9: best compression & slowest.
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_output_format_parquet_columnar_storage`
-
-Required:
-
-- `format_type` (String) must be one of ["Parquet"]
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.no_compression`
Optional:
-- `block_size_mb` (Number) This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]
-The compression algorithm used to compress data pages.
-- `dictionary_encoding` (Boolean) Default: true.
-- `dictionary_page_size_kb` (Number) There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
-- `max_padding_size_mb` (Number) Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
-- `page_size_kb` (Number) The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro`
+- `codec` (String) must be one of ["no compression"]; Default: "no compression"
-Required:
-
-- `compression_codec` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--compression_codec))
-- `format_type` (String) must be one of ["Avro"]
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type`
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.snappy`
Optional:
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_bzip2` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_bzip2))
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_deflate` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_deflate))
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_no_compression` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_no_compression))
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_snappy` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_snappy))
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_xz` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_xz))
-- `destination_gcs_update_output_format_avro_apache_avro_compression_codec_zstandard` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_avro_apache_avro--format_type--destination_gcs_update_output_format_avro_apache_avro_compression_codec_zstandard))
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_bzip2`
-
-Required:
-
-- `codec` (String) must be one of ["bzip2"]
+- `codec` (String) must be one of ["snappy"]; Default: "snappy"
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_deflate`
-
-Required:
-
-- `codec` (String) must be one of ["Deflate"]
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.xz`
Optional:
-- `compression_level` (Number) 0: no compression & fastest, 9: best compression & slowest.
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_no_compression`
-
-Required:
-
-- `codec` (String) must be one of ["no compression"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_snappy`
+- `codec` (String) must be one of ["xz"]; Default: "xz"
+- `compression_level` (Number) Default: 6
+The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is a waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.
-Required:
-
-- `codec` (String) must be one of ["snappy"]
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_xz`
-Required:
-
-- `codec` (String) must be one of ["xz"]
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.zstandard`
Optional:
-- `compression_level` (Number) The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.
+- `codec` (String) must be one of ["zstandard"]; Default: "zstandard"
+- `compression_level` (Number) Default: 3
+Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
+- `include_checksum` (Boolean) Default: false
+If true, include a checksum with each data block.
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_avro_apache_avro.format_type.destination_gcs_update_output_format_avro_apache_avro_compression_codec_zstandard`
-Required:
-- `codec` (String) must be one of ["zstandard"]
+
+### Nested Schema for `configuration.format.csv_comma_separated_values`
Optional:
-- `compression_level` (Number) Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
-- `include_checksum` (Boolean) If true, include a checksum with each data block.
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_csv_comma_separated_values`
-
-Required:
-
-- `format_type` (String) must be one of ["CSV"]
-
-Optional:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_csv_comma_separated_values--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
+- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--csv_comma_separated_values--compression))
+- `flattening` (String) must be one of ["No flattening", "Root level flattening"]; Default: "No flattening"
Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.
+- `format_type` (String) must be one of ["CSV"]; Default: "CSV"
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_csv_comma_separated_values.flattening`
+
+### Nested Schema for `configuration.format.csv_comma_separated_values.format_type`
Optional:
-- `destination_gcs_update_output_format_csv_comma_separated_values_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_csv_comma_separated_values--flattening--destination_gcs_update_output_format_csv_comma_separated_values_compression_gzip))
-- `destination_gcs_update_output_format_csv_comma_separated_values_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_csv_comma_separated_values--flattening--destination_gcs_update_output_format_csv_comma_separated_values_compression_no_compression))
+- `gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--csv_comma_separated_values--format_type--gzip))
+- `no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--csv_comma_separated_values--format_type--no_compression))
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_csv_comma_separated_values.flattening.destination_gcs_update_output_format_csv_comma_separated_values_compression_gzip`
+
+### Nested Schema for `configuration.format.csv_comma_separated_values.format_type.gzip`
Optional:
-- `compression_type` (String) must be one of ["GZIP"]
+- `compression_type` (String) must be one of ["GZIP"]; Default: "GZIP"
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_csv_comma_separated_values.flattening.destination_gcs_update_output_format_csv_comma_separated_values_compression_no_compression`
+
+### Nested Schema for `configuration.format.csv_comma_separated_values.format_type.no_compression`
Optional:
-- `compression_type` (String) must be one of ["No Compression"]
-
+- `compression_type` (String) must be one of ["No Compression"]; Default: "No Compression"
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_json_lines_newline_delimited_json`
-
-Required:
-- `format_type` (String) must be one of ["JSONL"]
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json`
Optional:
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_json_lines_newline_delimited_json--compression))
+- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json--compression))
+- `format_type` (String) must be one of ["JSONL"]; Default: "JSONL"
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_json_lines_newline_delimited_json.compression`
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json.format_type`
Optional:
-- `destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_json_lines_newline_delimited_json--compression--destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_gcs_update_output_format_json_lines_newline_delimited_json--compression--destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_no_compression))
+- `gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json--format_type--gzip))
+- `no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json--format_type--no_compression))
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_json_lines_newline_delimited_json.compression.destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_gzip`
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json.format_type.gzip`
Optional:
-- `compression_type` (String) must be one of ["GZIP"]
+- `compression_type` (String) must be one of ["GZIP"]; Default: "GZIP"
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_json_lines_newline_delimited_json.compression.destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_no_compression`
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json.format_type.no_compression`
Optional:
-- `compression_type` (String) must be one of ["No Compression"]
+- `compression_type` (String) must be one of ["No Compression"]; Default: "No Compression"
-
-### Nested Schema for `configuration.format.destination_gcs_update_output_format_parquet_columnar_storage`
-
-Required:
-
-- `format_type` (String) must be one of ["Parquet"]
+
+### Nested Schema for `configuration.format.parquet_columnar_storage`
Optional:
-- `block_size_mb` (Number) This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]
+- `block_size_mb` (Number) Default: 128
+This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
+- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]; Default: "UNCOMPRESSED"
The compression algorithm used to compress data pages.
-- `dictionary_encoding` (Boolean) Default: true.
-- `dictionary_page_size_kb` (Number) There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
-- `max_padding_size_mb` (Number) Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
-- `page_size_kb` (Number) The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
+- `dictionary_encoding` (Boolean) Default: true
+Default: true.
+- `dictionary_page_size_kb` (Number) Default: 1024
+There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
+- `format_type` (String) must be one of ["Parquet"]; Default: "Parquet"
+- `max_padding_size_mb` (Number) Default: 8
+Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
+- `page_size_kb` (Number) Default: 1024
+The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
diff --git a/docs/resources/destination_google_sheets.md b/docs/resources/destination_google_sheets.md
index c628da1fd..d9d7de769 100644
--- a/docs/resources/destination_google_sheets.md
+++ b/docs/resources/destination_google_sheets.md
@@ -20,11 +20,11 @@ resource "airbyte_destination_google_sheets" "my_destination_googlesheets" {
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
}
- destination_type = "google-sheets"
- spreadsheet_id = "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit"
+ spreadsheet_id = "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit"
}
- name = "Mr. Irma Schaefer"
- workspace_id = "b3cdca42-5190-44e5-a3c7-e0bc7178e479"
+ definition_id = "a78cf13c-3589-4bc3-aaba-63d3987f09ed"
+ name = "Manuel Cronin IV"
+ workspace_id = "dddbef1f-87bb-4506-9e16-a5a735a4e180"
}
```
@@ -34,9 +34,13 @@ resource "airbyte_destination_google_sheets" "my_destination_googlesheets" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -48,7 +52,6 @@ resource "airbyte_destination_google_sheets" "my_destination_googlesheets" {
Required:
- `credentials` (Attributes) Google API Credentials for connecting to Google Sheets and Google Drive APIs (see [below for nested schema](#nestedatt--configuration--credentials))
-- `destination_type` (String) must be one of ["google-sheets"]
- `spreadsheet_id` (String) The link to your spreadsheet. See this guide for more details.
@@ -58,6 +61,6 @@ Required:
- `client_id` (String) The Client ID of your Google Sheets developer application.
- `client_secret` (String) The Client Secret of your Google Sheets developer application.
-- `refresh_token` (String) The token for obtaining new access token.
+- `refresh_token` (String, Sensitive) The token for obtaining new access token.
diff --git a/docs/resources/destination_keen.md b/docs/resources/destination_keen.md
index 55eeac9fe..e0e719b5e 100644
--- a/docs/resources/destination_keen.md
+++ b/docs/resources/destination_keen.md
@@ -15,13 +15,13 @@ DestinationKeen Resource
```terraform
resource "airbyte_destination_keen" "my_destination_keen" {
configuration = {
- api_key = "ABCDEFGHIJKLMNOPRSTUWXYZ"
- destination_type = "keen"
- infer_timestamp = false
- project_id = "58b4acc22ba938934e888322e"
+ api_key = "ABCDEFGHIJKLMNOPRSTUWXYZ"
+ infer_timestamp = false
+ project_id = "58b4acc22ba938934e888322e"
}
- name = "Todd Oberbrunner DDS"
- workspace_id = "688282aa-4825-462f-a22e-9817ee17cbe6"
+ definition_id = "23f0d76f-b78b-4f74-ba22-de12791b5f13"
+ name = "Mr. Angelina Becker"
+ workspace_id = "49774ae8-7c30-4892-bfb0-f41f82248d60"
}
```
@@ -31,9 +31,13 @@ resource "airbyte_destination_keen" "my_destination_keen" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -44,12 +48,12 @@ resource "airbyte_destination_keen" "my_destination_keen" {
Required:
-- `api_key` (String) To get Keen Master API Key, navigate to the Access tab from the left-hand, side panel and check the Project Details section.
-- `destination_type` (String) must be one of ["keen"]
+- `api_key` (String, Sensitive) To get Keen Master API Key, navigate to the Access tab from the left-hand, side panel and check the Project Details section.
- `project_id` (String) To get Keen Project ID, navigate to the Access tab from the left-hand, side panel and check the Project Details section.
Optional:
-- `infer_timestamp` (Boolean) Allow connector to guess keen.timestamp value based on the streamed data.
+- `infer_timestamp` (Boolean) Default: true
+Allow connector to guess keen.timestamp value based on the streamed data.
diff --git a/docs/resources/destination_kinesis.md b/docs/resources/destination_kinesis.md
index b788f2018..42b0775d7 100644
--- a/docs/resources/destination_kinesis.md
+++ b/docs/resources/destination_kinesis.md
@@ -15,16 +15,16 @@ DestinationKinesis Resource
```terraform
resource "airbyte_destination_kinesis" "my_destination_kinesis" {
configuration = {
- access_key = "...my_access_key..."
- buffer_size = 1
- destination_type = "kinesis"
- endpoint = "kinesis.us‑west‑1.amazonaws.com"
- private_key = "...my_private_key..."
- region = "us‑west‑1"
- shard_count = 9
+ access_key = "...my_access_key..."
+ buffer_size = 1
+      endpoint         = "kinesis.us-west-1.amazonaws.com"
+ private_key = "...my_private_key..."
+      region           = "us-west-1"
+ shard_count = 1
}
- name = "Opal Kozey"
- workspace_id = "5bc0ab3c-20c4-4f37-89fd-871f99dd2efd"
+ definition_id = "83384bd8-7b5c-4ce3-a148-54333df23c5e"
+ name = "Mary Monahan"
+ workspace_id = "52521a04-7878-4c25-8cd1-84fd116e75f1"
}
```
@@ -34,9 +34,13 @@ resource "airbyte_destination_kinesis" "my_destination_kinesis" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -48,11 +52,15 @@ resource "airbyte_destination_kinesis" "my_destination_kinesis" {
Required:
- `access_key` (String) Generate the AWS Access Key for current user.
-- `buffer_size` (Number) Buffer size for storing kinesis records before being batch streamed.
-- `destination_type` (String) must be one of ["kinesis"]
- `endpoint` (String) AWS Kinesis endpoint.
- `private_key` (String) The AWS Private Key - a string of numbers and letters that are unique for each account, also known as a "recovery phrase".
- `region` (String) AWS region. Your account determines the Regions that are available to you.
-- `shard_count` (Number) Number of shards to which the data should be streamed.
+
+Optional:
+
+- `buffer_size` (Number) Default: 100
+Buffer size for storing kinesis records before being batch streamed.
+- `shard_count` (Number) Default: 5
+Number of shards to which the data should be streamed.
diff --git a/docs/resources/destination_langchain.md b/docs/resources/destination_langchain.md
index c6dc2e93e..6bb7c0272 100644
--- a/docs/resources/destination_langchain.md
+++ b/docs/resources/destination_langchain.md
@@ -15,29 +15,26 @@ DestinationLangchain Resource
```terraform
resource "airbyte_destination_langchain" "my_destination_langchain" {
configuration = {
- destination_type = "langchain"
embedding = {
- destination_langchain_embedding_fake = {
- mode = "fake"
- }
+ fake = {}
}
indexing = {
- destination_langchain_indexing_chroma_local_persistance_ = {
+ chroma_local_persistance = {
collection_name = "...my_collection_name..."
destination_path = "/local/my_chroma_db"
- mode = "chroma_local"
}
}
processing = {
- chunk_overlap = 0
- chunk_size = 1
+ chunk_overlap = 8
+ chunk_size = 3
text_fields = [
"...",
]
}
}
- name = "Hattie Nader"
- workspace_id = "1e674bdb-04f1-4575-a082-d68ea19f1d17"
+ definition_id = "0c9ec767-47b0-46cf-86fe-4a6f8bb810ed"
+ name = "Megan Kertzmann"
+ workspace_id = "02e7b218-3b2b-4c4f-adb7-afdacad2c14c"
}
```
@@ -47,9 +44,13 @@ resource "airbyte_destination_langchain" "my_destination_langchain" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -60,7 +61,6 @@ resource "airbyte_destination_langchain" "my_destination_langchain" {
Required:
-- `destination_type` (String) must be one of ["langchain"]
- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding))
- `indexing` (Attributes) Indexing configuration (see [below for nested schema](#nestedatt--configuration--indexing))
- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing))
@@ -70,49 +70,19 @@ Required:
Optional:
-- `destination_langchain_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_langchain_embedding_fake))
-- `destination_langchain_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_langchain_embedding_open_ai))
-- `destination_langchain_update_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_langchain_update_embedding_fake))
-- `destination_langchain_update_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_langchain_update_embedding_open_ai))
-
-
-### Nested Schema for `configuration.embedding.destination_langchain_embedding_fake`
-
-Optional:
-
-- `mode` (String) must be one of ["fake"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_langchain_embedding_open_ai`
-
-Required:
-
-- `openai_key` (String)
-
-Optional:
-
-- `mode` (String) must be one of ["openai"]
-
+- `fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--fake))
+- `open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--open_ai))
-
-### Nested Schema for `configuration.embedding.destination_langchain_update_embedding_fake`
-
-Optional:
+
+### Nested Schema for `configuration.embedding.fake`
-- `mode` (String) must be one of ["fake"]
-
-
-### Nested Schema for `configuration.embedding.destination_langchain_update_embedding_open_ai`
+
+### Nested Schema for `configuration.embedding.open_ai`
Required:
-- `openai_key` (String)
-
-Optional:
-
-- `mode` (String) must be one of ["openai"]
+- `openai_key` (String, Sensitive)
@@ -121,54 +91,12 @@ Optional:
Optional:
-- `destination_langchain_indexing_chroma_local_persistance` (Attributes) Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_indexing_chroma_local_persistance))
-- `destination_langchain_indexing_doc_array_hnsw_search` (Attributes) DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_indexing_doc_array_hnsw_search))
-- `destination_langchain_indexing_pinecone` (Attributes) Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_indexing_pinecone))
-- `destination_langchain_update_indexing_chroma_local_persistance` (Attributes) Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_update_indexing_chroma_local_persistance))
-- `destination_langchain_update_indexing_doc_array_hnsw_search` (Attributes) DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_update_indexing_doc_array_hnsw_search))
-- `destination_langchain_update_indexing_pinecone` (Attributes) Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain. (see [below for nested schema](#nestedatt--configuration--indexing--destination_langchain_update_indexing_pinecone))
-
-
-### Nested Schema for `configuration.indexing.destination_langchain_indexing_chroma_local_persistance`
-
-Required:
-
-- `destination_path` (String) Path to the directory where chroma files will be written. The files will be placed inside that local mount.
-
-Optional:
-
-- `collection_name` (String) Name of the collection to use.
-- `mode` (String) must be one of ["chroma_local"]
-
-
-
-### Nested Schema for `configuration.indexing.destination_langchain_indexing_doc_array_hnsw_search`
-
-Required:
-
-- `destination_path` (String) Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run.
-
-Optional:
-
-- `mode` (String) must be one of ["DocArrayHnswSearch"]
-
-
-
-### Nested Schema for `configuration.indexing.destination_langchain_indexing_pinecone`
-
-Required:
-
-- `index` (String) Pinecone index to use
-- `pinecone_environment` (String) Pinecone environment to use
-- `pinecone_key` (String)
-
-Optional:
-
-- `mode` (String) must be one of ["pinecone"]
+- `chroma_local_persistance` (Attributes) Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync. (see [below for nested schema](#nestedatt--configuration--indexing--chroma_local_persistance))
+- `doc_array_hnsw_search` (Attributes) DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite. (see [below for nested schema](#nestedatt--configuration--indexing--doc_array_hnsw_search))
+- `pinecone` (Attributes) Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain. (see [below for nested schema](#nestedatt--configuration--indexing--pinecone))
-
-
-### Nested Schema for `configuration.indexing.destination_langchain_update_indexing_chroma_local_persistance`
+
+### Nested Schema for `configuration.indexing.chroma_local_persistance`
Required:
@@ -176,34 +104,26 @@ Required:
Optional:
-- `collection_name` (String) Name of the collection to use.
-- `mode` (String) must be one of ["chroma_local"]
+- `collection_name` (String) Default: "langchain"
+Name of the collection to use.
-
-### Nested Schema for `configuration.indexing.destination_langchain_update_indexing_doc_array_hnsw_search`
+
+### Nested Schema for `configuration.indexing.doc_array_hnsw_search`
Required:
- `destination_path` (String) Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run.
-Optional:
-
-- `mode` (String) must be one of ["DocArrayHnswSearch"]
-
-
-### Nested Schema for `configuration.indexing.destination_langchain_update_indexing_pinecone`
+
+### Nested Schema for `configuration.indexing.pinecone`
Required:
- `index` (String) Pinecone index to use
- `pinecone_environment` (String) Pinecone environment to use
-- `pinecone_key` (String)
-
-Optional:
-
-- `mode` (String) must be one of ["pinecone"]
+- `pinecone_key` (String, Sensitive)
@@ -217,6 +137,7 @@ Required:
Optional:
-- `chunk_overlap` (Number) Size of overlap between chunks in tokens to store in vector store to better capture relevant context
+- `chunk_overlap` (Number) Default: 0
+Size of overlap between chunks in tokens to store in vector store to better capture relevant context
diff --git a/docs/resources/destination_milvus.md b/docs/resources/destination_milvus.md
index 74d5422fb..7834ef199 100644
--- a/docs/resources/destination_milvus.md
+++ b/docs/resources/destination_milvus.md
@@ -15,39 +15,50 @@ DestinationMilvus Resource
```terraform
resource "airbyte_destination_milvus" "my_destination_milvus" {
configuration = {
- destination_type = "milvus"
embedding = {
- destination_milvus_embedding_cohere = {
- cohere_key = "...my_cohere_key..."
- mode = "cohere"
+ azure_open_ai = {
+ api_base = "https://your-resource-name.openai.azure.com"
+ deployment = "your-resource-name"
+ openai_key = "...my_openai_key..."
}
}
indexing = {
auth = {
- destination_milvus_indexing_authentication_api_token = {
- mode = "token"
+ destination_milvus_api_token = {
token = "...my_token..."
}
}
collection = "...my_collection..."
db = "...my_db..."
- host = "https://my-instance.zone.zillizcloud.com"
+ host = "tcp://my-local-milvus:19530"
text_field = "...my_text_field..."
vector_field = "...my_vector_field..."
}
processing = {
- chunk_overlap = 3
- chunk_size = 0
+ chunk_overlap = 1
+ chunk_size = 5
+ field_name_mappings = [
+ {
+ from_field = "...my_from_field..."
+ to_field = "...my_to_field..."
+ },
+ ]
metadata_fields = [
"...",
]
text_fields = [
"...",
]
+ text_splitter = {
+ by_markdown_header = {
+ split_level = 7
+ }
+ }
}
}
- name = "Sherry Morar IV"
- workspace_id = "086a1840-394c-4260-b1f9-3f5f0642dac7"
+ definition_id = "6683bb76-cbdd-442c-84b7-b603cc8cd887"
+ name = "Mr. Karl Jacobson"
+ workspace_id = "13ef7fc0-d176-4e5f-8145-49f1242182d1"
}
```
@@ -57,9 +68,13 @@ resource "airbyte_destination_milvus" "my_destination_milvus" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -70,7 +85,6 @@ resource "airbyte_destination_milvus" "my_destination_milvus" {
Required:
-- `destination_type` (String) must be one of ["milvus"]
- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding))
- `indexing` (Attributes) Indexing configuration (see [below for nested schema](#nestedatt--configuration--indexing))
- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing))
@@ -80,103 +94,65 @@ Required:
Optional:
-- `destination_milvus_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_cohere))
-- `destination_milvus_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_fake))
-- `destination_milvus_embedding_from_field` (Attributes) Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_from_field))
-- `destination_milvus_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_open_ai))
-- `destination_milvus_update_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_cohere))
-- `destination_milvus_update_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_fake))
-- `destination_milvus_update_embedding_from_field` (Attributes) Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_from_field))
-- `destination_milvus_update_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_open_ai))
+- `azure_open_ai` (Attributes) Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--azure_open_ai))
+- `cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--cohere))
+- `fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--fake))
+- `from_field` (Attributes) Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. (see [below for nested schema](#nestedatt--configuration--embedding--from_field))
+- `open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--open_ai))
+- `open_ai_compatible` (Attributes) Use a service that's compatible with the OpenAI API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--open_ai_compatible))
-
-### Nested Schema for `configuration.embedding.destination_milvus_embedding_cohere`
+
+### Nested Schema for `configuration.embedding.azure_open_ai`
Required:
-- `cohere_key` (String)
+- `api_base` (String) The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+- `deployment` (String) The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+- `openai_key` (String, Sensitive) The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
-Optional:
-- `mode` (String) must be one of ["cohere"]
+
+### Nested Schema for `configuration.embedding.cohere`
+Required:
-
-### Nested Schema for `configuration.embedding.destination_milvus_embedding_fake`
+- `cohere_key` (String, Sensitive)
-Optional:
-- `mode` (String) must be one of ["fake"]
+
+### Nested Schema for `configuration.embedding.fake`
-
-### Nested Schema for `configuration.embedding.destination_milvus_embedding_from_field`
+
+### Nested Schema for `configuration.embedding.from_field`
Required:
- `dimensions` (Number) The number of dimensions the embedding model is generating
- `field_name` (String) Name of the field in the record that contains the embedding
-Optional:
-
-- `mode` (String) must be one of ["from_field"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_embedding_open_ai`
-
-Required:
-
-- `openai_key` (String)
-Optional:
-
-- `mode` (String) must be one of ["openai"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_cohere`
+
+### Nested Schema for `configuration.embedding.open_ai`
Required:
-- `cohere_key` (String)
+- `openai_key` (String, Sensitive)
-Optional:
-
-- `mode` (String) must be one of ["cohere"]
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_fake`
-
-Optional:
-
-- `mode` (String) must be one of ["fake"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_from_field`
+
+### Nested Schema for `configuration.embedding.open_ai_compatible`
Required:
+- `base_url` (String) The base URL for your OpenAI-compatible service
- `dimensions` (Number) The number of dimensions the embedding model is generating
-- `field_name` (String) Name of the field in the record that contains the embedding
-
-Optional:
-
-- `mode` (String) must be one of ["from_field"]
-
-
-
-### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_open_ai`
-
-Required:
-
-- `openai_key` (String)
Optional:
-- `mode` (String) must be one of ["openai"]
+- `api_key` (String, Sensitive) Default: ""
+- `model_name` (String) Default: "text-embedding-ada-002"
+The name of the model to use for embedding
@@ -191,101 +167,104 @@ Required:
Optional:
-- `db` (String) The database to connect to
-- `text_field` (String) The field in the entity that contains the embedded text
-- `vector_field` (String) The field in the entity that contains the vector
+- `db` (String) Default: ""
+The database to connect to
+- `text_field` (String) Default: "text"
+The field in the entity that contains the embedded text
+- `vector_field` (String) Default: "vector"
+The field in the entity that contains the vector
### Nested Schema for `configuration.indexing.auth`
Optional:
-- `destination_milvus_indexing_authentication_api_token` (Attributes) Authenticate using an API token (suitable for Zilliz Cloud) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_api_token))
-- `destination_milvus_indexing_authentication_no_auth` (Attributes) Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_no_auth))
-- `destination_milvus_indexing_authentication_username_password` (Attributes) Authenticate using username and password (suitable for self-managed Milvus clusters) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_username_password))
-- `destination_milvus_update_indexing_authentication_api_token` (Attributes) Authenticate using an API token (suitable for Zilliz Cloud) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_api_token))
-- `destination_milvus_update_indexing_authentication_no_auth` (Attributes) Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_no_auth))
-- `destination_milvus_update_indexing_authentication_username_password` (Attributes) Authenticate using username and password (suitable for self-managed Milvus clusters) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_username_password))
+- `api_token` (Attributes) Authenticate using an API token (suitable for Zilliz Cloud) (see [below for nested schema](#nestedatt--configuration--indexing--auth--api_token))
+- `no_auth` (Attributes) Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) (see [below for nested schema](#nestedatt--configuration--indexing--auth--no_auth))
+- `username_password` (Attributes) Authenticate using username and password (suitable for self-managed Milvus clusters) (see [below for nested schema](#nestedatt--configuration--indexing--auth--username_password))
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
+
+### Nested Schema for `configuration.indexing.auth.api_token`
Required:
-- `token` (String) API Token for the Milvus instance
+- `token` (String, Sensitive) API Token for the Milvus instance
-Optional:
-- `mode` (String) must be one of ["token"]
+
+### Nested Schema for `configuration.indexing.auth.no_auth`
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
-
-Optional:
-
-- `mode` (String) must be one of ["no_auth"]
-
-
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
+
+### Nested Schema for `configuration.indexing.auth.username_password`
Required:
-- `password` (String) Password for the Milvus instance
+- `password` (String, Sensitive) Password for the Milvus instance
- `username` (String) Username for the Milvus instance
-Optional:
-- `mode` (String) must be one of ["username_password"]
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
+
+### Nested Schema for `configuration.processing`
Required:
-- `token` (String) API Token for the Milvus instance
+- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
Optional:
-- `mode` (String) must be one of ["token"]
+- `chunk_overlap` (Number) Default: 0
+Size of overlap between chunks in tokens to store in vector store to better capture relevant context
+- `field_name_mappings` (Attributes List) List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation. (see [below for nested schema](#nestedatt--configuration--processing--field_name_mappings))
+- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
+- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
+- `text_splitter` (Attributes) Split text fields into chunks based on the specified method. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter))
+
+### Nested Schema for `configuration.processing.field_name_mappings`
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
+Required:
-Optional:
+- `from_field` (String) The field name in the source
+- `to_field` (String) The field name to use in the destination
-- `mode` (String) must be one of ["no_auth"]
+
+### Nested Schema for `configuration.processing.text_splitter`
-
-### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password`
+Optional:
-Required:
+- `by_markdown_header` (Attributes) Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_markdown_header))
+- `by_programming_language` (Attributes) Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_programming_language))
+- `by_separator` (Attributes) Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_separator))
-- `password` (String) Password for the Milvus instance
-- `username` (String) Username for the Milvus instance
+
+### Nested Schema for `configuration.processing.text_splitter.by_markdown_header`
Optional:
-- `mode` (String) must be one of ["username_password"]
+- `split_level` (Number) Default: 1
+Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
+
+### Nested Schema for `configuration.processing.text_splitter.by_programming_language`
+Required:
-
-### Nested Schema for `configuration.processing`
+- `language` (String) must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
+Split code in suitable places based on the programming language
-Required:
-- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)
+
+### Nested Schema for `configuration.processing.text_splitter.by_separator`
Optional:
-- `chunk_overlap` (Number) Size of overlap between chunks in tokens to store in vector store to better capture relevant context
-- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
-- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
+- `keep_separator` (Boolean) Default: false
+Whether to keep the separator in the resulting chunks
+- `separators` (List of String) List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
diff --git a/docs/resources/destination_mongodb.md b/docs/resources/destination_mongodb.md
index 472867bfe..ec6866242 100644
--- a/docs/resources/destination_mongodb.md
+++ b/docs/resources/destination_mongodb.md
@@ -16,28 +16,25 @@ DestinationMongodb Resource
resource "airbyte_destination_mongodb" "my_destination_mongodb" {
configuration = {
auth_type = {
- destination_mongodb_authorization_type_login_password = {
- authorization = "login/password"
- password = "...my_password..."
- username = "Lucienne.Yundt"
+ login_password = {
+ password = "...my_password..."
+ username = "Emmalee.Towne89"
}
}
- database = "...my_database..."
- destination_type = "mongodb"
+ database = "...my_database..."
instance_type = {
- destination_mongodb_mongo_db_instance_type_mongo_db_atlas = {
+ mongo_db_atlas = {
cluster_url = "...my_cluster_url..."
instance = "atlas"
}
}
tunnel_method = {
- destination_mongodb_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_mongodb_no_tunnel = {}
}
}
- name = "Robyn Schmitt I"
- workspace_id = "aa63aae8-d678-464d-bb67-5fd5e60b375e"
+ definition_id = "895c9212-6184-452d-9432-f33897fec4ca"
+ name = "Adrienne Lockman"
+ workspace_id = "bf882725-c3c6-4bc3-9a6d-3f396b39ea0e"
}
```
@@ -47,9 +44,13 @@ resource "airbyte_destination_mongodb" "my_destination_mongodb" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -62,7 +63,6 @@ Required:
- `auth_type` (Attributes) Authorization type. (see [below for nested schema](#nestedatt--configuration--auth_type))
- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["mongodb"]
Optional:
@@ -74,45 +74,20 @@ Optional:
Optional:
-- `destination_mongodb_authorization_type_login_password` (Attributes) Login/Password. (see [below for nested schema](#nestedatt--configuration--auth_type--destination_mongodb_authorization_type_login_password))
-- `destination_mongodb_authorization_type_none` (Attributes) None. (see [below for nested schema](#nestedatt--configuration--auth_type--destination_mongodb_authorization_type_none))
-- `destination_mongodb_update_authorization_type_login_password` (Attributes) Login/Password. (see [below for nested schema](#nestedatt--configuration--auth_type--destination_mongodb_update_authorization_type_login_password))
-- `destination_mongodb_update_authorization_type_none` (Attributes) None. (see [below for nested schema](#nestedatt--configuration--auth_type--destination_mongodb_update_authorization_type_none))
-
-
-### Nested Schema for `configuration.auth_type.destination_mongodb_authorization_type_login_password`
-
-Required:
-
-- `authorization` (String) must be one of ["login/password"]
-- `password` (String) Password associated with the username.
-- `username` (String) Username to use to access the database.
-
-
-
-### Nested Schema for `configuration.auth_type.destination_mongodb_authorization_type_none`
-
-Required:
-
-- `authorization` (String) must be one of ["none"]
-
+- `login_password` (Attributes) Login/Password. (see [below for nested schema](#nestedatt--configuration--auth_type--login_password))
+- `none` (Attributes) None. (see [below for nested schema](#nestedatt--configuration--auth_type--none))
-
-### Nested Schema for `configuration.auth_type.destination_mongodb_update_authorization_type_login_password`
+
+### Nested Schema for `configuration.auth_type.login_password`
Required:
-- `authorization` (String) must be one of ["login/password"]
-- `password` (String) Password associated with the username.
+- `password` (String, Sensitive) Password associated with the username.
- `username` (String) Username to use to access the database.
-
-### Nested Schema for `configuration.auth_type.destination_mongodb_update_authorization_type_none`
-
-Required:
-
-- `authorization` (String) must be one of ["none"]
+
+### Nested Schema for `configuration.auth_type.none`
@@ -121,75 +96,47 @@ Required:
Optional:
-- `destination_mongodb_mongo_db_instance_type_mongo_db_atlas` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_mongo_db_instance_type_mongo_db_atlas))
-- `destination_mongodb_mongo_db_instance_type_replica_set` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_mongo_db_instance_type_replica_set))
-- `destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance))
-- `destination_mongodb_update_mongo_db_instance_type_mongo_db_atlas` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_update_mongo_db_instance_type_mongo_db_atlas))
-- `destination_mongodb_update_mongo_db_instance_type_replica_set` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_update_mongo_db_instance_type_replica_set))
-- `destination_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--destination_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance))
+- `mongo_db_atlas` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--mongo_db_atlas))
+- `replica_set` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--replica_set))
+- `standalone_mongo_db_instance` (Attributes) MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--standalone_mongo_db_instance))
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_mongo_db_instance_type_mongo_db_atlas`
+
+### Nested Schema for `configuration.instance_type.mongo_db_atlas`
Required:
- `cluster_url` (String) URL of a cluster to connect to.
-- `instance` (String) must be one of ["atlas"]
-
-
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_mongo_db_instance_type_replica_set`
-
-Required:
-
-- `instance` (String) must be one of ["replica"]
-- `server_addresses` (String) The members of a replica set. Please specify `host`:`port` of each member seperated by comma.
Optional:
-- `replica_set` (String) A replica set name.
+- `instance` (String) must be one of ["atlas"]; Default: "atlas"
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance`
+
+### Nested Schema for `configuration.instance_type.replica_set`
Required:
-- `host` (String) The Host of a Mongo database to be replicated.
-- `instance` (String) must be one of ["standalone"]
-- `port` (Number) The Port of a Mongo database to be replicated.
-
-
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_update_mongo_db_instance_type_mongo_db_atlas`
-
-Required:
-
-- `cluster_url` (String) URL of a cluster to connect to.
-- `instance` (String) must be one of ["atlas"]
-
-
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_update_mongo_db_instance_type_replica_set`
-
-Required:
-
-- `instance` (String) must be one of ["replica"]
- `server_addresses` (String) The members of a replica set. Please specify `host`:`port` of each member seperated by comma.
Optional:
+- `instance` (String) must be one of ["replica"]; Default: "replica"
- `replica_set` (String) A replica set name.
-
-### Nested Schema for `configuration.instance_type.destination_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance`
+
+### Nested Schema for `configuration.instance_type.standalone_mongo_db_instance`
Required:
- `host` (String) The Host of a Mongo database to be replicated.
-- `instance` (String) must be one of ["standalone"]
-- `port` (Number) The Port of a Mongo database to be replicated.
+
+Optional:
+
+- `instance` (String) must be one of ["standalone"]; Default: "standalone"
+- `port` (Number) Default: 27017
+The Port of a Mongo database to be replicated.
@@ -198,80 +145,41 @@ Required:
Optional:
-- `destination_mongodb_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_ssh_tunnel_method_no_tunnel))
-- `destination_mongodb_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_ssh_tunnel_method_password_authentication))
-- `destination_mongodb_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_ssh_tunnel_method_ssh_key_authentication))
-- `destination_mongodb_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_update_ssh_tunnel_method_no_tunnel))
-- `destination_mongodb_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_update_ssh_tunnel_method_password_authentication))
-- `destination_mongodb_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mongodb_update_ssh_tunnel_method_ssh_key_authentication))
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_ssh_tunnel_method_no_tunnel`
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_update_ssh_tunnel_method_no_tunnel`
-
-Required:
+Optional:
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mongodb_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/destination_mssql.md b/docs/resources/destination_mssql.md
index 860001e3c..f653ec9c5 100644
--- a/docs/resources/destination_mssql.md
+++ b/docs/resources/destination_mssql.md
@@ -15,27 +15,23 @@ DestinationMssql Resource
```terraform
resource "airbyte_destination_mssql" "my_destination_mssql" {
configuration = {
- database = "...my_database..."
- destination_type = "mssql"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 1433
- schema = "public"
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 1433
+ schema = "public"
ssl_method = {
- destination_mssql_ssl_method_encrypted_trust_server_certificate_ = {
- ssl_method = "encrypted_trust_server_certificate"
- }
+ encrypted_trust_server_certificate = {}
}
tunnel_method = {
- destination_mssql_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_mssql_no_tunnel = {}
}
- username = "Desiree_Yost"
+ username = "Amalia.Blick"
}
- name = "Bert Treutel DVM"
- workspace_id = "33317fe3-5b60-4eb1-aa42-6555ba3c2874"
+ definition_id = "90e1a2bc-7de0-4ff6-b737-4915d3efc2cd"
+ name = "Jorge Beahan"
+ workspace_id = "6acc1e6f-1291-4560-8b55-b326e06d2448"
}
```
@@ -45,9 +41,13 @@ resource "airbyte_destination_mssql" "my_destination_mssql" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -59,16 +59,17 @@ resource "airbyte_destination_mssql" "my_destination_mssql" {
Required:
- `database` (String) The name of the MSSQL database.
-- `destination_type` (String) must be one of ["mssql"]
- `host` (String) The host name of the MSSQL database.
-- `port` (Number) The port of the MSSQL database.
-- `schema` (String) The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".
- `username` (String) The username which is used to access the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) The password associated with this username.
+- `password` (String, Sensitive) The password associated with this username.
+- `port` (Number) Default: 1433
+The port of the MSSQL database.
+- `schema` (String) Default: "public"
+The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".
- `ssl_method` (Attributes) The encryption method which is used to communicate with the database. (see [below for nested schema](#nestedatt--configuration--ssl_method))
- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
@@ -77,45 +78,15 @@ Optional:
Optional:
-- `destination_mssql_ssl_method_encrypted_trust_server_certificate` (Attributes) Use the certificate provided by the server without verification. (For testing purposes only!) (see [below for nested schema](#nestedatt--configuration--ssl_method--destination_mssql_ssl_method_encrypted_trust_server_certificate))
-- `destination_mssql_ssl_method_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--ssl_method--destination_mssql_ssl_method_encrypted_verify_certificate))
-- `destination_mssql_update_ssl_method_encrypted_trust_server_certificate` (Attributes) Use the certificate provided by the server without verification. (For testing purposes only!) (see [below for nested schema](#nestedatt--configuration--ssl_method--destination_mssql_update_ssl_method_encrypted_trust_server_certificate))
-- `destination_mssql_update_ssl_method_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--ssl_method--destination_mssql_update_ssl_method_encrypted_verify_certificate))
-
-
-### Nested Schema for `configuration.ssl_method.destination_mssql_ssl_method_encrypted_trust_server_certificate`
-
-Required:
-
-- `ssl_method` (String) must be one of ["encrypted_trust_server_certificate"]
-
-
-
-### Nested Schema for `configuration.ssl_method.destination_mssql_ssl_method_encrypted_verify_certificate`
-
-Required:
-
-- `ssl_method` (String) must be one of ["encrypted_verify_certificate"]
-
-Optional:
-
-- `host_name_in_certificate` (String) Specifies the host name of the server. The value of this property must match the subject property of the certificate.
-
+- `encrypted_trust_server_certificate` (Attributes) Use the certificate provided by the server without verification. (For testing purposes only!) (see [below for nested schema](#nestedatt--configuration--ssl_method--encrypted_trust_server_certificate))
+- `encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--ssl_method--encrypted_verify_certificate))
-
-### Nested Schema for `configuration.ssl_method.destination_mssql_update_ssl_method_encrypted_trust_server_certificate`
+
+### Nested Schema for `configuration.ssl_method.encrypted_trust_server_certificate`
-Required:
-
-- `ssl_method` (String) must be one of ["encrypted_trust_server_certificate"]
-
-
-
-### Nested Schema for `configuration.ssl_method.destination_mssql_update_ssl_method_encrypted_verify_certificate`
-
-Required:
-- `ssl_method` (String) must be one of ["encrypted_verify_certificate"]
+
+### Nested Schema for `configuration.ssl_method.encrypted_verify_certificate`
Optional:
@@ -128,80 +99,41 @@ Optional:
Optional:
-- `destination_mssql_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_ssh_tunnel_method_no_tunnel))
-- `destination_mssql_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_ssh_tunnel_method_password_authentication))
-- `destination_mssql_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_ssh_tunnel_method_ssh_key_authentication))
-- `destination_mssql_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_update_ssh_tunnel_method_no_tunnel))
-- `destination_mssql_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_update_ssh_tunnel_method_password_authentication))
-- `destination_mssql_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mssql_update_ssh_tunnel_method_ssh_key_authentication))
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_ssh_tunnel_method_no_tunnel`
-
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_update_ssh_tunnel_method_no_tunnel`
-
-Required:
+Optional:
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mssql_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/destination_mysql.md b/docs/resources/destination_mysql.md
index 251fcdbc6..99b4555e9 100644
--- a/docs/resources/destination_mysql.md
+++ b/docs/resources/destination_mysql.md
@@ -15,21 +15,19 @@ DestinationMysql Resource
```terraform
resource "airbyte_destination_mysql" "my_destination_mysql" {
configuration = {
- database = "...my_database..."
- destination_type = "mysql"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 3306
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 3306
tunnel_method = {
- destination_mysql_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_mysql_no_tunnel = {}
}
- username = "Sheldon.Smitham"
+ username = "Elissa16"
}
- name = "Guy Luettgen"
- workspace_id = "a8d8f5c0-b2f2-4fb7-b194-a276b26916fe"
+ definition_id = "a53050a9-afbc-466c-913a-5b78062a6a13"
+ name = "Nick Rogahn"
+ workspace_id = "63598ffb-0429-424f-aeae-5018c3193740"
}
```
@@ -39,9 +37,13 @@ resource "airbyte_destination_mysql" "my_destination_mysql" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -53,15 +55,15 @@ resource "airbyte_destination_mysql" "my_destination_mysql" {
Required:
- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["mysql"]
- `host` (String) Hostname of the database.
-- `port` (Number) Port of the database.
- `username` (String) Username to use to access the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) Password associated with the username.
+- `password` (String, Sensitive) Password associated with the username.
+- `port` (Number) Default: 3306
+Port of the database.
- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
@@ -69,80 +71,41 @@ Optional:
Optional:
-- `destination_mysql_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_ssh_tunnel_method_no_tunnel))
-- `destination_mysql_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_ssh_tunnel_method_password_authentication))
-- `destination_mysql_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_ssh_tunnel_method_ssh_key_authentication))
-- `destination_mysql_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_update_ssh_tunnel_method_no_tunnel))
-- `destination_mysql_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_update_ssh_tunnel_method_password_authentication))
-- `destination_mysql_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_mysql_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_ssh_tunnel_method_no_tunnel`
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
+Optional:
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_update_ssh_tunnel_method_no_tunnel`
-
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_mysql_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/destination_oracle.md b/docs/resources/destination_oracle.md
index 7f4e8a16b..a8df5b7e1 100644
--- a/docs/resources/destination_oracle.md
+++ b/docs/resources/destination_oracle.md
@@ -15,22 +15,20 @@ DestinationOracle Resource
```terraform
resource "airbyte_destination_oracle" "my_destination_oracle" {
configuration = {
- destination_type = "oracle"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 1521
- schema = "airbyte"
- sid = "...my_sid..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 1521
+ schema = "airbyte"
+ sid = "...my_sid..."
tunnel_method = {
- destination_oracle_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_oracle_no_tunnel = {}
}
- username = "Viviane_Aufderhar"
+ username = "Abdullah_Ward15"
}
- name = "Tammy Medhurst"
- workspace_id = "3698f447-f603-4e8b-845e-80ca55efd20e"
+ definition_id = "2db6fe08-64a8-456a-8417-0ff8566dc323"
+ name = "Brittany Mohr"
+ workspace_id = "b07bf072-8b70-4775-98c6-7348eaa4356f"
}
```
@@ -40,9 +38,13 @@ resource "airbyte_destination_oracle" "my_destination_oracle" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -53,17 +55,18 @@ resource "airbyte_destination_oracle" "my_destination_oracle" {
Required:
-- `destination_type` (String) must be one of ["oracle"]
- `host` (String) The hostname of the database.
-- `port` (Number) The port of the database.
- `sid` (String) The System Identifier uniquely distinguishes the instance from any other instance on the same computer.
- `username` (String) The username to access the database. This user must have CREATE USER privileges in the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) The password associated with the username.
-- `schema` (String) The default schema is used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. The usual value for this field is "airbyte". In Oracle, schemas and users are the same thing, so the "user" parameter is used as the login credentials and this is used for the default Airbyte message schema.
+- `password` (String, Sensitive) The password associated with the username.
+- `port` (Number) Default: 1521
+The port of the database.
+- `schema` (String) Default: "airbyte"
+The default schema is used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. The usual value for this field is "airbyte". In Oracle, schemas and users are the same thing, so the "user" parameter is used as the login credentials and this is used for the default Airbyte message schema.
- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
@@ -71,80 +74,41 @@ Optional:
Optional:
-- `destination_oracle_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_ssh_tunnel_method_no_tunnel))
-- `destination_oracle_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_ssh_tunnel_method_password_authentication))
-- `destination_oracle_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_ssh_tunnel_method_ssh_key_authentication))
-- `destination_oracle_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_update_ssh_tunnel_method_no_tunnel))
-- `destination_oracle_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_update_ssh_tunnel_method_password_authentication))
-- `destination_oracle_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_oracle_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_ssh_tunnel_method_no_tunnel`
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
+Optional:
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_update_ssh_tunnel_method_no_tunnel`
-
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_oracle_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/destination_pinecone.md b/docs/resources/destination_pinecone.md
index 6f890911c..2ec2b1567 100644
--- a/docs/resources/destination_pinecone.md
+++ b/docs/resources/destination_pinecone.md
@@ -15,31 +15,43 @@ DestinationPinecone Resource
```terraform
resource "airbyte_destination_pinecone" "my_destination_pinecone" {
configuration = {
- destination_type = "pinecone"
embedding = {
- destination_pinecone_embedding_cohere = {
- cohere_key = "...my_cohere_key..."
- mode = "cohere"
+ destination_pinecone_azure_open_ai = {
+ api_base = "https://your-resource-name.openai.azure.com"
+ deployment = "your-resource-name"
+ openai_key = "...my_openai_key..."
}
}
indexing = {
index = "...my_index..."
- pinecone_environment = "...my_pinecone_environment..."
+ pinecone_environment = "us-west1-gcp"
pinecone_key = "...my_pinecone_key..."
}
processing = {
- chunk_overlap = 2
- chunk_size = 3
+ chunk_overlap = 6
+ chunk_size = 6
+ field_name_mappings = [
+ {
+ from_field = "...my_from_field..."
+ to_field = "...my_to_field..."
+ },
+ ]
metadata_fields = [
"...",
]
text_fields = [
"...",
]
+ text_splitter = {
+ destination_pinecone_by_markdown_header = {
+ split_level = 7
+ }
+ }
}
}
- name = "Cecelia Braun"
- workspace_id = "8b6a89fb-e3a5-4aa8-a482-4d0ab4075088"
+ definition_id = "d49dbc4f-abbf-4199-8382-023b4de2c1a7"
+ name = "Bobby Lemke"
+ workspace_id = "d3cde3c9-d6fa-494b-b4b9-38f85ce1dfc1"
}
```
@@ -49,9 +61,13 @@ resource "airbyte_destination_pinecone" "my_destination_pinecone" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -62,7 +78,6 @@ resource "airbyte_destination_pinecone" "my_destination_pinecone" {
Required:
-- `destination_type` (String) must be one of ["pinecone"]
- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding))
- `indexing` (Attributes) Pinecone is a popular vector store that can be used to store and retrieve embeddings. (see [below for nested schema](#nestedatt--configuration--indexing))
- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing))
@@ -72,99 +87,127 @@ Required:
Optional:
-- `destination_pinecone_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_cohere))
-- `destination_pinecone_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_fake))
-- `destination_pinecone_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_open_ai))
-- `destination_pinecone_update_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_cohere))
-- `destination_pinecone_update_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_fake))
-- `destination_pinecone_update_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_open_ai))
+- `azure_open_ai` (Attributes) Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--azure_open_ai))
+- `cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--cohere))
+- `fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--fake))
+- `open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--open_ai))
+- `open_ai_compatible` (Attributes) Use a service that's compatible with the OpenAI API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--open_ai_compatible))
-
-### Nested Schema for `configuration.embedding.destination_pinecone_embedding_cohere`
+
+### Nested Schema for `configuration.embedding.azure_open_ai`
Required:
-- `cohere_key` (String)
+- `api_base` (String) The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+- `deployment` (String) The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+- `openai_key` (String, Sensitive) The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
-Optional:
-- `mode` (String) must be one of ["cohere"]
+
+### Nested Schema for `configuration.embedding.cohere`
+Required:
-
-### Nested Schema for `configuration.embedding.destination_pinecone_embedding_fake`
+- `cohere_key` (String, Sensitive)
+
+
+
+### Nested Schema for `configuration.embedding.fake`
-Optional:
-- `mode` (String) must be one of ["fake"]
+
+### Nested Schema for `configuration.embedding.open_ai`
+
+Required:
+
+- `openai_key` (String, Sensitive)
-
-### Nested Schema for `configuration.embedding.destination_pinecone_embedding_open_ai`
+
+### Nested Schema for `configuration.embedding.open_ai_compatible`
Required:
-- `openai_key` (String)
+- `base_url` (String) The base URL for your OpenAI-compatible service
+- `dimensions` (Number) The number of dimensions the embedding model is generating
Optional:
-- `mode` (String) must be one of ["openai"]
+- `api_key` (String, Sensitive) Default: ""
+- `model_name` (String) Default: "text-embedding-ada-002"
+The name of the model to use for embedding
-
-### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_cohere`
+
+
+### Nested Schema for `configuration.indexing`
Required:
-- `cohere_key` (String)
+- `index` (String) Pinecone index in your project to load data into
+- `pinecone_environment` (String) Pinecone Cloud environment to use
+- `pinecone_key` (String, Sensitive) The Pinecone API key to use matching the environment (copy from Pinecone console)
-Optional:
-- `mode` (String) must be one of ["cohere"]
+
+### Nested Schema for `configuration.processing`
+Required:
-
-### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_fake`
+- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
Optional:
-- `mode` (String) must be one of ["fake"]
-
+- `chunk_overlap` (Number) Default: 0
+Size of overlap between chunks in tokens to store in vector store to better capture relevant context
+- `field_name_mappings` (Attributes List) List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation. (see [below for nested schema](#nestedatt--configuration--processing--field_name_mappings))
+- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
+- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
+- `text_splitter` (Attributes) Split text fields into chunks based on the specified method. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter))
-
-### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_open_ai`
+
+### Nested Schema for `configuration.processing.field_name_mappings`
Required:
-- `openai_key` (String)
+- `from_field` (String) The field name in the source
+- `to_field` (String) The field name to use in the destination
-Optional:
-- `mode` (String) must be one of ["openai"]
+
+### Nested Schema for `configuration.processing.text_splitter`
+Optional:
+- `by_markdown_header` (Attributes) Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_markdown_header))
+- `by_programming_language` (Attributes) Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_programming_language))
+- `by_separator` (Attributes) Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_separator))
-
-### Nested Schema for `configuration.indexing`
+
+### Nested Schema for `configuration.processing.text_splitter.by_markdown_header`
-Required:
+Optional:
-- `index` (String) Pinecone index to use
-- `pinecone_environment` (String) Pinecone environment to use
-- `pinecone_key` (String)
+- `split_level` (Number) Default: 1
+Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
-
-### Nested Schema for `configuration.processing`
+
+### Nested Schema for `configuration.processing.text_splitter.by_programming_language`
Required:
-- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)
+- `language` (String) must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
+Split code in suitable places based on the programming language
+
+
+
+### Nested Schema for `configuration.processing.text_splitter.by_separator`
Optional:
-- `chunk_overlap` (Number) Size of overlap between chunks in tokens to store in vector store to better capture relevant context
-- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
-- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
+- `keep_separator` (Boolean) Default: false
+Whether to keep the separator in the resulting chunks
+- `separators` (List of String) List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
diff --git a/docs/resources/destination_postgres.md b/docs/resources/destination_postgres.md
index b75470f08..bcc1f9c49 100644
--- a/docs/resources/destination_postgres.md
+++ b/docs/resources/destination_postgres.md
@@ -15,27 +15,23 @@ DestinationPostgres Resource
```terraform
resource "airbyte_destination_postgres" "my_destination_postgres" {
configuration = {
- database = "...my_database..."
- destination_type = "postgres"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 5432
- schema = "public"
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 5432
+ schema = "public"
ssl_mode = {
- destination_postgres_ssl_modes_allow = {
- mode = "allow"
- }
+ allow = {}
}
tunnel_method = {
- destination_postgres_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_postgres_no_tunnel = {}
}
- username = "Foster.Borer"
+ username = "Burley_Kuhic"
}
- name = "Karen Kautzer"
- workspace_id = "904f3b11-94b8-4abf-a03a-79f9dfe0ab7d"
+ definition_id = "db19e64b-83f6-43d3-8837-0e173ec9d4f3"
+ name = "Dianna Dooley V"
+ workspace_id = "2a8a43c0-f29f-47cb-912b-320943801c36"
}
```
@@ -45,9 +41,13 @@ resource "airbyte_destination_postgres" "my_destination_postgres" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -59,16 +59,17 @@ resource "airbyte_destination_postgres" "my_destination_postgres" {
Required:
- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["postgres"]
- `host` (String) Hostname of the database.
-- `port` (Number) Port of the database.
-- `schema` (String) The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".
- `username` (String) Username to use to access the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) Password associated with the username.
+- `password` (String, Sensitive) Password associated with the username.
+- `port` (Number) Default: 5432
+Port of the database.
+- `schema` (String) Default: "public"
+The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".
- `ssl_mode` (Attributes) SSL connection modes.
disable - Chose this mode to disable encryption of communication between Airbyte and destination database
allow - Chose this mode to enable encryption only when required by the source database
@@ -84,137 +85,53 @@ Optional:
Optional:
-- `destination_postgres_ssl_modes_allow` (Attributes) Allow SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_allow))
-- `destination_postgres_ssl_modes_disable` (Attributes) Disable SSL. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_disable))
-- `destination_postgres_ssl_modes_prefer` (Attributes) Prefer SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_prefer))
-- `destination_postgres_ssl_modes_require` (Attributes) Require SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_require))
-- `destination_postgres_ssl_modes_verify_ca` (Attributes) Verify-ca SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_verify_ca))
-- `destination_postgres_ssl_modes_verify_full` (Attributes) Verify-full SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_ssl_modes_verify_full))
-- `destination_postgres_update_ssl_modes_allow` (Attributes) Allow SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_allow))
-- `destination_postgres_update_ssl_modes_disable` (Attributes) Disable SSL. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_disable))
-- `destination_postgres_update_ssl_modes_prefer` (Attributes) Prefer SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_prefer))
-- `destination_postgres_update_ssl_modes_require` (Attributes) Require SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_require))
-- `destination_postgres_update_ssl_modes_verify_ca` (Attributes) Verify-ca SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_verify_ca))
-- `destination_postgres_update_ssl_modes_verify_full` (Attributes) Verify-full SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_postgres_update_ssl_modes_verify_full))
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_allow`
-
-Required:
-
-- `mode` (String) must be one of ["allow"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_disable`
-
-Required:
-
-- `mode` (String) must be one of ["disable"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_prefer`
-
-Required:
-
-- `mode` (String) must be one of ["prefer"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_require`
-
-Required:
-
-- `mode` (String) must be one of ["require"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_verify_ca`
-
-Required:
-
-- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify-ca"]
-
-Optional:
-
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_ssl_modes_verify_full`
-
-Required:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `mode` (String) must be one of ["verify-full"]
-
-Optional:
-
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_allow`
-
-Required:
-
-- `mode` (String) must be one of ["allow"]
+- `allow` (Attributes) Allow SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--allow))
+- `disable` (Attributes) Disable SSL. (see [below for nested schema](#nestedatt--configuration--ssl_mode--disable))
+- `prefer` (Attributes) Prefer SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--prefer))
+- `require` (Attributes) Require SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--require))
+- `verify_ca` (Attributes) Verify-ca SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--verify_ca))
+- `verify_full` (Attributes) Verify-full SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--verify_full))
+
+### Nested Schema for `configuration.ssl_mode.allow`
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_disable`
-Required:
+
+### Nested Schema for `configuration.ssl_mode.disable`
-- `mode` (String) must be one of ["disable"]
+
+### Nested Schema for `configuration.ssl_mode.prefer`
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_prefer`
-
-Required:
-- `mode` (String) must be one of ["prefer"]
+
+### Nested Schema for `configuration.ssl_mode.require`
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_require`
-
-Required:
-
-- `mode` (String) must be one of ["require"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_verify_ca`
+
+### Nested Schema for `configuration.ssl_mode.verify_ca`
Required:
- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify-ca"]
Optional:
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
+- `client_key_password` (String, Sensitive) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-
-### Nested Schema for `configuration.ssl_mode.destination_postgres_update_ssl_modes_verify_full`
+
+### Nested Schema for `configuration.ssl_mode.verify_full`
Required:
- `ca_certificate` (String) CA certificate
- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `mode` (String) must be one of ["verify-full"]
+- `client_key` (String, Sensitive) Client key
Optional:
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
+- `client_key_password` (String, Sensitive) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
@@ -223,80 +140,41 @@ Optional:
Optional:
-- `destination_postgres_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_ssh_tunnel_method_no_tunnel))
-- `destination_postgres_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_ssh_tunnel_method_password_authentication))
-- `destination_postgres_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_ssh_tunnel_method_ssh_key_authentication))
-- `destination_postgres_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_update_ssh_tunnel_method_no_tunnel))
-- `destination_postgres_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_update_ssh_tunnel_method_password_authentication))
-- `destination_postgres_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_postgres_update_ssh_tunnel_method_ssh_key_authentication))
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_ssh_tunnel_method_no_tunnel`
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_update_ssh_tunnel_method_no_tunnel`
-
-Required:
+Optional:
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_postgres_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/destination_pubsub.md b/docs/resources/destination_pubsub.md
index e4efa0825..fd9cd8338 100644
--- a/docs/resources/destination_pubsub.md
+++ b/docs/resources/destination_pubsub.md
@@ -15,18 +15,18 @@ DestinationPubsub Resource
```terraform
resource "airbyte_destination_pubsub" "my_destination_pubsub" {
configuration = {
- batching_delay_threshold = 7
+ batching_delay_threshold = 5
batching_element_count_threshold = 5
batching_enabled = true
batching_request_bytes_threshold = 3
credentials_json = "...my_credentials_json..."
- destination_type = "pubsub"
ordering_enabled = true
project_id = "...my_project_id..."
topic_id = "...my_topic_id..."
}
- name = "Phil Boyer"
- workspace_id = "f86bc173-d689-4eee-9526-f8d986e881ea"
+ definition_id = "b6294a31-a29a-4af3-8680-70eca1537042"
+ name = "Ada Harber"
+ workspace_id = "e54dc306-1658-46b7-b990-fea69beba7dc"
}
```
@@ -36,9 +36,13 @@ resource "airbyte_destination_pubsub" "my_destination_pubsub" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -49,17 +53,21 @@ resource "airbyte_destination_pubsub" "my_destination_pubsub" {
Required:
-- `batching_enabled` (Boolean) If TRUE messages will be buffered instead of sending them one by one
- `credentials_json` (String) The contents of the JSON service account key. Check out the docs if you need help generating this key.
-- `destination_type` (String) must be one of ["pubsub"]
-- `ordering_enabled` (Boolean) If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key of stream
- `project_id` (String) The GCP project ID for the project containing the target PubSub.
- `topic_id` (String) The PubSub topic ID in the given GCP project ID.
Optional:
-- `batching_delay_threshold` (Number) Number of ms before the buffer is flushed
-- `batching_element_count_threshold` (Number) Number of messages before the buffer is flushed
-- `batching_request_bytes_threshold` (Number) Number of bytes before the buffer is flushed
+- `batching_delay_threshold` (Number) Default: 1
+Number of ms before the buffer is flushed
+- `batching_element_count_threshold` (Number) Default: 1
+Number of messages before the buffer is flushed
+- `batching_enabled` (Boolean) Default: false
+If TRUE messages will be buffered instead of sending them one by one
+- `batching_request_bytes_threshold` (Number) Default: 1
+Number of bytes before the buffer is flushed
+- `ordering_enabled` (Boolean) Default: false
+If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key of stream
diff --git a/docs/resources/destination_qdrant.md b/docs/resources/destination_qdrant.md
new file mode 100644
index 000000000..460f59a7c
--- /dev/null
+++ b/docs/resources/destination_qdrant.md
@@ -0,0 +1,283 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_destination_qdrant Resource - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ DestinationQdrant Resource
+---
+
+# airbyte_destination_qdrant (Resource)
+
+DestinationQdrant Resource
+
+## Example Usage
+
+```terraform
+resource "airbyte_destination_qdrant" "my_destination_qdrant" {
+ configuration = {
+ embedding = {
+ destination_qdrant_azure_open_ai = {
+ api_base = "https://your-resource-name.openai.azure.com"
+ deployment = "your-resource-name"
+ openai_key = "...my_openai_key..."
+ }
+ }
+ indexing = {
+ auth_method = {
+ api_key_auth = {
+ api_key = "...my_api_key..."
+ }
+ }
+ collection = "...my_collection..."
+ distance_metric = {
+ cos = {}
+ }
+ prefer_grpc = true
+ text_field = "...my_text_field..."
+ url = "...my_url..."
+ }
+ processing = {
+ chunk_overlap = 8
+ chunk_size = 9
+ field_name_mappings = [
+ {
+ from_field = "...my_from_field..."
+ to_field = "...my_to_field..."
+ },
+ ]
+ metadata_fields = [
+ "...",
+ ]
+ text_fields = [
+ "...",
+ ]
+ text_splitter = {
+ destination_qdrant_by_markdown_header = {
+ split_level = 9
+ }
+ }
+ }
+ }
+ definition_id = "8f8d8392-aab1-45fb-858b-ad9ea7671d58"
+ name = "Kathryn O'Keefe"
+ workspace_id = "9de520ce-3420-4a29-9e5c-09962877b187"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
+- `workspace_id` (String)
+
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
+### Read-Only
+
+- `destination_id` (String)
+- `destination_type` (String)
+
+
+### Nested Schema for `configuration`
+
+Required:
+
+- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding))
+- `indexing` (Attributes) Indexing configuration (see [below for nested schema](#nestedatt--configuration--indexing))
+- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing))
+
+
+### Nested Schema for `configuration.embedding`
+
+Optional:
+
+- `azure_open_ai` (Attributes) Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--azure_open_ai))
+- `cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--cohere))
+- `fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--fake))
+- `from_field` (Attributes) Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. (see [below for nested schema](#nestedatt--configuration--embedding--from_field))
+- `open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--open_ai))
+- `open_ai_compatible` (Attributes) Use a service that's compatible with the OpenAI API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--open_ai_compatible))
+
+
+### Nested Schema for `configuration.embedding.azure_open_ai`
+
+Required:
+
+- `api_base` (String) The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+- `deployment` (String) The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+- `openai_key` (String, Sensitive) The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+
+
+
+### Nested Schema for `configuration.embedding.cohere`
+
+Required:
+
+- `cohere_key` (String, Sensitive)
+
+
+
+### Nested Schema for `configuration.embedding.fake`
+
+
+
+### Nested Schema for `configuration.embedding.from_field`
+
+Required:
+
+- `dimensions` (Number) The number of dimensions the embedding model is generating
+- `field_name` (String) Name of the field in the record that contains the embedding
+
+
+
+### Nested Schema for `configuration.embedding.open_ai`
+
+Required:
+
+- `openai_key` (String, Sensitive)
+
+
+
+### Nested Schema for `configuration.embedding.open_ai_compatible`
+
+Required:
+
+- `base_url` (String) The base URL for your OpenAI-compatible service
+- `dimensions` (Number) The number of dimensions the embedding model is generating
+
+Optional:
+
+- `api_key` (String, Sensitive) Default: ""
+- `model_name` (String) Default: "text-embedding-ada-002"
+The name of the model to use for embedding
+
+
+
+
+### Nested Schema for `configuration.indexing`
+
+Required:
+
+- `collection` (String) The collection to load data into
+- `url` (String) Public Endpoint of the Qdrant cluster
+
+Optional:
+
+- `auth_method` (Attributes) Method to authenticate with the Qdrant Instance (see [below for nested schema](#nestedatt--configuration--indexing--auth_method))
+- `distance_metric` (Attributes) The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the does not exist yet and is created automatically by the connector. (see [below for nested schema](#nestedatt--configuration--indexing--distance_metric))
+- `prefer_grpc` (Boolean) Default: true
+Whether to prefer gRPC over HTTP. Set to true for Qdrant cloud clusters
+- `text_field` (String) Default: "text"
+The field in the payload that contains the embedded text
+
+
+### Nested Schema for `configuration.indexing.auth_method`
+
+Optional:
+
+- `api_key_auth` (Attributes) Method to authenticate with the Qdrant Instance (see [below for nested schema](#nestedatt--configuration--indexing--auth_method--api_key_auth))
+- `no_auth` (Attributes) Method to authenticate with the Qdrant Instance (see [below for nested schema](#nestedatt--configuration--indexing--auth_method--no_auth))
+
+
+### Nested Schema for `configuration.indexing.auth_method.api_key_auth`
+
+Required:
+
+- `api_key` (String, Sensitive) API Key for the Qdrant instance
+
+
+
+### Nested Schema for `configuration.indexing.auth_method.no_auth`
+
+
+
+
+### Nested Schema for `configuration.indexing.distance_metric`
+
+Optional:
+
+- `cos` (Attributes) The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the does not exist yet and is created automatically by the connector. (see [below for nested schema](#nestedatt--configuration--indexing--distance_metric--cos))
+- `dot` (Attributes) The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the does not exist yet and is created automatically by the connector. (see [below for nested schema](#nestedatt--configuration--indexing--distance_metric--dot))
+- `euc` (Attributes) The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the does not exist yet and is created automatically by the connector. (see [below for nested schema](#nestedatt--configuration--indexing--distance_metric--euc))
+
+
+### Nested Schema for `configuration.indexing.distance_metric.cos`
+
+
+
+### Nested Schema for `configuration.indexing.distance_metric.dot`
+
+
+
+### Nested Schema for `configuration.indexing.distance_metric.euc`
+
+
+
+
+
+### Nested Schema for `configuration.processing`
+
+Required:
+
+- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
+
+Optional:
+
+- `chunk_overlap` (Number) Default: 0
+Size of overlap between chunks in tokens to store in vector store to better capture relevant context
+- `field_name_mappings` (Attributes List) List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation. (see [below for nested schema](#nestedatt--configuration--processing--field_name_mappings))
+- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
+- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
+- `text_splitter` (Attributes) Split text fields into chunks based on the specified method. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter))
+
+
+### Nested Schema for `configuration.processing.field_name_mappings`
+
+Required:
+
+- `from_field` (String) The field name in the source
+- `to_field` (String) The field name to use in the destination
+
+
+
+### Nested Schema for `configuration.processing.text_splitter`
+
+Optional:
+
+- `by_markdown_header` (Attributes) Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_markdown_header))
+- `by_programming_language` (Attributes) Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_programming_language))
+- `by_separator` (Attributes) Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_separator))
+
+
+### Nested Schema for `configuration.processing.text_splitter.by_markdown_header`
+
+Optional:
+
+- `split_level` (Number) Default: 1
+Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
+
+
+
+### Nested Schema for `configuration.processing.text_splitter.by_programming_language`
+
+Required:
+
+- `language` (String) must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
+Split code in suitable places based on the programming language
+
+
+
+### Nested Schema for `configuration.processing.text_splitter.by_separator`
+
+Optional:
+
+- `keep_separator` (Boolean) Default: false
+Whether to keep the separator in the resulting chunks
+- `separators` (List of String) List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
+
+
diff --git a/docs/resources/destination_redis.md b/docs/resources/destination_redis.md
index 3392155a9..aa0cb75d7 100644
--- a/docs/resources/destination_redis.md
+++ b/docs/resources/destination_redis.md
@@ -15,26 +15,22 @@ DestinationRedis Resource
```terraform
resource "airbyte_destination_redis" "my_destination_redis" {
configuration = {
- cache_type = "hash"
- destination_type = "redis"
- host = "localhost,127.0.0.1"
- password = "...my_password..."
- port = 9
- ssl = false
+ cache_type = "hash"
+ host = "localhost,127.0.0.1"
+ password = "...my_password..."
+ port = 7
+ ssl = false
ssl_mode = {
- destination_redis_ssl_modes_disable = {
- mode = "disable"
- }
+ destination_redis_disable = {}
}
tunnel_method = {
- destination_redis_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_redis_no_tunnel = {}
}
- username = "Vivianne.Baumbach3"
+ username = "Keyshawn.Ledner"
}
- name = "Bonnie Halvorson"
- workspace_id = "f94e29e9-73e9-422a-97a1-5be3e060807e"
+ definition_id = "34412bc3-217a-4cbe-aad9-f3186486fc7b"
+ name = "Shannon Stroman"
+ workspace_id = "848f4034-6c04-4b19-bfb2-8918e382726e"
}
```
@@ -44,9 +40,13 @@ resource "airbyte_destination_redis" "my_destination_redis" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -57,17 +57,18 @@ resource "airbyte_destination_redis" "my_destination_redis" {
Required:
-- `cache_type` (String) must be one of ["hash"]
-Redis cache type to store data in.
-- `destination_type` (String) must be one of ["redis"]
- `host` (String) Redis host to connect to.
-- `port` (Number) Port of Redis.
- `username` (String) Username associated with Redis.
Optional:
-- `password` (String) Password associated with Redis.
-- `ssl` (Boolean) Indicates whether SSL encryption protocol will be used to connect to Redis. It is recommended to use SSL connection if possible.
+- `cache_type` (String) must be one of ["hash"]; Default: "hash"
+Redis cache type to store data in.
+- `password` (String, Sensitive) Password associated with Redis.
+- `port` (Number) Default: 6379
+Port of Redis.
+- `ssl` (Boolean) Default: false
+Indicates whether SSL encryption protocol will be used to connect to Redis. It is recommended to use SSL connection if possible.
- `ssl_mode` (Attributes) SSL connection modes.
verify-full - This is the most secure mode. Always require encryption and verifies the identity of the source database server (see [below for nested schema](#nestedatt--configuration--ssl_mode))
- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
@@ -77,55 +78,25 @@ Optional:
Optional:
-- `destination_redis_ssl_modes_disable` (Attributes) Disable SSL. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_redis_ssl_modes_disable))
-- `destination_redis_ssl_modes_verify_full` (Attributes) Verify-full SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_redis_ssl_modes_verify_full))
-- `destination_redis_update_ssl_modes_disable` (Attributes) Disable SSL. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_redis_update_ssl_modes_disable))
-- `destination_redis_update_ssl_modes_verify_full` (Attributes) Verify-full SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--destination_redis_update_ssl_modes_verify_full))
-
-
-### Nested Schema for `configuration.ssl_mode.destination_redis_ssl_modes_disable`
-
-Required:
-
-- `mode` (String) must be one of ["disable"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_redis_ssl_modes_verify_full`
-
-Required:
-
-- `ca_certificate` (String) CA certificate
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `mode` (String) must be one of ["verify-full"]
-
-Optional:
-
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-
-
-
-### Nested Schema for `configuration.ssl_mode.destination_redis_update_ssl_modes_disable`
-
-Required:
+- `disable` (Attributes) Disable SSL. (see [below for nested schema](#nestedatt--configuration--ssl_mode--disable))
+- `verify_full` (Attributes) Verify-full SSL mode. (see [below for nested schema](#nestedatt--configuration--ssl_mode--verify_full))
-- `mode` (String) must be one of ["disable"]
+
+### Nested Schema for `configuration.ssl_mode.disable`
-
-### Nested Schema for `configuration.ssl_mode.destination_redis_update_ssl_modes_verify_full`
+
+### Nested Schema for `configuration.ssl_mode.verify_full`
Required:
- `ca_certificate` (String) CA certificate
- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `mode` (String) must be one of ["verify-full"]
+- `client_key` (String, Sensitive) Client key
Optional:
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
+- `client_key_password` (String, Sensitive) Password for keystorage. If you do not add it - the password will be generated automatically.
@@ -134,80 +105,41 @@ Optional:
Optional:
-- `destination_redis_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_ssh_tunnel_method_no_tunnel))
-- `destination_redis_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_ssh_tunnel_method_password_authentication))
-- `destination_redis_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_ssh_tunnel_method_ssh_key_authentication))
-- `destination_redis_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_update_ssh_tunnel_method_no_tunnel))
-- `destination_redis_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_update_ssh_tunnel_method_password_authentication))
-- `destination_redis_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redis_update_ssh_tunnel_method_ssh_key_authentication))
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_ssh_tunnel_method_no_tunnel`
-
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_update_ssh_tunnel_method_no_tunnel`
-
-Required:
+Optional:
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redis_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/destination_redshift.md b/docs/resources/destination_redshift.md
index cf6d52877..e508a2712 100644
--- a/docs/resources/destination_redshift.md
+++ b/docs/resources/destination_redshift.md
@@ -15,41 +15,37 @@ DestinationRedshift Resource
```terraform
resource "airbyte_destination_redshift" "my_destination_redshift" {
configuration = {
- database = "...my_database..."
- destination_type = "redshift"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 5439
- schema = "public"
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 5439
+ schema = "public"
tunnel_method = {
- destination_redshift_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_redshift_no_tunnel = {}
}
uploading_method = {
- destination_redshift_uploading_method_s3_staging = {
+ s3_staging = {
access_key_id = "...my_access_key_id..."
encryption = {
- destination_redshift_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption = {
- encryption_type = "aes_cbc_envelope"
+ aes_cbc_envelope_encryption = {
key_encrypting_key = "...my_key_encrypting_key..."
}
}
file_buffer_count = 10
- file_name_pattern = "{timestamp}"
- method = "S3 Staging"
- purge_staging_data = false
+ file_name_pattern = "{date:yyyy_MM}"
+ purge_staging_data = true
s3_bucket_name = "airbyte.staging"
s3_bucket_path = "data_sync/test"
- s3_bucket_region = "us-west-2"
+ s3_bucket_region = "eu-west-1"
secret_access_key = "...my_secret_access_key..."
}
}
- username = "Margarette_Rau"
+ username = "Rollin_Ernser87"
}
- name = "Mrs. Geraldine Zulauf"
- workspace_id = "7a60ff2a-54a3-41e9-8764-a3e865e7956f"
+ definition_id = "1f9eaf9a-8e21-457a-8560-c89e77fd0c20"
+ name = "Linda Langworth"
+ workspace_id = "396de60f-942f-4937-a3c5-9508dd11c7ed"
}
```
@@ -59,9 +55,13 @@ resource "airbyte_destination_redshift" "my_destination_redshift" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -73,225 +73,116 @@ resource "airbyte_destination_redshift" "my_destination_redshift" {
Required:
- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["redshift"]
- `host` (String) Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the database.
-- `schema` (String) The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public".
+- `password` (String, Sensitive) Password associated with the username.
- `username` (String) Username to use to access the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
+- `port` (Number) Default: 5439
+Port of the database.
+- `schema` (String) Default: "public"
+The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public".
- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
-- `uploading_method` (Attributes) The method how the data will be uploaded to the database. (see [below for nested schema](#nestedatt--configuration--uploading_method))
+- `uploading_method` (Attributes) The way data will be uploaded to Redshift. (see [below for nested schema](#nestedatt--configuration--uploading_method))
### Nested Schema for `configuration.tunnel_method`
Optional:
-- `destination_redshift_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_ssh_tunnel_method_no_tunnel))
-- `destination_redshift_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_ssh_tunnel_method_password_authentication))
-- `destination_redshift_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_ssh_tunnel_method_ssh_key_authentication))
-- `destination_redshift_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_update_ssh_tunnel_method_no_tunnel))
-- `destination_redshift_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_update_ssh_tunnel_method_password_authentication))
-- `destination_redshift_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_redshift_update_ssh_tunnel_method_ssh_key_authentication))
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_ssh_tunnel_method_no_tunnel`
-
-Required:
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_update_ssh_tunnel_method_no_tunnel`
-
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
+Optional:
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_update_ssh_tunnel_method_password_authentication`
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-Required:
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_redshift_update_ssh_tunnel_method_ssh_key_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.uploading_method`
-
Optional:
-- `destination_redshift_update_uploading_method_s3_staging` (Attributes) The method how the data will be uploaded to the database. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_update_uploading_method_s3_staging))
-- `destination_redshift_update_uploading_method_standard` (Attributes) The method how the data will be uploaded to the database. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_update_uploading_method_standard))
-- `destination_redshift_uploading_method_s3_staging` (Attributes) The method how the data will be uploaded to the database. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_uploading_method_s3_staging))
-- `destination_redshift_uploading_method_standard` (Attributes) The method how the data will be uploaded to the database. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_uploading_method_standard))
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_update_uploading_method_s3_staging`
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-Required:
-
-- `access_key_id` (String) This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.
-- `method` (String) must be one of ["S3 Staging"]
-- `s3_bucket_name` (String) The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1"]
-The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.
-- `secret_access_key` (String) The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.
-Optional:
-- `encryption` (Attributes) How to encrypt the staging data (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_update_uploading_method_s3_staging--encryption))
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
-- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-- `purge_staging_data` (Boolean) Whether to delete the staging files from S3 after completing the sync. See docs for details.
-- `s3_bucket_path` (String) The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_update_uploading_method_s3_staging.s3_bucket_path`
+
+### Nested Schema for `configuration.uploading_method`
Optional:
-- `destination_redshift_update_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption` (Attributes) Staging data will be encrypted using AES-CBC envelope encryption. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_update_uploading_method_s3_staging--s3_bucket_path--destination_redshift_update_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption))
-- `destination_redshift_update_uploading_method_s3_staging_encryption_no_encryption` (Attributes) Staging data will be stored in plaintext. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_update_uploading_method_s3_staging--s3_bucket_path--destination_redshift_update_uploading_method_s3_staging_encryption_no_encryption))
+- `s3_staging` (Attributes) (recommended) Uploads data to S3 and then uses a COPY to insert the data into Redshift. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details. (see [below for nested schema](#nestedatt--configuration--uploading_method--s3_staging))
+- `standard` (Attributes) (not recommended) Direct loading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In all other cases, you should use S3 uploading. (see [below for nested schema](#nestedatt--configuration--uploading_method--standard))
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_update_uploading_method_s3_staging.s3_bucket_path.destination_redshift_update_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption`
+
+### Nested Schema for `configuration.uploading_method.s3_staging`
Required:
-- `encryption_type` (String) must be one of ["aes_cbc_envelope"]
+- `access_key_id` (String, Sensitive) This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.
+- `s3_bucket_name` (String) The name of the staging S3 bucket.
+- `secret_access_key` (String, Sensitive) The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.
Optional:
-- `key_encrypting_key` (String) The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.
-
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_update_uploading_method_s3_staging.s3_bucket_path.destination_redshift_update_uploading_method_s3_staging_encryption_no_encryption`
-
-Required:
-
-- `encryption_type` (String) must be one of ["none"]
-
-
-
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_update_uploading_method_standard`
-
-Required:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_uploading_method_s3_staging`
-
-Required:
-
-- `access_key_id` (String) This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.
-- `method` (String) must be one of ["S3 Staging"]
-- `s3_bucket_name` (String) The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1"]
-The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.
-- `secret_access_key` (String) The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.
-
-Optional:
-
-- `encryption` (Attributes) How to encrypt the staging data (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_uploading_method_s3_staging--encryption))
-- `file_buffer_count` (Number) Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
+- `encryption` (Attributes) How to encrypt the staging data (see [below for nested schema](#nestedatt--configuration--uploading_method--s3_staging--encryption))
+- `file_buffer_count` (Number) Default: 10
+Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-- `purge_staging_data` (Boolean) Whether to delete the staging files from S3 after completing the sync. See docs for details.
+- `purge_staging_data` (Boolean) Default: true
+Whether to delete the staging files from S3 after completing the sync. See docs for details.
- `s3_bucket_path` (String) The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.
+- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1"]; Default: ""
+The region of the S3 staging bucket.
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_uploading_method_s3_staging.s3_bucket_path`
+
+### Nested Schema for `configuration.uploading_method.s3_staging.s3_bucket_region`
Optional:
-- `destination_redshift_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption` (Attributes) Staging data will be encrypted using AES-CBC envelope encryption. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_uploading_method_s3_staging--s3_bucket_path--destination_redshift_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption))
-- `destination_redshift_uploading_method_s3_staging_encryption_no_encryption` (Attributes) Staging data will be stored in plaintext. (see [below for nested schema](#nestedatt--configuration--uploading_method--destination_redshift_uploading_method_s3_staging--s3_bucket_path--destination_redshift_uploading_method_s3_staging_encryption_no_encryption))
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_uploading_method_s3_staging.s3_bucket_path.destination_redshift_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption`
+- `aescbc_envelope_encryption` (Attributes) Staging data will be encrypted using AES-CBC envelope encryption. (see [below for nested schema](#nestedatt--configuration--uploading_method--s3_staging--s3_bucket_region--aescbc_envelope_encryption))
+- `no_encryption` (Attributes) Staging data will be stored in plaintext. (see [below for nested schema](#nestedatt--configuration--uploading_method--s3_staging--s3_bucket_region--no_encryption))
-Required:
-
-- `encryption_type` (String) must be one of ["aes_cbc_envelope"]
+
+### Nested Schema for `configuration.uploading_method.s3_staging.s3_bucket_region.aescbc_envelope_encryption`
Optional:
-- `key_encrypting_key` (String) The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.
-
-
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_uploading_method_s3_staging.s3_bucket_path.destination_redshift_uploading_method_s3_staging_encryption_no_encryption`
+- `key_encrypting_key` (String, Sensitive) The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.
-Required:
-
-- `encryption_type` (String) must be one of ["none"]
+
+### Nested Schema for `configuration.uploading_method.s3_staging.s3_bucket_region.no_encryption`
-
-### Nested Schema for `configuration.uploading_method.destination_redshift_uploading_method_standard`
-
-Required:
-- `method` (String) must be one of ["Standard"]
+
+### Nested Schema for `configuration.uploading_method.standard`
diff --git a/docs/resources/destination_s3.md b/docs/resources/destination_s3.md
index 38141cdd1..c513bb9e5 100644
--- a/docs/resources/destination_s3.md
+++ b/docs/resources/destination_s3.md
@@ -16,12 +16,11 @@ DestinationS3 Resource
resource "airbyte_destination_s3" "my_destination_s3" {
configuration = {
access_key_id = "A012345678910EXAMPLE"
- destination_type = "s3"
- file_name_pattern = "{timestamp}"
+ file_name_pattern = "{date}"
format = {
- destination_s3_output_format_avro_apache_avro = {
+ destination_s3_avro_apache_avro = {
compression_codec = {
- destination_s3_output_format_avro_apache_avro_compression_codec_bzip2 = {
+ destination_s3_bzip2 = {
codec = "bzip2"
}
}
@@ -30,13 +29,14 @@ resource "airbyte_destination_s3" "my_destination_s3" {
}
s3_bucket_name = "airbyte_sync"
s3_bucket_path = "data_sync/test"
- s3_bucket_region = "us-west-1"
+ s3_bucket_region = "ap-southeast-1"
s3_endpoint = "http://localhost:9000"
s3_path_format = "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_"
secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
}
- name = "Joyce O'Kon"
- workspace_id = "9da660ff-57bf-4aad-8f9e-fc1b4512c103"
+ definition_id = "b1d5b002-89a0-4dc0-a329-a5cae9f38884"
+ name = "Lloyd Watsica"
+ workspace_id = "20ebb305-f362-44c4-b900-725fa3e33722"
}
```
@@ -46,9 +46,13 @@ resource "airbyte_destination_s3" "my_destination_s3" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -59,378 +63,201 @@ resource "airbyte_destination_s3" "my_destination_s3" {
Required:
-- `destination_type` (String) must be one of ["s3"]
- `format` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format))
- `s3_bucket_name` (String) The name of the S3 bucket. Read more here.
- `s3_bucket_path` (String) Directory under the S3 bucket where data will be written. Read more here
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the S3 bucket. See here for all region codes.
Optional:
-- `access_key_id` (String) The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.
+- `access_key_id` (String, Sensitive) The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.
- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-- `s3_endpoint` (String) Your S3 endpoint url. Read more here
+- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""
+The region of the S3 bucket. See here for all region codes.
+- `s3_endpoint` (String) Default: ""
+Your S3 endpoint url. Read more here
- `s3_path_format` (String) Format string on how data will be organized inside the S3 bucket directory. Read more here
-- `secret_access_key` (String) The corresponding secret to the access key ID. Read more here
+- `secret_access_key` (String, Sensitive) The corresponding secret to the access key ID. Read more here
### Nested Schema for `configuration.format`
Optional:
-- `destination_s3_output_format_avro_apache_avro` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro))
-- `destination_s3_output_format_csv_comma_separated_values` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_csv_comma_separated_values))
-- `destination_s3_output_format_json_lines_newline_delimited_json` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_json_lines_newline_delimited_json))
-- `destination_s3_output_format_parquet_columnar_storage` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_parquet_columnar_storage))
-- `destination_s3_update_output_format_avro_apache_avro` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro))
-- `destination_s3_update_output_format_csv_comma_separated_values` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_csv_comma_separated_values))
-- `destination_s3_update_output_format_json_lines_newline_delimited_json` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_json_lines_newline_delimited_json))
-- `destination_s3_update_output_format_parquet_columnar_storage` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_parquet_columnar_storage))
+- `avro_apache_avro` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro))
+- `csv_comma_separated_values` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--csv_comma_separated_values))
+- `json_lines_newline_delimited_json` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json))
+- `parquet_columnar_storage` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--parquet_columnar_storage))
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro`
+
+### Nested Schema for `configuration.format.avro_apache_avro`
Required:
-- `compression_codec` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--compression_codec))
-- `format_type` (String) must be one of ["Avro"]
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type`
+- `compression_codec` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--compression_codec))
Optional:
-- `destination_s3_output_format_avro_apache_avro_compression_codec_bzip2` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_bzip2))
-- `destination_s3_output_format_avro_apache_avro_compression_codec_deflate` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_deflate))
-- `destination_s3_output_format_avro_apache_avro_compression_codec_no_compression` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_no_compression))
-- `destination_s3_output_format_avro_apache_avro_compression_codec_snappy` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_snappy))
-- `destination_s3_output_format_avro_apache_avro_compression_codec_xz` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_xz))
-- `destination_s3_output_format_avro_apache_avro_compression_codec_zstandard` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_avro_apache_avro--format_type--destination_s3_output_format_avro_apache_avro_compression_codec_zstandard))
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_bzip2`
-
-Required:
-
-- `codec` (String) must be one of ["bzip2"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_deflate`
-
-Required:
-
-- `codec` (String) must be one of ["Deflate"]
-- `compression_level` (Number) 0: no compression & fastest, 9: best compression & slowest.
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_no_compression`
-
-Required:
-
-- `codec` (String) must be one of ["no compression"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_snappy`
-
-Required:
-
-- `codec` (String) must be one of ["snappy"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_xz`
-
-Required:
-
-- `codec` (String) must be one of ["xz"]
-- `compression_level` (Number) See here for details.
+- `format_type` (String) must be one of ["Avro"]; Default: "Avro"
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_avro_apache_avro.format_type.destination_s3_output_format_avro_apache_avro_compression_codec_zstandard`
-
-Required:
-
-- `codec` (String) must be one of ["zstandard"]
-- `compression_level` (Number) Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type`
Optional:
-- `include_checksum` (Boolean) If true, include a checksum with each data block.
-
-
-
+- `bzip2` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--bzip2))
+- `deflate` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--deflate))
+- `no_compression` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--no_compression))
+- `snappy` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--snappy))
+- `xz` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--xz))
+- `zstandard` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--avro_apache_avro--format_type--zstandard))
-
-### Nested Schema for `configuration.format.destination_s3_output_format_csv_comma_separated_values`
-
-Required:
-
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-- `format_type` (String) must be one of ["CSV"]
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.bzip2`
Optional:
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_csv_comma_separated_values--compression))
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_csv_comma_separated_values.compression`
-
-Optional:
+- `codec` (String) must be one of ["bzip2"]; Default: "bzip2"
-- `destination_s3_output_format_csv_comma_separated_values_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_csv_comma_separated_values--compression--destination_s3_output_format_csv_comma_separated_values_compression_gzip))
-- `destination_s3_output_format_csv_comma_separated_values_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_csv_comma_separated_values--compression--destination_s3_output_format_csv_comma_separated_values_compression_no_compression))
-
-### Nested Schema for `configuration.format.destination_s3_output_format_csv_comma_separated_values.compression.destination_s3_output_format_csv_comma_separated_values_compression_gzip`
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.deflate`
Optional:
-- `compression_type` (String) must be one of ["GZIP"]
+- `codec` (String) must be one of ["Deflate"]; Default: "Deflate"
+- `compression_level` (Number) Default: 0
+0: no compression & fastest, 9: best compression & slowest.
-
-### Nested Schema for `configuration.format.destination_s3_output_format_csv_comma_separated_values.compression.destination_s3_output_format_csv_comma_separated_values_compression_no_compression`
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.no_compression`
Optional:
-- `compression_type` (String) must be one of ["No Compression"]
+- `codec` (String) must be one of ["no compression"]; Default: "no compression"
-
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_json_lines_newline_delimited_json`
-
-Required:
-
-- `format_type` (String) must be one of ["JSONL"]
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.snappy`
Optional:
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_json_lines_newline_delimited_json--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
-
-
-### Nested Schema for `configuration.format.destination_s3_output_format_json_lines_newline_delimited_json.flattening`
+- `codec` (String) must be one of ["snappy"]; Default: "snappy"
-Optional:
-
-- `destination_s3_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_json_lines_newline_delimited_json--flattening--destination_s3_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_s3_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_output_format_json_lines_newline_delimited_json--flattening--destination_s3_output_format_json_lines_newline_delimited_json_compression_no_compression))
-
-### Nested Schema for `configuration.format.destination_s3_output_format_json_lines_newline_delimited_json.flattening.destination_s3_output_format_json_lines_newline_delimited_json_compression_gzip`
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.xz`
Optional:
-- `compression_type` (String) must be one of ["GZIP"]
+- `codec` (String) must be one of ["xz"]; Default: "xz"
+- `compression_level` (Number) Default: 6
+See here for details.
-
-### Nested Schema for `configuration.format.destination_s3_output_format_json_lines_newline_delimited_json.flattening.destination_s3_output_format_json_lines_newline_delimited_json_compression_no_compression`
+
+### Nested Schema for `configuration.format.avro_apache_avro.format_type.zstandard`
Optional:
-- `compression_type` (String) must be one of ["No Compression"]
+- `codec` (String) must be one of ["zstandard"]; Default: "zstandard"
+- `compression_level` (Number) Default: 3
+Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
+- `include_checksum` (Boolean) Default: false
+If true, include a checksum with each data block.
-
-### Nested Schema for `configuration.format.destination_s3_output_format_parquet_columnar_storage`
-
-Required:
-
-- `format_type` (String) must be one of ["Parquet"]
+
+### Nested Schema for `configuration.format.csv_comma_separated_values`
Optional:
-- `block_size_mb` (Number) This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]
-The compression algorithm used to compress data pages.
-- `dictionary_encoding` (Boolean) Default: true.
-- `dictionary_page_size_kb` (Number) There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
-- `max_padding_size_mb` (Number) Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
-- `page_size_kb` (Number) The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro`
-
-Required:
-
-- `compression_codec` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--compression_codec))
-- `format_type` (String) must be one of ["Avro"]
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type`
-
-Optional:
-
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_bzip2` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_bzip2))
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_deflate` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_deflate))
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_no_compression` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_no_compression))
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_snappy` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_snappy))
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_xz` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_xz))
-- `destination_s3_update_output_format_avro_apache_avro_compression_codec_zstandard` (Attributes) The compression algorithm used to compress data. Default to no compression. (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_avro_apache_avro--format_type--destination_s3_update_output_format_avro_apache_avro_compression_codec_zstandard))
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_bzip2`
-
-Required:
-
-- `codec` (String) must be one of ["bzip2"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_deflate`
-
-Required:
-
-- `codec` (String) must be one of ["Deflate"]
-- `compression_level` (Number) 0: no compression & fastest, 9: best compression & slowest.
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_no_compression`
-
-Required:
-
-- `codec` (String) must be one of ["no compression"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_snappy`
-
-Required:
-
-- `codec` (String) must be one of ["snappy"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_xz`
-
-Required:
-
-- `codec` (String) must be one of ["xz"]
-- `compression_level` (Number) See here for details.
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_avro_apache_avro.format_type.destination_s3_update_output_format_avro_apache_avro_compression_codec_zstandard`
-
-Required:
-
-- `codec` (String) must be one of ["zstandard"]
-- `compression_level` (Number) Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
-
-Optional:
-
-- `include_checksum` (Boolean) If true, include a checksum with each data block.
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_csv_comma_separated_values`
-
-Required:
-
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
+- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--csv_comma_separated_values--compression))
+- `flattening` (String) must be one of ["No flattening", "Root level flattening"]; Default: "No flattening"
Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-- `format_type` (String) must be one of ["CSV"]
-
-Optional:
+- `format_type` (String) must be one of ["CSV"]; Default: "CSV"
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_csv_comma_separated_values--compression))
-
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_csv_comma_separated_values.compression`
+
+### Nested Schema for `configuration.format.csv_comma_separated_values.format_type`
Optional:
-- `destination_s3_update_output_format_csv_comma_separated_values_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_csv_comma_separated_values--compression--destination_s3_update_output_format_csv_comma_separated_values_compression_gzip))
-- `destination_s3_update_output_format_csv_comma_separated_values_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_csv_comma_separated_values--compression--destination_s3_update_output_format_csv_comma_separated_values_compression_no_compression))
+- `gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--csv_comma_separated_values--format_type--gzip))
+- `no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz"). (see [below for nested schema](#nestedatt--configuration--format--csv_comma_separated_values--format_type--no_compression))
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_csv_comma_separated_values.compression.destination_s3_update_output_format_csv_comma_separated_values_compression_gzip`
+
+### Nested Schema for `configuration.format.csv_comma_separated_values.format_type.gzip`
Optional:
-- `compression_type` (String) must be one of ["GZIP"]
+- `compression_type` (String) must be one of ["GZIP"]; Default: "GZIP"
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_csv_comma_separated_values.compression.destination_s3_update_output_format_csv_comma_separated_values_compression_no_compression`
+
+### Nested Schema for `configuration.format.csv_comma_separated_values.format_type.no_compression`
Optional:
-- `compression_type` (String) must be one of ["No Compression"]
-
+- `compression_type` (String) must be one of ["No Compression"]; Default: "No Compression"
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_json_lines_newline_delimited_json`
-
-Required:
-- `format_type` (String) must be one of ["JSONL"]
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json`
Optional:
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_json_lines_newline_delimited_json--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
+- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json--compression))
+- `flattening` (String) must be one of ["No flattening", "Root level flattening"]; Default: "No flattening"
Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
+- `format_type` (String) must be one of ["JSONL"]; Default: "JSONL"
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_json_lines_newline_delimited_json.flattening`
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json.format_type`
Optional:
-- `destination_s3_update_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_json_lines_newline_delimited_json--flattening--destination_s3_update_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_s3_update_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_update_output_format_json_lines_newline_delimited_json--flattening--destination_s3_update_output_format_json_lines_newline_delimited_json_compression_no_compression))
+- `gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json--format_type--gzip))
+- `no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json--format_type--no_compression))
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_json_lines_newline_delimited_json.flattening.destination_s3_update_output_format_json_lines_newline_delimited_json_compression_gzip`
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json.format_type.gzip`
Optional:
-- `compression_type` (String) must be one of ["GZIP"]
+- `compression_type` (String) must be one of ["GZIP"]; Default: "GZIP"
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_json_lines_newline_delimited_json.flattening.destination_s3_update_output_format_json_lines_newline_delimited_json_compression_no_compression`
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json.format_type.no_compression`
Optional:
-- `compression_type` (String) must be one of ["No Compression"]
+- `compression_type` (String) must be one of ["No Compression"]; Default: "No Compression"
-
-### Nested Schema for `configuration.format.destination_s3_update_output_format_parquet_columnar_storage`
-
-Required:
-
-- `format_type` (String) must be one of ["Parquet"]
+
+### Nested Schema for `configuration.format.parquet_columnar_storage`
Optional:
-- `block_size_mb` (Number) This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
-- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]
+- `block_size_mb` (Number) Default: 128
+This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
+- `compression_codec` (String) must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]; Default: "UNCOMPRESSED"
The compression algorithm used to compress data pages.
-- `dictionary_encoding` (Boolean) Default: true.
-- `dictionary_page_size_kb` (Number) There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
-- `max_padding_size_mb` (Number) Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
-- `page_size_kb` (Number) The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
+- `dictionary_encoding` (Boolean) Default: true
+Default: true.
+- `dictionary_page_size_kb` (Number) Default: 1024
+There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
+- `format_type` (String) must be one of ["Parquet"]; Default: "Parquet"
+- `max_padding_size_mb` (Number) Default: 8
+Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
+- `page_size_kb` (Number) Default: 1024
+The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
diff --git a/docs/resources/destination_s3_glue.md b/docs/resources/destination_s3_glue.md
index 02f7619d4..c9f9b8866 100644
--- a/docs/resources/destination_s3_glue.md
+++ b/docs/resources/destination_s3_glue.md
@@ -16,30 +16,30 @@ DestinationS3Glue Resource
resource "airbyte_destination_s3_glue" "my_destination_s3glue" {
configuration = {
access_key_id = "A012345678910EXAMPLE"
- destination_type = "s3-glue"
- file_name_pattern = "{date}"
+ file_name_pattern = "{sync_id}"
format = {
- destination_s3_glue_output_format_json_lines_newline_delimited_json = {
+ destination_s3_glue_json_lines_newline_delimited_json = {
compression = {
- destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_gzip = {
+ destination_s3_glue_gzip = {
compression_type = "GZIP"
}
}
- flattening = "No flattening"
+ flattening = "Root level flattening"
format_type = "JSONL"
}
}
glue_database = "airbyte_database"
- glue_serialization_library = "org.openx.data.jsonserde.JsonSerDe"
+ glue_serialization_library = "org.apache.hive.hcatalog.data.JsonSerDe"
s3_bucket_name = "airbyte_sync"
s3_bucket_path = "data_sync/test"
- s3_bucket_region = "ca-central-1"
+ s3_bucket_region = "eu-central-1"
s3_endpoint = "http://localhost:9000"
s3_path_format = "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_"
secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
}
- name = "Edmund Daugherty"
- workspace_id = "15199ebf-d0e9-4fe6-8632-ca3aed011799"
+ definition_id = "2f8e06ef-6fed-4365-9e7d-5496735da213"
+ name = "Jordan Johnston"
+ workspace_id = "b9fef8f5-3876-4e3d-a30a-86e4df19faac"
}
```
@@ -49,9 +49,13 @@ resource "airbyte_destination_s3_glue" "my_destination_s3glue" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -62,105 +66,62 @@ resource "airbyte_destination_s3_glue" "my_destination_s3glue" {
Required:
-- `destination_type` (String) must be one of ["s3-glue"]
- `format` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format))
- `glue_database` (String) Name of the glue database for creating the tables, leave blank if no integration
-- `glue_serialization_library` (String) must be one of ["org.openx.data.jsonserde.JsonSerDe", "org.apache.hive.hcatalog.data.JsonSerDe"]
-The library that your query engine will use for reading and writing data in your lake.
- `s3_bucket_name` (String) The name of the S3 bucket. Read more here.
- `s3_bucket_path` (String) Directory under the S3 bucket where data will be written. Read more here
-- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
-The region of the S3 bucket. See here for all region codes.
Optional:
-- `access_key_id` (String) The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.
+- `access_key_id` (String, Sensitive) The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.
- `file_name_pattern` (String) The pattern allows you to set the file-name format for the S3 staging file(s)
-- `s3_endpoint` (String) Your S3 endpoint url. Read more here
+- `glue_serialization_library` (String) must be one of ["org.openx.data.jsonserde.JsonSerDe", "org.apache.hive.hcatalog.data.JsonSerDe"]; Default: "org.openx.data.jsonserde.JsonSerDe"
+The library that your query engine will use for reading and writing data in your lake.
+- `s3_bucket_region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""
+The region of the S3 bucket. See here for all region codes.
+- `s3_endpoint` (String) Default: ""
+Your S3 endpoint url. Read more here
- `s3_path_format` (String) Format string on how data will be organized inside the S3 bucket directory. Read more here
-- `secret_access_key` (String) The corresponding secret to the access key ID. Read more here
+- `secret_access_key` (String, Sensitive) The corresponding secret to the access key ID. Read more here
### Nested Schema for `configuration.format`
Optional:
-- `destination_s3_glue_output_format_json_lines_newline_delimited_json` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_output_format_json_lines_newline_delimited_json))
-- `destination_s3_glue_update_output_format_json_lines_newline_delimited_json` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_update_output_format_json_lines_newline_delimited_json))
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_output_format_json_lines_newline_delimited_json`
-
-Required:
-
-- `format_type` (String) must be one of ["JSONL"]
-
-Optional:
-
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_output_format_json_lines_newline_delimited_json--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
-Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_output_format_json_lines_newline_delimited_json.flattening`
-
-Optional:
-
-- `destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_output_format_json_lines_newline_delimited_json--flattening--destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_output_format_json_lines_newline_delimited_json--flattening--destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_no_compression))
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_output_format_json_lines_newline_delimited_json.flattening.destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_gzip`
-
-Optional:
-
-- `compression_type` (String) must be one of ["GZIP"]
-
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_output_format_json_lines_newline_delimited_json.flattening.destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_no_compression`
-
-Optional:
-
-- `compression_type` (String) must be one of ["No Compression"]
-
-
-
-
-
-### Nested Schema for `configuration.format.destination_s3_glue_update_output_format_json_lines_newline_delimited_json`
-
-Required:
+- `json_lines_newline_delimited_json` (Attributes) Format of the data output. See here for more details (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json))
-- `format_type` (String) must be one of ["JSONL"]
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json`
Optional:
-- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_update_output_format_json_lines_newline_delimited_json--compression))
-- `flattening` (String) must be one of ["No flattening", "Root level flattening"]
+- `compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json--compression))
+- `flattening` (String) must be one of ["No flattening", "Root level flattening"]; Default: "Root level flattening"
Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
+- `format_type` (String) must be one of ["JSONL"]; Default: "JSONL"
-
-### Nested Schema for `configuration.format.destination_s3_glue_update_output_format_json_lines_newline_delimited_json.flattening`
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json.format_type`
Optional:
-- `destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_update_output_format_json_lines_newline_delimited_json--flattening--destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_gzip))
-- `destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--destination_s3_glue_update_output_format_json_lines_newline_delimited_json--flattening--destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_no_compression))
+- `gzip` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json--format_type--gzip))
+- `no_compression` (Attributes) Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz"). (see [below for nested schema](#nestedatt--configuration--format--json_lines_newline_delimited_json--format_type--no_compression))
-
-### Nested Schema for `configuration.format.destination_s3_glue_update_output_format_json_lines_newline_delimited_json.flattening.destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_gzip`
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json.format_type.gzip`
Optional:
-- `compression_type` (String) must be one of ["GZIP"]
+- `compression_type` (String) must be one of ["GZIP"]; Default: "GZIP"
-
-### Nested Schema for `configuration.format.destination_s3_glue_update_output_format_json_lines_newline_delimited_json.flattening.destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_no_compression`
+
+### Nested Schema for `configuration.format.json_lines_newline_delimited_json.format_type.no_compression`
Optional:
-- `compression_type` (String) must be one of ["No Compression"]
+- `compression_type` (String) must be one of ["No Compression"]; Default: "No Compression"
diff --git a/docs/resources/destination_sftp_json.md b/docs/resources/destination_sftp_json.md
index a7e3c7f07..94c01a37a 100644
--- a/docs/resources/destination_sftp_json.md
+++ b/docs/resources/destination_sftp_json.md
@@ -16,14 +16,14 @@ DestinationSftpJSON Resource
resource "airbyte_destination_sftp_json" "my_destination_sftpjson" {
configuration = {
destination_path = "/json_data"
- destination_type = "sftp-json"
host = "...my_host..."
password = "...my_password..."
port = 22
- username = "Dayton98"
+ username = "Deshawn10"
}
- name = "Terence Beer"
- workspace_id = "71778ff6-1d01-4747-a360-a15db6a66065"
+ definition_id = "846ef364-4196-4a04-bb96-66e7d15e7eed"
+ name = "Frederick Howell"
+ workspace_id = "586b689f-dc13-4c29-afcf-ab73b9ba5d30"
}
```
@@ -33,9 +33,13 @@ resource "airbyte_destination_sftp_json" "my_destination_sftpjson" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -47,13 +51,13 @@ resource "airbyte_destination_sftp_json" "my_destination_sftpjson" {
Required:
- `destination_path` (String) Path to the directory where json files will be written.
-- `destination_type` (String) must be one of ["sftp-json"]
- `host` (String) Hostname of the SFTP server.
-- `password` (String) Password associated with the username.
+- `password` (String, Sensitive) Password associated with the username.
- `username` (String) Username to use to access the SFTP server.
Optional:
-- `port` (Number) Port of the SFTP server.
+- `port` (Number) Default: 22
+Port of the SFTP server.
diff --git a/docs/resources/destination_snowflake.md b/docs/resources/destination_snowflake.md
index 53a90329c..a2d1478de 100644
--- a/docs/resources/destination_snowflake.md
+++ b/docs/resources/destination_snowflake.md
@@ -16,24 +16,24 @@ DestinationSnowflake Resource
resource "airbyte_destination_snowflake" "my_destination_snowflake" {
configuration = {
credentials = {
- destination_snowflake_authorization_method_key_pair_authentication = {
- auth_type = "Key Pair Authentication"
+ key_pair_authentication = {
private_key = "...my_private_key..."
private_key_password = "...my_private_key_password..."
}
}
- database = "AIRBYTE_DATABASE"
- destination_type = "snowflake"
- host = "accountname.snowflakecomputing.com"
- jdbc_url_params = "...my_jdbc_url_params..."
- raw_data_schema = "...my_raw_data_schema..."
- role = "AIRBYTE_ROLE"
- schema = "AIRBYTE_SCHEMA"
- username = "AIRBYTE_USER"
- warehouse = "AIRBYTE_WAREHOUSE"
+ database = "AIRBYTE_DATABASE"
+ disable_type_dedupe = true
+ host = "accountname.us-east-2.aws.snowflakecomputing.com"
+ jdbc_url_params = "...my_jdbc_url_params..."
+ raw_data_schema = "...my_raw_data_schema..."
+ role = "AIRBYTE_ROLE"
+ schema = "AIRBYTE_SCHEMA"
+ username = "AIRBYTE_USER"
+ warehouse = "AIRBYTE_WAREHOUSE"
}
- name = "Shaun Osinski"
- workspace_id = "851d6c64-5b08-4b61-891b-aa0fe1ade008"
+ definition_id = "d28dce71-d7fd-4713-a64c-8ab088c248e9"
+ name = "Robin Marvin"
+ workspace_id = "3407545d-5006-486d-84e6-08039bc7eb07"
}
```
@@ -43,9 +43,13 @@ resource "airbyte_destination_snowflake" "my_destination_snowflake" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -57,7 +61,6 @@ resource "airbyte_destination_snowflake" "my_destination_snowflake" {
Required:
- `database` (String) Enter the name of the database you want to sync data into
-- `destination_type` (String) must be one of ["snowflake"]
- `host` (String) Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)
- `role` (String) Enter the role that you want to use to access Snowflake
- `schema` (String) Enter the name of the default schema
@@ -67,98 +70,51 @@ Required:
Optional:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
+- `disable_type_dedupe` (Boolean) Default: false
+Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions
- `jdbc_url_params` (String) Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
-- `raw_data_schema` (String) The schema to write raw tables into
+- `raw_data_schema` (String) The schema to write raw tables into (default: airbyte_internal)
### Nested Schema for `configuration.credentials`
Optional:
-- `destination_snowflake_authorization_method_key_pair_authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_authorization_method_key_pair_authentication))
-- `destination_snowflake_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_authorization_method_o_auth2_0))
-- `destination_snowflake_authorization_method_username_and_password` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_authorization_method_username_and_password))
-- `destination_snowflake_update_authorization_method_key_pair_authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_update_authorization_method_key_pair_authentication))
-- `destination_snowflake_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_update_authorization_method_o_auth2_0))
-- `destination_snowflake_update_authorization_method_username_and_password` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--destination_snowflake_update_authorization_method_username_and_password))
+- `key_pair_authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--key_pair_authentication))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
+- `username_and_password` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--username_and_password))
-
-### Nested Schema for `configuration.credentials.destination_snowflake_authorization_method_key_pair_authentication`
+
+### Nested Schema for `configuration.credentials.key_pair_authentication`
Required:
-- `private_key` (String) RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
+- `private_key` (String, Sensitive) RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
Optional:
-- `auth_type` (String) must be one of ["Key Pair Authentication"]
-- `private_key_password` (String) Passphrase for private key
+- `private_key_password` (String, Sensitive) Passphrase for private key
-
-### Nested Schema for `configuration.credentials.destination_snowflake_authorization_method_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) Enter you application's Access Token
-- `refresh_token` (String) Enter your application's Refresh Token
+- `access_token` (String, Sensitive) Enter your application's Access Token
+- `refresh_token` (String, Sensitive) Enter your application's Refresh Token
Optional:
-- `auth_type` (String) must be one of ["OAuth2.0"]
- `client_id` (String) Enter your application's Client ID
- `client_secret` (String) Enter your application's Client secret
-
-### Nested Schema for `configuration.credentials.destination_snowflake_authorization_method_username_and_password`
-
-Required:
-
-- `password` (String) Enter the password associated with the username.
-
-Optional:
-
-- `auth_type` (String) must be one of ["Username and Password"]
-
-
-
-### Nested Schema for `configuration.credentials.destination_snowflake_update_authorization_method_key_pair_authentication`
+
+### Nested Schema for `configuration.credentials.username_and_password`
Required:
-- `private_key` (String) RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
-
-Optional:
-
-- `auth_type` (String) must be one of ["Key Pair Authentication"]
-- `private_key_password` (String) Passphrase for private key
-
-
-
-### Nested Schema for `configuration.credentials.destination_snowflake_update_authorization_method_o_auth2_0`
-
-Required:
-
-- `access_token` (String) Enter you application's Access Token
-- `refresh_token` (String) Enter your application's Refresh Token
-
-Optional:
-
-- `auth_type` (String) must be one of ["OAuth2.0"]
-- `client_id` (String) Enter your application's Client ID
-- `client_secret` (String) Enter your application's Client secret
-
-
-
-### Nested Schema for `configuration.credentials.destination_snowflake_update_authorization_method_username_and_password`
-
-Required:
-
-- `password` (String) Enter the password associated with the username.
-
-Optional:
-
-- `auth_type` (String) must be one of ["Username and Password"]
+- `password` (String, Sensitive) Enter the password associated with the username.
diff --git a/docs/resources/destination_timeplus.md b/docs/resources/destination_timeplus.md
index 30302c1a5..4f39216ca 100644
--- a/docs/resources/destination_timeplus.md
+++ b/docs/resources/destination_timeplus.md
@@ -15,12 +15,12 @@ DestinationTimeplus Resource
```terraform
resource "airbyte_destination_timeplus" "my_destination_timeplus" {
configuration = {
- apikey = "...my_apikey..."
- destination_type = "timeplus"
- endpoint = "https://us.timeplus.cloud/workspace_id"
+ apikey = "...my_apikey..."
+ endpoint = "https://us.timeplus.cloud/workspace_id"
}
- name = "Ruben Williamson"
- workspace_id = "5f350d8c-db5a-4341-8143-010421813d52"
+ definition_id = "32a47524-bb49-40aa-b53a-d11902ba1888"
+ name = "Kimberly Cole V"
+ workspace_id = "d193af49-1985-4c92-933c-ae7edb401c23"
}
```
@@ -30,9 +30,13 @@ resource "airbyte_destination_timeplus" "my_destination_timeplus" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -43,8 +47,11 @@ resource "airbyte_destination_timeplus" "my_destination_timeplus" {
Required:
-- `apikey` (String) Personal API key
-- `destination_type` (String) must be one of ["timeplus"]
-- `endpoint` (String) Timeplus workspace endpoint
+- `apikey` (String, Sensitive) Personal API key
+
+Optional:
+
+- `endpoint` (String) Default: "https://us.timeplus.cloud/"
+Timeplus workspace endpoint
diff --git a/docs/resources/destination_typesense.md b/docs/resources/destination_typesense.md
index d6f7345d8..b23be9813 100644
--- a/docs/resources/destination_typesense.md
+++ b/docs/resources/destination_typesense.md
@@ -15,15 +15,15 @@ DestinationTypesense Resource
```terraform
resource "airbyte_destination_typesense" "my_destination_typesense" {
configuration = {
- api_key = "...my_api_key..."
- batch_size = 0
- destination_type = "typesense"
- host = "...my_host..."
- port = "...my_port..."
- protocol = "...my_protocol..."
+ api_key = "...my_api_key..."
+ batch_size = 6
+ host = "...my_host..."
+ port = "...my_port..."
+ protocol = "...my_protocol..."
}
- name = "Conrad Rutherford"
- workspace_id = "e253b668-451c-46c6-a205-e16deab3fec9"
+ definition_id = "e69c6f21-d654-4173-8ccb-bc51a3caa62e"
+ name = "Lorraine Kiehn"
+ workspace_id = "a0d33800-2a57-467f-8f37-9fa4011eae8d"
}
```
@@ -33,9 +33,13 @@ resource "airbyte_destination_typesense" "my_destination_typesense" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -46,8 +50,7 @@ resource "airbyte_destination_typesense" "my_destination_typesense" {
Required:
-- `api_key` (String) Typesense API Key
-- `destination_type` (String) must be one of ["typesense"]
+- `api_key` (String, Sensitive) Typesense API Key
- `host` (String) Hostname of the Typesense instance without protocol.
Optional:
diff --git a/docs/resources/destination_vertica.md b/docs/resources/destination_vertica.md
index f4974a20b..2a21a1a7d 100644
--- a/docs/resources/destination_vertica.md
+++ b/docs/resources/destination_vertica.md
@@ -15,22 +15,20 @@ DestinationVertica Resource
```terraform
resource "airbyte_destination_vertica" "my_destination_vertica" {
configuration = {
- database = "...my_database..."
- destination_type = "vertica"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 5433
- schema = "...my_schema..."
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 5433
+ schema = "...my_schema..."
tunnel_method = {
- destination_vertica_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_vertica_no_tunnel = {}
}
- username = "Jackson.Kuvalis"
+ username = "Bailey26"
}
- name = "Ida Lubowitz"
- workspace_id = "73a8418d-1623-409f-b092-9921aefb9f58"
+ definition_id = "f7f4dcb2-8108-4584-a7e5-cd333285c7cc"
+ name = "Josefina Sporer"
+ workspace_id = "34f786aa-e3aa-4f52-bfe1-9eb1bf8ee233"
}
```
@@ -40,9 +38,13 @@ resource "airbyte_destination_vertica" "my_destination_vertica" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -54,16 +56,16 @@ resource "airbyte_destination_vertica" "my_destination_vertica" {
Required:
- `database` (String) Name of the database.
-- `destination_type` (String) must be one of ["vertica"]
- `host` (String) Hostname of the database.
-- `port` (Number) Port of the database.
- `schema` (String) Schema for vertica destination
- `username` (String) Username to use to access the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) Password associated with the username.
+- `password` (String, Sensitive) Password associated with the username.
+- `port` (Number) Default: 5433
+Port of the database.
- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
@@ -71,80 +73,41 @@ Optional:
Optional:
-- `destination_vertica_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_ssh_tunnel_method_no_tunnel))
-- `destination_vertica_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_ssh_tunnel_method_password_authentication))
-- `destination_vertica_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_ssh_tunnel_method_ssh_key_authentication))
-- `destination_vertica_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_update_ssh_tunnel_method_no_tunnel))
-- `destination_vertica_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_update_ssh_tunnel_method_password_authentication))
-- `destination_vertica_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--destination_vertica_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_ssh_tunnel_method_no_tunnel`
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
+Optional:
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_update_ssh_tunnel_method_no_tunnel`
-
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.destination_vertica_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/destination_weaviate.md b/docs/resources/destination_weaviate.md
new file mode 100644
index 000000000..daee69f6f
--- /dev/null
+++ b/docs/resources/destination_weaviate.md
@@ -0,0 +1,289 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_destination_weaviate Resource - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ DestinationWeaviate Resource
+---
+
+# airbyte_destination_weaviate (Resource)
+
+DestinationWeaviate Resource
+
+## Example Usage
+
+```terraform
+resource "airbyte_destination_weaviate" "my_destination_weaviate" {
+ configuration = {
+ embedding = {
+ destination_weaviate_azure_open_ai = {
+ api_base = "https://your-resource-name.openai.azure.com"
+ deployment = "your-resource-name"
+ openai_key = "...my_openai_key..."
+ }
+ }
+ indexing = {
+ additional_headers = [
+ {
+ header_key = "...my_header_key..."
+ value = "...my_value..."
+ },
+ ]
+ auth = {
+ destination_weaviate_api_token = {
+ token = "...my_token..."
+ }
+ }
+ batch_size = 6
+ default_vectorizer = "text2vec-huggingface"
+ host = "https://my-cluster.weaviate.network"
+ text_field = "...my_text_field..."
+ }
+ processing = {
+ chunk_overlap = 4
+ chunk_size = 5
+ field_name_mappings = [
+ {
+ from_field = "...my_from_field..."
+ to_field = "...my_to_field..."
+ },
+ ]
+ metadata_fields = [
+ "...",
+ ]
+ text_fields = [
+ "...",
+ ]
+ text_splitter = {
+ destination_weaviate_by_markdown_header = {
+ split_level = 4
+ }
+ }
+ }
+ }
+ definition_id = "97e801e6-7689-4a46-b396-c7c6bf737242"
+ name = "Diana Runte Jr."
+ workspace_id = "59f1e303-60fc-40ea-a506-81bc3adb090c"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
+- `workspace_id` (String)
+
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
+### Read-Only
+
+- `destination_id` (String)
+- `destination_type` (String)
+
+
+### Nested Schema for `configuration`
+
+Required:
+
+- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding))
+- `indexing` (Attributes) Indexing configuration (see [below for nested schema](#nestedatt--configuration--indexing))
+- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing))
+
+
+### Nested Schema for `configuration.embedding`
+
+Optional:
+
+- `azure_open_ai` (Attributes) Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--azure_open_ai))
+- `cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--cohere))
+- `fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--fake))
+- `from_field` (Attributes) Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. (see [below for nested schema](#nestedatt--configuration--embedding--from_field))
+- `no_external_embedding` (Attributes) Do not calculate and pass embeddings to Weaviate. Suitable for clusters with configured vectorizers to calculate embeddings within Weaviate or for classes that should only support regular text search. (see [below for nested schema](#nestedatt--configuration--embedding--no_external_embedding))
+- `open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--open_ai))
+- `open_ai_compatible` (Attributes) Use a service that's compatible with the OpenAI API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--open_ai_compatible))
+
+
+### Nested Schema for `configuration.embedding.azure_open_ai`
+
+Required:
+
+- `api_base` (String) The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+- `deployment` (String) The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+- `openai_key` (String, Sensitive) The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+
+
+
+### Nested Schema for `configuration.embedding.cohere`
+
+Required:
+
+- `cohere_key` (String, Sensitive)
+
+
+
+### Nested Schema for `configuration.embedding.fake`
+
+
+
+### Nested Schema for `configuration.embedding.from_field`
+
+Required:
+
+- `dimensions` (Number) The number of dimensions the embedding model is generating
+- `field_name` (String) Name of the field in the record that contains the embedding
+
+
+
+### Nested Schema for `configuration.embedding.no_external_embedding`
+
+
+
+### Nested Schema for `configuration.embedding.open_ai`
+
+Required:
+
+- `openai_key` (String, Sensitive)
+
+
+
+### Nested Schema for `configuration.embedding.open_ai_compatible`
+
+Required:
+
+- `base_url` (String) The base URL for your OpenAI-compatible service
+- `dimensions` (Number) The number of dimensions the embedding model is generating
+
+Optional:
+
+- `api_key` (String, Sensitive) Default: ""
+- `model_name` (String) Default: "text-embedding-ada-002"
+The name of the model to use for embedding
+
+
+
+
+### Nested Schema for `configuration.indexing`
+
+Required:
+
+- `auth` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--indexing--auth))
+- `host` (String) The public endpoint of the Weaviate cluster.
+
+Optional:
+
+- `additional_headers` (Attributes List) Additional HTTP headers to send with every request. (see [below for nested schema](#nestedatt--configuration--indexing--additional_headers))
+- `batch_size` (Number) Default: 128
+The number of records to send to Weaviate in each batch
+- `default_vectorizer` (String) must be one of ["none", "text2vec-cohere", "text2vec-huggingface", "text2vec-openai", "text2vec-palm", "text2vec-contextionary", "text2vec-transformers", "text2vec-gpt4all"]; Default: "none"
+The vectorizer to use if new classes need to be created
+- `text_field` (String) Default: "text"
+The field in the object that contains the embedded text
+
+
+### Nested Schema for `configuration.indexing.auth`
+
+Optional:
+
+- `api_token` (Attributes) Authenticate using an API token (suitable for Weaviate Cloud) (see [below for nested schema](#nestedatt--configuration--indexing--auth--api_token))
+- `no_authentication` (Attributes) Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) (see [below for nested schema](#nestedatt--configuration--indexing--auth--no_authentication))
+- `username_password` (Attributes) Authenticate using username and password (suitable for self-managed Weaviate clusters) (see [below for nested schema](#nestedatt--configuration--indexing--auth--username_password))
+
+
+### Nested Schema for `configuration.indexing.auth.api_token`
+
+Required:
+
+- `token` (String, Sensitive) API Token for the Weaviate instance
+
+
+
+### Nested Schema for `configuration.indexing.auth.no_authentication`
+
+
+
+### Nested Schema for `configuration.indexing.auth.username_password`
+
+Required:
+
+- `password` (String, Sensitive) Password for the Weaviate cluster
+- `username` (String) Username for the Weaviate cluster
+
+
+
+
+### Nested Schema for `configuration.indexing.additional_headers`
+
+Required:
+
+- `header_key` (String, Sensitive)
+- `value` (String)
+
+
+
+
+### Nested Schema for `configuration.processing`
+
+Required:
+
+- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
+
+Optional:
+
+- `chunk_overlap` (Number) Default: 0
+Size of overlap between chunks in tokens to store in vector store to better capture relevant context
+- `field_name_mappings` (Attributes List) List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation. (see [below for nested schema](#nestedatt--configuration--processing--field_name_mappings))
+- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
+- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
+- `text_splitter` (Attributes) Split text fields into chunks based on the specified method. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter))
+
+
+### Nested Schema for `configuration.processing.field_name_mappings`
+
+Required:
+
+- `from_field` (String) The field name in the source
+- `to_field` (String) The field name to use in the destination
+
+
+
+### Nested Schema for `configuration.processing.text_splitter`
+
+Optional:
+
+- `by_markdown_header` (Attributes) Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_markdown_header))
+- `by_programming_language` (Attributes) Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_programming_language))
+- `by_separator` (Attributes) Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc. (see [below for nested schema](#nestedatt--configuration--processing--text_splitter--by_separator))
+
+
+### Nested Schema for `configuration.processing.text_splitter.by_markdown_header`
+
+Optional:
+
+- `split_level` (Number) Default: 1
+Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
+
+
+
+### Nested Schema for `configuration.processing.text_splitter.by_programming_language`
+
+Required:
+
+- `language` (String) must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
+Split code in suitable places based on the programming language
+
+
+
+### Nested Schema for `configuration.processing.text_splitter.by_separator`
+
+Optional:
+
+- `keep_separator` (Boolean) Default: false
+Whether to keep the separator in the resulting chunks
+- `separators` (List of String) List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
+
+
diff --git a/docs/resources/destination_xata.md b/docs/resources/destination_xata.md
index a05ab3edd..5aa9f60bd 100644
--- a/docs/resources/destination_xata.md
+++ b/docs/resources/destination_xata.md
@@ -15,12 +15,12 @@ DestinationXata Resource
```terraform
resource "airbyte_destination_xata" "my_destination_xata" {
configuration = {
- api_key = "...my_api_key..."
- db_url = "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main"
- destination_type = "xata"
+ api_key = "...my_api_key..."
+ db_url = "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main"
}
- name = "Oscar Smith"
- workspace_id = "e68e4be0-5601-43f5-9da7-57a59ecfef66"
+ definition_id = "013842c1-01e2-465e-abc2-30b15094cc21"
+ name = "Derrick Green"
+ workspace_id = "b75e7d1c-9ddc-42da-b62f-af1b28fe26cb"
}
```
@@ -30,9 +30,13 @@ resource "airbyte_destination_xata" "my_destination_xata" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the destination e.g. dev-mysql-instance.
- `workspace_id` (String)
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+
### Read-Only
- `destination_id` (String)
@@ -43,8 +47,7 @@ resource "airbyte_destination_xata" "my_destination_xata" {
Required:
-- `api_key` (String) API Key to connect.
+- `api_key` (String, Sensitive) API Key to connect.
- `db_url` (String) URL pointing to your workspace.
-- `destination_type` (String) must be one of ["xata"]
diff --git a/docs/resources/source_aha.md b/docs/resources/source_aha.md
index df1b71d29..fc9c21f6e 100644
--- a/docs/resources/source_aha.md
+++ b/docs/resources/source_aha.md
@@ -15,13 +15,13 @@ SourceAha Resource
```terraform
resource "airbyte_source_aha" "my_source_aha" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "aha"
- url = "...my_url..."
+ api_key = "...my_api_key..."
+ url = "...my_url..."
}
- name = "Van Bergnaum"
- secret_id = "...my_secret_id..."
- workspace_id = "a3383c2b-eb47-4737-bc8d-72f64d1db1f2"
+ definition_id = "1bb0550b-4e34-4412-ae7f-29336e237818"
+ name = "Samuel Hammes"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3da8d6ee-f047-4576-b0dd-bc2dbf188dfa"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_aha" "my_source_aha" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_aha" "my_source_aha" {
Required:
-- `api_key` (String) API Key
-- `source_type` (String) must be one of ["aha"]
+- `api_key` (String, Sensitive) API Key
- `url` (String) URL
diff --git a/docs/resources/source_aircall.md b/docs/resources/source_aircall.md
index 8576bc2f6..6af18b535 100644
--- a/docs/resources/source_aircall.md
+++ b/docs/resources/source_aircall.md
@@ -15,14 +15,14 @@ SourceAircall Resource
```terraform
resource "airbyte_source_aircall" "my_source_aircall" {
configuration = {
- api_id = "...my_api_id..."
- api_token = "...my_api_token..."
- source_type = "aircall"
- start_date = "2022-03-01T00:00:00.000Z"
+ api_id = "...my_api_id..."
+ api_token = "...my_api_token..."
+ start_date = "2022-03-01T00:00:00.000Z"
}
- name = "Martha Bashirian"
- secret_id = "...my_secret_id..."
- workspace_id = "1e96349e-1cf9-4e06-a3a4-37000ae6b6bc"
+ definition_id = "57111ac6-1dff-4a69-be71-43a3e9a244d7"
+ name = "Lucas Breitenberg"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a6e1cc19-3137-4221-8027-ee71b638bd64"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_aircall" "my_source_aircall" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,8 +51,7 @@ resource "airbyte_source_aircall" "my_source_aircall" {
Required:
- `api_id` (String) App ID found at settings https://dashboard.aircall.io/integrations/api-keys
-- `api_token` (String) App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)
-- `source_type` (String) must be one of ["aircall"]
+- `api_token` (String, Sensitive) App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)
- `start_date` (String) Date time filter for incremental filter, Specify which date to extract from.
diff --git a/docs/resources/source_airtable.md b/docs/resources/source_airtable.md
index 2e257fa5e..803fc74a6 100644
--- a/docs/resources/source_airtable.md
+++ b/docs/resources/source_airtable.md
@@ -16,20 +16,19 @@ SourceAirtable Resource
resource "airbyte_source_airtable" "my_source_airtable" {
configuration = {
credentials = {
- source_airtable_authentication_o_auth2_0 = {
+ source_airtable_o_auth2_0 = {
access_token = "...my_access_token..."
- auth_method = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
- token_expiry_date = "2021-08-01T09:41:55.270Z"
+ token_expiry_date = "2021-04-10T21:26:19.630Z"
}
}
- source_type = "airtable"
}
- name = "Tommie Klocko"
- secret_id = "...my_secret_id..."
- workspace_id = "eac55a97-41d3-4113-9296-5bb8a7202611"
+ definition_id = "54814afe-b93d-44bb-9e9f-2bb80cd3fe4a"
+ name = "Todd Lockman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "38c45275-6445-4179-b0ed-8d43c0dabba6"
}
```
@@ -39,11 +38,12 @@ resource "airbyte_source_airtable" "my_source_airtable" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -57,71 +57,35 @@ resource "airbyte_source_airtable" "my_source_airtable" {
Optional:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["airtable"]
### Nested Schema for `configuration.credentials`
Optional:
-- `source_airtable_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_airtable_authentication_o_auth2_0))
-- `source_airtable_authentication_personal_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_airtable_authentication_personal_access_token))
-- `source_airtable_update_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_airtable_update_authentication_o_auth2_0))
-- `source_airtable_update_authentication_personal_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_airtable_update_authentication_personal_access_token))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
+- `personal_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--personal_access_token))
-
-### Nested Schema for `configuration.credentials.source_airtable_authentication_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
- `client_id` (String) The client ID of the Airtable developer application.
- `client_secret` (String) The client secret the Airtable developer application.
-- `refresh_token` (String) The key to refresh the expired access token.
+- `refresh_token` (String, Sensitive) The key to refresh the expired access token.
Optional:
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
+- `token_expiry_date` (String, Sensitive) The date-time when the access token should be refreshed.
-
-### Nested Schema for `configuration.credentials.source_airtable_authentication_personal_access_token`
+
+### Nested Schema for `configuration.credentials.personal_access_token`
Required:
-- `api_key` (String) The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token.
-
-Optional:
-
-- `auth_method` (String) must be one of ["api_key"]
-
-
-
-### Nested Schema for `configuration.credentials.source_airtable_update_authentication_o_auth2_0`
-
-Required:
-
-- `client_id` (String) The client ID of the Airtable developer application.
-- `client_secret` (String) The client secret the Airtable developer application.
-- `refresh_token` (String) The key to refresh the expired access token.
-
-Optional:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_airtable_update_authentication_personal_access_token`
-
-Required:
-
-- `api_key` (String) The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token.
-
-Optional:
-
-- `auth_method` (String) must be one of ["api_key"]
+- `api_key` (String, Sensitive) The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token.
diff --git a/docs/resources/source_alloydb.md b/docs/resources/source_alloydb.md
index 4d3d54abc..2022b665c 100644
--- a/docs/resources/source_alloydb.md
+++ b/docs/resources/source_alloydb.md
@@ -21,10 +21,10 @@ resource "airbyte_source_alloydb" "my_source_alloydb" {
password = "...my_password..."
port = 5432
replication_method = {
- source_alloydb_replication_method_logical_replication_cdc_ = {
- initial_waiting_seconds = 2
- lsn_commit_behaviour = "While reading Data"
- method = "CDC"
+ logical_replication_cdc = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ initial_waiting_seconds = 10
+ lsn_commit_behaviour = "After loading Data in the destination"
plugin = "pgoutput"
publication = "...my_publication..."
queue_size = 10
@@ -34,22 +34,20 @@ resource "airbyte_source_alloydb" "my_source_alloydb" {
schemas = [
"...",
]
- source_type = "alloydb"
ssl_mode = {
- source_alloydb_ssl_modes_allow = {
- mode = "allow"
+ source_alloydb_allow = {
+ additional_properties = "{ \"see\": \"documentation\" }"
}
}
tunnel_method = {
- source_alloydb_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_alloydb_no_tunnel = {}
}
- username = "Ashlynn_Emard"
+ username = "Olaf.Emard48"
}
- name = "Wilbert Crona"
- secret_id = "...my_secret_id..."
- workspace_id = "9b1abda8-c070-4e10-84cb-0672d1ad879e"
+ definition_id = "44fd252e-57aa-4673-9282-59f0c220e39e"
+ name = "Deborah Stanton"
+ secret_id = "...my_secret_id..."
+ workspace_id = "f09fb849-b0bd-4f3d-9ca9-6c63354ae1d2"
}
```
@@ -59,11 +57,12 @@ resource "airbyte_source_alloydb" "my_source_alloydb" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -78,14 +77,14 @@ Required:
- `database` (String) Name of the database.
- `host` (String) Hostname of the database.
-- `port` (Number) Port of the database.
-- `source_type` (String) must be one of ["alloydb"]
- `username` (String) Username to access the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.
-- `password` (String) Password associated with the username.
+- `password` (String, Sensitive) Password associated with the username.
+- `port` (Number) Default: 5432
+Port of the database.
- `replication_method` (Attributes) Replication method for extracting data from the database. (see [below for nested schema](#nestedatt--configuration--replication_method))
- `schemas` (List of String) The list of schemas (case sensitive) to sync from. Defaults to public.
- `ssl_mode` (Attributes) SSL connection modes.
@@ -97,83 +96,37 @@ Optional:
Optional:
-- `source_alloydb_replication_method_logical_replication_cdc` (Attributes) Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_replication_method_logical_replication_cdc))
-- `source_alloydb_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_replication_method_standard))
-- `source_alloydb_replication_method_standard_xmin` (Attributes) Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_replication_method_standard_xmin))
-- `source_alloydb_update_replication_method_logical_replication_cdc` (Attributes) Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_update_replication_method_logical_replication_cdc))
-- `source_alloydb_update_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_update_replication_method_standard))
-- `source_alloydb_update_replication_method_standard_xmin` (Attributes) Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_alloydb_update_replication_method_standard_xmin))
+- `logical_replication_cdc` (Attributes) Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs. (see [below for nested schema](#nestedatt--configuration--replication_method--logical_replication_cdc))
+- `standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--standard))
+- `standard_xmin` (Attributes) Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--standard_xmin))
-
-### Nested Schema for `configuration.replication_method.source_alloydb_replication_method_logical_replication_cdc`
+
+### Nested Schema for `configuration.replication_method.logical_replication_cdc`
Required:
-- `method` (String) must be one of ["CDC"]
- `publication` (String) A Postgres publication used for consuming changes. Read about publications and replication identities.
- `replication_slot` (String) A plugin logical replication slot. Read about replication slots.
Optional:
- `additional_properties` (String) Parsed as JSON.
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `lsn_commit_behaviour` (String) must be one of ["While reading Data", "After loading Data in the destination"]
+- `initial_waiting_seconds` (Number) Default: 300
+The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
+- `lsn_commit_behaviour` (String) must be one of ["While reading Data", "After loading Data in the destination"]; Default: "After loading Data in the destination"
Determines when Airbyte should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-- `plugin` (String) must be one of ["pgoutput"]
+- `plugin` (String) must be one of ["pgoutput"]; Default: "pgoutput"
A logical decoding plugin installed on the PostgreSQL server.
-- `queue_size` (Number) The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
-
-
-
-### Nested Schema for `configuration.replication_method.source_alloydb_replication_method_standard`
-
-Required:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_alloydb_replication_method_standard_xmin`
+- `queue_size` (Number) Default: 10000
+The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
-Required:
-
-- `method` (String) must be one of ["Xmin"]
+
+### Nested Schema for `configuration.replication_method.standard`
-
-### Nested Schema for `configuration.replication_method.source_alloydb_update_replication_method_logical_replication_cdc`
-Required:
-
-- `method` (String) must be one of ["CDC"]
-- `publication` (String) A Postgres publication used for consuming changes. Read about publications and replication identities.
-- `replication_slot` (String) A plugin logical replication slot. Read about replication slots.
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `lsn_commit_behaviour` (String) must be one of ["While reading Data", "After loading Data in the destination"]
-Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-- `plugin` (String) must be one of ["pgoutput"]
-A logical decoding plugin installed on the PostgreSQL server.
-- `queue_size` (Number) The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
-
-
-
-### Nested Schema for `configuration.replication_method.source_alloydb_update_replication_method_standard`
-
-Required:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_alloydb_update_replication_method_standard_xmin`
-
-Required:
-
-- `method` (String) must be one of ["Xmin"]
+
+### Nested Schema for `configuration.replication_method.standard_xmin`
@@ -182,177 +135,73 @@ Required:
Optional:
-- `source_alloydb_ssl_modes_allow` (Attributes) Enables encryption only when required by the source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_allow))
-- `source_alloydb_ssl_modes_disable` (Attributes) Disables encryption of communication between Airbyte and source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_disable))
-- `source_alloydb_ssl_modes_prefer` (Attributes) Allows unencrypted connection only if the source database does not support encryption. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_prefer))
-- `source_alloydb_ssl_modes_require` (Attributes) Always require encryption. If the source database server does not support encryption, connection will fail. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_require))
-- `source_alloydb_ssl_modes_verify_ca` (Attributes) Always require encryption and verifies that the source database server has a valid SSL certificate. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_verify_ca))
-- `source_alloydb_ssl_modes_verify_full` (Attributes) This is the most secure mode. Always require encryption and verifies the identity of the source database server. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_ssl_modes_verify_full))
-- `source_alloydb_update_ssl_modes_allow` (Attributes) Enables encryption only when required by the source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_allow))
-- `source_alloydb_update_ssl_modes_disable` (Attributes) Disables encryption of communication between Airbyte and source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_disable))
-- `source_alloydb_update_ssl_modes_prefer` (Attributes) Allows unencrypted connection only if the source database does not support encryption. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_prefer))
-- `source_alloydb_update_ssl_modes_require` (Attributes) Always require encryption. If the source database server does not support encryption, connection will fail. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_require))
-- `source_alloydb_update_ssl_modes_verify_ca` (Attributes) Always require encryption and verifies that the source database server has a valid SSL certificate. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_verify_ca))
-- `source_alloydb_update_ssl_modes_verify_full` (Attributes) This is the most secure mode. Always require encryption and verifies the identity of the source database server. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_alloydb_update_ssl_modes_verify_full))
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_allow`
+- `allow` (Attributes) Enables encryption only when required by the source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--allow))
+- `disable` (Attributes) Disables encryption of communication between Airbyte and source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--disable))
+- `prefer` (Attributes) Allows unencrypted connection only if the source database does not support encryption. (see [below for nested schema](#nestedatt--configuration--ssl_mode--prefer))
+- `require` (Attributes) Always require encryption. If the source database server does not support encryption, connection will fail. (see [below for nested schema](#nestedatt--configuration--ssl_mode--require))
+- `verify_ca` (Attributes) Always require encryption and verifies that the source database server has a valid SSL certificate. (see [below for nested schema](#nestedatt--configuration--ssl_mode--verify_ca))
+- `verify_full` (Attributes) This is the most secure mode. Always require encryption and verifies the identity of the source database server. (see [below for nested schema](#nestedatt--configuration--ssl_mode--verify_full))
-Required:
-
-- `mode` (String) must be one of ["allow"]
+
+### Nested Schema for `configuration.ssl_mode.allow`
Optional:
- `additional_properties` (String) Parsed as JSON.
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_disable`
-
-Required:
-
-- `mode` (String) must be one of ["disable"]
+
+### Nested Schema for `configuration.ssl_mode.disable`
Optional:
- `additional_properties` (String) Parsed as JSON.
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_prefer`
-
-Required:
-
-- `mode` (String) must be one of ["prefer"]
+
+### Nested Schema for `configuration.ssl_mode.prefer`
Optional:
- `additional_properties` (String) Parsed as JSON.
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_require`
-
-Required:
-
-- `mode` (String) must be one of ["require"]
+
+### Nested Schema for `configuration.ssl_mode.require`
Optional:
- `additional_properties` (String) Parsed as JSON.
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_verify_ca`
+
+### Nested Schema for `configuration.ssl_mode.verify_ca`
Required:
- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify-ca"]
Optional:
- `additional_properties` (String) Parsed as JSON.
- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
+- `client_key` (String, Sensitive) Client key
+- `client_key_password` (String, Sensitive) Password for keystorage. If you do not add it - the password will be generated automatically.
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_ssl_modes_verify_full`
+
+### Nested Schema for `configuration.ssl_mode.verify_full`
Required:
- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify-full"]
Optional:
- `additional_properties` (String) Parsed as JSON.
- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_allow`
-
-Required:
-
-- `mode` (String) must be one of ["allow"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_disable`
-
-Required:
-
-- `mode` (String) must be one of ["disable"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_prefer`
-
-Required:
-
-- `mode` (String) must be one of ["prefer"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_require`
-
-Required:
-
-- `mode` (String) must be one of ["require"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_verify_ca`
-
-Required:
-
-- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify-ca"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_alloydb_update_ssl_modes_verify_full`
-
-Required:
-
-- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify-full"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
+- `client_key` (String, Sensitive) Client key
+- `client_key_password` (String, Sensitive) Password for keystorage. If you do not add it - the password will be generated automatically.
@@ -361,80 +210,41 @@ Optional:
Optional:
-- `source_alloydb_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_ssh_tunnel_method_no_tunnel))
-- `source_alloydb_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_ssh_tunnel_method_password_authentication))
-- `source_alloydb_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_ssh_tunnel_method_ssh_key_authentication))
-- `source_alloydb_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_update_ssh_tunnel_method_no_tunnel))
-- `source_alloydb_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_update_ssh_tunnel_method_password_authentication))
-- `source_alloydb_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_alloydb_update_ssh_tunnel_method_ssh_key_authentication))
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_ssh_tunnel_method_no_tunnel`
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_update_ssh_tunnel_method_no_tunnel`
-
-Required:
+Optional:
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_alloydb_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/source_amazon_ads.md b/docs/resources/source_amazon_ads.md
index 9313194aa..7b77ad032 100644
--- a/docs/resources/source_amazon_ads.md
+++ b/docs/resources/source_amazon_ads.md
@@ -15,30 +15,29 @@ SourceAmazonAds Resource
```terraform
resource "airbyte_source_amazon_ads" "my_source_amazonads" {
configuration = {
- auth_type = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- look_back_window = 10
+ look_back_window = 3
marketplace_ids = [
"...",
]
profiles = [
- 6,
+ 2,
]
refresh_token = "...my_refresh_token..."
- region = "EU"
+ region = "FE"
report_record_types = [
- "asins_targets",
+ "adGroups",
]
- source_type = "amazon-ads"
- start_date = "2022-10-10"
+ start_date = "2022-10-10"
state_filter = [
- "archived",
+ "paused",
]
}
- name = "Dan Towne"
- secret_id = "...my_secret_id..."
- workspace_id = "d02bae0b-e2d7-4822-99e3-ea4b5197f924"
+ definition_id = "34df0d75-6d8b-40d9-8daf-9186ab63a7b2"
+ name = "Chris Littel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ec566b1d-1d8b-4b57-bf00-1ddb3cf074d6"
}
```
@@ -48,11 +47,12 @@ resource "airbyte_source_amazon_ads" "my_source_amazonads" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -67,16 +67,15 @@ Required:
- `client_id` (String) The client ID of your Amazon Ads developer application. See the docs for more information.
- `client_secret` (String) The client secret of your Amazon Ads developer application. See the docs for more information.
-- `refresh_token` (String) Amazon Ads refresh token. See the docs for more information on how to obtain this token.
-- `source_type` (String) must be one of ["amazon-ads"]
+- `refresh_token` (String, Sensitive) Amazon Ads refresh token. See the docs for more information on how to obtain this token.
Optional:
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `look_back_window` (Number) The amount of days to go back in time to get the updated data from Amazon Ads
+- `look_back_window` (Number) Default: 3
+The amount of days to go back in time to get the updated data from Amazon Ads
- `marketplace_ids` (List of String) Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.
- `profiles` (List of Number) Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.
-- `region` (String) must be one of ["NA", "EU", "FE"]
+- `region` (String) must be one of ["NA", "EU", "FE"]; Default: "NA"
Region to pull data from (EU/NA/FE). See docs for more details.
- `report_record_types` (List of String) Optional configuration which accepts an array of string of record types. Leave blank for default behaviour to pull all report types. Use this config option only if you want to pull specific report type(s). See docs for more details
- `start_date` (String) The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format
diff --git a/docs/resources/source_amazon_seller_partner.md b/docs/resources/source_amazon_seller_partner.md
index 2a1f4205f..712445fd1 100644
--- a/docs/resources/source_amazon_seller_partner.md
+++ b/docs/resources/source_amazon_seller_partner.md
@@ -15,26 +15,22 @@ SourceAmazonSellerPartner Resource
```terraform
resource "airbyte_source_amazon_seller_partner" "my_source_amazonsellerpartner" {
configuration = {
+ account_type = "Seller"
advanced_stream_options = "{\"GET_SALES_AND_TRAFFIC_REPORT\": {\"availability_sla_days\": 3}}"
- auth_type = "oauth2.0"
- aws_access_key = "...my_aws_access_key..."
- aws_environment = "PRODUCTION"
- aws_secret_key = "...my_aws_secret_key..."
+ aws_environment = "SANDBOX"
lwa_app_id = "...my_lwa_app_id..."
lwa_client_secret = "...my_lwa_client_secret..."
- max_wait_seconds = 1980
- period_in_days = 5
+ period_in_days = 2
refresh_token = "...my_refresh_token..."
- region = "SA"
+ region = "AE"
replication_end_date = "2017-01-25T00:00:00Z"
replication_start_date = "2017-01-25T00:00:00Z"
- report_options = "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}"
- role_arn = "...my_role_arn..."
- source_type = "amazon-seller-partner"
+ report_options = "{\"GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT\": {\"reportPeriod\": \"WEEK\"}}"
}
- name = "Phyllis Quitzon"
- secret_id = "...my_secret_id..."
- workspace_id = "5c537c64-54ef-4b0b-b489-6c3ca5acfbe2"
+ definition_id = "69bb26e6-b9f2-45aa-9f8c-7d4107048d9f"
+ name = "Caleb Legros"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9afeef69-ead1-4e5d-b690-efc6e828b1d2"
}
```
@@ -44,11 +40,12 @@ resource "airbyte_source_amazon_seller_partner" "my_source_amazonsellerpartner"
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -61,26 +58,23 @@ resource "airbyte_source_amazon_seller_partner" "my_source_amazonsellerpartner"
Required:
-- `aws_environment` (String) must be one of ["PRODUCTION", "SANDBOX"]
-Select the AWS Environment.
- `lwa_app_id` (String) Your Login with Amazon Client ID.
- `lwa_client_secret` (String) Your Login with Amazon Client Secret.
-- `refresh_token` (String) The Refresh Token obtained via OAuth flow authorization.
-- `region` (String) must be one of ["AE", "AU", "BE", "BR", "CA", "DE", "EG", "ES", "FR", "GB", "IN", "IT", "JP", "MX", "NL", "PL", "SA", "SE", "SG", "TR", "UK", "US"]
-Select the AWS Region.
+- `refresh_token` (String, Sensitive) The Refresh Token obtained via OAuth flow authorization.
- `replication_start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-- `source_type` (String) must be one of ["amazon-seller-partner"]
Optional:
+- `account_type` (String) must be one of ["Seller", "Vendor"]; Default: "Seller"
+Type of the Account you're going to authorize the Airbyte application by
- `advanced_stream_options` (String) Additional information to configure report options. This varies by report type, not every report implement this kind of feature. Must be a valid json string.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `aws_access_key` (String) Specifies the AWS access key used as part of the credentials to authenticate the user.
-- `aws_secret_key` (String) Specifies the AWS secret key used as part of the credentials to authenticate the user.
-- `max_wait_seconds` (Number) Sometimes report can take up to 30 minutes to generate. This will set the limit for how long to wait for a successful report.
-- `period_in_days` (Number) Will be used for stream slicing for initial full_refresh sync when no updated state is present for reports that support sliced incremental sync.
+- `aws_environment` (String) must be one of ["PRODUCTION", "SANDBOX"]; Default: "PRODUCTION"
+Select the AWS Environment.
+- `period_in_days` (Number) Default: 90
+Will be used for stream slicing for initial full_refresh sync when no updated state is present for reports that support sliced incremental sync.
+- `region` (String) must be one of ["AE", "AU", "BE", "BR", "CA", "DE", "EG", "ES", "FR", "GB", "IN", "IT", "JP", "MX", "NL", "PL", "SA", "SE", "SG", "TR", "UK", "US"]; Default: "US"
+Select the AWS Region.
- `replication_end_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.
- `report_options` (String) Additional information passed to reports. This varies by report type. Must be a valid json string.
-- `role_arn` (String) Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. (Needs permission to 'Assume Role' STS).
diff --git a/docs/resources/source_amazon_sqs.md b/docs/resources/source_amazon_sqs.md
index 7ea4bb73b..6a0470a6d 100644
--- a/docs/resources/source_amazon_sqs.md
+++ b/docs/resources/source_amazon_sqs.md
@@ -17,18 +17,18 @@ resource "airbyte_source_amazon_sqs" "my_source_amazonsqs" {
configuration = {
access_key = "xxxxxHRNxxx3TBxxxxxx"
attributes_to_return = "attr1,attr2"
- delete_messages = false
+ delete_messages = true
max_batch_size = 5
max_wait_time = 5
queue_url = "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue"
- region = "ap-southeast-2"
+ region = "ap-northeast-2"
secret_key = "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz"
- source_type = "amazon-sqs"
visibility_timeout = 15
}
- name = "Cathy Kirlin"
- secret_id = "...my_secret_id..."
- workspace_id = "29177dea-c646-4ecb-9734-09e3eb1e5a2b"
+ definition_id = "aa9ea927-cae7-4b29-885e-6b85628652e0"
+ name = "Emmett Labadie"
+ secret_id = "...my_secret_id..."
+ workspace_id = "21b517b1-6f1f-4884-abcd-5137451945c4"
}
```
@@ -38,11 +38,12 @@ resource "airbyte_source_amazon_sqs" "my_source_amazonsqs" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -55,19 +56,19 @@ resource "airbyte_source_amazon_sqs" "my_source_amazonsqs" {
Required:
-- `delete_messages` (Boolean) If Enabled, messages will be deleted from the SQS Queue after being read. If Disabled, messages are left in the queue and can be read more than once. WARNING: Enabling this option can result in data loss in cases of failure, use with caution, see documentation for more detail.
- `queue_url` (String) URL of the SQS Queue
- `region` (String) must be one of ["us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
AWS Region of the SQS Queue
-- `source_type` (String) must be one of ["amazon-sqs"]
Optional:
-- `access_key` (String) The Access Key ID of the AWS IAM Role to use for pulling messages
+- `access_key` (String, Sensitive) The Access Key ID of the AWS IAM Role to use for pulling messages
- `attributes_to_return` (String) Comma separated list of Message Attribute names to return
+- `delete_messages` (Boolean) Default: false
+If Enabled, messages will be deleted from the SQS Queue after being read. If Disabled, messages are left in the queue and can be read more than once. WARNING: Enabling this option can result in data loss in cases of failure, use with caution, see documentation for more detail.
- `max_batch_size` (Number) Max amount of messages to get in one batch (10 max)
- `max_wait_time` (Number) Max amount of time in seconds to wait for messages in a single poll (20 max)
-- `secret_key` (String) The Secret Key of the AWS IAM Role to use for pulling messages
+- `secret_key` (String, Sensitive) The Secret Key of the AWS IAM Role to use for pulling messages
- `visibility_timeout` (Number) Modify the Visibility Timeout of the individual message from the Queue's default (seconds).
diff --git a/docs/resources/source_amplitude.md b/docs/resources/source_amplitude.md
index afa37370c..fc2c295ad 100644
--- a/docs/resources/source_amplitude.md
+++ b/docs/resources/source_amplitude.md
@@ -17,14 +17,14 @@ resource "airbyte_source_amplitude" "my_source_amplitude" {
configuration = {
api_key = "...my_api_key..."
data_region = "Standard Server"
- request_time_range = 1
+ request_time_range = 2
secret_key = "...my_secret_key..."
- source_type = "amplitude"
start_date = "2021-01-25T00:00:00Z"
}
- name = "Robin Bednar"
- secret_id = "...my_secret_id..."
- workspace_id = "116db995-45fc-495f-a889-70e189dbb30f"
+ definition_id = "526ae8aa-3c4f-4287-913b-8668105e1180"
+ name = "Dominic Dach"
+ secret_id = "...my_secret_id..."
+ workspace_id = "75a1ca19-0e95-4bd1-982a-17eb0af63def"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_amplitude" "my_source_amplitude" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,15 +52,15 @@ resource "airbyte_source_amplitude" "my_source_amplitude" {
Required:
-- `api_key` (String) Amplitude API Key. See the setup guide for more information on how to obtain this key.
-- `secret_key` (String) Amplitude Secret Key. See the setup guide for more information on how to obtain this key.
-- `source_type` (String) must be one of ["amplitude"]
+- `api_key` (String, Sensitive) Amplitude API Key. See the setup guide for more information on how to obtain this key.
+- `secret_key` (String, Sensitive) Amplitude Secret Key. See the setup guide for more information on how to obtain this key.
- `start_date` (String) UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.
Optional:
-- `data_region` (String) must be one of ["Standard Server", "EU Residency Server"]
+- `data_region` (String) must be one of ["Standard Server", "EU Residency Server"]; Default: "Standard Server"
Amplitude data region server
-- `request_time_range` (Number) According to Considerations too big time range in request can cause a timeout error. In this case, set shorter time interval in hours.
+- `request_time_range` (Number) Default: 24
+According to Considerations too big time range in request can cause a timeout error. In this case, set shorter time interval in hours.
diff --git a/docs/resources/source_apify_dataset.md b/docs/resources/source_apify_dataset.md
index dc02b94ef..cbab9974c 100644
--- a/docs/resources/source_apify_dataset.md
+++ b/docs/resources/source_apify_dataset.md
@@ -15,14 +15,13 @@ SourceApifyDataset Resource
```terraform
resource "airbyte_source_apify_dataset" "my_source_apifydataset" {
configuration = {
- clean = true
- dataset_id = "...my_dataset_id..."
- source_type = "apify-dataset"
- token = "Personal API tokens"
+ dataset_id = "rHuMdwm6xCFt6WiGU"
+ token = "apify_api_PbVwb1cBbuvbfg2jRmAIHZKgx3NQyfEMG7uk"
}
- name = "Dale Ferry"
- secret_id = "...my_secret_id..."
- workspace_id = "055b197c-d44e-42f5-ad82-d3513bb6f48b"
+ definition_id = "a73356f3-9bea-45e2-889f-0e8905c8543b"
+ name = "Justin Luettgen"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ac7dcada-d293-48da-9765-e7880f00a30d"
}
```
@@ -32,11 +31,12 @@ resource "airbyte_source_apify_dataset" "my_source_apifydataset" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,12 +49,7 @@ resource "airbyte_source_apify_dataset" "my_source_apifydataset" {
Required:
-- `source_type` (String) must be one of ["apify-dataset"]
-- `token` (String) Your application's Client Secret. You can find this value on the console integrations tab after you login.
-
-Optional:
-
-- `clean` (Boolean) If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false.
-- `dataset_id` (String) ID of the dataset you would like to load to Airbyte.
+- `dataset_id` (String) ID of the dataset you would like to load to Airbyte. In Apify Console, you can view your datasets in the Storage section under the Datasets tab after you login. See the Apify Docs for more information.
+- `token` (String, Sensitive) Personal API token of your Apify account. In Apify Console, you can find your API token in the Settings section under the Integrations tab after you login. See the Apify Docs for more information.
diff --git a/docs/resources/source_appfollow.md b/docs/resources/source_appfollow.md
index 7f6e36ef9..17d017973 100644
--- a/docs/resources/source_appfollow.md
+++ b/docs/resources/source_appfollow.md
@@ -15,12 +15,12 @@ SourceAppfollow Resource
```terraform
resource "airbyte_source_appfollow" "my_source_appfollow" {
configuration = {
- api_secret = "...my_api_secret..."
- source_type = "appfollow"
+ api_secret = "...my_api_secret..."
}
- name = "Regina Huel"
- secret_id = "...my_secret_id..."
- workspace_id = "db35ff2e-4b27-4537-a8cd-9e7319c177d5"
+ definition_id = "def9a90f-a7f8-4f44-9b58-dfc559a0bee1"
+ name = "Maurice Wilderman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "23389204-2261-4684-a73e-f602c915f597"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_appfollow" "my_source_appfollow" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -45,10 +46,6 @@ resource "airbyte_source_appfollow" "my_source_appfollow" {
### Nested Schema for `configuration`
-Required:
-
-- `source_type` (String) must be one of ["appfollow"]
-
Optional:
- `api_secret` (String) API Key provided by Appfollow
diff --git a/docs/resources/source_asana.md b/docs/resources/source_asana.md
index 63b256530..e578bf02d 100644
--- a/docs/resources/source_asana.md
+++ b/docs/resources/source_asana.md
@@ -16,18 +16,21 @@ SourceAsana Resource
resource "airbyte_source_asana" "my_source_asana" {
configuration = {
credentials = {
- source_asana_authentication_mechanism_authenticate_via_asana_oauth_ = {
+ authenticate_via_asana_oauth = {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- option_title = "OAuth Credentials"
refresh_token = "...my_refresh_token..."
}
}
- source_type = "asana"
+ organization_export_ids = [
+ "{ \"see\": \"documentation\" }",
+ ]
+ test_mode = true
}
- name = "Jill Wintheiser"
- secret_id = "...my_secret_id..."
- workspace_id = "b114eeb5-2ff7-485f-8378-14d4c98e0c2b"
+ definition_id = "f5896557-ce17-4ccd-ab10-d6388d4fdfb9"
+ name = "Ms. Irvin Anderson"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c04191be-b057-4f07-8546-621bdba90354"
}
```
@@ -37,11 +40,12 @@ resource "airbyte_source_asana" "my_source_asana" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -55,71 +59,32 @@ resource "airbyte_source_asana" "my_source_asana" {
Optional:
- `credentials` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["asana"]
+- `organization_export_ids` (List of String) Globally unique identifiers for the organization exports
+- `test_mode` (Boolean) This flag is used for testing purposes for certain streams that return a lot of data. This flag is not meant to be enabled for prod.
### Nested Schema for `configuration.credentials`
Optional:
-- `source_asana_authentication_mechanism_authenticate_via_asana_oauth` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials--source_asana_authentication_mechanism_authenticate_via_asana_oauth))
-- `source_asana_authentication_mechanism_authenticate_with_personal_access_token` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials--source_asana_authentication_mechanism_authenticate_with_personal_access_token))
-- `source_asana_update_authentication_mechanism_authenticate_via_asana_oauth` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials--source_asana_update_authentication_mechanism_authenticate_via_asana_oauth))
-- `source_asana_update_authentication_mechanism_authenticate_with_personal_access_token` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials--source_asana_update_authentication_mechanism_authenticate_with_personal_access_token))
+- `authenticate_via_asana_oauth` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_asana_oauth))
+- `authenticate_with_personal_access_token` (Attributes) Choose how to authenticate to Github (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_with_personal_access_token))
-
-### Nested Schema for `configuration.credentials.source_asana_authentication_mechanism_authenticate_via_asana_oauth`
+
+### Nested Schema for `configuration.credentials.authenticate_via_asana_oauth`
Required:
- `client_id` (String)
- `client_secret` (String)
-- `refresh_token` (String)
+- `refresh_token` (String, Sensitive)
-Optional:
-
-- `option_title` (String) must be one of ["OAuth Credentials"]
-OAuth Credentials
-
-
-
-### Nested Schema for `configuration.credentials.source_asana_authentication_mechanism_authenticate_with_personal_access_token`
-
-Required:
-
-- `personal_access_token` (String) Asana Personal Access Token (generate yours here).
-
-Optional:
-
-- `option_title` (String) must be one of ["PAT Credentials"]
-PAT Credentials
-
-
-
-### Nested Schema for `configuration.credentials.source_asana_update_authentication_mechanism_authenticate_via_asana_oauth`
-Required:
-
-- `client_id` (String)
-- `client_secret` (String)
-- `refresh_token` (String)
-
-Optional:
-
-- `option_title` (String) must be one of ["OAuth Credentials"]
-OAuth Credentials
-
-
-
-### Nested Schema for `configuration.credentials.source_asana_update_authentication_mechanism_authenticate_with_personal_access_token`
+
+### Nested Schema for `configuration.credentials.authenticate_with_personal_access_token`
Required:
-- `personal_access_token` (String) Asana Personal Access Token (generate yours here).
-
-Optional:
-
-- `option_title` (String) must be one of ["PAT Credentials"]
-PAT Credentials
+- `personal_access_token` (String, Sensitive) Asana Personal Access Token (generate yours here).
diff --git a/docs/resources/source_auth0.md b/docs/resources/source_auth0.md
index 52fc91890..7930c812a 100644
--- a/docs/resources/source_auth0.md
+++ b/docs/resources/source_auth0.md
@@ -17,17 +17,16 @@ resource "airbyte_source_auth0" "my_source_auth0" {
configuration = {
base_url = "https://dev-yourOrg.us.auth0.com/"
credentials = {
- source_auth0_authentication_method_o_auth2_access_token = {
+ o_auth2_access_token = {
access_token = "...my_access_token..."
- auth_type = "oauth2_access_token"
}
}
- source_type = "auth0"
- start_date = "2023-08-05T00:43:59.244Z"
+ start_date = "2023-08-05T00:43:59.244Z"
}
- name = "Willard McLaughlin"
- secret_id = "...my_secret_id..."
- workspace_id = "75dad636-c600-4503-98bb-31180f739ae9"
+ definition_id = "f51ed0a8-181e-46e5-9fd9-ebe7b2f5ca6e"
+ name = "Dallas Wiza"
+ secret_id = "...my_secret_id..."
+ workspace_id = "2b052102-08e0-436b-a68d-758466c963e1"
}
```
@@ -37,11 +36,12 @@ resource "airbyte_source_auth0" "my_source_auth0" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -56,58 +56,34 @@ Required:
- `base_url` (String) The Authentication API is served over HTTPS. All URLs referenced in the documentation have the following base `https://YOUR_DOMAIN`
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["auth0"]
Optional:
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
+- `start_date` (String) Default: "2023-08-05T00:43:59.244Z"
+UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
### Nested Schema for `configuration.credentials`
Optional:
-- `source_auth0_authentication_method_o_auth2_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_auth0_authentication_method_o_auth2_access_token))
-- `source_auth0_authentication_method_o_auth2_confidential_application` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_auth0_authentication_method_o_auth2_confidential_application))
-- `source_auth0_update_authentication_method_o_auth2_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_auth0_update_authentication_method_o_auth2_access_token))
-- `source_auth0_update_authentication_method_o_auth2_confidential_application` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_auth0_update_authentication_method_o_auth2_confidential_application))
+- `o_auth2_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth2_access_token))
+- `o_auth2_confidential_application` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth2_confidential_application))
-
-### Nested Schema for `configuration.credentials.source_auth0_authentication_method_o_auth2_access_token`
+
+### Nested Schema for `configuration.credentials.o_auth2_access_token`
Required:
-- `access_token` (String) Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes.
-- `auth_type` (String) must be one of ["oauth2_access_token"]
+- `access_token` (String, Sensitive) Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions known as scopes.
-
-### Nested Schema for `configuration.credentials.source_auth0_authentication_method_o_auth2_confidential_application`
+
+### Nested Schema for `configuration.credentials.o_auth2_confidential_application`
Required:
- `audience` (String) The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab
-- `auth_type` (String) must be one of ["oauth2_confidential_application"]
-- `client_id` (String) Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal.
-- `client_secret` (String) Your application's Client Secret. You can find this value on the application's settings tab after you login the admin portal.
-
-
-
-### Nested Schema for `configuration.credentials.source_auth0_update_authentication_method_o_auth2_access_token`
-
-Required:
-
-- `access_token` (String) Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes.
-- `auth_type` (String) must be one of ["oauth2_access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_auth0_update_authentication_method_o_auth2_confidential_application`
-
-Required:
-
-- `audience` (String) The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab
-- `auth_type` (String) must be one of ["oauth2_confidential_application"]
- `client_id` (String) Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal.
- `client_secret` (String) Your application's Client Secret. You can find this value on the application's settings tab after you login the admin portal.
diff --git a/docs/resources/source_aws_cloudtrail.md b/docs/resources/source_aws_cloudtrail.md
index ae5eda7dc..1f0eb17d8 100644
--- a/docs/resources/source_aws_cloudtrail.md
+++ b/docs/resources/source_aws_cloudtrail.md
@@ -18,12 +18,12 @@ resource "airbyte_source_aws_cloudtrail" "my_source_awscloudtrail" {
aws_key_id = "...my_aws_key_id..."
aws_region_name = "...my_aws_region_name..."
aws_secret_key = "...my_aws_secret_key..."
- source_type = "aws-cloudtrail"
start_date = "2021-01-01"
}
- name = "Nellie Waters"
- secret_id = "...my_secret_id..."
- workspace_id = "09e28103-31f3-4981-94c7-00b607f3c93c"
+ definition_id = "1b394b84-acdf-48db-aa4f-7e23711b260f"
+ name = "Janis Erdman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "1edcb36c-da3d-451c-bc15-623ec6453ce6"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_aws_cloudtrail" "my_source_awscloudtrail" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,10 +51,13 @@ resource "airbyte_source_aws_cloudtrail" "my_source_awscloudtrail" {
Required:
-- `aws_key_id` (String) AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.
+- `aws_key_id` (String, Sensitive) AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.
- `aws_region_name` (String) The default AWS Region to use, for example, us-west-1 or us-west-2. When specifying a Region inline during client initialization, this property is named region_name.
-- `aws_secret_key` (String) AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.
-- `source_type` (String) must be one of ["aws-cloudtrail"]
-- `start_date` (String) The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD.
+- `aws_secret_key` (String, Sensitive) AWS CloudTrail Secret Access Key. See the docs for more information on how to obtain this key.
+
+Optional:
+
+- `start_date` (String) Default: "1970-01-01"
+The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD.
diff --git a/docs/resources/source_azure_blob_storage.md b/docs/resources/source_azure_blob_storage.md
index a6896cfbb..8462aa41b 100644
--- a/docs/resources/source_azure_blob_storage.md
+++ b/docs/resources/source_azure_blob_storage.md
@@ -15,22 +15,35 @@ SourceAzureBlobStorage Resource
```terraform
resource "airbyte_source_azure_blob_storage" "my_source_azureblobstorage" {
configuration = {
- azure_blob_storage_account_key = "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="
- azure_blob_storage_account_name = "airbyte5storage"
- azure_blob_storage_blobs_prefix = "FolderA/FolderB/"
- azure_blob_storage_container_name = "airbytetescontainername"
- azure_blob_storage_endpoint = "blob.core.windows.net"
- azure_blob_storage_schema_inference_limit = 500
- format = {
- source_azure_blob_storage_input_format_json_lines_newline_delimited_json = {
- format_type = "JSONL"
- }
- }
- source_type = "azure-blob-storage"
+ azure_blob_storage_account_key = "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="
+ azure_blob_storage_account_name = "airbyte5storage"
+ azure_blob_storage_container_name = "airbytetescontainername"
+ azure_blob_storage_endpoint = "blob.core.windows.net"
+ start_date = "2021-01-01T00:00:00.000000Z"
+ streams = [
+ {
+ days_to_sync_if_history_is_full = 8
+ format = {
+ avro_format = {
+ double_as_string = true
+ }
+ }
+ globs = [
+ "...",
+ ]
+ input_schema = "...my_input_schema..."
+ legacy_prefix = "...my_legacy_prefix..."
+ name = "Angelina Armstrong"
+ primary_key = "...my_primary_key..."
+ schemaless = true
+ validation_policy = "Wait for Discover"
+ },
+ ]
}
- name = "Patty Mraz"
- secret_id = "...my_secret_id..."
- workspace_id = "3f2ceda7-e23f-4225-b411-faf4b7544e47"
+ definition_id = "e16b8da7-b814-43f8-91cf-99c7fd70e504"
+ name = "Joy Sipes"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4f64874e-62c5-48d8-b92f-d48887cb19c4"
}
```
@@ -39,12 +52,14 @@ resource "airbyte_source_azure_blob_storage" "my_source_azureblobstorage" {
### Required
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `configuration` (Attributes) NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes
+because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK. (see [below for nested schema](#nestedatt--configuration))
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -57,39 +72,130 @@ resource "airbyte_source_azure_blob_storage" "my_source_azureblobstorage" {
Required:
-- `azure_blob_storage_account_key` (String) The Azure blob storage account key.
+- `azure_blob_storage_account_key` (String, Sensitive) The Azure blob storage account key.
- `azure_blob_storage_account_name` (String) The account's name of the Azure Blob Storage.
- `azure_blob_storage_container_name` (String) The name of the Azure blob storage container.
-- `format` (Attributes) Input data format (see [below for nested schema](#nestedatt--configuration--format))
-- `source_type` (String) must be one of ["azure-blob-storage"]
+- `streams` (Attributes List) Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. (see [below for nested schema](#nestedatt--configuration--streams))
Optional:
-- `azure_blob_storage_blobs_prefix` (String) The Azure blob storage prefix to be applied
- `azure_blob_storage_endpoint` (String) This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
-- `azure_blob_storage_schema_inference_limit` (Number) The Azure blob storage blobs to scan for inferring the schema, useful on large amounts of data with consistent structure
+- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
-
-### Nested Schema for `configuration.format`
+
+### Nested Schema for `configuration.streams`
+
+Required:
+
+- `format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format))
+- `name` (String) The name of the stream.
Optional:
-- `source_azure_blob_storage_input_format_json_lines_newline_delimited_json` (Attributes) Input data format (see [below for nested schema](#nestedatt--configuration--format--source_azure_blob_storage_input_format_json_lines_newline_delimited_json))
-- `source_azure_blob_storage_update_input_format_json_lines_newline_delimited_json` (Attributes) Input data format (see [below for nested schema](#nestedatt--configuration--format--source_azure_blob_storage_update_input_format_json_lines_newline_delimited_json))
+- `days_to_sync_if_history_is_full` (Number) Default: 3
+When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
+- `globs` (List of String) The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
+- `input_schema` (String) The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
+- `legacy_prefix` (String) The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.
+- `primary_key` (String, Sensitive) The column or columns (for a composite key) that serves as the unique identifier of a record.
+- `schemaless` (Boolean) Default: false
+When enabled, syncs will not validate or structure records against the stream's schema.
+- `validation_policy` (String) must be one of ["Emit Record", "Skip Record", "Wait for Discover"]; Default: "Emit Record"
+The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
-
-### Nested Schema for `configuration.format.source_azure_blob_storage_input_format_json_lines_newline_delimited_json`
+
+### Nested Schema for `configuration.streams.format`
-Required:
+Optional:
+
+- `avro_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--avro_format))
+- `csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--csv_format))
+- `document_file_type_format_experimental` (Attributes) Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file. (see [below for nested schema](#nestedatt--configuration--streams--format--document_file_type_format_experimental))
+- `jsonl_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--jsonl_format))
+- `parquet_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format))
+
+
+### Nested Schema for `configuration.streams.format.parquet_format`
+
+Optional:
+
+- `double_as_string` (Boolean) Default: false
+Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers.
+
+
+
+### Nested Schema for `configuration.streams.format.parquet_format`
+
+Optional:
+
+- `delimiter` (String) Default: ","
+The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
+- `double_quote` (Boolean) Default: true
+Whether two quotes in a quoted CSV value denote a single quote in the data.
+- `encoding` (String) Default: "utf8"
+The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
+- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank.
+- `false_values` (List of String) A set of case-sensitive strings that should be interpreted as false values.
+- `header_definition` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format--header_definition))
+- `inference_type` (String) must be one of ["None", "Primitive Types Only"]; Default: "None"
+How to infer the types of the columns. If none, the inference defaults to strings.
+- `null_values` (List of String) A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
+- `quote_char` (String) Default: "\""
+The character used for quoting CSV values. To disallow quoting, make this field blank.
+- `skip_rows_after_header` (Number) Default: 0
+The number of rows to skip after the header row.
+- `skip_rows_before_header` (Number) Default: 0
+The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
+- `strings_can_be_null` (Boolean) Default: true
+Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
+- `true_values` (List of String) A set of case-sensitive strings that should be interpreted as true values.
+
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition`
+
+Optional:
-- `format_type` (String) must be one of ["JSONL"]
+- `autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--csv_format--header_definition--autogenerated))
+- `from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--csv_format--header_definition--from_csv))
+- `user_provided` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--csv_format--header_definition--user_provided))
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition.autogenerated`
-
-### Nested Schema for `configuration.format.source_azure_blob_storage_update_input_format_json_lines_newline_delimited_json`
+
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition.from_csv`
+
+
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition.user_provided`
Required:
-- `format_type` (String) must be one of ["JSONL"]
+- `column_names` (List of String) The column names that will be used while emitting the CSV records
+
+
+
+
+
+### Nested Schema for `configuration.streams.format.document_file_type_format`
+
+Optional:
+
+- `skip_unprocessable_file_types` (Boolean) Default: true
+If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.
+
+
+
+### Nested Schema for `configuration.streams.format.jsonl_format`
+
+
+
+### Nested Schema for `configuration.streams.format.parquet_format`
+
+Optional:
+
+- `decimal_as_float` (Boolean) Default: false
+Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
diff --git a/docs/resources/source_azure_table.md b/docs/resources/source_azure_table.md
index 608c2aab8..e6077b806 100644
--- a/docs/resources/source_azure_table.md
+++ b/docs/resources/source_azure_table.md
@@ -15,14 +15,14 @@ SourceAzureTable Resource
```terraform
resource "airbyte_source_azure_table" "my_source_azuretable" {
configuration = {
- source_type = "azure-table"
storage_access_key = "...my_storage_access_key..."
storage_account_name = "...my_storage_account_name..."
- storage_endpoint_suffix = "core.windows.net"
+ storage_endpoint_suffix = "core.chinacloudapi.cn"
}
- name = "Ian Baumbach"
- secret_id = "...my_secret_id..."
- workspace_id = "57a5b404-63a7-4d57-9f14-00e764ad7334"
+ definition_id = "ec8b4573-d66d-4007-a52a-2e4396e7403e"
+ name = "Adam Stracke V"
+ secret_id = "...my_secret_id..."
+ workspace_id = "59a4fa50-e807-4c86-bd0c-bf5314eea0fa"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_azure_table" "my_source_azuretable" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,12 +50,12 @@ resource "airbyte_source_azure_table" "my_source_azuretable" {
Required:
-- `source_type` (String) must be one of ["azure-table"]
-- `storage_access_key` (String) Azure Table Storage Access Key. See the docs for more information on how to obtain this key.
+- `storage_access_key` (String, Sensitive) Azure Table Storage Access Key. See the docs for more information on how to obtain this key.
- `storage_account_name` (String) The name of your storage account.
Optional:
-- `storage_endpoint_suffix` (String) Azure Table Storage service account URL suffix. See the docs for more information on how to obtain endpoint suffix
+- `storage_endpoint_suffix` (String) Default: "core.windows.net"
+Azure Table Storage service account URL suffix. See the docs for more information on how to obtain endpoint suffix
diff --git a/docs/resources/source_bamboo_hr.md b/docs/resources/source_bamboo_hr.md
index 7e8bce413..382290a09 100644
--- a/docs/resources/source_bamboo_hr.md
+++ b/docs/resources/source_bamboo_hr.md
@@ -18,12 +18,12 @@ resource "airbyte_source_bamboo_hr" "my_source_bamboohr" {
api_key = "...my_api_key..."
custom_reports_fields = "...my_custom_reports_fields..."
custom_reports_include_default_fields = true
- source_type = "bamboo-hr"
subdomain = "...my_subdomain..."
}
- name = "Ralph Rau"
- secret_id = "...my_secret_id..."
- workspace_id = "1b36a080-88d1-400e-bada-200ef0422eb2"
+ definition_id = "1aa37367-271c-478a-9aa9-603df323c7d7"
+ name = "Joel Harber"
+ secret_id = "...my_secret_id..."
+ workspace_id = "f8882a19-738b-4218-b704-94da21b79cfd"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_bamboo_hr" "my_source_bamboohr" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,13 +51,14 @@ resource "airbyte_source_bamboo_hr" "my_source_bamboohr" {
Required:
-- `api_key` (String) Api key of bamboo hr
-- `source_type` (String) must be one of ["bamboo-hr"]
+- `api_key` (String, Sensitive) Api key of bamboo hr
- `subdomain` (String) Sub Domain of bamboo hr
Optional:
-- `custom_reports_fields` (String) Comma-separated list of fields to include in custom reports.
-- `custom_reports_include_default_fields` (Boolean) If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names.
+- `custom_reports_fields` (String) Default: ""
+Comma-separated list of fields to include in custom reports.
+- `custom_reports_include_default_fields` (Boolean) Default: true
+If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names.
diff --git a/docs/resources/source_bigcommerce.md b/docs/resources/source_bigcommerce.md
deleted file mode 100644
index e850994f9..000000000
--- a/docs/resources/source_bigcommerce.md
+++ /dev/null
@@ -1,57 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_bigcommerce Resource - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceBigcommerce Resource
----
-
-# airbyte_source_bigcommerce (Resource)
-
-SourceBigcommerce Resource
-
-## Example Usage
-
-```terraform
-resource "airbyte_source_bigcommerce" "my_source_bigcommerce" {
- configuration = {
- access_token = "...my_access_token..."
- source_type = "bigcommerce"
- start_date = "2021-01-01"
- store_hash = "...my_store_hash..."
- }
- name = "Beth Gleason"
- secret_id = "...my_secret_id..."
- workspace_id = "9ab8366c-723f-4fda-9e06-bee4825c1fc0"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `source_id` (String)
-- `source_type` (String)
-
-
-### Nested Schema for `configuration`
-
-Required:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `source_type` (String) must be one of ["bigcommerce"]
-- `start_date` (String) The date you would like to replicate data. Format: YYYY-MM-DD.
-- `store_hash` (String) The hash code of the store. For https://api.bigcommerce.com/stores/HASH_CODE/v3/, The store's hash code is 'HASH_CODE'.
-
-
diff --git a/docs/resources/source_bigquery.md b/docs/resources/source_bigquery.md
index 5d4224fa6..277efd84a 100644
--- a/docs/resources/source_bigquery.md
+++ b/docs/resources/source_bigquery.md
@@ -18,11 +18,11 @@ resource "airbyte_source_bigquery" "my_source_bigquery" {
credentials_json = "...my_credentials_json..."
dataset_id = "...my_dataset_id..."
project_id = "...my_project_id..."
- source_type = "bigquery"
}
- name = "Joe Bradtke"
- secret_id = "...my_secret_id..."
- workspace_id = "80bff918-544e-4c42-9efc-ce8f1977773e"
+ definition_id = "9baf3821-deb7-4264-9ad9-e5fb53126691"
+ name = "Darrin Rogahn"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b9ea24da-51fb-473f-872f-2e8bbfe18227"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_bigquery" "my_source_bigquery" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,7 +52,6 @@ Required:
- `credentials_json` (String) The contents of your Service Account Key JSON file. See the docs for more information on how to obtain this key.
- `project_id` (String) The GCP project ID for the project containing the target BigQuery dataset.
-- `source_type` (String) must be one of ["bigquery"]
Optional:
diff --git a/docs/resources/source_bing_ads.md b/docs/resources/source_bing_ads.md
index c209d01ae..00fbc9cec 100644
--- a/docs/resources/source_bing_ads.md
+++ b/docs/resources/source_bing_ads.md
@@ -15,19 +15,28 @@ SourceBingAds Resource
```terraform
resource "airbyte_source_bing_ads" "my_source_bingads" {
configuration = {
- auth_method = "oauth2.0"
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ custom_reports = [
+ {
+ name = "AdDynamicTextPerformanceReport"
+ report_aggregation = "...my_report_aggregation..."
+ report_columns = [
+ "...",
+ ]
+ reporting_object = "ShareOfVoiceReportRequest"
+ },
+ ]
developer_token = "...my_developer_token..."
- lookback_window = 4
+ lookback_window = 3
refresh_token = "...my_refresh_token..."
- reports_start_date = "2022-08-23"
- source_type = "bing-ads"
+ reports_start_date = "2022-08-17"
tenant_id = "...my_tenant_id..."
}
- name = "Kathryn Nitzsche"
- secret_id = "...my_secret_id..."
- workspace_id = "408f05e3-d48f-4daf-b13a-1f5fd94259c0"
+ definition_id = "f49be625-99f1-47b5-861c-8d2f7dd6ee9c"
+ name = "Delia Kub Sr."
+ secret_id = "...my_secret_id..."
+ workspace_id = "90282195-430f-4896-8a32-1f431fb3aad0"
}
```
@@ -37,11 +46,12 @@ resource "airbyte_source_bing_ads" "my_source_bingads" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -55,16 +65,33 @@ resource "airbyte_source_bing_ads" "my_source_bingads" {
Required:
- `client_id` (String) The Client ID of your Microsoft Advertising developer application.
-- `developer_token` (String) Developer token associated with user. See more info in the docs.
-- `refresh_token` (String) Refresh Token to renew the expired Access Token.
-- `reports_start_date` (String) The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format.
-- `source_type` (String) must be one of ["bing-ads"]
+- `developer_token` (String, Sensitive) Developer token associated with user. See more info in the docs.
+- `refresh_token` (String, Sensitive) Refresh Token to renew the expired Access Token.
Optional:
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_secret` (String) The Client Secret of your Microsoft Advertising developer application.
-- `lookback_window` (Number) Also known as attribution or conversion window. How far into the past to look for records (in days). If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. Used only for performance report streams in incremental mode.
-- `tenant_id` (String) The Tenant ID of your Microsoft Advertising developer application. Set this to "common" unless you know you need a different value.
+- `client_secret` (String) Default: ""
+The Client Secret of your Microsoft Advertising developer application.
+- `custom_reports` (Attributes List) You can add your Custom Bing Ads report by creating one. (see [below for nested schema](#nestedatt--configuration--custom_reports))
+- `lookback_window` (Number) Default: 0
+Also known as attribution or conversion window. How far into the past to look for records (in days). If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. Used only for performance report streams in incremental mode without specified Reports Start Date.
+- `reports_start_date` (String) The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format. If not set, data from previous and current calendar year will be replicated.
+- `tenant_id` (String) Default: "common"
+The Tenant ID of your Microsoft Advertising developer application. Set this to "common" unless you know you need a different value.
+
+
+### Nested Schema for `configuration.custom_reports`
+
+Required:
+
+- `name` (String) The name of the custom report, this name would be used as stream name
+- `report_columns` (List of String) A list of available report object columns. You can find it in description of reporting object that you want to add to custom report.
+- `reporting_object` (String) must be one of ["AccountPerformanceReportRequest", "AdDynamicTextPerformanceReportRequest", "AdExtensionByAdReportRequest", "AdExtensionByKeywordReportRequest", "AdExtensionDetailReportRequest", "AdGroupPerformanceReportRequest", "AdPerformanceReportRequest", "AgeGenderAudienceReportRequest", "AudiencePerformanceReportRequest", "CallDetailReportRequest", "CampaignPerformanceReportRequest", "ConversionPerformanceReportRequest", "DestinationUrlPerformanceReportRequest", "DSAAutoTargetPerformanceReportRequest", "DSACategoryPerformanceReportRequest", "DSASearchQueryPerformanceReportRequest", "GeographicPerformanceReportRequest", "GoalsAndFunnelsReportRequest", "HotelDimensionPerformanceReportRequest", "HotelGroupPerformanceReportRequest", "KeywordPerformanceReportRequest", "NegativeKeywordConflictReportRequest", "ProductDimensionPerformanceReportRequest", "ProductMatchCountReportRequest", "ProductNegativeKeywordConflictReportRequest", "ProductPartitionPerformanceReportRequest", "ProductPartitionUnitPerformanceReportRequest", "ProductSearchQueryPerformanceReportRequest", "ProfessionalDemographicsAudienceReportRequest", "PublisherUsagePerformanceReportRequest", "SearchCampaignChangeHistoryReportRequest", "SearchQueryPerformanceReportRequest", "ShareOfVoiceReportRequest", "UserLocationPerformanceReportRequest"]
+The name of the the object derives from the ReportRequest object. You can find it in Bing Ads Api docs - Reporting API - Reporting Data Objects.
+
+Optional:
+
+- `report_aggregation` (String) Default: "[Hourly]"
+A list of available aggregations.
diff --git a/docs/resources/source_braintree.md b/docs/resources/source_braintree.md
index e4b899ddb..e6c557362 100644
--- a/docs/resources/source_braintree.md
+++ b/docs/resources/source_braintree.md
@@ -15,16 +15,16 @@ SourceBraintree Resource
```terraform
resource "airbyte_source_braintree" "my_source_braintree" {
configuration = {
- environment = "Development"
+ environment = "Qa"
merchant_id = "...my_merchant_id..."
private_key = "...my_private_key..."
public_key = "...my_public_key..."
- source_type = "braintree"
start_date = "2020-12-30"
}
- name = "Henrietta Nienow"
- secret_id = "...my_secret_id..."
- workspace_id = "4f3b756c-11f6-4c37-a512-6243835bbc05"
+ definition_id = "12fcb5a7-fdd8-454e-8c39-c22fe17df57a"
+ name = "Ms. Tommie Bins"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5ff7f1a2-7e8f-4d2f-993d-4f9ab29a2f83"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_braintree" "my_source_braintree" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -54,9 +55,8 @@ Required:
- `environment` (String) must be one of ["Development", "Sandbox", "Qa", "Production"]
Environment specifies where the data will come from.
- `merchant_id` (String) The unique identifier for your entire gateway account. See the docs for more information on how to obtain this ID.
-- `private_key` (String) Braintree Private Key. See the docs for more information on how to obtain this key.
-- `public_key` (String) Braintree Public Key. See the docs for more information on how to obtain this key.
-- `source_type` (String) must be one of ["braintree"]
+- `private_key` (String, Sensitive) Braintree Private Key. See the docs for more information on how to obtain this key.
+- `public_key` (String, Sensitive) Braintree Public Key. See the docs for more information on how to obtain this key.
Optional:
diff --git a/docs/resources/source_braze.md b/docs/resources/source_braze.md
index 9b54f5633..4af3aff5d 100644
--- a/docs/resources/source_braze.md
+++ b/docs/resources/source_braze.md
@@ -15,14 +15,14 @@ SourceBraze Resource
```terraform
resource "airbyte_source_braze" "my_source_braze" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "braze"
- start_date = "2022-09-06"
- url = "...my_url..."
+ api_key = "...my_api_key..."
+ start_date = "2022-07-08"
+ url = "...my_url..."
}
- name = "Rosie Glover"
- secret_id = "...my_secret_id..."
- workspace_id = "efc5fde1-0a0c-4e21-a9e5-10019c6dc5e3"
+ definition_id = "dec4e3ea-b02c-4cb9-8852-3df16a0cc499"
+ name = "Margarita Leuschke"
+ secret_id = "...my_secret_id..."
+ workspace_id = "682b0a70-74f0-416f-b212-7f33f8652b25"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_braze" "my_source_braze" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,8 +50,7 @@ resource "airbyte_source_braze" "my_source_braze" {
Required:
-- `api_key` (String) Braze REST API key
-- `source_type` (String) must be one of ["braze"]
+- `api_key` (String, Sensitive) Braze REST API key
- `start_date` (String) Rows after this date will be synced
- `url` (String) Braze REST API endpoint
diff --git a/docs/resources/source_cart.md b/docs/resources/source_cart.md
new file mode 100644
index 000000000..063acf385
--- /dev/null
+++ b/docs/resources/source_cart.md
@@ -0,0 +1,90 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_source_cart Resource - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ SourceCart Resource
+---
+
+# airbyte_source_cart (Resource)
+
+SourceCart Resource
+
+## Example Usage
+
+```terraform
+resource "airbyte_source_cart" "my_source_cart" {
+ configuration = {
+ credentials = {
+ central_api_router = {
+ site_id = "...my_site_id..."
+ user_name = "Ethyl.Bosco18"
+ user_secret = "...my_user_secret..."
+ }
+ }
+ start_date = "2021-01-01T00:00:00Z"
+ }
+ definition_id = "3ec1224a-7ffb-4268-9c18-7087d37ac99f"
+ name = "Jamie Macejkovic III"
+ secret_id = "...my_secret_id..."
+ workspace_id = "12305e0c-1f4b-465d-9ebd-757e5946981c"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `name` (String) Name of the source e.g. dev-mysql-instance.
+- `workspace_id` (String)
+
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
+
+### Read-Only
+
+- `source_id` (String)
+- `source_type` (String)
+
+
+### Nested Schema for `configuration`
+
+Required:
+
+- `start_date` (String) The date from which you'd like to replicate the data
+
+Optional:
+
+- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
+
+
+### Nested Schema for `configuration.credentials`
+
+Optional:
+
+- `central_api_router` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--central_api_router))
+- `single_store_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--single_store_access_token))
+
+
+### Nested Schema for `configuration.credentials.central_api_router`
+
+Required:
+
+- `site_id` (String) You can determine a site provisioning site Id by hitting https://site.com/store/sitemonitor.aspx and reading the response param PSID
+- `user_name` (String) Enter your application's User Name
+- `user_secret` (String) Enter your application's User Secret
+
+
+
+### Nested Schema for `configuration.credentials.single_store_access_token`
+
+Required:
+
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
+- `store_name` (String) The name of Cart.com Online Store. All API URLs start with https://[mystorename.com]/api/v1/, where [mystorename.com] is the domain name of your store.
+
+
diff --git a/docs/resources/source_chargebee.md b/docs/resources/source_chargebee.md
index 441d8aec3..cc5354deb 100644
--- a/docs/resources/source_chargebee.md
+++ b/docs/resources/source_chargebee.md
@@ -15,15 +15,15 @@ SourceChargebee Resource
```terraform
resource "airbyte_source_chargebee" "my_source_chargebee" {
configuration = {
- product_catalog = "1.0"
+ product_catalog = "2.0"
site = "airbyte-test"
site_api_key = "...my_site_api_key..."
- source_type = "chargebee"
start_date = "2021-01-25T00:00:00Z"
}
- name = "Viola Morissette"
- secret_id = "...my_secret_id..."
- workspace_id = "fbbe6949-fb2b-4b4e-8ae6-c3d5db3adebd"
+ definition_id = "08691686-308e-4adb-b3c3-69be0c12ece5"
+ name = "Jean Mann"
+ secret_id = "...my_secret_id..."
+ workspace_id = "aef8e474-9058-48d0-a293-9574a681eea7"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_chargebee" "my_source_chargebee" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -53,8 +54,7 @@ Required:
- `product_catalog` (String) must be one of ["1.0", "2.0"]
Product Catalog version of your Chargebee site. Instructions on how to find your version you may find here under `API Version` section.
- `site` (String) The site prefix for your Chargebee instance.
-- `site_api_key` (String) Chargebee API Key. See the docs for more information on how to obtain this key.
-- `source_type` (String) must be one of ["chargebee"]
+- `site_api_key` (String, Sensitive) Chargebee API Key. See the docs for more information on how to obtain this key.
- `start_date` (String) UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.
diff --git a/docs/resources/source_chartmogul.md b/docs/resources/source_chartmogul.md
index eae9586b4..88ecc29c4 100644
--- a/docs/resources/source_chartmogul.md
+++ b/docs/resources/source_chartmogul.md
@@ -15,14 +15,13 @@ SourceChartmogul Resource
```terraform
resource "airbyte_source_chartmogul" "my_source_chartmogul" {
configuration = {
- api_key = "...my_api_key..."
- interval = "week"
- source_type = "chartmogul"
- start_date = "2017-01-25T00:00:00Z"
+ api_key = "...my_api_key..."
+ start_date = "2017-01-25T00:00:00Z"
}
- name = "Neal Gorczany"
- secret_id = "...my_secret_id..."
- workspace_id = "06a8aa94-c026-444c-b5e9-d9a4578adc1a"
+ definition_id = "87a1fb18-7d33-4223-980b-b99362d2f459"
+ name = "Monica Pagac"
+ secret_id = "...my_secret_id..."
+ workspace_id = "bc3680ab-b376-4bce-a6a7-c0ce20da3e9a"
}
```
@@ -32,11 +31,12 @@ resource "airbyte_source_chartmogul" "my_source_chartmogul" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,10 +49,7 @@ resource "airbyte_source_chartmogul" "my_source_chartmogul" {
Required:
-- `api_key` (String) Your Chartmogul API key. See the docs for info on how to obtain this.
-- `interval` (String) must be one of ["day", "week", "month", "quarter"]
-Some APIs such as Metrics require intervals to cluster data.
-- `source_type` (String) must be one of ["chartmogul"]
+- `api_key` (String, Sensitive) Your Chartmogul API key. See the docs for info on how to obtain this.
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. When feasible, any data before this date will not be replicated.
diff --git a/docs/resources/source_clickhouse.md b/docs/resources/source_clickhouse.md
index 0752f6a3e..b8f07ea23 100644
--- a/docs/resources/source_clickhouse.md
+++ b/docs/resources/source_clickhouse.md
@@ -15,21 +15,19 @@ SourceClickhouse Resource
```terraform
resource "airbyte_source_clickhouse" "my_source_clickhouse" {
configuration = {
- database = "default"
- host = "...my_host..."
- password = "...my_password..."
- port = 8123
- source_type = "clickhouse"
+ database = "default"
+ host = "...my_host..."
+ password = "...my_password..."
+ port = 8123
tunnel_method = {
- source_clickhouse_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_clickhouse_no_tunnel = {}
}
- username = "Gerry81"
+ username = "Maximus28"
}
- name = "Mr. Simon Altenwerth"
- secret_id = "...my_secret_id..."
- workspace_id = "c802e2ec-09ff-48f0-b816-ff3477c13e90"
+ definition_id = "54cb2418-93e1-4da4-ac4f-685d205011b8"
+ name = "Milton Crooks"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3b757391-0861-48e9-9445-d83c494a849c"
}
```
@@ -39,11 +37,12 @@ resource "airbyte_source_clickhouse" "my_source_clickhouse" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -58,13 +57,13 @@ Required:
- `database` (String) The name of the database.
- `host` (String) The host endpoint of the Clickhouse cluster.
-- `port` (Number) The port of the database.
-- `source_type` (String) must be one of ["clickhouse"]
- `username` (String) The username which is used to access the database.
Optional:
-- `password` (String) The password associated with this username.
+- `password` (String, Sensitive) The password associated with this username.
+- `port` (Number) Default: 8123
+The port of the database.
- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
@@ -72,80 +71,41 @@ Optional:
Optional:
-- `source_clickhouse_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_ssh_tunnel_method_no_tunnel))
-- `source_clickhouse_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_ssh_tunnel_method_password_authentication))
-- `source_clickhouse_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_ssh_tunnel_method_ssh_key_authentication))
-- `source_clickhouse_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_update_ssh_tunnel_method_no_tunnel))
-- `source_clickhouse_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_update_ssh_tunnel_method_password_authentication))
-- `source_clickhouse_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_clickhouse_update_ssh_tunnel_method_ssh_key_authentication))
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_ssh_tunnel_method_no_tunnel`
-
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_update_ssh_tunnel_method_no_tunnel`
-
-Required:
+Optional:
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_clickhouse_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/source_clickup_api.md b/docs/resources/source_clickup_api.md
index 3fe26b280..2bc07698e 100644
--- a/docs/resources/source_clickup_api.md
+++ b/docs/resources/source_clickup_api.md
@@ -19,13 +19,13 @@ resource "airbyte_source_clickup_api" "my_source_clickupapi" {
folder_id = "...my_folder_id..."
include_closed_tasks = true
list_id = "...my_list_id..."
- source_type = "clickup-api"
space_id = "...my_space_id..."
team_id = "...my_team_id..."
}
- name = "Mr. Jack Gottlieb"
- secret_id = "...my_secret_id..."
- workspace_id = "b0960a66-8151-4a47-aaf9-23c5949f83f3"
+ definition_id = "517f0e32-c2e3-402e-ade9-2b3e43098446"
+ name = "Freddie Little"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e6422d15-b828-4621-a877-d2e625cdd80b"
}
```
@@ -35,11 +35,12 @@ resource "airbyte_source_clickup_api" "my_source_clickupapi" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -52,13 +53,13 @@ resource "airbyte_source_clickup_api" "my_source_clickupapi" {
Required:
-- `api_token` (String) Every ClickUp API call required authentication. This field is your personal API token. See here.
-- `source_type` (String) must be one of ["clickup-api"]
+- `api_token` (String, Sensitive) Every ClickUp API call required authentication. This field is your personal API token. See here.
Optional:
- `folder_id` (String) The ID of your folder in your space. Retrieve it from the `/space/{space_id}/folder` of the ClickUp API. See here.
-- `include_closed_tasks` (Boolean) Include or exclude closed tasks. By default, they are excluded. See here.
+- `include_closed_tasks` (Boolean) Default: false
+Include or exclude closed tasks. By default, they are excluded. See here.
- `list_id` (String) The ID of your list in your folder. Retrieve it from the `/folder/{folder_id}/list` of the ClickUp API. See here.
- `space_id` (String) The ID of your space in your workspace. Retrieve it from the `/team/{team_id}/space` of the ClickUp API. See here.
- `team_id` (String) The ID of your team in ClickUp. Retrieve it from the `/team` of the ClickUp API. See here.
diff --git a/docs/resources/source_clockify.md b/docs/resources/source_clockify.md
index 113eb7b03..59cfafc53 100644
--- a/docs/resources/source_clockify.md
+++ b/docs/resources/source_clockify.md
@@ -17,12 +17,12 @@ resource "airbyte_source_clockify" "my_source_clockify" {
configuration = {
api_key = "...my_api_key..."
api_url = "...my_api_url..."
- source_type = "clockify"
workspace_id = "...my_workspace_id..."
}
- name = "Angela Schaefer"
- secret_id = "...my_secret_id..."
- workspace_id = "76ffb901-c6ec-4bb4-a243-cf789ffafeda"
+ definition_id = "a5ff53c6-fc10-4ca6-ba82-7c3d349f444d"
+ name = "Julius Lockman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9d8494dc-faea-4550-8380-1e9f446900c8"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_clockify" "my_source_clockify" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,12 +50,12 @@ resource "airbyte_source_clockify" "my_source_clockify" {
Required:
-- `api_key` (String) You can get your api access_key here This API is Case Sensitive.
-- `source_type` (String) must be one of ["clockify"]
+- `api_key` (String, Sensitive) You can get your api access_key here This API is Case Sensitive.
- `workspace_id` (String) WorkSpace Id
Optional:
-- `api_url` (String) The URL for the Clockify API. This should only need to be modified if connecting to an enterprise version of Clockify.
+- `api_url` (String) Default: "https://api.clockify.me"
+The URL for the Clockify API. This should only need to be modified if connecting to an enterprise version of Clockify.
diff --git a/docs/resources/source_close_com.md b/docs/resources/source_close_com.md
index b28cb6c3a..6b9611ad8 100644
--- a/docs/resources/source_close_com.md
+++ b/docs/resources/source_close_com.md
@@ -15,13 +15,13 @@ SourceCloseCom Resource
```terraform
resource "airbyte_source_close_com" "my_source_closecom" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "close-com"
- start_date = "2021-01-01"
+ api_key = "...my_api_key..."
+ start_date = "2021-01-01"
}
- name = "Ronnie Nikolaus"
- secret_id = "...my_secret_id..."
- workspace_id = "e0ac184c-2b9c-4247-8883-73a40e1942f3"
+ definition_id = "ba7b45cf-ea08-4abd-9a32-8f6c373e0666"
+ name = "Miss Eva Collier"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a3ab4d44-755b-4910-a5c9-99e89cbd0e8f"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_close_com" "my_source_closecom" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,11 +49,11 @@ resource "airbyte_source_close_com" "my_source_closecom" {
Required:
-- `api_key` (String) Close.com API key (usually starts with 'api_'; find yours here).
-- `source_type` (String) must be one of ["close-com"]
+- `api_key` (String, Sensitive) Close.com API key (usually starts with 'api_'; find yours here).
Optional:
-- `start_date` (String) The start date to sync data; all data after this date will be replicated. Leave blank to retrieve all the data available in the account. Format: YYYY-MM-DD.
+- `start_date` (String) Default: "2021-01-01"
+The start date to sync data; all data after this date will be replicated. Leave blank to retrieve all the data available in the account. Format: YYYY-MM-DD.
diff --git a/docs/resources/source_coda.md b/docs/resources/source_coda.md
index cc909b5dd..242e36d48 100644
--- a/docs/resources/source_coda.md
+++ b/docs/resources/source_coda.md
@@ -15,12 +15,12 @@ SourceCoda Resource
```terraform
resource "airbyte_source_coda" "my_source_coda" {
configuration = {
- auth_token = "...my_auth_token..."
- source_type = "coda"
+ auth_token = "...my_auth_token..."
}
- name = "Lila Harris II"
- secret_id = "...my_secret_id..."
- workspace_id = "5756f5d5-6d0b-4d0a-b2df-e13db4f62cba"
+ definition_id = "2a37cc1f-bec8-483d-a2fe-cd2cab29e0bc"
+ name = "Lisa Barrows"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3cc123e8-783d-450d-8d2b-80c50dc344f6"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_coda" "my_source_coda" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_coda" "my_source_coda" {
Required:
-- `auth_token` (String) Bearer token
-- `source_type` (String) must be one of ["coda"]
+- `auth_token` (String, Sensitive) Bearer token
diff --git a/docs/resources/source_coin_api.md b/docs/resources/source_coin_api.md
index 3bd2f51bf..6014811bc 100644
--- a/docs/resources/source_coin_api.md
+++ b/docs/resources/source_coin_api.md
@@ -18,15 +18,15 @@ resource "airbyte_source_coin_api" "my_source_coinapi" {
api_key = "...my_api_key..."
end_date = "2019-01-01T00:00:00"
environment = "sandbox"
- limit = 10
+ limit = 8
period = "2MTH"
- source_type = "coin-api"
start_date = "2019-01-01T00:00:00"
symbol_id = "...my_symbol_id..."
}
- name = "Francis Boyle"
- secret_id = "...my_secret_id..."
- workspace_id = "bc0b80a6-924d-43b2-acfc-c8f895010f5d"
+ definition_id = "f0e9a05e-994a-4ce4-9dc5-b42f2a228e88"
+ name = "Rhonda Kunze"
+ secret_id = "...my_secret_id..."
+ workspace_id = "d4275060-42c1-4c65-a61b-2485a060238e"
}
```
@@ -36,11 +36,12 @@ resource "airbyte_source_coin_api" "my_source_coinapi" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -53,11 +54,8 @@ resource "airbyte_source_coin_api" "my_source_coinapi" {
Required:
-- `api_key` (String) API Key
-- `environment` (String) must be one of ["sandbox", "production"]
-The environment to use. Either sandbox or production.
+- `api_key` (String, Sensitive) API Key
- `period` (String) The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get
-- `source_type` (String) must be one of ["coin-api"]
- `start_date` (String) The start date in ISO 8601 format.
- `symbol_id` (String) The symbol ID to use. See the documentation for a list.
https://docs.coinapi.io/#list-all-symbols-get
@@ -67,7 +65,10 @@ Optional:
- `end_date` (String) The end date in ISO 8601 format. If not supplied, data will be returned
from the start date to the current time, or when the count of result
elements reaches its limit.
-- `limit` (Number) The maximum number of elements to return. If not supplied, the default
+- `environment` (String) must be one of ["sandbox", "production"]; Default: "sandbox"
+The environment to use. Either sandbox or production.
+- `limit` (Number) Default: 100
+The maximum number of elements to return. If not supplied, the default
is 100. For numbers larger than 100, each 100 items is counted as one
request for pricing purposes. Maximum value is 100000.
diff --git a/docs/resources/source_coinmarketcap.md b/docs/resources/source_coinmarketcap.md
index c2cd2f263..672a071ae 100644
--- a/docs/resources/source_coinmarketcap.md
+++ b/docs/resources/source_coinmarketcap.md
@@ -15,16 +15,16 @@ SourceCoinmarketcap Resource
```terraform
resource "airbyte_source_coinmarketcap" "my_source_coinmarketcap" {
configuration = {
- api_key = "...my_api_key..."
- data_type = "historical"
- source_type = "coinmarketcap"
+ api_key = "...my_api_key..."
+ data_type = "historical"
symbols = [
"...",
]
}
- name = "Meredith Kassulke"
- secret_id = "...my_secret_id..."
- workspace_id = "1804e54c-82f1-468a-b63c-8873e484380b"
+ definition_id = "a1361d3c-00cf-4e1b-a68d-340502b96029"
+ name = "Pat Robel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9e6abf17-c2d5-40cb-ae6f-f332bdf14577"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_coinmarketcap" "my_source_coinmarketcap" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,10 +52,9 @@ resource "airbyte_source_coinmarketcap" "my_source_coinmarketcap" {
Required:
-- `api_key` (String) Your API Key. See here. The token is case sensitive.
+- `api_key` (String, Sensitive) Your API Key. See here. The token is case sensitive.
- `data_type` (String) must be one of ["latest", "historical"]
/latest: Latest market ticker quotes and averages for cryptocurrencies and exchanges. /historical: Intervals of historic market data like OHLCV data or data for use in charting libraries. See here.
-- `source_type` (String) must be one of ["coinmarketcap"]
Optional:
diff --git a/docs/resources/source_configcat.md b/docs/resources/source_configcat.md
index 0664fac49..741ca4a38 100644
--- a/docs/resources/source_configcat.md
+++ b/docs/resources/source_configcat.md
@@ -15,13 +15,13 @@ SourceConfigcat Resource
```terraform
resource "airbyte_source_configcat" "my_source_configcat" {
configuration = {
- password = "...my_password..."
- source_type = "configcat"
- username = "Art_Wiegand"
+ password = "...my_password..."
+ username = "Estrella_Wilkinson70"
}
- name = "Lowell Oberbrunner"
- secret_id = "...my_secret_id..."
- workspace_id = "5a60a04c-495c-4c69-9171-b51c1bdb1cf4"
+ definition_id = "81a7466b-f78b-43b7-9ede-547fc7c1cb53"
+ name = "Ms. Luis Harris"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9ddb3b3d-7401-439d-82cf-2cb416442d85"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_configcat" "my_source_configcat" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_configcat" "my_source_configcat" {
Required:
-- `password` (String) Basic auth password. See here.
-- `source_type` (String) must be one of ["configcat"]
+- `password` (String, Sensitive) Basic auth password. See here.
- `username` (String) Basic auth user name. See here.
diff --git a/docs/resources/source_confluence.md b/docs/resources/source_confluence.md
index 65945d69a..fdf332e89 100644
--- a/docs/resources/source_confluence.md
+++ b/docs/resources/source_confluence.md
@@ -18,11 +18,11 @@ resource "airbyte_source_confluence" "my_source_confluence" {
api_token = "...my_api_token..."
domain_name = "...my_domain_name..."
email = "abc@example.com"
- source_type = "confluence"
}
- name = "Jody Will"
- secret_id = "...my_secret_id..."
- workspace_id = "ccca99bc-7fc0-4b2d-8e10-873e42b006d6"
+ definition_id = "82e70e18-a817-42f9-b227-1c9f9cbaa542"
+ name = "Ms. Nathaniel Walter V"
+ secret_id = "...my_secret_id..."
+ workspace_id = "61d84c3f-bc24-4f86-8fce-85198c116e72"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_confluence" "my_source_confluence" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,9 +50,8 @@ resource "airbyte_source_confluence" "my_source_confluence" {
Required:
-- `api_token` (String) Please follow the Jira confluence for generating an API token: generating an API token.
+- `api_token` (String, Sensitive) Please follow the Jira confluence for generating an API token: generating an API token.
- `domain_name` (String) Your Confluence domain name
- `email` (String) Your Confluence login email
-- `source_type` (String) must be one of ["confluence"]
diff --git a/docs/resources/source_convex.md b/docs/resources/source_convex.md
index 107ac4ea7..f01b1acfa 100644
--- a/docs/resources/source_convex.md
+++ b/docs/resources/source_convex.md
@@ -17,11 +17,11 @@ resource "airbyte_source_convex" "my_source_convex" {
configuration = {
access_key = "...my_access_key..."
deployment_url = "https://murky-swan-635.convex.cloud"
- source_type = "convex"
}
- name = "Guy Kovacek"
- secret_id = "...my_secret_id..."
- workspace_id = "a8581a58-208c-454f-afa9-c95f2eac5565"
+ definition_id = "581ee677-0fa8-4ec1-ba80-4bd6457a40e8"
+ name = "Corey Braun"
+ secret_id = "...my_secret_id..."
+ workspace_id = "541ba6f5-d90d-45a8-a349-e2072bdff381"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_convex" "my_source_convex" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_convex" "my_source_convex" {
Required:
-- `access_key` (String) API access key used to retrieve data from Convex.
+- `access_key` (String, Sensitive) API access key used to retrieve data from Convex.
- `deployment_url` (String)
-- `source_type` (String) must be one of ["convex"]
diff --git a/docs/resources/source_datascope.md b/docs/resources/source_datascope.md
index bae217d54..49d09813b 100644
--- a/docs/resources/source_datascope.md
+++ b/docs/resources/source_datascope.md
@@ -15,13 +15,13 @@ SourceDatascope Resource
```terraform
resource "airbyte_source_datascope" "my_source_datascope" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "datascope"
- start_date = "dd/mm/YYYY HH:MM"
+ api_key = "...my_api_key..."
+ start_date = "dd/mm/YYYY HH:MM"
}
- name = "Danny Bahringer"
- secret_id = "...my_secret_id..."
- workspace_id = "fee81206-e281-43fa-8a41-c480d3f2132a"
+ definition_id = "8dbe50fc-b32a-4781-b3ab-b82e6a7189e9"
+ name = "Erin Johns"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4638d140-9463-49cf-9dd4-a0c05f536f6b"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_datascope" "my_source_datascope" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_datascope" "my_source_datascope" {
Required:
-- `api_key` (String) API Key
-- `source_type` (String) must be one of ["datascope"]
+- `api_key` (String, Sensitive) API Key
- `start_date` (String) Start date for the data to be replicated
diff --git a/docs/resources/source_delighted.md b/docs/resources/source_delighted.md
index b49cbcb20..24120990c 100644
--- a/docs/resources/source_delighted.md
+++ b/docs/resources/source_delighted.md
@@ -15,13 +15,13 @@ SourceDelighted Resource
```terraform
resource "airbyte_source_delighted" "my_source_delighted" {
configuration = {
- api_key = "...my_api_key..."
- since = "2022-05-30 04:50:23"
- source_type = "delighted"
+ api_key = "...my_api_key..."
+ since = "2022-05-30 04:50:23"
}
- name = "Sarah Collier"
- secret_id = "...my_secret_id..."
- workspace_id = "14f4cc6f-18bf-4962-9a6a-4f77a87ee3e4"
+ definition_id = "b8f8f6af-bf36-45d6-87e0-87e3905b6a41"
+ name = "Elsa Osinski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4f73b7e8-dc37-41ec-bee1-0511b439ed17"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_delighted" "my_source_delighted" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_delighted" "my_source_delighted" {
Required:
-- `api_key` (String) A Delighted API key.
+- `api_key` (String, Sensitive) A Delighted API key.
- `since` (String) The date from which you'd like to replicate the data
-- `source_type` (String) must be one of ["delighted"]
diff --git a/docs/resources/source_dixa.md b/docs/resources/source_dixa.md
index 00d9fb658..8721b0e31 100644
--- a/docs/resources/source_dixa.md
+++ b/docs/resources/source_dixa.md
@@ -15,14 +15,14 @@ SourceDixa Resource
```terraform
resource "airbyte_source_dixa" "my_source_dixa" {
configuration = {
- api_token = "...my_api_token..."
- batch_size = 31
- source_type = "dixa"
- start_date = "YYYY-MM-DD"
+ api_token = "...my_api_token..."
+ batch_size = 1
+ start_date = "YYYY-MM-DD"
}
- name = "Brittany Cole"
- secret_id = "...my_secret_id..."
- workspace_id = "5b34418e-3bb9-41c8-9975-e0e8419d8f84"
+ definition_id = "9f9b4783-ac23-42bf-a41c-80b23345c949"
+ name = "Arturo Hammes"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9f5a34ff-680c-488d-8e9f-7431721e4227"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_dixa" "my_source_dixa" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,12 +50,12 @@ resource "airbyte_source_dixa" "my_source_dixa" {
Required:
-- `api_token` (String) Dixa API token
-- `source_type` (String) must be one of ["dixa"]
+- `api_token` (String, Sensitive) Dixa API token
- `start_date` (String) The connector pulls records updated from this date onwards.
Optional:
-- `batch_size` (Number) Number of days to batch into one request. Max 31.
+- `batch_size` (Number) Default: 31
+Number of days to batch into one request. Max 31.
diff --git a/docs/resources/source_dockerhub.md b/docs/resources/source_dockerhub.md
index 45ad7b7aa..1f60a4c26 100644
--- a/docs/resources/source_dockerhub.md
+++ b/docs/resources/source_dockerhub.md
@@ -16,11 +16,11 @@ SourceDockerhub Resource
resource "airbyte_source_dockerhub" "my_source_dockerhub" {
configuration = {
docker_username = "airbyte"
- source_type = "dockerhub"
}
- name = "Joe Haag"
- secret_id = "...my_secret_id..."
- workspace_id = "3e07edcc-4aa5-4f3c-abd9-05a972e05672"
+ definition_id = "fd51b66e-c345-4b5c-9bae-74726a8cd9c5"
+ name = "Ernesto Swaniawski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "afda11e1-0d00-42e1-873f-9ba1e39a63be"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_dockerhub" "my_source_dockerhub" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,6 +49,5 @@ resource "airbyte_source_dockerhub" "my_source_dockerhub" {
Required:
- `docker_username` (String) Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/ API call)
-- `source_type` (String) must be one of ["dockerhub"]
diff --git a/docs/resources/source_dremio.md b/docs/resources/source_dremio.md
index ace6581e5..0a00d2f00 100644
--- a/docs/resources/source_dremio.md
+++ b/docs/resources/source_dremio.md
@@ -15,13 +15,13 @@ SourceDremio Resource
```terraform
resource "airbyte_source_dremio" "my_source_dremio" {
configuration = {
- api_key = "...my_api_key..."
- base_url = "...my_base_url..."
- source_type = "dremio"
+ api_key = "...my_api_key..."
+ base_url = "...my_base_url..."
}
- name = "Aaron Connelly"
- secret_id = "...my_secret_id..."
- workspace_id = "2d309470-bf7a-44fa-87cf-535a6fae54eb"
+ definition_id = "209caa59-3eb8-408e-88c0-a1f11671a56d"
+ name = "Jeanne Lebsack"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b5e4c195-9643-43e1-9514-84aac586d055"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_dremio" "my_source_dremio" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,11 @@ resource "airbyte_source_dremio" "my_source_dremio" {
Required:
-- `api_key` (String) API Key that is generated when you authenticate to Dremio API
-- `base_url` (String) URL of your Dremio instance
-- `source_type` (String) must be one of ["dremio"]
+- `api_key` (String, Sensitive) API Key that is generated when you authenticate to Dremio API
+
+Optional:
+
+- `base_url` (String) Default: "https://app.dremio.cloud"
+URL of your Dremio instance
diff --git a/docs/resources/source_dynamodb.md b/docs/resources/source_dynamodb.md
index 6de681c81..ad1249a81 100644
--- a/docs/resources/source_dynamodb.md
+++ b/docs/resources/source_dynamodb.md
@@ -17,14 +17,14 @@ resource "airbyte_source_dynamodb" "my_source_dynamodb" {
configuration = {
access_key_id = "A012345678910EXAMPLE"
endpoint = "https://{aws_dynamo_db_url}.com"
- region = "us-gov-west-1"
+ region = "us-west-1"
reserved_attribute_names = "name, field_name, field-name"
secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
- source_type = "dynamodb"
}
- name = "Sandra Rowe Sr."
- secret_id = "...my_secret_id..."
- workspace_id = "f023b75d-2367-4fe1-a0cc-8df79f0a396d"
+ definition_id = "44c5465b-457a-42c2-a18f-e1b91dcce8e6"
+ name = "Faye Streich"
+ secret_id = "...my_secret_id..."
+ workspace_id = "75fb5812-2af6-4a8a-8655-36a205f1e4d3"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_dynamodb" "my_source_dynamodb" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,14 +52,14 @@ resource "airbyte_source_dynamodb" "my_source_dynamodb" {
Required:
-- `access_key_id` (String) The access key id to access Dynamodb. Airbyte requires read permissions to the database
-- `secret_access_key` (String) The corresponding secret to the access key id.
-- `source_type` (String) must be one of ["dynamodb"]
+- `access_key_id` (String, Sensitive) The access key id to access Dynamodb. Airbyte requires read permissions to the database
+- `secret_access_key` (String, Sensitive) The corresponding secret to the access key id.
Optional:
-- `endpoint` (String) the URL of the Dynamodb database
-- `region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]
+- `endpoint` (String) Default: ""
+the URL of the Dynamodb database
+- `region` (String) must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""
The region of the Dynamodb database
- `reserved_attribute_names` (String) Comma separated reserved attribute names present in your tables
diff --git a/docs/resources/source_e2e_test_cloud.md b/docs/resources/source_e2e_test_cloud.md
deleted file mode 100644
index f8fb3fe22..000000000
--- a/docs/resources/source_e2e_test_cloud.md
+++ /dev/null
@@ -1,124 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_e2e_test_cloud Resource - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceE2eTestCloud Resource
----
-
-# airbyte_source_e2e_test_cloud (Resource)
-
-SourceE2eTestCloud Resource
-
-## Example Usage
-
-```terraform
-resource "airbyte_source_e2e_test_cloud" "my_source_e2etestcloud" {
- configuration = {
- max_messages = 6
- message_interval_ms = 0
- mock_catalog = {
- source_e2e_test_cloud_mock_catalog_multi_schema = {
- stream_schemas = "...my_stream_schemas..."
- type = "MULTI_STREAM"
- }
- }
- seed = 42
- source_type = "e2e-test-cloud"
- type = "CONTINUOUS_FEED"
- }
- name = "Gertrude Grant"
- secret_id = "...my_secret_id..."
- workspace_id = "c15dfbac-e188-4b1c-8ee2-c8c6ce611fee"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `source_id` (String)
-- `source_type` (String)
-
-
-### Nested Schema for `configuration`
-
-Required:
-
-- `max_messages` (Number) Number of records to emit per stream. Min 1. Max 100 billion.
-- `mock_catalog` (Attributes) (see [below for nested schema](#nestedatt--configuration--mock_catalog))
-- `source_type` (String) must be one of ["e2e-test-cloud"]
-
-Optional:
-
-- `message_interval_ms` (Number) Interval between messages in ms. Min 0 ms. Max 60000 ms (1 minute).
-- `seed` (Number) When the seed is unspecified, the current time millis will be used as the seed. Range: [0, 1000000].
-- `type` (String) must be one of ["CONTINUOUS_FEED"]
-
-
-### Nested Schema for `configuration.mock_catalog`
-
-Optional:
-
-- `source_e2e_test_cloud_mock_catalog_multi_schema` (Attributes) A catalog with multiple data streams, each with a different schema. (see [below for nested schema](#nestedatt--configuration--mock_catalog--source_e2e_test_cloud_mock_catalog_multi_schema))
-- `source_e2e_test_cloud_mock_catalog_single_schema` (Attributes) A catalog with one or multiple streams that share the same schema. (see [below for nested schema](#nestedatt--configuration--mock_catalog--source_e2e_test_cloud_mock_catalog_single_schema))
-- `source_e2e_test_cloud_update_mock_catalog_multi_schema` (Attributes) A catalog with multiple data streams, each with a different schema. (see [below for nested schema](#nestedatt--configuration--mock_catalog--source_e2e_test_cloud_update_mock_catalog_multi_schema))
-- `source_e2e_test_cloud_update_mock_catalog_single_schema` (Attributes) A catalog with one or multiple streams that share the same schema. (see [below for nested schema](#nestedatt--configuration--mock_catalog--source_e2e_test_cloud_update_mock_catalog_single_schema))
-
-
-### Nested Schema for `configuration.mock_catalog.source_e2e_test_cloud_mock_catalog_multi_schema`
-
-Required:
-
-- `stream_schemas` (String) A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples.
-- `type` (String) must be one of ["MULTI_STREAM"]
-
-
-
-### Nested Schema for `configuration.mock_catalog.source_e2e_test_cloud_mock_catalog_single_schema`
-
-Required:
-
-- `stream_name` (String) Name of the data stream.
-- `stream_schema` (String) A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples.
-- `type` (String) must be one of ["SINGLE_STREAM"]
-
-Optional:
-
-- `stream_duplication` (Number) Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is "ds", the duplicated streams will be "ds_0", "ds_1", etc.
-
-
-
-### Nested Schema for `configuration.mock_catalog.source_e2e_test_cloud_update_mock_catalog_multi_schema`
-
-Required:
-
-- `stream_schemas` (String) A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples.
-- `type` (String) must be one of ["MULTI_STREAM"]
-
-
-
-### Nested Schema for `configuration.mock_catalog.source_e2e_test_cloud_update_mock_catalog_single_schema`
-
-Required:
-
-- `stream_name` (String) Name of the data stream.
-- `stream_schema` (String) A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples.
-- `type` (String) must be one of ["SINGLE_STREAM"]
-
-Optional:
-
-- `stream_duplication` (Number) Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is "ds", the duplicated streams will be "ds_0", "ds_1", etc.
-
-
diff --git a/docs/resources/source_emailoctopus.md b/docs/resources/source_emailoctopus.md
index 43ef90e3a..d272fae25 100644
--- a/docs/resources/source_emailoctopus.md
+++ b/docs/resources/source_emailoctopus.md
@@ -15,12 +15,12 @@ SourceEmailoctopus Resource
```terraform
resource "airbyte_source_emailoctopus" "my_source_emailoctopus" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "emailoctopus"
+ api_key = "...my_api_key..."
}
- name = "Gregory Satterfield"
- secret_id = "...my_secret_id..."
- workspace_id = "bdb6eec7-4378-4ba2-9317-747dc915ad2c"
+ definition_id = "09ea5800-594f-4bd8-a631-4cace02f96b8"
+ name = "Annie Hegmann"
+ secret_id = "...my_secret_id..."
+ workspace_id = "f7e4181b-36cf-41af-8f94-e3c79cbeca1c"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_emailoctopus" "my_source_emailoctopus" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_emailoctopus" "my_source_emailoctopus" {
Required:
-- `api_key` (String) EmailOctopus API Key. See the docs for information on how to generate this key.
-- `source_type` (String) must be one of ["emailoctopus"]
+- `api_key` (String, Sensitive) EmailOctopus API Key. See the docs for information on how to generate this key.
diff --git a/docs/resources/source_exchange_rates.md b/docs/resources/source_exchange_rates.md
index ca8d8a6e0..049b80fda 100644
--- a/docs/resources/source_exchange_rates.md
+++ b/docs/resources/source_exchange_rates.md
@@ -16,14 +16,14 @@ SourceExchangeRates Resource
resource "airbyte_source_exchange_rates" "my_source_exchangerates" {
configuration = {
access_key = "...my_access_key..."
- base = "USD"
- ignore_weekends = false
- source_type = "exchange-rates"
+ base = "EUR"
+ ignore_weekends = true
start_date = "YYYY-MM-DD"
}
- name = "Mrs. Leslie Klocko"
- secret_id = "...my_secret_id..."
- workspace_id = "c0f5ae2f-3a6b-4700-8787-56143f5a6c98"
+ definition_id = "a5bbba82-d4c0-4a2c-af78-12475bca9a48"
+ name = "Amber Osinski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "0ddc3156-b2ff-4d5d-ac69-da5497add71f"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_exchange_rates" "my_source_exchangerates" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,13 +51,13 @@ resource "airbyte_source_exchange_rates" "my_source_exchangerates" {
Required:
-- `access_key` (String) Your API Key. See here. The key is case sensitive.
-- `source_type` (String) must be one of ["exchange-rates"]
+- `access_key` (String, Sensitive) Your API Key. See here. The key is case sensitive.
- `start_date` (String) Start getting data from that date.
Optional:
- `base` (String) ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default base currency is EUR
-- `ignore_weekends` (Boolean) Ignore weekends? (Exchanges don't run on weekends)
+- `ignore_weekends` (Boolean) Default: true
+Ignore weekends? (Exchanges don't run on weekends)
diff --git a/docs/resources/source_facebook_marketing.md b/docs/resources/source_facebook_marketing.md
index f34d17319..e99a07706 100644
--- a/docs/resources/source_facebook_marketing.md
+++ b/docs/resources/source_facebook_marketing.md
@@ -23,35 +23,34 @@ resource "airbyte_source_facebook_marketing" "my_source_facebookmarketing" {
custom_insights = [
{
action_breakdowns = [
- "action_destination",
+ "action_video_sound",
]
- action_report_time = "conversion"
+ action_report_time = "mixed"
breakdowns = [
- "frequency_value",
+ "mmm",
]
end_date = "2017-01-26T00:00:00Z"
fields = [
- "account_name",
+ "cpp",
]
- insights_lookback_window = 6
+ insights_lookback_window = 7
level = "ad"
- name = "Jesus Batz"
+ name = "Julio Beier"
start_date = "2017-01-25T00:00:00Z"
- time_increment = 8
+ time_increment = 9
},
]
end_date = "2017-01-26T00:00:00Z"
fetch_thumbnail_images = false
- include_deleted = true
- insights_lookback_window = 4
- max_batch_size = 7
+ include_deleted = false
+ insights_lookback_window = 2
page_size = 3
- source_type = "facebook-marketing"
start_date = "2017-01-25T00:00:00Z"
}
- name = "Ms. Wilbert McGlynn"
- secret_id = "...my_secret_id..."
- workspace_id = "04f926ba-d255-4381-9b47-4b0ed20e5624"
+ definition_id = "7eb149e6-fe9a-476b-9271-d6f7a77e51b0"
+ name = "Olivia MacGyver"
+ secret_id = "...my_secret_id..."
+ workspace_id = "2e6bc1e2-2381-4cdc-ae96-42f3c2fe19c3"
}
```
@@ -61,11 +60,12 @@ resource "airbyte_source_facebook_marketing" "my_source_facebookmarketing" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -78,23 +78,26 @@ resource "airbyte_source_facebook_marketing" "my_source_facebookmarketing" {
Required:
-- `access_token` (String) The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information.
-- `account_id` (String) The Facebook Ad account ID to use when pulling data from the Facebook Marketing API. Open your Meta Ads Manager. The Ad account ID number is in the account dropdown menu or in your browser's address bar. See the docs for more information.
-- `source_type` (String) must be one of ["facebook-marketing"]
-- `start_date` (String) The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
+- `access_token` (String, Sensitive) The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information.
+- `account_id` (String) The Facebook Ad account ID to use when pulling data from the Facebook Marketing API. The Ad account ID number is in the account dropdown menu or in your browser's address bar of your Meta Ads Manager. See the docs for more information.
Optional:
-- `action_breakdowns_allow_empty` (Boolean) Allows action_breakdowns to be an empty list
+- `action_breakdowns_allow_empty` (Boolean) Default: true
+Allows action_breakdowns to be an empty list
- `client_id` (String) The Client Id for your OAuth app
- `client_secret` (String) The Client Secret for your OAuth app
- `custom_insights` (Attributes List) A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on "add" to fill this field. (see [below for nested schema](#nestedatt--configuration--custom_insights))
- `end_date` (String) The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.
-- `fetch_thumbnail_images` (Boolean) Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative.
-- `include_deleted` (Boolean) Set to active if you want to include data from deleted Campaigns, Ads, and AdSets.
-- `insights_lookback_window` (Number) The attribution window. Facebook freezes insight data 28 days after it was generated, which means that all data from the past 28 days may have changed since we last emitted it, so you can retrieve refreshed insights from the past by setting this parameter. If you set a custom lookback window value in Facebook account, please provide the same value here.
-- `max_batch_size` (Number) Maximum batch size used when sending batch requests to Facebook API. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.
-- `page_size` (Number) Page size used when sending requests to Facebook API to specify number of records per page when response has pagination. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.
+- `fetch_thumbnail_images` (Boolean) Default: false
+Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative.
+- `include_deleted` (Boolean) Default: false
+Set to active if you want to include data from deleted Campaigns, Ads, and AdSets.
+- `insights_lookback_window` (Number) Default: 28
+The attribution window. Facebook freezes insight data 28 days after it was generated, which means that all data from the past 28 days may have changed since we last emitted it, so you can retrieve refreshed insights from the past by setting this parameter. If you set a custom lookback window value in Facebook account, please provide the same value here.
+- `page_size` (Number) Default: 100
+Page size used when sending requests to Facebook API to specify number of records per page when response has pagination. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.
+- `start_date` (String) The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data will be replicated for usual streams and only last 2 years for insight streams.
### Nested Schema for `configuration.custom_insights`
@@ -106,15 +109,17 @@ Required:
Optional:
- `action_breakdowns` (List of String) A list of chosen action_breakdowns for action_breakdowns
-- `action_report_time` (String) must be one of ["conversion", "impression", "mixed"]
+- `action_report_time` (String) must be one of ["conversion", "impression", "mixed"]; Default: "mixed"
Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.
- `breakdowns` (List of String) A list of chosen breakdowns for breakdowns
- `end_date` (String) The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.
- `fields` (List of String) A list of chosen fields for fields parameter
-- `insights_lookback_window` (Number) The attribution window
-- `level` (String) must be one of ["ad", "adset", "campaign", "account"]
+- `insights_lookback_window` (Number) Default: 28
+The attribution window
+- `level` (String) must be one of ["ad", "adset", "campaign", "account"]; Default: "ad"
Chosen level for API
- `start_date` (String) The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z.
-- `time_increment` (Number) Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only).
+- `time_increment` (Number) Default: 1
+Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only).
diff --git a/docs/resources/source_facebook_pages.md b/docs/resources/source_facebook_pages.md
index 92ca82db3..2b419fbd8 100644
--- a/docs/resources/source_facebook_pages.md
+++ b/docs/resources/source_facebook_pages.md
@@ -17,11 +17,11 @@ resource "airbyte_source_facebook_pages" "my_source_facebookpages" {
configuration = {
access_token = "...my_access_token..."
page_id = "...my_page_id..."
- source_type = "facebook-pages"
}
- name = "Moses Wuckert"
- secret_id = "...my_secret_id..."
- workspace_id = "39a910ab-dcab-4626-b669-6e1ec00221b3"
+ definition_id = "2edfee92-bc33-473a-92c8-87f28ef975a7"
+ name = "Scott Baumbach"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5487915a-2f44-49e5-b0b6-8d5fb4b99e2f"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_facebook_pages" "my_source_facebookpages" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_facebook_pages" "my_source_facebookpages" {
Required:
-- `access_token` (String) Facebook Page Access Token
+- `access_token` (String, Sensitive) Facebook Page Access Token
- `page_id` (String) Page ID
-- `source_type` (String) must be one of ["facebook-pages"]
diff --git a/docs/resources/source_faker.md b/docs/resources/source_faker.md
index 5fa327f17..f2466bb13 100644
--- a/docs/resources/source_faker.md
+++ b/docs/resources/source_faker.md
@@ -16,15 +16,15 @@ SourceFaker Resource
resource "airbyte_source_faker" "my_source_faker" {
configuration = {
always_updated = false
- count = 3
- parallelism = 9
- records_per_slice = 5
- seed = 6
- source_type = "faker"
+ count = 9
+ parallelism = 8
+ records_per_slice = 1
+ seed = 5
}
- name = "Delbert Reynolds"
- secret_id = "...my_secret_id..."
- workspace_id = "cfda8d0c-549e-4f03-8049-78a61fa1cf20"
+ definition_id = "33c76bbd-55f5-466b-8ade-0498ec40fd8a"
+ name = "Kirk Braun MD"
+ secret_id = "...my_secret_id..."
+ workspace_id = "05c5e889-977e-4ae0-86e3-c2d33082ab84"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_faker" "my_source_faker" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,16 +50,17 @@ resource "airbyte_source_faker" "my_source_faker" {
### Nested Schema for `configuration`
-Required:
-
-- `count` (Number) How many users should be generated in total. This setting does not apply to the purchases or products stream.
-- `source_type` (String) must be one of ["faker"]
-
Optional:
-- `always_updated` (Boolean) Should the updated_at values for every record be new each sync? Setting this to false will case the source to stop emitting records after COUNT records have been emitted.
-- `parallelism` (Number) How many parallel workers should we use to generate fake data? Choose a value equal to the number of CPUs you will allocate to this source.
-- `records_per_slice` (Number) How many fake records will be in each page (stream slice), before a state message is emitted?
-- `seed` (Number) Manually control the faker random seed to return the same values on subsequent runs (leave -1 for random)
+- `always_updated` (Boolean) Default: true
+Should the updated_at values for every record be new each sync? Setting this to false will cause the source to stop emitting records after COUNT records have been emitted.
+- `count` (Number) Default: 1000
+How many users should be generated in total. This setting does not apply to the purchases or products stream.
+- `parallelism` (Number) Default: 4
+How many parallel workers should we use to generate fake data? Choose a value equal to the number of CPUs you will allocate to this source.
+- `records_per_slice` (Number) Default: 1000
+How many fake records will be in each page (stream slice), before a state message is emitted?
+- `seed` (Number) Default: -1
+Manually control the faker random seed to return the same values on subsequent runs (leave -1 for random)
diff --git a/docs/resources/source_fauna.md b/docs/resources/source_fauna.md
index e08ebd353..f43058a55 100644
--- a/docs/resources/source_fauna.md
+++ b/docs/resources/source_fauna.md
@@ -17,21 +17,19 @@ resource "airbyte_source_fauna" "my_source_fauna" {
configuration = {
collection = {
deletions = {
- source_fauna_collection_deletion_mode_disabled = {
- deletion_mode = "ignore"
- }
+ disabled = {}
}
- page_size = 4
+ page_size = 0
}
- domain = "...my_domain..."
- port = 5
- scheme = "...my_scheme..."
- secret = "...my_secret..."
- source_type = "fauna"
+ domain = "...my_domain..."
+ port = 10
+ scheme = "...my_scheme..."
+ secret = "...my_secret..."
}
- name = "Irvin Klein"
- secret_id = "...my_secret_id..."
- workspace_id = "1ffc71dc-a163-4f2a-bc80-a97ff334cddf"
+ definition_id = "56112c1f-da02-410a-9cfb-ec287654f12b"
+ name = "Mr. Willard Gislason"
+ secret_id = "...my_secret_id..."
+ workspace_id = "fbb0cddc-f802-4e3e-a016-5466352da9b0"
}
```
@@ -41,11 +39,12 @@ resource "airbyte_source_fauna" "my_source_fauna" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -58,15 +57,17 @@ resource "airbyte_source_fauna" "my_source_fauna" {
Required:
-- `domain` (String) Domain of Fauna to query. Defaults db.fauna.com. See the docs.
-- `port` (Number) Endpoint port.
-- `scheme` (String) URL scheme.
- `secret` (String) Fauna secret, used when authenticating with the database.
-- `source_type` (String) must be one of ["fauna"]
Optional:
- `collection` (Attributes) Settings for the Fauna Collection. (see [below for nested schema](#nestedatt--configuration--collection))
+- `domain` (String) Default: "db.fauna.com"
+Domain of Fauna to query. Defaults to db.fauna.com. See the docs.
+- `port` (Number) Default: 443
+Endpoint port.
+- `scheme` (String) Default: "https"
+URL scheme.
### Nested Schema for `configuration.collection`
@@ -77,7 +78,11 @@ Required:
Enabling deletion mode informs your destination of deleted documents.
Disabled - Leave this feature disabled, and ignore deleted documents.
Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted. (see [below for nested schema](#nestedatt--configuration--collection--deletions))
-- `page_size` (Number) The page size used when reading documents from the database. The larger the page size, the faster the connector processes documents. However, if a page is too large, the connector may fail.
+
+Optional:
+
+- `page_size` (Number) Default: 64
+The page size used when reading documents from the database. The larger the page size, the faster the connector processes documents. However, if a page is too large, the connector may fail.
Choose your page size based on how large the documents are.
See the docs.
@@ -86,54 +91,25 @@ See This only applies to incremental syncs.
+Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted. (see [below for nested schema](#nestedatt--configuration--collection--deletions--disabled))
+- `enabled` (Attributes) This only applies to incremental syncs.
Enabling deletion mode informs your destination of deleted documents.
Disabled - Leave this feature disabled, and ignore deleted documents.
-Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted. (see [below for nested schema](#nestedatt--configuration--collection--deletions--source_fauna_collection_deletion_mode_enabled))
-- `source_fauna_update_collection_deletion_mode_disabled` (Attributes) This only applies to incremental syncs.
-Enabling deletion mode informs your destination of deleted documents.
-Disabled - Leave this feature disabled, and ignore deleted documents.
-Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted. (see [below for nested schema](#nestedatt--configuration--collection--deletions--source_fauna_update_collection_deletion_mode_disabled))
-- `source_fauna_update_collection_deletion_mode_enabled` (Attributes) This only applies to incremental syncs.
-Enabling deletion mode informs your destination of deleted documents.
-Disabled - Leave this feature disabled, and ignore deleted documents.
-Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted. (see [below for nested schema](#nestedatt--configuration--collection--deletions--source_fauna_update_collection_deletion_mode_enabled))
-
-
-### Nested Schema for `configuration.collection.deletions.source_fauna_update_collection_deletion_mode_enabled`
-
-Required:
-
-- `deletion_mode` (String) must be one of ["ignore"]
+Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted. (see [below for nested schema](#nestedatt--configuration--collection--deletions--enabled))
+
+### Nested Schema for `configuration.collection.deletions.enabled`
-
-### Nested Schema for `configuration.collection.deletions.source_fauna_update_collection_deletion_mode_enabled`
-Required:
-
-- `column` (String) Name of the "deleted at" column.
-- `deletion_mode` (String) must be one of ["deleted_field"]
-
-
-
-### Nested Schema for `configuration.collection.deletions.source_fauna_update_collection_deletion_mode_enabled`
-
-Required:
+
+### Nested Schema for `configuration.collection.deletions.enabled`
-- `deletion_mode` (String) must be one of ["ignore"]
-
-
-
-### Nested Schema for `configuration.collection.deletions.source_fauna_update_collection_deletion_mode_enabled`
-
-Required:
+Optional:
-- `column` (String) Name of the "deleted at" column.
-- `deletion_mode` (String) must be one of ["deleted_field"]
+- `column` (String) Default: "deleted_at"
+Name of the "deleted at" column.
diff --git a/docs/resources/source_file.md b/docs/resources/source_file.md
new file mode 100644
index 000000000..fbc5be7ca
--- /dev/null
+++ b/docs/resources/source_file.md
@@ -0,0 +1,164 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_source_file Resource - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ SourceFile Resource
+---
+
+# airbyte_source_file (Resource)
+
+SourceFile Resource
+
+## Example Usage
+
+```terraform
+resource "airbyte_source_file" "my_source_file" {
+ configuration = {
+ dataset_name = "...my_dataset_name..."
+ format = "jsonl"
+ provider = {
+ az_blob_azure_blob_storage = {
+ sas_token = "...my_sas_token..."
+ shared_key = "...my_shared_key..."
+ storage_account = "...my_storage_account..."
+ }
+ }
+ reader_options = "{\"sep\": \"\t\", \"header\": 0, \"names\": [\"column1\", \"column2\"] }"
+ url = "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv"
+ }
+ definition_id = "6c5d5cf5-0fbf-4713-864e-d5bf6d67306c"
+ name = "Floyd Goyette"
+ secret_id = "...my_secret_id..."
+ workspace_id = "68cfaeff-480d-4f14-bee1-0f8279e427b2"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `name` (String) Name of the source e.g. dev-mysql-instance.
+- `workspace_id` (String)
+
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
+
+### Read-Only
+
+- `source_id` (String)
+- `source_type` (String)
+
+
+### Nested Schema for `configuration`
+
+Required:
+
+- `dataset_name` (String) The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
+- `provider` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider))
+- `url` (String) The URL path to access the file which should be replicated.
+
+Optional:
+
+- `format` (String) must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "feather", "parquet", "yaml"]; Default: "csv"
+The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
+- `reader_options` (String) This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
+
+
+### Nested Schema for `configuration.provider`
+
+Optional:
+
+- `az_blob_azure_blob_storage` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--az_blob_azure_blob_storage))
+- `gcs_google_cloud_storage` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--gcs_google_cloud_storage))
+- `https_public_web` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--https_public_web))
+- `s3_amazon_web_services` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--s3_amazon_web_services))
+- `scp_secure_copy_protocol` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--scp_secure_copy_protocol))
+- `sftp_secure_file_transfer_protocol` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--sftp_secure_file_transfer_protocol))
+- `ssh_secure_shell` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--ssh_secure_shell))
+
+
+### Nested Schema for `configuration.provider.az_blob_azure_blob_storage`
+
+Required:
+
+- `storage_account` (String) The globally unique name of the storage account that the desired blob sits within. See here for more details.
+
+Optional:
+
+- `sas_token` (String, Sensitive) To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
+- `shared_key` (String, Sensitive) To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
+
+
+
+### Nested Schema for `configuration.provider.gcs_google_cloud_storage`
+
+Optional:
+
+- `service_account_json` (String) In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.
+
+
+
+### Nested Schema for `configuration.provider.https_public_web`
+
+Optional:
+
+- `user_agent` (Boolean) Default: false
+Add User-Agent to request
+
+
+
+### Nested Schema for `configuration.provider.s3_amazon_web_services`
+
+Optional:
+
+- `aws_access_key_id` (String, Sensitive) In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
+- `aws_secret_access_key` (String, Sensitive) In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
+
+
+
+### Nested Schema for `configuration.provider.scp_secure_copy_protocol`
+
+Required:
+
+- `host` (String)
+- `user` (String)
+
+Optional:
+
+- `password` (String, Sensitive)
+- `port` (String) Default: "22"
+
+
+
+### Nested Schema for `configuration.provider.sftp_secure_file_transfer_protocol`
+
+Required:
+
+- `host` (String)
+- `user` (String)
+
+Optional:
+
+- `password` (String, Sensitive)
+- `port` (String) Default: "22"
+
+
+
+### Nested Schema for `configuration.provider.ssh_secure_shell`
+
+Required:
+
+- `host` (String)
+- `user` (String)
+
+Optional:
+
+- `password` (String, Sensitive)
+- `port` (String) Default: "22"
+
+
diff --git a/docs/resources/source_file_secure.md b/docs/resources/source_file_secure.md
deleted file mode 100644
index 79d6ae22a..000000000
--- a/docs/resources/source_file_secure.md
+++ /dev/null
@@ -1,283 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_file_secure Resource - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceFileSecure Resource
----
-
-# airbyte_source_file_secure (Resource)
-
-SourceFileSecure Resource
-
-## Example Usage
-
-```terraform
-resource "airbyte_source_file_secure" "my_source_filesecure" {
- configuration = {
- dataset_name = "...my_dataset_name..."
- format = "excel_binary"
- provider = {
- source_file_secure_storage_provider_az_blob_azure_blob_storage = {
- sas_token = "...my_sas_token..."
- shared_key = "...my_shared_key..."
- storage = "AzBlob"
- storage_account = "...my_storage_account..."
- }
- }
- reader_options = "{\"sep\": \" \"}"
- source_type = "file-secure"
- url = "gs://my-google-bucket/data.csv"
- }
- name = "Freddie Von V"
- secret_id = "...my_secret_id..."
- workspace_id = "76c6ab21-d29d-4fc9-8d6f-ecd799390066"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `source_id` (String)
-- `source_type` (String)
-
-
-### Nested Schema for `configuration`
-
-Required:
-
-- `dataset_name` (String) The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
-- `format` (String) must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "feather", "parquet", "yaml"]
-The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
-- `provider` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider))
-- `source_type` (String) must be one of ["file-secure"]
-- `url` (String) The URL path to access the file which should be replicated.
-
-Optional:
-
-- `reader_options` (String) This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
-
-
-### Nested Schema for `configuration.provider`
-
-Optional:
-
-- `source_file_secure_storage_provider_az_blob_azure_blob_storage` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_az_blob_azure_blob_storage))
-- `source_file_secure_storage_provider_gcs_google_cloud_storage` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_gcs_google_cloud_storage))
-- `source_file_secure_storage_provider_https_public_web` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_https_public_web))
-- `source_file_secure_storage_provider_s3_amazon_web_services` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_s3_amazon_web_services))
-- `source_file_secure_storage_provider_scp_secure_copy_protocol` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_scp_secure_copy_protocol))
-- `source_file_secure_storage_provider_sftp_secure_file_transfer_protocol` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_sftp_secure_file_transfer_protocol))
-- `source_file_secure_storage_provider_ssh_secure_shell` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_storage_provider_ssh_secure_shell))
-- `source_file_secure_update_storage_provider_az_blob_azure_blob_storage` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_az_blob_azure_blob_storage))
-- `source_file_secure_update_storage_provider_gcs_google_cloud_storage` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_gcs_google_cloud_storage))
-- `source_file_secure_update_storage_provider_https_public_web` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_https_public_web))
-- `source_file_secure_update_storage_provider_s3_amazon_web_services` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_s3_amazon_web_services))
-- `source_file_secure_update_storage_provider_scp_secure_copy_protocol` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_scp_secure_copy_protocol))
-- `source_file_secure_update_storage_provider_sftp_secure_file_transfer_protocol` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_sftp_secure_file_transfer_protocol))
-- `source_file_secure_update_storage_provider_ssh_secure_shell` (Attributes) The storage Provider or Location of the file(s) which should be replicated. (see [below for nested schema](#nestedatt--configuration--provider--source_file_secure_update_storage_provider_ssh_secure_shell))
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_az_blob_azure_blob_storage`
-
-Required:
-
-- `storage` (String) must be one of ["AzBlob"]
-- `storage_account` (String) The globally unique name of the storage account that the desired blob sits within. See here for more details.
-
-Optional:
-
-- `sas_token` (String) To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
-- `shared_key` (String) To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_gcs_google_cloud_storage`
-
-Required:
-
-- `storage` (String) must be one of ["GCS"]
-
-Optional:
-
-- `service_account_json` (String) In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_https_public_web`
-
-Required:
-
-- `storage` (String) must be one of ["HTTPS"]
-
-Optional:
-
-- `user_agent` (Boolean) Add User-Agent to request
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_s3_amazon_web_services`
-
-Required:
-
-- `storage` (String) must be one of ["S3"]
-
-Optional:
-
-- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_scp_secure_copy_protocol`
-
-Required:
-
-- `host` (String)
-- `storage` (String) must be one of ["SCP"]
-- `user` (String)
-
-Optional:
-
-- `password` (String)
-- `port` (String)
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_sftp_secure_file_transfer_protocol`
-
-Required:
-
-- `host` (String)
-- `storage` (String) must be one of ["SFTP"]
-- `user` (String)
-
-Optional:
-
-- `password` (String)
-- `port` (String)
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_storage_provider_ssh_secure_shell`
-
-Required:
-
-- `host` (String)
-- `storage` (String) must be one of ["SSH"]
-- `user` (String)
-
-Optional:
-
-- `password` (String)
-- `port` (String)
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_az_blob_azure_blob_storage`
-
-Required:
-
-- `storage` (String) must be one of ["AzBlob"]
-- `storage_account` (String) The globally unique name of the storage account that the desired blob sits within. See here for more details.
-
-Optional:
-
-- `sas_token` (String) To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
-- `shared_key` (String) To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_gcs_google_cloud_storage`
-
-Required:
-
-- `storage` (String) must be one of ["GCS"]
-
-Optional:
-
-- `service_account_json` (String) In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_https_public_web`
-
-Required:
-
-- `storage` (String) must be one of ["HTTPS"]
-
-Optional:
-
-- `user_agent` (Boolean) Add User-Agent to request
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_s3_amazon_web_services`
-
-Required:
-
-- `storage` (String) must be one of ["S3"]
-
-Optional:
-
-- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_scp_secure_copy_protocol`
-
-Required:
-
-- `host` (String)
-- `storage` (String) must be one of ["SCP"]
-- `user` (String)
-
-Optional:
-
-- `password` (String)
-- `port` (String)
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_sftp_secure_file_transfer_protocol`
-
-Required:
-
-- `host` (String)
-- `storage` (String) must be one of ["SFTP"]
-- `user` (String)
-
-Optional:
-
-- `password` (String)
-- `port` (String)
-
-
-
-### Nested Schema for `configuration.provider.source_file_secure_update_storage_provider_ssh_secure_shell`
-
-Required:
-
-- `host` (String)
-- `storage` (String) must be one of ["SSH"]
-- `user` (String)
-
-Optional:
-
-- `password` (String)
-- `port` (String)
-
-
diff --git a/docs/resources/source_firebolt.md b/docs/resources/source_firebolt.md
index 6d39095c2..3e34c8954 100644
--- a/docs/resources/source_firebolt.md
+++ b/docs/resources/source_firebolt.md
@@ -15,17 +15,17 @@ SourceFirebolt Resource
```terraform
resource "airbyte_source_firebolt" "my_source_firebolt" {
configuration = {
- account = "...my_account..."
- database = "...my_database..."
- engine = "...my_engine..."
- host = "api.app.firebolt.io"
- password = "...my_password..."
- source_type = "firebolt"
- username = "username@email.com"
+ account = "...my_account..."
+ database = "...my_database..."
+ engine = "...my_engine..."
+ host = "api.app.firebolt.io"
+ password = "...my_password..."
+ username = "username@email.com"
}
- name = "Donna Abshire"
- secret_id = "...my_secret_id..."
- workspace_id = "5338cec0-86fa-421e-9152-cb3119167b8e"
+ definition_id = "e1d4b428-b10c-462a-aeab-6a16bc0f1be5"
+ name = "Laurie Kuhlman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7324c6ca-7fcd-4ac6-b878-54b69c42e8b9"
}
```
@@ -35,11 +35,12 @@ resource "airbyte_source_firebolt" "my_source_firebolt" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -53,8 +54,7 @@ resource "airbyte_source_firebolt" "my_source_firebolt" {
Required:
- `database` (String) The database to connect to.
-- `password` (String) Firebolt password.
-- `source_type` (String) must be one of ["firebolt"]
+- `password` (String, Sensitive) Firebolt password.
- `username` (String) Firebolt email address you use to login.
Optional:
diff --git a/docs/resources/source_freshcaller.md b/docs/resources/source_freshcaller.md
index 5201870a0..63b1cc885 100644
--- a/docs/resources/source_freshcaller.md
+++ b/docs/resources/source_freshcaller.md
@@ -17,14 +17,14 @@ resource "airbyte_source_freshcaller" "my_source_freshcaller" {
configuration = {
api_key = "...my_api_key..."
domain = "snaptravel"
- requests_per_minute = 2
- source_type = "freshcaller"
+ requests_per_minute = 7
start_date = "2022-01-01T12:00:00Z"
- sync_lag_minutes = 9
+ sync_lag_minutes = 2
}
- name = "Kenneth Friesen IV"
- secret_id = "...my_secret_id..."
- workspace_id = "d6d364ff-d455-4906-9126-3d48e935c2c9"
+ definition_id = "c06fe5a2-e94e-4ff2-91ad-fc721dd1f802"
+ name = "Margarita Nitzsche"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9660c93e-b114-448c-9cd3-afe5ef85381e"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_freshcaller" "my_source_freshcaller" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,14 +52,13 @@ resource "airbyte_source_freshcaller" "my_source_freshcaller" {
Required:
-- `api_key` (String) Freshcaller API Key. See the docs for more information on how to obtain this key.
+- `api_key` (String, Sensitive) Freshcaller API Key. See the docs for more information on how to obtain this key.
- `domain` (String) Used to construct Base URL for the Freshcaller APIs
-- `source_type` (String) must be one of ["freshcaller"]
-- `start_date` (String) UTC date and time. Any data created after this date will be replicated.
Optional:
- `requests_per_minute` (Number) The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account.
+- `start_date` (String) UTC date and time. Any data created after this date will be replicated.
- `sync_lag_minutes` (Number) Lag in minutes for each sync, i.e., at time T, data for the time range [prev_sync_time, T-30] will be fetched
diff --git a/docs/resources/source_freshdesk.md b/docs/resources/source_freshdesk.md
index ac7a22698..34885805c 100644
--- a/docs/resources/source_freshdesk.md
+++ b/docs/resources/source_freshdesk.md
@@ -17,13 +17,13 @@ resource "airbyte_source_freshdesk" "my_source_freshdesk" {
configuration = {
api_key = "...my_api_key..."
domain = "myaccount.freshdesk.com"
- requests_per_minute = 10
- source_type = "freshdesk"
+ requests_per_minute = 1
start_date = "2020-12-01T00:00:00Z"
}
- name = "Dale Altenwerth"
- secret_id = "...my_secret_id..."
- workspace_id = "3e43202d-7216-4576-9066-41870d9d21f9"
+ definition_id = "9fe1bd22-2412-41e6-b15b-e306a4e83994"
+ name = "Frances Farrell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c75d4c70-b588-42c8-81a0-878bfdf7e2fa"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_freshdesk" "my_source_freshdesk" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,9 +51,8 @@ resource "airbyte_source_freshdesk" "my_source_freshdesk" {
Required:
-- `api_key` (String) Freshdesk API Key. See the docs for more information on how to obtain this key.
+- `api_key` (String, Sensitive) Freshdesk API Key. See the docs for more information on how to obtain this key.
- `domain` (String) Freshdesk domain
-- `source_type` (String) must be one of ["freshdesk"]
Optional:
diff --git a/docs/resources/source_freshsales.md b/docs/resources/source_freshsales.md
index 01138f18f..1a533607c 100644
--- a/docs/resources/source_freshsales.md
+++ b/docs/resources/source_freshsales.md
@@ -17,11 +17,11 @@ resource "airbyte_source_freshsales" "my_source_freshsales" {
configuration = {
api_key = "...my_api_key..."
domain_name = "mydomain.myfreshworks.com"
- source_type = "freshsales"
}
- name = "Gustavo Adams DDS"
- secret_id = "...my_secret_id..."
- workspace_id = "4ecc11a0-8364-4290-a8b8-502a55e7f73b"
+ definition_id = "4a63623e-34bb-4a48-ad6d-0eaf7f54c7c3"
+ name = "Shelly Wolf"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b0a3dd00-07da-4ef7-b0c8-1f95c5b8dd2d"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_freshsales" "my_source_freshsales" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_freshsales" "my_source_freshsales" {
Required:
-- `api_key` (String) Freshsales API Key. See here. The key is case sensitive.
+- `api_key` (String, Sensitive) Freshsales API Key. See here. The key is case sensitive.
- `domain_name` (String) The Name of your Freshsales domain
-- `source_type` (String) must be one of ["freshsales"]
diff --git a/docs/resources/source_gainsight_px.md b/docs/resources/source_gainsight_px.md
index b61fefa57..b50e81e34 100644
--- a/docs/resources/source_gainsight_px.md
+++ b/docs/resources/source_gainsight_px.md
@@ -15,12 +15,12 @@ SourceGainsightPx Resource
```terraform
resource "airbyte_source_gainsight_px" "my_source_gainsightpx" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "gainsight-px"
+ api_key = "...my_api_key..."
}
- name = "Hugh Goodwin"
- secret_id = "...my_secret_id..."
- workspace_id = "320a319f-4bad-4f94-bc9a-867bc4242666"
+ definition_id = "32b37f6f-ec5c-4d0a-8fda-52f69543b862"
+ name = "Cristina McKenzie"
+ secret_id = "...my_secret_id..."
+ workspace_id = "50480aaa-f77a-4e08-bd2c-af83f045910a"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_gainsight_px" "my_source_gainsightpx" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_gainsight_px" "my_source_gainsightpx" {
Required:
-- `api_key` (String) The Aptrinsic API Key which is recieved from the dashboard settings (ref - https://app.aptrinsic.com/settings/api-keys)
-- `source_type` (String) must be one of ["gainsight-px"]
+- `api_key` (String, Sensitive) The Aptrinsic API Key which is received from the dashboard settings (ref - https://app.aptrinsic.com/settings/api-keys)
diff --git a/docs/resources/source_gcs.md b/docs/resources/source_gcs.md
index 36995e4c2..e5add9054 100644
--- a/docs/resources/source_gcs.md
+++ b/docs/resources/source_gcs.md
@@ -15,14 +15,53 @@ SourceGcs Resource
```terraform
resource "airbyte_source_gcs" "my_source_gcs" {
configuration = {
- gcs_bucket = "...my_gcs_bucket..."
- gcs_path = "...my_gcs_path..."
- service_account = "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID, \"private_key_id\": YOUR_PRIVATE_KEY, ... }"
- source_type = "gcs"
+ bucket = "...my_bucket..."
+ service_account = "...my_service_account..."
+ start_date = "2021-01-01T00:00:00.000000Z"
+ streams = [
+ {
+ days_to_sync_if_history_is_full = 3
+ format = {
+ source_gcs_csv_format = {
+ delimiter = "...my_delimiter..."
+ double_quote = false
+ encoding = "...my_encoding..."
+ escape_char = "...my_escape_char..."
+ false_values = [
+ "...",
+ ]
+ header_definition = {
+ source_gcs_autogenerated = {}
+ }
+ inference_type = "None"
+ null_values = [
+ "...",
+ ]
+ quote_char = "...my_quote_char..."
+ skip_rows_after_header = 3
+ skip_rows_before_header = 5
+ strings_can_be_null = false
+ true_values = [
+ "...",
+ ]
+ }
+ }
+ globs = [
+ "...",
+ ]
+ input_schema = "...my_input_schema..."
+ legacy_prefix = "...my_legacy_prefix..."
+ name = "Guy Langosh III"
+ primary_key = "...my_primary_key..."
+ schemaless = false
+ validation_policy = "Wait for Discover"
+ },
+ ]
}
- name = "Olga Blanda"
- secret_id = "...my_secret_id..."
- workspace_id = "dca8ef51-fcb4-4c59-bec1-2cdaad0ec7af"
+ definition_id = "a4e6d7c2-fcaa-4386-9a1d-2ddf0351c49c"
+ name = "Leah Jerde Jr."
+ secret_id = "...my_secret_id..."
+ workspace_id = "51741425-e4d3-48a3-8ea5-6cdfa27fbf62"
}
```
@@ -31,12 +70,15 @@ resource "airbyte_source_gcs" "my_source_gcs" {
### Required
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `configuration` (Attributes) NOTE: When this Spec is changed, legacy_config_transformer.py must also be
+modified to uptake the changes because it is responsible for converting
+legacy GCS configs into file based configs using the File-Based CDK. (see [below for nested schema](#nestedatt--configuration))
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,9 +91,91 @@ resource "airbyte_source_gcs" "my_source_gcs" {
Required:
-- `gcs_bucket` (String) GCS bucket name
-- `gcs_path` (String) GCS path to data
+- `bucket` (String) Name of the GCS bucket where the file(s) exist.
- `service_account` (String) Enter your Google Cloud service account key in JSON format
-- `source_type` (String) must be one of ["gcs"]
+- `streams` (Attributes List) Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. (see [below for nested schema](#nestedatt--configuration--streams))
+
+Optional:
+
+- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
+
+
+### Nested Schema for `configuration.streams`
+
+Required:
+
+- `format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format))
+- `name` (String) The name of the stream.
+
+Optional:
+
+- `days_to_sync_if_history_is_full` (Number) Default: 3
+When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
+- `globs` (List of String) The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
+- `input_schema` (String) The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
+- `legacy_prefix` (String) The path prefix configured in previous versions of the GCS connector. This option is deprecated in favor of a single glob.
+- `primary_key` (String, Sensitive) The column or columns (for a composite key) that serves as the unique identifier of a record.
+- `schemaless` (Boolean) Default: false
+When enabled, syncs will not validate or structure records against the stream's schema.
+- `validation_policy` (String) must be one of ["Emit Record", "Skip Record", "Wait for Discover"]; Default: "Emit Record"
+The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+
+
+### Nested Schema for `configuration.streams.format`
+
+Optional:
+
+- `csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--csv_format))
+
+
+### Nested Schema for `configuration.streams.format.csv_format`
+
+Optional:
+
+- `delimiter` (String) Default: ","
+The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
+- `double_quote` (Boolean) Default: true
+Whether two quotes in a quoted CSV value denote a single quote in the data.
+- `encoding` (String) Default: "utf8"
+The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
+- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank.
+- `false_values` (List of String) A set of case-sensitive strings that should be interpreted as false values.
+- `header_definition` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--csv_format--header_definition))
+- `inference_type` (String) must be one of ["None", "Primitive Types Only"]; Default: "None"
+How to infer the types of the columns. If none, inference default to strings.
+- `null_values` (List of String) A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
+- `quote_char` (String) Default: "\""
+The character used for quoting CSV values. To disallow quoting, make this field blank.
+- `skip_rows_after_header` (Number) Default: 0
+The number of rows to skip after the header row.
+- `skip_rows_before_header` (Number) Default: 0
+The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
+- `strings_can_be_null` (Boolean) Default: true
+Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
+- `true_values` (List of String) A set of case-sensitive strings that should be interpreted as true values.
+
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition`
+
+Optional:
+
+- `autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--csv_format--header_definition--autogenerated))
+- `from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--csv_format--header_definition--from_csv))
+- `user_provided` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--csv_format--header_definition--user_provided))
+
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition.autogenerated`
+
+
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition.from_csv`
+
+
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition.user_provided`
+
+Required:
+
+- `column_names` (List of String) The column names that will be used while emitting the CSV records
diff --git a/docs/resources/source_getlago.md b/docs/resources/source_getlago.md
index ffb27eb2c..d7be0dee3 100644
--- a/docs/resources/source_getlago.md
+++ b/docs/resources/source_getlago.md
@@ -15,12 +15,13 @@ SourceGetlago Resource
```terraform
resource "airbyte_source_getlago" "my_source_getlago" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "getlago"
+ api_key = "...my_api_key..."
+ api_url = "...my_api_url..."
}
- name = "Irving Rohan"
- secret_id = "...my_secret_id..."
- workspace_id = "0df448a4-7f93-490c-9888-0983dabf9ef3"
+ definition_id = "25b4bae6-1112-4211-be87-b490ecc6bf75"
+ name = "Mrs. Willie Bins"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c803c831-1a97-4a1a-a894-9629432a02ce"
}
```
@@ -30,11 +31,12 @@ resource "airbyte_source_getlago" "my_source_getlago" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +49,11 @@ resource "airbyte_source_getlago" "my_source_getlago" {
Required:
-- `api_key` (String) Your API Key. See here.
-- `source_type` (String) must be one of ["getlago"]
+- `api_key` (String, Sensitive) Your API Key. See here.
+
+Optional:
+
+- `api_url` (String) Default: "https://api.getlago.com/api/v1"
+Your Lago API URL
diff --git a/docs/resources/source_github.md b/docs/resources/source_github.md
index fe3191ddf..4272f0335 100644
--- a/docs/resources/source_github.md
+++ b/docs/resources/source_github.md
@@ -15,23 +15,29 @@ SourceGithub Resource
```terraform
resource "airbyte_source_github" "my_source_github" {
configuration = {
- branch = "airbytehq/airbyte/master airbytehq/airbyte/my-branch"
+ api_url = "https://github.company.org"
+ branch = "airbytehq/airbyte/master airbytehq/airbyte/my-branch"
+ branches = [
+ "...",
+ ]
credentials = {
- source_github_authentication_o_auth = {
+ o_auth = {
access_token = "...my_access_token..."
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- option_title = "OAuth Credentials"
}
}
+ repositories = [
+ "...",
+ ]
repository = "airbytehq/airbyte"
requests_per_hour = 10
- source_type = "github"
start_date = "2021-03-01T00:00:00Z"
}
- name = "Van Kuhlman IV"
- secret_id = "...my_secret_id..."
- workspace_id = "9af4d357-24cd-4b0f-8d28-1187d56844ed"
+ definition_id = "e017f905-2f20-440e-8692-82dd6a12cb01"
+ name = "Bennie Stroman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "aeeda058-2852-4791-bedf-cf9c9058e69d"
}
```
@@ -41,11 +47,12 @@ resource "airbyte_source_github" "my_source_github" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -58,75 +65,45 @@ resource "airbyte_source_github" "my_source_github" {
Required:
-- `repository` (String) Space-delimited list of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories.
-- `source_type` (String) must be one of ["github"]
-- `start_date` (String) The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info
+- `credentials` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials))
+- `repositories` (List of String) List of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories.
Optional:
-- `branch` (String) Space-delimited list of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.
-- `credentials` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials))
+- `api_url` (String) Default: "https://api.github.com/"
+Please enter your basic URL from self-hosted GitHub instance or leave it empty to use GitHub.
+- `branch` (String) (DEPRECATED) Space-delimited list of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.
+- `branches` (List of String) List of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.
+- `repository` (String) (DEPRECATED) Space-delimited list of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories.
- `requests_per_hour` (Number) The GitHub API allows for a maximum of 5000 requests per hour (15000 for Github Enterprise). You can specify a lower value to limit your use of the API quota.
+- `start_date` (String) The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. If the date is not set, all data will be replicated. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info
### Nested Schema for `configuration.credentials`
Optional:
-- `source_github_authentication_o_auth` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials--source_github_authentication_o_auth))
-- `source_github_authentication_personal_access_token` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials--source_github_authentication_personal_access_token))
-- `source_github_update_authentication_o_auth` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials--source_github_update_authentication_o_auth))
-- `source_github_update_authentication_personal_access_token` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials--source_github_update_authentication_personal_access_token))
+- `o_auth` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials--o_auth))
+- `personal_access_token` (Attributes) Choose how to authenticate to GitHub (see [below for nested schema](#nestedatt--configuration--credentials--personal_access_token))
-
-### Nested Schema for `configuration.credentials.source_github_authentication_o_auth`
+
+### Nested Schema for `configuration.credentials.o_auth`
Required:
-- `access_token` (String) OAuth access token
+- `access_token` (String, Sensitive) OAuth access token
Optional:
- `client_id` (String) OAuth Client Id
- `client_secret` (String) OAuth Client secret
-- `option_title` (String) must be one of ["OAuth Credentials"]
-
-
-
-### Nested Schema for `configuration.credentials.source_github_authentication_personal_access_token`
-
-Required:
-- `personal_access_token` (String) Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with ","
-
-Optional:
-
-- `option_title` (String) must be one of ["PAT Credentials"]
-
-
-### Nested Schema for `configuration.credentials.source_github_update_authentication_o_auth`
+
+### Nested Schema for `configuration.credentials.personal_access_token`
Required:
-- `access_token` (String) OAuth access token
-
-Optional:
-
-- `client_id` (String) OAuth Client Id
-- `client_secret` (String) OAuth Client secret
-- `option_title` (String) must be one of ["OAuth Credentials"]
-
-
-
-### Nested Schema for `configuration.credentials.source_github_update_authentication_personal_access_token`
-
-Required:
-
-- `personal_access_token` (String) Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with ","
-
-Optional:
-
-- `option_title` (String) must be one of ["PAT Credentials"]
+- `personal_access_token` (String, Sensitive) Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with ","
diff --git a/docs/resources/source_gitlab.md b/docs/resources/source_gitlab.md
index 169076e81..60cbba234 100644
--- a/docs/resources/source_gitlab.md
+++ b/docs/resources/source_gitlab.md
@@ -15,25 +15,30 @@ SourceGitlab Resource
```terraform
resource "airbyte_source_gitlab" "my_source_gitlab" {
configuration = {
- api_url = "https://gitlab.company.org"
+ api_url = "gitlab.com"
credentials = {
- source_gitlab_authorization_method_o_auth2_0 = {
+ source_gitlab_o_auth2_0 = {
access_token = "...my_access_token..."
- auth_type = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
- token_expiry_date = "2021-06-26T03:36:42.239Z"
+ token_expiry_date = "2022-01-24T13:56:19.954Z"
}
}
- groups = "airbyte.io"
- projects = "airbyte.io/documentation"
- source_type = "gitlab"
- start_date = "2021-03-01T00:00:00Z"
+ groups = "airbyte.io"
+ groups_list = [
+ "...",
+ ]
+ projects = "airbyte.io/documentation"
+ projects_list = [
+ "...",
+ ]
+ start_date = "2021-03-01T00:00:00Z"
}
- name = "Frank Keeling"
- secret_id = "...my_secret_id..."
- workspace_id = "628bdfc2-032b-46c8-b992-3b7e13584f7a"
+ definition_id = "e4cb55c6-95e2-4f08-ab76-e351cef20de4"
+ name = "Winston Schroeder"
+ secret_id = "...my_secret_id..."
+ workspace_id = "2b42c84c-d8bc-4607-ae71-4fbf0cfd3aed"
}
```
@@ -43,11 +48,12 @@ resource "airbyte_source_gitlab" "my_source_gitlab" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -61,78 +67,42 @@ resource "airbyte_source_gitlab" "my_source_gitlab" {
Required:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["gitlab"]
-- `start_date` (String) The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
Optional:
-- `api_url` (String) Please enter your basic URL from GitLab instance.
-- `groups` (String) Space-delimited list of groups. e.g. airbyte.io.
-- `projects` (String) Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.
+- `api_url` (String) Default: "gitlab.com"
+Please enter your basic URL from GitLab instance.
+- `groups` (String) [DEPRECATED] Space-delimited list of groups. e.g. airbyte.io.
+- `groups_list` (List of String) List of groups. e.g. airbyte.io.
+- `projects` (String) [DEPRECATED] Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.
+- `projects_list` (List of String) List of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.
+- `start_date` (String) The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data will be replicated. All data generated after this date will be replicated.
### Nested Schema for `configuration.credentials`
Optional:
-- `source_gitlab_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_gitlab_authorization_method_o_auth2_0))
-- `source_gitlab_authorization_method_private_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_gitlab_authorization_method_private_token))
-- `source_gitlab_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_gitlab_update_authorization_method_o_auth2_0))
-- `source_gitlab_update_authorization_method_private_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_gitlab_update_authorization_method_private_token))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
+- `private_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--private_token))
-
-### Nested Schema for `configuration.credentials.source_gitlab_authorization_method_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) Access Token for making authenticated requests.
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
- `client_id` (String) The API ID of the Gitlab developer application.
- `client_secret` (String) The API Secret the Gitlab developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
+- `refresh_token` (String, Sensitive) The key to refresh the expired access_token.
+- `token_expiry_date` (String, Sensitive) The date-time when the access token should be refreshed.
-Optional:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
-
-
-
-### Nested Schema for `configuration.credentials.source_gitlab_authorization_method_private_token`
-
-Required:
-
-- `access_token` (String) Log into your Gitlab account and then generate a personal Access Token.
-
-Optional:
-
-- `auth_type` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_gitlab_update_authorization_method_o_auth2_0`
-Required:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `client_id` (String) The API ID of the Gitlab developer application.
-- `client_secret` (String) The API Secret the Gitlab developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-Optional:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
-
-
-
-### Nested Schema for `configuration.credentials.source_gitlab_update_authorization_method_private_token`
+
+### Nested Schema for `configuration.credentials.private_token`
Required:
-- `access_token` (String) Log into your Gitlab account and then generate a personal Access Token.
-
-Optional:
-
-- `auth_type` (String) must be one of ["access_token"]
+- `access_token` (String, Sensitive) Log into your Gitlab account and then generate a personal Access Token.
diff --git a/docs/resources/source_glassfrog.md b/docs/resources/source_glassfrog.md
index 9d864489f..6b2e89ce5 100644
--- a/docs/resources/source_glassfrog.md
+++ b/docs/resources/source_glassfrog.md
@@ -15,12 +15,12 @@ SourceGlassfrog Resource
```terraform
resource "airbyte_source_glassfrog" "my_source_glassfrog" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "glassfrog"
+ api_key = "...my_api_key..."
}
- name = "Carl Davis"
- secret_id = "...my_secret_id..."
- workspace_id = "891f82ce-1157-4172-b053-77dcfa89df97"
+ definition_id = "54ef24d0-de80-4e3d-b905-02015d2de4b8"
+ name = "Jonathon Erdman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "2b3a27b0-b342-4a10-bbc4-7ca706139037"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_glassfrog" "my_source_glassfrog" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_glassfrog" "my_source_glassfrog" {
Required:
-- `api_key` (String) API key provided by Glassfrog
-- `source_type` (String) must be one of ["glassfrog"]
+- `api_key` (String, Sensitive) API key provided by Glassfrog
diff --git a/docs/resources/source_gnews.md b/docs/resources/source_gnews.md
index 43bff1408..e9dbdfa0b 100644
--- a/docs/resources/source_gnews.md
+++ b/docs/resources/source_gnews.md
@@ -16,25 +16,25 @@ SourceGnews Resource
resource "airbyte_source_gnews" "my_source_gnews" {
configuration = {
api_key = "...my_api_key..."
- country = "ie"
+ country = "es"
end_date = "2022-08-21 16:27:09"
in = [
- "content",
+ "description",
]
- language = "fr"
+ language = "ta"
nullable = [
- "description",
+ "content",
]
- query = "Apple AND NOT iPhone"
- sortby = "publishedAt"
- source_type = "gnews"
+ query = "Intel AND (i7 OR i9)"
+ sortby = "relevance"
start_date = "2022-08-21 16:27:09"
top_headlines_query = "Apple AND NOT iPhone"
- top_headlines_topic = "business"
+ top_headlines_topic = "world"
}
- name = "Katrina Considine"
- secret_id = "...my_secret_id..."
- workspace_id = "c3ddc5f1-11de-4a10-a6d5-41a4d190feb2"
+ definition_id = "df3c14a3-49fd-4e89-ab27-6cbad00caee1"
+ name = "Sadie Gleichner"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5b57e54a-27b6-417a-812e-6bf68e1922df"
}
```
@@ -44,11 +44,12 @@ resource "airbyte_source_gnews" "my_source_gnews" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -61,7 +62,7 @@ resource "airbyte_source_gnews" "my_source_gnews" {
Required:
-- `api_key` (String) API Key
+- `api_key` (String, Sensitive) API Key
- `query` (String) This parameter allows you to specify your search keywords to find the news articles you are looking for. The keywords will be used to return the most relevant articles. It is possible to use logical operators with keywords. - Phrase Search Operator: This operator allows you to make an exact search. Keywords surrounded by
quotation marks are used to search for articles with the exact same keyword sequence.
For example the query: "Apple iPhone" will return articles matching at least once this sequence of keywords.
@@ -76,7 +77,6 @@ Required:
specified keywords. To use it, you need to add NOT in front of each word or phrase surrounded by quotes.
For example the query: Apple NOT iPhone will return all articles matching the keyword Apple but not the keyword
iPhone
-- `source_type` (String) must be one of ["gnews"]
Optional:
diff --git a/docs/resources/source_google_ads.md b/docs/resources/source_google_ads.md
index 0d20ad3dc..fd66b76e0 100644
--- a/docs/resources/source_google_ads.md
+++ b/docs/resources/source_google_ads.md
@@ -32,12 +32,12 @@ resource "airbyte_source_google_ads" "my_source_googleads" {
customer_id = "6783948572,5839201945"
end_date = "2017-01-30"
login_customer_id = "7349206847"
- source_type = "google-ads"
start_date = "2017-01-25"
}
- name = "Dr. Forrest Roob"
- secret_id = "...my_secret_id..."
- workspace_id = "bddb4847-08fb-44e3-91e6-bc158c4c4e54"
+ definition_id = "14313a52-3140-431f-97b8-2b3c164c1950"
+ name = "Dr. Matt Feeney"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ecd9b5a7-5a7c-45fc-a1d7-22b310b676fb"
}
```
@@ -47,11 +47,12 @@ resource "airbyte_source_google_ads" "my_source_googleads" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -66,11 +67,11 @@ Required:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
- `customer_id` (String) Comma-separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. For detailed instructions on finding this value, refer to our documentation.
-- `source_type` (String) must be one of ["google-ads"]
Optional:
-- `conversion_window_days` (Number) A conversion window is the number of days after an ad interaction (such as an ad click or video view) during which a conversion, such as a purchase, is recorded in Google Ads. For more information, see Google's documentation.
+- `conversion_window_days` (Number) Default: 14
+A conversion window is the number of days after an ad interaction (such as an ad click or video view) during which a conversion, such as a purchase, is recorded in Google Ads. For more information, see Google's documentation.
- `custom_queries` (Attributes List) (see [below for nested schema](#nestedatt--configuration--custom_queries))
- `end_date` (String) UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set)
- `login_customer_id` (String) If your access to the customer account is through a manager account, this field is required, and must be set to the 10-digit customer ID of the manager account. For more information about this field, refer to Google's documentation.
@@ -83,12 +84,12 @@ Required:
- `client_id` (String) The Client ID of your Google Ads developer application. For detailed instructions on finding this value, refer to our documentation.
- `client_secret` (String) The Client Secret of your Google Ads developer application. For detailed instructions on finding this value, refer to our documentation.
-- `developer_token` (String) The Developer Token granted by Google to use their APIs. For detailed instructions on finding this value, refer to our documentation.
-- `refresh_token` (String) The token used to obtain a new Access Token. For detailed instructions on finding this value, refer to our documentation.
+- `developer_token` (String, Sensitive) The Developer Token granted by Google to use their APIs. For detailed instructions on finding this value, refer to our documentation.
+- `refresh_token` (String, Sensitive) The token used to obtain a new Access Token. For detailed instructions on finding this value, refer to our documentation.
Optional:
-- `access_token` (String) The Access Token for making authenticated requests. For detailed instructions on finding this value, refer to our documentation.
+- `access_token` (String, Sensitive) The Access Token for making authenticated requests. For detailed instructions on finding this value, refer to our documentation.
diff --git a/docs/resources/source_google_analytics_data_api.md b/docs/resources/source_google_analytics_data_api.md
index cbd825a38..395884245 100644
--- a/docs/resources/source_google_analytics_data_api.md
+++ b/docs/resources/source_google_analytics_data_api.md
@@ -16,23 +16,80 @@ SourceGoogleAnalyticsDataAPI Resource
resource "airbyte_source_google_analytics_data_api" "my_source_googleanalyticsdataapi" {
configuration = {
credentials = {
- source_google_analytics_data_api_credentials_authenticate_via_google_oauth_ = {
+ authenticate_via_google_oauth = {
access_token = "...my_access_token..."
- auth_type = "Client"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
}
}
- custom_reports = "...my_custom_reports..."
+ custom_reports_array = [
+ {
+ dimension_filter = {
+ and_group = {
+ expressions = [
+ {
+ field_name = "...my_field_name..."
+ filter = {
+ source_google_analytics_data_api_update_schemas_custom_reports_array_between_filter = {
+ from_value = {
+ source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter_1_expressions_double_value = {
+ value = 45.05
+ }
+ }
+ to_value = {
+ source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter_1_expressions_filter_double_value = {
+ value = 22.65
+ }
+ }
+ }
+ }
+ },
+ ]
+ }
+ }
+ dimensions = [
+ "...",
+ ]
+ metric_filter = {
+ source_google_analytics_data_api_update_and_group = {
+ expressions = [
+ {
+ field_name = "...my_field_name..."
+ filter = {
+ source_google_analytics_data_api_schemas_custom_reports_array_metric_filter_between_filter = {
+ from_value = {
+ source_google_analytics_data_api_schemas_custom_reports_array_metric_filter_metrics_filter_1_expressions_filter_double_value = {
+ value = 8.4
+ }
+ }
+ to_value = {
+ source_google_analytics_data_api_schemas_custom_reports_array_metric_filter_metrics_filter_1_double_value = {
+ value = 77.49
+ }
+ }
+ }
+ }
+ },
+ ]
+ }
+ }
+ metrics = [
+ "...",
+ ]
+ name = "Mrs. Mercedes Herman PhD"
+ },
+ ]
date_ranges_start_date = "2021-01-01"
- property_id = "5729978930"
- source_type = "google-analytics-data-api"
- window_in_days = 364
+ property_ids = [
+ "...",
+ ]
+ window_in_days = 60
}
- name = "Juanita Collier"
- secret_id = "...my_secret_id..."
- workspace_id = "0e9b200c-e78a-41bd-8fb7-a0a116ce723d"
+ definition_id = "d4fc0324-2ccd-4276-ba0d-30eb91c3df25"
+ name = "Rodney Goldner"
+ secret_id = "...my_secret_id..."
+ workspace_id = "52dc8258-f30a-4271-83b0-0ec7045956c0"
}
```
@@ -42,11 +99,12 @@ resource "airbyte_source_google_analytics_data_api" "my_source_googleanalyticsda
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -59,77 +117,1228 @@ resource "airbyte_source_google_analytics_data_api" "my_source_googleanalyticsda
Required:
-- `date_ranges_start_date` (String) The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports.
-- `property_id` (String) The Property ID is a unique number assigned to each property in Google Analytics, found in your GA4 property URL. This ID allows the connector to track the specific events associated with your property. Refer to the Google Analytics documentation to locate your property ID.
-- `source_type` (String) must be one of ["google-analytics-data-api"]
+- `property_ids` (List of String) A list of your Property IDs. The Property ID is a unique number assigned to each property in Google Analytics, found in your GA4 property URL. This ID allows the connector to track the specific events associated with your property. Refer to the Google Analytics documentation to locate your property ID.
Optional:
- `credentials` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials))
-- `custom_reports` (String) A JSON array describing the custom reports you want to sync from Google Analytics. See the documentation for more information about the exact format you can use to fill out this field.
-- `window_in_days` (Number) The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. Does not apply to custom Cohort reports. More information is available in the documentation.
+- `custom_reports_array` (Attributes List) You can add your Custom Analytics report by creating one. (see [below for nested schema](#nestedatt--configuration--custom_reports_array))
+- `date_ranges_start_date` (String) The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports.
+- `window_in_days` (Number) Default: 1
+The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. Does not apply to custom Cohort reports. More information is available in the documentation.
### Nested Schema for `configuration.credentials`
Optional:
-- `source_google_analytics_data_api_credentials_authenticate_via_google_oauth` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_data_api_credentials_authenticate_via_google_oauth))
-- `source_google_analytics_data_api_credentials_service_account_key_authentication` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_data_api_credentials_service_account_key_authentication))
-- `source_google_analytics_data_api_update_credentials_authenticate_via_google_oauth` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_data_api_update_credentials_authenticate_via_google_oauth))
-- `source_google_analytics_data_api_update_credentials_service_account_key_authentication` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_data_api_update_credentials_service_account_key_authentication))
+- `authenticate_via_google_oauth` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_google_oauth))
+- `service_account_key_authentication` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--service_account_key_authentication))
-
-### Nested Schema for `configuration.credentials.source_google_analytics_data_api_credentials_authenticate_via_google_oauth`
+
+### Nested Schema for `configuration.credentials.authenticate_via_google_oauth`
Required:
- `client_id` (String) The Client ID of your Google Analytics developer application.
- `client_secret` (String) The Client Secret of your Google Analytics developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
+- `refresh_token` (String, Sensitive) The token for obtaining a new access token.
Optional:
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["Client"]
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
-
-### Nested Schema for `configuration.credentials.source_google_analytics_data_api_credentials_service_account_key_authentication`
+
+### Nested Schema for `configuration.credentials.service_account_key_authentication`
Required:
- `credentials_json` (String) The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide.
+
+
+
+### Nested Schema for `configuration.custom_reports_array`
+
+Required:
+
+- `dimensions` (List of String) A list of dimensions.
+- `metrics` (List of String) A list of metrics.
+- `name` (String) The name of the custom report, this name would be used as stream name.
+
Optional:
-- `auth_type` (String) must be one of ["Service"]
+- `dimension_filter` (Attributes) Dimensions filter (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter))
+- `metric_filter` (Attributes) Metrics filter (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter))
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter`
-
-### Nested Schema for `configuration.credentials.source_google_analytics_data_api_update_credentials_authenticate_via_google_oauth`
+Optional:
+
+- `and_group` (Attributes) The FilterExpressions in andGroup have an AND relationship. (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--and_group))
+- `filter` (Attributes) A primitive filter. In the same FilterExpression, all of the filter's field names need to be either all dimensions. (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--filter))
+- `not_expression` (Attributes) The FilterExpression is NOT of notExpression. (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--not_expression))
+- `or_group` (Attributes) The FilterExpressions in orGroup have an OR relationship. (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group`
Required:
-- `client_id` (String) The Client ID of your Google Analytics developer application.
-- `client_secret` (String) The Client Secret of your Google Analytics developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
+- `expressions` (Attributes List) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions`
+
+Required:
+
+- `field_name` (String)
+- `filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter`
+
+Optional:
+
+- `between_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--between_filter))
+- `in_list_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--in_list_filter))
+- `numeric_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--numeric_filter))
+- `string_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `from_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--from_value))
+- `to_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--to_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value`
Optional:
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["Client"]
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
-
-### Nested Schema for `configuration.credentials.source_google_analytics_data_api_update_credentials_service_account_key_authentication`
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value.int64_value`
Required:
-- `credentials_json` (String) The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide.
+- `value` (String)
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `values` (List of String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `operation` (List of String)
+- `value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `value` (String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+- `match_type` (List of String)
+
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group`
+
+Required:
+
+- `field_name` (String)
+- `filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter`
+
+Optional:
+
+- `between_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--between_filter))
+- `in_list_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--in_list_filter))
+- `numeric_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--numeric_filter))
+- `string_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--string_filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter`
+
+Required:
+
+- `from_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--string_filter--from_value))
+- `to_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--string_filter--to_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter`
+
+Required:
+
+- `values` (List of String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter`
+
+Required:
+
+- `operation` (List of String)
+- `value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--string_filter--value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter.value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--string_filter--value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--filter--string_filter--value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter.value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter.value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.filter.string_filter`
+
+Required:
+
+- `value` (String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+- `match_type` (List of String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group`
+
+Optional:
+
+- `expression` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression`
+
+Required:
+
+- `field_name` (String)
+- `filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter`
+
+Optional:
+
+- `between_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--between_filter))
+- `in_list_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--in_list_filter))
+- `numeric_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--numeric_filter))
+- `string_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--string_filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter`
+
+Required:
+
+- `from_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--string_filter--from_value))
+- `to_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--string_filter--to_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter`
+
+Required:
+
+- `values` (List of String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter`
+
+Required:
+
+- `operation` (List of String)
+- `value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--string_filter--value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter.value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--string_filter--value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expression--filter--string_filter--value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter.value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter.value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expression.filter.string_filter`
+
+Required:
+
+- `value` (String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+- `match_type` (List of String)
+
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group`
+
+Required:
+
+- `expressions` (Attributes List) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions`
+
+Required:
+
+- `field_name` (String)
+- `filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter`
+
+Optional:
+
+- `between_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--between_filter))
+- `in_list_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--in_list_filter))
+- `numeric_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--numeric_filter))
+- `string_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `from_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--from_value))
+- `to_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--to_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `values` (List of String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `operation` (List of String)
+- `value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--dimension_filter--or_group--expressions--filter--string_filter--value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter.value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.dimension_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `value` (String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+- `match_type` (List of String)
+
+
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter`
+
+Optional:
+
+- `and_group` (Attributes) The FilterExpressions in andGroup have an AND relationship. (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--and_group))
+- `filter` (Attributes) A primitive filter. In the same FilterExpression, all of the filter's field names need to be either all metrics. (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--filter))
+- `not_expression` (Attributes) The FilterExpression is NOT of notExpression. (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--not_expression))
+- `or_group` (Attributes) The FilterExpressions in orGroup have an OR relationship. (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group`
+
+Required:
+
+- `expressions` (Attributes List) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions`
+
+Required:
+
+- `field_name` (String)
+- `filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter`
+
+Optional:
+
+- `between_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--between_filter))
+- `in_list_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--in_list_filter))
+- `numeric_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--numeric_filter))
+- `string_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `from_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--from_value))
+- `to_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--to_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `values` (List of String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `operation` (List of String)
+- `value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `value` (String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+- `match_type` (List of String)
+
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group`
+
+Required:
+
+- `field_name` (String)
+- `filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter`
+
+Optional:
+
+- `between_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--between_filter))
+- `in_list_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--in_list_filter))
+- `numeric_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--numeric_filter))
+- `string_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--string_filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter`
+
+Required:
+
+- `from_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--string_filter--from_value))
+- `to_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--string_filter--to_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter`
+
+Required:
+
+- `values` (List of String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter`
+
+Required:
+
+- `operation` (List of String)
+- `value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--string_filter--value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter.value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--string_filter--value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--filter--string_filter--value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter.value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter.value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.filter.string_filter`
+
+Required:
+
+- `value` (String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+- `match_type` (List of String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group`
+
+Optional:
+
+- `expression` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression`
+
+Required:
+
+- `field_name` (String)
+- `filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter`
+
+Optional:
+
+- `between_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--between_filter))
+- `in_list_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--in_list_filter))
+- `numeric_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--numeric_filter))
+- `string_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--string_filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter`
+
+Required:
+
+- `from_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--string_filter--from_value))
+- `to_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--string_filter--to_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter`
+
+Required:
+
+- `values` (List of String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter`
+
+Required:
+
+- `operation` (List of String)
+- `value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--string_filter--value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter.value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--string_filter--value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expression--filter--string_filter--value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter.value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter.value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expression.filter.string_filter`
+
+Required:
+
+- `value` (String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+- `match_type` (List of String)
+
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group`
+
+Required:
+
+- `expressions` (Attributes List) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions`
+
+Required:
+
+- `field_name` (String)
+- `filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter`
+
+Optional:
+
+- `between_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--between_filter))
+- `in_list_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--in_list_filter))
+- `numeric_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--numeric_filter))
+- `string_filter` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `from_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--from_value))
+- `to_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--to_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--to_value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--to_value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.to_value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `values` (List of String)
+
+Optional:
+
+- `case_sensitive` (Boolean)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `operation` (List of String)
+- `value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.value`
+
+Optional:
+
+- `double_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--value--double_value))
+- `int64_value` (Attributes) (see [below for nested schema](#nestedatt--configuration--custom_reports_array--metric_filter--or_group--expressions--filter--string_filter--value--int64_value))
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.value.double_value`
+
+Required:
+
+- `value` (Number)
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter.value.int64_value`
+
+Required:
+
+- `value` (String)
+
+
+
+
+
+### Nested Schema for `configuration.custom_reports_array.metric_filter.or_group.expressions.filter.string_filter`
+
+Required:
+
+- `value` (String)
Optional:
-- `auth_type` (String) must be one of ["Service"]
+- `case_sensitive` (Boolean)
+- `match_type` (List of String)
diff --git a/docs/resources/source_google_analytics_v4.md b/docs/resources/source_google_analytics_v4.md
deleted file mode 100644
index 1a2d2ab2a..000000000
--- a/docs/resources/source_google_analytics_v4.md
+++ /dev/null
@@ -1,135 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_google_analytics_v4 Resource - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceGoogleAnalyticsV4 Resource
----
-
-# airbyte_source_google_analytics_v4 (Resource)
-
-SourceGoogleAnalyticsV4 Resource
-
-## Example Usage
-
-```terraform
-resource "airbyte_source_google_analytics_v4" "my_source_googleanalyticsv4" {
- configuration = {
- credentials = {
- source_google_analytics_v4_credentials_authenticate_via_google_oauth_ = {
- access_token = "...my_access_token..."
- auth_type = "Client"
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
- refresh_token = "...my_refresh_token..."
- }
- }
- custom_reports = "...my_custom_reports..."
- source_type = "google-analytics-v4"
- start_date = "2020-06-01"
- view_id = "...my_view_id..."
- window_in_days = 120
- }
- name = "Dr. Doug Dibbert"
- secret_id = "...my_secret_id..."
- workspace_id = "af725b29-1220-430d-83f5-aeb7799d22e8"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `source_id` (String)
-- `source_type` (String)
-
-
-### Nested Schema for `configuration`
-
-Required:
-
-- `source_type` (String) must be one of ["google-analytics-v4"]
-- `start_date` (String) The date in the format YYYY-MM-DD. Any data before this date will not be replicated.
-- `view_id` (String) The ID for the Google Analytics View you want to fetch data from. This can be found from the Google Analytics Account Explorer.
-
-Optional:
-
-- `credentials` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials))
-- `custom_reports` (String) A JSON array describing the custom reports you want to sync from Google Analytics. See the docs for more information about the exact format you can use to fill out this field.
-- `window_in_days` (Number) The time increment used by the connector when requesting data from the Google Analytics API. More information is available in the the docs. The bigger this value is, the faster the sync will be, but the more likely that sampling will be applied to your data, potentially causing inaccuracies in the returned results. We recommend setting this to 1 unless you have a hard requirement to make the sync faster at the expense of accuracy. The minimum allowed value for this field is 1, and the maximum is 364.
-
-
-### Nested Schema for `configuration.credentials`
-
-Optional:
-
-- `source_google_analytics_v4_credentials_authenticate_via_google_oauth` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_v4_credentials_authenticate_via_google_oauth))
-- `source_google_analytics_v4_credentials_service_account_key_authentication` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_v4_credentials_service_account_key_authentication))
-- `source_google_analytics_v4_update_credentials_authenticate_via_google_oauth` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_v4_update_credentials_authenticate_via_google_oauth))
-- `source_google_analytics_v4_update_credentials_service_account_key_authentication` (Attributes) Credentials for the service (see [below for nested schema](#nestedatt--configuration--credentials--source_google_analytics_v4_update_credentials_service_account_key_authentication))
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_v4_credentials_authenticate_via_google_oauth`
-
-Required:
-
-- `client_id` (String) The Client ID of your Google Analytics developer application.
-- `client_secret` (String) The Client Secret of your Google Analytics developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
-
-Optional:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["Client"]
-
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_v4_credentials_service_account_key_authentication`
-
-Required:
-
-- `credentials_json` (String) The JSON key of the service account to use for authorization
-
-Optional:
-
-- `auth_type` (String) must be one of ["Service"]
-
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_v4_update_credentials_authenticate_via_google_oauth`
-
-Required:
-
-- `client_id` (String) The Client ID of your Google Analytics developer application.
-- `client_secret` (String) The Client Secret of your Google Analytics developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
-
-Optional:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["Client"]
-
-
-
-### Nested Schema for `configuration.credentials.source_google_analytics_v4_update_credentials_service_account_key_authentication`
-
-Required:
-
-- `credentials_json` (String) The JSON key of the service account to use for authorization
-
-Optional:
-
-- `auth_type` (String) must be one of ["Service"]
-
-
diff --git a/docs/resources/source_google_directory.md b/docs/resources/source_google_directory.md
index 277f8a891..0142780e7 100644
--- a/docs/resources/source_google_directory.md
+++ b/docs/resources/source_google_directory.md
@@ -16,17 +16,16 @@ SourceGoogleDirectory Resource
resource "airbyte_source_google_directory" "my_source_googledirectory" {
configuration = {
credentials = {
- source_google_directory_google_credentials_service_account_key = {
- credentials_json = "...my_credentials_json..."
- credentials_title = "Service accounts"
- email = "Ayla.Zulauf@hotmail.com"
+ service_account_key = {
+ credentials_json = "...my_credentials_json..."
+ email = "Sharon_Schmidt@gmail.com"
}
}
- source_type = "google-directory"
}
- name = "Mrs. Allen Lockman"
- secret_id = "...my_secret_id..."
- workspace_id = "dc42c876-c2c2-4dfb-8cfc-1c76230f841f"
+ definition_id = "8b68fdfc-0692-4b4f-9673-f59a8d0acc99"
+ name = "Mr. Mattie Rau"
+ secret_id = "...my_secret_id..."
+ workspace_id = "1059fac1-d6c9-4b0f-8f35-d942704e93eb"
}
```
@@ -36,11 +35,12 @@ resource "airbyte_source_google_directory" "my_source_googledirectory" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,10 +51,6 @@ resource "airbyte_source_google_directory" "my_source_googledirectory" {
### Nested Schema for `configuration`
-Required:
-
-- `source_type` (String) must be one of ["google-directory"]
-
Optional:
- `credentials` (Attributes) Google APIs use the OAuth 2.0 protocol for authentication and authorization. The Source supports Web server application and Service accounts scenarios. (see [below for nested schema](#nestedatt--configuration--credentials))
@@ -64,66 +60,25 @@ Optional:
Optional:
-- `source_google_directory_google_credentials_service_account_key` (Attributes) For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email. (see [below for nested schema](#nestedatt--configuration--credentials--source_google_directory_google_credentials_service_account_key))
-- `source_google_directory_google_credentials_sign_in_via_google_o_auth` (Attributes) For these scenario user only needs to give permission to read Google Directory data. (see [below for nested schema](#nestedatt--configuration--credentials--source_google_directory_google_credentials_sign_in_via_google_o_auth))
-- `source_google_directory_update_google_credentials_service_account_key` (Attributes) For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email. (see [below for nested schema](#nestedatt--configuration--credentials--source_google_directory_update_google_credentials_service_account_key))
-- `source_google_directory_update_google_credentials_sign_in_via_google_o_auth` (Attributes) For these scenario user only needs to give permission to read Google Directory data. (see [below for nested schema](#nestedatt--configuration--credentials--source_google_directory_update_google_credentials_sign_in_via_google_o_auth))
+- `service_account_key` (Attributes) For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email. (see [below for nested schema](#nestedatt--configuration--credentials--service_account_key))
+- `sign_in_via_google_o_auth` (Attributes) For these scenario user only needs to give permission to read Google Directory data. (see [below for nested schema](#nestedatt--configuration--credentials--sign_in_via_google_o_auth))
-
-### Nested Schema for `configuration.credentials.source_google_directory_google_credentials_service_account_key`
+
+### Nested Schema for `configuration.credentials.service_account_key`
Required:
- `credentials_json` (String) The contents of the JSON service account key. See the docs for more information on how to generate this key.
- `email` (String) The email of the user, which has permissions to access the Google Workspace Admin APIs.
-Optional:
-
-- `credentials_title` (String) must be one of ["Service accounts"]
-Authentication Scenario
-
-
-### Nested Schema for `configuration.credentials.source_google_directory_google_credentials_sign_in_via_google_o_auth`
+
+### Nested Schema for `configuration.credentials.sign_in_via_google_o_auth`
Required:
- `client_id` (String) The Client ID of the developer application.
- `client_secret` (String) The Client Secret of the developer application.
-- `refresh_token` (String) The Token for obtaining a new access token.
-
-Optional:
-
-- `credentials_title` (String) must be one of ["Web server app"]
-Authentication Scenario
-
-
-
-### Nested Schema for `configuration.credentials.source_google_directory_update_google_credentials_service_account_key`
-
-Required:
-
-- `credentials_json` (String) The contents of the JSON service account key. See the docs for more information on how to generate this key.
-- `email` (String) The email of the user, which has permissions to access the Google Workspace Admin APIs.
-
-Optional:
-
-- `credentials_title` (String) must be one of ["Service accounts"]
-Authentication Scenario
-
-
-
-### Nested Schema for `configuration.credentials.source_google_directory_update_google_credentials_sign_in_via_google_o_auth`
-
-Required:
-
-- `client_id` (String) The Client ID of the developer application.
-- `client_secret` (String) The Client Secret of the developer application.
-- `refresh_token` (String) The Token for obtaining a new access token.
-
-Optional:
-
-- `credentials_title` (String) must be one of ["Web server app"]
-Authentication Scenario
+- `refresh_token` (String, Sensitive) The Token for obtaining a new access token.
diff --git a/docs/resources/source_google_drive.md b/docs/resources/source_google_drive.md
new file mode 100644
index 000000000..edbd2b8e2
--- /dev/null
+++ b/docs/resources/source_google_drive.md
@@ -0,0 +1,226 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_source_google_drive Resource - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ SourceGoogleDrive Resource
+---
+
+# airbyte_source_google_drive (Resource)
+
+SourceGoogleDrive Resource
+
+## Example Usage
+
+```terraform
+resource "airbyte_source_google_drive" "my_source_googledrive" {
+ configuration = {
+ credentials = {
+ source_google_drive_authenticate_via_google_o_auth = {
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ refresh_token = "...my_refresh_token..."
+ }
+ }
+ folder_url = "https://drive.google.com/drive/folders/1Xaz0vXXXX2enKnNYU5qSt9NS70gvMyYn"
+ start_date = "2021-01-01T00:00:00.000000Z"
+ streams = [
+ {
+ days_to_sync_if_history_is_full = 4
+ format = {
+ source_google_drive_avro_format = {
+ double_as_string = false
+ }
+ }
+ globs = [
+ "...",
+ ]
+ input_schema = "...my_input_schema..."
+ name = "Rex Pacocha"
+ primary_key = "...my_primary_key..."
+ schemaless = false
+ validation_policy = "Emit Record"
+ },
+ ]
+ }
+ definition_id = "f0c4c84b-89e6-425b-ae87-6a32dc31e1b4"
+ name = "Lester Kihn"
+ secret_id = "...my_secret_id..."
+ workspace_id = "53bf2def-ea2f-4d14-9f48-d36313985539"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `configuration` (Attributes) Used during spec; allows the developer to configure the cloud provider specific options
+that are needed when users configure a file-based source. (see [below for nested schema](#nestedatt--configuration))
+- `name` (String) Name of the source e.g. dev-mysql-instance.
+- `workspace_id` (String)
+
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
+
+### Read-Only
+
+- `source_id` (String)
+- `source_type` (String)
+
+
+### Nested Schema for `configuration`
+
+Required:
+
+- `credentials` (Attributes) Credentials for connecting to the Google Drive API (see [below for nested schema](#nestedatt--configuration--credentials))
+- `folder_url` (String) URL for the folder you want to sync. Using individual streams and glob patterns, it's possible to only sync a subset of all files located in the folder.
+- `streams` (Attributes List) Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. (see [below for nested schema](#nestedatt--configuration--streams))
+
+Optional:
+
+- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
+
+
+### Nested Schema for `configuration.credentials`
+
+Optional:
+
+- `authenticate_via_google_o_auth` (Attributes) Credentials for connecting to the Google Drive API (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_google_o_auth))
+- `service_account_key_authentication` (Attributes) Credentials for connecting to the Google Drive API (see [below for nested schema](#nestedatt--configuration--credentials--service_account_key_authentication))
+
+
+### Nested Schema for `configuration.credentials.authenticate_via_google_o_auth`
+
+Required:
+
+- `client_id` (String) Client ID for the Google Drive API
+- `client_secret` (String) Client Secret for the Google Drive API
+- `refresh_token` (String, Sensitive) Refresh Token for the Google Drive API
+
+
+
+### Nested Schema for `configuration.credentials.service_account_key_authentication`
+
+Required:
+
+- `service_account_info` (String) The JSON key of the service account to use for authorization. Read more here.
+
+
+
+
+### Nested Schema for `configuration.streams`
+
+Required:
+
+- `format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format))
+- `name` (String) The name of the stream.
+
+Optional:
+
+- `days_to_sync_if_history_is_full` (Number) Default: 3
+When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
+- `globs` (List of String) The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
+- `input_schema` (String) The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
+- `primary_key` (String, Sensitive) The column or columns (for a composite key) that serves as the unique identifier of a record.
+- `schemaless` (Boolean) Default: false
+When enabled, syncs will not validate or structure records against the stream's schema.
+- `validation_policy` (String) must be one of ["Emit Record", "Skip Record", "Wait for Discover"]; Default: "Emit Record"
+The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+
+
+### Nested Schema for `configuration.streams.format`
+
+Optional:
+
+- `avro_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--avro_format))
+- `csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--csv_format))
+- `document_file_type_format_experimental` (Attributes) Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file. (see [below for nested schema](#nestedatt--configuration--streams--format--document_file_type_format_experimental))
+- `jsonl_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--jsonl_format))
+- `parquet_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format))
+
+
+### Nested Schema for `configuration.streams.format.avro_format`
+
+Optional:
+
+- `double_as_string` (Boolean) Default: false
+Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers.
+
+
+
+### Nested Schema for `configuration.streams.format.csv_format`
+
+Optional:
+
+- `delimiter` (String) Default: ","
+The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
+- `double_quote` (Boolean) Default: true
+Whether two quotes in a quoted CSV value denote a single quote in the data.
+- `encoding` (String) Default: "utf8"
+The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
+- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank.
+- `false_values` (List of String) A set of case-sensitive strings that should be interpreted as false values.
+- `header_definition` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format--header_definition))
+- `null_values` (List of String) A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
+- `quote_char` (String) Default: "\""
+The character used for quoting CSV values. To disallow quoting, make this field blank.
+- `skip_rows_after_header` (Number) Default: 0
+The number of rows to skip after the header row.
+- `skip_rows_before_header` (Number) Default: 0
+The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
+- `strings_can_be_null` (Boolean) Default: true
+Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
+- `true_values` (List of String) A set of case-sensitive strings that should be interpreted as true values.
+
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition`
+
+Optional:
+
+- `autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format--header_definition--autogenerated))
+- `from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format--header_definition--from_csv))
+- `user_provided` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format--header_definition--user_provided))
+
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition.autogenerated`
+
+
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition.from_csv`
+
+
+
+### Nested Schema for `configuration.streams.format.csv_format.header_definition.user_provided`
+
+Required:
+
+- `column_names` (List of String) The column names that will be used while emitting the CSV records
+
+
+
+
+
+### Nested Schema for `configuration.streams.format.document_file_type_format_experimental`
+
+Optional:
+
+- `skip_unprocessable_file_types` (Boolean) Default: true
+If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.
+
+
+
+### Nested Schema for `configuration.streams.format.jsonl_format`
+
+
+
+### Nested Schema for `configuration.streams.format.parquet_format`
+
+Optional:
+
+- `decimal_as_float` (Boolean) Default: false
+Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
+
+
diff --git a/docs/resources/source_google_pagespeed_insights.md b/docs/resources/source_google_pagespeed_insights.md
index c574bdc92..9229e1f80 100644
--- a/docs/resources/source_google_pagespeed_insights.md
+++ b/docs/resources/source_google_pagespeed_insights.md
@@ -17,9 +17,8 @@ resource "airbyte_source_google_pagespeed_insights" "my_source_googlepagespeedin
configuration = {
api_key = "...my_api_key..."
categories = [
- "pwa",
+ "seo",
]
- source_type = "google-pagespeed-insights"
strategies = [
"desktop",
]
@@ -27,9 +26,10 @@ resource "airbyte_source_google_pagespeed_insights" "my_source_googlepagespeedin
"...",
]
}
- name = "Kristopher Dare"
- secret_id = "...my_secret_id..."
- workspace_id = "db14db6b-e5a6-4859-98e2-2ae20da16fc2"
+ definition_id = "52d3206a-fb3a-4724-a60d-40134e58876c"
+ name = "Miss Ronald Erdman Sr."
+ secret_id = "...my_secret_id..."
+ workspace_id = "8ae06a57-c7c5-477a-b1e5-baddd2747bbc"
}
```
@@ -39,11 +39,12 @@ resource "airbyte_source_google_pagespeed_insights" "my_source_googlepagespeedin
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -57,12 +58,11 @@ resource "airbyte_source_google_pagespeed_insights" "my_source_googlepagespeedin
Required:
- `categories` (List of String) Defines which Lighthouse category to run. One or many of: "accessibility", "best-practices", "performance", "pwa", "seo".
-- `source_type` (String) must be one of ["google-pagespeed-insights"]
- `strategies` (List of String) The analyses strategy to use. Either "desktop" or "mobile".
- `urls` (List of String) The URLs to retrieve pagespeed information from. The connector will attempt to sync PageSpeed reports for all the defined URLs. Format: https://(www.)url.domain
Optional:
-- `api_key` (String) Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited when using without API Key. Creating and using the API key therefore is recommended. The key is case sensitive.
+- `api_key` (String, Sensitive) Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited when using without API Key. Creating and using the API key therefore is recommended. The key is case sensitive.
diff --git a/docs/resources/source_google_search_console.md b/docs/resources/source_google_search_console.md
index 5d721b254..79735b33c 100644
--- a/docs/resources/source_google_search_console.md
+++ b/docs/resources/source_google_search_console.md
@@ -16,9 +16,8 @@ SourceGoogleSearchConsole Resource
resource "airbyte_source_google_search_console" "my_source_googlesearchconsole" {
configuration = {
authorization = {
- source_google_search_console_authentication_type_o_auth = {
+ source_google_search_console_o_auth = {
access_token = "...my_access_token..."
- auth_type = "Client"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
@@ -28,9 +27,9 @@ resource "airbyte_source_google_search_console" "my_source_googlesearchconsole"
custom_reports_array = [
{
dimensions = [
- "page",
+ "device",
]
- name = "Heidi Bernier"
+ name = "Ms. Randy Gorczany V"
},
]
data_state = "all"
@@ -38,12 +37,12 @@ resource "airbyte_source_google_search_console" "my_source_googlesearchconsole"
site_urls = [
"...",
]
- source_type = "google-search-console"
- start_date = "2022-07-11"
+ start_date = "2020-03-18"
}
- name = "Jordan Hilll"
- secret_id = "...my_secret_id..."
- workspace_id = "90439d22-2465-4694-a240-7084f7ab37ce"
+ definition_id = "165bc484-0e7f-4b5d-b254-77f370b0ec7c"
+ name = "Wendell Rempel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "0cb9d8df-c27a-48c7-ac3e-b5dc55714db0"
}
```
@@ -53,11 +52,12 @@ resource "airbyte_source_google_search_console" "my_source_googlesearchconsole"
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -72,73 +72,44 @@ Required:
- `authorization` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization))
- `site_urls` (List of String) The URLs of the website property attached to your GSC account. Learn more about properties here.
-- `source_type` (String) must be one of ["google-search-console"]
Optional:
- `custom_reports` (String) (DEPRCATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports.
- `custom_reports_array` (Attributes List) You can add your Custom Analytics report by creating one. (see [below for nested schema](#nestedatt--configuration--custom_reports_array))
-- `data_state` (String) must be one of ["final", "all"]
+- `data_state` (String) must be one of ["final", "all"]; Default: "final"
If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.
- `end_date` (String) UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward.
-- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.
+- `start_date` (String) Default: "2021-01-01"
+UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.
### Nested Schema for `configuration.authorization`
Optional:
-- `source_google_search_console_authentication_type_o_auth` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization--source_google_search_console_authentication_type_o_auth))
-- `source_google_search_console_authentication_type_service_account_key_authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization--source_google_search_console_authentication_type_service_account_key_authentication))
-- `source_google_search_console_update_authentication_type_o_auth` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization--source_google_search_console_update_authentication_type_o_auth))
-- `source_google_search_console_update_authentication_type_service_account_key_authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization--source_google_search_console_update_authentication_type_service_account_key_authentication))
+- `o_auth` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization--o_auth))
+- `service_account_key_authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization--service_account_key_authentication))
-
-### Nested Schema for `configuration.authorization.source_google_search_console_authentication_type_o_auth`
+
+### Nested Schema for `configuration.authorization.o_auth`
Required:
-- `auth_type` (String) must be one of ["Client"]
- `client_id` (String) The client ID of your Google Search Console developer application. Read more here.
- `client_secret` (String) The client secret of your Google Search Console developer application. Read more here.
-- `refresh_token` (String) The token for obtaining a new access token. Read more here.
+- `refresh_token` (String, Sensitive) The token for obtaining a new access token. Read more here.
Optional:
-- `access_token` (String) Access token for making authenticated requests. Read more here.
+- `access_token` (String, Sensitive) Access token for making authenticated requests. Read more here.
-
-### Nested Schema for `configuration.authorization.source_google_search_console_authentication_type_service_account_key_authentication`
+
+### Nested Schema for `configuration.authorization.service_account_key_authentication`
Required:
-- `auth_type` (String) must be one of ["Service"]
-- `email` (String) The email of the user which has permissions to access the Google Workspace Admin APIs.
-- `service_account_info` (String) The JSON key of the service account to use for authorization. Read more here.
-
-
-
-### Nested Schema for `configuration.authorization.source_google_search_console_update_authentication_type_o_auth`
-
-Required:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The client ID of your Google Search Console developer application. Read more here.
-- `client_secret` (String) The client secret of your Google Search Console developer application. Read more here.
-- `refresh_token` (String) The token for obtaining a new access token. Read more here.
-
-Optional:
-
-- `access_token` (String) Access token for making authenticated requests. Read more here.
-
-
-
-### Nested Schema for `configuration.authorization.source_google_search_console_update_authentication_type_service_account_key_authentication`
-
-Required:
-
-- `auth_type` (String) must be one of ["Service"]
- `email` (String) The email of the user which has permissions to access the Google Workspace Admin APIs.
- `service_account_info` (String) The JSON key of the service account to use for authorization. Read more here.
@@ -149,7 +120,7 @@ Required:
Required:
-- `dimensions` (List of String) A list of dimensions (country, date, device, page, query)
+- `dimensions` (List of String) A list of available dimensions. Please note, that for technical reasons `date` is the default dimension which will be included in your query whether you specify it or not. Primary key will consist of your custom dimensions and the default dimension along with `site_url` and `search_type`.
- `name` (String) The name of the custom report, this name would be used as stream name
diff --git a/docs/resources/source_google_sheets.md b/docs/resources/source_google_sheets.md
index 3ba1744db..79e44ade4 100644
--- a/docs/resources/source_google_sheets.md
+++ b/docs/resources/source_google_sheets.md
@@ -16,20 +16,19 @@ SourceGoogleSheets Resource
resource "airbyte_source_google_sheets" "my_source_googlesheets" {
configuration = {
credentials = {
- source_google_sheets_authentication_authenticate_via_google_o_auth_ = {
- auth_type = "Client"
+ source_google_sheets_authenticate_via_google_o_auth = {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
}
}
names_conversion = true
- source_type = "google-sheets"
spreadsheet_id = "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit"
}
- name = "Irene Davis"
- secret_id = "...my_secret_id..."
- workspace_id = "194db554-10ad-4c66-9af9-0a26c7cdc981"
+ definition_id = "d7698733-386b-453a-879a-0805ff1793bf"
+ name = "Roderick Kutch"
+ secret_id = "...my_secret_id..."
+ workspace_id = "d63199bd-6b46-48c8-9ec2-1a9ab567f13c"
}
```
@@ -39,11 +38,12 @@ resource "airbyte_source_google_sheets" "my_source_googlesheets" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -57,60 +57,36 @@ resource "airbyte_source_google_sheets" "my_source_googlesheets" {
Required:
- `credentials` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["google-sheets"]
- `spreadsheet_id` (String) Enter the link to the Google spreadsheet you want to sync. To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'.
Optional:
-- `names_conversion` (Boolean) Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based.
+- `names_conversion` (Boolean) Default: false
+Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based.
### Nested Schema for `configuration.credentials`
Optional:
-- `source_google_sheets_authentication_authenticate_via_google_o_auth` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials--source_google_sheets_authentication_authenticate_via_google_o_auth))
-- `source_google_sheets_authentication_service_account_key_authentication` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials--source_google_sheets_authentication_service_account_key_authentication))
-- `source_google_sheets_update_authentication_authenticate_via_google_o_auth` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials--source_google_sheets_update_authentication_authenticate_via_google_o_auth))
-- `source_google_sheets_update_authentication_service_account_key_authentication` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials--source_google_sheets_update_authentication_service_account_key_authentication))
+- `authenticate_via_google_o_auth` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_google_o_auth))
+- `service_account_key_authentication` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials--service_account_key_authentication))
-
-### Nested Schema for `configuration.credentials.source_google_sheets_authentication_authenticate_via_google_o_auth`
+
+### Nested Schema for `configuration.credentials.authenticate_via_google_o_auth`
Required:
-- `auth_type` (String) must be one of ["Client"]
- `client_id` (String) Enter your Google application's Client ID. See Google's documentation for more information.
- `client_secret` (String) Enter your Google application's Client Secret. See Google's documentation for more information.
-- `refresh_token` (String) Enter your Google application's refresh token. See Google's documentation for more information.
+- `refresh_token` (String, Sensitive) Enter your Google application's refresh token. See Google's documentation for more information.
-
-### Nested Schema for `configuration.credentials.source_google_sheets_authentication_service_account_key_authentication`
+
+### Nested Schema for `configuration.credentials.service_account_key_authentication`
Required:
-- `auth_type` (String) must be one of ["Service"]
-- `service_account_info` (String) The JSON key of the service account to use for authorization. Read more here.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_sheets_update_authentication_authenticate_via_google_o_auth`
-
-Required:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) Enter your Google application's Client ID. See Google's documentation for more information.
-- `client_secret` (String) Enter your Google application's Client Secret. See Google's documentation for more information.
-- `refresh_token` (String) Enter your Google application's refresh token. See Google's documentation for more information.
-
-
-
-### Nested Schema for `configuration.credentials.source_google_sheets_update_authentication_service_account_key_authentication`
-
-Required:
-
-- `auth_type` (String) must be one of ["Service"]
- `service_account_info` (String) The JSON key of the service account to use for authorization. Read more here.
diff --git a/docs/resources/source_google_webfonts.md b/docs/resources/source_google_webfonts.md
index 002d3ab6f..68bf04cf3 100644
--- a/docs/resources/source_google_webfonts.md
+++ b/docs/resources/source_google_webfonts.md
@@ -19,11 +19,11 @@ resource "airbyte_source_google_webfonts" "my_source_googlewebfonts" {
api_key = "...my_api_key..."
pretty_print = "...my_pretty_print..."
sort = "...my_sort..."
- source_type = "google-webfonts"
}
- name = "Donald Hyatt"
- secret_id = "...my_secret_id..."
- workspace_id = "81d6bb33-cfaa-4348-831b-f407ee4fcf0c"
+ definition_id = "77e51fa7-73fc-4f1a-8306-e082909d97bf"
+ name = "Kerry Reinger"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3671a9ca-1d9c-4174-bee4-145562d27576"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_google_webfonts" "my_source_googlewebfonts" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,8 +51,7 @@ resource "airbyte_source_google_webfonts" "my_source_googlewebfonts" {
Required:
-- `api_key` (String) API key is required to access google apis, For getting your's goto google console and generate api key for Webfonts
-- `source_type` (String) must be one of ["google-webfonts"]
+- `api_key` (String, Sensitive) API key is required to access google apis, For getting your's goto google console and generate api key for Webfonts
Optional:
diff --git a/docs/resources/source_google_workspace_admin_reports.md b/docs/resources/source_google_workspace_admin_reports.md
index 9c81767bf..473dc000a 100644
--- a/docs/resources/source_google_workspace_admin_reports.md
+++ b/docs/resources/source_google_workspace_admin_reports.md
@@ -16,13 +16,13 @@ SourceGoogleWorkspaceAdminReports Resource
resource "airbyte_source_google_workspace_admin_reports" "my_source_googleworkspaceadminreports" {
configuration = {
credentials_json = "...my_credentials_json..."
- email = "Bridgette_Rohan@gmail.com"
- lookback = 10
- source_type = "google-workspace-admin-reports"
+ email = "Daisha.Halvorson12@gmail.com"
+ lookback = 8
}
- name = "Samantha Huels"
- secret_id = "...my_secret_id..."
- workspace_id = "398a0dc7-6632-44cc-b06c-8ca12d025292"
+ definition_id = "b8adc8fd-2a7f-4940-9ec4-4e216dff8929"
+ name = "Francisco Swaniawski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a00b494f-7d68-4d64-a810-b2959587ed0c"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_google_workspace_admin_reports" "my_source_googleworksp
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,7 +52,6 @@ Required:
- `credentials_json` (String) The contents of the JSON service account key. See the docs for more information on how to generate this key.
- `email` (String) The email of the user, which has permissions to access the Google Workspace Admin APIs.
-- `source_type` (String) must be one of ["google-workspace-admin-reports"]
Optional:
diff --git a/docs/resources/source_greenhouse.md b/docs/resources/source_greenhouse.md
index 1a78be76f..cf7c580db 100644
--- a/docs/resources/source_greenhouse.md
+++ b/docs/resources/source_greenhouse.md
@@ -15,12 +15,12 @@ SourceGreenhouse Resource
```terraform
resource "airbyte_source_greenhouse" "my_source_greenhouse" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "greenhouse"
+ api_key = "...my_api_key..."
}
- name = "Patricia Pouros"
- secret_id = "...my_secret_id..."
- workspace_id = "5722dd89-5b8b-4cf2-8db9-59693352f745"
+ definition_id = "47c0f9ce-33c0-4f29-8c11-e4e993d29474"
+ name = "Cassandra Carroll"
+ secret_id = "...my_secret_id..."
+ workspace_id = "54dff6cf-9b79-4e23-a888-b6bde25154a5"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_greenhouse" "my_source_greenhouse" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_greenhouse" "my_source_greenhouse" {
Required:
-- `api_key` (String) Greenhouse API Key. See the docs for more information on how to generate this key.
-- `source_type` (String) must be one of ["greenhouse"]
+- `api_key` (String, Sensitive) Greenhouse API Key. See the docs for more information on how to generate this key.
diff --git a/docs/resources/source_gridly.md b/docs/resources/source_gridly.md
index 01d5a0080..da177273d 100644
--- a/docs/resources/source_gridly.md
+++ b/docs/resources/source_gridly.md
@@ -15,13 +15,13 @@ SourceGridly Resource
```terraform
resource "airbyte_source_gridly" "my_source_gridly" {
configuration = {
- api_key = "...my_api_key..."
- grid_id = "...my_grid_id..."
- source_type = "gridly"
+ api_key = "...my_api_key..."
+ grid_id = "...my_grid_id..."
}
- name = "Josephine McCullough"
- secret_id = "...my_secret_id..."
- workspace_id = "d78de3b6-e938-49f5-abb7-f662550a2838"
+ definition_id = "2da80f2b-fa49-4853-a695-0935ad536c50"
+ name = "Megan Kshlerin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e30b46b9-59e4-4e75-8ac0-9227119b95b6"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_gridly" "my_source_gridly" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_gridly" "my_source_gridly" {
Required:
-- `api_key` (String)
+- `api_key` (String, Sensitive)
- `grid_id` (String) ID of a grid, or can be ID of a branch
-- `source_type` (String) must be one of ["gridly"]
diff --git a/docs/resources/source_harvest.md b/docs/resources/source_harvest.md
index 1eebc2ea3..1d335ea56 100644
--- a/docs/resources/source_harvest.md
+++ b/docs/resources/source_harvest.md
@@ -17,20 +17,20 @@ resource "airbyte_source_harvest" "my_source_harvest" {
configuration = {
account_id = "...my_account_id..."
credentials = {
- source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth_ = {
- auth_type = "Client"
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
- refresh_token = "...my_refresh_token..."
+ authenticate_via_harvest_o_auth = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ refresh_token = "...my_refresh_token..."
}
}
replication_end_date = "2017-01-25T00:00:00Z"
replication_start_date = "2017-01-25T00:00:00Z"
- source_type = "harvest"
}
- name = "Rodney Orn"
- secret_id = "...my_secret_id..."
- workspace_id = "2315bba6-5016-44e0-af5b-f6ae591bc8bd"
+ definition_id = "bb7037ab-5561-4ce1-bb1c-adaa0e328a3b"
+ name = "Jorge Heathcote"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e04de287-b752-465f-9ff2-deb8cbf2674a"
}
```
@@ -40,11 +40,12 @@ resource "airbyte_source_harvest" "my_source_harvest" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -59,7 +60,6 @@ Required:
- `account_id` (String) Harvest account ID. Required for all Harvest requests in pair with Personal Access Token
- `replication_start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-- `source_type` (String) must be one of ["harvest"]
Optional:
@@ -71,64 +71,32 @@ Optional:
Optional:
-- `source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth` (Attributes) Choose how to authenticate to Harvest. (see [below for nested schema](#nestedatt--configuration--credentials--source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth))
-- `source_harvest_authentication_mechanism_authenticate_with_personal_access_token` (Attributes) Choose how to authenticate to Harvest. (see [below for nested schema](#nestedatt--configuration--credentials--source_harvest_authentication_mechanism_authenticate_with_personal_access_token))
-- `source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth` (Attributes) Choose how to authenticate to Harvest. (see [below for nested schema](#nestedatt--configuration--credentials--source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth))
-- `source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token` (Attributes) Choose how to authenticate to Harvest. (see [below for nested schema](#nestedatt--configuration--credentials--source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token))
+- `authenticate_via_harvest_o_auth` (Attributes) Choose how to authenticate to Harvest. (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_harvest_o_auth))
+- `authenticate_with_personal_access_token` (Attributes) Choose how to authenticate to Harvest. (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_with_personal_access_token))
-
-### Nested Schema for `configuration.credentials.source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth`
+
+### Nested Schema for `configuration.credentials.authenticate_via_harvest_o_auth`
Required:
- `client_id` (String) The Client ID of your Harvest developer application.
- `client_secret` (String) The Client Secret of your Harvest developer application.
-- `refresh_token` (String) Refresh Token to renew the expired Access Token.
+- `refresh_token` (String, Sensitive) Refresh Token to renew the expired Access Token.
Optional:
- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["Client"]
-
-### Nested Schema for `configuration.credentials.source_harvest_authentication_mechanism_authenticate_with_personal_access_token`
+
+### Nested Schema for `configuration.credentials.authenticate_with_personal_access_token`
Required:
-- `api_token` (String) Log into Harvest and then create new personal access token.
+- `api_token` (String, Sensitive) Log into Harvest and then create new personal access token.
Optional:
- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["Token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth`
-
-Required:
-
-- `client_id` (String) The Client ID of your Harvest developer application.
-- `client_secret` (String) The Client Secret of your Harvest developer application.
-- `refresh_token` (String) Refresh Token to renew the expired Access Token.
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["Client"]
-
-
-
-### Nested Schema for `configuration.credentials.source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token`
-
-Required:
-
-- `api_token` (String) Log into Harvest and then create new personal access token.
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["Token"]
diff --git a/docs/resources/source_hubplanner.md b/docs/resources/source_hubplanner.md
index f20025573..4dbbecfd9 100644
--- a/docs/resources/source_hubplanner.md
+++ b/docs/resources/source_hubplanner.md
@@ -15,12 +15,12 @@ SourceHubplanner Resource
```terraform
resource "airbyte_source_hubplanner" "my_source_hubplanner" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "hubplanner"
+ api_key = "...my_api_key..."
}
- name = "Cary Emmerich Sr."
- secret_id = "...my_secret_id..."
- workspace_id = "b63c205f-da84-4077-8a68-a9a35d086b6f"
+ definition_id = "92033b17-bfcc-4526-af10-da401fb0fc52"
+ name = "Gladys Adams"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9994a41e-4a89-485c-b8fa-7d86bdf5bf91"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_hubplanner" "my_source_hubplanner" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_hubplanner" "my_source_hubplanner" {
Required:
-- `api_key` (String) Hubplanner API key. See https://github.com/hubplanner/API#authentication for more details.
-- `source_type` (String) must be one of ["hubplanner"]
+- `api_key` (String, Sensitive) Hubplanner API key. See https://github.com/hubplanner/API#authentication for more details.
diff --git a/docs/resources/source_hubspot.md b/docs/resources/source_hubspot.md
index a6f8e891a..2de005fba 100644
--- a/docs/resources/source_hubspot.md
+++ b/docs/resources/source_hubspot.md
@@ -16,19 +16,18 @@ SourceHubspot Resource
resource "airbyte_source_hubspot" "my_source_hubspot" {
configuration = {
credentials = {
- source_hubspot_authentication_o_auth = {
- client_id = "123456789000"
- client_secret = "secret"
- credentials_title = "OAuth Credentials"
- refresh_token = "refresh_token"
+ source_hubspot_o_auth = {
+ client_id = "123456789000"
+ client_secret = "secret"
+ refresh_token = "refresh_token"
}
}
- source_type = "hubspot"
- start_date = "2017-01-25T00:00:00Z"
+ start_date = "2017-01-25T00:00:00Z"
}
- name = "Mr. Tomas Wisozk DVM"
- secret_id = "...my_secret_id..."
- workspace_id = "9f443b42-57b9-492c-8dbd-a6a61efa2198"
+ definition_id = "b1210837-28d8-49e3-91e8-68df1f2c5ad8"
+ name = "Amelia Gulgowski II"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3eb240d6-26d4-4887-8caa-f58e0f5c1159"
}
```
@@ -38,11 +37,12 @@ resource "airbyte_source_hubspot" "my_source_hubspot" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -56,7 +56,6 @@ resource "airbyte_source_hubspot" "my_source_hubspot" {
Required:
- `credentials` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["hubspot"]
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
@@ -64,52 +63,24 @@ Required:
Optional:
-- `source_hubspot_authentication_o_auth` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials--source_hubspot_authentication_o_auth))
-- `source_hubspot_authentication_private_app` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials--source_hubspot_authentication_private_app))
-- `source_hubspot_update_authentication_o_auth` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials--source_hubspot_update_authentication_o_auth))
-- `source_hubspot_update_authentication_private_app` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials--source_hubspot_update_authentication_private_app))
+- `o_auth` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials--o_auth))
+- `private_app` (Attributes) Choose how to authenticate to HubSpot. (see [below for nested schema](#nestedatt--configuration--credentials--private_app))
-
-### Nested Schema for `configuration.credentials.source_hubspot_authentication_o_auth`
+
+### Nested Schema for `configuration.credentials.o_auth`
Required:
- `client_id` (String) The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID.
- `client_secret` (String) The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret.
-- `credentials_title` (String) must be one of ["OAuth Credentials"]
-Name of the credentials
-- `refresh_token` (String) Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.
+- `refresh_token` (String, Sensitive) Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.
-
-### Nested Schema for `configuration.credentials.source_hubspot_authentication_private_app`
+
+### Nested Schema for `configuration.credentials.private_app`
Required:
-- `access_token` (String) HubSpot Access token. See the Hubspot docs if you need help finding this token.
-- `credentials_title` (String) must be one of ["Private App Credentials"]
-Name of the credentials set
-
-
-
-### Nested Schema for `configuration.credentials.source_hubspot_update_authentication_o_auth`
-
-Required:
-
-- `client_id` (String) The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID.
-- `client_secret` (String) The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret.
-- `credentials_title` (String) must be one of ["OAuth Credentials"]
-Name of the credentials
-- `refresh_token` (String) Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.
-
-
-
-### Nested Schema for `configuration.credentials.source_hubspot_update_authentication_private_app`
-
-Required:
-
-- `access_token` (String) HubSpot Access token. See the Hubspot docs if you need help finding this token.
-- `credentials_title` (String) must be one of ["Private App Credentials"]
-Name of the credentials set
+- `access_token` (String, Sensitive) HubSpot Access token. See the Hubspot docs if you need help finding this token.
diff --git a/docs/resources/source_insightly.md b/docs/resources/source_insightly.md
index 576daa4ed..dbc734766 100644
--- a/docs/resources/source_insightly.md
+++ b/docs/resources/source_insightly.md
@@ -15,13 +15,13 @@ SourceInsightly Resource
```terraform
resource "airbyte_source_insightly" "my_source_insightly" {
configuration = {
- source_type = "insightly"
- start_date = "2021-03-01T00:00:00Z"
- token = "...my_token..."
+ start_date = "2021-03-01T00:00:00Z"
+ token = "...my_token..."
}
- name = "Dana Lindgren"
- secret_id = "...my_secret_id..."
- workspace_id = "0a9eba47-f7d3-4ef0-8964-0d6a1831c87a"
+ definition_id = "d6014991-0eec-4fc7-b384-ec604057d045"
+ name = "Geneva Bogan"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b5cccbbb-db31-4196-8f99-d67745afb65f"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_insightly" "my_source_insightly" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_insightly" "my_source_insightly" {
Required:
-- `source_type` (String) must be one of ["insightly"]
- `start_date` (String) The date from which you'd like to replicate data for Insightly in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. Note that it will be used only for incremental streams.
-- `token` (String) Your Insightly API token.
+- `token` (String, Sensitive) Your Insightly API token.
diff --git a/docs/resources/source_instagram.md b/docs/resources/source_instagram.md
index 926ad00ad..1d40aa72d 100644
--- a/docs/resources/source_instagram.md
+++ b/docs/resources/source_instagram.md
@@ -18,12 +18,12 @@ resource "airbyte_source_instagram" "my_source_instagram" {
access_token = "...my_access_token..."
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- source_type = "instagram"
start_date = "2017-01-25T00:00:00Z"
}
- name = "Mae Hoppe"
- secret_id = "...my_secret_id..."
- workspace_id = "f1ad837a-e80c-41c1-9c95-ba998678fa3f"
+ definition_id = "20bd7a7e-c191-4626-87e6-80e4417c6f4b"
+ name = "Margaret Maggio"
+ secret_id = "...my_secret_id..."
+ workspace_id = "206a4b04-3ef0-49e6-9b75-b726765eab1a"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_instagram" "my_source_instagram" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,13 +51,12 @@ resource "airbyte_source_instagram" "my_source_instagram" {
Required:
-- `access_token` (String) The value of the access token generated with instagram_basic, instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram Public Content Access permissions. See the docs for more information
-- `source_type` (String) must be one of ["instagram"]
-- `start_date` (String) The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
+- `access_token` (String, Sensitive) The value of the access token generated with instagram_basic, instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram Public Content Access permissions. See the docs for more information
Optional:
- `client_id` (String) The Client ID for your Oauth application
- `client_secret` (String) The Client Secret for your Oauth application
+- `start_date` (String) The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. If left blank, the start date will be set to 2 years before the present date.
diff --git a/docs/resources/source_instatus.md b/docs/resources/source_instatus.md
index 830c152ab..165c64999 100644
--- a/docs/resources/source_instatus.md
+++ b/docs/resources/source_instatus.md
@@ -15,12 +15,12 @@ SourceInstatus Resource
```terraform
resource "airbyte_source_instatus" "my_source_instatus" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "instatus"
+ api_key = "...my_api_key..."
}
- name = "Bobbie Johnston"
- secret_id = "...my_secret_id..."
- workspace_id = "1af388ce-0361-4444-8c79-77a0ef2f5360"
+ definition_id = "d842954b-d759-4bdc-8b93-f80b7f557094"
+ name = "Enrique Kovacek"
+ secret_id = "...my_secret_id..."
+ workspace_id = "356d5339-1630-4fd2-b131-d4fbef253f33"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_instatus" "my_source_instatus" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_instatus" "my_source_instatus" {
Required:
-- `api_key` (String) Instatus REST API key
-- `source_type` (String) must be one of ["instatus"]
+- `api_key` (String, Sensitive) Instatus REST API key
diff --git a/docs/resources/source_intercom.md b/docs/resources/source_intercom.md
index d2ad37413..a674190f6 100644
--- a/docs/resources/source_intercom.md
+++ b/docs/resources/source_intercom.md
@@ -18,12 +18,12 @@ resource "airbyte_source_intercom" "my_source_intercom" {
access_token = "...my_access_token..."
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- source_type = "intercom"
start_date = "2020-11-16T00:00:00Z"
}
- name = "Darnell Watsica"
- secret_id = "...my_secret_id..."
- workspace_id = "934152ed-7e25-43f4-8157-deaa7170f445"
+ definition_id = "135dc90f-6379-44a9-bd5a-cf56253a66e5"
+ name = "Clint Douglas V"
+ secret_id = "...my_secret_id..."
+ workspace_id = "29314c65-ed70-4eb1-bcb4-fc24002ca0d0"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_intercom" "my_source_intercom" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,8 +51,7 @@ resource "airbyte_source_intercom" "my_source_intercom" {
Required:
-- `access_token` (String) Access token for making authenticated requests. See the Intercom docs for more information.
-- `source_type` (String) must be one of ["intercom"]
+- `access_token` (String, Sensitive) Access token for making authenticated requests. See the Intercom docs for more information.
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
Optional:
diff --git a/docs/resources/source_ip2whois.md b/docs/resources/source_ip2whois.md
index 623f30ce1..76dd61de9 100644
--- a/docs/resources/source_ip2whois.md
+++ b/docs/resources/source_ip2whois.md
@@ -15,13 +15,13 @@ SourceIp2whois Resource
```terraform
resource "airbyte_source_ip2whois" "my_source_ip2whois" {
configuration = {
- api_key = "...my_api_key..."
- domain = "www.facebook.com"
- source_type = "ip2whois"
+ api_key = "...my_api_key..."
+ domain = "www.google.com"
}
- name = "Leland Wisoky"
- secret_id = "...my_secret_id..."
- workspace_id = "7aaf9bba-d185-4fe4-b1d6-bf5c838fbb8c"
+ definition_id = "711f25a2-8dde-404a-9ce3-be57bfa46127"
+ name = "Monica Champlin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5ed08074-e17a-4648-8571-1ab94fe75a51"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_ip2whois" "my_source_ip2whois" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_ip2whois" "my_source_ip2whois" {
Optional:
-- `api_key` (String) Your API Key. See here.
+- `api_key` (String, Sensitive) Your API Key. See here.
- `domain` (String) Domain name. See here.
-- `source_type` (String) must be one of ["ip2whois"]
diff --git a/docs/resources/source_iterable.md b/docs/resources/source_iterable.md
index 4c6f49f35..f8b63d58a 100644
--- a/docs/resources/source_iterable.md
+++ b/docs/resources/source_iterable.md
@@ -15,13 +15,13 @@ SourceIterable Resource
```terraform
resource "airbyte_source_iterable" "my_source_iterable" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "iterable"
- start_date = "2021-04-01T00:00:00Z"
+ api_key = "...my_api_key..."
+ start_date = "2021-04-01T00:00:00Z"
}
- name = "Archie Jaskolski"
- secret_id = "...my_secret_id..."
- workspace_id = "c4b425e9-9e62-434c-9f7b-79dfeb77a5c3"
+ definition_id = "00977793-827c-406d-986b-4fbde6ae5395"
+ name = "Katherine Bashirian"
+ secret_id = "...my_secret_id..."
+ workspace_id = "d8df8fdd-acae-4826-9af8-b9bb4850d654"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_iterable" "my_source_iterable" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_iterable" "my_source_iterable" {
Required:
-- `api_key` (String) Iterable API Key. See the docs for more information on how to obtain this key.
-- `source_type` (String) must be one of ["iterable"]
+- `api_key` (String, Sensitive) Iterable API Key. See the docs for more information on how to obtain this key.
- `start_date` (String) The date from which you'd like to replicate data for Iterable, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
diff --git a/docs/resources/source_jira.md b/docs/resources/source_jira.md
index cccfae197..5e6b89d2f 100644
--- a/docs/resources/source_jira.md
+++ b/docs/resources/source_jira.md
@@ -16,20 +16,24 @@ SourceJira Resource
resource "airbyte_source_jira" "my_source_jira" {
configuration = {
api_token = "...my_api_token..."
- domain = ".jira.com"
- email = "Eldridge_Reichert@hotmail.com"
+ domain = "jira..com"
+ email = "Benton_Tromp@hotmail.com"
enable_experimental_streams = false
expand_issue_changelog = false
+ expand_issue_transition = true
+ issues_stream_expand_with = [
+ "transitions",
+ ]
projects = [
"...",
]
- render_fields = false
- source_type = "jira"
+ render_fields = true
start_date = "2021-03-01T00:00:00Z"
}
- name = "Olive Windler"
- secret_id = "...my_secret_id..."
- workspace_id = "0a54b475-f16f-456d-b85a-3c4ac631b99e"
+ definition_id = "7e778751-26eb-4569-8431-2d5d5e6a2d83"
+ name = "Kenneth Runte"
+ secret_id = "...my_secret_id..."
+ workspace_id = "8dd54122-5651-4393-a1b0-488926ab9cfe"
}
```
@@ -39,11 +43,12 @@ resource "airbyte_source_jira" "my_source_jira" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -56,17 +61,22 @@ resource "airbyte_source_jira" "my_source_jira" {
Required:
-- `api_token` (String) Jira API Token. See the docs for more information on how to generate this key. API Token is used for Authorization to your account by BasicAuth.
+- `api_token` (String, Sensitive) Jira API Token. See the docs for more information on how to generate this key. API Token is used for Authorization to your account by BasicAuth.
- `domain` (String) The Domain for your Jira account, e.g. airbyteio.atlassian.net, airbyteio.jira.com, jira.your-domain.com
- `email` (String) The user email for your Jira account which you used to generate the API token. This field is used for Authorization to your account by BasicAuth.
-- `source_type` (String) must be one of ["jira"]
Optional:
-- `enable_experimental_streams` (Boolean) Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info.
-- `expand_issue_changelog` (Boolean) Expand the changelog when replicating issues.
+- `enable_experimental_streams` (Boolean) Default: false
+Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info.
+- `expand_issue_changelog` (Boolean) Default: false
+(DEPRECATED) Expand the changelog when replicating issues.
+- `expand_issue_transition` (Boolean) Default: false
+(DEPRECATED) Expand the transitions when replicating issues.
+- `issues_stream_expand_with` (List of String) Select fields to Expand the `Issues` stream when replicating with:
- `projects` (List of String) List of Jira project keys to replicate data for, or leave it empty if you want to replicate data for all projects.
-- `render_fields` (Boolean) Render issue fields in HTML format in addition to Jira JSON-like format.
+- `render_fields` (Boolean) Default: false
+(DEPRECATED) Render issue fields in HTML format in addition to Jira JSON-like format.
- `start_date` (String) The date from which you want to replicate data from Jira, use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies to certain streams, and only data generated on or after the start date will be replicated. Or leave it empty if you want to replicate all data. For more information, refer to the documentation.
diff --git a/docs/resources/source_k6_cloud.md b/docs/resources/source_k6_cloud.md
index 7259f593b..a04b668c9 100644
--- a/docs/resources/source_k6_cloud.md
+++ b/docs/resources/source_k6_cloud.md
@@ -15,12 +15,12 @@ SourceK6Cloud Resource
```terraform
resource "airbyte_source_k6_cloud" "my_source_k6cloud" {
configuration = {
- api_token = "...my_api_token..."
- source_type = "k6-cloud"
+ api_token = "...my_api_token..."
}
- name = "Ella Runolfsdottir"
- secret_id = "...my_secret_id..."
- workspace_id = "8f9fdb94-10f6-43bb-b817-837b01afdd78"
+ definition_id = "2e85afcc-9acc-46e7-a95c-9a7c9f197511"
+ name = "Franklin D'Amore"
+ secret_id = "...my_secret_id..."
+ workspace_id = "96585095-001a-4ad5-a5f9-cfb0d1e8d3ac"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_k6_cloud" "my_source_k6cloud" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_k6_cloud" "my_source_k6cloud" {
Required:
-- `api_token` (String) Your API Token. See here. The key is case sensitive.
-- `source_type` (String) must be one of ["k6-cloud"]
+- `api_token` (String, Sensitive) Your API Token. See here. The key is case sensitive.
diff --git a/docs/resources/source_klarna.md b/docs/resources/source_klarna.md
index 3ecc96a20..b54a7ade5 100644
--- a/docs/resources/source_klarna.md
+++ b/docs/resources/source_klarna.md
@@ -15,15 +15,15 @@ SourceKlarna Resource
```terraform
resource "airbyte_source_klarna" "my_source_klarna" {
configuration = {
- password = "...my_password..."
- playground = true
- region = "us"
- source_type = "klarna"
- username = "Chase50"
+ password = "...my_password..."
+ playground = true
+ region = "oc"
+ username = "Lessie_Beatty"
}
- name = "Caleb Rau"
- secret_id = "...my_secret_id..."
- workspace_id = "873f5033-f19d-4bf1-a5ce-4152eab9cd7e"
+ definition_id = "ed1087b9-882d-454c-a598-cc59eb952f06"
+ name = "Carmen Bins"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7fd8f9d1-baac-46e0-9b1e-50c14468d231"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_klarna" "my_source_klarna" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,11 +51,14 @@ resource "airbyte_source_klarna" "my_source_klarna" {
Required:
-- `password` (String) A string which is associated with your Merchant ID and is used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)
-- `playground` (Boolean) Propertie defining if connector is used against playground or production environment
+- `password` (String, Sensitive) A string which is associated with your Merchant ID and is used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)
- `region` (String) must be one of ["eu", "us", "oc"]
Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'
-- `source_type` (String) must be one of ["klarna"]
- `username` (String) Consists of your Merchant ID (eid) - a unique number that identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)
+Optional:
+
+- `playground` (Boolean) Default: false
+Propertie defining if connector is used against playground or production environment
+
diff --git a/docs/resources/source_klaviyo.md b/docs/resources/source_klaviyo.md
index 34b618c3f..1b485cfb0 100644
--- a/docs/resources/source_klaviyo.md
+++ b/docs/resources/source_klaviyo.md
@@ -15,13 +15,13 @@ SourceKlaviyo Resource
```terraform
resource "airbyte_source_klaviyo" "my_source_klaviyo" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "klaviyo"
- start_date = "2017-01-25T00:00:00Z"
+ api_key = "...my_api_key..."
+ start_date = "2017-01-25T00:00:00Z"
}
- name = "Charlotte Muller"
- secret_id = "...my_secret_id..."
- workspace_id = "0e123b78-47ec-459e-9f67-f3c4cce4b6d7"
+ definition_id = "d98f81ed-eee1-4be4-a723-eeaf419bc59e"
+ name = "Joanne Murray"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9e9d149f-3b04-4e32-9c64-9b6bc8e2c7d0"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_klaviyo" "my_source_klaviyo" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,10 @@ resource "airbyte_source_klaviyo" "my_source_klaviyo" {
Required:
-- `api_key` (String) Klaviyo API Key. See our docs if you need help finding this key.
-- `source_type` (String) must be one of ["klaviyo"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
+- `api_key` (String, Sensitive) Klaviyo API Key. See our docs if you need help finding this key.
+
+Optional:
+
+- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This field is optional - if not provided, all data will be replicated.
diff --git a/docs/resources/source_kustomer_singer.md b/docs/resources/source_kustomer_singer.md
index 1fdcb97e2..1bd2dabea 100644
--- a/docs/resources/source_kustomer_singer.md
+++ b/docs/resources/source_kustomer_singer.md
@@ -15,13 +15,13 @@ SourceKustomerSinger Resource
```terraform
resource "airbyte_source_kustomer_singer" "my_source_kustomersinger" {
configuration = {
- api_token = "...my_api_token..."
- source_type = "kustomer-singer"
- start_date = "2019-01-01T00:00:00Z"
+ api_token = "...my_api_token..."
+ start_date = "2019-01-01T00:00:00Z"
}
- name = "Bobbie Jacobs"
- secret_id = "...my_secret_id..."
- workspace_id = "3c574750-1357-4e44-b51f-8b084c3197e1"
+ definition_id = "de0f8a2b-57ad-4de2-8e75-111fd0612ffd"
+ name = "Mr. Antonia Yost"
+ secret_id = "...my_secret_id..."
+ workspace_id = "78b38595-7e3c-4921-8c92-84a21155c549"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_kustomer_singer" "my_source_kustomersinger" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_kustomer_singer" "my_source_kustomersinger" {
Required:
-- `api_token` (String) Kustomer API Token. See the docs on how to obtain this
-- `source_type` (String) must be one of ["kustomer-singer"]
+- `api_token` (String, Sensitive) Kustomer API Token. See the docs on how to obtain this
- `start_date` (String) The date from which you'd like to replicate the data
diff --git a/docs/resources/source_kyve.md b/docs/resources/source_kyve.md
index 2ce36824e..62187a2e6 100644
--- a/docs/resources/source_kyve.md
+++ b/docs/resources/source_kyve.md
@@ -15,16 +15,16 @@ SourceKyve Resource
```terraform
resource "airbyte_source_kyve" "my_source_kyve" {
configuration = {
- max_pages = 6
- page_size = 2
- pool_ids = "0,1"
- source_type = "kyve"
- start_ids = "0"
- url_base = "https://api.korellia.kyve.network/"
+ max_pages = 0
+ page_size = 0
+ pool_ids = "0"
+ start_ids = "0"
+ url_base = "https://api.beta.kyve.network/"
}
- name = "Gail Homenick"
- secret_id = "...my_secret_id..."
- workspace_id = "94874c2d-5cc4-4972-a33e-66bd8fe5d00b"
+ definition_id = "be9a984e-4b07-4bca-b13e-d5606ac59e7c"
+ name = "Wilbur Turcotte"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b09ffd37-53fe-446a-9403-ba1bd8103cfb"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_kyve" "my_source_kyve" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -52,13 +53,14 @@ resource "airbyte_source_kyve" "my_source_kyve" {
Required:
- `pool_ids` (String) The IDs of the KYVE storage pool you want to archive. (Comma separated)
-- `source_type` (String) must be one of ["kyve"]
- `start_ids` (String) The start-id defines, from which bundle id the pipeline should start to extract the data (Comma separated)
Optional:
- `max_pages` (Number) The maximum amount of pages to go trough. Set to 'null' for all pages.
-- `page_size` (Number) The pagesize for pagination, smaller numbers are used in integration tests.
-- `url_base` (String) URL to the KYVE Chain API.
+- `page_size` (Number) Default: 100
+The pagesize for pagination, smaller numbers are used in integration tests.
+- `url_base` (String) Default: "https://api.korellia.kyve.network"
+URL to the KYVE Chain API.
diff --git a/docs/resources/source_launchdarkly.md b/docs/resources/source_launchdarkly.md
index aac121903..0fd734655 100644
--- a/docs/resources/source_launchdarkly.md
+++ b/docs/resources/source_launchdarkly.md
@@ -16,11 +16,11 @@ SourceLaunchdarkly Resource
resource "airbyte_source_launchdarkly" "my_source_launchdarkly" {
configuration = {
access_token = "...my_access_token..."
- source_type = "launchdarkly"
}
- name = "Darren Monahan"
- secret_id = "...my_secret_id..."
- workspace_id = "20387320-590c-4cc1-8964-00313b3e5044"
+ definition_id = "422849b5-8575-49fd-b9d7-4aa20ea69f1b"
+ name = "Jodi Marquardt"
+ secret_id = "...my_secret_id..."
+ workspace_id = "dd1b5a02-95b1-497b-bb02-27d625c3155f"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_launchdarkly" "my_source_launchdarkly" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_launchdarkly" "my_source_launchdarkly" {
Required:
-- `access_token` (String) Your Access token. See here.
-- `source_type` (String) must be one of ["launchdarkly"]
+- `access_token` (String, Sensitive) Your Access token. See here.
diff --git a/docs/resources/source_lemlist.md b/docs/resources/source_lemlist.md
index 0b231b6ab..0169f3f7a 100644
--- a/docs/resources/source_lemlist.md
+++ b/docs/resources/source_lemlist.md
@@ -15,12 +15,12 @@ SourceLemlist Resource
```terraform
resource "airbyte_source_lemlist" "my_source_lemlist" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "lemlist"
+ api_key = "...my_api_key..."
}
- name = "Gene Herman"
- secret_id = "...my_secret_id..."
- workspace_id = "72dc4077-d0cc-43f4-88ef-c15ceb4d6e1e"
+ definition_id = "731c6e6b-c1ca-4f16-aaee-78925477f387"
+ name = "Mr. Clyde Dibbert"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ba4aed29-95c6-463b-ad13-c6e3bbb93bd4"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_lemlist" "my_source_lemlist" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_lemlist" "my_source_lemlist" {
Required:
-- `api_key` (String) Lemlist API key,
-- `source_type` (String) must be one of ["lemlist"]
+- `api_key` (String, Sensitive) Lemlist API key,
diff --git a/docs/resources/source_lever_hiring.md b/docs/resources/source_lever_hiring.md
index 3e3d97ffa..456b7f93e 100644
--- a/docs/resources/source_lever_hiring.md
+++ b/docs/resources/source_lever_hiring.md
@@ -16,18 +16,17 @@ SourceLeverHiring Resource
resource "airbyte_source_lever_hiring" "my_source_leverhiring" {
configuration = {
credentials = {
- source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key_ = {
- api_key = "...my_api_key..."
- auth_type = "Api Key"
+ authenticate_via_lever_api_key = {
+ api_key = "...my_api_key..."
}
}
- environment = "Sandbox"
- source_type = "lever-hiring"
+ environment = "Production"
start_date = "2021-03-01T00:00:00Z"
}
- name = "Donald Wuckert"
- secret_id = "...my_secret_id..."
- workspace_id = "aedf2aca-b58b-4991-8926-ddb589461e74"
+ definition_id = "3d75c669-3a6b-492e-b166-50e4c3120d77"
+ name = "Bill Howell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c796fdac-1f48-4b8f-8670-1054c1db1ce4"
}
```
@@ -37,11 +36,12 @@ resource "airbyte_source_lever_hiring" "my_source_leverhiring" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -54,13 +54,12 @@ resource "airbyte_source_lever_hiring" "my_source_leverhiring" {
Required:
-- `source_type` (String) must be one of ["lever-hiring"]
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. Note that it will be used only in the following incremental streams: comments, commits, and issues.
Optional:
- `credentials` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `environment` (String) must be one of ["Production", "Sandbox"]
+- `environment` (String) must be one of ["Production", "Sandbox"]; Default: "Sandbox"
The environment in which you'd like to replicate data for Lever. This is used to determine which Lever API endpoint to use.
@@ -68,59 +67,26 @@ The environment in which you'd like to replicate data for Lever. This is used to
Optional:
-- `source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials--source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key))
-- `source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials--source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth))
-- `source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_api_key` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials--source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_api_key))
-- `source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_o_auth` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials--source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_o_auth))
+- `authenticate_via_lever_api_key` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_lever_api_key))
+- `authenticate_via_lever_o_auth` (Attributes) Choose how to authenticate to Lever Hiring. (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_lever_o_auth))
-
-### Nested Schema for `configuration.credentials.source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key`
+
+### Nested Schema for `configuration.credentials.authenticate_via_lever_api_key`
Required:
-- `api_key` (String) The Api Key of your Lever Hiring account.
-
-Optional:
-
-- `auth_type` (String) must be one of ["Api Key"]
-
-
-
-### Nested Schema for `configuration.credentials.source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth`
-
-Required:
-
-- `refresh_token` (String) The token for obtaining new access token.
-
-Optional:
-
-- `auth_type` (String) must be one of ["Client"]
-- `client_id` (String) The Client ID of your Lever Hiring developer application.
-- `client_secret` (String) The Client Secret of your Lever Hiring developer application.
-
-
-
-### Nested Schema for `configuration.credentials.source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_api_key`
-
-Required:
-
-- `api_key` (String) The Api Key of your Lever Hiring account.
-
-Optional:
-
-- `auth_type` (String) must be one of ["Api Key"]
+- `api_key` (String, Sensitive) The Api Key of your Lever Hiring account.
-
-### Nested Schema for `configuration.credentials.source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_o_auth`
+
+### Nested Schema for `configuration.credentials.authenticate_via_lever_o_auth`
Required:
-- `refresh_token` (String) The token for obtaining new access token.
+- `refresh_token` (String, Sensitive) The token for obtaining new access token.
Optional:
-- `auth_type` (String) must be one of ["Client"]
- `client_id` (String) The Client ID of your Lever Hiring developer application.
- `client_secret` (String) The Client Secret of your Lever Hiring developer application.
diff --git a/docs/resources/source_linkedin_ads.md b/docs/resources/source_linkedin_ads.md
index 958334248..2473ef4dc 100644
--- a/docs/resources/source_linkedin_ads.md
+++ b/docs/resources/source_linkedin_ads.md
@@ -16,27 +16,26 @@ SourceLinkedinAds Resource
resource "airbyte_source_linkedin_ads" "my_source_linkedinads" {
configuration = {
account_ids = [
- 1,
+ 6,
]
ad_analytics_reports = [
{
- name = "Kara Rohan"
- pivot_by = "MEMBER_REGION_V2"
+ name = "Dwayne Zboncak"
+ pivot_by = "IMPRESSION_DEVICE_TYPE"
time_granularity = "MONTHLY"
},
]
credentials = {
- source_linkedin_ads_authentication_access_token = {
+ access_token = {
access_token = "...my_access_token..."
- auth_method = "access_token"
}
}
- source_type = "linkedin-ads"
- start_date = "2021-05-17"
+ start_date = "2021-05-17"
}
- name = "Elsa Adams"
- secret_id = "...my_secret_id..."
- workspace_id = "930b69f7-ac2f-472f-8850-090491160820"
+ definition_id = "4672645c-fb24-449e-af87-64eb4b875ea1"
+ name = "Blake Howell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6c0fac14-03cf-4d91-9cc5-3ae1f1c37b35"
}
```
@@ -46,11 +45,12 @@ resource "airbyte_source_linkedin_ads" "my_source_linkedinads" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -63,7 +63,6 @@ resource "airbyte_source_linkedin_ads" "my_source_linkedinads" {
Required:
-- `source_type` (String) must be one of ["linkedin-ads"]
- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.
Optional:
@@ -89,60 +88,24 @@ Choose how to group the data in your report by time. The options are:
- 'ALL'
Optional:
-- `source_linkedin_ads_authentication_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_ads_authentication_access_token))
-- `source_linkedin_ads_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_ads_authentication_o_auth2_0))
-- `source_linkedin_ads_update_authentication_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_ads_update_authentication_access_token))
-- `source_linkedin_ads_update_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_ads_update_authentication_o_auth2_0))
+- `access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--access_token))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_linkedin_ads_authentication_access_token`
+
+### Nested Schema for `configuration.credentials.access_token`
Required:
-- `access_token` (String) The access token generated for your developer application. Refer to our documentation for more information.
+- `access_token` (String, Sensitive) The access token generated for your developer application. Refer to our documentation for more information.
-Optional:
-
-- `auth_method` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_ads_authentication_o_auth2_0`
-
-Required:
-
-- `client_id` (String) The client ID of your developer application. Refer to our documentation for more information.
-- `client_secret` (String) The client secret of your developer application. Refer to our documentation for more information.
-- `refresh_token` (String) The key to refresh the expired access token. Refer to our documentation for more information.
-
-Optional:
-
-- `auth_method` (String) must be one of ["oAuth2.0"]
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_ads_update_authentication_access_token`
-
-Required:
-
-- `access_token` (String) The access token generated for your developer application. Refer to our documentation for more information.
-
-Optional:
-
-- `auth_method` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_ads_update_authentication_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
- `client_id` (String) The client ID of your developer application. Refer to our documentation for more information.
- `client_secret` (String) The client secret of your developer application. Refer to our documentation for more information.
-- `refresh_token` (String) The key to refresh the expired access token. Refer to our documentation for more information.
-
-Optional:
-
-- `auth_method` (String) must be one of ["oAuth2.0"]
+- `refresh_token` (String, Sensitive) The key to refresh the expired access token. Refer to our documentation for more information.
diff --git a/docs/resources/source_linkedin_pages.md b/docs/resources/source_linkedin_pages.md
index 14ab6fbf6..ae6c85e25 100644
--- a/docs/resources/source_linkedin_pages.md
+++ b/docs/resources/source_linkedin_pages.md
@@ -16,17 +16,16 @@ SourceLinkedinPages Resource
resource "airbyte_source_linkedin_pages" "my_source_linkedinpages" {
configuration = {
credentials = {
- source_linkedin_pages_authentication_access_token = {
+ source_linkedin_pages_access_token = {
access_token = "...my_access_token..."
- auth_method = "access_token"
}
}
- org_id = "123456789"
- source_type = "linkedin-pages"
+ org_id = "123456789"
}
- name = "Tracey Kutch"
- secret_id = "...my_secret_id..."
- workspace_id = "c66183bf-e965-49eb-80ec-16faf75b0b53"
+ definition_id = "0ebb3981-c89f-4963-b1e6-164cc8788ff7"
+ name = "Kayla Haley"
+ secret_id = "...my_secret_id..."
+ workspace_id = "33f7738d-63dc-47b7-b8b1-6c6167f1e8f0"
}
```
@@ -36,11 +35,12 @@ resource "airbyte_source_linkedin_pages" "my_source_linkedinpages" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -54,7 +54,6 @@ resource "airbyte_source_linkedin_pages" "my_source_linkedinpages" {
Required:
- `org_id` (String) Specify the Organization ID
-- `source_type` (String) must be one of ["linkedin-pages"]
Optional:
@@ -65,60 +64,24 @@ Optional:
Optional:
-- `source_linkedin_pages_authentication_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_pages_authentication_access_token))
-- `source_linkedin_pages_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_pages_authentication_o_auth2_0))
-- `source_linkedin_pages_update_authentication_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_pages_update_authentication_access_token))
-- `source_linkedin_pages_update_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_linkedin_pages_update_authentication_o_auth2_0))
+- `access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--access_token))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_linkedin_pages_authentication_access_token`
+
+### Nested Schema for `configuration.credentials.access_token`
Required:
-- `access_token` (String) The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
+- `access_token` (String, Sensitive) The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
-Optional:
-
-- `auth_method` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_pages_authentication_o_auth2_0`
-
-Required:
-
-- `client_id` (String) The client ID of the LinkedIn developer application.
-- `client_secret` (String) The client secret of the LinkedIn developer application.
-- `refresh_token` (String) The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
-
-Optional:
-
-- `auth_method` (String) must be one of ["oAuth2.0"]
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_pages_update_authentication_access_token`
-
-Required:
-
-- `access_token` (String) The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
-
-Optional:
-
-- `auth_method` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_linkedin_pages_update_authentication_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
- `client_id` (String) The client ID of the LinkedIn developer application.
- `client_secret` (String) The client secret of the LinkedIn developer application.
-- `refresh_token` (String) The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
-
-Optional:
-
-- `auth_method` (String) must be one of ["oAuth2.0"]
+- `refresh_token` (String, Sensitive) The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
diff --git a/docs/resources/source_linnworks.md b/docs/resources/source_linnworks.md
index 80be8629f..b23b69a5d 100644
--- a/docs/resources/source_linnworks.md
+++ b/docs/resources/source_linnworks.md
@@ -17,13 +17,13 @@ resource "airbyte_source_linnworks" "my_source_linnworks" {
configuration = {
application_id = "...my_application_id..."
application_secret = "...my_application_secret..."
- source_type = "linnworks"
- start_date = "2022-05-04T07:21:12.859Z"
+ start_date = "2022-09-13T03:04:12.490Z"
token = "...my_token..."
}
- name = "Antonia Muller"
- secret_id = "...my_secret_id..."
- workspace_id = "cbaaf445-2c48-442c-9b2a-d32dafe81a88"
+ definition_id = "2f92210b-5c8f-4204-a6a7-75647eb6babc"
+ name = "Melba McDermott IV"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b2eafdc4-53fb-46a0-992c-447712b4a020"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_linnworks" "my_source_linnworks" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -52,8 +53,7 @@ Required:
- `application_id` (String) Linnworks Application ID
- `application_secret` (String) Linnworks Application Secret
-- `source_type` (String) must be one of ["linnworks"]
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-- `token` (String)
+- `token` (String, Sensitive)
diff --git a/docs/resources/source_lokalise.md b/docs/resources/source_lokalise.md
index c887b9bc6..607cbc39a 100644
--- a/docs/resources/source_lokalise.md
+++ b/docs/resources/source_lokalise.md
@@ -15,13 +15,13 @@ SourceLokalise Resource
```terraform
resource "airbyte_source_lokalise" "my_source_lokalise" {
configuration = {
- api_key = "...my_api_key..."
- project_id = "...my_project_id..."
- source_type = "lokalise"
+ api_key = "...my_api_key..."
+ project_id = "...my_project_id..."
}
- name = "Bernard Gottlieb"
- secret_id = "...my_secret_id..."
- workspace_id = "573fecd4-7353-4f63-8820-9379aa69cd5f"
+ definition_id = "8830aabe-ffb8-4d97-a510-59b440a5f2f6"
+ name = "Inez Gottlieb"
+ secret_id = "...my_secret_id..."
+ workspace_id = "66849f7b-beaa-4ef5-a404-3cb4c473e8c7"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_lokalise" "my_source_lokalise" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_lokalise" "my_source_lokalise" {
Required:
-- `api_key` (String) Lokalise API Key with read-access. Available at Profile settings > API tokens. See here.
+- `api_key` (String, Sensitive) Lokalise API Key with read-access. Available at Profile settings > API tokens. See here.
- `project_id` (String) Lokalise project ID. Available at Project Settings > General.
-- `source_type` (String) must be one of ["lokalise"]
diff --git a/docs/resources/source_mailchimp.md b/docs/resources/source_mailchimp.md
index b80c36148..f02344ca1 100644
--- a/docs/resources/source_mailchimp.md
+++ b/docs/resources/source_mailchimp.md
@@ -17,16 +17,15 @@ resource "airbyte_source_mailchimp" "my_source_mailchimp" {
configuration = {
campaign_id = "...my_campaign_id..."
credentials = {
- source_mailchimp_authentication_api_key = {
- apikey = "...my_apikey..."
- auth_type = "apikey"
+ api_key = {
+ apikey = "...my_apikey..."
}
}
- source_type = "mailchimp"
}
- name = "Benny Williamson"
- secret_id = "...my_secret_id..."
- workspace_id = "da18a782-2bf9-4589-8e68-61adb55f9e5d"
+ definition_id = "bd591517-4a55-43fd-a41d-af7626ef51c5"
+ name = "Lyle Haley"
+ secret_id = "...my_secret_id..."
+ workspace_id = "0c6c0cc9-3e76-4e9f-9ef5-41f06ca13b1e"
}
```
@@ -36,11 +35,12 @@ resource "airbyte_source_mailchimp" "my_source_mailchimp" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,10 +51,6 @@ resource "airbyte_source_mailchimp" "my_source_mailchimp" {
### Nested Schema for `configuration`
-Required:
-
-- `source_type` (String) must be one of ["mailchimp"]
-
Optional:
- `campaign_id` (String)
@@ -65,50 +61,23 @@ Optional:
Optional:
-- `source_mailchimp_authentication_api_key` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_mailchimp_authentication_api_key))
-- `source_mailchimp_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_mailchimp_authentication_o_auth2_0))
-- `source_mailchimp_update_authentication_api_key` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_mailchimp_update_authentication_api_key))
-- `source_mailchimp_update_authentication_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_mailchimp_update_authentication_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_mailchimp_authentication_api_key`
-
-Required:
-
-- `apikey` (String) Mailchimp API Key. See the docs for information on how to generate this key.
-- `auth_type` (String) must be one of ["apikey"]
-
-
-
-### Nested Schema for `configuration.credentials.source_mailchimp_authentication_o_auth2_0`
-
-Required:
-
-- `access_token` (String) An access token generated using the above client ID and secret.
-- `auth_type` (String) must be one of ["oauth2.0"]
-
-Optional:
-
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-
+- `api_key` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--api_key))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_mailchimp_update_authentication_api_key`
+
+### Nested Schema for `configuration.credentials.api_key`
Required:
-- `apikey` (String) Mailchimp API Key. See the docs for information on how to generate this key.
-- `auth_type` (String) must be one of ["apikey"]
+- `apikey` (String, Sensitive) Mailchimp API Key. See the docs for information on how to generate this key.
-
-### Nested Schema for `configuration.credentials.source_mailchimp_update_authentication_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) An access token generated using the above client ID and secret.
-- `auth_type` (String) must be one of ["oauth2.0"]
+- `access_token` (String, Sensitive) An access token generated using the above client ID and secret.
Optional:
diff --git a/docs/resources/source_mailgun.md b/docs/resources/source_mailgun.md
index c771986a8..f51dc6e47 100644
--- a/docs/resources/source_mailgun.md
+++ b/docs/resources/source_mailgun.md
@@ -17,12 +17,12 @@ resource "airbyte_source_mailgun" "my_source_mailgun" {
configuration = {
domain_region = "...my_domain_region..."
private_key = "...my_private_key..."
- source_type = "mailgun"
start_date = "2023-08-01T00:00:00Z"
}
- name = "Sheri Mayert"
- secret_id = "...my_secret_id..."
- workspace_id = "8f7502bf-dc34-4508-81f1-764456379f3f"
+ definition_id = "c1488faa-411d-49d9-a226-9c9d648f0bcc"
+ name = "Ervin Deckow"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5af6ed3c-47c1-4416-8113-c2d3cb5eaa64"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_mailgun" "my_source_mailgun" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,12 +50,12 @@ resource "airbyte_source_mailgun" "my_source_mailgun" {
Required:
-- `private_key` (String) Primary account API key to access your Mailgun data.
-- `source_type` (String) must be one of ["mailgun"]
+- `private_key` (String, Sensitive) Primary account API key to access your Mailgun data.
Optional:
-- `domain_region` (String) Domain region code. 'EU' or 'US' are possible values. The default is 'US'.
+- `domain_region` (String) Default: "US"
+Domain region code. 'EU' or 'US' are possible values. The default is 'US'.
- `start_date` (String) UTC date and time in the format 2020-10-01 00:00:00. Any data before this date will not be replicated. If omitted, defaults to 3 days ago.
diff --git a/docs/resources/source_mailjet_sms.md b/docs/resources/source_mailjet_sms.md
index 80e5ef086..ac354ca3b 100644
--- a/docs/resources/source_mailjet_sms.md
+++ b/docs/resources/source_mailjet_sms.md
@@ -15,14 +15,14 @@ SourceMailjetSms Resource
```terraform
resource "airbyte_source_mailjet_sms" "my_source_mailjetsms" {
configuration = {
- end_date = 1666281656
- source_type = "mailjet-sms"
- start_date = 1666261656
- token = "...my_token..."
+ end_date = 1666281656
+ start_date = 1666261656
+ token = "...my_token..."
}
- name = "Dr. Eloise Cronin"
- secret_id = "...my_secret_id..."
- workspace_id = "62657b36-fc6b-49f5-87ce-525c67641a83"
+ definition_id = "6a42dbbb-853e-4c4b-9e6a-18b0d79003de"
+ name = "Gilberto Pagac"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3bfaadd2-9a6d-4ff6-8b6b-f32faf825bea"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_mailjet_sms" "my_source_mailjetsms" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,8 +50,7 @@ resource "airbyte_source_mailjet_sms" "my_source_mailjetsms" {
Required:
-- `source_type` (String) must be one of ["mailjet-sms"]
-- `token` (String) Your access token. See here.
+- `token` (String, Sensitive) Your access token. See here.
Optional:
diff --git a/docs/resources/source_marketo.md b/docs/resources/source_marketo.md
index f59260c4d..33e9cd346 100644
--- a/docs/resources/source_marketo.md
+++ b/docs/resources/source_marketo.md
@@ -18,12 +18,12 @@ resource "airbyte_source_marketo" "my_source_marketo" {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
domain_url = "https://000-AAA-000.mktorest.com"
- source_type = "marketo"
start_date = "2020-09-25T00:00:00Z"
}
- name = "Jerome Berge"
- secret_id = "...my_secret_id..."
- workspace_id = "b4c21ccb-423a-4bcd-891f-aabdd88e71f6"
+ definition_id = "c87aaffe-b9ea-4290-b7e9-f4166b42b69c"
+ name = "Doris Steuber"
+ secret_id = "...my_secret_id..."
+ workspace_id = "bbad3f0b-f8ca-4743-bfb1-506e5d6deb8b"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_marketo" "my_source_marketo" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -53,7 +54,6 @@ Required:
- `client_id` (String) The Client ID of your Marketo developer application. See the docs for info on how to obtain this.
- `client_secret` (String) The Client Secret of your Marketo developer application. See the docs for info on how to obtain this.
- `domain_url` (String) Your Marketo Base URL. See the docs for info on how to obtain this.
-- `source_type` (String) must be one of ["marketo"]
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
diff --git a/docs/resources/source_metabase.md b/docs/resources/source_metabase.md
index f6b805f97..544da9702 100644
--- a/docs/resources/source_metabase.md
+++ b/docs/resources/source_metabase.md
@@ -18,12 +18,12 @@ resource "airbyte_source_metabase" "my_source_metabase" {
instance_api_url = "https://localhost:3000/api/"
password = "...my_password..."
session_token = "...my_session_token..."
- source_type = "metabase"
- username = "Peyton.Green"
+ username = "Efren_Mante15"
}
- name = "Tammy Sporer"
- secret_id = "...my_secret_id..."
- workspace_id = "71e7fd07-4009-4ef8-929d-e1dd7097b5da"
+ definition_id = "f283fdf1-b362-4a3e-b9ca-cc879ba7ac01"
+ name = "Gail Kirlin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7c271c50-44a2-45a4-b7e4-eabe3a97768e"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_metabase" "my_source_metabase" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,12 +52,11 @@ resource "airbyte_source_metabase" "my_source_metabase" {
Required:
- `instance_api_url` (String) URL to your metabase instance API
-- `source_type` (String) must be one of ["metabase"]
Optional:
-- `password` (String)
-- `session_token` (String) To generate your session token, you need to run the following command: ``` curl -X POST \
+- `password` (String, Sensitive)
+- `session_token` (String, Sensitive) To generate your session token, you need to run the following command: ``` curl -X POST \
-H "Content-Type: application/json" \
-d '{"username": "person@metabase.com", "password": "fakepassword"}' \
http://localhost:3000/api/session
diff --git a/docs/resources/source_microsoft_teams.md b/docs/resources/source_microsoft_teams.md
index f7977bd25..9eb3bcf43 100644
--- a/docs/resources/source_microsoft_teams.md
+++ b/docs/resources/source_microsoft_teams.md
@@ -16,19 +16,18 @@ SourceMicrosoftTeams Resource
resource "airbyte_source_microsoft_teams" "my_source_microsoftteams" {
configuration = {
credentials = {
- source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft = {
- auth_type = "Token"
+ authenticate_via_microsoft = {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
tenant_id = "...my_tenant_id..."
}
}
- period = "D7"
- source_type = "microsoft-teams"
+ period = "D7"
}
- name = "Brandy Ryan"
- secret_id = "...my_secret_id..."
- workspace_id = "fa6c78a2-16e1-49ba-beca-6191498140b6"
+ definition_id = "79345d14-4630-4331-8f29-cf10b0742b93"
+ name = "Jesus Marquardt Sr."
+ secret_id = "...my_secret_id..."
+ workspace_id = "1a320cca-d5ad-4c13-b0ef-57488395b5ae"
}
```
@@ -38,11 +37,12 @@ resource "airbyte_source_microsoft_teams" "my_source_microsoftteams" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -56,7 +56,6 @@ resource "airbyte_source_microsoft_teams" "my_source_microsoftteams" {
Required:
- `period` (String) Specifies the length of time over which the Team Device Report stream is aggregated. The supported values are: D7, D30, D90, and D180.
-- `source_type` (String) must be one of ["microsoft-teams"]
Optional:
@@ -67,13 +66,11 @@ Optional:
Optional:
-- `source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft` (Attributes) Choose how to authenticate to Microsoft (see [below for nested schema](#nestedatt--configuration--credentials--source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft))
-- `source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0` (Attributes) Choose how to authenticate to Microsoft (see [below for nested schema](#nestedatt--configuration--credentials--source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0))
-- `source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft` (Attributes) Choose how to authenticate to Microsoft (see [below for nested schema](#nestedatt--configuration--credentials--source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft))
-- `source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0` (Attributes) Choose how to authenticate to Microsoft (see [below for nested schema](#nestedatt--configuration--credentials--source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0))
+- `authenticate_via_microsoft` (Attributes) Choose how to authenticate to Microsoft (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_microsoft))
+- `authenticate_via_microsoft_o_auth20` (Attributes) Choose how to authenticate to Microsoft (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_microsoft_o_auth20))
-
-### Nested Schema for `configuration.credentials.source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft`
+
+### Nested Schema for `configuration.credentials.authenticate_via_microsoft`
Required:
@@ -81,52 +78,15 @@ Required:
- `client_secret` (String) The Client Secret of your Microsoft Teams developer application.
- `tenant_id` (String) A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL
-Optional:
-
-- `auth_type` (String) must be one of ["Token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0`
-
-Required:
-
-- `client_id` (String) The Client ID of your Microsoft Teams developer application.
-- `client_secret` (String) The Client Secret of your Microsoft Teams developer application.
-- `refresh_token` (String) A Refresh Token to renew the expired Access Token.
-- `tenant_id` (String) A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL
-
-Optional:
-
-- `auth_type` (String) must be one of ["Client"]
-
-
-### Nested Schema for `configuration.credentials.source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft`
+
+### Nested Schema for `configuration.credentials.authenticate_via_microsoft_o_auth20`
Required:
- `client_id` (String) The Client ID of your Microsoft Teams developer application.
- `client_secret` (String) The Client Secret of your Microsoft Teams developer application.
+- `refresh_token` (String, Sensitive) A Refresh Token to renew the expired Access Token.
- `tenant_id` (String) A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL
-Optional:
-
-- `auth_type` (String) must be one of ["Token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0`
-
-Required:
-
-- `client_id` (String) The Client ID of your Microsoft Teams developer application.
-- `client_secret` (String) The Client Secret of your Microsoft Teams developer application.
-- `refresh_token` (String) A Refresh Token to renew the expired Access Token.
-- `tenant_id` (String) A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL
-
-Optional:
-
-- `auth_type` (String) must be one of ["Client"]
-
diff --git a/docs/resources/source_mixpanel.md b/docs/resources/source_mixpanel.md
index 4b56bde43..f7d91625d 100644
--- a/docs/resources/source_mixpanel.md
+++ b/docs/resources/source_mixpanel.md
@@ -15,25 +15,23 @@ SourceMixpanel Resource
```terraform
resource "airbyte_source_mixpanel" "my_source_mixpanel" {
configuration = {
- attribution_window = 2
+ attribution_window = 0
credentials = {
- source_mixpanel_authentication_wildcard_project_secret = {
- api_secret = "...my_api_secret..."
- option_title = "Project Secret"
+ project_secret = {
+ api_secret = "...my_api_secret..."
}
}
- date_window_size = 10
+ date_window_size = 3
end_date = "2021-11-16"
- project_id = 7
project_timezone = "UTC"
region = "US"
select_properties_by_default = true
- source_type = "mixpanel"
start_date = "2021-11-16"
}
- name = "Donald Ernser"
- secret_id = "...my_secret_id..."
- workspace_id = "f37e4aa8-6855-4596-a732-aa5dcb6682cb"
+ definition_id = "a514955f-a2ea-425a-91d7-622e389cc420"
+ name = "Cecilia Gerlach"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b3299a61-1cc7-4be3-a8ba-7188dc05c92c"
}
```
@@ -43,11 +41,12 @@ resource "airbyte_source_mixpanel" "my_source_mixpanel" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -58,18 +57,23 @@ resource "airbyte_source_mixpanel" "my_source_mixpanel" {
### Nested Schema for `configuration`
-Optional:
+Required:
-- `attribution_window` (Number) A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days.
- `credentials` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials))
-- `date_window_size` (Number) Defines window size in days, that used to slice through data. You can reduce it, if amount of data in each window is too big for your environment.
+
+Optional:
+
+- `attribution_window` (Number) Default: 5
+A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. The default attribution window is 5 days. (This value should be a non-negative integer)
+- `date_window_size` (Number) Default: 30
+Defines the window size in days that is used to slice through data. You can reduce it if the amount of data in each window is too big for your environment. (This value should be a positive integer)
- `end_date` (String) The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date
-- `project_id` (Number) Your project ID number. See the docs for more information on how to obtain this.
-- `project_timezone` (String) Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console.
-- `region` (String) must be one of ["US", "EU"]
+- `project_timezone` (String) Default: "US/Pacific"
+Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console.
+- `region` (String) must be one of ["US", "EU"]; Default: "US"
The region of mixpanel domain instance either US or EU.
-- `select_properties_by_default` (Boolean) Setting this config parameter to TRUE ensures that new properties on events and engage records are captured. Otherwise new properties will be ignored.
-- `source_type` (String) must be one of ["mixpanel"]
+- `select_properties_by_default` (Boolean) Default: true
+Setting this config parameter to TRUE ensures that new properties on events and engage records are captured. Otherwise new properties will be ignored.
- `start_date` (String) The date in the format YYYY-MM-DD. Any data before this date will not be replicated. If this option is not set, the connector will replicate data from up to one year ago by default.
@@ -77,58 +81,24 @@ The region of mixpanel domain instance either US or EU.
Optional:
-- `source_mixpanel_authentication_wildcard_project_secret` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials--source_mixpanel_authentication_wildcard_project_secret))
-- `source_mixpanel_authentication_wildcard_service_account` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials--source_mixpanel_authentication_wildcard_service_account))
-- `source_mixpanel_update_authentication_wildcard_project_secret` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials--source_mixpanel_update_authentication_wildcard_project_secret))
-- `source_mixpanel_update_authentication_wildcard_service_account` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials--source_mixpanel_update_authentication_wildcard_service_account))
-
-
-### Nested Schema for `configuration.credentials.source_mixpanel_authentication_wildcard_project_secret`
-
-Required:
-
-- `api_secret` (String) Mixpanel project secret. See the docs for more information on how to obtain this.
-
-Optional:
-
-- `option_title` (String) must be one of ["Project Secret"]
-
-
-
-### Nested Schema for `configuration.credentials.source_mixpanel_authentication_wildcard_service_account`
-
-Required:
-
-- `secret` (String) Mixpanel Service Account Secret. See the docs for more information on how to obtain this.
-- `username` (String) Mixpanel Service Account Username. See the docs for more information on how to obtain this.
-
-Optional:
-
-- `option_title` (String) must be one of ["Service Account"]
+- `project_secret` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials--project_secret))
+- `service_account` (Attributes) Choose how to authenticate to Mixpanel (see [below for nested schema](#nestedatt--configuration--credentials--service_account))
-
-
-### Nested Schema for `configuration.credentials.source_mixpanel_update_authentication_wildcard_project_secret`
+
+### Nested Schema for `configuration.credentials.project_secret`
Required:
- `api_secret` (String) Mixpanel project secret. See the docs for more information on how to obtain this.
-Optional:
-
-- `option_title` (String) must be one of ["Project Secret"]
-
-
-### Nested Schema for `configuration.credentials.source_mixpanel_update_authentication_wildcard_service_account`
+
+### Nested Schema for `configuration.credentials.service_account`
Required:
+- `project_id` (Number) Your project ID number. See the docs for more information on how to obtain this.
- `secret` (String) Mixpanel Service Account Secret. See the docs for more information on how to obtain this.
- `username` (String) Mixpanel Service Account Username. See the docs for more information on how to obtain this.
-Optional:
-
-- `option_title` (String) must be one of ["Service Account"]
-
diff --git a/docs/resources/source_monday.md b/docs/resources/source_monday.md
index ef5c56f9b..e4e97b8b7 100644
--- a/docs/resources/source_monday.md
+++ b/docs/resources/source_monday.md
@@ -16,16 +16,15 @@ SourceMonday Resource
resource "airbyte_source_monday" "my_source_monday" {
configuration = {
credentials = {
- source_monday_authorization_method_api_token = {
+ api_token = {
api_token = "...my_api_token..."
- auth_type = "api_token"
}
}
- source_type = "monday"
}
- name = "Shirley Wisoky"
- secret_id = "...my_secret_id..."
- workspace_id = "fd5fb6e9-1b9a-49f7-8846-e2c3309db053"
+ definition_id = "2050fdf2-ba7d-443d-a0d3-384e15ed5352"
+ name = "Stella Lubowitz"
+ secret_id = "...my_secret_id..."
+ workspace_id = "aeabadeb-93c7-4728-b9b6-069b6a28df31"
}
```
@@ -35,11 +34,12 @@ resource "airbyte_source_monday" "my_source_monday" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,10 +50,6 @@ resource "airbyte_source_monday" "my_source_monday" {
### Nested Schema for `configuration`
-Required:
-
-- `source_type` (String) must be one of ["monday"]
-
Optional:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
@@ -63,56 +59,29 @@ Optional:
Optional:
-- `source_monday_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_monday_authorization_method_api_token))
-- `source_monday_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_monday_authorization_method_o_auth2_0))
-- `source_monday_update_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_monday_update_authorization_method_api_token))
-- `source_monday_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_monday_update_authorization_method_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_monday_authorization_method_api_token`
-
-Required:
-
-- `api_token` (String) API Token for making authenticated requests.
-- `auth_type` (String) must be one of ["api_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_monday_authorization_method_o_auth2_0`
-
-Required:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-
-Optional:
-
-- `subdomain` (String) Slug/subdomain of the account, or the first part of the URL that comes before .monday.com
-
+- `api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--api_token))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_monday_update_authorization_method_api_token`
+
+### Nested Schema for `configuration.credentials.api_token`
Required:
-- `api_token` (String) API Token for making authenticated requests.
-- `auth_type` (String) must be one of ["api_token"]
+- `api_token` (String, Sensitive) API Token for making authenticated requests.
-
-### Nested Schema for `configuration.credentials.source_monday_update_authorization_method_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
- `client_id` (String) The Client ID of your OAuth application.
- `client_secret` (String) The Client Secret of your OAuth application.
Optional:
-- `subdomain` (String) Slug/subdomain of the account, or the first part of the URL that comes before .monday.com
+- `subdomain` (String) Default: ""
+Slug/subdomain of the account, or the first part of the URL that comes before .monday.com
diff --git a/docs/resources/source_mongodb.md b/docs/resources/source_mongodb.md
deleted file mode 100644
index 9b8bca0ff..000000000
--- a/docs/resources/source_mongodb.md
+++ /dev/null
@@ -1,152 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_mongodb Resource - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceMongodb Resource
----
-
-# airbyte_source_mongodb (Resource)
-
-SourceMongodb Resource
-
-## Example Usage
-
-```terraform
-resource "airbyte_source_mongodb" "my_source_mongodb" {
- configuration = {
- auth_source = "admin"
- database = "...my_database..."
- instance_type = {
- source_mongodb_mongo_db_instance_type_mongo_db_atlas = {
- cluster_url = "...my_cluster_url..."
- instance = "atlas"
- }
- }
- password = "...my_password..."
- source_type = "mongodb"
- user = "...my_user..."
- }
- name = "Doreen Mayer"
- secret_id = "...my_secret_id..."
- workspace_id = "5ca006f5-392c-411a-a5a8-bf92f97428ad"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `source_id` (String)
-- `source_type` (String)
-
-
-### Nested Schema for `configuration`
-
-Required:
-
-- `database` (String) The database you want to replicate.
-- `source_type` (String) must be one of ["mongodb"]
-
-Optional:
-
-- `auth_source` (String) The authentication source where the user information is stored.
-- `instance_type` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type))
-- `password` (String) The password associated with this username.
-- `user` (String) The username which is used to access the database.
-
-
-### Nested Schema for `configuration.instance_type`
-
-Optional:
-
-- `source_mongodb_mongo_db_instance_type_mongo_db_atlas` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_mongo_db_instance_type_mongo_db_atlas))
-- `source_mongodb_mongo_db_instance_type_replica_set` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_mongo_db_instance_type_replica_set))
-- `source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance))
-- `source_mongodb_update_mongo_db_instance_type_mongo_db_atlas` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_update_mongo_db_instance_type_mongo_db_atlas))
-- `source_mongodb_update_mongo_db_instance_type_replica_set` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_update_mongo_db_instance_type_replica_set))
-- `source_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance` (Attributes) The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default. (see [below for nested schema](#nestedatt--configuration--instance_type--source_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance))
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_mongo_db_instance_type_mongo_db_atlas`
-
-Required:
-
-- `cluster_url` (String) The URL of a cluster to connect to.
-- `instance` (String) must be one of ["atlas"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_mongo_db_instance_type_replica_set`
-
-Required:
-
-- `instance` (String) must be one of ["replica"]
-- `server_addresses` (String) The members of a replica set. Please specify `host`:`port` of each member separated by comma.
-
-Optional:
-
-- `replica_set` (String) A replica set in MongoDB is a group of mongod processes that maintain the same data set.
-
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance`
-
-Required:
-
-- `host` (String) The host name of the Mongo database.
-- `instance` (String) must be one of ["standalone"]
-- `port` (Number) The port of the Mongo database.
-
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_update_mongo_db_instance_type_mongo_db_atlas`
-
-Required:
-
-- `cluster_url` (String) The URL of a cluster to connect to.
-- `instance` (String) must be one of ["atlas"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_update_mongo_db_instance_type_replica_set`
-
-Required:
-
-- `instance` (String) must be one of ["replica"]
-- `server_addresses` (String) The members of a replica set. Please specify `host`:`port` of each member separated by comma.
-
-Optional:
-
-- `replica_set` (String) A replica set in MongoDB is a group of mongod processes that maintain the same data set.
-
-
-
-### Nested Schema for `configuration.instance_type.source_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance`
-
-Required:
-
-- `host` (String) The host name of the Mongo database.
-- `instance` (String) must be one of ["standalone"]
-- `port` (Number) The port of the Mongo database.
-
-
diff --git a/docs/resources/source_mongodb_internal_poc.md b/docs/resources/source_mongodb_internal_poc.md
index c7044fa78..f7f47d95c 100644
--- a/docs/resources/source_mongodb_internal_poc.md
+++ b/docs/resources/source_mongodb_internal_poc.md
@@ -19,12 +19,12 @@ resource "airbyte_source_mongodb_internal_poc" "my_source_mongodbinternalpoc" {
connection_string = "mongodb://example1.host.com:27017,example2.host.com:27017,example3.host.com:27017"
password = "...my_password..."
replica_set = "...my_replica_set..."
- source_type = "mongodb-internal-poc"
user = "...my_user..."
}
- name = "Eduardo Weissnat"
- secret_id = "...my_secret_id..."
- workspace_id = "f8221125-359d-4983-87f7-a79cd72cd248"
+ definition_id = "6ea9203c-b787-46e7-9a53-1f3b4802a3b9"
+ name = "Hector Kuhic"
+ secret_id = "...my_secret_id..."
+ workspace_id = "76dbe116-c781-416c-b0bf-b32667c47d50"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_mongodb_internal_poc" "my_source_mongodbinternalpoc" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,15 +50,12 @@ resource "airbyte_source_mongodb_internal_poc" "my_source_mongodbinternalpoc" {
### Nested Schema for `configuration`
-Required:
-
-- `source_type` (String) must be one of ["mongodb-internal-poc"]
-
Optional:
-- `auth_source` (String) The authentication source where the user information is stored.
+- `auth_source` (String) Default: "admin"
+The authentication source where the user information is stored.
- `connection_string` (String) The connection string of the database that you want to replicate..
-- `password` (String) The password associated with this username.
+- `password` (String, Sensitive) The password associated with this username.
- `replica_set` (String) The name of the replica set to be replicated.
- `user` (String) The username which is used to access the database.
diff --git a/docs/resources/source_mongodb_v2.md b/docs/resources/source_mongodb_v2.md
new file mode 100644
index 000000000..920722deb
--- /dev/null
+++ b/docs/resources/source_mongodb_v2.md
@@ -0,0 +1,115 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_source_mongodb_v2 Resource - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ SourceMongodbV2 Resource
+---
+
+# airbyte_source_mongodb_v2 (Resource)
+
+SourceMongodbV2 Resource
+
+## Example Usage
+
+```terraform
+resource "airbyte_source_mongodb_v2" "my_source_mongodbv2" {
+ configuration = {
+ database_config = {
+ mongo_db_atlas_replica_set = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ auth_source = "admin"
+ connection_string = "mongodb+srv://cluster0.abcd1.mongodb.net/"
+ database = "...my_database..."
+ password = "...my_password..."
+ username = "Curtis38"
+ }
+ }
+ discover_sample_size = 1
+ initial_waiting_seconds = 0
+ queue_size = 5
+ }
+ definition_id = "c03f8392-0634-4c9d-b1c4-26709282f0b3"
+ name = "Nora Waelchi"
+ secret_id = "...my_secret_id..."
+ workspace_id = "729ff502-4b69-40b2-b36f-2f7a3b95d4ab"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `name` (String) Name of the source e.g. dev-mysql-instance.
+- `workspace_id` (String)
+
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
+
+### Read-Only
+
+- `source_id` (String)
+- `source_type` (String)
+
+
+### Nested Schema for `configuration`
+
+Required:
+
+- `database_config` (Attributes) Configures the MongoDB cluster type. (see [below for nested schema](#nestedatt--configuration--database_config))
+
+Optional:
+
+- `discover_sample_size` (Number) Default: 10000
+The maximum number of documents to sample when attempting to discover the unique fields for a collection.
+- `initial_waiting_seconds` (Number) Default: 300
+The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds.
+- `queue_size` (Number) Default: 10000
+The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
+
+
+### Nested Schema for `configuration.database_config`
+
+Optional:
+
+- `mongo_db_atlas_replica_set` (Attributes) MongoDB Atlas-hosted cluster configured as a replica set (see [below for nested schema](#nestedatt--configuration--database_config--mongo_db_atlas_replica_set))
+- `self_managed_replica_set` (Attributes) MongoDB self-hosted cluster configured as a replica set (see [below for nested schema](#nestedatt--configuration--database_config--self_managed_replica_set))
+
+
+### Nested Schema for `configuration.database_config.mongo_db_atlas_replica_set`
+
+Required:
+
+- `connection_string` (String) The connection string of the cluster that you want to replicate.
+- `database` (String) The name of the MongoDB database that contains the collection(s) to replicate.
+- `password` (String, Sensitive) The password associated with this username.
+- `username` (String) The username which is used to access the database.
+
+Optional:
+
+- `additional_properties` (String) Parsed as JSON.
+- `auth_source` (String) Default: "admin"
+The authentication source where the user information is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource for more details.
+
+
+
+### Nested Schema for `configuration.database_config.self_managed_replica_set`
+
+Required:
+
+- `connection_string` (String) The connection string of the cluster that you want to replicate. See https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string for more information.
+- `database` (String) The name of the MongoDB database that contains the collection(s) to replicate.
+
+Optional:
+
+- `additional_properties` (String) Parsed as JSON.
+- `auth_source` (String) Default: "admin"
+The authentication source where the user information is stored.
+- `password` (String, Sensitive) The password associated with this username.
+- `username` (String) The username which is used to access the database.
+
+
diff --git a/docs/resources/source_mssql.md b/docs/resources/source_mssql.md
index c8c1d5f2b..18b570fc2 100644
--- a/docs/resources/source_mssql.md
+++ b/docs/resources/source_mssql.md
@@ -21,32 +21,27 @@ resource "airbyte_source_mssql" "my_source_mssql" {
password = "...my_password..."
port = 1433
replication_method = {
- source_mssql_update_method_read_changes_using_change_data_capture_cdc_ = {
+ read_changes_using_change_data_capture_cdc = {
data_to_sync = "New Changes Only"
- initial_waiting_seconds = 7
- method = "CDC"
- snapshot_isolation = "Snapshot"
+ initial_waiting_seconds = 2
+ snapshot_isolation = "Read Committed"
}
}
schemas = [
"...",
]
- source_type = "mssql"
ssl_method = {
- source_mssql_ssl_method_encrypted_trust_server_certificate_ = {
- ssl_method = "encrypted_trust_server_certificate"
- }
+ source_mssql_encrypted_trust_server_certificate = {}
}
tunnel_method = {
- source_mssql_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_mssql_no_tunnel = {}
}
- username = "Bobbie60"
+ username = "Salvatore_Weissnat66"
}
- name = "Clarence Murazik"
- secret_id = "...my_secret_id..."
- workspace_id = "1ef5725f-1169-4ac1-a41d-8a23c23e34f2"
+ definition_id = "b6ad0e44-a4dc-4970-8078-573a20ac990f"
+ name = "Wm Corkery"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7a67a851-50ea-4861-a0cd-618d74280681"
}
```
@@ -56,11 +51,12 @@ resource "airbyte_source_mssql" "my_source_mssql" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -76,13 +72,12 @@ Required:
- `database` (String) The name of the database.
- `host` (String) The hostname of the database.
- `port` (Number) The port of the database.
-- `source_type` (String) must be one of ["mssql"]
- `username` (String) The username which is used to access the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) The password associated with the username.
+- `password` (String, Sensitive) The password associated with the username.
- `replication_method` (Attributes) Configures how data is extracted from the database. (see [below for nested schema](#nestedatt--configuration--replication_method))
- `schemas` (List of String) The list of schemas to sync from. Defaults to user. Case sensitive.
- `ssl_method` (Attributes) The encryption method which is used when communicating with the database. (see [below for nested schema](#nestedatt--configuration--ssl_method))
@@ -93,57 +88,24 @@ Optional:
Optional:
-- `source_mssql_update_method_read_changes_using_change_data_capture_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_method_read_changes_using_change_data_capture_cdc))
-- `source_mssql_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_method_scan_changes_with_user_defined_cursor))
-- `source_mssql_update_update_method_read_changes_using_change_data_capture_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_update_method_read_changes_using_change_data_capture_cdc))
-- `source_mssql_update_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_update_method_scan_changes_with_user_defined_cursor))
-
-
-### Nested Schema for `configuration.replication_method.source_mssql_update_method_read_changes_using_change_data_capture_cdc`
-
-Required:
+- `read_changes_using_change_data_capture_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--read_changes_using_change_data_capture_cdc))
+- `scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--scan_changes_with_user_defined_cursor))
-- `method` (String) must be one of ["CDC"]
+
+### Nested Schema for `configuration.replication_method.read_changes_using_change_data_capture_cdc`
Optional:
-- `data_to_sync` (String) must be one of ["Existing and New", "New Changes Only"]
+- `data_to_sync` (String) must be one of ["Existing and New", "New Changes Only"]; Default: "Existing and New"
What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `snapshot_isolation` (String) must be one of ["Snapshot", "Read Committed"]
+- `initial_waiting_seconds` (Number) Default: 300
+The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
+- `snapshot_isolation` (String) must be one of ["Snapshot", "Read Committed"]; Default: "Snapshot"
Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.
-
-### Nested Schema for `configuration.replication_method.source_mssql_update_method_scan_changes_with_user_defined_cursor`
-
-Required:
-
-- `method` (String) must be one of ["STANDARD"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_mssql_update_update_method_read_changes_using_change_data_capture_cdc`
-
-Required:
-
-- `method` (String) must be one of ["CDC"]
-
-Optional:
-
-- `data_to_sync` (String) must be one of ["Existing and New", "New Changes Only"]
-What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `snapshot_isolation` (String) must be one of ["Snapshot", "Read Committed"]
-Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.
-
-
-
-### Nested Schema for `configuration.replication_method.source_mssql_update_update_method_scan_changes_with_user_defined_cursor`
-
-Required:
-
-- `method` (String) must be one of ["STANDARD"]
+
+### Nested Schema for `configuration.replication_method.scan_changes_with_user_defined_cursor`
@@ -152,45 +114,15 @@ Required:
Optional:
-- `source_mssql_ssl_method_encrypted_trust_server_certificate` (Attributes) Use the certificate provided by the server without verification. (For testing purposes only!) (see [below for nested schema](#nestedatt--configuration--ssl_method--source_mssql_ssl_method_encrypted_trust_server_certificate))
-- `source_mssql_ssl_method_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--ssl_method--source_mssql_ssl_method_encrypted_verify_certificate))
-- `source_mssql_update_ssl_method_encrypted_trust_server_certificate` (Attributes) Use the certificate provided by the server without verification. (For testing purposes only!) (see [below for nested schema](#nestedatt--configuration--ssl_method--source_mssql_update_ssl_method_encrypted_trust_server_certificate))
-- `source_mssql_update_ssl_method_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--ssl_method--source_mssql_update_ssl_method_encrypted_verify_certificate))
-
-
-### Nested Schema for `configuration.ssl_method.source_mssql_ssl_method_encrypted_trust_server_certificate`
-
-Required:
-
-- `ssl_method` (String) must be one of ["encrypted_trust_server_certificate"]
+- `encrypted_trust_server_certificate` (Attributes) Use the certificate provided by the server without verification. (For testing purposes only!) (see [below for nested schema](#nestedatt--configuration--ssl_method--encrypted_trust_server_certificate))
+- `encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--ssl_method--encrypted_verify_certificate))
+
+### Nested Schema for `configuration.ssl_method.encrypted_trust_server_certificate`
-
-### Nested Schema for `configuration.ssl_method.source_mssql_ssl_method_encrypted_verify_certificate`
-
-Required:
-- `ssl_method` (String) must be one of ["encrypted_verify_certificate"]
-
-Optional:
-
-- `host_name_in_certificate` (String) Specifies the host name of the server. The value of this property must match the subject property of the certificate.
-
-
-
-### Nested Schema for `configuration.ssl_method.source_mssql_update_ssl_method_encrypted_trust_server_certificate`
-
-Required:
-
-- `ssl_method` (String) must be one of ["encrypted_trust_server_certificate"]
-
-
-
-### Nested Schema for `configuration.ssl_method.source_mssql_update_ssl_method_encrypted_verify_certificate`
-
-Required:
-
-- `ssl_method` (String) must be one of ["encrypted_verify_certificate"]
+
+### Nested Schema for `configuration.ssl_method.encrypted_verify_certificate`
Optional:
@@ -203,80 +135,41 @@ Optional:
Optional:
-- `source_mssql_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_ssh_tunnel_method_no_tunnel))
-- `source_mssql_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_ssh_tunnel_method_password_authentication))
-- `source_mssql_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_ssh_tunnel_method_ssh_key_authentication))
-- `source_mssql_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_update_ssh_tunnel_method_no_tunnel))
-- `source_mssql_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_update_ssh_tunnel_method_password_authentication))
-- `source_mssql_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mssql_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_ssh_tunnel_method_no_tunnel`
-
-Required:
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
+Optional:
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_update_ssh_tunnel_method_no_tunnel`
-
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mssql_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/source_my_hours.md b/docs/resources/source_my_hours.md
index 0085bed7f..bb769889f 100644
--- a/docs/resources/source_my_hours.md
+++ b/docs/resources/source_my_hours.md
@@ -18,12 +18,12 @@ resource "airbyte_source_my_hours" "my_source_myhours" {
email = "john@doe.com"
logs_batch_size = 30
password = "...my_password..."
- source_type = "my-hours"
- start_date = "2016-01-01"
+    start_date = "2016-01-01"
}
- name = "Elsa Kerluke"
- secret_id = "...my_secret_id..."
- workspace_id = "922151fe-1712-4099-853e-9f543d854439"
+ definition_id = "95261555-3a71-4349-8a3f-9799a12d6e33"
+ name = "Franklin Jerde"
+ secret_id = "...my_secret_id..."
+ workspace_id = "00d47724-56d0-4d26-9914-7bb3566ca647"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_my_hours" "my_source_myhours" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,12 +52,12 @@ resource "airbyte_source_my_hours" "my_source_myhours" {
Required:
- `email` (String) Your My Hours username
-- `password` (String) The password associated to the username
-- `source_type` (String) must be one of ["my-hours"]
+- `password` (String, Sensitive) The password associated with the username
- `start_date` (String) Start date for collecting time logs
Optional:
-- `logs_batch_size` (Number) Pagination size used for retrieving logs in days
+- `logs_batch_size` (Number) Default: 30
+Pagination size used for retrieving logs in days
diff --git a/docs/resources/source_mysql.md b/docs/resources/source_mysql.md
index b945dfda0..2c4cc4385 100644
--- a/docs/resources/source_mysql.md
+++ b/docs/resources/source_mysql.md
@@ -21,28 +21,23 @@ resource "airbyte_source_mysql" "my_source_mysql" {
password = "...my_password..."
port = 3306
replication_method = {
- source_mysql_update_method_read_changes_using_binary_log_cdc_ = {
- initial_waiting_seconds = 10
- method = "CDC"
+ read_changes_using_binary_log_cdc = {
+ initial_waiting_seconds = 7
server_time_zone = "...my_server_time_zone..."
}
}
- source_type = "mysql"
ssl_mode = {
- source_mysql_ssl_modes_preferred = {
- mode = "preferred"
- }
+ preferred = {}
}
tunnel_method = {
- source_mysql_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_mysql_no_tunnel = {}
}
- username = "Carley25"
+ username = "Eino_White"
}
- name = "Ruth Goodwin"
- secret_id = "...my_secret_id..."
- workspace_id = "bc154188-c2f5-46e8-9da7-832eabd617c3"
+ definition_id = "aba25784-141a-421c-8938-ad6fcbb78bed"
+ name = "Mr. Ross Cole"
+ secret_id = "...my_secret_id..."
+ workspace_id = "704ae193-8752-47d5-a3ef-7246d0c0b796"
}
```
@@ -52,11 +47,12 @@ resource "airbyte_source_mysql" "my_source_mysql" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -71,15 +67,15 @@ Required:
- `database` (String) The database name.
- `host` (String) The host name of the database.
-- `port` (Number) The port to connect to.
- `replication_method` (Attributes) Configures how data is extracted from the database. (see [below for nested schema](#nestedatt--configuration--replication_method))
-- `source_type` (String) must be one of ["mysql"]
- `username` (String) The username which is used to access the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.
-- `password` (String) The password associated with the username.
+- `password` (String, Sensitive) The password associated with the username.
+- `port` (Number) Default: 3306
+The port to connect to.
- `ssl_mode` (Attributes) SSL connection modes. Read more in the docs. (see [below for nested schema](#nestedatt--configuration--ssl_mode))
- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
@@ -88,51 +84,21 @@ Optional:
Optional:
-- `source_mysql_update_method_read_changes_using_binary_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mysql_update_method_read_changes_using_binary_log_cdc))
-- `source_mysql_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mysql_update_method_scan_changes_with_user_defined_cursor))
-- `source_mysql_update_update_method_read_changes_using_binary_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mysql_update_update_method_read_changes_using_binary_log_cdc))
-- `source_mysql_update_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mysql_update_update_method_scan_changes_with_user_defined_cursor))
-
-
-### Nested Schema for `configuration.replication_method.source_mysql_update_method_read_changes_using_binary_log_cdc`
-
-Required:
-
-- `method` (String) must be one of ["CDC"]
-
-Optional:
-
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `server_time_zone` (String) Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.
-
-
-
-### Nested Schema for `configuration.replication_method.source_mysql_update_method_scan_changes_with_user_defined_cursor`
-
-Required:
-
-- `method` (String) must be one of ["STANDARD"]
+- `read_changes_using_binary_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--read_changes_using_binary_log_cdc))
+- `scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--scan_changes_with_user_defined_cursor))
-
-
-### Nested Schema for `configuration.replication_method.source_mysql_update_update_method_read_changes_using_binary_log_cdc`
-
-Required:
-
-- `method` (String) must be one of ["CDC"]
+
+### Nested Schema for `configuration.replication_method.read_changes_using_binary_log_cdc`
Optional:
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
+- `initial_waiting_seconds` (Number) Default: 300
+The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
- `server_time_zone` (String) Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.
-
-### Nested Schema for `configuration.replication_method.source_mysql_update_update_method_scan_changes_with_user_defined_cursor`
-
-Required:
-
-- `method` (String) must be one of ["STANDARD"]
+
+### Nested Schema for `configuration.replication_method.scan_changes_with_user_defined_cursor`
@@ -141,105 +107,45 @@ Required:
Optional:
-- `source_mysql_ssl_modes_preferred` (Attributes) Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_ssl_modes_preferred))
-- `source_mysql_ssl_modes_required` (Attributes) Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_ssl_modes_required))
-- `source_mysql_ssl_modes_verify_ca` (Attributes) Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_ssl_modes_verify_ca))
-- `source_mysql_ssl_modes_verify_identity` (Attributes) Always connect with SSL. Verify both CA and Hostname. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_ssl_modes_verify_identity))
-- `source_mysql_update_ssl_modes_preferred` (Attributes) Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_update_ssl_modes_preferred))
-- `source_mysql_update_ssl_modes_required` (Attributes) Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_update_ssl_modes_required))
-- `source_mysql_update_ssl_modes_verify_ca` (Attributes) Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_update_ssl_modes_verify_ca))
-- `source_mysql_update_ssl_modes_verify_identity` (Attributes) Always connect with SSL. Verify both CA and Hostname. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_mysql_update_ssl_modes_verify_identity))
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_ssl_modes_preferred`
-
-Required:
-
-- `mode` (String) must be one of ["preferred"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_ssl_modes_required`
-
-Required:
-
-- `mode` (String) must be one of ["required"]
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_ssl_modes_verify_ca`
-
-Required:
-
-- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify_ca"]
-
-Optional:
-
-- `client_certificate` (String) Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
-- `client_key` (String) Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_ssl_modes_verify_identity`
-
-Required:
-
-- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify_identity"]
-
-Optional:
-
-- `client_certificate` (String) Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
-- `client_key` (String) Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_update_ssl_modes_preferred`
-
-Required:
+- `preferred` (Attributes) Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection. (see [below for nested schema](#nestedatt--configuration--ssl_mode--preferred))
+- `required` (Attributes) Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified. (see [below for nested schema](#nestedatt--configuration--ssl_mode--required))
+- `verify_ca` (Attributes) Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match. (see [below for nested schema](#nestedatt--configuration--ssl_mode--verify_ca))
+- `verify_identity` (Attributes) Always connect with SSL. Verify both CA and Hostname. (see [below for nested schema](#nestedatt--configuration--ssl_mode--verify_identity))
-- `mode` (String) must be one of ["preferred"]
+
+### Nested Schema for `configuration.ssl_mode.preferred`
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_update_ssl_modes_required`
-
-Required:
-
-- `mode` (String) must be one of ["required"]
+
+### Nested Schema for `configuration.ssl_mode.required`
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_update_ssl_modes_verify_ca`
+
+### Nested Schema for `configuration.ssl_mode.verify_ca`
Required:
- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify_ca"]
Optional:
- `client_certificate` (String) Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
-- `client_key` (String) Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
+- `client_key` (String, Sensitive) Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
+- `client_key_password` (String, Sensitive) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
-
-### Nested Schema for `configuration.ssl_mode.source_mysql_update_ssl_modes_verify_identity`
+
+### Nested Schema for `configuration.ssl_mode.verify_identity`
Required:
- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify_identity"]
Optional:
- `client_certificate` (String) Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
-- `client_key` (String) Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
-- `client_key_password` (String) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
+- `client_key` (String, Sensitive) Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
+- `client_key_password` (String, Sensitive) Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
@@ -248,80 +154,41 @@ Optional:
Optional:
-- `source_mysql_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_ssh_tunnel_method_no_tunnel))
-- `source_mysql_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_ssh_tunnel_method_password_authentication))
-- `source_mysql_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_ssh_tunnel_method_ssh_key_authentication))
-- `source_mysql_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_update_ssh_tunnel_method_no_tunnel))
-- `source_mysql_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_update_ssh_tunnel_method_password_authentication))
-- `source_mysql_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_mysql_update_ssh_tunnel_method_ssh_key_authentication))
-
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_ssh_tunnel_method_no_tunnel`
-
-Required:
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_update_ssh_tunnel_method_no_tunnel`
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
-Required:
+Optional:
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_mysql_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/source_netsuite.md b/docs/resources/source_netsuite.md
index 5d9f80f52..e10220080 100644
--- a/docs/resources/source_netsuite.md
+++ b/docs/resources/source_netsuite.md
@@ -21,15 +21,15 @@ resource "airbyte_source_netsuite" "my_source_netsuite" {
"...",
]
realm = "...my_realm..."
- source_type = "netsuite"
start_datetime = "2017-01-25T00:00:00Z"
token_key = "...my_token_key..."
token_secret = "...my_token_secret..."
- window_in_days = 7
+ window_in_days = 5
}
- name = "Miss Meredith Hand"
- secret_id = "...my_secret_id..."
- workspace_id = "4bf01bad-8706-4d46-882b-fbdc41ff5d4e"
+ definition_id = "b7242137-fe2e-49e2-ac4c-104f1dbe3b1f"
+ name = "Ramona Bahringer"
+ secret_id = "...my_secret_id..."
+ workspace_id = "77573847-65c7-4741-8014-d1f263651b77"
}
```
@@ -39,11 +39,12 @@ resource "airbyte_source_netsuite" "my_source_netsuite" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -56,17 +57,17 @@ resource "airbyte_source_netsuite" "my_source_netsuite" {
Required:
-- `consumer_key` (String) Consumer key associated with your integration
+- `consumer_key` (String, Sensitive) Consumer key associated with your integration
- `consumer_secret` (String) Consumer secret associated with your integration
- `realm` (String) Netsuite realm e.g. 2344535, as for `production` or 2344535_SB1, as for the `sandbox`
-- `source_type` (String) must be one of ["netsuite"]
- `start_datetime` (String) Starting point for your data replication, in format of "YYYY-MM-DDTHH:mm:ssZ"
-- `token_key` (String) Access token key
-- `token_secret` (String) Access token secret
+- `token_key` (String, Sensitive) Access token key
+- `token_secret` (String, Sensitive) Access token secret
Optional:
- `object_types` (List of String) The API names of the Netsuite objects you want to sync. Setting this speeds up the connection setup process by limiting the number of schemas that need to be retrieved from Netsuite.
-- `window_in_days` (Number) The amount of days used to query the data with date chunks. Set smaller value, if you have lots of data.
+- `window_in_days` (Number) Default: 30
+The amount of days used to query the data with date chunks. Set smaller value, if you have lots of data.
diff --git a/docs/resources/source_notion.md b/docs/resources/source_notion.md
index 4bcf04c38..2408d49b4 100644
--- a/docs/resources/source_notion.md
+++ b/docs/resources/source_notion.md
@@ -16,17 +16,16 @@ SourceNotion Resource
resource "airbyte_source_notion" "my_source_notion" {
configuration = {
credentials = {
- source_notion_authenticate_using_access_token = {
- auth_type = "token"
- token = "...my_token..."
+ source_notion_access_token = {
+ token = "...my_token..."
}
}
- source_type = "notion"
- start_date = "2020-11-16T00:00:00.000Z"
+ start_date = "2020-11-16T00:00:00.000Z"
}
- name = "Francisco Yost"
- secret_id = "...my_secret_id..."
- workspace_id = "cb35d176-38f1-4edb-b835-9ecc5cb860f8"
+ definition_id = "fe0e5e5f-386d-40ac-9af3-c6558d9b03d2"
+ name = "Jeannette Ward"
+ secret_id = "...my_secret_id..."
+ workspace_id = "dbadc477-cb62-4b59-b9f1-ee4249578a5b"
}
```
@@ -36,11 +35,12 @@ resource "airbyte_source_notion" "my_source_notion" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -53,60 +53,35 @@ resource "airbyte_source_notion" "my_source_notion" {
Required:
-- `source_type` (String) must be one of ["notion"]
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00.000Z. Any data before this date will not be replicated.
+- `credentials` (Attributes) Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information. (see [below for nested schema](#nestedatt--configuration--credentials))
Optional:
-- `credentials` (Attributes) Pick an authentication method. (see [below for nested schema](#nestedatt--configuration--credentials))
+- `start_date` (String) UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z. During incremental sync, any data generated before this date will not be replicated. If left blank, the start date will be set to 2 years before the present date.
### Nested Schema for `configuration.credentials`
Optional:
-- `source_notion_authenticate_using_access_token` (Attributes) Pick an authentication method. (see [below for nested schema](#nestedatt--configuration--credentials--source_notion_authenticate_using_access_token))
-- `source_notion_authenticate_using_o_auth2_0` (Attributes) Pick an authentication method. (see [below for nested schema](#nestedatt--configuration--credentials--source_notion_authenticate_using_o_auth2_0))
-- `source_notion_update_authenticate_using_access_token` (Attributes) Pick an authentication method. (see [below for nested schema](#nestedatt--configuration--credentials--source_notion_update_authenticate_using_access_token))
-- `source_notion_update_authenticate_using_o_auth2_0` (Attributes) Pick an authentication method. (see [below for nested schema](#nestedatt--configuration--credentials--source_notion_update_authenticate_using_o_auth2_0))
+- `access_token` (Attributes) Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information. (see [below for nested schema](#nestedatt--configuration--credentials--access_token))
+- `o_auth20` (Attributes) Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information. (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_notion_authenticate_using_access_token`
+
+### Nested Schema for `configuration.credentials.access_token`
Required:
-- `auth_type` (String) must be one of ["token"]
-- `token` (String) Notion API access token, see the docs for more information on how to obtain this token.
+- `token` (String, Sensitive) The Access Token for your private Notion integration. See the docs for more information on how to obtain this token.
-
-### Nested Schema for `configuration.credentials.source_notion_authenticate_using_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) Access Token is a token you received by complete the OauthWebFlow of Notion.
-- `auth_type` (String) must be one of ["OAuth2.0"]
-- `client_id` (String) The ClientID of your Notion integration.
-- `client_secret` (String) The ClientSecret of your Notion integration.
-
-
-
-### Nested Schema for `configuration.credentials.source_notion_update_authenticate_using_access_token`
-
-Required:
-
-- `auth_type` (String) must be one of ["token"]
-- `token` (String) Notion API access token, see the docs for more information on how to obtain this token.
-
-
-
-### Nested Schema for `configuration.credentials.source_notion_update_authenticate_using_o_auth2_0`
-
-Required:
-
-- `access_token` (String) Access Token is a token you received by complete the OauthWebFlow of Notion.
-- `auth_type` (String) must be one of ["OAuth2.0"]
-- `client_id` (String) The ClientID of your Notion integration.
-- `client_secret` (String) The ClientSecret of your Notion integration.
+- `access_token` (String, Sensitive) The Access Token received by completing the OAuth flow for your Notion integration. See our docs for more information.
+- `client_id` (String) The Client ID of your Notion integration. See our docs for more information.
+- `client_secret` (String) The Client Secret of your Notion integration. See our docs for more information.
diff --git a/docs/resources/source_nytimes.md b/docs/resources/source_nytimes.md
index ff83b2189..e8a01c83c 100644
--- a/docs/resources/source_nytimes.md
+++ b/docs/resources/source_nytimes.md
@@ -15,16 +15,16 @@ SourceNytimes Resource
```terraform
resource "airbyte_source_nytimes" "my_source_nytimes" {
configuration = {
- api_key = "...my_api_key..."
- end_date = "1851-01"
- period = "7"
- share_type = "facebook"
- source_type = "nytimes"
- start_date = "2022-08"
+ api_key = "...my_api_key..."
+ end_date = "1851-01"
+ period = "30"
+ share_type = "facebook"
+ start_date = "2022-08"
}
- name = "Mr. Emily Macejkovic"
- secret_id = "...my_secret_id..."
- workspace_id = "4fe44472-97cd-43b1-9d3b-bce247b7684e"
+ definition_id = "83b2c4dd-4d42-4907-b41e-e0bbab0457d9"
+ name = "Sue Durgan"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e6ecd841-e72a-4766-a686-faa512d8044b"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_nytimes" "my_source_nytimes" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,10 +52,9 @@ resource "airbyte_source_nytimes" "my_source_nytimes" {
Required:
-- `api_key` (String) API Key
+- `api_key` (String, Sensitive) API Key
- `period` (Number) must be one of ["1", "7", "30"]
Period of time (in days)
-- `source_type` (String) must be one of ["nytimes"]
- `start_date` (String) Start date to begin the article retrieval (format YYYY-MM)
Optional:
diff --git a/docs/resources/source_okta.md b/docs/resources/source_okta.md
index 818e96444..b61cd09fe 100644
--- a/docs/resources/source_okta.md
+++ b/docs/resources/source_okta.md
@@ -16,18 +16,17 @@ SourceOkta Resource
resource "airbyte_source_okta" "my_source_okta" {
configuration = {
credentials = {
- source_okta_authorization_method_api_token = {
+ source_okta_api_token = {
api_token = "...my_api_token..."
- auth_type = "api_token"
}
}
- domain = "...my_domain..."
- source_type = "okta"
- start_date = "2022-07-22T00:00:00Z"
+ domain = "...my_domain..."
+ start_date = "2022-07-22T00:00:00Z"
}
- name = "Mr. Emmett Heidenreich"
- secret_id = "...my_secret_id..."
- workspace_id = "6d71cffb-d0eb-474b-8421-953b44bd3c43"
+ definition_id = "05c5b711-2361-4f26-947b-86cdec1a2bc2"
+ name = "Isaac Bruen"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5e3ceb6c-910d-4c95-a96c-b5f3bc4b3253"
}
```
@@ -37,11 +36,12 @@ resource "airbyte_source_okta" "my_source_okta" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -52,10 +52,6 @@ resource "airbyte_source_okta" "my_source_okta" {
### Nested Schema for `configuration`
-Required:
-
-- `source_type` (String) must be one of ["okta"]
-
Optional:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
@@ -67,48 +63,24 @@ Optional:
Optional:
-- `source_okta_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_okta_authorization_method_api_token))
-- `source_okta_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_okta_authorization_method_o_auth2_0))
-- `source_okta_update_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_okta_update_authorization_method_api_token))
-- `source_okta_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_okta_update_authorization_method_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_okta_authorization_method_api_token`
-
-Required:
-
-- `api_token` (String) An Okta token. See the docs for instructions on how to generate it.
-- `auth_type` (String) must be one of ["api_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_okta_authorization_method_o_auth2_0`
-
-Required:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
-
+- `api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--api_token))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_okta_update_authorization_method_api_token`
+
+### Nested Schema for `configuration.credentials.api_token`
Required:
-- `api_token` (String) An Okta token. See the docs for instructions on how to generate it.
-- `auth_type` (String) must be one of ["api_token"]
+- `api_token` (String, Sensitive) An Okta token. See the docs for instructions on how to generate it.
-
-### Nested Schema for `configuration.credentials.source_okta_update_authorization_method_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `auth_type` (String) must be one of ["oauth2.0"]
- `client_id` (String) The Client ID of your OAuth application.
- `client_secret` (String) The Client Secret of your OAuth application.
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
+- `refresh_token` (String, Sensitive) Refresh Token to obtain a new Access Token when it expires.
diff --git a/docs/resources/source_omnisend.md b/docs/resources/source_omnisend.md
index f0b9461ad..20f97491c 100644
--- a/docs/resources/source_omnisend.md
+++ b/docs/resources/source_omnisend.md
@@ -15,12 +15,12 @@ SourceOmnisend Resource
```terraform
resource "airbyte_source_omnisend" "my_source_omnisend" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "omnisend"
+ api_key = "...my_api_key..."
}
- name = "Lynn Miller"
- secret_id = "...my_secret_id..."
- workspace_id = "3e5953c0-0113-4986-baa4-1e6c31cc2f1f"
+ definition_id = "e6bd591e-2544-44d2-a34f-d1d8ea1c7d43"
+ name = "Rachel Ankunding"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c9c1a8da-b7e7-43a5-9718-14e4dc1f633a"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_omnisend" "my_source_omnisend" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_omnisend" "my_source_omnisend" {
Required:
-- `api_key` (String) API Key
-- `source_type` (String) must be one of ["omnisend"]
+- `api_key` (String, Sensitive) API Key
diff --git a/docs/resources/source_onesignal.md b/docs/resources/source_onesignal.md
index e0b682980..6bafb676e 100644
--- a/docs/resources/source_onesignal.md
+++ b/docs/resources/source_onesignal.md
@@ -23,13 +23,13 @@ resource "airbyte_source_onesignal" "my_source_onesignal" {
},
]
outcome_names = "os__session_duration.count,os__click.count,CustomOutcomeName.sum"
- source_type = "onesignal"
start_date = "2020-11-16T00:00:00Z"
user_auth_key = "...my_user_auth_key..."
}
- name = "Joan Schaefer"
- secret_id = "...my_secret_id..."
- workspace_id = "41ffbe9c-bd79-45ee-a5e0-76cc7abf616e"
+ definition_id = "58a542d5-17fc-488b-8499-8d75efedea33"
+ name = "Krystal Hamill"
+ secret_id = "...my_secret_id..."
+ workspace_id = "15598db9-2c72-4d54-9f53-8928a50561c1"
}
```
@@ -39,11 +39,12 @@ resource "airbyte_source_onesignal" "my_source_onesignal" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -58,16 +59,15 @@ Required:
- `applications` (Attributes List) Applications keys, see the docs for more information on how to obtain this data (see [below for nested schema](#nestedatt--configuration--applications))
- `outcome_names` (String) Comma-separated list of names and the value (sum/count) for the returned outcome data. See the docs for more details
-- `source_type` (String) must be one of ["onesignal"]
- `start_date` (String) The date from which you'd like to replicate data for OneSignal API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
-- `user_auth_key` (String) OneSignal User Auth Key, see the docs for more information on how to obtain this key.
+- `user_auth_key` (String, Sensitive) OneSignal User Auth Key, see the docs for more information on how to obtain this key.
### Nested Schema for `configuration.applications`
Required:
-- `app_api_key` (String)
+- `app_api_key` (String, Sensitive)
- `app_id` (String)
Optional:
diff --git a/docs/resources/source_oracle.md b/docs/resources/source_oracle.md
index 093d12f88..db3b9e1b5 100644
--- a/docs/resources/source_oracle.md
+++ b/docs/resources/source_oracle.md
@@ -16,35 +16,31 @@ SourceOracle Resource
resource "airbyte_source_oracle" "my_source_oracle" {
configuration = {
connection_data = {
- source_oracle_connect_by_service_name = {
- connection_type = "service_name"
- service_name = "...my_service_name..."
+ service_name = {
+ service_name = "...my_service_name..."
}
}
encryption = {
- source_oracle_encryption_native_network_encryption_nne_ = {
- encryption_algorithm = "RC4_56"
- encryption_method = "client_nne"
+ native_network_encryption_nne = {
+ encryption_algorithm = "3DES168"
}
}
host = "...my_host..."
jdbc_url_params = "...my_jdbc_url_params..."
password = "...my_password..."
- port = 4
+ port = 8
schemas = [
"...",
]
- source_type = "oracle"
tunnel_method = {
- source_oracle_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_oracle_no_tunnel = {}
}
- username = "Oswaldo42"
+ username = "Hellen.Champlin"
}
- name = "Cheryl McKenzie"
- secret_id = "...my_secret_id..."
- workspace_id = "b90f2e09-d19d-42fc-af9e-2e105944b935"
+ definition_id = "a1ad7b3d-761e-429e-b26a-e07d2b59ab56"
+ name = "Jake Pfeffer"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c000ccde-ed12-4bd5-ab73-d022a608737f"
}
```
@@ -54,11 +50,12 @@ resource "airbyte_source_oracle" "my_source_oracle" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -73,18 +70,18 @@ Required:
- `encryption` (Attributes) The encryption method with is used when communicating with the database. (see [below for nested schema](#nestedatt--configuration--encryption))
- `host` (String) Hostname of the database.
-- `port` (Number) Port of the database.
-Oracle Corporations recommends the following port numbers:
-1521 - Default listening port for client connections to the listener.
-2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL
-- `source_type` (String) must be one of ["oracle"]
- `username` (String) The username which is used to access the database.
Optional:
- `connection_data` (Attributes) Connect data that will be used for DB connection (see [below for nested schema](#nestedatt--configuration--connection_data))
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
-- `password` (String) The password associated with the username.
+- `password` (String, Sensitive) The password associated with the username.
+- `port` (Number) Default: 1521
+Port of the database.
+Oracle Corporation recommends the following port numbers:
+1521 - Default listening port for client connections to the listener.
+2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL
- `schemas` (List of String) The list of schemas to sync from. Defaults to user. Case sensitive.
- `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method))
@@ -93,52 +90,23 @@ Optional:
Optional:
-- `source_oracle_encryption_native_network_encryption_nne` (Attributes) The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports. (see [below for nested schema](#nestedatt--configuration--encryption--source_oracle_encryption_native_network_encryption_nne))
-- `source_oracle_encryption_tls_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--encryption--source_oracle_encryption_tls_encrypted_verify_certificate))
-- `source_oracle_update_encryption_native_network_encryption_nne` (Attributes) The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports. (see [below for nested schema](#nestedatt--configuration--encryption--source_oracle_update_encryption_native_network_encryption_nne))
-- `source_oracle_update_encryption_tls_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--encryption--source_oracle_update_encryption_tls_encrypted_verify_certificate))
-
-
-### Nested Schema for `configuration.encryption.source_oracle_encryption_native_network_encryption_nne`
-
-Required:
-
-- `encryption_method` (String) must be one of ["client_nne"]
-
-Optional:
-
-- `encryption_algorithm` (String) must be one of ["AES256", "RC4_56", "3DES168"]
-This parameter defines what encryption algorithm is used.
-
-
-
-### Nested Schema for `configuration.encryption.source_oracle_encryption_tls_encrypted_verify_certificate`
-
-Required:
-
-- `encryption_method` (String) must be one of ["encrypted_verify_certificate"]
-- `ssl_certificate` (String) Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.
+- `native_network_encryption_nne` (Attributes) The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports. (see [below for nested schema](#nestedatt--configuration--encryption--native_network_encryption_nne))
+- `tls_encrypted_verify_certificate` (Attributes) Verify and use the certificate provided by the server. (see [below for nested schema](#nestedatt--configuration--encryption--tls_encrypted_verify_certificate))
-
-
-### Nested Schema for `configuration.encryption.source_oracle_update_encryption_native_network_encryption_nne`
-
-Required:
-
-- `encryption_method` (String) must be one of ["client_nne"]
+
+### Nested Schema for `configuration.encryption.native_network_encryption_nne`
Optional:
-- `encryption_algorithm` (String) must be one of ["AES256", "RC4_56", "3DES168"]
+- `encryption_algorithm` (String) must be one of ["AES256", "RC4_56", "3DES168"]; Default: "AES256"
This parameter defines what encryption algorithm is used.
-
-### Nested Schema for `configuration.encryption.source_oracle_update_encryption_tls_encrypted_verify_certificate`
+
+### Nested Schema for `configuration.encryption.tls_encrypted_verify_certificate`
Required:
-- `encryption_method` (String) must be one of ["encrypted_verify_certificate"]
- `ssl_certificate` (String) Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.
@@ -148,58 +116,24 @@ Required:
Optional:
-- `source_oracle_connect_by_service_name` (Attributes) Use service name (see [below for nested schema](#nestedatt--configuration--connection_data--source_oracle_connect_by_service_name))
-- `source_oracle_connect_by_system_id_sid` (Attributes) Use SID (Oracle System Identifier) (see [below for nested schema](#nestedatt--configuration--connection_data--source_oracle_connect_by_system_id_sid))
-- `source_oracle_update_connect_by_service_name` (Attributes) Use service name (see [below for nested schema](#nestedatt--configuration--connection_data--source_oracle_update_connect_by_service_name))
-- `source_oracle_update_connect_by_system_id_sid` (Attributes) Use SID (Oracle System Identifier) (see [below for nested schema](#nestedatt--configuration--connection_data--source_oracle_update_connect_by_system_id_sid))
+- `service_name` (Attributes) Use service name (see [below for nested schema](#nestedatt--configuration--connection_data--service_name))
+- `system_idsid` (Attributes) Use SID (Oracle System Identifier) (see [below for nested schema](#nestedatt--configuration--connection_data--system_idsid))
-
-### Nested Schema for `configuration.connection_data.source_oracle_connect_by_service_name`
+
+### Nested Schema for `configuration.connection_data.service_name`
Required:
- `service_name` (String)
-Optional:
-
-- `connection_type` (String) must be one of ["service_name"]
-
-
-### Nested Schema for `configuration.connection_data.source_oracle_connect_by_system_id_sid`
+
+### Nested Schema for `configuration.connection_data.system_idsid`
Required:
- `sid` (String)
-Optional:
-
-- `connection_type` (String) must be one of ["sid"]
-
-
-
-### Nested Schema for `configuration.connection_data.source_oracle_update_connect_by_service_name`
-
-Required:
-
-- `service_name` (String)
-
-Optional:
-
-- `connection_type` (String) must be one of ["service_name"]
-
-
-
-### Nested Schema for `configuration.connection_data.source_oracle_update_connect_by_system_id_sid`
-
-Required:
-
-- `sid` (String)
-
-Optional:
-
-- `connection_type` (String) must be one of ["sid"]
-
@@ -207,80 +141,41 @@ Optional:
Optional:
-- `source_oracle_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_ssh_tunnel_method_no_tunnel))
-- `source_oracle_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_ssh_tunnel_method_password_authentication))
-- `source_oracle_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_ssh_tunnel_method_ssh_key_authentication))
-- `source_oracle_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_update_ssh_tunnel_method_no_tunnel))
-- `source_oracle_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_update_ssh_tunnel_method_password_authentication))
-- `source_oracle_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_oracle_update_ssh_tunnel_method_ssh_key_authentication))
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_ssh_tunnel_method_no_tunnel`
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-Required:
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
-Required:
-
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_update_ssh_tunnel_method_no_tunnel`
-
-Required:
+Optional:
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_oracle_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/source_orb.md b/docs/resources/source_orb.md
index d6144b287..a9c9dce8b 100644
--- a/docs/resources/source_orb.md
+++ b/docs/resources/source_orb.md
@@ -16,21 +16,21 @@ SourceOrb Resource
resource "airbyte_source_orb" "my_source_orb" {
configuration = {
api_key = "...my_api_key..."
- lookback_window_days = 9
+ lookback_window_days = 6
numeric_event_properties_keys = [
"...",
]
- plan_id = "...my_plan_id..."
- source_type = "orb"
- start_date = "2022-03-01T00:00:00Z"
+ plan_id = "...my_plan_id..."
+ start_date = "2022-03-01T00:00:00Z"
string_event_properties_keys = [
"...",
]
subscription_usage_grouping_key = "...my_subscription_usage_grouping_key..."
}
- name = "Josephine Kilback"
- secret_id = "...my_secret_id..."
- workspace_id = "2f90849d-6aed-44ae-8b75-37cd9222c9ff"
+ definition_id = "f9cf17c9-c1c9-4188-a190-0dfc35041fcd"
+ name = "Shaun Schimmel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "262ef24d-9236-49b1-bf5a-7ba288f10a06"
}
```
@@ -40,11 +40,12 @@ resource "airbyte_source_orb" "my_source_orb" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -57,16 +58,16 @@ resource "airbyte_source_orb" "my_source_orb" {
Required:
-- `api_key` (String) Orb API Key, issued from the Orb admin console.
-- `source_type` (String) must be one of ["orb"]
+- `api_key` (String, Sensitive) Orb API Key, issued from the Orb admin console.
- `start_date` (String) UTC date and time in the format 2022-03-01T00:00:00Z. Any data with created_at before this data will not be synced. For Subscription Usage, this becomes the `timeframe_start` API parameter.
Optional:
-- `lookback_window_days` (Number) When set to N, the connector will always refresh resources created within the past N days. By default, updated objects that are not newly created are not incrementally synced.
-- `numeric_event_properties_keys` (List of String) Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.
+- `lookback_window_days` (Number) Default: 0
+When set to N, the connector will always refresh resources created within the past N days. By default, updated objects that are not newly created are not incrementally synced.
+- `numeric_event_properties_keys` (List of String, Sensitive) Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.
- `plan_id` (String) Orb Plan ID to filter subscriptions that should have usage fetched.
-- `string_event_properties_keys` (List of String) Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.
-- `subscription_usage_grouping_key` (String) Property key name to group subscription usage by.
+- `string_event_properties_keys` (List of String, Sensitive) Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.
+- `subscription_usage_grouping_key` (String, Sensitive) Property key name to group subscription usage by.
diff --git a/docs/resources/source_orbit.md b/docs/resources/source_orbit.md
index 6f9cbf315..46f3c242d 100644
--- a/docs/resources/source_orbit.md
+++ b/docs/resources/source_orbit.md
@@ -15,14 +15,14 @@ SourceOrbit Resource
```terraform
resource "airbyte_source_orbit" "my_source_orbit" {
configuration = {
- api_token = "...my_api_token..."
- source_type = "orbit"
- start_date = "...my_start_date..."
- workspace = "...my_workspace..."
+ api_token = "...my_api_token..."
+ start_date = "...my_start_date..."
+ workspace = "...my_workspace..."
}
- name = "Jo Greenholt V"
- secret_id = "...my_secret_id..."
- workspace_id = "abfa2e76-1f0c-4a4d-856e-f1031e6899f0"
+ definition_id = "35ff19f3-8868-45d8-941e-7db0723f9473"
+ name = "Salvatore Schmitt DVM"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e5b71225-778f-47a0-a3c1-e08d80f694c4"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_orbit" "my_source_orbit" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,8 +50,7 @@ resource "airbyte_source_orbit" "my_source_orbit" {
Required:
-- `api_token` (String) Authorizes you to work with Orbit workspaces associated with the token.
-- `source_type` (String) must be one of ["orbit"]
+- `api_token` (String, Sensitive) Authorizes you to work with Orbit workspaces associated with the token.
- `workspace` (String) The unique name of the workspace that your API token is associated with.
Optional:
diff --git a/docs/resources/source_outbrain_amplify.md b/docs/resources/source_outbrain_amplify.md
index 18a19739c..2fa20a753 100644
--- a/docs/resources/source_outbrain_amplify.md
+++ b/docs/resources/source_outbrain_amplify.md
@@ -16,20 +16,19 @@ SourceOutbrainAmplify Resource
resource "airbyte_source_outbrain_amplify" "my_source_outbrainamplify" {
configuration = {
credentials = {
- source_outbrain_amplify_authentication_method_access_token = {
+ source_outbrain_amplify_access_token = {
access_token = "...my_access_token..."
- type = "access_token"
}
}
end_date = "...my_end_date..."
- geo_location_breakdown = "subregion"
- report_granularity = "daily"
- source_type = "outbrain-amplify"
+ geo_location_breakdown = "region"
+ report_granularity = "monthly"
start_date = "...my_start_date..."
}
- name = "Cynthia Boyer"
- secret_id = "...my_secret_id..."
- workspace_id = "2cd55cc0-584a-4184-976d-971fc820c65b"
+ definition_id = "9d0f84cc-bad7-41da-b038-014a124b6e7b"
+ name = "Donna Leannon"
+ secret_id = "...my_secret_id..."
+ workspace_id = "37b0c992-762a-438a-a73d-79a85cb72465"
}
```
@@ -39,11 +38,12 @@ resource "airbyte_source_outbrain_amplify" "my_source_outbrainamplify" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -57,7 +57,6 @@ resource "airbyte_source_outbrain_amplify" "my_source_outbrainamplify" {
Required:
- `credentials` (Attributes) Credentials for making authenticated requests requires either username/password or access_token. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["outbrain-amplify"]
- `start_date` (String) Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before this date will not be replicated.
Optional:
@@ -73,46 +72,23 @@ The granularity used for periodic data in reports. See
-### Nested Schema for `configuration.credentials.source_outbrain_amplify_authentication_method_access_token`
+
+### Nested Schema for `configuration.credentials.access_token`
Required:
-- `access_token` (String) Access Token for making authenticated requests.
-- `type` (String) must be one of ["access_token"]
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
-
-### Nested Schema for `configuration.credentials.source_outbrain_amplify_authentication_method_username_password`
+
+### Nested Schema for `configuration.credentials.username_password`
Required:
-- `password` (String) Add Password for authentication.
-- `type` (String) must be one of ["username_password"]
-- `username` (String) Add Username for authentication.
-
-
-
-### Nested Schema for `configuration.credentials.source_outbrain_amplify_update_authentication_method_access_token`
-
-Required:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `type` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_outbrain_amplify_update_authentication_method_username_password`
-
-Required:
-
-- `password` (String) Add Password for authentication.
-- `type` (String) must be one of ["username_password"]
+- `password` (String, Sensitive) Add Password for authentication.
- `username` (String) Add Username for authentication.
diff --git a/docs/resources/source_outreach.md b/docs/resources/source_outreach.md
index 585c858dc..ca19326a4 100644
--- a/docs/resources/source_outreach.md
+++ b/docs/resources/source_outreach.md
@@ -19,12 +19,12 @@ resource "airbyte_source_outreach" "my_source_outreach" {
client_secret = "...my_client_secret..."
redirect_uri = "...my_redirect_uri..."
refresh_token = "...my_refresh_token..."
- source_type = "outreach"
start_date = "2020-11-16T00:00:00Z"
}
- name = "Kim Kirlin"
- secret_id = "...my_secret_id..."
- workspace_id = "8e0cc885-187e-44de-84af-28c5dddb46aa"
+ definition_id = "18021619-8723-463e-89a2-aae62d9d7702"
+ name = "Tanya Hand"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6995c576-52df-4199-822b-3629976b741d"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_outreach" "my_source_outreach" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -54,8 +55,7 @@ Required:
- `client_id` (String) The Client ID of your Outreach developer application.
- `client_secret` (String) The Client Secret of your Outreach developer application.
- `redirect_uri` (String) A Redirect URI is the location where the authorization server sends the user once the app has been successfully authorized and granted an authorization code or access token.
-- `refresh_token` (String) The token for obtaining the new access token.
-- `source_type` (String) must be one of ["outreach"]
+- `refresh_token` (String, Sensitive) The token for obtaining the new access token.
- `start_date` (String) The date from which you'd like to replicate data for Outreach API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
diff --git a/docs/resources/source_paypal_transaction.md b/docs/resources/source_paypal_transaction.md
index d78081001..f2a0e03fc 100644
--- a/docs/resources/source_paypal_transaction.md
+++ b/docs/resources/source_paypal_transaction.md
@@ -19,12 +19,13 @@ resource "airbyte_source_paypal_transaction" "my_source_paypaltransaction" {
client_secret = "...my_client_secret..."
is_sandbox = false
refresh_token = "...my_refresh_token..."
- source_type = "paypal-transaction"
start_date = "2021-06-11T23:59:59+00:00"
+ time_window = 7
}
- name = "Ernestine Little"
- secret_id = "...my_secret_id..."
- workspace_id = "da013191-1296-4466-85c1-d81f29042f56"
+ definition_id = "dd349afd-0cd9-45bc-be33-42dc402aef61"
+ name = "Edna Hamill"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9f94f985-aa22-4e67-bc77-be4e4244a41c"
}
```
@@ -34,11 +35,12 @@ resource "airbyte_source_paypal_transaction" "my_source_paypaltransaction" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -53,12 +55,14 @@ Required:
- `client_id` (String) The Client ID of your Paypal developer application.
- `client_secret` (String) The Client Secret of your Paypal developer application.
-- `is_sandbox` (Boolean) Determines whether to use the sandbox or production environment.
-- `source_type` (String) must be one of ["paypal-transaction"]
-- `start_date` (String) Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before present time.
+- `start_date` (String) Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before present time.
Optional:
-- `refresh_token` (String) The key to refresh the expired access token.
+- `is_sandbox` (Boolean) Default: false
+Determines whether to use the sandbox or production environment.
+- `refresh_token` (String, Sensitive) The key to refresh the expired access token.
+- `time_window` (Number) Default: 7
+The number of days per request. Must be a number between 1 and 31.
diff --git a/docs/resources/source_paystack.md b/docs/resources/source_paystack.md
index 68355ea78..929eda1d7 100644
--- a/docs/resources/source_paystack.md
+++ b/docs/resources/source_paystack.md
@@ -15,14 +15,14 @@ SourcePaystack Resource
```terraform
resource "airbyte_source_paystack" "my_source_paystack" {
configuration = {
- lookback_window_days = 6
+ lookback_window_days = 9
secret_key = "...my_secret_key..."
- source_type = "paystack"
start_date = "2017-01-25T00:00:00Z"
}
- name = "Dr. Boyd Wilderman"
- secret_id = "...my_secret_id..."
- workspace_id = "2216cbe0-71bc-4163-a279-a3b084da9925"
+ definition_id = "5b489304-8e9c-41af-9961-b1c883a57271"
+ name = "Kari Lemke"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b6433cb8-2b32-4ad0-bfd9-a9d8ba9b0df8"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_paystack" "my_source_paystack" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,12 +50,12 @@ resource "airbyte_source_paystack" "my_source_paystack" {
Required:
-- `secret_key` (String) The Paystack API key (usually starts with 'sk_live_'; find yours here).
-- `source_type` (String) must be one of ["paystack"]
+- `secret_key` (String, Sensitive) The Paystack API key (usually starts with 'sk_live_'; find yours here).
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
Optional:
-- `lookback_window_days` (Number) When set, the connector will always reload data from the past N days, where N is the value set here. This is useful if your data is updated after creation.
+- `lookback_window_days` (Number) Default: 0
+When set, the connector will always reload data from the past N days, where N is the value set here. This is useful if your data is updated after creation.
diff --git a/docs/resources/source_pendo.md b/docs/resources/source_pendo.md
index 8f1f63acb..4c98cd8a0 100644
--- a/docs/resources/source_pendo.md
+++ b/docs/resources/source_pendo.md
@@ -15,12 +15,12 @@ SourcePendo Resource
```terraform
resource "airbyte_source_pendo" "my_source_pendo" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "pendo"
+ api_key = "...my_api_key..."
}
- name = "Estelle Bechtelar"
- secret_id = "...my_secret_id..."
- workspace_id = "40847a74-2d84-4496-8bde-ecf6b99bc635"
+ definition_id = "6503c474-3ee7-49bd-93e2-04659bbdc56c"
+ name = "Mandy Conroy"
+ secret_id = "...my_secret_id..."
+ workspace_id = "0259c6b1-3998-4d3f-8543-0ae066d4a91b"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_pendo" "my_source_pendo" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_pendo" "my_source_pendo" {
Required:
-- `api_key` (String)
-- `source_type` (String) must be one of ["pendo"]
+- `api_key` (String, Sensitive)
diff --git a/docs/resources/source_persistiq.md b/docs/resources/source_persistiq.md
index aaa226c27..bf06782d4 100644
--- a/docs/resources/source_persistiq.md
+++ b/docs/resources/source_persistiq.md
@@ -15,12 +15,12 @@ SourcePersistiq Resource
```terraform
resource "airbyte_source_persistiq" "my_source_persistiq" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "persistiq"
+ api_key = "...my_api_key..."
}
- name = "Nicole Vandervort"
- secret_id = "...my_secret_id..."
- workspace_id = "df55c294-c060-4b06-a128-7764eef6d0c6"
+ definition_id = "bbc35ba8-92b6-4d58-85ab-7b9331a5ddaf"
+ name = "Taylor Keeling"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5ec8caac-d8d2-4abf-9c0f-33811ddad7d7"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_persistiq" "my_source_persistiq" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_persistiq" "my_source_persistiq" {
Required:
-- `api_key` (String) PersistIq API Key. See the docs for more information on where to find that key.
-- `source_type` (String) must be one of ["persistiq"]
+- `api_key` (String, Sensitive) PersistIq API Key. See the docs for more information on where to find that key.
diff --git a/docs/resources/source_pexels_api.md b/docs/resources/source_pexels_api.md
index 3f0f8b35a..67bdde69e 100644
--- a/docs/resources/source_pexels_api.md
+++ b/docs/resources/source_pexels_api.md
@@ -17,15 +17,15 @@ resource "airbyte_source_pexels_api" "my_source_pexelsapi" {
configuration = {
api_key = "...my_api_key..."
color = "orange"
- locale = "en-US"
+ locale = "pt-BR"
orientation = "landscape"
- query = "oceans"
+ query = "people"
size = "small"
- source_type = "pexels-api"
}
- name = "Arnold Dooley"
- secret_id = "...my_secret_id..."
- workspace_id = "63457150-9a8e-4870-93c5-a1f9c242c7b6"
+ definition_id = "f68e00dc-dadd-4479-a116-8b4fa7262d2a"
+ name = "Brandy Weimann"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6dd11df0-9849-4375-b622-7890d41f1391"
}
```
@@ -35,11 +35,12 @@ resource "airbyte_source_pexels_api" "my_source_pexelsapi" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -52,9 +53,8 @@ resource "airbyte_source_pexels_api" "my_source_pexelsapi" {
Required:
-- `api_key` (String) API key is required to access pexels api, For getting your's goto https://www.pexels.com/api/documentation and create account for free.
+- `api_key` (String, Sensitive) API key is required to access pexels api, For getting your's goto https://www.pexels.com/api/documentation and create account for free.
- `query` (String) Optional, the search query, Example Ocean, Tigers, Pears, etc.
-- `source_type` (String) must be one of ["pexels-api"]
Optional:
diff --git a/docs/resources/source_pinterest.md b/docs/resources/source_pinterest.md
index 6805e0477..2b394a4c6 100644
--- a/docs/resources/source_pinterest.md
+++ b/docs/resources/source_pinterest.md
@@ -16,20 +16,39 @@ SourcePinterest Resource
resource "airbyte_source_pinterest" "my_source_pinterest" {
configuration = {
credentials = {
- source_pinterest_authorization_method_access_token = {
- access_token = "...my_access_token..."
- auth_method = "access_token"
+ source_pinterest_o_auth2_0 = {
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ refresh_token = "...my_refresh_token..."
}
}
- source_type = "pinterest"
- start_date = "2022-07-28"
+ custom_reports = [
+ {
+ attribution_types = [
+ "HOUSEHOLD",
+ ]
+ click_window_days = "30"
+ columns = [
+ "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT",
+ ]
+ conversion_report_time = "TIME_OF_AD_ACTION"
+ engagement_window_days = "7"
+ granularity = "MONTH"
+ level = "CAMPAIGN"
+ name = "Ms. Edgar Halvorson"
+ start_date = "2022-07-28"
+ view_window_days = "0"
+ },
+ ]
+ start_date = "2022-07-28"
status = [
"ACTIVE",
]
}
- name = "Nathan Bauch"
- secret_id = "...my_secret_id..."
- workspace_id = "3df5b671-9890-4f42-a4bb-438d85b26059"
+ definition_id = "66a5ec46-f2bc-4e2e-b7bb-ccef588ac548"
+ name = "Lamar Lakin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a9dbf52c-7929-43e2-8aa8-1903348b38fe"
}
```
@@ -39,11 +58,12 @@ resource "airbyte_source_pinterest" "my_source_pinterest" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -54,14 +74,11 @@ resource "airbyte_source_pinterest" "my_source_pinterest" {
### Nested Schema for `configuration`
-Required:
-
-- `source_type` (String) must be one of ["pinterest"]
-- `start_date` (String) A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by api (89 days from today).
-
Optional:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
+- `custom_reports` (Attributes List) A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on "add" to fill this field. (see [below for nested schema](#nestedatt--configuration--custom_reports))
+- `start_date` (String) A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by api (89 days from today).
- `status` (List of String) Entity statuses based off of campaigns, ad_groups, and ads. If you do not have a status set, it will be ignored completely.
@@ -69,27 +86,14 @@ Optional:
Optional:
-- `source_pinterest_authorization_method_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_pinterest_authorization_method_access_token))
-- `source_pinterest_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_pinterest_authorization_method_o_auth2_0))
-- `source_pinterest_update_authorization_method_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_pinterest_update_authorization_method_access_token))
-- `source_pinterest_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_pinterest_update_authorization_method_o_auth2_0))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_pinterest_authorization_method_access_token`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) The Access Token to make authenticated requests.
-- `auth_method` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_pinterest_authorization_method_o_auth2_0`
-
-Required:
-
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
+- `refresh_token` (String, Sensitive) Refresh Token to obtain new Access Token, when it's expired.
Optional:
@@ -97,26 +101,30 @@ Optional:
- `client_secret` (String) The Client Secret of your OAuth application.
-
-### Nested Schema for `configuration.credentials.source_pinterest_update_authorization_method_access_token`
-
-Required:
-
-- `access_token` (String) The Access Token to make authenticated requests.
-- `auth_method` (String) must be one of ["access_token"]
-
-
-### Nested Schema for `configuration.credentials.source_pinterest_update_authorization_method_o_auth2_0`
+
+### Nested Schema for `configuration.custom_reports`
Required:
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
+- `columns` (List of String) A list of chosen columns
+- `name` (String) The name value of report
Optional:
-- `client_id` (String) The Client ID of your OAuth application
-- `client_secret` (String) The Client Secret of your OAuth application.
+- `attribution_types` (List of String) List of types of attribution for the conversion report
+- `click_window_days` (Number) must be one of ["0", "1", "7", "14", "30", "60"]; Default: 30
+Number of days to use as the conversion attribution window for a pin click action.
+- `conversion_report_time` (String) must be one of ["TIME_OF_AD_ACTION", "TIME_OF_CONVERSION"]; Default: "TIME_OF_AD_ACTION"
+The date by which the conversion metrics returned from this endpoint will be reported. There are two dates associated with a conversion event: the date that the user interacted with the ad, and the date that the user completed a conversion event..
+- `engagement_window_days` (Number) must be one of ["0", "1", "7", "14", "30", "60"]; Default: [30]
+Number of days to use as the conversion attribution window for an engagement action.
+- `granularity` (String) must be one of ["TOTAL", "DAY", "HOUR", "WEEK", "MONTH"]; Default: "TOTAL"
+Chosen granularity for API
+- `level` (String) must be one of ["ADVERTISER", "ADVERTISER_TARGETING", "CAMPAIGN", "CAMPAIGN_TARGETING", "AD_GROUP", "AD_GROUP_TARGETING", "PIN_PROMOTION", "PIN_PROMOTION_TARGETING", "KEYWORD", "PRODUCT_GROUP", "PRODUCT_GROUP_TARGETING", "PRODUCT_ITEM"]; Default: "ADVERTISER"
+Chosen level for API
+- `start_date` (String) A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by report api (913 days from today).
+- `view_window_days` (Number) must be one of ["0", "1", "7", "14", "30", "60"]; Default: [30]
+Number of days to use as the conversion attribution window for a view action.
diff --git a/docs/resources/source_pipedrive.md b/docs/resources/source_pipedrive.md
index 0d844c962..26dd89f2c 100644
--- a/docs/resources/source_pipedrive.md
+++ b/docs/resources/source_pipedrive.md
@@ -15,16 +15,13 @@ SourcePipedrive Resource
```terraform
resource "airbyte_source_pipedrive" "my_source_pipedrive" {
configuration = {
- authorization = {
- api_token = "...my_api_token..."
- auth_type = "Token"
- }
- replication_start_date = "2017-01-25T00:00:00Z"
- source_type = "pipedrive"
+ api_token = "...my_api_token..."
+ replication_start_date = "2017-01-25 00:00:00Z"
}
- name = "Rhonda Hammes"
- secret_id = "...my_secret_id..."
- workspace_id = "c2059c9c-3f56-47e0-a252-765b1d62fcda"
+ definition_id = "3b520112-5b29-4252-a784-d2d0f1707475"
+ name = "Sean Swaniawski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "49780ba1-d6a2-48c6-aefe-59b72db22407"
}
```
@@ -34,11 +31,12 @@ resource "airbyte_source_pipedrive" "my_source_pipedrive" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,19 +49,7 @@ resource "airbyte_source_pipedrive" "my_source_pipedrive" {
Required:
+- `api_token` (String, Sensitive) The Pipedrive API Token.
- `replication_start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. When specified and not None, then stream will behave as incremental
-- `source_type` (String) must be one of ["pipedrive"]
-
-Optional:
-
-- `authorization` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization))
-
-
-### Nested Schema for `configuration.authorization`
-
-Required:
-
-- `api_token` (String) The Pipedrive API Token.
-- `auth_type` (String) must be one of ["Token"]
diff --git a/docs/resources/source_pocket.md b/docs/resources/source_pocket.md
index 740aec074..267414337 100644
--- a/docs/resources/source_pocket.md
+++ b/docs/resources/source_pocket.md
@@ -23,14 +23,14 @@ resource "airbyte_source_pocket" "my_source_pocket" {
favorite = true
search = "...my_search..."
since = "2022-10-20 14:14:14"
- sort = "site"
- source_type = "pocket"
+ sort = "newest"
state = "unread"
tag = "...my_tag..."
}
- name = "Christina Bode"
- secret_id = "...my_secret_id..."
- workspace_id = "e2239e8f-25cd-40d1-9d95-9f439e39266c"
+ definition_id = "da763315-0acf-4ec2-81f7-3646e1c87958"
+ name = "Brandi Hane"
+ secret_id = "...my_secret_id..."
+ workspace_id = "82553101-4017-4845-aa4c-1173de2c277a"
}
```
@@ -40,11 +40,12 @@ resource "airbyte_source_pocket" "my_source_pocket" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -57,9 +58,8 @@ resource "airbyte_source_pocket" "my_source_pocket" {
Required:
-- `access_token` (String) The user's Pocket access token.
-- `consumer_key` (String) Your application's Consumer Key.
-- `source_type` (String) must be one of ["pocket"]
+- `access_token` (String, Sensitive) The user's Pocket access token.
+- `consumer_key` (String, Sensitive) Your application's Consumer Key.
Optional:
@@ -68,7 +68,8 @@ Select the content type of the items to retrieve.
- `detail_type` (String) must be one of ["simple", "complete"]
Select the granularity of the information about each item.
- `domain` (String) Only return items from a particular `domain`.
-- `favorite` (Boolean) Retrieve only favorited items.
+- `favorite` (Boolean) Default: false
+Retrieve only favorited items.
- `search` (String) Only return items whose title or url contain the `search` string.
- `since` (String) Only return items modified since the given timestamp.
- `sort` (String) must be one of ["newest", "oldest", "title", "site"]
diff --git a/docs/resources/source_pokeapi.md b/docs/resources/source_pokeapi.md
index 209cba1b0..fba642a83 100644
--- a/docs/resources/source_pokeapi.md
+++ b/docs/resources/source_pokeapi.md
@@ -15,12 +15,12 @@ SourcePokeapi Resource
```terraform
resource "airbyte_source_pokeapi" "my_source_pokeapi" {
configuration = {
- pokemon_name = "snorlax"
- source_type = "pokeapi"
+ pokemon_name = "luxray"
}
- name = "Jeremiah Hahn"
- secret_id = "...my_secret_id..."
- workspace_id = "aa2b2411-3695-4d1e-a698-fcc4596217c2"
+ definition_id = "e2388fd0-120f-462c-91a2-676b4d9282ad"
+ name = "Ramona Stiedemann"
+ secret_id = "...my_secret_id..."
+ workspace_id = "d5253fa0-2ef0-408f-918d-81572f724d1e"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_pokeapi" "my_source_pokeapi" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,7 @@ resource "airbyte_source_pokeapi" "my_source_pokeapi" {
Required:
-- `pokemon_name` (String) Pokemon requested from the API.
-- `source_type` (String) must be one of ["pokeapi"]
+- `pokemon_name` (String) must be one of ["bulbasaur", "ivysaur", "venusaur", "charmander", "charmeleon", "charizard", "squirtle", "wartortle", "blastoise", "caterpie", "metapod", "butterfree", "weedle", "kakuna", "beedrill", "pidgey", "pidgeotto", "pidgeot", "rattata", "raticate", "spearow", "fearow", "ekans", "arbok", "pikachu", "raichu", "sandshrew", "sandslash", "nidoranf", "nidorina", "nidoqueen", "nidoranm", "nidorino", "nidoking", "clefairy", "clefable", "vulpix", "ninetales", "jigglypuff", "wigglytuff", "zubat", "golbat", "oddish", "gloom", "vileplume", "paras", "parasect", "venonat", "venomoth", "diglett", "dugtrio", "meowth", "persian", "psyduck", "golduck", "mankey", "primeape", "growlithe", "arcanine", "poliwag", "poliwhirl", "poliwrath", "abra", "kadabra", "alakazam", "machop", "machoke", "machamp", "bellsprout", "weepinbell", "victreebel", "tentacool", "tentacruel", "geodude", "graveler", "golem", "ponyta", "rapidash", "slowpoke", "slowbro", "magnemite", "magneton", "farfetchd", "doduo", "dodrio", "seel", "dewgong", "grimer", "muk", "shellder", "cloyster", "gastly", "haunter", "gengar", "onix", "drowzee", "hypno", "krabby", "kingler", "voltorb", "electrode", "exeggcute", "exeggutor", "cubone", "marowak", "hitmonlee", "hitmonchan", "lickitung", "koffing", "weezing", "rhyhorn", "rhydon", "chansey", "tangela", "kangaskhan", "horsea", "seadra", "goldeen", "seaking", "staryu", "starmie", "mrmime", "scyther", "jynx", "electabuzz", "magmar", "pinsir", "tauros", "magikarp", "gyarados", "lapras", "ditto", "eevee", "vaporeon", "jolteon", "flareon", "porygon", "omanyte", "omastar", "kabuto", "kabutops", "aerodactyl", "snorlax", "articuno", "zapdos", "moltres", "dratini", "dragonair", "dragonite", "mewtwo", "mew", "chikorita", "bayleef", "meganium", "cyndaquil", "quilava", "typhlosion", "totodile", "croconaw", "feraligatr", "sentret", "furret", "hoothoot", "noctowl", "ledyba", "ledian", "spinarak", "ariados", "crobat", "chinchou", "lanturn", "pichu", "cleffa", 
"igglybuff", "togepi", "togetic", "natu", "xatu", "mareep", "flaaffy", "ampharos", "bellossom", "marill", "azumarill", "sudowoodo", "politoed", "hoppip", "skiploom", "jumpluff", "aipom", "sunkern", "sunflora", "yanma", "wooper", "quagsire", "espeon", "umbreon", "murkrow", "slowking", "misdreavus", "unown", "wobbuffet", "girafarig", "pineco", "forretress", "dunsparce", "gligar", "steelix", "snubbull", "granbull", "qwilfish", "scizor", "shuckle", "heracross", "sneasel", "teddiursa", "ursaring", "slugma", "magcargo", "swinub", "piloswine", "corsola", "remoraid", "octillery", "delibird", "mantine", "skarmory", "houndour", "houndoom", "kingdra", "phanpy", "donphan", "porygon2", "stantler", "smeargle", "tyrogue", "hitmontop", "smoochum", "elekid", "magby", "miltank", "blissey", "raikou", "entei", "suicune", "larvitar", "pupitar", "tyranitar", "lugia", "ho-oh", "celebi", "treecko", "grovyle", "sceptile", "torchic", "combusken", "blaziken", "mudkip", "marshtomp", "swampert", "poochyena", "mightyena", "zigzagoon", "linoone", "wurmple", "silcoon", "beautifly", "cascoon", "dustox", "lotad", "lombre", "ludicolo", "seedot", "nuzleaf", "shiftry", "taillow", "swellow", "wingull", "pelipper", "ralts", "kirlia", "gardevoir", "surskit", "masquerain", "shroomish", "breloom", "slakoth", "vigoroth", "slaking", "nincada", "ninjask", "shedinja", "whismur", "loudred", "exploud", "makuhita", "hariyama", "azurill", "nosepass", "skitty", "delcatty", "sableye", "mawile", "aron", "lairon", "aggron", "meditite", "medicham", "electrike", "manectric", "plusle", "minun", "volbeat", "illumise", "roselia", "gulpin", "swalot", "carvanha", "sharpedo", "wailmer", "wailord", "numel", "camerupt", "torkoal", "spoink", "grumpig", "spinda", "trapinch", "vibrava", "flygon", "cacnea", "cacturne", "swablu", "altaria", "zangoose", "seviper", "lunatone", "solrock", "barboach", "whiscash", "corphish", "crawdaunt", "baltoy", "claydol", "lileep", "cradily", "anorith", "armaldo", "feebas", "milotic", "castform", 
"kecleon", "shuppet", "banette", "duskull", "dusclops", "tropius", "chimecho", "absol", "wynaut", "snorunt", "glalie", "spheal", "sealeo", "walrein", "clamperl", "huntail", "gorebyss", "relicanth", "luvdisc", "bagon", "shelgon", "salamence", "beldum", "metang", "metagross", "regirock", "regice", "registeel", "latias", "latios", "kyogre", "groudon", "rayquaza", "jirachi", "deoxys", "turtwig", "grotle", "torterra", "chimchar", "monferno", "infernape", "piplup", "prinplup", "empoleon", "starly", "staravia", "staraptor", "bidoof", "bibarel", "kricketot", "kricketune", "shinx", "luxio", "luxray", "budew", "roserade", "cranidos", "rampardos", "shieldon", "bastiodon", "burmy", "wormadam", "mothim", "combee", "vespiquen", "pachirisu", "buizel", "floatzel", "cherubi", "cherrim", "shellos", "gastrodon", "ambipom", "drifloon", "drifblim", "buneary", "lopunny", "mismagius", "honchkrow", "glameow", "purugly", "chingling", "stunky", "skuntank", "bronzor", "bronzong", "bonsly", "mimejr", "happiny", "chatot", "spiritomb", "gible", "gabite", "garchomp", "munchlax", "riolu", "lucario", "hippopotas", "hippowdon", "skorupi", "drapion", "croagunk", "toxicroak", "carnivine", "finneon", "lumineon", "mantyke", "snover", "abomasnow", "weavile", "magnezone", "lickilicky", "rhyperior", "tangrowth", "electivire", "magmortar", "togekiss", "yanmega", "leafeon", "glaceon", "gliscor", "mamoswine", "porygon-z", "gallade", "probopass", "dusknoir", "froslass", "rotom", "uxie", "mesprit", "azelf", "dialga", "palkia", "heatran", "regigigas", "giratina", "cresselia", "phione", "manaphy", "darkrai", "shaymin", "arceus", "victini", "snivy", "servine", "serperior", "tepig", "pignite", "emboar", "oshawott", "dewott", "samurott", "patrat", "watchog", "lillipup", "herdier", "stoutland", "purrloin", "liepard", "pansage", "simisage", "pansear", "simisear", "panpour", "simipour", "munna", "musharna", "pidove", "tranquill", "unfezant", "blitzle", "zebstrika", "roggenrola", "boldore", "gigalith", "woobat", 
"swoobat", "drilbur", "excadrill", "audino", "timburr", "gurdurr", "conkeldurr", "tympole", "palpitoad", "seismitoad", "throh", "sawk", "sewaddle", "swadloon", "leavanny", "venipede", "whirlipede", "scolipede", "cottonee", "whimsicott", "petilil", "lilligant", "basculin", "sandile", "krokorok", "krookodile", "darumaka", "darmanitan", "maractus", "dwebble", "crustle", "scraggy", "scrafty", "sigilyph", "yamask", "cofagrigus", "tirtouga", "carracosta", "archen", "archeops", "trubbish", "garbodor", "zorua", "zoroark", "minccino", "cinccino", "gothita", "gothorita", "gothitelle", "solosis", "duosion", "reuniclus", "ducklett", "swanna", "vanillite", "vanillish", "vanilluxe", "deerling", "sawsbuck", "emolga", "karrablast", "escavalier", "foongus", "amoonguss", "frillish", "jellicent", "alomomola", "joltik", "galvantula", "ferroseed", "ferrothorn", "klink", "klang", "klinklang", "tynamo", "eelektrik", "eelektross", "elgyem", "beheeyem", "litwick", "lampent", "chandelure", "axew", "fraxure", "haxorus", "cubchoo", "beartic", "cryogonal", "shelmet", "accelgor", "stunfisk", "mienfoo", "mienshao", "druddigon", "golett", "golurk", "pawniard", "bisharp", "bouffalant", "rufflet", "braviary", "vullaby", "mandibuzz", "heatmor", "durant", "deino", "zweilous", "hydreigon", "larvesta", "volcarona", "cobalion", "terrakion", "virizion", "tornadus", "thundurus", "reshiram", "zekrom", "landorus", "kyurem", "keldeo", "meloetta", "genesect", "chespin", "quilladin", "chesnaught", "fennekin", "braixen", "delphox", "froakie", "frogadier", "greninja", "bunnelby", "diggersby", "fletchling", "fletchinder", "talonflame", "scatterbug", "spewpa", "vivillon", "litleo", "pyroar", "flabebe", "floette", "florges", "skiddo", "gogoat", "pancham", "pangoro", "furfrou", "espurr", "meowstic", "honedge", "doublade", "aegislash", "spritzee", "aromatisse", "swirlix", "slurpuff", "inkay", "malamar", "binacle", "barbaracle", "skrelp", "dragalge", "clauncher", "clawitzer", "helioptile", "heliolisk", "tyrunt", 
"tyrantrum", "amaura", "aurorus", "sylveon", "hawlucha", "dedenne", "carbink", "goomy", "sliggoo", "goodra", "klefki", "phantump", "trevenant", "pumpkaboo", "gourgeist", "bergmite", "avalugg", "noibat", "noivern", "xerneas", "yveltal", "zygarde", "diancie", "hoopa", "volcanion", "rowlet", "dartrix", "decidueye", "litten", "torracat", "incineroar", "popplio", "brionne", "primarina", "pikipek", "trumbeak", "toucannon", "yungoos", "gumshoos", "grubbin", "charjabug", "vikavolt", "crabrawler", "crabominable", "oricorio", "cutiefly", "ribombee", "rockruff", "lycanroc", "wishiwashi", "mareanie", "toxapex", "mudbray", "mudsdale", "dewpider", "araquanid", "fomantis", "lurantis", "morelull", "shiinotic", "salandit", "salazzle", "stufful", "bewear", "bounsweet", "steenee", "tsareena", "comfey", "oranguru", "passimian", "wimpod", "golisopod", "sandygast", "palossand", "pyukumuku", "typenull", "silvally", "minior", "komala", "turtonator", "togedemaru", "mimikyu", "bruxish", "drampa", "dhelmise", "jangmo-o", "hakamo-o", "kommo-o", "tapukoko", "tapulele", "tapubulu", "tapufini", "cosmog", "cosmoem", "solgaleo", "lunala", "nihilego", "buzzwole", "pheromosa", "xurkitree", "celesteela", "kartana", "guzzlord", "necrozma", "magearna", "marshadow", "poipole", "naganadel", "stakataka", "blacephalon", "zeraora", "meltan", "melmetal", "grookey", "thwackey", "rillaboom", "scorbunny", "raboot", "cinderace", "sobble", "drizzile", "inteleon", "skwovet", "greedent", "rookidee", "corvisquire", "corviknight", "blipbug", "dottler", "orbeetle", "nickit", "thievul", "gossifleur", "eldegoss", "wooloo", "dubwool", "chewtle", "drednaw", "yamper", "boltund", "rolycoly", "carkol", "coalossal", "applin", "flapple", "appletun", "silicobra", "sandaconda", "cramorant", "arrokuda", "barraskewda", "toxel", "toxtricity", "sizzlipede", "centiskorch", "clobbopus", "grapploct", "sinistea", "polteageist", "hatenna", "hattrem", "hatterene", "impidimp", "morgrem", "grimmsnarl", "obstagoon", "perrserker", "cursola", 
"sirfetchd", "mrrime", "runerigus", "milcery", "alcremie", "falinks", "pincurchin", "snom", "frosmoth", "stonjourner", "eiscue", "indeedee", "morpeko", "cufant", "copperajah", "dracozolt", "arctozolt", "dracovish", "arctovish", "duraludon", "dreepy", "drakloak", "dragapult", "zacian", "zamazenta", "eternatus", "kubfu", "urshifu", "zarude", "regieleki", "regidrago", "glastrier", "spectrier", "calyrex"]
+Pokemon requested from the API.
diff --git a/docs/resources/source_polygon_stock_api.md b/docs/resources/source_polygon_stock_api.md
index 5a80bed58..116b42ce4 100644
--- a/docs/resources/source_polygon_stock_api.md
+++ b/docs/resources/source_polygon_stock_api.md
@@ -15,20 +15,20 @@ SourcePolygonStockAPI Resource
```terraform
resource "airbyte_source_polygon_stock_api" "my_source_polygonstockapi" {
configuration = {
- adjusted = "false"
+ adjusted = "true"
api_key = "...my_api_key..."
end_date = "2020-10-14"
- limit = 100
+ limit = 120
multiplier = 1
- sort = "asc"
- source_type = "polygon-stock-api"
+ sort = "desc"
start_date = "2020-10-14"
- stocks_ticker = "IBM"
+ stocks_ticker = "MSFT"
timespan = "day"
}
- name = "Mary Fisher"
- secret_id = "...my_secret_id..."
- workspace_id = "fb5971e9-8190-4557-b89c-edbac7fda395"
+ definition_id = "15bf9f13-70c2-48b2-b8d2-5e4ee4a51abe"
+ name = "Antoinette Rempel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e8da5f86-7ba5-4cf8-9b48-a2cc4047b120"
}
```
@@ -38,11 +38,12 @@ resource "airbyte_source_polygon_stock_api" "my_source_polygonstockapi" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -58,7 +59,6 @@ Required:
- `api_key` (String) Your API ACCESS Key
- `end_date` (String) The target date for the aggregate window.
- `multiplier` (Number) The size of the timespan multiplier.
-- `source_type` (String) must be one of ["polygon-stock-api"]
- `start_date` (String) The beginning date for the aggregate window.
- `stocks_ticker` (String) The exchange symbol that this item is traded under.
- `timespan` (String) The size of the time window.
diff --git a/docs/resources/source_postgres.md b/docs/resources/source_postgres.md
index 304d0512c..0e23a3f3c 100644
--- a/docs/resources/source_postgres.md
+++ b/docs/resources/source_postgres.md
@@ -21,29 +21,25 @@ resource "airbyte_source_postgres" "my_source_postgres" {
password = "...my_password..."
port = 5432
replication_method = {
- source_postgres_update_method_detect_changes_with_xmin_system_column = {
- method = "Xmin"
- }
+ detect_changes_with_xmin_system_column = {}
}
schemas = [
"...",
]
- source_type = "postgres"
ssl_mode = {
- source_postgres_ssl_modes_allow = {
- mode = "allow"
+ source_postgres_allow = {
+ additional_properties = "{ \"see\": \"documentation\" }"
}
}
tunnel_method = {
- source_postgres_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_postgres_no_tunnel = {}
}
- username = "Edwardo.Streich"
+ username = "Dagmar_Towne8"
}
- name = "Roosevelt Cummings"
- secret_id = "...my_secret_id..."
- workspace_id = "480632b9-954b-46fa-a206-369828553cb1"
+ definition_id = "558e983f-33bb-4c2f-8e75-b95ee5dd11c7"
+ name = "Brandi Gerhold"
+ secret_id = "...my_secret_id..."
+ workspace_id = "aa4d1c74-fcd7-4d93-9b8b-6b2c0920aa8b"
}
```
@@ -53,11 +49,12 @@ resource "airbyte_source_postgres" "my_source_postgres" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -72,14 +69,14 @@ Required:
- `database` (String) Name of the database.
- `host` (String) Hostname of the database.
-- `port` (Number) Port of the database.
-- `source_type` (String) must be one of ["postgres"]
- `username` (String) Username to access the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.
-- `password` (String) Password associated with the username.
+- `password` (String, Sensitive) Password associated with the username.
+- `port` (Number) Default: 5432
+Port of the database.
- `replication_method` (Attributes) Configures how data is extracted from the database. (see [below for nested schema](#nestedatt--configuration--replication_method))
- `schemas` (List of String) The list of schemas (case sensitive) to sync from. Defaults to public.
- `ssl_mode` (Attributes) SSL connection modes.
@@ -91,83 +88,37 @@ Optional:
Optional:
-- `source_postgres_update_method_detect_changes_with_xmin_system_column` (Attributes) Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_detect_changes_with_xmin_system_column))
-- `source_postgres_update_method_read_changes_using_write_ahead_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_read_changes_using_write_ahead_log_cdc))
-- `source_postgres_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_scan_changes_with_user_defined_cursor))
-- `source_postgres_update_update_method_detect_changes_with_xmin_system_column` (Attributes) Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_detect_changes_with_xmin_system_column))
-- `source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc))
-- `source_postgres_update_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_scan_changes_with_user_defined_cursor))
-
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_method_detect_changes_with_xmin_system_column`
-
-Required:
+- `detect_changes_with_xmin_system_column` (Attributes) Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. (see [below for nested schema](#nestedatt--configuration--replication_method--detect_changes_with_xmin_system_column))
+- `read_changes_using_write_ahead_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size. (see [below for nested schema](#nestedatt--configuration--replication_method--read_changes_using_write_ahead_log_cdc))
+- `scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--scan_changes_with_user_defined_cursor))
-- `method` (String) must be one of ["Xmin"]
+
+### Nested Schema for `configuration.replication_method.detect_changes_with_xmin_system_column`
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_method_read_changes_using_write_ahead_log_cdc`
+
+### Nested Schema for `configuration.replication_method.read_changes_using_write_ahead_log_cdc`
Required:
-- `method` (String) must be one of ["CDC"]
- `publication` (String) A Postgres publication used for consuming changes. Read about publications and replication identities.
- `replication_slot` (String) A plugin logical replication slot. Read about replication slots.
Optional:
- `additional_properties` (String) Parsed as JSON.
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `lsn_commit_behaviour` (String) must be one of ["While reading Data", "After loading Data in the destination"]
-Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-- `plugin` (String) must be one of ["pgoutput"]
+- `initial_waiting_seconds` (Number) Default: 300
+The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
+- `lsn_commit_behaviour` (String) must be one of ["While reading Data", "After loading Data in the destination"]; Default: "After loading Data in the destination"
+Determines when Airbyte should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
+- `plugin` (String) must be one of ["pgoutput"]; Default: "pgoutput"
A logical decoding plugin installed on the PostgreSQL server.
-- `queue_size` (Number) The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
-
-
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_method_scan_changes_with_user_defined_cursor`
-
-Required:
-
-- `method` (String) must be one of ["Standard"]
-
-
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_detect_changes_with_xmin_system_column`
+- `queue_size` (Number) Default: 10000
+The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
-Required:
-
-- `method` (String) must be one of ["Xmin"]
-
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc`
-
-Required:
-
-- `method` (String) must be one of ["CDC"]
-- `publication` (String) A Postgres publication used for consuming changes. Read about publications and replication identities.
-- `replication_slot` (String) A plugin logical replication slot. Read about replication slots.
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `initial_waiting_seconds` (Number) The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
-- `lsn_commit_behaviour` (String) must be one of ["While reading Data", "After loading Data in the destination"]
-Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-- `plugin` (String) must be one of ["pgoutput"]
-A logical decoding plugin installed on the PostgreSQL server.
-- `queue_size` (Number) The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
-
-
-
-### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_scan_changes_with_user_defined_cursor`
-
-Required:
-
-- `method` (String) must be one of ["Standard"]
+
+### Nested Schema for `configuration.replication_method.scan_changes_with_user_defined_cursor`
@@ -176,177 +127,73 @@ Required:
Optional:
-- `source_postgres_ssl_modes_allow` (Attributes) Enables encryption only when required by the source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_allow))
-- `source_postgres_ssl_modes_disable` (Attributes) Disables encryption of communication between Airbyte and source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_disable))
-- `source_postgres_ssl_modes_prefer` (Attributes) Allows unencrypted connection only if the source database does not support encryption. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_prefer))
-- `source_postgres_ssl_modes_require` (Attributes) Always require encryption. If the source database server does not support encryption, connection will fail. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_require))
-- `source_postgres_ssl_modes_verify_ca` (Attributes) Always require encryption and verifies that the source database server has a valid SSL certificate. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_verify_ca))
-- `source_postgres_ssl_modes_verify_full` (Attributes) This is the most secure mode. Always require encryption and verifies the identity of the source database server. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_ssl_modes_verify_full))
-- `source_postgres_update_ssl_modes_allow` (Attributes) Enables encryption only when required by the source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_allow))
-- `source_postgres_update_ssl_modes_disable` (Attributes) Disables encryption of communication between Airbyte and source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_disable))
-- `source_postgres_update_ssl_modes_prefer` (Attributes) Allows unencrypted connection only if the source database does not support encryption. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_prefer))
-- `source_postgres_update_ssl_modes_require` (Attributes) Always require encryption. If the source database server does not support encryption, connection will fail. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_require))
-- `source_postgres_update_ssl_modes_verify_ca` (Attributes) Always require encryption and verifies that the source database server has a valid SSL certificate. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_verify_ca))
-- `source_postgres_update_ssl_modes_verify_full` (Attributes) This is the most secure mode. Always require encryption and verifies the identity of the source database server. (see [below for nested schema](#nestedatt--configuration--ssl_mode--source_postgres_update_ssl_modes_verify_full))
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_allow`
+- `allow` (Attributes) Enables encryption only when required by the source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--allow))
+- `disable` (Attributes) Disables encryption of communication between Airbyte and source database. (see [below for nested schema](#nestedatt--configuration--ssl_mode--disable))
+- `prefer` (Attributes) Allows unencrypted connection only if the source database does not support encryption. (see [below for nested schema](#nestedatt--configuration--ssl_mode--prefer))
+- `require` (Attributes) Always require encryption. If the source database server does not support encryption, connection will fail. (see [below for nested schema](#nestedatt--configuration--ssl_mode--require))
+- `verify_ca` (Attributes) Always require encryption and verifies that the source database server has a valid SSL certificate. (see [below for nested schema](#nestedatt--configuration--ssl_mode--verify_ca))
+- `verify_full` (Attributes) This is the most secure mode. Always require encryption and verifies the identity of the source database server. (see [below for nested schema](#nestedatt--configuration--ssl_mode--verify_full))
-Required:
-
-- `mode` (String) must be one of ["allow"]
+
+### Nested Schema for `configuration.ssl_mode.allow`
Optional:
- `additional_properties` (String) Parsed as JSON.
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_disable`
-
-Required:
-
-- `mode` (String) must be one of ["disable"]
+
+### Nested Schema for `configuration.ssl_mode.disable`
Optional:
- `additional_properties` (String) Parsed as JSON.
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_prefer`
-
-Required:
-
-- `mode` (String) must be one of ["prefer"]
+
+### Nested Schema for `configuration.ssl_mode.prefer`
Optional:
- `additional_properties` (String) Parsed as JSON.
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_require`
-
-Required:
-
-- `mode` (String) must be one of ["require"]
+
+### Nested Schema for `configuration.ssl_mode.require`
Optional:
- `additional_properties` (String) Parsed as JSON.
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_verify_ca`
+
+### Nested Schema for `configuration.ssl_mode.verify_ca`
Required:
- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify-ca"]
Optional:
- `additional_properties` (String) Parsed as JSON.
- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
+- `client_key` (String, Sensitive) Client key
+- `client_key_password` (String, Sensitive) Password for keystorage. If you do not add it - the password will be generated automatically.
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_ssl_modes_verify_full`
+
+### Nested Schema for `configuration.ssl_mode.verify_full`
Required:
- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify-full"]
Optional:
- `additional_properties` (String) Parsed as JSON.
- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_allow`
-
-Required:
-
-- `mode` (String) must be one of ["allow"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_disable`
-
-Required:
-
-- `mode` (String) must be one of ["disable"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_prefer`
-
-Required:
-
-- `mode` (String) must be one of ["prefer"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_require`
-
-Required:
-
-- `mode` (String) must be one of ["require"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_verify_ca`
-
-Required:
-
-- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify-ca"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
-
-
-
-### Nested Schema for `configuration.ssl_mode.source_postgres_update_ssl_modes_verify_full`
-
-Required:
-
-- `ca_certificate` (String) CA certificate
-- `mode` (String) must be one of ["verify-full"]
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `client_certificate` (String) Client certificate
-- `client_key` (String) Client key
-- `client_key_password` (String) Password for keystorage. If you do not add it - the password will be generated automatically.
+- `client_key` (String, Sensitive) Client key
+- `client_key_password` (String, Sensitive) Password for keystorage. If you do not add it - the password will be generated automatically.
@@ -355,80 +202,41 @@ Optional:
Optional:
-- `source_postgres_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_ssh_tunnel_method_no_tunnel))
-- `source_postgres_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_ssh_tunnel_method_password_authentication))
-- `source_postgres_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_ssh_tunnel_method_ssh_key_authentication))
-- `source_postgres_update_ssh_tunnel_method_no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_update_ssh_tunnel_method_no_tunnel))
-- `source_postgres_update_ssh_tunnel_method_password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_update_ssh_tunnel_method_password_authentication))
-- `source_postgres_update_ssh_tunnel_method_ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--source_postgres_update_ssh_tunnel_method_ssh_key_authentication))
+- `no_tunnel` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--no_tunnel))
+- `password_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--password_authentication))
+- `ssh_key_authentication` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method--ssh_key_authentication))
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_ssh_tunnel_method_no_tunnel`
+
+### Nested Schema for `configuration.tunnel_method.no_tunnel`
-Required:
-
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
-
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.password_authentication`
Required:
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_ssh_tunnel_method_ssh_key_authentication`
-
-Required:
+- `tunnel_user_password` (String, Sensitive) OS-level password for logging into the jump server host
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_update_ssh_tunnel_method_no_tunnel`
-
-Required:
+Optional:
-- `tunnel_method` (String) must be one of ["NO_TUNNEL"]
-No ssh tunnel needed to connect to database
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_update_ssh_tunnel_method_password_authentication`
+
+### Nested Schema for `configuration.tunnel_method.ssh_key_authentication`
Required:
+- `ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through a jump server tunnel host using username and password authentication
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host
-- `tunnel_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.tunnel_method.source_postgres_update_ssh_tunnel_method_ssh_key_authentication`
+- `tunnel_user` (String) OS-level username for logging into the jump server host.
-Required:
+Optional:
-- `ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-- `tunnel_host` (String) Hostname of the jump server host that allows inbound ssh tunnel.
-- `tunnel_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through a jump server tunnel host using username and ssh key
-- `tunnel_port` (Number) Port on the proxy/jump server that accepts inbound ssh connections.
-- `tunnel_user` (String) OS-level username for logging into the jump server host.
+- `tunnel_port` (Number) Default: 22
+Port on the proxy/jump server that accepts inbound ssh connections.
diff --git a/docs/resources/source_posthog.md b/docs/resources/source_posthog.md
index 27fbc1d87..28848ec98 100644
--- a/docs/resources/source_posthog.md
+++ b/docs/resources/source_posthog.md
@@ -17,13 +17,13 @@ resource "airbyte_source_posthog" "my_source_posthog" {
configuration = {
api_key = "...my_api_key..."
base_url = "https://posthog.example.com"
- events_time_step = 30
- source_type = "posthog"
+ events_time_step = 5
start_date = "2021-01-01T00:00:00Z"
}
- name = "Terence Wisozk"
- secret_id = "...my_secret_id..."
- workspace_id = "21ec2053-b749-4366-ac8e-e0f2bf19588d"
+ definition_id = "07521b21-ea9b-4c9d-9c88-f1ee12f8a7db"
+ name = "Daisy Ledner"
+ secret_id = "...my_secret_id..."
+ workspace_id = "41266a87-d389-4094-afa6-7bbea9f5a35d"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_posthog" "my_source_posthog" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,13 +51,14 @@ resource "airbyte_source_posthog" "my_source_posthog" {
Required:
-- `api_key` (String) API Key. See the docs for information on how to generate this key.
-- `source_type` (String) must be one of ["posthog"]
+- `api_key` (String, Sensitive) API Key. See the docs for information on how to generate this key.
- `start_date` (String) The date from which you'd like to replicate the data. Any data before this date will not be replicated.
Optional:
-- `base_url` (String) Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com).
-- `events_time_step` (Number) Set lower value in case of failing long running sync of events stream.
+- `base_url` (String) Default: "https://app.posthog.com"
+Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com).
+- `events_time_step` (Number) Default: 30
+Set lower value in case of failing long running sync of events stream.
diff --git a/docs/resources/source_postmarkapp.md b/docs/resources/source_postmarkapp.md
index e9925ed62..6a265a1dd 100644
--- a/docs/resources/source_postmarkapp.md
+++ b/docs/resources/source_postmarkapp.md
@@ -15,13 +15,13 @@ SourcePostmarkapp Resource
```terraform
resource "airbyte_source_postmarkapp" "my_source_postmarkapp" {
configuration = {
- source_type = "postmarkapp"
x_postmark_account_token = "...my_x_postmark_account_token..."
x_postmark_server_token = "...my_x_postmark_server_token..."
}
- name = "Mr. Sharon Swift"
- secret_id = "...my_secret_id..."
- workspace_id = "3deba297-be3e-490b-840d-f868fd52405c"
+ definition_id = "1bd0fb63-21f6-4b4c-a647-2a5f8aec8fed"
+ name = "Felix Wisoky"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5367bfee-523e-436b-b4e8-f7b837d76b02"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_postmarkapp" "my_source_postmarkapp" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,7 +49,6 @@ resource "airbyte_source_postmarkapp" "my_source_postmarkapp" {
Required:
-- `source_type` (String) must be one of ["postmarkapp"]
- `x_postmark_account_token` (String) API Key for account
- `x_postmark_server_token` (String) API Key for server
diff --git a/docs/resources/source_prestashop.md b/docs/resources/source_prestashop.md
index bfcc11bc1..28e07a56d 100644
--- a/docs/resources/source_prestashop.md
+++ b/docs/resources/source_prestashop.md
@@ -15,14 +15,14 @@ SourcePrestashop Resource
```terraform
resource "airbyte_source_prestashop" "my_source_prestashop" {
configuration = {
- access_key = "...my_access_key..."
- source_type = "prestashop"
- start_date = "2022-01-01"
- url = "...my_url..."
+ access_key = "...my_access_key..."
+ start_date = "2022-01-01"
+ url = "...my_url..."
}
- name = "Evelyn Stracke"
- secret_id = "...my_secret_id..."
- workspace_id = "2f4f127f-b0e0-4bf1-b821-7978d0acca77"
+ definition_id = "d797c2fd-0239-4507-97b2-06b8fda8b48b"
+ name = "Dr. Jeffery Wuckert"
+ secret_id = "...my_secret_id..."
+ workspace_id = "631ebcaf-aa2e-4e7a-9e0c-b6197095b91e"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_prestashop" "my_source_prestashop" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,8 +50,7 @@ resource "airbyte_source_prestashop" "my_source_prestashop" {
Required:
-- `access_key` (String) Your PrestaShop access key. See the docs for info on how to obtain this.
-- `source_type` (String) must be one of ["prestashop"]
+- `access_key` (String, Sensitive) Your PrestaShop access key. See the docs for info on how to obtain this.
- `start_date` (String) The Start date in the format YYYY-MM-DD.
- `url` (String) Shop URL without trailing slash.
diff --git a/docs/resources/source_punk_api.md b/docs/resources/source_punk_api.md
index 33fb6c703..93a09f464 100644
--- a/docs/resources/source_punk_api.md
+++ b/docs/resources/source_punk_api.md
@@ -17,12 +17,12 @@ resource "airbyte_source_punk_api" "my_source_punkapi" {
configuration = {
brewed_after = "MM-YYYY"
brewed_before = "MM-YYYY"
- id = 22
- source_type = "punk-api"
+ id = 1
}
- name = "Darnell Turcotte"
- secret_id = "...my_secret_id..."
- workspace_id = "540ef53a-34a1-4b8f-a997-31adc05d85ae"
+ definition_id = "0c173d4d-6113-43dd-b2a9-5937ced0062e"
+ name = "Shelia Hettinger"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4e78152c-bd26-46e4-812d-05e7f58d4a06"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_punk_api" "my_source_punkapi" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,7 +52,6 @@ Required:
- `brewed_after` (String) To extract specific data with Unique ID
- `brewed_before` (String) To extract specific data with Unique ID
-- `source_type` (String) must be one of ["punk-api"]
Optional:
diff --git a/docs/resources/source_pypi.md b/docs/resources/source_pypi.md
index a6b79c0f9..3ca2bf16c 100644
--- a/docs/resources/source_pypi.md
+++ b/docs/resources/source_pypi.md
@@ -16,12 +16,12 @@ SourcePypi Resource
resource "airbyte_source_pypi" "my_source_pypi" {
configuration = {
project_name = "sampleproject"
- source_type = "pypi"
version = "1.2.0"
}
- name = "Antonia Wintheiser"
- secret_id = "...my_secret_id..."
- workspace_id = "0fb38742-90d3-4365-a1ec-a16ef89451bd"
+ definition_id = "25cbff5b-31f2-4b93-84d3-ebf32902de61"
+ name = "Ann Blanda"
+ secret_id = "...my_secret_id..."
+ workspace_id = "882924ee-80aa-4298-8d84-713ebef014dd"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_pypi" "my_source_pypi" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,7 +50,6 @@ resource "airbyte_source_pypi" "my_source_pypi" {
Required:
- `project_name` (String) Name of the project/package. Can only be in lowercase with hyphen. This is the name used using pip command for installing the package.
-- `source_type` (String) must be one of ["pypi"]
Optional:
diff --git a/docs/resources/source_qualaroo.md b/docs/resources/source_qualaroo.md
index 6659a5d39..de27dbeca 100644
--- a/docs/resources/source_qualaroo.md
+++ b/docs/resources/source_qualaroo.md
@@ -15,17 +15,17 @@ SourceQualaroo Resource
```terraform
resource "airbyte_source_qualaroo" "my_source_qualaroo" {
configuration = {
- key = "...my_key..."
- source_type = "qualaroo"
- start_date = "2021-03-01T00:00:00.000Z"
+ key = "...my_key..."
+ start_date = "2021-03-01T00:00:00.000Z"
survey_ids = [
"...",
]
token = "...my_token..."
}
- name = "Sue Thompson"
- secret_id = "...my_secret_id..."
- workspace_id = "b518c4da-1fad-4355-92f0-6d4e5b72f0f5"
+ definition_id = "9af7c7e9-c462-409e-a52c-707cb05c4a8d"
+ name = "Cheryl Schmitt"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4658e520-f854-4a56-b309-cc0ee4bba7fa"
}
```
@@ -35,11 +35,12 @@ resource "airbyte_source_qualaroo" "my_source_qualaroo" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -52,10 +53,9 @@ resource "airbyte_source_qualaroo" "my_source_qualaroo" {
Required:
-- `key` (String) A Qualaroo token. See the docs for instructions on how to generate it.
-- `source_type` (String) must be one of ["qualaroo"]
+- `key` (String, Sensitive) A Qualaroo token. See the docs for instructions on how to generate it.
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-- `token` (String) A Qualaroo token. See the docs for instructions on how to generate it.
+- `token` (String, Sensitive) A Qualaroo token. See the docs for instructions on how to generate it.
Optional:
diff --git a/docs/resources/source_quickbooks.md b/docs/resources/source_quickbooks.md
index c23c86ec2..bf297e598 100644
--- a/docs/resources/source_quickbooks.md
+++ b/docs/resources/source_quickbooks.md
@@ -16,23 +16,22 @@ SourceQuickbooks Resource
resource "airbyte_source_quickbooks" "my_source_quickbooks" {
configuration = {
credentials = {
- source_quickbooks_authorization_method_o_auth2_0 = {
+ source_quickbooks_o_auth2_0 = {
access_token = "...my_access_token..."
- auth_type = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
realm_id = "...my_realm_id..."
refresh_token = "...my_refresh_token..."
- token_expiry_date = "2022-06-15T23:02:57.447Z"
+ token_expiry_date = "2020-06-15T02:42:19.793Z"
}
}
- sandbox = false
- source_type = "quickbooks"
- start_date = "2021-03-20T00:00:00Z"
+ sandbox = true
+ start_date = "2021-03-20T00:00:00Z"
}
- name = "William Gottlieb"
- secret_id = "...my_secret_id..."
- workspace_id = "e00a1d6e-b943-4464-9d03-084fbba5ccef"
+ definition_id = "054daa84-a4e2-48fe-a10a-8a64b77a4fe6"
+ name = "Patricia Dickens"
+ secret_id = "...my_secret_id..."
+ workspace_id = "88c95001-e515-4b2e-b405-22a67dad65e8"
}
```
@@ -42,11 +41,12 @@ resource "airbyte_source_quickbooks" "my_source_quickbooks" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -60,49 +60,30 @@ resource "airbyte_source_quickbooks" "my_source_quickbooks" {
Required:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `sandbox` (Boolean) Determines whether to use the sandbox or production environment.
-- `source_type` (String) must be one of ["quickbooks"]
- `start_date` (String) The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this date will not be replicated.
-
-### Nested Schema for `configuration.credentials`
-
Optional:
-- `source_quickbooks_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_quickbooks_authorization_method_o_auth2_0))
-- `source_quickbooks_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_quickbooks_update_authorization_method_o_auth2_0))
-
-
-### Nested Schema for `configuration.credentials.source_quickbooks_authorization_method_o_auth2_0`
+- `sandbox` (Boolean) Default: false
+Determines whether to use the sandbox or production environment.
-Required:
-
-- `access_token` (String) Access token fot making authenticated requests.
-- `client_id` (String) Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
-- `client_secret` (String) Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
-- `realm_id` (String) Labeled Company ID. The Make API Calls panel is populated with the realm id and the current access token.
-- `refresh_token` (String) A token used when refreshing the access token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
+
+### Nested Schema for `configuration.credentials`
Optional:
-- `auth_type` (String) must be one of ["oauth2.0"]
-
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_quickbooks_update_authorization_method_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) Access token fot making authenticated requests.
+- `access_token` (String, Sensitive) Access token for making authenticated requests.
- `client_id` (String) Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
- `client_secret` (String) Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
- `realm_id` (String) Labeled Company ID. The Make API Calls panel is populated with the realm id and the current access token.
-- `refresh_token` (String) A token used when refreshing the access token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-Optional:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
+- `refresh_token` (String, Sensitive) A token used when refreshing the access token.
+- `token_expiry_date` (String, Sensitive) The date-time when the access token should be refreshed.
diff --git a/docs/resources/source_railz.md b/docs/resources/source_railz.md
index 09a7c18fa..b353dc15e 100644
--- a/docs/resources/source_railz.md
+++ b/docs/resources/source_railz.md
@@ -15,14 +15,14 @@ SourceRailz Resource
```terraform
resource "airbyte_source_railz" "my_source_railz" {
configuration = {
- client_id = "...my_client_id..."
- secret_key = "...my_secret_key..."
- source_type = "railz"
- start_date = "...my_start_date..."
+ client_id = "...my_client_id..."
+ secret_key = "...my_secret_key..."
+ start_date = "...my_start_date..."
}
- name = "Clyde Schmeler Jr."
- secret_id = "...my_secret_id..."
- workspace_id = "fe51e528-a45a-4c82-b85f-8bc2caba8da4"
+ definition_id = "ae1d217c-0fcb-4e7d-ad34-33ea862799ca"
+ name = "Alvin Roob"
+ secret_id = "...my_secret_id..."
+ workspace_id = "833469d3-410e-4395-a0aa-c55dc9d09788"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_railz" "my_source_railz" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,8 +51,7 @@ resource "airbyte_source_railz" "my_source_railz" {
Required:
- `client_id` (String) Client ID (client_id)
-- `secret_key` (String) Secret key (secret_key)
-- `source_type` (String) must be one of ["railz"]
+- `secret_key` (String, Sensitive) Secret key (secret_key)
- `start_date` (String) Start date
diff --git a/docs/resources/source_recharge.md b/docs/resources/source_recharge.md
index c3ab7a31b..a867ef726 100644
--- a/docs/resources/source_recharge.md
+++ b/docs/resources/source_recharge.md
@@ -16,12 +16,12 @@ SourceRecharge Resource
resource "airbyte_source_recharge" "my_source_recharge" {
configuration = {
access_token = "...my_access_token..."
- source_type = "recharge"
start_date = "2021-05-14T00:00:00Z"
}
- name = "Angel Stokes"
- secret_id = "...my_secret_id..."
- workspace_id = "7ff4711a-a1bc-474b-86ce-cc74f77b4848"
+ definition_id = "427992f6-5a71-405f-ae57-0ad372ede129"
+ name = "Hugo Hagenes"
+ secret_id = "...my_secret_id..."
+ workspace_id = "1410fd6e-7ec4-4881-ab0c-62b8975147c3"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_recharge" "my_source_recharge" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_recharge" "my_source_recharge" {
Required:
-- `access_token` (String) The value of the Access Token generated. See the docs for more information.
-- `source_type` (String) must be one of ["recharge"]
+- `access_token` (String, Sensitive) The value of the Access Token generated. See the docs for more information.
- `start_date` (String) The date from which you'd like to replicate data for Recharge API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will not be replicated.
diff --git a/docs/resources/source_recreation.md b/docs/resources/source_recreation.md
index 9b923b0f5..67edd7294 100644
--- a/docs/resources/source_recreation.md
+++ b/docs/resources/source_recreation.md
@@ -17,11 +17,11 @@ resource "airbyte_source_recreation" "my_source_recreation" {
configuration = {
apikey = "...my_apikey..."
query_campsites = "...my_query_campsites..."
- source_type = "recreation"
}
- name = "Taylor Kertzmann"
- secret_id = "...my_secret_id..."
- workspace_id = "f0441d2c-3b80-4809-8373-e060459bebba"
+ definition_id = "e6c8bd1c-ccad-43b1-8406-5293193648ca"
+ name = "Naomi Dietrich"
+ secret_id = "...my_secret_id..."
+ workspace_id = "8652384b-db82-41f9-88ef-a40dc207c50e"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_recreation" "my_source_recreation" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_recreation" "my_source_recreation" {
Required:
-- `apikey` (String) API Key
-- `source_type` (String) must be one of ["recreation"]
+- `apikey` (String, Sensitive) API Key
Optional:
diff --git a/docs/resources/source_recruitee.md b/docs/resources/source_recruitee.md
index ff172c647..5c11e508f 100644
--- a/docs/resources/source_recruitee.md
+++ b/docs/resources/source_recruitee.md
@@ -15,13 +15,13 @@ SourceRecruitee Resource
```terraform
resource "airbyte_source_recruitee" "my_source_recruitee" {
configuration = {
- api_key = "...my_api_key..."
- company_id = 9
- source_type = "recruitee"
+ api_key = "...my_api_key..."
+ company_id = 4
}
- name = "Mrs. Tina White"
- secret_id = "...my_secret_id..."
- workspace_id = "6bcf1525-58da-4a95-be6c-d02756c354aa"
+ definition_id = "f1211e1f-cb26-4b90-8c0d-f941919892a2"
+ name = "Mrs. Sherri Rosenbaum"
+ secret_id = "...my_secret_id..."
+ workspace_id = "af7bc34c-463b-4838-9c5f-976535f73a45"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_recruitee" "my_source_recruitee" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_recruitee" "my_source_recruitee" {
Required:
-- `api_key` (String) Recruitee API Key. See here.
+- `api_key` (String, Sensitive) Recruitee API Key. See here.
- `company_id` (Number) Recruitee Company ID. You can also find this ID on the Recruitee API tokens page.
-- `source_type` (String) must be one of ["recruitee"]
diff --git a/docs/resources/source_recurly.md b/docs/resources/source_recurly.md
index 4d6e8253e..4f6d49ab3 100644
--- a/docs/resources/source_recurly.md
+++ b/docs/resources/source_recurly.md
@@ -15,14 +15,14 @@ SourceRecurly Resource
```terraform
resource "airbyte_source_recurly" "my_source_recurly" {
configuration = {
- api_key = "...my_api_key..."
- begin_time = "2021-12-01T00:00:00"
- end_time = "2021-12-01T00:00:00"
- source_type = "recurly"
+ api_key = "...my_api_key..."
+ begin_time = "2021-12-01T00:00:00"
+ end_time = "2021-12-01T00:00:00"
}
- name = "Josephine Dibbert"
- secret_id = "...my_secret_id..."
- workspace_id = "7e1763c5-208c-423e-9802-d82f0d45eb4a"
+ definition_id = "535fff5d-1d34-4f0c-8e54-86a3a161dc53"
+ name = "Mrs. Glen Gottlieb"
+ secret_id = "...my_secret_id..."
+ workspace_id = "acb8b41d-5bf9-44a0-9397-d3dfd90aff66"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_recurly" "my_source_recurly" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,8 +50,7 @@ resource "airbyte_source_recurly" "my_source_recurly" {
Required:
-- `api_key` (String) Recurly API Key. See the docs for more information on how to generate this key.
-- `source_type` (String) must be one of ["recurly"]
+- `api_key` (String, Sensitive) Recurly API Key. See the docs for more information on how to generate this key.
Optional:
diff --git a/docs/resources/source_redshift.md b/docs/resources/source_redshift.md
index 8a38f454e..32d4b1b2b 100644
--- a/docs/resources/source_redshift.md
+++ b/docs/resources/source_redshift.md
@@ -23,12 +23,12 @@ resource "airbyte_source_redshift" "my_source_redshift" {
schemas = [
"...",
]
- source_type = "redshift"
- username = "Nelda.Jaskolski"
+ username = "Elton_Morissette"
}
- name = "Clay Hintz"
- secret_id = "...my_secret_id..."
- workspace_id = "c18edc7f-787e-432e-84b3-d3ed0c5670ef"
+ definition_id = "b974a7d8-001c-4be4-b7da-a2d7b021550a"
+ name = "Jake Ondricka"
+ secret_id = "...my_secret_id..."
+ workspace_id = "f01cf56e-e294-4adb-85bd-340789cf0b8d"
}
```
@@ -38,11 +38,12 @@ resource "airbyte_source_redshift" "my_source_redshift" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -57,14 +58,14 @@ Required:
- `database` (String) Name of the database.
- `host` (String) Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com).
-- `password` (String) Password associated with the username.
-- `port` (Number) Port of the database.
-- `source_type` (String) must be one of ["redshift"]
+- `password` (String, Sensitive) Password associated with the username.
- `username` (String) Username to use to access the database.
Optional:
- `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
+- `port` (Number) Default: 5439
+Port of the database.
- `schemas` (List of String) The list of schemas to sync from. Specify one or more explicitly or keep empty to process all schemas. Schema names are case sensitive.
diff --git a/docs/resources/source_retently.md b/docs/resources/source_retently.md
index 6cf7842b1..d2e98692e 100644
--- a/docs/resources/source_retently.md
+++ b/docs/resources/source_retently.md
@@ -16,18 +16,18 @@ SourceRetently Resource
resource "airbyte_source_retently" "my_source_retently" {
configuration = {
credentials = {
- source_retently_authentication_mechanism_authenticate_via_retently_o_auth_ = {
- auth_type = "Client"
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
- refresh_token = "...my_refresh_token..."
+ authenticate_via_retently_o_auth = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ refresh_token = "...my_refresh_token..."
}
}
- source_type = "retently"
}
- name = "Kelly Pfeffer"
- secret_id = "...my_secret_id..."
- workspace_id = "c9f1cc50-3f6c-439b-8d0a-6290f957f385"
+ definition_id = "2c041244-3656-49fd-a4cd-2bcf08a635d7"
+ name = "Dave Schinner"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6ceccfae-93f7-4f0f-8c4b-4f8d4f6833e1"
}
```
@@ -37,11 +37,12 @@ resource "airbyte_source_retently" "my_source_retently" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -55,71 +56,38 @@ resource "airbyte_source_retently" "my_source_retently" {
Optional:
- `credentials` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["retently"]
### Nested Schema for `configuration.credentials`
Optional:
-- `source_retently_authentication_mechanism_authenticate_via_retently_o_auth` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials--source_retently_authentication_mechanism_authenticate_via_retently_o_auth))
-- `source_retently_authentication_mechanism_authenticate_with_api_token` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials--source_retently_authentication_mechanism_authenticate_with_api_token))
-- `source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials--source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth))
-- `source_retently_update_authentication_mechanism_authenticate_with_api_token` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials--source_retently_update_authentication_mechanism_authenticate_with_api_token))
+- `authenticate_via_retently_o_auth` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_retently_o_auth))
+- `authenticate_with_api_token` (Attributes) Choose how to authenticate to Retently (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_with_api_token))
-
-### Nested Schema for `configuration.credentials.source_retently_authentication_mechanism_authenticate_via_retently_o_auth`
+
+### Nested Schema for `configuration.credentials.authenticate_via_retently_o_auth`
Required:
- `client_id` (String) The Client ID of your Retently developer application.
- `client_secret` (String) The Client Secret of your Retently developer application.
-- `refresh_token` (String) Retently Refresh Token which can be used to fetch new Bearer Tokens when the current one expires.
+- `refresh_token` (String, Sensitive) Retently Refresh Token which can be used to fetch new Bearer Tokens when the current one expires.
Optional:
- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["Client"]
-
-### Nested Schema for `configuration.credentials.source_retently_authentication_mechanism_authenticate_with_api_token`
+
+### Nested Schema for `configuration.credentials.authenticate_with_api_token`
Required:
-- `api_key` (String) Retently API Token. See the docs for more information on how to obtain this key.
+- `api_key` (String, Sensitive) Retently API Token. See the docs for more information on how to obtain this key.
Optional:
- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["Token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth`
-
-Required:
-
-- `client_id` (String) The Client ID of your Retently developer application.
-- `client_secret` (String) The Client Secret of your Retently developer application.
-- `refresh_token` (String) Retently Refresh Token which can be used to fetch new Bearer Tokens when the current one expires.
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["Client"]
-
-
-
-### Nested Schema for `configuration.credentials.source_retently_update_authentication_mechanism_authenticate_with_api_token`
-
-Required:
-
-- `api_key` (String) Retently API Token. See the docs for more information on how to obtain this key.
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["Token"]
diff --git a/docs/resources/source_rki_covid.md b/docs/resources/source_rki_covid.md
index 9f0c9d8c2..7fa499c52 100644
--- a/docs/resources/source_rki_covid.md
+++ b/docs/resources/source_rki_covid.md
@@ -15,12 +15,12 @@ SourceRkiCovid Resource
```terraform
resource "airbyte_source_rki_covid" "my_source_rkicovid" {
configuration = {
- source_type = "rki-covid"
- start_date = "...my_start_date..."
+ start_date = "...my_start_date..."
}
- name = "Penny Morissette"
- secret_id = "...my_secret_id..."
- workspace_id = "7ef807aa-e03f-433c-a79f-b9de4032ba26"
+ definition_id = "f3303ab0-45c8-491f-a9c8-dcb6cc1cd73d"
+ name = "Leticia Zieme Sr."
+ secret_id = "...my_secret_id..."
+ workspace_id = "36d5989e-7dba-4ce4-805a-6307276c58b5"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_rki_covid" "my_source_rkicovid" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_rki_covid" "my_source_rkicovid" {
Required:
-- `source_type` (String) must be one of ["rki-covid"]
- `start_date` (String) UTC date in the format 2017-01-25. Any data before this date will not be replicated.
diff --git a/docs/resources/source_rss.md b/docs/resources/source_rss.md
index dfb2fe4b5..82fd10a38 100644
--- a/docs/resources/source_rss.md
+++ b/docs/resources/source_rss.md
@@ -15,12 +15,12 @@ SourceRss Resource
```terraform
resource "airbyte_source_rss" "my_source_rss" {
configuration = {
- source_type = "rss"
- url = "...my_url..."
+ url = "...my_url..."
}
- name = "Gustavo Donnelly"
- secret_id = "...my_secret_id..."
- workspace_id = "ba9216bc-b415-4835-8736-41723133edc0"
+ definition_id = "da21f739-86a7-41e9-92c2-b81056bc977a"
+ name = "Alison Wunsch"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ff8dd835-d804-427d-a3a4-e1d8c723c8e5"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_rss" "my_source_rss" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_rss" "my_source_rss" {
Required:
-- `source_type` (String) must be one of ["rss"]
- `url` (String) RSS Feed URL
diff --git a/docs/resources/source_s3.md b/docs/resources/source_s3.md
index de1bd4527..d4403ae72 100644
--- a/docs/resources/source_s3.md
+++ b/docs/resources/source_s3.md
@@ -19,11 +19,9 @@ resource "airbyte_source_s3" "my_source_s3" {
aws_secret_access_key = "...my_aws_secret_access_key..."
bucket = "...my_bucket..."
dataset = "...my_dataset..."
- endpoint = "...my_endpoint..."
+ endpoint = "https://my-s3-endpoint.com"
format = {
- source_s3_file_format_avro = {
- filetype = "avro"
- }
+ avro = {}
}
path_pattern = "**"
provider = {
@@ -34,17 +32,14 @@ resource "airbyte_source_s3" "my_source_s3" {
path_prefix = "...my_path_prefix..."
start_date = "2021-01-01T00:00:00Z"
}
- schema = "{\"column_1\": \"number\", \"column_2\": \"string\", \"column_3\": \"array\", \"column_4\": \"object\", \"column_5\": \"boolean\"}"
- source_type = "s3"
- start_date = "2021-01-01T00:00:00.000000Z"
+ schema = "{\"column_1\": \"number\", \"column_2\": \"string\", \"column_3\": \"array\", \"column_4\": \"object\", \"column_5\": \"boolean\"}"
+ start_date = "2021-01-01T00:00:00.000000Z"
streams = [
{
- days_to_sync_if_history_is_full = 1
- file_type = "...my_file_type..."
+ days_to_sync_if_history_is_full = 3
format = {
- source_s3_file_based_stream_config_format_avro_format = {
- double_as_string = true
- filetype = "avro"
+ source_s3_avro_format = {
+ double_as_string = false
}
}
globs = [
@@ -52,16 +47,17 @@ resource "airbyte_source_s3" "my_source_s3" {
]
input_schema = "...my_input_schema..."
legacy_prefix = "...my_legacy_prefix..."
- name = "Flora Rempel"
+ name = "Tyler Grimes"
primary_key = "...my_primary_key..."
schemaless = false
validation_policy = "Skip Record"
},
]
}
- name = "Jacqueline Kiehn"
- secret_id = "...my_secret_id..."
- workspace_id = "2c22c553-5049-45c5-9bb3-c57c1e4981e8"
+ definition_id = "5b5a324c-6128-4aab-bad0-730782c3e822"
+ name = "Mr. Phillip Hermann DVM"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3e25c699-48d0-4388-851e-c06fd3b8cc64"
}
```
@@ -72,11 +68,12 @@ resource "airbyte_source_s3" "my_source_s3" {
- `configuration` (Attributes) NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes
because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK. (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -90,19 +87,20 @@ because it is responsible for converting legacy S3 v3 configs into v4 configs us
Required:
- `bucket` (String) Name of the S3 bucket where the file(s) exist.
-- `source_type` (String) must be one of ["s3"]
- `streams` (Attributes List) Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. (see [below for nested schema](#nestedatt--configuration--streams))
Optional:
-- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
+- `aws_access_key_id` (String, Sensitive) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
+- `aws_secret_access_key` (String, Sensitive) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- `dataset` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores.
-- `endpoint` (String) Endpoint to an S3 compatible service. Leave empty to use AWS.
+- `endpoint` (String) Default: ""
+Endpoint to an S3 compatible service. Leave empty to use AWS. The custom endpoint must be secure, but the 'https' prefix is not required.
- `format` (Attributes) Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate (see [below for nested schema](#nestedatt--configuration--format))
- `path_pattern` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files.
- `provider` (Attributes) Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services (see [below for nested schema](#nestedatt--configuration--provider))
-- `schema` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.
+- `schema` (String) Default: "{}"
+Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
@@ -110,19 +108,20 @@ Optional:
Required:
-- `file_type` (String) The data file type that is being extracted for a stream.
+- `format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format))
- `name` (String) The name of the stream.
Optional:
-- `days_to_sync_if_history_is_full` (Number) When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
-- `format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format))
+- `days_to_sync_if_history_is_full` (Number) Default: 3
+When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
- `globs` (List of String) The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
- `input_schema` (String) The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
- `legacy_prefix` (String) The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.
-- `primary_key` (String) The column or columns (for a composite key) that serves as the unique identifier of a record.
-- `schemaless` (Boolean) When enabled, syncs will not validate or structure records against the stream's schema.
-- `validation_policy` (String) must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
+- `primary_key` (String, Sensitive) The column or columns (for a composite key) that serves as the unique identifier of a record.
+- `schemaless` (Boolean) Default: false
+When enabled, syncs will not validate or structure records against the stream's schema.
+- `validation_policy` (String) must be one of ["Emit Record", "Skip Record", "Wait for Discover"]; Default: "Emit Record"
The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
@@ -130,185 +129,95 @@ The name of the validation policy that dictates sync behavior when a record does
Optional:
-- `source_s3_file_based_stream_config_format_avro_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_avro_format))
-- `source_s3_file_based_stream_config_format_csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_csv_format))
-- `source_s3_file_based_stream_config_format_jsonl_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_jsonl_format))
-- `source_s3_file_based_stream_config_format_parquet_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_parquet_format))
-- `source_s3_update_file_based_stream_config_format_avro_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_avro_format))
-- `source_s3_update_file_based_stream_config_format_csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_csv_format))
-- `source_s3_update_file_based_stream_config_format_jsonl_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_jsonl_format))
-- `source_s3_update_file_based_stream_config_format_parquet_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format))
+- `avro_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--avro_format))
+- `csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--csv_format))
+- `document_file_type_format_experimental` (Attributes) Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file. (see [below for nested schema](#nestedatt--configuration--streams--format--document_file_type_format_experimental))
+- `jsonl_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--jsonl_format))
+- `parquet_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format))
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
+
+### Nested Schema for `configuration.streams.format.parquet_format`
Optional:
-- `double_as_string` (Boolean) Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.
-- `filetype` (String) must be one of ["avro"]
+- `double_as_string` (Boolean) Default: false
+Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers.
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
+
+### Nested Schema for `configuration.streams.format.parquet_format`
Optional:
-- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
-- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data.
-- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
+- `delimiter` (String) Default: ","
+The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
+- `double_quote` (Boolean) Default: true
+Whether two quotes in a quoted CSV value denote a single quote in the data.
+- `encoding` (String) Default: "utf8"
+The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank.
- `false_values` (List of String) A set of case-sensitive strings that should be interpreted as false values.
-- `filetype` (String) must be one of ["csv"]
-- `header_definition` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition))
-- `inference_type` (String) must be one of ["None", "Primitive Types Only"]
+- `header_definition` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format--header_definition))
+- `inference_type` (String) must be one of ["None", "Primitive Types Only"]; Default: "None"
How to infer the types of the columns. If none, inference default to strings.
- `null_values` (List of String) A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
-- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank.
-- `skip_rows_after_header` (Number) The number of rows to skip after the header row.
-- `skip_rows_before_header` (Number) The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
-- `strings_can_be_null` (Boolean) Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
+- `quote_char` (String) Default: "\""
+The character used for quoting CSV values. To disallow quoting, make this field blank.
+- `skip_rows_after_header` (Number) Default: 0
+The number of rows to skip after the header row.
+- `skip_rows_before_header` (Number) Default: 0
+The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
+- `strings_can_be_null` (Boolean) Default: true
+Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
- `true_values` (List of String) A set of case-sensitive strings that should be interpreted as true values.
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition`
+
+### Nested Schema for `configuration.streams.format.parquet_format.header_definition`
Optional:
-- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated))
-- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv))
-- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided))
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
+- `autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format--header_definition--autogenerated))
+- `from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format--header_definition--from_csv))
+- `user_provided` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--parquet_format--header_definition--user_provided))
-Optional:
-
-- `header_definition_type` (String) must be one of ["Autogenerated"]
+
+### Nested Schema for `configuration.streams.format.parquet_format.header_definition.user_provided`
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
+
+### Nested Schema for `configuration.streams.format.parquet_format.header_definition.user_provided`
-Optional:
-- `header_definition_type` (String) must be one of ["From CSV"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
+
+### Nested Schema for `configuration.streams.format.parquet_format.header_definition.user_provided`
Required:
- `column_names` (List of String) The column names that will be used while emitting the CSV records
-Optional:
-
-- `header_definition_type` (String) must be one of ["User Provided"]
-
-
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Optional:
-
-- `filetype` (String) must be one of ["jsonl"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Optional:
-
-- `decimal_as_float` (Boolean) Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
-- `filetype` (String) must be one of ["parquet"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Optional:
-
-- `double_as_string` (Boolean) Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.
-- `filetype` (String) must be one of ["avro"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Optional:
-
-- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
-- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data.
-- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
-- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank.
-- `false_values` (List of String) A set of case-sensitive strings that should be interpreted as false values.
-- `filetype` (String) must be one of ["csv"]
-- `header_definition` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition))
-- `inference_type` (String) must be one of ["None", "Primitive Types Only"]
-How to infer the types of the columns. If none, inference default to strings.
-- `null_values` (List of String) A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
-- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank.
-- `skip_rows_after_header` (Number) The number of rows to skip after the header row.
-- `skip_rows_before_header` (Number) The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
-- `strings_can_be_null` (Boolean) Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
-- `true_values` (List of String) A set of case-sensitive strings that should be interpreted as true values.
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition`
-
-Optional:
-
-- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated))
-- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv))
-- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided))
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
-
-Optional:
-- `header_definition_type` (String) must be one of ["Autogenerated"]
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
+
+### Nested Schema for `configuration.streams.format.parquet_format`
Optional:
-- `header_definition_type` (String) must be one of ["From CSV"]
-
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided`
-
-Required:
-
-- `column_names` (List of String) The column names that will be used while emitting the CSV records
-
-Optional:
-
-- `header_definition_type` (String) must be one of ["User Provided"]
-
+- `skip_unprocessable_file_types` (Boolean) Default: true
+If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.
+
+### Nested Schema for `configuration.streams.format.parquet_format`
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
-
-Optional:
-
-- `filetype` (String) must be one of ["jsonl"]
-
-
-### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format`
+
+### Nested Schema for `configuration.streams.format.parquet_format`
Optional:
-- `decimal_as_float` (Boolean) Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
-- `filetype` (String) must be one of ["parquet"]
+- `decimal_as_float` (Boolean) Default: false
+Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
@@ -318,111 +227,62 @@ Optional:
Optional:
-- `source_s3_file_format_avro` (Attributes) This connector utilises fastavro for Avro parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_file_format_avro))
-- `source_s3_file_format_csv` (Attributes) This connector utilises PyArrow (Apache Arrow) for CSV parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_file_format_csv))
-- `source_s3_file_format_jsonl` (Attributes) This connector uses PyArrow for JSON Lines (jsonl) file parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_file_format_jsonl))
-- `source_s3_file_format_parquet` (Attributes) This connector utilises PyArrow (Apache Arrow) for Parquet parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_file_format_parquet))
-- `source_s3_update_file_format_avro` (Attributes) This connector utilises fastavro for Avro parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_update_file_format_avro))
-- `source_s3_update_file_format_csv` (Attributes) This connector utilises PyArrow (Apache Arrow) for CSV parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_update_file_format_csv))
-- `source_s3_update_file_format_jsonl` (Attributes) This connector uses PyArrow for JSON Lines (jsonl) file parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_update_file_format_jsonl))
-- `source_s3_update_file_format_parquet` (Attributes) This connector utilises PyArrow (Apache Arrow) for Parquet parsing. (see [below for nested schema](#nestedatt--configuration--format--source_s3_update_file_format_parquet))
-
-
-### Nested Schema for `configuration.format.source_s3_file_format_avro`
-
-Optional:
-
-- `filetype` (String) must be one of ["avro"]
-
-
-
-### Nested Schema for `configuration.format.source_s3_file_format_csv`
-
-Optional:
-
-- `additional_reader_options` (String) Optionally add a valid JSON string here to provide additional options to the csv reader. Mappings must correspond to options detailed here. 'column_types' is used internally to handle schema so overriding that would likely cause problems.
-- `advanced_options` (String) Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above.
-- `block_size` (Number) The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
-- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
-- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data.
-- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
-- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank.
-- `filetype` (String) must be one of ["csv"]
-- `infer_datatypes` (Boolean) Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings
-- `newlines_in_values` (Boolean) Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.
-- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank.
-
-
-
-### Nested Schema for `configuration.format.source_s3_file_format_jsonl`
-
-Optional:
-
-- `block_size` (Number) The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
-- `filetype` (String) must be one of ["jsonl"]
-- `newlines_in_values` (Boolean) Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.
-- `unexpected_field_behavior` (String) must be one of ["ignore", "infer", "error"]
-How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details
-
-
-
-### Nested Schema for `configuration.format.source_s3_file_format_parquet`
-
-Optional:
-
-- `batch_size` (Number) Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.
-- `buffer_size` (Number) Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.
-- `columns` (List of String) If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns.
-- `filetype` (String) must be one of ["parquet"]
-
-
-
-### Nested Schema for `configuration.format.source_s3_update_file_format_avro`
-
-Optional:
+- `avro` (Attributes) This connector utilises fastavro for Avro parsing. (see [below for nested schema](#nestedatt--configuration--format--avro))
+- `csv` (Attributes) This connector utilises PyArrow (Apache Arrow) for CSV parsing. (see [below for nested schema](#nestedatt--configuration--format--csv))
+- `jsonl` (Attributes) This connector uses PyArrow for JSON Lines (jsonl) file parsing. (see [below for nested schema](#nestedatt--configuration--format--jsonl))
+- `parquet` (Attributes) This connector utilises PyArrow (Apache Arrow) for Parquet parsing. (see [below for nested schema](#nestedatt--configuration--format--parquet))
-- `filetype` (String) must be one of ["avro"]
+
+### Nested Schema for `configuration.format.avro`
-
-### Nested Schema for `configuration.format.source_s3_update_file_format_csv`
+
+### Nested Schema for `configuration.format.csv`
Optional:
- `additional_reader_options` (String) Optionally add a valid JSON string here to provide additional options to the csv reader. Mappings must correspond to options detailed here. 'column_types' is used internally to handle schema so overriding that would likely cause problems.
- `advanced_options` (String) Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above.
-- `block_size` (Number) The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
-- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
-- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data.
-- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
+- `block_size` (Number) Default: 10000
+The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
+- `delimiter` (String) Default: ","
+The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
+- `double_quote` (Boolean) Default: true
+Whether two quotes in a quoted CSV value denote a single quote in the data.
+- `encoding` (String) Default: "utf8"
+The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank.
-- `filetype` (String) must be one of ["csv"]
-- `infer_datatypes` (Boolean) Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings
-- `newlines_in_values` (Boolean) Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.
-- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank.
+- `infer_datatypes` (Boolean) Default: true
+Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings
+- `newlines_in_values` (Boolean) Default: false
+Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.
+- `quote_char` (String) Default: "\""
+The character used for quoting CSV values. To disallow quoting, make this field blank.
-
-### Nested Schema for `configuration.format.source_s3_update_file_format_jsonl`
+
+### Nested Schema for `configuration.format.jsonl`
Optional:
-- `block_size` (Number) The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
-- `filetype` (String) must be one of ["jsonl"]
-- `newlines_in_values` (Boolean) Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.
-- `unexpected_field_behavior` (String) must be one of ["ignore", "infer", "error"]
+- `block_size` (Number) Default: 0
+The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
+- `newlines_in_values` (Boolean) Default: false
+Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.
+- `unexpected_field_behavior` (String) must be one of ["ignore", "infer", "error"]; Default: "infer"
How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details
-
-### Nested Schema for `configuration.format.source_s3_update_file_format_parquet`
+
+### Nested Schema for `configuration.format.parquet`
Optional:
-- `batch_size` (Number) Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.
-- `buffer_size` (Number) Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.
+- `batch_size` (Number) Default: 65536
+Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.
+- `buffer_size` (Number) Default: 2
+Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.
- `columns` (List of String) If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns.
-- `filetype` (String) must be one of ["parquet"]
@@ -431,11 +291,13 @@ Optional:
Optional:
-- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
-- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
+- `aws_access_key_id` (String, Sensitive) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
+- `aws_secret_access_key` (String, Sensitive) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- `bucket` (String) Name of the S3 bucket where the file(s) exist.
-- `endpoint` (String) Endpoint to an S3 compatible service. Leave empty to use AWS.
-- `path_prefix` (String) By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate.
+- `endpoint` (String) Default: ""
+Endpoint to an S3 compatible service. Leave empty to use AWS.
+- `path_prefix` (String) Default: ""
+By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate.
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated.
diff --git a/docs/resources/source_salesforce.md b/docs/resources/source_salesforce.md
index d3d243efe..4fe37c42e 100644
--- a/docs/resources/source_salesforce.md
+++ b/docs/resources/source_salesforce.md
@@ -15,24 +15,23 @@ SourceSalesforce Resource
```terraform
resource "airbyte_source_salesforce" "my_source_salesforce" {
configuration = {
- auth_type = "Client"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- force_use_bulk_api = true
+ force_use_bulk_api = false
is_sandbox = false
refresh_token = "...my_refresh_token..."
- source_type = "salesforce"
start_date = "2021-07-25"
streams_criteria = [
{
- criteria = "not contains"
+ criteria = "ends not with"
value = "...my_value..."
},
]
}
- name = "Gregg Boyer Sr."
- secret_id = "...my_secret_id..."
- workspace_id = "ebde64bf-cc54-469d-8015-dfa796206bef"
+ definition_id = "3692db06-d3b4-499d-8bda-e34afcb06318"
+ name = "Ms. Donna Krajcik"
+ secret_id = "...my_secret_id..."
+ workspace_id = "44d2b896-5caa-4bab-ae9d-6378e7243c02"
}
```
@@ -42,11 +41,12 @@ resource "airbyte_source_salesforce" "my_source_salesforce" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -61,14 +61,14 @@ Required:
- `client_id` (String) Enter your Salesforce developer application's Client ID
- `client_secret` (String) Enter your Salesforce developer application's Client secret
-- `refresh_token` (String) Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account.
-- `source_type` (String) must be one of ["salesforce"]
+- `refresh_token` (String, Sensitive) Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account.
Optional:
-- `auth_type` (String) must be one of ["Client"]
-- `force_use_bulk_api` (Boolean) Toggle to use Bulk API (this might cause empty fields for some streams)
-- `is_sandbox` (Boolean) Toggle if you're using a Salesforce Sandbox
+- `force_use_bulk_api` (Boolean) Default: false
+Toggle to use Bulk API (this might cause empty fields for some streams)
+- `is_sandbox` (Boolean) Default: false
+Toggle if you're using a Salesforce Sandbox
- `start_date` (String) Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ format. Airbyte will replicate the data updated on and after this date. If this field is blank, Airbyte will replicate the data for last two years.
- `streams_criteria` (Attributes List) Add filters to select only required stream based on `SObject` name. Use this field to filter which tables are displayed by this connector. This is useful if your Salesforce account has a large number of tables (>1000), in which case you may find it easier to navigate the UI and speed up the connector's performance if you restrict the tables displayed by this connector. (see [below for nested schema](#nestedatt--configuration--streams_criteria))
@@ -77,7 +77,10 @@ Optional:
Required:
-- `criteria` (String) must be one of ["starts with", "ends with", "contains", "exacts", "starts not with", "ends not with", "not contains", "not exacts"]
- `value` (String)
+Optional:
+
+- `criteria` (String) must be one of ["starts with", "ends with", "contains", "exacts", "starts not with", "ends not with", "not contains", "not exacts"]; Default: "contains"
+
diff --git a/docs/resources/source_salesloft.md b/docs/resources/source_salesloft.md
index a90fdd6c6..f2d1d4574 100644
--- a/docs/resources/source_salesloft.md
+++ b/docs/resources/source_salesloft.md
@@ -16,17 +16,16 @@ SourceSalesloft Resource
resource "airbyte_source_salesloft" "my_source_salesloft" {
configuration = {
credentials = {
- source_salesloft_credentials_authenticate_via_api_key = {
- api_key = "...my_api_key..."
- auth_type = "api_key"
+ authenticate_via_api_key = {
+ api_key = "...my_api_key..."
}
}
- source_type = "salesloft"
- start_date = "2020-11-16T00:00:00Z"
+ start_date = "2020-11-16T00:00:00Z"
}
- name = "Lynda Dicki"
- secret_id = "...my_secret_id..."
- workspace_id = "2c1aa010-e9aa-4c2e-9135-586d18f9f97a"
+ definition_id = "c073abf4-dfeb-4d41-8e5a-603e6b3fca03"
+ name = "Terrance Corwin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "14510264-179a-4403-81bb-87b13a43b1ea"
}
```
@@ -36,11 +35,12 @@ resource "airbyte_source_salesloft" "my_source_salesloft" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -54,7 +54,6 @@ resource "airbyte_source_salesloft" "my_source_salesloft" {
Required:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["salesloft"]
- `start_date` (String) The date from which you'd like to replicate data for Salesloft API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
@@ -62,52 +61,26 @@ Required:
Optional:
-- `source_salesloft_credentials_authenticate_via_api_key` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_salesloft_credentials_authenticate_via_api_key))
-- `source_salesloft_credentials_authenticate_via_o_auth` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_salesloft_credentials_authenticate_via_o_auth))
-- `source_salesloft_update_credentials_authenticate_via_api_key` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_salesloft_update_credentials_authenticate_via_api_key))
-- `source_salesloft_update_credentials_authenticate_via_o_auth` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_salesloft_update_credentials_authenticate_via_o_auth))
+- `authenticate_via_api_key` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_api_key))
+- `authenticate_via_o_auth` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--authenticate_via_o_auth))
-
-### Nested Schema for `configuration.credentials.source_salesloft_credentials_authenticate_via_api_key`
+
+### Nested Schema for `configuration.credentials.authenticate_via_api_key`
Required:
-- `api_key` (String) API Key for making authenticated requests. More instruction on how to find this value in our docs
-- `auth_type` (String) must be one of ["api_key"]
+- `api_key` (String, Sensitive) API Key for making authenticated requests. More instruction on how to find this value in our docs
-
-### Nested Schema for `configuration.credentials.source_salesloft_credentials_authenticate_via_o_auth`
+
+### Nested Schema for `configuration.credentials.authenticate_via_o_auth`
Required:
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
- `client_id` (String) The Client ID of your Salesloft developer application.
- `client_secret` (String) The Client Secret of your Salesloft developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-
-
-### Nested Schema for `configuration.credentials.source_salesloft_update_credentials_authenticate_via_api_key`
-
-Required:
-
-- `api_key` (String) API Key for making authenticated requests. More instruction on how to find this value in our docs
-- `auth_type` (String) must be one of ["api_key"]
-
-
-
-### Nested Schema for `configuration.credentials.source_salesloft_update_credentials_authenticate_via_o_auth`
-
-Required:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your Salesloft developer application.
-- `client_secret` (String) The Client Secret of your Salesloft developer application.
-- `refresh_token` (String) The token for obtaining a new access token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
+- `refresh_token` (String, Sensitive) The token for obtaining a new access token.
+- `token_expiry_date` (String, Sensitive) The date-time when the access token should be refreshed.
diff --git a/docs/resources/source_sap_fieldglass.md b/docs/resources/source_sap_fieldglass.md
index 5e2a052ca..da686b68a 100644
--- a/docs/resources/source_sap_fieldglass.md
+++ b/docs/resources/source_sap_fieldglass.md
@@ -15,12 +15,12 @@ SourceSapFieldglass Resource
```terraform
resource "airbyte_source_sap_fieldglass" "my_source_sapfieldglass" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "sap-fieldglass"
+ api_key = "...my_api_key..."
}
- name = "Juana Williamson"
- secret_id = "...my_secret_id..."
- workspace_id = "2bf7d67c-a84a-4d99-b41d-61243531870c"
+ definition_id = "d703a4ee-b23f-4e55-b942-b58b6d0d2093"
+ name = "Krystal Krajcik"
+ secret_id = "...my_secret_id..."
+ workspace_id = "8d8619ec-3981-4178-ae44-e5272c20971d"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_sap_fieldglass" "my_source_sapfieldglass" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_sap_fieldglass" "my_source_sapfieldglass" {
Required:
-- `api_key` (String) API Key
-- `source_type` (String) must be one of ["sap-fieldglass"]
+- `api_key` (String, Sensitive) API Key
diff --git a/docs/resources/source_secoda.md b/docs/resources/source_secoda.md
index 14e946432..e24a1d03b 100644
--- a/docs/resources/source_secoda.md
+++ b/docs/resources/source_secoda.md
@@ -15,12 +15,12 @@ SourceSecoda Resource
```terraform
resource "airbyte_source_secoda" "my_source_secoda" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "secoda"
+ api_key = "...my_api_key..."
}
- name = "Brett Leannon I"
- secret_id = "...my_secret_id..."
- workspace_id = "ad421bd4-3d1f-40cb-8a00-03eb22d9b3a7"
+ definition_id = "544a65a7-d2b4-4609-94ec-6467c968cce9"
+ name = "Edna Mitchell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "8a35db32-f900-4f8c-be73-78a587702297"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_secoda" "my_source_secoda" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_secoda" "my_source_secoda" {
Required:
-- `api_key` (String) Your API Access Key. See here. The key is case sensitive.
-- `source_type` (String) must be one of ["secoda"]
+- `api_key` (String, Sensitive) Your API Access Key. See here. The key is case sensitive.
diff --git a/docs/resources/source_sendgrid.md b/docs/resources/source_sendgrid.md
index 8ec44ae9f..4348d3692 100644
--- a/docs/resources/source_sendgrid.md
+++ b/docs/resources/source_sendgrid.md
@@ -15,13 +15,13 @@ SourceSendgrid Resource
```terraform
resource "airbyte_source_sendgrid" "my_source_sendgrid" {
configuration = {
- apikey = "...my_apikey..."
- source_type = "sendgrid"
- start_time = "2020-01-01T01:01:01Z"
+ apikey = "...my_apikey..."
+ start_time = "2020-01-01T01:01:01Z"
}
- name = "Shari Pfannerstill"
- secret_id = "...my_secret_id..."
- workspace_id = "41c57d1f-edc2-4050-938d-c3ce185472f9"
+ definition_id = "37ec3d2a-b419-48d2-afe5-e34c931e7a72"
+ name = "Toby McGlynn"
+ secret_id = "...my_secret_id..."
+ workspace_id = "22c4d080-cde0-439d-95e8-c5778ddd1091"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_sendgrid" "my_source_sendgrid" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_sendgrid" "my_source_sendgrid" {
Required:
-- `apikey` (String) API Key, use admin to generate this key.
-- `source_type` (String) must be one of ["sendgrid"]
+- `apikey` (String, Sensitive) API Key, use admin to generate this key.
Optional:
diff --git a/docs/resources/source_sendinblue.md b/docs/resources/source_sendinblue.md
index 0d69e89da..e72437c22 100644
--- a/docs/resources/source_sendinblue.md
+++ b/docs/resources/source_sendinblue.md
@@ -15,12 +15,12 @@ SourceSendinblue Resource
```terraform
resource "airbyte_source_sendinblue" "my_source_sendinblue" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "sendinblue"
+ api_key = "...my_api_key..."
}
- name = "Terence Kassulke III"
- secret_id = "...my_secret_id..."
- workspace_id = "6a8be344-4eac-48b3-a287-5c6c1fe606d0"
+ definition_id = "0de87dfe-701e-4dbd-8d10-cf57eb672b8a"
+ name = "Derek Heller"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3fb2a63d-a091-47a6-951f-ac3e8ec69bab"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_sendinblue" "my_source_sendinblue" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_sendinblue" "my_source_sendinblue" {
Required:
-- `api_key` (String) Your API Key. See here.
-- `source_type` (String) must be one of ["sendinblue"]
+- `api_key` (String, Sensitive) Your API Key. See here.
diff --git a/docs/resources/source_senseforce.md b/docs/resources/source_senseforce.md
index 17f4207e8..dd2c5d97f 100644
--- a/docs/resources/source_senseforce.md
+++ b/docs/resources/source_senseforce.md
@@ -18,13 +18,13 @@ resource "airbyte_source_senseforce" "my_source_senseforce" {
access_token = "...my_access_token..."
backend_url = "https://galaxyapi.senseforce.io"
dataset_id = "8f418098-ca28-4df5-9498-0df9fe78eda7"
- slice_range = 10
- source_type = "senseforce"
+ slice_range = 180
start_date = "2017-01-25"
}
- name = "Rodolfo Langworth"
- secret_id = "...my_secret_id..."
- workspace_id = "e50c1666-1a1d-4913-aa7e-8d53213f3f65"
+ definition_id = "974cd0d5-39af-4231-9a6f-8898d74d7cd0"
+ name = "Lillie Anderson"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3c633751-f6c5-444c-a0e7-3f23dc46e62d"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_senseforce" "my_source_senseforce" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,14 +52,14 @@ resource "airbyte_source_senseforce" "my_source_senseforce" {
Required:
-- `access_token` (String) Your API access token. See here. The toke is case sensitive.
+- `access_token` (String, Sensitive) Your API access token. See here. The token is case sensitive.
- `backend_url` (String) Your Senseforce API backend URL. This is the URL shown during the Login screen. See here for more details. (Note: Most Senseforce backend APIs have the term 'galaxy' in their ULR)
- `dataset_id` (String) The ID of the dataset you want to synchronize. The ID can be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to synchronize a specific dataset, each dataset you want to synchronize needs to be implemented as a separate airbyte source).
-- `source_type` (String) must be one of ["senseforce"]
- `start_date` (String) UTC date and time in the format 2017-01-25. Only data with "Timestamp" after this date will be replicated. Important note: This start date must be set to the first day of where your dataset provides data. If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later
Optional:
-- `slice_range` (Number) The time increment used by the connector when requesting data from the Senseforce API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted and the more likely one could run into rate limites. Furthermore, consider that large chunks of time might take a long time for the Senseforce query to return data - meaning it could take in effect longer than with more smaller time slices. If there are a lot of data per day, set this setting to 1. If there is only very little data per day, you might change the setting to 10 or more.
+- `slice_range` (Number) Default: 10
+The time increment used by the connector when requesting data from the Senseforce API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted and the more likely one could run into rate limits. Furthermore, consider that large chunks of time might take a long time for the Senseforce query to return data - meaning it could take in effect longer than with more smaller time slices. If there are a lot of data per day, set this setting to 1. If there is only very little data per day, you might change the setting to 10 or more.
diff --git a/docs/resources/source_sentry.md b/docs/resources/source_sentry.md
index dfd8b7e3c..3d317537f 100644
--- a/docs/resources/source_sentry.md
+++ b/docs/resources/source_sentry.md
@@ -19,14 +19,14 @@ resource "airbyte_source_sentry" "my_source_sentry" {
discover_fields = [
"{ \"see\": \"documentation\" }",
]
- hostname = "muted-ingredient.biz"
+ hostname = "impressionable-honesty.org"
organization = "...my_organization..."
project = "...my_project..."
- source_type = "sentry"
}
- name = "Krystal Quitzon"
- secret_id = "...my_secret_id..."
- workspace_id = "4c59f0a5-6ceb-4cad-a29c-a79181c95671"
+ definition_id = "72778d5d-b92d-416e-9dcb-06fc1f7a171f"
+ name = "Brooke Breitenberg"
+ secret_id = "...my_secret_id..."
+ workspace_id = "bfddb09b-9a90-43f6-8eb4-a54b7cf533c5"
}
```
@@ -36,11 +36,12 @@ resource "airbyte_source_sentry" "my_source_sentry" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -53,14 +54,14 @@ resource "airbyte_source_sentry" "my_source_sentry" {
Required:
-- `auth_token` (String) Log into Sentry and then create authentication tokens.For self-hosted, you can find or create authentication tokens by visiting "{instance_url_prefix}/settings/account/api/auth-tokens/"
+- `auth_token` (String, Sensitive) Log into Sentry and then create authentication tokens. For self-hosted, you can find or create authentication tokens by visiting "{instance_url_prefix}/settings/account/api/auth-tokens/"
- `organization` (String) The slug of the organization the groups belong to.
- `project` (String) The name (slug) of the Project you want to sync.
-- `source_type` (String) must be one of ["sentry"]
Optional:
- `discover_fields` (List of String) Fields to retrieve when fetching discover events
-- `hostname` (String) Host name of Sentry API server.For self-hosted, specify your host name here. Otherwise, leave it empty.
+- `hostname` (String) Default: "sentry.io"
+Host name of Sentry API server. For self-hosted, specify your host name here. Otherwise, leave it empty.
diff --git a/docs/resources/source_sftp.md b/docs/resources/source_sftp.md
index c922caf87..8bf287270 100644
--- a/docs/resources/source_sftp.md
+++ b/docs/resources/source_sftp.md
@@ -16,22 +16,21 @@ SourceSftp Resource
resource "airbyte_source_sftp" "my_source_sftp" {
configuration = {
credentials = {
- source_sftp_authentication_wildcard_password_authentication = {
- auth_method = "SSH_PASSWORD_AUTH"
+ source_sftp_password_authentication = {
auth_user_password = "...my_auth_user_password..."
}
}
file_pattern = "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`"
file_types = "csv,json"
folder_path = "/logs/2022"
- host = "www.host.com"
+ host = "192.0.2.1"
port = 22
- source_type = "sftp"
user = "...my_user..."
}
- name = "Miss Tommy Emard"
- secret_id = "...my_secret_id..."
- workspace_id = "665163a3-6385-412a-b252-1b9f2e072467"
+ definition_id = "8a56e1f7-b10c-46dd-9e62-eb5fcf365dcc"
+ name = "Rogelio Schoen"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e41cbe1d-2ecd-4015-81d5-2f6c56d3cf89"
}
```
@@ -41,11 +40,12 @@ resource "airbyte_source_sftp" "my_source_sftp" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -59,64 +59,41 @@ resource "airbyte_source_sftp" "my_source_sftp" {
Required:
- `host` (String) The server host address
-- `port` (Number) The server port
-- `source_type` (String) must be one of ["sftp"]
- `user` (String) The server user
Optional:
- `credentials` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials))
-- `file_pattern` (String) The regular expression to specify files for sync in a chosen Folder Path
-- `file_types` (String) Coma separated file types. Currently only 'csv' and 'json' types are supported.
-- `folder_path` (String) The directory to search files for sync
+- `file_pattern` (String) Default: ""
+The regular expression to specify files for sync in a chosen Folder Path
+- `file_types` (String) Default: "csv,json"
+Comma separated file types. Currently only 'csv' and 'json' types are supported.
+- `folder_path` (String) Default: ""
+The directory to search files for sync
+- `port` (Number) Default: 22
+The server port
### Nested Schema for `configuration.credentials`
Optional:
-- `source_sftp_authentication_wildcard_password_authentication` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_sftp_authentication_wildcard_password_authentication))
-- `source_sftp_authentication_wildcard_ssh_key_authentication` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_sftp_authentication_wildcard_ssh_key_authentication))
-- `source_sftp_update_authentication_wildcard_password_authentication` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_sftp_update_authentication_wildcard_password_authentication))
-- `source_sftp_update_authentication_wildcard_ssh_key_authentication` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_sftp_update_authentication_wildcard_ssh_key_authentication))
+- `password_authentication` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials--password_authentication))
+- `ssh_key_authentication` (Attributes) The server authentication method (see [below for nested schema](#nestedatt--configuration--credentials--ssh_key_authentication))
-
-### Nested Schema for `configuration.credentials.source_sftp_authentication_wildcard_password_authentication`
+
+### Nested Schema for `configuration.credentials.password_authentication`
Required:
-- `auth_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through password authentication
-- `auth_user_password` (String) OS-level password for logging into the jump server host
+- `auth_user_password` (String, Sensitive) OS-level password for logging into the jump server host
-
-### Nested Schema for `configuration.credentials.source_sftp_authentication_wildcard_ssh_key_authentication`
+
+### Nested Schema for `configuration.credentials.ssh_key_authentication`
Required:
-- `auth_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through ssh key
-- `auth_ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
-
-
-
-### Nested Schema for `configuration.credentials.source_sftp_update_authentication_wildcard_password_authentication`
-
-Required:
-
-- `auth_method` (String) must be one of ["SSH_PASSWORD_AUTH"]
-Connect through password authentication
-- `auth_user_password` (String) OS-level password for logging into the jump server host
-
-
-
-### Nested Schema for `configuration.credentials.source_sftp_update_authentication_wildcard_ssh_key_authentication`
-
-Required:
-
-- `auth_method` (String) must be one of ["SSH_KEY_AUTH"]
-Connect through ssh key
-- `auth_ssh_key` (String) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
+- `auth_ssh_key` (String, Sensitive) OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
diff --git a/docs/resources/source_sftp_bulk.md b/docs/resources/source_sftp_bulk.md
index 0fc7a449a..2dea85d83 100644
--- a/docs/resources/source_sftp_bulk.md
+++ b/docs/resources/source_sftp_bulk.md
@@ -17,21 +17,21 @@ resource "airbyte_source_sftp_bulk" "my_source_sftpbulk" {
configuration = {
file_most_recent = false
file_pattern = "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`"
- file_type = "json"
+ file_type = "csv"
folder_path = "/logs/2022"
host = "192.0.2.1"
password = "...my_password..."
port = 22
private_key = "...my_private_key..."
separator = ","
- source_type = "sftp-bulk"
start_date = "2017-01-25T00:00:00Z"
stream_name = "ftp_contacts"
- username = "Pearline_Bailey"
+ username = "Serena.Beer65"
}
- name = "Wm Bartoletti"
- secret_id = "...my_secret_id..."
- workspace_id = "50edf22a-94d2-40ec-90ea-41d1f465e851"
+ definition_id = "6ecf0509-1d90-48d9-9001-753384297337"
+ name = "Dr. Jasmine Grimes"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9291353f-9549-4bcc-b4d3-89bbf5d24f5b"
}
```
@@ -41,11 +41,12 @@ resource "airbyte_source_sftp_bulk" "my_source_sftpbulk" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -58,22 +59,26 @@ resource "airbyte_source_sftp_bulk" "my_source_sftpbulk" {
Required:
-- `folder_path` (String) The directory to search files for sync
- `host` (String) The server host address
-- `port` (Number) The server port
-- `source_type` (String) must be one of ["sftp-bulk"]
- `start_date` (String) The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
- `stream_name` (String) The name of the stream or table you want to create
- `username` (String) The server user
Optional:
-- `file_most_recent` (Boolean) Sync only the most recent file for the configured folder path and file pattern
-- `file_pattern` (String) The regular expression to specify files for sync in a chosen Folder Path
-- `file_type` (String) must be one of ["csv", "json"]
+- `file_most_recent` (Boolean) Default: false
+Sync only the most recent file for the configured folder path and file pattern
+- `file_pattern` (String) Default: ""
+The regular expression to specify files for sync in a chosen Folder Path
+- `file_type` (String) must be one of ["csv", "json"]; Default: "csv"
The file type you want to sync. Currently only 'csv' and 'json' files are supported.
-- `password` (String) OS-level password for logging into the jump server host
-- `private_key` (String) The private key
-- `separator` (String) The separator used in the CSV files. Define None if you want to use the Sniffer functionality
+- `folder_path` (String) Default: ""
+The directory to search files for sync
+- `password` (String, Sensitive) OS-level password for logging into the jump server host
+- `port` (Number) Default: 22
+The server port
+- `private_key` (String, Sensitive) The private key
+- `separator` (String) Default: ","
+The separator used in the CSV files. Define None if you want to use the Sniffer functionality
diff --git a/docs/resources/source_shopify.md b/docs/resources/source_shopify.md
index 3fe62e4f9..d6611b183 100644
--- a/docs/resources/source_shopify.md
+++ b/docs/resources/source_shopify.md
@@ -16,18 +16,17 @@ SourceShopify Resource
resource "airbyte_source_shopify" "my_source_shopify" {
configuration = {
credentials = {
- source_shopify_shopify_authorization_method_api_password = {
+ api_password = {
api_password = "...my_api_password..."
- auth_method = "api_password"
}
}
- shop = "my-store"
- source_type = "shopify"
- start_date = "2022-01-02"
+ shop = "my-store"
+ start_date = "2022-08-02"
}
- name = "Randal Kris"
- secret_id = "...my_secret_id..."
- workspace_id = "df54fdd5-ea95-4433-98da-fb42a8d63388"
+ definition_id = "4e1dc4a0-1d44-4fb9-b610-a4d0de91eaa4"
+ name = "Clinton Baumbach"
+ secret_id = "...my_secret_id..."
+ workspace_id = "cb870eb9-8050-4c39-a745-0657bfd1cb4d"
}
```
@@ -37,11 +36,12 @@ resource "airbyte_source_shopify" "my_source_shopify" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -55,65 +55,35 @@ resource "airbyte_source_shopify" "my_source_shopify" {
Required:
- `shop` (String) The name of your Shopify store found in the URL. For example, if your URL was https://NAME.myshopify.com, then the name would be 'NAME' or 'NAME.myshopify.com'.
-- `source_type` (String) must be one of ["shopify"]
Optional:
- `credentials` (Attributes) The authorization method to use to retrieve data from Shopify (see [below for nested schema](#nestedatt--configuration--credentials))
-- `start_date` (String) The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated.
+- `start_date` (String) Default: "2020-01-01"
+The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated.
### Nested Schema for `configuration.credentials`
Optional:
-- `source_shopify_shopify_authorization_method_api_password` (Attributes) API Password Auth (see [below for nested schema](#nestedatt--configuration--credentials--source_shopify_shopify_authorization_method_api_password))
-- `source_shopify_shopify_authorization_method_o_auth2_0` (Attributes) OAuth2.0 (see [below for nested schema](#nestedatt--configuration--credentials--source_shopify_shopify_authorization_method_o_auth2_0))
-- `source_shopify_update_shopify_authorization_method_api_password` (Attributes) API Password Auth (see [below for nested schema](#nestedatt--configuration--credentials--source_shopify_update_shopify_authorization_method_api_password))
-- `source_shopify_update_shopify_authorization_method_o_auth2_0` (Attributes) OAuth2.0 (see [below for nested schema](#nestedatt--configuration--credentials--source_shopify_update_shopify_authorization_method_o_auth2_0))
+- `api_password` (Attributes) API Password Auth (see [below for nested schema](#nestedatt--configuration--credentials--api_password))
+- `o_auth20` (Attributes) OAuth2.0 (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_shopify_shopify_authorization_method_api_password`
+
+### Nested Schema for `configuration.credentials.api_password`
Required:
-- `api_password` (String) The API Password for your private application in the `Shopify` store.
-- `auth_method` (String) must be one of ["api_password"]
+- `api_password` (String, Sensitive) The API Password for your private application in the `Shopify` store.
-
-### Nested Schema for `configuration.credentials.source_shopify_shopify_authorization_method_o_auth2_0`
-
-Required:
-
-- `auth_method` (String) must be one of ["oauth2.0"]
-
-Optional:
-
-- `access_token` (String) The Access Token for making authenticated requests.
-- `client_id` (String) The Client ID of the Shopify developer application.
-- `client_secret` (String) The Client Secret of the Shopify developer application.
-
-
-
-### Nested Schema for `configuration.credentials.source_shopify_update_shopify_authorization_method_api_password`
-
-Required:
-
-- `api_password` (String) The API Password for your private application in the `Shopify` store.
-- `auth_method` (String) must be one of ["api_password"]
-
-
-
-### Nested Schema for `configuration.credentials.source_shopify_update_shopify_authorization_method_o_auth2_0`
-
-Required:
-
-- `auth_method` (String) must be one of ["oauth2.0"]
+
+### Nested Schema for `configuration.credentials.o_auth20`
Optional:
-- `access_token` (String) The Access Token for making authenticated requests.
+- `access_token` (String, Sensitive) The Access Token for making authenticated requests.
- `client_id` (String) The Client ID of the Shopify developer application.
- `client_secret` (String) The Client Secret of the Shopify developer application.
diff --git a/docs/resources/source_shortio.md b/docs/resources/source_shortio.md
index 7ece64e90..844f4ae7a 100644
--- a/docs/resources/source_shortio.md
+++ b/docs/resources/source_shortio.md
@@ -15,14 +15,14 @@ SourceShortio Resource
```terraform
resource "airbyte_source_shortio" "my_source_shortio" {
configuration = {
- domain_id = "...my_domain_id..."
- secret_key = "...my_secret_key..."
- source_type = "shortio"
- start_date = "2023-07-30T03:43:59.244Z"
+ domain_id = "...my_domain_id..."
+ secret_key = "...my_secret_key..."
+ start_date = "2023-07-30T03:43:59.244Z"
}
- name = "Troy Streich I"
- secret_id = "...my_secret_id..."
- workspace_id = "9ea5f9b1-8a24-44fd-a190-39dacd38ed0d"
+ definition_id = "b2aae6c2-0ac9-4c19-9b3e-1c883c55acce"
+ name = "Bethany Donnelly"
+ secret_id = "...my_secret_id..."
+ workspace_id = "29a15c36-062a-463f-9716-d2b265f2af56"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_shortio" "my_source_shortio" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,8 +51,7 @@ resource "airbyte_source_shortio" "my_source_shortio" {
Required:
- `domain_id` (String)
-- `secret_key` (String) Short.io Secret Key
-- `source_type` (String) must be one of ["shortio"]
+- `secret_key` (String, Sensitive) Short.io Secret Key
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
diff --git a/docs/resources/source_slack.md b/docs/resources/source_slack.md
index 3b9897835..c15e7f3a8 100644
--- a/docs/resources/source_slack.md
+++ b/docs/resources/source_slack.md
@@ -19,19 +19,18 @@ resource "airbyte_source_slack" "my_source_slack" {
"...",
]
credentials = {
- source_slack_authentication_mechanism_api_token = {
- api_token = "...my_api_token..."
- option_title = "API Token Credentials"
+ source_slack_api_token = {
+ api_token = "...my_api_token..."
}
}
- join_channels = false
- lookback_window = 7
- source_type = "slack"
+ join_channels = true
+ lookback_window = 14
start_date = "2017-01-25T00:00:00Z"
}
- name = "Dr. Jamie Wintheiser"
- secret_id = "...my_secret_id..."
- workspace_id = "af15920c-90d1-4b49-81f2-bd89c8a32639"
+ definition_id = "dd581ac6-4878-476f-8ad6-15bcace687b3"
+ name = "Ms. Marian Bergstrom"
+ secret_id = "...my_secret_id..."
+ workspace_id = "986a7b02-fd25-4c77-a7b3-6354281d3e7f"
}
```
@@ -41,11 +40,12 @@ resource "airbyte_source_slack" "my_source_slack" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -58,63 +58,40 @@ resource "airbyte_source_slack" "my_source_slack" {
Required:
-- `join_channels` (Boolean) Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages.
-- `lookback_window` (Number) How far into the past to look for messages in threads, default is 0 days
-- `source_type` (String) must be one of ["slack"]
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
Optional:
- `channel_filter` (List of String) A channel name list (without leading '#' char) which limit the channels from which you'd like to sync. Empty list means no filter.
- `credentials` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials))
+- `join_channels` (Boolean) Default: true
+Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages.
+- `lookback_window` (Number) Default: 0
+How far into the past to look for messages in threads, default is 0 days
### Nested Schema for `configuration.credentials`
Optional:
-- `source_slack_authentication_mechanism_api_token` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials--source_slack_authentication_mechanism_api_token))
-- `source_slack_authentication_mechanism_sign_in_via_slack_o_auth` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials--source_slack_authentication_mechanism_sign_in_via_slack_o_auth))
-- `source_slack_update_authentication_mechanism_api_token` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials--source_slack_update_authentication_mechanism_api_token))
-- `source_slack_update_authentication_mechanism_sign_in_via_slack_o_auth` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials--source_slack_update_authentication_mechanism_sign_in_via_slack_o_auth))
+- `api_token` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials--api_token))
+- `sign_in_via_slack_o_auth` (Attributes) Choose how to authenticate into Slack (see [below for nested schema](#nestedatt--configuration--credentials--sign_in_via_slack_o_auth))
-
-### Nested Schema for `configuration.credentials.source_slack_authentication_mechanism_api_token`
+
+### Nested Schema for `configuration.credentials.api_token`
Required:
-- `api_token` (String) A Slack bot token. See the docs for instructions on how to generate it.
-- `option_title` (String) must be one of ["API Token Credentials"]
+- `api_token` (String, Sensitive) A Slack bot token. See the docs for instructions on how to generate it.
-
-### Nested Schema for `configuration.credentials.source_slack_authentication_mechanism_sign_in_via_slack_o_auth`
+
+### Nested Schema for `configuration.credentials.sign_in_via_slack_o_auth`
Required:
-- `access_token` (String) Slack access_token. See our docs if you need help generating the token.
+- `access_token` (String, Sensitive) Slack access_token. See our docs if you need help generating the token.
- `client_id` (String) Slack client_id. See our docs if you need help finding this id.
- `client_secret` (String) Slack client_secret. See our docs if you need help finding this secret.
-- `option_title` (String) must be one of ["Default OAuth2.0 authorization"]
-
-
-
-### Nested Schema for `configuration.credentials.source_slack_update_authentication_mechanism_api_token`
-
-Required:
-
-- `api_token` (String) A Slack bot token. See the docs for instructions on how to generate it.
-- `option_title` (String) must be one of ["API Token Credentials"]
-
-
-
-### Nested Schema for `configuration.credentials.source_slack_update_authentication_mechanism_sign_in_via_slack_o_auth`
-
-Required:
-
-- `access_token` (String) Slack access_token. See our docs if you need help generating the token.
-- `client_id` (String) Slack client_id. See our docs if you need help finding this id.
-- `client_secret` (String) Slack client_secret. See our docs if you need help finding this secret.
-- `option_title` (String) must be one of ["Default OAuth2.0 authorization"]
diff --git a/docs/resources/source_smaily.md b/docs/resources/source_smaily.md
index 9094ce421..a7612fa9b 100644
--- a/docs/resources/source_smaily.md
+++ b/docs/resources/source_smaily.md
@@ -18,11 +18,11 @@ resource "airbyte_source_smaily" "my_source_smaily" {
api_password = "...my_api_password..."
api_subdomain = "...my_api_subdomain..."
api_username = "...my_api_username..."
- source_type = "smaily"
}
- name = "Donnie Hauck"
- secret_id = "...my_secret_id..."
- workspace_id = "b6902b88-1a94-4f64-b664-a8f0af8c691d"
+ definition_id = "0bc649fe-5b08-4c82-9c40-ca1ab7663971"
+ name = "Ebony Carroll"
+ secret_id = "...my_secret_id..."
+ workspace_id = "331df025-a154-4586-87cd-fb558f87809d"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_smaily" "my_source_smaily" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,9 +50,8 @@ resource "airbyte_source_smaily" "my_source_smaily" {
Required:
-- `api_password` (String) API user password. See https://smaily.com/help/api/general/create-api-user/
+- `api_password` (String, Sensitive) API user password. See https://smaily.com/help/api/general/create-api-user/
- `api_subdomain` (String) API Subdomain. See https://smaily.com/help/api/general/create-api-user/
- `api_username` (String) API user username. See https://smaily.com/help/api/general/create-api-user/
-- `source_type` (String) must be one of ["smaily"]
diff --git a/docs/resources/source_smartengage.md b/docs/resources/source_smartengage.md
index c654b4525..eca027d34 100644
--- a/docs/resources/source_smartengage.md
+++ b/docs/resources/source_smartengage.md
@@ -15,12 +15,12 @@ SourceSmartengage Resource
```terraform
resource "airbyte_source_smartengage" "my_source_smartengage" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "smartengage"
+ api_key = "...my_api_key..."
}
- name = "Carmen Crist"
- secret_id = "...my_secret_id..."
- workspace_id = "fbaf9476-a2ae-48dc-850c-8a3512c73784"
+ definition_id = "3d1fcf2b-6755-4110-90ec-6c18f2017e88"
+ name = "Neil Pagac"
+ secret_id = "...my_secret_id..."
+ workspace_id = "64f95e84-efb6-4a93-9326-1882dc6ea377"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_smartengage" "my_source_smartengage" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_smartengage" "my_source_smartengage" {
Required:
-- `api_key` (String) API Key
-- `source_type` (String) must be one of ["smartengage"]
+- `api_key` (String, Sensitive) API Key
diff --git a/docs/resources/source_smartsheets.md b/docs/resources/source_smartsheets.md
index ecdad7c4a..07e2dfb28 100644
--- a/docs/resources/source_smartsheets.md
+++ b/docs/resources/source_smartsheets.md
@@ -16,21 +16,20 @@ SourceSmartsheets Resource
resource "airbyte_source_smartsheets" "my_source_smartsheets" {
configuration = {
credentials = {
- source_smartsheets_authorization_method_api_access_token = {
+ api_access_token = {
access_token = "...my_access_token..."
- auth_type = "access_token"
}
}
metadata_fields = [
- "row_access_level",
+ "row_number",
]
- source_type = "smartsheets"
spreadsheet_id = "...my_spreadsheet_id..."
- start_datetime = "2000-01-01T13:00:00-07:00"
+ start_datetime = "2000-01-01T13:00:00"
}
- name = "Joann Bechtelar Jr."
- secret_id = "...my_secret_id..."
- workspace_id = "e966ec73-6d43-4194-b98c-783c92398ed3"
+ definition_id = "a6744848-ac2b-404b-aae9-e175304065f6"
+ name = "Tara King"
+ secret_id = "...my_secret_id..."
+ workspace_id = "901f87c9-df1a-4f8f-9013-d5d0cf403b28"
}
```
@@ -40,11 +39,12 @@ resource "airbyte_source_smartsheets" "my_source_smartsheets" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -58,77 +58,39 @@ resource "airbyte_source_smartsheets" "my_source_smartsheets" {
Required:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["smartsheets"]
- `spreadsheet_id` (String) The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties
Optional:
- `metadata_fields` (List of String) A List of available columns which metadata can be pulled from.
-- `start_datetime` (String) Only rows modified after this date/time will be replicated. This should be an ISO 8601 string, for instance: `2000-01-01T13:00:00`
+- `start_datetime` (String) Default: "2020-01-01T00:00:00+00:00"
+Only rows modified after this date/time will be replicated. This should be an ISO 8601 string, for instance: `2000-01-01T13:00:00`
### Nested Schema for `configuration.credentials`
Optional:
-- `source_smartsheets_authorization_method_api_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_smartsheets_authorization_method_api_access_token))
-- `source_smartsheets_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_smartsheets_authorization_method_o_auth2_0))
-- `source_smartsheets_update_authorization_method_api_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_smartsheets_update_authorization_method_api_access_token))
-- `source_smartsheets_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_smartsheets_update_authorization_method_o_auth2_0))
+- `api_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--api_access_token))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_smartsheets_authorization_method_api_access_token`
+
+### Nested Schema for `configuration.credentials.api_access_token`
Required:
-- `access_token` (String) The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token.
+- `access_token` (String, Sensitive) The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token.
-Optional:
-
-- `auth_type` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_smartsheets_authorization_method_o_auth2_0`
-
-Required:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `client_id` (String) The API ID of the SmartSheets developer application.
-- `client_secret` (String) The API Secret the SmartSheets developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-Optional:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
-
-
-### Nested Schema for `configuration.credentials.source_smartsheets_update_authorization_method_api_access_token`
-
-Required:
-
-- `access_token` (String) The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token.
-
-Optional:
-
-- `auth_type` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_smartsheets_update_authorization_method_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) Access Token for making authenticated requests.
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
- `client_id` (String) The API ID of the SmartSheets developer application.
- `client_secret` (String) The API Secret the SmartSheets developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-Optional:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
+- `refresh_token` (String, Sensitive) The key to refresh the expired access_token.
+- `token_expiry_date` (String, Sensitive) The date-time when the access token should be refreshed.
diff --git a/docs/resources/source_snapchat_marketing.md b/docs/resources/source_snapchat_marketing.md
index 058f57af5..ebf90371b 100644
--- a/docs/resources/source_snapchat_marketing.md
+++ b/docs/resources/source_snapchat_marketing.md
@@ -19,12 +19,12 @@ resource "airbyte_source_snapchat_marketing" "my_source_snapchatmarketing" {
client_secret = "...my_client_secret..."
end_date = "2022-01-30"
refresh_token = "...my_refresh_token..."
- source_type = "snapchat-marketing"
start_date = "2022-01-01"
}
- name = "Chelsea Ortiz"
- secret_id = "...my_secret_id..."
- workspace_id = "5ca8649a-70cf-4d5d-a989-b7206451077d"
+ definition_id = "8a6950f0-007e-4330-87d9-5358a56819d2"
+ name = "Rudy Toy"
+ secret_id = "...my_secret_id..."
+ workspace_id = "1d7e3d24-dfd3-4d51-a342-f997d059d38a"
}
```
@@ -34,11 +34,12 @@ resource "airbyte_source_snapchat_marketing" "my_source_snapchatmarketing" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -53,12 +54,12 @@ Required:
- `client_id` (String) The Client ID of your Snapchat developer application.
- `client_secret` (String) The Client Secret of your Snapchat developer application.
-- `refresh_token` (String) Refresh Token to renew the expired Access Token.
-- `source_type` (String) must be one of ["snapchat-marketing"]
+- `refresh_token` (String, Sensitive) Refresh Token to renew the expired Access Token.
Optional:
- `end_date` (String) Date in the format 2017-01-25. Any data after this date will not be replicated.
-- `start_date` (String) Date in the format 2022-01-01. Any data before this date will not be replicated.
+- `start_date` (String) Default: "2022-01-01"
+Date in the format 2022-01-01. Any data before this date will not be replicated.
diff --git a/docs/resources/source_snowflake.md b/docs/resources/source_snowflake.md
index e2189edc1..eb62d910d 100644
--- a/docs/resources/source_snowflake.md
+++ b/docs/resources/source_snowflake.md
@@ -16,9 +16,8 @@ SourceSnowflake Resource
resource "airbyte_source_snowflake" "my_source_snowflake" {
configuration = {
credentials = {
- source_snowflake_authorization_method_o_auth2_0 = {
+ source_snowflake_o_auth2_0 = {
access_token = "...my_access_token..."
- auth_type = "OAuth"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
@@ -29,12 +28,12 @@ resource "airbyte_source_snowflake" "my_source_snowflake" {
jdbc_url_params = "...my_jdbc_url_params..."
role = "AIRBYTE_ROLE"
schema = "AIRBYTE_SCHEMA"
- source_type = "snowflake"
warehouse = "AIRBYTE_WAREHOUSE"
}
- name = "Katrina Tillman"
- secret_id = "...my_secret_id..."
- workspace_id = "3d492ed1-4b8a-42c1-9545-45e955dcc185"
+ definition_id = "2e5fcf99-c418-476f-a0cb-c1b99ee1e960"
+ name = "Mrs. Jeanette Howell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "0d51b311-4e9e-4d57-941c-3612b0e8c8cf"
}
```
@@ -44,11 +43,12 @@ resource "airbyte_source_snowflake" "my_source_snowflake" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -64,7 +64,6 @@ Required:
- `database` (String) The database you created for Airbyte to access data.
- `host` (String) The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com).
- `role` (String) The role you created for Airbyte to access Snowflake.
-- `source_type` (String) must be one of ["snowflake"]
- `warehouse` (String) The warehouse you created for Airbyte to access data.
Optional:
@@ -78,58 +77,29 @@ Optional:
Optional:
-- `source_snowflake_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_snowflake_authorization_method_o_auth2_0))
-- `source_snowflake_authorization_method_username_and_password` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_snowflake_authorization_method_username_and_password))
-- `source_snowflake_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_snowflake_update_authorization_method_o_auth2_0))
-- `source_snowflake_update_authorization_method_username_and_password` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_snowflake_update_authorization_method_username_and_password))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
+- `username_and_password` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--username_and_password))
-
-### Nested Schema for `configuration.credentials.source_snowflake_authorization_method_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `auth_type` (String) must be one of ["OAuth"]
- `client_id` (String) The Client ID of your Snowflake developer application.
- `client_secret` (String) The Client Secret of your Snowflake developer application.
Optional:
-- `access_token` (String) Access Token for making authenticated requests.
-- `refresh_token` (String) Refresh Token for making authenticated requests.
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
+- `refresh_token` (String, Sensitive) Refresh Token for making authenticated requests.
-
-### Nested Schema for `configuration.credentials.source_snowflake_authorization_method_username_and_password`
+
+### Nested Schema for `configuration.credentials.username_and_password`
Required:
-- `auth_type` (String) must be one of ["username/password"]
-- `password` (String) The password associated with the username.
-- `username` (String) The username you created to allow Airbyte to access the database.
-
-
-
-### Nested Schema for `configuration.credentials.source_snowflake_update_authorization_method_o_auth2_0`
-
-Required:
-
-- `auth_type` (String) must be one of ["OAuth"]
-- `client_id` (String) The Client ID of your Snowflake developer application.
-- `client_secret` (String) The Client Secret of your Snowflake developer application.
-
-Optional:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `refresh_token` (String) Refresh Token for making authenticated requests.
-
-
-
-### Nested Schema for `configuration.credentials.source_snowflake_update_authorization_method_username_and_password`
-
-Required:
-
-- `auth_type` (String) must be one of ["username/password"]
-- `password` (String) The password associated with the username.
+- `password` (String, Sensitive) The password associated with the username.
- `username` (String) The username you created to allow Airbyte to access the database.
diff --git a/docs/resources/source_sonar_cloud.md b/docs/resources/source_sonar_cloud.md
index 4ef4699fa..681ea4162 100644
--- a/docs/resources/source_sonar_cloud.md
+++ b/docs/resources/source_sonar_cloud.md
@@ -20,13 +20,13 @@ resource "airbyte_source_sonar_cloud" "my_source_sonarcloud" {
]
end_date = "YYYY-MM-DD"
organization = "airbyte"
- source_type = "sonar-cloud"
start_date = "YYYY-MM-DD"
user_token = "...my_user_token..."
}
- name = "Mildred Rosenbaum"
- secret_id = "...my_secret_id..."
- workspace_id = "43ad2daa-784a-4ba3-9230-edf73811a115"
+ definition_id = "d259943d-fa52-4a9e-875a-bffba2c1e7b6"
+ name = "Jose Lindgren"
+ secret_id = "...my_secret_id..."
+ workspace_id = "d761f19b-60aa-4080-8c97-1e60235dc09f"
}
```
@@ -36,11 +36,12 @@ resource "airbyte_source_sonar_cloud" "my_source_sonarcloud" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -53,10 +54,9 @@ resource "airbyte_source_sonar_cloud" "my_source_sonarcloud" {
Required:
-- `component_keys` (List of String) Comma-separated list of component keys.
+- `component_keys` (List of String, Sensitive) Comma-separated list of component keys.
- `organization` (String) Organization key. See here.
-- `source_type` (String) must be one of ["sonar-cloud"]
-- `user_token` (String) Your User Token. See here. The token is case sensitive.
+- `user_token` (String, Sensitive) Your User Token. See here. The token is case sensitive.
Optional:
diff --git a/docs/resources/source_spacex_api.md b/docs/resources/source_spacex_api.md
index 4fa19f065..342d1eaa1 100644
--- a/docs/resources/source_spacex_api.md
+++ b/docs/resources/source_spacex_api.md
@@ -15,13 +15,13 @@ SourceSpacexAPI Resource
```terraform
resource "airbyte_source_spacex_api" "my_source_spacexapi" {
configuration = {
- id = "382bd7ed-5650-4762-9c58-f4d7396564c2"
- options = "...my_options..."
- source_type = "spacex-api"
+ id = "adad73b7-9d20-4b48-acfd-c6fb504a12b7"
+ options = "...my_options..."
}
- name = "Lee Batz Jr."
- secret_id = "...my_secret_id..."
- workspace_id = "a961d24a-7dbb-48f5-b2d8-92cf7812cb51"
+ definition_id = "723cbf02-23ae-4822-a532-7d8cbc0547dc"
+ name = "Chad Swaniawski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7628c478-1358-42a6-b537-d9dfc7f45856"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_spacex_api" "my_source_spacexapi" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,6 +51,5 @@ Optional:
- `id` (String)
- `options` (String)
-- `source_type` (String) must be one of ["spacex-api"]
diff --git a/docs/resources/source_square.md b/docs/resources/source_square.md
index ff8865d50..b0505074e 100644
--- a/docs/resources/source_square.md
+++ b/docs/resources/source_square.md
@@ -16,19 +16,18 @@ SourceSquare Resource
resource "airbyte_source_square" "my_source_square" {
configuration = {
credentials = {
- source_square_authentication_api_key = {
- api_key = "...my_api_key..."
- auth_type = "API Key"
+ source_square_api_key = {
+ api_key = "...my_api_key..."
}
}
- include_deleted_objects = true
+ include_deleted_objects = false
is_sandbox = false
- source_type = "square"
- start_date = "2022-02-01"
+ start_date = "2022-11-22"
}
- name = "Miss Bruce Gibson"
- secret_id = "...my_secret_id..."
- workspace_id = "548f88f8-f1bf-40bc-8e1f-206d5d831d00"
+ definition_id = "55c9f06b-5482-4c9e-b770-03d0337f10a6"
+ name = "Connie Homenick"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4ee32ccb-4d52-4da6-928f-2436a122e394"
}
```
@@ -38,11 +37,12 @@ resource "airbyte_source_square" "my_source_square" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -53,64 +53,39 @@ resource "airbyte_source_square" "my_source_square" {
### Nested Schema for `configuration`
-Required:
-
-- `is_sandbox` (Boolean) Determines whether to use the sandbox or production environment.
-- `source_type` (String) must be one of ["square"]
-
Optional:
- `credentials` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `include_deleted_objects` (Boolean) In some streams there is an option to include deleted objects (Items, Categories, Discounts, Taxes)
-- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. If not set, all data will be replicated.
+- `include_deleted_objects` (Boolean) Default: false
+In some streams there is an option to include deleted objects (Items, Categories, Discounts, Taxes)
+- `is_sandbox` (Boolean) Default: false
+Determines whether to use the sandbox or production environment.
+- `start_date` (String) Default: "2021-01-01"
+UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. If not set, all data will be replicated.
### Nested Schema for `configuration.credentials`
Optional:
-- `source_square_authentication_api_key` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials--source_square_authentication_api_key))
-- `source_square_authentication_oauth_authentication` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials--source_square_authentication_oauth_authentication))
-- `source_square_update_authentication_api_key` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials--source_square_update_authentication_api_key))
-- `source_square_update_authentication_oauth_authentication` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials--source_square_update_authentication_oauth_authentication))
-
-
-### Nested Schema for `configuration.credentials.source_square_authentication_api_key`
-
-Required:
-
-- `api_key` (String) The API key for a Square application
-- `auth_type` (String) must be one of ["API Key"]
-
-
-
-### Nested Schema for `configuration.credentials.source_square_authentication_oauth_authentication`
-
-Required:
-
-- `auth_type` (String) must be one of ["OAuth"]
-- `client_id` (String) The Square-issued ID of your application
-- `client_secret` (String) The Square-issued application secret for your application
-- `refresh_token` (String) A refresh token generated using the above client ID and secret
-
+- `api_key` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials--api_key))
+- `oauth_authentication` (Attributes) Choose how to authenticate to Square. (see [below for nested schema](#nestedatt--configuration--credentials--oauth_authentication))
-
-### Nested Schema for `configuration.credentials.source_square_update_authentication_api_key`
+
+### Nested Schema for `configuration.credentials.api_key`
Required:
-- `api_key` (String) The API key for a Square application
-- `auth_type` (String) must be one of ["API Key"]
+- `api_key` (String, Sensitive) The API key for a Square application
-
-### Nested Schema for `configuration.credentials.source_square_update_authentication_oauth_authentication`
+
+### Nested Schema for `configuration.credentials.oauth_authentication`
Required:
-- `auth_type` (String) must be one of ["OAuth"]
- `client_id` (String) The Square-issued ID of your application
- `client_secret` (String) The Square-issued application secret for your application
-- `refresh_token` (String) A refresh token generated using the above client ID and secret
+- `refresh_token` (String, Sensitive) A refresh token generated using the above client ID and secret
diff --git a/docs/resources/source_strava.md b/docs/resources/source_strava.md
index 0c6532b7e..615c70092 100644
--- a/docs/resources/source_strava.md
+++ b/docs/resources/source_strava.md
@@ -16,16 +16,15 @@ SourceStrava Resource
resource "airbyte_source_strava" "my_source_strava" {
configuration = {
athlete_id = 17831421
- auth_type = "Client"
client_id = "12345"
client_secret = "fc6243f283e51f6ca989aab298b17da125496f50"
refresh_token = "fc6243f283e51f6ca989aab298b17da125496f50"
- source_type = "strava"
start_date = "2021-03-01T00:00:00Z"
}
- name = "Jeffrey Wintheiser"
- secret_id = "...my_secret_id..."
- workspace_id = "06673f3a-681c-4576-8dce-742409a215e0"
+ definition_id = "198a6bf6-f1cb-4db3-9a96-cd0e48f1e4b3"
+ name = "Elaine Johnson"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6ca0b303-cf01-47cd-9783-63f1be7e9b4a"
}
```
@@ -35,11 +34,12 @@ resource "airbyte_source_strava" "my_source_strava" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -55,12 +55,7 @@ Required:
- `athlete_id` (Number) The Athlete ID of your Strava developer application.
- `client_id` (String) The Client ID of your Strava developer application.
- `client_secret` (String) The Client Secret of your Strava developer application.
-- `refresh_token` (String) The Refresh Token with the activity: read_all permissions.
-- `source_type` (String) must be one of ["strava"]
+- `refresh_token` (String, Sensitive) The Refresh Token with the activity: read_all permissions.
- `start_date` (String) UTC date and time. Any data before this date will not be replicated.
-Optional:
-
-- `auth_type` (String) must be one of ["Client"]
-
diff --git a/docs/resources/source_stripe.md b/docs/resources/source_stripe.md
index 78a2b3f52..e5bae528c 100644
--- a/docs/resources/source_stripe.md
+++ b/docs/resources/source_stripe.md
@@ -16,15 +16,17 @@ SourceStripe Resource
resource "airbyte_source_stripe" "my_source_stripe" {
configuration = {
account_id = "...my_account_id..."
+ call_rate_limit = 100
client_secret = "...my_client_secret..."
- lookback_window_days = 5
- slice_range = 10
- source_type = "stripe"
+ lookback_window_days = 10
+ num_workers = 3
+ slice_range = 360
start_date = "2017-01-25T00:00:00Z"
}
- name = "Seth Nitzsche"
- secret_id = "...my_secret_id..."
- workspace_id = "63e3af3d-d9dd-4a33-9cd6-3483e4a7a98e"
+ definition_id = "46c36bb7-337b-4f0b-aca9-3a8ae78e1e53"
+ name = "Marcella Muller"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b6d5dc1e-250f-480f-bc59-5c3777bccfe7"
}
```
@@ -34,11 +36,12 @@ resource "airbyte_source_stripe" "my_source_stripe" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -53,12 +56,17 @@ Required:
- `account_id` (String) Your Stripe account ID (starts with 'acct_', find yours here).
- `client_secret` (String) Stripe API key (usually starts with 'sk_live_'; find yours here).
-- `source_type` (String) must be one of ["stripe"]
Optional:
-- `lookback_window_days` (Number) When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. More info here
-- `slice_range` (Number) The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted.
-- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated.
+- `call_rate_limit` (Number) The number of API calls per second that you allow connector to make. This value can not be bigger than real API call rate limit (https://stripe.com/docs/rate-limits). If not specified the default maximum is 25 and 100 calls per second for test and production tokens respectively.
+- `lookback_window_days` (Number) Default: 0
+When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. The Lookback Window only applies to streams that do not support event-based incremental syncs: Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks, Refunds. More info here
+- `num_workers` (Number) Default: 10
+The number of worker threads to use for the sync. The performance upper boundary depends on call_rate_limit setting and type of account.
+- `slice_range` (Number) Default: 365
+The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted.
+- `start_date` (String) Default: "2017-01-25T00:00:00Z"
+UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated.
diff --git a/docs/resources/source_survey_sparrow.md b/docs/resources/source_survey_sparrow.md
index 4b759c695..b0a931d1b 100644
--- a/docs/resources/source_survey_sparrow.md
+++ b/docs/resources/source_survey_sparrow.md
@@ -17,18 +17,16 @@ resource "airbyte_source_survey_sparrow" "my_source_surveysparrow" {
configuration = {
access_token = "...my_access_token..."
region = {
- source_survey_sparrow_base_url_eu_based_account = {
- url_base = "https://eu-api.surveysparrow.com/v3"
- }
+ eu_based_account = {}
}
- source_type = "survey-sparrow"
survey_id = [
"{ \"see\": \"documentation\" }",
]
}
- name = "Hugo Kovacek"
- secret_id = "...my_secret_id..."
- workspace_id = "f02449d8-6f4b-4b20-be5d-911cbfe749ca"
+ definition_id = "4b91c615-d128-4040-ba03-eb3c0afcc3c8"
+ name = "Gerard Kerluke"
+ secret_id = "...my_secret_id..."
+ workspace_id = "fbbc8e3e-7db5-4a3e-846f-c1e0fa91f7ef"
}
```
@@ -38,11 +36,12 @@ resource "airbyte_source_survey_sparrow" "my_source_surveysparrow" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -55,8 +54,7 @@ resource "airbyte_source_survey_sparrow" "my_source_surveysparrow" {
Required:
-- `access_token` (String) Your access token. See here. The key is case sensitive.
-- `source_type` (String) must be one of ["survey-sparrow"]
+- `access_token` (String, Sensitive) Your access token. See here. The key is case sensitive.
Optional:
@@ -68,40 +66,14 @@ Optional:
Optional:
-- `source_survey_sparrow_base_url_eu_based_account` (Attributes) Is your account location is EU based? If yes, the base url to retrieve data will be different. (see [below for nested schema](#nestedatt--configuration--region--source_survey_sparrow_base_url_eu_based_account))
-- `source_survey_sparrow_base_url_global_account` (Attributes) Is your account location is EU based? If yes, the base url to retrieve data will be different. (see [below for nested schema](#nestedatt--configuration--region--source_survey_sparrow_base_url_global_account))
-- `source_survey_sparrow_update_base_url_eu_based_account` (Attributes) Is your account location is EU based? If yes, the base url to retrieve data will be different. (see [below for nested schema](#nestedatt--configuration--region--source_survey_sparrow_update_base_url_eu_based_account))
-- `source_survey_sparrow_update_base_url_global_account` (Attributes) Is your account location is EU based? If yes, the base url to retrieve data will be different. (see [below for nested schema](#nestedatt--configuration--region--source_survey_sparrow_update_base_url_global_account))
+- `eu_based_account` (Attributes) Is your account location EU based? If yes, the base url to retrieve data will be different. (see [below for nested schema](#nestedatt--configuration--region--eu_based_account))
+- `global_account` (Attributes) Is your account location EU based? If yes, the base url to retrieve data will be different. (see [below for nested schema](#nestedatt--configuration--region--global_account))
-
-### Nested Schema for `configuration.region.source_survey_sparrow_base_url_eu_based_account`
+
+### Nested Schema for `configuration.region.eu_based_account`
-Optional:
-
-- `url_base` (String) must be one of ["https://eu-api.surveysparrow.com/v3"]
-
-
-
-### Nested Schema for `configuration.region.source_survey_sparrow_base_url_global_account`
-
-Optional:
-
-- `url_base` (String) must be one of ["https://api.surveysparrow.com/v3"]
-
-
-
-### Nested Schema for `configuration.region.source_survey_sparrow_update_base_url_eu_based_account`
-
-Optional:
-
-- `url_base` (String) must be one of ["https://eu-api.surveysparrow.com/v3"]
-
-
-
-### Nested Schema for `configuration.region.source_survey_sparrow_update_base_url_global_account`
-
-Optional:
-- `url_base` (String) must be one of ["https://api.surveysparrow.com/v3"]
+
+### Nested Schema for `configuration.region.global_account`
diff --git a/docs/resources/source_surveymonkey.md b/docs/resources/source_surveymonkey.md
index 3d3fd7a8b..4e9c45519 100644
--- a/docs/resources/source_surveymonkey.md
+++ b/docs/resources/source_surveymonkey.md
@@ -17,20 +17,19 @@ resource "airbyte_source_surveymonkey" "my_source_surveymonkey" {
configuration = {
credentials = {
access_token = "...my_access_token..."
- auth_method = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
}
- origin = "USA"
- source_type = "surveymonkey"
- start_date = "2021-01-01T00:00:00Z"
+ origin = "USA"
+ start_date = "2021-01-01T00:00:00Z"
survey_ids = [
"...",
]
}
- name = "Pearl Trantow"
- secret_id = "...my_secret_id..."
- workspace_id = "b8955d41-3e13-4a48-a310-907bd354c092"
+ definition_id = "147e293c-7a4b-42d7-bbc2-90ef00ad5372"
+ name = "Renee Howe"
+ secret_id = "...my_secret_id..."
+ workspace_id = "50a2e7cf-e6f3-44ac-865c-56f5fa6778e4"
}
```
@@ -40,11 +39,12 @@ resource "airbyte_source_surveymonkey" "my_source_surveymonkey" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -57,13 +57,12 @@ resource "airbyte_source_surveymonkey" "my_source_surveymonkey" {
Required:
-- `source_type` (String) must be one of ["surveymonkey"]
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
Optional:
- `credentials` (Attributes) The authorization method to use to retrieve data from SurveyMonkey (see [below for nested schema](#nestedatt--configuration--credentials))
-- `origin` (String) must be one of ["USA", "Europe", "Canada"]
+- `origin` (String) must be one of ["USA", "Europe", "Canada"]; Default: "USA"
Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different.
- `survey_ids` (List of String) IDs of the surveys from which you'd like to replicate data. If left empty, data from all boards to which you have access will be replicated.
@@ -72,8 +71,7 @@ Depending on the originating datacenter of the SurveyMonkey account, the API acc
Required:
-- `access_token` (String) Access Token for making authenticated requests. See the docs for information on how to generate this key.
-- `auth_method` (String) must be one of ["oauth2.0"]
+- `access_token` (String, Sensitive) Access Token for making authenticated requests. See the docs for information on how to generate this key.
Optional:
diff --git a/docs/resources/source_tempo.md b/docs/resources/source_tempo.md
index cf95bc67f..867d94b33 100644
--- a/docs/resources/source_tempo.md
+++ b/docs/resources/source_tempo.md
@@ -15,12 +15,12 @@ SourceTempo Resource
```terraform
resource "airbyte_source_tempo" "my_source_tempo" {
configuration = {
- api_token = "...my_api_token..."
- source_type = "tempo"
+ api_token = "...my_api_token..."
}
- name = "Edwin Haley"
- secret_id = "...my_secret_id..."
- workspace_id = "7f69e2c9-e6d1-40e9-9b3a-d4c6b03108d9"
+ definition_id = "5f462d7c-8446-4197-ba1b-271a5b009f29"
+ name = "Karen Kemmer"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6dac9959-2aae-4b21-989b-3db558d4aa17"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_tempo" "my_source_tempo" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_tempo" "my_source_tempo" {
Required:
-- `api_token` (String) Tempo API Token. Go to Tempo>Settings, scroll down to Data Access and select API integration.
-- `source_type` (String) must be one of ["tempo"]
+- `api_token` (String, Sensitive) Tempo API Token. Go to Tempo>Settings, scroll down to Data Access and select API integration.
diff --git a/docs/resources/source_the_guardian_api.md b/docs/resources/source_the_guardian_api.md
index 15c04bafb..8855b450a 100644
--- a/docs/resources/source_the_guardian_api.md
+++ b/docs/resources/source_the_guardian_api.md
@@ -15,17 +15,17 @@ SourceTheGuardianAPI Resource
```terraform
resource "airbyte_source_the_guardian_api" "my_source_theguardianapi" {
configuration = {
- api_key = "...my_api_key..."
- end_date = "YYYY-MM-DD"
- query = "political"
- section = "media"
- source_type = "the-guardian-api"
- start_date = "YYYY-MM-DD"
- tag = "environment/recycling"
+ api_key = "...my_api_key..."
+ end_date = "YYYY-MM-DD"
+ query = "environment AND political"
+ section = "media"
+ start_date = "YYYY-MM-DD"
+ tag = "environment/energyefficiency"
}
- name = "Pauline Kozey IV"
- secret_id = "...my_secret_id..."
- workspace_id = "2b94f2ab-1fd5-4671-a9c3-26350a467143"
+ definition_id = "e21a7b03-b315-4af1-9bc4-a1418c27e2e4"
+ name = "Toby Rempel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4753d48e-30cc-4cb1-939d-dfc649b7a58a"
}
```
@@ -35,11 +35,12 @@ resource "airbyte_source_the_guardian_api" "my_source_theguardianapi" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -52,8 +53,7 @@ resource "airbyte_source_the_guardian_api" "my_source_theguardianapi" {
Required:
-- `api_key` (String) Your API Key. See here. The key is case sensitive.
-- `source_type` (String) must be one of ["the-guardian-api"]
+- `api_key` (String, Sensitive) Your API Key. See here. The key is case sensitive.
- `start_date` (String) Use this to set the minimum date (YYYY-MM-DD) of the results. Results older than the start_date will not be shown.
Optional:
diff --git a/docs/resources/source_tiktok_marketing.md b/docs/resources/source_tiktok_marketing.md
index 3929518fe..aa14fc583 100644
--- a/docs/resources/source_tiktok_marketing.md
+++ b/docs/resources/source_tiktok_marketing.md
@@ -15,24 +15,23 @@ SourceTiktokMarketing Resource
```terraform
resource "airbyte_source_tiktok_marketing" "my_source_tiktokmarketing" {
configuration = {
- attribution_window = 5
+ attribution_window = 3
credentials = {
- source_tiktok_marketing_authentication_method_o_auth2_0 = {
+ source_tiktok_marketing_o_auth2_0 = {
access_token = "...my_access_token..."
advertiser_id = "...my_advertiser_id..."
app_id = "...my_app_id..."
- auth_type = "oauth2.0"
secret = "...my_secret..."
}
}
- end_date = "2021-10-08"
+ end_date = "2022-10-15"
include_deleted = false
- source_type = "tiktok-marketing"
- start_date = "2022-12-21"
+ start_date = "2022-12-08"
}
- name = "Mrs. Joey Mueller"
- secret_id = "...my_secret_id..."
- workspace_id = "4d93a74c-0252-4fe3-b4b4-db8b778ebb6e"
+ definition_id = "fd338f32-2856-4cd8-8e7e-494b9e5830e9"
+ name = "Elijah Prosacco"
+ secret_id = "...my_secret_id..."
+ workspace_id = "12cdcae9-f85c-4701-b380-526f8856cdf3"
}
```
@@ -42,11 +41,12 @@ resource "airbyte_source_tiktok_marketing" "my_source_tiktokmarketing" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -59,76 +59,43 @@ resource "airbyte_source_tiktok_marketing" "my_source_tiktokmarketing" {
Optional:
-- `attribution_window` (Number) The attribution window in days.
+- `attribution_window` (Number) Default: 3
+The attribution window in days.
- `credentials` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials))
- `end_date` (String) The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DD. All data generated between start_date and this date will be replicated. Not setting this option will result in always syncing the data till the current date.
-- `include_deleted` (Boolean) Set to active if you want to include deleted data in reports.
-- `source_type` (String) must be one of ["tiktok-marketing"]
-- `start_date` (String) The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.
+- `include_deleted` (Boolean) Default: false
+Set to active if you want to include deleted data in reports.
+- `start_date` (String) Default: "2016-09-01"
+The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.
### Nested Schema for `configuration.credentials`
Optional:
-- `source_tiktok_marketing_authentication_method_o_auth2_0` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_tiktok_marketing_authentication_method_o_auth2_0))
-- `source_tiktok_marketing_authentication_method_sandbox_access_token` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_tiktok_marketing_authentication_method_sandbox_access_token))
-- `source_tiktok_marketing_update_authentication_method_o_auth2_0` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_tiktok_marketing_update_authentication_method_o_auth2_0))
-- `source_tiktok_marketing_update_authentication_method_sandbox_access_token` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials--source_tiktok_marketing_update_authentication_method_sandbox_access_token))
+- `o_auth20` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
+- `sandbox_access_token` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--credentials--sandbox_access_token))
-
-### Nested Schema for `configuration.credentials.source_tiktok_marketing_authentication_method_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) Long-term Authorized Access Token.
+- `access_token` (String, Sensitive) Long-term Authorized Access Token.
- `app_id` (String) The Developer Application App ID.
- `secret` (String) The Developer Application Secret.
Optional:
- `advertiser_id` (String) The Advertiser ID to filter reports and streams. Let this empty to retrieve all.
-- `auth_type` (String) must be one of ["oauth2.0"]
-
-### Nested Schema for `configuration.credentials.source_tiktok_marketing_authentication_method_sandbox_access_token`
+
+### Nested Schema for `configuration.credentials.sandbox_access_token`
Required:
-- `access_token` (String) The long-term authorized access token.
+- `access_token` (String, Sensitive) The long-term authorized access token.
- `advertiser_id` (String) The Advertiser ID which generated for the developer's Sandbox application.
-Optional:
-
-- `auth_type` (String) must be one of ["sandbox_access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_tiktok_marketing_update_authentication_method_o_auth2_0`
-
-Required:
-
-- `access_token` (String) Long-term Authorized Access Token.
-- `app_id` (String) The Developer Application App ID.
-- `secret` (String) The Developer Application Secret.
-
-Optional:
-
-- `advertiser_id` (String) The Advertiser ID to filter reports and streams. Let this empty to retrieve all.
-- `auth_type` (String) must be one of ["oauth2.0"]
-
-
-
-### Nested Schema for `configuration.credentials.source_tiktok_marketing_update_authentication_method_sandbox_access_token`
-
-Required:
-
-- `access_token` (String) The long-term authorized access token.
-- `advertiser_id` (String) The Advertiser ID which generated for the developer's Sandbox application.
-
-Optional:
-
-- `auth_type` (String) must be one of ["sandbox_access_token"]
-
diff --git a/docs/resources/source_todoist.md b/docs/resources/source_todoist.md
index a9ec4940e..67409062b 100644
--- a/docs/resources/source_todoist.md
+++ b/docs/resources/source_todoist.md
@@ -15,12 +15,12 @@ SourceTodoist Resource
```terraform
resource "airbyte_source_todoist" "my_source_todoist" {
configuration = {
- source_type = "todoist"
- token = "...my_token..."
+ token = "...my_token..."
}
- name = "Hope Collins"
- secret_id = "...my_secret_id..."
- workspace_id = "502bafb2-cbc4-4635-95e6-5da028c3e951"
+ definition_id = "fdefbe19-9921-44f3-bfa4-8acadc06400b"
+ name = "Kristy Hilpert"
+ secret_id = "...my_secret_id..."
+ workspace_id = "13a2ccf2-b1ad-4e2f-8984-bfb0e1b3d2b8"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_todoist" "my_source_todoist" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_todoist" "my_source_todoist" {
Required:
-- `source_type` (String) must be one of ["todoist"]
-- `token` (String) Your API Token. See here. The token is case sensitive.
+- `token` (String, Sensitive) Your API Token. See here. The token is case sensitive.
diff --git a/docs/resources/source_trello.md b/docs/resources/source_trello.md
index 73e81dbc3..96e49b431 100644
--- a/docs/resources/source_trello.md
+++ b/docs/resources/source_trello.md
@@ -18,14 +18,14 @@ resource "airbyte_source_trello" "my_source_trello" {
board_ids = [
"...",
]
- key = "...my_key..."
- source_type = "trello"
- start_date = "2021-03-01T00:00:00Z"
- token = "...my_token..."
+ key = "...my_key..."
+ start_date = "2021-03-01T00:00:00Z"
+ token = "...my_token..."
}
- name = "Philip Armstrong"
- secret_id = "...my_secret_id..."
- workspace_id = "a966489d-7b78-4673-a13a-12a6b9924945"
+ definition_id = "26a8838c-f8d2-427f-b18d-4240654f4782"
+ name = "Esther Abshire"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b5a46242-8ebc-45c7-bead-f0c9ce16ebe8"
}
```
@@ -35,11 +35,12 @@ resource "airbyte_source_trello" "my_source_trello" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -52,13 +53,12 @@ resource "airbyte_source_trello" "my_source_trello" {
Required:
-- `key` (String) Trello API key. See the docs for instructions on how to generate it.
-- `source_type` (String) must be one of ["trello"]
+- `key` (String, Sensitive) Trello API key. See the docs for instructions on how to generate it.
- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
-- `token` (String) Trello API token. See the docs for instructions on how to generate it.
+- `token` (String, Sensitive) Trello API token. See the docs for instructions on how to generate it.
Optional:
-- `board_ids` (List of String) IDs of the boards to replicate data from. If left empty, data from all boards to which you have access will be replicated.
+- `board_ids` (List of String) IDs of the boards to replicate data from. If left empty, data from all boards to which you have access will be replicated. Please note that this is not the 8-character ID in the board's shortLink (URL of the board). Rather, what is required here is the 24-character ID usually returned by the API
diff --git a/docs/resources/source_trustpilot.md b/docs/resources/source_trustpilot.md
index 6187d6e0e..583983bdc 100644
--- a/docs/resources/source_trustpilot.md
+++ b/docs/resources/source_trustpilot.md
@@ -19,17 +19,16 @@ resource "airbyte_source_trustpilot" "my_source_trustpilot" {
"...",
]
credentials = {
- source_trustpilot_authorization_method_api_key = {
- auth_type = "apikey"
+ source_trustpilot_api_key = {
client_id = "...my_client_id..."
}
}
- source_type = "trustpilot"
- start_date = "%Y-%m-%dT%H:%M:%S"
+ start_date = "%Y-%m-%dT%H:%M:%S"
}
- name = "Bradley Goodwin"
- secret_id = "...my_secret_id..."
- workspace_id = "f5c84383-6b86-4b3c-9f64-15b0449f9df1"
+ definition_id = "5fa64aee-8d2b-4de4-8eef-ceb9e0d54b08"
+ name = "Clifford Quigley"
+ secret_id = "...my_secret_id..."
+ workspace_id = "98fe3f92-c06a-49aa-b270-2875abb88c39"
}
```
@@ -39,11 +38,12 @@ resource "airbyte_source_trustpilot" "my_source_trustpilot" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -58,7 +58,6 @@ Required:
- `business_units` (List of String) The names of business units which shall be synchronized. Some streams e.g. configured_business_units or private_reviews use this configuration.
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["trustpilot"]
- `start_date` (String) For streams with sync. method incremental the start date time to be used
@@ -66,64 +65,26 @@ Required:
Optional:
-- `source_trustpilot_authorization_method_api_key` (Attributes) The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0. (see [below for nested schema](#nestedatt--configuration--credentials--source_trustpilot_authorization_method_api_key))
-- `source_trustpilot_authorization_method_o_auth_2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_trustpilot_authorization_method_o_auth_2_0))
-- `source_trustpilot_update_authorization_method_api_key` (Attributes) The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0. (see [below for nested schema](#nestedatt--configuration--credentials--source_trustpilot_update_authorization_method_api_key))
-- `source_trustpilot_update_authorization_method_o_auth_2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_trustpilot_update_authorization_method_o_auth_2_0))
+- `api_key` (Attributes) The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0. (see [below for nested schema](#nestedatt--configuration--credentials--api_key))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_trustpilot_authorization_method_api_key`
+
+### Nested Schema for `configuration.credentials.api_key`
Required:
- `client_id` (String) The API key of the Trustpilot API application.
-Optional:
-
-- `auth_type` (String) must be one of ["apikey"]
-
-
-### Nested Schema for `configuration.credentials.source_trustpilot_authorization_method_o_auth_2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) Access Token for making authenticated requests.
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
- `client_id` (String) The API key of the Trustpilot API application. (represents the OAuth Client ID)
- `client_secret` (String) The Secret of the Trustpilot API application. (represents the OAuth Client Secret)
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-Optional:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
-
-
-
-### Nested Schema for `configuration.credentials.source_trustpilot_update_authorization_method_api_key`
-
-Required:
-
-- `client_id` (String) The API key of the Trustpilot API application.
-
-Optional:
-
-- `auth_type` (String) must be one of ["apikey"]
-
-
-
-### Nested Schema for `configuration.credentials.source_trustpilot_update_authorization_method_o_auth_2_0`
-
-Required:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `client_id` (String) The API key of the Trustpilot API application. (represents the OAuth Client ID)
-- `client_secret` (String) The Secret of the Trustpilot API application. (represents the OAuth Client Secret)
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-Optional:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
+- `refresh_token` (String, Sensitive) The key to refresh the expired access_token.
+- `token_expiry_date` (String, Sensitive) The date-time when the access token should be refreshed.
diff --git a/docs/resources/source_tvmaze_schedule.md b/docs/resources/source_tvmaze_schedule.md
index 8c7e0937d..d2dd6bdb0 100644
--- a/docs/resources/source_tvmaze_schedule.md
+++ b/docs/resources/source_tvmaze_schedule.md
@@ -15,15 +15,15 @@ SourceTvmazeSchedule Resource
```terraform
resource "airbyte_source_tvmaze_schedule" "my_source_tvmazeschedule" {
configuration = {
- domestic_schedule_country_code = "US"
+ domestic_schedule_country_code = "GB"
end_date = "...my_end_date..."
- source_type = "tvmaze-schedule"
start_date = "...my_start_date..."
web_schedule_country_code = "global"
}
- name = "Gretchen Waters"
- secret_id = "...my_secret_id..."
- workspace_id = "e78bf606-8258-494e-a763-d5c72795b785"
+ definition_id = "79666080-f3ec-4ae3-8b49-1ea7992cd63d"
+ name = "Dr. Victoria Lemke"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e3f7d5a4-33d3-40ca-8aa9-f684d9ab345e"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_tvmaze_schedule" "my_source_tvmazeschedule" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,7 +52,6 @@ resource "airbyte_source_tvmaze_schedule" "my_source_tvmazeschedule" {
Required:
- `domestic_schedule_country_code` (String) Country code for domestic TV schedule retrieval.
-- `source_type` (String) must be one of ["tvmaze-schedule"]
- `start_date` (String) Start date for TV schedule retrieval. May be in the future.
Optional:
diff --git a/docs/resources/source_twilio.md b/docs/resources/source_twilio.md
index e4b40a3ad..a23b47981 100644
--- a/docs/resources/source_twilio.md
+++ b/docs/resources/source_twilio.md
@@ -18,12 +18,12 @@ resource "airbyte_source_twilio" "my_source_twilio" {
account_sid = "...my_account_sid..."
auth_token = "...my_auth_token..."
lookback_window = 60
- source_type = "twilio"
start_date = "2020-10-01T00:00:00Z"
}
- name = "Andre Sporer"
- secret_id = "...my_secret_id..."
- workspace_id = "9e5635b3-3bc0-4f97-8c42-fc9f4844225e"
+ definition_id = "83cb2e52-a86a-4dbb-97c5-cbe7ccff9d07"
+ name = "Leslie Kihn"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a4b37eb2-05dd-4b7f-9b71-195e07e10364"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_twilio" "my_source_twilio" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -51,12 +52,12 @@ resource "airbyte_source_twilio" "my_source_twilio" {
Required:
- `account_sid` (String) Twilio account SID
-- `auth_token` (String) Twilio Auth Token.
-- `source_type` (String) must be one of ["twilio"]
+- `auth_token` (String, Sensitive) Twilio Auth Token.
- `start_date` (String) UTC date and time in the format 2020-10-01T00:00:00Z. Any data before this date will not be replicated.
Optional:
-- `lookback_window` (Number) How far into the past to look for records. (in minutes)
+- `lookback_window` (Number) Default: 0
+How far into the past to look for records. (in minutes)
diff --git a/docs/resources/source_twilio_taskrouter.md b/docs/resources/source_twilio_taskrouter.md
index a0dddb445..25c7b1541 100644
--- a/docs/resources/source_twilio_taskrouter.md
+++ b/docs/resources/source_twilio_taskrouter.md
@@ -17,11 +17,11 @@ resource "airbyte_source_twilio_taskrouter" "my_source_twiliotaskrouter" {
configuration = {
account_sid = "...my_account_sid..."
auth_token = "...my_auth_token..."
- source_type = "twilio-taskrouter"
}
- name = "Cathy Ratke"
- secret_id = "...my_secret_id..."
- workspace_id = "6065c0ef-a6f9-43b9-8a1b-8c95be1254b7"
+ definition_id = "3a6dfd2a-6022-45b2-ac62-eb10f1a0d51f"
+ name = "Guy Rath II"
+ secret_id = "...my_secret_id..."
+ workspace_id = "16cb49da-06c2-439e-baf3-ca2cc2a5392d"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_twilio_taskrouter" "my_source_twiliotaskrouter" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,7 +50,6 @@ resource "airbyte_source_twilio_taskrouter" "my_source_twiliotaskrouter" {
Required:
- `account_sid` (String) Twilio Account ID
-- `auth_token` (String) Twilio Auth Token
-- `source_type` (String) must be one of ["twilio-taskrouter"]
+- `auth_token` (String, Sensitive) Twilio Auth Token
diff --git a/docs/resources/source_twitter.md b/docs/resources/source_twitter.md
index 00876aa99..130f7b97d 100644
--- a/docs/resources/source_twitter.md
+++ b/docs/resources/source_twitter.md
@@ -15,15 +15,15 @@ SourceTwitter Resource
```terraform
resource "airbyte_source_twitter" "my_source_twitter" {
configuration = {
- api_key = "...my_api_key..."
- end_date = "2022-05-29T22:05:47.839Z"
- query = "...my_query..."
- source_type = "twitter"
- start_date = "2022-02-11T15:55:53.597Z"
+ api_key = "...my_api_key..."
+ end_date = "2022-09-12T14:25:08.896Z"
+ query = "...my_query..."
+ start_date = "2022-06-24T22:46:50.628Z"
}
- name = "Elbert Kuhic"
- secret_id = "...my_secret_id..."
- workspace_id = "10d1f655-8c99-4c72-ad2b-c0f94087d9ca"
+ definition_id = "89040904-7267-4ce8-aa32-2e02b7e6dd49"
+ name = "Domingo Heller"
+ secret_id = "...my_secret_id..."
+ workspace_id = "592a5dd7-ddbd-4797-92eb-894fd682a677"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_twitter" "my_source_twitter" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,9 +51,8 @@ resource "airbyte_source_twitter" "my_source_twitter" {
Required:
-- `api_key` (String) App only Bearer Token. See the docs for more information on how to obtain this token.
+- `api_key` (String, Sensitive) App only Bearer Token. See the docs for more information on how to obtain this token.
- `query` (String) Query for matching Tweets. You can learn how to build this query by reading build a query guide .
-- `source_type` (String) must be one of ["twitter"]
Optional:
diff --git a/docs/resources/source_typeform.md b/docs/resources/source_typeform.md
index 0f4c59a15..603d1d4e0 100644
--- a/docs/resources/source_typeform.md
+++ b/docs/resources/source_typeform.md
@@ -16,24 +16,23 @@ SourceTypeform Resource
resource "airbyte_source_typeform" "my_source_typeform" {
configuration = {
credentials = {
- source_typeform_authorization_method_o_auth2_0 = {
+ source_typeform_o_auth2_0 = {
access_token = "...my_access_token..."
- auth_type = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
- token_expiry_date = "2021-02-23T09:05:08.511Z"
+ token_expiry_date = "2022-10-02T21:15:25.365Z"
}
}
form_ids = [
"...",
]
- source_type = "typeform"
- start_date = "2021-03-01T00:00:00Z"
+ start_date = "2021-03-01T00:00:00Z"
}
- name = "Rosemarie Spencer"
- secret_id = "...my_secret_id..."
- workspace_id = "aac9b4ca-a1cf-4e9e-95df-903907f37831"
+ definition_id = "dbbaeb9b-5c2e-42ee-8b85-f41cf2efd5ed"
+ name = "Nancy Hansen"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e4deda30-dd3c-4fb0-aa2f-ad0584130837"
}
```
@@ -43,11 +42,12 @@ resource "airbyte_source_typeform" "my_source_typeform" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -61,7 +61,6 @@ resource "airbyte_source_typeform" "my_source_typeform" {
Required:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["typeform"]
Optional:
@@ -73,64 +72,26 @@ Optional:
Optional:
-- `source_typeform_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_typeform_authorization_method_o_auth2_0))
-- `source_typeform_authorization_method_private_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_typeform_authorization_method_private_token))
-- `source_typeform_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_typeform_update_authorization_method_o_auth2_0))
-- `source_typeform_update_authorization_method_private_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_typeform_update_authorization_method_private_token))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
+- `private_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--private_token))
-
-### Nested Schema for `configuration.credentials.source_typeform_authorization_method_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) Access Token for making authenticated requests.
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
- `client_id` (String) The Client ID of the Typeform developer application.
- `client_secret` (String) The Client Secret the Typeform developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
+- `refresh_token` (String, Sensitive) The key to refresh the expired access_token.
+- `token_expiry_date` (String, Sensitive) The date-time when the access token should be refreshed.
-Optional:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
-
-
-
-### Nested Schema for `configuration.credentials.source_typeform_authorization_method_private_token`
-
-Required:
-
-- `access_token` (String) Log into your Typeform account and then generate a personal Access Token.
-
-Optional:
-
-- `auth_type` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_typeform_update_authorization_method_o_auth2_0`
-Required:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `client_id` (String) The Client ID of the Typeform developer application.
-- `client_secret` (String) The Client Secret the Typeform developer application.
-- `refresh_token` (String) The key to refresh the expired access_token.
-- `token_expiry_date` (String) The date-time when the access token should be refreshed.
-
-Optional:
-
-- `auth_type` (String) must be one of ["oauth2.0"]
-
-
-
-### Nested Schema for `configuration.credentials.source_typeform_update_authorization_method_private_token`
+
+### Nested Schema for `configuration.credentials.private_token`
Required:
-- `access_token` (String) Log into your Typeform account and then generate a personal Access Token.
-
-Optional:
-
-- `auth_type` (String) must be one of ["access_token"]
+- `access_token` (String, Sensitive) Log into your Typeform account and then generate a personal Access Token.
diff --git a/docs/resources/source_us_census.md b/docs/resources/source_us_census.md
index 3a3f1131b..4e2170b7f 100644
--- a/docs/resources/source_us_census.md
+++ b/docs/resources/source_us_census.md
@@ -17,12 +17,12 @@ resource "airbyte_source_us_census" "my_source_uscensus" {
configuration = {
api_key = "...my_api_key..."
query_params = "get=MOVEDIN,GEOID1,GEOID2,MOVEDOUT,FULL1_NAME,FULL2_NAME,MOVEDNET&for=county:*"
- query_path = "data/2018/acs"
- source_type = "us-census"
+ query_path = "data/2019/cbp"
}
- name = "Ginger Gislason"
- secret_id = "...my_secret_id..."
- workspace_id = "54a85466-597c-4502-b3c1-471d51aaa6dd"
+ definition_id = "e5de43c9-07f6-43cc-82bc-2f7f5dfb2c26"
+ name = "Kyle McKenzie"
+ secret_id = "...my_secret_id..."
+ workspace_id = "915d3324-b481-49ff-b934-29d3165dd859"
}
```
@@ -32,11 +32,12 @@ resource "airbyte_source_us_census" "my_source_uscensus" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -49,9 +50,8 @@ resource "airbyte_source_us_census" "my_source_uscensus" {
Required:
-- `api_key` (String) Your API Key. Get your key here.
+- `api_key` (String, Sensitive) Your API Key. Get your key here.
- `query_path` (String) The path portion of the GET request
-- `source_type` (String) must be one of ["us-census"]
Optional:
diff --git a/docs/resources/source_vantage.md b/docs/resources/source_vantage.md
index fcbfd4b8a..4e45996be 100644
--- a/docs/resources/source_vantage.md
+++ b/docs/resources/source_vantage.md
@@ -16,11 +16,11 @@ SourceVantage Resource
resource "airbyte_source_vantage" "my_source_vantage" {
configuration = {
access_token = "...my_access_token..."
- source_type = "vantage"
}
- name = "Corey Pacocha"
- secret_id = "...my_secret_id..."
- workspace_id = "6487c5fc-2b86-42a0-8bef-69e100157630"
+ definition_id = "5e9c61e2-0db5-4f4b-b11c-60c3a7ba3362"
+ name = "Tracey Rippin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5dfad932-4f6a-4b9f-8334-526eae71eb75"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_vantage" "my_source_vantage" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_vantage" "my_source_vantage" {
Required:
-- `access_token` (String) Your API Access token. See here.
-- `source_type` (String) must be one of ["vantage"]
+- `access_token` (String, Sensitive) Your API Access token. See here.
diff --git a/docs/resources/source_webflow.md b/docs/resources/source_webflow.md
index 929ea88bc..959747437 100644
--- a/docs/resources/source_webflow.md
+++ b/docs/resources/source_webflow.md
@@ -15,13 +15,13 @@ SourceWebflow Resource
```terraform
resource "airbyte_source_webflow" "my_source_webflow" {
configuration = {
- api_key = "a very long hex sequence"
- site_id = "a relatively long hex sequence"
- source_type = "webflow"
+ api_key = "a very long hex sequence"
+ site_id = "a relatively long hex sequence"
}
- name = "Taylor Paucek"
- secret_id = "...my_secret_id..."
- workspace_id = "fded84a3-5a41-4238-a1a7-35ac26ae33be"
+ definition_id = "9d7dd0bf-2f57-4219-978f-bbe9226a954f"
+ name = "Cary Mitchell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "12e392ce-90b9-4169-bb30-db2efb21ef2b"
}
```
@@ -31,11 +31,12 @@ resource "airbyte_source_webflow" "my_source_webflow" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -48,8 +49,7 @@ resource "airbyte_source_webflow" "my_source_webflow" {
Required:
-- `api_key` (String) The API token for authenticating to Webflow. See https://university.webflow.com/lesson/intro-to-the-webflow-api
+- `api_key` (String, Sensitive) The API token for authenticating to Webflow. See https://university.webflow.com/lesson/intro-to-the-webflow-api
- `site_id` (String) The id of the Webflow site you are requesting data from. See https://developers.webflow.com/#sites
-- `source_type` (String) must be one of ["webflow"]
diff --git a/docs/resources/source_whisky_hunter.md b/docs/resources/source_whisky_hunter.md
index f78b5ecad..cd905e371 100644
--- a/docs/resources/source_whisky_hunter.md
+++ b/docs/resources/source_whisky_hunter.md
@@ -14,12 +14,11 @@ SourceWhiskyHunter Resource
```terraform
resource "airbyte_source_whisky_hunter" "my_source_whiskyhunter" {
- configuration = {
- source_type = "whisky-hunter"
- }
- name = "Miss Terrence Kulas"
- secret_id = "...my_secret_id..."
- workspace_id = "f46bca11-06fe-4965-b711-d08cf88ec9f7"
+ configuration = {}
+ definition_id = "c48bf07f-2e77-4213-a664-6fa9b2db7532"
+ name = "Jeremy Kutch"
+ secret_id = "...my_secret_id..."
+ workspace_id = "785b8d4a-d9bb-44c2-904c-6ceb0e440965"
}
```
@@ -29,11 +28,12 @@ resource "airbyte_source_whisky_hunter" "my_source_whiskyhunter" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -44,8 +44,4 @@ resource "airbyte_source_whisky_hunter" "my_source_whiskyhunter" {
### Nested Schema for `configuration`
-Optional:
-
-- `source_type` (String) must be one of ["whisky-hunter"]
-
diff --git a/docs/resources/source_wikipedia_pageviews.md b/docs/resources/source_wikipedia_pageviews.md
index fb8f9d007..197dca7c5 100644
--- a/docs/resources/source_wikipedia_pageviews.md
+++ b/docs/resources/source_wikipedia_pageviews.md
@@ -15,18 +15,18 @@ SourceWikipediaPageviews Resource
```terraform
resource "airbyte_source_wikipedia_pageviews" "my_source_wikipediapageviews" {
configuration = {
- access = "mobile-app"
- agent = "spider"
- article = "Are_You_the_One%3F"
- country = "IN"
- end = "...my_end..."
- project = "www.mediawiki.org"
- source_type = "wikipedia-pageviews"
- start = "...my_start..."
+ access = "mobile-app"
+ agent = "automated"
+ article = "Are_You_the_One%3F"
+ country = "IN"
+ end = "...my_end..."
+ project = "www.mediawiki.org"
+ start = "...my_start..."
}
- name = "Laura Murray"
- secret_id = "...my_secret_id..."
- workspace_id = "6ed333bb-0ce8-4aa6-9432-a986eb7e14ca"
+ definition_id = "ecaf35c1-5b37-479d-be3d-ccb9fd6e1ad7"
+ name = "Stella Balistreri"
+ secret_id = "...my_secret_id..."
+ workspace_id = "320ef50a-8ca7-46b0-83ea-280df1804a67"
}
```
@@ -36,11 +36,12 @@ resource "airbyte_source_wikipedia_pageviews" "my_source_wikipediapageviews" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -59,7 +60,6 @@ Required:
- `country` (String) The ISO 3166-1 alpha-2 code of a country for which to retrieve top articles.
- `end` (String) The date of the last day to include, in YYYYMMDD or YYYYMMDDHH format.
- `project` (String) If you want to filter by project, use the domain of any Wikimedia project.
-- `source_type` (String) must be one of ["wikipedia-pageviews"]
- `start` (String) The date of the first day to include, in YYYYMMDD or YYYYMMDDHH format.
diff --git a/docs/resources/source_woocommerce.md b/docs/resources/source_woocommerce.md
index 6affe5809..5063e9cd8 100644
--- a/docs/resources/source_woocommerce.md
+++ b/docs/resources/source_woocommerce.md
@@ -15,15 +15,15 @@ SourceWoocommerce Resource
```terraform
resource "airbyte_source_woocommerce" "my_source_woocommerce" {
configuration = {
- api_key = "...my_api_key..."
- api_secret = "...my_api_secret..."
- shop = "...my_shop..."
- source_type = "woocommerce"
- start_date = "2021-01-01"
+ api_key = "...my_api_key..."
+ api_secret = "...my_api_secret..."
+ shop = "...my_shop..."
+ start_date = "2021-01-01"
}
- name = "Laura Lindgren III"
- secret_id = "...my_secret_id..."
- workspace_id = "0097019a-48f8-48ec-a7bf-904e01105d38"
+ definition_id = "f3e58149-5129-457c-a986-96756fe05881"
+ name = "Julia Cole"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ad45dc07-8875-4452-bf36-dab5122890f3"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_woocommerce" "my_source_woocommerce" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,10 +51,9 @@ resource "airbyte_source_woocommerce" "my_source_woocommerce" {
Required:
-- `api_key` (String) Customer Key for API in WooCommerce shop
+- `api_key` (String, Sensitive) Customer Key for API in WooCommerce shop
- `api_secret` (String) Customer Secret for API in WooCommerce shop
- `shop` (String) The name of the store. For https://EXAMPLE.com, the shop name is 'EXAMPLE.com'.
-- `source_type` (String) must be one of ["woocommerce"]
- `start_date` (String) The date you would like to replicate data from. Format: YYYY-MM-DD
diff --git a/docs/resources/source_xero.md b/docs/resources/source_xero.md
deleted file mode 100644
index 4026095cf..000000000
--- a/docs/resources/source_xero.md
+++ /dev/null
@@ -1,74 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_xero Resource - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceXero Resource
----
-
-# airbyte_source_xero (Resource)
-
-SourceXero Resource
-
-## Example Usage
-
-```terraform
-resource "airbyte_source_xero" "my_source_xero" {
- configuration = {
- authentication = {
- access_token = "...my_access_token..."
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
- refresh_token = "...my_refresh_token..."
- token_expiry_date = "...my_token_expiry_date..."
- }
- source_type = "xero"
- start_date = "2022-03-01T00:00:00Z"
- tenant_id = "...my_tenant_id..."
- }
- name = "Roger Hudson"
- secret_id = "...my_secret_id..."
- workspace_id = "6beb68a0-f657-4b7d-83a1-480f8de30f06"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `source_id` (String)
-- `source_type` (String)
-
-
-### Nested Schema for `configuration`
-
-Required:
-
-- `authentication` (Attributes) (see [below for nested schema](#nestedatt--configuration--authentication))
-- `source_type` (String) must be one of ["xero"]
-- `start_date` (String) UTC date and time in the format YYYY-MM-DDTHH:mm:ssZ. Any data with created_at before this data will not be synced.
-- `tenant_id` (String) Enter your Xero organization's Tenant ID
-
-
-### Nested Schema for `configuration.authentication`
-
-Required:
-
-- `access_token` (String) Enter your Xero application's access token
-- `client_id` (String) Enter your Xero application's Client ID
-- `client_secret` (String) Enter your Xero application's Client Secret
-- `refresh_token` (String) Enter your Xero application's refresh token
-- `token_expiry_date` (String) The date-time when the access token should be refreshed
-
-
diff --git a/docs/resources/source_xkcd.md b/docs/resources/source_xkcd.md
index 521fbe492..ae97297c5 100644
--- a/docs/resources/source_xkcd.md
+++ b/docs/resources/source_xkcd.md
@@ -14,12 +14,11 @@ SourceXkcd Resource
```terraform
resource "airbyte_source_xkcd" "my_source_xkcd" {
- configuration = {
- source_type = "xkcd"
- }
- name = "Mr. Laurence Littel"
- secret_id = "...my_secret_id..."
- workspace_id = "18d97e15-2297-4510-9a80-312292cc61c2"
+ configuration = {}
+ definition_id = "e992c2a3-f4c8-4fc0-a6c7-cc4eafdab4c1"
+ name = "Wilbert Ortiz"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6c12869f-984d-4613-8285-42bb37a458fa"
}
```
@@ -29,11 +28,12 @@ resource "airbyte_source_xkcd" "my_source_xkcd" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -44,8 +44,4 @@ resource "airbyte_source_xkcd" "my_source_xkcd" {
### Nested Schema for `configuration`
-Optional:
-
-- `source_type` (String) must be one of ["xkcd"]
-
diff --git a/docs/resources/source_yandex_metrica.md b/docs/resources/source_yandex_metrica.md
index c51ceb07f..8e8532719 100644
--- a/docs/resources/source_yandex_metrica.md
+++ b/docs/resources/source_yandex_metrica.md
@@ -15,15 +15,15 @@ SourceYandexMetrica Resource
```terraform
resource "airbyte_source_yandex_metrica" "my_source_yandexmetrica" {
configuration = {
- auth_token = "...my_auth_token..."
- counter_id = "...my_counter_id..."
- end_date = "2022-01-01"
- source_type = "yandex-metrica"
- start_date = "2022-01-01"
+ auth_token = "...my_auth_token..."
+ counter_id = "...my_counter_id..."
+ end_date = "2022-01-01"
+ start_date = "2022-01-01"
}
- name = "Dominic Marvin"
- secret_id = "...my_secret_id..."
- workspace_id = "e102da2d-e35f-48e0-9bf3-3eaab45402ac"
+ definition_id = "71a16fff-1f04-4aee-bc30-6c4f3397c204"
+ name = "June Williamson"
+ secret_id = "...my_secret_id..."
+ workspace_id = "deba481e-413d-4d76-8cc3-ae1d775ee978"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_yandex_metrica" "my_source_yandexmetrica" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,9 +51,8 @@ resource "airbyte_source_yandex_metrica" "my_source_yandexmetrica" {
Required:
-- `auth_token` (String) Your Yandex Metrica API access token
+- `auth_token` (String, Sensitive) Your Yandex Metrica API access token
- `counter_id` (String) Counter ID
-- `source_type` (String) must be one of ["yandex-metrica"]
- `start_date` (String) Starting point for your data replication, in format of "YYYY-MM-DD".
Optional:
diff --git a/docs/resources/source_yotpo.md b/docs/resources/source_yotpo.md
index 0b4f4c155..c78e1321e 100644
--- a/docs/resources/source_yotpo.md
+++ b/docs/resources/source_yotpo.md
@@ -17,13 +17,13 @@ resource "airbyte_source_yotpo" "my_source_yotpo" {
configuration = {
access_token = "...my_access_token..."
app_key = "...my_app_key..."
- email = "Ibrahim74@gmail.com"
- source_type = "yotpo"
+ email = "Bradley96@hotmail.com"
start_date = "2022-03-01T00:00:00.000Z"
}
- name = "Clark McGlynn"
- secret_id = "...my_secret_id..."
- workspace_id = "61aae5eb-5f0c-4492-b574-4d08a2267aae"
+ definition_id = "746ac11e-b024-4372-8c2f-a90b3fc58aed"
+ name = "Reginald Howell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "07de9609-725c-46d5-a5da-35039f4e4098"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_yotpo" "my_source_yotpo" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,10 +51,13 @@ resource "airbyte_source_yotpo" "my_source_yotpo" {
Required:
-- `access_token` (String) Access token recieved as a result of API call to https://api.yotpo.com/oauth/token (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)
-- `app_key` (String) App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)
-- `email` (String) Email address registered with yotpo.
-- `source_type` (String) must be one of ["yotpo"]
+- `access_token` (String, Sensitive) Access token received as a result of API call to https://api.yotpo.com/oauth/token (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)
+- `app_key` (String, Sensitive) App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)
- `start_date` (String) Date time filter for incremental filter, Specify which date to extract from.
+Optional:
+
+- `email` (String) Default: "example@gmail.com"
+Email address registered with yotpo.
+
diff --git a/docs/resources/source_younium.md b/docs/resources/source_younium.md
deleted file mode 100644
index 187ae007c..000000000
--- a/docs/resources/source_younium.md
+++ /dev/null
@@ -1,62 +0,0 @@
----
-# generated by https://github.com/hashicorp/terraform-plugin-docs
-page_title: "airbyte_source_younium Resource - terraform-provider-airbyte"
-subcategory: ""
-description: |-
- SourceYounium Resource
----
-
-# airbyte_source_younium (Resource)
-
-SourceYounium Resource
-
-## Example Usage
-
-```terraform
-resource "airbyte_source_younium" "my_source_younium" {
- configuration = {
- legal_entity = "...my_legal_entity..."
- password = "...my_password..."
- playground = true
- source_type = "younium"
- username = "Jairo.Monahan79"
- }
- name = "Martha Orn"
- secret_id = "...my_secret_id..."
- workspace_id = "1becb83d-2378-4ae3-bfc2-3d9450a986a4"
-}
-```
-
-
-## Schema
-
-### Required
-
-- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
-- `workspace_id` (String)
-
-### Optional
-
-- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
-
-### Read-Only
-
-- `source_id` (String)
-- `source_type` (String)
-
-
-### Nested Schema for `configuration`
-
-Required:
-
-- `legal_entity` (String) Legal Entity that data should be pulled from
-- `password` (String) Account password for younium account API key
-- `source_type` (String) must be one of ["younium"]
-- `username` (String) Username for Younium account
-
-Optional:
-
-- `playground` (Boolean) Property defining if connector is used against playground or production environment
-
-
diff --git a/docs/resources/source_youtube_analytics.md b/docs/resources/source_youtube_analytics.md
index c3833c756..a65431cc8 100644
--- a/docs/resources/source_youtube_analytics.md
+++ b/docs/resources/source_youtube_analytics.md
@@ -16,15 +16,16 @@ SourceYoutubeAnalytics Resource
resource "airbyte_source_youtube_analytics" "my_source_youtubeanalytics" {
configuration = {
credentials = {
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
- refresh_token = "...my_refresh_token..."
+ additional_properties = "{ \"see\": \"documentation\" }"
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ refresh_token = "...my_refresh_token..."
}
- source_type = "youtube-analytics"
}
- name = "Tommy Rippin"
- secret_id = "...my_secret_id..."
- workspace_id = "707f06b2-8ecc-4864-9238-6f62c969c4cc"
+ definition_id = "bb8c2a23-b3c0-4134-a218-66cf518dbd5e"
+ name = "Mr. Clay Terry"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e07eadc6-f53d-4253-9b8b-1e39d437be8f"
}
```
@@ -34,11 +35,12 @@ resource "airbyte_source_youtube_analytics" "my_source_youtubeanalytics" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -52,7 +54,6 @@ resource "airbyte_source_youtube_analytics" "my_source_youtubeanalytics" {
Required:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `source_type` (String) must be one of ["youtube-analytics"]
### Nested Schema for `configuration.credentials`
@@ -61,7 +62,7 @@ Required:
- `client_id` (String) The Client ID of your developer application
- `client_secret` (String) The client secret of your developer application
-- `refresh_token` (String) A refresh token generated using the above client ID and secret
+- `refresh_token` (String, Sensitive) A refresh token generated using the above client ID and secret
Optional:
diff --git a/docs/resources/source_zendesk_chat.md b/docs/resources/source_zendesk_chat.md
index 45c2cc0b7..548db31e6 100644
--- a/docs/resources/source_zendesk_chat.md
+++ b/docs/resources/source_zendesk_chat.md
@@ -16,18 +16,17 @@ SourceZendeskChat Resource
resource "airbyte_source_zendesk_chat" "my_source_zendeskchat" {
configuration = {
credentials = {
- source_zendesk_chat_authorization_method_access_token = {
+ source_zendesk_chat_access_token = {
access_token = "...my_access_token..."
- credentials = "access_token"
}
}
- source_type = "zendesk-chat"
- start_date = "2021-02-01T00:00:00Z"
- subdomain = "...my_subdomain..."
+ start_date = "2021-02-01T00:00:00Z"
+ subdomain = "...my_subdomain..."
}
- name = "Mabel Lebsack MD"
- secret_id = "...my_secret_id..."
- workspace_id = "3fd3c81d-a10f-48c2-bdf9-31da3edb51fa"
+ definition_id = "f797fa8a-e012-4beb-a22c-99641ef630f5"
+ name = "Julian Kuhic"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c0e34b35-2ddb-404c-9bce-387d66444a18"
}
```
@@ -37,11 +36,12 @@ resource "airbyte_source_zendesk_chat" "my_source_zendeskchat" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -54,69 +54,38 @@ resource "airbyte_source_zendesk_chat" "my_source_zendeskchat" {
Required:
-- `source_type` (String) must be one of ["zendesk-chat"]
- `start_date` (String) The date from which you'd like to replicate data for Zendesk Chat API, in the format YYYY-MM-DDT00:00:00Z.
Optional:
- `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials))
-- `subdomain` (String) Required if you access Zendesk Chat from a Zendesk Support subdomain.
+- `subdomain` (String) Default: ""
+Required if you access Zendesk Chat from a Zendesk Support subdomain.
### Nested Schema for `configuration.credentials`
Optional:
-- `source_zendesk_chat_authorization_method_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_chat_authorization_method_access_token))
-- `source_zendesk_chat_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_chat_authorization_method_o_auth2_0))
-- `source_zendesk_chat_update_authorization_method_access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_chat_update_authorization_method_access_token))
-- `source_zendesk_chat_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_chat_update_authorization_method_o_auth2_0))
+- `access_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--access_token))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_zendesk_chat_authorization_method_access_token`
+
+### Nested Schema for `configuration.credentials.access_token`
Required:
-- `access_token` (String) The Access Token to make authenticated requests.
-- `credentials` (String) must be one of ["access_token"]
+- `access_token` (String, Sensitive) The Access Token to make authenticated requests.
-
-### Nested Schema for `configuration.credentials.source_zendesk_chat_authorization_method_o_auth2_0`
-
-Required:
-
-- `credentials` (String) must be one of ["oauth2.0"]
-
-Optional:
-
-- `access_token` (String) Access Token for making authenticated requests.
-- `client_id` (String) The Client ID of your OAuth application
-- `client_secret` (String) The Client Secret of your OAuth application.
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_chat_update_authorization_method_access_token`
-
-Required:
-
-- `access_token` (String) The Access Token to make authenticated requests.
-- `credentials` (String) must be one of ["access_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_chat_update_authorization_method_o_auth2_0`
-
-Required:
-
-- `credentials` (String) must be one of ["oauth2.0"]
+
+### Nested Schema for `configuration.credentials.o_auth20`
Optional:
-- `access_token` (String) Access Token for making authenticated requests.
+- `access_token` (String, Sensitive) Access Token for making authenticated requests.
- `client_id` (String) The Client ID of your OAuth application
- `client_secret` (String) The Client Secret of your OAuth application.
-- `refresh_token` (String) Refresh Token to obtain new Access Token, when it's expired.
+- `refresh_token` (String, Sensitive) Refresh Token to obtain new Access Token, when it's expired.
diff --git a/docs/resources/source_zendesk_sell.md b/docs/resources/source_zendesk_sell.md
new file mode 100644
index 000000000..92d86a955
--- /dev/null
+++ b/docs/resources/source_zendesk_sell.md
@@ -0,0 +1,53 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "airbyte_source_zendesk_sell Resource - terraform-provider-airbyte"
+subcategory: ""
+description: |-
+ SourceZendeskSell Resource
+---
+
+# airbyte_source_zendesk_sell (Resource)
+
+SourceZendeskSell Resource
+
+## Example Usage
+
+```terraform
+resource "airbyte_source_zendesk_sell" "my_source_zendesksell" {
+ configuration = {
+ api_token = "f23yhd630otl94y85a8bf384958473pto95847fd006da49382716or937ruw059"
+ }
+ definition_id = "6797a763-e10f-499e-8087-9e49484a7485"
+ name = "Jane Batz"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4aee427f-93df-49bf-84b7-84edaaf2f424"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
+- `name` (String) Name of the source e.g. dev-mysql-instance.
+- `workspace_id` (String)
+
+### Optional
+
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
+
+### Read-Only
+
+- `source_id` (String)
+- `source_type` (String)
+
+
+### Nested Schema for `configuration`
+
+Required:
+
+- `api_token` (String, Sensitive) The API token for authenticating to Zendesk Sell
+
+
diff --git a/docs/resources/source_zendesk_sunshine.md b/docs/resources/source_zendesk_sunshine.md
index 625122394..f3fade55e 100644
--- a/docs/resources/source_zendesk_sunshine.md
+++ b/docs/resources/source_zendesk_sunshine.md
@@ -16,19 +16,18 @@ SourceZendeskSunshine Resource
resource "airbyte_source_zendesk_sunshine" "my_source_zendesksunshine" {
configuration = {
credentials = {
- source_zendesk_sunshine_authorization_method_api_token = {
- api_token = "...my_api_token..."
- auth_method = "api_token"
- email = "Leonor_Funk@hotmail.com"
+ source_zendesk_sunshine_api_token = {
+ api_token = "...my_api_token..."
+ email = "Robbie51@hotmail.com"
}
}
- source_type = "zendesk-sunshine"
- start_date = "2021-01-01T00:00:00Z"
- subdomain = "...my_subdomain..."
+ start_date = "2021-01-01T00:00:00Z"
+ subdomain = "...my_subdomain..."
}
- name = "Mrs. Edith Hermiston"
- secret_id = "...my_secret_id..."
- workspace_id = "726d1532-1b83-42a5-ad69-180ff60eb9a6"
+ definition_id = "6f099262-2de7-4b1a-93e5-915fe5844c8d"
+ name = "Kristie Moen"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7badf74d-23a8-47a4-aabf-6ae57802daa8"
}
```
@@ -38,11 +37,12 @@ resource "airbyte_source_zendesk_sunshine" "my_source_zendesksunshine" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -55,7 +55,6 @@ resource "airbyte_source_zendesk_sunshine" "my_source_zendesksunshine" {
Required:
-- `source_type` (String) must be one of ["zendesk-sunshine"]
- `start_date` (String) The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z.
- `subdomain` (String) The subdomain for your Zendesk Account.
@@ -68,49 +67,24 @@ Optional:
Optional:
-- `source_zendesk_sunshine_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_sunshine_authorization_method_api_token))
-- `source_zendesk_sunshine_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_sunshine_authorization_method_o_auth2_0))
-- `source_zendesk_sunshine_update_authorization_method_api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_sunshine_update_authorization_method_api_token))
-- `source_zendesk_sunshine_update_authorization_method_o_auth2_0` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_sunshine_update_authorization_method_o_auth2_0))
+- `api_token` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--api_token))
+- `o_auth20` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_zendesk_sunshine_authorization_method_api_token`
+
+### Nested Schema for `configuration.credentials.api_token`
Required:
-- `api_token` (String) API Token. See the docs for information on how to generate this key.
-- `auth_method` (String) must be one of ["api_token"]
+- `api_token` (String, Sensitive) API Token. See the docs for information on how to generate this key.
- `email` (String) The user email for your Zendesk account
-
-### Nested Schema for `configuration.credentials.source_zendesk_sunshine_authorization_method_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) Long-term access Token for making authenticated requests.
-- `auth_method` (String) must be one of ["oauth2.0"]
-- `client_id` (String) The Client ID of your OAuth application.
-- `client_secret` (String) The Client Secret of your OAuth application.
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_sunshine_update_authorization_method_api_token`
-
-Required:
-
-- `api_token` (String) API Token. See the docs for information on how to generate this key.
-- `auth_method` (String) must be one of ["api_token"]
-- `email` (String) The user email for your Zendesk account
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_sunshine_update_authorization_method_o_auth2_0`
-
-Required:
-
-- `access_token` (String) Long-term access Token for making authenticated requests.
-- `auth_method` (String) must be one of ["oauth2.0"]
+- `access_token` (String, Sensitive) Long-term access Token for making authenticated requests.
- `client_id` (String) The Client ID of your OAuth application.
- `client_secret` (String) The Client Secret of your OAuth application.
diff --git a/docs/resources/source_zendesk_support.md b/docs/resources/source_zendesk_support.md
index 88d61152d..d014d91f0 100644
--- a/docs/resources/source_zendesk_support.md
+++ b/docs/resources/source_zendesk_support.md
@@ -16,20 +16,20 @@ SourceZendeskSupport Resource
resource "airbyte_source_zendesk_support" "my_source_zendesksupport" {
configuration = {
credentials = {
- source_zendesk_support_authentication_api_token = {
- api_token = "...my_api_token..."
- credentials = "api_token"
- email = "Ezequiel.Lindgren56@yahoo.com"
+ source_zendesk_support_api_token = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ api_token = "...my_api_token..."
+ email = "Ansel_McLaughlin@gmail.com"
}
}
- ignore_pagination = true
- source_type = "zendesk-support"
+ ignore_pagination = false
start_date = "2020-10-15T00:00:00Z"
subdomain = "...my_subdomain..."
}
- name = "Alexander Friesen"
- secret_id = "...my_secret_id..."
- workspace_id = "82dbec75-c68c-4606-9946-8ce304d8849b"
+ definition_id = "7526c0e6-8d41-4f29-878b-d831a4caf6a0"
+ name = "Linda Weissnat"
+ secret_id = "...my_secret_id..."
+ workspace_id = "20a84c82-feed-435f-9471-260525978122"
}
```
@@ -39,11 +39,12 @@ resource "airbyte_source_zendesk_support" "my_source_zendesksupport" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -56,13 +57,13 @@ resource "airbyte_source_zendesk_support" "my_source_zendesksupport" {
Required:
-- `source_type` (String) must be one of ["zendesk-support"]
- `subdomain` (String) This is your unique Zendesk subdomain that can be found in your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/, MY_SUBDOMAIN is the value of your subdomain.
Optional:
- `credentials` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials))
-- `ignore_pagination` (Boolean) Makes each stream read a single page of data.
+- `ignore_pagination` (Boolean) Default: false
+Makes each stream read a single page of data.
- `start_date` (String) The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
@@ -70,66 +71,33 @@ Optional:
Optional:
-- `source_zendesk_support_authentication_api_token` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_support_authentication_api_token))
-- `source_zendesk_support_authentication_o_auth2_0` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_support_authentication_o_auth2_0))
-- `source_zendesk_support_update_authentication_api_token` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_support_update_authentication_api_token))
-- `source_zendesk_support_update_authentication_o_auth2_0` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_support_update_authentication_o_auth2_0))
+- `api_token` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials--api_token))
+- `o_auth20` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_zendesk_support_authentication_api_token`
+
+### Nested Schema for `configuration.credentials.api_token`
Required:
-- `api_token` (String) The value of the API token generated. See our full documentation for more information on generating this token.
+- `api_token` (String, Sensitive) The value of the API token generated. See our full documentation for more information on generating this token.
- `email` (String) The user email for your Zendesk account.
Optional:
- `additional_properties` (String) Parsed as JSON.
-- `credentials` (String) must be one of ["api_token"]
-
-### Nested Schema for `configuration.credentials.source_zendesk_support_authentication_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) The OAuth access token. See the Zendesk docs for more information on generating this token.
+- `access_token` (String, Sensitive) The OAuth access token. See the Zendesk docs for more information on generating this token.
Optional:
- `additional_properties` (String) Parsed as JSON.
- `client_id` (String) The OAuth client's ID. See this guide for more information.
- `client_secret` (String) The OAuth client secret. See this guide for more information.
-- `credentials` (String) must be one of ["oauth2.0"]
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_support_update_authentication_api_token`
-
-Required:
-
-- `api_token` (String) The value of the API token generated. See our full documentation for more information on generating this token.
-- `email` (String) The user email for your Zendesk account.
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `credentials` (String) must be one of ["api_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_support_update_authentication_o_auth2_0`
-
-Required:
-
-- `access_token` (String) The OAuth access token. See the Zendesk docs for more information on generating this token.
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `client_id` (String) The OAuth client's ID. See this guide for more information.
-- `client_secret` (String) The OAuth client secret. See this guide for more information.
-- `credentials` (String) must be one of ["oauth2.0"]
diff --git a/docs/resources/source_zendesk_talk.md b/docs/resources/source_zendesk_talk.md
index e75284df5..a4b0ae93b 100644
--- a/docs/resources/source_zendesk_talk.md
+++ b/docs/resources/source_zendesk_talk.md
@@ -16,19 +16,19 @@ SourceZendeskTalk Resource
resource "airbyte_source_zendesk_talk" "my_source_zendesktalk" {
configuration = {
credentials = {
- source_zendesk_talk_authentication_api_token = {
- api_token = "...my_api_token..."
- auth_type = "api_token"
- email = "Kacie27@hotmail.com"
+ source_zendesk_talk_api_token = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ api_token = "...my_api_token..."
+ email = "Brain88@gmail.com"
}
}
- source_type = "zendesk-talk"
- start_date = "2020-10-15T00:00:00Z"
- subdomain = "...my_subdomain..."
+ start_date = "2020-10-15T00:00:00Z"
+ subdomain = "...my_subdomain..."
}
- name = "Jackie Welch"
- secret_id = "...my_secret_id..."
- workspace_id = "bb0c69e3-72db-4134-8ba9-f78a5c0ed7aa"
+ definition_id = "9a97873e-c6ec-423f-8936-834bb7f256aa"
+ name = "Gwen Towne"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7a7ac93c-e210-41f6-92ef-f8de56504728"
}
```
@@ -38,11 +38,12 @@ resource "airbyte_source_zendesk_talk" "my_source_zendesktalk" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -55,7 +56,6 @@ resource "airbyte_source_zendesk_talk" "my_source_zendesktalk" {
Required:
-- `source_type` (String) must be one of ["zendesk-talk"]
- `start_date` (String) The date from which you'd like to replicate data for Zendesk Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
- `subdomain` (String) This is your Zendesk subdomain that can be found in your account URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN is the value of your subdomain.
@@ -68,65 +68,32 @@ Optional:
Optional:
-- `source_zendesk_talk_authentication_api_token` (Attributes) Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_talk_authentication_api_token))
-- `source_zendesk_talk_authentication_o_auth2_0` (Attributes) Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_talk_authentication_o_auth2_0))
-- `source_zendesk_talk_update_authentication_api_token` (Attributes) Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_talk_update_authentication_api_token))
-- `source_zendesk_talk_update_authentication_o_auth2_0` (Attributes) Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`. (see [below for nested schema](#nestedatt--configuration--credentials--source_zendesk_talk_update_authentication_o_auth2_0))
+- `api_token` (Attributes) Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`. (see [below for nested schema](#nestedatt--configuration--credentials--api_token))
+- `o_auth20` (Attributes) Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`. (see [below for nested schema](#nestedatt--configuration--credentials--o_auth20))
-
-### Nested Schema for `configuration.credentials.source_zendesk_talk_authentication_api_token`
+
+### Nested Schema for `configuration.credentials.api_token`
Required:
-- `api_token` (String) The value of the API token generated. See the docs for more information.
+- `api_token` (String, Sensitive) The value of the API token generated. See the docs for more information.
- `email` (String) The user email for your Zendesk account.
Optional:
- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["api_token"]
-
-### Nested Schema for `configuration.credentials.source_zendesk_talk_authentication_o_auth2_0`
+
+### Nested Schema for `configuration.credentials.o_auth20`
Required:
-- `access_token` (String) The value of the API token generated. See the docs for more information.
+- `access_token` (String, Sensitive) The value of the API token generated. See the docs for more information.
Optional:
- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["oauth2.0"]
-- `client_id` (String) Client ID
-- `client_secret` (String) Client Secret
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_talk_update_authentication_api_token`
-
-Required:
-
-- `api_token` (String) The value of the API token generated. See the docs for more information.
-- `email` (String) The user email for your Zendesk account.
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["api_token"]
-
-
-
-### Nested Schema for `configuration.credentials.source_zendesk_talk_update_authentication_o_auth2_0`
-
-Required:
-
-- `access_token` (String) The value of the API token generated. See the docs for more information.
-
-Optional:
-
-- `additional_properties` (String) Parsed as JSON.
-- `auth_type` (String) must be one of ["oauth2.0"]
- `client_id` (String) Client ID
- `client_secret` (String) Client Secret
diff --git a/docs/resources/source_zenloop.md b/docs/resources/source_zenloop.md
index 46ba116d7..1e3ba7fca 100644
--- a/docs/resources/source_zenloop.md
+++ b/docs/resources/source_zenloop.md
@@ -17,13 +17,13 @@ resource "airbyte_source_zenloop" "my_source_zenloop" {
configuration = {
api_token = "...my_api_token..."
date_from = "2021-10-24T03:30:30Z"
- source_type = "zenloop"
survey_group_id = "...my_survey_group_id..."
survey_id = "...my_survey_id..."
}
- name = "Ricardo Champlin"
- secret_id = "...my_secret_id..."
- workspace_id = "7261fb0c-58d2-47b5-9996-b5b4b50eef71"
+ definition_id = "30aace29-0d7b-43b3-98af-f5206e7c6651"
+ name = "Colleen Hodkiewicz"
+ secret_id = "...my_secret_id..."
+ workspace_id = "de9cd819-ecc3-47ba-9700-ba64daf2cd7c"
}
```
@@ -33,11 +33,12 @@ resource "airbyte_source_zenloop" "my_source_zenloop" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -50,8 +51,7 @@ resource "airbyte_source_zenloop" "my_source_zenloop" {
Required:
-- `api_token` (String) Zenloop API Token. You can get the API token in settings page here
-- `source_type` (String) must be one of ["zenloop"]
+- `api_token` (String, Sensitive) Zenloop API Token. You can get the API token in settings page here
Optional:
diff --git a/docs/resources/source_zoho_crm.md b/docs/resources/source_zoho_crm.md
index 3bc56397a..aac734e04 100644
--- a/docs/resources/source_zoho_crm.md
+++ b/docs/resources/source_zoho_crm.md
@@ -17,16 +17,16 @@ resource "airbyte_source_zoho_crm" "my_source_zohocrm" {
configuration = {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- dc_region = "US"
- edition = "Enterprise"
- environment = "Developer"
+ dc_region = "IN"
+ edition = "Ultimate"
+ environment = "Sandbox"
refresh_token = "...my_refresh_token..."
- source_type = "zoho-crm"
- start_datetime = "2000-01-01T13:00+00:00"
+ start_datetime = "2000-01-01 13:00"
}
- name = "Kenneth Fisher"
- secret_id = "...my_secret_id..."
- workspace_id = "b1710688-deeb-4ef8-97f3-dd0ccd33f11b"
+ definition_id = "7a306443-a75b-4cf4-a2e1-378db01d76f7"
+ name = "Jody Collins"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a6e51f0c-20e4-4312-90cb-fe39df03e297"
}
```
@@ -36,11 +36,12 @@ resource "airbyte_source_zoho_crm" "my_source_zohocrm" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -57,15 +58,14 @@ Required:
- `client_secret` (String) OAuth2.0 Client Secret
- `dc_region` (String) must be one of ["US", "AU", "EU", "IN", "CN", "JP"]
Please choose the region of your Data Center location. More info by this Link
-- `edition` (String) must be one of ["Free", "Standard", "Professional", "Enterprise", "Ultimate"]
-Choose your Edition of Zoho CRM to determine API Concurrency Limits
- `environment` (String) must be one of ["Production", "Developer", "Sandbox"]
Please choose the environment
-- `refresh_token` (String) OAuth2.0 Refresh Token
-- `source_type` (String) must be one of ["zoho-crm"]
+- `refresh_token` (String, Sensitive) OAuth2.0 Refresh Token
Optional:
+- `edition` (String) must be one of ["Free", "Standard", "Professional", "Enterprise", "Ultimate"]; Default: "Free"
+Choose your Edition of Zoho CRM to determine API Concurrency Limits
- `start_datetime` (String) ISO 8601, for instance: `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS+HH:MM`
diff --git a/docs/resources/source_zoom.md b/docs/resources/source_zoom.md
index 188b05b34..a055f4994 100644
--- a/docs/resources/source_zoom.md
+++ b/docs/resources/source_zoom.md
@@ -15,12 +15,12 @@ SourceZoom Resource
```terraform
resource "airbyte_source_zoom" "my_source_zoom" {
configuration = {
- jwt_token = "...my_jwt_token..."
- source_type = "zoom"
+ jwt_token = "...my_jwt_token..."
}
- name = "Alexis Gutmann IV"
- secret_id = "...my_secret_id..."
- workspace_id = "0aa10418-6ec7-459e-82f3-702c5c8e2d30"
+ definition_id = "d6f5cf39-b34f-4958-9f42-198f32822b82"
+ name = "Gregory Hirthe"
+ secret_id = "...my_secret_id..."
+ workspace_id = "bc2b7c1d-3540-4fbb-a2d8-a9d0010028d1"
}
```
@@ -30,11 +30,12 @@ resource "airbyte_source_zoom" "my_source_zoom" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -47,7 +48,6 @@ resource "airbyte_source_zoom" "my_source_zoom" {
Required:
-- `jwt_token` (String) JWT Token
-- `source_type` (String) must be one of ["zoom"]
+- `jwt_token` (String, Sensitive) JWT Token
diff --git a/docs/resources/source_zuora.md b/docs/resources/source_zuora.md
index d1174e7d4..44682f1ec 100644
--- a/docs/resources/source_zuora.md
+++ b/docs/resources/source_zuora.md
@@ -17,15 +17,15 @@ resource "airbyte_source_zuora" "my_source_zuora" {
configuration = {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- data_query = "Unlimited"
- source_type = "zuora"
+ data_query = "Live"
start_date = "...my_start_date..."
- tenant_endpoint = "US Performance Test"
- window_in_days = "200"
+ tenant_endpoint = "EU Production"
+ window_in_days = "0.5"
}
- name = "Joan Bednar"
- secret_id = "...my_secret_id..."
- workspace_id = "a44707bf-375b-4442-8282-1fdb2f69e592"
+ definition_id = "280d807c-dd8e-4b8c-b5c4-610938eb2433"
+ name = "Anne Funk"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c5c5aa0b-5368-4b26-a568-aa6dc340bb15"
}
```
@@ -35,11 +35,12 @@ resource "airbyte_source_zuora" "my_source_zuora" {
### Required
- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration))
-- `name` (String)
+- `name` (String) Name of the source e.g. dev-mysql-instance.
- `workspace_id` (String)
### Optional
+- `definition_id` (String) The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow.
### Read-Only
@@ -54,15 +55,15 @@ Required:
- `client_id` (String) Your OAuth user Client ID
- `client_secret` (String) Your OAuth user Client Secret
-- `data_query` (String) must be one of ["Live", "Unlimited"]
-Choose between `Live`, or `Unlimited` - the optimized, replicated database at 12 hours freshness for high volume extraction Link
-- `source_type` (String) must be one of ["zuora"]
- `start_date` (String) Start Date in format: YYYY-MM-DD
- `tenant_endpoint` (String) must be one of ["US Production", "US Cloud Production", "US API Sandbox", "US Cloud API Sandbox", "US Central Sandbox", "US Performance Test", "EU Production", "EU API Sandbox", "EU Central Sandbox"]
Please choose the right endpoint where your Tenant is located. More info by this Link
Optional:
-- `window_in_days` (String) The amount of days for each data-chunk begining from start_date. Bigger the value - faster the fetch. (0.1 - as for couple of hours, 1 - as for a Day; 364 - as for a Year).
+- `data_query` (String) must be one of ["Live", "Unlimited"]; Default: "Live"
+Choose between `Live`, or `Unlimited` - the optimized, replicated database at 12 hours freshness for high volume extraction Link
+- `window_in_days` (String) Default: "90"
+The amount of days for each data-chunk beginning from start_date. Bigger the value - faster the fetch. (0.1 - as for couple of hours, 1 - as for a Day; 364 - as for a Year).
diff --git a/docs/resources/workspace.md b/docs/resources/workspace.md
index a3c6ca178..d007f3ced 100644
--- a/docs/resources/workspace.md
+++ b/docs/resources/workspace.md
@@ -14,7 +14,7 @@ Workspace Resource
```terraform
resource "airbyte_workspace" "my_workspace" {
- name = "Glenda Schiller DDS"
+ name = "Jessie Moen"
}
```
@@ -27,7 +27,7 @@ resource "airbyte_workspace" "my_workspace" {
### Read-Only
-- `data_residency` (String) must be one of ["auto", "us", "eu"]
+- `data_residency` (String) must be one of ["auto", "us", "eu"]; Default: "auto"
- `workspace_id` (String)
diff --git a/examples/README.md b/examples/README.md
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_connection/data-source.tf b/examples/data-sources/airbyte_connection/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_aws_datalake/data-source.tf b/examples/data-sources/airbyte_destination_aws_datalake/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_azure_blob_storage/data-source.tf b/examples/data-sources/airbyte_destination_azure_blob_storage/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_bigquery/data-source.tf b/examples/data-sources/airbyte_destination_bigquery/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_bigquery_denormalized/data-source.tf b/examples/data-sources/airbyte_destination_bigquery_denormalized/data-source.tf
deleted file mode 100755
index bd43a6200..000000000
--- a/examples/data-sources/airbyte_destination_bigquery_denormalized/data-source.tf
+++ /dev/null
@@ -1,3 +0,0 @@
-data "airbyte_destination_bigquery_denormalized" "my_destination_bigquerydenormalized" {
- destination_id = "...my_destination_id..."
-}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_destination_clickhouse/data-source.tf b/examples/data-sources/airbyte_destination_clickhouse/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_convex/data-source.tf b/examples/data-sources/airbyte_destination_convex/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_cumulio/data-source.tf b/examples/data-sources/airbyte_destination_cumulio/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_databend/data-source.tf b/examples/data-sources/airbyte_destination_databend/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_databricks/data-source.tf b/examples/data-sources/airbyte_destination_databricks/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_dev_null/data-source.tf b/examples/data-sources/airbyte_destination_dev_null/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_duckdb/data-source.tf b/examples/data-sources/airbyte_destination_duckdb/data-source.tf
new file mode 100644
index 000000000..1fc479888
--- /dev/null
+++ b/examples/data-sources/airbyte_destination_duckdb/data-source.tf
@@ -0,0 +1,3 @@
+data "airbyte_destination_duckdb" "my_destination_duckdb" {
+ destination_id = "...my_destination_id..."
+}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_destination_dynamodb/data-source.tf b/examples/data-sources/airbyte_destination_dynamodb/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_elasticsearch/data-source.tf b/examples/data-sources/airbyte_destination_elasticsearch/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_firebolt/data-source.tf b/examples/data-sources/airbyte_destination_firebolt/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_firestore/data-source.tf b/examples/data-sources/airbyte_destination_firestore/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_gcs/data-source.tf b/examples/data-sources/airbyte_destination_gcs/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_google_sheets/data-source.tf b/examples/data-sources/airbyte_destination_google_sheets/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_keen/data-source.tf b/examples/data-sources/airbyte_destination_keen/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_kinesis/data-source.tf b/examples/data-sources/airbyte_destination_kinesis/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_langchain/data-source.tf b/examples/data-sources/airbyte_destination_langchain/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_milvus/data-source.tf b/examples/data-sources/airbyte_destination_milvus/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_mongodb/data-source.tf b/examples/data-sources/airbyte_destination_mongodb/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_mssql/data-source.tf b/examples/data-sources/airbyte_destination_mssql/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_mysql/data-source.tf b/examples/data-sources/airbyte_destination_mysql/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_oracle/data-source.tf b/examples/data-sources/airbyte_destination_oracle/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_pinecone/data-source.tf b/examples/data-sources/airbyte_destination_pinecone/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_postgres/data-source.tf b/examples/data-sources/airbyte_destination_postgres/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_pubsub/data-source.tf b/examples/data-sources/airbyte_destination_pubsub/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_qdrant/data-source.tf b/examples/data-sources/airbyte_destination_qdrant/data-source.tf
new file mode 100644
index 000000000..551fc6dcc
--- /dev/null
+++ b/examples/data-sources/airbyte_destination_qdrant/data-source.tf
@@ -0,0 +1,3 @@
+data "airbyte_destination_qdrant" "my_destination_qdrant" {
+ destination_id = "...my_destination_id..."
+}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_destination_redis/data-source.tf b/examples/data-sources/airbyte_destination_redis/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_redshift/data-source.tf b/examples/data-sources/airbyte_destination_redshift/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_s3/data-source.tf b/examples/data-sources/airbyte_destination_s3/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_s3_glue/data-source.tf b/examples/data-sources/airbyte_destination_s3_glue/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_sftp_json/data-source.tf b/examples/data-sources/airbyte_destination_sftp_json/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_snowflake/data-source.tf b/examples/data-sources/airbyte_destination_snowflake/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_timeplus/data-source.tf b/examples/data-sources/airbyte_destination_timeplus/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_typesense/data-source.tf b/examples/data-sources/airbyte_destination_typesense/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_vertica/data-source.tf b/examples/data-sources/airbyte_destination_vertica/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_destination_weaviate/data-source.tf b/examples/data-sources/airbyte_destination_weaviate/data-source.tf
new file mode 100644
index 000000000..eba5b7661
--- /dev/null
+++ b/examples/data-sources/airbyte_destination_weaviate/data-source.tf
@@ -0,0 +1,3 @@
+data "airbyte_destination_weaviate" "my_destination_weaviate" {
+ destination_id = "...my_destination_id..."
+}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_destination_xata/data-source.tf b/examples/data-sources/airbyte_destination_xata/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/data-sources/airbyte_source_aha/data-source.tf b/examples/data-sources/airbyte_source_aha/data-source.tf
old mode 100755
new mode 100644
index a884d2349..7d0ab67bb
--- a/examples/data-sources/airbyte_source_aha/data-source.tf
+++ b/examples/data-sources/airbyte_source_aha/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_aha" "my_source_aha" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_aircall/data-source.tf b/examples/data-sources/airbyte_source_aircall/data-source.tf
old mode 100755
new mode 100644
index 4977e7234..85a5ac8ae
--- a/examples/data-sources/airbyte_source_aircall/data-source.tf
+++ b/examples/data-sources/airbyte_source_aircall/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_aircall" "my_source_aircall" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_airtable/data-source.tf b/examples/data-sources/airbyte_source_airtable/data-source.tf
old mode 100755
new mode 100644
index 40dd65ae5..854917876
--- a/examples/data-sources/airbyte_source_airtable/data-source.tf
+++ b/examples/data-sources/airbyte_source_airtable/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_airtable" "my_source_airtable" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_alloydb/data-source.tf b/examples/data-sources/airbyte_source_alloydb/data-source.tf
old mode 100755
new mode 100644
index ecce2ea29..488f03d1a
--- a/examples/data-sources/airbyte_source_alloydb/data-source.tf
+++ b/examples/data-sources/airbyte_source_alloydb/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_alloydb" "my_source_alloydb" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_amazon_ads/data-source.tf b/examples/data-sources/airbyte_source_amazon_ads/data-source.tf
old mode 100755
new mode 100644
index 9e6fc7aeb..1cc4d22c5
--- a/examples/data-sources/airbyte_source_amazon_ads/data-source.tf
+++ b/examples/data-sources/airbyte_source_amazon_ads/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_amazon_ads" "my_source_amazonads" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_amazon_seller_partner/data-source.tf b/examples/data-sources/airbyte_source_amazon_seller_partner/data-source.tf
old mode 100755
new mode 100644
index 5c1ecf3e7..5944c8017
--- a/examples/data-sources/airbyte_source_amazon_seller_partner/data-source.tf
+++ b/examples/data-sources/airbyte_source_amazon_seller_partner/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_amazon_seller_partner" "my_source_amazonsellerpartner" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_amazon_sqs/data-source.tf b/examples/data-sources/airbyte_source_amazon_sqs/data-source.tf
old mode 100755
new mode 100644
index ec38dbf7a..941a05cd3
--- a/examples/data-sources/airbyte_source_amazon_sqs/data-source.tf
+++ b/examples/data-sources/airbyte_source_amazon_sqs/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_amazon_sqs" "my_source_amazonsqs" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_amplitude/data-source.tf b/examples/data-sources/airbyte_source_amplitude/data-source.tf
old mode 100755
new mode 100644
index 1128e1697..3c3689b26
--- a/examples/data-sources/airbyte_source_amplitude/data-source.tf
+++ b/examples/data-sources/airbyte_source_amplitude/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_amplitude" "my_source_amplitude" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_apify_dataset/data-source.tf b/examples/data-sources/airbyte_source_apify_dataset/data-source.tf
old mode 100755
new mode 100644
index 92baf1e27..f4c77fbc9
--- a/examples/data-sources/airbyte_source_apify_dataset/data-source.tf
+++ b/examples/data-sources/airbyte_source_apify_dataset/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_apify_dataset" "my_source_apifydataset" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_appfollow/data-source.tf b/examples/data-sources/airbyte_source_appfollow/data-source.tf
old mode 100755
new mode 100644
index a056b8d8e..5eb57bd6a
--- a/examples/data-sources/airbyte_source_appfollow/data-source.tf
+++ b/examples/data-sources/airbyte_source_appfollow/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_appfollow" "my_source_appfollow" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_asana/data-source.tf b/examples/data-sources/airbyte_source_asana/data-source.tf
old mode 100755
new mode 100644
index fe9dc1d39..c288e8eb5
--- a/examples/data-sources/airbyte_source_asana/data-source.tf
+++ b/examples/data-sources/airbyte_source_asana/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_asana" "my_source_asana" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_auth0/data-source.tf b/examples/data-sources/airbyte_source_auth0/data-source.tf
old mode 100755
new mode 100644
index 970bb2098..3dadb93c6
--- a/examples/data-sources/airbyte_source_auth0/data-source.tf
+++ b/examples/data-sources/airbyte_source_auth0/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_auth0" "my_source_auth0" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_aws_cloudtrail/data-source.tf b/examples/data-sources/airbyte_source_aws_cloudtrail/data-source.tf
old mode 100755
new mode 100644
index f1d192354..bf1ca9986
--- a/examples/data-sources/airbyte_source_aws_cloudtrail/data-source.tf
+++ b/examples/data-sources/airbyte_source_aws_cloudtrail/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_aws_cloudtrail" "my_source_awscloudtrail" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_azure_blob_storage/data-source.tf b/examples/data-sources/airbyte_source_azure_blob_storage/data-source.tf
old mode 100755
new mode 100644
index 6ae76eb67..eaf439bd4
--- a/examples/data-sources/airbyte_source_azure_blob_storage/data-source.tf
+++ b/examples/data-sources/airbyte_source_azure_blob_storage/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_azure_blob_storage" "my_source_azureblobstorage" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_azure_table/data-source.tf b/examples/data-sources/airbyte_source_azure_table/data-source.tf
old mode 100755
new mode 100644
index b114f875e..9f75fa937
--- a/examples/data-sources/airbyte_source_azure_table/data-source.tf
+++ b/examples/data-sources/airbyte_source_azure_table/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_azure_table" "my_source_azuretable" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_bamboo_hr/data-source.tf b/examples/data-sources/airbyte_source_bamboo_hr/data-source.tf
old mode 100755
new mode 100644
index 08bf50764..5ba2ead38
--- a/examples/data-sources/airbyte_source_bamboo_hr/data-source.tf
+++ b/examples/data-sources/airbyte_source_bamboo_hr/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_bamboo_hr" "my_source_bamboohr" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_bigcommerce/data-source.tf b/examples/data-sources/airbyte_source_bigcommerce/data-source.tf
deleted file mode 100755
index 150bc56dd..000000000
--- a/examples/data-sources/airbyte_source_bigcommerce/data-source.tf
+++ /dev/null
@@ -1,4 +0,0 @@
-data "airbyte_source_bigcommerce" "my_source_bigcommerce" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_bigquery/data-source.tf b/examples/data-sources/airbyte_source_bigquery/data-source.tf
old mode 100755
new mode 100644
index 9fe6e8c02..9c38fa525
--- a/examples/data-sources/airbyte_source_bigquery/data-source.tf
+++ b/examples/data-sources/airbyte_source_bigquery/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_bigquery" "my_source_bigquery" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_bing_ads/data-source.tf b/examples/data-sources/airbyte_source_bing_ads/data-source.tf
old mode 100755
new mode 100644
index e3d2f56aa..e2f1d5723
--- a/examples/data-sources/airbyte_source_bing_ads/data-source.tf
+++ b/examples/data-sources/airbyte_source_bing_ads/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_bing_ads" "my_source_bingads" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_braintree/data-source.tf b/examples/data-sources/airbyte_source_braintree/data-source.tf
old mode 100755
new mode 100644
index 2478bbce9..9252e92c5
--- a/examples/data-sources/airbyte_source_braintree/data-source.tf
+++ b/examples/data-sources/airbyte_source_braintree/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_braintree" "my_source_braintree" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_braze/data-source.tf b/examples/data-sources/airbyte_source_braze/data-source.tf
old mode 100755
new mode 100644
index 039a1a976..c0e15bd32
--- a/examples/data-sources/airbyte_source_braze/data-source.tf
+++ b/examples/data-sources/airbyte_source_braze/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_braze" "my_source_braze" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_cart/data-source.tf b/examples/data-sources/airbyte_source_cart/data-source.tf
new file mode 100644
index 000000000..e6edf3f77
--- /dev/null
+++ b/examples/data-sources/airbyte_source_cart/data-source.tf
@@ -0,0 +1,3 @@
+data "airbyte_source_cart" "my_source_cart" {
+ source_id = "...my_source_id..."
+}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_chargebee/data-source.tf b/examples/data-sources/airbyte_source_chargebee/data-source.tf
old mode 100755
new mode 100644
index 9101eaa9a..9589131f4
--- a/examples/data-sources/airbyte_source_chargebee/data-source.tf
+++ b/examples/data-sources/airbyte_source_chargebee/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_chargebee" "my_source_chargebee" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_chartmogul/data-source.tf b/examples/data-sources/airbyte_source_chartmogul/data-source.tf
old mode 100755
new mode 100644
index 1ea24b3ac..4423089d9
--- a/examples/data-sources/airbyte_source_chartmogul/data-source.tf
+++ b/examples/data-sources/airbyte_source_chartmogul/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_chartmogul" "my_source_chartmogul" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_clickhouse/data-source.tf b/examples/data-sources/airbyte_source_clickhouse/data-source.tf
old mode 100755
new mode 100644
index f6ffcda9b..832df2613
--- a/examples/data-sources/airbyte_source_clickhouse/data-source.tf
+++ b/examples/data-sources/airbyte_source_clickhouse/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_clickhouse" "my_source_clickhouse" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_clickup_api/data-source.tf b/examples/data-sources/airbyte_source_clickup_api/data-source.tf
old mode 100755
new mode 100644
index 44fc9d684..5efc8ceb6
--- a/examples/data-sources/airbyte_source_clickup_api/data-source.tf
+++ b/examples/data-sources/airbyte_source_clickup_api/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_clickup_api" "my_source_clickupapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_clockify/data-source.tf b/examples/data-sources/airbyte_source_clockify/data-source.tf
old mode 100755
new mode 100644
index 29273ea04..9fdb38412
--- a/examples/data-sources/airbyte_source_clockify/data-source.tf
+++ b/examples/data-sources/airbyte_source_clockify/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_clockify" "my_source_clockify" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_close_com/data-source.tf b/examples/data-sources/airbyte_source_close_com/data-source.tf
old mode 100755
new mode 100644
index 45319e5be..e712296c8
--- a/examples/data-sources/airbyte_source_close_com/data-source.tf
+++ b/examples/data-sources/airbyte_source_close_com/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_close_com" "my_source_closecom" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_coda/data-source.tf b/examples/data-sources/airbyte_source_coda/data-source.tf
old mode 100755
new mode 100644
index f0a0525c6..4a155effa
--- a/examples/data-sources/airbyte_source_coda/data-source.tf
+++ b/examples/data-sources/airbyte_source_coda/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_coda" "my_source_coda" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_coin_api/data-source.tf b/examples/data-sources/airbyte_source_coin_api/data-source.tf
old mode 100755
new mode 100644
index bda28b3a4..e3da54f9c
--- a/examples/data-sources/airbyte_source_coin_api/data-source.tf
+++ b/examples/data-sources/airbyte_source_coin_api/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_coin_api" "my_source_coinapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_coinmarketcap/data-source.tf b/examples/data-sources/airbyte_source_coinmarketcap/data-source.tf
old mode 100755
new mode 100644
index 205d6e996..82fb95377
--- a/examples/data-sources/airbyte_source_coinmarketcap/data-source.tf
+++ b/examples/data-sources/airbyte_source_coinmarketcap/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_coinmarketcap" "my_source_coinmarketcap" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_configcat/data-source.tf b/examples/data-sources/airbyte_source_configcat/data-source.tf
old mode 100755
new mode 100644
index e663eab97..d29c05435
--- a/examples/data-sources/airbyte_source_configcat/data-source.tf
+++ b/examples/data-sources/airbyte_source_configcat/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_configcat" "my_source_configcat" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_confluence/data-source.tf b/examples/data-sources/airbyte_source_confluence/data-source.tf
old mode 100755
new mode 100644
index 5994d9b2c..f0e4cef15
--- a/examples/data-sources/airbyte_source_confluence/data-source.tf
+++ b/examples/data-sources/airbyte_source_confluence/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_confluence" "my_source_confluence" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_convex/data-source.tf b/examples/data-sources/airbyte_source_convex/data-source.tf
old mode 100755
new mode 100644
index 8cb610d40..37157d7fa
--- a/examples/data-sources/airbyte_source_convex/data-source.tf
+++ b/examples/data-sources/airbyte_source_convex/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_convex" "my_source_convex" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_datascope/data-source.tf b/examples/data-sources/airbyte_source_datascope/data-source.tf
old mode 100755
new mode 100644
index d660b35cc..8ac912ad7
--- a/examples/data-sources/airbyte_source_datascope/data-source.tf
+++ b/examples/data-sources/airbyte_source_datascope/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_datascope" "my_source_datascope" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_delighted/data-source.tf b/examples/data-sources/airbyte_source_delighted/data-source.tf
old mode 100755
new mode 100644
index f05c04af6..e4bc114d6
--- a/examples/data-sources/airbyte_source_delighted/data-source.tf
+++ b/examples/data-sources/airbyte_source_delighted/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_delighted" "my_source_delighted" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_dixa/data-source.tf b/examples/data-sources/airbyte_source_dixa/data-source.tf
old mode 100755
new mode 100644
index cb2ba99d5..7e35d127f
--- a/examples/data-sources/airbyte_source_dixa/data-source.tf
+++ b/examples/data-sources/airbyte_source_dixa/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_dixa" "my_source_dixa" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_dockerhub/data-source.tf b/examples/data-sources/airbyte_source_dockerhub/data-source.tf
old mode 100755
new mode 100644
index 7db20af65..2bd6868f9
--- a/examples/data-sources/airbyte_source_dockerhub/data-source.tf
+++ b/examples/data-sources/airbyte_source_dockerhub/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_dockerhub" "my_source_dockerhub" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_dremio/data-source.tf b/examples/data-sources/airbyte_source_dremio/data-source.tf
old mode 100755
new mode 100644
index fda1b0762..fd95db3dc
--- a/examples/data-sources/airbyte_source_dremio/data-source.tf
+++ b/examples/data-sources/airbyte_source_dremio/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_dremio" "my_source_dremio" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_dynamodb/data-source.tf b/examples/data-sources/airbyte_source_dynamodb/data-source.tf
old mode 100755
new mode 100644
index 59a495c19..7bcfcb28c
--- a/examples/data-sources/airbyte_source_dynamodb/data-source.tf
+++ b/examples/data-sources/airbyte_source_dynamodb/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_dynamodb" "my_source_dynamodb" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_e2e_test_cloud/data-source.tf b/examples/data-sources/airbyte_source_e2e_test_cloud/data-source.tf
deleted file mode 100755
index 260290c7a..000000000
--- a/examples/data-sources/airbyte_source_e2e_test_cloud/data-source.tf
+++ /dev/null
@@ -1,4 +0,0 @@
-data "airbyte_source_e2e_test_cloud" "my_source_e2etestcloud" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_emailoctopus/data-source.tf b/examples/data-sources/airbyte_source_emailoctopus/data-source.tf
old mode 100755
new mode 100644
index 1a7363cd3..9f6835e69
--- a/examples/data-sources/airbyte_source_emailoctopus/data-source.tf
+++ b/examples/data-sources/airbyte_source_emailoctopus/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_emailoctopus" "my_source_emailoctopus" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_exchange_rates/data-source.tf b/examples/data-sources/airbyte_source_exchange_rates/data-source.tf
old mode 100755
new mode 100644
index a1ea11d0c..1212dfd20
--- a/examples/data-sources/airbyte_source_exchange_rates/data-source.tf
+++ b/examples/data-sources/airbyte_source_exchange_rates/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_exchange_rates" "my_source_exchangerates" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_facebook_marketing/data-source.tf b/examples/data-sources/airbyte_source_facebook_marketing/data-source.tf
old mode 100755
new mode 100644
index ad390c300..f3aa61f22
--- a/examples/data-sources/airbyte_source_facebook_marketing/data-source.tf
+++ b/examples/data-sources/airbyte_source_facebook_marketing/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_facebook_marketing" "my_source_facebookmarketing" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_facebook_pages/data-source.tf b/examples/data-sources/airbyte_source_facebook_pages/data-source.tf
old mode 100755
new mode 100644
index f989a2914..f7ea904c3
--- a/examples/data-sources/airbyte_source_facebook_pages/data-source.tf
+++ b/examples/data-sources/airbyte_source_facebook_pages/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_facebook_pages" "my_source_facebookpages" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_faker/data-source.tf b/examples/data-sources/airbyte_source_faker/data-source.tf
old mode 100755
new mode 100644
index 7bea337ae..d1b7d4559
--- a/examples/data-sources/airbyte_source_faker/data-source.tf
+++ b/examples/data-sources/airbyte_source_faker/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_faker" "my_source_faker" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_fauna/data-source.tf b/examples/data-sources/airbyte_source_fauna/data-source.tf
old mode 100755
new mode 100644
index d8e56fe4f..b9d6f8aae
--- a/examples/data-sources/airbyte_source_fauna/data-source.tf
+++ b/examples/data-sources/airbyte_source_fauna/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_fauna" "my_source_fauna" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_file/data-source.tf b/examples/data-sources/airbyte_source_file/data-source.tf
new file mode 100644
index 000000000..8695b1a2e
--- /dev/null
+++ b/examples/data-sources/airbyte_source_file/data-source.tf
@@ -0,0 +1,3 @@
+data "airbyte_source_file" "my_source_file" {
+ source_id = "...my_source_id..."
+}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_file_secure/data-source.tf b/examples/data-sources/airbyte_source_file_secure/data-source.tf
deleted file mode 100755
index 162996223..000000000
--- a/examples/data-sources/airbyte_source_file_secure/data-source.tf
+++ /dev/null
@@ -1,4 +0,0 @@
-data "airbyte_source_file_secure" "my_source_filesecure" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_firebolt/data-source.tf b/examples/data-sources/airbyte_source_firebolt/data-source.tf
old mode 100755
new mode 100644
index 0e9b325f2..bb7ce574e
--- a/examples/data-sources/airbyte_source_firebolt/data-source.tf
+++ b/examples/data-sources/airbyte_source_firebolt/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_firebolt" "my_source_firebolt" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_freshcaller/data-source.tf b/examples/data-sources/airbyte_source_freshcaller/data-source.tf
old mode 100755
new mode 100644
index a6dcf7052..68f72a3d9
--- a/examples/data-sources/airbyte_source_freshcaller/data-source.tf
+++ b/examples/data-sources/airbyte_source_freshcaller/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_freshcaller" "my_source_freshcaller" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_freshdesk/data-source.tf b/examples/data-sources/airbyte_source_freshdesk/data-source.tf
old mode 100755
new mode 100644
index b633283a6..e230f63dd
--- a/examples/data-sources/airbyte_source_freshdesk/data-source.tf
+++ b/examples/data-sources/airbyte_source_freshdesk/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_freshdesk" "my_source_freshdesk" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_freshsales/data-source.tf b/examples/data-sources/airbyte_source_freshsales/data-source.tf
old mode 100755
new mode 100644
index c6e9db379..5066de829
--- a/examples/data-sources/airbyte_source_freshsales/data-source.tf
+++ b/examples/data-sources/airbyte_source_freshsales/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_freshsales" "my_source_freshsales" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_gainsight_px/data-source.tf b/examples/data-sources/airbyte_source_gainsight_px/data-source.tf
old mode 100755
new mode 100644
index 86a3836e4..2271d444d
--- a/examples/data-sources/airbyte_source_gainsight_px/data-source.tf
+++ b/examples/data-sources/airbyte_source_gainsight_px/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_gainsight_px" "my_source_gainsightpx" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_gcs/data-source.tf b/examples/data-sources/airbyte_source_gcs/data-source.tf
old mode 100755
new mode 100644
index 26c5d30b2..7fd5af603
--- a/examples/data-sources/airbyte_source_gcs/data-source.tf
+++ b/examples/data-sources/airbyte_source_gcs/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_gcs" "my_source_gcs" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_getlago/data-source.tf b/examples/data-sources/airbyte_source_getlago/data-source.tf
old mode 100755
new mode 100644
index 453bb365a..1ed41cc79
--- a/examples/data-sources/airbyte_source_getlago/data-source.tf
+++ b/examples/data-sources/airbyte_source_getlago/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_getlago" "my_source_getlago" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_github/data-source.tf b/examples/data-sources/airbyte_source_github/data-source.tf
old mode 100755
new mode 100644
index 961ce312d..29e5646e5
--- a/examples/data-sources/airbyte_source_github/data-source.tf
+++ b/examples/data-sources/airbyte_source_github/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_github" "my_source_github" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_gitlab/data-source.tf b/examples/data-sources/airbyte_source_gitlab/data-source.tf
old mode 100755
new mode 100644
index 3978b6dad..e0b20d9b5
--- a/examples/data-sources/airbyte_source_gitlab/data-source.tf
+++ b/examples/data-sources/airbyte_source_gitlab/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_gitlab" "my_source_gitlab" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_glassfrog/data-source.tf b/examples/data-sources/airbyte_source_glassfrog/data-source.tf
old mode 100755
new mode 100644
index 8f978e562..6197afa60
--- a/examples/data-sources/airbyte_source_glassfrog/data-source.tf
+++ b/examples/data-sources/airbyte_source_glassfrog/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_glassfrog" "my_source_glassfrog" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_gnews/data-source.tf b/examples/data-sources/airbyte_source_gnews/data-source.tf
old mode 100755
new mode 100644
index ef4ccdf6b..867e7df58
--- a/examples/data-sources/airbyte_source_gnews/data-source.tf
+++ b/examples/data-sources/airbyte_source_gnews/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_gnews" "my_source_gnews" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_google_ads/data-source.tf b/examples/data-sources/airbyte_source_google_ads/data-source.tf
old mode 100755
new mode 100644
index cde40c3c1..c93c669b7
--- a/examples/data-sources/airbyte_source_google_ads/data-source.tf
+++ b/examples/data-sources/airbyte_source_google_ads/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_google_ads" "my_source_googleads" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_google_analytics_data_api/data-source.tf b/examples/data-sources/airbyte_source_google_analytics_data_api/data-source.tf
old mode 100755
new mode 100644
index f369779df..90c7a4d30
--- a/examples/data-sources/airbyte_source_google_analytics_data_api/data-source.tf
+++ b/examples/data-sources/airbyte_source_google_analytics_data_api/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_google_analytics_data_api" "my_source_googleanalyticsdataapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_google_analytics_v4/data-source.tf b/examples/data-sources/airbyte_source_google_analytics_v4/data-source.tf
deleted file mode 100755
index ec9feda20..000000000
--- a/examples/data-sources/airbyte_source_google_analytics_v4/data-source.tf
+++ /dev/null
@@ -1,4 +0,0 @@
-data "airbyte_source_google_analytics_v4" "my_source_googleanalyticsv4" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_google_directory/data-source.tf b/examples/data-sources/airbyte_source_google_directory/data-source.tf
old mode 100755
new mode 100644
index f242efff3..4fa6679a9
--- a/examples/data-sources/airbyte_source_google_directory/data-source.tf
+++ b/examples/data-sources/airbyte_source_google_directory/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_google_directory" "my_source_googledirectory" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_google_drive/data-source.tf b/examples/data-sources/airbyte_source_google_drive/data-source.tf
new file mode 100644
index 000000000..465dc2d7e
--- /dev/null
+++ b/examples/data-sources/airbyte_source_google_drive/data-source.tf
@@ -0,0 +1,3 @@
+data "airbyte_source_google_drive" "my_source_googledrive" {
+ source_id = "...my_source_id..."
+}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_google_pagespeed_insights/data-source.tf b/examples/data-sources/airbyte_source_google_pagespeed_insights/data-source.tf
old mode 100755
new mode 100644
index d9ab7d0e8..115e0f41f
--- a/examples/data-sources/airbyte_source_google_pagespeed_insights/data-source.tf
+++ b/examples/data-sources/airbyte_source_google_pagespeed_insights/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_google_pagespeed_insights" "my_source_googlepagespeedinsights" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_google_search_console/data-source.tf b/examples/data-sources/airbyte_source_google_search_console/data-source.tf
old mode 100755
new mode 100644
index 4c49a1c3f..ed14d1cc8
--- a/examples/data-sources/airbyte_source_google_search_console/data-source.tf
+++ b/examples/data-sources/airbyte_source_google_search_console/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_google_search_console" "my_source_googlesearchconsole" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_google_sheets/data-source.tf b/examples/data-sources/airbyte_source_google_sheets/data-source.tf
old mode 100755
new mode 100644
index b6b1d68e4..b32ee25e3
--- a/examples/data-sources/airbyte_source_google_sheets/data-source.tf
+++ b/examples/data-sources/airbyte_source_google_sheets/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_google_sheets" "my_source_googlesheets" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_google_webfonts/data-source.tf b/examples/data-sources/airbyte_source_google_webfonts/data-source.tf
old mode 100755
new mode 100644
index 70fe85bbc..2502d1192
--- a/examples/data-sources/airbyte_source_google_webfonts/data-source.tf
+++ b/examples/data-sources/airbyte_source_google_webfonts/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_google_webfonts" "my_source_googlewebfonts" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_google_workspace_admin_reports/data-source.tf b/examples/data-sources/airbyte_source_google_workspace_admin_reports/data-source.tf
old mode 100755
new mode 100644
index 1b1a40847..1b56597b2
--- a/examples/data-sources/airbyte_source_google_workspace_admin_reports/data-source.tf
+++ b/examples/data-sources/airbyte_source_google_workspace_admin_reports/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_google_workspace_admin_reports" "my_source_googleworkspaceadminreports" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_greenhouse/data-source.tf b/examples/data-sources/airbyte_source_greenhouse/data-source.tf
old mode 100755
new mode 100644
index 1b11f37c6..4f54cd52f
--- a/examples/data-sources/airbyte_source_greenhouse/data-source.tf
+++ b/examples/data-sources/airbyte_source_greenhouse/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_greenhouse" "my_source_greenhouse" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_gridly/data-source.tf b/examples/data-sources/airbyte_source_gridly/data-source.tf
old mode 100755
new mode 100644
index 4197ada13..b562c4ef1
--- a/examples/data-sources/airbyte_source_gridly/data-source.tf
+++ b/examples/data-sources/airbyte_source_gridly/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_gridly" "my_source_gridly" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_harvest/data-source.tf b/examples/data-sources/airbyte_source_harvest/data-source.tf
old mode 100755
new mode 100644
index 624021b4a..31c55dc69
--- a/examples/data-sources/airbyte_source_harvest/data-source.tf
+++ b/examples/data-sources/airbyte_source_harvest/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_harvest" "my_source_harvest" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_hubplanner/data-source.tf b/examples/data-sources/airbyte_source_hubplanner/data-source.tf
old mode 100755
new mode 100644
index 5eb45fc4b..af3db95d9
--- a/examples/data-sources/airbyte_source_hubplanner/data-source.tf
+++ b/examples/data-sources/airbyte_source_hubplanner/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_hubplanner" "my_source_hubplanner" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_hubspot/data-source.tf b/examples/data-sources/airbyte_source_hubspot/data-source.tf
old mode 100755
new mode 100644
index e5cf58b60..b04f84e03
--- a/examples/data-sources/airbyte_source_hubspot/data-source.tf
+++ b/examples/data-sources/airbyte_source_hubspot/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_hubspot" "my_source_hubspot" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_insightly/data-source.tf b/examples/data-sources/airbyte_source_insightly/data-source.tf
old mode 100755
new mode 100644
index 31afae58f..865523cbd
--- a/examples/data-sources/airbyte_source_insightly/data-source.tf
+++ b/examples/data-sources/airbyte_source_insightly/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_insightly" "my_source_insightly" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_instagram/data-source.tf b/examples/data-sources/airbyte_source_instagram/data-source.tf
old mode 100755
new mode 100644
index 7332bca16..e45fa737b
--- a/examples/data-sources/airbyte_source_instagram/data-source.tf
+++ b/examples/data-sources/airbyte_source_instagram/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_instagram" "my_source_instagram" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_instatus/data-source.tf b/examples/data-sources/airbyte_source_instatus/data-source.tf
old mode 100755
new mode 100644
index 92569818a..ec39e4dbc
--- a/examples/data-sources/airbyte_source_instatus/data-source.tf
+++ b/examples/data-sources/airbyte_source_instatus/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_instatus" "my_source_instatus" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_intercom/data-source.tf b/examples/data-sources/airbyte_source_intercom/data-source.tf
old mode 100755
new mode 100644
index 0400ec9e8..f3779cc07
--- a/examples/data-sources/airbyte_source_intercom/data-source.tf
+++ b/examples/data-sources/airbyte_source_intercom/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_intercom" "my_source_intercom" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_ip2whois/data-source.tf b/examples/data-sources/airbyte_source_ip2whois/data-source.tf
old mode 100755
new mode 100644
index e0bb93cb1..5a5b00c1a
--- a/examples/data-sources/airbyte_source_ip2whois/data-source.tf
+++ b/examples/data-sources/airbyte_source_ip2whois/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_ip2whois" "my_source_ip2whois" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_iterable/data-source.tf b/examples/data-sources/airbyte_source_iterable/data-source.tf
old mode 100755
new mode 100644
index ebe992eb0..5bb966b0e
--- a/examples/data-sources/airbyte_source_iterable/data-source.tf
+++ b/examples/data-sources/airbyte_source_iterable/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_iterable" "my_source_iterable" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_jira/data-source.tf b/examples/data-sources/airbyte_source_jira/data-source.tf
old mode 100755
new mode 100644
index 8d598d92b..fbb1058b1
--- a/examples/data-sources/airbyte_source_jira/data-source.tf
+++ b/examples/data-sources/airbyte_source_jira/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_jira" "my_source_jira" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_k6_cloud/data-source.tf b/examples/data-sources/airbyte_source_k6_cloud/data-source.tf
old mode 100755
new mode 100644
index 1cce7a1e6..8754b0a7f
--- a/examples/data-sources/airbyte_source_k6_cloud/data-source.tf
+++ b/examples/data-sources/airbyte_source_k6_cloud/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_k6_cloud" "my_source_k6cloud" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_klarna/data-source.tf b/examples/data-sources/airbyte_source_klarna/data-source.tf
old mode 100755
new mode 100644
index 9837ee150..4a82dde86
--- a/examples/data-sources/airbyte_source_klarna/data-source.tf
+++ b/examples/data-sources/airbyte_source_klarna/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_klarna" "my_source_klarna" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_klaviyo/data-source.tf b/examples/data-sources/airbyte_source_klaviyo/data-source.tf
old mode 100755
new mode 100644
index 169f75dc0..9808465f3
--- a/examples/data-sources/airbyte_source_klaviyo/data-source.tf
+++ b/examples/data-sources/airbyte_source_klaviyo/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_klaviyo" "my_source_klaviyo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_kustomer_singer/data-source.tf b/examples/data-sources/airbyte_source_kustomer_singer/data-source.tf
old mode 100755
new mode 100644
index 808eabcc3..96ec85431
--- a/examples/data-sources/airbyte_source_kustomer_singer/data-source.tf
+++ b/examples/data-sources/airbyte_source_kustomer_singer/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_kustomer_singer" "my_source_kustomersinger" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_kyve/data-source.tf b/examples/data-sources/airbyte_source_kyve/data-source.tf
old mode 100755
new mode 100644
index 67e5849dd..77ca68cd0
--- a/examples/data-sources/airbyte_source_kyve/data-source.tf
+++ b/examples/data-sources/airbyte_source_kyve/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_kyve" "my_source_kyve" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_launchdarkly/data-source.tf b/examples/data-sources/airbyte_source_launchdarkly/data-source.tf
old mode 100755
new mode 100644
index 63bbcf9af..c1ca46b21
--- a/examples/data-sources/airbyte_source_launchdarkly/data-source.tf
+++ b/examples/data-sources/airbyte_source_launchdarkly/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_launchdarkly" "my_source_launchdarkly" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_lemlist/data-source.tf b/examples/data-sources/airbyte_source_lemlist/data-source.tf
old mode 100755
new mode 100644
index f51f4464e..d5a68cf9a
--- a/examples/data-sources/airbyte_source_lemlist/data-source.tf
+++ b/examples/data-sources/airbyte_source_lemlist/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_lemlist" "my_source_lemlist" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_lever_hiring/data-source.tf b/examples/data-sources/airbyte_source_lever_hiring/data-source.tf
old mode 100755
new mode 100644
index 87c204cfd..1a3f90f30
--- a/examples/data-sources/airbyte_source_lever_hiring/data-source.tf
+++ b/examples/data-sources/airbyte_source_lever_hiring/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_lever_hiring" "my_source_leverhiring" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_linkedin_ads/data-source.tf b/examples/data-sources/airbyte_source_linkedin_ads/data-source.tf
old mode 100755
new mode 100644
index 7c27cfd42..83722ef54
--- a/examples/data-sources/airbyte_source_linkedin_ads/data-source.tf
+++ b/examples/data-sources/airbyte_source_linkedin_ads/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_linkedin_ads" "my_source_linkedinads" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_linkedin_pages/data-source.tf b/examples/data-sources/airbyte_source_linkedin_pages/data-source.tf
old mode 100755
new mode 100644
index 54f739635..0218588e2
--- a/examples/data-sources/airbyte_source_linkedin_pages/data-source.tf
+++ b/examples/data-sources/airbyte_source_linkedin_pages/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_linkedin_pages" "my_source_linkedinpages" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_linnworks/data-source.tf b/examples/data-sources/airbyte_source_linnworks/data-source.tf
old mode 100755
new mode 100644
index 88713e9c7..850527e3a
--- a/examples/data-sources/airbyte_source_linnworks/data-source.tf
+++ b/examples/data-sources/airbyte_source_linnworks/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_linnworks" "my_source_linnworks" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_lokalise/data-source.tf b/examples/data-sources/airbyte_source_lokalise/data-source.tf
old mode 100755
new mode 100644
index e32ac0456..9cc0a12e8
--- a/examples/data-sources/airbyte_source_lokalise/data-source.tf
+++ b/examples/data-sources/airbyte_source_lokalise/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_lokalise" "my_source_lokalise" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_mailchimp/data-source.tf b/examples/data-sources/airbyte_source_mailchimp/data-source.tf
old mode 100755
new mode 100644
index 3a91a840f..538e2a7f5
--- a/examples/data-sources/airbyte_source_mailchimp/data-source.tf
+++ b/examples/data-sources/airbyte_source_mailchimp/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_mailchimp" "my_source_mailchimp" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_mailgun/data-source.tf b/examples/data-sources/airbyte_source_mailgun/data-source.tf
old mode 100755
new mode 100644
index 152e51044..522a6fd5b
--- a/examples/data-sources/airbyte_source_mailgun/data-source.tf
+++ b/examples/data-sources/airbyte_source_mailgun/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_mailgun" "my_source_mailgun" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_mailjet_sms/data-source.tf b/examples/data-sources/airbyte_source_mailjet_sms/data-source.tf
old mode 100755
new mode 100644
index 8da7f8d73..2d6ad34a2
--- a/examples/data-sources/airbyte_source_mailjet_sms/data-source.tf
+++ b/examples/data-sources/airbyte_source_mailjet_sms/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_mailjet_sms" "my_source_mailjetsms" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_marketo/data-source.tf b/examples/data-sources/airbyte_source_marketo/data-source.tf
old mode 100755
new mode 100644
index fc9afd849..d5501a681
--- a/examples/data-sources/airbyte_source_marketo/data-source.tf
+++ b/examples/data-sources/airbyte_source_marketo/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_marketo" "my_source_marketo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_metabase/data-source.tf b/examples/data-sources/airbyte_source_metabase/data-source.tf
old mode 100755
new mode 100644
index 4225bdf80..f829b2b0e
--- a/examples/data-sources/airbyte_source_metabase/data-source.tf
+++ b/examples/data-sources/airbyte_source_metabase/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_metabase" "my_source_metabase" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_microsoft_teams/data-source.tf b/examples/data-sources/airbyte_source_microsoft_teams/data-source.tf
old mode 100755
new mode 100644
index b980aec7f..92c5a196a
--- a/examples/data-sources/airbyte_source_microsoft_teams/data-source.tf
+++ b/examples/data-sources/airbyte_source_microsoft_teams/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_microsoft_teams" "my_source_microsoftteams" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_mixpanel/data-source.tf b/examples/data-sources/airbyte_source_mixpanel/data-source.tf
old mode 100755
new mode 100644
index aa36bd3d9..562ac3b13
--- a/examples/data-sources/airbyte_source_mixpanel/data-source.tf
+++ b/examples/data-sources/airbyte_source_mixpanel/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_mixpanel" "my_source_mixpanel" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_monday/data-source.tf b/examples/data-sources/airbyte_source_monday/data-source.tf
old mode 100755
new mode 100644
index aaaaeedbd..b90ade378
--- a/examples/data-sources/airbyte_source_monday/data-source.tf
+++ b/examples/data-sources/airbyte_source_monday/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_monday" "my_source_monday" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_mongodb/data-source.tf b/examples/data-sources/airbyte_source_mongodb/data-source.tf
deleted file mode 100755
index 3fd3c4d72..000000000
--- a/examples/data-sources/airbyte_source_mongodb/data-source.tf
+++ /dev/null
@@ -1,4 +0,0 @@
-data "airbyte_source_mongodb" "my_source_mongodb" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_mongodb_internal_poc/data-source.tf b/examples/data-sources/airbyte_source_mongodb_internal_poc/data-source.tf
old mode 100755
new mode 100644
index 68987e3fc..6c8e41b80
--- a/examples/data-sources/airbyte_source_mongodb_internal_poc/data-source.tf
+++ b/examples/data-sources/airbyte_source_mongodb_internal_poc/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_mongodb_internal_poc" "my_source_mongodbinternalpoc" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_mongodb_v2/data-source.tf b/examples/data-sources/airbyte_source_mongodb_v2/data-source.tf
new file mode 100644
index 000000000..9a8b34928
--- /dev/null
+++ b/examples/data-sources/airbyte_source_mongodb_v2/data-source.tf
@@ -0,0 +1,3 @@
+data "airbyte_source_mongodb_v2" "my_source_mongodbv2" {
+ source_id = "...my_source_id..."
+}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_mssql/data-source.tf b/examples/data-sources/airbyte_source_mssql/data-source.tf
old mode 100755
new mode 100644
index 398b4872b..4f2a414be
--- a/examples/data-sources/airbyte_source_mssql/data-source.tf
+++ b/examples/data-sources/airbyte_source_mssql/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_mssql" "my_source_mssql" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_my_hours/data-source.tf b/examples/data-sources/airbyte_source_my_hours/data-source.tf
old mode 100755
new mode 100644
index 24c17aa63..1c1117aaa
--- a/examples/data-sources/airbyte_source_my_hours/data-source.tf
+++ b/examples/data-sources/airbyte_source_my_hours/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_my_hours" "my_source_myhours" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_mysql/data-source.tf b/examples/data-sources/airbyte_source_mysql/data-source.tf
old mode 100755
new mode 100644
index af1dab4ca..d75df5881
--- a/examples/data-sources/airbyte_source_mysql/data-source.tf
+++ b/examples/data-sources/airbyte_source_mysql/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_mysql" "my_source_mysql" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_netsuite/data-source.tf b/examples/data-sources/airbyte_source_netsuite/data-source.tf
old mode 100755
new mode 100644
index 37512c89b..ed4c160d4
--- a/examples/data-sources/airbyte_source_netsuite/data-source.tf
+++ b/examples/data-sources/airbyte_source_netsuite/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_netsuite" "my_source_netsuite" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_notion/data-source.tf b/examples/data-sources/airbyte_source_notion/data-source.tf
old mode 100755
new mode 100644
index b69062431..f8eb13215
--- a/examples/data-sources/airbyte_source_notion/data-source.tf
+++ b/examples/data-sources/airbyte_source_notion/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_notion" "my_source_notion" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_nytimes/data-source.tf b/examples/data-sources/airbyte_source_nytimes/data-source.tf
old mode 100755
new mode 100644
index 5079f69fd..244d27bba
--- a/examples/data-sources/airbyte_source_nytimes/data-source.tf
+++ b/examples/data-sources/airbyte_source_nytimes/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_nytimes" "my_source_nytimes" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_okta/data-source.tf b/examples/data-sources/airbyte_source_okta/data-source.tf
old mode 100755
new mode 100644
index f9db8d6de..6d60dd5f3
--- a/examples/data-sources/airbyte_source_okta/data-source.tf
+++ b/examples/data-sources/airbyte_source_okta/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_okta" "my_source_okta" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_omnisend/data-source.tf b/examples/data-sources/airbyte_source_omnisend/data-source.tf
old mode 100755
new mode 100644
index 4a5104673..77f72e795
--- a/examples/data-sources/airbyte_source_omnisend/data-source.tf
+++ b/examples/data-sources/airbyte_source_omnisend/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_omnisend" "my_source_omnisend" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_onesignal/data-source.tf b/examples/data-sources/airbyte_source_onesignal/data-source.tf
old mode 100755
new mode 100644
index c9a4f7ef7..8de87a575
--- a/examples/data-sources/airbyte_source_onesignal/data-source.tf
+++ b/examples/data-sources/airbyte_source_onesignal/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_onesignal" "my_source_onesignal" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_oracle/data-source.tf b/examples/data-sources/airbyte_source_oracle/data-source.tf
old mode 100755
new mode 100644
index 46898b1da..6f88838e8
--- a/examples/data-sources/airbyte_source_oracle/data-source.tf
+++ b/examples/data-sources/airbyte_source_oracle/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_oracle" "my_source_oracle" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_orb/data-source.tf b/examples/data-sources/airbyte_source_orb/data-source.tf
old mode 100755
new mode 100644
index f73a755ff..ea60a5da5
--- a/examples/data-sources/airbyte_source_orb/data-source.tf
+++ b/examples/data-sources/airbyte_source_orb/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_orb" "my_source_orb" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_orbit/data-source.tf b/examples/data-sources/airbyte_source_orbit/data-source.tf
old mode 100755
new mode 100644
index 0d39c5bf5..2b49d4d1c
--- a/examples/data-sources/airbyte_source_orbit/data-source.tf
+++ b/examples/data-sources/airbyte_source_orbit/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_orbit" "my_source_orbit" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_outbrain_amplify/data-source.tf b/examples/data-sources/airbyte_source_outbrain_amplify/data-source.tf
old mode 100755
new mode 100644
index b1f8403d4..2aaf91996
--- a/examples/data-sources/airbyte_source_outbrain_amplify/data-source.tf
+++ b/examples/data-sources/airbyte_source_outbrain_amplify/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_outbrain_amplify" "my_source_outbrainamplify" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_outreach/data-source.tf b/examples/data-sources/airbyte_source_outreach/data-source.tf
old mode 100755
new mode 100644
index 76518cfe7..4f1b56188
--- a/examples/data-sources/airbyte_source_outreach/data-source.tf
+++ b/examples/data-sources/airbyte_source_outreach/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_outreach" "my_source_outreach" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_paypal_transaction/data-source.tf b/examples/data-sources/airbyte_source_paypal_transaction/data-source.tf
old mode 100755
new mode 100644
index 081675624..23e7d5f3f
--- a/examples/data-sources/airbyte_source_paypal_transaction/data-source.tf
+++ b/examples/data-sources/airbyte_source_paypal_transaction/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_paypal_transaction" "my_source_paypaltransaction" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_paystack/data-source.tf b/examples/data-sources/airbyte_source_paystack/data-source.tf
old mode 100755
new mode 100644
index 43ef44d8b..be03d2f27
--- a/examples/data-sources/airbyte_source_paystack/data-source.tf
+++ b/examples/data-sources/airbyte_source_paystack/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_paystack" "my_source_paystack" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_pendo/data-source.tf b/examples/data-sources/airbyte_source_pendo/data-source.tf
old mode 100755
new mode 100644
index 6c43dd144..00587ac60
--- a/examples/data-sources/airbyte_source_pendo/data-source.tf
+++ b/examples/data-sources/airbyte_source_pendo/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_pendo" "my_source_pendo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_persistiq/data-source.tf b/examples/data-sources/airbyte_source_persistiq/data-source.tf
old mode 100755
new mode 100644
index e089e3bbc..88955fba5
--- a/examples/data-sources/airbyte_source_persistiq/data-source.tf
+++ b/examples/data-sources/airbyte_source_persistiq/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_persistiq" "my_source_persistiq" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_pexels_api/data-source.tf b/examples/data-sources/airbyte_source_pexels_api/data-source.tf
old mode 100755
new mode 100644
index 2e40570e5..dfa94656a
--- a/examples/data-sources/airbyte_source_pexels_api/data-source.tf
+++ b/examples/data-sources/airbyte_source_pexels_api/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_pexels_api" "my_source_pexelsapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_pinterest/data-source.tf b/examples/data-sources/airbyte_source_pinterest/data-source.tf
old mode 100755
new mode 100644
index b4ff27640..76eed40e8
--- a/examples/data-sources/airbyte_source_pinterest/data-source.tf
+++ b/examples/data-sources/airbyte_source_pinterest/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_pinterest" "my_source_pinterest" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_pipedrive/data-source.tf b/examples/data-sources/airbyte_source_pipedrive/data-source.tf
old mode 100755
new mode 100644
index 36b865f5a..175c937a7
--- a/examples/data-sources/airbyte_source_pipedrive/data-source.tf
+++ b/examples/data-sources/airbyte_source_pipedrive/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_pipedrive" "my_source_pipedrive" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_pocket/data-source.tf b/examples/data-sources/airbyte_source_pocket/data-source.tf
old mode 100755
new mode 100644
index 58b4b75d6..41699bf58
--- a/examples/data-sources/airbyte_source_pocket/data-source.tf
+++ b/examples/data-sources/airbyte_source_pocket/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_pocket" "my_source_pocket" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_pokeapi/data-source.tf b/examples/data-sources/airbyte_source_pokeapi/data-source.tf
old mode 100755
new mode 100644
index b5d9388f3..2c37010fb
--- a/examples/data-sources/airbyte_source_pokeapi/data-source.tf
+++ b/examples/data-sources/airbyte_source_pokeapi/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_pokeapi" "my_source_pokeapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_polygon_stock_api/data-source.tf b/examples/data-sources/airbyte_source_polygon_stock_api/data-source.tf
old mode 100755
new mode 100644
index 67846ba79..cd4a8a05d
--- a/examples/data-sources/airbyte_source_polygon_stock_api/data-source.tf
+++ b/examples/data-sources/airbyte_source_polygon_stock_api/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_polygon_stock_api" "my_source_polygonstockapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_postgres/data-source.tf b/examples/data-sources/airbyte_source_postgres/data-source.tf
old mode 100755
new mode 100644
index e3d6e7ce8..257981b06
--- a/examples/data-sources/airbyte_source_postgres/data-source.tf
+++ b/examples/data-sources/airbyte_source_postgres/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_postgres" "my_source_postgres" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_posthog/data-source.tf b/examples/data-sources/airbyte_source_posthog/data-source.tf
old mode 100755
new mode 100644
index b8216530e..6a77e8467
--- a/examples/data-sources/airbyte_source_posthog/data-source.tf
+++ b/examples/data-sources/airbyte_source_posthog/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_posthog" "my_source_posthog" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_postmarkapp/data-source.tf b/examples/data-sources/airbyte_source_postmarkapp/data-source.tf
old mode 100755
new mode 100644
index b01e3ba4b..0d6eed10f
--- a/examples/data-sources/airbyte_source_postmarkapp/data-source.tf
+++ b/examples/data-sources/airbyte_source_postmarkapp/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_postmarkapp" "my_source_postmarkapp" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_prestashop/data-source.tf b/examples/data-sources/airbyte_source_prestashop/data-source.tf
old mode 100755
new mode 100644
index 383b01f53..7a2b6b39d
--- a/examples/data-sources/airbyte_source_prestashop/data-source.tf
+++ b/examples/data-sources/airbyte_source_prestashop/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_prestashop" "my_source_prestashop" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_punk_api/data-source.tf b/examples/data-sources/airbyte_source_punk_api/data-source.tf
old mode 100755
new mode 100644
index 6188852d8..098a08a1a
--- a/examples/data-sources/airbyte_source_punk_api/data-source.tf
+++ b/examples/data-sources/airbyte_source_punk_api/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_punk_api" "my_source_punkapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_pypi/data-source.tf b/examples/data-sources/airbyte_source_pypi/data-source.tf
old mode 100755
new mode 100644
index ad65449a5..e2470e29d
--- a/examples/data-sources/airbyte_source_pypi/data-source.tf
+++ b/examples/data-sources/airbyte_source_pypi/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_pypi" "my_source_pypi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_qualaroo/data-source.tf b/examples/data-sources/airbyte_source_qualaroo/data-source.tf
old mode 100755
new mode 100644
index f5e0e70ce..7156719c3
--- a/examples/data-sources/airbyte_source_qualaroo/data-source.tf
+++ b/examples/data-sources/airbyte_source_qualaroo/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_qualaroo" "my_source_qualaroo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_quickbooks/data-source.tf b/examples/data-sources/airbyte_source_quickbooks/data-source.tf
old mode 100755
new mode 100644
index 5ebb564f8..700159434
--- a/examples/data-sources/airbyte_source_quickbooks/data-source.tf
+++ b/examples/data-sources/airbyte_source_quickbooks/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_quickbooks" "my_source_quickbooks" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_railz/data-source.tf b/examples/data-sources/airbyte_source_railz/data-source.tf
old mode 100755
new mode 100644
index e3487eccb..f35b206d9
--- a/examples/data-sources/airbyte_source_railz/data-source.tf
+++ b/examples/data-sources/airbyte_source_railz/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_railz" "my_source_railz" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_recharge/data-source.tf b/examples/data-sources/airbyte_source_recharge/data-source.tf
old mode 100755
new mode 100644
index 81b160083..5c9ead4b0
--- a/examples/data-sources/airbyte_source_recharge/data-source.tf
+++ b/examples/data-sources/airbyte_source_recharge/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_recharge" "my_source_recharge" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_recreation/data-source.tf b/examples/data-sources/airbyte_source_recreation/data-source.tf
old mode 100755
new mode 100644
index 843845b92..755d20e7f
--- a/examples/data-sources/airbyte_source_recreation/data-source.tf
+++ b/examples/data-sources/airbyte_source_recreation/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_recreation" "my_source_recreation" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_recruitee/data-source.tf b/examples/data-sources/airbyte_source_recruitee/data-source.tf
old mode 100755
new mode 100644
index a7d8ce4da..e296aad38
--- a/examples/data-sources/airbyte_source_recruitee/data-source.tf
+++ b/examples/data-sources/airbyte_source_recruitee/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_recruitee" "my_source_recruitee" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_recurly/data-source.tf b/examples/data-sources/airbyte_source_recurly/data-source.tf
old mode 100755
new mode 100644
index 5a43ebac4..b83c146d1
--- a/examples/data-sources/airbyte_source_recurly/data-source.tf
+++ b/examples/data-sources/airbyte_source_recurly/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_recurly" "my_source_recurly" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_redshift/data-source.tf b/examples/data-sources/airbyte_source_redshift/data-source.tf
old mode 100755
new mode 100644
index 32bc1d894..5a4572c58
--- a/examples/data-sources/airbyte_source_redshift/data-source.tf
+++ b/examples/data-sources/airbyte_source_redshift/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_redshift" "my_source_redshift" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_retently/data-source.tf b/examples/data-sources/airbyte_source_retently/data-source.tf
old mode 100755
new mode 100644
index 498852282..9044aa7e2
--- a/examples/data-sources/airbyte_source_retently/data-source.tf
+++ b/examples/data-sources/airbyte_source_retently/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_retently" "my_source_retently" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_rki_covid/data-source.tf b/examples/data-sources/airbyte_source_rki_covid/data-source.tf
old mode 100755
new mode 100644
index af60f8ca4..0c868e5d8
--- a/examples/data-sources/airbyte_source_rki_covid/data-source.tf
+++ b/examples/data-sources/airbyte_source_rki_covid/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_rki_covid" "my_source_rkicovid" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_rss/data-source.tf b/examples/data-sources/airbyte_source_rss/data-source.tf
old mode 100755
new mode 100644
index 21abce678..3935f561d
--- a/examples/data-sources/airbyte_source_rss/data-source.tf
+++ b/examples/data-sources/airbyte_source_rss/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_rss" "my_source_rss" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_s3/data-source.tf b/examples/data-sources/airbyte_source_s3/data-source.tf
old mode 100755
new mode 100644
index 20e26e478..3a6c36d4f
--- a/examples/data-sources/airbyte_source_s3/data-source.tf
+++ b/examples/data-sources/airbyte_source_s3/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_s3" "my_source_s3" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_salesforce/data-source.tf b/examples/data-sources/airbyte_source_salesforce/data-source.tf
old mode 100755
new mode 100644
index 19f9f1cc1..b5dd2eea8
--- a/examples/data-sources/airbyte_source_salesforce/data-source.tf
+++ b/examples/data-sources/airbyte_source_salesforce/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_salesforce" "my_source_salesforce" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_salesloft/data-source.tf b/examples/data-sources/airbyte_source_salesloft/data-source.tf
old mode 100755
new mode 100644
index 5d1be1527..2c461e491
--- a/examples/data-sources/airbyte_source_salesloft/data-source.tf
+++ b/examples/data-sources/airbyte_source_salesloft/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_salesloft" "my_source_salesloft" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_sap_fieldglass/data-source.tf b/examples/data-sources/airbyte_source_sap_fieldglass/data-source.tf
old mode 100755
new mode 100644
index 54935bee4..ee95ad0a8
--- a/examples/data-sources/airbyte_source_sap_fieldglass/data-source.tf
+++ b/examples/data-sources/airbyte_source_sap_fieldglass/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_sap_fieldglass" "my_source_sapfieldglass" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_secoda/data-source.tf b/examples/data-sources/airbyte_source_secoda/data-source.tf
old mode 100755
new mode 100644
index a20c572e6..4a9e7f4c2
--- a/examples/data-sources/airbyte_source_secoda/data-source.tf
+++ b/examples/data-sources/airbyte_source_secoda/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_secoda" "my_source_secoda" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_sendgrid/data-source.tf b/examples/data-sources/airbyte_source_sendgrid/data-source.tf
old mode 100755
new mode 100644
index 71dd3b360..65120eab0
--- a/examples/data-sources/airbyte_source_sendgrid/data-source.tf
+++ b/examples/data-sources/airbyte_source_sendgrid/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_sendgrid" "my_source_sendgrid" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_sendinblue/data-source.tf b/examples/data-sources/airbyte_source_sendinblue/data-source.tf
old mode 100755
new mode 100644
index 3f5e65cb8..e48863979
--- a/examples/data-sources/airbyte_source_sendinblue/data-source.tf
+++ b/examples/data-sources/airbyte_source_sendinblue/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_sendinblue" "my_source_sendinblue" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_senseforce/data-source.tf b/examples/data-sources/airbyte_source_senseforce/data-source.tf
old mode 100755
new mode 100644
index a252b7f42..285d2cbcb
--- a/examples/data-sources/airbyte_source_senseforce/data-source.tf
+++ b/examples/data-sources/airbyte_source_senseforce/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_senseforce" "my_source_senseforce" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_sentry/data-source.tf b/examples/data-sources/airbyte_source_sentry/data-source.tf
old mode 100755
new mode 100644
index e5abe4184..74408f603
--- a/examples/data-sources/airbyte_source_sentry/data-source.tf
+++ b/examples/data-sources/airbyte_source_sentry/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_sentry" "my_source_sentry" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_sftp/data-source.tf b/examples/data-sources/airbyte_source_sftp/data-source.tf
old mode 100755
new mode 100644
index b54fdbbe4..31e546c7e
--- a/examples/data-sources/airbyte_source_sftp/data-source.tf
+++ b/examples/data-sources/airbyte_source_sftp/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_sftp" "my_source_sftp" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_sftp_bulk/data-source.tf b/examples/data-sources/airbyte_source_sftp_bulk/data-source.tf
old mode 100755
new mode 100644
index 304ad5ae3..b24280060
--- a/examples/data-sources/airbyte_source_sftp_bulk/data-source.tf
+++ b/examples/data-sources/airbyte_source_sftp_bulk/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_sftp_bulk" "my_source_sftpbulk" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_shopify/data-source.tf b/examples/data-sources/airbyte_source_shopify/data-source.tf
old mode 100755
new mode 100644
index 0d4982787..4e79aa457
--- a/examples/data-sources/airbyte_source_shopify/data-source.tf
+++ b/examples/data-sources/airbyte_source_shopify/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_shopify" "my_source_shopify" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_shortio/data-source.tf b/examples/data-sources/airbyte_source_shortio/data-source.tf
old mode 100755
new mode 100644
index 46bc470af..0404216bb
--- a/examples/data-sources/airbyte_source_shortio/data-source.tf
+++ b/examples/data-sources/airbyte_source_shortio/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_shortio" "my_source_shortio" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_slack/data-source.tf b/examples/data-sources/airbyte_source_slack/data-source.tf
old mode 100755
new mode 100644
index 886b66e5f..ddd2fe00e
--- a/examples/data-sources/airbyte_source_slack/data-source.tf
+++ b/examples/data-sources/airbyte_source_slack/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_slack" "my_source_slack" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_smaily/data-source.tf b/examples/data-sources/airbyte_source_smaily/data-source.tf
old mode 100755
new mode 100644
index 811afaca1..66c8b9a8a
--- a/examples/data-sources/airbyte_source_smaily/data-source.tf
+++ b/examples/data-sources/airbyte_source_smaily/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_smaily" "my_source_smaily" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_smartengage/data-source.tf b/examples/data-sources/airbyte_source_smartengage/data-source.tf
old mode 100755
new mode 100644
index 00e7fc1da..727639779
--- a/examples/data-sources/airbyte_source_smartengage/data-source.tf
+++ b/examples/data-sources/airbyte_source_smartengage/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_smartengage" "my_source_smartengage" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_smartsheets/data-source.tf b/examples/data-sources/airbyte_source_smartsheets/data-source.tf
old mode 100755
new mode 100644
index 673fbbb44..6eab4de3d
--- a/examples/data-sources/airbyte_source_smartsheets/data-source.tf
+++ b/examples/data-sources/airbyte_source_smartsheets/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_smartsheets" "my_source_smartsheets" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_snapchat_marketing/data-source.tf b/examples/data-sources/airbyte_source_snapchat_marketing/data-source.tf
old mode 100755
new mode 100644
index 6390a8668..2006e179d
--- a/examples/data-sources/airbyte_source_snapchat_marketing/data-source.tf
+++ b/examples/data-sources/airbyte_source_snapchat_marketing/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_snapchat_marketing" "my_source_snapchatmarketing" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_snowflake/data-source.tf b/examples/data-sources/airbyte_source_snowflake/data-source.tf
old mode 100755
new mode 100644
index 6fb2ae299..e7746021a
--- a/examples/data-sources/airbyte_source_snowflake/data-source.tf
+++ b/examples/data-sources/airbyte_source_snowflake/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_snowflake" "my_source_snowflake" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_sonar_cloud/data-source.tf b/examples/data-sources/airbyte_source_sonar_cloud/data-source.tf
old mode 100755
new mode 100644
index 9e18011c2..1b6879a59
--- a/examples/data-sources/airbyte_source_sonar_cloud/data-source.tf
+++ b/examples/data-sources/airbyte_source_sonar_cloud/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_sonar_cloud" "my_source_sonarcloud" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_spacex_api/data-source.tf b/examples/data-sources/airbyte_source_spacex_api/data-source.tf
old mode 100755
new mode 100644
index 52e3a7c98..4a0373123
--- a/examples/data-sources/airbyte_source_spacex_api/data-source.tf
+++ b/examples/data-sources/airbyte_source_spacex_api/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_spacex_api" "my_source_spacexapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_square/data-source.tf b/examples/data-sources/airbyte_source_square/data-source.tf
old mode 100755
new mode 100644
index 728c27fce..f8cf1fd75
--- a/examples/data-sources/airbyte_source_square/data-source.tf
+++ b/examples/data-sources/airbyte_source_square/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_square" "my_source_square" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_strava/data-source.tf b/examples/data-sources/airbyte_source_strava/data-source.tf
old mode 100755
new mode 100644
index ce657edc8..779e664b6
--- a/examples/data-sources/airbyte_source_strava/data-source.tf
+++ b/examples/data-sources/airbyte_source_strava/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_strava" "my_source_strava" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_stripe/data-source.tf b/examples/data-sources/airbyte_source_stripe/data-source.tf
old mode 100755
new mode 100644
index d4c23d26b..77931e17a
--- a/examples/data-sources/airbyte_source_stripe/data-source.tf
+++ b/examples/data-sources/airbyte_source_stripe/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_stripe" "my_source_stripe" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_survey_sparrow/data-source.tf b/examples/data-sources/airbyte_source_survey_sparrow/data-source.tf
old mode 100755
new mode 100644
index 05be31b7e..a6ceed413
--- a/examples/data-sources/airbyte_source_survey_sparrow/data-source.tf
+++ b/examples/data-sources/airbyte_source_survey_sparrow/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_survey_sparrow" "my_source_surveysparrow" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_surveymonkey/data-source.tf b/examples/data-sources/airbyte_source_surveymonkey/data-source.tf
old mode 100755
new mode 100644
index feca1561c..c972dd4d7
--- a/examples/data-sources/airbyte_source_surveymonkey/data-source.tf
+++ b/examples/data-sources/airbyte_source_surveymonkey/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_surveymonkey" "my_source_surveymonkey" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_tempo/data-source.tf b/examples/data-sources/airbyte_source_tempo/data-source.tf
old mode 100755
new mode 100644
index a66c96fb2..a945a354d
--- a/examples/data-sources/airbyte_source_tempo/data-source.tf
+++ b/examples/data-sources/airbyte_source_tempo/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_tempo" "my_source_tempo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_the_guardian_api/data-source.tf b/examples/data-sources/airbyte_source_the_guardian_api/data-source.tf
old mode 100755
new mode 100644
index e25a89de7..64a3d1f33
--- a/examples/data-sources/airbyte_source_the_guardian_api/data-source.tf
+++ b/examples/data-sources/airbyte_source_the_guardian_api/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_the_guardian_api" "my_source_theguardianapi" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_tiktok_marketing/data-source.tf b/examples/data-sources/airbyte_source_tiktok_marketing/data-source.tf
old mode 100755
new mode 100644
index d06722963..e09468c6e
--- a/examples/data-sources/airbyte_source_tiktok_marketing/data-source.tf
+++ b/examples/data-sources/airbyte_source_tiktok_marketing/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_tiktok_marketing" "my_source_tiktokmarketing" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_todoist/data-source.tf b/examples/data-sources/airbyte_source_todoist/data-source.tf
old mode 100755
new mode 100644
index 9c04805ba..e476672e1
--- a/examples/data-sources/airbyte_source_todoist/data-source.tf
+++ b/examples/data-sources/airbyte_source_todoist/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_todoist" "my_source_todoist" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_trello/data-source.tf b/examples/data-sources/airbyte_source_trello/data-source.tf
old mode 100755
new mode 100644
index de64325e9..18c51468d
--- a/examples/data-sources/airbyte_source_trello/data-source.tf
+++ b/examples/data-sources/airbyte_source_trello/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_trello" "my_source_trello" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_trustpilot/data-source.tf b/examples/data-sources/airbyte_source_trustpilot/data-source.tf
old mode 100755
new mode 100644
index c2d7f937c..0d1871b20
--- a/examples/data-sources/airbyte_source_trustpilot/data-source.tf
+++ b/examples/data-sources/airbyte_source_trustpilot/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_trustpilot" "my_source_trustpilot" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_tvmaze_schedule/data-source.tf b/examples/data-sources/airbyte_source_tvmaze_schedule/data-source.tf
old mode 100755
new mode 100644
index db182ca49..7000fdb77
--- a/examples/data-sources/airbyte_source_tvmaze_schedule/data-source.tf
+++ b/examples/data-sources/airbyte_source_tvmaze_schedule/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_tvmaze_schedule" "my_source_tvmazeschedule" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_twilio/data-source.tf b/examples/data-sources/airbyte_source_twilio/data-source.tf
old mode 100755
new mode 100644
index df80e0e5a..c1105d023
--- a/examples/data-sources/airbyte_source_twilio/data-source.tf
+++ b/examples/data-sources/airbyte_source_twilio/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_twilio" "my_source_twilio" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_twilio_taskrouter/data-source.tf b/examples/data-sources/airbyte_source_twilio_taskrouter/data-source.tf
old mode 100755
new mode 100644
index e3c4752fd..be32401d6
--- a/examples/data-sources/airbyte_source_twilio_taskrouter/data-source.tf
+++ b/examples/data-sources/airbyte_source_twilio_taskrouter/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_twilio_taskrouter" "my_source_twiliotaskrouter" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_twitter/data-source.tf b/examples/data-sources/airbyte_source_twitter/data-source.tf
old mode 100755
new mode 100644
index 4d4e5c9ea..c005ab1a9
--- a/examples/data-sources/airbyte_source_twitter/data-source.tf
+++ b/examples/data-sources/airbyte_source_twitter/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_twitter" "my_source_twitter" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_typeform/data-source.tf b/examples/data-sources/airbyte_source_typeform/data-source.tf
old mode 100755
new mode 100644
index 6a17566fb..7cbdb35fe
--- a/examples/data-sources/airbyte_source_typeform/data-source.tf
+++ b/examples/data-sources/airbyte_source_typeform/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_typeform" "my_source_typeform" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_us_census/data-source.tf b/examples/data-sources/airbyte_source_us_census/data-source.tf
old mode 100755
new mode 100644
index e443a8d7f..0ba35b99c
--- a/examples/data-sources/airbyte_source_us_census/data-source.tf
+++ b/examples/data-sources/airbyte_source_us_census/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_us_census" "my_source_uscensus" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_vantage/data-source.tf b/examples/data-sources/airbyte_source_vantage/data-source.tf
old mode 100755
new mode 100644
index b6a4dc214..f6e972b9f
--- a/examples/data-sources/airbyte_source_vantage/data-source.tf
+++ b/examples/data-sources/airbyte_source_vantage/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_vantage" "my_source_vantage" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_webflow/data-source.tf b/examples/data-sources/airbyte_source_webflow/data-source.tf
old mode 100755
new mode 100644
index cba619f2f..b4f2c001b
--- a/examples/data-sources/airbyte_source_webflow/data-source.tf
+++ b/examples/data-sources/airbyte_source_webflow/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_webflow" "my_source_webflow" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_whisky_hunter/data-source.tf b/examples/data-sources/airbyte_source_whisky_hunter/data-source.tf
old mode 100755
new mode 100644
index 08d065a57..9edd173f8
--- a/examples/data-sources/airbyte_source_whisky_hunter/data-source.tf
+++ b/examples/data-sources/airbyte_source_whisky_hunter/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_whisky_hunter" "my_source_whiskyhunter" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_wikipedia_pageviews/data-source.tf b/examples/data-sources/airbyte_source_wikipedia_pageviews/data-source.tf
old mode 100755
new mode 100644
index 6d2c9195a..04a099a00
--- a/examples/data-sources/airbyte_source_wikipedia_pageviews/data-source.tf
+++ b/examples/data-sources/airbyte_source_wikipedia_pageviews/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_wikipedia_pageviews" "my_source_wikipediapageviews" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_woocommerce/data-source.tf b/examples/data-sources/airbyte_source_woocommerce/data-source.tf
old mode 100755
new mode 100644
index ca3d751bf..782b86cd4
--- a/examples/data-sources/airbyte_source_woocommerce/data-source.tf
+++ b/examples/data-sources/airbyte_source_woocommerce/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_woocommerce" "my_source_woocommerce" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_xero/data-source.tf b/examples/data-sources/airbyte_source_xero/data-source.tf
deleted file mode 100755
index 1afde77bd..000000000
--- a/examples/data-sources/airbyte_source_xero/data-source.tf
+++ /dev/null
@@ -1,4 +0,0 @@
-data "airbyte_source_xero" "my_source_xero" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_xkcd/data-source.tf b/examples/data-sources/airbyte_source_xkcd/data-source.tf
old mode 100755
new mode 100644
index 2836f83b6..287fd2e0b
--- a/examples/data-sources/airbyte_source_xkcd/data-source.tf
+++ b/examples/data-sources/airbyte_source_xkcd/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_xkcd" "my_source_xkcd" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_yandex_metrica/data-source.tf b/examples/data-sources/airbyte_source_yandex_metrica/data-source.tf
old mode 100755
new mode 100644
index 460f19bfb..1c0f11608
--- a/examples/data-sources/airbyte_source_yandex_metrica/data-source.tf
+++ b/examples/data-sources/airbyte_source_yandex_metrica/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_yandex_metrica" "my_source_yandexmetrica" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_yotpo/data-source.tf b/examples/data-sources/airbyte_source_yotpo/data-source.tf
old mode 100755
new mode 100644
index 6a13151a7..4661d7d84
--- a/examples/data-sources/airbyte_source_yotpo/data-source.tf
+++ b/examples/data-sources/airbyte_source_yotpo/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_yotpo" "my_source_yotpo" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_younium/data-source.tf b/examples/data-sources/airbyte_source_younium/data-source.tf
deleted file mode 100755
index 874e7b961..000000000
--- a/examples/data-sources/airbyte_source_younium/data-source.tf
+++ /dev/null
@@ -1,4 +0,0 @@
-data "airbyte_source_younium" "my_source_younium" {
- secret_id = "...my_secret_id..."
- source_id = "...my_source_id..."
-}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_youtube_analytics/data-source.tf b/examples/data-sources/airbyte_source_youtube_analytics/data-source.tf
old mode 100755
new mode 100644
index e881cc75e..46d106757
--- a/examples/data-sources/airbyte_source_youtube_analytics/data-source.tf
+++ b/examples/data-sources/airbyte_source_youtube_analytics/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_youtube_analytics" "my_source_youtubeanalytics" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_zendesk_chat/data-source.tf b/examples/data-sources/airbyte_source_zendesk_chat/data-source.tf
old mode 100755
new mode 100644
index d52de7ada..4951901c1
--- a/examples/data-sources/airbyte_source_zendesk_chat/data-source.tf
+++ b/examples/data-sources/airbyte_source_zendesk_chat/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_zendesk_chat" "my_source_zendeskchat" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_zendesk_sell/data-source.tf b/examples/data-sources/airbyte_source_zendesk_sell/data-source.tf
new file mode 100644
index 000000000..ecb04ca59
--- /dev/null
+++ b/examples/data-sources/airbyte_source_zendesk_sell/data-source.tf
@@ -0,0 +1,3 @@
+data "airbyte_source_zendesk_sell" "my_source_zendesksell" {
+ source_id = "...my_source_id..."
+}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_zendesk_sunshine/data-source.tf b/examples/data-sources/airbyte_source_zendesk_sunshine/data-source.tf
old mode 100755
new mode 100644
index d438b53f3..ba70fc6b5
--- a/examples/data-sources/airbyte_source_zendesk_sunshine/data-source.tf
+++ b/examples/data-sources/airbyte_source_zendesk_sunshine/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_zendesk_sunshine" "my_source_zendesksunshine" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_zendesk_support/data-source.tf b/examples/data-sources/airbyte_source_zendesk_support/data-source.tf
old mode 100755
new mode 100644
index c981986d2..837b8f121
--- a/examples/data-sources/airbyte_source_zendesk_support/data-source.tf
+++ b/examples/data-sources/airbyte_source_zendesk_support/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_zendesk_support" "my_source_zendesksupport" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_zendesk_talk/data-source.tf b/examples/data-sources/airbyte_source_zendesk_talk/data-source.tf
old mode 100755
new mode 100644
index 851133981..8bc230053
--- a/examples/data-sources/airbyte_source_zendesk_talk/data-source.tf
+++ b/examples/data-sources/airbyte_source_zendesk_talk/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_zendesk_talk" "my_source_zendesktalk" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_zenloop/data-source.tf b/examples/data-sources/airbyte_source_zenloop/data-source.tf
old mode 100755
new mode 100644
index eca8d6661..2257d2be9
--- a/examples/data-sources/airbyte_source_zenloop/data-source.tf
+++ b/examples/data-sources/airbyte_source_zenloop/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_zenloop" "my_source_zenloop" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_zoho_crm/data-source.tf b/examples/data-sources/airbyte_source_zoho_crm/data-source.tf
old mode 100755
new mode 100644
index 6e9f831d4..f55dc0b8d
--- a/examples/data-sources/airbyte_source_zoho_crm/data-source.tf
+++ b/examples/data-sources/airbyte_source_zoho_crm/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_zoho_crm" "my_source_zohocrm" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_zoom/data-source.tf b/examples/data-sources/airbyte_source_zoom/data-source.tf
old mode 100755
new mode 100644
index 06c0e737a..296f2b548
--- a/examples/data-sources/airbyte_source_zoom/data-source.tf
+++ b/examples/data-sources/airbyte_source_zoom/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_zoom" "my_source_zoom" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_source_zuora/data-source.tf b/examples/data-sources/airbyte_source_zuora/data-source.tf
old mode 100755
new mode 100644
index 451651f8f..c0d1b4a4a
--- a/examples/data-sources/airbyte_source_zuora/data-source.tf
+++ b/examples/data-sources/airbyte_source_zuora/data-source.tf
@@ -1,4 +1,3 @@
data "airbyte_source_zuora" "my_source_zuora" {
- secret_id = "...my_secret_id..."
source_id = "...my_source_id..."
}
\ No newline at end of file
diff --git a/examples/data-sources/airbyte_workspace/data-source.tf b/examples/data-sources/airbyte_workspace/data-source.tf
old mode 100755
new mode 100644
diff --git a/examples/provider/provider.tf b/examples/provider/provider.tf
old mode 100755
new mode 100644
index 612369c85..72414a797
--- a/examples/provider/provider.tf
+++ b/examples/provider/provider.tf
@@ -2,7 +2,7 @@ terraform {
required_providers {
airbyte = {
source = "airbytehq/airbyte"
- version = "0.3.4"
+ version = "0.3.5"
}
}
}
diff --git a/examples/resources/airbyte_connection/resource.tf b/examples/resources/airbyte_connection/resource.tf
old mode 100755
new mode 100644
index bdbe1cce8..fbbe4073f
--- a/examples/resources/airbyte_connection/resource.tf
+++ b/examples/resources/airbyte_connection/resource.tf
@@ -5,7 +5,7 @@ resource "airbyte_connection" "my_connection" {
cursor_field = [
"...",
]
- name = "Terrence Rau"
+ name = "Cecil Johnson"
primary_key = [
[
"...",
@@ -15,18 +15,18 @@ resource "airbyte_connection" "my_connection" {
},
]
}
- data_residency = "us"
- destination_id = "d69a674e-0f46-47cc-8796-ed151a05dfc2"
- name = "Wilfred Wolff"
- namespace_definition = "custom_format"
+ data_residency = "auto"
+ destination_id = "e362083e-afc8-4559-94e0-a570f6dd427d"
+ name = "Melvin O'Connell"
+ namespace_definition = "source"
namespace_format = SOURCE_NAMESPACE
- non_breaking_schema_updates_behavior = "disable_connection"
+ non_breaking_schema_updates_behavior = "propagate_columns"
prefix = "...my_prefix..."
schedule = {
basic_timing = "...my_basic_timing..."
cron_expression = "...my_cron_expression..."
- schedule_type = "cron"
+ schedule_type = "manual"
}
- source_id = "ca1ba928-fc81-4674-acb7-39205929396f"
+ source_id = "78358423-25b6-4c7b-bfd2-fd307d60cb97"
status = "deprecated"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_aws_datalake/resource.tf b/examples/resources/airbyte_destination_aws_datalake/resource.tf
old mode 100755
new mode 100644
index 06650d0bf..90651d9ba
--- a/examples/resources/airbyte_destination_aws_datalake/resource.tf
+++ b/examples/resources/airbyte_destination_aws_datalake/resource.tf
@@ -4,26 +4,25 @@ resource "airbyte_destination_aws_datalake" "my_destination_awsdatalake" {
bucket_name = "...my_bucket_name..."
bucket_prefix = "...my_bucket_prefix..."
credentials = {
- destination_aws_datalake_authentication_mode_iam_role = {
- credentials_title = "IAM Role"
- role_arn = "...my_role_arn..."
+ iam_role = {
+ role_arn = "...my_role_arn..."
}
}
- destination_type = "aws-datalake"
format = {
- destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json = {
+ json_lines_newline_delimited_json = {
compression_codec = "GZIP"
format_type = "JSONL"
}
}
- glue_catalog_float_as_decimal = true
+ glue_catalog_float_as_decimal = false
lakeformation_database_default_tag_key = "pii_level"
lakeformation_database_default_tag_values = "private,public"
lakeformation_database_name = "...my_lakeformation_database_name..."
- lakeformation_governed_tables = true
- partitioning = "DAY"
- region = "ap-southeast-1"
+ lakeformation_governed_tables = false
+ partitioning = "YEAR/MONTH/DAY"
+ region = "eu-west-1"
}
- name = "Dr. Rickey Boyle"
- workspace_id = "aa2352c5-9559-407a-bf1a-3a2fa9467739"
+ definition_id = "635b80f2-a9b0-4de1-897a-c8629f5a79ed"
+ name = "Blanche MacGyver"
+ workspace_id = "e76a2f8d-fb9a-4ea6-8f38-6615e68b5c3f"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_azure_blob_storage/resource.tf b/examples/resources/airbyte_destination_azure_blob_storage/resource.tf
old mode 100755
new mode 100644
index e95267fb3..ddfcc1a5b
--- a/examples/resources/airbyte_destination_azure_blob_storage/resource.tf
+++ b/examples/resources/airbyte_destination_azure_blob_storage/resource.tf
@@ -6,14 +6,13 @@ resource "airbyte_destination_azure_blob_storage" "my_destination_azureblobstora
azure_blob_storage_endpoint_domain_name = "blob.core.windows.net"
azure_blob_storage_output_buffer_size = 5
azure_blob_storage_spill_size = 500
- destination_type = "azure-blob-storage"
format = {
- destination_azure_blob_storage_output_format_csv_comma_separated_values = {
- flattening = "No flattening"
- format_type = "CSV"
+ csv_comma_separated_values = {
+ flattening = "No flattening"
}
}
}
- name = "Matt Hamill"
- workspace_id = "3f5ad019-da1f-4fe7-8f09-7b0074f15471"
+ definition_id = "b38acf3b-23ea-44e3-abf4-ba0e7ac63cda"
+ name = "Rogelio Purdy"
+ workspace_id = "cd76c9fd-07c9-468d-acb9-cb44c87d9163"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_bigquery/resource.tf b/examples/resources/airbyte_destination_bigquery/resource.tf
old mode 100755
new mode 100644
index 52238cfc1..af78f6ee6
--- a/examples/resources/airbyte_destination_bigquery/resource.tf
+++ b/examples/resources/airbyte_destination_bigquery/resource.tf
@@ -3,28 +3,26 @@ resource "airbyte_destination_bigquery" "my_destination_bigquery" {
big_query_client_buffer_size_mb = 15
credentials_json = "...my_credentials_json..."
dataset_id = "...my_dataset_id..."
- dataset_location = "australia-southeast2"
- destination_type = "bigquery"
+ dataset_location = "me-central2"
+ disable_type_dedupe = true
loading_method = {
- destination_bigquery_loading_method_gcs_staging = {
+ gcs_staging = {
credential = {
- destination_bigquery_loading_method_gcs_staging_credential_hmac_key = {
- credential_type = "HMAC_KEY"
+ destination_bigquery_hmac_key = {
hmac_key_access_id = "1234567890abcdefghij1234"
hmac_key_secret = "1234567890abcdefghij1234567890ABCDEFGHIJ"
}
}
- file_buffer_count = 10
gcs_bucket_name = "airbyte_sync"
gcs_bucket_path = "data_sync/test"
- keep_files_in_gcs_bucket = "Delete all tmp files from GCS"
- method = "GCS Staging"
+ keep_files_in_gcs_bucket = "Keep all tmp files in GCS"
}
}
project_id = "...my_project_id..."
raw_data_dataset = "...my_raw_data_dataset..."
transformation_priority = "batch"
}
- name = "Edna Pouros"
- workspace_id = "d488e1e9-1e45-40ad-aabd-44269802d502"
+ definition_id = "2d142842-c5e9-475e-80d1-1a3c6d933cc0"
+ name = "Miss Celia Moore"
+ workspace_id = "2d2700dc-d43a-4c80-9ede-88b16b5e1575"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_bigquery_denormalized/resource.tf b/examples/resources/airbyte_destination_bigquery_denormalized/resource.tf
deleted file mode 100755
index 510a75b11..000000000
--- a/examples/resources/airbyte_destination_bigquery_denormalized/resource.tf
+++ /dev/null
@@ -1,28 +0,0 @@
-resource "airbyte_destination_bigquery_denormalized" "my_destination_bigquerydenormalized" {
- configuration = {
- big_query_client_buffer_size_mb = 15
- credentials_json = "...my_credentials_json..."
- dataset_id = "...my_dataset_id..."
- dataset_location = "europe-west7"
- destination_type = "bigquery-denormalized"
- loading_method = {
- destination_bigquery_denormalized_loading_method_gcs_staging = {
- credential = {
- destination_bigquery_denormalized_loading_method_gcs_staging_credential_hmac_key = {
- credential_type = "HMAC_KEY"
- hmac_key_access_id = "1234567890abcdefghij1234"
- hmac_key_secret = "1234567890abcdefghij1234567890ABCDEFGHIJ"
- }
- }
- file_buffer_count = 10
- gcs_bucket_name = "airbyte_sync"
- gcs_bucket_path = "data_sync/test"
- keep_files_in_gcs_bucket = "Keep all tmp files in GCS"
- method = "GCS Staging"
- }
- }
- project_id = "...my_project_id..."
- }
- name = "Francisco Windler"
- workspace_id = "c969e9a3-efa7-47df-b14c-d66ae395efb9"
-}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_clickhouse/resource.tf b/examples/resources/airbyte_destination_clickhouse/resource.tf
old mode 100755
new mode 100644
index 0f5974c9e..6378de374
--- a/examples/resources/airbyte_destination_clickhouse/resource.tf
+++ b/examples/resources/airbyte_destination_clickhouse/resource.tf
@@ -1,18 +1,16 @@
resource "airbyte_destination_clickhouse" "my_destination_clickhouse" {
configuration = {
- database = "...my_database..."
- destination_type = "clickhouse"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 8123
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 8123
tunnel_method = {
- destination_clickhouse_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ no_tunnel = {}
}
- username = "Magdalena_Kuvalis"
+ username = "Rhianna_Leannon"
}
- name = "Sandy Huels"
- workspace_id = "97074ba4-469b-46e2-9419-59890afa563e"
+ definition_id = "2c276398-b468-48ad-b426-53c327fa18b5"
+ name = "Gerardo Corwin"
+ workspace_id = "4f41e22e-39b6-461a-89af-71290b2c6d65"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_convex/resource.tf b/examples/resources/airbyte_destination_convex/resource.tf
old mode 100755
new mode 100644
index be190f800..1ab2ab647
--- a/examples/resources/airbyte_destination_convex/resource.tf
+++ b/examples/resources/airbyte_destination_convex/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_destination_convex" "my_destination_convex" {
configuration = {
- access_key = "...my_access_key..."
- deployment_url = "https://murky-swan-635.convex.cloud"
- destination_type = "convex"
+ access_key = "...my_access_key..."
+ deployment_url = "https://cluttered-owl-337.convex.cloud"
}
- name = "Joyce Kertzmann"
- workspace_id = "4c8b711e-5b7f-4d2e-9028-921cddc69260"
+ definition_id = "335e03ab-ebb7-41b5-8e87-2ec68b6d2a9c"
+ name = "Patsy Powlowski"
+ workspace_id = "6941566f-22fd-430a-a8af-8c1d27b3e573"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_cumulio/resource.tf b/examples/resources/airbyte_destination_cumulio/resource.tf
old mode 100755
new mode 100644
index 328e8df4d..622489c19
--- a/examples/resources/airbyte_destination_cumulio/resource.tf
+++ b/examples/resources/airbyte_destination_cumulio/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_destination_cumulio" "my_destination_cumulio" {
configuration = {
- api_host = "...my_api_host..."
- api_key = "...my_api_key..."
- api_token = "...my_api_token..."
- destination_type = "cumulio"
+ api_host = "...my_api_host..."
+ api_key = "...my_api_key..."
+ api_token = "...my_api_token..."
}
- name = "Ebony Predovic"
- workspace_id = "6b0d5f0d-30c5-4fbb-a587-053202c73d5f"
+ definition_id = "c0eb8223-613d-423c-a875-293aec4aa100"
+ name = "Felipe Champlin"
+ workspace_id = "22581a88-452d-4e7c-b5eb-92a9e952da29"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_databend/resource.tf b/examples/resources/airbyte_destination_databend/resource.tf
old mode 100755
new mode 100644
index b42cdb76b..641ec57d4
--- a/examples/resources/airbyte_destination_databend/resource.tf
+++ b/examples/resources/airbyte_destination_databend/resource.tf
@@ -1,13 +1,13 @@
resource "airbyte_destination_databend" "my_destination_databend" {
configuration = {
- database = "...my_database..."
- destination_type = "databend"
- host = "...my_host..."
- password = "...my_password..."
- port = 443
- table = "default"
- username = "Leo.Purdy"
+ database = "...my_database..."
+ host = "...my_host..."
+ password = "...my_password..."
+ port = 443
+ table = "default"
+ username = "Kira78"
}
- name = "Bobby Kutch V"
- workspace_id = "b3fe49a8-d9cb-4f48-a333-23f9b77f3a41"
+ definition_id = "006aecee-7c88-4461-9655-998ae24eec56"
+ name = "Josefina Rosenbaum"
+ workspace_id = "48d71917-bd77-4158-87e0-4c579843cbfb"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_databricks/resource.tf b/examples/resources/airbyte_destination_databricks/resource.tf
old mode 100755
new mode 100644
index 33aed18f4..54403b49c
--- a/examples/resources/airbyte_destination_databricks/resource.tf
+++ b/examples/resources/airbyte_destination_databricks/resource.tf
@@ -2,20 +2,18 @@ resource "airbyte_destination_databricks" "my_destination_databricks" {
configuration = {
accept_terms = false
data_source = {
- destination_databricks_data_source_recommended_managed_tables = {
- data_source_type = "MANAGED_TABLES_STORAGE"
- }
+ recommended_managed_tables = {}
}
database = "...my_database..."
databricks_http_path = "sql/protocolvx/o/1234567489/0000-1111111-abcd90"
databricks_personal_access_token = "dapi0123456789abcdefghij0123456789AB"
databricks_port = "443"
databricks_server_hostname = "abc-12345678-wxyz.cloud.databricks.com"
- destination_type = "databricks"
enable_schema_evolution = true
purge_staging_data = false
schema = "default"
}
- name = "Bertha Thompson"
- workspace_id = "69280d1b-a77a-489e-bf73-7ae4203ce5e6"
+ definition_id = "05d7306c-fa6f-460b-bc11-e74f736d7a95"
+ name = "Meghan Mitchell"
+ workspace_id = "4c049945-edd6-4e95-a416-d119e802e071"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_dev_null/resource.tf b/examples/resources/airbyte_destination_dev_null/resource.tf
old mode 100755
new mode 100644
index 2f870d3a4..85ddff7eb
--- a/examples/resources/airbyte_destination_dev_null/resource.tf
+++ b/examples/resources/airbyte_destination_dev_null/resource.tf
@@ -1,12 +1,10 @@
resource "airbyte_destination_dev_null" "my_destination_devnull" {
configuration = {
- destination_type = "dev-null"
test_destination = {
- destination_dev_null_test_destination_silent = {
- test_destination_type = "SILENT"
- }
+ silent = {}
}
}
- name = "Rene Hane"
- workspace_id = "a0d446ce-2af7-4a73-8f3b-e453f870b326"
+ definition_id = "29d4644f-9dd3-4d54-87cf-b82ef1e01ef5"
+ name = "Megan King"
+ workspace_id = "9e2c85c9-04a2-403f-b157-a47112db1eec"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_duckdb/resource.tf b/examples/resources/airbyte_destination_duckdb/resource.tf
new file mode 100644
index 000000000..f500e7d2f
--- /dev/null
+++ b/examples/resources/airbyte_destination_duckdb/resource.tf
@@ -0,0 +1,10 @@
+resource "airbyte_destination_duckdb" "my_destination_duckdb" {
+ configuration = {
+ destination_path = "motherduck:"
+ motherduck_api_key = "...my_motherduck_api_key..."
+ schema = "main"
+ }
+ definition_id = "9f91eb58-c332-4574-9699-3f062684640d"
+ name = "Bobbie Lang"
+ workspace_id = "d52cbff0-1858-4935-bdfe-2750539f4b80"
+}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_dynamodb/resource.tf b/examples/resources/airbyte_destination_dynamodb/resource.tf
old mode 100755
new mode 100644
index 51f90ef97..72023e8be
--- a/examples/resources/airbyte_destination_dynamodb/resource.tf
+++ b/examples/resources/airbyte_destination_dynamodb/resource.tf
@@ -1,12 +1,12 @@
resource "airbyte_destination_dynamodb" "my_destination_dynamodb" {
configuration = {
access_key_id = "A012345678910EXAMPLE"
- destination_type = "dynamodb"
dynamodb_endpoint = "http://localhost:9000"
- dynamodb_region = "eu-south-1"
+ dynamodb_region = "ap-southeast-1"
dynamodb_table_name_prefix = "airbyte_sync"
secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
}
- name = "Joanna Kohler"
- workspace_id = "29cdb1a8-422b-4b67-9d23-22715bf0cbb1"
+ definition_id = "f993efae-2dca-4f86-989d-ab1153f466f7"
+ name = "Ms. Larry Reynolds"
+ workspace_id = "5aa0db79-7942-4be7-a5f1-f78855663545"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_elasticsearch/resource.tf b/examples/resources/airbyte_destination_elasticsearch/resource.tf
old mode 100755
new mode 100644
index fb426e449..9d6997539
--- a/examples/resources/airbyte_destination_elasticsearch/resource.tf
+++ b/examples/resources/airbyte_destination_elasticsearch/resource.tf
@@ -1,17 +1,16 @@
resource "airbyte_destination_elasticsearch" "my_destination_elasticsearch" {
configuration = {
authentication_method = {
- destination_elasticsearch_authentication_method_api_key_secret = {
+ api_key_secret = {
api_key_id = "...my_api_key_id..."
api_key_secret = "...my_api_key_secret..."
- method = "secret"
}
}
- ca_certificate = "...my_ca_certificate..."
- destination_type = "elasticsearch"
- endpoint = "...my_endpoint..."
- upsert = true
+ ca_certificate = "...my_ca_certificate..."
+ endpoint = "...my_endpoint..."
+ upsert = false
}
- name = "Carolyn Rohan"
- workspace_id = "90f3443a-1108-4e0a-9cf4-b921879fce95"
+ definition_id = "da65ed46-5e75-48af-92ad-38ed7ed0e5e2"
+ name = "Katherine Considine"
+ workspace_id = "7d0e4e50-95ed-494b-8ecb-397d064562ef"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_firebolt/resource.tf b/examples/resources/airbyte_destination_firebolt/resource.tf
old mode 100755
new mode 100644
index 9ef3dbd3c..3184787db
--- a/examples/resources/airbyte_destination_firebolt/resource.tf
+++ b/examples/resources/airbyte_destination_firebolt/resource.tf
@@ -1,15 +1,13 @@
resource "airbyte_destination_firebolt" "my_destination_firebolt" {
configuration = {
- account = "...my_account..."
- database = "...my_database..."
- destination_type = "firebolt"
- engine = "...my_engine..."
- host = "api.app.firebolt.io"
+ account = "...my_account..."
+ database = "...my_database..."
+ engine = "...my_engine..."
+ host = "api.app.firebolt.io"
loading_method = {
- destination_firebolt_loading_method_external_table_via_s3 = {
+ external_table_via_s3 = {
aws_key_id = "...my_aws_key_id..."
aws_key_secret = "...my_aws_key_secret..."
- method = "S3"
s3_bucket = "...my_s3_bucket..."
s3_region = "us-east-1"
}
@@ -17,6 +15,7 @@ resource "airbyte_destination_firebolt" "my_destination_firebolt" {
password = "...my_password..."
username = "username@email.com"
}
- name = "Roman Kulas"
- workspace_id = "c7abd74d-d39c-40f5-92cf-f7c70a45626d"
+ definition_id = "d37ea6e5-cbc1-4c07-86ea-3ea494c42020"
+ name = "Jared Spencer"
+ workspace_id = "d1afa414-5a8e-4ad6-8436-1fa9c0130565"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_firestore/resource.tf b/examples/resources/airbyte_destination_firestore/resource.tf
old mode 100755
new mode 100644
index 27edf3869..ecc131f56
--- a/examples/resources/airbyte_destination_firestore/resource.tf
+++ b/examples/resources/airbyte_destination_firestore/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_destination_firestore" "my_destination_firestore" {
configuration = {
credentials_json = "...my_credentials_json..."
- destination_type = "firestore"
project_id = "...my_project_id..."
}
- name = "Paula Jacobs I"
- workspace_id = "f16d9f5f-ce6c-4556-946c-3e250fb008c4"
+ definition_id = "53a4e50c-dde3-4bcf-b11f-630fa923b2f8"
+ name = "Sheldon Bernhard"
+ workspace_id = "868bf037-297d-4cd6-abcb-9a13f0bea64a"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_gcs/resource.tf b/examples/resources/airbyte_destination_gcs/resource.tf
old mode 100755
new mode 100644
index 0b8651aa1..cc662139a
--- a/examples/resources/airbyte_destination_gcs/resource.tf
+++ b/examples/resources/airbyte_destination_gcs/resource.tf
@@ -1,17 +1,16 @@
resource "airbyte_destination_gcs" "my_destination_gcs" {
configuration = {
credential = {
- destination_gcs_authentication_hmac_key = {
+ hmac_key = {
credential_type = "HMAC_KEY"
hmac_key_access_id = "1234567890abcdefghij1234"
hmac_key_secret = "1234567890abcdefghij1234567890ABCDEFGHIJ"
}
}
- destination_type = "gcs"
format = {
- destination_gcs_output_format_avro_apache_avro = {
+ avro_apache_avro = {
compression_codec = {
- destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2 = {
+ bzip2 = {
codec = "bzip2"
}
}
@@ -22,6 +21,7 @@ resource "airbyte_destination_gcs" "my_destination_gcs" {
gcs_bucket_path = "data_sync/test"
gcs_bucket_region = "us-west1"
}
- name = "Miss Dennis Friesen"
- workspace_id = "c366c8dd-6b14-4429-8747-4778a7bd466d"
+ definition_id = "37e4a59e-7bfd-41d4-96bd-14d08d4a7d5d"
+ name = "Opal D'Amore"
+ workspace_id = "153b42c3-2f48-4f6e-943a-0f0f39a6c151"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_google_sheets/resource.tf b/examples/resources/airbyte_destination_google_sheets/resource.tf
old mode 100755
new mode 100644
index 4a1cf9bba..49c27ff23
--- a/examples/resources/airbyte_destination_google_sheets/resource.tf
+++ b/examples/resources/airbyte_destination_google_sheets/resource.tf
@@ -5,9 +5,9 @@ resource "airbyte_destination_google_sheets" "my_destination_googlesheets" {
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
}
- destination_type = "google-sheets"
- spreadsheet_id = "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit"
+ spreadsheet_id = "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit"
}
- name = "Mr. Irma Schaefer"
- workspace_id = "b3cdca42-5190-44e5-a3c7-e0bc7178e479"
+ definition_id = "a78cf13c-3589-4bc3-aaba-63d3987f09ed"
+ name = "Manuel Cronin IV"
+ workspace_id = "dddbef1f-87bb-4506-9e16-a5a735a4e180"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_keen/resource.tf b/examples/resources/airbyte_destination_keen/resource.tf
old mode 100755
new mode 100644
index d7ed98c5e..6490c38ed
--- a/examples/resources/airbyte_destination_keen/resource.tf
+++ b/examples/resources/airbyte_destination_keen/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_destination_keen" "my_destination_keen" {
configuration = {
- api_key = "ABCDEFGHIJKLMNOPRSTUWXYZ"
- destination_type = "keen"
- infer_timestamp = false
- project_id = "58b4acc22ba938934e888322e"
+ api_key = "ABCDEFGHIJKLMNOPRSTUWXYZ"
+ infer_timestamp = false
+ project_id = "58b4acc22ba938934e888322e"
}
- name = "Todd Oberbrunner DDS"
- workspace_id = "688282aa-4825-462f-a22e-9817ee17cbe6"
+ definition_id = "23f0d76f-b78b-4f74-ba22-de12791b5f13"
+ name = "Mr. Angelina Becker"
+ workspace_id = "49774ae8-7c30-4892-bfb0-f41f82248d60"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_kinesis/resource.tf b/examples/resources/airbyte_destination_kinesis/resource.tf
old mode 100755
new mode 100644
index c2e703465..8600e3458
--- a/examples/resources/airbyte_destination_kinesis/resource.tf
+++ b/examples/resources/airbyte_destination_kinesis/resource.tf
@@ -1,13 +1,13 @@
resource "airbyte_destination_kinesis" "my_destination_kinesis" {
configuration = {
- access_key = "...my_access_key..."
- buffer_size = 1
- destination_type = "kinesis"
- endpoint = "kinesis.us‑west‑1.amazonaws.com"
- private_key = "...my_private_key..."
- region = "us‑west‑1"
- shard_count = 9
+ access_key = "...my_access_key..."
+ buffer_size = 1
+ endpoint = "kinesis.us‑west‑1.amazonaws.com"
+ private_key = "...my_private_key..."
+ region = "us‑west‑1"
+ shard_count = 1
}
- name = "Opal Kozey"
- workspace_id = "5bc0ab3c-20c4-4f37-89fd-871f99dd2efd"
+ definition_id = "83384bd8-7b5c-4ce3-a148-54333df23c5e"
+ name = "Mary Monahan"
+ workspace_id = "52521a04-7878-4c25-8cd1-84fd116e75f1"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_langchain/resource.tf b/examples/resources/airbyte_destination_langchain/resource.tf
old mode 100755
new mode 100644
index 2954d8c7f..ad8ef167d
--- a/examples/resources/airbyte_destination_langchain/resource.tf
+++ b/examples/resources/airbyte_destination_langchain/resource.tf
@@ -1,26 +1,23 @@
resource "airbyte_destination_langchain" "my_destination_langchain" {
configuration = {
- destination_type = "langchain"
embedding = {
- destination_langchain_embedding_fake = {
- mode = "fake"
- }
+ fake = {}
}
indexing = {
- destination_langchain_indexing_chroma_local_persistance_ = {
+ chroma_local_persistance = {
collection_name = "...my_collection_name..."
destination_path = "/local/my_chroma_db"
- mode = "chroma_local"
}
}
processing = {
- chunk_overlap = 0
- chunk_size = 1
+ chunk_overlap = 8
+ chunk_size = 3
text_fields = [
"...",
]
}
}
- name = "Hattie Nader"
- workspace_id = "1e674bdb-04f1-4575-a082-d68ea19f1d17"
+ definition_id = "0c9ec767-47b0-46cf-86fe-4a6f8bb810ed"
+ name = "Megan Kertzmann"
+ workspace_id = "02e7b218-3b2b-4c4f-adb7-afdacad2c14c"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_milvus/resource.tf b/examples/resources/airbyte_destination_milvus/resource.tf
old mode 100755
new mode 100644
index 5530c8c8f..da761b544
--- a/examples/resources/airbyte_destination_milvus/resource.tf
+++ b/examples/resources/airbyte_destination_milvus/resource.tf
@@ -1,36 +1,47 @@
resource "airbyte_destination_milvus" "my_destination_milvus" {
configuration = {
- destination_type = "milvus"
embedding = {
- destination_milvus_embedding_cohere = {
- cohere_key = "...my_cohere_key..."
- mode = "cohere"
+ azure_open_ai = {
+ api_base = "https://your-resource-name.openai.azure.com"
+ deployment = "your-resource-name"
+ openai_key = "...my_openai_key..."
}
}
indexing = {
auth = {
- destination_milvus_indexing_authentication_api_token = {
- mode = "token"
+ destination_milvus_api_token = {
token = "...my_token..."
}
}
collection = "...my_collection..."
db = "...my_db..."
- host = "https://my-instance.zone.zillizcloud.com"
+ host = "tcp://my-local-milvus:19530"
text_field = "...my_text_field..."
vector_field = "...my_vector_field..."
}
processing = {
- chunk_overlap = 3
- chunk_size = 0
+ chunk_overlap = 1
+ chunk_size = 5
+ field_name_mappings = [
+ {
+ from_field = "...my_from_field..."
+ to_field = "...my_to_field..."
+ },
+ ]
metadata_fields = [
"...",
]
text_fields = [
"...",
]
+ text_splitter = {
+ by_markdown_header = {
+ split_level = 7
+ }
+ }
}
}
- name = "Sherry Morar IV"
- workspace_id = "086a1840-394c-4260-b1f9-3f5f0642dac7"
+ definition_id = "6683bb76-cbdd-442c-84b7-b603cc8cd887"
+ name = "Mr. Karl Jacobson"
+ workspace_id = "13ef7fc0-d176-4e5f-8145-49f1242182d1"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_mongodb/resource.tf b/examples/resources/airbyte_destination_mongodb/resource.tf
old mode 100755
new mode 100644
index 6370f8749..81d987260
--- a/examples/resources/airbyte_destination_mongodb/resource.tf
+++ b/examples/resources/airbyte_destination_mongodb/resource.tf
@@ -1,26 +1,23 @@
resource "airbyte_destination_mongodb" "my_destination_mongodb" {
configuration = {
auth_type = {
- destination_mongodb_authorization_type_login_password = {
- authorization = "login/password"
- password = "...my_password..."
- username = "Lucienne.Yundt"
+ login_password = {
+ password = "...my_password..."
+ username = "Emmalee.Towne89"
}
}
- database = "...my_database..."
- destination_type = "mongodb"
+ database = "...my_database..."
instance_type = {
- destination_mongodb_mongo_db_instance_type_mongo_db_atlas = {
+ mongo_db_atlas = {
cluster_url = "...my_cluster_url..."
instance = "atlas"
}
}
tunnel_method = {
- destination_mongodb_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_mongodb_no_tunnel = {}
}
}
- name = "Robyn Schmitt I"
- workspace_id = "aa63aae8-d678-464d-bb67-5fd5e60b375e"
+ definition_id = "895c9212-6184-452d-9432-f33897fec4ca"
+ name = "Adrienne Lockman"
+ workspace_id = "bf882725-c3c6-4bc3-9a6d-3f396b39ea0e"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_mssql/resource.tf b/examples/resources/airbyte_destination_mssql/resource.tf
old mode 100755
new mode 100644
index bfb5a45f1..947b52c34
--- a/examples/resources/airbyte_destination_mssql/resource.tf
+++ b/examples/resources/airbyte_destination_mssql/resource.tf
@@ -1,24 +1,20 @@
resource "airbyte_destination_mssql" "my_destination_mssql" {
configuration = {
- database = "...my_database..."
- destination_type = "mssql"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 1433
- schema = "public"
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 1433
+ schema = "public"
ssl_method = {
- destination_mssql_ssl_method_encrypted_trust_server_certificate_ = {
- ssl_method = "encrypted_trust_server_certificate"
- }
+ encrypted_trust_server_certificate = {}
}
tunnel_method = {
- destination_mssql_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_mssql_no_tunnel = {}
}
- username = "Desiree_Yost"
+ username = "Amalia.Blick"
}
- name = "Bert Treutel DVM"
- workspace_id = "33317fe3-5b60-4eb1-aa42-6555ba3c2874"
+ definition_id = "90e1a2bc-7de0-4ff6-b737-4915d3efc2cd"
+ name = "Jorge Beahan"
+ workspace_id = "6acc1e6f-1291-4560-8b55-b326e06d2448"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_mysql/resource.tf b/examples/resources/airbyte_destination_mysql/resource.tf
old mode 100755
new mode 100644
index 8827abfb9..93038abbd
--- a/examples/resources/airbyte_destination_mysql/resource.tf
+++ b/examples/resources/airbyte_destination_mysql/resource.tf
@@ -1,18 +1,16 @@
resource "airbyte_destination_mysql" "my_destination_mysql" {
configuration = {
- database = "...my_database..."
- destination_type = "mysql"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 3306
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 3306
tunnel_method = {
- destination_mysql_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_mysql_no_tunnel = {}
}
- username = "Sheldon.Smitham"
+ username = "Elissa16"
}
- name = "Guy Luettgen"
- workspace_id = "a8d8f5c0-b2f2-4fb7-b194-a276b26916fe"
+ definition_id = "a53050a9-afbc-466c-913a-5b78062a6a13"
+ name = "Nick Rogahn"
+ workspace_id = "63598ffb-0429-424f-aeae-5018c3193740"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_oracle/resource.tf b/examples/resources/airbyte_destination_oracle/resource.tf
old mode 100755
new mode 100644
index b0db9a2ae..4aad51db0
--- a/examples/resources/airbyte_destination_oracle/resource.tf
+++ b/examples/resources/airbyte_destination_oracle/resource.tf
@@ -1,19 +1,17 @@
resource "airbyte_destination_oracle" "my_destination_oracle" {
configuration = {
- destination_type = "oracle"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 1521
- schema = "airbyte"
- sid = "...my_sid..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 1521
+ schema = "airbyte"
+ sid = "...my_sid..."
tunnel_method = {
- destination_oracle_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_oracle_no_tunnel = {}
}
- username = "Viviane_Aufderhar"
+ username = "Abdullah_Ward15"
}
- name = "Tammy Medhurst"
- workspace_id = "3698f447-f603-4e8b-845e-80ca55efd20e"
+ definition_id = "2db6fe08-64a8-456a-8417-0ff8566dc323"
+ name = "Brittany Mohr"
+ workspace_id = "b07bf072-8b70-4775-98c6-7348eaa4356f"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_pinecone/resource.tf b/examples/resources/airbyte_destination_pinecone/resource.tf
old mode 100755
new mode 100644
index 0e6dcbdd4..f683d26b0
--- a/examples/resources/airbyte_destination_pinecone/resource.tf
+++ b/examples/resources/airbyte_destination_pinecone/resource.tf
@@ -1,28 +1,40 @@
resource "airbyte_destination_pinecone" "my_destination_pinecone" {
configuration = {
- destination_type = "pinecone"
embedding = {
- destination_pinecone_embedding_cohere = {
- cohere_key = "...my_cohere_key..."
- mode = "cohere"
+ destination_pinecone_azure_open_ai = {
+ api_base = "https://your-resource-name.openai.azure.com"
+ deployment = "your-resource-name"
+ openai_key = "...my_openai_key..."
}
}
indexing = {
index = "...my_index..."
- pinecone_environment = "...my_pinecone_environment..."
+ pinecone_environment = "us-west1-gcp"
pinecone_key = "...my_pinecone_key..."
}
processing = {
- chunk_overlap = 2
- chunk_size = 3
+ chunk_overlap = 6
+ chunk_size = 6
+ field_name_mappings = [
+ {
+ from_field = "...my_from_field..."
+ to_field = "...my_to_field..."
+ },
+ ]
metadata_fields = [
"...",
]
text_fields = [
"...",
]
+ text_splitter = {
+ destination_pinecone_by_markdown_header = {
+ split_level = 7
+ }
+ }
}
}
- name = "Cecelia Braun"
- workspace_id = "8b6a89fb-e3a5-4aa8-a482-4d0ab4075088"
+ definition_id = "d49dbc4f-abbf-4199-8382-023b4de2c1a7"
+ name = "Bobby Lemke"
+ workspace_id = "d3cde3c9-d6fa-494b-b4b9-38f85ce1dfc1"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_postgres/resource.tf b/examples/resources/airbyte_destination_postgres/resource.tf
old mode 100755
new mode 100644
index 21400b6a8..3ee97f989
--- a/examples/resources/airbyte_destination_postgres/resource.tf
+++ b/examples/resources/airbyte_destination_postgres/resource.tf
@@ -1,24 +1,20 @@
resource "airbyte_destination_postgres" "my_destination_postgres" {
configuration = {
- database = "...my_database..."
- destination_type = "postgres"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 5432
- schema = "public"
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 5432
+ schema = "public"
ssl_mode = {
- destination_postgres_ssl_modes_allow = {
- mode = "allow"
- }
+ allow = {}
}
tunnel_method = {
- destination_postgres_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_postgres_no_tunnel = {}
}
- username = "Foster.Borer"
+ username = "Burley_Kuhic"
}
- name = "Karen Kautzer"
- workspace_id = "904f3b11-94b8-4abf-a03a-79f9dfe0ab7d"
+ definition_id = "db19e64b-83f6-43d3-8837-0e173ec9d4f3"
+ name = "Dianna Dooley V"
+ workspace_id = "2a8a43c0-f29f-47cb-912b-320943801c36"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_pubsub/resource.tf b/examples/resources/airbyte_destination_pubsub/resource.tf
old mode 100755
new mode 100644
index 0aa235ea2..6ce3f5e56
--- a/examples/resources/airbyte_destination_pubsub/resource.tf
+++ b/examples/resources/airbyte_destination_pubsub/resource.tf
@@ -1,15 +1,15 @@
resource "airbyte_destination_pubsub" "my_destination_pubsub" {
configuration = {
- batching_delay_threshold = 7
+ batching_delay_threshold = 5
batching_element_count_threshold = 5
batching_enabled = true
batching_request_bytes_threshold = 3
credentials_json = "...my_credentials_json..."
- destination_type = "pubsub"
ordering_enabled = true
project_id = "...my_project_id..."
topic_id = "...my_topic_id..."
}
- name = "Phil Boyer"
- workspace_id = "f86bc173-d689-4eee-9526-f8d986e881ea"
+ definition_id = "b6294a31-a29a-4af3-8680-70eca1537042"
+ name = "Ada Harber"
+ workspace_id = "e54dc306-1658-46b7-b990-fea69beba7dc"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_qdrant/resource.tf b/examples/resources/airbyte_destination_qdrant/resource.tf
new file mode 100644
index 000000000..f5b3af169
--- /dev/null
+++ b/examples/resources/airbyte_destination_qdrant/resource.tf
@@ -0,0 +1,49 @@
+resource "airbyte_destination_qdrant" "my_destination_qdrant" {
+ configuration = {
+ embedding = {
+ destination_qdrant_azure_open_ai = {
+ api_base = "https://your-resource-name.openai.azure.com"
+ deployment = "your-resource-name"
+ openai_key = "...my_openai_key..."
+ }
+ }
+ indexing = {
+ auth_method = {
+ api_key_auth = {
+ api_key = "...my_api_key..."
+ }
+ }
+ collection = "...my_collection..."
+ distance_metric = {
+ cos = {}
+ }
+ prefer_grpc = true
+ text_field = "...my_text_field..."
+ url = "...my_url..."
+ }
+ processing = {
+ chunk_overlap = 8
+ chunk_size = 9
+ field_name_mappings = [
+ {
+ from_field = "...my_from_field..."
+ to_field = "...my_to_field..."
+ },
+ ]
+ metadata_fields = [
+ "...",
+ ]
+ text_fields = [
+ "...",
+ ]
+ text_splitter = {
+ destination_qdrant_by_markdown_header = {
+ split_level = 9
+ }
+ }
+ }
+ }
+ definition_id = "8f8d8392-aab1-45fb-858b-ad9ea7671d58"
+ name = "Kathryn O'Keefe"
+ workspace_id = "9de520ce-3420-4a29-9e5c-09962877b187"
+}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_redis/resource.tf b/examples/resources/airbyte_destination_redis/resource.tf
old mode 100755
new mode 100644
index f4b410ea8..88de9a5a9
--- a/examples/resources/airbyte_destination_redis/resource.tf
+++ b/examples/resources/airbyte_destination_redis/resource.tf
@@ -1,23 +1,19 @@
resource "airbyte_destination_redis" "my_destination_redis" {
configuration = {
- cache_type = "hash"
- destination_type = "redis"
- host = "localhost,127.0.0.1"
- password = "...my_password..."
- port = 9
- ssl = false
+ cache_type = "hash"
+ host = "localhost,127.0.0.1"
+ password = "...my_password..."
+ port = 7
+ ssl = false
ssl_mode = {
- destination_redis_ssl_modes_disable = {
- mode = "disable"
- }
+ destination_redis_disable = {}
}
tunnel_method = {
- destination_redis_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_redis_no_tunnel = {}
}
- username = "Vivianne.Baumbach3"
+ username = "Keyshawn.Ledner"
}
- name = "Bonnie Halvorson"
- workspace_id = "f94e29e9-73e9-422a-97a1-5be3e060807e"
+ definition_id = "34412bc3-217a-4cbe-aad9-f3186486fc7b"
+ name = "Shannon Stroman"
+ workspace_id = "848f4034-6c04-4b19-bfb2-8918e382726e"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_redshift/resource.tf b/examples/resources/airbyte_destination_redshift/resource.tf
old mode 100755
new mode 100644
index 069b130b6..48f3ffb74
--- a/examples/resources/airbyte_destination_redshift/resource.tf
+++ b/examples/resources/airbyte_destination_redshift/resource.tf
@@ -1,38 +1,34 @@
resource "airbyte_destination_redshift" "my_destination_redshift" {
configuration = {
- database = "...my_database..."
- destination_type = "redshift"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 5439
- schema = "public"
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 5439
+ schema = "public"
tunnel_method = {
- destination_redshift_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_redshift_no_tunnel = {}
}
uploading_method = {
- destination_redshift_uploading_method_s3_staging = {
+ s3_staging = {
access_key_id = "...my_access_key_id..."
encryption = {
- destination_redshift_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption = {
- encryption_type = "aes_cbc_envelope"
+ aes_cbc_envelope_encryption = {
key_encrypting_key = "...my_key_encrypting_key..."
}
}
file_buffer_count = 10
- file_name_pattern = "{timestamp}"
- method = "S3 Staging"
- purge_staging_data = false
+ file_name_pattern = "{date:yyyy_MM}"
+ purge_staging_data = true
s3_bucket_name = "airbyte.staging"
s3_bucket_path = "data_sync/test"
- s3_bucket_region = "us-west-2"
+ s3_bucket_region = "eu-west-1"
secret_access_key = "...my_secret_access_key..."
}
}
- username = "Margarette_Rau"
+ username = "Rollin_Ernser87"
}
- name = "Mrs. Geraldine Zulauf"
- workspace_id = "7a60ff2a-54a3-41e9-8764-a3e865e7956f"
+ definition_id = "1f9eaf9a-8e21-457a-8560-c89e77fd0c20"
+ name = "Linda Langworth"
+ workspace_id = "396de60f-942f-4937-a3c5-9508dd11c7ed"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_s3/resource.tf b/examples/resources/airbyte_destination_s3/resource.tf
old mode 100755
new mode 100644
index 4f2f1b546..3d4f22c90
--- a/examples/resources/airbyte_destination_s3/resource.tf
+++ b/examples/resources/airbyte_destination_s3/resource.tf
@@ -1,12 +1,11 @@
resource "airbyte_destination_s3" "my_destination_s3" {
configuration = {
access_key_id = "A012345678910EXAMPLE"
- destination_type = "s3"
- file_name_pattern = "{timestamp}"
+ file_name_pattern = "{date}"
format = {
- destination_s3_output_format_avro_apache_avro = {
+ destination_s3_avro_apache_avro = {
compression_codec = {
- destination_s3_output_format_avro_apache_avro_compression_codec_bzip2 = {
+ destination_s3_bzip2 = {
codec = "bzip2"
}
}
@@ -15,11 +14,12 @@ resource "airbyte_destination_s3" "my_destination_s3" {
}
s3_bucket_name = "airbyte_sync"
s3_bucket_path = "data_sync/test"
- s3_bucket_region = "us-west-1"
+ s3_bucket_region = "ap-southeast-1"
s3_endpoint = "http://localhost:9000"
s3_path_format = "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_"
secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
}
- name = "Joyce O'Kon"
- workspace_id = "9da660ff-57bf-4aad-8f9e-fc1b4512c103"
+ definition_id = "b1d5b002-89a0-4dc0-a329-a5cae9f38884"
+ name = "Lloyd Watsica"
+ workspace_id = "20ebb305-f362-44c4-b900-725fa3e33722"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_s3_glue/resource.tf b/examples/resources/airbyte_destination_s3_glue/resource.tf
old mode 100755
new mode 100644
index 50a910473..aaaed6f3a
--- a/examples/resources/airbyte_destination_s3_glue/resource.tf
+++ b/examples/resources/airbyte_destination_s3_glue/resource.tf
@@ -1,28 +1,28 @@
resource "airbyte_destination_s3_glue" "my_destination_s3glue" {
configuration = {
access_key_id = "A012345678910EXAMPLE"
- destination_type = "s3-glue"
- file_name_pattern = "{date}"
+ file_name_pattern = "{sync_id}"
format = {
- destination_s3_glue_output_format_json_lines_newline_delimited_json = {
+ destination_s3_glue_json_lines_newline_delimited_json = {
compression = {
- destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_gzip = {
+ destination_s3_glue_gzip = {
compression_type = "GZIP"
}
}
- flattening = "No flattening"
+ flattening = "Root level flattening"
format_type = "JSONL"
}
}
glue_database = "airbyte_database"
- glue_serialization_library = "org.openx.data.jsonserde.JsonSerDe"
+ glue_serialization_library = "org.apache.hive.hcatalog.data.JsonSerDe"
s3_bucket_name = "airbyte_sync"
s3_bucket_path = "data_sync/test"
- s3_bucket_region = "ca-central-1"
+ s3_bucket_region = "eu-central-1"
s3_endpoint = "http://localhost:9000"
s3_path_format = "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_"
secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
}
- name = "Edmund Daugherty"
- workspace_id = "15199ebf-d0e9-4fe6-8632-ca3aed011799"
+ definition_id = "2f8e06ef-6fed-4365-9e7d-5496735da213"
+ name = "Jordan Johnston"
+ workspace_id = "b9fef8f5-3876-4e3d-a30a-86e4df19faac"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_sftp_json/resource.tf b/examples/resources/airbyte_destination_sftp_json/resource.tf
old mode 100755
new mode 100644
index 81c008f85..2c0b8b111
--- a/examples/resources/airbyte_destination_sftp_json/resource.tf
+++ b/examples/resources/airbyte_destination_sftp_json/resource.tf
@@ -1,12 +1,12 @@
resource "airbyte_destination_sftp_json" "my_destination_sftpjson" {
configuration = {
destination_path = "/json_data"
- destination_type = "sftp-json"
host = "...my_host..."
password = "...my_password..."
port = 22
- username = "Dayton98"
+ username = "Deshawn10"
}
- name = "Terence Beer"
- workspace_id = "71778ff6-1d01-4747-a360-a15db6a66065"
+ definition_id = "846ef364-4196-4a04-bb96-66e7d15e7eed"
+ name = "Frederick Howell"
+ workspace_id = "586b689f-dc13-4c29-afcf-ab73b9ba5d30"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_snowflake/resource.tf b/examples/resources/airbyte_destination_snowflake/resource.tf
old mode 100755
new mode 100644
index 53828d8f3..f876fbde6
--- a/examples/resources/airbyte_destination_snowflake/resource.tf
+++ b/examples/resources/airbyte_destination_snowflake/resource.tf
@@ -1,22 +1,22 @@
resource "airbyte_destination_snowflake" "my_destination_snowflake" {
configuration = {
credentials = {
- destination_snowflake_authorization_method_key_pair_authentication = {
- auth_type = "Key Pair Authentication"
+ key_pair_authentication = {
private_key = "...my_private_key..."
private_key_password = "...my_private_key_password..."
}
}
- database = "AIRBYTE_DATABASE"
- destination_type = "snowflake"
- host = "accountname.snowflakecomputing.com"
- jdbc_url_params = "...my_jdbc_url_params..."
- raw_data_schema = "...my_raw_data_schema..."
- role = "AIRBYTE_ROLE"
- schema = "AIRBYTE_SCHEMA"
- username = "AIRBYTE_USER"
- warehouse = "AIRBYTE_WAREHOUSE"
+ database = "AIRBYTE_DATABASE"
+ disable_type_dedupe = true
+ host = "accountname.us-east-2.aws.snowflakecomputing.com"
+ jdbc_url_params = "...my_jdbc_url_params..."
+ raw_data_schema = "...my_raw_data_schema..."
+ role = "AIRBYTE_ROLE"
+ schema = "AIRBYTE_SCHEMA"
+ username = "AIRBYTE_USER"
+ warehouse = "AIRBYTE_WAREHOUSE"
}
- name = "Shaun Osinski"
- workspace_id = "851d6c64-5b08-4b61-891b-aa0fe1ade008"
+ definition_id = "d28dce71-d7fd-4713-a64c-8ab088c248e9"
+ name = "Robin Marvin"
+ workspace_id = "3407545d-5006-486d-84e6-08039bc7eb07"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_timeplus/resource.tf b/examples/resources/airbyte_destination_timeplus/resource.tf
old mode 100755
new mode 100644
index 993ad14c9..8e9d91f9f
--- a/examples/resources/airbyte_destination_timeplus/resource.tf
+++ b/examples/resources/airbyte_destination_timeplus/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_destination_timeplus" "my_destination_timeplus" {
configuration = {
- apikey = "...my_apikey..."
- destination_type = "timeplus"
- endpoint = "https://us.timeplus.cloud/workspace_id"
+ apikey = "...my_apikey..."
+ endpoint = "https://us.timeplus.cloud/workspace_id"
}
- name = "Ruben Williamson"
- workspace_id = "5f350d8c-db5a-4341-8143-010421813d52"
+ definition_id = "32a47524-bb49-40aa-b53a-d11902ba1888"
+ name = "Kimberly Cole V"
+ workspace_id = "d193af49-1985-4c92-933c-ae7edb401c23"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_typesense/resource.tf b/examples/resources/airbyte_destination_typesense/resource.tf
old mode 100755
new mode 100644
index 8ac02039f..413cc2c19
--- a/examples/resources/airbyte_destination_typesense/resource.tf
+++ b/examples/resources/airbyte_destination_typesense/resource.tf
@@ -1,12 +1,12 @@
resource "airbyte_destination_typesense" "my_destination_typesense" {
configuration = {
- api_key = "...my_api_key..."
- batch_size = 0
- destination_type = "typesense"
- host = "...my_host..."
- port = "...my_port..."
- protocol = "...my_protocol..."
+ api_key = "...my_api_key..."
+ batch_size = 6
+ host = "...my_host..."
+ port = "...my_port..."
+ protocol = "...my_protocol..."
}
- name = "Conrad Rutherford"
- workspace_id = "e253b668-451c-46c6-a205-e16deab3fec9"
+ definition_id = "e69c6f21-d654-4173-8ccb-bc51a3caa62e"
+ name = "Lorraine Kiehn"
+ workspace_id = "a0d33800-2a57-467f-8f37-9fa4011eae8d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_vertica/resource.tf b/examples/resources/airbyte_destination_vertica/resource.tf
old mode 100755
new mode 100644
index 17bdfbd5c..bbf6ba55c
--- a/examples/resources/airbyte_destination_vertica/resource.tf
+++ b/examples/resources/airbyte_destination_vertica/resource.tf
@@ -1,19 +1,17 @@
resource "airbyte_destination_vertica" "my_destination_vertica" {
configuration = {
- database = "...my_database..."
- destination_type = "vertica"
- host = "...my_host..."
- jdbc_url_params = "...my_jdbc_url_params..."
- password = "...my_password..."
- port = 5433
- schema = "...my_schema..."
+ database = "...my_database..."
+ host = "...my_host..."
+ jdbc_url_params = "...my_jdbc_url_params..."
+ password = "...my_password..."
+ port = 5433
+ schema = "...my_schema..."
tunnel_method = {
- destination_vertica_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ destination_vertica_no_tunnel = {}
}
- username = "Jackson.Kuvalis"
+ username = "Bailey26"
}
- name = "Ida Lubowitz"
- workspace_id = "73a8418d-1623-409f-b092-9921aefb9f58"
+ definition_id = "f7f4dcb2-8108-4584-a7e5-cd333285c7cc"
+ name = "Josefina Sporer"
+ workspace_id = "34f786aa-e3aa-4f52-bfe1-9eb1bf8ee233"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_weaviate/resource.tf b/examples/resources/airbyte_destination_weaviate/resource.tf
new file mode 100644
index 000000000..e69a4a693
--- /dev/null
+++ b/examples/resources/airbyte_destination_weaviate/resource.tf
@@ -0,0 +1,52 @@
+resource "airbyte_destination_weaviate" "my_destination_weaviate" {
+ configuration = {
+ embedding = {
+ destination_weaviate_azure_open_ai = {
+ api_base = "https://your-resource-name.openai.azure.com"
+ deployment = "your-resource-name"
+ openai_key = "...my_openai_key..."
+ }
+ }
+ indexing = {
+ additional_headers = [
+ {
+ header_key = "...my_header_key..."
+ value = "...my_value..."
+ },
+ ]
+ auth = {
+ destination_weaviate_api_token = {
+ token = "...my_token..."
+ }
+ }
+ batch_size = 6
+ default_vectorizer = "text2vec-huggingface"
+ host = "https://my-cluster.weaviate.network"
+ text_field = "...my_text_field..."
+ }
+ processing = {
+ chunk_overlap = 4
+ chunk_size = 5
+ field_name_mappings = [
+ {
+ from_field = "...my_from_field..."
+ to_field = "...my_to_field..."
+ },
+ ]
+ metadata_fields = [
+ "...",
+ ]
+ text_fields = [
+ "...",
+ ]
+ text_splitter = {
+ destination_weaviate_by_markdown_header = {
+ split_level = 4
+ }
+ }
+ }
+ }
+ definition_id = "97e801e6-7689-4a46-b396-c7c6bf737242"
+ name = "Diana Runte Jr."
+ workspace_id = "59f1e303-60fc-40ea-a506-81bc3adb090c"
+}
\ No newline at end of file
diff --git a/examples/resources/airbyte_destination_xata/resource.tf b/examples/resources/airbyte_destination_xata/resource.tf
old mode 100755
new mode 100644
index 082cbbe68..ab3df7bb4
--- a/examples/resources/airbyte_destination_xata/resource.tf
+++ b/examples/resources/airbyte_destination_xata/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_destination_xata" "my_destination_xata" {
configuration = {
- api_key = "...my_api_key..."
- db_url = "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main"
- destination_type = "xata"
+ api_key = "...my_api_key..."
+ db_url = "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main"
}
- name = "Oscar Smith"
- workspace_id = "e68e4be0-5601-43f5-9da7-57a59ecfef66"
+ definition_id = "013842c1-01e2-465e-abc2-30b15094cc21"
+ name = "Derrick Green"
+ workspace_id = "b75e7d1c-9ddc-42da-b62f-af1b28fe26cb"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_aha/resource.tf b/examples/resources/airbyte_source_aha/resource.tf
old mode 100755
new mode 100644
index f1b3af875..d02bd8660
--- a/examples/resources/airbyte_source_aha/resource.tf
+++ b/examples/resources/airbyte_source_aha/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_aha" "my_source_aha" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "aha"
- url = "...my_url..."
+ api_key = "...my_api_key..."
+ url = "...my_url..."
}
- name = "Van Bergnaum"
- secret_id = "...my_secret_id..."
- workspace_id = "a3383c2b-eb47-4737-bc8d-72f64d1db1f2"
+ definition_id = "1bb0550b-4e34-4412-ae7f-29336e237818"
+ name = "Samuel Hammes"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3da8d6ee-f047-4576-b0dd-bc2dbf188dfa"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_aircall/resource.tf b/examples/resources/airbyte_source_aircall/resource.tf
old mode 100755
new mode 100644
index 190aa852c..7ad95c78e
--- a/examples/resources/airbyte_source_aircall/resource.tf
+++ b/examples/resources/airbyte_source_aircall/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_aircall" "my_source_aircall" {
configuration = {
- api_id = "...my_api_id..."
- api_token = "...my_api_token..."
- source_type = "aircall"
- start_date = "2022-03-01T00:00:00.000Z"
+ api_id = "...my_api_id..."
+ api_token = "...my_api_token..."
+ start_date = "2022-03-01T00:00:00.000Z"
}
- name = "Martha Bashirian"
- secret_id = "...my_secret_id..."
- workspace_id = "1e96349e-1cf9-4e06-a3a4-37000ae6b6bc"
+ definition_id = "57111ac6-1dff-4a69-be71-43a3e9a244d7"
+ name = "Lucas Breitenberg"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a6e1cc19-3137-4221-8027-ee71b638bd64"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_airtable/resource.tf b/examples/resources/airbyte_source_airtable/resource.tf
old mode 100755
new mode 100644
index 07c584229..13b76966a
--- a/examples/resources/airbyte_source_airtable/resource.tf
+++ b/examples/resources/airbyte_source_airtable/resource.tf
@@ -1,18 +1,17 @@
resource "airbyte_source_airtable" "my_source_airtable" {
configuration = {
credentials = {
- source_airtable_authentication_o_auth2_0 = {
+ source_airtable_o_auth2_0 = {
access_token = "...my_access_token..."
- auth_method = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
- token_expiry_date = "2021-08-01T09:41:55.270Z"
+ token_expiry_date = "2021-04-10T21:26:19.630Z"
}
}
- source_type = "airtable"
}
- name = "Tommie Klocko"
- secret_id = "...my_secret_id..."
- workspace_id = "eac55a97-41d3-4113-9296-5bb8a7202611"
+ definition_id = "54814afe-b93d-44bb-9e9f-2bb80cd3fe4a"
+ name = "Todd Lockman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "38c45275-6445-4179-b0ed-8d43c0dabba6"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_alloydb/resource.tf b/examples/resources/airbyte_source_alloydb/resource.tf
old mode 100755
new mode 100644
index 813b5dd16..434b8a791
--- a/examples/resources/airbyte_source_alloydb/resource.tf
+++ b/examples/resources/airbyte_source_alloydb/resource.tf
@@ -6,10 +6,10 @@ resource "airbyte_source_alloydb" "my_source_alloydb" {
password = "...my_password..."
port = 5432
replication_method = {
- source_alloydb_replication_method_logical_replication_cdc_ = {
- initial_waiting_seconds = 2
- lsn_commit_behaviour = "While reading Data"
- method = "CDC"
+ logical_replication_cdc = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ initial_waiting_seconds = 10
+ lsn_commit_behaviour = "After loading Data in the destination"
plugin = "pgoutput"
publication = "...my_publication..."
queue_size = 10
@@ -19,20 +19,18 @@ resource "airbyte_source_alloydb" "my_source_alloydb" {
schemas = [
"...",
]
- source_type = "alloydb"
ssl_mode = {
- source_alloydb_ssl_modes_allow = {
- mode = "allow"
+ source_alloydb_allow = {
+ additional_properties = "{ \"see\": \"documentation\" }"
}
}
tunnel_method = {
- source_alloydb_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_alloydb_no_tunnel = {}
}
- username = "Ashlynn_Emard"
+ username = "Olaf.Emard48"
}
- name = "Wilbert Crona"
- secret_id = "...my_secret_id..."
- workspace_id = "9b1abda8-c070-4e10-84cb-0672d1ad879e"
+ definition_id = "44fd252e-57aa-4673-9282-59f0c220e39e"
+ name = "Deborah Stanton"
+ secret_id = "...my_secret_id..."
+ workspace_id = "f09fb849-b0bd-4f3d-9ca9-6c63354ae1d2"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_amazon_ads/resource.tf b/examples/resources/airbyte_source_amazon_ads/resource.tf
old mode 100755
new mode 100644
index 6a42f262d..37aca98b5
--- a/examples/resources/airbyte_source_amazon_ads/resource.tf
+++ b/examples/resources/airbyte_source_amazon_ads/resource.tf
@@ -1,27 +1,26 @@
resource "airbyte_source_amazon_ads" "my_source_amazonads" {
configuration = {
- auth_type = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- look_back_window = 10
+ look_back_window = 3
marketplace_ids = [
"...",
]
profiles = [
- 6,
+ 2,
]
refresh_token = "...my_refresh_token..."
- region = "EU"
+ region = "FE"
report_record_types = [
- "asins_targets",
+ "adGroups",
]
- source_type = "amazon-ads"
- start_date = "2022-10-10"
+ start_date = "2022-10-10"
state_filter = [
- "archived",
+ "paused",
]
}
- name = "Dan Towne"
- secret_id = "...my_secret_id..."
- workspace_id = "d02bae0b-e2d7-4822-99e3-ea4b5197f924"
+ definition_id = "34df0d75-6d8b-40d9-8daf-9186ab63a7b2"
+ name = "Chris Littel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ec566b1d-1d8b-4b57-bf00-1ddb3cf074d6"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_amazon_seller_partner/resource.tf b/examples/resources/airbyte_source_amazon_seller_partner/resource.tf
old mode 100755
new mode 100644
index 189c56620..34d26db15
--- a/examples/resources/airbyte_source_amazon_seller_partner/resource.tf
+++ b/examples/resources/airbyte_source_amazon_seller_partner/resource.tf
@@ -1,23 +1,19 @@
resource "airbyte_source_amazon_seller_partner" "my_source_amazonsellerpartner" {
configuration = {
+ account_type = "Seller"
advanced_stream_options = "{\"GET_SALES_AND_TRAFFIC_REPORT\": {\"availability_sla_days\": 3}}"
- auth_type = "oauth2.0"
- aws_access_key = "...my_aws_access_key..."
- aws_environment = "PRODUCTION"
- aws_secret_key = "...my_aws_secret_key..."
+ aws_environment = "SANDBOX"
lwa_app_id = "...my_lwa_app_id..."
lwa_client_secret = "...my_lwa_client_secret..."
- max_wait_seconds = 1980
- period_in_days = 5
+ period_in_days = 2
refresh_token = "...my_refresh_token..."
- region = "SA"
+ region = "AE"
replication_end_date = "2017-01-25T00:00:00Z"
replication_start_date = "2017-01-25T00:00:00Z"
- report_options = "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}"
- role_arn = "...my_role_arn..."
- source_type = "amazon-seller-partner"
+ report_options = "{\"GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT\": {\"reportPeriod\": \"WEEK\"}}"
}
- name = "Phyllis Quitzon"
- secret_id = "...my_secret_id..."
- workspace_id = "5c537c64-54ef-4b0b-b489-6c3ca5acfbe2"
+ definition_id = "69bb26e6-b9f2-45aa-9f8c-7d4107048d9f"
+ name = "Caleb Legros"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9afeef69-ead1-4e5d-b690-efc6e828b1d2"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_amazon_sqs/resource.tf b/examples/resources/airbyte_source_amazon_sqs/resource.tf
old mode 100755
new mode 100644
index 9721a2135..6b5b9d15f
--- a/examples/resources/airbyte_source_amazon_sqs/resource.tf
+++ b/examples/resources/airbyte_source_amazon_sqs/resource.tf
@@ -2,16 +2,16 @@ resource "airbyte_source_amazon_sqs" "my_source_amazonsqs" {
configuration = {
access_key = "xxxxxHRNxxx3TBxxxxxx"
attributes_to_return = "attr1,attr2"
- delete_messages = false
+ delete_messages = true
max_batch_size = 5
max_wait_time = 5
queue_url = "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue"
- region = "ap-southeast-2"
+ region = "ap-northeast-2"
secret_key = "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz"
- source_type = "amazon-sqs"
visibility_timeout = 15
}
- name = "Cathy Kirlin"
- secret_id = "...my_secret_id..."
- workspace_id = "29177dea-c646-4ecb-9734-09e3eb1e5a2b"
+ definition_id = "aa9ea927-cae7-4b29-885e-6b85628652e0"
+ name = "Emmett Labadie"
+ secret_id = "...my_secret_id..."
+ workspace_id = "21b517b1-6f1f-4884-abcd-5137451945c4"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_amplitude/resource.tf b/examples/resources/airbyte_source_amplitude/resource.tf
old mode 100755
new mode 100644
index 4266cd121..c3d06178c
--- a/examples/resources/airbyte_source_amplitude/resource.tf
+++ b/examples/resources/airbyte_source_amplitude/resource.tf
@@ -2,12 +2,12 @@ resource "airbyte_source_amplitude" "my_source_amplitude" {
configuration = {
api_key = "...my_api_key..."
data_region = "Standard Server"
- request_time_range = 1
+ request_time_range = 2
secret_key = "...my_secret_key..."
- source_type = "amplitude"
start_date = "2021-01-25T00:00:00Z"
}
- name = "Robin Bednar"
- secret_id = "...my_secret_id..."
- workspace_id = "116db995-45fc-495f-a889-70e189dbb30f"
+ definition_id = "526ae8aa-3c4f-4287-913b-8668105e1180"
+ name = "Dominic Dach"
+ secret_id = "...my_secret_id..."
+ workspace_id = "75a1ca19-0e95-4bd1-982a-17eb0af63def"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_apify_dataset/resource.tf b/examples/resources/airbyte_source_apify_dataset/resource.tf
old mode 100755
new mode 100644
index 282254900..4842270d8
--- a/examples/resources/airbyte_source_apify_dataset/resource.tf
+++ b/examples/resources/airbyte_source_apify_dataset/resource.tf
@@ -1,11 +1,10 @@
resource "airbyte_source_apify_dataset" "my_source_apifydataset" {
configuration = {
- clean = true
- dataset_id = "...my_dataset_id..."
- source_type = "apify-dataset"
- token = "Personal API tokens"
+ dataset_id = "rHuMdwm6xCFt6WiGU"
+ token = "apify_api_PbVwb1cBbuvbfg2jRmAIHZKgx3NQyfEMG7uk"
}
- name = "Dale Ferry"
- secret_id = "...my_secret_id..."
- workspace_id = "055b197c-d44e-42f5-ad82-d3513bb6f48b"
+ definition_id = "a73356f3-9bea-45e2-889f-0e8905c8543b"
+ name = "Justin Luettgen"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ac7dcada-d293-48da-9765-e7880f00a30d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_appfollow/resource.tf b/examples/resources/airbyte_source_appfollow/resource.tf
old mode 100755
new mode 100644
index 1870408d1..90ad9ec63
--- a/examples/resources/airbyte_source_appfollow/resource.tf
+++ b/examples/resources/airbyte_source_appfollow/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_appfollow" "my_source_appfollow" {
configuration = {
- api_secret = "...my_api_secret..."
- source_type = "appfollow"
+ api_secret = "...my_api_secret..."
}
- name = "Regina Huel"
- secret_id = "...my_secret_id..."
- workspace_id = "db35ff2e-4b27-4537-a8cd-9e7319c177d5"
+ definition_id = "def9a90f-a7f8-4f44-9b58-dfc559a0bee1"
+ name = "Maurice Wilderman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "23389204-2261-4684-a73e-f602c915f597"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_asana/resource.tf b/examples/resources/airbyte_source_asana/resource.tf
old mode 100755
new mode 100644
index 24081f27a..0cf66a2b7
--- a/examples/resources/airbyte_source_asana/resource.tf
+++ b/examples/resources/airbyte_source_asana/resource.tf
@@ -1,16 +1,19 @@
resource "airbyte_source_asana" "my_source_asana" {
configuration = {
credentials = {
- source_asana_authentication_mechanism_authenticate_via_asana_oauth_ = {
+ authenticate_via_asana_oauth = {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- option_title = "OAuth Credentials"
refresh_token = "...my_refresh_token..."
}
}
- source_type = "asana"
+ organization_export_ids = [
+ "{ \"see\": \"documentation\" }",
+ ]
+ test_mode = true
}
- name = "Jill Wintheiser"
- secret_id = "...my_secret_id..."
- workspace_id = "b114eeb5-2ff7-485f-8378-14d4c98e0c2b"
+ definition_id = "f5896557-ce17-4ccd-ab10-d6388d4fdfb9"
+ name = "Ms. Irvin Anderson"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c04191be-b057-4f07-8546-621bdba90354"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_auth0/resource.tf b/examples/resources/airbyte_source_auth0/resource.tf
old mode 100755
new mode 100644
index 91e63dcbc..6fca80544
--- a/examples/resources/airbyte_source_auth0/resource.tf
+++ b/examples/resources/airbyte_source_auth0/resource.tf
@@ -2,15 +2,14 @@ resource "airbyte_source_auth0" "my_source_auth0" {
configuration = {
base_url = "https://dev-yourOrg.us.auth0.com/"
credentials = {
- source_auth0_authentication_method_o_auth2_access_token = {
+ o_auth2_access_token = {
access_token = "...my_access_token..."
- auth_type = "oauth2_access_token"
}
}
- source_type = "auth0"
- start_date = "2023-08-05T00:43:59.244Z"
+ start_date = "2023-08-05T00:43:59.244Z"
}
- name = "Willard McLaughlin"
- secret_id = "...my_secret_id..."
- workspace_id = "75dad636-c600-4503-98bb-31180f739ae9"
+ definition_id = "f51ed0a8-181e-46e5-9fd9-ebe7b2f5ca6e"
+ name = "Dallas Wiza"
+ secret_id = "...my_secret_id..."
+ workspace_id = "2b052102-08e0-436b-a68d-758466c963e1"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_aws_cloudtrail/resource.tf b/examples/resources/airbyte_source_aws_cloudtrail/resource.tf
old mode 100755
new mode 100644
index 524f4ccc9..322d19c53
--- a/examples/resources/airbyte_source_aws_cloudtrail/resource.tf
+++ b/examples/resources/airbyte_source_aws_cloudtrail/resource.tf
@@ -3,10 +3,10 @@ resource "airbyte_source_aws_cloudtrail" "my_source_awscloudtrail" {
aws_key_id = "...my_aws_key_id..."
aws_region_name = "...my_aws_region_name..."
aws_secret_key = "...my_aws_secret_key..."
- source_type = "aws-cloudtrail"
start_date = "2021-01-01"
}
- name = "Nellie Waters"
- secret_id = "...my_secret_id..."
- workspace_id = "09e28103-31f3-4981-94c7-00b607f3c93c"
+ definition_id = "1b394b84-acdf-48db-aa4f-7e23711b260f"
+ name = "Janis Erdman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "1edcb36c-da3d-451c-bc15-623ec6453ce6"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_azure_blob_storage/resource.tf b/examples/resources/airbyte_source_azure_blob_storage/resource.tf
old mode 100755
new mode 100644
index cbf51bfc3..005dd649b
--- a/examples/resources/airbyte_source_azure_blob_storage/resource.tf
+++ b/examples/resources/airbyte_source_azure_blob_storage/resource.tf
@@ -1,19 +1,32 @@
resource "airbyte_source_azure_blob_storage" "my_source_azureblobstorage" {
configuration = {
- azure_blob_storage_account_key = "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="
- azure_blob_storage_account_name = "airbyte5storage"
- azure_blob_storage_blobs_prefix = "FolderA/FolderB/"
- azure_blob_storage_container_name = "airbytetescontainername"
- azure_blob_storage_endpoint = "blob.core.windows.net"
- azure_blob_storage_schema_inference_limit = 500
- format = {
- source_azure_blob_storage_input_format_json_lines_newline_delimited_json = {
- format_type = "JSONL"
- }
- }
- source_type = "azure-blob-storage"
+ azure_blob_storage_account_key = "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="
+ azure_blob_storage_account_name = "airbyte5storage"
+ azure_blob_storage_container_name = "airbytetescontainername"
+ azure_blob_storage_endpoint = "blob.core.windows.net"
+ start_date = "2021-01-01T00:00:00.000000Z"
+ streams = [
+ {
+ days_to_sync_if_history_is_full = 8
+ format = {
+ avro_format = {
+ double_as_string = true
+ }
+ }
+ globs = [
+ "...",
+ ]
+ input_schema = "...my_input_schema..."
+ legacy_prefix = "...my_legacy_prefix..."
+ name = "Angelina Armstrong"
+ primary_key = "...my_primary_key..."
+ schemaless = true
+ validation_policy = "Wait for Discover"
+ },
+ ]
}
- name = "Patty Mraz"
- secret_id = "...my_secret_id..."
- workspace_id = "3f2ceda7-e23f-4225-b411-faf4b7544e47"
+ definition_id = "e16b8da7-b814-43f8-91cf-99c7fd70e504"
+ name = "Joy Sipes"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4f64874e-62c5-48d8-b92f-d48887cb19c4"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_azure_table/resource.tf b/examples/resources/airbyte_source_azure_table/resource.tf
old mode 100755
new mode 100644
index f8b67a2aa..36946bda4
--- a/examples/resources/airbyte_source_azure_table/resource.tf
+++ b/examples/resources/airbyte_source_azure_table/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_azure_table" "my_source_azuretable" {
configuration = {
- source_type = "azure-table"
storage_access_key = "...my_storage_access_key..."
storage_account_name = "...my_storage_account_name..."
- storage_endpoint_suffix = "core.windows.net"
+ storage_endpoint_suffix = "core.chinacloudapi.cn"
}
- name = "Ian Baumbach"
- secret_id = "...my_secret_id..."
- workspace_id = "57a5b404-63a7-4d57-9f14-00e764ad7334"
+ definition_id = "ec8b4573-d66d-4007-a52a-2e4396e7403e"
+ name = "Adam Stracke V"
+ secret_id = "...my_secret_id..."
+ workspace_id = "59a4fa50-e807-4c86-bd0c-bf5314eea0fa"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_bamboo_hr/resource.tf b/examples/resources/airbyte_source_bamboo_hr/resource.tf
old mode 100755
new mode 100644
index 034bf7c8c..7fece848a
--- a/examples/resources/airbyte_source_bamboo_hr/resource.tf
+++ b/examples/resources/airbyte_source_bamboo_hr/resource.tf
@@ -3,10 +3,10 @@ resource "airbyte_source_bamboo_hr" "my_source_bamboohr" {
api_key = "...my_api_key..."
custom_reports_fields = "...my_custom_reports_fields..."
custom_reports_include_default_fields = true
- source_type = "bamboo-hr"
subdomain = "...my_subdomain..."
}
- name = "Ralph Rau"
- secret_id = "...my_secret_id..."
- workspace_id = "1b36a080-88d1-400e-bada-200ef0422eb2"
+ definition_id = "1aa37367-271c-478a-9aa9-603df323c7d7"
+ name = "Joel Harber"
+ secret_id = "...my_secret_id..."
+ workspace_id = "f8882a19-738b-4218-b704-94da21b79cfd"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_bigcommerce/resource.tf b/examples/resources/airbyte_source_bigcommerce/resource.tf
deleted file mode 100755
index 8235355de..000000000
--- a/examples/resources/airbyte_source_bigcommerce/resource.tf
+++ /dev/null
@@ -1,11 +0,0 @@
-resource "airbyte_source_bigcommerce" "my_source_bigcommerce" {
- configuration = {
- access_token = "...my_access_token..."
- source_type = "bigcommerce"
- start_date = "2021-01-01"
- store_hash = "...my_store_hash..."
- }
- name = "Beth Gleason"
- secret_id = "...my_secret_id..."
- workspace_id = "9ab8366c-723f-4fda-9e06-bee4825c1fc0"
-}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_bigquery/resource.tf b/examples/resources/airbyte_source_bigquery/resource.tf
old mode 100755
new mode 100644
index c9122f490..25a297506
--- a/examples/resources/airbyte_source_bigquery/resource.tf
+++ b/examples/resources/airbyte_source_bigquery/resource.tf
@@ -3,9 +3,9 @@ resource "airbyte_source_bigquery" "my_source_bigquery" {
credentials_json = "...my_credentials_json..."
dataset_id = "...my_dataset_id..."
project_id = "...my_project_id..."
- source_type = "bigquery"
}
- name = "Joe Bradtke"
- secret_id = "...my_secret_id..."
- workspace_id = "80bff918-544e-4c42-9efc-ce8f1977773e"
+ definition_id = "9baf3821-deb7-4264-9ad9-e5fb53126691"
+ name = "Darrin Rogahn"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b9ea24da-51fb-473f-872f-2e8bbfe18227"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_bing_ads/resource.tf b/examples/resources/airbyte_source_bing_ads/resource.tf
old mode 100755
new mode 100644
index 7633b99ea..c47564262
--- a/examples/resources/airbyte_source_bing_ads/resource.tf
+++ b/examples/resources/airbyte_source_bing_ads/resource.tf
@@ -1,16 +1,25 @@
resource "airbyte_source_bing_ads" "my_source_bingads" {
configuration = {
- auth_method = "oauth2.0"
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ custom_reports = [
+ {
+ name = "AdDynamicTextPerformanceReport"
+ report_aggregation = "...my_report_aggregation..."
+ report_columns = [
+ "...",
+ ]
+ reporting_object = "ShareOfVoiceReportRequest"
+ },
+ ]
developer_token = "...my_developer_token..."
- lookback_window = 4
+ lookback_window = 3
refresh_token = "...my_refresh_token..."
- reports_start_date = "2022-08-23"
- source_type = "bing-ads"
+ reports_start_date = "2022-08-17"
tenant_id = "...my_tenant_id..."
}
- name = "Kathryn Nitzsche"
- secret_id = "...my_secret_id..."
- workspace_id = "408f05e3-d48f-4daf-b13a-1f5fd94259c0"
+ definition_id = "f49be625-99f1-47b5-861c-8d2f7dd6ee9c"
+ name = "Delia Kub Sr."
+ secret_id = "...my_secret_id..."
+ workspace_id = "90282195-430f-4896-8a32-1f431fb3aad0"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_braintree/resource.tf b/examples/resources/airbyte_source_braintree/resource.tf
old mode 100755
new mode 100644
index e67333ee3..663e83a22
--- a/examples/resources/airbyte_source_braintree/resource.tf
+++ b/examples/resources/airbyte_source_braintree/resource.tf
@@ -1,13 +1,13 @@
resource "airbyte_source_braintree" "my_source_braintree" {
configuration = {
- environment = "Development"
+ environment = "Qa"
merchant_id = "...my_merchant_id..."
private_key = "...my_private_key..."
public_key = "...my_public_key..."
- source_type = "braintree"
start_date = "2020-12-30"
}
- name = "Henrietta Nienow"
- secret_id = "...my_secret_id..."
- workspace_id = "4f3b756c-11f6-4c37-a512-6243835bbc05"
+ definition_id = "12fcb5a7-fdd8-454e-8c39-c22fe17df57a"
+ name = "Ms. Tommie Bins"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5ff7f1a2-7e8f-4d2f-993d-4f9ab29a2f83"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_braze/resource.tf b/examples/resources/airbyte_source_braze/resource.tf
old mode 100755
new mode 100644
index ffaee2c2e..84411237e
--- a/examples/resources/airbyte_source_braze/resource.tf
+++ b/examples/resources/airbyte_source_braze/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_braze" "my_source_braze" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "braze"
- start_date = "2022-09-06"
- url = "...my_url..."
+ api_key = "...my_api_key..."
+ start_date = "2022-07-08"
+ url = "...my_url..."
}
- name = "Rosie Glover"
- secret_id = "...my_secret_id..."
- workspace_id = "efc5fde1-0a0c-4e21-a9e5-10019c6dc5e3"
+ definition_id = "dec4e3ea-b02c-4cb9-8852-3df16a0cc499"
+ name = "Margarita Leuschke"
+ secret_id = "...my_secret_id..."
+ workspace_id = "682b0a70-74f0-416f-b212-7f33f8652b25"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_cart/resource.tf b/examples/resources/airbyte_source_cart/resource.tf
new file mode 100644
index 000000000..c93f66657
--- /dev/null
+++ b/examples/resources/airbyte_source_cart/resource.tf
@@ -0,0 +1,16 @@
+resource "airbyte_source_cart" "my_source_cart" {
+ configuration = {
+ credentials = {
+ central_api_router = {
+ site_id = "...my_site_id..."
+ user_name = "Ethyl.Bosco18"
+ user_secret = "...my_user_secret..."
+ }
+ }
+ start_date = "2021-01-01T00:00:00Z"
+ }
+ definition_id = "3ec1224a-7ffb-4268-9c18-7087d37ac99f"
+ name = "Jamie Macejkovic III"
+ secret_id = "...my_secret_id..."
+ workspace_id = "12305e0c-1f4b-465d-9ebd-757e5946981c"
+}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_chargebee/resource.tf b/examples/resources/airbyte_source_chargebee/resource.tf
old mode 100755
new mode 100644
index 20c68d417..44076dbd7
--- a/examples/resources/airbyte_source_chargebee/resource.tf
+++ b/examples/resources/airbyte_source_chargebee/resource.tf
@@ -1,12 +1,12 @@
resource "airbyte_source_chargebee" "my_source_chargebee" {
configuration = {
- product_catalog = "1.0"
+ product_catalog = "2.0"
site = "airbyte-test"
site_api_key = "...my_site_api_key..."
- source_type = "chargebee"
start_date = "2021-01-25T00:00:00Z"
}
- name = "Viola Morissette"
- secret_id = "...my_secret_id..."
- workspace_id = "fbbe6949-fb2b-4b4e-8ae6-c3d5db3adebd"
+ definition_id = "08691686-308e-4adb-b3c3-69be0c12ece5"
+ name = "Jean Mann"
+ secret_id = "...my_secret_id..."
+ workspace_id = "aef8e474-9058-48d0-a293-9574a681eea7"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_chartmogul/resource.tf b/examples/resources/airbyte_source_chartmogul/resource.tf
old mode 100755
new mode 100644
index 26786c0bb..8b026e54e
--- a/examples/resources/airbyte_source_chartmogul/resource.tf
+++ b/examples/resources/airbyte_source_chartmogul/resource.tf
@@ -1,11 +1,10 @@
resource "airbyte_source_chartmogul" "my_source_chartmogul" {
configuration = {
- api_key = "...my_api_key..."
- interval = "week"
- source_type = "chartmogul"
- start_date = "2017-01-25T00:00:00Z"
+ api_key = "...my_api_key..."
+ start_date = "2017-01-25T00:00:00Z"
}
- name = "Neal Gorczany"
- secret_id = "...my_secret_id..."
- workspace_id = "06a8aa94-c026-444c-b5e9-d9a4578adc1a"
+ definition_id = "87a1fb18-7d33-4223-980b-b99362d2f459"
+ name = "Monica Pagac"
+ secret_id = "...my_secret_id..."
+ workspace_id = "bc3680ab-b376-4bce-a6a7-c0ce20da3e9a"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_clickhouse/resource.tf b/examples/resources/airbyte_source_clickhouse/resource.tf
old mode 100755
new mode 100644
index 60847903f..0ba718cd0
--- a/examples/resources/airbyte_source_clickhouse/resource.tf
+++ b/examples/resources/airbyte_source_clickhouse/resource.tf
@@ -1,18 +1,16 @@
resource "airbyte_source_clickhouse" "my_source_clickhouse" {
configuration = {
- database = "default"
- host = "...my_host..."
- password = "...my_password..."
- port = 8123
- source_type = "clickhouse"
+ database = "default"
+ host = "...my_host..."
+ password = "...my_password..."
+ port = 8123
tunnel_method = {
- source_clickhouse_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_clickhouse_no_tunnel = {}
}
- username = "Gerry81"
+ username = "Maximus28"
}
- name = "Mr. Simon Altenwerth"
- secret_id = "...my_secret_id..."
- workspace_id = "c802e2ec-09ff-48f0-b816-ff3477c13e90"
+ definition_id = "54cb2418-93e1-4da4-ac4f-685d205011b8"
+ name = "Milton Crooks"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3b757391-0861-48e9-9445-d83c494a849c"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_clickup_api/resource.tf b/examples/resources/airbyte_source_clickup_api/resource.tf
old mode 100755
new mode 100644
index f07e8ffae..6ec9ec129
--- a/examples/resources/airbyte_source_clickup_api/resource.tf
+++ b/examples/resources/airbyte_source_clickup_api/resource.tf
@@ -4,11 +4,11 @@ resource "airbyte_source_clickup_api" "my_source_clickupapi" {
folder_id = "...my_folder_id..."
include_closed_tasks = true
list_id = "...my_list_id..."
- source_type = "clickup-api"
space_id = "...my_space_id..."
team_id = "...my_team_id..."
}
- name = "Mr. Jack Gottlieb"
- secret_id = "...my_secret_id..."
- workspace_id = "b0960a66-8151-4a47-aaf9-23c5949f83f3"
+ definition_id = "517f0e32-c2e3-402e-ade9-2b3e43098446"
+ name = "Freddie Little"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e6422d15-b828-4621-a877-d2e625cdd80b"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_clockify/resource.tf b/examples/resources/airbyte_source_clockify/resource.tf
old mode 100755
new mode 100644
index 6e40ed5d1..8f63ddc82
--- a/examples/resources/airbyte_source_clockify/resource.tf
+++ b/examples/resources/airbyte_source_clockify/resource.tf
@@ -2,10 +2,10 @@ resource "airbyte_source_clockify" "my_source_clockify" {
configuration = {
api_key = "...my_api_key..."
api_url = "...my_api_url..."
- source_type = "clockify"
workspace_id = "...my_workspace_id..."
}
- name = "Angela Schaefer"
- secret_id = "...my_secret_id..."
- workspace_id = "76ffb901-c6ec-4bb4-a243-cf789ffafeda"
+ definition_id = "a5ff53c6-fc10-4ca6-ba82-7c3d349f444d"
+ name = "Julius Lockman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9d8494dc-faea-4550-8380-1e9f446900c8"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_close_com/resource.tf b/examples/resources/airbyte_source_close_com/resource.tf
old mode 100755
new mode 100644
index c353f6aa5..5d4334f9f
--- a/examples/resources/airbyte_source_close_com/resource.tf
+++ b/examples/resources/airbyte_source_close_com/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_close_com" "my_source_closecom" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "close-com"
- start_date = "2021-01-01"
+ api_key = "...my_api_key..."
+ start_date = "2021-01-01"
}
- name = "Ronnie Nikolaus"
- secret_id = "...my_secret_id..."
- workspace_id = "e0ac184c-2b9c-4247-8883-73a40e1942f3"
+ definition_id = "ba7b45cf-ea08-4abd-9a32-8f6c373e0666"
+ name = "Miss Eva Collier"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a3ab4d44-755b-4910-a5c9-99e89cbd0e8f"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_coda/resource.tf b/examples/resources/airbyte_source_coda/resource.tf
old mode 100755
new mode 100644
index ad3f1aa2b..aa25f8ba6
--- a/examples/resources/airbyte_source_coda/resource.tf
+++ b/examples/resources/airbyte_source_coda/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_coda" "my_source_coda" {
configuration = {
- auth_token = "...my_auth_token..."
- source_type = "coda"
+ auth_token = "...my_auth_token..."
}
- name = "Lila Harris II"
- secret_id = "...my_secret_id..."
- workspace_id = "5756f5d5-6d0b-4d0a-b2df-e13db4f62cba"
+ definition_id = "2a37cc1f-bec8-483d-a2fe-cd2cab29e0bc"
+ name = "Lisa Barrows"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3cc123e8-783d-450d-8d2b-80c50dc344f6"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_coin_api/resource.tf b/examples/resources/airbyte_source_coin_api/resource.tf
old mode 100755
new mode 100644
index 20698fb64..f5e5d4d2a
--- a/examples/resources/airbyte_source_coin_api/resource.tf
+++ b/examples/resources/airbyte_source_coin_api/resource.tf
@@ -3,13 +3,13 @@ resource "airbyte_source_coin_api" "my_source_coinapi" {
api_key = "...my_api_key..."
end_date = "2019-01-01T00:00:00"
environment = "sandbox"
- limit = 10
+ limit = 8
period = "2MTH"
- source_type = "coin-api"
start_date = "2019-01-01T00:00:00"
symbol_id = "...my_symbol_id..."
}
- name = "Francis Boyle"
- secret_id = "...my_secret_id..."
- workspace_id = "bc0b80a6-924d-43b2-acfc-c8f895010f5d"
+ definition_id = "f0e9a05e-994a-4ce4-9dc5-b42f2a228e88"
+ name = "Rhonda Kunze"
+ secret_id = "...my_secret_id..."
+ workspace_id = "d4275060-42c1-4c65-a61b-2485a060238e"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_coinmarketcap/resource.tf b/examples/resources/airbyte_source_coinmarketcap/resource.tf
old mode 100755
new mode 100644
index 4169d481c..091d4d179
--- a/examples/resources/airbyte_source_coinmarketcap/resource.tf
+++ b/examples/resources/airbyte_source_coinmarketcap/resource.tf
@@ -1,13 +1,13 @@
resource "airbyte_source_coinmarketcap" "my_source_coinmarketcap" {
configuration = {
- api_key = "...my_api_key..."
- data_type = "historical"
- source_type = "coinmarketcap"
+ api_key = "...my_api_key..."
+ data_type = "historical"
symbols = [
"...",
]
}
- name = "Meredith Kassulke"
- secret_id = "...my_secret_id..."
- workspace_id = "1804e54c-82f1-468a-b63c-8873e484380b"
+ definition_id = "a1361d3c-00cf-4e1b-a68d-340502b96029"
+ name = "Pat Robel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9e6abf17-c2d5-40cb-ae6f-f332bdf14577"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_configcat/resource.tf b/examples/resources/airbyte_source_configcat/resource.tf
old mode 100755
new mode 100644
index 1e2c23e89..0fd18bbb2
--- a/examples/resources/airbyte_source_configcat/resource.tf
+++ b/examples/resources/airbyte_source_configcat/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_configcat" "my_source_configcat" {
configuration = {
- password = "...my_password..."
- source_type = "configcat"
- username = "Art_Wiegand"
+ password = "...my_password..."
+ username = "Estrella_Wilkinson70"
}
- name = "Lowell Oberbrunner"
- secret_id = "...my_secret_id..."
- workspace_id = "5a60a04c-495c-4c69-9171-b51c1bdb1cf4"
+ definition_id = "81a7466b-f78b-43b7-9ede-547fc7c1cb53"
+ name = "Ms. Luis Harris"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9ddb3b3d-7401-439d-82cf-2cb416442d85"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_confluence/resource.tf b/examples/resources/airbyte_source_confluence/resource.tf
old mode 100755
new mode 100644
index 3234b7a6e..e13a4e83f
--- a/examples/resources/airbyte_source_confluence/resource.tf
+++ b/examples/resources/airbyte_source_confluence/resource.tf
@@ -3,9 +3,9 @@ resource "airbyte_source_confluence" "my_source_confluence" {
api_token = "...my_api_token..."
domain_name = "...my_domain_name..."
email = "abc@example.com"
- source_type = "confluence"
}
- name = "Jody Will"
- secret_id = "...my_secret_id..."
- workspace_id = "ccca99bc-7fc0-4b2d-8e10-873e42b006d6"
+ definition_id = "82e70e18-a817-42f9-b227-1c9f9cbaa542"
+ name = "Ms. Nathaniel Walter V"
+ secret_id = "...my_secret_id..."
+ workspace_id = "61d84c3f-bc24-4f86-8fce-85198c116e72"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_convex/resource.tf b/examples/resources/airbyte_source_convex/resource.tf
old mode 100755
new mode 100644
index ea4e9c71d..018973a93
--- a/examples/resources/airbyte_source_convex/resource.tf
+++ b/examples/resources/airbyte_source_convex/resource.tf
@@ -2,9 +2,9 @@ resource "airbyte_source_convex" "my_source_convex" {
configuration = {
access_key = "...my_access_key..."
deployment_url = "https://murky-swan-635.convex.cloud"
- source_type = "convex"
}
- name = "Guy Kovacek"
- secret_id = "...my_secret_id..."
- workspace_id = "a8581a58-208c-454f-afa9-c95f2eac5565"
+ definition_id = "581ee677-0fa8-4ec1-ba80-4bd6457a40e8"
+ name = "Corey Braun"
+ secret_id = "...my_secret_id..."
+ workspace_id = "541ba6f5-d90d-45a8-a349-e2072bdff381"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_datascope/resource.tf b/examples/resources/airbyte_source_datascope/resource.tf
old mode 100755
new mode 100644
index e590aad92..4a8034656
--- a/examples/resources/airbyte_source_datascope/resource.tf
+++ b/examples/resources/airbyte_source_datascope/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_datascope" "my_source_datascope" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "datascope"
- start_date = "dd/mm/YYYY HH:MM"
+ api_key = "...my_api_key..."
+ start_date = "dd/mm/YYYY HH:MM"
}
- name = "Danny Bahringer"
- secret_id = "...my_secret_id..."
- workspace_id = "fee81206-e281-43fa-8a41-c480d3f2132a"
+ definition_id = "8dbe50fc-b32a-4781-b3ab-b82e6a7189e9"
+ name = "Erin Johns"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4638d140-9463-49cf-9dd4-a0c05f536f6b"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_delighted/resource.tf b/examples/resources/airbyte_source_delighted/resource.tf
old mode 100755
new mode 100644
index 17c514f7c..7b2ead0df
--- a/examples/resources/airbyte_source_delighted/resource.tf
+++ b/examples/resources/airbyte_source_delighted/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_delighted" "my_source_delighted" {
configuration = {
- api_key = "...my_api_key..."
- since = "2022-05-30 04:50:23"
- source_type = "delighted"
+ api_key = "...my_api_key..."
+ since = "2022-05-30 04:50:23"
}
- name = "Sarah Collier"
- secret_id = "...my_secret_id..."
- workspace_id = "14f4cc6f-18bf-4962-9a6a-4f77a87ee3e4"
+ definition_id = "b8f8f6af-bf36-45d6-87e0-87e3905b6a41"
+ name = "Elsa Osinski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4f73b7e8-dc37-41ec-bee1-0511b439ed17"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_dixa/resource.tf b/examples/resources/airbyte_source_dixa/resource.tf
old mode 100755
new mode 100644
index 80cc2652d..8b645af59
--- a/examples/resources/airbyte_source_dixa/resource.tf
+++ b/examples/resources/airbyte_source_dixa/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_dixa" "my_source_dixa" {
configuration = {
- api_token = "...my_api_token..."
- batch_size = 31
- source_type = "dixa"
- start_date = "YYYY-MM-DD"
+ api_token = "...my_api_token..."
+ batch_size = 1
+ start_date = "YYYY-MM-DD"
}
- name = "Brittany Cole"
- secret_id = "...my_secret_id..."
- workspace_id = "5b34418e-3bb9-41c8-9975-e0e8419d8f84"
+ definition_id = "9f9b4783-ac23-42bf-a41c-80b23345c949"
+ name = "Arturo Hammes"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9f5a34ff-680c-488d-8e9f-7431721e4227"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_dockerhub/resource.tf b/examples/resources/airbyte_source_dockerhub/resource.tf
old mode 100755
new mode 100644
index 7c3860795..2c9605612
--- a/examples/resources/airbyte_source_dockerhub/resource.tf
+++ b/examples/resources/airbyte_source_dockerhub/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_dockerhub" "my_source_dockerhub" {
configuration = {
docker_username = "airbyte"
- source_type = "dockerhub"
}
- name = "Joe Haag"
- secret_id = "...my_secret_id..."
- workspace_id = "3e07edcc-4aa5-4f3c-abd9-05a972e05672"
+ definition_id = "fd51b66e-c345-4b5c-9bae-74726a8cd9c5"
+ name = "Ernesto Swaniawski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "afda11e1-0d00-42e1-873f-9ba1e39a63be"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_dremio/resource.tf b/examples/resources/airbyte_source_dremio/resource.tf
old mode 100755
new mode 100644
index b46396f97..1c149fe41
--- a/examples/resources/airbyte_source_dremio/resource.tf
+++ b/examples/resources/airbyte_source_dremio/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_dremio" "my_source_dremio" {
configuration = {
- api_key = "...my_api_key..."
- base_url = "...my_base_url..."
- source_type = "dremio"
+ api_key = "...my_api_key..."
+ base_url = "...my_base_url..."
}
- name = "Aaron Connelly"
- secret_id = "...my_secret_id..."
- workspace_id = "2d309470-bf7a-44fa-87cf-535a6fae54eb"
+ definition_id = "209caa59-3eb8-408e-88c0-a1f11671a56d"
+ name = "Jeanne Lebsack"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b5e4c195-9643-43e1-9514-84aac586d055"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_dynamodb/resource.tf b/examples/resources/airbyte_source_dynamodb/resource.tf
old mode 100755
new mode 100644
index c50483ed3..2adff7f5d
--- a/examples/resources/airbyte_source_dynamodb/resource.tf
+++ b/examples/resources/airbyte_source_dynamodb/resource.tf
@@ -2,12 +2,12 @@ resource "airbyte_source_dynamodb" "my_source_dynamodb" {
configuration = {
access_key_id = "A012345678910EXAMPLE"
endpoint = "https://{aws_dynamo_db_url}.com"
- region = "us-gov-west-1"
+ region = "us-west-1"
reserved_attribute_names = "name, field_name, field-name"
secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
- source_type = "dynamodb"
}
- name = "Sandra Rowe Sr."
- secret_id = "...my_secret_id..."
- workspace_id = "f023b75d-2367-4fe1-a0cc-8df79f0a396d"
+ definition_id = "44c5465b-457a-42c2-a18f-e1b91dcce8e6"
+ name = "Faye Streich"
+ secret_id = "...my_secret_id..."
+ workspace_id = "75fb5812-2af6-4a8a-8655-36a205f1e4d3"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_e2e_test_cloud/resource.tf b/examples/resources/airbyte_source_e2e_test_cloud/resource.tf
deleted file mode 100755
index 9c1ae62bb..000000000
--- a/examples/resources/airbyte_source_e2e_test_cloud/resource.tf
+++ /dev/null
@@ -1,18 +0,0 @@
-resource "airbyte_source_e2e_test_cloud" "my_source_e2etestcloud" {
- configuration = {
- max_messages = 6
- message_interval_ms = 0
- mock_catalog = {
- source_e2e_test_cloud_mock_catalog_multi_schema = {
- stream_schemas = "...my_stream_schemas..."
- type = "MULTI_STREAM"
- }
- }
- seed = 42
- source_type = "e2e-test-cloud"
- type = "CONTINUOUS_FEED"
- }
- name = "Gertrude Grant"
- secret_id = "...my_secret_id..."
- workspace_id = "c15dfbac-e188-4b1c-8ee2-c8c6ce611fee"
-}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_emailoctopus/resource.tf b/examples/resources/airbyte_source_emailoctopus/resource.tf
old mode 100755
new mode 100644
index c745d15ff..f0d4d8aa0
--- a/examples/resources/airbyte_source_emailoctopus/resource.tf
+++ b/examples/resources/airbyte_source_emailoctopus/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_emailoctopus" "my_source_emailoctopus" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "emailoctopus"
+ api_key = "...my_api_key..."
}
- name = "Gregory Satterfield"
- secret_id = "...my_secret_id..."
- workspace_id = "bdb6eec7-4378-4ba2-9317-747dc915ad2c"
+ definition_id = "09ea5800-594f-4bd8-a631-4cace02f96b8"
+ name = "Annie Hegmann"
+ secret_id = "...my_secret_id..."
+ workspace_id = "f7e4181b-36cf-41af-8f94-e3c79cbeca1c"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_exchange_rates/resource.tf b/examples/resources/airbyte_source_exchange_rates/resource.tf
old mode 100755
new mode 100644
index dd195c09e..067d0df7b
--- a/examples/resources/airbyte_source_exchange_rates/resource.tf
+++ b/examples/resources/airbyte_source_exchange_rates/resource.tf
@@ -1,12 +1,12 @@
resource "airbyte_source_exchange_rates" "my_source_exchangerates" {
configuration = {
access_key = "...my_access_key..."
- base = "USD"
- ignore_weekends = false
- source_type = "exchange-rates"
+ base = "EUR"
+ ignore_weekends = true
start_date = "YYYY-MM-DD"
}
- name = "Mrs. Leslie Klocko"
- secret_id = "...my_secret_id..."
- workspace_id = "c0f5ae2f-3a6b-4700-8787-56143f5a6c98"
+ definition_id = "a5bbba82-d4c0-4a2c-af78-12475bca9a48"
+ name = "Amber Osinski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "0ddc3156-b2ff-4d5d-ac69-da5497add71f"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_facebook_marketing/resource.tf b/examples/resources/airbyte_source_facebook_marketing/resource.tf
old mode 100755
new mode 100644
index 9bb332c63..9e60e57e5
--- a/examples/resources/airbyte_source_facebook_marketing/resource.tf
+++ b/examples/resources/airbyte_source_facebook_marketing/resource.tf
@@ -8,33 +8,32 @@ resource "airbyte_source_facebook_marketing" "my_source_facebookmarketing" {
custom_insights = [
{
action_breakdowns = [
- "action_destination",
+ "action_video_sound",
]
- action_report_time = "conversion"
+ action_report_time = "mixed"
breakdowns = [
- "frequency_value",
+ "mmm",
]
end_date = "2017-01-26T00:00:00Z"
fields = [
- "account_name",
+ "cpp",
]
- insights_lookback_window = 6
+ insights_lookback_window = 7
level = "ad"
- name = "Jesus Batz"
+ name = "Julio Beier"
start_date = "2017-01-25T00:00:00Z"
- time_increment = 8
+ time_increment = 9
},
]
end_date = "2017-01-26T00:00:00Z"
fetch_thumbnail_images = false
- include_deleted = true
- insights_lookback_window = 4
- max_batch_size = 7
+ include_deleted = false
+ insights_lookback_window = 2
page_size = 3
- source_type = "facebook-marketing"
start_date = "2017-01-25T00:00:00Z"
}
- name = "Ms. Wilbert McGlynn"
- secret_id = "...my_secret_id..."
- workspace_id = "04f926ba-d255-4381-9b47-4b0ed20e5624"
+ definition_id = "7eb149e6-fe9a-476b-9271-d6f7a77e51b0"
+ name = "Olivia MacGyver"
+ secret_id = "...my_secret_id..."
+ workspace_id = "2e6bc1e2-2381-4cdc-ae96-42f3c2fe19c3"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_facebook_pages/resource.tf b/examples/resources/airbyte_source_facebook_pages/resource.tf
old mode 100755
new mode 100644
index cd4d30448..475bd630b
--- a/examples/resources/airbyte_source_facebook_pages/resource.tf
+++ b/examples/resources/airbyte_source_facebook_pages/resource.tf
@@ -2,9 +2,9 @@ resource "airbyte_source_facebook_pages" "my_source_facebookpages" {
configuration = {
access_token = "...my_access_token..."
page_id = "...my_page_id..."
- source_type = "facebook-pages"
}
- name = "Moses Wuckert"
- secret_id = "...my_secret_id..."
- workspace_id = "39a910ab-dcab-4626-b669-6e1ec00221b3"
+ definition_id = "2edfee92-bc33-473a-92c8-87f28ef975a7"
+ name = "Scott Baumbach"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5487915a-2f44-49e5-b0b6-8d5fb4b99e2f"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_faker/resource.tf b/examples/resources/airbyte_source_faker/resource.tf
old mode 100755
new mode 100644
index 5f5e8e89b..a0096b9b2
--- a/examples/resources/airbyte_source_faker/resource.tf
+++ b/examples/resources/airbyte_source_faker/resource.tf
@@ -1,13 +1,13 @@
resource "airbyte_source_faker" "my_source_faker" {
configuration = {
always_updated = false
- count = 3
- parallelism = 9
- records_per_slice = 5
- seed = 6
- source_type = "faker"
+ count = 9
+ parallelism = 8
+ records_per_slice = 1
+ seed = 5
}
- name = "Delbert Reynolds"
- secret_id = "...my_secret_id..."
- workspace_id = "cfda8d0c-549e-4f03-8049-78a61fa1cf20"
+ definition_id = "33c76bbd-55f5-466b-8ade-0498ec40fd8a"
+ name = "Kirk Braun MD"
+ secret_id = "...my_secret_id..."
+ workspace_id = "05c5e889-977e-4ae0-86e3-c2d33082ab84"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_fauna/resource.tf b/examples/resources/airbyte_source_fauna/resource.tf
old mode 100755
new mode 100644
index d5047d592..fc369a1e2
--- a/examples/resources/airbyte_source_fauna/resource.tf
+++ b/examples/resources/airbyte_source_fauna/resource.tf
@@ -2,19 +2,17 @@ resource "airbyte_source_fauna" "my_source_fauna" {
configuration = {
collection = {
deletions = {
- source_fauna_collection_deletion_mode_disabled = {
- deletion_mode = "ignore"
- }
+ disabled = {}
}
- page_size = 4
+ page_size = 0
}
- domain = "...my_domain..."
- port = 5
- scheme = "...my_scheme..."
- secret = "...my_secret..."
- source_type = "fauna"
+ domain = "...my_domain..."
+ port = 10
+ scheme = "...my_scheme..."
+ secret = "...my_secret..."
}
- name = "Irvin Klein"
- secret_id = "...my_secret_id..."
- workspace_id = "1ffc71dc-a163-4f2a-bc80-a97ff334cddf"
+ definition_id = "56112c1f-da02-410a-9cfb-ec287654f12b"
+ name = "Mr. Willard Gislason"
+ secret_id = "...my_secret_id..."
+ workspace_id = "fbb0cddc-f802-4e3e-a016-5466352da9b0"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_file/resource.tf b/examples/resources/airbyte_source_file/resource.tf
new file mode 100644
index 000000000..ef2728718
--- /dev/null
+++ b/examples/resources/airbyte_source_file/resource.tf
@@ -0,0 +1,19 @@
+resource "airbyte_source_file" "my_source_file" {
+ configuration = {
+ dataset_name = "...my_dataset_name..."
+ format = "jsonl"
+ provider = {
+ az_blob_azure_blob_storage = {
+ sas_token = "...my_sas_token..."
+ shared_key = "...my_shared_key..."
+ storage_account = "...my_storage_account..."
+ }
+ }
+ reader_options = "{\"sep\": \"\t\", \"header\": 0, \"names\": [\"column1\", \"column2\"] }"
+ url = "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv"
+ }
+ definition_id = "6c5d5cf5-0fbf-4713-864e-d5bf6d67306c"
+ name = "Floyd Goyette"
+ secret_id = "...my_secret_id..."
+ workspace_id = "68cfaeff-480d-4f14-bee1-0f8279e427b2"
+}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_file_secure/resource.tf b/examples/resources/airbyte_source_file_secure/resource.tf
deleted file mode 100755
index 3a1bb2009..000000000
--- a/examples/resources/airbyte_source_file_secure/resource.tf
+++ /dev/null
@@ -1,20 +0,0 @@
-resource "airbyte_source_file_secure" "my_source_filesecure" {
- configuration = {
- dataset_name = "...my_dataset_name..."
- format = "excel_binary"
- provider = {
- source_file_secure_storage_provider_az_blob_azure_blob_storage = {
- sas_token = "...my_sas_token..."
- shared_key = "...my_shared_key..."
- storage = "AzBlob"
- storage_account = "...my_storage_account..."
- }
- }
- reader_options = "{\"sep\": \" \"}"
- source_type = "file-secure"
- url = "gs://my-google-bucket/data.csv"
- }
- name = "Freddie Von V"
- secret_id = "...my_secret_id..."
- workspace_id = "76c6ab21-d29d-4fc9-8d6f-ecd799390066"
-}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_firebolt/resource.tf b/examples/resources/airbyte_source_firebolt/resource.tf
old mode 100755
new mode 100644
index 5c5ed6ec8..fe0f64146
--- a/examples/resources/airbyte_source_firebolt/resource.tf
+++ b/examples/resources/airbyte_source_firebolt/resource.tf
@@ -1,14 +1,14 @@
resource "airbyte_source_firebolt" "my_source_firebolt" {
configuration = {
- account = "...my_account..."
- database = "...my_database..."
- engine = "...my_engine..."
- host = "api.app.firebolt.io"
- password = "...my_password..."
- source_type = "firebolt"
- username = "username@email.com"
+ account = "...my_account..."
+ database = "...my_database..."
+ engine = "...my_engine..."
+ host = "api.app.firebolt.io"
+ password = "...my_password..."
+ username = "username@email.com"
}
- name = "Donna Abshire"
- secret_id = "...my_secret_id..."
- workspace_id = "5338cec0-86fa-421e-9152-cb3119167b8e"
+ definition_id = "e1d4b428-b10c-462a-aeab-6a16bc0f1be5"
+ name = "Laurie Kuhlman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7324c6ca-7fcd-4ac6-b878-54b69c42e8b9"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_freshcaller/resource.tf b/examples/resources/airbyte_source_freshcaller/resource.tf
old mode 100755
new mode 100644
index ffed3f24c..d23732f3e
--- a/examples/resources/airbyte_source_freshcaller/resource.tf
+++ b/examples/resources/airbyte_source_freshcaller/resource.tf
@@ -2,12 +2,12 @@ resource "airbyte_source_freshcaller" "my_source_freshcaller" {
configuration = {
api_key = "...my_api_key..."
domain = "snaptravel"
- requests_per_minute = 2
- source_type = "freshcaller"
+ requests_per_minute = 7
start_date = "2022-01-01T12:00:00Z"
- sync_lag_minutes = 9
+ sync_lag_minutes = 2
}
- name = "Kenneth Friesen IV"
- secret_id = "...my_secret_id..."
- workspace_id = "d6d364ff-d455-4906-9126-3d48e935c2c9"
+ definition_id = "c06fe5a2-e94e-4ff2-91ad-fc721dd1f802"
+ name = "Margarita Nitzsche"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9660c93e-b114-448c-9cd3-afe5ef85381e"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_freshdesk/resource.tf b/examples/resources/airbyte_source_freshdesk/resource.tf
old mode 100755
new mode 100644
index 150e6b282..03a27342d
--- a/examples/resources/airbyte_source_freshdesk/resource.tf
+++ b/examples/resources/airbyte_source_freshdesk/resource.tf
@@ -2,11 +2,11 @@ resource "airbyte_source_freshdesk" "my_source_freshdesk" {
configuration = {
api_key = "...my_api_key..."
domain = "myaccount.freshdesk.com"
- requests_per_minute = 10
- source_type = "freshdesk"
+ requests_per_minute = 1
start_date = "2020-12-01T00:00:00Z"
}
- name = "Dale Altenwerth"
- secret_id = "...my_secret_id..."
- workspace_id = "3e43202d-7216-4576-9066-41870d9d21f9"
+ definition_id = "9fe1bd22-2412-41e6-b15b-e306a4e83994"
+ name = "Frances Farrell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c75d4c70-b588-42c8-81a0-878bfdf7e2fa"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_freshsales/resource.tf b/examples/resources/airbyte_source_freshsales/resource.tf
old mode 100755
new mode 100644
index c3410eb5d..c0e3a69bc
--- a/examples/resources/airbyte_source_freshsales/resource.tf
+++ b/examples/resources/airbyte_source_freshsales/resource.tf
@@ -2,9 +2,9 @@ resource "airbyte_source_freshsales" "my_source_freshsales" {
configuration = {
api_key = "...my_api_key..."
domain_name = "mydomain.myfreshworks.com"
- source_type = "freshsales"
}
- name = "Gustavo Adams DDS"
- secret_id = "...my_secret_id..."
- workspace_id = "4ecc11a0-8364-4290-a8b8-502a55e7f73b"
+ definition_id = "4a63623e-34bb-4a48-ad6d-0eaf7f54c7c3"
+ name = "Shelly Wolf"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b0a3dd00-07da-4ef7-b0c8-1f95c5b8dd2d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_gainsight_px/resource.tf b/examples/resources/airbyte_source_gainsight_px/resource.tf
old mode 100755
new mode 100644
index 6eb739429..d320c844b
--- a/examples/resources/airbyte_source_gainsight_px/resource.tf
+++ b/examples/resources/airbyte_source_gainsight_px/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_gainsight_px" "my_source_gainsightpx" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "gainsight-px"
+ api_key = "...my_api_key..."
}
- name = "Hugh Goodwin"
- secret_id = "...my_secret_id..."
- workspace_id = "320a319f-4bad-4f94-bc9a-867bc4242666"
+ definition_id = "32b37f6f-ec5c-4d0a-8fda-52f69543b862"
+ name = "Cristina McKenzie"
+ secret_id = "...my_secret_id..."
+ workspace_id = "50480aaa-f77a-4e08-bd2c-af83f045910a"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_gcs/resource.tf b/examples/resources/airbyte_source_gcs/resource.tf
old mode 100755
new mode 100644
index cb82ef849..640ba21ed
--- a/examples/resources/airbyte_source_gcs/resource.tf
+++ b/examples/resources/airbyte_source_gcs/resource.tf
@@ -1,11 +1,50 @@
resource "airbyte_source_gcs" "my_source_gcs" {
configuration = {
- gcs_bucket = "...my_gcs_bucket..."
- gcs_path = "...my_gcs_path..."
- service_account = "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID, \"private_key_id\": YOUR_PRIVATE_KEY, ... }"
- source_type = "gcs"
+ bucket = "...my_bucket..."
+ service_account = "...my_service_account..."
+ start_date = "2021-01-01T00:00:00.000000Z"
+ streams = [
+ {
+ days_to_sync_if_history_is_full = 3
+ format = {
+ source_gcs_csv_format = {
+ delimiter = "...my_delimiter..."
+ double_quote = false
+ encoding = "...my_encoding..."
+ escape_char = "...my_escape_char..."
+ false_values = [
+ "...",
+ ]
+ header_definition = {
+ source_gcs_autogenerated = {}
+ }
+ inference_type = "None"
+ null_values = [
+ "...",
+ ]
+ quote_char = "...my_quote_char..."
+ skip_rows_after_header = 3
+ skip_rows_before_header = 5
+ strings_can_be_null = false
+ true_values = [
+ "...",
+ ]
+ }
+ }
+ globs = [
+ "...",
+ ]
+ input_schema = "...my_input_schema..."
+ legacy_prefix = "...my_legacy_prefix..."
+ name = "Guy Langosh III"
+ primary_key = "...my_primary_key..."
+ schemaless = false
+ validation_policy = "Wait for Discover"
+ },
+ ]
}
- name = "Olga Blanda"
- secret_id = "...my_secret_id..."
- workspace_id = "dca8ef51-fcb4-4c59-bec1-2cdaad0ec7af"
+ definition_id = "a4e6d7c2-fcaa-4386-9a1d-2ddf0351c49c"
+ name = "Leah Jerde Jr."
+ secret_id = "...my_secret_id..."
+ workspace_id = "51741425-e4d3-48a3-8ea5-6cdfa27fbf62"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_getlago/resource.tf b/examples/resources/airbyte_source_getlago/resource.tf
old mode 100755
new mode 100644
index 0904859da..896fb08fb
--- a/examples/resources/airbyte_source_getlago/resource.tf
+++ b/examples/resources/airbyte_source_getlago/resource.tf
@@ -1,9 +1,10 @@
resource "airbyte_source_getlago" "my_source_getlago" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "getlago"
+ api_key = "...my_api_key..."
+ api_url = "...my_api_url..."
}
- name = "Irving Rohan"
- secret_id = "...my_secret_id..."
- workspace_id = "0df448a4-7f93-490c-9888-0983dabf9ef3"
+ definition_id = "25b4bae6-1112-4211-be87-b490ecc6bf75"
+ name = "Mrs. Willie Bins"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c803c831-1a97-4a1a-a894-9629432a02ce"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_github/resource.tf b/examples/resources/airbyte_source_github/resource.tf
old mode 100755
new mode 100644
index 02416c44e..f531534c5
--- a/examples/resources/airbyte_source_github/resource.tf
+++ b/examples/resources/airbyte_source_github/resource.tf
@@ -1,20 +1,26 @@
resource "airbyte_source_github" "my_source_github" {
configuration = {
- branch = "airbytehq/airbyte/master airbytehq/airbyte/my-branch"
+ api_url = "https://github.company.org"
+ branch = "airbytehq/airbyte/master airbytehq/airbyte/my-branch"
+ branches = [
+ "...",
+ ]
credentials = {
- source_github_authentication_o_auth = {
+ o_auth = {
access_token = "...my_access_token..."
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- option_title = "OAuth Credentials"
}
}
+ repositories = [
+ "...",
+ ]
repository = "airbytehq/airbyte"
requests_per_hour = 10
- source_type = "github"
start_date = "2021-03-01T00:00:00Z"
}
- name = "Van Kuhlman IV"
- secret_id = "...my_secret_id..."
- workspace_id = "9af4d357-24cd-4b0f-8d28-1187d56844ed"
+ definition_id = "e017f905-2f20-440e-8692-82dd6a12cb01"
+ name = "Bennie Stroman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "aeeda058-2852-4791-bedf-cf9c9058e69d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_gitlab/resource.tf b/examples/resources/airbyte_source_gitlab/resource.tf
old mode 100755
new mode 100644
index c2a93e727..b8c41ac2f
--- a/examples/resources/airbyte_source_gitlab/resource.tf
+++ b/examples/resources/airbyte_source_gitlab/resource.tf
@@ -1,22 +1,27 @@
resource "airbyte_source_gitlab" "my_source_gitlab" {
configuration = {
- api_url = "https://gitlab.company.org"
+ api_url = "gitlab.com"
credentials = {
- source_gitlab_authorization_method_o_auth2_0 = {
+ source_gitlab_o_auth2_0 = {
access_token = "...my_access_token..."
- auth_type = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
- token_expiry_date = "2021-06-26T03:36:42.239Z"
+ token_expiry_date = "2022-01-24T13:56:19.954Z"
}
}
- groups = "airbyte.io"
- projects = "airbyte.io/documentation"
- source_type = "gitlab"
- start_date = "2021-03-01T00:00:00Z"
+ groups = "airbyte.io"
+ groups_list = [
+ "...",
+ ]
+ projects = "airbyte.io/documentation"
+ projects_list = [
+ "...",
+ ]
+ start_date = "2021-03-01T00:00:00Z"
}
- name = "Frank Keeling"
- secret_id = "...my_secret_id..."
- workspace_id = "628bdfc2-032b-46c8-b992-3b7e13584f7a"
+ definition_id = "e4cb55c6-95e2-4f08-ab76-e351cef20de4"
+ name = "Winston Schroeder"
+ secret_id = "...my_secret_id..."
+ workspace_id = "2b42c84c-d8bc-4607-ae71-4fbf0cfd3aed"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_glassfrog/resource.tf b/examples/resources/airbyte_source_glassfrog/resource.tf
old mode 100755
new mode 100644
index 919fbfcdd..f4b2a88b8
--- a/examples/resources/airbyte_source_glassfrog/resource.tf
+++ b/examples/resources/airbyte_source_glassfrog/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_glassfrog" "my_source_glassfrog" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "glassfrog"
+ api_key = "...my_api_key..."
}
- name = "Carl Davis"
- secret_id = "...my_secret_id..."
- workspace_id = "891f82ce-1157-4172-b053-77dcfa89df97"
+ definition_id = "54ef24d0-de80-4e3d-b905-02015d2de4b8"
+ name = "Jonathon Erdman"
+ secret_id = "...my_secret_id..."
+ workspace_id = "2b3a27b0-b342-4a10-bbc4-7ca706139037"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_gnews/resource.tf b/examples/resources/airbyte_source_gnews/resource.tf
old mode 100755
new mode 100644
index cf35bf01a..3832b1d0e
--- a/examples/resources/airbyte_source_gnews/resource.tf
+++ b/examples/resources/airbyte_source_gnews/resource.tf
@@ -1,23 +1,23 @@
resource "airbyte_source_gnews" "my_source_gnews" {
configuration = {
api_key = "...my_api_key..."
- country = "ie"
+ country = "es"
end_date = "2022-08-21 16:27:09"
in = [
- "content",
+ "description",
]
- language = "fr"
+ language = "ta"
nullable = [
- "description",
+ "content",
]
- query = "Apple AND NOT iPhone"
- sortby = "publishedAt"
- source_type = "gnews"
+ query = "Intel AND (i7 OR i9)"
+ sortby = "relevance"
start_date = "2022-08-21 16:27:09"
top_headlines_query = "Apple AND NOT iPhone"
- top_headlines_topic = "business"
+ top_headlines_topic = "world"
}
- name = "Katrina Considine"
- secret_id = "...my_secret_id..."
- workspace_id = "c3ddc5f1-11de-4a10-a6d5-41a4d190feb2"
+ definition_id = "df3c14a3-49fd-4e89-ab27-6cbad00caee1"
+ name = "Sadie Gleichner"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5b57e54a-27b6-417a-812e-6bf68e1922df"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_google_ads/resource.tf b/examples/resources/airbyte_source_google_ads/resource.tf
old mode 100755
new mode 100644
index 325505c0b..0de15eb08
--- a/examples/resources/airbyte_source_google_ads/resource.tf
+++ b/examples/resources/airbyte_source_google_ads/resource.tf
@@ -17,10 +17,10 @@ resource "airbyte_source_google_ads" "my_source_googleads" {
customer_id = "6783948572,5839201945"
end_date = "2017-01-30"
login_customer_id = "7349206847"
- source_type = "google-ads"
start_date = "2017-01-25"
}
- name = "Dr. Forrest Roob"
- secret_id = "...my_secret_id..."
- workspace_id = "bddb4847-08fb-44e3-91e6-bc158c4c4e54"
+ definition_id = "14313a52-3140-431f-97b8-2b3c164c1950"
+ name = "Dr. Matt Feeney"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ecd9b5a7-5a7c-45fc-a1d7-22b310b676fb"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_google_analytics_data_api/resource.tf b/examples/resources/airbyte_source_google_analytics_data_api/resource.tf
old mode 100755
new mode 100644
index 817811e0b..080c4735a
--- a/examples/resources/airbyte_source_google_analytics_data_api/resource.tf
+++ b/examples/resources/airbyte_source_google_analytics_data_api/resource.tf
@@ -1,21 +1,78 @@
resource "airbyte_source_google_analytics_data_api" "my_source_googleanalyticsdataapi" {
configuration = {
credentials = {
- source_google_analytics_data_api_credentials_authenticate_via_google_oauth_ = {
+ authenticate_via_google_oauth = {
access_token = "...my_access_token..."
- auth_type = "Client"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
}
}
- custom_reports = "...my_custom_reports..."
+ custom_reports_array = [
+ {
+ dimension_filter = {
+ and_group = {
+ expressions = [
+ {
+ field_name = "...my_field_name..."
+ filter = {
+ source_google_analytics_data_api_update_schemas_custom_reports_array_between_filter = {
+ from_value = {
+ source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter_1_expressions_double_value = {
+ value = 45.05
+ }
+ }
+ to_value = {
+ source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter_1_expressions_filter_double_value = {
+ value = 22.65
+ }
+ }
+ }
+ }
+ },
+ ]
+ }
+ }
+ dimensions = [
+ "...",
+ ]
+ metric_filter = {
+ source_google_analytics_data_api_update_and_group = {
+ expressions = [
+ {
+ field_name = "...my_field_name..."
+ filter = {
+ source_google_analytics_data_api_schemas_custom_reports_array_metric_filter_between_filter = {
+ from_value = {
+ source_google_analytics_data_api_schemas_custom_reports_array_metric_filter_metrics_filter_1_expressions_filter_double_value = {
+ value = 8.4
+ }
+ }
+ to_value = {
+ source_google_analytics_data_api_schemas_custom_reports_array_metric_filter_metrics_filter_1_double_value = {
+ value = 77.49
+ }
+ }
+ }
+ }
+ },
+ ]
+ }
+ }
+ metrics = [
+ "...",
+ ]
+ name = "Mrs. Mercedes Herman PhD"
+ },
+ ]
date_ranges_start_date = "2021-01-01"
- property_id = "5729978930"
- source_type = "google-analytics-data-api"
- window_in_days = 364
+ property_ids = [
+ "...",
+ ]
+ window_in_days = 60
}
- name = "Juanita Collier"
- secret_id = "...my_secret_id..."
- workspace_id = "0e9b200c-e78a-41bd-8fb7-a0a116ce723d"
+ definition_id = "d4fc0324-2ccd-4276-ba0d-30eb91c3df25"
+ name = "Rodney Goldner"
+ secret_id = "...my_secret_id..."
+ workspace_id = "52dc8258-f30a-4271-83b0-0ec7045956c0"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_google_analytics_v4/resource.tf b/examples/resources/airbyte_source_google_analytics_v4/resource.tf
deleted file mode 100755
index 013f62889..000000000
--- a/examples/resources/airbyte_source_google_analytics_v4/resource.tf
+++ /dev/null
@@ -1,21 +0,0 @@
-resource "airbyte_source_google_analytics_v4" "my_source_googleanalyticsv4" {
- configuration = {
- credentials = {
- source_google_analytics_v4_credentials_authenticate_via_google_oauth_ = {
- access_token = "...my_access_token..."
- auth_type = "Client"
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
- refresh_token = "...my_refresh_token..."
- }
- }
- custom_reports = "...my_custom_reports..."
- source_type = "google-analytics-v4"
- start_date = "2020-06-01"
- view_id = "...my_view_id..."
- window_in_days = 120
- }
- name = "Dr. Doug Dibbert"
- secret_id = "...my_secret_id..."
- workspace_id = "af725b29-1220-430d-83f5-aeb7799d22e8"
-}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_google_directory/resource.tf b/examples/resources/airbyte_source_google_directory/resource.tf
old mode 100755
new mode 100644
index 8e6f6a292..61adeeed9
--- a/examples/resources/airbyte_source_google_directory/resource.tf
+++ b/examples/resources/airbyte_source_google_directory/resource.tf
@@ -1,15 +1,14 @@
resource "airbyte_source_google_directory" "my_source_googledirectory" {
configuration = {
credentials = {
- source_google_directory_google_credentials_service_account_key = {
- credentials_json = "...my_credentials_json..."
- credentials_title = "Service accounts"
- email = "Ayla.Zulauf@hotmail.com"
+ service_account_key = {
+ credentials_json = "...my_credentials_json..."
+ email = "Sharon_Schmidt@gmail.com"
}
}
- source_type = "google-directory"
}
- name = "Mrs. Allen Lockman"
- secret_id = "...my_secret_id..."
- workspace_id = "dc42c876-c2c2-4dfb-8cfc-1c76230f841f"
+ definition_id = "8b68fdfc-0692-4b4f-9673-f59a8d0acc99"
+ name = "Mr. Mattie Rau"
+ secret_id = "...my_secret_id..."
+ workspace_id = "1059fac1-d6c9-4b0f-8f35-d942704e93eb"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_google_drive/resource.tf b/examples/resources/airbyte_source_google_drive/resource.tf
new file mode 100644
index 000000000..c87c7b6b9
--- /dev/null
+++ b/examples/resources/airbyte_source_google_drive/resource.tf
@@ -0,0 +1,35 @@
+resource "airbyte_source_google_drive" "my_source_googledrive" {
+ configuration = {
+ credentials = {
+ source_google_drive_authenticate_via_google_o_auth = {
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ refresh_token = "...my_refresh_token..."
+ }
+ }
+ folder_url = "https://drive.google.com/drive/folders/1Xaz0vXXXX2enKnNYU5qSt9NS70gvMyYn"
+ start_date = "2021-01-01T00:00:00.000000Z"
+ streams = [
+ {
+ days_to_sync_if_history_is_full = 4
+ format = {
+ source_google_drive_avro_format = {
+ double_as_string = false
+ }
+ }
+ globs = [
+ "...",
+ ]
+ input_schema = "...my_input_schema..."
+ name = "Rex Pacocha"
+ primary_key = "...my_primary_key..."
+ schemaless = false
+ validation_policy = "Emit Record"
+ },
+ ]
+ }
+ definition_id = "f0c4c84b-89e6-425b-ae87-6a32dc31e1b4"
+ name = "Lester Kihn"
+ secret_id = "...my_secret_id..."
+ workspace_id = "53bf2def-ea2f-4d14-9f48-d36313985539"
+}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_google_pagespeed_insights/resource.tf b/examples/resources/airbyte_source_google_pagespeed_insights/resource.tf
old mode 100755
new mode 100644
index fc20bedc4..8cf56cb35
--- a/examples/resources/airbyte_source_google_pagespeed_insights/resource.tf
+++ b/examples/resources/airbyte_source_google_pagespeed_insights/resource.tf
@@ -2,9 +2,8 @@ resource "airbyte_source_google_pagespeed_insights" "my_source_googlepagespeedin
configuration = {
api_key = "...my_api_key..."
categories = [
- "pwa",
+ "seo",
]
- source_type = "google-pagespeed-insights"
strategies = [
"desktop",
]
@@ -12,7 +11,8 @@ resource "airbyte_source_google_pagespeed_insights" "my_source_googlepagespeedin
"...",
]
}
- name = "Kristopher Dare"
- secret_id = "...my_secret_id..."
- workspace_id = "db14db6b-e5a6-4859-98e2-2ae20da16fc2"
+ definition_id = "52d3206a-fb3a-4724-a60d-40134e58876c"
+ name = "Miss Ronald Erdman Sr."
+ secret_id = "...my_secret_id..."
+ workspace_id = "8ae06a57-c7c5-477a-b1e5-baddd2747bbc"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_google_search_console/resource.tf b/examples/resources/airbyte_source_google_search_console/resource.tf
old mode 100755
new mode 100644
index c4f5686bf..b592f14ab
--- a/examples/resources/airbyte_source_google_search_console/resource.tf
+++ b/examples/resources/airbyte_source_google_search_console/resource.tf
@@ -1,9 +1,8 @@
resource "airbyte_source_google_search_console" "my_source_googlesearchconsole" {
configuration = {
authorization = {
- source_google_search_console_authentication_type_o_auth = {
+ source_google_search_console_o_auth = {
access_token = "...my_access_token..."
- auth_type = "Client"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
@@ -13,9 +12,9 @@ resource "airbyte_source_google_search_console" "my_source_googlesearchconsole"
custom_reports_array = [
{
dimensions = [
- "page",
+ "device",
]
- name = "Heidi Bernier"
+ name = "Ms. Randy Gorczany V"
},
]
data_state = "all"
@@ -23,10 +22,10 @@ resource "airbyte_source_google_search_console" "my_source_googlesearchconsole"
site_urls = [
"...",
]
- source_type = "google-search-console"
- start_date = "2022-07-11"
+ start_date = "2020-03-18"
}
- name = "Jordan Hilll"
- secret_id = "...my_secret_id..."
- workspace_id = "90439d22-2465-4694-a240-7084f7ab37ce"
+ definition_id = "165bc484-0e7f-4b5d-b254-77f370b0ec7c"
+ name = "Wendell Rempel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "0cb9d8df-c27a-48c7-ac3e-b5dc55714db0"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_google_sheets/resource.tf b/examples/resources/airbyte_source_google_sheets/resource.tf
old mode 100755
new mode 100644
index 0cf171cfe..2fb7a44e2
--- a/examples/resources/airbyte_source_google_sheets/resource.tf
+++ b/examples/resources/airbyte_source_google_sheets/resource.tf
@@ -1,18 +1,17 @@
resource "airbyte_source_google_sheets" "my_source_googlesheets" {
configuration = {
credentials = {
- source_google_sheets_authentication_authenticate_via_google_o_auth_ = {
- auth_type = "Client"
+ source_google_sheets_authenticate_via_google_o_auth = {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
}
}
names_conversion = true
- source_type = "google-sheets"
spreadsheet_id = "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit"
}
- name = "Irene Davis"
- secret_id = "...my_secret_id..."
- workspace_id = "194db554-10ad-4c66-9af9-0a26c7cdc981"
+ definition_id = "d7698733-386b-453a-879a-0805ff1793bf"
+ name = "Roderick Kutch"
+ secret_id = "...my_secret_id..."
+ workspace_id = "d63199bd-6b46-48c8-9ec2-1a9ab567f13c"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_google_webfonts/resource.tf b/examples/resources/airbyte_source_google_webfonts/resource.tf
old mode 100755
new mode 100644
index aa77478b4..988018ac9
--- a/examples/resources/airbyte_source_google_webfonts/resource.tf
+++ b/examples/resources/airbyte_source_google_webfonts/resource.tf
@@ -4,9 +4,9 @@ resource "airbyte_source_google_webfonts" "my_source_googlewebfonts" {
api_key = "...my_api_key..."
pretty_print = "...my_pretty_print..."
sort = "...my_sort..."
- source_type = "google-webfonts"
}
- name = "Donald Hyatt"
- secret_id = "...my_secret_id..."
- workspace_id = "81d6bb33-cfaa-4348-831b-f407ee4fcf0c"
+ definition_id = "77e51fa7-73fc-4f1a-8306-e082909d97bf"
+ name = "Kerry Reinger"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3671a9ca-1d9c-4174-bee4-145562d27576"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_google_workspace_admin_reports/resource.tf b/examples/resources/airbyte_source_google_workspace_admin_reports/resource.tf
old mode 100755
new mode 100644
index 68d8d1824..e5b9c6b2c
--- a/examples/resources/airbyte_source_google_workspace_admin_reports/resource.tf
+++ b/examples/resources/airbyte_source_google_workspace_admin_reports/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_google_workspace_admin_reports" "my_source_googleworkspaceadminreports" {
configuration = {
credentials_json = "...my_credentials_json..."
- email = "Bridgette_Rohan@gmail.com"
- lookback = 10
- source_type = "google-workspace-admin-reports"
+ email = "Daisha.Halvorson12@gmail.com"
+ lookback = 8
}
- name = "Samantha Huels"
- secret_id = "...my_secret_id..."
- workspace_id = "398a0dc7-6632-44cc-b06c-8ca12d025292"
+ definition_id = "b8adc8fd-2a7f-4940-9ec4-4e216dff8929"
+ name = "Francisco Swaniawski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a00b494f-7d68-4d64-a810-b2959587ed0c"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_greenhouse/resource.tf b/examples/resources/airbyte_source_greenhouse/resource.tf
old mode 100755
new mode 100644
index 6d269f662..3fe567a04
--- a/examples/resources/airbyte_source_greenhouse/resource.tf
+++ b/examples/resources/airbyte_source_greenhouse/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_greenhouse" "my_source_greenhouse" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "greenhouse"
+ api_key = "...my_api_key..."
}
- name = "Patricia Pouros"
- secret_id = "...my_secret_id..."
- workspace_id = "5722dd89-5b8b-4cf2-8db9-59693352f745"
+ definition_id = "47c0f9ce-33c0-4f29-8c11-e4e993d29474"
+ name = "Cassandra Carroll"
+ secret_id = "...my_secret_id..."
+ workspace_id = "54dff6cf-9b79-4e23-a888-b6bde25154a5"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_gridly/resource.tf b/examples/resources/airbyte_source_gridly/resource.tf
old mode 100755
new mode 100644
index 688646912..3650ce611
--- a/examples/resources/airbyte_source_gridly/resource.tf
+++ b/examples/resources/airbyte_source_gridly/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_gridly" "my_source_gridly" {
configuration = {
- api_key = "...my_api_key..."
- grid_id = "...my_grid_id..."
- source_type = "gridly"
+ api_key = "...my_api_key..."
+ grid_id = "...my_grid_id..."
}
- name = "Josephine McCullough"
- secret_id = "...my_secret_id..."
- workspace_id = "d78de3b6-e938-49f5-abb7-f662550a2838"
+ definition_id = "2da80f2b-fa49-4853-a695-0935ad536c50"
+ name = "Megan Kshlerin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e30b46b9-59e4-4e75-8ac0-9227119b95b6"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_harvest/resource.tf b/examples/resources/airbyte_source_harvest/resource.tf
old mode 100755
new mode 100644
index d86bbd45a..fd0088e0b
--- a/examples/resources/airbyte_source_harvest/resource.tf
+++ b/examples/resources/airbyte_source_harvest/resource.tf
@@ -2,18 +2,18 @@ resource "airbyte_source_harvest" "my_source_harvest" {
configuration = {
account_id = "...my_account_id..."
credentials = {
- source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth_ = {
- auth_type = "Client"
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
- refresh_token = "...my_refresh_token..."
+ authenticate_via_harvest_o_auth = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ refresh_token = "...my_refresh_token..."
}
}
replication_end_date = "2017-01-25T00:00:00Z"
replication_start_date = "2017-01-25T00:00:00Z"
- source_type = "harvest"
}
- name = "Rodney Orn"
- secret_id = "...my_secret_id..."
- workspace_id = "2315bba6-5016-44e0-af5b-f6ae591bc8bd"
+ definition_id = "bb7037ab-5561-4ce1-bb1c-adaa0e328a3b"
+ name = "Jorge Heathcote"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e04de287-b752-465f-9ff2-deb8cbf2674a"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_hubplanner/resource.tf b/examples/resources/airbyte_source_hubplanner/resource.tf
old mode 100755
new mode 100644
index a1b471fa7..6eef15cd9
--- a/examples/resources/airbyte_source_hubplanner/resource.tf
+++ b/examples/resources/airbyte_source_hubplanner/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_hubplanner" "my_source_hubplanner" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "hubplanner"
+ api_key = "...my_api_key..."
}
- name = "Cary Emmerich Sr."
- secret_id = "...my_secret_id..."
- workspace_id = "b63c205f-da84-4077-8a68-a9a35d086b6f"
+ definition_id = "92033b17-bfcc-4526-af10-da401fb0fc52"
+ name = "Gladys Adams"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9994a41e-4a89-485c-b8fa-7d86bdf5bf91"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_hubspot/resource.tf b/examples/resources/airbyte_source_hubspot/resource.tf
old mode 100755
new mode 100644
index 4eaa75fe6..5678e8045
--- a/examples/resources/airbyte_source_hubspot/resource.tf
+++ b/examples/resources/airbyte_source_hubspot/resource.tf
@@ -1,17 +1,16 @@
resource "airbyte_source_hubspot" "my_source_hubspot" {
configuration = {
credentials = {
- source_hubspot_authentication_o_auth = {
- client_id = "123456789000"
- client_secret = "secret"
- credentials_title = "OAuth Credentials"
- refresh_token = "refresh_token"
+ source_hubspot_o_auth = {
+ client_id = "123456789000"
+ client_secret = "secret"
+ refresh_token = "refresh_token"
}
}
- source_type = "hubspot"
- start_date = "2017-01-25T00:00:00Z"
+ start_date = "2017-01-25T00:00:00Z"
}
- name = "Mr. Tomas Wisozk DVM"
- secret_id = "...my_secret_id..."
- workspace_id = "9f443b42-57b9-492c-8dbd-a6a61efa2198"
+ definition_id = "b1210837-28d8-49e3-91e8-68df1f2c5ad8"
+ name = "Amelia Gulgowski II"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3eb240d6-26d4-4887-8caa-f58e0f5c1159"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_insightly/resource.tf b/examples/resources/airbyte_source_insightly/resource.tf
old mode 100755
new mode 100644
index 582ee7125..1f8b482b4
--- a/examples/resources/airbyte_source_insightly/resource.tf
+++ b/examples/resources/airbyte_source_insightly/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_insightly" "my_source_insightly" {
configuration = {
- source_type = "insightly"
- start_date = "2021-03-01T00:00:00Z"
- token = "...my_token..."
+ start_date = "2021-03-01T00:00:00Z"
+ token = "...my_token..."
}
- name = "Dana Lindgren"
- secret_id = "...my_secret_id..."
- workspace_id = "0a9eba47-f7d3-4ef0-8964-0d6a1831c87a"
+ definition_id = "d6014991-0eec-4fc7-b384-ec604057d045"
+ name = "Geneva Bogan"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b5cccbbb-db31-4196-8f99-d67745afb65f"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_instagram/resource.tf b/examples/resources/airbyte_source_instagram/resource.tf
old mode 100755
new mode 100644
index 53a8e0460..c582c14a2
--- a/examples/resources/airbyte_source_instagram/resource.tf
+++ b/examples/resources/airbyte_source_instagram/resource.tf
@@ -3,10 +3,10 @@ resource "airbyte_source_instagram" "my_source_instagram" {
access_token = "...my_access_token..."
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- source_type = "instagram"
start_date = "2017-01-25T00:00:00Z"
}
- name = "Mae Hoppe"
- secret_id = "...my_secret_id..."
- workspace_id = "f1ad837a-e80c-41c1-9c95-ba998678fa3f"
+ definition_id = "20bd7a7e-c191-4626-87e6-80e4417c6f4b"
+ name = "Margaret Maggio"
+ secret_id = "...my_secret_id..."
+ workspace_id = "206a4b04-3ef0-49e6-9b75-b726765eab1a"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_instatus/resource.tf b/examples/resources/airbyte_source_instatus/resource.tf
old mode 100755
new mode 100644
index 23ad265ef..d6b8b1d0c
--- a/examples/resources/airbyte_source_instatus/resource.tf
+++ b/examples/resources/airbyte_source_instatus/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_instatus" "my_source_instatus" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "instatus"
+ api_key = "...my_api_key..."
}
- name = "Bobbie Johnston"
- secret_id = "...my_secret_id..."
- workspace_id = "1af388ce-0361-4444-8c79-77a0ef2f5360"
+ definition_id = "d842954b-d759-4bdc-8b93-f80b7f557094"
+ name = "Enrique Kovacek"
+ secret_id = "...my_secret_id..."
+ workspace_id = "356d5339-1630-4fd2-b131-d4fbef253f33"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_intercom/resource.tf b/examples/resources/airbyte_source_intercom/resource.tf
old mode 100755
new mode 100644
index 9ebb6836e..f87c48084
--- a/examples/resources/airbyte_source_intercom/resource.tf
+++ b/examples/resources/airbyte_source_intercom/resource.tf
@@ -3,10 +3,10 @@ resource "airbyte_source_intercom" "my_source_intercom" {
access_token = "...my_access_token..."
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- source_type = "intercom"
start_date = "2020-11-16T00:00:00Z"
}
- name = "Darnell Watsica"
- secret_id = "...my_secret_id..."
- workspace_id = "934152ed-7e25-43f4-8157-deaa7170f445"
+ definition_id = "135dc90f-6379-44a9-bd5a-cf56253a66e5"
+ name = "Clint Douglas V"
+ secret_id = "...my_secret_id..."
+ workspace_id = "29314c65-ed70-4eb1-bcb4-fc24002ca0d0"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_ip2whois/resource.tf b/examples/resources/airbyte_source_ip2whois/resource.tf
old mode 100755
new mode 100644
index 0dbb1423d..140ed15d0
--- a/examples/resources/airbyte_source_ip2whois/resource.tf
+++ b/examples/resources/airbyte_source_ip2whois/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_ip2whois" "my_source_ip2whois" {
configuration = {
- api_key = "...my_api_key..."
- domain = "www.facebook.com"
- source_type = "ip2whois"
+ api_key = "...my_api_key..."
+ domain = "www.google.com"
}
- name = "Leland Wisoky"
- secret_id = "...my_secret_id..."
- workspace_id = "7aaf9bba-d185-4fe4-b1d6-bf5c838fbb8c"
+ definition_id = "711f25a2-8dde-404a-9ce3-be57bfa46127"
+ name = "Monica Champlin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5ed08074-e17a-4648-8571-1ab94fe75a51"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_iterable/resource.tf b/examples/resources/airbyte_source_iterable/resource.tf
old mode 100755
new mode 100644
index 6482f729a..197ea0d3e
--- a/examples/resources/airbyte_source_iterable/resource.tf
+++ b/examples/resources/airbyte_source_iterable/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_iterable" "my_source_iterable" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "iterable"
- start_date = "2021-04-01T00:00:00Z"
+ api_key = "...my_api_key..."
+ start_date = "2021-04-01T00:00:00Z"
}
- name = "Archie Jaskolski"
- secret_id = "...my_secret_id..."
- workspace_id = "c4b425e9-9e62-434c-9f7b-79dfeb77a5c3"
+ definition_id = "00977793-827c-406d-986b-4fbde6ae5395"
+ name = "Katherine Bashirian"
+ secret_id = "...my_secret_id..."
+ workspace_id = "d8df8fdd-acae-4826-9af8-b9bb4850d654"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_jira/resource.tf b/examples/resources/airbyte_source_jira/resource.tf
old mode 100755
new mode 100644
index a8ae76693..c62592e94
--- a/examples/resources/airbyte_source_jira/resource.tf
+++ b/examples/resources/airbyte_source_jira/resource.tf
@@ -1,18 +1,22 @@
resource "airbyte_source_jira" "my_source_jira" {
configuration = {
api_token = "...my_api_token..."
- domain = ".jira.com"
- email = "Eldridge_Reichert@hotmail.com"
+ domain = "jira..com"
+ email = "Benton_Tromp@hotmail.com"
enable_experimental_streams = false
expand_issue_changelog = false
+ expand_issue_transition = true
+ issues_stream_expand_with = [
+ "transitions",
+ ]
projects = [
"...",
]
- render_fields = false
- source_type = "jira"
+ render_fields = true
start_date = "2021-03-01T00:00:00Z"
}
- name = "Olive Windler"
- secret_id = "...my_secret_id..."
- workspace_id = "0a54b475-f16f-456d-b85a-3c4ac631b99e"
+ definition_id = "7e778751-26eb-4569-8431-2d5d5e6a2d83"
+ name = "Kenneth Runte"
+ secret_id = "...my_secret_id..."
+ workspace_id = "8dd54122-5651-4393-a1b0-488926ab9cfe"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_k6_cloud/resource.tf b/examples/resources/airbyte_source_k6_cloud/resource.tf
old mode 100755
new mode 100644
index 0e9669348..7ec0bdd8e
--- a/examples/resources/airbyte_source_k6_cloud/resource.tf
+++ b/examples/resources/airbyte_source_k6_cloud/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_k6_cloud" "my_source_k6cloud" {
configuration = {
- api_token = "...my_api_token..."
- source_type = "k6-cloud"
+ api_token = "...my_api_token..."
}
- name = "Ella Runolfsdottir"
- secret_id = "...my_secret_id..."
- workspace_id = "8f9fdb94-10f6-43bb-b817-837b01afdd78"
+ definition_id = "2e85afcc-9acc-46e7-a95c-9a7c9f197511"
+ name = "Franklin D'Amore"
+ secret_id = "...my_secret_id..."
+ workspace_id = "96585095-001a-4ad5-a5f9-cfb0d1e8d3ac"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_klarna/resource.tf b/examples/resources/airbyte_source_klarna/resource.tf
old mode 100755
new mode 100644
index 512bb119f..58676d8f3
--- a/examples/resources/airbyte_source_klarna/resource.tf
+++ b/examples/resources/airbyte_source_klarna/resource.tf
@@ -1,12 +1,12 @@
resource "airbyte_source_klarna" "my_source_klarna" {
configuration = {
- password = "...my_password..."
- playground = true
- region = "us"
- source_type = "klarna"
- username = "Chase50"
+ password = "...my_password..."
+ playground = true
+ region = "oc"
+ username = "Lessie_Beatty"
}
- name = "Caleb Rau"
- secret_id = "...my_secret_id..."
- workspace_id = "873f5033-f19d-4bf1-a5ce-4152eab9cd7e"
+ definition_id = "ed1087b9-882d-454c-a598-cc59eb952f06"
+ name = "Carmen Bins"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7fd8f9d1-baac-46e0-9b1e-50c14468d231"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_klaviyo/resource.tf b/examples/resources/airbyte_source_klaviyo/resource.tf
old mode 100755
new mode 100644
index 1367f888c..9f94c8c8e
--- a/examples/resources/airbyte_source_klaviyo/resource.tf
+++ b/examples/resources/airbyte_source_klaviyo/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_klaviyo" "my_source_klaviyo" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "klaviyo"
- start_date = "2017-01-25T00:00:00Z"
+ api_key = "...my_api_key..."
+ start_date = "2017-01-25T00:00:00Z"
}
- name = "Charlotte Muller"
- secret_id = "...my_secret_id..."
- workspace_id = "0e123b78-47ec-459e-9f67-f3c4cce4b6d7"
+ definition_id = "d98f81ed-eee1-4be4-a723-eeaf419bc59e"
+ name = "Joanne Murray"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9e9d149f-3b04-4e32-9c64-9b6bc8e2c7d0"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_kustomer_singer/resource.tf b/examples/resources/airbyte_source_kustomer_singer/resource.tf
old mode 100755
new mode 100644
index c4813b3f9..ae8b59a58
--- a/examples/resources/airbyte_source_kustomer_singer/resource.tf
+++ b/examples/resources/airbyte_source_kustomer_singer/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_kustomer_singer" "my_source_kustomersinger" {
configuration = {
- api_token = "...my_api_token..."
- source_type = "kustomer-singer"
- start_date = "2019-01-01T00:00:00Z"
+ api_token = "...my_api_token..."
+ start_date = "2019-01-01T00:00:00Z"
}
- name = "Bobbie Jacobs"
- secret_id = "...my_secret_id..."
- workspace_id = "3c574750-1357-4e44-b51f-8b084c3197e1"
+ definition_id = "de0f8a2b-57ad-4de2-8e75-111fd0612ffd"
+ name = "Mr. Antonia Yost"
+ secret_id = "...my_secret_id..."
+ workspace_id = "78b38595-7e3c-4921-8c92-84a21155c549"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_kyve/resource.tf b/examples/resources/airbyte_source_kyve/resource.tf
old mode 100755
new mode 100644
index f9f9e84a0..b756af876
--- a/examples/resources/airbyte_source_kyve/resource.tf
+++ b/examples/resources/airbyte_source_kyve/resource.tf
@@ -1,13 +1,13 @@
resource "airbyte_source_kyve" "my_source_kyve" {
configuration = {
- max_pages = 6
- page_size = 2
- pool_ids = "0,1"
- source_type = "kyve"
- start_ids = "0"
- url_base = "https://api.korellia.kyve.network/"
+ max_pages = 0
+ page_size = 0
+ pool_ids = "0"
+ start_ids = "0"
+ url_base = "https://api.beta.kyve.network/"
}
- name = "Gail Homenick"
- secret_id = "...my_secret_id..."
- workspace_id = "94874c2d-5cc4-4972-a33e-66bd8fe5d00b"
+ definition_id = "be9a984e-4b07-4bca-b13e-d5606ac59e7c"
+ name = "Wilbur Turcotte"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b09ffd37-53fe-446a-9403-ba1bd8103cfb"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_launchdarkly/resource.tf b/examples/resources/airbyte_source_launchdarkly/resource.tf
old mode 100755
new mode 100644
index c6a3e6f9a..840c66e28
--- a/examples/resources/airbyte_source_launchdarkly/resource.tf
+++ b/examples/resources/airbyte_source_launchdarkly/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_launchdarkly" "my_source_launchdarkly" {
configuration = {
access_token = "...my_access_token..."
- source_type = "launchdarkly"
}
- name = "Darren Monahan"
- secret_id = "...my_secret_id..."
- workspace_id = "20387320-590c-4cc1-8964-00313b3e5044"
+ definition_id = "422849b5-8575-49fd-b9d7-4aa20ea69f1b"
+ name = "Jodi Marquardt"
+ secret_id = "...my_secret_id..."
+ workspace_id = "dd1b5a02-95b1-497b-bb02-27d625c3155f"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_lemlist/resource.tf b/examples/resources/airbyte_source_lemlist/resource.tf
old mode 100755
new mode 100644
index f30eccca4..cf4eb84d0
--- a/examples/resources/airbyte_source_lemlist/resource.tf
+++ b/examples/resources/airbyte_source_lemlist/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_lemlist" "my_source_lemlist" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "lemlist"
+ api_key = "...my_api_key..."
}
- name = "Gene Herman"
- secret_id = "...my_secret_id..."
- workspace_id = "72dc4077-d0cc-43f4-88ef-c15ceb4d6e1e"
+ definition_id = "731c6e6b-c1ca-4f16-aaee-78925477f387"
+ name = "Mr. Clyde Dibbert"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ba4aed29-95c6-463b-ad13-c6e3bbb93bd4"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_lever_hiring/resource.tf b/examples/resources/airbyte_source_lever_hiring/resource.tf
old mode 100755
new mode 100644
index 62eb79581..8ab529a34
--- a/examples/resources/airbyte_source_lever_hiring/resource.tf
+++ b/examples/resources/airbyte_source_lever_hiring/resource.tf
@@ -1,16 +1,15 @@
resource "airbyte_source_lever_hiring" "my_source_leverhiring" {
configuration = {
credentials = {
- source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key_ = {
- api_key = "...my_api_key..."
- auth_type = "Api Key"
+ authenticate_via_lever_api_key = {
+ api_key = "...my_api_key..."
}
}
- environment = "Sandbox"
- source_type = "lever-hiring"
+ environment = "Production"
start_date = "2021-03-01T00:00:00Z"
}
- name = "Donald Wuckert"
- secret_id = "...my_secret_id..."
- workspace_id = "aedf2aca-b58b-4991-8926-ddb589461e74"
+ definition_id = "3d75c669-3a6b-492e-b166-50e4c3120d77"
+ name = "Bill Howell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c796fdac-1f48-4b8f-8670-1054c1db1ce4"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_linkedin_ads/resource.tf b/examples/resources/airbyte_source_linkedin_ads/resource.tf
old mode 100755
new mode 100644
index cf38a588a..e9f6826dc
--- a/examples/resources/airbyte_source_linkedin_ads/resource.tf
+++ b/examples/resources/airbyte_source_linkedin_ads/resource.tf
@@ -1,25 +1,24 @@
resource "airbyte_source_linkedin_ads" "my_source_linkedinads" {
configuration = {
account_ids = [
- 1,
+ 6,
]
ad_analytics_reports = [
{
- name = "Kara Rohan"
- pivot_by = "MEMBER_REGION_V2"
+ name = "Dwayne Zboncak"
+ pivot_by = "IMPRESSION_DEVICE_TYPE"
time_granularity = "MONTHLY"
},
]
credentials = {
- source_linkedin_ads_authentication_access_token = {
+ access_token = {
access_token = "...my_access_token..."
- auth_method = "access_token"
}
}
- source_type = "linkedin-ads"
- start_date = "2021-05-17"
+ start_date = "2021-05-17"
}
- name = "Elsa Adams"
- secret_id = "...my_secret_id..."
- workspace_id = "930b69f7-ac2f-472f-8850-090491160820"
+ definition_id = "4672645c-fb24-449e-af87-64eb4b875ea1"
+ name = "Blake Howell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6c0fac14-03cf-4d91-9cc5-3ae1f1c37b35"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_linkedin_pages/resource.tf b/examples/resources/airbyte_source_linkedin_pages/resource.tf
old mode 100755
new mode 100644
index 60a09bbc6..0acf5a528
--- a/examples/resources/airbyte_source_linkedin_pages/resource.tf
+++ b/examples/resources/airbyte_source_linkedin_pages/resource.tf
@@ -1,15 +1,14 @@
resource "airbyte_source_linkedin_pages" "my_source_linkedinpages" {
configuration = {
credentials = {
- source_linkedin_pages_authentication_access_token = {
+ source_linkedin_pages_access_token = {
access_token = "...my_access_token..."
- auth_method = "access_token"
}
}
- org_id = "123456789"
- source_type = "linkedin-pages"
+ org_id = "123456789"
}
- name = "Tracey Kutch"
- secret_id = "...my_secret_id..."
- workspace_id = "c66183bf-e965-49eb-80ec-16faf75b0b53"
+ definition_id = "0ebb3981-c89f-4963-b1e6-164cc8788ff7"
+ name = "Kayla Haley"
+ secret_id = "...my_secret_id..."
+ workspace_id = "33f7738d-63dc-47b7-b8b1-6c6167f1e8f0"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_linnworks/resource.tf b/examples/resources/airbyte_source_linnworks/resource.tf
old mode 100755
new mode 100644
index 72d488706..b96766fe5
--- a/examples/resources/airbyte_source_linnworks/resource.tf
+++ b/examples/resources/airbyte_source_linnworks/resource.tf
@@ -2,11 +2,11 @@ resource "airbyte_source_linnworks" "my_source_linnworks" {
configuration = {
application_id = "...my_application_id..."
application_secret = "...my_application_secret..."
- source_type = "linnworks"
- start_date = "2022-05-04T07:21:12.859Z"
+ start_date = "2022-09-13T03:04:12.490Z"
token = "...my_token..."
}
- name = "Antonia Muller"
- secret_id = "...my_secret_id..."
- workspace_id = "cbaaf445-2c48-442c-9b2a-d32dafe81a88"
+ definition_id = "2f92210b-5c8f-4204-a6a7-75647eb6babc"
+ name = "Melba McDermott IV"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b2eafdc4-53fb-46a0-992c-447712b4a020"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_lokalise/resource.tf b/examples/resources/airbyte_source_lokalise/resource.tf
old mode 100755
new mode 100644
index f14863c27..d9fff00f6
--- a/examples/resources/airbyte_source_lokalise/resource.tf
+++ b/examples/resources/airbyte_source_lokalise/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_lokalise" "my_source_lokalise" {
configuration = {
- api_key = "...my_api_key..."
- project_id = "...my_project_id..."
- source_type = "lokalise"
+ api_key = "...my_api_key..."
+ project_id = "...my_project_id..."
}
- name = "Bernard Gottlieb"
- secret_id = "...my_secret_id..."
- workspace_id = "573fecd4-7353-4f63-8820-9379aa69cd5f"
+ definition_id = "8830aabe-ffb8-4d97-a510-59b440a5f2f6"
+ name = "Inez Gottlieb"
+ secret_id = "...my_secret_id..."
+ workspace_id = "66849f7b-beaa-4ef5-a404-3cb4c473e8c7"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_mailchimp/resource.tf b/examples/resources/airbyte_source_mailchimp/resource.tf
old mode 100755
new mode 100644
index aa2a83969..9f316d477
--- a/examples/resources/airbyte_source_mailchimp/resource.tf
+++ b/examples/resources/airbyte_source_mailchimp/resource.tf
@@ -2,14 +2,13 @@ resource "airbyte_source_mailchimp" "my_source_mailchimp" {
configuration = {
campaign_id = "...my_campaign_id..."
credentials = {
- source_mailchimp_authentication_api_key = {
- apikey = "...my_apikey..."
- auth_type = "apikey"
+ api_key = {
+ apikey = "...my_apikey..."
}
}
- source_type = "mailchimp"
}
- name = "Benny Williamson"
- secret_id = "...my_secret_id..."
- workspace_id = "da18a782-2bf9-4589-8e68-61adb55f9e5d"
+ definition_id = "bd591517-4a55-43fd-a41d-af7626ef51c5"
+ name = "Lyle Haley"
+ secret_id = "...my_secret_id..."
+ workspace_id = "0c6c0cc9-3e76-4e9f-9ef5-41f06ca13b1e"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_mailgun/resource.tf b/examples/resources/airbyte_source_mailgun/resource.tf
old mode 100755
new mode 100644
index 351a2840a..3725e7794
--- a/examples/resources/airbyte_source_mailgun/resource.tf
+++ b/examples/resources/airbyte_source_mailgun/resource.tf
@@ -2,10 +2,10 @@ resource "airbyte_source_mailgun" "my_source_mailgun" {
configuration = {
domain_region = "...my_domain_region..."
private_key = "...my_private_key..."
- source_type = "mailgun"
start_date = "2023-08-01T00:00:00Z"
}
- name = "Sheri Mayert"
- secret_id = "...my_secret_id..."
- workspace_id = "8f7502bf-dc34-4508-81f1-764456379f3f"
+ definition_id = "c1488faa-411d-49d9-a226-9c9d648f0bcc"
+ name = "Ervin Deckow"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5af6ed3c-47c1-4416-8113-c2d3cb5eaa64"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_mailjet_sms/resource.tf b/examples/resources/airbyte_source_mailjet_sms/resource.tf
old mode 100755
new mode 100644
index 0b4b039c5..e474debcd
--- a/examples/resources/airbyte_source_mailjet_sms/resource.tf
+++ b/examples/resources/airbyte_source_mailjet_sms/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_mailjet_sms" "my_source_mailjetsms" {
configuration = {
- end_date = 1666281656
- source_type = "mailjet-sms"
- start_date = 1666261656
- token = "...my_token..."
+ end_date = 1666281656
+ start_date = 1666261656
+ token = "...my_token..."
}
- name = "Dr. Eloise Cronin"
- secret_id = "...my_secret_id..."
- workspace_id = "62657b36-fc6b-49f5-87ce-525c67641a83"
+ definition_id = "6a42dbbb-853e-4c4b-9e6a-18b0d79003de"
+ name = "Gilberto Pagac"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3bfaadd2-9a6d-4ff6-8b6b-f32faf825bea"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_marketo/resource.tf b/examples/resources/airbyte_source_marketo/resource.tf
old mode 100755
new mode 100644
index 52094c4a2..2985974c4
--- a/examples/resources/airbyte_source_marketo/resource.tf
+++ b/examples/resources/airbyte_source_marketo/resource.tf
@@ -3,10 +3,10 @@ resource "airbyte_source_marketo" "my_source_marketo" {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
domain_url = "https://000-AAA-000.mktorest.com"
- source_type = "marketo"
start_date = "2020-09-25T00:00:00Z"
}
- name = "Jerome Berge"
- secret_id = "...my_secret_id..."
- workspace_id = "b4c21ccb-423a-4bcd-891f-aabdd88e71f6"
+ definition_id = "c87aaffe-b9ea-4290-b7e9-f4166b42b69c"
+ name = "Doris Steuber"
+ secret_id = "...my_secret_id..."
+ workspace_id = "bbad3f0b-f8ca-4743-bfb1-506e5d6deb8b"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_metabase/resource.tf b/examples/resources/airbyte_source_metabase/resource.tf
old mode 100755
new mode 100644
index 8e677b990..d6c3af50b
--- a/examples/resources/airbyte_source_metabase/resource.tf
+++ b/examples/resources/airbyte_source_metabase/resource.tf
@@ -3,10 +3,10 @@ resource "airbyte_source_metabase" "my_source_metabase" {
instance_api_url = "https://localhost:3000/api/"
password = "...my_password..."
session_token = "...my_session_token..."
- source_type = "metabase"
- username = "Peyton.Green"
+ username = "Efren_Mante15"
}
- name = "Tammy Sporer"
- secret_id = "...my_secret_id..."
- workspace_id = "71e7fd07-4009-4ef8-929d-e1dd7097b5da"
+ definition_id = "f283fdf1-b362-4a3e-b9ca-cc879ba7ac01"
+ name = "Gail Kirlin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7c271c50-44a2-45a4-b7e4-eabe3a97768e"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_microsoft_teams/resource.tf b/examples/resources/airbyte_source_microsoft_teams/resource.tf
old mode 100755
new mode 100644
index c1781a88b..9e87b583c
--- a/examples/resources/airbyte_source_microsoft_teams/resource.tf
+++ b/examples/resources/airbyte_source_microsoft_teams/resource.tf
@@ -1,17 +1,16 @@
resource "airbyte_source_microsoft_teams" "my_source_microsoftteams" {
configuration = {
credentials = {
- source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft = {
- auth_type = "Token"
+ authenticate_via_microsoft = {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
tenant_id = "...my_tenant_id..."
}
}
- period = "D7"
- source_type = "microsoft-teams"
+ period = "D7"
}
- name = "Brandy Ryan"
- secret_id = "...my_secret_id..."
- workspace_id = "fa6c78a2-16e1-49ba-beca-6191498140b6"
+ definition_id = "79345d14-4630-4331-8f29-cf10b0742b93"
+ name = "Jesus Marquardt Sr."
+ secret_id = "...my_secret_id..."
+ workspace_id = "1a320cca-d5ad-4c13-b0ef-57488395b5ae"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_mixpanel/resource.tf b/examples/resources/airbyte_source_mixpanel/resource.tf
old mode 100755
new mode 100644
index 0e0998645..5ae012160
--- a/examples/resources/airbyte_source_mixpanel/resource.tf
+++ b/examples/resources/airbyte_source_mixpanel/resource.tf
@@ -1,22 +1,20 @@
resource "airbyte_source_mixpanel" "my_source_mixpanel" {
configuration = {
- attribution_window = 2
+ attribution_window = 0
credentials = {
- source_mixpanel_authentication_wildcard_project_secret = {
- api_secret = "...my_api_secret..."
- option_title = "Project Secret"
+ project_secret = {
+ api_secret = "...my_api_secret..."
}
}
- date_window_size = 10
+ date_window_size = 3
end_date = "2021-11-16"
- project_id = 7
project_timezone = "UTC"
region = "US"
select_properties_by_default = true
- source_type = "mixpanel"
start_date = "2021-11-16"
}
- name = "Donald Ernser"
- secret_id = "...my_secret_id..."
- workspace_id = "f37e4aa8-6855-4596-a732-aa5dcb6682cb"
+ definition_id = "a514955f-a2ea-425a-91d7-622e389cc420"
+ name = "Cecilia Gerlach"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b3299a61-1cc7-4be3-a8ba-7188dc05c92c"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_monday/resource.tf b/examples/resources/airbyte_source_monday/resource.tf
old mode 100755
new mode 100644
index ffbd6efed..4b5c11dc5
--- a/examples/resources/airbyte_source_monday/resource.tf
+++ b/examples/resources/airbyte_source_monday/resource.tf
@@ -1,14 +1,13 @@
resource "airbyte_source_monday" "my_source_monday" {
configuration = {
credentials = {
- source_monday_authorization_method_api_token = {
+ api_token = {
api_token = "...my_api_token..."
- auth_type = "api_token"
}
}
- source_type = "monday"
}
- name = "Shirley Wisoky"
- secret_id = "...my_secret_id..."
- workspace_id = "fd5fb6e9-1b9a-49f7-8846-e2c3309db053"
+ definition_id = "2050fdf2-ba7d-443d-a0d3-384e15ed5352"
+ name = "Stella Lubowitz"
+ secret_id = "...my_secret_id..."
+ workspace_id = "aeabadeb-93c7-4728-b9b6-069b6a28df31"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_mongodb/resource.tf b/examples/resources/airbyte_source_mongodb/resource.tf
deleted file mode 100755
index 6fdbef3dc..000000000
--- a/examples/resources/airbyte_source_mongodb/resource.tf
+++ /dev/null
@@ -1,18 +0,0 @@
-resource "airbyte_source_mongodb" "my_source_mongodb" {
- configuration = {
- auth_source = "admin"
- database = "...my_database..."
- instance_type = {
- source_mongodb_mongo_db_instance_type_mongo_db_atlas = {
- cluster_url = "...my_cluster_url..."
- instance = "atlas"
- }
- }
- password = "...my_password..."
- source_type = "mongodb"
- user = "...my_user..."
- }
- name = "Doreen Mayer"
- secret_id = "...my_secret_id..."
- workspace_id = "5ca006f5-392c-411a-a5a8-bf92f97428ad"
-}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_mongodb_internal_poc/resource.tf b/examples/resources/airbyte_source_mongodb_internal_poc/resource.tf
old mode 100755
new mode 100644
index 600385cb2..3e7176227
--- a/examples/resources/airbyte_source_mongodb_internal_poc/resource.tf
+++ b/examples/resources/airbyte_source_mongodb_internal_poc/resource.tf
@@ -4,10 +4,10 @@ resource "airbyte_source_mongodb_internal_poc" "my_source_mongodbinternalpoc" {
connection_string = "mongodb://example1.host.com:27017,example2.host.com:27017,example3.host.com:27017"
password = "...my_password..."
replica_set = "...my_replica_set..."
- source_type = "mongodb-internal-poc"
user = "...my_user..."
}
- name = "Eduardo Weissnat"
- secret_id = "...my_secret_id..."
- workspace_id = "f8221125-359d-4983-87f7-a79cd72cd248"
+ definition_id = "6ea9203c-b787-46e7-9a53-1f3b4802a3b9"
+ name = "Hector Kuhic"
+ secret_id = "...my_secret_id..."
+ workspace_id = "76dbe116-c781-416c-b0bf-b32667c47d50"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_mongodb_v2/resource.tf b/examples/resources/airbyte_source_mongodb_v2/resource.tf
new file mode 100644
index 000000000..a541f3992
--- /dev/null
+++ b/examples/resources/airbyte_source_mongodb_v2/resource.tf
@@ -0,0 +1,21 @@
+resource "airbyte_source_mongodb_v2" "my_source_mongodbv2" {
+ configuration = {
+ database_config = {
+ mongo_db_atlas_replica_set = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ auth_source = "admin"
+ connection_string = "mongodb+srv://cluster0.abcd1.mongodb.net/"
+ database = "...my_database..."
+ password = "...my_password..."
+ username = "Curtis38"
+ }
+ }
+ discover_sample_size = 1
+ initial_waiting_seconds = 0
+ queue_size = 5
+ }
+ definition_id = "c03f8392-0634-4c9d-b1c4-26709282f0b3"
+ name = "Nora Waelchi"
+ secret_id = "...my_secret_id..."
+ workspace_id = "729ff502-4b69-40b2-b36f-2f7a3b95d4ab"
+}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_mssql/resource.tf b/examples/resources/airbyte_source_mssql/resource.tf
old mode 100755
new mode 100644
index 215a27ace..7d0f278cd
--- a/examples/resources/airbyte_source_mssql/resource.tf
+++ b/examples/resources/airbyte_source_mssql/resource.tf
@@ -6,30 +6,25 @@ resource "airbyte_source_mssql" "my_source_mssql" {
password = "...my_password..."
port = 1433
replication_method = {
- source_mssql_update_method_read_changes_using_change_data_capture_cdc_ = {
+ read_changes_using_change_data_capture_cdc = {
data_to_sync = "New Changes Only"
- initial_waiting_seconds = 7
- method = "CDC"
- snapshot_isolation = "Snapshot"
+ initial_waiting_seconds = 2
+ snapshot_isolation = "Read Committed"
}
}
schemas = [
"...",
]
- source_type = "mssql"
ssl_method = {
- source_mssql_ssl_method_encrypted_trust_server_certificate_ = {
- ssl_method = "encrypted_trust_server_certificate"
- }
+ source_mssql_encrypted_trust_server_certificate = {}
}
tunnel_method = {
- source_mssql_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_mssql_no_tunnel = {}
}
- username = "Bobbie60"
+ username = "Salvatore_Weissnat66"
}
- name = "Clarence Murazik"
- secret_id = "...my_secret_id..."
- workspace_id = "1ef5725f-1169-4ac1-a41d-8a23c23e34f2"
+ definition_id = "b6ad0e44-a4dc-4970-8078-573a20ac990f"
+ name = "Wm Corkery"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7a67a851-50ea-4861-a0cd-618d74280681"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_my_hours/resource.tf b/examples/resources/airbyte_source_my_hours/resource.tf
old mode 100755
new mode 100644
index f07121f85..19cdd5193
--- a/examples/resources/airbyte_source_my_hours/resource.tf
+++ b/examples/resources/airbyte_source_my_hours/resource.tf
@@ -3,10 +3,10 @@ resource "airbyte_source_my_hours" "my_source_myhours" {
email = "john@doe.com"
logs_batch_size = 30
password = "...my_password..."
- source_type = "my-hours"
- start_date = "2016-01-01"
+ start_date = "%Y-%m-%d"
}
- name = "Elsa Kerluke"
- secret_id = "...my_secret_id..."
- workspace_id = "922151fe-1712-4099-853e-9f543d854439"
+ definition_id = "95261555-3a71-4349-8a3f-9799a12d6e33"
+ name = "Franklin Jerde"
+ secret_id = "...my_secret_id..."
+ workspace_id = "00d47724-56d0-4d26-9914-7bb3566ca647"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_mysql/resource.tf b/examples/resources/airbyte_source_mysql/resource.tf
old mode 100755
new mode 100644
index 06257c92b..6d2e42947
--- a/examples/resources/airbyte_source_mysql/resource.tf
+++ b/examples/resources/airbyte_source_mysql/resource.tf
@@ -6,26 +6,21 @@ resource "airbyte_source_mysql" "my_source_mysql" {
password = "...my_password..."
port = 3306
replication_method = {
- source_mysql_update_method_read_changes_using_binary_log_cdc_ = {
- initial_waiting_seconds = 10
- method = "CDC"
+ read_changes_using_binary_log_cdc = {
+ initial_waiting_seconds = 7
server_time_zone = "...my_server_time_zone..."
}
}
- source_type = "mysql"
ssl_mode = {
- source_mysql_ssl_modes_preferred = {
- mode = "preferred"
- }
+ preferred = {}
}
tunnel_method = {
- source_mysql_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_mysql_no_tunnel = {}
}
- username = "Carley25"
+ username = "Eino_White"
}
- name = "Ruth Goodwin"
- secret_id = "...my_secret_id..."
- workspace_id = "bc154188-c2f5-46e8-9da7-832eabd617c3"
+ definition_id = "aba25784-141a-421c-8938-ad6fcbb78bed"
+ name = "Mr. Ross Cole"
+ secret_id = "...my_secret_id..."
+ workspace_id = "704ae193-8752-47d5-a3ef-7246d0c0b796"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_netsuite/resource.tf b/examples/resources/airbyte_source_netsuite/resource.tf
old mode 100755
new mode 100644
index 7f15f2723..86ddb2eec
--- a/examples/resources/airbyte_source_netsuite/resource.tf
+++ b/examples/resources/airbyte_source_netsuite/resource.tf
@@ -6,13 +6,13 @@ resource "airbyte_source_netsuite" "my_source_netsuite" {
"...",
]
realm = "...my_realm..."
- source_type = "netsuite"
start_datetime = "2017-01-25T00:00:00Z"
token_key = "...my_token_key..."
token_secret = "...my_token_secret..."
- window_in_days = 7
+ window_in_days = 5
}
- name = "Miss Meredith Hand"
- secret_id = "...my_secret_id..."
- workspace_id = "4bf01bad-8706-4d46-882b-fbdc41ff5d4e"
+ definition_id = "b7242137-fe2e-49e2-ac4c-104f1dbe3b1f"
+ name = "Ramona Bahringer"
+ secret_id = "...my_secret_id..."
+ workspace_id = "77573847-65c7-4741-8014-d1f263651b77"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_notion/resource.tf b/examples/resources/airbyte_source_notion/resource.tf
old mode 100755
new mode 100644
index a197d16f1..155b43020
--- a/examples/resources/airbyte_source_notion/resource.tf
+++ b/examples/resources/airbyte_source_notion/resource.tf
@@ -1,15 +1,14 @@
resource "airbyte_source_notion" "my_source_notion" {
configuration = {
credentials = {
- source_notion_authenticate_using_access_token = {
- auth_type = "token"
- token = "...my_token..."
+ source_notion_access_token = {
+ token = "...my_token..."
}
}
- source_type = "notion"
- start_date = "2020-11-16T00:00:00.000Z"
+ start_date = "2020-11-16T00:00:00.000Z"
}
- name = "Francisco Yost"
- secret_id = "...my_secret_id..."
- workspace_id = "cb35d176-38f1-4edb-b835-9ecc5cb860f8"
+ definition_id = "fe0e5e5f-386d-40ac-9af3-c6558d9b03d2"
+ name = "Jeannette Ward"
+ secret_id = "...my_secret_id..."
+ workspace_id = "dbadc477-cb62-4b59-b9f1-ee4249578a5b"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_nytimes/resource.tf b/examples/resources/airbyte_source_nytimes/resource.tf
old mode 100755
new mode 100644
index e6a87e50d..ec0fdbd3a
--- a/examples/resources/airbyte_source_nytimes/resource.tf
+++ b/examples/resources/airbyte_source_nytimes/resource.tf
@@ -1,13 +1,13 @@
resource "airbyte_source_nytimes" "my_source_nytimes" {
configuration = {
- api_key = "...my_api_key..."
- end_date = "1851-01"
- period = "7"
- share_type = "facebook"
- source_type = "nytimes"
- start_date = "2022-08"
+ api_key = "...my_api_key..."
+ end_date = "1851-01"
+ period = "30"
+ share_type = "facebook"
+ start_date = "2022-08"
}
- name = "Mr. Emily Macejkovic"
- secret_id = "...my_secret_id..."
- workspace_id = "4fe44472-97cd-43b1-9d3b-bce247b7684e"
+ definition_id = "83b2c4dd-4d42-4907-b41e-e0bbab0457d9"
+ name = "Sue Durgan"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e6ecd841-e72a-4766-a686-faa512d8044b"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_okta/resource.tf b/examples/resources/airbyte_source_okta/resource.tf
old mode 100755
new mode 100644
index 68b9ac0e9..93779910f
--- a/examples/resources/airbyte_source_okta/resource.tf
+++ b/examples/resources/airbyte_source_okta/resource.tf
@@ -1,16 +1,15 @@
resource "airbyte_source_okta" "my_source_okta" {
configuration = {
credentials = {
- source_okta_authorization_method_api_token = {
+ source_okta_api_token = {
api_token = "...my_api_token..."
- auth_type = "api_token"
}
}
- domain = "...my_domain..."
- source_type = "okta"
- start_date = "2022-07-22T00:00:00Z"
+ domain = "...my_domain..."
+ start_date = "2022-07-22T00:00:00Z"
}
- name = "Mr. Emmett Heidenreich"
- secret_id = "...my_secret_id..."
- workspace_id = "6d71cffb-d0eb-474b-8421-953b44bd3c43"
+ definition_id = "05c5b711-2361-4f26-947b-86cdec1a2bc2"
+ name = "Isaac Bruen"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5e3ceb6c-910d-4c95-a96c-b5f3bc4b3253"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_omnisend/resource.tf b/examples/resources/airbyte_source_omnisend/resource.tf
old mode 100755
new mode 100644
index cfbc86e3a..841adc6bd
--- a/examples/resources/airbyte_source_omnisend/resource.tf
+++ b/examples/resources/airbyte_source_omnisend/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_omnisend" "my_source_omnisend" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "omnisend"
+ api_key = "...my_api_key..."
}
- name = "Lynn Miller"
- secret_id = "...my_secret_id..."
- workspace_id = "3e5953c0-0113-4986-baa4-1e6c31cc2f1f"
+ definition_id = "e6bd591e-2544-44d2-a34f-d1d8ea1c7d43"
+ name = "Rachel Ankunding"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c9c1a8da-b7e7-43a5-9718-14e4dc1f633a"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_onesignal/resource.tf b/examples/resources/airbyte_source_onesignal/resource.tf
old mode 100755
new mode 100644
index feb38ab1a..6545f9ecf
--- a/examples/resources/airbyte_source_onesignal/resource.tf
+++ b/examples/resources/airbyte_source_onesignal/resource.tf
@@ -8,11 +8,11 @@ resource "airbyte_source_onesignal" "my_source_onesignal" {
},
]
outcome_names = "os__session_duration.count,os__click.count,CustomOutcomeName.sum"
- source_type = "onesignal"
start_date = "2020-11-16T00:00:00Z"
user_auth_key = "...my_user_auth_key..."
}
- name = "Joan Schaefer"
- secret_id = "...my_secret_id..."
- workspace_id = "41ffbe9c-bd79-45ee-a5e0-76cc7abf616e"
+ definition_id = "58a542d5-17fc-488b-8499-8d75efedea33"
+ name = "Krystal Hamill"
+ secret_id = "...my_secret_id..."
+ workspace_id = "15598db9-2c72-4d54-9f53-8928a50561c1"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_oracle/resource.tf b/examples/resources/airbyte_source_oracle/resource.tf
old mode 100755
new mode 100644
index 457d7de4a..2503a56dc
--- a/examples/resources/airbyte_source_oracle/resource.tf
+++ b/examples/resources/airbyte_source_oracle/resource.tf
@@ -1,33 +1,29 @@
resource "airbyte_source_oracle" "my_source_oracle" {
configuration = {
connection_data = {
- source_oracle_connect_by_service_name = {
- connection_type = "service_name"
- service_name = "...my_service_name..."
+ service_name = {
+ service_name = "...my_service_name..."
}
}
encryption = {
- source_oracle_encryption_native_network_encryption_nne_ = {
- encryption_algorithm = "RC4_56"
- encryption_method = "client_nne"
+ native_network_encryption_nne = {
+ encryption_algorithm = "3DES168"
}
}
host = "...my_host..."
jdbc_url_params = "...my_jdbc_url_params..."
password = "...my_password..."
- port = 4
+ port = 8
schemas = [
"...",
]
- source_type = "oracle"
tunnel_method = {
- source_oracle_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_oracle_no_tunnel = {}
}
- username = "Oswaldo42"
+ username = "Hellen.Champlin"
}
- name = "Cheryl McKenzie"
- secret_id = "...my_secret_id..."
- workspace_id = "b90f2e09-d19d-42fc-af9e-2e105944b935"
+ definition_id = "a1ad7b3d-761e-429e-b26a-e07d2b59ab56"
+ name = "Jake Pfeffer"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c000ccde-ed12-4bd5-ab73-d022a608737f"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_orb/resource.tf b/examples/resources/airbyte_source_orb/resource.tf
old mode 100755
new mode 100644
index 5a3a772ce..6c9e1c8fb
--- a/examples/resources/airbyte_source_orb/resource.tf
+++ b/examples/resources/airbyte_source_orb/resource.tf
@@ -1,19 +1,19 @@
resource "airbyte_source_orb" "my_source_orb" {
configuration = {
api_key = "...my_api_key..."
- lookback_window_days = 9
+ lookback_window_days = 6
numeric_event_properties_keys = [
"...",
]
- plan_id = "...my_plan_id..."
- source_type = "orb"
- start_date = "2022-03-01T00:00:00Z"
+ plan_id = "...my_plan_id..."
+ start_date = "2022-03-01T00:00:00Z"
string_event_properties_keys = [
"...",
]
subscription_usage_grouping_key = "...my_subscription_usage_grouping_key..."
}
- name = "Josephine Kilback"
- secret_id = "...my_secret_id..."
- workspace_id = "2f90849d-6aed-44ae-8b75-37cd9222c9ff"
+ definition_id = "f9cf17c9-c1c9-4188-a190-0dfc35041fcd"
+ name = "Shaun Schimmel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "262ef24d-9236-49b1-bf5a-7ba288f10a06"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_orbit/resource.tf b/examples/resources/airbyte_source_orbit/resource.tf
old mode 100755
new mode 100644
index 0564ed716..c147ea87a
--- a/examples/resources/airbyte_source_orbit/resource.tf
+++ b/examples/resources/airbyte_source_orbit/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_orbit" "my_source_orbit" {
configuration = {
- api_token = "...my_api_token..."
- source_type = "orbit"
- start_date = "...my_start_date..."
- workspace = "...my_workspace..."
+ api_token = "...my_api_token..."
+ start_date = "...my_start_date..."
+ workspace = "...my_workspace..."
}
- name = "Jo Greenholt V"
- secret_id = "...my_secret_id..."
- workspace_id = "abfa2e76-1f0c-4a4d-856e-f1031e6899f0"
+ definition_id = "35ff19f3-8868-45d8-941e-7db0723f9473"
+ name = "Salvatore Schmitt DVM"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e5b71225-778f-47a0-a3c1-e08d80f694c4"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_outbrain_amplify/resource.tf b/examples/resources/airbyte_source_outbrain_amplify/resource.tf
old mode 100755
new mode 100644
index 3d11879df..d9a7987e4
--- a/examples/resources/airbyte_source_outbrain_amplify/resource.tf
+++ b/examples/resources/airbyte_source_outbrain_amplify/resource.tf
@@ -1,18 +1,17 @@
resource "airbyte_source_outbrain_amplify" "my_source_outbrainamplify" {
configuration = {
credentials = {
- source_outbrain_amplify_authentication_method_access_token = {
+ source_outbrain_amplify_access_token = {
access_token = "...my_access_token..."
- type = "access_token"
}
}
end_date = "...my_end_date..."
- geo_location_breakdown = "subregion"
- report_granularity = "daily"
- source_type = "outbrain-amplify"
+ geo_location_breakdown = "region"
+ report_granularity = "monthly"
start_date = "...my_start_date..."
}
- name = "Cynthia Boyer"
- secret_id = "...my_secret_id..."
- workspace_id = "2cd55cc0-584a-4184-976d-971fc820c65b"
+ definition_id = "9d0f84cc-bad7-41da-b038-014a124b6e7b"
+ name = "Donna Leannon"
+ secret_id = "...my_secret_id..."
+ workspace_id = "37b0c992-762a-438a-a73d-79a85cb72465"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_outreach/resource.tf b/examples/resources/airbyte_source_outreach/resource.tf
old mode 100755
new mode 100644
index 379b400ed..2917fa4c3
--- a/examples/resources/airbyte_source_outreach/resource.tf
+++ b/examples/resources/airbyte_source_outreach/resource.tf
@@ -4,10 +4,10 @@ resource "airbyte_source_outreach" "my_source_outreach" {
client_secret = "...my_client_secret..."
redirect_uri = "...my_redirect_uri..."
refresh_token = "...my_refresh_token..."
- source_type = "outreach"
start_date = "2020-11-16T00:00:00Z"
}
- name = "Kim Kirlin"
- secret_id = "...my_secret_id..."
- workspace_id = "8e0cc885-187e-44de-84af-28c5dddb46aa"
+ definition_id = "18021619-8723-463e-89a2-aae62d9d7702"
+ name = "Tanya Hand"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6995c576-52df-4199-822b-3629976b741d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_paypal_transaction/resource.tf b/examples/resources/airbyte_source_paypal_transaction/resource.tf
old mode 100755
new mode 100644
index 9f3533fc0..dc6de4cad
--- a/examples/resources/airbyte_source_paypal_transaction/resource.tf
+++ b/examples/resources/airbyte_source_paypal_transaction/resource.tf
@@ -4,10 +4,11 @@ resource "airbyte_source_paypal_transaction" "my_source_paypaltransaction" {
client_secret = "...my_client_secret..."
is_sandbox = false
refresh_token = "...my_refresh_token..."
- source_type = "paypal-transaction"
start_date = "2021-06-11T23:59:59+00:00"
+ time_window = 7
}
- name = "Ernestine Little"
- secret_id = "...my_secret_id..."
- workspace_id = "da013191-1296-4466-85c1-d81f29042f56"
+ definition_id = "dd349afd-0cd9-45bc-be33-42dc402aef61"
+ name = "Edna Hamill"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9f94f985-aa22-4e67-bc77-be4e4244a41c"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_paystack/resource.tf b/examples/resources/airbyte_source_paystack/resource.tf
old mode 100755
new mode 100644
index dbca94488..a28198b71
--- a/examples/resources/airbyte_source_paystack/resource.tf
+++ b/examples/resources/airbyte_source_paystack/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_paystack" "my_source_paystack" {
configuration = {
- lookback_window_days = 6
+ lookback_window_days = 9
secret_key = "...my_secret_key..."
- source_type = "paystack"
start_date = "2017-01-25T00:00:00Z"
}
- name = "Dr. Boyd Wilderman"
- secret_id = "...my_secret_id..."
- workspace_id = "2216cbe0-71bc-4163-a279-a3b084da9925"
+ definition_id = "5b489304-8e9c-41af-9961-b1c883a57271"
+ name = "Kari Lemke"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b6433cb8-2b32-4ad0-bfd9-a9d8ba9b0df8"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_pendo/resource.tf b/examples/resources/airbyte_source_pendo/resource.tf
old mode 100755
new mode 100644
index d4cc1314e..cb933c4bf
--- a/examples/resources/airbyte_source_pendo/resource.tf
+++ b/examples/resources/airbyte_source_pendo/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_pendo" "my_source_pendo" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "pendo"
+ api_key = "...my_api_key..."
}
- name = "Estelle Bechtelar"
- secret_id = "...my_secret_id..."
- workspace_id = "40847a74-2d84-4496-8bde-ecf6b99bc635"
+ definition_id = "6503c474-3ee7-49bd-93e2-04659bbdc56c"
+ name = "Mandy Conroy"
+ secret_id = "...my_secret_id..."
+ workspace_id = "0259c6b1-3998-4d3f-8543-0ae066d4a91b"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_persistiq/resource.tf b/examples/resources/airbyte_source_persistiq/resource.tf
old mode 100755
new mode 100644
index 54a89064e..0e9158ba6
--- a/examples/resources/airbyte_source_persistiq/resource.tf
+++ b/examples/resources/airbyte_source_persistiq/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_persistiq" "my_source_persistiq" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "persistiq"
+ api_key = "...my_api_key..."
}
- name = "Nicole Vandervort"
- secret_id = "...my_secret_id..."
- workspace_id = "df55c294-c060-4b06-a128-7764eef6d0c6"
+ definition_id = "bbc35ba8-92b6-4d58-85ab-7b9331a5ddaf"
+ name = "Taylor Keeling"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5ec8caac-d8d2-4abf-9c0f-33811ddad7d7"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_pexels_api/resource.tf b/examples/resources/airbyte_source_pexels_api/resource.tf
old mode 100755
new mode 100644
index 18312ed96..58122aa07
--- a/examples/resources/airbyte_source_pexels_api/resource.tf
+++ b/examples/resources/airbyte_source_pexels_api/resource.tf
@@ -2,13 +2,13 @@ resource "airbyte_source_pexels_api" "my_source_pexelsapi" {
configuration = {
api_key = "...my_api_key..."
color = "orange"
- locale = "en-US"
+ locale = "pt-BR"
orientation = "landscape"
- query = "oceans"
+ query = "people"
size = "small"
- source_type = "pexels-api"
}
- name = "Arnold Dooley"
- secret_id = "...my_secret_id..."
- workspace_id = "63457150-9a8e-4870-93c5-a1f9c242c7b6"
+ definition_id = "f68e00dc-dadd-4479-a116-8b4fa7262d2a"
+ name = "Brandy Weimann"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6dd11df0-9849-4375-b622-7890d41f1391"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_pinterest/resource.tf b/examples/resources/airbyte_source_pinterest/resource.tf
old mode 100755
new mode 100644
index 76a69a918..92dce58a3
--- a/examples/resources/airbyte_source_pinterest/resource.tf
+++ b/examples/resources/airbyte_source_pinterest/resource.tf
@@ -1,18 +1,37 @@
resource "airbyte_source_pinterest" "my_source_pinterest" {
configuration = {
credentials = {
- source_pinterest_authorization_method_access_token = {
- access_token = "...my_access_token..."
- auth_method = "access_token"
+ source_pinterest_o_auth2_0 = {
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ refresh_token = "...my_refresh_token..."
}
}
- source_type = "pinterest"
- start_date = "2022-07-28"
+ custom_reports = [
+ {
+ attribution_types = [
+ "HOUSEHOLD",
+ ]
+ click_window_days = "30"
+ columns = [
+ "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT",
+ ]
+ conversion_report_time = "TIME_OF_AD_ACTION"
+ engagement_window_days = "7"
+ granularity = "MONTH"
+ level = "CAMPAIGN"
+ name = "Ms. Edgar Halvorson"
+ start_date = "2022-07-28"
+ view_window_days = "0"
+ },
+ ]
+ start_date = "2022-07-28"
status = [
"ACTIVE",
]
}
- name = "Nathan Bauch"
- secret_id = "...my_secret_id..."
- workspace_id = "3df5b671-9890-4f42-a4bb-438d85b26059"
+ definition_id = "66a5ec46-f2bc-4e2e-b7bb-ccef588ac548"
+ name = "Lamar Lakin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a9dbf52c-7929-43e2-8aa8-1903348b38fe"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_pipedrive/resource.tf b/examples/resources/airbyte_source_pipedrive/resource.tf
old mode 100755
new mode 100644
index 7ca558813..5ae7c709d
--- a/examples/resources/airbyte_source_pipedrive/resource.tf
+++ b/examples/resources/airbyte_source_pipedrive/resource.tf
@@ -1,13 +1,10 @@
resource "airbyte_source_pipedrive" "my_source_pipedrive" {
configuration = {
- authorization = {
- api_token = "...my_api_token..."
- auth_type = "Token"
- }
- replication_start_date = "2017-01-25T00:00:00Z"
- source_type = "pipedrive"
+ api_token = "...my_api_token..."
+ replication_start_date = "2017-01-25 00:00:00Z"
}
- name = "Rhonda Hammes"
- secret_id = "...my_secret_id..."
- workspace_id = "c2059c9c-3f56-47e0-a252-765b1d62fcda"
+ definition_id = "3b520112-5b29-4252-a784-d2d0f1707475"
+ name = "Sean Swaniawski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "49780ba1-d6a2-48c6-aefe-59b72db22407"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_pocket/resource.tf b/examples/resources/airbyte_source_pocket/resource.tf
old mode 100755
new mode 100644
index 48fd04c0a..98f609d60
--- a/examples/resources/airbyte_source_pocket/resource.tf
+++ b/examples/resources/airbyte_source_pocket/resource.tf
@@ -8,12 +8,12 @@ resource "airbyte_source_pocket" "my_source_pocket" {
favorite = true
search = "...my_search..."
since = "2022-10-20 14:14:14"
- sort = "site"
- source_type = "pocket"
+ sort = "newest"
state = "unread"
tag = "...my_tag..."
}
- name = "Christina Bode"
- secret_id = "...my_secret_id..."
- workspace_id = "e2239e8f-25cd-40d1-9d95-9f439e39266c"
+ definition_id = "da763315-0acf-4ec2-81f7-3646e1c87958"
+ name = "Brandi Hane"
+ secret_id = "...my_secret_id..."
+ workspace_id = "82553101-4017-4845-aa4c-1173de2c277a"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_pokeapi/resource.tf b/examples/resources/airbyte_source_pokeapi/resource.tf
old mode 100755
new mode 100644
index f8a819bee..818f2e763
--- a/examples/resources/airbyte_source_pokeapi/resource.tf
+++ b/examples/resources/airbyte_source_pokeapi/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_pokeapi" "my_source_pokeapi" {
configuration = {
- pokemon_name = "snorlax"
- source_type = "pokeapi"
+ pokemon_name = "luxray"
}
- name = "Jeremiah Hahn"
- secret_id = "...my_secret_id..."
- workspace_id = "aa2b2411-3695-4d1e-a698-fcc4596217c2"
+ definition_id = "e2388fd0-120f-462c-91a2-676b4d9282ad"
+ name = "Ramona Stiedemann"
+ secret_id = "...my_secret_id..."
+ workspace_id = "d5253fa0-2ef0-408f-918d-81572f724d1e"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_polygon_stock_api/resource.tf b/examples/resources/airbyte_source_polygon_stock_api/resource.tf
old mode 100755
new mode 100644
index 5bdd339ea..930d82917
--- a/examples/resources/airbyte_source_polygon_stock_api/resource.tf
+++ b/examples/resources/airbyte_source_polygon_stock_api/resource.tf
@@ -1,17 +1,17 @@
resource "airbyte_source_polygon_stock_api" "my_source_polygonstockapi" {
configuration = {
- adjusted = "false"
+ adjusted = "true"
api_key = "...my_api_key..."
end_date = "2020-10-14"
- limit = 100
+ limit = 120
multiplier = 1
- sort = "asc"
- source_type = "polygon-stock-api"
+ sort = "desc"
start_date = "2020-10-14"
- stocks_ticker = "IBM"
+ stocks_ticker = "MSFT"
timespan = "day"
}
- name = "Mary Fisher"
- secret_id = "...my_secret_id..."
- workspace_id = "fb5971e9-8190-4557-b89c-edbac7fda395"
+ definition_id = "15bf9f13-70c2-48b2-b8d2-5e4ee4a51abe"
+ name = "Antoinette Rempel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e8da5f86-7ba5-4cf8-9b48-a2cc4047b120"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_postgres/resource.tf b/examples/resources/airbyte_source_postgres/resource.tf
old mode 100755
new mode 100644
index 2bc80bb59..370f2631f
--- a/examples/resources/airbyte_source_postgres/resource.tf
+++ b/examples/resources/airbyte_source_postgres/resource.tf
@@ -6,27 +6,23 @@ resource "airbyte_source_postgres" "my_source_postgres" {
password = "...my_password..."
port = 5432
replication_method = {
- source_postgres_update_method_detect_changes_with_xmin_system_column = {
- method = "Xmin"
- }
+ detect_changes_with_xmin_system_column = {}
}
schemas = [
"...",
]
- source_type = "postgres"
ssl_mode = {
- source_postgres_ssl_modes_allow = {
- mode = "allow"
+ source_postgres_allow = {
+ additional_properties = "{ \"see\": \"documentation\" }"
}
}
tunnel_method = {
- source_postgres_ssh_tunnel_method_no_tunnel = {
- tunnel_method = "NO_TUNNEL"
- }
+ source_postgres_no_tunnel = {}
}
- username = "Edwardo.Streich"
+ username = "Dagmar_Towne8"
}
- name = "Roosevelt Cummings"
- secret_id = "...my_secret_id..."
- workspace_id = "480632b9-954b-46fa-a206-369828553cb1"
+ definition_id = "558e983f-33bb-4c2f-8e75-b95ee5dd11c7"
+ name = "Brandi Gerhold"
+ secret_id = "...my_secret_id..."
+ workspace_id = "aa4d1c74-fcd7-4d93-9b8b-6b2c0920aa8b"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_posthog/resource.tf b/examples/resources/airbyte_source_posthog/resource.tf
old mode 100755
new mode 100644
index 84b31ca82..fd6b92db4
--- a/examples/resources/airbyte_source_posthog/resource.tf
+++ b/examples/resources/airbyte_source_posthog/resource.tf
@@ -2,11 +2,11 @@ resource "airbyte_source_posthog" "my_source_posthog" {
configuration = {
api_key = "...my_api_key..."
base_url = "https://posthog.example.com"
- events_time_step = 30
- source_type = "posthog"
+ events_time_step = 5
start_date = "2021-01-01T00:00:00Z"
}
- name = "Terence Wisozk"
- secret_id = "...my_secret_id..."
- workspace_id = "21ec2053-b749-4366-ac8e-e0f2bf19588d"
+ definition_id = "07521b21-ea9b-4c9d-9c88-f1ee12f8a7db"
+ name = "Daisy Ledner"
+ secret_id = "...my_secret_id..."
+ workspace_id = "41266a87-d389-4094-afa6-7bbea9f5a35d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_postmarkapp/resource.tf b/examples/resources/airbyte_source_postmarkapp/resource.tf
old mode 100755
new mode 100644
index 2ddf60220..979e9868d
--- a/examples/resources/airbyte_source_postmarkapp/resource.tf
+++ b/examples/resources/airbyte_source_postmarkapp/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_postmarkapp" "my_source_postmarkapp" {
configuration = {
- source_type = "postmarkapp"
x_postmark_account_token = "...my_x_postmark_account_token..."
x_postmark_server_token = "...my_x_postmark_server_token..."
}
- name = "Mr. Sharon Swift"
- secret_id = "...my_secret_id..."
- workspace_id = "3deba297-be3e-490b-840d-f868fd52405c"
+ definition_id = "1bd0fb63-21f6-4b4c-a647-2a5f8aec8fed"
+ name = "Felix Wisoky"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5367bfee-523e-436b-b4e8-f7b837d76b02"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_prestashop/resource.tf b/examples/resources/airbyte_source_prestashop/resource.tf
old mode 100755
new mode 100644
index d880d5bf5..f5ae3f980
--- a/examples/resources/airbyte_source_prestashop/resource.tf
+++ b/examples/resources/airbyte_source_prestashop/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_prestashop" "my_source_prestashop" {
configuration = {
- access_key = "...my_access_key..."
- source_type = "prestashop"
- start_date = "2022-01-01"
- url = "...my_url..."
+ access_key = "...my_access_key..."
+ start_date = "2022-01-01"
+ url = "...my_url..."
}
- name = "Evelyn Stracke"
- secret_id = "...my_secret_id..."
- workspace_id = "2f4f127f-b0e0-4bf1-b821-7978d0acca77"
+ definition_id = "d797c2fd-0239-4507-97b2-06b8fda8b48b"
+ name = "Dr. Jeffery Wuckert"
+ secret_id = "...my_secret_id..."
+ workspace_id = "631ebcaf-aa2e-4e7a-9e0c-b6197095b91e"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_punk_api/resource.tf b/examples/resources/airbyte_source_punk_api/resource.tf
old mode 100755
new mode 100644
index c2c285f0b..05c5ccf9c
--- a/examples/resources/airbyte_source_punk_api/resource.tf
+++ b/examples/resources/airbyte_source_punk_api/resource.tf
@@ -2,10 +2,10 @@ resource "airbyte_source_punk_api" "my_source_punkapi" {
configuration = {
brewed_after = "MM-YYYY"
brewed_before = "MM-YYYY"
- id = 22
- source_type = "punk-api"
+ id = 1
}
- name = "Darnell Turcotte"
- secret_id = "...my_secret_id..."
- workspace_id = "540ef53a-34a1-4b8f-a997-31adc05d85ae"
+ definition_id = "0c173d4d-6113-43dd-b2a9-5937ced0062e"
+ name = "Shelia Hettinger"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4e78152c-bd26-46e4-812d-05e7f58d4a06"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_pypi/resource.tf b/examples/resources/airbyte_source_pypi/resource.tf
old mode 100755
new mode 100644
index 05cdc7664..12eeac323
--- a/examples/resources/airbyte_source_pypi/resource.tf
+++ b/examples/resources/airbyte_source_pypi/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_pypi" "my_source_pypi" {
configuration = {
project_name = "sampleproject"
- source_type = "pypi"
version = "1.2.0"
}
- name = "Antonia Wintheiser"
- secret_id = "...my_secret_id..."
- workspace_id = "0fb38742-90d3-4365-a1ec-a16ef89451bd"
+ definition_id = "25cbff5b-31f2-4b93-84d3-ebf32902de61"
+ name = "Ann Blanda"
+ secret_id = "...my_secret_id..."
+ workspace_id = "882924ee-80aa-4298-8d84-713ebef014dd"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_qualaroo/resource.tf b/examples/resources/airbyte_source_qualaroo/resource.tf
old mode 100755
new mode 100644
index b83094854..265a41bcc
--- a/examples/resources/airbyte_source_qualaroo/resource.tf
+++ b/examples/resources/airbyte_source_qualaroo/resource.tf
@@ -1,14 +1,14 @@
resource "airbyte_source_qualaroo" "my_source_qualaroo" {
configuration = {
- key = "...my_key..."
- source_type = "qualaroo"
- start_date = "2021-03-01T00:00:00.000Z"
+ key = "...my_key..."
+ start_date = "2021-03-01T00:00:00.000Z"
survey_ids = [
"...",
]
token = "...my_token..."
}
- name = "Sue Thompson"
- secret_id = "...my_secret_id..."
- workspace_id = "b518c4da-1fad-4355-92f0-6d4e5b72f0f5"
+ definition_id = "9af7c7e9-c462-409e-a52c-707cb05c4a8d"
+ name = "Cheryl Schmitt"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4658e520-f854-4a56-b309-cc0ee4bba7fa"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_quickbooks/resource.tf b/examples/resources/airbyte_source_quickbooks/resource.tf
old mode 100755
new mode 100644
index 0a783e7b6..f7eb4b23c
--- a/examples/resources/airbyte_source_quickbooks/resource.tf
+++ b/examples/resources/airbyte_source_quickbooks/resource.tf
@@ -1,21 +1,20 @@
resource "airbyte_source_quickbooks" "my_source_quickbooks" {
configuration = {
credentials = {
- source_quickbooks_authorization_method_o_auth2_0 = {
+ source_quickbooks_o_auth2_0 = {
access_token = "...my_access_token..."
- auth_type = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
realm_id = "...my_realm_id..."
refresh_token = "...my_refresh_token..."
- token_expiry_date = "2022-06-15T23:02:57.447Z"
+ token_expiry_date = "2020-06-15T02:42:19.793Z"
}
}
- sandbox = false
- source_type = "quickbooks"
- start_date = "2021-03-20T00:00:00Z"
+ sandbox = true
+ start_date = "2021-03-20T00:00:00Z"
}
- name = "William Gottlieb"
- secret_id = "...my_secret_id..."
- workspace_id = "e00a1d6e-b943-4464-9d03-084fbba5ccef"
+ definition_id = "054daa84-a4e2-48fe-a10a-8a64b77a4fe6"
+ name = "Patricia Dickens"
+ secret_id = "...my_secret_id..."
+ workspace_id = "88c95001-e515-4b2e-b405-22a67dad65e8"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_railz/resource.tf b/examples/resources/airbyte_source_railz/resource.tf
old mode 100755
new mode 100644
index bf3d3dabd..013a60f7d
--- a/examples/resources/airbyte_source_railz/resource.tf
+++ b/examples/resources/airbyte_source_railz/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_railz" "my_source_railz" {
configuration = {
- client_id = "...my_client_id..."
- secret_key = "...my_secret_key..."
- source_type = "railz"
- start_date = "...my_start_date..."
+ client_id = "...my_client_id..."
+ secret_key = "...my_secret_key..."
+ start_date = "...my_start_date..."
}
- name = "Clyde Schmeler Jr."
- secret_id = "...my_secret_id..."
- workspace_id = "fe51e528-a45a-4c82-b85f-8bc2caba8da4"
+ definition_id = "ae1d217c-0fcb-4e7d-ad34-33ea862799ca"
+ name = "Alvin Roob"
+ secret_id = "...my_secret_id..."
+ workspace_id = "833469d3-410e-4395-a0aa-c55dc9d09788"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_recharge/resource.tf b/examples/resources/airbyte_source_recharge/resource.tf
old mode 100755
new mode 100644
index fceba1a03..8432a3ecb
--- a/examples/resources/airbyte_source_recharge/resource.tf
+++ b/examples/resources/airbyte_source_recharge/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_recharge" "my_source_recharge" {
configuration = {
access_token = "...my_access_token..."
- source_type = "recharge"
start_date = "2021-05-14T00:00:00Z"
}
- name = "Angel Stokes"
- secret_id = "...my_secret_id..."
- workspace_id = "7ff4711a-a1bc-474b-86ce-cc74f77b4848"
+ definition_id = "427992f6-5a71-405f-ae57-0ad372ede129"
+ name = "Hugo Hagenes"
+ secret_id = "...my_secret_id..."
+ workspace_id = "1410fd6e-7ec4-4881-ab0c-62b8975147c3"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_recreation/resource.tf b/examples/resources/airbyte_source_recreation/resource.tf
old mode 100755
new mode 100644
index 90e04ebbc..5075fa346
--- a/examples/resources/airbyte_source_recreation/resource.tf
+++ b/examples/resources/airbyte_source_recreation/resource.tf
@@ -2,9 +2,9 @@ resource "airbyte_source_recreation" "my_source_recreation" {
configuration = {
apikey = "...my_apikey..."
query_campsites = "...my_query_campsites..."
- source_type = "recreation"
}
- name = "Taylor Kertzmann"
- secret_id = "...my_secret_id..."
- workspace_id = "f0441d2c-3b80-4809-8373-e060459bebba"
+ definition_id = "e6c8bd1c-ccad-43b1-8406-5293193648ca"
+ name = "Naomi Dietrich"
+ secret_id = "...my_secret_id..."
+ workspace_id = "8652384b-db82-41f9-88ef-a40dc207c50e"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_recruitee/resource.tf b/examples/resources/airbyte_source_recruitee/resource.tf
old mode 100755
new mode 100644
index 6efcdbe0e..badf3f069
--- a/examples/resources/airbyte_source_recruitee/resource.tf
+++ b/examples/resources/airbyte_source_recruitee/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_recruitee" "my_source_recruitee" {
configuration = {
- api_key = "...my_api_key..."
- company_id = 9
- source_type = "recruitee"
+ api_key = "...my_api_key..."
+ company_id = 4
}
- name = "Mrs. Tina White"
- secret_id = "...my_secret_id..."
- workspace_id = "6bcf1525-58da-4a95-be6c-d02756c354aa"
+ definition_id = "f1211e1f-cb26-4b90-8c0d-f941919892a2"
+ name = "Mrs. Sherri Rosenbaum"
+ secret_id = "...my_secret_id..."
+ workspace_id = "af7bc34c-463b-4838-9c5f-976535f73a45"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_recurly/resource.tf b/examples/resources/airbyte_source_recurly/resource.tf
old mode 100755
new mode 100644
index f3ce9d851..7be7a2849
--- a/examples/resources/airbyte_source_recurly/resource.tf
+++ b/examples/resources/airbyte_source_recurly/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_recurly" "my_source_recurly" {
configuration = {
- api_key = "...my_api_key..."
- begin_time = "2021-12-01T00:00:00"
- end_time = "2021-12-01T00:00:00"
- source_type = "recurly"
+ api_key = "...my_api_key..."
+ begin_time = "2021-12-01T00:00:00"
+ end_time = "2021-12-01T00:00:00"
}
- name = "Josephine Dibbert"
- secret_id = "...my_secret_id..."
- workspace_id = "7e1763c5-208c-423e-9802-d82f0d45eb4a"
+ definition_id = "535fff5d-1d34-4f0c-8e54-86a3a161dc53"
+ name = "Mrs. Glen Gottlieb"
+ secret_id = "...my_secret_id..."
+ workspace_id = "acb8b41d-5bf9-44a0-9397-d3dfd90aff66"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_redshift/resource.tf b/examples/resources/airbyte_source_redshift/resource.tf
old mode 100755
new mode 100644
index 8db6f068a..4260d4771
--- a/examples/resources/airbyte_source_redshift/resource.tf
+++ b/examples/resources/airbyte_source_redshift/resource.tf
@@ -8,10 +8,10 @@ resource "airbyte_source_redshift" "my_source_redshift" {
schemas = [
"...",
]
- source_type = "redshift"
- username = "Nelda.Jaskolski"
+ username = "Elton_Morissette"
}
- name = "Clay Hintz"
- secret_id = "...my_secret_id..."
- workspace_id = "c18edc7f-787e-432e-84b3-d3ed0c5670ef"
+ definition_id = "b974a7d8-001c-4be4-b7da-a2d7b021550a"
+ name = "Jake Ondricka"
+ secret_id = "...my_secret_id..."
+ workspace_id = "f01cf56e-e294-4adb-85bd-340789cf0b8d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_retently/resource.tf b/examples/resources/airbyte_source_retently/resource.tf
old mode 100755
new mode 100644
index 611cccff5..87aeec578
--- a/examples/resources/airbyte_source_retently/resource.tf
+++ b/examples/resources/airbyte_source_retently/resource.tf
@@ -1,16 +1,16 @@
resource "airbyte_source_retently" "my_source_retently" {
configuration = {
credentials = {
- source_retently_authentication_mechanism_authenticate_via_retently_o_auth_ = {
- auth_type = "Client"
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
- refresh_token = "...my_refresh_token..."
+ authenticate_via_retently_o_auth = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ refresh_token = "...my_refresh_token..."
}
}
- source_type = "retently"
}
- name = "Kelly Pfeffer"
- secret_id = "...my_secret_id..."
- workspace_id = "c9f1cc50-3f6c-439b-8d0a-6290f957f385"
+ definition_id = "2c041244-3656-49fd-a4cd-2bcf08a635d7"
+ name = "Dave Schinner"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6ceccfae-93f7-4f0f-8c4b-4f8d4f6833e1"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_rki_covid/resource.tf b/examples/resources/airbyte_source_rki_covid/resource.tf
old mode 100755
new mode 100644
index e33e2377d..6fc21e95b
--- a/examples/resources/airbyte_source_rki_covid/resource.tf
+++ b/examples/resources/airbyte_source_rki_covid/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_rki_covid" "my_source_rkicovid" {
configuration = {
- source_type = "rki-covid"
- start_date = "...my_start_date..."
+ start_date = "...my_start_date..."
}
- name = "Penny Morissette"
- secret_id = "...my_secret_id..."
- workspace_id = "7ef807aa-e03f-433c-a79f-b9de4032ba26"
+ definition_id = "f3303ab0-45c8-491f-a9c8-dcb6cc1cd73d"
+ name = "Leticia Zieme Sr."
+ secret_id = "...my_secret_id..."
+ workspace_id = "36d5989e-7dba-4ce4-805a-6307276c58b5"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_rss/resource.tf b/examples/resources/airbyte_source_rss/resource.tf
old mode 100755
new mode 100644
index 55fce02f1..89bc65fde
--- a/examples/resources/airbyte_source_rss/resource.tf
+++ b/examples/resources/airbyte_source_rss/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_rss" "my_source_rss" {
configuration = {
- source_type = "rss"
- url = "...my_url..."
+ url = "...my_url..."
}
- name = "Gustavo Donnelly"
- secret_id = "...my_secret_id..."
- workspace_id = "ba9216bc-b415-4835-8736-41723133edc0"
+ definition_id = "da21f739-86a7-41e9-92c2-b81056bc977a"
+ name = "Alison Wunsch"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ff8dd835-d804-427d-a3a4-e1d8c723c8e5"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_s3/resource.tf b/examples/resources/airbyte_source_s3/resource.tf
old mode 100755
new mode 100644
index c1afe122c..d6bf9f30a
--- a/examples/resources/airbyte_source_s3/resource.tf
+++ b/examples/resources/airbyte_source_s3/resource.tf
@@ -4,11 +4,9 @@ resource "airbyte_source_s3" "my_source_s3" {
aws_secret_access_key = "...my_aws_secret_access_key..."
bucket = "...my_bucket..."
dataset = "...my_dataset..."
- endpoint = "...my_endpoint..."
+ endpoint = "https://my-s3-endpoint.com"
format = {
- source_s3_file_format_avro = {
- filetype = "avro"
- }
+ avro = {}
}
path_pattern = "**"
provider = {
@@ -19,17 +17,14 @@ resource "airbyte_source_s3" "my_source_s3" {
path_prefix = "...my_path_prefix..."
start_date = "2021-01-01T00:00:00Z"
}
- schema = "{\"column_1\": \"number\", \"column_2\": \"string\", \"column_3\": \"array\", \"column_4\": \"object\", \"column_5\": \"boolean\"}"
- source_type = "s3"
- start_date = "2021-01-01T00:00:00.000000Z"
+ schema = "{\"column_1\": \"number\", \"column_2\": \"string\", \"column_3\": \"array\", \"column_4\": \"object\", \"column_5\": \"boolean\"}"
+ start_date = "2021-01-01T00:00:00.000000Z"
streams = [
{
- days_to_sync_if_history_is_full = 1
- file_type = "...my_file_type..."
+ days_to_sync_if_history_is_full = 3
format = {
- source_s3_file_based_stream_config_format_avro_format = {
- double_as_string = true
- filetype = "avro"
+ source_s3_avro_format = {
+ double_as_string = false
}
}
globs = [
@@ -37,14 +32,15 @@ resource "airbyte_source_s3" "my_source_s3" {
]
input_schema = "...my_input_schema..."
legacy_prefix = "...my_legacy_prefix..."
- name = "Flora Rempel"
+ name = "Tyler Grimes"
primary_key = "...my_primary_key..."
schemaless = false
validation_policy = "Skip Record"
},
]
}
- name = "Jacqueline Kiehn"
- secret_id = "...my_secret_id..."
- workspace_id = "2c22c553-5049-45c5-9bb3-c57c1e4981e8"
+ definition_id = "5b5a324c-6128-4aab-bad0-730782c3e822"
+ name = "Mr. Phillip Hermann DVM"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3e25c699-48d0-4388-851e-c06fd3b8cc64"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_salesforce/resource.tf b/examples/resources/airbyte_source_salesforce/resource.tf
old mode 100755
new mode 100644
index b29e447e6..07ce503fa
--- a/examples/resources/airbyte_source_salesforce/resource.tf
+++ b/examples/resources/airbyte_source_salesforce/resource.tf
@@ -1,21 +1,20 @@
resource "airbyte_source_salesforce" "my_source_salesforce" {
configuration = {
- auth_type = "Client"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- force_use_bulk_api = true
+ force_use_bulk_api = false
is_sandbox = false
refresh_token = "...my_refresh_token..."
- source_type = "salesforce"
start_date = "2021-07-25"
streams_criteria = [
{
- criteria = "not contains"
+ criteria = "ends not with"
value = "...my_value..."
},
]
}
- name = "Gregg Boyer Sr."
- secret_id = "...my_secret_id..."
- workspace_id = "ebde64bf-cc54-469d-8015-dfa796206bef"
+ definition_id = "3692db06-d3b4-499d-8bda-e34afcb06318"
+ name = "Ms. Donna Krajcik"
+ secret_id = "...my_secret_id..."
+ workspace_id = "44d2b896-5caa-4bab-ae9d-6378e7243c02"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_salesloft/resource.tf b/examples/resources/airbyte_source_salesloft/resource.tf
old mode 100755
new mode 100644
index 6d96366c6..f8084ac32
--- a/examples/resources/airbyte_source_salesloft/resource.tf
+++ b/examples/resources/airbyte_source_salesloft/resource.tf
@@ -1,15 +1,14 @@
resource "airbyte_source_salesloft" "my_source_salesloft" {
configuration = {
credentials = {
- source_salesloft_credentials_authenticate_via_api_key = {
- api_key = "...my_api_key..."
- auth_type = "api_key"
+ authenticate_via_api_key = {
+ api_key = "...my_api_key..."
}
}
- source_type = "salesloft"
- start_date = "2020-11-16T00:00:00Z"
+ start_date = "2020-11-16T00:00:00Z"
}
- name = "Lynda Dicki"
- secret_id = "...my_secret_id..."
- workspace_id = "2c1aa010-e9aa-4c2e-9135-586d18f9f97a"
+ definition_id = "c073abf4-dfeb-4d41-8e5a-603e6b3fca03"
+ name = "Terrance Corwin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "14510264-179a-4403-81bb-87b13a43b1ea"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_sap_fieldglass/resource.tf b/examples/resources/airbyte_source_sap_fieldglass/resource.tf
old mode 100755
new mode 100644
index 3b2ad185a..415bb9c6b
--- a/examples/resources/airbyte_source_sap_fieldglass/resource.tf
+++ b/examples/resources/airbyte_source_sap_fieldglass/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_sap_fieldglass" "my_source_sapfieldglass" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "sap-fieldglass"
+ api_key = "...my_api_key..."
}
- name = "Juana Williamson"
- secret_id = "...my_secret_id..."
- workspace_id = "2bf7d67c-a84a-4d99-b41d-61243531870c"
+ definition_id = "d703a4ee-b23f-4e55-b942-b58b6d0d2093"
+ name = "Krystal Krajcik"
+ secret_id = "...my_secret_id..."
+ workspace_id = "8d8619ec-3981-4178-ae44-e5272c20971d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_secoda/resource.tf b/examples/resources/airbyte_source_secoda/resource.tf
old mode 100755
new mode 100644
index f66c05253..38c383e42
--- a/examples/resources/airbyte_source_secoda/resource.tf
+++ b/examples/resources/airbyte_source_secoda/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_secoda" "my_source_secoda" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "secoda"
+ api_key = "...my_api_key..."
}
- name = "Brett Leannon I"
- secret_id = "...my_secret_id..."
- workspace_id = "ad421bd4-3d1f-40cb-8a00-03eb22d9b3a7"
+ definition_id = "544a65a7-d2b4-4609-94ec-6467c968cce9"
+ name = "Edna Mitchell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "8a35db32-f900-4f8c-be73-78a587702297"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_sendgrid/resource.tf b/examples/resources/airbyte_source_sendgrid/resource.tf
old mode 100755
new mode 100644
index 3a300d8e5..6a73ac2a6
--- a/examples/resources/airbyte_source_sendgrid/resource.tf
+++ b/examples/resources/airbyte_source_sendgrid/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_sendgrid" "my_source_sendgrid" {
configuration = {
- apikey = "...my_apikey..."
- source_type = "sendgrid"
- start_time = "2020-01-01T01:01:01Z"
+ apikey = "...my_apikey..."
+ start_time = "2020-01-01T01:01:01Z"
}
- name = "Shari Pfannerstill"
- secret_id = "...my_secret_id..."
- workspace_id = "41c57d1f-edc2-4050-938d-c3ce185472f9"
+ definition_id = "37ec3d2a-b419-48d2-afe5-e34c931e7a72"
+ name = "Toby McGlynn"
+ secret_id = "...my_secret_id..."
+ workspace_id = "22c4d080-cde0-439d-95e8-c5778ddd1091"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_sendinblue/resource.tf b/examples/resources/airbyte_source_sendinblue/resource.tf
old mode 100755
new mode 100644
index 13be2f4c8..5635c7ec4
--- a/examples/resources/airbyte_source_sendinblue/resource.tf
+++ b/examples/resources/airbyte_source_sendinblue/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_sendinblue" "my_source_sendinblue" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "sendinblue"
+ api_key = "...my_api_key..."
}
- name = "Terence Kassulke III"
- secret_id = "...my_secret_id..."
- workspace_id = "6a8be344-4eac-48b3-a287-5c6c1fe606d0"
+ definition_id = "0de87dfe-701e-4dbd-8d10-cf57eb672b8a"
+ name = "Derek Heller"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3fb2a63d-a091-47a6-951f-ac3e8ec69bab"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_senseforce/resource.tf b/examples/resources/airbyte_source_senseforce/resource.tf
old mode 100755
new mode 100644
index 11adf0994..df0edb96d
--- a/examples/resources/airbyte_source_senseforce/resource.tf
+++ b/examples/resources/airbyte_source_senseforce/resource.tf
@@ -3,11 +3,11 @@ resource "airbyte_source_senseforce" "my_source_senseforce" {
access_token = "...my_access_token..."
backend_url = "https://galaxyapi.senseforce.io"
dataset_id = "8f418098-ca28-4df5-9498-0df9fe78eda7"
- slice_range = 10
- source_type = "senseforce"
+ slice_range = 180
start_date = "2017-01-25"
}
- name = "Rodolfo Langworth"
- secret_id = "...my_secret_id..."
- workspace_id = "e50c1666-1a1d-4913-aa7e-8d53213f3f65"
+ definition_id = "974cd0d5-39af-4231-9a6f-8898d74d7cd0"
+ name = "Lillie Anderson"
+ secret_id = "...my_secret_id..."
+ workspace_id = "3c633751-f6c5-444c-a0e7-3f23dc46e62d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_sentry/resource.tf b/examples/resources/airbyte_source_sentry/resource.tf
old mode 100755
new mode 100644
index 9620c2f19..78265953a
--- a/examples/resources/airbyte_source_sentry/resource.tf
+++ b/examples/resources/airbyte_source_sentry/resource.tf
@@ -4,12 +4,12 @@ resource "airbyte_source_sentry" "my_source_sentry" {
discover_fields = [
"{ \"see\": \"documentation\" }",
]
- hostname = "muted-ingredient.biz"
+ hostname = "impressionable-honesty.org"
organization = "...my_organization..."
project = "...my_project..."
- source_type = "sentry"
}
- name = "Krystal Quitzon"
- secret_id = "...my_secret_id..."
- workspace_id = "4c59f0a5-6ceb-4cad-a29c-a79181c95671"
+ definition_id = "72778d5d-b92d-416e-9dcb-06fc1f7a171f"
+ name = "Brooke Breitenberg"
+ secret_id = "...my_secret_id..."
+ workspace_id = "bfddb09b-9a90-43f6-8eb4-a54b7cf533c5"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_sftp/resource.tf b/examples/resources/airbyte_source_sftp/resource.tf
old mode 100755
new mode 100644
index c0b68b795..6a3ba7476
--- a/examples/resources/airbyte_source_sftp/resource.tf
+++ b/examples/resources/airbyte_source_sftp/resource.tf
@@ -1,20 +1,19 @@
resource "airbyte_source_sftp" "my_source_sftp" {
configuration = {
credentials = {
- source_sftp_authentication_wildcard_password_authentication = {
- auth_method = "SSH_PASSWORD_AUTH"
+ source_sftp_password_authentication = {
auth_user_password = "...my_auth_user_password..."
}
}
file_pattern = "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`"
file_types = "csv,json"
folder_path = "/logs/2022"
- host = "www.host.com"
+ host = "192.0.2.1"
port = 22
- source_type = "sftp"
user = "...my_user..."
}
- name = "Miss Tommy Emard"
- secret_id = "...my_secret_id..."
- workspace_id = "665163a3-6385-412a-b252-1b9f2e072467"
+ definition_id = "8a56e1f7-b10c-46dd-9e62-eb5fcf365dcc"
+ name = "Rogelio Schoen"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e41cbe1d-2ecd-4015-81d5-2f6c56d3cf89"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_sftp_bulk/resource.tf b/examples/resources/airbyte_source_sftp_bulk/resource.tf
old mode 100755
new mode 100644
index 64ae1edc4..7a9857a8a
--- a/examples/resources/airbyte_source_sftp_bulk/resource.tf
+++ b/examples/resources/airbyte_source_sftp_bulk/resource.tf
@@ -2,19 +2,19 @@ resource "airbyte_source_sftp_bulk" "my_source_sftpbulk" {
configuration = {
file_most_recent = false
file_pattern = "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`"
- file_type = "json"
+ file_type = "csv"
folder_path = "/logs/2022"
host = "192.0.2.1"
password = "...my_password..."
port = 22
private_key = "...my_private_key..."
separator = ","
- source_type = "sftp-bulk"
start_date = "2017-01-25T00:00:00Z"
stream_name = "ftp_contacts"
- username = "Pearline_Bailey"
+ username = "Serena.Beer65"
}
- name = "Wm Bartoletti"
- secret_id = "...my_secret_id..."
- workspace_id = "50edf22a-94d2-40ec-90ea-41d1f465e851"
+ definition_id = "6ecf0509-1d90-48d9-9001-753384297337"
+ name = "Dr. Jasmine Grimes"
+ secret_id = "...my_secret_id..."
+ workspace_id = "9291353f-9549-4bcc-b4d3-89bbf5d24f5b"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_shopify/resource.tf b/examples/resources/airbyte_source_shopify/resource.tf
old mode 100755
new mode 100644
index d986b5622..731389083
--- a/examples/resources/airbyte_source_shopify/resource.tf
+++ b/examples/resources/airbyte_source_shopify/resource.tf
@@ -1,16 +1,15 @@
resource "airbyte_source_shopify" "my_source_shopify" {
configuration = {
credentials = {
- source_shopify_shopify_authorization_method_api_password = {
+ api_password = {
api_password = "...my_api_password..."
- auth_method = "api_password"
}
}
- shop = "my-store"
- source_type = "shopify"
- start_date = "2022-01-02"
+ shop = "my-store"
+ start_date = "2022-08-02"
}
- name = "Randal Kris"
- secret_id = "...my_secret_id..."
- workspace_id = "df54fdd5-ea95-4433-98da-fb42a8d63388"
+ definition_id = "4e1dc4a0-1d44-4fb9-b610-a4d0de91eaa4"
+ name = "Clinton Baumbach"
+ secret_id = "...my_secret_id..."
+ workspace_id = "cb870eb9-8050-4c39-a745-0657bfd1cb4d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_shortio/resource.tf b/examples/resources/airbyte_source_shortio/resource.tf
old mode 100755
new mode 100644
index bc9768ca7..1cdb6a301
--- a/examples/resources/airbyte_source_shortio/resource.tf
+++ b/examples/resources/airbyte_source_shortio/resource.tf
@@ -1,11 +1,11 @@
resource "airbyte_source_shortio" "my_source_shortio" {
configuration = {
- domain_id = "...my_domain_id..."
- secret_key = "...my_secret_key..."
- source_type = "shortio"
- start_date = "2023-07-30T03:43:59.244Z"
+ domain_id = "...my_domain_id..."
+ secret_key = "...my_secret_key..."
+ start_date = "2023-07-30T03:43:59.244Z"
}
- name = "Troy Streich I"
- secret_id = "...my_secret_id..."
- workspace_id = "9ea5f9b1-8a24-44fd-a190-39dacd38ed0d"
+ definition_id = "b2aae6c2-0ac9-4c19-9b3e-1c883c55acce"
+ name = "Bethany Donnelly"
+ secret_id = "...my_secret_id..."
+ workspace_id = "29a15c36-062a-463f-9716-d2b265f2af56"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_slack/resource.tf b/examples/resources/airbyte_source_slack/resource.tf
old mode 100755
new mode 100644
index 19d0d7616..51f535f8a
--- a/examples/resources/airbyte_source_slack/resource.tf
+++ b/examples/resources/airbyte_source_slack/resource.tf
@@ -4,17 +4,16 @@ resource "airbyte_source_slack" "my_source_slack" {
"...",
]
credentials = {
- source_slack_authentication_mechanism_api_token = {
- api_token = "...my_api_token..."
- option_title = "API Token Credentials"
+ source_slack_api_token = {
+ api_token = "...my_api_token..."
}
}
- join_channels = false
- lookback_window = 7
- source_type = "slack"
+ join_channels = true
+ lookback_window = 14
start_date = "2017-01-25T00:00:00Z"
}
- name = "Dr. Jamie Wintheiser"
- secret_id = "...my_secret_id..."
- workspace_id = "af15920c-90d1-4b49-81f2-bd89c8a32639"
+ definition_id = "dd581ac6-4878-476f-8ad6-15bcace687b3"
+ name = "Ms. Marian Bergstrom"
+ secret_id = "...my_secret_id..."
+ workspace_id = "986a7b02-fd25-4c77-a7b3-6354281d3e7f"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_smaily/resource.tf b/examples/resources/airbyte_source_smaily/resource.tf
old mode 100755
new mode 100644
index bdf367829..a09b05d15
--- a/examples/resources/airbyte_source_smaily/resource.tf
+++ b/examples/resources/airbyte_source_smaily/resource.tf
@@ -3,9 +3,9 @@ resource "airbyte_source_smaily" "my_source_smaily" {
api_password = "...my_api_password..."
api_subdomain = "...my_api_subdomain..."
api_username = "...my_api_username..."
- source_type = "smaily"
}
- name = "Donnie Hauck"
- secret_id = "...my_secret_id..."
- workspace_id = "b6902b88-1a94-4f64-b664-a8f0af8c691d"
+ definition_id = "0bc649fe-5b08-4c82-9c40-ca1ab7663971"
+ name = "Ebony Carroll"
+ secret_id = "...my_secret_id..."
+ workspace_id = "331df025-a154-4586-87cd-fb558f87809d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_smartengage/resource.tf b/examples/resources/airbyte_source_smartengage/resource.tf
old mode 100755
new mode 100644
index 3d73a1f37..660ef99cb
--- a/examples/resources/airbyte_source_smartengage/resource.tf
+++ b/examples/resources/airbyte_source_smartengage/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_smartengage" "my_source_smartengage" {
configuration = {
- api_key = "...my_api_key..."
- source_type = "smartengage"
+ api_key = "...my_api_key..."
}
- name = "Carmen Crist"
- secret_id = "...my_secret_id..."
- workspace_id = "fbaf9476-a2ae-48dc-850c-8a3512c73784"
+ definition_id = "3d1fcf2b-6755-4110-90ec-6c18f2017e88"
+ name = "Neil Pagac"
+ secret_id = "...my_secret_id..."
+ workspace_id = "64f95e84-efb6-4a93-9326-1882dc6ea377"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_smartsheets/resource.tf b/examples/resources/airbyte_source_smartsheets/resource.tf
old mode 100755
new mode 100644
index addba7c24..3640e3f3f
--- a/examples/resources/airbyte_source_smartsheets/resource.tf
+++ b/examples/resources/airbyte_source_smartsheets/resource.tf
@@ -1,19 +1,18 @@
resource "airbyte_source_smartsheets" "my_source_smartsheets" {
configuration = {
credentials = {
- source_smartsheets_authorization_method_api_access_token = {
+ api_access_token = {
access_token = "...my_access_token..."
- auth_type = "access_token"
}
}
metadata_fields = [
- "row_access_level",
+ "row_number",
]
- source_type = "smartsheets"
spreadsheet_id = "...my_spreadsheet_id..."
- start_datetime = "2000-01-01T13:00:00-07:00"
+ start_datetime = "2000-01-01T13:00:00"
}
- name = "Joann Bechtelar Jr."
- secret_id = "...my_secret_id..."
- workspace_id = "e966ec73-6d43-4194-b98c-783c92398ed3"
+ definition_id = "a6744848-ac2b-404b-aae9-e175304065f6"
+ name = "Tara King"
+ secret_id = "...my_secret_id..."
+ workspace_id = "901f87c9-df1a-4f8f-9013-d5d0cf403b28"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_snapchat_marketing/resource.tf b/examples/resources/airbyte_source_snapchat_marketing/resource.tf
old mode 100755
new mode 100644
index ed5f2884d..03f04b1c1
--- a/examples/resources/airbyte_source_snapchat_marketing/resource.tf
+++ b/examples/resources/airbyte_source_snapchat_marketing/resource.tf
@@ -4,10 +4,10 @@ resource "airbyte_source_snapchat_marketing" "my_source_snapchatmarketing" {
client_secret = "...my_client_secret..."
end_date = "2022-01-30"
refresh_token = "...my_refresh_token..."
- source_type = "snapchat-marketing"
start_date = "2022-01-01"
}
- name = "Chelsea Ortiz"
- secret_id = "...my_secret_id..."
- workspace_id = "5ca8649a-70cf-4d5d-a989-b7206451077d"
+ definition_id = "8a6950f0-007e-4330-87d9-5358a56819d2"
+ name = "Rudy Toy"
+ secret_id = "...my_secret_id..."
+ workspace_id = "1d7e3d24-dfd3-4d51-a342-f997d059d38a"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_snowflake/resource.tf b/examples/resources/airbyte_source_snowflake/resource.tf
old mode 100755
new mode 100644
index 15f3db7ca..abb56e616
--- a/examples/resources/airbyte_source_snowflake/resource.tf
+++ b/examples/resources/airbyte_source_snowflake/resource.tf
@@ -1,9 +1,8 @@
resource "airbyte_source_snowflake" "my_source_snowflake" {
configuration = {
credentials = {
- source_snowflake_authorization_method_o_auth2_0 = {
+ source_snowflake_o_auth2_0 = {
access_token = "...my_access_token..."
- auth_type = "OAuth"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
@@ -14,10 +13,10 @@ resource "airbyte_source_snowflake" "my_source_snowflake" {
jdbc_url_params = "...my_jdbc_url_params..."
role = "AIRBYTE_ROLE"
schema = "AIRBYTE_SCHEMA"
- source_type = "snowflake"
warehouse = "AIRBYTE_WAREHOUSE"
}
- name = "Katrina Tillman"
- secret_id = "...my_secret_id..."
- workspace_id = "3d492ed1-4b8a-42c1-9545-45e955dcc185"
+ definition_id = "2e5fcf99-c418-476f-a0cb-c1b99ee1e960"
+ name = "Mrs. Jeanette Howell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "0d51b311-4e9e-4d57-941c-3612b0e8c8cf"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_sonar_cloud/resource.tf b/examples/resources/airbyte_source_sonar_cloud/resource.tf
old mode 100755
new mode 100644
index 0de15f144..b9eb05ed6
--- a/examples/resources/airbyte_source_sonar_cloud/resource.tf
+++ b/examples/resources/airbyte_source_sonar_cloud/resource.tf
@@ -5,11 +5,11 @@ resource "airbyte_source_sonar_cloud" "my_source_sonarcloud" {
]
end_date = "YYYY-MM-DD"
organization = "airbyte"
- source_type = "sonar-cloud"
start_date = "YYYY-MM-DD"
user_token = "...my_user_token..."
}
- name = "Mildred Rosenbaum"
- secret_id = "...my_secret_id..."
- workspace_id = "43ad2daa-784a-4ba3-9230-edf73811a115"
+ definition_id = "d259943d-fa52-4a9e-875a-bffba2c1e7b6"
+ name = "Jose Lindgren"
+ secret_id = "...my_secret_id..."
+ workspace_id = "d761f19b-60aa-4080-8c97-1e60235dc09f"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_spacex_api/resource.tf b/examples/resources/airbyte_source_spacex_api/resource.tf
old mode 100755
new mode 100644
index 8824f13ee..d1578363e
--- a/examples/resources/airbyte_source_spacex_api/resource.tf
+++ b/examples/resources/airbyte_source_spacex_api/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_spacex_api" "my_source_spacexapi" {
configuration = {
- id = "382bd7ed-5650-4762-9c58-f4d7396564c2"
- options = "...my_options..."
- source_type = "spacex-api"
+ id = "adad73b7-9d20-4b48-acfd-c6fb504a12b7"
+ options = "...my_options..."
}
- name = "Lee Batz Jr."
- secret_id = "...my_secret_id..."
- workspace_id = "a961d24a-7dbb-48f5-b2d8-92cf7812cb51"
+ definition_id = "723cbf02-23ae-4822-a532-7d8cbc0547dc"
+ name = "Chad Swaniawski"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7628c478-1358-42a6-b537-d9dfc7f45856"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_square/resource.tf b/examples/resources/airbyte_source_square/resource.tf
old mode 100755
new mode 100644
index 61c86f490..884624831
--- a/examples/resources/airbyte_source_square/resource.tf
+++ b/examples/resources/airbyte_source_square/resource.tf
@@ -1,17 +1,16 @@
resource "airbyte_source_square" "my_source_square" {
configuration = {
credentials = {
- source_square_authentication_api_key = {
- api_key = "...my_api_key..."
- auth_type = "API Key"
+ source_square_api_key = {
+ api_key = "...my_api_key..."
}
}
- include_deleted_objects = true
+ include_deleted_objects = false
is_sandbox = false
- source_type = "square"
- start_date = "2022-02-01"
+ start_date = "2022-11-22"
}
- name = "Miss Bruce Gibson"
- secret_id = "...my_secret_id..."
- workspace_id = "548f88f8-f1bf-40bc-8e1f-206d5d831d00"
+ definition_id = "55c9f06b-5482-4c9e-b770-03d0337f10a6"
+ name = "Connie Homenick"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4ee32ccb-4d52-4da6-928f-2436a122e394"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_strava/resource.tf b/examples/resources/airbyte_source_strava/resource.tf
old mode 100755
new mode 100644
index 9f3ccc201..9d85e070e
--- a/examples/resources/airbyte_source_strava/resource.tf
+++ b/examples/resources/airbyte_source_strava/resource.tf
@@ -1,14 +1,13 @@
resource "airbyte_source_strava" "my_source_strava" {
configuration = {
athlete_id = 17831421
- auth_type = "Client"
client_id = "12345"
client_secret = "fc6243f283e51f6ca989aab298b17da125496f50"
refresh_token = "fc6243f283e51f6ca989aab298b17da125496f50"
- source_type = "strava"
start_date = "2021-03-01T00:00:00Z"
}
- name = "Jeffrey Wintheiser"
- secret_id = "...my_secret_id..."
- workspace_id = "06673f3a-681c-4576-8dce-742409a215e0"
+ definition_id = "198a6bf6-f1cb-4db3-9a96-cd0e48f1e4b3"
+ name = "Elaine Johnson"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6ca0b303-cf01-47cd-9783-63f1be7e9b4a"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_stripe/resource.tf b/examples/resources/airbyte_source_stripe/resource.tf
old mode 100755
new mode 100644
index 2d22ec1b8..efe07b15a
--- a/examples/resources/airbyte_source_stripe/resource.tf
+++ b/examples/resources/airbyte_source_stripe/resource.tf
@@ -1,13 +1,15 @@
resource "airbyte_source_stripe" "my_source_stripe" {
configuration = {
account_id = "...my_account_id..."
+ call_rate_limit = 100
client_secret = "...my_client_secret..."
- lookback_window_days = 5
- slice_range = 10
- source_type = "stripe"
+ lookback_window_days = 10
+ num_workers = 3
+ slice_range = 360
start_date = "2017-01-25T00:00:00Z"
}
- name = "Seth Nitzsche"
- secret_id = "...my_secret_id..."
- workspace_id = "63e3af3d-d9dd-4a33-9cd6-3483e4a7a98e"
+ definition_id = "46c36bb7-337b-4f0b-aca9-3a8ae78e1e53"
+ name = "Marcella Muller"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b6d5dc1e-250f-480f-bc59-5c3777bccfe7"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_survey_sparrow/resource.tf b/examples/resources/airbyte_source_survey_sparrow/resource.tf
old mode 100755
new mode 100644
index 7c5f90d2e..7317a4aca
--- a/examples/resources/airbyte_source_survey_sparrow/resource.tf
+++ b/examples/resources/airbyte_source_survey_sparrow/resource.tf
@@ -2,16 +2,14 @@ resource "airbyte_source_survey_sparrow" "my_source_surveysparrow" {
configuration = {
access_token = "...my_access_token..."
region = {
- source_survey_sparrow_base_url_eu_based_account = {
- url_base = "https://eu-api.surveysparrow.com/v3"
- }
+ eu_based_account = {}
}
- source_type = "survey-sparrow"
survey_id = [
"{ \"see\": \"documentation\" }",
]
}
- name = "Hugo Kovacek"
- secret_id = "...my_secret_id..."
- workspace_id = "f02449d8-6f4b-4b20-be5d-911cbfe749ca"
+ definition_id = "4b91c615-d128-4040-ba03-eb3c0afcc3c8"
+ name = "Gerard Kerluke"
+ secret_id = "...my_secret_id..."
+ workspace_id = "fbbc8e3e-7db5-4a3e-846f-c1e0fa91f7ef"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_surveymonkey/resource.tf b/examples/resources/airbyte_source_surveymonkey/resource.tf
old mode 100755
new mode 100644
index a6b11652b..70c38aa2a
--- a/examples/resources/airbyte_source_surveymonkey/resource.tf
+++ b/examples/resources/airbyte_source_surveymonkey/resource.tf
@@ -2,18 +2,17 @@ resource "airbyte_source_surveymonkey" "my_source_surveymonkey" {
configuration = {
credentials = {
access_token = "...my_access_token..."
- auth_method = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
}
- origin = "USA"
- source_type = "surveymonkey"
- start_date = "2021-01-01T00:00:00Z"
+ origin = "USA"
+ start_date = "2021-01-01T00:00:00Z"
survey_ids = [
"...",
]
}
- name = "Pearl Trantow"
- secret_id = "...my_secret_id..."
- workspace_id = "b8955d41-3e13-4a48-a310-907bd354c092"
+ definition_id = "147e293c-7a4b-42d7-bbc2-90ef00ad5372"
+ name = "Renee Howe"
+ secret_id = "...my_secret_id..."
+ workspace_id = "50a2e7cf-e6f3-44ac-865c-56f5fa6778e4"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_tempo/resource.tf b/examples/resources/airbyte_source_tempo/resource.tf
old mode 100755
new mode 100644
index 03e5b4828..e9b47bc9f
--- a/examples/resources/airbyte_source_tempo/resource.tf
+++ b/examples/resources/airbyte_source_tempo/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_tempo" "my_source_tempo" {
configuration = {
- api_token = "...my_api_token..."
- source_type = "tempo"
+ api_token = "...my_api_token..."
}
- name = "Edwin Haley"
- secret_id = "...my_secret_id..."
- workspace_id = "7f69e2c9-e6d1-40e9-9b3a-d4c6b03108d9"
+ definition_id = "5f462d7c-8446-4197-ba1b-271a5b009f29"
+ name = "Karen Kemmer"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6dac9959-2aae-4b21-989b-3db558d4aa17"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_the_guardian_api/resource.tf b/examples/resources/airbyte_source_the_guardian_api/resource.tf
old mode 100755
new mode 100644
index 678d3a191..ea09139ab
--- a/examples/resources/airbyte_source_the_guardian_api/resource.tf
+++ b/examples/resources/airbyte_source_the_guardian_api/resource.tf
@@ -1,14 +1,14 @@
resource "airbyte_source_the_guardian_api" "my_source_theguardianapi" {
configuration = {
- api_key = "...my_api_key..."
- end_date = "YYYY-MM-DD"
- query = "political"
- section = "media"
- source_type = "the-guardian-api"
- start_date = "YYYY-MM-DD"
- tag = "environment/recycling"
+ api_key = "...my_api_key..."
+ end_date = "YYYY-MM-DD"
+ query = "environment AND political"
+ section = "media"
+ start_date = "YYYY-MM-DD"
+ tag = "environment/energyefficiency"
}
- name = "Pauline Kozey IV"
- secret_id = "...my_secret_id..."
- workspace_id = "2b94f2ab-1fd5-4671-a9c3-26350a467143"
+ definition_id = "e21a7b03-b315-4af1-9bc4-a1418c27e2e4"
+ name = "Toby Rempel"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4753d48e-30cc-4cb1-939d-dfc649b7a58a"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_tiktok_marketing/resource.tf b/examples/resources/airbyte_source_tiktok_marketing/resource.tf
old mode 100755
new mode 100644
index 21a1160b5..f313ea07d
--- a/examples/resources/airbyte_source_tiktok_marketing/resource.tf
+++ b/examples/resources/airbyte_source_tiktok_marketing/resource.tf
@@ -1,21 +1,20 @@
resource "airbyte_source_tiktok_marketing" "my_source_tiktokmarketing" {
configuration = {
- attribution_window = 5
+ attribution_window = 3
credentials = {
- source_tiktok_marketing_authentication_method_o_auth2_0 = {
+ source_tiktok_marketing_o_auth2_0 = {
access_token = "...my_access_token..."
advertiser_id = "...my_advertiser_id..."
app_id = "...my_app_id..."
- auth_type = "oauth2.0"
secret = "...my_secret..."
}
}
- end_date = "2021-10-08"
+ end_date = "2022-10-15"
include_deleted = false
- source_type = "tiktok-marketing"
- start_date = "2022-12-21"
+ start_date = "2022-12-08"
}
- name = "Mrs. Joey Mueller"
- secret_id = "...my_secret_id..."
- workspace_id = "4d93a74c-0252-4fe3-b4b4-db8b778ebb6e"
+ definition_id = "fd338f32-2856-4cd8-8e7e-494b9e5830e9"
+ name = "Elijah Prosacco"
+ secret_id = "...my_secret_id..."
+ workspace_id = "12cdcae9-f85c-4701-b380-526f8856cdf3"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_todoist/resource.tf b/examples/resources/airbyte_source_todoist/resource.tf
old mode 100755
new mode 100644
index 0400ee0c2..29e3118c2
--- a/examples/resources/airbyte_source_todoist/resource.tf
+++ b/examples/resources/airbyte_source_todoist/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_todoist" "my_source_todoist" {
configuration = {
- source_type = "todoist"
- token = "...my_token..."
+ token = "...my_token..."
}
- name = "Hope Collins"
- secret_id = "...my_secret_id..."
- workspace_id = "502bafb2-cbc4-4635-95e6-5da028c3e951"
+ definition_id = "fdefbe19-9921-44f3-bfa4-8acadc06400b"
+ name = "Kristy Hilpert"
+ secret_id = "...my_secret_id..."
+ workspace_id = "13a2ccf2-b1ad-4e2f-8984-bfb0e1b3d2b8"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_trello/resource.tf b/examples/resources/airbyte_source_trello/resource.tf
old mode 100755
new mode 100644
index ef2bb2452..f7738204a
--- a/examples/resources/airbyte_source_trello/resource.tf
+++ b/examples/resources/airbyte_source_trello/resource.tf
@@ -3,12 +3,12 @@ resource "airbyte_source_trello" "my_source_trello" {
board_ids = [
"...",
]
- key = "...my_key..."
- source_type = "trello"
- start_date = "2021-03-01T00:00:00Z"
- token = "...my_token..."
+ key = "...my_key..."
+ start_date = "2021-03-01T00:00:00Z"
+ token = "...my_token..."
}
- name = "Philip Armstrong"
- secret_id = "...my_secret_id..."
- workspace_id = "a966489d-7b78-4673-a13a-12a6b9924945"
+ definition_id = "26a8838c-f8d2-427f-b18d-4240654f4782"
+ name = "Esther Abshire"
+ secret_id = "...my_secret_id..."
+ workspace_id = "b5a46242-8ebc-45c7-bead-f0c9ce16ebe8"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_trustpilot/resource.tf b/examples/resources/airbyte_source_trustpilot/resource.tf
old mode 100755
new mode 100644
index fcfc79005..dd7bf0b50
--- a/examples/resources/airbyte_source_trustpilot/resource.tf
+++ b/examples/resources/airbyte_source_trustpilot/resource.tf
@@ -4,15 +4,14 @@ resource "airbyte_source_trustpilot" "my_source_trustpilot" {
"...",
]
credentials = {
- source_trustpilot_authorization_method_api_key = {
- auth_type = "apikey"
+ source_trustpilot_api_key = {
client_id = "...my_client_id..."
}
}
- source_type = "trustpilot"
- start_date = "%Y-%m-%dT%H:%M:%S"
+ start_date = "%Y-%m-%dT%H:%M:%S"
}
- name = "Bradley Goodwin"
- secret_id = "...my_secret_id..."
- workspace_id = "f5c84383-6b86-4b3c-9f64-15b0449f9df1"
+ definition_id = "5fa64aee-8d2b-4de4-8eef-ceb9e0d54b08"
+ name = "Clifford Quigley"
+ secret_id = "...my_secret_id..."
+ workspace_id = "98fe3f92-c06a-49aa-b270-2875abb88c39"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_tvmaze_schedule/resource.tf b/examples/resources/airbyte_source_tvmaze_schedule/resource.tf
old mode 100755
new mode 100644
index ce39a3b63..8edb587dc
--- a/examples/resources/airbyte_source_tvmaze_schedule/resource.tf
+++ b/examples/resources/airbyte_source_tvmaze_schedule/resource.tf
@@ -1,12 +1,12 @@
resource "airbyte_source_tvmaze_schedule" "my_source_tvmazeschedule" {
configuration = {
- domestic_schedule_country_code = "US"
+ domestic_schedule_country_code = "GB"
end_date = "...my_end_date..."
- source_type = "tvmaze-schedule"
start_date = "...my_start_date..."
web_schedule_country_code = "global"
}
- name = "Gretchen Waters"
- secret_id = "...my_secret_id..."
- workspace_id = "e78bf606-8258-494e-a763-d5c72795b785"
+ definition_id = "79666080-f3ec-4ae3-8b49-1ea7992cd63d"
+ name = "Dr. Victoria Lemke"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e3f7d5a4-33d3-40ca-8aa9-f684d9ab345e"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_twilio/resource.tf b/examples/resources/airbyte_source_twilio/resource.tf
old mode 100755
new mode 100644
index 8d0453266..d3909d36e
--- a/examples/resources/airbyte_source_twilio/resource.tf
+++ b/examples/resources/airbyte_source_twilio/resource.tf
@@ -3,10 +3,10 @@ resource "airbyte_source_twilio" "my_source_twilio" {
account_sid = "...my_account_sid..."
auth_token = "...my_auth_token..."
lookback_window = 60
- source_type = "twilio"
start_date = "2020-10-01T00:00:00Z"
}
- name = "Andre Sporer"
- secret_id = "...my_secret_id..."
- workspace_id = "9e5635b3-3bc0-4f97-8c42-fc9f4844225e"
+ definition_id = "83cb2e52-a86a-4dbb-97c5-cbe7ccff9d07"
+ name = "Leslie Kihn"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a4b37eb2-05dd-4b7f-9b71-195e07e10364"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_twilio_taskrouter/resource.tf b/examples/resources/airbyte_source_twilio_taskrouter/resource.tf
old mode 100755
new mode 100644
index 3586436da..9ead70009
--- a/examples/resources/airbyte_source_twilio_taskrouter/resource.tf
+++ b/examples/resources/airbyte_source_twilio_taskrouter/resource.tf
@@ -2,9 +2,9 @@ resource "airbyte_source_twilio_taskrouter" "my_source_twiliotaskrouter" {
configuration = {
account_sid = "...my_account_sid..."
auth_token = "...my_auth_token..."
- source_type = "twilio-taskrouter"
}
- name = "Cathy Ratke"
- secret_id = "...my_secret_id..."
- workspace_id = "6065c0ef-a6f9-43b9-8a1b-8c95be1254b7"
+ definition_id = "3a6dfd2a-6022-45b2-ac62-eb10f1a0d51f"
+ name = "Guy Rath II"
+ secret_id = "...my_secret_id..."
+ workspace_id = "16cb49da-06c2-439e-baf3-ca2cc2a5392d"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_twitter/resource.tf b/examples/resources/airbyte_source_twitter/resource.tf
old mode 100755
new mode 100644
index 32d6dcd9b..3568b0402
--- a/examples/resources/airbyte_source_twitter/resource.tf
+++ b/examples/resources/airbyte_source_twitter/resource.tf
@@ -1,12 +1,12 @@
resource "airbyte_source_twitter" "my_source_twitter" {
configuration = {
- api_key = "...my_api_key..."
- end_date = "2022-05-29T22:05:47.839Z"
- query = "...my_query..."
- source_type = "twitter"
- start_date = "2022-02-11T15:55:53.597Z"
+ api_key = "...my_api_key..."
+ end_date = "2022-09-12T14:25:08.896Z"
+ query = "...my_query..."
+ start_date = "2022-06-24T22:46:50.628Z"
}
- name = "Elbert Kuhic"
- secret_id = "...my_secret_id..."
- workspace_id = "10d1f655-8c99-4c72-ad2b-c0f94087d9ca"
+ definition_id = "89040904-7267-4ce8-aa32-2e02b7e6dd49"
+ name = "Domingo Heller"
+ secret_id = "...my_secret_id..."
+ workspace_id = "592a5dd7-ddbd-4797-92eb-894fd682a677"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_typeform/resource.tf b/examples/resources/airbyte_source_typeform/resource.tf
old mode 100755
new mode 100644
index 82cc96196..1509974e7
--- a/examples/resources/airbyte_source_typeform/resource.tf
+++ b/examples/resources/airbyte_source_typeform/resource.tf
@@ -1,22 +1,21 @@
resource "airbyte_source_typeform" "my_source_typeform" {
configuration = {
credentials = {
- source_typeform_authorization_method_o_auth2_0 = {
+ source_typeform_o_auth2_0 = {
access_token = "...my_access_token..."
- auth_type = "oauth2.0"
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
refresh_token = "...my_refresh_token..."
- token_expiry_date = "2021-02-23T09:05:08.511Z"
+ token_expiry_date = "2022-10-02T21:15:25.365Z"
}
}
form_ids = [
"...",
]
- source_type = "typeform"
- start_date = "2021-03-01T00:00:00Z"
+ start_date = "2021-03-01T00:00:00Z"
}
- name = "Rosemarie Spencer"
- secret_id = "...my_secret_id..."
- workspace_id = "aac9b4ca-a1cf-4e9e-95df-903907f37831"
+ definition_id = "dbbaeb9b-5c2e-42ee-8b85-f41cf2efd5ed"
+ name = "Nancy Hansen"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e4deda30-dd3c-4fb0-aa2f-ad0584130837"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_us_census/resource.tf b/examples/resources/airbyte_source_us_census/resource.tf
old mode 100755
new mode 100644
index 3761f849f..c28ef116f
--- a/examples/resources/airbyte_source_us_census/resource.tf
+++ b/examples/resources/airbyte_source_us_census/resource.tf
@@ -2,10 +2,10 @@ resource "airbyte_source_us_census" "my_source_uscensus" {
configuration = {
api_key = "...my_api_key..."
query_params = "get=MOVEDIN,GEOID1,GEOID2,MOVEDOUT,FULL1_NAME,FULL2_NAME,MOVEDNET&for=county:*"
- query_path = "data/2018/acs"
- source_type = "us-census"
+ query_path = "data/2019/cbp"
}
- name = "Ginger Gislason"
- secret_id = "...my_secret_id..."
- workspace_id = "54a85466-597c-4502-b3c1-471d51aaa6dd"
+ definition_id = "e5de43c9-07f6-43cc-82bc-2f7f5dfb2c26"
+ name = "Kyle McKenzie"
+ secret_id = "...my_secret_id..."
+ workspace_id = "915d3324-b481-49ff-b934-29d3165dd859"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_vantage/resource.tf b/examples/resources/airbyte_source_vantage/resource.tf
old mode 100755
new mode 100644
index 23c9ccef6..297f2cd76
--- a/examples/resources/airbyte_source_vantage/resource.tf
+++ b/examples/resources/airbyte_source_vantage/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_vantage" "my_source_vantage" {
configuration = {
access_token = "...my_access_token..."
- source_type = "vantage"
}
- name = "Corey Pacocha"
- secret_id = "...my_secret_id..."
- workspace_id = "6487c5fc-2b86-42a0-8bef-69e100157630"
+ definition_id = "5e9c61e2-0db5-4f4b-b11c-60c3a7ba3362"
+ name = "Tracey Rippin"
+ secret_id = "...my_secret_id..."
+ workspace_id = "5dfad932-4f6a-4b9f-8334-526eae71eb75"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_webflow/resource.tf b/examples/resources/airbyte_source_webflow/resource.tf
old mode 100755
new mode 100644
index c667b4b1d..cb9807d3c
--- a/examples/resources/airbyte_source_webflow/resource.tf
+++ b/examples/resources/airbyte_source_webflow/resource.tf
@@ -1,10 +1,10 @@
resource "airbyte_source_webflow" "my_source_webflow" {
configuration = {
- api_key = "a very long hex sequence"
- site_id = "a relatively long hex sequence"
- source_type = "webflow"
+ api_key = "a very long hex sequence"
+ site_id = "a relatively long hex sequence"
}
- name = "Taylor Paucek"
- secret_id = "...my_secret_id..."
- workspace_id = "fded84a3-5a41-4238-a1a7-35ac26ae33be"
+ definition_id = "9d7dd0bf-2f57-4219-978f-bbe9226a954f"
+ name = "Cary Mitchell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "12e392ce-90b9-4169-bb30-db2efb21ef2b"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_whisky_hunter/resource.tf b/examples/resources/airbyte_source_whisky_hunter/resource.tf
old mode 100755
new mode 100644
index 296450e92..9337670a9
--- a/examples/resources/airbyte_source_whisky_hunter/resource.tf
+++ b/examples/resources/airbyte_source_whisky_hunter/resource.tf
@@ -1,8 +1,7 @@
resource "airbyte_source_whisky_hunter" "my_source_whiskyhunter" {
- configuration = {
- source_type = "whisky-hunter"
- }
- name = "Miss Terrence Kulas"
- secret_id = "...my_secret_id..."
- workspace_id = "f46bca11-06fe-4965-b711-d08cf88ec9f7"
+ configuration = {}
+ definition_id = "c48bf07f-2e77-4213-a664-6fa9b2db7532"
+ name = "Jeremy Kutch"
+ secret_id = "...my_secret_id..."
+ workspace_id = "785b8d4a-d9bb-44c2-904c-6ceb0e440965"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_wikipedia_pageviews/resource.tf b/examples/resources/airbyte_source_wikipedia_pageviews/resource.tf
old mode 100755
new mode 100644
index 8646b4062..cc917da33
--- a/examples/resources/airbyte_source_wikipedia_pageviews/resource.tf
+++ b/examples/resources/airbyte_source_wikipedia_pageviews/resource.tf
@@ -1,15 +1,15 @@
resource "airbyte_source_wikipedia_pageviews" "my_source_wikipediapageviews" {
configuration = {
- access = "mobile-app"
- agent = "spider"
- article = "Are_You_the_One%3F"
- country = "IN"
- end = "...my_end..."
- project = "www.mediawiki.org"
- source_type = "wikipedia-pageviews"
- start = "...my_start..."
+ access = "mobile-app"
+ agent = "automated"
+ article = "Are_You_the_One%3F"
+ country = "IN"
+ end = "...my_end..."
+ project = "www.mediawiki.org"
+ start = "...my_start..."
}
- name = "Laura Murray"
- secret_id = "...my_secret_id..."
- workspace_id = "6ed333bb-0ce8-4aa6-9432-a986eb7e14ca"
+ definition_id = "ecaf35c1-5b37-479d-be3d-ccb9fd6e1ad7"
+ name = "Stella Balistreri"
+ secret_id = "...my_secret_id..."
+ workspace_id = "320ef50a-8ca7-46b0-83ea-280df1804a67"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_woocommerce/resource.tf b/examples/resources/airbyte_source_woocommerce/resource.tf
old mode 100755
new mode 100644
index 3dc16611e..0920d5b37
--- a/examples/resources/airbyte_source_woocommerce/resource.tf
+++ b/examples/resources/airbyte_source_woocommerce/resource.tf
@@ -1,12 +1,12 @@
resource "airbyte_source_woocommerce" "my_source_woocommerce" {
configuration = {
- api_key = "...my_api_key..."
- api_secret = "...my_api_secret..."
- shop = "...my_shop..."
- source_type = "woocommerce"
- start_date = "2021-01-01"
+ api_key = "...my_api_key..."
+ api_secret = "...my_api_secret..."
+ shop = "...my_shop..."
+ start_date = "2021-01-01"
}
- name = "Laura Lindgren III"
- secret_id = "...my_secret_id..."
- workspace_id = "0097019a-48f8-48ec-a7bf-904e01105d38"
+ definition_id = "f3e58149-5129-457c-a986-96756fe05881"
+ name = "Julia Cole"
+ secret_id = "...my_secret_id..."
+ workspace_id = "ad45dc07-8875-4452-bf36-dab5122890f3"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_xero/resource.tf b/examples/resources/airbyte_source_xero/resource.tf
deleted file mode 100755
index d6c017113..000000000
--- a/examples/resources/airbyte_source_xero/resource.tf
+++ /dev/null
@@ -1,17 +0,0 @@
-resource "airbyte_source_xero" "my_source_xero" {
- configuration = {
- authentication = {
- access_token = "...my_access_token..."
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
- refresh_token = "...my_refresh_token..."
- token_expiry_date = "...my_token_expiry_date..."
- }
- source_type = "xero"
- start_date = "2022-03-01T00:00:00Z"
- tenant_id = "...my_tenant_id..."
- }
- name = "Roger Hudson"
- secret_id = "...my_secret_id..."
- workspace_id = "6beb68a0-f657-4b7d-83a1-480f8de30f06"
-}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_xkcd/resource.tf b/examples/resources/airbyte_source_xkcd/resource.tf
old mode 100755
new mode 100644
index 4c5088013..94174ba04
--- a/examples/resources/airbyte_source_xkcd/resource.tf
+++ b/examples/resources/airbyte_source_xkcd/resource.tf
@@ -1,8 +1,7 @@
resource "airbyte_source_xkcd" "my_source_xkcd" {
- configuration = {
- source_type = "xkcd"
- }
- name = "Mr. Laurence Littel"
- secret_id = "...my_secret_id..."
- workspace_id = "18d97e15-2297-4510-9a80-312292cc61c2"
+ configuration = {}
+ definition_id = "e992c2a3-f4c8-4fc0-a6c7-cc4eafdab4c1"
+ name = "Wilbert Ortiz"
+ secret_id = "...my_secret_id..."
+ workspace_id = "6c12869f-984d-4613-8285-42bb37a458fa"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_yandex_metrica/resource.tf b/examples/resources/airbyte_source_yandex_metrica/resource.tf
old mode 100755
new mode 100644
index 63c10201a..3ef0588d1
--- a/examples/resources/airbyte_source_yandex_metrica/resource.tf
+++ b/examples/resources/airbyte_source_yandex_metrica/resource.tf
@@ -1,12 +1,12 @@
resource "airbyte_source_yandex_metrica" "my_source_yandexmetrica" {
configuration = {
- auth_token = "...my_auth_token..."
- counter_id = "...my_counter_id..."
- end_date = "2022-01-01"
- source_type = "yandex-metrica"
- start_date = "2022-01-01"
+ auth_token = "...my_auth_token..."
+ counter_id = "...my_counter_id..."
+ end_date = "2022-01-01"
+ start_date = "2022-01-01"
}
- name = "Dominic Marvin"
- secret_id = "...my_secret_id..."
- workspace_id = "e102da2d-e35f-48e0-9bf3-3eaab45402ac"
+ definition_id = "71a16fff-1f04-4aee-bc30-6c4f3397c204"
+ name = "June Williamson"
+ secret_id = "...my_secret_id..."
+ workspace_id = "deba481e-413d-4d76-8cc3-ae1d775ee978"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_yotpo/resource.tf b/examples/resources/airbyte_source_yotpo/resource.tf
old mode 100755
new mode 100644
index 8f2d3e22c..8812f6cb9
--- a/examples/resources/airbyte_source_yotpo/resource.tf
+++ b/examples/resources/airbyte_source_yotpo/resource.tf
@@ -2,11 +2,11 @@ resource "airbyte_source_yotpo" "my_source_yotpo" {
configuration = {
access_token = "...my_access_token..."
app_key = "...my_app_key..."
- email = "Ibrahim74@gmail.com"
- source_type = "yotpo"
+ email = "Bradley96@hotmail.com"
start_date = "2022-03-01T00:00:00.000Z"
}
- name = "Clark McGlynn"
- secret_id = "...my_secret_id..."
- workspace_id = "61aae5eb-5f0c-4492-b574-4d08a2267aae"
+ definition_id = "746ac11e-b024-4372-8c2f-a90b3fc58aed"
+ name = "Reginald Howell"
+ secret_id = "...my_secret_id..."
+ workspace_id = "07de9609-725c-46d5-a5da-35039f4e4098"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_younium/resource.tf b/examples/resources/airbyte_source_younium/resource.tf
deleted file mode 100755
index 3ebeaeb4e..000000000
--- a/examples/resources/airbyte_source_younium/resource.tf
+++ /dev/null
@@ -1,12 +0,0 @@
-resource "airbyte_source_younium" "my_source_younium" {
- configuration = {
- legal_entity = "...my_legal_entity..."
- password = "...my_password..."
- playground = true
- source_type = "younium"
- username = "Jairo.Monahan79"
- }
- name = "Martha Orn"
- secret_id = "...my_secret_id..."
- workspace_id = "1becb83d-2378-4ae3-bfc2-3d9450a986a4"
-}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_youtube_analytics/resource.tf b/examples/resources/airbyte_source_youtube_analytics/resource.tf
old mode 100755
new mode 100644
index c2dc84e6b..2246a13cb
--- a/examples/resources/airbyte_source_youtube_analytics/resource.tf
+++ b/examples/resources/airbyte_source_youtube_analytics/resource.tf
@@ -1,13 +1,14 @@
resource "airbyte_source_youtube_analytics" "my_source_youtubeanalytics" {
configuration = {
credentials = {
- client_id = "...my_client_id..."
- client_secret = "...my_client_secret..."
- refresh_token = "...my_refresh_token..."
+ additional_properties = "{ \"see\": \"documentation\" }"
+ client_id = "...my_client_id..."
+ client_secret = "...my_client_secret..."
+ refresh_token = "...my_refresh_token..."
}
- source_type = "youtube-analytics"
}
- name = "Tommy Rippin"
- secret_id = "...my_secret_id..."
- workspace_id = "707f06b2-8ecc-4864-9238-6f62c969c4cc"
+ definition_id = "bb8c2a23-b3c0-4134-a218-66cf518dbd5e"
+ name = "Mr. Clay Terry"
+ secret_id = "...my_secret_id..."
+ workspace_id = "e07eadc6-f53d-4253-9b8b-1e39d437be8f"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_zendesk_chat/resource.tf b/examples/resources/airbyte_source_zendesk_chat/resource.tf
old mode 100755
new mode 100644
index 92b8327b0..46f48fd07
--- a/examples/resources/airbyte_source_zendesk_chat/resource.tf
+++ b/examples/resources/airbyte_source_zendesk_chat/resource.tf
@@ -1,16 +1,15 @@
resource "airbyte_source_zendesk_chat" "my_source_zendeskchat" {
configuration = {
credentials = {
- source_zendesk_chat_authorization_method_access_token = {
+ source_zendesk_chat_access_token = {
access_token = "...my_access_token..."
- credentials = "access_token"
}
}
- source_type = "zendesk-chat"
- start_date = "2021-02-01T00:00:00Z"
- subdomain = "...my_subdomain..."
+ start_date = "2021-02-01T00:00:00Z"
+ subdomain = "...my_subdomain..."
}
- name = "Mabel Lebsack MD"
- secret_id = "...my_secret_id..."
- workspace_id = "3fd3c81d-a10f-48c2-bdf9-31da3edb51fa"
+ definition_id = "f797fa8a-e012-4beb-a22c-99641ef630f5"
+ name = "Julian Kuhic"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c0e34b35-2ddb-404c-9bce-387d66444a18"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_zendesk_sell/resource.tf b/examples/resources/airbyte_source_zendesk_sell/resource.tf
new file mode 100644
index 000000000..f6f5b7f80
--- /dev/null
+++ b/examples/resources/airbyte_source_zendesk_sell/resource.tf
@@ -0,0 +1,9 @@
+resource "airbyte_source_zendesk_sell" "my_source_zendesksell" {
+ configuration = {
+ api_token = "f23yhd630otl94y85a8bf384958473pto95847fd006da49382716or937ruw059"
+ }
+ definition_id = "6797a763-e10f-499e-8087-9e49484a7485"
+ name = "Jane Batz"
+ secret_id = "...my_secret_id..."
+ workspace_id = "4aee427f-93df-49bf-84b7-84edaaf2f424"
+}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_zendesk_sunshine/resource.tf b/examples/resources/airbyte_source_zendesk_sunshine/resource.tf
old mode 100755
new mode 100644
index 1ec0edb61..c2ed04214
--- a/examples/resources/airbyte_source_zendesk_sunshine/resource.tf
+++ b/examples/resources/airbyte_source_zendesk_sunshine/resource.tf
@@ -1,17 +1,16 @@
resource "airbyte_source_zendesk_sunshine" "my_source_zendesksunshine" {
configuration = {
credentials = {
- source_zendesk_sunshine_authorization_method_api_token = {
- api_token = "...my_api_token..."
- auth_method = "api_token"
- email = "Leonor_Funk@hotmail.com"
+ source_zendesk_sunshine_api_token = {
+ api_token = "...my_api_token..."
+ email = "Robbie51@hotmail.com"
}
}
- source_type = "zendesk-sunshine"
- start_date = "2021-01-01T00:00:00Z"
- subdomain = "...my_subdomain..."
+ start_date = "2021-01-01T00:00:00Z"
+ subdomain = "...my_subdomain..."
}
- name = "Mrs. Edith Hermiston"
- secret_id = "...my_secret_id..."
- workspace_id = "726d1532-1b83-42a5-ad69-180ff60eb9a6"
+ definition_id = "6f099262-2de7-4b1a-93e5-915fe5844c8d"
+ name = "Kristie Moen"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7badf74d-23a8-47a4-aabf-6ae57802daa8"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_zendesk_support/resource.tf b/examples/resources/airbyte_source_zendesk_support/resource.tf
old mode 100755
new mode 100644
index 795dc3b22..5ced0825e
--- a/examples/resources/airbyte_source_zendesk_support/resource.tf
+++ b/examples/resources/airbyte_source_zendesk_support/resource.tf
@@ -1,18 +1,18 @@
resource "airbyte_source_zendesk_support" "my_source_zendesksupport" {
configuration = {
credentials = {
- source_zendesk_support_authentication_api_token = {
- api_token = "...my_api_token..."
- credentials = "api_token"
- email = "Ezequiel.Lindgren56@yahoo.com"
+ source_zendesk_support_api_token = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ api_token = "...my_api_token..."
+ email = "Ansel_McLaughlin@gmail.com"
}
}
- ignore_pagination = true
- source_type = "zendesk-support"
+ ignore_pagination = false
start_date = "2020-10-15T00:00:00Z"
subdomain = "...my_subdomain..."
}
- name = "Alexander Friesen"
- secret_id = "...my_secret_id..."
- workspace_id = "82dbec75-c68c-4606-9946-8ce304d8849b"
+ definition_id = "7526c0e6-8d41-4f29-878b-d831a4caf6a0"
+ name = "Linda Weissnat"
+ secret_id = "...my_secret_id..."
+ workspace_id = "20a84c82-feed-435f-9471-260525978122"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_zendesk_talk/resource.tf b/examples/resources/airbyte_source_zendesk_talk/resource.tf
old mode 100755
new mode 100644
index 86c3f87c4..a9f2a1f1e
--- a/examples/resources/airbyte_source_zendesk_talk/resource.tf
+++ b/examples/resources/airbyte_source_zendesk_talk/resource.tf
@@ -1,17 +1,17 @@
resource "airbyte_source_zendesk_talk" "my_source_zendesktalk" {
configuration = {
credentials = {
- source_zendesk_talk_authentication_api_token = {
- api_token = "...my_api_token..."
- auth_type = "api_token"
- email = "Kacie27@hotmail.com"
+ source_zendesk_talk_api_token = {
+ additional_properties = "{ \"see\": \"documentation\" }"
+ api_token = "...my_api_token..."
+ email = "Brain88@gmail.com"
}
}
- source_type = "zendesk-talk"
- start_date = "2020-10-15T00:00:00Z"
- subdomain = "...my_subdomain..."
+ start_date = "2020-10-15T00:00:00Z"
+ subdomain = "...my_subdomain..."
}
- name = "Jackie Welch"
- secret_id = "...my_secret_id..."
- workspace_id = "bb0c69e3-72db-4134-8ba9-f78a5c0ed7aa"
+ definition_id = "9a97873e-c6ec-423f-8936-834bb7f256aa"
+ name = "Gwen Towne"
+ secret_id = "...my_secret_id..."
+ workspace_id = "7a7ac93c-e210-41f6-92ef-f8de56504728"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_zenloop/resource.tf b/examples/resources/airbyte_source_zenloop/resource.tf
old mode 100755
new mode 100644
index 0f6d18eb5..f3f70a2f6
--- a/examples/resources/airbyte_source_zenloop/resource.tf
+++ b/examples/resources/airbyte_source_zenloop/resource.tf
@@ -2,11 +2,11 @@ resource "airbyte_source_zenloop" "my_source_zenloop" {
configuration = {
api_token = "...my_api_token..."
date_from = "2021-10-24T03:30:30Z"
- source_type = "zenloop"
survey_group_id = "...my_survey_group_id..."
survey_id = "...my_survey_id..."
}
- name = "Ricardo Champlin"
- secret_id = "...my_secret_id..."
- workspace_id = "7261fb0c-58d2-47b5-9996-b5b4b50eef71"
+ definition_id = "30aace29-0d7b-43b3-98af-f5206e7c6651"
+ name = "Colleen Hodkiewicz"
+ secret_id = "...my_secret_id..."
+ workspace_id = "de9cd819-ecc3-47ba-9700-ba64daf2cd7c"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_zoho_crm/resource.tf b/examples/resources/airbyte_source_zoho_crm/resource.tf
old mode 100755
new mode 100644
index 9aebdfb91..d4deffaaf
--- a/examples/resources/airbyte_source_zoho_crm/resource.tf
+++ b/examples/resources/airbyte_source_zoho_crm/resource.tf
@@ -2,14 +2,14 @@ resource "airbyte_source_zoho_crm" "my_source_zohocrm" {
configuration = {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- dc_region = "US"
- edition = "Enterprise"
- environment = "Developer"
+ dc_region = "IN"
+ edition = "Ultimate"
+ environment = "Sandbox"
refresh_token = "...my_refresh_token..."
- source_type = "zoho-crm"
- start_datetime = "2000-01-01T13:00+00:00"
+ start_datetime = "2000-01-01 13:00"
}
- name = "Kenneth Fisher"
- secret_id = "...my_secret_id..."
- workspace_id = "b1710688-deeb-4ef8-97f3-dd0ccd33f11b"
+ definition_id = "7a306443-a75b-4cf4-a2e1-378db01d76f7"
+ name = "Jody Collins"
+ secret_id = "...my_secret_id..."
+ workspace_id = "a6e51f0c-20e4-4312-90cb-fe39df03e297"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_zoom/resource.tf b/examples/resources/airbyte_source_zoom/resource.tf
old mode 100755
new mode 100644
index bfac92c45..706fd376f
--- a/examples/resources/airbyte_source_zoom/resource.tf
+++ b/examples/resources/airbyte_source_zoom/resource.tf
@@ -1,9 +1,9 @@
resource "airbyte_source_zoom" "my_source_zoom" {
configuration = {
- jwt_token = "...my_jwt_token..."
- source_type = "zoom"
+ jwt_token = "...my_jwt_token..."
}
- name = "Alexis Gutmann IV"
- secret_id = "...my_secret_id..."
- workspace_id = "0aa10418-6ec7-459e-82f3-702c5c8e2d30"
+ definition_id = "d6f5cf39-b34f-4958-9f42-198f32822b82"
+ name = "Gregory Hirthe"
+ secret_id = "...my_secret_id..."
+ workspace_id = "bc2b7c1d-3540-4fbb-a2d8-a9d0010028d1"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_source_zuora/resource.tf b/examples/resources/airbyte_source_zuora/resource.tf
old mode 100755
new mode 100644
index d1e679297..b925a18ec
--- a/examples/resources/airbyte_source_zuora/resource.tf
+++ b/examples/resources/airbyte_source_zuora/resource.tf
@@ -2,13 +2,13 @@ resource "airbyte_source_zuora" "my_source_zuora" {
configuration = {
client_id = "...my_client_id..."
client_secret = "...my_client_secret..."
- data_query = "Unlimited"
- source_type = "zuora"
+ data_query = "Live"
start_date = "...my_start_date..."
- tenant_endpoint = "US Performance Test"
- window_in_days = "200"
+ tenant_endpoint = "EU Production"
+ window_in_days = "0.5"
}
- name = "Joan Bednar"
- secret_id = "...my_secret_id..."
- workspace_id = "a44707bf-375b-4442-8282-1fdb2f69e592"
+ definition_id = "280d807c-dd8e-4b8c-b5c4-610938eb2433"
+ name = "Anne Funk"
+ secret_id = "...my_secret_id..."
+ workspace_id = "c5c5aa0b-5368-4b26-a568-aa6dc340bb15"
}
\ No newline at end of file
diff --git a/examples/resources/airbyte_workspace/resource.tf b/examples/resources/airbyte_workspace/resource.tf
old mode 100755
new mode 100644
index e67208f35..1e37c56fe
--- a/examples/resources/airbyte_workspace/resource.tf
+++ b/examples/resources/airbyte_workspace/resource.tf
@@ -1,3 +1,3 @@
resource "airbyte_workspace" "my_workspace" {
- name = "Glenda Schiller DDS"
+ name = "Jessie Moen"
}
\ No newline at end of file
diff --git a/files.gen b/files.gen
index d389f8a38..97be718c5 100755
--- a/files.gen
+++ b/files.gen
@@ -5,687 +5,6 @@ internal/sdk/sources.go
internal/sdk/streams.go
internal/sdk/workspaces.go
internal/sdk/sdk.go
-internal/provider/type_stream_configuration.go
-internal/provider/type_stream_configurations.go
-internal/provider/type_connection_schedule.go
-internal/provider/type_destination_aws_datalake_authentication_mode_iam_role.go
-internal/provider/type_destination_aws_datalake_authentication_mode_iam_user.go
-internal/provider/type_destination_aws_datalake_authentication_mode.go
-internal/provider/type_destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json.go
-internal/provider/type_destination_aws_datalake_output_format_wildcard_parquet_columnar_storage.go
-internal/provider/type_destination_aws_datalake_output_format_wildcard.go
-internal/provider/type_destination_aws_datalake.go
-internal/provider/type_destination_azure_blob_storage_output_format_csv_comma_separated_values.go
-internal/provider/type_destination_azure_blob_storage_output_format_json_lines_newline_delimited_json.go
-internal/provider/type_destination_azure_blob_storage_output_format.go
-internal/provider/type_destination_azure_blob_storage.go
-internal/provider/type_destination_bigquery_loading_method_gcs_staging_credential_hmac_key.go
-internal/provider/type_destination_bigquery_loading_method_gcs_staging_credential.go
-internal/provider/type_destination_bigquery_loading_method_gcs_staging.go
-internal/provider/type_destination_bigquery_loading_method_standard_inserts.go
-internal/provider/type_destination_bigquery_update_loading_method_gcs_staging_credential.go
-internal/provider/type_destination_bigquery_update_loading_method_gcs_staging.go
-internal/provider/type_destination_bigquery_loading_method.go
-internal/provider/type_destination_bigquery.go
-internal/provider/type_destination_bigquery_denormalized_loading_method_gcs_staging_credential.go
-internal/provider/type_destination_bigquery_denormalized_loading_method_gcs_staging.go
-internal/provider/type_destination_bigquery_denormalized_update_loading_method_gcs_staging_credential.go
-internal/provider/type_destination_bigquery_denormalized_update_loading_method_gcs_staging.go
-internal/provider/type_destination_bigquery_denormalized_loading_method.go
-internal/provider/type_destination_bigquery_denormalized.go
-internal/provider/type_destination_clickhouse_ssh_tunnel_method_no_tunnel.go
-internal/provider/type_destination_clickhouse_ssh_tunnel_method_password_authentication.go
-internal/provider/type_destination_clickhouse_ssh_tunnel_method_ssh_key_authentication.go
-internal/provider/type_destination_clickhouse_ssh_tunnel_method.go
-internal/provider/type_destination_clickhouse.go
-internal/provider/type_destination_convex.go
-internal/provider/type_destination_cumulio.go
-internal/provider/type_destination_databend.go
-internal/provider/type_destination_databricks_data_source_recommended_managed_tables.go
-internal/provider/type_destination_databricks_data_source_amazon_s3.go
-internal/provider/type_destination_databricks_data_source_azure_blob_storage.go
-internal/provider/type_destination_databricks_update_data_source_amazon_s3.go
-internal/provider/type_destination_databricks_data_source1.go
-internal/provider/type_destination_databricks.go
-internal/provider/type_destination_dev_null_test_destination_silent.go
-internal/provider/type_destination_dev_null_test_destination.go
-internal/provider/type_destination_dev_null.go
-internal/provider/type_destination_dynamodb.go
-internal/provider/type_destination_elasticsearch_authentication_method_api_key_secret.go
-internal/provider/type_destination_elasticsearch_authentication_method_username_password.go
-internal/provider/type_destination_elasticsearch_authentication_method.go
-internal/provider/type_destination_elasticsearch.go
-internal/provider/type_destination_firebolt_loading_method_external_table_via_s3.go
-internal/provider/type_destination_firebolt_loading_method_sql_inserts.go
-internal/provider/type_destination_firebolt_loading_method.go
-internal/provider/type_destination_firebolt.go
-internal/provider/type_destination_firestore.go
-internal/provider/type_destination_gcs_authentication.go
-internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2.go
-internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_deflate.go
-internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression.go
-internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_snappy.go
-internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_xz.go
-internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard.go
-internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec.go
-internal/provider/type_destination_gcs_output_format_avro_apache_avro.go
-internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression_gzip.go
-internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression_no_compression.go
-internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression.go
-internal/provider/type_destination_gcs_output_format_csv_comma_separated_values.go
-internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip.go
-internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression.go
-internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression.go
-internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json.go
-internal/provider/type_destination_gcs_output_format_parquet_columnar_storage.go
-internal/provider/type_destination_gcs_update_output_format_avro_apache_avro_compression_codec.go
-internal/provider/type_destination_gcs_update_output_format_avro_apache_avro.go
-internal/provider/type_destination_gcs_update_output_format_csv_comma_separated_values_compression.go
-internal/provider/type_destination_gcs_update_output_format_csv_comma_separated_values.go
-internal/provider/type_destination_gcs_update_output_format_json_lines_newline_delimited_json_compression.go
-internal/provider/type_destination_gcs_update_output_format_json_lines_newline_delimited_json.go
-internal/provider/type_destination_gcs_output_format.go
-internal/provider/type_destination_gcs.go
-internal/provider/type_destination_google_sheets_authentication_via_google_o_auth.go
-internal/provider/type_destination_google_sheets.go
-internal/provider/type_destination_keen.go
-internal/provider/type_destination_kinesis.go
-internal/provider/type_destination_langchain_embedding_fake.go
-internal/provider/type_destination_langchain_embedding_open_ai.go
-internal/provider/type_destination_langchain_embedding.go
-internal/provider/type_destination_langchain_indexing_chroma_local_persistance.go
-internal/provider/type_destination_langchain_indexing_doc_array_hnsw_search.go
-internal/provider/type_destination_langchain_indexing_pinecone.go
-internal/provider/type_destination_langchain_indexing.go
-internal/provider/type_destination_langchain_processing_config_model.go
-internal/provider/type_destination_langchain.go
-internal/provider/type_destination_milvus_embedding_cohere.go
-internal/provider/type_destination_milvus_embedding_from_field.go
-internal/provider/type_destination_milvus_embedding.go
-internal/provider/type_destination_milvus_indexing_authentication_api_token.go
-internal/provider/type_destination_milvus_indexing_authentication_no_auth.go
-internal/provider/type_destination_milvus_indexing_authentication_username_password.go
-internal/provider/type_destination_milvus_indexing_authentication.go
-internal/provider/type_destination_milvus_indexing.go
-internal/provider/type_destination_milvus_processing_config_model.go
-internal/provider/type_destination_milvus.go
-internal/provider/type_destination_mongodb_authorization_type_login_password.go
-internal/provider/type_destination_mongodb_authorization_type_none.go
-internal/provider/type_destination_mongodb_authorization_type.go
-internal/provider/type_destination_mongodb_mongo_db_instance_type_mongo_db_atlas.go
-internal/provider/type_destination_mongodb_mongo_db_instance_type_replica_set.go
-internal/provider/type_destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance.go
-internal/provider/type_destination_mongodb_mongo_db_instance_type.go
-internal/provider/type_destination_mongodb_ssh_tunnel_method.go
-internal/provider/type_destination_mongodb.go
-internal/provider/type_destination_mssql_ssl_method_encrypted_trust_server_certificate.go
-internal/provider/type_destination_mssql_ssl_method_encrypted_verify_certificate.go
-internal/provider/type_destination_mssql_ssl_method.go
-internal/provider/type_destination_mssql_ssh_tunnel_method.go
-internal/provider/type_destination_mssql.go
-internal/provider/type_destination_mysql_ssh_tunnel_method.go
-internal/provider/type_destination_mysql.go
-internal/provider/type_destination_oracle_ssh_tunnel_method.go
-internal/provider/type_destination_oracle.go
-internal/provider/type_destination_pinecone_embedding.go
-internal/provider/type_destination_pinecone_indexing.go
-internal/provider/type_destination_pinecone.go
-internal/provider/type_destination_postgres_ssl_modes_allow.go
-internal/provider/type_destination_postgres_ssl_modes_disable.go
-internal/provider/type_destination_postgres_ssl_modes_prefer.go
-internal/provider/type_destination_postgres_ssl_modes_require.go
-internal/provider/type_destination_postgres_ssl_modes_verify_ca.go
-internal/provider/type_destination_postgres_ssl_modes_verify_full.go
-internal/provider/type_destination_postgres_ssl_modes.go
-internal/provider/type_destination_postgres_ssh_tunnel_method.go
-internal/provider/type_destination_postgres.go
-internal/provider/type_destination_pubsub.go
-internal/provider/type_destination_redis_ssl_modes.go
-internal/provider/type_destination_redis_ssh_tunnel_method.go
-internal/provider/type_destination_redis.go
-internal/provider/type_destination_redshift_ssh_tunnel_method.go
-internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption_aescbc_envelope_encryption.go
-internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption_no_encryption.go
-internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption.go
-internal/provider/type_destination_redshift_uploading_method_s3_staging.go
-internal/provider/type_destination_redshift_uploading_method_standard.go
-internal/provider/type_destination_redshift_update_uploading_method_s3_staging_encryption.go
-internal/provider/type_destination_redshift_update_uploading_method_s3_staging.go
-internal/provider/type_destination_redshift_uploading_method.go
-internal/provider/type_destination_redshift.go
-internal/provider/type_destination_s3_output_format_avro_apache_avro_compression_codec.go
-internal/provider/type_destination_s3_output_format_avro_apache_avro.go
-internal/provider/type_destination_s3_output_format_csv_comma_separated_values_compression.go
-internal/provider/type_destination_s3_output_format_csv_comma_separated_values.go
-internal/provider/type_destination_s3_output_format_json_lines_newline_delimited_json_compression.go
-internal/provider/type_destination_s3_output_format_json_lines_newline_delimited_json.go
-internal/provider/type_destination_s3_output_format_parquet_columnar_storage.go
-internal/provider/type_destination_s3_update_output_format_avro_apache_avro_compression_codec.go
-internal/provider/type_destination_s3_update_output_format_avro_apache_avro.go
-internal/provider/type_destination_s3_update_output_format_csv_comma_separated_values_compression.go
-internal/provider/type_destination_s3_update_output_format_csv_comma_separated_values.go
-internal/provider/type_destination_s3_update_output_format_json_lines_newline_delimited_json_compression.go
-internal/provider/type_destination_s3_update_output_format_json_lines_newline_delimited_json.go
-internal/provider/type_destination_s3_output_format.go
-internal/provider/type_destination_s3.go
-internal/provider/type_destination_s3_glue_output_format_json_lines_newline_delimited_json_compression.go
-internal/provider/type_destination_s3_glue_output_format_json_lines_newline_delimited_json.go
-internal/provider/type_destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression.go
-internal/provider/type_destination_s3_glue_update_output_format_json_lines_newline_delimited_json.go
-internal/provider/type_destination_s3_glue_output_format.go
-internal/provider/type_destination_s3_glue.go
-internal/provider/type_destination_sftp_json.go
-internal/provider/type_destination_snowflake_authorization_method_key_pair_authentication.go
-internal/provider/type_destination_snowflake_authorization_method_o_auth20.go
-internal/provider/type_destination_snowflake_authorization_method_username_and_password.go
-internal/provider/type_destination_snowflake_authorization_method.go
-internal/provider/type_destination_snowflake.go
-internal/provider/type_destination_timeplus.go
-internal/provider/type_destination_typesense.go
-internal/provider/type_destination_vertica_ssh_tunnel_method.go
-internal/provider/type_destination_vertica.go
-internal/provider/type_destination_xata.go
-internal/provider/type_source_aha.go
-internal/provider/type_source_aircall.go
-internal/provider/type_source_airtable_authentication_o_auth20.go
-internal/provider/type_source_airtable_authentication_personal_access_token.go
-internal/provider/type_source_airtable_authentication.go
-internal/provider/type_source_airtable.go
-internal/provider/type_source_alloydb_replication_method_logical_replication_cdc.go
-internal/provider/type_source_alloydb_replication_method_standard.go
-internal/provider/type_source_alloydb_replication_method_standard_xmin.go
-internal/provider/type_source_alloydb_update_replication_method_logical_replication_cdc.go
-internal/provider/type_source_alloydb_replication_method.go
-internal/provider/type_source_alloydb_ssl_modes_allow.go
-internal/provider/type_source_alloydb_ssl_modes_disable.go
-internal/provider/type_source_alloydb_ssl_modes_prefer.go
-internal/provider/type_source_alloydb_ssl_modes_require.go
-internal/provider/type_source_alloydb_ssl_modes_verify_ca.go
-internal/provider/type_source_alloydb_ssl_modes_verify_full.go
-internal/provider/type_source_alloydb_update_ssl_modes_allow.go
-internal/provider/type_source_alloydb_update_ssl_modes_disable.go
-internal/provider/type_source_alloydb_update_ssl_modes_prefer.go
-internal/provider/type_source_alloydb_update_ssl_modes_require.go
-internal/provider/type_source_alloydb_update_ssl_modes_verify_ca.go
-internal/provider/type_source_alloydb_update_ssl_modes_verify_full.go
-internal/provider/type_source_alloydb_ssl_modes.go
-internal/provider/type_source_alloydb_ssh_tunnel_method.go
-internal/provider/type_source_alloydb.go
-internal/provider/type_source_amazon_ads.go
-internal/provider/type_source_amazon_seller_partner.go
-internal/provider/type_source_amazon_sqs.go
-internal/provider/type_source_amplitude.go
-internal/provider/type_source_apify_dataset.go
-internal/provider/type_source_appfollow.go
-internal/provider/type_source_asana_authentication_mechanism_authenticate_via_asana_oauth.go
-internal/provider/type_source_asana_authentication_mechanism_authenticate_with_personal_access_token.go
-internal/provider/type_source_asana_authentication_mechanism.go
-internal/provider/type_source_asana.go
-internal/provider/type_source_auth0_authentication_method_o_auth2_access_token.go
-internal/provider/type_source_auth0_authentication_method_o_auth2_confidential_application.go
-internal/provider/type_source_auth0_authentication_method.go
-internal/provider/type_source_auth0.go
-internal/provider/type_source_aws_cloudtrail.go
-internal/provider/type_source_azure_blob_storage_input_format_json_lines_newline_delimited_json.go
-internal/provider/type_source_azure_blob_storage_input_format.go
-internal/provider/type_source_azure_blob_storage.go
-internal/provider/type_source_azure_table.go
-internal/provider/type_source_bamboo_hr.go
-internal/provider/type_source_bigcommerce.go
-internal/provider/type_source_bigquery.go
-internal/provider/type_source_bing_ads.go
-internal/provider/type_source_braintree.go
-internal/provider/type_source_braze.go
-internal/provider/type_source_chargebee.go
-internal/provider/type_source_chartmogul.go
-internal/provider/type_source_clickhouse_ssh_tunnel_method.go
-internal/provider/type_source_clickhouse.go
-internal/provider/type_source_clickup_api.go
-internal/provider/type_source_clockify.go
-internal/provider/type_source_close_com.go
-internal/provider/type_source_coda.go
-internal/provider/type_source_coin_api.go
-internal/provider/type_source_coinmarketcap.go
-internal/provider/type_source_configcat.go
-internal/provider/type_source_confluence.go
-internal/provider/type_source_convex.go
-internal/provider/type_source_datascope.go
-internal/provider/type_source_delighted.go
-internal/provider/type_source_dixa.go
-internal/provider/type_source_dockerhub.go
-internal/provider/type_source_dremio.go
-internal/provider/type_source_dynamodb.go
-internal/provider/type_source_e2e_test_cloud_mock_catalog_multi_schema.go
-internal/provider/type_source_e2e_test_cloud_mock_catalog_single_schema.go
-internal/provider/type_source_e2e_test_cloud_mock_catalog.go
-internal/provider/type_source_e2e_test_cloud.go
-internal/provider/type_source_emailoctopus.go
-internal/provider/type_source_exchange_rates.go
-internal/provider/type_source_facebook_marketing_insight_config.go
-internal/provider/type_source_facebook_marketing.go
-internal/provider/type_source_facebook_pages.go
-internal/provider/type_source_faker.go
-internal/provider/type_source_fauna_collection_deletion_mode_disabled.go
-internal/provider/type_source_fauna_collection_deletion_mode_enabled.go
-internal/provider/type_source_fauna_collection_deletion_mode.go
-internal/provider/type_source_fauna_collection.go
-internal/provider/type_source_fauna.go
-internal/provider/type_source_file_secure_storage_provider_az_blob_azure_blob_storage.go
-internal/provider/type_source_file_secure_storage_provider_gcs_google_cloud_storage.go
-internal/provider/type_source_file_secure_storage_provider_https_public_web.go
-internal/provider/type_source_file_secure_storage_provider_s3_amazon_web_services.go
-internal/provider/type_source_file_secure_storage_provider_scp_secure_copy_protocol.go
-internal/provider/type_source_file_secure_storage_provider_sftp_secure_file_transfer_protocol.go
-internal/provider/type_source_file_secure_storage_provider_ssh_secure_shell.go
-internal/provider/type_source_file_secure_storage_provider.go
-internal/provider/type_source_file_secure.go
-internal/provider/type_source_firebolt.go
-internal/provider/type_source_freshcaller.go
-internal/provider/type_source_freshdesk.go
-internal/provider/type_source_freshsales.go
-internal/provider/type_source_gainsight_px.go
-internal/provider/type_source_gcs.go
-internal/provider/type_source_getlago.go
-internal/provider/type_source_github_authentication_o_auth.go
-internal/provider/type_source_github_authentication_personal_access_token.go
-internal/provider/type_source_github_authentication.go
-internal/provider/type_source_github.go
-internal/provider/type_source_gitlab_authorization_method_o_auth20.go
-internal/provider/type_source_gitlab_authorization_method_private_token.go
-internal/provider/type_source_gitlab_authorization_method.go
-internal/provider/type_source_gitlab.go
-internal/provider/type_source_glassfrog.go
-internal/provider/type_source_gnews.go
-internal/provider/type_source_google_ads_google_credentials.go
-internal/provider/type_source_google_ads_custom_queries.go
-internal/provider/type_source_google_ads.go
-internal/provider/type_source_google_analytics_data_api_credentials_authenticate_via_google_oauth.go
-internal/provider/type_source_google_analytics_data_api_credentials_service_account_key_authentication.go
-internal/provider/type_source_google_analytics_data_api_credentials.go
-internal/provider/type_source_google_analytics_data_api.go
-internal/provider/type_source_google_analytics_v4_credentials.go
-internal/provider/type_source_google_analytics_v4.go
-internal/provider/type_source_google_directory_google_credentials_service_account_key.go
-internal/provider/type_source_google_directory_google_credentials_sign_in_via_google_o_auth.go
-internal/provider/type_source_google_directory_google_credentials.go
-internal/provider/type_source_google_directory.go
-internal/provider/type_source_google_pagespeed_insights.go
-internal/provider/type_source_google_search_console_authentication_type_o_auth.go
-internal/provider/type_source_google_search_console_authentication_type_service_account_key_authentication.go
-internal/provider/type_source_google_search_console_authentication_type.go
-internal/provider/type_source_google_search_console_custom_report_config.go
-internal/provider/type_source_google_search_console.go
-internal/provider/type_source_google_sheets_authentication_authenticate_via_google_o_auth.go
-internal/provider/type_source_google_sheets_authentication_service_account_key_authentication.go
-internal/provider/type_source_google_sheets_authentication.go
-internal/provider/type_source_google_sheets.go
-internal/provider/type_source_google_webfonts.go
-internal/provider/type_source_google_workspace_admin_reports.go
-internal/provider/type_source_greenhouse.go
-internal/provider/type_source_gridly.go
-internal/provider/type_source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth.go
-internal/provider/type_source_harvest_authentication_mechanism_authenticate_with_personal_access_token.go
-internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth.go
-internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token.go
-internal/provider/type_source_harvest_authentication_mechanism.go
-internal/provider/type_source_harvest.go
-internal/provider/type_source_hubplanner.go
-internal/provider/type_source_hubspot_authentication_o_auth.go
-internal/provider/type_source_hubspot_authentication_private_app.go
-internal/provider/type_source_hubspot_authentication.go
-internal/provider/type_source_hubspot.go
-internal/provider/type_source_insightly.go
-internal/provider/type_source_instagram.go
-internal/provider/type_source_instatus.go
-internal/provider/type_source_intercom.go
-internal/provider/type_source_ip2whois.go
-internal/provider/type_source_iterable.go
-internal/provider/type_source_jira.go
-internal/provider/type_source_k6_cloud.go
-internal/provider/type_source_klarna.go
-internal/provider/type_source_klaviyo.go
-internal/provider/type_source_kustomer_singer.go
-internal/provider/type_source_kyve.go
-internal/provider/type_source_launchdarkly.go
-internal/provider/type_source_lemlist.go
-internal/provider/type_source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key.go
-internal/provider/type_source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth.go
-internal/provider/type_source_lever_hiring_authentication_mechanism.go
-internal/provider/type_source_lever_hiring.go
-internal/provider/type_source_linkedin_ads_ad_analytics_report_configuration.go
-internal/provider/type_source_linkedin_ads_authentication_access_token.go
-internal/provider/type_source_linkedin_ads_authentication_o_auth20.go
-internal/provider/type_source_linkedin_ads_authentication.go
-internal/provider/type_source_linkedin_ads.go
-internal/provider/type_source_linkedin_pages_authentication.go
-internal/provider/type_source_linkedin_pages.go
-internal/provider/type_source_linnworks.go
-internal/provider/type_source_lokalise.go
-internal/provider/type_source_mailchimp_authentication_api_key.go
-internal/provider/type_source_mailchimp_authentication_o_auth20.go
-internal/provider/type_source_mailchimp_authentication.go
-internal/provider/type_source_mailchimp.go
-internal/provider/type_source_mailgun.go
-internal/provider/type_source_mailjet_sms.go
-internal/provider/type_source_marketo.go
-internal/provider/type_source_metabase.go
-internal/provider/type_source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft.go
-internal/provider/type_source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth20.go
-internal/provider/type_source_microsoft_teams_authentication_mechanism.go
-internal/provider/type_source_microsoft_teams.go
-internal/provider/type_source_mixpanel_authentication_wildcard_project_secret.go
-internal/provider/type_source_mixpanel_authentication_wildcard_service_account.go
-internal/provider/type_source_mixpanel_authentication_wildcard.go
-internal/provider/type_source_mixpanel.go
-internal/provider/type_source_monday_authorization_method_api_token.go
-internal/provider/type_source_monday_authorization_method_o_auth20.go
-internal/provider/type_source_monday_authorization_method.go
-internal/provider/type_source_monday.go
-internal/provider/type_source_mongodb_mongo_db_instance_type_mongo_db_atlas.go
-internal/provider/type_source_mongodb_mongo_db_instance_type_replica_set.go
-internal/provider/type_source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance.go
-internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atlas.go
-internal/provider/type_source_mongodb_mongo_db_instance_type.go
-internal/provider/type_source_mongodb.go
-internal/provider/type_source_mongodb_internal_poc.go
-internal/provider/type_source_mssql_update_method_read_changes_using_change_data_capture_cdc.go
-internal/provider/type_source_mssql_update_method_scan_changes_with_user_defined_cursor.go
-internal/provider/type_source_mssql_update_method.go
-internal/provider/type_source_mssql_ssl_method.go
-internal/provider/type_source_mssql_ssh_tunnel_method.go
-internal/provider/type_source_mssql.go
-internal/provider/type_source_my_hours.go
-internal/provider/type_source_mysql_update_method_read_changes_using_binary_log_cdc.go
-internal/provider/type_source_mysql_update_method.go
-internal/provider/type_source_mysql_ssl_modes_preferred.go
-internal/provider/type_source_mysql_ssl_modes_required.go
-internal/provider/type_source_mysql_ssl_modes_verify_ca.go
-internal/provider/type_source_mysql_ssl_modes_verify_identity.go
-internal/provider/type_source_mysql_ssl_modes.go
-internal/provider/type_source_mysql_ssh_tunnel_method.go
-internal/provider/type_source_mysql.go
-internal/provider/type_source_netsuite.go
-internal/provider/type_source_notion_authenticate_using_access_token.go
-internal/provider/type_source_notion_authenticate_using_o_auth20.go
-internal/provider/type_source_notion_authenticate_using.go
-internal/provider/type_source_notion.go
-internal/provider/type_source_nytimes.go
-internal/provider/type_source_okta_authorization_method_o_auth20.go
-internal/provider/type_source_okta_authorization_method.go
-internal/provider/type_source_okta.go
-internal/provider/type_source_omnisend.go
-internal/provider/type_source_onesignal_applications.go
-internal/provider/type_source_onesignal.go
-internal/provider/type_source_oracle_connect_by_service_name.go
-internal/provider/type_source_oracle_connect_by_system_idsid.go
-internal/provider/type_source_oracle_connect_by.go
-internal/provider/type_source_oracle_encryption_native_network_encryption_nne.go
-internal/provider/type_source_oracle_encryption_tls_encrypted_verify_certificate.go
-internal/provider/type_source_oracle_encryption.go
-internal/provider/type_source_oracle_ssh_tunnel_method.go
-internal/provider/type_source_oracle.go
-internal/provider/type_source_orb.go
-internal/provider/type_source_orbit.go
-internal/provider/type_source_outbrain_amplify_authentication_method_access_token.go
-internal/provider/type_source_outbrain_amplify_authentication_method_username_password.go
-internal/provider/type_source_outbrain_amplify_authentication_method.go
-internal/provider/type_source_outbrain_amplify.go
-internal/provider/type_source_outreach.go
-internal/provider/type_source_paypal_transaction.go
-internal/provider/type_source_paystack.go
-internal/provider/type_source_pendo.go
-internal/provider/type_source_persistiq.go
-internal/provider/type_source_pexels_api.go
-internal/provider/type_source_pinterest_authorization_method_o_auth20.go
-internal/provider/type_source_pinterest_authorization_method.go
-internal/provider/type_source_pinterest.go
-internal/provider/type_source_pipedrive_api_key_authentication.go
-internal/provider/type_source_pipedrive.go
-internal/provider/type_source_pocket.go
-internal/provider/type_source_pokeapi.go
-internal/provider/type_source_polygon_stock_api.go
-internal/provider/type_source_postgres_update_method_detect_changes_with_xmin_system_column.go
-internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc.go
-internal/provider/type_source_postgres_update_method_scan_changes_with_user_defined_cursor.go
-internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc.go
-internal/provider/type_source_postgres_update_method.go
-internal/provider/type_source_postgres_ssl_modes_allow.go
-internal/provider/type_source_postgres_ssl_modes_disable.go
-internal/provider/type_source_postgres_ssl_modes_prefer.go
-internal/provider/type_source_postgres_ssl_modes_require.go
-internal/provider/type_source_postgres_ssl_modes_verify_ca.go
-internal/provider/type_source_postgres_ssl_modes_verify_full.go
-internal/provider/type_source_postgres_update_ssl_modes_allow.go
-internal/provider/type_source_postgres_update_ssl_modes_disable.go
-internal/provider/type_source_postgres_update_ssl_modes_prefer.go
-internal/provider/type_source_postgres_update_ssl_modes_require.go
-internal/provider/type_source_postgres_update_ssl_modes_verify_ca.go
-internal/provider/type_source_postgres_update_ssl_modes_verify_full.go
-internal/provider/type_source_postgres_ssl_modes.go
-internal/provider/type_source_postgres_ssh_tunnel_method.go
-internal/provider/type_source_postgres.go
-internal/provider/type_source_posthog.go
-internal/provider/type_source_postmarkapp.go
-internal/provider/type_source_prestashop.go
-internal/provider/type_source_punk_api.go
-internal/provider/type_source_pypi.go
-internal/provider/type_source_qualaroo.go
-internal/provider/type_source_quickbooks_authorization_method_o_auth20.go
-internal/provider/type_source_quickbooks_authorization_method.go
-internal/provider/type_source_quickbooks.go
-internal/provider/type_source_railz.go
-internal/provider/type_source_recharge.go
-internal/provider/type_source_recreation.go
-internal/provider/type_source_recruitee.go
-internal/provider/type_source_recurly.go
-internal/provider/type_source_redshift.go
-internal/provider/type_source_retently_authentication_mechanism_authenticate_via_retently_o_auth.go
-internal/provider/type_source_retently_authentication_mechanism_authenticate_with_api_token.go
-internal/provider/type_source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth.go
-internal/provider/type_source_retently_update_authentication_mechanism_authenticate_with_api_token.go
-internal/provider/type_source_retently_authentication_mechanism.go
-internal/provider/type_source_retently.go
-internal/provider/type_source_rki_covid.go
-internal/provider/type_source_rss.go
-internal/provider/type_source_s3_file_format_avro.go
-internal/provider/type_source_s3_file_format_csv.go
-internal/provider/type_source_s3_file_format_jsonl.go
-internal/provider/type_source_s3_file_format_parquet.go
-internal/provider/type_source_s3_file_format.go
-internal/provider/type_source_s3_s3_amazon_web_services.go
-internal/provider/type_source_s3_file_based_stream_config_format_avro_format.go
-internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated.go
-internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv.go
-internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided.go
-internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition.go
-internal/provider/type_source_s3_file_based_stream_config_format_csv_format.go
-internal/provider/type_source_s3_file_based_stream_config_format_jsonl_format.go
-internal/provider/type_source_s3_file_based_stream_config_format_parquet_format.go
-internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition.go
-internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format.go
-internal/provider/type_source_s3_file_based_stream_config_format.go
-internal/provider/type_source_s3_file_based_stream_config.go
-internal/provider/type_source_s3.go
-internal/provider/type_source_salesforce_streams_criteria.go
-internal/provider/type_source_salesforce.go
-internal/provider/type_source_salesloft_credentials_authenticate_via_api_key.go
-internal/provider/type_source_salesloft_credentials.go
-internal/provider/type_source_salesloft.go
-internal/provider/type_source_sap_fieldglass.go
-internal/provider/type_source_secoda.go
-internal/provider/type_source_sendgrid.go
-internal/provider/type_source_sendinblue.go
-internal/provider/type_source_senseforce.go
-internal/provider/type_source_sentry.go
-internal/provider/type_source_sftp_authentication_wildcard_password_authentication.go
-internal/provider/type_source_sftp_authentication_wildcard_ssh_key_authentication.go
-internal/provider/type_source_sftp_authentication_wildcard.go
-internal/provider/type_source_sftp.go
-internal/provider/type_source_sftp_bulk.go
-internal/provider/type_source_shopify_shopify_authorization_method_api_password.go
-internal/provider/type_source_shopify_shopify_authorization_method_o_auth20.go
-internal/provider/type_source_shopify_shopify_authorization_method.go
-internal/provider/type_source_shopify.go
-internal/provider/type_source_shortio.go
-internal/provider/type_source_slack_authentication_mechanism_api_token.go
-internal/provider/type_source_slack_authentication_mechanism_sign_in_via_slack_o_auth.go
-internal/provider/type_source_slack_authentication_mechanism.go
-internal/provider/type_source_slack.go
-internal/provider/type_source_smaily.go
-internal/provider/type_source_smartengage.go
-internal/provider/type_source_smartsheets_authorization_method.go
-internal/provider/type_source_smartsheets.go
-internal/provider/type_source_snapchat_marketing.go
-internal/provider/type_source_snowflake_authorization_method_o_auth20.go
-internal/provider/type_source_snowflake_authorization_method_username_and_password.go
-internal/provider/type_source_snowflake_authorization_method.go
-internal/provider/type_source_snowflake.go
-internal/provider/type_source_sonar_cloud.go
-internal/provider/type_source_spacex_api.go
-internal/provider/type_source_square_authentication_api_key.go
-internal/provider/type_source_square_authentication_oauth_authentication.go
-internal/provider/type_source_square_authentication.go
-internal/provider/type_source_square.go
-internal/provider/type_source_strava.go
-internal/provider/type_source_stripe.go
-internal/provider/type_source_surveymonkey_survey_monkey_authorization_method.go
-internal/provider/type_source_surveymonkey.go
-internal/provider/type_source_survey_sparrow_base_urleu_based_account.go
-internal/provider/type_source_survey_sparrow_base_url_global_account.go
-internal/provider/type_source_survey_sparrow_base_url.go
-internal/provider/type_source_survey_sparrow.go
-internal/provider/type_source_tempo.go
-internal/provider/type_source_the_guardian_api.go
-internal/provider/type_source_tiktok_marketing_authentication_method_o_auth20.go
-internal/provider/type_source_tiktok_marketing_authentication_method_sandbox_access_token.go
-internal/provider/type_source_tiktok_marketing_authentication_method.go
-internal/provider/type_source_tiktok_marketing.go
-internal/provider/type_source_todoist.go
-internal/provider/type_source_trello.go
-internal/provider/type_source_trustpilot_authorization_method_api_key.go
-internal/provider/type_source_trustpilot_authorization_method.go
-internal/provider/type_source_trustpilot.go
-internal/provider/type_source_tvmaze_schedule.go
-internal/provider/type_source_twilio.go
-internal/provider/type_source_twilio_taskrouter.go
-internal/provider/type_source_twitter.go
-internal/provider/type_source_typeform_authorization_method.go
-internal/provider/type_source_typeform.go
-internal/provider/type_source_us_census.go
-internal/provider/type_source_vantage.go
-internal/provider/type_source_webflow.go
-internal/provider/type_source_whisky_hunter.go
-internal/provider/type_source_wikipedia_pageviews.go
-internal/provider/type_source_woocommerce.go
-internal/provider/type_source_xero_authenticate_via_xero_o_auth.go
-internal/provider/type_source_xero.go
-internal/provider/type_source_xkcd.go
-internal/provider/type_source_yandex_metrica.go
-internal/provider/type_source_yotpo.go
-internal/provider/type_source_younium.go
-internal/provider/type_source_youtube_analytics_authenticate_via_o_auth20.go
-internal/provider/type_source_youtube_analytics.go
-internal/provider/type_source_zendesk_chat_authorization_method_access_token.go
-internal/provider/type_source_zendesk_chat_authorization_method_o_auth20.go
-internal/provider/type_source_zendesk_chat_authorization_method.go
-internal/provider/type_source_zendesk_chat.go
-internal/provider/type_source_zendesk_sunshine_authorization_method_api_token.go
-internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth20.go
-internal/provider/type_source_zendesk_sunshine_authorization_method.go
-internal/provider/type_source_zendesk_sunshine.go
-internal/provider/type_source_zendesk_support_authentication_api_token.go
-internal/provider/type_source_zendesk_support_authentication_o_auth20.go
-internal/provider/type_source_zendesk_support_update_authentication_api_token.go
-internal/provider/type_source_zendesk_support_update_authentication_o_auth20.go
-internal/provider/type_source_zendesk_support_authentication.go
-internal/provider/type_source_zendesk_support.go
-internal/provider/type_source_zendesk_talk_authentication_api_token.go
-internal/provider/type_source_zendesk_talk_authentication_o_auth20.go
-internal/provider/type_source_zendesk_talk_update_authentication_api_token.go
-internal/provider/type_source_zendesk_talk_update_authentication_o_auth20.go
-internal/provider/type_source_zendesk_talk_authentication.go
-internal/provider/type_source_zendesk_talk.go
-internal/provider/type_source_zenloop.go
-internal/provider/type_source_zoho_crm.go
-internal/provider/type_source_zoom.go
-internal/provider/type_source_zuora.go
-internal/provider/type_destination_aws_datalake1.go
-internal/provider/type_destination_databricks_data_source_amazon_s31.go
-internal/provider/type_destination_databricks_update_data_source_amazon_s31.go
-internal/provider/type_destination_databricks_data_source2.go
-internal/provider/type_destination_databricks1.go
-internal/provider/type_destination_dynamodb1.go
-internal/provider/type_destination_redshift_uploading_method_s3_staging1.go
-internal/provider/type_destination_redshift_update_uploading_method_s3_staging1.go
-internal/provider/type_destination_redshift_uploading_method1.go
-internal/provider/type_destination_redshift1.go
-internal/provider/type_destination_s31.go
-internal/provider/type_destination_s3_glue1.go
-internal/provider/type_source_alloydb_replication_method_logical_replication_cdc1.go
-internal/provider/type_source_alloydb_update_replication_method_logical_replication_cdc1.go
-internal/provider/type_source_alloydb_replication_method1.go
-internal/provider/type_source_alloydb_ssl_modes_allow1.go
-internal/provider/type_source_alloydb_ssl_modes_disable1.go
-internal/provider/type_source_alloydb_ssl_modes_prefer1.go
-internal/provider/type_source_alloydb_ssl_modes_require1.go
-internal/provider/type_source_alloydb_ssl_modes_verify_ca1.go
-internal/provider/type_source_alloydb_ssl_modes_verify_full1.go
-internal/provider/type_source_alloydb_update_ssl_modes_allow1.go
-internal/provider/type_source_alloydb_update_ssl_modes_disable1.go
-internal/provider/type_source_alloydb_update_ssl_modes_prefer1.go
-internal/provider/type_source_alloydb_update_ssl_modes_require1.go
-internal/provider/type_source_alloydb_update_ssl_modes_verify_ca1.go
-internal/provider/type_source_alloydb_update_ssl_modes_verify_full1.go
-internal/provider/type_source_alloydb_ssl_modes1.go
-internal/provider/type_source_alloydb1.go
-internal/provider/type_source_dynamodb1.go
-internal/provider/type_source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth1.go
-internal/provider/type_source_harvest_authentication_mechanism_authenticate_with_personal_access_token1.go
-internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth1.go
-internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token1.go
-internal/provider/type_source_harvest_authentication_mechanism1.go
-internal/provider/type_source_harvest1.go
-internal/provider/type_source_mongodb_mongo_db_instance_type_mongo_db_atlas1.go
-internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atlas1.go
-internal/provider/type_source_mongodb_mongo_db_instance_type1.go
-internal/provider/type_source_mongodb1.go
-internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc1.go
-internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc1.go
-internal/provider/type_source_postgres_update_method1.go
-internal/provider/type_source_postgres_ssl_modes_allow1.go
-internal/provider/type_source_postgres_ssl_modes_disable1.go
-internal/provider/type_source_postgres_ssl_modes_prefer1.go
-internal/provider/type_source_postgres_ssl_modes_require1.go
-internal/provider/type_source_postgres_ssl_modes_verify_ca1.go
-internal/provider/type_source_postgres_ssl_modes_verify_full1.go
-internal/provider/type_source_postgres_update_ssl_modes_allow1.go
-internal/provider/type_source_postgres_update_ssl_modes_disable1.go
-internal/provider/type_source_postgres_update_ssl_modes_prefer1.go
-internal/provider/type_source_postgres_update_ssl_modes_require1.go
-internal/provider/type_source_postgres_update_ssl_modes_verify_ca1.go
-internal/provider/type_source_postgres_update_ssl_modes_verify_full1.go
-internal/provider/type_source_postgres_ssl_modes1.go
-internal/provider/type_source_postgres1.go
-internal/provider/type_source_retently_authentication_mechanism_authenticate_via_retently_o_auth1.go
-internal/provider/type_source_retently_authentication_mechanism_authenticate_with_api_token1.go
-internal/provider/type_source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth1.go
-internal/provider/type_source_retently_update_authentication_mechanism_authenticate_with_api_token1.go
-internal/provider/type_source_retently_authentication_mechanism1.go
-internal/provider/type_source_retently1.go
-internal/provider/type_source_youtube_analytics_authenticate_via_o_auth201.go
-internal/provider/type_source_youtube_analytics1.go
-internal/provider/type_source_zendesk_support_authentication_api_token1.go
-internal/provider/type_source_zendesk_support_authentication_o_auth201.go
-internal/provider/type_source_zendesk_support_update_authentication_api_token1.go
-internal/provider/type_source_zendesk_support_update_authentication_o_auth201.go
-internal/provider/type_source_zendesk_support_authentication1.go
-internal/provider/type_source_zendesk_support1.go
-internal/provider/type_source_zendesk_talk_authentication_api_token1.go
-internal/provider/type_source_zendesk_talk_authentication_o_auth201.go
-internal/provider/type_source_zendesk_talk_update_authentication_api_token1.go
-internal/provider/type_source_zendesk_talk_update_authentication_o_auth201.go
-internal/provider/type_source_zendesk_talk_authentication1.go
-internal/provider/type_source_zendesk_talk1.go
examples/README.md
go.mod
go.sum
@@ -714,12 +33,16 @@ internal/provider/reflect/primitive.go
internal/provider/reflect/slice.go
internal/provider/reflect/struct.go
internal/provider/utils.go
+internal/sdk/pkg/models/sdkerrors/sdkerror.go
internal/sdk/pkg/types/bigint.go
internal/sdk/pkg/types/date.go
internal/sdk/pkg/types/datetime.go
+internal/sdk/pkg/types/decimal.go
+internal/sdk/pkg/types/pointers.go
internal/sdk/pkg/utils/contenttype.go
internal/sdk/pkg/utils/form.go
internal/sdk/pkg/utils/headers.go
+internal/sdk/pkg/utils/json.go
internal/sdk/pkg/utils/pathparams.go
internal/sdk/pkg/utils/queryparams.go
internal/sdk/pkg/utils/requestbody.go
@@ -742,13 +65,13 @@ internal/sdk/pkg/models/operations/createdestination.go
internal/sdk/pkg/models/operations/createdestinationawsdatalake.go
internal/sdk/pkg/models/operations/createdestinationazureblobstorage.go
internal/sdk/pkg/models/operations/createdestinationbigquery.go
-internal/sdk/pkg/models/operations/createdestinationbigquerydenormalized.go
internal/sdk/pkg/models/operations/createdestinationclickhouse.go
internal/sdk/pkg/models/operations/createdestinationconvex.go
internal/sdk/pkg/models/operations/createdestinationcumulio.go
internal/sdk/pkg/models/operations/createdestinationdatabend.go
internal/sdk/pkg/models/operations/createdestinationdatabricks.go
internal/sdk/pkg/models/operations/createdestinationdevnull.go
+internal/sdk/pkg/models/operations/createdestinationduckdb.go
internal/sdk/pkg/models/operations/createdestinationdynamodb.go
internal/sdk/pkg/models/operations/createdestinationelasticsearch.go
internal/sdk/pkg/models/operations/createdestinationfirebolt.go
@@ -766,6 +89,7 @@ internal/sdk/pkg/models/operations/createdestinationoracle.go
internal/sdk/pkg/models/operations/createdestinationpinecone.go
internal/sdk/pkg/models/operations/createdestinationpostgres.go
internal/sdk/pkg/models/operations/createdestinationpubsub.go
+internal/sdk/pkg/models/operations/createdestinationqdrant.go
internal/sdk/pkg/models/operations/createdestinationredis.go
internal/sdk/pkg/models/operations/createdestinationredshift.go
internal/sdk/pkg/models/operations/createdestinations3.go
@@ -775,18 +99,19 @@ internal/sdk/pkg/models/operations/createdestinationsnowflake.go
internal/sdk/pkg/models/operations/createdestinationtimeplus.go
internal/sdk/pkg/models/operations/createdestinationtypesense.go
internal/sdk/pkg/models/operations/createdestinationvertica.go
+internal/sdk/pkg/models/operations/createdestinationweaviate.go
internal/sdk/pkg/models/operations/createdestinationxata.go
internal/sdk/pkg/models/operations/deletedestination.go
internal/sdk/pkg/models/operations/deletedestinationawsdatalake.go
internal/sdk/pkg/models/operations/deletedestinationazureblobstorage.go
internal/sdk/pkg/models/operations/deletedestinationbigquery.go
-internal/sdk/pkg/models/operations/deletedestinationbigquerydenormalized.go
internal/sdk/pkg/models/operations/deletedestinationclickhouse.go
internal/sdk/pkg/models/operations/deletedestinationconvex.go
internal/sdk/pkg/models/operations/deletedestinationcumulio.go
internal/sdk/pkg/models/operations/deletedestinationdatabend.go
internal/sdk/pkg/models/operations/deletedestinationdatabricks.go
internal/sdk/pkg/models/operations/deletedestinationdevnull.go
+internal/sdk/pkg/models/operations/deletedestinationduckdb.go
internal/sdk/pkg/models/operations/deletedestinationdynamodb.go
internal/sdk/pkg/models/operations/deletedestinationelasticsearch.go
internal/sdk/pkg/models/operations/deletedestinationfirebolt.go
@@ -804,6 +129,7 @@ internal/sdk/pkg/models/operations/deletedestinationoracle.go
internal/sdk/pkg/models/operations/deletedestinationpinecone.go
internal/sdk/pkg/models/operations/deletedestinationpostgres.go
internal/sdk/pkg/models/operations/deletedestinationpubsub.go
+internal/sdk/pkg/models/operations/deletedestinationqdrant.go
internal/sdk/pkg/models/operations/deletedestinationredis.go
internal/sdk/pkg/models/operations/deletedestinationredshift.go
internal/sdk/pkg/models/operations/deletedestinations3.go
@@ -813,18 +139,19 @@ internal/sdk/pkg/models/operations/deletedestinationsnowflake.go
internal/sdk/pkg/models/operations/deletedestinationtimeplus.go
internal/sdk/pkg/models/operations/deletedestinationtypesense.go
internal/sdk/pkg/models/operations/deletedestinationvertica.go
+internal/sdk/pkg/models/operations/deletedestinationweaviate.go
internal/sdk/pkg/models/operations/deletedestinationxata.go
internal/sdk/pkg/models/operations/getdestination.go
internal/sdk/pkg/models/operations/getdestinationawsdatalake.go
internal/sdk/pkg/models/operations/getdestinationazureblobstorage.go
internal/sdk/pkg/models/operations/getdestinationbigquery.go
-internal/sdk/pkg/models/operations/getdestinationbigquerydenormalized.go
internal/sdk/pkg/models/operations/getdestinationclickhouse.go
internal/sdk/pkg/models/operations/getdestinationconvex.go
internal/sdk/pkg/models/operations/getdestinationcumulio.go
internal/sdk/pkg/models/operations/getdestinationdatabend.go
internal/sdk/pkg/models/operations/getdestinationdatabricks.go
internal/sdk/pkg/models/operations/getdestinationdevnull.go
+internal/sdk/pkg/models/operations/getdestinationduckdb.go
internal/sdk/pkg/models/operations/getdestinationdynamodb.go
internal/sdk/pkg/models/operations/getdestinationelasticsearch.go
internal/sdk/pkg/models/operations/getdestinationfirebolt.go
@@ -842,6 +169,7 @@ internal/sdk/pkg/models/operations/getdestinationoracle.go
internal/sdk/pkg/models/operations/getdestinationpinecone.go
internal/sdk/pkg/models/operations/getdestinationpostgres.go
internal/sdk/pkg/models/operations/getdestinationpubsub.go
+internal/sdk/pkg/models/operations/getdestinationqdrant.go
internal/sdk/pkg/models/operations/getdestinationredis.go
internal/sdk/pkg/models/operations/getdestinationredshift.go
internal/sdk/pkg/models/operations/getdestinations3.go
@@ -851,6 +179,7 @@ internal/sdk/pkg/models/operations/getdestinationsnowflake.go
internal/sdk/pkg/models/operations/getdestinationtimeplus.go
internal/sdk/pkg/models/operations/getdestinationtypesense.go
internal/sdk/pkg/models/operations/getdestinationvertica.go
+internal/sdk/pkg/models/operations/getdestinationweaviate.go
internal/sdk/pkg/models/operations/getdestinationxata.go
internal/sdk/pkg/models/operations/listdestinations.go
internal/sdk/pkg/models/operations/patchdestination.go
@@ -858,13 +187,13 @@ internal/sdk/pkg/models/operations/putdestination.go
internal/sdk/pkg/models/operations/putdestinationawsdatalake.go
internal/sdk/pkg/models/operations/putdestinationazureblobstorage.go
internal/sdk/pkg/models/operations/putdestinationbigquery.go
-internal/sdk/pkg/models/operations/putdestinationbigquerydenormalized.go
internal/sdk/pkg/models/operations/putdestinationclickhouse.go
internal/sdk/pkg/models/operations/putdestinationconvex.go
internal/sdk/pkg/models/operations/putdestinationcumulio.go
internal/sdk/pkg/models/operations/putdestinationdatabend.go
internal/sdk/pkg/models/operations/putdestinationdatabricks.go
internal/sdk/pkg/models/operations/putdestinationdevnull.go
+internal/sdk/pkg/models/operations/putdestinationduckdb.go
internal/sdk/pkg/models/operations/putdestinationdynamodb.go
internal/sdk/pkg/models/operations/putdestinationelasticsearch.go
internal/sdk/pkg/models/operations/putdestinationfirebolt.go
@@ -882,6 +211,7 @@ internal/sdk/pkg/models/operations/putdestinationoracle.go
internal/sdk/pkg/models/operations/putdestinationpinecone.go
internal/sdk/pkg/models/operations/putdestinationpostgres.go
internal/sdk/pkg/models/operations/putdestinationpubsub.go
+internal/sdk/pkg/models/operations/putdestinationqdrant.go
internal/sdk/pkg/models/operations/putdestinationredis.go
internal/sdk/pkg/models/operations/putdestinationredshift.go
internal/sdk/pkg/models/operations/putdestinations3.go
@@ -891,6 +221,7 @@ internal/sdk/pkg/models/operations/putdestinationsnowflake.go
internal/sdk/pkg/models/operations/putdestinationtimeplus.go
internal/sdk/pkg/models/operations/putdestinationtypesense.go
internal/sdk/pkg/models/operations/putdestinationvertica.go
+internal/sdk/pkg/models/operations/putdestinationweaviate.go
internal/sdk/pkg/models/operations/putdestinationxata.go
internal/sdk/pkg/models/operations/canceljob.go
internal/sdk/pkg/models/operations/createjob.go
@@ -913,11 +244,11 @@ internal/sdk/pkg/models/operations/createsourceawscloudtrail.go
internal/sdk/pkg/models/operations/createsourceazureblobstorage.go
internal/sdk/pkg/models/operations/createsourceazuretable.go
internal/sdk/pkg/models/operations/createsourcebamboohr.go
-internal/sdk/pkg/models/operations/createsourcebigcommerce.go
internal/sdk/pkg/models/operations/createsourcebigquery.go
internal/sdk/pkg/models/operations/createsourcebingads.go
internal/sdk/pkg/models/operations/createsourcebraintree.go
internal/sdk/pkg/models/operations/createsourcebraze.go
+internal/sdk/pkg/models/operations/createsourcecart.go
internal/sdk/pkg/models/operations/createsourcechargebee.go
internal/sdk/pkg/models/operations/createsourcechartmogul.go
internal/sdk/pkg/models/operations/createsourceclickhouse.go
@@ -936,14 +267,13 @@ internal/sdk/pkg/models/operations/createsourcedixa.go
internal/sdk/pkg/models/operations/createsourcedockerhub.go
internal/sdk/pkg/models/operations/createsourcedremio.go
internal/sdk/pkg/models/operations/createsourcedynamodb.go
-internal/sdk/pkg/models/operations/createsourcee2etestcloud.go
internal/sdk/pkg/models/operations/createsourceemailoctopus.go
internal/sdk/pkg/models/operations/createsourceexchangerates.go
internal/sdk/pkg/models/operations/createsourcefacebookmarketing.go
internal/sdk/pkg/models/operations/createsourcefacebookpages.go
internal/sdk/pkg/models/operations/createsourcefaker.go
internal/sdk/pkg/models/operations/createsourcefauna.go
-internal/sdk/pkg/models/operations/createsourcefilesecure.go
+internal/sdk/pkg/models/operations/createsourcefile.go
internal/sdk/pkg/models/operations/createsourcefirebolt.go
internal/sdk/pkg/models/operations/createsourcefreshcaller.go
internal/sdk/pkg/models/operations/createsourcefreshdesk.go
@@ -957,8 +287,8 @@ internal/sdk/pkg/models/operations/createsourceglassfrog.go
internal/sdk/pkg/models/operations/createsourcegnews.go
internal/sdk/pkg/models/operations/createsourcegoogleads.go
internal/sdk/pkg/models/operations/createsourcegoogleanalyticsdataapi.go
-internal/sdk/pkg/models/operations/createsourcegoogleanalyticsv4.go
internal/sdk/pkg/models/operations/createsourcegoogledirectory.go
+internal/sdk/pkg/models/operations/createsourcegoogledrive.go
internal/sdk/pkg/models/operations/createsourcegooglepagespeedinsights.go
internal/sdk/pkg/models/operations/createsourcegooglesearchconsole.go
internal/sdk/pkg/models/operations/createsourcegooglesheets.go
@@ -996,8 +326,8 @@ internal/sdk/pkg/models/operations/createsourcemetabase.go
internal/sdk/pkg/models/operations/createsourcemicrosoftteams.go
internal/sdk/pkg/models/operations/createsourcemixpanel.go
internal/sdk/pkg/models/operations/createsourcemonday.go
-internal/sdk/pkg/models/operations/createsourcemongodb.go
internal/sdk/pkg/models/operations/createsourcemongodbinternalpoc.go
+internal/sdk/pkg/models/operations/createsourcemongodbv2.go
internal/sdk/pkg/models/operations/createsourcemssql.go
internal/sdk/pkg/models/operations/createsourcemyhours.go
internal/sdk/pkg/models/operations/createsourcemysql.go
@@ -1082,13 +412,12 @@ internal/sdk/pkg/models/operations/createsourcewebflow.go
internal/sdk/pkg/models/operations/createsourcewhiskyhunter.go
internal/sdk/pkg/models/operations/createsourcewikipediapageviews.go
internal/sdk/pkg/models/operations/createsourcewoocommerce.go
-internal/sdk/pkg/models/operations/createsourcexero.go
internal/sdk/pkg/models/operations/createsourcexkcd.go
internal/sdk/pkg/models/operations/createsourceyandexmetrica.go
internal/sdk/pkg/models/operations/createsourceyotpo.go
-internal/sdk/pkg/models/operations/createsourceyounium.go
internal/sdk/pkg/models/operations/createsourceyoutubeanalytics.go
internal/sdk/pkg/models/operations/createsourcezendeskchat.go
+internal/sdk/pkg/models/operations/createsourcezendesksell.go
internal/sdk/pkg/models/operations/createsourcezendesksunshine.go
internal/sdk/pkg/models/operations/createsourcezendesksupport.go
internal/sdk/pkg/models/operations/createsourcezendesktalk.go
@@ -1113,11 +442,11 @@ internal/sdk/pkg/models/operations/deletesourceawscloudtrail.go
internal/sdk/pkg/models/operations/deletesourceazureblobstorage.go
internal/sdk/pkg/models/operations/deletesourceazuretable.go
internal/sdk/pkg/models/operations/deletesourcebamboohr.go
-internal/sdk/pkg/models/operations/deletesourcebigcommerce.go
internal/sdk/pkg/models/operations/deletesourcebigquery.go
internal/sdk/pkg/models/operations/deletesourcebingads.go
internal/sdk/pkg/models/operations/deletesourcebraintree.go
internal/sdk/pkg/models/operations/deletesourcebraze.go
+internal/sdk/pkg/models/operations/deletesourcecart.go
internal/sdk/pkg/models/operations/deletesourcechargebee.go
internal/sdk/pkg/models/operations/deletesourcechartmogul.go
internal/sdk/pkg/models/operations/deletesourceclickhouse.go
@@ -1136,14 +465,13 @@ internal/sdk/pkg/models/operations/deletesourcedixa.go
internal/sdk/pkg/models/operations/deletesourcedockerhub.go
internal/sdk/pkg/models/operations/deletesourcedremio.go
internal/sdk/pkg/models/operations/deletesourcedynamodb.go
-internal/sdk/pkg/models/operations/deletesourcee2etestcloud.go
internal/sdk/pkg/models/operations/deletesourceemailoctopus.go
internal/sdk/pkg/models/operations/deletesourceexchangerates.go
internal/sdk/pkg/models/operations/deletesourcefacebookmarketing.go
internal/sdk/pkg/models/operations/deletesourcefacebookpages.go
internal/sdk/pkg/models/operations/deletesourcefaker.go
internal/sdk/pkg/models/operations/deletesourcefauna.go
-internal/sdk/pkg/models/operations/deletesourcefilesecure.go
+internal/sdk/pkg/models/operations/deletesourcefile.go
internal/sdk/pkg/models/operations/deletesourcefirebolt.go
internal/sdk/pkg/models/operations/deletesourcefreshcaller.go
internal/sdk/pkg/models/operations/deletesourcefreshdesk.go
@@ -1157,8 +485,8 @@ internal/sdk/pkg/models/operations/deletesourceglassfrog.go
internal/sdk/pkg/models/operations/deletesourcegnews.go
internal/sdk/pkg/models/operations/deletesourcegoogleads.go
internal/sdk/pkg/models/operations/deletesourcegoogleanalyticsdataapi.go
-internal/sdk/pkg/models/operations/deletesourcegoogleanalyticsv4.go
internal/sdk/pkg/models/operations/deletesourcegoogledirectory.go
+internal/sdk/pkg/models/operations/deletesourcegoogledrive.go
internal/sdk/pkg/models/operations/deletesourcegooglepagespeedinsights.go
internal/sdk/pkg/models/operations/deletesourcegooglesearchconsole.go
internal/sdk/pkg/models/operations/deletesourcegooglesheets.go
@@ -1196,8 +524,8 @@ internal/sdk/pkg/models/operations/deletesourcemetabase.go
internal/sdk/pkg/models/operations/deletesourcemicrosoftteams.go
internal/sdk/pkg/models/operations/deletesourcemixpanel.go
internal/sdk/pkg/models/operations/deletesourcemonday.go
-internal/sdk/pkg/models/operations/deletesourcemongodb.go
internal/sdk/pkg/models/operations/deletesourcemongodbinternalpoc.go
+internal/sdk/pkg/models/operations/deletesourcemongodbv2.go
internal/sdk/pkg/models/operations/deletesourcemssql.go
internal/sdk/pkg/models/operations/deletesourcemyhours.go
internal/sdk/pkg/models/operations/deletesourcemysql.go
@@ -1282,13 +610,12 @@ internal/sdk/pkg/models/operations/deletesourcewebflow.go
internal/sdk/pkg/models/operations/deletesourcewhiskyhunter.go
internal/sdk/pkg/models/operations/deletesourcewikipediapageviews.go
internal/sdk/pkg/models/operations/deletesourcewoocommerce.go
-internal/sdk/pkg/models/operations/deletesourcexero.go
internal/sdk/pkg/models/operations/deletesourcexkcd.go
internal/sdk/pkg/models/operations/deletesourceyandexmetrica.go
internal/sdk/pkg/models/operations/deletesourceyotpo.go
-internal/sdk/pkg/models/operations/deletesourceyounium.go
internal/sdk/pkg/models/operations/deletesourceyoutubeanalytics.go
internal/sdk/pkg/models/operations/deletesourcezendeskchat.go
+internal/sdk/pkg/models/operations/deletesourcezendesksell.go
internal/sdk/pkg/models/operations/deletesourcezendesksunshine.go
internal/sdk/pkg/models/operations/deletesourcezendesksupport.go
internal/sdk/pkg/models/operations/deletesourcezendesktalk.go
@@ -1313,11 +640,11 @@ internal/sdk/pkg/models/operations/getsourceawscloudtrail.go
internal/sdk/pkg/models/operations/getsourceazureblobstorage.go
internal/sdk/pkg/models/operations/getsourceazuretable.go
internal/sdk/pkg/models/operations/getsourcebamboohr.go
-internal/sdk/pkg/models/operations/getsourcebigcommerce.go
internal/sdk/pkg/models/operations/getsourcebigquery.go
internal/sdk/pkg/models/operations/getsourcebingads.go
internal/sdk/pkg/models/operations/getsourcebraintree.go
internal/sdk/pkg/models/operations/getsourcebraze.go
+internal/sdk/pkg/models/operations/getsourcecart.go
internal/sdk/pkg/models/operations/getsourcechargebee.go
internal/sdk/pkg/models/operations/getsourcechartmogul.go
internal/sdk/pkg/models/operations/getsourceclickhouse.go
@@ -1336,14 +663,13 @@ internal/sdk/pkg/models/operations/getsourcedixa.go
internal/sdk/pkg/models/operations/getsourcedockerhub.go
internal/sdk/pkg/models/operations/getsourcedremio.go
internal/sdk/pkg/models/operations/getsourcedynamodb.go
-internal/sdk/pkg/models/operations/getsourcee2etestcloud.go
internal/sdk/pkg/models/operations/getsourceemailoctopus.go
internal/sdk/pkg/models/operations/getsourceexchangerates.go
internal/sdk/pkg/models/operations/getsourcefacebookmarketing.go
internal/sdk/pkg/models/operations/getsourcefacebookpages.go
internal/sdk/pkg/models/operations/getsourcefaker.go
internal/sdk/pkg/models/operations/getsourcefauna.go
-internal/sdk/pkg/models/operations/getsourcefilesecure.go
+internal/sdk/pkg/models/operations/getsourcefile.go
internal/sdk/pkg/models/operations/getsourcefirebolt.go
internal/sdk/pkg/models/operations/getsourcefreshcaller.go
internal/sdk/pkg/models/operations/getsourcefreshdesk.go
@@ -1357,8 +683,8 @@ internal/sdk/pkg/models/operations/getsourceglassfrog.go
internal/sdk/pkg/models/operations/getsourcegnews.go
internal/sdk/pkg/models/operations/getsourcegoogleads.go
internal/sdk/pkg/models/operations/getsourcegoogleanalyticsdataapi.go
-internal/sdk/pkg/models/operations/getsourcegoogleanalyticsv4.go
internal/sdk/pkg/models/operations/getsourcegoogledirectory.go
+internal/sdk/pkg/models/operations/getsourcegoogledrive.go
internal/sdk/pkg/models/operations/getsourcegooglepagespeedinsights.go
internal/sdk/pkg/models/operations/getsourcegooglesearchconsole.go
internal/sdk/pkg/models/operations/getsourcegooglesheets.go
@@ -1396,8 +722,8 @@ internal/sdk/pkg/models/operations/getsourcemetabase.go
internal/sdk/pkg/models/operations/getsourcemicrosoftteams.go
internal/sdk/pkg/models/operations/getsourcemixpanel.go
internal/sdk/pkg/models/operations/getsourcemonday.go
-internal/sdk/pkg/models/operations/getsourcemongodb.go
internal/sdk/pkg/models/operations/getsourcemongodbinternalpoc.go
+internal/sdk/pkg/models/operations/getsourcemongodbv2.go
internal/sdk/pkg/models/operations/getsourcemssql.go
internal/sdk/pkg/models/operations/getsourcemyhours.go
internal/sdk/pkg/models/operations/getsourcemysql.go
@@ -1482,13 +808,12 @@ internal/sdk/pkg/models/operations/getsourcewebflow.go
internal/sdk/pkg/models/operations/getsourcewhiskyhunter.go
internal/sdk/pkg/models/operations/getsourcewikipediapageviews.go
internal/sdk/pkg/models/operations/getsourcewoocommerce.go
-internal/sdk/pkg/models/operations/getsourcexero.go
internal/sdk/pkg/models/operations/getsourcexkcd.go
internal/sdk/pkg/models/operations/getsourceyandexmetrica.go
internal/sdk/pkg/models/operations/getsourceyotpo.go
-internal/sdk/pkg/models/operations/getsourceyounium.go
internal/sdk/pkg/models/operations/getsourceyoutubeanalytics.go
internal/sdk/pkg/models/operations/getsourcezendeskchat.go
+internal/sdk/pkg/models/operations/getsourcezendesksell.go
internal/sdk/pkg/models/operations/getsourcezendesksunshine.go
internal/sdk/pkg/models/operations/getsourcezendesksupport.go
internal/sdk/pkg/models/operations/getsourcezendesktalk.go
@@ -1516,11 +841,11 @@ internal/sdk/pkg/models/operations/putsourceawscloudtrail.go
internal/sdk/pkg/models/operations/putsourceazureblobstorage.go
internal/sdk/pkg/models/operations/putsourceazuretable.go
internal/sdk/pkg/models/operations/putsourcebamboohr.go
-internal/sdk/pkg/models/operations/putsourcebigcommerce.go
internal/sdk/pkg/models/operations/putsourcebigquery.go
internal/sdk/pkg/models/operations/putsourcebingads.go
internal/sdk/pkg/models/operations/putsourcebraintree.go
internal/sdk/pkg/models/operations/putsourcebraze.go
+internal/sdk/pkg/models/operations/putsourcecart.go
internal/sdk/pkg/models/operations/putsourcechargebee.go
internal/sdk/pkg/models/operations/putsourcechartmogul.go
internal/sdk/pkg/models/operations/putsourceclickhouse.go
@@ -1539,14 +864,13 @@ internal/sdk/pkg/models/operations/putsourcedixa.go
internal/sdk/pkg/models/operations/putsourcedockerhub.go
internal/sdk/pkg/models/operations/putsourcedremio.go
internal/sdk/pkg/models/operations/putsourcedynamodb.go
-internal/sdk/pkg/models/operations/putsourcee2etestcloud.go
internal/sdk/pkg/models/operations/putsourceemailoctopus.go
internal/sdk/pkg/models/operations/putsourceexchangerates.go
internal/sdk/pkg/models/operations/putsourcefacebookmarketing.go
internal/sdk/pkg/models/operations/putsourcefacebookpages.go
internal/sdk/pkg/models/operations/putsourcefaker.go
internal/sdk/pkg/models/operations/putsourcefauna.go
-internal/sdk/pkg/models/operations/putsourcefilesecure.go
+internal/sdk/pkg/models/operations/putsourcefile.go
internal/sdk/pkg/models/operations/putsourcefirebolt.go
internal/sdk/pkg/models/operations/putsourcefreshcaller.go
internal/sdk/pkg/models/operations/putsourcefreshdesk.go
@@ -1560,8 +884,8 @@ internal/sdk/pkg/models/operations/putsourceglassfrog.go
internal/sdk/pkg/models/operations/putsourcegnews.go
internal/sdk/pkg/models/operations/putsourcegoogleads.go
internal/sdk/pkg/models/operations/putsourcegoogleanalyticsdataapi.go
-internal/sdk/pkg/models/operations/putsourcegoogleanalyticsv4.go
internal/sdk/pkg/models/operations/putsourcegoogledirectory.go
+internal/sdk/pkg/models/operations/putsourcegoogledrive.go
internal/sdk/pkg/models/operations/putsourcegooglepagespeedinsights.go
internal/sdk/pkg/models/operations/putsourcegooglesearchconsole.go
internal/sdk/pkg/models/operations/putsourcegooglesheets.go
@@ -1599,8 +923,8 @@ internal/sdk/pkg/models/operations/putsourcemetabase.go
internal/sdk/pkg/models/operations/putsourcemicrosoftteams.go
internal/sdk/pkg/models/operations/putsourcemixpanel.go
internal/sdk/pkg/models/operations/putsourcemonday.go
-internal/sdk/pkg/models/operations/putsourcemongodb.go
internal/sdk/pkg/models/operations/putsourcemongodbinternalpoc.go
+internal/sdk/pkg/models/operations/putsourcemongodbv2.go
internal/sdk/pkg/models/operations/putsourcemssql.go
internal/sdk/pkg/models/operations/putsourcemyhours.go
internal/sdk/pkg/models/operations/putsourcemysql.go
@@ -1685,13 +1009,12 @@ internal/sdk/pkg/models/operations/putsourcewebflow.go
internal/sdk/pkg/models/operations/putsourcewhiskyhunter.go
internal/sdk/pkg/models/operations/putsourcewikipediapageviews.go
internal/sdk/pkg/models/operations/putsourcewoocommerce.go
-internal/sdk/pkg/models/operations/putsourcexero.go
internal/sdk/pkg/models/operations/putsourcexkcd.go
internal/sdk/pkg/models/operations/putsourceyandexmetrica.go
internal/sdk/pkg/models/operations/putsourceyotpo.go
-internal/sdk/pkg/models/operations/putsourceyounium.go
internal/sdk/pkg/models/operations/putsourceyoutubeanalytics.go
internal/sdk/pkg/models/operations/putsourcezendeskchat.go
+internal/sdk/pkg/models/operations/putsourcezendesksell.go
internal/sdk/pkg/models/operations/putsourcezendesksunshine.go
internal/sdk/pkg/models/operations/putsourcezendesksupport.go
internal/sdk/pkg/models/operations/putsourcezendesktalk.go
@@ -1732,8 +1055,6 @@ internal/sdk/pkg/models/shared/destinationazureblobstoragecreaterequest.go
internal/sdk/pkg/models/shared/destinationazureblobstorage.go
internal/sdk/pkg/models/shared/destinationbigquerycreaterequest.go
internal/sdk/pkg/models/shared/destinationbigquery.go
-internal/sdk/pkg/models/shared/destinationbigquerydenormalizedcreaterequest.go
-internal/sdk/pkg/models/shared/destinationbigquerydenormalized.go
internal/sdk/pkg/models/shared/destinationclickhousecreaterequest.go
internal/sdk/pkg/models/shared/destinationclickhouse.go
internal/sdk/pkg/models/shared/destinationconvexcreaterequest.go
@@ -1746,6 +1067,8 @@ internal/sdk/pkg/models/shared/destinationdatabrickscreaterequest.go
internal/sdk/pkg/models/shared/destinationdatabricks.go
internal/sdk/pkg/models/shared/destinationdevnullcreaterequest.go
internal/sdk/pkg/models/shared/destinationdevnull.go
+internal/sdk/pkg/models/shared/destinationduckdbcreaterequest.go
+internal/sdk/pkg/models/shared/destinationduckdb.go
internal/sdk/pkg/models/shared/destinationdynamodbcreaterequest.go
internal/sdk/pkg/models/shared/destinationdynamodb.go
internal/sdk/pkg/models/shared/destinationelasticsearchcreaterequest.go
@@ -1780,6 +1103,8 @@ internal/sdk/pkg/models/shared/destinationpostgrescreaterequest.go
internal/sdk/pkg/models/shared/destinationpostgres.go
internal/sdk/pkg/models/shared/destinationpubsubcreaterequest.go
internal/sdk/pkg/models/shared/destinationpubsub.go
+internal/sdk/pkg/models/shared/destinationqdrantcreaterequest.go
+internal/sdk/pkg/models/shared/destinationqdrant.go
internal/sdk/pkg/models/shared/destinationrediscreaterequest.go
internal/sdk/pkg/models/shared/destinationredis.go
internal/sdk/pkg/models/shared/destinationredshiftcreaterequest.go
@@ -1798,6 +1123,8 @@ internal/sdk/pkg/models/shared/destinationtypesensecreaterequest.go
internal/sdk/pkg/models/shared/destinationtypesense.go
internal/sdk/pkg/models/shared/destinationverticacreaterequest.go
internal/sdk/pkg/models/shared/destinationvertica.go
+internal/sdk/pkg/models/shared/destinationweaviatecreaterequest.go
+internal/sdk/pkg/models/shared/destinationweaviate.go
internal/sdk/pkg/models/shared/destinationxatacreaterequest.go
internal/sdk/pkg/models/shared/destinationxata.go
internal/sdk/pkg/models/shared/destinationsresponse.go
@@ -1809,8 +1136,6 @@ internal/sdk/pkg/models/shared/destinationazureblobstorageputrequest.go
internal/sdk/pkg/models/shared/destinationazureblobstorageupdate.go
internal/sdk/pkg/models/shared/destinationbigqueryputrequest.go
internal/sdk/pkg/models/shared/destinationbigqueryupdate.go
-internal/sdk/pkg/models/shared/destinationbigquerydenormalizedputrequest.go
-internal/sdk/pkg/models/shared/destinationbigquerydenormalizedupdate.go
internal/sdk/pkg/models/shared/destinationclickhouseputrequest.go
internal/sdk/pkg/models/shared/destinationclickhouseupdate.go
internal/sdk/pkg/models/shared/destinationconvexputrequest.go
@@ -1823,6 +1148,8 @@ internal/sdk/pkg/models/shared/destinationdatabricksputrequest.go
internal/sdk/pkg/models/shared/destinationdatabricksupdate.go
internal/sdk/pkg/models/shared/destinationdevnullputrequest.go
internal/sdk/pkg/models/shared/destinationdevnullupdate.go
+internal/sdk/pkg/models/shared/destinationduckdbputrequest.go
+internal/sdk/pkg/models/shared/destinationduckdbupdate.go
internal/sdk/pkg/models/shared/destinationdynamodbputrequest.go
internal/sdk/pkg/models/shared/destinationdynamodbupdate.go
internal/sdk/pkg/models/shared/destinationelasticsearchputrequest.go
@@ -1857,6 +1184,8 @@ internal/sdk/pkg/models/shared/destinationpostgresputrequest.go
internal/sdk/pkg/models/shared/destinationpostgresupdate.go
internal/sdk/pkg/models/shared/destinationpubsubputrequest.go
internal/sdk/pkg/models/shared/destinationpubsubupdate.go
+internal/sdk/pkg/models/shared/destinationqdrantputrequest.go
+internal/sdk/pkg/models/shared/destinationqdrantupdate.go
internal/sdk/pkg/models/shared/destinationredisputrequest.go
internal/sdk/pkg/models/shared/destinationredisupdate.go
internal/sdk/pkg/models/shared/destinationredshiftputrequest.go
@@ -1875,6 +1204,8 @@ internal/sdk/pkg/models/shared/destinationtypesenseputrequest.go
internal/sdk/pkg/models/shared/destinationtypesenseupdate.go
internal/sdk/pkg/models/shared/destinationverticaputrequest.go
internal/sdk/pkg/models/shared/destinationverticaupdate.go
+internal/sdk/pkg/models/shared/destinationweaviateputrequest.go
+internal/sdk/pkg/models/shared/destinationweaviateupdate.go
internal/sdk/pkg/models/shared/destinationxataputrequest.go
internal/sdk/pkg/models/shared/destinationxataupdate.go
internal/sdk/pkg/models/shared/jobresponse.go
@@ -1916,8 +1247,6 @@ internal/sdk/pkg/models/shared/sourceazuretablecreaterequest.go
internal/sdk/pkg/models/shared/sourceazuretable.go
internal/sdk/pkg/models/shared/sourcebamboohrcreaterequest.go
internal/sdk/pkg/models/shared/sourcebamboohr.go
-internal/sdk/pkg/models/shared/sourcebigcommercecreaterequest.go
-internal/sdk/pkg/models/shared/sourcebigcommerce.go
internal/sdk/pkg/models/shared/sourcebigquerycreaterequest.go
internal/sdk/pkg/models/shared/sourcebigquery.go
internal/sdk/pkg/models/shared/sourcebingadscreaterequest.go
@@ -1926,6 +1255,8 @@ internal/sdk/pkg/models/shared/sourcebraintreecreaterequest.go
internal/sdk/pkg/models/shared/sourcebraintree.go
internal/sdk/pkg/models/shared/sourcebrazecreaterequest.go
internal/sdk/pkg/models/shared/sourcebraze.go
+internal/sdk/pkg/models/shared/sourcecartcreaterequest.go
+internal/sdk/pkg/models/shared/sourcecart.go
internal/sdk/pkg/models/shared/sourcechargebeecreaterequest.go
internal/sdk/pkg/models/shared/sourcechargebee.go
internal/sdk/pkg/models/shared/sourcechartmogulcreaterequest.go
@@ -1962,8 +1293,6 @@ internal/sdk/pkg/models/shared/sourcedremiocreaterequest.go
internal/sdk/pkg/models/shared/sourcedremio.go
internal/sdk/pkg/models/shared/sourcedynamodbcreaterequest.go
internal/sdk/pkg/models/shared/sourcedynamodb.go
-internal/sdk/pkg/models/shared/sourcee2etestcloudcreaterequest.go
-internal/sdk/pkg/models/shared/sourcee2etestcloud.go
internal/sdk/pkg/models/shared/sourceemailoctopuscreaterequest.go
internal/sdk/pkg/models/shared/sourceemailoctopus.go
internal/sdk/pkg/models/shared/sourceexchangeratescreaterequest.go
@@ -1976,8 +1305,8 @@ internal/sdk/pkg/models/shared/sourcefakercreaterequest.go
internal/sdk/pkg/models/shared/sourcefaker.go
internal/sdk/pkg/models/shared/sourcefaunacreaterequest.go
internal/sdk/pkg/models/shared/sourcefauna.go
-internal/sdk/pkg/models/shared/sourcefilesecurecreaterequest.go
-internal/sdk/pkg/models/shared/sourcefilesecure.go
+internal/sdk/pkg/models/shared/sourcefilecreaterequest.go
+internal/sdk/pkg/models/shared/sourcefile.go
internal/sdk/pkg/models/shared/sourcefireboltcreaterequest.go
internal/sdk/pkg/models/shared/sourcefirebolt.go
internal/sdk/pkg/models/shared/sourcefreshcallercreaterequest.go
@@ -2004,10 +1333,10 @@ internal/sdk/pkg/models/shared/sourcegoogleadscreaterequest.go
internal/sdk/pkg/models/shared/sourcegoogleads.go
internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapicreaterequest.go
internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapi.go
-internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4createrequest.go
-internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4.go
internal/sdk/pkg/models/shared/sourcegoogledirectorycreaterequest.go
internal/sdk/pkg/models/shared/sourcegoogledirectory.go
+internal/sdk/pkg/models/shared/sourcegoogledrivecreaterequest.go
+internal/sdk/pkg/models/shared/sourcegoogledrive.go
internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightscreaterequest.go
internal/sdk/pkg/models/shared/sourcegooglepagespeedinsights.go
internal/sdk/pkg/models/shared/sourcegooglesearchconsolecreaterequest.go
@@ -2082,10 +1411,10 @@ internal/sdk/pkg/models/shared/sourcemixpanelcreaterequest.go
internal/sdk/pkg/models/shared/sourcemixpanel.go
internal/sdk/pkg/models/shared/sourcemondaycreaterequest.go
internal/sdk/pkg/models/shared/sourcemonday.go
-internal/sdk/pkg/models/shared/sourcemongodbcreaterequest.go
-internal/sdk/pkg/models/shared/sourcemongodb.go
internal/sdk/pkg/models/shared/sourcemongodbinternalpoccreaterequest.go
internal/sdk/pkg/models/shared/sourcemongodbinternalpoc.go
+internal/sdk/pkg/models/shared/sourcemongodbv2createrequest.go
+internal/sdk/pkg/models/shared/sourcemongodbv2.go
internal/sdk/pkg/models/shared/sourcemssqlcreaterequest.go
internal/sdk/pkg/models/shared/sourcemssql.go
internal/sdk/pkg/models/shared/sourcemyhourscreaterequest.go
@@ -2254,20 +1583,18 @@ internal/sdk/pkg/models/shared/sourcewikipediapageviewscreaterequest.go
internal/sdk/pkg/models/shared/sourcewikipediapageviews.go
internal/sdk/pkg/models/shared/sourcewoocommercecreaterequest.go
internal/sdk/pkg/models/shared/sourcewoocommerce.go
-internal/sdk/pkg/models/shared/sourcexerocreaterequest.go
-internal/sdk/pkg/models/shared/sourcexero.go
internal/sdk/pkg/models/shared/sourcexkcdcreaterequest.go
internal/sdk/pkg/models/shared/sourcexkcd.go
internal/sdk/pkg/models/shared/sourceyandexmetricacreaterequest.go
internal/sdk/pkg/models/shared/sourceyandexmetrica.go
internal/sdk/pkg/models/shared/sourceyotpocreaterequest.go
internal/sdk/pkg/models/shared/sourceyotpo.go
-internal/sdk/pkg/models/shared/sourceyouniumcreaterequest.go
-internal/sdk/pkg/models/shared/sourceyounium.go
internal/sdk/pkg/models/shared/sourceyoutubeanalyticscreaterequest.go
internal/sdk/pkg/models/shared/sourceyoutubeanalytics.go
internal/sdk/pkg/models/shared/sourcezendeskchatcreaterequest.go
internal/sdk/pkg/models/shared/sourcezendeskchat.go
+internal/sdk/pkg/models/shared/sourcezendesksellcreaterequest.go
+internal/sdk/pkg/models/shared/sourcezendesksell.go
internal/sdk/pkg/models/shared/sourcezendesksunshinecreaterequest.go
internal/sdk/pkg/models/shared/sourcezendesksunshine.go
internal/sdk/pkg/models/shared/sourcezendesksupportcreaterequest.go
@@ -2319,8 +1646,6 @@ internal/sdk/pkg/models/shared/sourceazuretableputrequest.go
internal/sdk/pkg/models/shared/sourceazuretableupdate.go
internal/sdk/pkg/models/shared/sourcebamboohrputrequest.go
internal/sdk/pkg/models/shared/sourcebamboohrupdate.go
-internal/sdk/pkg/models/shared/sourcebigcommerceputrequest.go
-internal/sdk/pkg/models/shared/sourcebigcommerceupdate.go
internal/sdk/pkg/models/shared/sourcebigqueryputrequest.go
internal/sdk/pkg/models/shared/sourcebigqueryupdate.go
internal/sdk/pkg/models/shared/sourcebingadsputrequest.go
@@ -2329,6 +1654,8 @@ internal/sdk/pkg/models/shared/sourcebraintreeputrequest.go
internal/sdk/pkg/models/shared/sourcebraintreeupdate.go
internal/sdk/pkg/models/shared/sourcebrazeputrequest.go
internal/sdk/pkg/models/shared/sourcebrazeupdate.go
+internal/sdk/pkg/models/shared/sourcecartputrequest.go
+internal/sdk/pkg/models/shared/sourcecartupdate.go
internal/sdk/pkg/models/shared/sourcechargebeeputrequest.go
internal/sdk/pkg/models/shared/sourcechargebeeupdate.go
internal/sdk/pkg/models/shared/sourcechartmogulputrequest.go
@@ -2365,8 +1692,6 @@ internal/sdk/pkg/models/shared/sourcedremioputrequest.go
internal/sdk/pkg/models/shared/sourcedremioupdate.go
internal/sdk/pkg/models/shared/sourcedynamodbputrequest.go
internal/sdk/pkg/models/shared/sourcedynamodbupdate.go
-internal/sdk/pkg/models/shared/sourcee2etestcloudputrequest.go
-internal/sdk/pkg/models/shared/sourcee2etestcloudupdate.go
internal/sdk/pkg/models/shared/sourceemailoctopusputrequest.go
internal/sdk/pkg/models/shared/sourceemailoctopusupdate.go
internal/sdk/pkg/models/shared/sourceexchangeratesputrequest.go
@@ -2379,8 +1704,8 @@ internal/sdk/pkg/models/shared/sourcefakerputrequest.go
internal/sdk/pkg/models/shared/sourcefakerupdate.go
internal/sdk/pkg/models/shared/sourcefaunaputrequest.go
internal/sdk/pkg/models/shared/sourcefaunaupdate.go
-internal/sdk/pkg/models/shared/sourcefilesecureputrequest.go
-internal/sdk/pkg/models/shared/sourcefilesecureupdate.go
+internal/sdk/pkg/models/shared/sourcefileputrequest.go
+internal/sdk/pkg/models/shared/sourcefileupdate.go
internal/sdk/pkg/models/shared/sourcefireboltputrequest.go
internal/sdk/pkg/models/shared/sourcefireboltupdate.go
internal/sdk/pkg/models/shared/sourcefreshcallerputrequest.go
@@ -2407,10 +1732,10 @@ internal/sdk/pkg/models/shared/sourcegoogleadsputrequest.go
internal/sdk/pkg/models/shared/sourcegoogleadsupdate.go
internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiputrequest.go
internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiupdate.go
-internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4putrequest.go
-internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4update.go
internal/sdk/pkg/models/shared/sourcegoogledirectoryputrequest.go
internal/sdk/pkg/models/shared/sourcegoogledirectoryupdate.go
+internal/sdk/pkg/models/shared/sourcegoogledriveputrequest.go
+internal/sdk/pkg/models/shared/sourcegoogledriveupdate.go
internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsputrequest.go
internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsupdate.go
internal/sdk/pkg/models/shared/sourcegooglesearchconsoleputrequest.go
@@ -2485,10 +1810,10 @@ internal/sdk/pkg/models/shared/sourcemixpanelputrequest.go
internal/sdk/pkg/models/shared/sourcemixpanelupdate.go
internal/sdk/pkg/models/shared/sourcemondayputrequest.go
internal/sdk/pkg/models/shared/sourcemondayupdate.go
-internal/sdk/pkg/models/shared/sourcemongodbputrequest.go
-internal/sdk/pkg/models/shared/sourcemongodbupdate.go
internal/sdk/pkg/models/shared/sourcemongodbinternalpocputrequest.go
internal/sdk/pkg/models/shared/sourcemongodbinternalpocupdate.go
+internal/sdk/pkg/models/shared/sourcemongodbv2putrequest.go
+internal/sdk/pkg/models/shared/sourcemongodbv2update.go
internal/sdk/pkg/models/shared/sourcemssqlputrequest.go
internal/sdk/pkg/models/shared/sourcemssqlupdate.go
internal/sdk/pkg/models/shared/sourcemyhoursputrequest.go
@@ -2657,20 +1982,18 @@ internal/sdk/pkg/models/shared/sourcewikipediapageviewsputrequest.go
internal/sdk/pkg/models/shared/sourcewikipediapageviewsupdate.go
internal/sdk/pkg/models/shared/sourcewoocommerceputrequest.go
internal/sdk/pkg/models/shared/sourcewoocommerceupdate.go
-internal/sdk/pkg/models/shared/sourcexeroputrequest.go
-internal/sdk/pkg/models/shared/sourcexeroupdate.go
internal/sdk/pkg/models/shared/sourcexkcdputrequest.go
internal/sdk/pkg/models/shared/sourcexkcdupdate.go
internal/sdk/pkg/models/shared/sourceyandexmetricaputrequest.go
internal/sdk/pkg/models/shared/sourceyandexmetricaupdate.go
internal/sdk/pkg/models/shared/sourceyotpoputrequest.go
internal/sdk/pkg/models/shared/sourceyotpoupdate.go
-internal/sdk/pkg/models/shared/sourceyouniumputrequest.go
-internal/sdk/pkg/models/shared/sourceyouniumupdate.go
internal/sdk/pkg/models/shared/sourceyoutubeanalyticsputrequest.go
internal/sdk/pkg/models/shared/sourceyoutubeanalyticsupdate.go
internal/sdk/pkg/models/shared/sourcezendeskchatputrequest.go
internal/sdk/pkg/models/shared/sourcezendeskchatupdate.go
+internal/sdk/pkg/models/shared/sourcezendesksellputrequest.go
+internal/sdk/pkg/models/shared/sourcezendesksellupdate.go
internal/sdk/pkg/models/shared/sourcezendesksunshineputrequest.go
internal/sdk/pkg/models/shared/sourcezendesksunshineupdate.go
internal/sdk/pkg/models/shared/sourcezendesksupportputrequest.go
@@ -2695,6 +2018,461 @@ internal/sdk/pkg/models/shared/workspacecreaterequest.go
internal/sdk/pkg/models/shared/workspacesresponse.go
internal/sdk/pkg/models/shared/workspaceupdaterequest.go
internal/sdk/pkg/models/shared/security.go
+internal/sdk/pkg/models/shared/schemebasicauth.go
+internal/provider/type_stream_configuration.go
+internal/provider/type_stream_configurations.go
+internal/provider/type_connection_schedule.go
+internal/provider/type_iam_role.go
+internal/provider/type_iam_user.go
+internal/provider/type_destination_aws_datalake_authentication_mode.go
+internal/provider/type_json_lines_newline_delimited_json.go
+internal/provider/type_parquet_columnar_storage.go
+internal/provider/type_destination_aws_datalake_output_format_wildcard.go
+internal/provider/type_destination_aws_datalake.go
+internal/provider/type_csv_comma_separated_values.go
+internal/provider/type_destination_azure_blob_storage_json_lines_newline_delimited_json.go
+internal/provider/type_destination_azure_blob_storage_output_format.go
+internal/provider/type_destination_azure_blob_storage.go
+internal/provider/type_destination_bigquery_hmac_key.go
+internal/provider/type_destination_bigquery_credential.go
+internal/provider/type_gcs_staging.go
+internal/provider/type_destination_bigquery_loading_method.go
+internal/provider/type_destination_bigquery.go
+internal/provider/type_password_authentication.go
+internal/provider/type_ssh_key_authentication.go
+internal/provider/type_destination_clickhouse_ssh_tunnel_method.go
+internal/provider/type_destination_clickhouse.go
+internal/provider/type_destination_convex.go
+internal/provider/type_destination_cumulio.go
+internal/provider/type_destination_databend.go
+internal/provider/type_amazon_s3.go
+internal/provider/type_destination_databricks_azure_blob_storage.go
+internal/provider/type_destination_databricks_data_source1.go
+internal/provider/type_destination_databricks.go
+internal/provider/type_destination_dev_null_test_destination.go
+internal/provider/type_destination_dev_null.go
+internal/provider/type_destination_duckdb.go
+internal/provider/type_destination_dynamodb.go
+internal/provider/type_api_key_secret.go
+internal/provider/type_username_password.go
+internal/provider/type_destination_elasticsearch_authentication_method.go
+internal/provider/type_destination_elasticsearch.go
+internal/provider/type_external_table_via_s3.go
+internal/provider/type_destination_firebolt_loading_method.go
+internal/provider/type_destination_firebolt.go
+internal/provider/type_destination_firestore.go
+internal/provider/type_hmac_key.go
+internal/provider/type_destination_gcs_authentication.go
+internal/provider/type_bzip2.go
+internal/provider/type_deflate.go
+internal/provider/type_no_compression.go
+internal/provider/type_snappy.go
+internal/provider/type_xz.go
+internal/provider/type_zstandard.go
+internal/provider/type_destination_gcs_compression_codec.go
+internal/provider/type_avro_apache_avro.go
+internal/provider/type_gzip.go
+internal/provider/type_destination_gcs_update_no_compression.go
+internal/provider/type_destination_gcs_compression.go
+internal/provider/type_destination_gcs_csv_comma_separated_values.go
+internal/provider/type_destination_gcs_json_lines_newline_delimited_json.go
+internal/provider/type_destination_gcs_parquet_columnar_storage.go
+internal/provider/type_destination_gcs_output_format.go
+internal/provider/type_destination_gcs.go
+internal/provider/type_destination_google_sheets_authentication_via_google_o_auth.go
+internal/provider/type_destination_google_sheets.go
+internal/provider/type_destination_keen.go
+internal/provider/type_destination_kinesis.go
+internal/provider/type_open_ai.go
+internal/provider/type_destination_langchain_embedding.go
+internal/provider/type_chroma_local_persistance.go
+internal/provider/type_doc_array_hnsw_search.go
+internal/provider/type_destination_langchain_pinecone.go
+internal/provider/type_destination_langchain_indexing.go
+internal/provider/type_destination_langchain_processing_config_model.go
+internal/provider/type_destination_langchain.go
+internal/provider/type_azure_open_ai.go
+internal/provider/type_cohere.go
+internal/provider/type_from_field.go
+internal/provider/type_open_ai_compatible.go
+internal/provider/type_destination_milvus_embedding.go
+internal/provider/type_destination_milvus_api_token.go
+internal/provider/type_destination_milvus_authentication.go
+internal/provider/type_destination_milvus_indexing.go
+internal/provider/type_field_name_mapping_config_model.go
+internal/provider/type_by_markdown_header.go
+internal/provider/type_by_programming_language.go
+internal/provider/type_by_separator.go
+internal/provider/type_destination_milvus_text_splitter.go
+internal/provider/type_destination_milvus_processing_config_model.go
+internal/provider/type_destination_milvus.go
+internal/provider/type_destination_mongodb_authorization_type.go
+internal/provider/type_mongo_db_atlas.go
+internal/provider/type_replica_set.go
+internal/provider/type_standalone_mongo_db_instance.go
+internal/provider/type_destination_mongodb_mongo_db_instance_type.go
+internal/provider/type_destination_mongodb.go
+internal/provider/type_encrypted_verify_certificate.go
+internal/provider/type_destination_mssql_ssl_method.go
+internal/provider/type_destination_mssql.go
+internal/provider/type_destination_oracle.go
+internal/provider/type_destination_pinecone_embedding.go
+internal/provider/type_destination_pinecone.go
+internal/provider/type_verify_ca.go
+internal/provider/type_verify_full.go
+internal/provider/type_destination_postgres_ssl_modes.go
+internal/provider/type_destination_postgres.go
+internal/provider/type_destination_pubsub.go
+internal/provider/type_api_key_auth.go
+internal/provider/type_destination_qdrant_authentication_method.go
+internal/provider/type_destination_qdrant_distance_metric.go
+internal/provider/type_destination_qdrant_indexing.go
+internal/provider/type_destination_qdrant.go
+internal/provider/type_destination_redis_ssl_modes.go
+internal/provider/type_destination_redis.go
+internal/provider/type_aescbc_envelope_encryption.go
+internal/provider/type_destination_redshift_encryption.go
+internal/provider/type_s3_staging.go
+internal/provider/type_destination_redshift_uploading_method.go
+internal/provider/type_destination_redshift.go
+internal/provider/type_destination_s3_json_lines_newline_delimited_json.go
+internal/provider/type_destination_s3_output_format.go
+internal/provider/type_destination_s3.go
+internal/provider/type_destination_s3_glue_output_format.go
+internal/provider/type_destination_s3_glue.go
+internal/provider/type_destination_sftp_json.go
+internal/provider/type_key_pair_authentication.go
+internal/provider/type_o_auth20.go
+internal/provider/type_username_and_password.go
+internal/provider/type_destination_snowflake_authorization_method.go
+internal/provider/type_destination_snowflake.go
+internal/provider/type_destination_timeplus.go
+internal/provider/type_destination_typesense.go
+internal/provider/type_destination_vertica.go
+internal/provider/type_destination_weaviate_embedding.go
+internal/provider/type_header.go
+internal/provider/type_destination_weaviate_authentication.go
+internal/provider/type_destination_weaviate_indexing.go
+internal/provider/type_destination_weaviate.go
+internal/provider/type_destination_xata.go
+internal/provider/type_source_aha.go
+internal/provider/type_source_aircall.go
+internal/provider/type_source_airtable_o_auth20.go
+internal/provider/type_source_airtable_authentication.go
+internal/provider/type_source_airtable.go
+internal/provider/type_logical_replication_cdc.go
+internal/provider/type_source_alloydb_replication_method.go
+internal/provider/type_source_alloydb_allow.go
+internal/provider/type_source_alloydb_verify_ca.go
+internal/provider/type_source_alloydb_ssl_modes.go
+internal/provider/type_source_alloydb.go
+internal/provider/type_source_amazon_ads.go
+internal/provider/type_source_amazon_seller_partner.go
+internal/provider/type_source_amazon_sqs.go
+internal/provider/type_source_amplitude.go
+internal/provider/type_source_apify_dataset.go
+internal/provider/type_source_appfollow.go
+internal/provider/type_authenticate_with_personal_access_token.go
+internal/provider/type_source_asana_authentication_mechanism.go
+internal/provider/type_source_asana.go
+internal/provider/type_o_auth2_access_token.go
+internal/provider/type_o_auth2_confidential_application.go
+internal/provider/type_source_auth0_authentication_method.go
+internal/provider/type_source_auth0.go
+internal/provider/type_source_aws_cloudtrail.go
+internal/provider/type_avro_format.go
+internal/provider/type_user_provided.go
+internal/provider/type_source_azure_blob_storage_csv_header_definition.go
+internal/provider/type_csv_format.go
+internal/provider/type_document_file_type_format_experimental.go
+internal/provider/type_parquet_format.go
+internal/provider/type_source_azure_blob_storage_format.go
+internal/provider/type_file_based_stream_config.go
+internal/provider/type_source_azure_blob_storage.go
+internal/provider/type_source_azure_table.go
+internal/provider/type_source_bamboo_hr.go
+internal/provider/type_source_bigquery.go
+internal/provider/type_custom_report_config.go
+internal/provider/type_source_bing_ads.go
+internal/provider/type_source_braintree.go
+internal/provider/type_source_braze.go
+internal/provider/type_central_api_router.go
+internal/provider/type_single_store_access_token.go
+internal/provider/type_source_cart_authorization_method.go
+internal/provider/type_source_cart.go
+internal/provider/type_source_chargebee.go
+internal/provider/type_source_chartmogul.go
+internal/provider/type_source_clickhouse.go
+internal/provider/type_source_clickup_api.go
+internal/provider/type_source_clockify.go
+internal/provider/type_source_close_com.go
+internal/provider/type_source_coda.go
+internal/provider/type_source_coin_api.go
+internal/provider/type_source_coinmarketcap.go
+internal/provider/type_source_confluence.go
+internal/provider/type_source_datascope.go
+internal/provider/type_source_delighted.go
+internal/provider/type_source_dixa.go
+internal/provider/type_source_dockerhub.go
+internal/provider/type_source_dremio.go
+internal/provider/type_source_dynamodb.go
+internal/provider/type_source_exchange_rates.go
+internal/provider/type_insight_config.go
+internal/provider/type_source_facebook_marketing.go
+internal/provider/type_source_facebook_pages.go
+internal/provider/type_source_faker.go
+internal/provider/type_enabled.go
+internal/provider/type_source_fauna_deletion_mode.go
+internal/provider/type_source_fauna_collection.go
+internal/provider/type_source_fauna.go
+internal/provider/type_az_blob_azure_blob_storage.go
+internal/provider/type_gcs_google_cloud_storage.go
+internal/provider/type_https_public_web.go
+internal/provider/type_source_file_s3_amazon_web_services.go
+internal/provider/type_scp_secure_copy_protocol.go
+internal/provider/type_source_file_storage_provider.go
+internal/provider/type_source_file.go
+internal/provider/type_source_firebolt.go
+internal/provider/type_source_freshcaller.go
+internal/provider/type_source_freshdesk.go
+internal/provider/type_source_freshsales.go
+internal/provider/type_source_gcs_format.go
+internal/provider/type_source_gcs_stream_config.go
+internal/provider/type_source_gcs.go
+internal/provider/type_source_getlago.go
+internal/provider/type_o_auth.go
+internal/provider/type_source_github_authentication.go
+internal/provider/type_source_github.go
+internal/provider/type_source_gitlab_o_auth20.go
+internal/provider/type_source_gitlab_authorization_method.go
+internal/provider/type_source_gitlab.go
+internal/provider/type_source_gnews.go
+internal/provider/type_source_google_ads_google_credentials.go
+internal/provider/type_custom_queries.go
+internal/provider/type_source_google_ads.go
+internal/provider/type_authenticate_via_google_oauth.go
+internal/provider/type_service_account_key_authentication.go
+internal/provider/type_source_google_analytics_data_api_credentials.go
+internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter1_expressions_double_value.go
+internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter1_expressions_int64_value.go
+internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_from_value.go
+internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_between_filter.go
+internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_in_list_filter.go
+internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_numeric_filter.go
+internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_string_filter.go
+internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_filter.go
+internal/provider/type_expression.go
+internal/provider/type_and_group.go
+internal/provider/type_not_expression.go
+internal/provider/type_source_google_analytics_data_api_dimensions_filter.go
+internal/provider/type_source_google_analytics_data_api_custom_report_config.go
+internal/provider/type_source_google_analytics_data_api.go
+internal/provider/type_service_account_key.go
+internal/provider/type_source_google_directory_google_credentials.go
+internal/provider/type_source_google_directory.go
+internal/provider/type_source_google_drive_service_account_key_authentication.go
+internal/provider/type_source_google_drive_authentication.go
+internal/provider/type_source_google_drive_csv_format.go
+internal/provider/type_source_google_drive_format.go
+internal/provider/type_source_google_drive_file_based_stream_config.go
+internal/provider/type_source_google_drive.go
+internal/provider/type_source_google_pagespeed_insights.go
+internal/provider/type_source_google_search_console_service_account_key_authentication.go
+internal/provider/type_source_google_search_console_authentication_type.go
+internal/provider/type_source_google_search_console_custom_report_config.go
+internal/provider/type_source_google_search_console.go
+internal/provider/type_source_google_sheets.go
+internal/provider/type_source_google_webfonts.go
+internal/provider/type_source_google_workspace_admin_reports.go
+internal/provider/type_source_gridly.go
+internal/provider/type_authenticate_via_harvest_o_auth.go
+internal/provider/type_source_harvest_authenticate_with_personal_access_token.go
+internal/provider/type_source_harvest_authentication_mechanism.go
+internal/provider/type_source_harvest.go
+internal/provider/type_source_hubspot_authentication.go
+internal/provider/type_source_hubspot.go
+internal/provider/type_source_insightly.go
+internal/provider/type_source_instagram.go
+internal/provider/type_source_intercom.go
+internal/provider/type_source_ip2whois.go
+internal/provider/type_source_jira.go
+internal/provider/type_source_k6_cloud.go
+internal/provider/type_source_klarna.go
+internal/provider/type_source_klaviyo.go
+internal/provider/type_source_kustomer_singer.go
+internal/provider/type_source_kyve.go
+internal/provider/type_authenticate_via_lever_o_auth.go
+internal/provider/type_source_lever_hiring_authentication_mechanism.go
+internal/provider/type_source_lever_hiring.go
+internal/provider/type_ad_analytics_report_configuration.go
+internal/provider/type_source_linkedin_ads_authentication.go
+internal/provider/type_source_linkedin_ads.go
+internal/provider/type_source_linkedin_pages.go
+internal/provider/type_source_linnworks.go
+internal/provider/type_source_lokalise.go
+internal/provider/type_api_key.go
+internal/provider/type_source_mailchimp_authentication.go
+internal/provider/type_source_mailchimp.go
+internal/provider/type_source_mailgun.go
+internal/provider/type_source_mailjet_sms.go
+internal/provider/type_source_marketo.go
+internal/provider/type_source_metabase.go
+internal/provider/type_authenticate_via_microsoft.go
+internal/provider/type_authenticate_via_microsoft_o_auth20.go
+internal/provider/type_source_microsoft_teams_authentication_mechanism.go
+internal/provider/type_source_microsoft_teams.go
+internal/provider/type_project_secret.go
+internal/provider/type_service_account.go
+internal/provider/type_source_mixpanel_authentication_wildcard.go
+internal/provider/type_source_mixpanel.go
+internal/provider/type_source_monday_o_auth20.go
+internal/provider/type_source_monday_authorization_method.go
+internal/provider/type_source_monday.go
+internal/provider/type_source_mongodb_internal_poc.go
+internal/provider/type_mongo_db_atlas_replica_set.go
+internal/provider/type_self_managed_replica_set.go
+internal/provider/type_source_mongodb_v2_cluster_type.go
+internal/provider/type_source_mongodb_v2.go
+internal/provider/type_read_changes_using_change_data_capture_cdc.go
+internal/provider/type_source_mssql_update_method.go
+internal/provider/type_source_mssql.go
+internal/provider/type_source_my_hours.go
+internal/provider/type_read_changes_using_binary_log_cdc.go
+internal/provider/type_source_mysql_update_method.go
+internal/provider/type_source_mysql_verify_ca.go
+internal/provider/type_source_mysql_ssl_modes.go
+internal/provider/type_source_mysql.go
+internal/provider/type_source_netsuite.go
+internal/provider/type_source_notion_o_auth20.go
+internal/provider/type_source_notion_authentication_method.go
+internal/provider/type_source_notion.go
+internal/provider/type_source_nytimes.go
+internal/provider/type_source_okta_authorization_method.go
+internal/provider/type_source_okta.go
+internal/provider/type_applications.go
+internal/provider/type_source_onesignal.go
+internal/provider/type_service_name.go
+internal/provider/type_system_idsid.go
+internal/provider/type_source_oracle_connect_by.go
+internal/provider/type_native_network_encryption_nne.go
+internal/provider/type_tls_encrypted_verify_certificate.go
+internal/provider/type_source_oracle_encryption.go
+internal/provider/type_source_oracle.go
+internal/provider/type_source_orb.go
+internal/provider/type_source_orbit.go
+internal/provider/type_source_outbrain_amplify_authentication_method.go
+internal/provider/type_source_outbrain_amplify.go
+internal/provider/type_source_outreach.go
+internal/provider/type_source_paypal_transaction.go
+internal/provider/type_source_paystack.go
+internal/provider/type_source_pexels_api.go
+internal/provider/type_source_pinterest_authorization_method.go
+internal/provider/type_report_config.go
+internal/provider/type_source_pinterest.go
+internal/provider/type_source_pipedrive.go
+internal/provider/type_source_pocket.go
+internal/provider/type_source_pokeapi.go
+internal/provider/type_source_polygon_stock_api.go
+internal/provider/type_source_postgres_update_method.go
+internal/provider/type_source_postgres.go
+internal/provider/type_source_posthog.go
+internal/provider/type_source_postmarkapp.go
+internal/provider/type_source_prestashop.go
+internal/provider/type_source_punk_api.go
+internal/provider/type_source_pypi.go
+internal/provider/type_source_qualaroo.go
+internal/provider/type_source_quickbooks_o_auth20.go
+internal/provider/type_source_quickbooks_authorization_method.go
+internal/provider/type_source_quickbooks.go
+internal/provider/type_source_railz.go
+internal/provider/type_source_recharge.go
+internal/provider/type_source_recreation.go
+internal/provider/type_source_recruitee.go
+internal/provider/type_source_recurly.go
+internal/provider/type_source_redshift.go
+internal/provider/type_authenticate_with_api_token.go
+internal/provider/type_source_retently_authentication_mechanism.go
+internal/provider/type_source_retently.go
+internal/provider/type_source_rki_covid.go
+internal/provider/type_source_rss.go
+internal/provider/type_csv.go
+internal/provider/type_jsonl.go
+internal/provider/type_parquet.go
+internal/provider/type_source_s3_file_format.go
+internal/provider/type_source_s3_s3_amazon_web_services.go
+internal/provider/type_source_s3.go
+internal/provider/type_streams_criteria.go
+internal/provider/type_source_salesforce.go
+internal/provider/type_source_salesloft_credentials.go
+internal/provider/type_source_salesloft.go
+internal/provider/type_source_sendgrid.go
+internal/provider/type_source_senseforce.go
+internal/provider/type_source_sentry.go
+internal/provider/type_source_sftp_password_authentication.go
+internal/provider/type_source_sftp_ssh_key_authentication.go
+internal/provider/type_source_sftp_authentication_wildcard.go
+internal/provider/type_source_sftp.go
+internal/provider/type_source_sftp_bulk.go
+internal/provider/type_api_password.go
+internal/provider/type_source_shopify_o_auth20.go
+internal/provider/type_source_shopify_shopify_authorization_method.go
+internal/provider/type_source_shopify.go
+internal/provider/type_source_shortio.go
+internal/provider/type_source_slack_authentication_mechanism.go
+internal/provider/type_source_slack.go
+internal/provider/type_source_smaily.go
+internal/provider/type_source_smartsheets_authorization_method.go
+internal/provider/type_source_smartsheets.go
+internal/provider/type_source_snapchat_marketing.go
+internal/provider/type_source_snowflake_o_auth20.go
+internal/provider/type_source_snowflake_authorization_method.go
+internal/provider/type_source_snowflake.go
+internal/provider/type_source_sonar_cloud.go
+internal/provider/type_source_spacex_api.go
+internal/provider/type_source_square_authentication.go
+internal/provider/type_source_square.go
+internal/provider/type_source_strava.go
+internal/provider/type_source_stripe.go
+internal/provider/type_source_surveymonkey.go
+internal/provider/type_source_survey_sparrow_base_url.go
+internal/provider/type_source_survey_sparrow.go
+internal/provider/type_source_the_guardian_api.go
+internal/provider/type_source_tiktok_marketing_o_auth20.go
+internal/provider/type_sandbox_access_token.go
+internal/provider/type_source_tiktok_marketing_authentication_method.go
+internal/provider/type_source_tiktok_marketing.go
+internal/provider/type_source_trello.go
+internal/provider/type_source_trustpilot_api_key.go
+internal/provider/type_source_trustpilot_authorization_method.go
+internal/provider/type_source_trustpilot.go
+internal/provider/type_source_tvmaze_schedule.go
+internal/provider/type_source_twilio.go
+internal/provider/type_source_twilio_taskrouter.go
+internal/provider/type_source_twitter.go
+internal/provider/type_source_typeform.go
+internal/provider/type_source_us_census.go
+internal/provider/type_source_webflow.go
+internal/provider/type_source_wikipedia_pageviews.go
+internal/provider/type_source_woocommerce.go
+internal/provider/type_source_yandex_metrica.go
+internal/provider/type_source_yotpo.go
+internal/provider/type_source_youtube_analytics.go
+internal/provider/type_source_zendesk_chat_o_auth20.go
+internal/provider/type_source_zendesk_chat_authorization_method.go
+internal/provider/type_source_zendesk_chat.go
+internal/provider/type_source_zendesk_sunshine_api_token.go
+internal/provider/type_source_zendesk_sunshine_authorization_method.go
+internal/provider/type_source_zendesk_sunshine.go
+internal/provider/type_source_zendesk_support_api_token.go
+internal/provider/type_source_zendesk_support_o_auth20.go
+internal/provider/type_source_zendesk_support_authentication.go
+internal/provider/type_source_zendesk_support.go
+internal/provider/type_source_zendesk_talk.go
+internal/provider/type_source_zenloop.go
+internal/provider/type_source_zoho_crm.go
+internal/provider/type_source_zoom.go
+internal/provider/type_source_zuora.go
+internal/provider/type_connection_schedule_response.go
USAGE.md
internal/provider/provider.go
examples/provider/provider.tf
@@ -2710,9 +2488,6 @@ examples/resources/airbyte_destination_azure_blob_storage/resource.tf
internal/provider/destination_bigquery_resource.go
internal/provider/destination_bigquery_resource_sdk.go
examples/resources/airbyte_destination_bigquery/resource.tf
-internal/provider/destination_bigquerydenormalized_resource.go
-internal/provider/destination_bigquerydenormalized_resource_sdk.go
-examples/resources/airbyte_destination_bigquery_denormalized/resource.tf
internal/provider/destination_clickhouse_resource.go
internal/provider/destination_clickhouse_resource_sdk.go
examples/resources/airbyte_destination_clickhouse/resource.tf
@@ -2731,6 +2506,9 @@ examples/resources/airbyte_destination_databricks/resource.tf
internal/provider/destination_devnull_resource.go
internal/provider/destination_devnull_resource_sdk.go
examples/resources/airbyte_destination_dev_null/resource.tf
+internal/provider/destination_duckdb_resource.go
+internal/provider/destination_duckdb_resource_sdk.go
+examples/resources/airbyte_destination_duckdb/resource.tf
internal/provider/destination_dynamodb_resource.go
internal/provider/destination_dynamodb_resource_sdk.go
examples/resources/airbyte_destination_dynamodb/resource.tf
@@ -2782,6 +2560,9 @@ examples/resources/airbyte_destination_postgres/resource.tf
internal/provider/destination_pubsub_resource.go
internal/provider/destination_pubsub_resource_sdk.go
examples/resources/airbyte_destination_pubsub/resource.tf
+internal/provider/destination_qdrant_resource.go
+internal/provider/destination_qdrant_resource_sdk.go
+examples/resources/airbyte_destination_qdrant/resource.tf
internal/provider/destination_redis_resource.go
internal/provider/destination_redis_resource_sdk.go
examples/resources/airbyte_destination_redis/resource.tf
@@ -2809,6 +2590,9 @@ examples/resources/airbyte_destination_typesense/resource.tf
internal/provider/destination_vertica_resource.go
internal/provider/destination_vertica_resource_sdk.go
examples/resources/airbyte_destination_vertica/resource.tf
+internal/provider/destination_weaviate_resource.go
+internal/provider/destination_weaviate_resource_sdk.go
+examples/resources/airbyte_destination_weaviate/resource.tf
internal/provider/destination_xata_resource.go
internal/provider/destination_xata_resource_sdk.go
examples/resources/airbyte_destination_xata/resource.tf
@@ -2860,9 +2644,6 @@ examples/resources/airbyte_source_azure_table/resource.tf
internal/provider/source_bamboohr_resource.go
internal/provider/source_bamboohr_resource_sdk.go
examples/resources/airbyte_source_bamboo_hr/resource.tf
-internal/provider/source_bigcommerce_resource.go
-internal/provider/source_bigcommerce_resource_sdk.go
-examples/resources/airbyte_source_bigcommerce/resource.tf
internal/provider/source_bigquery_resource.go
internal/provider/source_bigquery_resource_sdk.go
examples/resources/airbyte_source_bigquery/resource.tf
@@ -2875,6 +2656,9 @@ examples/resources/airbyte_source_braintree/resource.tf
internal/provider/source_braze_resource.go
internal/provider/source_braze_resource_sdk.go
examples/resources/airbyte_source_braze/resource.tf
+internal/provider/source_cart_resource.go
+internal/provider/source_cart_resource_sdk.go
+examples/resources/airbyte_source_cart/resource.tf
internal/provider/source_chargebee_resource.go
internal/provider/source_chargebee_resource_sdk.go
examples/resources/airbyte_source_chargebee/resource.tf
@@ -2929,9 +2713,6 @@ examples/resources/airbyte_source_dremio/resource.tf
internal/provider/source_dynamodb_resource.go
internal/provider/source_dynamodb_resource_sdk.go
examples/resources/airbyte_source_dynamodb/resource.tf
-internal/provider/source_e2etestcloud_resource.go
-internal/provider/source_e2etestcloud_resource_sdk.go
-examples/resources/airbyte_source_e2e_test_cloud/resource.tf
internal/provider/source_emailoctopus_resource.go
internal/provider/source_emailoctopus_resource_sdk.go
examples/resources/airbyte_source_emailoctopus/resource.tf
@@ -2950,9 +2731,9 @@ examples/resources/airbyte_source_faker/resource.tf
internal/provider/source_fauna_resource.go
internal/provider/source_fauna_resource_sdk.go
examples/resources/airbyte_source_fauna/resource.tf
-internal/provider/source_filesecure_resource.go
-internal/provider/source_filesecure_resource_sdk.go
-examples/resources/airbyte_source_file_secure/resource.tf
+internal/provider/source_file_resource.go
+internal/provider/source_file_resource_sdk.go
+examples/resources/airbyte_source_file/resource.tf
internal/provider/source_firebolt_resource.go
internal/provider/source_firebolt_resource_sdk.go
examples/resources/airbyte_source_firebolt/resource.tf
@@ -2992,12 +2773,12 @@ examples/resources/airbyte_source_google_ads/resource.tf
internal/provider/source_googleanalyticsdataapi_resource.go
internal/provider/source_googleanalyticsdataapi_resource_sdk.go
examples/resources/airbyte_source_google_analytics_data_api/resource.tf
-internal/provider/source_googleanalyticsv4_resource.go
-internal/provider/source_googleanalyticsv4_resource_sdk.go
-examples/resources/airbyte_source_google_analytics_v4/resource.tf
internal/provider/source_googledirectory_resource.go
internal/provider/source_googledirectory_resource_sdk.go
examples/resources/airbyte_source_google_directory/resource.tf
+internal/provider/source_googledrive_resource.go
+internal/provider/source_googledrive_resource_sdk.go
+examples/resources/airbyte_source_google_drive/resource.tf
internal/provider/source_googlepagespeedinsights_resource.go
internal/provider/source_googlepagespeedinsights_resource_sdk.go
examples/resources/airbyte_source_google_pagespeed_insights/resource.tf
@@ -3109,12 +2890,12 @@ examples/resources/airbyte_source_mixpanel/resource.tf
internal/provider/source_monday_resource.go
internal/provider/source_monday_resource_sdk.go
examples/resources/airbyte_source_monday/resource.tf
-internal/provider/source_mongodb_resource.go
-internal/provider/source_mongodb_resource_sdk.go
-examples/resources/airbyte_source_mongodb/resource.tf
internal/provider/source_mongodbinternalpoc_resource.go
internal/provider/source_mongodbinternalpoc_resource_sdk.go
examples/resources/airbyte_source_mongodb_internal_poc/resource.tf
+internal/provider/source_mongodbv2_resource.go
+internal/provider/source_mongodbv2_resource_sdk.go
+examples/resources/airbyte_source_mongodb_v2/resource.tf
internal/provider/source_mssql_resource.go
internal/provider/source_mssql_resource_sdk.go
examples/resources/airbyte_source_mssql/resource.tf
@@ -3367,9 +3148,6 @@ examples/resources/airbyte_source_wikipedia_pageviews/resource.tf
internal/provider/source_woocommerce_resource.go
internal/provider/source_woocommerce_resource_sdk.go
examples/resources/airbyte_source_woocommerce/resource.tf
-internal/provider/source_xero_resource.go
-internal/provider/source_xero_resource_sdk.go
-examples/resources/airbyte_source_xero/resource.tf
internal/provider/source_xkcd_resource.go
internal/provider/source_xkcd_resource_sdk.go
examples/resources/airbyte_source_xkcd/resource.tf
@@ -3379,15 +3157,15 @@ examples/resources/airbyte_source_yandex_metrica/resource.tf
internal/provider/source_yotpo_resource.go
internal/provider/source_yotpo_resource_sdk.go
examples/resources/airbyte_source_yotpo/resource.tf
-internal/provider/source_younium_resource.go
-internal/provider/source_younium_resource_sdk.go
-examples/resources/airbyte_source_younium/resource.tf
internal/provider/source_youtubeanalytics_resource.go
internal/provider/source_youtubeanalytics_resource_sdk.go
examples/resources/airbyte_source_youtube_analytics/resource.tf
internal/provider/source_zendeskchat_resource.go
internal/provider/source_zendeskchat_resource_sdk.go
examples/resources/airbyte_source_zendesk_chat/resource.tf
+internal/provider/source_zendesksell_resource.go
+internal/provider/source_zendesksell_resource_sdk.go
+examples/resources/airbyte_source_zendesk_sell/resource.tf
internal/provider/source_zendesksunshine_resource.go
internal/provider/source_zendesksunshine_resource_sdk.go
examples/resources/airbyte_source_zendesk_sunshine/resource.tf
@@ -3424,9 +3202,6 @@ examples/data-sources/airbyte_destination_azure_blob_storage/data-source.tf
internal/provider/destination_bigquery_data_source.go
internal/provider/destination_bigquery_data_source_sdk.go
examples/data-sources/airbyte_destination_bigquery/data-source.tf
-internal/provider/destination_bigquerydenormalized_data_source.go
-internal/provider/destination_bigquerydenormalized_data_source_sdk.go
-examples/data-sources/airbyte_destination_bigquery_denormalized/data-source.tf
internal/provider/destination_clickhouse_data_source.go
internal/provider/destination_clickhouse_data_source_sdk.go
examples/data-sources/airbyte_destination_clickhouse/data-source.tf
@@ -3445,6 +3220,9 @@ examples/data-sources/airbyte_destination_databricks/data-source.tf
internal/provider/destination_devnull_data_source.go
internal/provider/destination_devnull_data_source_sdk.go
examples/data-sources/airbyte_destination_dev_null/data-source.tf
+internal/provider/destination_duckdb_data_source.go
+internal/provider/destination_duckdb_data_source_sdk.go
+examples/data-sources/airbyte_destination_duckdb/data-source.tf
internal/provider/destination_dynamodb_data_source.go
internal/provider/destination_dynamodb_data_source_sdk.go
examples/data-sources/airbyte_destination_dynamodb/data-source.tf
@@ -3496,6 +3274,9 @@ examples/data-sources/airbyte_destination_postgres/data-source.tf
internal/provider/destination_pubsub_data_source.go
internal/provider/destination_pubsub_data_source_sdk.go
examples/data-sources/airbyte_destination_pubsub/data-source.tf
+internal/provider/destination_qdrant_data_source.go
+internal/provider/destination_qdrant_data_source_sdk.go
+examples/data-sources/airbyte_destination_qdrant/data-source.tf
internal/provider/destination_redis_data_source.go
internal/provider/destination_redis_data_source_sdk.go
examples/data-sources/airbyte_destination_redis/data-source.tf
@@ -3523,6 +3304,9 @@ examples/data-sources/airbyte_destination_typesense/data-source.tf
internal/provider/destination_vertica_data_source.go
internal/provider/destination_vertica_data_source_sdk.go
examples/data-sources/airbyte_destination_vertica/data-source.tf
+internal/provider/destination_weaviate_data_source.go
+internal/provider/destination_weaviate_data_source_sdk.go
+examples/data-sources/airbyte_destination_weaviate/data-source.tf
internal/provider/destination_xata_data_source.go
internal/provider/destination_xata_data_source_sdk.go
examples/data-sources/airbyte_destination_xata/data-source.tf
@@ -3574,9 +3358,6 @@ examples/data-sources/airbyte_source_azure_table/data-source.tf
internal/provider/source_bamboohr_data_source.go
internal/provider/source_bamboohr_data_source_sdk.go
examples/data-sources/airbyte_source_bamboo_hr/data-source.tf
-internal/provider/source_bigcommerce_data_source.go
-internal/provider/source_bigcommerce_data_source_sdk.go
-examples/data-sources/airbyte_source_bigcommerce/data-source.tf
internal/provider/source_bigquery_data_source.go
internal/provider/source_bigquery_data_source_sdk.go
examples/data-sources/airbyte_source_bigquery/data-source.tf
@@ -3589,6 +3370,9 @@ examples/data-sources/airbyte_source_braintree/data-source.tf
internal/provider/source_braze_data_source.go
internal/provider/source_braze_data_source_sdk.go
examples/data-sources/airbyte_source_braze/data-source.tf
+internal/provider/source_cart_data_source.go
+internal/provider/source_cart_data_source_sdk.go
+examples/data-sources/airbyte_source_cart/data-source.tf
internal/provider/source_chargebee_data_source.go
internal/provider/source_chargebee_data_source_sdk.go
examples/data-sources/airbyte_source_chargebee/data-source.tf
@@ -3643,9 +3427,6 @@ examples/data-sources/airbyte_source_dremio/data-source.tf
internal/provider/source_dynamodb_data_source.go
internal/provider/source_dynamodb_data_source_sdk.go
examples/data-sources/airbyte_source_dynamodb/data-source.tf
-internal/provider/source_e2etestcloud_data_source.go
-internal/provider/source_e2etestcloud_data_source_sdk.go
-examples/data-sources/airbyte_source_e2e_test_cloud/data-source.tf
internal/provider/source_emailoctopus_data_source.go
internal/provider/source_emailoctopus_data_source_sdk.go
examples/data-sources/airbyte_source_emailoctopus/data-source.tf
@@ -3664,9 +3445,9 @@ examples/data-sources/airbyte_source_faker/data-source.tf
internal/provider/source_fauna_data_source.go
internal/provider/source_fauna_data_source_sdk.go
examples/data-sources/airbyte_source_fauna/data-source.tf
-internal/provider/source_filesecure_data_source.go
-internal/provider/source_filesecure_data_source_sdk.go
-examples/data-sources/airbyte_source_file_secure/data-source.tf
+internal/provider/source_file_data_source.go
+internal/provider/source_file_data_source_sdk.go
+examples/data-sources/airbyte_source_file/data-source.tf
internal/provider/source_firebolt_data_source.go
internal/provider/source_firebolt_data_source_sdk.go
examples/data-sources/airbyte_source_firebolt/data-source.tf
@@ -3706,12 +3487,12 @@ examples/data-sources/airbyte_source_google_ads/data-source.tf
internal/provider/source_googleanalyticsdataapi_data_source.go
internal/provider/source_googleanalyticsdataapi_data_source_sdk.go
examples/data-sources/airbyte_source_google_analytics_data_api/data-source.tf
-internal/provider/source_googleanalyticsv4_data_source.go
-internal/provider/source_googleanalyticsv4_data_source_sdk.go
-examples/data-sources/airbyte_source_google_analytics_v4/data-source.tf
internal/provider/source_googledirectory_data_source.go
internal/provider/source_googledirectory_data_source_sdk.go
examples/data-sources/airbyte_source_google_directory/data-source.tf
+internal/provider/source_googledrive_data_source.go
+internal/provider/source_googledrive_data_source_sdk.go
+examples/data-sources/airbyte_source_google_drive/data-source.tf
internal/provider/source_googlepagespeedinsights_data_source.go
internal/provider/source_googlepagespeedinsights_data_source_sdk.go
examples/data-sources/airbyte_source_google_pagespeed_insights/data-source.tf
@@ -3823,12 +3604,12 @@ examples/data-sources/airbyte_source_mixpanel/data-source.tf
internal/provider/source_monday_data_source.go
internal/provider/source_monday_data_source_sdk.go
examples/data-sources/airbyte_source_monday/data-source.tf
-internal/provider/source_mongodb_data_source.go
-internal/provider/source_mongodb_data_source_sdk.go
-examples/data-sources/airbyte_source_mongodb/data-source.tf
internal/provider/source_mongodbinternalpoc_data_source.go
internal/provider/source_mongodbinternalpoc_data_source_sdk.go
examples/data-sources/airbyte_source_mongodb_internal_poc/data-source.tf
+internal/provider/source_mongodbv2_data_source.go
+internal/provider/source_mongodbv2_data_source_sdk.go
+examples/data-sources/airbyte_source_mongodb_v2/data-source.tf
internal/provider/source_mssql_data_source.go
internal/provider/source_mssql_data_source_sdk.go
examples/data-sources/airbyte_source_mssql/data-source.tf
@@ -4081,9 +3862,6 @@ examples/data-sources/airbyte_source_wikipedia_pageviews/data-source.tf
internal/provider/source_woocommerce_data_source.go
internal/provider/source_woocommerce_data_source_sdk.go
examples/data-sources/airbyte_source_woocommerce/data-source.tf
-internal/provider/source_xero_data_source.go
-internal/provider/source_xero_data_source_sdk.go
-examples/data-sources/airbyte_source_xero/data-source.tf
internal/provider/source_xkcd_data_source.go
internal/provider/source_xkcd_data_source_sdk.go
examples/data-sources/airbyte_source_xkcd/data-source.tf
@@ -4093,15 +3871,15 @@ examples/data-sources/airbyte_source_yandex_metrica/data-source.tf
internal/provider/source_yotpo_data_source.go
internal/provider/source_yotpo_data_source_sdk.go
examples/data-sources/airbyte_source_yotpo/data-source.tf
-internal/provider/source_younium_data_source.go
-internal/provider/source_younium_data_source_sdk.go
-examples/data-sources/airbyte_source_younium/data-source.tf
internal/provider/source_youtubeanalytics_data_source.go
internal/provider/source_youtubeanalytics_data_source_sdk.go
examples/data-sources/airbyte_source_youtube_analytics/data-source.tf
internal/provider/source_zendeskchat_data_source.go
internal/provider/source_zendeskchat_data_source_sdk.go
examples/data-sources/airbyte_source_zendesk_chat/data-source.tf
+internal/provider/source_zendesksell_data_source.go
+internal/provider/source_zendesksell_data_source_sdk.go
+examples/data-sources/airbyte_source_zendesk_sell/data-source.tf
internal/provider/source_zendesksunshine_data_source.go
internal/provider/source_zendesksunshine_data_source_sdk.go
examples/data-sources/airbyte_source_zendesk_sunshine/data-source.tf
diff --git a/gen.yaml b/gen.yaml
index 72bf23f81..aa9735cd7 100755
--- a/gen.yaml
+++ b/gen.yaml
@@ -1,15 +1,29 @@
configVersion: 1.0.0
generation:
+ comments: {}
sdkClassName: SDK
- singleTagPerOp: false
+ usageSnippets:
+ optionalPropertyRendering: withExample
telemetryEnabled: true
features:
terraform:
- core: 2.88.0
+ additionalProperties: 0.1.1
+ constsAndDefaults: 0.1.1
+ core: 3.5.0
globalSecurity: 2.81.1
- globalServerURLs: 2.82.0
- includes: 2.81.1
+ globalServerURLs: 2.82.1
+ unions: 2.81.5
terraform:
- version: 0.3.4
+ version: 0.3.5
author: airbytehq
+ imports:
+ option: openapi
+ paths:
+ callbacks: callbacks
+ errors: sdkerrors
+ operations: operations
+ shared: shared
+ webhooks: webhooks
+ inputModelSuffix: input
+ outputModelSuffix: output
packageName: airbyte
diff --git a/go.mod b/go.mod
old mode 100755
new mode 100644
index 87deb906f..0dbc531a1
--- a/go.mod
+++ b/go.mod
@@ -1,11 +1,13 @@
-module airbyte
+module github.com/airbytehq/terraform-provider-airbyte
go 1.18
require (
github.com/cenkalti/backoff/v4 v4.2.0
+ github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05
github.com/hashicorp/terraform-plugin-framework v1.3.5
github.com/hashicorp/terraform-plugin-go v0.18.0
+ github.com/spyzhov/ajson v0.9.0
)
require (
diff --git a/go.sum b/go.sum
old mode 100755
new mode 100644
index fb08a537c..853448be6
--- a/go.sum
+++ b/go.sum
@@ -31,6 +31,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
+github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05 h1:S92OBrGuLLZsyM5ybUzgc/mPjIYk2AZqufieooe98uw=
+github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05/go.mod h1:M9R1FoZ3y//hwwnJtO51ypFGwm8ZfpxPT/ZLtO1mcgQ=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w=
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
@@ -169,6 +171,8 @@ github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMB
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
+github.com/spyzhov/ajson v0.9.0 h1:tF46gJGOenYVj+k9K1U1XpCxVWhmiyY5PsVCAs1+OJ0=
+github.com/spyzhov/ajson v0.9.0/go.mod h1:a6oSw0MMb7Z5aD2tPoPO+jq11ETKgXUr2XktHdT8Wt8=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
diff --git a/internal/planmodifiers/boolplanmodifier/suppress_diff.go b/internal/planmodifiers/boolplanmodifier/suppress_diff.go
old mode 100755
new mode 100644
index 71fce2781..590e3ea53
--- a/internal/planmodifiers/boolplanmodifier/suppress_diff.go
+++ b/internal/planmodifiers/boolplanmodifier/suppress_diff.go
@@ -3,8 +3,8 @@
package boolplanmodifier
import (
- "airbyte/internal/planmodifiers/utils"
"context"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/utils"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
)
diff --git a/internal/planmodifiers/float64planmodifier/suppress_diff.go b/internal/planmodifiers/float64planmodifier/suppress_diff.go
old mode 100755
new mode 100644
index 090269a11..d16a762fa
--- a/internal/planmodifiers/float64planmodifier/suppress_diff.go
+++ b/internal/planmodifiers/float64planmodifier/suppress_diff.go
@@ -3,8 +3,8 @@
package float64planmodifier
import (
- "airbyte/internal/planmodifiers/utils"
"context"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/utils"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
)
diff --git a/internal/planmodifiers/int64planmodifier/suppress_diff.go b/internal/planmodifiers/int64planmodifier/suppress_diff.go
old mode 100755
new mode 100644
index c65c0486d..8b8cc8e7c
--- a/internal/planmodifiers/int64planmodifier/suppress_diff.go
+++ b/internal/planmodifiers/int64planmodifier/suppress_diff.go
@@ -3,8 +3,8 @@
package int64planmodifier
import (
- "airbyte/internal/planmodifiers/utils"
"context"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/utils"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
)
diff --git a/internal/planmodifiers/listplanmodifier/suppress_diff.go b/internal/planmodifiers/listplanmodifier/suppress_diff.go
old mode 100755
new mode 100644
index a986c9aee..a28aedfba
--- a/internal/planmodifiers/listplanmodifier/suppress_diff.go
+++ b/internal/planmodifiers/listplanmodifier/suppress_diff.go
@@ -3,8 +3,8 @@
package listplanmodifier
import (
- "airbyte/internal/planmodifiers/utils"
"context"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/utils"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
)
diff --git a/internal/planmodifiers/mapplanmodifier/suppress_diff.go b/internal/planmodifiers/mapplanmodifier/suppress_diff.go
old mode 100755
new mode 100644
index cb16380eb..c6ba12eaf
--- a/internal/planmodifiers/mapplanmodifier/suppress_diff.go
+++ b/internal/planmodifiers/mapplanmodifier/suppress_diff.go
@@ -3,8 +3,8 @@
package mapplanmodifier
import (
- "airbyte/internal/planmodifiers/utils"
"context"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/utils"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
)
diff --git a/internal/planmodifiers/numberplanmodifier/suppress_diff.go b/internal/planmodifiers/numberplanmodifier/suppress_diff.go
old mode 100755
new mode 100644
index 319b8b978..cd7466ebb
--- a/internal/planmodifiers/numberplanmodifier/suppress_diff.go
+++ b/internal/planmodifiers/numberplanmodifier/suppress_diff.go
@@ -3,8 +3,8 @@
package numberplanmodifier
import (
- "airbyte/internal/planmodifiers/utils"
"context"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/utils"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
)
diff --git a/internal/planmodifiers/objectplanmodifier/suppress_diff.go b/internal/planmodifiers/objectplanmodifier/suppress_diff.go
old mode 100755
new mode 100644
index 0b2b47f43..b5718ec6c
--- a/internal/planmodifiers/objectplanmodifier/suppress_diff.go
+++ b/internal/planmodifiers/objectplanmodifier/suppress_diff.go
@@ -3,8 +3,8 @@
package objectplanmodifier
import (
- "airbyte/internal/planmodifiers/utils"
"context"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/utils"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
)
diff --git a/internal/planmodifiers/setplanmodifier/suppress_diff.go b/internal/planmodifiers/setplanmodifier/suppress_diff.go
old mode 100755
new mode 100644
index 46c512a6d..dcf32934a
--- a/internal/planmodifiers/setplanmodifier/suppress_diff.go
+++ b/internal/planmodifiers/setplanmodifier/suppress_diff.go
@@ -3,8 +3,8 @@
package setplanmodifier
import (
- "airbyte/internal/planmodifiers/utils"
"context"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/utils"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
)
diff --git a/internal/planmodifiers/stringplanmodifier/suppress_diff.go b/internal/planmodifiers/stringplanmodifier/suppress_diff.go
old mode 100755
new mode 100644
index 81f0cbd8c..a9f121c22
--- a/internal/planmodifiers/stringplanmodifier/suppress_diff.go
+++ b/internal/planmodifiers/stringplanmodifier/suppress_diff.go
@@ -3,8 +3,8 @@
package stringplanmodifier
import (
- "airbyte/internal/planmodifiers/utils"
"context"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/utils"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
)
diff --git a/internal/planmodifiers/utils/state_check.go b/internal/planmodifiers/utils/state_check.go
old mode 100755
new mode 100644
diff --git a/internal/provider/connection_data_source.go b/internal/provider/connection_data_source.go
old mode 100755
new mode 100644
index 7786fd69e..6f387c39a
--- a/internal/provider/connection_data_source.go
+++ b/internal/provider/connection_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,19 +29,19 @@ type ConnectionDataSource struct {
// ConnectionDataSourceModel describes the data model.
type ConnectionDataSourceModel struct {
- Configurations *StreamConfigurations `tfsdk:"configurations"`
- ConnectionID types.String `tfsdk:"connection_id"`
- DataResidency types.String `tfsdk:"data_residency"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- NamespaceDefinition types.String `tfsdk:"namespace_definition"`
- NamespaceFormat types.String `tfsdk:"namespace_format"`
- NonBreakingSchemaUpdatesBehavior types.String `tfsdk:"non_breaking_schema_updates_behavior"`
- Prefix types.String `tfsdk:"prefix"`
- Schedule *ConnectionSchedule `tfsdk:"schedule"`
- SourceID types.String `tfsdk:"source_id"`
- Status types.String `tfsdk:"status"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configurations StreamConfigurations `tfsdk:"configurations"`
+ ConnectionID types.String `tfsdk:"connection_id"`
+ DataResidency types.String `tfsdk:"data_residency"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ Name types.String `tfsdk:"name"`
+ NamespaceDefinition types.String `tfsdk:"namespace_definition"`
+ NamespaceFormat types.String `tfsdk:"namespace_format"`
+ NonBreakingSchemaUpdatesBehavior types.String `tfsdk:"non_breaking_schema_updates_behavior"`
+ Prefix types.String `tfsdk:"prefix"`
+ Schedule ConnectionScheduleResponse `tfsdk:"schedule"`
+ SourceID types.String `tfsdk:"source_id"`
+ Status types.String `tfsdk:"status"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -80,15 +78,7 @@ func (r *ConnectionDataSource) Schema(ctx context.Context, req datasource.Schema
Description: `Paths to the fields that will be used as primary key. This field is REQUIRED if ` + "`" + `destination_sync_mode` + "`" + ` is ` + "`" + `*_dedup` + "`" + ` unless it is already supplied by the source schema.`,
},
"sync_mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "full_refresh_overwrite",
- "full_refresh_append",
- "incremental_append",
- "incremental_deduped_history",
- ),
- },
+ Computed: true,
Description: `must be one of ["full_refresh_overwrite", "full_refresh_append", "incremental_append", "incremental_deduped_history"]`,
},
},
@@ -101,55 +91,30 @@ func (r *ConnectionDataSource) Schema(ctx context.Context, req datasource.Schema
Required: true,
},
"data_residency": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "auto",
- "us",
- "eu",
- ),
- },
- Description: `must be one of ["auto", "us", "eu"]`,
+ Computed: true,
+ Description: `must be one of ["auto", "us", "eu"]; Default: "auto"`,
},
"destination_id": schema.StringAttribute{
Computed: true,
},
"name": schema.StringAttribute{
- Computed: true,
- Description: `Optional name of the connection`,
+ Computed: true,
},
"namespace_definition": schema.StringAttribute{
Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "source",
- "destination",
- "custom_format",
- ),
- },
- MarkdownDescription: `must be one of ["source", "destination", "custom_format"]` + "\n" +
+ MarkdownDescription: `must be one of ["source", "destination", "custom_format"]; Default: "destination"` + "\n" +
`Define the location where the data will be stored in the destination`,
},
"namespace_format": schema.StringAttribute{
- Computed: true,
- Description: `Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.`,
+ Computed: true,
},
"non_breaking_schema_updates_behavior": schema.StringAttribute{
Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ignore",
- "disable_connection",
- "propagate_columns",
- "propagate_fully",
- ),
- },
- MarkdownDescription: `must be one of ["ignore", "disable_connection", "propagate_columns", "propagate_fully"]` + "\n" +
+ MarkdownDescription: `must be one of ["ignore", "disable_connection", "propagate_columns", "propagate_fully"]; Default: "ignore"` + "\n" +
`Set how Airbyte handles syncs when it detects a non-breaking schema change in the source`,
},
"prefix": schema.StringAttribute{
- Computed: true,
- Description: `Prefix that will be prepended to the name of each stream when it is written to the destination (ex. “airbyte_” causes “projects” => “airbyte_projects”).`,
+ Computed: true,
},
"schedule": schema.SingleNestedAttribute{
Computed: true,
@@ -161,14 +126,8 @@ func (r *ConnectionDataSource) Schema(ctx context.Context, req datasource.Schema
Computed: true,
},
"schedule_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "manual",
- "cron",
- ),
- },
- Description: `must be one of ["manual", "cron"]`,
+ Computed: true,
+ Description: `must be one of ["manual", "cron", "basic"]`,
},
},
Description: `schedule for when the the connection should run, per the schedule type`,
@@ -177,14 +136,7 @@ func (r *ConnectionDataSource) Schema(ctx context.Context, req datasource.Schema
Computed: true,
},
"status": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "active",
- "inactive",
- "deprecated",
- ),
- },
+ Computed: true,
Description: `must be one of ["active", "inactive", "deprecated"]`,
},
"workspace_id": schema.StringAttribute{
diff --git a/internal/provider/connection_data_source_sdk.go b/internal/provider/connection_data_source_sdk.go
old mode 100755
new mode 100644
index f2feb82d8..a9315a0ec
--- a/internal/provider/connection_data_source_sdk.go
+++ b/internal/provider/connection_data_source_sdk.go
@@ -3,16 +3,15 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *ConnectionDataSourceModel) RefreshFromGetResponse(resp *shared.ConnectionResponse) {
- if r.Configurations == nil {
- r.Configurations = &StreamConfigurations{}
+ if len(r.Configurations.Streams) > len(resp.Configurations.Streams) {
+ r.Configurations.Streams = r.Configurations.Streams[:len(resp.Configurations.Streams)]
}
- r.Configurations.Streams = nil
- for _, streamsItem := range resp.Configurations.Streams {
+ for streamsCount, streamsItem := range resp.Configurations.Streams {
var streams1 StreamConfiguration
streams1.CursorField = nil
for _, v := range streamsItem.CursorField {
@@ -33,10 +32,21 @@ func (r *ConnectionDataSourceModel) RefreshFromGetResponse(resp *shared.Connecti
} else {
streams1.SyncMode = types.StringNull()
}
- r.Configurations.Streams = append(r.Configurations.Streams, streams1)
+ if streamsCount+1 > len(r.Configurations.Streams) {
+ r.Configurations.Streams = append(r.Configurations.Streams, streams1)
+ } else {
+ r.Configurations.Streams[streamsCount].CursorField = streams1.CursorField
+ r.Configurations.Streams[streamsCount].Name = streams1.Name
+ r.Configurations.Streams[streamsCount].PrimaryKey = streams1.PrimaryKey
+ r.Configurations.Streams[streamsCount].SyncMode = streams1.SyncMode
+ }
}
r.ConnectionID = types.StringValue(resp.ConnectionID)
- r.DataResidency = types.StringValue(string(resp.DataResidency))
+ if resp.DataResidency != nil {
+ r.DataResidency = types.StringValue(string(*resp.DataResidency))
+ } else {
+ r.DataResidency = types.StringNull()
+ }
r.DestinationID = types.StringValue(resp.DestinationID)
r.Name = types.StringValue(resp.Name)
if resp.NamespaceDefinition != nil {
@@ -59,9 +69,6 @@ func (r *ConnectionDataSourceModel) RefreshFromGetResponse(resp *shared.Connecti
} else {
r.Prefix = types.StringNull()
}
- if r.Schedule == nil {
- r.Schedule = &ConnectionSchedule{}
- }
if resp.Schedule.BasicTiming != nil {
r.Schedule.BasicTiming = types.StringValue(*resp.Schedule.BasicTiming)
} else {
diff --git a/internal/provider/connection_resource.go b/internal/provider/connection_resource.go
old mode 100755
new mode 100644
index 7ea6c776a..9ee28c084
--- a/internal/provider/connection_resource.go
+++ b/internal/provider/connection_resource.go
@@ -3,19 +3,20 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_listplanmodifier "airbyte/internal/planmodifiers/listplanmodifier"
- speakeasy_objectplanmodifier "airbyte/internal/planmodifiers/objectplanmodifier"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_listplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/listplanmodifier"
+ speakeasy_objectplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/objectplanmodifier"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -106,7 +107,8 @@ func (r *ConnectionResource) Schema(ctx context.Context, req resource.SchemaRequ
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["full_refresh_overwrite", "full_refresh_append", "incremental_append", "incremental_deduped_history"]`,
Validators: []validator.String{
stringvalidator.OneOf(
"full_refresh_overwrite",
@@ -115,7 +117,6 @@ func (r *ConnectionResource) Schema(ctx context.Context, req resource.SchemaRequ
"incremental_deduped_history",
),
},
- Description: `must be one of ["full_refresh_overwrite", "full_refresh_append", "incremental_append", "incremental_deduped_history"]`,
},
},
},
@@ -134,7 +135,8 @@ func (r *ConnectionResource) Schema(ctx context.Context, req resource.SchemaRequ
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["auto", "us", "eu"]; Default: "auto"`,
Validators: []validator.String{
stringvalidator.OneOf(
"auto",
@@ -142,10 +144,10 @@ func (r *ConnectionResource) Schema(ctx context.Context, req resource.SchemaRequ
"eu",
),
},
- Description: `must be one of ["auto", "us", "eu"]`,
},
"destination_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
speakeasy_stringplanmodifier.SuppressDiff(),
},
Required: true,
@@ -164,6 +166,8 @@ func (r *ConnectionResource) Schema(ctx context.Context, req resource.SchemaRequ
speakeasy_stringplanmodifier.SuppressDiff(),
},
Optional: true,
+ MarkdownDescription: `must be one of ["source", "destination", "custom_format"]; Default: "destination"` + "\n" +
+ `Define the location where the data will be stored in the destination`,
Validators: []validator.String{
stringvalidator.OneOf(
"source",
@@ -171,16 +175,15 @@ func (r *ConnectionResource) Schema(ctx context.Context, req resource.SchemaRequ
"custom_format",
),
},
- MarkdownDescription: `must be one of ["source", "destination", "custom_format"]` + "\n" +
- `Define the location where the data will be stored in the destination`,
},
"namespace_format": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Optional: true,
- Description: `Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.`,
+ Optional: true,
+ MarkdownDescription: `Default: null` + "\n" +
+ `Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.`,
},
"non_breaking_schema_updates_behavior": schema.StringAttribute{
Computed: true,
@@ -188,6 +191,8 @@ func (r *ConnectionResource) Schema(ctx context.Context, req resource.SchemaRequ
speakeasy_stringplanmodifier.SuppressDiff(),
},
Optional: true,
+ MarkdownDescription: `must be one of ["ignore", "disable_connection", "propagate_columns", "propagate_fully"]; Default: "ignore"` + "\n" +
+ `Set how Airbyte handles syncs when it detects a non-breaking schema change in the source`,
Validators: []validator.String{
stringvalidator.OneOf(
"ignore",
@@ -196,8 +201,6 @@ func (r *ConnectionResource) Schema(ctx context.Context, req resource.SchemaRequ
"propagate_fully",
),
},
- MarkdownDescription: `must be one of ["ignore", "disable_connection", "propagate_columns", "propagate_fully"]` + "\n" +
- `Set how Airbyte handles syncs when it detects a non-breaking schema change in the source`,
},
"prefix": schema.StringAttribute{
Computed: true,
@@ -231,20 +234,21 @@ func (r *ConnectionResource) Schema(ctx context.Context, req resource.SchemaRequ
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `must be one of ["manual", "cron"]`,
Validators: []validator.String{
stringvalidator.OneOf(
"manual",
"cron",
),
},
- Description: `must be one of ["manual", "cron"]`,
},
},
Description: `schedule for when the the connection should run, per the schedule type`,
},
"source_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
speakeasy_stringplanmodifier.SuppressDiff(),
},
Required: true,
@@ -254,7 +258,8 @@ func (r *ConnectionResource) Schema(ctx context.Context, req resource.SchemaRequ
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["active", "inactive", "deprecated"]`,
Validators: []validator.String{
stringvalidator.OneOf(
"active",
@@ -262,7 +267,6 @@ func (r *ConnectionResource) Schema(ctx context.Context, req resource.SchemaRequ
"deprecated",
),
},
- Description: `must be one of ["active", "inactive", "deprecated"]`,
},
"workspace_id": schema.StringAttribute{
Computed: true,
@@ -468,5 +472,5 @@ func (r *ConnectionResource) Delete(ctx context.Context, req resource.DeleteRequ
}
func (r *ConnectionResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("connection_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("connection_id"), req.ID)...)
}
diff --git a/internal/provider/connection_resource_sdk.go b/internal/provider/connection_resource_sdk.go
old mode 100755
new mode 100644
index 1760666f7..b679b7526
--- a/internal/provider/connection_resource_sdk.go
+++ b/internal/provider/connection_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -235,8 +235,10 @@ func (r *ConnectionResourceModel) RefreshFromGetResponse(resp *shared.Connection
if r.Configurations == nil {
r.Configurations = &StreamConfigurations{}
}
- r.Configurations.Streams = nil
- for _, streamsItem := range resp.Configurations.Streams {
+ if len(r.Configurations.Streams) > len(resp.Configurations.Streams) {
+ r.Configurations.Streams = r.Configurations.Streams[:len(resp.Configurations.Streams)]
+ }
+ for streamsCount, streamsItem := range resp.Configurations.Streams {
var streams1 StreamConfiguration
streams1.CursorField = nil
for _, v := range streamsItem.CursorField {
@@ -257,10 +259,21 @@ func (r *ConnectionResourceModel) RefreshFromGetResponse(resp *shared.Connection
} else {
streams1.SyncMode = types.StringNull()
}
- r.Configurations.Streams = append(r.Configurations.Streams, streams1)
+ if streamsCount+1 > len(r.Configurations.Streams) {
+ r.Configurations.Streams = append(r.Configurations.Streams, streams1)
+ } else {
+ r.Configurations.Streams[streamsCount].CursorField = streams1.CursorField
+ r.Configurations.Streams[streamsCount].Name = streams1.Name
+ r.Configurations.Streams[streamsCount].PrimaryKey = streams1.PrimaryKey
+ r.Configurations.Streams[streamsCount].SyncMode = streams1.SyncMode
+ }
}
r.ConnectionID = types.StringValue(resp.ConnectionID)
- r.DataResidency = types.StringValue(string(resp.DataResidency))
+ if resp.DataResidency != nil {
+ r.DataResidency = types.StringValue(string(*resp.DataResidency))
+ } else {
+ r.DataResidency = types.StringNull()
+ }
r.DestinationID = types.StringValue(resp.DestinationID)
r.Name = types.StringValue(resp.Name)
if resp.NamespaceDefinition != nil {
diff --git a/internal/provider/destination_awsdatalake_data_source.go b/internal/provider/destination_awsdatalake_data_source.go
old mode 100755
new mode 100644
index b7aec0a84..0642e9085
--- a/internal/provider/destination_awsdatalake_data_source.go
+++ b/internal/provider/destination_awsdatalake_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationAwsDatalakeDataSource struct {
// DestinationAwsDatalakeDataSourceModel describes the data model.
type DestinationAwsDatalakeDataSourceModel struct {
- Configuration DestinationAwsDatalake1 `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,320 +47,17 @@ func (r *DestinationAwsDatalakeDataSource) Schema(ctx context.Context, req datas
MarkdownDescription: "DestinationAwsDatalake DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "aws_account_id": schema.StringAttribute{
- Computed: true,
- Description: `target aws account id`,
- },
- "bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the S3 bucket. Read more here.`,
- },
- "bucket_prefix": schema.StringAttribute{
- Computed: true,
- Description: `S3 prefix`,
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_aws_datalake_authentication_mode_iam_role": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "IAM Role",
- ),
- },
- MarkdownDescription: `must be one of ["IAM Role"]` + "\n" +
- `Name of the credentials`,
- },
- "role_arn": schema.StringAttribute{
- Computed: true,
- Description: `Will assume this role to write data to s3`,
- },
- },
- Description: `Choose How to Authenticate to AWS.`,
- },
- "destination_aws_datalake_authentication_mode_iam_user": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "aws_access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `AWS User Access Key Id`,
- },
- "aws_secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `Secret Access Key`,
- },
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "IAM User",
- ),
- },
- MarkdownDescription: `must be one of ["IAM User"]` + "\n" +
- `Name of the credentials`,
- },
- },
- Description: `Choose How to Authenticate to AWS.`,
- },
- "destination_aws_datalake_update_authentication_mode_iam_role": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "IAM Role",
- ),
- },
- MarkdownDescription: `must be one of ["IAM Role"]` + "\n" +
- `Name of the credentials`,
- },
- "role_arn": schema.StringAttribute{
- Computed: true,
- Description: `Will assume this role to write data to s3`,
- },
- },
- Description: `Choose How to Authenticate to AWS.`,
- },
- "destination_aws_datalake_update_authentication_mode_iam_user": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "aws_access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `AWS User Access Key Id`,
- },
- "aws_secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `Secret Access Key`,
- },
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "IAM User",
- ),
- },
- MarkdownDescription: `must be one of ["IAM User"]` + "\n" +
- `Name of the credentials`,
- },
- },
- Description: `Choose How to Authenticate to AWS.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Choose How to Authenticate to AWS.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aws-datalake",
- ),
- },
- Description: `must be one of ["aws-datalake"]`,
- },
- "format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "GZIP",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "GZIP"]` + "\n" +
- `The compression algorithm used to compress data.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Format of the data output.`,
- },
- "destination_aws_datalake_output_format_wildcard_parquet_columnar_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "SNAPPY",
- "GZIP",
- "ZSTD",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]` + "\n" +
- `The compression algorithm used to compress data.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Parquet",
- ),
- },
- Description: `must be one of ["Parquet"]`,
- },
- },
- Description: `Format of the data output.`,
- },
- "destination_aws_datalake_update_output_format_wildcard_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "GZIP",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "GZIP"]` + "\n" +
- `The compression algorithm used to compress data.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Format of the data output.`,
- },
- "destination_aws_datalake_update_output_format_wildcard_parquet_columnar_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "SNAPPY",
- "GZIP",
- "ZSTD",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]` + "\n" +
- `The compression algorithm used to compress data.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Parquet",
- ),
- },
- Description: `must be one of ["Parquet"]`,
- },
- },
- Description: `Format of the data output.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Format of the data output.`,
- },
- "glue_catalog_float_as_decimal": schema.BoolAttribute{
- Computed: true,
- Description: `Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source.`,
- },
- "lakeformation_database_default_tag_key": schema.StringAttribute{
- Computed: true,
- Description: `Add a default tag key to databases created by this destination`,
- },
- "lakeformation_database_default_tag_values": schema.StringAttribute{
- Computed: true,
- Description: `Add default values for the ` + "`" + `Tag Key` + "`" + ` to databases created by this destination. Comma separate for multiple values.`,
- },
- "lakeformation_database_name": schema.StringAttribute{
- Computed: true,
- Description: `The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.`,
- },
- "lakeformation_governed_tables": schema.BoolAttribute{
- Computed: true,
- Description: `Whether to create tables as LF governed tables.`,
- },
- "partitioning": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO PARTITIONING",
- "DATE",
- "YEAR",
- "MONTH",
- "DAY",
- "YEAR/MONTH",
- "YEAR/MONTH/DAY",
- ),
- },
- MarkdownDescription: `must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]` + "\n" +
- `Partition data by cursor fields when a cursor field is a date`,
- },
- "region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "",
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- "us-gov-east-1",
- "us-gov-west-1",
- ),
- },
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the S3 bucket. See here for all region codes.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_awsdatalake_data_source_sdk.go b/internal/provider/destination_awsdatalake_data_source_sdk.go
old mode 100755
new mode 100644
index 483e38c83..3c9050b33
--- a/internal/provider/destination_awsdatalake_data_source_sdk.go
+++ b/internal/provider/destination_awsdatalake_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationAwsDatalakeDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_awsdatalake_resource.go b/internal/provider/destination_awsdatalake_resource.go
old mode 100755
new mode 100644
index 33c71003c..fd64754cb
--- a/internal/provider/destination_awsdatalake_resource.go
+++ b/internal/provider/destination_awsdatalake_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationAwsDatalakeResource struct {
// DestinationAwsDatalakeResourceModel describes the resource data model.
type DestinationAwsDatalakeResourceModel struct {
Configuration DestinationAwsDatalake `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -69,19 +71,9 @@ func (r *DestinationAwsDatalakeResource) Schema(ctx context.Context, req resourc
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_aws_datalake_authentication_mode_iam_role": schema.SingleNestedAttribute{
+ "iam_role": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "credentials_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "IAM Role",
- ),
- },
- MarkdownDescription: `must be one of ["IAM Role"]` + "\n" +
- `Name of the credentials`,
- },
"role_arn": schema.StringAttribute{
Required: true,
Description: `Will assume this role to write data to s3`,
@@ -89,177 +81,64 @@ func (r *DestinationAwsDatalakeResource) Schema(ctx context.Context, req resourc
},
Description: `Choose How to Authenticate to AWS.`,
},
- "destination_aws_datalake_authentication_mode_iam_user": schema.SingleNestedAttribute{
+ "iam_user": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"aws_access_key_id": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `AWS User Access Key Id`,
},
"aws_secret_access_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Secret Access Key`,
},
- "credentials_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "IAM User",
- ),
- },
- MarkdownDescription: `must be one of ["IAM User"]` + "\n" +
- `Name of the credentials`,
- },
- },
- Description: `Choose How to Authenticate to AWS.`,
- },
- "destination_aws_datalake_update_authentication_mode_iam_role": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "credentials_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "IAM Role",
- ),
- },
- MarkdownDescription: `must be one of ["IAM Role"]` + "\n" +
- `Name of the credentials`,
- },
- "role_arn": schema.StringAttribute{
- Required: true,
- Description: `Will assume this role to write data to s3`,
- },
- },
- Description: `Choose How to Authenticate to AWS.`,
- },
- "destination_aws_datalake_update_authentication_mode_iam_user": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "aws_access_key_id": schema.StringAttribute{
- Required: true,
- Description: `AWS User Access Key Id`,
- },
- "aws_secret_access_key": schema.StringAttribute{
- Required: true,
- Description: `Secret Access Key`,
- },
- "credentials_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "IAM User",
- ),
- },
- MarkdownDescription: `must be one of ["IAM User"]` + "\n" +
- `Name of the credentials`,
- },
},
Description: `Choose How to Authenticate to AWS.`,
},
},
+ Description: `Choose How to Authenticate to AWS.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Choose How to Authenticate to AWS.`,
- },
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aws-datalake",
- ),
- },
- Description: `must be one of ["aws-datalake"]`,
},
"format": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "GZIP",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "GZIP"]` + "\n" +
- `The compression algorithm used to compress data.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Format of the data output.`,
- },
- "destination_aws_datalake_output_format_wildcard_parquet_columnar_storage": schema.SingleNestedAttribute{
+ "json_lines_newline_delimited_json": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_codec": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "SNAPPY",
- "GZIP",
- "ZSTD",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]` + "\n" +
+ MarkdownDescription: `must be one of ["UNCOMPRESSED", "GZIP"]; Default: "UNCOMPRESSED"` + "\n" +
`The compression algorithm used to compress data.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Parquet",
- ),
- },
- Description: `must be one of ["Parquet"]`,
- },
- },
- Description: `Format of the data output.`,
- },
- "destination_aws_datalake_update_output_format_wildcard_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.StringAttribute{
- Optional: true,
Validators: []validator.String{
stringvalidator.OneOf(
"UNCOMPRESSED",
"GZIP",
),
},
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "GZIP"]` + "\n" +
- `The compression algorithm used to compress data.`,
},
"format_type": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["JSONL"]; Default: "JSONL"`,
Validators: []validator.String{
stringvalidator.OneOf(
"JSONL",
),
},
- Description: `must be one of ["JSONL"]`,
},
},
Description: `Format of the data output.`,
},
- "destination_aws_datalake_update_output_format_wildcard_parquet_columnar_storage": schema.SingleNestedAttribute{
+ "parquet_columnar_storage": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_codec": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]; Default: "SNAPPY"` + "\n" +
+ `The compression algorithm used to compress data.`,
Validators: []validator.String{
stringvalidator.OneOf(
"UNCOMPRESSED",
@@ -268,33 +147,33 @@ func (r *DestinationAwsDatalakeResource) Schema(ctx context.Context, req resourc
"ZSTD",
),
},
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]` + "\n" +
- `The compression algorithm used to compress data.`,
},
"format_type": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["Parquet"]; Default: "Parquet"`,
Validators: []validator.String{
stringvalidator.OneOf(
"Parquet",
),
},
- Description: `must be one of ["Parquet"]`,
},
},
Description: `Format of the data output.`,
},
},
+ Description: `Format of the data output.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Format of the data output.`,
},
"glue_catalog_float_as_decimal": schema.BoolAttribute{
- Optional: true,
- Description: `Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source.`,
},
"lakeformation_database_default_tag_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Add a default tag key to databases created by this destination`,
},
"lakeformation_database_default_tag_values": schema.StringAttribute{
@@ -306,11 +185,14 @@ func (r *DestinationAwsDatalakeResource) Schema(ctx context.Context, req resourc
Description: `The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.`,
},
"lakeformation_governed_tables": schema.BoolAttribute{
- Optional: true,
- Description: `Whether to create tables as LF governed tables.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether to create tables as LF governed tables.`,
},
"partitioning": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]; Default: "NO PARTITIONING"` + "\n" +
+ `Partition data by cursor fields when a cursor field is a date`,
Validators: []validator.String{
stringvalidator.OneOf(
"NO PARTITIONING",
@@ -322,11 +204,11 @@ func (r *DestinationAwsDatalakeResource) Schema(ctx context.Context, req resourc
"YEAR/MONTH/DAY",
),
},
- MarkdownDescription: `must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]` + "\n" +
- `Partition data by cursor fields when a cursor field is a date`,
},
"region": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""` + "\n" +
+ `The region of the S3 bucket. See here for all region codes.`,
Validators: []validator.String{
stringvalidator.OneOf(
"",
@@ -357,11 +239,16 @@ func (r *DestinationAwsDatalakeResource) Schema(ctx context.Context, req resourc
"us-gov-west-1",
),
},
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the S3 bucket. See here for all region codes.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -378,7 +265,8 @@ func (r *DestinationAwsDatalakeResource) Schema(ctx context.Context, req resourc
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -428,7 +316,7 @@ func (r *DestinationAwsDatalakeResource) Create(ctx context.Context, req resourc
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationAwsDatalake(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -604,5 +492,5 @@ func (r *DestinationAwsDatalakeResource) Delete(ctx context.Context, req resourc
}
func (r *DestinationAwsDatalakeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_awsdatalake_resource_sdk.go b/internal/provider/destination_awsdatalake_resource_sdk.go
old mode 100755
new mode 100644
index 4c8d1a063..ed03834c1
--- a/internal/provider/destination_awsdatalake_resource_sdk.go
+++ b/internal/provider/destination_awsdatalake_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -22,75 +22,80 @@ func (r *DestinationAwsDatalakeResourceModel) ToCreateSDKType() *shared.Destinat
bucketPrefix = nil
}
var credentials shared.DestinationAwsDatalakeAuthenticationMode
- var destinationAwsDatalakeAuthenticationModeIAMRole *shared.DestinationAwsDatalakeAuthenticationModeIAMRole
- if r.Configuration.Credentials.DestinationAwsDatalakeAuthenticationModeIAMRole != nil {
- credentialsTitle := shared.DestinationAwsDatalakeAuthenticationModeIAMRoleCredentialsTitle(r.Configuration.Credentials.DestinationAwsDatalakeAuthenticationModeIAMRole.CredentialsTitle.ValueString())
- roleArn := r.Configuration.Credentials.DestinationAwsDatalakeAuthenticationModeIAMRole.RoleArn.ValueString()
- destinationAwsDatalakeAuthenticationModeIAMRole = &shared.DestinationAwsDatalakeAuthenticationModeIAMRole{
- CredentialsTitle: credentialsTitle,
- RoleArn: roleArn,
+ var destinationAwsDatalakeIAMRole *shared.DestinationAwsDatalakeIAMRole
+ if r.Configuration.Credentials.IAMRole != nil {
+ roleArn := r.Configuration.Credentials.IAMRole.RoleArn.ValueString()
+ destinationAwsDatalakeIAMRole = &shared.DestinationAwsDatalakeIAMRole{
+ RoleArn: roleArn,
}
}
- if destinationAwsDatalakeAuthenticationModeIAMRole != nil {
+ if destinationAwsDatalakeIAMRole != nil {
credentials = shared.DestinationAwsDatalakeAuthenticationMode{
- DestinationAwsDatalakeAuthenticationModeIAMRole: destinationAwsDatalakeAuthenticationModeIAMRole,
+ DestinationAwsDatalakeIAMRole: destinationAwsDatalakeIAMRole,
}
}
- var destinationAwsDatalakeAuthenticationModeIAMUser *shared.DestinationAwsDatalakeAuthenticationModeIAMUser
- if r.Configuration.Credentials.DestinationAwsDatalakeAuthenticationModeIAMUser != nil {
- awsAccessKeyID := r.Configuration.Credentials.DestinationAwsDatalakeAuthenticationModeIAMUser.AwsAccessKeyID.ValueString()
- awsSecretAccessKey := r.Configuration.Credentials.DestinationAwsDatalakeAuthenticationModeIAMUser.AwsSecretAccessKey.ValueString()
- credentialsTitle1 := shared.DestinationAwsDatalakeAuthenticationModeIAMUserCredentialsTitle(r.Configuration.Credentials.DestinationAwsDatalakeAuthenticationModeIAMUser.CredentialsTitle.ValueString())
- destinationAwsDatalakeAuthenticationModeIAMUser = &shared.DestinationAwsDatalakeAuthenticationModeIAMUser{
+ var destinationAwsDatalakeIAMUser *shared.DestinationAwsDatalakeIAMUser
+ if r.Configuration.Credentials.IAMUser != nil {
+ awsAccessKeyID := r.Configuration.Credentials.IAMUser.AwsAccessKeyID.ValueString()
+ awsSecretAccessKey := r.Configuration.Credentials.IAMUser.AwsSecretAccessKey.ValueString()
+ destinationAwsDatalakeIAMUser = &shared.DestinationAwsDatalakeIAMUser{
AwsAccessKeyID: awsAccessKeyID,
AwsSecretAccessKey: awsSecretAccessKey,
- CredentialsTitle: credentialsTitle1,
}
}
- if destinationAwsDatalakeAuthenticationModeIAMUser != nil {
+ if destinationAwsDatalakeIAMUser != nil {
credentials = shared.DestinationAwsDatalakeAuthenticationMode{
- DestinationAwsDatalakeAuthenticationModeIAMUser: destinationAwsDatalakeAuthenticationModeIAMUser,
+ DestinationAwsDatalakeIAMUser: destinationAwsDatalakeIAMUser,
}
}
- destinationType := shared.DestinationAwsDatalakeAwsDatalake(r.Configuration.DestinationType.ValueString())
var format *shared.DestinationAwsDatalakeOutputFormatWildcard
if r.Configuration.Format != nil {
- var destinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON *shared.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON != nil {
- compressionCodec := new(shared.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional)
- if !r.Configuration.Format.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON.CompressionCodec.IsUnknown() && !r.Configuration.Format.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON.CompressionCodec.IsNull() {
- *compressionCodec = shared.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional(r.Configuration.Format.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON.CompressionCodec.ValueString())
+ var destinationAwsDatalakeJSONLinesNewlineDelimitedJSON *shared.DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON != nil {
+ compressionCodec := new(shared.DestinationAwsDatalakeCompressionCodecOptional)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.CompressionCodec.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.CompressionCodec.IsNull() {
+ *compressionCodec = shared.DestinationAwsDatalakeCompressionCodecOptional(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.CompressionCodec.ValueString())
} else {
compressionCodec = nil
}
- formatType := shared.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard(r.Configuration.Format.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- destinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON = &shared.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON{
+ formatType := new(shared.DestinationAwsDatalakeFormatTypeWildcard)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsNull() {
+ *formatType = shared.DestinationAwsDatalakeFormatTypeWildcard(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.ValueString())
+ } else {
+ formatType = nil
+ }
+ destinationAwsDatalakeJSONLinesNewlineDelimitedJSON = &shared.DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON{
CompressionCodec: compressionCodec,
FormatType: formatType,
}
}
- if destinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON != nil {
+ if destinationAwsDatalakeJSONLinesNewlineDelimitedJSON != nil {
format = &shared.DestinationAwsDatalakeOutputFormatWildcard{
- DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON: destinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON,
+ DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON: destinationAwsDatalakeJSONLinesNewlineDelimitedJSON,
}
}
- var destinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage *shared.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage
- if r.Configuration.Format.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage != nil {
- compressionCodec1 := new(shared.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional)
- if !r.Configuration.Format.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage.CompressionCodec.IsNull() {
- *compressionCodec1 = shared.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional(r.Configuration.Format.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage.CompressionCodec.ValueString())
+ var destinationAwsDatalakeParquetColumnarStorage *shared.DestinationAwsDatalakeParquetColumnarStorage
+ if r.Configuration.Format.ParquetColumnarStorage != nil {
+ compressionCodec1 := new(shared.DestinationAwsDatalakeSchemasCompressionCodecOptional)
+ if !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsNull() {
+ *compressionCodec1 = shared.DestinationAwsDatalakeSchemasCompressionCodecOptional(r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.ValueString())
} else {
compressionCodec1 = nil
}
- formatType1 := shared.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard(r.Configuration.Format.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage.FormatType.ValueString())
- destinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage = &shared.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage{
+ formatType1 := new(shared.DestinationAwsDatalakeSchemasFormatTypeWildcard)
+ if !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsNull() {
+ *formatType1 = shared.DestinationAwsDatalakeSchemasFormatTypeWildcard(r.Configuration.Format.ParquetColumnarStorage.FormatType.ValueString())
+ } else {
+ formatType1 = nil
+ }
+ destinationAwsDatalakeParquetColumnarStorage = &shared.DestinationAwsDatalakeParquetColumnarStorage{
CompressionCodec: compressionCodec1,
FormatType: formatType1,
}
}
- if destinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage != nil {
+ if destinationAwsDatalakeParquetColumnarStorage != nil {
format = &shared.DestinationAwsDatalakeOutputFormatWildcard{
- DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage: destinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage,
+ DestinationAwsDatalakeParquetColumnarStorage: destinationAwsDatalakeParquetColumnarStorage,
}
}
}
@@ -125,13 +130,17 @@ func (r *DestinationAwsDatalakeResourceModel) ToCreateSDKType() *shared.Destinat
} else {
partitioning = nil
}
- region := shared.DestinationAwsDatalakeS3BucketRegion(r.Configuration.Region.ValueString())
+ region := new(shared.DestinationAwsDatalakeS3BucketRegion)
+ if !r.Configuration.Region.IsUnknown() && !r.Configuration.Region.IsNull() {
+ *region = shared.DestinationAwsDatalakeS3BucketRegion(r.Configuration.Region.ValueString())
+ } else {
+ region = nil
+ }
configuration := shared.DestinationAwsDatalake{
AwsAccountID: awsAccountID,
BucketName: bucketName,
BucketPrefix: bucketPrefix,
Credentials: credentials,
- DestinationType: destinationType,
Format: format,
GlueCatalogFloatAsDecimal: glueCatalogFloatAsDecimal,
LakeformationDatabaseDefaultTagKey: lakeformationDatabaseDefaultTagKey,
@@ -141,10 +150,17 @@ func (r *DestinationAwsDatalakeResourceModel) ToCreateSDKType() *shared.Destinat
Partitioning: partitioning,
Region: region,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationAwsDatalakeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -170,75 +186,81 @@ func (r *DestinationAwsDatalakeResourceModel) ToUpdateSDKType() *shared.Destinat
} else {
bucketPrefix = nil
}
- var credentials shared.DestinationAwsDatalakeUpdateAuthenticationMode
- var destinationAwsDatalakeUpdateAuthenticationModeIAMRole *shared.DestinationAwsDatalakeUpdateAuthenticationModeIAMRole
- if r.Configuration.Credentials.DestinationAwsDatalakeUpdateAuthenticationModeIAMRole != nil {
- credentialsTitle := shared.DestinationAwsDatalakeUpdateAuthenticationModeIAMRoleCredentialsTitle(r.Configuration.Credentials.DestinationAwsDatalakeUpdateAuthenticationModeIAMRole.CredentialsTitle.ValueString())
- roleArn := r.Configuration.Credentials.DestinationAwsDatalakeUpdateAuthenticationModeIAMRole.RoleArn.ValueString()
- destinationAwsDatalakeUpdateAuthenticationModeIAMRole = &shared.DestinationAwsDatalakeUpdateAuthenticationModeIAMRole{
- CredentialsTitle: credentialsTitle,
- RoleArn: roleArn,
+ var credentials shared.AuthenticationMode
+ var iamRole *shared.IAMRole
+ if r.Configuration.Credentials.IAMRole != nil {
+ roleArn := r.Configuration.Credentials.IAMRole.RoleArn.ValueString()
+ iamRole = &shared.IAMRole{
+ RoleArn: roleArn,
}
}
- if destinationAwsDatalakeUpdateAuthenticationModeIAMRole != nil {
- credentials = shared.DestinationAwsDatalakeUpdateAuthenticationMode{
- DestinationAwsDatalakeUpdateAuthenticationModeIAMRole: destinationAwsDatalakeUpdateAuthenticationModeIAMRole,
+ if iamRole != nil {
+ credentials = shared.AuthenticationMode{
+ IAMRole: iamRole,
}
}
- var destinationAwsDatalakeUpdateAuthenticationModeIAMUser *shared.DestinationAwsDatalakeUpdateAuthenticationModeIAMUser
- if r.Configuration.Credentials.DestinationAwsDatalakeUpdateAuthenticationModeIAMUser != nil {
- awsAccessKeyID := r.Configuration.Credentials.DestinationAwsDatalakeUpdateAuthenticationModeIAMUser.AwsAccessKeyID.ValueString()
- awsSecretAccessKey := r.Configuration.Credentials.DestinationAwsDatalakeUpdateAuthenticationModeIAMUser.AwsSecretAccessKey.ValueString()
- credentialsTitle1 := shared.DestinationAwsDatalakeUpdateAuthenticationModeIAMUserCredentialsTitle(r.Configuration.Credentials.DestinationAwsDatalakeUpdateAuthenticationModeIAMUser.CredentialsTitle.ValueString())
- destinationAwsDatalakeUpdateAuthenticationModeIAMUser = &shared.DestinationAwsDatalakeUpdateAuthenticationModeIAMUser{
+ var iamUser *shared.IAMUser
+ if r.Configuration.Credentials.IAMUser != nil {
+ awsAccessKeyID := r.Configuration.Credentials.IAMUser.AwsAccessKeyID.ValueString()
+ awsSecretAccessKey := r.Configuration.Credentials.IAMUser.AwsSecretAccessKey.ValueString()
+ iamUser = &shared.IAMUser{
AwsAccessKeyID: awsAccessKeyID,
AwsSecretAccessKey: awsSecretAccessKey,
- CredentialsTitle: credentialsTitle1,
}
}
- if destinationAwsDatalakeUpdateAuthenticationModeIAMUser != nil {
- credentials = shared.DestinationAwsDatalakeUpdateAuthenticationMode{
- DestinationAwsDatalakeUpdateAuthenticationModeIAMUser: destinationAwsDatalakeUpdateAuthenticationModeIAMUser,
+ if iamUser != nil {
+ credentials = shared.AuthenticationMode{
+ IAMUser: iamUser,
}
}
- var format *shared.DestinationAwsDatalakeUpdateOutputFormatWildcard
+ var format *shared.OutputFormatWildcard
if r.Configuration.Format != nil {
- var destinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON *shared.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON != nil {
- compressionCodec := new(shared.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional)
- if !r.Configuration.Format.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON.CompressionCodec.IsUnknown() && !r.Configuration.Format.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON.CompressionCodec.IsNull() {
- *compressionCodec = shared.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional(r.Configuration.Format.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON.CompressionCodec.ValueString())
+ var jsonLinesNewlineDelimitedJSON *shared.JSONLinesNewlineDelimitedJSON
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON != nil {
+ compressionCodec := new(shared.CompressionCodecOptional)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.CompressionCodec.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.CompressionCodec.IsNull() {
+ *compressionCodec = shared.CompressionCodecOptional(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.CompressionCodec.ValueString())
} else {
compressionCodec = nil
}
- formatType := shared.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard(r.Configuration.Format.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- destinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON = &shared.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON{
+ formatType := new(shared.FormatTypeWildcard)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsNull() {
+ *formatType = shared.FormatTypeWildcard(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.ValueString())
+ } else {
+ formatType = nil
+ }
+ jsonLinesNewlineDelimitedJSON = &shared.JSONLinesNewlineDelimitedJSON{
CompressionCodec: compressionCodec,
FormatType: formatType,
}
}
- if destinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON != nil {
- format = &shared.DestinationAwsDatalakeUpdateOutputFormatWildcard{
- DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON: destinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON,
+ if jsonLinesNewlineDelimitedJSON != nil {
+ format = &shared.OutputFormatWildcard{
+ JSONLinesNewlineDelimitedJSON: jsonLinesNewlineDelimitedJSON,
}
}
- var destinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage *shared.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage
- if r.Configuration.Format.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage != nil {
- compressionCodec1 := new(shared.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional)
- if !r.Configuration.Format.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage.CompressionCodec.IsNull() {
- *compressionCodec1 = shared.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional(r.Configuration.Format.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage.CompressionCodec.ValueString())
+ var parquetColumnarStorage *shared.ParquetColumnarStorage
+ if r.Configuration.Format.ParquetColumnarStorage != nil {
+ compressionCodec1 := new(shared.DestinationAwsDatalakeUpdateCompressionCodecOptional)
+ if !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsNull() {
+ *compressionCodec1 = shared.DestinationAwsDatalakeUpdateCompressionCodecOptional(r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.ValueString())
} else {
compressionCodec1 = nil
}
- formatType1 := shared.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard(r.Configuration.Format.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage.FormatType.ValueString())
- destinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage = &shared.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage{
+ formatType1 := new(shared.DestinationAwsDatalakeUpdateFormatTypeWildcard)
+ if !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsNull() {
+ *formatType1 = shared.DestinationAwsDatalakeUpdateFormatTypeWildcard(r.Configuration.Format.ParquetColumnarStorage.FormatType.ValueString())
+ } else {
+ formatType1 = nil
+ }
+ parquetColumnarStorage = &shared.ParquetColumnarStorage{
CompressionCodec: compressionCodec1,
FormatType: formatType1,
}
}
- if destinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage != nil {
- format = &shared.DestinationAwsDatalakeUpdateOutputFormatWildcard{
- DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage: destinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage,
+ if parquetColumnarStorage != nil {
+ format = &shared.OutputFormatWildcard{
+ ParquetColumnarStorage: parquetColumnarStorage,
}
}
}
@@ -267,13 +289,18 @@ func (r *DestinationAwsDatalakeResourceModel) ToUpdateSDKType() *shared.Destinat
} else {
lakeformationGovernedTables = nil
}
- partitioning := new(shared.DestinationAwsDatalakeUpdateChooseHowToPartitionData)
+ partitioning := new(shared.ChooseHowToPartitionData)
if !r.Configuration.Partitioning.IsUnknown() && !r.Configuration.Partitioning.IsNull() {
- *partitioning = shared.DestinationAwsDatalakeUpdateChooseHowToPartitionData(r.Configuration.Partitioning.ValueString())
+ *partitioning = shared.ChooseHowToPartitionData(r.Configuration.Partitioning.ValueString())
} else {
partitioning = nil
}
- region := shared.DestinationAwsDatalakeUpdateS3BucketRegion(r.Configuration.Region.ValueString())
+ region := new(shared.S3BucketRegion)
+ if !r.Configuration.Region.IsUnknown() && !r.Configuration.Region.IsNull() {
+ *region = shared.S3BucketRegion(r.Configuration.Region.ValueString())
+ } else {
+ region = nil
+ }
configuration := shared.DestinationAwsDatalakeUpdate{
AwsAccountID: awsAccountID,
BucketName: bucketName,
diff --git a/internal/provider/destination_azureblobstorage_data_source.go b/internal/provider/destination_azureblobstorage_data_source.go
old mode 100755
new mode 100644
index 29f43bcf3..766807d23
--- a/internal/provider/destination_azureblobstorage_data_source.go
+++ b/internal/provider/destination_azureblobstorage_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationAzureBlobStorageDataSource struct {
// DestinationAzureBlobStorageDataSourceModel describes the data model.
type DestinationAzureBlobStorageDataSourceModel struct {
- Configuration DestinationAzureBlobStorage `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,138 +47,17 @@ func (r *DestinationAzureBlobStorageDataSource) Schema(ctx context.Context, req
MarkdownDescription: "DestinationAzureBlobStorage DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "azure_blob_storage_account_key": schema.StringAttribute{
- Computed: true,
- Description: `The Azure blob storage account key.`,
- },
- "azure_blob_storage_account_name": schema.StringAttribute{
- Computed: true,
- Description: `The account's name of the Azure Blob Storage.`,
- },
- "azure_blob_storage_container_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the Azure blob storage container. If not exists - will be created automatically. May be empty, then will be created automatically airbytecontainer+timestamp`,
- },
- "azure_blob_storage_endpoint_domain_name": schema.StringAttribute{
- Computed: true,
- Description: `This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.`,
- },
- "azure_blob_storage_output_buffer_size": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of megabytes to buffer for the output stream to Azure. This will impact memory footprint on workers, but may need adjustment for performance and appropriate block size in Azure.`,
- },
- "azure_blob_storage_spill_size": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "azure-blob-storage",
- ),
- },
- Description: `must be one of ["azure-blob-storage"]`,
- },
- "format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_azure_blob_storage_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "flattening": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CSV",
- ),
- },
- Description: `must be one of ["CSV"]`,
- },
- },
- Description: `Output data format`,
- },
- "destination_azure_blob_storage_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Output data format`,
- },
- "destination_azure_blob_storage_update_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "flattening": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CSV",
- ),
- },
- Description: `must be one of ["CSV"]`,
- },
- },
- Description: `Output data format`,
- },
- "destination_azure_blob_storage_update_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Output data format`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Output data format`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_azureblobstorage_data_source_sdk.go b/internal/provider/destination_azureblobstorage_data_source_sdk.go
old mode 100755
new mode 100644
index 0fb873d0f..96acfa087
--- a/internal/provider/destination_azureblobstorage_data_source_sdk.go
+++ b/internal/provider/destination_azureblobstorage_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationAzureBlobStorageDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_azureblobstorage_resource.go b/internal/provider/destination_azureblobstorage_resource.go
old mode 100755
new mode 100644
index b231ba289..591a09469
--- a/internal/provider/destination_azureblobstorage_resource.go
+++ b/internal/provider/destination_azureblobstorage_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationAzureBlobStorageResource struct {
// DestinationAzureBlobStorageResourceModel describes the resource data model.
type DestinationAzureBlobStorageResourceModel struct {
Configuration DestinationAzureBlobStorage `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -56,6 +58,7 @@ func (r *DestinationAzureBlobStorageResource) Schema(ctx context.Context, req re
Attributes: map[string]schema.Attribute{
"azure_blob_storage_account_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Azure blob storage account key.`,
},
"azure_blob_storage_account_name": schema.StringAttribute{
@@ -67,119 +70,60 @@ func (r *DestinationAzureBlobStorageResource) Schema(ctx context.Context, req re
Description: `The name of the Azure blob storage container. If not exists - will be created automatically. May be empty, then will be created automatically airbytecontainer+timestamp`,
},
"azure_blob_storage_endpoint_domain_name": schema.StringAttribute{
- Optional: true,
- Description: `This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.`,
+ Optional: true,
+ MarkdownDescription: `Default: "blob.core.windows.net"` + "\n" +
+ `This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.`,
},
"azure_blob_storage_output_buffer_size": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of megabytes to buffer for the output stream to Azure. This will impact memory footprint on workers, but may need adjustment for performance and appropriate block size in Azure.`,
+ Optional: true,
+ MarkdownDescription: `Default: 5` + "\n" +
+ `The amount of megabytes to buffer for the output stream to Azure. This will impact memory footprint on workers, but may need adjustment for performance and appropriate block size in Azure.`,
},
"azure_blob_storage_spill_size": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable`,
- },
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "azure-blob-storage",
- ),
- },
- Description: `must be one of ["azure-blob-storage"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 500` + "\n" +
+ `The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable`,
},
"format": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_azure_blob_storage_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
+ "csv_comma_separated_values": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"flattening": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
+ Optional: true,
+ MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]; Default: "No flattening"` + "\n" +
`Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CSV",
- ),
- },
- Description: `must be one of ["CSV"]`,
- },
- },
- Description: `Output data format`,
- },
- "destination_azure_blob_storage_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Output data format`,
- },
- "destination_azure_blob_storage_update_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "flattening": schema.StringAttribute{
- Required: true,
Validators: []validator.String{
stringvalidator.OneOf(
"No flattening",
"Root level flattening",
),
},
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CSV",
- ),
- },
- Description: `must be one of ["CSV"]`,
},
},
Description: `Output data format`,
},
- "destination_azure_blob_storage_update_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
+ "json_lines_newline_delimited_json": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Output data format`,
},
},
+ Description: `Output data format`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Output data format`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -196,7 +140,8 @@ func (r *DestinationAzureBlobStorageResource) Schema(ctx context.Context, req re
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -246,7 +191,7 @@ func (r *DestinationAzureBlobStorageResource) Create(ctx context.Context, req re
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationAzureBlobStorage(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -422,5 +367,5 @@ func (r *DestinationAzureBlobStorageResource) Delete(ctx context.Context, req re
}
func (r *DestinationAzureBlobStorageResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_azureblobstorage_resource_sdk.go b/internal/provider/destination_azureblobstorage_resource_sdk.go
old mode 100755
new mode 100644
index eca6bb3a4..7d8b40364
--- a/internal/provider/destination_azureblobstorage_resource_sdk.go
+++ b/internal/provider/destination_azureblobstorage_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -34,32 +34,31 @@ func (r *DestinationAzureBlobStorageResourceModel) ToCreateSDKType() *shared.Des
} else {
azureBlobStorageSpillSize = nil
}
- destinationType := shared.DestinationAzureBlobStorageAzureBlobStorage(r.Configuration.DestinationType.ValueString())
var format shared.DestinationAzureBlobStorageOutputFormat
- var destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues *shared.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues
- if r.Configuration.Format.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues != nil {
- flattening := shared.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlattening(r.Configuration.Format.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues.Flattening.ValueString())
- formatType := shared.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesFormatType(r.Configuration.Format.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues.FormatType.ValueString())
- destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues = &shared.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues{
+ var destinationAzureBlobStorageCSVCommaSeparatedValues *shared.DestinationAzureBlobStorageCSVCommaSeparatedValues
+ if r.Configuration.Format.CSVCommaSeparatedValues != nil {
+ flattening := new(shared.DestinationAzureBlobStorageNormalizationFlattening)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsNull() {
+ *flattening = shared.DestinationAzureBlobStorageNormalizationFlattening(r.Configuration.Format.CSVCommaSeparatedValues.Flattening.ValueString())
+ } else {
+ flattening = nil
+ }
+ destinationAzureBlobStorageCSVCommaSeparatedValues = &shared.DestinationAzureBlobStorageCSVCommaSeparatedValues{
Flattening: flattening,
- FormatType: formatType,
}
}
- if destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues != nil {
+ if destinationAzureBlobStorageCSVCommaSeparatedValues != nil {
format = shared.DestinationAzureBlobStorageOutputFormat{
- DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues: destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues,
+ DestinationAzureBlobStorageCSVCommaSeparatedValues: destinationAzureBlobStorageCSVCommaSeparatedValues,
}
}
- var destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON *shared.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- formatType1 := shared.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSONFormatType(r.Configuration.Format.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON = &shared.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON{
- FormatType: formatType1,
- }
+ var destinationAzureBlobStorageJSONLinesNewlineDelimitedJSON *shared.DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON != nil {
+ destinationAzureBlobStorageJSONLinesNewlineDelimitedJSON = &shared.DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON{}
}
- if destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON != nil {
+ if destinationAzureBlobStorageJSONLinesNewlineDelimitedJSON != nil {
format = shared.DestinationAzureBlobStorageOutputFormat{
- DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON: destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON: destinationAzureBlobStorageJSONLinesNewlineDelimitedJSON,
}
}
configuration := shared.DestinationAzureBlobStorage{
@@ -69,13 +68,19 @@ func (r *DestinationAzureBlobStorageResourceModel) ToCreateSDKType() *shared.Des
AzureBlobStorageEndpointDomainName: azureBlobStorageEndpointDomainName,
AzureBlobStorageOutputBufferSize: azureBlobStorageOutputBufferSize,
AzureBlobStorageSpillSize: azureBlobStorageSpillSize,
- DestinationType: destinationType,
Format: format,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationAzureBlobStorageCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -114,31 +119,31 @@ func (r *DestinationAzureBlobStorageResourceModel) ToUpdateSDKType() *shared.Des
} else {
azureBlobStorageSpillSize = nil
}
- var format shared.DestinationAzureBlobStorageUpdateOutputFormat
- var destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues *shared.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues
- if r.Configuration.Format.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues != nil {
- flattening := shared.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlattening(r.Configuration.Format.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues.Flattening.ValueString())
- formatType := shared.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesFormatType(r.Configuration.Format.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues.FormatType.ValueString())
- destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues = &shared.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues{
+ var format shared.OutputFormat
+ var csvCommaSeparatedValues *shared.CSVCommaSeparatedValues
+ if r.Configuration.Format.CSVCommaSeparatedValues != nil {
+ flattening := new(shared.NormalizationFlattening)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsNull() {
+ *flattening = shared.NormalizationFlattening(r.Configuration.Format.CSVCommaSeparatedValues.Flattening.ValueString())
+ } else {
+ flattening = nil
+ }
+ csvCommaSeparatedValues = &shared.CSVCommaSeparatedValues{
Flattening: flattening,
- FormatType: formatType,
}
}
- if destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues != nil {
- format = shared.DestinationAzureBlobStorageUpdateOutputFormat{
- DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues: destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues,
+ if csvCommaSeparatedValues != nil {
+ format = shared.OutputFormat{
+ CSVCommaSeparatedValues: csvCommaSeparatedValues,
}
}
- var destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON *shared.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- formatType1 := shared.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType(r.Configuration.Format.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON = &shared.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON{
- FormatType: formatType1,
- }
+ var destinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON *shared.DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON != nil {
+ destinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON = &shared.DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON{}
}
- if destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- format = shared.DestinationAzureBlobStorageUpdateOutputFormat{
- DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON: destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON,
+ if destinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON != nil {
+ format = shared.OutputFormat{
+ DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON: destinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON,
}
}
configuration := shared.DestinationAzureBlobStorageUpdate{
diff --git a/internal/provider/destination_bigquery_data_source.go b/internal/provider/destination_bigquery_data_source.go
old mode 100755
new mode 100644
index 3a009203a..2d66e59cc
--- a/internal/provider/destination_bigquery_data_source.go
+++ b/internal/provider/destination_bigquery_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationBigqueryDataSource struct {
// DestinationBigqueryDataSourceModel describes the data model.
type DestinationBigqueryDataSourceModel struct {
- Configuration DestinationBigquery `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,282 +47,17 @@ func (r *DestinationBigqueryDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "DestinationBigquery DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "big_query_client_buffer_size_mb": schema.Int64Attribute{
- Computed: true,
- Description: `Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.`,
- },
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.`,
- },
- "dataset_id": schema.StringAttribute{
- Computed: true,
- Description: `The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.`,
- },
- "dataset_location": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "US",
- "EU",
- "asia-east1",
- "asia-east2",
- "asia-northeast1",
- "asia-northeast2",
- "asia-northeast3",
- "asia-south1",
- "asia-south2",
- "asia-southeast1",
- "asia-southeast2",
- "australia-southeast1",
- "australia-southeast2",
- "europe-central1",
- "europe-central2",
- "europe-north1",
- "europe-southwest1",
- "europe-west1",
- "europe-west2",
- "europe-west3",
- "europe-west4",
- "europe-west6",
- "europe-west7",
- "europe-west8",
- "europe-west9",
- "me-west1",
- "northamerica-northeast1",
- "northamerica-northeast2",
- "southamerica-east1",
- "southamerica-west1",
- "us-central1",
- "us-east1",
- "us-east2",
- "us-east3",
- "us-east4",
- "us-east5",
- "us-west1",
- "us-west2",
- "us-west3",
- "us-west4",
- ),
- },
- MarkdownDescription: `must be one of ["US", "EU", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central1", "europe-central2", "europe-north1", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west7", "europe-west8", "europe-west9", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east2", "us-east3", "us-east4", "us-east5", "us-west1", "us-west2", "us-west3", "us-west4"]` + "\n" +
- `The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bigquery",
- ),
- },
- Description: `must be one of ["bigquery"]`,
- },
- "loading_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_bigquery_loading_method_gcs_staging": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credential": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_bigquery_loading_method_gcs_staging_credential_hmac_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credential_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HMAC_KEY",
- ),
- },
- Description: `must be one of ["HMAC_KEY"]`,
- },
- "hmac_key_access_id": schema.StringAttribute{
- Computed: true,
- Description: `HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.`,
- },
- "hmac_key_secret": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string.`,
- },
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "file_buffer_count": schema.Int64Attribute{
- Computed: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
- },
- "gcs_bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the GCS bucket. Read more here.`,
- },
- "gcs_bucket_path": schema.StringAttribute{
- Computed: true,
- Description: `Directory under the GCS bucket where data will be written.`,
- },
- "keep_files_in_gcs_bucket": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Delete all tmp files from GCS",
- "Keep all tmp files in GCS",
- ),
- },
- MarkdownDescription: `must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]` + "\n" +
- `This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS Staging",
- ),
- },
- Description: `must be one of ["GCS Staging"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "destination_bigquery_loading_method_standard_inserts": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "destination_bigquery_update_loading_method_gcs_staging": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credential": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_bigquery_update_loading_method_gcs_staging_credential_hmac_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credential_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HMAC_KEY",
- ),
- },
- Description: `must be one of ["HMAC_KEY"]`,
- },
- "hmac_key_access_id": schema.StringAttribute{
- Computed: true,
- Description: `HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.`,
- },
- "hmac_key_secret": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string.`,
- },
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "file_buffer_count": schema.Int64Attribute{
- Computed: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
- },
- "gcs_bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the GCS bucket. Read more here.`,
- },
- "gcs_bucket_path": schema.StringAttribute{
- Computed: true,
- Description: `Directory under the GCS bucket where data will be written.`,
- },
- "keep_files_in_gcs_bucket": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Delete all tmp files from GCS",
- "Keep all tmp files in GCS",
- ),
- },
- MarkdownDescription: `must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]` + "\n" +
- `This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS Staging",
- ),
- },
- Description: `must be one of ["GCS Staging"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "destination_bigquery_update_loading_method_standard_inserts": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "project_id": schema.StringAttribute{
- Computed: true,
- Description: `The GCP project ID for the project containing the target BigQuery dataset. Read more here.`,
- },
- "raw_data_dataset": schema.StringAttribute{
- Computed: true,
- Description: `The dataset to write raw tables into`,
- },
- "transformation_priority": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "interactive",
- "batch",
- ),
- },
- MarkdownDescription: `must be one of ["interactive", "batch"]` + "\n" +
- `Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_bigquery_data_source_sdk.go b/internal/provider/destination_bigquery_data_source_sdk.go
old mode 100755
new mode 100644
index 3d04ccb72..534a9e712
--- a/internal/provider/destination_bigquery_data_source_sdk.go
+++ b/internal/provider/destination_bigquery_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationBigqueryDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_bigquery_resource.go b/internal/provider/destination_bigquery_resource.go
old mode 100755
new mode 100644
index dac4e5752..7d8fd3a9a
--- a/internal/provider/destination_bigquery_resource.go
+++ b/internal/provider/destination_bigquery_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationBigqueryResource struct {
// DestinationBigqueryResourceModel describes the resource data model.
type DestinationBigqueryResourceModel struct {
Configuration DestinationBigquery `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -55,8 +57,9 @@ func (r *DestinationBigqueryResource) Schema(ctx context.Context, req resource.S
Required: true,
Attributes: map[string]schema.Attribute{
"big_query_client_buffer_size_mb": schema.Int64Attribute{
- Optional: true,
- Description: `Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.`,
+ Optional: true,
+ MarkdownDescription: `Default: 15` + "\n" +
+ `Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.`,
},
"credentials_json": schema.StringAttribute{
Optional: true,
@@ -68,6 +71,8 @@ func (r *DestinationBigqueryResource) Schema(ctx context.Context, req resource.S
},
"dataset_location": schema.StringAttribute{
Required: true,
+ MarkdownDescription: `must be one of ["US", "EU", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central1", "europe-central2", "europe-north1", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west7", "europe-west8", "europe-west9", "europe-west12", "me-central1", "me-central2", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east2", "us-east3", "us-east4", "us-east5", "us-south1", "us-west1", "us-west2", "us-west3", "us-west4"]` + "\n" +
+ `The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.`,
Validators: []validator.String{
stringvalidator.OneOf(
"US",
@@ -95,6 +100,9 @@ func (r *DestinationBigqueryResource) Schema(ctx context.Context, req resource.S
"europe-west7",
"europe-west8",
"europe-west9",
+ "europe-west12",
+ "me-central1",
+ "me-central2",
"me-west1",
"northamerica-northeast1",
"northamerica-northeast2",
@@ -106,150 +114,49 @@ func (r *DestinationBigqueryResource) Schema(ctx context.Context, req resource.S
"us-east3",
"us-east4",
"us-east5",
+ "us-south1",
"us-west1",
"us-west2",
"us-west3",
"us-west4",
),
},
- MarkdownDescription: `must be one of ["US", "EU", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central1", "europe-central2", "europe-north1", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west7", "europe-west8", "europe-west9", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east2", "us-east3", "us-east4", "us-east5", "us-west1", "us-west2", "us-west3", "us-west4"]` + "\n" +
- `The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bigquery",
- ),
- },
- Description: `must be one of ["bigquery"]`,
+ "disable_type_dedupe": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions`,
},
"loading_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_bigquery_loading_method_gcs_staging": schema.SingleNestedAttribute{
+ "gcs_staging": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"credential": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_bigquery_loading_method_gcs_staging_credential_hmac_key": schema.SingleNestedAttribute{
+ "hmac_key": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "credential_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HMAC_KEY",
- ),
- },
- Description: `must be one of ["HMAC_KEY"]`,
- },
"hmac_key_access_id": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.`,
},
"hmac_key_secret": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string.`,
},
},
Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
},
},
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "file_buffer_count": schema.Int64Attribute{
- Optional: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
- },
- "gcs_bucket_name": schema.StringAttribute{
- Required: true,
- Description: `The name of the GCS bucket. Read more here.`,
- },
- "gcs_bucket_path": schema.StringAttribute{
- Required: true,
- Description: `Directory under the GCS bucket where data will be written.`,
- },
- "keep_files_in_gcs_bucket": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Delete all tmp files from GCS",
- "Keep all tmp files in GCS",
- ),
- },
- MarkdownDescription: `must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]` + "\n" +
- `This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS Staging",
- ),
- },
- Description: `must be one of ["GCS Staging"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "destination_bigquery_loading_method_standard_inserts": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "destination_bigquery_update_loading_method_gcs_staging": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "credential": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "destination_bigquery_update_loading_method_gcs_staging_credential_hmac_key": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "credential_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HMAC_KEY",
- ),
- },
- Description: `must be one of ["HMAC_KEY"]`,
- },
- "hmac_key_access_id": schema.StringAttribute{
- Required: true,
- Description: `HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.`,
- },
- "hmac_key_secret": schema.StringAttribute{
- Required: true,
- Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string.`,
- },
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- },
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "file_buffer_count": schema.Int64Attribute{
- Optional: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
},
"gcs_bucket_name": schema.StringAttribute{
Required: true,
@@ -261,47 +168,28 @@ func (r *DestinationBigqueryResource) Schema(ctx context.Context, req resource.S
},
"keep_files_in_gcs_bucket": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]; Default: "Delete all tmp files from GCS"` + "\n" +
+ `This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.`,
Validators: []validator.String{
stringvalidator.OneOf(
"Delete all tmp files from GCS",
"Keep all tmp files in GCS",
),
},
- MarkdownDescription: `must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]` + "\n" +
- `This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS Staging",
- ),
- },
- Description: `must be one of ["GCS Staging"]`,
},
},
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
+ Description: `(recommended) Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO to load your data into BigQuery. Provides best-in-class speed, reliability and scalability. Read more about GCS Staging here.`,
},
- "destination_bigquery_update_loading_method_standard_inserts": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
+ "standard_inserts": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `(not recommended) Direct loading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In all other cases, you should use GCS staging.`,
},
},
+ Description: `The way data will be uploaded to BigQuery.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
},
"project_id": schema.StringAttribute{
Required: true,
@@ -309,21 +197,28 @@ func (r *DestinationBigqueryResource) Schema(ctx context.Context, req resource.S
},
"raw_data_dataset": schema.StringAttribute{
Optional: true,
- Description: `The dataset to write raw tables into`,
+ Description: `The dataset to write raw tables into (default: airbyte_internal)`,
},
"transformation_priority": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["interactive", "batch"]; Default: "interactive"` + "\n" +
+ `Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly.`,
Validators: []validator.String{
stringvalidator.OneOf(
"interactive",
"batch",
),
},
- MarkdownDescription: `must be one of ["interactive", "batch"]` + "\n" +
- `Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -340,7 +235,8 @@ func (r *DestinationBigqueryResource) Schema(ctx context.Context, req resource.S
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -390,7 +286,7 @@ func (r *DestinationBigqueryResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationBigquery(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -566,5 +462,5 @@ func (r *DestinationBigqueryResource) Delete(ctx context.Context, req resource.D
}
func (r *DestinationBigqueryResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_bigquery_resource_sdk.go b/internal/provider/destination_bigquery_resource_sdk.go
old mode 100755
new mode 100644
index 94e19ecd3..c311ba4f5
--- a/internal/provider/destination_bigquery_resource_sdk.go
+++ b/internal/provider/destination_bigquery_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -22,67 +22,58 @@ func (r *DestinationBigqueryResourceModel) ToCreateSDKType() *shared.Destination
}
datasetID := r.Configuration.DatasetID.ValueString()
datasetLocation := shared.DestinationBigqueryDatasetLocation(r.Configuration.DatasetLocation.ValueString())
- destinationType := shared.DestinationBigqueryBigquery(r.Configuration.DestinationType.ValueString())
+ disableTypeDedupe := new(bool)
+ if !r.Configuration.DisableTypeDedupe.IsUnknown() && !r.Configuration.DisableTypeDedupe.IsNull() {
+ *disableTypeDedupe = r.Configuration.DisableTypeDedupe.ValueBool()
+ } else {
+ disableTypeDedupe = nil
+ }
var loadingMethod *shared.DestinationBigqueryLoadingMethod
if r.Configuration.LoadingMethod != nil {
- var destinationBigqueryLoadingMethodStandardInserts *shared.DestinationBigqueryLoadingMethodStandardInserts
- if r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodStandardInserts != nil {
- method := shared.DestinationBigqueryLoadingMethodStandardInsertsMethod(r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodStandardInserts.Method.ValueString())
- destinationBigqueryLoadingMethodStandardInserts = &shared.DestinationBigqueryLoadingMethodStandardInserts{
- Method: method,
- }
- }
- if destinationBigqueryLoadingMethodStandardInserts != nil {
- loadingMethod = &shared.DestinationBigqueryLoadingMethod{
- DestinationBigqueryLoadingMethodStandardInserts: destinationBigqueryLoadingMethodStandardInserts,
- }
- }
- var destinationBigqueryLoadingMethodGCSStaging *shared.DestinationBigqueryLoadingMethodGCSStaging
- if r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging != nil {
- var credential shared.DestinationBigqueryLoadingMethodGCSStagingCredential
- var destinationBigqueryLoadingMethodGCSStagingCredentialHMACKey *shared.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey
- if r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.Credential.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey != nil {
- credentialType := shared.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType(r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.Credential.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey.CredentialType.ValueString())
- hmacKeyAccessID := r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.Credential.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey.HmacKeyAccessID.ValueString()
- hmacKeySecret := r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.Credential.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey.HmacKeySecret.ValueString()
- destinationBigqueryLoadingMethodGCSStagingCredentialHMACKey = &shared.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey{
- CredentialType: credentialType,
+ var destinationBigqueryGCSStaging *shared.DestinationBigqueryGCSStaging
+ if r.Configuration.LoadingMethod.GCSStaging != nil {
+ var credential shared.DestinationBigqueryCredential
+ var destinationBigqueryHMACKey *shared.DestinationBigqueryHMACKey
+ if r.Configuration.LoadingMethod.GCSStaging.Credential.HMACKey != nil {
+ hmacKeyAccessID := r.Configuration.LoadingMethod.GCSStaging.Credential.HMACKey.HmacKeyAccessID.ValueString()
+ hmacKeySecret := r.Configuration.LoadingMethod.GCSStaging.Credential.HMACKey.HmacKeySecret.ValueString()
+ destinationBigqueryHMACKey = &shared.DestinationBigqueryHMACKey{
HmacKeyAccessID: hmacKeyAccessID,
HmacKeySecret: hmacKeySecret,
}
}
- if destinationBigqueryLoadingMethodGCSStagingCredentialHMACKey != nil {
- credential = shared.DestinationBigqueryLoadingMethodGCSStagingCredential{
- DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey: destinationBigqueryLoadingMethodGCSStagingCredentialHMACKey,
+ if destinationBigqueryHMACKey != nil {
+ credential = shared.DestinationBigqueryCredential{
+ DestinationBigqueryHMACKey: destinationBigqueryHMACKey,
}
}
- fileBufferCount := new(int64)
- if !r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.FileBufferCount.IsUnknown() && !r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.FileBufferCount.IsNull() {
- *fileBufferCount = r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.FileBufferCount.ValueInt64()
- } else {
- fileBufferCount = nil
- }
- gcsBucketName := r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.GcsBucketName.ValueString()
- gcsBucketPath := r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.GcsBucketPath.ValueString()
- keepFilesInGcsBucket := new(shared.DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing)
- if !r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.KeepFilesInGcsBucket.IsUnknown() && !r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.KeepFilesInGcsBucket.IsNull() {
- *keepFilesInGcsBucket = shared.DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing(r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.KeepFilesInGcsBucket.ValueString())
+ gcsBucketName := r.Configuration.LoadingMethod.GCSStaging.GcsBucketName.ValueString()
+ gcsBucketPath := r.Configuration.LoadingMethod.GCSStaging.GcsBucketPath.ValueString()
+ keepFilesInGcsBucket := new(shared.DestinationBigqueryGCSTmpFilesAfterwardProcessing)
+ if !r.Configuration.LoadingMethod.GCSStaging.KeepFilesInGcsBucket.IsUnknown() && !r.Configuration.LoadingMethod.GCSStaging.KeepFilesInGcsBucket.IsNull() {
+ *keepFilesInGcsBucket = shared.DestinationBigqueryGCSTmpFilesAfterwardProcessing(r.Configuration.LoadingMethod.GCSStaging.KeepFilesInGcsBucket.ValueString())
} else {
keepFilesInGcsBucket = nil
}
- method1 := shared.DestinationBigqueryLoadingMethodGCSStagingMethod(r.Configuration.LoadingMethod.DestinationBigqueryLoadingMethodGCSStaging.Method.ValueString())
- destinationBigqueryLoadingMethodGCSStaging = &shared.DestinationBigqueryLoadingMethodGCSStaging{
+ destinationBigqueryGCSStaging = &shared.DestinationBigqueryGCSStaging{
Credential: credential,
- FileBufferCount: fileBufferCount,
GcsBucketName: gcsBucketName,
GcsBucketPath: gcsBucketPath,
KeepFilesInGcsBucket: keepFilesInGcsBucket,
- Method: method1,
}
}
- if destinationBigqueryLoadingMethodGCSStaging != nil {
+ if destinationBigqueryGCSStaging != nil {
+ loadingMethod = &shared.DestinationBigqueryLoadingMethod{
+ DestinationBigqueryGCSStaging: destinationBigqueryGCSStaging,
+ }
+ }
+ var destinationBigqueryStandardInserts *shared.DestinationBigqueryStandardInserts
+ if r.Configuration.LoadingMethod.StandardInserts != nil {
+ destinationBigqueryStandardInserts = &shared.DestinationBigqueryStandardInserts{}
+ }
+ if destinationBigqueryStandardInserts != nil {
loadingMethod = &shared.DestinationBigqueryLoadingMethod{
- DestinationBigqueryLoadingMethodGCSStaging: destinationBigqueryLoadingMethodGCSStaging,
+ DestinationBigqueryStandardInserts: destinationBigqueryStandardInserts,
}
}
}
@@ -104,16 +95,23 @@ func (r *DestinationBigqueryResourceModel) ToCreateSDKType() *shared.Destination
CredentialsJSON: credentialsJSON,
DatasetID: datasetID,
DatasetLocation: datasetLocation,
- DestinationType: destinationType,
+ DisableTypeDedupe: disableTypeDedupe,
LoadingMethod: loadingMethod,
ProjectID: projectID,
RawDataDataset: rawDataDataset,
TransformationPriority: transformationPriority,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationBigqueryCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -139,67 +137,59 @@ func (r *DestinationBigqueryResourceModel) ToUpdateSDKType() *shared.Destination
credentialsJSON = nil
}
datasetID := r.Configuration.DatasetID.ValueString()
- datasetLocation := shared.DestinationBigqueryUpdateDatasetLocation(r.Configuration.DatasetLocation.ValueString())
- var loadingMethod *shared.DestinationBigqueryUpdateLoadingMethod
+ datasetLocation := shared.DatasetLocation(r.Configuration.DatasetLocation.ValueString())
+ disableTypeDedupe := new(bool)
+ if !r.Configuration.DisableTypeDedupe.IsUnknown() && !r.Configuration.DisableTypeDedupe.IsNull() {
+ *disableTypeDedupe = r.Configuration.DisableTypeDedupe.ValueBool()
+ } else {
+ disableTypeDedupe = nil
+ }
+ var loadingMethod *shared.LoadingMethod
if r.Configuration.LoadingMethod != nil {
- var destinationBigqueryUpdateLoadingMethodStandardInserts *shared.DestinationBigqueryUpdateLoadingMethodStandardInserts
- if r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodStandardInserts != nil {
- method := shared.DestinationBigqueryUpdateLoadingMethodStandardInsertsMethod(r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodStandardInserts.Method.ValueString())
- destinationBigqueryUpdateLoadingMethodStandardInserts = &shared.DestinationBigqueryUpdateLoadingMethodStandardInserts{
- Method: method,
- }
- }
- if destinationBigqueryUpdateLoadingMethodStandardInserts != nil {
- loadingMethod = &shared.DestinationBigqueryUpdateLoadingMethod{
- DestinationBigqueryUpdateLoadingMethodStandardInserts: destinationBigqueryUpdateLoadingMethodStandardInserts,
- }
- }
- var destinationBigqueryUpdateLoadingMethodGCSStaging *shared.DestinationBigqueryUpdateLoadingMethodGCSStaging
- if r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging != nil {
- var credential shared.DestinationBigqueryUpdateLoadingMethodGCSStagingCredential
- var destinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey *shared.DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey
- if r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.Credential.DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey != nil {
- credentialType := shared.DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType(r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.Credential.DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey.CredentialType.ValueString())
- hmacKeyAccessID := r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.Credential.DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey.HmacKeyAccessID.ValueString()
- hmacKeySecret := r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.Credential.DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey.HmacKeySecret.ValueString()
- destinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey = &shared.DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey{
- CredentialType: credentialType,
+ var gcsStaging *shared.GCSStaging
+ if r.Configuration.LoadingMethod.GCSStaging != nil {
+ var credential shared.Credential
+ var destinationBigqueryUpdateHMACKey *shared.DestinationBigqueryUpdateHMACKey
+ if r.Configuration.LoadingMethod.GCSStaging.Credential.HMACKey != nil {
+ hmacKeyAccessID := r.Configuration.LoadingMethod.GCSStaging.Credential.HMACKey.HmacKeyAccessID.ValueString()
+ hmacKeySecret := r.Configuration.LoadingMethod.GCSStaging.Credential.HMACKey.HmacKeySecret.ValueString()
+ destinationBigqueryUpdateHMACKey = &shared.DestinationBigqueryUpdateHMACKey{
HmacKeyAccessID: hmacKeyAccessID,
HmacKeySecret: hmacKeySecret,
}
}
- if destinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey != nil {
- credential = shared.DestinationBigqueryUpdateLoadingMethodGCSStagingCredential{
- DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey: destinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey,
+ if destinationBigqueryUpdateHMACKey != nil {
+ credential = shared.Credential{
+ DestinationBigqueryUpdateHMACKey: destinationBigqueryUpdateHMACKey,
}
}
- fileBufferCount := new(int64)
- if !r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.FileBufferCount.IsUnknown() && !r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.FileBufferCount.IsNull() {
- *fileBufferCount = r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.FileBufferCount.ValueInt64()
- } else {
- fileBufferCount = nil
- }
- gcsBucketName := r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.GcsBucketName.ValueString()
- gcsBucketPath := r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.GcsBucketPath.ValueString()
- keepFilesInGcsBucket := new(shared.DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing)
- if !r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.KeepFilesInGcsBucket.IsUnknown() && !r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.KeepFilesInGcsBucket.IsNull() {
- *keepFilesInGcsBucket = shared.DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing(r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.KeepFilesInGcsBucket.ValueString())
+ gcsBucketName := r.Configuration.LoadingMethod.GCSStaging.GcsBucketName.ValueString()
+ gcsBucketPath := r.Configuration.LoadingMethod.GCSStaging.GcsBucketPath.ValueString()
+ keepFilesInGcsBucket := new(shared.GCSTmpFilesAfterwardProcessing)
+ if !r.Configuration.LoadingMethod.GCSStaging.KeepFilesInGcsBucket.IsUnknown() && !r.Configuration.LoadingMethod.GCSStaging.KeepFilesInGcsBucket.IsNull() {
+ *keepFilesInGcsBucket = shared.GCSTmpFilesAfterwardProcessing(r.Configuration.LoadingMethod.GCSStaging.KeepFilesInGcsBucket.ValueString())
} else {
keepFilesInGcsBucket = nil
}
- method1 := shared.DestinationBigqueryUpdateLoadingMethodGCSStagingMethod(r.Configuration.LoadingMethod.DestinationBigqueryUpdateLoadingMethodGCSStaging.Method.ValueString())
- destinationBigqueryUpdateLoadingMethodGCSStaging = &shared.DestinationBigqueryUpdateLoadingMethodGCSStaging{
+ gcsStaging = &shared.GCSStaging{
Credential: credential,
- FileBufferCount: fileBufferCount,
GcsBucketName: gcsBucketName,
GcsBucketPath: gcsBucketPath,
KeepFilesInGcsBucket: keepFilesInGcsBucket,
- Method: method1,
}
}
- if destinationBigqueryUpdateLoadingMethodGCSStaging != nil {
- loadingMethod = &shared.DestinationBigqueryUpdateLoadingMethod{
- DestinationBigqueryUpdateLoadingMethodGCSStaging: destinationBigqueryUpdateLoadingMethodGCSStaging,
+ if gcsStaging != nil {
+ loadingMethod = &shared.LoadingMethod{
+ GCSStaging: gcsStaging,
+ }
+ }
+ var standardInserts *shared.StandardInserts
+ if r.Configuration.LoadingMethod.StandardInserts != nil {
+ standardInserts = &shared.StandardInserts{}
+ }
+ if standardInserts != nil {
+ loadingMethod = &shared.LoadingMethod{
+ StandardInserts: standardInserts,
}
}
}
@@ -210,9 +200,9 @@ func (r *DestinationBigqueryResourceModel) ToUpdateSDKType() *shared.Destination
} else {
rawDataDataset = nil
}
- transformationPriority := new(shared.DestinationBigqueryUpdateTransformationQueryRunType)
+ transformationPriority := new(shared.TransformationQueryRunType)
if !r.Configuration.TransformationPriority.IsUnknown() && !r.Configuration.TransformationPriority.IsNull() {
- *transformationPriority = shared.DestinationBigqueryUpdateTransformationQueryRunType(r.Configuration.TransformationPriority.ValueString())
+ *transformationPriority = shared.TransformationQueryRunType(r.Configuration.TransformationPriority.ValueString())
} else {
transformationPriority = nil
}
@@ -221,6 +211,7 @@ func (r *DestinationBigqueryResourceModel) ToUpdateSDKType() *shared.Destination
CredentialsJSON: credentialsJSON,
DatasetID: datasetID,
DatasetLocation: datasetLocation,
+ DisableTypeDedupe: disableTypeDedupe,
LoadingMethod: loadingMethod,
ProjectID: projectID,
RawDataDataset: rawDataDataset,
diff --git a/internal/provider/destination_bigquerydenormalized_data_source.go b/internal/provider/destination_bigquerydenormalized_data_source.go
deleted file mode 100755
index bb544308b..000000000
--- a/internal/provider/destination_bigquerydenormalized_data_source.go
+++ /dev/null
@@ -1,389 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
- "context"
- "fmt"
-
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ datasource.DataSource = &DestinationBigqueryDenormalizedDataSource{}
-var _ datasource.DataSourceWithConfigure = &DestinationBigqueryDenormalizedDataSource{}
-
-func NewDestinationBigqueryDenormalizedDataSource() datasource.DataSource {
- return &DestinationBigqueryDenormalizedDataSource{}
-}
-
-// DestinationBigqueryDenormalizedDataSource is the data source implementation.
-type DestinationBigqueryDenormalizedDataSource struct {
- client *sdk.SDK
-}
-
-// DestinationBigqueryDenormalizedDataSourceModel describes the data model.
-type DestinationBigqueryDenormalizedDataSourceModel struct {
- Configuration DestinationBigqueryDenormalized `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-// Metadata returns the data source type name.
-func (r *DestinationBigqueryDenormalizedDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_destination_bigquery_denormalized"
-}
-
-// Schema defines the schema for the data source.
-func (r *DestinationBigqueryDenormalizedDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "DestinationBigqueryDenormalized DataSource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "big_query_client_buffer_size_mb": schema.Int64Attribute{
- Computed: true,
- Description: `Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.`,
- },
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.`,
- },
- "dataset_id": schema.StringAttribute{
- Computed: true,
- Description: `The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.`,
- },
- "dataset_location": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "US",
- "EU",
- "asia-east1",
- "asia-east2",
- "asia-northeast1",
- "asia-northeast2",
- "asia-northeast3",
- "asia-south1",
- "asia-south2",
- "asia-southeast1",
- "asia-southeast2",
- "australia-southeast1",
- "australia-southeast2",
- "europe-central1",
- "europe-central2",
- "europe-north1",
- "europe-southwest1",
- "europe-west1",
- "europe-west2",
- "europe-west3",
- "europe-west4",
- "europe-west6",
- "europe-west7",
- "europe-west8",
- "europe-west9",
- "me-west1",
- "northamerica-northeast1",
- "northamerica-northeast2",
- "southamerica-east1",
- "southamerica-west1",
- "us-central1",
- "us-east1",
- "us-east2",
- "us-east3",
- "us-east4",
- "us-east5",
- "us-west1",
- "us-west2",
- "us-west3",
- "us-west4",
- ),
- },
- MarkdownDescription: `must be one of ["US", "EU", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central1", "europe-central2", "europe-north1", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west7", "europe-west8", "europe-west9", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east2", "us-east3", "us-east4", "us-east5", "us-west1", "us-west2", "us-west3", "us-west4"]` + "\n" +
- `The location of the dataset. Warning: Changes made after creation will not be applied. The default "US" value is used if not set explicitly. Read more here.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bigquery-denormalized",
- ),
- },
- Description: `must be one of ["bigquery-denormalized"]`,
- },
- "loading_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_bigquery_denormalized_loading_method_gcs_staging": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credential": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_bigquery_denormalized_loading_method_gcs_staging_credential_hmac_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credential_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HMAC_KEY",
- ),
- },
- Description: `must be one of ["HMAC_KEY"]`,
- },
- "hmac_key_access_id": schema.StringAttribute{
- Computed: true,
- Description: `HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.`,
- },
- "hmac_key_secret": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string.`,
- },
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "file_buffer_count": schema.Int64Attribute{
- Computed: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
- },
- "gcs_bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the GCS bucket. Read more here.`,
- },
- "gcs_bucket_path": schema.StringAttribute{
- Computed: true,
- Description: `Directory under the GCS bucket where data will be written. Read more here.`,
- },
- "keep_files_in_gcs_bucket": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Delete all tmp files from GCS",
- "Keep all tmp files in GCS",
- ),
- },
- MarkdownDescription: `must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]` + "\n" +
- `This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS Staging",
- ),
- },
- Description: `must be one of ["GCS Staging"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "destination_bigquery_denormalized_loading_method_standard_inserts": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "destination_bigquery_denormalized_update_loading_method_gcs_staging": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credential": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_bigquery_denormalized_update_loading_method_gcs_staging_credential_hmac_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credential_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HMAC_KEY",
- ),
- },
- Description: `must be one of ["HMAC_KEY"]`,
- },
- "hmac_key_access_id": schema.StringAttribute{
- Computed: true,
- Description: `HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.`,
- },
- "hmac_key_secret": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string.`,
- },
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "file_buffer_count": schema.Int64Attribute{
- Computed: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
- },
- "gcs_bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the GCS bucket. Read more here.`,
- },
- "gcs_bucket_path": schema.StringAttribute{
- Computed: true,
- Description: `Directory under the GCS bucket where data will be written. Read more here.`,
- },
- "keep_files_in_gcs_bucket": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Delete all tmp files from GCS",
- "Keep all tmp files in GCS",
- ),
- },
- MarkdownDescription: `must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]` + "\n" +
- `This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS Staging",
- ),
- },
- Description: `must be one of ["GCS Staging"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "destination_bigquery_denormalized_update_loading_method_standard_inserts": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "project_id": schema.StringAttribute{
- Computed: true,
- Description: `The GCP project ID for the project containing the target BigQuery dataset. Read more here.`,
- },
- },
- },
- "destination_id": schema.StringAttribute{
- Required: true,
- },
- "name": schema.StringAttribute{
- Computed: true,
- },
- "workspace_id": schema.StringAttribute{
- Computed: true,
- },
- },
- }
-}
-
-func (r *DestinationBigqueryDenormalizedDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected DataSource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *DestinationBigqueryDenormalizedDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data *DestinationBigqueryDenormalizedDataSourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- destinationID := data.DestinationID.ValueString()
- request := operations.GetDestinationBigqueryDenormalizedRequest{
- DestinationID: destinationID,
- }
- res, err := r.client.Destinations.GetDestinationBigqueryDenormalized(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.DestinationResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.DestinationResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
diff --git a/internal/provider/destination_bigquerydenormalized_data_source_sdk.go b/internal/provider/destination_bigquerydenormalized_data_source_sdk.go
deleted file mode 100755
index ff30c3a66..000000000
--- a/internal/provider/destination_bigquerydenormalized_data_source_sdk.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *DestinationBigqueryDenormalizedDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
- r.DestinationID = types.StringValue(resp.DestinationID)
- r.Name = types.StringValue(resp.Name)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
diff --git a/internal/provider/destination_bigquerydenormalized_resource.go b/internal/provider/destination_bigquerydenormalized_resource.go
deleted file mode 100755
index 728b107e2..000000000
--- a/internal/provider/destination_bigquerydenormalized_resource.go
+++ /dev/null
@@ -1,555 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "context"
- "fmt"
-
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ resource.Resource = &DestinationBigqueryDenormalizedResource{}
-var _ resource.ResourceWithImportState = &DestinationBigqueryDenormalizedResource{}
-
-func NewDestinationBigqueryDenormalizedResource() resource.Resource {
- return &DestinationBigqueryDenormalizedResource{}
-}
-
-// DestinationBigqueryDenormalizedResource defines the resource implementation.
-type DestinationBigqueryDenormalizedResource struct {
- client *sdk.SDK
-}
-
-// DestinationBigqueryDenormalizedResourceModel describes the resource data model.
-type DestinationBigqueryDenormalizedResourceModel struct {
- Configuration DestinationBigqueryDenormalized `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- DestinationType types.String `tfsdk:"destination_type"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-func (r *DestinationBigqueryDenormalizedResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_destination_bigquery_denormalized"
-}
-
-func (r *DestinationBigqueryDenormalizedResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "DestinationBigqueryDenormalized Resource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "big_query_client_buffer_size_mb": schema.Int64Attribute{
- Optional: true,
- Description: `Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.`,
- },
- "credentials_json": schema.StringAttribute{
- Optional: true,
- Description: `The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.`,
- },
- "dataset_id": schema.StringAttribute{
- Required: true,
- Description: `The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.`,
- },
- "dataset_location": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "US",
- "EU",
- "asia-east1",
- "asia-east2",
- "asia-northeast1",
- "asia-northeast2",
- "asia-northeast3",
- "asia-south1",
- "asia-south2",
- "asia-southeast1",
- "asia-southeast2",
- "australia-southeast1",
- "australia-southeast2",
- "europe-central1",
- "europe-central2",
- "europe-north1",
- "europe-southwest1",
- "europe-west1",
- "europe-west2",
- "europe-west3",
- "europe-west4",
- "europe-west6",
- "europe-west7",
- "europe-west8",
- "europe-west9",
- "me-west1",
- "northamerica-northeast1",
- "northamerica-northeast2",
- "southamerica-east1",
- "southamerica-west1",
- "us-central1",
- "us-east1",
- "us-east2",
- "us-east3",
- "us-east4",
- "us-east5",
- "us-west1",
- "us-west2",
- "us-west3",
- "us-west4",
- ),
- },
- MarkdownDescription: `must be one of ["US", "EU", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central1", "europe-central2", "europe-north1", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west7", "europe-west8", "europe-west9", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east2", "us-east3", "us-east4", "us-east5", "us-west1", "us-west2", "us-west3", "us-west4"]` + "\n" +
- `The location of the dataset. Warning: Changes made after creation will not be applied. The default "US" value is used if not set explicitly. Read more here.`,
- },
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bigquery-denormalized",
- ),
- },
- Description: `must be one of ["bigquery-denormalized"]`,
- },
- "loading_method": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "destination_bigquery_denormalized_loading_method_gcs_staging": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "credential": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "destination_bigquery_denormalized_loading_method_gcs_staging_credential_hmac_key": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "credential_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HMAC_KEY",
- ),
- },
- Description: `must be one of ["HMAC_KEY"]`,
- },
- "hmac_key_access_id": schema.StringAttribute{
- Required: true,
- Description: `HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.`,
- },
- "hmac_key_secret": schema.StringAttribute{
- Required: true,
- Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string.`,
- },
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "file_buffer_count": schema.Int64Attribute{
- Optional: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
- },
- "gcs_bucket_name": schema.StringAttribute{
- Required: true,
- Description: `The name of the GCS bucket. Read more here.`,
- },
- "gcs_bucket_path": schema.StringAttribute{
- Required: true,
- Description: `Directory under the GCS bucket where data will be written. Read more here.`,
- },
- "keep_files_in_gcs_bucket": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Delete all tmp files from GCS",
- "Keep all tmp files in GCS",
- ),
- },
- MarkdownDescription: `must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]` + "\n" +
- `This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS Staging",
- ),
- },
- Description: `must be one of ["GCS Staging"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "destination_bigquery_denormalized_loading_method_standard_inserts": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "destination_bigquery_denormalized_update_loading_method_gcs_staging": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "credential": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "destination_bigquery_denormalized_update_loading_method_gcs_staging_credential_hmac_key": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "credential_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HMAC_KEY",
- ),
- },
- Description: `must be one of ["HMAC_KEY"]`,
- },
- "hmac_key_access_id": schema.StringAttribute{
- Required: true,
- Description: `HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.`,
- },
- "hmac_key_secret": schema.StringAttribute{
- Required: true,
- Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string.`,
- },
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "file_buffer_count": schema.Int64Attribute{
- Optional: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
- },
- "gcs_bucket_name": schema.StringAttribute{
- Required: true,
- Description: `The name of the GCS bucket. Read more here.`,
- },
- "gcs_bucket_path": schema.StringAttribute{
- Required: true,
- Description: `Directory under the GCS bucket where data will be written. Read more here.`,
- },
- "keep_files_in_gcs_bucket": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Delete all tmp files from GCS",
- "Keep all tmp files in GCS",
- ),
- },
- MarkdownDescription: `must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]` + "\n" +
- `This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS Staging",
- ),
- },
- Description: `must be one of ["GCS Staging"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "destination_bigquery_denormalized_update_loading_method_standard_inserts": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.`,
- },
- "project_id": schema.StringAttribute{
- Required: true,
- Description: `The GCP project ID for the project containing the target BigQuery dataset. Read more here.`,
- },
- },
- },
- "destination_id": schema.StringAttribute{
- Computed: true,
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- },
- "name": schema.StringAttribute{
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- Required: true,
- },
- "workspace_id": schema.StringAttribute{
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- Required: true,
- },
- },
- }
-}
-
-func (r *DestinationBigqueryDenormalizedResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected Resource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *DestinationBigqueryDenormalizedResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data *DestinationBigqueryDenormalizedResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- request := *data.ToCreateSDKType()
- res, err := r.client.Destinations.CreateDestinationBigqueryDenormalized(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.DestinationResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromCreateResponse(res.DestinationResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *DestinationBigqueryDenormalizedResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data *DestinationBigqueryDenormalizedResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- destinationID := data.DestinationID.ValueString()
- request := operations.GetDestinationBigqueryDenormalizedRequest{
- DestinationID: destinationID,
- }
- res, err := r.client.Destinations.GetDestinationBigqueryDenormalized(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.DestinationResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.DestinationResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *DestinationBigqueryDenormalizedResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data *DestinationBigqueryDenormalizedResourceModel
- merge(ctx, req, resp, &data)
- if resp.Diagnostics.HasError() {
- return
- }
-
- destinationBigqueryDenormalizedPutRequest := data.ToUpdateSDKType()
- destinationID := data.DestinationID.ValueString()
- request := operations.PutDestinationBigqueryDenormalizedRequest{
- DestinationBigqueryDenormalizedPutRequest: destinationBigqueryDenormalizedPutRequest,
- DestinationID: destinationID,
- }
- res, err := r.client.Destinations.PutDestinationBigqueryDenormalized(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- destinationId1 := data.DestinationID.ValueString()
- getRequest := operations.GetDestinationBigqueryDenormalizedRequest{
- DestinationID: destinationId1,
- }
- getResponse, err := r.client.Destinations.GetDestinationBigqueryDenormalized(ctx, getRequest)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if getResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse))
- return
- }
- if getResponse.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse))
- return
- }
- if getResponse.DestinationResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse))
- return
- }
- data.RefreshFromGetResponse(getResponse.DestinationResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *DestinationBigqueryDenormalizedResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data *DestinationBigqueryDenormalizedResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- destinationID := data.DestinationID.ValueString()
- request := operations.DeleteDestinationBigqueryDenormalizedRequest{
- DestinationID: destinationID,
- }
- res, err := r.client.Destinations.DeleteDestinationBigqueryDenormalized(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
-
-}
-
-func (r *DestinationBigqueryDenormalizedResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
-}
diff --git a/internal/provider/destination_bigquerydenormalized_resource_sdk.go b/internal/provider/destination_bigquerydenormalized_resource_sdk.go
deleted file mode 100755
index 2c6a2b9b6..000000000
--- a/internal/provider/destination_bigquerydenormalized_resource_sdk.go
+++ /dev/null
@@ -1,235 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *DestinationBigqueryDenormalizedResourceModel) ToCreateSDKType() *shared.DestinationBigqueryDenormalizedCreateRequest {
- bigQueryClientBufferSizeMb := new(int64)
- if !r.Configuration.BigQueryClientBufferSizeMb.IsUnknown() && !r.Configuration.BigQueryClientBufferSizeMb.IsNull() {
- *bigQueryClientBufferSizeMb = r.Configuration.BigQueryClientBufferSizeMb.ValueInt64()
- } else {
- bigQueryClientBufferSizeMb = nil
- }
- credentialsJSON := new(string)
- if !r.Configuration.CredentialsJSON.IsUnknown() && !r.Configuration.CredentialsJSON.IsNull() {
- *credentialsJSON = r.Configuration.CredentialsJSON.ValueString()
- } else {
- credentialsJSON = nil
- }
- datasetID := r.Configuration.DatasetID.ValueString()
- datasetLocation := new(shared.DestinationBigqueryDenormalizedDatasetLocation)
- if !r.Configuration.DatasetLocation.IsUnknown() && !r.Configuration.DatasetLocation.IsNull() {
- *datasetLocation = shared.DestinationBigqueryDenormalizedDatasetLocation(r.Configuration.DatasetLocation.ValueString())
- } else {
- datasetLocation = nil
- }
- destinationType := shared.DestinationBigqueryDenormalizedBigqueryDenormalized(r.Configuration.DestinationType.ValueString())
- var loadingMethod *shared.DestinationBigqueryDenormalizedLoadingMethod
- if r.Configuration.LoadingMethod != nil {
- var destinationBigqueryDenormalizedLoadingMethodStandardInserts *shared.DestinationBigqueryDenormalizedLoadingMethodStandardInserts
- if r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodStandardInserts != nil {
- method := shared.DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod(r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodStandardInserts.Method.ValueString())
- destinationBigqueryDenormalizedLoadingMethodStandardInserts = &shared.DestinationBigqueryDenormalizedLoadingMethodStandardInserts{
- Method: method,
- }
- }
- if destinationBigqueryDenormalizedLoadingMethodStandardInserts != nil {
- loadingMethod = &shared.DestinationBigqueryDenormalizedLoadingMethod{
- DestinationBigqueryDenormalizedLoadingMethodStandardInserts: destinationBigqueryDenormalizedLoadingMethodStandardInserts,
- }
- }
- var destinationBigqueryDenormalizedLoadingMethodGCSStaging *shared.DestinationBigqueryDenormalizedLoadingMethodGCSStaging
- if r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging != nil {
- var credential shared.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredential
- var destinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey *shared.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey
- if r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.Credential.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey != nil {
- credentialType := shared.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType(r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.Credential.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey.CredentialType.ValueString())
- hmacKeyAccessID := r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.Credential.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey.HmacKeyAccessID.ValueString()
- hmacKeySecret := r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.Credential.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey.HmacKeySecret.ValueString()
- destinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey = &shared.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey{
- CredentialType: credentialType,
- HmacKeyAccessID: hmacKeyAccessID,
- HmacKeySecret: hmacKeySecret,
- }
- }
- if destinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey != nil {
- credential = shared.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredential{
- DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey: destinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey,
- }
- }
- fileBufferCount := new(int64)
- if !r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.FileBufferCount.IsUnknown() && !r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.FileBufferCount.IsNull() {
- *fileBufferCount = r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.FileBufferCount.ValueInt64()
- } else {
- fileBufferCount = nil
- }
- gcsBucketName := r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.GcsBucketName.ValueString()
- gcsBucketPath := r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.GcsBucketPath.ValueString()
- keepFilesInGcsBucket := new(shared.DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing)
- if !r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.KeepFilesInGcsBucket.IsUnknown() && !r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.KeepFilesInGcsBucket.IsNull() {
- *keepFilesInGcsBucket = shared.DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing(r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.KeepFilesInGcsBucket.ValueString())
- } else {
- keepFilesInGcsBucket = nil
- }
- method1 := shared.DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod(r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedLoadingMethodGCSStaging.Method.ValueString())
- destinationBigqueryDenormalizedLoadingMethodGCSStaging = &shared.DestinationBigqueryDenormalizedLoadingMethodGCSStaging{
- Credential: credential,
- FileBufferCount: fileBufferCount,
- GcsBucketName: gcsBucketName,
- GcsBucketPath: gcsBucketPath,
- KeepFilesInGcsBucket: keepFilesInGcsBucket,
- Method: method1,
- }
- }
- if destinationBigqueryDenormalizedLoadingMethodGCSStaging != nil {
- loadingMethod = &shared.DestinationBigqueryDenormalizedLoadingMethod{
- DestinationBigqueryDenormalizedLoadingMethodGCSStaging: destinationBigqueryDenormalizedLoadingMethodGCSStaging,
- }
- }
- }
- projectID := r.Configuration.ProjectID.ValueString()
- configuration := shared.DestinationBigqueryDenormalized{
- BigQueryClientBufferSizeMb: bigQueryClientBufferSizeMb,
- CredentialsJSON: credentialsJSON,
- DatasetID: datasetID,
- DatasetLocation: datasetLocation,
- DestinationType: destinationType,
- LoadingMethod: loadingMethod,
- ProjectID: projectID,
- }
- name := r.Name.ValueString()
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.DestinationBigqueryDenormalizedCreateRequest{
- Configuration: configuration,
- Name: name,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *DestinationBigqueryDenormalizedResourceModel) ToGetSDKType() *shared.DestinationBigqueryDenormalizedCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *DestinationBigqueryDenormalizedResourceModel) ToUpdateSDKType() *shared.DestinationBigqueryDenormalizedPutRequest {
- bigQueryClientBufferSizeMb := new(int64)
- if !r.Configuration.BigQueryClientBufferSizeMb.IsUnknown() && !r.Configuration.BigQueryClientBufferSizeMb.IsNull() {
- *bigQueryClientBufferSizeMb = r.Configuration.BigQueryClientBufferSizeMb.ValueInt64()
- } else {
- bigQueryClientBufferSizeMb = nil
- }
- credentialsJSON := new(string)
- if !r.Configuration.CredentialsJSON.IsUnknown() && !r.Configuration.CredentialsJSON.IsNull() {
- *credentialsJSON = r.Configuration.CredentialsJSON.ValueString()
- } else {
- credentialsJSON = nil
- }
- datasetID := r.Configuration.DatasetID.ValueString()
- datasetLocation := new(shared.DestinationBigqueryDenormalizedUpdateDatasetLocation)
- if !r.Configuration.DatasetLocation.IsUnknown() && !r.Configuration.DatasetLocation.IsNull() {
- *datasetLocation = shared.DestinationBigqueryDenormalizedUpdateDatasetLocation(r.Configuration.DatasetLocation.ValueString())
- } else {
- datasetLocation = nil
- }
- var loadingMethod *shared.DestinationBigqueryDenormalizedUpdateLoadingMethod
- if r.Configuration.LoadingMethod != nil {
- var destinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts *shared.DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts
- if r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts != nil {
- method := shared.DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInsertsMethod(r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts.Method.ValueString())
- destinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts = &shared.DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts{
- Method: method,
- }
- }
- if destinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts != nil {
- loadingMethod = &shared.DestinationBigqueryDenormalizedUpdateLoadingMethod{
- DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts: destinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts,
- }
- }
- var destinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging *shared.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging
- if r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging != nil {
- var credential shared.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredential
- var destinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey *shared.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey
- if r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.Credential.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey != nil {
- credentialType := shared.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType(r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.Credential.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey.CredentialType.ValueString())
- hmacKeyAccessID := r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.Credential.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey.HmacKeyAccessID.ValueString()
- hmacKeySecret := r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.Credential.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey.HmacKeySecret.ValueString()
- destinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey = &shared.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey{
- CredentialType: credentialType,
- HmacKeyAccessID: hmacKeyAccessID,
- HmacKeySecret: hmacKeySecret,
- }
- }
- if destinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey != nil {
- credential = shared.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredential{
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey: destinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey,
- }
- }
- fileBufferCount := new(int64)
- if !r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.FileBufferCount.IsUnknown() && !r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.FileBufferCount.IsNull() {
- *fileBufferCount = r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.FileBufferCount.ValueInt64()
- } else {
- fileBufferCount = nil
- }
- gcsBucketName := r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.GcsBucketName.ValueString()
- gcsBucketPath := r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.GcsBucketPath.ValueString()
- keepFilesInGcsBucket := new(shared.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing)
- if !r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.KeepFilesInGcsBucket.IsUnknown() && !r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.KeepFilesInGcsBucket.IsNull() {
- *keepFilesInGcsBucket = shared.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing(r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.KeepFilesInGcsBucket.ValueString())
- } else {
- keepFilesInGcsBucket = nil
- }
- method1 := shared.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingMethod(r.Configuration.LoadingMethod.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging.Method.ValueString())
- destinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging = &shared.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging{
- Credential: credential,
- FileBufferCount: fileBufferCount,
- GcsBucketName: gcsBucketName,
- GcsBucketPath: gcsBucketPath,
- KeepFilesInGcsBucket: keepFilesInGcsBucket,
- Method: method1,
- }
- }
- if destinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging != nil {
- loadingMethod = &shared.DestinationBigqueryDenormalizedUpdateLoadingMethod{
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging: destinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging,
- }
- }
- }
- projectID := r.Configuration.ProjectID.ValueString()
- configuration := shared.DestinationBigqueryDenormalizedUpdate{
- BigQueryClientBufferSizeMb: bigQueryClientBufferSizeMb,
- CredentialsJSON: credentialsJSON,
- DatasetID: datasetID,
- DatasetLocation: datasetLocation,
- LoadingMethod: loadingMethod,
- ProjectID: projectID,
- }
- name := r.Name.ValueString()
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.DestinationBigqueryDenormalizedPutRequest{
- Configuration: configuration,
- Name: name,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *DestinationBigqueryDenormalizedResourceModel) ToDeleteSDKType() *shared.DestinationBigqueryDenormalizedCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *DestinationBigqueryDenormalizedResourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
- r.DestinationID = types.StringValue(resp.DestinationID)
- r.DestinationType = types.StringValue(resp.DestinationType)
- r.Name = types.StringValue(resp.Name)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
-
-func (r *DestinationBigqueryDenormalizedResourceModel) RefreshFromCreateResponse(resp *shared.DestinationResponse) {
- r.RefreshFromGetResponse(resp)
-}
diff --git a/internal/provider/destination_clickhouse_data_source.go b/internal/provider/destination_clickhouse_data_source.go
old mode 100755
new mode 100644
index 721a72aef..4364c5973
--- a/internal/provider/destination_clickhouse_data_source.go
+++ b/internal/provider/destination_clickhouse_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationClickhouseDataSource struct {
// DestinationClickhouseDataSourceModel describes the data model.
type DestinationClickhouseDataSourceModel struct {
- Configuration DestinationClickhouse `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,216 +47,17 @@ func (r *DestinationClickhouseDataSource) Schema(ctx context.Context, req dataso
MarkdownDescription: "DestinationClickhouse DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `Name of the database.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "clickhouse",
- ),
- },
- Description: `must be one of ["clickhouse"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the database.`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `HTTP port of the database.`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_clickhouse_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_clickhouse_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_clickhouse_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_clickhouse_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_clickhouse_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_clickhouse_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to use to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_clickhouse_data_source_sdk.go b/internal/provider/destination_clickhouse_data_source_sdk.go
old mode 100755
new mode 100644
index 22165849b..d3849bfd9
--- a/internal/provider/destination_clickhouse_data_source_sdk.go
+++ b/internal/provider/destination_clickhouse_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationClickhouseDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_clickhouse_resource.go b/internal/provider/destination_clickhouse_resource.go
old mode 100755
new mode 100644
index 1a1ac8681..b58b96840
--- a/internal/provider/destination_clickhouse_resource.go
+++ b/internal/provider/destination_clickhouse_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type DestinationClickhouseResource struct {
// DestinationClickhouseResourceModel describes the resource data model.
type DestinationClickhouseResourceModel struct {
Configuration DestinationClickhouse `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -58,15 +59,6 @@ func (r *DestinationClickhouseResource) Schema(ctx context.Context, req resource
Required: true,
Description: `Name of the database.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "clickhouse",
- ),
- },
- Description: `must be one of ["clickhouse"]`,
- },
"host": schema.StringAttribute{
Required: true,
Description: `Hostname of the database.`,
@@ -77,131 +69,33 @@ func (r *DestinationClickhouseResource) Schema(ctx context.Context, req resource
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password associated with the username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `HTTP port of the database.`,
+ Optional: true,
+ MarkdownDescription: `Default: 8123` + "\n" +
+ `HTTP port of the database.`,
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_clickhouse_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_clickhouse_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_clickhouse_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_clickhouse_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_clickhouse_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -209,35 +103,28 @@ func (r *DestinationClickhouseResource) Schema(ctx context.Context, req resource
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_clickhouse_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -247,10 +134,10 @@ func (r *DestinationClickhouseResource) Schema(ctx context.Context, req resource
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -258,6 +145,13 @@ func (r *DestinationClickhouseResource) Schema(ctx context.Context, req resource
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -274,7 +168,8 @@ func (r *DestinationClickhouseResource) Schema(ctx context.Context, req resource
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -324,7 +219,7 @@ func (r *DestinationClickhouseResource) Create(ctx context.Context, req resource
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationClickhouse(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -500,5 +395,5 @@ func (r *DestinationClickhouseResource) Delete(ctx context.Context, req resource
}
func (r *DestinationClickhouseResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_clickhouse_resource_sdk.go b/internal/provider/destination_clickhouse_resource_sdk.go
old mode 100755
new mode 100644
index 2a45212af..46cd882fd
--- a/internal/provider/destination_clickhouse_resource_sdk.go
+++ b/internal/provider/destination_clickhouse_resource_sdk.go
@@ -3,13 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationClickhouseResourceModel) ToCreateSDKType() *shared.DestinationClickhouseCreateRequest {
database := r.Configuration.Database.ValueString()
- destinationType := shared.DestinationClickhouseClickhouse(r.Configuration.DestinationType.ValueString())
host := r.Configuration.Host.ValueString()
jdbcURLParams := new(string)
if !r.Configuration.JdbcURLParams.IsUnknown() && !r.Configuration.JdbcURLParams.IsNull() {
@@ -23,77 +22,91 @@ func (r *DestinationClickhouseResourceModel) ToCreateSDKType() *shared.Destinati
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var tunnelMethod *shared.DestinationClickhouseSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationClickhouseSSHTunnelMethodNoTunnel *shared.DestinationClickhouseSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationClickhouseSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationClickhouseSSHTunnelMethodNoTunnel = &shared.DestinationClickhouseSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationClickhouseNoTunnel *shared.DestinationClickhouseNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationClickhouseNoTunnel = &shared.DestinationClickhouseNoTunnel{}
}
- if destinationClickhouseSSHTunnelMethodNoTunnel != nil {
+ if destinationClickhouseNoTunnel != nil {
tunnelMethod = &shared.DestinationClickhouseSSHTunnelMethod{
- DestinationClickhouseSSHTunnelMethodNoTunnel: destinationClickhouseSSHTunnelMethodNoTunnel,
+ DestinationClickhouseNoTunnel: destinationClickhouseNoTunnel,
}
}
- var destinationClickhouseSSHTunnelMethodSSHKeyAuthentication *shared.DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationClickhouseSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationClickhouseSSHKeyAuthentication *shared.DestinationClickhouseSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationClickhouseSSHKeyAuthentication = &shared.DestinationClickhouseSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationClickhouseSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationClickhouseSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationClickhouseSSHTunnelMethod{
- DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication: destinationClickhouseSSHTunnelMethodSSHKeyAuthentication,
+ DestinationClickhouseSSHKeyAuthentication: destinationClickhouseSSHKeyAuthentication,
}
}
- var destinationClickhouseSSHTunnelMethodPasswordAuthentication *shared.DestinationClickhouseSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationClickhouseSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationClickhouseSSHTunnelMethodPasswordAuthentication = &shared.DestinationClickhouseSSHTunnelMethodPasswordAuthentication{
+ var destinationClickhousePasswordAuthentication *shared.DestinationClickhousePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationClickhousePasswordAuthentication = &shared.DestinationClickhousePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationClickhouseSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationClickhousePasswordAuthentication != nil {
tunnelMethod = &shared.DestinationClickhouseSSHTunnelMethod{
- DestinationClickhouseSSHTunnelMethodPasswordAuthentication: destinationClickhouseSSHTunnelMethodPasswordAuthentication,
+ DestinationClickhousePasswordAuthentication: destinationClickhousePasswordAuthentication,
}
}
}
username := r.Configuration.Username.ValueString()
configuration := shared.DestinationClickhouse{
- Database: database,
- DestinationType: destinationType,
- Host: host,
- JdbcURLParams: jdbcURLParams,
- Password: password,
- Port: port,
- TunnelMethod: tunnelMethod,
- Username: username,
+ Database: database,
+ Host: host,
+ JdbcURLParams: jdbcURLParams,
+ Password: password,
+ Port: port,
+ TunnelMethod: tunnelMethod,
+ Username: username,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationClickhouseCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -120,59 +133,67 @@ func (r *DestinationClickhouseResourceModel) ToUpdateSDKType() *shared.Destinati
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
- var tunnelMethod *shared.DestinationClickhouseUpdateSSHTunnelMethod
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ var tunnelMethod *shared.SSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationClickhouseUpdateSSHTunnelMethodNoTunnel *shared.DestinationClickhouseUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationClickhouseUpdateSSHTunnelMethodNoTunnel = &shared.DestinationClickhouseUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var noTunnel *shared.NoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ noTunnel = &shared.NoTunnel{}
}
- if destinationClickhouseUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod = &shared.DestinationClickhouseUpdateSSHTunnelMethod{
- DestinationClickhouseUpdateSSHTunnelMethodNoTunnel: destinationClickhouseUpdateSSHTunnelMethodNoTunnel,
+ if noTunnel != nil {
+ tunnelMethod = &shared.SSHTunnelMethod{
+ NoTunnel: noTunnel,
}
}
- var destinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication *shared.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sshKeyAuthentication *shared.SSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sshKeyAuthentication = &shared.SSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- tunnelMethod = &shared.DestinationClickhouseUpdateSSHTunnelMethod{
- DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication: destinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication,
+ if sshKeyAuthentication != nil {
+ tunnelMethod = &shared.SSHTunnelMethod{
+ SSHKeyAuthentication: sshKeyAuthentication,
}
}
- var destinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication *shared.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication = &shared.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication{
+ var passwordAuthentication *shared.PasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ passwordAuthentication = &shared.PasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelMethod = &shared.DestinationClickhouseUpdateSSHTunnelMethod{
- DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication: destinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication,
+ if passwordAuthentication != nil {
+ tunnelMethod = &shared.SSHTunnelMethod{
+ PasswordAuthentication: passwordAuthentication,
}
}
}
diff --git a/internal/provider/destination_convex_data_source.go b/internal/provider/destination_convex_data_source.go
old mode 100755
new mode 100644
index 9f7ce6936..11b3c0642
--- a/internal/provider/destination_convex_data_source.go
+++ b/internal/provider/destination_convex_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationConvexDataSource struct {
// DestinationConvexDataSourceModel describes the data model.
type DestinationConvexDataSourceModel struct {
- Configuration DestinationConvex `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,31 +47,17 @@ func (r *DestinationConvexDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "DestinationConvex DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key": schema.StringAttribute{
- Computed: true,
- Description: `API access key used to send data to a Convex deployment.`,
- },
- "deployment_url": schema.StringAttribute{
- Computed: true,
- Description: `URL of the Convex deployment that is the destination`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "convex",
- ),
- },
- Description: `must be one of ["convex"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_convex_data_source_sdk.go b/internal/provider/destination_convex_data_source_sdk.go
old mode 100755
new mode 100644
index 4d7980c72..b530f10ba
--- a/internal/provider/destination_convex_data_source_sdk.go
+++ b/internal/provider/destination_convex_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationConvexDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_convex_resource.go b/internal/provider/destination_convex_resource.go
old mode 100755
new mode 100644
index 608007a8b..7ea58cd9d
--- a/internal/provider/destination_convex_resource.go
+++ b/internal/provider/destination_convex_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationConvexResource struct {
// DestinationConvexResourceModel describes the resource data model.
type DestinationConvexResourceModel struct {
Configuration DestinationConvex `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -55,23 +55,22 @@ func (r *DestinationConvexResource) Schema(ctx context.Context, req resource.Sch
Attributes: map[string]schema.Attribute{
"access_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API access key used to send data to a Convex deployment.`,
},
"deployment_url": schema.StringAttribute{
Required: true,
Description: `URL of the Convex deployment that is the destination`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "convex",
- ),
- },
- Description: `must be one of ["convex"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -88,7 +87,8 @@ func (r *DestinationConvexResource) Schema(ctx context.Context, req resource.Sch
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -138,7 +138,7 @@ func (r *DestinationConvexResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationConvex(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -314,5 +314,5 @@ func (r *DestinationConvexResource) Delete(ctx context.Context, req resource.Del
}
func (r *DestinationConvexResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_convex_resource_sdk.go b/internal/provider/destination_convex_resource_sdk.go
old mode 100755
new mode 100644
index aa9848ce7..225aae0c3
--- a/internal/provider/destination_convex_resource_sdk.go
+++ b/internal/provider/destination_convex_resource_sdk.go
@@ -3,23 +3,28 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationConvexResourceModel) ToCreateSDKType() *shared.DestinationConvexCreateRequest {
accessKey := r.Configuration.AccessKey.ValueString()
deploymentURL := r.Configuration.DeploymentURL.ValueString()
- destinationType := shared.DestinationConvexConvex(r.Configuration.DestinationType.ValueString())
configuration := shared.DestinationConvex{
- AccessKey: accessKey,
- DeploymentURL: deploymentURL,
- DestinationType: destinationType,
+ AccessKey: accessKey,
+ DeploymentURL: deploymentURL,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationConvexCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
diff --git a/internal/provider/destination_cumulio_data_source.go b/internal/provider/destination_cumulio_data_source.go
old mode 100755
new mode 100644
index 7ac97a472..e003e66ed
--- a/internal/provider/destination_cumulio_data_source.go
+++ b/internal/provider/destination_cumulio_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationCumulioDataSource struct {
// DestinationCumulioDataSourceModel describes the data model.
type DestinationCumulioDataSourceModel struct {
- Configuration DestinationCumulio `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,35 +47,17 @@ func (r *DestinationCumulioDataSource) Schema(ctx context.Context, req datasourc
MarkdownDescription: "DestinationCumulio DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_host": schema.StringAttribute{
- Computed: true,
- Description: `URL of the Cumul.io API (e.g. 'https://api.cumul.io', 'https://api.us.cumul.io', or VPC-specific API url). Defaults to 'https://api.cumul.io'.`,
- },
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `An API key generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).`,
- },
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding API token generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "cumulio",
- ),
- },
- Description: `must be one of ["cumulio"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_cumulio_data_source_sdk.go b/internal/provider/destination_cumulio_data_source_sdk.go
old mode 100755
new mode 100644
index 242f6f92f..0718929aa
--- a/internal/provider/destination_cumulio_data_source_sdk.go
+++ b/internal/provider/destination_cumulio_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationCumulioDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_cumulio_resource.go b/internal/provider/destination_cumulio_resource.go
old mode 100755
new mode 100644
index a805aa2ca..1af60ba47
--- a/internal/provider/destination_cumulio_resource.go
+++ b/internal/provider/destination_cumulio_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationCumulioResource struct {
// DestinationCumulioResourceModel describes the resource data model.
type DestinationCumulioResourceModel struct {
Configuration DestinationCumulio `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -54,28 +54,29 @@ func (r *DestinationCumulioResource) Schema(ctx context.Context, req resource.Sc
Required: true,
Attributes: map[string]schema.Attribute{
"api_host": schema.StringAttribute{
- Required: true,
- Description: `URL of the Cumul.io API (e.g. 'https://api.cumul.io', 'https://api.us.cumul.io', or VPC-specific API url). Defaults to 'https://api.cumul.io'.`,
+ Optional: true,
+ MarkdownDescription: `Default: "https://api.cumul.io"` + "\n" +
+ `URL of the Cumul.io API (e.g. 'https://api.cumul.io', 'https://api.us.cumul.io', or VPC-specific API url). Defaults to 'https://api.cumul.io'.`,
},
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `An API key generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).`,
},
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The corresponding API token generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "cumulio",
- ),
- },
- Description: `must be one of ["cumulio"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -92,7 +93,8 @@ func (r *DestinationCumulioResource) Schema(ctx context.Context, req resource.Sc
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -142,7 +144,7 @@ func (r *DestinationCumulioResource) Create(ctx context.Context, req resource.Cr
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationCumulio(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -318,5 +320,5 @@ func (r *DestinationCumulioResource) Delete(ctx context.Context, req resource.De
}
func (r *DestinationCumulioResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_cumulio_resource_sdk.go b/internal/provider/destination_cumulio_resource_sdk.go
old mode 100755
new mode 100644
index 9edcbda1d..bf1bfed2a
--- a/internal/provider/destination_cumulio_resource_sdk.go
+++ b/internal/provider/destination_cumulio_resource_sdk.go
@@ -3,25 +3,35 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationCumulioResourceModel) ToCreateSDKType() *shared.DestinationCumulioCreateRequest {
- apiHost := r.Configuration.APIHost.ValueString()
+ apiHost := new(string)
+ if !r.Configuration.APIHost.IsUnknown() && !r.Configuration.APIHost.IsNull() {
+ *apiHost = r.Configuration.APIHost.ValueString()
+ } else {
+ apiHost = nil
+ }
apiKey := r.Configuration.APIKey.ValueString()
apiToken := r.Configuration.APIToken.ValueString()
- destinationType := shared.DestinationCumulioCumulio(r.Configuration.DestinationType.ValueString())
configuration := shared.DestinationCumulio{
- APIHost: apiHost,
- APIKey: apiKey,
- APIToken: apiToken,
- DestinationType: destinationType,
+ APIHost: apiHost,
+ APIKey: apiKey,
+ APIToken: apiToken,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationCumulioCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -34,7 +44,12 @@ func (r *DestinationCumulioResourceModel) ToGetSDKType() *shared.DestinationCumu
}
func (r *DestinationCumulioResourceModel) ToUpdateSDKType() *shared.DestinationCumulioPutRequest {
- apiHost := r.Configuration.APIHost.ValueString()
+ apiHost := new(string)
+ if !r.Configuration.APIHost.IsUnknown() && !r.Configuration.APIHost.IsNull() {
+ *apiHost = r.Configuration.APIHost.ValueString()
+ } else {
+ apiHost = nil
+ }
apiKey := r.Configuration.APIKey.ValueString()
apiToken := r.Configuration.APIToken.ValueString()
configuration := shared.DestinationCumulioUpdate{
diff --git a/internal/provider/destination_databend_data_source.go b/internal/provider/destination_databend_data_source.go
old mode 100755
new mode 100644
index f6a262ab2..de9f47b92
--- a/internal/provider/destination_databend_data_source.go
+++ b/internal/provider/destination_databend_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationDatabendDataSource struct {
// DestinationDatabendDataSourceModel describes the data model.
type DestinationDatabendDataSourceModel struct {
- Configuration DestinationDatabend `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,47 +47,17 @@ func (r *DestinationDatabendDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "DestinationDatabend DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `Name of the database.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "databend",
- ),
- },
- Description: `must be one of ["databend"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the database.`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `Port of the database.`,
- },
- "table": schema.StringAttribute{
- Computed: true,
- Description: `The default table was written to.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to use to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_databend_data_source_sdk.go b/internal/provider/destination_databend_data_source_sdk.go
old mode 100755
new mode 100644
index 5bfdcef4c..6cc8102de
--- a/internal/provider/destination_databend_data_source_sdk.go
+++ b/internal/provider/destination_databend_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationDatabendDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_databend_resource.go b/internal/provider/destination_databend_resource.go
old mode 100755
new mode 100644
index a8d5a2243..46fdaf6cf
--- a/internal/provider/destination_databend_resource.go
+++ b/internal/provider/destination_databend_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationDatabendResource struct {
// DestinationDatabendResourceModel describes the resource data model.
type DestinationDatabendResourceModel struct {
Configuration DestinationDatabend `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -57,30 +57,24 @@ func (r *DestinationDatabendResource) Schema(ctx context.Context, req resource.S
Required: true,
Description: `Name of the database.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "databend",
- ),
- },
- Description: `must be one of ["databend"]`,
- },
"host": schema.StringAttribute{
Required: true,
Description: `Hostname of the database.`,
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password associated with the username.`,
},
"port": schema.Int64Attribute{
- Optional: true,
- Description: `Port of the database.`,
+ Optional: true,
+ MarkdownDescription: `Default: 443` + "\n" +
+ `Port of the database.`,
},
"table": schema.StringAttribute{
- Optional: true,
- Description: `The default table was written to.`,
+ Optional: true,
+ MarkdownDescription: `Default: "default"` + "\n" +
+ `The default table was written to.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -88,6 +82,13 @@ func (r *DestinationDatabendResource) Schema(ctx context.Context, req resource.S
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -104,7 +105,8 @@ func (r *DestinationDatabendResource) Schema(ctx context.Context, req resource.S
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -154,7 +156,7 @@ func (r *DestinationDatabendResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationDatabend(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -330,5 +332,5 @@ func (r *DestinationDatabendResource) Delete(ctx context.Context, req resource.D
}
func (r *DestinationDatabendResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_databend_resource_sdk.go b/internal/provider/destination_databend_resource_sdk.go
old mode 100755
new mode 100644
index 35651e919..74b0e6ad8
--- a/internal/provider/destination_databend_resource_sdk.go
+++ b/internal/provider/destination_databend_resource_sdk.go
@@ -3,13 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationDatabendResourceModel) ToCreateSDKType() *shared.DestinationDatabendCreateRequest {
database := r.Configuration.Database.ValueString()
- destinationType := shared.DestinationDatabendDatabend(r.Configuration.DestinationType.ValueString())
host := r.Configuration.Host.ValueString()
password := new(string)
if !r.Configuration.Password.IsUnknown() && !r.Configuration.Password.IsNull() {
@@ -31,18 +30,24 @@ func (r *DestinationDatabendResourceModel) ToCreateSDKType() *shared.Destination
}
username := r.Configuration.Username.ValueString()
configuration := shared.DestinationDatabend{
- Database: database,
- DestinationType: destinationType,
- Host: host,
- Password: password,
- Port: port,
- Table: table,
- Username: username,
+ Database: database,
+ Host: host,
+ Password: password,
+ Port: port,
+ Table: table,
+ Username: username,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationDatabendCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
diff --git a/internal/provider/destination_databricks_data_source.go b/internal/provider/destination_databricks_data_source.go
old mode 100755
new mode 100644
index ad37dc060..ed80085a1
--- a/internal/provider/destination_databricks_data_source.go
+++ b/internal/provider/destination_databricks_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationDatabricksDataSource struct {
// DestinationDatabricksDataSourceModel describes the data model.
type DestinationDatabricksDataSourceModel struct {
- Configuration DestinationDatabricks1 `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,300 +47,17 @@ func (r *DestinationDatabricksDataSource) Schema(ctx context.Context, req dataso
MarkdownDescription: "DestinationDatabricks DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "accept_terms": schema.BoolAttribute{
- Computed: true,
- Description: `You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector.`,
- },
- "data_source": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_databricks_data_source_recommended_managed_tables": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "data_source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "MANAGED_TABLES_STORAGE",
- ),
- },
- Description: `must be one of ["MANAGED_TABLES_STORAGE"]`,
- },
- },
- Description: `Storage on which the delta lake is built.`,
- },
- "destination_databricks_data_source_amazon_s3": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "data_source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3_STORAGE",
- ),
- },
- Description: `must be one of ["S3_STORAGE"]`,
- },
- "file_name_pattern": schema.StringAttribute{
- Computed: true,
- Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
- },
- "s3_access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.`,
- },
- "s3_bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the S3 bucket to use for intermittent staging of the data.`,
- },
- "s3_bucket_path": schema.StringAttribute{
- Computed: true,
- Description: `The directory under the S3 bucket where data will be written.`,
- },
- "s3_bucket_region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "",
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- "us-gov-east-1",
- "us-gov-west-1",
- ),
- },
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the S3 staging bucket to use if utilising a copy strategy.`,
- },
- "s3_secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret to the above access key id.`,
- },
- },
- Description: `Storage on which the delta lake is built.`,
- },
- "destination_databricks_data_source_azure_blob_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "azure_blob_storage_account_name": schema.StringAttribute{
- Computed: true,
- Description: `The account's name of the Azure Blob Storage.`,
- },
- "azure_blob_storage_container_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the Azure blob storage container.`,
- },
- "azure_blob_storage_endpoint_domain_name": schema.StringAttribute{
- Computed: true,
- Description: `This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.`,
- },
- "azure_blob_storage_sas_token": schema.StringAttribute{
- Computed: true,
- Description: `Shared access signature (SAS) token to grant limited access to objects in your storage account.`,
- },
- "data_source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AZURE_BLOB_STORAGE",
- ),
- },
- Description: `must be one of ["AZURE_BLOB_STORAGE"]`,
- },
- },
- Description: `Storage on which the delta lake is built.`,
- },
- "destination_databricks_update_data_source_recommended_managed_tables": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "data_source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "MANAGED_TABLES_STORAGE",
- ),
- },
- Description: `must be one of ["MANAGED_TABLES_STORAGE"]`,
- },
- },
- Description: `Storage on which the delta lake is built.`,
- },
- "destination_databricks_update_data_source_amazon_s3": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "data_source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3_STORAGE",
- ),
- },
- Description: `must be one of ["S3_STORAGE"]`,
- },
- "file_name_pattern": schema.StringAttribute{
- Computed: true,
- Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
- },
- "s3_access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.`,
- },
- "s3_bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the S3 bucket to use for intermittent staging of the data.`,
- },
- "s3_bucket_path": schema.StringAttribute{
- Computed: true,
- Description: `The directory under the S3 bucket where data will be written.`,
- },
- "s3_bucket_region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "",
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- "us-gov-east-1",
- "us-gov-west-1",
- ),
- },
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the S3 staging bucket to use if utilising a copy strategy.`,
- },
- "s3_secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret to the above access key id.`,
- },
- },
- Description: `Storage on which the delta lake is built.`,
- },
- "destination_databricks_update_data_source_azure_blob_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "azure_blob_storage_account_name": schema.StringAttribute{
- Computed: true,
- Description: `The account's name of the Azure Blob Storage.`,
- },
- "azure_blob_storage_container_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the Azure blob storage container.`,
- },
- "azure_blob_storage_endpoint_domain_name": schema.StringAttribute{
- Computed: true,
- Description: `This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.`,
- },
- "azure_blob_storage_sas_token": schema.StringAttribute{
- Computed: true,
- Description: `Shared access signature (SAS) token to grant limited access to objects in your storage account.`,
- },
- "data_source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AZURE_BLOB_STORAGE",
- ),
- },
- Description: `must be one of ["AZURE_BLOB_STORAGE"]`,
- },
- },
- Description: `Storage on which the delta lake is built.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Storage on which the delta lake is built.`,
- },
- "database": schema.StringAttribute{
- Computed: true,
- Description: `The name of the catalog. If not specified otherwise, the "hive_metastore" will be used.`,
- },
- "databricks_http_path": schema.StringAttribute{
- Computed: true,
- Description: `Databricks Cluster HTTP Path.`,
- },
- "databricks_personal_access_token": schema.StringAttribute{
- Computed: true,
- Description: `Databricks Personal Access Token for making authenticated requests.`,
- },
- "databricks_port": schema.StringAttribute{
- Computed: true,
- Description: `Databricks Cluster Port.`,
- },
- "databricks_server_hostname": schema.StringAttribute{
- Computed: true,
- Description: `Databricks Cluster Server Hostname.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "databricks",
- ),
- },
- Description: `must be one of ["databricks"]`,
- },
- "enable_schema_evolution": schema.BoolAttribute{
- Computed: true,
- Description: `Support schema evolution for all streams. If "false", the connector might fail when a stream's schema changes.`,
- },
- "purge_staging_data": schema.BoolAttribute{
- Computed: true,
- Description: `Default to 'true'. Switch it to 'false' for debugging purpose.`,
- },
- "schema": schema.StringAttribute{
- Computed: true,
- Description: `The default schema tables are written. If not specified otherwise, the "default" will be used.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_databricks_data_source_sdk.go b/internal/provider/destination_databricks_data_source_sdk.go
old mode 100755
new mode 100644
index ff22c27a1..8d4a8d923
--- a/internal/provider/destination_databricks_data_source_sdk.go
+++ b/internal/provider/destination_databricks_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationDatabricksDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_databricks_resource.go b/internal/provider/destination_databricks_resource.go
old mode 100755
new mode 100644
index 9a1085c42..d32582c07
--- a/internal/provider/destination_databricks_resource.go
+++ b/internal/provider/destination_databricks_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationDatabricksResource struct {
// DestinationDatabricksResourceModel describes the resource data model.
type DestinationDatabricksResourceModel struct {
Configuration DestinationDatabricks `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -55,45 +57,28 @@ func (r *DestinationDatabricksResource) Schema(ctx context.Context, req resource
Required: true,
Attributes: map[string]schema.Attribute{
"accept_terms": schema.BoolAttribute{
- Required: true,
- Description: `You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector.`,
},
"data_source": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_databricks_data_source_recommended_managed_tables": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "data_source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "MANAGED_TABLES_STORAGE",
- ),
- },
- Description: `must be one of ["MANAGED_TABLES_STORAGE"]`,
- },
- },
+ "recommended_managed_tables": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Storage on which the delta lake is built.`,
},
- "destination_databricks_data_source_amazon_s3": schema.SingleNestedAttribute{
+ "amazon_s3": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "data_source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3_STORAGE",
- ),
- },
- Description: `must be one of ["S3_STORAGE"]`,
- },
"file_name_pattern": schema.StringAttribute{
Optional: true,
Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
},
"s3_access_key_id": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.`,
},
"s3_bucket_name": schema.StringAttribute{
@@ -105,123 +90,9 @@ func (r *DestinationDatabricksResource) Schema(ctx context.Context, req resource
Description: `The directory under the S3 bucket where data will be written.`,
},
"s3_bucket_region": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "",
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- "us-gov-east-1",
- "us-gov-west-1",
- ),
- },
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
+ Optional: true,
+ MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""` + "\n" +
`The region of the S3 staging bucket to use if utilising a copy strategy.`,
- },
- "s3_secret_access_key": schema.StringAttribute{
- Required: true,
- Description: `The corresponding secret to the above access key id.`,
- },
- },
- Description: `Storage on which the delta lake is built.`,
- },
- "destination_databricks_data_source_azure_blob_storage": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "azure_blob_storage_account_name": schema.StringAttribute{
- Required: true,
- Description: `The account's name of the Azure Blob Storage.`,
- },
- "azure_blob_storage_container_name": schema.StringAttribute{
- Required: true,
- Description: `The name of the Azure blob storage container.`,
- },
- "azure_blob_storage_endpoint_domain_name": schema.StringAttribute{
- Optional: true,
- Description: `This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.`,
- },
- "azure_blob_storage_sas_token": schema.StringAttribute{
- Required: true,
- Description: `Shared access signature (SAS) token to grant limited access to objects in your storage account.`,
- },
- "data_source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AZURE_BLOB_STORAGE",
- ),
- },
- Description: `must be one of ["AZURE_BLOB_STORAGE"]`,
- },
- },
- Description: `Storage on which the delta lake is built.`,
- },
- "destination_databricks_update_data_source_recommended_managed_tables": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "data_source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "MANAGED_TABLES_STORAGE",
- ),
- },
- Description: `must be one of ["MANAGED_TABLES_STORAGE"]`,
- },
- },
- Description: `Storage on which the delta lake is built.`,
- },
- "destination_databricks_update_data_source_amazon_s3": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "data_source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3_STORAGE",
- ),
- },
- Description: `must be one of ["S3_STORAGE"]`,
- },
- "file_name_pattern": schema.StringAttribute{
- Optional: true,
- Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
- },
- "s3_access_key_id": schema.StringAttribute{
- Required: true,
- Description: `The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.`,
- },
- "s3_bucket_name": schema.StringAttribute{
- Required: true,
- Description: `The name of the S3 bucket to use for intermittent staging of the data.`,
- },
- "s3_bucket_path": schema.StringAttribute{
- Required: true,
- Description: `The directory under the S3 bucket where data will be written.`,
- },
- "s3_bucket_region": schema.StringAttribute{
- Required: true,
Validators: []validator.String{
stringvalidator.OneOf(
"",
@@ -252,17 +123,16 @@ func (r *DestinationDatabricksResource) Schema(ctx context.Context, req resource
"us-gov-west-1",
),
},
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the S3 staging bucket to use if utilising a copy strategy.`,
},
"s3_secret_access_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The corresponding secret to the above access key id.`,
},
},
Description: `Storage on which the delta lake is built.`,
},
- "destination_databricks_update_data_source_azure_blob_storage": schema.SingleNestedAttribute{
+ "azure_blob_storage": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"azure_blob_storage_account_name": schema.StringAttribute{
@@ -274,30 +144,23 @@ func (r *DestinationDatabricksResource) Schema(ctx context.Context, req resource
Description: `The name of the Azure blob storage container.`,
},
"azure_blob_storage_endpoint_domain_name": schema.StringAttribute{
- Optional: true,
- Description: `This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.`,
+ Optional: true,
+ MarkdownDescription: `Default: "blob.core.windows.net"` + "\n" +
+ `This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.`,
},
"azure_blob_storage_sas_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Shared access signature (SAS) token to grant limited access to objects in your storage account.`,
},
- "data_source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AZURE_BLOB_STORAGE",
- ),
- },
- Description: `must be one of ["AZURE_BLOB_STORAGE"]`,
- },
},
Description: `Storage on which the delta lake is built.`,
},
},
+ Description: `Storage on which the delta lake is built.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Storage on which the delta lake is built.`,
},
"database": schema.StringAttribute{
Optional: true,
@@ -309,39 +172,42 @@ func (r *DestinationDatabricksResource) Schema(ctx context.Context, req resource
},
"databricks_personal_access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Databricks Personal Access Token for making authenticated requests.`,
},
"databricks_port": schema.StringAttribute{
- Optional: true,
- Description: `Databricks Cluster Port.`,
+ Optional: true,
+ MarkdownDescription: `Default: "443"` + "\n" +
+ `Databricks Cluster Port.`,
},
"databricks_server_hostname": schema.StringAttribute{
Required: true,
Description: `Databricks Cluster Server Hostname.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "databricks",
- ),
- },
- Description: `must be one of ["databricks"]`,
- },
"enable_schema_evolution": schema.BoolAttribute{
- Optional: true,
- Description: `Support schema evolution for all streams. If "false", the connector might fail when a stream's schema changes.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Support schema evolution for all streams. If "false", the connector might fail when a stream's schema changes.`,
},
"purge_staging_data": schema.BoolAttribute{
- Optional: true,
- Description: `Default to 'true'. Switch it to 'false' for debugging purpose.`,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Default to 'true'. Switch it to 'false' for debugging purpose.`,
},
"schema": schema.StringAttribute{
- Optional: true,
- Description: `The default schema tables are written. If not specified otherwise, the "default" will be used.`,
+ Optional: true,
+ MarkdownDescription: `Default: "default"` + "\n" +
+ `The default schema tables are written. If not specified otherwise, the "default" will be used.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -358,7 +224,8 @@ func (r *DestinationDatabricksResource) Schema(ctx context.Context, req resource
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -408,7 +275,7 @@ func (r *DestinationDatabricksResource) Create(ctx context.Context, req resource
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationDatabricks(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -584,5 +451,5 @@ func (r *DestinationDatabricksResource) Delete(ctx context.Context, req resource
}
func (r *DestinationDatabricksResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_databricks_resource_sdk.go b/internal/provider/destination_databricks_resource_sdk.go
old mode 100755
new mode 100644
index a444df320..c79c0723f
--- a/internal/provider/destination_databricks_resource_sdk.go
+++ b/internal/provider/destination_databricks_resource_sdk.go
@@ -3,41 +3,46 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationDatabricksResourceModel) ToCreateSDKType() *shared.DestinationDatabricksCreateRequest {
- acceptTerms := r.Configuration.AcceptTerms.ValueBool()
+ acceptTerms := new(bool)
+ if !r.Configuration.AcceptTerms.IsUnknown() && !r.Configuration.AcceptTerms.IsNull() {
+ *acceptTerms = r.Configuration.AcceptTerms.ValueBool()
+ } else {
+ acceptTerms = nil
+ }
var dataSource shared.DestinationDatabricksDataSource
- var destinationDatabricksDataSourceRecommendedManagedTables *shared.DestinationDatabricksDataSourceRecommendedManagedTables
- if r.Configuration.DataSource.DestinationDatabricksDataSourceRecommendedManagedTables != nil {
- dataSourceType := shared.DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType(r.Configuration.DataSource.DestinationDatabricksDataSourceRecommendedManagedTables.DataSourceType.ValueString())
- destinationDatabricksDataSourceRecommendedManagedTables = &shared.DestinationDatabricksDataSourceRecommendedManagedTables{
- DataSourceType: dataSourceType,
- }
+ var destinationDatabricksRecommendedManagedTables *shared.DestinationDatabricksRecommendedManagedTables
+ if r.Configuration.DataSource.RecommendedManagedTables != nil {
+ destinationDatabricksRecommendedManagedTables = &shared.DestinationDatabricksRecommendedManagedTables{}
}
- if destinationDatabricksDataSourceRecommendedManagedTables != nil {
+ if destinationDatabricksRecommendedManagedTables != nil {
dataSource = shared.DestinationDatabricksDataSource{
- DestinationDatabricksDataSourceRecommendedManagedTables: destinationDatabricksDataSourceRecommendedManagedTables,
+ DestinationDatabricksRecommendedManagedTables: destinationDatabricksRecommendedManagedTables,
}
}
- var destinationDatabricksDataSourceAmazonS3 *shared.DestinationDatabricksDataSourceAmazonS3
- if r.Configuration.DataSource.DestinationDatabricksDataSourceAmazonS3 != nil {
- dataSourceType1 := shared.DestinationDatabricksDataSourceAmazonS3DataSourceType(r.Configuration.DataSource.DestinationDatabricksDataSourceAmazonS3.DataSourceType.ValueString())
+ var destinationDatabricksAmazonS3 *shared.DestinationDatabricksAmazonS3
+ if r.Configuration.DataSource.AmazonS3 != nil {
fileNamePattern := new(string)
- if !r.Configuration.DataSource.DestinationDatabricksDataSourceAmazonS3.FileNamePattern.IsUnknown() && !r.Configuration.DataSource.DestinationDatabricksDataSourceAmazonS3.FileNamePattern.IsNull() {
- *fileNamePattern = r.Configuration.DataSource.DestinationDatabricksDataSourceAmazonS3.FileNamePattern.ValueString()
+ if !r.Configuration.DataSource.AmazonS3.FileNamePattern.IsUnknown() && !r.Configuration.DataSource.AmazonS3.FileNamePattern.IsNull() {
+ *fileNamePattern = r.Configuration.DataSource.AmazonS3.FileNamePattern.ValueString()
} else {
fileNamePattern = nil
}
- s3AccessKeyID := r.Configuration.DataSource.DestinationDatabricksDataSourceAmazonS3.S3AccessKeyID.ValueString()
- s3BucketName := r.Configuration.DataSource.DestinationDatabricksDataSourceAmazonS3.S3BucketName.ValueString()
- s3BucketPath := r.Configuration.DataSource.DestinationDatabricksDataSourceAmazonS3.S3BucketPath.ValueString()
- s3BucketRegion := shared.DestinationDatabricksDataSourceAmazonS3S3BucketRegion(r.Configuration.DataSource.DestinationDatabricksDataSourceAmazonS3.S3BucketRegion.ValueString())
- s3SecretAccessKey := r.Configuration.DataSource.DestinationDatabricksDataSourceAmazonS3.S3SecretAccessKey.ValueString()
- destinationDatabricksDataSourceAmazonS3 = &shared.DestinationDatabricksDataSourceAmazonS3{
- DataSourceType: dataSourceType1,
+ s3AccessKeyID := r.Configuration.DataSource.AmazonS3.S3AccessKeyID.ValueString()
+ s3BucketName := r.Configuration.DataSource.AmazonS3.S3BucketName.ValueString()
+ s3BucketPath := r.Configuration.DataSource.AmazonS3.S3BucketPath.ValueString()
+ s3BucketRegion := new(shared.DestinationDatabricksS3BucketRegion)
+ if !r.Configuration.DataSource.AmazonS3.S3BucketRegion.IsUnknown() && !r.Configuration.DataSource.AmazonS3.S3BucketRegion.IsNull() {
+ *s3BucketRegion = shared.DestinationDatabricksS3BucketRegion(r.Configuration.DataSource.AmazonS3.S3BucketRegion.ValueString())
+ } else {
+ s3BucketRegion = nil
+ }
+ s3SecretAccessKey := r.Configuration.DataSource.AmazonS3.S3SecretAccessKey.ValueString()
+ destinationDatabricksAmazonS3 = &shared.DestinationDatabricksAmazonS3{
FileNamePattern: fileNamePattern,
S3AccessKeyID: s3AccessKeyID,
S3BucketName: s3BucketName,
@@ -46,34 +51,32 @@ func (r *DestinationDatabricksResourceModel) ToCreateSDKType() *shared.Destinati
S3SecretAccessKey: s3SecretAccessKey,
}
}
- if destinationDatabricksDataSourceAmazonS3 != nil {
+ if destinationDatabricksAmazonS3 != nil {
dataSource = shared.DestinationDatabricksDataSource{
- DestinationDatabricksDataSourceAmazonS3: destinationDatabricksDataSourceAmazonS3,
+ DestinationDatabricksAmazonS3: destinationDatabricksAmazonS3,
}
}
- var destinationDatabricksDataSourceAzureBlobStorage *shared.DestinationDatabricksDataSourceAzureBlobStorage
- if r.Configuration.DataSource.DestinationDatabricksDataSourceAzureBlobStorage != nil {
- azureBlobStorageAccountName := r.Configuration.DataSource.DestinationDatabricksDataSourceAzureBlobStorage.AzureBlobStorageAccountName.ValueString()
- azureBlobStorageContainerName := r.Configuration.DataSource.DestinationDatabricksDataSourceAzureBlobStorage.AzureBlobStorageContainerName.ValueString()
+ var destinationDatabricksAzureBlobStorage *shared.DestinationDatabricksAzureBlobStorage
+ if r.Configuration.DataSource.AzureBlobStorage != nil {
+ azureBlobStorageAccountName := r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageAccountName.ValueString()
+ azureBlobStorageContainerName := r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageContainerName.ValueString()
azureBlobStorageEndpointDomainName := new(string)
- if !r.Configuration.DataSource.DestinationDatabricksDataSourceAzureBlobStorage.AzureBlobStorageEndpointDomainName.IsUnknown() && !r.Configuration.DataSource.DestinationDatabricksDataSourceAzureBlobStorage.AzureBlobStorageEndpointDomainName.IsNull() {
- *azureBlobStorageEndpointDomainName = r.Configuration.DataSource.DestinationDatabricksDataSourceAzureBlobStorage.AzureBlobStorageEndpointDomainName.ValueString()
+ if !r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageEndpointDomainName.IsUnknown() && !r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageEndpointDomainName.IsNull() {
+ *azureBlobStorageEndpointDomainName = r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageEndpointDomainName.ValueString()
} else {
azureBlobStorageEndpointDomainName = nil
}
- azureBlobStorageSasToken := r.Configuration.DataSource.DestinationDatabricksDataSourceAzureBlobStorage.AzureBlobStorageSasToken.ValueString()
- dataSourceType2 := shared.DestinationDatabricksDataSourceAzureBlobStorageDataSourceType(r.Configuration.DataSource.DestinationDatabricksDataSourceAzureBlobStorage.DataSourceType.ValueString())
- destinationDatabricksDataSourceAzureBlobStorage = &shared.DestinationDatabricksDataSourceAzureBlobStorage{
+ azureBlobStorageSasToken := r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageSasToken.ValueString()
+ destinationDatabricksAzureBlobStorage = &shared.DestinationDatabricksAzureBlobStorage{
AzureBlobStorageAccountName: azureBlobStorageAccountName,
AzureBlobStorageContainerName: azureBlobStorageContainerName,
AzureBlobStorageEndpointDomainName: azureBlobStorageEndpointDomainName,
AzureBlobStorageSasToken: azureBlobStorageSasToken,
- DataSourceType: dataSourceType2,
}
}
- if destinationDatabricksDataSourceAzureBlobStorage != nil {
+ if destinationDatabricksAzureBlobStorage != nil {
dataSource = shared.DestinationDatabricksDataSource{
- DestinationDatabricksDataSourceAzureBlobStorage: destinationDatabricksDataSourceAzureBlobStorage,
+ DestinationDatabricksAzureBlobStorage: destinationDatabricksAzureBlobStorage,
}
}
database := new(string)
@@ -91,7 +94,6 @@ func (r *DestinationDatabricksResourceModel) ToCreateSDKType() *shared.Destinati
databricksPort = nil
}
databricksServerHostname := r.Configuration.DatabricksServerHostname.ValueString()
- destinationType := shared.DestinationDatabricksDatabricks(r.Configuration.DestinationType.ValueString())
enableSchemaEvolution := new(bool)
if !r.Configuration.EnableSchemaEvolution.IsUnknown() && !r.Configuration.EnableSchemaEvolution.IsNull() {
*enableSchemaEvolution = r.Configuration.EnableSchemaEvolution.ValueBool()
@@ -118,15 +120,21 @@ func (r *DestinationDatabricksResourceModel) ToCreateSDKType() *shared.Destinati
DatabricksPersonalAccessToken: databricksPersonalAccessToken,
DatabricksPort: databricksPort,
DatabricksServerHostname: databricksServerHostname,
- DestinationType: destinationType,
EnableSchemaEvolution: enableSchemaEvolution,
PurgeStagingData: purgeStagingData,
Schema: schema,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationDatabricksCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -139,36 +147,41 @@ func (r *DestinationDatabricksResourceModel) ToGetSDKType() *shared.DestinationD
}
func (r *DestinationDatabricksResourceModel) ToUpdateSDKType() *shared.DestinationDatabricksPutRequest {
- acceptTerms := r.Configuration.AcceptTerms.ValueBool()
- var dataSource shared.DestinationDatabricksUpdateDataSource
- var destinationDatabricksUpdateDataSourceRecommendedManagedTables *shared.DestinationDatabricksUpdateDataSourceRecommendedManagedTables
- if r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceRecommendedManagedTables != nil {
- dataSourceType := shared.DestinationDatabricksUpdateDataSourceRecommendedManagedTablesDataSourceType(r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceRecommendedManagedTables.DataSourceType.ValueString())
- destinationDatabricksUpdateDataSourceRecommendedManagedTables = &shared.DestinationDatabricksUpdateDataSourceRecommendedManagedTables{
- DataSourceType: dataSourceType,
- }
+ acceptTerms := new(bool)
+ if !r.Configuration.AcceptTerms.IsUnknown() && !r.Configuration.AcceptTerms.IsNull() {
+ *acceptTerms = r.Configuration.AcceptTerms.ValueBool()
+ } else {
+ acceptTerms = nil
}
- if destinationDatabricksUpdateDataSourceRecommendedManagedTables != nil {
- dataSource = shared.DestinationDatabricksUpdateDataSource{
- DestinationDatabricksUpdateDataSourceRecommendedManagedTables: destinationDatabricksUpdateDataSourceRecommendedManagedTables,
+ var dataSource shared.DataSource
+ var recommendedManagedTables *shared.RecommendedManagedTables
+ if r.Configuration.DataSource.RecommendedManagedTables != nil {
+ recommendedManagedTables = &shared.RecommendedManagedTables{}
+ }
+ if recommendedManagedTables != nil {
+ dataSource = shared.DataSource{
+ RecommendedManagedTables: recommendedManagedTables,
}
}
- var destinationDatabricksUpdateDataSourceAmazonS3 *shared.DestinationDatabricksUpdateDataSourceAmazonS3
- if r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAmazonS3 != nil {
- dataSourceType1 := shared.DestinationDatabricksUpdateDataSourceAmazonS3DataSourceType(r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAmazonS3.DataSourceType.ValueString())
+ var amazonS3 *shared.AmazonS3
+ if r.Configuration.DataSource.AmazonS3 != nil {
fileNamePattern := new(string)
- if !r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAmazonS3.FileNamePattern.IsUnknown() && !r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAmazonS3.FileNamePattern.IsNull() {
- *fileNamePattern = r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAmazonS3.FileNamePattern.ValueString()
+ if !r.Configuration.DataSource.AmazonS3.FileNamePattern.IsUnknown() && !r.Configuration.DataSource.AmazonS3.FileNamePattern.IsNull() {
+ *fileNamePattern = r.Configuration.DataSource.AmazonS3.FileNamePattern.ValueString()
} else {
fileNamePattern = nil
}
- s3AccessKeyID := r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAmazonS3.S3AccessKeyID.ValueString()
- s3BucketName := r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAmazonS3.S3BucketName.ValueString()
- s3BucketPath := r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAmazonS3.S3BucketPath.ValueString()
- s3BucketRegion := shared.DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion(r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAmazonS3.S3BucketRegion.ValueString())
- s3SecretAccessKey := r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAmazonS3.S3SecretAccessKey.ValueString()
- destinationDatabricksUpdateDataSourceAmazonS3 = &shared.DestinationDatabricksUpdateDataSourceAmazonS3{
- DataSourceType: dataSourceType1,
+ s3AccessKeyID := r.Configuration.DataSource.AmazonS3.S3AccessKeyID.ValueString()
+ s3BucketName := r.Configuration.DataSource.AmazonS3.S3BucketName.ValueString()
+ s3BucketPath := r.Configuration.DataSource.AmazonS3.S3BucketPath.ValueString()
+ s3BucketRegion := new(shared.DestinationDatabricksUpdateS3BucketRegion)
+ if !r.Configuration.DataSource.AmazonS3.S3BucketRegion.IsUnknown() && !r.Configuration.DataSource.AmazonS3.S3BucketRegion.IsNull() {
+ *s3BucketRegion = shared.DestinationDatabricksUpdateS3BucketRegion(r.Configuration.DataSource.AmazonS3.S3BucketRegion.ValueString())
+ } else {
+ s3BucketRegion = nil
+ }
+ s3SecretAccessKey := r.Configuration.DataSource.AmazonS3.S3SecretAccessKey.ValueString()
+ amazonS3 = &shared.AmazonS3{
FileNamePattern: fileNamePattern,
S3AccessKeyID: s3AccessKeyID,
S3BucketName: s3BucketName,
@@ -177,34 +190,32 @@ func (r *DestinationDatabricksResourceModel) ToUpdateSDKType() *shared.Destinati
S3SecretAccessKey: s3SecretAccessKey,
}
}
- if destinationDatabricksUpdateDataSourceAmazonS3 != nil {
- dataSource = shared.DestinationDatabricksUpdateDataSource{
- DestinationDatabricksUpdateDataSourceAmazonS3: destinationDatabricksUpdateDataSourceAmazonS3,
+ if amazonS3 != nil {
+ dataSource = shared.DataSource{
+ AmazonS3: amazonS3,
}
}
- var destinationDatabricksUpdateDataSourceAzureBlobStorage *shared.DestinationDatabricksUpdateDataSourceAzureBlobStorage
- if r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAzureBlobStorage != nil {
- azureBlobStorageAccountName := r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAzureBlobStorage.AzureBlobStorageAccountName.ValueString()
- azureBlobStorageContainerName := r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAzureBlobStorage.AzureBlobStorageContainerName.ValueString()
+ var destinationDatabricksUpdateAzureBlobStorage *shared.DestinationDatabricksUpdateAzureBlobStorage
+ if r.Configuration.DataSource.AzureBlobStorage != nil {
+ azureBlobStorageAccountName := r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageAccountName.ValueString()
+ azureBlobStorageContainerName := r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageContainerName.ValueString()
azureBlobStorageEndpointDomainName := new(string)
- if !r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAzureBlobStorage.AzureBlobStorageEndpointDomainName.IsUnknown() && !r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAzureBlobStorage.AzureBlobStorageEndpointDomainName.IsNull() {
- *azureBlobStorageEndpointDomainName = r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAzureBlobStorage.AzureBlobStorageEndpointDomainName.ValueString()
+ if !r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageEndpointDomainName.IsUnknown() && !r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageEndpointDomainName.IsNull() {
+ *azureBlobStorageEndpointDomainName = r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageEndpointDomainName.ValueString()
} else {
azureBlobStorageEndpointDomainName = nil
}
- azureBlobStorageSasToken := r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAzureBlobStorage.AzureBlobStorageSasToken.ValueString()
- dataSourceType2 := shared.DestinationDatabricksUpdateDataSourceAzureBlobStorageDataSourceType(r.Configuration.DataSource.DestinationDatabricksUpdateDataSourceAzureBlobStorage.DataSourceType.ValueString())
- destinationDatabricksUpdateDataSourceAzureBlobStorage = &shared.DestinationDatabricksUpdateDataSourceAzureBlobStorage{
+ azureBlobStorageSasToken := r.Configuration.DataSource.AzureBlobStorage.AzureBlobStorageSasToken.ValueString()
+ destinationDatabricksUpdateAzureBlobStorage = &shared.DestinationDatabricksUpdateAzureBlobStorage{
AzureBlobStorageAccountName: azureBlobStorageAccountName,
AzureBlobStorageContainerName: azureBlobStorageContainerName,
AzureBlobStorageEndpointDomainName: azureBlobStorageEndpointDomainName,
AzureBlobStorageSasToken: azureBlobStorageSasToken,
- DataSourceType: dataSourceType2,
}
}
- if destinationDatabricksUpdateDataSourceAzureBlobStorage != nil {
- dataSource = shared.DestinationDatabricksUpdateDataSource{
- DestinationDatabricksUpdateDataSourceAzureBlobStorage: destinationDatabricksUpdateDataSourceAzureBlobStorage,
+ if destinationDatabricksUpdateAzureBlobStorage != nil {
+ dataSource = shared.DataSource{
+ DestinationDatabricksUpdateAzureBlobStorage: destinationDatabricksUpdateAzureBlobStorage,
}
}
database := new(string)
diff --git a/internal/provider/destination_devnull_data_source.go b/internal/provider/destination_devnull_data_source.go
old mode 100755
new mode 100644
index 55bf513ec..e7d371bb6
--- a/internal/provider/destination_devnull_data_source.go
+++ b/internal/provider/destination_devnull_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationDevNullDataSource struct {
// DestinationDevNullDataSourceModel describes the data model.
type DestinationDevNullDataSourceModel struct {
- Configuration DestinationDevNull `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,62 +47,17 @@ func (r *DestinationDevNullDataSource) Schema(ctx context.Context, req datasourc
MarkdownDescription: "DestinationDevNull DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dev-null",
- ),
- },
- Description: `must be one of ["dev-null"]`,
- },
- "test_destination": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_dev_null_test_destination_silent": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "test_destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SILENT",
- ),
- },
- Description: `must be one of ["SILENT"]`,
- },
- },
- Description: `The type of destination to be used`,
- },
- "destination_dev_null_update_test_destination_silent": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "test_destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SILENT",
- ),
- },
- Description: `must be one of ["SILENT"]`,
- },
- },
- Description: `The type of destination to be used`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The type of destination to be used`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_devnull_data_source_sdk.go b/internal/provider/destination_devnull_data_source_sdk.go
old mode 100755
new mode 100644
index 2f3a3a072..1adcaf01a
--- a/internal/provider/destination_devnull_data_source_sdk.go
+++ b/internal/provider/destination_devnull_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationDevNullDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_devnull_resource.go b/internal/provider/destination_devnull_resource.go
old mode 100755
new mode 100644
index cc3324717..e18bf115a
--- a/internal/provider/destination_devnull_resource.go
+++ b/internal/provider/destination_devnull_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type DestinationDevNullResource struct {
// DestinationDevNullResourceModel describes the resource data model.
type DestinationDevNullResourceModel struct {
Configuration DestinationDevNull `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -54,56 +55,29 @@ func (r *DestinationDevNullResource) Schema(ctx context.Context, req resource.Sc
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dev-null",
- ),
- },
- Description: `must be one of ["dev-null"]`,
- },
"test_destination": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_dev_null_test_destination_silent": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "test_destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SILENT",
- ),
- },
- Description: `must be one of ["SILENT"]`,
- },
- },
- Description: `The type of destination to be used`,
- },
- "destination_dev_null_update_test_destination_silent": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "test_destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SILENT",
- ),
- },
- Description: `must be one of ["SILENT"]`,
- },
- },
+ "silent": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `The type of destination to be used`,
},
},
+ Description: `The type of destination to be used`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `The type of destination to be used`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -120,7 +94,8 @@ func (r *DestinationDevNullResource) Schema(ctx context.Context, req resource.Sc
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -170,7 +145,7 @@ func (r *DestinationDevNullResource) Create(ctx context.Context, req resource.Cr
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationDevNull(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -346,5 +321,5 @@ func (r *DestinationDevNullResource) Delete(ctx context.Context, req resource.De
}
func (r *DestinationDevNullResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_devnull_resource_sdk.go b/internal/provider/destination_devnull_resource_sdk.go
old mode 100755
new mode 100644
index f55739fc5..5583059ad
--- a/internal/provider/destination_devnull_resource_sdk.go
+++ b/internal/provider/destination_devnull_resource_sdk.go
@@ -3,33 +3,35 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationDevNullResourceModel) ToCreateSDKType() *shared.DestinationDevNullCreateRequest {
- destinationType := shared.DestinationDevNullDevNull(r.Configuration.DestinationType.ValueString())
var testDestination shared.DestinationDevNullTestDestination
- var destinationDevNullTestDestinationSilent *shared.DestinationDevNullTestDestinationSilent
- if r.Configuration.TestDestination.DestinationDevNullTestDestinationSilent != nil {
- testDestinationType := shared.DestinationDevNullTestDestinationSilentTestDestinationType(r.Configuration.TestDestination.DestinationDevNullTestDestinationSilent.TestDestinationType.ValueString())
- destinationDevNullTestDestinationSilent = &shared.DestinationDevNullTestDestinationSilent{
- TestDestinationType: testDestinationType,
- }
+ var destinationDevNullSilent *shared.DestinationDevNullSilent
+ if r.Configuration.TestDestination.Silent != nil {
+ destinationDevNullSilent = &shared.DestinationDevNullSilent{}
}
- if destinationDevNullTestDestinationSilent != nil {
+ if destinationDevNullSilent != nil {
testDestination = shared.DestinationDevNullTestDestination{
- DestinationDevNullTestDestinationSilent: destinationDevNullTestDestinationSilent,
+ DestinationDevNullSilent: destinationDevNullSilent,
}
}
configuration := shared.DestinationDevNull{
- DestinationType: destinationType,
TestDestination: testDestination,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationDevNullCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -42,17 +44,14 @@ func (r *DestinationDevNullResourceModel) ToGetSDKType() *shared.DestinationDevN
}
func (r *DestinationDevNullResourceModel) ToUpdateSDKType() *shared.DestinationDevNullPutRequest {
- var testDestination shared.DestinationDevNullUpdateTestDestination
- var destinationDevNullUpdateTestDestinationSilent *shared.DestinationDevNullUpdateTestDestinationSilent
- if r.Configuration.TestDestination.DestinationDevNullUpdateTestDestinationSilent != nil {
- testDestinationType := shared.DestinationDevNullUpdateTestDestinationSilentTestDestinationType(r.Configuration.TestDestination.DestinationDevNullUpdateTestDestinationSilent.TestDestinationType.ValueString())
- destinationDevNullUpdateTestDestinationSilent = &shared.DestinationDevNullUpdateTestDestinationSilent{
- TestDestinationType: testDestinationType,
- }
+ var testDestination shared.TestDestination
+ var silent *shared.Silent
+ if r.Configuration.TestDestination.Silent != nil {
+ silent = &shared.Silent{}
}
- if destinationDevNullUpdateTestDestinationSilent != nil {
- testDestination = shared.DestinationDevNullUpdateTestDestination{
- DestinationDevNullUpdateTestDestinationSilent: destinationDevNullUpdateTestDestinationSilent,
+ if silent != nil {
+ testDestination = shared.TestDestination{
+ Silent: silent,
}
}
configuration := shared.DestinationDevNullUpdate{
diff --git a/internal/provider/destination_duckdb_data_source.go b/internal/provider/destination_duckdb_data_source.go
new file mode 100644
index 000000000..3f30f339a
--- /dev/null
+++ b/internal/provider/destination_duckdb_data_source.go
@@ -0,0 +1,137 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ datasource.DataSource = &DestinationDuckdbDataSource{}
+var _ datasource.DataSourceWithConfigure = &DestinationDuckdbDataSource{}
+
+func NewDestinationDuckdbDataSource() datasource.DataSource {
+ return &DestinationDuckdbDataSource{}
+}
+
+// DestinationDuckdbDataSource is the data source implementation.
+type DestinationDuckdbDataSource struct {
+ client *sdk.SDK
+}
+
+// DestinationDuckdbDataSourceModel describes the data model.
+type DestinationDuckdbDataSourceModel struct {
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+// Metadata returns the data source type name.
+func (r *DestinationDuckdbDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_destination_duckdb"
+}
+
+// Schema defines the schema for the data source.
+func (r *DestinationDuckdbDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "DestinationDuckdb DataSource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.StringAttribute{
+ Computed: true,
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
+ },
+ "destination_id": schema.StringAttribute{
+ Required: true,
+ },
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ },
+ "workspace_id": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ }
+}
+
+func (r *DestinationDuckdbDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ client, ok := req.ProviderData.(*sdk.SDK)
+
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected DataSource Configure Type",
+ fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = client
+}
+
+func (r *DestinationDuckdbDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data *DestinationDuckdbDataSourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ destinationID := data.DestinationID.ValueString()
+ request := operations.GetDestinationDuckdbRequest{
+ DestinationID: destinationID,
+ }
+ res, err := r.client.Destinations.GetDestinationDuckdb(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.DestinationResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(res.DestinationResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/internal/provider/destination_duckdb_data_source_sdk.go b/internal/provider/destination_duckdb_data_source_sdk.go
new file mode 100644
index 000000000..8c1e87400
--- /dev/null
+++ b/internal/provider/destination_duckdb_data_source_sdk.go
@@ -0,0 +1,18 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *DestinationDuckdbDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
+ r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
+ r.Name = types.StringValue(resp.Name)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
diff --git a/internal/provider/source_younium_resource.go b/internal/provider/destination_duckdb_resource.go
old mode 100755
new mode 100644
similarity index 56%
rename from internal/provider/source_younium_resource.go
rename to internal/provider/destination_duckdb_resource.go
index 97e9c54e9..ae3b59e1b
--- a/internal/provider/source_younium_resource.go
+++ b/internal/provider/destination_duckdb_resource.go
@@ -3,106 +3,97 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
// Ensure provider defined types fully satisfy framework interfaces.
-var _ resource.Resource = &SourceYouniumResource{}
-var _ resource.ResourceWithImportState = &SourceYouniumResource{}
+var _ resource.Resource = &DestinationDuckdbResource{}
+var _ resource.ResourceWithImportState = &DestinationDuckdbResource{}
-func NewSourceYouniumResource() resource.Resource {
- return &SourceYouniumResource{}
+func NewDestinationDuckdbResource() resource.Resource {
+ return &DestinationDuckdbResource{}
}
-// SourceYouniumResource defines the resource implementation.
-type SourceYouniumResource struct {
+// DestinationDuckdbResource defines the resource implementation.
+type DestinationDuckdbResource struct {
client *sdk.SDK
}
-// SourceYouniumResourceModel describes the resource data model.
-type SourceYouniumResourceModel struct {
- Configuration SourceYounium `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+// DestinationDuckdbResourceModel describes the resource data model.
+type DestinationDuckdbResourceModel struct {
+ Configuration DestinationDuckdb `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
-func (r *SourceYouniumResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_younium"
+func (r *DestinationDuckdbResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_destination_duckdb"
}
-func (r *SourceYouniumResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+func (r *DestinationDuckdbResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
resp.Schema = schema.Schema{
- MarkdownDescription: "SourceYounium Resource",
+ MarkdownDescription: "DestinationDuckdb Resource",
Attributes: map[string]schema.Attribute{
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "legal_entity": schema.StringAttribute{
+ "destination_path": schema.StringAttribute{
Required: true,
- Description: `Legal Entity that data should be pulled from`,
+ Description: `Path to the .duckdb file, or the text 'md:' to connect to MotherDuck. The file will be placed inside that local mount. For more information check out our docs`,
},
- "password": schema.StringAttribute{
- Required: true,
- Description: `Account password for younium account API key`,
- },
- "playground": schema.BoolAttribute{
+ "motherduck_api_key": schema.StringAttribute{
Optional: true,
- Description: `Property defining if connector is used against playground or production environment`,
+ Sensitive: true,
+ Description: `API key to use for authentication to a MotherDuck database.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "younium",
- ),
- },
- Description: `must be one of ["younium"]`,
- },
- "username": schema.StringAttribute{
- Required: true,
- Description: `Username for Younium account`,
+ "schema": schema.StringAttribute{
+ Optional: true,
+ Description: `Database schema name, default for duckdb is 'main'.`,
},
},
},
- "name": schema.StringAttribute{
+ "definition_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
+ stringplanmodifier.RequiresReplace(),
},
- Required: true,
- },
- "secret_id": schema.StringAttribute{
Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
},
- "source_id": schema.StringAttribute{
+ "destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
},
- "source_type": schema.StringAttribute{
+ "destination_type": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
},
+ "name": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
+ },
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
@@ -113,7 +104,7 @@ func (r *SourceYouniumResource) Schema(ctx context.Context, req resource.SchemaR
}
}
-func (r *SourceYouniumResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+func (r *DestinationDuckdbResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
// Prevent panic if the provider has not been configured.
if req.ProviderData == nil {
return
@@ -133,8 +124,8 @@ func (r *SourceYouniumResource) Configure(ctx context.Context, req resource.Conf
r.client = client
}
-func (r *SourceYouniumResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data *SourceYouniumResourceModel
+func (r *DestinationDuckdbResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data *DestinationDuckdbResourceModel
var item types.Object
resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
@@ -151,8 +142,8 @@ func (r *SourceYouniumResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
- res, err := r.client.Sources.CreateSourceYounium(ctx, request)
+ request := data.ToCreateSDKType()
+ res, err := r.client.Destinations.CreateDestinationDuckdb(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -168,18 +159,18 @@ func (r *SourceYouniumResource) Create(ctx context.Context, req resource.CreateR
resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
return
}
- if res.SourceResponse == nil {
+ if res.DestinationResponse == nil {
resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
return
}
- data.RefreshFromCreateResponse(res.SourceResponse)
+ data.RefreshFromCreateResponse(res.DestinationResponse)
// Save updated data into Terraform state
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
}
-func (r *SourceYouniumResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data *SourceYouniumResourceModel
+func (r *DestinationDuckdbResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var data *DestinationDuckdbResourceModel
var item types.Object
resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
@@ -196,11 +187,11 @@ func (r *SourceYouniumResource) Read(ctx context.Context, req resource.ReadReque
return
}
- sourceID := data.SourceID.ValueString()
- request := operations.GetSourceYouniumRequest{
- SourceID: sourceID,
+ destinationID := data.DestinationID.ValueString()
+ request := operations.GetDestinationDuckdbRequest{
+ DestinationID: destinationID,
}
- res, err := r.client.Sources.GetSourceYounium(ctx, request)
+ res, err := r.client.Destinations.GetDestinationDuckdb(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -216,30 +207,30 @@ func (r *SourceYouniumResource) Read(ctx context.Context, req resource.ReadReque
resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
return
}
- if res.SourceResponse == nil {
+ if res.DestinationResponse == nil {
resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
return
}
- data.RefreshFromGetResponse(res.SourceResponse)
+ data.RefreshFromGetResponse(res.DestinationResponse)
// Save updated data into Terraform state
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
}
-func (r *SourceYouniumResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data *SourceYouniumResourceModel
+func (r *DestinationDuckdbResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data *DestinationDuckdbResourceModel
merge(ctx, req, resp, &data)
if resp.Diagnostics.HasError() {
return
}
- sourceYouniumPutRequest := data.ToUpdateSDKType()
- sourceID := data.SourceID.ValueString()
- request := operations.PutSourceYouniumRequest{
- SourceYouniumPutRequest: sourceYouniumPutRequest,
- SourceID: sourceID,
+ destinationDuckdbPutRequest := data.ToUpdateSDKType()
+ destinationID := data.DestinationID.ValueString()
+ request := operations.PutDestinationDuckdbRequest{
+ DestinationDuckdbPutRequest: destinationDuckdbPutRequest,
+ DestinationID: destinationID,
}
- res, err := r.client.Sources.PutSourceYounium(ctx, request)
+ res, err := r.client.Destinations.PutDestinationDuckdb(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -255,11 +246,11 @@ func (r *SourceYouniumResource) Update(ctx context.Context, req resource.UpdateR
resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
return
}
- sourceId1 := data.SourceID.ValueString()
- getRequest := operations.GetSourceYouniumRequest{
- SourceID: sourceId1,
+ destinationId1 := data.DestinationID.ValueString()
+ getRequest := operations.GetDestinationDuckdbRequest{
+ DestinationID: destinationId1,
}
- getResponse, err := r.client.Sources.GetSourceYounium(ctx, getRequest)
+ getResponse, err := r.client.Destinations.GetDestinationDuckdb(ctx, getRequest)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -275,18 +266,18 @@ func (r *SourceYouniumResource) Update(ctx context.Context, req resource.UpdateR
resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse))
return
}
- if getResponse.SourceResponse == nil {
+ if getResponse.DestinationResponse == nil {
resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse))
return
}
- data.RefreshFromGetResponse(getResponse.SourceResponse)
+ data.RefreshFromGetResponse(getResponse.DestinationResponse)
// Save updated data into Terraform state
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
}
-func (r *SourceYouniumResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data *SourceYouniumResourceModel
+func (r *DestinationDuckdbResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var data *DestinationDuckdbResourceModel
var item types.Object
resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
@@ -303,11 +294,11 @@ func (r *SourceYouniumResource) Delete(ctx context.Context, req resource.DeleteR
return
}
- sourceID := data.SourceID.ValueString()
- request := operations.DeleteSourceYouniumRequest{
- SourceID: sourceID,
+ destinationID := data.DestinationID.ValueString()
+ request := operations.DeleteDestinationDuckdbRequest{
+ DestinationID: destinationID,
}
- res, err := r.client.Sources.DeleteSourceYounium(ctx, request)
+ res, err := r.client.Destinations.DeleteDestinationDuckdb(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -326,6 +317,6 @@ func (r *SourceYouniumResource) Delete(ctx context.Context, req resource.DeleteR
}
-func (r *SourceYouniumResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+func (r *DestinationDuckdbResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_duckdb_resource_sdk.go b/internal/provider/destination_duckdb_resource_sdk.go
new file mode 100644
index 000000000..6feee431d
--- /dev/null
+++ b/internal/provider/destination_duckdb_resource_sdk.go
@@ -0,0 +1,94 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *DestinationDuckdbResourceModel) ToCreateSDKType() *shared.DestinationDuckdbCreateRequest {
+ destinationPath := r.Configuration.DestinationPath.ValueString()
+ motherduckAPIKey := new(string)
+ if !r.Configuration.MotherduckAPIKey.IsUnknown() && !r.Configuration.MotherduckAPIKey.IsNull() {
+ *motherduckAPIKey = r.Configuration.MotherduckAPIKey.ValueString()
+ } else {
+ motherduckAPIKey = nil
+ }
+ schema := new(string)
+ if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() {
+ *schema = r.Configuration.Schema.ValueString()
+ } else {
+ schema = nil
+ }
+ configuration := shared.DestinationDuckdb{
+ DestinationPath: destinationPath,
+ MotherduckAPIKey: motherduckAPIKey,
+ Schema: schema,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
+ name := r.Name.ValueString()
+ workspaceID := r.WorkspaceID.ValueString()
+ out := shared.DestinationDuckdbCreateRequest{
+ Configuration: configuration,
+ DefinitionID: definitionID,
+ Name: name,
+ WorkspaceID: workspaceID,
+ }
+ return &out
+}
+
+func (r *DestinationDuckdbResourceModel) ToGetSDKType() *shared.DestinationDuckdbCreateRequest {
+ out := r.ToCreateSDKType()
+ return out
+}
+
+func (r *DestinationDuckdbResourceModel) ToUpdateSDKType() *shared.DestinationDuckdbPutRequest {
+ destinationPath := r.Configuration.DestinationPath.ValueString()
+ motherduckAPIKey := new(string)
+ if !r.Configuration.MotherduckAPIKey.IsUnknown() && !r.Configuration.MotherduckAPIKey.IsNull() {
+ *motherduckAPIKey = r.Configuration.MotherduckAPIKey.ValueString()
+ } else {
+ motherduckAPIKey = nil
+ }
+ schema := new(string)
+ if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() {
+ *schema = r.Configuration.Schema.ValueString()
+ } else {
+ schema = nil
+ }
+ configuration := shared.DestinationDuckdbUpdate{
+ DestinationPath: destinationPath,
+ MotherduckAPIKey: motherduckAPIKey,
+ Schema: schema,
+ }
+ name := r.Name.ValueString()
+ workspaceID := r.WorkspaceID.ValueString()
+ out := shared.DestinationDuckdbPutRequest{
+ Configuration: configuration,
+ Name: name,
+ WorkspaceID: workspaceID,
+ }
+ return &out
+}
+
+func (r *DestinationDuckdbResourceModel) ToDeleteSDKType() *shared.DestinationDuckdbCreateRequest {
+ out := r.ToCreateSDKType()
+ return out
+}
+
+func (r *DestinationDuckdbResourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
+ r.Name = types.StringValue(resp.Name)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
+
+func (r *DestinationDuckdbResourceModel) RefreshFromCreateResponse(resp *shared.DestinationResponse) {
+ r.RefreshFromGetResponse(resp)
+}
diff --git a/internal/provider/destination_dynamodb_data_source.go b/internal/provider/destination_dynamodb_data_source.go
old mode 100755
new mode 100644
index 4f4049bd5..bfec796c8
--- a/internal/provider/destination_dynamodb_data_source.go
+++ b/internal/provider/destination_dynamodb_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationDynamodbDataSource struct {
// DestinationDynamodbDataSourceModel describes the data model.
type DestinationDynamodbDataSourceModel struct {
- Configuration DestinationDynamodb1 `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,74 +47,17 @@ func (r *DestinationDynamodbDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "DestinationDynamodb DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `The access key id to access the DynamoDB. Airbyte requires Read and Write permissions to the DynamoDB.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dynamodb",
- ),
- },
- Description: `must be one of ["dynamodb"]`,
- },
- "dynamodb_endpoint": schema.StringAttribute{
- Computed: true,
- Description: `This is your DynamoDB endpoint url.(if you are working with AWS DynamoDB, just leave empty).`,
- },
- "dynamodb_region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "",
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- "us-gov-east-1",
- "us-gov-west-1",
- ),
- },
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the DynamoDB.`,
- },
- "dynamodb_table_name_prefix": schema.StringAttribute{
- Computed: true,
- Description: `The prefix to use when naming DynamoDB tables.`,
- },
- "secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret to the access key id.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_dynamodb_data_source_sdk.go b/internal/provider/destination_dynamodb_data_source_sdk.go
old mode 100755
new mode 100644
index 6604581a5..c1e6b876a
--- a/internal/provider/destination_dynamodb_data_source_sdk.go
+++ b/internal/provider/destination_dynamodb_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationDynamodbDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_dynamodb_resource.go b/internal/provider/destination_dynamodb_resource.go
old mode 100755
new mode 100644
index 0e5531102..bc0716a28
--- a/internal/provider/destination_dynamodb_resource.go
+++ b/internal/provider/destination_dynamodb_resource.go
@@ -3,17 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +36,7 @@ type DestinationDynamodbResource struct {
// DestinationDynamodbResourceModel describes the resource data model.
type DestinationDynamodbResourceModel struct {
Configuration DestinationDynamodb `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -55,23 +57,18 @@ func (r *DestinationDynamodbResource) Schema(ctx context.Context, req resource.S
Attributes: map[string]schema.Attribute{
"access_key_id": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The access key id to access the DynamoDB. Airbyte requires Read and Write permissions to the DynamoDB.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dynamodb",
- ),
- },
- Description: `must be one of ["dynamodb"]`,
- },
"dynamodb_endpoint": schema.StringAttribute{
- Optional: true,
- Description: `This is your DynamoDB endpoint url.(if you are working with AWS DynamoDB, just leave empty).`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `This is your DynamoDB endpoint url.(if you are working with AWS DynamoDB, just leave empty).`,
},
"dynamodb_region": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""` + "\n" +
+ `The region of the DynamoDB.`,
Validators: []validator.String{
stringvalidator.OneOf(
"",
@@ -102,8 +99,6 @@ func (r *DestinationDynamodbResource) Schema(ctx context.Context, req resource.S
"us-gov-west-1",
),
},
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the DynamoDB.`,
},
"dynamodb_table_name_prefix": schema.StringAttribute{
Required: true,
@@ -111,10 +106,18 @@ func (r *DestinationDynamodbResource) Schema(ctx context.Context, req resource.S
},
"secret_access_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The corresponding secret to the access key id.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -131,7 +134,8 @@ func (r *DestinationDynamodbResource) Schema(ctx context.Context, req resource.S
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -181,7 +185,7 @@ func (r *DestinationDynamodbResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationDynamodb(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -357,5 +361,5 @@ func (r *DestinationDynamodbResource) Delete(ctx context.Context, req resource.D
}
func (r *DestinationDynamodbResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_dynamodb_resource_sdk.go b/internal/provider/destination_dynamodb_resource_sdk.go
old mode 100755
new mode 100644
index 3334b2070..a42f5687a
--- a/internal/provider/destination_dynamodb_resource_sdk.go
+++ b/internal/provider/destination_dynamodb_resource_sdk.go
@@ -3,34 +3,44 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationDynamodbResourceModel) ToCreateSDKType() *shared.DestinationDynamodbCreateRequest {
accessKeyID := r.Configuration.AccessKeyID.ValueString()
- destinationType := shared.DestinationDynamodbDynamodb(r.Configuration.DestinationType.ValueString())
dynamodbEndpoint := new(string)
if !r.Configuration.DynamodbEndpoint.IsUnknown() && !r.Configuration.DynamodbEndpoint.IsNull() {
*dynamodbEndpoint = r.Configuration.DynamodbEndpoint.ValueString()
} else {
dynamodbEndpoint = nil
}
- dynamodbRegion := shared.DestinationDynamodbDynamoDBRegion(r.Configuration.DynamodbRegion.ValueString())
+ dynamodbRegion := new(shared.DestinationDynamodbDynamoDBRegion)
+ if !r.Configuration.DynamodbRegion.IsUnknown() && !r.Configuration.DynamodbRegion.IsNull() {
+ *dynamodbRegion = shared.DestinationDynamodbDynamoDBRegion(r.Configuration.DynamodbRegion.ValueString())
+ } else {
+ dynamodbRegion = nil
+ }
dynamodbTableNamePrefix := r.Configuration.DynamodbTableNamePrefix.ValueString()
secretAccessKey := r.Configuration.SecretAccessKey.ValueString()
configuration := shared.DestinationDynamodb{
AccessKeyID: accessKeyID,
- DestinationType: destinationType,
DynamodbEndpoint: dynamodbEndpoint,
DynamodbRegion: dynamodbRegion,
DynamodbTableNamePrefix: dynamodbTableNamePrefix,
SecretAccessKey: secretAccessKey,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationDynamodbCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -50,7 +60,12 @@ func (r *DestinationDynamodbResourceModel) ToUpdateSDKType() *shared.Destination
} else {
dynamodbEndpoint = nil
}
- dynamodbRegion := shared.DestinationDynamodbUpdateDynamoDBRegion(r.Configuration.DynamodbRegion.ValueString())
+ dynamodbRegion := new(shared.DynamoDBRegion)
+ if !r.Configuration.DynamodbRegion.IsUnknown() && !r.Configuration.DynamodbRegion.IsNull() {
+ *dynamodbRegion = shared.DynamoDBRegion(r.Configuration.DynamodbRegion.ValueString())
+ } else {
+ dynamodbRegion = nil
+ }
dynamodbTableNamePrefix := r.Configuration.DynamodbTableNamePrefix.ValueString()
secretAccessKey := r.Configuration.SecretAccessKey.ValueString()
configuration := shared.DestinationDynamodbUpdate{
diff --git a/internal/provider/destination_elasticsearch_data_source.go b/internal/provider/destination_elasticsearch_data_source.go
old mode 100755
new mode 100644
index 2ef979c5f..dc2b59d14
--- a/internal/provider/destination_elasticsearch_data_source.go
+++ b/internal/provider/destination_elasticsearch_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationElasticsearchDataSource struct {
// DestinationElasticsearchDataSourceModel describes the data model.
type DestinationElasticsearchDataSourceModel struct {
- Configuration DestinationElasticsearch `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,136 +47,17 @@ func (r *DestinationElasticsearchDataSource) Schema(ctx context.Context, req dat
MarkdownDescription: "DestinationElasticsearch DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "authentication_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_elasticsearch_authentication_method_api_key_secret": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key_id": schema.StringAttribute{
- Computed: true,
- Description: `The Key ID to used when accessing an enterprise Elasticsearch instance.`,
- },
- "api_key_secret": schema.StringAttribute{
- Computed: true,
- Description: `The secret associated with the API Key ID.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "secret",
- ),
- },
- Description: `must be one of ["secret"]`,
- },
- },
- Description: `Use a api key and secret combination to authenticate`,
- },
- "destination_elasticsearch_authentication_method_username_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "basic",
- ),
- },
- Description: `must be one of ["basic"]`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Basic auth password to access a secure Elasticsearch server`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Basic auth username to access a secure Elasticsearch server`,
- },
- },
- Description: `Basic auth header with a username and password`,
- },
- "destination_elasticsearch_update_authentication_method_api_key_secret": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key_id": schema.StringAttribute{
- Computed: true,
- Description: `The Key ID to used when accessing an enterprise Elasticsearch instance.`,
- },
- "api_key_secret": schema.StringAttribute{
- Computed: true,
- Description: `The secret associated with the API Key ID.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "secret",
- ),
- },
- Description: `must be one of ["secret"]`,
- },
- },
- Description: `Use a api key and secret combination to authenticate`,
- },
- "destination_elasticsearch_update_authentication_method_username_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "basic",
- ),
- },
- Description: `must be one of ["basic"]`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Basic auth password to access a secure Elasticsearch server`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Basic auth username to access a secure Elasticsearch server`,
- },
- },
- Description: `Basic auth header with a username and password`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The type of authentication to be used`,
- },
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "elasticsearch",
- ),
- },
- Description: `must be one of ["elasticsearch"]`,
- },
- "endpoint": schema.StringAttribute{
- Computed: true,
- Description: `The full url of the Elasticsearch server`,
- },
- "upsert": schema.BoolAttribute{
- Computed: true,
- Description: `If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_elasticsearch_data_source_sdk.go b/internal/provider/destination_elasticsearch_data_source_sdk.go
old mode 100755
new mode 100644
index f24830770..243f9a23a
--- a/internal/provider/destination_elasticsearch_data_source_sdk.go
+++ b/internal/provider/destination_elasticsearch_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationElasticsearchDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_elasticsearch_resource.go b/internal/provider/destination_elasticsearch_resource.go
old mode 100755
new mode 100644
index bbaba299f..151d4f8a5
--- a/internal/provider/destination_elasticsearch_resource.go
+++ b/internal/provider/destination_elasticsearch_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type DestinationElasticsearchResource struct {
// DestinationElasticsearchResourceModel describes the resource data model.
type DestinationElasticsearchResourceModel struct {
Configuration DestinationElasticsearch `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -57,7 +58,7 @@ func (r *DestinationElasticsearchResource) Schema(ctx context.Context, req resou
"authentication_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_elasticsearch_authentication_method_api_key_secret": schema.SingleNestedAttribute{
+ "api_key_secret": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"api_key_id": schema.StringAttribute{
@@ -68,78 +69,15 @@ func (r *DestinationElasticsearchResource) Schema(ctx context.Context, req resou
Required: true,
Description: `The secret associated with the API Key ID.`,
},
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "secret",
- ),
- },
- Description: `must be one of ["secret"]`,
- },
- },
- Description: `Use a api key and secret combination to authenticate`,
- },
- "destination_elasticsearch_authentication_method_username_password": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "basic",
- ),
- },
- Description: `must be one of ["basic"]`,
- },
- "password": schema.StringAttribute{
- Required: true,
- Description: `Basic auth password to access a secure Elasticsearch server`,
- },
- "username": schema.StringAttribute{
- Required: true,
- Description: `Basic auth username to access a secure Elasticsearch server`,
- },
- },
- Description: `Basic auth header with a username and password`,
- },
- "destination_elasticsearch_update_authentication_method_api_key_secret": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_key_id": schema.StringAttribute{
- Required: true,
- Description: `The Key ID to used when accessing an enterprise Elasticsearch instance.`,
- },
- "api_key_secret": schema.StringAttribute{
- Required: true,
- Description: `The secret associated with the API Key ID.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "secret",
- ),
- },
- Description: `must be one of ["secret"]`,
- },
},
Description: `Use a api key and secret combination to authenticate`,
},
- "destination_elasticsearch_update_authentication_method_username_password": schema.SingleNestedAttribute{
+ "username_password": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "basic",
- ),
- },
- Description: `must be one of ["basic"]`,
- },
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Basic auth password to access a secure Elasticsearch server`,
},
"username": schema.StringAttribute{
@@ -150,34 +88,33 @@ func (r *DestinationElasticsearchResource) Schema(ctx context.Context, req resou
Description: `Basic auth header with a username and password`,
},
},
+ Description: `The type of authentication to be used`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `The type of authentication to be used`,
},
"ca_certificate": schema.StringAttribute{
Optional: true,
Description: `CA certificate`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "elasticsearch",
- ),
- },
- Description: `must be one of ["elasticsearch"]`,
- },
"endpoint": schema.StringAttribute{
Required: true,
Description: `The full url of the Elasticsearch server`,
},
"upsert": schema.BoolAttribute{
- Optional: true,
- Description: `If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys.`,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -194,7 +131,8 @@ func (r *DestinationElasticsearchResource) Schema(ctx context.Context, req resou
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -244,7 +182,7 @@ func (r *DestinationElasticsearchResource) Create(ctx context.Context, req resou
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationElasticsearch(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -420,5 +358,5 @@ func (r *DestinationElasticsearchResource) Delete(ctx context.Context, req resou
}
func (r *DestinationElasticsearchResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_elasticsearch_resource_sdk.go b/internal/provider/destination_elasticsearch_resource_sdk.go
old mode 100755
new mode 100644
index 26e17daf2..987d754bc
--- a/internal/provider/destination_elasticsearch_resource_sdk.go
+++ b/internal/provider/destination_elasticsearch_resource_sdk.go
@@ -3,43 +3,39 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationElasticsearchResourceModel) ToCreateSDKType() *shared.DestinationElasticsearchCreateRequest {
var authenticationMethod *shared.DestinationElasticsearchAuthenticationMethod
if r.Configuration.AuthenticationMethod != nil {
- var destinationElasticsearchAuthenticationMethodAPIKeySecret *shared.DestinationElasticsearchAuthenticationMethodAPIKeySecret
- if r.Configuration.AuthenticationMethod.DestinationElasticsearchAuthenticationMethodAPIKeySecret != nil {
- apiKeyID := r.Configuration.AuthenticationMethod.DestinationElasticsearchAuthenticationMethodAPIKeySecret.APIKeyID.ValueString()
- apiKeySecret := r.Configuration.AuthenticationMethod.DestinationElasticsearchAuthenticationMethodAPIKeySecret.APIKeySecret.ValueString()
- method := shared.DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod(r.Configuration.AuthenticationMethod.DestinationElasticsearchAuthenticationMethodAPIKeySecret.Method.ValueString())
- destinationElasticsearchAuthenticationMethodAPIKeySecret = &shared.DestinationElasticsearchAuthenticationMethodAPIKeySecret{
+ var destinationElasticsearchAPIKeySecret *shared.DestinationElasticsearchAPIKeySecret
+ if r.Configuration.AuthenticationMethod.APIKeySecret != nil {
+ apiKeyID := r.Configuration.AuthenticationMethod.APIKeySecret.APIKeyID.ValueString()
+ apiKeySecret := r.Configuration.AuthenticationMethod.APIKeySecret.APIKeySecret.ValueString()
+ destinationElasticsearchAPIKeySecret = &shared.DestinationElasticsearchAPIKeySecret{
APIKeyID: apiKeyID,
APIKeySecret: apiKeySecret,
- Method: method,
}
}
- if destinationElasticsearchAuthenticationMethodAPIKeySecret != nil {
+ if destinationElasticsearchAPIKeySecret != nil {
authenticationMethod = &shared.DestinationElasticsearchAuthenticationMethod{
- DestinationElasticsearchAuthenticationMethodAPIKeySecret: destinationElasticsearchAuthenticationMethodAPIKeySecret,
+ DestinationElasticsearchAPIKeySecret: destinationElasticsearchAPIKeySecret,
}
}
- var destinationElasticsearchAuthenticationMethodUsernamePassword *shared.DestinationElasticsearchAuthenticationMethodUsernamePassword
- if r.Configuration.AuthenticationMethod.DestinationElasticsearchAuthenticationMethodUsernamePassword != nil {
- method1 := shared.DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod(r.Configuration.AuthenticationMethod.DestinationElasticsearchAuthenticationMethodUsernamePassword.Method.ValueString())
- password := r.Configuration.AuthenticationMethod.DestinationElasticsearchAuthenticationMethodUsernamePassword.Password.ValueString()
- username := r.Configuration.AuthenticationMethod.DestinationElasticsearchAuthenticationMethodUsernamePassword.Username.ValueString()
- destinationElasticsearchAuthenticationMethodUsernamePassword = &shared.DestinationElasticsearchAuthenticationMethodUsernamePassword{
- Method: method1,
+ var destinationElasticsearchUsernamePassword *shared.DestinationElasticsearchUsernamePassword
+ if r.Configuration.AuthenticationMethod.UsernamePassword != nil {
+ password := r.Configuration.AuthenticationMethod.UsernamePassword.Password.ValueString()
+ username := r.Configuration.AuthenticationMethod.UsernamePassword.Username.ValueString()
+ destinationElasticsearchUsernamePassword = &shared.DestinationElasticsearchUsernamePassword{
Password: password,
Username: username,
}
}
- if destinationElasticsearchAuthenticationMethodUsernamePassword != nil {
+ if destinationElasticsearchUsernamePassword != nil {
authenticationMethod = &shared.DestinationElasticsearchAuthenticationMethod{
- DestinationElasticsearchAuthenticationMethodUsernamePassword: destinationElasticsearchAuthenticationMethodUsernamePassword,
+ DestinationElasticsearchUsernamePassword: destinationElasticsearchUsernamePassword,
}
}
}
@@ -49,7 +45,6 @@ func (r *DestinationElasticsearchResourceModel) ToCreateSDKType() *shared.Destin
} else {
caCertificate = nil
}
- destinationType := shared.DestinationElasticsearchElasticsearch(r.Configuration.DestinationType.ValueString())
endpoint := r.Configuration.Endpoint.ValueString()
upsert := new(bool)
if !r.Configuration.Upsert.IsUnknown() && !r.Configuration.Upsert.IsNull() {
@@ -60,14 +55,20 @@ func (r *DestinationElasticsearchResourceModel) ToCreateSDKType() *shared.Destin
configuration := shared.DestinationElasticsearch{
AuthenticationMethod: authenticationMethod,
CaCertificate: caCertificate,
- DestinationType: destinationType,
Endpoint: endpoint,
Upsert: upsert,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationElasticsearchCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -80,38 +81,34 @@ func (r *DestinationElasticsearchResourceModel) ToGetSDKType() *shared.Destinati
}
func (r *DestinationElasticsearchResourceModel) ToUpdateSDKType() *shared.DestinationElasticsearchPutRequest {
- var authenticationMethod *shared.DestinationElasticsearchUpdateAuthenticationMethod
+ var authenticationMethod *shared.AuthenticationMethod
if r.Configuration.AuthenticationMethod != nil {
- var destinationElasticsearchUpdateAuthenticationMethodAPIKeySecret *shared.DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret
- if r.Configuration.AuthenticationMethod.DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret != nil {
- apiKeyID := r.Configuration.AuthenticationMethod.DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret.APIKeyID.ValueString()
- apiKeySecret := r.Configuration.AuthenticationMethod.DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret.APIKeySecret.ValueString()
- method := shared.DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecretMethod(r.Configuration.AuthenticationMethod.DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret.Method.ValueString())
- destinationElasticsearchUpdateAuthenticationMethodAPIKeySecret = &shared.DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret{
+ var apiKeySecret *shared.APIKeySecret
+ if r.Configuration.AuthenticationMethod.APIKeySecret != nil {
+ apiKeyID := r.Configuration.AuthenticationMethod.APIKeySecret.APIKeyID.ValueString()
+ apiKeySecret1 := r.Configuration.AuthenticationMethod.APIKeySecret.APIKeySecret.ValueString()
+ apiKeySecret = &shared.APIKeySecret{
APIKeyID: apiKeyID,
- APIKeySecret: apiKeySecret,
- Method: method,
+ APIKeySecret: apiKeySecret1,
}
}
- if destinationElasticsearchUpdateAuthenticationMethodAPIKeySecret != nil {
- authenticationMethod = &shared.DestinationElasticsearchUpdateAuthenticationMethod{
- DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret: destinationElasticsearchUpdateAuthenticationMethodAPIKeySecret,
+ if apiKeySecret != nil {
+ authenticationMethod = &shared.AuthenticationMethod{
+ APIKeySecret: apiKeySecret,
}
}
- var destinationElasticsearchUpdateAuthenticationMethodUsernamePassword *shared.DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword
- if r.Configuration.AuthenticationMethod.DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword != nil {
- method1 := shared.DestinationElasticsearchUpdateAuthenticationMethodUsernamePasswordMethod(r.Configuration.AuthenticationMethod.DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword.Method.ValueString())
- password := r.Configuration.AuthenticationMethod.DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword.Password.ValueString()
- username := r.Configuration.AuthenticationMethod.DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword.Username.ValueString()
- destinationElasticsearchUpdateAuthenticationMethodUsernamePassword = &shared.DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword{
- Method: method1,
+ var usernamePassword *shared.UsernamePassword
+ if r.Configuration.AuthenticationMethod.UsernamePassword != nil {
+ password := r.Configuration.AuthenticationMethod.UsernamePassword.Password.ValueString()
+ username := r.Configuration.AuthenticationMethod.UsernamePassword.Username.ValueString()
+ usernamePassword = &shared.UsernamePassword{
Password: password,
Username: username,
}
}
- if destinationElasticsearchUpdateAuthenticationMethodUsernamePassword != nil {
- authenticationMethod = &shared.DestinationElasticsearchUpdateAuthenticationMethod{
- DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword: destinationElasticsearchUpdateAuthenticationMethodUsernamePassword,
+ if usernamePassword != nil {
+ authenticationMethod = &shared.AuthenticationMethod{
+ UsernamePassword: usernamePassword,
}
}
}
diff --git a/internal/provider/destination_firebolt_data_source.go b/internal/provider/destination_firebolt_data_source.go
old mode 100755
new mode 100644
index 9e2038fe4..fee84f1ba
--- a/internal/provider/destination_firebolt_data_source.go
+++ b/internal/provider/destination_firebolt_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationFireboltDataSource struct {
// DestinationFireboltDataSourceModel describes the data model.
type DestinationFireboltDataSourceModel struct {
- Configuration DestinationFirebolt `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,148 +47,17 @@ func (r *DestinationFireboltDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "DestinationFirebolt DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "account": schema.StringAttribute{
- Computed: true,
- Description: `Firebolt account to login.`,
- },
- "database": schema.StringAttribute{
- Computed: true,
- Description: `The database to connect to.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "firebolt",
- ),
- },
- Description: `must be one of ["firebolt"]`,
- },
- "engine": schema.StringAttribute{
- Computed: true,
- Description: `Engine name or url to connect to.`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The host name of your Firebolt database.`,
- },
- "loading_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_firebolt_loading_method_external_table_via_s3": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "aws_key_id": schema.StringAttribute{
- Computed: true,
- Description: `AWS access key granting read and write access to S3.`,
- },
- "aws_key_secret": schema.StringAttribute{
- Computed: true,
- Description: `Corresponding secret part of the AWS Key`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3",
- ),
- },
- Description: `must be one of ["S3"]`,
- },
- "s3_bucket": schema.StringAttribute{
- Computed: true,
- Description: `The name of the S3 bucket.`,
- },
- "s3_region": schema.StringAttribute{
- Computed: true,
- Description: `Region name of the S3 bucket.`,
- },
- },
- Description: `Loading method used to select the way data will be uploaded to Firebolt`,
- },
- "destination_firebolt_loading_method_sql_inserts": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SQL",
- ),
- },
- Description: `must be one of ["SQL"]`,
- },
- },
- Description: `Loading method used to select the way data will be uploaded to Firebolt`,
- },
- "destination_firebolt_update_loading_method_external_table_via_s3": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "aws_key_id": schema.StringAttribute{
- Computed: true,
- Description: `AWS access key granting read and write access to S3.`,
- },
- "aws_key_secret": schema.StringAttribute{
- Computed: true,
- Description: `Corresponding secret part of the AWS Key`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3",
- ),
- },
- Description: `must be one of ["S3"]`,
- },
- "s3_bucket": schema.StringAttribute{
- Computed: true,
- Description: `The name of the S3 bucket.`,
- },
- "s3_region": schema.StringAttribute{
- Computed: true,
- Description: `Region name of the S3 bucket.`,
- },
- },
- Description: `Loading method used to select the way data will be uploaded to Firebolt`,
- },
- "destination_firebolt_update_loading_method_sql_inserts": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SQL",
- ),
- },
- Description: `must be one of ["SQL"]`,
- },
- },
- Description: `Loading method used to select the way data will be uploaded to Firebolt`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Loading method used to select the way data will be uploaded to Firebolt`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Firebolt password.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Firebolt email address you use to login.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_firebolt_data_source_sdk.go b/internal/provider/destination_firebolt_data_source_sdk.go
old mode 100755
new mode 100644
index 70308ab1b..a527015e2
--- a/internal/provider/destination_firebolt_data_source_sdk.go
+++ b/internal/provider/destination_firebolt_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationFireboltDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_firebolt_resource.go b/internal/provider/destination_firebolt_resource.go
old mode 100755
new mode 100644
index 0ab38e643..6d8d370fc
--- a/internal/provider/destination_firebolt_resource.go
+++ b/internal/provider/destination_firebolt_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type DestinationFireboltResource struct {
// DestinationFireboltResourceModel describes the resource data model.
type DestinationFireboltResourceModel struct {
Configuration DestinationFirebolt `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -62,15 +63,6 @@ func (r *DestinationFireboltResource) Schema(ctx context.Context, req resource.S
Required: true,
Description: `The database to connect to.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "firebolt",
- ),
- },
- Description: `must be one of ["firebolt"]`,
- },
"engine": schema.StringAttribute{
Optional: true,
Description: `Engine name or url to connect to.`,
@@ -82,26 +74,19 @@ func (r *DestinationFireboltResource) Schema(ctx context.Context, req resource.S
"loading_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_firebolt_loading_method_external_table_via_s3": schema.SingleNestedAttribute{
+ "external_table_via_s3": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"aws_key_id": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `AWS access key granting read and write access to S3.`,
},
"aws_key_secret": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Corresponding secret part of the AWS Key`,
},
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3",
- ),
- },
- Description: `must be one of ["S3"]`,
- },
"s3_bucket": schema.StringAttribute{
Required: true,
Description: `The name of the S3 bucket.`,
@@ -113,75 +98,20 @@ func (r *DestinationFireboltResource) Schema(ctx context.Context, req resource.S
},
Description: `Loading method used to select the way data will be uploaded to Firebolt`,
},
- "destination_firebolt_loading_method_sql_inserts": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SQL",
- ),
- },
- Description: `must be one of ["SQL"]`,
- },
- },
- Description: `Loading method used to select the way data will be uploaded to Firebolt`,
- },
- "destination_firebolt_update_loading_method_external_table_via_s3": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "aws_key_id": schema.StringAttribute{
- Required: true,
- Description: `AWS access key granting read and write access to S3.`,
- },
- "aws_key_secret": schema.StringAttribute{
- Required: true,
- Description: `Corresponding secret part of the AWS Key`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3",
- ),
- },
- Description: `must be one of ["S3"]`,
- },
- "s3_bucket": schema.StringAttribute{
- Required: true,
- Description: `The name of the S3 bucket.`,
- },
- "s3_region": schema.StringAttribute{
- Required: true,
- Description: `Region name of the S3 bucket.`,
- },
- },
- Description: `Loading method used to select the way data will be uploaded to Firebolt`,
- },
- "destination_firebolt_update_loading_method_sql_inserts": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SQL",
- ),
- },
- Description: `must be one of ["SQL"]`,
- },
- },
+ "sql_inserts": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Loading method used to select the way data will be uploaded to Firebolt`,
},
},
+ Description: `Loading method used to select the way data will be uploaded to Firebolt`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Loading method used to select the way data will be uploaded to Firebolt`,
},
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Firebolt password.`,
},
"username": schema.StringAttribute{
@@ -190,6 +120,13 @@ func (r *DestinationFireboltResource) Schema(ctx context.Context, req resource.S
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -206,7 +143,8 @@ func (r *DestinationFireboltResource) Schema(ctx context.Context, req resource.S
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -256,7 +194,7 @@ func (r *DestinationFireboltResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationFirebolt(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -432,5 +370,5 @@ func (r *DestinationFireboltResource) Delete(ctx context.Context, req resource.D
}
func (r *DestinationFireboltResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_firebolt_resource_sdk.go b/internal/provider/destination_firebolt_resource_sdk.go
old mode 100755
new mode 100644
index acb3d4ba9..ac7255825
--- a/internal/provider/destination_firebolt_resource_sdk.go
+++ b/internal/provider/destination_firebolt_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -15,7 +15,6 @@ func (r *DestinationFireboltResourceModel) ToCreateSDKType() *shared.Destination
account = nil
}
database := r.Configuration.Database.ValueString()
- destinationType := shared.DestinationFireboltFirebolt(r.Configuration.DestinationType.ValueString())
engine := new(string)
if !r.Configuration.Engine.IsUnknown() && !r.Configuration.Engine.IsNull() {
*engine = r.Configuration.Engine.ValueString()
@@ -30,55 +29,56 @@ func (r *DestinationFireboltResourceModel) ToCreateSDKType() *shared.Destination
}
var loadingMethod *shared.DestinationFireboltLoadingMethod
if r.Configuration.LoadingMethod != nil {
- var destinationFireboltLoadingMethodSQLInserts *shared.DestinationFireboltLoadingMethodSQLInserts
- if r.Configuration.LoadingMethod.DestinationFireboltLoadingMethodSQLInserts != nil {
- method := shared.DestinationFireboltLoadingMethodSQLInsertsMethod(r.Configuration.LoadingMethod.DestinationFireboltLoadingMethodSQLInserts.Method.ValueString())
- destinationFireboltLoadingMethodSQLInserts = &shared.DestinationFireboltLoadingMethodSQLInserts{
- Method: method,
- }
+ var destinationFireboltSQLInserts *shared.DestinationFireboltSQLInserts
+ if r.Configuration.LoadingMethod.SQLInserts != nil {
+ destinationFireboltSQLInserts = &shared.DestinationFireboltSQLInserts{}
}
- if destinationFireboltLoadingMethodSQLInserts != nil {
+ if destinationFireboltSQLInserts != nil {
loadingMethod = &shared.DestinationFireboltLoadingMethod{
- DestinationFireboltLoadingMethodSQLInserts: destinationFireboltLoadingMethodSQLInserts,
+ DestinationFireboltSQLInserts: destinationFireboltSQLInserts,
}
}
- var destinationFireboltLoadingMethodExternalTableViaS3 *shared.DestinationFireboltLoadingMethodExternalTableViaS3
- if r.Configuration.LoadingMethod.DestinationFireboltLoadingMethodExternalTableViaS3 != nil {
- awsKeyID := r.Configuration.LoadingMethod.DestinationFireboltLoadingMethodExternalTableViaS3.AwsKeyID.ValueString()
- awsKeySecret := r.Configuration.LoadingMethod.DestinationFireboltLoadingMethodExternalTableViaS3.AwsKeySecret.ValueString()
- method1 := shared.DestinationFireboltLoadingMethodExternalTableViaS3Method(r.Configuration.LoadingMethod.DestinationFireboltLoadingMethodExternalTableViaS3.Method.ValueString())
- s3Bucket := r.Configuration.LoadingMethod.DestinationFireboltLoadingMethodExternalTableViaS3.S3Bucket.ValueString()
- s3Region := r.Configuration.LoadingMethod.DestinationFireboltLoadingMethodExternalTableViaS3.S3Region.ValueString()
- destinationFireboltLoadingMethodExternalTableViaS3 = &shared.DestinationFireboltLoadingMethodExternalTableViaS3{
+ var destinationFireboltExternalTableViaS3 *shared.DestinationFireboltExternalTableViaS3
+ if r.Configuration.LoadingMethod.ExternalTableViaS3 != nil {
+ awsKeyID := r.Configuration.LoadingMethod.ExternalTableViaS3.AwsKeyID.ValueString()
+ awsKeySecret := r.Configuration.LoadingMethod.ExternalTableViaS3.AwsKeySecret.ValueString()
+ s3Bucket := r.Configuration.LoadingMethod.ExternalTableViaS3.S3Bucket.ValueString()
+ s3Region := r.Configuration.LoadingMethod.ExternalTableViaS3.S3Region.ValueString()
+ destinationFireboltExternalTableViaS3 = &shared.DestinationFireboltExternalTableViaS3{
AwsKeyID: awsKeyID,
AwsKeySecret: awsKeySecret,
- Method: method1,
S3Bucket: s3Bucket,
S3Region: s3Region,
}
}
- if destinationFireboltLoadingMethodExternalTableViaS3 != nil {
+ if destinationFireboltExternalTableViaS3 != nil {
loadingMethod = &shared.DestinationFireboltLoadingMethod{
- DestinationFireboltLoadingMethodExternalTableViaS3: destinationFireboltLoadingMethodExternalTableViaS3,
+ DestinationFireboltExternalTableViaS3: destinationFireboltExternalTableViaS3,
}
}
}
password := r.Configuration.Password.ValueString()
username := r.Configuration.Username.ValueString()
configuration := shared.DestinationFirebolt{
- Account: account,
- Database: database,
- DestinationType: destinationType,
- Engine: engine,
- Host: host,
- LoadingMethod: loadingMethod,
- Password: password,
- Username: username,
+ Account: account,
+ Database: database,
+ Engine: engine,
+ Host: host,
+ LoadingMethod: loadingMethod,
+ Password: password,
+ Username: username,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationFireboltCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -112,36 +112,31 @@ func (r *DestinationFireboltResourceModel) ToUpdateSDKType() *shared.Destination
}
var loadingMethod *shared.DestinationFireboltUpdateLoadingMethod
if r.Configuration.LoadingMethod != nil {
- var destinationFireboltUpdateLoadingMethodSQLInserts *shared.DestinationFireboltUpdateLoadingMethodSQLInserts
- if r.Configuration.LoadingMethod.DestinationFireboltUpdateLoadingMethodSQLInserts != nil {
- method := shared.DestinationFireboltUpdateLoadingMethodSQLInsertsMethod(r.Configuration.LoadingMethod.DestinationFireboltUpdateLoadingMethodSQLInserts.Method.ValueString())
- destinationFireboltUpdateLoadingMethodSQLInserts = &shared.DestinationFireboltUpdateLoadingMethodSQLInserts{
- Method: method,
- }
+ var sqlInserts *shared.SQLInserts
+ if r.Configuration.LoadingMethod.SQLInserts != nil {
+ sqlInserts = &shared.SQLInserts{}
}
- if destinationFireboltUpdateLoadingMethodSQLInserts != nil {
+ if sqlInserts != nil {
loadingMethod = &shared.DestinationFireboltUpdateLoadingMethod{
- DestinationFireboltUpdateLoadingMethodSQLInserts: destinationFireboltUpdateLoadingMethodSQLInserts,
+ SQLInserts: sqlInserts,
}
}
- var destinationFireboltUpdateLoadingMethodExternalTableViaS3 *shared.DestinationFireboltUpdateLoadingMethodExternalTableViaS3
- if r.Configuration.LoadingMethod.DestinationFireboltUpdateLoadingMethodExternalTableViaS3 != nil {
- awsKeyID := r.Configuration.LoadingMethod.DestinationFireboltUpdateLoadingMethodExternalTableViaS3.AwsKeyID.ValueString()
- awsKeySecret := r.Configuration.LoadingMethod.DestinationFireboltUpdateLoadingMethodExternalTableViaS3.AwsKeySecret.ValueString()
- method1 := shared.DestinationFireboltUpdateLoadingMethodExternalTableViaS3Method(r.Configuration.LoadingMethod.DestinationFireboltUpdateLoadingMethodExternalTableViaS3.Method.ValueString())
- s3Bucket := r.Configuration.LoadingMethod.DestinationFireboltUpdateLoadingMethodExternalTableViaS3.S3Bucket.ValueString()
- s3Region := r.Configuration.LoadingMethod.DestinationFireboltUpdateLoadingMethodExternalTableViaS3.S3Region.ValueString()
- destinationFireboltUpdateLoadingMethodExternalTableViaS3 = &shared.DestinationFireboltUpdateLoadingMethodExternalTableViaS3{
+ var externalTableViaS3 *shared.ExternalTableViaS3
+ if r.Configuration.LoadingMethod.ExternalTableViaS3 != nil {
+ awsKeyID := r.Configuration.LoadingMethod.ExternalTableViaS3.AwsKeyID.ValueString()
+ awsKeySecret := r.Configuration.LoadingMethod.ExternalTableViaS3.AwsKeySecret.ValueString()
+ s3Bucket := r.Configuration.LoadingMethod.ExternalTableViaS3.S3Bucket.ValueString()
+ s3Region := r.Configuration.LoadingMethod.ExternalTableViaS3.S3Region.ValueString()
+ externalTableViaS3 = &shared.ExternalTableViaS3{
AwsKeyID: awsKeyID,
AwsKeySecret: awsKeySecret,
- Method: method1,
S3Bucket: s3Bucket,
S3Region: s3Region,
}
}
- if destinationFireboltUpdateLoadingMethodExternalTableViaS3 != nil {
+ if externalTableViaS3 != nil {
loadingMethod = &shared.DestinationFireboltUpdateLoadingMethod{
- DestinationFireboltUpdateLoadingMethodExternalTableViaS3: destinationFireboltUpdateLoadingMethodExternalTableViaS3,
+ ExternalTableViaS3: externalTableViaS3,
}
}
}
diff --git a/internal/provider/destination_firestore_data_source.go b/internal/provider/destination_firestore_data_source.go
old mode 100755
new mode 100644
index f097c9b38..f5ae181a6
--- a/internal/provider/destination_firestore_data_source.go
+++ b/internal/provider/destination_firestore_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationFirestoreDataSource struct {
// DestinationFirestoreDataSourceModel describes the data model.
type DestinationFirestoreDataSourceModel struct {
- Configuration DestinationFirestore `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,31 +47,17 @@ func (r *DestinationFirestoreDataSource) Schema(ctx context.Context, req datasou
MarkdownDescription: "DestinationFirestore DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "firestore",
- ),
- },
- Description: `must be one of ["firestore"]`,
- },
- "project_id": schema.StringAttribute{
- Computed: true,
- Description: `The GCP project ID for the project containing the target BigQuery dataset.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_firestore_data_source_sdk.go b/internal/provider/destination_firestore_data_source_sdk.go
old mode 100755
new mode 100644
index cf2e358c6..c7c9dd163
--- a/internal/provider/destination_firestore_data_source_sdk.go
+++ b/internal/provider/destination_firestore_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationFirestoreDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_firestore_resource.go b/internal/provider/destination_firestore_resource.go
old mode 100755
new mode 100644
index bcbc69c2a..483d26a03
--- a/internal/provider/destination_firestore_resource.go
+++ b/internal/provider/destination_firestore_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationFirestoreResource struct {
// DestinationFirestoreResourceModel describes the resource data model.
type DestinationFirestoreResourceModel struct {
Configuration DestinationFirestore `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -57,21 +57,19 @@ func (r *DestinationFirestoreResource) Schema(ctx context.Context, req resource.
Optional: true,
Description: `The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "firestore",
- ),
- },
- Description: `must be one of ["firestore"]`,
- },
"project_id": schema.StringAttribute{
Required: true,
Description: `The GCP project ID for the project containing the target BigQuery dataset.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -88,7 +86,8 @@ func (r *DestinationFirestoreResource) Schema(ctx context.Context, req resource.
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -138,7 +137,7 @@ func (r *DestinationFirestoreResource) Create(ctx context.Context, req resource.
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationFirestore(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -314,5 +313,5 @@ func (r *DestinationFirestoreResource) Delete(ctx context.Context, req resource.
}
func (r *DestinationFirestoreResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_firestore_resource_sdk.go b/internal/provider/destination_firestore_resource_sdk.go
old mode 100755
new mode 100644
index f503a0f0d..0386833d1
--- a/internal/provider/destination_firestore_resource_sdk.go
+++ b/internal/provider/destination_firestore_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -14,17 +14,22 @@ func (r *DestinationFirestoreResourceModel) ToCreateSDKType() *shared.Destinatio
} else {
credentialsJSON = nil
}
- destinationType := shared.DestinationFirestoreFirestore(r.Configuration.DestinationType.ValueString())
projectID := r.Configuration.ProjectID.ValueString()
configuration := shared.DestinationFirestore{
CredentialsJSON: credentialsJSON,
- DestinationType: destinationType,
ProjectID: projectID,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationFirestoreCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
diff --git a/internal/provider/destination_gcs_data_source.go b/internal/provider/destination_gcs_data_source.go
old mode 100755
new mode 100644
index 76072f3e1..b78e0e08a
--- a/internal/provider/destination_gcs_data_source.go
+++ b/internal/provider/destination_gcs_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationGcsDataSource struct {
// DestinationGcsDataSourceModel describes the data model.
type DestinationGcsDataSourceModel struct {
- Configuration DestinationGcs `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,739 +47,17 @@ func (r *DestinationGcsDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "DestinationGcs DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credential": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_gcs_authentication_hmac_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credential_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HMAC_KEY",
- ),
- },
- Description: `must be one of ["HMAC_KEY"]`,
- },
- "hmac_key_access_id": schema.StringAttribute{
- Computed: true,
- Description: `When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here.`,
- },
- "hmac_key_secret": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here.`,
- },
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "destination_gcs_update_authentication_hmac_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credential_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HMAC_KEY",
- ),
- },
- Description: `must be one of ["HMAC_KEY"]`,
- },
- "hmac_key_access_id": schema.StringAttribute{
- Computed: true,
- Description: `When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here.`,
- },
- "hmac_key_secret": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here.`,
- },
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "gcs",
- ),
- },
- Description: `must be one of ["gcs"]`,
- },
- "format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_gcs_output_format_avro_apache_avro": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "no compression",
- ),
- },
- Description: `must be one of ["no compression"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_output_format_avro_apache_avro_compression_codec_deflate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Deflate",
- ),
- },
- Description: `must be one of ["Deflate"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `0: no compression & fastest, 9: best compression & slowest.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bzip2",
- ),
- },
- Description: `must be one of ["bzip2"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_output_format_avro_apache_avro_compression_codec_xz": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xz",
- ),
- },
- Description: `must be one of ["xz"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zstandard",
- ),
- },
- Description: `must be one of ["zstandard"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.`,
- },
- "include_checksum": schema.BoolAttribute{
- Computed: true,
- Description: `If true, include a checksum with each data block.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_output_format_avro_apache_avro_compression_codec_snappy": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snappy",
- ),
- },
- Description: `must be one of ["snappy"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Avro",
- ),
- },
- Description: `must be one of ["Avro"]`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "destination_gcs_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_gcs_output_format_csv_comma_separated_values_compression_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- "destination_gcs_output_format_csv_comma_separated_values_compression_gzip": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- "flattening": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CSV",
- ),
- },
- Description: `must be one of ["CSV"]`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "destination_gcs_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "destination_gcs_output_format_parquet_columnar_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "block_size_mb": schema.Int64Attribute{
- Computed: true,
- Description: `This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.`,
- },
- "compression_codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "SNAPPY",
- "GZIP",
- "LZO",
- "BROTLI",
- "LZ4",
- "ZSTD",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]` + "\n" +
- `The compression algorithm used to compress data pages.`,
- },
- "dictionary_encoding": schema.BoolAttribute{
- Computed: true,
- Description: `Default: true.`,
- },
- "dictionary_page_size_kb": schema.Int64Attribute{
- Computed: true,
- Description: `There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Parquet",
- ),
- },
- Description: `must be one of ["Parquet"]`,
- },
- "max_padding_size_mb": schema.Int64Attribute{
- Computed: true,
- Description: `Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.`,
- },
- "page_size_kb": schema.Int64Attribute{
- Computed: true,
- Description: `The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "no compression",
- ),
- },
- Description: `must be one of ["no compression"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_deflate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Deflate",
- ),
- },
- Description: `must be one of ["Deflate"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `0: no compression & fastest, 9: best compression & slowest.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_bzip2": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bzip2",
- ),
- },
- Description: `must be one of ["bzip2"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_xz": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xz",
- ),
- },
- Description: `must be one of ["xz"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_zstandard": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zstandard",
- ),
- },
- Description: `must be one of ["zstandard"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.`,
- },
- "include_checksum": schema.BoolAttribute{
- Computed: true,
- Description: `If true, include a checksum with each data block.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_snappy": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snappy",
- ),
- },
- Description: `must be one of ["snappy"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Avro",
- ),
- },
- Description: `must be one of ["Avro"]`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "destination_gcs_update_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_gcs_update_output_format_csv_comma_separated_values_compression_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- "destination_gcs_update_output_format_csv_comma_separated_values_compression_gzip": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- "flattening": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CSV",
- ),
- },
- Description: `must be one of ["CSV"]`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "destination_gcs_update_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "destination_gcs_update_output_format_parquet_columnar_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "block_size_mb": schema.Int64Attribute{
- Computed: true,
- Description: `This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.`,
- },
- "compression_codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "SNAPPY",
- "GZIP",
- "LZO",
- "BROTLI",
- "LZ4",
- "ZSTD",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]` + "\n" +
- `The compression algorithm used to compress data pages.`,
- },
- "dictionary_encoding": schema.BoolAttribute{
- Computed: true,
- Description: `Default: true.`,
- },
- "dictionary_page_size_kb": schema.Int64Attribute{
- Computed: true,
- Description: `There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Parquet",
- ),
- },
- Description: `must be one of ["Parquet"]`,
- },
- "max_padding_size_mb": schema.Int64Attribute{
- Computed: true,
- Description: `Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.`,
- },
- "page_size_kb": schema.Int64Attribute{
- Computed: true,
- Description: `The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "gcs_bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `You can find the bucket name in the App Engine Admin console Application Settings page, under the label Google Cloud Storage Bucket. Read more here.`,
- },
- "gcs_bucket_path": schema.StringAttribute{
- Computed: true,
- Description: `GCS Bucket Path string Subdirectory under the above bucket to sync the data into.`,
- },
- "gcs_bucket_region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "northamerica-northeast1",
- "northamerica-northeast2",
- "us-central1",
- "us-east1",
- "us-east4",
- "us-west1",
- "us-west2",
- "us-west3",
- "us-west4",
- "southamerica-east1",
- "southamerica-west1",
- "europe-central2",
- "europe-north1",
- "europe-west1",
- "europe-west2",
- "europe-west3",
- "europe-west4",
- "europe-west6",
- "asia-east1",
- "asia-east2",
- "asia-northeast1",
- "asia-northeast2",
- "asia-northeast3",
- "asia-south1",
- "asia-south2",
- "asia-southeast1",
- "asia-southeast2",
- "australia-southeast1",
- "australia-southeast2",
- "asia",
- "eu",
- "us",
- "asia1",
- "eur4",
- "nam4",
- ),
- },
- MarkdownDescription: `must be one of ["northamerica-northeast1", "northamerica-northeast2", "us-central1", "us-east1", "us-east4", "us-west1", "us-west2", "us-west3", "us-west4", "southamerica-east1", "southamerica-west1", "europe-central2", "europe-north1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "asia", "eu", "us", "asia1", "eur4", "nam4"]` + "\n" +
- `Select a Region of the GCS Bucket. Read more here.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_gcs_data_source_sdk.go b/internal/provider/destination_gcs_data_source_sdk.go
old mode 100755
new mode 100644
index 2444899a3..1340119e6
--- a/internal/provider/destination_gcs_data_source_sdk.go
+++ b/internal/provider/destination_gcs_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationGcsDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_gcs_resource.go b/internal/provider/destination_gcs_resource.go
old mode 100755
new mode 100644
index e868c0ed2..d4ae8e1cd
--- a/internal/provider/destination_gcs_resource.go
+++ b/internal/provider/destination_gcs_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationGcsResource struct {
// DestinationGcsResourceModel describes the resource data model.
type DestinationGcsResourceModel struct {
Configuration DestinationGcs `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -57,328 +59,305 @@ func (r *DestinationGcsResource) Schema(ctx context.Context, req resource.Schema
"credential": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_gcs_authentication_hmac_key": schema.SingleNestedAttribute{
+ "hmac_key": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"credential_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HMAC_KEY",
- ),
- },
- Description: `must be one of ["HMAC_KEY"]`,
- },
- "hmac_key_access_id": schema.StringAttribute{
- Required: true,
- Description: `When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here.`,
- },
- "hmac_key_secret": schema.StringAttribute{
- Required: true,
- Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here.`,
- },
- },
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "destination_gcs_update_authentication_hmac_key": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "credential_type": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["HMAC_KEY"]; Default: "HMAC_KEY"`,
Validators: []validator.String{
stringvalidator.OneOf(
"HMAC_KEY",
),
},
- Description: `must be one of ["HMAC_KEY"]`,
},
"hmac_key_access_id": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here.`,
},
"hmac_key_secret": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here.`,
},
},
Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
},
},
+ Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.`,
- },
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "gcs",
- ),
- },
- Description: `must be one of ["gcs"]`,
},
"format": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_gcs_output_format_avro_apache_avro": schema.SingleNestedAttribute{
+ "avro_apache_avro": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_codec": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2": schema.SingleNestedAttribute{
+ "bzip2": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["bzip2"]; Default: "bzip2"`,
Validators: []validator.String{
stringvalidator.OneOf(
"bzip2",
),
},
- Description: `must be one of ["bzip2"]`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
- "destination_gcs_output_format_avro_apache_avro_compression_codec_deflate": schema.SingleNestedAttribute{
+ "deflate": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["Deflate"]; Default: "Deflate"`,
Validators: []validator.String{
stringvalidator.OneOf(
"Deflate",
),
},
- Description: `must be one of ["Deflate"]`,
},
"compression_level": schema.Int64Attribute{
- Optional: true,
- Description: `0: no compression & fastest, 9: best compression & slowest.`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `0: no compression & fastest, 9: best compression & slowest.`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
- "destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression": schema.SingleNestedAttribute{
+ "no_compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["no compression"]; Default: "no compression"`,
Validators: []validator.String{
stringvalidator.OneOf(
"no compression",
),
},
- Description: `must be one of ["no compression"]`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
- "destination_gcs_output_format_avro_apache_avro_compression_codec_snappy": schema.SingleNestedAttribute{
+ "snappy": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["snappy"]; Default: "snappy"`,
Validators: []validator.String{
stringvalidator.OneOf(
"snappy",
),
},
- Description: `must be one of ["snappy"]`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
- "destination_gcs_output_format_avro_apache_avro_compression_codec_xz": schema.SingleNestedAttribute{
+ "xz": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["xz"]; Default: "xz"`,
Validators: []validator.String{
stringvalidator.OneOf(
"xz",
),
},
- Description: `must be one of ["xz"]`,
},
"compression_level": schema.Int64Attribute{
- Optional: true,
- Description: `The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.`,
+ Optional: true,
+ MarkdownDescription: `Default: 6` + "\n" +
+ `The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
- "destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard": schema.SingleNestedAttribute{
+ "zstandard": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["zstandard"]; Default: "zstandard"`,
Validators: []validator.String{
stringvalidator.OneOf(
"zstandard",
),
},
- Description: `must be one of ["zstandard"]`,
},
"compression_level": schema.Int64Attribute{
- Optional: true,
- Description: `Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.`,
+ Optional: true,
+ MarkdownDescription: `Default: 3` + "\n" +
+ `Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.`,
},
"include_checksum": schema.BoolAttribute{
- Optional: true,
- Description: `If true, include a checksum with each data block.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `If true, include a checksum with each data block.`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
},
+ Description: `The compression algorithm used to compress data. Default to no compression.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `The compression algorithm used to compress data. Default to no compression.`,
},
"format_type": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["Avro"]; Default: "Avro"`,
Validators: []validator.String{
stringvalidator.OneOf(
"Avro",
),
},
- Description: `must be one of ["Avro"]`,
},
},
Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
},
- "destination_gcs_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
+ "csv_comma_separated_values": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_gcs_output_format_csv_comma_separated_values_compression_gzip": schema.SingleNestedAttribute{
+ "gzip": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_type": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["GZIP"]; Default: "GZIP"`,
Validators: []validator.String{
stringvalidator.OneOf(
"GZIP",
),
},
- Description: `must be one of ["GZIP"]`,
},
},
Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
},
- "destination_gcs_output_format_csv_comma_separated_values_compression_no_compression": schema.SingleNestedAttribute{
+ "no_compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_type": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["No Compression"]; Default: "No Compression"`,
Validators: []validator.String{
stringvalidator.OneOf(
"No Compression",
),
},
- Description: `must be one of ["No Compression"]`,
},
},
Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
},
},
+ Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
},
"flattening": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]; Default: "No flattening"` + "\n" +
+ `Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
Validators: []validator.String{
stringvalidator.OneOf(
"No flattening",
"Root level flattening",
),
},
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
},
"format_type": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["CSV"]; Default: "CSV"`,
Validators: []validator.String{
stringvalidator.OneOf(
"CSV",
),
},
- Description: `must be one of ["CSV"]`,
},
},
Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
},
- "destination_gcs_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
+ "json_lines_newline_delimited_json": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
+ "gzip": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_type": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["GZIP"]; Default: "GZIP"`,
Validators: []validator.String{
stringvalidator.OneOf(
"GZIP",
),
},
- Description: `must be one of ["GZIP"]`,
},
},
Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
},
- "destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
+ "no_compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_type": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["No Compression"]; Default: "No Compression"`,
Validators: []validator.String{
stringvalidator.OneOf(
"No Compression",
),
},
- Description: `must be one of ["No Compression"]`,
},
},
Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
},
},
+ Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
},
"format_type": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["JSONL"]; Default: "JSONL"`,
Validators: []validator.String{
stringvalidator.OneOf(
"JSONL",
),
},
- Description: `must be one of ["JSONL"]`,
},
},
Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
},
- "destination_gcs_output_format_parquet_columnar_storage": schema.SingleNestedAttribute{
+ "parquet_columnar_storage": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"block_size_mb": schema.Int64Attribute{
- Optional: true,
- Description: `This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.`,
+ Optional: true,
+ MarkdownDescription: `Default: 128` + "\n" +
+ `This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.`,
},
"compression_codec": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]; Default: "UNCOMPRESSED"` + "\n" +
+ `The compression algorithm used to compress data pages.`,
Validators: []validator.String{
stringvalidator.OneOf(
"UNCOMPRESSED",
@@ -390,342 +369,44 @@ func (r *DestinationGcsResource) Schema(ctx context.Context, req resource.Schema
"ZSTD",
),
},
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]` + "\n" +
- `The compression algorithm used to compress data pages.`,
},
"dictionary_encoding": schema.BoolAttribute{
- Optional: true,
- Description: `Default: true.`,
- },
- "dictionary_page_size_kb": schema.Int64Attribute{
- Optional: true,
- Description: `There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Parquet",
- ),
- },
- Description: `must be one of ["Parquet"]`,
- },
- "max_padding_size_mb": schema.Int64Attribute{
- Optional: true,
- Description: `Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.`,
- },
- "page_size_kb": schema.Int64Attribute{
- Optional: true,
- Description: `The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_no_compression": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "no compression",
- ),
- },
- Description: `must be one of ["no compression"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_deflate": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Deflate",
- ),
- },
- Description: `must be one of ["Deflate"]`,
- },
- "compression_level": schema.Int64Attribute{
- Optional: true,
- Description: `0: no compression & fastest, 9: best compression & slowest.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_bzip2": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bzip2",
- ),
- },
- Description: `must be one of ["bzip2"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_xz": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xz",
- ),
- },
- Description: `must be one of ["xz"]`,
- },
- "compression_level": schema.Int64Attribute{
- Optional: true,
- Description: `The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_zstandard": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zstandard",
- ),
- },
- Description: `must be one of ["zstandard"]`,
- },
- "compression_level": schema.Int64Attribute{
- Optional: true,
- Description: `Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.`,
- },
- "include_checksum": schema.BoolAttribute{
- Optional: true,
- Description: `If true, include a checksum with each data block.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_gcs_update_output_format_avro_apache_avro_compression_codec_snappy": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snappy",
- ),
- },
- Description: `must be one of ["snappy"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Avro",
- ),
- },
- Description: `must be one of ["Avro"]`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "destination_gcs_update_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
Optional: true,
- Attributes: map[string]schema.Attribute{
- "destination_gcs_update_output_format_csv_comma_separated_values_compression_no_compression": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- "destination_gcs_update_output_format_csv_comma_separated_values_compression_gzip": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Default: true.`,
},
- "flattening": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CSV",
- ),
- },
- Description: `must be one of ["CSV"]`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "destination_gcs_update_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
+ "dictionary_page_size_kb": schema.Int64Attribute{
Optional: true,
- Attributes: map[string]schema.Attribute{
- "destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
+ MarkdownDescription: `Default: 1024` + "\n" +
+ `There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.`,
},
"format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
- },
- "destination_gcs_update_output_format_parquet_columnar_storage": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "block_size_mb": schema.Int64Attribute{
- Optional: true,
- Description: `This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.`,
- },
- "compression_codec": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "SNAPPY",
- "GZIP",
- "LZO",
- "BROTLI",
- "LZ4",
- "ZSTD",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]` + "\n" +
- `The compression algorithm used to compress data pages.`,
- },
- "dictionary_encoding": schema.BoolAttribute{
- Optional: true,
- Description: `Default: true.`,
- },
- "dictionary_page_size_kb": schema.Int64Attribute{
Optional: true,
- Description: `There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
+ Description: `must be one of ["Parquet"]; Default: "Parquet"`,
Validators: []validator.String{
stringvalidator.OneOf(
"Parquet",
),
},
- Description: `must be one of ["Parquet"]`,
},
"max_padding_size_mb": schema.Int64Attribute{
- Optional: true,
- Description: `Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.`,
+ Optional: true,
+ MarkdownDescription: `Default: 8` + "\n" +
+ `Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.`,
},
"page_size_kb": schema.Int64Attribute{
- Optional: true,
- Description: `The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.`,
+ Optional: true,
+ MarkdownDescription: `Default: 1024` + "\n" +
+ `The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.`,
},
},
Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
},
},
+ Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.`,
},
"gcs_bucket_name": schema.StringAttribute{
Required: true,
@@ -737,6 +418,8 @@ func (r *DestinationGcsResource) Schema(ctx context.Context, req resource.Schema
},
"gcs_bucket_region": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["northamerica-northeast1", "northamerica-northeast2", "us-central1", "us-east1", "us-east4", "us-west1", "us-west2", "us-west3", "us-west4", "southamerica-east1", "southamerica-west1", "europe-central2", "europe-north1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "asia", "eu", "us", "asia1", "eur4", "nam4"]; Default: "us"` + "\n" +
+ `Select a Region of the GCS Bucket. Read more here.`,
Validators: []validator.String{
stringvalidator.OneOf(
"northamerica-northeast1",
@@ -776,11 +459,16 @@ func (r *DestinationGcsResource) Schema(ctx context.Context, req resource.Schema
"nam4",
),
},
- MarkdownDescription: `must be one of ["northamerica-northeast1", "northamerica-northeast2", "us-central1", "us-east1", "us-east4", "us-west1", "us-west2", "us-west3", "us-west4", "southamerica-east1", "southamerica-west1", "europe-central2", "europe-north1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "asia", "eu", "us", "asia1", "eur4", "nam4"]` + "\n" +
- `Select a Region of the GCS Bucket. Read more here.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -797,7 +485,8 @@ func (r *DestinationGcsResource) Schema(ctx context.Context, req resource.Schema
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -847,7 +536,7 @@ func (r *DestinationGcsResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationGcs(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -1023,5 +712,5 @@ func (r *DestinationGcsResource) Delete(ctx context.Context, req resource.Delete
}
func (r *DestinationGcsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_gcs_resource_sdk.go b/internal/provider/destination_gcs_resource_sdk.go
old mode 100755
new mode 100644
index 73785e2a5..d163b0909
--- a/internal/provider/destination_gcs_resource_sdk.go
+++ b/internal/provider/destination_gcs_resource_sdk.go
@@ -3,291 +3,345 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationGcsResourceModel) ToCreateSDKType() *shared.DestinationGcsCreateRequest {
var credential shared.DestinationGcsAuthentication
- var destinationGcsAuthenticationHMACKey *shared.DestinationGcsAuthenticationHMACKey
- if r.Configuration.Credential.DestinationGcsAuthenticationHMACKey != nil {
- credentialType := shared.DestinationGcsAuthenticationHMACKeyCredentialType(r.Configuration.Credential.DestinationGcsAuthenticationHMACKey.CredentialType.ValueString())
- hmacKeyAccessID := r.Configuration.Credential.DestinationGcsAuthenticationHMACKey.HmacKeyAccessID.ValueString()
- hmacKeySecret := r.Configuration.Credential.DestinationGcsAuthenticationHMACKey.HmacKeySecret.ValueString()
- destinationGcsAuthenticationHMACKey = &shared.DestinationGcsAuthenticationHMACKey{
+ var destinationGcsHMACKey *shared.DestinationGcsHMACKey
+ if r.Configuration.Credential.HMACKey != nil {
+ credentialType := new(shared.DestinationGcsCredentialType)
+ if !r.Configuration.Credential.HMACKey.CredentialType.IsUnknown() && !r.Configuration.Credential.HMACKey.CredentialType.IsNull() {
+ *credentialType = shared.DestinationGcsCredentialType(r.Configuration.Credential.HMACKey.CredentialType.ValueString())
+ } else {
+ credentialType = nil
+ }
+ hmacKeyAccessID := r.Configuration.Credential.HMACKey.HmacKeyAccessID.ValueString()
+ hmacKeySecret := r.Configuration.Credential.HMACKey.HmacKeySecret.ValueString()
+ destinationGcsHMACKey = &shared.DestinationGcsHMACKey{
CredentialType: credentialType,
HmacKeyAccessID: hmacKeyAccessID,
HmacKeySecret: hmacKeySecret,
}
}
- if destinationGcsAuthenticationHMACKey != nil {
+ if destinationGcsHMACKey != nil {
credential = shared.DestinationGcsAuthentication{
- DestinationGcsAuthenticationHMACKey: destinationGcsAuthenticationHMACKey,
+ DestinationGcsHMACKey: destinationGcsHMACKey,
}
}
- destinationType := shared.DestinationGcsGcs(r.Configuration.DestinationType.ValueString())
var format shared.DestinationGcsOutputFormat
- var destinationGcsOutputFormatAvroApacheAvro *shared.DestinationGcsOutputFormatAvroApacheAvro
- if r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro != nil {
- var compressionCodec shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodec
- var destinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression *shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression
- if r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- codec := shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec(r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression.Codec.ValueString())
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression = &shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression{
+ var destinationGcsAvroApacheAvro *shared.DestinationGcsAvroApacheAvro
+ if r.Configuration.Format.AvroApacheAvro != nil {
+ var compressionCodec shared.DestinationGcsCompressionCodec
+ var destinationGcsNoCompression *shared.DestinationGcsNoCompression
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression != nil {
+ codec := new(shared.DestinationGcsCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.IsNull() {
+ *codec = shared.DestinationGcsCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.ValueString())
+ } else {
+ codec = nil
+ }
+ destinationGcsNoCompression = &shared.DestinationGcsNoCompression{
Codec: codec,
}
}
- if destinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- compressionCodec = shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression: destinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression,
+ if destinationGcsNoCompression != nil {
+ compressionCodec = shared.DestinationGcsCompressionCodec{
+ DestinationGcsNoCompression: destinationGcsNoCompression,
}
}
- var destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate *shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate
- if r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- codec1 := shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflateCodec(r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate.Codec.ValueString())
+ var destinationGcsDeflate *shared.DestinationGcsDeflate
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate != nil {
+ codec1 := new(shared.DestinationGcsSchemasCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.IsNull() {
+ *codec1 = shared.DestinationGcsSchemasCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.ValueString())
+ } else {
+ codec1 = nil
+ }
compressionLevel := new(int64)
- if !r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate.CompressionLevel.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate.CompressionLevel.IsNull() {
- *compressionLevel = r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate.CompressionLevel.ValueInt64()
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.IsNull() {
+ *compressionLevel = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.ValueInt64()
} else {
compressionLevel = nil
}
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate = &shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate{
+ destinationGcsDeflate = &shared.DestinationGcsDeflate{
Codec: codec1,
CompressionLevel: compressionLevel,
}
}
- if destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- compressionCodec = shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate: destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate,
+ if destinationGcsDeflate != nil {
+ compressionCodec = shared.DestinationGcsCompressionCodec{
+ DestinationGcsDeflate: destinationGcsDeflate,
}
}
- var destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 *shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2
- if r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- codec2 := shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2Codec(r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2.Codec.ValueString())
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 = &shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2{
+ var destinationGcsBzip2 *shared.DestinationGcsBzip2
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2 != nil {
+ codec2 := new(shared.DestinationGcsSchemasFormatCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.IsNull() {
+ *codec2 = shared.DestinationGcsSchemasFormatCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.ValueString())
+ } else {
+ codec2 = nil
+ }
+ destinationGcsBzip2 = &shared.DestinationGcsBzip2{
Codec: codec2,
}
}
- if destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- compressionCodec = shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2: destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2,
+ if destinationGcsBzip2 != nil {
+ compressionCodec = shared.DestinationGcsCompressionCodec{
+ DestinationGcsBzip2: destinationGcsBzip2,
}
}
- var destinationGcsOutputFormatAvroApacheAvroCompressionCodecXz *shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz
- if r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz != nil {
- codec3 := shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXzCodec(r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz.Codec.ValueString())
+ var destinationGcsXz *shared.DestinationGcsXz
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz != nil {
+ codec3 := new(shared.DestinationGcsSchemasFormatOutputFormatCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.IsNull() {
+ *codec3 = shared.DestinationGcsSchemasFormatOutputFormatCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.ValueString())
+ } else {
+ codec3 = nil
+ }
compressionLevel1 := new(int64)
- if !r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz.CompressionLevel.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz.CompressionLevel.IsNull() {
- *compressionLevel1 = r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz.CompressionLevel.ValueInt64()
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.IsNull() {
+ *compressionLevel1 = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.ValueInt64()
} else {
compressionLevel1 = nil
}
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecXz = &shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz{
+ destinationGcsXz = &shared.DestinationGcsXz{
Codec: codec3,
CompressionLevel: compressionLevel1,
}
}
- if destinationGcsOutputFormatAvroApacheAvroCompressionCodecXz != nil {
- compressionCodec = shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz: destinationGcsOutputFormatAvroApacheAvroCompressionCodecXz,
+ if destinationGcsXz != nil {
+ compressionCodec = shared.DestinationGcsCompressionCodec{
+ DestinationGcsXz: destinationGcsXz,
}
}
- var destinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard *shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard
- if r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- codec4 := shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandardCodec(r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard.Codec.ValueString())
+ var destinationGcsZstandard *shared.DestinationGcsZstandard
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard != nil {
+ codec4 := new(shared.DestinationGcsSchemasFormatOutputFormat1Codec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.IsNull() {
+ *codec4 = shared.DestinationGcsSchemasFormatOutputFormat1Codec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.ValueString())
+ } else {
+ codec4 = nil
+ }
compressionLevel2 := new(int64)
- if !r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard.CompressionLevel.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard.CompressionLevel.IsNull() {
- *compressionLevel2 = r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard.CompressionLevel.ValueInt64()
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.IsNull() {
+ *compressionLevel2 = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.ValueInt64()
} else {
compressionLevel2 = nil
}
includeChecksum := new(bool)
- if !r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.IsNull() {
- *includeChecksum = r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.ValueBool()
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.IsNull() {
+ *includeChecksum = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.ValueBool()
} else {
includeChecksum = nil
}
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard = &shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard{
+ destinationGcsZstandard = &shared.DestinationGcsZstandard{
Codec: codec4,
CompressionLevel: compressionLevel2,
IncludeChecksum: includeChecksum,
}
}
- if destinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- compressionCodec = shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard: destinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard,
+ if destinationGcsZstandard != nil {
+ compressionCodec = shared.DestinationGcsCompressionCodec{
+ DestinationGcsZstandard: destinationGcsZstandard,
}
}
- var destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy *shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy
- if r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- codec5 := shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappyCodec(r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy.Codec.ValueString())
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy = &shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy{
+ var destinationGcsSnappy *shared.DestinationGcsSnappy
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy != nil {
+ codec5 := new(shared.DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.IsNull() {
+ *codec5 = shared.DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.ValueString())
+ } else {
+ codec5 = nil
+ }
+ destinationGcsSnappy = &shared.DestinationGcsSnappy{
Codec: codec5,
}
}
- if destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- compressionCodec = shared.DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy: destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy,
+ if destinationGcsSnappy != nil {
+ compressionCodec = shared.DestinationGcsCompressionCodec{
+ DestinationGcsSnappy: destinationGcsSnappy,
}
}
- formatType := shared.DestinationGcsOutputFormatAvroApacheAvroFormatType(r.Configuration.Format.DestinationGcsOutputFormatAvroApacheAvro.FormatType.ValueString())
- destinationGcsOutputFormatAvroApacheAvro = &shared.DestinationGcsOutputFormatAvroApacheAvro{
+ formatType := new(shared.DestinationGcsFormatType)
+ if !r.Configuration.Format.AvroApacheAvro.FormatType.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.FormatType.IsNull() {
+ *formatType = shared.DestinationGcsFormatType(r.Configuration.Format.AvroApacheAvro.FormatType.ValueString())
+ } else {
+ formatType = nil
+ }
+ destinationGcsAvroApacheAvro = &shared.DestinationGcsAvroApacheAvro{
CompressionCodec: compressionCodec,
FormatType: formatType,
}
}
- if destinationGcsOutputFormatAvroApacheAvro != nil {
+ if destinationGcsAvroApacheAvro != nil {
format = shared.DestinationGcsOutputFormat{
- DestinationGcsOutputFormatAvroApacheAvro: destinationGcsOutputFormatAvroApacheAvro,
+ DestinationGcsAvroApacheAvro: destinationGcsAvroApacheAvro,
}
}
- var destinationGcsOutputFormatCSVCommaSeparatedValues *shared.DestinationGcsOutputFormatCSVCommaSeparatedValues
- if r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues != nil {
- var compression *shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression
- if r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Compression != nil {
- var destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression *shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- if r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- compressionType := new(shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType)
- if !r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.IsNull() {
- *compressionType = shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType(r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.ValueString())
+ var destinationGcsCSVCommaSeparatedValues *shared.DestinationGcsCSVCommaSeparatedValues
+ if r.Configuration.Format.CSVCommaSeparatedValues != nil {
+ var compression *shared.DestinationGcsCompression
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression != nil {
+ var destinationGcsSchemasNoCompression *shared.DestinationGcsSchemasNoCompression
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression != nil {
+ compressionType := new(shared.DestinationGcsCompressionType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.IsNull() {
+ *compressionType = shared.DestinationGcsCompressionType(r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.ValueString())
} else {
compressionType = nil
}
- destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression = &shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression{
+ destinationGcsSchemasNoCompression = &shared.DestinationGcsSchemasNoCompression{
CompressionType: compressionType,
}
}
- if destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- compression = &shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression: destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression,
+ if destinationGcsSchemasNoCompression != nil {
+ compression = &shared.DestinationGcsCompression{
+ DestinationGcsSchemasNoCompression: destinationGcsSchemasNoCompression,
}
}
- var destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP *shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP
- if r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- compressionType1 := new(shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType)
- if !r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.IsNull() {
- *compressionType1 = shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType(r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.ValueString())
+ var destinationGcsGZIP *shared.DestinationGcsGZIP
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip != nil {
+ compressionType1 := new(shared.DestinationGcsSchemasCompressionType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.IsNull() {
+ *compressionType1 = shared.DestinationGcsSchemasCompressionType(r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.ValueString())
} else {
compressionType1 = nil
}
- destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP = &shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP{
+ destinationGcsGZIP = &shared.DestinationGcsGZIP{
CompressionType: compressionType1,
}
}
- if destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- compression = &shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP: destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP,
+ if destinationGcsGZIP != nil {
+ compression = &shared.DestinationGcsCompression{
+ DestinationGcsGZIP: destinationGcsGZIP,
}
}
}
- flattening := new(shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization)
- if !r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Flattening.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Flattening.IsNull() {
- *flattening = shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization(r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.Flattening.ValueString())
+ flattening := new(shared.DestinationGcsNormalization)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsNull() {
+ *flattening = shared.DestinationGcsNormalization(r.Configuration.Format.CSVCommaSeparatedValues.Flattening.ValueString())
} else {
flattening = nil
}
- formatType1 := shared.DestinationGcsOutputFormatCSVCommaSeparatedValuesFormatType(r.Configuration.Format.DestinationGcsOutputFormatCSVCommaSeparatedValues.FormatType.ValueString())
- destinationGcsOutputFormatCSVCommaSeparatedValues = &shared.DestinationGcsOutputFormatCSVCommaSeparatedValues{
+ formatType1 := new(shared.DestinationGcsSchemasFormatType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.FormatType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.FormatType.IsNull() {
+ *formatType1 = shared.DestinationGcsSchemasFormatType(r.Configuration.Format.CSVCommaSeparatedValues.FormatType.ValueString())
+ } else {
+ formatType1 = nil
+ }
+ destinationGcsCSVCommaSeparatedValues = &shared.DestinationGcsCSVCommaSeparatedValues{
Compression: compression,
Flattening: flattening,
FormatType: formatType1,
}
}
- if destinationGcsOutputFormatCSVCommaSeparatedValues != nil {
+ if destinationGcsCSVCommaSeparatedValues != nil {
format = shared.DestinationGcsOutputFormat{
- DestinationGcsOutputFormatCSVCommaSeparatedValues: destinationGcsOutputFormatCSVCommaSeparatedValues,
+ DestinationGcsCSVCommaSeparatedValues: destinationGcsCSVCommaSeparatedValues,
}
}
- var destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON *shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- var compression1 *shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression
- if r.Configuration.Format.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON.Compression != nil {
- var destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- if r.Configuration.Format.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compressionType2 := new(shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType)
- if !r.Configuration.Format.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsNull() {
- *compressionType2 = shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(r.Configuration.Format.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.ValueString())
+ var destinationGcsJSONLinesNewlineDelimitedJSON *shared.DestinationGcsJSONLinesNewlineDelimitedJSON
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON != nil {
+ var compression1 *shared.DestinationGcsSchemasCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression != nil {
+ var destinationGcsSchemasFormatNoCompression *shared.DestinationGcsSchemasFormatNoCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression != nil {
+ compressionType2 := new(shared.DestinationGcsSchemasFormatCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsNull() {
+ *compressionType2 = shared.DestinationGcsSchemasFormatCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.ValueString())
} else {
compressionType2 = nil
}
- destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = &shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression{
+ destinationGcsSchemasFormatNoCompression = &shared.DestinationGcsSchemasFormatNoCompression{
CompressionType: compressionType2,
}
}
- if destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compression1 = &shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
+ if destinationGcsSchemasFormatNoCompression != nil {
+ compression1 = &shared.DestinationGcsSchemasCompression{
+ DestinationGcsSchemasFormatNoCompression: destinationGcsSchemasFormatNoCompression,
}
}
- var destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- if r.Configuration.Format.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compressionType3 := new(shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType)
- if !r.Configuration.Format.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsNull() {
- *compressionType3 = shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(r.Configuration.Format.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.ValueString())
+ var destinationGcsSchemasGZIP *shared.DestinationGcsSchemasGZIP
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip != nil {
+ compressionType3 := new(shared.DestinationGcsSchemasFormatOutputFormatCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsNull() {
+ *compressionType3 = shared.DestinationGcsSchemasFormatOutputFormatCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.ValueString())
} else {
compressionType3 = nil
}
- destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = &shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP{
+ destinationGcsSchemasGZIP = &shared.DestinationGcsSchemasGZIP{
CompressionType: compressionType3,
}
}
- if destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compression1 = &shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
+ if destinationGcsSchemasGZIP != nil {
+ compression1 = &shared.DestinationGcsSchemasCompression{
+ DestinationGcsSchemasGZIP: destinationGcsSchemasGZIP,
}
}
}
- formatType2 := shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONFormatType(r.Configuration.Format.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON = &shared.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON{
+ formatType2 := new(shared.DestinationGcsSchemasFormatFormatType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsNull() {
+ *formatType2 = shared.DestinationGcsSchemasFormatFormatType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.ValueString())
+ } else {
+ formatType2 = nil
+ }
+ destinationGcsJSONLinesNewlineDelimitedJSON = &shared.DestinationGcsJSONLinesNewlineDelimitedJSON{
Compression: compression1,
FormatType: formatType2,
}
}
- if destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON != nil {
+ if destinationGcsJSONLinesNewlineDelimitedJSON != nil {
format = shared.DestinationGcsOutputFormat{
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON: destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationGcsJSONLinesNewlineDelimitedJSON: destinationGcsJSONLinesNewlineDelimitedJSON,
}
}
- var destinationGcsOutputFormatParquetColumnarStorage *shared.DestinationGcsOutputFormatParquetColumnarStorage
- if r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage != nil {
+ var destinationGcsParquetColumnarStorage *shared.DestinationGcsParquetColumnarStorage
+ if r.Configuration.Format.ParquetColumnarStorage != nil {
blockSizeMb := new(int64)
- if !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.BlockSizeMb.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.BlockSizeMb.IsNull() {
- *blockSizeMb = r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.BlockSizeMb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.IsNull() {
+ *blockSizeMb = r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.ValueInt64()
} else {
blockSizeMb = nil
}
- compressionCodec1 := new(shared.DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec)
- if !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.CompressionCodec.IsNull() {
- *compressionCodec1 = shared.DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec(r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.CompressionCodec.ValueString())
+ compressionCodec1 := new(shared.DestinationGcsSchemasCompressionCodec)
+ if !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsNull() {
+ *compressionCodec1 = shared.DestinationGcsSchemasCompressionCodec(r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.ValueString())
} else {
compressionCodec1 = nil
}
dictionaryEncoding := new(bool)
- if !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.DictionaryEncoding.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.DictionaryEncoding.IsNull() {
- *dictionaryEncoding = r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.DictionaryEncoding.ValueBool()
+ if !r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.IsNull() {
+ *dictionaryEncoding = r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.ValueBool()
} else {
dictionaryEncoding = nil
}
dictionaryPageSizeKb := new(int64)
- if !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.DictionaryPageSizeKb.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.DictionaryPageSizeKb.IsNull() {
- *dictionaryPageSizeKb = r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.DictionaryPageSizeKb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.IsNull() {
+ *dictionaryPageSizeKb = r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.ValueInt64()
} else {
dictionaryPageSizeKb = nil
}
- formatType3 := shared.DestinationGcsOutputFormatParquetColumnarStorageFormatType(r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.FormatType.ValueString())
+ formatType3 := new(shared.DestinationGcsSchemasFormatOutputFormatFormatType)
+ if !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsNull() {
+ *formatType3 = shared.DestinationGcsSchemasFormatOutputFormatFormatType(r.Configuration.Format.ParquetColumnarStorage.FormatType.ValueString())
+ } else {
+ formatType3 = nil
+ }
maxPaddingSizeMb := new(int64)
- if !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.MaxPaddingSizeMb.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.MaxPaddingSizeMb.IsNull() {
- *maxPaddingSizeMb = r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.MaxPaddingSizeMb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.IsNull() {
+ *maxPaddingSizeMb = r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.ValueInt64()
} else {
maxPaddingSizeMb = nil
}
pageSizeKb := new(int64)
- if !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.PageSizeKb.IsUnknown() && !r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.PageSizeKb.IsNull() {
- *pageSizeKb = r.Configuration.Format.DestinationGcsOutputFormatParquetColumnarStorage.PageSizeKb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.IsNull() {
+ *pageSizeKb = r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.ValueInt64()
} else {
pageSizeKb = nil
}
- destinationGcsOutputFormatParquetColumnarStorage = &shared.DestinationGcsOutputFormatParquetColumnarStorage{
+ destinationGcsParquetColumnarStorage = &shared.DestinationGcsParquetColumnarStorage{
BlockSizeMb: blockSizeMb,
CompressionCodec: compressionCodec1,
DictionaryEncoding: dictionaryEncoding,
@@ -297,9 +351,9 @@ func (r *DestinationGcsResourceModel) ToCreateSDKType() *shared.DestinationGcsCr
PageSizeKb: pageSizeKb,
}
}
- if destinationGcsOutputFormatParquetColumnarStorage != nil {
+ if destinationGcsParquetColumnarStorage != nil {
format = shared.DestinationGcsOutputFormat{
- DestinationGcsOutputFormatParquetColumnarStorage: destinationGcsOutputFormatParquetColumnarStorage,
+ DestinationGcsParquetColumnarStorage: destinationGcsParquetColumnarStorage,
}
}
gcsBucketName := r.Configuration.GcsBucketName.ValueString()
@@ -312,16 +366,22 @@ func (r *DestinationGcsResourceModel) ToCreateSDKType() *shared.DestinationGcsCr
}
configuration := shared.DestinationGcs{
Credential: credential,
- DestinationType: destinationType,
Format: format,
GcsBucketName: gcsBucketName,
GcsBucketPath: gcsBucketPath,
GcsBucketRegion: gcsBucketRegion,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationGcsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -334,285 +394,340 @@ func (r *DestinationGcsResourceModel) ToGetSDKType() *shared.DestinationGcsCreat
}
func (r *DestinationGcsResourceModel) ToUpdateSDKType() *shared.DestinationGcsPutRequest {
- var credential shared.DestinationGcsUpdateAuthentication
- var destinationGcsUpdateAuthenticationHMACKey *shared.DestinationGcsUpdateAuthenticationHMACKey
- if r.Configuration.Credential.DestinationGcsUpdateAuthenticationHMACKey != nil {
- credentialType := shared.DestinationGcsUpdateAuthenticationHMACKeyCredentialType(r.Configuration.Credential.DestinationGcsUpdateAuthenticationHMACKey.CredentialType.ValueString())
- hmacKeyAccessID := r.Configuration.Credential.DestinationGcsUpdateAuthenticationHMACKey.HmacKeyAccessID.ValueString()
- hmacKeySecret := r.Configuration.Credential.DestinationGcsUpdateAuthenticationHMACKey.HmacKeySecret.ValueString()
- destinationGcsUpdateAuthenticationHMACKey = &shared.DestinationGcsUpdateAuthenticationHMACKey{
+ var credential shared.Authentication
+ var hmacKey *shared.HMACKey
+ if r.Configuration.Credential.HMACKey != nil {
+ credentialType := new(shared.CredentialType)
+ if !r.Configuration.Credential.HMACKey.CredentialType.IsUnknown() && !r.Configuration.Credential.HMACKey.CredentialType.IsNull() {
+ *credentialType = shared.CredentialType(r.Configuration.Credential.HMACKey.CredentialType.ValueString())
+ } else {
+ credentialType = nil
+ }
+ hmacKeyAccessID := r.Configuration.Credential.HMACKey.HmacKeyAccessID.ValueString()
+ hmacKeySecret := r.Configuration.Credential.HMACKey.HmacKeySecret.ValueString()
+ hmacKey = &shared.HMACKey{
CredentialType: credentialType,
HmacKeyAccessID: hmacKeyAccessID,
HmacKeySecret: hmacKeySecret,
}
}
- if destinationGcsUpdateAuthenticationHMACKey != nil {
- credential = shared.DestinationGcsUpdateAuthentication{
- DestinationGcsUpdateAuthenticationHMACKey: destinationGcsUpdateAuthenticationHMACKey,
+ if hmacKey != nil {
+ credential = shared.Authentication{
+ HMACKey: hmacKey,
}
}
var format shared.DestinationGcsUpdateOutputFormat
- var destinationGcsUpdateOutputFormatAvroApacheAvro *shared.DestinationGcsUpdateOutputFormatAvroApacheAvro
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro != nil {
- var compressionCodec shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec
- var destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression *shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- codec := shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec(r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression.Codec.ValueString())
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression = &shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression{
+ var avroApacheAvro *shared.AvroApacheAvro
+ if r.Configuration.Format.AvroApacheAvro != nil {
+ var compressionCodec shared.CompressionCodec
+ var noCompression *shared.NoCompression
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression != nil {
+ codec := new(shared.Codec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.IsNull() {
+ *codec = shared.Codec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.ValueString())
+ } else {
+ codec = nil
+ }
+ noCompression = &shared.NoCompression{
Codec: codec,
}
}
- if destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- compressionCodec = shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression: destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression,
+ if noCompression != nil {
+ compressionCodec = shared.CompressionCodec{
+ NoCompression: noCompression,
}
}
- var destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate *shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- codec1 := shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec(r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate.Codec.ValueString())
+ var deflate *shared.Deflate
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate != nil {
+ codec1 := new(shared.DestinationGcsUpdateCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.IsNull() {
+ *codec1 = shared.DestinationGcsUpdateCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.ValueString())
+ } else {
+ codec1 = nil
+ }
compressionLevel := new(int64)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate.CompressionLevel.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate.CompressionLevel.IsNull() {
- *compressionLevel = r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate.CompressionLevel.ValueInt64()
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.IsNull() {
+ *compressionLevel = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.ValueInt64()
} else {
compressionLevel = nil
}
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate = &shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate{
+ deflate = &shared.Deflate{
Codec: codec1,
CompressionLevel: compressionLevel,
}
}
- if destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- compressionCodec = shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate: destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate,
+ if deflate != nil {
+ compressionCodec = shared.CompressionCodec{
+ Deflate: deflate,
}
}
- var destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 *shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- codec2 := shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec(r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2.Codec.ValueString())
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 = &shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2{
+ var bzip2 *shared.Bzip2
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2 != nil {
+ codec2 := new(shared.DestinationGcsUpdateSchemasCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.IsNull() {
+ *codec2 = shared.DestinationGcsUpdateSchemasCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.ValueString())
+ } else {
+ codec2 = nil
+ }
+ bzip2 = &shared.Bzip2{
Codec: codec2,
}
}
- if destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- compressionCodec = shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2: destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2,
+ if bzip2 != nil {
+ compressionCodec = shared.CompressionCodec{
+ Bzip2: bzip2,
}
}
- var destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz *shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz != nil {
- codec3 := shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec(r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz.Codec.ValueString())
+ var xz *shared.Xz
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz != nil {
+ codec3 := new(shared.DestinationGcsUpdateSchemasFormatCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.IsNull() {
+ *codec3 = shared.DestinationGcsUpdateSchemasFormatCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.ValueString())
+ } else {
+ codec3 = nil
+ }
compressionLevel1 := new(int64)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz.CompressionLevel.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz.CompressionLevel.IsNull() {
- *compressionLevel1 = r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz.CompressionLevel.ValueInt64()
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.IsNull() {
+ *compressionLevel1 = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.ValueInt64()
} else {
compressionLevel1 = nil
}
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz = &shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz{
+ xz = &shared.Xz{
Codec: codec3,
CompressionLevel: compressionLevel1,
}
}
- if destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz != nil {
- compressionCodec = shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz: destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz,
+ if xz != nil {
+ compressionCodec = shared.CompressionCodec{
+ Xz: xz,
}
}
- var destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard *shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- codec4 := shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec(r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.Codec.ValueString())
+ var zstandard *shared.Zstandard
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard != nil {
+ codec4 := new(shared.DestinationGcsUpdateSchemasFormatOutputFormatCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.IsNull() {
+ *codec4 = shared.DestinationGcsUpdateSchemasFormatOutputFormatCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.ValueString())
+ } else {
+ codec4 = nil
+ }
compressionLevel2 := new(int64)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.CompressionLevel.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.CompressionLevel.IsNull() {
- *compressionLevel2 = r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.CompressionLevel.ValueInt64()
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.IsNull() {
+ *compressionLevel2 = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.ValueInt64()
} else {
compressionLevel2 = nil
}
includeChecksum := new(bool)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.IsNull() {
- *includeChecksum = r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.ValueBool()
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.IsNull() {
+ *includeChecksum = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.ValueBool()
} else {
includeChecksum = nil
}
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard = &shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard{
+ zstandard = &shared.Zstandard{
Codec: codec4,
CompressionLevel: compressionLevel2,
IncludeChecksum: includeChecksum,
}
}
- if destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- compressionCodec = shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard: destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard,
+ if zstandard != nil {
+ compressionCodec = shared.CompressionCodec{
+ Zstandard: zstandard,
}
}
- var destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy *shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- codec5 := shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec(r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy.Codec.ValueString())
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy = &shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy{
+ var snappy *shared.Snappy
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy != nil {
+ codec5 := new(shared.DestinationGcsUpdateSchemasFormatOutputFormat1Codec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.IsNull() {
+ *codec5 = shared.DestinationGcsUpdateSchemasFormatOutputFormat1Codec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.ValueString())
+ } else {
+ codec5 = nil
+ }
+ snappy = &shared.Snappy{
Codec: codec5,
}
}
- if destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- compressionCodec = shared.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy: destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy,
+ if snappy != nil {
+ compressionCodec = shared.CompressionCodec{
+ Snappy: snappy,
}
}
- formatType := shared.DestinationGcsUpdateOutputFormatAvroApacheAvroFormatType(r.Configuration.Format.DestinationGcsUpdateOutputFormatAvroApacheAvro.FormatType.ValueString())
- destinationGcsUpdateOutputFormatAvroApacheAvro = &shared.DestinationGcsUpdateOutputFormatAvroApacheAvro{
+ formatType := new(shared.DestinationGcsUpdateFormatType)
+ if !r.Configuration.Format.AvroApacheAvro.FormatType.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.FormatType.IsNull() {
+ *formatType = shared.DestinationGcsUpdateFormatType(r.Configuration.Format.AvroApacheAvro.FormatType.ValueString())
+ } else {
+ formatType = nil
+ }
+ avroApacheAvro = &shared.AvroApacheAvro{
CompressionCodec: compressionCodec,
FormatType: formatType,
}
}
- if destinationGcsUpdateOutputFormatAvroApacheAvro != nil {
+ if avroApacheAvro != nil {
format = shared.DestinationGcsUpdateOutputFormat{
- DestinationGcsUpdateOutputFormatAvroApacheAvro: destinationGcsUpdateOutputFormatAvroApacheAvro,
+ AvroApacheAvro: avroApacheAvro,
}
}
- var destinationGcsUpdateOutputFormatCSVCommaSeparatedValues *shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues != nil {
- var compression *shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Compression != nil {
- var destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression *shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- compressionType := new(shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.IsNull() {
- *compressionType = shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType(r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.ValueString())
+ var destinationGcsUpdateCSVCommaSeparatedValues *shared.DestinationGcsUpdateCSVCommaSeparatedValues
+ if r.Configuration.Format.CSVCommaSeparatedValues != nil {
+ var compression *shared.Compression
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression != nil {
+ var destinationGcsUpdateNoCompression *shared.DestinationGcsUpdateNoCompression
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression != nil {
+ compressionType := new(shared.CompressionType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.IsNull() {
+ *compressionType = shared.CompressionType(r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.ValueString())
} else {
compressionType = nil
}
- destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression = &shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression{
+ destinationGcsUpdateNoCompression = &shared.DestinationGcsUpdateNoCompression{
CompressionType: compressionType,
}
}
- if destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- compression = &shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression: destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression,
+ if destinationGcsUpdateNoCompression != nil {
+ compression = &shared.Compression{
+ DestinationGcsUpdateNoCompression: destinationGcsUpdateNoCompression,
}
}
- var destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP *shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- compressionType1 := new(shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.IsNull() {
- *compressionType1 = shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType(r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.ValueString())
+ var gzip *shared.Gzip
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip != nil {
+ compressionType1 := new(shared.DestinationGcsUpdateCompressionType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.IsNull() {
+ *compressionType1 = shared.DestinationGcsUpdateCompressionType(r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.ValueString())
} else {
compressionType1 = nil
}
- destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP = &shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP{
+ gzip = &shared.Gzip{
CompressionType: compressionType1,
}
}
- if destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- compression = &shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP: destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP,
+ if gzip != nil {
+ compression = &shared.Compression{
+ Gzip: gzip,
}
}
}
- flattening := new(shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Flattening.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Flattening.IsNull() {
- *flattening = shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization(r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.Flattening.ValueString())
+ flattening := new(shared.Normalization)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsNull() {
+ *flattening = shared.Normalization(r.Configuration.Format.CSVCommaSeparatedValues.Flattening.ValueString())
} else {
flattening = nil
}
- formatType1 := shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesFormatType(r.Configuration.Format.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues.FormatType.ValueString())
- destinationGcsUpdateOutputFormatCSVCommaSeparatedValues = &shared.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues{
+ formatType1 := new(shared.DestinationGcsUpdateSchemasFormatType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.FormatType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.FormatType.IsNull() {
+ *formatType1 = shared.DestinationGcsUpdateSchemasFormatType(r.Configuration.Format.CSVCommaSeparatedValues.FormatType.ValueString())
+ } else {
+ formatType1 = nil
+ }
+ destinationGcsUpdateCSVCommaSeparatedValues = &shared.DestinationGcsUpdateCSVCommaSeparatedValues{
Compression: compression,
Flattening: flattening,
FormatType: formatType1,
}
}
- if destinationGcsUpdateOutputFormatCSVCommaSeparatedValues != nil {
+ if destinationGcsUpdateCSVCommaSeparatedValues != nil {
format = shared.DestinationGcsUpdateOutputFormat{
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues: destinationGcsUpdateOutputFormatCSVCommaSeparatedValues,
+ DestinationGcsUpdateCSVCommaSeparatedValues: destinationGcsUpdateCSVCommaSeparatedValues,
}
}
- var destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON *shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- var compression1 *shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression != nil {
- var destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compressionType2 := new(shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsNull() {
- *compressionType2 = shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(r.Configuration.Format.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.ValueString())
+ var destinationGcsUpdateJSONLinesNewlineDelimitedJSON *shared.DestinationGcsUpdateJSONLinesNewlineDelimitedJSON
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON != nil {
+ var compression1 *shared.DestinationGcsUpdateCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression != nil {
+ var destinationGcsUpdateSchemasNoCompression *shared.DestinationGcsUpdateSchemasNoCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression != nil {
+ compressionType2 := new(shared.DestinationGcsUpdateSchemasCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsNull() {
+ *compressionType2 = shared.DestinationGcsUpdateSchemasCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.ValueString())
} else {
compressionType2 = nil
}
- destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = &shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression{
+ destinationGcsUpdateSchemasNoCompression = &shared.DestinationGcsUpdateSchemasNoCompression{
CompressionType: compressionType2,
}
}
- if destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compression1 = &shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
+ if destinationGcsUpdateSchemasNoCompression != nil {
+ compression1 = &shared.DestinationGcsUpdateCompression{
+ DestinationGcsUpdateSchemasNoCompression: destinationGcsUpdateSchemasNoCompression,
}
}
- var destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compressionType3 := new(shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsNull() {
- *compressionType3 = shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(r.Configuration.Format.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.ValueString())
+ var destinationGcsUpdateGZIP *shared.DestinationGcsUpdateGZIP
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip != nil {
+ compressionType3 := new(shared.DestinationGcsUpdateSchemasFormatCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsNull() {
+ *compressionType3 = shared.DestinationGcsUpdateSchemasFormatCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.ValueString())
} else {
compressionType3 = nil
}
- destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = &shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP{
+ destinationGcsUpdateGZIP = &shared.DestinationGcsUpdateGZIP{
CompressionType: compressionType3,
}
}
- if destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compression1 = &shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
+ if destinationGcsUpdateGZIP != nil {
+ compression1 = &shared.DestinationGcsUpdateCompression{
+ DestinationGcsUpdateGZIP: destinationGcsUpdateGZIP,
}
}
}
- formatType2 := shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType(r.Configuration.Format.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON = &shared.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON{
+ formatType2 := new(shared.DestinationGcsUpdateSchemasFormatFormatType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsNull() {
+ *formatType2 = shared.DestinationGcsUpdateSchemasFormatFormatType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.ValueString())
+ } else {
+ formatType2 = nil
+ }
+ destinationGcsUpdateJSONLinesNewlineDelimitedJSON = &shared.DestinationGcsUpdateJSONLinesNewlineDelimitedJSON{
Compression: compression1,
FormatType: formatType2,
}
}
- if destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
+ if destinationGcsUpdateJSONLinesNewlineDelimitedJSON != nil {
format = shared.DestinationGcsUpdateOutputFormat{
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON: destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationGcsUpdateJSONLinesNewlineDelimitedJSON: destinationGcsUpdateJSONLinesNewlineDelimitedJSON,
}
}
- var destinationGcsUpdateOutputFormatParquetColumnarStorage *shared.DestinationGcsUpdateOutputFormatParquetColumnarStorage
- if r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage != nil {
+ var destinationGcsUpdateParquetColumnarStorage *shared.DestinationGcsUpdateParquetColumnarStorage
+ if r.Configuration.Format.ParquetColumnarStorage != nil {
blockSizeMb := new(int64)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.BlockSizeMb.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.BlockSizeMb.IsNull() {
- *blockSizeMb = r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.BlockSizeMb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.IsNull() {
+ *blockSizeMb = r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.ValueInt64()
} else {
blockSizeMb = nil
}
- compressionCodec1 := new(shared.DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.CompressionCodec.IsNull() {
- *compressionCodec1 = shared.DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec(r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.CompressionCodec.ValueString())
+ compressionCodec1 := new(shared.DestinationGcsUpdateCompressionCodec)
+ if !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsNull() {
+ *compressionCodec1 = shared.DestinationGcsUpdateCompressionCodec(r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.ValueString())
} else {
compressionCodec1 = nil
}
dictionaryEncoding := new(bool)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.DictionaryEncoding.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.DictionaryEncoding.IsNull() {
- *dictionaryEncoding = r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.DictionaryEncoding.ValueBool()
+ if !r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.IsNull() {
+ *dictionaryEncoding = r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.ValueBool()
} else {
dictionaryEncoding = nil
}
dictionaryPageSizeKb := new(int64)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.DictionaryPageSizeKb.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.DictionaryPageSizeKb.IsNull() {
- *dictionaryPageSizeKb = r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.DictionaryPageSizeKb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.IsNull() {
+ *dictionaryPageSizeKb = r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.ValueInt64()
} else {
dictionaryPageSizeKb = nil
}
- formatType3 := shared.DestinationGcsUpdateOutputFormatParquetColumnarStorageFormatType(r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.FormatType.ValueString())
+ formatType3 := new(shared.DestinationGcsUpdateSchemasFormatOutputFormatFormatType)
+ if !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsNull() {
+ *formatType3 = shared.DestinationGcsUpdateSchemasFormatOutputFormatFormatType(r.Configuration.Format.ParquetColumnarStorage.FormatType.ValueString())
+ } else {
+ formatType3 = nil
+ }
maxPaddingSizeMb := new(int64)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.MaxPaddingSizeMb.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.MaxPaddingSizeMb.IsNull() {
- *maxPaddingSizeMb = r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.MaxPaddingSizeMb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.IsNull() {
+ *maxPaddingSizeMb = r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.ValueInt64()
} else {
maxPaddingSizeMb = nil
}
pageSizeKb := new(int64)
- if !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.PageSizeKb.IsUnknown() && !r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.PageSizeKb.IsNull() {
- *pageSizeKb = r.Configuration.Format.DestinationGcsUpdateOutputFormatParquetColumnarStorage.PageSizeKb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.IsNull() {
+ *pageSizeKb = r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.ValueInt64()
} else {
pageSizeKb = nil
}
- destinationGcsUpdateOutputFormatParquetColumnarStorage = &shared.DestinationGcsUpdateOutputFormatParquetColumnarStorage{
+ destinationGcsUpdateParquetColumnarStorage = &shared.DestinationGcsUpdateParquetColumnarStorage{
BlockSizeMb: blockSizeMb,
CompressionCodec: compressionCodec1,
DictionaryEncoding: dictionaryEncoding,
@@ -622,16 +737,16 @@ func (r *DestinationGcsResourceModel) ToUpdateSDKType() *shared.DestinationGcsPu
PageSizeKb: pageSizeKb,
}
}
- if destinationGcsUpdateOutputFormatParquetColumnarStorage != nil {
+ if destinationGcsUpdateParquetColumnarStorage != nil {
format = shared.DestinationGcsUpdateOutputFormat{
- DestinationGcsUpdateOutputFormatParquetColumnarStorage: destinationGcsUpdateOutputFormatParquetColumnarStorage,
+ DestinationGcsUpdateParquetColumnarStorage: destinationGcsUpdateParquetColumnarStorage,
}
}
gcsBucketName := r.Configuration.GcsBucketName.ValueString()
gcsBucketPath := r.Configuration.GcsBucketPath.ValueString()
- gcsBucketRegion := new(shared.DestinationGCSUpdateGCSBucketRegion)
+ gcsBucketRegion := new(shared.GCSBucketRegion)
if !r.Configuration.GcsBucketRegion.IsUnknown() && !r.Configuration.GcsBucketRegion.IsNull() {
- *gcsBucketRegion = shared.DestinationGCSUpdateGCSBucketRegion(r.Configuration.GcsBucketRegion.ValueString())
+ *gcsBucketRegion = shared.GCSBucketRegion(r.Configuration.GcsBucketRegion.ValueString())
} else {
gcsBucketRegion = nil
}
diff --git a/internal/provider/destination_googlesheets_data_source.go b/internal/provider/destination_googlesheets_data_source.go
old mode 100755
new mode 100644
index d9219a9cf..801ce615f
--- a/internal/provider/destination_googlesheets_data_source.go
+++ b/internal/provider/destination_googlesheets_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationGoogleSheetsDataSource struct {
// DestinationGoogleSheetsDataSourceModel describes the data model.
type DestinationGoogleSheetsDataSourceModel struct {
- Configuration DestinationGoogleSheets `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,45 +47,17 @@ func (r *DestinationGoogleSheetsDataSource) Schema(ctx context.Context, req data
MarkdownDescription: "DestinationGoogleSheets DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Google Sheets developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Google Sheets developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining new access token.`,
- },
- },
- Description: `Google API Credentials for connecting to Google Sheets and Google Drive APIs`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-sheets",
- ),
- },
- Description: `must be one of ["google-sheets"]`,
- },
- "spreadsheet_id": schema.StringAttribute{
- Computed: true,
- Description: `The link to your spreadsheet. See this guide for more details.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_googlesheets_data_source_sdk.go b/internal/provider/destination_googlesheets_data_source_sdk.go
old mode 100755
new mode 100644
index 437c6eaa1..754b98fa8
--- a/internal/provider/destination_googlesheets_data_source_sdk.go
+++ b/internal/provider/destination_googlesheets_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationGoogleSheetsDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_googlesheets_resource.go b/internal/provider/destination_googlesheets_resource.go
old mode 100755
new mode 100644
index 8ccd505dc..1956a7fd1
--- a/internal/provider/destination_googlesheets_resource.go
+++ b/internal/provider/destination_googlesheets_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationGoogleSheetsResource struct {
// DestinationGoogleSheetsResourceModel describes the resource data model.
type DestinationGoogleSheetsResourceModel struct {
Configuration DestinationGoogleSheets `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -66,26 +66,25 @@ func (r *DestinationGoogleSheetsResource) Schema(ctx context.Context, req resour
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The token for obtaining new access token.`,
},
},
Description: `Google API Credentials for connecting to Google Sheets and Google Drive APIs`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-sheets",
- ),
- },
- Description: `must be one of ["google-sheets"]`,
- },
"spreadsheet_id": schema.StringAttribute{
Required: true,
Description: `The link to your spreadsheet. See this guide for more details.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -102,7 +101,8 @@ func (r *DestinationGoogleSheetsResource) Schema(ctx context.Context, req resour
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -152,7 +152,7 @@ func (r *DestinationGoogleSheetsResource) Create(ctx context.Context, req resour
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationGoogleSheets(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -328,5 +328,5 @@ func (r *DestinationGoogleSheetsResource) Delete(ctx context.Context, req resour
}
func (r *DestinationGoogleSheetsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_googlesheets_resource_sdk.go b/internal/provider/destination_googlesheets_resource_sdk.go
old mode 100755
new mode 100644
index d9b9cb6ba..aa75e33e3
--- a/internal/provider/destination_googlesheets_resource_sdk.go
+++ b/internal/provider/destination_googlesheets_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -16,17 +16,22 @@ func (r *DestinationGoogleSheetsResourceModel) ToCreateSDKType() *shared.Destina
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
- destinationType := shared.DestinationGoogleSheetsGoogleSheets(r.Configuration.DestinationType.ValueString())
spreadsheetID := r.Configuration.SpreadsheetID.ValueString()
configuration := shared.DestinationGoogleSheets{
- Credentials: credentials,
- DestinationType: destinationType,
- SpreadsheetID: spreadsheetID,
+ Credentials: credentials,
+ SpreadsheetID: spreadsheetID,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationGoogleSheetsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -42,7 +47,7 @@ func (r *DestinationGoogleSheetsResourceModel) ToUpdateSDKType() *shared.Destina
clientID := r.Configuration.Credentials.ClientID.ValueString()
clientSecret := r.Configuration.Credentials.ClientSecret.ValueString()
refreshToken := r.Configuration.Credentials.RefreshToken.ValueString()
- credentials := shared.DestinationGoogleSheetsUpdateAuthenticationViaGoogleOAuth{
+ credentials := shared.AuthenticationViaGoogleOAuth{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
diff --git a/internal/provider/destination_keen_data_source.go b/internal/provider/destination_keen_data_source.go
old mode 100755
new mode 100644
index 7d01313e1..22f4ce9fb
--- a/internal/provider/destination_keen_data_source.go
+++ b/internal/provider/destination_keen_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationKeenDataSource struct {
// DestinationKeenDataSourceModel describes the data model.
type DestinationKeenDataSourceModel struct {
- Configuration DestinationKeen `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,35 +47,17 @@ func (r *DestinationKeenDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "DestinationKeen DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `To get Keen Master API Key, navigate to the Access tab from the left-hand, side panel and check the Project Details section.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "keen",
- ),
- },
- Description: `must be one of ["keen"]`,
- },
- "infer_timestamp": schema.BoolAttribute{
- Computed: true,
- Description: `Allow connector to guess keen.timestamp value based on the streamed data.`,
- },
- "project_id": schema.StringAttribute{
- Computed: true,
- Description: `To get Keen Project ID, navigate to the Access tab from the left-hand, side panel and check the Project Details section.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_keen_data_source_sdk.go b/internal/provider/destination_keen_data_source_sdk.go
old mode 100755
new mode 100644
index 5754f12b3..624d14e1c
--- a/internal/provider/destination_keen_data_source_sdk.go
+++ b/internal/provider/destination_keen_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationKeenDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_keen_resource.go b/internal/provider/destination_keen_resource.go
old mode 100755
new mode 100644
index 2f70e5d9d..9f0d4bc33
--- a/internal/provider/destination_keen_resource.go
+++ b/internal/provider/destination_keen_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationKeenResource struct {
// DestinationKeenResourceModel describes the resource data model.
type DestinationKeenResourceModel struct {
Configuration DestinationKeen `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -55,20 +55,13 @@ func (r *DestinationKeenResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `To get Keen Master API Key, navigate to the Access tab from the left-hand, side panel and check the Project Details section.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "keen",
- ),
- },
- Description: `must be one of ["keen"]`,
- },
"infer_timestamp": schema.BoolAttribute{
- Optional: true,
- Description: `Allow connector to guess keen.timestamp value based on the streamed data.`,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Allow connector to guess keen.timestamp value based on the streamed data.`,
},
"project_id": schema.StringAttribute{
Required: true,
@@ -76,6 +69,13 @@ func (r *DestinationKeenResource) Schema(ctx context.Context, req resource.Schem
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -92,7 +92,8 @@ func (r *DestinationKeenResource) Schema(ctx context.Context, req resource.Schem
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -142,7 +143,7 @@ func (r *DestinationKeenResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationKeen(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -318,5 +319,5 @@ func (r *DestinationKeenResource) Delete(ctx context.Context, req resource.Delet
}
func (r *DestinationKeenResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_keen_resource_sdk.go b/internal/provider/destination_keen_resource_sdk.go
old mode 100755
new mode 100644
index 42b31ced5..3a674fddc
--- a/internal/provider/destination_keen_resource_sdk.go
+++ b/internal/provider/destination_keen_resource_sdk.go
@@ -3,13 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationKeenResourceModel) ToCreateSDKType() *shared.DestinationKeenCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- destinationType := shared.DestinationKeenKeen(r.Configuration.DestinationType.ValueString())
inferTimestamp := new(bool)
if !r.Configuration.InferTimestamp.IsUnknown() && !r.Configuration.InferTimestamp.IsNull() {
*inferTimestamp = r.Configuration.InferTimestamp.ValueBool()
@@ -18,15 +17,21 @@ func (r *DestinationKeenResourceModel) ToCreateSDKType() *shared.DestinationKeen
}
projectID := r.Configuration.ProjectID.ValueString()
configuration := shared.DestinationKeen{
- APIKey: apiKey,
- DestinationType: destinationType,
- InferTimestamp: inferTimestamp,
- ProjectID: projectID,
+ APIKey: apiKey,
+ InferTimestamp: inferTimestamp,
+ ProjectID: projectID,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationKeenCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
diff --git a/internal/provider/destination_kinesis_data_source.go b/internal/provider/destination_kinesis_data_source.go
old mode 100755
new mode 100644
index f4d98b581..746563466
--- a/internal/provider/destination_kinesis_data_source.go
+++ b/internal/provider/destination_kinesis_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationKinesisDataSource struct {
// DestinationKinesisDataSourceModel describes the data model.
type DestinationKinesisDataSourceModel struct {
- Configuration DestinationKinesis `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,47 +47,17 @@ func (r *DestinationKinesisDataSource) Schema(ctx context.Context, req datasourc
MarkdownDescription: "DestinationKinesis DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key": schema.StringAttribute{
- Computed: true,
- Description: `Generate the AWS Access Key for current user.`,
- },
- "buffer_size": schema.Int64Attribute{
- Computed: true,
- Description: `Buffer size for storing kinesis records before being batch streamed.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "kinesis",
- ),
- },
- Description: `must be one of ["kinesis"]`,
- },
- "endpoint": schema.StringAttribute{
- Computed: true,
- Description: `AWS Kinesis endpoint.`,
- },
- "private_key": schema.StringAttribute{
- Computed: true,
- Description: `The AWS Private Key - a string of numbers and letters that are unique for each account, also known as a "recovery phrase".`,
- },
- "region": schema.StringAttribute{
- Computed: true,
- Description: `AWS region. Your account determines the Regions that are available to you.`,
- },
- "shard_count": schema.Int64Attribute{
- Computed: true,
- Description: `Number of shards to which the data should be streamed.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_kinesis_data_source_sdk.go b/internal/provider/destination_kinesis_data_source_sdk.go
old mode 100755
new mode 100644
index 7a7f343fb..4ffe24bd1
--- a/internal/provider/destination_kinesis_data_source_sdk.go
+++ b/internal/provider/destination_kinesis_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationKinesisDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_kinesis_resource.go b/internal/provider/destination_kinesis_resource.go
old mode 100755
new mode 100644
index 264fef8c2..89e57dd0e
--- a/internal/provider/destination_kinesis_resource.go
+++ b/internal/provider/destination_kinesis_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationKinesisResource struct {
// DestinationKinesisResourceModel describes the resource data model.
type DestinationKinesisResourceModel struct {
Configuration DestinationKinesis `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -58,17 +58,9 @@ func (r *DestinationKinesisResource) Schema(ctx context.Context, req resource.Sc
Description: `Generate the AWS Access Key for current user.`,
},
"buffer_size": schema.Int64Attribute{
- Required: true,
- Description: `Buffer size for storing kinesis records before being batch streamed.`,
- },
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "kinesis",
- ),
- },
- Description: `must be one of ["kinesis"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 100` + "\n" +
+ `Buffer size for storing kinesis records before being batch streamed.`,
},
"endpoint": schema.StringAttribute{
Required: true,
@@ -83,11 +75,19 @@ func (r *DestinationKinesisResource) Schema(ctx context.Context, req resource.Sc
Description: `AWS region. Your account determines the Regions that are available to you.`,
},
"shard_count": schema.Int64Attribute{
- Required: true,
- Description: `Number of shards to which the data should be streamed.`,
+ Optional: true,
+ MarkdownDescription: `Default: 5` + "\n" +
+ `Number of shards to which the data should be streamed.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -104,7 +104,8 @@ func (r *DestinationKinesisResource) Schema(ctx context.Context, req resource.Sc
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -154,7 +155,7 @@ func (r *DestinationKinesisResource) Create(ctx context.Context, req resource.Cr
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationKinesis(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -330,5 +331,5 @@ func (r *DestinationKinesisResource) Delete(ctx context.Context, req resource.De
}
func (r *DestinationKinesisResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_kinesis_resource_sdk.go b/internal/provider/destination_kinesis_resource_sdk.go
old mode 100755
new mode 100644
index abe4aa45b..faf7e1325
--- a/internal/provider/destination_kinesis_resource_sdk.go
+++ b/internal/provider/destination_kinesis_resource_sdk.go
@@ -3,31 +3,46 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationKinesisResourceModel) ToCreateSDKType() *shared.DestinationKinesisCreateRequest {
accessKey := r.Configuration.AccessKey.ValueString()
- bufferSize := r.Configuration.BufferSize.ValueInt64()
- destinationType := shared.DestinationKinesisKinesis(r.Configuration.DestinationType.ValueString())
+ bufferSize := new(int64)
+ if !r.Configuration.BufferSize.IsUnknown() && !r.Configuration.BufferSize.IsNull() {
+ *bufferSize = r.Configuration.BufferSize.ValueInt64()
+ } else {
+ bufferSize = nil
+ }
endpoint := r.Configuration.Endpoint.ValueString()
privateKey := r.Configuration.PrivateKey.ValueString()
region := r.Configuration.Region.ValueString()
- shardCount := r.Configuration.ShardCount.ValueInt64()
+ shardCount := new(int64)
+ if !r.Configuration.ShardCount.IsUnknown() && !r.Configuration.ShardCount.IsNull() {
+ *shardCount = r.Configuration.ShardCount.ValueInt64()
+ } else {
+ shardCount = nil
+ }
configuration := shared.DestinationKinesis{
- AccessKey: accessKey,
- BufferSize: bufferSize,
- DestinationType: destinationType,
- Endpoint: endpoint,
- PrivateKey: privateKey,
- Region: region,
- ShardCount: shardCount,
+ AccessKey: accessKey,
+ BufferSize: bufferSize,
+ Endpoint: endpoint,
+ PrivateKey: privateKey,
+ Region: region,
+ ShardCount: shardCount,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationKinesisCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -41,11 +56,21 @@ func (r *DestinationKinesisResourceModel) ToGetSDKType() *shared.DestinationKine
func (r *DestinationKinesisResourceModel) ToUpdateSDKType() *shared.DestinationKinesisPutRequest {
accessKey := r.Configuration.AccessKey.ValueString()
- bufferSize := r.Configuration.BufferSize.ValueInt64()
+ bufferSize := new(int64)
+ if !r.Configuration.BufferSize.IsUnknown() && !r.Configuration.BufferSize.IsNull() {
+ *bufferSize = r.Configuration.BufferSize.ValueInt64()
+ } else {
+ bufferSize = nil
+ }
endpoint := r.Configuration.Endpoint.ValueString()
privateKey := r.Configuration.PrivateKey.ValueString()
region := r.Configuration.Region.ValueString()
- shardCount := r.Configuration.ShardCount.ValueInt64()
+ shardCount := new(int64)
+ if !r.Configuration.ShardCount.IsUnknown() && !r.Configuration.ShardCount.IsNull() {
+ *shardCount = r.Configuration.ShardCount.ValueInt64()
+ } else {
+ shardCount = nil
+ }
configuration := shared.DestinationKinesisUpdate{
AccessKey: accessKey,
BufferSize: bufferSize,
diff --git a/internal/provider/destination_langchain_data_source.go b/internal/provider/destination_langchain_data_source.go
old mode 100755
new mode 100644
index ba389222d..08a28094b
--- a/internal/provider/destination_langchain_data_source.go
+++ b/internal/provider/destination_langchain_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationLangchainDataSource struct {
// DestinationLangchainDataSourceModel describes the data model.
type DestinationLangchainDataSourceModel struct {
- Configuration DestinationLangchain `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,261 +47,17 @@ func (r *DestinationLangchainDataSource) Schema(ctx context.Context, req datasou
MarkdownDescription: "DestinationLangchain DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "langchain",
- ),
- },
- Description: `must be one of ["langchain"]`,
- },
- "embedding": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_langchain_embedding_fake": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
- },
- },
- Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
- },
- "destination_langchain_embedding_open_ai": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
- "openai_key": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
- },
- "destination_langchain_update_embedding_fake": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
- },
- },
- Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
- },
- "destination_langchain_update_embedding_open_ai": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
- "openai_key": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Embedding configuration`,
- },
- "indexing": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_langchain_indexing_chroma_local_persistance": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "collection_name": schema.StringAttribute{
- Computed: true,
- Description: `Name of the collection to use.`,
- },
- "destination_path": schema.StringAttribute{
- Computed: true,
- Description: `Path to the directory where chroma files will be written. The files will be placed inside that local mount.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "chroma_local",
- ),
- },
- Description: `must be one of ["chroma_local"]`,
- },
- },
- Description: `Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync.`,
- },
- "destination_langchain_indexing_doc_array_hnsw_search": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_path": schema.StringAttribute{
- Computed: true,
- Description: `Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "DocArrayHnswSearch",
- ),
- },
- Description: `must be one of ["DocArrayHnswSearch"]`,
- },
- },
- Description: `DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite.`,
- },
- "destination_langchain_indexing_pinecone": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "index": schema.StringAttribute{
- Computed: true,
- Description: `Pinecone index to use`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pinecone",
- ),
- },
- Description: `must be one of ["pinecone"]`,
- },
- "pinecone_environment": schema.StringAttribute{
- Computed: true,
- Description: `Pinecone environment to use`,
- },
- "pinecone_key": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain.`,
- },
- "destination_langchain_update_indexing_chroma_local_persistance": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "collection_name": schema.StringAttribute{
- Computed: true,
- Description: `Name of the collection to use.`,
- },
- "destination_path": schema.StringAttribute{
- Computed: true,
- Description: `Path to the directory where chroma files will be written. The files will be placed inside that local mount.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "chroma_local",
- ),
- },
- Description: `must be one of ["chroma_local"]`,
- },
- },
- Description: `Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync.`,
- },
- "destination_langchain_update_indexing_doc_array_hnsw_search": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_path": schema.StringAttribute{
- Computed: true,
- Description: `Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "DocArrayHnswSearch",
- ),
- },
- Description: `must be one of ["DocArrayHnswSearch"]`,
- },
- },
- Description: `DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite.`,
- },
- "destination_langchain_update_indexing_pinecone": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "index": schema.StringAttribute{
- Computed: true,
- Description: `Pinecone index to use`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pinecone",
- ),
- },
- Description: `must be one of ["pinecone"]`,
- },
- "pinecone_environment": schema.StringAttribute{
- Computed: true,
- Description: `Pinecone environment to use`,
- },
- "pinecone_key": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Indexing configuration`,
- },
- "processing": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "chunk_overlap": schema.Int64Attribute{
- Computed: true,
- Description: `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`,
- },
- "chunk_size": schema.Int64Attribute{
- Computed: true,
- Description: `Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)`,
- },
- "text_fields": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `List of fields in the record that should be used to calculate the embedding. All other fields are passed along as meta fields. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array.`,
- },
- },
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_langchain_data_source_sdk.go b/internal/provider/destination_langchain_data_source_sdk.go
old mode 100755
new mode 100644
index d61441ccb..90274d405
--- a/internal/provider/destination_langchain_data_source_sdk.go
+++ b/internal/provider/destination_langchain_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationLangchainDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_langchain_resource.go b/internal/provider/destination_langchain_resource.go
old mode 100755
new mode 100644
index db414aa14..2e61ea152
--- a/internal/provider/destination_langchain_resource.go
+++ b/internal/provider/destination_langchain_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type DestinationLangchainResource struct {
// DestinationLangchainResourceModel describes the resource data model.
type DestinationLangchainResourceModel struct {
Configuration DestinationLangchain `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -54,241 +55,89 @@ func (r *DestinationLangchainResource) Schema(ctx context.Context, req resource.
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "langchain",
- ),
- },
- Description: `must be one of ["langchain"]`,
- },
"embedding": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_langchain_embedding_fake": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
- },
- },
- Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
- },
- "destination_langchain_embedding_open_ai": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
- "openai_key": schema.StringAttribute{
- Required: true,
- },
- },
- Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
- },
- "destination_langchain_update_embedding_fake": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
- },
- },
+ "fake": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
},
- "destination_langchain_update_embedding_open_ai": schema.SingleNestedAttribute{
+ "open_ai": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
"openai_key": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Sensitive: true,
},
},
Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
},
},
+ Description: `Embedding configuration`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Embedding configuration`,
},
"indexing": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_langchain_indexing_chroma_local_persistance": schema.SingleNestedAttribute{
+ "chroma_local_persistance": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"collection_name": schema.StringAttribute{
- Optional: true,
- Description: `Name of the collection to use.`,
- },
- "destination_path": schema.StringAttribute{
- Required: true,
- Description: `Path to the directory where chroma files will be written. The files will be placed inside that local mount.`,
- },
- "mode": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "chroma_local",
- ),
- },
- Description: `must be one of ["chroma_local"]`,
- },
- },
- Description: `Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync.`,
- },
- "destination_langchain_indexing_doc_array_hnsw_search": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "destination_path": schema.StringAttribute{
- Required: true,
- Description: `Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run.`,
- },
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "DocArrayHnswSearch",
- ),
- },
- Description: `must be one of ["DocArrayHnswSearch"]`,
- },
- },
- Description: `DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite.`,
- },
- "destination_langchain_indexing_pinecone": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "index": schema.StringAttribute{
- Required: true,
- Description: `Pinecone index to use`,
- },
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pinecone",
- ),
- },
- Description: `must be one of ["pinecone"]`,
- },
- "pinecone_environment": schema.StringAttribute{
- Required: true,
- Description: `Pinecone environment to use`,
- },
- "pinecone_key": schema.StringAttribute{
- Required: true,
- },
- },
- Description: `Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain.`,
- },
- "destination_langchain_update_indexing_chroma_local_persistance": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "collection_name": schema.StringAttribute{
- Optional: true,
- Description: `Name of the collection to use.`,
+ MarkdownDescription: `Default: "langchain"` + "\n" +
+ `Name of the collection to use.`,
},
"destination_path": schema.StringAttribute{
Required: true,
Description: `Path to the directory where chroma files will be written. The files will be placed inside that local mount.`,
},
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "chroma_local",
- ),
- },
- Description: `must be one of ["chroma_local"]`,
- },
},
Description: `Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync.`,
},
- "destination_langchain_update_indexing_doc_array_hnsw_search": schema.SingleNestedAttribute{
+ "doc_array_hnsw_search": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"destination_path": schema.StringAttribute{
Required: true,
Description: `Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run.`,
},
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "DocArrayHnswSearch",
- ),
- },
- Description: `must be one of ["DocArrayHnswSearch"]`,
- },
},
Description: `DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite.`,
},
- "destination_langchain_update_indexing_pinecone": schema.SingleNestedAttribute{
+ "pinecone": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"index": schema.StringAttribute{
Required: true,
Description: `Pinecone index to use`,
},
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pinecone",
- ),
- },
- Description: `must be one of ["pinecone"]`,
- },
"pinecone_environment": schema.StringAttribute{
Required: true,
Description: `Pinecone environment to use`,
},
"pinecone_key": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Sensitive: true,
},
},
Description: `Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain.`,
},
},
+ Description: `Indexing configuration`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Indexing configuration`,
},
"processing": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
"chunk_overlap": schema.Int64Attribute{
- Optional: true,
- Description: `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`,
},
"chunk_size": schema.Int64Attribute{
Required: true,
@@ -303,6 +152,13 @@ func (r *DestinationLangchainResource) Schema(ctx context.Context, req resource.
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -319,7 +175,8 @@ func (r *DestinationLangchainResource) Schema(ctx context.Context, req resource.
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -369,7 +226,7 @@ func (r *DestinationLangchainResource) Create(ctx context.Context, req resource.
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationLangchain(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -545,5 +402,5 @@ func (r *DestinationLangchainResource) Delete(ctx context.Context, req resource.
}
func (r *DestinationLangchainResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_langchain_resource_sdk.go b/internal/provider/destination_langchain_resource_sdk.go
old mode 100755
new mode 100644
index a2580868b..919b2da1c
--- a/internal/provider/destination_langchain_resource_sdk.go
+++ b/internal/provider/destination_langchain_resource_sdk.go
@@ -3,116 +3,79 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationLangchainResourceModel) ToCreateSDKType() *shared.DestinationLangchainCreateRequest {
- destinationType := shared.DestinationLangchainLangchain(r.Configuration.DestinationType.ValueString())
var embedding shared.DestinationLangchainEmbedding
- var destinationLangchainEmbeddingOpenAI *shared.DestinationLangchainEmbeddingOpenAI
- if r.Configuration.Embedding.DestinationLangchainEmbeddingOpenAI != nil {
- mode := new(shared.DestinationLangchainEmbeddingOpenAIMode)
- if !r.Configuration.Embedding.DestinationLangchainEmbeddingOpenAI.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationLangchainEmbeddingOpenAI.Mode.IsNull() {
- *mode = shared.DestinationLangchainEmbeddingOpenAIMode(r.Configuration.Embedding.DestinationLangchainEmbeddingOpenAI.Mode.ValueString())
- } else {
- mode = nil
- }
- openaiKey := r.Configuration.Embedding.DestinationLangchainEmbeddingOpenAI.OpenaiKey.ValueString()
- destinationLangchainEmbeddingOpenAI = &shared.DestinationLangchainEmbeddingOpenAI{
- Mode: mode,
+ var destinationLangchainOpenAI *shared.DestinationLangchainOpenAI
+ if r.Configuration.Embedding.OpenAI != nil {
+ openaiKey := r.Configuration.Embedding.OpenAI.OpenaiKey.ValueString()
+ destinationLangchainOpenAI = &shared.DestinationLangchainOpenAI{
OpenaiKey: openaiKey,
}
}
- if destinationLangchainEmbeddingOpenAI != nil {
+ if destinationLangchainOpenAI != nil {
embedding = shared.DestinationLangchainEmbedding{
- DestinationLangchainEmbeddingOpenAI: destinationLangchainEmbeddingOpenAI,
+ DestinationLangchainOpenAI: destinationLangchainOpenAI,
}
}
- var destinationLangchainEmbeddingFake *shared.DestinationLangchainEmbeddingFake
- if r.Configuration.Embedding.DestinationLangchainEmbeddingFake != nil {
- mode1 := new(shared.DestinationLangchainEmbeddingFakeMode)
- if !r.Configuration.Embedding.DestinationLangchainEmbeddingFake.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationLangchainEmbeddingFake.Mode.IsNull() {
- *mode1 = shared.DestinationLangchainEmbeddingFakeMode(r.Configuration.Embedding.DestinationLangchainEmbeddingFake.Mode.ValueString())
- } else {
- mode1 = nil
- }
- destinationLangchainEmbeddingFake = &shared.DestinationLangchainEmbeddingFake{
- Mode: mode1,
- }
+ var destinationLangchainFake *shared.DestinationLangchainFake
+ if r.Configuration.Embedding.Fake != nil {
+ destinationLangchainFake = &shared.DestinationLangchainFake{}
}
- if destinationLangchainEmbeddingFake != nil {
+ if destinationLangchainFake != nil {
embedding = shared.DestinationLangchainEmbedding{
- DestinationLangchainEmbeddingFake: destinationLangchainEmbeddingFake,
+ DestinationLangchainFake: destinationLangchainFake,
}
}
var indexing shared.DestinationLangchainIndexing
- var destinationLangchainIndexingPinecone *shared.DestinationLangchainIndexingPinecone
- if r.Configuration.Indexing.DestinationLangchainIndexingPinecone != nil {
- index := r.Configuration.Indexing.DestinationLangchainIndexingPinecone.Index.ValueString()
- mode2 := new(shared.DestinationLangchainIndexingPineconeMode)
- if !r.Configuration.Indexing.DestinationLangchainIndexingPinecone.Mode.IsUnknown() && !r.Configuration.Indexing.DestinationLangchainIndexingPinecone.Mode.IsNull() {
- *mode2 = shared.DestinationLangchainIndexingPineconeMode(r.Configuration.Indexing.DestinationLangchainIndexingPinecone.Mode.ValueString())
- } else {
- mode2 = nil
- }
- pineconeEnvironment := r.Configuration.Indexing.DestinationLangchainIndexingPinecone.PineconeEnvironment.ValueString()
- pineconeKey := r.Configuration.Indexing.DestinationLangchainIndexingPinecone.PineconeKey.ValueString()
- destinationLangchainIndexingPinecone = &shared.DestinationLangchainIndexingPinecone{
+ var destinationLangchainPinecone *shared.DestinationLangchainPinecone
+ if r.Configuration.Indexing.Pinecone != nil {
+ index := r.Configuration.Indexing.Pinecone.Index.ValueString()
+ pineconeEnvironment := r.Configuration.Indexing.Pinecone.PineconeEnvironment.ValueString()
+ pineconeKey := r.Configuration.Indexing.Pinecone.PineconeKey.ValueString()
+ destinationLangchainPinecone = &shared.DestinationLangchainPinecone{
Index: index,
- Mode: mode2,
PineconeEnvironment: pineconeEnvironment,
PineconeKey: pineconeKey,
}
}
- if destinationLangchainIndexingPinecone != nil {
+ if destinationLangchainPinecone != nil {
indexing = shared.DestinationLangchainIndexing{
- DestinationLangchainIndexingPinecone: destinationLangchainIndexingPinecone,
+ DestinationLangchainPinecone: destinationLangchainPinecone,
}
}
- var destinationLangchainIndexingDocArrayHnswSearch *shared.DestinationLangchainIndexingDocArrayHnswSearch
- if r.Configuration.Indexing.DestinationLangchainIndexingDocArrayHnswSearch != nil {
- destinationPath := r.Configuration.Indexing.DestinationLangchainIndexingDocArrayHnswSearch.DestinationPath.ValueString()
- mode3 := new(shared.DestinationLangchainIndexingDocArrayHnswSearchMode)
- if !r.Configuration.Indexing.DestinationLangchainIndexingDocArrayHnswSearch.Mode.IsUnknown() && !r.Configuration.Indexing.DestinationLangchainIndexingDocArrayHnswSearch.Mode.IsNull() {
- *mode3 = shared.DestinationLangchainIndexingDocArrayHnswSearchMode(r.Configuration.Indexing.DestinationLangchainIndexingDocArrayHnswSearch.Mode.ValueString())
- } else {
- mode3 = nil
- }
- destinationLangchainIndexingDocArrayHnswSearch = &shared.DestinationLangchainIndexingDocArrayHnswSearch{
+ var destinationLangchainDocArrayHnswSearch *shared.DestinationLangchainDocArrayHnswSearch
+ if r.Configuration.Indexing.DocArrayHnswSearch != nil {
+ destinationPath := r.Configuration.Indexing.DocArrayHnswSearch.DestinationPath.ValueString()
+ destinationLangchainDocArrayHnswSearch = &shared.DestinationLangchainDocArrayHnswSearch{
DestinationPath: destinationPath,
- Mode: mode3,
}
}
- if destinationLangchainIndexingDocArrayHnswSearch != nil {
+ if destinationLangchainDocArrayHnswSearch != nil {
indexing = shared.DestinationLangchainIndexing{
- DestinationLangchainIndexingDocArrayHnswSearch: destinationLangchainIndexingDocArrayHnswSearch,
+ DestinationLangchainDocArrayHnswSearch: destinationLangchainDocArrayHnswSearch,
}
}
- var destinationLangchainIndexingChromaLocalPersistance *shared.DestinationLangchainIndexingChromaLocalPersistance
- if r.Configuration.Indexing.DestinationLangchainIndexingChromaLocalPersistance != nil {
+ var destinationLangchainChromaLocalPersistance *shared.DestinationLangchainChromaLocalPersistance
+ if r.Configuration.Indexing.ChromaLocalPersistance != nil {
collectionName := new(string)
- if !r.Configuration.Indexing.DestinationLangchainIndexingChromaLocalPersistance.CollectionName.IsUnknown() && !r.Configuration.Indexing.DestinationLangchainIndexingChromaLocalPersistance.CollectionName.IsNull() {
- *collectionName = r.Configuration.Indexing.DestinationLangchainIndexingChromaLocalPersistance.CollectionName.ValueString()
+ if !r.Configuration.Indexing.ChromaLocalPersistance.CollectionName.IsUnknown() && !r.Configuration.Indexing.ChromaLocalPersistance.CollectionName.IsNull() {
+ *collectionName = r.Configuration.Indexing.ChromaLocalPersistance.CollectionName.ValueString()
} else {
collectionName = nil
}
- destinationPath1 := r.Configuration.Indexing.DestinationLangchainIndexingChromaLocalPersistance.DestinationPath.ValueString()
- mode4 := new(shared.DestinationLangchainIndexingChromaLocalPersistanceMode)
- if !r.Configuration.Indexing.DestinationLangchainIndexingChromaLocalPersistance.Mode.IsUnknown() && !r.Configuration.Indexing.DestinationLangchainIndexingChromaLocalPersistance.Mode.IsNull() {
- *mode4 = shared.DestinationLangchainIndexingChromaLocalPersistanceMode(r.Configuration.Indexing.DestinationLangchainIndexingChromaLocalPersistance.Mode.ValueString())
- } else {
- mode4 = nil
- }
- destinationLangchainIndexingChromaLocalPersistance = &shared.DestinationLangchainIndexingChromaLocalPersistance{
+ destinationPath1 := r.Configuration.Indexing.ChromaLocalPersistance.DestinationPath.ValueString()
+ destinationLangchainChromaLocalPersistance = &shared.DestinationLangchainChromaLocalPersistance{
CollectionName: collectionName,
DestinationPath: destinationPath1,
- Mode: mode4,
}
}
- if destinationLangchainIndexingChromaLocalPersistance != nil {
+ if destinationLangchainChromaLocalPersistance != nil {
indexing = shared.DestinationLangchainIndexing{
- DestinationLangchainIndexingChromaLocalPersistance: destinationLangchainIndexingChromaLocalPersistance,
+ DestinationLangchainChromaLocalPersistance: destinationLangchainChromaLocalPersistance,
}
}
chunkOverlap := new(int64)
@@ -132,15 +95,21 @@ func (r *DestinationLangchainResourceModel) ToCreateSDKType() *shared.Destinatio
TextFields: textFields,
}
configuration := shared.DestinationLangchain{
- DestinationType: destinationType,
- Embedding: embedding,
- Indexing: indexing,
- Processing: processing,
+ Embedding: embedding,
+ Indexing: indexing,
+ Processing: processing,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationLangchainCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -153,110 +122,74 @@ func (r *DestinationLangchainResourceModel) ToGetSDKType() *shared.DestinationLa
}
func (r *DestinationLangchainResourceModel) ToUpdateSDKType() *shared.DestinationLangchainPutRequest {
- var embedding shared.DestinationLangchainUpdateEmbedding
- var destinationLangchainUpdateEmbeddingOpenAI *shared.DestinationLangchainUpdateEmbeddingOpenAI
- if r.Configuration.Embedding.DestinationLangchainUpdateEmbeddingOpenAI != nil {
- mode := new(shared.DestinationLangchainUpdateEmbeddingOpenAIMode)
- if !r.Configuration.Embedding.DestinationLangchainUpdateEmbeddingOpenAI.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationLangchainUpdateEmbeddingOpenAI.Mode.IsNull() {
- *mode = shared.DestinationLangchainUpdateEmbeddingOpenAIMode(r.Configuration.Embedding.DestinationLangchainUpdateEmbeddingOpenAI.Mode.ValueString())
- } else {
- mode = nil
- }
- openaiKey := r.Configuration.Embedding.DestinationLangchainUpdateEmbeddingOpenAI.OpenaiKey.ValueString()
- destinationLangchainUpdateEmbeddingOpenAI = &shared.DestinationLangchainUpdateEmbeddingOpenAI{
- Mode: mode,
+ var embedding shared.Embedding
+ var openAI *shared.OpenAI
+ if r.Configuration.Embedding.OpenAI != nil {
+ openaiKey := r.Configuration.Embedding.OpenAI.OpenaiKey.ValueString()
+ openAI = &shared.OpenAI{
OpenaiKey: openaiKey,
}
}
- if destinationLangchainUpdateEmbeddingOpenAI != nil {
- embedding = shared.DestinationLangchainUpdateEmbedding{
- DestinationLangchainUpdateEmbeddingOpenAI: destinationLangchainUpdateEmbeddingOpenAI,
+ if openAI != nil {
+ embedding = shared.Embedding{
+ OpenAI: openAI,
}
}
- var destinationLangchainUpdateEmbeddingFake *shared.DestinationLangchainUpdateEmbeddingFake
- if r.Configuration.Embedding.DestinationLangchainUpdateEmbeddingFake != nil {
- mode1 := new(shared.DestinationLangchainUpdateEmbeddingFakeMode)
- if !r.Configuration.Embedding.DestinationLangchainUpdateEmbeddingFake.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationLangchainUpdateEmbeddingFake.Mode.IsNull() {
- *mode1 = shared.DestinationLangchainUpdateEmbeddingFakeMode(r.Configuration.Embedding.DestinationLangchainUpdateEmbeddingFake.Mode.ValueString())
- } else {
- mode1 = nil
- }
- destinationLangchainUpdateEmbeddingFake = &shared.DestinationLangchainUpdateEmbeddingFake{
- Mode: mode1,
- }
+ var fake *shared.Fake
+ if r.Configuration.Embedding.Fake != nil {
+ fake = &shared.Fake{}
}
- if destinationLangchainUpdateEmbeddingFake != nil {
- embedding = shared.DestinationLangchainUpdateEmbedding{
- DestinationLangchainUpdateEmbeddingFake: destinationLangchainUpdateEmbeddingFake,
+ if fake != nil {
+ embedding = shared.Embedding{
+ Fake: fake,
}
}
- var indexing shared.DestinationLangchainUpdateIndexing
- var destinationLangchainUpdateIndexingPinecone *shared.DestinationLangchainUpdateIndexingPinecone
- if r.Configuration.Indexing.DestinationLangchainUpdateIndexingPinecone != nil {
- index := r.Configuration.Indexing.DestinationLangchainUpdateIndexingPinecone.Index.ValueString()
- mode2 := new(shared.DestinationLangchainUpdateIndexingPineconeMode)
- if !r.Configuration.Indexing.DestinationLangchainUpdateIndexingPinecone.Mode.IsUnknown() && !r.Configuration.Indexing.DestinationLangchainUpdateIndexingPinecone.Mode.IsNull() {
- *mode2 = shared.DestinationLangchainUpdateIndexingPineconeMode(r.Configuration.Indexing.DestinationLangchainUpdateIndexingPinecone.Mode.ValueString())
- } else {
- mode2 = nil
- }
- pineconeEnvironment := r.Configuration.Indexing.DestinationLangchainUpdateIndexingPinecone.PineconeEnvironment.ValueString()
- pineconeKey := r.Configuration.Indexing.DestinationLangchainUpdateIndexingPinecone.PineconeKey.ValueString()
- destinationLangchainUpdateIndexingPinecone = &shared.DestinationLangchainUpdateIndexingPinecone{
+ var indexing shared.Indexing
+ var destinationLangchainUpdatePinecone *shared.DestinationLangchainUpdatePinecone
+ if r.Configuration.Indexing.Pinecone != nil {
+ index := r.Configuration.Indexing.Pinecone.Index.ValueString()
+ pineconeEnvironment := r.Configuration.Indexing.Pinecone.PineconeEnvironment.ValueString()
+ pineconeKey := r.Configuration.Indexing.Pinecone.PineconeKey.ValueString()
+ destinationLangchainUpdatePinecone = &shared.DestinationLangchainUpdatePinecone{
Index: index,
- Mode: mode2,
PineconeEnvironment: pineconeEnvironment,
PineconeKey: pineconeKey,
}
}
- if destinationLangchainUpdateIndexingPinecone != nil {
- indexing = shared.DestinationLangchainUpdateIndexing{
- DestinationLangchainUpdateIndexingPinecone: destinationLangchainUpdateIndexingPinecone,
+ if destinationLangchainUpdatePinecone != nil {
+ indexing = shared.Indexing{
+ DestinationLangchainUpdatePinecone: destinationLangchainUpdatePinecone,
}
}
- var destinationLangchainUpdateIndexingDocArrayHnswSearch *shared.DestinationLangchainUpdateIndexingDocArrayHnswSearch
- if r.Configuration.Indexing.DestinationLangchainUpdateIndexingDocArrayHnswSearch != nil {
- destinationPath := r.Configuration.Indexing.DestinationLangchainUpdateIndexingDocArrayHnswSearch.DestinationPath.ValueString()
- mode3 := new(shared.DestinationLangchainUpdateIndexingDocArrayHnswSearchMode)
- if !r.Configuration.Indexing.DestinationLangchainUpdateIndexingDocArrayHnswSearch.Mode.IsUnknown() && !r.Configuration.Indexing.DestinationLangchainUpdateIndexingDocArrayHnswSearch.Mode.IsNull() {
- *mode3 = shared.DestinationLangchainUpdateIndexingDocArrayHnswSearchMode(r.Configuration.Indexing.DestinationLangchainUpdateIndexingDocArrayHnswSearch.Mode.ValueString())
- } else {
- mode3 = nil
- }
- destinationLangchainUpdateIndexingDocArrayHnswSearch = &shared.DestinationLangchainUpdateIndexingDocArrayHnswSearch{
+ var docArrayHnswSearch *shared.DocArrayHnswSearch
+ if r.Configuration.Indexing.DocArrayHnswSearch != nil {
+ destinationPath := r.Configuration.Indexing.DocArrayHnswSearch.DestinationPath.ValueString()
+ docArrayHnswSearch = &shared.DocArrayHnswSearch{
DestinationPath: destinationPath,
- Mode: mode3,
}
}
- if destinationLangchainUpdateIndexingDocArrayHnswSearch != nil {
- indexing = shared.DestinationLangchainUpdateIndexing{
- DestinationLangchainUpdateIndexingDocArrayHnswSearch: destinationLangchainUpdateIndexingDocArrayHnswSearch,
+ if docArrayHnswSearch != nil {
+ indexing = shared.Indexing{
+ DocArrayHnswSearch: docArrayHnswSearch,
}
}
- var destinationLangchainUpdateIndexingChromaLocalPersistance *shared.DestinationLangchainUpdateIndexingChromaLocalPersistance
- if r.Configuration.Indexing.DestinationLangchainUpdateIndexingChromaLocalPersistance != nil {
+ var chromaLocalPersistance *shared.ChromaLocalPersistance
+ if r.Configuration.Indexing.ChromaLocalPersistance != nil {
collectionName := new(string)
- if !r.Configuration.Indexing.DestinationLangchainUpdateIndexingChromaLocalPersistance.CollectionName.IsUnknown() && !r.Configuration.Indexing.DestinationLangchainUpdateIndexingChromaLocalPersistance.CollectionName.IsNull() {
- *collectionName = r.Configuration.Indexing.DestinationLangchainUpdateIndexingChromaLocalPersistance.CollectionName.ValueString()
+ if !r.Configuration.Indexing.ChromaLocalPersistance.CollectionName.IsUnknown() && !r.Configuration.Indexing.ChromaLocalPersistance.CollectionName.IsNull() {
+ *collectionName = r.Configuration.Indexing.ChromaLocalPersistance.CollectionName.ValueString()
} else {
collectionName = nil
}
- destinationPath1 := r.Configuration.Indexing.DestinationLangchainUpdateIndexingChromaLocalPersistance.DestinationPath.ValueString()
- mode4 := new(shared.DestinationLangchainUpdateIndexingChromaLocalPersistanceMode)
- if !r.Configuration.Indexing.DestinationLangchainUpdateIndexingChromaLocalPersistance.Mode.IsUnknown() && !r.Configuration.Indexing.DestinationLangchainUpdateIndexingChromaLocalPersistance.Mode.IsNull() {
- *mode4 = shared.DestinationLangchainUpdateIndexingChromaLocalPersistanceMode(r.Configuration.Indexing.DestinationLangchainUpdateIndexingChromaLocalPersistance.Mode.ValueString())
- } else {
- mode4 = nil
- }
- destinationLangchainUpdateIndexingChromaLocalPersistance = &shared.DestinationLangchainUpdateIndexingChromaLocalPersistance{
+ destinationPath1 := r.Configuration.Indexing.ChromaLocalPersistance.DestinationPath.ValueString()
+ chromaLocalPersistance = &shared.ChromaLocalPersistance{
CollectionName: collectionName,
DestinationPath: destinationPath1,
- Mode: mode4,
}
}
- if destinationLangchainUpdateIndexingChromaLocalPersistance != nil {
- indexing = shared.DestinationLangchainUpdateIndexing{
- DestinationLangchainUpdateIndexingChromaLocalPersistance: destinationLangchainUpdateIndexingChromaLocalPersistance,
+ if chromaLocalPersistance != nil {
+ indexing = shared.Indexing{
+ ChromaLocalPersistance: chromaLocalPersistance,
}
}
chunkOverlap := new(int64)
@@ -270,7 +203,7 @@ func (r *DestinationLangchainResourceModel) ToUpdateSDKType() *shared.Destinatio
for _, textFieldsItem := range r.Configuration.Processing.TextFields {
textFields = append(textFields, textFieldsItem.ValueString())
}
- processing := shared.DestinationLangchainUpdateProcessingConfigModel{
+ processing := shared.ProcessingConfigModel{
ChunkOverlap: chunkOverlap,
ChunkSize: chunkSize,
TextFields: textFields,
diff --git a/internal/provider/destination_milvus_data_source.go b/internal/provider/destination_milvus_data_source.go
old mode 100755
new mode 100644
index f78664b73..79a36f3c6
--- a/internal/provider/destination_milvus_data_source.go
+++ b/internal/provider/destination_milvus_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationMilvusDataSource struct {
// DestinationMilvusDataSourceModel describes the data model.
type DestinationMilvusDataSourceModel struct {
- Configuration DestinationMilvus `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,352 +47,17 @@ func (r *DestinationMilvusDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "DestinationMilvus DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "milvus",
- ),
- },
- Description: `must be one of ["milvus"]`,
- },
- "embedding": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_milvus_embedding_cohere": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "cohere_key": schema.StringAttribute{
- Computed: true,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "cohere",
- ),
- },
- Description: `must be one of ["cohere"]`,
- },
- },
- Description: `Use the Cohere API to embed text.`,
- },
- "destination_milvus_embedding_fake": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
- },
- },
- Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
- },
- "destination_milvus_embedding_from_field": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "dimensions": schema.Int64Attribute{
- Computed: true,
- Description: `The number of dimensions the embedding model is generating`,
- },
- "field_name": schema.StringAttribute{
- Computed: true,
- Description: `Name of the field in the record that contains the embedding`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "from_field",
- ),
- },
- Description: `must be one of ["from_field"]`,
- },
- },
- Description: `Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.`,
- },
- "destination_milvus_embedding_open_ai": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
- "openai_key": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
- },
- "destination_milvus_update_embedding_cohere": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "cohere_key": schema.StringAttribute{
- Computed: true,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "cohere",
- ),
- },
- Description: `must be one of ["cohere"]`,
- },
- },
- Description: `Use the Cohere API to embed text.`,
- },
- "destination_milvus_update_embedding_fake": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
- },
- },
- Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
- },
- "destination_milvus_update_embedding_from_field": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "dimensions": schema.Int64Attribute{
- Computed: true,
- Description: `The number of dimensions the embedding model is generating`,
- },
- "field_name": schema.StringAttribute{
- Computed: true,
- Description: `Name of the field in the record that contains the embedding`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "from_field",
- ),
- },
- Description: `must be one of ["from_field"]`,
- },
- },
- Description: `Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.`,
- },
- "destination_milvus_update_embedding_open_ai": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
- "openai_key": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Embedding configuration`,
- },
- "indexing": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_milvus_indexing_authentication_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "token",
- ),
- },
- Description: `must be one of ["token"]`,
- },
- "token": schema.StringAttribute{
- Computed: true,
- Description: `API Token for the Milvus instance`,
- },
- },
- Description: `Authenticate using an API token (suitable for Zilliz Cloud)`,
- },
- "destination_milvus_indexing_authentication_no_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "no_auth",
- ),
- },
- Description: `must be one of ["no_auth"]`,
- },
- },
- Description: `Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)`,
- },
- "destination_milvus_indexing_authentication_username_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username_password",
- ),
- },
- Description: `must be one of ["username_password"]`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password for the Milvus instance`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username for the Milvus instance`,
- },
- },
- Description: `Authenticate using username and password (suitable for self-managed Milvus clusters)`,
- },
- "destination_milvus_update_indexing_authentication_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "token",
- ),
- },
- Description: `must be one of ["token"]`,
- },
- "token": schema.StringAttribute{
- Computed: true,
- Description: `API Token for the Milvus instance`,
- },
- },
- Description: `Authenticate using an API token (suitable for Zilliz Cloud)`,
- },
- "destination_milvus_update_indexing_authentication_no_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "no_auth",
- ),
- },
- Description: `must be one of ["no_auth"]`,
- },
- },
- Description: `Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)`,
- },
- "destination_milvus_update_indexing_authentication_username_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username_password",
- ),
- },
- Description: `must be one of ["username_password"]`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password for the Milvus instance`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username for the Milvus instance`,
- },
- },
- Description: `Authenticate using username and password (suitable for self-managed Milvus clusters)`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Authentication method`,
- },
- "collection": schema.StringAttribute{
- Computed: true,
- Description: `The collection to load data into`,
- },
- "db": schema.StringAttribute{
- Computed: true,
- Description: `The database to connect to`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The public endpoint of the Milvus instance. `,
- },
- "text_field": schema.StringAttribute{
- Computed: true,
- Description: `The field in the entity that contains the embedded text`,
- },
- "vector_field": schema.StringAttribute{
- Computed: true,
- Description: `The field in the entity that contains the vector`,
- },
- },
- Description: `Indexing configuration`,
- },
- "processing": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "chunk_overlap": schema.Int64Attribute{
- Computed: true,
- Description: `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`,
- },
- "chunk_size": schema.Int64Attribute{
- Computed: true,
- Description: `Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)`,
- },
- "metadata_fields": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.`,
- },
- "text_fields": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array.`,
- },
- },
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_milvus_data_source_sdk.go b/internal/provider/destination_milvus_data_source_sdk.go
old mode 100755
new mode 100644
index 9ef8f76e3..d1e9deef0
--- a/internal/provider/destination_milvus_data_source_sdk.go
+++ b/internal/provider/destination_milvus_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationMilvusDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_milvus_resource.go b/internal/provider/destination_milvus_resource.go
old mode 100755
new mode 100644
index 97b2461aa..1a1189133
--- a/internal/provider/destination_milvus_resource.go
+++ b/internal/provider/destination_milvus_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationMilvusResource struct {
// DestinationMilvusResourceModel describes the resource data model.
type DestinationMilvusResourceModel struct {
Configuration DestinationMilvus `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -54,52 +56,44 @@ func (r *DestinationMilvusResource) Schema(ctx context.Context, req resource.Sch
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "milvus",
- ),
- },
- Description: `must be one of ["milvus"]`,
- },
"embedding": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_milvus_embedding_cohere": schema.SingleNestedAttribute{
+ "azure_open_ai": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "cohere_key": schema.StringAttribute{
- Required: true,
+ "api_base": schema.StringAttribute{
+ Required: true,
+ Description: `The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
},
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "cohere",
- ),
- },
- Description: `must be one of ["cohere"]`,
+ "deployment": schema.StringAttribute{
+ Required: true,
+ Description: `The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
+ },
+ "openai_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ Description: `The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
},
},
- Description: `Use the Cohere API to embed text.`,
+ Description: `Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
},
- "destination_milvus_embedding_fake": schema.SingleNestedAttribute{
+ "cohere": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
+ "cohere_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
},
},
+ Description: `Use the Cohere API to embed text.`,
+ },
+ "fake": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
},
- "destination_milvus_embedding_from_field": schema.SingleNestedAttribute{
+ "from_field": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"dimensions": schema.Int64Attribute{
@@ -110,115 +104,48 @@ func (r *DestinationMilvusResource) Schema(ctx context.Context, req resource.Sch
Required: true,
Description: `Name of the field in the record that contains the embedding`,
},
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "from_field",
- ),
- },
- Description: `must be one of ["from_field"]`,
- },
},
Description: `Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.`,
},
- "destination_milvus_embedding_open_ai": schema.SingleNestedAttribute{
+ "open_ai": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
"openai_key": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Sensitive: true,
},
},
Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
},
- "destination_milvus_update_embedding_cohere": schema.SingleNestedAttribute{
+ "open_ai_compatible": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "cohere_key": schema.StringAttribute{
- Required: true,
- },
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "cohere",
- ),
- },
- Description: `must be one of ["cohere"]`,
+ "api_key": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `Default: ""`,
},
- },
- Description: `Use the Cohere API to embed text.`,
- },
- "destination_milvus_update_embedding_fake": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
+ "base_url": schema.StringAttribute{
+ Required: true,
+ Description: `The base URL for your OpenAI-compatible service`,
},
- },
- Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
- },
- "destination_milvus_update_embedding_from_field": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
"dimensions": schema.Int64Attribute{
Required: true,
Description: `The number of dimensions the embedding model is generating`,
},
- "field_name": schema.StringAttribute{
- Required: true,
- Description: `Name of the field in the record that contains the embedding`,
- },
- "mode": schema.StringAttribute{
+ "model_name": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "from_field",
- ),
- },
- Description: `must be one of ["from_field"]`,
- },
- },
- Description: `Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.`,
- },
- "destination_milvus_update_embedding_open_ai": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
- "openai_key": schema.StringAttribute{
- Required: true,
+ MarkdownDescription: `Default: "text-embedding-ada-002"` + "\n" +
+ `The name of the model to use for embedding`,
},
},
- Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
+ Description: `Use a service that's compatible with the OpenAI API to embed text.`,
},
},
+ Description: `Embedding configuration`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Embedding configuration`,
},
"indexing": schema.SingleNestedAttribute{
Required: true,
@@ -226,111 +153,28 @@ func (r *DestinationMilvusResource) Schema(ctx context.Context, req resource.Sch
"auth": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_milvus_indexing_authentication_api_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "token",
- ),
- },
- Description: `must be one of ["token"]`,
- },
- "token": schema.StringAttribute{
- Required: true,
- Description: `API Token for the Milvus instance`,
- },
- },
- Description: `Authenticate using an API token (suitable for Zilliz Cloud)`,
- },
- "destination_milvus_indexing_authentication_no_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "no_auth",
- ),
- },
- Description: `must be one of ["no_auth"]`,
- },
- },
- Description: `Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)`,
- },
- "destination_milvus_indexing_authentication_username_password": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username_password",
- ),
- },
- Description: `must be one of ["username_password"]`,
- },
- "password": schema.StringAttribute{
- Required: true,
- Description: `Password for the Milvus instance`,
- },
- "username": schema.StringAttribute{
- Required: true,
- Description: `Username for the Milvus instance`,
- },
- },
- Description: `Authenticate using username and password (suitable for self-managed Milvus clusters)`,
- },
- "destination_milvus_update_indexing_authentication_api_token": schema.SingleNestedAttribute{
+ "api_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "token",
- ),
- },
- Description: `must be one of ["token"]`,
- },
"token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Token for the Milvus instance`,
},
},
Description: `Authenticate using an API token (suitable for Zilliz Cloud)`,
},
- "destination_milvus_update_indexing_authentication_no_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "no_auth",
- ),
- },
- Description: `must be one of ["no_auth"]`,
- },
- },
+ "no_auth": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)`,
},
- "destination_milvus_update_indexing_authentication_username_password": schema.SingleNestedAttribute{
+ "username_password": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username_password",
- ),
- },
- Description: `must be one of ["username_password"]`,
- },
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Password for the Milvus instance`,
},
"username": schema.StringAttribute{
@@ -341,30 +185,33 @@ func (r *DestinationMilvusResource) Schema(ctx context.Context, req resource.Sch
Description: `Authenticate using username and password (suitable for self-managed Milvus clusters)`,
},
},
+ Description: `Authentication method`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Authentication method`,
},
"collection": schema.StringAttribute{
Required: true,
Description: `The collection to load data into`,
},
"db": schema.StringAttribute{
- Optional: true,
- Description: `The database to connect to`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `The database to connect to`,
},
"host": schema.StringAttribute{
Required: true,
Description: `The public endpoint of the Milvus instance. `,
},
"text_field": schema.StringAttribute{
- Optional: true,
- Description: `The field in the entity that contains the embedded text`,
+ Optional: true,
+ MarkdownDescription: `Default: "text"` + "\n" +
+ `The field in the entity that contains the embedded text`,
},
"vector_field": schema.StringAttribute{
- Optional: true,
- Description: `The field in the entity that contains the vector`,
+ Optional: true,
+ MarkdownDescription: `Default: "vector"` + "\n" +
+ `The field in the entity that contains the vector`,
},
},
Description: `Indexing configuration`,
@@ -373,13 +220,30 @@ func (r *DestinationMilvusResource) Schema(ctx context.Context, req resource.Sch
Required: true,
Attributes: map[string]schema.Attribute{
"chunk_overlap": schema.Int64Attribute{
- Optional: true,
- Description: `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`,
},
"chunk_size": schema.Int64Attribute{
Required: true,
Description: `Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)`,
},
+ "field_name_mappings": schema.ListNestedAttribute{
+ Optional: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "from_field": schema.StringAttribute{
+ Required: true,
+ Description: `The field name in the source`,
+ },
+ "to_field": schema.StringAttribute{
+ Required: true,
+ Description: `The field name to use in the destination`,
+ },
+ },
+ },
+ Description: `List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.`,
+ },
"metadata_fields": schema.ListAttribute{
Optional: true,
ElementType: types.StringType,
@@ -390,10 +254,84 @@ func (r *DestinationMilvusResource) Schema(ctx context.Context, req resource.Sch
ElementType: types.StringType,
Description: `List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array.`,
},
+ "text_splitter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "by_markdown_header": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "split_level": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 1` + "\n" +
+ `Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points`,
+ },
+ },
+ Description: `Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.`,
+ },
+ "by_programming_language": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "language": schema.StringAttribute{
+ Required: true,
+ MarkdownDescription: `must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]` + "\n" +
+ `Split code in suitable places based on the programming language`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "cpp",
+ "go",
+ "java",
+ "js",
+ "php",
+ "proto",
+ "python",
+ "rst",
+ "ruby",
+ "rust",
+ "scala",
+ "swift",
+ "markdown",
+ "latex",
+ "html",
+ "sol",
+ ),
+ },
+ },
+ },
+ Description: `Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.`,
+ },
+ "by_separator": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "keep_separator": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether to keep the separator in the resulting chunks`,
+ },
+ "separators": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".`,
+ },
+ },
+ Description: `Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.`,
+ },
+ },
+ Description: `Split text fields into chunks based on the specified method.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
},
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -410,7 +348,8 @@ func (r *DestinationMilvusResource) Schema(ctx context.Context, req resource.Sch
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -460,7 +399,7 @@ func (r *DestinationMilvusResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationMilvus(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -636,5 +575,5 @@ func (r *DestinationMilvusResource) Delete(ctx context.Context, req resource.Del
}
func (r *DestinationMilvusResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_milvus_resource_sdk.go b/internal/provider/destination_milvus_resource_sdk.go
old mode 100755
new mode 100644
index 6089fa5e2..0c3998fe2
--- a/internal/provider/destination_milvus_resource_sdk.go
+++ b/internal/provider/destination_milvus_resource_sdk.go
@@ -3,145 +3,137 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationMilvusResourceModel) ToCreateSDKType() *shared.DestinationMilvusCreateRequest {
- destinationType := shared.DestinationMilvusMilvus(r.Configuration.DestinationType.ValueString())
var embedding shared.DestinationMilvusEmbedding
- var destinationMilvusEmbeddingOpenAI *shared.DestinationMilvusEmbeddingOpenAI
- if r.Configuration.Embedding.DestinationMilvusEmbeddingOpenAI != nil {
- mode := new(shared.DestinationMilvusEmbeddingOpenAIMode)
- if !r.Configuration.Embedding.DestinationMilvusEmbeddingOpenAI.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusEmbeddingOpenAI.Mode.IsNull() {
- *mode = shared.DestinationMilvusEmbeddingOpenAIMode(r.Configuration.Embedding.DestinationMilvusEmbeddingOpenAI.Mode.ValueString())
- } else {
- mode = nil
- }
- openaiKey := r.Configuration.Embedding.DestinationMilvusEmbeddingOpenAI.OpenaiKey.ValueString()
- destinationMilvusEmbeddingOpenAI = &shared.DestinationMilvusEmbeddingOpenAI{
- Mode: mode,
+ var destinationMilvusOpenAI *shared.DestinationMilvusOpenAI
+ if r.Configuration.Embedding.OpenAI != nil {
+ openaiKey := r.Configuration.Embedding.OpenAI.OpenaiKey.ValueString()
+ destinationMilvusOpenAI = &shared.DestinationMilvusOpenAI{
OpenaiKey: openaiKey,
}
}
- if destinationMilvusEmbeddingOpenAI != nil {
+ if destinationMilvusOpenAI != nil {
embedding = shared.DestinationMilvusEmbedding{
- DestinationMilvusEmbeddingOpenAI: destinationMilvusEmbeddingOpenAI,
+ DestinationMilvusOpenAI: destinationMilvusOpenAI,
}
}
- var destinationMilvusEmbeddingCohere *shared.DestinationMilvusEmbeddingCohere
- if r.Configuration.Embedding.DestinationMilvusEmbeddingCohere != nil {
- cohereKey := r.Configuration.Embedding.DestinationMilvusEmbeddingCohere.CohereKey.ValueString()
- mode1 := new(shared.DestinationMilvusEmbeddingCohereMode)
- if !r.Configuration.Embedding.DestinationMilvusEmbeddingCohere.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusEmbeddingCohere.Mode.IsNull() {
- *mode1 = shared.DestinationMilvusEmbeddingCohereMode(r.Configuration.Embedding.DestinationMilvusEmbeddingCohere.Mode.ValueString())
- } else {
- mode1 = nil
- }
- destinationMilvusEmbeddingCohere = &shared.DestinationMilvusEmbeddingCohere{
+ var destinationMilvusCohere *shared.DestinationMilvusCohere
+ if r.Configuration.Embedding.Cohere != nil {
+ cohereKey := r.Configuration.Embedding.Cohere.CohereKey.ValueString()
+ destinationMilvusCohere = &shared.DestinationMilvusCohere{
CohereKey: cohereKey,
- Mode: mode1,
}
}
- if destinationMilvusEmbeddingCohere != nil {
+ if destinationMilvusCohere != nil {
embedding = shared.DestinationMilvusEmbedding{
- DestinationMilvusEmbeddingCohere: destinationMilvusEmbeddingCohere,
+ DestinationMilvusCohere: destinationMilvusCohere,
}
}
- var destinationMilvusEmbeddingFake *shared.DestinationMilvusEmbeddingFake
- if r.Configuration.Embedding.DestinationMilvusEmbeddingFake != nil {
- mode2 := new(shared.DestinationMilvusEmbeddingFakeMode)
- if !r.Configuration.Embedding.DestinationMilvusEmbeddingFake.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusEmbeddingFake.Mode.IsNull() {
- *mode2 = shared.DestinationMilvusEmbeddingFakeMode(r.Configuration.Embedding.DestinationMilvusEmbeddingFake.Mode.ValueString())
- } else {
- mode2 = nil
- }
- destinationMilvusEmbeddingFake = &shared.DestinationMilvusEmbeddingFake{
- Mode: mode2,
- }
+ var destinationMilvusFake *shared.DestinationMilvusFake
+ if r.Configuration.Embedding.Fake != nil {
+ destinationMilvusFake = &shared.DestinationMilvusFake{}
}
- if destinationMilvusEmbeddingFake != nil {
+ if destinationMilvusFake != nil {
embedding = shared.DestinationMilvusEmbedding{
- DestinationMilvusEmbeddingFake: destinationMilvusEmbeddingFake,
+ DestinationMilvusFake: destinationMilvusFake,
}
}
- var destinationMilvusEmbeddingFromField *shared.DestinationMilvusEmbeddingFromField
- if r.Configuration.Embedding.DestinationMilvusEmbeddingFromField != nil {
- dimensions := r.Configuration.Embedding.DestinationMilvusEmbeddingFromField.Dimensions.ValueInt64()
- fieldName := r.Configuration.Embedding.DestinationMilvusEmbeddingFromField.FieldName.ValueString()
- mode3 := new(shared.DestinationMilvusEmbeddingFromFieldMode)
- if !r.Configuration.Embedding.DestinationMilvusEmbeddingFromField.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusEmbeddingFromField.Mode.IsNull() {
- *mode3 = shared.DestinationMilvusEmbeddingFromFieldMode(r.Configuration.Embedding.DestinationMilvusEmbeddingFromField.Mode.ValueString())
- } else {
- mode3 = nil
- }
- destinationMilvusEmbeddingFromField = &shared.DestinationMilvusEmbeddingFromField{
+ var destinationMilvusFromField *shared.DestinationMilvusFromField
+ if r.Configuration.Embedding.FromField != nil {
+ dimensions := r.Configuration.Embedding.FromField.Dimensions.ValueInt64()
+ fieldName := r.Configuration.Embedding.FromField.FieldName.ValueString()
+ destinationMilvusFromField = &shared.DestinationMilvusFromField{
Dimensions: dimensions,
FieldName: fieldName,
- Mode: mode3,
}
}
- if destinationMilvusEmbeddingFromField != nil {
+ if destinationMilvusFromField != nil {
+ embedding = shared.DestinationMilvusEmbedding{
+ DestinationMilvusFromField: destinationMilvusFromField,
+ }
+ }
+ var destinationMilvusAzureOpenAI *shared.DestinationMilvusAzureOpenAI
+ if r.Configuration.Embedding.AzureOpenAI != nil {
+ apiBase := r.Configuration.Embedding.AzureOpenAI.APIBase.ValueString()
+ deployment := r.Configuration.Embedding.AzureOpenAI.Deployment.ValueString()
+ openaiKey1 := r.Configuration.Embedding.AzureOpenAI.OpenaiKey.ValueString()
+ destinationMilvusAzureOpenAI = &shared.DestinationMilvusAzureOpenAI{
+ APIBase: apiBase,
+ Deployment: deployment,
+ OpenaiKey: openaiKey1,
+ }
+ }
+ if destinationMilvusAzureOpenAI != nil {
embedding = shared.DestinationMilvusEmbedding{
- DestinationMilvusEmbeddingFromField: destinationMilvusEmbeddingFromField,
+ DestinationMilvusAzureOpenAI: destinationMilvusAzureOpenAI,
}
}
- var auth shared.DestinationMilvusIndexingAuthentication
- var destinationMilvusIndexingAuthenticationAPIToken *shared.DestinationMilvusIndexingAuthenticationAPIToken
- if r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationAPIToken != nil {
- mode4 := new(shared.DestinationMilvusIndexingAuthenticationAPITokenMode)
- if !r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationAPIToken.Mode.IsUnknown() && !r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationAPIToken.Mode.IsNull() {
- *mode4 = shared.DestinationMilvusIndexingAuthenticationAPITokenMode(r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationAPIToken.Mode.ValueString())
+ var destinationMilvusOpenAICompatible *shared.DestinationMilvusOpenAICompatible
+ if r.Configuration.Embedding.OpenAICompatible != nil {
+ apiKey := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.APIKey.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.APIKey.IsNull() {
+ *apiKey = r.Configuration.Embedding.OpenAICompatible.APIKey.ValueString()
} else {
- mode4 = nil
+ apiKey = nil
}
- token := r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationAPIToken.Token.ValueString()
- destinationMilvusIndexingAuthenticationAPIToken = &shared.DestinationMilvusIndexingAuthenticationAPIToken{
- Mode: mode4,
- Token: token,
+ baseURL := r.Configuration.Embedding.OpenAICompatible.BaseURL.ValueString()
+ dimensions1 := r.Configuration.Embedding.OpenAICompatible.Dimensions.ValueInt64()
+ modelName := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.ModelName.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.ModelName.IsNull() {
+ *modelName = r.Configuration.Embedding.OpenAICompatible.ModelName.ValueString()
+ } else {
+ modelName = nil
+ }
+ destinationMilvusOpenAICompatible = &shared.DestinationMilvusOpenAICompatible{
+ APIKey: apiKey,
+ BaseURL: baseURL,
+ Dimensions: dimensions1,
+ ModelName: modelName,
}
}
- if destinationMilvusIndexingAuthenticationAPIToken != nil {
- auth = shared.DestinationMilvusIndexingAuthentication{
- DestinationMilvusIndexingAuthenticationAPIToken: destinationMilvusIndexingAuthenticationAPIToken,
+ if destinationMilvusOpenAICompatible != nil {
+ embedding = shared.DestinationMilvusEmbedding{
+ DestinationMilvusOpenAICompatible: destinationMilvusOpenAICompatible,
}
}
- var destinationMilvusIndexingAuthenticationUsernamePassword *shared.DestinationMilvusIndexingAuthenticationUsernamePassword
- if r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword != nil {
- mode5 := new(shared.DestinationMilvusIndexingAuthenticationUsernamePasswordMode)
- if !r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword.Mode.IsUnknown() && !r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword.Mode.IsNull() {
- *mode5 = shared.DestinationMilvusIndexingAuthenticationUsernamePasswordMode(r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword.Mode.ValueString())
- } else {
- mode5 = nil
+ var auth shared.DestinationMilvusAuthentication
+ var destinationMilvusAPIToken *shared.DestinationMilvusAPIToken
+ if r.Configuration.Indexing.Auth.APIToken != nil {
+ token := r.Configuration.Indexing.Auth.APIToken.Token.ValueString()
+ destinationMilvusAPIToken = &shared.DestinationMilvusAPIToken{
+ Token: token,
}
- password := r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword.Password.ValueString()
- username := r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword.Username.ValueString()
- destinationMilvusIndexingAuthenticationUsernamePassword = &shared.DestinationMilvusIndexingAuthenticationUsernamePassword{
- Mode: mode5,
+ }
+ if destinationMilvusAPIToken != nil {
+ auth = shared.DestinationMilvusAuthentication{
+ DestinationMilvusAPIToken: destinationMilvusAPIToken,
+ }
+ }
+ var destinationMilvusUsernamePassword *shared.DestinationMilvusUsernamePassword
+ if r.Configuration.Indexing.Auth.UsernamePassword != nil {
+ password := r.Configuration.Indexing.Auth.UsernamePassword.Password.ValueString()
+ username := r.Configuration.Indexing.Auth.UsernamePassword.Username.ValueString()
+ destinationMilvusUsernamePassword = &shared.DestinationMilvusUsernamePassword{
Password: password,
Username: username,
}
}
- if destinationMilvusIndexingAuthenticationUsernamePassword != nil {
- auth = shared.DestinationMilvusIndexingAuthentication{
- DestinationMilvusIndexingAuthenticationUsernamePassword: destinationMilvusIndexingAuthenticationUsernamePassword,
+ if destinationMilvusUsernamePassword != nil {
+ auth = shared.DestinationMilvusAuthentication{
+ DestinationMilvusUsernamePassword: destinationMilvusUsernamePassword,
}
}
- var destinationMilvusIndexingAuthenticationNoAuth *shared.DestinationMilvusIndexingAuthenticationNoAuth
- if r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationNoAuth != nil {
- mode6 := new(shared.DestinationMilvusIndexingAuthenticationNoAuthMode)
- if !r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationNoAuth.Mode.IsUnknown() && !r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationNoAuth.Mode.IsNull() {
- *mode6 = shared.DestinationMilvusIndexingAuthenticationNoAuthMode(r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationNoAuth.Mode.ValueString())
- } else {
- mode6 = nil
- }
- destinationMilvusIndexingAuthenticationNoAuth = &shared.DestinationMilvusIndexingAuthenticationNoAuth{
- Mode: mode6,
- }
+ var destinationMilvusNoAuth *shared.DestinationMilvusNoAuth
+ if r.Configuration.Indexing.Auth.NoAuth != nil {
+ destinationMilvusNoAuth = &shared.DestinationMilvusNoAuth{}
}
- if destinationMilvusIndexingAuthenticationNoAuth != nil {
- auth = shared.DestinationMilvusIndexingAuthentication{
- DestinationMilvusIndexingAuthenticationNoAuth: destinationMilvusIndexingAuthenticationNoAuth,
+ if destinationMilvusNoAuth != nil {
+ auth = shared.DestinationMilvusAuthentication{
+ DestinationMilvusNoAuth: destinationMilvusNoAuth,
}
}
collection := r.Configuration.Indexing.Collection.ValueString()
@@ -179,6 +171,15 @@ func (r *DestinationMilvusResourceModel) ToCreateSDKType() *shared.DestinationMi
chunkOverlap = nil
}
chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64()
+ var fieldNameMappings []shared.DestinationMilvusFieldNameMappingConfigModel = nil
+ for _, fieldNameMappingsItem := range r.Configuration.Processing.FieldNameMappings {
+ fromField := fieldNameMappingsItem.FromField.ValueString()
+ toField := fieldNameMappingsItem.ToField.ValueString()
+ fieldNameMappings = append(fieldNameMappings, shared.DestinationMilvusFieldNameMappingConfigModel{
+ FromField: fromField,
+ ToField: toField,
+ })
+ }
var metadataFields []string = nil
for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields {
metadataFields = append(metadataFields, metadataFieldsItem.ValueString())
@@ -187,22 +188,84 @@ func (r *DestinationMilvusResourceModel) ToCreateSDKType() *shared.DestinationMi
for _, textFieldsItem := range r.Configuration.Processing.TextFields {
textFields = append(textFields, textFieldsItem.ValueString())
}
+ var textSplitter *shared.DestinationMilvusTextSplitter
+ if r.Configuration.Processing.TextSplitter != nil {
+ var destinationMilvusBySeparator *shared.DestinationMilvusBySeparator
+ if r.Configuration.Processing.TextSplitter.BySeparator != nil {
+ keepSeparator := new(bool)
+ if !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsUnknown() && !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsNull() {
+ *keepSeparator = r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.ValueBool()
+ } else {
+ keepSeparator = nil
+ }
+ var separators []string = nil
+ for _, separatorsItem := range r.Configuration.Processing.TextSplitter.BySeparator.Separators {
+ separators = append(separators, separatorsItem.ValueString())
+ }
+ destinationMilvusBySeparator = &shared.DestinationMilvusBySeparator{
+ KeepSeparator: keepSeparator,
+ Separators: separators,
+ }
+ }
+ if destinationMilvusBySeparator != nil {
+ textSplitter = &shared.DestinationMilvusTextSplitter{
+ DestinationMilvusBySeparator: destinationMilvusBySeparator,
+ }
+ }
+ var destinationMilvusByMarkdownHeader *shared.DestinationMilvusByMarkdownHeader
+ if r.Configuration.Processing.TextSplitter.ByMarkdownHeader != nil {
+ splitLevel := new(int64)
+ if !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsUnknown() && !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsNull() {
+ *splitLevel = r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.ValueInt64()
+ } else {
+ splitLevel = nil
+ }
+ destinationMilvusByMarkdownHeader = &shared.DestinationMilvusByMarkdownHeader{
+ SplitLevel: splitLevel,
+ }
+ }
+ if destinationMilvusByMarkdownHeader != nil {
+ textSplitter = &shared.DestinationMilvusTextSplitter{
+ DestinationMilvusByMarkdownHeader: destinationMilvusByMarkdownHeader,
+ }
+ }
+ var destinationMilvusByProgrammingLanguage *shared.DestinationMilvusByProgrammingLanguage
+ if r.Configuration.Processing.TextSplitter.ByProgrammingLanguage != nil {
+ language := shared.DestinationMilvusLanguage(r.Configuration.Processing.TextSplitter.ByProgrammingLanguage.Language.ValueString())
+ destinationMilvusByProgrammingLanguage = &shared.DestinationMilvusByProgrammingLanguage{
+ Language: language,
+ }
+ }
+ if destinationMilvusByProgrammingLanguage != nil {
+ textSplitter = &shared.DestinationMilvusTextSplitter{
+ DestinationMilvusByProgrammingLanguage: destinationMilvusByProgrammingLanguage,
+ }
+ }
+ }
processing := shared.DestinationMilvusProcessingConfigModel{
- ChunkOverlap: chunkOverlap,
- ChunkSize: chunkSize,
- MetadataFields: metadataFields,
- TextFields: textFields,
+ ChunkOverlap: chunkOverlap,
+ ChunkSize: chunkSize,
+ FieldNameMappings: fieldNameMappings,
+ MetadataFields: metadataFields,
+ TextFields: textFields,
+ TextSplitter: textSplitter,
}
configuration := shared.DestinationMilvus{
- DestinationType: destinationType,
- Embedding: embedding,
- Indexing: indexing,
- Processing: processing,
+ Embedding: embedding,
+ Indexing: indexing,
+ Processing: processing,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationMilvusCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -216,138 +279,131 @@ func (r *DestinationMilvusResourceModel) ToGetSDKType() *shared.DestinationMilvu
func (r *DestinationMilvusResourceModel) ToUpdateSDKType() *shared.DestinationMilvusPutRequest {
var embedding shared.DestinationMilvusUpdateEmbedding
- var destinationMilvusUpdateEmbeddingOpenAI *shared.DestinationMilvusUpdateEmbeddingOpenAI
- if r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingOpenAI != nil {
- mode := new(shared.DestinationMilvusUpdateEmbeddingOpenAIMode)
- if !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingOpenAI.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingOpenAI.Mode.IsNull() {
- *mode = shared.DestinationMilvusUpdateEmbeddingOpenAIMode(r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingOpenAI.Mode.ValueString())
- } else {
- mode = nil
- }
- openaiKey := r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingOpenAI.OpenaiKey.ValueString()
- destinationMilvusUpdateEmbeddingOpenAI = &shared.DestinationMilvusUpdateEmbeddingOpenAI{
- Mode: mode,
+ var destinationMilvusUpdateOpenAI *shared.DestinationMilvusUpdateOpenAI
+ if r.Configuration.Embedding.OpenAI != nil {
+ openaiKey := r.Configuration.Embedding.OpenAI.OpenaiKey.ValueString()
+ destinationMilvusUpdateOpenAI = &shared.DestinationMilvusUpdateOpenAI{
OpenaiKey: openaiKey,
}
}
- if destinationMilvusUpdateEmbeddingOpenAI != nil {
+ if destinationMilvusUpdateOpenAI != nil {
embedding = shared.DestinationMilvusUpdateEmbedding{
- DestinationMilvusUpdateEmbeddingOpenAI: destinationMilvusUpdateEmbeddingOpenAI,
+ DestinationMilvusUpdateOpenAI: destinationMilvusUpdateOpenAI,
}
}
- var destinationMilvusUpdateEmbeddingCohere *shared.DestinationMilvusUpdateEmbeddingCohere
- if r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingCohere != nil {
- cohereKey := r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingCohere.CohereKey.ValueString()
- mode1 := new(shared.DestinationMilvusUpdateEmbeddingCohereMode)
- if !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingCohere.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingCohere.Mode.IsNull() {
- *mode1 = shared.DestinationMilvusUpdateEmbeddingCohereMode(r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingCohere.Mode.ValueString())
- } else {
- mode1 = nil
- }
- destinationMilvusUpdateEmbeddingCohere = &shared.DestinationMilvusUpdateEmbeddingCohere{
+ var cohere *shared.Cohere
+ if r.Configuration.Embedding.Cohere != nil {
+ cohereKey := r.Configuration.Embedding.Cohere.CohereKey.ValueString()
+ cohere = &shared.Cohere{
CohereKey: cohereKey,
- Mode: mode1,
}
}
- if destinationMilvusUpdateEmbeddingCohere != nil {
+ if cohere != nil {
embedding = shared.DestinationMilvusUpdateEmbedding{
- DestinationMilvusUpdateEmbeddingCohere: destinationMilvusUpdateEmbeddingCohere,
+ Cohere: cohere,
}
}
- var destinationMilvusUpdateEmbeddingFake *shared.DestinationMilvusUpdateEmbeddingFake
- if r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFake != nil {
- mode2 := new(shared.DestinationMilvusUpdateEmbeddingFakeMode)
- if !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFake.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFake.Mode.IsNull() {
- *mode2 = shared.DestinationMilvusUpdateEmbeddingFakeMode(r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFake.Mode.ValueString())
- } else {
- mode2 = nil
- }
- destinationMilvusUpdateEmbeddingFake = &shared.DestinationMilvusUpdateEmbeddingFake{
- Mode: mode2,
- }
+ var destinationMilvusUpdateFake *shared.DestinationMilvusUpdateFake
+ if r.Configuration.Embedding.Fake != nil {
+ destinationMilvusUpdateFake = &shared.DestinationMilvusUpdateFake{}
}
- if destinationMilvusUpdateEmbeddingFake != nil {
+ if destinationMilvusUpdateFake != nil {
embedding = shared.DestinationMilvusUpdateEmbedding{
- DestinationMilvusUpdateEmbeddingFake: destinationMilvusUpdateEmbeddingFake,
+ DestinationMilvusUpdateFake: destinationMilvusUpdateFake,
}
}
- var destinationMilvusUpdateEmbeddingFromField *shared.DestinationMilvusUpdateEmbeddingFromField
- if r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField != nil {
- dimensions := r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField.Dimensions.ValueInt64()
- fieldName := r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField.FieldName.ValueString()
- mode3 := new(shared.DestinationMilvusUpdateEmbeddingFromFieldMode)
- if !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField.Mode.IsNull() {
- *mode3 = shared.DestinationMilvusUpdateEmbeddingFromFieldMode(r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField.Mode.ValueString())
- } else {
- mode3 = nil
- }
- destinationMilvusUpdateEmbeddingFromField = &shared.DestinationMilvusUpdateEmbeddingFromField{
+ var fromField *shared.FromField
+ if r.Configuration.Embedding.FromField != nil {
+ dimensions := r.Configuration.Embedding.FromField.Dimensions.ValueInt64()
+ fieldName := r.Configuration.Embedding.FromField.FieldName.ValueString()
+ fromField = &shared.FromField{
Dimensions: dimensions,
FieldName: fieldName,
- Mode: mode3,
}
}
- if destinationMilvusUpdateEmbeddingFromField != nil {
+ if fromField != nil {
+ embedding = shared.DestinationMilvusUpdateEmbedding{
+ FromField: fromField,
+ }
+ }
+ var azureOpenAI *shared.AzureOpenAI
+ if r.Configuration.Embedding.AzureOpenAI != nil {
+ apiBase := r.Configuration.Embedding.AzureOpenAI.APIBase.ValueString()
+ deployment := r.Configuration.Embedding.AzureOpenAI.Deployment.ValueString()
+ openaiKey1 := r.Configuration.Embedding.AzureOpenAI.OpenaiKey.ValueString()
+ azureOpenAI = &shared.AzureOpenAI{
+ APIBase: apiBase,
+ Deployment: deployment,
+ OpenaiKey: openaiKey1,
+ }
+ }
+ if azureOpenAI != nil {
embedding = shared.DestinationMilvusUpdateEmbedding{
- DestinationMilvusUpdateEmbeddingFromField: destinationMilvusUpdateEmbeddingFromField,
+ AzureOpenAI: azureOpenAI,
}
}
- var auth shared.DestinationMilvusUpdateIndexingAuthentication
- var destinationMilvusUpdateIndexingAuthenticationAPIToken *shared.DestinationMilvusUpdateIndexingAuthenticationAPIToken
- if r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationAPIToken != nil {
- mode4 := new(shared.DestinationMilvusUpdateIndexingAuthenticationAPITokenMode)
- if !r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationAPIToken.Mode.IsUnknown() && !r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationAPIToken.Mode.IsNull() {
- *mode4 = shared.DestinationMilvusUpdateIndexingAuthenticationAPITokenMode(r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationAPIToken.Mode.ValueString())
+ var openAICompatible *shared.OpenAICompatible
+ if r.Configuration.Embedding.OpenAICompatible != nil {
+ apiKey := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.APIKey.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.APIKey.IsNull() {
+ *apiKey = r.Configuration.Embedding.OpenAICompatible.APIKey.ValueString()
} else {
- mode4 = nil
+ apiKey = nil
}
- token := r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationAPIToken.Token.ValueString()
- destinationMilvusUpdateIndexingAuthenticationAPIToken = &shared.DestinationMilvusUpdateIndexingAuthenticationAPIToken{
- Mode: mode4,
- Token: token,
+ baseURL := r.Configuration.Embedding.OpenAICompatible.BaseURL.ValueString()
+ dimensions1 := r.Configuration.Embedding.OpenAICompatible.Dimensions.ValueInt64()
+ modelName := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.ModelName.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.ModelName.IsNull() {
+ *modelName = r.Configuration.Embedding.OpenAICompatible.ModelName.ValueString()
+ } else {
+ modelName = nil
+ }
+ openAICompatible = &shared.OpenAICompatible{
+ APIKey: apiKey,
+ BaseURL: baseURL,
+ Dimensions: dimensions1,
+ ModelName: modelName,
+ }
+ }
+ if openAICompatible != nil {
+ embedding = shared.DestinationMilvusUpdateEmbedding{
+ OpenAICompatible: openAICompatible,
}
}
- if destinationMilvusUpdateIndexingAuthenticationAPIToken != nil {
- auth = shared.DestinationMilvusUpdateIndexingAuthentication{
- DestinationMilvusUpdateIndexingAuthenticationAPIToken: destinationMilvusUpdateIndexingAuthenticationAPIToken,
+ var auth shared.DestinationMilvusUpdateAuthentication
+ var destinationMilvusUpdateAPIToken *shared.DestinationMilvusUpdateAPIToken
+ if r.Configuration.Indexing.Auth.APIToken != nil {
+ token := r.Configuration.Indexing.Auth.APIToken.Token.ValueString()
+ destinationMilvusUpdateAPIToken = &shared.DestinationMilvusUpdateAPIToken{
+ Token: token,
}
}
- var destinationMilvusUpdateIndexingAuthenticationUsernamePassword *shared.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword
- if r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword != nil {
- mode5 := new(shared.DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode)
- if !r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword.Mode.IsUnknown() && !r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword.Mode.IsNull() {
- *mode5 = shared.DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode(r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword.Mode.ValueString())
- } else {
- mode5 = nil
+ if destinationMilvusUpdateAPIToken != nil {
+ auth = shared.DestinationMilvusUpdateAuthentication{
+ DestinationMilvusUpdateAPIToken: destinationMilvusUpdateAPIToken,
}
- password := r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword.Password.ValueString()
- username := r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword.Username.ValueString()
- destinationMilvusUpdateIndexingAuthenticationUsernamePassword = &shared.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword{
- Mode: mode5,
+ }
+ var destinationMilvusUpdateUsernamePassword *shared.DestinationMilvusUpdateUsernamePassword
+ if r.Configuration.Indexing.Auth.UsernamePassword != nil {
+ password := r.Configuration.Indexing.Auth.UsernamePassword.Password.ValueString()
+ username := r.Configuration.Indexing.Auth.UsernamePassword.Username.ValueString()
+ destinationMilvusUpdateUsernamePassword = &shared.DestinationMilvusUpdateUsernamePassword{
Password: password,
Username: username,
}
}
- if destinationMilvusUpdateIndexingAuthenticationUsernamePassword != nil {
- auth = shared.DestinationMilvusUpdateIndexingAuthentication{
- DestinationMilvusUpdateIndexingAuthenticationUsernamePassword: destinationMilvusUpdateIndexingAuthenticationUsernamePassword,
+ if destinationMilvusUpdateUsernamePassword != nil {
+ auth = shared.DestinationMilvusUpdateAuthentication{
+ DestinationMilvusUpdateUsernamePassword: destinationMilvusUpdateUsernamePassword,
}
}
- var destinationMilvusUpdateIndexingAuthenticationNoAuth *shared.DestinationMilvusUpdateIndexingAuthenticationNoAuth
- if r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationNoAuth != nil {
- mode6 := new(shared.DestinationMilvusUpdateIndexingAuthenticationNoAuthMode)
- if !r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationNoAuth.Mode.IsUnknown() && !r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationNoAuth.Mode.IsNull() {
- *mode6 = shared.DestinationMilvusUpdateIndexingAuthenticationNoAuthMode(r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationNoAuth.Mode.ValueString())
- } else {
- mode6 = nil
- }
- destinationMilvusUpdateIndexingAuthenticationNoAuth = &shared.DestinationMilvusUpdateIndexingAuthenticationNoAuth{
- Mode: mode6,
- }
+ var noAuth *shared.NoAuth
+ if r.Configuration.Indexing.Auth.NoAuth != nil {
+ noAuth = &shared.NoAuth{}
}
- if destinationMilvusUpdateIndexingAuthenticationNoAuth != nil {
- auth = shared.DestinationMilvusUpdateIndexingAuthentication{
- DestinationMilvusUpdateIndexingAuthenticationNoAuth: destinationMilvusUpdateIndexingAuthenticationNoAuth,
+ if noAuth != nil {
+ auth = shared.DestinationMilvusUpdateAuthentication{
+ NoAuth: noAuth,
}
}
collection := r.Configuration.Indexing.Collection.ValueString()
@@ -385,6 +441,15 @@ func (r *DestinationMilvusResourceModel) ToUpdateSDKType() *shared.DestinationMi
chunkOverlap = nil
}
chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64()
+ var fieldNameMappings []shared.FieldNameMappingConfigModel = nil
+ for _, fieldNameMappingsItem := range r.Configuration.Processing.FieldNameMappings {
+ fromField1 := fieldNameMappingsItem.FromField.ValueString()
+ toField := fieldNameMappingsItem.ToField.ValueString()
+ fieldNameMappings = append(fieldNameMappings, shared.FieldNameMappingConfigModel{
+ FromField: fromField1,
+ ToField: toField,
+ })
+ }
var metadataFields []string = nil
for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields {
metadataFields = append(metadataFields, metadataFieldsItem.ValueString())
@@ -393,11 +458,67 @@ func (r *DestinationMilvusResourceModel) ToUpdateSDKType() *shared.DestinationMi
for _, textFieldsItem := range r.Configuration.Processing.TextFields {
textFields = append(textFields, textFieldsItem.ValueString())
}
+ var textSplitter *shared.TextSplitter
+ if r.Configuration.Processing.TextSplitter != nil {
+ var bySeparator *shared.BySeparator
+ if r.Configuration.Processing.TextSplitter.BySeparator != nil {
+ keepSeparator := new(bool)
+ if !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsUnknown() && !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsNull() {
+ *keepSeparator = r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.ValueBool()
+ } else {
+ keepSeparator = nil
+ }
+ var separators []string = nil
+ for _, separatorsItem := range r.Configuration.Processing.TextSplitter.BySeparator.Separators {
+ separators = append(separators, separatorsItem.ValueString())
+ }
+ bySeparator = &shared.BySeparator{
+ KeepSeparator: keepSeparator,
+ Separators: separators,
+ }
+ }
+ if bySeparator != nil {
+ textSplitter = &shared.TextSplitter{
+ BySeparator: bySeparator,
+ }
+ }
+ var byMarkdownHeader *shared.ByMarkdownHeader
+ if r.Configuration.Processing.TextSplitter.ByMarkdownHeader != nil {
+ splitLevel := new(int64)
+ if !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsUnknown() && !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsNull() {
+ *splitLevel = r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.ValueInt64()
+ } else {
+ splitLevel = nil
+ }
+ byMarkdownHeader = &shared.ByMarkdownHeader{
+ SplitLevel: splitLevel,
+ }
+ }
+ if byMarkdownHeader != nil {
+ textSplitter = &shared.TextSplitter{
+ ByMarkdownHeader: byMarkdownHeader,
+ }
+ }
+ var byProgrammingLanguage *shared.ByProgrammingLanguage
+ if r.Configuration.Processing.TextSplitter.ByProgrammingLanguage != nil {
+ language := shared.DestinationMilvusUpdateLanguage(r.Configuration.Processing.TextSplitter.ByProgrammingLanguage.Language.ValueString())
+ byProgrammingLanguage = &shared.ByProgrammingLanguage{
+ Language: language,
+ }
+ }
+ if byProgrammingLanguage != nil {
+ textSplitter = &shared.TextSplitter{
+ ByProgrammingLanguage: byProgrammingLanguage,
+ }
+ }
+ }
processing := shared.DestinationMilvusUpdateProcessingConfigModel{
- ChunkOverlap: chunkOverlap,
- ChunkSize: chunkSize,
- MetadataFields: metadataFields,
- TextFields: textFields,
+ ChunkOverlap: chunkOverlap,
+ ChunkSize: chunkSize,
+ FieldNameMappings: fieldNameMappings,
+ MetadataFields: metadataFields,
+ TextFields: textFields,
+ TextSplitter: textSplitter,
}
configuration := shared.DestinationMilvusUpdate{
Embedding: embedding,
diff --git a/internal/provider/destination_mongodb_data_source.go b/internal/provider/destination_mongodb_data_source.go
old mode 100755
new mode 100644
index bd5f75f80..f7da434bc
--- a/internal/provider/destination_mongodb_data_source.go
+++ b/internal/provider/destination_mongodb_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationMongodbDataSource struct {
// DestinationMongodbDataSourceModel describes the data model.
type DestinationMongodbDataSourceModel struct {
- Configuration DestinationMongodb `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,420 +47,17 @@ func (r *DestinationMongodbDataSource) Schema(ctx context.Context, req datasourc
MarkdownDescription: "DestinationMongodb DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_mongodb_authorization_type_login_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "authorization": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "login/password",
- ),
- },
- Description: `must be one of ["login/password"]`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to use to access the database.`,
- },
- },
- Description: `Login/Password.`,
- },
- "destination_mongodb_authorization_type_none": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "authorization": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "none",
- ),
- },
- Description: `must be one of ["none"]`,
- },
- },
- Description: `None.`,
- },
- "destination_mongodb_update_authorization_type_login_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "authorization": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "login/password",
- ),
- },
- Description: `must be one of ["login/password"]`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to use to access the database.`,
- },
- },
- Description: `Login/Password.`,
- },
- "destination_mongodb_update_authorization_type_none": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "authorization": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "none",
- ),
- },
- Description: `must be one of ["none"]`,
- },
- },
- Description: `None.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Authorization type.`,
- },
- "database": schema.StringAttribute{
- Computed: true,
- Description: `Name of the database.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mongodb",
- ),
- },
- Description: `must be one of ["mongodb"]`,
- },
- "instance_type": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_mongodb_mongo_db_instance_type_mongo_db_atlas": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "cluster_url": schema.StringAttribute{
- Computed: true,
- Description: `URL of a cluster to connect to.`,
- },
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "atlas",
- ),
- },
- Description: `must be one of ["atlas"]`,
- },
- },
- Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "destination_mongodb_mongo_db_instance_type_replica_set": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "replica",
- ),
- },
- Description: `must be one of ["replica"]`,
- },
- "replica_set": schema.StringAttribute{
- Computed: true,
- Description: `A replica set name.`,
- },
- "server_addresses": schema.StringAttribute{
- Computed: true,
- Description: `The members of a replica set. Please specify ` + "`" + `host` + "`" + `:` + "`" + `port` + "`" + ` of each member seperated by comma.`,
- },
- },
- Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The Host of a Mongo database to be replicated.`,
- },
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "standalone",
- ),
- },
- Description: `must be one of ["standalone"]`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `The Port of a Mongo database to be replicated.`,
- },
- },
- Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "destination_mongodb_update_mongo_db_instance_type_mongo_db_atlas": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "cluster_url": schema.StringAttribute{
- Computed: true,
- Description: `URL of a cluster to connect to.`,
- },
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "atlas",
- ),
- },
- Description: `must be one of ["atlas"]`,
- },
- },
- Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "destination_mongodb_update_mongo_db_instance_type_replica_set": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "replica",
- ),
- },
- Description: `must be one of ["replica"]`,
- },
- "replica_set": schema.StringAttribute{
- Computed: true,
- Description: `A replica set name.`,
- },
- "server_addresses": schema.StringAttribute{
- Computed: true,
- Description: `The members of a replica set. Please specify ` + "`" + `host` + "`" + `:` + "`" + `port` + "`" + ` of each member seperated by comma.`,
- },
- },
- Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "destination_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The Host of a Mongo database to be replicated.`,
- },
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "standalone",
- ),
- },
- Description: `must be one of ["standalone"]`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `The Port of a Mongo database to be replicated.`,
- },
- },
- Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_mongodb_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mongodb_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mongodb_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mongodb_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mongodb_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mongodb_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_mongodb_data_source_sdk.go b/internal/provider/destination_mongodb_data_source_sdk.go
old mode 100755
new mode 100644
index 089f7b33f..73ae4197a
--- a/internal/provider/destination_mongodb_data_source_sdk.go
+++ b/internal/provider/destination_mongodb_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationMongodbDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_mongodb_resource.go b/internal/provider/destination_mongodb_resource.go
old mode 100755
new mode 100644
index 6d36bdf5e..fffd92a6d
--- a/internal/provider/destination_mongodb_resource.go
+++ b/internal/provider/destination_mongodb_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationMongodbResource struct {
// DestinationMongodbResourceModel describes the resource data model.
type DestinationMongodbResourceModel struct {
Configuration DestinationMongodb `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -57,58 +59,12 @@ func (r *DestinationMongodbResource) Schema(ctx context.Context, req resource.Sc
"auth_type": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_mongodb_authorization_type_login_password": schema.SingleNestedAttribute{
+ "login_password": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "authorization": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "login/password",
- ),
- },
- Description: `must be one of ["login/password"]`,
- },
- "password": schema.StringAttribute{
- Required: true,
- Description: `Password associated with the username.`,
- },
- "username": schema.StringAttribute{
- Required: true,
- Description: `Username to use to access the database.`,
- },
- },
- Description: `Login/Password.`,
- },
- "destination_mongodb_authorization_type_none": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "authorization": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "none",
- ),
- },
- Description: `must be one of ["none"]`,
- },
- },
- Description: `None.`,
- },
- "destination_mongodb_update_authorization_type_login_password": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "authorization": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "login/password",
- ),
- },
- Description: `must be one of ["login/password"]`,
- },
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Password associated with the username.`,
},
"username": schema.StringAttribute{
@@ -118,44 +74,25 @@ func (r *DestinationMongodbResource) Schema(ctx context.Context, req resource.Sc
},
Description: `Login/Password.`,
},
- "destination_mongodb_update_authorization_type_none": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "authorization": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "none",
- ),
- },
- Description: `must be one of ["none"]`,
- },
- },
+ "none": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `None.`,
},
},
+ Description: `Authorization type.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Authorization type.`,
},
"database": schema.StringAttribute{
Required: true,
Description: `Name of the database.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mongodb",
- ),
- },
- Description: `must be one of ["mongodb"]`,
- },
"instance_type": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_mongodb_mongo_db_instance_type_mongo_db_atlas": schema.SingleNestedAttribute{
+ "mongo_db_atlas": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"cluster_url": schema.StringAttribute{
@@ -163,93 +100,28 @@ func (r *DestinationMongodbResource) Schema(ctx context.Context, req resource.Sc
Description: `URL of a cluster to connect to.`,
},
"instance": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "atlas",
- ),
- },
- Description: `must be one of ["atlas"]`,
- },
- },
- Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "destination_mongodb_mongo_db_instance_type_replica_set": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "instance": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "replica",
- ),
- },
- Description: `must be one of ["replica"]`,
- },
- "replica_set": schema.StringAttribute{
Optional: true,
- Description: `A replica set name.`,
- },
- "server_addresses": schema.StringAttribute{
- Required: true,
- Description: `The members of a replica set. Please specify ` + "`" + `host` + "`" + `:` + "`" + `port` + "`" + ` of each member seperated by comma.`,
- },
- },
- Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Required: true,
- Description: `The Host of a Mongo database to be replicated.`,
- },
- "instance": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "standalone",
- ),
- },
- Description: `must be one of ["standalone"]`,
- },
- "port": schema.Int64Attribute{
- Required: true,
- Description: `The Port of a Mongo database to be replicated.`,
- },
- },
- Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "destination_mongodb_update_mongo_db_instance_type_mongo_db_atlas": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "cluster_url": schema.StringAttribute{
- Required: true,
- Description: `URL of a cluster to connect to.`,
- },
- "instance": schema.StringAttribute{
- Required: true,
+ Description: `must be one of ["atlas"]; Default: "atlas"`,
Validators: []validator.String{
stringvalidator.OneOf(
"atlas",
),
},
- Description: `must be one of ["atlas"]`,
},
},
Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
},
- "destination_mongodb_update_mongo_db_instance_type_replica_set": schema.SingleNestedAttribute{
+ "replica_set": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"instance": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["replica"]; Default: "replica"`,
Validators: []validator.String{
stringvalidator.OneOf(
"replica",
),
},
- Description: `must be one of ["replica"]`,
},
"replica_set": schema.StringAttribute{
Optional: true,
@@ -262,7 +134,7 @@ func (r *DestinationMongodbResource) Schema(ctx context.Context, req resource.Sc
},
Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
},
- "destination_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance": schema.SingleNestedAttribute{
+ "standalone_mongo_db_instance": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"host": schema.StringAttribute{
@@ -270,66 +142,47 @@ func (r *DestinationMongodbResource) Schema(ctx context.Context, req resource.Sc
Description: `The Host of a Mongo database to be replicated.`,
},
"instance": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["standalone"]; Default: "standalone"`,
Validators: []validator.String{
stringvalidator.OneOf(
"standalone",
),
},
- Description: `must be one of ["standalone"]`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `The Port of a Mongo database to be replicated.`,
+ Optional: true,
+ MarkdownDescription: `Default: 27017` + "\n" +
+ `The Port of a Mongo database to be replicated.`,
},
},
Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
},
},
+ Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_mongodb_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_mongodb_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -337,115 +190,28 @@ func (r *DestinationMongodbResource) Schema(ctx context.Context, req resource.Sc
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_mongodb_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mongodb_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mongodb_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mongodb_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -455,13 +221,20 @@ func (r *DestinationMongodbResource) Schema(ctx context.Context, req resource.Sc
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -478,7 +251,8 @@ func (r *DestinationMongodbResource) Schema(ctx context.Context, req resource.Sc
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -528,7 +302,7 @@ func (r *DestinationMongodbResource) Create(ctx context.Context, req resource.Cr
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationMongodb(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -704,5 +478,5 @@ func (r *DestinationMongodbResource) Delete(ctx context.Context, req resource.De
}
func (r *DestinationMongodbResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_mongodb_resource_sdk.go b/internal/provider/destination_mongodb_resource_sdk.go
old mode 100755
new mode 100644
index 717933b7e..6937033a1
--- a/internal/provider/destination_mongodb_resource_sdk.go
+++ b/internal/provider/destination_mongodb_resource_sdk.go
@@ -3,162 +3,185 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationMongodbResourceModel) ToCreateSDKType() *shared.DestinationMongodbCreateRequest {
var authType shared.DestinationMongodbAuthorizationType
- var destinationMongodbAuthorizationTypeNone *shared.DestinationMongodbAuthorizationTypeNone
- if r.Configuration.AuthType.DestinationMongodbAuthorizationTypeNone != nil {
- authorization := shared.DestinationMongodbAuthorizationTypeNoneAuthorization(r.Configuration.AuthType.DestinationMongodbAuthorizationTypeNone.Authorization.ValueString())
- destinationMongodbAuthorizationTypeNone = &shared.DestinationMongodbAuthorizationTypeNone{
- Authorization: authorization,
- }
+ var destinationMongodbNone *shared.DestinationMongodbNone
+ if r.Configuration.AuthType.None != nil {
+ destinationMongodbNone = &shared.DestinationMongodbNone{}
}
- if destinationMongodbAuthorizationTypeNone != nil {
+ if destinationMongodbNone != nil {
authType = shared.DestinationMongodbAuthorizationType{
- DestinationMongodbAuthorizationTypeNone: destinationMongodbAuthorizationTypeNone,
+ DestinationMongodbNone: destinationMongodbNone,
}
}
- var destinationMongodbAuthorizationTypeLoginPassword *shared.DestinationMongodbAuthorizationTypeLoginPassword
- if r.Configuration.AuthType.DestinationMongodbAuthorizationTypeLoginPassword != nil {
- authorization1 := shared.DestinationMongodbAuthorizationTypeLoginPasswordAuthorization(r.Configuration.AuthType.DestinationMongodbAuthorizationTypeLoginPassword.Authorization.ValueString())
- password := r.Configuration.AuthType.DestinationMongodbAuthorizationTypeLoginPassword.Password.ValueString()
- username := r.Configuration.AuthType.DestinationMongodbAuthorizationTypeLoginPassword.Username.ValueString()
- destinationMongodbAuthorizationTypeLoginPassword = &shared.DestinationMongodbAuthorizationTypeLoginPassword{
- Authorization: authorization1,
- Password: password,
- Username: username,
+ var destinationMongodbLoginPassword *shared.DestinationMongodbLoginPassword
+ if r.Configuration.AuthType.LoginPassword != nil {
+ password := r.Configuration.AuthType.LoginPassword.Password.ValueString()
+ username := r.Configuration.AuthType.LoginPassword.Username.ValueString()
+ destinationMongodbLoginPassword = &shared.DestinationMongodbLoginPassword{
+ Password: password,
+ Username: username,
}
}
- if destinationMongodbAuthorizationTypeLoginPassword != nil {
+ if destinationMongodbLoginPassword != nil {
authType = shared.DestinationMongodbAuthorizationType{
- DestinationMongodbAuthorizationTypeLoginPassword: destinationMongodbAuthorizationTypeLoginPassword,
+ DestinationMongodbLoginPassword: destinationMongodbLoginPassword,
}
}
database := r.Configuration.Database.ValueString()
- destinationType := shared.DestinationMongodbMongodb(r.Configuration.DestinationType.ValueString())
var instanceType *shared.DestinationMongodbMongoDbInstanceType
if r.Configuration.InstanceType != nil {
- var destinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance *shared.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance
- if r.Configuration.InstanceType.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
- host := r.Configuration.InstanceType.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance.Host.ValueString()
- instance := shared.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance(r.Configuration.InstanceType.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance.Instance.ValueString())
- port := r.Configuration.InstanceType.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance.Port.ValueInt64()
- destinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance = &shared.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance{
+ var destinationMongodbStandaloneMongoDbInstance *shared.DestinationMongodbStandaloneMongoDbInstance
+ if r.Configuration.InstanceType.StandaloneMongoDbInstance != nil {
+ host := r.Configuration.InstanceType.StandaloneMongoDbInstance.Host.ValueString()
+ instance := new(shared.DestinationMongodbInstance)
+ if !r.Configuration.InstanceType.StandaloneMongoDbInstance.Instance.IsUnknown() && !r.Configuration.InstanceType.StandaloneMongoDbInstance.Instance.IsNull() {
+ *instance = shared.DestinationMongodbInstance(r.Configuration.InstanceType.StandaloneMongoDbInstance.Instance.ValueString())
+ } else {
+ instance = nil
+ }
+ port := new(int64)
+ if !r.Configuration.InstanceType.StandaloneMongoDbInstance.Port.IsUnknown() && !r.Configuration.InstanceType.StandaloneMongoDbInstance.Port.IsNull() {
+ *port = r.Configuration.InstanceType.StandaloneMongoDbInstance.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ destinationMongodbStandaloneMongoDbInstance = &shared.DestinationMongodbStandaloneMongoDbInstance{
Host: host,
Instance: instance,
Port: port,
}
}
- if destinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
+ if destinationMongodbStandaloneMongoDbInstance != nil {
instanceType = &shared.DestinationMongodbMongoDbInstanceType{
- DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance: destinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance,
+ DestinationMongodbStandaloneMongoDbInstance: destinationMongodbStandaloneMongoDbInstance,
}
}
- var destinationMongodbMongoDbInstanceTypeReplicaSet *shared.DestinationMongodbMongoDbInstanceTypeReplicaSet
- if r.Configuration.InstanceType.DestinationMongodbMongoDbInstanceTypeReplicaSet != nil {
- instance1 := shared.DestinationMongodbMongoDbInstanceTypeReplicaSetInstance(r.Configuration.InstanceType.DestinationMongodbMongoDbInstanceTypeReplicaSet.Instance.ValueString())
+ var destinationMongodbReplicaSet *shared.DestinationMongodbReplicaSet
+ if r.Configuration.InstanceType.ReplicaSet != nil {
+ instance1 := new(shared.DestinationMongodbSchemasInstance)
+ if !r.Configuration.InstanceType.ReplicaSet.Instance.IsUnknown() && !r.Configuration.InstanceType.ReplicaSet.Instance.IsNull() {
+ *instance1 = shared.DestinationMongodbSchemasInstance(r.Configuration.InstanceType.ReplicaSet.Instance.ValueString())
+ } else {
+ instance1 = nil
+ }
replicaSet := new(string)
- if !r.Configuration.InstanceType.DestinationMongodbMongoDbInstanceTypeReplicaSet.ReplicaSet.IsUnknown() && !r.Configuration.InstanceType.DestinationMongodbMongoDbInstanceTypeReplicaSet.ReplicaSet.IsNull() {
- *replicaSet = r.Configuration.InstanceType.DestinationMongodbMongoDbInstanceTypeReplicaSet.ReplicaSet.ValueString()
+ if !r.Configuration.InstanceType.ReplicaSet.ReplicaSet.IsUnknown() && !r.Configuration.InstanceType.ReplicaSet.ReplicaSet.IsNull() {
+ *replicaSet = r.Configuration.InstanceType.ReplicaSet.ReplicaSet.ValueString()
} else {
replicaSet = nil
}
- serverAddresses := r.Configuration.InstanceType.DestinationMongodbMongoDbInstanceTypeReplicaSet.ServerAddresses.ValueString()
- destinationMongodbMongoDbInstanceTypeReplicaSet = &shared.DestinationMongodbMongoDbInstanceTypeReplicaSet{
+ serverAddresses := r.Configuration.InstanceType.ReplicaSet.ServerAddresses.ValueString()
+ destinationMongodbReplicaSet = &shared.DestinationMongodbReplicaSet{
Instance: instance1,
ReplicaSet: replicaSet,
ServerAddresses: serverAddresses,
}
}
- if destinationMongodbMongoDbInstanceTypeReplicaSet != nil {
+ if destinationMongodbReplicaSet != nil {
instanceType = &shared.DestinationMongodbMongoDbInstanceType{
- DestinationMongodbMongoDbInstanceTypeReplicaSet: destinationMongodbMongoDbInstanceTypeReplicaSet,
+ DestinationMongodbReplicaSet: destinationMongodbReplicaSet,
}
}
- var destinationMongodbMongoDBInstanceTypeMongoDBAtlas *shared.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas
- if r.Configuration.InstanceType.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas != nil {
- clusterURL := r.Configuration.InstanceType.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas.ClusterURL.ValueString()
- instance2 := shared.DestinationMongodbMongoDBInstanceTypeMongoDBAtlasInstance(r.Configuration.InstanceType.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas.Instance.ValueString())
- destinationMongodbMongoDBInstanceTypeMongoDBAtlas = &shared.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas{
+ var destinationMongodbMongoDBAtlas *shared.DestinationMongodbMongoDBAtlas
+ if r.Configuration.InstanceType.MongoDBAtlas != nil {
+ clusterURL := r.Configuration.InstanceType.MongoDBAtlas.ClusterURL.ValueString()
+ instance2 := new(shared.DestinationMongodbSchemasInstanceTypeInstance)
+ if !r.Configuration.InstanceType.MongoDBAtlas.Instance.IsUnknown() && !r.Configuration.InstanceType.MongoDBAtlas.Instance.IsNull() {
+ *instance2 = shared.DestinationMongodbSchemasInstanceTypeInstance(r.Configuration.InstanceType.MongoDBAtlas.Instance.ValueString())
+ } else {
+ instance2 = nil
+ }
+ destinationMongodbMongoDBAtlas = &shared.DestinationMongodbMongoDBAtlas{
ClusterURL: clusterURL,
Instance: instance2,
}
}
- if destinationMongodbMongoDBInstanceTypeMongoDBAtlas != nil {
+ if destinationMongodbMongoDBAtlas != nil {
instanceType = &shared.DestinationMongodbMongoDbInstanceType{
- DestinationMongodbMongoDBInstanceTypeMongoDBAtlas: destinationMongodbMongoDBInstanceTypeMongoDBAtlas,
+ DestinationMongodbMongoDBAtlas: destinationMongodbMongoDBAtlas,
}
}
}
var tunnelMethod *shared.DestinationMongodbSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationMongodbSSHTunnelMethodNoTunnel *shared.DestinationMongodbSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationMongodbSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationMongodbSSHTunnelMethodNoTunnel = &shared.DestinationMongodbSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationMongodbNoTunnel *shared.DestinationMongodbNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationMongodbNoTunnel = &shared.DestinationMongodbNoTunnel{}
}
- if destinationMongodbSSHTunnelMethodNoTunnel != nil {
+ if destinationMongodbNoTunnel != nil {
tunnelMethod = &shared.DestinationMongodbSSHTunnelMethod{
- DestinationMongodbSSHTunnelMethodNoTunnel: destinationMongodbSSHTunnelMethodNoTunnel,
+ DestinationMongodbNoTunnel: destinationMongodbNoTunnel,
}
}
- var destinationMongodbSSHTunnelMethodSSHKeyAuthentication *shared.DestinationMongodbSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationMongodbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationMongodbSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationMongodbSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationMongodbSSHKeyAuthentication *shared.DestinationMongodbSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationMongodbSSHKeyAuthentication = &shared.DestinationMongodbSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationMongodbSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationMongodbSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationMongodbSSHTunnelMethod{
- DestinationMongodbSSHTunnelMethodSSHKeyAuthentication: destinationMongodbSSHTunnelMethodSSHKeyAuthentication,
+ DestinationMongodbSSHKeyAuthentication: destinationMongodbSSHKeyAuthentication,
}
}
- var destinationMongodbSSHTunnelMethodPasswordAuthentication *shared.DestinationMongodbSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationMongodbSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationMongodbSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationMongodbSSHTunnelMethodPasswordAuthentication = &shared.DestinationMongodbSSHTunnelMethodPasswordAuthentication{
+ var destinationMongodbPasswordAuthentication *shared.DestinationMongodbPasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationMongodbPasswordAuthentication = &shared.DestinationMongodbPasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationMongodbSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationMongodbPasswordAuthentication != nil {
tunnelMethod = &shared.DestinationMongodbSSHTunnelMethod{
- DestinationMongodbSSHTunnelMethodPasswordAuthentication: destinationMongodbSSHTunnelMethodPasswordAuthentication,
+ DestinationMongodbPasswordAuthentication: destinationMongodbPasswordAuthentication,
}
}
}
configuration := shared.DestinationMongodb{
- AuthType: authType,
- Database: database,
- DestinationType: destinationType,
- InstanceType: instanceType,
- TunnelMethod: tunnelMethod,
+ AuthType: authType,
+ Database: database,
+ InstanceType: instanceType,
+ TunnelMethod: tunnelMethod,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationMongodbCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -171,142 +194,160 @@ func (r *DestinationMongodbResourceModel) ToGetSDKType() *shared.DestinationMong
}
func (r *DestinationMongodbResourceModel) ToUpdateSDKType() *shared.DestinationMongodbPutRequest {
- var authType shared.DestinationMongodbUpdateAuthorizationType
- var destinationMongodbUpdateAuthorizationTypeNone *shared.DestinationMongodbUpdateAuthorizationTypeNone
- if r.Configuration.AuthType.DestinationMongodbUpdateAuthorizationTypeNone != nil {
- authorization := shared.DestinationMongodbUpdateAuthorizationTypeNoneAuthorization(r.Configuration.AuthType.DestinationMongodbUpdateAuthorizationTypeNone.Authorization.ValueString())
- destinationMongodbUpdateAuthorizationTypeNone = &shared.DestinationMongodbUpdateAuthorizationTypeNone{
- Authorization: authorization,
- }
+ var authType shared.AuthorizationType
+ var none *shared.None
+ if r.Configuration.AuthType.None != nil {
+ none = &shared.None{}
}
- if destinationMongodbUpdateAuthorizationTypeNone != nil {
- authType = shared.DestinationMongodbUpdateAuthorizationType{
- DestinationMongodbUpdateAuthorizationTypeNone: destinationMongodbUpdateAuthorizationTypeNone,
+ if none != nil {
+ authType = shared.AuthorizationType{
+ None: none,
}
}
- var destinationMongodbUpdateAuthorizationTypeLoginPassword *shared.DestinationMongodbUpdateAuthorizationTypeLoginPassword
- if r.Configuration.AuthType.DestinationMongodbUpdateAuthorizationTypeLoginPassword != nil {
- authorization1 := shared.DestinationMongodbUpdateAuthorizationTypeLoginPasswordAuthorization(r.Configuration.AuthType.DestinationMongodbUpdateAuthorizationTypeLoginPassword.Authorization.ValueString())
- password := r.Configuration.AuthType.DestinationMongodbUpdateAuthorizationTypeLoginPassword.Password.ValueString()
- username := r.Configuration.AuthType.DestinationMongodbUpdateAuthorizationTypeLoginPassword.Username.ValueString()
- destinationMongodbUpdateAuthorizationTypeLoginPassword = &shared.DestinationMongodbUpdateAuthorizationTypeLoginPassword{
- Authorization: authorization1,
- Password: password,
- Username: username,
+ var loginPassword *shared.LoginPassword
+ if r.Configuration.AuthType.LoginPassword != nil {
+ password := r.Configuration.AuthType.LoginPassword.Password.ValueString()
+ username := r.Configuration.AuthType.LoginPassword.Username.ValueString()
+ loginPassword = &shared.LoginPassword{
+ Password: password,
+ Username: username,
}
}
- if destinationMongodbUpdateAuthorizationTypeLoginPassword != nil {
- authType = shared.DestinationMongodbUpdateAuthorizationType{
- DestinationMongodbUpdateAuthorizationTypeLoginPassword: destinationMongodbUpdateAuthorizationTypeLoginPassword,
+ if loginPassword != nil {
+ authType = shared.AuthorizationType{
+ LoginPassword: loginPassword,
}
}
database := r.Configuration.Database.ValueString()
- var instanceType *shared.DestinationMongodbUpdateMongoDbInstanceType
+ var instanceType *shared.MongoDbInstanceType
if r.Configuration.InstanceType != nil {
- var destinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance *shared.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance
- if r.Configuration.InstanceType.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
- host := r.Configuration.InstanceType.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance.Host.ValueString()
- instance := shared.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance(r.Configuration.InstanceType.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance.Instance.ValueString())
- port := r.Configuration.InstanceType.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance.Port.ValueInt64()
- destinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance = &shared.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance{
+ var standaloneMongoDbInstance *shared.StandaloneMongoDbInstance
+ if r.Configuration.InstanceType.StandaloneMongoDbInstance != nil {
+ host := r.Configuration.InstanceType.StandaloneMongoDbInstance.Host.ValueString()
+ instance := new(shared.Instance)
+ if !r.Configuration.InstanceType.StandaloneMongoDbInstance.Instance.IsUnknown() && !r.Configuration.InstanceType.StandaloneMongoDbInstance.Instance.IsNull() {
+ *instance = shared.Instance(r.Configuration.InstanceType.StandaloneMongoDbInstance.Instance.ValueString())
+ } else {
+ instance = nil
+ }
+ port := new(int64)
+ if !r.Configuration.InstanceType.StandaloneMongoDbInstance.Port.IsUnknown() && !r.Configuration.InstanceType.StandaloneMongoDbInstance.Port.IsNull() {
+ *port = r.Configuration.InstanceType.StandaloneMongoDbInstance.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ standaloneMongoDbInstance = &shared.StandaloneMongoDbInstance{
Host: host,
Instance: instance,
Port: port,
}
}
- if destinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
- instanceType = &shared.DestinationMongodbUpdateMongoDbInstanceType{
- DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance: destinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance,
+ if standaloneMongoDbInstance != nil {
+ instanceType = &shared.MongoDbInstanceType{
+ StandaloneMongoDbInstance: standaloneMongoDbInstance,
}
}
- var destinationMongodbUpdateMongoDbInstanceTypeReplicaSet *shared.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet
- if r.Configuration.InstanceType.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet != nil {
- instance1 := shared.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSetInstance(r.Configuration.InstanceType.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet.Instance.ValueString())
- replicaSet := new(string)
- if !r.Configuration.InstanceType.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet.ReplicaSet.IsUnknown() && !r.Configuration.InstanceType.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet.ReplicaSet.IsNull() {
- *replicaSet = r.Configuration.InstanceType.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet.ReplicaSet.ValueString()
+ var replicaSet *shared.ReplicaSet
+ if r.Configuration.InstanceType.ReplicaSet != nil {
+ instance1 := new(shared.DestinationMongodbUpdateInstance)
+ if !r.Configuration.InstanceType.ReplicaSet.Instance.IsUnknown() && !r.Configuration.InstanceType.ReplicaSet.Instance.IsNull() {
+ *instance1 = shared.DestinationMongodbUpdateInstance(r.Configuration.InstanceType.ReplicaSet.Instance.ValueString())
} else {
- replicaSet = nil
+ instance1 = nil
+ }
+ replicaSet1 := new(string)
+ if !r.Configuration.InstanceType.ReplicaSet.ReplicaSet.IsUnknown() && !r.Configuration.InstanceType.ReplicaSet.ReplicaSet.IsNull() {
+ *replicaSet1 = r.Configuration.InstanceType.ReplicaSet.ReplicaSet.ValueString()
+ } else {
+ replicaSet1 = nil
}
- serverAddresses := r.Configuration.InstanceType.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet.ServerAddresses.ValueString()
- destinationMongodbUpdateMongoDbInstanceTypeReplicaSet = &shared.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet{
+ serverAddresses := r.Configuration.InstanceType.ReplicaSet.ServerAddresses.ValueString()
+ replicaSet = &shared.ReplicaSet{
Instance: instance1,
- ReplicaSet: replicaSet,
+ ReplicaSet: replicaSet1,
ServerAddresses: serverAddresses,
}
}
- if destinationMongodbUpdateMongoDbInstanceTypeReplicaSet != nil {
- instanceType = &shared.DestinationMongodbUpdateMongoDbInstanceType{
- DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet: destinationMongodbUpdateMongoDbInstanceTypeReplicaSet,
+ if replicaSet != nil {
+ instanceType = &shared.MongoDbInstanceType{
+ ReplicaSet: replicaSet,
}
}
- var destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas *shared.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas
- if r.Configuration.InstanceType.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas != nil {
- clusterURL := r.Configuration.InstanceType.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas.ClusterURL.ValueString()
- instance2 := shared.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance(r.Configuration.InstanceType.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas.Instance.ValueString())
- destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas = &shared.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas{
+ var mongoDBAtlas *shared.MongoDBAtlas
+ if r.Configuration.InstanceType.MongoDBAtlas != nil {
+ clusterURL := r.Configuration.InstanceType.MongoDBAtlas.ClusterURL.ValueString()
+ instance2 := new(shared.DestinationMongodbUpdateSchemasInstance)
+ if !r.Configuration.InstanceType.MongoDBAtlas.Instance.IsUnknown() && !r.Configuration.InstanceType.MongoDBAtlas.Instance.IsNull() {
+ *instance2 = shared.DestinationMongodbUpdateSchemasInstance(r.Configuration.InstanceType.MongoDBAtlas.Instance.ValueString())
+ } else {
+ instance2 = nil
+ }
+ mongoDBAtlas = &shared.MongoDBAtlas{
ClusterURL: clusterURL,
Instance: instance2,
}
}
- if destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas != nil {
- instanceType = &shared.DestinationMongodbUpdateMongoDbInstanceType{
- DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas: destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas,
+ if mongoDBAtlas != nil {
+ instanceType = &shared.MongoDbInstanceType{
+ MongoDBAtlas: mongoDBAtlas,
}
}
}
var tunnelMethod *shared.DestinationMongodbUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationMongodbUpdateSSHTunnelMethodNoTunnel *shared.DestinationMongodbUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationMongodbUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationMongodbUpdateSSHTunnelMethodNoTunnel = &shared.DestinationMongodbUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationMongodbUpdateNoTunnel *shared.DestinationMongodbUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationMongodbUpdateNoTunnel = &shared.DestinationMongodbUpdateNoTunnel{}
}
- if destinationMongodbUpdateSSHTunnelMethodNoTunnel != nil {
+ if destinationMongodbUpdateNoTunnel != nil {
tunnelMethod = &shared.DestinationMongodbUpdateSSHTunnelMethod{
- DestinationMongodbUpdateSSHTunnelMethodNoTunnel: destinationMongodbUpdateSSHTunnelMethodNoTunnel,
+ DestinationMongodbUpdateNoTunnel: destinationMongodbUpdateNoTunnel,
}
}
- var destinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication *shared.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationMongodbUpdateSSHKeyAuthentication *shared.DestinationMongodbUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationMongodbUpdateSSHKeyAuthentication = &shared.DestinationMongodbUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationMongodbUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationMongodbUpdateSSHTunnelMethod{
- DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication: destinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationMongodbUpdateSSHKeyAuthentication: destinationMongodbUpdateSSHKeyAuthentication,
}
}
- var destinationMongodbUpdateSSHTunnelMethodPasswordAuthentication *shared.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationMongodbUpdateSSHTunnelMethodPasswordAuthentication = &shared.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication{
+ var destinationMongodbUpdatePasswordAuthentication *shared.DestinationMongodbUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationMongodbUpdatePasswordAuthentication = &shared.DestinationMongodbUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationMongodbUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationMongodbUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.DestinationMongodbUpdateSSHTunnelMethod{
- DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication: destinationMongodbUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationMongodbUpdatePasswordAuthentication: destinationMongodbUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/destination_mssql_data_source.go b/internal/provider/destination_mssql_data_source.go
old mode 100755
new mode 100644
index f6bf10569..b1f1f8171
--- a/internal/provider/destination_mssql_data_source.go
+++ b/internal/provider/destination_mssql_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationMssqlDataSource struct {
// DestinationMssqlDataSourceModel describes the data model.
type DestinationMssqlDataSourceModel struct {
- Configuration DestinationMssql `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,297 +47,17 @@ func (r *DestinationMssqlDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "DestinationMssql DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `The name of the MSSQL database.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mssql",
- ),
- },
- Description: `must be one of ["mssql"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The host name of the MSSQL database.`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `The password associated with this username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `The port of the MSSQL database.`,
- },
- "schema": schema.StringAttribute{
- Computed: true,
- Description: `The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".`,
- },
- "ssl_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_mssql_ssl_method_encrypted_trust_server_certificate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssl_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_trust_server_certificate",
- ),
- },
- Description: `must be one of ["encrypted_trust_server_certificate"]`,
- },
- },
- Description: `Use the certificate provided by the server without verification. (For testing purposes only!)`,
- },
- "destination_mssql_ssl_method_encrypted_verify_certificate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host_name_in_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Specifies the host name of the server. The value of this property must match the subject property of the certificate.`,
- },
- "ssl_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
- },
- Description: `Verify and use the certificate provided by the server.`,
- },
- "destination_mssql_update_ssl_method_encrypted_trust_server_certificate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssl_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_trust_server_certificate",
- ),
- },
- Description: `must be one of ["encrypted_trust_server_certificate"]`,
- },
- },
- Description: `Use the certificate provided by the server without verification. (For testing purposes only!)`,
- },
- "destination_mssql_update_ssl_method_encrypted_verify_certificate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host_name_in_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Specifies the host name of the server. The value of this property must match the subject property of the certificate.`,
- },
- "ssl_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
- },
- Description: `Verify and use the certificate provided by the server.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The encryption method which is used to communicate with the database.`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_mssql_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mssql_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mssql_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mssql_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mssql_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mssql_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `The username which is used to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_mssql_data_source_sdk.go b/internal/provider/destination_mssql_data_source_sdk.go
old mode 100755
new mode 100644
index c28610d7a..e700e6b31
--- a/internal/provider/destination_mssql_data_source_sdk.go
+++ b/internal/provider/destination_mssql_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationMssqlDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_mssql_resource.go b/internal/provider/destination_mssql_resource.go
old mode 100755
new mode 100644
index f7c626167..d8432cae3
--- a/internal/provider/destination_mssql_resource.go
+++ b/internal/provider/destination_mssql_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type DestinationMssqlResource struct {
// DestinationMssqlResourceModel describes the resource data model.
type DestinationMssqlResourceModel struct {
Configuration DestinationMssql `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -58,15 +59,6 @@ func (r *DestinationMssqlResource) Schema(ctx context.Context, req resource.Sche
Required: true,
Description: `The name of the MSSQL database.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mssql",
- ),
- },
- Description: `must be one of ["mssql"]`,
- },
"host": schema.StringAttribute{
Required: true,
Description: `The host name of the MSSQL database.`,
@@ -77,132 +69,62 @@ func (r *DestinationMssqlResource) Schema(ctx context.Context, req resource.Sche
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The password associated with this username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `The port of the MSSQL database.`,
+ Optional: true,
+ MarkdownDescription: `Default: 1433` + "\n" +
+ `The port of the MSSQL database.`,
},
"schema": schema.StringAttribute{
- Required: true,
- Description: `The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".`,
+ Optional: true,
+ MarkdownDescription: `Default: "public"` + "\n" +
+ `The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".`,
},
"ssl_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_mssql_ssl_method_encrypted_trust_server_certificate": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssl_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_trust_server_certificate",
- ),
- },
- Description: `must be one of ["encrypted_trust_server_certificate"]`,
- },
- },
- Description: `Use the certificate provided by the server without verification. (For testing purposes only!)`,
- },
- "destination_mssql_ssl_method_encrypted_verify_certificate": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "host_name_in_certificate": schema.StringAttribute{
- Optional: true,
- Description: `Specifies the host name of the server. The value of this property must match the subject property of the certificate.`,
- },
- "ssl_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
- },
- Description: `Verify and use the certificate provided by the server.`,
- },
- "destination_mssql_update_ssl_method_encrypted_trust_server_certificate": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssl_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_trust_server_certificate",
- ),
- },
- Description: `must be one of ["encrypted_trust_server_certificate"]`,
- },
- },
+ "encrypted_trust_server_certificate": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Use the certificate provided by the server without verification. (For testing purposes only!)`,
},
- "destination_mssql_update_ssl_method_encrypted_verify_certificate": schema.SingleNestedAttribute{
+ "encrypted_verify_certificate": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"host_name_in_certificate": schema.StringAttribute{
Optional: true,
Description: `Specifies the host name of the server. The value of this property must match the subject property of the certificate.`,
},
- "ssl_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
},
Description: `Verify and use the certificate provided by the server.`,
},
},
+ Description: `The encryption method which is used to communicate with the database.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `The encryption method which is used to communicate with the database.`,
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_mssql_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_mssql_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -210,115 +132,28 @@ func (r *DestinationMssqlResource) Schema(ctx context.Context, req resource.Sche
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_mssql_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mssql_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mssql_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mssql_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -328,10 +163,10 @@ func (r *DestinationMssqlResource) Schema(ctx context.Context, req resource.Sche
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -339,6 +174,13 @@ func (r *DestinationMssqlResource) Schema(ctx context.Context, req resource.Sche
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -355,7 +197,8 @@ func (r *DestinationMssqlResource) Schema(ctx context.Context, req resource.Sche
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -405,7 +248,7 @@ func (r *DestinationMssqlResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationMssql(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -581,5 +424,5 @@ func (r *DestinationMssqlResource) Delete(ctx context.Context, req resource.Dele
}
func (r *DestinationMssqlResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_mssql_resource_sdk.go b/internal/provider/destination_mssql_resource_sdk.go
old mode 100755
new mode 100644
index c34683962..3c69f1c2a
--- a/internal/provider/destination_mssql_resource_sdk.go
+++ b/internal/provider/destination_mssql_resource_sdk.go
@@ -3,13 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationMssqlResourceModel) ToCreateSDKType() *shared.DestinationMssqlCreateRequest {
database := r.Configuration.Database.ValueString()
- destinationType := shared.DestinationMssqlMssql(r.Configuration.DestinationType.ValueString())
host := r.Configuration.Host.ValueString()
jdbcURLParams := new(string)
if !r.Configuration.JdbcURLParams.IsUnknown() && !r.Configuration.JdbcURLParams.IsNull() {
@@ -23,114 +22,128 @@ func (r *DestinationMssqlResourceModel) ToCreateSDKType() *shared.DestinationMss
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
- schema := r.Configuration.Schema.ValueString()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ schema := new(string)
+ if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() {
+ *schema = r.Configuration.Schema.ValueString()
+ } else {
+ schema = nil
+ }
var sslMethod *shared.DestinationMssqlSSLMethod
if r.Configuration.SslMethod != nil {
- var destinationMssqlSSLMethodEncryptedTrustServerCertificate *shared.DestinationMssqlSSLMethodEncryptedTrustServerCertificate
- if r.Configuration.SslMethod.DestinationMssqlSSLMethodEncryptedTrustServerCertificate != nil {
- sslMethod1 := shared.DestinationMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod(r.Configuration.SslMethod.DestinationMssqlSSLMethodEncryptedTrustServerCertificate.SslMethod.ValueString())
- destinationMssqlSSLMethodEncryptedTrustServerCertificate = &shared.DestinationMssqlSSLMethodEncryptedTrustServerCertificate{
- SslMethod: sslMethod1,
- }
+ var destinationMssqlEncryptedTrustServerCertificate *shared.DestinationMssqlEncryptedTrustServerCertificate
+ if r.Configuration.SslMethod.EncryptedTrustServerCertificate != nil {
+ destinationMssqlEncryptedTrustServerCertificate = &shared.DestinationMssqlEncryptedTrustServerCertificate{}
}
- if destinationMssqlSSLMethodEncryptedTrustServerCertificate != nil {
+ if destinationMssqlEncryptedTrustServerCertificate != nil {
sslMethod = &shared.DestinationMssqlSSLMethod{
- DestinationMssqlSSLMethodEncryptedTrustServerCertificate: destinationMssqlSSLMethodEncryptedTrustServerCertificate,
+ DestinationMssqlEncryptedTrustServerCertificate: destinationMssqlEncryptedTrustServerCertificate,
}
}
- var destinationMssqlSSLMethodEncryptedVerifyCertificate *shared.DestinationMssqlSSLMethodEncryptedVerifyCertificate
- if r.Configuration.SslMethod.DestinationMssqlSSLMethodEncryptedVerifyCertificate != nil {
+ var destinationMssqlEncryptedVerifyCertificate *shared.DestinationMssqlEncryptedVerifyCertificate
+ if r.Configuration.SslMethod.EncryptedVerifyCertificate != nil {
hostNameInCertificate := new(string)
- if !r.Configuration.SslMethod.DestinationMssqlSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.IsUnknown() && !r.Configuration.SslMethod.DestinationMssqlSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.IsNull() {
- *hostNameInCertificate = r.Configuration.SslMethod.DestinationMssqlSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.ValueString()
+ if !r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.IsUnknown() && !r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.IsNull() {
+ *hostNameInCertificate = r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.ValueString()
} else {
hostNameInCertificate = nil
}
- sslMethod2 := shared.DestinationMssqlSSLMethodEncryptedVerifyCertificateSSLMethod(r.Configuration.SslMethod.DestinationMssqlSSLMethodEncryptedVerifyCertificate.SslMethod.ValueString())
- destinationMssqlSSLMethodEncryptedVerifyCertificate = &shared.DestinationMssqlSSLMethodEncryptedVerifyCertificate{
+ destinationMssqlEncryptedVerifyCertificate = &shared.DestinationMssqlEncryptedVerifyCertificate{
HostNameInCertificate: hostNameInCertificate,
- SslMethod: sslMethod2,
}
}
- if destinationMssqlSSLMethodEncryptedVerifyCertificate != nil {
+ if destinationMssqlEncryptedVerifyCertificate != nil {
sslMethod = &shared.DestinationMssqlSSLMethod{
- DestinationMssqlSSLMethodEncryptedVerifyCertificate: destinationMssqlSSLMethodEncryptedVerifyCertificate,
+ DestinationMssqlEncryptedVerifyCertificate: destinationMssqlEncryptedVerifyCertificate,
}
}
}
var tunnelMethod *shared.DestinationMssqlSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationMssqlSSHTunnelMethodNoTunnel *shared.DestinationMssqlSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationMssqlSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationMssqlSSHTunnelMethodNoTunnel = &shared.DestinationMssqlSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationMssqlNoTunnel *shared.DestinationMssqlNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationMssqlNoTunnel = &shared.DestinationMssqlNoTunnel{}
}
- if destinationMssqlSSHTunnelMethodNoTunnel != nil {
+ if destinationMssqlNoTunnel != nil {
tunnelMethod = &shared.DestinationMssqlSSHTunnelMethod{
- DestinationMssqlSSHTunnelMethodNoTunnel: destinationMssqlSSHTunnelMethodNoTunnel,
+ DestinationMssqlNoTunnel: destinationMssqlNoTunnel,
}
}
- var destinationMssqlSSHTunnelMethodSSHKeyAuthentication *shared.DestinationMssqlSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationMssqlSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationMssqlSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationMssqlSSHKeyAuthentication *shared.DestinationMssqlSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationMssqlSSHKeyAuthentication = &shared.DestinationMssqlSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationMssqlSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationMssqlSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationMssqlSSHTunnelMethod{
- DestinationMssqlSSHTunnelMethodSSHKeyAuthentication: destinationMssqlSSHTunnelMethodSSHKeyAuthentication,
+ DestinationMssqlSSHKeyAuthentication: destinationMssqlSSHKeyAuthentication,
}
}
- var destinationMssqlSSHTunnelMethodPasswordAuthentication *shared.DestinationMssqlSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationMssqlSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationMssqlSSHTunnelMethodPasswordAuthentication = &shared.DestinationMssqlSSHTunnelMethodPasswordAuthentication{
+ var destinationMssqlPasswordAuthentication *shared.DestinationMssqlPasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationMssqlPasswordAuthentication = &shared.DestinationMssqlPasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationMssqlSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationMssqlPasswordAuthentication != nil {
tunnelMethod = &shared.DestinationMssqlSSHTunnelMethod{
- DestinationMssqlSSHTunnelMethodPasswordAuthentication: destinationMssqlSSHTunnelMethodPasswordAuthentication,
+ DestinationMssqlPasswordAuthentication: destinationMssqlPasswordAuthentication,
}
}
}
username := r.Configuration.Username.ValueString()
configuration := shared.DestinationMssql{
- Database: database,
- DestinationType: destinationType,
- Host: host,
- JdbcURLParams: jdbcURLParams,
- Password: password,
- Port: port,
- Schema: schema,
- SslMethod: sslMethod,
- TunnelMethod: tunnelMethod,
- Username: username,
+ Database: database,
+ Host: host,
+ JdbcURLParams: jdbcURLParams,
+ Password: password,
+ Port: port,
+ Schema: schema,
+ SslMethod: sslMethod,
+ TunnelMethod: tunnelMethod,
+ Username: username,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationMssqlCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -157,94 +170,102 @@ func (r *DestinationMssqlResourceModel) ToUpdateSDKType() *shared.DestinationMss
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
- schema := r.Configuration.Schema.ValueString()
- var sslMethod *shared.DestinationMssqlUpdateSSLMethod
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ schema := new(string)
+ if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() {
+ *schema = r.Configuration.Schema.ValueString()
+ } else {
+ schema = nil
+ }
+ var sslMethod *shared.SSLMethod
if r.Configuration.SslMethod != nil {
- var destinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate *shared.DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate
- if r.Configuration.SslMethod.DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate != nil {
- sslMethod1 := shared.DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod(r.Configuration.SslMethod.DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate.SslMethod.ValueString())
- destinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate = &shared.DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate{
- SslMethod: sslMethod1,
- }
+ var encryptedTrustServerCertificate *shared.EncryptedTrustServerCertificate
+ if r.Configuration.SslMethod.EncryptedTrustServerCertificate != nil {
+ encryptedTrustServerCertificate = &shared.EncryptedTrustServerCertificate{}
}
- if destinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate != nil {
- sslMethod = &shared.DestinationMssqlUpdateSSLMethod{
- DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate: destinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate,
+ if encryptedTrustServerCertificate != nil {
+ sslMethod = &shared.SSLMethod{
+ EncryptedTrustServerCertificate: encryptedTrustServerCertificate,
}
}
- var destinationMssqlUpdateSSLMethodEncryptedVerifyCertificate *shared.DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate
- if r.Configuration.SslMethod.DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate != nil {
+ var encryptedVerifyCertificate *shared.EncryptedVerifyCertificate
+ if r.Configuration.SslMethod.EncryptedVerifyCertificate != nil {
hostNameInCertificate := new(string)
- if !r.Configuration.SslMethod.DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.IsUnknown() && !r.Configuration.SslMethod.DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.IsNull() {
- *hostNameInCertificate = r.Configuration.SslMethod.DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.ValueString()
+ if !r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.IsUnknown() && !r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.IsNull() {
+ *hostNameInCertificate = r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.ValueString()
} else {
hostNameInCertificate = nil
}
- sslMethod2 := shared.DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod(r.Configuration.SslMethod.DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate.SslMethod.ValueString())
- destinationMssqlUpdateSSLMethodEncryptedVerifyCertificate = &shared.DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate{
+ encryptedVerifyCertificate = &shared.EncryptedVerifyCertificate{
HostNameInCertificate: hostNameInCertificate,
- SslMethod: sslMethod2,
}
}
- if destinationMssqlUpdateSSLMethodEncryptedVerifyCertificate != nil {
- sslMethod = &shared.DestinationMssqlUpdateSSLMethod{
- DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate: destinationMssqlUpdateSSLMethodEncryptedVerifyCertificate,
+ if encryptedVerifyCertificate != nil {
+ sslMethod = &shared.SSLMethod{
+ EncryptedVerifyCertificate: encryptedVerifyCertificate,
}
}
}
var tunnelMethod *shared.DestinationMssqlUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationMssqlUpdateSSHTunnelMethodNoTunnel *shared.DestinationMssqlUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationMssqlUpdateSSHTunnelMethodNoTunnel = &shared.DestinationMssqlUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationMssqlUpdateNoTunnel *shared.DestinationMssqlUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationMssqlUpdateNoTunnel = &shared.DestinationMssqlUpdateNoTunnel{}
}
- if destinationMssqlUpdateSSHTunnelMethodNoTunnel != nil {
+ if destinationMssqlUpdateNoTunnel != nil {
tunnelMethod = &shared.DestinationMssqlUpdateSSHTunnelMethod{
- DestinationMssqlUpdateSSHTunnelMethodNoTunnel: destinationMssqlUpdateSSHTunnelMethodNoTunnel,
+ DestinationMssqlUpdateNoTunnel: destinationMssqlUpdateNoTunnel,
}
}
- var destinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication *shared.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationMssqlUpdateSSHKeyAuthentication *shared.DestinationMssqlUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationMssqlUpdateSSHKeyAuthentication = &shared.DestinationMssqlUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationMssqlUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationMssqlUpdateSSHTunnelMethod{
- DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication: destinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationMssqlUpdateSSHKeyAuthentication: destinationMssqlUpdateSSHKeyAuthentication,
}
}
- var destinationMssqlUpdateSSHTunnelMethodPasswordAuthentication *shared.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationMssqlUpdateSSHTunnelMethodPasswordAuthentication = &shared.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication{
+ var destinationMssqlUpdatePasswordAuthentication *shared.DestinationMssqlUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationMssqlUpdatePasswordAuthentication = &shared.DestinationMssqlUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationMssqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationMssqlUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.DestinationMssqlUpdateSSHTunnelMethod{
- DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication: destinationMssqlUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationMssqlUpdatePasswordAuthentication: destinationMssqlUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/destination_mysql_data_source.go b/internal/provider/destination_mysql_data_source.go
old mode 100755
new mode 100644
index 6e6512227..7af1df295
--- a/internal/provider/destination_mysql_data_source.go
+++ b/internal/provider/destination_mysql_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationMysqlDataSource struct {
// DestinationMysqlDataSourceModel describes the data model.
type DestinationMysqlDataSourceModel struct {
- Configuration DestinationMysql `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,216 +47,17 @@ func (r *DestinationMysqlDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "DestinationMysql DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `Name of the database.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mysql",
- ),
- },
- Description: `must be one of ["mysql"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the database.`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `Port of the database.`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_mysql_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mysql_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mysql_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mysql_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mysql_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mysql_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to use to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_mysql_data_source_sdk.go b/internal/provider/destination_mysql_data_source_sdk.go
old mode 100755
new mode 100644
index 9ef2d5458..67b79914f
--- a/internal/provider/destination_mysql_data_source_sdk.go
+++ b/internal/provider/destination_mysql_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationMysqlDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_mysql_resource.go b/internal/provider/destination_mysql_resource.go
old mode 100755
new mode 100644
index 67fbad8f9..f070bb3c3
--- a/internal/provider/destination_mysql_resource.go
+++ b/internal/provider/destination_mysql_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,11 +35,12 @@ type DestinationMysqlResource struct {
// DestinationMysqlResourceModel describes the resource data model.
type DestinationMysqlResourceModel struct {
- Configuration DestinationMysql `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- DestinationType types.String `tfsdk:"destination_type"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration DestinationClickhouse `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *DestinationMysqlResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -58,15 +59,6 @@ func (r *DestinationMysqlResource) Schema(ctx context.Context, req resource.Sche
Required: true,
Description: `Name of the database.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mysql",
- ),
- },
- Description: `must be one of ["mysql"]`,
- },
"host": schema.StringAttribute{
Required: true,
Description: `Hostname of the database.`,
@@ -77,131 +69,33 @@ func (r *DestinationMysqlResource) Schema(ctx context.Context, req resource.Sche
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password associated with the username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `Port of the database.`,
+ Optional: true,
+ MarkdownDescription: `Default: 3306` + "\n" +
+ `Port of the database.`,
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_mysql_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mysql_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mysql_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_mysql_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_mysql_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -209,35 +103,28 @@ func (r *DestinationMysqlResource) Schema(ctx context.Context, req resource.Sche
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_mysql_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -247,10 +134,10 @@ func (r *DestinationMysqlResource) Schema(ctx context.Context, req resource.Sche
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -258,6 +145,13 @@ func (r *DestinationMysqlResource) Schema(ctx context.Context, req resource.Sche
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -274,7 +168,8 @@ func (r *DestinationMysqlResource) Schema(ctx context.Context, req resource.Sche
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -324,7 +219,7 @@ func (r *DestinationMysqlResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationMysql(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -500,5 +395,5 @@ func (r *DestinationMysqlResource) Delete(ctx context.Context, req resource.Dele
}
func (r *DestinationMysqlResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_mysql_resource_sdk.go b/internal/provider/destination_mysql_resource_sdk.go
old mode 100755
new mode 100644
index 8e449d928..846666ca1
--- a/internal/provider/destination_mysql_resource_sdk.go
+++ b/internal/provider/destination_mysql_resource_sdk.go
@@ -3,13 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationMysqlResourceModel) ToCreateSDKType() *shared.DestinationMysqlCreateRequest {
database := r.Configuration.Database.ValueString()
- destinationType := shared.DestinationMysqlMysql(r.Configuration.DestinationType.ValueString())
host := r.Configuration.Host.ValueString()
jdbcURLParams := new(string)
if !r.Configuration.JdbcURLParams.IsUnknown() && !r.Configuration.JdbcURLParams.IsNull() {
@@ -23,77 +22,91 @@ func (r *DestinationMysqlResourceModel) ToCreateSDKType() *shared.DestinationMys
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var tunnelMethod *shared.DestinationMysqlSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationMysqlSSHTunnelMethodNoTunnel *shared.DestinationMysqlSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationMysqlSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationMysqlSSHTunnelMethodNoTunnel = &shared.DestinationMysqlSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationMysqlNoTunnel *shared.DestinationMysqlNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationMysqlNoTunnel = &shared.DestinationMysqlNoTunnel{}
}
- if destinationMysqlSSHTunnelMethodNoTunnel != nil {
+ if destinationMysqlNoTunnel != nil {
tunnelMethod = &shared.DestinationMysqlSSHTunnelMethod{
- DestinationMysqlSSHTunnelMethodNoTunnel: destinationMysqlSSHTunnelMethodNoTunnel,
+ DestinationMysqlNoTunnel: destinationMysqlNoTunnel,
}
}
- var destinationMysqlSSHTunnelMethodSSHKeyAuthentication *shared.DestinationMysqlSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationMysqlSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationMysqlSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationMysqlSSHKeyAuthentication *shared.DestinationMysqlSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationMysqlSSHKeyAuthentication = &shared.DestinationMysqlSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationMysqlSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationMysqlSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationMysqlSSHTunnelMethod{
- DestinationMysqlSSHTunnelMethodSSHKeyAuthentication: destinationMysqlSSHTunnelMethodSSHKeyAuthentication,
+ DestinationMysqlSSHKeyAuthentication: destinationMysqlSSHKeyAuthentication,
}
}
- var destinationMysqlSSHTunnelMethodPasswordAuthentication *shared.DestinationMysqlSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationMysqlSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationMysqlSSHTunnelMethodPasswordAuthentication = &shared.DestinationMysqlSSHTunnelMethodPasswordAuthentication{
+ var destinationMysqlPasswordAuthentication *shared.DestinationMysqlPasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationMysqlPasswordAuthentication = &shared.DestinationMysqlPasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationMysqlSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationMysqlPasswordAuthentication != nil {
tunnelMethod = &shared.DestinationMysqlSSHTunnelMethod{
- DestinationMysqlSSHTunnelMethodPasswordAuthentication: destinationMysqlSSHTunnelMethodPasswordAuthentication,
+ DestinationMysqlPasswordAuthentication: destinationMysqlPasswordAuthentication,
}
}
}
username := r.Configuration.Username.ValueString()
configuration := shared.DestinationMysql{
- Database: database,
- DestinationType: destinationType,
- Host: host,
- JdbcURLParams: jdbcURLParams,
- Password: password,
- Port: port,
- TunnelMethod: tunnelMethod,
- Username: username,
+ Database: database,
+ Host: host,
+ JdbcURLParams: jdbcURLParams,
+ Password: password,
+ Port: port,
+ TunnelMethod: tunnelMethod,
+ Username: username,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationMysqlCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -120,59 +133,67 @@ func (r *DestinationMysqlResourceModel) ToUpdateSDKType() *shared.DestinationMys
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var tunnelMethod *shared.DestinationMysqlUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationMysqlUpdateSSHTunnelMethodNoTunnel *shared.DestinationMysqlUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationMysqlUpdateSSHTunnelMethodNoTunnel = &shared.DestinationMysqlUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationMysqlUpdateNoTunnel *shared.DestinationMysqlUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationMysqlUpdateNoTunnel = &shared.DestinationMysqlUpdateNoTunnel{}
}
- if destinationMysqlUpdateSSHTunnelMethodNoTunnel != nil {
+ if destinationMysqlUpdateNoTunnel != nil {
tunnelMethod = &shared.DestinationMysqlUpdateSSHTunnelMethod{
- DestinationMysqlUpdateSSHTunnelMethodNoTunnel: destinationMysqlUpdateSSHTunnelMethodNoTunnel,
+ DestinationMysqlUpdateNoTunnel: destinationMysqlUpdateNoTunnel,
}
}
- var destinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication *shared.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationMysqlUpdateSSHKeyAuthentication *shared.DestinationMysqlUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationMysqlUpdateSSHKeyAuthentication = &shared.DestinationMysqlUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationMysqlUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationMysqlUpdateSSHTunnelMethod{
- DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication: destinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationMysqlUpdateSSHKeyAuthentication: destinationMysqlUpdateSSHKeyAuthentication,
}
}
- var destinationMysqlUpdateSSHTunnelMethodPasswordAuthentication *shared.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationMysqlUpdateSSHTunnelMethodPasswordAuthentication = &shared.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication{
+ var destinationMysqlUpdatePasswordAuthentication *shared.DestinationMysqlUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationMysqlUpdatePasswordAuthentication = &shared.DestinationMysqlUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationMysqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationMysqlUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.DestinationMysqlUpdateSSHTunnelMethod{
- DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication: destinationMysqlUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationMysqlUpdatePasswordAuthentication: destinationMysqlUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/destination_oracle_data_source.go b/internal/provider/destination_oracle_data_source.go
old mode 100755
new mode 100644
index 898bbf03e..c7cfa900a
--- a/internal/provider/destination_oracle_data_source.go
+++ b/internal/provider/destination_oracle_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationOracleDataSource struct {
// DestinationOracleDataSourceModel describes the data model.
type DestinationOracleDataSourceModel struct {
- Configuration DestinationOracle `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,220 +47,17 @@ func (r *DestinationOracleDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "DestinationOracle DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oracle",
- ),
- },
- Description: `must be one of ["oracle"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The hostname of the database.`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `The password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `The port of the database.`,
- },
- "schema": schema.StringAttribute{
- Computed: true,
- Description: `The default schema is used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. The usual value for this field is "airbyte". In Oracle, schemas and users are the same thing, so the "user" parameter is used as the login credentials and this is used for the default Airbyte message schema.`,
- },
- "sid": schema.StringAttribute{
- Computed: true,
- Description: `The System Identifier uniquely distinguishes the instance from any other instance on the same computer.`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_oracle_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_oracle_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_oracle_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_oracle_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_oracle_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_oracle_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `The username to access the database. This user must have CREATE USER privileges in the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_oracle_data_source_sdk.go b/internal/provider/destination_oracle_data_source_sdk.go
old mode 100755
new mode 100644
index ecef8f72c..1afc1e2c5
--- a/internal/provider/destination_oracle_data_source_sdk.go
+++ b/internal/provider/destination_oracle_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationOracleDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_oracle_resource.go b/internal/provider/destination_oracle_resource.go
old mode 100755
new mode 100644
index 6b97e80e7..d2b45e8e6
--- a/internal/provider/destination_oracle_resource.go
+++ b/internal/provider/destination_oracle_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type DestinationOracleResource struct {
// DestinationOracleResourceModel describes the resource data model.
type DestinationOracleResourceModel struct {
Configuration DestinationOracle `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -54,15 +55,6 @@ func (r *DestinationOracleResource) Schema(ctx context.Context, req resource.Sch
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oracle",
- ),
- },
- Description: `must be one of ["oracle"]`,
- },
"host": schema.StringAttribute{
Required: true,
Description: `The hostname of the database.`,
@@ -73,15 +65,18 @@ func (r *DestinationOracleResource) Schema(ctx context.Context, req resource.Sch
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The password associated with the username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `The port of the database.`,
+ Optional: true,
+ MarkdownDescription: `Default: 1521` + "\n" +
+ `The port of the database.`,
},
"schema": schema.StringAttribute{
- Optional: true,
- Description: `The default schema is used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. The usual value for this field is "airbyte". In Oracle, schemas and users are the same thing, so the "user" parameter is used as the login credentials and this is used for the default Airbyte message schema.`,
+ Optional: true,
+ MarkdownDescription: `Default: "airbyte"` + "\n" +
+ `The default schema is used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. The usual value for this field is "airbyte". In Oracle, schemas and users are the same thing, so the "user" parameter is used as the login credentials and this is used for the default Airbyte message schema.`,
},
"sid": schema.StringAttribute{
Required: true,
@@ -90,122 +85,22 @@ func (r *DestinationOracleResource) Schema(ctx context.Context, req resource.Sch
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_oracle_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_oracle_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_oracle_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_oracle_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_oracle_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -213,35 +108,28 @@ func (r *DestinationOracleResource) Schema(ctx context.Context, req resource.Sch
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_oracle_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -251,10 +139,10 @@ func (r *DestinationOracleResource) Schema(ctx context.Context, req resource.Sch
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -262,6 +150,13 @@ func (r *DestinationOracleResource) Schema(ctx context.Context, req resource.Sch
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -278,7 +173,8 @@ func (r *DestinationOracleResource) Schema(ctx context.Context, req resource.Sch
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -328,7 +224,7 @@ func (r *DestinationOracleResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationOracle(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -504,5 +400,5 @@ func (r *DestinationOracleResource) Delete(ctx context.Context, req resource.Del
}
func (r *DestinationOracleResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_oracle_resource_sdk.go b/internal/provider/destination_oracle_resource_sdk.go
old mode 100755
new mode 100644
index f48563fb0..ab5a5fad7
--- a/internal/provider/destination_oracle_resource_sdk.go
+++ b/internal/provider/destination_oracle_resource_sdk.go
@@ -3,12 +3,11 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationOracleResourceModel) ToCreateSDKType() *shared.DestinationOracleCreateRequest {
- destinationType := shared.DestinationOracleOracle(r.Configuration.DestinationType.ValueString())
host := r.Configuration.Host.ValueString()
jdbcURLParams := new(string)
if !r.Configuration.JdbcURLParams.IsUnknown() && !r.Configuration.JdbcURLParams.IsNull() {
@@ -22,7 +21,12 @@ func (r *DestinationOracleResourceModel) ToCreateSDKType() *shared.DestinationOr
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
schema := new(string)
if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() {
*schema = r.Configuration.Schema.ValueString()
@@ -32,75 +36,84 @@ func (r *DestinationOracleResourceModel) ToCreateSDKType() *shared.DestinationOr
sid := r.Configuration.Sid.ValueString()
var tunnelMethod *shared.DestinationOracleSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationOracleSSHTunnelMethodNoTunnel *shared.DestinationOracleSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationOracleSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationOracleSSHTunnelMethodNoTunnel = &shared.DestinationOracleSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationOracleNoTunnel *shared.DestinationOracleNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationOracleNoTunnel = &shared.DestinationOracleNoTunnel{}
}
- if destinationOracleSSHTunnelMethodNoTunnel != nil {
+ if destinationOracleNoTunnel != nil {
tunnelMethod = &shared.DestinationOracleSSHTunnelMethod{
- DestinationOracleSSHTunnelMethodNoTunnel: destinationOracleSSHTunnelMethodNoTunnel,
+ DestinationOracleNoTunnel: destinationOracleNoTunnel,
}
}
- var destinationOracleSSHTunnelMethodSSHKeyAuthentication *shared.DestinationOracleSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationOracleSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationOracleSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationOracleSSHKeyAuthentication *shared.DestinationOracleSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationOracleSSHKeyAuthentication = &shared.DestinationOracleSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationOracleSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationOracleSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationOracleSSHTunnelMethod{
- DestinationOracleSSHTunnelMethodSSHKeyAuthentication: destinationOracleSSHTunnelMethodSSHKeyAuthentication,
+ DestinationOracleSSHKeyAuthentication: destinationOracleSSHKeyAuthentication,
}
}
- var destinationOracleSSHTunnelMethodPasswordAuthentication *shared.DestinationOracleSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationOracleSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationOracleSSHTunnelMethodPasswordAuthentication = &shared.DestinationOracleSSHTunnelMethodPasswordAuthentication{
+ var destinationOraclePasswordAuthentication *shared.DestinationOraclePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationOraclePasswordAuthentication = &shared.DestinationOraclePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationOracleSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationOraclePasswordAuthentication != nil {
tunnelMethod = &shared.DestinationOracleSSHTunnelMethod{
- DestinationOracleSSHTunnelMethodPasswordAuthentication: destinationOracleSSHTunnelMethodPasswordAuthentication,
+ DestinationOraclePasswordAuthentication: destinationOraclePasswordAuthentication,
}
}
}
username := r.Configuration.Username.ValueString()
configuration := shared.DestinationOracle{
- DestinationType: destinationType,
- Host: host,
- JdbcURLParams: jdbcURLParams,
- Password: password,
- Port: port,
- Schema: schema,
- Sid: sid,
- TunnelMethod: tunnelMethod,
- Username: username,
+ Host: host,
+ JdbcURLParams: jdbcURLParams,
+ Password: password,
+ Port: port,
+ Schema: schema,
+ Sid: sid,
+ TunnelMethod: tunnelMethod,
+ Username: username,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationOracleCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -126,7 +139,12 @@ func (r *DestinationOracleResourceModel) ToUpdateSDKType() *shared.DestinationOr
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
schema := new(string)
if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() {
*schema = r.Configuration.Schema.ValueString()
@@ -136,56 +154,59 @@ func (r *DestinationOracleResourceModel) ToUpdateSDKType() *shared.DestinationOr
sid := r.Configuration.Sid.ValueString()
var tunnelMethod *shared.DestinationOracleUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationOracleUpdateSSHTunnelMethodNoTunnel *shared.DestinationOracleUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationOracleUpdateSSHTunnelMethodNoTunnel = &shared.DestinationOracleUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationOracleUpdateNoTunnel *shared.DestinationOracleUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationOracleUpdateNoTunnel = &shared.DestinationOracleUpdateNoTunnel{}
}
- if destinationOracleUpdateSSHTunnelMethodNoTunnel != nil {
+ if destinationOracleUpdateNoTunnel != nil {
tunnelMethod = &shared.DestinationOracleUpdateSSHTunnelMethod{
- DestinationOracleUpdateSSHTunnelMethodNoTunnel: destinationOracleUpdateSSHTunnelMethodNoTunnel,
+ DestinationOracleUpdateNoTunnel: destinationOracleUpdateNoTunnel,
}
}
- var destinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication *shared.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationOracleUpdateSSHKeyAuthentication *shared.DestinationOracleUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationOracleUpdateSSHKeyAuthentication = &shared.DestinationOracleUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationOracleUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationOracleUpdateSSHTunnelMethod{
- DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication: destinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationOracleUpdateSSHKeyAuthentication: destinationOracleUpdateSSHKeyAuthentication,
}
}
- var destinationOracleUpdateSSHTunnelMethodPasswordAuthentication *shared.DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationOracleUpdateSSHTunnelMethodPasswordAuthentication = &shared.DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication{
+ var destinationOracleUpdatePasswordAuthentication *shared.DestinationOracleUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationOracleUpdatePasswordAuthentication = &shared.DestinationOracleUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationOracleUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationOracleUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.DestinationOracleUpdateSSHTunnelMethod{
- DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication: destinationOracleUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationOracleUpdatePasswordAuthentication: destinationOracleUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/destination_pinecone_data_source.go b/internal/provider/destination_pinecone_data_source.go
old mode 100755
new mode 100644
index fc6ff453c..79b0fb798
--- a/internal/provider/destination_pinecone_data_source.go
+++ b/internal/provider/destination_pinecone_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationPineconeDataSource struct {
// DestinationPineconeDataSourceModel describes the data model.
type DestinationPineconeDataSourceModel struct {
- Configuration DestinationPinecone `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,174 +47,17 @@ func (r *DestinationPineconeDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "DestinationPinecone DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pinecone",
- ),
- },
- Description: `must be one of ["pinecone"]`,
- },
- "embedding": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_pinecone_embedding_cohere": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "cohere_key": schema.StringAttribute{
- Computed: true,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "cohere",
- ),
- },
- Description: `must be one of ["cohere"]`,
- },
- },
- Description: `Use the Cohere API to embed text.`,
- },
- "destination_pinecone_embedding_fake": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
- },
- },
- Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
- },
- "destination_pinecone_embedding_open_ai": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
- "openai_key": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
- },
- "destination_pinecone_update_embedding_cohere": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "cohere_key": schema.StringAttribute{
- Computed: true,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "cohere",
- ),
- },
- Description: `must be one of ["cohere"]`,
- },
- },
- Description: `Use the Cohere API to embed text.`,
- },
- "destination_pinecone_update_embedding_fake": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
- },
- },
- Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
- },
- "destination_pinecone_update_embedding_open_ai": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
- "openai_key": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Embedding configuration`,
- },
- "indexing": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "index": schema.StringAttribute{
- Computed: true,
- Description: `Pinecone index to use`,
- },
- "pinecone_environment": schema.StringAttribute{
- Computed: true,
- Description: `Pinecone environment to use`,
- },
- "pinecone_key": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Pinecone is a popular vector store that can be used to store and retrieve embeddings.`,
- },
- "processing": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "chunk_overlap": schema.Int64Attribute{
- Computed: true,
- Description: `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`,
- },
- "chunk_size": schema.Int64Attribute{
- Computed: true,
- Description: `Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)`,
- },
- "metadata_fields": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.`,
- },
- "text_fields": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array.`,
- },
- },
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_pinecone_data_source_sdk.go b/internal/provider/destination_pinecone_data_source_sdk.go
old mode 100755
new mode 100644
index a382d69e4..5bf2db915
--- a/internal/provider/destination_pinecone_data_source_sdk.go
+++ b/internal/provider/destination_pinecone_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationPineconeDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_pinecone_resource.go b/internal/provider/destination_pinecone_resource.go
old mode 100755
new mode 100644
index e64f09126..8b798e18a
--- a/internal/provider/destination_pinecone_resource.go
+++ b/internal/provider/destination_pinecone_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationPineconeResource struct {
// DestinationPineconeResourceModel describes the resource data model.
type DestinationPineconeResourceModel struct {
Configuration DestinationPinecone `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -54,139 +56,98 @@ func (r *DestinationPineconeResource) Schema(ctx context.Context, req resource.S
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pinecone",
- ),
- },
- Description: `must be one of ["pinecone"]`,
- },
"embedding": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_pinecone_embedding_cohere": schema.SingleNestedAttribute{
+ "azure_open_ai": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "cohere_key": schema.StringAttribute{
- Required: true,
+ "api_base": schema.StringAttribute{
+ Required: true,
+ Description: `The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
},
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "cohere",
- ),
- },
- Description: `must be one of ["cohere"]`,
+ "deployment": schema.StringAttribute{
+ Required: true,
+ Description: `The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
+ },
+ "openai_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ Description: `The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
},
},
- Description: `Use the Cohere API to embed text.`,
+ Description: `Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
},
- "destination_pinecone_embedding_fake": schema.SingleNestedAttribute{
+ "cohere": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
+ "cohere_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
},
},
+ Description: `Use the Cohere API to embed text.`,
+ },
+ "fake": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
},
- "destination_pinecone_embedding_open_ai": schema.SingleNestedAttribute{
+ "open_ai": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
"openai_key": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Sensitive: true,
},
},
Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
},
- "destination_pinecone_update_embedding_cohere": schema.SingleNestedAttribute{
+ "open_ai_compatible": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "cohere_key": schema.StringAttribute{
- Required: true,
+ "api_key": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `Default: ""`,
},
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "cohere",
- ),
- },
- Description: `must be one of ["cohere"]`,
+ "base_url": schema.StringAttribute{
+ Required: true,
+ Description: `The base URL for your OpenAI-compatible service`,
},
- },
- Description: `Use the Cohere API to embed text.`,
- },
- "destination_pinecone_update_embedding_fake": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fake",
- ),
- },
- Description: `must be one of ["fake"]`,
+ "dimensions": schema.Int64Attribute{
+ Required: true,
+ Description: `The number of dimensions the embedding model is generating`,
},
- },
- Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
- },
- "destination_pinecone_update_embedding_open_ai": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
+ "model_name": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "openai",
- ),
- },
- Description: `must be one of ["openai"]`,
- },
- "openai_key": schema.StringAttribute{
- Required: true,
+ MarkdownDescription: `Default: "text-embedding-ada-002"` + "\n" +
+ `The name of the model to use for embedding`,
},
},
- Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
+ Description: `Use a service that's compatible with the OpenAI API to embed text.`,
},
},
+ Description: `Embedding configuration`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Embedding configuration`,
},
"indexing": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
"index": schema.StringAttribute{
Required: true,
- Description: `Pinecone index to use`,
+ Description: `Pinecone index in your project to load data into`,
},
"pinecone_environment": schema.StringAttribute{
Required: true,
- Description: `Pinecone environment to use`,
+ Description: `Pinecone Cloud environment to use`,
},
"pinecone_key": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Sensitive: true,
+ Description: `The Pinecone API key to use matching the environment (copy from Pinecone console)`,
},
},
Description: `Pinecone is a popular vector store that can be used to store and retrieve embeddings.`,
@@ -195,13 +156,30 @@ func (r *DestinationPineconeResource) Schema(ctx context.Context, req resource.S
Required: true,
Attributes: map[string]schema.Attribute{
"chunk_overlap": schema.Int64Attribute{
- Optional: true,
- Description: `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`,
},
"chunk_size": schema.Int64Attribute{
Required: true,
Description: `Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)`,
},
+ "field_name_mappings": schema.ListNestedAttribute{
+ Optional: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "from_field": schema.StringAttribute{
+ Required: true,
+ Description: `The field name in the source`,
+ },
+ "to_field": schema.StringAttribute{
+ Required: true,
+ Description: `The field name to use in the destination`,
+ },
+ },
+ },
+ Description: `List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.`,
+ },
"metadata_fields": schema.ListAttribute{
Optional: true,
ElementType: types.StringType,
@@ -212,10 +190,84 @@ func (r *DestinationPineconeResource) Schema(ctx context.Context, req resource.S
ElementType: types.StringType,
Description: `List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array.`,
},
+ "text_splitter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "by_markdown_header": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "split_level": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 1` + "\n" +
+ `Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points`,
+ },
+ },
+ Description: `Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.`,
+ },
+ "by_programming_language": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "language": schema.StringAttribute{
+ Required: true,
+ MarkdownDescription: `must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]` + "\n" +
+ `Split code in suitable places based on the programming language`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "cpp",
+ "go",
+ "java",
+ "js",
+ "php",
+ "proto",
+ "python",
+ "rst",
+ "ruby",
+ "rust",
+ "scala",
+ "swift",
+ "markdown",
+ "latex",
+ "html",
+ "sol",
+ ),
+ },
+ },
+ },
+ Description: `Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.`,
+ },
+ "by_separator": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "keep_separator": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether to keep the separator in the resulting chunks`,
+ },
+ "separators": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".`,
+ },
+ },
+ Description: `Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.`,
+ },
+ },
+ Description: `Split text fields into chunks based on the specified method.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
},
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -232,7 +284,8 @@ func (r *DestinationPineconeResource) Schema(ctx context.Context, req resource.S
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -282,7 +335,7 @@ func (r *DestinationPineconeResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationPinecone(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -458,5 +511,5 @@ func (r *DestinationPineconeResource) Delete(ctx context.Context, req resource.D
}
func (r *DestinationPineconeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_pinecone_resource_sdk.go b/internal/provider/destination_pinecone_resource_sdk.go
old mode 100755
new mode 100644
index 5769344a7..8e7f94476
--- a/internal/provider/destination_pinecone_resource_sdk.go
+++ b/internal/provider/destination_pinecone_resource_sdk.go
@@ -3,66 +3,87 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationPineconeResourceModel) ToCreateSDKType() *shared.DestinationPineconeCreateRequest {
- destinationType := shared.DestinationPineconePinecone(r.Configuration.DestinationType.ValueString())
var embedding shared.DestinationPineconeEmbedding
- var destinationPineconeEmbeddingOpenAI *shared.DestinationPineconeEmbeddingOpenAI
- if r.Configuration.Embedding.DestinationPineconeEmbeddingOpenAI != nil {
- mode := new(shared.DestinationPineconeEmbeddingOpenAIMode)
- if !r.Configuration.Embedding.DestinationPineconeEmbeddingOpenAI.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeEmbeddingOpenAI.Mode.IsNull() {
- *mode = shared.DestinationPineconeEmbeddingOpenAIMode(r.Configuration.Embedding.DestinationPineconeEmbeddingOpenAI.Mode.ValueString())
- } else {
- mode = nil
- }
- openaiKey := r.Configuration.Embedding.DestinationPineconeEmbeddingOpenAI.OpenaiKey.ValueString()
- destinationPineconeEmbeddingOpenAI = &shared.DestinationPineconeEmbeddingOpenAI{
- Mode: mode,
+ var destinationPineconeOpenAI *shared.DestinationPineconeOpenAI
+ if r.Configuration.Embedding.OpenAI != nil {
+ openaiKey := r.Configuration.Embedding.OpenAI.OpenaiKey.ValueString()
+ destinationPineconeOpenAI = &shared.DestinationPineconeOpenAI{
OpenaiKey: openaiKey,
}
}
- if destinationPineconeEmbeddingOpenAI != nil {
+ if destinationPineconeOpenAI != nil {
embedding = shared.DestinationPineconeEmbedding{
- DestinationPineconeEmbeddingOpenAI: destinationPineconeEmbeddingOpenAI,
+ DestinationPineconeOpenAI: destinationPineconeOpenAI,
}
}
- var destinationPineconeEmbeddingCohere *shared.DestinationPineconeEmbeddingCohere
- if r.Configuration.Embedding.DestinationPineconeEmbeddingCohere != nil {
- cohereKey := r.Configuration.Embedding.DestinationPineconeEmbeddingCohere.CohereKey.ValueString()
- mode1 := new(shared.DestinationPineconeEmbeddingCohereMode)
- if !r.Configuration.Embedding.DestinationPineconeEmbeddingCohere.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeEmbeddingCohere.Mode.IsNull() {
- *mode1 = shared.DestinationPineconeEmbeddingCohereMode(r.Configuration.Embedding.DestinationPineconeEmbeddingCohere.Mode.ValueString())
- } else {
- mode1 = nil
- }
- destinationPineconeEmbeddingCohere = &shared.DestinationPineconeEmbeddingCohere{
+ var destinationPineconeCohere *shared.DestinationPineconeCohere
+ if r.Configuration.Embedding.Cohere != nil {
+ cohereKey := r.Configuration.Embedding.Cohere.CohereKey.ValueString()
+ destinationPineconeCohere = &shared.DestinationPineconeCohere{
CohereKey: cohereKey,
- Mode: mode1,
}
}
- if destinationPineconeEmbeddingCohere != nil {
+ if destinationPineconeCohere != nil {
+ embedding = shared.DestinationPineconeEmbedding{
+ DestinationPineconeCohere: destinationPineconeCohere,
+ }
+ }
+ var destinationPineconeFake *shared.DestinationPineconeFake
+ if r.Configuration.Embedding.Fake != nil {
+ destinationPineconeFake = &shared.DestinationPineconeFake{}
+ }
+ if destinationPineconeFake != nil {
+ embedding = shared.DestinationPineconeEmbedding{
+ DestinationPineconeFake: destinationPineconeFake,
+ }
+ }
+ var destinationPineconeAzureOpenAI *shared.DestinationPineconeAzureOpenAI
+ if r.Configuration.Embedding.AzureOpenAI != nil {
+ apiBase := r.Configuration.Embedding.AzureOpenAI.APIBase.ValueString()
+ deployment := r.Configuration.Embedding.AzureOpenAI.Deployment.ValueString()
+ openaiKey1 := r.Configuration.Embedding.AzureOpenAI.OpenaiKey.ValueString()
+ destinationPineconeAzureOpenAI = &shared.DestinationPineconeAzureOpenAI{
+ APIBase: apiBase,
+ Deployment: deployment,
+ OpenaiKey: openaiKey1,
+ }
+ }
+ if destinationPineconeAzureOpenAI != nil {
embedding = shared.DestinationPineconeEmbedding{
- DestinationPineconeEmbeddingCohere: destinationPineconeEmbeddingCohere,
+ DestinationPineconeAzureOpenAI: destinationPineconeAzureOpenAI,
}
}
- var destinationPineconeEmbeddingFake *shared.DestinationPineconeEmbeddingFake
- if r.Configuration.Embedding.DestinationPineconeEmbeddingFake != nil {
- mode2 := new(shared.DestinationPineconeEmbeddingFakeMode)
- if !r.Configuration.Embedding.DestinationPineconeEmbeddingFake.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeEmbeddingFake.Mode.IsNull() {
- *mode2 = shared.DestinationPineconeEmbeddingFakeMode(r.Configuration.Embedding.DestinationPineconeEmbeddingFake.Mode.ValueString())
+ var destinationPineconeOpenAICompatible *shared.DestinationPineconeOpenAICompatible
+ if r.Configuration.Embedding.OpenAICompatible != nil {
+ apiKey := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.APIKey.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.APIKey.IsNull() {
+ *apiKey = r.Configuration.Embedding.OpenAICompatible.APIKey.ValueString()
} else {
- mode2 = nil
+ apiKey = nil
}
- destinationPineconeEmbeddingFake = &shared.DestinationPineconeEmbeddingFake{
- Mode: mode2,
+ baseURL := r.Configuration.Embedding.OpenAICompatible.BaseURL.ValueString()
+ dimensions := r.Configuration.Embedding.OpenAICompatible.Dimensions.ValueInt64()
+ modelName := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.ModelName.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.ModelName.IsNull() {
+ *modelName = r.Configuration.Embedding.OpenAICompatible.ModelName.ValueString()
+ } else {
+ modelName = nil
+ }
+ destinationPineconeOpenAICompatible = &shared.DestinationPineconeOpenAICompatible{
+ APIKey: apiKey,
+ BaseURL: baseURL,
+ Dimensions: dimensions,
+ ModelName: modelName,
}
}
- if destinationPineconeEmbeddingFake != nil {
+ if destinationPineconeOpenAICompatible != nil {
embedding = shared.DestinationPineconeEmbedding{
- DestinationPineconeEmbeddingFake: destinationPineconeEmbeddingFake,
+ DestinationPineconeOpenAICompatible: destinationPineconeOpenAICompatible,
}
}
index := r.Configuration.Indexing.Index.ValueString()
@@ -80,6 +101,15 @@ func (r *DestinationPineconeResourceModel) ToCreateSDKType() *shared.Destination
chunkOverlap = nil
}
chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64()
+ var fieldNameMappings []shared.DestinationPineconeFieldNameMappingConfigModel = nil
+ for _, fieldNameMappingsItem := range r.Configuration.Processing.FieldNameMappings {
+ fromField := fieldNameMappingsItem.FromField.ValueString()
+ toField := fieldNameMappingsItem.ToField.ValueString()
+ fieldNameMappings = append(fieldNameMappings, shared.DestinationPineconeFieldNameMappingConfigModel{
+ FromField: fromField,
+ ToField: toField,
+ })
+ }
var metadataFields []string = nil
for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields {
metadataFields = append(metadataFields, metadataFieldsItem.ValueString())
@@ -88,22 +118,84 @@ func (r *DestinationPineconeResourceModel) ToCreateSDKType() *shared.Destination
for _, textFieldsItem := range r.Configuration.Processing.TextFields {
textFields = append(textFields, textFieldsItem.ValueString())
}
+ var textSplitter *shared.DestinationPineconeTextSplitter
+ if r.Configuration.Processing.TextSplitter != nil {
+ var destinationPineconeBySeparator *shared.DestinationPineconeBySeparator
+ if r.Configuration.Processing.TextSplitter.BySeparator != nil {
+ keepSeparator := new(bool)
+ if !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsUnknown() && !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsNull() {
+ *keepSeparator = r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.ValueBool()
+ } else {
+ keepSeparator = nil
+ }
+ var separators []string = nil
+ for _, separatorsItem := range r.Configuration.Processing.TextSplitter.BySeparator.Separators {
+ separators = append(separators, separatorsItem.ValueString())
+ }
+ destinationPineconeBySeparator = &shared.DestinationPineconeBySeparator{
+ KeepSeparator: keepSeparator,
+ Separators: separators,
+ }
+ }
+ if destinationPineconeBySeparator != nil {
+ textSplitter = &shared.DestinationPineconeTextSplitter{
+ DestinationPineconeBySeparator: destinationPineconeBySeparator,
+ }
+ }
+ var destinationPineconeByMarkdownHeader *shared.DestinationPineconeByMarkdownHeader
+ if r.Configuration.Processing.TextSplitter.ByMarkdownHeader != nil {
+ splitLevel := new(int64)
+ if !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsUnknown() && !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsNull() {
+ *splitLevel = r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.ValueInt64()
+ } else {
+ splitLevel = nil
+ }
+ destinationPineconeByMarkdownHeader = &shared.DestinationPineconeByMarkdownHeader{
+ SplitLevel: splitLevel,
+ }
+ }
+ if destinationPineconeByMarkdownHeader != nil {
+ textSplitter = &shared.DestinationPineconeTextSplitter{
+ DestinationPineconeByMarkdownHeader: destinationPineconeByMarkdownHeader,
+ }
+ }
+ var destinationPineconeByProgrammingLanguage *shared.DestinationPineconeByProgrammingLanguage
+ if r.Configuration.Processing.TextSplitter.ByProgrammingLanguage != nil {
+ language := shared.DestinationPineconeLanguage(r.Configuration.Processing.TextSplitter.ByProgrammingLanguage.Language.ValueString())
+ destinationPineconeByProgrammingLanguage = &shared.DestinationPineconeByProgrammingLanguage{
+ Language: language,
+ }
+ }
+ if destinationPineconeByProgrammingLanguage != nil {
+ textSplitter = &shared.DestinationPineconeTextSplitter{
+ DestinationPineconeByProgrammingLanguage: destinationPineconeByProgrammingLanguage,
+ }
+ }
+ }
processing := shared.DestinationPineconeProcessingConfigModel{
- ChunkOverlap: chunkOverlap,
- ChunkSize: chunkSize,
- MetadataFields: metadataFields,
- TextFields: textFields,
+ ChunkOverlap: chunkOverlap,
+ ChunkSize: chunkSize,
+ FieldNameMappings: fieldNameMappings,
+ MetadataFields: metadataFields,
+ TextFields: textFields,
+ TextSplitter: textSplitter,
}
configuration := shared.DestinationPinecone{
- DestinationType: destinationType,
- Embedding: embedding,
- Indexing: indexing,
- Processing: processing,
+ Embedding: embedding,
+ Indexing: indexing,
+ Processing: processing,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationPineconeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -117,59 +209,81 @@ func (r *DestinationPineconeResourceModel) ToGetSDKType() *shared.DestinationPin
func (r *DestinationPineconeResourceModel) ToUpdateSDKType() *shared.DestinationPineconePutRequest {
var embedding shared.DestinationPineconeUpdateEmbedding
- var destinationPineconeUpdateEmbeddingOpenAI *shared.DestinationPineconeUpdateEmbeddingOpenAI
- if r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingOpenAI != nil {
- mode := new(shared.DestinationPineconeUpdateEmbeddingOpenAIMode)
- if !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingOpenAI.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingOpenAI.Mode.IsNull() {
- *mode = shared.DestinationPineconeUpdateEmbeddingOpenAIMode(r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingOpenAI.Mode.ValueString())
- } else {
- mode = nil
- }
- openaiKey := r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingOpenAI.OpenaiKey.ValueString()
- destinationPineconeUpdateEmbeddingOpenAI = &shared.DestinationPineconeUpdateEmbeddingOpenAI{
- Mode: mode,
+ var destinationPineconeUpdateOpenAI *shared.DestinationPineconeUpdateOpenAI
+ if r.Configuration.Embedding.OpenAI != nil {
+ openaiKey := r.Configuration.Embedding.OpenAI.OpenaiKey.ValueString()
+ destinationPineconeUpdateOpenAI = &shared.DestinationPineconeUpdateOpenAI{
OpenaiKey: openaiKey,
}
}
- if destinationPineconeUpdateEmbeddingOpenAI != nil {
+ if destinationPineconeUpdateOpenAI != nil {
embedding = shared.DestinationPineconeUpdateEmbedding{
- DestinationPineconeUpdateEmbeddingOpenAI: destinationPineconeUpdateEmbeddingOpenAI,
+ DestinationPineconeUpdateOpenAI: destinationPineconeUpdateOpenAI,
}
}
- var destinationPineconeUpdateEmbeddingCohere *shared.DestinationPineconeUpdateEmbeddingCohere
- if r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingCohere != nil {
- cohereKey := r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingCohere.CohereKey.ValueString()
- mode1 := new(shared.DestinationPineconeUpdateEmbeddingCohereMode)
- if !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingCohere.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingCohere.Mode.IsNull() {
- *mode1 = shared.DestinationPineconeUpdateEmbeddingCohereMode(r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingCohere.Mode.ValueString())
- } else {
- mode1 = nil
- }
- destinationPineconeUpdateEmbeddingCohere = &shared.DestinationPineconeUpdateEmbeddingCohere{
+ var destinationPineconeUpdateCohere *shared.DestinationPineconeUpdateCohere
+ if r.Configuration.Embedding.Cohere != nil {
+ cohereKey := r.Configuration.Embedding.Cohere.CohereKey.ValueString()
+ destinationPineconeUpdateCohere = &shared.DestinationPineconeUpdateCohere{
CohereKey: cohereKey,
- Mode: mode1,
}
}
- if destinationPineconeUpdateEmbeddingCohere != nil {
+ if destinationPineconeUpdateCohere != nil {
embedding = shared.DestinationPineconeUpdateEmbedding{
- DestinationPineconeUpdateEmbeddingCohere: destinationPineconeUpdateEmbeddingCohere,
+ DestinationPineconeUpdateCohere: destinationPineconeUpdateCohere,
}
}
- var destinationPineconeUpdateEmbeddingFake *shared.DestinationPineconeUpdateEmbeddingFake
- if r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingFake != nil {
- mode2 := new(shared.DestinationPineconeUpdateEmbeddingFakeMode)
- if !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingFake.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingFake.Mode.IsNull() {
- *mode2 = shared.DestinationPineconeUpdateEmbeddingFakeMode(r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingFake.Mode.ValueString())
+ var destinationPineconeUpdateFake *shared.DestinationPineconeUpdateFake
+ if r.Configuration.Embedding.Fake != nil {
+ destinationPineconeUpdateFake = &shared.DestinationPineconeUpdateFake{}
+ }
+ if destinationPineconeUpdateFake != nil {
+ embedding = shared.DestinationPineconeUpdateEmbedding{
+ DestinationPineconeUpdateFake: destinationPineconeUpdateFake,
+ }
+ }
+ var destinationPineconeUpdateAzureOpenAI *shared.DestinationPineconeUpdateAzureOpenAI
+ if r.Configuration.Embedding.AzureOpenAI != nil {
+ apiBase := r.Configuration.Embedding.AzureOpenAI.APIBase.ValueString()
+ deployment := r.Configuration.Embedding.AzureOpenAI.Deployment.ValueString()
+ openaiKey1 := r.Configuration.Embedding.AzureOpenAI.OpenaiKey.ValueString()
+ destinationPineconeUpdateAzureOpenAI = &shared.DestinationPineconeUpdateAzureOpenAI{
+ APIBase: apiBase,
+ Deployment: deployment,
+ OpenaiKey: openaiKey1,
+ }
+ }
+ if destinationPineconeUpdateAzureOpenAI != nil {
+ embedding = shared.DestinationPineconeUpdateEmbedding{
+ DestinationPineconeUpdateAzureOpenAI: destinationPineconeUpdateAzureOpenAI,
+ }
+ }
+ var destinationPineconeUpdateOpenAICompatible *shared.DestinationPineconeUpdateOpenAICompatible
+ if r.Configuration.Embedding.OpenAICompatible != nil {
+ apiKey := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.APIKey.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.APIKey.IsNull() {
+ *apiKey = r.Configuration.Embedding.OpenAICompatible.APIKey.ValueString()
+ } else {
+ apiKey = nil
+ }
+ baseURL := r.Configuration.Embedding.OpenAICompatible.BaseURL.ValueString()
+ dimensions := r.Configuration.Embedding.OpenAICompatible.Dimensions.ValueInt64()
+ modelName := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.ModelName.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.ModelName.IsNull() {
+ *modelName = r.Configuration.Embedding.OpenAICompatible.ModelName.ValueString()
} else {
- mode2 = nil
+ modelName = nil
}
- destinationPineconeUpdateEmbeddingFake = &shared.DestinationPineconeUpdateEmbeddingFake{
- Mode: mode2,
+ destinationPineconeUpdateOpenAICompatible = &shared.DestinationPineconeUpdateOpenAICompatible{
+ APIKey: apiKey,
+ BaseURL: baseURL,
+ Dimensions: dimensions,
+ ModelName: modelName,
}
}
- if destinationPineconeUpdateEmbeddingFake != nil {
+ if destinationPineconeUpdateOpenAICompatible != nil {
embedding = shared.DestinationPineconeUpdateEmbedding{
- DestinationPineconeUpdateEmbeddingFake: destinationPineconeUpdateEmbeddingFake,
+ DestinationPineconeUpdateOpenAICompatible: destinationPineconeUpdateOpenAICompatible,
}
}
index := r.Configuration.Indexing.Index.ValueString()
@@ -187,6 +301,15 @@ func (r *DestinationPineconeResourceModel) ToUpdateSDKType() *shared.Destination
chunkOverlap = nil
}
chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64()
+ var fieldNameMappings []shared.DestinationPineconeUpdateFieldNameMappingConfigModel = nil
+ for _, fieldNameMappingsItem := range r.Configuration.Processing.FieldNameMappings {
+ fromField := fieldNameMappingsItem.FromField.ValueString()
+ toField := fieldNameMappingsItem.ToField.ValueString()
+ fieldNameMappings = append(fieldNameMappings, shared.DestinationPineconeUpdateFieldNameMappingConfigModel{
+ FromField: fromField,
+ ToField: toField,
+ })
+ }
var metadataFields []string = nil
for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields {
metadataFields = append(metadataFields, metadataFieldsItem.ValueString())
@@ -195,11 +318,67 @@ func (r *DestinationPineconeResourceModel) ToUpdateSDKType() *shared.Destination
for _, textFieldsItem := range r.Configuration.Processing.TextFields {
textFields = append(textFields, textFieldsItem.ValueString())
}
+ var textSplitter *shared.DestinationPineconeUpdateTextSplitter
+ if r.Configuration.Processing.TextSplitter != nil {
+ var destinationPineconeUpdateBySeparator *shared.DestinationPineconeUpdateBySeparator
+ if r.Configuration.Processing.TextSplitter.BySeparator != nil {
+ keepSeparator := new(bool)
+ if !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsUnknown() && !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsNull() {
+ *keepSeparator = r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.ValueBool()
+ } else {
+ keepSeparator = nil
+ }
+ var separators []string = nil
+ for _, separatorsItem := range r.Configuration.Processing.TextSplitter.BySeparator.Separators {
+ separators = append(separators, separatorsItem.ValueString())
+ }
+ destinationPineconeUpdateBySeparator = &shared.DestinationPineconeUpdateBySeparator{
+ KeepSeparator: keepSeparator,
+ Separators: separators,
+ }
+ }
+ if destinationPineconeUpdateBySeparator != nil {
+ textSplitter = &shared.DestinationPineconeUpdateTextSplitter{
+ DestinationPineconeUpdateBySeparator: destinationPineconeUpdateBySeparator,
+ }
+ }
+ var destinationPineconeUpdateByMarkdownHeader *shared.DestinationPineconeUpdateByMarkdownHeader
+ if r.Configuration.Processing.TextSplitter.ByMarkdownHeader != nil {
+ splitLevel := new(int64)
+ if !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsUnknown() && !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsNull() {
+ *splitLevel = r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.ValueInt64()
+ } else {
+ splitLevel = nil
+ }
+ destinationPineconeUpdateByMarkdownHeader = &shared.DestinationPineconeUpdateByMarkdownHeader{
+ SplitLevel: splitLevel,
+ }
+ }
+ if destinationPineconeUpdateByMarkdownHeader != nil {
+ textSplitter = &shared.DestinationPineconeUpdateTextSplitter{
+ DestinationPineconeUpdateByMarkdownHeader: destinationPineconeUpdateByMarkdownHeader,
+ }
+ }
+ var destinationPineconeUpdateByProgrammingLanguage *shared.DestinationPineconeUpdateByProgrammingLanguage
+ if r.Configuration.Processing.TextSplitter.ByProgrammingLanguage != nil {
+ language := shared.DestinationPineconeUpdateLanguage(r.Configuration.Processing.TextSplitter.ByProgrammingLanguage.Language.ValueString())
+ destinationPineconeUpdateByProgrammingLanguage = &shared.DestinationPineconeUpdateByProgrammingLanguage{
+ Language: language,
+ }
+ }
+ if destinationPineconeUpdateByProgrammingLanguage != nil {
+ textSplitter = &shared.DestinationPineconeUpdateTextSplitter{
+ DestinationPineconeUpdateByProgrammingLanguage: destinationPineconeUpdateByProgrammingLanguage,
+ }
+ }
+ }
processing := shared.DestinationPineconeUpdateProcessingConfigModel{
- ChunkOverlap: chunkOverlap,
- ChunkSize: chunkSize,
- MetadataFields: metadataFields,
- TextFields: textFields,
+ ChunkOverlap: chunkOverlap,
+ ChunkSize: chunkSize,
+ FieldNameMappings: fieldNameMappings,
+ MetadataFields: metadataFields,
+ TextFields: textFields,
+ TextSplitter: textSplitter,
}
configuration := shared.DestinationPineconeUpdate{
Embedding: embedding,
diff --git a/internal/provider/destination_postgres_data_source.go b/internal/provider/destination_postgres_data_source.go
old mode 100755
new mode 100644
index 11118d55a..710dfe716
--- a/internal/provider/destination_postgres_data_source.go
+++ b/internal/provider/destination_postgres_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationPostgresDataSource struct {
// DestinationPostgresDataSourceModel describes the data model.
type DestinationPostgresDataSourceModel struct {
- Configuration DestinationPostgres `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,464 +47,17 @@ func (r *DestinationPostgresDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "DestinationPostgres DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `Name of the database.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "postgres",
- ),
- },
- Description: `must be one of ["postgres"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the database.`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `Port of the database.`,
- },
- "schema": schema.StringAttribute{
- Computed: true,
- Description: `The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".`,
- },
- "ssl_mode": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_postgres_ssl_modes_allow": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
- },
- Description: `Allow SSL mode.`,
- },
- "destination_postgres_ssl_modes_disable": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- },
- Description: `Disable SSL.`,
- },
- "destination_postgres_ssl_modes_prefer": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
- },
- Description: `Prefer SSL mode.`,
- },
- "destination_postgres_ssl_modes_require": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
- },
- Description: `Require SSL mode.`,
- },
- "destination_postgres_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
- },
- Description: `Verify-ca SSL mode.`,
- },
- "destination_postgres_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- },
- Description: `Verify-full SSL mode.`,
- },
- "destination_postgres_update_ssl_modes_allow": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
- },
- Description: `Allow SSL mode.`,
- },
- "destination_postgres_update_ssl_modes_disable": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- },
- Description: `Disable SSL.`,
- },
- "destination_postgres_update_ssl_modes_prefer": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
- },
- Description: `Prefer SSL mode.`,
- },
- "destination_postgres_update_ssl_modes_require": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
- },
- Description: `Require SSL mode.`,
- },
- "destination_postgres_update_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
- },
- Description: `Verify-ca SSL mode.`,
- },
- "destination_postgres_update_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- },
- Description: `Verify-full SSL mode.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- MarkdownDescription: `SSL connection modes. ` + "\n" +
- ` disable - Chose this mode to disable encryption of communication between Airbyte and destination database` + "\n" +
- ` allow - Chose this mode to enable encryption only when required by the source database` + "\n" +
- ` prefer - Chose this mode to allow unencrypted connection only if the source database does not support encryption` + "\n" +
- ` require - Chose this mode to always require encryption. If the source database server does not support encryption, connection will fail` + "\n" +
- ` verify-ca - Chose this mode to always require encryption and to verify that the source database server has a valid SSL certificate` + "\n" +
- ` verify-full - This is the most secure mode. Chose this mode to always require encryption and to verify the identity of the source database server` + "\n" +
- ` See more information - in the docs.`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_postgres_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_postgres_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_postgres_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_postgres_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_postgres_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_postgres_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to use to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_postgres_data_source_sdk.go b/internal/provider/destination_postgres_data_source_sdk.go
old mode 100755
new mode 100644
index 337d946a3..701c5000b
--- a/internal/provider/destination_postgres_data_source_sdk.go
+++ b/internal/provider/destination_postgres_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationPostgresDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_postgres_resource.go b/internal/provider/destination_postgres_resource.go
old mode 100755
new mode 100644
index 75311201a..0e8be4104
--- a/internal/provider/destination_postgres_resource.go
+++ b/internal/provider/destination_postgres_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type DestinationPostgresResource struct {
// DestinationPostgresResourceModel describes the resource data model.
type DestinationPostgresResourceModel struct {
Configuration DestinationPostgres `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -58,15 +59,6 @@ func (r *DestinationPostgresResource) Schema(ctx context.Context, req resource.S
Required: true,
Description: `Name of the database.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "postgres",
- ),
- },
- Description: `must be one of ["postgres"]`,
- },
"host": schema.StringAttribute{
Required: true,
Description: `Hostname of the database.`,
@@ -77,194 +69,43 @@ func (r *DestinationPostgresResource) Schema(ctx context.Context, req resource.S
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password associated with the username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `Port of the database.`,
+ Optional: true,
+ MarkdownDescription: `Default: 5432` + "\n" +
+ `Port of the database.`,
},
"schema": schema.StringAttribute{
- Required: true,
- Description: `The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".`,
+ Optional: true,
+ MarkdownDescription: `Default: "public"` + "\n" +
+ `The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".`,
},
"ssl_mode": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_postgres_ssl_modes_allow": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
- },
- Description: `Allow SSL mode.`,
- },
- "destination_postgres_ssl_modes_disable": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- },
- Description: `Disable SSL.`,
- },
- "destination_postgres_ssl_modes_prefer": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
- },
- Description: `Prefer SSL mode.`,
- },
- "destination_postgres_ssl_modes_require": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
- },
- Description: `Require SSL mode.`,
- },
- "destination_postgres_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Required: true,
- Description: `CA certificate`,
- },
- "client_key_password": schema.StringAttribute{
- Optional: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
- },
- Description: `Verify-ca SSL mode.`,
- },
- "destination_postgres_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Required: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Required: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Required: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Optional: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- },
- Description: `Verify-full SSL mode.`,
- },
- "destination_postgres_update_ssl_modes_allow": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
- },
+ "allow": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Allow SSL mode.`,
},
- "destination_postgres_update_ssl_modes_disable": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- },
+ "disable": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Disable SSL.`,
},
- "destination_postgres_update_ssl_modes_prefer": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
- },
+ "prefer": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Prefer SSL mode.`,
},
- "destination_postgres_update_ssl_modes_require": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
- },
+ "require": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Require SSL mode.`,
},
- "destination_postgres_update_ssl_modes_verify_ca": schema.SingleNestedAttribute{
+ "verify_ca": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ca_certificate": schema.StringAttribute{
@@ -273,21 +114,13 @@ func (r *DestinationPostgresResource) Schema(ctx context.Context, req resource.S
},
"client_key_password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
},
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
},
Description: `Verify-ca SSL mode.`,
},
- "destination_postgres_update_ssl_modes_verify_full": schema.SingleNestedAttribute{
+ "verify_full": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ca_certificate": schema.StringAttribute{
@@ -300,28 +133,18 @@ func (r *DestinationPostgresResource) Schema(ctx context.Context, req resource.S
},
"client_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Client key`,
},
"client_key_password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
},
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
},
Description: `Verify-full SSL mode.`,
},
},
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
MarkdownDescription: `SSL connection modes. ` + "\n" +
` disable - Chose this mode to disable encryption of communication between Airbyte and destination database` + "\n" +
` allow - Chose this mode to enable encryption only when required by the source database` + "\n" +
@@ -330,126 +153,29 @@ func (r *DestinationPostgresResource) Schema(ctx context.Context, req resource.S
` verify-ca - Chose this mode to always require encryption and to verify that the source database server has a valid SSL certificate` + "\n" +
` verify-full - This is the most secure mode. Chose this mode to always require encryption and to verify the identity of the source database server` + "\n" +
` See more information - in the docs.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_postgres_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_postgres_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_postgres_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_postgres_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_postgres_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -457,35 +183,28 @@ func (r *DestinationPostgresResource) Schema(ctx context.Context, req resource.S
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_postgres_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -495,10 +214,10 @@ func (r *DestinationPostgresResource) Schema(ctx context.Context, req resource.S
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -506,6 +225,13 @@ func (r *DestinationPostgresResource) Schema(ctx context.Context, req resource.S
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -522,7 +248,8 @@ func (r *DestinationPostgresResource) Schema(ctx context.Context, req resource.S
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -572,7 +299,7 @@ func (r *DestinationPostgresResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationPostgres(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -748,5 +475,5 @@ func (r *DestinationPostgresResource) Delete(ctx context.Context, req resource.D
}
func (r *DestinationPostgresResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_postgres_resource_sdk.go b/internal/provider/destination_postgres_resource_sdk.go
old mode 100755
new mode 100644
index fa4821cf7..ebe22cbd7
--- a/internal/provider/destination_postgres_resource_sdk.go
+++ b/internal/provider/destination_postgres_resource_sdk.go
@@ -3,13 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationPostgresResourceModel) ToCreateSDKType() *shared.DestinationPostgresCreateRequest {
database := r.Configuration.Database.ValueString()
- destinationType := shared.DestinationPostgresPostgres(r.Configuration.DestinationType.ValueString())
host := r.Configuration.Host.ValueString()
jdbcURLParams := new(string)
if !r.Configuration.JdbcURLParams.IsUnknown() && !r.Configuration.JdbcURLParams.IsNull() {
@@ -23,177 +22,180 @@ func (r *DestinationPostgresResourceModel) ToCreateSDKType() *shared.Destination
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
- schema := r.Configuration.Schema.ValueString()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ schema := new(string)
+ if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() {
+ *schema = r.Configuration.Schema.ValueString()
+ } else {
+ schema = nil
+ }
var sslMode *shared.DestinationPostgresSSLModes
if r.Configuration.SslMode != nil {
- var destinationPostgresSSLModesDisable *shared.DestinationPostgresSSLModesDisable
- if r.Configuration.SslMode.DestinationPostgresSSLModesDisable != nil {
- mode := shared.DestinationPostgresSSLModesDisableMode(r.Configuration.SslMode.DestinationPostgresSSLModesDisable.Mode.ValueString())
- destinationPostgresSSLModesDisable = &shared.DestinationPostgresSSLModesDisable{
- Mode: mode,
- }
+ var destinationPostgresDisable *shared.DestinationPostgresDisable
+ if r.Configuration.SslMode.Disable != nil {
+ destinationPostgresDisable = &shared.DestinationPostgresDisable{}
}
- if destinationPostgresSSLModesDisable != nil {
+ if destinationPostgresDisable != nil {
sslMode = &shared.DestinationPostgresSSLModes{
- DestinationPostgresSSLModesDisable: destinationPostgresSSLModesDisable,
+ DestinationPostgresDisable: destinationPostgresDisable,
}
}
- var destinationPostgresSSLModesAllow *shared.DestinationPostgresSSLModesAllow
- if r.Configuration.SslMode.DestinationPostgresSSLModesAllow != nil {
- mode1 := shared.DestinationPostgresSSLModesAllowMode(r.Configuration.SslMode.DestinationPostgresSSLModesAllow.Mode.ValueString())
- destinationPostgresSSLModesAllow = &shared.DestinationPostgresSSLModesAllow{
- Mode: mode1,
- }
+ var destinationPostgresAllow *shared.DestinationPostgresAllow
+ if r.Configuration.SslMode.Allow != nil {
+ destinationPostgresAllow = &shared.DestinationPostgresAllow{}
}
- if destinationPostgresSSLModesAllow != nil {
+ if destinationPostgresAllow != nil {
sslMode = &shared.DestinationPostgresSSLModes{
- DestinationPostgresSSLModesAllow: destinationPostgresSSLModesAllow,
+ DestinationPostgresAllow: destinationPostgresAllow,
}
}
- var destinationPostgresSSLModesPrefer *shared.DestinationPostgresSSLModesPrefer
- if r.Configuration.SslMode.DestinationPostgresSSLModesPrefer != nil {
- mode2 := shared.DestinationPostgresSSLModesPreferMode(r.Configuration.SslMode.DestinationPostgresSSLModesPrefer.Mode.ValueString())
- destinationPostgresSSLModesPrefer = &shared.DestinationPostgresSSLModesPrefer{
- Mode: mode2,
- }
+ var destinationPostgresPrefer *shared.DestinationPostgresPrefer
+ if r.Configuration.SslMode.Prefer != nil {
+ destinationPostgresPrefer = &shared.DestinationPostgresPrefer{}
}
- if destinationPostgresSSLModesPrefer != nil {
+ if destinationPostgresPrefer != nil {
sslMode = &shared.DestinationPostgresSSLModes{
- DestinationPostgresSSLModesPrefer: destinationPostgresSSLModesPrefer,
+ DestinationPostgresPrefer: destinationPostgresPrefer,
}
}
- var destinationPostgresSSLModesRequire *shared.DestinationPostgresSSLModesRequire
- if r.Configuration.SslMode.DestinationPostgresSSLModesRequire != nil {
- mode3 := shared.DestinationPostgresSSLModesRequireMode(r.Configuration.SslMode.DestinationPostgresSSLModesRequire.Mode.ValueString())
- destinationPostgresSSLModesRequire = &shared.DestinationPostgresSSLModesRequire{
- Mode: mode3,
- }
+ var destinationPostgresRequire *shared.DestinationPostgresRequire
+ if r.Configuration.SslMode.Require != nil {
+ destinationPostgresRequire = &shared.DestinationPostgresRequire{}
}
- if destinationPostgresSSLModesRequire != nil {
+ if destinationPostgresRequire != nil {
sslMode = &shared.DestinationPostgresSSLModes{
- DestinationPostgresSSLModesRequire: destinationPostgresSSLModesRequire,
+ DestinationPostgresRequire: destinationPostgresRequire,
}
}
- var destinationPostgresSSLModesVerifyCa *shared.DestinationPostgresSSLModesVerifyCa
- if r.Configuration.SslMode.DestinationPostgresSSLModesVerifyCa != nil {
- caCertificate := r.Configuration.SslMode.DestinationPostgresSSLModesVerifyCa.CaCertificate.ValueString()
+ var destinationPostgresVerifyCa *shared.DestinationPostgresVerifyCa
+ if r.Configuration.SslMode.VerifyCa != nil {
+ caCertificate := r.Configuration.SslMode.VerifyCa.CaCertificate.ValueString()
clientKeyPassword := new(string)
- if !r.Configuration.SslMode.DestinationPostgresSSLModesVerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.DestinationPostgresSSLModesVerifyCa.ClientKeyPassword.IsNull() {
- *clientKeyPassword = r.Configuration.SslMode.DestinationPostgresSSLModesVerifyCa.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsNull() {
+ *clientKeyPassword = r.Configuration.SslMode.VerifyCa.ClientKeyPassword.ValueString()
} else {
clientKeyPassword = nil
}
- mode4 := shared.DestinationPostgresSSLModesVerifyCaMode(r.Configuration.SslMode.DestinationPostgresSSLModesVerifyCa.Mode.ValueString())
- destinationPostgresSSLModesVerifyCa = &shared.DestinationPostgresSSLModesVerifyCa{
+ destinationPostgresVerifyCa = &shared.DestinationPostgresVerifyCa{
CaCertificate: caCertificate,
ClientKeyPassword: clientKeyPassword,
- Mode: mode4,
}
}
- if destinationPostgresSSLModesVerifyCa != nil {
+ if destinationPostgresVerifyCa != nil {
sslMode = &shared.DestinationPostgresSSLModes{
- DestinationPostgresSSLModesVerifyCa: destinationPostgresSSLModesVerifyCa,
+ DestinationPostgresVerifyCa: destinationPostgresVerifyCa,
}
}
- var destinationPostgresSSLModesVerifyFull *shared.DestinationPostgresSSLModesVerifyFull
- if r.Configuration.SslMode.DestinationPostgresSSLModesVerifyFull != nil {
- caCertificate1 := r.Configuration.SslMode.DestinationPostgresSSLModesVerifyFull.CaCertificate.ValueString()
- clientCertificate := r.Configuration.SslMode.DestinationPostgresSSLModesVerifyFull.ClientCertificate.ValueString()
- clientKey := r.Configuration.SslMode.DestinationPostgresSSLModesVerifyFull.ClientKey.ValueString()
+ var destinationPostgresVerifyFull *shared.DestinationPostgresVerifyFull
+ if r.Configuration.SslMode.VerifyFull != nil {
+ caCertificate1 := r.Configuration.SslMode.VerifyFull.CaCertificate.ValueString()
+ clientCertificate := r.Configuration.SslMode.VerifyFull.ClientCertificate.ValueString()
+ clientKey := r.Configuration.SslMode.VerifyFull.ClientKey.ValueString()
clientKeyPassword1 := new(string)
- if !r.Configuration.SslMode.DestinationPostgresSSLModesVerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.DestinationPostgresSSLModesVerifyFull.ClientKeyPassword.IsNull() {
- *clientKeyPassword1 = r.Configuration.SslMode.DestinationPostgresSSLModesVerifyFull.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsNull() {
+ *clientKeyPassword1 = r.Configuration.SslMode.VerifyFull.ClientKeyPassword.ValueString()
} else {
clientKeyPassword1 = nil
}
- mode5 := shared.DestinationPostgresSSLModesVerifyFullMode(r.Configuration.SslMode.DestinationPostgresSSLModesVerifyFull.Mode.ValueString())
- destinationPostgresSSLModesVerifyFull = &shared.DestinationPostgresSSLModesVerifyFull{
+ destinationPostgresVerifyFull = &shared.DestinationPostgresVerifyFull{
CaCertificate: caCertificate1,
ClientCertificate: clientCertificate,
ClientKey: clientKey,
ClientKeyPassword: clientKeyPassword1,
- Mode: mode5,
}
}
- if destinationPostgresSSLModesVerifyFull != nil {
+ if destinationPostgresVerifyFull != nil {
sslMode = &shared.DestinationPostgresSSLModes{
- DestinationPostgresSSLModesVerifyFull: destinationPostgresSSLModesVerifyFull,
+ DestinationPostgresVerifyFull: destinationPostgresVerifyFull,
}
}
}
var tunnelMethod *shared.DestinationPostgresSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationPostgresSSHTunnelMethodNoTunnel *shared.DestinationPostgresSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationPostgresSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationPostgresSSHTunnelMethodNoTunnel = &shared.DestinationPostgresSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationPostgresNoTunnel *shared.DestinationPostgresNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationPostgresNoTunnel = &shared.DestinationPostgresNoTunnel{}
}
- if destinationPostgresSSHTunnelMethodNoTunnel != nil {
+ if destinationPostgresNoTunnel != nil {
tunnelMethod = &shared.DestinationPostgresSSHTunnelMethod{
- DestinationPostgresSSHTunnelMethodNoTunnel: destinationPostgresSSHTunnelMethodNoTunnel,
+ DestinationPostgresNoTunnel: destinationPostgresNoTunnel,
}
}
- var destinationPostgresSSHTunnelMethodSSHKeyAuthentication *shared.DestinationPostgresSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationPostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationPostgresSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationPostgresSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationPostgresSSHKeyAuthentication *shared.DestinationPostgresSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationPostgresSSHKeyAuthentication = &shared.DestinationPostgresSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationPostgresSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationPostgresSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationPostgresSSHTunnelMethod{
- DestinationPostgresSSHTunnelMethodSSHKeyAuthentication: destinationPostgresSSHTunnelMethodSSHKeyAuthentication,
+ DestinationPostgresSSHKeyAuthentication: destinationPostgresSSHKeyAuthentication,
}
}
- var destinationPostgresSSHTunnelMethodPasswordAuthentication *shared.DestinationPostgresSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationPostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationPostgresSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationPostgresSSHTunnelMethodPasswordAuthentication = &shared.DestinationPostgresSSHTunnelMethodPasswordAuthentication{
+ var destinationPostgresPasswordAuthentication *shared.DestinationPostgresPasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationPostgresPasswordAuthentication = &shared.DestinationPostgresPasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationPostgresSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationPostgresPasswordAuthentication != nil {
tunnelMethod = &shared.DestinationPostgresSSHTunnelMethod{
- DestinationPostgresSSHTunnelMethodPasswordAuthentication: destinationPostgresSSHTunnelMethodPasswordAuthentication,
+ DestinationPostgresPasswordAuthentication: destinationPostgresPasswordAuthentication,
}
}
}
username := r.Configuration.Username.ValueString()
configuration := shared.DestinationPostgres{
- Database: database,
- DestinationType: destinationType,
- Host: host,
- JdbcURLParams: jdbcURLParams,
- Password: password,
- Port: port,
- Schema: schema,
- SslMode: sslMode,
- TunnelMethod: tunnelMethod,
- Username: username,
+ Database: database,
+ Host: host,
+ JdbcURLParams: jdbcURLParams,
+ Password: password,
+ Port: port,
+ Schema: schema,
+ SslMode: sslMode,
+ TunnelMethod: tunnelMethod,
+ Username: username,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationPostgresCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -220,157 +222,154 @@ func (r *DestinationPostgresResourceModel) ToUpdateSDKType() *shared.Destination
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
- schema := r.Configuration.Schema.ValueString()
- var sslMode *shared.DestinationPostgresUpdateSSLModes
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ schema := new(string)
+ if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() {
+ *schema = r.Configuration.Schema.ValueString()
+ } else {
+ schema = nil
+ }
+ var sslMode *shared.SSLModes
if r.Configuration.SslMode != nil {
- var destinationPostgresUpdateSSLModesDisable *shared.DestinationPostgresUpdateSSLModesDisable
- if r.Configuration.SslMode.DestinationPostgresUpdateSSLModesDisable != nil {
- mode := shared.DestinationPostgresUpdateSSLModesDisableMode(r.Configuration.SslMode.DestinationPostgresUpdateSSLModesDisable.Mode.ValueString())
- destinationPostgresUpdateSSLModesDisable = &shared.DestinationPostgresUpdateSSLModesDisable{
- Mode: mode,
- }
+ var disable *shared.Disable
+ if r.Configuration.SslMode.Disable != nil {
+ disable = &shared.Disable{}
}
- if destinationPostgresUpdateSSLModesDisable != nil {
- sslMode = &shared.DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesDisable: destinationPostgresUpdateSSLModesDisable,
+ if disable != nil {
+ sslMode = &shared.SSLModes{
+ Disable: disable,
}
}
- var destinationPostgresUpdateSSLModesAllow *shared.DestinationPostgresUpdateSSLModesAllow
- if r.Configuration.SslMode.DestinationPostgresUpdateSSLModesAllow != nil {
- mode1 := shared.DestinationPostgresUpdateSSLModesAllowMode(r.Configuration.SslMode.DestinationPostgresUpdateSSLModesAllow.Mode.ValueString())
- destinationPostgresUpdateSSLModesAllow = &shared.DestinationPostgresUpdateSSLModesAllow{
- Mode: mode1,
- }
+ var allow *shared.Allow
+ if r.Configuration.SslMode.Allow != nil {
+ allow = &shared.Allow{}
}
- if destinationPostgresUpdateSSLModesAllow != nil {
- sslMode = &shared.DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesAllow: destinationPostgresUpdateSSLModesAllow,
+ if allow != nil {
+ sslMode = &shared.SSLModes{
+ Allow: allow,
}
}
- var destinationPostgresUpdateSSLModesPrefer *shared.DestinationPostgresUpdateSSLModesPrefer
- if r.Configuration.SslMode.DestinationPostgresUpdateSSLModesPrefer != nil {
- mode2 := shared.DestinationPostgresUpdateSSLModesPreferMode(r.Configuration.SslMode.DestinationPostgresUpdateSSLModesPrefer.Mode.ValueString())
- destinationPostgresUpdateSSLModesPrefer = &shared.DestinationPostgresUpdateSSLModesPrefer{
- Mode: mode2,
- }
+ var prefer *shared.Prefer
+ if r.Configuration.SslMode.Prefer != nil {
+ prefer = &shared.Prefer{}
}
- if destinationPostgresUpdateSSLModesPrefer != nil {
- sslMode = &shared.DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesPrefer: destinationPostgresUpdateSSLModesPrefer,
+ if prefer != nil {
+ sslMode = &shared.SSLModes{
+ Prefer: prefer,
}
}
- var destinationPostgresUpdateSSLModesRequire *shared.DestinationPostgresUpdateSSLModesRequire
- if r.Configuration.SslMode.DestinationPostgresUpdateSSLModesRequire != nil {
- mode3 := shared.DestinationPostgresUpdateSSLModesRequireMode(r.Configuration.SslMode.DestinationPostgresUpdateSSLModesRequire.Mode.ValueString())
- destinationPostgresUpdateSSLModesRequire = &shared.DestinationPostgresUpdateSSLModesRequire{
- Mode: mode3,
- }
+ var require *shared.Require
+ if r.Configuration.SslMode.Require != nil {
+ require = &shared.Require{}
}
- if destinationPostgresUpdateSSLModesRequire != nil {
- sslMode = &shared.DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesRequire: destinationPostgresUpdateSSLModesRequire,
+ if require != nil {
+ sslMode = &shared.SSLModes{
+ Require: require,
}
}
- var destinationPostgresUpdateSSLModesVerifyCa *shared.DestinationPostgresUpdateSSLModesVerifyCa
- if r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyCa != nil {
- caCertificate := r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyCa.CaCertificate.ValueString()
+ var verifyCa *shared.VerifyCa
+ if r.Configuration.SslMode.VerifyCa != nil {
+ caCertificate := r.Configuration.SslMode.VerifyCa.CaCertificate.ValueString()
clientKeyPassword := new(string)
- if !r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyCa.ClientKeyPassword.IsNull() {
- *clientKeyPassword = r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyCa.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsNull() {
+ *clientKeyPassword = r.Configuration.SslMode.VerifyCa.ClientKeyPassword.ValueString()
} else {
clientKeyPassword = nil
}
- mode4 := shared.DestinationPostgresUpdateSSLModesVerifyCaMode(r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyCa.Mode.ValueString())
- destinationPostgresUpdateSSLModesVerifyCa = &shared.DestinationPostgresUpdateSSLModesVerifyCa{
+ verifyCa = &shared.VerifyCa{
CaCertificate: caCertificate,
ClientKeyPassword: clientKeyPassword,
- Mode: mode4,
}
}
- if destinationPostgresUpdateSSLModesVerifyCa != nil {
- sslMode = &shared.DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesVerifyCa: destinationPostgresUpdateSSLModesVerifyCa,
+ if verifyCa != nil {
+ sslMode = &shared.SSLModes{
+ VerifyCa: verifyCa,
}
}
- var destinationPostgresUpdateSSLModesVerifyFull *shared.DestinationPostgresUpdateSSLModesVerifyFull
- if r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyFull != nil {
- caCertificate1 := r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyFull.CaCertificate.ValueString()
- clientCertificate := r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyFull.ClientCertificate.ValueString()
- clientKey := r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyFull.ClientKey.ValueString()
+ var verifyFull *shared.VerifyFull
+ if r.Configuration.SslMode.VerifyFull != nil {
+ caCertificate1 := r.Configuration.SslMode.VerifyFull.CaCertificate.ValueString()
+ clientCertificate := r.Configuration.SslMode.VerifyFull.ClientCertificate.ValueString()
+ clientKey := r.Configuration.SslMode.VerifyFull.ClientKey.ValueString()
clientKeyPassword1 := new(string)
- if !r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyFull.ClientKeyPassword.IsNull() {
- *clientKeyPassword1 = r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyFull.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsNull() {
+ *clientKeyPassword1 = r.Configuration.SslMode.VerifyFull.ClientKeyPassword.ValueString()
} else {
clientKeyPassword1 = nil
}
- mode5 := shared.DestinationPostgresUpdateSSLModesVerifyFullMode(r.Configuration.SslMode.DestinationPostgresUpdateSSLModesVerifyFull.Mode.ValueString())
- destinationPostgresUpdateSSLModesVerifyFull = &shared.DestinationPostgresUpdateSSLModesVerifyFull{
+ verifyFull = &shared.VerifyFull{
CaCertificate: caCertificate1,
ClientCertificate: clientCertificate,
ClientKey: clientKey,
ClientKeyPassword: clientKeyPassword1,
- Mode: mode5,
}
}
- if destinationPostgresUpdateSSLModesVerifyFull != nil {
- sslMode = &shared.DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesVerifyFull: destinationPostgresUpdateSSLModesVerifyFull,
+ if verifyFull != nil {
+ sslMode = &shared.SSLModes{
+ VerifyFull: verifyFull,
}
}
}
var tunnelMethod *shared.DestinationPostgresUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationPostgresUpdateSSHTunnelMethodNoTunnel *shared.DestinationPostgresUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationPostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationPostgresUpdateSSHTunnelMethodNoTunnel = &shared.DestinationPostgresUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationPostgresUpdateNoTunnel *shared.DestinationPostgresUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationPostgresUpdateNoTunnel = &shared.DestinationPostgresUpdateNoTunnel{}
}
- if destinationPostgresUpdateSSHTunnelMethodNoTunnel != nil {
+ if destinationPostgresUpdateNoTunnel != nil {
tunnelMethod = &shared.DestinationPostgresUpdateSSHTunnelMethod{
- DestinationPostgresUpdateSSHTunnelMethodNoTunnel: destinationPostgresUpdateSSHTunnelMethodNoTunnel,
+ DestinationPostgresUpdateNoTunnel: destinationPostgresUpdateNoTunnel,
}
}
- var destinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication *shared.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationPostgresUpdateSSHKeyAuthentication *shared.DestinationPostgresUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationPostgresUpdateSSHKeyAuthentication = &shared.DestinationPostgresUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationPostgresUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationPostgresUpdateSSHTunnelMethod{
- DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication: destinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationPostgresUpdateSSHKeyAuthentication: destinationPostgresUpdateSSHKeyAuthentication,
}
}
- var destinationPostgresUpdateSSHTunnelMethodPasswordAuthentication *shared.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationPostgresUpdateSSHTunnelMethodPasswordAuthentication = &shared.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication{
+ var destinationPostgresUpdatePasswordAuthentication *shared.DestinationPostgresUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationPostgresUpdatePasswordAuthentication = &shared.DestinationPostgresUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationPostgresUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationPostgresUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.DestinationPostgresUpdateSSHTunnelMethod{
- DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication: destinationPostgresUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationPostgresUpdatePasswordAuthentication: destinationPostgresUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/destination_pubsub_data_source.go b/internal/provider/destination_pubsub_data_source.go
old mode 100755
new mode 100644
index 239100c03..afb4a2266
--- a/internal/provider/destination_pubsub_data_source.go
+++ b/internal/provider/destination_pubsub_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationPubsubDataSource struct {
// DestinationPubsubDataSourceModel describes the data model.
type DestinationPubsubDataSourceModel struct {
- Configuration DestinationPubsub `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,55 +47,17 @@ func (r *DestinationPubsubDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "DestinationPubsub DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "batching_delay_threshold": schema.Int64Attribute{
- Computed: true,
- Description: `Number of ms before the buffer is flushed`,
- },
- "batching_element_count_threshold": schema.Int64Attribute{
- Computed: true,
- Description: `Number of messages before the buffer is flushed`,
- },
- "batching_enabled": schema.BoolAttribute{
- Computed: true,
- Description: `If TRUE messages will be buffered instead of sending them one by one`,
- },
- "batching_request_bytes_threshold": schema.Int64Attribute{
- Computed: true,
- Description: `Number of bytes before the buffer is flushed`,
- },
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The contents of the JSON service account key. Check out the docs if you need help generating this key.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pubsub",
- ),
- },
- Description: `must be one of ["pubsub"]`,
- },
- "ordering_enabled": schema.BoolAttribute{
- Computed: true,
- Description: `If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key of stream`,
- },
- "project_id": schema.StringAttribute{
- Computed: true,
- Description: `The GCP project ID for the project containing the target PubSub.`,
- },
- "topic_id": schema.StringAttribute{
- Computed: true,
- Description: `The PubSub topic ID in the given GCP project ID.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_pubsub_data_source_sdk.go b/internal/provider/destination_pubsub_data_source_sdk.go
old mode 100755
new mode 100644
index 05f7a1e74..12cad2420
--- a/internal/provider/destination_pubsub_data_source_sdk.go
+++ b/internal/provider/destination_pubsub_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationPubsubDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_pubsub_resource.go b/internal/provider/destination_pubsub_resource.go
old mode 100755
new mode 100644
index f23b8742b..dc067b6a0
--- a/internal/provider/destination_pubsub_resource.go
+++ b/internal/provider/destination_pubsub_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationPubsubResource struct {
// DestinationPubsubResourceModel describes the resource data model.
type DestinationPubsubResourceModel struct {
Configuration DestinationPubsub `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -54,37 +54,33 @@ func (r *DestinationPubsubResource) Schema(ctx context.Context, req resource.Sch
Required: true,
Attributes: map[string]schema.Attribute{
"batching_delay_threshold": schema.Int64Attribute{
- Optional: true,
- Description: `Number of ms before the buffer is flushed`,
+ Optional: true,
+ MarkdownDescription: `Default: 1` + "\n" +
+ `Number of ms before the buffer is flushed`,
},
"batching_element_count_threshold": schema.Int64Attribute{
- Optional: true,
- Description: `Number of messages before the buffer is flushed`,
+ Optional: true,
+ MarkdownDescription: `Default: 1` + "\n" +
+ `Number of messages before the buffer is flushed`,
},
"batching_enabled": schema.BoolAttribute{
- Required: true,
- Description: `If TRUE messages will be buffered instead of sending them one by one`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `If TRUE messages will be buffered instead of sending them one by one`,
},
"batching_request_bytes_threshold": schema.Int64Attribute{
- Optional: true,
- Description: `Number of bytes before the buffer is flushed`,
+ Optional: true,
+ MarkdownDescription: `Default: 1` + "\n" +
+ `Number of bytes before the buffer is flushed`,
},
"credentials_json": schema.StringAttribute{
Required: true,
Description: `The contents of the JSON service account key. Check out the docs if you need help generating this key.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pubsub",
- ),
- },
- Description: `must be one of ["pubsub"]`,
- },
"ordering_enabled": schema.BoolAttribute{
- Required: true,
- Description: `If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key of stream`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key of stream`,
},
"project_id": schema.StringAttribute{
Required: true,
@@ -96,6 +92,13 @@ func (r *DestinationPubsubResource) Schema(ctx context.Context, req resource.Sch
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -112,7 +115,8 @@ func (r *DestinationPubsubResource) Schema(ctx context.Context, req resource.Sch
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -162,7 +166,7 @@ func (r *DestinationPubsubResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationPubsub(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -338,5 +342,5 @@ func (r *DestinationPubsubResource) Delete(ctx context.Context, req resource.Del
}
func (r *DestinationPubsubResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_pubsub_resource_sdk.go b/internal/provider/destination_pubsub_resource_sdk.go
old mode 100755
new mode 100644
index 15aeacd90..136cb8381
--- a/internal/provider/destination_pubsub_resource_sdk.go
+++ b/internal/provider/destination_pubsub_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -20,7 +20,12 @@ func (r *DestinationPubsubResourceModel) ToCreateSDKType() *shared.DestinationPu
} else {
batchingElementCountThreshold = nil
}
- batchingEnabled := r.Configuration.BatchingEnabled.ValueBool()
+ batchingEnabled := new(bool)
+ if !r.Configuration.BatchingEnabled.IsUnknown() && !r.Configuration.BatchingEnabled.IsNull() {
+ *batchingEnabled = r.Configuration.BatchingEnabled.ValueBool()
+ } else {
+ batchingEnabled = nil
+ }
batchingRequestBytesThreshold := new(int64)
if !r.Configuration.BatchingRequestBytesThreshold.IsUnknown() && !r.Configuration.BatchingRequestBytesThreshold.IsNull() {
*batchingRequestBytesThreshold = r.Configuration.BatchingRequestBytesThreshold.ValueInt64()
@@ -28,8 +33,12 @@ func (r *DestinationPubsubResourceModel) ToCreateSDKType() *shared.DestinationPu
batchingRequestBytesThreshold = nil
}
credentialsJSON := r.Configuration.CredentialsJSON.ValueString()
- destinationType := shared.DestinationPubsubPubsub(r.Configuration.DestinationType.ValueString())
- orderingEnabled := r.Configuration.OrderingEnabled.ValueBool()
+ orderingEnabled := new(bool)
+ if !r.Configuration.OrderingEnabled.IsUnknown() && !r.Configuration.OrderingEnabled.IsNull() {
+ *orderingEnabled = r.Configuration.OrderingEnabled.ValueBool()
+ } else {
+ orderingEnabled = nil
+ }
projectID := r.Configuration.ProjectID.ValueString()
topicID := r.Configuration.TopicID.ValueString()
configuration := shared.DestinationPubsub{
@@ -38,15 +47,21 @@ func (r *DestinationPubsubResourceModel) ToCreateSDKType() *shared.DestinationPu
BatchingEnabled: batchingEnabled,
BatchingRequestBytesThreshold: batchingRequestBytesThreshold,
CredentialsJSON: credentialsJSON,
- DestinationType: destinationType,
OrderingEnabled: orderingEnabled,
ProjectID: projectID,
TopicID: topicID,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationPubsubCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -71,7 +86,12 @@ func (r *DestinationPubsubResourceModel) ToUpdateSDKType() *shared.DestinationPu
} else {
batchingElementCountThreshold = nil
}
- batchingEnabled := r.Configuration.BatchingEnabled.ValueBool()
+ batchingEnabled := new(bool)
+ if !r.Configuration.BatchingEnabled.IsUnknown() && !r.Configuration.BatchingEnabled.IsNull() {
+ *batchingEnabled = r.Configuration.BatchingEnabled.ValueBool()
+ } else {
+ batchingEnabled = nil
+ }
batchingRequestBytesThreshold := new(int64)
if !r.Configuration.BatchingRequestBytesThreshold.IsUnknown() && !r.Configuration.BatchingRequestBytesThreshold.IsNull() {
*batchingRequestBytesThreshold = r.Configuration.BatchingRequestBytesThreshold.ValueInt64()
@@ -79,7 +99,12 @@ func (r *DestinationPubsubResourceModel) ToUpdateSDKType() *shared.DestinationPu
batchingRequestBytesThreshold = nil
}
credentialsJSON := r.Configuration.CredentialsJSON.ValueString()
- orderingEnabled := r.Configuration.OrderingEnabled.ValueBool()
+ orderingEnabled := new(bool)
+ if !r.Configuration.OrderingEnabled.IsUnknown() && !r.Configuration.OrderingEnabled.IsNull() {
+ *orderingEnabled = r.Configuration.OrderingEnabled.ValueBool()
+ } else {
+ orderingEnabled = nil
+ }
projectID := r.Configuration.ProjectID.ValueString()
topicID := r.Configuration.TopicID.ValueString()
configuration := shared.DestinationPubsubUpdate{
diff --git a/internal/provider/destination_qdrant_data_source.go b/internal/provider/destination_qdrant_data_source.go
new file mode 100644
index 000000000..478cfff68
--- /dev/null
+++ b/internal/provider/destination_qdrant_data_source.go
@@ -0,0 +1,137 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ datasource.DataSource = &DestinationQdrantDataSource{}
+var _ datasource.DataSourceWithConfigure = &DestinationQdrantDataSource{}
+
+func NewDestinationQdrantDataSource() datasource.DataSource {
+ return &DestinationQdrantDataSource{}
+}
+
+// DestinationQdrantDataSource is the data source implementation.
+type DestinationQdrantDataSource struct {
+ client *sdk.SDK
+}
+
+// DestinationQdrantDataSourceModel describes the data model.
+type DestinationQdrantDataSourceModel struct {
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+// Metadata returns the data source type name.
+func (r *DestinationQdrantDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_destination_qdrant"
+}
+
+// Schema defines the schema for the data source.
+func (r *DestinationQdrantDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "DestinationQdrant DataSource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.StringAttribute{
+ Computed: true,
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
+ },
+ "destination_id": schema.StringAttribute{
+ Required: true,
+ },
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ },
+ "workspace_id": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ }
+}
+
+func (r *DestinationQdrantDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ client, ok := req.ProviderData.(*sdk.SDK)
+
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected DataSource Configure Type",
+ fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = client
+}
+
+func (r *DestinationQdrantDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data *DestinationQdrantDataSourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ destinationID := data.DestinationID.ValueString()
+ request := operations.GetDestinationQdrantRequest{
+ DestinationID: destinationID,
+ }
+ res, err := r.client.Destinations.GetDestinationQdrant(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.DestinationResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(res.DestinationResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/internal/provider/destination_qdrant_data_source_sdk.go b/internal/provider/destination_qdrant_data_source_sdk.go
new file mode 100644
index 000000000..a3d9db322
--- /dev/null
+++ b/internal/provider/destination_qdrant_data_source_sdk.go
@@ -0,0 +1,18 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *DestinationQdrantDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
+ r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
+ r.Name = types.StringValue(resp.Name)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
diff --git a/internal/provider/destination_qdrant_resource.go b/internal/provider/destination_qdrant_resource.go
new file mode 100644
index 000000000..659ed2436
--- /dev/null
+++ b/internal/provider/destination_qdrant_resource.go
@@ -0,0 +1,583 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ resource.Resource = &DestinationQdrantResource{}
+var _ resource.ResourceWithImportState = &DestinationQdrantResource{}
+
+func NewDestinationQdrantResource() resource.Resource {
+ return &DestinationQdrantResource{}
+}
+
+// DestinationQdrantResource defines the resource implementation.
+type DestinationQdrantResource struct {
+ client *sdk.SDK
+}
+
+// DestinationQdrantResourceModel describes the resource data model.
+type DestinationQdrantResourceModel struct {
+ Configuration DestinationQdrant `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+func (r *DestinationQdrantResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_destination_qdrant"
+}
+
+func (r *DestinationQdrantResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "DestinationQdrant Resource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "embedding": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "azure_open_ai": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "api_base": schema.StringAttribute{
+ Required: true,
+ Description: `The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
+ },
+ "deployment": schema.StringAttribute{
+ Required: true,
+ Description: `The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
+ },
+ "openai_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ Description: `The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
+ },
+ },
+ Description: `Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
+ },
+ "cohere": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "cohere_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ },
+ },
+ Description: `Use the Cohere API to embed text.`,
+ },
+ "fake": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
+ },
+ "from_field": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "dimensions": schema.Int64Attribute{
+ Required: true,
+ Description: `The number of dimensions the embedding model is generating`,
+ },
+ "field_name": schema.StringAttribute{
+ Required: true,
+ Description: `Name of the field in the record that contains the embedding`,
+ },
+ },
+ Description: `Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.`,
+ },
+ "open_ai": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "openai_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ },
+ },
+ Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
+ },
+ "open_ai_compatible": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "api_key": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `Default: ""`,
+ },
+ "base_url": schema.StringAttribute{
+ Required: true,
+ Description: `The base URL for your OpenAI-compatible service`,
+ },
+ "dimensions": schema.Int64Attribute{
+ Required: true,
+ Description: `The number of dimensions the embedding model is generating`,
+ },
+ "model_name": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "text-embedding-ada-002"` + "\n" +
+ `The name of the model to use for embedding`,
+ },
+ },
+ Description: `Use a service that's compatible with the OpenAI API to embed text.`,
+ },
+ },
+ Description: `Embedding configuration`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "indexing": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "auth_method": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "api_key_auth": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "api_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ Description: `API Key for the Qdrant instance`,
+ },
+ },
+ Description: `Method to authenticate with the Qdrant Instance`,
+ },
+ "no_auth": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `Method to authenticate with the Qdrant Instance`,
+ },
+ },
+ Description: `Method to authenticate with the Qdrant Instance`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "collection": schema.StringAttribute{
+ Required: true,
+ Description: `The collection to load data into`,
+ },
+ "distance_metric": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "cos": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the does not exist yet and is created automatically by the connector.`,
+ },
+ "dot": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the does not exist yet and is created automatically by the connector.`,
+ },
+ "euc": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the does not exist yet and is created automatically by the connector.`,
+ },
+ },
+ Description: `The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the does not exist yet and is created automatically by the connector.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "prefer_grpc": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether to prefer gRPC over HTTP. Set to true for Qdrant cloud clusters`,
+ },
+ "text_field": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "text"` + "\n" +
+ `The field in the payload that contains the embedded text`,
+ },
+ "url": schema.StringAttribute{
+ Required: true,
+							Description: `Public Endpoint of the Qdrant cluster`,
+ },
+ },
+ Description: `Indexing configuration`,
+ },
+ "processing": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "chunk_overlap": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`,
+ },
+ "chunk_size": schema.Int64Attribute{
+ Required: true,
+							Description: `Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)`,
+ },
+ "field_name_mappings": schema.ListNestedAttribute{
+ Optional: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "from_field": schema.StringAttribute{
+ Required: true,
+ Description: `The field name in the source`,
+ },
+ "to_field": schema.StringAttribute{
+ Required: true,
+ Description: `The field name to use in the destination`,
+ },
+ },
+ },
+ Description: `List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.`,
+ },
+ "metadata_fields": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.`,
+ },
+ "text_fields": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array.`,
+ },
+ "text_splitter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "by_markdown_header": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "split_level": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 1` + "\n" +
+ `Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points`,
+ },
+ },
+ Description: `Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.`,
+ },
+ "by_programming_language": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "language": schema.StringAttribute{
+ Required: true,
+ MarkdownDescription: `must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]` + "\n" +
+ `Split code in suitable places based on the programming language`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "cpp",
+ "go",
+ "java",
+ "js",
+ "php",
+ "proto",
+ "python",
+ "rst",
+ "ruby",
+ "rust",
+ "scala",
+ "swift",
+ "markdown",
+ "latex",
+ "html",
+ "sol",
+ ),
+ },
+ },
+ },
+ Description: `Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.`,
+ },
+ "by_separator": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "keep_separator": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether to keep the separator in the resulting chunks`,
+ },
+ "separators": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".`,
+ },
+ },
+ Description: `Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.`,
+ },
+ },
+ Description: `Split text fields into chunks based on the specified method.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ },
+ },
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
+ "destination_id": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ },
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ },
+ "name": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
+ },
+ "workspace_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ Required: true,
+ },
+ },
+ }
+}
+
+func (r *DestinationQdrantResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ client, ok := req.ProviderData.(*sdk.SDK)
+
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected Resource Configure Type",
+ fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = client
+}
+
+func (r *DestinationQdrantResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data *DestinationQdrantResourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ request := data.ToCreateSDKType()
+ res, err := r.client.Destinations.CreateDestinationQdrant(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.DestinationResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromCreateResponse(res.DestinationResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *DestinationQdrantResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var data *DestinationQdrantResourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ destinationID := data.DestinationID.ValueString()
+ request := operations.GetDestinationQdrantRequest{
+ DestinationID: destinationID,
+ }
+ res, err := r.client.Destinations.GetDestinationQdrant(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.DestinationResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(res.DestinationResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *DestinationQdrantResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data *DestinationQdrantResourceModel
+ merge(ctx, req, resp, &data)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ destinationQdrantPutRequest := data.ToUpdateSDKType()
+ destinationID := data.DestinationID.ValueString()
+ request := operations.PutDestinationQdrantRequest{
+ DestinationQdrantPutRequest: destinationQdrantPutRequest,
+ DestinationID: destinationID,
+ }
+ res, err := r.client.Destinations.PutDestinationQdrant(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ destinationId1 := data.DestinationID.ValueString()
+ getRequest := operations.GetDestinationQdrantRequest{
+ DestinationID: destinationId1,
+ }
+ getResponse, err := r.client.Destinations.GetDestinationQdrant(ctx, getRequest)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if getResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse))
+ return
+ }
+ if getResponse.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse))
+ return
+ }
+ if getResponse.DestinationResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(getResponse.DestinationResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *DestinationQdrantResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var data *DestinationQdrantResourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ destinationID := data.DestinationID.ValueString()
+ request := operations.DeleteDestinationQdrantRequest{
+ DestinationID: destinationID,
+ }
+ res, err := r.client.Destinations.DeleteDestinationQdrant(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+
+}
+
+func (r *DestinationQdrantResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
+}
diff --git a/internal/provider/destination_qdrant_resource_sdk.go b/internal/provider/destination_qdrant_resource_sdk.go
new file mode 100644
index 000000000..e3f0d21b6
--- /dev/null
+++ b/internal/provider/destination_qdrant_resource_sdk.go
@@ -0,0 +1,576 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *DestinationQdrantResourceModel) ToCreateSDKType() *shared.DestinationQdrantCreateRequest {
+ var embedding shared.DestinationQdrantEmbedding
+ var destinationQdrantOpenAI *shared.DestinationQdrantOpenAI
+ if r.Configuration.Embedding.OpenAI != nil {
+ openaiKey := r.Configuration.Embedding.OpenAI.OpenaiKey.ValueString()
+ destinationQdrantOpenAI = &shared.DestinationQdrantOpenAI{
+ OpenaiKey: openaiKey,
+ }
+ }
+ if destinationQdrantOpenAI != nil {
+ embedding = shared.DestinationQdrantEmbedding{
+ DestinationQdrantOpenAI: destinationQdrantOpenAI,
+ }
+ }
+ var destinationQdrantCohere *shared.DestinationQdrantCohere
+ if r.Configuration.Embedding.Cohere != nil {
+ cohereKey := r.Configuration.Embedding.Cohere.CohereKey.ValueString()
+ destinationQdrantCohere = &shared.DestinationQdrantCohere{
+ CohereKey: cohereKey,
+ }
+ }
+ if destinationQdrantCohere != nil {
+ embedding = shared.DestinationQdrantEmbedding{
+ DestinationQdrantCohere: destinationQdrantCohere,
+ }
+ }
+ var destinationQdrantFake *shared.DestinationQdrantFake
+ if r.Configuration.Embedding.Fake != nil {
+ destinationQdrantFake = &shared.DestinationQdrantFake{}
+ }
+ if destinationQdrantFake != nil {
+ embedding = shared.DestinationQdrantEmbedding{
+ DestinationQdrantFake: destinationQdrantFake,
+ }
+ }
+ var destinationQdrantFromField *shared.DestinationQdrantFromField
+ if r.Configuration.Embedding.FromField != nil {
+ dimensions := r.Configuration.Embedding.FromField.Dimensions.ValueInt64()
+ fieldName := r.Configuration.Embedding.FromField.FieldName.ValueString()
+ destinationQdrantFromField = &shared.DestinationQdrantFromField{
+ Dimensions: dimensions,
+ FieldName: fieldName,
+ }
+ }
+ if destinationQdrantFromField != nil {
+ embedding = shared.DestinationQdrantEmbedding{
+ DestinationQdrantFromField: destinationQdrantFromField,
+ }
+ }
+ var destinationQdrantAzureOpenAI *shared.DestinationQdrantAzureOpenAI
+ if r.Configuration.Embedding.AzureOpenAI != nil {
+ apiBase := r.Configuration.Embedding.AzureOpenAI.APIBase.ValueString()
+ deployment := r.Configuration.Embedding.AzureOpenAI.Deployment.ValueString()
+ openaiKey1 := r.Configuration.Embedding.AzureOpenAI.OpenaiKey.ValueString()
+ destinationQdrantAzureOpenAI = &shared.DestinationQdrantAzureOpenAI{
+ APIBase: apiBase,
+ Deployment: deployment,
+ OpenaiKey: openaiKey1,
+ }
+ }
+ if destinationQdrantAzureOpenAI != nil {
+ embedding = shared.DestinationQdrantEmbedding{
+ DestinationQdrantAzureOpenAI: destinationQdrantAzureOpenAI,
+ }
+ }
+ var destinationQdrantOpenAICompatible *shared.DestinationQdrantOpenAICompatible
+ if r.Configuration.Embedding.OpenAICompatible != nil {
+ apiKey := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.APIKey.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.APIKey.IsNull() {
+ *apiKey = r.Configuration.Embedding.OpenAICompatible.APIKey.ValueString()
+ } else {
+ apiKey = nil
+ }
+ baseURL := r.Configuration.Embedding.OpenAICompatible.BaseURL.ValueString()
+ dimensions1 := r.Configuration.Embedding.OpenAICompatible.Dimensions.ValueInt64()
+ modelName := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.ModelName.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.ModelName.IsNull() {
+ *modelName = r.Configuration.Embedding.OpenAICompatible.ModelName.ValueString()
+ } else {
+ modelName = nil
+ }
+ destinationQdrantOpenAICompatible = &shared.DestinationQdrantOpenAICompatible{
+ APIKey: apiKey,
+ BaseURL: baseURL,
+ Dimensions: dimensions1,
+ ModelName: modelName,
+ }
+ }
+ if destinationQdrantOpenAICompatible != nil {
+ embedding = shared.DestinationQdrantEmbedding{
+ DestinationQdrantOpenAICompatible: destinationQdrantOpenAICompatible,
+ }
+ }
+ var authMethod *shared.DestinationQdrantAuthenticationMethod
+ if r.Configuration.Indexing.AuthMethod != nil {
+ var destinationQdrantAPIKeyAuth *shared.DestinationQdrantAPIKeyAuth
+ if r.Configuration.Indexing.AuthMethod.APIKeyAuth != nil {
+ apiKey1 := r.Configuration.Indexing.AuthMethod.APIKeyAuth.APIKey.ValueString()
+ destinationQdrantAPIKeyAuth = &shared.DestinationQdrantAPIKeyAuth{
+ APIKey: apiKey1,
+ }
+ }
+ if destinationQdrantAPIKeyAuth != nil {
+ authMethod = &shared.DestinationQdrantAuthenticationMethod{
+ DestinationQdrantAPIKeyAuth: destinationQdrantAPIKeyAuth,
+ }
+ }
+ var destinationQdrantNoAuth *shared.DestinationQdrantNoAuth
+ if r.Configuration.Indexing.AuthMethod.NoAuth != nil {
+ destinationQdrantNoAuth = &shared.DestinationQdrantNoAuth{}
+ }
+ if destinationQdrantNoAuth != nil {
+ authMethod = &shared.DestinationQdrantAuthenticationMethod{
+ DestinationQdrantNoAuth: destinationQdrantNoAuth,
+ }
+ }
+ }
+ collection := r.Configuration.Indexing.Collection.ValueString()
+ var distanceMetric *shared.DestinationQdrantDistanceMetric
+ if r.Configuration.Indexing.DistanceMetric != nil {
+ var destinationQdrantDot *shared.DestinationQdrantDot
+ if r.Configuration.Indexing.DistanceMetric.Dot != nil {
+ destinationQdrantDot = &shared.DestinationQdrantDot{}
+ }
+ if destinationQdrantDot != nil {
+ distanceMetric = &shared.DestinationQdrantDistanceMetric{
+ DestinationQdrantDot: destinationQdrantDot,
+ }
+ }
+ var destinationQdrantCos *shared.DestinationQdrantCos
+ if r.Configuration.Indexing.DistanceMetric.Cos != nil {
+ destinationQdrantCos = &shared.DestinationQdrantCos{}
+ }
+ if destinationQdrantCos != nil {
+ distanceMetric = &shared.DestinationQdrantDistanceMetric{
+ DestinationQdrantCos: destinationQdrantCos,
+ }
+ }
+ var destinationQdrantEuc *shared.DestinationQdrantEuc
+ if r.Configuration.Indexing.DistanceMetric.Euc != nil {
+ destinationQdrantEuc = &shared.DestinationQdrantEuc{}
+ }
+ if destinationQdrantEuc != nil {
+ distanceMetric = &shared.DestinationQdrantDistanceMetric{
+ DestinationQdrantEuc: destinationQdrantEuc,
+ }
+ }
+ }
+ preferGrpc := new(bool)
+ if !r.Configuration.Indexing.PreferGrpc.IsUnknown() && !r.Configuration.Indexing.PreferGrpc.IsNull() {
+ *preferGrpc = r.Configuration.Indexing.PreferGrpc.ValueBool()
+ } else {
+ preferGrpc = nil
+ }
+ textField := new(string)
+ if !r.Configuration.Indexing.TextField.IsUnknown() && !r.Configuration.Indexing.TextField.IsNull() {
+ *textField = r.Configuration.Indexing.TextField.ValueString()
+ } else {
+ textField = nil
+ }
+ url := r.Configuration.Indexing.URL.ValueString()
+ indexing := shared.DestinationQdrantIndexing{
+ AuthMethod: authMethod,
+ Collection: collection,
+ DistanceMetric: distanceMetric,
+ PreferGrpc: preferGrpc,
+ TextField: textField,
+ URL: url,
+ }
+ chunkOverlap := new(int64)
+ if !r.Configuration.Processing.ChunkOverlap.IsUnknown() && !r.Configuration.Processing.ChunkOverlap.IsNull() {
+ *chunkOverlap = r.Configuration.Processing.ChunkOverlap.ValueInt64()
+ } else {
+ chunkOverlap = nil
+ }
+ chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64()
+ var fieldNameMappings []shared.DestinationQdrantFieldNameMappingConfigModel = nil
+ for _, fieldNameMappingsItem := range r.Configuration.Processing.FieldNameMappings {
+ fromField := fieldNameMappingsItem.FromField.ValueString()
+ toField := fieldNameMappingsItem.ToField.ValueString()
+ fieldNameMappings = append(fieldNameMappings, shared.DestinationQdrantFieldNameMappingConfigModel{
+ FromField: fromField,
+ ToField: toField,
+ })
+ }
+ var metadataFields []string = nil
+ for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields {
+ metadataFields = append(metadataFields, metadataFieldsItem.ValueString())
+ }
+ var textFields []string = nil
+ for _, textFieldsItem := range r.Configuration.Processing.TextFields {
+ textFields = append(textFields, textFieldsItem.ValueString())
+ }
+ var textSplitter *shared.DestinationQdrantTextSplitter
+ if r.Configuration.Processing.TextSplitter != nil {
+ var destinationQdrantBySeparator *shared.DestinationQdrantBySeparator
+ if r.Configuration.Processing.TextSplitter.BySeparator != nil {
+ keepSeparator := new(bool)
+ if !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsUnknown() && !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsNull() {
+ *keepSeparator = r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.ValueBool()
+ } else {
+ keepSeparator = nil
+ }
+ var separators []string = nil
+ for _, separatorsItem := range r.Configuration.Processing.TextSplitter.BySeparator.Separators {
+ separators = append(separators, separatorsItem.ValueString())
+ }
+ destinationQdrantBySeparator = &shared.DestinationQdrantBySeparator{
+ KeepSeparator: keepSeparator,
+ Separators: separators,
+ }
+ }
+ if destinationQdrantBySeparator != nil {
+ textSplitter = &shared.DestinationQdrantTextSplitter{
+ DestinationQdrantBySeparator: destinationQdrantBySeparator,
+ }
+ }
+ var destinationQdrantByMarkdownHeader *shared.DestinationQdrantByMarkdownHeader
+ if r.Configuration.Processing.TextSplitter.ByMarkdownHeader != nil {
+ splitLevel := new(int64)
+ if !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsUnknown() && !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsNull() {
+ *splitLevel = r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.ValueInt64()
+ } else {
+ splitLevel = nil
+ }
+ destinationQdrantByMarkdownHeader = &shared.DestinationQdrantByMarkdownHeader{
+ SplitLevel: splitLevel,
+ }
+ }
+ if destinationQdrantByMarkdownHeader != nil {
+ textSplitter = &shared.DestinationQdrantTextSplitter{
+ DestinationQdrantByMarkdownHeader: destinationQdrantByMarkdownHeader,
+ }
+ }
+ var destinationQdrantByProgrammingLanguage *shared.DestinationQdrantByProgrammingLanguage
+ if r.Configuration.Processing.TextSplitter.ByProgrammingLanguage != nil {
+ language := shared.DestinationQdrantLanguage(r.Configuration.Processing.TextSplitter.ByProgrammingLanguage.Language.ValueString())
+ destinationQdrantByProgrammingLanguage = &shared.DestinationQdrantByProgrammingLanguage{
+ Language: language,
+ }
+ }
+ if destinationQdrantByProgrammingLanguage != nil {
+ textSplitter = &shared.DestinationQdrantTextSplitter{
+ DestinationQdrantByProgrammingLanguage: destinationQdrantByProgrammingLanguage,
+ }
+ }
+ }
+ processing := shared.DestinationQdrantProcessingConfigModel{
+ ChunkOverlap: chunkOverlap,
+ ChunkSize: chunkSize,
+ FieldNameMappings: fieldNameMappings,
+ MetadataFields: metadataFields,
+ TextFields: textFields,
+ TextSplitter: textSplitter,
+ }
+ configuration := shared.DestinationQdrant{
+ Embedding: embedding,
+ Indexing: indexing,
+ Processing: processing,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
+ name := r.Name.ValueString()
+ workspaceID := r.WorkspaceID.ValueString()
+ out := shared.DestinationQdrantCreateRequest{
+ Configuration: configuration,
+ DefinitionID: definitionID,
+ Name: name,
+ WorkspaceID: workspaceID,
+ }
+ return &out
+}
+
+func (r *DestinationQdrantResourceModel) ToGetSDKType() *shared.DestinationQdrantCreateRequest {
+ out := r.ToCreateSDKType()
+ return out
+}
+
+func (r *DestinationQdrantResourceModel) ToUpdateSDKType() *shared.DestinationQdrantPutRequest {
+ var embedding shared.DestinationQdrantUpdateEmbedding
+ var destinationQdrantUpdateOpenAI *shared.DestinationQdrantUpdateOpenAI
+ if r.Configuration.Embedding.OpenAI != nil {
+ openaiKey := r.Configuration.Embedding.OpenAI.OpenaiKey.ValueString()
+ destinationQdrantUpdateOpenAI = &shared.DestinationQdrantUpdateOpenAI{
+ OpenaiKey: openaiKey,
+ }
+ }
+ if destinationQdrantUpdateOpenAI != nil {
+ embedding = shared.DestinationQdrantUpdateEmbedding{
+ DestinationQdrantUpdateOpenAI: destinationQdrantUpdateOpenAI,
+ }
+ }
+ var destinationQdrantUpdateCohere *shared.DestinationQdrantUpdateCohere
+ if r.Configuration.Embedding.Cohere != nil {
+ cohereKey := r.Configuration.Embedding.Cohere.CohereKey.ValueString()
+ destinationQdrantUpdateCohere = &shared.DestinationQdrantUpdateCohere{
+ CohereKey: cohereKey,
+ }
+ }
+ if destinationQdrantUpdateCohere != nil {
+ embedding = shared.DestinationQdrantUpdateEmbedding{
+ DestinationQdrantUpdateCohere: destinationQdrantUpdateCohere,
+ }
+ }
+ var destinationQdrantUpdateFake *shared.DestinationQdrantUpdateFake
+ if r.Configuration.Embedding.Fake != nil {
+ destinationQdrantUpdateFake = &shared.DestinationQdrantUpdateFake{}
+ }
+ if destinationQdrantUpdateFake != nil {
+ embedding = shared.DestinationQdrantUpdateEmbedding{
+ DestinationQdrantUpdateFake: destinationQdrantUpdateFake,
+ }
+ }
+ var destinationQdrantUpdateFromField *shared.DestinationQdrantUpdateFromField
+ if r.Configuration.Embedding.FromField != nil {
+ dimensions := r.Configuration.Embedding.FromField.Dimensions.ValueInt64()
+ fieldName := r.Configuration.Embedding.FromField.FieldName.ValueString()
+ destinationQdrantUpdateFromField = &shared.DestinationQdrantUpdateFromField{
+ Dimensions: dimensions,
+ FieldName: fieldName,
+ }
+ }
+ if destinationQdrantUpdateFromField != nil {
+ embedding = shared.DestinationQdrantUpdateEmbedding{
+ DestinationQdrantUpdateFromField: destinationQdrantUpdateFromField,
+ }
+ }
+ var destinationQdrantUpdateAzureOpenAI *shared.DestinationQdrantUpdateAzureOpenAI
+ if r.Configuration.Embedding.AzureOpenAI != nil {
+ apiBase := r.Configuration.Embedding.AzureOpenAI.APIBase.ValueString()
+ deployment := r.Configuration.Embedding.AzureOpenAI.Deployment.ValueString()
+ openaiKey1 := r.Configuration.Embedding.AzureOpenAI.OpenaiKey.ValueString()
+ destinationQdrantUpdateAzureOpenAI = &shared.DestinationQdrantUpdateAzureOpenAI{
+ APIBase: apiBase,
+ Deployment: deployment,
+ OpenaiKey: openaiKey1,
+ }
+ }
+ if destinationQdrantUpdateAzureOpenAI != nil {
+ embedding = shared.DestinationQdrantUpdateEmbedding{
+ DestinationQdrantUpdateAzureOpenAI: destinationQdrantUpdateAzureOpenAI,
+ }
+ }
+ var destinationQdrantUpdateOpenAICompatible *shared.DestinationQdrantUpdateOpenAICompatible
+ if r.Configuration.Embedding.OpenAICompatible != nil {
+ apiKey := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.APIKey.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.APIKey.IsNull() {
+ *apiKey = r.Configuration.Embedding.OpenAICompatible.APIKey.ValueString()
+ } else {
+ apiKey = nil
+ }
+ baseURL := r.Configuration.Embedding.OpenAICompatible.BaseURL.ValueString()
+ dimensions1 := r.Configuration.Embedding.OpenAICompatible.Dimensions.ValueInt64()
+ modelName := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.ModelName.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.ModelName.IsNull() {
+ *modelName = r.Configuration.Embedding.OpenAICompatible.ModelName.ValueString()
+ } else {
+ modelName = nil
+ }
+ destinationQdrantUpdateOpenAICompatible = &shared.DestinationQdrantUpdateOpenAICompatible{
+ APIKey: apiKey,
+ BaseURL: baseURL,
+ Dimensions: dimensions1,
+ ModelName: modelName,
+ }
+ }
+ if destinationQdrantUpdateOpenAICompatible != nil {
+ embedding = shared.DestinationQdrantUpdateEmbedding{
+ DestinationQdrantUpdateOpenAICompatible: destinationQdrantUpdateOpenAICompatible,
+ }
+ }
+ var authMethod *shared.DestinationQdrantUpdateAuthenticationMethod
+ if r.Configuration.Indexing.AuthMethod != nil {
+ var apiKeyAuth *shared.APIKeyAuth
+ if r.Configuration.Indexing.AuthMethod.APIKeyAuth != nil {
+ apiKey1 := r.Configuration.Indexing.AuthMethod.APIKeyAuth.APIKey.ValueString()
+ apiKeyAuth = &shared.APIKeyAuth{
+ APIKey: apiKey1,
+ }
+ }
+ if apiKeyAuth != nil {
+ authMethod = &shared.DestinationQdrantUpdateAuthenticationMethod{
+ APIKeyAuth: apiKeyAuth,
+ }
+ }
+ var destinationQdrantUpdateNoAuth *shared.DestinationQdrantUpdateNoAuth
+ if r.Configuration.Indexing.AuthMethod.NoAuth != nil {
+ destinationQdrantUpdateNoAuth = &shared.DestinationQdrantUpdateNoAuth{}
+ }
+ if destinationQdrantUpdateNoAuth != nil {
+ authMethod = &shared.DestinationQdrantUpdateAuthenticationMethod{
+ DestinationQdrantUpdateNoAuth: destinationQdrantUpdateNoAuth,
+ }
+ }
+ }
+ collection := r.Configuration.Indexing.Collection.ValueString()
+ var distanceMetric *shared.DistanceMetric
+ if r.Configuration.Indexing.DistanceMetric != nil {
+ var dot *shared.Dot
+ if r.Configuration.Indexing.DistanceMetric.Dot != nil {
+ dot = &shared.Dot{}
+ }
+ if dot != nil {
+ distanceMetric = &shared.DistanceMetric{
+ Dot: dot,
+ }
+ }
+ var cos *shared.Cos
+ if r.Configuration.Indexing.DistanceMetric.Cos != nil {
+ cos = &shared.Cos{}
+ }
+ if cos != nil {
+ distanceMetric = &shared.DistanceMetric{
+ Cos: cos,
+ }
+ }
+ var euc *shared.Euc
+ if r.Configuration.Indexing.DistanceMetric.Euc != nil {
+ euc = &shared.Euc{}
+ }
+ if euc != nil {
+ distanceMetric = &shared.DistanceMetric{
+ Euc: euc,
+ }
+ }
+ }
+ preferGrpc := new(bool)
+ if !r.Configuration.Indexing.PreferGrpc.IsUnknown() && !r.Configuration.Indexing.PreferGrpc.IsNull() {
+ *preferGrpc = r.Configuration.Indexing.PreferGrpc.ValueBool()
+ } else {
+ preferGrpc = nil
+ }
+ textField := new(string)
+ if !r.Configuration.Indexing.TextField.IsUnknown() && !r.Configuration.Indexing.TextField.IsNull() {
+ *textField = r.Configuration.Indexing.TextField.ValueString()
+ } else {
+ textField = nil
+ }
+ url := r.Configuration.Indexing.URL.ValueString()
+ indexing := shared.DestinationQdrantUpdateIndexing{
+ AuthMethod: authMethod,
+ Collection: collection,
+ DistanceMetric: distanceMetric,
+ PreferGrpc: preferGrpc,
+ TextField: textField,
+ URL: url,
+ }
+ chunkOverlap := new(int64)
+ if !r.Configuration.Processing.ChunkOverlap.IsUnknown() && !r.Configuration.Processing.ChunkOverlap.IsNull() {
+ *chunkOverlap = r.Configuration.Processing.ChunkOverlap.ValueInt64()
+ } else {
+ chunkOverlap = nil
+ }
+ chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64()
+ var fieldNameMappings []shared.DestinationQdrantUpdateFieldNameMappingConfigModel = nil
+ for _, fieldNameMappingsItem := range r.Configuration.Processing.FieldNameMappings {
+ fromField := fieldNameMappingsItem.FromField.ValueString()
+ toField := fieldNameMappingsItem.ToField.ValueString()
+ fieldNameMappings = append(fieldNameMappings, shared.DestinationQdrantUpdateFieldNameMappingConfigModel{
+ FromField: fromField,
+ ToField: toField,
+ })
+ }
+ var metadataFields []string = nil
+ for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields {
+ metadataFields = append(metadataFields, metadataFieldsItem.ValueString())
+ }
+ var textFields []string = nil
+ for _, textFieldsItem := range r.Configuration.Processing.TextFields {
+ textFields = append(textFields, textFieldsItem.ValueString())
+ }
+ var textSplitter *shared.DestinationQdrantUpdateTextSplitter
+ if r.Configuration.Processing.TextSplitter != nil {
+ var destinationQdrantUpdateBySeparator *shared.DestinationQdrantUpdateBySeparator
+ if r.Configuration.Processing.TextSplitter.BySeparator != nil {
+ keepSeparator := new(bool)
+ if !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsUnknown() && !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsNull() {
+ *keepSeparator = r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.ValueBool()
+ } else {
+ keepSeparator = nil
+ }
+ var separators []string = nil
+ for _, separatorsItem := range r.Configuration.Processing.TextSplitter.BySeparator.Separators {
+ separators = append(separators, separatorsItem.ValueString())
+ }
+ destinationQdrantUpdateBySeparator = &shared.DestinationQdrantUpdateBySeparator{
+ KeepSeparator: keepSeparator,
+ Separators: separators,
+ }
+ }
+ if destinationQdrantUpdateBySeparator != nil {
+ textSplitter = &shared.DestinationQdrantUpdateTextSplitter{
+ DestinationQdrantUpdateBySeparator: destinationQdrantUpdateBySeparator,
+ }
+ }
+ var destinationQdrantUpdateByMarkdownHeader *shared.DestinationQdrantUpdateByMarkdownHeader
+ if r.Configuration.Processing.TextSplitter.ByMarkdownHeader != nil {
+ splitLevel := new(int64)
+ if !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsUnknown() && !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsNull() {
+ *splitLevel = r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.ValueInt64()
+ } else {
+ splitLevel = nil
+ }
+ destinationQdrantUpdateByMarkdownHeader = &shared.DestinationQdrantUpdateByMarkdownHeader{
+ SplitLevel: splitLevel,
+ }
+ }
+ if destinationQdrantUpdateByMarkdownHeader != nil {
+ textSplitter = &shared.DestinationQdrantUpdateTextSplitter{
+ DestinationQdrantUpdateByMarkdownHeader: destinationQdrantUpdateByMarkdownHeader,
+ }
+ }
+ var destinationQdrantUpdateByProgrammingLanguage *shared.DestinationQdrantUpdateByProgrammingLanguage
+ if r.Configuration.Processing.TextSplitter.ByProgrammingLanguage != nil {
+ language := shared.DestinationQdrantUpdateLanguage(r.Configuration.Processing.TextSplitter.ByProgrammingLanguage.Language.ValueString())
+ destinationQdrantUpdateByProgrammingLanguage = &shared.DestinationQdrantUpdateByProgrammingLanguage{
+ Language: language,
+ }
+ }
+ if destinationQdrantUpdateByProgrammingLanguage != nil {
+ textSplitter = &shared.DestinationQdrantUpdateTextSplitter{
+ DestinationQdrantUpdateByProgrammingLanguage: destinationQdrantUpdateByProgrammingLanguage,
+ }
+ }
+ }
+ processing := shared.DestinationQdrantUpdateProcessingConfigModel{
+ ChunkOverlap: chunkOverlap,
+ ChunkSize: chunkSize,
+ FieldNameMappings: fieldNameMappings,
+ MetadataFields: metadataFields,
+ TextFields: textFields,
+ TextSplitter: textSplitter,
+ }
+ configuration := shared.DestinationQdrantUpdate{
+ Embedding: embedding,
+ Indexing: indexing,
+ Processing: processing,
+ }
+ name := r.Name.ValueString()
+ workspaceID := r.WorkspaceID.ValueString()
+ out := shared.DestinationQdrantPutRequest{
+ Configuration: configuration,
+ Name: name,
+ WorkspaceID: workspaceID,
+ }
+ return &out
+}
+
+func (r *DestinationQdrantResourceModel) ToDeleteSDKType() *shared.DestinationQdrantCreateRequest {
+ out := r.ToCreateSDKType()
+ return out
+}
+
+func (r *DestinationQdrantResourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
+ r.Name = types.StringValue(resp.Name)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
+
+func (r *DestinationQdrantResourceModel) RefreshFromCreateResponse(resp *shared.DestinationResponse) {
+ r.RefreshFromGetResponse(resp)
+}
diff --git a/internal/provider/destination_redis_data_source.go b/internal/provider/destination_redis_data_source.go
old mode 100755
new mode 100644
index ced2eea1c..12910630a
--- a/internal/provider/destination_redis_data_source.go
+++ b/internal/provider/destination_redis_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationRedisDataSource struct {
// DestinationRedisDataSourceModel describes the data model.
type DestinationRedisDataSourceModel struct {
- Configuration DestinationRedis `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,324 +47,17 @@ func (r *DestinationRedisDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "DestinationRedis DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "cache_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "hash",
- ),
- },
- MarkdownDescription: `must be one of ["hash"]` + "\n" +
- `Redis cache type to store data in.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "redis",
- ),
- },
- Description: `must be one of ["redis"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Redis host to connect to.`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with Redis.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `Port of Redis.`,
- },
- "ssl": schema.BoolAttribute{
- Computed: true,
- Description: `Indicates whether SSL encryption protocol will be used to connect to Redis. It is recommended to use SSL connection if possible.`,
- },
- "ssl_mode": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_redis_ssl_modes_disable": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- },
- Description: `Disable SSL.`,
- },
- "destination_redis_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- },
- Description: `Verify-full SSL mode.`,
- },
- "destination_redis_update_ssl_modes_disable": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- },
- Description: `Disable SSL.`,
- },
- "destination_redis_update_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- },
- Description: `Verify-full SSL mode.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- MarkdownDescription: `SSL connection modes. ` + "\n" +
- ` verify-full - This is the most secure mode. Always require encryption and verifies the identity of the source database server`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_redis_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redis_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redis_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redis_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redis_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redis_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username associated with Redis.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_redis_data_source_sdk.go b/internal/provider/destination_redis_data_source_sdk.go
old mode 100755
new mode 100644
index de67eb7ae..57af74f44
--- a/internal/provider/destination_redis_data_source_sdk.go
+++ b/internal/provider/destination_redis_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationRedisDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_redis_resource.go b/internal/provider/destination_redis_resource.go
old mode 100755
new mode 100644
index a6300b037..0b373a57c
--- a/internal/provider/destination_redis_resource.go
+++ b/internal/provider/destination_redis_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationRedisResource struct {
// DestinationRedisResourceModel describes the resource data model.
type DestinationRedisResourceModel struct {
Configuration DestinationRedis `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -55,23 +57,14 @@ func (r *DestinationRedisResource) Schema(ctx context.Context, req resource.Sche
Required: true,
Attributes: map[string]schema.Attribute{
"cache_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "hash",
- ),
- },
- MarkdownDescription: `must be one of ["hash"]` + "\n" +
+ Optional: true,
+ MarkdownDescription: `must be one of ["hash"]; Default: "hash"` + "\n" +
`Redis cache type to store data in.`,
- },
- "destination_type": schema.StringAttribute{
- Required: true,
Validators: []validator.String{
stringvalidator.OneOf(
- "redis",
+ "hash",
),
},
- Description: `must be one of ["redis"]`,
},
"host": schema.StringAttribute{
Required: true,
@@ -79,81 +72,28 @@ func (r *DestinationRedisResource) Schema(ctx context.Context, req resource.Sche
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password associated with Redis.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `Port of Redis.`,
+ Optional: true,
+ MarkdownDescription: `Default: 6379` + "\n" +
+ `Port of Redis.`,
},
"ssl": schema.BoolAttribute{
- Optional: true,
- Description: `Indicates whether SSL encryption protocol will be used to connect to Redis. It is recommended to use SSL connection if possible.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Indicates whether SSL encryption protocol will be used to connect to Redis. It is recommended to use SSL connection if possible.`,
},
"ssl_mode": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_redis_ssl_modes_disable": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- },
- Description: `Disable SSL.`,
- },
- "destination_redis_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Required: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Required: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Required: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Optional: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- },
- Description: `Verify-full SSL mode.`,
- },
- "destination_redis_update_ssl_modes_disable": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- },
+ "disable": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Disable SSL.`,
},
- "destination_redis_update_ssl_modes_verify_full": schema.SingleNestedAttribute{
+ "verify_full": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ca_certificate": schema.StringAttribute{
@@ -166,70 +106,43 @@ func (r *DestinationRedisResource) Schema(ctx context.Context, req resource.Sche
},
"client_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Client key`,
},
"client_key_password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
},
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
},
Description: `Verify-full SSL mode.`,
},
},
+ MarkdownDescription: `SSL connection modes. ` + "\n" +
+ ` verify-full - This is the most secure mode. Always require encryption and verifies the identity of the source database server`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- MarkdownDescription: `SSL connection modes. ` + "\n" +
- ` verify-full - This is the most secure mode. Always require encryption and verifies the identity of the source database server`,
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_redis_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_redis_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -237,115 +150,28 @@ func (r *DestinationRedisResource) Schema(ctx context.Context, req resource.Sche
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_redis_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redis_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redis_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redis_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -355,10 +181,10 @@ func (r *DestinationRedisResource) Schema(ctx context.Context, req resource.Sche
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -366,6 +192,13 @@ func (r *DestinationRedisResource) Schema(ctx context.Context, req resource.Sche
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -382,7 +215,8 @@ func (r *DestinationRedisResource) Schema(ctx context.Context, req resource.Sche
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -432,7 +266,7 @@ func (r *DestinationRedisResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationRedis(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -608,5 +442,5 @@ func (r *DestinationRedisResource) Delete(ctx context.Context, req resource.Dele
}
func (r *DestinationRedisResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_redis_resource_sdk.go b/internal/provider/destination_redis_resource_sdk.go
old mode 100755
new mode 100644
index edddc8713..b23bd06e5
--- a/internal/provider/destination_redis_resource_sdk.go
+++ b/internal/provider/destination_redis_resource_sdk.go
@@ -3,13 +3,17 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationRedisResourceModel) ToCreateSDKType() *shared.DestinationRedisCreateRequest {
- cacheType := shared.DestinationRedisCacheType(r.Configuration.CacheType.ValueString())
- destinationType := shared.DestinationRedisRedis(r.Configuration.DestinationType.ValueString())
+ cacheType := new(shared.DestinationRedisCacheType)
+ if !r.Configuration.CacheType.IsUnknown() && !r.Configuration.CacheType.IsNull() {
+ *cacheType = shared.DestinationRedisCacheType(r.Configuration.CacheType.ValueString())
+ } else {
+ cacheType = nil
+ }
host := r.Configuration.Host.ValueString()
password := new(string)
if !r.Configuration.Password.IsUnknown() && !r.Configuration.Password.IsNull() {
@@ -17,7 +21,12 @@ func (r *DestinationRedisResourceModel) ToCreateSDKType() *shared.DestinationRed
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
ssl := new(bool)
if !r.Configuration.Ssl.IsUnknown() && !r.Configuration.Ssl.IsNull() {
*ssl = r.Configuration.Ssl.ValueBool()
@@ -26,115 +35,119 @@ func (r *DestinationRedisResourceModel) ToCreateSDKType() *shared.DestinationRed
}
var sslMode *shared.DestinationRedisSSLModes
if r.Configuration.SslMode != nil {
- var destinationRedisSSLModesDisable *shared.DestinationRedisSSLModesDisable
- if r.Configuration.SslMode.DestinationRedisSSLModesDisable != nil {
- mode := shared.DestinationRedisSSLModesDisableMode(r.Configuration.SslMode.DestinationRedisSSLModesDisable.Mode.ValueString())
- destinationRedisSSLModesDisable = &shared.DestinationRedisSSLModesDisable{
- Mode: mode,
- }
+ var destinationRedisDisable *shared.DestinationRedisDisable
+ if r.Configuration.SslMode.Disable != nil {
+ destinationRedisDisable = &shared.DestinationRedisDisable{}
}
- if destinationRedisSSLModesDisable != nil {
+ if destinationRedisDisable != nil {
sslMode = &shared.DestinationRedisSSLModes{
- DestinationRedisSSLModesDisable: destinationRedisSSLModesDisable,
+ DestinationRedisDisable: destinationRedisDisable,
}
}
- var destinationRedisSSLModesVerifyFull *shared.DestinationRedisSSLModesVerifyFull
- if r.Configuration.SslMode.DestinationRedisSSLModesVerifyFull != nil {
- caCertificate := r.Configuration.SslMode.DestinationRedisSSLModesVerifyFull.CaCertificate.ValueString()
- clientCertificate := r.Configuration.SslMode.DestinationRedisSSLModesVerifyFull.ClientCertificate.ValueString()
- clientKey := r.Configuration.SslMode.DestinationRedisSSLModesVerifyFull.ClientKey.ValueString()
+ var destinationRedisVerifyFull *shared.DestinationRedisVerifyFull
+ if r.Configuration.SslMode.VerifyFull != nil {
+ caCertificate := r.Configuration.SslMode.VerifyFull.CaCertificate.ValueString()
+ clientCertificate := r.Configuration.SslMode.VerifyFull.ClientCertificate.ValueString()
+ clientKey := r.Configuration.SslMode.VerifyFull.ClientKey.ValueString()
clientKeyPassword := new(string)
- if !r.Configuration.SslMode.DestinationRedisSSLModesVerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.DestinationRedisSSLModesVerifyFull.ClientKeyPassword.IsNull() {
- *clientKeyPassword = r.Configuration.SslMode.DestinationRedisSSLModesVerifyFull.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsNull() {
+ *clientKeyPassword = r.Configuration.SslMode.VerifyFull.ClientKeyPassword.ValueString()
} else {
clientKeyPassword = nil
}
- mode1 := shared.DestinationRedisSSLModesVerifyFullMode(r.Configuration.SslMode.DestinationRedisSSLModesVerifyFull.Mode.ValueString())
- destinationRedisSSLModesVerifyFull = &shared.DestinationRedisSSLModesVerifyFull{
+ destinationRedisVerifyFull = &shared.DestinationRedisVerifyFull{
CaCertificate: caCertificate,
ClientCertificate: clientCertificate,
ClientKey: clientKey,
ClientKeyPassword: clientKeyPassword,
- Mode: mode1,
}
}
- if destinationRedisSSLModesVerifyFull != nil {
+ if destinationRedisVerifyFull != nil {
sslMode = &shared.DestinationRedisSSLModes{
- DestinationRedisSSLModesVerifyFull: destinationRedisSSLModesVerifyFull,
+ DestinationRedisVerifyFull: destinationRedisVerifyFull,
}
}
}
var tunnelMethod *shared.DestinationRedisSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationRedisSSHTunnelMethodNoTunnel *shared.DestinationRedisSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationRedisSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationRedisSSHTunnelMethodNoTunnel = &shared.DestinationRedisSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationRedisNoTunnel *shared.DestinationRedisNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationRedisNoTunnel = &shared.DestinationRedisNoTunnel{}
}
- if destinationRedisSSHTunnelMethodNoTunnel != nil {
+ if destinationRedisNoTunnel != nil {
tunnelMethod = &shared.DestinationRedisSSHTunnelMethod{
- DestinationRedisSSHTunnelMethodNoTunnel: destinationRedisSSHTunnelMethodNoTunnel,
+ DestinationRedisNoTunnel: destinationRedisNoTunnel,
}
}
- var destinationRedisSSHTunnelMethodSSHKeyAuthentication *shared.DestinationRedisSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationRedisSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationRedisSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationRedisSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationRedisSSHKeyAuthentication *shared.DestinationRedisSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationRedisSSHKeyAuthentication = &shared.DestinationRedisSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationRedisSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationRedisSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationRedisSSHTunnelMethod{
- DestinationRedisSSHTunnelMethodSSHKeyAuthentication: destinationRedisSSHTunnelMethodSSHKeyAuthentication,
+ DestinationRedisSSHKeyAuthentication: destinationRedisSSHKeyAuthentication,
}
}
- var destinationRedisSSHTunnelMethodPasswordAuthentication *shared.DestinationRedisSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationRedisSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationRedisSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationRedisSSHTunnelMethodPasswordAuthentication = &shared.DestinationRedisSSHTunnelMethodPasswordAuthentication{
+ var destinationRedisPasswordAuthentication *shared.DestinationRedisPasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationRedisPasswordAuthentication = &shared.DestinationRedisPasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationRedisSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationRedisPasswordAuthentication != nil {
tunnelMethod = &shared.DestinationRedisSSHTunnelMethod{
- DestinationRedisSSHTunnelMethodPasswordAuthentication: destinationRedisSSHTunnelMethodPasswordAuthentication,
+ DestinationRedisPasswordAuthentication: destinationRedisPasswordAuthentication,
}
}
}
username := r.Configuration.Username.ValueString()
configuration := shared.DestinationRedis{
- CacheType: cacheType,
- DestinationType: destinationType,
- Host: host,
- Password: password,
- Port: port,
- Ssl: ssl,
- SslMode: sslMode,
- TunnelMethod: tunnelMethod,
- Username: username,
+ CacheType: cacheType,
+ Host: host,
+ Password: password,
+ Port: port,
+ Ssl: ssl,
+ SslMode: sslMode,
+ TunnelMethod: tunnelMethod,
+ Username: username,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationRedisCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -147,7 +160,12 @@ func (r *DestinationRedisResourceModel) ToGetSDKType() *shared.DestinationRedisC
}
func (r *DestinationRedisResourceModel) ToUpdateSDKType() *shared.DestinationRedisPutRequest {
- cacheType := shared.DestinationRedisUpdateCacheType(r.Configuration.CacheType.ValueString())
+ cacheType := new(shared.CacheType)
+ if !r.Configuration.CacheType.IsUnknown() && !r.Configuration.CacheType.IsNull() {
+ *cacheType = shared.CacheType(r.Configuration.CacheType.ValueString())
+ } else {
+ cacheType = nil
+ }
host := r.Configuration.Host.ValueString()
password := new(string)
if !r.Configuration.Password.IsUnknown() && !r.Configuration.Password.IsNull() {
@@ -155,7 +173,12 @@ func (r *DestinationRedisResourceModel) ToUpdateSDKType() *shared.DestinationRed
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
ssl := new(bool)
if !r.Configuration.Ssl.IsUnknown() && !r.Configuration.Ssl.IsNull() {
*ssl = r.Configuration.Ssl.ValueBool()
@@ -164,96 +187,94 @@ func (r *DestinationRedisResourceModel) ToUpdateSDKType() *shared.DestinationRed
}
var sslMode *shared.DestinationRedisUpdateSSLModes
if r.Configuration.SslMode != nil {
- var destinationRedisUpdateSSLModesDisable *shared.DestinationRedisUpdateSSLModesDisable
- if r.Configuration.SslMode.DestinationRedisUpdateSSLModesDisable != nil {
- mode := shared.DestinationRedisUpdateSSLModesDisableMode(r.Configuration.SslMode.DestinationRedisUpdateSSLModesDisable.Mode.ValueString())
- destinationRedisUpdateSSLModesDisable = &shared.DestinationRedisUpdateSSLModesDisable{
- Mode: mode,
- }
+ var destinationRedisUpdateDisable *shared.DestinationRedisUpdateDisable
+ if r.Configuration.SslMode.Disable != nil {
+ destinationRedisUpdateDisable = &shared.DestinationRedisUpdateDisable{}
}
- if destinationRedisUpdateSSLModesDisable != nil {
+ if destinationRedisUpdateDisable != nil {
sslMode = &shared.DestinationRedisUpdateSSLModes{
- DestinationRedisUpdateSSLModesDisable: destinationRedisUpdateSSLModesDisable,
+ DestinationRedisUpdateDisable: destinationRedisUpdateDisable,
}
}
- var destinationRedisUpdateSSLModesVerifyFull *shared.DestinationRedisUpdateSSLModesVerifyFull
- if r.Configuration.SslMode.DestinationRedisUpdateSSLModesVerifyFull != nil {
- caCertificate := r.Configuration.SslMode.DestinationRedisUpdateSSLModesVerifyFull.CaCertificate.ValueString()
- clientCertificate := r.Configuration.SslMode.DestinationRedisUpdateSSLModesVerifyFull.ClientCertificate.ValueString()
- clientKey := r.Configuration.SslMode.DestinationRedisUpdateSSLModesVerifyFull.ClientKey.ValueString()
+ var destinationRedisUpdateVerifyFull *shared.DestinationRedisUpdateVerifyFull
+ if r.Configuration.SslMode.VerifyFull != nil {
+ caCertificate := r.Configuration.SslMode.VerifyFull.CaCertificate.ValueString()
+ clientCertificate := r.Configuration.SslMode.VerifyFull.ClientCertificate.ValueString()
+ clientKey := r.Configuration.SslMode.VerifyFull.ClientKey.ValueString()
clientKeyPassword := new(string)
- if !r.Configuration.SslMode.DestinationRedisUpdateSSLModesVerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.DestinationRedisUpdateSSLModesVerifyFull.ClientKeyPassword.IsNull() {
- *clientKeyPassword = r.Configuration.SslMode.DestinationRedisUpdateSSLModesVerifyFull.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsNull() {
+ *clientKeyPassword = r.Configuration.SslMode.VerifyFull.ClientKeyPassword.ValueString()
} else {
clientKeyPassword = nil
}
- mode1 := shared.DestinationRedisUpdateSSLModesVerifyFullMode(r.Configuration.SslMode.DestinationRedisUpdateSSLModesVerifyFull.Mode.ValueString())
- destinationRedisUpdateSSLModesVerifyFull = &shared.DestinationRedisUpdateSSLModesVerifyFull{
+ destinationRedisUpdateVerifyFull = &shared.DestinationRedisUpdateVerifyFull{
CaCertificate: caCertificate,
ClientCertificate: clientCertificate,
ClientKey: clientKey,
ClientKeyPassword: clientKeyPassword,
- Mode: mode1,
}
}
- if destinationRedisUpdateSSLModesVerifyFull != nil {
+ if destinationRedisUpdateVerifyFull != nil {
sslMode = &shared.DestinationRedisUpdateSSLModes{
- DestinationRedisUpdateSSLModesVerifyFull: destinationRedisUpdateSSLModesVerifyFull,
+ DestinationRedisUpdateVerifyFull: destinationRedisUpdateVerifyFull,
}
}
}
var tunnelMethod *shared.DestinationRedisUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationRedisUpdateSSHTunnelMethodNoTunnel *shared.DestinationRedisUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationRedisUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationRedisUpdateSSHTunnelMethodNoTunnel = &shared.DestinationRedisUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationRedisUpdateNoTunnel *shared.DestinationRedisUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationRedisUpdateNoTunnel = &shared.DestinationRedisUpdateNoTunnel{}
}
- if destinationRedisUpdateSSHTunnelMethodNoTunnel != nil {
+ if destinationRedisUpdateNoTunnel != nil {
tunnelMethod = &shared.DestinationRedisUpdateSSHTunnelMethod{
- DestinationRedisUpdateSSHTunnelMethodNoTunnel: destinationRedisUpdateSSHTunnelMethodNoTunnel,
+ DestinationRedisUpdateNoTunnel: destinationRedisUpdateNoTunnel,
}
}
- var destinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication *shared.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationRedisUpdateSSHKeyAuthentication *shared.DestinationRedisUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationRedisUpdateSSHKeyAuthentication = &shared.DestinationRedisUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationRedisUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationRedisUpdateSSHTunnelMethod{
- DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication: destinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationRedisUpdateSSHKeyAuthentication: destinationRedisUpdateSSHKeyAuthentication,
}
}
- var destinationRedisUpdateSSHTunnelMethodPasswordAuthentication *shared.DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationRedisUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationRedisUpdateSSHTunnelMethodPasswordAuthentication = &shared.DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication{
+ var destinationRedisUpdatePasswordAuthentication *shared.DestinationRedisUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationRedisUpdatePasswordAuthentication = &shared.DestinationRedisUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationRedisUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationRedisUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.DestinationRedisUpdateSSHTunnelMethod{
- DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication: destinationRedisUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationRedisUpdatePasswordAuthentication: destinationRedisUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/destination_redshift_data_source.go b/internal/provider/destination_redshift_data_source.go
old mode 100755
new mode 100644
index 5933025db..b3e614806
--- a/internal/provider/destination_redshift_data_source.go
+++ b/internal/provider/destination_redshift_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationRedshiftDataSource struct {
// DestinationRedshiftDataSourceModel describes the data model.
type DestinationRedshiftDataSourceModel struct {
- Configuration DestinationRedshift1 `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,497 +47,17 @@ func (r *DestinationRedshiftDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "DestinationRedshift DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `Name of the database.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "redshift",
- ),
- },
- Description: `must be one of ["redshift"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `Port of the database.`,
- },
- "schema": schema.StringAttribute{
- Computed: true,
- Description: `The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public".`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_redshift_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redshift_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redshift_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redshift_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redshift_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redshift_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "uploading_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_redshift_uploading_method_s3_staging": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.`,
- },
- "encryption": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_redshift_uploading_method_s3_staging_encryption_no_encryption": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "encryption_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "none",
- ),
- },
- Description: `must be one of ["none"]`,
- },
- },
- Description: `Staging data will be stored in plaintext.`,
- },
- "destination_redshift_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "encryption_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aes_cbc_envelope",
- ),
- },
- Description: `must be one of ["aes_cbc_envelope"]`,
- },
- "key_encrypting_key": schema.StringAttribute{
- Computed: true,
- Description: `The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.`,
- },
- },
- Description: `Staging data will be encrypted using AES-CBC envelope encryption.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `How to encrypt the staging data`,
- },
- "file_buffer_count": schema.Int64Attribute{
- Computed: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
- },
- "file_name_pattern": schema.StringAttribute{
- Computed: true,
- Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3 Staging",
- ),
- },
- Description: `must be one of ["S3 Staging"]`,
- },
- "purge_staging_data": schema.BoolAttribute{
- Computed: true,
- Description: `Whether to delete the staging files from S3 after completing the sync. See docs for details.`,
- },
- "s3_bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.`,
- },
- "s3_bucket_path": schema.StringAttribute{
- Computed: true,
- Description: `The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.`,
- },
- "s3_bucket_region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "",
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- ),
- },
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1"]` + "\n" +
- `The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.`,
- },
- "secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.`,
- },
- },
- Description: `The method how the data will be uploaded to the database.`,
- },
- "destination_redshift_uploading_method_standard": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `The method how the data will be uploaded to the database.`,
- },
- "destination_redshift_update_uploading_method_s3_staging": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.`,
- },
- "encryption": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_redshift_update_uploading_method_s3_staging_encryption_no_encryption": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "encryption_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "none",
- ),
- },
- Description: `must be one of ["none"]`,
- },
- },
- Description: `Staging data will be stored in plaintext.`,
- },
- "destination_redshift_update_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "encryption_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aes_cbc_envelope",
- ),
- },
- Description: `must be one of ["aes_cbc_envelope"]`,
- },
- "key_encrypting_key": schema.StringAttribute{
- Computed: true,
- Description: `The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.`,
- },
- },
- Description: `Staging data will be encrypted using AES-CBC envelope encryption.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `How to encrypt the staging data`,
- },
- "file_buffer_count": schema.Int64Attribute{
- Computed: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
- },
- "file_name_pattern": schema.StringAttribute{
- Computed: true,
- Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3 Staging",
- ),
- },
- Description: `must be one of ["S3 Staging"]`,
- },
- "purge_staging_data": schema.BoolAttribute{
- Computed: true,
- Description: `Whether to delete the staging files from S3 after completing the sync. See docs for details.`,
- },
- "s3_bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.`,
- },
- "s3_bucket_path": schema.StringAttribute{
- Computed: true,
- Description: `The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.`,
- },
- "s3_bucket_region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "",
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- ),
- },
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1"]` + "\n" +
- `The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.`,
- },
- "secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.`,
- },
- },
- Description: `The method how the data will be uploaded to the database.`,
- },
- "destination_redshift_update_uploading_method_standard": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `The method how the data will be uploaded to the database.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The method how the data will be uploaded to the database.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to use to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_redshift_data_source_sdk.go b/internal/provider/destination_redshift_data_source_sdk.go
old mode 100755
new mode 100644
index f227d887b..98aa75305
--- a/internal/provider/destination_redshift_data_source_sdk.go
+++ b/internal/provider/destination_redshift_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationRedshiftDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_redshift_resource.go b/internal/provider/destination_redshift_resource.go
old mode 100755
new mode 100644
index ae284096e..523ddf318
--- a/internal/provider/destination_redshift_resource.go
+++ b/internal/provider/destination_redshift_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationRedshiftResource struct {
// DestinationRedshiftResourceModel describes the resource data model.
type DestinationRedshiftResourceModel struct {
Configuration DestinationRedshift `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -58,15 +60,6 @@ func (r *DestinationRedshiftResource) Schema(ctx context.Context, req resource.S
Required: true,
Description: `Name of the database.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "redshift",
- ),
- },
- Description: `must be one of ["redshift"]`,
- },
"host": schema.StringAttribute{
Required: true,
Description: `Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)`,
@@ -77,135 +70,38 @@ func (r *DestinationRedshiftResource) Schema(ctx context.Context, req resource.S
},
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Password associated with the username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `Port of the database.`,
+ Optional: true,
+ MarkdownDescription: `Default: 5439` + "\n" +
+ `Port of the database.`,
},
"schema": schema.StringAttribute{
- Required: true,
- Description: `The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public".`,
+ Optional: true,
+ MarkdownDescription: `Default: "public"` + "\n" +
+ `The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public".`,
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_redshift_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redshift_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redshift_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_redshift_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_redshift_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -213,35 +109,28 @@ func (r *DestinationRedshiftResource) Schema(ctx context.Context, req resource.S
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_redshift_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -251,229 +140,73 @@ func (r *DestinationRedshiftResource) Schema(ctx context.Context, req resource.S
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"uploading_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_redshift_uploading_method_s3_staging": schema.SingleNestedAttribute{
+ "s3_staging": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_key_id": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.`,
},
"encryption": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_redshift_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption": schema.SingleNestedAttribute{
+ "aescbc_envelope_encryption": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "encryption_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aes_cbc_envelope",
- ),
- },
- Description: `must be one of ["aes_cbc_envelope"]`,
- },
"key_encrypting_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.`,
},
},
Description: `Staging data will be encrypted using AES-CBC envelope encryption.`,
},
- "destination_redshift_uploading_method_s3_staging_encryption_no_encryption": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "encryption_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "none",
- ),
- },
- Description: `must be one of ["none"]`,
- },
- },
+ "no_encryption": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Staging data will be stored in plaintext.`,
},
},
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
Description: `How to encrypt the staging data`,
- },
- "file_buffer_count": schema.Int64Attribute{
- Optional: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
- },
- "file_name_pattern": schema.StringAttribute{
- Optional: true,
- Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3 Staging",
- ),
- },
- Description: `must be one of ["S3 Staging"]`,
- },
- "purge_staging_data": schema.BoolAttribute{
- Optional: true,
- Description: `Whether to delete the staging files from S3 after completing the sync. See docs for details.`,
- },
- "s3_bucket_name": schema.StringAttribute{
- Required: true,
- Description: `The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.`,
- },
- "s3_bucket_path": schema.StringAttribute{
- Optional: true,
- Description: `The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.`,
- },
- "s3_bucket_region": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "",
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- ),
- },
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1"]` + "\n" +
- `The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.`,
- },
- "secret_access_key": schema.StringAttribute{
- Required: true,
- Description: `The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.`,
- },
- },
- Description: `The method how the data will be uploaded to the database.`,
- },
- "destination_redshift_uploading_method_standard": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `The method how the data will be uploaded to the database.`,
- },
- "destination_redshift_update_uploading_method_s3_staging": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_key_id": schema.StringAttribute{
- Required: true,
- Description: `This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.`,
- },
- "encryption": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "destination_redshift_update_uploading_method_s3_staging_encryption_no_encryption": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "encryption_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "none",
- ),
- },
- Description: `must be one of ["none"]`,
- },
- },
- Description: `Staging data will be stored in plaintext.`,
- },
- "destination_redshift_update_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "encryption_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aes_cbc_envelope",
- ),
- },
- Description: `must be one of ["aes_cbc_envelope"]`,
- },
- "key_encrypting_key": schema.StringAttribute{
- Optional: true,
- Description: `The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.`,
- },
- },
- Description: `Staging data will be encrypted using AES-CBC envelope encryption.`,
- },
- },
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `How to encrypt the staging data`,
},
"file_buffer_count": schema.Int64Attribute{
- Optional: true,
- Description: `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
+ Optional: true,
+ MarkdownDescription: `Default: 10` + "\n" +
+ `Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects`,
},
"file_name_pattern": schema.StringAttribute{
Optional: true,
Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
},
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3 Staging",
- ),
- },
- Description: `must be one of ["S3 Staging"]`,
- },
"purge_staging_data": schema.BoolAttribute{
- Optional: true,
- Description: `Whether to delete the staging files from S3 after completing the sync. See docs for details.`,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether to delete the staging files from S3 after completing the sync. See docs for details.`,
},
"s3_bucket_name": schema.StringAttribute{
Required: true,
- Description: `The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.`,
+ Description: `The name of the staging S3 bucket.`,
},
"s3_bucket_path": schema.StringAttribute{
Optional: true,
Description: `The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.`,
},
"s3_bucket_region": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1"]; Default: ""` + "\n" +
+ `The region of the S3 staging bucket.`,
Validators: []validator.String{
stringvalidator.OneOf(
"",
@@ -502,36 +235,25 @@ func (r *DestinationRedshiftResource) Schema(ctx context.Context, req resource.S
"me-south-1",
),
},
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1"]` + "\n" +
- `The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.`,
},
"secret_access_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.`,
},
},
- Description: `The method how the data will be uploaded to the database.`,
+ Description: `(recommended) Uploads data to S3 and then uses a COPY to insert the data into Redshift. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.`,
},
- "destination_redshift_update_uploading_method_standard": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `The method how the data will be uploaded to the database.`,
+ "standard": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `(not recommended) Direct loading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In all other cases, you should use S3 uploading.`,
},
},
+ Description: `The way data will be uploaded to Redshift.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `The method how the data will be uploaded to the database.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -539,6 +261,13 @@ func (r *DestinationRedshiftResource) Schema(ctx context.Context, req resource.S
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -555,7 +284,8 @@ func (r *DestinationRedshiftResource) Schema(ctx context.Context, req resource.S
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -605,7 +335,7 @@ func (r *DestinationRedshiftResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationRedshift(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -781,5 +511,5 @@ func (r *DestinationRedshiftResource) Delete(ctx context.Context, req resource.D
}
func (r *DestinationRedshiftResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_redshift_resource_sdk.go b/internal/provider/destination_redshift_resource_sdk.go
old mode 100755
new mode 100644
index c9b03f3b1..bcf96b337
--- a/internal/provider/destination_redshift_resource_sdk.go
+++ b/internal/provider/destination_redshift_resource_sdk.go
@@ -3,13 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationRedshiftResourceModel) ToCreateSDKType() *shared.DestinationRedshiftCreateRequest {
database := r.Configuration.Database.ValueString()
- destinationType := shared.DestinationRedshiftRedshift(r.Configuration.DestinationType.ValueString())
host := r.Configuration.Host.ValueString()
jdbcURLParams := new(string)
if !r.Configuration.JdbcURLParams.IsUnknown() && !r.Configuration.JdbcURLParams.IsNull() {
@@ -18,148 +17,147 @@ func (r *DestinationRedshiftResourceModel) ToCreateSDKType() *shared.Destination
jdbcURLParams = nil
}
password := r.Configuration.Password.ValueString()
- port := r.Configuration.Port.ValueInt64()
- schema := r.Configuration.Schema.ValueString()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ schema := new(string)
+ if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() {
+ *schema = r.Configuration.Schema.ValueString()
+ } else {
+ schema = nil
+ }
var tunnelMethod *shared.DestinationRedshiftSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationRedshiftSSHTunnelMethodNoTunnel *shared.DestinationRedshiftSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationRedshiftSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationRedshiftSSHTunnelMethodNoTunnel = &shared.DestinationRedshiftSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationRedshiftNoTunnel *shared.DestinationRedshiftNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationRedshiftNoTunnel = &shared.DestinationRedshiftNoTunnel{}
}
- if destinationRedshiftSSHTunnelMethodNoTunnel != nil {
+ if destinationRedshiftNoTunnel != nil {
tunnelMethod = &shared.DestinationRedshiftSSHTunnelMethod{
- DestinationRedshiftSSHTunnelMethodNoTunnel: destinationRedshiftSSHTunnelMethodNoTunnel,
+ DestinationRedshiftNoTunnel: destinationRedshiftNoTunnel,
}
}
- var destinationRedshiftSSHTunnelMethodSSHKeyAuthentication *shared.DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationRedshiftSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationRedshiftSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationRedshiftSSHKeyAuthentication *shared.DestinationRedshiftSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationRedshiftSSHKeyAuthentication = &shared.DestinationRedshiftSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationRedshiftSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationRedshiftSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationRedshiftSSHTunnelMethod{
- DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication: destinationRedshiftSSHTunnelMethodSSHKeyAuthentication,
+ DestinationRedshiftSSHKeyAuthentication: destinationRedshiftSSHKeyAuthentication,
}
}
- var destinationRedshiftSSHTunnelMethodPasswordAuthentication *shared.DestinationRedshiftSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationRedshiftSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationRedshiftSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationRedshiftSSHTunnelMethodPasswordAuthentication = &shared.DestinationRedshiftSSHTunnelMethodPasswordAuthentication{
+ var destinationRedshiftPasswordAuthentication *shared.DestinationRedshiftPasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationRedshiftPasswordAuthentication = &shared.DestinationRedshiftPasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationRedshiftSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationRedshiftPasswordAuthentication != nil {
tunnelMethod = &shared.DestinationRedshiftSSHTunnelMethod{
- DestinationRedshiftSSHTunnelMethodPasswordAuthentication: destinationRedshiftSSHTunnelMethodPasswordAuthentication,
+ DestinationRedshiftPasswordAuthentication: destinationRedshiftPasswordAuthentication,
}
}
}
var uploadingMethod *shared.DestinationRedshiftUploadingMethod
if r.Configuration.UploadingMethod != nil {
- var destinationRedshiftUploadingMethodStandard *shared.DestinationRedshiftUploadingMethodStandard
- if r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodStandard != nil {
- method := shared.DestinationRedshiftUploadingMethodStandardMethod(r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodStandard.Method.ValueString())
- destinationRedshiftUploadingMethodStandard = &shared.DestinationRedshiftUploadingMethodStandard{
- Method: method,
- }
- }
- if destinationRedshiftUploadingMethodStandard != nil {
- uploadingMethod = &shared.DestinationRedshiftUploadingMethod{
- DestinationRedshiftUploadingMethodStandard: destinationRedshiftUploadingMethodStandard,
- }
- }
- var destinationRedshiftUploadingMethodS3Staging *shared.DestinationRedshiftUploadingMethodS3Staging
- if r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging != nil {
- accessKeyID := r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.AccessKeyID.ValueString()
- var encryption *shared.DestinationRedshiftUploadingMethodS3StagingEncryption
- if r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.Encryption != nil {
- var destinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption *shared.DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption
- if r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.Encryption.DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption != nil {
- encryptionType := shared.DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType(r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.Encryption.DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption.EncryptionType.ValueString())
- destinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption = &shared.DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption{
- EncryptionType: encryptionType,
- }
+ var destinationRedshiftS3Staging *shared.DestinationRedshiftS3Staging
+ if r.Configuration.UploadingMethod.S3Staging != nil {
+ accessKeyID := r.Configuration.UploadingMethod.S3Staging.AccessKeyID.ValueString()
+ var encryption *shared.DestinationRedshiftEncryption
+ if r.Configuration.UploadingMethod.S3Staging.Encryption != nil {
+ var destinationRedshiftNoEncryption *shared.DestinationRedshiftNoEncryption
+ if r.Configuration.UploadingMethod.S3Staging.Encryption.NoEncryption != nil {
+ destinationRedshiftNoEncryption = &shared.DestinationRedshiftNoEncryption{}
}
- if destinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption != nil {
- encryption = &shared.DestinationRedshiftUploadingMethodS3StagingEncryption{
- DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption: destinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption,
+ if destinationRedshiftNoEncryption != nil {
+ encryption = &shared.DestinationRedshiftEncryption{
+ DestinationRedshiftNoEncryption: destinationRedshiftNoEncryption,
}
}
- var destinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption *shared.DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption
- if r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.Encryption.DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption != nil {
- encryptionType1 := shared.DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType(r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.Encryption.DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption.EncryptionType.ValueString())
+ var destinationRedshiftAESCBCEnvelopeEncryption *shared.DestinationRedshiftAESCBCEnvelopeEncryption
+ if r.Configuration.UploadingMethod.S3Staging.Encryption.AESCBCEnvelopeEncryption != nil {
keyEncryptingKey := new(string)
- if !r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.Encryption.DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption.KeyEncryptingKey.IsUnknown() && !r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.Encryption.DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption.KeyEncryptingKey.IsNull() {
- *keyEncryptingKey = r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.Encryption.DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption.KeyEncryptingKey.ValueString()
+ if !r.Configuration.UploadingMethod.S3Staging.Encryption.AESCBCEnvelopeEncryption.KeyEncryptingKey.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.Encryption.AESCBCEnvelopeEncryption.KeyEncryptingKey.IsNull() {
+ *keyEncryptingKey = r.Configuration.UploadingMethod.S3Staging.Encryption.AESCBCEnvelopeEncryption.KeyEncryptingKey.ValueString()
} else {
keyEncryptingKey = nil
}
- destinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption = &shared.DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption{
- EncryptionType: encryptionType1,
+ destinationRedshiftAESCBCEnvelopeEncryption = &shared.DestinationRedshiftAESCBCEnvelopeEncryption{
KeyEncryptingKey: keyEncryptingKey,
}
}
- if destinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption != nil {
- encryption = &shared.DestinationRedshiftUploadingMethodS3StagingEncryption{
- DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption: destinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption,
+ if destinationRedshiftAESCBCEnvelopeEncryption != nil {
+ encryption = &shared.DestinationRedshiftEncryption{
+ DestinationRedshiftAESCBCEnvelopeEncryption: destinationRedshiftAESCBCEnvelopeEncryption,
}
}
}
fileBufferCount := new(int64)
- if !r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.FileBufferCount.IsUnknown() && !r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.FileBufferCount.IsNull() {
- *fileBufferCount = r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.FileBufferCount.ValueInt64()
+ if !r.Configuration.UploadingMethod.S3Staging.FileBufferCount.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.FileBufferCount.IsNull() {
+ *fileBufferCount = r.Configuration.UploadingMethod.S3Staging.FileBufferCount.ValueInt64()
} else {
fileBufferCount = nil
}
fileNamePattern := new(string)
- if !r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.FileNamePattern.IsUnknown() && !r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.FileNamePattern.IsNull() {
- *fileNamePattern = r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.FileNamePattern.ValueString()
+ if !r.Configuration.UploadingMethod.S3Staging.FileNamePattern.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.FileNamePattern.IsNull() {
+ *fileNamePattern = r.Configuration.UploadingMethod.S3Staging.FileNamePattern.ValueString()
} else {
fileNamePattern = nil
}
- method1 := shared.DestinationRedshiftUploadingMethodS3StagingMethod(r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.Method.ValueString())
purgeStagingData := new(bool)
- if !r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.PurgeStagingData.IsUnknown() && !r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.PurgeStagingData.IsNull() {
- *purgeStagingData = r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.PurgeStagingData.ValueBool()
+ if !r.Configuration.UploadingMethod.S3Staging.PurgeStagingData.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.PurgeStagingData.IsNull() {
+ *purgeStagingData = r.Configuration.UploadingMethod.S3Staging.PurgeStagingData.ValueBool()
} else {
purgeStagingData = nil
}
- s3BucketName := r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.S3BucketName.ValueString()
+ s3BucketName := r.Configuration.UploadingMethod.S3Staging.S3BucketName.ValueString()
s3BucketPath := new(string)
- if !r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.S3BucketPath.IsUnknown() && !r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.S3BucketPath.IsNull() {
- *s3BucketPath = r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.S3BucketPath.ValueString()
+ if !r.Configuration.UploadingMethod.S3Staging.S3BucketPath.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.S3BucketPath.IsNull() {
+ *s3BucketPath = r.Configuration.UploadingMethod.S3Staging.S3BucketPath.ValueString()
} else {
s3BucketPath = nil
}
- s3BucketRegion := shared.DestinationRedshiftUploadingMethodS3StagingS3BucketRegion(r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.S3BucketRegion.ValueString())
- secretAccessKey := r.Configuration.UploadingMethod.DestinationRedshiftUploadingMethodS3Staging.SecretAccessKey.ValueString()
- destinationRedshiftUploadingMethodS3Staging = &shared.DestinationRedshiftUploadingMethodS3Staging{
+ s3BucketRegion := new(shared.DestinationRedshiftS3BucketRegion)
+ if !r.Configuration.UploadingMethod.S3Staging.S3BucketRegion.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.S3BucketRegion.IsNull() {
+ *s3BucketRegion = shared.DestinationRedshiftS3BucketRegion(r.Configuration.UploadingMethod.S3Staging.S3BucketRegion.ValueString())
+ } else {
+ s3BucketRegion = nil
+ }
+ secretAccessKey := r.Configuration.UploadingMethod.S3Staging.SecretAccessKey.ValueString()
+ destinationRedshiftS3Staging = &shared.DestinationRedshiftS3Staging{
AccessKeyID: accessKeyID,
Encryption: encryption,
FileBufferCount: fileBufferCount,
FileNamePattern: fileNamePattern,
- Method: method1,
PurgeStagingData: purgeStagingData,
S3BucketName: s3BucketName,
S3BucketPath: s3BucketPath,
@@ -167,16 +165,24 @@ func (r *DestinationRedshiftResourceModel) ToCreateSDKType() *shared.Destination
SecretAccessKey: secretAccessKey,
}
}
- if destinationRedshiftUploadingMethodS3Staging != nil {
+ if destinationRedshiftS3Staging != nil {
+ uploadingMethod = &shared.DestinationRedshiftUploadingMethod{
+ DestinationRedshiftS3Staging: destinationRedshiftS3Staging,
+ }
+ }
+ var destinationRedshiftStandard *shared.DestinationRedshiftStandard
+ if r.Configuration.UploadingMethod.Standard != nil {
+ destinationRedshiftStandard = &shared.DestinationRedshiftStandard{}
+ }
+ if destinationRedshiftStandard != nil {
uploadingMethod = &shared.DestinationRedshiftUploadingMethod{
- DestinationRedshiftUploadingMethodS3Staging: destinationRedshiftUploadingMethodS3Staging,
+ DestinationRedshiftStandard: destinationRedshiftStandard,
}
}
}
username := r.Configuration.Username.ValueString()
configuration := shared.DestinationRedshift{
Database: database,
- DestinationType: destinationType,
Host: host,
JdbcURLParams: jdbcURLParams,
Password: password,
@@ -186,10 +192,17 @@ func (r *DestinationRedshiftResourceModel) ToCreateSDKType() *shared.Destination
UploadingMethod: uploadingMethod,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationRedshiftCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -211,148 +224,147 @@ func (r *DestinationRedshiftResourceModel) ToUpdateSDKType() *shared.Destination
jdbcURLParams = nil
}
password := r.Configuration.Password.ValueString()
- port := r.Configuration.Port.ValueInt64()
- schema := r.Configuration.Schema.ValueString()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ schema := new(string)
+ if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() {
+ *schema = r.Configuration.Schema.ValueString()
+ } else {
+ schema = nil
+ }
var tunnelMethod *shared.DestinationRedshiftUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationRedshiftUpdateSSHTunnelMethodNoTunnel *shared.DestinationRedshiftUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationRedshiftUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationRedshiftUpdateSSHTunnelMethodNoTunnel = &shared.DestinationRedshiftUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationRedshiftUpdateNoTunnel *shared.DestinationRedshiftUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationRedshiftUpdateNoTunnel = &shared.DestinationRedshiftUpdateNoTunnel{}
}
- if destinationRedshiftUpdateSSHTunnelMethodNoTunnel != nil {
+ if destinationRedshiftUpdateNoTunnel != nil {
tunnelMethod = &shared.DestinationRedshiftUpdateSSHTunnelMethod{
- DestinationRedshiftUpdateSSHTunnelMethodNoTunnel: destinationRedshiftUpdateSSHTunnelMethodNoTunnel,
+ DestinationRedshiftUpdateNoTunnel: destinationRedshiftUpdateNoTunnel,
}
}
- var destinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication *shared.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationRedshiftUpdateSSHKeyAuthentication *shared.DestinationRedshiftUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationRedshiftUpdateSSHKeyAuthentication = &shared.DestinationRedshiftUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationRedshiftUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationRedshiftUpdateSSHTunnelMethod{
- DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication: destinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationRedshiftUpdateSSHKeyAuthentication: destinationRedshiftUpdateSSHKeyAuthentication,
}
}
- var destinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication *shared.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication = &shared.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication{
+ var destinationRedshiftUpdatePasswordAuthentication *shared.DestinationRedshiftUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationRedshiftUpdatePasswordAuthentication = &shared.DestinationRedshiftUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationRedshiftUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.DestinationRedshiftUpdateSSHTunnelMethod{
- DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication: destinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationRedshiftUpdatePasswordAuthentication: destinationRedshiftUpdatePasswordAuthentication,
}
}
}
- var uploadingMethod *shared.DestinationRedshiftUpdateUploadingMethod
+ var uploadingMethod *shared.UploadingMethod
if r.Configuration.UploadingMethod != nil {
- var destinationRedshiftUpdateUploadingMethodStandard *shared.DestinationRedshiftUpdateUploadingMethodStandard
- if r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodStandard != nil {
- method := shared.DestinationRedshiftUpdateUploadingMethodStandardMethod(r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodStandard.Method.ValueString())
- destinationRedshiftUpdateUploadingMethodStandard = &shared.DestinationRedshiftUpdateUploadingMethodStandard{
- Method: method,
- }
- }
- if destinationRedshiftUpdateUploadingMethodStandard != nil {
- uploadingMethod = &shared.DestinationRedshiftUpdateUploadingMethod{
- DestinationRedshiftUpdateUploadingMethodStandard: destinationRedshiftUpdateUploadingMethodStandard,
- }
- }
- var destinationRedshiftUpdateUploadingMethodS3Staging *shared.DestinationRedshiftUpdateUploadingMethodS3Staging
- if r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging != nil {
- accessKeyID := r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.AccessKeyID.ValueString()
- var encryption *shared.DestinationRedshiftUpdateUploadingMethodS3StagingEncryption
- if r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.Encryption != nil {
- var destinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption *shared.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption
- if r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.Encryption.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption != nil {
- encryptionType := shared.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType(r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.Encryption.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption.EncryptionType.ValueString())
- destinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption = &shared.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption{
- EncryptionType: encryptionType,
- }
+ var s3Staging *shared.S3Staging
+ if r.Configuration.UploadingMethod.S3Staging != nil {
+ accessKeyID := r.Configuration.UploadingMethod.S3Staging.AccessKeyID.ValueString()
+ var encryption *shared.DestinationRedshiftUpdateEncryption
+ if r.Configuration.UploadingMethod.S3Staging.Encryption != nil {
+ var noEncryption *shared.NoEncryption
+ if r.Configuration.UploadingMethod.S3Staging.Encryption.NoEncryption != nil {
+ noEncryption = &shared.NoEncryption{}
}
- if destinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption != nil {
- encryption = &shared.DestinationRedshiftUpdateUploadingMethodS3StagingEncryption{
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption: destinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption,
+ if noEncryption != nil {
+ encryption = &shared.DestinationRedshiftUpdateEncryption{
+ NoEncryption: noEncryption,
}
}
- var destinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption *shared.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption
- if r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.Encryption.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption != nil {
- encryptionType1 := shared.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType(r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.Encryption.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption.EncryptionType.ValueString())
+ var aesCBCEnvelopeEncryption *shared.AESCBCEnvelopeEncryption
+ if r.Configuration.UploadingMethod.S3Staging.Encryption.AESCBCEnvelopeEncryption != nil {
keyEncryptingKey := new(string)
- if !r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.Encryption.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption.KeyEncryptingKey.IsUnknown() && !r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.Encryption.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption.KeyEncryptingKey.IsNull() {
- *keyEncryptingKey = r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.Encryption.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption.KeyEncryptingKey.ValueString()
+ if !r.Configuration.UploadingMethod.S3Staging.Encryption.AESCBCEnvelopeEncryption.KeyEncryptingKey.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.Encryption.AESCBCEnvelopeEncryption.KeyEncryptingKey.IsNull() {
+ *keyEncryptingKey = r.Configuration.UploadingMethod.S3Staging.Encryption.AESCBCEnvelopeEncryption.KeyEncryptingKey.ValueString()
} else {
keyEncryptingKey = nil
}
- destinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption = &shared.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption{
- EncryptionType: encryptionType1,
+ aesCBCEnvelopeEncryption = &shared.AESCBCEnvelopeEncryption{
KeyEncryptingKey: keyEncryptingKey,
}
}
- if destinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption != nil {
- encryption = &shared.DestinationRedshiftUpdateUploadingMethodS3StagingEncryption{
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption: destinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption,
+ if aesCBCEnvelopeEncryption != nil {
+ encryption = &shared.DestinationRedshiftUpdateEncryption{
+ AESCBCEnvelopeEncryption: aesCBCEnvelopeEncryption,
}
}
}
fileBufferCount := new(int64)
- if !r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.FileBufferCount.IsUnknown() && !r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.FileBufferCount.IsNull() {
- *fileBufferCount = r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.FileBufferCount.ValueInt64()
+ if !r.Configuration.UploadingMethod.S3Staging.FileBufferCount.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.FileBufferCount.IsNull() {
+ *fileBufferCount = r.Configuration.UploadingMethod.S3Staging.FileBufferCount.ValueInt64()
} else {
fileBufferCount = nil
}
fileNamePattern := new(string)
- if !r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.FileNamePattern.IsUnknown() && !r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.FileNamePattern.IsNull() {
- *fileNamePattern = r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.FileNamePattern.ValueString()
+ if !r.Configuration.UploadingMethod.S3Staging.FileNamePattern.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.FileNamePattern.IsNull() {
+ *fileNamePattern = r.Configuration.UploadingMethod.S3Staging.FileNamePattern.ValueString()
} else {
fileNamePattern = nil
}
- method1 := shared.DestinationRedshiftUpdateUploadingMethodS3StagingMethod(r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.Method.ValueString())
purgeStagingData := new(bool)
- if !r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.PurgeStagingData.IsUnknown() && !r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.PurgeStagingData.IsNull() {
- *purgeStagingData = r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.PurgeStagingData.ValueBool()
+ if !r.Configuration.UploadingMethod.S3Staging.PurgeStagingData.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.PurgeStagingData.IsNull() {
+ *purgeStagingData = r.Configuration.UploadingMethod.S3Staging.PurgeStagingData.ValueBool()
} else {
purgeStagingData = nil
}
- s3BucketName := r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.S3BucketName.ValueString()
+ s3BucketName := r.Configuration.UploadingMethod.S3Staging.S3BucketName.ValueString()
s3BucketPath := new(string)
- if !r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.S3BucketPath.IsUnknown() && !r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.S3BucketPath.IsNull() {
- *s3BucketPath = r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.S3BucketPath.ValueString()
+ if !r.Configuration.UploadingMethod.S3Staging.S3BucketPath.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.S3BucketPath.IsNull() {
+ *s3BucketPath = r.Configuration.UploadingMethod.S3Staging.S3BucketPath.ValueString()
} else {
s3BucketPath = nil
}
- s3BucketRegion := shared.DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion(r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.S3BucketRegion.ValueString())
- secretAccessKey := r.Configuration.UploadingMethod.DestinationRedshiftUpdateUploadingMethodS3Staging.SecretAccessKey.ValueString()
- destinationRedshiftUpdateUploadingMethodS3Staging = &shared.DestinationRedshiftUpdateUploadingMethodS3Staging{
+ s3BucketRegion := new(shared.DestinationRedshiftUpdateS3BucketRegion)
+ if !r.Configuration.UploadingMethod.S3Staging.S3BucketRegion.IsUnknown() && !r.Configuration.UploadingMethod.S3Staging.S3BucketRegion.IsNull() {
+ *s3BucketRegion = shared.DestinationRedshiftUpdateS3BucketRegion(r.Configuration.UploadingMethod.S3Staging.S3BucketRegion.ValueString())
+ } else {
+ s3BucketRegion = nil
+ }
+ secretAccessKey := r.Configuration.UploadingMethod.S3Staging.SecretAccessKey.ValueString()
+ s3Staging = &shared.S3Staging{
AccessKeyID: accessKeyID,
Encryption: encryption,
FileBufferCount: fileBufferCount,
FileNamePattern: fileNamePattern,
- Method: method1,
PurgeStagingData: purgeStagingData,
S3BucketName: s3BucketName,
S3BucketPath: s3BucketPath,
@@ -360,9 +372,18 @@ func (r *DestinationRedshiftResourceModel) ToUpdateSDKType() *shared.Destination
SecretAccessKey: secretAccessKey,
}
}
- if destinationRedshiftUpdateUploadingMethodS3Staging != nil {
- uploadingMethod = &shared.DestinationRedshiftUpdateUploadingMethod{
- DestinationRedshiftUpdateUploadingMethodS3Staging: destinationRedshiftUpdateUploadingMethodS3Staging,
+ if s3Staging != nil {
+ uploadingMethod = &shared.UploadingMethod{
+ S3Staging: s3Staging,
+ }
+ }
+ var standard *shared.Standard
+ if r.Configuration.UploadingMethod.Standard != nil {
+ standard = &shared.Standard{}
+ }
+ if standard != nil {
+ uploadingMethod = &shared.UploadingMethod{
+ Standard: standard,
}
}
}
diff --git a/internal/provider/destination_s3_data_source.go b/internal/provider/destination_s3_data_source.go
old mode 100755
new mode 100644
index 250628054..a6ca7cce7
--- a/internal/provider/destination_s3_data_source.go
+++ b/internal/provider/destination_s3_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationS3DataSource struct {
// DestinationS3DataSourceModel describes the data model.
type DestinationS3DataSourceModel struct {
- Configuration DestinationS31 `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,717 +47,17 @@ func (r *DestinationS3DataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "DestinationS3 DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "s3",
- ),
- },
- Description: `must be one of ["s3"]`,
- },
- "file_name_pattern": schema.StringAttribute{
- Computed: true,
- Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
- },
- "format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_output_format_avro_apache_avro": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_output_format_avro_apache_avro_compression_codec_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "no compression",
- ),
- },
- Description: `must be one of ["no compression"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_output_format_avro_apache_avro_compression_codec_deflate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Deflate",
- ),
- },
- Description: `must be one of ["Deflate"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `0: no compression & fastest, 9: best compression & slowest.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_output_format_avro_apache_avro_compression_codec_bzip2": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bzip2",
- ),
- },
- Description: `must be one of ["bzip2"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_output_format_avro_apache_avro_compression_codec_xz": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xz",
- ),
- },
- Description: `must be one of ["xz"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `See here for details.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_output_format_avro_apache_avro_compression_codec_zstandard": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zstandard",
- ),
- },
- Description: `must be one of ["zstandard"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.`,
- },
- "include_checksum": schema.BoolAttribute{
- Computed: true,
- Description: `If true, include a checksum with each data block.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_output_format_avro_apache_avro_compression_codec_snappy": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snappy",
- ),
- },
- Description: `must be one of ["snappy"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Avro",
- ),
- },
- Description: `must be one of ["Avro"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_output_format_csv_comma_separated_values_compression_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- "destination_s3_output_format_csv_comma_separated_values_compression_gzip": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- "flattening": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CSV",
- ),
- },
- Description: `must be one of ["CSV"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "destination_s3_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "flattening": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_output_format_parquet_columnar_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "block_size_mb": schema.Int64Attribute{
- Computed: true,
- Description: `This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.`,
- },
- "compression_codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "SNAPPY",
- "GZIP",
- "LZO",
- "BROTLI",
- "LZ4",
- "ZSTD",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]` + "\n" +
- `The compression algorithm used to compress data pages.`,
- },
- "dictionary_encoding": schema.BoolAttribute{
- Computed: true,
- Description: `Default: true.`,
- },
- "dictionary_page_size_kb": schema.Int64Attribute{
- Computed: true,
- Description: `There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Parquet",
- ),
- },
- Description: `must be one of ["Parquet"]`,
- },
- "max_padding_size_mb": schema.Int64Attribute{
- Computed: true,
- Description: `Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.`,
- },
- "page_size_kb": schema.Int64Attribute{
- Computed: true,
- Description: `The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_update_output_format_avro_apache_avro": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "no compression",
- ),
- },
- Description: `must be one of ["no compression"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_deflate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Deflate",
- ),
- },
- Description: `must be one of ["Deflate"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `0: no compression & fastest, 9: best compression & slowest.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_bzip2": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bzip2",
- ),
- },
- Description: `must be one of ["bzip2"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_xz": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xz",
- ),
- },
- Description: `must be one of ["xz"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `See here for details.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_zstandard": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zstandard",
- ),
- },
- Description: `must be one of ["zstandard"]`,
- },
- "compression_level": schema.Int64Attribute{
- Computed: true,
- Description: `Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.`,
- },
- "include_checksum": schema.BoolAttribute{
- Computed: true,
- Description: `If true, include a checksum with each data block.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_snappy": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snappy",
- ),
- },
- Description: `must be one of ["snappy"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Avro",
- ),
- },
- Description: `must be one of ["Avro"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_update_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_update_output_format_csv_comma_separated_values_compression_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- "destination_s3_update_output_format_csv_comma_separated_values_compression_gzip": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- "flattening": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CSV",
- ),
- },
- Description: `must be one of ["CSV"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_update_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_update_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "destination_s3_update_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "flattening": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_update_output_format_parquet_columnar_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "block_size_mb": schema.Int64Attribute{
- Computed: true,
- Description: `This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.`,
- },
- "compression_codec": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "SNAPPY",
- "GZIP",
- "LZO",
- "BROTLI",
- "LZ4",
- "ZSTD",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]` + "\n" +
- `The compression algorithm used to compress data pages.`,
- },
- "dictionary_encoding": schema.BoolAttribute{
- Computed: true,
- Description: `Default: true.`,
- },
- "dictionary_page_size_kb": schema.Int64Attribute{
- Computed: true,
- Description: `There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Parquet",
- ),
- },
- Description: `must be one of ["Parquet"]`,
- },
- "max_padding_size_mb": schema.Int64Attribute{
- Computed: true,
- Description: `Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.`,
- },
- "page_size_kb": schema.Int64Attribute{
- Computed: true,
- Description: `The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Format of the data output. See here for more details`,
- },
- "s3_bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the S3 bucket. Read more here.`,
- },
- "s3_bucket_path": schema.StringAttribute{
- Computed: true,
- Description: `Directory under the S3 bucket where data will be written. Read more here`,
- },
- "s3_bucket_region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "",
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- "us-gov-east-1",
- "us-gov-west-1",
- ),
- },
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the S3 bucket. See here for all region codes.`,
- },
- "s3_endpoint": schema.StringAttribute{
- Computed: true,
- Description: `Your S3 endpoint url. Read more here`,
- },
- "s3_path_format": schema.StringAttribute{
- Computed: true,
- Description: `Format string on how data will be organized inside the S3 bucket directory. Read more here`,
- },
- "secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret to the access key ID. Read more here`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_s3_data_source_sdk.go b/internal/provider/destination_s3_data_source_sdk.go
old mode 100755
new mode 100644
index 763b03483..7f2e87064
--- a/internal/provider/destination_s3_data_source_sdk.go
+++ b/internal/provider/destination_s3_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationS3DataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_s3_resource.go b/internal/provider/destination_s3_resource.go
old mode 100755
new mode 100644
index 5bebe9dbb..4285af18f
--- a/internal/provider/destination_s3_resource.go
+++ b/internal/provider/destination_s3_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationS3Resource struct {
// DestinationS3ResourceModel describes the resource data model.
type DestinationS3ResourceModel struct {
Configuration DestinationS3 `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -56,17 +58,9 @@ func (r *DestinationS3Resource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"access_key_id": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "s3",
- ),
- },
- Description: `must be one of ["s3"]`,
- },
"file_name_pattern": schema.StringAttribute{
Optional: true,
Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
@@ -74,275 +68,282 @@ func (r *DestinationS3Resource) Schema(ctx context.Context, req resource.SchemaR
"format": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_s3_output_format_avro_apache_avro": schema.SingleNestedAttribute{
+ "avro_apache_avro": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_codec": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_s3_output_format_avro_apache_avro_compression_codec_bzip2": schema.SingleNestedAttribute{
+ "bzip2": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["bzip2"]; Default: "bzip2"`,
Validators: []validator.String{
stringvalidator.OneOf(
"bzip2",
),
},
- Description: `must be one of ["bzip2"]`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
- "destination_s3_output_format_avro_apache_avro_compression_codec_deflate": schema.SingleNestedAttribute{
+ "deflate": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["Deflate"]; Default: "Deflate"`,
Validators: []validator.String{
stringvalidator.OneOf(
"Deflate",
),
},
- Description: `must be one of ["Deflate"]`,
},
"compression_level": schema.Int64Attribute{
- Required: true,
- Description: `0: no compression & fastest, 9: best compression & slowest.`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `0: no compression & fastest, 9: best compression & slowest.`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
- "destination_s3_output_format_avro_apache_avro_compression_codec_no_compression": schema.SingleNestedAttribute{
+ "no_compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["no compression"]; Default: "no compression"`,
Validators: []validator.String{
stringvalidator.OneOf(
"no compression",
),
},
- Description: `must be one of ["no compression"]`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
- "destination_s3_output_format_avro_apache_avro_compression_codec_snappy": schema.SingleNestedAttribute{
+ "snappy": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["snappy"]; Default: "snappy"`,
Validators: []validator.String{
stringvalidator.OneOf(
"snappy",
),
},
- Description: `must be one of ["snappy"]`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
- "destination_s3_output_format_avro_apache_avro_compression_codec_xz": schema.SingleNestedAttribute{
+ "xz": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["xz"]; Default: "xz"`,
Validators: []validator.String{
stringvalidator.OneOf(
"xz",
),
},
- Description: `must be one of ["xz"]`,
},
"compression_level": schema.Int64Attribute{
- Required: true,
- Description: `See here for details.`,
+ Optional: true,
+ MarkdownDescription: `Default: 6` + "\n" +
+ `See here for details.`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
- "destination_s3_output_format_avro_apache_avro_compression_codec_zstandard": schema.SingleNestedAttribute{
+ "zstandard": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"codec": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["zstandard"]; Default: "zstandard"`,
Validators: []validator.String{
stringvalidator.OneOf(
"zstandard",
),
},
- Description: `must be one of ["zstandard"]`,
},
"compression_level": schema.Int64Attribute{
- Required: true,
- Description: `Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.`,
+ Optional: true,
+ MarkdownDescription: `Default: 3` + "\n" +
+ `Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.`,
},
"include_checksum": schema.BoolAttribute{
- Optional: true,
- Description: `If true, include a checksum with each data block.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `If true, include a checksum with each data block.`,
},
},
Description: `The compression algorithm used to compress data. Default to no compression.`,
},
},
+ Description: `The compression algorithm used to compress data. Default to no compression.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `The compression algorithm used to compress data. Default to no compression.`,
},
"format_type": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["Avro"]; Default: "Avro"`,
Validators: []validator.String{
stringvalidator.OneOf(
"Avro",
),
},
- Description: `must be one of ["Avro"]`,
},
},
Description: `Format of the data output. See here for more details`,
},
- "destination_s3_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
+ "csv_comma_separated_values": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_s3_output_format_csv_comma_separated_values_compression_gzip": schema.SingleNestedAttribute{
+ "gzip": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_type": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["GZIP"]; Default: "GZIP"`,
Validators: []validator.String{
stringvalidator.OneOf(
"GZIP",
),
},
- Description: `must be one of ["GZIP"]`,
},
},
Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
},
- "destination_s3_output_format_csv_comma_separated_values_compression_no_compression": schema.SingleNestedAttribute{
+ "no_compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_type": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["No Compression"]; Default: "No Compression"`,
Validators: []validator.String{
stringvalidator.OneOf(
"No Compression",
),
},
- Description: `must be one of ["No Compression"]`,
},
},
Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
},
},
+ Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
},
"flattening": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]; Default: "No flattening"` + "\n" +
+ `Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
Validators: []validator.String{
stringvalidator.OneOf(
"No flattening",
"Root level flattening",
),
},
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
},
"format_type": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["CSV"]; Default: "CSV"`,
Validators: []validator.String{
stringvalidator.OneOf(
"CSV",
),
},
- Description: `must be one of ["CSV"]`,
},
},
Description: `Format of the data output. See here for more details`,
},
- "destination_s3_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
+ "json_lines_newline_delimited_json": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_s3_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
+ "gzip": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_type": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["GZIP"]; Default: "GZIP"`,
Validators: []validator.String{
stringvalidator.OneOf(
"GZIP",
),
},
- Description: `must be one of ["GZIP"]`,
},
},
Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
},
- "destination_s3_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
+ "no_compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_type": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["No Compression"]; Default: "No Compression"`,
Validators: []validator.String{
stringvalidator.OneOf(
"No Compression",
),
},
- Description: `must be one of ["No Compression"]`,
},
},
Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
},
},
+ Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
},
"flattening": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]; Default: "No flattening"` + "\n" +
+ `Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.`,
Validators: []validator.String{
stringvalidator.OneOf(
"No flattening",
"Root level flattening",
),
},
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.`,
},
"format_type": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["JSONL"]; Default: "JSONL"`,
Validators: []validator.String{
stringvalidator.OneOf(
"JSONL",
),
},
- Description: `must be one of ["JSONL"]`,
},
},
Description: `Format of the data output. See here for more details`,
},
- "destination_s3_output_format_parquet_columnar_storage": schema.SingleNestedAttribute{
+ "parquet_columnar_storage": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"block_size_mb": schema.Int64Attribute{
- Optional: true,
- Description: `This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.`,
+ Optional: true,
+ MarkdownDescription: `Default: 128` + "\n" +
+ `This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.`,
},
"compression_codec": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]; Default: "UNCOMPRESSED"` + "\n" +
+ `The compression algorithm used to compress data pages.`,
Validators: []validator.String{
stringvalidator.OneOf(
"UNCOMPRESSED",
@@ -354,353 +355,44 @@ func (r *DestinationS3Resource) Schema(ctx context.Context, req resource.SchemaR
"ZSTD",
),
},
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]` + "\n" +
- `The compression algorithm used to compress data pages.`,
},
"dictionary_encoding": schema.BoolAttribute{
- Optional: true,
- Description: `Default: true.`,
- },
- "dictionary_page_size_kb": schema.Int64Attribute{
- Optional: true,
- Description: `There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Parquet",
- ),
- },
- Description: `must be one of ["Parquet"]`,
- },
- "max_padding_size_mb": schema.Int64Attribute{
- Optional: true,
- Description: `Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.`,
- },
- "page_size_kb": schema.Int64Attribute{
- Optional: true,
- Description: `The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_update_output_format_avro_apache_avro": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_codec": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_no_compression": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "no compression",
- ),
- },
- Description: `must be one of ["no compression"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_deflate": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Deflate",
- ),
- },
- Description: `must be one of ["Deflate"]`,
- },
- "compression_level": schema.Int64Attribute{
- Required: true,
- Description: `0: no compression & fastest, 9: best compression & slowest.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_bzip2": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bzip2",
- ),
- },
- Description: `must be one of ["bzip2"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_xz": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xz",
- ),
- },
- Description: `must be one of ["xz"]`,
- },
- "compression_level": schema.Int64Attribute{
- Required: true,
- Description: `See here for details.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_zstandard": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zstandard",
- ),
- },
- Description: `must be one of ["zstandard"]`,
- },
- "compression_level": schema.Int64Attribute{
- Required: true,
- Description: `Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.`,
- },
- "include_checksum": schema.BoolAttribute{
- Optional: true,
- Description: `If true, include a checksum with each data block.`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "destination_s3_update_output_format_avro_apache_avro_compression_codec_snappy": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "codec": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snappy",
- ),
- },
- Description: `must be one of ["snappy"]`,
- },
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The compression algorithm used to compress data. Default to no compression.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Avro",
- ),
- },
- Description: `must be one of ["Avro"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_update_output_format_csv_comma_separated_values": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
Optional: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_update_output_format_csv_comma_separated_values_compression_no_compression": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- "destination_s3_update_output_format_csv_comma_separated_values_compression_gzip": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").`,
- },
- "flattening": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CSV",
- ),
- },
- Description: `must be one of ["CSV"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_update_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_update_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "destination_s3_update_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Default: true.`,
},
- "flattening": schema.StringAttribute{
+ "dictionary_page_size_kb": schema.Int64Attribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.`,
+ MarkdownDescription: `Default: 1024` + "\n" +
+ `There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.`,
},
"format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_update_output_format_parquet_columnar_storage": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "block_size_mb": schema.Int64Attribute{
- Optional: true,
- Description: `This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.`,
- },
- "compression_codec": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "UNCOMPRESSED",
- "SNAPPY",
- "GZIP",
- "LZO",
- "BROTLI",
- "LZ4",
- "ZSTD",
- ),
- },
- MarkdownDescription: `must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "LZO", "BROTLI", "LZ4", "ZSTD"]` + "\n" +
- `The compression algorithm used to compress data pages.`,
- },
- "dictionary_encoding": schema.BoolAttribute{
Optional: true,
- Description: `Default: true.`,
- },
- "dictionary_page_size_kb": schema.Int64Attribute{
- Optional: true,
- Description: `There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
+ Description: `must be one of ["Parquet"]; Default: "Parquet"`,
Validators: []validator.String{
stringvalidator.OneOf(
"Parquet",
),
},
- Description: `must be one of ["Parquet"]`,
},
"max_padding_size_mb": schema.Int64Attribute{
- Optional: true,
- Description: `Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.`,
+ Optional: true,
+ MarkdownDescription: `Default: 8` + "\n" +
+ `Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.`,
},
"page_size_kb": schema.Int64Attribute{
- Optional: true,
- Description: `The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.`,
+ Optional: true,
+ MarkdownDescription: `Default: 1024` + "\n" +
+ `The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.`,
},
},
Description: `Format of the data output. See here for more details`,
},
},
+ Description: `Format of the data output. See here for more details`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Format of the data output. See here for more details`,
},
"s3_bucket_name": schema.StringAttribute{
Required: true,
@@ -711,7 +403,9 @@ func (r *DestinationS3Resource) Schema(ctx context.Context, req resource.SchemaR
Description: `Directory under the S3 bucket where data will be written. Read more here`,
},
"s3_bucket_region": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""` + "\n" +
+ `The region of the S3 bucket. See here for all region codes.`,
Validators: []validator.String{
stringvalidator.OneOf(
"",
@@ -742,12 +436,11 @@ func (r *DestinationS3Resource) Schema(ctx context.Context, req resource.SchemaR
"us-gov-west-1",
),
},
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the S3 bucket. See here for all region codes.`,
},
"s3_endpoint": schema.StringAttribute{
- Optional: true,
- Description: `Your S3 endpoint url. Read more here`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `Your S3 endpoint url. Read more here`,
},
"s3_path_format": schema.StringAttribute{
Optional: true,
@@ -755,10 +448,18 @@ func (r *DestinationS3Resource) Schema(ctx context.Context, req resource.SchemaR
},
"secret_access_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The corresponding secret to the access key ID. Read more here`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -775,7 +476,8 @@ func (r *DestinationS3Resource) Schema(ctx context.Context, req resource.SchemaR
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -825,7 +527,7 @@ func (r *DestinationS3Resource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationS3(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -1001,5 +703,5 @@ func (r *DestinationS3Resource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *DestinationS3Resource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_s3_resource_sdk.go b/internal/provider/destination_s3_resource_sdk.go
old mode 100755
new mode 100644
index 296df875a..5074ad38c
--- a/internal/provider/destination_s3_resource_sdk.go
+++ b/internal/provider/destination_s3_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -14,7 +14,6 @@ func (r *DestinationS3ResourceModel) ToCreateSDKType() *shared.DestinationS3Crea
} else {
accessKeyID = nil
}
- destinationType := shared.DestinationS3S3(r.Configuration.DestinationType.ValueString())
fileNamePattern := new(string)
if !r.Configuration.FileNamePattern.IsUnknown() && !r.Configuration.FileNamePattern.IsNull() {
*fileNamePattern = r.Configuration.FileNamePattern.ValueString()
@@ -22,254 +21,324 @@ func (r *DestinationS3ResourceModel) ToCreateSDKType() *shared.DestinationS3Crea
fileNamePattern = nil
}
var format shared.DestinationS3OutputFormat
- var destinationS3OutputFormatAvroApacheAvro *shared.DestinationS3OutputFormatAvroApacheAvro
- if r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro != nil {
- var compressionCodec shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodec
- var destinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression *shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression
- if r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- codec := shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec(r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression.Codec.ValueString())
- destinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression = &shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression{
+ var destinationS3AvroApacheAvro *shared.DestinationS3AvroApacheAvro
+ if r.Configuration.Format.AvroApacheAvro != nil {
+ var compressionCodec shared.DestinationS3CompressionCodec
+ var destinationS3NoCompression *shared.DestinationS3NoCompression
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression != nil {
+ codec := new(shared.DestinationS3Codec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.IsNull() {
+ *codec = shared.DestinationS3Codec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.ValueString())
+ } else {
+ codec = nil
+ }
+ destinationS3NoCompression = &shared.DestinationS3NoCompression{
Codec: codec,
}
}
- if destinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- compressionCodec = shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression: destinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression,
+ if destinationS3NoCompression != nil {
+ compressionCodec = shared.DestinationS3CompressionCodec{
+ DestinationS3NoCompression: destinationS3NoCompression,
}
}
- var destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate *shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate
- if r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- codec1 := shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflateCodec(r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate.Codec.ValueString())
- compressionLevel := r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate.CompressionLevel.ValueInt64()
- destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate = &shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate{
+ var destinationS3Deflate *shared.DestinationS3Deflate
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate != nil {
+ codec1 := new(shared.DestinationS3SchemasCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.IsNull() {
+ *codec1 = shared.DestinationS3SchemasCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.ValueString())
+ } else {
+ codec1 = nil
+ }
+ compressionLevel := new(int64)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.IsNull() {
+ *compressionLevel = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.ValueInt64()
+ } else {
+ compressionLevel = nil
+ }
+ destinationS3Deflate = &shared.DestinationS3Deflate{
Codec: codec1,
CompressionLevel: compressionLevel,
}
}
- if destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- compressionCodec = shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate: destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate,
+ if destinationS3Deflate != nil {
+ compressionCodec = shared.DestinationS3CompressionCodec{
+ DestinationS3Deflate: destinationS3Deflate,
}
}
- var destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 *shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2
- if r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- codec2 := shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2Codec(r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2.Codec.ValueString())
- destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 = &shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2{
+ var destinationS3Bzip2 *shared.DestinationS3Bzip2
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2 != nil {
+ codec2 := new(shared.DestinationS3SchemasFormatCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.IsNull() {
+ *codec2 = shared.DestinationS3SchemasFormatCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.ValueString())
+ } else {
+ codec2 = nil
+ }
+ destinationS3Bzip2 = &shared.DestinationS3Bzip2{
Codec: codec2,
}
}
- if destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- compressionCodec = shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2: destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2,
+ if destinationS3Bzip2 != nil {
+ compressionCodec = shared.DestinationS3CompressionCodec{
+ DestinationS3Bzip2: destinationS3Bzip2,
}
}
- var destinationS3OutputFormatAvroApacheAvroCompressionCodecXz *shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz
- if r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz != nil {
- codec3 := shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecXzCodec(r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz.Codec.ValueString())
- compressionLevel1 := r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz.CompressionLevel.ValueInt64()
- destinationS3OutputFormatAvroApacheAvroCompressionCodecXz = &shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz{
+ var destinationS3Xz *shared.DestinationS3Xz
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz != nil {
+ codec3 := new(shared.DestinationS3SchemasFormatOutputFormatCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.IsNull() {
+ *codec3 = shared.DestinationS3SchemasFormatOutputFormatCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.ValueString())
+ } else {
+ codec3 = nil
+ }
+ compressionLevel1 := new(int64)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.IsNull() {
+ *compressionLevel1 = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.ValueInt64()
+ } else {
+ compressionLevel1 = nil
+ }
+ destinationS3Xz = &shared.DestinationS3Xz{
Codec: codec3,
CompressionLevel: compressionLevel1,
}
}
- if destinationS3OutputFormatAvroApacheAvroCompressionCodecXz != nil {
- compressionCodec = shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz: destinationS3OutputFormatAvroApacheAvroCompressionCodecXz,
+ if destinationS3Xz != nil {
+ compressionCodec = shared.DestinationS3CompressionCodec{
+ DestinationS3Xz: destinationS3Xz,
}
}
- var destinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard *shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard
- if r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- codec4 := shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandardCodec(r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard.Codec.ValueString())
- compressionLevel2 := r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard.CompressionLevel.ValueInt64()
+ var destinationS3Zstandard *shared.DestinationS3Zstandard
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard != nil {
+ codec4 := new(shared.DestinationS3SchemasFormatOutputFormat1Codec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.IsNull() {
+ *codec4 = shared.DestinationS3SchemasFormatOutputFormat1Codec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.ValueString())
+ } else {
+ codec4 = nil
+ }
+ compressionLevel2 := new(int64)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.IsNull() {
+ *compressionLevel2 = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.ValueInt64()
+ } else {
+ compressionLevel2 = nil
+ }
includeChecksum := new(bool)
- if !r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.IsNull() {
- *includeChecksum = r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.ValueBool()
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.IsNull() {
+ *includeChecksum = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.ValueBool()
} else {
includeChecksum = nil
}
- destinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard = &shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard{
+ destinationS3Zstandard = &shared.DestinationS3Zstandard{
Codec: codec4,
CompressionLevel: compressionLevel2,
IncludeChecksum: includeChecksum,
}
}
- if destinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- compressionCodec = shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard: destinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard,
+ if destinationS3Zstandard != nil {
+ compressionCodec = shared.DestinationS3CompressionCodec{
+ DestinationS3Zstandard: destinationS3Zstandard,
}
}
- var destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy *shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy
- if r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- codec5 := shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappyCodec(r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.CompressionCodec.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy.Codec.ValueString())
- destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy = &shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy{
+ var destinationS3Snappy *shared.DestinationS3Snappy
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy != nil {
+ codec5 := new(shared.DestinationS3SchemasFormatOutputFormat1CompressionCodecCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.IsNull() {
+ *codec5 = shared.DestinationS3SchemasFormatOutputFormat1CompressionCodecCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.ValueString())
+ } else {
+ codec5 = nil
+ }
+ destinationS3Snappy = &shared.DestinationS3Snappy{
Codec: codec5,
}
}
- if destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- compressionCodec = shared.DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy: destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy,
+ if destinationS3Snappy != nil {
+ compressionCodec = shared.DestinationS3CompressionCodec{
+ DestinationS3Snappy: destinationS3Snappy,
}
}
- formatType := shared.DestinationS3OutputFormatAvroApacheAvroFormatType(r.Configuration.Format.DestinationS3OutputFormatAvroApacheAvro.FormatType.ValueString())
- destinationS3OutputFormatAvroApacheAvro = &shared.DestinationS3OutputFormatAvroApacheAvro{
+ formatType := new(shared.DestinationS3FormatType)
+ if !r.Configuration.Format.AvroApacheAvro.FormatType.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.FormatType.IsNull() {
+ *formatType = shared.DestinationS3FormatType(r.Configuration.Format.AvroApacheAvro.FormatType.ValueString())
+ } else {
+ formatType = nil
+ }
+ destinationS3AvroApacheAvro = &shared.DestinationS3AvroApacheAvro{
CompressionCodec: compressionCodec,
FormatType: formatType,
}
}
- if destinationS3OutputFormatAvroApacheAvro != nil {
+ if destinationS3AvroApacheAvro != nil {
format = shared.DestinationS3OutputFormat{
- DestinationS3OutputFormatAvroApacheAvro: destinationS3OutputFormatAvroApacheAvro,
- }
- }
- var destinationS3OutputFormatCSVCommaSeparatedValues *shared.DestinationS3OutputFormatCSVCommaSeparatedValues
- if r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues != nil {
- var compression *shared.DestinationS3OutputFormatCSVCommaSeparatedValuesCompression
- if r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues.Compression != nil {
- var destinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression *shared.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- if r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues.Compression.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- compressionType := new(shared.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType)
- if !r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues.Compression.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues.Compression.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.IsNull() {
- *compressionType = shared.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType(r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues.Compression.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.ValueString())
+ DestinationS3AvroApacheAvro: destinationS3AvroApacheAvro,
+ }
+ }
+ var destinationS3CSVCommaSeparatedValues *shared.DestinationS3CSVCommaSeparatedValues
+ if r.Configuration.Format.CSVCommaSeparatedValues != nil {
+ var compression *shared.DestinationS3Compression
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression != nil {
+ var destinationS3SchemasNoCompression *shared.DestinationS3SchemasNoCompression
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression != nil {
+ compressionType := new(shared.DestinationS3CompressionType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.IsNull() {
+ *compressionType = shared.DestinationS3CompressionType(r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.ValueString())
} else {
compressionType = nil
}
- destinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression = &shared.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression{
+ destinationS3SchemasNoCompression = &shared.DestinationS3SchemasNoCompression{
CompressionType: compressionType,
}
}
- if destinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- compression = &shared.DestinationS3OutputFormatCSVCommaSeparatedValuesCompression{
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression: destinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression,
+ if destinationS3SchemasNoCompression != nil {
+ compression = &shared.DestinationS3Compression{
+ DestinationS3SchemasNoCompression: destinationS3SchemasNoCompression,
}
}
- var destinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP *shared.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP
- if r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues.Compression.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- compressionType1 := new(shared.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType)
- if !r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues.Compression.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues.Compression.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.IsNull() {
- *compressionType1 = shared.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType(r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues.Compression.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.ValueString())
+ var destinationS3GZIP *shared.DestinationS3GZIP
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip != nil {
+ compressionType1 := new(shared.DestinationS3SchemasCompressionType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.IsNull() {
+ *compressionType1 = shared.DestinationS3SchemasCompressionType(r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.ValueString())
} else {
compressionType1 = nil
}
- destinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP = &shared.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP{
+ destinationS3GZIP = &shared.DestinationS3GZIP{
CompressionType: compressionType1,
}
}
- if destinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- compression = &shared.DestinationS3OutputFormatCSVCommaSeparatedValuesCompression{
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP: destinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP,
+ if destinationS3GZIP != nil {
+ compression = &shared.DestinationS3Compression{
+ DestinationS3GZIP: destinationS3GZIP,
}
}
}
- flattening := shared.DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening(r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues.Flattening.ValueString())
- formatType1 := shared.DestinationS3OutputFormatCSVCommaSeparatedValuesFormatType(r.Configuration.Format.DestinationS3OutputFormatCSVCommaSeparatedValues.FormatType.ValueString())
- destinationS3OutputFormatCSVCommaSeparatedValues = &shared.DestinationS3OutputFormatCSVCommaSeparatedValues{
+ flattening := new(shared.DestinationS3Flattening)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsNull() {
+ *flattening = shared.DestinationS3Flattening(r.Configuration.Format.CSVCommaSeparatedValues.Flattening.ValueString())
+ } else {
+ flattening = nil
+ }
+ formatType1 := new(shared.DestinationS3SchemasFormatType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.FormatType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.FormatType.IsNull() {
+ *formatType1 = shared.DestinationS3SchemasFormatType(r.Configuration.Format.CSVCommaSeparatedValues.FormatType.ValueString())
+ } else {
+ formatType1 = nil
+ }
+ destinationS3CSVCommaSeparatedValues = &shared.DestinationS3CSVCommaSeparatedValues{
Compression: compression,
Flattening: flattening,
FormatType: formatType1,
}
}
- if destinationS3OutputFormatCSVCommaSeparatedValues != nil {
+ if destinationS3CSVCommaSeparatedValues != nil {
format = shared.DestinationS3OutputFormat{
- DestinationS3OutputFormatCSVCommaSeparatedValues: destinationS3OutputFormatCSVCommaSeparatedValues,
- }
- }
- var destinationS3OutputFormatJSONLinesNewlineDelimitedJSON *shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON != nil {
- var compression1 *shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression
- if r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Compression != nil {
- var destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- if r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compressionType2 := new(shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType)
- if !r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsNull() {
- *compressionType2 = shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.ValueString())
+ DestinationS3CSVCommaSeparatedValues: destinationS3CSVCommaSeparatedValues,
+ }
+ }
+ var destinationS3JSONLinesNewlineDelimitedJSON *shared.DestinationS3JSONLinesNewlineDelimitedJSON
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON != nil {
+ var compression1 *shared.DestinationS3SchemasCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression != nil {
+ var destinationS3SchemasFormatNoCompression *shared.DestinationS3SchemasFormatNoCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression != nil {
+ compressionType2 := new(shared.DestinationS3SchemasFormatCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsNull() {
+ *compressionType2 = shared.DestinationS3SchemasFormatCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.ValueString())
} else {
compressionType2 = nil
}
- destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = &shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression{
+ destinationS3SchemasFormatNoCompression = &shared.DestinationS3SchemasFormatNoCompression{
CompressionType: compressionType2,
}
}
- if destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compression1 = &shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
+ if destinationS3SchemasFormatNoCompression != nil {
+ compression1 = &shared.DestinationS3SchemasCompression{
+ DestinationS3SchemasFormatNoCompression: destinationS3SchemasFormatNoCompression,
}
}
- var destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- if r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compressionType3 := new(shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType)
- if !r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsNull() {
- *compressionType3 = shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.ValueString())
+ var destinationS3SchemasGZIP *shared.DestinationS3SchemasGZIP
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip != nil {
+ compressionType3 := new(shared.DestinationS3SchemasFormatOutputFormatCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsNull() {
+ *compressionType3 = shared.DestinationS3SchemasFormatOutputFormatCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.ValueString())
} else {
compressionType3 = nil
}
- destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = &shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP{
+ destinationS3SchemasGZIP = &shared.DestinationS3SchemasGZIP{
CompressionType: compressionType3,
}
}
- if destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compression1 = &shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
+ if destinationS3SchemasGZIP != nil {
+ compression1 = &shared.DestinationS3SchemasCompression{
+ DestinationS3SchemasGZIP: destinationS3SchemasGZIP,
}
}
}
- flattening1 := new(shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening)
- if !r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Flattening.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Flattening.IsNull() {
- *flattening1 = shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening(r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.Flattening.ValueString())
+ flattening1 := new(shared.DestinationS3SchemasFlattening)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.IsNull() {
+ *flattening1 = shared.DestinationS3SchemasFlattening(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.ValueString())
} else {
flattening1 = nil
}
- formatType2 := shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFormatType(r.Configuration.Format.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- destinationS3OutputFormatJSONLinesNewlineDelimitedJSON = &shared.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON{
+ formatType2 := new(shared.DestinationS3SchemasFormatFormatType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsNull() {
+ *formatType2 = shared.DestinationS3SchemasFormatFormatType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.ValueString())
+ } else {
+ formatType2 = nil
+ }
+ destinationS3JSONLinesNewlineDelimitedJSON = &shared.DestinationS3JSONLinesNewlineDelimitedJSON{
Compression: compression1,
Flattening: flattening1,
FormatType: formatType2,
}
}
- if destinationS3OutputFormatJSONLinesNewlineDelimitedJSON != nil {
+ if destinationS3JSONLinesNewlineDelimitedJSON != nil {
format = shared.DestinationS3OutputFormat{
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON: destinationS3OutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationS3JSONLinesNewlineDelimitedJSON: destinationS3JSONLinesNewlineDelimitedJSON,
}
}
- var destinationS3OutputFormatParquetColumnarStorage *shared.DestinationS3OutputFormatParquetColumnarStorage
- if r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage != nil {
+ var destinationS3ParquetColumnarStorage *shared.DestinationS3ParquetColumnarStorage
+ if r.Configuration.Format.ParquetColumnarStorage != nil {
blockSizeMb := new(int64)
- if !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.BlockSizeMb.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.BlockSizeMb.IsNull() {
- *blockSizeMb = r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.BlockSizeMb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.IsNull() {
+ *blockSizeMb = r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.ValueInt64()
} else {
blockSizeMb = nil
}
- compressionCodec1 := new(shared.DestinationS3OutputFormatParquetColumnarStorageCompressionCodec)
- if !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.CompressionCodec.IsNull() {
- *compressionCodec1 = shared.DestinationS3OutputFormatParquetColumnarStorageCompressionCodec(r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.CompressionCodec.ValueString())
+ compressionCodec1 := new(shared.DestinationS3SchemasCompressionCodec)
+ if !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsNull() {
+ *compressionCodec1 = shared.DestinationS3SchemasCompressionCodec(r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.ValueString())
} else {
compressionCodec1 = nil
}
dictionaryEncoding := new(bool)
- if !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.DictionaryEncoding.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.DictionaryEncoding.IsNull() {
- *dictionaryEncoding = r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.DictionaryEncoding.ValueBool()
+ if !r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.IsNull() {
+ *dictionaryEncoding = r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.ValueBool()
} else {
dictionaryEncoding = nil
}
dictionaryPageSizeKb := new(int64)
- if !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.DictionaryPageSizeKb.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.DictionaryPageSizeKb.IsNull() {
- *dictionaryPageSizeKb = r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.DictionaryPageSizeKb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.IsNull() {
+ *dictionaryPageSizeKb = r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.ValueInt64()
} else {
dictionaryPageSizeKb = nil
}
- formatType3 := shared.DestinationS3OutputFormatParquetColumnarStorageFormatType(r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.FormatType.ValueString())
+ formatType3 := new(shared.DestinationS3SchemasFormatOutputFormatFormatType)
+ if !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsNull() {
+ *formatType3 = shared.DestinationS3SchemasFormatOutputFormatFormatType(r.Configuration.Format.ParquetColumnarStorage.FormatType.ValueString())
+ } else {
+ formatType3 = nil
+ }
maxPaddingSizeMb := new(int64)
- if !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.MaxPaddingSizeMb.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.MaxPaddingSizeMb.IsNull() {
- *maxPaddingSizeMb = r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.MaxPaddingSizeMb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.IsNull() {
+ *maxPaddingSizeMb = r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.ValueInt64()
} else {
maxPaddingSizeMb = nil
}
pageSizeKb := new(int64)
- if !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.PageSizeKb.IsUnknown() && !r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.PageSizeKb.IsNull() {
- *pageSizeKb = r.Configuration.Format.DestinationS3OutputFormatParquetColumnarStorage.PageSizeKb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.IsNull() {
+ *pageSizeKb = r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.ValueInt64()
} else {
pageSizeKb = nil
}
- destinationS3OutputFormatParquetColumnarStorage = &shared.DestinationS3OutputFormatParquetColumnarStorage{
+ destinationS3ParquetColumnarStorage = &shared.DestinationS3ParquetColumnarStorage{
BlockSizeMb: blockSizeMb,
CompressionCodec: compressionCodec1,
DictionaryEncoding: dictionaryEncoding,
@@ -279,14 +348,19 @@ func (r *DestinationS3ResourceModel) ToCreateSDKType() *shared.DestinationS3Crea
PageSizeKb: pageSizeKb,
}
}
- if destinationS3OutputFormatParquetColumnarStorage != nil {
+ if destinationS3ParquetColumnarStorage != nil {
format = shared.DestinationS3OutputFormat{
- DestinationS3OutputFormatParquetColumnarStorage: destinationS3OutputFormatParquetColumnarStorage,
+ DestinationS3ParquetColumnarStorage: destinationS3ParquetColumnarStorage,
}
}
s3BucketName := r.Configuration.S3BucketName.ValueString()
s3BucketPath := r.Configuration.S3BucketPath.ValueString()
- s3BucketRegion := shared.DestinationS3S3BucketRegion(r.Configuration.S3BucketRegion.ValueString())
+ s3BucketRegion := new(shared.DestinationS3S3BucketRegion)
+ if !r.Configuration.S3BucketRegion.IsUnknown() && !r.Configuration.S3BucketRegion.IsNull() {
+ *s3BucketRegion = shared.DestinationS3S3BucketRegion(r.Configuration.S3BucketRegion.ValueString())
+ } else {
+ s3BucketRegion = nil
+ }
s3Endpoint := new(string)
if !r.Configuration.S3Endpoint.IsUnknown() && !r.Configuration.S3Endpoint.IsNull() {
*s3Endpoint = r.Configuration.S3Endpoint.ValueString()
@@ -307,7 +381,6 @@ func (r *DestinationS3ResourceModel) ToCreateSDKType() *shared.DestinationS3Crea
}
configuration := shared.DestinationS3{
AccessKeyID: accessKeyID,
- DestinationType: destinationType,
FileNamePattern: fileNamePattern,
Format: format,
S3BucketName: s3BucketName,
@@ -317,10 +390,17 @@ func (r *DestinationS3ResourceModel) ToCreateSDKType() *shared.DestinationS3Crea
S3PathFormat: s3PathFormat,
SecretAccessKey: secretAccessKey,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationS3CreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -346,254 +426,324 @@ func (r *DestinationS3ResourceModel) ToUpdateSDKType() *shared.DestinationS3PutR
fileNamePattern = nil
}
var format shared.DestinationS3UpdateOutputFormat
- var destinationS3UpdateOutputFormatAvroApacheAvro *shared.DestinationS3UpdateOutputFormatAvroApacheAvro
- if r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro != nil {
- var compressionCodec shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec
- var destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression *shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression
- if r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- codec := shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec(r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression.Codec.ValueString())
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression = &shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression{
+ var destinationS3UpdateAvroApacheAvro *shared.DestinationS3UpdateAvroApacheAvro
+ if r.Configuration.Format.AvroApacheAvro != nil {
+ var compressionCodec shared.DestinationS3UpdateCompressionCodec
+ var destinationS3UpdateNoCompression *shared.DestinationS3UpdateNoCompression
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression != nil {
+ codec := new(shared.DestinationS3UpdateCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.IsNull() {
+ *codec = shared.DestinationS3UpdateCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.NoCompression.Codec.ValueString())
+ } else {
+ codec = nil
+ }
+ destinationS3UpdateNoCompression = &shared.DestinationS3UpdateNoCompression{
Codec: codec,
}
}
- if destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- compressionCodec = shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression: destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression,
+ if destinationS3UpdateNoCompression != nil {
+ compressionCodec = shared.DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateNoCompression: destinationS3UpdateNoCompression,
}
}
- var destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate *shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate
- if r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- codec1 := shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec(r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate.Codec.ValueString())
- compressionLevel := r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate.CompressionLevel.ValueInt64()
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate = &shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate{
+ var destinationS3UpdateDeflate *shared.DestinationS3UpdateDeflate
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate != nil {
+ codec1 := new(shared.DestinationS3UpdateSchemasCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.IsNull() {
+ *codec1 = shared.DestinationS3UpdateSchemasCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.Codec.ValueString())
+ } else {
+ codec1 = nil
+ }
+ compressionLevel := new(int64)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.IsNull() {
+ *compressionLevel = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Deflate.CompressionLevel.ValueInt64()
+ } else {
+ compressionLevel = nil
+ }
+ destinationS3UpdateDeflate = &shared.DestinationS3UpdateDeflate{
Codec: codec1,
CompressionLevel: compressionLevel,
}
}
- if destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- compressionCodec = shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate: destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate,
+ if destinationS3UpdateDeflate != nil {
+ compressionCodec = shared.DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateDeflate: destinationS3UpdateDeflate,
}
}
- var destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 *shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2
- if r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- codec2 := shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec(r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2.Codec.ValueString())
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 = &shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2{
+ var destinationS3UpdateBzip2 *shared.DestinationS3UpdateBzip2
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2 != nil {
+ codec2 := new(shared.DestinationS3UpdateSchemasFormatCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.IsNull() {
+ *codec2 = shared.DestinationS3UpdateSchemasFormatCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Bzip2.Codec.ValueString())
+ } else {
+ codec2 = nil
+ }
+ destinationS3UpdateBzip2 = &shared.DestinationS3UpdateBzip2{
Codec: codec2,
}
}
- if destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- compressionCodec = shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2: destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2,
+ if destinationS3UpdateBzip2 != nil {
+ compressionCodec = shared.DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateBzip2: destinationS3UpdateBzip2,
}
}
- var destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz *shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz
- if r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz != nil {
- codec3 := shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec(r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz.Codec.ValueString())
- compressionLevel1 := r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz.CompressionLevel.ValueInt64()
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz = &shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz{
+ var destinationS3UpdateXz *shared.DestinationS3UpdateXz
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz != nil {
+ codec3 := new(shared.DestinationS3UpdateSchemasFormatOutputFormatCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.IsNull() {
+ *codec3 = shared.DestinationS3UpdateSchemasFormatOutputFormatCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.Codec.ValueString())
+ } else {
+ codec3 = nil
+ }
+ compressionLevel1 := new(int64)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.IsNull() {
+ *compressionLevel1 = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Xz.CompressionLevel.ValueInt64()
+ } else {
+ compressionLevel1 = nil
+ }
+ destinationS3UpdateXz = &shared.DestinationS3UpdateXz{
Codec: codec3,
CompressionLevel: compressionLevel1,
}
}
- if destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz != nil {
- compressionCodec = shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz: destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz,
+ if destinationS3UpdateXz != nil {
+ compressionCodec = shared.DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateXz: destinationS3UpdateXz,
}
}
- var destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard *shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard
- if r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- codec4 := shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec(r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.Codec.ValueString())
- compressionLevel2 := r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.CompressionLevel.ValueInt64()
+ var destinationS3UpdateZstandard *shared.DestinationS3UpdateZstandard
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard != nil {
+ codec4 := new(shared.DestinationS3UpdateSchemasFormatOutputFormat1Codec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.IsNull() {
+ *codec4 = shared.DestinationS3UpdateSchemasFormatOutputFormat1Codec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.Codec.ValueString())
+ } else {
+ codec4 = nil
+ }
+ compressionLevel2 := new(int64)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.IsNull() {
+ *compressionLevel2 = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.CompressionLevel.ValueInt64()
+ } else {
+ compressionLevel2 = nil
+ }
includeChecksum := new(bool)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.IsNull() {
- *includeChecksum = r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard.IncludeChecksum.ValueBool()
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.IsNull() {
+ *includeChecksum = r.Configuration.Format.AvroApacheAvro.CompressionCodec.Zstandard.IncludeChecksum.ValueBool()
} else {
includeChecksum = nil
}
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard = &shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard{
+ destinationS3UpdateZstandard = &shared.DestinationS3UpdateZstandard{
Codec: codec4,
CompressionLevel: compressionLevel2,
IncludeChecksum: includeChecksum,
}
}
- if destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- compressionCodec = shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard: destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard,
+ if destinationS3UpdateZstandard != nil {
+ compressionCodec = shared.DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateZstandard: destinationS3UpdateZstandard,
}
}
- var destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy *shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy
- if r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- codec5 := shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec(r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.CompressionCodec.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy.Codec.ValueString())
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy = &shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy{
+ var destinationS3UpdateSnappy *shared.DestinationS3UpdateSnappy
+ if r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy != nil {
+ codec5 := new(shared.DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodec)
+ if !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.IsNull() {
+ *codec5 = shared.DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodec(r.Configuration.Format.AvroApacheAvro.CompressionCodec.Snappy.Codec.ValueString())
+ } else {
+ codec5 = nil
+ }
+ destinationS3UpdateSnappy = &shared.DestinationS3UpdateSnappy{
Codec: codec5,
}
}
- if destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- compressionCodec = shared.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy: destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy,
+ if destinationS3UpdateSnappy != nil {
+ compressionCodec = shared.DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateSnappy: destinationS3UpdateSnappy,
}
}
- formatType := shared.DestinationS3UpdateOutputFormatAvroApacheAvroFormatType(r.Configuration.Format.DestinationS3UpdateOutputFormatAvroApacheAvro.FormatType.ValueString())
- destinationS3UpdateOutputFormatAvroApacheAvro = &shared.DestinationS3UpdateOutputFormatAvroApacheAvro{
+ formatType := new(shared.DestinationS3UpdateFormatType)
+ if !r.Configuration.Format.AvroApacheAvro.FormatType.IsUnknown() && !r.Configuration.Format.AvroApacheAvro.FormatType.IsNull() {
+ *formatType = shared.DestinationS3UpdateFormatType(r.Configuration.Format.AvroApacheAvro.FormatType.ValueString())
+ } else {
+ formatType = nil
+ }
+ destinationS3UpdateAvroApacheAvro = &shared.DestinationS3UpdateAvroApacheAvro{
CompressionCodec: compressionCodec,
FormatType: formatType,
}
}
- if destinationS3UpdateOutputFormatAvroApacheAvro != nil {
+ if destinationS3UpdateAvroApacheAvro != nil {
format = shared.DestinationS3UpdateOutputFormat{
- DestinationS3UpdateOutputFormatAvroApacheAvro: destinationS3UpdateOutputFormatAvroApacheAvro,
- }
- }
- var destinationS3UpdateOutputFormatCSVCommaSeparatedValues *shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues
- if r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues != nil {
- var compression *shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression
- if r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues.Compression != nil {
- var destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression *shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- if r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- compressionType := new(shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.IsNull() {
- *compressionType = shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType(r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression.CompressionType.ValueString())
+ DestinationS3UpdateAvroApacheAvro: destinationS3UpdateAvroApacheAvro,
+ }
+ }
+ var destinationS3UpdateCSVCommaSeparatedValues *shared.DestinationS3UpdateCSVCommaSeparatedValues
+ if r.Configuration.Format.CSVCommaSeparatedValues != nil {
+ var compression *shared.DestinationS3UpdateCompression
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression != nil {
+ var destinationS3UpdateSchemasNoCompression *shared.DestinationS3UpdateSchemasNoCompression
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression != nil {
+ compressionType := new(shared.DestinationS3UpdateCompressionType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.IsNull() {
+ *compressionType = shared.DestinationS3UpdateCompressionType(r.Configuration.Format.CSVCommaSeparatedValues.Compression.NoCompression.CompressionType.ValueString())
} else {
compressionType = nil
}
- destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression = &shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression{
+ destinationS3UpdateSchemasNoCompression = &shared.DestinationS3UpdateSchemasNoCompression{
CompressionType: compressionType,
}
}
- if destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- compression = &shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression: destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression,
+ if destinationS3UpdateSchemasNoCompression != nil {
+ compression = &shared.DestinationS3UpdateCompression{
+ DestinationS3UpdateSchemasNoCompression: destinationS3UpdateSchemasNoCompression,
}
}
- var destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP *shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP
- if r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- compressionType1 := new(shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.IsNull() {
- *compressionType1 = shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType(r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues.Compression.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP.CompressionType.ValueString())
+ var destinationS3UpdateGZIP *shared.DestinationS3UpdateGZIP
+ if r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip != nil {
+ compressionType1 := new(shared.DestinationS3UpdateSchemasCompressionType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.IsNull() {
+ *compressionType1 = shared.DestinationS3UpdateSchemasCompressionType(r.Configuration.Format.CSVCommaSeparatedValues.Compression.Gzip.CompressionType.ValueString())
} else {
compressionType1 = nil
}
- destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP = &shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP{
+ destinationS3UpdateGZIP = &shared.DestinationS3UpdateGZIP{
CompressionType: compressionType1,
}
}
- if destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- compression = &shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP: destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP,
+ if destinationS3UpdateGZIP != nil {
+ compression = &shared.DestinationS3UpdateCompression{
+ DestinationS3UpdateGZIP: destinationS3UpdateGZIP,
}
}
}
- flattening := shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening(r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues.Flattening.ValueString())
- formatType1 := shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFormatType(r.Configuration.Format.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues.FormatType.ValueString())
- destinationS3UpdateOutputFormatCSVCommaSeparatedValues = &shared.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues{
+ flattening := new(shared.DestinationS3UpdateFlattening)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.Flattening.IsNull() {
+ *flattening = shared.DestinationS3UpdateFlattening(r.Configuration.Format.CSVCommaSeparatedValues.Flattening.ValueString())
+ } else {
+ flattening = nil
+ }
+ formatType1 := new(shared.DestinationS3UpdateSchemasFormatType)
+ if !r.Configuration.Format.CSVCommaSeparatedValues.FormatType.IsUnknown() && !r.Configuration.Format.CSVCommaSeparatedValues.FormatType.IsNull() {
+ *formatType1 = shared.DestinationS3UpdateSchemasFormatType(r.Configuration.Format.CSVCommaSeparatedValues.FormatType.ValueString())
+ } else {
+ formatType1 = nil
+ }
+ destinationS3UpdateCSVCommaSeparatedValues = &shared.DestinationS3UpdateCSVCommaSeparatedValues{
Compression: compression,
Flattening: flattening,
FormatType: formatType1,
}
}
- if destinationS3UpdateOutputFormatCSVCommaSeparatedValues != nil {
+ if destinationS3UpdateCSVCommaSeparatedValues != nil {
format = shared.DestinationS3UpdateOutputFormat{
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValues: destinationS3UpdateOutputFormatCSVCommaSeparatedValues,
- }
- }
- var destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON *shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- var compression1 *shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression
- if r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression != nil {
- var destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- if r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compressionType2 := new(shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsNull() {
- *compressionType2 = shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.ValueString())
+ DestinationS3UpdateCSVCommaSeparatedValues: destinationS3UpdateCSVCommaSeparatedValues,
+ }
+ }
+ var destinationS3UpdateJSONLinesNewlineDelimitedJSON *shared.DestinationS3UpdateJSONLinesNewlineDelimitedJSON
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON != nil {
+ var compression1 *shared.DestinationS3UpdateSchemasCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression != nil {
+ var destinationS3UpdateSchemasFormatNoCompression *shared.DestinationS3UpdateSchemasFormatNoCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression != nil {
+ compressionType2 := new(shared.DestinationS3UpdateSchemasFormatCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsNull() {
+ *compressionType2 = shared.DestinationS3UpdateSchemasFormatCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.ValueString())
} else {
compressionType2 = nil
}
- destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = &shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression{
+ destinationS3UpdateSchemasFormatNoCompression = &shared.DestinationS3UpdateSchemasFormatNoCompression{
CompressionType: compressionType2,
}
}
- if destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compression1 = &shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
+ if destinationS3UpdateSchemasFormatNoCompression != nil {
+ compression1 = &shared.DestinationS3UpdateSchemasCompression{
+ DestinationS3UpdateSchemasFormatNoCompression: destinationS3UpdateSchemasFormatNoCompression,
}
}
- var destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- if r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compressionType3 := new(shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsNull() {
- *compressionType3 = shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.ValueString())
+ var destinationS3UpdateSchemasGZIP *shared.DestinationS3UpdateSchemasGZIP
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip != nil {
+ compressionType3 := new(shared.DestinationS3UpdateSchemasFormatOutputFormatCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsNull() {
+ *compressionType3 = shared.DestinationS3UpdateSchemasFormatOutputFormatCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.ValueString())
} else {
compressionType3 = nil
}
- destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = &shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP{
+ destinationS3UpdateSchemasGZIP = &shared.DestinationS3UpdateSchemasGZIP{
CompressionType: compressionType3,
}
}
- if destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compression1 = &shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
+ if destinationS3UpdateSchemasGZIP != nil {
+ compression1 = &shared.DestinationS3UpdateSchemasCompression{
+ DestinationS3UpdateSchemasGZIP: destinationS3UpdateSchemasGZIP,
}
}
}
- flattening1 := new(shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Flattening.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Flattening.IsNull() {
- *flattening1 = shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening(r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.Flattening.ValueString())
+ flattening1 := new(shared.DestinationS3UpdateSchemasFlattening)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.IsNull() {
+ *flattening1 = shared.DestinationS3UpdateSchemasFlattening(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.ValueString())
} else {
flattening1 = nil
}
- formatType2 := shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType(r.Configuration.Format.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON = &shared.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON{
+ formatType2 := new(shared.DestinationS3UpdateSchemasFormatFormatType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsNull() {
+ *formatType2 = shared.DestinationS3UpdateSchemasFormatFormatType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.ValueString())
+ } else {
+ formatType2 = nil
+ }
+ destinationS3UpdateJSONLinesNewlineDelimitedJSON = &shared.DestinationS3UpdateJSONLinesNewlineDelimitedJSON{
Compression: compression1,
Flattening: flattening1,
FormatType: formatType2,
}
}
- if destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
+ if destinationS3UpdateJSONLinesNewlineDelimitedJSON != nil {
format = shared.DestinationS3UpdateOutputFormat{
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON: destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationS3UpdateJSONLinesNewlineDelimitedJSON: destinationS3UpdateJSONLinesNewlineDelimitedJSON,
}
}
- var destinationS3UpdateOutputFormatParquetColumnarStorage *shared.DestinationS3UpdateOutputFormatParquetColumnarStorage
- if r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage != nil {
+ var destinationS3UpdateParquetColumnarStorage *shared.DestinationS3UpdateParquetColumnarStorage
+ if r.Configuration.Format.ParquetColumnarStorage != nil {
blockSizeMb := new(int64)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.BlockSizeMb.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.BlockSizeMb.IsNull() {
- *blockSizeMb = r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.BlockSizeMb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.IsNull() {
+ *blockSizeMb = r.Configuration.Format.ParquetColumnarStorage.BlockSizeMb.ValueInt64()
} else {
blockSizeMb = nil
}
- compressionCodec1 := new(shared.DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.CompressionCodec.IsNull() {
- *compressionCodec1 = shared.DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec(r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.CompressionCodec.ValueString())
+ compressionCodec1 := new(shared.DestinationS3UpdateSchemasCompressionCodec)
+ if !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.IsNull() {
+ *compressionCodec1 = shared.DestinationS3UpdateSchemasCompressionCodec(r.Configuration.Format.ParquetColumnarStorage.CompressionCodec.ValueString())
} else {
compressionCodec1 = nil
}
dictionaryEncoding := new(bool)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.DictionaryEncoding.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.DictionaryEncoding.IsNull() {
- *dictionaryEncoding = r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.DictionaryEncoding.ValueBool()
+ if !r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.IsNull() {
+ *dictionaryEncoding = r.Configuration.Format.ParquetColumnarStorage.DictionaryEncoding.ValueBool()
} else {
dictionaryEncoding = nil
}
dictionaryPageSizeKb := new(int64)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.DictionaryPageSizeKb.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.DictionaryPageSizeKb.IsNull() {
- *dictionaryPageSizeKb = r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.DictionaryPageSizeKb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.IsNull() {
+ *dictionaryPageSizeKb = r.Configuration.Format.ParquetColumnarStorage.DictionaryPageSizeKb.ValueInt64()
} else {
dictionaryPageSizeKb = nil
}
- formatType3 := shared.DestinationS3UpdateOutputFormatParquetColumnarStorageFormatType(r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.FormatType.ValueString())
+ formatType3 := new(shared.DestinationS3UpdateSchemasFormatOutputFormatFormatType)
+ if !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.FormatType.IsNull() {
+ *formatType3 = shared.DestinationS3UpdateSchemasFormatOutputFormatFormatType(r.Configuration.Format.ParquetColumnarStorage.FormatType.ValueString())
+ } else {
+ formatType3 = nil
+ }
maxPaddingSizeMb := new(int64)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.MaxPaddingSizeMb.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.MaxPaddingSizeMb.IsNull() {
- *maxPaddingSizeMb = r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.MaxPaddingSizeMb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.IsNull() {
+ *maxPaddingSizeMb = r.Configuration.Format.ParquetColumnarStorage.MaxPaddingSizeMb.ValueInt64()
} else {
maxPaddingSizeMb = nil
}
pageSizeKb := new(int64)
- if !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.PageSizeKb.IsUnknown() && !r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.PageSizeKb.IsNull() {
- *pageSizeKb = r.Configuration.Format.DestinationS3UpdateOutputFormatParquetColumnarStorage.PageSizeKb.ValueInt64()
+ if !r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.IsUnknown() && !r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.IsNull() {
+ *pageSizeKb = r.Configuration.Format.ParquetColumnarStorage.PageSizeKb.ValueInt64()
} else {
pageSizeKb = nil
}
- destinationS3UpdateOutputFormatParquetColumnarStorage = &shared.DestinationS3UpdateOutputFormatParquetColumnarStorage{
+ destinationS3UpdateParquetColumnarStorage = &shared.DestinationS3UpdateParquetColumnarStorage{
BlockSizeMb: blockSizeMb,
CompressionCodec: compressionCodec1,
DictionaryEncoding: dictionaryEncoding,
@@ -603,14 +753,19 @@ func (r *DestinationS3ResourceModel) ToUpdateSDKType() *shared.DestinationS3PutR
PageSizeKb: pageSizeKb,
}
}
- if destinationS3UpdateOutputFormatParquetColumnarStorage != nil {
+ if destinationS3UpdateParquetColumnarStorage != nil {
format = shared.DestinationS3UpdateOutputFormat{
- DestinationS3UpdateOutputFormatParquetColumnarStorage: destinationS3UpdateOutputFormatParquetColumnarStorage,
+ DestinationS3UpdateParquetColumnarStorage: destinationS3UpdateParquetColumnarStorage,
}
}
s3BucketName := r.Configuration.S3BucketName.ValueString()
s3BucketPath := r.Configuration.S3BucketPath.ValueString()
- s3BucketRegion := shared.DestinationS3UpdateS3BucketRegion(r.Configuration.S3BucketRegion.ValueString())
+ s3BucketRegion := new(shared.DestinationS3UpdateS3BucketRegion)
+ if !r.Configuration.S3BucketRegion.IsUnknown() && !r.Configuration.S3BucketRegion.IsNull() {
+ *s3BucketRegion = shared.DestinationS3UpdateS3BucketRegion(r.Configuration.S3BucketRegion.ValueString())
+ } else {
+ s3BucketRegion = nil
+ }
s3Endpoint := new(string)
if !r.Configuration.S3Endpoint.IsUnknown() && !r.Configuration.S3Endpoint.IsNull() {
*s3Endpoint = r.Configuration.S3Endpoint.ValueString()
diff --git a/internal/provider/destination_s3glue_data_source.go b/internal/provider/destination_s3glue_data_source.go
old mode 100755
new mode 100644
index c001567ad..a5cb61e89
--- a/internal/provider/destination_s3glue_data_source.go
+++ b/internal/provider/destination_s3glue_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationS3GlueDataSource struct {
// DestinationS3GlueDataSourceModel describes the data model.
type DestinationS3GlueDataSourceModel struct {
- Configuration DestinationS3Glue1 `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,240 +47,17 @@ func (r *DestinationS3GlueDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "DestinationS3Glue DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "s3-glue",
- ),
- },
- Description: `must be one of ["s3-glue"]`,
- },
- "file_name_pattern": schema.StringAttribute{
- Computed: true,
- Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
- },
- "format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_glue_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "flattening": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_glue_update_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "flattening": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Format of the data output. See here for more details`,
- },
- "glue_database": schema.StringAttribute{
- Computed: true,
- Description: `Name of the glue database for creating the tables, leave blank if no integration`,
- },
- "glue_serialization_library": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "org.openx.data.jsonserde.JsonSerDe",
- "org.apache.hive.hcatalog.data.JsonSerDe",
- ),
- },
- MarkdownDescription: `must be one of ["org.openx.data.jsonserde.JsonSerDe", "org.apache.hive.hcatalog.data.JsonSerDe"]` + "\n" +
- `The library that your query engine will use for reading and writing data in your lake.`,
- },
- "s3_bucket_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the S3 bucket. Read more here.`,
- },
- "s3_bucket_path": schema.StringAttribute{
- Computed: true,
- Description: `Directory under the S3 bucket where data will be written. Read more here`,
- },
- "s3_bucket_region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "",
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- "us-gov-east-1",
- "us-gov-west-1",
- ),
- },
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the S3 bucket. See here for all region codes.`,
- },
- "s3_endpoint": schema.StringAttribute{
- Computed: true,
- Description: `Your S3 endpoint url. Read more here`,
- },
- "s3_path_format": schema.StringAttribute{
- Computed: true,
- Description: `Format string on how data will be organized inside the S3 bucket directory. Read more here`,
- },
- "secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret to the access key ID. Read more here`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_s3glue_data_source_sdk.go b/internal/provider/destination_s3glue_data_source_sdk.go
old mode 100755
new mode 100644
index 2f5fd63d0..89de281a0
--- a/internal/provider/destination_s3glue_data_source_sdk.go
+++ b/internal/provider/destination_s3glue_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationS3GlueDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_s3glue_resource.go b/internal/provider/destination_s3glue_resource.go
old mode 100755
new mode 100644
index ad451b298..c2d77b669
--- a/internal/provider/destination_s3glue_resource.go
+++ b/internal/provider/destination_s3glue_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type DestinationS3GlueResource struct {
// DestinationS3GlueResourceModel describes the resource data model.
type DestinationS3GlueResourceModel struct {
Configuration DestinationS3Glue `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -56,17 +58,9 @@ func (r *DestinationS3GlueResource) Schema(ctx context.Context, req resource.Sch
Attributes: map[string]schema.Attribute{
"access_key_id": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "s3-glue",
- ),
- },
- Description: `must be one of ["s3-glue"]`,
- },
"file_name_pattern": schema.StringAttribute{
Optional: true,
Description: `The pattern allows you to set the file-name format for the S3 staging file(s)`,
@@ -74,156 +68,91 @@ func (r *DestinationS3GlueResource) Schema(ctx context.Context, req resource.Sch
"format": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "destination_s3_glue_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
+ "json_lines_newline_delimited_json": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
+ "gzip": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_type": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["GZIP"]; Default: "GZIP"`,
Validators: []validator.String{
stringvalidator.OneOf(
"GZIP",
),
},
- Description: `must be one of ["GZIP"]`,
},
},
Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
},
- "destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
+ "no_compression": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"compression_type": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["No Compression"]; Default: "No Compression"`,
Validators: []validator.String{
stringvalidator.OneOf(
"No Compression",
),
},
- Description: `must be one of ["No Compression"]`,
},
},
Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
},
},
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "flattening": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No flattening",
- "Root level flattening",
- ),
- },
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.`,
- },
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Format of the data output. See here for more details`,
- },
- "destination_s3_glue_update_output_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_no_compression": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "No Compression",
- ),
- },
- Description: `must be one of ["No Compression"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- "destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_gzip": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "compression_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GZIP",
- ),
- },
- Description: `must be one of ["GZIP"]`,
- },
- },
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
- },
- },
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").`,
},
"flattening": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]; Default: "Root level flattening"` + "\n" +
+ `Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.`,
Validators: []validator.String{
stringvalidator.OneOf(
"No flattening",
"Root level flattening",
),
},
- MarkdownDescription: `must be one of ["No flattening", "Root level flattening"]` + "\n" +
- `Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.`,
},
"format_type": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["JSONL"]; Default: "JSONL"`,
Validators: []validator.String{
stringvalidator.OneOf(
"JSONL",
),
},
- Description: `must be one of ["JSONL"]`,
},
},
Description: `Format of the data output. See here for more details`,
},
},
+ Description: `Format of the data output. See here for more details`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Format of the data output. See here for more details`,
},
"glue_database": schema.StringAttribute{
Required: true,
Description: `Name of the glue database for creating the tables, leave blank if no integration`,
},
"glue_serialization_library": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["org.openx.data.jsonserde.JsonSerDe", "org.apache.hive.hcatalog.data.JsonSerDe"]; Default: "org.openx.data.jsonserde.JsonSerDe"` + "\n" +
+ `The library that your query engine will use for reading and writing data in your lake.`,
Validators: []validator.String{
stringvalidator.OneOf(
"org.openx.data.jsonserde.JsonSerDe",
"org.apache.hive.hcatalog.data.JsonSerDe",
),
},
- MarkdownDescription: `must be one of ["org.openx.data.jsonserde.JsonSerDe", "org.apache.hive.hcatalog.data.JsonSerDe"]` + "\n" +
- `The library that your query engine will use for reading and writing data in your lake.`,
},
"s3_bucket_name": schema.StringAttribute{
Required: true,
@@ -234,7 +163,9 @@ func (r *DestinationS3GlueResource) Schema(ctx context.Context, req resource.Sch
Description: `Directory under the S3 bucket where data will be written. Read more here`,
},
"s3_bucket_region": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""` + "\n" +
+ `The region of the S3 bucket. See here for all region codes.`,
Validators: []validator.String{
stringvalidator.OneOf(
"",
@@ -265,12 +196,11 @@ func (r *DestinationS3GlueResource) Schema(ctx context.Context, req resource.Sch
"us-gov-west-1",
),
},
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the S3 bucket. See here for all region codes.`,
},
"s3_endpoint": schema.StringAttribute{
- Optional: true,
- Description: `Your S3 endpoint url. Read more here`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `Your S3 endpoint url. Read more here`,
},
"s3_path_format": schema.StringAttribute{
Optional: true,
@@ -278,10 +208,18 @@ func (r *DestinationS3GlueResource) Schema(ctx context.Context, req resource.Sch
},
"secret_access_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The corresponding secret to the access key ID. Read more here`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -298,7 +236,8 @@ func (r *DestinationS3GlueResource) Schema(ctx context.Context, req resource.Sch
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -348,7 +287,7 @@ func (r *DestinationS3GlueResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationS3Glue(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -524,5 +463,5 @@ func (r *DestinationS3GlueResource) Delete(ctx context.Context, req resource.Del
}
func (r *DestinationS3GlueResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_s3glue_resource_sdk.go b/internal/provider/destination_s3glue_resource_sdk.go
old mode 100755
new mode 100644
index c2d08b73f..ea35ccaea
--- a/internal/provider/destination_s3glue_resource_sdk.go
+++ b/internal/provider/destination_s3glue_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -14,7 +14,6 @@ func (r *DestinationS3GlueResourceModel) ToCreateSDKType() *shared.DestinationS3
} else {
accessKeyID = nil
}
- destinationType := shared.DestinationS3GlueS3Glue(r.Configuration.DestinationType.ValueString())
fileNamePattern := new(string)
if !r.Configuration.FileNamePattern.IsUnknown() && !r.Configuration.FileNamePattern.IsNull() {
*fileNamePattern = r.Configuration.FileNamePattern.ValueString()
@@ -22,68 +21,83 @@ func (r *DestinationS3GlueResourceModel) ToCreateSDKType() *shared.DestinationS3
fileNamePattern = nil
}
var format shared.DestinationS3GlueOutputFormat
- var destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON *shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- var compression *shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression
- if r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Compression != nil {
- var destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- if r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compressionType := new(shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType)
- if !r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsNull() {
- *compressionType = shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.ValueString())
+ var destinationS3GlueJSONLinesNewlineDelimitedJSON *shared.DestinationS3GlueJSONLinesNewlineDelimitedJSON
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON != nil {
+ var compression *shared.DestinationS3GlueCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression != nil {
+ var destinationS3GlueNoCompression *shared.DestinationS3GlueNoCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression != nil {
+ compressionType := new(shared.DestinationS3GlueCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsNull() {
+ *compressionType = shared.DestinationS3GlueCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.ValueString())
} else {
compressionType = nil
}
- destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = &shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression{
+ destinationS3GlueNoCompression = &shared.DestinationS3GlueNoCompression{
CompressionType: compressionType,
}
}
- if destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compression = &shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
+ if destinationS3GlueNoCompression != nil {
+ compression = &shared.DestinationS3GlueCompression{
+ DestinationS3GlueNoCompression: destinationS3GlueNoCompression,
}
}
- var destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- if r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compressionType1 := new(shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType)
- if !r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsNull() {
- *compressionType1 = shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.ValueString())
+ var destinationS3GlueGZIP *shared.DestinationS3GlueGZIP
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip != nil {
+ compressionType1 := new(shared.DestinationS3GlueSchemasCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsNull() {
+ *compressionType1 = shared.DestinationS3GlueSchemasCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.ValueString())
} else {
compressionType1 = nil
}
- destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = &shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP{
+ destinationS3GlueGZIP = &shared.DestinationS3GlueGZIP{
CompressionType: compressionType1,
}
}
- if destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compression = &shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
+ if destinationS3GlueGZIP != nil {
+ compression = &shared.DestinationS3GlueCompression{
+ DestinationS3GlueGZIP: destinationS3GlueGZIP,
}
}
}
- flattening := new(shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening)
- if !r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Flattening.IsUnknown() && !r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Flattening.IsNull() {
- *flattening = shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening(r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.Flattening.ValueString())
+ flattening := new(shared.DestinationS3GlueFlattening)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.IsNull() {
+ *flattening = shared.DestinationS3GlueFlattening(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.ValueString())
} else {
flattening = nil
}
- formatType := shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFormatType(r.Configuration.Format.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON = &shared.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON{
+ formatType := new(shared.DestinationS3GlueFormatType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsNull() {
+ *formatType = shared.DestinationS3GlueFormatType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.ValueString())
+ } else {
+ formatType = nil
+ }
+ destinationS3GlueJSONLinesNewlineDelimitedJSON = &shared.DestinationS3GlueJSONLinesNewlineDelimitedJSON{
Compression: compression,
Flattening: flattening,
FormatType: formatType,
}
}
- if destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON != nil {
+ if destinationS3GlueJSONLinesNewlineDelimitedJSON != nil {
format = shared.DestinationS3GlueOutputFormat{
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON: destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationS3GlueJSONLinesNewlineDelimitedJSON: destinationS3GlueJSONLinesNewlineDelimitedJSON,
}
}
glueDatabase := r.Configuration.GlueDatabase.ValueString()
- glueSerializationLibrary := shared.DestinationS3GlueSerializationLibrary(r.Configuration.GlueSerializationLibrary.ValueString())
+ glueSerializationLibrary := new(shared.DestinationS3GlueSerializationLibrary)
+ if !r.Configuration.GlueSerializationLibrary.IsUnknown() && !r.Configuration.GlueSerializationLibrary.IsNull() {
+ *glueSerializationLibrary = shared.DestinationS3GlueSerializationLibrary(r.Configuration.GlueSerializationLibrary.ValueString())
+ } else {
+ glueSerializationLibrary = nil
+ }
s3BucketName := r.Configuration.S3BucketName.ValueString()
s3BucketPath := r.Configuration.S3BucketPath.ValueString()
- s3BucketRegion := shared.DestinationS3GlueS3BucketRegion(r.Configuration.S3BucketRegion.ValueString())
+ s3BucketRegion := new(shared.DestinationS3GlueS3BucketRegion)
+ if !r.Configuration.S3BucketRegion.IsUnknown() && !r.Configuration.S3BucketRegion.IsNull() {
+ *s3BucketRegion = shared.DestinationS3GlueS3BucketRegion(r.Configuration.S3BucketRegion.ValueString())
+ } else {
+ s3BucketRegion = nil
+ }
s3Endpoint := new(string)
if !r.Configuration.S3Endpoint.IsUnknown() && !r.Configuration.S3Endpoint.IsNull() {
*s3Endpoint = r.Configuration.S3Endpoint.ValueString()
@@ -104,7 +118,6 @@ func (r *DestinationS3GlueResourceModel) ToCreateSDKType() *shared.DestinationS3
}
configuration := shared.DestinationS3Glue{
AccessKeyID: accessKeyID,
- DestinationType: destinationType,
FileNamePattern: fileNamePattern,
Format: format,
GlueDatabase: glueDatabase,
@@ -116,10 +129,17 @@ func (r *DestinationS3GlueResourceModel) ToCreateSDKType() *shared.DestinationS3
S3PathFormat: s3PathFormat,
SecretAccessKey: secretAccessKey,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationS3GlueCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -145,68 +165,83 @@ func (r *DestinationS3GlueResourceModel) ToUpdateSDKType() *shared.DestinationS3
fileNamePattern = nil
}
var format shared.DestinationS3GlueUpdateOutputFormat
- var destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON *shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- var compression *shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression
- if r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression != nil {
- var destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- if r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compressionType := new(shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType)
- if !r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.IsNull() {
- *compressionType = shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression.CompressionType.ValueString())
+ var destinationS3GlueUpdateJSONLinesNewlineDelimitedJSON *shared.DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON != nil {
+ var compression *shared.DestinationS3GlueUpdateCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression != nil {
+ var destinationS3GlueUpdateNoCompression *shared.DestinationS3GlueUpdateNoCompression
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression != nil {
+ compressionType := new(shared.DestinationS3GlueUpdateCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.IsNull() {
+ *compressionType = shared.DestinationS3GlueUpdateCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.NoCompression.CompressionType.ValueString())
} else {
compressionType = nil
}
- destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = &shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression{
+ destinationS3GlueUpdateNoCompression = &shared.DestinationS3GlueUpdateNoCompression{
CompressionType: compressionType,
}
}
- if destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- compression = &shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
+ if destinationS3GlueUpdateNoCompression != nil {
+ compression = &shared.DestinationS3GlueUpdateCompression{
+ DestinationS3GlueUpdateNoCompression: destinationS3GlueUpdateNoCompression,
}
}
- var destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- if r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compressionType1 := new(shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType)
- if !r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsUnknown() && !r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.IsNull() {
- *compressionType1 = shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Compression.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP.CompressionType.ValueString())
+ var destinationS3GlueUpdateGZIP *shared.DestinationS3GlueUpdateGZIP
+ if r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip != nil {
+ compressionType1 := new(shared.DestinationS3GlueUpdateSchemasCompressionType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.IsNull() {
+ *compressionType1 = shared.DestinationS3GlueUpdateSchemasCompressionType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Compression.Gzip.CompressionType.ValueString())
} else {
compressionType1 = nil
}
- destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = &shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP{
+ destinationS3GlueUpdateGZIP = &shared.DestinationS3GlueUpdateGZIP{
CompressionType: compressionType1,
}
}
- if destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- compression = &shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
+ if destinationS3GlueUpdateGZIP != nil {
+ compression = &shared.DestinationS3GlueUpdateCompression{
+ DestinationS3GlueUpdateGZIP: destinationS3GlueUpdateGZIP,
}
}
}
- flattening := new(shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening)
- if !r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Flattening.IsUnknown() && !r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Flattening.IsNull() {
- *flattening = shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening(r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.Flattening.ValueString())
+ flattening := new(shared.Flattening)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.IsNull() {
+ *flattening = shared.Flattening(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.Flattening.ValueString())
} else {
flattening = nil
}
- formatType := shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType(r.Configuration.Format.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON = &shared.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON{
+ formatType := new(shared.DestinationS3GlueUpdateFormatType)
+ if !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsUnknown() && !r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.IsNull() {
+ *formatType = shared.DestinationS3GlueUpdateFormatType(r.Configuration.Format.JSONLinesNewlineDelimitedJSON.FormatType.ValueString())
+ } else {
+ formatType = nil
+ }
+ destinationS3GlueUpdateJSONLinesNewlineDelimitedJSON = &shared.DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON{
Compression: compression,
Flattening: flattening,
FormatType: formatType,
}
}
- if destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
+ if destinationS3GlueUpdateJSONLinesNewlineDelimitedJSON != nil {
format = shared.DestinationS3GlueUpdateOutputFormat{
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON: destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON: destinationS3GlueUpdateJSONLinesNewlineDelimitedJSON,
}
}
glueDatabase := r.Configuration.GlueDatabase.ValueString()
- glueSerializationLibrary := shared.DestinationS3GlueUpdateSerializationLibrary(r.Configuration.GlueSerializationLibrary.ValueString())
+ glueSerializationLibrary := new(shared.SerializationLibrary)
+ if !r.Configuration.GlueSerializationLibrary.IsUnknown() && !r.Configuration.GlueSerializationLibrary.IsNull() {
+ *glueSerializationLibrary = shared.SerializationLibrary(r.Configuration.GlueSerializationLibrary.ValueString())
+ } else {
+ glueSerializationLibrary = nil
+ }
s3BucketName := r.Configuration.S3BucketName.ValueString()
s3BucketPath := r.Configuration.S3BucketPath.ValueString()
- s3BucketRegion := shared.DestinationS3GlueUpdateS3BucketRegion(r.Configuration.S3BucketRegion.ValueString())
+ s3BucketRegion := new(shared.DestinationS3GlueUpdateS3BucketRegion)
+ if !r.Configuration.S3BucketRegion.IsUnknown() && !r.Configuration.S3BucketRegion.IsNull() {
+ *s3BucketRegion = shared.DestinationS3GlueUpdateS3BucketRegion(r.Configuration.S3BucketRegion.ValueString())
+ } else {
+ s3BucketRegion = nil
+ }
s3Endpoint := new(string)
if !r.Configuration.S3Endpoint.IsUnknown() && !r.Configuration.S3Endpoint.IsNull() {
*s3Endpoint = r.Configuration.S3Endpoint.ValueString()
diff --git a/internal/provider/destination_sftpjson_data_source.go b/internal/provider/destination_sftpjson_data_source.go
old mode 100755
new mode 100644
index 57975c172..42b0cbb8a
--- a/internal/provider/destination_sftpjson_data_source.go
+++ b/internal/provider/destination_sftpjson_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationSftpJSONDataSource struct {
// DestinationSftpJSONDataSourceModel describes the data model.
type DestinationSftpJSONDataSourceModel struct {
- Configuration DestinationSftpJSON `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,43 +47,17 @@ func (r *DestinationSftpJSONDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "DestinationSftpJSON DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_path": schema.StringAttribute{
- Computed: true,
- Description: `Path to the directory where json files will be written.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sftp-json",
- ),
- },
- Description: `must be one of ["sftp-json"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the SFTP server.`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `Port of the SFTP server.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to use to access the SFTP server.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_sftpjson_data_source_sdk.go b/internal/provider/destination_sftpjson_data_source_sdk.go
old mode 100755
new mode 100644
index 3cdfe2147..c2c392e00
--- a/internal/provider/destination_sftpjson_data_source_sdk.go
+++ b/internal/provider/destination_sftpjson_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationSftpJSONDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_sftpjson_resource.go b/internal/provider/destination_sftpjson_resource.go
old mode 100755
new mode 100644
index 6b961e3b5..2c66b12d6
--- a/internal/provider/destination_sftpjson_resource.go
+++ b/internal/provider/destination_sftpjson_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationSftpJSONResource struct {
// DestinationSftpJSONResourceModel describes the resource data model.
type DestinationSftpJSONResourceModel struct {
Configuration DestinationSftpJSON `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -57,26 +57,19 @@ func (r *DestinationSftpJSONResource) Schema(ctx context.Context, req resource.S
Required: true,
Description: `Path to the directory where json files will be written.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sftp-json",
- ),
- },
- Description: `must be one of ["sftp-json"]`,
- },
"host": schema.StringAttribute{
Required: true,
Description: `Hostname of the SFTP server.`,
},
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Password associated with the username.`,
},
"port": schema.Int64Attribute{
- Optional: true,
- Description: `Port of the SFTP server.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port of the SFTP server.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -84,6 +77,13 @@ func (r *DestinationSftpJSONResource) Schema(ctx context.Context, req resource.S
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -100,7 +100,8 @@ func (r *DestinationSftpJSONResource) Schema(ctx context.Context, req resource.S
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -150,7 +151,7 @@ func (r *DestinationSftpJSONResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationSftpJSON(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -326,5 +327,5 @@ func (r *DestinationSftpJSONResource) Delete(ctx context.Context, req resource.D
}
func (r *DestinationSftpJSONResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_sftpjson_resource_sdk.go b/internal/provider/destination_sftpjson_resource_sdk.go
old mode 100755
new mode 100644
index 38f495727..b3e26640f
--- a/internal/provider/destination_sftpjson_resource_sdk.go
+++ b/internal/provider/destination_sftpjson_resource_sdk.go
@@ -3,12 +3,11 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationSftpJSONResourceModel) ToCreateSDKType() *shared.DestinationSftpJSONCreateRequest {
- destinationType := shared.DestinationSftpJSONSftpJSON(r.Configuration.DestinationType.ValueString())
destinationPath := r.Configuration.DestinationPath.ValueString()
host := r.Configuration.Host.ValueString()
password := r.Configuration.Password.ValueString()
@@ -20,17 +19,23 @@ func (r *DestinationSftpJSONResourceModel) ToCreateSDKType() *shared.Destination
}
username := r.Configuration.Username.ValueString()
configuration := shared.DestinationSftpJSON{
- DestinationType: destinationType,
DestinationPath: destinationPath,
Host: host,
Password: password,
Port: port,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationSftpJSONCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
diff --git a/internal/provider/destination_snowflake_data_source.go b/internal/provider/destination_snowflake_data_source.go
old mode 100755
new mode 100644
index 495b4c89a..88566471c
--- a/internal/provider/destination_snowflake_data_source.go
+++ b/internal/provider/destination_snowflake_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationSnowflakeDataSource struct {
// DestinationSnowflakeDataSourceModel describes the data model.
type DestinationSnowflakeDataSourceModel struct {
- Configuration DestinationSnowflake `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,203 +47,17 @@ func (r *DestinationSnowflakeDataSource) Schema(ctx context.Context, req datasou
MarkdownDescription: "DestinationSnowflake DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_snowflake_authorization_method_key_pair_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Key Pair Authentication",
- ),
- },
- Description: `must be one of ["Key Pair Authentication"]`,
- },
- "private_key": schema.StringAttribute{
- Computed: true,
- Description: `RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.`,
- },
- "private_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Passphrase for private key`,
- },
- },
- },
- "destination_snowflake_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Enter you application's Access Token`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth2.0",
- ),
- },
- Description: `must be one of ["OAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Enter your application's Client ID`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Enter your application's Client secret`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Enter your application's Refresh Token`,
- },
- },
- },
- "destination_snowflake_authorization_method_username_and_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Username and Password",
- ),
- },
- Description: `must be one of ["Username and Password"]`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Enter the password associated with the username.`,
- },
- },
- },
- "destination_snowflake_update_authorization_method_key_pair_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Key Pair Authentication",
- ),
- },
- Description: `must be one of ["Key Pair Authentication"]`,
- },
- "private_key": schema.StringAttribute{
- Computed: true,
- Description: `RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.`,
- },
- "private_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Passphrase for private key`,
- },
- },
- },
- "destination_snowflake_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Enter you application's Access Token`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth2.0",
- ),
- },
- Description: `must be one of ["OAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Enter your application's Client ID`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Enter your application's Client secret`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Enter your application's Refresh Token`,
- },
- },
- },
- "destination_snowflake_update_authorization_method_username_and_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Username and Password",
- ),
- },
- Description: `must be one of ["Username and Password"]`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Enter the password associated with the username.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "database": schema.StringAttribute{
- Computed: true,
- Description: `Enter the name of the database you want to sync data into`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snowflake",
- ),
- },
- Description: `must be one of ["snowflake"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3`,
- },
- "raw_data_schema": schema.StringAttribute{
- Computed: true,
- Description: `The schema to write raw tables into`,
- },
- "role": schema.StringAttribute{
- Computed: true,
- Description: `Enter the role that you want to use to access Snowflake`,
- },
- "schema": schema.StringAttribute{
- Computed: true,
- Description: `Enter the name of the default schema`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Enter the name of the user you want to use to access the database`,
- },
- "warehouse": schema.StringAttribute{
- Computed: true,
- Description: `Enter the name of the warehouse that you want to sync data into`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_snowflake_data_source_sdk.go b/internal/provider/destination_snowflake_data_source_sdk.go
old mode 100755
new mode 100644
index a964931be..a47f0e47a
--- a/internal/provider/destination_snowflake_data_source_sdk.go
+++ b/internal/provider/destination_snowflake_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationSnowflakeDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_snowflake_resource.go b/internal/provider/destination_snowflake_resource.go
old mode 100755
new mode 100644
index 8b76ae715..d10475af6
--- a/internal/provider/destination_snowflake_resource.go
+++ b/internal/provider/destination_snowflake_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type DestinationSnowflakeResource struct {
// DestinationSnowflakeResourceModel describes the resource data model.
type DestinationSnowflakeResourceModel struct {
Configuration DestinationSnowflake `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -57,114 +58,29 @@ func (r *DestinationSnowflakeResource) Schema(ctx context.Context, req resource.
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_snowflake_authorization_method_key_pair_authentication": schema.SingleNestedAttribute{
+ "key_pair_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Key Pair Authentication",
- ),
- },
- Description: `must be one of ["Key Pair Authentication"]`,
- },
- "private_key": schema.StringAttribute{
- Required: true,
- Description: `RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.`,
- },
- "private_key_password": schema.StringAttribute{
- Optional: true,
- Description: `Passphrase for private key`,
- },
- },
- },
- "destination_snowflake_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Enter you application's Access Token`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth2.0",
- ),
- },
- Description: `must be one of ["OAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Optional: true,
- Description: `Enter your application's Client ID`,
- },
- "client_secret": schema.StringAttribute{
- Optional: true,
- Description: `Enter your application's Client secret`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `Enter your application's Refresh Token`,
- },
- },
- },
- "destination_snowflake_authorization_method_username_and_password": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Username and Password",
- ),
- },
- Description: `must be one of ["Username and Password"]`,
- },
- "password": schema.StringAttribute{
- Required: true,
- Description: `Enter the password associated with the username.`,
- },
- },
- },
- "destination_snowflake_update_authorization_method_key_pair_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Key Pair Authentication",
- ),
- },
- Description: `must be one of ["Key Pair Authentication"]`,
- },
"private_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.`,
},
"private_key_password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Passphrase for private key`,
},
},
},
- "destination_snowflake_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Enter you application's Access Token`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth2.0",
- ),
- },
- Description: `must be one of ["OAuth2.0"]`,
- },
"client_id": schema.StringAttribute{
Optional: true,
Description: `Enter your application's Client ID`,
@@ -175,24 +91,17 @@ func (r *DestinationSnowflakeResource) Schema(ctx context.Context, req resource.
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Enter your application's Refresh Token`,
},
},
},
- "destination_snowflake_update_authorization_method_username_and_password": schema.SingleNestedAttribute{
+ "username_and_password": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Username and Password",
- ),
- },
- Description: `must be one of ["Username and Password"]`,
- },
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Enter the password associated with the username.`,
},
},
@@ -206,14 +115,10 @@ func (r *DestinationSnowflakeResource) Schema(ctx context.Context, req resource.
Required: true,
Description: `Enter the name of the database you want to sync data into`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snowflake",
- ),
- },
- Description: `must be one of ["snowflake"]`,
+ "disable_type_dedupe": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions`,
},
"host": schema.StringAttribute{
Required: true,
@@ -225,7 +130,7 @@ func (r *DestinationSnowflakeResource) Schema(ctx context.Context, req resource.
},
"raw_data_schema": schema.StringAttribute{
Optional: true,
- Description: `The schema to write raw tables into`,
+ Description: `The schema to write raw tables into (default: airbyte_internal)`,
},
"role": schema.StringAttribute{
Required: true,
@@ -245,6 +150,13 @@ func (r *DestinationSnowflakeResource) Schema(ctx context.Context, req resource.
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -261,7 +173,8 @@ func (r *DestinationSnowflakeResource) Schema(ctx context.Context, req resource.
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -311,7 +224,7 @@ func (r *DestinationSnowflakeResource) Create(ctx context.Context, req resource.
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationSnowflake(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -487,5 +400,5 @@ func (r *DestinationSnowflakeResource) Delete(ctx context.Context, req resource.
}
func (r *DestinationSnowflakeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_snowflake_resource_sdk.go b/internal/provider/destination_snowflake_resource_sdk.go
old mode 100755
new mode 100644
index e7e7fe834..66aabce6e
--- a/internal/provider/destination_snowflake_resource_sdk.go
+++ b/internal/provider/destination_snowflake_resource_sdk.go
@@ -3,96 +3,80 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationSnowflakeResourceModel) ToCreateSDKType() *shared.DestinationSnowflakeCreateRequest {
var credentials *shared.DestinationSnowflakeAuthorizationMethod
if r.Configuration.Credentials != nil {
- var destinationSnowflakeAuthorizationMethodOAuth20 *shared.DestinationSnowflakeAuthorizationMethodOAuth20
- if r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.DestinationSnowflakeAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.DestinationSnowflakeAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
+ var destinationSnowflakeOAuth20 *shared.DestinationSnowflakeOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
clientID := new(string)
- if !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- refreshToken := r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodOAuth20.RefreshToken.ValueString()
- destinationSnowflakeAuthorizationMethodOAuth20 = &shared.DestinationSnowflakeAuthorizationMethodOAuth20{
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ destinationSnowflakeOAuth20 = &shared.DestinationSnowflakeOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if destinationSnowflakeAuthorizationMethodOAuth20 != nil {
+ if destinationSnowflakeOAuth20 != nil {
credentials = &shared.DestinationSnowflakeAuthorizationMethod{
- DestinationSnowflakeAuthorizationMethodOAuth20: destinationSnowflakeAuthorizationMethodOAuth20,
+ DestinationSnowflakeOAuth20: destinationSnowflakeOAuth20,
}
}
- var destinationSnowflakeAuthorizationMethodKeyPairAuthentication *shared.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication
- if r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication != nil {
- authType1 := new(shared.DestinationSnowflakeAuthorizationMethodKeyPairAuthenticationAuthType)
- if !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication.AuthType.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication.AuthType.IsNull() {
- *authType1 = shared.DestinationSnowflakeAuthorizationMethodKeyPairAuthenticationAuthType(r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- privateKey := r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication.PrivateKey.ValueString()
+ var destinationSnowflakeKeyPairAuthentication *shared.DestinationSnowflakeKeyPairAuthentication
+ if r.Configuration.Credentials.KeyPairAuthentication != nil {
+ privateKey := r.Configuration.Credentials.KeyPairAuthentication.PrivateKey.ValueString()
privateKeyPassword := new(string)
- if !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication.PrivateKeyPassword.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication.PrivateKeyPassword.IsNull() {
- *privateKeyPassword = r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication.PrivateKeyPassword.ValueString()
+ if !r.Configuration.Credentials.KeyPairAuthentication.PrivateKeyPassword.IsUnknown() && !r.Configuration.Credentials.KeyPairAuthentication.PrivateKeyPassword.IsNull() {
+ *privateKeyPassword = r.Configuration.Credentials.KeyPairAuthentication.PrivateKeyPassword.ValueString()
} else {
privateKeyPassword = nil
}
- destinationSnowflakeAuthorizationMethodKeyPairAuthentication = &shared.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication{
- AuthType: authType1,
+ destinationSnowflakeKeyPairAuthentication = &shared.DestinationSnowflakeKeyPairAuthentication{
PrivateKey: privateKey,
PrivateKeyPassword: privateKeyPassword,
}
}
- if destinationSnowflakeAuthorizationMethodKeyPairAuthentication != nil {
+ if destinationSnowflakeKeyPairAuthentication != nil {
credentials = &shared.DestinationSnowflakeAuthorizationMethod{
- DestinationSnowflakeAuthorizationMethodKeyPairAuthentication: destinationSnowflakeAuthorizationMethodKeyPairAuthentication,
+ DestinationSnowflakeKeyPairAuthentication: destinationSnowflakeKeyPairAuthentication,
}
}
- var destinationSnowflakeAuthorizationMethodUsernameAndPassword *shared.DestinationSnowflakeAuthorizationMethodUsernameAndPassword
- if r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodUsernameAndPassword != nil {
- authType2 := new(shared.DestinationSnowflakeAuthorizationMethodUsernameAndPasswordAuthType)
- if !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodUsernameAndPassword.AuthType.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodUsernameAndPassword.AuthType.IsNull() {
- *authType2 = shared.DestinationSnowflakeAuthorizationMethodUsernameAndPasswordAuthType(r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodUsernameAndPassword.AuthType.ValueString())
- } else {
- authType2 = nil
- }
- password := r.Configuration.Credentials.DestinationSnowflakeAuthorizationMethodUsernameAndPassword.Password.ValueString()
- destinationSnowflakeAuthorizationMethodUsernameAndPassword = &shared.DestinationSnowflakeAuthorizationMethodUsernameAndPassword{
- AuthType: authType2,
+ var destinationSnowflakeUsernameAndPassword *shared.DestinationSnowflakeUsernameAndPassword
+ if r.Configuration.Credentials.UsernameAndPassword != nil {
+ password := r.Configuration.Credentials.UsernameAndPassword.Password.ValueString()
+ destinationSnowflakeUsernameAndPassword = &shared.DestinationSnowflakeUsernameAndPassword{
Password: password,
}
}
- if destinationSnowflakeAuthorizationMethodUsernameAndPassword != nil {
+ if destinationSnowflakeUsernameAndPassword != nil {
credentials = &shared.DestinationSnowflakeAuthorizationMethod{
- DestinationSnowflakeAuthorizationMethodUsernameAndPassword: destinationSnowflakeAuthorizationMethodUsernameAndPassword,
+ DestinationSnowflakeUsernameAndPassword: destinationSnowflakeUsernameAndPassword,
}
}
}
database := r.Configuration.Database.ValueString()
- destinationType := shared.DestinationSnowflakeSnowflake(r.Configuration.DestinationType.ValueString())
+ disableTypeDedupe := new(bool)
+ if !r.Configuration.DisableTypeDedupe.IsUnknown() && !r.Configuration.DisableTypeDedupe.IsNull() {
+ *disableTypeDedupe = r.Configuration.DisableTypeDedupe.ValueBool()
+ } else {
+ disableTypeDedupe = nil
+ }
host := r.Configuration.Host.ValueString()
jdbcURLParams := new(string)
if !r.Configuration.JdbcURLParams.IsUnknown() && !r.Configuration.JdbcURLParams.IsNull() {
@@ -111,21 +95,28 @@ func (r *DestinationSnowflakeResourceModel) ToCreateSDKType() *shared.Destinatio
username := r.Configuration.Username.ValueString()
warehouse := r.Configuration.Warehouse.ValueString()
configuration := shared.DestinationSnowflake{
- Credentials: credentials,
- Database: database,
- DestinationType: destinationType,
- Host: host,
- JdbcURLParams: jdbcURLParams,
- RawDataSchema: rawDataSchema,
- Role: role,
- Schema: schema,
- Username: username,
- Warehouse: warehouse,
+ Credentials: credentials,
+ Database: database,
+ DisableTypeDedupe: disableTypeDedupe,
+ Host: host,
+ JdbcURLParams: jdbcURLParams,
+ RawDataSchema: rawDataSchema,
+ Role: role,
+ Schema: schema,
+ Username: username,
+ Warehouse: warehouse,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationSnowflakeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -138,90 +129,75 @@ func (r *DestinationSnowflakeResourceModel) ToGetSDKType() *shared.DestinationSn
}
func (r *DestinationSnowflakeResourceModel) ToUpdateSDKType() *shared.DestinationSnowflakePutRequest {
- var credentials *shared.DestinationSnowflakeUpdateAuthorizationMethod
+ var credentials *shared.AuthorizationMethod
if r.Configuration.Credentials != nil {
- var destinationSnowflakeUpdateAuthorizationMethodOAuth20 *shared.DestinationSnowflakeUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.DestinationSnowflakeUpdateAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.DestinationSnowflakeUpdateAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
+ var oAuth20 *shared.OAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
clientID := new(string)
- if !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- refreshToken := r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodOAuth20.RefreshToken.ValueString()
- destinationSnowflakeUpdateAuthorizationMethodOAuth20 = &shared.DestinationSnowflakeUpdateAuthorizationMethodOAuth20{
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ oAuth20 = &shared.OAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if destinationSnowflakeUpdateAuthorizationMethodOAuth20 != nil {
- credentials = &shared.DestinationSnowflakeUpdateAuthorizationMethod{
- DestinationSnowflakeUpdateAuthorizationMethodOAuth20: destinationSnowflakeUpdateAuthorizationMethodOAuth20,
+ if oAuth20 != nil {
+ credentials = &shared.AuthorizationMethod{
+ OAuth20: oAuth20,
}
}
- var destinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication *shared.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication
- if r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication != nil {
- authType1 := new(shared.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthenticationAuthType)
- if !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication.AuthType.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication.AuthType.IsNull() {
- *authType1 = shared.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthenticationAuthType(r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- privateKey := r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication.PrivateKey.ValueString()
+ var keyPairAuthentication *shared.KeyPairAuthentication
+ if r.Configuration.Credentials.KeyPairAuthentication != nil {
+ privateKey := r.Configuration.Credentials.KeyPairAuthentication.PrivateKey.ValueString()
privateKeyPassword := new(string)
- if !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication.PrivateKeyPassword.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication.PrivateKeyPassword.IsNull() {
- *privateKeyPassword = r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication.PrivateKeyPassword.ValueString()
+ if !r.Configuration.Credentials.KeyPairAuthentication.PrivateKeyPassword.IsUnknown() && !r.Configuration.Credentials.KeyPairAuthentication.PrivateKeyPassword.IsNull() {
+ *privateKeyPassword = r.Configuration.Credentials.KeyPairAuthentication.PrivateKeyPassword.ValueString()
} else {
privateKeyPassword = nil
}
- destinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication = &shared.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication{
- AuthType: authType1,
+ keyPairAuthentication = &shared.KeyPairAuthentication{
PrivateKey: privateKey,
PrivateKeyPassword: privateKeyPassword,
}
}
- if destinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication != nil {
- credentials = &shared.DestinationSnowflakeUpdateAuthorizationMethod{
- DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication: destinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication,
+ if keyPairAuthentication != nil {
+ credentials = &shared.AuthorizationMethod{
+ KeyPairAuthentication: keyPairAuthentication,
}
}
- var destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword *shared.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword
- if r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword != nil {
- authType2 := new(shared.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType)
- if !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword.AuthType.IsUnknown() && !r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword.AuthType.IsNull() {
- *authType2 = shared.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType(r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword.AuthType.ValueString())
- } else {
- authType2 = nil
- }
- password := r.Configuration.Credentials.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword.Password.ValueString()
- destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword = &shared.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword{
- AuthType: authType2,
+ var usernameAndPassword *shared.UsernameAndPassword
+ if r.Configuration.Credentials.UsernameAndPassword != nil {
+ password := r.Configuration.Credentials.UsernameAndPassword.Password.ValueString()
+ usernameAndPassword = &shared.UsernameAndPassword{
Password: password,
}
}
- if destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword != nil {
- credentials = &shared.DestinationSnowflakeUpdateAuthorizationMethod{
- DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword: destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword,
+ if usernameAndPassword != nil {
+ credentials = &shared.AuthorizationMethod{
+ UsernameAndPassword: usernameAndPassword,
}
}
}
database := r.Configuration.Database.ValueString()
+ disableTypeDedupe := new(bool)
+ if !r.Configuration.DisableTypeDedupe.IsUnknown() && !r.Configuration.DisableTypeDedupe.IsNull() {
+ *disableTypeDedupe = r.Configuration.DisableTypeDedupe.ValueBool()
+ } else {
+ disableTypeDedupe = nil
+ }
host := r.Configuration.Host.ValueString()
jdbcURLParams := new(string)
if !r.Configuration.JdbcURLParams.IsUnknown() && !r.Configuration.JdbcURLParams.IsNull() {
@@ -240,15 +216,16 @@ func (r *DestinationSnowflakeResourceModel) ToUpdateSDKType() *shared.Destinatio
username := r.Configuration.Username.ValueString()
warehouse := r.Configuration.Warehouse.ValueString()
configuration := shared.DestinationSnowflakeUpdate{
- Credentials: credentials,
- Database: database,
- Host: host,
- JdbcURLParams: jdbcURLParams,
- RawDataSchema: rawDataSchema,
- Role: role,
- Schema: schema,
- Username: username,
- Warehouse: warehouse,
+ Credentials: credentials,
+ Database: database,
+ DisableTypeDedupe: disableTypeDedupe,
+ Host: host,
+ JdbcURLParams: jdbcURLParams,
+ RawDataSchema: rawDataSchema,
+ Role: role,
+ Schema: schema,
+ Username: username,
+ Warehouse: warehouse,
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
diff --git a/internal/provider/destination_timeplus_data_source.go b/internal/provider/destination_timeplus_data_source.go
old mode 100755
new mode 100644
index c453bee0f..b5a45bf5d
--- a/internal/provider/destination_timeplus_data_source.go
+++ b/internal/provider/destination_timeplus_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationTimeplusDataSource struct {
// DestinationTimeplusDataSourceModel describes the data model.
type DestinationTimeplusDataSourceModel struct {
- Configuration DestinationTimeplus `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,31 +47,17 @@ func (r *DestinationTimeplusDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "DestinationTimeplus DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "apikey": schema.StringAttribute{
- Computed: true,
- Description: `Personal API key`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "timeplus",
- ),
- },
- Description: `must be one of ["timeplus"]`,
- },
- "endpoint": schema.StringAttribute{
- Computed: true,
- Description: `Timeplus workspace endpoint`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_timeplus_data_source_sdk.go b/internal/provider/destination_timeplus_data_source_sdk.go
old mode 100755
new mode 100644
index 769427a51..d7e0acf2d
--- a/internal/provider/destination_timeplus_data_source_sdk.go
+++ b/internal/provider/destination_timeplus_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationTimeplusDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_timeplus_resource.go b/internal/provider/destination_timeplus_resource.go
old mode 100755
new mode 100644
index d0aa0823b..c535df4c2
--- a/internal/provider/destination_timeplus_resource.go
+++ b/internal/provider/destination_timeplus_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationTimeplusResource struct {
// DestinationTimeplusResourceModel describes the resource data model.
type DestinationTimeplusResourceModel struct {
Configuration DestinationTimeplus `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -55,23 +55,23 @@ func (r *DestinationTimeplusResource) Schema(ctx context.Context, req resource.S
Attributes: map[string]schema.Attribute{
"apikey": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Personal API key`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "timeplus",
- ),
- },
- Description: `must be one of ["timeplus"]`,
- },
"endpoint": schema.StringAttribute{
- Required: true,
- Description: `Timeplus workspace endpoint`,
+ Optional: true,
+ MarkdownDescription: `Default: "https://us.timeplus.cloud/"` + "\n" +
+ `Timeplus workspace endpoint`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -88,7 +88,8 @@ func (r *DestinationTimeplusResource) Schema(ctx context.Context, req resource.S
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -138,7 +139,7 @@ func (r *DestinationTimeplusResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationTimeplus(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -314,5 +315,5 @@ func (r *DestinationTimeplusResource) Delete(ctx context.Context, req resource.D
}
func (r *DestinationTimeplusResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_timeplus_resource_sdk.go b/internal/provider/destination_timeplus_resource_sdk.go
old mode 100755
new mode 100644
index 393e1ecfb..170bec511
--- a/internal/provider/destination_timeplus_resource_sdk.go
+++ b/internal/provider/destination_timeplus_resource_sdk.go
@@ -3,23 +3,33 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationTimeplusResourceModel) ToCreateSDKType() *shared.DestinationTimeplusCreateRequest {
apikey := r.Configuration.Apikey.ValueString()
- destinationType := shared.DestinationTimeplusTimeplus(r.Configuration.DestinationType.ValueString())
- endpoint := r.Configuration.Endpoint.ValueString()
+ endpoint := new(string)
+ if !r.Configuration.Endpoint.IsUnknown() && !r.Configuration.Endpoint.IsNull() {
+ *endpoint = r.Configuration.Endpoint.ValueString()
+ } else {
+ endpoint = nil
+ }
configuration := shared.DestinationTimeplus{
- Apikey: apikey,
- DestinationType: destinationType,
- Endpoint: endpoint,
+ Apikey: apikey,
+ Endpoint: endpoint,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationTimeplusCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -33,7 +43,12 @@ func (r *DestinationTimeplusResourceModel) ToGetSDKType() *shared.DestinationTim
func (r *DestinationTimeplusResourceModel) ToUpdateSDKType() *shared.DestinationTimeplusPutRequest {
apikey := r.Configuration.Apikey.ValueString()
- endpoint := r.Configuration.Endpoint.ValueString()
+ endpoint := new(string)
+ if !r.Configuration.Endpoint.IsUnknown() && !r.Configuration.Endpoint.IsNull() {
+ *endpoint = r.Configuration.Endpoint.ValueString()
+ } else {
+ endpoint = nil
+ }
configuration := shared.DestinationTimeplusUpdate{
Apikey: apikey,
Endpoint: endpoint,
diff --git a/internal/provider/destination_typesense_data_source.go b/internal/provider/destination_typesense_data_source.go
old mode 100755
new mode 100644
index f60aef7f5..850e9504c
--- a/internal/provider/destination_typesense_data_source.go
+++ b/internal/provider/destination_typesense_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationTypesenseDataSource struct {
// DestinationTypesenseDataSourceModel describes the data model.
type DestinationTypesenseDataSourceModel struct {
- Configuration DestinationTypesense `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,43 +47,17 @@ func (r *DestinationTypesenseDataSource) Schema(ctx context.Context, req datasou
MarkdownDescription: "DestinationTypesense DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Typesense API Key`,
- },
- "batch_size": schema.Int64Attribute{
- Computed: true,
- Description: `How many documents should be imported together. Default 1000`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "typesense",
- ),
- },
- Description: `must be one of ["typesense"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the Typesense instance without protocol.`,
- },
- "port": schema.StringAttribute{
- Computed: true,
- Description: `Port of the Typesense instance. Ex: 8108, 80, 443. Default is 443`,
- },
- "protocol": schema.StringAttribute{
- Computed: true,
- Description: `Protocol of the Typesense instance. Ex: http or https. Default is https`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_typesense_data_source_sdk.go b/internal/provider/destination_typesense_data_source_sdk.go
old mode 100755
new mode 100644
index 39d608421..dd56701be
--- a/internal/provider/destination_typesense_data_source_sdk.go
+++ b/internal/provider/destination_typesense_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationTypesenseDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_typesense_resource.go b/internal/provider/destination_typesense_resource.go
old mode 100755
new mode 100644
index 48641bfd0..9445ca5d7
--- a/internal/provider/destination_typesense_resource.go
+++ b/internal/provider/destination_typesense_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationTypesenseResource struct {
// DestinationTypesenseResourceModel describes the resource data model.
type DestinationTypesenseResourceModel struct {
Configuration DestinationTypesense `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -55,21 +55,13 @@ func (r *DestinationTypesenseResource) Schema(ctx context.Context, req resource.
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Typesense API Key`,
},
"batch_size": schema.Int64Attribute{
Optional: true,
Description: `How many documents should be imported together. Default 1000`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "typesense",
- ),
- },
- Description: `must be one of ["typesense"]`,
- },
"host": schema.StringAttribute{
Required: true,
Description: `Hostname of the Typesense instance without protocol.`,
@@ -84,6 +76,13 @@ func (r *DestinationTypesenseResource) Schema(ctx context.Context, req resource.
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -100,7 +99,8 @@ func (r *DestinationTypesenseResource) Schema(ctx context.Context, req resource.
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -150,7 +150,7 @@ func (r *DestinationTypesenseResource) Create(ctx context.Context, req resource.
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationTypesense(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -326,5 +326,5 @@ func (r *DestinationTypesenseResource) Delete(ctx context.Context, req resource.
}
func (r *DestinationTypesenseResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_typesense_resource_sdk.go b/internal/provider/destination_typesense_resource_sdk.go
old mode 100755
new mode 100644
index 2b3b09610..58a9b3e62
--- a/internal/provider/destination_typesense_resource_sdk.go
+++ b/internal/provider/destination_typesense_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -15,7 +15,6 @@ func (r *DestinationTypesenseResourceModel) ToCreateSDKType() *shared.Destinatio
} else {
batchSize = nil
}
- destinationType := shared.DestinationTypesenseTypesense(r.Configuration.DestinationType.ValueString())
host := r.Configuration.Host.ValueString()
port := new(string)
if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
@@ -30,17 +29,23 @@ func (r *DestinationTypesenseResourceModel) ToCreateSDKType() *shared.Destinatio
protocol = nil
}
configuration := shared.DestinationTypesense{
- APIKey: apiKey,
- BatchSize: batchSize,
- DestinationType: destinationType,
- Host: host,
- Port: port,
- Protocol: protocol,
+ APIKey: apiKey,
+ BatchSize: batchSize,
+ Host: host,
+ Port: port,
+ Protocol: protocol,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationTypesenseCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
diff --git a/internal/provider/destination_vertica_data_source.go b/internal/provider/destination_vertica_data_source.go
old mode 100755
new mode 100644
index bfc51846f..4a7449fef
--- a/internal/provider/destination_vertica_data_source.go
+++ b/internal/provider/destination_vertica_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,11 @@ type DestinationVerticaDataSource struct {
// DestinationVerticaDataSourceModel describes the data model.
type DestinationVerticaDataSourceModel struct {
- Configuration DestinationVertica `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,220 +47,17 @@ func (r *DestinationVerticaDataSource) Schema(ctx context.Context, req datasourc
MarkdownDescription: "DestinationVertica DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `Name of the database.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "vertica",
- ),
- },
- Description: `must be one of ["vertica"]`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the database.`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `Port of the database.`,
- },
- "schema": schema.StringAttribute{
- Computed: true,
- Description: `Schema for vertica destination`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "destination_vertica_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_vertica_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_vertica_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_vertica_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_vertica_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_vertica_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to use to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_vertica_data_source_sdk.go b/internal/provider/destination_vertica_data_source_sdk.go
old mode 100755
new mode 100644
index 54ec1aa12..13ae9652b
--- a/internal/provider/destination_vertica_data_source_sdk.go
+++ b/internal/provider/destination_vertica_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationVerticaDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_vertica_resource.go b/internal/provider/destination_vertica_resource.go
old mode 100755
new mode 100644
index cc0100b8d..5b9f9ed9c
--- a/internal/provider/destination_vertica_resource.go
+++ b/internal/provider/destination_vertica_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type DestinationVerticaResource struct {
// DestinationVerticaResourceModel describes the resource data model.
type DestinationVerticaResourceModel struct {
Configuration DestinationVertica `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -58,15 +59,6 @@ func (r *DestinationVerticaResource) Schema(ctx context.Context, req resource.Sc
Required: true,
Description: `Name of the database.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "vertica",
- ),
- },
- Description: `must be one of ["vertica"]`,
- },
"host": schema.StringAttribute{
Required: true,
Description: `Hostname of the database.`,
@@ -77,11 +69,13 @@ func (r *DestinationVerticaResource) Schema(ctx context.Context, req resource.Sc
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password associated with the username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `Port of the database.`,
+ Optional: true,
+ MarkdownDescription: `Default: 5433` + "\n" +
+ `Port of the database.`,
},
"schema": schema.StringAttribute{
Required: true,
@@ -90,122 +84,22 @@ func (r *DestinationVerticaResource) Schema(ctx context.Context, req resource.Sc
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "destination_vertica_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_vertica_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_vertica_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_vertica_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "destination_vertica_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -213,35 +107,28 @@ func (r *DestinationVerticaResource) Schema(ctx context.Context, req resource.Sc
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "destination_vertica_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -251,10 +138,10 @@ func (r *DestinationVerticaResource) Schema(ctx context.Context, req resource.Sc
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -262,6 +149,13 @@ func (r *DestinationVerticaResource) Schema(ctx context.Context, req resource.Sc
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -278,7 +172,8 @@ func (r *DestinationVerticaResource) Schema(ctx context.Context, req resource.Sc
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -328,7 +223,7 @@ func (r *DestinationVerticaResource) Create(ctx context.Context, req resource.Cr
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationVertica(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -504,5 +399,5 @@ func (r *DestinationVerticaResource) Delete(ctx context.Context, req resource.De
}
func (r *DestinationVerticaResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_vertica_resource_sdk.go b/internal/provider/destination_vertica_resource_sdk.go
old mode 100755
new mode 100644
index aef630b5e..6ad2cb2f0
--- a/internal/provider/destination_vertica_resource_sdk.go
+++ b/internal/provider/destination_vertica_resource_sdk.go
@@ -3,13 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationVerticaResourceModel) ToCreateSDKType() *shared.DestinationVerticaCreateRequest {
database := r.Configuration.Database.ValueString()
- destinationType := shared.DestinationVerticaVertica(r.Configuration.DestinationType.ValueString())
host := r.Configuration.Host.ValueString()
jdbcURLParams := new(string)
if !r.Configuration.JdbcURLParams.IsUnknown() && !r.Configuration.JdbcURLParams.IsNull() {
@@ -23,79 +22,93 @@ func (r *DestinationVerticaResourceModel) ToCreateSDKType() *shared.DestinationV
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
schema := r.Configuration.Schema.ValueString()
var tunnelMethod *shared.DestinationVerticaSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationVerticaSSHTunnelMethodNoTunnel *shared.DestinationVerticaSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationVerticaSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationVerticaSSHTunnelMethodNoTunnel = &shared.DestinationVerticaSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationVerticaNoTunnel *shared.DestinationVerticaNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationVerticaNoTunnel = &shared.DestinationVerticaNoTunnel{}
}
- if destinationVerticaSSHTunnelMethodNoTunnel != nil {
+ if destinationVerticaNoTunnel != nil {
tunnelMethod = &shared.DestinationVerticaSSHTunnelMethod{
- DestinationVerticaSSHTunnelMethodNoTunnel: destinationVerticaSSHTunnelMethodNoTunnel,
+ DestinationVerticaNoTunnel: destinationVerticaNoTunnel,
}
}
- var destinationVerticaSSHTunnelMethodSSHKeyAuthentication *shared.DestinationVerticaSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationVerticaSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationVerticaSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationVerticaSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationVerticaSSHKeyAuthentication *shared.DestinationVerticaSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationVerticaSSHKeyAuthentication = &shared.DestinationVerticaSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationVerticaSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationVerticaSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationVerticaSSHTunnelMethod{
- DestinationVerticaSSHTunnelMethodSSHKeyAuthentication: destinationVerticaSSHTunnelMethodSSHKeyAuthentication,
+ DestinationVerticaSSHKeyAuthentication: destinationVerticaSSHKeyAuthentication,
}
}
- var destinationVerticaSSHTunnelMethodPasswordAuthentication *shared.DestinationVerticaSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationVerticaSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationVerticaSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationVerticaSSHTunnelMethodPasswordAuthentication = &shared.DestinationVerticaSSHTunnelMethodPasswordAuthentication{
+ var destinationVerticaPasswordAuthentication *shared.DestinationVerticaPasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationVerticaPasswordAuthentication = &shared.DestinationVerticaPasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationVerticaSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationVerticaPasswordAuthentication != nil {
tunnelMethod = &shared.DestinationVerticaSSHTunnelMethod{
- DestinationVerticaSSHTunnelMethodPasswordAuthentication: destinationVerticaSSHTunnelMethodPasswordAuthentication,
+ DestinationVerticaPasswordAuthentication: destinationVerticaPasswordAuthentication,
}
}
}
username := r.Configuration.Username.ValueString()
configuration := shared.DestinationVertica{
- Database: database,
- DestinationType: destinationType,
- Host: host,
- JdbcURLParams: jdbcURLParams,
- Password: password,
- Port: port,
- Schema: schema,
- TunnelMethod: tunnelMethod,
- Username: username,
+ Database: database,
+ Host: host,
+ JdbcURLParams: jdbcURLParams,
+ Password: password,
+ Port: port,
+ Schema: schema,
+ TunnelMethod: tunnelMethod,
+ Username: username,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationVerticaCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
@@ -122,60 +135,68 @@ func (r *DestinationVerticaResourceModel) ToUpdateSDKType() *shared.DestinationV
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
schema := r.Configuration.Schema.ValueString()
var tunnelMethod *shared.DestinationVerticaUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var destinationVerticaUpdateSSHTunnelMethodNoTunnel *shared.DestinationVerticaUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.DestinationVerticaUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- destinationVerticaUpdateSSHTunnelMethodNoTunnel = &shared.DestinationVerticaUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var destinationVerticaUpdateNoTunnel *shared.DestinationVerticaUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ destinationVerticaUpdateNoTunnel = &shared.DestinationVerticaUpdateNoTunnel{}
}
- if destinationVerticaUpdateSSHTunnelMethodNoTunnel != nil {
+ if destinationVerticaUpdateNoTunnel != nil {
tunnelMethod = &shared.DestinationVerticaUpdateSSHTunnelMethod{
- DestinationVerticaUpdateSSHTunnelMethodNoTunnel: destinationVerticaUpdateSSHTunnelMethodNoTunnel,
+ DestinationVerticaUpdateNoTunnel: destinationVerticaUpdateNoTunnel,
}
}
- var destinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication *shared.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- destinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var destinationVerticaUpdateSSHKeyAuthentication *shared.DestinationVerticaUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ destinationVerticaUpdateSSHKeyAuthentication = &shared.DestinationVerticaUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if destinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if destinationVerticaUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.DestinationVerticaUpdateSSHTunnelMethod{
- DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication: destinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationVerticaUpdateSSHKeyAuthentication: destinationVerticaUpdateSSHKeyAuthentication,
}
}
- var destinationVerticaUpdateSSHTunnelMethodPasswordAuthentication *shared.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- destinationVerticaUpdateSSHTunnelMethodPasswordAuthentication = &shared.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication{
+ var destinationVerticaUpdatePasswordAuthentication *shared.DestinationVerticaUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ destinationVerticaUpdatePasswordAuthentication = &shared.DestinationVerticaUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if destinationVerticaUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if destinationVerticaUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.DestinationVerticaUpdateSSHTunnelMethod{
- DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication: destinationVerticaUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationVerticaUpdatePasswordAuthentication: destinationVerticaUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/destination_weaviate_data_source.go b/internal/provider/destination_weaviate_data_source.go
new file mode 100644
index 000000000..7c958492c
--- /dev/null
+++ b/internal/provider/destination_weaviate_data_source.go
@@ -0,0 +1,137 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ datasource.DataSource = &DestinationWeaviateDataSource{}
+var _ datasource.DataSourceWithConfigure = &DestinationWeaviateDataSource{}
+
+func NewDestinationWeaviateDataSource() datasource.DataSource {
+ return &DestinationWeaviateDataSource{}
+}
+
+// DestinationWeaviateDataSource is the data source implementation.
+type DestinationWeaviateDataSource struct {
+ client *sdk.SDK
+}
+
+// DestinationWeaviateDataSourceModel describes the data model.
+type DestinationWeaviateDataSourceModel struct {
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+// Metadata returns the data source type name.
+func (r *DestinationWeaviateDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_destination_weaviate"
+}
+
+// Schema defines the schema for the data source.
+func (r *DestinationWeaviateDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "DestinationWeaviate DataSource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.StringAttribute{
+ Computed: true,
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
+ },
+ "destination_id": schema.StringAttribute{
+ Required: true,
+ },
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ },
+ "workspace_id": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ }
+}
+
+func (r *DestinationWeaviateDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ client, ok := req.ProviderData.(*sdk.SDK)
+
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected DataSource Configure Type",
+ fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = client
+}
+
+func (r *DestinationWeaviateDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data *DestinationWeaviateDataSourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ destinationID := data.DestinationID.ValueString()
+ request := operations.GetDestinationWeaviateRequest{
+ DestinationID: destinationID,
+ }
+ res, err := r.client.Destinations.GetDestinationWeaviate(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.DestinationResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(res.DestinationResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/internal/provider/destination_weaviate_data_source_sdk.go b/internal/provider/destination_weaviate_data_source_sdk.go
new file mode 100644
index 000000000..9d29d2b64
--- /dev/null
+++ b/internal/provider/destination_weaviate_data_source_sdk.go
@@ -0,0 +1,18 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *DestinationWeaviateDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
+ r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
+ r.Name = types.StringValue(resp.Name)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
diff --git a/internal/provider/destination_weaviate_resource.go b/internal/provider/destination_weaviate_resource.go
new file mode 100644
index 000000000..12ae622ad
--- /dev/null
+++ b/internal/provider/destination_weaviate_resource.go
@@ -0,0 +1,607 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ resource.Resource = &DestinationWeaviateResource{}
+var _ resource.ResourceWithImportState = &DestinationWeaviateResource{}
+
+func NewDestinationWeaviateResource() resource.Resource {
+ return &DestinationWeaviateResource{}
+}
+
+// DestinationWeaviateResource defines the resource implementation.
+type DestinationWeaviateResource struct {
+ client *sdk.SDK
+}
+
+// DestinationWeaviateResourceModel describes the resource data model.
+type DestinationWeaviateResourceModel struct {
+ Configuration DestinationWeaviate `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+func (r *DestinationWeaviateResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_destination_weaviate"
+}
+
+func (r *DestinationWeaviateResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "DestinationWeaviate Resource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "embedding": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "azure_open_ai": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "api_base": schema.StringAttribute{
+ Required: true,
+ Description: `The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
+ },
+ "deployment": schema.StringAttribute{
+ Required: true,
+ Description: `The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
+ },
+ "openai_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ Description: `The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource`,
+ },
+ },
+ Description: `Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
+ },
+ "cohere": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "cohere_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ },
+ },
+ Description: `Use the Cohere API to embed text.`,
+ },
+ "fake": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`,
+ },
+ "from_field": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "dimensions": schema.Int64Attribute{
+ Required: true,
+ Description: `The number of dimensions the embedding model is generating`,
+ },
+ "field_name": schema.StringAttribute{
+ Required: true,
+ Description: `Name of the field in the record that contains the embedding`,
+ },
+ },
+ Description: `Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.`,
+ },
+ "no_external_embedding": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `Do not calculate and pass embeddings to Weaviate. Suitable for clusters with configured vectorizers to calculate embeddings within Weaviate or for classes that should only support regular text search.`,
+ },
+ "open_ai": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "openai_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ },
+ },
+ Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`,
+ },
+ "open_ai_compatible": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "api_key": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `Default: ""`,
+ },
+ "base_url": schema.StringAttribute{
+ Required: true,
+ Description: `The base URL for your OpenAI-compatible service`,
+ },
+ "dimensions": schema.Int64Attribute{
+ Required: true,
+ Description: `The number of dimensions the embedding model is generating`,
+ },
+ "model_name": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "text-embedding-ada-002"` + "\n" +
+ `The name of the model to use for embedding`,
+ },
+ },
+ Description: `Use a service that's compatible with the OpenAI API to embed text.`,
+ },
+ },
+ Description: `Embedding configuration`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "indexing": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "additional_headers": schema.ListNestedAttribute{
+ Optional: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "header_key": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ },
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ Description: `Additional HTTP headers to send with every request.`,
+ },
+ "auth": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "api_token": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "token": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ Description: `API Token for the Weaviate instance`,
+ },
+ },
+ Description: `Authenticate using an API token (suitable for Weaviate Cloud)`,
+ },
+ "no_authentication": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)`,
+ },
+ "username_password": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "password": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ Description: `Password for the Weaviate cluster`,
+ },
+ "username": schema.StringAttribute{
+ Required: true,
+ Description: `Username for the Weaviate cluster`,
+ },
+ },
+ Description: `Authenticate using username and password (suitable for self-managed Weaviate clusters)`,
+ },
+ },
+ Description: `Authentication method`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "batch_size": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 128` + "\n" +
+ `The number of records to send to Weaviate in each batch`,
+ },
+ "default_vectorizer": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["none", "text2vec-cohere", "text2vec-huggingface", "text2vec-openai", "text2vec-palm", "text2vec-contextionary", "text2vec-transformers", "text2vec-gpt4all"]; Default: "none"` + "\n" +
+ `The vectorizer to use if new classes need to be created`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "none",
+ "text2vec-cohere",
+ "text2vec-huggingface",
+ "text2vec-openai",
+ "text2vec-palm",
+ "text2vec-contextionary",
+ "text2vec-transformers",
+ "text2vec-gpt4all",
+ ),
+ },
+ },
+ "host": schema.StringAttribute{
+ Required: true,
+ Description: `The public endpoint of the Weaviate cluster.`,
+ },
+ "text_field": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "text"` + "\n" +
+ `The field in the object that contains the embedded text`,
+ },
+ },
+ Description: `Indexing configuration`,
+ },
+ "processing": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "chunk_overlap": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`,
+ },
+ "chunk_size": schema.Int64Attribute{
+ Required: true,
+ Description: `Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)`,
+ },
+ "field_name_mappings": schema.ListNestedAttribute{
+ Optional: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "from_field": schema.StringAttribute{
+ Required: true,
+ Description: `The field name in the source`,
+ },
+ "to_field": schema.StringAttribute{
+ Required: true,
+ Description: `The field name to use in the destination`,
+ },
+ },
+ },
+ Description: `List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.`,
+ },
+ "metadata_fields": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.`,
+ },
+ "text_fields": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array.`,
+ },
+ "text_splitter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "by_markdown_header": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "split_level": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 1` + "\n" +
+ `Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points`,
+ },
+ },
+ Description: `Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.`,
+ },
+ "by_programming_language": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "language": schema.StringAttribute{
+ Required: true,
+ MarkdownDescription: `must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]` + "\n" +
+ `Split code in suitable places based on the programming language`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "cpp",
+ "go",
+ "java",
+ "js",
+ "php",
+ "proto",
+ "python",
+ "rst",
+ "ruby",
+ "rust",
+ "scala",
+ "swift",
+ "markdown",
+ "latex",
+ "html",
+ "sol",
+ ),
+ },
+ },
+ },
+ Description: `Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.`,
+ },
+ "by_separator": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "keep_separator": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether to keep the separator in the resulting chunks`,
+ },
+ "separators": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".`,
+ },
+ },
+ Description: `Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.`,
+ },
+ },
+ Description: `Split text fields into chunks based on the specified method.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ },
+ },
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
+ "destination_id": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ },
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ },
+ "name": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
+ },
+ "workspace_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ Required: true,
+ },
+ },
+ }
+}
+
+func (r *DestinationWeaviateResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ client, ok := req.ProviderData.(*sdk.SDK)
+
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected Resource Configure Type",
+ fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = client
+}
+
+func (r *DestinationWeaviateResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data *DestinationWeaviateResourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ request := data.ToCreateSDKType()
+ res, err := r.client.Destinations.CreateDestinationWeaviate(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.DestinationResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromCreateResponse(res.DestinationResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *DestinationWeaviateResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var data *DestinationWeaviateResourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ destinationID := data.DestinationID.ValueString()
+ request := operations.GetDestinationWeaviateRequest{
+ DestinationID: destinationID,
+ }
+ res, err := r.client.Destinations.GetDestinationWeaviate(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.DestinationResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(res.DestinationResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *DestinationWeaviateResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data *DestinationWeaviateResourceModel
+ merge(ctx, req, resp, &data)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ destinationWeaviatePutRequest := data.ToUpdateSDKType()
+ destinationID := data.DestinationID.ValueString()
+ request := operations.PutDestinationWeaviateRequest{
+ DestinationWeaviatePutRequest: destinationWeaviatePutRequest,
+ DestinationID: destinationID,
+ }
+ res, err := r.client.Destinations.PutDestinationWeaviate(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ destinationId1 := data.DestinationID.ValueString()
+ getRequest := operations.GetDestinationWeaviateRequest{
+ DestinationID: destinationId1,
+ }
+ getResponse, err := r.client.Destinations.GetDestinationWeaviate(ctx, getRequest)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if getResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse))
+ return
+ }
+ if getResponse.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse))
+ return
+ }
+ if getResponse.DestinationResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(getResponse.DestinationResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *DestinationWeaviateResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var data *DestinationWeaviateResourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ destinationID := data.DestinationID.ValueString()
+ request := operations.DeleteDestinationWeaviateRequest{
+ DestinationID: destinationID,
+ }
+ res, err := r.client.Destinations.DeleteDestinationWeaviate(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+
+}
+
+func (r *DestinationWeaviateResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
+}
diff --git a/internal/provider/destination_weaviate_resource_sdk.go b/internal/provider/destination_weaviate_resource_sdk.go
new file mode 100644
index 000000000..a1b73a4f8
--- /dev/null
+++ b/internal/provider/destination_weaviate_resource_sdk.go
@@ -0,0 +1,586 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *DestinationWeaviateResourceModel) ToCreateSDKType() *shared.DestinationWeaviateCreateRequest {
+ var embedding shared.DestinationWeaviateEmbedding
+ var destinationWeaviateNoExternalEmbedding *shared.DestinationWeaviateNoExternalEmbedding
+ if r.Configuration.Embedding.NoExternalEmbedding != nil {
+ destinationWeaviateNoExternalEmbedding = &shared.DestinationWeaviateNoExternalEmbedding{}
+ }
+ if destinationWeaviateNoExternalEmbedding != nil {
+ embedding = shared.DestinationWeaviateEmbedding{
+ DestinationWeaviateNoExternalEmbedding: destinationWeaviateNoExternalEmbedding,
+ }
+ }
+ var destinationWeaviateAzureOpenAI *shared.DestinationWeaviateAzureOpenAI
+ if r.Configuration.Embedding.AzureOpenAI != nil {
+ apiBase := r.Configuration.Embedding.AzureOpenAI.APIBase.ValueString()
+ deployment := r.Configuration.Embedding.AzureOpenAI.Deployment.ValueString()
+ openaiKey := r.Configuration.Embedding.AzureOpenAI.OpenaiKey.ValueString()
+ destinationWeaviateAzureOpenAI = &shared.DestinationWeaviateAzureOpenAI{
+ APIBase: apiBase,
+ Deployment: deployment,
+ OpenaiKey: openaiKey,
+ }
+ }
+ if destinationWeaviateAzureOpenAI != nil {
+ embedding = shared.DestinationWeaviateEmbedding{
+ DestinationWeaviateAzureOpenAI: destinationWeaviateAzureOpenAI,
+ }
+ }
+ var destinationWeaviateOpenAI *shared.DestinationWeaviateOpenAI
+ if r.Configuration.Embedding.OpenAI != nil {
+ openaiKey1 := r.Configuration.Embedding.OpenAI.OpenaiKey.ValueString()
+ destinationWeaviateOpenAI = &shared.DestinationWeaviateOpenAI{
+ OpenaiKey: openaiKey1,
+ }
+ }
+ if destinationWeaviateOpenAI != nil {
+ embedding = shared.DestinationWeaviateEmbedding{
+ DestinationWeaviateOpenAI: destinationWeaviateOpenAI,
+ }
+ }
+ var destinationWeaviateCohere *shared.DestinationWeaviateCohere
+ if r.Configuration.Embedding.Cohere != nil {
+ cohereKey := r.Configuration.Embedding.Cohere.CohereKey.ValueString()
+ destinationWeaviateCohere = &shared.DestinationWeaviateCohere{
+ CohereKey: cohereKey,
+ }
+ }
+ if destinationWeaviateCohere != nil {
+ embedding = shared.DestinationWeaviateEmbedding{
+ DestinationWeaviateCohere: destinationWeaviateCohere,
+ }
+ }
+ var destinationWeaviateFromField *shared.DestinationWeaviateFromField
+ if r.Configuration.Embedding.FromField != nil {
+ dimensions := r.Configuration.Embedding.FromField.Dimensions.ValueInt64()
+ fieldName := r.Configuration.Embedding.FromField.FieldName.ValueString()
+ destinationWeaviateFromField = &shared.DestinationWeaviateFromField{
+ Dimensions: dimensions,
+ FieldName: fieldName,
+ }
+ }
+ if destinationWeaviateFromField != nil {
+ embedding = shared.DestinationWeaviateEmbedding{
+ DestinationWeaviateFromField: destinationWeaviateFromField,
+ }
+ }
+ var destinationWeaviateFake *shared.DestinationWeaviateFake
+ if r.Configuration.Embedding.Fake != nil {
+ destinationWeaviateFake = &shared.DestinationWeaviateFake{}
+ }
+ if destinationWeaviateFake != nil {
+ embedding = shared.DestinationWeaviateEmbedding{
+ DestinationWeaviateFake: destinationWeaviateFake,
+ }
+ }
+ var destinationWeaviateOpenAICompatible *shared.DestinationWeaviateOpenAICompatible
+ if r.Configuration.Embedding.OpenAICompatible != nil {
+ apiKey := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.APIKey.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.APIKey.IsNull() {
+ *apiKey = r.Configuration.Embedding.OpenAICompatible.APIKey.ValueString()
+ } else {
+ apiKey = nil
+ }
+ baseURL := r.Configuration.Embedding.OpenAICompatible.BaseURL.ValueString()
+ dimensions1 := r.Configuration.Embedding.OpenAICompatible.Dimensions.ValueInt64()
+ modelName := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.ModelName.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.ModelName.IsNull() {
+ *modelName = r.Configuration.Embedding.OpenAICompatible.ModelName.ValueString()
+ } else {
+ modelName = nil
+ }
+ destinationWeaviateOpenAICompatible = &shared.DestinationWeaviateOpenAICompatible{
+ APIKey: apiKey,
+ BaseURL: baseURL,
+ Dimensions: dimensions1,
+ ModelName: modelName,
+ }
+ }
+ if destinationWeaviateOpenAICompatible != nil {
+ embedding = shared.DestinationWeaviateEmbedding{
+ DestinationWeaviateOpenAICompatible: destinationWeaviateOpenAICompatible,
+ }
+ }
+ var additionalHeaders []shared.DestinationWeaviateHeader = nil
+ for _, additionalHeadersItem := range r.Configuration.Indexing.AdditionalHeaders {
+ headerKey := additionalHeadersItem.HeaderKey.ValueString()
+ value := additionalHeadersItem.Value.ValueString()
+ additionalHeaders = append(additionalHeaders, shared.DestinationWeaviateHeader{
+ HeaderKey: headerKey,
+ Value: value,
+ })
+ }
+ var auth shared.DestinationWeaviateAuthentication
+ var destinationWeaviateAPIToken *shared.DestinationWeaviateAPIToken
+ if r.Configuration.Indexing.Auth.APIToken != nil {
+ token := r.Configuration.Indexing.Auth.APIToken.Token.ValueString()
+ destinationWeaviateAPIToken = &shared.DestinationWeaviateAPIToken{
+ Token: token,
+ }
+ }
+ if destinationWeaviateAPIToken != nil {
+ auth = shared.DestinationWeaviateAuthentication{
+ DestinationWeaviateAPIToken: destinationWeaviateAPIToken,
+ }
+ }
+ var destinationWeaviateUsernamePassword *shared.DestinationWeaviateUsernamePassword
+ if r.Configuration.Indexing.Auth.UsernamePassword != nil {
+ password := r.Configuration.Indexing.Auth.UsernamePassword.Password.ValueString()
+ username := r.Configuration.Indexing.Auth.UsernamePassword.Username.ValueString()
+ destinationWeaviateUsernamePassword = &shared.DestinationWeaviateUsernamePassword{
+ Password: password,
+ Username: username,
+ }
+ }
+ if destinationWeaviateUsernamePassword != nil {
+ auth = shared.DestinationWeaviateAuthentication{
+ DestinationWeaviateUsernamePassword: destinationWeaviateUsernamePassword,
+ }
+ }
+ var destinationWeaviateNoAuthentication *shared.DestinationWeaviateNoAuthentication
+ if r.Configuration.Indexing.Auth.NoAuthentication != nil {
+ destinationWeaviateNoAuthentication = &shared.DestinationWeaviateNoAuthentication{}
+ }
+ if destinationWeaviateNoAuthentication != nil {
+ auth = shared.DestinationWeaviateAuthentication{
+ DestinationWeaviateNoAuthentication: destinationWeaviateNoAuthentication,
+ }
+ }
+ batchSize := new(int64)
+ if !r.Configuration.Indexing.BatchSize.IsUnknown() && !r.Configuration.Indexing.BatchSize.IsNull() {
+ *batchSize = r.Configuration.Indexing.BatchSize.ValueInt64()
+ } else {
+ batchSize = nil
+ }
+ defaultVectorizer := new(shared.DestinationWeaviateDefaultVectorizer)
+ if !r.Configuration.Indexing.DefaultVectorizer.IsUnknown() && !r.Configuration.Indexing.DefaultVectorizer.IsNull() {
+ *defaultVectorizer = shared.DestinationWeaviateDefaultVectorizer(r.Configuration.Indexing.DefaultVectorizer.ValueString())
+ } else {
+ defaultVectorizer = nil
+ }
+ host := r.Configuration.Indexing.Host.ValueString()
+ textField := new(string)
+ if !r.Configuration.Indexing.TextField.IsUnknown() && !r.Configuration.Indexing.TextField.IsNull() {
+ *textField = r.Configuration.Indexing.TextField.ValueString()
+ } else {
+ textField = nil
+ }
+ indexing := shared.DestinationWeaviateIndexing{
+ AdditionalHeaders: additionalHeaders,
+ Auth: auth,
+ BatchSize: batchSize,
+ DefaultVectorizer: defaultVectorizer,
+ Host: host,
+ TextField: textField,
+ }
+ chunkOverlap := new(int64)
+ if !r.Configuration.Processing.ChunkOverlap.IsUnknown() && !r.Configuration.Processing.ChunkOverlap.IsNull() {
+ *chunkOverlap = r.Configuration.Processing.ChunkOverlap.ValueInt64()
+ } else {
+ chunkOverlap = nil
+ }
+ chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64()
+ var fieldNameMappings []shared.DestinationWeaviateFieldNameMappingConfigModel = nil
+ for _, fieldNameMappingsItem := range r.Configuration.Processing.FieldNameMappings {
+ fromField := fieldNameMappingsItem.FromField.ValueString()
+ toField := fieldNameMappingsItem.ToField.ValueString()
+ fieldNameMappings = append(fieldNameMappings, shared.DestinationWeaviateFieldNameMappingConfigModel{
+ FromField: fromField,
+ ToField: toField,
+ })
+ }
+ var metadataFields []string = nil
+ for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields {
+ metadataFields = append(metadataFields, metadataFieldsItem.ValueString())
+ }
+ var textFields []string = nil
+ for _, textFieldsItem := range r.Configuration.Processing.TextFields {
+ textFields = append(textFields, textFieldsItem.ValueString())
+ }
+ var textSplitter *shared.DestinationWeaviateTextSplitter
+ if r.Configuration.Processing.TextSplitter != nil {
+ var destinationWeaviateBySeparator *shared.DestinationWeaviateBySeparator
+ if r.Configuration.Processing.TextSplitter.BySeparator != nil {
+ keepSeparator := new(bool)
+ if !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsUnknown() && !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsNull() {
+ *keepSeparator = r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.ValueBool()
+ } else {
+ keepSeparator = nil
+ }
+ var separators []string = nil
+ for _, separatorsItem := range r.Configuration.Processing.TextSplitter.BySeparator.Separators {
+ separators = append(separators, separatorsItem.ValueString())
+ }
+ destinationWeaviateBySeparator = &shared.DestinationWeaviateBySeparator{
+ KeepSeparator: keepSeparator,
+ Separators: separators,
+ }
+ }
+ if destinationWeaviateBySeparator != nil {
+ textSplitter = &shared.DestinationWeaviateTextSplitter{
+ DestinationWeaviateBySeparator: destinationWeaviateBySeparator,
+ }
+ }
+ var destinationWeaviateByMarkdownHeader *shared.DestinationWeaviateByMarkdownHeader
+ if r.Configuration.Processing.TextSplitter.ByMarkdownHeader != nil {
+ splitLevel := new(int64)
+ if !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsUnknown() && !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsNull() {
+ *splitLevel = r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.ValueInt64()
+ } else {
+ splitLevel = nil
+ }
+ destinationWeaviateByMarkdownHeader = &shared.DestinationWeaviateByMarkdownHeader{
+ SplitLevel: splitLevel,
+ }
+ }
+ if destinationWeaviateByMarkdownHeader != nil {
+ textSplitter = &shared.DestinationWeaviateTextSplitter{
+ DestinationWeaviateByMarkdownHeader: destinationWeaviateByMarkdownHeader,
+ }
+ }
+ var destinationWeaviateByProgrammingLanguage *shared.DestinationWeaviateByProgrammingLanguage
+ if r.Configuration.Processing.TextSplitter.ByProgrammingLanguage != nil {
+ language := shared.DestinationWeaviateLanguage(r.Configuration.Processing.TextSplitter.ByProgrammingLanguage.Language.ValueString())
+ destinationWeaviateByProgrammingLanguage = &shared.DestinationWeaviateByProgrammingLanguage{
+ Language: language,
+ }
+ }
+ if destinationWeaviateByProgrammingLanguage != nil {
+ textSplitter = &shared.DestinationWeaviateTextSplitter{
+ DestinationWeaviateByProgrammingLanguage: destinationWeaviateByProgrammingLanguage,
+ }
+ }
+ }
+ processing := shared.DestinationWeaviateProcessingConfigModel{
+ ChunkOverlap: chunkOverlap,
+ ChunkSize: chunkSize,
+ FieldNameMappings: fieldNameMappings,
+ MetadataFields: metadataFields,
+ TextFields: textFields,
+ TextSplitter: textSplitter,
+ }
+ configuration := shared.DestinationWeaviate{
+ Embedding: embedding,
+ Indexing: indexing,
+ Processing: processing,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
+ name := r.Name.ValueString()
+ workspaceID := r.WorkspaceID.ValueString()
+ out := shared.DestinationWeaviateCreateRequest{
+ Configuration: configuration,
+ DefinitionID: definitionID,
+ Name: name,
+ WorkspaceID: workspaceID,
+ }
+ return &out
+}
+
+func (r *DestinationWeaviateResourceModel) ToGetSDKType() *shared.DestinationWeaviateCreateRequest {
+ out := r.ToCreateSDKType()
+ return out
+}
+
+func (r *DestinationWeaviateResourceModel) ToUpdateSDKType() *shared.DestinationWeaviatePutRequest {
+ var embedding shared.DestinationWeaviateUpdateEmbedding
+ var noExternalEmbedding *shared.NoExternalEmbedding
+ if r.Configuration.Embedding.NoExternalEmbedding != nil {
+ noExternalEmbedding = &shared.NoExternalEmbedding{}
+ }
+ if noExternalEmbedding != nil {
+ embedding = shared.DestinationWeaviateUpdateEmbedding{
+ NoExternalEmbedding: noExternalEmbedding,
+ }
+ }
+ var destinationWeaviateUpdateAzureOpenAI *shared.DestinationWeaviateUpdateAzureOpenAI
+ if r.Configuration.Embedding.AzureOpenAI != nil {
+ apiBase := r.Configuration.Embedding.AzureOpenAI.APIBase.ValueString()
+ deployment := r.Configuration.Embedding.AzureOpenAI.Deployment.ValueString()
+ openaiKey := r.Configuration.Embedding.AzureOpenAI.OpenaiKey.ValueString()
+ destinationWeaviateUpdateAzureOpenAI = &shared.DestinationWeaviateUpdateAzureOpenAI{
+ APIBase: apiBase,
+ Deployment: deployment,
+ OpenaiKey: openaiKey,
+ }
+ }
+ if destinationWeaviateUpdateAzureOpenAI != nil {
+ embedding = shared.DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateAzureOpenAI: destinationWeaviateUpdateAzureOpenAI,
+ }
+ }
+ var destinationWeaviateUpdateOpenAI *shared.DestinationWeaviateUpdateOpenAI
+ if r.Configuration.Embedding.OpenAI != nil {
+ openaiKey1 := r.Configuration.Embedding.OpenAI.OpenaiKey.ValueString()
+ destinationWeaviateUpdateOpenAI = &shared.DestinationWeaviateUpdateOpenAI{
+ OpenaiKey: openaiKey1,
+ }
+ }
+ if destinationWeaviateUpdateOpenAI != nil {
+ embedding = shared.DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateOpenAI: destinationWeaviateUpdateOpenAI,
+ }
+ }
+ var destinationWeaviateUpdateCohere *shared.DestinationWeaviateUpdateCohere
+ if r.Configuration.Embedding.Cohere != nil {
+ cohereKey := r.Configuration.Embedding.Cohere.CohereKey.ValueString()
+ destinationWeaviateUpdateCohere = &shared.DestinationWeaviateUpdateCohere{
+ CohereKey: cohereKey,
+ }
+ }
+ if destinationWeaviateUpdateCohere != nil {
+ embedding = shared.DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateCohere: destinationWeaviateUpdateCohere,
+ }
+ }
+ var destinationWeaviateUpdateFromField *shared.DestinationWeaviateUpdateFromField
+ if r.Configuration.Embedding.FromField != nil {
+ dimensions := r.Configuration.Embedding.FromField.Dimensions.ValueInt64()
+ fieldName := r.Configuration.Embedding.FromField.FieldName.ValueString()
+ destinationWeaviateUpdateFromField = &shared.DestinationWeaviateUpdateFromField{
+ Dimensions: dimensions,
+ FieldName: fieldName,
+ }
+ }
+ if destinationWeaviateUpdateFromField != nil {
+ embedding = shared.DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateFromField: destinationWeaviateUpdateFromField,
+ }
+ }
+ var destinationWeaviateUpdateFake *shared.DestinationWeaviateUpdateFake
+ if r.Configuration.Embedding.Fake != nil {
+ destinationWeaviateUpdateFake = &shared.DestinationWeaviateUpdateFake{}
+ }
+ if destinationWeaviateUpdateFake != nil {
+ embedding = shared.DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateFake: destinationWeaviateUpdateFake,
+ }
+ }
+ var destinationWeaviateUpdateOpenAICompatible *shared.DestinationWeaviateUpdateOpenAICompatible
+ if r.Configuration.Embedding.OpenAICompatible != nil {
+ apiKey := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.APIKey.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.APIKey.IsNull() {
+ *apiKey = r.Configuration.Embedding.OpenAICompatible.APIKey.ValueString()
+ } else {
+ apiKey = nil
+ }
+ baseURL := r.Configuration.Embedding.OpenAICompatible.BaseURL.ValueString()
+ dimensions1 := r.Configuration.Embedding.OpenAICompatible.Dimensions.ValueInt64()
+ modelName := new(string)
+ if !r.Configuration.Embedding.OpenAICompatible.ModelName.IsUnknown() && !r.Configuration.Embedding.OpenAICompatible.ModelName.IsNull() {
+ *modelName = r.Configuration.Embedding.OpenAICompatible.ModelName.ValueString()
+ } else {
+ modelName = nil
+ }
+ destinationWeaviateUpdateOpenAICompatible = &shared.DestinationWeaviateUpdateOpenAICompatible{
+ APIKey: apiKey,
+ BaseURL: baseURL,
+ Dimensions: dimensions1,
+ ModelName: modelName,
+ }
+ }
+ if destinationWeaviateUpdateOpenAICompatible != nil {
+ embedding = shared.DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateOpenAICompatible: destinationWeaviateUpdateOpenAICompatible,
+ }
+ }
+ var additionalHeaders []shared.Header = nil
+ for _, additionalHeadersItem := range r.Configuration.Indexing.AdditionalHeaders {
+ headerKey := additionalHeadersItem.HeaderKey.ValueString()
+ value := additionalHeadersItem.Value.ValueString()
+ additionalHeaders = append(additionalHeaders, shared.Header{
+ HeaderKey: headerKey,
+ Value: value,
+ })
+ }
+ var auth shared.DestinationWeaviateUpdateAuthentication
+ var destinationWeaviateUpdateAPIToken *shared.DestinationWeaviateUpdateAPIToken
+ if r.Configuration.Indexing.Auth.APIToken != nil {
+ token := r.Configuration.Indexing.Auth.APIToken.Token.ValueString()
+ destinationWeaviateUpdateAPIToken = &shared.DestinationWeaviateUpdateAPIToken{
+ Token: token,
+ }
+ }
+ if destinationWeaviateUpdateAPIToken != nil {
+ auth = shared.DestinationWeaviateUpdateAuthentication{
+ DestinationWeaviateUpdateAPIToken: destinationWeaviateUpdateAPIToken,
+ }
+ }
+ var destinationWeaviateUpdateUsernamePassword *shared.DestinationWeaviateUpdateUsernamePassword
+ if r.Configuration.Indexing.Auth.UsernamePassword != nil {
+ password := r.Configuration.Indexing.Auth.UsernamePassword.Password.ValueString()
+ username := r.Configuration.Indexing.Auth.UsernamePassword.Username.ValueString()
+ destinationWeaviateUpdateUsernamePassword = &shared.DestinationWeaviateUpdateUsernamePassword{
+ Password: password,
+ Username: username,
+ }
+ }
+ if destinationWeaviateUpdateUsernamePassword != nil {
+ auth = shared.DestinationWeaviateUpdateAuthentication{
+ DestinationWeaviateUpdateUsernamePassword: destinationWeaviateUpdateUsernamePassword,
+ }
+ }
+ var noAuthentication *shared.NoAuthentication
+ if r.Configuration.Indexing.Auth.NoAuthentication != nil {
+ noAuthentication = &shared.NoAuthentication{}
+ }
+ if noAuthentication != nil {
+ auth = shared.DestinationWeaviateUpdateAuthentication{
+ NoAuthentication: noAuthentication,
+ }
+ }
+ batchSize := new(int64)
+ if !r.Configuration.Indexing.BatchSize.IsUnknown() && !r.Configuration.Indexing.BatchSize.IsNull() {
+ *batchSize = r.Configuration.Indexing.BatchSize.ValueInt64()
+ } else {
+ batchSize = nil
+ }
+ defaultVectorizer := new(shared.DefaultVectorizer)
+ if !r.Configuration.Indexing.DefaultVectorizer.IsUnknown() && !r.Configuration.Indexing.DefaultVectorizer.IsNull() {
+ *defaultVectorizer = shared.DefaultVectorizer(r.Configuration.Indexing.DefaultVectorizer.ValueString())
+ } else {
+ defaultVectorizer = nil
+ }
+ host := r.Configuration.Indexing.Host.ValueString()
+ textField := new(string)
+ if !r.Configuration.Indexing.TextField.IsUnknown() && !r.Configuration.Indexing.TextField.IsNull() {
+ *textField = r.Configuration.Indexing.TextField.ValueString()
+ } else {
+ textField = nil
+ }
+ indexing := shared.DestinationWeaviateUpdateIndexing{
+ AdditionalHeaders: additionalHeaders,
+ Auth: auth,
+ BatchSize: batchSize,
+ DefaultVectorizer: defaultVectorizer,
+ Host: host,
+ TextField: textField,
+ }
+ chunkOverlap := new(int64)
+ if !r.Configuration.Processing.ChunkOverlap.IsUnknown() && !r.Configuration.Processing.ChunkOverlap.IsNull() {
+ *chunkOverlap = r.Configuration.Processing.ChunkOverlap.ValueInt64()
+ } else {
+ chunkOverlap = nil
+ }
+ chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64()
+ var fieldNameMappings []shared.DestinationWeaviateUpdateFieldNameMappingConfigModel = nil
+ for _, fieldNameMappingsItem := range r.Configuration.Processing.FieldNameMappings {
+ fromField := fieldNameMappingsItem.FromField.ValueString()
+ toField := fieldNameMappingsItem.ToField.ValueString()
+ fieldNameMappings = append(fieldNameMappings, shared.DestinationWeaviateUpdateFieldNameMappingConfigModel{
+ FromField: fromField,
+ ToField: toField,
+ })
+ }
+ var metadataFields []string = nil
+ for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields {
+ metadataFields = append(metadataFields, metadataFieldsItem.ValueString())
+ }
+ var textFields []string = nil
+ for _, textFieldsItem := range r.Configuration.Processing.TextFields {
+ textFields = append(textFields, textFieldsItem.ValueString())
+ }
+ var textSplitter *shared.DestinationWeaviateUpdateTextSplitter
+ if r.Configuration.Processing.TextSplitter != nil {
+ var destinationWeaviateUpdateBySeparator *shared.DestinationWeaviateUpdateBySeparator
+ if r.Configuration.Processing.TextSplitter.BySeparator != nil {
+ keepSeparator := new(bool)
+ if !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsUnknown() && !r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.IsNull() {
+ *keepSeparator = r.Configuration.Processing.TextSplitter.BySeparator.KeepSeparator.ValueBool()
+ } else {
+ keepSeparator = nil
+ }
+ var separators []string = nil
+ for _, separatorsItem := range r.Configuration.Processing.TextSplitter.BySeparator.Separators {
+ separators = append(separators, separatorsItem.ValueString())
+ }
+ destinationWeaviateUpdateBySeparator = &shared.DestinationWeaviateUpdateBySeparator{
+ KeepSeparator: keepSeparator,
+ Separators: separators,
+ }
+ }
+ if destinationWeaviateUpdateBySeparator != nil {
+ textSplitter = &shared.DestinationWeaviateUpdateTextSplitter{
+ DestinationWeaviateUpdateBySeparator: destinationWeaviateUpdateBySeparator,
+ }
+ }
+ var destinationWeaviateUpdateByMarkdownHeader *shared.DestinationWeaviateUpdateByMarkdownHeader
+ if r.Configuration.Processing.TextSplitter.ByMarkdownHeader != nil {
+ splitLevel := new(int64)
+ if !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsUnknown() && !r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.IsNull() {
+ *splitLevel = r.Configuration.Processing.TextSplitter.ByMarkdownHeader.SplitLevel.ValueInt64()
+ } else {
+ splitLevel = nil
+ }
+ destinationWeaviateUpdateByMarkdownHeader = &shared.DestinationWeaviateUpdateByMarkdownHeader{
+ SplitLevel: splitLevel,
+ }
+ }
+ if destinationWeaviateUpdateByMarkdownHeader != nil {
+ textSplitter = &shared.DestinationWeaviateUpdateTextSplitter{
+ DestinationWeaviateUpdateByMarkdownHeader: destinationWeaviateUpdateByMarkdownHeader,
+ }
+ }
+ var destinationWeaviateUpdateByProgrammingLanguage *shared.DestinationWeaviateUpdateByProgrammingLanguage
+ if r.Configuration.Processing.TextSplitter.ByProgrammingLanguage != nil {
+ language := shared.DestinationWeaviateUpdateLanguage(r.Configuration.Processing.TextSplitter.ByProgrammingLanguage.Language.ValueString())
+ destinationWeaviateUpdateByProgrammingLanguage = &shared.DestinationWeaviateUpdateByProgrammingLanguage{
+ Language: language,
+ }
+ }
+ if destinationWeaviateUpdateByProgrammingLanguage != nil {
+ textSplitter = &shared.DestinationWeaviateUpdateTextSplitter{
+ DestinationWeaviateUpdateByProgrammingLanguage: destinationWeaviateUpdateByProgrammingLanguage,
+ }
+ }
+ }
+ processing := shared.DestinationWeaviateUpdateProcessingConfigModel{
+ ChunkOverlap: chunkOverlap,
+ ChunkSize: chunkSize,
+ FieldNameMappings: fieldNameMappings,
+ MetadataFields: metadataFields,
+ TextFields: textFields,
+ TextSplitter: textSplitter,
+ }
+ configuration := shared.DestinationWeaviateUpdate{
+ Embedding: embedding,
+ Indexing: indexing,
+ Processing: processing,
+ }
+ name := r.Name.ValueString()
+ workspaceID := r.WorkspaceID.ValueString()
+ out := shared.DestinationWeaviatePutRequest{
+ Configuration: configuration,
+ Name: name,
+ WorkspaceID: workspaceID,
+ }
+ return &out
+}
+
+func (r *DestinationWeaviateResourceModel) ToDeleteSDKType() *shared.DestinationWeaviateCreateRequest {
+ out := r.ToCreateSDKType()
+ return out
+}
+
+func (r *DestinationWeaviateResourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
+ r.Name = types.StringValue(resp.Name)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
+
+func (r *DestinationWeaviateResourceModel) RefreshFromCreateResponse(resp *shared.DestinationResponse) {
+ r.RefreshFromGetResponse(resp)
+}
diff --git a/internal/provider/destination_xata_data_source.go b/internal/provider/destination_xata_data_source.go
old mode 100755
new mode 100644
index 920cf9b80..87d2908b8
--- a/internal/provider/destination_xata_data_source.go
+++ b/internal/provider/destination_xata_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,11 @@ type DestinationXataDataSource struct {
// DestinationXataDataSourceModel describes the data model.
type DestinationXataDataSourceModel struct {
- Configuration DestinationXata `tfsdk:"configuration"`
- DestinationID types.String `tfsdk:"destination_id"`
- Name types.String `tfsdk:"name"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ DestinationID types.String `tfsdk:"destination_id"`
+ DestinationType types.String `tfsdk:"destination_type"`
+ Name types.String `tfsdk:"name"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -48,31 +47,17 @@ func (r *DestinationXataDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "DestinationXata DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key to connect.`,
- },
- "db_url": schema.StringAttribute{
- Computed: true,
- Description: `URL pointing to your workspace.`,
- },
- "destination_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xata",
- ),
- },
- Description: `must be one of ["xata"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the destination.`,
},
"destination_id": schema.StringAttribute{
Required: true,
},
+ "destination_type": schema.StringAttribute{
+ Computed: true,
+ },
"name": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/destination_xata_data_source_sdk.go b/internal/provider/destination_xata_data_source_sdk.go
old mode 100755
new mode 100644
index 770d61943..e9158e2b1
--- a/internal/provider/destination_xata_data_source_sdk.go
+++ b/internal/provider/destination_xata_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationXataDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.DestinationID = types.StringValue(resp.DestinationID)
+ r.DestinationType = types.StringValue(resp.DestinationType)
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/destination_xata_resource.go b/internal/provider/destination_xata_resource.go
old mode 100755
new mode 100644
index 8b6106317..0ee5c852c
--- a/internal/provider/destination_xata_resource.go
+++ b/internal/provider/destination_xata_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type DestinationXataResource struct {
// DestinationXataResourceModel describes the resource data model.
type DestinationXataResourceModel struct {
Configuration DestinationXata `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
DestinationID types.String `tfsdk:"destination_id"`
DestinationType types.String `tfsdk:"destination_type"`
Name types.String `tfsdk:"name"`
@@ -55,23 +55,22 @@ func (r *DestinationXataResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key to connect.`,
},
"db_url": schema.StringAttribute{
Required: true,
Description: `URL pointing to your workspace.`,
},
- "destination_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xata",
- ),
- },
- Description: `must be one of ["xata"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.`,
+ },
"destination_id": schema.StringAttribute{
Computed: true,
PlanModifiers: []planmodifier.String{
@@ -88,7 +87,8 @@ func (r *DestinationXataResource) Schema(ctx context.Context, req resource.Schem
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the destination e.g. dev-mysql-instance.`,
},
"workspace_id": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -138,7 +138,7 @@ func (r *DestinationXataResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Destinations.CreateDestinationXata(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -314,5 +314,5 @@ func (r *DestinationXataResource) Delete(ctx context.Context, req resource.Delet
}
func (r *DestinationXataResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("destination_id"), req.ID)...)
}
diff --git a/internal/provider/destination_xata_resource_sdk.go b/internal/provider/destination_xata_resource_sdk.go
old mode 100755
new mode 100644
index b4ba59ce2..567232a53
--- a/internal/provider/destination_xata_resource_sdk.go
+++ b/internal/provider/destination_xata_resource_sdk.go
@@ -3,23 +3,28 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *DestinationXataResourceModel) ToCreateSDKType() *shared.DestinationXataCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
dbURL := r.Configuration.DbURL.ValueString()
- destinationType := shared.DestinationXataXata(r.Configuration.DestinationType.ValueString())
configuration := shared.DestinationXata{
- APIKey: apiKey,
- DbURL: dbURL,
- DestinationType: destinationType,
+ APIKey: apiKey,
+ DbURL: dbURL,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.DestinationXataCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
WorkspaceID: workspaceID,
}
diff --git a/internal/provider/provider.go b/internal/provider/provider.go
old mode 100755
new mode 100644
index e490090df..362eb58c5
--- a/internal/provider/provider.go
+++ b/internal/provider/provider.go
@@ -3,9 +3,9 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/shared"
"context"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/provider"
@@ -110,13 +110,13 @@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou
NewDestinationAwsDatalakeResource,
NewDestinationAzureBlobStorageResource,
NewDestinationBigqueryResource,
- NewDestinationBigqueryDenormalizedResource,
NewDestinationClickhouseResource,
NewDestinationConvexResource,
NewDestinationCumulioResource,
NewDestinationDatabendResource,
NewDestinationDatabricksResource,
NewDestinationDevNullResource,
+ NewDestinationDuckdbResource,
NewDestinationDynamodbResource,
NewDestinationElasticsearchResource,
NewDestinationFireboltResource,
@@ -134,6 +134,7 @@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou
NewDestinationPineconeResource,
NewDestinationPostgresResource,
NewDestinationPubsubResource,
+ NewDestinationQdrantResource,
NewDestinationRedisResource,
NewDestinationRedshiftResource,
NewDestinationS3Resource,
@@ -143,6 +144,7 @@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou
NewDestinationTimeplusResource,
NewDestinationTypesenseResource,
NewDestinationVerticaResource,
+ NewDestinationWeaviateResource,
NewDestinationXataResource,
NewSourceAhaResource,
NewSourceAircallResource,
@@ -160,11 +162,11 @@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou
NewSourceAzureBlobStorageResource,
NewSourceAzureTableResource,
NewSourceBambooHrResource,
- NewSourceBigcommerceResource,
NewSourceBigqueryResource,
NewSourceBingAdsResource,
NewSourceBraintreeResource,
NewSourceBrazeResource,
+ NewSourceCartResource,
NewSourceChargebeeResource,
NewSourceChartmogulResource,
NewSourceClickhouseResource,
@@ -183,14 +185,13 @@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou
NewSourceDockerhubResource,
NewSourceDremioResource,
NewSourceDynamodbResource,
- NewSourceE2eTestCloudResource,
NewSourceEmailoctopusResource,
NewSourceExchangeRatesResource,
NewSourceFacebookMarketingResource,
NewSourceFacebookPagesResource,
NewSourceFakerResource,
NewSourceFaunaResource,
- NewSourceFileSecureResource,
+ NewSourceFileResource,
NewSourceFireboltResource,
NewSourceFreshcallerResource,
NewSourceFreshdeskResource,
@@ -204,8 +205,8 @@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou
NewSourceGnewsResource,
NewSourceGoogleAdsResource,
NewSourceGoogleAnalyticsDataAPIResource,
- NewSourceGoogleAnalyticsV4Resource,
NewSourceGoogleDirectoryResource,
+ NewSourceGoogleDriveResource,
NewSourceGooglePagespeedInsightsResource,
NewSourceGoogleSearchConsoleResource,
NewSourceGoogleSheetsResource,
@@ -243,8 +244,8 @@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou
NewSourceMicrosoftTeamsResource,
NewSourceMixpanelResource,
NewSourceMondayResource,
- NewSourceMongodbResource,
NewSourceMongodbInternalPocResource,
+ NewSourceMongodbV2Resource,
NewSourceMssqlResource,
NewSourceMyHoursResource,
NewSourceMysqlResource,
@@ -329,13 +330,12 @@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou
NewSourceWhiskyHunterResource,
NewSourceWikipediaPageviewsResource,
NewSourceWoocommerceResource,
- NewSourceXeroResource,
NewSourceXkcdResource,
NewSourceYandexMetricaResource,
NewSourceYotpoResource,
- NewSourceYouniumResource,
NewSourceYoutubeAnalyticsResource,
NewSourceZendeskChatResource,
+ NewSourceZendeskSellResource,
NewSourceZendeskSunshineResource,
NewSourceZendeskSupportResource,
NewSourceZendeskTalkResource,
@@ -353,13 +353,13 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D
NewDestinationAwsDatalakeDataSource,
NewDestinationAzureBlobStorageDataSource,
NewDestinationBigqueryDataSource,
- NewDestinationBigqueryDenormalizedDataSource,
NewDestinationClickhouseDataSource,
NewDestinationConvexDataSource,
NewDestinationCumulioDataSource,
NewDestinationDatabendDataSource,
NewDestinationDatabricksDataSource,
NewDestinationDevNullDataSource,
+ NewDestinationDuckdbDataSource,
NewDestinationDynamodbDataSource,
NewDestinationElasticsearchDataSource,
NewDestinationFireboltDataSource,
@@ -377,6 +377,7 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D
NewDestinationPineconeDataSource,
NewDestinationPostgresDataSource,
NewDestinationPubsubDataSource,
+ NewDestinationQdrantDataSource,
NewDestinationRedisDataSource,
NewDestinationRedshiftDataSource,
NewDestinationS3DataSource,
@@ -386,6 +387,7 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D
NewDestinationTimeplusDataSource,
NewDestinationTypesenseDataSource,
NewDestinationVerticaDataSource,
+ NewDestinationWeaviateDataSource,
NewDestinationXataDataSource,
NewSourceAhaDataSource,
NewSourceAircallDataSource,
@@ -403,11 +405,11 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D
NewSourceAzureBlobStorageDataSource,
NewSourceAzureTableDataSource,
NewSourceBambooHrDataSource,
- NewSourceBigcommerceDataSource,
NewSourceBigqueryDataSource,
NewSourceBingAdsDataSource,
NewSourceBraintreeDataSource,
NewSourceBrazeDataSource,
+ NewSourceCartDataSource,
NewSourceChargebeeDataSource,
NewSourceChartmogulDataSource,
NewSourceClickhouseDataSource,
@@ -426,14 +428,13 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D
NewSourceDockerhubDataSource,
NewSourceDremioDataSource,
NewSourceDynamodbDataSource,
- NewSourceE2eTestCloudDataSource,
NewSourceEmailoctopusDataSource,
NewSourceExchangeRatesDataSource,
NewSourceFacebookMarketingDataSource,
NewSourceFacebookPagesDataSource,
NewSourceFakerDataSource,
NewSourceFaunaDataSource,
- NewSourceFileSecureDataSource,
+ NewSourceFileDataSource,
NewSourceFireboltDataSource,
NewSourceFreshcallerDataSource,
NewSourceFreshdeskDataSource,
@@ -447,8 +448,8 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D
NewSourceGnewsDataSource,
NewSourceGoogleAdsDataSource,
NewSourceGoogleAnalyticsDataAPIDataSource,
- NewSourceGoogleAnalyticsV4DataSource,
NewSourceGoogleDirectoryDataSource,
+ NewSourceGoogleDriveDataSource,
NewSourceGooglePagespeedInsightsDataSource,
NewSourceGoogleSearchConsoleDataSource,
NewSourceGoogleSheetsDataSource,
@@ -486,8 +487,8 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D
NewSourceMicrosoftTeamsDataSource,
NewSourceMixpanelDataSource,
NewSourceMondayDataSource,
- NewSourceMongodbDataSource,
NewSourceMongodbInternalPocDataSource,
+ NewSourceMongodbV2DataSource,
NewSourceMssqlDataSource,
NewSourceMyHoursDataSource,
NewSourceMysqlDataSource,
@@ -572,13 +573,12 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D
NewSourceWhiskyHunterDataSource,
NewSourceWikipediaPageviewsDataSource,
NewSourceWoocommerceDataSource,
- NewSourceXeroDataSource,
NewSourceXkcdDataSource,
NewSourceYandexMetricaDataSource,
NewSourceYotpoDataSource,
- NewSourceYouniumDataSource,
NewSourceYoutubeAnalyticsDataSource,
NewSourceZendeskChatDataSource,
+ NewSourceZendeskSellDataSource,
NewSourceZendeskSunshineDataSource,
NewSourceZendeskSupportDataSource,
NewSourceZendeskTalkDataSource,
diff --git a/internal/provider/reflect/diags.go b/internal/provider/reflect/diags.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/doc.go b/internal/provider/reflect/doc.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/generic_attr_value.go b/internal/provider/reflect/generic_attr_value.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/helpers.go b/internal/provider/reflect/helpers.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/interfaces.go b/internal/provider/reflect/interfaces.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/into.go b/internal/provider/reflect/into.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/map.go b/internal/provider/reflect/map.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/number.go b/internal/provider/reflect/number.go
old mode 100755
new mode 100644
index a0e412605..37eef2164
--- a/internal/provider/reflect/number.go
+++ b/internal/provider/reflect/number.go
@@ -206,7 +206,7 @@ func Number(ctx context.Context, typ attr.Type, val tftypes.Value, target reflec
} else if acc == big.Below {
if floatResult == math.Inf(-1) || floatResult == -math.MaxFloat64 {
floatResult = -math.MaxFloat64
- } else if floatResult == -0.0 || floatResult == -math.SmallestNonzeroFloat64 { //nolint:staticcheck
+ } else if floatResult == 0.0 || floatResult == -math.SmallestNonzeroFloat64 {
floatResult = math.SmallestNonzeroFloat64
} else {
err := fmt.Errorf("not sure how to round %s and %f", acc, floatResult)
diff --git a/internal/provider/reflect/options.go b/internal/provider/reflect/options.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/outof.go b/internal/provider/reflect/outof.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/pointer.go b/internal/provider/reflect/pointer.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/primitive.go b/internal/provider/reflect/primitive.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/slice.go b/internal/provider/reflect/slice.go
old mode 100755
new mode 100644
diff --git a/internal/provider/reflect/struct.go b/internal/provider/reflect/struct.go
old mode 100755
new mode 100644
diff --git a/internal/provider/source_aha_data_source.go b/internal/provider/source_aha_data_source.go
old mode 100755
new mode 100644
index e324094d1..8ac6ffae9
--- a/internal/provider/source_aha_data_source.go
+++ b/internal/provider/source_aha_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceAhaDataSource struct {
// SourceAhaDataSourceModel describes the data model.
type SourceAhaDataSourceModel struct {
- Configuration SourceAha `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,38 +47,20 @@ func (r *SourceAhaDataSource) Schema(ctx context.Context, req datasource.SchemaR
MarkdownDescription: "SourceAha DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aha",
- ),
- },
- Description: `must be one of ["aha"]`,
- },
- "url": schema.StringAttribute{
- Computed: true,
- Description: `URL`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_aha_data_source_sdk.go b/internal/provider/source_aha_data_source_sdk.go
old mode 100755
new mode 100644
index a0afd30dd..41dce27af
--- a/internal/provider/source_aha_data_source_sdk.go
+++ b/internal/provider/source_aha_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAhaDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_aha_resource.go b/internal/provider/source_aha_resource.go
old mode 100755
new mode 100644
index 16d0702ef..b163e02ef
--- a/internal/provider/source_aha_resource.go
+++ b/internal/provider/source_aha_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceAhaResource struct {
// SourceAhaResourceModel describes the resource data model.
type SourceAhaResourceModel struct {
Configuration SourceAha `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,30 +56,33 @@ func (r *SourceAhaResource) Schema(ctx context.Context, req resource.SchemaReque
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aha",
- ),
- },
- Description: `must be one of ["aha"]`,
- },
"url": schema.StringAttribute{
Required: true,
Description: `URL`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceAhaResource) Create(ctx context.Context, req resource.CreateReque
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAha(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceAhaResource) Delete(ctx context.Context, req resource.DeleteReque
}
func (r *SourceAhaResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_aha_resource_sdk.go b/internal/provider/source_aha_resource_sdk.go
old mode 100755
new mode 100644
index 3cc18e001..bdf6cc74d
--- a/internal/provider/source_aha_resource_sdk.go
+++ b/internal/provider/source_aha_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAhaResourceModel) ToCreateSDKType() *shared.SourceAhaCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceAhaAha(r.Configuration.SourceType.ValueString())
url := r.Configuration.URL.ValueString()
configuration := shared.SourceAha{
- APIKey: apiKey,
- SourceType: sourceType,
- URL: url,
+ APIKey: apiKey,
+ URL: url,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceAhaResourceModel) ToCreateSDKType() *shared.SourceAhaCreateReques
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAhaCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_aircall_data_source.go b/internal/provider/source_aircall_data_source.go
old mode 100755
new mode 100644
index 3bc929f99..90f1f09a5
--- a/internal/provider/source_aircall_data_source.go
+++ b/internal/provider/source_aircall_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceAircallDataSource struct {
// SourceAircallDataSourceModel describes the data model.
type SourceAircallDataSourceModel struct {
- Configuration SourceAircall `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,45 +47,20 @@ func (r *SourceAircallDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceAircall DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_id": schema.StringAttribute{
- Computed: true,
- Description: `App ID found at settings https://dashboard.aircall.io/integrations/api-keys`,
- },
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aircall",
- ),
- },
- Description: `must be one of ["aircall"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `Date time filter for incremental filter, Specify which date to extract from.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_aircall_data_source_sdk.go b/internal/provider/source_aircall_data_source_sdk.go
old mode 100755
new mode 100644
index 0d042a9e6..4e951648a
--- a/internal/provider/source_aircall_data_source_sdk.go
+++ b/internal/provider/source_aircall_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAircallDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_aircall_resource.go b/internal/provider/source_aircall_resource.go
old mode 100755
new mode 100644
index 152eea3bb..cf1fde261
--- a/internal/provider/source_aircall_resource.go
+++ b/internal/provider/source_aircall_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceAircallResource struct {
// SourceAircallResourceModel describes the resource data model.
type SourceAircallResourceModel struct {
Configuration SourceAircall `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -61,33 +62,36 @@ func (r *SourceAircallResource) Schema(ctx context.Context, req resource.SchemaR
},
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aircall",
- ),
- },
- Description: `must be one of ["aircall"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `Date time filter for incremental filter, Specify which date to extract from.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `Date time filter for incremental filter, Specify which date to extract from.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -151,7 +155,7 @@ func (r *SourceAircallResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAircall(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -327,5 +331,5 @@ func (r *SourceAircallResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceAircallResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_aircall_resource_sdk.go b/internal/provider/source_aircall_resource_sdk.go
old mode 100755
new mode 100644
index dcb79fc85..4dca9328e
--- a/internal/provider/source_aircall_resource_sdk.go
+++ b/internal/provider/source_aircall_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -11,13 +11,17 @@ import (
func (r *SourceAircallResourceModel) ToCreateSDKType() *shared.SourceAircallCreateRequest {
apiID := r.Configuration.APIID.ValueString()
apiToken := r.Configuration.APIToken.ValueString()
- sourceType := shared.SourceAircallAircall(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceAircall{
- APIID: apiID,
- APIToken: apiToken,
- SourceType: sourceType,
- StartDate: startDate,
+ APIID: apiID,
+ APIToken: apiToken,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -29,6 +33,7 @@ func (r *SourceAircallResourceModel) ToCreateSDKType() *shared.SourceAircallCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAircallCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_airtable_data_source.go b/internal/provider/source_airtable_data_source.go
old mode 100755
new mode 100644
index 30b1b2f16..4c0cdd1bb
--- a/internal/provider/source_airtable_data_source.go
+++ b/internal/provider/source_airtable_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceAirtableDataSource struct {
// SourceAirtableDataSourceModel describes the data model.
type SourceAirtableDataSourceModel struct {
- Configuration SourceAirtable `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,148 +47,20 @@ func (r *SourceAirtableDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceAirtable DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_airtable_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The client ID of the Airtable developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret the Airtable developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_airtable_authentication_personal_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_key",
- ),
- },
- Description: `must be one of ["api_key"]`,
- },
- },
- },
- "source_airtable_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The client ID of the Airtable developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret the Airtable developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_airtable_update_authentication_personal_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_key",
- ),
- },
- Description: `must be one of ["api_key"]`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "airtable",
- ),
- },
- Description: `must be one of ["airtable"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_airtable_data_source_sdk.go b/internal/provider/source_airtable_data_source_sdk.go
old mode 100755
new mode 100644
index d4c74fe8f..689f0ed27
--- a/internal/provider/source_airtable_data_source_sdk.go
+++ b/internal/provider/source_airtable_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAirtableDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_airtable_resource.go b/internal/provider/source_airtable_resource.go
old mode 100755
new mode 100644
index a2e41acd2..b7164046d
--- a/internal/provider/source_airtable_resource.go
+++ b/internal/provider/source_airtable_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceAirtableResource struct {
// SourceAirtableResourceModel describes the resource data model.
type SourceAirtableResourceModel struct {
Configuration SourceAirtable `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,22 +59,14 @@ func (r *SourceAirtableResource) Schema(ctx context.Context, req resource.Schema
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_airtable_authentication_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests.`,
},
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The client ID of the Airtable developer application.`,
@@ -84,88 +77,27 @@ func (r *SourceAirtableResource) Schema(ctx context.Context, req resource.Schema
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The key to refresh the expired access token.`,
},
"token_expiry_date": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_airtable_authentication_personal_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Required: true,
- Description: `The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token.`,
- },
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_key",
- ),
- },
- Description: `must be one of ["api_key"]`,
- },
- },
- },
- "source_airtable_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
Optional: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The client ID of the Airtable developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The client secret the Airtable developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The key to refresh the expired access token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Optional: true,
+ Sensitive: true,
+ Description: `The date-time when the access token should be refreshed.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date-time when the access token should be refreshed.`,
},
},
},
- "source_airtable_update_authentication_personal_access_token": schema.SingleNestedAttribute{
+ "personal_access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token.`,
},
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_key",
- ),
- },
- Description: `must be one of ["api_key"]`,
- },
},
},
},
@@ -173,24 +105,26 @@ func (r *SourceAirtableResource) Schema(ctx context.Context, req resource.Schema
validators.ExactlyOneChild(),
},
},
- "source_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "airtable",
- ),
- },
- Description: `must be one of ["airtable"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -254,7 +188,7 @@ func (r *SourceAirtableResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAirtable(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -430,5 +364,5 @@ func (r *SourceAirtableResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceAirtableResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_airtable_resource_sdk.go b/internal/provider/source_airtable_resource_sdk.go
old mode 100755
new mode 100644
index 9b330f4e3..69c090d46
--- a/internal/provider/source_airtable_resource_sdk.go
+++ b/internal/provider/source_airtable_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -11,72 +11,57 @@ import (
func (r *SourceAirtableResourceModel) ToCreateSDKType() *shared.SourceAirtableCreateRequest {
var credentials *shared.SourceAirtableAuthentication
if r.Configuration.Credentials != nil {
- var sourceAirtableAuthenticationOAuth20 *shared.SourceAirtableAuthenticationOAuth20
- if r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20 != nil {
+ var sourceAirtableOAuth20 *shared.SourceAirtableOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
accessToken := new(string)
- if !r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.AccessToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
} else {
accessToken = nil
}
- authMethod := new(shared.SourceAirtableAuthenticationOAuth20AuthMethod)
- if !r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.AuthMethod.IsNull() {
- *authMethod = shared.SourceAirtableAuthenticationOAuth20AuthMethod(r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.AuthMethod.ValueString())
- } else {
- authMethod = nil
- }
- clientID := r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.RefreshToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
tokenExpiryDate := new(time.Time)
- if !r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.TokenExpiryDate.IsUnknown() && !r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.TokenExpiryDate.IsNull() {
- *tokenExpiryDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceAirtableAuthenticationOAuth20.TokenExpiryDate.ValueString())
+ if !r.Configuration.Credentials.OAuth20.TokenExpiryDate.IsUnknown() && !r.Configuration.Credentials.OAuth20.TokenExpiryDate.IsNull() {
+ *tokenExpiryDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
} else {
tokenExpiryDate = nil
}
- sourceAirtableAuthenticationOAuth20 = &shared.SourceAirtableAuthenticationOAuth20{
+ sourceAirtableOAuth20 = &shared.SourceAirtableOAuth20{
AccessToken: accessToken,
- AuthMethod: authMethod,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceAirtableAuthenticationOAuth20 != nil {
+ if sourceAirtableOAuth20 != nil {
credentials = &shared.SourceAirtableAuthentication{
- SourceAirtableAuthenticationOAuth20: sourceAirtableAuthenticationOAuth20,
+ SourceAirtableOAuth20: sourceAirtableOAuth20,
}
}
- var sourceAirtableAuthenticationPersonalAccessToken *shared.SourceAirtableAuthenticationPersonalAccessToken
- if r.Configuration.Credentials.SourceAirtableAuthenticationPersonalAccessToken != nil {
- apiKey := r.Configuration.Credentials.SourceAirtableAuthenticationPersonalAccessToken.APIKey.ValueString()
- authMethod1 := new(shared.SourceAirtableAuthenticationPersonalAccessTokenAuthMethod)
- if !r.Configuration.Credentials.SourceAirtableAuthenticationPersonalAccessToken.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceAirtableAuthenticationPersonalAccessToken.AuthMethod.IsNull() {
- *authMethod1 = shared.SourceAirtableAuthenticationPersonalAccessTokenAuthMethod(r.Configuration.Credentials.SourceAirtableAuthenticationPersonalAccessToken.AuthMethod.ValueString())
- } else {
- authMethod1 = nil
- }
- sourceAirtableAuthenticationPersonalAccessToken = &shared.SourceAirtableAuthenticationPersonalAccessToken{
- APIKey: apiKey,
- AuthMethod: authMethod1,
+ var sourceAirtablePersonalAccessToken *shared.SourceAirtablePersonalAccessToken
+ if r.Configuration.Credentials.PersonalAccessToken != nil {
+ apiKey := r.Configuration.Credentials.PersonalAccessToken.APIKey.ValueString()
+ sourceAirtablePersonalAccessToken = &shared.SourceAirtablePersonalAccessToken{
+ APIKey: apiKey,
}
}
- if sourceAirtableAuthenticationPersonalAccessToken != nil {
+ if sourceAirtablePersonalAccessToken != nil {
credentials = &shared.SourceAirtableAuthentication{
- SourceAirtableAuthenticationPersonalAccessToken: sourceAirtableAuthenticationPersonalAccessToken,
+ SourceAirtablePersonalAccessToken: sourceAirtablePersonalAccessToken,
}
}
}
- sourceType := new(shared.SourceAirtableAirtable)
- if !r.Configuration.SourceType.IsUnknown() && !r.Configuration.SourceType.IsNull() {
- *sourceType = shared.SourceAirtableAirtable(r.Configuration.SourceType.ValueString())
- } else {
- sourceType = nil
- }
configuration := shared.SourceAirtable{
Credentials: credentials,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -88,6 +73,7 @@ func (r *SourceAirtableResourceModel) ToCreateSDKType() *shared.SourceAirtableCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAirtableCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -103,60 +89,46 @@ func (r *SourceAirtableResourceModel) ToGetSDKType() *shared.SourceAirtableCreat
func (r *SourceAirtableResourceModel) ToUpdateSDKType() *shared.SourceAirtablePutRequest {
var credentials *shared.SourceAirtableUpdateAuthentication
if r.Configuration.Credentials != nil {
- var sourceAirtableUpdateAuthenticationOAuth20 *shared.SourceAirtableUpdateAuthenticationOAuth20
- if r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20 != nil {
+ var sourceAirtableUpdateOAuth20 *shared.SourceAirtableUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
accessToken := new(string)
- if !r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.AccessToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
} else {
accessToken = nil
}
- authMethod := new(shared.SourceAirtableUpdateAuthenticationOAuth20AuthMethod)
- if !r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.AuthMethod.IsNull() {
- *authMethod = shared.SourceAirtableUpdateAuthenticationOAuth20AuthMethod(r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.AuthMethod.ValueString())
- } else {
- authMethod = nil
- }
- clientID := r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.RefreshToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
tokenExpiryDate := new(time.Time)
- if !r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.TokenExpiryDate.IsUnknown() && !r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.TokenExpiryDate.IsNull() {
- *tokenExpiryDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceAirtableUpdateAuthenticationOAuth20.TokenExpiryDate.ValueString())
+ if !r.Configuration.Credentials.OAuth20.TokenExpiryDate.IsUnknown() && !r.Configuration.Credentials.OAuth20.TokenExpiryDate.IsNull() {
+ *tokenExpiryDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
} else {
tokenExpiryDate = nil
}
- sourceAirtableUpdateAuthenticationOAuth20 = &shared.SourceAirtableUpdateAuthenticationOAuth20{
+ sourceAirtableUpdateOAuth20 = &shared.SourceAirtableUpdateOAuth20{
AccessToken: accessToken,
- AuthMethod: authMethod,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceAirtableUpdateAuthenticationOAuth20 != nil {
+ if sourceAirtableUpdateOAuth20 != nil {
credentials = &shared.SourceAirtableUpdateAuthentication{
- SourceAirtableUpdateAuthenticationOAuth20: sourceAirtableUpdateAuthenticationOAuth20,
+ SourceAirtableUpdateOAuth20: sourceAirtableUpdateOAuth20,
}
}
- var sourceAirtableUpdateAuthenticationPersonalAccessToken *shared.SourceAirtableUpdateAuthenticationPersonalAccessToken
- if r.Configuration.Credentials.SourceAirtableUpdateAuthenticationPersonalAccessToken != nil {
- apiKey := r.Configuration.Credentials.SourceAirtableUpdateAuthenticationPersonalAccessToken.APIKey.ValueString()
- authMethod1 := new(shared.SourceAirtableUpdateAuthenticationPersonalAccessTokenAuthMethod)
- if !r.Configuration.Credentials.SourceAirtableUpdateAuthenticationPersonalAccessToken.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceAirtableUpdateAuthenticationPersonalAccessToken.AuthMethod.IsNull() {
- *authMethod1 = shared.SourceAirtableUpdateAuthenticationPersonalAccessTokenAuthMethod(r.Configuration.Credentials.SourceAirtableUpdateAuthenticationPersonalAccessToken.AuthMethod.ValueString())
- } else {
- authMethod1 = nil
- }
- sourceAirtableUpdateAuthenticationPersonalAccessToken = &shared.SourceAirtableUpdateAuthenticationPersonalAccessToken{
- APIKey: apiKey,
- AuthMethod: authMethod1,
+ var personalAccessToken *shared.PersonalAccessToken
+ if r.Configuration.Credentials.PersonalAccessToken != nil {
+ apiKey := r.Configuration.Credentials.PersonalAccessToken.APIKey.ValueString()
+ personalAccessToken = &shared.PersonalAccessToken{
+ APIKey: apiKey,
}
}
- if sourceAirtableUpdateAuthenticationPersonalAccessToken != nil {
+ if personalAccessToken != nil {
credentials = &shared.SourceAirtableUpdateAuthentication{
- SourceAirtableUpdateAuthenticationPersonalAccessToken: sourceAirtableUpdateAuthenticationPersonalAccessToken,
+ PersonalAccessToken: personalAccessToken,
}
}
}
diff --git a/internal/provider/source_alloydb_data_source.go b/internal/provider/source_alloydb_data_source.go
old mode 100755
new mode 100644
index 997b18a71..a8695bb73
--- a/internal/provider/source_alloydb_data_source.go
+++ b/internal/provider/source_alloydb_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceAlloydbDataSource struct {
// SourceAlloydbDataSourceModel describes the data model.
type SourceAlloydbDataSourceModel struct {
- Configuration SourceAlloydb1 `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,753 +47,20 @@ func (r *SourceAlloydbDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceAlloydb DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `Name of the database.`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the database.`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `Port of the database.`,
- },
- "replication_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_alloydb_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "initial_waiting_seconds": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "lsn_commit_behaviour": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "While reading Data",
- "After loading Data in the destination",
- ),
- },
- MarkdownDescription: `must be one of ["While reading Data", "After loading Data in the destination"]` + "\n" +
- `Determines when Airbtye should flush the LSN of processed WAL logs in the source database. ` + "`" + `After loading Data in the destination` + "`" + ` is default. If ` + "`" + `While reading Data` + "`" + ` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "plugin": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pgoutput",
- ),
- },
- MarkdownDescription: `must be one of ["pgoutput"]` + "\n" +
- `A logical decoding plugin installed on the PostgreSQL server.`,
- },
- "publication": schema.StringAttribute{
- Computed: true,
- Description: `A Postgres publication used for consuming changes. Read about publications and replication identities.`,
- },
- "queue_size": schema.Int64Attribute{
- Computed: true,
- Description: `The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.`,
- },
- "replication_slot": schema.StringAttribute{
- Computed: true,
- Description: `A plugin logical replication slot. Read about replication slots.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.`,
- },
- "source_alloydb_replication_method_standard": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`,
- },
- "source_alloydb_replication_method_standard_xmin": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Xmin",
- ),
- },
- Description: `must be one of ["Xmin"]`,
- },
- },
- Description: `Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.`,
- },
- "source_alloydb_update_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "initial_waiting_seconds": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "lsn_commit_behaviour": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "While reading Data",
- "After loading Data in the destination",
- ),
- },
- MarkdownDescription: `must be one of ["While reading Data", "After loading Data in the destination"]` + "\n" +
- `Determines when Airbtye should flush the LSN of processed WAL logs in the source database. ` + "`" + `After loading Data in the destination` + "`" + ` is default. If ` + "`" + `While reading Data` + "`" + ` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "plugin": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pgoutput",
- ),
- },
- MarkdownDescription: `must be one of ["pgoutput"]` + "\n" +
- `A logical decoding plugin installed on the PostgreSQL server.`,
- },
- "publication": schema.StringAttribute{
- Computed: true,
- Description: `A Postgres publication used for consuming changes. Read about publications and replication identities.`,
- },
- "queue_size": schema.Int64Attribute{
- Computed: true,
- Description: `The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.`,
- },
- "replication_slot": schema.StringAttribute{
- Computed: true,
- Description: `A plugin logical replication slot. Read about replication slots.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.`,
- },
- "source_alloydb_update_replication_method_standard": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`,
- },
- "source_alloydb_update_replication_method_standard_xmin": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Xmin",
- ),
- },
- Description: `must be one of ["Xmin"]`,
- },
- },
- Description: `Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Replication method for extracting data from the database.`,
- },
- "schemas": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The list of schemas (case sensitive) to sync from. Defaults to public.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "alloydb",
- ),
- },
- Description: `must be one of ["alloydb"]`,
- },
- "ssl_mode": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_alloydb_ssl_modes_allow": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Enables encryption only when required by the source database.`,
- },
- "source_alloydb_ssl_modes_disable": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Disables encryption of communication between Airbyte and source database.`,
- },
- "source_alloydb_ssl_modes_prefer": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Allows unencrypted connection only if the source database does not support encryption.`,
- },
- "source_alloydb_ssl_modes_require": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Always require encryption. If the source database server does not support encryption, connection will fail.`,
- },
- "source_alloydb_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Always require encryption and verifies that the source database server has a valid SSL certificate.`,
- },
- "source_alloydb_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `This is the most secure mode. Always require encryption and verifies the identity of the source database server.`,
- },
- "source_alloydb_update_ssl_modes_allow": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Enables encryption only when required by the source database.`,
- },
- "source_alloydb_update_ssl_modes_disable": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Disables encryption of communication between Airbyte and source database.`,
- },
- "source_alloydb_update_ssl_modes_prefer": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Allows unencrypted connection only if the source database does not support encryption.`,
- },
- "source_alloydb_update_ssl_modes_require": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Always require encryption. If the source database server does not support encryption, connection will fail.`,
- },
- "source_alloydb_update_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Always require encryption and verifies that the source database server has a valid SSL certificate.`,
- },
- "source_alloydb_update_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `This is the most secure mode. Always require encryption and verifies the identity of the source database server.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- MarkdownDescription: `SSL connection modes. ` + "\n" +
- ` Read more in the docs.`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_alloydb_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_alloydb_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_alloydb_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_alloydb_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_alloydb_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_alloydb_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_alloydb_data_source_sdk.go b/internal/provider/source_alloydb_data_source_sdk.go
old mode 100755
new mode 100644
index ca57f454f..9b0b11993
--- a/internal/provider/source_alloydb_data_source_sdk.go
+++ b/internal/provider/source_alloydb_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAlloydbDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_alloydb_resource.go b/internal/provider/source_alloydb_resource.go
old mode 100755
new mode 100644
index 03c8ab9de..da470a61d
--- a/internal/provider/source_alloydb_resource.go
+++ b/internal/provider/source_alloydb_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceAlloydbResource struct {
// SourceAlloydbResourceModel describes the resource data model.
type SourceAlloydbResourceModel struct {
Configuration SourceAlloydb `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -69,345 +71,155 @@ func (r *SourceAlloydbResource) Schema(ctx context.Context, req resource.SchemaR
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password associated with the username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `Port of the database.`,
+ Optional: true,
+ MarkdownDescription: `Default: 5432` + "\n" +
+ `Port of the database.`,
},
"replication_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_alloydb_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{
+ "logical_replication_cdc": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "initial_waiting_seconds": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "lsn_commit_behaviour": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "While reading Data",
- "After loading Data in the destination",
- ),
- },
- MarkdownDescription: `must be one of ["While reading Data", "After loading Data in the destination"]` + "\n" +
- `Determines when Airbtye should flush the LSN of processed WAL logs in the source database. ` + "`" + `After loading Data in the destination` + "`" + ` is default. If ` + "`" + `While reading Data` + "`" + ` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "plugin": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pgoutput",
- ),
- },
- MarkdownDescription: `must be one of ["pgoutput"]` + "\n" +
- `A logical decoding plugin installed on the PostgreSQL server.`,
- },
- "publication": schema.StringAttribute{
- Required: true,
- Description: `A Postgres publication used for consuming changes. Read about publications and replication identities.`,
- },
- "queue_size": schema.Int64Attribute{
- Optional: true,
- Description: `The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.`,
- },
- "replication_slot": schema.StringAttribute{
- Required: true,
- Description: `A plugin logical replication slot. Read about replication slots.`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
+ Optional: true,
Description: `Parsed as JSON.`,
- },
- },
- Description: `Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.`,
- },
- "source_alloydb_replication_method_standard": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`,
- },
- "source_alloydb_replication_method_standard_xmin": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Xmin",
- ),
+ validators.IsValidJSON(),
},
- Description: `must be one of ["Xmin"]`,
},
- },
- Description: `Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.`,
- },
- "source_alloydb_update_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
"initial_waiting_seconds": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
+ Optional: true,
+ MarkdownDescription: `Default: 300` + "\n" +
+ `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
},
"lsn_commit_behaviour": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["While reading Data", "After loading Data in the destination"]; Default: "After loading Data in the destination"` + "\n" +
+							`Determines when Airbyte should flush the LSN of processed WAL logs in the source database. ` + "`" + `After loading Data in the destination` + "`" + ` is default. If ` + "`" + `While reading Data` + "`" + ` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.`,
Validators: []validator.String{
stringvalidator.OneOf(
"While reading Data",
"After loading Data in the destination",
),
},
- MarkdownDescription: `must be one of ["While reading Data", "After loading Data in the destination"]` + "\n" +
- `Determines when Airbtye should flush the LSN of processed WAL logs in the source database. ` + "`" + `After loading Data in the destination` + "`" + ` is default. If ` + "`" + `While reading Data` + "`" + ` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
},
"plugin": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["pgoutput"]; Default: "pgoutput"` + "\n" +
+ `A logical decoding plugin installed on the PostgreSQL server.`,
Validators: []validator.String{
stringvalidator.OneOf(
"pgoutput",
),
},
- MarkdownDescription: `must be one of ["pgoutput"]` + "\n" +
- `A logical decoding plugin installed on the PostgreSQL server.`,
},
"publication": schema.StringAttribute{
Required: true,
Description: `A Postgres publication used for consuming changes. Read about publications and replication identities.`,
},
"queue_size": schema.Int64Attribute{
- Optional: true,
- Description: `The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.`,
+ Optional: true,
+ MarkdownDescription: `Default: 10000` + "\n" +
+ `The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.`,
},
"replication_slot": schema.StringAttribute{
Required: true,
Description: `A plugin logical replication slot. Read about replication slots.`,
},
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.`,
},
- "source_alloydb_update_replication_method_standard": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
+ "standard": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`,
},
- "source_alloydb_update_replication_method_standard_xmin": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Xmin",
- ),
- },
- Description: `must be one of ["Xmin"]`,
- },
- },
+ "standard_xmin": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.`,
},
},
+ Description: `Replication method for extracting data from the database.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Replication method for extracting data from the database.`,
},
"schemas": schema.ListAttribute{
Optional: true,
ElementType: types.StringType,
Description: `The list of schemas (case sensitive) to sync from. Defaults to public.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "alloydb",
- ),
- },
- Description: `must be one of ["alloydb"]`,
- },
"ssl_mode": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_alloydb_ssl_modes_allow": schema.SingleNestedAttribute{
+ "allow": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
},
Description: `Enables encryption only when required by the source database.`,
},
- "source_alloydb_ssl_modes_disable": schema.SingleNestedAttribute{
+ "disable": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
},
Description: `Disables encryption of communication between Airbyte and source database.`,
},
- "source_alloydb_ssl_modes_prefer": schema.SingleNestedAttribute{
+ "prefer": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
},
Description: `Allows unencrypted connection only if the source database does not support encryption.`,
},
- "source_alloydb_ssl_modes_require": schema.SingleNestedAttribute{
+ "require": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
},
Description: `Always require encryption. If the source database server does not support encryption, connection will fail.`,
},
- "source_alloydb_ssl_modes_verify_ca": schema.SingleNestedAttribute{
+ "verify_ca": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Required: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Optional: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Optional: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Optional: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
- },
- Description: `Always require encryption and verifies that the source database server has a valid SSL certificate.`,
- },
- "source_alloydb_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
"ca_certificate": schema.StringAttribute{
Required: true,
Description: `CA certificate`,
@@ -418,160 +230,27 @@ func (r *SourceAlloydbResource) Schema(ctx context.Context, req resource.SchemaR
},
"client_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Client key`,
},
"client_key_password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
},
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `This is the most secure mode. Always require encryption and verifies the identity of the source database server.`,
- },
- "source_alloydb_update_ssl_modes_allow": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Enables encryption only when required by the source database.`,
- },
- "source_alloydb_update_ssl_modes_disable": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
- Description: `Disables encryption of communication between Airbyte and source database.`,
- },
- "source_alloydb_update_ssl_modes_prefer": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Allows unencrypted connection only if the source database does not support encryption.`,
+ Description: `Always require encryption and verifies that the source database server has a valid SSL certificate.`,
},
- "source_alloydb_update_ssl_modes_require": schema.SingleNestedAttribute{
+ "verify_full": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Always require encryption. If the source database server does not support encryption, connection will fail.`,
- },
- "source_alloydb_update_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Required: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Optional: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Optional: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
Optional: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
- },
- Description: `Always require encryption and verifies that the source database server has a valid SSL certificate.`,
- },
- "source_alloydb_update_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
"ca_certificate": schema.StringAttribute{
Required: true,
Description: `CA certificate`,
@@ -582,157 +261,43 @@ func (r *SourceAlloydbResource) Schema(ctx context.Context, req resource.SchemaR
},
"client_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Client key`,
},
"client_key_password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
},
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `This is the most secure mode. Always require encryption and verifies the identity of the source database server.`,
},
},
+ MarkdownDescription: `SSL connection modes. ` + "\n" +
+ ` Read more in the docs.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- MarkdownDescription: `SSL connection modes. ` + "\n" +
- ` Read more in the docs.`,
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_alloydb_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_alloydb_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_alloydb_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_alloydb_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_alloydb_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -740,35 +305,28 @@ func (r *SourceAlloydbResource) Schema(ctx context.Context, req resource.SchemaR
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_alloydb_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -778,10 +336,10 @@ func (r *SourceAlloydbResource) Schema(ctx context.Context, req resource.SchemaR
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -789,13 +347,24 @@ func (r *SourceAlloydbResource) Schema(ctx context.Context, req resource.SchemaR
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -859,7 +428,7 @@ func (r *SourceAlloydbResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAlloydb(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -1035,5 +604,5 @@ func (r *SourceAlloydbResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceAlloydbResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_alloydb_resource_sdk.go b/internal/provider/source_alloydb_resource_sdk.go
old mode 100755
new mode 100644
index edf46d75c..1ee82e4ec
--- a/internal/provider/source_alloydb_resource_sdk.go
+++ b/internal/provider/source_alloydb_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
"encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -23,80 +23,77 @@ func (r *SourceAlloydbResourceModel) ToCreateSDKType() *shared.SourceAlloydbCrea
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var replicationMethod *shared.SourceAlloydbReplicationMethod
if r.Configuration.ReplicationMethod != nil {
- var sourceAlloydbReplicationMethodStandardXmin *shared.SourceAlloydbReplicationMethodStandardXmin
- if r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodStandardXmin != nil {
- method := shared.SourceAlloydbReplicationMethodStandardXminMethod(r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodStandardXmin.Method.ValueString())
- sourceAlloydbReplicationMethodStandardXmin = &shared.SourceAlloydbReplicationMethodStandardXmin{
- Method: method,
- }
+ var sourceAlloydbStandardXmin *shared.SourceAlloydbStandardXmin
+ if r.Configuration.ReplicationMethod.StandardXmin != nil {
+ sourceAlloydbStandardXmin = &shared.SourceAlloydbStandardXmin{}
}
- if sourceAlloydbReplicationMethodStandardXmin != nil {
+ if sourceAlloydbStandardXmin != nil {
replicationMethod = &shared.SourceAlloydbReplicationMethod{
- SourceAlloydbReplicationMethodStandardXmin: sourceAlloydbReplicationMethodStandardXmin,
+ SourceAlloydbStandardXmin: sourceAlloydbStandardXmin,
}
}
- var sourceAlloydbReplicationMethodLogicalReplicationCDC *shared.SourceAlloydbReplicationMethodLogicalReplicationCDC
- if r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC != nil {
+ var sourceAlloydbLogicalReplicationCDC *shared.SourceAlloydbLogicalReplicationCDC
+ if r.Configuration.ReplicationMethod.LogicalReplicationCDC != nil {
+ var additionalProperties interface{}
+ if !r.Configuration.ReplicationMethod.LogicalReplicationCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.LogicalReplicationCDC.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.LogicalReplicationCDC.AdditionalProperties.ValueString()), &additionalProperties)
+ }
initialWaitingSeconds := new(int64)
- if !r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsNull() {
- *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.ValueInt64()
+ if !r.Configuration.ReplicationMethod.LogicalReplicationCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.LogicalReplicationCDC.InitialWaitingSeconds.IsNull() {
+ *initialWaitingSeconds = r.Configuration.ReplicationMethod.LogicalReplicationCDC.InitialWaitingSeconds.ValueInt64()
} else {
initialWaitingSeconds = nil
}
- lsnCommitBehaviour := new(shared.SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour)
- if !r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.IsNull() {
- *lsnCommitBehaviour = shared.SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour(r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.ValueString())
+ lsnCommitBehaviour := new(shared.SourceAlloydbLSNCommitBehaviour)
+ if !r.Configuration.ReplicationMethod.LogicalReplicationCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.LogicalReplicationCDC.LsnCommitBehaviour.IsNull() {
+ *lsnCommitBehaviour = shared.SourceAlloydbLSNCommitBehaviour(r.Configuration.ReplicationMethod.LogicalReplicationCDC.LsnCommitBehaviour.ValueString())
} else {
lsnCommitBehaviour = nil
}
- method1 := shared.SourceAlloydbReplicationMethodLogicalReplicationCDCMethod(r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.Method.ValueString())
- plugin := new(shared.SourceAlloydbReplicationMethodLogicalReplicationCDCPlugin)
- if !r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.Plugin.IsNull() {
- *plugin = shared.SourceAlloydbReplicationMethodLogicalReplicationCDCPlugin(r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.Plugin.ValueString())
+ plugin := new(shared.SourceAlloydbPlugin)
+ if !r.Configuration.ReplicationMethod.LogicalReplicationCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.LogicalReplicationCDC.Plugin.IsNull() {
+ *plugin = shared.SourceAlloydbPlugin(r.Configuration.ReplicationMethod.LogicalReplicationCDC.Plugin.ValueString())
} else {
plugin = nil
}
- publication := r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.Publication.ValueString()
+ publication := r.Configuration.ReplicationMethod.LogicalReplicationCDC.Publication.ValueString()
queueSize := new(int64)
- if !r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.QueueSize.IsNull() {
- *queueSize = r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.QueueSize.ValueInt64()
+ if !r.Configuration.ReplicationMethod.LogicalReplicationCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.LogicalReplicationCDC.QueueSize.IsNull() {
+ *queueSize = r.Configuration.ReplicationMethod.LogicalReplicationCDC.QueueSize.ValueInt64()
} else {
queueSize = nil
}
- replicationSlot := r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.ReplicationSlot.ValueString()
- var additionalProperties interface{}
- if !r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodLogicalReplicationCDC.AdditionalProperties.ValueString()), &additionalProperties)
- }
- sourceAlloydbReplicationMethodLogicalReplicationCDC = &shared.SourceAlloydbReplicationMethodLogicalReplicationCDC{
+ replicationSlot := r.Configuration.ReplicationMethod.LogicalReplicationCDC.ReplicationSlot.ValueString()
+ sourceAlloydbLogicalReplicationCDC = &shared.SourceAlloydbLogicalReplicationCDC{
+ AdditionalProperties: additionalProperties,
InitialWaitingSeconds: initialWaitingSeconds,
LsnCommitBehaviour: lsnCommitBehaviour,
- Method: method1,
Plugin: plugin,
Publication: publication,
QueueSize: queueSize,
ReplicationSlot: replicationSlot,
- AdditionalProperties: additionalProperties,
}
}
- if sourceAlloydbReplicationMethodLogicalReplicationCDC != nil {
+ if sourceAlloydbLogicalReplicationCDC != nil {
replicationMethod = &shared.SourceAlloydbReplicationMethod{
- SourceAlloydbReplicationMethodLogicalReplicationCDC: sourceAlloydbReplicationMethodLogicalReplicationCDC,
+ SourceAlloydbLogicalReplicationCDC: sourceAlloydbLogicalReplicationCDC,
}
}
- var sourceAlloydbReplicationMethodStandard *shared.SourceAlloydbReplicationMethodStandard
- if r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodStandard != nil {
- method2 := shared.SourceAlloydbReplicationMethodStandardMethod(r.Configuration.ReplicationMethod.SourceAlloydbReplicationMethodStandard.Method.ValueString())
- sourceAlloydbReplicationMethodStandard = &shared.SourceAlloydbReplicationMethodStandard{
- Method: method2,
- }
+ var sourceAlloydbStandard *shared.SourceAlloydbStandard
+ if r.Configuration.ReplicationMethod.Standard != nil {
+ sourceAlloydbStandard = &shared.SourceAlloydbStandard{}
}
- if sourceAlloydbReplicationMethodStandard != nil {
+ if sourceAlloydbStandard != nil {
replicationMethod = &shared.SourceAlloydbReplicationMethod{
- SourceAlloydbReplicationMethodStandard: sourceAlloydbReplicationMethodStandard,
+ SourceAlloydbStandard: sourceAlloydbStandard,
}
}
}
@@ -104,210 +101,200 @@ func (r *SourceAlloydbResourceModel) ToCreateSDKType() *shared.SourceAlloydbCrea
for _, schemasItem := range r.Configuration.Schemas {
schemas = append(schemas, schemasItem.ValueString())
}
- sourceType := shared.SourceAlloydbAlloydb(r.Configuration.SourceType.ValueString())
var sslMode *shared.SourceAlloydbSSLModes
if r.Configuration.SslMode != nil {
- var sourceAlloydbSSLModesDisable *shared.SourceAlloydbSSLModesDisable
- if r.Configuration.SslMode.SourceAlloydbSSLModesDisable != nil {
- mode := shared.SourceAlloydbSSLModesDisableMode(r.Configuration.SslMode.SourceAlloydbSSLModesDisable.Mode.ValueString())
+ var sourceAlloydbDisable *shared.SourceAlloydbDisable
+ if r.Configuration.SslMode.Disable != nil {
var additionalProperties1 interface{}
- if !r.Configuration.SslMode.SourceAlloydbSSLModesDisable.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesDisable.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbSSLModesDisable.AdditionalProperties.ValueString()), &additionalProperties1)
+ if !r.Configuration.SslMode.Disable.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Disable.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Disable.AdditionalProperties.ValueString()), &additionalProperties1)
}
- sourceAlloydbSSLModesDisable = &shared.SourceAlloydbSSLModesDisable{
- Mode: mode,
+ sourceAlloydbDisable = &shared.SourceAlloydbDisable{
AdditionalProperties: additionalProperties1,
}
}
- if sourceAlloydbSSLModesDisable != nil {
+ if sourceAlloydbDisable != nil {
sslMode = &shared.SourceAlloydbSSLModes{
- SourceAlloydbSSLModesDisable: sourceAlloydbSSLModesDisable,
+ SourceAlloydbDisable: sourceAlloydbDisable,
}
}
- var sourceAlloydbSSLModesAllow *shared.SourceAlloydbSSLModesAllow
- if r.Configuration.SslMode.SourceAlloydbSSLModesAllow != nil {
- mode1 := shared.SourceAlloydbSSLModesAllowMode(r.Configuration.SslMode.SourceAlloydbSSLModesAllow.Mode.ValueString())
+ var sourceAlloydbAllow *shared.SourceAlloydbAllow
+ if r.Configuration.SslMode.Allow != nil {
var additionalProperties2 interface{}
- if !r.Configuration.SslMode.SourceAlloydbSSLModesAllow.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesAllow.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbSSLModesAllow.AdditionalProperties.ValueString()), &additionalProperties2)
+ if !r.Configuration.SslMode.Allow.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Allow.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Allow.AdditionalProperties.ValueString()), &additionalProperties2)
}
- sourceAlloydbSSLModesAllow = &shared.SourceAlloydbSSLModesAllow{
- Mode: mode1,
+ sourceAlloydbAllow = &shared.SourceAlloydbAllow{
AdditionalProperties: additionalProperties2,
}
}
- if sourceAlloydbSSLModesAllow != nil {
+ if sourceAlloydbAllow != nil {
sslMode = &shared.SourceAlloydbSSLModes{
- SourceAlloydbSSLModesAllow: sourceAlloydbSSLModesAllow,
+ SourceAlloydbAllow: sourceAlloydbAllow,
}
}
- var sourceAlloydbSSLModesPrefer *shared.SourceAlloydbSSLModesPrefer
- if r.Configuration.SslMode.SourceAlloydbSSLModesPrefer != nil {
- mode2 := shared.SourceAlloydbSSLModesPreferMode(r.Configuration.SslMode.SourceAlloydbSSLModesPrefer.Mode.ValueString())
+ var sourceAlloydbPrefer *shared.SourceAlloydbPrefer
+ if r.Configuration.SslMode.Prefer != nil {
var additionalProperties3 interface{}
- if !r.Configuration.SslMode.SourceAlloydbSSLModesPrefer.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesPrefer.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbSSLModesPrefer.AdditionalProperties.ValueString()), &additionalProperties3)
+ if !r.Configuration.SslMode.Prefer.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Prefer.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Prefer.AdditionalProperties.ValueString()), &additionalProperties3)
}
- sourceAlloydbSSLModesPrefer = &shared.SourceAlloydbSSLModesPrefer{
- Mode: mode2,
+ sourceAlloydbPrefer = &shared.SourceAlloydbPrefer{
AdditionalProperties: additionalProperties3,
}
}
- if sourceAlloydbSSLModesPrefer != nil {
+ if sourceAlloydbPrefer != nil {
sslMode = &shared.SourceAlloydbSSLModes{
- SourceAlloydbSSLModesPrefer: sourceAlloydbSSLModesPrefer,
+ SourceAlloydbPrefer: sourceAlloydbPrefer,
}
}
- var sourceAlloydbSSLModesRequire *shared.SourceAlloydbSSLModesRequire
- if r.Configuration.SslMode.SourceAlloydbSSLModesRequire != nil {
- mode3 := shared.SourceAlloydbSSLModesRequireMode(r.Configuration.SslMode.SourceAlloydbSSLModesRequire.Mode.ValueString())
+ var sourceAlloydbRequire *shared.SourceAlloydbRequire
+ if r.Configuration.SslMode.Require != nil {
var additionalProperties4 interface{}
- if !r.Configuration.SslMode.SourceAlloydbSSLModesRequire.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesRequire.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbSSLModesRequire.AdditionalProperties.ValueString()), &additionalProperties4)
+ if !r.Configuration.SslMode.Require.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Require.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Require.AdditionalProperties.ValueString()), &additionalProperties4)
}
- sourceAlloydbSSLModesRequire = &shared.SourceAlloydbSSLModesRequire{
- Mode: mode3,
+ sourceAlloydbRequire = &shared.SourceAlloydbRequire{
AdditionalProperties: additionalProperties4,
}
}
- if sourceAlloydbSSLModesRequire != nil {
+ if sourceAlloydbRequire != nil {
sslMode = &shared.SourceAlloydbSSLModes{
- SourceAlloydbSSLModesRequire: sourceAlloydbSSLModesRequire,
+ SourceAlloydbRequire: sourceAlloydbRequire,
}
}
- var sourceAlloydbSSLModesVerifyCa *shared.SourceAlloydbSSLModesVerifyCa
- if r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa != nil {
- caCertificate := r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.CaCertificate.ValueString()
+ var sourceAlloydbVerifyCa *shared.SourceAlloydbVerifyCa
+ if r.Configuration.SslMode.VerifyCa != nil {
+ var additionalProperties5 interface{}
+ if !r.Configuration.SslMode.VerifyCa.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.VerifyCa.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.VerifyCa.AdditionalProperties.ValueString()), &additionalProperties5)
+ }
+ caCertificate := r.Configuration.SslMode.VerifyCa.CaCertificate.ValueString()
clientCertificate := new(string)
- if !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.ClientCertificate.IsNull() {
- *clientCertificate = r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientCertificate.IsNull() {
+ *clientCertificate = r.Configuration.SslMode.VerifyCa.ClientCertificate.ValueString()
} else {
clientCertificate = nil
}
clientKey := new(string)
- if !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.ClientKey.IsNull() {
- *clientKey = r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientKey.IsNull() {
+ *clientKey = r.Configuration.SslMode.VerifyCa.ClientKey.ValueString()
} else {
clientKey = nil
}
clientKeyPassword := new(string)
- if !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.ClientKeyPassword.IsNull() {
- *clientKeyPassword = r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsNull() {
+ *clientKeyPassword = r.Configuration.SslMode.VerifyCa.ClientKeyPassword.ValueString()
} else {
clientKeyPassword = nil
}
- mode4 := shared.SourceAlloydbSSLModesVerifyCaMode(r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.Mode.ValueString())
- var additionalProperties5 interface{}
- if !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbSSLModesVerifyCa.AdditionalProperties.ValueString()), &additionalProperties5)
- }
- sourceAlloydbSSLModesVerifyCa = &shared.SourceAlloydbSSLModesVerifyCa{
+ sourceAlloydbVerifyCa = &shared.SourceAlloydbVerifyCa{
+ AdditionalProperties: additionalProperties5,
CaCertificate: caCertificate,
ClientCertificate: clientCertificate,
ClientKey: clientKey,
ClientKeyPassword: clientKeyPassword,
- Mode: mode4,
- AdditionalProperties: additionalProperties5,
}
}
- if sourceAlloydbSSLModesVerifyCa != nil {
+ if sourceAlloydbVerifyCa != nil {
sslMode = &shared.SourceAlloydbSSLModes{
- SourceAlloydbSSLModesVerifyCa: sourceAlloydbSSLModesVerifyCa,
+ SourceAlloydbVerifyCa: sourceAlloydbVerifyCa,
}
}
- var sourceAlloydbSSLModesVerifyFull *shared.SourceAlloydbSSLModesVerifyFull
- if r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull != nil {
- caCertificate1 := r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.CaCertificate.ValueString()
+ var sourceAlloydbVerifyFull *shared.SourceAlloydbVerifyFull
+ if r.Configuration.SslMode.VerifyFull != nil {
+ var additionalProperties6 interface{}
+ if !r.Configuration.SslMode.VerifyFull.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.VerifyFull.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.VerifyFull.AdditionalProperties.ValueString()), &additionalProperties6)
+ }
+ caCertificate1 := r.Configuration.SslMode.VerifyFull.CaCertificate.ValueString()
clientCertificate1 := new(string)
- if !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.ClientCertificate.IsNull() {
- *clientCertificate1 = r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientCertificate.IsNull() {
+ *clientCertificate1 = r.Configuration.SslMode.VerifyFull.ClientCertificate.ValueString()
} else {
clientCertificate1 = nil
}
clientKey1 := new(string)
- if !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.ClientKey.IsNull() {
- *clientKey1 = r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKey.IsNull() {
+ *clientKey1 = r.Configuration.SslMode.VerifyFull.ClientKey.ValueString()
} else {
clientKey1 = nil
}
clientKeyPassword1 := new(string)
- if !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.ClientKeyPassword.IsNull() {
- *clientKeyPassword1 = r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsNull() {
+ *clientKeyPassword1 = r.Configuration.SslMode.VerifyFull.ClientKeyPassword.ValueString()
} else {
clientKeyPassword1 = nil
}
- mode5 := shared.SourceAlloydbSSLModesVerifyFullMode(r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.Mode.ValueString())
- var additionalProperties6 interface{}
- if !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbSSLModesVerifyFull.AdditionalProperties.ValueString()), &additionalProperties6)
- }
- sourceAlloydbSSLModesVerifyFull = &shared.SourceAlloydbSSLModesVerifyFull{
+ sourceAlloydbVerifyFull = &shared.SourceAlloydbVerifyFull{
+ AdditionalProperties: additionalProperties6,
CaCertificate: caCertificate1,
ClientCertificate: clientCertificate1,
ClientKey: clientKey1,
ClientKeyPassword: clientKeyPassword1,
- Mode: mode5,
- AdditionalProperties: additionalProperties6,
}
}
- if sourceAlloydbSSLModesVerifyFull != nil {
+ if sourceAlloydbVerifyFull != nil {
sslMode = &shared.SourceAlloydbSSLModes{
- SourceAlloydbSSLModesVerifyFull: sourceAlloydbSSLModesVerifyFull,
+ SourceAlloydbVerifyFull: sourceAlloydbVerifyFull,
}
}
}
var tunnelMethod *shared.SourceAlloydbSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourceAlloydbSSHTunnelMethodNoTunnel *shared.SourceAlloydbSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourceAlloydbSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourceAlloydbSSHTunnelMethodNoTunnel = &shared.SourceAlloydbSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourceAlloydbNoTunnel *shared.SourceAlloydbNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourceAlloydbNoTunnel = &shared.SourceAlloydbNoTunnel{}
}
- if sourceAlloydbSSHTunnelMethodNoTunnel != nil {
+ if sourceAlloydbNoTunnel != nil {
tunnelMethod = &shared.SourceAlloydbSSHTunnelMethod{
- SourceAlloydbSSHTunnelMethodNoTunnel: sourceAlloydbSSHTunnelMethodNoTunnel,
+ SourceAlloydbNoTunnel: sourceAlloydbNoTunnel,
}
}
- var sourceAlloydbSSHTunnelMethodSSHKeyAuthentication *shared.SourceAlloydbSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourceAlloydbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourceAlloydbSSHTunnelMethodSSHKeyAuthentication = &shared.SourceAlloydbSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourceAlloydbSSHKeyAuthentication *shared.SourceAlloydbSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourceAlloydbSSHKeyAuthentication = &shared.SourceAlloydbSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourceAlloydbSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourceAlloydbSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourceAlloydbSSHTunnelMethod{
- SourceAlloydbSSHTunnelMethodSSHKeyAuthentication: sourceAlloydbSSHTunnelMethodSSHKeyAuthentication,
+ SourceAlloydbSSHKeyAuthentication: sourceAlloydbSSHKeyAuthentication,
}
}
- var sourceAlloydbSSHTunnelMethodPasswordAuthentication *shared.SourceAlloydbSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourceAlloydbSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourceAlloydbSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourceAlloydbSSHTunnelMethodPasswordAuthentication = &shared.SourceAlloydbSSHTunnelMethodPasswordAuthentication{
+ var sourceAlloydbPasswordAuthentication *shared.SourceAlloydbPasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourceAlloydbPasswordAuthentication = &shared.SourceAlloydbPasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourceAlloydbSSHTunnelMethodPasswordAuthentication != nil {
+ if sourceAlloydbPasswordAuthentication != nil {
tunnelMethod = &shared.SourceAlloydbSSHTunnelMethod{
- SourceAlloydbSSHTunnelMethodPasswordAuthentication: sourceAlloydbSSHTunnelMethodPasswordAuthentication,
+ SourceAlloydbPasswordAuthentication: sourceAlloydbPasswordAuthentication,
}
}
}
@@ -320,11 +307,16 @@ func (r *SourceAlloydbResourceModel) ToCreateSDKType() *shared.SourceAlloydbCrea
Port: port,
ReplicationMethod: replicationMethod,
Schemas: schemas,
- SourceType: sourceType,
SslMode: sslMode,
TunnelMethod: tunnelMethod,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -335,6 +327,7 @@ func (r *SourceAlloydbResourceModel) ToCreateSDKType() *shared.SourceAlloydbCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAlloydbCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -362,80 +355,77 @@ func (r *SourceAlloydbResourceModel) ToUpdateSDKType() *shared.SourceAlloydbPutR
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
- var replicationMethod *shared.SourceAlloydbUpdateReplicationMethod
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ var replicationMethod *shared.ReplicationMethod
if r.Configuration.ReplicationMethod != nil {
- var sourceAlloydbUpdateReplicationMethodStandardXmin *shared.SourceAlloydbUpdateReplicationMethodStandardXmin
- if r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodStandardXmin != nil {
- method := shared.SourceAlloydbUpdateReplicationMethodStandardXminMethod(r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodStandardXmin.Method.ValueString())
- sourceAlloydbUpdateReplicationMethodStandardXmin = &shared.SourceAlloydbUpdateReplicationMethodStandardXmin{
- Method: method,
- }
+ var standardXmin *shared.StandardXmin
+ if r.Configuration.ReplicationMethod.StandardXmin != nil {
+ standardXmin = &shared.StandardXmin{}
}
- if sourceAlloydbUpdateReplicationMethodStandardXmin != nil {
- replicationMethod = &shared.SourceAlloydbUpdateReplicationMethod{
- SourceAlloydbUpdateReplicationMethodStandardXmin: sourceAlloydbUpdateReplicationMethodStandardXmin,
+ if standardXmin != nil {
+ replicationMethod = &shared.ReplicationMethod{
+ StandardXmin: standardXmin,
}
}
- var sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC *shared.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC
- if r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC != nil {
+ var logicalReplicationCDC *shared.LogicalReplicationCDC
+ if r.Configuration.ReplicationMethod.LogicalReplicationCDC != nil {
+ var additionalProperties interface{}
+ if !r.Configuration.ReplicationMethod.LogicalReplicationCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.LogicalReplicationCDC.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.LogicalReplicationCDC.AdditionalProperties.ValueString()), &additionalProperties)
+ }
initialWaitingSeconds := new(int64)
- if !r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsNull() {
- *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.ValueInt64()
+ if !r.Configuration.ReplicationMethod.LogicalReplicationCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.LogicalReplicationCDC.InitialWaitingSeconds.IsNull() {
+ *initialWaitingSeconds = r.Configuration.ReplicationMethod.LogicalReplicationCDC.InitialWaitingSeconds.ValueInt64()
} else {
initialWaitingSeconds = nil
}
- lsnCommitBehaviour := new(shared.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour)
- if !r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.IsNull() {
- *lsnCommitBehaviour = shared.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour(r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.ValueString())
+ lsnCommitBehaviour := new(shared.LSNCommitBehaviour)
+ if !r.Configuration.ReplicationMethod.LogicalReplicationCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.LogicalReplicationCDC.LsnCommitBehaviour.IsNull() {
+ *lsnCommitBehaviour = shared.LSNCommitBehaviour(r.Configuration.ReplicationMethod.LogicalReplicationCDC.LsnCommitBehaviour.ValueString())
} else {
lsnCommitBehaviour = nil
}
- method1 := shared.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCMethod(r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.Method.ValueString())
- plugin := new(shared.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPlugin)
- if !r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.Plugin.IsNull() {
- *plugin = shared.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPlugin(r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.Plugin.ValueString())
+ plugin := new(shared.Plugin)
+ if !r.Configuration.ReplicationMethod.LogicalReplicationCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.LogicalReplicationCDC.Plugin.IsNull() {
+ *plugin = shared.Plugin(r.Configuration.ReplicationMethod.LogicalReplicationCDC.Plugin.ValueString())
} else {
plugin = nil
}
- publication := r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.Publication.ValueString()
+ publication := r.Configuration.ReplicationMethod.LogicalReplicationCDC.Publication.ValueString()
queueSize := new(int64)
- if !r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.QueueSize.IsNull() {
- *queueSize = r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.QueueSize.ValueInt64()
+ if !r.Configuration.ReplicationMethod.LogicalReplicationCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.LogicalReplicationCDC.QueueSize.IsNull() {
+ *queueSize = r.Configuration.ReplicationMethod.LogicalReplicationCDC.QueueSize.ValueInt64()
} else {
queueSize = nil
}
- replicationSlot := r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.ReplicationSlot.ValueString()
- var additionalProperties interface{}
- if !r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC.AdditionalProperties.ValueString()), &additionalProperties)
- }
- sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC = &shared.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC{
+ replicationSlot := r.Configuration.ReplicationMethod.LogicalReplicationCDC.ReplicationSlot.ValueString()
+ logicalReplicationCDC = &shared.LogicalReplicationCDC{
+ AdditionalProperties: additionalProperties,
InitialWaitingSeconds: initialWaitingSeconds,
LsnCommitBehaviour: lsnCommitBehaviour,
- Method: method1,
Plugin: plugin,
Publication: publication,
QueueSize: queueSize,
ReplicationSlot: replicationSlot,
- AdditionalProperties: additionalProperties,
}
}
- if sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC != nil {
- replicationMethod = &shared.SourceAlloydbUpdateReplicationMethod{
- SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC: sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC,
+ if logicalReplicationCDC != nil {
+ replicationMethod = &shared.ReplicationMethod{
+ LogicalReplicationCDC: logicalReplicationCDC,
}
}
- var sourceAlloydbUpdateReplicationMethodStandard *shared.SourceAlloydbUpdateReplicationMethodStandard
- if r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodStandard != nil {
- method2 := shared.SourceAlloydbUpdateReplicationMethodStandardMethod(r.Configuration.ReplicationMethod.SourceAlloydbUpdateReplicationMethodStandard.Method.ValueString())
- sourceAlloydbUpdateReplicationMethodStandard = &shared.SourceAlloydbUpdateReplicationMethodStandard{
- Method: method2,
- }
+ var sourceAlloydbUpdateStandard *shared.SourceAlloydbUpdateStandard
+ if r.Configuration.ReplicationMethod.Standard != nil {
+ sourceAlloydbUpdateStandard = &shared.SourceAlloydbUpdateStandard{}
}
- if sourceAlloydbUpdateReplicationMethodStandard != nil {
- replicationMethod = &shared.SourceAlloydbUpdateReplicationMethod{
- SourceAlloydbUpdateReplicationMethodStandard: sourceAlloydbUpdateReplicationMethodStandard,
+ if sourceAlloydbUpdateStandard != nil {
+ replicationMethod = &shared.ReplicationMethod{
+ SourceAlloydbUpdateStandard: sourceAlloydbUpdateStandard,
}
}
}
@@ -445,207 +435,198 @@ func (r *SourceAlloydbResourceModel) ToUpdateSDKType() *shared.SourceAlloydbPutR
}
var sslMode *shared.SourceAlloydbUpdateSSLModes
if r.Configuration.SslMode != nil {
- var sourceAlloydbUpdateSSLModesDisable *shared.SourceAlloydbUpdateSSLModesDisable
- if r.Configuration.SslMode.SourceAlloydbUpdateSSLModesDisable != nil {
- mode := shared.SourceAlloydbUpdateSSLModesDisableMode(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesDisable.Mode.ValueString())
+ var sourceAlloydbUpdateDisable *shared.SourceAlloydbUpdateDisable
+ if r.Configuration.SslMode.Disable != nil {
var additionalProperties1 interface{}
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesDisable.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesDisable.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesDisable.AdditionalProperties.ValueString()), &additionalProperties1)
+ if !r.Configuration.SslMode.Disable.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Disable.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Disable.AdditionalProperties.ValueString()), &additionalProperties1)
}
- sourceAlloydbUpdateSSLModesDisable = &shared.SourceAlloydbUpdateSSLModesDisable{
- Mode: mode,
+ sourceAlloydbUpdateDisable = &shared.SourceAlloydbUpdateDisable{
AdditionalProperties: additionalProperties1,
}
}
- if sourceAlloydbUpdateSSLModesDisable != nil {
+ if sourceAlloydbUpdateDisable != nil {
sslMode = &shared.SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesDisable: sourceAlloydbUpdateSSLModesDisable,
+ SourceAlloydbUpdateDisable: sourceAlloydbUpdateDisable,
}
}
- var sourceAlloydbUpdateSSLModesAllow *shared.SourceAlloydbUpdateSSLModesAllow
- if r.Configuration.SslMode.SourceAlloydbUpdateSSLModesAllow != nil {
- mode1 := shared.SourceAlloydbUpdateSSLModesAllowMode(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesAllow.Mode.ValueString())
+ var sourceAlloydbUpdateAllow *shared.SourceAlloydbUpdateAllow
+ if r.Configuration.SslMode.Allow != nil {
var additionalProperties2 interface{}
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesAllow.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesAllow.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesAllow.AdditionalProperties.ValueString()), &additionalProperties2)
+ if !r.Configuration.SslMode.Allow.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Allow.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Allow.AdditionalProperties.ValueString()), &additionalProperties2)
}
- sourceAlloydbUpdateSSLModesAllow = &shared.SourceAlloydbUpdateSSLModesAllow{
- Mode: mode1,
+ sourceAlloydbUpdateAllow = &shared.SourceAlloydbUpdateAllow{
AdditionalProperties: additionalProperties2,
}
}
- if sourceAlloydbUpdateSSLModesAllow != nil {
+ if sourceAlloydbUpdateAllow != nil {
sslMode = &shared.SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesAllow: sourceAlloydbUpdateSSLModesAllow,
+ SourceAlloydbUpdateAllow: sourceAlloydbUpdateAllow,
}
}
- var sourceAlloydbUpdateSSLModesPrefer *shared.SourceAlloydbUpdateSSLModesPrefer
- if r.Configuration.SslMode.SourceAlloydbUpdateSSLModesPrefer != nil {
- mode2 := shared.SourceAlloydbUpdateSSLModesPreferMode(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesPrefer.Mode.ValueString())
+ var sourceAlloydbUpdatePrefer *shared.SourceAlloydbUpdatePrefer
+ if r.Configuration.SslMode.Prefer != nil {
var additionalProperties3 interface{}
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesPrefer.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesPrefer.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesPrefer.AdditionalProperties.ValueString()), &additionalProperties3)
+ if !r.Configuration.SslMode.Prefer.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Prefer.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Prefer.AdditionalProperties.ValueString()), &additionalProperties3)
}
- sourceAlloydbUpdateSSLModesPrefer = &shared.SourceAlloydbUpdateSSLModesPrefer{
- Mode: mode2,
+ sourceAlloydbUpdatePrefer = &shared.SourceAlloydbUpdatePrefer{
AdditionalProperties: additionalProperties3,
}
}
- if sourceAlloydbUpdateSSLModesPrefer != nil {
+ if sourceAlloydbUpdatePrefer != nil {
sslMode = &shared.SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesPrefer: sourceAlloydbUpdateSSLModesPrefer,
+ SourceAlloydbUpdatePrefer: sourceAlloydbUpdatePrefer,
}
}
- var sourceAlloydbUpdateSSLModesRequire *shared.SourceAlloydbUpdateSSLModesRequire
- if r.Configuration.SslMode.SourceAlloydbUpdateSSLModesRequire != nil {
- mode3 := shared.SourceAlloydbUpdateSSLModesRequireMode(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesRequire.Mode.ValueString())
+ var sourceAlloydbUpdateRequire *shared.SourceAlloydbUpdateRequire
+ if r.Configuration.SslMode.Require != nil {
var additionalProperties4 interface{}
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesRequire.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesRequire.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesRequire.AdditionalProperties.ValueString()), &additionalProperties4)
+ if !r.Configuration.SslMode.Require.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Require.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Require.AdditionalProperties.ValueString()), &additionalProperties4)
}
- sourceAlloydbUpdateSSLModesRequire = &shared.SourceAlloydbUpdateSSLModesRequire{
- Mode: mode3,
+ sourceAlloydbUpdateRequire = &shared.SourceAlloydbUpdateRequire{
AdditionalProperties: additionalProperties4,
}
}
- if sourceAlloydbUpdateSSLModesRequire != nil {
+ if sourceAlloydbUpdateRequire != nil {
sslMode = &shared.SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesRequire: sourceAlloydbUpdateSSLModesRequire,
+ SourceAlloydbUpdateRequire: sourceAlloydbUpdateRequire,
}
}
- var sourceAlloydbUpdateSSLModesVerifyCa *shared.SourceAlloydbUpdateSSLModesVerifyCa
- if r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa != nil {
- caCertificate := r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.CaCertificate.ValueString()
+ var sourceAlloydbUpdateVerifyCa *shared.SourceAlloydbUpdateVerifyCa
+ if r.Configuration.SslMode.VerifyCa != nil {
+ var additionalProperties5 interface{}
+ if !r.Configuration.SslMode.VerifyCa.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.VerifyCa.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.VerifyCa.AdditionalProperties.ValueString()), &additionalProperties5)
+ }
+ caCertificate := r.Configuration.SslMode.VerifyCa.CaCertificate.ValueString()
clientCertificate := new(string)
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.ClientCertificate.IsNull() {
- *clientCertificate = r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientCertificate.IsNull() {
+ *clientCertificate = r.Configuration.SslMode.VerifyCa.ClientCertificate.ValueString()
} else {
clientCertificate = nil
}
clientKey := new(string)
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.ClientKey.IsNull() {
- *clientKey = r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientKey.IsNull() {
+ *clientKey = r.Configuration.SslMode.VerifyCa.ClientKey.ValueString()
} else {
clientKey = nil
}
clientKeyPassword := new(string)
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.ClientKeyPassword.IsNull() {
- *clientKeyPassword = r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsNull() {
+ *clientKeyPassword = r.Configuration.SslMode.VerifyCa.ClientKeyPassword.ValueString()
} else {
clientKeyPassword = nil
}
- mode4 := shared.SourceAlloydbUpdateSSLModesVerifyCaMode(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.Mode.ValueString())
- var additionalProperties5 interface{}
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyCa.AdditionalProperties.ValueString()), &additionalProperties5)
- }
- sourceAlloydbUpdateSSLModesVerifyCa = &shared.SourceAlloydbUpdateSSLModesVerifyCa{
+ sourceAlloydbUpdateVerifyCa = &shared.SourceAlloydbUpdateVerifyCa{
+ AdditionalProperties: additionalProperties5,
CaCertificate: caCertificate,
ClientCertificate: clientCertificate,
ClientKey: clientKey,
ClientKeyPassword: clientKeyPassword,
- Mode: mode4,
- AdditionalProperties: additionalProperties5,
}
}
- if sourceAlloydbUpdateSSLModesVerifyCa != nil {
+ if sourceAlloydbUpdateVerifyCa != nil {
sslMode = &shared.SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesVerifyCa: sourceAlloydbUpdateSSLModesVerifyCa,
+ SourceAlloydbUpdateVerifyCa: sourceAlloydbUpdateVerifyCa,
}
}
- var sourceAlloydbUpdateSSLModesVerifyFull *shared.SourceAlloydbUpdateSSLModesVerifyFull
- if r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull != nil {
- caCertificate1 := r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.CaCertificate.ValueString()
+ var sourceAlloydbUpdateVerifyFull *shared.SourceAlloydbUpdateVerifyFull
+ if r.Configuration.SslMode.VerifyFull != nil {
+ var additionalProperties6 interface{}
+ if !r.Configuration.SslMode.VerifyFull.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.VerifyFull.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.VerifyFull.AdditionalProperties.ValueString()), &additionalProperties6)
+ }
+ caCertificate1 := r.Configuration.SslMode.VerifyFull.CaCertificate.ValueString()
clientCertificate1 := new(string)
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.ClientCertificate.IsNull() {
- *clientCertificate1 = r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientCertificate.IsNull() {
+ *clientCertificate1 = r.Configuration.SslMode.VerifyFull.ClientCertificate.ValueString()
} else {
clientCertificate1 = nil
}
clientKey1 := new(string)
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.ClientKey.IsNull() {
- *clientKey1 = r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKey.IsNull() {
+ *clientKey1 = r.Configuration.SslMode.VerifyFull.ClientKey.ValueString()
} else {
clientKey1 = nil
}
clientKeyPassword1 := new(string)
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.ClientKeyPassword.IsNull() {
- *clientKeyPassword1 = r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsNull() {
+ *clientKeyPassword1 = r.Configuration.SslMode.VerifyFull.ClientKeyPassword.ValueString()
} else {
clientKeyPassword1 = nil
}
- mode5 := shared.SourceAlloydbUpdateSSLModesVerifyFullMode(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.Mode.ValueString())
- var additionalProperties6 interface{}
- if !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourceAlloydbUpdateSSLModesVerifyFull.AdditionalProperties.ValueString()), &additionalProperties6)
- }
- sourceAlloydbUpdateSSLModesVerifyFull = &shared.SourceAlloydbUpdateSSLModesVerifyFull{
+ sourceAlloydbUpdateVerifyFull = &shared.SourceAlloydbUpdateVerifyFull{
+ AdditionalProperties: additionalProperties6,
CaCertificate: caCertificate1,
ClientCertificate: clientCertificate1,
ClientKey: clientKey1,
ClientKeyPassword: clientKeyPassword1,
- Mode: mode5,
- AdditionalProperties: additionalProperties6,
}
}
- if sourceAlloydbUpdateSSLModesVerifyFull != nil {
+ if sourceAlloydbUpdateVerifyFull != nil {
sslMode = &shared.SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesVerifyFull: sourceAlloydbUpdateSSLModesVerifyFull,
+ SourceAlloydbUpdateVerifyFull: sourceAlloydbUpdateVerifyFull,
}
}
}
var tunnelMethod *shared.SourceAlloydbUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourceAlloydbUpdateSSHTunnelMethodNoTunnel *shared.SourceAlloydbUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourceAlloydbUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourceAlloydbUpdateSSHTunnelMethodNoTunnel = &shared.SourceAlloydbUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourceAlloydbUpdateNoTunnel *shared.SourceAlloydbUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourceAlloydbUpdateNoTunnel = &shared.SourceAlloydbUpdateNoTunnel{}
}
- if sourceAlloydbUpdateSSHTunnelMethodNoTunnel != nil {
+ if sourceAlloydbUpdateNoTunnel != nil {
tunnelMethod = &shared.SourceAlloydbUpdateSSHTunnelMethod{
- SourceAlloydbUpdateSSHTunnelMethodNoTunnel: sourceAlloydbUpdateSSHTunnelMethodNoTunnel,
+ SourceAlloydbUpdateNoTunnel: sourceAlloydbUpdateNoTunnel,
}
}
- var sourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication *shared.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourceAlloydbUpdateSSHKeyAuthentication *shared.SourceAlloydbUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourceAlloydbUpdateSSHKeyAuthentication = &shared.SourceAlloydbUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourceAlloydbUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourceAlloydbUpdateSSHTunnelMethod{
- SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication: sourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication,
+ SourceAlloydbUpdateSSHKeyAuthentication: sourceAlloydbUpdateSSHKeyAuthentication,
}
}
- var sourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication *shared.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication = &shared.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication{
+ var sourceAlloydbUpdatePasswordAuthentication *shared.SourceAlloydbUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourceAlloydbUpdatePasswordAuthentication = &shared.SourceAlloydbUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if sourceAlloydbUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.SourceAlloydbUpdateSSHTunnelMethod{
- SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication: sourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication,
+ SourceAlloydbUpdatePasswordAuthentication: sourceAlloydbUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/source_amazonads_data_source.go b/internal/provider/source_amazonads_data_source.go
old mode 100755
new mode 100644
index 7642e27f6..bd6cd49e2
--- a/internal/provider/source_amazonads_data_source.go
+++ b/internal/provider/source_amazonads_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceAmazonAdsDataSource struct {
// SourceAmazonAdsDataSourceModel describes the data model.
type SourceAmazonAdsDataSourceModel struct {
- Configuration SourceAmazonAds `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,91 +47,20 @@ func (r *SourceAmazonAdsDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceAmazonAds DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The client ID of your Amazon Ads developer application. See the docs for more information.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret of your Amazon Ads developer application. See the docs for more information.`,
- },
- "look_back_window": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of days to go back in time to get the updated data from Amazon Ads`,
- },
- "marketplace_ids": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.`,
- },
- "profiles": schema.ListAttribute{
- Computed: true,
- ElementType: types.Int64Type,
- Description: `Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Amazon Ads refresh token. See the docs for more information on how to obtain this token.`,
- },
- "region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NA",
- "EU",
- "FE",
- ),
- },
- MarkdownDescription: `must be one of ["NA", "EU", "FE"]` + "\n" +
- `Region to pull data from (EU/NA/FE). See docs for more details.`,
- },
- "report_record_types": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `Optional configuration which accepts an array of string of record types. Leave blank for default behaviour to pull all report types. Use this config option only if you want to pull specific report type(s). See docs for more details`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "amazon-ads",
- ),
- },
- Description: `must be one of ["amazon-ads"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format`,
- },
- "state_filter": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `Reflects the state of the Display, Product, and Brand Campaign streams as enabled, paused, or archived. If you do not populate this field, it will be ignored completely.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_amazonads_data_source_sdk.go b/internal/provider/source_amazonads_data_source_sdk.go
old mode 100755
new mode 100644
index ddc485c59..70605f5db
--- a/internal/provider/source_amazonads_data_source_sdk.go
+++ b/internal/provider/source_amazonads_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAmazonAdsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_amazonads_resource.go b/internal/provider/source_amazonads_resource.go
old mode 100755
new mode 100644
index 7a38ce68f..3cd7a4a8e
--- a/internal/provider/source_amazonads_resource.go
+++ b/internal/provider/source_amazonads_resource.go
@@ -3,17 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +37,7 @@ type SourceAmazonAdsResource struct {
// SourceAmazonAdsResourceModel describes the resource data model.
type SourceAmazonAdsResourceModel struct {
Configuration SourceAmazonAds `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -54,15 +57,6 @@ func (r *SourceAmazonAdsResource) Schema(ctx context.Context, req resource.Schem
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The client ID of your Amazon Ads developer application. See the docs for more information.`,
@@ -72,8 +66,9 @@ func (r *SourceAmazonAdsResource) Schema(ctx context.Context, req resource.Schem
Description: `The client secret of your Amazon Ads developer application. See the docs for more information.`,
},
"look_back_window": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of days to go back in time to get the updated data from Amazon Ads`,
+ Optional: true,
+ MarkdownDescription: `Default: 3` + "\n" +
+ `The amount of days to go back in time to get the updated data from Amazon Ads`,
},
"marketplace_ids": schema.ListAttribute{
Optional: true,
@@ -87,10 +82,13 @@ func (r *SourceAmazonAdsResource) Schema(ctx context.Context, req resource.Schem
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Amazon Ads refresh token. See the docs for more information on how to obtain this token.`,
},
"region": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["NA", "EU", "FE"]; Default: "NA"` + "\n" +
+ `Region to pull data from (EU/NA/FE). See docs for more details.`,
Validators: []validator.String{
stringvalidator.OneOf(
"NA",
@@ -98,26 +96,18 @@ func (r *SourceAmazonAdsResource) Schema(ctx context.Context, req resource.Schem
"FE",
),
},
- MarkdownDescription: `must be one of ["NA", "EU", "FE"]` + "\n" +
- `Region to pull data from (EU/NA/FE). See docs for more details.`,
},
"report_record_types": schema.ListAttribute{
Optional: true,
ElementType: types.StringType,
Description: `Optional configuration which accepts an array of string of record types. Leave blank for default behaviour to pull all report types. Use this config option only if you want to pull specific report type(s). See docs for more details`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "amazon-ads",
- ),
- },
- Description: `must be one of ["amazon-ads"]`,
- },
"start_date": schema.StringAttribute{
Optional: true,
Description: `The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format`,
+ Validators: []validator.String{
+ validators.IsValidDate(),
+ },
},
"state_filter": schema.ListAttribute{
Optional: true,
@@ -126,13 +116,24 @@ func (r *SourceAmazonAdsResource) Schema(ctx context.Context, req resource.Schem
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -196,7 +197,7 @@ func (r *SourceAmazonAdsResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAmazonAds(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -372,5 +373,5 @@ func (r *SourceAmazonAdsResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceAmazonAdsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_amazonads_resource_sdk.go b/internal/provider/source_amazonads_resource_sdk.go
old mode 100755
new mode 100644
index 698480e47..cd6027d9b
--- a/internal/provider/source_amazonads_resource_sdk.go
+++ b/internal/provider/source_amazonads_resource_sdk.go
@@ -3,17 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAmazonAdsResourceModel) ToCreateSDKType() *shared.SourceAmazonAdsCreateRequest {
- authType := new(shared.SourceAmazonAdsAuthType)
- if !r.Configuration.AuthType.IsUnknown() && !r.Configuration.AuthType.IsNull() {
- *authType = shared.SourceAmazonAdsAuthType(r.Configuration.AuthType.ValueString())
- } else {
- authType = nil
- }
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
lookBackWindow := new(int64)
@@ -41,10 +36,9 @@ func (r *SourceAmazonAdsResourceModel) ToCreateSDKType() *shared.SourceAmazonAds
for _, reportRecordTypesItem := range r.Configuration.ReportRecordTypes {
reportRecordTypes = append(reportRecordTypes, shared.SourceAmazonAdsReportRecordTypes(reportRecordTypesItem.ValueString()))
}
- sourceType := shared.SourceAmazonAdsAmazonAds(r.Configuration.SourceType.ValueString())
- startDate := new(string)
+ startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
- *startDate = r.Configuration.StartDate.ValueString()
+ startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
} else {
startDate = nil
}
@@ -53,7 +47,6 @@ func (r *SourceAmazonAdsResourceModel) ToCreateSDKType() *shared.SourceAmazonAds
stateFilter = append(stateFilter, shared.SourceAmazonAdsStateFilter(stateFilterItem.ValueString()))
}
configuration := shared.SourceAmazonAds{
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
LookBackWindow: lookBackWindow,
@@ -62,10 +55,15 @@ func (r *SourceAmazonAdsResourceModel) ToCreateSDKType() *shared.SourceAmazonAds
RefreshToken: refreshToken,
Region: region,
ReportRecordTypes: reportRecordTypes,
- SourceType: sourceType,
StartDate: startDate,
StateFilter: stateFilter,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -76,6 +74,7 @@ func (r *SourceAmazonAdsResourceModel) ToCreateSDKType() *shared.SourceAmazonAds
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAmazonAdsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -89,12 +88,6 @@ func (r *SourceAmazonAdsResourceModel) ToGetSDKType() *shared.SourceAmazonAdsCre
}
func (r *SourceAmazonAdsResourceModel) ToUpdateSDKType() *shared.SourceAmazonAdsPutRequest {
- authType := new(shared.SourceAmazonAdsUpdateAuthType)
- if !r.Configuration.AuthType.IsUnknown() && !r.Configuration.AuthType.IsNull() {
- *authType = shared.SourceAmazonAdsUpdateAuthType(r.Configuration.AuthType.ValueString())
- } else {
- authType = nil
- }
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
lookBackWindow := new(int64)
@@ -112,28 +105,27 @@ func (r *SourceAmazonAdsResourceModel) ToUpdateSDKType() *shared.SourceAmazonAds
profiles = append(profiles, profilesItem.ValueInt64())
}
refreshToken := r.Configuration.RefreshToken.ValueString()
- region := new(shared.SourceAmazonAdsUpdateRegion)
+ region := new(shared.Region)
if !r.Configuration.Region.IsUnknown() && !r.Configuration.Region.IsNull() {
- *region = shared.SourceAmazonAdsUpdateRegion(r.Configuration.Region.ValueString())
+ *region = shared.Region(r.Configuration.Region.ValueString())
} else {
region = nil
}
- var reportRecordTypes []shared.SourceAmazonAdsUpdateReportRecordTypes = nil
+ var reportRecordTypes []shared.ReportRecordTypes = nil
for _, reportRecordTypesItem := range r.Configuration.ReportRecordTypes {
- reportRecordTypes = append(reportRecordTypes, shared.SourceAmazonAdsUpdateReportRecordTypes(reportRecordTypesItem.ValueString()))
+ reportRecordTypes = append(reportRecordTypes, shared.ReportRecordTypes(reportRecordTypesItem.ValueString()))
}
- startDate := new(string)
+ startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
- *startDate = r.Configuration.StartDate.ValueString()
+ startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
} else {
startDate = nil
}
- var stateFilter []shared.SourceAmazonAdsUpdateStateFilter = nil
+ var stateFilter []shared.StateFilter = nil
for _, stateFilterItem := range r.Configuration.StateFilter {
- stateFilter = append(stateFilter, shared.SourceAmazonAdsUpdateStateFilter(stateFilterItem.ValueString()))
+ stateFilter = append(stateFilter, shared.StateFilter(stateFilterItem.ValueString()))
}
configuration := shared.SourceAmazonAdsUpdate{
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
LookBackWindow: lookBackWindow,
diff --git a/internal/provider/source_amazonsellerpartner_data_source.go b/internal/provider/source_amazonsellerpartner_data_source.go
old mode 100755
new mode 100644
index 8b5bf553e..a09057989
--- a/internal/provider/source_amazonsellerpartner_data_source.go
+++ b/internal/provider/source_amazonsellerpartner_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceAmazonSellerPartnerDataSource struct {
// SourceAmazonSellerPartnerDataSourceModel describes the data model.
type SourceAmazonSellerPartnerDataSourceModel struct {
- Configuration SourceAmazonSellerPartner `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,129 +47,20 @@ func (r *SourceAmazonSellerPartnerDataSource) Schema(ctx context.Context, req da
MarkdownDescription: "SourceAmazonSellerPartner DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "advanced_stream_options": schema.StringAttribute{
- Computed: true,
- Description: `Additional information to configure report options. This varies by report type, not every report implement this kind of feature. Must be a valid json string.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "aws_access_key": schema.StringAttribute{
- Computed: true,
- Description: `Specifies the AWS access key used as part of the credentials to authenticate the user.`,
- },
- "aws_environment": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "PRODUCTION",
- "SANDBOX",
- ),
- },
- MarkdownDescription: `must be one of ["PRODUCTION", "SANDBOX"]` + "\n" +
- `Select the AWS Environment.`,
- },
- "aws_secret_key": schema.StringAttribute{
- Computed: true,
- Description: `Specifies the AWS secret key used as part of the credentials to authenticate the user.`,
- },
- "lwa_app_id": schema.StringAttribute{
- Computed: true,
- Description: `Your Login with Amazon Client ID.`,
- },
- "lwa_client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Your Login with Amazon Client Secret.`,
- },
- "max_wait_seconds": schema.Int64Attribute{
- Computed: true,
- Description: `Sometimes report can take up to 30 minutes to generate. This will set the limit for how long to wait for a successful report.`,
- },
- "period_in_days": schema.Int64Attribute{
- Computed: true,
- Description: `Will be used for stream slicing for initial full_refresh sync when no updated state is present for reports that support sliced incremental sync.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The Refresh Token obtained via OAuth flow authorization.`,
- },
- "region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AE",
- "AU",
- "BE",
- "BR",
- "CA",
- "DE",
- "EG",
- "ES",
- "FR",
- "GB",
- "IN",
- "IT",
- "JP",
- "MX",
- "NL",
- "PL",
- "SA",
- "SE",
- "SG",
- "TR",
- "UK",
- "US",
- ),
- },
- MarkdownDescription: `must be one of ["AE", "AU", "BE", "BR", "CA", "DE", "EG", "ES", "FR", "GB", "IN", "IT", "JP", "MX", "NL", "PL", "SA", "SE", "SG", "TR", "UK", "US"]` + "\n" +
- `Select the AWS Region.`,
- },
- "replication_end_date": schema.StringAttribute{
- Computed: true,
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.`,
- },
- "replication_start_date": schema.StringAttribute{
- Computed: true,
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- "report_options": schema.StringAttribute{
- Computed: true,
- Description: `Additional information passed to reports. This varies by report type. Must be a valid json string.`,
- },
- "role_arn": schema.StringAttribute{
- Computed: true,
- Description: `Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. (Needs permission to 'Assume Role' STS).`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "amazon-seller-partner",
- ),
- },
- Description: `must be one of ["amazon-seller-partner"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_amazonsellerpartner_data_source_sdk.go b/internal/provider/source_amazonsellerpartner_data_source_sdk.go
old mode 100755
new mode 100644
index 17d274f8c..21e88efcc
--- a/internal/provider/source_amazonsellerpartner_data_source_sdk.go
+++ b/internal/provider/source_amazonsellerpartner_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAmazonSellerPartnerDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_amazonsellerpartner_resource.go b/internal/provider/source_amazonsellerpartner_resource.go
old mode 100755
new mode 100644
index 754933011..2e3b471b8
--- a/internal/provider/source_amazonsellerpartner_resource.go
+++ b/internal/provider/source_amazonsellerpartner_resource.go
@@ -3,17 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +37,7 @@ type SourceAmazonSellerPartnerResource struct {
// SourceAmazonSellerPartnerResourceModel describes the resource data model.
type SourceAmazonSellerPartnerResourceModel struct {
Configuration SourceAmazonSellerPartner `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -54,37 +57,31 @@ func (r *SourceAmazonSellerPartnerResource) Schema(ctx context.Context, req reso
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "advanced_stream_options": schema.StringAttribute{
- Optional: true,
- Description: `Additional information to configure report options. This varies by report type, not every report implement this kind of feature. Must be a valid json string.`,
- },
- "auth_type": schema.StringAttribute{
+ "account_type": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["Seller", "Vendor"]; Default: "Seller"` + "\n" +
+ `Type of the Account you're going to authorize the Airbyte application by`,
Validators: []validator.String{
stringvalidator.OneOf(
- "oauth2.0",
+ "Seller",
+ "Vendor",
),
},
- Description: `must be one of ["oauth2.0"]`,
},
- "aws_access_key": schema.StringAttribute{
+ "advanced_stream_options": schema.StringAttribute{
Optional: true,
- Description: `Specifies the AWS access key used as part of the credentials to authenticate the user.`,
+ Description: `Additional information to configure report options. This varies by report type, not every report implement this kind of feature. Must be a valid json string.`,
},
"aws_environment": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["PRODUCTION", "SANDBOX"]; Default: "PRODUCTION"` + "\n" +
+ `Select the AWS Environment.`,
Validators: []validator.String{
stringvalidator.OneOf(
"PRODUCTION",
"SANDBOX",
),
},
- MarkdownDescription: `must be one of ["PRODUCTION", "SANDBOX"]` + "\n" +
- `Select the AWS Environment.`,
- },
- "aws_secret_key": schema.StringAttribute{
- Optional: true,
- Description: `Specifies the AWS secret key used as part of the credentials to authenticate the user.`,
},
"lwa_app_id": schema.StringAttribute{
Required: true,
@@ -94,20 +91,20 @@ func (r *SourceAmazonSellerPartnerResource) Schema(ctx context.Context, req reso
Required: true,
Description: `Your Login with Amazon Client Secret.`,
},
- "max_wait_seconds": schema.Int64Attribute{
- Optional: true,
- Description: `Sometimes report can take up to 30 minutes to generate. This will set the limit for how long to wait for a successful report.`,
- },
"period_in_days": schema.Int64Attribute{
- Optional: true,
- Description: `Will be used for stream slicing for initial full_refresh sync when no updated state is present for reports that support sliced incremental sync.`,
+ Optional: true,
+ MarkdownDescription: `Default: 90` + "\n" +
+ `Will be used for stream slicing for initial full_refresh sync when no updated state is present for reports that support sliced incremental sync.`,
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Refresh Token obtained via OAuth flow authorization.`,
},
"region": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["AE", "AU", "BE", "BR", "CA", "DE", "EG", "ES", "FR", "GB", "IN", "IT", "JP", "MX", "NL", "PL", "SA", "SE", "SG", "TR", "UK", "US"]; Default: "US"` + "\n" +
+ `Select the AWS Region.`,
Validators: []validator.String{
stringvalidator.OneOf(
"AE",
@@ -134,43 +131,45 @@ func (r *SourceAmazonSellerPartnerResource) Schema(ctx context.Context, req reso
"US",
),
},
- MarkdownDescription: `must be one of ["AE", "AU", "BE", "BR", "CA", "DE", "EG", "ES", "FR", "GB", "IN", "IT", "JP", "MX", "NL", "PL", "SA", "SE", "SG", "TR", "UK", "US"]` + "\n" +
- `Select the AWS Region.`,
},
"replication_end_date": schema.StringAttribute{
Optional: true,
Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.`,
+ Validators: []validator.String{
+ validators.IsRFC3339(),
+ },
},
"replication_start_date": schema.StringAttribute{
Required: true,
Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
+ Validators: []validator.String{
+ validators.IsRFC3339(),
+ },
},
"report_options": schema.StringAttribute{
Optional: true,
Description: `Additional information passed to reports. This varies by report type. Must be a valid json string.`,
},
- "role_arn": schema.StringAttribute{
- Optional: true,
- Description: `Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. (Needs permission to 'Assume Role' STS).`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "amazon-seller-partner",
- ),
- },
- Description: `must be one of ["amazon-seller-partner"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -234,7 +233,7 @@ func (r *SourceAmazonSellerPartnerResource) Create(ctx context.Context, req reso
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAmazonSellerPartner(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -410,5 +409,5 @@ func (r *SourceAmazonSellerPartnerResource) Delete(ctx context.Context, req reso
}
func (r *SourceAmazonSellerPartnerResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_amazonsellerpartner_resource_sdk.go b/internal/provider/source_amazonsellerpartner_resource_sdk.go
old mode 100755
new mode 100644
index 0932ea21f..df7271d25
--- a/internal/provider/source_amazonsellerpartner_resource_sdk.go
+++ b/internal/provider/source_amazonsellerpartner_resource_sdk.go
@@ -3,44 +3,32 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
+ "time"
)
func (r *SourceAmazonSellerPartnerResourceModel) ToCreateSDKType() *shared.SourceAmazonSellerPartnerCreateRequest {
+ accountType := new(shared.SourceAmazonSellerPartnerAWSSellerPartnerAccountType)
+ if !r.Configuration.AccountType.IsUnknown() && !r.Configuration.AccountType.IsNull() {
+ *accountType = shared.SourceAmazonSellerPartnerAWSSellerPartnerAccountType(r.Configuration.AccountType.ValueString())
+ } else {
+ accountType = nil
+ }
advancedStreamOptions := new(string)
if !r.Configuration.AdvancedStreamOptions.IsUnknown() && !r.Configuration.AdvancedStreamOptions.IsNull() {
*advancedStreamOptions = r.Configuration.AdvancedStreamOptions.ValueString()
} else {
advancedStreamOptions = nil
}
- authType := new(shared.SourceAmazonSellerPartnerAuthType)
- if !r.Configuration.AuthType.IsUnknown() && !r.Configuration.AuthType.IsNull() {
- *authType = shared.SourceAmazonSellerPartnerAuthType(r.Configuration.AuthType.ValueString())
- } else {
- authType = nil
- }
- awsAccessKey := new(string)
- if !r.Configuration.AwsAccessKey.IsUnknown() && !r.Configuration.AwsAccessKey.IsNull() {
- *awsAccessKey = r.Configuration.AwsAccessKey.ValueString()
+ awsEnvironment := new(shared.SourceAmazonSellerPartnerAWSEnvironment)
+ if !r.Configuration.AwsEnvironment.IsUnknown() && !r.Configuration.AwsEnvironment.IsNull() {
+ *awsEnvironment = shared.SourceAmazonSellerPartnerAWSEnvironment(r.Configuration.AwsEnvironment.ValueString())
} else {
- awsAccessKey = nil
- }
- awsEnvironment := shared.SourceAmazonSellerPartnerAWSEnvironment(r.Configuration.AwsEnvironment.ValueString())
- awsSecretKey := new(string)
- if !r.Configuration.AwsSecretKey.IsUnknown() && !r.Configuration.AwsSecretKey.IsNull() {
- *awsSecretKey = r.Configuration.AwsSecretKey.ValueString()
- } else {
- awsSecretKey = nil
+ awsEnvironment = nil
}
lwaAppID := r.Configuration.LwaAppID.ValueString()
lwaClientSecret := r.Configuration.LwaClientSecret.ValueString()
- maxWaitSeconds := new(int64)
- if !r.Configuration.MaxWaitSeconds.IsUnknown() && !r.Configuration.MaxWaitSeconds.IsNull() {
- *maxWaitSeconds = r.Configuration.MaxWaitSeconds.ValueInt64()
- } else {
- maxWaitSeconds = nil
- }
periodInDays := new(int64)
if !r.Configuration.PeriodInDays.IsUnknown() && !r.Configuration.PeriodInDays.IsNull() {
*periodInDays = r.Configuration.PeriodInDays.ValueInt64()
@@ -48,44 +36,43 @@ func (r *SourceAmazonSellerPartnerResourceModel) ToCreateSDKType() *shared.Sourc
periodInDays = nil
}
refreshToken := r.Configuration.RefreshToken.ValueString()
- region := shared.SourceAmazonSellerPartnerAWSRegion(r.Configuration.Region.ValueString())
- replicationEndDate := new(string)
+ region := new(shared.SourceAmazonSellerPartnerAWSRegion)
+ if !r.Configuration.Region.IsUnknown() && !r.Configuration.Region.IsNull() {
+ *region = shared.SourceAmazonSellerPartnerAWSRegion(r.Configuration.Region.ValueString())
+ } else {
+ region = nil
+ }
+ replicationEndDate := new(time.Time)
if !r.Configuration.ReplicationEndDate.IsUnknown() && !r.Configuration.ReplicationEndDate.IsNull() {
- *replicationEndDate = r.Configuration.ReplicationEndDate.ValueString()
+ *replicationEndDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.ReplicationEndDate.ValueString())
} else {
replicationEndDate = nil
}
- replicationStartDate := r.Configuration.ReplicationStartDate.ValueString()
+ replicationStartDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.ReplicationStartDate.ValueString())
reportOptions := new(string)
if !r.Configuration.ReportOptions.IsUnknown() && !r.Configuration.ReportOptions.IsNull() {
*reportOptions = r.Configuration.ReportOptions.ValueString()
} else {
reportOptions = nil
}
- roleArn := new(string)
- if !r.Configuration.RoleArn.IsUnknown() && !r.Configuration.RoleArn.IsNull() {
- *roleArn = r.Configuration.RoleArn.ValueString()
- } else {
- roleArn = nil
- }
- sourceType := shared.SourceAmazonSellerPartnerAmazonSellerPartner(r.Configuration.SourceType.ValueString())
configuration := shared.SourceAmazonSellerPartner{
+ AccountType: accountType,
AdvancedStreamOptions: advancedStreamOptions,
- AuthType: authType,
- AwsAccessKey: awsAccessKey,
AwsEnvironment: awsEnvironment,
- AwsSecretKey: awsSecretKey,
LwaAppID: lwaAppID,
LwaClientSecret: lwaClientSecret,
- MaxWaitSeconds: maxWaitSeconds,
PeriodInDays: periodInDays,
RefreshToken: refreshToken,
Region: region,
ReplicationEndDate: replicationEndDate,
ReplicationStartDate: replicationStartDate,
ReportOptions: reportOptions,
- RoleArn: roleArn,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -97,6 +84,7 @@ func (r *SourceAmazonSellerPartnerResourceModel) ToCreateSDKType() *shared.Sourc
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAmazonSellerPartnerCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -110,39 +98,26 @@ func (r *SourceAmazonSellerPartnerResourceModel) ToGetSDKType() *shared.SourceAm
}
func (r *SourceAmazonSellerPartnerResourceModel) ToUpdateSDKType() *shared.SourceAmazonSellerPartnerPutRequest {
+ accountType := new(shared.AWSSellerPartnerAccountType)
+ if !r.Configuration.AccountType.IsUnknown() && !r.Configuration.AccountType.IsNull() {
+ *accountType = shared.AWSSellerPartnerAccountType(r.Configuration.AccountType.ValueString())
+ } else {
+ accountType = nil
+ }
advancedStreamOptions := new(string)
if !r.Configuration.AdvancedStreamOptions.IsUnknown() && !r.Configuration.AdvancedStreamOptions.IsNull() {
*advancedStreamOptions = r.Configuration.AdvancedStreamOptions.ValueString()
} else {
advancedStreamOptions = nil
}
- authType := new(shared.SourceAmazonSellerPartnerUpdateAuthType)
- if !r.Configuration.AuthType.IsUnknown() && !r.Configuration.AuthType.IsNull() {
- *authType = shared.SourceAmazonSellerPartnerUpdateAuthType(r.Configuration.AuthType.ValueString())
- } else {
- authType = nil
- }
- awsAccessKey := new(string)
- if !r.Configuration.AwsAccessKey.IsUnknown() && !r.Configuration.AwsAccessKey.IsNull() {
- *awsAccessKey = r.Configuration.AwsAccessKey.ValueString()
- } else {
- awsAccessKey = nil
- }
- awsEnvironment := shared.SourceAmazonSellerPartnerUpdateAWSEnvironment(r.Configuration.AwsEnvironment.ValueString())
- awsSecretKey := new(string)
- if !r.Configuration.AwsSecretKey.IsUnknown() && !r.Configuration.AwsSecretKey.IsNull() {
- *awsSecretKey = r.Configuration.AwsSecretKey.ValueString()
+ awsEnvironment := new(shared.AWSEnvironment)
+ if !r.Configuration.AwsEnvironment.IsUnknown() && !r.Configuration.AwsEnvironment.IsNull() {
+ *awsEnvironment = shared.AWSEnvironment(r.Configuration.AwsEnvironment.ValueString())
} else {
- awsSecretKey = nil
+ awsEnvironment = nil
}
lwaAppID := r.Configuration.LwaAppID.ValueString()
lwaClientSecret := r.Configuration.LwaClientSecret.ValueString()
- maxWaitSeconds := new(int64)
- if !r.Configuration.MaxWaitSeconds.IsUnknown() && !r.Configuration.MaxWaitSeconds.IsNull() {
- *maxWaitSeconds = r.Configuration.MaxWaitSeconds.ValueInt64()
- } else {
- maxWaitSeconds = nil
- }
periodInDays := new(int64)
if !r.Configuration.PeriodInDays.IsUnknown() && !r.Configuration.PeriodInDays.IsNull() {
*periodInDays = r.Configuration.PeriodInDays.ValueInt64()
@@ -150,42 +125,37 @@ func (r *SourceAmazonSellerPartnerResourceModel) ToUpdateSDKType() *shared.Sourc
periodInDays = nil
}
refreshToken := r.Configuration.RefreshToken.ValueString()
- region := shared.SourceAmazonSellerPartnerUpdateAWSRegion(r.Configuration.Region.ValueString())
- replicationEndDate := new(string)
+ region := new(shared.AWSRegion)
+ if !r.Configuration.Region.IsUnknown() && !r.Configuration.Region.IsNull() {
+ *region = shared.AWSRegion(r.Configuration.Region.ValueString())
+ } else {
+ region = nil
+ }
+ replicationEndDate := new(time.Time)
if !r.Configuration.ReplicationEndDate.IsUnknown() && !r.Configuration.ReplicationEndDate.IsNull() {
- *replicationEndDate = r.Configuration.ReplicationEndDate.ValueString()
+ *replicationEndDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.ReplicationEndDate.ValueString())
} else {
replicationEndDate = nil
}
- replicationStartDate := r.Configuration.ReplicationStartDate.ValueString()
+ replicationStartDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.ReplicationStartDate.ValueString())
reportOptions := new(string)
if !r.Configuration.ReportOptions.IsUnknown() && !r.Configuration.ReportOptions.IsNull() {
*reportOptions = r.Configuration.ReportOptions.ValueString()
} else {
reportOptions = nil
}
- roleArn := new(string)
- if !r.Configuration.RoleArn.IsUnknown() && !r.Configuration.RoleArn.IsNull() {
- *roleArn = r.Configuration.RoleArn.ValueString()
- } else {
- roleArn = nil
- }
configuration := shared.SourceAmazonSellerPartnerUpdate{
+ AccountType: accountType,
AdvancedStreamOptions: advancedStreamOptions,
- AuthType: authType,
- AwsAccessKey: awsAccessKey,
AwsEnvironment: awsEnvironment,
- AwsSecretKey: awsSecretKey,
LwaAppID: lwaAppID,
LwaClientSecret: lwaClientSecret,
- MaxWaitSeconds: maxWaitSeconds,
PeriodInDays: periodInDays,
RefreshToken: refreshToken,
Region: region,
ReplicationEndDate: replicationEndDate,
ReplicationStartDate: replicationStartDate,
ReportOptions: reportOptions,
- RoleArn: roleArn,
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
diff --git a/internal/provider/source_amazonsqs_data_source.go b/internal/provider/source_amazonsqs_data_source.go
old mode 100755
new mode 100644
index db965566c..e1d3b8363
--- a/internal/provider/source_amazonsqs_data_source.go
+++ b/internal/provider/source_amazonsqs_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceAmazonSqsDataSource struct {
// SourceAmazonSqsDataSourceModel describes the data model.
type SourceAmazonSqsDataSourceModel struct {
- Configuration SourceAmazonSqs `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,96 +47,20 @@ func (r *SourceAmazonSqsDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceAmazonSqs DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key": schema.StringAttribute{
- Computed: true,
- Description: `The Access Key ID of the AWS IAM Role to use for pulling messages`,
- },
- "attributes_to_return": schema.StringAttribute{
- Computed: true,
- Description: `Comma separated list of Mesage Attribute names to return`,
- },
- "delete_messages": schema.BoolAttribute{
- Computed: true,
- Description: `If Enabled, messages will be deleted from the SQS Queue after being read. If Disabled, messages are left in the queue and can be read more than once. WARNING: Enabling this option can result in data loss in cases of failure, use with caution, see documentation for more detail. `,
- },
- "max_batch_size": schema.Int64Attribute{
- Computed: true,
- Description: `Max amount of messages to get in one batch (10 max)`,
- },
- "max_wait_time": schema.Int64Attribute{
- Computed: true,
- Description: `Max amount of time in seconds to wait for messages in a single poll (20 max)`,
- },
- "queue_url": schema.StringAttribute{
- Computed: true,
- Description: `URL of the SQS Queue`,
- },
- "region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- "us-gov-east-1",
- "us-gov-west-1",
- ),
- },
- MarkdownDescription: `must be one of ["us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `AWS Region of the SQS Queue`,
- },
- "secret_key": schema.StringAttribute{
- Computed: true,
- Description: `The Secret Key of the AWS IAM Role to use for pulling messages`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "amazon-sqs",
- ),
- },
- Description: `must be one of ["amazon-sqs"]`,
- },
- "visibility_timeout": schema.Int64Attribute{
- Computed: true,
- Description: `Modify the Visibility Timeout of the individual message from the Queue's default (seconds).`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_amazonsqs_data_source_sdk.go b/internal/provider/source_amazonsqs_data_source_sdk.go
old mode 100755
new mode 100644
index 5bbcc460b..74e60dc17
--- a/internal/provider/source_amazonsqs_data_source_sdk.go
+++ b/internal/provider/source_amazonsqs_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAmazonSqsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_amazonsqs_resource.go b/internal/provider/source_amazonsqs_resource.go
old mode 100755
new mode 100644
index 32b869e03..04c61d56f
--- a/internal/provider/source_amazonsqs_resource.go
+++ b/internal/provider/source_amazonsqs_resource.go
@@ -3,17 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +36,7 @@ type SourceAmazonSqsResource struct {
// SourceAmazonSqsResourceModel describes the resource data model.
type SourceAmazonSqsResourceModel struct {
Configuration SourceAmazonSqs `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,6 +58,7 @@ func (r *SourceAmazonSqsResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"access_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The Access Key ID of the AWS IAM Role to use for pulling messages`,
},
"attributes_to_return": schema.StringAttribute{
@@ -63,8 +66,9 @@ func (r *SourceAmazonSqsResource) Schema(ctx context.Context, req resource.Schem
Description: `Comma separated list of Mesage Attribute names to return`,
},
"delete_messages": schema.BoolAttribute{
- Required: true,
- Description: `If Enabled, messages will be deleted from the SQS Queue after being read. If Disabled, messages are left in the queue and can be read more than once. WARNING: Enabling this option can result in data loss in cases of failure, use with caution, see documentation for more detail. `,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `If Enabled, messages will be deleted from the SQS Queue after being read. If Disabled, messages are left in the queue and can be read more than once. WARNING: Enabling this option can result in data loss in cases of failure, use with caution, see documentation for more detail. `,
},
"max_batch_size": schema.Int64Attribute{
Optional: true,
@@ -80,6 +84,8 @@ func (r *SourceAmazonSqsResource) Schema(ctx context.Context, req resource.Schem
},
"region": schema.StringAttribute{
Required: true,
+ MarkdownDescription: `must be one of ["us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
+ `AWS Region of the SQS Queue`,
Validators: []validator.String{
stringvalidator.OneOf(
"us-east-1",
@@ -109,35 +115,36 @@ func (r *SourceAmazonSqsResource) Schema(ctx context.Context, req resource.Schem
"us-gov-west-1",
),
},
- MarkdownDescription: `must be one of ["us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `AWS Region of the SQS Queue`,
},
"secret_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The Secret Key of the AWS IAM Role to use for pulling messages`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "amazon-sqs",
- ),
- },
- Description: `must be one of ["amazon-sqs"]`,
- },
"visibility_timeout": schema.Int64Attribute{
Optional: true,
Description: `Modify the Visibility Timeout of the individual message from the Queue's default (seconds).`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -201,7 +208,7 @@ func (r *SourceAmazonSqsResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAmazonSqs(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -377,5 +384,5 @@ func (r *SourceAmazonSqsResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceAmazonSqsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_amazonsqs_resource_sdk.go b/internal/provider/source_amazonsqs_resource_sdk.go
old mode 100755
new mode 100644
index 19a2695ec..9166360d7
--- a/internal/provider/source_amazonsqs_resource_sdk.go
+++ b/internal/provider/source_amazonsqs_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -20,7 +20,12 @@ func (r *SourceAmazonSqsResourceModel) ToCreateSDKType() *shared.SourceAmazonSqs
} else {
attributesToReturn = nil
}
- deleteMessages := r.Configuration.DeleteMessages.ValueBool()
+ deleteMessages := new(bool)
+ if !r.Configuration.DeleteMessages.IsUnknown() && !r.Configuration.DeleteMessages.IsNull() {
+ *deleteMessages = r.Configuration.DeleteMessages.ValueBool()
+ } else {
+ deleteMessages = nil
+ }
maxBatchSize := new(int64)
if !r.Configuration.MaxBatchSize.IsUnknown() && !r.Configuration.MaxBatchSize.IsNull() {
*maxBatchSize = r.Configuration.MaxBatchSize.ValueInt64()
@@ -41,7 +46,6 @@ func (r *SourceAmazonSqsResourceModel) ToCreateSDKType() *shared.SourceAmazonSqs
} else {
secretKey = nil
}
- sourceType := shared.SourceAmazonSqsAmazonSqs(r.Configuration.SourceType.ValueString())
visibilityTimeout := new(int64)
if !r.Configuration.VisibilityTimeout.IsUnknown() && !r.Configuration.VisibilityTimeout.IsNull() {
*visibilityTimeout = r.Configuration.VisibilityTimeout.ValueInt64()
@@ -57,9 +61,14 @@ func (r *SourceAmazonSqsResourceModel) ToCreateSDKType() *shared.SourceAmazonSqs
QueueURL: queueURL,
Region: region,
SecretKey: secretKey,
- SourceType: sourceType,
VisibilityTimeout: visibilityTimeout,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -70,6 +79,7 @@ func (r *SourceAmazonSqsResourceModel) ToCreateSDKType() *shared.SourceAmazonSqs
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAmazonSqsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -95,7 +105,12 @@ func (r *SourceAmazonSqsResourceModel) ToUpdateSDKType() *shared.SourceAmazonSqs
} else {
attributesToReturn = nil
}
- deleteMessages := r.Configuration.DeleteMessages.ValueBool()
+ deleteMessages := new(bool)
+ if !r.Configuration.DeleteMessages.IsUnknown() && !r.Configuration.DeleteMessages.IsNull() {
+ *deleteMessages = r.Configuration.DeleteMessages.ValueBool()
+ } else {
+ deleteMessages = nil
+ }
maxBatchSize := new(int64)
if !r.Configuration.MaxBatchSize.IsUnknown() && !r.Configuration.MaxBatchSize.IsNull() {
*maxBatchSize = r.Configuration.MaxBatchSize.ValueInt64()
diff --git a/internal/provider/source_amplitude_data_source.go b/internal/provider/source_amplitude_data_source.go
old mode 100755
new mode 100644
index b51db4eab..e7e1894be
--- a/internal/provider/source_amplitude_data_source.go
+++ b/internal/provider/source_amplitude_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceAmplitudeDataSource struct {
// SourceAmplitudeDataSourceModel describes the data model.
type SourceAmplitudeDataSourceModel struct {
- Configuration SourceAmplitude `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,57 +47,20 @@ func (r *SourceAmplitudeDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceAmplitude DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Amplitude API Key. See the setup guide for more information on how to obtain this key.`,
- },
- "data_region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard Server",
- "EU Residency Server",
- ),
- },
- MarkdownDescription: `must be one of ["Standard Server", "EU Residency Server"]` + "\n" +
- `Amplitude data region server`,
- },
- "request_time_range": schema.Int64Attribute{
- Computed: true,
- Description: `According to Considerations too big time range in request can cause a timeout error. In this case, set shorter time interval in hours.`,
- },
- "secret_key": schema.StringAttribute{
- Computed: true,
- Description: `Amplitude Secret Key. See the setup guide for more information on how to obtain this key.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "amplitude",
- ),
- },
- Description: `must be one of ["amplitude"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_amplitude_data_source_sdk.go b/internal/provider/source_amplitude_data_source_sdk.go
old mode 100755
new mode 100644
index 057ecc9f9..c9808abb8
--- a/internal/provider/source_amplitude_data_source_sdk.go
+++ b/internal/provider/source_amplitude_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAmplitudeDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_amplitude_resource.go b/internal/provider/source_amplitude_resource.go
old mode 100755
new mode 100644
index 63b798eea..094c97029
--- a/internal/provider/source_amplitude_resource.go
+++ b/internal/provider/source_amplitude_resource.go
@@ -3,17 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +37,7 @@ type SourceAmplitudeResource struct {
// SourceAmplitudeResourceModel describes the resource data model.
type SourceAmplitudeResourceModel struct {
Configuration SourceAmplitude `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,49 +59,57 @@ func (r *SourceAmplitudeResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Amplitude API Key. See the setup guide for more information on how to obtain this key.`,
},
"data_region": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["Standard Server", "EU Residency Server"]; Default: "Standard Server"` + "\n" +
+ `Amplitude data region server`,
Validators: []validator.String{
stringvalidator.OneOf(
"Standard Server",
"EU Residency Server",
),
},
- MarkdownDescription: `must be one of ["Standard Server", "EU Residency Server"]` + "\n" +
- `Amplitude data region server`,
},
"request_time_range": schema.Int64Attribute{
- Optional: true,
- Description: `According to Considerations too big time range in request can cause a timeout error. In this case, set shorter time interval in hours.`,
+ Optional: true,
+ MarkdownDescription: `Default: 24` + "\n" +
+ `According to Considerations too big time range in request can cause a timeout error. In this case, set shorter time interval in hours.`,
},
"secret_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Amplitude Secret Key. See the setup guide for more information on how to obtain this key.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "amplitude",
- ),
- },
- Description: `must be one of ["amplitude"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.`,
+ Validators: []validator.String{
+ validators.IsRFC3339(),
+ },
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -162,7 +173,7 @@ func (r *SourceAmplitudeResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAmplitude(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -338,5 +349,5 @@ func (r *SourceAmplitudeResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceAmplitudeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_amplitude_resource_sdk.go b/internal/provider/source_amplitude_resource_sdk.go
old mode 100755
new mode 100644
index 4c31e479a..af57787da
--- a/internal/provider/source_amplitude_resource_sdk.go
+++ b/internal/provider/source_amplitude_resource_sdk.go
@@ -3,8 +3,9 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
+ "time"
)
func (r *SourceAmplitudeResourceModel) ToCreateSDKType() *shared.SourceAmplitudeCreateRequest {
@@ -22,16 +23,20 @@ func (r *SourceAmplitudeResourceModel) ToCreateSDKType() *shared.SourceAmplitude
requestTimeRange = nil
}
secretKey := r.Configuration.SecretKey.ValueString()
- sourceType := shared.SourceAmplitudeAmplitude(r.Configuration.SourceType.ValueString())
- startDate := r.Configuration.StartDate.ValueString()
+ startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceAmplitude{
APIKey: apiKey,
DataRegion: dataRegion,
RequestTimeRange: requestTimeRange,
SecretKey: secretKey,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -42,6 +47,7 @@ func (r *SourceAmplitudeResourceModel) ToCreateSDKType() *shared.SourceAmplitude
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAmplitudeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -56,9 +62,9 @@ func (r *SourceAmplitudeResourceModel) ToGetSDKType() *shared.SourceAmplitudeCre
func (r *SourceAmplitudeResourceModel) ToUpdateSDKType() *shared.SourceAmplitudePutRequest {
apiKey := r.Configuration.APIKey.ValueString()
- dataRegion := new(shared.SourceAmplitudeUpdateDataRegion)
+ dataRegion := new(shared.DataRegion)
if !r.Configuration.DataRegion.IsUnknown() && !r.Configuration.DataRegion.IsNull() {
- *dataRegion = shared.SourceAmplitudeUpdateDataRegion(r.Configuration.DataRegion.ValueString())
+ *dataRegion = shared.DataRegion(r.Configuration.DataRegion.ValueString())
} else {
dataRegion = nil
}
@@ -69,7 +75,7 @@ func (r *SourceAmplitudeResourceModel) ToUpdateSDKType() *shared.SourceAmplitude
requestTimeRange = nil
}
secretKey := r.Configuration.SecretKey.ValueString()
- startDate := r.Configuration.StartDate.ValueString()
+ startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceAmplitudeUpdate{
APIKey: apiKey,
DataRegion: dataRegion,
diff --git a/internal/provider/source_apifydataset_data_source.go b/internal/provider/source_apifydataset_data_source.go
old mode 100755
new mode 100644
index 50312adf6..294a9969c
--- a/internal/provider/source_apifydataset_data_source.go
+++ b/internal/provider/source_apifydataset_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceApifyDatasetDataSource struct {
// SourceApifyDatasetDataSourceModel describes the data model.
type SourceApifyDatasetDataSourceModel struct {
- Configuration SourceApifyDataset `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,42 +47,20 @@ func (r *SourceApifyDatasetDataSource) Schema(ctx context.Context, req datasourc
MarkdownDescription: "SourceApifyDataset DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "clean": schema.BoolAttribute{
- Computed: true,
- Description: `If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false.`,
- },
- "dataset_id": schema.StringAttribute{
- Computed: true,
- Description: `ID of the dataset you would like to load to Airbyte.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "apify-dataset",
- ),
- },
- Description: `must be one of ["apify-dataset"]`,
- },
- "token": schema.StringAttribute{
- Computed: true,
- Description: `Your application's Client Secret. You can find this value on the console integrations tab after you login.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_apifydataset_data_source_sdk.go b/internal/provider/source_apifydataset_data_source_sdk.go
old mode 100755
new mode 100644
index 334ac9d5f..7b169a415
--- a/internal/provider/source_apifydataset_data_source_sdk.go
+++ b/internal/provider/source_apifydataset_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceApifyDatasetDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_apifydataset_resource.go b/internal/provider/source_apifydataset_resource.go
old mode 100755
new mode 100644
index 95eba777a..f30b7c91e
--- a/internal/provider/source_apifydataset_resource.go
+++ b/internal/provider/source_apifydataset_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceApifyDatasetResource struct {
// SourceApifyDatasetResourceModel describes the resource data model.
type SourceApifyDatasetResourceModel struct {
Configuration SourceApifyDataset `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -54,36 +54,35 @@ func (r *SourceApifyDatasetResource) Schema(ctx context.Context, req resource.Sc
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "clean": schema.BoolAttribute{
- Optional: true,
- Description: `If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false.`,
- },
"dataset_id": schema.StringAttribute{
- Optional: true,
- Description: `ID of the dataset you would like to load to Airbyte.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "apify-dataset",
- ),
- },
- Description: `must be one of ["apify-dataset"]`,
+ Required: true,
+ Description: `ID of the dataset you would like to load to Airbyte. In Apify Console, you can view your datasets in the Storage section under the Datasets tab after you login. See the Apify Docs for more information.`,
},
"token": schema.StringAttribute{
Required: true,
- Description: `Your application's Client Secret. You can find this value on the console integrations tab after you login.`,
+ Sensitive: true,
+ Description: `Personal API token of your Apify account. In Apify Console, you can find your API token in the Settings section under the Integrations tab after you login. See the Apify Docs for more information.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +146,7 @@ func (r *SourceApifyDatasetResource) Create(ctx context.Context, req resource.Cr
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceApifyDataset(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +322,5 @@ func (r *SourceApifyDatasetResource) Delete(ctx context.Context, req resource.De
}
func (r *SourceApifyDatasetResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_apifydataset_resource_sdk.go b/internal/provider/source_apifydataset_resource_sdk.go
old mode 100755
new mode 100644
index 42e8502df..de77457e7
--- a/internal/provider/source_apifydataset_resource_sdk.go
+++ b/internal/provider/source_apifydataset_resource_sdk.go
@@ -3,30 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceApifyDatasetResourceModel) ToCreateSDKType() *shared.SourceApifyDatasetCreateRequest {
- clean := new(bool)
- if !r.Configuration.Clean.IsUnknown() && !r.Configuration.Clean.IsNull() {
- *clean = r.Configuration.Clean.ValueBool()
- } else {
- clean = nil
- }
- datasetID := new(string)
- if !r.Configuration.DatasetID.IsUnknown() && !r.Configuration.DatasetID.IsNull() {
- *datasetID = r.Configuration.DatasetID.ValueString()
- } else {
- datasetID = nil
- }
- sourceType := shared.SourceApifyDatasetApifyDataset(r.Configuration.SourceType.ValueString())
+ datasetID := r.Configuration.DatasetID.ValueString()
token := r.Configuration.Token.ValueString()
configuration := shared.SourceApifyDataset{
- Clean: clean,
- DatasetID: datasetID,
- SourceType: sourceType,
- Token: token,
+ DatasetID: datasetID,
+ Token: token,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -38,6 +30,7 @@ func (r *SourceApifyDatasetResourceModel) ToCreateSDKType() *shared.SourceApifyD
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceApifyDatasetCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -51,21 +44,9 @@ func (r *SourceApifyDatasetResourceModel) ToGetSDKType() *shared.SourceApifyData
}
func (r *SourceApifyDatasetResourceModel) ToUpdateSDKType() *shared.SourceApifyDatasetPutRequest {
- clean := new(bool)
- if !r.Configuration.Clean.IsUnknown() && !r.Configuration.Clean.IsNull() {
- *clean = r.Configuration.Clean.ValueBool()
- } else {
- clean = nil
- }
- datasetID := new(string)
- if !r.Configuration.DatasetID.IsUnknown() && !r.Configuration.DatasetID.IsNull() {
- *datasetID = r.Configuration.DatasetID.ValueString()
- } else {
- datasetID = nil
- }
+ datasetID := r.Configuration.DatasetID.ValueString()
token := r.Configuration.Token.ValueString()
configuration := shared.SourceApifyDatasetUpdate{
- Clean: clean,
DatasetID: datasetID,
Token: token,
}
diff --git a/internal/provider/source_appfollow_data_source.go b/internal/provider/source_appfollow_data_source.go
old mode 100755
new mode 100644
index 133d99b4c..5a945d928
--- a/internal/provider/source_appfollow_data_source.go
+++ b/internal/provider/source_appfollow_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceAppfollowDataSource struct {
// SourceAppfollowDataSourceModel describes the data model.
type SourceAppfollowDataSourceModel struct {
- Configuration SourceAppfollow `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceAppfollowDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceAppfollow DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_secret": schema.StringAttribute{
- Computed: true,
- Description: `API Key provided by Appfollow`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "appfollow",
- ),
- },
- Description: `must be one of ["appfollow"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_appfollow_data_source_sdk.go b/internal/provider/source_appfollow_data_source_sdk.go
old mode 100755
new mode 100644
index 2e0aa0421..dff7a066b
--- a/internal/provider/source_appfollow_data_source_sdk.go
+++ b/internal/provider/source_appfollow_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAppfollowDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_appfollow_resource.go b/internal/provider/source_appfollow_resource.go
old mode 100755
new mode 100644
index 40f56b5f5..1a1205ab0
--- a/internal/provider/source_appfollow_resource.go
+++ b/internal/provider/source_appfollow_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceAppfollowResource struct {
// SourceAppfollowResourceModel describes the resource data model.
type SourceAppfollowResourceModel struct {
Configuration SourceAppfollow `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,24 +58,26 @@ func (r *SourceAppfollowResource) Schema(ctx context.Context, req resource.Schem
Optional: true,
Description: `API Key provided by Appfollow`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "appfollow",
- ),
- },
- Description: `must be one of ["appfollow"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +141,7 @@ func (r *SourceAppfollowResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAppfollow(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +317,5 @@ func (r *SourceAppfollowResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceAppfollowResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_appfollow_resource_sdk.go b/internal/provider/source_appfollow_resource_sdk.go
old mode 100755
new mode 100644
index 5ca74dcca..4b21c7a8c
--- a/internal/provider/source_appfollow_resource_sdk.go
+++ b/internal/provider/source_appfollow_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -14,10 +14,14 @@ func (r *SourceAppfollowResourceModel) ToCreateSDKType() *shared.SourceAppfollow
} else {
apiSecret = nil
}
- sourceType := shared.SourceAppfollowAppfollow(r.Configuration.SourceType.ValueString())
configuration := shared.SourceAppfollow{
- APISecret: apiSecret,
- SourceType: sourceType,
+ APISecret: apiSecret,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -29,6 +33,7 @@ func (r *SourceAppfollowResourceModel) ToCreateSDKType() *shared.SourceAppfollow
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAppfollowCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_asana_data_source.go b/internal/provider/source_asana_data_source.go
old mode 100755
new mode 100644
index eb8f6efbc..0e50e7045
--- a/internal/provider/source_asana_data_source.go
+++ b/internal/provider/source_asana_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceAsanaDataSource struct {
// SourceAsanaDataSourceModel describes the data model.
type SourceAsanaDataSourceModel struct {
- Configuration SourceAsana `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,129 +47,20 @@ func (r *SourceAsanaDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceAsana DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_asana_authentication_mechanism_authenticate_via_asana_oauth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- },
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["OAuth Credentials"]` + "\n" +
- `OAuth Credentials`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Choose how to authenticate to Github`,
- },
- "source_asana_authentication_mechanism_authenticate_with_personal_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "PAT Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["PAT Credentials"]` + "\n" +
- `PAT Credentials`,
- },
- "personal_access_token": schema.StringAttribute{
- Computed: true,
- Description: `Asana Personal Access Token (generate yours here).`,
- },
- },
- Description: `Choose how to authenticate to Github`,
- },
- "source_asana_update_authentication_mechanism_authenticate_via_asana_oauth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- },
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["OAuth Credentials"]` + "\n" +
- `OAuth Credentials`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Choose how to authenticate to Github`,
- },
- "source_asana_update_authentication_mechanism_authenticate_with_personal_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "PAT Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["PAT Credentials"]` + "\n" +
- `PAT Credentials`,
- },
- "personal_access_token": schema.StringAttribute{
- Computed: true,
- Description: `Asana Personal Access Token (generate yours here).`,
- },
- },
- Description: `Choose how to authenticate to Github`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Choose how to authenticate to Github`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "asana",
- ),
- },
- Description: `must be one of ["asana"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_asana_data_source_sdk.go b/internal/provider/source_asana_data_source_sdk.go
old mode 100755
new mode 100644
index 7303f5c20..737e43b22
--- a/internal/provider/source_asana_data_source_sdk.go
+++ b/internal/provider/source_asana_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAsanaDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_asana_resource.go b/internal/provider/source_asana_resource.go
old mode 100755
new mode 100644
index b79c929dd..e83cbb843
--- a/internal/provider/source_asana_resource.go
+++ b/internal/provider/source_asana_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
+ "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceAsanaResource struct {
// SourceAsanaResourceModel describes the resource data model.
type SourceAsanaResourceModel struct {
Configuration SourceAsana `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,7 +60,7 @@ func (r *SourceAsanaResource) Schema(ctx context.Context, req resource.SchemaReq
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_asana_authentication_mechanism_authenticate_via_asana_oauth": schema.SingleNestedAttribute{
+ "authenticate_via_asana_oauth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"client_id": schema.StringAttribute{
@@ -67,111 +69,62 @@ func (r *SourceAsanaResource) Schema(ctx context.Context, req resource.SchemaReq
"client_secret": schema.StringAttribute{
Required: true,
},
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["OAuth Credentials"]` + "\n" +
- `OAuth Credentials`,
- },
"refresh_token": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Sensitive: true,
},
},
Description: `Choose how to authenticate to Github`,
},
- "source_asana_authentication_mechanism_authenticate_with_personal_access_token": schema.SingleNestedAttribute{
+ "authenticate_with_personal_access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "PAT Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["PAT Credentials"]` + "\n" +
- `PAT Credentials`,
- },
- "personal_access_token": schema.StringAttribute{
- Required: true,
- Description: `Asana Personal Access Token (generate yours here).`,
- },
- },
- Description: `Choose how to authenticate to Github`,
- },
- "source_asana_update_authentication_mechanism_authenticate_via_asana_oauth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Required: true,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- },
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["OAuth Credentials"]` + "\n" +
- `OAuth Credentials`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- },
- },
- Description: `Choose how to authenticate to Github`,
- },
- "source_asana_update_authentication_mechanism_authenticate_with_personal_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "PAT Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["PAT Credentials"]` + "\n" +
- `PAT Credentials`,
- },
"personal_access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Asana Personal Access Token (generate yours here).`,
},
},
Description: `Choose how to authenticate to Github`,
},
},
+ Description: `Choose how to authenticate to Github`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Choose how to authenticate to Github`,
},
- "source_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "asana",
- ),
+ "organization_export_ids": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `Globally unique identifiers for the organization exports`,
+ Validators: []validator.List{
+ listvalidator.ValueStringsAre(validators.IsValidJSON()),
},
- Description: `must be one of ["asana"]`,
+ },
+ "test_mode": schema.BoolAttribute{
+ Optional: true,
+ Description: `This flag is used for testing purposes for certain streams that return a lot of data. This flag is not meant to be enabled for prod.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -235,7 +188,7 @@ func (r *SourceAsanaResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAsana(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -411,5 +364,5 @@ func (r *SourceAsanaResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceAsanaResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_asana_resource_sdk.go b/internal/provider/source_asana_resource_sdk.go
old mode 100755
new mode 100644
index 95160e00a..a37d065c5
--- a/internal/provider/source_asana_resource_sdk.go
+++ b/internal/provider/source_asana_resource_sdk.go
@@ -3,65 +3,65 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAsanaResourceModel) ToCreateSDKType() *shared.SourceAsanaCreateRequest {
var credentials *shared.SourceAsanaAuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth *shared.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth
- if r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth != nil {
- clientID := r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth.ClientSecret.ValueString()
- optionTitle := new(shared.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle)
- if !r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth.OptionTitle.IsNull() {
- *optionTitle = shared.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle(r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth.OptionTitle.ValueString())
- } else {
- optionTitle = nil
- }
- refreshToken := r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth.RefreshToken.ValueString()
- sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth = &shared.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth{
+ var sourceAsanaAuthenticateViaAsanaOauth *shared.SourceAsanaAuthenticateViaAsanaOauth
+ if r.Configuration.Credentials.AuthenticateViaAsanaOauth != nil {
+ clientID := r.Configuration.Credentials.AuthenticateViaAsanaOauth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaAsanaOauth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaAsanaOauth.RefreshToken.ValueString()
+ sourceAsanaAuthenticateViaAsanaOauth = &shared.SourceAsanaAuthenticateViaAsanaOauth{
ClientID: clientID,
ClientSecret: clientSecret,
- OptionTitle: optionTitle,
RefreshToken: refreshToken,
}
}
- if sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth != nil {
+ if sourceAsanaAuthenticateViaAsanaOauth != nil {
credentials = &shared.SourceAsanaAuthenticationMechanism{
- SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth: sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth,
+ SourceAsanaAuthenticateViaAsanaOauth: sourceAsanaAuthenticateViaAsanaOauth,
}
}
- var sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken *shared.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken
- if r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
- optionTitle1 := new(shared.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle)
- if !r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken.OptionTitle.IsNull() {
- *optionTitle1 = shared.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle(r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken.OptionTitle.ValueString())
- } else {
- optionTitle1 = nil
- }
- personalAccessToken := r.Configuration.Credentials.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken.PersonalAccessToken.ValueString()
- sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken = &shared.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken{
- OptionTitle: optionTitle1,
+ var sourceAsanaAuthenticateWithPersonalAccessToken *shared.SourceAsanaAuthenticateWithPersonalAccessToken
+ if r.Configuration.Credentials.AuthenticateWithPersonalAccessToken != nil {
+ personalAccessToken := r.Configuration.Credentials.AuthenticateWithPersonalAccessToken.PersonalAccessToken.ValueString()
+ sourceAsanaAuthenticateWithPersonalAccessToken = &shared.SourceAsanaAuthenticateWithPersonalAccessToken{
PersonalAccessToken: personalAccessToken,
}
}
- if sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
+ if sourceAsanaAuthenticateWithPersonalAccessToken != nil {
credentials = &shared.SourceAsanaAuthenticationMechanism{
- SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken: sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken,
+ SourceAsanaAuthenticateWithPersonalAccessToken: sourceAsanaAuthenticateWithPersonalAccessToken,
}
}
}
- sourceType := new(shared.SourceAsanaAsana)
- if !r.Configuration.SourceType.IsUnknown() && !r.Configuration.SourceType.IsNull() {
- *sourceType = shared.SourceAsanaAsana(r.Configuration.SourceType.ValueString())
+ var organizationExportIds []interface{} = nil
+ for _, organizationExportIdsItem := range r.Configuration.OrganizationExportIds {
+ var organizationExportIdsTmp interface{}
+ _ = json.Unmarshal([]byte(organizationExportIdsItem.ValueString()), &organizationExportIdsTmp)
+ organizationExportIds = append(organizationExportIds, organizationExportIdsTmp)
+ }
+ testMode := new(bool)
+ if !r.Configuration.TestMode.IsUnknown() && !r.Configuration.TestMode.IsNull() {
+ *testMode = r.Configuration.TestMode.ValueBool()
} else {
- sourceType = nil
+ testMode = nil
}
configuration := shared.SourceAsana{
- Credentials: credentials,
- SourceType: sourceType,
+ Credentials: credentials,
+ OrganizationExportIds: organizationExportIds,
+ TestMode: testMode,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -73,6 +73,7 @@ func (r *SourceAsanaResourceModel) ToCreateSDKType() *shared.SourceAsanaCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAsanaCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -86,53 +87,53 @@ func (r *SourceAsanaResourceModel) ToGetSDKType() *shared.SourceAsanaCreateReque
}
func (r *SourceAsanaResourceModel) ToUpdateSDKType() *shared.SourceAsanaPutRequest {
- var credentials *shared.SourceAsanaUpdateAuthenticationMechanism
+ var credentials *shared.AuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth *shared.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth
- if r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth != nil {
- clientID := r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth.ClientSecret.ValueString()
- optionTitle := new(shared.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle)
- if !r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth.OptionTitle.IsNull() {
- *optionTitle = shared.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle(r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth.OptionTitle.ValueString())
- } else {
- optionTitle = nil
- }
- refreshToken := r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth.RefreshToken.ValueString()
- sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth = &shared.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth{
+ var authenticateViaAsanaOauth *shared.AuthenticateViaAsanaOauth
+ if r.Configuration.Credentials.AuthenticateViaAsanaOauth != nil {
+ clientID := r.Configuration.Credentials.AuthenticateViaAsanaOauth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaAsanaOauth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaAsanaOauth.RefreshToken.ValueString()
+ authenticateViaAsanaOauth = &shared.AuthenticateViaAsanaOauth{
ClientID: clientID,
ClientSecret: clientSecret,
- OptionTitle: optionTitle,
RefreshToken: refreshToken,
}
}
- if sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth != nil {
- credentials = &shared.SourceAsanaUpdateAuthenticationMechanism{
- SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth: sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth,
+ if authenticateViaAsanaOauth != nil {
+ credentials = &shared.AuthenticationMechanism{
+ AuthenticateViaAsanaOauth: authenticateViaAsanaOauth,
}
}
- var sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken *shared.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken
- if r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
- optionTitle1 := new(shared.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle)
- if !r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken.OptionTitle.IsNull() {
- *optionTitle1 = shared.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle(r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken.OptionTitle.ValueString())
- } else {
- optionTitle1 = nil
- }
- personalAccessToken := r.Configuration.Credentials.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken.PersonalAccessToken.ValueString()
- sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken = &shared.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken{
- OptionTitle: optionTitle1,
+ var authenticateWithPersonalAccessToken *shared.AuthenticateWithPersonalAccessToken
+ if r.Configuration.Credentials.AuthenticateWithPersonalAccessToken != nil {
+ personalAccessToken := r.Configuration.Credentials.AuthenticateWithPersonalAccessToken.PersonalAccessToken.ValueString()
+ authenticateWithPersonalAccessToken = &shared.AuthenticateWithPersonalAccessToken{
PersonalAccessToken: personalAccessToken,
}
}
- if sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
- credentials = &shared.SourceAsanaUpdateAuthenticationMechanism{
- SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken: sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken,
+ if authenticateWithPersonalAccessToken != nil {
+ credentials = &shared.AuthenticationMechanism{
+ AuthenticateWithPersonalAccessToken: authenticateWithPersonalAccessToken,
}
}
}
+ var organizationExportIds []interface{} = nil
+ for _, organizationExportIdsItem := range r.Configuration.OrganizationExportIds {
+ var organizationExportIdsTmp interface{}
+ _ = json.Unmarshal([]byte(organizationExportIdsItem.ValueString()), &organizationExportIdsTmp)
+ organizationExportIds = append(organizationExportIds, organizationExportIdsTmp)
+ }
+ testMode := new(bool)
+ if !r.Configuration.TestMode.IsUnknown() && !r.Configuration.TestMode.IsNull() {
+ *testMode = r.Configuration.TestMode.ValueBool()
+ } else {
+ testMode = nil
+ }
configuration := shared.SourceAsanaUpdate{
- Credentials: credentials,
+ Credentials: credentials,
+ OrganizationExportIds: organizationExportIds,
+ TestMode: testMode,
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
diff --git a/internal/provider/source_auth0_data_source.go b/internal/provider/source_auth0_data_source.go
old mode 100755
new mode 100644
index 5d2897b1b..f8d647fae
--- a/internal/provider/source_auth0_data_source.go
+++ b/internal/provider/source_auth0_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceAuth0DataSource struct {
// SourceAuth0DataSourceModel describes the data model.
type SourceAuth0DataSourceModel struct {
- Configuration SourceAuth0 `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,134 +47,20 @@ func (r *SourceAuth0DataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceAuth0 DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "base_url": schema.StringAttribute{
- Computed: true,
- Description: `The Authentication API is served over HTTPS. All URLs referenced in the documentation have the following base ` + "`" + `https://YOUR_DOMAIN` + "`" + ``,
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_auth0_authentication_method_o_auth2_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2_access_token",
- ),
- },
- Description: `must be one of ["oauth2_access_token"]`,
- },
- },
- },
- "source_auth0_authentication_method_o_auth2_confidential_application": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "audience": schema.StringAttribute{
- Computed: true,
- Description: `The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2_confidential_application",
- ),
- },
- Description: `must be one of ["oauth2_confidential_application"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Your application's Client Secret. You can find this value on the application's settings tab after you login the admin portal.`,
- },
- },
- },
- "source_auth0_update_authentication_method_o_auth2_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2_access_token",
- ),
- },
- Description: `must be one of ["oauth2_access_token"]`,
- },
- },
- },
- "source_auth0_update_authentication_method_o_auth2_confidential_application": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "audience": schema.StringAttribute{
- Computed: true,
- Description: `The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2_confidential_application",
- ),
- },
- Description: `must be one of ["oauth2_confidential_application"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Your application's Client Secret. You can find this value on the application's settings tab after you login the admin portal.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "auth0",
- ),
- },
- Description: `must be one of ["auth0"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_auth0_data_source_sdk.go b/internal/provider/source_auth0_data_source_sdk.go
old mode 100755
new mode 100644
index 92ed4c416..605fded91
--- a/internal/provider/source_auth0_data_source_sdk.go
+++ b/internal/provider/source_auth0_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAuth0DataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_auth0_resource.go b/internal/provider/source_auth0_resource.go
old mode 100755
new mode 100644
index 257fe4860..bb3217c35
--- a/internal/provider/source_auth0_resource.go
+++ b/internal/provider/source_auth0_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceAuth0Resource struct {
// SourceAuth0ResourceModel describes the resource data model.
type SourceAuth0ResourceModel struct {
Configuration SourceAuth0 `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -62,84 +63,23 @@ func (r *SourceAuth0Resource) Schema(ctx context.Context, req resource.SchemaReq
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_auth0_authentication_method_o_auth2_access_token": schema.SingleNestedAttribute{
+ "o_auth2_access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes.`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2_access_token",
- ),
- },
- Description: `must be one of ["oauth2_access_token"]`,
- },
- },
- },
- "source_auth0_authentication_method_o_auth2_confidential_application": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "audience": schema.StringAttribute{
- Required: true,
- Description: `The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2_confidential_application",
- ),
- },
- Description: `must be one of ["oauth2_confidential_application"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `Your application's Client Secret. You can find this value on the application's settings tab after you login the admin portal.`,
- },
- },
- },
- "source_auth0_update_authentication_method_o_auth2_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2_access_token",
- ),
- },
- Description: `must be one of ["oauth2_access_token"]`,
- },
},
},
- "source_auth0_update_authentication_method_o_auth2_confidential_application": schema.SingleNestedAttribute{
+ "o_auth2_confidential_application": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"audience": schema.StringAttribute{
Required: true,
Description: `The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2_confidential_application",
- ),
- },
- Description: `must be one of ["oauth2_confidential_application"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal.`,
@@ -155,28 +95,31 @@ func (r *SourceAuth0Resource) Schema(ctx context.Context, req resource.SchemaReq
validators.ExactlyOneChild(),
},
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "auth0",
- ),
- },
- Description: `must be one of ["auth0"]`,
- },
"start_date": schema.StringAttribute{
- Optional: true,
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
+ Optional: true,
+ MarkdownDescription: `Default: "2023-08-05T00:43:59.244Z"` + "\n" +
+ `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -240,7 +183,7 @@ func (r *SourceAuth0Resource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAuth0(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -416,5 +359,5 @@ func (r *SourceAuth0Resource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceAuth0Resource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_auth0_resource_sdk.go b/internal/provider/source_auth0_resource_sdk.go
old mode 100755
new mode 100644
index c898820df..bf9578bc9
--- a/internal/provider/source_auth0_resource_sdk.go
+++ b/internal/provider/source_auth0_resource_sdk.go
@@ -3,46 +3,41 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAuth0ResourceModel) ToCreateSDKType() *shared.SourceAuth0CreateRequest {
baseURL := r.Configuration.BaseURL.ValueString()
var credentials shared.SourceAuth0AuthenticationMethod
- var sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication *shared.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication
- if r.Configuration.Credentials.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication != nil {
- audience := r.Configuration.Credentials.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication.Audience.ValueString()
- authType := shared.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod(r.Configuration.Credentials.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication.ClientSecret.ValueString()
- sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication = &shared.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication{
+ var sourceAuth0OAuth2ConfidentialApplication *shared.SourceAuth0OAuth2ConfidentialApplication
+ if r.Configuration.Credentials.OAuth2ConfidentialApplication != nil {
+ audience := r.Configuration.Credentials.OAuth2ConfidentialApplication.Audience.ValueString()
+ clientID := r.Configuration.Credentials.OAuth2ConfidentialApplication.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth2ConfidentialApplication.ClientSecret.ValueString()
+ sourceAuth0OAuth2ConfidentialApplication = &shared.SourceAuth0OAuth2ConfidentialApplication{
Audience: audience,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
}
}
- if sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication != nil {
+ if sourceAuth0OAuth2ConfidentialApplication != nil {
credentials = shared.SourceAuth0AuthenticationMethod{
- SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication: sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication,
+ SourceAuth0OAuth2ConfidentialApplication: sourceAuth0OAuth2ConfidentialApplication,
}
}
- var sourceAuth0AuthenticationMethodOAuth2AccessToken *shared.SourceAuth0AuthenticationMethodOAuth2AccessToken
- if r.Configuration.Credentials.SourceAuth0AuthenticationMethodOAuth2AccessToken != nil {
- accessToken := r.Configuration.Credentials.SourceAuth0AuthenticationMethodOAuth2AccessToken.AccessToken.ValueString()
- authType1 := shared.SourceAuth0AuthenticationMethodOAuth2AccessTokenAuthenticationMethod(r.Configuration.Credentials.SourceAuth0AuthenticationMethodOAuth2AccessToken.AuthType.ValueString())
- sourceAuth0AuthenticationMethodOAuth2AccessToken = &shared.SourceAuth0AuthenticationMethodOAuth2AccessToken{
+ var sourceAuth0OAuth2AccessToken *shared.SourceAuth0OAuth2AccessToken
+ if r.Configuration.Credentials.OAuth2AccessToken != nil {
+ accessToken := r.Configuration.Credentials.OAuth2AccessToken.AccessToken.ValueString()
+ sourceAuth0OAuth2AccessToken = &shared.SourceAuth0OAuth2AccessToken{
AccessToken: accessToken,
- AuthType: authType1,
}
}
- if sourceAuth0AuthenticationMethodOAuth2AccessToken != nil {
+ if sourceAuth0OAuth2AccessToken != nil {
credentials = shared.SourceAuth0AuthenticationMethod{
- SourceAuth0AuthenticationMethodOAuth2AccessToken: sourceAuth0AuthenticationMethodOAuth2AccessToken,
+ SourceAuth0OAuth2AccessToken: sourceAuth0OAuth2AccessToken,
}
}
- sourceType := shared.SourceAuth0Auth0(r.Configuration.SourceType.ValueString())
startDate := new(string)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate = r.Configuration.StartDate.ValueString()
@@ -52,9 +47,14 @@ func (r *SourceAuth0ResourceModel) ToCreateSDKType() *shared.SourceAuth0CreateRe
configuration := shared.SourceAuth0{
BaseURL: baseURL,
Credentials: credentials,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -65,6 +65,7 @@ func (r *SourceAuth0ResourceModel) ToCreateSDKType() *shared.SourceAuth0CreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAuth0CreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -80,36 +81,32 @@ func (r *SourceAuth0ResourceModel) ToGetSDKType() *shared.SourceAuth0CreateReque
func (r *SourceAuth0ResourceModel) ToUpdateSDKType() *shared.SourceAuth0PutRequest {
baseURL := r.Configuration.BaseURL.ValueString()
var credentials shared.SourceAuth0UpdateAuthenticationMethod
- var sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication *shared.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication
- if r.Configuration.Credentials.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication != nil {
- audience := r.Configuration.Credentials.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication.Audience.ValueString()
- authType := shared.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod(r.Configuration.Credentials.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication.ClientSecret.ValueString()
- sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication = &shared.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication{
+ var oAuth2ConfidentialApplication *shared.OAuth2ConfidentialApplication
+ if r.Configuration.Credentials.OAuth2ConfidentialApplication != nil {
+ audience := r.Configuration.Credentials.OAuth2ConfidentialApplication.Audience.ValueString()
+ clientID := r.Configuration.Credentials.OAuth2ConfidentialApplication.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth2ConfidentialApplication.ClientSecret.ValueString()
+ oAuth2ConfidentialApplication = &shared.OAuth2ConfidentialApplication{
Audience: audience,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
}
}
- if sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication != nil {
+ if oAuth2ConfidentialApplication != nil {
credentials = shared.SourceAuth0UpdateAuthenticationMethod{
- SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication: sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication,
+ OAuth2ConfidentialApplication: oAuth2ConfidentialApplication,
}
}
- var sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken *shared.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken
- if r.Configuration.Credentials.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken != nil {
- accessToken := r.Configuration.Credentials.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken.AccessToken.ValueString()
- authType1 := shared.SourceAuth0UpdateAuthenticationMethodOAuth2AccessTokenAuthenticationMethod(r.Configuration.Credentials.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken.AuthType.ValueString())
- sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken = &shared.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken{
+ var oAuth2AccessToken *shared.OAuth2AccessToken
+ if r.Configuration.Credentials.OAuth2AccessToken != nil {
+ accessToken := r.Configuration.Credentials.OAuth2AccessToken.AccessToken.ValueString()
+ oAuth2AccessToken = &shared.OAuth2AccessToken{
AccessToken: accessToken,
- AuthType: authType1,
}
}
- if sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken != nil {
+ if oAuth2AccessToken != nil {
credentials = shared.SourceAuth0UpdateAuthenticationMethod{
- SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken: sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken,
+ OAuth2AccessToken: oAuth2AccessToken,
}
}
startDate := new(string)
diff --git a/internal/provider/source_awscloudtrail_data_source.go b/internal/provider/source_awscloudtrail_data_source.go
old mode 100755
new mode 100644
index bc9540e81..ac0a1783c
--- a/internal/provider/source_awscloudtrail_data_source.go
+++ b/internal/provider/source_awscloudtrail_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceAwsCloudtrailDataSource struct {
// SourceAwsCloudtrailDataSourceModel describes the data model.
type SourceAwsCloudtrailDataSourceModel struct {
- Configuration SourceAwsCloudtrail `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,49 +47,20 @@ func (r *SourceAwsCloudtrailDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "SourceAwsCloudtrail DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "aws_key_id": schema.StringAttribute{
- Computed: true,
- Description: `AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.`,
- },
- "aws_region_name": schema.StringAttribute{
- Computed: true,
- Description: `The default AWS Region to use, for example, us-west-1 or us-west-2. When specifying a Region inline during client initialization, this property is named region_name.`,
- },
- "aws_secret_key": schema.StringAttribute{
- Computed: true,
- Description: `AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aws-cloudtrail",
- ),
- },
- Description: `must be one of ["aws-cloudtrail"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_awscloudtrail_data_source_sdk.go b/internal/provider/source_awscloudtrail_data_source_sdk.go
old mode 100755
new mode 100644
index 02663ea37..ffc30b497
--- a/internal/provider/source_awscloudtrail_data_source_sdk.go
+++ b/internal/provider/source_awscloudtrail_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAwsCloudtrailDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_awscloudtrail_resource.go b/internal/provider/source_awscloudtrail_resource.go
old mode 100755
new mode 100644
index 4cfa35742..629a675c3
--- a/internal/provider/source_awscloudtrail_resource.go
+++ b/internal/provider/source_awscloudtrail_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceAwsCloudtrailResource struct {
// SourceAwsCloudtrailResourceModel describes the resource data model.
type SourceAwsCloudtrailResourceModel struct {
Configuration SourceAwsCloudtrail `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,6 +58,7 @@ func (r *SourceAwsCloudtrailResource) Schema(ctx context.Context, req resource.S
Attributes: map[string]schema.Attribute{
"aws_key_id": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.`,
},
"aws_region_name": schema.StringAttribute{
@@ -65,33 +67,37 @@ func (r *SourceAwsCloudtrailResource) Schema(ctx context.Context, req resource.S
},
"aws_secret_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "aws-cloudtrail",
- ),
- },
- Description: `must be one of ["aws-cloudtrail"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `Default: "1970-01-01"` + "\n" +
+ `The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +161,7 @@ func (r *SourceAwsCloudtrailResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAwsCloudtrail(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +337,5 @@ func (r *SourceAwsCloudtrailResource) Delete(ctx context.Context, req resource.D
}
func (r *SourceAwsCloudtrailResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_awscloudtrail_resource_sdk.go b/internal/provider/source_awscloudtrail_resource_sdk.go
old mode 100755
new mode 100644
index 49105b7a3..d38d6deb0
--- a/internal/provider/source_awscloudtrail_resource_sdk.go
+++ b/internal/provider/source_awscloudtrail_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -12,15 +12,24 @@ func (r *SourceAwsCloudtrailResourceModel) ToCreateSDKType() *shared.SourceAwsCl
awsKeyID := r.Configuration.AwsKeyID.ValueString()
awsRegionName := r.Configuration.AwsRegionName.ValueString()
awsSecretKey := r.Configuration.AwsSecretKey.ValueString()
- sourceType := shared.SourceAwsCloudtrailAwsCloudtrail(r.Configuration.SourceType.ValueString())
- startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
+ startDate := new(customTypes.Date)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceAwsCloudtrail{
AwsKeyID: awsKeyID,
AwsRegionName: awsRegionName,
AwsSecretKey: awsSecretKey,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -31,6 +40,7 @@ func (r *SourceAwsCloudtrailResourceModel) ToCreateSDKType() *shared.SourceAwsCl
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAwsCloudtrailCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -47,7 +57,12 @@ func (r *SourceAwsCloudtrailResourceModel) ToUpdateSDKType() *shared.SourceAwsCl
awsKeyID := r.Configuration.AwsKeyID.ValueString()
awsRegionName := r.Configuration.AwsRegionName.ValueString()
awsSecretKey := r.Configuration.AwsSecretKey.ValueString()
- startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
+ startDate := new(customTypes.Date)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceAwsCloudtrailUpdate{
AwsKeyID: awsKeyID,
AwsRegionName: awsRegionName,
diff --git a/internal/provider/source_azureblobstorage_data_source.go b/internal/provider/source_azureblobstorage_data_source.go
old mode 100755
new mode 100644
index fae57860f..35a0bfa1b
--- a/internal/provider/source_azureblobstorage_data_source.go
+++ b/internal/provider/source_azureblobstorage_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceAzureBlobStorageDataSource struct {
// SourceAzureBlobStorageDataSourceModel describes the data model.
type SourceAzureBlobStorageDataSourceModel struct {
- Configuration SourceAzureBlobStorage `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,93 +47,20 @@ func (r *SourceAzureBlobStorageDataSource) Schema(ctx context.Context, req datas
MarkdownDescription: "SourceAzureBlobStorage DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "azure_blob_storage_account_key": schema.StringAttribute{
- Computed: true,
- Description: `The Azure blob storage account key.`,
- },
- "azure_blob_storage_account_name": schema.StringAttribute{
- Computed: true,
- Description: `The account's name of the Azure Blob Storage.`,
- },
- "azure_blob_storage_blobs_prefix": schema.StringAttribute{
- Computed: true,
- Description: `The Azure blob storage prefix to be applied`,
- },
- "azure_blob_storage_container_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the Azure blob storage container.`,
- },
- "azure_blob_storage_endpoint": schema.StringAttribute{
- Computed: true,
- Description: `This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.`,
- },
- "azure_blob_storage_schema_inference_limit": schema.Int64Attribute{
- Computed: true,
- Description: `The Azure blob storage blobs to scan for inferring the schema, useful on large amounts of data with consistent structure`,
- },
- "format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_azure_blob_storage_input_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Input data format`,
- },
- "source_azure_blob_storage_update_input_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "format_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
- },
- },
- Description: `Input data format`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Input data format`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "azure-blob-storage",
- ),
- },
- Description: `must be one of ["azure-blob-storage"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_azureblobstorage_data_source_sdk.go b/internal/provider/source_azureblobstorage_data_source_sdk.go
old mode 100755
new mode 100644
index 19bef01d8..13ad6f1c1
--- a/internal/provider/source_azureblobstorage_data_source_sdk.go
+++ b/internal/provider/source_azureblobstorage_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAzureBlobStorageDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_azureblobstorage_resource.go b/internal/provider/source_azureblobstorage_resource.go
old mode 100755
new mode 100644
index 9d81470a3..97bea223d
--- a/internal/provider/source_azureblobstorage_resource.go
+++ b/internal/provider/source_azureblobstorage_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceAzureBlobStorageResource struct {
// SourceAzureBlobStorageResourceModel describes the resource data model.
type SourceAzureBlobStorageResourceModel struct {
Configuration SourceAzureBlobStorage `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,16 +59,13 @@ func (r *SourceAzureBlobStorageResource) Schema(ctx context.Context, req resourc
Attributes: map[string]schema.Attribute{
"azure_blob_storage_account_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Azure blob storage account key.`,
},
"azure_blob_storage_account_name": schema.StringAttribute{
Required: true,
Description: `The account's name of the Azure Blob Storage.`,
},
- "azure_blob_storage_blobs_prefix": schema.StringAttribute{
- Optional: true,
- Description: `The Azure blob storage prefix to be applied`,
- },
"azure_blob_storage_container_name": schema.StringAttribute{
Required: true,
Description: `The name of the Azure blob storage container.`,
@@ -75,67 +74,235 @@ func (r *SourceAzureBlobStorageResource) Schema(ctx context.Context, req resourc
Optional: true,
Description: `This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.`,
},
- "azure_blob_storage_schema_inference_limit": schema.Int64Attribute{
+ "start_date": schema.StringAttribute{
Optional: true,
- Description: `The Azure blob storage blobs to scan for inferring the schema, useful on large amounts of data with consistent structure`,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.`,
+ Validators: []validator.String{
+ validators.IsRFC3339(),
+ },
},
- "format": schema.SingleNestedAttribute{
+ "streams": schema.ListNestedAttribute{
Required: true,
- Attributes: map[string]schema.Attribute{
- "source_azure_blob_storage_input_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "days_to_sync_if_history_is_full": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 3` + "\n" +
+ `When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.`,
+ },
+ "format": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "avro_format": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "double_as_string": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.`,
+ },
+ },
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
+ },
+ "csv_format": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "delimiter": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: ","` + "\n" +
+ `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
+ },
+ "double_quote": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
+ },
+ "encoding": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "utf8"` + "\n" +
+ `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
+ },
+ "escape_char": schema.StringAttribute{
+ Optional: true,
+ Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`,
+ },
+ "false_values": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `A set of case-sensitive strings that should be interpreted as false values.`,
+ },
+ "header_definition": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "autogenerated": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ },
+ "from_csv": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ },
+ "user_provided": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "column_names": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ Description: `The column names that will be used while emitting the CSV records`,
+ },
+ },
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ },
+ },
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "inference_type": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["None", "Primitive Types Only"]; Default: "None"` + "\n" +
+ `How to infer the types of the columns. If none, inference default to strings.`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "None",
+ "Primitive Types Only",
+ ),
+ },
+ },
+ "null_values": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.`,
+ },
+ "quote_char": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "\""` + "\n" +
+ `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
+ },
+ "skip_rows_after_header": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `The number of rows to skip after the header row.`,
+ },
+ "skip_rows_before_header": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`,
+ },
+ "strings_can_be_null": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`,
+ },
+ "true_values": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `A set of case-sensitive strings that should be interpreted as true values.`,
+ },
+ },
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
+ },
+ "document_file_type_format_experimental": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "skip_unprocessable_file_types": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.`,
+ },
+ },
+ Description: `Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.`,
+ },
+ "jsonl_format": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
},
- Description: `must be one of ["JSONL"]`,
+ "parquet_format": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "decimal_as_float": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.`,
+ },
+ },
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
+ },
+ },
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
},
},
- Description: `Input data format`,
- },
- "source_azure_blob_storage_update_input_format_json_lines_newline_delimited_json": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "format_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "JSONL",
- ),
- },
- Description: `must be one of ["JSONL"]`,
+ "globs": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.`,
+ },
+ "input_schema": schema.StringAttribute{
+ Optional: true,
+ Description: `The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.`,
+ },
+ "legacy_prefix": schema.StringAttribute{
+ Optional: true,
+ Description: `The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.`,
+ },
+ "name": schema.StringAttribute{
+ Required: true,
+ Description: `The name of the stream.`,
+ },
+ "primary_key": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `The column or columns (for a composite key) that serves as the unique identifier of a record.`,
+ },
+ "schemaless": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `When enabled, syncs will not validate or structure records against the stream's schema.`,
+ },
+ "validation_policy": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["Emit Record", "Skip Record", "Wait for Discover"]; Default: "Emit Record"` + "\n" +
+ `The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "Emit Record",
+ "Skip Record",
+ "Wait for Discover",
+ ),
},
},
- Description: `Input data format`,
},
},
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Input data format`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "azure-blob-storage",
- ),
- },
- Description: `must be one of ["azure-blob-storage"]`,
+ Description: `Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.`,
},
},
+ MarkdownDescription: `NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes` + "\n" +
+ `because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.`,
+ },
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
},
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -199,7 +366,7 @@ func (r *SourceAzureBlobStorageResource) Create(ctx context.Context, req resourc
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAzureBlobStorage(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -375,5 +542,5 @@ func (r *SourceAzureBlobStorageResource) Delete(ctx context.Context, req resourc
}
func (r *SourceAzureBlobStorageResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_azureblobstorage_resource_sdk.go b/internal/provider/source_azureblobstorage_resource_sdk.go
old mode 100755
new mode 100644
index f27f9d7d6..18382e6de
--- a/internal/provider/source_azureblobstorage_resource_sdk.go
+++ b/internal/provider/source_azureblobstorage_resource_sdk.go
@@ -3,19 +3,14 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
+ "time"
)
func (r *SourceAzureBlobStorageResourceModel) ToCreateSDKType() *shared.SourceAzureBlobStorageCreateRequest {
azureBlobStorageAccountKey := r.Configuration.AzureBlobStorageAccountKey.ValueString()
azureBlobStorageAccountName := r.Configuration.AzureBlobStorageAccountName.ValueString()
- azureBlobStorageBlobsPrefix := new(string)
- if !r.Configuration.AzureBlobStorageBlobsPrefix.IsUnknown() && !r.Configuration.AzureBlobStorageBlobsPrefix.IsNull() {
- *azureBlobStorageBlobsPrefix = r.Configuration.AzureBlobStorageBlobsPrefix.ValueString()
- } else {
- azureBlobStorageBlobsPrefix = nil
- }
azureBlobStorageContainerName := r.Configuration.AzureBlobStorageContainerName.ValueString()
azureBlobStorageEndpoint := new(string)
if !r.Configuration.AzureBlobStorageEndpoint.IsUnknown() && !r.Configuration.AzureBlobStorageEndpoint.IsNull() {
@@ -23,37 +18,268 @@ func (r *SourceAzureBlobStorageResourceModel) ToCreateSDKType() *shared.SourceAz
} else {
azureBlobStorageEndpoint = nil
}
- azureBlobStorageSchemaInferenceLimit := new(int64)
- if !r.Configuration.AzureBlobStorageSchemaInferenceLimit.IsUnknown() && !r.Configuration.AzureBlobStorageSchemaInferenceLimit.IsNull() {
- *azureBlobStorageSchemaInferenceLimit = r.Configuration.AzureBlobStorageSchemaInferenceLimit.ValueInt64()
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
} else {
- azureBlobStorageSchemaInferenceLimit = nil
+ startDate = nil
}
- var format shared.SourceAzureBlobStorageInputFormat
- var sourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON *shared.SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON != nil {
- formatType := shared.SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSONFormatType(r.Configuration.Format.SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- sourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON = &shared.SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON{
- FormatType: formatType,
+ var streams []shared.SourceAzureBlobStorageFileBasedStreamConfig = nil
+ for _, streamsItem := range r.Configuration.Streams {
+ daysToSyncIfHistoryIsFull := new(int64)
+ if !streamsItem.DaysToSyncIfHistoryIsFull.IsUnknown() && !streamsItem.DaysToSyncIfHistoryIsFull.IsNull() {
+ *daysToSyncIfHistoryIsFull = streamsItem.DaysToSyncIfHistoryIsFull.ValueInt64()
+ } else {
+ daysToSyncIfHistoryIsFull = nil
}
- }
- if sourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON != nil {
- format = shared.SourceAzureBlobStorageInputFormat{
- SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON: sourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON,
+ var format shared.SourceAzureBlobStorageFormat
+ var sourceAzureBlobStorageAvroFormat *shared.SourceAzureBlobStorageAvroFormat
+ if streamsItem.Format.AvroFormat != nil {
+ doubleAsString := new(bool)
+ if !streamsItem.Format.AvroFormat.DoubleAsString.IsUnknown() && !streamsItem.Format.AvroFormat.DoubleAsString.IsNull() {
+ *doubleAsString = streamsItem.Format.AvroFormat.DoubleAsString.ValueBool()
+ } else {
+ doubleAsString = nil
+ }
+ sourceAzureBlobStorageAvroFormat = &shared.SourceAzureBlobStorageAvroFormat{
+ DoubleAsString: doubleAsString,
+ }
+ }
+ if sourceAzureBlobStorageAvroFormat != nil {
+ format = shared.SourceAzureBlobStorageFormat{
+ SourceAzureBlobStorageAvroFormat: sourceAzureBlobStorageAvroFormat,
+ }
+ }
+ var sourceAzureBlobStorageCSVFormat *shared.SourceAzureBlobStorageCSVFormat
+ if streamsItem.Format.CSVFormat != nil {
+ delimiter := new(string)
+ if !streamsItem.Format.CSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.CSVFormat.Delimiter.IsNull() {
+ *delimiter = streamsItem.Format.CSVFormat.Delimiter.ValueString()
+ } else {
+ delimiter = nil
+ }
+ doubleQuote := new(bool)
+ if !streamsItem.Format.CSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.CSVFormat.DoubleQuote.IsNull() {
+ *doubleQuote = streamsItem.Format.CSVFormat.DoubleQuote.ValueBool()
+ } else {
+ doubleQuote = nil
+ }
+ encoding := new(string)
+ if !streamsItem.Format.CSVFormat.Encoding.IsUnknown() && !streamsItem.Format.CSVFormat.Encoding.IsNull() {
+ *encoding = streamsItem.Format.CSVFormat.Encoding.ValueString()
+ } else {
+ encoding = nil
+ }
+ escapeChar := new(string)
+ if !streamsItem.Format.CSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.CSVFormat.EscapeChar.IsNull() {
+ *escapeChar = streamsItem.Format.CSVFormat.EscapeChar.ValueString()
+ } else {
+ escapeChar = nil
+ }
+ var falseValues []string = nil
+ for _, falseValuesItem := range streamsItem.Format.CSVFormat.FalseValues {
+ falseValues = append(falseValues, falseValuesItem.ValueString())
+ }
+ var headerDefinition *shared.SourceAzureBlobStorageCSVHeaderDefinition
+ if streamsItem.Format.CSVFormat.HeaderDefinition != nil {
+ var sourceAzureBlobStorageFromCSV *shared.SourceAzureBlobStorageFromCSV
+ if streamsItem.Format.CSVFormat.HeaderDefinition.FromCSV != nil {
+ sourceAzureBlobStorageFromCSV = &shared.SourceAzureBlobStorageFromCSV{}
+ }
+ if sourceAzureBlobStorageFromCSV != nil {
+ headerDefinition = &shared.SourceAzureBlobStorageCSVHeaderDefinition{
+ SourceAzureBlobStorageFromCSV: sourceAzureBlobStorageFromCSV,
+ }
+ }
+ var sourceAzureBlobStorageAutogenerated *shared.SourceAzureBlobStorageAutogenerated
+ if streamsItem.Format.CSVFormat.HeaderDefinition.Autogenerated != nil {
+ sourceAzureBlobStorageAutogenerated = &shared.SourceAzureBlobStorageAutogenerated{}
+ }
+ if sourceAzureBlobStorageAutogenerated != nil {
+ headerDefinition = &shared.SourceAzureBlobStorageCSVHeaderDefinition{
+ SourceAzureBlobStorageAutogenerated: sourceAzureBlobStorageAutogenerated,
+ }
+ }
+ var sourceAzureBlobStorageUserProvided *shared.SourceAzureBlobStorageUserProvided
+ if streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided != nil {
+ var columnNames []string = nil
+ for _, columnNamesItem := range streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided.ColumnNames {
+ columnNames = append(columnNames, columnNamesItem.ValueString())
+ }
+ sourceAzureBlobStorageUserProvided = &shared.SourceAzureBlobStorageUserProvided{
+ ColumnNames: columnNames,
+ }
+ }
+ if sourceAzureBlobStorageUserProvided != nil {
+ headerDefinition = &shared.SourceAzureBlobStorageCSVHeaderDefinition{
+ SourceAzureBlobStorageUserProvided: sourceAzureBlobStorageUserProvided,
+ }
+ }
+ }
+ inferenceType := new(shared.SourceAzureBlobStorageInferenceType)
+ if !streamsItem.Format.CSVFormat.InferenceType.IsUnknown() && !streamsItem.Format.CSVFormat.InferenceType.IsNull() {
+ *inferenceType = shared.SourceAzureBlobStorageInferenceType(streamsItem.Format.CSVFormat.InferenceType.ValueString())
+ } else {
+ inferenceType = nil
+ }
+ var nullValues []string = nil
+ for _, nullValuesItem := range streamsItem.Format.CSVFormat.NullValues {
+ nullValues = append(nullValues, nullValuesItem.ValueString())
+ }
+ quoteChar := new(string)
+ if !streamsItem.Format.CSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.CSVFormat.QuoteChar.IsNull() {
+ *quoteChar = streamsItem.Format.CSVFormat.QuoteChar.ValueString()
+ } else {
+ quoteChar = nil
+ }
+ skipRowsAfterHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsNull() {
+ *skipRowsAfterHeader = streamsItem.Format.CSVFormat.SkipRowsAfterHeader.ValueInt64()
+ } else {
+ skipRowsAfterHeader = nil
+ }
+ skipRowsBeforeHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsNull() {
+ *skipRowsBeforeHeader = streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.ValueInt64()
+ } else {
+ skipRowsBeforeHeader = nil
+ }
+ stringsCanBeNull := new(bool)
+ if !streamsItem.Format.CSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.CSVFormat.StringsCanBeNull.IsNull() {
+ *stringsCanBeNull = streamsItem.Format.CSVFormat.StringsCanBeNull.ValueBool()
+ } else {
+ stringsCanBeNull = nil
+ }
+ var trueValues []string = nil
+ for _, trueValuesItem := range streamsItem.Format.CSVFormat.TrueValues {
+ trueValues = append(trueValues, trueValuesItem.ValueString())
+ }
+ sourceAzureBlobStorageCSVFormat = &shared.SourceAzureBlobStorageCSVFormat{
+ Delimiter: delimiter,
+ DoubleQuote: doubleQuote,
+ Encoding: encoding,
+ EscapeChar: escapeChar,
+ FalseValues: falseValues,
+ HeaderDefinition: headerDefinition,
+ InferenceType: inferenceType,
+ NullValues: nullValues,
+ QuoteChar: quoteChar,
+ SkipRowsAfterHeader: skipRowsAfterHeader,
+ SkipRowsBeforeHeader: skipRowsBeforeHeader,
+ StringsCanBeNull: stringsCanBeNull,
+ TrueValues: trueValues,
+ }
+ }
+ if sourceAzureBlobStorageCSVFormat != nil {
+ format = shared.SourceAzureBlobStorageFormat{
+ SourceAzureBlobStorageCSVFormat: sourceAzureBlobStorageCSVFormat,
+ }
+ }
+ var sourceAzureBlobStorageJsonlFormat *shared.SourceAzureBlobStorageJsonlFormat
+ if streamsItem.Format.JsonlFormat != nil {
+ sourceAzureBlobStorageJsonlFormat = &shared.SourceAzureBlobStorageJsonlFormat{}
+ }
+ if sourceAzureBlobStorageJsonlFormat != nil {
+ format = shared.SourceAzureBlobStorageFormat{
+ SourceAzureBlobStorageJsonlFormat: sourceAzureBlobStorageJsonlFormat,
+ }
+ }
+ var sourceAzureBlobStorageParquetFormat *shared.SourceAzureBlobStorageParquetFormat
+ if streamsItem.Format.ParquetFormat != nil {
+ decimalAsFloat := new(bool)
+ if !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsUnknown() && !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsNull() {
+ *decimalAsFloat = streamsItem.Format.ParquetFormat.DecimalAsFloat.ValueBool()
+ } else {
+ decimalAsFloat = nil
+ }
+ sourceAzureBlobStorageParquetFormat = &shared.SourceAzureBlobStorageParquetFormat{
+ DecimalAsFloat: decimalAsFloat,
+ }
+ }
+ if sourceAzureBlobStorageParquetFormat != nil {
+ format = shared.SourceAzureBlobStorageFormat{
+ SourceAzureBlobStorageParquetFormat: sourceAzureBlobStorageParquetFormat,
+ }
}
+ var sourceAzureBlobStorageDocumentFileTypeFormatExperimental *shared.SourceAzureBlobStorageDocumentFileTypeFormatExperimental
+ if streamsItem.Format.DocumentFileTypeFormatExperimental != nil {
+ skipUnprocessableFileTypes := new(bool)
+ if !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsUnknown() && !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsNull() {
+ *skipUnprocessableFileTypes = streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.ValueBool()
+ } else {
+ skipUnprocessableFileTypes = nil
+ }
+ sourceAzureBlobStorageDocumentFileTypeFormatExperimental = &shared.SourceAzureBlobStorageDocumentFileTypeFormatExperimental{
+ SkipUnprocessableFileTypes: skipUnprocessableFileTypes,
+ }
+ }
+ if sourceAzureBlobStorageDocumentFileTypeFormatExperimental != nil {
+ format = shared.SourceAzureBlobStorageFormat{
+ SourceAzureBlobStorageDocumentFileTypeFormatExperimental: sourceAzureBlobStorageDocumentFileTypeFormatExperimental,
+ }
+ }
+ var globs []string = nil
+ for _, globsItem := range streamsItem.Globs {
+ globs = append(globs, globsItem.ValueString())
+ }
+ inputSchema := new(string)
+ if !streamsItem.InputSchema.IsUnknown() && !streamsItem.InputSchema.IsNull() {
+ *inputSchema = streamsItem.InputSchema.ValueString()
+ } else {
+ inputSchema = nil
+ }
+ legacyPrefix := new(string)
+ if !streamsItem.LegacyPrefix.IsUnknown() && !streamsItem.LegacyPrefix.IsNull() {
+ *legacyPrefix = streamsItem.LegacyPrefix.ValueString()
+ } else {
+ legacyPrefix = nil
+ }
+ name := streamsItem.Name.ValueString()
+ primaryKey := new(string)
+ if !streamsItem.PrimaryKey.IsUnknown() && !streamsItem.PrimaryKey.IsNull() {
+ *primaryKey = streamsItem.PrimaryKey.ValueString()
+ } else {
+ primaryKey = nil
+ }
+ schemaless := new(bool)
+ if !streamsItem.Schemaless.IsUnknown() && !streamsItem.Schemaless.IsNull() {
+ *schemaless = streamsItem.Schemaless.ValueBool()
+ } else {
+ schemaless = nil
+ }
+ validationPolicy := new(shared.SourceAzureBlobStorageValidationPolicy)
+ if !streamsItem.ValidationPolicy.IsUnknown() && !streamsItem.ValidationPolicy.IsNull() {
+ *validationPolicy = shared.SourceAzureBlobStorageValidationPolicy(streamsItem.ValidationPolicy.ValueString())
+ } else {
+ validationPolicy = nil
+ }
+ streams = append(streams, shared.SourceAzureBlobStorageFileBasedStreamConfig{
+ DaysToSyncIfHistoryIsFull: daysToSyncIfHistoryIsFull,
+ Format: format,
+ Globs: globs,
+ InputSchema: inputSchema,
+ LegacyPrefix: legacyPrefix,
+ Name: name,
+ PrimaryKey: primaryKey,
+ Schemaless: schemaless,
+ ValidationPolicy: validationPolicy,
+ })
}
- sourceType := shared.SourceAzureBlobStorageAzureBlobStorage(r.Configuration.SourceType.ValueString())
configuration := shared.SourceAzureBlobStorage{
- AzureBlobStorageAccountKey: azureBlobStorageAccountKey,
- AzureBlobStorageAccountName: azureBlobStorageAccountName,
- AzureBlobStorageBlobsPrefix: azureBlobStorageBlobsPrefix,
- AzureBlobStorageContainerName: azureBlobStorageContainerName,
- AzureBlobStorageEndpoint: azureBlobStorageEndpoint,
- AzureBlobStorageSchemaInferenceLimit: azureBlobStorageSchemaInferenceLimit,
- Format: format,
- SourceType: sourceType,
+ AzureBlobStorageAccountKey: azureBlobStorageAccountKey,
+ AzureBlobStorageAccountName: azureBlobStorageAccountName,
+ AzureBlobStorageContainerName: azureBlobStorageContainerName,
+ AzureBlobStorageEndpoint: azureBlobStorageEndpoint,
+ StartDate: startDate,
+ Streams: streams,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
- name := r.Name.ValueString()
+ name1 := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
*secretID = r.SecretID.ValueString()
@@ -63,7 +289,8 @@ func (r *SourceAzureBlobStorageResourceModel) ToCreateSDKType() *shared.SourceAz
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAzureBlobStorageCreateRequest{
Configuration: configuration,
- Name: name,
+ DefinitionID: definitionID,
+ Name: name1,
SecretID: secretID,
WorkspaceID: workspaceID,
}
@@ -78,12 +305,6 @@ func (r *SourceAzureBlobStorageResourceModel) ToGetSDKType() *shared.SourceAzure
func (r *SourceAzureBlobStorageResourceModel) ToUpdateSDKType() *shared.SourceAzureBlobStoragePutRequest {
azureBlobStorageAccountKey := r.Configuration.AzureBlobStorageAccountKey.ValueString()
azureBlobStorageAccountName := r.Configuration.AzureBlobStorageAccountName.ValueString()
- azureBlobStorageBlobsPrefix := new(string)
- if !r.Configuration.AzureBlobStorageBlobsPrefix.IsUnknown() && !r.Configuration.AzureBlobStorageBlobsPrefix.IsNull() {
- *azureBlobStorageBlobsPrefix = r.Configuration.AzureBlobStorageBlobsPrefix.ValueString()
- } else {
- azureBlobStorageBlobsPrefix = nil
- }
azureBlobStorageContainerName := r.Configuration.AzureBlobStorageContainerName.ValueString()
azureBlobStorageEndpoint := new(string)
if !r.Configuration.AzureBlobStorageEndpoint.IsUnknown() && !r.Configuration.AzureBlobStorageEndpoint.IsNull() {
@@ -91,39 +312,266 @@ func (r *SourceAzureBlobStorageResourceModel) ToUpdateSDKType() *shared.SourceAz
} else {
azureBlobStorageEndpoint = nil
}
- azureBlobStorageSchemaInferenceLimit := new(int64)
- if !r.Configuration.AzureBlobStorageSchemaInferenceLimit.IsUnknown() && !r.Configuration.AzureBlobStorageSchemaInferenceLimit.IsNull() {
- *azureBlobStorageSchemaInferenceLimit = r.Configuration.AzureBlobStorageSchemaInferenceLimit.ValueInt64()
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
} else {
- azureBlobStorageSchemaInferenceLimit = nil
+ startDate = nil
}
- var format shared.SourceAzureBlobStorageUpdateInputFormat
- var sourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON *shared.SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON
- if r.Configuration.Format.SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON != nil {
- formatType := shared.SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSONFormatType(r.Configuration.Format.SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON.FormatType.ValueString())
- sourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON = &shared.SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON{
- FormatType: formatType,
+ var streams []shared.FileBasedStreamConfig = nil
+ for _, streamsItem := range r.Configuration.Streams {
+ daysToSyncIfHistoryIsFull := new(int64)
+ if !streamsItem.DaysToSyncIfHistoryIsFull.IsUnknown() && !streamsItem.DaysToSyncIfHistoryIsFull.IsNull() {
+ *daysToSyncIfHistoryIsFull = streamsItem.DaysToSyncIfHistoryIsFull.ValueInt64()
+ } else {
+ daysToSyncIfHistoryIsFull = nil
}
- }
- if sourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON != nil {
- format = shared.SourceAzureBlobStorageUpdateInputFormat{
- SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON: sourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON,
+ var format shared.Format
+ var avroFormat *shared.AvroFormat
+ if streamsItem.Format.AvroFormat != nil {
+ doubleAsString := new(bool)
+ if !streamsItem.Format.AvroFormat.DoubleAsString.IsUnknown() && !streamsItem.Format.AvroFormat.DoubleAsString.IsNull() {
+ *doubleAsString = streamsItem.Format.AvroFormat.DoubleAsString.ValueBool()
+ } else {
+ doubleAsString = nil
+ }
+ avroFormat = &shared.AvroFormat{
+ DoubleAsString: doubleAsString,
+ }
+ }
+ if avroFormat != nil {
+ format = shared.Format{
+ AvroFormat: avroFormat,
+ }
+ }
+ var csvFormat *shared.CSVFormat
+ if streamsItem.Format.CSVFormat != nil {
+ delimiter := new(string)
+ if !streamsItem.Format.CSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.CSVFormat.Delimiter.IsNull() {
+ *delimiter = streamsItem.Format.CSVFormat.Delimiter.ValueString()
+ } else {
+ delimiter = nil
+ }
+ doubleQuote := new(bool)
+ if !streamsItem.Format.CSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.CSVFormat.DoubleQuote.IsNull() {
+ *doubleQuote = streamsItem.Format.CSVFormat.DoubleQuote.ValueBool()
+ } else {
+ doubleQuote = nil
+ }
+ encoding := new(string)
+ if !streamsItem.Format.CSVFormat.Encoding.IsUnknown() && !streamsItem.Format.CSVFormat.Encoding.IsNull() {
+ *encoding = streamsItem.Format.CSVFormat.Encoding.ValueString()
+ } else {
+ encoding = nil
+ }
+ escapeChar := new(string)
+ if !streamsItem.Format.CSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.CSVFormat.EscapeChar.IsNull() {
+ *escapeChar = streamsItem.Format.CSVFormat.EscapeChar.ValueString()
+ } else {
+ escapeChar = nil
+ }
+ var falseValues []string = nil
+ for _, falseValuesItem := range streamsItem.Format.CSVFormat.FalseValues {
+ falseValues = append(falseValues, falseValuesItem.ValueString())
+ }
+ var headerDefinition *shared.CSVHeaderDefinition
+ if streamsItem.Format.CSVFormat.HeaderDefinition != nil {
+ var fromCSV *shared.FromCSV
+ if streamsItem.Format.CSVFormat.HeaderDefinition.FromCSV != nil {
+ fromCSV = &shared.FromCSV{}
+ }
+ if fromCSV != nil {
+ headerDefinition = &shared.CSVHeaderDefinition{
+ FromCSV: fromCSV,
+ }
+ }
+ var autogenerated *shared.Autogenerated
+ if streamsItem.Format.CSVFormat.HeaderDefinition.Autogenerated != nil {
+ autogenerated = &shared.Autogenerated{}
+ }
+ if autogenerated != nil {
+ headerDefinition = &shared.CSVHeaderDefinition{
+ Autogenerated: autogenerated,
+ }
+ }
+ var userProvided *shared.UserProvided
+ if streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided != nil {
+ var columnNames []string = nil
+ for _, columnNamesItem := range streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided.ColumnNames {
+ columnNames = append(columnNames, columnNamesItem.ValueString())
+ }
+ userProvided = &shared.UserProvided{
+ ColumnNames: columnNames,
+ }
+ }
+ if userProvided != nil {
+ headerDefinition = &shared.CSVHeaderDefinition{
+ UserProvided: userProvided,
+ }
+ }
+ }
+ inferenceType := new(shared.InferenceType)
+ if !streamsItem.Format.CSVFormat.InferenceType.IsUnknown() && !streamsItem.Format.CSVFormat.InferenceType.IsNull() {
+ *inferenceType = shared.InferenceType(streamsItem.Format.CSVFormat.InferenceType.ValueString())
+ } else {
+ inferenceType = nil
+ }
+ var nullValues []string = nil
+ for _, nullValuesItem := range streamsItem.Format.CSVFormat.NullValues {
+ nullValues = append(nullValues, nullValuesItem.ValueString())
+ }
+ quoteChar := new(string)
+ if !streamsItem.Format.CSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.CSVFormat.QuoteChar.IsNull() {
+ *quoteChar = streamsItem.Format.CSVFormat.QuoteChar.ValueString()
+ } else {
+ quoteChar = nil
+ }
+ skipRowsAfterHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsNull() {
+ *skipRowsAfterHeader = streamsItem.Format.CSVFormat.SkipRowsAfterHeader.ValueInt64()
+ } else {
+ skipRowsAfterHeader = nil
+ }
+ skipRowsBeforeHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsNull() {
+ *skipRowsBeforeHeader = streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.ValueInt64()
+ } else {
+ skipRowsBeforeHeader = nil
+ }
+ stringsCanBeNull := new(bool)
+ if !streamsItem.Format.CSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.CSVFormat.StringsCanBeNull.IsNull() {
+ *stringsCanBeNull = streamsItem.Format.CSVFormat.StringsCanBeNull.ValueBool()
+ } else {
+ stringsCanBeNull = nil
+ }
+ var trueValues []string = nil
+ for _, trueValuesItem := range streamsItem.Format.CSVFormat.TrueValues {
+ trueValues = append(trueValues, trueValuesItem.ValueString())
+ }
+ csvFormat = &shared.CSVFormat{
+ Delimiter: delimiter,
+ DoubleQuote: doubleQuote,
+ Encoding: encoding,
+ EscapeChar: escapeChar,
+ FalseValues: falseValues,
+ HeaderDefinition: headerDefinition,
+ InferenceType: inferenceType,
+ NullValues: nullValues,
+ QuoteChar: quoteChar,
+ SkipRowsAfterHeader: skipRowsAfterHeader,
+ SkipRowsBeforeHeader: skipRowsBeforeHeader,
+ StringsCanBeNull: stringsCanBeNull,
+ TrueValues: trueValues,
+ }
+ }
+ if csvFormat != nil {
+ format = shared.Format{
+ CSVFormat: csvFormat,
+ }
+ }
+ var jsonlFormat *shared.JsonlFormat
+ if streamsItem.Format.JsonlFormat != nil {
+ jsonlFormat = &shared.JsonlFormat{}
+ }
+ if jsonlFormat != nil {
+ format = shared.Format{
+ JsonlFormat: jsonlFormat,
+ }
+ }
+ var parquetFormat *shared.ParquetFormat
+ if streamsItem.Format.ParquetFormat != nil {
+ decimalAsFloat := new(bool)
+ if !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsUnknown() && !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsNull() {
+ *decimalAsFloat = streamsItem.Format.ParquetFormat.DecimalAsFloat.ValueBool()
+ } else {
+ decimalAsFloat = nil
+ }
+ parquetFormat = &shared.ParquetFormat{
+ DecimalAsFloat: decimalAsFloat,
+ }
+ }
+ if parquetFormat != nil {
+ format = shared.Format{
+ ParquetFormat: parquetFormat,
+ }
+ }
+ var documentFileTypeFormatExperimental *shared.DocumentFileTypeFormatExperimental
+ if streamsItem.Format.DocumentFileTypeFormatExperimental != nil {
+ skipUnprocessableFileTypes := new(bool)
+ if !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsUnknown() && !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsNull() {
+ *skipUnprocessableFileTypes = streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.ValueBool()
+ } else {
+ skipUnprocessableFileTypes = nil
+ }
+ documentFileTypeFormatExperimental = &shared.DocumentFileTypeFormatExperimental{
+ SkipUnprocessableFileTypes: skipUnprocessableFileTypes,
+ }
+ }
+ if documentFileTypeFormatExperimental != nil {
+ format = shared.Format{
+ DocumentFileTypeFormatExperimental: documentFileTypeFormatExperimental,
+ }
+ }
+ var globs []string = nil
+ for _, globsItem := range streamsItem.Globs {
+ globs = append(globs, globsItem.ValueString())
+ }
+ inputSchema := new(string)
+ if !streamsItem.InputSchema.IsUnknown() && !streamsItem.InputSchema.IsNull() {
+ *inputSchema = streamsItem.InputSchema.ValueString()
+ } else {
+ inputSchema = nil
+ }
+ legacyPrefix := new(string)
+ if !streamsItem.LegacyPrefix.IsUnknown() && !streamsItem.LegacyPrefix.IsNull() {
+ *legacyPrefix = streamsItem.LegacyPrefix.ValueString()
+ } else {
+ legacyPrefix = nil
+ }
+ name := streamsItem.Name.ValueString()
+ primaryKey := new(string)
+ if !streamsItem.PrimaryKey.IsUnknown() && !streamsItem.PrimaryKey.IsNull() {
+ *primaryKey = streamsItem.PrimaryKey.ValueString()
+ } else {
+ primaryKey = nil
+ }
+ schemaless := new(bool)
+ if !streamsItem.Schemaless.IsUnknown() && !streamsItem.Schemaless.IsNull() {
+ *schemaless = streamsItem.Schemaless.ValueBool()
+ } else {
+ schemaless = nil
+ }
+ validationPolicy := new(shared.ValidationPolicy)
+ if !streamsItem.ValidationPolicy.IsUnknown() && !streamsItem.ValidationPolicy.IsNull() {
+ *validationPolicy = shared.ValidationPolicy(streamsItem.ValidationPolicy.ValueString())
+ } else {
+ validationPolicy = nil
}
+ streams = append(streams, shared.FileBasedStreamConfig{
+ DaysToSyncIfHistoryIsFull: daysToSyncIfHistoryIsFull,
+ Format: format,
+ Globs: globs,
+ InputSchema: inputSchema,
+ LegacyPrefix: legacyPrefix,
+ Name: name,
+ PrimaryKey: primaryKey,
+ Schemaless: schemaless,
+ ValidationPolicy: validationPolicy,
+ })
}
configuration := shared.SourceAzureBlobStorageUpdate{
- AzureBlobStorageAccountKey: azureBlobStorageAccountKey,
- AzureBlobStorageAccountName: azureBlobStorageAccountName,
- AzureBlobStorageBlobsPrefix: azureBlobStorageBlobsPrefix,
- AzureBlobStorageContainerName: azureBlobStorageContainerName,
- AzureBlobStorageEndpoint: azureBlobStorageEndpoint,
- AzureBlobStorageSchemaInferenceLimit: azureBlobStorageSchemaInferenceLimit,
- Format: format,
+ AzureBlobStorageAccountKey: azureBlobStorageAccountKey,
+ AzureBlobStorageAccountName: azureBlobStorageAccountName,
+ AzureBlobStorageContainerName: azureBlobStorageContainerName,
+ AzureBlobStorageEndpoint: azureBlobStorageEndpoint,
+ StartDate: startDate,
+ Streams: streams,
}
- name := r.Name.ValueString()
+ name1 := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAzureBlobStoragePutRequest{
Configuration: configuration,
- Name: name,
+ Name: name1,
WorkspaceID: workspaceID,
}
return &out
diff --git a/internal/provider/source_azuretable_data_source.go b/internal/provider/source_azuretable_data_source.go
old mode 100755
new mode 100644
index 5d5ef33a7..3aef645a6
--- a/internal/provider/source_azuretable_data_source.go
+++ b/internal/provider/source_azuretable_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceAzureTableDataSource struct {
// SourceAzureTableDataSourceModel describes the data model.
type SourceAzureTableDataSourceModel struct {
- Configuration SourceAzureTable `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,42 +47,20 @@ func (r *SourceAzureTableDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceAzureTable DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "azure-table",
- ),
- },
- Description: `must be one of ["azure-table"]`,
- },
- "storage_access_key": schema.StringAttribute{
- Computed: true,
- Description: `Azure Table Storage Access Key. See the docs for more information on how to obtain this key.`,
- },
- "storage_account_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of your storage account.`,
- },
- "storage_endpoint_suffix": schema.StringAttribute{
- Computed: true,
- Description: `Azure Table Storage service account URL suffix. See the docs for more information on how to obtain endpoint suffix`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_azuretable_data_source_sdk.go b/internal/provider/source_azuretable_data_source_sdk.go
old mode 100755
new mode 100644
index 320e3434f..17191c096
--- a/internal/provider/source_azuretable_data_source_sdk.go
+++ b/internal/provider/source_azuretable_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAzureTableDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_azuretable_resource.go b/internal/provider/source_azuretable_resource.go
old mode 100755
new mode 100644
index d295a8969..b009a88c2
--- a/internal/provider/source_azuretable_resource.go
+++ b/internal/provider/source_azuretable_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceAzureTableResource struct {
// SourceAzureTableResourceModel describes the resource data model.
type SourceAzureTableResourceModel struct {
Configuration SourceAzureTable `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -54,17 +54,9 @@ func (r *SourceAzureTableResource) Schema(ctx context.Context, req resource.Sche
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "azure-table",
- ),
- },
- Description: `must be one of ["azure-table"]`,
- },
"storage_access_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Azure Table Storage Access Key. See the docs for more information on how to obtain this key.`,
},
"storage_account_name": schema.StringAttribute{
@@ -72,18 +64,30 @@ func (r *SourceAzureTableResource) Schema(ctx context.Context, req resource.Sche
Description: `The name of your storage account.`,
},
"storage_endpoint_suffix": schema.StringAttribute{
- Optional: true,
- Description: `Azure Table Storage service account URL suffix. See the docs for more information on how to obtain endpoint suffix`,
+ Optional: true,
+ MarkdownDescription: `Default: "core.windows.net"` + "\n" +
+ `Azure Table Storage service account URL suffix. See the docs for more information on how to obtain endpoint suffix`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +151,7 @@ func (r *SourceAzureTableResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceAzureTable(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +327,5 @@ func (r *SourceAzureTableResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceAzureTableResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_azuretable_resource_sdk.go b/internal/provider/source_azuretable_resource_sdk.go
old mode 100755
new mode 100644
index 3732a056b..70dc61396
--- a/internal/provider/source_azuretable_resource_sdk.go
+++ b/internal/provider/source_azuretable_resource_sdk.go
@@ -3,12 +3,11 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceAzureTableResourceModel) ToCreateSDKType() *shared.SourceAzureTableCreateRequest {
- sourceType := shared.SourceAzureTableAzureTable(r.Configuration.SourceType.ValueString())
storageAccessKey := r.Configuration.StorageAccessKey.ValueString()
storageAccountName := r.Configuration.StorageAccountName.ValueString()
storageEndpointSuffix := new(string)
@@ -18,11 +17,16 @@ func (r *SourceAzureTableResourceModel) ToCreateSDKType() *shared.SourceAzureTab
storageEndpointSuffix = nil
}
configuration := shared.SourceAzureTable{
- SourceType: sourceType,
StorageAccessKey: storageAccessKey,
StorageAccountName: storageAccountName,
StorageEndpointSuffix: storageEndpointSuffix,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -33,6 +37,7 @@ func (r *SourceAzureTableResourceModel) ToCreateSDKType() *shared.SourceAzureTab
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceAzureTableCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_bamboohr_data_source.go b/internal/provider/source_bamboohr_data_source.go
old mode 100755
new mode 100644
index eaa1b9b15..a90392100
--- a/internal/provider/source_bamboohr_data_source.go
+++ b/internal/provider/source_bamboohr_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceBambooHrDataSource struct {
// SourceBambooHrDataSourceModel describes the data model.
type SourceBambooHrDataSourceModel struct {
- Configuration SourceBambooHr `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,46 +47,20 @@ func (r *SourceBambooHrDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceBambooHr DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Api key of bamboo hr`,
- },
- "custom_reports_fields": schema.StringAttribute{
- Computed: true,
- Description: `Comma-separated list of fields to include in custom reports.`,
- },
- "custom_reports_include_default_fields": schema.BoolAttribute{
- Computed: true,
- Description: `If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bamboo-hr",
- ),
- },
- Description: `must be one of ["bamboo-hr"]`,
- },
- "subdomain": schema.StringAttribute{
- Computed: true,
- Description: `Sub Domain of bamboo hr`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_bamboohr_data_source_sdk.go b/internal/provider/source_bamboohr_data_source_sdk.go
old mode 100755
new mode 100644
index 5d2d76ac5..a378d3266
--- a/internal/provider/source_bamboohr_data_source_sdk.go
+++ b/internal/provider/source_bamboohr_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceBambooHrDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_bamboohr_resource.go b/internal/provider/source_bamboohr_resource.go
old mode 100755
new mode 100644
index 22cc94465..42a3368f0
--- a/internal/provider/source_bamboohr_resource.go
+++ b/internal/provider/source_bamboohr_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceBambooHrResource struct {
// SourceBambooHrResourceModel describes the resource data model.
type SourceBambooHrResourceModel struct {
Configuration SourceBambooHr `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,24 +56,18 @@ func (r *SourceBambooHrResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Api key of bamboo hr`,
},
"custom_reports_fields": schema.StringAttribute{
- Optional: true,
- Description: `Comma-separated list of fields to include in custom reports.`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `Comma-separated list of fields to include in custom reports.`,
},
"custom_reports_include_default_fields": schema.BoolAttribute{
- Optional: true,
- Description: `If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bamboo-hr",
- ),
- },
- Description: `must be one of ["bamboo-hr"]`,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names.`,
},
"subdomain": schema.StringAttribute{
Required: true,
@@ -81,13 +75,24 @@ func (r *SourceBambooHrResource) Schema(ctx context.Context, req resource.Schema
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -151,7 +156,7 @@ func (r *SourceBambooHrResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceBambooHr(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -327,5 +332,5 @@ func (r *SourceBambooHrResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceBambooHrResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_bamboohr_resource_sdk.go b/internal/provider/source_bamboohr_resource_sdk.go
old mode 100755
new mode 100644
index 38c0de641..033704975
--- a/internal/provider/source_bamboohr_resource_sdk.go
+++ b/internal/provider/source_bamboohr_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -21,15 +21,19 @@ func (r *SourceBambooHrResourceModel) ToCreateSDKType() *shared.SourceBambooHrCr
} else {
customReportsIncludeDefaultFields = nil
}
- sourceType := shared.SourceBambooHrBambooHr(r.Configuration.SourceType.ValueString())
subdomain := r.Configuration.Subdomain.ValueString()
configuration := shared.SourceBambooHr{
APIKey: apiKey,
CustomReportsFields: customReportsFields,
CustomReportsIncludeDefaultFields: customReportsIncludeDefaultFields,
- SourceType: sourceType,
Subdomain: subdomain,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -40,6 +44,7 @@ func (r *SourceBambooHrResourceModel) ToCreateSDKType() *shared.SourceBambooHrCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceBambooHrCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_bigcommerce_data_source_sdk.go b/internal/provider/source_bigcommerce_data_source_sdk.go
deleted file mode 100755
index f1dfb1de5..000000000
--- a/internal/provider/source_bigcommerce_data_source_sdk.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceBigcommerceDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
diff --git a/internal/provider/source_bigcommerce_resource_sdk.go b/internal/provider/source_bigcommerce_resource_sdk.go
deleted file mode 100755
index 88c4a94d5..000000000
--- a/internal/provider/source_bigcommerce_resource_sdk.go
+++ /dev/null
@@ -1,76 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceBigcommerceResourceModel) ToCreateSDKType() *shared.SourceBigcommerceCreateRequest {
- accessToken := r.Configuration.AccessToken.ValueString()
- sourceType := shared.SourceBigcommerceBigcommerce(r.Configuration.SourceType.ValueString())
- startDate := r.Configuration.StartDate.ValueString()
- storeHash := r.Configuration.StoreHash.ValueString()
- configuration := shared.SourceBigcommerce{
- AccessToken: accessToken,
- SourceType: sourceType,
- StartDate: startDate,
- StoreHash: storeHash,
- }
- name := r.Name.ValueString()
- secretID := new(string)
- if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
- *secretID = r.SecretID.ValueString()
- } else {
- secretID = nil
- }
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceBigcommerceCreateRequest{
- Configuration: configuration,
- Name: name,
- SecretID: secretID,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceBigcommerceResourceModel) ToGetSDKType() *shared.SourceBigcommerceCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceBigcommerceResourceModel) ToUpdateSDKType() *shared.SourceBigcommercePutRequest {
- accessToken := r.Configuration.AccessToken.ValueString()
- startDate := r.Configuration.StartDate.ValueString()
- storeHash := r.Configuration.StoreHash.ValueString()
- configuration := shared.SourceBigcommerceUpdate{
- AccessToken: accessToken,
- StartDate: startDate,
- StoreHash: storeHash,
- }
- name := r.Name.ValueString()
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceBigcommercePutRequest{
- Configuration: configuration,
- Name: name,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceBigcommerceResourceModel) ToDeleteSDKType() *shared.SourceBigcommerceCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceBigcommerceResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.SourceType = types.StringValue(resp.SourceType)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
-
-func (r *SourceBigcommerceResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
- r.RefreshFromGetResponse(resp)
-}
diff --git a/internal/provider/source_bigquery_data_source.go b/internal/provider/source_bigquery_data_source.go
old mode 100755
new mode 100644
index 3265ef716..7397d5d4d
--- a/internal/provider/source_bigquery_data_source.go
+++ b/internal/provider/source_bigquery_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceBigqueryDataSource struct {
// SourceBigqueryDataSourceModel describes the data model.
type SourceBigqueryDataSourceModel struct {
- Configuration SourceBigquery `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,42 +47,20 @@ func (r *SourceBigqueryDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceBigquery DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The contents of your Service Account Key JSON file. See the docs for more information on how to obtain this key.`,
- },
- "dataset_id": schema.StringAttribute{
- Computed: true,
- Description: `The dataset ID to search for tables and views. If you are only loading data from one dataset, setting this option could result in much faster schema discovery.`,
- },
- "project_id": schema.StringAttribute{
- Computed: true,
- Description: `The GCP project ID for the project containing the target BigQuery dataset.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bigquery",
- ),
- },
- Description: `must be one of ["bigquery"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_bigquery_data_source_sdk.go b/internal/provider/source_bigquery_data_source_sdk.go
old mode 100755
new mode 100644
index 7881e647d..ea5b14453
--- a/internal/provider/source_bigquery_data_source_sdk.go
+++ b/internal/provider/source_bigquery_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceBigqueryDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_bigquery_resource.go b/internal/provider/source_bigquery_resource.go
old mode 100755
new mode 100644
index 50cc0f9a1..e16ef1e42
--- a/internal/provider/source_bigquery_resource.go
+++ b/internal/provider/source_bigquery_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceBigqueryResource struct {
// SourceBigqueryResourceModel describes the resource data model.
type SourceBigqueryResourceModel struct {
Configuration SourceBigquery `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -66,24 +66,26 @@ func (r *SourceBigqueryResource) Schema(ctx context.Context, req resource.Schema
Required: true,
Description: `The GCP project ID for the project containing the target BigQuery dataset.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bigquery",
- ),
- },
- Description: `must be one of ["bigquery"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +149,7 @@ func (r *SourceBigqueryResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceBigquery(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +325,5 @@ func (r *SourceBigqueryResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceBigqueryResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_bigquery_resource_sdk.go b/internal/provider/source_bigquery_resource_sdk.go
old mode 100755
new mode 100644
index 10701d7f0..e0ad06695
--- a/internal/provider/source_bigquery_resource_sdk.go
+++ b/internal/provider/source_bigquery_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -16,12 +16,16 @@ func (r *SourceBigqueryResourceModel) ToCreateSDKType() *shared.SourceBigqueryCr
datasetID = nil
}
projectID := r.Configuration.ProjectID.ValueString()
- sourceType := shared.SourceBigqueryBigquery(r.Configuration.SourceType.ValueString())
configuration := shared.SourceBigquery{
CredentialsJSON: credentialsJSON,
DatasetID: datasetID,
ProjectID: projectID,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -33,6 +37,7 @@ func (r *SourceBigqueryResourceModel) ToCreateSDKType() *shared.SourceBigqueryCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceBigqueryCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_bingads_data_source.go b/internal/provider/source_bingads_data_source.go
old mode 100755
new mode 100644
index d6acef812..73f26875b
--- a/internal/provider/source_bingads_data_source.go
+++ b/internal/provider/source_bingads_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceBingAdsDataSource struct {
// SourceBingAdsDataSourceModel describes the data model.
type SourceBingAdsDataSourceModel struct {
- Configuration SourceBingAds `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,70 +47,20 @@ func (r *SourceBingAdsDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceBingAds DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Microsoft Advertising developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Microsoft Advertising developer application.`,
- },
- "developer_token": schema.StringAttribute{
- Computed: true,
- Description: `Developer token associated with user. See more info in the docs.`,
- },
- "lookback_window": schema.Int64Attribute{
- Computed: true,
- Description: `Also known as attribution or conversion window. How far into the past to look for records (in days). If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. Used only for performance report streams in incremental mode.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token to renew the expired Access Token.`,
- },
- "reports_start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bing-ads",
- ),
- },
- Description: `must be one of ["bing-ads"]`,
- },
- "tenant_id": schema.StringAttribute{
- Computed: true,
- Description: `The Tenant ID of your Microsoft Advertising developer application. Set this to "common" unless you know you need a different value.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_bingads_data_source_sdk.go b/internal/provider/source_bingads_data_source_sdk.go
old mode 100755
new mode 100644
index 902cf595d..d02a0b1f6
--- a/internal/provider/source_bingads_data_source_sdk.go
+++ b/internal/provider/source_bingads_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceBingAdsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_bingads_resource.go b/internal/provider/source_bingads_resource.go
old mode 100755
new mode 100644
index 99af7b082..b90273ac4
--- a/internal/provider/source_bingads_resource.go
+++ b/internal/provider/source_bingads_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceBingAdsResource struct {
// SourceBingAdsResourceModel describes the resource data model.
type SourceBingAdsResourceModel struct {
Configuration SourceBingAds `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -55,64 +57,127 @@ func (r *SourceBingAdsResource) Schema(ctx context.Context, req resource.SchemaR
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of your Microsoft Advertising developer application.`,
},
"client_secret": schema.StringAttribute{
- Optional: true,
- Description: `The Client Secret of your Microsoft Advertising developer application.`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `The Client Secret of your Microsoft Advertising developer application.`,
+ },
+ "custom_reports": schema.ListNestedAttribute{
+ Optional: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "name": schema.StringAttribute{
+ Required: true,
+ Description: `The name of the custom report, this name would be used as stream name`,
+ },
+ "report_aggregation": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "[Hourly]"` + "\n" +
+ `A list of available aggregations.`,
+ },
+ "report_columns": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ Description: `A list of available report object columns. You can find it in description of reporting object that you want to add to custom report.`,
+ },
+ "reporting_object": schema.StringAttribute{
+ Required: true,
+ MarkdownDescription: `must be one of ["AccountPerformanceReportRequest", "AdDynamicTextPerformanceReportRequest", "AdExtensionByAdReportRequest", "AdExtensionByKeywordReportRequest", "AdExtensionDetailReportRequest", "AdGroupPerformanceReportRequest", "AdPerformanceReportRequest", "AgeGenderAudienceReportRequest", "AudiencePerformanceReportRequest", "CallDetailReportRequest", "CampaignPerformanceReportRequest", "ConversionPerformanceReportRequest", "DestinationUrlPerformanceReportRequest", "DSAAutoTargetPerformanceReportRequest", "DSACategoryPerformanceReportRequest", "DSASearchQueryPerformanceReportRequest", "GeographicPerformanceReportRequest", "GoalsAndFunnelsReportRequest", "HotelDimensionPerformanceReportRequest", "HotelGroupPerformanceReportRequest", "KeywordPerformanceReportRequest", "NegativeKeywordConflictReportRequest", "ProductDimensionPerformanceReportRequest", "ProductMatchCountReportRequest", "ProductNegativeKeywordConflictReportRequest", "ProductPartitionPerformanceReportRequest", "ProductPartitionUnitPerformanceReportRequest", "ProductSearchQueryPerformanceReportRequest", "ProfessionalDemographicsAudienceReportRequest", "PublisherUsagePerformanceReportRequest", "SearchCampaignChangeHistoryReportRequest", "SearchQueryPerformanceReportRequest", "ShareOfVoiceReportRequest", "UserLocationPerformanceReportRequest"]` + "\n" +
+ `The name of the the object derives from the ReportRequest object. You can find it in Bing Ads Api docs - Reporting API - Reporting Data Objects.`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "AccountPerformanceReportRequest",
+ "AdDynamicTextPerformanceReportRequest",
+ "AdExtensionByAdReportRequest",
+ "AdExtensionByKeywordReportRequest",
+ "AdExtensionDetailReportRequest",
+ "AdGroupPerformanceReportRequest",
+ "AdPerformanceReportRequest",
+ "AgeGenderAudienceReportRequest",
+ "AudiencePerformanceReportRequest",
+ "CallDetailReportRequest",
+ "CampaignPerformanceReportRequest",
+ "ConversionPerformanceReportRequest",
+ "DestinationUrlPerformanceReportRequest",
+ "DSAAutoTargetPerformanceReportRequest",
+ "DSACategoryPerformanceReportRequest",
+ "DSASearchQueryPerformanceReportRequest",
+ "GeographicPerformanceReportRequest",
+ "GoalsAndFunnelsReportRequest",
+ "HotelDimensionPerformanceReportRequest",
+ "HotelGroupPerformanceReportRequest",
+ "KeywordPerformanceReportRequest",
+ "NegativeKeywordConflictReportRequest",
+ "ProductDimensionPerformanceReportRequest",
+ "ProductMatchCountReportRequest",
+ "ProductNegativeKeywordConflictReportRequest",
+ "ProductPartitionPerformanceReportRequest",
+ "ProductPartitionUnitPerformanceReportRequest",
+ "ProductSearchQueryPerformanceReportRequest",
+ "ProfessionalDemographicsAudienceReportRequest",
+ "PublisherUsagePerformanceReportRequest",
+ "SearchCampaignChangeHistoryReportRequest",
+ "SearchQueryPerformanceReportRequest",
+ "ShareOfVoiceReportRequest",
+ "UserLocationPerformanceReportRequest",
+ ),
+ },
+ },
+ },
+ },
+ Description: `You can add your Custom Bing Ads report by creating one.`,
},
"developer_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Developer token associated with user. See more info in the docs.`,
},
"lookback_window": schema.Int64Attribute{
- Optional: true,
- Description: `Also known as attribution or conversion window. How far into the past to look for records (in days). If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. Used only for performance report streams in incremental mode.`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `Also known as attribution or conversion window. How far into the past to look for records (in days). If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. Used only for performance report streams in incremental mode without specified Reports Start Date.`,
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Refresh Token to renew the expired Access Token.`,
},
"reports_start_date": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format. If not set, data from previous and current calendar year will be replicated.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bing-ads",
- ),
- },
- Description: `must be one of ["bing-ads"]`,
},
"tenant_id": schema.StringAttribute{
- Optional: true,
- Description: `The Tenant ID of your Microsoft Advertising developer application. Set this to "common" unless you know you need a different value.`,
+ Optional: true,
+ MarkdownDescription: `Default: "common"` + "\n" +
+ `The Tenant ID of your Microsoft Advertising developer application. Set this to "common" unless you know you need a different value.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -176,7 +241,7 @@ func (r *SourceBingAdsResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceBingAds(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -352,5 +417,5 @@ func (r *SourceBingAdsResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceBingAdsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_bingads_resource_sdk.go b/internal/provider/source_bingads_resource_sdk.go
old mode 100755
new mode 100644
index 7bedef5bd..1b7baa699
--- a/internal/provider/source_bingads_resource_sdk.go
+++ b/internal/provider/source_bingads_resource_sdk.go
@@ -3,18 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceBingAdsResourceModel) ToCreateSDKType() *shared.SourceBingAdsCreateRequest {
- authMethod := new(shared.SourceBingAdsAuthMethod)
- if !r.Configuration.AuthMethod.IsUnknown() && !r.Configuration.AuthMethod.IsNull() {
- *authMethod = shared.SourceBingAdsAuthMethod(r.Configuration.AuthMethod.ValueString())
- } else {
- authMethod = nil
- }
clientID := r.Configuration.ClientID.ValueString()
clientSecret := new(string)
if !r.Configuration.ClientSecret.IsUnknown() && !r.Configuration.ClientSecret.IsNull() {
@@ -22,6 +16,27 @@ func (r *SourceBingAdsResourceModel) ToCreateSDKType() *shared.SourceBingAdsCrea
} else {
clientSecret = nil
}
+ var customReports []shared.SourceBingAdsCustomReportConfig = nil
+ for _, customReportsItem := range r.Configuration.CustomReports {
+ name := customReportsItem.Name.ValueString()
+ reportAggregation := new(string)
+ if !customReportsItem.ReportAggregation.IsUnknown() && !customReportsItem.ReportAggregation.IsNull() {
+ *reportAggregation = customReportsItem.ReportAggregation.ValueString()
+ } else {
+ reportAggregation = nil
+ }
+ var reportColumns []string = nil
+ for _, reportColumnsItem := range customReportsItem.ReportColumns {
+ reportColumns = append(reportColumns, reportColumnsItem.ValueString())
+ }
+ reportingObject := shared.SourceBingAdsReportingDataObject(customReportsItem.ReportingObject.ValueString())
+ customReports = append(customReports, shared.SourceBingAdsCustomReportConfig{
+ Name: name,
+ ReportAggregation: reportAggregation,
+ ReportColumns: reportColumns,
+ ReportingObject: reportingObject,
+ })
+ }
developerToken := r.Configuration.DeveloperToken.ValueString()
lookbackWindow := new(int64)
if !r.Configuration.LookbackWindow.IsUnknown() && !r.Configuration.LookbackWindow.IsNull() {
@@ -30,8 +45,12 @@ func (r *SourceBingAdsResourceModel) ToCreateSDKType() *shared.SourceBingAdsCrea
lookbackWindow = nil
}
refreshToken := r.Configuration.RefreshToken.ValueString()
- reportsStartDate := customTypes.MustDateFromString(r.Configuration.ReportsStartDate.ValueString())
- sourceType := shared.SourceBingAdsBingAds(r.Configuration.SourceType.ValueString())
+ reportsStartDate := new(customTypes.Date)
+ if !r.Configuration.ReportsStartDate.IsUnknown() && !r.Configuration.ReportsStartDate.IsNull() {
+ reportsStartDate = customTypes.MustNewDateFromString(r.Configuration.ReportsStartDate.ValueString())
+ } else {
+ reportsStartDate = nil
+ }
tenantID := new(string)
if !r.Configuration.TenantID.IsUnknown() && !r.Configuration.TenantID.IsNull() {
*tenantID = r.Configuration.TenantID.ValueString()
@@ -39,17 +58,22 @@ func (r *SourceBingAdsResourceModel) ToCreateSDKType() *shared.SourceBingAdsCrea
tenantID = nil
}
configuration := shared.SourceBingAds{
- AuthMethod: authMethod,
ClientID: clientID,
ClientSecret: clientSecret,
+ CustomReports: customReports,
DeveloperToken: developerToken,
LookbackWindow: lookbackWindow,
RefreshToken: refreshToken,
ReportsStartDate: reportsStartDate,
- SourceType: sourceType,
TenantID: tenantID,
}
- name := r.Name.ValueString()
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
+ name1 := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
*secretID = r.SecretID.ValueString()
@@ -59,7 +83,8 @@ func (r *SourceBingAdsResourceModel) ToCreateSDKType() *shared.SourceBingAdsCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceBingAdsCreateRequest{
Configuration: configuration,
- Name: name,
+ DefinitionID: definitionID,
+ Name: name1,
SecretID: secretID,
WorkspaceID: workspaceID,
}
@@ -72,12 +97,6 @@ func (r *SourceBingAdsResourceModel) ToGetSDKType() *shared.SourceBingAdsCreateR
}
func (r *SourceBingAdsResourceModel) ToUpdateSDKType() *shared.SourceBingAdsPutRequest {
- authMethod := new(shared.SourceBingAdsUpdateAuthMethod)
- if !r.Configuration.AuthMethod.IsUnknown() && !r.Configuration.AuthMethod.IsNull() {
- *authMethod = shared.SourceBingAdsUpdateAuthMethod(r.Configuration.AuthMethod.ValueString())
- } else {
- authMethod = nil
- }
clientID := r.Configuration.ClientID.ValueString()
clientSecret := new(string)
if !r.Configuration.ClientSecret.IsUnknown() && !r.Configuration.ClientSecret.IsNull() {
@@ -85,6 +104,27 @@ func (r *SourceBingAdsResourceModel) ToUpdateSDKType() *shared.SourceBingAdsPutR
} else {
clientSecret = nil
}
+ var customReports []shared.CustomReportConfig = nil
+ for _, customReportsItem := range r.Configuration.CustomReports {
+ name := customReportsItem.Name.ValueString()
+ reportAggregation := new(string)
+ if !customReportsItem.ReportAggregation.IsUnknown() && !customReportsItem.ReportAggregation.IsNull() {
+ *reportAggregation = customReportsItem.ReportAggregation.ValueString()
+ } else {
+ reportAggregation = nil
+ }
+ var reportColumns []string = nil
+ for _, reportColumnsItem := range customReportsItem.ReportColumns {
+ reportColumns = append(reportColumns, reportColumnsItem.ValueString())
+ }
+ reportingObject := shared.ReportingDataObject(customReportsItem.ReportingObject.ValueString())
+ customReports = append(customReports, shared.CustomReportConfig{
+ Name: name,
+ ReportAggregation: reportAggregation,
+ ReportColumns: reportColumns,
+ ReportingObject: reportingObject,
+ })
+ }
developerToken := r.Configuration.DeveloperToken.ValueString()
lookbackWindow := new(int64)
if !r.Configuration.LookbackWindow.IsUnknown() && !r.Configuration.LookbackWindow.IsNull() {
@@ -93,7 +133,12 @@ func (r *SourceBingAdsResourceModel) ToUpdateSDKType() *shared.SourceBingAdsPutR
lookbackWindow = nil
}
refreshToken := r.Configuration.RefreshToken.ValueString()
- reportsStartDate := customTypes.MustDateFromString(r.Configuration.ReportsStartDate.ValueString())
+ reportsStartDate := new(customTypes.Date)
+ if !r.Configuration.ReportsStartDate.IsUnknown() && !r.Configuration.ReportsStartDate.IsNull() {
+ reportsStartDate = customTypes.MustNewDateFromString(r.Configuration.ReportsStartDate.ValueString())
+ } else {
+ reportsStartDate = nil
+ }
tenantID := new(string)
if !r.Configuration.TenantID.IsUnknown() && !r.Configuration.TenantID.IsNull() {
*tenantID = r.Configuration.TenantID.ValueString()
@@ -101,20 +146,20 @@ func (r *SourceBingAdsResourceModel) ToUpdateSDKType() *shared.SourceBingAdsPutR
tenantID = nil
}
configuration := shared.SourceBingAdsUpdate{
- AuthMethod: authMethod,
ClientID: clientID,
ClientSecret: clientSecret,
+ CustomReports: customReports,
DeveloperToken: developerToken,
LookbackWindow: lookbackWindow,
RefreshToken: refreshToken,
ReportsStartDate: reportsStartDate,
TenantID: tenantID,
}
- name := r.Name.ValueString()
+ name1 := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceBingAdsPutRequest{
Configuration: configuration,
- Name: name,
+ Name: name1,
WorkspaceID: workspaceID,
}
return &out
diff --git a/internal/provider/source_braintree_data_source.go b/internal/provider/source_braintree_data_source.go
old mode 100755
new mode 100644
index 8dee5de99..d98cd6a7a
--- a/internal/provider/source_braintree_data_source.go
+++ b/internal/provider/source_braintree_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceBraintreeDataSource struct {
// SourceBraintreeDataSourceModel describes the data model.
type SourceBraintreeDataSourceModel struct {
- Configuration SourceBraintree `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,62 +47,20 @@ func (r *SourceBraintreeDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceBraintree DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "environment": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Development",
- "Sandbox",
- "Qa",
- "Production",
- ),
- },
- MarkdownDescription: `must be one of ["Development", "Sandbox", "Qa", "Production"]` + "\n" +
- `Environment specifies where the data will come from.`,
- },
- "merchant_id": schema.StringAttribute{
- Computed: true,
- Description: `The unique identifier for your entire gateway account. See the docs for more information on how to obtain this ID.`,
- },
- "private_key": schema.StringAttribute{
- Computed: true,
- Description: `Braintree Private Key. See the docs for more information on how to obtain this key.`,
- },
- "public_key": schema.StringAttribute{
- Computed: true,
- Description: `Braintree Public Key. See the docs for more information on how to obtain this key.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "braintree",
- ),
- },
- Description: `must be one of ["braintree"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_braintree_data_source_sdk.go b/internal/provider/source_braintree_data_source_sdk.go
old mode 100755
new mode 100644
index 7c9b86ac0..dfd0dc72a
--- a/internal/provider/source_braintree_data_source_sdk.go
+++ b/internal/provider/source_braintree_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceBraintreeDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_braintree_resource.go b/internal/provider/source_braintree_resource.go
old mode 100755
new mode 100644
index 98a0f6ade..9fbcaf310
--- a/internal/provider/source_braintree_resource.go
+++ b/internal/provider/source_braintree_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceBraintreeResource struct {
// SourceBraintreeResourceModel describes the resource data model.
type SourceBraintreeResourceModel struct {
Configuration SourceBraintree `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,6 +59,8 @@ func (r *SourceBraintreeResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"environment": schema.StringAttribute{
Required: true,
+ MarkdownDescription: `must be one of ["Development", "Sandbox", "Qa", "Production"]` + "\n" +
+ `Environment specifies where the data will come from.`,
Validators: []validator.String{
stringvalidator.OneOf(
"Development",
@@ -65,8 +69,6 @@ func (r *SourceBraintreeResource) Schema(ctx context.Context, req resource.Schem
"Production",
),
},
- MarkdownDescription: `must be one of ["Development", "Sandbox", "Qa", "Production"]` + "\n" +
- `Environment specifies where the data will come from.`,
},
"merchant_id": schema.StringAttribute{
Required: true,
@@ -74,37 +76,41 @@ func (r *SourceBraintreeResource) Schema(ctx context.Context, req resource.Schem
},
"private_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Braintree Private Key. See the docs for more information on how to obtain this key.`,
},
"public_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Braintree Public Key. See the docs for more information on how to obtain this key.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "braintree",
- ),
- },
- Description: `must be one of ["braintree"]`,
- },
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -168,7 +174,7 @@ func (r *SourceBraintreeResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceBraintree(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -344,5 +350,5 @@ func (r *SourceBraintreeResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceBraintreeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_braintree_resource_sdk.go b/internal/provider/source_braintree_resource_sdk.go
old mode 100755
new mode 100644
index 1a1602eae..d3f5b5ac4
--- a/internal/provider/source_braintree_resource_sdk.go
+++ b/internal/provider/source_braintree_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -13,7 +13,6 @@ func (r *SourceBraintreeResourceModel) ToCreateSDKType() *shared.SourceBraintree
merchantID := r.Configuration.MerchantID.ValueString()
privateKey := r.Configuration.PrivateKey.ValueString()
publicKey := r.Configuration.PublicKey.ValueString()
- sourceType := shared.SourceBraintreeBraintree(r.Configuration.SourceType.ValueString())
startDate := new(time.Time)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
@@ -25,9 +24,14 @@ func (r *SourceBraintreeResourceModel) ToCreateSDKType() *shared.SourceBraintree
MerchantID: merchantID,
PrivateKey: privateKey,
PublicKey: publicKey,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -38,6 +42,7 @@ func (r *SourceBraintreeResourceModel) ToCreateSDKType() *shared.SourceBraintree
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceBraintreeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_braze_data_source.go b/internal/provider/source_braze_data_source.go
old mode 100755
new mode 100644
index 726e2b04b..fb5b7d4ff
--- a/internal/provider/source_braze_data_source.go
+++ b/internal/provider/source_braze_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceBrazeDataSource struct {
// SourceBrazeDataSourceModel describes the data model.
type SourceBrazeDataSourceModel struct {
- Configuration SourceBraze `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,45 +47,20 @@ func (r *SourceBrazeDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceBraze DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Braze REST API key`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "braze",
- ),
- },
- Description: `must be one of ["braze"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `Rows after this date will be synced`,
- },
- "url": schema.StringAttribute{
- Computed: true,
- Description: `Braze REST API endpoint`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_braze_data_source_sdk.go b/internal/provider/source_braze_data_source_sdk.go
old mode 100755
new mode 100644
index 7323670a2..01f03ce5f
--- a/internal/provider/source_braze_data_source_sdk.go
+++ b/internal/provider/source_braze_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceBrazeDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_braze_resource.go b/internal/provider/source_braze_resource.go
old mode 100755
new mode 100644
index 09a303212..99178dd19
--- a/internal/provider/source_braze_resource.go
+++ b/internal/provider/source_braze_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceBrazeResource struct {
// SourceBrazeResourceModel describes the resource data model.
type SourceBrazeResourceModel struct {
Configuration SourceBraze `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,23 +58,15 @@ func (r *SourceBrazeResource) Schema(ctx context.Context, req resource.SchemaReq
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Braze REST API key`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "braze",
- ),
- },
- Description: `must be one of ["braze"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `Rows after this date will be synced`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `Rows after this date will be synced`,
},
"url": schema.StringAttribute{
Required: true,
@@ -81,13 +74,24 @@ func (r *SourceBrazeResource) Schema(ctx context.Context, req resource.SchemaReq
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -151,7 +155,7 @@ func (r *SourceBrazeResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceBraze(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -327,5 +331,5 @@ func (r *SourceBrazeResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceBrazeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_braze_resource_sdk.go b/internal/provider/source_braze_resource_sdk.go
old mode 100755
new mode 100644
index b314b5c7e..62607f7ca
--- a/internal/provider/source_braze_resource_sdk.go
+++ b/internal/provider/source_braze_resource_sdk.go
@@ -3,21 +3,25 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceBrazeResourceModel) ToCreateSDKType() *shared.SourceBrazeCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceBrazeBraze(r.Configuration.SourceType.ValueString())
startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
url := r.Configuration.URL.ValueString()
configuration := shared.SourceBraze{
- APIKey: apiKey,
- SourceType: sourceType,
- StartDate: startDate,
- URL: url,
+ APIKey: apiKey,
+ StartDate: startDate,
+ URL: url,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -29,6 +33,7 @@ func (r *SourceBrazeResourceModel) ToCreateSDKType() *shared.SourceBrazeCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceBrazeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_cart_data_source.go b/internal/provider/source_cart_data_source.go
new file mode 100644
index 000000000..3b3dae8f1
--- /dev/null
+++ b/internal/provider/source_cart_data_source.go
@@ -0,0 +1,137 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ datasource.DataSource = &SourceCartDataSource{}
+var _ datasource.DataSourceWithConfigure = &SourceCartDataSource{}
+
+func NewSourceCartDataSource() datasource.DataSource {
+ return &SourceCartDataSource{}
+}
+
+// SourceCartDataSource is the data source implementation.
+type SourceCartDataSource struct {
+ client *sdk.SDK
+}
+
+// SourceCartDataSourceModel describes the data model.
+type SourceCartDataSourceModel struct {
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+// Metadata returns the data source type name.
+func (r *SourceCartDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_source_cart"
+}
+
+// Schema defines the schema for the data source.
+func (r *SourceCartDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "SourceCart DataSource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.StringAttribute{
+ Computed: true,
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ },
+ "source_id": schema.StringAttribute{
+ Required: true,
+ },
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
+ "workspace_id": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ }
+}
+
+func (r *SourceCartDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ client, ok := req.ProviderData.(*sdk.SDK)
+
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected DataSource Configure Type",
+ fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = client
+}
+
+func (r *SourceCartDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data *SourceCartDataSourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ sourceID := data.SourceID.ValueString()
+ request := operations.GetSourceCartRequest{
+ SourceID: sourceID,
+ }
+ res, err := r.client.Sources.GetSourceCart(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.SourceResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(res.SourceResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/internal/provider/source_cart_data_source_sdk.go b/internal/provider/source_cart_data_source_sdk.go
new file mode 100644
index 000000000..250bc822c
--- /dev/null
+++ b/internal/provider/source_cart_data_source_sdk.go
@@ -0,0 +1,18 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *SourceCartDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
+ r.Name = types.StringValue(resp.Name)
+ r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
diff --git a/internal/provider/source_xero_resource.go b/internal/provider/source_cart_resource.go
old mode 100755
new mode 100644
similarity index 64%
rename from internal/provider/source_xero_resource.go
rename to internal/provider/source_cart_resource.go
index 060b899db..0e9a1bd9f
--- a/internal/provider/source_xero_resource.go
+++ b/internal/provider/source_cart_resource.go
@@ -3,39 +3,40 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
// Ensure provider defined types fully satisfy framework interfaces.
-var _ resource.Resource = &SourceXeroResource{}
-var _ resource.ResourceWithImportState = &SourceXeroResource{}
+var _ resource.Resource = &SourceCartResource{}
+var _ resource.ResourceWithImportState = &SourceCartResource{}
-func NewSourceXeroResource() resource.Resource {
- return &SourceXeroResource{}
+func NewSourceCartResource() resource.Resource {
+ return &SourceCartResource{}
}
-// SourceXeroResource defines the resource implementation.
-type SourceXeroResource struct {
+// SourceCartResource defines the resource implementation.
+type SourceCartResource struct {
client *sdk.SDK
}
-// SourceXeroResourceModel describes the resource data model.
-type SourceXeroResourceModel struct {
- Configuration SourceXero `tfsdk:"configuration"`
+// SourceCartResourceModel describes the resource data model.
+type SourceCartResourceModel struct {
+ Configuration SourceCart `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -43,72 +44,81 @@ type SourceXeroResourceModel struct {
WorkspaceID types.String `tfsdk:"workspace_id"`
}
-func (r *SourceXeroResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_xero"
+func (r *SourceCartResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_source_cart"
}
-func (r *SourceXeroResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+func (r *SourceCartResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
resp.Schema = schema.Schema{
- MarkdownDescription: "SourceXero Resource",
+ MarkdownDescription: "SourceCart Resource",
Attributes: map[string]schema.Attribute{
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "authentication": schema.SingleNestedAttribute{
- Required: true,
+ "credentials": schema.SingleNestedAttribute{
+ Optional: true,
Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Enter your Xero application's access token`,
+ "central_api_router": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "site_id": schema.StringAttribute{
+ Required: true,
+ Description: `You can determine a site provisioning site Id by hitting https://site.com/store/sitemonitor.aspx and reading the response param PSID`,
+ },
+ "user_name": schema.StringAttribute{
+ Required: true,
+ Description: `Enter your application's User Name`,
+ },
+ "user_secret": schema.StringAttribute{
+ Required: true,
+ Description: `Enter your application's User Secret`,
+ },
+ },
},
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `Enter your Xero application's Client ID`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `Enter your Xero application's Client Secret`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `Enter your Xero application's refresh token`,
- },
- "token_expiry_date": schema.StringAttribute{
- Required: true,
- Description: `The date-time when the access token should be refreshed`,
+ "single_store_access_token": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "access_token": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ Description: `Access Token for making authenticated requests.`,
+ },
+ "store_name": schema.StringAttribute{
+ Required: true,
+ Description: `The name of Cart.com Online Store. All API URLs start with https://[mystorename.com]/api/v1/, where [mystorename.com] is the domain name of your store.`,
+ },
+ },
},
},
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xero",
- ),
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
},
- Description: `must be one of ["xero"]`,
},
"start_date": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format YYYY-MM-DDTHH:mm:ssZ. Any data with created_at before this data will not be synced.`,
- },
- "tenant_id": schema.StringAttribute{
Required: true,
- Description: `Enter your Xero organization's Tenant ID`,
+ Description: `The date from which you'd like to replicate the data`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -134,7 +144,7 @@ func (r *SourceXeroResource) Schema(ctx context.Context, req resource.SchemaRequ
}
}
-func (r *SourceXeroResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+func (r *SourceCartResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
// Prevent panic if the provider has not been configured.
if req.ProviderData == nil {
return
@@ -154,8 +164,8 @@ func (r *SourceXeroResource) Configure(ctx context.Context, req resource.Configu
r.client = client
}
-func (r *SourceXeroResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data *SourceXeroResourceModel
+func (r *SourceCartResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data *SourceCartResourceModel
var item types.Object
resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
@@ -172,8 +182,8 @@ func (r *SourceXeroResource) Create(ctx context.Context, req resource.CreateRequ
return
}
- request := *data.ToCreateSDKType()
- res, err := r.client.Sources.CreateSourceXero(ctx, request)
+ request := data.ToCreateSDKType()
+ res, err := r.client.Sources.CreateSourceCart(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -199,8 +209,8 @@ func (r *SourceXeroResource) Create(ctx context.Context, req resource.CreateRequ
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
}
-func (r *SourceXeroResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data *SourceXeroResourceModel
+func (r *SourceCartResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var data *SourceCartResourceModel
var item types.Object
resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
@@ -218,10 +228,10 @@ func (r *SourceXeroResource) Read(ctx context.Context, req resource.ReadRequest,
}
sourceID := data.SourceID.ValueString()
- request := operations.GetSourceXeroRequest{
+ request := operations.GetSourceCartRequest{
SourceID: sourceID,
}
- res, err := r.client.Sources.GetSourceXero(ctx, request)
+ res, err := r.client.Sources.GetSourceCart(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -247,20 +257,20 @@ func (r *SourceXeroResource) Read(ctx context.Context, req resource.ReadRequest,
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
}
-func (r *SourceXeroResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data *SourceXeroResourceModel
+func (r *SourceCartResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data *SourceCartResourceModel
merge(ctx, req, resp, &data)
if resp.Diagnostics.HasError() {
return
}
- sourceXeroPutRequest := data.ToUpdateSDKType()
+ sourceCartPutRequest := data.ToUpdateSDKType()
sourceID := data.SourceID.ValueString()
- request := operations.PutSourceXeroRequest{
- SourceXeroPutRequest: sourceXeroPutRequest,
+ request := operations.PutSourceCartRequest{
+ SourceCartPutRequest: sourceCartPutRequest,
SourceID: sourceID,
}
- res, err := r.client.Sources.PutSourceXero(ctx, request)
+ res, err := r.client.Sources.PutSourceCart(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -277,10 +287,10 @@ func (r *SourceXeroResource) Update(ctx context.Context, req resource.UpdateRequ
return
}
sourceId1 := data.SourceID.ValueString()
- getRequest := operations.GetSourceXeroRequest{
+ getRequest := operations.GetSourceCartRequest{
SourceID: sourceId1,
}
- getResponse, err := r.client.Sources.GetSourceXero(ctx, getRequest)
+ getResponse, err := r.client.Sources.GetSourceCart(ctx, getRequest)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -306,8 +316,8 @@ func (r *SourceXeroResource) Update(ctx context.Context, req resource.UpdateRequ
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
}
-func (r *SourceXeroResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data *SourceXeroResourceModel
+func (r *SourceCartResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var data *SourceCartResourceModel
var item types.Object
resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
@@ -325,10 +335,10 @@ func (r *SourceXeroResource) Delete(ctx context.Context, req resource.DeleteRequ
}
sourceID := data.SourceID.ValueString()
- request := operations.DeleteSourceXeroRequest{
+ request := operations.DeleteSourceCartRequest{
SourceID: sourceID,
}
- res, err := r.client.Sources.DeleteSourceXero(ctx, request)
+ res, err := r.client.Sources.DeleteSourceCart(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -347,6 +357,6 @@ func (r *SourceXeroResource) Delete(ctx context.Context, req resource.DeleteRequ
}
-func (r *SourceXeroResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+func (r *SourceCartResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_cart_resource_sdk.go b/internal/provider/source_cart_resource_sdk.go
new file mode 100644
index 000000000..3ef5095d4
--- /dev/null
+++ b/internal/provider/source_cart_resource_sdk.go
@@ -0,0 +1,141 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *SourceCartResourceModel) ToCreateSDKType() *shared.SourceCartCreateRequest {
+ var credentials *shared.SourceCartAuthorizationMethod
+ if r.Configuration.Credentials != nil {
+ var sourceCartCentralAPIRouter *shared.SourceCartCentralAPIRouter
+ if r.Configuration.Credentials.CentralAPIRouter != nil {
+ siteID := r.Configuration.Credentials.CentralAPIRouter.SiteID.ValueString()
+ userName := r.Configuration.Credentials.CentralAPIRouter.UserName.ValueString()
+ userSecret := r.Configuration.Credentials.CentralAPIRouter.UserSecret.ValueString()
+ sourceCartCentralAPIRouter = &shared.SourceCartCentralAPIRouter{
+ SiteID: siteID,
+ UserName: userName,
+ UserSecret: userSecret,
+ }
+ }
+ if sourceCartCentralAPIRouter != nil {
+ credentials = &shared.SourceCartAuthorizationMethod{
+ SourceCartCentralAPIRouter: sourceCartCentralAPIRouter,
+ }
+ }
+ var sourceCartSingleStoreAccessToken *shared.SourceCartSingleStoreAccessToken
+ if r.Configuration.Credentials.SingleStoreAccessToken != nil {
+ accessToken := r.Configuration.Credentials.SingleStoreAccessToken.AccessToken.ValueString()
+ storeName := r.Configuration.Credentials.SingleStoreAccessToken.StoreName.ValueString()
+ sourceCartSingleStoreAccessToken = &shared.SourceCartSingleStoreAccessToken{
+ AccessToken: accessToken,
+ StoreName: storeName,
+ }
+ }
+ if sourceCartSingleStoreAccessToken != nil {
+ credentials = &shared.SourceCartAuthorizationMethod{
+ SourceCartSingleStoreAccessToken: sourceCartSingleStoreAccessToken,
+ }
+ }
+ }
+ startDate := r.Configuration.StartDate.ValueString()
+ configuration := shared.SourceCart{
+ Credentials: credentials,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
+ name := r.Name.ValueString()
+ secretID := new(string)
+ if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
+ *secretID = r.SecretID.ValueString()
+ } else {
+ secretID = nil
+ }
+ workspaceID := r.WorkspaceID.ValueString()
+ out := shared.SourceCartCreateRequest{
+ Configuration: configuration,
+ DefinitionID: definitionID,
+ Name: name,
+ SecretID: secretID,
+ WorkspaceID: workspaceID,
+ }
+ return &out
+}
+
+func (r *SourceCartResourceModel) ToGetSDKType() *shared.SourceCartCreateRequest {
+ out := r.ToCreateSDKType()
+ return out
+}
+
+func (r *SourceCartResourceModel) ToUpdateSDKType() *shared.SourceCartPutRequest {
+ var credentials *shared.SourceCartUpdateAuthorizationMethod
+ if r.Configuration.Credentials != nil {
+ var centralAPIRouter *shared.CentralAPIRouter
+ if r.Configuration.Credentials.CentralAPIRouter != nil {
+ siteID := r.Configuration.Credentials.CentralAPIRouter.SiteID.ValueString()
+ userName := r.Configuration.Credentials.CentralAPIRouter.UserName.ValueString()
+ userSecret := r.Configuration.Credentials.CentralAPIRouter.UserSecret.ValueString()
+ centralAPIRouter = &shared.CentralAPIRouter{
+ SiteID: siteID,
+ UserName: userName,
+ UserSecret: userSecret,
+ }
+ }
+ if centralAPIRouter != nil {
+ credentials = &shared.SourceCartUpdateAuthorizationMethod{
+ CentralAPIRouter: centralAPIRouter,
+ }
+ }
+ var singleStoreAccessToken *shared.SingleStoreAccessToken
+ if r.Configuration.Credentials.SingleStoreAccessToken != nil {
+ accessToken := r.Configuration.Credentials.SingleStoreAccessToken.AccessToken.ValueString()
+ storeName := r.Configuration.Credentials.SingleStoreAccessToken.StoreName.ValueString()
+ singleStoreAccessToken = &shared.SingleStoreAccessToken{
+ AccessToken: accessToken,
+ StoreName: storeName,
+ }
+ }
+ if singleStoreAccessToken != nil {
+ credentials = &shared.SourceCartUpdateAuthorizationMethod{
+ SingleStoreAccessToken: singleStoreAccessToken,
+ }
+ }
+ }
+ startDate := r.Configuration.StartDate.ValueString()
+ configuration := shared.SourceCartUpdate{
+ Credentials: credentials,
+ StartDate: startDate,
+ }
+ name := r.Name.ValueString()
+ workspaceID := r.WorkspaceID.ValueString()
+ out := shared.SourceCartPutRequest{
+ Configuration: configuration,
+ Name: name,
+ WorkspaceID: workspaceID,
+ }
+ return &out
+}
+
+func (r *SourceCartResourceModel) ToDeleteSDKType() *shared.SourceCartCreateRequest {
+ out := r.ToCreateSDKType()
+ return out
+}
+
+func (r *SourceCartResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ r.Name = types.StringValue(resp.Name)
+ r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
+
+func (r *SourceCartResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
+ r.RefreshFromGetResponse(resp)
+}
diff --git a/internal/provider/source_chargebee_data_source.go b/internal/provider/source_chargebee_data_source.go
old mode 100755
new mode 100644
index 599cffbd4..24ce7c5be
--- a/internal/provider/source_chargebee_data_source.go
+++ b/internal/provider/source_chargebee_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceChargebeeDataSource struct {
// SourceChargebeeDataSourceModel describes the data model.
type SourceChargebeeDataSourceModel struct {
- Configuration SourceChargebee `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,56 +47,20 @@ func (r *SourceChargebeeDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceChargebee DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "product_catalog": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "1.0",
- "2.0",
- ),
- },
- MarkdownDescription: `must be one of ["1.0", "2.0"]` + "\n" +
- `Product Catalog version of your Chargebee site. Instructions on how to find your version you may find here under ` + "`" + `API Version` + "`" + ` section.`,
- },
- "site": schema.StringAttribute{
- Computed: true,
- Description: `The site prefix for your Chargebee instance.`,
- },
- "site_api_key": schema.StringAttribute{
- Computed: true,
- Description: `Chargebee API Key. See the docs for more information on how to obtain this key.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "chargebee",
- ),
- },
- Description: `must be one of ["chargebee"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_chargebee_data_source_sdk.go b/internal/provider/source_chargebee_data_source_sdk.go
old mode 100755
new mode 100644
index 32ae620c3..86087774a
--- a/internal/provider/source_chargebee_data_source_sdk.go
+++ b/internal/provider/source_chargebee_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceChargebeeDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_chargebee_resource.go b/internal/provider/source_chargebee_resource.go
old mode 100755
new mode 100644
index f31a90528..793a29b11
--- a/internal/provider/source_chargebee_resource.go
+++ b/internal/provider/source_chargebee_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceChargebeeResource struct {
// SourceChargebeeResourceModel describes the resource data model.
type SourceChargebeeResourceModel struct {
Configuration SourceChargebee `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,14 +59,14 @@ func (r *SourceChargebeeResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"product_catalog": schema.StringAttribute{
Required: true,
+ MarkdownDescription: `must be one of ["1.0", "2.0"]` + "\n" +
+ `Product Catalog version of your Chargebee site. Instructions on how to find your version you may find here under ` + "`" + `API Version` + "`" + ` section.`,
Validators: []validator.String{
stringvalidator.OneOf(
"1.0",
"2.0",
),
},
- MarkdownDescription: `must be one of ["1.0", "2.0"]` + "\n" +
- `Product Catalog version of your Chargebee site. Instructions on how to find your version you may find here under ` + "`" + `API Version` + "`" + ` section.`,
},
"site": schema.StringAttribute{
Required: true,
@@ -72,33 +74,36 @@ func (r *SourceChargebeeResource) Schema(ctx context.Context, req resource.Schem
},
"site_api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Chargebee API Key. See the docs for more information on how to obtain this key.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "chargebee",
- ),
- },
- Description: `must be one of ["chargebee"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -162,7 +167,7 @@ func (r *SourceChargebeeResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceChargebee(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -338,5 +343,5 @@ func (r *SourceChargebeeResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceChargebeeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_chargebee_resource_sdk.go b/internal/provider/source_chargebee_resource_sdk.go
old mode 100755
new mode 100644
index 0e607dace..fb3321629
--- a/internal/provider/source_chargebee_resource_sdk.go
+++ b/internal/provider/source_chargebee_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -12,15 +12,19 @@ func (r *SourceChargebeeResourceModel) ToCreateSDKType() *shared.SourceChargebee
productCatalog := shared.SourceChargebeeProductCatalog(r.Configuration.ProductCatalog.ValueString())
site := r.Configuration.Site.ValueString()
siteAPIKey := r.Configuration.SiteAPIKey.ValueString()
- sourceType := shared.SourceChargebeeChargebee(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceChargebee{
ProductCatalog: productCatalog,
Site: site,
SiteAPIKey: siteAPIKey,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -31,6 +35,7 @@ func (r *SourceChargebeeResourceModel) ToCreateSDKType() *shared.SourceChargebee
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceChargebeeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -44,7 +49,7 @@ func (r *SourceChargebeeResourceModel) ToGetSDKType() *shared.SourceChargebeeCre
}
func (r *SourceChargebeeResourceModel) ToUpdateSDKType() *shared.SourceChargebeePutRequest {
- productCatalog := shared.SourceChargebeeUpdateProductCatalog(r.Configuration.ProductCatalog.ValueString())
+ productCatalog := shared.ProductCatalog(r.Configuration.ProductCatalog.ValueString())
site := r.Configuration.Site.ValueString()
siteAPIKey := r.Configuration.SiteAPIKey.ValueString()
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
diff --git a/internal/provider/source_chartmogul_data_source.go b/internal/provider/source_chartmogul_data_source.go
old mode 100755
new mode 100644
index 223bbcb01..55502d0d9
--- a/internal/provider/source_chartmogul_data_source.go
+++ b/internal/provider/source_chartmogul_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceChartmogulDataSource struct {
// SourceChartmogulDataSourceModel describes the data model.
type SourceChartmogulDataSourceModel struct {
- Configuration SourceChartmogul `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,54 +47,20 @@ func (r *SourceChartmogulDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceChartmogul DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Your Chartmogul API key. See the docs for info on how to obtain this.`,
- },
- "interval": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "day",
- "week",
- "month",
- "quarter",
- ),
- },
- MarkdownDescription: `must be one of ["day", "week", "month", "quarter"]` + "\n" +
- `Some APIs such as Metrics require intervals to cluster data.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "chartmogul",
- ),
- },
- Description: `must be one of ["chartmogul"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. When feasible, any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_chartmogul_data_source_sdk.go b/internal/provider/source_chartmogul_data_source_sdk.go
old mode 100755
new mode 100644
index be5daa443..64d01aa47
--- a/internal/provider/source_chartmogul_data_source_sdk.go
+++ b/internal/provider/source_chartmogul_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceChartmogulDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_chartmogul_resource.go b/internal/provider/source_chartmogul_resource.go
old mode 100755
new mode 100644
index 780306ffb..62f5d6f98
--- a/internal/provider/source_chartmogul_resource.go
+++ b/internal/provider/source_chartmogul_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceChartmogulResource struct {
// SourceChartmogulResourceModel describes the resource data model.
type SourceChartmogulResourceModel struct {
Configuration SourceChartmogul `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,46 +58,36 @@ func (r *SourceChartmogulResource) Schema(ctx context.Context, req resource.Sche
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your Chartmogul API key. See the docs for info on how to obtain this.`,
},
- "interval": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "day",
- "week",
- "month",
- "quarter",
- ),
- },
- MarkdownDescription: `must be one of ["day", "week", "month", "quarter"]` + "\n" +
- `Some APIs such as Metrics require intervals to cluster data.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "chartmogul",
- ),
- },
- Description: `must be one of ["chartmogul"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. When feasible, any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. When feasible, any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -160,7 +151,7 @@ func (r *SourceChartmogulResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceChartmogul(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -336,5 +327,5 @@ func (r *SourceChartmogulResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceChartmogulResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_chartmogul_resource_sdk.go b/internal/provider/source_chartmogul_resource_sdk.go
old mode 100755
new mode 100644
index 5af5b76dd..8e1503515
--- a/internal/provider/source_chartmogul_resource_sdk.go
+++ b/internal/provider/source_chartmogul_resource_sdk.go
@@ -3,21 +3,23 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceChartmogulResourceModel) ToCreateSDKType() *shared.SourceChartmogulCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- interval := shared.SourceChartmogulInterval(r.Configuration.Interval.ValueString())
- sourceType := shared.SourceChartmogulChartmogul(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceChartmogul{
- APIKey: apiKey,
- Interval: interval,
- SourceType: sourceType,
- StartDate: startDate,
+ APIKey: apiKey,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -29,6 +31,7 @@ func (r *SourceChartmogulResourceModel) ToCreateSDKType() *shared.SourceChartmog
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceChartmogulCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -43,11 +46,9 @@ func (r *SourceChartmogulResourceModel) ToGetSDKType() *shared.SourceChartmogulC
func (r *SourceChartmogulResourceModel) ToUpdateSDKType() *shared.SourceChartmogulPutRequest {
apiKey := r.Configuration.APIKey.ValueString()
- interval := shared.SourceChartmogulUpdateInterval(r.Configuration.Interval.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceChartmogulUpdate{
APIKey: apiKey,
- Interval: interval,
StartDate: startDate,
}
name := r.Name.ValueString()
diff --git a/internal/provider/source_clickhouse_data_source.go b/internal/provider/source_clickhouse_data_source.go
old mode 100755
new mode 100644
index d7407a934..6cf1ba95b
--- a/internal/provider/source_clickhouse_data_source.go
+++ b/internal/provider/source_clickhouse_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceClickhouseDataSource struct {
// SourceClickhouseDataSourceModel describes the data model.
type SourceClickhouseDataSourceModel struct {
- Configuration SourceClickhouse `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,219 +47,20 @@ func (r *SourceClickhouseDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceClickhouse DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `The name of the database.`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The host endpoint of the Clickhouse cluster.`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `The password associated with this username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `The port of the database.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "clickhouse",
- ),
- },
- Description: `must be one of ["clickhouse"]`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_clickhouse_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_clickhouse_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_clickhouse_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_clickhouse_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_clickhouse_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_clickhouse_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `The username which is used to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_clickhouse_data_source_sdk.go b/internal/provider/source_clickhouse_data_source_sdk.go
old mode 100755
new mode 100644
index 771cd013d..6c3c0ec7c
--- a/internal/provider/source_clickhouse_data_source_sdk.go
+++ b/internal/provider/source_clickhouse_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceClickhouseDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_clickhouse_resource.go b/internal/provider/source_clickhouse_resource.go
old mode 100755
new mode 100644
index 3395fd013..a6da5adb9
--- a/internal/provider/source_clickhouse_resource.go
+++ b/internal/provider/source_clickhouse_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceClickhouseResource struct {
// SourceClickhouseResourceModel describes the resource data model.
type SourceClickhouseResourceModel struct {
Configuration SourceClickhouse `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -65,60 +66,33 @@ func (r *SourceClickhouseResource) Schema(ctx context.Context, req resource.Sche
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The password associated with this username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `The port of the database.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "clickhouse",
- ),
- },
- Description: `must be one of ["clickhouse"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 8123` + "\n" +
+ `The port of the database.`,
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_clickhouse_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_clickhouse_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -126,115 +100,28 @@ func (r *SourceClickhouseResource) Schema(ctx context.Context, req resource.Sche
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_clickhouse_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_clickhouse_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_clickhouse_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_clickhouse_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -244,10 +131,10 @@ func (r *SourceClickhouseResource) Schema(ctx context.Context, req resource.Sche
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -255,13 +142,24 @@ func (r *SourceClickhouseResource) Schema(ctx context.Context, req resource.Sche
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -325,7 +223,7 @@ func (r *SourceClickhouseResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceClickhouse(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -501,5 +399,5 @@ func (r *SourceClickhouseResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceClickhouseResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_clickhouse_resource_sdk.go b/internal/provider/source_clickhouse_resource_sdk.go
old mode 100755
new mode 100644
index e0afd1f91..065e012f2
--- a/internal/provider/source_clickhouse_resource_sdk.go
+++ b/internal/provider/source_clickhouse_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -16,60 +16,67 @@ func (r *SourceClickhouseResourceModel) ToCreateSDKType() *shared.SourceClickhou
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
- sourceType := shared.SourceClickhouseClickhouse(r.Configuration.SourceType.ValueString())
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var tunnelMethod *shared.SourceClickhouseSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourceClickhouseSSHTunnelMethodNoTunnel *shared.SourceClickhouseSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourceClickhouseSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourceClickhouseSSHTunnelMethodNoTunnel = &shared.SourceClickhouseSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourceClickhouseNoTunnel *shared.SourceClickhouseNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourceClickhouseNoTunnel = &shared.SourceClickhouseNoTunnel{}
}
- if sourceClickhouseSSHTunnelMethodNoTunnel != nil {
+ if sourceClickhouseNoTunnel != nil {
tunnelMethod = &shared.SourceClickhouseSSHTunnelMethod{
- SourceClickhouseSSHTunnelMethodNoTunnel: sourceClickhouseSSHTunnelMethodNoTunnel,
+ SourceClickhouseNoTunnel: sourceClickhouseNoTunnel,
}
}
- var sourceClickhouseSSHTunnelMethodSSHKeyAuthentication *shared.SourceClickhouseSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourceClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourceClickhouseSSHTunnelMethodSSHKeyAuthentication = &shared.SourceClickhouseSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourceClickhouseSSHKeyAuthentication *shared.SourceClickhouseSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourceClickhouseSSHKeyAuthentication = &shared.SourceClickhouseSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourceClickhouseSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourceClickhouseSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourceClickhouseSSHTunnelMethod{
- SourceClickhouseSSHTunnelMethodSSHKeyAuthentication: sourceClickhouseSSHTunnelMethodSSHKeyAuthentication,
+ SourceClickhouseSSHKeyAuthentication: sourceClickhouseSSHKeyAuthentication,
}
}
- var sourceClickhouseSSHTunnelMethodPasswordAuthentication *shared.SourceClickhouseSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourceClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourceClickhouseSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourceClickhouseSSHTunnelMethodPasswordAuthentication = &shared.SourceClickhouseSSHTunnelMethodPasswordAuthentication{
+ var sourceClickhousePasswordAuthentication *shared.SourceClickhousePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourceClickhousePasswordAuthentication = &shared.SourceClickhousePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourceClickhouseSSHTunnelMethodPasswordAuthentication != nil {
+ if sourceClickhousePasswordAuthentication != nil {
tunnelMethod = &shared.SourceClickhouseSSHTunnelMethod{
- SourceClickhouseSSHTunnelMethodPasswordAuthentication: sourceClickhouseSSHTunnelMethodPasswordAuthentication,
+ SourceClickhousePasswordAuthentication: sourceClickhousePasswordAuthentication,
}
}
}
@@ -79,10 +86,15 @@ func (r *SourceClickhouseResourceModel) ToCreateSDKType() *shared.SourceClickhou
Host: host,
Password: password,
Port: port,
- SourceType: sourceType,
TunnelMethod: tunnelMethod,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -93,6 +105,7 @@ func (r *SourceClickhouseResourceModel) ToCreateSDKType() *shared.SourceClickhou
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceClickhouseCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -114,59 +127,67 @@ func (r *SourceClickhouseResourceModel) ToUpdateSDKType() *shared.SourceClickhou
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var tunnelMethod *shared.SourceClickhouseUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourceClickhouseUpdateSSHTunnelMethodNoTunnel *shared.SourceClickhouseUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourceClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourceClickhouseUpdateSSHTunnelMethodNoTunnel = &shared.SourceClickhouseUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourceClickhouseUpdateNoTunnel *shared.SourceClickhouseUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourceClickhouseUpdateNoTunnel = &shared.SourceClickhouseUpdateNoTunnel{}
}
- if sourceClickhouseUpdateSSHTunnelMethodNoTunnel != nil {
+ if sourceClickhouseUpdateNoTunnel != nil {
tunnelMethod = &shared.SourceClickhouseUpdateSSHTunnelMethod{
- SourceClickhouseUpdateSSHTunnelMethodNoTunnel: sourceClickhouseUpdateSSHTunnelMethodNoTunnel,
+ SourceClickhouseUpdateNoTunnel: sourceClickhouseUpdateNoTunnel,
}
}
- var sourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication *shared.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourceClickhouseUpdateSSHKeyAuthentication *shared.SourceClickhouseUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourceClickhouseUpdateSSHKeyAuthentication = &shared.SourceClickhouseUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourceClickhouseUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourceClickhouseUpdateSSHTunnelMethod{
- SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication: sourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication,
+ SourceClickhouseUpdateSSHKeyAuthentication: sourceClickhouseUpdateSSHKeyAuthentication,
}
}
- var sourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication *shared.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication = &shared.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication{
+ var sourceClickhouseUpdatePasswordAuthentication *shared.SourceClickhouseUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourceClickhouseUpdatePasswordAuthentication = &shared.SourceClickhouseUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if sourceClickhouseUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.SourceClickhouseUpdateSSHTunnelMethod{
- SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication: sourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication,
+ SourceClickhouseUpdatePasswordAuthentication: sourceClickhouseUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/source_clickupapi_data_source.go b/internal/provider/source_clickupapi_data_source.go
old mode 100755
new mode 100644
index 4971a9634..fffe3eb0d
--- a/internal/provider/source_clickupapi_data_source.go
+++ b/internal/provider/source_clickupapi_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceClickupAPIDataSource struct {
// SourceClickupAPIDataSourceModel describes the data model.
type SourceClickupAPIDataSourceModel struct {
- Configuration SourceClickupAPI `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,54 +47,20 @@ func (r *SourceClickupAPIDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceClickupAPI DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `Every ClickUp API call required authentication. This field is your personal API token. See here.`,
- },
- "folder_id": schema.StringAttribute{
- Computed: true,
- Description: `The ID of your folder in your space. Retrieve it from the ` + "`" + `/space/{space_id}/folder` + "`" + ` of the ClickUp API. See here.`,
- },
- "include_closed_tasks": schema.BoolAttribute{
- Computed: true,
- Description: `Include or exclude closed tasks. By default, they are excluded. See here.`,
- },
- "list_id": schema.StringAttribute{
- Computed: true,
- Description: `The ID of your list in your folder. Retrieve it from the ` + "`" + `/folder/{folder_id}/list` + "`" + ` of the ClickUp API. See here.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "clickup-api",
- ),
- },
- Description: `must be one of ["clickup-api"]`,
- },
- "space_id": schema.StringAttribute{
- Computed: true,
- Description: `The ID of your space in your workspace. Retrieve it from the ` + "`" + `/team/{team_id}/space` + "`" + ` of the ClickUp API. See here.`,
- },
- "team_id": schema.StringAttribute{
- Computed: true,
- Description: `The ID of your team in ClickUp. Retrieve it from the ` + "`" + `/team` + "`" + ` of the ClickUp API. See here.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_clickupapi_data_source_sdk.go b/internal/provider/source_clickupapi_data_source_sdk.go
old mode 100755
new mode 100644
index 1b9fd7310..964300b1d
--- a/internal/provider/source_clickupapi_data_source_sdk.go
+++ b/internal/provider/source_clickupapi_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceClickupAPIDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_clickupapi_resource.go b/internal/provider/source_clickupapi_resource.go
old mode 100755
new mode 100644
index 3a2e66c58..92a148dd0
--- a/internal/provider/source_clickupapi_resource.go
+++ b/internal/provider/source_clickupapi_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceClickupAPIResource struct {
// SourceClickupAPIResourceModel describes the resource data model.
type SourceClickupAPIResourceModel struct {
Configuration SourceClickupAPI `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,6 +56,7 @@ func (r *SourceClickupAPIResource) Schema(ctx context.Context, req resource.Sche
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Every ClickUp API call required authentication. This field is your personal API token. See here.`,
},
"folder_id": schema.StringAttribute{
@@ -63,22 +64,14 @@ func (r *SourceClickupAPIResource) Schema(ctx context.Context, req resource.Sche
Description: `The ID of your folder in your space. Retrieve it from the ` + "`" + `/space/{space_id}/folder` + "`" + ` of the ClickUp API. See here.`,
},
"include_closed_tasks": schema.BoolAttribute{
- Optional: true,
- Description: `Include or exclude closed tasks. By default, they are excluded. See here.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Include or exclude closed tasks. By default, they are excluded. See here.`,
},
"list_id": schema.StringAttribute{
Optional: true,
Description: `The ID of your list in your folder. Retrieve it from the ` + "`" + `/folder/{folder_id}/list` + "`" + ` of the ClickUp API. See here.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "clickup-api",
- ),
- },
- Description: `must be one of ["clickup-api"]`,
- },
"space_id": schema.StringAttribute{
Optional: true,
Description: `The ID of your space in your workspace. Retrieve it from the ` + "`" + `/team/{team_id}/space` + "`" + ` of the ClickUp API. See here.`,
@@ -89,13 +82,24 @@ func (r *SourceClickupAPIResource) Schema(ctx context.Context, req resource.Sche
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -159,7 +163,7 @@ func (r *SourceClickupAPIResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceClickupAPI(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -335,5 +339,5 @@ func (r *SourceClickupAPIResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceClickupAPIResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_clickupapi_resource_sdk.go b/internal/provider/source_clickupapi_resource_sdk.go
old mode 100755
new mode 100644
index 4bedf8b5c..309cbbec6
--- a/internal/provider/source_clickupapi_resource_sdk.go
+++ b/internal/provider/source_clickupapi_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -27,7 +27,6 @@ func (r *SourceClickupAPIResourceModel) ToCreateSDKType() *shared.SourceClickupA
} else {
listID = nil
}
- sourceType := shared.SourceClickupAPIClickupAPI(r.Configuration.SourceType.ValueString())
spaceID := new(string)
if !r.Configuration.SpaceID.IsUnknown() && !r.Configuration.SpaceID.IsNull() {
*spaceID = r.Configuration.SpaceID.ValueString()
@@ -45,10 +44,15 @@ func (r *SourceClickupAPIResourceModel) ToCreateSDKType() *shared.SourceClickupA
FolderID: folderID,
IncludeClosedTasks: includeClosedTasks,
ListID: listID,
- SourceType: sourceType,
SpaceID: spaceID,
TeamID: teamID,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -59,6 +63,7 @@ func (r *SourceClickupAPIResourceModel) ToCreateSDKType() *shared.SourceClickupA
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceClickupAPICreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_clockify_data_source.go b/internal/provider/source_clockify_data_source.go
old mode 100755
new mode 100644
index 19edeb870..37d119170
--- a/internal/provider/source_clockify_data_source.go
+++ b/internal/provider/source_clockify_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceClockifyDataSource struct {
// SourceClockifyDataSourceModel describes the data model.
type SourceClockifyDataSourceModel struct {
- Configuration SourceClockify `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,42 +47,20 @@ func (r *SourceClockifyDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceClockify DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `You can get your api access_key here This API is Case Sensitive.`,
- },
- "api_url": schema.StringAttribute{
- Computed: true,
- Description: `The URL for the Clockify API. This should only need to be modified if connecting to an enterprise version of Clockify.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "clockify",
- ),
- },
- Description: `must be one of ["clockify"]`,
- },
- "workspace_id": schema.StringAttribute{
- Computed: true,
- Description: `WorkSpace Id`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_clockify_data_source_sdk.go b/internal/provider/source_clockify_data_source_sdk.go
old mode 100755
new mode 100644
index 94e56383b..b4917bb9a
--- a/internal/provider/source_clockify_data_source_sdk.go
+++ b/internal/provider/source_clockify_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceClockifyDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_clockify_resource.go b/internal/provider/source_clockify_resource.go
old mode 100755
new mode 100644
index 90eee4c2e..2eb3b0430
--- a/internal/provider/source_clockify_resource.go
+++ b/internal/provider/source_clockify_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceClockifyResource struct {
// SourceClockifyResourceModel describes the resource data model.
type SourceClockifyResourceModel struct {
Configuration SourceClockify `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,20 +56,13 @@ func (r *SourceClockifyResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `You can get your api access_key here This API is Case Sensitive.`,
},
"api_url": schema.StringAttribute{
- Optional: true,
- Description: `The URL for the Clockify API. This should only need to be modified if connecting to an enterprise version of Clockify.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "clockify",
- ),
- },
- Description: `must be one of ["clockify"]`,
+ Optional: true,
+ MarkdownDescription: `Default: "https://api.clockify.me"` + "\n" +
+ `The URL for the Clockify API. This should only need to be modified if connecting to an enterprise version of Clockify.`,
},
"workspace_id": schema.StringAttribute{
Required: true,
@@ -77,13 +70,24 @@ func (r *SourceClockifyResource) Schema(ctx context.Context, req resource.Schema
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +151,7 @@ func (r *SourceClockifyResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceClockify(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +327,5 @@ func (r *SourceClockifyResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceClockifyResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_clockify_resource_sdk.go b/internal/provider/source_clockify_resource_sdk.go
old mode 100755
new mode 100644
index 57127a19f..f3f77458d
--- a/internal/provider/source_clockify_resource_sdk.go
+++ b/internal/provider/source_clockify_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -15,14 +15,18 @@ func (r *SourceClockifyResourceModel) ToCreateSDKType() *shared.SourceClockifyCr
} else {
apiURL = nil
}
- sourceType := shared.SourceClockifyClockify(r.Configuration.SourceType.ValueString())
workspaceID := r.Configuration.WorkspaceID.ValueString()
configuration := shared.SourceClockify{
APIKey: apiKey,
APIURL: apiURL,
- SourceType: sourceType,
WorkspaceID: workspaceID,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -33,6 +37,7 @@ func (r *SourceClockifyResourceModel) ToCreateSDKType() *shared.SourceClockifyCr
workspaceId1 := r.WorkspaceID.ValueString()
out := shared.SourceClockifyCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceId1,
diff --git a/internal/provider/source_closecom_data_source.go b/internal/provider/source_closecom_data_source.go
old mode 100755
new mode 100644
index f24b7f149..7d4ec4b54
--- a/internal/provider/source_closecom_data_source.go
+++ b/internal/provider/source_closecom_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceCloseComDataSource struct {
// SourceCloseComDataSourceModel describes the data model.
type SourceCloseComDataSourceModel struct {
- Configuration SourceCloseCom `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,41 +47,20 @@ func (r *SourceCloseComDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceCloseCom DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Close.com API key (usually starts with 'api_'; find yours here).`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "close-com",
- ),
- },
- Description: `must be one of ["close-com"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The start date to sync data; all data after this date will be replicated. Leave blank to retrieve all the data available in the account. Format: YYYY-MM-DD.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_closecom_data_source_sdk.go b/internal/provider/source_closecom_data_source_sdk.go
old mode 100755
new mode 100644
index e7464c285..08e16edbe
--- a/internal/provider/source_closecom_data_source_sdk.go
+++ b/internal/provider/source_closecom_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceCloseComDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_closecom_resource.go b/internal/provider/source_closecom_resource.go
old mode 100755
new mode 100644
index d9c6dfcc8..b1d0eaaf8
--- a/internal/provider/source_closecom_resource.go
+++ b/internal/provider/source_closecom_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceCloseComResource struct {
// SourceCloseComResourceModel describes the resource data model.
type SourceCloseComResourceModel struct {
Configuration SourceCloseCom `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,33 +58,37 @@ func (r *SourceCloseComResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Close.com API key (usually starts with 'api_'; find yours here).`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "close-com",
- ),
- },
- Description: `must be one of ["close-com"]`,
- },
"start_date": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `Default: "2021-01-01"` + "\n" +
+ `The start date to sync data; all data after this date will be replicated. Leave blank to retrieve all the data available in the account. Format: YYYY-MM-DD.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The start date to sync data; all data after this date will be replicated. Leave blank to retrieve all the data available in the account. Format: YYYY-MM-DD.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +152,7 @@ func (r *SourceCloseComResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceCloseCom(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +328,5 @@ func (r *SourceCloseComResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceCloseComResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_closecom_resource_sdk.go b/internal/provider/source_closecom_resource_sdk.go
old mode 100755
new mode 100644
index 87e5d7df0..497fe4b25
--- a/internal/provider/source_closecom_resource_sdk.go
+++ b/internal/provider/source_closecom_resource_sdk.go
@@ -3,14 +3,13 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceCloseComResourceModel) ToCreateSDKType() *shared.SourceCloseComCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceCloseComCloseCom(r.Configuration.SourceType.ValueString())
startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
@@ -18,9 +17,14 @@ func (r *SourceCloseComResourceModel) ToCreateSDKType() *shared.SourceCloseComCr
startDate = nil
}
configuration := shared.SourceCloseCom{
- APIKey: apiKey,
- SourceType: sourceType,
- StartDate: startDate,
+ APIKey: apiKey,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -32,6 +36,7 @@ func (r *SourceCloseComResourceModel) ToCreateSDKType() *shared.SourceCloseComCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceCloseComCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_coda_data_source.go b/internal/provider/source_coda_data_source.go
old mode 100755
new mode 100644
index 914f61f88..e7e164e10
--- a/internal/provider/source_coda_data_source.go
+++ b/internal/provider/source_coda_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceCodaDataSource struct {
// SourceCodaDataSourceModel describes the data model.
type SourceCodaDataSourceModel struct {
- Configuration SourceCoda `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,34 +47,20 @@ func (r *SourceCodaDataSource) Schema(ctx context.Context, req datasource.Schema
MarkdownDescription: "SourceCoda DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_token": schema.StringAttribute{
- Computed: true,
- Description: `Bearer token`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "coda",
- ),
- },
- Description: `must be one of ["coda"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_coda_data_source_sdk.go b/internal/provider/source_coda_data_source_sdk.go
old mode 100755
new mode 100644
index be9278845..be0b9bc8a
--- a/internal/provider/source_coda_data_source_sdk.go
+++ b/internal/provider/source_coda_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceCodaDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_coda_resource.go b/internal/provider/source_coda_resource.go
old mode 100755
new mode 100644
index 3d26fdf7f..e8aace6d8
--- a/internal/provider/source_coda_resource.go
+++ b/internal/provider/source_coda_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceCodaResource struct {
// SourceCodaResourceModel describes the resource data model.
type SourceCodaResourceModel struct {
Configuration SourceCoda `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,26 +56,29 @@ func (r *SourceCodaResource) Schema(ctx context.Context, req resource.SchemaRequ
Attributes: map[string]schema.Attribute{
"auth_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Bearer token`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "coda",
- ),
- },
- Description: `must be one of ["coda"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceCodaResource) Create(ctx context.Context, req resource.CreateRequ
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceCoda(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceCodaResource) Delete(ctx context.Context, req resource.DeleteRequ
}
func (r *SourceCodaResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_coda_resource_sdk.go b/internal/provider/source_coda_resource_sdk.go
old mode 100755
new mode 100644
index fa1a2beb1..3d7c68af3
--- a/internal/provider/source_coda_resource_sdk.go
+++ b/internal/provider/source_coda_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceCodaResourceModel) ToCreateSDKType() *shared.SourceCodaCreateRequest {
authToken := r.Configuration.AuthToken.ValueString()
- sourceType := shared.SourceCodaCoda(r.Configuration.SourceType.ValueString())
configuration := shared.SourceCoda{
- AuthToken: authToken,
- SourceType: sourceType,
+ AuthToken: authToken,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceCodaResourceModel) ToCreateSDKType() *shared.SourceCodaCreateRequ
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceCodaCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_coinapi_data_source.go b/internal/provider/source_coinapi_data_source.go
old mode 100755
new mode 100644
index 31a4f7633..95d517c74
--- a/internal/provider/source_coinapi_data_source.go
+++ b/internal/provider/source_coinapi_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceCoinAPIDataSource struct {
// SourceCoinAPIDataSourceModel describes the data model.
type SourceCoinAPIDataSourceModel struct {
- Configuration SourceCoinAPI `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,74 +47,20 @@ func (r *SourceCoinAPIDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceCoinAPI DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- MarkdownDescription: `The end date in ISO 8601 format. If not supplied, data will be returned` + "\n" +
- `from the start date to the current time, or when the count of result` + "\n" +
- `elements reaches its limit.` + "\n" +
- ``,
- },
- "environment": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sandbox",
- "production",
- ),
- },
- MarkdownDescription: `must be one of ["sandbox", "production"]` + "\n" +
- `The environment to use. Either sandbox or production.` + "\n" +
- ``,
- },
- "limit": schema.Int64Attribute{
- Computed: true,
- MarkdownDescription: `The maximum number of elements to return. If not supplied, the default` + "\n" +
- `is 100. For numbers larger than 100, each 100 items is counted as one` + "\n" +
- `request for pricing purposes. Maximum value is 100000.` + "\n" +
- ``,
- },
- "period": schema.StringAttribute{
- Computed: true,
- Description: `The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "coin-api",
- ),
- },
- Description: `must be one of ["coin-api"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `The start date in ISO 8601 format.`,
- },
- "symbol_id": schema.StringAttribute{
- Computed: true,
- MarkdownDescription: `The symbol ID to use. See the documentation for a list.` + "\n" +
- `https://docs.coinapi.io/#list-all-symbols-get` + "\n" +
- ``,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_coinapi_data_source_sdk.go b/internal/provider/source_coinapi_data_source_sdk.go
old mode 100755
new mode 100644
index 0b38ca900..5bac547ec
--- a/internal/provider/source_coinapi_data_source_sdk.go
+++ b/internal/provider/source_coinapi_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceCoinAPIDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_coinapi_resource.go b/internal/provider/source_coinapi_resource.go
old mode 100755
new mode 100644
index 14e44f003..1579138aa
--- a/internal/provider/source_coinapi_resource.go
+++ b/internal/provider/source_coinapi_resource.go
@@ -3,17 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +36,7 @@ type SourceCoinAPIResource struct {
// SourceCoinAPIResourceModel describes the resource data model.
type SourceCoinAPIResourceModel struct {
Configuration SourceCoinAPI `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,6 +58,7 @@ func (r *SourceCoinAPIResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key`,
},
"end_date": schema.StringAttribute{
@@ -66,20 +69,21 @@ func (r *SourceCoinAPIResource) Schema(ctx context.Context, req resource.SchemaR
``,
},
"environment": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["sandbox", "production"]; Default: "sandbox"` + "\n" +
+ `The environment to use. Either sandbox or production.` + "\n" +
+ ``,
Validators: []validator.String{
stringvalidator.OneOf(
"sandbox",
"production",
),
},
- MarkdownDescription: `must be one of ["sandbox", "production"]` + "\n" +
- `The environment to use. Either sandbox or production.` + "\n" +
- ``,
},
"limit": schema.Int64Attribute{
Optional: true,
- MarkdownDescription: `The maximum number of elements to return. If not supplied, the default` + "\n" +
+ MarkdownDescription: `Default: 100` + "\n" +
+ `The maximum number of elements to return. If not supplied, the default` + "\n" +
`is 100. For numbers larger than 100, each 100 items is counted as one` + "\n" +
`request for pricing purposes. Maximum value is 100000.` + "\n" +
``,
@@ -88,15 +92,6 @@ func (r *SourceCoinAPIResource) Schema(ctx context.Context, req resource.SchemaR
Required: true,
Description: `The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "coin-api",
- ),
- },
- Description: `must be one of ["coin-api"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `The start date in ISO 8601 format.`,
@@ -109,13 +104,24 @@ func (r *SourceCoinAPIResource) Schema(ctx context.Context, req resource.SchemaR
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -179,7 +185,7 @@ func (r *SourceCoinAPIResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceCoinAPI(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -355,5 +361,5 @@ func (r *SourceCoinAPIResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceCoinAPIResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_coinapi_resource_sdk.go b/internal/provider/source_coinapi_resource_sdk.go
old mode 100755
new mode 100644
index 975110a15..31f1513c2
--- a/internal/provider/source_coinapi_resource_sdk.go
+++ b/internal/provider/source_coinapi_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -15,7 +15,12 @@ func (r *SourceCoinAPIResourceModel) ToCreateSDKType() *shared.SourceCoinAPICrea
} else {
endDate = nil
}
- environment := shared.SourceCoinAPIEnvironment(r.Configuration.Environment.ValueString())
+ environment := new(shared.SourceCoinAPIEnvironment)
+ if !r.Configuration.Environment.IsUnknown() && !r.Configuration.Environment.IsNull() {
+ *environment = shared.SourceCoinAPIEnvironment(r.Configuration.Environment.ValueString())
+ } else {
+ environment = nil
+ }
limit := new(int64)
if !r.Configuration.Limit.IsUnknown() && !r.Configuration.Limit.IsNull() {
*limit = r.Configuration.Limit.ValueInt64()
@@ -23,7 +28,6 @@ func (r *SourceCoinAPIResourceModel) ToCreateSDKType() *shared.SourceCoinAPICrea
limit = nil
}
period := r.Configuration.Period.ValueString()
- sourceType := shared.SourceCoinAPICoinAPI(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
symbolID := r.Configuration.SymbolID.ValueString()
configuration := shared.SourceCoinAPI{
@@ -32,10 +36,15 @@ func (r *SourceCoinAPIResourceModel) ToCreateSDKType() *shared.SourceCoinAPICrea
Environment: environment,
Limit: limit,
Period: period,
- SourceType: sourceType,
StartDate: startDate,
SymbolID: symbolID,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -46,6 +55,7 @@ func (r *SourceCoinAPIResourceModel) ToCreateSDKType() *shared.SourceCoinAPICrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceCoinAPICreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -66,7 +76,12 @@ func (r *SourceCoinAPIResourceModel) ToUpdateSDKType() *shared.SourceCoinAPIPutR
} else {
endDate = nil
}
- environment := shared.SourceCoinAPIUpdateEnvironment(r.Configuration.Environment.ValueString())
+ environment := new(shared.Environment)
+ if !r.Configuration.Environment.IsUnknown() && !r.Configuration.Environment.IsNull() {
+ *environment = shared.Environment(r.Configuration.Environment.ValueString())
+ } else {
+ environment = nil
+ }
limit := new(int64)
if !r.Configuration.Limit.IsUnknown() && !r.Configuration.Limit.IsNull() {
*limit = r.Configuration.Limit.ValueInt64()
diff --git a/internal/provider/source_coinmarketcap_data_source.go b/internal/provider/source_coinmarketcap_data_source.go
old mode 100755
new mode 100644
index f56c94225..1af782826
--- a/internal/provider/source_coinmarketcap_data_source.go
+++ b/internal/provider/source_coinmarketcap_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceCoinmarketcapDataSource struct {
// SourceCoinmarketcapDataSourceModel describes the data model.
type SourceCoinmarketcapDataSourceModel struct {
- Configuration SourceCoinmarketcap `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,50 +47,20 @@ func (r *SourceCoinmarketcapDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "SourceCoinmarketcap DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Your API Key. See here. The token is case sensitive.`,
- },
- "data_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "latest",
- "historical",
- ),
- },
- MarkdownDescription: `must be one of ["latest", "historical"]` + "\n" +
- `/latest: Latest market ticker quotes and averages for cryptocurrencies and exchanges. /historical: Intervals of historic market data like OHLCV data or data for use in charting libraries. See here.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "coinmarketcap",
- ),
- },
- Description: `must be one of ["coinmarketcap"]`,
- },
- "symbols": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `Cryptocurrency symbols. (only used for quotes stream)`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_coinmarketcap_data_source_sdk.go b/internal/provider/source_coinmarketcap_data_source_sdk.go
old mode 100755
new mode 100644
index ed4cbf960..aec90aa07
--- a/internal/provider/source_coinmarketcap_data_source_sdk.go
+++ b/internal/provider/source_coinmarketcap_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceCoinmarketcapDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_coinmarketcap_resource.go b/internal/provider/source_coinmarketcap_resource.go
old mode 100755
new mode 100644
index 1f9efcd3c..63f48aae5
--- a/internal/provider/source_coinmarketcap_resource.go
+++ b/internal/provider/source_coinmarketcap_resource.go
@@ -3,17 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +36,7 @@ type SourceCoinmarketcapResource struct {
// SourceCoinmarketcapResourceModel describes the resource data model.
type SourceCoinmarketcapResourceModel struct {
Configuration SourceCoinmarketcap `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,27 +58,19 @@ func (r *SourceCoinmarketcapResource) Schema(ctx context.Context, req resource.S
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your API Key. See here. The token is case sensitive.`,
},
"data_type": schema.StringAttribute{
Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "latest",
- "historical",
- ),
- },
MarkdownDescription: `must be one of ["latest", "historical"]` + "\n" +
`/latest: Latest market ticker quotes and averages for cryptocurrencies and exchanges. /historical: Intervals of historic market data like OHLCV data or data for use in charting libraries. See here.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
Validators: []validator.String{
stringvalidator.OneOf(
- "coinmarketcap",
+ "latest",
+ "historical",
),
},
- Description: `must be one of ["coinmarketcap"]`,
},
"symbols": schema.ListAttribute{
Optional: true,
@@ -85,13 +79,24 @@ func (r *SourceCoinmarketcapResource) Schema(ctx context.Context, req resource.S
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +160,7 @@ func (r *SourceCoinmarketcapResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceCoinmarketcap(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +336,5 @@ func (r *SourceCoinmarketcapResource) Delete(ctx context.Context, req resource.D
}
func (r *SourceCoinmarketcapResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_coinmarketcap_resource_sdk.go b/internal/provider/source_coinmarketcap_resource_sdk.go
old mode 100755
new mode 100644
index 07a1cbd90..248c7b80b
--- a/internal/provider/source_coinmarketcap_resource_sdk.go
+++ b/internal/provider/source_coinmarketcap_resource_sdk.go
@@ -3,23 +3,27 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceCoinmarketcapResourceModel) ToCreateSDKType() *shared.SourceCoinmarketcapCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
dataType := shared.SourceCoinmarketcapDataType(r.Configuration.DataType.ValueString())
- sourceType := shared.SourceCoinmarketcapCoinmarketcap(r.Configuration.SourceType.ValueString())
var symbols []string = nil
for _, symbolsItem := range r.Configuration.Symbols {
symbols = append(symbols, symbolsItem.ValueString())
}
configuration := shared.SourceCoinmarketcap{
- APIKey: apiKey,
- DataType: dataType,
- SourceType: sourceType,
- Symbols: symbols,
+ APIKey: apiKey,
+ DataType: dataType,
+ Symbols: symbols,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -31,6 +35,7 @@ func (r *SourceCoinmarketcapResourceModel) ToCreateSDKType() *shared.SourceCoinm
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceCoinmarketcapCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -45,7 +50,7 @@ func (r *SourceCoinmarketcapResourceModel) ToGetSDKType() *shared.SourceCoinmark
func (r *SourceCoinmarketcapResourceModel) ToUpdateSDKType() *shared.SourceCoinmarketcapPutRequest {
apiKey := r.Configuration.APIKey.ValueString()
- dataType := shared.SourceCoinmarketcapUpdateDataType(r.Configuration.DataType.ValueString())
+ dataType := shared.DataType(r.Configuration.DataType.ValueString())
var symbols []string = nil
for _, symbolsItem := range r.Configuration.Symbols {
symbols = append(symbols, symbolsItem.ValueString())
diff --git a/internal/provider/source_configcat_data_source.go b/internal/provider/source_configcat_data_source.go
old mode 100755
new mode 100644
index 6a09f342c..573d4dbae
--- a/internal/provider/source_configcat_data_source.go
+++ b/internal/provider/source_configcat_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceConfigcatDataSource struct {
// SourceConfigcatDataSourceModel describes the data model.
type SourceConfigcatDataSourceModel struct {
- Configuration SourceConfigcat `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourceConfigcatDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceConfigcat DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Basic auth password. See here.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "configcat",
- ),
- },
- Description: `must be one of ["configcat"]`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Basic auth user name. See here.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_configcat_data_source_sdk.go b/internal/provider/source_configcat_data_source_sdk.go
old mode 100755
new mode 100644
index bd30d3a6c..d10dab058
--- a/internal/provider/source_configcat_data_source_sdk.go
+++ b/internal/provider/source_configcat_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceConfigcatDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_configcat_resource.go b/internal/provider/source_configcat_resource.go
old mode 100755
new mode 100644
index c3fdbfdf8..f84aee6db
--- a/internal/provider/source_configcat_resource.go
+++ b/internal/provider/source_configcat_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceConfigcatResource struct {
// SourceConfigcatResourceModel describes the resource data model.
type SourceConfigcatResourceModel struct {
- Configuration SourceConfigcat `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration UsernamePassword `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceConfigcatResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,30 +56,33 @@ func (r *SourceConfigcatResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Basic auth password. See here.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "configcat",
- ),
- },
- Description: `must be one of ["configcat"]`,
- },
"username": schema.StringAttribute{
Required: true,
Description: `Basic auth user name. See here.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceConfigcatResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceConfigcat(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceConfigcatResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceConfigcatResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_configcat_resource_sdk.go b/internal/provider/source_configcat_resource_sdk.go
old mode 100755
new mode 100644
index 1c8bbdccd..1ef0063a0
--- a/internal/provider/source_configcat_resource_sdk.go
+++ b/internal/provider/source_configcat_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceConfigcatResourceModel) ToCreateSDKType() *shared.SourceConfigcatCreateRequest {
password := r.Configuration.Password.ValueString()
- sourceType := shared.SourceConfigcatConfigcat(r.Configuration.SourceType.ValueString())
username := r.Configuration.Username.ValueString()
configuration := shared.SourceConfigcat{
- Password: password,
- SourceType: sourceType,
- Username: username,
+ Password: password,
+ Username: username,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceConfigcatResourceModel) ToCreateSDKType() *shared.SourceConfigcat
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceConfigcatCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_confluence_data_source.go b/internal/provider/source_confluence_data_source.go
old mode 100755
new mode 100644
index e5b317012..562557b23
--- a/internal/provider/source_confluence_data_source.go
+++ b/internal/provider/source_confluence_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceConfluenceDataSource struct {
// SourceConfluenceDataSourceModel describes the data model.
type SourceConfluenceDataSourceModel struct {
- Configuration SourceConfluence `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,42 +47,20 @@ func (r *SourceConfluenceDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceConfluence DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `Please follow the Jira confluence for generating an API token: generating an API token.`,
- },
- "domain_name": schema.StringAttribute{
- Computed: true,
- Description: `Your Confluence domain name`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `Your Confluence login email`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "confluence",
- ),
- },
- Description: `must be one of ["confluence"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_confluence_data_source_sdk.go b/internal/provider/source_confluence_data_source_sdk.go
old mode 100755
new mode 100644
index 40987e738..4929783fc
--- a/internal/provider/source_confluence_data_source_sdk.go
+++ b/internal/provider/source_confluence_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceConfluenceDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_confluence_resource.go b/internal/provider/source_confluence_resource.go
old mode 100755
new mode 100644
index 7da345b6b..1127c2a87
--- a/internal/provider/source_confluence_resource.go
+++ b/internal/provider/source_confluence_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceConfluenceResource struct {
// SourceConfluenceResourceModel describes the resource data model.
type SourceConfluenceResourceModel struct {
Configuration SourceConfluence `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,6 +56,7 @@ func (r *SourceConfluenceResource) Schema(ctx context.Context, req resource.Sche
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Please follow the Jira confluence for generating an API token: generating an API token.`,
},
"domain_name": schema.StringAttribute{
@@ -66,24 +67,26 @@ func (r *SourceConfluenceResource) Schema(ctx context.Context, req resource.Sche
Required: true,
Description: `Your Confluence login email`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "confluence",
- ),
- },
- Description: `must be one of ["confluence"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +150,7 @@ func (r *SourceConfluenceResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceConfluence(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +326,5 @@ func (r *SourceConfluenceResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceConfluenceResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_confluence_resource_sdk.go b/internal/provider/source_confluence_resource_sdk.go
old mode 100755
new mode 100644
index 6f33ce505..d0213ebac
--- a/internal/provider/source_confluence_resource_sdk.go
+++ b/internal/provider/source_confluence_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -11,12 +11,16 @@ func (r *SourceConfluenceResourceModel) ToCreateSDKType() *shared.SourceConfluen
apiToken := r.Configuration.APIToken.ValueString()
domainName := r.Configuration.DomainName.ValueString()
email := r.Configuration.Email.ValueString()
- sourceType := shared.SourceConfluenceConfluence(r.Configuration.SourceType.ValueString())
configuration := shared.SourceConfluence{
APIToken: apiToken,
DomainName: domainName,
Email: email,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -28,6 +32,7 @@ func (r *SourceConfluenceResourceModel) ToCreateSDKType() *shared.SourceConfluen
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceConfluenceCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_convex_data_source.go b/internal/provider/source_convex_data_source.go
old mode 100755
new mode 100644
index d3b0a4ce8..5bf948667
--- a/internal/provider/source_convex_data_source.go
+++ b/internal/provider/source_convex_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceConvexDataSource struct {
// SourceConvexDataSourceModel describes the data model.
type SourceConvexDataSourceModel struct {
- Configuration SourceConvex `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,37 +47,20 @@ func (r *SourceConvexDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceConvex DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key": schema.StringAttribute{
- Computed: true,
- Description: `API access key used to retrieve data from Convex.`,
- },
- "deployment_url": schema.StringAttribute{
- Computed: true,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "convex",
- ),
- },
- Description: `must be one of ["convex"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_convex_data_source_sdk.go b/internal/provider/source_convex_data_source_sdk.go
old mode 100755
new mode 100644
index 472fbd7db..0731170e2
--- a/internal/provider/source_convex_data_source_sdk.go
+++ b/internal/provider/source_convex_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceConvexDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_convex_resource.go b/internal/provider/source_convex_resource.go
old mode 100755
new mode 100644
index 1ff28a826..86dd13155
--- a/internal/provider/source_convex_resource.go
+++ b/internal/provider/source_convex_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceConvexResource struct {
// SourceConvexResourceModel describes the resource data model.
type SourceConvexResourceModel struct {
- Configuration SourceConvex `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration DestinationConvex `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceConvexResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,29 +56,32 @@ func (r *SourceConvexResource) Schema(ctx context.Context, req resource.SchemaRe
Attributes: map[string]schema.Attribute{
"access_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API access key used to retrieve data from Convex.`,
},
"deployment_url": schema.StringAttribute{
Required: true,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "convex",
- ),
- },
- Description: `must be one of ["convex"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -142,7 +145,7 @@ func (r *SourceConvexResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceConvex(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -318,5 +321,5 @@ func (r *SourceConvexResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceConvexResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_convex_resource_sdk.go b/internal/provider/source_convex_resource_sdk.go
old mode 100755
new mode 100644
index 2b0c2b1be..c50d84d98
--- a/internal/provider/source_convex_resource_sdk.go
+++ b/internal/provider/source_convex_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceConvexResourceModel) ToCreateSDKType() *shared.SourceConvexCreateRequest {
accessKey := r.Configuration.AccessKey.ValueString()
deploymentURL := r.Configuration.DeploymentURL.ValueString()
- sourceType := shared.SourceConvexConvex(r.Configuration.SourceType.ValueString())
configuration := shared.SourceConvex{
AccessKey: accessKey,
DeploymentURL: deploymentURL,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceConvexResourceModel) ToCreateSDKType() *shared.SourceConvexCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceConvexCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_datascope_data_source.go b/internal/provider/source_datascope_data_source.go
old mode 100755
new mode 100644
index 77b2c7b61..980f45f95
--- a/internal/provider/source_datascope_data_source.go
+++ b/internal/provider/source_datascope_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceDatascopeDataSource struct {
// SourceDatascopeDataSourceModel describes the data model.
type SourceDatascopeDataSourceModel struct {
- Configuration SourceDatascope `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourceDatascopeDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceDatascope DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "datascope",
- ),
- },
- Description: `must be one of ["datascope"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `Start date for the data to be replicated`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_datascope_data_source_sdk.go b/internal/provider/source_datascope_data_source_sdk.go
old mode 100755
new mode 100644
index e3b511e1b..21cfa39a8
--- a/internal/provider/source_datascope_data_source_sdk.go
+++ b/internal/provider/source_datascope_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceDatascopeDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_datascope_resource.go b/internal/provider/source_datascope_resource.go
old mode 100755
new mode 100644
index 6226166f8..011d68671
--- a/internal/provider/source_datascope_resource.go
+++ b/internal/provider/source_datascope_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceDatascopeResource struct {
// SourceDatascopeResourceModel describes the resource data model.
type SourceDatascopeResourceModel struct {
Configuration SourceDatascope `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,30 +56,33 @@ func (r *SourceDatascopeResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "datascope",
- ),
- },
- Description: `must be one of ["datascope"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `Start date for the data to be replicated`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceDatascopeResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceDatascope(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceDatascopeResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceDatascopeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_datascope_resource_sdk.go b/internal/provider/source_datascope_resource_sdk.go
old mode 100755
new mode 100644
index f654711bb..571c2fa0d
--- a/internal/provider/source_datascope_resource_sdk.go
+++ b/internal/provider/source_datascope_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceDatascopeResourceModel) ToCreateSDKType() *shared.SourceDatascopeCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceDatascopeDatascope(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
configuration := shared.SourceDatascope{
- APIKey: apiKey,
- SourceType: sourceType,
- StartDate: startDate,
+ APIKey: apiKey,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceDatascopeResourceModel) ToCreateSDKType() *shared.SourceDatascope
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceDatascopeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_delighted_data_source.go b/internal/provider/source_delighted_data_source.go
old mode 100755
new mode 100644
index 5331bc74c..124321331
--- a/internal/provider/source_delighted_data_source.go
+++ b/internal/provider/source_delighted_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceDelightedDataSource struct {
// SourceDelightedDataSourceModel describes the data model.
type SourceDelightedDataSourceModel struct {
- Configuration SourceDelighted `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,41 +47,20 @@ func (r *SourceDelightedDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceDelighted DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `A Delighted API key.`,
- },
- "since": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate the data`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "delighted",
- ),
- },
- Description: `must be one of ["delighted"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_delighted_data_source_sdk.go b/internal/provider/source_delighted_data_source_sdk.go
old mode 100755
new mode 100644
index 5db938c12..0456da458
--- a/internal/provider/source_delighted_data_source_sdk.go
+++ b/internal/provider/source_delighted_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceDelightedDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_delighted_resource.go b/internal/provider/source_delighted_resource.go
old mode 100755
new mode 100644
index 88aa83548..bfe8bef2e
--- a/internal/provider/source_delighted_resource.go
+++ b/internal/provider/source_delighted_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceDelightedResource struct {
// SourceDelightedResourceModel describes the resource data model.
type SourceDelightedResourceModel struct {
Configuration SourceDelighted `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,33 +58,36 @@ func (r *SourceDelightedResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `A Delighted API key.`,
},
"since": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
+ Required: true,
Description: `The date from which you'd like to replicate the data`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
Validators: []validator.String{
- stringvalidator.OneOf(
- "delighted",
- ),
+ validators.IsRFC3339(),
},
- Description: `must be one of ["delighted"]`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +151,7 @@ func (r *SourceDelightedResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceDelighted(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +327,5 @@ func (r *SourceDelightedResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceDelightedResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_delighted_resource_sdk.go b/internal/provider/source_delighted_resource_sdk.go
old mode 100755
new mode 100644
index cf3cae9a2..540ad88a8
--- a/internal/provider/source_delighted_resource_sdk.go
+++ b/internal/provider/source_delighted_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -11,11 +11,15 @@ import (
func (r *SourceDelightedResourceModel) ToCreateSDKType() *shared.SourceDelightedCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
since, _ := time.Parse(time.RFC3339Nano, r.Configuration.Since.ValueString())
- sourceType := shared.SourceDelightedDelighted(r.Configuration.SourceType.ValueString())
configuration := shared.SourceDelighted{
- APIKey: apiKey,
- Since: since,
- SourceType: sourceType,
+ APIKey: apiKey,
+ Since: since,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -27,6 +31,7 @@ func (r *SourceDelightedResourceModel) ToCreateSDKType() *shared.SourceDelighted
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceDelightedCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_dixa_data_source.go b/internal/provider/source_dixa_data_source.go
old mode 100755
new mode 100644
index 430ba7472..775c1e747
--- a/internal/provider/source_dixa_data_source.go
+++ b/internal/provider/source_dixa_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceDixaDataSource struct {
// SourceDixaDataSourceModel describes the data model.
type SourceDixaDataSourceModel struct {
- Configuration SourceDixa `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,42 +47,20 @@ func (r *SourceDixaDataSource) Schema(ctx context.Context, req datasource.Schema
MarkdownDescription: "SourceDixa DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `Dixa API token`,
- },
- "batch_size": schema.Int64Attribute{
- Computed: true,
- Description: `Number of days to batch into one request. Max 31.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dixa",
- ),
- },
- Description: `must be one of ["dixa"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `The connector pulls records updated from this date onwards.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_dixa_data_source_sdk.go b/internal/provider/source_dixa_data_source_sdk.go
old mode 100755
new mode 100644
index f7bbe68e8..54c0450b8
--- a/internal/provider/source_dixa_data_source_sdk.go
+++ b/internal/provider/source_dixa_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceDixaDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_dixa_resource.go b/internal/provider/source_dixa_resource.go
old mode 100755
new mode 100644
index e7eba4786..b6c214de1
--- a/internal/provider/source_dixa_resource.go
+++ b/internal/provider/source_dixa_resource.go
@@ -3,17 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +36,7 @@ type SourceDixaResource struct {
// SourceDixaResourceModel describes the resource data model.
type SourceDixaResourceModel struct {
Configuration SourceDixa `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,34 +58,41 @@ func (r *SourceDixaResource) Schema(ctx context.Context, req resource.SchemaRequ
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Dixa API token`,
},
"batch_size": schema.Int64Attribute{
- Optional: true,
- Description: `Number of days to batch into one request. Max 31.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dixa",
- ),
- },
- Description: `must be one of ["dixa"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 31` + "\n" +
+ `Number of days to batch into one request. Max 31.`,
},
"start_date": schema.StringAttribute{
Required: true,
Description: `The connector pulls records updated from this date onwards.`,
+ Validators: []validator.String{
+ validators.IsRFC3339(),
+ },
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +156,7 @@ func (r *SourceDixaResource) Create(ctx context.Context, req resource.CreateRequ
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceDixa(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +332,5 @@ func (r *SourceDixaResource) Delete(ctx context.Context, req resource.DeleteRequ
}
func (r *SourceDixaResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_dixa_resource_sdk.go b/internal/provider/source_dixa_resource_sdk.go
old mode 100755
new mode 100644
index a2ab53e21..c407db820
--- a/internal/provider/source_dixa_resource_sdk.go
+++ b/internal/provider/source_dixa_resource_sdk.go
@@ -3,8 +3,9 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
+ "time"
)
func (r *SourceDixaResourceModel) ToCreateSDKType() *shared.SourceDixaCreateRequest {
@@ -15,13 +16,17 @@ func (r *SourceDixaResourceModel) ToCreateSDKType() *shared.SourceDixaCreateRequ
} else {
batchSize = nil
}
- sourceType := shared.SourceDixaDixa(r.Configuration.SourceType.ValueString())
- startDate := r.Configuration.StartDate.ValueString()
+ startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceDixa{
- APIToken: apiToken,
- BatchSize: batchSize,
- SourceType: sourceType,
- StartDate: startDate,
+ APIToken: apiToken,
+ BatchSize: batchSize,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -33,6 +38,7 @@ func (r *SourceDixaResourceModel) ToCreateSDKType() *shared.SourceDixaCreateRequ
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceDixaCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -53,7 +59,7 @@ func (r *SourceDixaResourceModel) ToUpdateSDKType() *shared.SourceDixaPutRequest
} else {
batchSize = nil
}
- startDate := r.Configuration.StartDate.ValueString()
+ startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceDixaUpdate{
APIToken: apiToken,
BatchSize: batchSize,
diff --git a/internal/provider/source_dockerhub_data_source.go b/internal/provider/source_dockerhub_data_source.go
old mode 100755
new mode 100644
index 6eaf070b8..ecf332cde
--- a/internal/provider/source_dockerhub_data_source.go
+++ b/internal/provider/source_dockerhub_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceDockerhubDataSource struct {
// SourceDockerhubDataSourceModel describes the data model.
type SourceDockerhubDataSourceModel struct {
- Configuration SourceDockerhub `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceDockerhubDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceDockerhub DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "docker_username": schema.StringAttribute{
- Computed: true,
- Description: `Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/ API call)`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dockerhub",
- ),
- },
- Description: `must be one of ["dockerhub"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_dockerhub_data_source_sdk.go b/internal/provider/source_dockerhub_data_source_sdk.go
old mode 100755
new mode 100644
index a20dd1699..899058c07
--- a/internal/provider/source_dockerhub_data_source_sdk.go
+++ b/internal/provider/source_dockerhub_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceDockerhubDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_dockerhub_resource.go b/internal/provider/source_dockerhub_resource.go
old mode 100755
new mode 100644
index b7fbf86f1..20036be38
--- a/internal/provider/source_dockerhub_resource.go
+++ b/internal/provider/source_dockerhub_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceDockerhubResource struct {
// SourceDockerhubResourceModel describes the resource data model.
type SourceDockerhubResourceModel struct {
Configuration SourceDockerhub `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,24 +58,26 @@ func (r *SourceDockerhubResource) Schema(ctx context.Context, req resource.Schem
Required: true,
Description: `Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/ API call)`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dockerhub",
- ),
- },
- Description: `must be one of ["dockerhub"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +141,7 @@ func (r *SourceDockerhubResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceDockerhub(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +317,5 @@ func (r *SourceDockerhubResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceDockerhubResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_dockerhub_resource_sdk.go b/internal/provider/source_dockerhub_resource_sdk.go
old mode 100755
new mode 100644
index 319386b59..5bc8f643e
--- a/internal/provider/source_dockerhub_resource_sdk.go
+++ b/internal/provider/source_dockerhub_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceDockerhubResourceModel) ToCreateSDKType() *shared.SourceDockerhubCreateRequest {
dockerUsername := r.Configuration.DockerUsername.ValueString()
- sourceType := shared.SourceDockerhubDockerhub(r.Configuration.SourceType.ValueString())
configuration := shared.SourceDockerhub{
DockerUsername: dockerUsername,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceDockerhubResourceModel) ToCreateSDKType() *shared.SourceDockerhub
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceDockerhubCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_dremio_data_source.go b/internal/provider/source_dremio_data_source.go
old mode 100755
new mode 100644
index 75d85cbc0..4b019084b
--- a/internal/provider/source_dremio_data_source.go
+++ b/internal/provider/source_dremio_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceDremioDataSource struct {
// SourceDremioDataSourceModel describes the data model.
type SourceDremioDataSourceModel struct {
- Configuration SourceDremio `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,38 +47,20 @@ func (r *SourceDremioDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceDremio DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key that is generated when you authenticate to Dremio API`,
- },
- "base_url": schema.StringAttribute{
- Computed: true,
- Description: `URL of your Dremio instance`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dremio",
- ),
- },
- Description: `must be one of ["dremio"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_dremio_data_source_sdk.go b/internal/provider/source_dremio_data_source_sdk.go
old mode 100755
new mode 100644
index fbda7da7b..982c5a8e6
--- a/internal/provider/source_dremio_data_source_sdk.go
+++ b/internal/provider/source_dremio_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceDremioDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_dremio_resource.go b/internal/provider/source_dremio_resource.go
old mode 100755
new mode 100644
index 3b9c533e6..75600745f
--- a/internal/provider/source_dremio_resource.go
+++ b/internal/provider/source_dremio_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceDremioResource struct {
// SourceDremioResourceModel describes the resource data model.
type SourceDremioResourceModel struct {
Configuration SourceDremio `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,30 +56,34 @@ func (r *SourceDremioResource) Schema(ctx context.Context, req resource.SchemaRe
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key that is generated when you authenticate to Dremio API`,
},
"base_url": schema.StringAttribute{
- Required: true,
- Description: `URL of your Dremio instance`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dremio",
- ),
- },
- Description: `must be one of ["dremio"]`,
+ Optional: true,
+ MarkdownDescription: `Default: "https://app.dremio.cloud"` + "\n" +
+ `URL of your Dremio instance`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +147,7 @@ func (r *SourceDremioResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceDremio(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +323,5 @@ func (r *SourceDremioResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceDremioResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_dremio_resource_sdk.go b/internal/provider/source_dremio_resource_sdk.go
old mode 100755
new mode 100644
index 82ab4d97b..24f08869c
--- a/internal/provider/source_dremio_resource_sdk.go
+++ b/internal/provider/source_dremio_resource_sdk.go
@@ -3,18 +3,27 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceDremioResourceModel) ToCreateSDKType() *shared.SourceDremioCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- baseURL := r.Configuration.BaseURL.ValueString()
- sourceType := shared.SourceDremioDremio(r.Configuration.SourceType.ValueString())
+ baseURL := new(string)
+ if !r.Configuration.BaseURL.IsUnknown() && !r.Configuration.BaseURL.IsNull() {
+ *baseURL = r.Configuration.BaseURL.ValueString()
+ } else {
+ baseURL = nil
+ }
configuration := shared.SourceDremio{
- APIKey: apiKey,
- BaseURL: baseURL,
- SourceType: sourceType,
+ APIKey: apiKey,
+ BaseURL: baseURL,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +35,7 @@ func (r *SourceDremioResourceModel) ToCreateSDKType() *shared.SourceDremioCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceDremioCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -40,7 +50,12 @@ func (r *SourceDremioResourceModel) ToGetSDKType() *shared.SourceDremioCreateReq
func (r *SourceDremioResourceModel) ToUpdateSDKType() *shared.SourceDremioPutRequest {
apiKey := r.Configuration.APIKey.ValueString()
- baseURL := r.Configuration.BaseURL.ValueString()
+ baseURL := new(string)
+ if !r.Configuration.BaseURL.IsUnknown() && !r.Configuration.BaseURL.IsNull() {
+ *baseURL = r.Configuration.BaseURL.ValueString()
+ } else {
+ baseURL = nil
+ }
configuration := shared.SourceDremioUpdate{
APIKey: apiKey,
BaseURL: baseURL,
diff --git a/internal/provider/source_dynamodb_data_source.go b/internal/provider/source_dynamodb_data_source.go
old mode 100755
new mode 100644
index c8edeaab4..8f0582d77
--- a/internal/provider/source_dynamodb_data_source.go
+++ b/internal/provider/source_dynamodb_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceDynamodbDataSource struct {
// SourceDynamodbDataSourceModel describes the data model.
type SourceDynamodbDataSourceModel struct {
- Configuration SourceDynamodb1 `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,81 +47,20 @@ func (r *SourceDynamodbDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceDynamodb DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `The access key id to access Dynamodb. Airbyte requires read permissions to the database`,
- },
- "endpoint": schema.StringAttribute{
- Computed: true,
- Description: `the URL of the Dynamodb database`,
- },
- "region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "",
- "us-east-1",
- "us-east-2",
- "us-west-1",
- "us-west-2",
- "af-south-1",
- "ap-east-1",
- "ap-south-1",
- "ap-northeast-1",
- "ap-northeast-2",
- "ap-northeast-3",
- "ap-southeast-1",
- "ap-southeast-2",
- "ca-central-1",
- "cn-north-1",
- "cn-northwest-1",
- "eu-central-1",
- "eu-north-1",
- "eu-south-1",
- "eu-west-1",
- "eu-west-2",
- "eu-west-3",
- "sa-east-1",
- "me-south-1",
- "us-gov-east-1",
- "us-gov-west-1",
- ),
- },
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the Dynamodb database`,
- },
- "reserved_attribute_names": schema.StringAttribute{
- Computed: true,
- Description: `Comma separated reserved attribute names present in your tables`,
- },
- "secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `The corresponding secret to the access key id.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dynamodb",
- ),
- },
- Description: `must be one of ["dynamodb"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_dynamodb_data_source_sdk.go b/internal/provider/source_dynamodb_data_source_sdk.go
old mode 100755
new mode 100644
index 6b4cf7806..f9685f09b
--- a/internal/provider/source_dynamodb_data_source_sdk.go
+++ b/internal/provider/source_dynamodb_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceDynamodbDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_dynamodb_resource.go b/internal/provider/source_dynamodb_resource.go
old mode 100755
new mode 100644
index e5590c9c1..7f9f8a426
--- a/internal/provider/source_dynamodb_resource.go
+++ b/internal/provider/source_dynamodb_resource.go
@@ -3,17 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +36,7 @@ type SourceDynamodbResource struct {
// SourceDynamodbResourceModel describes the resource data model.
type SourceDynamodbResourceModel struct {
Configuration SourceDynamodb `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,14 +58,18 @@ func (r *SourceDynamodbResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"access_key_id": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The access key id to access Dynamodb. Airbyte requires read permissions to the database`,
},
"endpoint": schema.StringAttribute{
- Optional: true,
- Description: `the URL of the Dynamodb database`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `the URL of the Dynamodb database`,
},
"region": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]; Default: ""` + "\n" +
+ `The region of the Dynamodb database`,
Validators: []validator.String{
stringvalidator.OneOf(
"",
@@ -94,8 +100,6 @@ func (r *SourceDynamodbResource) Schema(ctx context.Context, req resource.Schema
"us-gov-west-1",
),
},
- MarkdownDescription: `must be one of ["", "us-east-1", "us-east-2", "us-west-1", "us-west-2", "af-south-1", "ap-east-1", "ap-south-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-southeast-1", "ap-southeast-2", "ca-central-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-north-1", "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "me-south-1", "us-gov-east-1", "us-gov-west-1"]` + "\n" +
- `The region of the Dynamodb database`,
},
"reserved_attribute_names": schema.StringAttribute{
Optional: true,
@@ -103,26 +107,29 @@ func (r *SourceDynamodbResource) Schema(ctx context.Context, req resource.Schema
},
"secret_access_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The corresponding secret to the access key id.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "dynamodb",
- ),
- },
- Description: `must be one of ["dynamodb"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -186,7 +193,7 @@ func (r *SourceDynamodbResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceDynamodb(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -362,5 +369,5 @@ func (r *SourceDynamodbResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceDynamodbResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_dynamodb_resource_sdk.go b/internal/provider/source_dynamodb_resource_sdk.go
old mode 100755
new mode 100644
index 89ef85438..5d2f8a213
--- a/internal/provider/source_dynamodb_resource_sdk.go
+++ b/internal/provider/source_dynamodb_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -28,14 +28,18 @@ func (r *SourceDynamodbResourceModel) ToCreateSDKType() *shared.SourceDynamodbCr
reservedAttributeNames = nil
}
secretAccessKey := r.Configuration.SecretAccessKey.ValueString()
- sourceType := shared.SourceDynamodbDynamodb(r.Configuration.SourceType.ValueString())
configuration := shared.SourceDynamodb{
AccessKeyID: accessKeyID,
Endpoint: endpoint,
Region: region,
ReservedAttributeNames: reservedAttributeNames,
SecretAccessKey: secretAccessKey,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -47,6 +51,7 @@ func (r *SourceDynamodbResourceModel) ToCreateSDKType() *shared.SourceDynamodbCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceDynamodbCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_e2etestcloud_data_source.go b/internal/provider/source_e2etestcloud_data_source.go
deleted file mode 100755
index 69a91fcef..000000000
--- a/internal/provider/source_e2etestcloud_data_source.go
+++ /dev/null
@@ -1,271 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
- "context"
- "fmt"
-
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ datasource.DataSource = &SourceE2eTestCloudDataSource{}
-var _ datasource.DataSourceWithConfigure = &SourceE2eTestCloudDataSource{}
-
-func NewSourceE2eTestCloudDataSource() datasource.DataSource {
- return &SourceE2eTestCloudDataSource{}
-}
-
-// SourceE2eTestCloudDataSource is the data source implementation.
-type SourceE2eTestCloudDataSource struct {
- client *sdk.SDK
-}
-
-// SourceE2eTestCloudDataSourceModel describes the data model.
-type SourceE2eTestCloudDataSourceModel struct {
- Configuration SourceE2eTestCloud `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-// Metadata returns the data source type name.
-func (r *SourceE2eTestCloudDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_e2e_test_cloud"
-}
-
-// Schema defines the schema for the data source.
-func (r *SourceE2eTestCloudDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "SourceE2eTestCloud DataSource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "max_messages": schema.Int64Attribute{
- Computed: true,
- Description: `Number of records to emit per stream. Min 1. Max 100 billion.`,
- },
- "message_interval_ms": schema.Int64Attribute{
- Computed: true,
- Description: `Interval between messages in ms. Min 0 ms. Max 60000 ms (1 minute).`,
- },
- "mock_catalog": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_e2e_test_cloud_mock_catalog_multi_schema": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "stream_schemas": schema.StringAttribute{
- Computed: true,
- Description: `A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples.`,
- },
- "type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "MULTI_STREAM",
- ),
- },
- Description: `must be one of ["MULTI_STREAM"]`,
- },
- },
- Description: `A catalog with multiple data streams, each with a different schema.`,
- },
- "source_e2e_test_cloud_mock_catalog_single_schema": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "stream_duplication": schema.Int64Attribute{
- Computed: true,
- Description: `Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is "ds", the duplicated streams will be "ds_0", "ds_1", etc.`,
- },
- "stream_name": schema.StringAttribute{
- Computed: true,
- Description: `Name of the data stream.`,
- },
- "stream_schema": schema.StringAttribute{
- Computed: true,
- Description: `A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples.`,
- },
- "type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SINGLE_STREAM",
- ),
- },
- Description: `must be one of ["SINGLE_STREAM"]`,
- },
- },
- Description: `A catalog with one or multiple streams that share the same schema.`,
- },
- "source_e2e_test_cloud_update_mock_catalog_multi_schema": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "stream_schemas": schema.StringAttribute{
- Computed: true,
- Description: `A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples.`,
- },
- "type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "MULTI_STREAM",
- ),
- },
- Description: `must be one of ["MULTI_STREAM"]`,
- },
- },
- Description: `A catalog with multiple data streams, each with a different schema.`,
- },
- "source_e2e_test_cloud_update_mock_catalog_single_schema": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "stream_duplication": schema.Int64Attribute{
- Computed: true,
- Description: `Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is "ds", the duplicated streams will be "ds_0", "ds_1", etc.`,
- },
- "stream_name": schema.StringAttribute{
- Computed: true,
- Description: `Name of the data stream.`,
- },
- "stream_schema": schema.StringAttribute{
- Computed: true,
- Description: `A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples.`,
- },
- "type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SINGLE_STREAM",
- ),
- },
- Description: `must be one of ["SINGLE_STREAM"]`,
- },
- },
- Description: `A catalog with one or multiple streams that share the same schema.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "seed": schema.Int64Attribute{
- Computed: true,
- Description: `When the seed is unspecified, the current time millis will be used as the seed. Range: [0, 1000000].`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "e2e-test-cloud",
- ),
- },
- Description: `must be one of ["e2e-test-cloud"]`,
- },
- "type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CONTINUOUS_FEED",
- ),
- },
- Description: `must be one of ["CONTINUOUS_FEED"]`,
- },
- },
- },
- "name": schema.StringAttribute{
- Computed: true,
- },
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
- "source_id": schema.StringAttribute{
- Required: true,
- },
- "workspace_id": schema.StringAttribute{
- Computed: true,
- },
- },
- }
-}
-
-func (r *SourceE2eTestCloudDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected DataSource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *SourceE2eTestCloudDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data *SourceE2eTestCloudDataSourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.GetSourceE2eTestCloudRequest{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.GetSourceE2eTestCloud(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
diff --git a/internal/provider/source_e2etestcloud_data_source_sdk.go b/internal/provider/source_e2etestcloud_data_source_sdk.go
deleted file mode 100755
index c352dea6d..000000000
--- a/internal/provider/source_e2etestcloud_data_source_sdk.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceE2eTestCloudDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
diff --git a/internal/provider/source_e2etestcloud_resource.go b/internal/provider/source_e2etestcloud_resource.go
deleted file mode 100755
index 460cec7dd..000000000
--- a/internal/provider/source_e2etestcloud_resource.go
+++ /dev/null
@@ -1,437 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "context"
- "fmt"
-
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ resource.Resource = &SourceE2eTestCloudResource{}
-var _ resource.ResourceWithImportState = &SourceE2eTestCloudResource{}
-
-func NewSourceE2eTestCloudResource() resource.Resource {
- return &SourceE2eTestCloudResource{}
-}
-
-// SourceE2eTestCloudResource defines the resource implementation.
-type SourceE2eTestCloudResource struct {
- client *sdk.SDK
-}
-
-// SourceE2eTestCloudResourceModel describes the resource data model.
-type SourceE2eTestCloudResourceModel struct {
- Configuration SourceE2eTestCloud `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-func (r *SourceE2eTestCloudResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_e2e_test_cloud"
-}
-
-func (r *SourceE2eTestCloudResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "SourceE2eTestCloud Resource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "max_messages": schema.Int64Attribute{
- Required: true,
- Description: `Number of records to emit per stream. Min 1. Max 100 billion.`,
- },
- "message_interval_ms": schema.Int64Attribute{
- Optional: true,
- Description: `Interval between messages in ms. Min 0 ms. Max 60000 ms (1 minute).`,
- },
- "mock_catalog": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "source_e2e_test_cloud_mock_catalog_multi_schema": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "stream_schemas": schema.StringAttribute{
- Required: true,
- Description: `A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples.`,
- },
- "type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "MULTI_STREAM",
- ),
- },
- Description: `must be one of ["MULTI_STREAM"]`,
- },
- },
- Description: `A catalog with multiple data streams, each with a different schema.`,
- },
- "source_e2e_test_cloud_mock_catalog_single_schema": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "stream_duplication": schema.Int64Attribute{
- Optional: true,
- Description: `Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is "ds", the duplicated streams will be "ds_0", "ds_1", etc.`,
- },
- "stream_name": schema.StringAttribute{
- Required: true,
- Description: `Name of the data stream.`,
- },
- "stream_schema": schema.StringAttribute{
- Required: true,
- Description: `A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples.`,
- },
- "type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SINGLE_STREAM",
- ),
- },
- Description: `must be one of ["SINGLE_STREAM"]`,
- },
- },
- Description: `A catalog with one or multiple streams that share the same schema.`,
- },
- "source_e2e_test_cloud_update_mock_catalog_multi_schema": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "stream_schemas": schema.StringAttribute{
- Required: true,
- Description: `A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples.`,
- },
- "type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "MULTI_STREAM",
- ),
- },
- Description: `must be one of ["MULTI_STREAM"]`,
- },
- },
- Description: `A catalog with multiple data streams, each with a different schema.`,
- },
- "source_e2e_test_cloud_update_mock_catalog_single_schema": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "stream_duplication": schema.Int64Attribute{
- Optional: true,
- Description: `Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is "ds", the duplicated streams will be "ds_0", "ds_1", etc.`,
- },
- "stream_name": schema.StringAttribute{
- Required: true,
- Description: `Name of the data stream.`,
- },
- "stream_schema": schema.StringAttribute{
- Required: true,
- Description: `A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples.`,
- },
- "type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SINGLE_STREAM",
- ),
- },
- Description: `must be one of ["SINGLE_STREAM"]`,
- },
- },
- Description: `A catalog with one or multiple streams that share the same schema.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "seed": schema.Int64Attribute{
- Optional: true,
- Description: `When the seed is unspecified, the current time millis will be used as the seed. Range: [0, 1000000].`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "e2e-test-cloud",
- ),
- },
- Description: `must be one of ["e2e-test-cloud"]`,
- },
- "type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CONTINUOUS_FEED",
- ),
- },
- Description: `must be one of ["CONTINUOUS_FEED"]`,
- },
- },
- },
- "name": schema.StringAttribute{
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- Required: true,
- },
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
- "source_id": schema.StringAttribute{
- Computed: true,
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- },
- "workspace_id": schema.StringAttribute{
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- Required: true,
- },
- },
- }
-}
-
-func (r *SourceE2eTestCloudResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected Resource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *SourceE2eTestCloudResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data *SourceE2eTestCloudResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- request := *data.ToCreateSDKType()
- res, err := r.client.Sources.CreateSourceE2eTestCloud(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromCreateResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceE2eTestCloudResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data *SourceE2eTestCloudResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.GetSourceE2eTestCloudRequest{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.GetSourceE2eTestCloud(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceE2eTestCloudResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data *SourceE2eTestCloudResourceModel
- merge(ctx, req, resp, &data)
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceE2eTestCloudPutRequest := data.ToUpdateSDKType()
- sourceID := data.SourceID.ValueString()
- request := operations.PutSourceE2eTestCloudRequest{
- SourceE2eTestCloudPutRequest: sourceE2eTestCloudPutRequest,
- SourceID: sourceID,
- }
- res, err := r.client.Sources.PutSourceE2eTestCloud(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- sourceId1 := data.SourceID.ValueString()
- getRequest := operations.GetSourceE2eTestCloudRequest{
- SourceID: sourceId1,
- }
- getResponse, err := r.client.Sources.GetSourceE2eTestCloud(ctx, getRequest)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if getResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse))
- return
- }
- if getResponse.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse))
- return
- }
- if getResponse.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse))
- return
- }
- data.RefreshFromGetResponse(getResponse.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceE2eTestCloudResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data *SourceE2eTestCloudResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.DeleteSourceE2eTestCloudRequest{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.DeleteSourceE2eTestCloud(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
-
-}
-
-func (r *SourceE2eTestCloudResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
-}
diff --git a/internal/provider/source_e2etestcloud_resource_sdk.go b/internal/provider/source_e2etestcloud_resource_sdk.go
deleted file mode 100755
index 17cef7c53..000000000
--- a/internal/provider/source_e2etestcloud_resource_sdk.go
+++ /dev/null
@@ -1,188 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceE2eTestCloudResourceModel) ToCreateSDKType() *shared.SourceE2eTestCloudCreateRequest {
- maxMessages := r.Configuration.MaxMessages.ValueInt64()
- messageIntervalMs := new(int64)
- if !r.Configuration.MessageIntervalMs.IsUnknown() && !r.Configuration.MessageIntervalMs.IsNull() {
- *messageIntervalMs = r.Configuration.MessageIntervalMs.ValueInt64()
- } else {
- messageIntervalMs = nil
- }
- var mockCatalog shared.SourceE2eTestCloudMockCatalog
- var sourceE2eTestCloudMockCatalogSingleSchema *shared.SourceE2eTestCloudMockCatalogSingleSchema
- if r.Configuration.MockCatalog.SourceE2eTestCloudMockCatalogSingleSchema != nil {
- streamDuplication := new(int64)
- if !r.Configuration.MockCatalog.SourceE2eTestCloudMockCatalogSingleSchema.StreamDuplication.IsUnknown() && !r.Configuration.MockCatalog.SourceE2eTestCloudMockCatalogSingleSchema.StreamDuplication.IsNull() {
- *streamDuplication = r.Configuration.MockCatalog.SourceE2eTestCloudMockCatalogSingleSchema.StreamDuplication.ValueInt64()
- } else {
- streamDuplication = nil
- }
- streamName := r.Configuration.MockCatalog.SourceE2eTestCloudMockCatalogSingleSchema.StreamName.ValueString()
- streamSchema := r.Configuration.MockCatalog.SourceE2eTestCloudMockCatalogSingleSchema.StreamSchema.ValueString()
- typeVar := shared.SourceE2eTestCloudMockCatalogSingleSchemaType(r.Configuration.MockCatalog.SourceE2eTestCloudMockCatalogSingleSchema.Type.ValueString())
- sourceE2eTestCloudMockCatalogSingleSchema = &shared.SourceE2eTestCloudMockCatalogSingleSchema{
- StreamDuplication: streamDuplication,
- StreamName: streamName,
- StreamSchema: streamSchema,
- Type: typeVar,
- }
- }
- if sourceE2eTestCloudMockCatalogSingleSchema != nil {
- mockCatalog = shared.SourceE2eTestCloudMockCatalog{
- SourceE2eTestCloudMockCatalogSingleSchema: sourceE2eTestCloudMockCatalogSingleSchema,
- }
- }
- var sourceE2eTestCloudMockCatalogMultiSchema *shared.SourceE2eTestCloudMockCatalogMultiSchema
- if r.Configuration.MockCatalog.SourceE2eTestCloudMockCatalogMultiSchema != nil {
- streamSchemas := r.Configuration.MockCatalog.SourceE2eTestCloudMockCatalogMultiSchema.StreamSchemas.ValueString()
- typeVar1 := shared.SourceE2eTestCloudMockCatalogMultiSchemaType(r.Configuration.MockCatalog.SourceE2eTestCloudMockCatalogMultiSchema.Type.ValueString())
- sourceE2eTestCloudMockCatalogMultiSchema = &shared.SourceE2eTestCloudMockCatalogMultiSchema{
- StreamSchemas: streamSchemas,
- Type: typeVar1,
- }
- }
- if sourceE2eTestCloudMockCatalogMultiSchema != nil {
- mockCatalog = shared.SourceE2eTestCloudMockCatalog{
- SourceE2eTestCloudMockCatalogMultiSchema: sourceE2eTestCloudMockCatalogMultiSchema,
- }
- }
- seed := new(int64)
- if !r.Configuration.Seed.IsUnknown() && !r.Configuration.Seed.IsNull() {
- *seed = r.Configuration.Seed.ValueInt64()
- } else {
- seed = nil
- }
- sourceType := shared.SourceE2eTestCloudE2eTestCloud(r.Configuration.SourceType.ValueString())
- typeVar2 := new(shared.SourceE2eTestCloudType)
- if !r.Configuration.Type.IsUnknown() && !r.Configuration.Type.IsNull() {
- *typeVar2 = shared.SourceE2eTestCloudType(r.Configuration.Type.ValueString())
- } else {
- typeVar2 = nil
- }
- configuration := shared.SourceE2eTestCloud{
- MaxMessages: maxMessages,
- MessageIntervalMs: messageIntervalMs,
- MockCatalog: mockCatalog,
- Seed: seed,
- SourceType: sourceType,
- Type: typeVar2,
- }
- name := r.Name.ValueString()
- secretID := new(string)
- if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
- *secretID = r.SecretID.ValueString()
- } else {
- secretID = nil
- }
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceE2eTestCloudCreateRequest{
- Configuration: configuration,
- Name: name,
- SecretID: secretID,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceE2eTestCloudResourceModel) ToGetSDKType() *shared.SourceE2eTestCloudCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceE2eTestCloudResourceModel) ToUpdateSDKType() *shared.SourceE2eTestCloudPutRequest {
- maxMessages := r.Configuration.MaxMessages.ValueInt64()
- messageIntervalMs := new(int64)
- if !r.Configuration.MessageIntervalMs.IsUnknown() && !r.Configuration.MessageIntervalMs.IsNull() {
- *messageIntervalMs = r.Configuration.MessageIntervalMs.ValueInt64()
- } else {
- messageIntervalMs = nil
- }
- var mockCatalog shared.SourceE2eTestCloudUpdateMockCatalog
- var sourceE2eTestCloudUpdateMockCatalogSingleSchema *shared.SourceE2eTestCloudUpdateMockCatalogSingleSchema
- if r.Configuration.MockCatalog.SourceE2eTestCloudUpdateMockCatalogSingleSchema != nil {
- streamDuplication := new(int64)
- if !r.Configuration.MockCatalog.SourceE2eTestCloudUpdateMockCatalogSingleSchema.StreamDuplication.IsUnknown() && !r.Configuration.MockCatalog.SourceE2eTestCloudUpdateMockCatalogSingleSchema.StreamDuplication.IsNull() {
- *streamDuplication = r.Configuration.MockCatalog.SourceE2eTestCloudUpdateMockCatalogSingleSchema.StreamDuplication.ValueInt64()
- } else {
- streamDuplication = nil
- }
- streamName := r.Configuration.MockCatalog.SourceE2eTestCloudUpdateMockCatalogSingleSchema.StreamName.ValueString()
- streamSchema := r.Configuration.MockCatalog.SourceE2eTestCloudUpdateMockCatalogSingleSchema.StreamSchema.ValueString()
- typeVar := shared.SourceE2eTestCloudUpdateMockCatalogSingleSchemaType(r.Configuration.MockCatalog.SourceE2eTestCloudUpdateMockCatalogSingleSchema.Type.ValueString())
- sourceE2eTestCloudUpdateMockCatalogSingleSchema = &shared.SourceE2eTestCloudUpdateMockCatalogSingleSchema{
- StreamDuplication: streamDuplication,
- StreamName: streamName,
- StreamSchema: streamSchema,
- Type: typeVar,
- }
- }
- if sourceE2eTestCloudUpdateMockCatalogSingleSchema != nil {
- mockCatalog = shared.SourceE2eTestCloudUpdateMockCatalog{
- SourceE2eTestCloudUpdateMockCatalogSingleSchema: sourceE2eTestCloudUpdateMockCatalogSingleSchema,
- }
- }
- var sourceE2eTestCloudUpdateMockCatalogMultiSchema *shared.SourceE2eTestCloudUpdateMockCatalogMultiSchema
- if r.Configuration.MockCatalog.SourceE2eTestCloudUpdateMockCatalogMultiSchema != nil {
- streamSchemas := r.Configuration.MockCatalog.SourceE2eTestCloudUpdateMockCatalogMultiSchema.StreamSchemas.ValueString()
- typeVar1 := shared.SourceE2eTestCloudUpdateMockCatalogMultiSchemaType(r.Configuration.MockCatalog.SourceE2eTestCloudUpdateMockCatalogMultiSchema.Type.ValueString())
- sourceE2eTestCloudUpdateMockCatalogMultiSchema = &shared.SourceE2eTestCloudUpdateMockCatalogMultiSchema{
- StreamSchemas: streamSchemas,
- Type: typeVar1,
- }
- }
- if sourceE2eTestCloudUpdateMockCatalogMultiSchema != nil {
- mockCatalog = shared.SourceE2eTestCloudUpdateMockCatalog{
- SourceE2eTestCloudUpdateMockCatalogMultiSchema: sourceE2eTestCloudUpdateMockCatalogMultiSchema,
- }
- }
- seed := new(int64)
- if !r.Configuration.Seed.IsUnknown() && !r.Configuration.Seed.IsNull() {
- *seed = r.Configuration.Seed.ValueInt64()
- } else {
- seed = nil
- }
- typeVar2 := new(shared.SourceE2eTestCloudUpdateType)
- if !r.Configuration.Type.IsUnknown() && !r.Configuration.Type.IsNull() {
- *typeVar2 = shared.SourceE2eTestCloudUpdateType(r.Configuration.Type.ValueString())
- } else {
- typeVar2 = nil
- }
- configuration := shared.SourceE2eTestCloudUpdate{
- MaxMessages: maxMessages,
- MessageIntervalMs: messageIntervalMs,
- MockCatalog: mockCatalog,
- Seed: seed,
- Type: typeVar2,
- }
- name := r.Name.ValueString()
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceE2eTestCloudPutRequest{
- Configuration: configuration,
- Name: name,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceE2eTestCloudResourceModel) ToDeleteSDKType() *shared.SourceE2eTestCloudCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceE2eTestCloudResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.SourceType = types.StringValue(resp.SourceType)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
-
-func (r *SourceE2eTestCloudResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
- r.RefreshFromGetResponse(resp)
-}
diff --git a/internal/provider/source_emailoctopus_data_source.go b/internal/provider/source_emailoctopus_data_source.go
old mode 100755
new mode 100644
index a9b4f9275..a0c68c199
--- a/internal/provider/source_emailoctopus_data_source.go
+++ b/internal/provider/source_emailoctopus_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceEmailoctopusDataSource struct {
// SourceEmailoctopusDataSourceModel describes the data model.
type SourceEmailoctopusDataSourceModel struct {
- Configuration SourceEmailoctopus `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceEmailoctopusDataSource) Schema(ctx context.Context, req datasourc
MarkdownDescription: "SourceEmailoctopus DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `EmailOctopus API Key. See the docs for information on how to generate this key.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "emailoctopus",
- ),
- },
- Description: `must be one of ["emailoctopus"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_emailoctopus_data_source_sdk.go b/internal/provider/source_emailoctopus_data_source_sdk.go
old mode 100755
new mode 100644
index 3472f5fbb..8c6b138b2
--- a/internal/provider/source_emailoctopus_data_source_sdk.go
+++ b/internal/provider/source_emailoctopus_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceEmailoctopusDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_emailoctopus_resource.go b/internal/provider/source_emailoctopus_resource.go
old mode 100755
new mode 100644
index b899a6c24..b76d042bd
--- a/internal/provider/source_emailoctopus_resource.go
+++ b/internal/provider/source_emailoctopus_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceEmailoctopusResource struct {
// SourceEmailoctopusResourceModel describes the resource data model.
type SourceEmailoctopusResourceModel struct {
- Configuration SourceEmailoctopus `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceEmailoctopusResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceEmailoctopusResource) Schema(ctx context.Context, req resource.Sc
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `EmailOctopus API Key. See the docs for information on how to generate this key.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "emailoctopus",
- ),
- },
- Description: `must be one of ["emailoctopus"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceEmailoctopusResource) Create(ctx context.Context, req resource.Cr
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceEmailoctopus(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceEmailoctopusResource) Delete(ctx context.Context, req resource.De
}
func (r *SourceEmailoctopusResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_emailoctopus_resource_sdk.go b/internal/provider/source_emailoctopus_resource_sdk.go
old mode 100755
new mode 100644
index fefd5dc41..36f304814
--- a/internal/provider/source_emailoctopus_resource_sdk.go
+++ b/internal/provider/source_emailoctopus_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceEmailoctopusResourceModel) ToCreateSDKType() *shared.SourceEmailoctopusCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceEmailoctopusEmailoctopus(r.Configuration.SourceType.ValueString())
configuration := shared.SourceEmailoctopus{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceEmailoctopusResourceModel) ToCreateSDKType() *shared.SourceEmailo
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceEmailoctopusCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_exchangerates_data_source.go b/internal/provider/source_exchangerates_data_source.go
old mode 100755
new mode 100644
index f69f80eb8..d5e59545d
--- a/internal/provider/source_exchangerates_data_source.go
+++ b/internal/provider/source_exchangerates_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceExchangeRatesDataSource struct {
// SourceExchangeRatesDataSourceModel describes the data model.
type SourceExchangeRatesDataSourceModel struct {
- Configuration SourceExchangeRates `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,49 +47,20 @@ func (r *SourceExchangeRatesDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "SourceExchangeRates DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key": schema.StringAttribute{
- Computed: true,
- Description: `Your API Key. See here. The key is case sensitive.`,
- },
- "base": schema.StringAttribute{
- Computed: true,
- Description: `ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default base currency is EUR`,
- },
- "ignore_weekends": schema.BoolAttribute{
- Computed: true,
- Description: `Ignore weekends? (Exchanges don't run on weekends)`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "exchange-rates",
- ),
- },
- Description: `must be one of ["exchange-rates"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `Start getting data from that date.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_exchangerates_data_source_sdk.go b/internal/provider/source_exchangerates_data_source_sdk.go
old mode 100755
new mode 100644
index 5e81d4fdf..a4db7db7a
--- a/internal/provider/source_exchangerates_data_source_sdk.go
+++ b/internal/provider/source_exchangerates_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceExchangeRatesDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_exchangerates_resource.go b/internal/provider/source_exchangerates_resource.go
old mode 100755
new mode 100644
index e5fea23d3..35f3f637a
--- a/internal/provider/source_exchangerates_resource.go
+++ b/internal/provider/source_exchangerates_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceExchangeRatesResource struct {
// SourceExchangeRatesResourceModel describes the resource data model.
type SourceExchangeRatesResourceModel struct {
Configuration SourceExchangeRates `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,6 +58,7 @@ func (r *SourceExchangeRatesResource) Schema(ctx context.Context, req resource.S
Attributes: map[string]schema.Attribute{
"access_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your API Key. See here. The key is case sensitive.`,
},
"base": schema.StringAttribute{
@@ -64,34 +66,37 @@ func (r *SourceExchangeRatesResource) Schema(ctx context.Context, req resource.S
Description: `ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default base currency is EUR`,
},
"ignore_weekends": schema.BoolAttribute{
- Optional: true,
- Description: `Ignore weekends? (Exchanges don't run on weekends)`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "exchange-rates",
- ),
- },
- Description: `must be one of ["exchange-rates"]`,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Ignore weekends? (Exchanges don't run on weekends)`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `Start getting data from that date.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `Start getting data from that date.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +160,7 @@ func (r *SourceExchangeRatesResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceExchangeRates(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +336,5 @@ func (r *SourceExchangeRatesResource) Delete(ctx context.Context, req resource.D
}
func (r *SourceExchangeRatesResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_exchangerates_resource_sdk.go b/internal/provider/source_exchangerates_resource_sdk.go
old mode 100755
new mode 100644
index c886b5c1a..f20caa5a5
--- a/internal/provider/source_exchangerates_resource_sdk.go
+++ b/internal/provider/source_exchangerates_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -22,15 +22,19 @@ func (r *SourceExchangeRatesResourceModel) ToCreateSDKType() *shared.SourceExcha
} else {
ignoreWeekends = nil
}
- sourceType := shared.SourceExchangeRatesExchangeRates(r.Configuration.SourceType.ValueString())
startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
configuration := shared.SourceExchangeRates{
AccessKey: accessKey,
Base: base,
IgnoreWeekends: ignoreWeekends,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -41,6 +45,7 @@ func (r *SourceExchangeRatesResourceModel) ToCreateSDKType() *shared.SourceExcha
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceExchangeRatesCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_facebookmarketing_data_source.go b/internal/provider/source_facebookmarketing_data_source.go
old mode 100755
new mode 100644
index 684622497..958448040
--- a/internal/provider/source_facebookmarketing_data_source.go
+++ b/internal/provider/source_facebookmarketing_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceFacebookMarketingDataSource struct {
// SourceFacebookMarketingDataSourceModel describes the data model.
type SourceFacebookMarketingDataSourceModel struct {
- Configuration SourceFacebookMarketing `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,158 +47,20 @@ func (r *SourceFacebookMarketingDataSource) Schema(ctx context.Context, req data
MarkdownDescription: "SourceFacebookMarketing DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information.`,
- },
- "account_id": schema.StringAttribute{
- Computed: true,
- Description: `The Facebook Ad account ID to use when pulling data from the Facebook Marketing API. Open your Meta Ads Manager. The Ad account ID number is in the account dropdown menu or in your browser's address bar. See the docs for more information.`,
- },
- "action_breakdowns_allow_empty": schema.BoolAttribute{
- Computed: true,
- Description: `Allows action_breakdowns to be an empty list`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client Id for your OAuth app`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret for your OAuth app`,
- },
- "custom_insights": schema.ListNestedAttribute{
- Computed: true,
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "action_breakdowns": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A list of chosen action_breakdowns for action_breakdowns`,
- },
- "action_report_time": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "conversion",
- "impression",
- "mixed",
- ),
- },
- MarkdownDescription: `must be one of ["conversion", "impression", "mixed"]` + "\n" +
- `Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.`,
- },
- "breakdowns": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A list of chosen breakdowns for breakdowns`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.`,
- },
- "fields": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A list of chosen fields for fields parameter`,
- },
- "insights_lookback_window": schema.Int64Attribute{
- Computed: true,
- Description: `The attribution window`,
- },
- "level": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ad",
- "adset",
- "campaign",
- "account",
- ),
- },
- MarkdownDescription: `must be one of ["ad", "adset", "campaign", "account"]` + "\n" +
- `Chosen level for API`,
- },
- "name": schema.StringAttribute{
- Computed: true,
- Description: `The name value of insight`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z.`,
- },
- "time_increment": schema.Int64Attribute{
- Computed: true,
- Description: `Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only).`,
- },
- },
- },
- Description: `A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on "add" to fill this field.`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.`,
- },
- "fetch_thumbnail_images": schema.BoolAttribute{
- Computed: true,
- Description: `Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative.`,
- },
- "include_deleted": schema.BoolAttribute{
- Computed: true,
- Description: `Set to active if you want to include data from deleted Campaigns, Ads, and AdSets.`,
- },
- "insights_lookback_window": schema.Int64Attribute{
- Computed: true,
- Description: `The attribution window. Facebook freezes insight data 28 days after it was generated, which means that all data from the past 28 days may have changed since we last emitted it, so you can retrieve refreshed insights from the past by setting this parameter. If you set a custom lookback window value in Facebook account, please provide the same value here.`,
- },
- "max_batch_size": schema.Int64Attribute{
- Computed: true,
- Description: `Maximum batch size used when sending batch requests to Facebook API. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.`,
- },
- "page_size": schema.Int64Attribute{
- Computed: true,
- Description: `Page size used when sending requests to Facebook API to specify number of records per page when response has pagination. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "facebook-marketing",
- ),
- },
- Description: `must be one of ["facebook-marketing"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_facebookmarketing_data_source_sdk.go b/internal/provider/source_facebookmarketing_data_source_sdk.go
old mode 100755
new mode 100644
index 94774d2ef..d7ece20b4
--- a/internal/provider/source_facebookmarketing_data_source_sdk.go
+++ b/internal/provider/source_facebookmarketing_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceFacebookMarketingDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_facebookmarketing_resource.go b/internal/provider/source_facebookmarketing_resource.go
old mode 100755
new mode 100644
index 367473bb8..1da747413
--- a/internal/provider/source_facebookmarketing_resource.go
+++ b/internal/provider/source_facebookmarketing_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceFacebookMarketingResource struct {
// SourceFacebookMarketingResourceModel describes the resource data model.
type SourceFacebookMarketingResourceModel struct {
Configuration SourceFacebookMarketing `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,15 +59,17 @@ func (r *SourceFacebookMarketingResource) Schema(ctx context.Context, req resour
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information.`,
},
"account_id": schema.StringAttribute{
Required: true,
- Description: `The Facebook Ad account ID to use when pulling data from the Facebook Marketing API. Open your Meta Ads Manager. The Ad account ID number is in the account dropdown menu or in your browser's address bar. See the docs for more information.`,
+ Description: `The Facebook Ad account ID to use when pulling data from the Facebook Marketing API. The Ad account ID number is in the account dropdown menu or in your browser's address bar of your Meta Ads Manager. See the docs for more information.`,
},
"action_breakdowns_allow_empty": schema.BoolAttribute{
- Optional: true,
- Description: `Allows action_breakdowns to be an empty list`,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Allows action_breakdowns to be an empty list`,
},
"client_id": schema.StringAttribute{
Optional: true,
@@ -86,6 +90,8 @@ func (r *SourceFacebookMarketingResource) Schema(ctx context.Context, req resour
},
"action_report_time": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["conversion", "impression", "mixed"]; Default: "mixed"` + "\n" +
+ `Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.`,
Validators: []validator.String{
stringvalidator.OneOf(
"conversion",
@@ -93,8 +99,6 @@ func (r *SourceFacebookMarketingResource) Schema(ctx context.Context, req resour
"mixed",
),
},
- MarkdownDescription: `must be one of ["conversion", "impression", "mixed"]` + "\n" +
- `Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.`,
},
"breakdowns": schema.ListAttribute{
Optional: true,
@@ -102,11 +106,11 @@ func (r *SourceFacebookMarketingResource) Schema(ctx context.Context, req resour
Description: `A list of chosen breakdowns for breakdowns`,
},
"end_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.`,
},
"fields": schema.ListAttribute{
Optional: true,
@@ -114,11 +118,14 @@ func (r *SourceFacebookMarketingResource) Schema(ctx context.Context, req resour
Description: `A list of chosen fields for fields parameter`,
},
"insights_lookback_window": schema.Int64Attribute{
- Optional: true,
- Description: `The attribution window`,
+ Optional: true,
+ MarkdownDescription: `Default: 28` + "\n" +
+ `The attribution window`,
},
"level": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["ad", "adset", "campaign", "account"]; Default: "ad"` + "\n" +
+ `Chosen level for API`,
Validators: []validator.String{
stringvalidator.OneOf(
"ad",
@@ -127,80 +134,81 @@ func (r *SourceFacebookMarketingResource) Schema(ctx context.Context, req resour
"account",
),
},
- MarkdownDescription: `must be one of ["ad", "adset", "campaign", "account"]` + "\n" +
- `Chosen level for API`,
},
"name": schema.StringAttribute{
Required: true,
Description: `The name value of insight`,
},
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z.`,
},
"time_increment": schema.Int64Attribute{
- Optional: true,
- Description: `Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only).`,
+ Optional: true,
+ MarkdownDescription: `Default: 1` + "\n" +
+ `Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only).`,
},
},
},
Description: `A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on "add" to fill this field.`,
},
"end_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.`,
},
"fetch_thumbnail_images": schema.BoolAttribute{
- Optional: true,
- Description: `Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative.`,
},
"include_deleted": schema.BoolAttribute{
- Optional: true,
- Description: `Set to active if you want to include data from deleted Campaigns, Ads, and AdSets.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Set to active if you want to include data from deleted Campaigns, Ads, and AdSets.`,
},
"insights_lookback_window": schema.Int64Attribute{
- Optional: true,
- Description: `The attribution window. Facebook freezes insight data 28 days after it was generated, which means that all data from the past 28 days may have changed since we last emitted it, so you can retrieve refreshed insights from the past by setting this parameter. If you set a custom lookback window value in Facebook account, please provide the same value here.`,
- },
- "max_batch_size": schema.Int64Attribute{
- Optional: true,
- Description: `Maximum batch size used when sending batch requests to Facebook API. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.`,
+ Optional: true,
+ MarkdownDescription: `Default: 28` + "\n" +
+ `The attribution window. Facebook freezes insight data 28 days after it was generated, which means that all data from the past 28 days may have changed since we last emitted it, so you can retrieve refreshed insights from the past by setting this parameter. If you set a custom lookback window value in Facebook account, please provide the same value here.`,
},
"page_size": schema.Int64Attribute{
- Optional: true,
- Description: `Page size used when sending requests to Facebook API to specify number of records per page when response has pagination. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "facebook-marketing",
- ),
- },
- Description: `must be one of ["facebook-marketing"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 100` + "\n" +
+ `Page size used when sending requests to Facebook API to specify number of records per page when response has pagination. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data will be replicated for usual streams and only last 2 years for insight streams.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -264,7 +272,7 @@ func (r *SourceFacebookMarketingResource) Create(ctx context.Context, req resour
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceFacebookMarketing(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -440,5 +448,5 @@ func (r *SourceFacebookMarketingResource) Delete(ctx context.Context, req resour
}
func (r *SourceFacebookMarketingResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_facebookmarketing_resource_sdk.go b/internal/provider/source_facebookmarketing_resource_sdk.go
old mode 100755
new mode 100644
index 24c9dd2c3..fbca97d2a
--- a/internal/provider/source_facebookmarketing_resource_sdk.go
+++ b/internal/provider/source_facebookmarketing_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -31,19 +31,19 @@ func (r *SourceFacebookMarketingResourceModel) ToCreateSDKType() *shared.SourceF
}
var customInsights []shared.SourceFacebookMarketingInsightConfig = nil
for _, customInsightsItem := range r.Configuration.CustomInsights {
- var actionBreakdowns []shared.SourceFacebookMarketingInsightConfigValidActionBreakdowns = nil
+ var actionBreakdowns []shared.SourceFacebookMarketingValidActionBreakdowns = nil
for _, actionBreakdownsItem := range customInsightsItem.ActionBreakdowns {
- actionBreakdowns = append(actionBreakdowns, shared.SourceFacebookMarketingInsightConfigValidActionBreakdowns(actionBreakdownsItem.ValueString()))
+ actionBreakdowns = append(actionBreakdowns, shared.SourceFacebookMarketingValidActionBreakdowns(actionBreakdownsItem.ValueString()))
}
- actionReportTime := new(shared.SourceFacebookMarketingInsightConfigActionReportTime)
+ actionReportTime := new(shared.SourceFacebookMarketingActionReportTime)
if !customInsightsItem.ActionReportTime.IsUnknown() && !customInsightsItem.ActionReportTime.IsNull() {
- *actionReportTime = shared.SourceFacebookMarketingInsightConfigActionReportTime(customInsightsItem.ActionReportTime.ValueString())
+ *actionReportTime = shared.SourceFacebookMarketingActionReportTime(customInsightsItem.ActionReportTime.ValueString())
} else {
actionReportTime = nil
}
- var breakdowns []shared.SourceFacebookMarketingInsightConfigValidBreakdowns = nil
+ var breakdowns []shared.SourceFacebookMarketingValidBreakdowns = nil
for _, breakdownsItem := range customInsightsItem.Breakdowns {
- breakdowns = append(breakdowns, shared.SourceFacebookMarketingInsightConfigValidBreakdowns(breakdownsItem.ValueString()))
+ breakdowns = append(breakdowns, shared.SourceFacebookMarketingValidBreakdowns(breakdownsItem.ValueString()))
}
endDate := new(time.Time)
if !customInsightsItem.EndDate.IsUnknown() && !customInsightsItem.EndDate.IsNull() {
@@ -51,9 +51,9 @@ func (r *SourceFacebookMarketingResourceModel) ToCreateSDKType() *shared.SourceF
} else {
endDate = nil
}
- var fields []shared.SourceFacebookMarketingInsightConfigValidEnums = nil
+ var fields []shared.SourceFacebookMarketingValidEnums = nil
for _, fieldsItem := range customInsightsItem.Fields {
- fields = append(fields, shared.SourceFacebookMarketingInsightConfigValidEnums(fieldsItem.ValueString()))
+ fields = append(fields, shared.SourceFacebookMarketingValidEnums(fieldsItem.ValueString()))
}
insightsLookbackWindow := new(int64)
if !customInsightsItem.InsightsLookbackWindow.IsUnknown() && !customInsightsItem.InsightsLookbackWindow.IsNull() {
@@ -61,9 +61,9 @@ func (r *SourceFacebookMarketingResourceModel) ToCreateSDKType() *shared.SourceF
} else {
insightsLookbackWindow = nil
}
- level := new(shared.SourceFacebookMarketingInsightConfigLevel)
+ level := new(shared.SourceFacebookMarketingLevel)
if !customInsightsItem.Level.IsUnknown() && !customInsightsItem.Level.IsNull() {
- *level = shared.SourceFacebookMarketingInsightConfigLevel(customInsightsItem.Level.ValueString())
+ *level = shared.SourceFacebookMarketingLevel(customInsightsItem.Level.ValueString())
} else {
level = nil
}
@@ -117,20 +117,18 @@ func (r *SourceFacebookMarketingResourceModel) ToCreateSDKType() *shared.SourceF
} else {
insightsLookbackWindow1 = nil
}
- maxBatchSize := new(int64)
- if !r.Configuration.MaxBatchSize.IsUnknown() && !r.Configuration.MaxBatchSize.IsNull() {
- *maxBatchSize = r.Configuration.MaxBatchSize.ValueInt64()
- } else {
- maxBatchSize = nil
- }
pageSize := new(int64)
if !r.Configuration.PageSize.IsUnknown() && !r.Configuration.PageSize.IsNull() {
*pageSize = r.Configuration.PageSize.ValueInt64()
} else {
pageSize = nil
}
- sourceType := shared.SourceFacebookMarketingFacebookMarketing(r.Configuration.SourceType.ValueString())
- startDate1, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate1 := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate1, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate1 = nil
+ }
configuration := shared.SourceFacebookMarketing{
AccessToken: accessToken,
AccountID: accountID,
@@ -142,11 +140,15 @@ func (r *SourceFacebookMarketingResourceModel) ToCreateSDKType() *shared.SourceF
FetchThumbnailImages: fetchThumbnailImages,
IncludeDeleted: includeDeleted,
InsightsLookbackWindow: insightsLookbackWindow1,
- MaxBatchSize: maxBatchSize,
PageSize: pageSize,
- SourceType: sourceType,
StartDate: startDate1,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name1 := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -157,6 +159,7 @@ func (r *SourceFacebookMarketingResourceModel) ToCreateSDKType() *shared.SourceF
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceFacebookMarketingCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name1,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -190,21 +193,21 @@ func (r *SourceFacebookMarketingResourceModel) ToUpdateSDKType() *shared.SourceF
} else {
clientSecret = nil
}
- var customInsights []shared.SourceFacebookMarketingUpdateInsightConfig = nil
+ var customInsights []shared.InsightConfig = nil
for _, customInsightsItem := range r.Configuration.CustomInsights {
- var actionBreakdowns []shared.SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns = nil
+ var actionBreakdowns []shared.ValidActionBreakdowns = nil
for _, actionBreakdownsItem := range customInsightsItem.ActionBreakdowns {
- actionBreakdowns = append(actionBreakdowns, shared.SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns(actionBreakdownsItem.ValueString()))
+ actionBreakdowns = append(actionBreakdowns, shared.ValidActionBreakdowns(actionBreakdownsItem.ValueString()))
}
- actionReportTime := new(shared.SourceFacebookMarketingUpdateInsightConfigActionReportTime)
+ actionReportTime := new(shared.ActionReportTime)
if !customInsightsItem.ActionReportTime.IsUnknown() && !customInsightsItem.ActionReportTime.IsNull() {
- *actionReportTime = shared.SourceFacebookMarketingUpdateInsightConfigActionReportTime(customInsightsItem.ActionReportTime.ValueString())
+ *actionReportTime = shared.ActionReportTime(customInsightsItem.ActionReportTime.ValueString())
} else {
actionReportTime = nil
}
- var breakdowns []shared.SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = nil
+ var breakdowns []shared.ValidBreakdowns = nil
for _, breakdownsItem := range customInsightsItem.Breakdowns {
- breakdowns = append(breakdowns, shared.SourceFacebookMarketingUpdateInsightConfigValidBreakdowns(breakdownsItem.ValueString()))
+ breakdowns = append(breakdowns, shared.ValidBreakdowns(breakdownsItem.ValueString()))
}
endDate := new(time.Time)
if !customInsightsItem.EndDate.IsUnknown() && !customInsightsItem.EndDate.IsNull() {
@@ -212,9 +215,9 @@ func (r *SourceFacebookMarketingResourceModel) ToUpdateSDKType() *shared.SourceF
} else {
endDate = nil
}
- var fields []shared.SourceFacebookMarketingUpdateInsightConfigValidEnums = nil
+ var fields []shared.SourceFacebookMarketingUpdateValidEnums = nil
for _, fieldsItem := range customInsightsItem.Fields {
- fields = append(fields, shared.SourceFacebookMarketingUpdateInsightConfigValidEnums(fieldsItem.ValueString()))
+ fields = append(fields, shared.SourceFacebookMarketingUpdateValidEnums(fieldsItem.ValueString()))
}
insightsLookbackWindow := new(int64)
if !customInsightsItem.InsightsLookbackWindow.IsUnknown() && !customInsightsItem.InsightsLookbackWindow.IsNull() {
@@ -222,9 +225,9 @@ func (r *SourceFacebookMarketingResourceModel) ToUpdateSDKType() *shared.SourceF
} else {
insightsLookbackWindow = nil
}
- level := new(shared.SourceFacebookMarketingUpdateInsightConfigLevel)
+ level := new(shared.Level)
if !customInsightsItem.Level.IsUnknown() && !customInsightsItem.Level.IsNull() {
- *level = shared.SourceFacebookMarketingUpdateInsightConfigLevel(customInsightsItem.Level.ValueString())
+ *level = shared.Level(customInsightsItem.Level.ValueString())
} else {
level = nil
}
@@ -241,7 +244,7 @@ func (r *SourceFacebookMarketingResourceModel) ToUpdateSDKType() *shared.SourceF
} else {
timeIncrement = nil
}
- customInsights = append(customInsights, shared.SourceFacebookMarketingUpdateInsightConfig{
+ customInsights = append(customInsights, shared.InsightConfig{
ActionBreakdowns: actionBreakdowns,
ActionReportTime: actionReportTime,
Breakdowns: breakdowns,
@@ -278,19 +281,18 @@ func (r *SourceFacebookMarketingResourceModel) ToUpdateSDKType() *shared.SourceF
} else {
insightsLookbackWindow1 = nil
}
- maxBatchSize := new(int64)
- if !r.Configuration.MaxBatchSize.IsUnknown() && !r.Configuration.MaxBatchSize.IsNull() {
- *maxBatchSize = r.Configuration.MaxBatchSize.ValueInt64()
- } else {
- maxBatchSize = nil
- }
pageSize := new(int64)
if !r.Configuration.PageSize.IsUnknown() && !r.Configuration.PageSize.IsNull() {
*pageSize = r.Configuration.PageSize.ValueInt64()
} else {
pageSize = nil
}
- startDate1, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate1 := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate1, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate1 = nil
+ }
configuration := shared.SourceFacebookMarketingUpdate{
AccessToken: accessToken,
AccountID: accountID,
@@ -302,7 +304,6 @@ func (r *SourceFacebookMarketingResourceModel) ToUpdateSDKType() *shared.SourceF
FetchThumbnailImages: fetchThumbnailImages,
IncludeDeleted: includeDeleted,
InsightsLookbackWindow: insightsLookbackWindow1,
- MaxBatchSize: maxBatchSize,
PageSize: pageSize,
StartDate: startDate1,
}
diff --git a/internal/provider/source_facebookpages_data_source.go b/internal/provider/source_facebookpages_data_source.go
old mode 100755
new mode 100644
index 41d632dee..9bd52782d
--- a/internal/provider/source_facebookpages_data_source.go
+++ b/internal/provider/source_facebookpages_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceFacebookPagesDataSource struct {
// SourceFacebookPagesDataSourceModel describes the data model.
type SourceFacebookPagesDataSourceModel struct {
- Configuration SourceFacebookPages `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourceFacebookPagesDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "SourceFacebookPages DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Facebook Page Access Token`,
- },
- "page_id": schema.StringAttribute{
- Computed: true,
- Description: `Page ID`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "facebook-pages",
- ),
- },
- Description: `must be one of ["facebook-pages"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_facebookpages_data_source_sdk.go b/internal/provider/source_facebookpages_data_source_sdk.go
old mode 100755
new mode 100644
index 6dacb49ca..6ffd99aac
--- a/internal/provider/source_facebookpages_data_source_sdk.go
+++ b/internal/provider/source_facebookpages_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceFacebookPagesDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_facebookpages_resource.go b/internal/provider/source_facebookpages_resource.go
old mode 100755
new mode 100644
index d3d9ed49d..16f47452d
--- a/internal/provider/source_facebookpages_resource.go
+++ b/internal/provider/source_facebookpages_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceFacebookPagesResource struct {
// SourceFacebookPagesResourceModel describes the resource data model.
type SourceFacebookPagesResourceModel struct {
Configuration SourceFacebookPages `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,30 +56,33 @@ func (r *SourceFacebookPagesResource) Schema(ctx context.Context, req resource.S
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Facebook Page Access Token`,
},
"page_id": schema.StringAttribute{
Required: true,
Description: `Page ID`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "facebook-pages",
- ),
- },
- Description: `must be one of ["facebook-pages"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceFacebookPagesResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceFacebookPages(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceFacebookPagesResource) Delete(ctx context.Context, req resource.D
}
func (r *SourceFacebookPagesResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_facebookpages_resource_sdk.go b/internal/provider/source_facebookpages_resource_sdk.go
old mode 100755
new mode 100644
index 4660521a4..ec4aceffa
--- a/internal/provider/source_facebookpages_resource_sdk.go
+++ b/internal/provider/source_facebookpages_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceFacebookPagesResourceModel) ToCreateSDKType() *shared.SourceFacebookPagesCreateRequest {
accessToken := r.Configuration.AccessToken.ValueString()
pageID := r.Configuration.PageID.ValueString()
- sourceType := shared.SourceFacebookPagesFacebookPages(r.Configuration.SourceType.ValueString())
configuration := shared.SourceFacebookPages{
AccessToken: accessToken,
PageID: pageID,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceFacebookPagesResourceModel) ToCreateSDKType() *shared.SourceFaceb
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceFacebookPagesCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_faker_data_source.go b/internal/provider/source_faker_data_source.go
old mode 100755
new mode 100644
index fb3af204e..c41fef6e1
--- a/internal/provider/source_faker_data_source.go
+++ b/internal/provider/source_faker_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceFakerDataSource struct {
// SourceFakerDataSourceModel describes the data model.
type SourceFakerDataSourceModel struct {
- Configuration SourceFaker `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,50 +47,20 @@ func (r *SourceFakerDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceFaker DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "always_updated": schema.BoolAttribute{
- Computed: true,
- Description: `Should the updated_at values for every record be new each sync? Setting this to false will case the source to stop emitting records after COUNT records have been emitted.`,
- },
- "count": schema.Int64Attribute{
- Computed: true,
- Description: `How many users should be generated in total. This setting does not apply to the purchases or products stream.`,
- },
- "parallelism": schema.Int64Attribute{
- Computed: true,
- Description: `How many parallel workers should we use to generate fake data? Choose a value equal to the number of CPUs you will allocate to this source.`,
- },
- "records_per_slice": schema.Int64Attribute{
- Computed: true,
- Description: `How many fake records will be in each page (stream slice), before a state message is emitted?`,
- },
- "seed": schema.Int64Attribute{
- Computed: true,
- Description: `Manually control the faker random seed to return the same values on subsequent runs (leave -1 for random)`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "faker",
- ),
- },
- Description: `must be one of ["faker"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_faker_data_source_sdk.go b/internal/provider/source_faker_data_source_sdk.go
old mode 100755
new mode 100644
index e31b831b3..03ab9c864
--- a/internal/provider/source_faker_data_source_sdk.go
+++ b/internal/provider/source_faker_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceFakerDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_faker_resource.go b/internal/provider/source_faker_resource.go
old mode 100755
new mode 100644
index da31613f6..a3432433f
--- a/internal/provider/source_faker_resource.go
+++ b/internal/provider/source_faker_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceFakerResource struct {
// SourceFakerResourceModel describes the resource data model.
type SourceFakerResourceModel struct {
Configuration SourceFaker `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -55,43 +55,50 @@ func (r *SourceFakerResource) Schema(ctx context.Context, req resource.SchemaReq
Required: true,
Attributes: map[string]schema.Attribute{
"always_updated": schema.BoolAttribute{
- Optional: true,
- Description: `Should the updated_at values for every record be new each sync? Setting this to false will case the source to stop emitting records after COUNT records have been emitted.`,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Should the updated_at values for every record be new each sync? Setting this to false will cause the source to stop emitting records after COUNT records have been emitted.`,
},
"count": schema.Int64Attribute{
- Required: true,
- Description: `How many users should be generated in total. This setting does not apply to the purchases or products stream.`,
+ Optional: true,
+ MarkdownDescription: `Default: 1000` + "\n" +
+ `How many users should be generated in total. This setting does not apply to the purchases or products stream.`,
},
"parallelism": schema.Int64Attribute{
- Optional: true,
- Description: `How many parallel workers should we use to generate fake data? Choose a value equal to the number of CPUs you will allocate to this source.`,
+ Optional: true,
+ MarkdownDescription: `Default: 4` + "\n" +
+ `How many parallel workers should we use to generate fake data? Choose a value equal to the number of CPUs you will allocate to this source.`,
},
"records_per_slice": schema.Int64Attribute{
- Optional: true,
- Description: `How many fake records will be in each page (stream slice), before a state message is emitted?`,
+ Optional: true,
+ MarkdownDescription: `Default: 1000` + "\n" +
+ `How many fake records will be in each page (stream slice), before a state message is emitted?`,
},
"seed": schema.Int64Attribute{
- Optional: true,
- Description: `Manually control the faker random seed to return the same values on subsequent runs (leave -1 for random)`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "faker",
- ),
- },
- Description: `must be one of ["faker"]`,
+ Optional: true,
+ MarkdownDescription: `Default: -1` + "\n" +
+ `Manually control the faker random seed to return the same values on subsequent runs (leave -1 for random)`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +162,7 @@ func (r *SourceFakerResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceFaker(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +338,5 @@ func (r *SourceFakerResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceFakerResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_faker_resource_sdk.go b/internal/provider/source_faker_resource_sdk.go
old mode 100755
new mode 100644
index eab80f519..e97398bf4
--- a/internal/provider/source_faker_resource_sdk.go
+++ b/internal/provider/source_faker_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -14,7 +14,12 @@ func (r *SourceFakerResourceModel) ToCreateSDKType() *shared.SourceFakerCreateRe
} else {
alwaysUpdated = nil
}
- count := r.Configuration.Count.ValueInt64()
+ count := new(int64)
+ if !r.Configuration.Count.IsUnknown() && !r.Configuration.Count.IsNull() {
+ *count = r.Configuration.Count.ValueInt64()
+ } else {
+ count = nil
+ }
parallelism := new(int64)
if !r.Configuration.Parallelism.IsUnknown() && !r.Configuration.Parallelism.IsNull() {
*parallelism = r.Configuration.Parallelism.ValueInt64()
@@ -33,14 +38,18 @@ func (r *SourceFakerResourceModel) ToCreateSDKType() *shared.SourceFakerCreateRe
} else {
seed = nil
}
- sourceType := shared.SourceFakerFaker(r.Configuration.SourceType.ValueString())
configuration := shared.SourceFaker{
AlwaysUpdated: alwaysUpdated,
Count: count,
Parallelism: parallelism,
RecordsPerSlice: recordsPerSlice,
Seed: seed,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -52,6 +61,7 @@ func (r *SourceFakerResourceModel) ToCreateSDKType() *shared.SourceFakerCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceFakerCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -71,7 +81,12 @@ func (r *SourceFakerResourceModel) ToUpdateSDKType() *shared.SourceFakerPutReque
} else {
alwaysUpdated = nil
}
- count := r.Configuration.Count.ValueInt64()
+ count := new(int64)
+ if !r.Configuration.Count.IsUnknown() && !r.Configuration.Count.IsNull() {
+ *count = r.Configuration.Count.ValueInt64()
+ } else {
+ count = nil
+ }
parallelism := new(int64)
if !r.Configuration.Parallelism.IsUnknown() && !r.Configuration.Parallelism.IsNull() {
*parallelism = r.Configuration.Parallelism.ValueInt64()
diff --git a/internal/provider/source_fauna_data_source.go b/internal/provider/source_fauna_data_source.go
old mode 100755
new mode 100644
index f839d5605..5bcd06c68
--- a/internal/provider/source_fauna_data_source.go
+++ b/internal/provider/source_fauna_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceFaunaDataSource struct {
// SourceFaunaDataSourceModel describes the data model.
type SourceFaunaDataSourceModel struct {
- Configuration SourceFauna `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,150 +47,20 @@ func (r *SourceFaunaDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceFauna DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "collection": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "deletions": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_fauna_collection_deletion_mode_disabled": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "deletion_mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ignore",
- ),
- },
- Description: `must be one of ["ignore"]`,
- },
- },
- MarkdownDescription: `This only applies to incremental syncs.
` + "\n" +
- `Enabling deletion mode informs your destination of deleted documents.
` + "\n" +
- `Disabled - Leave this feature disabled, and ignore deleted documents.
` + "\n" +
- `Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.`,
- },
- "source_fauna_collection_deletion_mode_enabled": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "column": schema.StringAttribute{
- Computed: true,
- Description: `Name of the "deleted at" column.`,
- },
- "deletion_mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "deleted_field",
- ),
- },
- Description: `must be one of ["deleted_field"]`,
- },
- },
- MarkdownDescription: `This only applies to incremental syncs.
` + "\n" +
- `Enabling deletion mode informs your destination of deleted documents.
` + "\n" +
- `Disabled - Leave this feature disabled, and ignore deleted documents.
` + "\n" +
- `Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.`,
- },
- "source_fauna_update_collection_deletion_mode_disabled": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "deletion_mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ignore",
- ),
- },
- Description: `must be one of ["ignore"]`,
- },
- },
- MarkdownDescription: `This only applies to incremental syncs.
` + "\n" +
- `Enabling deletion mode informs your destination of deleted documents.
` + "\n" +
- `Disabled - Leave this feature disabled, and ignore deleted documents.
` + "\n" +
- `Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.`,
- },
- "source_fauna_update_collection_deletion_mode_enabled": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "column": schema.StringAttribute{
- Computed: true,
- Description: `Name of the "deleted at" column.`,
- },
- "deletion_mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "deleted_field",
- ),
- },
- Description: `must be one of ["deleted_field"]`,
- },
- },
- MarkdownDescription: `This only applies to incremental syncs.
` + "\n" +
- `Enabling deletion mode informs your destination of deleted documents.
` + "\n" +
- `Disabled - Leave this feature disabled, and ignore deleted documents.
` + "\n" +
- `Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- MarkdownDescription: `This only applies to incremental syncs.
` + "\n" +
- `Enabling deletion mode informs your destination of deleted documents.
` + "\n" +
- `Disabled - Leave this feature disabled, and ignore deleted documents.
` + "\n" +
- `Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.`,
- },
- "page_size": schema.Int64Attribute{
- Computed: true,
- MarkdownDescription: `The page size used when reading documents from the database. The larger the page size, the faster the connector processes documents. However, if a page is too large, the connector may fail.
` + "\n" +
- `Choose your page size based on how large the documents are.
` + "\n" +
- `See the docs.`,
- },
- },
- Description: `Settings for the Fauna Collection.`,
- },
- "domain": schema.StringAttribute{
- Computed: true,
- Description: `Domain of Fauna to query. Defaults db.fauna.com. See the docs.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `Endpoint port.`,
- },
- "scheme": schema.StringAttribute{
- Computed: true,
- Description: `URL scheme.`,
- },
- "secret": schema.StringAttribute{
- Computed: true,
- Description: `Fauna secret, used when authenticating with the database.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fauna",
- ),
- },
- Description: `must be one of ["fauna"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_fauna_data_source_sdk.go b/internal/provider/source_fauna_data_source_sdk.go
old mode 100755
new mode 100644
index 70cc33f71..ac4dd772f
--- a/internal/provider/source_fauna_data_source_sdk.go
+++ b/internal/provider/source_fauna_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceFaunaDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_fauna_resource.go b/internal/provider/source_fauna_resource.go
old mode 100755
new mode 100644
index f6d1969fe..903769442
--- a/internal/provider/source_fauna_resource.go
+++ b/internal/provider/source_fauna_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceFaunaResource struct {
// SourceFaunaResourceModel describes the resource data model.
type SourceFaunaResourceModel struct {
Configuration SourceFauna `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -61,57 +62,21 @@ func (r *SourceFaunaResource) Schema(ctx context.Context, req resource.SchemaReq
"deletions": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_fauna_collection_deletion_mode_disabled": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "deletion_mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ignore",
- ),
- },
- Description: `must be one of ["ignore"]`,
- },
- },
+ "disabled": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
MarkdownDescription: `This only applies to incremental syncs.
` + "\n" +
`Enabling deletion mode informs your destination of deleted documents.
` + "\n" +
`Disabled - Leave this feature disabled, and ignore deleted documents.
` + "\n" +
`Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.`,
},
- "source_fauna_collection_deletion_mode_enabled": schema.SingleNestedAttribute{
+ "enabled": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"column": schema.StringAttribute{
- Required: true,
- Description: `Name of the "deleted at" column.`,
- },
- "deletion_mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "deleted_field",
- ),
- },
- Description: `must be one of ["deleted_field"]`,
- },
- },
- MarkdownDescription: `This only applies to incremental syncs.
` + "\n" +
- `Enabling deletion mode informs your destination of deleted documents.
` + "\n" +
- `Disabled - Leave this feature disabled, and ignore deleted documents.
` + "\n" +
- `Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.`,
- },
- "source_fauna_update_collection_deletion_mode_disabled": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "deletion_mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ignore",
- ),
- },
- Description: `must be one of ["ignore"]`,
+ Optional: true,
+ MarkdownDescription: `Default: "deleted_at"` + "\n" +
+ `Name of the "deleted at" column.`,
},
},
MarkdownDescription: `This only applies to incremental syncs.
` + "\n" +
@@ -119,40 +84,19 @@ func (r *SourceFaunaResource) Schema(ctx context.Context, req resource.SchemaReq
`Disabled - Leave this feature disabled, and ignore deleted documents.
` + "\n" +
`Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.`,
},
- "source_fauna_update_collection_deletion_mode_enabled": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "column": schema.StringAttribute{
- Required: true,
- Description: `Name of the "deleted at" column.`,
- },
- "deletion_mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "deleted_field",
- ),
- },
- Description: `must be one of ["deleted_field"]`,
- },
- },
- MarkdownDescription: `This only applies to incremental syncs.
` + "\n" +
- `Enabling deletion mode informs your destination of deleted documents.
` + "\n" +
- `Disabled - Leave this feature disabled, and ignore deleted documents.
` + "\n" +
- `Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
},
MarkdownDescription: `This only applies to incremental syncs.
` + "\n" +
`Enabling deletion mode informs your destination of deleted documents.
` + "\n" +
`Disabled - Leave this feature disabled, and ignore deleted documents.
` + "\n" +
`Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
},
"page_size": schema.Int64Attribute{
- Required: true,
- MarkdownDescription: `The page size used when reading documents from the database. The larger the page size, the faster the connector processes documents. However, if a page is too large, the connector may fail.
` + "\n" +
+ Optional: true,
+ MarkdownDescription: `Default: 64` + "\n" +
+ `The page size used when reading documents from the database. The larger the page size, the faster the connector processes documents. However, if a page is too large, the connector may fail.
` + "\n" +
`Choose your page size based on how large the documents are.
` + "\n" +
`See the docs.`,
},
@@ -160,39 +104,44 @@ func (r *SourceFaunaResource) Schema(ctx context.Context, req resource.SchemaReq
Description: `Settings for the Fauna Collection.`,
},
"domain": schema.StringAttribute{
- Required: true,
- Description: `Domain of Fauna to query. Defaults db.fauna.com. See the docs.`,
+ Optional: true,
+ MarkdownDescription: `Default: "db.fauna.com"` + "\n" +
+ `Domain of Fauna to query. Defaults db.fauna.com. See the docs.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `Endpoint port.`,
+ Optional: true,
+ MarkdownDescription: `Default: 443` + "\n" +
+ `Endpoint port.`,
},
"scheme": schema.StringAttribute{
- Required: true,
- Description: `URL scheme.`,
+ Optional: true,
+ MarkdownDescription: `Default: "https"` + "\n" +
+ `URL scheme.`,
},
"secret": schema.StringAttribute{
Required: true,
Description: `Fauna secret, used when authenticating with the database.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "fauna",
- ),
- },
- Description: `must be one of ["fauna"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -256,7 +205,7 @@ func (r *SourceFaunaResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceFauna(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -432,5 +381,5 @@ func (r *SourceFaunaResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceFaunaResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_fauna_resource_sdk.go b/internal/provider/source_fauna_resource_sdk.go
old mode 100755
new mode 100644
index 044422a44..073a760d2
--- a/internal/provider/source_fauna_resource_sdk.go
+++ b/internal/provider/source_fauna_resource_sdk.go
@@ -3,58 +3,82 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceFaunaResourceModel) ToCreateSDKType() *shared.SourceFaunaCreateRequest {
var collection *shared.SourceFaunaCollection
if r.Configuration.Collection != nil {
- var deletions shared.SourceFaunaCollectionDeletionMode
- var sourceFaunaCollectionDeletionModeDisabled *shared.SourceFaunaCollectionDeletionModeDisabled
- if r.Configuration.Collection.Deletions.SourceFaunaCollectionDeletionModeDisabled != nil {
- deletionMode := shared.SourceFaunaCollectionDeletionModeDisabledDeletionMode(r.Configuration.Collection.Deletions.SourceFaunaCollectionDeletionModeDisabled.DeletionMode.ValueString())
- sourceFaunaCollectionDeletionModeDisabled = &shared.SourceFaunaCollectionDeletionModeDisabled{
- DeletionMode: deletionMode,
- }
+ var deletions shared.SourceFaunaDeletionMode
+ var sourceFaunaDisabled *shared.SourceFaunaDisabled
+ if r.Configuration.Collection.Deletions.Disabled != nil {
+ sourceFaunaDisabled = &shared.SourceFaunaDisabled{}
}
- if sourceFaunaCollectionDeletionModeDisabled != nil {
- deletions = shared.SourceFaunaCollectionDeletionMode{
- SourceFaunaCollectionDeletionModeDisabled: sourceFaunaCollectionDeletionModeDisabled,
+ if sourceFaunaDisabled != nil {
+ deletions = shared.SourceFaunaDeletionMode{
+ SourceFaunaDisabled: sourceFaunaDisabled,
}
}
- var sourceFaunaCollectionDeletionModeEnabled *shared.SourceFaunaCollectionDeletionModeEnabled
- if r.Configuration.Collection.Deletions.SourceFaunaCollectionDeletionModeEnabled != nil {
- column := r.Configuration.Collection.Deletions.SourceFaunaCollectionDeletionModeEnabled.Column.ValueString()
- deletionMode1 := shared.SourceFaunaCollectionDeletionModeEnabledDeletionMode(r.Configuration.Collection.Deletions.SourceFaunaCollectionDeletionModeEnabled.DeletionMode.ValueString())
- sourceFaunaCollectionDeletionModeEnabled = &shared.SourceFaunaCollectionDeletionModeEnabled{
- Column: column,
- DeletionMode: deletionMode1,
+ var sourceFaunaEnabled *shared.SourceFaunaEnabled
+ if r.Configuration.Collection.Deletions.Enabled != nil {
+ column := new(string)
+ if !r.Configuration.Collection.Deletions.Enabled.Column.IsUnknown() && !r.Configuration.Collection.Deletions.Enabled.Column.IsNull() {
+ *column = r.Configuration.Collection.Deletions.Enabled.Column.ValueString()
+ } else {
+ column = nil
+ }
+ sourceFaunaEnabled = &shared.SourceFaunaEnabled{
+ Column: column,
}
}
- if sourceFaunaCollectionDeletionModeEnabled != nil {
- deletions = shared.SourceFaunaCollectionDeletionMode{
- SourceFaunaCollectionDeletionModeEnabled: sourceFaunaCollectionDeletionModeEnabled,
+ if sourceFaunaEnabled != nil {
+ deletions = shared.SourceFaunaDeletionMode{
+ SourceFaunaEnabled: sourceFaunaEnabled,
}
}
- pageSize := r.Configuration.Collection.PageSize.ValueInt64()
+ pageSize := new(int64)
+ if !r.Configuration.Collection.PageSize.IsUnknown() && !r.Configuration.Collection.PageSize.IsNull() {
+ *pageSize = r.Configuration.Collection.PageSize.ValueInt64()
+ } else {
+ pageSize = nil
+ }
collection = &shared.SourceFaunaCollection{
Deletions: deletions,
PageSize: pageSize,
}
}
- domain := r.Configuration.Domain.ValueString()
- port := r.Configuration.Port.ValueInt64()
- scheme := r.Configuration.Scheme.ValueString()
+ domain := new(string)
+ if !r.Configuration.Domain.IsUnknown() && !r.Configuration.Domain.IsNull() {
+ *domain = r.Configuration.Domain.ValueString()
+ } else {
+ domain = nil
+ }
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ scheme := new(string)
+ if !r.Configuration.Scheme.IsUnknown() && !r.Configuration.Scheme.IsNull() {
+ *scheme = r.Configuration.Scheme.ValueString()
+ } else {
+ scheme = nil
+ }
secret := r.Configuration.Secret.ValueString()
- sourceType := shared.SourceFaunaFauna(r.Configuration.SourceType.ValueString())
configuration := shared.SourceFauna{
Collection: collection,
Domain: domain,
Port: port,
Scheme: scheme,
Secret: secret,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -66,6 +90,7 @@ func (r *SourceFaunaResourceModel) ToCreateSDKType() *shared.SourceFaunaCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceFaunaCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -79,44 +104,64 @@ func (r *SourceFaunaResourceModel) ToGetSDKType() *shared.SourceFaunaCreateReque
}
func (r *SourceFaunaResourceModel) ToUpdateSDKType() *shared.SourceFaunaPutRequest {
- var collection *shared.SourceFaunaUpdateCollection
+ var collection *shared.Collection
if r.Configuration.Collection != nil {
- var deletions shared.SourceFaunaUpdateCollectionDeletionMode
- var sourceFaunaUpdateCollectionDeletionModeDisabled *shared.SourceFaunaUpdateCollectionDeletionModeDisabled
- if r.Configuration.Collection.Deletions.SourceFaunaUpdateCollectionDeletionModeDisabled != nil {
- deletionMode := shared.SourceFaunaUpdateCollectionDeletionModeDisabledDeletionMode(r.Configuration.Collection.Deletions.SourceFaunaUpdateCollectionDeletionModeDisabled.DeletionMode.ValueString())
- sourceFaunaUpdateCollectionDeletionModeDisabled = &shared.SourceFaunaUpdateCollectionDeletionModeDisabled{
- DeletionMode: deletionMode,
- }
+ var deletions shared.DeletionMode
+ var disabled *shared.Disabled
+ if r.Configuration.Collection.Deletions.Disabled != nil {
+ disabled = &shared.Disabled{}
}
- if sourceFaunaUpdateCollectionDeletionModeDisabled != nil {
- deletions = shared.SourceFaunaUpdateCollectionDeletionMode{
- SourceFaunaUpdateCollectionDeletionModeDisabled: sourceFaunaUpdateCollectionDeletionModeDisabled,
+ if disabled != nil {
+ deletions = shared.DeletionMode{
+ Disabled: disabled,
}
}
- var sourceFaunaUpdateCollectionDeletionModeEnabled *shared.SourceFaunaUpdateCollectionDeletionModeEnabled
- if r.Configuration.Collection.Deletions.SourceFaunaUpdateCollectionDeletionModeEnabled != nil {
- column := r.Configuration.Collection.Deletions.SourceFaunaUpdateCollectionDeletionModeEnabled.Column.ValueString()
- deletionMode1 := shared.SourceFaunaUpdateCollectionDeletionModeEnabledDeletionMode(r.Configuration.Collection.Deletions.SourceFaunaUpdateCollectionDeletionModeEnabled.DeletionMode.ValueString())
- sourceFaunaUpdateCollectionDeletionModeEnabled = &shared.SourceFaunaUpdateCollectionDeletionModeEnabled{
- Column: column,
- DeletionMode: deletionMode1,
+ var enabled *shared.Enabled
+ if r.Configuration.Collection.Deletions.Enabled != nil {
+ column := new(string)
+ if !r.Configuration.Collection.Deletions.Enabled.Column.IsUnknown() && !r.Configuration.Collection.Deletions.Enabled.Column.IsNull() {
+ *column = r.Configuration.Collection.Deletions.Enabled.Column.ValueString()
+ } else {
+ column = nil
+ }
+ enabled = &shared.Enabled{
+ Column: column,
}
}
- if sourceFaunaUpdateCollectionDeletionModeEnabled != nil {
- deletions = shared.SourceFaunaUpdateCollectionDeletionMode{
- SourceFaunaUpdateCollectionDeletionModeEnabled: sourceFaunaUpdateCollectionDeletionModeEnabled,
+ if enabled != nil {
+ deletions = shared.DeletionMode{
+ Enabled: enabled,
}
}
- pageSize := r.Configuration.Collection.PageSize.ValueInt64()
- collection = &shared.SourceFaunaUpdateCollection{
+ pageSize := new(int64)
+ if !r.Configuration.Collection.PageSize.IsUnknown() && !r.Configuration.Collection.PageSize.IsNull() {
+ *pageSize = r.Configuration.Collection.PageSize.ValueInt64()
+ } else {
+ pageSize = nil
+ }
+ collection = &shared.Collection{
Deletions: deletions,
PageSize: pageSize,
}
}
- domain := r.Configuration.Domain.ValueString()
- port := r.Configuration.Port.ValueInt64()
- scheme := r.Configuration.Scheme.ValueString()
+ domain := new(string)
+ if !r.Configuration.Domain.IsUnknown() && !r.Configuration.Domain.IsNull() {
+ *domain = r.Configuration.Domain.ValueString()
+ } else {
+ domain = nil
+ }
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
+ scheme := new(string)
+ if !r.Configuration.Scheme.IsUnknown() && !r.Configuration.Scheme.IsNull() {
+ *scheme = r.Configuration.Scheme.ValueString()
+ } else {
+ scheme = nil
+ }
secret := r.Configuration.Secret.ValueString()
configuration := shared.SourceFaunaUpdate{
Collection: collection,
diff --git a/internal/provider/source_file_data_source.go b/internal/provider/source_file_data_source.go
new file mode 100644
index 000000000..1431613f3
--- /dev/null
+++ b/internal/provider/source_file_data_source.go
@@ -0,0 +1,137 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ datasource.DataSource = &SourceFileDataSource{}
+var _ datasource.DataSourceWithConfigure = &SourceFileDataSource{}
+
+func NewSourceFileDataSource() datasource.DataSource {
+ return &SourceFileDataSource{}
+}
+
+// SourceFileDataSource is the data source implementation.
+type SourceFileDataSource struct {
+ client *sdk.SDK
+}
+
+// SourceFileDataSourceModel describes the data model.
+type SourceFileDataSourceModel struct {
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+// Metadata returns the data source type name.
+func (r *SourceFileDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_source_file"
+}
+
+// Schema defines the schema for the data source.
+func (r *SourceFileDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "SourceFile DataSource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.StringAttribute{
+ Computed: true,
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ },
+ "source_id": schema.StringAttribute{
+ Required: true,
+ },
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
+ "workspace_id": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ }
+}
+
+func (r *SourceFileDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ client, ok := req.ProviderData.(*sdk.SDK)
+
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected DataSource Configure Type",
+ fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = client
+}
+
+func (r *SourceFileDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data *SourceFileDataSourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ sourceID := data.SourceID.ValueString()
+ request := operations.GetSourceFileRequest{
+ SourceID: sourceID,
+ }
+ res, err := r.client.Sources.GetSourceFile(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.SourceResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(res.SourceResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/internal/provider/source_file_data_source_sdk.go b/internal/provider/source_file_data_source_sdk.go
new file mode 100644
index 000000000..f0086946c
--- /dev/null
+++ b/internal/provider/source_file_data_source_sdk.go
@@ -0,0 +1,18 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *SourceFileDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
+ r.Name = types.StringValue(resp.Name)
+ r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
diff --git a/internal/provider/source_file_resource.go b/internal/provider/source_file_resource.go
new file mode 100644
index 000000000..54fa09927
--- /dev/null
+++ b/internal/provider/source_file_resource.go
@@ -0,0 +1,475 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ resource.Resource = &SourceFileResource{}
+var _ resource.ResourceWithImportState = &SourceFileResource{}
+
+func NewSourceFileResource() resource.Resource {
+ return &SourceFileResource{}
+}
+
+// SourceFileResource defines the resource implementation.
+type SourceFileResource struct {
+ client *sdk.SDK
+}
+
+// SourceFileResourceModel describes the resource data model.
+type SourceFileResourceModel struct {
+ Configuration SourceFile `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+func (r *SourceFileResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_source_file"
+}
+
+func (r *SourceFileResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "SourceFile Resource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "dataset_name": schema.StringAttribute{
+ Required: true,
+ Description: `The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).`,
+ },
+ "format": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "feather", "parquet", "yaml"]; Default: "csv"` + "\n" +
+ `The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "csv",
+ "json",
+ "jsonl",
+ "excel",
+ "excel_binary",
+ "feather",
+ "parquet",
+ "yaml",
+ ),
+ },
+ },
+ "provider": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "az_blob_azure_blob_storage": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "sas_token": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.`,
+ },
+ "shared_key": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.`,
+ },
+ "storage_account": schema.StringAttribute{
+ Required: true,
+ Description: `The globally unique name of the storage account that the desired blob sits within. See here for more details.`,
+ },
+ },
+ Description: `The storage Provider or Location of the file(s) which should be replicated.`,
+ },
+ "gcs_google_cloud_storage": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "service_account_json": schema.StringAttribute{
+ Optional: true,
+ Description: `In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.`,
+ },
+ },
+ Description: `The storage Provider or Location of the file(s) which should be replicated.`,
+ },
+ "https_public_web": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "user_agent": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Add User-Agent to request`,
+ },
+ },
+ Description: `The storage Provider or Location of the file(s) which should be replicated.`,
+ },
+ "s3_amazon_web_services": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "aws_access_key_id": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
+ },
+ "aws_secret_access_key": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
+ },
+ },
+ Description: `The storage Provider or Location of the file(s) which should be replicated.`,
+ },
+ "scp_secure_copy_protocol": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "host": schema.StringAttribute{
+ Required: true,
+ },
+ "password": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ },
+ "port": schema.StringAttribute{
+ Optional: true,
+ Description: `Default: "22"`,
+ },
+ "user": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ Description: `The storage Provider or Location of the file(s) which should be replicated.`,
+ },
+ "sftp_secure_file_transfer_protocol": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "host": schema.StringAttribute{
+ Required: true,
+ },
+ "password": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ },
+ "port": schema.StringAttribute{
+ Optional: true,
+ Description: `Default: "22"`,
+ },
+ "user": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ Description: `The storage Provider or Location of the file(s) which should be replicated.`,
+ },
+ "ssh_secure_shell": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "host": schema.StringAttribute{
+ Required: true,
+ },
+ "password": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ },
+ "port": schema.StringAttribute{
+ Optional: true,
+ Description: `Default: "22"`,
+ },
+ "user": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ Description: `The storage Provider or Location of the file(s) which should be replicated.`,
+ },
+ },
+ Description: `The storage Provider or Location of the file(s) which should be replicated.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "reader_options": schema.StringAttribute{
+ Optional: true,
+ Description: `This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.`,
+ },
+ "url": schema.StringAttribute{
+ Required: true,
+ Description: `The URL path to access the file which should be replicated.`,
+ },
+ },
+ },
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
+ "name": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
+ },
+ "secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
+ },
+ "source_id": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ },
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ },
+ "workspace_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ Required: true,
+ },
+ },
+ }
+}
+
+// Configure stores the provider-configured SDK client on the resource so the
+// CRUD methods can call the Airbyte API.
+func (r *SourceFileResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+	// Prevent panic if the provider has not been configured.
+	if req.ProviderData == nil {
+		return
+	}
+
+	client, ok := req.ProviderData.(*sdk.SDK)
+
+	if !ok {
+		// ProviderData of an unexpected type indicates a provider bug, not a
+		// user configuration error; report it as such.
+		resp.Diagnostics.AddError(
+			"Unexpected Resource Configure Type",
+			fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+		)
+
+		return
+	}
+
+	r.client = client
+}
+
+// Create reads the planned configuration, invokes the Airbyte API to create
+// the File source, and saves the API's response into Terraform state.
+func (r *SourceFileResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+	var data *SourceFileResourceModel
+	var item types.Object
+
+	resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	// Decode the plan object into the resource model, tolerating null and
+	// unknown nested values (they are treated as empty on the model).
+	resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+		UnhandledNullAsEmpty:    true,
+		UnhandledUnknownAsEmpty: true,
+	})...)
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	request := data.ToCreateSDKType()
+	res, err := r.client.Sources.CreateSourceFile(ctx, request)
+	if err != nil {
+		resp.Diagnostics.AddError("failure to invoke API", err.Error())
+		if res != nil && res.RawResponse != nil {
+			resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+		}
+		return
+	}
+	if res == nil {
+		resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+		return
+	}
+	// A successful create must return 200 with a source body; anything else
+	// is surfaced as an error with the raw HTTP exchange attached.
+	if res.StatusCode != 200 {
+		resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+		return
+	}
+	if res.SourceResponse == nil {
+		resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+		return
+	}
+	data.RefreshFromCreateResponse(res.SourceResponse)
+
+	// Save updated data into Terraform state
+	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+// Read refreshes Terraform state for the File source by fetching it from the
+// Airbyte API using the source_id held in state.
+func (r *SourceFileResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+	var data *SourceFileResourceModel
+	var item types.Object
+
+	resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	// Decode the state object into the resource model, tolerating null and
+	// unknown nested values (they are treated as empty on the model).
+	resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+		UnhandledNullAsEmpty:    true,
+		UnhandledUnknownAsEmpty: true,
+	})...)
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	sourceID := data.SourceID.ValueString()
+	request := operations.GetSourceFileRequest{
+		SourceID: sourceID,
+	}
+	res, err := r.client.Sources.GetSourceFile(ctx, request)
+	if err != nil {
+		resp.Diagnostics.AddError("failure to invoke API", err.Error())
+		if res != nil && res.RawResponse != nil {
+			resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+		}
+		return
+	}
+	if res == nil {
+		resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+		return
+	}
+	if res.StatusCode != 200 {
+		resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+		return
+	}
+	if res.SourceResponse == nil {
+		resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+		return
+	}
+	data.RefreshFromGetResponse(res.SourceResponse)
+
+	// Save updated data into Terraform state
+	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+// Update applies the planned configuration to the File source via PUT, then
+// re-reads the source so computed attributes in state reflect the API's view.
+func (r *SourceFileResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+	var data *SourceFileResourceModel
+	merge(ctx, req, resp, &data)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	sourceFilePutRequest := data.ToUpdateSDKType()
+	sourceID := data.SourceID.ValueString()
+	request := operations.PutSourceFileRequest{
+		SourceFilePutRequest: sourceFilePutRequest,
+		SourceID:             sourceID,
+	}
+	res, err := r.client.Sources.PutSourceFile(ctx, request)
+	if err != nil {
+		resp.Diagnostics.AddError("failure to invoke API", err.Error())
+		if res != nil && res.RawResponse != nil {
+			resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+		}
+		return
+	}
+	if res == nil {
+		resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+		return
+	}
+	// Any 2xx status is accepted for the PUT (numeric range check instead of
+	// inspecting the first character of the formatted status code).
+	if res.StatusCode < 200 || res.StatusCode > 299 {
+		resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+		return
+	}
+	getRequest := operations.GetSourceFileRequest{
+		SourceID: sourceID,
+	}
+	getResponse, err := r.client.Sources.GetSourceFile(ctx, getRequest)
+	if err != nil {
+		resp.Diagnostics.AddError("failure to invoke API", err.Error())
+		// Bug fix: this branch previously inspected `res` (the PUT response),
+		// so a failed GET attached debug output from the wrong HTTP exchange.
+		if getResponse != nil && getResponse.RawResponse != nil {
+			resp.Diagnostics.AddError("unexpected http request/response", debugResponse(getResponse.RawResponse))
+		}
+		return
+	}
+	if getResponse == nil {
+		resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse))
+		return
+	}
+	if getResponse.StatusCode != 200 {
+		resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse))
+		return
+	}
+	if getResponse.SourceResponse == nil {
+		resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse))
+		return
+	}
+	data.RefreshFromGetResponse(getResponse.SourceResponse)
+
+	// Save updated data into Terraform state
+	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+// Delete removes the File source from Airbyte using the source_id in state.
+func (r *SourceFileResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+	var data *SourceFileResourceModel
+	var item types.Object
+
+	resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	// Decode the state object into the resource model, tolerating null and
+	// unknown nested values (they are treated as empty on the model).
+	resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+		UnhandledNullAsEmpty:    true,
+		UnhandledUnknownAsEmpty: true,
+	})...)
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	sourceID := data.SourceID.ValueString()
+	request := operations.DeleteSourceFileRequest{
+		SourceID: sourceID,
+	}
+	res, err := r.client.Sources.DeleteSourceFile(ctx, request)
+	if err != nil {
+		resp.Diagnostics.AddError("failure to invoke API", err.Error())
+		if res != nil && res.RawResponse != nil {
+			resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+		}
+		return
+	}
+	if res == nil {
+		resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+		return
+	}
+	// Accepts any 2xx status by comparing the first digit of the formatted
+	// status code against '2'.
+	if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
+		resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+		return
+	}
+
+}
+
+// ImportState supports `terraform import` by seeding state with the
+// source_id taken from the import identifier.
+func (r *SourceFileResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+	resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
+}
diff --git a/internal/provider/source_file_resource_sdk.go b/internal/provider/source_file_resource_sdk.go
new file mode 100644
index 000000000..482e42f23
--- /dev/null
+++ b/internal/provider/source_file_resource_sdk.go
@@ -0,0 +1,445 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+// ToCreateSDKType converts the Terraform resource model into the SDK request
+// payload used to create a File source. Optional attributes that are null or
+// unknown are mapped to nil pointers so they are omitted from the request.
+func (r *SourceFileResourceModel) ToCreateSDKType() *shared.SourceFileCreateRequest {
+	datasetName := r.Configuration.DatasetName.ValueString()
+	format := new(shared.SourceFileFileFormat)
+	if !r.Configuration.Format.IsUnknown() && !r.Configuration.Format.IsNull() {
+		*format = shared.SourceFileFileFormat(r.Configuration.Format.ValueString())
+	} else {
+		format = nil
+	}
+	// Map whichever storage-provider variant is set into the oneof wrapper.
+	// The schema enforces that exactly one variant is configured; if several
+	// were somehow set, the last non-nil assignment below would win.
+	var provider shared.SourceFileStorageProvider
+	var sourceFileHTTPSPublicWeb *shared.SourceFileHTTPSPublicWeb
+	if r.Configuration.Provider.HTTPSPublicWeb != nil {
+		userAgent := new(bool)
+		if !r.Configuration.Provider.HTTPSPublicWeb.UserAgent.IsUnknown() && !r.Configuration.Provider.HTTPSPublicWeb.UserAgent.IsNull() {
+			*userAgent = r.Configuration.Provider.HTTPSPublicWeb.UserAgent.ValueBool()
+		} else {
+			userAgent = nil
+		}
+		sourceFileHTTPSPublicWeb = &shared.SourceFileHTTPSPublicWeb{
+			UserAgent: userAgent,
+		}
+	}
+	if sourceFileHTTPSPublicWeb != nil {
+		provider = shared.SourceFileStorageProvider{
+			SourceFileHTTPSPublicWeb: sourceFileHTTPSPublicWeb,
+		}
+	}
+	var sourceFileGCSGoogleCloudStorage *shared.SourceFileGCSGoogleCloudStorage
+	if r.Configuration.Provider.GCSGoogleCloudStorage != nil {
+		serviceAccountJSON := new(string)
+		if !r.Configuration.Provider.GCSGoogleCloudStorage.ServiceAccountJSON.IsUnknown() && !r.Configuration.Provider.GCSGoogleCloudStorage.ServiceAccountJSON.IsNull() {
+			*serviceAccountJSON = r.Configuration.Provider.GCSGoogleCloudStorage.ServiceAccountJSON.ValueString()
+		} else {
+			serviceAccountJSON = nil
+		}
+		sourceFileGCSGoogleCloudStorage = &shared.SourceFileGCSGoogleCloudStorage{
+			ServiceAccountJSON: serviceAccountJSON,
+		}
+	}
+	if sourceFileGCSGoogleCloudStorage != nil {
+		provider = shared.SourceFileStorageProvider{
+			SourceFileGCSGoogleCloudStorage: sourceFileGCSGoogleCloudStorage,
+		}
+	}
+	var sourceFileS3AmazonWebServices *shared.SourceFileS3AmazonWebServices
+	if r.Configuration.Provider.S3AmazonWebServices != nil {
+		awsAccessKeyID := new(string)
+		if !r.Configuration.Provider.S3AmazonWebServices.AwsAccessKeyID.IsUnknown() && !r.Configuration.Provider.S3AmazonWebServices.AwsAccessKeyID.IsNull() {
+			*awsAccessKeyID = r.Configuration.Provider.S3AmazonWebServices.AwsAccessKeyID.ValueString()
+		} else {
+			awsAccessKeyID = nil
+		}
+		awsSecretAccessKey := new(string)
+		if !r.Configuration.Provider.S3AmazonWebServices.AwsSecretAccessKey.IsUnknown() && !r.Configuration.Provider.S3AmazonWebServices.AwsSecretAccessKey.IsNull() {
+			*awsSecretAccessKey = r.Configuration.Provider.S3AmazonWebServices.AwsSecretAccessKey.ValueString()
+		} else {
+			awsSecretAccessKey = nil
+		}
+		sourceFileS3AmazonWebServices = &shared.SourceFileS3AmazonWebServices{
+			AwsAccessKeyID:     awsAccessKeyID,
+			AwsSecretAccessKey: awsSecretAccessKey,
+		}
+	}
+	if sourceFileS3AmazonWebServices != nil {
+		provider = shared.SourceFileStorageProvider{
+			SourceFileS3AmazonWebServices: sourceFileS3AmazonWebServices,
+		}
+	}
+	var sourceFileAzBlobAzureBlobStorage *shared.SourceFileAzBlobAzureBlobStorage
+	if r.Configuration.Provider.AzBlobAzureBlobStorage != nil {
+		sasToken := new(string)
+		if !r.Configuration.Provider.AzBlobAzureBlobStorage.SasToken.IsUnknown() && !r.Configuration.Provider.AzBlobAzureBlobStorage.SasToken.IsNull() {
+			*sasToken = r.Configuration.Provider.AzBlobAzureBlobStorage.SasToken.ValueString()
+		} else {
+			sasToken = nil
+		}
+		sharedKey := new(string)
+		if !r.Configuration.Provider.AzBlobAzureBlobStorage.SharedKey.IsUnknown() && !r.Configuration.Provider.AzBlobAzureBlobStorage.SharedKey.IsNull() {
+			*sharedKey = r.Configuration.Provider.AzBlobAzureBlobStorage.SharedKey.ValueString()
+		} else {
+			sharedKey = nil
+		}
+		storageAccount := r.Configuration.Provider.AzBlobAzureBlobStorage.StorageAccount.ValueString()
+		sourceFileAzBlobAzureBlobStorage = &shared.SourceFileAzBlobAzureBlobStorage{
+			SasToken:       sasToken,
+			SharedKey:      sharedKey,
+			StorageAccount: storageAccount,
+		}
+	}
+	if sourceFileAzBlobAzureBlobStorage != nil {
+		provider = shared.SourceFileStorageProvider{
+			SourceFileAzBlobAzureBlobStorage: sourceFileAzBlobAzureBlobStorage,
+		}
+	}
+	var sourceFileSSHSecureShell *shared.SourceFileSSHSecureShell
+	if r.Configuration.Provider.SSHSecureShell != nil {
+		host := r.Configuration.Provider.SSHSecureShell.Host.ValueString()
+		password := new(string)
+		if !r.Configuration.Provider.SSHSecureShell.Password.IsUnknown() && !r.Configuration.Provider.SSHSecureShell.Password.IsNull() {
+			*password = r.Configuration.Provider.SSHSecureShell.Password.ValueString()
+		} else {
+			password = nil
+		}
+		port := new(string)
+		if !r.Configuration.Provider.SSHSecureShell.Port.IsUnknown() && !r.Configuration.Provider.SSHSecureShell.Port.IsNull() {
+			*port = r.Configuration.Provider.SSHSecureShell.Port.ValueString()
+		} else {
+			port = nil
+		}
+		user := r.Configuration.Provider.SSHSecureShell.User.ValueString()
+		sourceFileSSHSecureShell = &shared.SourceFileSSHSecureShell{
+			Host:     host,
+			Password: password,
+			Port:     port,
+			User:     user,
+		}
+	}
+	if sourceFileSSHSecureShell != nil {
+		provider = shared.SourceFileStorageProvider{
+			SourceFileSSHSecureShell: sourceFileSSHSecureShell,
+		}
+	}
+	var sourceFileSCPSecureCopyProtocol *shared.SourceFileSCPSecureCopyProtocol
+	if r.Configuration.Provider.SCPSecureCopyProtocol != nil {
+		host1 := r.Configuration.Provider.SCPSecureCopyProtocol.Host.ValueString()
+		password1 := new(string)
+		if !r.Configuration.Provider.SCPSecureCopyProtocol.Password.IsUnknown() && !r.Configuration.Provider.SCPSecureCopyProtocol.Password.IsNull() {
+			*password1 = r.Configuration.Provider.SCPSecureCopyProtocol.Password.ValueString()
+		} else {
+			password1 = nil
+		}
+		port1 := new(string)
+		if !r.Configuration.Provider.SCPSecureCopyProtocol.Port.IsUnknown() && !r.Configuration.Provider.SCPSecureCopyProtocol.Port.IsNull() {
+			*port1 = r.Configuration.Provider.SCPSecureCopyProtocol.Port.ValueString()
+		} else {
+			port1 = nil
+		}
+		user1 := r.Configuration.Provider.SCPSecureCopyProtocol.User.ValueString()
+		sourceFileSCPSecureCopyProtocol = &shared.SourceFileSCPSecureCopyProtocol{
+			Host:     host1,
+			Password: password1,
+			Port:     port1,
+			User:     user1,
+		}
+	}
+	if sourceFileSCPSecureCopyProtocol != nil {
+		provider = shared.SourceFileStorageProvider{
+			SourceFileSCPSecureCopyProtocol: sourceFileSCPSecureCopyProtocol,
+		}
+	}
+	var sourceFileSFTPSecureFileTransferProtocol *shared.SourceFileSFTPSecureFileTransferProtocol
+	if r.Configuration.Provider.SFTPSecureFileTransferProtocol != nil {
+		host2 := r.Configuration.Provider.SFTPSecureFileTransferProtocol.Host.ValueString()
+		password2 := new(string)
+		if !r.Configuration.Provider.SFTPSecureFileTransferProtocol.Password.IsUnknown() && !r.Configuration.Provider.SFTPSecureFileTransferProtocol.Password.IsNull() {
+			*password2 = r.Configuration.Provider.SFTPSecureFileTransferProtocol.Password.ValueString()
+		} else {
+			password2 = nil
+		}
+		port2 := new(string)
+		if !r.Configuration.Provider.SFTPSecureFileTransferProtocol.Port.IsUnknown() && !r.Configuration.Provider.SFTPSecureFileTransferProtocol.Port.IsNull() {
+			*port2 = r.Configuration.Provider.SFTPSecureFileTransferProtocol.Port.ValueString()
+		} else {
+			port2 = nil
+		}
+		user2 := r.Configuration.Provider.SFTPSecureFileTransferProtocol.User.ValueString()
+		sourceFileSFTPSecureFileTransferProtocol = &shared.SourceFileSFTPSecureFileTransferProtocol{
+			Host:     host2,
+			Password: password2,
+			Port:     port2,
+			User:     user2,
+		}
+	}
+	if sourceFileSFTPSecureFileTransferProtocol != nil {
+		provider = shared.SourceFileStorageProvider{
+			SourceFileSFTPSecureFileTransferProtocol: sourceFileSFTPSecureFileTransferProtocol,
+		}
+	}
+	readerOptions := new(string)
+	if !r.Configuration.ReaderOptions.IsUnknown() && !r.Configuration.ReaderOptions.IsNull() {
+		*readerOptions = r.Configuration.ReaderOptions.ValueString()
+	} else {
+		readerOptions = nil
+	}
+	url := r.Configuration.URL.ValueString()
+	configuration := shared.SourceFile{
+		DatasetName:   datasetName,
+		Format:        format,
+		Provider:      provider,
+		ReaderOptions: readerOptions,
+		URL:           url,
+	}
+	definitionID := new(string)
+	if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+		*definitionID = r.DefinitionID.ValueString()
+	} else {
+		definitionID = nil
+	}
+	name := r.Name.ValueString()
+	secretID := new(string)
+	if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
+		*secretID = r.SecretID.ValueString()
+	} else {
+		secretID = nil
+	}
+	workspaceID := r.WorkspaceID.ValueString()
+	out := shared.SourceFileCreateRequest{
+		Configuration: configuration,
+		DefinitionID:  definitionID,
+		Name:          name,
+		SecretID:      secretID,
+		WorkspaceID:   workspaceID,
+	}
+	return &out
+}
+
+// ToGetSDKType returns the model as a create-request payload; the GET path
+// reuses the same request shape as create.
+func (r *SourceFileResourceModel) ToGetSDKType() *shared.SourceFileCreateRequest {
+	return r.ToCreateSDKType()
+}
+
+// ToUpdateSDKType converts the Terraform resource model into the SDK request
+// payload used to update (PUT) a File source. It mirrors ToCreateSDKType but
+// targets the update-specific shared types, and omits definition/secret IDs,
+// which are create-only. Null/unknown optionals map to nil pointers.
+func (r *SourceFileResourceModel) ToUpdateSDKType() *shared.SourceFilePutRequest {
+	datasetName := r.Configuration.DatasetName.ValueString()
+	format := new(shared.FileFormat)
+	if !r.Configuration.Format.IsUnknown() && !r.Configuration.Format.IsNull() {
+		*format = shared.FileFormat(r.Configuration.Format.ValueString())
+	} else {
+		format = nil
+	}
+	// Map whichever storage-provider variant is set into the oneof wrapper;
+	// the last non-nil assignment below wins if several were set.
+	var provider shared.StorageProvider
+	var httpsPublicWeb *shared.HTTPSPublicWeb
+	if r.Configuration.Provider.HTTPSPublicWeb != nil {
+		userAgent := new(bool)
+		if !r.Configuration.Provider.HTTPSPublicWeb.UserAgent.IsUnknown() && !r.Configuration.Provider.HTTPSPublicWeb.UserAgent.IsNull() {
+			*userAgent = r.Configuration.Provider.HTTPSPublicWeb.UserAgent.ValueBool()
+		} else {
+			userAgent = nil
+		}
+		httpsPublicWeb = &shared.HTTPSPublicWeb{
+			UserAgent: userAgent,
+		}
+	}
+	if httpsPublicWeb != nil {
+		provider = shared.StorageProvider{
+			HTTPSPublicWeb: httpsPublicWeb,
+		}
+	}
+	var gcsGoogleCloudStorage *shared.GCSGoogleCloudStorage
+	if r.Configuration.Provider.GCSGoogleCloudStorage != nil {
+		serviceAccountJSON := new(string)
+		if !r.Configuration.Provider.GCSGoogleCloudStorage.ServiceAccountJSON.IsUnknown() && !r.Configuration.Provider.GCSGoogleCloudStorage.ServiceAccountJSON.IsNull() {
+			*serviceAccountJSON = r.Configuration.Provider.GCSGoogleCloudStorage.ServiceAccountJSON.ValueString()
+		} else {
+			serviceAccountJSON = nil
+		}
+		gcsGoogleCloudStorage = &shared.GCSGoogleCloudStorage{
+			ServiceAccountJSON: serviceAccountJSON,
+		}
+	}
+	if gcsGoogleCloudStorage != nil {
+		provider = shared.StorageProvider{
+			GCSGoogleCloudStorage: gcsGoogleCloudStorage,
+		}
+	}
+	var sourceFileUpdateS3AmazonWebServices *shared.SourceFileUpdateS3AmazonWebServices
+	if r.Configuration.Provider.S3AmazonWebServices != nil {
+		awsAccessKeyID := new(string)
+		if !r.Configuration.Provider.S3AmazonWebServices.AwsAccessKeyID.IsUnknown() && !r.Configuration.Provider.S3AmazonWebServices.AwsAccessKeyID.IsNull() {
+			*awsAccessKeyID = r.Configuration.Provider.S3AmazonWebServices.AwsAccessKeyID.ValueString()
+		} else {
+			awsAccessKeyID = nil
+		}
+		awsSecretAccessKey := new(string)
+		if !r.Configuration.Provider.S3AmazonWebServices.AwsSecretAccessKey.IsUnknown() && !r.Configuration.Provider.S3AmazonWebServices.AwsSecretAccessKey.IsNull() {
+			*awsSecretAccessKey = r.Configuration.Provider.S3AmazonWebServices.AwsSecretAccessKey.ValueString()
+		} else {
+			awsSecretAccessKey = nil
+		}
+		sourceFileUpdateS3AmazonWebServices = &shared.SourceFileUpdateS3AmazonWebServices{
+			AwsAccessKeyID:     awsAccessKeyID,
+			AwsSecretAccessKey: awsSecretAccessKey,
+		}
+	}
+	if sourceFileUpdateS3AmazonWebServices != nil {
+		provider = shared.StorageProvider{
+			SourceFileUpdateS3AmazonWebServices: sourceFileUpdateS3AmazonWebServices,
+		}
+	}
+	var azBlobAzureBlobStorage *shared.AzBlobAzureBlobStorage
+	if r.Configuration.Provider.AzBlobAzureBlobStorage != nil {
+		sasToken := new(string)
+		if !r.Configuration.Provider.AzBlobAzureBlobStorage.SasToken.IsUnknown() && !r.Configuration.Provider.AzBlobAzureBlobStorage.SasToken.IsNull() {
+			*sasToken = r.Configuration.Provider.AzBlobAzureBlobStorage.SasToken.ValueString()
+		} else {
+			sasToken = nil
+		}
+		sharedKey := new(string)
+		if !r.Configuration.Provider.AzBlobAzureBlobStorage.SharedKey.IsUnknown() && !r.Configuration.Provider.AzBlobAzureBlobStorage.SharedKey.IsNull() {
+			*sharedKey = r.Configuration.Provider.AzBlobAzureBlobStorage.SharedKey.ValueString()
+		} else {
+			sharedKey = nil
+		}
+		storageAccount := r.Configuration.Provider.AzBlobAzureBlobStorage.StorageAccount.ValueString()
+		azBlobAzureBlobStorage = &shared.AzBlobAzureBlobStorage{
+			SasToken:       sasToken,
+			SharedKey:      sharedKey,
+			StorageAccount: storageAccount,
+		}
+	}
+	if azBlobAzureBlobStorage != nil {
+		provider = shared.StorageProvider{
+			AzBlobAzureBlobStorage: azBlobAzureBlobStorage,
+		}
+	}
+	var sshSecureShell *shared.SSHSecureShell
+	if r.Configuration.Provider.SSHSecureShell != nil {
+		host := r.Configuration.Provider.SSHSecureShell.Host.ValueString()
+		password := new(string)
+		if !r.Configuration.Provider.SSHSecureShell.Password.IsUnknown() && !r.Configuration.Provider.SSHSecureShell.Password.IsNull() {
+			*password = r.Configuration.Provider.SSHSecureShell.Password.ValueString()
+		} else {
+			password = nil
+		}
+		port := new(string)
+		if !r.Configuration.Provider.SSHSecureShell.Port.IsUnknown() && !r.Configuration.Provider.SSHSecureShell.Port.IsNull() {
+			*port = r.Configuration.Provider.SSHSecureShell.Port.ValueString()
+		} else {
+			port = nil
+		}
+		user := r.Configuration.Provider.SSHSecureShell.User.ValueString()
+		sshSecureShell = &shared.SSHSecureShell{
+			Host:     host,
+			Password: password,
+			Port:     port,
+			User:     user,
+		}
+	}
+	if sshSecureShell != nil {
+		provider = shared.StorageProvider{
+			SSHSecureShell: sshSecureShell,
+		}
+	}
+	var scpSecureCopyProtocol *shared.SCPSecureCopyProtocol
+	if r.Configuration.Provider.SCPSecureCopyProtocol != nil {
+		host1 := r.Configuration.Provider.SCPSecureCopyProtocol.Host.ValueString()
+		password1 := new(string)
+		if !r.Configuration.Provider.SCPSecureCopyProtocol.Password.IsUnknown() && !r.Configuration.Provider.SCPSecureCopyProtocol.Password.IsNull() {
+			*password1 = r.Configuration.Provider.SCPSecureCopyProtocol.Password.ValueString()
+		} else {
+			password1 = nil
+		}
+		port1 := new(string)
+		if !r.Configuration.Provider.SCPSecureCopyProtocol.Port.IsUnknown() && !r.Configuration.Provider.SCPSecureCopyProtocol.Port.IsNull() {
+			*port1 = r.Configuration.Provider.SCPSecureCopyProtocol.Port.ValueString()
+		} else {
+			port1 = nil
+		}
+		user1 := r.Configuration.Provider.SCPSecureCopyProtocol.User.ValueString()
+		scpSecureCopyProtocol = &shared.SCPSecureCopyProtocol{
+			Host:     host1,
+			Password: password1,
+			Port:     port1,
+			User:     user1,
+		}
+	}
+	if scpSecureCopyProtocol != nil {
+		provider = shared.StorageProvider{
+			SCPSecureCopyProtocol: scpSecureCopyProtocol,
+		}
+	}
+	var sftpSecureFileTransferProtocol *shared.SFTPSecureFileTransferProtocol
+	if r.Configuration.Provider.SFTPSecureFileTransferProtocol != nil {
+		host2 := r.Configuration.Provider.SFTPSecureFileTransferProtocol.Host.ValueString()
+		password2 := new(string)
+		if !r.Configuration.Provider.SFTPSecureFileTransferProtocol.Password.IsUnknown() && !r.Configuration.Provider.SFTPSecureFileTransferProtocol.Password.IsNull() {
+			*password2 = r.Configuration.Provider.SFTPSecureFileTransferProtocol.Password.ValueString()
+		} else {
+			password2 = nil
+		}
+		port2 := new(string)
+		if !r.Configuration.Provider.SFTPSecureFileTransferProtocol.Port.IsUnknown() && !r.Configuration.Provider.SFTPSecureFileTransferProtocol.Port.IsNull() {
+			*port2 = r.Configuration.Provider.SFTPSecureFileTransferProtocol.Port.ValueString()
+		} else {
+			port2 = nil
+		}
+		user2 := r.Configuration.Provider.SFTPSecureFileTransferProtocol.User.ValueString()
+		sftpSecureFileTransferProtocol = &shared.SFTPSecureFileTransferProtocol{
+			Host:     host2,
+			Password: password2,
+			Port:     port2,
+			User:     user2,
+		}
+	}
+	if sftpSecureFileTransferProtocol != nil {
+		provider = shared.StorageProvider{
+			SFTPSecureFileTransferProtocol: sftpSecureFileTransferProtocol,
+		}
+	}
+	readerOptions := new(string)
+	if !r.Configuration.ReaderOptions.IsUnknown() && !r.Configuration.ReaderOptions.IsNull() {
+		*readerOptions = r.Configuration.ReaderOptions.ValueString()
+	} else {
+		readerOptions = nil
+	}
+	url := r.Configuration.URL.ValueString()
+	configuration := shared.SourceFileUpdate{
+		DatasetName:   datasetName,
+		Format:        format,
+		Provider:      provider,
+		ReaderOptions: readerOptions,
+		URL:           url,
+	}
+	name := r.Name.ValueString()
+	workspaceID := r.WorkspaceID.ValueString()
+	out := shared.SourceFilePutRequest{
+		Configuration: configuration,
+		Name:          name,
+		WorkspaceID:   workspaceID,
+	}
+	return &out
+}
+
+// ToDeleteSDKType returns the model as a create-request payload; the delete
+// path reuses the same request shape as create.
+func (r *SourceFileResourceModel) ToDeleteSDKType() *shared.SourceFileCreateRequest {
+	return r.ToCreateSDKType()
+}
+
+// RefreshFromGetResponse copies the API's view of the source (name, IDs,
+// type, workspace) into the Terraform state model.
+func (r *SourceFileResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+	r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+	r.SourceType = types.StringValue(resp.SourceType)
+	r.SourceID = types.StringValue(resp.SourceID)
+	r.Name = types.StringValue(resp.Name)
+}
+
+// RefreshFromCreateResponse refreshes state from a create response; create
+// and get return the same response shape, so it delegates.
+func (r *SourceFileResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
+	r.RefreshFromGetResponse(resp)
+}
diff --git a/internal/provider/source_filesecure_data_source.go b/internal/provider/source_filesecure_data_source.go
deleted file mode 100755
index be66e39a3..000000000
--- a/internal/provider/source_filesecure_data_source.go
+++ /dev/null
@@ -1,526 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
- "context"
- "fmt"
-
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ datasource.DataSource = &SourceFileSecureDataSource{}
-var _ datasource.DataSourceWithConfigure = &SourceFileSecureDataSource{}
-
-func NewSourceFileSecureDataSource() datasource.DataSource {
- return &SourceFileSecureDataSource{}
-}
-
-// SourceFileSecureDataSource is the data source implementation.
-type SourceFileSecureDataSource struct {
- client *sdk.SDK
-}
-
-// SourceFileSecureDataSourceModel describes the data model.
-type SourceFileSecureDataSourceModel struct {
- Configuration SourceFileSecure `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-// Metadata returns the data source type name.
-func (r *SourceFileSecureDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_file_secure"
-}
-
-// Schema defines the schema for the data source.
-func (r *SourceFileSecureDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "SourceFileSecure DataSource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "dataset_name": schema.StringAttribute{
- Computed: true,
- Description: `The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).`,
- },
- "format": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "csv",
- "json",
- "jsonl",
- "excel",
- "excel_binary",
- "feather",
- "parquet",
- "yaml",
- ),
- },
- MarkdownDescription: `must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "feather", "parquet", "yaml"]` + "\n" +
- `The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).`,
- },
- "provider": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_file_secure_storage_provider_az_blob_azure_blob_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "sas_token": schema.StringAttribute{
- Computed: true,
- Description: `To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.`,
- },
- "shared_key": schema.StringAttribute{
- Computed: true,
- Description: `To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AzBlob",
- ),
- },
- Description: `must be one of ["AzBlob"]`,
- },
- "storage_account": schema.StringAttribute{
- Computed: true,
- Description: `The globally unique name of the storage account that the desired blob sits within. See here for more details.`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_gcs_google_cloud_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "service_account_json": schema.StringAttribute{
- Computed: true,
- Description: `In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS",
- ),
- },
- Description: `must be one of ["GCS"]`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_https_public_web": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HTTPS",
- ),
- },
- Description: `must be one of ["HTTPS"]`,
- },
- "user_agent": schema.BoolAttribute{
- Computed: true,
- Description: `Add User-Agent to request`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_s3_amazon_web_services": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "aws_access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "aws_secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3",
- ),
- },
- Description: `must be one of ["S3"]`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_scp_secure_copy_protocol": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- },
- "password": schema.StringAttribute{
- Computed: true,
- },
- "port": schema.StringAttribute{
- Computed: true,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SCP",
- ),
- },
- Description: `must be one of ["SCP"]`,
- },
- "user": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_sftp_secure_file_transfer_protocol": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- },
- "password": schema.StringAttribute{
- Computed: true,
- },
- "port": schema.StringAttribute{
- Computed: true,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SFTP",
- ),
- },
- Description: `must be one of ["SFTP"]`,
- },
- "user": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_ssh_secure_shell": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- },
- "password": schema.StringAttribute{
- Computed: true,
- },
- "port": schema.StringAttribute{
- Computed: true,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH",
- ),
- },
- Description: `must be one of ["SSH"]`,
- },
- "user": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_az_blob_azure_blob_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "sas_token": schema.StringAttribute{
- Computed: true,
- Description: `To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.`,
- },
- "shared_key": schema.StringAttribute{
- Computed: true,
- Description: `To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AzBlob",
- ),
- },
- Description: `must be one of ["AzBlob"]`,
- },
- "storage_account": schema.StringAttribute{
- Computed: true,
- Description: `The globally unique name of the storage account that the desired blob sits within. See here for more details.`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_gcs_google_cloud_storage": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "service_account_json": schema.StringAttribute{
- Computed: true,
- Description: `In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS",
- ),
- },
- Description: `must be one of ["GCS"]`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_https_public_web": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HTTPS",
- ),
- },
- Description: `must be one of ["HTTPS"]`,
- },
- "user_agent": schema.BoolAttribute{
- Computed: true,
- Description: `Add User-Agent to request`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_s3_amazon_web_services": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "aws_access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "aws_secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3",
- ),
- },
- Description: `must be one of ["S3"]`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_scp_secure_copy_protocol": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- },
- "password": schema.StringAttribute{
- Computed: true,
- },
- "port": schema.StringAttribute{
- Computed: true,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SCP",
- ),
- },
- Description: `must be one of ["SCP"]`,
- },
- "user": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_sftp_secure_file_transfer_protocol": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- },
- "password": schema.StringAttribute{
- Computed: true,
- },
- "port": schema.StringAttribute{
- Computed: true,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SFTP",
- ),
- },
- Description: `must be one of ["SFTP"]`,
- },
- "user": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_ssh_secure_shell": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- },
- "password": schema.StringAttribute{
- Computed: true,
- },
- "port": schema.StringAttribute{
- Computed: true,
- },
- "storage": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH",
- ),
- },
- Description: `must be one of ["SSH"]`,
- },
- "user": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "reader_options": schema.StringAttribute{
- Computed: true,
- Description: `This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "file-secure",
- ),
- },
- Description: `must be one of ["file-secure"]`,
- },
- "url": schema.StringAttribute{
- Computed: true,
- Description: `The URL path to access the file which should be replicated.`,
- },
- },
- },
- "name": schema.StringAttribute{
- Computed: true,
- },
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
- "source_id": schema.StringAttribute{
- Required: true,
- },
- "workspace_id": schema.StringAttribute{
- Computed: true,
- },
- },
- }
-}
-
-func (r *SourceFileSecureDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected DataSource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *SourceFileSecureDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data *SourceFileSecureDataSourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.GetSourceFileSecureRequest{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.GetSourceFileSecure(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
diff --git a/internal/provider/source_filesecure_data_source_sdk.go b/internal/provider/source_filesecure_data_source_sdk.go
deleted file mode 100755
index f0cc03046..000000000
--- a/internal/provider/source_filesecure_data_source_sdk.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceFileSecureDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
diff --git a/internal/provider/source_filesecure_resource.go b/internal/provider/source_filesecure_resource.go
deleted file mode 100755
index 5a4eec7f3..000000000
--- a/internal/provider/source_filesecure_resource.go
+++ /dev/null
@@ -1,692 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "context"
- "fmt"
-
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ resource.Resource = &SourceFileSecureResource{}
-var _ resource.ResourceWithImportState = &SourceFileSecureResource{}
-
-func NewSourceFileSecureResource() resource.Resource {
- return &SourceFileSecureResource{}
-}
-
-// SourceFileSecureResource defines the resource implementation.
-type SourceFileSecureResource struct {
- client *sdk.SDK
-}
-
-// SourceFileSecureResourceModel describes the resource data model.
-type SourceFileSecureResourceModel struct {
- Configuration SourceFileSecure `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-func (r *SourceFileSecureResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_file_secure"
-}
-
-func (r *SourceFileSecureResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "SourceFileSecure Resource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "dataset_name": schema.StringAttribute{
- Required: true,
- Description: `The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).`,
- },
- "format": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "csv",
- "json",
- "jsonl",
- "excel",
- "excel_binary",
- "feather",
- "parquet",
- "yaml",
- ),
- },
- MarkdownDescription: `must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "feather", "parquet", "yaml"]` + "\n" +
- `The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).`,
- },
- "provider": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "source_file_secure_storage_provider_az_blob_azure_blob_storage": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "sas_token": schema.StringAttribute{
- Optional: true,
- Description: `To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.`,
- },
- "shared_key": schema.StringAttribute{
- Optional: true,
- Description: `To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AzBlob",
- ),
- },
- Description: `must be one of ["AzBlob"]`,
- },
- "storage_account": schema.StringAttribute{
- Required: true,
- Description: `The globally unique name of the storage account that the desired blob sits within. See here for more details.`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_gcs_google_cloud_storage": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "service_account_json": schema.StringAttribute{
- Optional: true,
- Description: `In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS",
- ),
- },
- Description: `must be one of ["GCS"]`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_https_public_web": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HTTPS",
- ),
- },
- Description: `must be one of ["HTTPS"]`,
- },
- "user_agent": schema.BoolAttribute{
- Optional: true,
- Description: `Add User-Agent to request`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_s3_amazon_web_services": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "aws_access_key_id": schema.StringAttribute{
- Optional: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "aws_secret_access_key": schema.StringAttribute{
- Optional: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3",
- ),
- },
- Description: `must be one of ["S3"]`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_scp_secure_copy_protocol": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Required: true,
- },
- "password": schema.StringAttribute{
- Optional: true,
- },
- "port": schema.StringAttribute{
- Optional: true,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SCP",
- ),
- },
- Description: `must be one of ["SCP"]`,
- },
- "user": schema.StringAttribute{
- Required: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_sftp_secure_file_transfer_protocol": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Required: true,
- },
- "password": schema.StringAttribute{
- Optional: true,
- },
- "port": schema.StringAttribute{
- Optional: true,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SFTP",
- ),
- },
- Description: `must be one of ["SFTP"]`,
- },
- "user": schema.StringAttribute{
- Required: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_storage_provider_ssh_secure_shell": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Required: true,
- },
- "password": schema.StringAttribute{
- Optional: true,
- },
- "port": schema.StringAttribute{
- Optional: true,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH",
- ),
- },
- Description: `must be one of ["SSH"]`,
- },
- "user": schema.StringAttribute{
- Required: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_az_blob_azure_blob_storage": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "sas_token": schema.StringAttribute{
- Optional: true,
- Description: `To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.`,
- },
- "shared_key": schema.StringAttribute{
- Optional: true,
- Description: `To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AzBlob",
- ),
- },
- Description: `must be one of ["AzBlob"]`,
- },
- "storage_account": schema.StringAttribute{
- Required: true,
- Description: `The globally unique name of the storage account that the desired blob sits within. See here for more details.`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_gcs_google_cloud_storage": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "service_account_json": schema.StringAttribute{
- Optional: true,
- Description: `In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "GCS",
- ),
- },
- Description: `must be one of ["GCS"]`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_https_public_web": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "HTTPS",
- ),
- },
- Description: `must be one of ["HTTPS"]`,
- },
- "user_agent": schema.BoolAttribute{
- Optional: true,
- Description: `Add User-Agent to request`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_s3_amazon_web_services": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "aws_access_key_id": schema.StringAttribute{
- Optional: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "aws_secret_access_key": schema.StringAttribute{
- Optional: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "S3",
- ),
- },
- Description: `must be one of ["S3"]`,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_scp_secure_copy_protocol": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Required: true,
- },
- "password": schema.StringAttribute{
- Optional: true,
- },
- "port": schema.StringAttribute{
- Optional: true,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SCP",
- ),
- },
- Description: `must be one of ["SCP"]`,
- },
- "user": schema.StringAttribute{
- Required: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_sftp_secure_file_transfer_protocol": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Required: true,
- },
- "password": schema.StringAttribute{
- Optional: true,
- },
- "port": schema.StringAttribute{
- Optional: true,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SFTP",
- ),
- },
- Description: `must be one of ["SFTP"]`,
- },
- "user": schema.StringAttribute{
- Required: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "source_file_secure_update_storage_provider_ssh_secure_shell": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Required: true,
- },
- "password": schema.StringAttribute{
- Optional: true,
- },
- "port": schema.StringAttribute{
- Optional: true,
- },
- "storage": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH",
- ),
- },
- Description: `must be one of ["SSH"]`,
- },
- "user": schema.StringAttribute{
- Required: true,
- },
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The storage Provider or Location of the file(s) which should be replicated.`,
- },
- "reader_options": schema.StringAttribute{
- Optional: true,
- Description: `This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "file-secure",
- ),
- },
- Description: `must be one of ["file-secure"]`,
- },
- "url": schema.StringAttribute{
- Required: true,
- Description: `The URL path to access the file which should be replicated.`,
- },
- },
- },
- "name": schema.StringAttribute{
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- Required: true,
- },
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
- "source_id": schema.StringAttribute{
- Computed: true,
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- },
- "workspace_id": schema.StringAttribute{
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- Required: true,
- },
- },
- }
-}
-
-func (r *SourceFileSecureResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected Resource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *SourceFileSecureResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data *SourceFileSecureResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- request := *data.ToCreateSDKType()
- res, err := r.client.Sources.CreateSourceFileSecure(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromCreateResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceFileSecureResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data *SourceFileSecureResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.GetSourceFileSecureRequest{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.GetSourceFileSecure(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceFileSecureResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data *SourceFileSecureResourceModel
- merge(ctx, req, resp, &data)
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceFileSecurePutRequest := data.ToUpdateSDKType()
- sourceID := data.SourceID.ValueString()
- request := operations.PutSourceFileSecureRequest{
- SourceFileSecurePutRequest: sourceFileSecurePutRequest,
- SourceID: sourceID,
- }
- res, err := r.client.Sources.PutSourceFileSecure(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- sourceId1 := data.SourceID.ValueString()
- getRequest := operations.GetSourceFileSecureRequest{
- SourceID: sourceId1,
- }
- getResponse, err := r.client.Sources.GetSourceFileSecure(ctx, getRequest)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if getResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse))
- return
- }
- if getResponse.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse))
- return
- }
- if getResponse.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse))
- return
- }
- data.RefreshFromGetResponse(getResponse.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceFileSecureResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data *SourceFileSecureResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.DeleteSourceFileSecureRequest{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.DeleteSourceFileSecure(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
-
-}
-
-func (r *SourceFileSecureResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
-}
diff --git a/internal/provider/source_filesecure_resource_sdk.go b/internal/provider/source_filesecure_resource_sdk.go
deleted file mode 100755
index e9b05229e..000000000
--- a/internal/provider/source_filesecure_resource_sdk.go
+++ /dev/null
@@ -1,458 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceFileSecureResourceModel) ToCreateSDKType() *shared.SourceFileSecureCreateRequest {
- datasetName := r.Configuration.DatasetName.ValueString()
- format := shared.SourceFileSecureFileFormat(r.Configuration.Format.ValueString())
- var provider shared.SourceFileSecureStorageProvider
- var sourceFileSecureStorageProviderHTTPSPublicWeb *shared.SourceFileSecureStorageProviderHTTPSPublicWeb
- if r.Configuration.Provider.SourceFileSecureStorageProviderHTTPSPublicWeb != nil {
- storage := shared.SourceFileSecureStorageProviderHTTPSPublicWebStorage(r.Configuration.Provider.SourceFileSecureStorageProviderHTTPSPublicWeb.Storage.ValueString())
- userAgent := new(bool)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderHTTPSPublicWeb.UserAgent.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderHTTPSPublicWeb.UserAgent.IsNull() {
- *userAgent = r.Configuration.Provider.SourceFileSecureStorageProviderHTTPSPublicWeb.UserAgent.ValueBool()
- } else {
- userAgent = nil
- }
- sourceFileSecureStorageProviderHTTPSPublicWeb = &shared.SourceFileSecureStorageProviderHTTPSPublicWeb{
- Storage: storage,
- UserAgent: userAgent,
- }
- }
- if sourceFileSecureStorageProviderHTTPSPublicWeb != nil {
- provider = shared.SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderHTTPSPublicWeb: sourceFileSecureStorageProviderHTTPSPublicWeb,
- }
- }
- var sourceFileSecureStorageProviderGCSGoogleCloudStorage *shared.SourceFileSecureStorageProviderGCSGoogleCloudStorage
- if r.Configuration.Provider.SourceFileSecureStorageProviderGCSGoogleCloudStorage != nil {
- serviceAccountJSON := new(string)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderGCSGoogleCloudStorage.ServiceAccountJSON.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderGCSGoogleCloudStorage.ServiceAccountJSON.IsNull() {
- *serviceAccountJSON = r.Configuration.Provider.SourceFileSecureStorageProviderGCSGoogleCloudStorage.ServiceAccountJSON.ValueString()
- } else {
- serviceAccountJSON = nil
- }
- storage1 := shared.SourceFileSecureStorageProviderGCSGoogleCloudStorageStorage(r.Configuration.Provider.SourceFileSecureStorageProviderGCSGoogleCloudStorage.Storage.ValueString())
- sourceFileSecureStorageProviderGCSGoogleCloudStorage = &shared.SourceFileSecureStorageProviderGCSGoogleCloudStorage{
- ServiceAccountJSON: serviceAccountJSON,
- Storage: storage1,
- }
- }
- if sourceFileSecureStorageProviderGCSGoogleCloudStorage != nil {
- provider = shared.SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderGCSGoogleCloudStorage: sourceFileSecureStorageProviderGCSGoogleCloudStorage,
- }
- }
- var sourceFileSecureStorageProviderS3AmazonWebServices *shared.SourceFileSecureStorageProviderS3AmazonWebServices
- if r.Configuration.Provider.SourceFileSecureStorageProviderS3AmazonWebServices != nil {
- awsAccessKeyID := new(string)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderS3AmazonWebServices.AwsAccessKeyID.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderS3AmazonWebServices.AwsAccessKeyID.IsNull() {
- *awsAccessKeyID = r.Configuration.Provider.SourceFileSecureStorageProviderS3AmazonWebServices.AwsAccessKeyID.ValueString()
- } else {
- awsAccessKeyID = nil
- }
- awsSecretAccessKey := new(string)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderS3AmazonWebServices.AwsSecretAccessKey.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderS3AmazonWebServices.AwsSecretAccessKey.IsNull() {
- *awsSecretAccessKey = r.Configuration.Provider.SourceFileSecureStorageProviderS3AmazonWebServices.AwsSecretAccessKey.ValueString()
- } else {
- awsSecretAccessKey = nil
- }
- storage2 := shared.SourceFileSecureStorageProviderS3AmazonWebServicesStorage(r.Configuration.Provider.SourceFileSecureStorageProviderS3AmazonWebServices.Storage.ValueString())
- sourceFileSecureStorageProviderS3AmazonWebServices = &shared.SourceFileSecureStorageProviderS3AmazonWebServices{
- AwsAccessKeyID: awsAccessKeyID,
- AwsSecretAccessKey: awsSecretAccessKey,
- Storage: storage2,
- }
- }
- if sourceFileSecureStorageProviderS3AmazonWebServices != nil {
- provider = shared.SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderS3AmazonWebServices: sourceFileSecureStorageProviderS3AmazonWebServices,
- }
- }
- var sourceFileSecureStorageProviderAzBlobAzureBlobStorage *shared.SourceFileSecureStorageProviderAzBlobAzureBlobStorage
- if r.Configuration.Provider.SourceFileSecureStorageProviderAzBlobAzureBlobStorage != nil {
- sasToken := new(string)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderAzBlobAzureBlobStorage.SasToken.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderAzBlobAzureBlobStorage.SasToken.IsNull() {
- *sasToken = r.Configuration.Provider.SourceFileSecureStorageProviderAzBlobAzureBlobStorage.SasToken.ValueString()
- } else {
- sasToken = nil
- }
- sharedKey := new(string)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderAzBlobAzureBlobStorage.SharedKey.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderAzBlobAzureBlobStorage.SharedKey.IsNull() {
- *sharedKey = r.Configuration.Provider.SourceFileSecureStorageProviderAzBlobAzureBlobStorage.SharedKey.ValueString()
- } else {
- sharedKey = nil
- }
- storage3 := shared.SourceFileSecureStorageProviderAzBlobAzureBlobStorageStorage(r.Configuration.Provider.SourceFileSecureStorageProviderAzBlobAzureBlobStorage.Storage.ValueString())
- storageAccount := r.Configuration.Provider.SourceFileSecureStorageProviderAzBlobAzureBlobStorage.StorageAccount.ValueString()
- sourceFileSecureStorageProviderAzBlobAzureBlobStorage = &shared.SourceFileSecureStorageProviderAzBlobAzureBlobStorage{
- SasToken: sasToken,
- SharedKey: sharedKey,
- Storage: storage3,
- StorageAccount: storageAccount,
- }
- }
- if sourceFileSecureStorageProviderAzBlobAzureBlobStorage != nil {
- provider = shared.SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderAzBlobAzureBlobStorage: sourceFileSecureStorageProviderAzBlobAzureBlobStorage,
- }
- }
- var sourceFileSecureStorageProviderSSHSecureShell *shared.SourceFileSecureStorageProviderSSHSecureShell
- if r.Configuration.Provider.SourceFileSecureStorageProviderSSHSecureShell != nil {
- host := r.Configuration.Provider.SourceFileSecureStorageProviderSSHSecureShell.Host.ValueString()
- password := new(string)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderSSHSecureShell.Password.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderSSHSecureShell.Password.IsNull() {
- *password = r.Configuration.Provider.SourceFileSecureStorageProviderSSHSecureShell.Password.ValueString()
- } else {
- password = nil
- }
- port := new(string)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderSSHSecureShell.Port.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderSSHSecureShell.Port.IsNull() {
- *port = r.Configuration.Provider.SourceFileSecureStorageProviderSSHSecureShell.Port.ValueString()
- } else {
- port = nil
- }
- storage4 := shared.SourceFileSecureStorageProviderSSHSecureShellStorage(r.Configuration.Provider.SourceFileSecureStorageProviderSSHSecureShell.Storage.ValueString())
- user := r.Configuration.Provider.SourceFileSecureStorageProviderSSHSecureShell.User.ValueString()
- sourceFileSecureStorageProviderSSHSecureShell = &shared.SourceFileSecureStorageProviderSSHSecureShell{
- Host: host,
- Password: password,
- Port: port,
- Storage: storage4,
- User: user,
- }
- }
- if sourceFileSecureStorageProviderSSHSecureShell != nil {
- provider = shared.SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderSSHSecureShell: sourceFileSecureStorageProviderSSHSecureShell,
- }
- }
- var sourceFileSecureStorageProviderSCPSecureCopyProtocol *shared.SourceFileSecureStorageProviderSCPSecureCopyProtocol
- if r.Configuration.Provider.SourceFileSecureStorageProviderSCPSecureCopyProtocol != nil {
- host1 := r.Configuration.Provider.SourceFileSecureStorageProviderSCPSecureCopyProtocol.Host.ValueString()
- password1 := new(string)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderSCPSecureCopyProtocol.Password.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderSCPSecureCopyProtocol.Password.IsNull() {
- *password1 = r.Configuration.Provider.SourceFileSecureStorageProviderSCPSecureCopyProtocol.Password.ValueString()
- } else {
- password1 = nil
- }
- port1 := new(string)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderSCPSecureCopyProtocol.Port.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderSCPSecureCopyProtocol.Port.IsNull() {
- *port1 = r.Configuration.Provider.SourceFileSecureStorageProviderSCPSecureCopyProtocol.Port.ValueString()
- } else {
- port1 = nil
- }
- storage5 := shared.SourceFileSecureStorageProviderSCPSecureCopyProtocolStorage(r.Configuration.Provider.SourceFileSecureStorageProviderSCPSecureCopyProtocol.Storage.ValueString())
- user1 := r.Configuration.Provider.SourceFileSecureStorageProviderSCPSecureCopyProtocol.User.ValueString()
- sourceFileSecureStorageProviderSCPSecureCopyProtocol = &shared.SourceFileSecureStorageProviderSCPSecureCopyProtocol{
- Host: host1,
- Password: password1,
- Port: port1,
- Storage: storage5,
- User: user1,
- }
- }
- if sourceFileSecureStorageProviderSCPSecureCopyProtocol != nil {
- provider = shared.SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderSCPSecureCopyProtocol: sourceFileSecureStorageProviderSCPSecureCopyProtocol,
- }
- }
- var sourceFileSecureStorageProviderSFTPSecureFileTransferProtocol *shared.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol
- if r.Configuration.Provider.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol != nil {
- host2 := r.Configuration.Provider.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol.Host.ValueString()
- password2 := new(string)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol.Password.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol.Password.IsNull() {
- *password2 = r.Configuration.Provider.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol.Password.ValueString()
- } else {
- password2 = nil
- }
- port2 := new(string)
- if !r.Configuration.Provider.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol.Port.IsUnknown() && !r.Configuration.Provider.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol.Port.IsNull() {
- *port2 = r.Configuration.Provider.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol.Port.ValueString()
- } else {
- port2 = nil
- }
- storage6 := shared.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocolStorage(r.Configuration.Provider.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol.Storage.ValueString())
- user2 := r.Configuration.Provider.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol.User.ValueString()
- sourceFileSecureStorageProviderSFTPSecureFileTransferProtocol = &shared.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol{
- Host: host2,
- Password: password2,
- Port: port2,
- Storage: storage6,
- User: user2,
- }
- }
- if sourceFileSecureStorageProviderSFTPSecureFileTransferProtocol != nil {
- provider = shared.SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol: sourceFileSecureStorageProviderSFTPSecureFileTransferProtocol,
- }
- }
- readerOptions := new(string)
- if !r.Configuration.ReaderOptions.IsUnknown() && !r.Configuration.ReaderOptions.IsNull() {
- *readerOptions = r.Configuration.ReaderOptions.ValueString()
- } else {
- readerOptions = nil
- }
- sourceType := shared.SourceFileSecureFileSecure(r.Configuration.SourceType.ValueString())
- url := r.Configuration.URL.ValueString()
- configuration := shared.SourceFileSecure{
- DatasetName: datasetName,
- Format: format,
- Provider: provider,
- ReaderOptions: readerOptions,
- SourceType: sourceType,
- URL: url,
- }
- name := r.Name.ValueString()
- secretID := new(string)
- if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
- *secretID = r.SecretID.ValueString()
- } else {
- secretID = nil
- }
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceFileSecureCreateRequest{
- Configuration: configuration,
- Name: name,
- SecretID: secretID,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceFileSecureResourceModel) ToGetSDKType() *shared.SourceFileSecureCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceFileSecureResourceModel) ToUpdateSDKType() *shared.SourceFileSecurePutRequest {
- datasetName := r.Configuration.DatasetName.ValueString()
- format := shared.SourceFileSecureUpdateFileFormat(r.Configuration.Format.ValueString())
- var provider shared.SourceFileSecureUpdateStorageProvider
- var sourceFileSecureUpdateStorageProviderHTTPSPublicWeb *shared.SourceFileSecureUpdateStorageProviderHTTPSPublicWeb
- if r.Configuration.Provider.SourceFileSecureUpdateStorageProviderHTTPSPublicWeb != nil {
- storage := shared.SourceFileSecureUpdateStorageProviderHTTPSPublicWebStorage(r.Configuration.Provider.SourceFileSecureUpdateStorageProviderHTTPSPublicWeb.Storage.ValueString())
- userAgent := new(bool)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderHTTPSPublicWeb.UserAgent.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderHTTPSPublicWeb.UserAgent.IsNull() {
- *userAgent = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderHTTPSPublicWeb.UserAgent.ValueBool()
- } else {
- userAgent = nil
- }
- sourceFileSecureUpdateStorageProviderHTTPSPublicWeb = &shared.SourceFileSecureUpdateStorageProviderHTTPSPublicWeb{
- Storage: storage,
- UserAgent: userAgent,
- }
- }
- if sourceFileSecureUpdateStorageProviderHTTPSPublicWeb != nil {
- provider = shared.SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderHTTPSPublicWeb: sourceFileSecureUpdateStorageProviderHTTPSPublicWeb,
- }
- }
- var sourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage *shared.SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage
- if r.Configuration.Provider.SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage != nil {
- serviceAccountJSON := new(string)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage.ServiceAccountJSON.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage.ServiceAccountJSON.IsNull() {
- *serviceAccountJSON = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage.ServiceAccountJSON.ValueString()
- } else {
- serviceAccountJSON = nil
- }
- storage1 := shared.SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorageStorage(r.Configuration.Provider.SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage.Storage.ValueString())
- sourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage = &shared.SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage{
- ServiceAccountJSON: serviceAccountJSON,
- Storage: storage1,
- }
- }
- if sourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage != nil {
- provider = shared.SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage: sourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage,
- }
- }
- var sourceFileSecureUpdateStorageProviderS3AmazonWebServices *shared.SourceFileSecureUpdateStorageProviderS3AmazonWebServices
- if r.Configuration.Provider.SourceFileSecureUpdateStorageProviderS3AmazonWebServices != nil {
- awsAccessKeyID := new(string)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderS3AmazonWebServices.AwsAccessKeyID.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderS3AmazonWebServices.AwsAccessKeyID.IsNull() {
- *awsAccessKeyID = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderS3AmazonWebServices.AwsAccessKeyID.ValueString()
- } else {
- awsAccessKeyID = nil
- }
- awsSecretAccessKey := new(string)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderS3AmazonWebServices.AwsSecretAccessKey.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderS3AmazonWebServices.AwsSecretAccessKey.IsNull() {
- *awsSecretAccessKey = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderS3AmazonWebServices.AwsSecretAccessKey.ValueString()
- } else {
- awsSecretAccessKey = nil
- }
- storage2 := shared.SourceFileSecureUpdateStorageProviderS3AmazonWebServicesStorage(r.Configuration.Provider.SourceFileSecureUpdateStorageProviderS3AmazonWebServices.Storage.ValueString())
- sourceFileSecureUpdateStorageProviderS3AmazonWebServices = &shared.SourceFileSecureUpdateStorageProviderS3AmazonWebServices{
- AwsAccessKeyID: awsAccessKeyID,
- AwsSecretAccessKey: awsSecretAccessKey,
- Storage: storage2,
- }
- }
- if sourceFileSecureUpdateStorageProviderS3AmazonWebServices != nil {
- provider = shared.SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderS3AmazonWebServices: sourceFileSecureUpdateStorageProviderS3AmazonWebServices,
- }
- }
- var sourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage *shared.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage
- if r.Configuration.Provider.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage != nil {
- sasToken := new(string)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage.SasToken.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage.SasToken.IsNull() {
- *sasToken = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage.SasToken.ValueString()
- } else {
- sasToken = nil
- }
- sharedKey := new(string)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage.SharedKey.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage.SharedKey.IsNull() {
- *sharedKey = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage.SharedKey.ValueString()
- } else {
- sharedKey = nil
- }
- storage3 := shared.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorageStorage(r.Configuration.Provider.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage.Storage.ValueString())
- storageAccount := r.Configuration.Provider.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage.StorageAccount.ValueString()
- sourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage = &shared.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage{
- SasToken: sasToken,
- SharedKey: sharedKey,
- Storage: storage3,
- StorageAccount: storageAccount,
- }
- }
- if sourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage != nil {
- provider = shared.SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage: sourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage,
- }
- }
- var sourceFileSecureUpdateStorageProviderSSHSecureShell *shared.SourceFileSecureUpdateStorageProviderSSHSecureShell
- if r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSSHSecureShell != nil {
- host := r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSSHSecureShell.Host.ValueString()
- password := new(string)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSSHSecureShell.Password.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSSHSecureShell.Password.IsNull() {
- *password = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSSHSecureShell.Password.ValueString()
- } else {
- password = nil
- }
- port := new(string)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSSHSecureShell.Port.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSSHSecureShell.Port.IsNull() {
- *port = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSSHSecureShell.Port.ValueString()
- } else {
- port = nil
- }
- storage4 := shared.SourceFileSecureUpdateStorageProviderSSHSecureShellStorage(r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSSHSecureShell.Storage.ValueString())
- user := r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSSHSecureShell.User.ValueString()
- sourceFileSecureUpdateStorageProviderSSHSecureShell = &shared.SourceFileSecureUpdateStorageProviderSSHSecureShell{
- Host: host,
- Password: password,
- Port: port,
- Storage: storage4,
- User: user,
- }
- }
- if sourceFileSecureUpdateStorageProviderSSHSecureShell != nil {
- provider = shared.SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderSSHSecureShell: sourceFileSecureUpdateStorageProviderSSHSecureShell,
- }
- }
- var sourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol *shared.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol
- if r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol != nil {
- host1 := r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol.Host.ValueString()
- password1 := new(string)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol.Password.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol.Password.IsNull() {
- *password1 = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol.Password.ValueString()
- } else {
- password1 = nil
- }
- port1 := new(string)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol.Port.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol.Port.IsNull() {
- *port1 = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol.Port.ValueString()
- } else {
- port1 = nil
- }
- storage5 := shared.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocolStorage(r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol.Storage.ValueString())
- user1 := r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol.User.ValueString()
- sourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol = &shared.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol{
- Host: host1,
- Password: password1,
- Port: port1,
- Storage: storage5,
- User: user1,
- }
- }
- if sourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol != nil {
- provider = shared.SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol: sourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol,
- }
- }
- var sourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol *shared.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol
- if r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol != nil {
- host2 := r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol.Host.ValueString()
- password2 := new(string)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol.Password.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol.Password.IsNull() {
- *password2 = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol.Password.ValueString()
- } else {
- password2 = nil
- }
- port2 := new(string)
- if !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol.Port.IsUnknown() && !r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol.Port.IsNull() {
- *port2 = r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol.Port.ValueString()
- } else {
- port2 = nil
- }
- storage6 := shared.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocolStorage(r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol.Storage.ValueString())
- user2 := r.Configuration.Provider.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol.User.ValueString()
- sourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol = &shared.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol{
- Host: host2,
- Password: password2,
- Port: port2,
- Storage: storage6,
- User: user2,
- }
- }
- if sourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol != nil {
- provider = shared.SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol: sourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol,
- }
- }
- readerOptions := new(string)
- if !r.Configuration.ReaderOptions.IsUnknown() && !r.Configuration.ReaderOptions.IsNull() {
- *readerOptions = r.Configuration.ReaderOptions.ValueString()
- } else {
- readerOptions = nil
- }
- url := r.Configuration.URL.ValueString()
- configuration := shared.SourceFileSecureUpdate{
- DatasetName: datasetName,
- Format: format,
- Provider: provider,
- ReaderOptions: readerOptions,
- URL: url,
- }
- name := r.Name.ValueString()
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceFileSecurePutRequest{
- Configuration: configuration,
- Name: name,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceFileSecureResourceModel) ToDeleteSDKType() *shared.SourceFileSecureCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceFileSecureResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.SourceType = types.StringValue(resp.SourceType)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
-
-func (r *SourceFileSecureResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
- r.RefreshFromGetResponse(resp)
-}
diff --git a/internal/provider/source_firebolt_data_source.go b/internal/provider/source_firebolt_data_source.go
old mode 100755
new mode 100644
index 6dd072324..cbef47daf
--- a/internal/provider/source_firebolt_data_source.go
+++ b/internal/provider/source_firebolt_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceFireboltDataSource struct {
// SourceFireboltDataSourceModel describes the data model.
type SourceFireboltDataSourceModel struct {
- Configuration SourceFirebolt `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,54 +47,20 @@ func (r *SourceFireboltDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceFirebolt DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "account": schema.StringAttribute{
- Computed: true,
- Description: `Firebolt account to login.`,
- },
- "database": schema.StringAttribute{
- Computed: true,
- Description: `The database to connect to.`,
- },
- "engine": schema.StringAttribute{
- Computed: true,
- Description: `Engine name or url to connect to.`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The host name of your Firebolt database.`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Firebolt password.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "firebolt",
- ),
- },
- Description: `must be one of ["firebolt"]`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Firebolt email address you use to login.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_firebolt_data_source_sdk.go b/internal/provider/source_firebolt_data_source_sdk.go
old mode 100755
new mode 100644
index 06bd9183d..8928ead8f
--- a/internal/provider/source_firebolt_data_source_sdk.go
+++ b/internal/provider/source_firebolt_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceFireboltDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_firebolt_resource.go b/internal/provider/source_firebolt_resource.go
old mode 100755
new mode 100644
index f344fb97c..064a65ad0
--- a/internal/provider/source_firebolt_resource.go
+++ b/internal/provider/source_firebolt_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceFireboltResource struct {
// SourceFireboltResourceModel describes the resource data model.
type SourceFireboltResourceModel struct {
Configuration SourceFirebolt `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -72,30 +72,33 @@ func (r *SourceFireboltResource) Schema(ctx context.Context, req resource.Schema
},
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Firebolt password.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "firebolt",
- ),
- },
- Description: `must be one of ["firebolt"]`,
- },
"username": schema.StringAttribute{
Required: true,
Description: `Firebolt email address you use to login.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -159,7 +162,7 @@ func (r *SourceFireboltResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceFirebolt(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -335,5 +338,5 @@ func (r *SourceFireboltResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceFireboltResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_firebolt_resource_sdk.go b/internal/provider/source_firebolt_resource_sdk.go
old mode 100755
new mode 100644
index 7667b6b69..f66260d0f
--- a/internal/provider/source_firebolt_resource_sdk.go
+++ b/internal/provider/source_firebolt_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -28,16 +28,20 @@ func (r *SourceFireboltResourceModel) ToCreateSDKType() *shared.SourceFireboltCr
host = nil
}
password := r.Configuration.Password.ValueString()
- sourceType := shared.SourceFireboltFirebolt(r.Configuration.SourceType.ValueString())
username := r.Configuration.Username.ValueString()
configuration := shared.SourceFirebolt{
- Account: account,
- Database: database,
- Engine: engine,
- Host: host,
- Password: password,
- SourceType: sourceType,
- Username: username,
+ Account: account,
+ Database: database,
+ Engine: engine,
+ Host: host,
+ Password: password,
+ Username: username,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -49,6 +53,7 @@ func (r *SourceFireboltResourceModel) ToCreateSDKType() *shared.SourceFireboltCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceFireboltCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_freshcaller_data_source.go b/internal/provider/source_freshcaller_data_source.go
old mode 100755
new mode 100644
index fbe2dc59a..b7aea6ec5
--- a/internal/provider/source_freshcaller_data_source.go
+++ b/internal/provider/source_freshcaller_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceFreshcallerDataSource struct {
// SourceFreshcallerDataSourceModel describes the data model.
type SourceFreshcallerDataSourceModel struct {
- Configuration SourceFreshcaller `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,53 +47,20 @@ func (r *SourceFreshcallerDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "SourceFreshcaller DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Freshcaller API Key. See the docs for more information on how to obtain this key.`,
- },
- "domain": schema.StringAttribute{
- Computed: true,
- Description: `Used to construct Base URL for the Freshcaller APIs`,
- },
- "requests_per_minute": schema.Int64Attribute{
- Computed: true,
- Description: `The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "freshcaller",
- ),
- },
- Description: `must be one of ["freshcaller"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time. Any data created after this date will be replicated.`,
- },
- "sync_lag_minutes": schema.Int64Attribute{
- Computed: true,
- Description: `Lag in minutes for each sync, i.e., at time T, data for the time range [prev_sync_time, T-30] will be fetched`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_freshcaller_data_source_sdk.go b/internal/provider/source_freshcaller_data_source_sdk.go
old mode 100755
new mode 100644
index b28eaeec3..7e1190ba5
--- a/internal/provider/source_freshcaller_data_source_sdk.go
+++ b/internal/provider/source_freshcaller_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceFreshcallerDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_freshcaller_resource.go b/internal/provider/source_freshcaller_resource.go
old mode 100755
new mode 100644
index cc5ee6787..d9a4216a2
--- a/internal/provider/source_freshcaller_resource.go
+++ b/internal/provider/source_freshcaller_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceFreshcallerResource struct {
// SourceFreshcallerResourceModel describes the resource data model.
type SourceFreshcallerResourceModel struct {
Configuration SourceFreshcaller `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,6 +58,7 @@ func (r *SourceFreshcallerResource) Schema(ctx context.Context, req resource.Sch
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Freshcaller API Key. See the docs for more information on how to obtain this key.`,
},
"domain": schema.StringAttribute{
@@ -67,21 +69,12 @@ func (r *SourceFreshcallerResource) Schema(ctx context.Context, req resource.Sch
Optional: true,
Description: `The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "freshcaller",
- ),
- },
- Description: `must be one of ["freshcaller"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `UTC date and time. Any data created after this date will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time. Any data created after this date will be replicated.`,
},
"sync_lag_minutes": schema.Int64Attribute{
Optional: true,
@@ -89,13 +82,24 @@ func (r *SourceFreshcallerResource) Schema(ctx context.Context, req resource.Sch
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -159,7 +163,7 @@ func (r *SourceFreshcallerResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceFreshcaller(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -335,5 +339,5 @@ func (r *SourceFreshcallerResource) Delete(ctx context.Context, req resource.Del
}
func (r *SourceFreshcallerResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_freshcaller_resource_sdk.go b/internal/provider/source_freshcaller_resource_sdk.go
old mode 100755
new mode 100644
index a054c25d5..ff0f15ce2
--- a/internal/provider/source_freshcaller_resource_sdk.go
+++ b/internal/provider/source_freshcaller_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -17,8 +17,12 @@ func (r *SourceFreshcallerResourceModel) ToCreateSDKType() *shared.SourceFreshca
} else {
requestsPerMinute = nil
}
- sourceType := shared.SourceFreshcallerFreshcaller(r.Configuration.SourceType.ValueString())
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
syncLagMinutes := new(int64)
if !r.Configuration.SyncLagMinutes.IsUnknown() && !r.Configuration.SyncLagMinutes.IsNull() {
*syncLagMinutes = r.Configuration.SyncLagMinutes.ValueInt64()
@@ -29,10 +33,15 @@ func (r *SourceFreshcallerResourceModel) ToCreateSDKType() *shared.SourceFreshca
APIKey: apiKey,
Domain: domain,
RequestsPerMinute: requestsPerMinute,
- SourceType: sourceType,
StartDate: startDate,
SyncLagMinutes: syncLagMinutes,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -43,6 +52,7 @@ func (r *SourceFreshcallerResourceModel) ToCreateSDKType() *shared.SourceFreshca
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceFreshcallerCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -64,7 +74,12 @@ func (r *SourceFreshcallerResourceModel) ToUpdateSDKType() *shared.SourceFreshca
} else {
requestsPerMinute = nil
}
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
syncLagMinutes := new(int64)
if !r.Configuration.SyncLagMinutes.IsUnknown() && !r.Configuration.SyncLagMinutes.IsNull() {
*syncLagMinutes = r.Configuration.SyncLagMinutes.ValueInt64()
diff --git a/internal/provider/source_freshdesk_data_source.go b/internal/provider/source_freshdesk_data_source.go
old mode 100755
new mode 100644
index 6adc96e50..fadc2f676
--- a/internal/provider/source_freshdesk_data_source.go
+++ b/internal/provider/source_freshdesk_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceFreshdeskDataSource struct {
// SourceFreshdeskDataSourceModel describes the data model.
type SourceFreshdeskDataSourceModel struct {
- Configuration SourceFreshdesk `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,49 +47,20 @@ func (r *SourceFreshdeskDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceFreshdesk DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Freshdesk API Key. See the docs for more information on how to obtain this key.`,
- },
- "domain": schema.StringAttribute{
- Computed: true,
- Description: `Freshdesk domain`,
- },
- "requests_per_minute": schema.Int64Attribute{
- Computed: true,
- Description: `The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "freshdesk",
- ),
- },
- Description: `must be one of ["freshdesk"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time. Any data created after this date will be replicated. If this parameter is not set, all data will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_freshdesk_data_source_sdk.go b/internal/provider/source_freshdesk_data_source_sdk.go
old mode 100755
new mode 100644
index 04216f6a0..8b97e1e9b
--- a/internal/provider/source_freshdesk_data_source_sdk.go
+++ b/internal/provider/source_freshdesk_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceFreshdeskDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_freshdesk_resource.go b/internal/provider/source_freshdesk_resource.go
old mode 100755
new mode 100644
index 9f0627064..c813cbbb4
--- a/internal/provider/source_freshdesk_resource.go
+++ b/internal/provider/source_freshdesk_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceFreshdeskResource struct {
// SourceFreshdeskResourceModel describes the resource data model.
type SourceFreshdeskResourceModel struct {
Configuration SourceFreshdesk `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,6 +58,7 @@ func (r *SourceFreshdeskResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Freshdesk API Key. See the docs for more information on how to obtain this key.`,
},
"domain": schema.StringAttribute{
@@ -67,31 +69,33 @@ func (r *SourceFreshdeskResource) Schema(ctx context.Context, req resource.Schem
Optional: true,
Description: `The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "freshdesk",
- ),
- },
- Description: `must be one of ["freshdesk"]`,
- },
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `UTC date and time. Any data created after this date will be replicated. If this parameter is not set, all data will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time. Any data created after this date will be replicated. If this parameter is not set, all data will be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +159,7 @@ func (r *SourceFreshdeskResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceFreshdesk(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +335,5 @@ func (r *SourceFreshdeskResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceFreshdeskResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_freshdesk_resource_sdk.go b/internal/provider/source_freshdesk_resource_sdk.go
old mode 100755
new mode 100644
index c38e4c191..715c73abc
--- a/internal/provider/source_freshdesk_resource_sdk.go
+++ b/internal/provider/source_freshdesk_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -17,7 +17,6 @@ func (r *SourceFreshdeskResourceModel) ToCreateSDKType() *shared.SourceFreshdesk
} else {
requestsPerMinute = nil
}
- sourceType := shared.SourceFreshdeskFreshdesk(r.Configuration.SourceType.ValueString())
startDate := new(time.Time)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
@@ -28,9 +27,14 @@ func (r *SourceFreshdeskResourceModel) ToCreateSDKType() *shared.SourceFreshdesk
APIKey: apiKey,
Domain: domain,
RequestsPerMinute: requestsPerMinute,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -41,6 +45,7 @@ func (r *SourceFreshdeskResourceModel) ToCreateSDKType() *shared.SourceFreshdesk
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceFreshdeskCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_freshsales_data_source.go b/internal/provider/source_freshsales_data_source.go
old mode 100755
new mode 100644
index 14ab8778b..732db2ba2
--- a/internal/provider/source_freshsales_data_source.go
+++ b/internal/provider/source_freshsales_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceFreshsalesDataSource struct {
// SourceFreshsalesDataSourceModel describes the data model.
type SourceFreshsalesDataSourceModel struct {
- Configuration SourceFreshsales `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourceFreshsalesDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceFreshsales DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Freshsales API Key. See here. The key is case sensitive.`,
- },
- "domain_name": schema.StringAttribute{
- Computed: true,
- Description: `The Name of your Freshsales domain`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "freshsales",
- ),
- },
- Description: `must be one of ["freshsales"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_freshsales_data_source_sdk.go b/internal/provider/source_freshsales_data_source_sdk.go
old mode 100755
new mode 100644
index 054aa8ffe..75102fb75
--- a/internal/provider/source_freshsales_data_source_sdk.go
+++ b/internal/provider/source_freshsales_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceFreshsalesDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_freshsales_resource.go b/internal/provider/source_freshsales_resource.go
old mode 100755
new mode 100644
index 356b049c6..74a83d1b3
--- a/internal/provider/source_freshsales_resource.go
+++ b/internal/provider/source_freshsales_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceFreshsalesResource struct {
// SourceFreshsalesResourceModel describes the resource data model.
type SourceFreshsalesResourceModel struct {
Configuration SourceFreshsales `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,30 +56,33 @@ func (r *SourceFreshsalesResource) Schema(ctx context.Context, req resource.Sche
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Freshsales API Key. See here. The key is case sensitive.`,
},
"domain_name": schema.StringAttribute{
Required: true,
Description: `The Name of your Freshsales domain`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "freshsales",
- ),
- },
- Description: `must be one of ["freshsales"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceFreshsalesResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceFreshsales(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceFreshsalesResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceFreshsalesResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_freshsales_resource_sdk.go b/internal/provider/source_freshsales_resource_sdk.go
old mode 100755
new mode 100644
index ba29f5598..d6f13ba6a
--- a/internal/provider/source_freshsales_resource_sdk.go
+++ b/internal/provider/source_freshsales_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceFreshsalesResourceModel) ToCreateSDKType() *shared.SourceFreshsalesCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
domainName := r.Configuration.DomainName.ValueString()
- sourceType := shared.SourceFreshsalesFreshsales(r.Configuration.SourceType.ValueString())
configuration := shared.SourceFreshsales{
APIKey: apiKey,
DomainName: domainName,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceFreshsalesResourceModel) ToCreateSDKType() *shared.SourceFreshsal
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceFreshsalesCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_gainsightpx_data_source.go b/internal/provider/source_gainsightpx_data_source.go
old mode 100755
new mode 100644
index 98a467c86..a28c8212f
--- a/internal/provider/source_gainsightpx_data_source.go
+++ b/internal/provider/source_gainsightpx_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceGainsightPxDataSource struct {
// SourceGainsightPxDataSourceModel describes the data model.
type SourceGainsightPxDataSourceModel struct {
- Configuration SourceGainsightPx `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceGainsightPxDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "SourceGainsightPx DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `The Aptrinsic API Key which is recieved from the dashboard settings (ref - https://app.aptrinsic.com/settings/api-keys)`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "gainsight-px",
- ),
- },
- Description: `must be one of ["gainsight-px"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_gainsightpx_data_source_sdk.go b/internal/provider/source_gainsightpx_data_source_sdk.go
old mode 100755
new mode 100644
index f318f3ea9..e97ece76e
--- a/internal/provider/source_gainsightpx_data_source_sdk.go
+++ b/internal/provider/source_gainsightpx_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGainsightPxDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_gainsightpx_resource.go b/internal/provider/source_gainsightpx_resource.go
old mode 100755
new mode 100644
index 8791701a4..b1f5b18c6
--- a/internal/provider/source_gainsightpx_resource.go
+++ b/internal/provider/source_gainsightpx_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceGainsightPxResource struct {
// SourceGainsightPxResourceModel describes the resource data model.
type SourceGainsightPxResourceModel struct {
- Configuration SourceGainsightPx `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceGainsightPxResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceGainsightPxResource) Schema(ctx context.Context, req resource.Sch
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Aptrinsic API Key which is recieved from the dashboard settings (ref - https://app.aptrinsic.com/settings/api-keys)`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "gainsight-px",
- ),
- },
- Description: `must be one of ["gainsight-px"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceGainsightPxResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGainsightPx(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceGainsightPxResource) Delete(ctx context.Context, req resource.Del
}
func (r *SourceGainsightPxResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_gainsightpx_resource_sdk.go b/internal/provider/source_gainsightpx_resource_sdk.go
old mode 100755
new mode 100644
index f51a933ad..56cde1469
--- a/internal/provider/source_gainsightpx_resource_sdk.go
+++ b/internal/provider/source_gainsightpx_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGainsightPxResourceModel) ToCreateSDKType() *shared.SourceGainsightPxCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceGainsightPxGainsightPx(r.Configuration.SourceType.ValueString())
configuration := shared.SourceGainsightPx{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceGainsightPxResourceModel) ToCreateSDKType() *shared.SourceGainsig
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGainsightPxCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_gcs_data_source.go b/internal/provider/source_gcs_data_source.go
old mode 100755
new mode 100644
index d9867bc6d..2c169368e
--- a/internal/provider/source_gcs_data_source.go
+++ b/internal/provider/source_gcs_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceGcsDataSource struct {
// SourceGcsDataSourceModel describes the data model.
type SourceGcsDataSourceModel struct {
- Configuration SourceGcs `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,42 +47,20 @@ func (r *SourceGcsDataSource) Schema(ctx context.Context, req datasource.SchemaR
MarkdownDescription: "SourceGcs DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "gcs_bucket": schema.StringAttribute{
- Computed: true,
- Description: `GCS bucket name`,
- },
- "gcs_path": schema.StringAttribute{
- Computed: true,
- Description: `GCS path to data`,
- },
- "service_account": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Google Cloud service account key in JSON format`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "gcs",
- ),
- },
- Description: `must be one of ["gcs"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_gcs_data_source_sdk.go b/internal/provider/source_gcs_data_source_sdk.go
old mode 100755
new mode 100644
index b3403d57e..c51abc78d
--- a/internal/provider/source_gcs_data_source_sdk.go
+++ b/internal/provider/source_gcs_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGcsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_gcs_resource.go b/internal/provider/source_gcs_resource.go
old mode 100755
new mode 100644
index ddfd647d0..7c065e5ef
--- a/internal/provider/source_gcs_resource.go
+++ b/internal/provider/source_gcs_resource.go
@@ -3,17 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +37,7 @@ type SourceGcsResource struct {
// SourceGcsResourceModel describes the resource data model.
type SourceGcsResourceModel struct {
Configuration SourceGcs `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -54,36 +57,206 @@ func (r *SourceGcsResource) Schema(ctx context.Context, req resource.SchemaReque
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "gcs_bucket": schema.StringAttribute{
+ "bucket": schema.StringAttribute{
Required: true,
- Description: `GCS bucket name`,
- },
- "gcs_path": schema.StringAttribute{
- Required: true,
- Description: `GCS path to data`,
+ Description: `Name of the GCS bucket where the file(s) exist.`,
},
"service_account": schema.StringAttribute{
Required: true,
Description: `Enter your Google Cloud service account key in JSON format`,
},
- "source_type": schema.StringAttribute{
- Required: true,
+ "start_date": schema.StringAttribute{
+ Optional: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.`,
Validators: []validator.String{
- stringvalidator.OneOf(
- "gcs",
- ),
+ validators.IsRFC3339(),
},
- Description: `must be one of ["gcs"]`,
+ },
+ "streams": schema.ListNestedAttribute{
+ Required: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "days_to_sync_if_history_is_full": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 3` + "\n" +
+ `When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.`,
+ },
+ "format": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "csv_format": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "delimiter": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: ","` + "\n" +
+ `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
+ },
+ "double_quote": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
+ },
+ "encoding": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "utf8"` + "\n" +
+ `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
+ },
+ "escape_char": schema.StringAttribute{
+ Optional: true,
+ Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`,
+ },
+ "false_values": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `A set of case-sensitive strings that should be interpreted as false values.`,
+ },
+ "header_definition": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "autogenerated": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ },
+ "from_csv": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ },
+ "user_provided": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "column_names": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ Description: `The column names that will be used while emitting the CSV records`,
+ },
+ },
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ },
+ },
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "inference_type": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["None", "Primitive Types Only"]; Default: "None"` + "\n" +
+ `How to infer the types of the columns. If none, inference default to strings.`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "None",
+ "Primitive Types Only",
+ ),
+ },
+ },
+ "null_values": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.`,
+ },
+ "quote_char": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "\""` + "\n" +
+ `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
+ },
+ "skip_rows_after_header": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `The number of rows to skip after the header row.`,
+ },
+ "skip_rows_before_header": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`,
+ },
+ "strings_can_be_null": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`,
+ },
+ "true_values": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `A set of case-sensitive strings that should be interpreted as true values.`,
+ },
+ },
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
+ },
+ },
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "globs": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.`,
+ },
+ "input_schema": schema.StringAttribute{
+ Optional: true,
+ Description: `The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.`,
+ },
+ "legacy_prefix": schema.StringAttribute{
+ Optional: true,
+ Description: `The path prefix configured in previous versions of the GCS connector. This option is deprecated in favor of a single glob.`,
+ },
+ "name": schema.StringAttribute{
+ Required: true,
+ Description: `The name of the stream.`,
+ },
+ "primary_key": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `The column or columns (for a composite key) that serves as the unique identifier of a record.`,
+ },
+ "schemaless": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `When enabled, syncs will not validate or structure records against the stream's schema.`,
+ },
+ "validation_policy": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["Emit Record", "Skip Record", "Wait for Discover"]; Default: "Emit Record"` + "\n" +
+ `The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "Emit Record",
+ "Skip Record",
+ "Wait for Discover",
+ ),
+ },
+ },
+ },
+ },
+ Description: `Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.`,
},
},
+ MarkdownDescription: `NOTE: When this Spec is changed, legacy_config_transformer.py must also be` + "\n" +
+ `modified to uptake the changes because it is responsible for converting` + "\n" +
+ `legacy GCS configs into file based configs using the File-Based CDK.`,
+ },
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
},
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +320,7 @@ func (r *SourceGcsResource) Create(ctx context.Context, req resource.CreateReque
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGcs(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +496,5 @@ func (r *SourceGcsResource) Delete(ctx context.Context, req resource.DeleteReque
}
func (r *SourceGcsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_gcs_resource_sdk.go b/internal/provider/source_gcs_resource_sdk.go
old mode 100755
new mode 100644
index 465b221e1..fabebde30
--- a/internal/provider/source_gcs_resource_sdk.go
+++ b/internal/provider/source_gcs_resource_sdk.go
@@ -3,22 +3,214 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
+ "time"
)
func (r *SourceGcsResourceModel) ToCreateSDKType() *shared.SourceGcsCreateRequest {
- gcsBucket := r.Configuration.GcsBucket.ValueString()
- gcsPath := r.Configuration.GcsPath.ValueString()
+ bucket := r.Configuration.Bucket.ValueString()
serviceAccount := r.Configuration.ServiceAccount.ValueString()
- sourceType := shared.SourceGcsGcs(r.Configuration.SourceType.ValueString())
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
+ var streams []shared.SourceGCSSourceGCSStreamConfig = nil
+ for _, streamsItem := range r.Configuration.Streams {
+ daysToSyncIfHistoryIsFull := new(int64)
+ if !streamsItem.DaysToSyncIfHistoryIsFull.IsUnknown() && !streamsItem.DaysToSyncIfHistoryIsFull.IsNull() {
+ *daysToSyncIfHistoryIsFull = streamsItem.DaysToSyncIfHistoryIsFull.ValueInt64()
+ } else {
+ daysToSyncIfHistoryIsFull = nil
+ }
+ var format shared.SourceGcsFormat
+ var sourceGcsCSVFormat *shared.SourceGcsCSVFormat
+ if streamsItem.Format.CSVFormat != nil {
+ delimiter := new(string)
+ if !streamsItem.Format.CSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.CSVFormat.Delimiter.IsNull() {
+ *delimiter = streamsItem.Format.CSVFormat.Delimiter.ValueString()
+ } else {
+ delimiter = nil
+ }
+ doubleQuote := new(bool)
+ if !streamsItem.Format.CSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.CSVFormat.DoubleQuote.IsNull() {
+ *doubleQuote = streamsItem.Format.CSVFormat.DoubleQuote.ValueBool()
+ } else {
+ doubleQuote = nil
+ }
+ encoding := new(string)
+ if !streamsItem.Format.CSVFormat.Encoding.IsUnknown() && !streamsItem.Format.CSVFormat.Encoding.IsNull() {
+ *encoding = streamsItem.Format.CSVFormat.Encoding.ValueString()
+ } else {
+ encoding = nil
+ }
+ escapeChar := new(string)
+ if !streamsItem.Format.CSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.CSVFormat.EscapeChar.IsNull() {
+ *escapeChar = streamsItem.Format.CSVFormat.EscapeChar.ValueString()
+ } else {
+ escapeChar = nil
+ }
+ var falseValues []string = nil
+ for _, falseValuesItem := range streamsItem.Format.CSVFormat.FalseValues {
+ falseValues = append(falseValues, falseValuesItem.ValueString())
+ }
+ var headerDefinition *shared.SourceGcsCSVHeaderDefinition
+ if streamsItem.Format.CSVFormat.HeaderDefinition != nil {
+ var sourceGcsFromCSV *shared.SourceGcsFromCSV
+ if streamsItem.Format.CSVFormat.HeaderDefinition.FromCSV != nil {
+ sourceGcsFromCSV = &shared.SourceGcsFromCSV{}
+ }
+ if sourceGcsFromCSV != nil {
+ headerDefinition = &shared.SourceGcsCSVHeaderDefinition{
+ SourceGcsFromCSV: sourceGcsFromCSV,
+ }
+ }
+ var sourceGcsAutogenerated *shared.SourceGcsAutogenerated
+ if streamsItem.Format.CSVFormat.HeaderDefinition.Autogenerated != nil {
+ sourceGcsAutogenerated = &shared.SourceGcsAutogenerated{}
+ }
+ if sourceGcsAutogenerated != nil {
+ headerDefinition = &shared.SourceGcsCSVHeaderDefinition{
+ SourceGcsAutogenerated: sourceGcsAutogenerated,
+ }
+ }
+ var sourceGcsUserProvided *shared.SourceGcsUserProvided
+ if streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided != nil {
+ var columnNames []string = nil
+ for _, columnNamesItem := range streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided.ColumnNames {
+ columnNames = append(columnNames, columnNamesItem.ValueString())
+ }
+ sourceGcsUserProvided = &shared.SourceGcsUserProvided{
+ ColumnNames: columnNames,
+ }
+ }
+ if sourceGcsUserProvided != nil {
+ headerDefinition = &shared.SourceGcsCSVHeaderDefinition{
+ SourceGcsUserProvided: sourceGcsUserProvided,
+ }
+ }
+ }
+ inferenceType := new(shared.SourceGcsInferenceType)
+ if !streamsItem.Format.CSVFormat.InferenceType.IsUnknown() && !streamsItem.Format.CSVFormat.InferenceType.IsNull() {
+ *inferenceType = shared.SourceGcsInferenceType(streamsItem.Format.CSVFormat.InferenceType.ValueString())
+ } else {
+ inferenceType = nil
+ }
+ var nullValues []string = nil
+ for _, nullValuesItem := range streamsItem.Format.CSVFormat.NullValues {
+ nullValues = append(nullValues, nullValuesItem.ValueString())
+ }
+ quoteChar := new(string)
+ if !streamsItem.Format.CSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.CSVFormat.QuoteChar.IsNull() {
+ *quoteChar = streamsItem.Format.CSVFormat.QuoteChar.ValueString()
+ } else {
+ quoteChar = nil
+ }
+ skipRowsAfterHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsNull() {
+ *skipRowsAfterHeader = streamsItem.Format.CSVFormat.SkipRowsAfterHeader.ValueInt64()
+ } else {
+ skipRowsAfterHeader = nil
+ }
+ skipRowsBeforeHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsNull() {
+ *skipRowsBeforeHeader = streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.ValueInt64()
+ } else {
+ skipRowsBeforeHeader = nil
+ }
+ stringsCanBeNull := new(bool)
+ if !streamsItem.Format.CSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.CSVFormat.StringsCanBeNull.IsNull() {
+ *stringsCanBeNull = streamsItem.Format.CSVFormat.StringsCanBeNull.ValueBool()
+ } else {
+ stringsCanBeNull = nil
+ }
+ var trueValues []string = nil
+ for _, trueValuesItem := range streamsItem.Format.CSVFormat.TrueValues {
+ trueValues = append(trueValues, trueValuesItem.ValueString())
+ }
+ sourceGcsCSVFormat = &shared.SourceGcsCSVFormat{
+ Delimiter: delimiter,
+ DoubleQuote: doubleQuote,
+ Encoding: encoding,
+ EscapeChar: escapeChar,
+ FalseValues: falseValues,
+ HeaderDefinition: headerDefinition,
+ InferenceType: inferenceType,
+ NullValues: nullValues,
+ QuoteChar: quoteChar,
+ SkipRowsAfterHeader: skipRowsAfterHeader,
+ SkipRowsBeforeHeader: skipRowsBeforeHeader,
+ StringsCanBeNull: stringsCanBeNull,
+ TrueValues: trueValues,
+ }
+ }
+ if sourceGcsCSVFormat != nil {
+ format = shared.SourceGcsFormat{
+ SourceGcsCSVFormat: sourceGcsCSVFormat,
+ }
+ }
+ var globs []string = nil
+ for _, globsItem := range streamsItem.Globs {
+ globs = append(globs, globsItem.ValueString())
+ }
+ inputSchema := new(string)
+ if !streamsItem.InputSchema.IsUnknown() && !streamsItem.InputSchema.IsNull() {
+ *inputSchema = streamsItem.InputSchema.ValueString()
+ } else {
+ inputSchema = nil
+ }
+ legacyPrefix := new(string)
+ if !streamsItem.LegacyPrefix.IsUnknown() && !streamsItem.LegacyPrefix.IsNull() {
+ *legacyPrefix = streamsItem.LegacyPrefix.ValueString()
+ } else {
+ legacyPrefix = nil
+ }
+ name := streamsItem.Name.ValueString()
+ primaryKey := new(string)
+ if !streamsItem.PrimaryKey.IsUnknown() && !streamsItem.PrimaryKey.IsNull() {
+ *primaryKey = streamsItem.PrimaryKey.ValueString()
+ } else {
+ primaryKey = nil
+ }
+ schemaless := new(bool)
+ if !streamsItem.Schemaless.IsUnknown() && !streamsItem.Schemaless.IsNull() {
+ *schemaless = streamsItem.Schemaless.ValueBool()
+ } else {
+ schemaless = nil
+ }
+ validationPolicy := new(shared.SourceGcsValidationPolicy)
+ if !streamsItem.ValidationPolicy.IsUnknown() && !streamsItem.ValidationPolicy.IsNull() {
+ *validationPolicy = shared.SourceGcsValidationPolicy(streamsItem.ValidationPolicy.ValueString())
+ } else {
+ validationPolicy = nil
+ }
+ streams = append(streams, shared.SourceGCSSourceGCSStreamConfig{
+ DaysToSyncIfHistoryIsFull: daysToSyncIfHistoryIsFull,
+ Format: format,
+ Globs: globs,
+ InputSchema: inputSchema,
+ LegacyPrefix: legacyPrefix,
+ Name: name,
+ PrimaryKey: primaryKey,
+ Schemaless: schemaless,
+ ValidationPolicy: validationPolicy,
+ })
+ }
configuration := shared.SourceGcs{
- GcsBucket: gcsBucket,
- GcsPath: gcsPath,
+ Bucket: bucket,
ServiceAccount: serviceAccount,
- SourceType: sourceType,
+ StartDate: startDate,
+ Streams: streams,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
- name := r.Name.ValueString()
+ name1 := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
*secretID = r.SecretID.ValueString()
@@ -28,7 +220,8 @@ func (r *SourceGcsResourceModel) ToCreateSDKType() *shared.SourceGcsCreateReques
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGcsCreateRequest{
Configuration: configuration,
- Name: name,
+ DefinitionID: definitionID,
+ Name: name1,
SecretID: secretID,
WorkspaceID: workspaceID,
}
@@ -41,19 +234,206 @@ func (r *SourceGcsResourceModel) ToGetSDKType() *shared.SourceGcsCreateRequest {
}
func (r *SourceGcsResourceModel) ToUpdateSDKType() *shared.SourceGcsPutRequest {
- gcsBucket := r.Configuration.GcsBucket.ValueString()
- gcsPath := r.Configuration.GcsPath.ValueString()
+ bucket := r.Configuration.Bucket.ValueString()
serviceAccount := r.Configuration.ServiceAccount.ValueString()
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
+ var streams []shared.SourceGCSStreamConfig = nil
+ for _, streamsItem := range r.Configuration.Streams {
+ daysToSyncIfHistoryIsFull := new(int64)
+ if !streamsItem.DaysToSyncIfHistoryIsFull.IsUnknown() && !streamsItem.DaysToSyncIfHistoryIsFull.IsNull() {
+ *daysToSyncIfHistoryIsFull = streamsItem.DaysToSyncIfHistoryIsFull.ValueInt64()
+ } else {
+ daysToSyncIfHistoryIsFull = nil
+ }
+ var format shared.SourceGcsUpdateFormat
+ var sourceGcsUpdateCSVFormat *shared.SourceGcsUpdateCSVFormat
+ if streamsItem.Format.CSVFormat != nil {
+ delimiter := new(string)
+ if !streamsItem.Format.CSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.CSVFormat.Delimiter.IsNull() {
+ *delimiter = streamsItem.Format.CSVFormat.Delimiter.ValueString()
+ } else {
+ delimiter = nil
+ }
+ doubleQuote := new(bool)
+ if !streamsItem.Format.CSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.CSVFormat.DoubleQuote.IsNull() {
+ *doubleQuote = streamsItem.Format.CSVFormat.DoubleQuote.ValueBool()
+ } else {
+ doubleQuote = nil
+ }
+ encoding := new(string)
+ if !streamsItem.Format.CSVFormat.Encoding.IsUnknown() && !streamsItem.Format.CSVFormat.Encoding.IsNull() {
+ *encoding = streamsItem.Format.CSVFormat.Encoding.ValueString()
+ } else {
+ encoding = nil
+ }
+ escapeChar := new(string)
+ if !streamsItem.Format.CSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.CSVFormat.EscapeChar.IsNull() {
+ *escapeChar = streamsItem.Format.CSVFormat.EscapeChar.ValueString()
+ } else {
+ escapeChar = nil
+ }
+ var falseValues []string = nil
+ for _, falseValuesItem := range streamsItem.Format.CSVFormat.FalseValues {
+ falseValues = append(falseValues, falseValuesItem.ValueString())
+ }
+ var headerDefinition *shared.SourceGcsUpdateCSVHeaderDefinition
+ if streamsItem.Format.CSVFormat.HeaderDefinition != nil {
+ var sourceGcsUpdateFromCSV *shared.SourceGcsUpdateFromCSV
+ if streamsItem.Format.CSVFormat.HeaderDefinition.FromCSV != nil {
+ sourceGcsUpdateFromCSV = &shared.SourceGcsUpdateFromCSV{}
+ }
+ if sourceGcsUpdateFromCSV != nil {
+ headerDefinition = &shared.SourceGcsUpdateCSVHeaderDefinition{
+ SourceGcsUpdateFromCSV: sourceGcsUpdateFromCSV,
+ }
+ }
+ var sourceGcsUpdateAutogenerated *shared.SourceGcsUpdateAutogenerated
+ if streamsItem.Format.CSVFormat.HeaderDefinition.Autogenerated != nil {
+ sourceGcsUpdateAutogenerated = &shared.SourceGcsUpdateAutogenerated{}
+ }
+ if sourceGcsUpdateAutogenerated != nil {
+ headerDefinition = &shared.SourceGcsUpdateCSVHeaderDefinition{
+ SourceGcsUpdateAutogenerated: sourceGcsUpdateAutogenerated,
+ }
+ }
+ var sourceGcsUpdateUserProvided *shared.SourceGcsUpdateUserProvided
+ if streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided != nil {
+ var columnNames []string = nil
+ for _, columnNamesItem := range streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided.ColumnNames {
+ columnNames = append(columnNames, columnNamesItem.ValueString())
+ }
+ sourceGcsUpdateUserProvided = &shared.SourceGcsUpdateUserProvided{
+ ColumnNames: columnNames,
+ }
+ }
+ if sourceGcsUpdateUserProvided != nil {
+ headerDefinition = &shared.SourceGcsUpdateCSVHeaderDefinition{
+ SourceGcsUpdateUserProvided: sourceGcsUpdateUserProvided,
+ }
+ }
+ }
+ inferenceType := new(shared.SourceGcsUpdateInferenceType)
+ if !streamsItem.Format.CSVFormat.InferenceType.IsUnknown() && !streamsItem.Format.CSVFormat.InferenceType.IsNull() {
+ *inferenceType = shared.SourceGcsUpdateInferenceType(streamsItem.Format.CSVFormat.InferenceType.ValueString())
+ } else {
+ inferenceType = nil
+ }
+ var nullValues []string = nil
+ for _, nullValuesItem := range streamsItem.Format.CSVFormat.NullValues {
+ nullValues = append(nullValues, nullValuesItem.ValueString())
+ }
+ quoteChar := new(string)
+ if !streamsItem.Format.CSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.CSVFormat.QuoteChar.IsNull() {
+ *quoteChar = streamsItem.Format.CSVFormat.QuoteChar.ValueString()
+ } else {
+ quoteChar = nil
+ }
+ skipRowsAfterHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsNull() {
+ *skipRowsAfterHeader = streamsItem.Format.CSVFormat.SkipRowsAfterHeader.ValueInt64()
+ } else {
+ skipRowsAfterHeader = nil
+ }
+ skipRowsBeforeHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsNull() {
+ *skipRowsBeforeHeader = streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.ValueInt64()
+ } else {
+ skipRowsBeforeHeader = nil
+ }
+ stringsCanBeNull := new(bool)
+ if !streamsItem.Format.CSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.CSVFormat.StringsCanBeNull.IsNull() {
+ *stringsCanBeNull = streamsItem.Format.CSVFormat.StringsCanBeNull.ValueBool()
+ } else {
+ stringsCanBeNull = nil
+ }
+ var trueValues []string = nil
+ for _, trueValuesItem := range streamsItem.Format.CSVFormat.TrueValues {
+ trueValues = append(trueValues, trueValuesItem.ValueString())
+ }
+ sourceGcsUpdateCSVFormat = &shared.SourceGcsUpdateCSVFormat{
+ Delimiter: delimiter,
+ DoubleQuote: doubleQuote,
+ Encoding: encoding,
+ EscapeChar: escapeChar,
+ FalseValues: falseValues,
+ HeaderDefinition: headerDefinition,
+ InferenceType: inferenceType,
+ NullValues: nullValues,
+ QuoteChar: quoteChar,
+ SkipRowsAfterHeader: skipRowsAfterHeader,
+ SkipRowsBeforeHeader: skipRowsBeforeHeader,
+ StringsCanBeNull: stringsCanBeNull,
+ TrueValues: trueValues,
+ }
+ }
+ if sourceGcsUpdateCSVFormat != nil {
+ format = shared.SourceGcsUpdateFormat{
+ SourceGcsUpdateCSVFormat: sourceGcsUpdateCSVFormat,
+ }
+ }
+ var globs []string = nil
+ for _, globsItem := range streamsItem.Globs {
+ globs = append(globs, globsItem.ValueString())
+ }
+ inputSchema := new(string)
+ if !streamsItem.InputSchema.IsUnknown() && !streamsItem.InputSchema.IsNull() {
+ *inputSchema = streamsItem.InputSchema.ValueString()
+ } else {
+ inputSchema = nil
+ }
+ legacyPrefix := new(string)
+ if !streamsItem.LegacyPrefix.IsUnknown() && !streamsItem.LegacyPrefix.IsNull() {
+ *legacyPrefix = streamsItem.LegacyPrefix.ValueString()
+ } else {
+ legacyPrefix = nil
+ }
+ name := streamsItem.Name.ValueString()
+ primaryKey := new(string)
+ if !streamsItem.PrimaryKey.IsUnknown() && !streamsItem.PrimaryKey.IsNull() {
+ *primaryKey = streamsItem.PrimaryKey.ValueString()
+ } else {
+ primaryKey = nil
+ }
+ schemaless := new(bool)
+ if !streamsItem.Schemaless.IsUnknown() && !streamsItem.Schemaless.IsNull() {
+ *schemaless = streamsItem.Schemaless.ValueBool()
+ } else {
+ schemaless = nil
+ }
+ validationPolicy := new(shared.SourceGcsUpdateValidationPolicy)
+ if !streamsItem.ValidationPolicy.IsUnknown() && !streamsItem.ValidationPolicy.IsNull() {
+ *validationPolicy = shared.SourceGcsUpdateValidationPolicy(streamsItem.ValidationPolicy.ValueString())
+ } else {
+ validationPolicy = nil
+ }
+ streams = append(streams, shared.SourceGCSStreamConfig{
+ DaysToSyncIfHistoryIsFull: daysToSyncIfHistoryIsFull,
+ Format: format,
+ Globs: globs,
+ InputSchema: inputSchema,
+ LegacyPrefix: legacyPrefix,
+ Name: name,
+ PrimaryKey: primaryKey,
+ Schemaless: schemaless,
+ ValidationPolicy: validationPolicy,
+ })
+ }
configuration := shared.SourceGcsUpdate{
- GcsBucket: gcsBucket,
- GcsPath: gcsPath,
+ Bucket: bucket,
ServiceAccount: serviceAccount,
+ StartDate: startDate,
+ Streams: streams,
}
- name := r.Name.ValueString()
+ name1 := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGcsPutRequest{
Configuration: configuration,
- Name: name,
+ Name: name1,
WorkspaceID: workspaceID,
}
return &out
diff --git a/internal/provider/source_getlago_data_source.go b/internal/provider/source_getlago_data_source.go
old mode 100755
new mode 100644
index 648e64108..2bc89347b
--- a/internal/provider/source_getlago_data_source.go
+++ b/internal/provider/source_getlago_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceGetlagoDataSource struct {
// SourceGetlagoDataSourceModel describes the data model.
type SourceGetlagoDataSourceModel struct {
- Configuration SourceGetlago `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceGetlagoDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceGetlago DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Your API Key. See here.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "getlago",
- ),
- },
- Description: `must be one of ["getlago"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_getlago_data_source_sdk.go b/internal/provider/source_getlago_data_source_sdk.go
old mode 100755
new mode 100644
index 0963f47f1..23e1c7369
--- a/internal/provider/source_getlago_data_source_sdk.go
+++ b/internal/provider/source_getlago_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGetlagoDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_getlago_resource.go b/internal/provider/source_getlago_resource.go
old mode 100755
new mode 100644
index 2d3970378..cbcf3ab4b
--- a/internal/provider/source_getlago_resource.go
+++ b/internal/provider/source_getlago_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceGetlagoResource struct {
// SourceGetlagoResourceModel describes the resource data model.
type SourceGetlagoResourceModel struct {
Configuration SourceGetlago `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,26 +56,34 @@ func (r *SourceGetlagoResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your API Key. See here.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "getlago",
- ),
- },
- Description: `must be one of ["getlago"]`,
+ "api_url": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "https://api.getlago.com/api/v1"` + "\n" +
+ `Your Lago API URL`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +147,7 @@ func (r *SourceGetlagoResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGetlago(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +323,5 @@ func (r *SourceGetlagoResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceGetlagoResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_getlago_resource_sdk.go b/internal/provider/source_getlago_resource_sdk.go
old mode 100755
new mode 100644
index 39038e575..cc01f1baf
--- a/internal/provider/source_getlago_resource_sdk.go
+++ b/internal/provider/source_getlago_resource_sdk.go
@@ -3,16 +3,27 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGetlagoResourceModel) ToCreateSDKType() *shared.SourceGetlagoCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceGetlagoGetlago(r.Configuration.SourceType.ValueString())
+ apiURL := new(string)
+ if !r.Configuration.APIURL.IsUnknown() && !r.Configuration.APIURL.IsNull() {
+ *apiURL = r.Configuration.APIURL.ValueString()
+ } else {
+ apiURL = nil
+ }
configuration := shared.SourceGetlago{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ APIURL: apiURL,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +35,7 @@ func (r *SourceGetlagoResourceModel) ToCreateSDKType() *shared.SourceGetlagoCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGetlagoCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -38,8 +50,15 @@ func (r *SourceGetlagoResourceModel) ToGetSDKType() *shared.SourceGetlagoCreateR
func (r *SourceGetlagoResourceModel) ToUpdateSDKType() *shared.SourceGetlagoPutRequest {
apiKey := r.Configuration.APIKey.ValueString()
+ apiURL := new(string)
+ if !r.Configuration.APIURL.IsUnknown() && !r.Configuration.APIURL.IsNull() {
+ *apiURL = r.Configuration.APIURL.ValueString()
+ } else {
+ apiURL = nil
+ }
configuration := shared.SourceGetlagoUpdate{
APIKey: apiKey,
+ APIURL: apiURL,
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
diff --git a/internal/provider/source_github_data_source.go b/internal/provider/source_github_data_source.go
old mode 100755
new mode 100644
index 5f50b7b0a..594551355
--- a/internal/provider/source_github_data_source.go
+++ b/internal/provider/source_github_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceGithubDataSource struct {
// SourceGithubDataSourceModel describes the data model.
type SourceGithubDataSourceModel struct {
- Configuration SourceGithub `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,150 +47,20 @@ func (r *SourceGithubDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceGithub DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "branch": schema.StringAttribute{
- Computed: true,
- Description: `Space-delimited list of GitHub repository branches to pull commits for, e.g. ` + "`" + `airbytehq/airbyte/master` + "`" + `. If no branches are specified for a repository, the default branch will be pulled.`,
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_github_authentication_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `OAuth access token`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `OAuth Client Id`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `OAuth Client secret`,
- },
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- Description: `must be one of ["OAuth Credentials"]`,
- },
- },
- Description: `Choose how to authenticate to GitHub`,
- },
- "source_github_authentication_personal_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "PAT Credentials",
- ),
- },
- Description: `must be one of ["PAT Credentials"]`,
- },
- "personal_access_token": schema.StringAttribute{
- Computed: true,
- Description: `Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with ","`,
- },
- },
- Description: `Choose how to authenticate to GitHub`,
- },
- "source_github_update_authentication_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `OAuth access token`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `OAuth Client Id`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `OAuth Client secret`,
- },
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- Description: `must be one of ["OAuth Credentials"]`,
- },
- },
- Description: `Choose how to authenticate to GitHub`,
- },
- "source_github_update_authentication_personal_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "PAT Credentials",
- ),
- },
- Description: `must be one of ["PAT Credentials"]`,
- },
- "personal_access_token": schema.StringAttribute{
- Computed: true,
- Description: `Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with ","`,
- },
- },
- Description: `Choose how to authenticate to GitHub`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Choose how to authenticate to GitHub`,
- },
- "repository": schema.StringAttribute{
- Computed: true,
- Description: `Space-delimited list of GitHub organizations/repositories, e.g. ` + "`" + `airbytehq/airbyte` + "`" + ` for single repository, ` + "`" + `airbytehq/*` + "`" + ` for get all repositories from organization and ` + "`" + `airbytehq/airbyte airbytehq/another-repo` + "`" + ` for multiple repositories.`,
- },
- "requests_per_hour": schema.Int64Attribute{
- Computed: true,
- Description: `The GitHub API allows for a maximum of 5000 requests per hour (15000 for Github Enterprise). You can specify a lower value to limit your use of the API quota.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "github",
- ),
- },
- Description: `must be one of ["github"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_github_data_source_sdk.go b/internal/provider/source_github_data_source_sdk.go
old mode 100755
new mode 100644
index 430e69473..a5faaa330
--- a/internal/provider/source_github_data_source_sdk.go
+++ b/internal/provider/source_github_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGithubDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_github_resource.go b/internal/provider/source_github_resource.go
old mode 100755
new mode 100644
index eca9bf75a..1ea0d9612
--- a/internal/provider/source_github_resource.go
+++ b/internal/provider/source_github_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceGithubResource struct {
// SourceGithubResourceModel describes the resource data model.
type SourceGithubResourceModel struct {
Configuration SourceGithub `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -55,18 +56,29 @@ func (r *SourceGithubResource) Schema(ctx context.Context, req resource.SchemaRe
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
+ "api_url": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "https://api.github.com/"` + "\n" +
+ `Please enter your basic URL from self-hosted GitHub instance or leave it empty to use GitHub.`,
+ },
"branch": schema.StringAttribute{
Optional: true,
- Description: `Space-delimited list of GitHub repository branches to pull commits for, e.g. ` + "`" + `airbytehq/airbyte/master` + "`" + `. If no branches are specified for a repository, the default branch will be pulled.`,
+				Description: `(DEPRECATED) Space-delimited list of GitHub repository branches to pull commits for, e.g. ` + "`" + `airbytehq/airbyte/master` + "`" + `. If no branches are specified for a repository, the default branch will be pulled.`,
+ },
+ "branches": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `List of GitHub repository branches to pull commits for, e.g. ` + "`" + `airbytehq/airbyte/master` + "`" + `. If no branches are specified for a repository, the default branch will be pulled.`,
},
"credentials": schema.SingleNestedAttribute{
- Optional: true,
+ Required: true,
Attributes: map[string]schema.Attribute{
- "source_github_authentication_o_auth": schema.SingleNestedAttribute{
+ "o_auth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OAuth access token`,
},
"client_id": schema.StringAttribute{
@@ -77,122 +89,66 @@ func (r *SourceGithubResource) Schema(ctx context.Context, req resource.SchemaRe
Optional: true,
Description: `OAuth Client secret`,
},
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- Description: `must be one of ["OAuth Credentials"]`,
- },
},
Description: `Choose how to authenticate to GitHub`,
},
- "source_github_authentication_personal_access_token": schema.SingleNestedAttribute{
+ "personal_access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "PAT Credentials",
- ),
- },
- Description: `must be one of ["PAT Credentials"]`,
- },
- "personal_access_token": schema.StringAttribute{
- Required: true,
- Description: `Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with ","`,
- },
- },
- Description: `Choose how to authenticate to GitHub`,
- },
- "source_github_update_authentication_o_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `OAuth access token`,
- },
- "client_id": schema.StringAttribute{
- Optional: true,
- Description: `OAuth Client Id`,
- },
- "client_secret": schema.StringAttribute{
- Optional: true,
- Description: `OAuth Client secret`,
- },
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- Description: `must be one of ["OAuth Credentials"]`,
- },
- },
- Description: `Choose how to authenticate to GitHub`,
- },
- "source_github_update_authentication_personal_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "PAT Credentials",
- ),
- },
- Description: `must be one of ["PAT Credentials"]`,
- },
"personal_access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with ","`,
},
},
Description: `Choose how to authenticate to GitHub`,
},
},
+ Description: `Choose how to authenticate to GitHub`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Choose how to authenticate to GitHub`,
},
- "repository": schema.StringAttribute{
+ "repositories": schema.ListAttribute{
Required: true,
- Description: `Space-delimited list of GitHub organizations/repositories, e.g. ` + "`" + `airbytehq/airbyte` + "`" + ` for single repository, ` + "`" + `airbytehq/*` + "`" + ` for get all repositories from organization and ` + "`" + `airbytehq/airbyte airbytehq/another-repo` + "`" + ` for multiple repositories.`,
+ ElementType: types.StringType,
+ Description: `List of GitHub organizations/repositories, e.g. ` + "`" + `airbytehq/airbyte` + "`" + ` for single repository, ` + "`" + `airbytehq/*` + "`" + ` for get all repositories from organization and ` + "`" + `airbytehq/airbyte airbytehq/another-repo` + "`" + ` for multiple repositories.`,
+ },
+ "repository": schema.StringAttribute{
+ Optional: true,
+				Description: `(DEPRECATED) Space-delimited list of GitHub organizations/repositories, e.g. ` + "`" + `airbytehq/airbyte` + "`" + ` for single repository, ` + "`" + `airbytehq/*` + "`" + ` for get all repositories from organization and ` + "`" + `airbytehq/airbyte airbytehq/another-repo` + "`" + ` for multiple repositories.`,
},
"requests_per_hour": schema.Int64Attribute{
Optional: true,
Description: `The GitHub API allows for a maximum of 5000 requests per hour (15000 for Github Enterprise). You can specify a lower value to limit your use of the API quota.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "github",
- ),
- },
- Description: `must be one of ["github"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. If the date is not set, all data will be replicated. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -256,7 +212,7 @@ func (r *SourceGithubResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGithub(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -432,5 +388,5 @@ func (r *SourceGithubResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceGithubResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_github_resource_sdk.go b/internal/provider/source_github_resource_sdk.go
old mode 100755
new mode 100644
index 1a3e07ee0..5435a57be
--- a/internal/provider/source_github_resource_sdk.go
+++ b/internal/provider/source_github_resource_sdk.go
@@ -3,90 +3,105 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceGithubResourceModel) ToCreateSDKType() *shared.SourceGithubCreateRequest {
+ apiURL := new(string)
+ if !r.Configuration.APIURL.IsUnknown() && !r.Configuration.APIURL.IsNull() {
+ *apiURL = r.Configuration.APIURL.ValueString()
+ } else {
+ apiURL = nil
+ }
branch := new(string)
if !r.Configuration.Branch.IsUnknown() && !r.Configuration.Branch.IsNull() {
*branch = r.Configuration.Branch.ValueString()
} else {
branch = nil
}
- var credentials *shared.SourceGithubAuthentication
- if r.Configuration.Credentials != nil {
- var sourceGithubAuthenticationOAuth *shared.SourceGithubAuthenticationOAuth
- if r.Configuration.Credentials.SourceGithubAuthenticationOAuth != nil {
- accessToken := r.Configuration.Credentials.SourceGithubAuthenticationOAuth.AccessToken.ValueString()
- clientID := new(string)
- if !r.Configuration.Credentials.SourceGithubAuthenticationOAuth.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceGithubAuthenticationOAuth.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceGithubAuthenticationOAuth.ClientID.ValueString()
- } else {
- clientID = nil
- }
- clientSecret := new(string)
- if !r.Configuration.Credentials.SourceGithubAuthenticationOAuth.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceGithubAuthenticationOAuth.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceGithubAuthenticationOAuth.ClientSecret.ValueString()
- } else {
- clientSecret = nil
- }
- optionTitle := new(shared.SourceGithubAuthenticationOAuthOptionTitle)
- if !r.Configuration.Credentials.SourceGithubAuthenticationOAuth.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceGithubAuthenticationOAuth.OptionTitle.IsNull() {
- *optionTitle = shared.SourceGithubAuthenticationOAuthOptionTitle(r.Configuration.Credentials.SourceGithubAuthenticationOAuth.OptionTitle.ValueString())
- } else {
- optionTitle = nil
- }
- sourceGithubAuthenticationOAuth = &shared.SourceGithubAuthenticationOAuth{
- AccessToken: accessToken,
- ClientID: clientID,
- ClientSecret: clientSecret,
- OptionTitle: optionTitle,
- }
+ var branches []string = nil
+ for _, branchesItem := range r.Configuration.Branches {
+ branches = append(branches, branchesItem.ValueString())
+ }
+ var credentials shared.SourceGithubAuthentication
+ var sourceGithubOAuth *shared.SourceGithubOAuth
+ if r.Configuration.Credentials.OAuth != nil {
+ accessToken := r.Configuration.Credentials.OAuth.AccessToken.ValueString()
+ clientID := new(string)
+ if !r.Configuration.Credentials.OAuth.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth.ClientID.ValueString()
+ } else {
+ clientID = nil
+ }
+ clientSecret := new(string)
+ if !r.Configuration.Credentials.OAuth.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth.ClientSecret.ValueString()
+ } else {
+ clientSecret = nil
}
- if sourceGithubAuthenticationOAuth != nil {
- credentials = &shared.SourceGithubAuthentication{
- SourceGithubAuthenticationOAuth: sourceGithubAuthenticationOAuth,
- }
+ sourceGithubOAuth = &shared.SourceGithubOAuth{
+ AccessToken: accessToken,
+ ClientID: clientID,
+ ClientSecret: clientSecret,
}
- var sourceGithubAuthenticationPersonalAccessToken *shared.SourceGithubAuthenticationPersonalAccessToken
- if r.Configuration.Credentials.SourceGithubAuthenticationPersonalAccessToken != nil {
- optionTitle1 := new(shared.SourceGithubAuthenticationPersonalAccessTokenOptionTitle)
- if !r.Configuration.Credentials.SourceGithubAuthenticationPersonalAccessToken.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceGithubAuthenticationPersonalAccessToken.OptionTitle.IsNull() {
- *optionTitle1 = shared.SourceGithubAuthenticationPersonalAccessTokenOptionTitle(r.Configuration.Credentials.SourceGithubAuthenticationPersonalAccessToken.OptionTitle.ValueString())
- } else {
- optionTitle1 = nil
- }
- personalAccessToken := r.Configuration.Credentials.SourceGithubAuthenticationPersonalAccessToken.PersonalAccessToken.ValueString()
- sourceGithubAuthenticationPersonalAccessToken = &shared.SourceGithubAuthenticationPersonalAccessToken{
- OptionTitle: optionTitle1,
- PersonalAccessToken: personalAccessToken,
- }
+ }
+ if sourceGithubOAuth != nil {
+ credentials = shared.SourceGithubAuthentication{
+ SourceGithubOAuth: sourceGithubOAuth,
+ }
+ }
+ var sourceGithubPersonalAccessToken *shared.SourceGithubPersonalAccessToken
+ if r.Configuration.Credentials.PersonalAccessToken != nil {
+ personalAccessToken := r.Configuration.Credentials.PersonalAccessToken.PersonalAccessToken.ValueString()
+ sourceGithubPersonalAccessToken = &shared.SourceGithubPersonalAccessToken{
+ PersonalAccessToken: personalAccessToken,
}
- if sourceGithubAuthenticationPersonalAccessToken != nil {
- credentials = &shared.SourceGithubAuthentication{
- SourceGithubAuthenticationPersonalAccessToken: sourceGithubAuthenticationPersonalAccessToken,
- }
+ }
+ if sourceGithubPersonalAccessToken != nil {
+ credentials = shared.SourceGithubAuthentication{
+ SourceGithubPersonalAccessToken: sourceGithubPersonalAccessToken,
}
}
- repository := r.Configuration.Repository.ValueString()
+ var repositories []string = nil
+ for _, repositoriesItem := range r.Configuration.Repositories {
+ repositories = append(repositories, repositoriesItem.ValueString())
+ }
+ repository := new(string)
+ if !r.Configuration.Repository.IsUnknown() && !r.Configuration.Repository.IsNull() {
+ *repository = r.Configuration.Repository.ValueString()
+ } else {
+ repository = nil
+ }
requestsPerHour := new(int64)
if !r.Configuration.RequestsPerHour.IsUnknown() && !r.Configuration.RequestsPerHour.IsNull() {
*requestsPerHour = r.Configuration.RequestsPerHour.ValueInt64()
} else {
requestsPerHour = nil
}
- sourceType := shared.SourceGithubGithub(r.Configuration.SourceType.ValueString())
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceGithub{
+ APIURL: apiURL,
Branch: branch,
+ Branches: branches,
Credentials: credentials,
+ Repositories: repositories,
Repository: repository,
RequestsPerHour: requestsPerHour,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -97,6 +112,7 @@ func (r *SourceGithubResourceModel) ToCreateSDKType() *shared.SourceGithubCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGithubCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -110,78 +126,89 @@ func (r *SourceGithubResourceModel) ToGetSDKType() *shared.SourceGithubCreateReq
}
func (r *SourceGithubResourceModel) ToUpdateSDKType() *shared.SourceGithubPutRequest {
+ apiURL := new(string)
+ if !r.Configuration.APIURL.IsUnknown() && !r.Configuration.APIURL.IsNull() {
+ *apiURL = r.Configuration.APIURL.ValueString()
+ } else {
+ apiURL = nil
+ }
branch := new(string)
if !r.Configuration.Branch.IsUnknown() && !r.Configuration.Branch.IsNull() {
*branch = r.Configuration.Branch.ValueString()
} else {
branch = nil
}
- var credentials *shared.SourceGithubUpdateAuthentication
- if r.Configuration.Credentials != nil {
- var sourceGithubUpdateAuthenticationOAuth *shared.SourceGithubUpdateAuthenticationOAuth
- if r.Configuration.Credentials.SourceGithubUpdateAuthenticationOAuth != nil {
- accessToken := r.Configuration.Credentials.SourceGithubUpdateAuthenticationOAuth.AccessToken.ValueString()
- clientID := new(string)
- if !r.Configuration.Credentials.SourceGithubUpdateAuthenticationOAuth.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceGithubUpdateAuthenticationOAuth.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceGithubUpdateAuthenticationOAuth.ClientID.ValueString()
- } else {
- clientID = nil
- }
- clientSecret := new(string)
- if !r.Configuration.Credentials.SourceGithubUpdateAuthenticationOAuth.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceGithubUpdateAuthenticationOAuth.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceGithubUpdateAuthenticationOAuth.ClientSecret.ValueString()
- } else {
- clientSecret = nil
- }
- optionTitle := new(shared.SourceGithubUpdateAuthenticationOAuthOptionTitle)
- if !r.Configuration.Credentials.SourceGithubUpdateAuthenticationOAuth.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceGithubUpdateAuthenticationOAuth.OptionTitle.IsNull() {
- *optionTitle = shared.SourceGithubUpdateAuthenticationOAuthOptionTitle(r.Configuration.Credentials.SourceGithubUpdateAuthenticationOAuth.OptionTitle.ValueString())
- } else {
- optionTitle = nil
- }
- sourceGithubUpdateAuthenticationOAuth = &shared.SourceGithubUpdateAuthenticationOAuth{
- AccessToken: accessToken,
- ClientID: clientID,
- ClientSecret: clientSecret,
- OptionTitle: optionTitle,
- }
+ var branches []string = nil
+ for _, branchesItem := range r.Configuration.Branches {
+ branches = append(branches, branchesItem.ValueString())
+ }
+ var credentials shared.SourceGithubUpdateAuthentication
+ var oAuth *shared.OAuth
+ if r.Configuration.Credentials.OAuth != nil {
+ accessToken := r.Configuration.Credentials.OAuth.AccessToken.ValueString()
+ clientID := new(string)
+ if !r.Configuration.Credentials.OAuth.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth.ClientID.ValueString()
+ } else {
+ clientID = nil
}
- if sourceGithubUpdateAuthenticationOAuth != nil {
- credentials = &shared.SourceGithubUpdateAuthentication{
- SourceGithubUpdateAuthenticationOAuth: sourceGithubUpdateAuthenticationOAuth,
- }
+ clientSecret := new(string)
+ if !r.Configuration.Credentials.OAuth.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth.ClientSecret.ValueString()
+ } else {
+ clientSecret = nil
+ }
+ oAuth = &shared.OAuth{
+ AccessToken: accessToken,
+ ClientID: clientID,
+ ClientSecret: clientSecret,
+ }
+ }
+ if oAuth != nil {
+ credentials = shared.SourceGithubUpdateAuthentication{
+ OAuth: oAuth,
}
- var sourceGithubUpdateAuthenticationPersonalAccessToken *shared.SourceGithubUpdateAuthenticationPersonalAccessToken
- if r.Configuration.Credentials.SourceGithubUpdateAuthenticationPersonalAccessToken != nil {
- optionTitle1 := new(shared.SourceGithubUpdateAuthenticationPersonalAccessTokenOptionTitle)
- if !r.Configuration.Credentials.SourceGithubUpdateAuthenticationPersonalAccessToken.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceGithubUpdateAuthenticationPersonalAccessToken.OptionTitle.IsNull() {
- *optionTitle1 = shared.SourceGithubUpdateAuthenticationPersonalAccessTokenOptionTitle(r.Configuration.Credentials.SourceGithubUpdateAuthenticationPersonalAccessToken.OptionTitle.ValueString())
- } else {
- optionTitle1 = nil
- }
- personalAccessToken := r.Configuration.Credentials.SourceGithubUpdateAuthenticationPersonalAccessToken.PersonalAccessToken.ValueString()
- sourceGithubUpdateAuthenticationPersonalAccessToken = &shared.SourceGithubUpdateAuthenticationPersonalAccessToken{
- OptionTitle: optionTitle1,
- PersonalAccessToken: personalAccessToken,
- }
+ }
+ var sourceGithubUpdatePersonalAccessToken *shared.SourceGithubUpdatePersonalAccessToken
+ if r.Configuration.Credentials.PersonalAccessToken != nil {
+ personalAccessToken := r.Configuration.Credentials.PersonalAccessToken.PersonalAccessToken.ValueString()
+ sourceGithubUpdatePersonalAccessToken = &shared.SourceGithubUpdatePersonalAccessToken{
+ PersonalAccessToken: personalAccessToken,
}
- if sourceGithubUpdateAuthenticationPersonalAccessToken != nil {
- credentials = &shared.SourceGithubUpdateAuthentication{
- SourceGithubUpdateAuthenticationPersonalAccessToken: sourceGithubUpdateAuthenticationPersonalAccessToken,
- }
+ }
+ if sourceGithubUpdatePersonalAccessToken != nil {
+ credentials = shared.SourceGithubUpdateAuthentication{
+ SourceGithubUpdatePersonalAccessToken: sourceGithubUpdatePersonalAccessToken,
}
}
- repository := r.Configuration.Repository.ValueString()
+ var repositories []string = nil
+ for _, repositoriesItem := range r.Configuration.Repositories {
+ repositories = append(repositories, repositoriesItem.ValueString())
+ }
+ repository := new(string)
+ if !r.Configuration.Repository.IsUnknown() && !r.Configuration.Repository.IsNull() {
+ *repository = r.Configuration.Repository.ValueString()
+ } else {
+ repository = nil
+ }
requestsPerHour := new(int64)
if !r.Configuration.RequestsPerHour.IsUnknown() && !r.Configuration.RequestsPerHour.IsNull() {
*requestsPerHour = r.Configuration.RequestsPerHour.ValueInt64()
} else {
requestsPerHour = nil
}
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceGithubUpdate{
+ APIURL: apiURL,
Branch: branch,
+ Branches: branches,
Credentials: credentials,
+ Repositories: repositories,
Repository: repository,
RequestsPerHour: requestsPerHour,
StartDate: startDate,
diff --git a/internal/provider/source_gitlab_data_source.go b/internal/provider/source_gitlab_data_source.go
old mode 100755
new mode 100644
index 1b32d1046..dd44d07f8
--- a/internal/provider/source_gitlab_data_source.go
+++ b/internal/provider/source_gitlab_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceGitlabDataSource struct {
// SourceGitlabDataSourceModel describes the data model.
type SourceGitlabDataSourceModel struct {
- Configuration SourceGitlab `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,167 +47,20 @@ func (r *SourceGitlabDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceGitlab DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_url": schema.StringAttribute{
- Computed: true,
- Description: `Please enter your basic URL from GitLab instance.`,
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_gitlab_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The API ID of the Gitlab developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The API Secret the Gitlab developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_gitlab_authorization_method_private_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Log into your Gitlab account and then generate a personal Access Token.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_gitlab_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The API ID of the Gitlab developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The API Secret the Gitlab developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_gitlab_update_authorization_method_private_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Log into your Gitlab account and then generate a personal Access Token.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "groups": schema.StringAttribute{
- Computed: true,
- Description: `Space-delimited list of groups. e.g. airbyte.io.`,
- },
- "projects": schema.StringAttribute{
- Computed: true,
- Description: `Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "gitlab",
- ),
- },
- Description: `must be one of ["gitlab"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_gitlab_data_source_sdk.go b/internal/provider/source_gitlab_data_source_sdk.go
old mode 100755
new mode 100644
index 93ab1a946..eb2420c62
--- a/internal/provider/source_gitlab_data_source_sdk.go
+++ b/internal/provider/source_gitlab_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGitlabDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_gitlab_resource.go b/internal/provider/source_gitlab_resource.go
old mode 100755
new mode 100644
index d6787758e..35a53ec39
--- a/internal/provider/source_gitlab_resource.go
+++ b/internal/provider/source_gitlab_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceGitlabResource struct {
// SourceGitlabResourceModel describes the resource data model.
type SourceGitlabResourceModel struct {
Configuration SourceGitlab `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,28 +57,21 @@ func (r *SourceGitlabResource) Schema(ctx context.Context, req resource.SchemaRe
Required: true,
Attributes: map[string]schema.Attribute{
"api_url": schema.StringAttribute{
- Optional: true,
- Description: `Please enter your basic URL from GitLab instance.`,
+ Optional: true,
+ MarkdownDescription: `Default: "gitlab.com"` + "\n" +
+ `Please enter your basic URL from GitLab instance.`,
},
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_gitlab_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The API ID of the Gitlab developer application.`,
@@ -88,88 +82,27 @@ func (r *SourceGitlabResource) Schema(ctx context.Context, req resource.SchemaRe
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The key to refresh the expired access_token.`,
},
"token_expiry_date": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_gitlab_authorization_method_private_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
Required: true,
- Description: `Log into your Gitlab account and then generate a personal Access Token.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_gitlab_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The API ID of the Gitlab developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The API Secret the Gitlab developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Required: true,
+ Sensitive: true,
+ Description: `The date-time when the access token should be refreshed.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date-time when the access token should be refreshed.`,
},
},
},
- "source_gitlab_update_authorization_method_private_token": schema.SingleNestedAttribute{
+ "private_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Log into your Gitlab account and then generate a personal Access Token.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
},
},
},
@@ -179,37 +112,49 @@ func (r *SourceGitlabResource) Schema(ctx context.Context, req resource.SchemaRe
},
"groups": schema.StringAttribute{
Optional: true,
- Description: `Space-delimited list of groups. e.g. airbyte.io.`,
+ Description: `[DEPRECATED] Space-delimited list of groups. e.g. airbyte.io.`,
+ },
+ "groups_list": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `List of groups. e.g. airbyte.io.`,
},
"projects": schema.StringAttribute{
Optional: true,
- Description: `Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.`,
+ Description: `[DEPRECATED] Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "gitlab",
- ),
- },
- Description: `must be one of ["gitlab"]`,
+ "projects_list": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data will be replicated. All data generated after this date will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -273,7 +218,7 @@ func (r *SourceGitlabResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGitlab(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -449,5 +394,5 @@ func (r *SourceGitlabResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceGitlabResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_gitlab_resource_sdk.go b/internal/provider/source_gitlab_resource_sdk.go
old mode 100755
new mode 100644
index 52dc916b2..dbbaf67ab
--- a/internal/provider/source_gitlab_resource_sdk.go
+++ b/internal/provider/source_gitlab_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -16,50 +16,36 @@ func (r *SourceGitlabResourceModel) ToCreateSDKType() *shared.SourceGitlabCreate
apiURL = nil
}
var credentials shared.SourceGitlabAuthorizationMethod
- var sourceGitlabAuthorizationMethodOAuth20 *shared.SourceGitlabAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceGitlabAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceGitlabAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.SourceGitlabAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceGitlabAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGitlabAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceGitlabAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceGitlabAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceGitlabAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceGitlabAuthorizationMethodOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceGitlabAuthorizationMethodOAuth20.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceGitlabAuthorizationMethodOAuth20.TokenExpiryDate.ValueString())
- sourceGitlabAuthorizationMethodOAuth20 = &shared.SourceGitlabAuthorizationMethodOAuth20{
+ var sourceGitlabOAuth20 *shared.SourceGitlabOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
+ sourceGitlabOAuth20 = &shared.SourceGitlabOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceGitlabAuthorizationMethodOAuth20 != nil {
+ if sourceGitlabOAuth20 != nil {
credentials = shared.SourceGitlabAuthorizationMethod{
- SourceGitlabAuthorizationMethodOAuth20: sourceGitlabAuthorizationMethodOAuth20,
+ SourceGitlabOAuth20: sourceGitlabOAuth20,
}
}
- var sourceGitlabAuthorizationMethodPrivateToken *shared.SourceGitlabAuthorizationMethodPrivateToken
- if r.Configuration.Credentials.SourceGitlabAuthorizationMethodPrivateToken != nil {
- accessToken1 := r.Configuration.Credentials.SourceGitlabAuthorizationMethodPrivateToken.AccessToken.ValueString()
- authType1 := new(shared.SourceGitlabAuthorizationMethodPrivateTokenAuthType)
- if !r.Configuration.Credentials.SourceGitlabAuthorizationMethodPrivateToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGitlabAuthorizationMethodPrivateToken.AuthType.IsNull() {
- *authType1 = shared.SourceGitlabAuthorizationMethodPrivateTokenAuthType(r.Configuration.Credentials.SourceGitlabAuthorizationMethodPrivateToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- sourceGitlabAuthorizationMethodPrivateToken = &shared.SourceGitlabAuthorizationMethodPrivateToken{
+ var sourceGitlabPrivateToken *shared.SourceGitlabPrivateToken
+ if r.Configuration.Credentials.PrivateToken != nil {
+ accessToken1 := r.Configuration.Credentials.PrivateToken.AccessToken.ValueString()
+ sourceGitlabPrivateToken = &shared.SourceGitlabPrivateToken{
AccessToken: accessToken1,
- AuthType: authType1,
}
}
- if sourceGitlabAuthorizationMethodPrivateToken != nil {
+ if sourceGitlabPrivateToken != nil {
credentials = shared.SourceGitlabAuthorizationMethod{
- SourceGitlabAuthorizationMethodPrivateToken: sourceGitlabAuthorizationMethodPrivateToken,
+ SourceGitlabPrivateToken: sourceGitlabPrivateToken,
}
}
groups := new(string)
@@ -68,21 +54,40 @@ func (r *SourceGitlabResourceModel) ToCreateSDKType() *shared.SourceGitlabCreate
} else {
groups = nil
}
+ var groupsList []string = nil
+ for _, groupsListItem := range r.Configuration.GroupsList {
+ groupsList = append(groupsList, groupsListItem.ValueString())
+ }
projects := new(string)
if !r.Configuration.Projects.IsUnknown() && !r.Configuration.Projects.IsNull() {
*projects = r.Configuration.Projects.ValueString()
} else {
projects = nil
}
- sourceType := shared.SourceGitlabGitlab(r.Configuration.SourceType.ValueString())
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ var projectsList []string = nil
+ for _, projectsListItem := range r.Configuration.ProjectsList {
+ projectsList = append(projectsList, projectsListItem.ValueString())
+ }
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceGitlab{
- APIURL: apiURL,
- Credentials: credentials,
- Groups: groups,
- Projects: projects,
- SourceType: sourceType,
- StartDate: startDate,
+ APIURL: apiURL,
+ Credentials: credentials,
+ Groups: groups,
+ GroupsList: groupsList,
+ Projects: projects,
+ ProjectsList: projectsList,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -94,6 +99,7 @@ func (r *SourceGitlabResourceModel) ToCreateSDKType() *shared.SourceGitlabCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGitlabCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -114,50 +120,36 @@ func (r *SourceGitlabResourceModel) ToUpdateSDKType() *shared.SourceGitlabPutReq
apiURL = nil
}
var credentials shared.SourceGitlabUpdateAuthorizationMethod
- var sourceGitlabUpdateAuthorizationMethodOAuth20 *shared.SourceGitlabUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.SourceGitlabUpdateAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceGitlabUpdateAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodOAuth20.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodOAuth20.TokenExpiryDate.ValueString())
- sourceGitlabUpdateAuthorizationMethodOAuth20 = &shared.SourceGitlabUpdateAuthorizationMethodOAuth20{
+ var sourceGitlabUpdateOAuth20 *shared.SourceGitlabUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
+ sourceGitlabUpdateOAuth20 = &shared.SourceGitlabUpdateOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceGitlabUpdateAuthorizationMethodOAuth20 != nil {
+ if sourceGitlabUpdateOAuth20 != nil {
credentials = shared.SourceGitlabUpdateAuthorizationMethod{
- SourceGitlabUpdateAuthorizationMethodOAuth20: sourceGitlabUpdateAuthorizationMethodOAuth20,
+ SourceGitlabUpdateOAuth20: sourceGitlabUpdateOAuth20,
}
}
- var sourceGitlabUpdateAuthorizationMethodPrivateToken *shared.SourceGitlabUpdateAuthorizationMethodPrivateToken
- if r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodPrivateToken != nil {
- accessToken1 := r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodPrivateToken.AccessToken.ValueString()
- authType1 := new(shared.SourceGitlabUpdateAuthorizationMethodPrivateTokenAuthType)
- if !r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodPrivateToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodPrivateToken.AuthType.IsNull() {
- *authType1 = shared.SourceGitlabUpdateAuthorizationMethodPrivateTokenAuthType(r.Configuration.Credentials.SourceGitlabUpdateAuthorizationMethodPrivateToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- sourceGitlabUpdateAuthorizationMethodPrivateToken = &shared.SourceGitlabUpdateAuthorizationMethodPrivateToken{
+ var privateToken *shared.PrivateToken
+ if r.Configuration.Credentials.PrivateToken != nil {
+ accessToken1 := r.Configuration.Credentials.PrivateToken.AccessToken.ValueString()
+ privateToken = &shared.PrivateToken{
AccessToken: accessToken1,
- AuthType: authType1,
}
}
- if sourceGitlabUpdateAuthorizationMethodPrivateToken != nil {
+ if privateToken != nil {
credentials = shared.SourceGitlabUpdateAuthorizationMethod{
- SourceGitlabUpdateAuthorizationMethodPrivateToken: sourceGitlabUpdateAuthorizationMethodPrivateToken,
+ PrivateToken: privateToken,
}
}
groups := new(string)
@@ -166,19 +158,34 @@ func (r *SourceGitlabResourceModel) ToUpdateSDKType() *shared.SourceGitlabPutReq
} else {
groups = nil
}
+ var groupsList []string = nil
+ for _, groupsListItem := range r.Configuration.GroupsList {
+ groupsList = append(groupsList, groupsListItem.ValueString())
+ }
projects := new(string)
if !r.Configuration.Projects.IsUnknown() && !r.Configuration.Projects.IsNull() {
*projects = r.Configuration.Projects.ValueString()
} else {
projects = nil
}
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ var projectsList []string = nil
+ for _, projectsListItem := range r.Configuration.ProjectsList {
+ projectsList = append(projectsList, projectsListItem.ValueString())
+ }
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceGitlabUpdate{
- APIURL: apiURL,
- Credentials: credentials,
- Groups: groups,
- Projects: projects,
- StartDate: startDate,
+ APIURL: apiURL,
+ Credentials: credentials,
+ Groups: groups,
+ GroupsList: groupsList,
+ Projects: projects,
+ ProjectsList: projectsList,
+ StartDate: startDate,
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
diff --git a/internal/provider/source_glassfrog_data_source.go b/internal/provider/source_glassfrog_data_source.go
old mode 100755
new mode 100644
index df08c109c..1786dcefc
--- a/internal/provider/source_glassfrog_data_source.go
+++ b/internal/provider/source_glassfrog_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceGlassfrogDataSource struct {
// SourceGlassfrogDataSourceModel describes the data model.
type SourceGlassfrogDataSourceModel struct {
- Configuration SourceGlassfrog `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceGlassfrogDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceGlassfrog DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API key provided by Glassfrog`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "glassfrog",
- ),
- },
- Description: `must be one of ["glassfrog"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_glassfrog_data_source_sdk.go b/internal/provider/source_glassfrog_data_source_sdk.go
old mode 100755
new mode 100644
index 92b85617c..f6f3962f3
--- a/internal/provider/source_glassfrog_data_source_sdk.go
+++ b/internal/provider/source_glassfrog_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGlassfrogDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_glassfrog_resource.go b/internal/provider/source_glassfrog_resource.go
old mode 100755
new mode 100644
index c62e6df03..31efdc21c
--- a/internal/provider/source_glassfrog_resource.go
+++ b/internal/provider/source_glassfrog_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceGlassfrogResource struct {
// SourceGlassfrogResourceModel describes the resource data model.
type SourceGlassfrogResourceModel struct {
- Configuration SourceGlassfrog `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceGlassfrogResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceGlassfrogResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API key provided by Glassfrog`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "glassfrog",
- ),
- },
- Description: `must be one of ["glassfrog"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceGlassfrogResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGlassfrog(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceGlassfrogResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceGlassfrogResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_glassfrog_resource_sdk.go b/internal/provider/source_glassfrog_resource_sdk.go
old mode 100755
new mode 100644
index 1871473d9..4073618bc
--- a/internal/provider/source_glassfrog_resource_sdk.go
+++ b/internal/provider/source_glassfrog_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGlassfrogResourceModel) ToCreateSDKType() *shared.SourceGlassfrogCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceGlassfrogGlassfrog(r.Configuration.SourceType.ValueString())
configuration := shared.SourceGlassfrog{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceGlassfrogResourceModel) ToCreateSDKType() *shared.SourceGlassfrog
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGlassfrogCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_gnews_data_source.go b/internal/provider/source_gnews_data_source.go
old mode 100755
new mode 100644
index 7bdfff5b3..9e5579c8b
--- a/internal/provider/source_gnews_data_source.go
+++ b/internal/provider/source_gnews_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceGnewsDataSource struct {
// SourceGnewsDataSourceModel describes the data model.
type SourceGnewsDataSourceModel struct {
- Configuration SourceGnews `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,186 +47,20 @@ func (r *SourceGnewsDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceGnews DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key`,
- },
- "country": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "au",
- "br",
- "ca",
- "cn",
- "eg",
- "fr",
- "de",
- "gr",
- "hk",
- "in",
- "ie",
- "il",
- "it",
- "jp",
- "nl",
- "no",
- "pk",
- "pe",
- "ph",
- "pt",
- "ro",
- "ru",
- "sg",
- "es",
- "se",
- "ch",
- "tw",
- "ua",
- "gb",
- "us",
- ),
- },
- MarkdownDescription: `must be one of ["au", "br", "ca", "cn", "eg", "fr", "de", "gr", "hk", "in", "ie", "il", "it", "jp", "nl", "no", "pk", "pe", "ph", "pt", "ro", "ru", "sg", "es", "se", "ch", "tw", "ua", "gb", "us"]` + "\n" +
- `This parameter allows you to specify the country where the news articles returned by the API were published, the contents of the articles are not necessarily related to the specified country. You have to set as value the 2 letters code of the country you want to filter.`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Description: `This parameter allows you to filter the articles that have a publication date smaller than or equal to the specified value. The date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)`,
- },
- "in": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `This parameter allows you to choose in which attributes the keywords are searched. The attributes that can be set are title, description and content. It is possible to combine several attributes.`,
- },
- "language": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ar",
- "zh",
- "nl",
- "en",
- "fr",
- "de",
- "el",
- "he",
- "hi",
- "it",
- "ja",
- "ml",
- "mr",
- "no",
- "pt",
- "ro",
- "ru",
- "es",
- "sv",
- "ta",
- "te",
- "uk",
- ),
- },
- Description: `must be one of ["ar", "zh", "nl", "en", "fr", "de", "el", "he", "hi", "it", "ja", "ml", "mr", "no", "pt", "ro", "ru", "es", "sv", "ta", "te", "uk"]`,
- },
- "nullable": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `This parameter allows you to specify the attributes that you allow to return null values. The attributes that can be set are title, description and content. It is possible to combine several attributes`,
- },
- "query": schema.StringAttribute{
- Computed: true,
- MarkdownDescription: `This parameter allows you to specify your search keywords to find the news articles you are looking for. The keywords will be used to return the most relevant articles. It is possible to use logical operators with keywords. - Phrase Search Operator: This operator allows you to make an exact search. Keywords surrounded by ` + "\n" +
- ` quotation marks are used to search for articles with the exact same keyword sequence. ` + "\n" +
- ` For example the query: "Apple iPhone" will return articles matching at least once this sequence of keywords.` + "\n" +
- `- Logical AND Operator: This operator allows you to make sure that several keywords are all used in the article` + "\n" +
- ` search. By default the space character acts as an AND operator, it is possible to replace the space character ` + "\n" +
- ` by AND to obtain the same result. For example the query: Apple Microsoft is equivalent to Apple AND Microsoft` + "\n" +
- `- Logical OR Operator: This operator allows you to retrieve articles matching the keyword a or the keyword b.` + "\n" +
- ` It is important to note that this operator has a higher precedence than the AND operator. For example the ` + "\n" +
- ` query: Apple OR Microsoft will return all articles matching the keyword Apple as well as all articles matching ` + "\n" +
- ` the keyword Microsoft` + "\n" +
- `- Logical NOT Operator: This operator allows you to remove from the results the articles corresponding to the` + "\n" +
- ` specified keywords. To use it, you need to add NOT in front of each word or phrase surrounded by quotes.` + "\n" +
- ` For example the query: Apple NOT iPhone will return all articles matching the keyword Apple but not the keyword` + "\n" +
- ` iPhone`,
- },
- "sortby": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "publishedAt",
- "relevance",
- ),
- },
- MarkdownDescription: `must be one of ["publishedAt", "relevance"]` + "\n" +
- `This parameter allows you to choose with which type of sorting the articles should be returned. Two values are possible:` + "\n" +
- ` - publishedAt = sort by publication date, the articles with the most recent publication date are returned first` + "\n" +
- ` - relevance = sort by best match to keywords, the articles with the best match are returned first`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "gnews",
- ),
- },
- Description: `must be one of ["gnews"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `This parameter allows you to filter the articles that have a publication date greater than or equal to the specified value. The date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)`,
- },
- "top_headlines_query": schema.StringAttribute{
- Computed: true,
- MarkdownDescription: `This parameter allows you to specify your search keywords to find the news articles you are looking for. The keywords will be used to return the most relevant articles. It is possible to use logical operators with keywords. - Phrase Search Operator: This operator allows you to make an exact search. Keywords surrounded by ` + "\n" +
- ` quotation marks are used to search for articles with the exact same keyword sequence. ` + "\n" +
- ` For example the query: "Apple iPhone" will return articles matching at least once this sequence of keywords.` + "\n" +
- `- Logical AND Operator: This operator allows you to make sure that several keywords are all used in the article` + "\n" +
- ` search. By default the space character acts as an AND operator, it is possible to replace the space character ` + "\n" +
- ` by AND to obtain the same result. For example the query: Apple Microsoft is equivalent to Apple AND Microsoft` + "\n" +
- `- Logical OR Operator: This operator allows you to retrieve articles matching the keyword a or the keyword b.` + "\n" +
- ` It is important to note that this operator has a higher precedence than the AND operator. For example the ` + "\n" +
- ` query: Apple OR Microsoft will return all articles matching the keyword Apple as well as all articles matching ` + "\n" +
- ` the keyword Microsoft` + "\n" +
- `- Logical NOT Operator: This operator allows you to remove from the results the articles corresponding to the` + "\n" +
- ` specified keywords. To use it, you need to add NOT in front of each word or phrase surrounded by quotes.` + "\n" +
- ` For example the query: Apple NOT iPhone will return all articles matching the keyword Apple but not the keyword` + "\n" +
- ` iPhone`,
- },
- "top_headlines_topic": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "breaking-news",
- "world",
- "nation",
- "business",
- "technology",
- "entertainment",
- "sports",
- "science",
- "health",
- ),
- },
- MarkdownDescription: `must be one of ["breaking-news", "world", "nation", "business", "technology", "entertainment", "sports", "science", "health"]` + "\n" +
- `This parameter allows you to change the category for the request.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_gnews_data_source_sdk.go b/internal/provider/source_gnews_data_source_sdk.go
old mode 100755
new mode 100644
index 7e4cf2bb3..7eada9b75
--- a/internal/provider/source_gnews_data_source_sdk.go
+++ b/internal/provider/source_gnews_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGnewsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_gnews_resource.go b/internal/provider/source_gnews_resource.go
old mode 100755
new mode 100644
index 14150d08f..77e97f828
--- a/internal/provider/source_gnews_resource.go
+++ b/internal/provider/source_gnews_resource.go
@@ -3,17 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +36,7 @@ type SourceGnewsResource struct {
// SourceGnewsResourceModel describes the resource data model.
type SourceGnewsResourceModel struct {
Configuration SourceGnews `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,10 +58,13 @@ func (r *SourceGnewsResource) Schema(ctx context.Context, req resource.SchemaReq
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key`,
},
"country": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["au", "br", "ca", "cn", "eg", "fr", "de", "gr", "hk", "in", "ie", "il", "it", "jp", "nl", "no", "pk", "pe", "ph", "pt", "ro", "ru", "sg", "es", "se", "ch", "tw", "ua", "gb", "us"]` + "\n" +
+ `This parameter allows you to specify the country where the news articles returned by the API were published, the contents of the articles are not necessarily related to the specified country. You have to set as value the 2 letters code of the country you want to filter.`,
Validators: []validator.String{
stringvalidator.OneOf(
"au",
@@ -94,8 +99,6 @@ func (r *SourceGnewsResource) Schema(ctx context.Context, req resource.SchemaReq
"us",
),
},
- MarkdownDescription: `must be one of ["au", "br", "ca", "cn", "eg", "fr", "de", "gr", "hk", "in", "ie", "il", "it", "jp", "nl", "no", "pk", "pe", "ph", "pt", "ro", "ru", "sg", "es", "se", "ch", "tw", "ua", "gb", "us"]` + "\n" +
- `This parameter allows you to specify the country where the news articles returned by the API were published, the contents of the articles are not necessarily related to the specified country. You have to set as value the 2 letters code of the country you want to filter.`,
},
"end_date": schema.StringAttribute{
Optional: true,
@@ -107,7 +110,8 @@ func (r *SourceGnewsResource) Schema(ctx context.Context, req resource.SchemaReq
Description: `This parameter allows you to choose in which attributes the keywords are searched. The attributes that can be set are title, description and content. It is possible to combine several attributes.`,
},
"language": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `must be one of ["ar", "zh", "nl", "en", "fr", "de", "el", "he", "hi", "it", "ja", "ml", "mr", "no", "pt", "ro", "ru", "es", "sv", "ta", "te", "uk"]`,
Validators: []validator.String{
stringvalidator.OneOf(
"ar",
@@ -134,7 +138,6 @@ func (r *SourceGnewsResource) Schema(ctx context.Context, req resource.SchemaReq
"uk",
),
},
- Description: `must be one of ["ar", "zh", "nl", "en", "fr", "de", "el", "he", "hi", "it", "ja", "ml", "mr", "no", "pt", "ro", "ru", "es", "sv", "ta", "te", "uk"]`,
},
"nullable": schema.ListAttribute{
Optional: true,
@@ -160,25 +163,16 @@ func (r *SourceGnewsResource) Schema(ctx context.Context, req resource.SchemaReq
},
"sortby": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "publishedAt",
- "relevance",
- ),
- },
MarkdownDescription: `must be one of ["publishedAt", "relevance"]` + "\n" +
`This parameter allows you to choose with which type of sorting the articles should be returned. Two values are possible:` + "\n" +
` - publishedAt = sort by publication date, the articles with the most recent publication date are returned first` + "\n" +
` - relevance = sort by best match to keywords, the articles with the best match are returned first`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
Validators: []validator.String{
stringvalidator.OneOf(
- "gnews",
+ "publishedAt",
+ "relevance",
),
},
- Description: `must be one of ["gnews"]`,
},
"start_date": schema.StringAttribute{
Optional: true,
@@ -203,6 +197,8 @@ func (r *SourceGnewsResource) Schema(ctx context.Context, req resource.SchemaReq
},
"top_headlines_topic": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["breaking-news", "world", "nation", "business", "technology", "entertainment", "sports", "science", "health"]` + "\n" +
+ `This parameter allows you to change the category for the request.`,
Validators: []validator.String{
stringvalidator.OneOf(
"breaking-news",
@@ -216,18 +212,27 @@ func (r *SourceGnewsResource) Schema(ctx context.Context, req resource.SchemaReq
"health",
),
},
- MarkdownDescription: `must be one of ["breaking-news", "world", "nation", "business", "technology", "entertainment", "sports", "science", "health"]` + "\n" +
- `This parameter allows you to change the category for the request.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -291,7 +296,7 @@ func (r *SourceGnewsResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGnews(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -467,5 +472,5 @@ func (r *SourceGnewsResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceGnewsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_gnews_resource_sdk.go b/internal/provider/source_gnews_resource_sdk.go
old mode 100755
new mode 100644
index 9d2254134..d076a85d0
--- a/internal/provider/source_gnews_resource_sdk.go
+++ b/internal/provider/source_gnews_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -42,7 +42,6 @@ func (r *SourceGnewsResourceModel) ToCreateSDKType() *shared.SourceGnewsCreateRe
} else {
sortby = nil
}
- sourceType := shared.SourceGnewsGnews(r.Configuration.SourceType.ValueString())
startDate := new(string)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate = r.Configuration.StartDate.ValueString()
@@ -70,11 +69,16 @@ func (r *SourceGnewsResourceModel) ToCreateSDKType() *shared.SourceGnewsCreateRe
Nullable: nullable,
Query: query,
Sortby: sortby,
- SourceType: sourceType,
StartDate: startDate,
TopHeadlinesQuery: topHeadlinesQuery,
TopHeadlinesTopic: topHeadlinesTopic,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -85,6 +89,7 @@ func (r *SourceGnewsResourceModel) ToCreateSDKType() *shared.SourceGnewsCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGnewsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -99,9 +104,9 @@ func (r *SourceGnewsResourceModel) ToGetSDKType() *shared.SourceGnewsCreateReque
func (r *SourceGnewsResourceModel) ToUpdateSDKType() *shared.SourceGnewsPutRequest {
apiKey := r.Configuration.APIKey.ValueString()
- country := new(shared.SourceGnewsUpdateCountry)
+ country := new(shared.Country)
if !r.Configuration.Country.IsUnknown() && !r.Configuration.Country.IsNull() {
- *country = shared.SourceGnewsUpdateCountry(r.Configuration.Country.ValueString())
+ *country = shared.Country(r.Configuration.Country.ValueString())
} else {
country = nil
}
@@ -111,24 +116,24 @@ func (r *SourceGnewsResourceModel) ToUpdateSDKType() *shared.SourceGnewsPutReque
} else {
endDate = nil
}
- var in []shared.SourceGnewsUpdateIn = nil
+ var in []shared.In = nil
for _, inItem := range r.Configuration.In {
- in = append(in, shared.SourceGnewsUpdateIn(inItem.ValueString()))
+ in = append(in, shared.In(inItem.ValueString()))
}
- language := new(shared.SourceGnewsUpdateLanguage)
+ language := new(shared.Language)
if !r.Configuration.Language.IsUnknown() && !r.Configuration.Language.IsNull() {
- *language = shared.SourceGnewsUpdateLanguage(r.Configuration.Language.ValueString())
+ *language = shared.Language(r.Configuration.Language.ValueString())
} else {
language = nil
}
- var nullable []shared.SourceGnewsUpdateNullable = nil
+ var nullable []shared.Nullable = nil
for _, nullableItem := range r.Configuration.Nullable {
- nullable = append(nullable, shared.SourceGnewsUpdateNullable(nullableItem.ValueString()))
+ nullable = append(nullable, shared.Nullable(nullableItem.ValueString()))
}
query := r.Configuration.Query.ValueString()
- sortby := new(shared.SourceGnewsUpdateSortBy)
+ sortby := new(shared.SortBy)
if !r.Configuration.Sortby.IsUnknown() && !r.Configuration.Sortby.IsNull() {
- *sortby = shared.SourceGnewsUpdateSortBy(r.Configuration.Sortby.ValueString())
+ *sortby = shared.SortBy(r.Configuration.Sortby.ValueString())
} else {
sortby = nil
}
@@ -144,9 +149,9 @@ func (r *SourceGnewsResourceModel) ToUpdateSDKType() *shared.SourceGnewsPutReque
} else {
topHeadlinesQuery = nil
}
- topHeadlinesTopic := new(shared.SourceGnewsUpdateTopHeadlinesTopic)
+ topHeadlinesTopic := new(shared.TopHeadlinesTopic)
if !r.Configuration.TopHeadlinesTopic.IsUnknown() && !r.Configuration.TopHeadlinesTopic.IsNull() {
- *topHeadlinesTopic = shared.SourceGnewsUpdateTopHeadlinesTopic(r.Configuration.TopHeadlinesTopic.ValueString())
+ *topHeadlinesTopic = shared.TopHeadlinesTopic(r.Configuration.TopHeadlinesTopic.ValueString())
} else {
topHeadlinesTopic = nil
}
diff --git a/internal/provider/source_googleads_data_source.go b/internal/provider/source_googleads_data_source.go
old mode 100755
new mode 100644
index db2bb51ab..3d8157a5f
--- a/internal/provider/source_googleads_data_source.go
+++ b/internal/provider/source_googleads_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceGoogleAdsDataSource struct {
// SourceGoogleAdsDataSourceModel describes the data model.
type SourceGoogleAdsDataSourceModel struct {
- Configuration SourceGoogleAds `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,96 +47,20 @@ func (r *SourceGoogleAdsDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceGoogleAds DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "conversion_window_days": schema.Int64Attribute{
- Computed: true,
- Description: `A conversion window is the number of days after an ad interaction (such as an ad click or video view) during which a conversion, such as a purchase, is recorded in Google Ads. For more information, see Google's documentation.`,
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The Access Token for making authenticated requests. For detailed instructions on finding this value, refer to our documentation.`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Google Ads developer application. For detailed instructions on finding this value, refer to our documentation.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Google Ads developer application. For detailed instructions on finding this value, refer to our documentation.`,
- },
- "developer_token": schema.StringAttribute{
- Computed: true,
- Description: `The Developer Token granted by Google to use their APIs. For detailed instructions on finding this value, refer to our documentation.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token used to obtain a new Access Token. For detailed instructions on finding this value, refer to our documentation.`,
- },
- },
- },
- "custom_queries": schema.ListNestedAttribute{
- Computed: true,
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "query": schema.StringAttribute{
- Computed: true,
- Description: `A custom defined GAQL query for building the report. Avoid including the segments.date field; wherever possible, Airbyte will automatically include it for incremental syncs. For more information, refer to Google's documentation.`,
- },
- "table_name": schema.StringAttribute{
- Computed: true,
- Description: `The table name in your destination database for the chosen query.`,
- },
- },
- },
- },
- "customer_id": schema.StringAttribute{
- Computed: true,
- Description: `Comma-separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. For detailed instructions on finding this value, refer to our documentation.`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set)`,
- },
- "login_customer_id": schema.StringAttribute{
- Computed: true,
- Description: `If your access to the customer account is through a manager account, this field is required, and must be set to the 10-digit customer ID of the manager account. For more information about this field, refer to Google's documentation.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-ads",
- ),
- },
- Description: `must be one of ["google-ads"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. (Default value of two years ago is used if not set)`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_googleads_data_source_sdk.go b/internal/provider/source_googleads_data_source_sdk.go
old mode 100755
new mode 100644
index 89b4dee57..3e847c88e
--- a/internal/provider/source_googleads_data_source_sdk.go
+++ b/internal/provider/source_googleads_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGoogleAdsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_googleads_resource.go b/internal/provider/source_googleads_resource.go
old mode 100755
new mode 100644
index 03201a523..21564e87b
--- a/internal/provider/source_googleads_resource.go
+++ b/internal/provider/source_googleads_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceGoogleAdsResource struct {
// SourceGoogleAdsResourceModel describes the resource data model.
type SourceGoogleAdsResourceModel struct {
Configuration SourceGoogleAds `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,14 +57,16 @@ func (r *SourceGoogleAdsResource) Schema(ctx context.Context, req resource.Schem
Required: true,
Attributes: map[string]schema.Attribute{
"conversion_window_days": schema.Int64Attribute{
- Optional: true,
- Description: `A conversion window is the number of days after an ad interaction (such as an ad click or video view) during which a conversion, such as a purchase, is recorded in Google Ads. For more information, see Google's documentation.`,
+ Optional: true,
+ MarkdownDescription: `Default: 14` + "\n" +
+ `A conversion window is the number of days after an ad interaction (such as an ad click or video view) during which a conversion, such as a purchase, is recorded in Google Ads. For more information, see Google's documentation.`,
},
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The Access Token for making authenticated requests. For detailed instructions on finding this value, refer to our documentation.`,
},
"client_id": schema.StringAttribute{
@@ -76,10 +79,12 @@ func (r *SourceGoogleAdsResource) Schema(ctx context.Context, req resource.Schem
},
"developer_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Developer Token granted by Google to use their APIs. For detailed instructions on finding this value, refer to our documentation.`,
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The token used to obtain a new Access Token. For detailed instructions on finding this value, refer to our documentation.`,
},
},
@@ -104,41 +109,43 @@ func (r *SourceGoogleAdsResource) Schema(ctx context.Context, req resource.Schem
Description: `Comma-separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. For detailed instructions on finding this value, refer to our documentation.`,
},
"end_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set)`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set)`,
},
"login_customer_id": schema.StringAttribute{
Optional: true,
Description: `If your access to the customer account is through a manager account, this field is required, and must be set to the 10-digit customer ID of the manager account. For more information about this field, refer to Google's documentation.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-ads",
- ),
- },
- Description: `must be one of ["google-ads"]`,
- },
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. (Default value of two years ago is used if not set)`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. (Default value of two years ago is used if not set)`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -202,7 +209,7 @@ func (r *SourceGoogleAdsResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGoogleAds(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -378,5 +385,5 @@ func (r *SourceGoogleAdsResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceGoogleAdsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_googleads_resource_sdk.go b/internal/provider/source_googleads_resource_sdk.go
old mode 100755
new mode 100644
index 1e5453a54..92784d352
--- a/internal/provider/source_googleads_resource_sdk.go
+++ b/internal/provider/source_googleads_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -54,7 +54,6 @@ func (r *SourceGoogleAdsResourceModel) ToCreateSDKType() *shared.SourceGoogleAds
} else {
loginCustomerID = nil
}
- sourceType := shared.SourceGoogleAdsGoogleAds(r.Configuration.SourceType.ValueString())
startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
@@ -68,9 +67,14 @@ func (r *SourceGoogleAdsResourceModel) ToCreateSDKType() *shared.SourceGoogleAds
CustomerID: customerID,
EndDate: endDate,
LoginCustomerID: loginCustomerID,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -81,6 +85,7 @@ func (r *SourceGoogleAdsResourceModel) ToCreateSDKType() *shared.SourceGoogleAds
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGoogleAdsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -110,18 +115,18 @@ func (r *SourceGoogleAdsResourceModel) ToUpdateSDKType() *shared.SourceGoogleAds
clientSecret := r.Configuration.Credentials.ClientSecret.ValueString()
developerToken := r.Configuration.Credentials.DeveloperToken.ValueString()
refreshToken := r.Configuration.Credentials.RefreshToken.ValueString()
- credentials := shared.SourceGoogleAdsUpdateGoogleCredentials{
+ credentials := shared.GoogleCredentials{
AccessToken: accessToken,
ClientID: clientID,
ClientSecret: clientSecret,
DeveloperToken: developerToken,
RefreshToken: refreshToken,
}
- var customQueries []shared.SourceGoogleAdsUpdateCustomQueries = nil
+ var customQueries []shared.CustomQueries = nil
for _, customQueriesItem := range r.Configuration.CustomQueries {
query := customQueriesItem.Query.ValueString()
tableName := customQueriesItem.TableName.ValueString()
- customQueries = append(customQueries, shared.SourceGoogleAdsUpdateCustomQueries{
+ customQueries = append(customQueries, shared.CustomQueries{
Query: query,
TableName: tableName,
})
diff --git a/internal/provider/source_googleanalyticsdataapi_data_source.go b/internal/provider/source_googleanalyticsdataapi_data_source.go
old mode 100755
new mode 100644
index 3de45f0c0..4b240112b
--- a/internal/provider/source_googleanalyticsdataapi_data_source.go
+++ b/internal/provider/source_googleanalyticsdataapi_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceGoogleAnalyticsDataAPIDataSource struct {
// SourceGoogleAnalyticsDataAPIDataSourceModel describes the data model.
type SourceGoogleAnalyticsDataAPIDataSourceModel struct {
- Configuration SourceGoogleAnalyticsDataAPI `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,158 +47,20 @@ func (r *SourceGoogleAnalyticsDataAPIDataSource) Schema(ctx context.Context, req
MarkdownDescription: "SourceGoogleAnalyticsDataAPI DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_google_analytics_data_api_credentials_authenticate_via_google_oauth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Google Analytics developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Google Analytics developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining a new access token.`,
- },
- },
- Description: `Credentials for the service`,
- },
- "source_google_analytics_data_api_credentials_service_account_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide.`,
- },
- },
- Description: `Credentials for the service`,
- },
- "source_google_analytics_data_api_update_credentials_authenticate_via_google_oauth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Google Analytics developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Google Analytics developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining a new access token.`,
- },
- },
- Description: `Credentials for the service`,
- },
- "source_google_analytics_data_api_update_credentials_service_account_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide.`,
- },
- },
- Description: `Credentials for the service`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Credentials for the service`,
- },
- "custom_reports": schema.StringAttribute{
- Computed: true,
- Description: `A JSON array describing the custom reports you want to sync from Google Analytics. See the documentation for more information about the exact format you can use to fill out this field.`,
- },
- "date_ranges_start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports.`,
- },
- "property_id": schema.StringAttribute{
- Computed: true,
- Description: `The Property ID is a unique number assigned to each property in Google Analytics, found in your GA4 property URL. This ID allows the connector to track the specific events associated with your property. Refer to the Google Analytics documentation to locate your property ID.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-analytics-data-api",
- ),
- },
- Description: `must be one of ["google-analytics-data-api"]`,
- },
- "window_in_days": schema.Int64Attribute{
- Computed: true,
- Description: `The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. Does not apply to custom Cohort reports. More information is available in the documentation.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_googleanalyticsdataapi_data_source_sdk.go b/internal/provider/source_googleanalyticsdataapi_data_source_sdk.go
old mode 100755
new mode 100644
index 46bf62c92..3e1dc4767
--- a/internal/provider/source_googleanalyticsdataapi_data_source_sdk.go
+++ b/internal/provider/source_googleanalyticsdataapi_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGoogleAnalyticsDataAPIDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_googleanalyticsdataapi_resource.go b/internal/provider/source_googleanalyticsdataapi_resource.go
old mode 100755
new mode 100644
index 3359ef86d..9015eb5ae
--- a/internal/provider/source_googleanalyticsdataapi_resource.go
+++ b/internal/provider/source_googleanalyticsdataapi_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceGoogleAnalyticsDataAPIResource struct {
// SourceGoogleAnalyticsDataAPIResourceModel describes the resource data model.
type SourceGoogleAnalyticsDataAPIResourceModel struct {
Configuration SourceGoogleAnalyticsDataAPI `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,22 +59,14 @@ func (r *SourceGoogleAnalyticsDataAPIResource) Schema(ctx context.Context, req r
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_google_analytics_data_api_credentials_authenticate_via_google_oauth": schema.SingleNestedAttribute{
+ "authenticate_via_google_oauth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of your Google Analytics developer application.`,
@@ -84,23 +77,15 @@ func (r *SourceGoogleAnalyticsDataAPIResource) Schema(ctx context.Context, req r
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The token for obtaining a new access token.`,
},
},
Description: `Credentials for the service`,
},
- "source_google_analytics_data_api_credentials_service_account_key_authentication": schema.SingleNestedAttribute{
+ "service_account_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
"credentials_json": schema.StringAttribute{
Required: true,
Description: `The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide.`,
@@ -108,99 +93,1167 @@ func (r *SourceGoogleAnalyticsDataAPIResource) Schema(ctx context.Context, req r
},
Description: `Credentials for the service`,
},
- "source_google_analytics_data_api_update_credentials_authenticate_via_google_oauth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Optional: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
+ },
+ Description: `Credentials for the service`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "custom_reports_array": schema.ListNestedAttribute{
+ Optional: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "dimension_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "and_group": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "expressions": schema.ListNestedAttribute{
+ Required: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "field_name": schema.StringAttribute{
+ Required: true,
+ },
+ "filter": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "between_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "from_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "to_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "in_list_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "values": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ },
+ },
+ "numeric_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "operation": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "string_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "match_type": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ },
+ },
+ Description: `The FilterExpressions in andGroup have an AND relationship.`,
+ },
+ "filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "field_name": schema.StringAttribute{
+ Required: true,
+ },
+ "filter": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "between_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "from_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "to_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "in_list_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "values": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ },
+ },
+ "numeric_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "operation": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "string_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "match_type": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ Description: `A primitive filter. In the same FilterExpression, all of the filter's field names need to be either all dimensions.`,
+ },
+ "not_expression": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "expression": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "field_name": schema.StringAttribute{
+ Required: true,
+ },
+ "filter": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "between_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "from_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "to_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "in_list_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "values": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ },
+ },
+ "numeric_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "operation": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "string_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "match_type": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ },
+ Description: `The FilterExpression is NOT of notExpression.`,
+ },
+ "or_group": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "expressions": schema.ListNestedAttribute{
+ Required: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "field_name": schema.StringAttribute{
+ Required: true,
+ },
+ "filter": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "between_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "from_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "to_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "in_list_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "values": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ },
+ },
+ "numeric_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "operation": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "string_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "match_type": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ },
+ },
+ Description: `The FilterExpressions in orGroup have an OR relationship.`,
},
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your Google Analytics developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your Google Analytics developer application.`,
},
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The token for obtaining a new access token.`,
+ Description: `Dimensions filter`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
},
},
- Description: `Credentials for the service`,
- },
- "source_google_analytics_data_api_update_credentials_service_account_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
+ "dimensions": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ Description: `A list of dimensions.`,
+ },
+ "metric_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "and_group": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "expressions": schema.ListNestedAttribute{
+ Required: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "field_name": schema.StringAttribute{
+ Required: true,
+ },
+ "filter": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "between_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "from_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "to_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "in_list_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "values": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ },
+ },
+ "numeric_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "operation": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "string_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "match_type": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ },
+ },
+ Description: `The FilterExpressions in andGroup have an AND relationship.`,
+ },
+ "filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "field_name": schema.StringAttribute{
+ Required: true,
+ },
+ "filter": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "between_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "from_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "to_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "in_list_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "values": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ },
+ },
+ "numeric_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "operation": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "string_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "match_type": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ Description: `A primitive filter. In the same FilterExpression, all of the filter's field names need to be either all metrics.`,
+ },
+ "not_expression": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "expression": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "field_name": schema.StringAttribute{
+ Required: true,
+ },
+ "filter": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "between_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "from_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "to_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "in_list_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "values": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ },
+ },
+ "numeric_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "operation": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "string_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "match_type": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ },
+ Description: `The FilterExpression is NOT of notExpression.`,
+ },
+ "or_group": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "expressions": schema.ListNestedAttribute{
+ Required: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "field_name": schema.StringAttribute{
+ Required: true,
+ },
+ "filter": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "between_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "from_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "to_value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "in_list_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "values": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ },
+ },
+ "numeric_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "operation": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "double_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.NumberAttribute{
+ Required: true,
+ },
+ },
+ },
+ "int64_value": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ "string_filter": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "case_sensitive": schema.BoolAttribute{
+ Optional: true,
+ },
+ "match_type": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ },
+ "value": schema.StringAttribute{
+ Required: true,
+ },
+ },
+ },
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ },
+ },
+ },
+ },
+ Description: `The FilterExpressions in orGroup have an OR relationship.`,
},
- Description: `must be one of ["Service"]`,
},
- "credentials_json": schema.StringAttribute{
- Required: true,
- Description: `The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide.`,
+ Description: `Metrics filter`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
},
},
- Description: `Credentials for the service`,
+ "metrics": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ Description: `A list of metrics.`,
+ },
+ "name": schema.StringAttribute{
+ Required: true,
+ Description: `The name of the custom report, this name would be used as stream name.`,
+ },
},
},
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Credentials for the service`,
- },
- "custom_reports": schema.StringAttribute{
- Optional: true,
- Description: `A JSON array describing the custom reports you want to sync from Google Analytics. See the documentation for more information about the exact format you can use to fill out this field.`,
+ Description: `You can add your Custom Analytics report by creating one.`,
},
"date_ranges_start_date": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports.`,
},
- "property_id": schema.StringAttribute{
+ "property_ids": schema.ListAttribute{
Required: true,
- Description: `The Property ID is a unique number assigned to each property in Google Analytics, found in your GA4 property URL. This ID allows the connector to track the specific events associated with your property. Refer to the Google Analytics documentation to locate your property ID.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-analytics-data-api",
- ),
- },
- Description: `must be one of ["google-analytics-data-api"]`,
+ ElementType: types.StringType,
+ Description: `A list of your Property IDs. The Property ID is a unique number assigned to each property in Google Analytics, found in your GA4 property URL. This ID allows the connector to track the specific events associated with your property. Refer to the Google Analytics documentation to locate your property ID.`,
},
"window_in_days": schema.Int64Attribute{
- Optional: true,
- Description: `The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. Does not apply to custom Cohort reports. More information is available in the documentation.`,
+ Optional: true,
+ MarkdownDescription: `Default: 1` + "\n" +
+ `The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. Does not apply to custom Cohort reports. More information is available in the documentation.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -264,7 +1317,7 @@ func (r *SourceGoogleAnalyticsDataAPIResource) Create(ctx context.Context, req r
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGoogleAnalyticsDataAPI(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -440,5 +1493,5 @@ func (r *SourceGoogleAnalyticsDataAPIResource) Delete(ctx context.Context, req r
}
func (r *SourceGoogleAnalyticsDataAPIResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_googleanalyticsdataapi_resource_sdk.go b/internal/provider/source_googleanalyticsdataapi_resource_sdk.go
old mode 100755
new mode 100644
index ab4f95893..ac8d95d55
--- a/internal/provider/source_googleanalyticsdataapi_resource_sdk.go
+++ b/internal/provider/source_googleanalyticsdataapi_resource_sdk.go
@@ -3,73 +3,1425 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGoogleAnalyticsDataAPIResourceModel) ToCreateSDKType() *shared.SourceGoogleAnalyticsDataAPICreateRequest {
var credentials *shared.SourceGoogleAnalyticsDataAPICredentials
if r.Configuration.Credentials != nil {
- var sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth *shared.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth
- if r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth != nil {
+ var sourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth *shared.SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth
+ if r.Configuration.Credentials.AuthenticateViaGoogleOauth != nil {
accessToken := new(string)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth.AccessToken.ValueString()
+ if !r.Configuration.Credentials.AuthenticateViaGoogleOauth.AccessToken.IsUnknown() && !r.Configuration.Credentials.AuthenticateViaGoogleOauth.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Credentials.AuthenticateViaGoogleOauth.AccessToken.ValueString()
} else {
accessToken = nil
}
- authType := new(shared.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth.AuthType.IsNull() {
- *authType = shared.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType(r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth.RefreshToken.ValueString()
- sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth = &shared.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth{
+ clientID := r.Configuration.Credentials.AuthenticateViaGoogleOauth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaGoogleOauth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaGoogleOauth.RefreshToken.ValueString()
+ sourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth = &shared.SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth != nil {
+ if sourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth != nil {
credentials = &shared.SourceGoogleAnalyticsDataAPICredentials{
- SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth: sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth,
+ SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth: sourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth,
}
}
- var sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication *shared.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication
- if r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication != nil {
- authType1 := new(shared.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication.AuthType.IsNull() {
- *authType1 = shared.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType(r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- credentialsJSON := r.Configuration.Credentials.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication.CredentialsJSON.ValueString()
- sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication = &shared.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication{
- AuthType: authType1,
+ var sourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication *shared.SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication
+ if r.Configuration.Credentials.ServiceAccountKeyAuthentication != nil {
+ credentialsJSON := r.Configuration.Credentials.ServiceAccountKeyAuthentication.CredentialsJSON.ValueString()
+ sourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication = &shared.SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication{
CredentialsJSON: credentialsJSON,
}
}
- if sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication != nil {
+ if sourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication != nil {
credentials = &shared.SourceGoogleAnalyticsDataAPICredentials{
- SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication: sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication,
+ SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication: sourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication,
}
}
}
- customReports := new(string)
- if !r.Configuration.CustomReports.IsUnknown() && !r.Configuration.CustomReports.IsNull() {
- *customReports = r.Configuration.CustomReports.ValueString()
+ var customReportsArray []shared.SourceGoogleAnalyticsDataAPICustomReportConfig = nil
+ for _, customReportsArrayItem := range r.Configuration.CustomReportsArray {
+ var dimensionFilter *shared.SourceGoogleAnalyticsDataAPIDimensionsFilter
+ if customReportsArrayItem.DimensionFilter != nil {
+ var sourceGoogleAnalyticsDataAPIAndGroup *shared.SourceGoogleAnalyticsDataAPIAndGroup
+ if customReportsArrayItem.DimensionFilter.AndGroup != nil {
+ var expressions []shared.SourceGoogleAnalyticsDataAPIExpression = nil
+ for _, expressionsItem := range customReportsArrayItem.DimensionFilter.AndGroup.Expressions {
+ fieldName := expressionsItem.FieldName.ValueString()
+ var filter shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter
+ if expressionsItem.Filter.StringFilter != nil {
+ caseSensitive := new(bool)
+ if !expressionsItem.Filter.StringFilter.CaseSensitive.IsUnknown() && !expressionsItem.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive = expressionsItem.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive = nil
+ }
+ var matchType []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = nil
+ for _, matchTypeItem := range expressionsItem.Filter.StringFilter.MatchType {
+ matchType = append(matchType, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums(matchTypeItem.ValueString()))
+ }
+ value := expressionsItem.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter{
+ CaseSensitive: caseSensitive,
+ MatchType: matchType,
+ Value: value,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter != nil {
+ filter = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter
+ if expressionsItem.Filter.InListFilter != nil {
+ caseSensitive1 := new(bool)
+ if !expressionsItem.Filter.InListFilter.CaseSensitive.IsUnknown() && !expressionsItem.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive1 = expressionsItem.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive1 = nil
+ }
+ var values []string = nil
+ for _, valuesItem := range expressionsItem.Filter.InListFilter.Values {
+ values = append(values, valuesItem.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter{
+ CaseSensitive: caseSensitive1,
+ Values: values,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter != nil {
+ filter = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter
+ if expressionsItem.Filter.NumericFilter != nil {
+ var operation []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = nil
+ for _, operationItem := range expressionsItem.Filter.NumericFilter.Operation {
+ operation = append(operation, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums(operationItem.ValueString()))
+ }
+ var value1 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value
+ if expressionsItem.Filter.NumericFilter.Value.Int64Value != nil {
+ value2 := expressionsItem.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value{
+ Value: value2,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value != nil {
+ value1 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue
+ if expressionsItem.Filter.NumericFilter.Value.DoubleValue != nil {
+ value3, _ := expressionsItem.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue{
+ Value: value3,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue != nil {
+ value1 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter{
+ Operation: operation,
+ Value: value1,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter != nil {
+ filter = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter
+ if expressionsItem.Filter.BetweenFilter != nil {
+ var fromValue shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value
+ if expressionsItem.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value4 := expressionsItem.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value{
+ Value: value4,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value != nil {
+ fromValue = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue
+ if expressionsItem.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value5, _ := expressionsItem.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue{
+ Value: value5,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue != nil {
+ fromValue = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue,
+ }
+ }
+ var toValue shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value
+ if expressionsItem.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value6 := expressionsItem.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value{
+ Value: value6,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value != nil {
+ toValue = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue
+ if expressionsItem.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value7, _ := expressionsItem.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue{
+ Value: value7,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue != nil {
+ toValue = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter{
+ FromValue: fromValue,
+ ToValue: toValue,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter != nil {
+ filter = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter,
+ }
+ }
+ expressions = append(expressions, shared.SourceGoogleAnalyticsDataAPIExpression{
+ FieldName: fieldName,
+ Filter: filter,
+ })
+ }
+ sourceGoogleAnalyticsDataAPIAndGroup = &shared.SourceGoogleAnalyticsDataAPIAndGroup{
+ Expressions: expressions,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIAndGroup != nil {
+ dimensionFilter = &shared.SourceGoogleAnalyticsDataAPIDimensionsFilter{
+ SourceGoogleAnalyticsDataAPIAndGroup: sourceGoogleAnalyticsDataAPIAndGroup,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIOrGroup *shared.SourceGoogleAnalyticsDataAPIOrGroup
+ if customReportsArrayItem.DimensionFilter.OrGroup != nil {
+ var expressions1 []shared.SourceGoogleAnalyticsDataAPISchemasExpression = nil
+ for _, expressionsItem1 := range customReportsArrayItem.DimensionFilter.OrGroup.Expressions {
+ fieldName1 := expressionsItem1.FieldName.ValueString()
+ var filter1 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter
+ if expressionsItem1.Filter.StringFilter != nil {
+ caseSensitive2 := new(bool)
+ if !expressionsItem1.Filter.StringFilter.CaseSensitive.IsUnknown() && !expressionsItem1.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive2 = expressionsItem1.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive2 = nil
+ }
+ var matchType1 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = nil
+ for _, matchTypeItem1 := range expressionsItem1.Filter.StringFilter.MatchType {
+ matchType1 = append(matchType1, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums(matchTypeItem1.ValueString()))
+ }
+ value8 := expressionsItem1.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter{
+ CaseSensitive: caseSensitive2,
+ MatchType: matchType1,
+ Value: value8,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter != nil {
+ filter1 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter
+ if expressionsItem1.Filter.InListFilter != nil {
+ caseSensitive3 := new(bool)
+ if !expressionsItem1.Filter.InListFilter.CaseSensitive.IsUnknown() && !expressionsItem1.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive3 = expressionsItem1.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive3 = nil
+ }
+ var values1 []string = nil
+ for _, valuesItem1 := range expressionsItem1.Filter.InListFilter.Values {
+ values1 = append(values1, valuesItem1.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter{
+ CaseSensitive: caseSensitive3,
+ Values: values1,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter != nil {
+ filter1 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter
+ if expressionsItem1.Filter.NumericFilter != nil {
+ var operation1 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums = nil
+ for _, operationItem1 := range expressionsItem1.Filter.NumericFilter.Operation {
+ operation1 = append(operation1, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums(operationItem1.ValueString()))
+ }
+ var value9 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value
+ if expressionsItem1.Filter.NumericFilter.Value.Int64Value != nil {
+ value10 := expressionsItem1.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value{
+ Value: value10,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value != nil {
+ value9 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue
+ if expressionsItem1.Filter.NumericFilter.Value.DoubleValue != nil {
+ value11, _ := expressionsItem1.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue{
+ Value: value11,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue != nil {
+ value9 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter{
+ Operation: operation1,
+ Value: value9,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter != nil {
+ filter1 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter
+ if expressionsItem1.Filter.BetweenFilter != nil {
+ var fromValue1 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
+ if expressionsItem1.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value12 := expressionsItem1.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value{
+ Value: value12,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value != nil {
+ fromValue1 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue
+ if expressionsItem1.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value13, _ := expressionsItem1.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue{
+ Value: value13,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue != nil {
+ fromValue1 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue,
+ }
+ }
+ var toValue1 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value
+ if expressionsItem1.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value14 := expressionsItem1.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value{
+ Value: value14,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value != nil {
+ toValue1 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue
+ if expressionsItem1.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value15, _ := expressionsItem1.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue{
+ Value: value15,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue != nil {
+ toValue1 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter{
+ FromValue: fromValue1,
+ ToValue: toValue1,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter != nil {
+ filter1 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter,
+ }
+ }
+ expressions1 = append(expressions1, shared.SourceGoogleAnalyticsDataAPISchemasExpression{
+ FieldName: fieldName1,
+ Filter: filter1,
+ })
+ }
+ sourceGoogleAnalyticsDataAPIOrGroup = &shared.SourceGoogleAnalyticsDataAPIOrGroup{
+ Expressions: expressions1,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIOrGroup != nil {
+ dimensionFilter = &shared.SourceGoogleAnalyticsDataAPIDimensionsFilter{
+ SourceGoogleAnalyticsDataAPIOrGroup: sourceGoogleAnalyticsDataAPIOrGroup,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPINotExpression *shared.SourceGoogleAnalyticsDataAPINotExpression
+ if customReportsArrayItem.DimensionFilter.NotExpression != nil {
+ var expression *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression != nil {
+ fieldName2 := customReportsArrayItem.DimensionFilter.NotExpression.Expression.FieldName.ValueString()
+ var filter2 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter != nil {
+ caseSensitive4 := new(bool)
+ if !customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive4 = customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive4 = nil
+ }
+ var matchType2 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = nil
+ for _, matchTypeItem2 := range customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter.MatchType {
+ matchType2 = append(matchType2, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums(matchTypeItem2.ValueString()))
+ }
+ value16 := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter{
+ CaseSensitive: caseSensitive4,
+ MatchType: matchType2,
+ Value: value16,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter != nil {
+ filter2 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.InListFilter != nil {
+ caseSensitive5 := new(bool)
+ if !customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive5 = customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive5 = nil
+ }
+ var values2 []string = nil
+ for _, valuesItem2 := range customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.InListFilter.Values {
+ values2 = append(values2, valuesItem2.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter{
+ CaseSensitive: caseSensitive5,
+ Values: values2,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter != nil {
+ filter2 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter != nil {
+ var operation2 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = nil
+ for _, operationItem2 := range customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter.Operation {
+ operation2 = append(operation2, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums(operationItem2.ValueString()))
+ }
+ var value17 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter.Value.Int64Value != nil {
+ value18 := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value{
+ Value: value18,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value != nil {
+ value17 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter.Value.DoubleValue != nil {
+ value19, _ := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue{
+ Value: value19,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue != nil {
+ value17 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter{
+ Operation: operation2,
+ Value: value17,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter != nil {
+ filter2 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter != nil {
+ var fromValue2 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value20 := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value{
+ Value: value20,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value != nil {
+ fromValue2 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value21, _ := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue{
+ Value: value21,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue != nil {
+ fromValue2 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue,
+ }
+ }
+ var toValue2 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value22 := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value{
+ Value: value22,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value != nil {
+ toValue2 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value23, _ := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue{
+ Value: value23,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue != nil {
+ toValue2 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter{
+ FromValue: fromValue2,
+ ToValue: toValue2,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter != nil {
+ filter2 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter,
+ }
+ }
+ expression = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression{
+ FieldName: fieldName2,
+ Filter: filter2,
+ }
+ }
+ sourceGoogleAnalyticsDataAPINotExpression = &shared.SourceGoogleAnalyticsDataAPINotExpression{
+ Expression: expression,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPINotExpression != nil {
+ dimensionFilter = &shared.SourceGoogleAnalyticsDataAPIDimensionsFilter{
+ SourceGoogleAnalyticsDataAPINotExpression: sourceGoogleAnalyticsDataAPINotExpression,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIFilter *shared.SourceGoogleAnalyticsDataAPIFilter
+ if customReportsArrayItem.DimensionFilter.Filter != nil {
+ fieldName3 := customReportsArrayItem.DimensionFilter.Filter.FieldName.ValueString()
+ var filter3 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter
+ var sourceGoogleAnalyticsDataAPIStringFilter *shared.SourceGoogleAnalyticsDataAPIStringFilter
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter != nil {
+ caseSensitive6 := new(bool)
+ if !customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive6 = customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive6 = nil
+ }
+ var matchType3 []shared.SourceGoogleAnalyticsDataAPIValidEnums = nil
+ for _, matchTypeItem3 := range customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter.MatchType {
+ matchType3 = append(matchType3, shared.SourceGoogleAnalyticsDataAPIValidEnums(matchTypeItem3.ValueString()))
+ }
+ value24 := customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIStringFilter = &shared.SourceGoogleAnalyticsDataAPIStringFilter{
+ CaseSensitive: caseSensitive6,
+ MatchType: matchType3,
+ Value: value24,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIStringFilter != nil {
+ filter3 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIStringFilter: sourceGoogleAnalyticsDataAPIStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIInListFilter *shared.SourceGoogleAnalyticsDataAPIInListFilter
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.InListFilter != nil {
+ caseSensitive7 := new(bool)
+ if !customReportsArrayItem.DimensionFilter.Filter.Filter.InListFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.DimensionFilter.Filter.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive7 = customReportsArrayItem.DimensionFilter.Filter.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive7 = nil
+ }
+ var values3 []string = nil
+ for _, valuesItem3 := range customReportsArrayItem.DimensionFilter.Filter.Filter.InListFilter.Values {
+ values3 = append(values3, valuesItem3.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPIInListFilter = &shared.SourceGoogleAnalyticsDataAPIInListFilter{
+ CaseSensitive: caseSensitive7,
+ Values: values3,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIInListFilter != nil {
+ filter3 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIInListFilter: sourceGoogleAnalyticsDataAPIInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPINumericFilter *shared.SourceGoogleAnalyticsDataAPINumericFilter
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter != nil {
+ var operation3 []shared.SourceGoogleAnalyticsDataAPISchemasValidEnums = nil
+ for _, operationItem3 := range customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter.Operation {
+ operation3 = append(operation3, shared.SourceGoogleAnalyticsDataAPISchemasValidEnums(operationItem3.ValueString()))
+ }
+ var value25 shared.SourceGoogleAnalyticsDataAPIValue
+ var sourceGoogleAnalyticsDataAPIInt64Value *shared.SourceGoogleAnalyticsDataAPIInt64Value
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter.Value.Int64Value != nil {
+ value26 := customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIInt64Value = &shared.SourceGoogleAnalyticsDataAPIInt64Value{
+ Value: value26,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIInt64Value != nil {
+ value25 = shared.SourceGoogleAnalyticsDataAPIValue{
+ SourceGoogleAnalyticsDataAPIInt64Value: sourceGoogleAnalyticsDataAPIInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIDoubleValue *shared.SourceGoogleAnalyticsDataAPIDoubleValue
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter.Value.DoubleValue != nil {
+ value27, _ := customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIDoubleValue = &shared.SourceGoogleAnalyticsDataAPIDoubleValue{
+ Value: value27,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIDoubleValue != nil {
+ value25 = shared.SourceGoogleAnalyticsDataAPIValue{
+ SourceGoogleAnalyticsDataAPIDoubleValue: sourceGoogleAnalyticsDataAPIDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPINumericFilter = &shared.SourceGoogleAnalyticsDataAPINumericFilter{
+ Operation: operation3,
+ Value: value25,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPINumericFilter != nil {
+ filter3 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPINumericFilter: sourceGoogleAnalyticsDataAPINumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIBetweenFilter *shared.SourceGoogleAnalyticsDataAPIBetweenFilter
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter != nil {
+ var fromValue3 shared.SourceGoogleAnalyticsDataAPIFromValue
+ var sourceGoogleAnalyticsDataAPISchemasInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasInt64Value
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value28 := customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasInt64Value{
+ Value: value28,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasInt64Value != nil {
+ fromValue3 = shared.SourceGoogleAnalyticsDataAPIFromValue{
+ SourceGoogleAnalyticsDataAPISchemasInt64Value: sourceGoogleAnalyticsDataAPISchemasInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasDoubleValue
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value29, _ := customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasDoubleValue{
+ Value: value29,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasDoubleValue != nil {
+ fromValue3 = shared.SourceGoogleAnalyticsDataAPIFromValue{
+ SourceGoogleAnalyticsDataAPISchemasDoubleValue: sourceGoogleAnalyticsDataAPISchemasDoubleValue,
+ }
+ }
+ var toValue3 shared.SourceGoogleAnalyticsDataAPIToValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value30 := customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value{
+ Value: value30,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value != nil {
+ toValue3 = shared.SourceGoogleAnalyticsDataAPIToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value31, _ := customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue{
+ Value: value31,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue != nil {
+ toValue3 = shared.SourceGoogleAnalyticsDataAPIToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIBetweenFilter = &shared.SourceGoogleAnalyticsDataAPIBetweenFilter{
+ FromValue: fromValue3,
+ ToValue: toValue3,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIBetweenFilter != nil {
+ filter3 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIBetweenFilter: sourceGoogleAnalyticsDataAPIBetweenFilter,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIFilter = &shared.SourceGoogleAnalyticsDataAPIFilter{
+ FieldName: fieldName3,
+ Filter: filter3,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIFilter != nil {
+ dimensionFilter = &shared.SourceGoogleAnalyticsDataAPIDimensionsFilter{
+ SourceGoogleAnalyticsDataAPIFilter: sourceGoogleAnalyticsDataAPIFilter,
+ }
+ }
+ }
+ var dimensions []string = nil
+ for _, dimensionsItem := range customReportsArrayItem.Dimensions {
+ dimensions = append(dimensions, dimensionsItem.ValueString())
+ }
+ var metricFilter *shared.SourceGoogleAnalyticsDataAPIMetricsFilter
+ if customReportsArrayItem.MetricFilter != nil {
+ var sourceGoogleAnalyticsDataAPISchemasAndGroup *shared.SourceGoogleAnalyticsDataAPISchemasAndGroup
+ if customReportsArrayItem.MetricFilter.AndGroup != nil {
+ var expressions2 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression = nil
+ for _, expressionsItem2 := range customReportsArrayItem.MetricFilter.AndGroup.Expressions {
+ fieldName4 := expressionsItem2.FieldName.ValueString()
+ var filter4 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter
+ if expressionsItem2.Filter.StringFilter != nil {
+ caseSensitive8 := new(bool)
+ if !expressionsItem2.Filter.StringFilter.CaseSensitive.IsUnknown() && !expressionsItem2.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive8 = expressionsItem2.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive8 = nil
+ }
+ var matchType4 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = nil
+ for _, matchTypeItem4 := range expressionsItem2.Filter.StringFilter.MatchType {
+ matchType4 = append(matchType4, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums(matchTypeItem4.ValueString()))
+ }
+ value32 := expressionsItem2.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter{
+ CaseSensitive: caseSensitive8,
+ MatchType: matchType4,
+ Value: value32,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter != nil {
+ filter4 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter
+ if expressionsItem2.Filter.InListFilter != nil {
+ caseSensitive9 := new(bool)
+ if !expressionsItem2.Filter.InListFilter.CaseSensitive.IsUnknown() && !expressionsItem2.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive9 = expressionsItem2.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive9 = nil
+ }
+ var values4 []string = nil
+ for _, valuesItem4 := range expressionsItem2.Filter.InListFilter.Values {
+ values4 = append(values4, valuesItem4.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter{
+ CaseSensitive: caseSensitive9,
+ Values: values4,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter != nil {
+ filter4 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter
+ if expressionsItem2.Filter.NumericFilter != nil {
+ var operation4 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = nil
+ for _, operationItem4 := range expressionsItem2.Filter.NumericFilter.Operation {
+ operation4 = append(operation4, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums(operationItem4.ValueString()))
+ }
+ var value33 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+ if expressionsItem2.Filter.NumericFilter.Value.Int64Value != nil {
+ value34 := expressionsItem2.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value{
+ Value: value34,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value != nil {
+ value33 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+ if expressionsItem2.Filter.NumericFilter.Value.DoubleValue != nil {
+ value35, _ := expressionsItem2.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue{
+ Value: value35,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue != nil {
+ value33 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter{
+ Operation: operation4,
+ Value: value33,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter != nil {
+ filter4 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter
+ if expressionsItem2.Filter.BetweenFilter != nil {
+ var fromValue4 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value
+ if expressionsItem2.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value36 := expressionsItem2.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value{
+ Value: value36,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value != nil {
+ fromValue4 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue
+ if expressionsItem2.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value37, _ := expressionsItem2.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue{
+ Value: value37,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue != nil {
+ fromValue4 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue,
+ }
+ }
+ var toValue4 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value
+ if expressionsItem2.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value38 := expressionsItem2.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value{
+ Value: value38,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value != nil {
+ toValue4 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue
+ if expressionsItem2.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value39, _ := expressionsItem2.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue{
+ Value: value39,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue != nil {
+ toValue4 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter{
+ FromValue: fromValue4,
+ ToValue: toValue4,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter != nil {
+ filter4 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter,
+ }
+ }
+ expressions2 = append(expressions2, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression{
+ FieldName: fieldName4,
+ Filter: filter4,
+ })
+ }
+ sourceGoogleAnalyticsDataAPISchemasAndGroup = &shared.SourceGoogleAnalyticsDataAPISchemasAndGroup{
+ Expressions: expressions2,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasAndGroup != nil {
+ metricFilter = &shared.SourceGoogleAnalyticsDataAPIMetricsFilter{
+ SourceGoogleAnalyticsDataAPISchemasAndGroup: sourceGoogleAnalyticsDataAPISchemasAndGroup,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasOrGroup *shared.SourceGoogleAnalyticsDataAPISchemasOrGroup
+ if customReportsArrayItem.MetricFilter.OrGroup != nil {
+ var expressions3 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression = nil
+ for _, expressionsItem3 := range customReportsArrayItem.MetricFilter.OrGroup.Expressions {
+ fieldName5 := expressionsItem3.FieldName.ValueString()
+ var filter5 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter
+ if expressionsItem3.Filter.StringFilter != nil {
+ caseSensitive10 := new(bool)
+ if !expressionsItem3.Filter.StringFilter.CaseSensitive.IsUnknown() && !expressionsItem3.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive10 = expressionsItem3.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive10 = nil
+ }
+ var matchType5 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = nil
+ for _, matchTypeItem5 := range expressionsItem3.Filter.StringFilter.MatchType {
+ matchType5 = append(matchType5, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums(matchTypeItem5.ValueString()))
+ }
+ value40 := expressionsItem3.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter{
+ CaseSensitive: caseSensitive10,
+ MatchType: matchType5,
+ Value: value40,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter != nil {
+ filter5 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter
+ if expressionsItem3.Filter.InListFilter != nil {
+ caseSensitive11 := new(bool)
+ if !expressionsItem3.Filter.InListFilter.CaseSensitive.IsUnknown() && !expressionsItem3.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive11 = expressionsItem3.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive11 = nil
+ }
+ var values5 []string = nil
+ for _, valuesItem5 := range expressionsItem3.Filter.InListFilter.Values {
+ values5 = append(values5, valuesItem5.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter{
+ CaseSensitive: caseSensitive11,
+ Values: values5,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter != nil {
+ filter5 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter
+ if expressionsItem3.Filter.NumericFilter != nil {
+ var operation5 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = nil
+ for _, operationItem5 := range expressionsItem3.Filter.NumericFilter.Operation {
+ operation5 = append(operation5, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums(operationItem5.ValueString()))
+ }
+ var value41 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value
+ if expressionsItem3.Filter.NumericFilter.Value.Int64Value != nil {
+ value42 := expressionsItem3.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value{
+ Value: value42,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value != nil {
+ value41 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue
+ if expressionsItem3.Filter.NumericFilter.Value.DoubleValue != nil {
+ value43, _ := expressionsItem3.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue{
+ Value: value43,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue != nil {
+ value41 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter{
+ Operation: operation5,
+ Value: value41,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter != nil {
+ filter5 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter
+ if expressionsItem3.Filter.BetweenFilter != nil {
+ var fromValue5 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
+ if expressionsItem3.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value44 := expressionsItem3.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value{
+ Value: value44,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value != nil {
+ fromValue5 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
+ if expressionsItem3.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value45, _ := expressionsItem3.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue{
+ Value: value45,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue != nil {
+ fromValue5 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue,
+ }
+ }
+ var toValue5 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
+ if expressionsItem3.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value46 := expressionsItem3.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value{
+ Value: value46,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value != nil {
+ toValue5 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
+ if expressionsItem3.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value47, _ := expressionsItem3.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue{
+ Value: value47,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue != nil {
+ toValue5 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter{
+ FromValue: fromValue5,
+ ToValue: toValue5,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter != nil {
+ filter5 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter,
+ }
+ }
+ expressions3 = append(expressions3, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression{
+ FieldName: fieldName5,
+ Filter: filter5,
+ })
+ }
+ sourceGoogleAnalyticsDataAPISchemasOrGroup = &shared.SourceGoogleAnalyticsDataAPISchemasOrGroup{
+ Expressions: expressions3,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasOrGroup != nil {
+ metricFilter = &shared.SourceGoogleAnalyticsDataAPIMetricsFilter{
+ SourceGoogleAnalyticsDataAPISchemasOrGroup: sourceGoogleAnalyticsDataAPISchemasOrGroup,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasNotExpression *shared.SourceGoogleAnalyticsDataAPISchemasNotExpression
+ if customReportsArrayItem.MetricFilter.NotExpression != nil {
+ var expression1 *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Expression
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression != nil {
+ fieldName6 := customReportsArrayItem.MetricFilter.NotExpression.Expression.FieldName.ValueString()
+ var filter6 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter != nil {
+ caseSensitive12 := new(bool)
+ if !customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive12 = customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive12 = nil
+ }
+ var matchType6 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = nil
+ for _, matchTypeItem6 := range customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter.MatchType {
+ matchType6 = append(matchType6, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums(matchTypeItem6.ValueString()))
+ }
+ value48 := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter{
+ CaseSensitive: caseSensitive12,
+ MatchType: matchType6,
+ Value: value48,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter != nil {
+ filter6 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.InListFilter != nil {
+ caseSensitive13 := new(bool)
+ if !customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive13 = customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive13 = nil
+ }
+ var values6 []string = nil
+ for _, valuesItem6 := range customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.InListFilter.Values {
+ values6 = append(values6, valuesItem6.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter{
+ CaseSensitive: caseSensitive13,
+ Values: values6,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter != nil {
+ filter6 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter != nil {
+ var operation6 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = nil
+ for _, operationItem6 := range customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter.Operation {
+ operation6 = append(operation6, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums(operationItem6.ValueString()))
+ }
+ var value49 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter.Value.Int64Value != nil {
+ value50 := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value{
+ Value: value50,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value != nil {
+ value49 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter.Value.DoubleValue != nil {
+ value51, _ := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue{
+ Value: value51,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue != nil {
+ value49 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter{
+ Operation: operation6,
+ Value: value49,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter != nil {
+ filter6 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter != nil {
+ var fromValue6 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value52 := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value{
+ Value: value52,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value != nil {
+ fromValue6 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value53, _ := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue{
+ Value: value53,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue != nil {
+ fromValue6 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue,
+ }
+ }
+ var toValue6 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value54 := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value{
+ Value: value54,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value != nil {
+ toValue6 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value55, _ := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue{
+ Value: value55,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue != nil {
+ toValue6 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter{
+ FromValue: fromValue6,
+ ToValue: toValue6,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter != nil {
+ filter6 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter,
+ }
+ }
+ expression1 = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Expression{
+ FieldName: fieldName6,
+ Filter: filter6,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasNotExpression = &shared.SourceGoogleAnalyticsDataAPISchemasNotExpression{
+ Expression: expression1,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasNotExpression != nil {
+ metricFilter = &shared.SourceGoogleAnalyticsDataAPIMetricsFilter{
+ SourceGoogleAnalyticsDataAPISchemasNotExpression: sourceGoogleAnalyticsDataAPISchemasNotExpression,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasFilter *shared.SourceGoogleAnalyticsDataAPISchemasFilter
+ if customReportsArrayItem.MetricFilter.Filter != nil {
+ fieldName7 := customReportsArrayItem.MetricFilter.Filter.FieldName.ValueString()
+ var filter7 shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter
+ var sourceGoogleAnalyticsDataAPISchemasStringFilter *shared.SourceGoogleAnalyticsDataAPISchemasStringFilter
+ if customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter != nil {
+ caseSensitive14 := new(bool)
+ if !customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive14 = customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive14 = nil
+ }
+ var matchType7 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums = nil
+ for _, matchTypeItem7 := range customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter.MatchType {
+ matchType7 = append(matchType7, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums(matchTypeItem7.ValueString()))
+ }
+ value56 := customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasStringFilter = &shared.SourceGoogleAnalyticsDataAPISchemasStringFilter{
+ CaseSensitive: caseSensitive14,
+ MatchType: matchType7,
+ Value: value56,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasStringFilter != nil {
+ filter7 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasStringFilter: sourceGoogleAnalyticsDataAPISchemasStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasInListFilter *shared.SourceGoogleAnalyticsDataAPISchemasInListFilter
+ if customReportsArrayItem.MetricFilter.Filter.Filter.InListFilter != nil {
+ caseSensitive15 := new(bool)
+ if !customReportsArrayItem.MetricFilter.Filter.Filter.InListFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.MetricFilter.Filter.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive15 = customReportsArrayItem.MetricFilter.Filter.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive15 = nil
+ }
+ var values7 []string = nil
+ for _, valuesItem7 := range customReportsArrayItem.MetricFilter.Filter.Filter.InListFilter.Values {
+ values7 = append(values7, valuesItem7.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPISchemasInListFilter = &shared.SourceGoogleAnalyticsDataAPISchemasInListFilter{
+ CaseSensitive: caseSensitive15,
+ Values: values7,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasInListFilter != nil {
+ filter7 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasInListFilter: sourceGoogleAnalyticsDataAPISchemasInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasNumericFilter *shared.SourceGoogleAnalyticsDataAPISchemasNumericFilter
+ if customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter != nil {
+ var operation7 []shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums = nil
+ for _, operationItem7 := range customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter.Operation {
+ operation7 = append(operation7, shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums(operationItem7.ValueString()))
+ }
+ var value57 shared.SourceGoogleAnalyticsDataAPISchemasValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value
+ if customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter.Value.Int64Value != nil {
+ value58 := customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value{
+ Value: value58,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value != nil {
+ value57 = shared.SourceGoogleAnalyticsDataAPISchemasValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue
+ if customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter.Value.DoubleValue != nil {
+ value59, _ := customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue{
+ Value: value59,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue != nil {
+ value57 = shared.SourceGoogleAnalyticsDataAPISchemasValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasNumericFilter = &shared.SourceGoogleAnalyticsDataAPISchemasNumericFilter{
+ Operation: operation7,
+ Value: value57,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasNumericFilter != nil {
+ filter7 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasNumericFilter: sourceGoogleAnalyticsDataAPISchemasNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasBetweenFilter *shared.SourceGoogleAnalyticsDataAPISchemasBetweenFilter
+ if customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter != nil {
+ var fromValue7 shared.SourceGoogleAnalyticsDataAPISchemasFromValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value
+ if customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value60 := customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value{
+ Value: value60,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value != nil {
+ fromValue7 = shared.SourceGoogleAnalyticsDataAPISchemasFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue
+ if customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value61, _ := customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue{
+ Value: value61,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue != nil {
+ fromValue7 = shared.SourceGoogleAnalyticsDataAPISchemasFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue,
+ }
+ }
+ var toValue7 shared.SourceGoogleAnalyticsDataAPISchemasToValue
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value
+ if customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value62 := customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value{
+ Value: value62,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value != nil {
+ toValue7 = shared.SourceGoogleAnalyticsDataAPISchemasToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue *shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue
+ if customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value63, _ := customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue = &shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue{
+ Value: value63,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue != nil {
+ toValue7 = shared.SourceGoogleAnalyticsDataAPISchemasToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue: sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasBetweenFilter = &shared.SourceGoogleAnalyticsDataAPISchemasBetweenFilter{
+ FromValue: fromValue7,
+ ToValue: toValue7,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasBetweenFilter != nil {
+ filter7 = shared.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasBetweenFilter: sourceGoogleAnalyticsDataAPISchemasBetweenFilter,
+ }
+ }
+ sourceGoogleAnalyticsDataAPISchemasFilter = &shared.SourceGoogleAnalyticsDataAPISchemasFilter{
+ FieldName: fieldName7,
+ Filter: filter7,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPISchemasFilter != nil {
+ metricFilter = &shared.SourceGoogleAnalyticsDataAPIMetricsFilter{
+ SourceGoogleAnalyticsDataAPISchemasFilter: sourceGoogleAnalyticsDataAPISchemasFilter,
+ }
+ }
+ }
+ var metrics []string = nil
+ for _, metricsItem := range customReportsArrayItem.Metrics {
+ metrics = append(metrics, metricsItem.ValueString())
+ }
+ name := customReportsArrayItem.Name.ValueString()
+ customReportsArray = append(customReportsArray, shared.SourceGoogleAnalyticsDataAPICustomReportConfig{
+ DimensionFilter: dimensionFilter,
+ Dimensions: dimensions,
+ MetricFilter: metricFilter,
+ Metrics: metrics,
+ Name: name,
+ })
+ }
+ dateRangesStartDate := new(customTypes.Date)
+ if !r.Configuration.DateRangesStartDate.IsUnknown() && !r.Configuration.DateRangesStartDate.IsNull() {
+ dateRangesStartDate = customTypes.MustNewDateFromString(r.Configuration.DateRangesStartDate.ValueString())
} else {
- customReports = nil
+ dateRangesStartDate = nil
+ }
+ var propertyIds []string = nil
+ for _, propertyIdsItem := range r.Configuration.PropertyIds {
+ propertyIds = append(propertyIds, propertyIdsItem.ValueString())
}
- dateRangesStartDate := customTypes.MustDateFromString(r.Configuration.DateRangesStartDate.ValueString())
- propertyID := r.Configuration.PropertyID.ValueString()
- sourceType := shared.SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI(r.Configuration.SourceType.ValueString())
windowInDays := new(int64)
if !r.Configuration.WindowInDays.IsUnknown() && !r.Configuration.WindowInDays.IsNull() {
*windowInDays = r.Configuration.WindowInDays.ValueInt64()
@@ -78,13 +1430,18 @@ func (r *SourceGoogleAnalyticsDataAPIResourceModel) ToCreateSDKType() *shared.So
}
configuration := shared.SourceGoogleAnalyticsDataAPI{
Credentials: credentials,
- CustomReports: customReports,
+ CustomReportsArray: customReportsArray,
DateRangesStartDate: dateRangesStartDate,
- PropertyID: propertyID,
- SourceType: sourceType,
+ PropertyIds: propertyIds,
WindowInDays: windowInDays,
}
- name := r.Name.ValueString()
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
+ name1 := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
*secretID = r.SecretID.ValueString()
@@ -94,7 +1451,8 @@ func (r *SourceGoogleAnalyticsDataAPIResourceModel) ToCreateSDKType() *shared.So
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGoogleAnalyticsDataAPICreateRequest{
Configuration: configuration,
- Name: name,
+ DefinitionID: definitionID,
+ Name: name1,
SecretID: secretID,
WorkspaceID: workspaceID,
}
@@ -107,66 +1465,1419 @@ func (r *SourceGoogleAnalyticsDataAPIResourceModel) ToGetSDKType() *shared.Sourc
}
func (r *SourceGoogleAnalyticsDataAPIResourceModel) ToUpdateSDKType() *shared.SourceGoogleAnalyticsDataAPIPutRequest {
- var credentials *shared.SourceGoogleAnalyticsDataAPIUpdateCredentials
+ var credentials *shared.Credentials
if r.Configuration.Credentials != nil {
- var sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth *shared.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth
- if r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth != nil {
+ var authenticateViaGoogleOauth *shared.AuthenticateViaGoogleOauth
+ if r.Configuration.Credentials.AuthenticateViaGoogleOauth != nil {
accessToken := new(string)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth.AccessToken.ValueString()
+ if !r.Configuration.Credentials.AuthenticateViaGoogleOauth.AccessToken.IsUnknown() && !r.Configuration.Credentials.AuthenticateViaGoogleOauth.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Credentials.AuthenticateViaGoogleOauth.AccessToken.ValueString()
} else {
accessToken = nil
}
- authType := new(shared.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauthAuthType)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth.AuthType.IsNull() {
- *authType = shared.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauthAuthType(r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth.RefreshToken.ValueString()
- sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth = &shared.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth{
+ clientID := r.Configuration.Credentials.AuthenticateViaGoogleOauth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaGoogleOauth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaGoogleOauth.RefreshToken.ValueString()
+ authenticateViaGoogleOauth = &shared.AuthenticateViaGoogleOauth{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth != nil {
- credentials = &shared.SourceGoogleAnalyticsDataAPIUpdateCredentials{
- SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth: sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth,
+ if authenticateViaGoogleOauth != nil {
+ credentials = &shared.Credentials{
+ AuthenticateViaGoogleOauth: authenticateViaGoogleOauth,
}
}
- var sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication *shared.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication
- if r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication != nil {
- authType1 := new(shared.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthenticationAuthType)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication.AuthType.IsNull() {
- *authType1 = shared.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthenticationAuthType(r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- credentialsJSON := r.Configuration.Credentials.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication.CredentialsJSON.ValueString()
- sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication = &shared.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication{
- AuthType: authType1,
+ var serviceAccountKeyAuthentication *shared.ServiceAccountKeyAuthentication
+ if r.Configuration.Credentials.ServiceAccountKeyAuthentication != nil {
+ credentialsJSON := r.Configuration.Credentials.ServiceAccountKeyAuthentication.CredentialsJSON.ValueString()
+ serviceAccountKeyAuthentication = &shared.ServiceAccountKeyAuthentication{
CredentialsJSON: credentialsJSON,
}
}
- if sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication != nil {
- credentials = &shared.SourceGoogleAnalyticsDataAPIUpdateCredentials{
- SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication: sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication,
+ if serviceAccountKeyAuthentication != nil {
+ credentials = &shared.Credentials{
+ ServiceAccountKeyAuthentication: serviceAccountKeyAuthentication,
}
}
}
- customReports := new(string)
- if !r.Configuration.CustomReports.IsUnknown() && !r.Configuration.CustomReports.IsNull() {
- *customReports = r.Configuration.CustomReports.ValueString()
+ var customReportsArray []shared.SourceGoogleAnalyticsDataAPIUpdateCustomReportConfig = nil
+ for _, customReportsArrayItem := range r.Configuration.CustomReportsArray {
+ var dimensionFilter *shared.DimensionsFilter
+ if customReportsArrayItem.DimensionFilter != nil {
+ var andGroup *shared.AndGroup
+ if customReportsArrayItem.DimensionFilter.AndGroup != nil {
+ var expressions []shared.Expression = nil
+ for _, expressionsItem := range customReportsArrayItem.DimensionFilter.AndGroup.Expressions {
+ fieldName := expressionsItem.FieldName.ValueString()
+ var filter shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter
+ if expressionsItem.Filter.StringFilter != nil {
+ caseSensitive := new(bool)
+ if !expressionsItem.Filter.StringFilter.CaseSensitive.IsUnknown() && !expressionsItem.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive = expressionsItem.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive = nil
+ }
+ var matchType []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = nil
+ for _, matchTypeItem := range expressionsItem.Filter.StringFilter.MatchType {
+ matchType = append(matchType, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums(matchTypeItem.ValueString()))
+ }
+ value := expressionsItem.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter{
+ CaseSensitive: caseSensitive,
+ MatchType: matchType,
+ Value: value,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter != nil {
+ filter = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter
+ if expressionsItem.Filter.InListFilter != nil {
+ caseSensitive1 := new(bool)
+ if !expressionsItem.Filter.InListFilter.CaseSensitive.IsUnknown() && !expressionsItem.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive1 = expressionsItem.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive1 = nil
+ }
+ var values []string = nil
+ for _, valuesItem := range expressionsItem.Filter.InListFilter.Values {
+ values = append(values, valuesItem.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter{
+ CaseSensitive: caseSensitive1,
+ Values: values,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter != nil {
+ filter = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter
+ if expressionsItem.Filter.NumericFilter != nil {
+ var operation []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = nil
+ for _, operationItem := range expressionsItem.Filter.NumericFilter.Operation {
+ operation = append(operation, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums(operationItem.ValueString()))
+ }
+ var value1 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value
+ if expressionsItem.Filter.NumericFilter.Value.Int64Value != nil {
+ value2 := expressionsItem.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value{
+ Value: value2,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value != nil {
+ value1 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue
+ if expressionsItem.Filter.NumericFilter.Value.DoubleValue != nil {
+ value3, _ := expressionsItem.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue{
+ Value: value3,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue != nil {
+ value1 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter{
+ Operation: operation,
+ Value: value1,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter != nil {
+ filter = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter
+ if expressionsItem.Filter.BetweenFilter != nil {
+ var fromValue shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value
+ if expressionsItem.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value4 := expressionsItem.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value{
+ Value: value4,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value != nil {
+ fromValue = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue
+ if expressionsItem.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value5, _ := expressionsItem.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue{
+ Value: value5,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue != nil {
+ fromValue = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue,
+ }
+ }
+ var toValue shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value
+ if expressionsItem.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value6 := expressionsItem.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value{
+ Value: value6,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value != nil {
+ toValue = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue
+ if expressionsItem.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value7, _ := expressionsItem.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue{
+ Value: value7,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue != nil {
+ toValue = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter{
+ FromValue: fromValue,
+ ToValue: toValue,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter != nil {
+ filter = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter,
+ }
+ }
+ expressions = append(expressions, shared.Expression{
+ FieldName: fieldName,
+ Filter: filter,
+ })
+ }
+ andGroup = &shared.AndGroup{
+ Expressions: expressions,
+ }
+ }
+ if andGroup != nil {
+ dimensionFilter = &shared.DimensionsFilter{
+ AndGroup: andGroup,
+ }
+ }
+ var orGroup *shared.OrGroup
+ if customReportsArrayItem.DimensionFilter.OrGroup != nil {
+ var expressions1 []shared.SourceGoogleAnalyticsDataAPIUpdateExpression = nil
+ for _, expressionsItem1 := range customReportsArrayItem.DimensionFilter.OrGroup.Expressions {
+ fieldName1 := expressionsItem1.FieldName.ValueString()
+ var filter1 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter
+ if expressionsItem1.Filter.StringFilter != nil {
+ caseSensitive2 := new(bool)
+ if !expressionsItem1.Filter.StringFilter.CaseSensitive.IsUnknown() && !expressionsItem1.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive2 = expressionsItem1.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive2 = nil
+ }
+ var matchType1 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = nil
+ for _, matchTypeItem1 := range expressionsItem1.Filter.StringFilter.MatchType {
+ matchType1 = append(matchType1, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums(matchTypeItem1.ValueString()))
+ }
+ value8 := expressionsItem1.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter{
+ CaseSensitive: caseSensitive2,
+ MatchType: matchType1,
+ Value: value8,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter != nil {
+ filter1 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter
+ if expressionsItem1.Filter.InListFilter != nil {
+ caseSensitive3 := new(bool)
+ if !expressionsItem1.Filter.InListFilter.CaseSensitive.IsUnknown() && !expressionsItem1.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive3 = expressionsItem1.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive3 = nil
+ }
+ var values1 []string = nil
+ for _, valuesItem1 := range expressionsItem1.Filter.InListFilter.Values {
+ values1 = append(values1, valuesItem1.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter{
+ CaseSensitive: caseSensitive3,
+ Values: values1,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter != nil {
+ filter1 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter
+ if expressionsItem1.Filter.NumericFilter != nil {
+ var operation1 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums = nil
+ for _, operationItem1 := range expressionsItem1.Filter.NumericFilter.Operation {
+ operation1 = append(operation1, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums(operationItem1.ValueString()))
+ }
+ var value9 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value
+ if expressionsItem1.Filter.NumericFilter.Value.Int64Value != nil {
+ value10 := expressionsItem1.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value{
+ Value: value10,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value != nil {
+ value9 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue
+ if expressionsItem1.Filter.NumericFilter.Value.DoubleValue != nil {
+ value11, _ := expressionsItem1.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue{
+ Value: value11,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue != nil {
+ value9 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter{
+ Operation: operation1,
+ Value: value9,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter != nil {
+ filter1 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter
+ if expressionsItem1.Filter.BetweenFilter != nil {
+ var fromValue1 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value
+ if expressionsItem1.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value12 := expressionsItem1.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value{
+ Value: value12,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value != nil {
+ fromValue1 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue
+ if expressionsItem1.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value13, _ := expressionsItem1.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue{
+ Value: value13,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue != nil {
+ fromValue1 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue,
+ }
+ }
+ var toValue1 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
+ if expressionsItem1.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value14 := expressionsItem1.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value{
+ Value: value14,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value != nil {
+ toValue1 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue
+ if expressionsItem1.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value15, _ := expressionsItem1.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue{
+ Value: value15,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue != nil {
+ toValue1 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter{
+ FromValue: fromValue1,
+ ToValue: toValue1,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter != nil {
+ filter1 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter,
+ }
+ }
+ expressions1 = append(expressions1, shared.SourceGoogleAnalyticsDataAPIUpdateExpression{
+ FieldName: fieldName1,
+ Filter: filter1,
+ })
+ }
+ orGroup = &shared.OrGroup{
+ Expressions: expressions1,
+ }
+ }
+ if orGroup != nil {
+ dimensionFilter = &shared.DimensionsFilter{
+ OrGroup: orGroup,
+ }
+ }
+ var notExpression *shared.NotExpression
+ if customReportsArrayItem.DimensionFilter.NotExpression != nil {
+ var expression *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasExpression
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression != nil {
+ fieldName2 := customReportsArrayItem.DimensionFilter.NotExpression.Expression.FieldName.ValueString()
+ var filter2 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter != nil {
+ caseSensitive4 := new(bool)
+ if !customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive4 = customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive4 = nil
+ }
+ var matchType2 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = nil
+ for _, matchTypeItem2 := range customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter.MatchType {
+ matchType2 = append(matchType2, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums(matchTypeItem2.ValueString()))
+ }
+ value16 := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter{
+ CaseSensitive: caseSensitive4,
+ MatchType: matchType2,
+ Value: value16,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter != nil {
+ filter2 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.InListFilter != nil {
+ caseSensitive5 := new(bool)
+ if !customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive5 = customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive5 = nil
+ }
+ var values2 []string = nil
+ for _, valuesItem2 := range customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.InListFilter.Values {
+ values2 = append(values2, valuesItem2.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter{
+ CaseSensitive: caseSensitive5,
+ Values: values2,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter != nil {
+ filter2 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter != nil {
+ var operation2 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = nil
+ for _, operationItem2 := range customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter.Operation {
+ operation2 = append(operation2, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums(operationItem2.ValueString()))
+ }
+ var value17 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter.Value.Int64Value != nil {
+ value18 := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value{
+ Value: value18,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value != nil {
+ value17 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter.Value.DoubleValue != nil {
+ value19, _ := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue{
+ Value: value19,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue != nil {
+ value17 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter{
+ Operation: operation2,
+ Value: value17,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter != nil {
+ filter2 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter != nil {
+ var fromValue2 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value20 := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value{
+ Value: value20,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value != nil {
+ fromValue2 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value21, _ := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue{
+ Value: value21,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue != nil {
+ fromValue2 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue,
+ }
+ }
+ var toValue2 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasToValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value22 := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value{
+ Value: value22,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value != nil {
+ toValue2 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue
+ if customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value23, _ := customReportsArrayItem.DimensionFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue{
+ Value: value23,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue != nil {
+ toValue2 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter{
+ FromValue: fromValue2,
+ ToValue: toValue2,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter != nil {
+ filter2 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter,
+ }
+ }
+ expression = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasExpression{
+ FieldName: fieldName2,
+ Filter: filter2,
+ }
+ }
+ notExpression = &shared.NotExpression{
+ Expression: expression,
+ }
+ }
+ if notExpression != nil {
+ dimensionFilter = &shared.DimensionsFilter{
+ NotExpression: notExpression,
+ }
+ }
+ var filter3 *shared.Filter
+ if customReportsArrayItem.DimensionFilter.Filter != nil {
+ fieldName3 := customReportsArrayItem.DimensionFilter.Filter.FieldName.ValueString()
+ var filter4 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasFilter
+ var stringFilter *shared.StringFilter
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter != nil {
+ caseSensitive6 := new(bool)
+ if !customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive6 = customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive6 = nil
+ }
+ var matchType3 []shared.SourceGoogleAnalyticsDataAPIUpdateValidEnums = nil
+ for _, matchTypeItem3 := range customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter.MatchType {
+ matchType3 = append(matchType3, shared.SourceGoogleAnalyticsDataAPIUpdateValidEnums(matchTypeItem3.ValueString()))
+ }
+ value24 := customReportsArrayItem.DimensionFilter.Filter.Filter.StringFilter.Value.ValueString()
+ stringFilter = &shared.StringFilter{
+ CaseSensitive: caseSensitive6,
+ MatchType: matchType3,
+ Value: value24,
+ }
+ }
+ if stringFilter != nil {
+ filter4 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasFilter{
+ StringFilter: stringFilter,
+ }
+ }
+ var inListFilter *shared.InListFilter
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.InListFilter != nil {
+ caseSensitive7 := new(bool)
+ if !customReportsArrayItem.DimensionFilter.Filter.Filter.InListFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.DimensionFilter.Filter.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive7 = customReportsArrayItem.DimensionFilter.Filter.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive7 = nil
+ }
+ var values3 []string = nil
+ for _, valuesItem3 := range customReportsArrayItem.DimensionFilter.Filter.Filter.InListFilter.Values {
+ values3 = append(values3, valuesItem3.ValueString())
+ }
+ inListFilter = &shared.InListFilter{
+ CaseSensitive: caseSensitive7,
+ Values: values3,
+ }
+ }
+ if inListFilter != nil {
+ filter4 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasFilter{
+ InListFilter: inListFilter,
+ }
+ }
+ var numericFilter *shared.NumericFilter
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter != nil {
+ var operation3 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums = nil
+ for _, operationItem3 := range customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter.Operation {
+ operation3 = append(operation3, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums(operationItem3.ValueString()))
+ }
+ var value25 shared.Value
+ var int64Value *shared.Int64Value
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter.Value.Int64Value != nil {
+ value26 := customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ int64Value = &shared.Int64Value{
+ Value: value26,
+ }
+ }
+ if int64Value != nil {
+ value25 = shared.Value{
+ Int64Value: int64Value,
+ }
+ }
+ var doubleValue *shared.DoubleValue
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter.Value.DoubleValue != nil {
+ value27, _ := customReportsArrayItem.DimensionFilter.Filter.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ doubleValue = &shared.DoubleValue{
+ Value: value27,
+ }
+ }
+ if doubleValue != nil {
+ value25 = shared.Value{
+ DoubleValue: doubleValue,
+ }
+ }
+ numericFilter = &shared.NumericFilter{
+ Operation: operation3,
+ Value: value25,
+ }
+ }
+ if numericFilter != nil {
+ filter4 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasFilter{
+ NumericFilter: numericFilter,
+ }
+ }
+ var betweenFilter *shared.BetweenFilter
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter != nil {
+ var fromValue3 shared.FromValue
+ var sourceGoogleAnalyticsDataAPIUpdateInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateInt64Value
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value28 := customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateInt64Value{
+ Value: value28,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateInt64Value != nil {
+ fromValue3 = shared.FromValue{
+ SourceGoogleAnalyticsDataAPIUpdateInt64Value: sourceGoogleAnalyticsDataAPIUpdateInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateDoubleValue
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value29, _ := customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateDoubleValue{
+ Value: value29,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateDoubleValue != nil {
+ fromValue3 = shared.FromValue{
+ SourceGoogleAnalyticsDataAPIUpdateDoubleValue: sourceGoogleAnalyticsDataAPIUpdateDoubleValue,
+ }
+ }
+ var toValue3 shared.ToValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value30 := customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value{
+ Value: value30,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value != nil {
+ toValue3 = shared.ToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue
+ if customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value31, _ := customReportsArrayItem.DimensionFilter.Filter.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue{
+ Value: value31,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue != nil {
+ toValue3 = shared.ToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue,
+ }
+ }
+ betweenFilter = &shared.BetweenFilter{
+ FromValue: fromValue3,
+ ToValue: toValue3,
+ }
+ }
+ if betweenFilter != nil {
+ filter4 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasFilter{
+ BetweenFilter: betweenFilter,
+ }
+ }
+ filter3 = &shared.Filter{
+ FieldName: fieldName3,
+ Filter: filter4,
+ }
+ }
+ if filter3 != nil {
+ dimensionFilter = &shared.DimensionsFilter{
+ Filter: filter3,
+ }
+ }
+ }
+ var dimensions []string = nil
+ for _, dimensionsItem := range customReportsArrayItem.Dimensions {
+ dimensions = append(dimensions, dimensionsItem.ValueString())
+ }
+ var metricFilter *shared.MetricsFilter
+ if customReportsArrayItem.MetricFilter != nil {
+ var sourceGoogleAnalyticsDataAPIUpdateAndGroup *shared.SourceGoogleAnalyticsDataAPIUpdateAndGroup
+ if customReportsArrayItem.MetricFilter.AndGroup != nil {
+ var expressions2 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayExpression = nil
+ for _, expressionsItem2 := range customReportsArrayItem.MetricFilter.AndGroup.Expressions {
+ fieldName4 := expressionsItem2.FieldName.ValueString()
+ var filter5 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter
+ if expressionsItem2.Filter.StringFilter != nil {
+ caseSensitive8 := new(bool)
+ if !expressionsItem2.Filter.StringFilter.CaseSensitive.IsUnknown() && !expressionsItem2.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive8 = expressionsItem2.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive8 = nil
+ }
+ var matchType4 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = nil
+ for _, matchTypeItem4 := range expressionsItem2.Filter.StringFilter.MatchType {
+ matchType4 = append(matchType4, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums(matchTypeItem4.ValueString()))
+ }
+ value32 := expressionsItem2.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter{
+ CaseSensitive: caseSensitive8,
+ MatchType: matchType4,
+ Value: value32,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter != nil {
+ filter5 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter
+ if expressionsItem2.Filter.InListFilter != nil {
+ caseSensitive9 := new(bool)
+ if !expressionsItem2.Filter.InListFilter.CaseSensitive.IsUnknown() && !expressionsItem2.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive9 = expressionsItem2.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive9 = nil
+ }
+ var values4 []string = nil
+ for _, valuesItem4 := range expressionsItem2.Filter.InListFilter.Values {
+ values4 = append(values4, valuesItem4.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter{
+ CaseSensitive: caseSensitive9,
+ Values: values4,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter != nil {
+ filter5 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter
+ if expressionsItem2.Filter.NumericFilter != nil {
+ var operation4 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = nil
+ for _, operationItem4 := range expressionsItem2.Filter.NumericFilter.Operation {
+ operation4 = append(operation4, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums(operationItem4.ValueString()))
+ }
+ var value33 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+ if expressionsItem2.Filter.NumericFilter.Value.Int64Value != nil {
+ value34 := expressionsItem2.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value{
+ Value: value34,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value != nil {
+ value33 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+ if expressionsItem2.Filter.NumericFilter.Value.DoubleValue != nil {
+ value35, _ := expressionsItem2.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue{
+ Value: value35,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue != nil {
+ value33 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter{
+ Operation: operation4,
+ Value: value33,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter != nil {
+ filter5 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter
+ if expressionsItem2.Filter.BetweenFilter != nil {
+ var fromValue4 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value
+ if expressionsItem2.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value36 := expressionsItem2.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value{
+ Value: value36,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value != nil {
+ fromValue4 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue
+ if expressionsItem2.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value37, _ := expressionsItem2.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue{
+ Value: value37,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue != nil {
+ fromValue4 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue,
+ }
+ }
+ var toValue4 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value
+ if expressionsItem2.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value38 := expressionsItem2.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value{
+ Value: value38,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value != nil {
+ toValue4 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue
+ if expressionsItem2.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value39, _ := expressionsItem2.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue{
+ Value: value39,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue != nil {
+ toValue4 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter{
+ FromValue: fromValue4,
+ ToValue: toValue4,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter != nil {
+ filter5 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter,
+ }
+ }
+ expressions2 = append(expressions2, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayExpression{
+ FieldName: fieldName4,
+ Filter: filter5,
+ })
+ }
+ sourceGoogleAnalyticsDataAPIUpdateAndGroup = &shared.SourceGoogleAnalyticsDataAPIUpdateAndGroup{
+ Expressions: expressions2,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateAndGroup != nil {
+ metricFilter = &shared.MetricsFilter{
+ SourceGoogleAnalyticsDataAPIUpdateAndGroup: sourceGoogleAnalyticsDataAPIUpdateAndGroup,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateOrGroup *shared.SourceGoogleAnalyticsDataAPIUpdateOrGroup
+ if customReportsArrayItem.MetricFilter.OrGroup != nil {
+ var expressions3 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterExpression = nil
+ for _, expressionsItem3 := range customReportsArrayItem.MetricFilter.OrGroup.Expressions {
+ fieldName5 := expressionsItem3.FieldName.ValueString()
+ var filter6 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter
+ if expressionsItem3.Filter.StringFilter != nil {
+ caseSensitive10 := new(bool)
+ if !expressionsItem3.Filter.StringFilter.CaseSensitive.IsUnknown() && !expressionsItem3.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive10 = expressionsItem3.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive10 = nil
+ }
+ var matchType5 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = nil
+ for _, matchTypeItem5 := range expressionsItem3.Filter.StringFilter.MatchType {
+ matchType5 = append(matchType5, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums(matchTypeItem5.ValueString()))
+ }
+ value40 := expressionsItem3.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter{
+ CaseSensitive: caseSensitive10,
+ MatchType: matchType5,
+ Value: value40,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter != nil {
+ filter6 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter
+ if expressionsItem3.Filter.InListFilter != nil {
+ caseSensitive11 := new(bool)
+ if !expressionsItem3.Filter.InListFilter.CaseSensitive.IsUnknown() && !expressionsItem3.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive11 = expressionsItem3.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive11 = nil
+ }
+ var values5 []string = nil
+ for _, valuesItem5 := range expressionsItem3.Filter.InListFilter.Values {
+ values5 = append(values5, valuesItem5.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter{
+ CaseSensitive: caseSensitive11,
+ Values: values5,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter != nil {
+ filter6 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter
+ if expressionsItem3.Filter.NumericFilter != nil {
+ var operation5 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = nil
+ for _, operationItem5 := range expressionsItem3.Filter.NumericFilter.Operation {
+ operation5 = append(operation5, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums(operationItem5.ValueString()))
+ }
+ var value41 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value
+ if expressionsItem3.Filter.NumericFilter.Value.Int64Value != nil {
+ value42 := expressionsItem3.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value{
+ Value: value42,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value != nil {
+ value41 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue
+ if expressionsItem3.Filter.NumericFilter.Value.DoubleValue != nil {
+ value43, _ := expressionsItem3.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue{
+ Value: value43,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue != nil {
+ value41 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter{
+ Operation: operation5,
+ Value: value41,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter != nil {
+ filter6 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter
+ if expressionsItem3.Filter.BetweenFilter != nil {
+ var fromValue5 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
+ if expressionsItem3.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value44 := expressionsItem3.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value{
+ Value: value44,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value != nil {
+ fromValue5 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
+ if expressionsItem3.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value45, _ := expressionsItem3.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue{
+ Value: value45,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue != nil {
+ fromValue5 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue,
+ }
+ }
+ var toValue5 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
+ if expressionsItem3.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value46 := expressionsItem3.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value{
+ Value: value46,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value != nil {
+ toValue5 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
+ if expressionsItem3.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value47, _ := expressionsItem3.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue{
+ Value: value47,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue != nil {
+ toValue5 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter{
+ FromValue: fromValue5,
+ ToValue: toValue5,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter != nil {
+ filter6 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter,
+ }
+ }
+ expressions3 = append(expressions3, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterExpression{
+ FieldName: fieldName5,
+ Filter: filter6,
+ })
+ }
+ sourceGoogleAnalyticsDataAPIUpdateOrGroup = &shared.SourceGoogleAnalyticsDataAPIUpdateOrGroup{
+ Expressions: expressions3,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateOrGroup != nil {
+ metricFilter = &shared.MetricsFilter{
+ SourceGoogleAnalyticsDataAPIUpdateOrGroup: sourceGoogleAnalyticsDataAPIUpdateOrGroup,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateNotExpression *shared.SourceGoogleAnalyticsDataAPIUpdateNotExpression
+ if customReportsArrayItem.MetricFilter.NotExpression != nil {
+ var expression1 *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterExpression
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression != nil {
+ fieldName6 := customReportsArrayItem.MetricFilter.NotExpression.Expression.FieldName.ValueString()
+ var filter7 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter != nil {
+ caseSensitive12 := new(bool)
+ if !customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive12 = customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive12 = nil
+ }
+ var matchType6 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = nil
+ for _, matchTypeItem6 := range customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter.MatchType {
+ matchType6 = append(matchType6, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums(matchTypeItem6.ValueString()))
+ }
+ value48 := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter{
+ CaseSensitive: caseSensitive12,
+ MatchType: matchType6,
+ Value: value48,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter != nil {
+ filter7 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.InListFilter != nil {
+ caseSensitive13 := new(bool)
+ if !customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive13 = customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive13 = nil
+ }
+ var values6 []string = nil
+ for _, valuesItem6 := range customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.InListFilter.Values {
+ values6 = append(values6, valuesItem6.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter{
+ CaseSensitive: caseSensitive13,
+ Values: values6,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter != nil {
+ filter7 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter != nil {
+ var operation6 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = nil
+ for _, operationItem6 := range customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter.Operation {
+ operation6 = append(operation6, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums(operationItem6.ValueString()))
+ }
+ var value49 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter.Value.Int64Value != nil {
+ value50 := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value{
+ Value: value50,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value != nil {
+ value49 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter.Value.DoubleValue != nil {
+ value51, _ := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue{
+ Value: value51,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue != nil {
+ value49 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter{
+ Operation: operation6,
+ Value: value49,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter != nil {
+ filter7 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter != nil {
+ var fromValue6 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value52 := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value{
+ Value: value52,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value != nil {
+ fromValue6 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value53, _ := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue{
+ Value: value53,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue != nil {
+ fromValue6 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue,
+ }
+ }
+ var toValue6 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value54 := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value{
+ Value: value54,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value != nil {
+ toValue6 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue
+ if customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value55, _ := customReportsArrayItem.MetricFilter.NotExpression.Expression.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue{
+ Value: value55,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue != nil {
+ toValue6 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter{
+ FromValue: fromValue6,
+ ToValue: toValue6,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter != nil {
+ filter7 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter,
+ }
+ }
+ expression1 = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterExpression{
+ FieldName: fieldName6,
+ Filter: filter7,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateNotExpression = &shared.SourceGoogleAnalyticsDataAPIUpdateNotExpression{
+ Expression: expression1,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateNotExpression != nil {
+ metricFilter = &shared.MetricsFilter{
+ SourceGoogleAnalyticsDataAPIUpdateNotExpression: sourceGoogleAnalyticsDataAPIUpdateNotExpression,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateFilter *shared.SourceGoogleAnalyticsDataAPIUpdateFilter
+ if customReportsArrayItem.MetricFilter.Filter != nil {
+ fieldName7 := customReportsArrayItem.MetricFilter.Filter.FieldName.ValueString()
+ var filter8 shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter
+ var sourceGoogleAnalyticsDataAPIUpdateStringFilter *shared.SourceGoogleAnalyticsDataAPIUpdateStringFilter
+ if customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter != nil {
+ caseSensitive14 := new(bool)
+ if !customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter.CaseSensitive.IsNull() {
+ *caseSensitive14 = customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive14 = nil
+ }
+ var matchType7 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums = nil
+ for _, matchTypeItem7 := range customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter.MatchType {
+ matchType7 = append(matchType7, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums(matchTypeItem7.ValueString()))
+ }
+ value56 := customReportsArrayItem.MetricFilter.Filter.Filter.StringFilter.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateStringFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateStringFilter{
+ CaseSensitive: caseSensitive14,
+ MatchType: matchType7,
+ Value: value56,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateStringFilter != nil {
+ filter8 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIUpdateStringFilter: sourceGoogleAnalyticsDataAPIUpdateStringFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateInListFilter *shared.SourceGoogleAnalyticsDataAPIUpdateInListFilter
+ if customReportsArrayItem.MetricFilter.Filter.Filter.InListFilter != nil {
+ caseSensitive15 := new(bool)
+ if !customReportsArrayItem.MetricFilter.Filter.Filter.InListFilter.CaseSensitive.IsUnknown() && !customReportsArrayItem.MetricFilter.Filter.Filter.InListFilter.CaseSensitive.IsNull() {
+ *caseSensitive15 = customReportsArrayItem.MetricFilter.Filter.Filter.InListFilter.CaseSensitive.ValueBool()
+ } else {
+ caseSensitive15 = nil
+ }
+ var values7 []string = nil
+ for _, valuesItem7 := range customReportsArrayItem.MetricFilter.Filter.Filter.InListFilter.Values {
+ values7 = append(values7, valuesItem7.ValueString())
+ }
+ sourceGoogleAnalyticsDataAPIUpdateInListFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateInListFilter{
+ CaseSensitive: caseSensitive15,
+ Values: values7,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateInListFilter != nil {
+ filter8 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIUpdateInListFilter: sourceGoogleAnalyticsDataAPIUpdateInListFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateNumericFilter *shared.SourceGoogleAnalyticsDataAPIUpdateNumericFilter
+ if customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter != nil {
+ var operation7 []shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums = nil
+ for _, operationItem7 := range customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter.Operation {
+ operation7 = append(operation7, shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums(operationItem7.ValueString()))
+ }
+ var value57 shared.SourceGoogleAnalyticsDataAPIUpdateValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value
+ if customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter.Value.Int64Value != nil {
+ value58 := customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter.Value.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value{
+ Value: value58,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value != nil {
+ value57 = shared.SourceGoogleAnalyticsDataAPIUpdateValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue
+ if customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter.Value.DoubleValue != nil {
+ value59, _ := customReportsArrayItem.MetricFilter.Filter.Filter.NumericFilter.Value.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue{
+ Value: value59,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue != nil {
+ value57 = shared.SourceGoogleAnalyticsDataAPIUpdateValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateNumericFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateNumericFilter{
+ Operation: operation7,
+ Value: value57,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateNumericFilter != nil {
+ filter8 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIUpdateNumericFilter: sourceGoogleAnalyticsDataAPIUpdateNumericFilter,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateBetweenFilter *shared.SourceGoogleAnalyticsDataAPIUpdateBetweenFilter
+ if customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter != nil {
+ var fromValue7 shared.SourceGoogleAnalyticsDataAPIUpdateFromValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value
+ if customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.FromValue.Int64Value != nil {
+ value60 := customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.FromValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value{
+ Value: value60,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value != nil {
+ fromValue7 = shared.SourceGoogleAnalyticsDataAPIUpdateFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue
+ if customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.FromValue.DoubleValue != nil {
+ value61, _ := customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.FromValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue{
+ Value: value61,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue != nil {
+ fromValue7 = shared.SourceGoogleAnalyticsDataAPIUpdateFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue,
+ }
+ }
+ var toValue7 shared.SourceGoogleAnalyticsDataAPIUpdateToValue
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value
+ if customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.ToValue.Int64Value != nil {
+ value62 := customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.ToValue.Int64Value.Value.ValueString()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value{
+ Value: value62,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value != nil {
+ toValue7 = shared.SourceGoogleAnalyticsDataAPIUpdateToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value,
+ }
+ }
+ var sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue *shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue
+ if customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.ToValue.DoubleValue != nil {
+ value63, _ := customReportsArrayItem.MetricFilter.Filter.Filter.BetweenFilter.ToValue.DoubleValue.Value.ValueBigFloat().Float64()
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue = &shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue{
+ Value: value63,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue != nil {
+ toValue7 = shared.SourceGoogleAnalyticsDataAPIUpdateToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue: sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateBetweenFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateBetweenFilter{
+ FromValue: fromValue7,
+ ToValue: toValue7,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateBetweenFilter != nil {
+ filter8 = shared.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIUpdateBetweenFilter: sourceGoogleAnalyticsDataAPIUpdateBetweenFilter,
+ }
+ }
+ sourceGoogleAnalyticsDataAPIUpdateFilter = &shared.SourceGoogleAnalyticsDataAPIUpdateFilter{
+ FieldName: fieldName7,
+ Filter: filter8,
+ }
+ }
+ if sourceGoogleAnalyticsDataAPIUpdateFilter != nil {
+ metricFilter = &shared.MetricsFilter{
+ SourceGoogleAnalyticsDataAPIUpdateFilter: sourceGoogleAnalyticsDataAPIUpdateFilter,
+ }
+ }
+ }
+ var metrics []string = nil
+ for _, metricsItem := range customReportsArrayItem.Metrics {
+ metrics = append(metrics, metricsItem.ValueString())
+ }
+ name := customReportsArrayItem.Name.ValueString()
+ customReportsArray = append(customReportsArray, shared.SourceGoogleAnalyticsDataAPIUpdateCustomReportConfig{
+ DimensionFilter: dimensionFilter,
+ Dimensions: dimensions,
+ MetricFilter: metricFilter,
+ Metrics: metrics,
+ Name: name,
+ })
+ }
+ dateRangesStartDate := new(customTypes.Date)
+ if !r.Configuration.DateRangesStartDate.IsUnknown() && !r.Configuration.DateRangesStartDate.IsNull() {
+ dateRangesStartDate = customTypes.MustNewDateFromString(r.Configuration.DateRangesStartDate.ValueString())
} else {
- customReports = nil
+ dateRangesStartDate = nil
+ }
+ var propertyIds []string = nil
+ for _, propertyIdsItem := range r.Configuration.PropertyIds {
+ propertyIds = append(propertyIds, propertyIdsItem.ValueString())
}
- dateRangesStartDate := customTypes.MustDateFromString(r.Configuration.DateRangesStartDate.ValueString())
- propertyID := r.Configuration.PropertyID.ValueString()
windowInDays := new(int64)
if !r.Configuration.WindowInDays.IsUnknown() && !r.Configuration.WindowInDays.IsNull() {
*windowInDays = r.Configuration.WindowInDays.ValueInt64()
@@ -175,16 +2886,16 @@ func (r *SourceGoogleAnalyticsDataAPIResourceModel) ToUpdateSDKType() *shared.So
}
configuration := shared.SourceGoogleAnalyticsDataAPIUpdate{
Credentials: credentials,
- CustomReports: customReports,
+ CustomReportsArray: customReportsArray,
DateRangesStartDate: dateRangesStartDate,
- PropertyID: propertyID,
+ PropertyIds: propertyIds,
WindowInDays: windowInDays,
}
- name := r.Name.ValueString()
+ name1 := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGoogleAnalyticsDataAPIPutRequest{
Configuration: configuration,
- Name: name,
+ Name: name1,
WorkspaceID: workspaceID,
}
return &out
diff --git a/internal/provider/source_googleanalyticsv4_data_source.go b/internal/provider/source_googleanalyticsv4_data_source.go
deleted file mode 100755
index 0c232d992..000000000
--- a/internal/provider/source_googleanalyticsv4_data_source.go
+++ /dev/null
@@ -1,278 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
- "context"
- "fmt"
-
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ datasource.DataSource = &SourceGoogleAnalyticsV4DataSource{}
-var _ datasource.DataSourceWithConfigure = &SourceGoogleAnalyticsV4DataSource{}
-
-func NewSourceGoogleAnalyticsV4DataSource() datasource.DataSource {
- return &SourceGoogleAnalyticsV4DataSource{}
-}
-
-// SourceGoogleAnalyticsV4DataSource is the data source implementation.
-type SourceGoogleAnalyticsV4DataSource struct {
- client *sdk.SDK
-}
-
-// SourceGoogleAnalyticsV4DataSourceModel describes the data model.
-type SourceGoogleAnalyticsV4DataSourceModel struct {
- Configuration SourceGoogleAnalyticsV4 `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-// Metadata returns the data source type name.
-func (r *SourceGoogleAnalyticsV4DataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_google_analytics_v4"
-}
-
-// Schema defines the schema for the data source.
-func (r *SourceGoogleAnalyticsV4DataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "SourceGoogleAnalyticsV4 DataSource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_google_analytics_v4_credentials_authenticate_via_google_oauth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Google Analytics developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Google Analytics developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining a new access token.`,
- },
- },
- Description: `Credentials for the service`,
- },
- "source_google_analytics_v4_credentials_service_account_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The JSON key of the service account to use for authorization`,
- },
- },
- Description: `Credentials for the service`,
- },
- "source_google_analytics_v4_update_credentials_authenticate_via_google_oauth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Google Analytics developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Google Analytics developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining a new access token.`,
- },
- },
- Description: `Credentials for the service`,
- },
- "source_google_analytics_v4_update_credentials_service_account_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The JSON key of the service account to use for authorization`,
- },
- },
- Description: `Credentials for the service`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Credentials for the service`,
- },
- "custom_reports": schema.StringAttribute{
- Computed: true,
- Description: `A JSON array describing the custom reports you want to sync from Google Analytics. See the docs for more information about the exact format you can use to fill out this field.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-analytics-v4",
- ),
- },
- Description: `must be one of ["google-analytics-v4"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The date in the format YYYY-MM-DD. Any data before this date will not be replicated.`,
- },
- "view_id": schema.StringAttribute{
- Computed: true,
- Description: `The ID for the Google Analytics View you want to fetch data from. This can be found from the Google Analytics Account Explorer.`,
- },
- "window_in_days": schema.Int64Attribute{
- Computed: true,
- Description: `The time increment used by the connector when requesting data from the Google Analytics API. More information is available in the the docs. The bigger this value is, the faster the sync will be, but the more likely that sampling will be applied to your data, potentially causing inaccuracies in the returned results. We recommend setting this to 1 unless you have a hard requirement to make the sync faster at the expense of accuracy. The minimum allowed value for this field is 1, and the maximum is 364. `,
- },
- },
- },
- "name": schema.StringAttribute{
- Computed: true,
- },
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
- "source_id": schema.StringAttribute{
- Required: true,
- },
- "workspace_id": schema.StringAttribute{
- Computed: true,
- },
- },
- }
-}
-
-func (r *SourceGoogleAnalyticsV4DataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected DataSource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *SourceGoogleAnalyticsV4DataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data *SourceGoogleAnalyticsV4DataSourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.GetSourceGoogleAnalyticsV4Request{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.GetSourceGoogleAnalyticsV4(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
diff --git a/internal/provider/source_googleanalyticsv4_data_source_sdk.go b/internal/provider/source_googleanalyticsv4_data_source_sdk.go
deleted file mode 100755
index 50023c0cf..000000000
--- a/internal/provider/source_googleanalyticsv4_data_source_sdk.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceGoogleAnalyticsV4DataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
diff --git a/internal/provider/source_googleanalyticsv4_resource.go b/internal/provider/source_googleanalyticsv4_resource.go
deleted file mode 100755
index d5652470e..000000000
--- a/internal/provider/source_googleanalyticsv4_resource.go
+++ /dev/null
@@ -1,444 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "context"
- "fmt"
-
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ resource.Resource = &SourceGoogleAnalyticsV4Resource{}
-var _ resource.ResourceWithImportState = &SourceGoogleAnalyticsV4Resource{}
-
-func NewSourceGoogleAnalyticsV4Resource() resource.Resource {
- return &SourceGoogleAnalyticsV4Resource{}
-}
-
-// SourceGoogleAnalyticsV4Resource defines the resource implementation.
-type SourceGoogleAnalyticsV4Resource struct {
- client *sdk.SDK
-}
-
-// SourceGoogleAnalyticsV4ResourceModel describes the resource data model.
-type SourceGoogleAnalyticsV4ResourceModel struct {
- Configuration SourceGoogleAnalyticsV4 `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-func (r *SourceGoogleAnalyticsV4Resource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_google_analytics_v4"
-}
-
-func (r *SourceGoogleAnalyticsV4Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "SourceGoogleAnalyticsV4 Resource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "source_google_analytics_v4_credentials_authenticate_via_google_oauth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Optional: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your Google Analytics developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your Google Analytics developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The token for obtaining a new access token.`,
- },
- },
- Description: `Credentials for the service`,
- },
- "source_google_analytics_v4_credentials_service_account_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "credentials_json": schema.StringAttribute{
- Required: true,
- Description: `The JSON key of the service account to use for authorization`,
- },
- },
- Description: `Credentials for the service`,
- },
- "source_google_analytics_v4_update_credentials_authenticate_via_google_oauth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Optional: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your Google Analytics developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your Google Analytics developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The token for obtaining a new access token.`,
- },
- },
- Description: `Credentials for the service`,
- },
- "source_google_analytics_v4_update_credentials_service_account_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "credentials_json": schema.StringAttribute{
- Required: true,
- Description: `The JSON key of the service account to use for authorization`,
- },
- },
- Description: `Credentials for the service`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Credentials for the service`,
- },
- "custom_reports": schema.StringAttribute{
- Optional: true,
- Description: `A JSON array describing the custom reports you want to sync from Google Analytics. See the docs for more information about the exact format you can use to fill out this field.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-analytics-v4",
- ),
- },
- Description: `must be one of ["google-analytics-v4"]`,
- },
- "start_date": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The date in the format YYYY-MM-DD. Any data before this date will not be replicated.`,
- },
- "view_id": schema.StringAttribute{
- Required: true,
- Description: `The ID for the Google Analytics View you want to fetch data from. This can be found from the Google Analytics Account Explorer.`,
- },
- "window_in_days": schema.Int64Attribute{
- Optional: true,
- Description: `The time increment used by the connector when requesting data from the Google Analytics API. More information is available in the the docs. The bigger this value is, the faster the sync will be, but the more likely that sampling will be applied to your data, potentially causing inaccuracies in the returned results. We recommend setting this to 1 unless you have a hard requirement to make the sync faster at the expense of accuracy. The minimum allowed value for this field is 1, and the maximum is 364. `,
- },
- },
- },
- "name": schema.StringAttribute{
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- Required: true,
- },
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
- "source_id": schema.StringAttribute{
- Computed: true,
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- },
- "workspace_id": schema.StringAttribute{
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- Required: true,
- },
- },
- }
-}
-
-func (r *SourceGoogleAnalyticsV4Resource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected Resource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *SourceGoogleAnalyticsV4Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data *SourceGoogleAnalyticsV4ResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- request := *data.ToCreateSDKType()
- res, err := r.client.Sources.CreateSourceGoogleAnalyticsV4(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromCreateResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceGoogleAnalyticsV4Resource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data *SourceGoogleAnalyticsV4ResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.GetSourceGoogleAnalyticsV4Request{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.GetSourceGoogleAnalyticsV4(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceGoogleAnalyticsV4Resource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data *SourceGoogleAnalyticsV4ResourceModel
- merge(ctx, req, resp, &data)
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceGoogleAnalyticsV4PutRequest := data.ToUpdateSDKType()
- sourceID := data.SourceID.ValueString()
- request := operations.PutSourceGoogleAnalyticsV4Request{
- SourceGoogleAnalyticsV4PutRequest: sourceGoogleAnalyticsV4PutRequest,
- SourceID: sourceID,
- }
- res, err := r.client.Sources.PutSourceGoogleAnalyticsV4(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- sourceId1 := data.SourceID.ValueString()
- getRequest := operations.GetSourceGoogleAnalyticsV4Request{
- SourceID: sourceId1,
- }
- getResponse, err := r.client.Sources.GetSourceGoogleAnalyticsV4(ctx, getRequest)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if getResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse))
- return
- }
- if getResponse.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse))
- return
- }
- if getResponse.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse))
- return
- }
- data.RefreshFromGetResponse(getResponse.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceGoogleAnalyticsV4Resource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data *SourceGoogleAnalyticsV4ResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.DeleteSourceGoogleAnalyticsV4Request{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.DeleteSourceGoogleAnalyticsV4(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
-
-}
-
-func (r *SourceGoogleAnalyticsV4Resource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
-}
diff --git a/internal/provider/source_googleanalyticsv4_resource_sdk.go b/internal/provider/source_googleanalyticsv4_resource_sdk.go
deleted file mode 100755
index 0b4bd4e81..000000000
--- a/internal/provider/source_googleanalyticsv4_resource_sdk.go
+++ /dev/null
@@ -1,207 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceGoogleAnalyticsV4ResourceModel) ToCreateSDKType() *shared.SourceGoogleAnalyticsV4CreateRequest {
- var credentials *shared.SourceGoogleAnalyticsV4Credentials
- if r.Configuration.Credentials != nil {
- var sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth *shared.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth
- if r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth != nil {
- accessToken := new(string)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth.AccessToken.ValueString()
- } else {
- accessToken = nil
- }
- authType := new(shared.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth.AuthType.IsNull() {
- *authType = shared.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType(r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth.RefreshToken.ValueString()
- sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth = &shared.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth{
- AccessToken: accessToken,
- AuthType: authType,
- ClientID: clientID,
- ClientSecret: clientSecret,
- RefreshToken: refreshToken,
- }
- }
- if sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth != nil {
- credentials = &shared.SourceGoogleAnalyticsV4Credentials{
- SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth: sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth,
- }
- }
- var sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication *shared.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication
- if r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication != nil {
- authType1 := new(shared.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication.AuthType.IsNull() {
- *authType1 = shared.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType(r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- credentialsJSON := r.Configuration.Credentials.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication.CredentialsJSON.ValueString()
- sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication = &shared.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication{
- AuthType: authType1,
- CredentialsJSON: credentialsJSON,
- }
- }
- if sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication != nil {
- credentials = &shared.SourceGoogleAnalyticsV4Credentials{
- SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication: sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication,
- }
- }
- }
- customReports := new(string)
- if !r.Configuration.CustomReports.IsUnknown() && !r.Configuration.CustomReports.IsNull() {
- *customReports = r.Configuration.CustomReports.ValueString()
- } else {
- customReports = nil
- }
- sourceType := shared.SourceGoogleAnalyticsV4GoogleAnalyticsV4(r.Configuration.SourceType.ValueString())
- startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
- viewID := r.Configuration.ViewID.ValueString()
- windowInDays := new(int64)
- if !r.Configuration.WindowInDays.IsUnknown() && !r.Configuration.WindowInDays.IsNull() {
- *windowInDays = r.Configuration.WindowInDays.ValueInt64()
- } else {
- windowInDays = nil
- }
- configuration := shared.SourceGoogleAnalyticsV4{
- Credentials: credentials,
- CustomReports: customReports,
- SourceType: sourceType,
- StartDate: startDate,
- ViewID: viewID,
- WindowInDays: windowInDays,
- }
- name := r.Name.ValueString()
- secretID := new(string)
- if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
- *secretID = r.SecretID.ValueString()
- } else {
- secretID = nil
- }
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceGoogleAnalyticsV4CreateRequest{
- Configuration: configuration,
- Name: name,
- SecretID: secretID,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceGoogleAnalyticsV4ResourceModel) ToGetSDKType() *shared.SourceGoogleAnalyticsV4CreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceGoogleAnalyticsV4ResourceModel) ToUpdateSDKType() *shared.SourceGoogleAnalyticsV4PutRequest {
- var credentials *shared.SourceGoogleAnalyticsV4UpdateCredentials
- if r.Configuration.Credentials != nil {
- var sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth *shared.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth
- if r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth != nil {
- accessToken := new(string)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth.AccessToken.ValueString()
- } else {
- accessToken = nil
- }
- authType := new(shared.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauthAuthType)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth.AuthType.IsNull() {
- *authType = shared.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauthAuthType(r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth.RefreshToken.ValueString()
- sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth = &shared.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth{
- AccessToken: accessToken,
- AuthType: authType,
- ClientID: clientID,
- ClientSecret: clientSecret,
- RefreshToken: refreshToken,
- }
- }
- if sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth != nil {
- credentials = &shared.SourceGoogleAnalyticsV4UpdateCredentials{
- SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth: sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth,
- }
- }
- var sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication *shared.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication
- if r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication != nil {
- authType1 := new(shared.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthenticationAuthType)
- if !r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication.AuthType.IsNull() {
- *authType1 = shared.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthenticationAuthType(r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- credentialsJSON := r.Configuration.Credentials.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication.CredentialsJSON.ValueString()
- sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication = &shared.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication{
- AuthType: authType1,
- CredentialsJSON: credentialsJSON,
- }
- }
- if sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication != nil {
- credentials = &shared.SourceGoogleAnalyticsV4UpdateCredentials{
- SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication: sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication,
- }
- }
- }
- customReports := new(string)
- if !r.Configuration.CustomReports.IsUnknown() && !r.Configuration.CustomReports.IsNull() {
- *customReports = r.Configuration.CustomReports.ValueString()
- } else {
- customReports = nil
- }
- startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
- viewID := r.Configuration.ViewID.ValueString()
- windowInDays := new(int64)
- if !r.Configuration.WindowInDays.IsUnknown() && !r.Configuration.WindowInDays.IsNull() {
- *windowInDays = r.Configuration.WindowInDays.ValueInt64()
- } else {
- windowInDays = nil
- }
- configuration := shared.SourceGoogleAnalyticsV4Update{
- Credentials: credentials,
- CustomReports: customReports,
- StartDate: startDate,
- ViewID: viewID,
- WindowInDays: windowInDays,
- }
- name := r.Name.ValueString()
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceGoogleAnalyticsV4PutRequest{
- Configuration: configuration,
- Name: name,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceGoogleAnalyticsV4ResourceModel) ToDeleteSDKType() *shared.SourceGoogleAnalyticsV4CreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceGoogleAnalyticsV4ResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.SourceType = types.StringValue(resp.SourceType)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
-
-func (r *SourceGoogleAnalyticsV4ResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
- r.RefreshFromGetResponse(resp)
-}
diff --git a/internal/provider/source_googledirectory_data_source.go b/internal/provider/source_googledirectory_data_source.go
old mode 100755
new mode 100644
index cb0bb4afd..5672c6ff7
--- a/internal/provider/source_googledirectory_data_source.go
+++ b/internal/provider/source_googledirectory_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceGoogleDirectoryDataSource struct {
// SourceGoogleDirectoryDataSourceModel describes the data model.
type SourceGoogleDirectoryDataSourceModel struct {
- Configuration SourceGoogleDirectory `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,143 +47,20 @@ func (r *SourceGoogleDirectoryDataSource) Schema(ctx context.Context, req dataso
MarkdownDescription: "SourceGoogleDirectory DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_google_directory_google_credentials_service_account_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The contents of the JSON service account key. See the docs for more information on how to generate this key.`,
- },
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service accounts",
- ),
- },
- MarkdownDescription: `must be one of ["Service accounts"]` + "\n" +
- `Authentication Scenario`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The email of the user, which has permissions to access the Google Workspace Admin APIs.`,
- },
- },
- Description: `For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email.`,
- },
- "source_google_directory_google_credentials_sign_in_via_google_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of the developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of the developer application.`,
- },
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Web server app",
- ),
- },
- MarkdownDescription: `must be one of ["Web server app"]` + "\n" +
- `Authentication Scenario`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The Token for obtaining a new access token.`,
- },
- },
- Description: `For these scenario user only needs to give permission to read Google Directory data.`,
- },
- "source_google_directory_update_google_credentials_service_account_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The contents of the JSON service account key. See the docs for more information on how to generate this key.`,
- },
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service accounts",
- ),
- },
- MarkdownDescription: `must be one of ["Service accounts"]` + "\n" +
- `Authentication Scenario`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The email of the user, which has permissions to access the Google Workspace Admin APIs.`,
- },
- },
- Description: `For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email.`,
- },
- "source_google_directory_update_google_credentials_sign_in_via_google_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of the developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of the developer application.`,
- },
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Web server app",
- ),
- },
- MarkdownDescription: `must be one of ["Web server app"]` + "\n" +
- `Authentication Scenario`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The Token for obtaining a new access token.`,
- },
- },
- Description: `For these scenario user only needs to give permission to read Google Directory data.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Google APIs use the OAuth 2.0 protocol for authentication and authorization. The Source supports Web server application and Service accounts scenarios.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-directory",
- ),
- },
- Description: `must be one of ["google-directory"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_googledirectory_data_source_sdk.go b/internal/provider/source_googledirectory_data_source_sdk.go
old mode 100755
new mode 100644
index 4a3794603..19117bfc6
--- a/internal/provider/source_googledirectory_data_source_sdk.go
+++ b/internal/provider/source_googledirectory_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGoogleDirectoryDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_googledirectory_resource.go b/internal/provider/source_googledirectory_resource.go
old mode 100755
new mode 100644
index 4ce8d1962..b60c295ab
--- a/internal/provider/source_googledirectory_resource.go
+++ b/internal/provider/source_googledirectory_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceGoogleDirectoryResource struct {
// SourceGoogleDirectoryResourceModel describes the resource data model.
type SourceGoogleDirectoryResourceModel struct {
Configuration SourceGoogleDirectory `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,23 +59,13 @@ func (r *SourceGoogleDirectoryResource) Schema(ctx context.Context, req resource
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_google_directory_google_credentials_service_account_key": schema.SingleNestedAttribute{
+ "service_account_key": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"credentials_json": schema.StringAttribute{
Required: true,
Description: `The contents of the JSON service account key. See the docs for more information on how to generate this key.`,
},
- "credentials_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service accounts",
- ),
- },
- MarkdownDescription: `must be one of ["Service accounts"]` + "\n" +
- `Authentication Scenario`,
- },
"email": schema.StringAttribute{
Required: true,
Description: `The email of the user, which has permissions to access the Google Workspace Admin APIs.`,
@@ -82,7 +73,7 @@ func (r *SourceGoogleDirectoryResource) Schema(ctx context.Context, req resource
},
Description: `For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email.`,
},
- "source_google_directory_google_credentials_sign_in_via_google_o_auth": schema.SingleNestedAttribute{
+ "sign_in_via_google_o_auth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"client_id": schema.StringAttribute{
@@ -93,99 +84,40 @@ func (r *SourceGoogleDirectoryResource) Schema(ctx context.Context, req resource
Required: true,
Description: `The Client Secret of the developer application.`,
},
- "credentials_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Web server app",
- ),
- },
- MarkdownDescription: `must be one of ["Web server app"]` + "\n" +
- `Authentication Scenario`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The Token for obtaining a new access token.`,
- },
- },
- Description: `For these scenario user only needs to give permission to read Google Directory data.`,
- },
- "source_google_directory_update_google_credentials_service_account_key": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "credentials_json": schema.StringAttribute{
- Required: true,
- Description: `The contents of the JSON service account key. See the docs for more information on how to generate this key.`,
- },
- "credentials_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service accounts",
- ),
- },
- MarkdownDescription: `must be one of ["Service accounts"]` + "\n" +
- `Authentication Scenario`,
- },
- "email": schema.StringAttribute{
- Required: true,
- Description: `The email of the user, which has permissions to access the Google Workspace Admin APIs.`,
- },
- },
- Description: `For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email.`,
- },
- "source_google_directory_update_google_credentials_sign_in_via_google_o_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of the developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of the developer application.`,
- },
- "credentials_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Web server app",
- ),
- },
- MarkdownDescription: `must be one of ["Web server app"]` + "\n" +
- `Authentication Scenario`,
- },
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Token for obtaining a new access token.`,
},
},
Description: `For these scenario user only needs to give permission to read Google Directory data.`,
},
},
+ Description: `Google APIs use the OAuth 2.0 protocol for authentication and authorization. The Source supports Web server application and Service accounts scenarios.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Google APIs use the OAuth 2.0 protocol for authentication and authorization. The Source supports Web server application and Service accounts scenarios.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-directory",
- ),
- },
- Description: `must be one of ["google-directory"]`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -249,7 +181,7 @@ func (r *SourceGoogleDirectoryResource) Create(ctx context.Context, req resource
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGoogleDirectory(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -425,5 +357,5 @@ func (r *SourceGoogleDirectoryResource) Delete(ctx context.Context, req resource
}
func (r *SourceGoogleDirectoryResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_googledirectory_resource_sdk.go b/internal/provider/source_googledirectory_resource_sdk.go
old mode 100755
new mode 100644
index 5ad80051a..f7227df0f
--- a/internal/provider/source_googledirectory_resource_sdk.go
+++ b/internal/provider/source_googledirectory_resource_sdk.go
@@ -3,62 +3,52 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGoogleDirectoryResourceModel) ToCreateSDKType() *shared.SourceGoogleDirectoryCreateRequest {
var credentials *shared.SourceGoogleDirectoryGoogleCredentials
if r.Configuration.Credentials != nil {
- var sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth *shared.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth
- if r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth != nil {
- clientID := r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth.ClientSecret.ValueString()
- credentialsTitle := new(shared.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle)
- if !r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth.CredentialsTitle.IsUnknown() && !r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth.CredentialsTitle.IsNull() {
- *credentialsTitle = shared.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle(r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth.CredentialsTitle.ValueString())
- } else {
- credentialsTitle = nil
- }
- refreshToken := r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth.RefreshToken.ValueString()
- sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth = &shared.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth{
- ClientID: clientID,
- ClientSecret: clientSecret,
- CredentialsTitle: credentialsTitle,
- RefreshToken: refreshToken,
+ var sourceGoogleDirectorySignInViaGoogleOAuth *shared.SourceGoogleDirectorySignInViaGoogleOAuth
+ if r.Configuration.Credentials.SignInViaGoogleOAuth != nil {
+ clientID := r.Configuration.Credentials.SignInViaGoogleOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.SignInViaGoogleOAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.SignInViaGoogleOAuth.RefreshToken.ValueString()
+ sourceGoogleDirectorySignInViaGoogleOAuth = &shared.SourceGoogleDirectorySignInViaGoogleOAuth{
+ ClientID: clientID,
+ ClientSecret: clientSecret,
+ RefreshToken: refreshToken,
}
}
- if sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth != nil {
+ if sourceGoogleDirectorySignInViaGoogleOAuth != nil {
credentials = &shared.SourceGoogleDirectoryGoogleCredentials{
- SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth: sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth,
+ SourceGoogleDirectorySignInViaGoogleOAuth: sourceGoogleDirectorySignInViaGoogleOAuth,
}
}
- var sourceGoogleDirectoryGoogleCredentialsServiceAccountKey *shared.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey
- if r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey != nil {
- credentialsJSON := r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey.CredentialsJSON.ValueString()
- credentialsTitle1 := new(shared.SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitle)
- if !r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey.CredentialsTitle.IsUnknown() && !r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey.CredentialsTitle.IsNull() {
- *credentialsTitle1 = shared.SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitle(r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey.CredentialsTitle.ValueString())
- } else {
- credentialsTitle1 = nil
- }
- email := r.Configuration.Credentials.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey.Email.ValueString()
- sourceGoogleDirectoryGoogleCredentialsServiceAccountKey = &shared.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey{
- CredentialsJSON: credentialsJSON,
- CredentialsTitle: credentialsTitle1,
- Email: email,
+ var sourceGoogleDirectoryServiceAccountKey *shared.SourceGoogleDirectoryServiceAccountKey
+ if r.Configuration.Credentials.ServiceAccountKey != nil {
+ credentialsJSON := r.Configuration.Credentials.ServiceAccountKey.CredentialsJSON.ValueString()
+ email := r.Configuration.Credentials.ServiceAccountKey.Email.ValueString()
+ sourceGoogleDirectoryServiceAccountKey = &shared.SourceGoogleDirectoryServiceAccountKey{
+ CredentialsJSON: credentialsJSON,
+ Email: email,
}
}
- if sourceGoogleDirectoryGoogleCredentialsServiceAccountKey != nil {
+ if sourceGoogleDirectoryServiceAccountKey != nil {
credentials = &shared.SourceGoogleDirectoryGoogleCredentials{
- SourceGoogleDirectoryGoogleCredentialsServiceAccountKey: sourceGoogleDirectoryGoogleCredentialsServiceAccountKey,
+ SourceGoogleDirectoryServiceAccountKey: sourceGoogleDirectoryServiceAccountKey,
}
}
}
- sourceType := shared.SourceGoogleDirectoryGoogleDirectory(r.Configuration.SourceType.ValueString())
configuration := shared.SourceGoogleDirectory{
Credentials: credentials,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -70,6 +60,7 @@ func (r *SourceGoogleDirectoryResourceModel) ToCreateSDKType() *shared.SourceGoo
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGoogleDirectoryCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -85,48 +76,34 @@ func (r *SourceGoogleDirectoryResourceModel) ToGetSDKType() *shared.SourceGoogle
func (r *SourceGoogleDirectoryResourceModel) ToUpdateSDKType() *shared.SourceGoogleDirectoryPutRequest {
var credentials *shared.SourceGoogleDirectoryUpdateGoogleCredentials
if r.Configuration.Credentials != nil {
- var sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth *shared.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth
- if r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth != nil {
- clientID := r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth.ClientSecret.ValueString()
- credentialsTitle := new(shared.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle)
- if !r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth.CredentialsTitle.IsUnknown() && !r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth.CredentialsTitle.IsNull() {
- *credentialsTitle = shared.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle(r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth.CredentialsTitle.ValueString())
- } else {
- credentialsTitle = nil
- }
- refreshToken := r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth.RefreshToken.ValueString()
- sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth = &shared.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth{
- ClientID: clientID,
- ClientSecret: clientSecret,
- CredentialsTitle: credentialsTitle,
- RefreshToken: refreshToken,
+ var signInViaGoogleOAuth *shared.SignInViaGoogleOAuth
+ if r.Configuration.Credentials.SignInViaGoogleOAuth != nil {
+ clientID := r.Configuration.Credentials.SignInViaGoogleOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.SignInViaGoogleOAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.SignInViaGoogleOAuth.RefreshToken.ValueString()
+ signInViaGoogleOAuth = &shared.SignInViaGoogleOAuth{
+ ClientID: clientID,
+ ClientSecret: clientSecret,
+ RefreshToken: refreshToken,
}
}
- if sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth != nil {
+ if signInViaGoogleOAuth != nil {
credentials = &shared.SourceGoogleDirectoryUpdateGoogleCredentials{
- SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth: sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth,
+ SignInViaGoogleOAuth: signInViaGoogleOAuth,
}
}
- var sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey *shared.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey
- if r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey != nil {
- credentialsJSON := r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey.CredentialsJSON.ValueString()
- credentialsTitle1 := new(shared.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitle)
- if !r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey.CredentialsTitle.IsUnknown() && !r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey.CredentialsTitle.IsNull() {
- *credentialsTitle1 = shared.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitle(r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey.CredentialsTitle.ValueString())
- } else {
- credentialsTitle1 = nil
- }
- email := r.Configuration.Credentials.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey.Email.ValueString()
- sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey = &shared.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey{
- CredentialsJSON: credentialsJSON,
- CredentialsTitle: credentialsTitle1,
- Email: email,
+ var serviceAccountKey *shared.ServiceAccountKey
+ if r.Configuration.Credentials.ServiceAccountKey != nil {
+ credentialsJSON := r.Configuration.Credentials.ServiceAccountKey.CredentialsJSON.ValueString()
+ email := r.Configuration.Credentials.ServiceAccountKey.Email.ValueString()
+ serviceAccountKey = &shared.ServiceAccountKey{
+ CredentialsJSON: credentialsJSON,
+ Email: email,
}
}
- if sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey != nil {
+ if serviceAccountKey != nil {
credentials = &shared.SourceGoogleDirectoryUpdateGoogleCredentials{
- SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey: sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey,
+ ServiceAccountKey: serviceAccountKey,
}
}
}
diff --git a/internal/provider/source_bigcommerce_data_source.go b/internal/provider/source_googledrive_data_source.go
old mode 100755
new mode 100644
similarity index 53%
rename from internal/provider/source_bigcommerce_data_source.go
rename to internal/provider/source_googledrive_data_source.go
index 364c04149..61a7e69d3
--- a/internal/provider/source_bigcommerce_data_source.go
+++ b/internal/provider/source_googledrive_data_source.go
@@ -3,88 +3,64 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
// Ensure provider defined types fully satisfy framework interfaces.
-var _ datasource.DataSource = &SourceBigcommerceDataSource{}
-var _ datasource.DataSourceWithConfigure = &SourceBigcommerceDataSource{}
+var _ datasource.DataSource = &SourceGoogleDriveDataSource{}
+var _ datasource.DataSourceWithConfigure = &SourceGoogleDriveDataSource{}
-func NewSourceBigcommerceDataSource() datasource.DataSource {
- return &SourceBigcommerceDataSource{}
+func NewSourceGoogleDriveDataSource() datasource.DataSource {
+ return &SourceGoogleDriveDataSource{}
}
-// SourceBigcommerceDataSource is the data source implementation.
-type SourceBigcommerceDataSource struct {
+// SourceGoogleDriveDataSource is the data source implementation.
+type SourceGoogleDriveDataSource struct {
client *sdk.SDK
}
-// SourceBigcommerceDataSourceModel describes the data model.
-type SourceBigcommerceDataSourceModel struct {
- Configuration SourceBigcommerce `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+// SourceGoogleDriveDataSourceModel describes the data model.
+type SourceGoogleDriveDataSourceModel struct {
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
-func (r *SourceBigcommerceDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_bigcommerce"
+func (r *SourceGoogleDriveDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_source_google_drive"
}
// Schema defines the schema for the data source.
-func (r *SourceBigcommerceDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+func (r *SourceGoogleDriveDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
resp.Schema = schema.Schema{
- MarkdownDescription: "SourceBigcommerce DataSource",
+ MarkdownDescription: "SourceGoogleDrive DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bigcommerce",
- ),
- },
- Description: `must be one of ["bigcommerce"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `The date you would like to replicate data. Format: YYYY-MM-DD.`,
- },
- "store_hash": schema.StringAttribute{
- Computed: true,
- Description: `The hash code of the store. For https://api.bigcommerce.com/stores/HASH_CODE/v3/, The store's hash code is 'HASH_CODE'.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
@@ -92,7 +68,7 @@ func (r *SourceBigcommerceDataSource) Schema(ctx context.Context, req datasource
}
}
-func (r *SourceBigcommerceDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+func (r *SourceGoogleDriveDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
// Prevent panic if the provider has not been configured.
if req.ProviderData == nil {
return
@@ -112,8 +88,8 @@ func (r *SourceBigcommerceDataSource) Configure(ctx context.Context, req datasou
r.client = client
}
-func (r *SourceBigcommerceDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data *SourceBigcommerceDataSourceModel
+func (r *SourceGoogleDriveDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data *SourceGoogleDriveDataSourceModel
var item types.Object
resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
@@ -131,10 +107,10 @@ func (r *SourceBigcommerceDataSource) Read(ctx context.Context, req datasource.R
}
sourceID := data.SourceID.ValueString()
- request := operations.GetSourceBigcommerceRequest{
+ request := operations.GetSourceGoogleDriveRequest{
SourceID: sourceID,
}
- res, err := r.client.Sources.GetSourceBigcommerce(ctx, request)
+ res, err := r.client.Sources.GetSourceGoogleDrive(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
diff --git a/internal/provider/source_googledrive_data_source_sdk.go b/internal/provider/source_googledrive_data_source_sdk.go
new file mode 100644
index 000000000..b7ffc078c
--- /dev/null
+++ b/internal/provider/source_googledrive_data_source_sdk.go
@@ -0,0 +1,18 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *SourceGoogleDriveDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
+ r.Name = types.StringValue(resp.Name)
+ r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
diff --git a/internal/provider/source_googledrive_resource.go b/internal/provider/source_googledrive_resource.go
new file mode 100644
index 000000000..f44c4b136
--- /dev/null
+++ b/internal/provider/source_googledrive_resource.go
@@ -0,0 +1,556 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ resource.Resource = &SourceGoogleDriveResource{}
+var _ resource.ResourceWithImportState = &SourceGoogleDriveResource{}
+
+func NewSourceGoogleDriveResource() resource.Resource {
+ return &SourceGoogleDriveResource{}
+}
+
+// SourceGoogleDriveResource defines the resource implementation.
+type SourceGoogleDriveResource struct {
+ client *sdk.SDK
+}
+
+// SourceGoogleDriveResourceModel describes the resource data model.
+type SourceGoogleDriveResourceModel struct {
+ Configuration SourceGoogleDrive `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+func (r *SourceGoogleDriveResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_source_google_drive"
+}
+
+func (r *SourceGoogleDriveResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "SourceGoogleDrive Resource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "credentials": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "authenticate_via_google_o_auth": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "client_id": schema.StringAttribute{
+ Required: true,
+ Description: `Client ID for the Google Drive API`,
+ },
+ "client_secret": schema.StringAttribute{
+ Required: true,
+ Description: `Client Secret for the Google Drive API`,
+ },
+ "refresh_token": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ Description: `Refresh Token for the Google Drive API`,
+ },
+ },
+ Description: `Credentials for connecting to the Google Drive API`,
+ },
+ "service_account_key_authentication": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "service_account_info": schema.StringAttribute{
+ Required: true,
+ Description: `The JSON key of the service account to use for authorization. Read more here.`,
+ },
+ },
+ Description: `Credentials for connecting to the Google Drive API`,
+ },
+ },
+ Description: `Credentials for connecting to the Google Drive API`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "folder_url": schema.StringAttribute{
+ Required: true,
+ Description: `URL for the folder you want to sync. Using individual streams and glob patterns, it's possible to only sync a subset of all files located in the folder.`,
+ },
+ "start_date": schema.StringAttribute{
+ Optional: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.`,
+ Validators: []validator.String{
+ validators.IsRFC3339(),
+ },
+ },
+ "streams": schema.ListNestedAttribute{
+ Required: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "days_to_sync_if_history_is_full": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 3` + "\n" +
+ `When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.`,
+ },
+ "format": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "avro_format": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "double_as_string": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.`,
+ },
+ },
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
+ },
+ "csv_format": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "delimiter": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: ","` + "\n" +
+ `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
+ },
+ "double_quote": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
+ },
+ "encoding": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "utf8"` + "\n" +
+ `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
+ },
+ "escape_char": schema.StringAttribute{
+ Optional: true,
+ Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`,
+ },
+ "false_values": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `A set of case-sensitive strings that should be interpreted as false values.`,
+ },
+ "header_definition": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "autogenerated": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ },
+ "from_csv": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ },
+ "user_provided": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "column_names": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ Description: `The column names that will be used while emitting the CSV records`,
+ },
+ },
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ },
+ },
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "null_values": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.`,
+ },
+ "quote_char": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "\""` + "\n" +
+ `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
+ },
+ "skip_rows_after_header": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `The number of rows to skip after the header row.`,
+ },
+ "skip_rows_before_header": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`,
+ },
+ "strings_can_be_null": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`,
+ },
+ "true_values": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `A set of case-sensitive strings that should be interpreted as true values.`,
+ },
+ },
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
+ },
+ "document_file_type_format_experimental": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "skip_unprocessable_file_types": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.`,
+ },
+ },
+ Description: `Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.`,
+ },
+ "jsonl_format": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
+ },
+ "parquet_format": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "decimal_as_float": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.`,
+ },
+ },
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
+ },
+ },
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "globs": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.`,
+ },
+ "input_schema": schema.StringAttribute{
+ Optional: true,
+ Description: `The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.`,
+ },
+ "name": schema.StringAttribute{
+ Required: true,
+ Description: `The name of the stream.`,
+ },
+ "primary_key": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `The column or columns (for a composite key) that serves as the unique identifier of a record.`,
+ },
+ "schemaless": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `When enabled, syncs will not validate or structure records against the stream's schema.`,
+ },
+ "validation_policy": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["Emit Record", "Skip Record", "Wait for Discover"]; Default: "Emit Record"` + "\n" +
+ `The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "Emit Record",
+ "Skip Record",
+ "Wait for Discover",
+ ),
+ },
+ },
+ },
+ },
+ Description: `Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.`,
+ },
+ },
+ MarkdownDescription: `Used during spec; allows the developer to configure the cloud provider specific options` + "\n" +
+ `that are needed when users configure a file-based source.`,
+ },
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
+ "name": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
+ },
+ "secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
+ },
+ "source_id": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ },
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ },
+ "workspace_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ Required: true,
+ },
+ },
+ }
+}
+
+func (r *SourceGoogleDriveResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ client, ok := req.ProviderData.(*sdk.SDK)
+
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected Resource Configure Type",
+ fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = client
+}
+
+func (r *SourceGoogleDriveResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data *SourceGoogleDriveResourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ request := data.ToCreateSDKType()
+ res, err := r.client.Sources.CreateSourceGoogleDrive(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.SourceResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromCreateResponse(res.SourceResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+// Read refreshes Terraform state for a source_googledrive resource by
+// fetching the source's current metadata from the Airbyte API.
+func (r *SourceGoogleDriveResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+	var data *SourceGoogleDriveResourceModel
+	var item types.Object
+
+	// Load prior state as a raw object first so null/unknown values can be
+	// tolerated during the conversion into the typed model below.
+	resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+		UnhandledNullAsEmpty:    true,
+		UnhandledUnknownAsEmpty: true,
+	})...)
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	sourceID := data.SourceID.ValueString()
+	request := operations.GetSourceGoogleDriveRequest{
+		SourceID: sourceID,
+	}
+	res, err := r.client.Sources.GetSourceGoogleDrive(ctx, request)
+	if err != nil {
+		resp.Diagnostics.AddError("failure to invoke API", err.Error())
+		if res != nil && res.RawResponse != nil {
+			resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+		}
+		return
+	}
+	if res == nil {
+		resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+		return
+	}
+	// Read requires exactly 200; anything else is surfaced with the raw response.
+	if res.StatusCode != 200 {
+		resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+		return
+	}
+	if res.SourceResponse == nil {
+		resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+		return
+	}
+	data.RefreshFromGetResponse(res.SourceResponse)
+
+	// Save updated data into Terraform state
+	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+// Update applies plan changes by PUTting the full source configuration to the
+// Airbyte API and then re-reading the source to refresh server-computed
+// attributes before persisting state.
+func (r *SourceGoogleDriveResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+	var data *SourceGoogleDriveResourceModel
+	merge(ctx, req, resp, &data)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	sourceGoogleDrivePutRequest := data.ToUpdateSDKType()
+	sourceID := data.SourceID.ValueString()
+	request := operations.PutSourceGoogleDriveRequest{
+		SourceGoogleDrivePutRequest: sourceGoogleDrivePutRequest,
+		SourceID:                    sourceID,
+	}
+	res, err := r.client.Sources.PutSourceGoogleDrive(ctx, request)
+	if err != nil {
+		resp.Diagnostics.AddError("failure to invoke API", err.Error())
+		if res != nil && res.RawResponse != nil {
+			resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+		}
+		return
+	}
+	if res == nil {
+		resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+		return
+	}
+	// Accept any 2xx status; compare numerically instead of stringifying the
+	// status code and inspecting its first character.
+	if res.StatusCode < 200 || res.StatusCode > 299 {
+		resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+		return
+	}
+	sourceId1 := data.SourceID.ValueString()
+	getRequest := operations.GetSourceGoogleDriveRequest{
+		SourceID: sourceId1,
+	}
+	getResponse, err := r.client.Sources.GetSourceGoogleDrive(ctx, getRequest)
+	if err != nil {
+		resp.Diagnostics.AddError("failure to invoke API", err.Error())
+		// BUG FIX: attach debug output from the GET call that actually failed,
+		// not from the earlier PUT response ("res") as the original code did.
+		if getResponse != nil && getResponse.RawResponse != nil {
+			resp.Diagnostics.AddError("unexpected http request/response", debugResponse(getResponse.RawResponse))
+		}
+		return
+	}
+	if getResponse == nil {
+		resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse))
+		return
+	}
+	if getResponse.StatusCode != 200 {
+		resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse))
+		return
+	}
+	if getResponse.SourceResponse == nil {
+		resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse))
+		return
+	}
+	data.RefreshFromGetResponse(getResponse.SourceResponse)
+
+	// Save updated data into Terraform state
+	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+// Delete removes the source from the Airbyte workspace. Terraform drops the
+// resource from state automatically when this method returns without error.
+func (r *SourceGoogleDriveResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+	var data *SourceGoogleDriveResourceModel
+	var item types.Object
+
+	resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+		UnhandledNullAsEmpty:    true,
+		UnhandledUnknownAsEmpty: true,
+	})...)
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	sourceID := data.SourceID.ValueString()
+	request := operations.DeleteSourceGoogleDriveRequest{
+		SourceID: sourceID,
+	}
+	res, err := r.client.Sources.DeleteSourceGoogleDrive(ctx, request)
+	if err != nil {
+		resp.Diagnostics.AddError("failure to invoke API", err.Error())
+		if res != nil && res.RawResponse != nil {
+			resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+		}
+		return
+	}
+	if res == nil {
+		resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+		return
+	}
+	// Accept any 2xx status; compare numerically instead of stringifying the
+	// status code and inspecting its first character.
+	if res.StatusCode < 200 || res.StatusCode > 299 {
+		resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+		return
+	}
+}
+
+// ImportState supports `terraform import` by seeding state with the source ID
+// supplied on the command line; the subsequent Read fills in the rest.
+func (r *SourceGoogleDriveResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+	resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
+}
diff --git a/internal/provider/source_googledrive_resource_sdk.go b/internal/provider/source_googledrive_resource_sdk.go
new file mode 100644
index 000000000..c203c1c4b
--- /dev/null
+++ b/internal/provider/source_googledrive_resource_sdk.go
@@ -0,0 +1,604 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "time"
+)
+
+// ToCreateSDKType converts the Terraform plan/state model into the SDK request
+// payload used to create a Google Drive source via the Airbyte API. Optional
+// Terraform attributes (null or unknown) are translated into nil pointers so
+// they are omitted from the request.
+func (r *SourceGoogleDriveResourceModel) ToCreateSDKType() *shared.SourceGoogleDriveCreateRequest {
+	// Credentials is a oneof. If more than one variant were populated, the
+	// service-account branch would win because it is assigned last —
+	// presumably schema validation enforces exactly one; confirm upstream.
+	var credentials shared.SourceGoogleDriveAuthentication
+	var sourceGoogleDriveAuthenticateViaGoogleOAuth *shared.SourceGoogleDriveAuthenticateViaGoogleOAuth
+	if r.Configuration.Credentials.AuthenticateViaGoogleOAuth != nil {
+		clientID := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.ClientID.ValueString()
+		clientSecret := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.ClientSecret.ValueString()
+		refreshToken := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.RefreshToken.ValueString()
+		sourceGoogleDriveAuthenticateViaGoogleOAuth = &shared.SourceGoogleDriveAuthenticateViaGoogleOAuth{
+			ClientID:     clientID,
+			ClientSecret: clientSecret,
+			RefreshToken: refreshToken,
+		}
+	}
+	if sourceGoogleDriveAuthenticateViaGoogleOAuth != nil {
+		credentials = shared.SourceGoogleDriveAuthentication{
+			SourceGoogleDriveAuthenticateViaGoogleOAuth: sourceGoogleDriveAuthenticateViaGoogleOAuth,
+		}
+	}
+	var sourceGoogleDriveServiceAccountKeyAuthentication *shared.SourceGoogleDriveServiceAccountKeyAuthentication
+	if r.Configuration.Credentials.ServiceAccountKeyAuthentication != nil {
+		serviceAccountInfo := r.Configuration.Credentials.ServiceAccountKeyAuthentication.ServiceAccountInfo.ValueString()
+		sourceGoogleDriveServiceAccountKeyAuthentication = &shared.SourceGoogleDriveServiceAccountKeyAuthentication{
+			ServiceAccountInfo: serviceAccountInfo,
+		}
+	}
+	if sourceGoogleDriveServiceAccountKeyAuthentication != nil {
+		credentials = shared.SourceGoogleDriveAuthentication{
+			SourceGoogleDriveServiceAccountKeyAuthentication: sourceGoogleDriveServiceAccountKeyAuthentication,
+		}
+	}
+	folderURL := r.Configuration.FolderURL.ValueString()
+	// Optional start date. NOTE(review): the time.Parse error is discarded, so
+	// a malformed timestamp silently becomes the zero time — presumably the
+	// schema validates RFC3339 input before this runs; confirm upstream.
+	startDate := new(time.Time)
+	if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+		*startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+	} else {
+		startDate = nil
+	}
+	var streams []shared.SourceGoogleDriveFileBasedStreamConfig = nil
+	for _, streamsItem := range r.Configuration.Streams {
+		daysToSyncIfHistoryIsFull := new(int64)
+		if !streamsItem.DaysToSyncIfHistoryIsFull.IsUnknown() && !streamsItem.DaysToSyncIfHistoryIsFull.IsNull() {
+			*daysToSyncIfHistoryIsFull = streamsItem.DaysToSyncIfHistoryIsFull.ValueInt64()
+		} else {
+			daysToSyncIfHistoryIsFull = nil
+		}
+		// Format is a oneof; the last non-nil variant assigned below wins.
+		var format shared.SourceGoogleDriveFormat
+		var sourceGoogleDriveAvroFormat *shared.SourceGoogleDriveAvroFormat
+		if streamsItem.Format.AvroFormat != nil {
+			doubleAsString := new(bool)
+			if !streamsItem.Format.AvroFormat.DoubleAsString.IsUnknown() && !streamsItem.Format.AvroFormat.DoubleAsString.IsNull() {
+				*doubleAsString = streamsItem.Format.AvroFormat.DoubleAsString.ValueBool()
+			} else {
+				doubleAsString = nil
+			}
+			sourceGoogleDriveAvroFormat = &shared.SourceGoogleDriveAvroFormat{
+				DoubleAsString: doubleAsString,
+			}
+		}
+		if sourceGoogleDriveAvroFormat != nil {
+			format = shared.SourceGoogleDriveFormat{
+				SourceGoogleDriveAvroFormat: sourceGoogleDriveAvroFormat,
+			}
+		}
+		var sourceGoogleDriveCSVFormat *shared.SourceGoogleDriveCSVFormat
+		if streamsItem.Format.CSVFormat != nil {
+			delimiter := new(string)
+			if !streamsItem.Format.CSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.CSVFormat.Delimiter.IsNull() {
+				*delimiter = streamsItem.Format.CSVFormat.Delimiter.ValueString()
+			} else {
+				delimiter = nil
+			}
+			doubleQuote := new(bool)
+			if !streamsItem.Format.CSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.CSVFormat.DoubleQuote.IsNull() {
+				*doubleQuote = streamsItem.Format.CSVFormat.DoubleQuote.ValueBool()
+			} else {
+				doubleQuote = nil
+			}
+			encoding := new(string)
+			if !streamsItem.Format.CSVFormat.Encoding.IsUnknown() && !streamsItem.Format.CSVFormat.Encoding.IsNull() {
+				*encoding = streamsItem.Format.CSVFormat.Encoding.ValueString()
+			} else {
+				encoding = nil
+			}
+			escapeChar := new(string)
+			if !streamsItem.Format.CSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.CSVFormat.EscapeChar.IsNull() {
+				*escapeChar = streamsItem.Format.CSVFormat.EscapeChar.ValueString()
+			} else {
+				escapeChar = nil
+			}
+			var falseValues []string = nil
+			for _, falseValuesItem := range streamsItem.Format.CSVFormat.FalseValues {
+				falseValues = append(falseValues, falseValuesItem.ValueString())
+			}
+			// HeaderDefinition is itself a nested oneof (FromCSV /
+			// Autogenerated / UserProvided); last non-nil variant wins.
+			var headerDefinition *shared.SourceGoogleDriveCSVHeaderDefinition
+			if streamsItem.Format.CSVFormat.HeaderDefinition != nil {
+				var sourceGoogleDriveFromCSV *shared.SourceGoogleDriveFromCSV
+				if streamsItem.Format.CSVFormat.HeaderDefinition.FromCSV != nil {
+					sourceGoogleDriveFromCSV = &shared.SourceGoogleDriveFromCSV{}
+				}
+				if sourceGoogleDriveFromCSV != nil {
+					headerDefinition = &shared.SourceGoogleDriveCSVHeaderDefinition{
+						SourceGoogleDriveFromCSV: sourceGoogleDriveFromCSV,
+					}
+				}
+				var sourceGoogleDriveAutogenerated *shared.SourceGoogleDriveAutogenerated
+				if streamsItem.Format.CSVFormat.HeaderDefinition.Autogenerated != nil {
+					sourceGoogleDriveAutogenerated = &shared.SourceGoogleDriveAutogenerated{}
+				}
+				if sourceGoogleDriveAutogenerated != nil {
+					headerDefinition = &shared.SourceGoogleDriveCSVHeaderDefinition{
+						SourceGoogleDriveAutogenerated: sourceGoogleDriveAutogenerated,
+					}
+				}
+				var sourceGoogleDriveUserProvided *shared.SourceGoogleDriveUserProvided
+				if streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided != nil {
+					var columnNames []string = nil
+					for _, columnNamesItem := range streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided.ColumnNames {
+						columnNames = append(columnNames, columnNamesItem.ValueString())
+					}
+					sourceGoogleDriveUserProvided = &shared.SourceGoogleDriveUserProvided{
+						ColumnNames: columnNames,
+					}
+				}
+				if sourceGoogleDriveUserProvided != nil {
+					headerDefinition = &shared.SourceGoogleDriveCSVHeaderDefinition{
+						SourceGoogleDriveUserProvided: sourceGoogleDriveUserProvided,
+					}
+				}
+			}
+			var nullValues []string = nil
+			for _, nullValuesItem := range streamsItem.Format.CSVFormat.NullValues {
+				nullValues = append(nullValues, nullValuesItem.ValueString())
+			}
+			quoteChar := new(string)
+			if !streamsItem.Format.CSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.CSVFormat.QuoteChar.IsNull() {
+				*quoteChar = streamsItem.Format.CSVFormat.QuoteChar.ValueString()
+			} else {
+				quoteChar = nil
+			}
+			skipRowsAfterHeader := new(int64)
+			if !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsNull() {
+				*skipRowsAfterHeader = streamsItem.Format.CSVFormat.SkipRowsAfterHeader.ValueInt64()
+			} else {
+				skipRowsAfterHeader = nil
+			}
+			skipRowsBeforeHeader := new(int64)
+			if !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsNull() {
+				*skipRowsBeforeHeader = streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.ValueInt64()
+			} else {
+				skipRowsBeforeHeader = nil
+			}
+			stringsCanBeNull := new(bool)
+			if !streamsItem.Format.CSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.CSVFormat.StringsCanBeNull.IsNull() {
+				*stringsCanBeNull = streamsItem.Format.CSVFormat.StringsCanBeNull.ValueBool()
+			} else {
+				stringsCanBeNull = nil
+			}
+			var trueValues []string = nil
+			for _, trueValuesItem := range streamsItem.Format.CSVFormat.TrueValues {
+				trueValues = append(trueValues, trueValuesItem.ValueString())
+			}
+			sourceGoogleDriveCSVFormat = &shared.SourceGoogleDriveCSVFormat{
+				Delimiter:            delimiter,
+				DoubleQuote:          doubleQuote,
+				Encoding:             encoding,
+				EscapeChar:           escapeChar,
+				FalseValues:          falseValues,
+				HeaderDefinition:     headerDefinition,
+				NullValues:           nullValues,
+				QuoteChar:            quoteChar,
+				SkipRowsAfterHeader:  skipRowsAfterHeader,
+				SkipRowsBeforeHeader: skipRowsBeforeHeader,
+				StringsCanBeNull:     stringsCanBeNull,
+				TrueValues:           trueValues,
+			}
+		}
+		if sourceGoogleDriveCSVFormat != nil {
+			format = shared.SourceGoogleDriveFormat{
+				SourceGoogleDriveCSVFormat: sourceGoogleDriveCSVFormat,
+			}
+		}
+		var sourceGoogleDriveJsonlFormat *shared.SourceGoogleDriveJsonlFormat
+		if streamsItem.Format.JsonlFormat != nil {
+			sourceGoogleDriveJsonlFormat = &shared.SourceGoogleDriveJsonlFormat{}
+		}
+		if sourceGoogleDriveJsonlFormat != nil {
+			format = shared.SourceGoogleDriveFormat{
+				SourceGoogleDriveJsonlFormat: sourceGoogleDriveJsonlFormat,
+			}
+		}
+		var sourceGoogleDriveParquetFormat *shared.SourceGoogleDriveParquetFormat
+		if streamsItem.Format.ParquetFormat != nil {
+			decimalAsFloat := new(bool)
+			if !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsUnknown() && !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsNull() {
+				*decimalAsFloat = streamsItem.Format.ParquetFormat.DecimalAsFloat.ValueBool()
+			} else {
+				decimalAsFloat = nil
+			}
+			sourceGoogleDriveParquetFormat = &shared.SourceGoogleDriveParquetFormat{
+				DecimalAsFloat: decimalAsFloat,
+			}
+		}
+		if sourceGoogleDriveParquetFormat != nil {
+			format = shared.SourceGoogleDriveFormat{
+				SourceGoogleDriveParquetFormat: sourceGoogleDriveParquetFormat,
+			}
+		}
+		var sourceGoogleDriveDocumentFileTypeFormatExperimental *shared.SourceGoogleDriveDocumentFileTypeFormatExperimental
+		if streamsItem.Format.DocumentFileTypeFormatExperimental != nil {
+			skipUnprocessableFileTypes := new(bool)
+			if !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsUnknown() && !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsNull() {
+				*skipUnprocessableFileTypes = streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.ValueBool()
+			} else {
+				skipUnprocessableFileTypes = nil
+			}
+			sourceGoogleDriveDocumentFileTypeFormatExperimental = &shared.SourceGoogleDriveDocumentFileTypeFormatExperimental{
+				SkipUnprocessableFileTypes: skipUnprocessableFileTypes,
+			}
+		}
+		if sourceGoogleDriveDocumentFileTypeFormatExperimental != nil {
+			format = shared.SourceGoogleDriveFormat{
+				SourceGoogleDriveDocumentFileTypeFormatExperimental: sourceGoogleDriveDocumentFileTypeFormatExperimental,
+			}
+		}
+		var globs []string = nil
+		for _, globsItem := range streamsItem.Globs {
+			globs = append(globs, globsItem.ValueString())
+		}
+		inputSchema := new(string)
+		if !streamsItem.InputSchema.IsUnknown() && !streamsItem.InputSchema.IsNull() {
+			*inputSchema = streamsItem.InputSchema.ValueString()
+		} else {
+			inputSchema = nil
+		}
+		name := streamsItem.Name.ValueString()
+		primaryKey := new(string)
+		if !streamsItem.PrimaryKey.IsUnknown() && !streamsItem.PrimaryKey.IsNull() {
+			*primaryKey = streamsItem.PrimaryKey.ValueString()
+		} else {
+			primaryKey = nil
+		}
+		schemaless := new(bool)
+		if !streamsItem.Schemaless.IsUnknown() && !streamsItem.Schemaless.IsNull() {
+			*schemaless = streamsItem.Schemaless.ValueBool()
+		} else {
+			schemaless = nil
+		}
+		validationPolicy := new(shared.SourceGoogleDriveValidationPolicy)
+		if !streamsItem.ValidationPolicy.IsUnknown() && !streamsItem.ValidationPolicy.IsNull() {
+			*validationPolicy = shared.SourceGoogleDriveValidationPolicy(streamsItem.ValidationPolicy.ValueString())
+		} else {
+			validationPolicy = nil
+		}
+		streams = append(streams, shared.SourceGoogleDriveFileBasedStreamConfig{
+			DaysToSyncIfHistoryIsFull: daysToSyncIfHistoryIsFull,
+			Format:                    format,
+			Globs:                     globs,
+			InputSchema:               inputSchema,
+			Name:                      name,
+			PrimaryKey:                primaryKey,
+			Schemaless:                schemaless,
+			ValidationPolicy:          validationPolicy,
+		})
+	}
+	configuration := shared.SourceGoogleDrive{
+		Credentials: credentials,
+		FolderURL:   folderURL,
+		StartDate:   startDate,
+		Streams:     streams,
+	}
+	// Optional top-level create fields (definition and secret IDs).
+	definitionID := new(string)
+	if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+		*definitionID = r.DefinitionID.ValueString()
+	} else {
+		definitionID = nil
+	}
+	name1 := r.Name.ValueString()
+	secretID := new(string)
+	if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
+		*secretID = r.SecretID.ValueString()
+	} else {
+		secretID = nil
+	}
+	workspaceID := r.WorkspaceID.ValueString()
+	out := shared.SourceGoogleDriveCreateRequest{
+		Configuration: configuration,
+		DefinitionID:  definitionID,
+		Name:          name1,
+		SecretID:      secretID,
+		WorkspaceID:   workspaceID,
+	}
+	return &out
+}
+
+// ToGetSDKType returns the SDK payload for read-style operations; it reuses
+// the create-request shape, which carries the full configuration.
+func (r *SourceGoogleDriveResourceModel) ToGetSDKType() *shared.SourceGoogleDriveCreateRequest {
+	return r.ToCreateSDKType()
+}
+
+// ToUpdateSDKType converts the Terraform model into the PUT (update) request
+// payload. It mirrors ToCreateSDKType but targets the *Update SDK types and
+// omits the create-only DefinitionID/SecretID fields.
+func (r *SourceGoogleDriveResourceModel) ToUpdateSDKType() *shared.SourceGoogleDrivePutRequest {
+	// Credentials is a oneof; the service-account branch is assigned last and
+	// would win if both variants were set.
+	var credentials shared.SourceGoogleDriveUpdateAuthentication
+	var sourceGoogleDriveUpdateAuthenticateViaGoogleOAuth *shared.SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth
+	if r.Configuration.Credentials.AuthenticateViaGoogleOAuth != nil {
+		clientID := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.ClientID.ValueString()
+		clientSecret := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.ClientSecret.ValueString()
+		refreshToken := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.RefreshToken.ValueString()
+		sourceGoogleDriveUpdateAuthenticateViaGoogleOAuth = &shared.SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth{
+			ClientID:     clientID,
+			ClientSecret: clientSecret,
+			RefreshToken: refreshToken,
+		}
+	}
+	if sourceGoogleDriveUpdateAuthenticateViaGoogleOAuth != nil {
+		credentials = shared.SourceGoogleDriveUpdateAuthentication{
+			SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth: sourceGoogleDriveUpdateAuthenticateViaGoogleOAuth,
+		}
+	}
+	var sourceGoogleDriveUpdateServiceAccountKeyAuthentication *shared.SourceGoogleDriveUpdateServiceAccountKeyAuthentication
+	if r.Configuration.Credentials.ServiceAccountKeyAuthentication != nil {
+		serviceAccountInfo := r.Configuration.Credentials.ServiceAccountKeyAuthentication.ServiceAccountInfo.ValueString()
+		sourceGoogleDriveUpdateServiceAccountKeyAuthentication = &shared.SourceGoogleDriveUpdateServiceAccountKeyAuthentication{
+			ServiceAccountInfo: serviceAccountInfo,
+		}
+	}
+	if sourceGoogleDriveUpdateServiceAccountKeyAuthentication != nil {
+		credentials = shared.SourceGoogleDriveUpdateAuthentication{
+			SourceGoogleDriveUpdateServiceAccountKeyAuthentication: sourceGoogleDriveUpdateServiceAccountKeyAuthentication,
+		}
+	}
+	folderURL := r.Configuration.FolderURL.ValueString()
+	// Optional start date. NOTE(review): parse error discarded, as in
+	// ToCreateSDKType — malformed input becomes the zero time.
+	startDate := new(time.Time)
+	if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+		*startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+	} else {
+		startDate = nil
+	}
+	var streams []shared.SourceGoogleDriveUpdateFileBasedStreamConfig = nil
+	for _, streamsItem := range r.Configuration.Streams {
+		daysToSyncIfHistoryIsFull := new(int64)
+		if !streamsItem.DaysToSyncIfHistoryIsFull.IsUnknown() && !streamsItem.DaysToSyncIfHistoryIsFull.IsNull() {
+			*daysToSyncIfHistoryIsFull = streamsItem.DaysToSyncIfHistoryIsFull.ValueInt64()
+		} else {
+			daysToSyncIfHistoryIsFull = nil
+		}
+		// Format is a oneof; the last non-nil variant assigned below wins.
+		var format shared.SourceGoogleDriveUpdateFormat
+		var sourceGoogleDriveUpdateAvroFormat *shared.SourceGoogleDriveUpdateAvroFormat
+		if streamsItem.Format.AvroFormat != nil {
+			doubleAsString := new(bool)
+			if !streamsItem.Format.AvroFormat.DoubleAsString.IsUnknown() && !streamsItem.Format.AvroFormat.DoubleAsString.IsNull() {
+				*doubleAsString = streamsItem.Format.AvroFormat.DoubleAsString.ValueBool()
+			} else {
+				doubleAsString = nil
+			}
+			sourceGoogleDriveUpdateAvroFormat = &shared.SourceGoogleDriveUpdateAvroFormat{
+				DoubleAsString: doubleAsString,
+			}
+		}
+		if sourceGoogleDriveUpdateAvroFormat != nil {
+			format = shared.SourceGoogleDriveUpdateFormat{
+				SourceGoogleDriveUpdateAvroFormat: sourceGoogleDriveUpdateAvroFormat,
+			}
+		}
+		var sourceGoogleDriveUpdateCSVFormat *shared.SourceGoogleDriveUpdateCSVFormat
+		if streamsItem.Format.CSVFormat != nil {
+			delimiter := new(string)
+			if !streamsItem.Format.CSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.CSVFormat.Delimiter.IsNull() {
+				*delimiter = streamsItem.Format.CSVFormat.Delimiter.ValueString()
+			} else {
+				delimiter = nil
+			}
+			doubleQuote := new(bool)
+			if !streamsItem.Format.CSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.CSVFormat.DoubleQuote.IsNull() {
+				*doubleQuote = streamsItem.Format.CSVFormat.DoubleQuote.ValueBool()
+			} else {
+				doubleQuote = nil
+			}
+			encoding := new(string)
+			if !streamsItem.Format.CSVFormat.Encoding.IsUnknown() && !streamsItem.Format.CSVFormat.Encoding.IsNull() {
+				*encoding = streamsItem.Format.CSVFormat.Encoding.ValueString()
+			} else {
+				encoding = nil
+			}
+			escapeChar := new(string)
+			if !streamsItem.Format.CSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.CSVFormat.EscapeChar.IsNull() {
+				*escapeChar = streamsItem.Format.CSVFormat.EscapeChar.ValueString()
+			} else {
+				escapeChar = nil
+			}
+			var falseValues []string = nil
+			for _, falseValuesItem := range streamsItem.Format.CSVFormat.FalseValues {
+				falseValues = append(falseValues, falseValuesItem.ValueString())
+			}
+			// Nested oneof for the CSV header definition; last non-nil wins.
+			var headerDefinition *shared.SourceGoogleDriveUpdateCSVHeaderDefinition
+			if streamsItem.Format.CSVFormat.HeaderDefinition != nil {
+				var sourceGoogleDriveUpdateFromCSV *shared.SourceGoogleDriveUpdateFromCSV
+				if streamsItem.Format.CSVFormat.HeaderDefinition.FromCSV != nil {
+					sourceGoogleDriveUpdateFromCSV = &shared.SourceGoogleDriveUpdateFromCSV{}
+				}
+				if sourceGoogleDriveUpdateFromCSV != nil {
+					headerDefinition = &shared.SourceGoogleDriveUpdateCSVHeaderDefinition{
+						SourceGoogleDriveUpdateFromCSV: sourceGoogleDriveUpdateFromCSV,
+					}
+				}
+				var sourceGoogleDriveUpdateAutogenerated *shared.SourceGoogleDriveUpdateAutogenerated
+				if streamsItem.Format.CSVFormat.HeaderDefinition.Autogenerated != nil {
+					sourceGoogleDriveUpdateAutogenerated = &shared.SourceGoogleDriveUpdateAutogenerated{}
+				}
+				if sourceGoogleDriveUpdateAutogenerated != nil {
+					headerDefinition = &shared.SourceGoogleDriveUpdateCSVHeaderDefinition{
+						SourceGoogleDriveUpdateAutogenerated: sourceGoogleDriveUpdateAutogenerated,
+					}
+				}
+				var sourceGoogleDriveUpdateUserProvided *shared.SourceGoogleDriveUpdateUserProvided
+				if streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided != nil {
+					var columnNames []string = nil
+					for _, columnNamesItem := range streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided.ColumnNames {
+						columnNames = append(columnNames, columnNamesItem.ValueString())
+					}
+					sourceGoogleDriveUpdateUserProvided = &shared.SourceGoogleDriveUpdateUserProvided{
+						ColumnNames: columnNames,
+					}
+				}
+				if sourceGoogleDriveUpdateUserProvided != nil {
+					headerDefinition = &shared.SourceGoogleDriveUpdateCSVHeaderDefinition{
+						SourceGoogleDriveUpdateUserProvided: sourceGoogleDriveUpdateUserProvided,
+					}
+				}
+			}
+			var nullValues []string = nil
+			for _, nullValuesItem := range streamsItem.Format.CSVFormat.NullValues {
+				nullValues = append(nullValues, nullValuesItem.ValueString())
+			}
+			quoteChar := new(string)
+			if !streamsItem.Format.CSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.CSVFormat.QuoteChar.IsNull() {
+				*quoteChar = streamsItem.Format.CSVFormat.QuoteChar.ValueString()
+			} else {
+				quoteChar = nil
+			}
+			skipRowsAfterHeader := new(int64)
+			if !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsNull() {
+				*skipRowsAfterHeader = streamsItem.Format.CSVFormat.SkipRowsAfterHeader.ValueInt64()
+			} else {
+				skipRowsAfterHeader = nil
+			}
+			skipRowsBeforeHeader := new(int64)
+			if !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsNull() {
+				*skipRowsBeforeHeader = streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.ValueInt64()
+			} else {
+				skipRowsBeforeHeader = nil
+			}
+			stringsCanBeNull := new(bool)
+			if !streamsItem.Format.CSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.CSVFormat.StringsCanBeNull.IsNull() {
+				*stringsCanBeNull = streamsItem.Format.CSVFormat.StringsCanBeNull.ValueBool()
+			} else {
+				stringsCanBeNull = nil
+			}
+			var trueValues []string = nil
+			for _, trueValuesItem := range streamsItem.Format.CSVFormat.TrueValues {
+				trueValues = append(trueValues, trueValuesItem.ValueString())
+			}
+			sourceGoogleDriveUpdateCSVFormat = &shared.SourceGoogleDriveUpdateCSVFormat{
+				Delimiter:            delimiter,
+				DoubleQuote:          doubleQuote,
+				Encoding:             encoding,
+				EscapeChar:           escapeChar,
+				FalseValues:          falseValues,
+				HeaderDefinition:     headerDefinition,
+				NullValues:           nullValues,
+				QuoteChar:            quoteChar,
+				SkipRowsAfterHeader:  skipRowsAfterHeader,
+				SkipRowsBeforeHeader: skipRowsBeforeHeader,
+				StringsCanBeNull:     stringsCanBeNull,
+				TrueValues:           trueValues,
+			}
+		}
+		if sourceGoogleDriveUpdateCSVFormat != nil {
+			format = shared.SourceGoogleDriveUpdateFormat{
+				SourceGoogleDriveUpdateCSVFormat: sourceGoogleDriveUpdateCSVFormat,
+			}
+		}
+		var sourceGoogleDriveUpdateJsonlFormat *shared.SourceGoogleDriveUpdateJsonlFormat
+		if streamsItem.Format.JsonlFormat != nil {
+			sourceGoogleDriveUpdateJsonlFormat = &shared.SourceGoogleDriveUpdateJsonlFormat{}
+		}
+		if sourceGoogleDriveUpdateJsonlFormat != nil {
+			format = shared.SourceGoogleDriveUpdateFormat{
+				SourceGoogleDriveUpdateJsonlFormat: sourceGoogleDriveUpdateJsonlFormat,
+			}
+		}
+		var sourceGoogleDriveUpdateParquetFormat *shared.SourceGoogleDriveUpdateParquetFormat
+		if streamsItem.Format.ParquetFormat != nil {
+			decimalAsFloat := new(bool)
+			if !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsUnknown() && !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsNull() {
+				*decimalAsFloat = streamsItem.Format.ParquetFormat.DecimalAsFloat.ValueBool()
+			} else {
+				decimalAsFloat = nil
+			}
+			sourceGoogleDriveUpdateParquetFormat = &shared.SourceGoogleDriveUpdateParquetFormat{
+				DecimalAsFloat: decimalAsFloat,
+			}
+		}
+		if sourceGoogleDriveUpdateParquetFormat != nil {
+			format = shared.SourceGoogleDriveUpdateFormat{
+				SourceGoogleDriveUpdateParquetFormat: sourceGoogleDriveUpdateParquetFormat,
+			}
+		}
+		var sourceGoogleDriveUpdateDocumentFileTypeFormatExperimental *shared.SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental
+		if streamsItem.Format.DocumentFileTypeFormatExperimental != nil {
+			skipUnprocessableFileTypes := new(bool)
+			if !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsUnknown() && !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsNull() {
+				*skipUnprocessableFileTypes = streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.ValueBool()
+			} else {
+				skipUnprocessableFileTypes = nil
+			}
+			sourceGoogleDriveUpdateDocumentFileTypeFormatExperimental = &shared.SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental{
+				SkipUnprocessableFileTypes: skipUnprocessableFileTypes,
+			}
+		}
+		if sourceGoogleDriveUpdateDocumentFileTypeFormatExperimental != nil {
+			format = shared.SourceGoogleDriveUpdateFormat{
+				SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental: sourceGoogleDriveUpdateDocumentFileTypeFormatExperimental,
+			}
+		}
+		var globs []string = nil
+		for _, globsItem := range streamsItem.Globs {
+			globs = append(globs, globsItem.ValueString())
+		}
+		inputSchema := new(string)
+		if !streamsItem.InputSchema.IsUnknown() && !streamsItem.InputSchema.IsNull() {
+			*inputSchema = streamsItem.InputSchema.ValueString()
+		} else {
+			inputSchema = nil
+		}
+		name := streamsItem.Name.ValueString()
+		primaryKey := new(string)
+		if !streamsItem.PrimaryKey.IsUnknown() && !streamsItem.PrimaryKey.IsNull() {
+			*primaryKey = streamsItem.PrimaryKey.ValueString()
+		} else {
+			primaryKey = nil
+		}
+		schemaless := new(bool)
+		if !streamsItem.Schemaless.IsUnknown() && !streamsItem.Schemaless.IsNull() {
+			*schemaless = streamsItem.Schemaless.ValueBool()
+		} else {
+			schemaless = nil
+		}
+		validationPolicy := new(shared.SourceGoogleDriveUpdateValidationPolicy)
+		if !streamsItem.ValidationPolicy.IsUnknown() && !streamsItem.ValidationPolicy.IsNull() {
+			*validationPolicy = shared.SourceGoogleDriveUpdateValidationPolicy(streamsItem.ValidationPolicy.ValueString())
+		} else {
+			validationPolicy = nil
+		}
+		streams = append(streams, shared.SourceGoogleDriveUpdateFileBasedStreamConfig{
+			DaysToSyncIfHistoryIsFull: daysToSyncIfHistoryIsFull,
+			Format:                    format,
+			Globs:                     globs,
+			InputSchema:               inputSchema,
+			Name:                      name,
+			PrimaryKey:                primaryKey,
+			Schemaless:                schemaless,
+			ValidationPolicy:          validationPolicy,
+		})
+	}
+	configuration := shared.SourceGoogleDriveUpdate{
+		Credentials: credentials,
+		FolderURL:   folderURL,
+		StartDate:   startDate,
+		Streams:     streams,
+	}
+	name1 := r.Name.ValueString()
+	workspaceID := r.WorkspaceID.ValueString()
+	out := shared.SourceGoogleDrivePutRequest{
+		Configuration: configuration,
+		Name:          name1,
+		WorkspaceID:   workspaceID,
+	}
+	return &out
+}
+
+// ToDeleteSDKType returns the SDK payload for delete-style operations,
+// reusing the create-request shape.
+func (r *SourceGoogleDriveResourceModel) ToDeleteSDKType() *shared.SourceGoogleDriveCreateRequest {
+	return r.ToCreateSDKType()
+}
+
+// RefreshFromGetResponse copies server-assigned metadata from an API source
+// response into the Terraform model. Only top-level fields (name, IDs, source
+// type) are refreshed here; the connector configuration is left untouched.
+func (r *SourceGoogleDriveResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+	r.Name = types.StringValue(resp.Name)
+	r.SourceID = types.StringValue(resp.SourceID)
+	r.SourceType = types.StringValue(resp.SourceType)
+	r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
+
+// RefreshFromCreateResponse refreshes the model after a create; the create
+// and get API responses share the same shape, so this simply delegates.
+func (r *SourceGoogleDriveResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
+	r.RefreshFromGetResponse(resp)
+}
diff --git a/internal/provider/source_googlepagespeedinsights_data_source.go b/internal/provider/source_googlepagespeedinsights_data_source.go
old mode 100755
new mode 100644
index 72a3406ee..0cc63c6a6
--- a/internal/provider/source_googlepagespeedinsights_data_source.go
+++ b/internal/provider/source_googlepagespeedinsights_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceGooglePagespeedInsightsDataSource struct {
// SourceGooglePagespeedInsightsDataSourceModel describes the data model.
type SourceGooglePagespeedInsightsDataSourceModel struct {
- Configuration SourceGooglePagespeedInsights `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,49 +47,20 @@ func (r *SourceGooglePagespeedInsightsDataSource) Schema(ctx context.Context, re
MarkdownDescription: "SourceGooglePagespeedInsights DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited when using without API Key. Creating and using the API key therefore is recommended. The key is case sensitive.`,
- },
- "categories": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `Defines which Lighthouse category to run. One or many of: "accessibility", "best-practices", "performance", "pwa", "seo".`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-pagespeed-insights",
- ),
- },
- Description: `must be one of ["google-pagespeed-insights"]`,
- },
- "strategies": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The analyses strategy to use. Either "desktop" or "mobile".`,
- },
- "urls": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The URLs to retrieve pagespeed information from. The connector will attempt to sync PageSpeed reports for all the defined URLs. Format: https://(www.)url.domain`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_googlepagespeedinsights_data_source_sdk.go b/internal/provider/source_googlepagespeedinsights_data_source_sdk.go
old mode 100755
new mode 100644
index 2f13042bd..22f4441ad
--- a/internal/provider/source_googlepagespeedinsights_data_source_sdk.go
+++ b/internal/provider/source_googlepagespeedinsights_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGooglePagespeedInsightsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_googlepagespeedinsights_resource.go b/internal/provider/source_googlepagespeedinsights_resource.go
old mode 100755
new mode 100644
index 7f59ac721..3a2bffc30
--- a/internal/provider/source_googlepagespeedinsights_resource.go
+++ b/internal/provider/source_googlepagespeedinsights_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceGooglePagespeedInsightsResource struct {
// SourceGooglePagespeedInsightsResourceModel describes the resource data model.
type SourceGooglePagespeedInsightsResourceModel struct {
Configuration SourceGooglePagespeedInsights `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,6 +56,7 @@ func (r *SourceGooglePagespeedInsightsResource) Schema(ctx context.Context, req
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited when using without API Key. Creating and using the API key therefore is recommended. The key is case sensitive.`,
},
"categories": schema.ListAttribute{
@@ -63,15 +64,6 @@ func (r *SourceGooglePagespeedInsightsResource) Schema(ctx context.Context, req
ElementType: types.StringType,
Description: `Defines which Lighthouse category to run. One or many of: "accessibility", "best-practices", "performance", "pwa", "seo".`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-pagespeed-insights",
- ),
- },
- Description: `must be one of ["google-pagespeed-insights"]`,
- },
"strategies": schema.ListAttribute{
Required: true,
ElementType: types.StringType,
@@ -84,13 +76,24 @@ func (r *SourceGooglePagespeedInsightsResource) Schema(ctx context.Context, req
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -154,7 +157,7 @@ func (r *SourceGooglePagespeedInsightsResource) Create(ctx context.Context, req
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGooglePagespeedInsights(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -330,5 +333,5 @@ func (r *SourceGooglePagespeedInsightsResource) Delete(ctx context.Context, req
}
func (r *SourceGooglePagespeedInsightsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_googlepagespeedinsights_resource_sdk.go b/internal/provider/source_googlepagespeedinsights_resource_sdk.go
old mode 100755
new mode 100644
index c1cc06ddb..54bdf48cb
--- a/internal/provider/source_googlepagespeedinsights_resource_sdk.go
+++ b/internal/provider/source_googlepagespeedinsights_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -18,7 +18,6 @@ func (r *SourceGooglePagespeedInsightsResourceModel) ToCreateSDKType() *shared.S
for _, categoriesItem := range r.Configuration.Categories {
categories = append(categories, shared.SourceGooglePagespeedInsightsCategories(categoriesItem.ValueString()))
}
- sourceType := shared.SourceGooglePagespeedInsightsGooglePagespeedInsights(r.Configuration.SourceType.ValueString())
var strategies []shared.SourceGooglePagespeedInsightsStrategies = nil
for _, strategiesItem := range r.Configuration.Strategies {
strategies = append(strategies, shared.SourceGooglePagespeedInsightsStrategies(strategiesItem.ValueString()))
@@ -30,10 +29,15 @@ func (r *SourceGooglePagespeedInsightsResourceModel) ToCreateSDKType() *shared.S
configuration := shared.SourceGooglePagespeedInsights{
APIKey: apiKey,
Categories: categories,
- SourceType: sourceType,
Strategies: strategies,
Urls: urls,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -44,6 +48,7 @@ func (r *SourceGooglePagespeedInsightsResourceModel) ToCreateSDKType() *shared.S
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGooglePagespeedInsightsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -63,13 +68,13 @@ func (r *SourceGooglePagespeedInsightsResourceModel) ToUpdateSDKType() *shared.S
} else {
apiKey = nil
}
- var categories []shared.SourceGooglePagespeedInsightsUpdateCategories = nil
+ var categories []shared.Categories = nil
for _, categoriesItem := range r.Configuration.Categories {
- categories = append(categories, shared.SourceGooglePagespeedInsightsUpdateCategories(categoriesItem.ValueString()))
+ categories = append(categories, shared.Categories(categoriesItem.ValueString()))
}
- var strategies []shared.SourceGooglePagespeedInsightsUpdateStrategies = nil
+ var strategies []shared.Strategies = nil
for _, strategiesItem := range r.Configuration.Strategies {
- strategies = append(strategies, shared.SourceGooglePagespeedInsightsUpdateStrategies(strategiesItem.ValueString()))
+ strategies = append(strategies, shared.Strategies(strategiesItem.ValueString()))
}
var urls []string = nil
for _, urlsItem := range r.Configuration.Urls {
diff --git a/internal/provider/source_googlesearchconsole_data_source.go b/internal/provider/source_googlesearchconsole_data_source.go
old mode 100755
new mode 100644
index 22daef8b9..ce9314e7e
--- a/internal/provider/source_googlesearchconsole_data_source.go
+++ b/internal/provider/source_googlesearchconsole_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceGoogleSearchConsoleDataSource struct {
// SourceGoogleSearchConsoleDataSourceModel describes the data model.
type SourceGoogleSearchConsoleDataSourceModel struct {
- Configuration SourceGoogleSearchConsole `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,193 +47,20 @@ func (r *SourceGoogleSearchConsoleDataSource) Schema(ctx context.Context, req da
MarkdownDescription: "SourceGoogleSearchConsole DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "authorization": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_google_search_console_authentication_type_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access token for making authenticated requests. Read more here.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The client ID of your Google Search Console developer application. Read more here.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret of your Google Search Console developer application. Read more here.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining a new access token. Read more here.`,
- },
- },
- },
- "source_google_search_console_authentication_type_service_account_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The email of the user which has permissions to access the Google Workspace Admin APIs.`,
- },
- "service_account_info": schema.StringAttribute{
- Computed: true,
- Description: `The JSON key of the service account to use for authorization. Read more here.`,
- },
- },
- },
- "source_google_search_console_update_authentication_type_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access token for making authenticated requests. Read more here.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The client ID of your Google Search Console developer application. Read more here.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret of your Google Search Console developer application. Read more here.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining a new access token. Read more here.`,
- },
- },
- },
- "source_google_search_console_update_authentication_type_service_account_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The email of the user which has permissions to access the Google Workspace Admin APIs.`,
- },
- "service_account_info": schema.StringAttribute{
- Computed: true,
- Description: `The JSON key of the service account to use for authorization. Read more here.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "custom_reports": schema.StringAttribute{
- Computed: true,
- Description: `(DEPRCATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports.`,
- },
- "custom_reports_array": schema.ListNestedAttribute{
- Computed: true,
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "dimensions": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A list of dimensions (country, date, device, page, query)`,
- },
- "name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the custom report, this name would be used as stream name`,
- },
- },
- },
- Description: `You can add your Custom Analytics report by creating one.`,
- },
- "data_state": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "final",
- "all",
- ),
- },
- MarkdownDescription: `must be one of ["final", "all"]` + "\n" +
- `If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward.`,
- },
- "site_urls": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The URLs of the website property attached to your GSC account. Learn more about properties here.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-search-console",
- ),
- },
- Description: `must be one of ["google-search-console"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_googlesearchconsole_data_source_sdk.go b/internal/provider/source_googlesearchconsole_data_source_sdk.go
old mode 100755
new mode 100644
index de100568e..5ee8f7ac2
--- a/internal/provider/source_googlesearchconsole_data_source_sdk.go
+++ b/internal/provider/source_googlesearchconsole_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGoogleSearchConsoleDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_googlesearchconsole_resource.go b/internal/provider/source_googlesearchconsole_resource.go
old mode 100755
new mode 100644
index 9152d546a..a66f81b77
--- a/internal/provider/source_googlesearchconsole_resource.go
+++ b/internal/provider/source_googlesearchconsole_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceGoogleSearchConsoleResource struct {
// SourceGoogleSearchConsoleResourceModel describes the resource data model.
type SourceGoogleSearchConsoleResourceModel struct {
Configuration SourceGoogleSearchConsole `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,22 +60,14 @@ func (r *SourceGoogleSearchConsoleResource) Schema(ctx context.Context, req reso
"authorization": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_google_search_console_authentication_type_o_auth": schema.SingleNestedAttribute{
+ "o_auth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Access token for making authenticated requests. Read more here.`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The client ID of your Google Search Console developer application. Read more here.`,
@@ -84,74 +78,14 @@ func (r *SourceGoogleSearchConsoleResource) Schema(ctx context.Context, req reso
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The token for obtaining a new access token. Read more here.`,
},
},
},
- "source_google_search_console_authentication_type_service_account_key_authentication": schema.SingleNestedAttribute{
+ "service_account_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "email": schema.StringAttribute{
- Required: true,
- Description: `The email of the user which has permissions to access the Google Workspace Admin APIs.`,
- },
- "service_account_info": schema.StringAttribute{
- Required: true,
- Description: `The JSON key of the service account to use for authorization. Read more here.`,
- },
- },
- },
- "source_google_search_console_update_authentication_type_o_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Optional: true,
- Description: `Access token for making authenticated requests. Read more here.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The client ID of your Google Search Console developer application. Read more here.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The client secret of your Google Search Console developer application. Read more here.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The token for obtaining a new access token. Read more here.`,
- },
- },
- },
- "source_google_search_console_update_authentication_type_service_account_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
"email": schema.StringAttribute{
Required: true,
Description: `The email of the user which has permissions to access the Google Workspace Admin APIs.`,
@@ -178,7 +112,7 @@ func (r *SourceGoogleSearchConsoleResource) Schema(ctx context.Context, req reso
"dimensions": schema.ListAttribute{
Required: true,
ElementType: types.StringType,
- Description: `A list of dimensions (country, date, device, page, query)`,
+ Description: `A list of available dimensions. Please note, that for technical reasons ` + "`" + `date` + "`" + ` is the default dimension which will be included in your query whether you specify it or not. Primary key will consist of your custom dimensions and the default dimension along with ` + "`" + `site_url` + "`" + ` and ` + "`" + `search_type` + "`" + `.`,
},
"name": schema.StringAttribute{
Required: true,
@@ -190,52 +124,55 @@ func (r *SourceGoogleSearchConsoleResource) Schema(ctx context.Context, req reso
},
"data_state": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["final", "all"]; Default: "final"` + "\n" +
+ `If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.`,
Validators: []validator.String{
stringvalidator.OneOf(
"final",
"all",
),
},
- MarkdownDescription: `must be one of ["final", "all"]` + "\n" +
- `If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.`,
},
"end_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward.`,
},
"site_urls": schema.ListAttribute{
Required: true,
ElementType: types.StringType,
Description: `The URLs of the website property attached to your GSC account. Learn more about properties here.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-search-console",
- ),
- },
- Description: `must be one of ["google-search-console"]`,
- },
"start_date": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `Default: "2021-01-01"` + "\n" +
+ `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -299,7 +236,7 @@ func (r *SourceGoogleSearchConsoleResource) Create(ctx context.Context, req reso
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGoogleSearchConsole(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -475,5 +412,5 @@ func (r *SourceGoogleSearchConsoleResource) Delete(ctx context.Context, req reso
}
func (r *SourceGoogleSearchConsoleResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_googlesearchconsole_resource_sdk.go b/internal/provider/source_googlesearchconsole_resource_sdk.go
old mode 100755
new mode 100644
index 3aa2b46fb..34319d8cf
--- a/internal/provider/source_googlesearchconsole_resource_sdk.go
+++ b/internal/provider/source_googlesearchconsole_resource_sdk.go
@@ -3,52 +3,48 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGoogleSearchConsoleResourceModel) ToCreateSDKType() *shared.SourceGoogleSearchConsoleCreateRequest {
var authorization shared.SourceGoogleSearchConsoleAuthenticationType
- var sourceGoogleSearchConsoleAuthenticationTypeOAuth *shared.SourceGoogleSearchConsoleAuthenticationTypeOAuth
- if r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeOAuth != nil {
+ var sourceGoogleSearchConsoleOAuth *shared.SourceGoogleSearchConsoleOAuth
+ if r.Configuration.Authorization.OAuth != nil {
accessToken := new(string)
- if !r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeOAuth.AccessToken.IsUnknown() && !r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeOAuth.AccessToken.IsNull() {
- *accessToken = r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeOAuth.AccessToken.ValueString()
+ if !r.Configuration.Authorization.OAuth.AccessToken.IsUnknown() && !r.Configuration.Authorization.OAuth.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Authorization.OAuth.AccessToken.ValueString()
} else {
accessToken = nil
}
- authType := shared.SourceGoogleSearchConsoleAuthenticationTypeOAuthAuthType(r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeOAuth.AuthType.ValueString())
- clientID := r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeOAuth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeOAuth.RefreshToken.ValueString()
- sourceGoogleSearchConsoleAuthenticationTypeOAuth = &shared.SourceGoogleSearchConsoleAuthenticationTypeOAuth{
+ clientID := r.Configuration.Authorization.OAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Authorization.OAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Authorization.OAuth.RefreshToken.ValueString()
+ sourceGoogleSearchConsoleOAuth = &shared.SourceGoogleSearchConsoleOAuth{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceGoogleSearchConsoleAuthenticationTypeOAuth != nil {
+ if sourceGoogleSearchConsoleOAuth != nil {
authorization = shared.SourceGoogleSearchConsoleAuthenticationType{
- SourceGoogleSearchConsoleAuthenticationTypeOAuth: sourceGoogleSearchConsoleAuthenticationTypeOAuth,
+ SourceGoogleSearchConsoleOAuth: sourceGoogleSearchConsoleOAuth,
}
}
- var sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication *shared.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication
- if r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication != nil {
- authType1 := shared.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthenticationAuthType(r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication.AuthType.ValueString())
- email := r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication.Email.ValueString()
- serviceAccountInfo := r.Configuration.Authorization.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication.ServiceAccountInfo.ValueString()
- sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication = &shared.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication{
- AuthType: authType1,
+ var sourceGoogleSearchConsoleServiceAccountKeyAuthentication *shared.SourceGoogleSearchConsoleServiceAccountKeyAuthentication
+ if r.Configuration.Authorization.ServiceAccountKeyAuthentication != nil {
+ email := r.Configuration.Authorization.ServiceAccountKeyAuthentication.Email.ValueString()
+ serviceAccountInfo := r.Configuration.Authorization.ServiceAccountKeyAuthentication.ServiceAccountInfo.ValueString()
+ sourceGoogleSearchConsoleServiceAccountKeyAuthentication = &shared.SourceGoogleSearchConsoleServiceAccountKeyAuthentication{
Email: email,
ServiceAccountInfo: serviceAccountInfo,
}
}
- if sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication != nil {
+ if sourceGoogleSearchConsoleServiceAccountKeyAuthentication != nil {
authorization = shared.SourceGoogleSearchConsoleAuthenticationType{
- SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication: sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication,
+ SourceGoogleSearchConsoleServiceAccountKeyAuthentication: sourceGoogleSearchConsoleServiceAccountKeyAuthentication,
}
}
customReports := new(string)
@@ -59,9 +55,9 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToCreateSDKType() *shared.Sourc
}
var customReportsArray []shared.SourceGoogleSearchConsoleCustomReportConfig = nil
for _, customReportsArrayItem := range r.Configuration.CustomReportsArray {
- var dimensions []shared.SourceGoogleSearchConsoleCustomReportConfigValidEnums = nil
+ var dimensions []shared.SourceGoogleSearchConsoleValidEnums = nil
for _, dimensionsItem := range customReportsArrayItem.Dimensions {
- dimensions = append(dimensions, shared.SourceGoogleSearchConsoleCustomReportConfigValidEnums(dimensionsItem.ValueString()))
+ dimensions = append(dimensions, shared.SourceGoogleSearchConsoleValidEnums(dimensionsItem.ValueString()))
}
name := customReportsArrayItem.Name.ValueString()
customReportsArray = append(customReportsArray, shared.SourceGoogleSearchConsoleCustomReportConfig{
@@ -85,7 +81,6 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToCreateSDKType() *shared.Sourc
for _, siteUrlsItem := range r.Configuration.SiteUrls {
siteUrls = append(siteUrls, siteUrlsItem.ValueString())
}
- sourceType := shared.SourceGoogleSearchConsoleGoogleSearchConsole(r.Configuration.SourceType.ValueString())
startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
@@ -99,9 +94,14 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToCreateSDKType() *shared.Sourc
DataState: dataState,
EndDate: endDate,
SiteUrls: siteUrls,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name1 := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -112,6 +112,7 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToCreateSDKType() *shared.Sourc
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGoogleSearchConsoleCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name1,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -125,46 +126,42 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToGetSDKType() *shared.SourceGo
}
func (r *SourceGoogleSearchConsoleResourceModel) ToUpdateSDKType() *shared.SourceGoogleSearchConsolePutRequest {
- var authorization shared.SourceGoogleSearchConsoleUpdateAuthenticationType
- var sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth *shared.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth
- if r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth != nil {
+ var authorization shared.AuthenticationType
+ var sourceGoogleSearchConsoleUpdateOAuth *shared.SourceGoogleSearchConsoleUpdateOAuth
+ if r.Configuration.Authorization.OAuth != nil {
accessToken := new(string)
- if !r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth.AccessToken.IsUnknown() && !r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth.AccessToken.IsNull() {
- *accessToken = r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth.AccessToken.ValueString()
+ if !r.Configuration.Authorization.OAuth.AccessToken.IsUnknown() && !r.Configuration.Authorization.OAuth.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Authorization.OAuth.AccessToken.ValueString()
} else {
accessToken = nil
}
- authType := shared.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuthAuthType(r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth.AuthType.ValueString())
- clientID := r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth.RefreshToken.ValueString()
- sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth = &shared.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth{
+ clientID := r.Configuration.Authorization.OAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Authorization.OAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Authorization.OAuth.RefreshToken.ValueString()
+ sourceGoogleSearchConsoleUpdateOAuth = &shared.SourceGoogleSearchConsoleUpdateOAuth{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth != nil {
- authorization = shared.SourceGoogleSearchConsoleUpdateAuthenticationType{
- SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth: sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth,
+ if sourceGoogleSearchConsoleUpdateOAuth != nil {
+ authorization = shared.AuthenticationType{
+ SourceGoogleSearchConsoleUpdateOAuth: sourceGoogleSearchConsoleUpdateOAuth,
}
}
- var sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication *shared.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication
- if r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication != nil {
- authType1 := shared.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthenticationAuthType(r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication.AuthType.ValueString())
- email := r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication.Email.ValueString()
- serviceAccountInfo := r.Configuration.Authorization.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication.ServiceAccountInfo.ValueString()
- sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication = &shared.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication{
- AuthType: authType1,
+ var sourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication *shared.SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication
+ if r.Configuration.Authorization.ServiceAccountKeyAuthentication != nil {
+ email := r.Configuration.Authorization.ServiceAccountKeyAuthentication.Email.ValueString()
+ serviceAccountInfo := r.Configuration.Authorization.ServiceAccountKeyAuthentication.ServiceAccountInfo.ValueString()
+ sourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication = &shared.SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication{
Email: email,
ServiceAccountInfo: serviceAccountInfo,
}
}
- if sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication != nil {
- authorization = shared.SourceGoogleSearchConsoleUpdateAuthenticationType{
- SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication: sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication,
+ if sourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication != nil {
+ authorization = shared.AuthenticationType{
+ SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication: sourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication,
}
}
customReports := new(string)
@@ -175,9 +172,9 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToUpdateSDKType() *shared.Sourc
}
var customReportsArray []shared.SourceGoogleSearchConsoleUpdateCustomReportConfig = nil
for _, customReportsArrayItem := range r.Configuration.CustomReportsArray {
- var dimensions []shared.SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = nil
+ var dimensions []shared.SourceGoogleSearchConsoleUpdateValidEnums = nil
for _, dimensionsItem := range customReportsArrayItem.Dimensions {
- dimensions = append(dimensions, shared.SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums(dimensionsItem.ValueString()))
+ dimensions = append(dimensions, shared.SourceGoogleSearchConsoleUpdateValidEnums(dimensionsItem.ValueString()))
}
name := customReportsArrayItem.Name.ValueString()
customReportsArray = append(customReportsArray, shared.SourceGoogleSearchConsoleUpdateCustomReportConfig{
@@ -185,9 +182,9 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToUpdateSDKType() *shared.Sourc
Name: name,
})
}
- dataState := new(shared.SourceGoogleSearchConsoleUpdateDataFreshness)
+ dataState := new(shared.DataFreshness)
if !r.Configuration.DataState.IsUnknown() && !r.Configuration.DataState.IsNull() {
- *dataState = shared.SourceGoogleSearchConsoleUpdateDataFreshness(r.Configuration.DataState.ValueString())
+ *dataState = shared.DataFreshness(r.Configuration.DataState.ValueString())
} else {
dataState = nil
}
diff --git a/internal/provider/source_googlesheets_data_source.go b/internal/provider/source_googlesheets_data_source.go
old mode 100755
new mode 100644
index 0c561e58a..55e3d7701
--- a/internal/provider/source_googlesheets_data_source.go
+++ b/internal/provider/source_googlesheets_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceGoogleSheetsDataSource struct {
// SourceGoogleSheetsDataSourceModel describes the data model.
type SourceGoogleSheetsDataSourceModel struct {
- Configuration SourceGoogleSheets `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,139 +47,20 @@ func (r *SourceGoogleSheetsDataSource) Schema(ctx context.Context, req datasourc
MarkdownDescription: "SourceGoogleSheets DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_google_sheets_authentication_authenticate_via_google_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Google application's Client ID. See Google's documentation for more information.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Google application's Client Secret. See Google's documentation for more information.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Google application's refresh token. See Google's documentation for more information.`,
- },
- },
- Description: `Credentials for connecting to the Google Sheets API`,
- },
- "source_google_sheets_authentication_service_account_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "service_account_info": schema.StringAttribute{
- Computed: true,
- Description: `The JSON key of the service account to use for authorization. Read more here.`,
- },
- },
- Description: `Credentials for connecting to the Google Sheets API`,
- },
- "source_google_sheets_update_authentication_authenticate_via_google_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Google application's Client ID. See Google's documentation for more information.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Google application's Client Secret. See Google's documentation for more information.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Google application's refresh token. See Google's documentation for more information.`,
- },
- },
- Description: `Credentials for connecting to the Google Sheets API`,
- },
- "source_google_sheets_update_authentication_service_account_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "service_account_info": schema.StringAttribute{
- Computed: true,
- Description: `The JSON key of the service account to use for authorization. Read more here.`,
- },
- },
- Description: `Credentials for connecting to the Google Sheets API`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Credentials for connecting to the Google Sheets API`,
- },
- "names_conversion": schema.BoolAttribute{
- Computed: true,
- Description: `Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-sheets",
- ),
- },
- Description: `must be one of ["google-sheets"]`,
- },
- "spreadsheet_id": schema.StringAttribute{
- Computed: true,
- Description: `Enter the link to the Google spreadsheet you want to sync. To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_googlesheets_data_source_sdk.go b/internal/provider/source_googlesheets_data_source_sdk.go
old mode 100755
new mode 100644
index e9d1d1d5a..17e3708cc
--- a/internal/provider/source_googlesheets_data_source_sdk.go
+++ b/internal/provider/source_googlesheets_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGoogleSheetsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_googlesheets_resource.go b/internal/provider/source_googlesheets_resource.go
old mode 100755
new mode 100644
index 11413f548..ca2e1daf9
--- a/internal/provider/source_googlesheets_resource.go
+++ b/internal/provider/source_googlesheets_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceGoogleSheetsResource struct {
// SourceGoogleSheetsResourceModel describes the resource data model.
type SourceGoogleSheetsResourceModel struct {
Configuration SourceGoogleSheets `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,64 +59,9 @@ func (r *SourceGoogleSheetsResource) Schema(ctx context.Context, req resource.Sc
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_google_sheets_authentication_authenticate_via_google_o_auth": schema.SingleNestedAttribute{
+ "authenticate_via_google_o_auth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `Enter your Google application's Client ID. See Google's documentation for more information.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `Enter your Google application's Client Secret. See Google's documentation for more information.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `Enter your Google application's refresh token. See Google's documentation for more information.`,
- },
- },
- Description: `Credentials for connecting to the Google Sheets API`,
- },
- "source_google_sheets_authentication_service_account_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
- "service_account_info": schema.StringAttribute{
- Required: true,
- Description: `The JSON key of the service account to use for authorization. Read more here.`,
- },
- },
- Description: `Credentials for connecting to the Google Sheets API`,
- },
- "source_google_sheets_update_authentication_authenticate_via_google_o_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `Enter your Google application's Client ID. See Google's documentation for more information.`,
@@ -126,23 +72,15 @@ func (r *SourceGoogleSheetsResource) Schema(ctx context.Context, req resource.Sc
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Enter your Google application's refresh token. See Google's documentation for more information.`,
},
},
Description: `Credentials for connecting to the Google Sheets API`,
},
- "source_google_sheets_update_authentication_service_account_key_authentication": schema.SingleNestedAttribute{
+ "service_account_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service",
- ),
- },
- Description: `must be one of ["Service"]`,
- },
"service_account_info": schema.StringAttribute{
Required: true,
Description: `The JSON key of the service account to use for authorization. Read more here.`,
@@ -151,23 +89,15 @@ func (r *SourceGoogleSheetsResource) Schema(ctx context.Context, req resource.Sc
Description: `Credentials for connecting to the Google Sheets API`,
},
},
+ Description: `Credentials for connecting to the Google Sheets API`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Credentials for connecting to the Google Sheets API`,
},
"names_conversion": schema.BoolAttribute{
- Optional: true,
- Description: `Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-sheets",
- ),
- },
- Description: `must be one of ["google-sheets"]`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based.`,
},
"spreadsheet_id": schema.StringAttribute{
Required: true,
@@ -175,13 +105,24 @@ func (r *SourceGoogleSheetsResource) Schema(ctx context.Context, req resource.Sc
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -245,7 +186,7 @@ func (r *SourceGoogleSheetsResource) Create(ctx context.Context, req resource.Cr
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGoogleSheets(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -421,5 +362,5 @@ func (r *SourceGoogleSheetsResource) Delete(ctx context.Context, req resource.De
}
func (r *SourceGoogleSheetsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_googlesheets_resource_sdk.go b/internal/provider/source_googlesheets_resource_sdk.go
old mode 100755
new mode 100644
index d217bd4dc..4db4b5242
--- a/internal/provider/source_googlesheets_resource_sdk.go
+++ b/internal/provider/source_googlesheets_resource_sdk.go
@@ -3,42 +3,38 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGoogleSheetsResourceModel) ToCreateSDKType() *shared.SourceGoogleSheetsCreateRequest {
var credentials shared.SourceGoogleSheetsAuthentication
- var sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth *shared.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth
- if r.Configuration.Credentials.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth != nil {
- authType := shared.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuthAuthType(r.Configuration.Credentials.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth.RefreshToken.ValueString()
- sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth = &shared.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth{
- AuthType: authType,
+ var sourceGoogleSheetsAuthenticateViaGoogleOAuth *shared.SourceGoogleSheetsAuthenticateViaGoogleOAuth
+ if r.Configuration.Credentials.AuthenticateViaGoogleOAuth != nil {
+ clientID := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.RefreshToken.ValueString()
+ sourceGoogleSheetsAuthenticateViaGoogleOAuth = &shared.SourceGoogleSheetsAuthenticateViaGoogleOAuth{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth != nil {
+ if sourceGoogleSheetsAuthenticateViaGoogleOAuth != nil {
credentials = shared.SourceGoogleSheetsAuthentication{
- SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth: sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth,
+ SourceGoogleSheetsAuthenticateViaGoogleOAuth: sourceGoogleSheetsAuthenticateViaGoogleOAuth,
}
}
- var sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication *shared.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication
- if r.Configuration.Credentials.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication != nil {
- authType1 := shared.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthenticationAuthType(r.Configuration.Credentials.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication.AuthType.ValueString())
- serviceAccountInfo := r.Configuration.Credentials.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication.ServiceAccountInfo.ValueString()
- sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication = &shared.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication{
- AuthType: authType1,
+ var sourceGoogleSheetsServiceAccountKeyAuthentication *shared.SourceGoogleSheetsServiceAccountKeyAuthentication
+ if r.Configuration.Credentials.ServiceAccountKeyAuthentication != nil {
+ serviceAccountInfo := r.Configuration.Credentials.ServiceAccountKeyAuthentication.ServiceAccountInfo.ValueString()
+ sourceGoogleSheetsServiceAccountKeyAuthentication = &shared.SourceGoogleSheetsServiceAccountKeyAuthentication{
ServiceAccountInfo: serviceAccountInfo,
}
}
- if sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication != nil {
+ if sourceGoogleSheetsServiceAccountKeyAuthentication != nil {
credentials = shared.SourceGoogleSheetsAuthentication{
- SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication: sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication,
+ SourceGoogleSheetsServiceAccountKeyAuthentication: sourceGoogleSheetsServiceAccountKeyAuthentication,
}
}
namesConversion := new(bool)
@@ -47,14 +43,18 @@ func (r *SourceGoogleSheetsResourceModel) ToCreateSDKType() *shared.SourceGoogle
} else {
namesConversion = nil
}
- sourceType := shared.SourceGoogleSheetsGoogleSheets(r.Configuration.SourceType.ValueString())
spreadsheetID := r.Configuration.SpreadsheetID.ValueString()
configuration := shared.SourceGoogleSheets{
Credentials: credentials,
NamesConversion: namesConversion,
- SourceType: sourceType,
SpreadsheetID: spreadsheetID,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -65,6 +65,7 @@ func (r *SourceGoogleSheetsResourceModel) ToCreateSDKType() *shared.SourceGoogle
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGoogleSheetsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -79,36 +80,32 @@ func (r *SourceGoogleSheetsResourceModel) ToGetSDKType() *shared.SourceGoogleShe
func (r *SourceGoogleSheetsResourceModel) ToUpdateSDKType() *shared.SourceGoogleSheetsPutRequest {
var credentials shared.SourceGoogleSheetsUpdateAuthentication
- var sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth *shared.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth
- if r.Configuration.Credentials.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth != nil {
- authType := shared.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuthAuthType(r.Configuration.Credentials.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth.RefreshToken.ValueString()
- sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth = &shared.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth{
- AuthType: authType,
+ var sourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth *shared.SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth
+ if r.Configuration.Credentials.AuthenticateViaGoogleOAuth != nil {
+ clientID := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaGoogleOAuth.RefreshToken.ValueString()
+ sourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth = &shared.SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth != nil {
+ if sourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth != nil {
credentials = shared.SourceGoogleSheetsUpdateAuthentication{
- SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth: sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth,
+ SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth: sourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth,
}
}
- var sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication *shared.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication
- if r.Configuration.Credentials.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication != nil {
- authType1 := shared.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthenticationAuthType(r.Configuration.Credentials.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication.AuthType.ValueString())
- serviceAccountInfo := r.Configuration.Credentials.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication.ServiceAccountInfo.ValueString()
- sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication = &shared.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication{
- AuthType: authType1,
+ var sourceGoogleSheetsUpdateServiceAccountKeyAuthentication *shared.SourceGoogleSheetsUpdateServiceAccountKeyAuthentication
+ if r.Configuration.Credentials.ServiceAccountKeyAuthentication != nil {
+ serviceAccountInfo := r.Configuration.Credentials.ServiceAccountKeyAuthentication.ServiceAccountInfo.ValueString()
+ sourceGoogleSheetsUpdateServiceAccountKeyAuthentication = &shared.SourceGoogleSheetsUpdateServiceAccountKeyAuthentication{
ServiceAccountInfo: serviceAccountInfo,
}
}
- if sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication != nil {
+ if sourceGoogleSheetsUpdateServiceAccountKeyAuthentication != nil {
credentials = shared.SourceGoogleSheetsUpdateAuthentication{
- SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication: sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication,
+ SourceGoogleSheetsUpdateServiceAccountKeyAuthentication: sourceGoogleSheetsUpdateServiceAccountKeyAuthentication,
}
}
namesConversion := new(bool)
diff --git a/internal/provider/source_googlewebfonts_data_source.go b/internal/provider/source_googlewebfonts_data_source.go
old mode 100755
new mode 100644
index d1e5544f8..3e528feb3
--- a/internal/provider/source_googlewebfonts_data_source.go
+++ b/internal/provider/source_googlewebfonts_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceGoogleWebfontsDataSource struct {
// SourceGoogleWebfontsDataSourceModel describes the data model.
type SourceGoogleWebfontsDataSourceModel struct {
- Configuration SourceGoogleWebfonts `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,46 +47,20 @@ func (r *SourceGoogleWebfontsDataSource) Schema(ctx context.Context, req datasou
MarkdownDescription: "SourceGoogleWebfonts DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "alt": schema.StringAttribute{
- Computed: true,
- Description: `Optional, Available params- json, media, proto`,
- },
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API key is required to access google apis, For getting your's goto google console and generate api key for Webfonts`,
- },
- "pretty_print": schema.StringAttribute{
- Computed: true,
- Description: `Optional, boolean type`,
- },
- "sort": schema.StringAttribute{
- Computed: true,
- Description: `Optional, to find how to sort`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-webfonts",
- ),
- },
- Description: `must be one of ["google-webfonts"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_googlewebfonts_data_source_sdk.go b/internal/provider/source_googlewebfonts_data_source_sdk.go
old mode 100755
new mode 100644
index abce3faae..20f343ca5
--- a/internal/provider/source_googlewebfonts_data_source_sdk.go
+++ b/internal/provider/source_googlewebfonts_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGoogleWebfontsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_googlewebfonts_resource.go b/internal/provider/source_googlewebfonts_resource.go
old mode 100755
new mode 100644
index a3193d208..7d8f72e7a
--- a/internal/provider/source_googlewebfonts_resource.go
+++ b/internal/provider/source_googlewebfonts_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceGoogleWebfontsResource struct {
// SourceGoogleWebfontsResourceModel describes the resource data model.
type SourceGoogleWebfontsResourceModel struct {
Configuration SourceGoogleWebfonts `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -60,6 +60,7 @@ func (r *SourceGoogleWebfontsResource) Schema(ctx context.Context, req resource.
},
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API key is required to access google apis, For getting your's goto google console and generate api key for Webfonts`,
},
"pretty_print": schema.StringAttribute{
@@ -70,24 +71,26 @@ func (r *SourceGoogleWebfontsResource) Schema(ctx context.Context, req resource.
Optional: true,
Description: `Optional, to find how to sort`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-webfonts",
- ),
- },
- Description: `must be one of ["google-webfonts"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -151,7 +154,7 @@ func (r *SourceGoogleWebfontsResource) Create(ctx context.Context, req resource.
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGoogleWebfonts(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -327,5 +330,5 @@ func (r *SourceGoogleWebfontsResource) Delete(ctx context.Context, req resource.
}
func (r *SourceGoogleWebfontsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_googlewebfonts_resource_sdk.go b/internal/provider/source_googlewebfonts_resource_sdk.go
old mode 100755
new mode 100644
index 173025433..d833614e9
--- a/internal/provider/source_googlewebfonts_resource_sdk.go
+++ b/internal/provider/source_googlewebfonts_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -27,13 +27,17 @@ func (r *SourceGoogleWebfontsResourceModel) ToCreateSDKType() *shared.SourceGoog
} else {
sort = nil
}
- sourceType := shared.SourceGoogleWebfontsGoogleWebfonts(r.Configuration.SourceType.ValueString())
configuration := shared.SourceGoogleWebfonts{
Alt: alt,
APIKey: apiKey,
PrettyPrint: prettyPrint,
Sort: sort,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -45,6 +49,7 @@ func (r *SourceGoogleWebfontsResourceModel) ToCreateSDKType() *shared.SourceGoog
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGoogleWebfontsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_googleworkspaceadminreports_data_source.go b/internal/provider/source_googleworkspaceadminreports_data_source.go
old mode 100755
new mode 100644
index 358cb284c..9f768457a
--- a/internal/provider/source_googleworkspaceadminreports_data_source.go
+++ b/internal/provider/source_googleworkspaceadminreports_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceGoogleWorkspaceAdminReportsDataSource struct {
// SourceGoogleWorkspaceAdminReportsDataSourceModel describes the data model.
type SourceGoogleWorkspaceAdminReportsDataSourceModel struct {
- Configuration SourceGoogleWorkspaceAdminReports `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,42 +47,20 @@ func (r *SourceGoogleWorkspaceAdminReportsDataSource) Schema(ctx context.Context
MarkdownDescription: "SourceGoogleWorkspaceAdminReports DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials_json": schema.StringAttribute{
- Computed: true,
- Description: `The contents of the JSON service account key. See the docs for more information on how to generate this key.`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The email of the user, which has permissions to access the Google Workspace Admin APIs.`,
- },
- "lookback": schema.Int64Attribute{
- Computed: true,
- Description: `Sets the range of time shown in the report. Reports API allows from up to 180 days ago. `,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-workspace-admin-reports",
- ),
- },
- Description: `must be one of ["google-workspace-admin-reports"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_googleworkspaceadminreports_data_source_sdk.go b/internal/provider/source_googleworkspaceadminreports_data_source_sdk.go
old mode 100755
new mode 100644
index 347093fab..a3080b5ca
--- a/internal/provider/source_googleworkspaceadminreports_data_source_sdk.go
+++ b/internal/provider/source_googleworkspaceadminreports_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGoogleWorkspaceAdminReportsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_googleworkspaceadminreports_resource.go b/internal/provider/source_googleworkspaceadminreports_resource.go
old mode 100755
new mode 100644
index fd51059db..5bef86adc
--- a/internal/provider/source_googleworkspaceadminreports_resource.go
+++ b/internal/provider/source_googleworkspaceadminreports_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceGoogleWorkspaceAdminReportsResource struct {
// SourceGoogleWorkspaceAdminReportsResourceModel describes the resource data model.
type SourceGoogleWorkspaceAdminReportsResourceModel struct {
Configuration SourceGoogleWorkspaceAdminReports `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -66,24 +66,26 @@ func (r *SourceGoogleWorkspaceAdminReportsResource) Schema(ctx context.Context,
Optional: true,
Description: `Sets the range of time shown in the report. Reports API allows from up to 180 days ago. `,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "google-workspace-admin-reports",
- ),
- },
- Description: `must be one of ["google-workspace-admin-reports"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +149,7 @@ func (r *SourceGoogleWorkspaceAdminReportsResource) Create(ctx context.Context,
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGoogleWorkspaceAdminReports(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +325,5 @@ func (r *SourceGoogleWorkspaceAdminReportsResource) Delete(ctx context.Context,
}
func (r *SourceGoogleWorkspaceAdminReportsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_googleworkspaceadminreports_resource_sdk.go b/internal/provider/source_googleworkspaceadminreports_resource_sdk.go
old mode 100755
new mode 100644
index 2960fa054..e9945ca0f
--- a/internal/provider/source_googleworkspaceadminreports_resource_sdk.go
+++ b/internal/provider/source_googleworkspaceadminreports_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -16,12 +16,16 @@ func (r *SourceGoogleWorkspaceAdminReportsResourceModel) ToCreateSDKType() *shar
} else {
lookback = nil
}
- sourceType := shared.SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports(r.Configuration.SourceType.ValueString())
configuration := shared.SourceGoogleWorkspaceAdminReports{
CredentialsJSON: credentialsJSON,
Email: email,
Lookback: lookback,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -33,6 +37,7 @@ func (r *SourceGoogleWorkspaceAdminReportsResourceModel) ToCreateSDKType() *shar
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGoogleWorkspaceAdminReportsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_greenhouse_data_source.go b/internal/provider/source_greenhouse_data_source.go
old mode 100755
new mode 100644
index f1a1cd16d..c02596cf9
--- a/internal/provider/source_greenhouse_data_source.go
+++ b/internal/provider/source_greenhouse_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceGreenhouseDataSource struct {
// SourceGreenhouseDataSourceModel describes the data model.
type SourceGreenhouseDataSourceModel struct {
- Configuration SourceGreenhouse `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceGreenhouseDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceGreenhouse DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Greenhouse API Key. See the docs for more information on how to generate this key.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "greenhouse",
- ),
- },
- Description: `must be one of ["greenhouse"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_greenhouse_data_source_sdk.go b/internal/provider/source_greenhouse_data_source_sdk.go
old mode 100755
new mode 100644
index 788d9d09c..79e6a2fe2
--- a/internal/provider/source_greenhouse_data_source_sdk.go
+++ b/internal/provider/source_greenhouse_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGreenhouseDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_greenhouse_resource.go b/internal/provider/source_greenhouse_resource.go
old mode 100755
new mode 100644
index 95238ef3e..3cf6d83d6
--- a/internal/provider/source_greenhouse_resource.go
+++ b/internal/provider/source_greenhouse_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceGreenhouseResource struct {
// SourceGreenhouseResourceModel describes the resource data model.
type SourceGreenhouseResourceModel struct {
- Configuration SourceGreenhouse `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceGreenhouseResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceGreenhouseResource) Schema(ctx context.Context, req resource.Sche
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Greenhouse API Key. See the docs for more information on how to generate this key.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "greenhouse",
- ),
- },
- Description: `must be one of ["greenhouse"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceGreenhouseResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGreenhouse(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceGreenhouseResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceGreenhouseResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_greenhouse_resource_sdk.go b/internal/provider/source_greenhouse_resource_sdk.go
old mode 100755
new mode 100644
index fe04cffc4..4d131fce5
--- a/internal/provider/source_greenhouse_resource_sdk.go
+++ b/internal/provider/source_greenhouse_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGreenhouseResourceModel) ToCreateSDKType() *shared.SourceGreenhouseCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceGreenhouseGreenhouse(r.Configuration.SourceType.ValueString())
configuration := shared.SourceGreenhouse{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceGreenhouseResourceModel) ToCreateSDKType() *shared.SourceGreenhou
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGreenhouseCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_gridly_data_source.go b/internal/provider/source_gridly_data_source.go
old mode 100755
new mode 100644
index 2f42ffcb7..a8332aecb
--- a/internal/provider/source_gridly_data_source.go
+++ b/internal/provider/source_gridly_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceGridlyDataSource struct {
// SourceGridlyDataSourceModel describes the data model.
type SourceGridlyDataSourceModel struct {
- Configuration SourceGridly `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,37 +47,20 @@ func (r *SourceGridlyDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceGridly DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- },
- "grid_id": schema.StringAttribute{
- Computed: true,
- Description: `ID of a grid, or can be ID of a branch`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "gridly",
- ),
- },
- Description: `must be one of ["gridly"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_gridly_data_source_sdk.go b/internal/provider/source_gridly_data_source_sdk.go
old mode 100755
new mode 100644
index 3c7202dd7..2d41a8370
--- a/internal/provider/source_gridly_data_source_sdk.go
+++ b/internal/provider/source_gridly_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGridlyDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_gridly_resource.go b/internal/provider/source_gridly_resource.go
old mode 100755
new mode 100644
index c86069539..b9ef51877
--- a/internal/provider/source_gridly_resource.go
+++ b/internal/provider/source_gridly_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceGridlyResource struct {
// SourceGridlyResourceModel describes the resource data model.
type SourceGridlyResourceModel struct {
Configuration SourceGridly `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -55,30 +55,33 @@ func (r *SourceGridlyResource) Schema(ctx context.Context, req resource.SchemaRe
Required: true,
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Sensitive: true,
},
"grid_id": schema.StringAttribute{
Required: true,
Description: `ID of a grid, or can be ID of a branch`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "gridly",
- ),
- },
- Description: `must be one of ["gridly"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -142,7 +145,7 @@ func (r *SourceGridlyResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceGridly(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -318,5 +321,5 @@ func (r *SourceGridlyResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceGridlyResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_gridly_resource_sdk.go b/internal/provider/source_gridly_resource_sdk.go
old mode 100755
new mode 100644
index 505bf4e6f..a70bf3980
--- a/internal/provider/source_gridly_resource_sdk.go
+++ b/internal/provider/source_gridly_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceGridlyResourceModel) ToCreateSDKType() *shared.SourceGridlyCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
gridID := r.Configuration.GridID.ValueString()
- sourceType := shared.SourceGridlyGridly(r.Configuration.SourceType.ValueString())
configuration := shared.SourceGridly{
- APIKey: apiKey,
- GridID: gridID,
- SourceType: sourceType,
+ APIKey: apiKey,
+ GridID: gridID,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceGridlyResourceModel) ToCreateSDKType() *shared.SourceGridlyCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceGridlyCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_harvest_data_source.go b/internal/provider/source_harvest_data_source.go
old mode 100755
new mode 100644
index 7bfeb0f59..35c03143d
--- a/internal/provider/source_harvest_data_source.go
+++ b/internal/provider/source_harvest_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceHarvestDataSource struct {
// SourceHarvestDataSourceModel describes the data model.
type SourceHarvestDataSourceModel struct {
- Configuration SourceHarvest1 `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,177 +47,20 @@ func (r *SourceHarvestDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceHarvest DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "account_id": schema.StringAttribute{
- Computed: true,
- Description: `Harvest account ID. Required for all Harvest requests in pair with Personal Access Token`,
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Harvest developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Harvest developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token to renew the expired Access Token.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Choose how to authenticate to Harvest.`,
- },
- "source_harvest_authentication_mechanism_authenticate_with_personal_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `Log into Harvest and then create new personal access token.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Choose how to authenticate to Harvest.`,
- },
- "source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Harvest developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Harvest developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token to renew the expired Access Token.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Choose how to authenticate to Harvest.`,
- },
- "source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `Log into Harvest and then create new personal access token.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Choose how to authenticate to Harvest.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Choose how to authenticate to Harvest.`,
- },
- "replication_end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.`,
- },
- "replication_start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "harvest",
- ),
- },
- Description: `must be one of ["harvest"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_harvest_data_source_sdk.go b/internal/provider/source_harvest_data_source_sdk.go
old mode 100755
new mode 100644
index 9c7d0171c..ffb68a22d
--- a/internal/provider/source_harvest_data_source_sdk.go
+++ b/internal/provider/source_harvest_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceHarvestDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_harvest_resource.go b/internal/provider/source_harvest_resource.go
old mode 100755
new mode 100644
index 0c30849d9..2351f02f5
--- a/internal/provider/source_harvest_resource.go
+++ b/internal/provider/source_harvest_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceHarvestResource struct {
// SourceHarvestResourceModel describes the resource data model.
type SourceHarvestResourceModel struct {
Configuration SourceHarvest `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -62,17 +63,15 @@ func (r *SourceHarvestResource) Schema(ctx context.Context, req resource.SchemaR
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth": schema.SingleNestedAttribute{
+ "authenticate_via_harvest_o_auth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
+ "additional_properties": schema.StringAttribute{
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
+ validators.IsValidJSON(),
},
- Description: `must be one of ["Client"]`,
},
"client_id": schema.StringAttribute{
Required: true,
@@ -84,142 +83,70 @@ func (r *SourceHarvestResource) Schema(ctx context.Context, req resource.SchemaR
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Refresh Token to renew the expired Access Token.`,
},
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `Choose how to authenticate to Harvest.`,
},
- "source_harvest_authentication_mechanism_authenticate_with_personal_access_token": schema.SingleNestedAttribute{
+ "authenticate_with_personal_access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Required: true,
- Description: `Log into Harvest and then create new personal access token.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
+ Optional: true,
Description: `Parsed as JSON.`,
- },
- },
- Description: `Choose how to authenticate to Harvest.`,
- },
- "source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your Harvest developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your Harvest developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `Refresh Token to renew the expired Access Token.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
- },
- Description: `Choose how to authenticate to Harvest.`,
- },
- "source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Log into Harvest and then create new personal access token.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `Choose how to authenticate to Harvest.`,
},
},
+ Description: `Choose how to authenticate to Harvest.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Choose how to authenticate to Harvest.`,
},
"replication_end_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.`,
},
"replication_start_date": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
+ Required: true,
Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
Validators: []validator.String{
- stringvalidator.OneOf(
- "harvest",
- ),
+ validators.IsRFC3339(),
},
- Description: `must be one of ["harvest"]`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -283,7 +210,7 @@ func (r *SourceHarvestResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceHarvest(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -459,5 +386,5 @@ func (r *SourceHarvestResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceHarvestResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_harvest_resource_sdk.go b/internal/provider/source_harvest_resource_sdk.go
old mode 100755
new mode 100644
index 1aac94b56..daa61989f
--- a/internal/provider/source_harvest_resource_sdk.go
+++ b/internal/provider/source_harvest_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
"encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -13,56 +13,42 @@ func (r *SourceHarvestResourceModel) ToCreateSDKType() *shared.SourceHarvestCrea
accountID := r.Configuration.AccountID.ValueString()
var credentials *shared.SourceHarvestAuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth *shared.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth
- if r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth != nil {
- authType := new(shared.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType)
- if !r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth.AuthType.IsNull() {
- *authType = shared.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType(r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth.RefreshToken.ValueString()
+ var sourceHarvestAuthenticateViaHarvestOAuth *shared.SourceHarvestAuthenticateViaHarvestOAuth
+ if r.Configuration.Credentials.AuthenticateViaHarvestOAuth != nil {
var additionalProperties interface{}
- if !r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth.AdditionalProperties.ValueString()), &additionalProperties)
+ if !r.Configuration.Credentials.AuthenticateViaHarvestOAuth.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.AuthenticateViaHarvestOAuth.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.AuthenticateViaHarvestOAuth.AdditionalProperties.ValueString()), &additionalProperties)
}
- sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth = &shared.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth{
- AuthType: authType,
+ clientID := r.Configuration.Credentials.AuthenticateViaHarvestOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaHarvestOAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaHarvestOAuth.RefreshToken.ValueString()
+ sourceHarvestAuthenticateViaHarvestOAuth = &shared.SourceHarvestAuthenticateViaHarvestOAuth{
+ AdditionalProperties: additionalProperties,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
- AdditionalProperties: additionalProperties,
}
}
- if sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth != nil {
+ if sourceHarvestAuthenticateViaHarvestOAuth != nil {
credentials = &shared.SourceHarvestAuthenticationMechanism{
- SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth: sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth,
+ SourceHarvestAuthenticateViaHarvestOAuth: sourceHarvestAuthenticateViaHarvestOAuth,
}
}
- var sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken *shared.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken
- if r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
- apiToken := r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken.APIToken.ValueString()
- authType1 := new(shared.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType)
- if !r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken.AuthType.IsNull() {
- *authType1 = shared.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType(r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
+ var sourceHarvestAuthenticateWithPersonalAccessToken *shared.SourceHarvestAuthenticateWithPersonalAccessToken
+ if r.Configuration.Credentials.AuthenticateWithPersonalAccessToken != nil {
var additionalProperties1 interface{}
- if !r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken.AdditionalProperties.ValueString()), &additionalProperties1)
+ if !r.Configuration.Credentials.AuthenticateWithPersonalAccessToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.AuthenticateWithPersonalAccessToken.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.AuthenticateWithPersonalAccessToken.AdditionalProperties.ValueString()), &additionalProperties1)
}
- sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken = &shared.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken{
- APIToken: apiToken,
- AuthType: authType1,
+ apiToken := r.Configuration.Credentials.AuthenticateWithPersonalAccessToken.APIToken.ValueString()
+ sourceHarvestAuthenticateWithPersonalAccessToken = &shared.SourceHarvestAuthenticateWithPersonalAccessToken{
AdditionalProperties: additionalProperties1,
+ APIToken: apiToken,
}
}
- if sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
+ if sourceHarvestAuthenticateWithPersonalAccessToken != nil {
credentials = &shared.SourceHarvestAuthenticationMechanism{
- SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken: sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken,
+ SourceHarvestAuthenticateWithPersonalAccessToken: sourceHarvestAuthenticateWithPersonalAccessToken,
}
}
}
@@ -73,13 +59,17 @@ func (r *SourceHarvestResourceModel) ToCreateSDKType() *shared.SourceHarvestCrea
replicationEndDate = nil
}
replicationStartDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.ReplicationStartDate.ValueString())
- sourceType := shared.SourceHarvestHarvest(r.Configuration.SourceType.ValueString())
configuration := shared.SourceHarvest{
AccountID: accountID,
Credentials: credentials,
ReplicationEndDate: replicationEndDate,
ReplicationStartDate: replicationStartDate,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -91,6 +81,7 @@ func (r *SourceHarvestResourceModel) ToCreateSDKType() *shared.SourceHarvestCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceHarvestCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -107,56 +98,42 @@ func (r *SourceHarvestResourceModel) ToUpdateSDKType() *shared.SourceHarvestPutR
accountID := r.Configuration.AccountID.ValueString()
var credentials *shared.SourceHarvestUpdateAuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth *shared.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth
- if r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth != nil {
- authType := new(shared.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType)
- if !r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth.AuthType.IsNull() {
- *authType = shared.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType(r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth.RefreshToken.ValueString()
+ var authenticateViaHarvestOAuth *shared.AuthenticateViaHarvestOAuth
+ if r.Configuration.Credentials.AuthenticateViaHarvestOAuth != nil {
var additionalProperties interface{}
- if !r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth.AdditionalProperties.ValueString()), &additionalProperties)
+ if !r.Configuration.Credentials.AuthenticateViaHarvestOAuth.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.AuthenticateViaHarvestOAuth.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.AuthenticateViaHarvestOAuth.AdditionalProperties.ValueString()), &additionalProperties)
}
- sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth = &shared.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth{
- AuthType: authType,
+ clientID := r.Configuration.Credentials.AuthenticateViaHarvestOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaHarvestOAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaHarvestOAuth.RefreshToken.ValueString()
+ authenticateViaHarvestOAuth = &shared.AuthenticateViaHarvestOAuth{
+ AdditionalProperties: additionalProperties,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
- AdditionalProperties: additionalProperties,
}
}
- if sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth != nil {
+ if authenticateViaHarvestOAuth != nil {
credentials = &shared.SourceHarvestUpdateAuthenticationMechanism{
- SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth: sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth,
+ AuthenticateViaHarvestOAuth: authenticateViaHarvestOAuth,
}
}
- var sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken *shared.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken
- if r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
- apiToken := r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken.APIToken.ValueString()
- authType1 := new(shared.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType)
- if !r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken.AuthType.IsNull() {
- *authType1 = shared.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType(r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
+ var sourceHarvestUpdateAuthenticateWithPersonalAccessToken *shared.SourceHarvestUpdateAuthenticateWithPersonalAccessToken
+ if r.Configuration.Credentials.AuthenticateWithPersonalAccessToken != nil {
var additionalProperties1 interface{}
- if !r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken.AdditionalProperties.ValueString()), &additionalProperties1)
+ if !r.Configuration.Credentials.AuthenticateWithPersonalAccessToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.AuthenticateWithPersonalAccessToken.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.AuthenticateWithPersonalAccessToken.AdditionalProperties.ValueString()), &additionalProperties1)
}
- sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken = &shared.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken{
- APIToken: apiToken,
- AuthType: authType1,
+ apiToken := r.Configuration.Credentials.AuthenticateWithPersonalAccessToken.APIToken.ValueString()
+ sourceHarvestUpdateAuthenticateWithPersonalAccessToken = &shared.SourceHarvestUpdateAuthenticateWithPersonalAccessToken{
AdditionalProperties: additionalProperties1,
+ APIToken: apiToken,
}
}
- if sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
+ if sourceHarvestUpdateAuthenticateWithPersonalAccessToken != nil {
credentials = &shared.SourceHarvestUpdateAuthenticationMechanism{
- SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken: sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken,
+ SourceHarvestUpdateAuthenticateWithPersonalAccessToken: sourceHarvestUpdateAuthenticateWithPersonalAccessToken,
}
}
}
diff --git a/internal/provider/source_hubplanner_data_source.go b/internal/provider/source_hubplanner_data_source.go
old mode 100755
new mode 100644
index ce9c4267f..2accebd05
--- a/internal/provider/source_hubplanner_data_source.go
+++ b/internal/provider/source_hubplanner_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceHubplannerDataSource struct {
// SourceHubplannerDataSourceModel describes the data model.
type SourceHubplannerDataSourceModel struct {
- Configuration SourceHubplanner `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceHubplannerDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceHubplanner DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Hubplanner API key. See https://github.com/hubplanner/API#authentication for more details.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "hubplanner",
- ),
- },
- Description: `must be one of ["hubplanner"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_hubplanner_data_source_sdk.go b/internal/provider/source_hubplanner_data_source_sdk.go
old mode 100755
new mode 100644
index ff2e9f375..2d016c708
--- a/internal/provider/source_hubplanner_data_source_sdk.go
+++ b/internal/provider/source_hubplanner_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceHubplannerDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_hubplanner_resource.go b/internal/provider/source_hubplanner_resource.go
old mode 100755
new mode 100644
index b2ce65b2b..337dd7184
--- a/internal/provider/source_hubplanner_resource.go
+++ b/internal/provider/source_hubplanner_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceHubplannerResource struct {
// SourceHubplannerResourceModel describes the resource data model.
type SourceHubplannerResourceModel struct {
- Configuration SourceHubplanner `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceHubplannerResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceHubplannerResource) Schema(ctx context.Context, req resource.Sche
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Hubplanner API key. See https://github.com/hubplanner/API#authentication for more details.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "hubplanner",
- ),
- },
- Description: `must be one of ["hubplanner"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceHubplannerResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceHubplanner(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceHubplannerResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceHubplannerResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_hubplanner_resource_sdk.go b/internal/provider/source_hubplanner_resource_sdk.go
old mode 100755
new mode 100644
index 186617581..60b8d6062
--- a/internal/provider/source_hubplanner_resource_sdk.go
+++ b/internal/provider/source_hubplanner_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceHubplannerResourceModel) ToCreateSDKType() *shared.SourceHubplannerCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceHubplannerHubplanner(r.Configuration.SourceType.ValueString())
configuration := shared.SourceHubplanner{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceHubplannerResourceModel) ToCreateSDKType() *shared.SourceHubplann
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceHubplannerCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_hubspot_data_source.go b/internal/provider/source_hubspot_data_source.go
old mode 100755
new mode 100644
index 98d6207bb..e15afaa63
--- a/internal/provider/source_hubspot_data_source.go
+++ b/internal/provider/source_hubspot_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceHubspotDataSource struct {
// SourceHubspotDataSourceModel describes the data model.
type SourceHubspotDataSourceModel struct {
- Configuration SourceHubspot `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,142 +47,20 @@ func (r *SourceHubspotDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceHubspot DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_hubspot_authentication_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret.`,
- },
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["OAuth Credentials"]` + "\n" +
- `Name of the credentials`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.`,
- },
- },
- Description: `Choose how to authenticate to HubSpot.`,
- },
- "source_hubspot_authentication_private_app": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `HubSpot Access token. See the Hubspot docs if you need help finding this token.`,
- },
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Private App Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["Private App Credentials"]` + "\n" +
- `Name of the credentials set`,
- },
- },
- Description: `Choose how to authenticate to HubSpot.`,
- },
- "source_hubspot_update_authentication_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret.`,
- },
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["OAuth Credentials"]` + "\n" +
- `Name of the credentials`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.`,
- },
- },
- Description: `Choose how to authenticate to HubSpot.`,
- },
- "source_hubspot_update_authentication_private_app": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `HubSpot Access token. See the Hubspot docs if you need help finding this token.`,
- },
- "credentials_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Private App Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["Private App Credentials"]` + "\n" +
- `Name of the credentials set`,
- },
- },
- Description: `Choose how to authenticate to HubSpot.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Choose how to authenticate to HubSpot.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "hubspot",
- ),
- },
- Description: `must be one of ["hubspot"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_hubspot_data_source_sdk.go b/internal/provider/source_hubspot_data_source_sdk.go
old mode 100755
new mode 100644
index bf609792f..7980b7178
--- a/internal/provider/source_hubspot_data_source_sdk.go
+++ b/internal/provider/source_hubspot_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceHubspotDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_hubspot_resource.go b/internal/provider/source_hubspot_resource.go
old mode 100755
new mode 100644
index 0d7ca0151..ef3d9183e
--- a/internal/provider/source_hubspot_resource.go
+++ b/internal/provider/source_hubspot_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceHubspotResource struct {
// SourceHubspotResourceModel describes the resource data model.
type SourceHubspotResourceModel struct {
Configuration SourceHubspot `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,7 +59,7 @@ func (r *SourceHubspotResource) Schema(ctx context.Context, req resource.SchemaR
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_hubspot_authentication_o_auth": schema.SingleNestedAttribute{
+ "o_auth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"client_id": schema.StringAttribute{
@@ -69,122 +70,58 @@ func (r *SourceHubspotResource) Schema(ctx context.Context, req resource.SchemaR
Required: true,
Description: `The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret.`,
},
- "credentials_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["OAuth Credentials"]` + "\n" +
- `Name of the credentials`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.`,
- },
- },
- Description: `Choose how to authenticate to HubSpot.`,
- },
- "source_hubspot_authentication_private_app": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `HubSpot Access token. See the Hubspot docs if you need help finding this token.`,
- },
- "credentials_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Private App Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["Private App Credentials"]` + "\n" +
- `Name of the credentials set`,
- },
- },
- Description: `Choose how to authenticate to HubSpot.`,
- },
- "source_hubspot_update_authentication_o_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret.`,
- },
- "credentials_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["OAuth Credentials"]` + "\n" +
- `Name of the credentials`,
- },
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.`,
},
},
Description: `Choose how to authenticate to HubSpot.`,
},
- "source_hubspot_update_authentication_private_app": schema.SingleNestedAttribute{
+ "private_app": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `HubSpot Access token. See the Hubspot docs if you need help finding this token.`,
},
- "credentials_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Private App Credentials",
- ),
- },
- MarkdownDescription: `must be one of ["Private App Credentials"]` + "\n" +
- `Name of the credentials set`,
- },
},
Description: `Choose how to authenticate to HubSpot.`,
},
},
+ Description: `Choose how to authenticate to HubSpot.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Choose how to authenticate to HubSpot.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "hubspot",
- ),
- },
- Description: `must be one of ["hubspot"]`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -248,7 +185,7 @@ func (r *SourceHubspotResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceHubspot(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -424,5 +361,5 @@ func (r *SourceHubspotResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceHubspotResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_hubspot_resource_sdk.go b/internal/provider/source_hubspot_resource_sdk.go
old mode 100755
new mode 100644
index c8c4ec64f..fe5a5cee5
--- a/internal/provider/source_hubspot_resource_sdk.go
+++ b/internal/provider/source_hubspot_resource_sdk.go
@@ -3,52 +3,52 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceHubspotResourceModel) ToCreateSDKType() *shared.SourceHubspotCreateRequest {
var credentials shared.SourceHubspotAuthentication
- var sourceHubspotAuthenticationOAuth *shared.SourceHubspotAuthenticationOAuth
- if r.Configuration.Credentials.SourceHubspotAuthenticationOAuth != nil {
- clientID := r.Configuration.Credentials.SourceHubspotAuthenticationOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceHubspotAuthenticationOAuth.ClientSecret.ValueString()
- credentialsTitle := shared.SourceHubspotAuthenticationOAuthAuthType(r.Configuration.Credentials.SourceHubspotAuthenticationOAuth.CredentialsTitle.ValueString())
- refreshToken := r.Configuration.Credentials.SourceHubspotAuthenticationOAuth.RefreshToken.ValueString()
- sourceHubspotAuthenticationOAuth = &shared.SourceHubspotAuthenticationOAuth{
- ClientID: clientID,
- ClientSecret: clientSecret,
- CredentialsTitle: credentialsTitle,
- RefreshToken: refreshToken,
+ var sourceHubspotOAuth *shared.SourceHubspotOAuth
+ if r.Configuration.Credentials.OAuth != nil {
+ clientID := r.Configuration.Credentials.OAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth.RefreshToken.ValueString()
+ sourceHubspotOAuth = &shared.SourceHubspotOAuth{
+ ClientID: clientID,
+ ClientSecret: clientSecret,
+ RefreshToken: refreshToken,
}
}
- if sourceHubspotAuthenticationOAuth != nil {
+ if sourceHubspotOAuth != nil {
credentials = shared.SourceHubspotAuthentication{
- SourceHubspotAuthenticationOAuth: sourceHubspotAuthenticationOAuth,
+ SourceHubspotOAuth: sourceHubspotOAuth,
}
}
- var sourceHubspotAuthenticationPrivateApp *shared.SourceHubspotAuthenticationPrivateApp
- if r.Configuration.Credentials.SourceHubspotAuthenticationPrivateApp != nil {
- accessToken := r.Configuration.Credentials.SourceHubspotAuthenticationPrivateApp.AccessToken.ValueString()
- credentialsTitle1 := shared.SourceHubspotAuthenticationPrivateAppAuthType(r.Configuration.Credentials.SourceHubspotAuthenticationPrivateApp.CredentialsTitle.ValueString())
- sourceHubspotAuthenticationPrivateApp = &shared.SourceHubspotAuthenticationPrivateApp{
- AccessToken: accessToken,
- CredentialsTitle: credentialsTitle1,
+ var sourceHubspotPrivateApp *shared.SourceHubspotPrivateApp
+ if r.Configuration.Credentials.PrivateApp != nil {
+ accessToken := r.Configuration.Credentials.PrivateApp.AccessToken.ValueString()
+ sourceHubspotPrivateApp = &shared.SourceHubspotPrivateApp{
+ AccessToken: accessToken,
}
}
- if sourceHubspotAuthenticationPrivateApp != nil {
+ if sourceHubspotPrivateApp != nil {
credentials = shared.SourceHubspotAuthentication{
- SourceHubspotAuthenticationPrivateApp: sourceHubspotAuthenticationPrivateApp,
+ SourceHubspotPrivateApp: sourceHubspotPrivateApp,
}
}
- sourceType := shared.SourceHubspotHubspot(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceHubspot{
Credentials: credentials,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -59,6 +59,7 @@ func (r *SourceHubspotResourceModel) ToCreateSDKType() *shared.SourceHubspotCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceHubspotCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -73,36 +74,32 @@ func (r *SourceHubspotResourceModel) ToGetSDKType() *shared.SourceHubspotCreateR
func (r *SourceHubspotResourceModel) ToUpdateSDKType() *shared.SourceHubspotPutRequest {
var credentials shared.SourceHubspotUpdateAuthentication
- var sourceHubspotUpdateAuthenticationOAuth *shared.SourceHubspotUpdateAuthenticationOAuth
- if r.Configuration.Credentials.SourceHubspotUpdateAuthenticationOAuth != nil {
- clientID := r.Configuration.Credentials.SourceHubspotUpdateAuthenticationOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceHubspotUpdateAuthenticationOAuth.ClientSecret.ValueString()
- credentialsTitle := shared.SourceHubspotUpdateAuthenticationOAuthAuthType(r.Configuration.Credentials.SourceHubspotUpdateAuthenticationOAuth.CredentialsTitle.ValueString())
- refreshToken := r.Configuration.Credentials.SourceHubspotUpdateAuthenticationOAuth.RefreshToken.ValueString()
- sourceHubspotUpdateAuthenticationOAuth = &shared.SourceHubspotUpdateAuthenticationOAuth{
- ClientID: clientID,
- ClientSecret: clientSecret,
- CredentialsTitle: credentialsTitle,
- RefreshToken: refreshToken,
+ var sourceHubspotUpdateOAuth *shared.SourceHubspotUpdateOAuth
+ if r.Configuration.Credentials.OAuth != nil {
+ clientID := r.Configuration.Credentials.OAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth.RefreshToken.ValueString()
+ sourceHubspotUpdateOAuth = &shared.SourceHubspotUpdateOAuth{
+ ClientID: clientID,
+ ClientSecret: clientSecret,
+ RefreshToken: refreshToken,
}
}
- if sourceHubspotUpdateAuthenticationOAuth != nil {
+ if sourceHubspotUpdateOAuth != nil {
credentials = shared.SourceHubspotUpdateAuthentication{
- SourceHubspotUpdateAuthenticationOAuth: sourceHubspotUpdateAuthenticationOAuth,
+ SourceHubspotUpdateOAuth: sourceHubspotUpdateOAuth,
}
}
- var sourceHubspotUpdateAuthenticationPrivateApp *shared.SourceHubspotUpdateAuthenticationPrivateApp
- if r.Configuration.Credentials.SourceHubspotUpdateAuthenticationPrivateApp != nil {
- accessToken := r.Configuration.Credentials.SourceHubspotUpdateAuthenticationPrivateApp.AccessToken.ValueString()
- credentialsTitle1 := shared.SourceHubspotUpdateAuthenticationPrivateAppAuthType(r.Configuration.Credentials.SourceHubspotUpdateAuthenticationPrivateApp.CredentialsTitle.ValueString())
- sourceHubspotUpdateAuthenticationPrivateApp = &shared.SourceHubspotUpdateAuthenticationPrivateApp{
- AccessToken: accessToken,
- CredentialsTitle: credentialsTitle1,
+ var privateApp *shared.PrivateApp
+ if r.Configuration.Credentials.PrivateApp != nil {
+ accessToken := r.Configuration.Credentials.PrivateApp.AccessToken.ValueString()
+ privateApp = &shared.PrivateApp{
+ AccessToken: accessToken,
}
}
- if sourceHubspotUpdateAuthenticationPrivateApp != nil {
+ if privateApp != nil {
credentials = shared.SourceHubspotUpdateAuthentication{
- SourceHubspotUpdateAuthenticationPrivateApp: sourceHubspotUpdateAuthenticationPrivateApp,
+ PrivateApp: privateApp,
}
}
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
diff --git a/internal/provider/source_insightly_data_source.go b/internal/provider/source_insightly_data_source.go
old mode 100755
new mode 100644
index 7649c42e6..e4f006f48
--- a/internal/provider/source_insightly_data_source.go
+++ b/internal/provider/source_insightly_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceInsightlyDataSource struct {
// SourceInsightlyDataSourceModel describes the data model.
type SourceInsightlyDataSourceModel struct {
- Configuration SourceInsightly `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourceInsightlyDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceInsightly DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "insightly",
- ),
- },
- Description: `must be one of ["insightly"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `The date from which you'd like to replicate data for Insightly in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. Note that it will be used only for incremental streams.`,
- },
- "token": schema.StringAttribute{
- Computed: true,
- Description: `Your Insightly API token.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_insightly_data_source_sdk.go b/internal/provider/source_insightly_data_source_sdk.go
old mode 100755
new mode 100644
index 116a1b360..e861762d7
--- a/internal/provider/source_insightly_data_source_sdk.go
+++ b/internal/provider/source_insightly_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceInsightlyDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_insightly_resource.go b/internal/provider/source_insightly_resource.go
old mode 100755
new mode 100644
index 2cefb82eb..4509265ab
--- a/internal/provider/source_insightly_resource.go
+++ b/internal/provider/source_insightly_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceInsightlyResource struct {
// SourceInsightlyResourceModel describes the resource data model.
type SourceInsightlyResourceModel struct {
Configuration SourceInsightly `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -54,32 +54,35 @@ func (r *SourceInsightlyResource) Schema(ctx context.Context, req resource.Schem
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "insightly",
- ),
- },
- Description: `must be one of ["insightly"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `The date from which you'd like to replicate data for Insightly in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. Note that it will be used only for incremental streams.`,
},
"token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your Insightly API token.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceInsightlyResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceInsightly(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceInsightlyResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceInsightlyResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_insightly_resource_sdk.go b/internal/provider/source_insightly_resource_sdk.go
old mode 100755
new mode 100644
index 052dbae7a..734d8e843
--- a/internal/provider/source_insightly_resource_sdk.go
+++ b/internal/provider/source_insightly_resource_sdk.go
@@ -3,12 +3,11 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceInsightlyResourceModel) ToCreateSDKType() *shared.SourceInsightlyCreateRequest {
- sourceType := shared.SourceInsightlyInsightly(r.Configuration.SourceType.ValueString())
startDate := new(string)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate = r.Configuration.StartDate.ValueString()
@@ -22,9 +21,14 @@ func (r *SourceInsightlyResourceModel) ToCreateSDKType() *shared.SourceInsightly
token = nil
}
configuration := shared.SourceInsightly{
- SourceType: sourceType,
- StartDate: startDate,
- Token: token,
+ StartDate: startDate,
+ Token: token,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -36,6 +40,7 @@ func (r *SourceInsightlyResourceModel) ToCreateSDKType() *shared.SourceInsightly
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceInsightlyCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_instagram_data_source.go b/internal/provider/source_instagram_data_source.go
old mode 100755
new mode 100644
index fbcc8cac4..10df941b8
--- a/internal/provider/source_instagram_data_source.go
+++ b/internal/provider/source_instagram_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceInstagramDataSource struct {
// SourceInstagramDataSourceModel describes the data model.
type SourceInstagramDataSourceModel struct {
- Configuration SourceInstagram `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,49 +47,20 @@ func (r *SourceInstagramDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceInstagram DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The value of the access token generated with instagram_basic, instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram Public Content Access permissions. See the docs for more information`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID for your Oauth application`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret for your Oauth application`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "instagram",
- ),
- },
- Description: `must be one of ["instagram"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_instagram_data_source_sdk.go b/internal/provider/source_instagram_data_source_sdk.go
old mode 100755
new mode 100644
index e088321aa..5e1d9f1ff
--- a/internal/provider/source_instagram_data_source_sdk.go
+++ b/internal/provider/source_instagram_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceInstagramDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_instagram_resource.go b/internal/provider/source_instagram_resource.go
old mode 100755
new mode 100644
index 794ea1ed9..4b9e95153
--- a/internal/provider/source_instagram_resource.go
+++ b/internal/provider/source_instagram_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceInstagramResource struct {
// SourceInstagramResourceModel describes the resource data model.
type SourceInstagramResourceModel struct {
Configuration SourceInstagram `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,6 +58,7 @@ func (r *SourceInstagramResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The value of the access token generated with instagram_basic, instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram Public Content Access permissions. See the docs for more information`,
},
"client_id": schema.StringAttribute{
@@ -67,31 +69,33 @@ func (r *SourceInstagramResource) Schema(ctx context.Context, req resource.Schem
Optional: true,
Description: `The Client Secret for your Oauth application`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "instagram",
- ),
- },
- Description: `must be one of ["instagram"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. If left blank, the start date will be set to 2 years before the present date.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +159,7 @@ func (r *SourceInstagramResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceInstagram(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +335,5 @@ func (r *SourceInstagramResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceInstagramResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_instagram_resource_sdk.go b/internal/provider/source_instagram_resource_sdk.go
old mode 100755
new mode 100644
index 0ad22e08c..b29ae0b3c
--- a/internal/provider/source_instagram_resource_sdk.go
+++ b/internal/provider/source_instagram_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -22,15 +22,24 @@ func (r *SourceInstagramResourceModel) ToCreateSDKType() *shared.SourceInstagram
} else {
clientSecret = nil
}
- sourceType := shared.SourceInstagramInstagram(r.Configuration.SourceType.ValueString())
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceInstagram{
AccessToken: accessToken,
ClientID: clientID,
ClientSecret: clientSecret,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -41,6 +50,7 @@ func (r *SourceInstagramResourceModel) ToCreateSDKType() *shared.SourceInstagram
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceInstagramCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -67,7 +77,12 @@ func (r *SourceInstagramResourceModel) ToUpdateSDKType() *shared.SourceInstagram
} else {
clientSecret = nil
}
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceInstagramUpdate{
AccessToken: accessToken,
ClientID: clientID,
diff --git a/internal/provider/source_instatus_data_source.go b/internal/provider/source_instatus_data_source.go
old mode 100755
new mode 100644
index 3ee64abca..3ecfe0823
--- a/internal/provider/source_instatus_data_source.go
+++ b/internal/provider/source_instatus_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceInstatusDataSource struct {
// SourceInstatusDataSourceModel describes the data model.
type SourceInstatusDataSourceModel struct {
- Configuration SourceInstatus `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceInstatusDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceInstatus DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Instatus REST API key`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "instatus",
- ),
- },
- Description: `must be one of ["instatus"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_instatus_data_source_sdk.go b/internal/provider/source_instatus_data_source_sdk.go
old mode 100755
new mode 100644
index a7a5b2d6a..02851e9a3
--- a/internal/provider/source_instatus_data_source_sdk.go
+++ b/internal/provider/source_instatus_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceInstatusDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_instatus_resource.go b/internal/provider/source_instatus_resource.go
old mode 100755
new mode 100644
index 22a829fd4..a03800f40
--- a/internal/provider/source_instatus_resource.go
+++ b/internal/provider/source_instatus_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceInstatusResource struct {
// SourceInstatusResourceModel describes the resource data model.
type SourceInstatusResourceModel struct {
- Configuration SourceInstatus `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceInstatusResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceInstatusResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Instatus REST API key`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "instatus",
- ),
- },
- Description: `must be one of ["instatus"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceInstatusResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceInstatus(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceInstatusResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceInstatusResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_instatus_resource_sdk.go b/internal/provider/source_instatus_resource_sdk.go
old mode 100755
new mode 100644
index c92a836db..c61934638
--- a/internal/provider/source_instatus_resource_sdk.go
+++ b/internal/provider/source_instatus_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceInstatusResourceModel) ToCreateSDKType() *shared.SourceInstatusCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceInstatusInstatus(r.Configuration.SourceType.ValueString())
configuration := shared.SourceInstatus{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceInstatusResourceModel) ToCreateSDKType() *shared.SourceInstatusCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceInstatusCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_intercom_data_source.go b/internal/provider/source_intercom_data_source.go
old mode 100755
new mode 100644
index 83a103ec4..360b3fc8f
--- a/internal/provider/source_intercom_data_source.go
+++ b/internal/provider/source_intercom_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceIntercomDataSource struct {
// SourceIntercomDataSourceModel describes the data model.
type SourceIntercomDataSourceModel struct {
- Configuration SourceIntercom `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,49 +47,20 @@ func (r *SourceIntercomDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceIntercom DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access token for making authenticated requests. See the Intercom docs for more information.`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Client Id for your Intercom application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Client Secret for your Intercom application.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "intercom",
- ),
- },
- Description: `must be one of ["intercom"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_intercom_data_source_sdk.go b/internal/provider/source_intercom_data_source_sdk.go
old mode 100755
new mode 100644
index 0c5b8c941..5f0d98398
--- a/internal/provider/source_intercom_data_source_sdk.go
+++ b/internal/provider/source_intercom_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceIntercomDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_intercom_resource.go b/internal/provider/source_intercom_resource.go
old mode 100755
new mode 100644
index 41a8570b7..3052d8753
--- a/internal/provider/source_intercom_resource.go
+++ b/internal/provider/source_intercom_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceIntercomResource struct {
// SourceIntercomResourceModel describes the resource data model.
type SourceIntercomResourceModel struct {
Configuration SourceIntercom `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,6 +58,7 @@ func (r *SourceIntercomResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access token for making authenticated requests. See the Intercom docs for more information.`,
},
"client_id": schema.StringAttribute{
@@ -67,31 +69,33 @@ func (r *SourceIntercomResource) Schema(ctx context.Context, req resource.Schema
Optional: true,
Description: `Client Secret for your Intercom application.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "intercom",
- ),
- },
- Description: `must be one of ["intercom"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +159,7 @@ func (r *SourceIntercomResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceIntercom(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +335,5 @@ func (r *SourceIntercomResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceIntercomResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_intercom_resource_sdk.go b/internal/provider/source_intercom_resource_sdk.go
old mode 100755
new mode 100644
index 294ac16be..5c6e6155f
--- a/internal/provider/source_intercom_resource_sdk.go
+++ b/internal/provider/source_intercom_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -22,15 +22,19 @@ func (r *SourceIntercomResourceModel) ToCreateSDKType() *shared.SourceIntercomCr
} else {
clientSecret = nil
}
- sourceType := shared.SourceIntercomIntercom(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceIntercom{
AccessToken: accessToken,
ClientID: clientID,
ClientSecret: clientSecret,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -41,6 +45,7 @@ func (r *SourceIntercomResourceModel) ToCreateSDKType() *shared.SourceIntercomCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceIntercomCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_ip2whois_data_source.go b/internal/provider/source_ip2whois_data_source.go
old mode 100755
new mode 100644
index cbbc7dd5e..8c11bf9a6
--- a/internal/provider/source_ip2whois_data_source.go
+++ b/internal/provider/source_ip2whois_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceIp2whoisDataSource struct {
// SourceIp2whoisDataSourceModel describes the data model.
type SourceIp2whoisDataSourceModel struct {
- Configuration SourceIp2whois `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourceIp2whoisDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceIp2whois DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Your API Key. See here.`,
- },
- "domain": schema.StringAttribute{
- Computed: true,
- Description: `Domain name. See here.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ip2whois",
- ),
- },
- Description: `must be one of ["ip2whois"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_ip2whois_data_source_sdk.go b/internal/provider/source_ip2whois_data_source_sdk.go
old mode 100755
new mode 100644
index cc1dd2dbf..cadf987d2
--- a/internal/provider/source_ip2whois_data_source_sdk.go
+++ b/internal/provider/source_ip2whois_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceIp2whoisDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_ip2whois_resource.go b/internal/provider/source_ip2whois_resource.go
old mode 100755
new mode 100644
index 5f59df069..5b84ffdc3
--- a/internal/provider/source_ip2whois_resource.go
+++ b/internal/provider/source_ip2whois_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceIp2whoisResource struct {
// SourceIp2whoisResourceModel describes the resource data model.
type SourceIp2whoisResourceModel struct {
Configuration SourceIp2whois `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,30 +56,33 @@ func (r *SourceIp2whoisResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Your API Key. See here.`,
},
"domain": schema.StringAttribute{
Optional: true,
Description: `Domain name. See here.`,
},
- "source_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ip2whois",
- ),
- },
- Description: `must be one of ["ip2whois"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceIp2whoisResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceIp2whois(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceIp2whoisResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceIp2whoisResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_ip2whois_resource_sdk.go b/internal/provider/source_ip2whois_resource_sdk.go
old mode 100755
new mode 100644
index a72344578..b9858f0e6
--- a/internal/provider/source_ip2whois_resource_sdk.go
+++ b/internal/provider/source_ip2whois_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -20,16 +20,15 @@ func (r *SourceIp2whoisResourceModel) ToCreateSDKType() *shared.SourceIp2whoisCr
} else {
domain = nil
}
- sourceType := new(shared.SourceIp2whoisIp2whois)
- if !r.Configuration.SourceType.IsUnknown() && !r.Configuration.SourceType.IsNull() {
- *sourceType = shared.SourceIp2whoisIp2whois(r.Configuration.SourceType.ValueString())
- } else {
- sourceType = nil
- }
configuration := shared.SourceIp2whois{
- APIKey: apiKey,
- Domain: domain,
- SourceType: sourceType,
+ APIKey: apiKey,
+ Domain: domain,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -41,6 +40,7 @@ func (r *SourceIp2whoisResourceModel) ToCreateSDKType() *shared.SourceIp2whoisCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceIp2whoisCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_iterable_data_source.go b/internal/provider/source_iterable_data_source.go
old mode 100755
new mode 100644
index e8bd496a6..589598551
--- a/internal/provider/source_iterable_data_source.go
+++ b/internal/provider/source_iterable_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceIterableDataSource struct {
// SourceIterableDataSourceModel describes the data model.
type SourceIterableDataSourceModel struct {
- Configuration SourceIterable `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,41 +47,20 @@ func (r *SourceIterableDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceIterable DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Iterable API Key. See the docs for more information on how to obtain this key.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "iterable",
- ),
- },
- Description: `must be one of ["iterable"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for Iterable, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_iterable_data_source_sdk.go b/internal/provider/source_iterable_data_source_sdk.go
old mode 100755
new mode 100644
index 4f38fd81c..3ba9b55bc
--- a/internal/provider/source_iterable_data_source_sdk.go
+++ b/internal/provider/source_iterable_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceIterableDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_iterable_resource.go b/internal/provider/source_iterable_resource.go
old mode 100755
new mode 100644
index 3b6362af0..0aae93cf1
--- a/internal/provider/source_iterable_resource.go
+++ b/internal/provider/source_iterable_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,12 +35,13 @@ type SourceIterableResource struct {
// SourceIterableResourceModel describes the resource data model.
type SourceIterableResourceModel struct {
- Configuration SourceIterable `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration SourceChartmogul `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceIterableResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -57,33 +58,36 @@ func (r *SourceIterableResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Iterable API Key. See the docs for more information on how to obtain this key.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "iterable",
- ),
- },
- Description: `must be one of ["iterable"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The date from which you'd like to replicate data for Iterable, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for Iterable, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +151,7 @@ func (r *SourceIterableResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceIterable(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +327,5 @@ func (r *SourceIterableResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceIterableResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_iterable_resource_sdk.go b/internal/provider/source_iterable_resource_sdk.go
old mode 100755
new mode 100644
index c9aa886db..64b3bb40e
--- a/internal/provider/source_iterable_resource_sdk.go
+++ b/internal/provider/source_iterable_resource_sdk.go
@@ -3,19 +3,23 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceIterableResourceModel) ToCreateSDKType() *shared.SourceIterableCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceIterableIterable(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceIterable{
- APIKey: apiKey,
- SourceType: sourceType,
- StartDate: startDate,
+ APIKey: apiKey,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -27,6 +31,7 @@ func (r *SourceIterableResourceModel) ToCreateSDKType() *shared.SourceIterableCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceIterableCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_jira_data_source.go b/internal/provider/source_jira_data_source.go
old mode 100755
new mode 100644
index aaacbfaeb..18dd2cd1c
--- a/internal/provider/source_jira_data_source.go
+++ b/internal/provider/source_jira_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceJiraDataSource struct {
// SourceJiraDataSourceModel describes the data model.
type SourceJiraDataSourceModel struct {
- Configuration SourceJira `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,66 +47,20 @@ func (r *SourceJiraDataSource) Schema(ctx context.Context, req datasource.Schema
MarkdownDescription: "SourceJira DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `Jira API Token. See the docs for more information on how to generate this key. API Token is used for Authorization to your account by BasicAuth.`,
- },
- "domain": schema.StringAttribute{
- Computed: true,
- Description: `The Domain for your Jira account, e.g. airbyteio.atlassian.net, airbyteio.jira.com, jira.your-domain.com`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The user email for your Jira account which you used to generate the API token. This field is used for Authorization to your account by BasicAuth.`,
- },
- "enable_experimental_streams": schema.BoolAttribute{
- Computed: true,
- Description: `Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info.`,
- },
- "expand_issue_changelog": schema.BoolAttribute{
- Computed: true,
- Description: `Expand the changelog when replicating issues.`,
- },
- "projects": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `List of Jira project keys to replicate data for, or leave it empty if you want to replicate data for all projects.`,
- },
- "render_fields": schema.BoolAttribute{
- Computed: true,
- Description: `Render issue fields in HTML format in addition to Jira JSON-like format.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "jira",
- ),
- },
- Description: `must be one of ["jira"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you want to replicate data from Jira, use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies to certain streams, and only data generated on or after the start date will be replicated. Or leave it empty if you want to replicate all data. For more information, refer to the documentation.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_jira_data_source_sdk.go b/internal/provider/source_jira_data_source_sdk.go
old mode 100755
new mode 100644
index 6a8ffdf00..14a6cf5e3
--- a/internal/provider/source_jira_data_source_sdk.go
+++ b/internal/provider/source_jira_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceJiraDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_jira_resource.go b/internal/provider/source_jira_resource.go
old mode 100755
new mode 100644
index 68b7c47ca..5fe5c304d
--- a/internal/provider/source_jira_resource.go
+++ b/internal/provider/source_jira_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceJiraResource struct {
// SourceJiraResourceModel describes the resource data model.
type SourceJiraResourceModel struct {
Configuration SourceJira `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,6 +58,7 @@ func (r *SourceJiraResource) Schema(ctx context.Context, req resource.SchemaRequ
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Jira API Token. See the docs for more information on how to generate this key. API Token is used for Authorization to your account by BasicAuth.`,
},
"domain": schema.StringAttribute{
@@ -68,12 +70,24 @@ func (r *SourceJiraResource) Schema(ctx context.Context, req resource.SchemaRequ
Description: `The user email for your Jira account which you used to generate the API token. This field is used for Authorization to your account by BasicAuth.`,
},
"enable_experimental_streams": schema.BoolAttribute{
- Optional: true,
- Description: `Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info.`,
},
"expand_issue_changelog": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `(DEPRECATED) Expand the changelog when replicating issues.`,
+ },
+ "expand_issue_transition": schema.BoolAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `(DEPRECATED) Expand the transitions when replicating issues.`,
+ },
+ "issues_stream_expand_with": schema.ListAttribute{
Optional: true,
- Description: `Expand the changelog when replicating issues.`,
+ ElementType: types.StringType,
+ Description: `Select fields to Expand the ` + "`" + `Issues` + "`" + ` stream when replicating with: `,
},
"projects": schema.ListAttribute{
Optional: true,
@@ -81,34 +95,37 @@ func (r *SourceJiraResource) Schema(ctx context.Context, req resource.SchemaRequ
Description: `List of Jira project keys to replicate data for, or leave it empty if you want to replicate data for all projects.`,
},
"render_fields": schema.BoolAttribute{
- Optional: true,
- Description: `Render issue fields in HTML format in addition to Jira JSON-like format.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "jira",
- ),
- },
- Description: `must be one of ["jira"]`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `(DEPRECATED) Render issue fields in HTML format in addition to Jira JSON-like format.`,
},
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `The date from which you want to replicate data from Jira, use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies to certain streams, and only data generated on or after the start date will be replicated. Or leave it empty if you want to replicate all data. For more information, refer to the documentation.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you want to replicate data from Jira, use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies to certain streams, and only data generated on or after the start date will be replicated. Or leave it empty if you want to replicate all data. For more information, refer to the documentation.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -172,7 +189,7 @@ func (r *SourceJiraResource) Create(ctx context.Context, req resource.CreateRequ
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceJira(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -348,5 +365,5 @@ func (r *SourceJiraResource) Delete(ctx context.Context, req resource.DeleteRequ
}
func (r *SourceJiraResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_jira_resource_sdk.go b/internal/provider/source_jira_resource_sdk.go
old mode 100755
new mode 100644
index 11c35104c..e67aaff14
--- a/internal/provider/source_jira_resource_sdk.go
+++ b/internal/provider/source_jira_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -24,6 +24,16 @@ func (r *SourceJiraResourceModel) ToCreateSDKType() *shared.SourceJiraCreateRequ
} else {
expandIssueChangelog = nil
}
+ expandIssueTransition := new(bool)
+ if !r.Configuration.ExpandIssueTransition.IsUnknown() && !r.Configuration.ExpandIssueTransition.IsNull() {
+ *expandIssueTransition = r.Configuration.ExpandIssueTransition.ValueBool()
+ } else {
+ expandIssueTransition = nil
+ }
+ var issuesStreamExpandWith []shared.SourceJiraIssuesStreamExpandWith = nil
+ for _, issuesStreamExpandWithItem := range r.Configuration.IssuesStreamExpandWith {
+ issuesStreamExpandWith = append(issuesStreamExpandWith, shared.SourceJiraIssuesStreamExpandWith(issuesStreamExpandWithItem.ValueString()))
+ }
var projects []string = nil
for _, projectsItem := range r.Configuration.Projects {
projects = append(projects, projectsItem.ValueString())
@@ -34,7 +44,6 @@ func (r *SourceJiraResourceModel) ToCreateSDKType() *shared.SourceJiraCreateRequ
} else {
renderFields = nil
}
- sourceType := shared.SourceJiraJira(r.Configuration.SourceType.ValueString())
startDate := new(time.Time)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
@@ -47,11 +56,18 @@ func (r *SourceJiraResourceModel) ToCreateSDKType() *shared.SourceJiraCreateRequ
Email: email,
EnableExperimentalStreams: enableExperimentalStreams,
ExpandIssueChangelog: expandIssueChangelog,
+ ExpandIssueTransition: expandIssueTransition,
+ IssuesStreamExpandWith: issuesStreamExpandWith,
Projects: projects,
RenderFields: renderFields,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -62,6 +78,7 @@ func (r *SourceJiraResourceModel) ToCreateSDKType() *shared.SourceJiraCreateRequ
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceJiraCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -90,6 +107,16 @@ func (r *SourceJiraResourceModel) ToUpdateSDKType() *shared.SourceJiraPutRequest
} else {
expandIssueChangelog = nil
}
+ expandIssueTransition := new(bool)
+ if !r.Configuration.ExpandIssueTransition.IsUnknown() && !r.Configuration.ExpandIssueTransition.IsNull() {
+ *expandIssueTransition = r.Configuration.ExpandIssueTransition.ValueBool()
+ } else {
+ expandIssueTransition = nil
+ }
+ var issuesStreamExpandWith []shared.IssuesStreamExpandWith = nil
+ for _, issuesStreamExpandWithItem := range r.Configuration.IssuesStreamExpandWith {
+ issuesStreamExpandWith = append(issuesStreamExpandWith, shared.IssuesStreamExpandWith(issuesStreamExpandWithItem.ValueString()))
+ }
var projects []string = nil
for _, projectsItem := range r.Configuration.Projects {
projects = append(projects, projectsItem.ValueString())
@@ -112,6 +139,8 @@ func (r *SourceJiraResourceModel) ToUpdateSDKType() *shared.SourceJiraPutRequest
Email: email,
EnableExperimentalStreams: enableExperimentalStreams,
ExpandIssueChangelog: expandIssueChangelog,
+ ExpandIssueTransition: expandIssueTransition,
+ IssuesStreamExpandWith: issuesStreamExpandWith,
Projects: projects,
RenderFields: renderFields,
StartDate: startDate,
diff --git a/internal/provider/source_k6cloud_data_source.go b/internal/provider/source_k6cloud_data_source.go
old mode 100755
new mode 100644
index 7b8bc5616..c65e7dc2a
--- a/internal/provider/source_k6cloud_data_source.go
+++ b/internal/provider/source_k6cloud_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceK6CloudDataSource struct {
// SourceK6CloudDataSourceModel describes the data model.
type SourceK6CloudDataSourceModel struct {
- Configuration SourceK6Cloud `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceK6CloudDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceK6Cloud DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `Your API Token. See here. The key is case sensitive.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "k6-cloud",
- ),
- },
- Description: `must be one of ["k6-cloud"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_k6cloud_data_source_sdk.go b/internal/provider/source_k6cloud_data_source_sdk.go
old mode 100755
new mode 100644
index 96ad4896e..6195d1cc1
--- a/internal/provider/source_k6cloud_data_source_sdk.go
+++ b/internal/provider/source_k6cloud_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceK6CloudDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_k6cloud_resource.go b/internal/provider/source_k6cloud_resource.go
old mode 100755
new mode 100644
index b1338b338..b8c188cc0
--- a/internal/provider/source_k6cloud_resource.go
+++ b/internal/provider/source_k6cloud_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceK6CloudResource struct {
// SourceK6CloudResourceModel describes the resource data model.
type SourceK6CloudResourceModel struct {
Configuration SourceK6Cloud `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,26 +56,29 @@ func (r *SourceK6CloudResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your API Token. See here. The key is case sensitive.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "k6-cloud",
- ),
- },
- Description: `must be one of ["k6-cloud"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceK6CloudResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceK6Cloud(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceK6CloudResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceK6CloudResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_k6cloud_resource_sdk.go b/internal/provider/source_k6cloud_resource_sdk.go
old mode 100755
new mode 100644
index 36e121be6..39d61e512
--- a/internal/provider/source_k6cloud_resource_sdk.go
+++ b/internal/provider/source_k6cloud_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceK6CloudResourceModel) ToCreateSDKType() *shared.SourceK6CloudCreateRequest {
apiToken := r.Configuration.APIToken.ValueString()
- sourceType := shared.SourceK6CloudK6Cloud(r.Configuration.SourceType.ValueString())
configuration := shared.SourceK6Cloud{
- APIToken: apiToken,
- SourceType: sourceType,
+ APIToken: apiToken,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceK6CloudResourceModel) ToCreateSDKType() *shared.SourceK6CloudCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceK6CloudCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_klarna_data_source.go b/internal/provider/source_klarna_data_source.go
old mode 100755
new mode 100644
index e1444a5a6..e6398ef4b
--- a/internal/provider/source_klarna_data_source.go
+++ b/internal/provider/source_klarna_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceKlarnaDataSource struct {
// SourceKlarnaDataSourceModel describes the data model.
type SourceKlarnaDataSourceModel struct {
- Configuration SourceKlarna `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,54 +47,20 @@ func (r *SourceKlarnaDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceKlarna DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "password": schema.StringAttribute{
- Computed: true,
- Description: `A string which is associated with your Merchant ID and is used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)`,
- },
- "playground": schema.BoolAttribute{
- Computed: true,
- Description: `Propertie defining if connector is used against playground or production environment`,
- },
- "region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "eu",
- "us",
- "oc",
- ),
- },
- MarkdownDescription: `must be one of ["eu", "us", "oc"]` + "\n" +
- `Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "klarna",
- ),
- },
- Description: `must be one of ["klarna"]`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Consists of your Merchant ID (eid) - a unique number that identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_klarna_data_source_sdk.go b/internal/provider/source_klarna_data_source_sdk.go
old mode 100755
new mode 100644
index ec1675679..03e481a8c
--- a/internal/provider/source_klarna_data_source_sdk.go
+++ b/internal/provider/source_klarna_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceKlarnaDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_klarna_resource.go b/internal/provider/source_klarna_resource.go
old mode 100755
new mode 100644
index d8b6c8e5a..285002252
--- a/internal/provider/source_klarna_resource.go
+++ b/internal/provider/source_klarna_resource.go
@@ -3,17 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +36,7 @@ type SourceKlarnaResource struct {
// SourceKlarnaResourceModel describes the resource data model.
type SourceKlarnaResourceModel struct {
Configuration SourceKlarna `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,14 +58,18 @@ func (r *SourceKlarnaResource) Schema(ctx context.Context, req resource.SchemaRe
Attributes: map[string]schema.Attribute{
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `A string which is associated with your Merchant ID and is used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)`,
},
"playground": schema.BoolAttribute{
- Required: true,
- Description: `Propertie defining if connector is used against playground or production environment`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Propertie defining if connector is used against playground or production environment`,
},
"region": schema.StringAttribute{
Required: true,
+ MarkdownDescription: `must be one of ["eu", "us", "oc"]` + "\n" +
+ `Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'`,
Validators: []validator.String{
stringvalidator.OneOf(
"eu",
@@ -71,17 +77,6 @@ func (r *SourceKlarnaResource) Schema(ctx context.Context, req resource.SchemaRe
"oc",
),
},
- MarkdownDescription: `must be one of ["eu", "us", "oc"]` + "\n" +
- `Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "klarna",
- ),
- },
- Description: `must be one of ["klarna"]`,
},
"username": schema.StringAttribute{
Required: true,
@@ -89,13 +84,24 @@ func (r *SourceKlarnaResource) Schema(ctx context.Context, req resource.SchemaRe
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -159,7 +165,7 @@ func (r *SourceKlarnaResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceKlarna(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -335,5 +341,5 @@ func (r *SourceKlarnaResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceKlarnaResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_klarna_resource_sdk.go b/internal/provider/source_klarna_resource_sdk.go
old mode 100755
new mode 100644
index 94f49cfa7..c0f617699
--- a/internal/provider/source_klarna_resource_sdk.go
+++ b/internal/provider/source_klarna_resource_sdk.go
@@ -3,23 +3,32 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceKlarnaResourceModel) ToCreateSDKType() *shared.SourceKlarnaCreateRequest {
password := r.Configuration.Password.ValueString()
- playground := r.Configuration.Playground.ValueBool()
+ playground := new(bool)
+ if !r.Configuration.Playground.IsUnknown() && !r.Configuration.Playground.IsNull() {
+ *playground = r.Configuration.Playground.ValueBool()
+ } else {
+ playground = nil
+ }
region := shared.SourceKlarnaRegion(r.Configuration.Region.ValueString())
- sourceType := shared.SourceKlarnaKlarna(r.Configuration.SourceType.ValueString())
username := r.Configuration.Username.ValueString()
configuration := shared.SourceKlarna{
Password: password,
Playground: playground,
Region: region,
- SourceType: sourceType,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -30,6 +39,7 @@ func (r *SourceKlarnaResourceModel) ToCreateSDKType() *shared.SourceKlarnaCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceKlarnaCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -44,7 +54,12 @@ func (r *SourceKlarnaResourceModel) ToGetSDKType() *shared.SourceKlarnaCreateReq
func (r *SourceKlarnaResourceModel) ToUpdateSDKType() *shared.SourceKlarnaPutRequest {
password := r.Configuration.Password.ValueString()
- playground := r.Configuration.Playground.ValueBool()
+ playground := new(bool)
+ if !r.Configuration.Playground.IsUnknown() && !r.Configuration.Playground.IsNull() {
+ *playground = r.Configuration.Playground.ValueBool()
+ } else {
+ playground = nil
+ }
region := shared.SourceKlarnaUpdateRegion(r.Configuration.Region.ValueString())
username := r.Configuration.Username.ValueString()
configuration := shared.SourceKlarnaUpdate{
diff --git a/internal/provider/source_klaviyo_data_source.go b/internal/provider/source_klaviyo_data_source.go
old mode 100755
new mode 100644
index 0400255d1..4a5b92057
--- a/internal/provider/source_klaviyo_data_source.go
+++ b/internal/provider/source_klaviyo_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceKlaviyoDataSource struct {
// SourceKlaviyoDataSourceModel describes the data model.
type SourceKlaviyoDataSourceModel struct {
- Configuration SourceKlaviyo `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,41 +47,20 @@ func (r *SourceKlaviyoDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceKlaviyo DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Klaviyo API Key. See our docs if you need help finding this key.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "klaviyo",
- ),
- },
- Description: `must be one of ["klaviyo"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_klaviyo_data_source_sdk.go b/internal/provider/source_klaviyo_data_source_sdk.go
old mode 100755
new mode 100644
index 4ae979327..f2e8c97f1
--- a/internal/provider/source_klaviyo_data_source_sdk.go
+++ b/internal/provider/source_klaviyo_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceKlaviyoDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_klaviyo_resource.go b/internal/provider/source_klaviyo_resource.go
old mode 100755
new mode 100644
index ce406358e..afa3341db
--- a/internal/provider/source_klaviyo_resource.go
+++ b/internal/provider/source_klaviyo_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceKlaviyoResource struct {
// SourceKlaviyoResourceModel describes the resource data model.
type SourceKlaviyoResourceModel struct {
Configuration SourceKlaviyo `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,33 +58,36 @@ func (r *SourceKlaviyoResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Klaviyo API Key. See our docs if you need help finding this key.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "klaviyo",
- ),
- },
- Description: `must be one of ["klaviyo"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This field is optional - if not provided, all data will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +151,7 @@ func (r *SourceKlaviyoResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceKlaviyo(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +327,5 @@ func (r *SourceKlaviyoResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceKlaviyoResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_klaviyo_resource_sdk.go b/internal/provider/source_klaviyo_resource_sdk.go
old mode 100755
new mode 100644
index d53377af1..efbd9004d
--- a/internal/provider/source_klaviyo_resource_sdk.go
+++ b/internal/provider/source_klaviyo_resource_sdk.go
@@ -3,19 +3,28 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceKlaviyoResourceModel) ToCreateSDKType() *shared.SourceKlaviyoCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceKlaviyoKlaviyo(r.Configuration.SourceType.ValueString())
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceKlaviyo{
- APIKey: apiKey,
- SourceType: sourceType,
- StartDate: startDate,
+ APIKey: apiKey,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -27,6 +36,7 @@ func (r *SourceKlaviyoResourceModel) ToCreateSDKType() *shared.SourceKlaviyoCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceKlaviyoCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -41,7 +51,12 @@ func (r *SourceKlaviyoResourceModel) ToGetSDKType() *shared.SourceKlaviyoCreateR
func (r *SourceKlaviyoResourceModel) ToUpdateSDKType() *shared.SourceKlaviyoPutRequest {
apiKey := r.Configuration.APIKey.ValueString()
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceKlaviyoUpdate{
APIKey: apiKey,
StartDate: startDate,
diff --git a/internal/provider/source_kustomersinger_data_source.go b/internal/provider/source_kustomersinger_data_source.go
old mode 100755
new mode 100644
index 0ce09811c..809423bb5
--- a/internal/provider/source_kustomersinger_data_source.go
+++ b/internal/provider/source_kustomersinger_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceKustomerSingerDataSource struct {
// SourceKustomerSingerDataSourceModel describes the data model.
type SourceKustomerSingerDataSourceModel struct {
- Configuration SourceKustomerSinger `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourceKustomerSingerDataSource) Schema(ctx context.Context, req datasou
MarkdownDescription: "SourceKustomerSinger DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `Kustomer API Token. See the docs on how to obtain this`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "kustomer-singer",
- ),
- },
- Description: `must be one of ["kustomer-singer"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `The date from which you'd like to replicate the data`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_kustomersinger_data_source_sdk.go b/internal/provider/source_kustomersinger_data_source_sdk.go
old mode 100755
new mode 100644
index f6664e586..0e954e616
--- a/internal/provider/source_kustomersinger_data_source_sdk.go
+++ b/internal/provider/source_kustomersinger_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceKustomerSingerDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_kustomersinger_resource.go b/internal/provider/source_kustomersinger_resource.go
old mode 100755
new mode 100644
index 1ec3392ce..2fac6ee8e
--- a/internal/provider/source_kustomersinger_resource.go
+++ b/internal/provider/source_kustomersinger_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceKustomerSingerResource struct {
// SourceKustomerSingerResourceModel describes the resource data model.
type SourceKustomerSingerResourceModel struct {
Configuration SourceKustomerSinger `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,30 +56,33 @@ func (r *SourceKustomerSingerResource) Schema(ctx context.Context, req resource.
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Kustomer API Token. See the docs on how to obtain this`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "kustomer-singer",
- ),
- },
- Description: `must be one of ["kustomer-singer"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `The date from which you'd like to replicate the data`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceKustomerSingerResource) Create(ctx context.Context, req resource.
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceKustomerSinger(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceKustomerSingerResource) Delete(ctx context.Context, req resource.
}
func (r *SourceKustomerSingerResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_kustomersinger_resource_sdk.go b/internal/provider/source_kustomersinger_resource_sdk.go
old mode 100755
new mode 100644
index 47c139b24..534a2e0cb
--- a/internal/provider/source_kustomersinger_resource_sdk.go
+++ b/internal/provider/source_kustomersinger_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceKustomerSingerResourceModel) ToCreateSDKType() *shared.SourceKustomerSingerCreateRequest {
apiToken := r.Configuration.APIToken.ValueString()
- sourceType := shared.SourceKustomerSingerKustomerSinger(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
configuration := shared.SourceKustomerSinger{
- APIToken: apiToken,
- SourceType: sourceType,
- StartDate: startDate,
+ APIToken: apiToken,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceKustomerSingerResourceModel) ToCreateSDKType() *shared.SourceKust
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceKustomerSingerCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_kyve_data_source.go b/internal/provider/source_kyve_data_source.go
old mode 100755
new mode 100644
index 78e25a1ae..7898464e4
--- a/internal/provider/source_kyve_data_source.go
+++ b/internal/provider/source_kyve_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceKyveDataSource struct {
// SourceKyveDataSourceModel describes the data model.
type SourceKyveDataSourceModel struct {
- Configuration SourceKyve `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,50 +47,20 @@ func (r *SourceKyveDataSource) Schema(ctx context.Context, req datasource.Schema
MarkdownDescription: "SourceKyve DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "max_pages": schema.Int64Attribute{
- Computed: true,
- Description: `The maximum amount of pages to go trough. Set to 'null' for all pages.`,
- },
- "page_size": schema.Int64Attribute{
- Computed: true,
- Description: `The pagesize for pagination, smaller numbers are used in integration tests.`,
- },
- "pool_ids": schema.StringAttribute{
- Computed: true,
- Description: `The IDs of the KYVE storage pool you want to archive. (Comma separated)`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "kyve",
- ),
- },
- Description: `must be one of ["kyve"]`,
- },
- "start_ids": schema.StringAttribute{
- Computed: true,
- Description: `The start-id defines, from which bundle id the pipeline should start to extract the data (Comma separated)`,
- },
- "url_base": schema.StringAttribute{
- Computed: true,
- Description: `URL to the KYVE Chain API.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_kyve_data_source_sdk.go b/internal/provider/source_kyve_data_source_sdk.go
old mode 100755
new mode 100644
index 9212451d5..6565ecb12
--- a/internal/provider/source_kyve_data_source_sdk.go
+++ b/internal/provider/source_kyve_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceKyveDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_kyve_resource.go b/internal/provider/source_kyve_resource.go
old mode 100755
new mode 100644
index 9df2914f6..85f973d69
--- a/internal/provider/source_kyve_resource.go
+++ b/internal/provider/source_kyve_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceKyveResource struct {
// SourceKyveResourceModel describes the resource data model.
type SourceKyveResourceModel struct {
Configuration SourceKyve `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -59,39 +59,43 @@ func (r *SourceKyveResource) Schema(ctx context.Context, req resource.SchemaRequ
Description: `The maximum amount of pages to go trough. Set to 'null' for all pages.`,
},
"page_size": schema.Int64Attribute{
- Optional: true,
- Description: `The pagesize for pagination, smaller numbers are used in integration tests.`,
+ Optional: true,
+ MarkdownDescription: `Default: 100` + "\n" +
+ `The pagesize for pagination, smaller numbers are used in integration tests.`,
},
"pool_ids": schema.StringAttribute{
Required: true,
Description: `The IDs of the KYVE storage pool you want to archive. (Comma separated)`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "kyve",
- ),
- },
- Description: `must be one of ["kyve"]`,
- },
"start_ids": schema.StringAttribute{
Required: true,
Description: `The start-id defines, from which bundle id the pipeline should start to extract the data (Comma separated)`,
},
"url_base": schema.StringAttribute{
- Optional: true,
- Description: `URL to the KYVE Chain API.`,
+ Optional: true,
+ MarkdownDescription: `Default: "https://api.korellia.kyve.network"` + "\n" +
+ `URL to the KYVE Chain API.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +159,7 @@ func (r *SourceKyveResource) Create(ctx context.Context, req resource.CreateRequ
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceKyve(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +335,5 @@ func (r *SourceKyveResource) Delete(ctx context.Context, req resource.DeleteRequ
}
func (r *SourceKyveResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_kyve_resource_sdk.go b/internal/provider/source_kyve_resource_sdk.go
old mode 100755
new mode 100644
index ccc94ace3..f74c7d1a2
--- a/internal/provider/source_kyve_resource_sdk.go
+++ b/internal/provider/source_kyve_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -21,7 +21,6 @@ func (r *SourceKyveResourceModel) ToCreateSDKType() *shared.SourceKyveCreateRequ
pageSize = nil
}
poolIds := r.Configuration.PoolIds.ValueString()
- sourceType := shared.SourceKyveKyve(r.Configuration.SourceType.ValueString())
startIds := r.Configuration.StartIds.ValueString()
urlBase := new(string)
if !r.Configuration.URLBase.IsUnknown() && !r.Configuration.URLBase.IsNull() {
@@ -30,12 +29,17 @@ func (r *SourceKyveResourceModel) ToCreateSDKType() *shared.SourceKyveCreateRequ
urlBase = nil
}
configuration := shared.SourceKyve{
- MaxPages: maxPages,
- PageSize: pageSize,
- PoolIds: poolIds,
- SourceType: sourceType,
- StartIds: startIds,
- URLBase: urlBase,
+ MaxPages: maxPages,
+ PageSize: pageSize,
+ PoolIds: poolIds,
+ StartIds: startIds,
+ URLBase: urlBase,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -47,6 +51,7 @@ func (r *SourceKyveResourceModel) ToCreateSDKType() *shared.SourceKyveCreateRequ
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceKyveCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_launchdarkly_data_source.go b/internal/provider/source_launchdarkly_data_source.go
old mode 100755
new mode 100644
index 6a934e1e6..b2591a961
--- a/internal/provider/source_launchdarkly_data_source.go
+++ b/internal/provider/source_launchdarkly_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceLaunchdarklyDataSource struct {
// SourceLaunchdarklyDataSourceModel describes the data model.
type SourceLaunchdarklyDataSourceModel struct {
- Configuration SourceLaunchdarkly `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceLaunchdarklyDataSource) Schema(ctx context.Context, req datasourc
MarkdownDescription: "SourceLaunchdarkly DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Your Access token. See here.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "launchdarkly",
- ),
- },
- Description: `must be one of ["launchdarkly"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_launchdarkly_data_source_sdk.go b/internal/provider/source_launchdarkly_data_source_sdk.go
old mode 100755
new mode 100644
index f6fcad34a..f8337fba7
--- a/internal/provider/source_launchdarkly_data_source_sdk.go
+++ b/internal/provider/source_launchdarkly_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLaunchdarklyDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_launchdarkly_resource.go b/internal/provider/source_launchdarkly_resource.go
old mode 100755
new mode 100644
index 95cfa4aa6..3d740c8c7
--- a/internal/provider/source_launchdarkly_resource.go
+++ b/internal/provider/source_launchdarkly_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceLaunchdarklyResource struct {
// SourceLaunchdarklyResourceModel describes the resource data model.
type SourceLaunchdarklyResourceModel struct {
- Configuration SourceLaunchdarkly `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration OAuth2AccessToken `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceLaunchdarklyResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceLaunchdarklyResource) Schema(ctx context.Context, req resource.Sc
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your Access token. See here.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "launchdarkly",
- ),
- },
- Description: `must be one of ["launchdarkly"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceLaunchdarklyResource) Create(ctx context.Context, req resource.Cr
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceLaunchdarkly(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceLaunchdarklyResource) Delete(ctx context.Context, req resource.De
}
func (r *SourceLaunchdarklyResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_launchdarkly_resource_sdk.go b/internal/provider/source_launchdarkly_resource_sdk.go
old mode 100755
new mode 100644
index b27a94577..7a9096429
--- a/internal/provider/source_launchdarkly_resource_sdk.go
+++ b/internal/provider/source_launchdarkly_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLaunchdarklyResourceModel) ToCreateSDKType() *shared.SourceLaunchdarklyCreateRequest {
accessToken := r.Configuration.AccessToken.ValueString()
- sourceType := shared.SourceLaunchdarklyLaunchdarkly(r.Configuration.SourceType.ValueString())
configuration := shared.SourceLaunchdarkly{
AccessToken: accessToken,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceLaunchdarklyResourceModel) ToCreateSDKType() *shared.SourceLaunch
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceLaunchdarklyCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_lemlist_data_source.go b/internal/provider/source_lemlist_data_source.go
old mode 100755
new mode 100644
index f2c6dce4b..f36400c04
--- a/internal/provider/source_lemlist_data_source.go
+++ b/internal/provider/source_lemlist_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceLemlistDataSource struct {
// SourceLemlistDataSourceModel describes the data model.
type SourceLemlistDataSourceModel struct {
- Configuration SourceLemlist `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceLemlistDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceLemlist DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Lemlist API key,`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "lemlist",
- ),
- },
- Description: `must be one of ["lemlist"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_lemlist_data_source_sdk.go b/internal/provider/source_lemlist_data_source_sdk.go
old mode 100755
new mode 100644
index 085c63df8..c0d584077
--- a/internal/provider/source_lemlist_data_source_sdk.go
+++ b/internal/provider/source_lemlist_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLemlistDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_lemlist_resource.go b/internal/provider/source_lemlist_resource.go
old mode 100755
new mode 100644
index 6702061dd..88f25fbe9
--- a/internal/provider/source_lemlist_resource.go
+++ b/internal/provider/source_lemlist_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceLemlistResource struct {
// SourceLemlistResourceModel describes the resource data model.
type SourceLemlistResourceModel struct {
- Configuration SourceLemlist `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceLemlistResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceLemlistResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Lemlist API key,`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "lemlist",
- ),
- },
- Description: `must be one of ["lemlist"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceLemlistResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceLemlist(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceLemlistResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceLemlistResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_lemlist_resource_sdk.go b/internal/provider/source_lemlist_resource_sdk.go
old mode 100755
new mode 100644
index 1a709fee7..fbd93165e
--- a/internal/provider/source_lemlist_resource_sdk.go
+++ b/internal/provider/source_lemlist_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLemlistResourceModel) ToCreateSDKType() *shared.SourceLemlistCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceLemlistLemlist(r.Configuration.SourceType.ValueString())
configuration := shared.SourceLemlist{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceLemlistResourceModel) ToCreateSDKType() *shared.SourceLemlistCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceLemlistCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_leverhiring_data_source.go b/internal/provider/source_leverhiring_data_source.go
old mode 100755
new mode 100644
index fec3157d5..e82dcb673
--- a/internal/provider/source_leverhiring_data_source.go
+++ b/internal/provider/source_leverhiring_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceLeverHiringDataSource struct {
// SourceLeverHiringDataSourceModel describes the data model.
type SourceLeverHiringDataSourceModel struct {
- Configuration SourceLeverHiring `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,146 +47,20 @@ func (r *SourceLeverHiringDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "SourceLeverHiring DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `The Api Key of your Lever Hiring account.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Api Key",
- ),
- },
- Description: `must be one of ["Api Key"]`,
- },
- },
- Description: `Choose how to authenticate to Lever Hiring.`,
- },
- "source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Lever Hiring developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Lever Hiring developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining new access token.`,
- },
- },
- Description: `Choose how to authenticate to Lever Hiring.`,
- },
- "source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_api_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `The Api Key of your Lever Hiring account.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Api Key",
- ),
- },
- Description: `must be one of ["Api Key"]`,
- },
- },
- Description: `Choose how to authenticate to Lever Hiring.`,
- },
- "source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Lever Hiring developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Lever Hiring developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining new access token.`,
- },
- },
- Description: `Choose how to authenticate to Lever Hiring.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Choose how to authenticate to Lever Hiring.`,
- },
- "environment": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Production",
- "Sandbox",
- ),
- },
- MarkdownDescription: `must be one of ["Production", "Sandbox"]` + "\n" +
- `The environment in which you'd like to replicate data for Lever. This is used to determine which Lever API endpoint to use.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "lever-hiring",
- ),
- },
- Description: `must be one of ["lever-hiring"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. Note that it will be used only in the following incremental streams: comments, commits, and issues.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_leverhiring_data_source_sdk.go b/internal/provider/source_leverhiring_data_source_sdk.go
old mode 100755
new mode 100644
index 722aa1dc5..0d20e8e20
--- a/internal/provider/source_leverhiring_data_source_sdk.go
+++ b/internal/provider/source_leverhiring_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLeverHiringDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_leverhiring_resource.go b/internal/provider/source_leverhiring_resource.go
old mode 100755
new mode 100644
index 81fc65b09..6089f3b98
--- a/internal/provider/source_leverhiring_resource.go
+++ b/internal/provider/source_leverhiring_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceLeverHiringResource struct {
// SourceLeverHiringResourceModel describes the resource data model.
type SourceLeverHiringResourceModel struct {
Configuration SourceLeverHiring `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,83 +60,20 @@ func (r *SourceLeverHiringResource) Schema(ctx context.Context, req resource.Sch
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key": schema.SingleNestedAttribute{
+ "authenticate_via_lever_api_key": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Api Key of your Lever Hiring account.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Api Key",
- ),
- },
- Description: `must be one of ["Api Key"]`,
- },
- },
- Description: `Choose how to authenticate to Lever Hiring.`,
- },
- "source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Optional: true,
- Description: `The Client ID of your Lever Hiring developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Optional: true,
- Description: `The Client Secret of your Lever Hiring developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The token for obtaining new access token.`,
- },
- },
- Description: `Choose how to authenticate to Lever Hiring.`,
- },
- "source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_api_key": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Required: true,
- Description: `The Api Key of your Lever Hiring account.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Api Key",
- ),
- },
- Description: `must be one of ["Api Key"]`,
- },
},
Description: `Choose how to authenticate to Lever Hiring.`,
},
- "source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_o_auth": schema.SingleNestedAttribute{
+ "authenticate_via_lever_o_auth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
"client_id": schema.StringAttribute{
Optional: true,
Description: `The Client ID of your Lever Hiring developer application.`,
@@ -145,36 +84,28 @@ func (r *SourceLeverHiringResource) Schema(ctx context.Context, req resource.Sch
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The token for obtaining new access token.`,
},
},
Description: `Choose how to authenticate to Lever Hiring.`,
},
},
+ Description: `Choose how to authenticate to Lever Hiring.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Choose how to authenticate to Lever Hiring.`,
},
"environment": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["Production", "Sandbox"]; Default: "Sandbox"` + "\n" +
+ `The environment in which you'd like to replicate data for Lever. This is used to determine which Lever API endpoint to use.`,
Validators: []validator.String{
stringvalidator.OneOf(
"Production",
"Sandbox",
),
},
- MarkdownDescription: `must be one of ["Production", "Sandbox"]` + "\n" +
- `The environment in which you'd like to replicate data for Lever. This is used to determine which Lever API endpoint to use.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "lever-hiring",
- ),
- },
- Description: `must be one of ["lever-hiring"]`,
},
"start_date": schema.StringAttribute{
Required: true,
@@ -182,13 +113,24 @@ func (r *SourceLeverHiringResource) Schema(ctx context.Context, req resource.Sch
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -252,7 +194,7 @@ func (r *SourceLeverHiringResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceLeverHiring(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -428,5 +370,5 @@ func (r *SourceLeverHiringResource) Delete(ctx context.Context, req resource.Del
}
func (r *SourceLeverHiringResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_leverhiring_resource_sdk.go b/internal/provider/source_leverhiring_resource_sdk.go
old mode 100755
new mode 100644
index 62d59ed33..19483926c
--- a/internal/provider/source_leverhiring_resource_sdk.go
+++ b/internal/provider/source_leverhiring_resource_sdk.go
@@ -3,63 +3,49 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLeverHiringResourceModel) ToCreateSDKType() *shared.SourceLeverHiringCreateRequest {
var credentials *shared.SourceLeverHiringAuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth *shared.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth
- if r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth != nil {
- authType := new(shared.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuthAuthType)
- if !r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth.AuthType.IsNull() {
- *authType = shared.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuthAuthType(r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth.AuthType.ValueString())
- } else {
- authType = nil
- }
+ var sourceLeverHiringAuthenticateViaLeverOAuth *shared.SourceLeverHiringAuthenticateViaLeverOAuth
+ if r.Configuration.Credentials.AuthenticateViaLeverOAuth != nil {
clientID := new(string)
- if !r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth.ClientID.ValueString()
+ if !r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientID.IsUnknown() && !r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientSecret.IsUnknown() && !r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- refreshToken := r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth.RefreshToken.ValueString()
- sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth = &shared.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth{
- AuthType: authType,
+ refreshToken := r.Configuration.Credentials.AuthenticateViaLeverOAuth.RefreshToken.ValueString()
+ sourceLeverHiringAuthenticateViaLeverOAuth = &shared.SourceLeverHiringAuthenticateViaLeverOAuth{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth != nil {
+ if sourceLeverHiringAuthenticateViaLeverOAuth != nil {
credentials = &shared.SourceLeverHiringAuthenticationMechanism{
- SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth: sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth,
+ SourceLeverHiringAuthenticateViaLeverOAuth: sourceLeverHiringAuthenticateViaLeverOAuth,
}
}
- var sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey *shared.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey
- if r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey != nil {
- apiKey := r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey.APIKey.ValueString()
- authType1 := new(shared.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType)
- if !r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey.AuthType.IsNull() {
- *authType1 = shared.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType(r.Configuration.Credentials.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey = &shared.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey{
- APIKey: apiKey,
- AuthType: authType1,
+ var sourceLeverHiringAuthenticateViaLeverAPIKey *shared.SourceLeverHiringAuthenticateViaLeverAPIKey
+ if r.Configuration.Credentials.AuthenticateViaLeverAPIKey != nil {
+ apiKey := r.Configuration.Credentials.AuthenticateViaLeverAPIKey.APIKey.ValueString()
+ sourceLeverHiringAuthenticateViaLeverAPIKey = &shared.SourceLeverHiringAuthenticateViaLeverAPIKey{
+ APIKey: apiKey,
}
}
- if sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey != nil {
+ if sourceLeverHiringAuthenticateViaLeverAPIKey != nil {
credentials = &shared.SourceLeverHiringAuthenticationMechanism{
- SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey: sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey,
+ SourceLeverHiringAuthenticateViaLeverAPIKey: sourceLeverHiringAuthenticateViaLeverAPIKey,
}
}
}
@@ -69,14 +55,18 @@ func (r *SourceLeverHiringResourceModel) ToCreateSDKType() *shared.SourceLeverHi
} else {
environment = nil
}
- sourceType := shared.SourceLeverHiringLeverHiring(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
configuration := shared.SourceLeverHiring{
Credentials: credentials,
Environment: environment,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -87,6 +77,7 @@ func (r *SourceLeverHiringResourceModel) ToCreateSDKType() *shared.SourceLeverHi
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceLeverHiringCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -102,56 +93,42 @@ func (r *SourceLeverHiringResourceModel) ToGetSDKType() *shared.SourceLeverHirin
func (r *SourceLeverHiringResourceModel) ToUpdateSDKType() *shared.SourceLeverHiringPutRequest {
var credentials *shared.SourceLeverHiringUpdateAuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth *shared.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth
- if r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth != nil {
- authType := new(shared.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuthAuthType)
- if !r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth.AuthType.IsNull() {
- *authType = shared.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuthAuthType(r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth.AuthType.ValueString())
- } else {
- authType = nil
- }
+ var authenticateViaLeverOAuth *shared.AuthenticateViaLeverOAuth
+ if r.Configuration.Credentials.AuthenticateViaLeverOAuth != nil {
clientID := new(string)
- if !r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth.ClientID.ValueString()
+ if !r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientID.IsUnknown() && !r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientSecret.IsUnknown() && !r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.AuthenticateViaLeverOAuth.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- refreshToken := r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth.RefreshToken.ValueString()
- sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth = &shared.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth{
- AuthType: authType,
+ refreshToken := r.Configuration.Credentials.AuthenticateViaLeverOAuth.RefreshToken.ValueString()
+ authenticateViaLeverOAuth = &shared.AuthenticateViaLeverOAuth{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth != nil {
+ if authenticateViaLeverOAuth != nil {
credentials = &shared.SourceLeverHiringUpdateAuthenticationMechanism{
- SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth: sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth,
+ AuthenticateViaLeverOAuth: authenticateViaLeverOAuth,
}
}
- var sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey *shared.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey
- if r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey != nil {
- apiKey := r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey.APIKey.ValueString()
- authType1 := new(shared.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType)
- if !r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey.AuthType.IsNull() {
- *authType1 = shared.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType(r.Configuration.Credentials.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey = &shared.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey{
- APIKey: apiKey,
- AuthType: authType1,
+ var authenticateViaLeverAPIKey *shared.AuthenticateViaLeverAPIKey
+ if r.Configuration.Credentials.AuthenticateViaLeverAPIKey != nil {
+ apiKey := r.Configuration.Credentials.AuthenticateViaLeverAPIKey.APIKey.ValueString()
+ authenticateViaLeverAPIKey = &shared.AuthenticateViaLeverAPIKey{
+ APIKey: apiKey,
}
}
- if sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey != nil {
+ if authenticateViaLeverAPIKey != nil {
credentials = &shared.SourceLeverHiringUpdateAuthenticationMechanism{
- SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey: sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey,
+ AuthenticateViaLeverAPIKey: authenticateViaLeverAPIKey,
}
}
}
diff --git a/internal/provider/source_linkedinads_data_source.go b/internal/provider/source_linkedinads_data_source.go
old mode 100755
new mode 100644
index ec25069f9..e13d765fb
--- a/internal/provider/source_linkedinads_data_source.go
+++ b/internal/provider/source_linkedinads_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceLinkedinAdsDataSource struct {
// SourceLinkedinAdsDataSourceModel describes the data model.
type SourceLinkedinAdsDataSourceModel struct {
- Configuration SourceLinkedinAds `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,192 +47,20 @@ func (r *SourceLinkedinAdsDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "SourceLinkedinAds DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "account_ids": schema.ListAttribute{
- Computed: true,
- ElementType: types.Int64Type,
- Description: `Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs.`,
- },
- "ad_analytics_reports": schema.ListNestedAttribute{
- Computed: true,
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "name": schema.StringAttribute{
- Computed: true,
- Description: `The name for the custom report.`,
- },
- "pivot_by": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "COMPANY",
- "ACCOUNT",
- "SHARE",
- "CAMPAIGN",
- "CREATIVE",
- "CAMPAIGN_GROUP",
- "CONVERSION",
- "CONVERSATION_NODE",
- "CONVERSATION_NODE_OPTION_INDEX",
- "SERVING_LOCATION",
- "CARD_INDEX",
- "MEMBER_COMPANY_SIZE",
- "MEMBER_INDUSTRY",
- "MEMBER_SENIORITY",
- "MEMBER_JOB_TITLE ",
- "MEMBER_JOB_FUNCTION ",
- "MEMBER_COUNTRY_V2 ",
- "MEMBER_REGION_V2",
- "MEMBER_COMPANY",
- "PLACEMENT_NAME",
- "IMPRESSION_DEVICE_TYPE",
- ),
- },
- MarkdownDescription: `must be one of ["COMPANY", "ACCOUNT", "SHARE", "CAMPAIGN", "CREATIVE", "CAMPAIGN_GROUP", "CONVERSION", "CONVERSATION_NODE", "CONVERSATION_NODE_OPTION_INDEX", "SERVING_LOCATION", "CARD_INDEX", "MEMBER_COMPANY_SIZE", "MEMBER_INDUSTRY", "MEMBER_SENIORITY", "MEMBER_JOB_TITLE ", "MEMBER_JOB_FUNCTION ", "MEMBER_COUNTRY_V2 ", "MEMBER_REGION_V2", "MEMBER_COMPANY", "PLACEMENT_NAME", "IMPRESSION_DEVICE_TYPE"]` + "\n" +
- `Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.`,
- },
- "time_granularity": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ALL",
- "DAILY",
- "MONTHLY",
- "YEARLY",
- ),
- },
- MarkdownDescription: `must be one of ["ALL", "DAILY", "MONTHLY", "YEARLY"]` + "\n" +
- `Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.`,
- },
- },
- },
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_linkedin_ads_authentication_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The access token generated for your developer application. Refer to our documentation for more information.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_linkedin_ads_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oAuth2.0",
- ),
- },
- Description: `must be one of ["oAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The client ID of your developer application. Refer to our documentation for more information.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret of your developer application. Refer to our documentation for more information.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access token. Refer to our documentation for more information.`,
- },
- },
- },
- "source_linkedin_ads_update_authentication_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The access token generated for your developer application. Refer to our documentation for more information.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_linkedin_ads_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oAuth2.0",
- ),
- },
- Description: `must be one of ["oAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The client ID of your developer application. Refer to our documentation for more information.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret of your developer application. Refer to our documentation for more information.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access token. Refer to our documentation for more information.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "linkedin-ads",
- ),
- },
- Description: `must be one of ["linkedin-ads"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_linkedinads_data_source_sdk.go b/internal/provider/source_linkedinads_data_source_sdk.go
old mode 100755
new mode 100644
index ed57c66c4..9431280fe
--- a/internal/provider/source_linkedinads_data_source_sdk.go
+++ b/internal/provider/source_linkedinads_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLinkedinAdsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_linkedinads_resource.go b/internal/provider/source_linkedinads_resource.go
old mode 100755
new mode 100644
index ca24efad9..6bda1962d
--- a/internal/provider/source_linkedinads_resource.go
+++ b/internal/provider/source_linkedinads_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceLinkedinAdsResource struct {
// SourceLinkedinAdsResourceModel describes the resource data model.
type SourceLinkedinAdsResourceModel struct {
Configuration SourceLinkedinAds `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -70,6 +72,8 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch
},
"pivot_by": schema.StringAttribute{
Required: true,
+ MarkdownDescription: `must be one of ["COMPANY", "ACCOUNT", "SHARE", "CAMPAIGN", "CREATIVE", "CAMPAIGN_GROUP", "CONVERSION", "CONVERSATION_NODE", "CONVERSATION_NODE_OPTION_INDEX", "SERVING_LOCATION", "CARD_INDEX", "MEMBER_COMPANY_SIZE", "MEMBER_INDUSTRY", "MEMBER_SENIORITY", "MEMBER_JOB_TITLE ", "MEMBER_JOB_FUNCTION ", "MEMBER_COUNTRY_V2 ", "MEMBER_REGION_V2", "MEMBER_COMPANY", "PLACEMENT_NAME", "IMPRESSION_DEVICE_TYPE"]` + "\n" +
+ `Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.`,
Validators: []validator.String{
stringvalidator.OneOf(
"COMPANY",
@@ -95,11 +99,11 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch
"IMPRESSION_DEVICE_TYPE",
),
},
- MarkdownDescription: `must be one of ["COMPANY", "ACCOUNT", "SHARE", "CAMPAIGN", "CREATIVE", "CAMPAIGN_GROUP", "CONVERSION", "CONVERSATION_NODE", "CONVERSATION_NODE_OPTION_INDEX", "SERVING_LOCATION", "CARD_INDEX", "MEMBER_COMPANY_SIZE", "MEMBER_INDUSTRY", "MEMBER_SENIORITY", "MEMBER_JOB_TITLE ", "MEMBER_JOB_FUNCTION ", "MEMBER_COUNTRY_V2 ", "MEMBER_REGION_V2", "MEMBER_COMPANY", "PLACEMENT_NAME", "IMPRESSION_DEVICE_TYPE"]` + "\n" +
- `Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.`,
},
"time_granularity": schema.StringAttribute{
Required: true,
+ MarkdownDescription: `must be one of ["ALL", "DAILY", "MONTHLY", "YEARLY"]` + "\n" +
+ `Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.`,
Validators: []validator.String{
stringvalidator.OneOf(
"ALL",
@@ -108,8 +112,6 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch
"YEARLY",
),
},
- MarkdownDescription: `must be one of ["ALL", "DAILY", "MONTHLY", "YEARLY"]` + "\n" +
- `Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.`,
},
},
},
@@ -117,80 +119,19 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_linkedin_ads_authentication_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `The access token generated for your developer application. Refer to our documentation for more information.`,
- },
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_linkedin_ads_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oAuth2.0",
- ),
- },
- Description: `must be one of ["oAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The client ID of your developer application. Refer to our documentation for more information.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The client secret of your developer application. Refer to our documentation for more information.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The key to refresh the expired access token. Refer to our documentation for more information.`,
- },
- },
- },
- "source_linkedin_ads_update_authentication_access_token": schema.SingleNestedAttribute{
+ "access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The access token generated for your developer application. Refer to our documentation for more information.`,
},
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
},
},
- "source_linkedin_ads_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oAuth2.0",
- ),
- },
- Description: `must be one of ["oAuth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The client ID of your developer application. Refer to our documentation for more information.`,
@@ -201,6 +142,7 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The key to refresh the expired access token. Refer to our documentation for more information.`,
},
},
@@ -210,31 +152,33 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch
validators.ExactlyOneChild(),
},
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "linkedin-ads",
- ),
- },
- Description: `must be one of ["linkedin-ads"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -298,7 +242,7 @@ func (r *SourceLinkedinAdsResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceLinkedinAds(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -474,5 +418,5 @@ func (r *SourceLinkedinAdsResource) Delete(ctx context.Context, req resource.Del
}
func (r *SourceLinkedinAdsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_linkedinads_resource_sdk.go b/internal/provider/source_linkedinads_resource_sdk.go
old mode 100755
new mode 100644
index dcfecef45..93297c4ca
--- a/internal/provider/source_linkedinads_resource_sdk.go
+++ b/internal/provider/source_linkedinads_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -16,8 +16,8 @@ func (r *SourceLinkedinAdsResourceModel) ToCreateSDKType() *shared.SourceLinkedi
var adAnalyticsReports []shared.SourceLinkedinAdsAdAnalyticsReportConfiguration = nil
for _, adAnalyticsReportsItem := range r.Configuration.AdAnalyticsReports {
name := adAnalyticsReportsItem.Name.ValueString()
- pivotBy := shared.SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory(adAnalyticsReportsItem.PivotBy.ValueString())
- timeGranularity := shared.SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity(adAnalyticsReportsItem.TimeGranularity.ValueString())
+ pivotBy := shared.SourceLinkedinAdsPivotCategory(adAnalyticsReportsItem.PivotBy.ValueString())
+ timeGranularity := shared.SourceLinkedinAdsTimeGranularity(adAnalyticsReportsItem.TimeGranularity.ValueString())
adAnalyticsReports = append(adAnalyticsReports, shared.SourceLinkedinAdsAdAnalyticsReportConfiguration{
Name: name,
PivotBy: pivotBy,
@@ -26,58 +26,48 @@ func (r *SourceLinkedinAdsResourceModel) ToCreateSDKType() *shared.SourceLinkedi
}
var credentials *shared.SourceLinkedinAdsAuthentication
if r.Configuration.Credentials != nil {
- var sourceLinkedinAdsAuthenticationOAuth20 *shared.SourceLinkedinAdsAuthenticationOAuth20
- if r.Configuration.Credentials.SourceLinkedinAdsAuthenticationOAuth20 != nil {
- authMethod := new(shared.SourceLinkedinAdsAuthenticationOAuth20AuthMethod)
- if !r.Configuration.Credentials.SourceLinkedinAdsAuthenticationOAuth20.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceLinkedinAdsAuthenticationOAuth20.AuthMethod.IsNull() {
- *authMethod = shared.SourceLinkedinAdsAuthenticationOAuth20AuthMethod(r.Configuration.Credentials.SourceLinkedinAdsAuthenticationOAuth20.AuthMethod.ValueString())
- } else {
- authMethod = nil
- }
- clientID := r.Configuration.Credentials.SourceLinkedinAdsAuthenticationOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceLinkedinAdsAuthenticationOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceLinkedinAdsAuthenticationOAuth20.RefreshToken.ValueString()
- sourceLinkedinAdsAuthenticationOAuth20 = &shared.SourceLinkedinAdsAuthenticationOAuth20{
- AuthMethod: authMethod,
+ var sourceLinkedinAdsOAuth20 *shared.SourceLinkedinAdsOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ sourceLinkedinAdsOAuth20 = &shared.SourceLinkedinAdsOAuth20{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceLinkedinAdsAuthenticationOAuth20 != nil {
+ if sourceLinkedinAdsOAuth20 != nil {
credentials = &shared.SourceLinkedinAdsAuthentication{
- SourceLinkedinAdsAuthenticationOAuth20: sourceLinkedinAdsAuthenticationOAuth20,
+ SourceLinkedinAdsOAuth20: sourceLinkedinAdsOAuth20,
}
}
- var sourceLinkedinAdsAuthenticationAccessToken *shared.SourceLinkedinAdsAuthenticationAccessToken
- if r.Configuration.Credentials.SourceLinkedinAdsAuthenticationAccessToken != nil {
- accessToken := r.Configuration.Credentials.SourceLinkedinAdsAuthenticationAccessToken.AccessToken.ValueString()
- authMethod1 := new(shared.SourceLinkedinAdsAuthenticationAccessTokenAuthMethod)
- if !r.Configuration.Credentials.SourceLinkedinAdsAuthenticationAccessToken.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceLinkedinAdsAuthenticationAccessToken.AuthMethod.IsNull() {
- *authMethod1 = shared.SourceLinkedinAdsAuthenticationAccessTokenAuthMethod(r.Configuration.Credentials.SourceLinkedinAdsAuthenticationAccessToken.AuthMethod.ValueString())
- } else {
- authMethod1 = nil
- }
- sourceLinkedinAdsAuthenticationAccessToken = &shared.SourceLinkedinAdsAuthenticationAccessToken{
+ var sourceLinkedinAdsAccessToken *shared.SourceLinkedinAdsAccessToken
+ if r.Configuration.Credentials.AccessToken != nil {
+ accessToken := r.Configuration.Credentials.AccessToken.AccessToken.ValueString()
+ sourceLinkedinAdsAccessToken = &shared.SourceLinkedinAdsAccessToken{
AccessToken: accessToken,
- AuthMethod: authMethod1,
}
}
- if sourceLinkedinAdsAuthenticationAccessToken != nil {
+ if sourceLinkedinAdsAccessToken != nil {
credentials = &shared.SourceLinkedinAdsAuthentication{
- SourceLinkedinAdsAuthenticationAccessToken: sourceLinkedinAdsAuthenticationAccessToken,
+ SourceLinkedinAdsAccessToken: sourceLinkedinAdsAccessToken,
}
}
}
- sourceType := shared.SourceLinkedinAdsLinkedinAds(r.Configuration.SourceType.ValueString())
startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
configuration := shared.SourceLinkedinAds{
AccountIds: accountIds,
AdAnalyticsReports: adAnalyticsReports,
Credentials: credentials,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name1 := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -88,6 +78,7 @@ func (r *SourceLinkedinAdsResourceModel) ToCreateSDKType() *shared.SourceLinkedi
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceLinkedinAdsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name1,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -105,12 +96,12 @@ func (r *SourceLinkedinAdsResourceModel) ToUpdateSDKType() *shared.SourceLinkedi
for _, accountIdsItem := range r.Configuration.AccountIds {
accountIds = append(accountIds, accountIdsItem.ValueInt64())
}
- var adAnalyticsReports []shared.SourceLinkedinAdsUpdateAdAnalyticsReportConfiguration = nil
+ var adAnalyticsReports []shared.AdAnalyticsReportConfiguration = nil
for _, adAnalyticsReportsItem := range r.Configuration.AdAnalyticsReports {
name := adAnalyticsReportsItem.Name.ValueString()
- pivotBy := shared.SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory(adAnalyticsReportsItem.PivotBy.ValueString())
- timeGranularity := shared.SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity(adAnalyticsReportsItem.TimeGranularity.ValueString())
- adAnalyticsReports = append(adAnalyticsReports, shared.SourceLinkedinAdsUpdateAdAnalyticsReportConfiguration{
+ pivotBy := shared.PivotCategory(adAnalyticsReportsItem.PivotBy.ValueString())
+ timeGranularity := shared.TimeGranularity(adAnalyticsReportsItem.TimeGranularity.ValueString())
+ adAnalyticsReports = append(adAnalyticsReports, shared.AdAnalyticsReportConfiguration{
Name: name,
PivotBy: pivotBy,
TimeGranularity: timeGranularity,
@@ -118,46 +109,32 @@ func (r *SourceLinkedinAdsResourceModel) ToUpdateSDKType() *shared.SourceLinkedi
}
var credentials *shared.SourceLinkedinAdsUpdateAuthentication
if r.Configuration.Credentials != nil {
- var sourceLinkedinAdsUpdateAuthenticationOAuth20 *shared.SourceLinkedinAdsUpdateAuthenticationOAuth20
- if r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationOAuth20 != nil {
- authMethod := new(shared.SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod)
- if !r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationOAuth20.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationOAuth20.AuthMethod.IsNull() {
- *authMethod = shared.SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod(r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationOAuth20.AuthMethod.ValueString())
- } else {
- authMethod = nil
- }
- clientID := r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationOAuth20.RefreshToken.ValueString()
- sourceLinkedinAdsUpdateAuthenticationOAuth20 = &shared.SourceLinkedinAdsUpdateAuthenticationOAuth20{
- AuthMethod: authMethod,
+ var sourceLinkedinAdsUpdateOAuth20 *shared.SourceLinkedinAdsUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ sourceLinkedinAdsUpdateOAuth20 = &shared.SourceLinkedinAdsUpdateOAuth20{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceLinkedinAdsUpdateAuthenticationOAuth20 != nil {
+ if sourceLinkedinAdsUpdateOAuth20 != nil {
credentials = &shared.SourceLinkedinAdsUpdateAuthentication{
- SourceLinkedinAdsUpdateAuthenticationOAuth20: sourceLinkedinAdsUpdateAuthenticationOAuth20,
+ SourceLinkedinAdsUpdateOAuth20: sourceLinkedinAdsUpdateOAuth20,
}
}
- var sourceLinkedinAdsUpdateAuthenticationAccessToken *shared.SourceLinkedinAdsUpdateAuthenticationAccessToken
- if r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationAccessToken != nil {
- accessToken := r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationAccessToken.AccessToken.ValueString()
- authMethod1 := new(shared.SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod)
- if !r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationAccessToken.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationAccessToken.AuthMethod.IsNull() {
- *authMethod1 = shared.SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod(r.Configuration.Credentials.SourceLinkedinAdsUpdateAuthenticationAccessToken.AuthMethod.ValueString())
- } else {
- authMethod1 = nil
- }
- sourceLinkedinAdsUpdateAuthenticationAccessToken = &shared.SourceLinkedinAdsUpdateAuthenticationAccessToken{
- AccessToken: accessToken,
- AuthMethod: authMethod1,
+ var accessToken *shared.AccessToken
+ if r.Configuration.Credentials.AccessToken != nil {
+ accessToken1 := r.Configuration.Credentials.AccessToken.AccessToken.ValueString()
+ accessToken = &shared.AccessToken{
+ AccessToken: accessToken1,
}
}
- if sourceLinkedinAdsUpdateAuthenticationAccessToken != nil {
+ if accessToken != nil {
credentials = &shared.SourceLinkedinAdsUpdateAuthentication{
- SourceLinkedinAdsUpdateAuthenticationAccessToken: sourceLinkedinAdsUpdateAuthenticationAccessToken,
+ AccessToken: accessToken,
}
}
}
diff --git a/internal/provider/source_linkedinpages_data_source.go b/internal/provider/source_linkedinpages_data_source.go
old mode 100755
new mode 100644
index 4086a1ea3..60b88d1b1
--- a/internal/provider/source_linkedinpages_data_source.go
+++ b/internal/provider/source_linkedinpages_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceLinkedinPagesDataSource struct {
// SourceLinkedinPagesDataSourceModel describes the data model.
type SourceLinkedinPagesDataSourceModel struct {
- Configuration SourceLinkedinPages `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,130 +47,20 @@ func (r *SourceLinkedinPagesDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "SourceLinkedinPages DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_linkedin_pages_authentication_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_linkedin_pages_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oAuth2.0",
- ),
- },
- Description: `must be one of ["oAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The client ID of the LinkedIn developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret of the LinkedIn developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.`,
- },
- },
- },
- "source_linkedin_pages_update_authentication_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_linkedin_pages_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oAuth2.0",
- ),
- },
- Description: `must be one of ["oAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The client ID of the LinkedIn developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret of the LinkedIn developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "org_id": schema.StringAttribute{
- Computed: true,
- Description: `Specify the Organization ID`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "linkedin-pages",
- ),
- },
- Description: `must be one of ["linkedin-pages"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_linkedinpages_data_source_sdk.go b/internal/provider/source_linkedinpages_data_source_sdk.go
old mode 100755
new mode 100644
index e94180f02..e946c3abc
--- a/internal/provider/source_linkedinpages_data_source_sdk.go
+++ b/internal/provider/source_linkedinpages_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLinkedinPagesDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_linkedinpages_resource.go b/internal/provider/source_linkedinpages_resource.go
old mode 100755
new mode 100644
index c36890622..6822dab06
--- a/internal/provider/source_linkedinpages_resource.go
+++ b/internal/provider/source_linkedinpages_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceLinkedinPagesResource struct {
// SourceLinkedinPagesResourceModel describes the resource data model.
type SourceLinkedinPagesResourceModel struct {
Configuration SourceLinkedinPages `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,80 +59,19 @@ func (r *SourceLinkedinPagesResource) Schema(ctx context.Context, req resource.S
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_linkedin_pages_authentication_access_token": schema.SingleNestedAttribute{
+ "access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.`,
},
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
},
},
- "source_linkedin_pages_authentication_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oAuth2.0",
- ),
- },
- Description: `must be one of ["oAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The client ID of the LinkedIn developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The client secret of the LinkedIn developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.`,
- },
- },
- },
- "source_linkedin_pages_update_authentication_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.`,
- },
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_linkedin_pages_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oAuth2.0",
- ),
- },
- Description: `must be one of ["oAuth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The client ID of the LinkedIn developer application.`,
@@ -142,6 +82,7 @@ func (r *SourceLinkedinPagesResource) Schema(ctx context.Context, req resource.S
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.`,
},
},
@@ -155,24 +96,26 @@ func (r *SourceLinkedinPagesResource) Schema(ctx context.Context, req resource.S
Required: true,
Description: `Specify the Organization ID`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "linkedin-pages",
- ),
- },
- Description: `must be one of ["linkedin-pages"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -236,7 +179,7 @@ func (r *SourceLinkedinPagesResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceLinkedinPages(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -412,5 +355,5 @@ func (r *SourceLinkedinPagesResource) Delete(ctx context.Context, req resource.D
}
func (r *SourceLinkedinPagesResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_linkedinpages_resource_sdk.go b/internal/provider/source_linkedinpages_resource_sdk.go
old mode 100755
new mode 100644
index 270cfeaeb..072e3e2d6
--- a/internal/provider/source_linkedinpages_resource_sdk.go
+++ b/internal/provider/source_linkedinpages_resource_sdk.go
@@ -3,62 +3,52 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLinkedinPagesResourceModel) ToCreateSDKType() *shared.SourceLinkedinPagesCreateRequest {
var credentials *shared.SourceLinkedinPagesAuthentication
if r.Configuration.Credentials != nil {
- var sourceLinkedinPagesAuthenticationOAuth20 *shared.SourceLinkedinPagesAuthenticationOAuth20
- if r.Configuration.Credentials.SourceLinkedinPagesAuthenticationOAuth20 != nil {
- authMethod := new(shared.SourceLinkedinPagesAuthenticationOAuth20AuthMethod)
- if !r.Configuration.Credentials.SourceLinkedinPagesAuthenticationOAuth20.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceLinkedinPagesAuthenticationOAuth20.AuthMethod.IsNull() {
- *authMethod = shared.SourceLinkedinPagesAuthenticationOAuth20AuthMethod(r.Configuration.Credentials.SourceLinkedinPagesAuthenticationOAuth20.AuthMethod.ValueString())
- } else {
- authMethod = nil
- }
- clientID := r.Configuration.Credentials.SourceLinkedinPagesAuthenticationOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceLinkedinPagesAuthenticationOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceLinkedinPagesAuthenticationOAuth20.RefreshToken.ValueString()
- sourceLinkedinPagesAuthenticationOAuth20 = &shared.SourceLinkedinPagesAuthenticationOAuth20{
- AuthMethod: authMethod,
+ var sourceLinkedinPagesOAuth20 *shared.SourceLinkedinPagesOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ sourceLinkedinPagesOAuth20 = &shared.SourceLinkedinPagesOAuth20{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceLinkedinPagesAuthenticationOAuth20 != nil {
+ if sourceLinkedinPagesOAuth20 != nil {
credentials = &shared.SourceLinkedinPagesAuthentication{
- SourceLinkedinPagesAuthenticationOAuth20: sourceLinkedinPagesAuthenticationOAuth20,
+ SourceLinkedinPagesOAuth20: sourceLinkedinPagesOAuth20,
}
}
- var sourceLinkedinPagesAuthenticationAccessToken *shared.SourceLinkedinPagesAuthenticationAccessToken
- if r.Configuration.Credentials.SourceLinkedinPagesAuthenticationAccessToken != nil {
- accessToken := r.Configuration.Credentials.SourceLinkedinPagesAuthenticationAccessToken.AccessToken.ValueString()
- authMethod1 := new(shared.SourceLinkedinPagesAuthenticationAccessTokenAuthMethod)
- if !r.Configuration.Credentials.SourceLinkedinPagesAuthenticationAccessToken.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceLinkedinPagesAuthenticationAccessToken.AuthMethod.IsNull() {
- *authMethod1 = shared.SourceLinkedinPagesAuthenticationAccessTokenAuthMethod(r.Configuration.Credentials.SourceLinkedinPagesAuthenticationAccessToken.AuthMethod.ValueString())
- } else {
- authMethod1 = nil
- }
- sourceLinkedinPagesAuthenticationAccessToken = &shared.SourceLinkedinPagesAuthenticationAccessToken{
+ var sourceLinkedinPagesAccessToken *shared.SourceLinkedinPagesAccessToken
+ if r.Configuration.Credentials.AccessToken != nil {
+ accessToken := r.Configuration.Credentials.AccessToken.AccessToken.ValueString()
+ sourceLinkedinPagesAccessToken = &shared.SourceLinkedinPagesAccessToken{
AccessToken: accessToken,
- AuthMethod: authMethod1,
}
}
- if sourceLinkedinPagesAuthenticationAccessToken != nil {
+ if sourceLinkedinPagesAccessToken != nil {
credentials = &shared.SourceLinkedinPagesAuthentication{
- SourceLinkedinPagesAuthenticationAccessToken: sourceLinkedinPagesAuthenticationAccessToken,
+ SourceLinkedinPagesAccessToken: sourceLinkedinPagesAccessToken,
}
}
}
orgID := r.Configuration.OrgID.ValueString()
- sourceType := shared.SourceLinkedinPagesLinkedinPages(r.Configuration.SourceType.ValueString())
configuration := shared.SourceLinkedinPages{
Credentials: credentials,
OrgID: orgID,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -70,6 +60,7 @@ func (r *SourceLinkedinPagesResourceModel) ToCreateSDKType() *shared.SourceLinke
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceLinkedinPagesCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -85,46 +76,32 @@ func (r *SourceLinkedinPagesResourceModel) ToGetSDKType() *shared.SourceLinkedin
func (r *SourceLinkedinPagesResourceModel) ToUpdateSDKType() *shared.SourceLinkedinPagesPutRequest {
var credentials *shared.SourceLinkedinPagesUpdateAuthentication
if r.Configuration.Credentials != nil {
- var sourceLinkedinPagesUpdateAuthenticationOAuth20 *shared.SourceLinkedinPagesUpdateAuthenticationOAuth20
- if r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationOAuth20 != nil {
- authMethod := new(shared.SourceLinkedinPagesUpdateAuthenticationOAuth20AuthMethod)
- if !r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationOAuth20.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationOAuth20.AuthMethod.IsNull() {
- *authMethod = shared.SourceLinkedinPagesUpdateAuthenticationOAuth20AuthMethod(r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationOAuth20.AuthMethod.ValueString())
- } else {
- authMethod = nil
- }
- clientID := r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationOAuth20.RefreshToken.ValueString()
- sourceLinkedinPagesUpdateAuthenticationOAuth20 = &shared.SourceLinkedinPagesUpdateAuthenticationOAuth20{
- AuthMethod: authMethod,
+ var sourceLinkedinPagesUpdateOAuth20 *shared.SourceLinkedinPagesUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ sourceLinkedinPagesUpdateOAuth20 = &shared.SourceLinkedinPagesUpdateOAuth20{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceLinkedinPagesUpdateAuthenticationOAuth20 != nil {
+ if sourceLinkedinPagesUpdateOAuth20 != nil {
credentials = &shared.SourceLinkedinPagesUpdateAuthentication{
- SourceLinkedinPagesUpdateAuthenticationOAuth20: sourceLinkedinPagesUpdateAuthenticationOAuth20,
+ SourceLinkedinPagesUpdateOAuth20: sourceLinkedinPagesUpdateOAuth20,
}
}
- var sourceLinkedinPagesUpdateAuthenticationAccessToken *shared.SourceLinkedinPagesUpdateAuthenticationAccessToken
- if r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationAccessToken != nil {
- accessToken := r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationAccessToken.AccessToken.ValueString()
- authMethod1 := new(shared.SourceLinkedinPagesUpdateAuthenticationAccessTokenAuthMethod)
- if !r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationAccessToken.AuthMethod.IsUnknown() && !r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationAccessToken.AuthMethod.IsNull() {
- *authMethod1 = shared.SourceLinkedinPagesUpdateAuthenticationAccessTokenAuthMethod(r.Configuration.Credentials.SourceLinkedinPagesUpdateAuthenticationAccessToken.AuthMethod.ValueString())
- } else {
- authMethod1 = nil
- }
- sourceLinkedinPagesUpdateAuthenticationAccessToken = &shared.SourceLinkedinPagesUpdateAuthenticationAccessToken{
+ var sourceLinkedinPagesUpdateAccessToken *shared.SourceLinkedinPagesUpdateAccessToken
+ if r.Configuration.Credentials.AccessToken != nil {
+ accessToken := r.Configuration.Credentials.AccessToken.AccessToken.ValueString()
+ sourceLinkedinPagesUpdateAccessToken = &shared.SourceLinkedinPagesUpdateAccessToken{
AccessToken: accessToken,
- AuthMethod: authMethod1,
}
}
- if sourceLinkedinPagesUpdateAuthenticationAccessToken != nil {
+ if sourceLinkedinPagesUpdateAccessToken != nil {
credentials = &shared.SourceLinkedinPagesUpdateAuthentication{
- SourceLinkedinPagesUpdateAuthenticationAccessToken: sourceLinkedinPagesUpdateAuthenticationAccessToken,
+ SourceLinkedinPagesUpdateAccessToken: sourceLinkedinPagesUpdateAccessToken,
}
}
}
diff --git a/internal/provider/source_linnworks_data_source.go b/internal/provider/source_linnworks_data_source.go
old mode 100755
new mode 100644
index 9e5c7e90c..3d2fd7ed2
--- a/internal/provider/source_linnworks_data_source.go
+++ b/internal/provider/source_linnworks_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceLinnworksDataSource struct {
// SourceLinnworksDataSourceModel describes the data model.
type SourceLinnworksDataSourceModel struct {
- Configuration SourceLinnworks `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,48 +47,20 @@ func (r *SourceLinnworksDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceLinnworks DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "application_id": schema.StringAttribute{
- Computed: true,
- Description: `Linnworks Application ID`,
- },
- "application_secret": schema.StringAttribute{
- Computed: true,
- Description: `Linnworks Application Secret`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "linnworks",
- ),
- },
- Description: `must be one of ["linnworks"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- "token": schema.StringAttribute{
- Computed: true,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_linnworks_data_source_sdk.go b/internal/provider/source_linnworks_data_source_sdk.go
old mode 100755
new mode 100644
index dde5945cc..9653ed835
--- a/internal/provider/source_linnworks_data_source_sdk.go
+++ b/internal/provider/source_linnworks_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLinnworksDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_linnworks_resource.go b/internal/provider/source_linnworks_resource.go
old mode 100755
new mode 100644
index 7641f5322..ae5e03c74
--- a/internal/provider/source_linnworks_resource.go
+++ b/internal/provider/source_linnworks_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceLinnworksResource struct {
// SourceLinnworksResourceModel describes the resource data model.
type SourceLinnworksResourceModel struct {
Configuration SourceLinnworks `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -63,34 +64,37 @@ func (r *SourceLinnworksResource) Schema(ctx context.Context, req resource.Schem
Required: true,
Description: `Linnworks Application Secret`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "linnworks",
- ),
- },
- Description: `must be one of ["linnworks"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
"token": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Sensitive: true,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -154,7 +158,7 @@ func (r *SourceLinnworksResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceLinnworks(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -330,5 +334,5 @@ func (r *SourceLinnworksResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceLinnworksResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_linnworks_resource_sdk.go b/internal/provider/source_linnworks_resource_sdk.go
old mode 100755
new mode 100644
index e252ce722..ad6308f57
--- a/internal/provider/source_linnworks_resource_sdk.go
+++ b/internal/provider/source_linnworks_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -11,16 +11,20 @@ import (
func (r *SourceLinnworksResourceModel) ToCreateSDKType() *shared.SourceLinnworksCreateRequest {
applicationID := r.Configuration.ApplicationID.ValueString()
applicationSecret := r.Configuration.ApplicationSecret.ValueString()
- sourceType := shared.SourceLinnworksLinnworks(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
token := r.Configuration.Token.ValueString()
configuration := shared.SourceLinnworks{
ApplicationID: applicationID,
ApplicationSecret: applicationSecret,
- SourceType: sourceType,
StartDate: startDate,
Token: token,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -31,6 +35,7 @@ func (r *SourceLinnworksResourceModel) ToCreateSDKType() *shared.SourceLinnworks
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceLinnworksCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_lokalise_data_source.go b/internal/provider/source_lokalise_data_source.go
old mode 100755
new mode 100644
index fe1e24c68..89a2fa64f
--- a/internal/provider/source_lokalise_data_source.go
+++ b/internal/provider/source_lokalise_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceLokaliseDataSource struct {
// SourceLokaliseDataSourceModel describes the data model.
type SourceLokaliseDataSourceModel struct {
- Configuration SourceLokalise `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourceLokaliseDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceLokalise DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Lokalise API Key with read-access. Available at Profile settings > API tokens. See here.`,
- },
- "project_id": schema.StringAttribute{
- Computed: true,
- Description: `Lokalise project ID. Available at Project Settings > General.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "lokalise",
- ),
- },
- Description: `must be one of ["lokalise"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_lokalise_data_source_sdk.go b/internal/provider/source_lokalise_data_source_sdk.go
old mode 100755
new mode 100644
index 99ba39fcb..db48928f3
--- a/internal/provider/source_lokalise_data_source_sdk.go
+++ b/internal/provider/source_lokalise_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLokaliseDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_lokalise_resource.go b/internal/provider/source_lokalise_resource.go
old mode 100755
new mode 100644
index fc57f6c61..b900d9587
--- a/internal/provider/source_lokalise_resource.go
+++ b/internal/provider/source_lokalise_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceLokaliseResource struct {
// SourceLokaliseResourceModel describes the resource data model.
type SourceLokaliseResourceModel struct {
Configuration SourceLokalise `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,30 +56,33 @@ func (r *SourceLokaliseResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Lokalise API Key with read-access. Available at Profile settings > API tokens. See here.`,
},
"project_id": schema.StringAttribute{
Required: true,
Description: `Lokalise project ID. Available at Project Settings > General.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "lokalise",
- ),
- },
- Description: `must be one of ["lokalise"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceLokaliseResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceLokalise(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceLokaliseResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceLokaliseResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_lokalise_resource_sdk.go b/internal/provider/source_lokalise_resource_sdk.go
old mode 100755
new mode 100644
index 188d44d4b..ae25478fe
--- a/internal/provider/source_lokalise_resource_sdk.go
+++ b/internal/provider/source_lokalise_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceLokaliseResourceModel) ToCreateSDKType() *shared.SourceLokaliseCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
projectID := r.Configuration.ProjectID.ValueString()
- sourceType := shared.SourceLokaliseLokalise(r.Configuration.SourceType.ValueString())
configuration := shared.SourceLokalise{
- APIKey: apiKey,
- ProjectID: projectID,
- SourceType: sourceType,
+ APIKey: apiKey,
+ ProjectID: projectID,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceLokaliseResourceModel) ToCreateSDKType() *shared.SourceLokaliseCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceLokaliseCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_mailchimp_data_source.go b/internal/provider/source_mailchimp_data_source.go
old mode 100755
new mode 100644
index f82f09404..7553b5fb3
--- a/internal/provider/source_mailchimp_data_source.go
+++ b/internal/provider/source_mailchimp_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceMailchimpDataSource struct {
// SourceMailchimpDataSourceModel describes the data model.
type SourceMailchimpDataSourceModel struct {
- Configuration SourceMailchimp `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,129 +47,20 @@ func (r *SourceMailchimpDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceMailchimp DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "campaign_id": schema.StringAttribute{
- Computed: true,
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_mailchimp_authentication_api_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "apikey": schema.StringAttribute{
- Computed: true,
- Description: `Mailchimp API Key. See the docs for information on how to generate this key.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "apikey",
- ),
- },
- Description: `must be one of ["apikey"]`,
- },
- },
- },
- "source_mailchimp_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `An access token generated using the above client ID and secret.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- },
- },
- "source_mailchimp_update_authentication_api_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "apikey": schema.StringAttribute{
- Computed: true,
- Description: `Mailchimp API Key. See the docs for information on how to generate this key.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "apikey",
- ),
- },
- Description: `must be one of ["apikey"]`,
- },
- },
- },
- "source_mailchimp_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `An access token generated using the above client ID and secret.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mailchimp",
- ),
- },
- Description: `must be one of ["mailchimp"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_mailchimp_data_source_sdk.go b/internal/provider/source_mailchimp_data_source_sdk.go
old mode 100755
new mode 100644
index cc7786242..da4337eda
--- a/internal/provider/source_mailchimp_data_source_sdk.go
+++ b/internal/provider/source_mailchimp_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMailchimpDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_mailchimp_resource.go b/internal/provider/source_mailchimp_resource.go
old mode 100755
new mode 100644
index f85f0cc82..b12161a3d
--- a/internal/provider/source_mailchimp_resource.go
+++ b/internal/provider/source_mailchimp_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceMailchimpResource struct {
// SourceMailchimpResourceModel describes the resource data model.
type SourceMailchimpResourceModel struct {
Configuration SourceMailchimp `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -61,84 +62,24 @@ func (r *SourceMailchimpResource) Schema(ctx context.Context, req resource.Schem
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_mailchimp_authentication_api_key": schema.SingleNestedAttribute{
+ "api_key": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"apikey": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Mailchimp API Key. See the docs for information on how to generate this key.`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "apikey",
- ),
- },
- Description: `must be one of ["apikey"]`,
- },
},
},
- "source_mailchimp_authentication_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `An access token generated using the above client ID and secret.`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Optional: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Optional: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- },
- },
- "source_mailchimp_update_authentication_api_key": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "apikey": schema.StringAttribute{
- Required: true,
- Description: `Mailchimp API Key. See the docs for information on how to generate this key.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "apikey",
- ),
- },
- Description: `must be one of ["apikey"]`,
- },
- },
- },
- "source_mailchimp_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `An access token generated using the above client ID and secret.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Optional: true,
Description: `The Client ID of your OAuth application.`,
@@ -154,24 +95,26 @@ func (r *SourceMailchimpResource) Schema(ctx context.Context, req resource.Schem
validators.ExactlyOneChild(),
},
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mailchimp",
- ),
- },
- Description: `must be one of ["mailchimp"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -235,7 +178,7 @@ func (r *SourceMailchimpResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMailchimp(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -411,5 +354,5 @@ func (r *SourceMailchimpResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceMailchimpResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_mailchimp_resource_sdk.go b/internal/provider/source_mailchimp_resource_sdk.go
old mode 100755
new mode 100644
index 01eac8861..3db2f7482
--- a/internal/provider/source_mailchimp_resource_sdk.go
+++ b/internal/provider/source_mailchimp_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -16,54 +16,54 @@ func (r *SourceMailchimpResourceModel) ToCreateSDKType() *shared.SourceMailchimp
}
var credentials *shared.SourceMailchimpAuthentication
if r.Configuration.Credentials != nil {
- var sourceMailchimpAuthenticationOAuth20 *shared.SourceMailchimpAuthenticationOAuth20
- if r.Configuration.Credentials.SourceMailchimpAuthenticationOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceMailchimpAuthenticationOAuth20.AccessToken.ValueString()
- authType := shared.SourceMailchimpAuthenticationOAuth20AuthType(r.Configuration.Credentials.SourceMailchimpAuthenticationOAuth20.AuthType.ValueString())
+ var sourceMailchimpOAuth20 *shared.SourceMailchimpOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
clientID := new(string)
- if !r.Configuration.Credentials.SourceMailchimpAuthenticationOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceMailchimpAuthenticationOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceMailchimpAuthenticationOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceMailchimpAuthenticationOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceMailchimpAuthenticationOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceMailchimpAuthenticationOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- sourceMailchimpAuthenticationOAuth20 = &shared.SourceMailchimpAuthenticationOAuth20{
+ sourceMailchimpOAuth20 = &shared.SourceMailchimpOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
}
}
- if sourceMailchimpAuthenticationOAuth20 != nil {
+ if sourceMailchimpOAuth20 != nil {
credentials = &shared.SourceMailchimpAuthentication{
- SourceMailchimpAuthenticationOAuth20: sourceMailchimpAuthenticationOAuth20,
+ SourceMailchimpOAuth20: sourceMailchimpOAuth20,
}
}
- var sourceMailchimpAuthenticationAPIKey *shared.SourceMailchimpAuthenticationAPIKey
- if r.Configuration.Credentials.SourceMailchimpAuthenticationAPIKey != nil {
- apikey := r.Configuration.Credentials.SourceMailchimpAuthenticationAPIKey.Apikey.ValueString()
- authType1 := shared.SourceMailchimpAuthenticationAPIKeyAuthType(r.Configuration.Credentials.SourceMailchimpAuthenticationAPIKey.AuthType.ValueString())
- sourceMailchimpAuthenticationAPIKey = &shared.SourceMailchimpAuthenticationAPIKey{
- Apikey: apikey,
- AuthType: authType1,
+ var sourceMailchimpAPIKey *shared.SourceMailchimpAPIKey
+ if r.Configuration.Credentials.APIKey != nil {
+ apikey := r.Configuration.Credentials.APIKey.Apikey.ValueString()
+ sourceMailchimpAPIKey = &shared.SourceMailchimpAPIKey{
+ Apikey: apikey,
}
}
- if sourceMailchimpAuthenticationAPIKey != nil {
+ if sourceMailchimpAPIKey != nil {
credentials = &shared.SourceMailchimpAuthentication{
- SourceMailchimpAuthenticationAPIKey: sourceMailchimpAuthenticationAPIKey,
+ SourceMailchimpAPIKey: sourceMailchimpAPIKey,
}
}
}
- sourceType := shared.SourceMailchimpMailchimp(r.Configuration.SourceType.ValueString())
configuration := shared.SourceMailchimp{
CampaignID: campaignID,
Credentials: credentials,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -75,6 +75,7 @@ func (r *SourceMailchimpResourceModel) ToCreateSDKType() *shared.SourceMailchimp
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMailchimpCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -96,46 +97,42 @@ func (r *SourceMailchimpResourceModel) ToUpdateSDKType() *shared.SourceMailchimp
}
var credentials *shared.SourceMailchimpUpdateAuthentication
if r.Configuration.Credentials != nil {
- var sourceMailchimpUpdateAuthenticationOAuth20 *shared.SourceMailchimpUpdateAuthenticationOAuth20
- if r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationOAuth20.AccessToken.ValueString()
- authType := shared.SourceMailchimpUpdateAuthenticationOAuth20AuthType(r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationOAuth20.AuthType.ValueString())
+ var sourceMailchimpUpdateOAuth20 *shared.SourceMailchimpUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
clientID := new(string)
- if !r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- sourceMailchimpUpdateAuthenticationOAuth20 = &shared.SourceMailchimpUpdateAuthenticationOAuth20{
+ sourceMailchimpUpdateOAuth20 = &shared.SourceMailchimpUpdateOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
}
}
- if sourceMailchimpUpdateAuthenticationOAuth20 != nil {
+ if sourceMailchimpUpdateOAuth20 != nil {
credentials = &shared.SourceMailchimpUpdateAuthentication{
- SourceMailchimpUpdateAuthenticationOAuth20: sourceMailchimpUpdateAuthenticationOAuth20,
+ SourceMailchimpUpdateOAuth20: sourceMailchimpUpdateOAuth20,
}
}
- var sourceMailchimpUpdateAuthenticationAPIKey *shared.SourceMailchimpUpdateAuthenticationAPIKey
- if r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationAPIKey != nil {
- apikey := r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationAPIKey.Apikey.ValueString()
- authType1 := shared.SourceMailchimpUpdateAuthenticationAPIKeyAuthType(r.Configuration.Credentials.SourceMailchimpUpdateAuthenticationAPIKey.AuthType.ValueString())
- sourceMailchimpUpdateAuthenticationAPIKey = &shared.SourceMailchimpUpdateAuthenticationAPIKey{
- Apikey: apikey,
- AuthType: authType1,
+ var apiKey *shared.APIKey
+ if r.Configuration.Credentials.APIKey != nil {
+ apikey := r.Configuration.Credentials.APIKey.Apikey.ValueString()
+ apiKey = &shared.APIKey{
+ Apikey: apikey,
}
}
- if sourceMailchimpUpdateAuthenticationAPIKey != nil {
+ if apiKey != nil {
credentials = &shared.SourceMailchimpUpdateAuthentication{
- SourceMailchimpUpdateAuthenticationAPIKey: sourceMailchimpUpdateAuthenticationAPIKey,
+ APIKey: apiKey,
}
}
}
diff --git a/internal/provider/source_mailgun_data_source.go b/internal/provider/source_mailgun_data_source.go
old mode 100755
new mode 100644
index 7e8bf062a..75fdc8686
--- a/internal/provider/source_mailgun_data_source.go
+++ b/internal/provider/source_mailgun_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceMailgunDataSource struct {
// SourceMailgunDataSourceModel describes the data model.
type SourceMailgunDataSourceModel struct {
- Configuration SourceMailgun `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,45 +47,20 @@ func (r *SourceMailgunDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceMailgun DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "domain_region": schema.StringAttribute{
- Computed: true,
- Description: `Domain region code. 'EU' or 'US' are possible values. The default is 'US'.`,
- },
- "private_key": schema.StringAttribute{
- Computed: true,
- Description: `Primary account API key to access your Mailgun data.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mailgun",
- ),
- },
- Description: `must be one of ["mailgun"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2020-10-01 00:00:00. Any data before this date will not be replicated. If omitted, defaults to 3 days ago.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_mailgun_data_source_sdk.go b/internal/provider/source_mailgun_data_source_sdk.go
old mode 100755
new mode 100644
index feb6e9113..83e2dcb9e
--- a/internal/provider/source_mailgun_data_source_sdk.go
+++ b/internal/provider/source_mailgun_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMailgunDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_mailgun_resource.go b/internal/provider/source_mailgun_resource.go
old mode 100755
new mode 100644
index a076803d7..232f0fd59
--- a/internal/provider/source_mailgun_resource.go
+++ b/internal/provider/source_mailgun_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceMailgunResource struct {
// SourceMailgunResourceModel describes the resource data model.
type SourceMailgunResourceModel struct {
Configuration SourceMailgun `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,38 +57,42 @@ func (r *SourceMailgunResource) Schema(ctx context.Context, req resource.SchemaR
Required: true,
Attributes: map[string]schema.Attribute{
"domain_region": schema.StringAttribute{
- Optional: true,
- Description: `Domain region code. 'EU' or 'US' are possible values. The default is 'US'.`,
+ Optional: true,
+ MarkdownDescription: `Default: "US"` + "\n" +
+ `Domain region code. 'EU' or 'US' are possible values. The default is 'US'.`,
},
"private_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Primary account API key to access your Mailgun data.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mailgun",
- ),
- },
- Description: `must be one of ["mailgun"]`,
- },
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `UTC date and time in the format 2020-10-01 00:00:00. Any data before this date will not be replicated. If omitted, defaults to 3 days ago.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2020-10-01 00:00:00. Any data before this date will not be replicated. If omitted, defaults to 3 days ago.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -151,7 +156,7 @@ func (r *SourceMailgunResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMailgun(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -327,5 +332,5 @@ func (r *SourceMailgunResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceMailgunResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_mailgun_resource_sdk.go b/internal/provider/source_mailgun_resource_sdk.go
old mode 100755
new mode 100644
index 1896a9f7d..6e7e2254a
--- a/internal/provider/source_mailgun_resource_sdk.go
+++ b/internal/provider/source_mailgun_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -16,7 +16,6 @@ func (r *SourceMailgunResourceModel) ToCreateSDKType() *shared.SourceMailgunCrea
domainRegion = nil
}
privateKey := r.Configuration.PrivateKey.ValueString()
- sourceType := shared.SourceMailgunMailgun(r.Configuration.SourceType.ValueString())
startDate := new(time.Time)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
@@ -26,9 +25,14 @@ func (r *SourceMailgunResourceModel) ToCreateSDKType() *shared.SourceMailgunCrea
configuration := shared.SourceMailgun{
DomainRegion: domainRegion,
PrivateKey: privateKey,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -39,6 +43,7 @@ func (r *SourceMailgunResourceModel) ToCreateSDKType() *shared.SourceMailgunCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMailgunCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_mailjetsms_data_source.go b/internal/provider/source_mailjetsms_data_source.go
old mode 100755
new mode 100644
index 05a857404..ade8aedf1
--- a/internal/provider/source_mailjetsms_data_source.go
+++ b/internal/provider/source_mailjetsms_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceMailjetSmsDataSource struct {
// SourceMailjetSmsDataSourceModel describes the data model.
type SourceMailjetSmsDataSourceModel struct {
- Configuration SourceMailjetSms `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,42 +47,20 @@ func (r *SourceMailjetSmsDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceMailjetSms DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "end_date": schema.Int64Attribute{
- Computed: true,
- Description: `Retrieve SMS messages created before the specified timestamp. Required format - Unix timestamp.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mailjet-sms",
- ),
- },
- Description: `must be one of ["mailjet-sms"]`,
- },
- "start_date": schema.Int64Attribute{
- Computed: true,
- Description: `Retrieve SMS messages created after the specified timestamp. Required format - Unix timestamp.`,
- },
- "token": schema.StringAttribute{
- Computed: true,
- Description: `Your access token. See here.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_mailjetsms_data_source_sdk.go b/internal/provider/source_mailjetsms_data_source_sdk.go
old mode 100755
new mode 100644
index c21a5c880..ed66bc585
--- a/internal/provider/source_mailjetsms_data_source_sdk.go
+++ b/internal/provider/source_mailjetsms_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMailjetSmsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_mailjetsms_resource.go b/internal/provider/source_mailjetsms_resource.go
old mode 100755
new mode 100644
index a5146aaf5..e60316062
--- a/internal/provider/source_mailjetsms_resource.go
+++ b/internal/provider/source_mailjetsms_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceMailjetSmsResource struct {
// SourceMailjetSmsResourceModel describes the resource data model.
type SourceMailjetSmsResourceModel struct {
Configuration SourceMailjetSms `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,32 +58,35 @@ func (r *SourceMailjetSmsResource) Schema(ctx context.Context, req resource.Sche
Optional: true,
Description: `Retrieve SMS messages created before the specified timestamp. Required format - Unix timestamp.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mailjet-sms",
- ),
- },
- Description: `must be one of ["mailjet-sms"]`,
- },
"start_date": schema.Int64Attribute{
Optional: true,
Description: `Retrieve SMS messages created after the specified timestamp. Required format - Unix timestamp.`,
},
"token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your access token. See here.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +150,7 @@ func (r *SourceMailjetSmsResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMailjetSms(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +326,5 @@ func (r *SourceMailjetSmsResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceMailjetSmsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_mailjetsms_resource_sdk.go b/internal/provider/source_mailjetsms_resource_sdk.go
old mode 100755
new mode 100644
index 6c96f394b..936500113
--- a/internal/provider/source_mailjetsms_resource_sdk.go
+++ b/internal/provider/source_mailjetsms_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -14,7 +14,6 @@ func (r *SourceMailjetSmsResourceModel) ToCreateSDKType() *shared.SourceMailjetS
} else {
endDate = nil
}
- sourceType := shared.SourceMailjetSmsMailjetSms(r.Configuration.SourceType.ValueString())
startDate := new(int64)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate = r.Configuration.StartDate.ValueInt64()
@@ -23,10 +22,15 @@ func (r *SourceMailjetSmsResourceModel) ToCreateSDKType() *shared.SourceMailjetS
}
token := r.Configuration.Token.ValueString()
configuration := shared.SourceMailjetSms{
- EndDate: endDate,
- SourceType: sourceType,
- StartDate: startDate,
- Token: token,
+ EndDate: endDate,
+ StartDate: startDate,
+ Token: token,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -38,6 +42,7 @@ func (r *SourceMailjetSmsResourceModel) ToCreateSDKType() *shared.SourceMailjetS
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMailjetSmsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_marketo_data_source.go b/internal/provider/source_marketo_data_source.go
old mode 100755
new mode 100644
index 7fb1f386c..412bf1cd8
--- a/internal/provider/source_marketo_data_source.go
+++ b/internal/provider/source_marketo_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceMarketoDataSource struct {
// SourceMarketoDataSourceModel describes the data model.
type SourceMarketoDataSourceModel struct {
- Configuration SourceMarketo `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,49 +47,20 @@ func (r *SourceMarketoDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceMarketo DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Marketo developer application. See the docs for info on how to obtain this.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Marketo developer application. See the docs for info on how to obtain this.`,
- },
- "domain_url": schema.StringAttribute{
- Computed: true,
- Description: `Your Marketo Base URL. See the docs for info on how to obtain this.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "marketo",
- ),
- },
- Description: `must be one of ["marketo"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_marketo_data_source_sdk.go b/internal/provider/source_marketo_data_source_sdk.go
old mode 100755
new mode 100644
index f51c96bb9..908976346
--- a/internal/provider/source_marketo_data_source_sdk.go
+++ b/internal/provider/source_marketo_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMarketoDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_marketo_resource.go b/internal/provider/source_marketo_resource.go
old mode 100755
new mode 100644
index 366fd5332..3b05fe74d
--- a/internal/provider/source_marketo_resource.go
+++ b/internal/provider/source_marketo_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceMarketoResource struct {
// SourceMarketoResourceModel describes the resource data model.
type SourceMarketoResourceModel struct {
Configuration SourceMarketo `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -67,31 +68,33 @@ func (r *SourceMarketoResource) Schema(ctx context.Context, req resource.SchemaR
Required: true,
Description: `Your Marketo Base URL. See the docs for info on how to obtain this.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "marketo",
- ),
- },
- Description: `must be one of ["marketo"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +158,7 @@ func (r *SourceMarketoResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMarketo(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +334,5 @@ func (r *SourceMarketoResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceMarketoResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_marketo_resource_sdk.go b/internal/provider/source_marketo_resource_sdk.go
old mode 100755
new mode 100644
index 38bd15d4f..303c2a98a
--- a/internal/provider/source_marketo_resource_sdk.go
+++ b/internal/provider/source_marketo_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -12,15 +12,19 @@ func (r *SourceMarketoResourceModel) ToCreateSDKType() *shared.SourceMarketoCrea
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
domainURL := r.Configuration.DomainURL.ValueString()
- sourceType := shared.SourceMarketoMarketo(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceMarketo{
ClientID: clientID,
ClientSecret: clientSecret,
DomainURL: domainURL,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -31,6 +35,7 @@ func (r *SourceMarketoResourceModel) ToCreateSDKType() *shared.SourceMarketoCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMarketoCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_metabase_data_source.go b/internal/provider/source_metabase_data_source.go
old mode 100755
new mode 100644
index e4d597e12..ac6bfa2a1
--- a/internal/provider/source_metabase_data_source.go
+++ b/internal/provider/source_metabase_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceMetabaseDataSource struct {
// SourceMetabaseDataSourceModel describes the data model.
type SourceMetabaseDataSourceModel struct {
- Configuration SourceMetabase `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,49 +47,20 @@ func (r *SourceMetabaseDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceMetabase DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "instance_api_url": schema.StringAttribute{
- Computed: true,
- Description: `URL to your metabase instance API`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- },
- "session_token": schema.StringAttribute{
- Computed: true,
- MarkdownDescription: `To generate your session token, you need to run the following command: ` + "```" + ` curl -X POST \` + "\n" +
- ` -H "Content-Type: application/json" \` + "\n" +
- ` -d '{"username": "person@metabase.com", "password": "fakepassword"}' \` + "\n" +
- ` http://localhost:3000/api/session` + "\n" +
- `` + "```" + ` Then copy the value of the ` + "`" + `id` + "`" + ` field returned by a successful call to that API.` + "\n" +
- `Note that by default, sessions are good for 14 days and needs to be regenerated.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "metabase",
- ),
- },
- Description: `must be one of ["metabase"]`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_metabase_data_source_sdk.go b/internal/provider/source_metabase_data_source_sdk.go
old mode 100755
new mode 100644
index ba6d2ee15..8c47f5202
--- a/internal/provider/source_metabase_data_source_sdk.go
+++ b/internal/provider/source_metabase_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMetabaseDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_metabase_resource.go b/internal/provider/source_metabase_resource.go
old mode 100755
new mode 100644
index 1cfb98f48..15b289771
--- a/internal/provider/source_metabase_resource.go
+++ b/internal/provider/source_metabase_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceMetabaseResource struct {
// SourceMetabaseResourceModel describes the resource data model.
type SourceMetabaseResourceModel struct {
Configuration SourceMetabase `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -59,10 +59,12 @@ func (r *SourceMetabaseResource) Schema(ctx context.Context, req resource.Schema
Description: `URL to your metabase instance API`,
},
"password": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Sensitive: true,
},
"session_token": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Sensitive: true,
MarkdownDescription: `To generate your session token, you need to run the following command: ` + "```" + ` curl -X POST \` + "\n" +
` -H "Content-Type: application/json" \` + "\n" +
` -d '{"username": "person@metabase.com", "password": "fakepassword"}' \` + "\n" +
@@ -70,27 +72,29 @@ func (r *SourceMetabaseResource) Schema(ctx context.Context, req resource.Schema
`` + "```" + ` Then copy the value of the ` + "`" + `id` + "`" + ` field returned by a successful call to that API.` + "\n" +
`Note that by default, sessions are good for 14 days and needs to be regenerated.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "metabase",
- ),
- },
- Description: `must be one of ["metabase"]`,
- },
"username": schema.StringAttribute{
Optional: true,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -154,7 +158,7 @@ func (r *SourceMetabaseResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMetabase(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -330,5 +334,5 @@ func (r *SourceMetabaseResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceMetabaseResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_metabase_resource_sdk.go b/internal/provider/source_metabase_resource_sdk.go
old mode 100755
new mode 100644
index 7956843e0..11628c6fc
--- a/internal/provider/source_metabase_resource_sdk.go
+++ b/internal/provider/source_metabase_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -21,7 +21,6 @@ func (r *SourceMetabaseResourceModel) ToCreateSDKType() *shared.SourceMetabaseCr
} else {
sessionToken = nil
}
- sourceType := shared.SourceMetabaseMetabase(r.Configuration.SourceType.ValueString())
username := new(string)
if !r.Configuration.Username.IsUnknown() && !r.Configuration.Username.IsNull() {
*username = r.Configuration.Username.ValueString()
@@ -32,9 +31,14 @@ func (r *SourceMetabaseResourceModel) ToCreateSDKType() *shared.SourceMetabaseCr
InstanceAPIURL: instanceAPIURL,
Password: password,
SessionToken: sessionToken,
- SourceType: sourceType,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -45,6 +49,7 @@ func (r *SourceMetabaseResourceModel) ToCreateSDKType() *shared.SourceMetabaseCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMetabaseCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_microsoftteams_data_source.go b/internal/provider/source_microsoftteams_data_source.go
old mode 100755
new mode 100644
index 1b0ba90c1..d009d22b2
--- a/internal/provider/source_microsoftteams_data_source.go
+++ b/internal/provider/source_microsoftteams_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceMicrosoftTeamsDataSource struct {
// SourceMicrosoftTeamsDataSourceModel describes the data model.
type SourceMicrosoftTeamsDataSourceModel struct {
- Configuration SourceMicrosoftTeams `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,159 +47,20 @@ func (r *SourceMicrosoftTeamsDataSource) Schema(ctx context.Context, req datasou
MarkdownDescription: "SourceMicrosoftTeams DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Microsoft Teams developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Microsoft Teams developer application.`,
- },
- "tenant_id": schema.StringAttribute{
- Computed: true,
- Description: `A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL`,
- },
- },
- Description: `Choose how to authenticate to Microsoft`,
- },
- "source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Microsoft Teams developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Microsoft Teams developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `A Refresh Token to renew the expired Access Token.`,
- },
- "tenant_id": schema.StringAttribute{
- Computed: true,
- Description: `A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL`,
- },
- },
- Description: `Choose how to authenticate to Microsoft`,
- },
- "source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Microsoft Teams developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Microsoft Teams developer application.`,
- },
- "tenant_id": schema.StringAttribute{
- Computed: true,
- Description: `A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL`,
- },
- },
- Description: `Choose how to authenticate to Microsoft`,
- },
- "source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Microsoft Teams developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Microsoft Teams developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `A Refresh Token to renew the expired Access Token.`,
- },
- "tenant_id": schema.StringAttribute{
- Computed: true,
- Description: `A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL`,
- },
- },
- Description: `Choose how to authenticate to Microsoft`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Choose how to authenticate to Microsoft`,
- },
- "period": schema.StringAttribute{
- Computed: true,
- Description: `Specifies the length of time over which the Team Device Report stream is aggregated. The supported values are: D7, D30, D90, and D180.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "microsoft-teams",
- ),
- },
- Description: `must be one of ["microsoft-teams"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_microsoftteams_data_source_sdk.go b/internal/provider/source_microsoftteams_data_source_sdk.go
old mode 100755
new mode 100644
index 202645a30..f5bfce54d
--- a/internal/provider/source_microsoftteams_data_source_sdk.go
+++ b/internal/provider/source_microsoftteams_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMicrosoftTeamsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_microsoftteams_resource.go b/internal/provider/source_microsoftteams_resource.go
old mode 100755
new mode 100644
index a10145d77..af0c22920
--- a/internal/provider/source_microsoftteams_resource.go
+++ b/internal/provider/source_microsoftteams_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceMicrosoftTeamsResource struct {
// SourceMicrosoftTeamsResourceModel describes the resource data model.
type SourceMicrosoftTeamsResourceModel struct {
Configuration SourceMicrosoftTeams `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,18 +59,9 @@ func (r *SourceMicrosoftTeamsResource) Schema(ctx context.Context, req resource.
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft": schema.SingleNestedAttribute{
+ "authenticate_via_microsoft": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of your Microsoft Teams developer application.`,
@@ -85,76 +77,9 @@ func (r *SourceMicrosoftTeamsResource) Schema(ctx context.Context, req resource.
},
Description: `Choose how to authenticate to Microsoft`,
},
- "source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0": schema.SingleNestedAttribute{
+ "authenticate_via_microsoft_o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your Microsoft Teams developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your Microsoft Teams developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `A Refresh Token to renew the expired Access Token.`,
- },
- "tenant_id": schema.StringAttribute{
- Required: true,
- Description: `A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL`,
- },
- },
- Description: `Choose how to authenticate to Microsoft`,
- },
- "source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your Microsoft Teams developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your Microsoft Teams developer application.`,
- },
- "tenant_id": schema.StringAttribute{
- Required: true,
- Description: `A globally unique identifier (GUID) that is different than your organization name or domain. Follow these steps to obtain: open one of the Teams where you belong inside the Teams Application -> Click on the … next to the Team title -> Click on Get link to team -> Copy the link to the team and grab the tenant ID form the URL`,
- },
- },
- Description: `Choose how to authenticate to Microsoft`,
- },
- "source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of your Microsoft Teams developer application.`,
@@ -165,6 +90,7 @@ func (r *SourceMicrosoftTeamsResource) Schema(ctx context.Context, req resource.
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `A Refresh Token to renew the expired Access Token.`,
},
"tenant_id": schema.StringAttribute{
@@ -175,33 +101,35 @@ func (r *SourceMicrosoftTeamsResource) Schema(ctx context.Context, req resource.
Description: `Choose how to authenticate to Microsoft`,
},
},
+ Description: `Choose how to authenticate to Microsoft`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Choose how to authenticate to Microsoft`,
},
"period": schema.StringAttribute{
Required: true,
Description: `Specifies the length of time over which the Team Device Report stream is aggregated. The supported values are: D7, D30, D90, and D180.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "microsoft-teams",
- ),
- },
- Description: `must be one of ["microsoft-teams"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -265,7 +193,7 @@ func (r *SourceMicrosoftTeamsResource) Create(ctx context.Context, req resource.
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMicrosoftTeams(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -441,5 +369,5 @@ func (r *SourceMicrosoftTeamsResource) Delete(ctx context.Context, req resource.
}
func (r *SourceMicrosoftTeamsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_microsoftteams_resource_sdk.go b/internal/provider/source_microsoftteams_resource_sdk.go
old mode 100755
new mode 100644
index ce1c8429d..b5ac9c68c
--- a/internal/provider/source_microsoftteams_resource_sdk.go
+++ b/internal/provider/source_microsoftteams_resource_sdk.go
@@ -3,68 +3,58 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMicrosoftTeamsResourceModel) ToCreateSDKType() *shared.SourceMicrosoftTeamsCreateRequest {
var credentials *shared.SourceMicrosoftTeamsAuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 *shared.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20
- if r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 != nil {
- authType := new(shared.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType)
- if !r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.AuthType.IsNull() {
- *authType = shared.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType(r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.RefreshToken.ValueString()
- tenantID := r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.TenantID.ValueString()
- sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 = &shared.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20{
- AuthType: authType,
+ var sourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20 *shared.SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20
+ if r.Configuration.Credentials.AuthenticateViaMicrosoftOAuth20 != nil {
+ clientID := r.Configuration.Credentials.AuthenticateViaMicrosoftOAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaMicrosoftOAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaMicrosoftOAuth20.RefreshToken.ValueString()
+ tenantID := r.Configuration.Credentials.AuthenticateViaMicrosoftOAuth20.TenantID.ValueString()
+ sourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20 = &shared.SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TenantID: tenantID,
}
}
- if sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 != nil {
+ if sourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20 != nil {
credentials = &shared.SourceMicrosoftTeamsAuthenticationMechanism{
- SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20: sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20,
+ SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20: sourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20,
}
}
- var sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft *shared.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft
- if r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft != nil {
- authType1 := new(shared.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftAuthType)
- if !r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft.AuthType.IsNull() {
- *authType1 = shared.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftAuthType(r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- clientId1 := r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft.ClientID.ValueString()
- clientSecret1 := r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft.ClientSecret.ValueString()
- tenantId1 := r.Configuration.Credentials.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft.TenantID.ValueString()
- sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft = &shared.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft{
- AuthType: authType1,
+ var sourceMicrosoftTeamsAuthenticateViaMicrosoft *shared.SourceMicrosoftTeamsAuthenticateViaMicrosoft
+ if r.Configuration.Credentials.AuthenticateViaMicrosoft != nil {
+ clientId1 := r.Configuration.Credentials.AuthenticateViaMicrosoft.ClientID.ValueString()
+ clientSecret1 := r.Configuration.Credentials.AuthenticateViaMicrosoft.ClientSecret.ValueString()
+ tenantId1 := r.Configuration.Credentials.AuthenticateViaMicrosoft.TenantID.ValueString()
+ sourceMicrosoftTeamsAuthenticateViaMicrosoft = &shared.SourceMicrosoftTeamsAuthenticateViaMicrosoft{
ClientID: clientId1,
ClientSecret: clientSecret1,
TenantID: tenantId1,
}
}
- if sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft != nil {
+ if sourceMicrosoftTeamsAuthenticateViaMicrosoft != nil {
credentials = &shared.SourceMicrosoftTeamsAuthenticationMechanism{
- SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft: sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft,
+ SourceMicrosoftTeamsAuthenticateViaMicrosoft: sourceMicrosoftTeamsAuthenticateViaMicrosoft,
}
}
}
period := r.Configuration.Period.ValueString()
- sourceType := shared.SourceMicrosoftTeamsMicrosoftTeams(r.Configuration.SourceType.ValueString())
configuration := shared.SourceMicrosoftTeams{
Credentials: credentials,
Period: period,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -76,6 +66,7 @@ func (r *SourceMicrosoftTeamsResourceModel) ToCreateSDKType() *shared.SourceMicr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMicrosoftTeamsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -91,52 +82,38 @@ func (r *SourceMicrosoftTeamsResourceModel) ToGetSDKType() *shared.SourceMicroso
func (r *SourceMicrosoftTeamsResourceModel) ToUpdateSDKType() *shared.SourceMicrosoftTeamsPutRequest {
var credentials *shared.SourceMicrosoftTeamsUpdateAuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 *shared.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20
- if r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 != nil {
- authType := new(shared.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType)
- if !r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.AuthType.IsNull() {
- *authType = shared.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType(r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.RefreshToken.ValueString()
- tenantID := r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20.TenantID.ValueString()
- sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 = &shared.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20{
- AuthType: authType,
+ var authenticateViaMicrosoftOAuth20 *shared.AuthenticateViaMicrosoftOAuth20
+ if r.Configuration.Credentials.AuthenticateViaMicrosoftOAuth20 != nil {
+ clientID := r.Configuration.Credentials.AuthenticateViaMicrosoftOAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaMicrosoftOAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaMicrosoftOAuth20.RefreshToken.ValueString()
+ tenantID := r.Configuration.Credentials.AuthenticateViaMicrosoftOAuth20.TenantID.ValueString()
+ authenticateViaMicrosoftOAuth20 = &shared.AuthenticateViaMicrosoftOAuth20{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TenantID: tenantID,
}
}
- if sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 != nil {
+ if authenticateViaMicrosoftOAuth20 != nil {
credentials = &shared.SourceMicrosoftTeamsUpdateAuthenticationMechanism{
- SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20: sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20,
+ AuthenticateViaMicrosoftOAuth20: authenticateViaMicrosoftOAuth20,
}
}
- var sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft *shared.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft
- if r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft != nil {
- authType1 := new(shared.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftAuthType)
- if !r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft.AuthType.IsNull() {
- *authType1 = shared.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftAuthType(r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- clientId1 := r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft.ClientID.ValueString()
- clientSecret1 := r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft.ClientSecret.ValueString()
- tenantId1 := r.Configuration.Credentials.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft.TenantID.ValueString()
- sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft = &shared.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft{
- AuthType: authType1,
+ var authenticateViaMicrosoft *shared.AuthenticateViaMicrosoft
+ if r.Configuration.Credentials.AuthenticateViaMicrosoft != nil {
+ clientId1 := r.Configuration.Credentials.AuthenticateViaMicrosoft.ClientID.ValueString()
+ clientSecret1 := r.Configuration.Credentials.AuthenticateViaMicrosoft.ClientSecret.ValueString()
+ tenantId1 := r.Configuration.Credentials.AuthenticateViaMicrosoft.TenantID.ValueString()
+ authenticateViaMicrosoft = &shared.AuthenticateViaMicrosoft{
ClientID: clientId1,
ClientSecret: clientSecret1,
TenantID: tenantId1,
}
}
- if sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft != nil {
+ if authenticateViaMicrosoft != nil {
credentials = &shared.SourceMicrosoftTeamsUpdateAuthenticationMechanism{
- SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft: sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft,
+ AuthenticateViaMicrosoft: authenticateViaMicrosoft,
}
}
}
diff --git a/internal/provider/source_mixpanel_data_source.go b/internal/provider/source_mixpanel_data_source.go
old mode 100755
new mode 100644
index 80de35b4c..8ac41c5dd
--- a/internal/provider/source_mixpanel_data_source.go
+++ b/internal/provider/source_mixpanel_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceMixpanelDataSource struct {
// SourceMixpanelDataSourceModel describes the data model.
type SourceMixpanelDataSourceModel struct {
- Configuration SourceMixpanel `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,168 +47,20 @@ func (r *SourceMixpanelDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceMixpanel DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "attribution_window": schema.Int64Attribute{
- Computed: true,
- Description: ` A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days.`,
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_mixpanel_authentication_wildcard_project_secret": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_secret": schema.StringAttribute{
- Computed: true,
- Description: `Mixpanel project secret. See the docs for more information on how to obtain this.`,
- },
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Project Secret",
- ),
- },
- Description: `must be one of ["Project Secret"]`,
- },
- },
- Description: `Choose how to authenticate to Mixpanel`,
- },
- "source_mixpanel_authentication_wildcard_service_account": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service Account",
- ),
- },
- Description: `must be one of ["Service Account"]`,
- },
- "secret": schema.StringAttribute{
- Computed: true,
- Description: `Mixpanel Service Account Secret. See the docs for more information on how to obtain this.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Mixpanel Service Account Username. See the docs for more information on how to obtain this.`,
- },
- },
- Description: `Choose how to authenticate to Mixpanel`,
- },
- "source_mixpanel_update_authentication_wildcard_project_secret": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_secret": schema.StringAttribute{
- Computed: true,
- Description: `Mixpanel project secret. See the docs for more information on how to obtain this.`,
- },
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Project Secret",
- ),
- },
- Description: `must be one of ["Project Secret"]`,
- },
- },
- Description: `Choose how to authenticate to Mixpanel`,
- },
- "source_mixpanel_update_authentication_wildcard_service_account": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service Account",
- ),
- },
- Description: `must be one of ["Service Account"]`,
- },
- "secret": schema.StringAttribute{
- Computed: true,
- Description: `Mixpanel Service Account Secret. See the docs for more information on how to obtain this.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Mixpanel Service Account Username. See the docs for more information on how to obtain this.`,
- },
- },
- Description: `Choose how to authenticate to Mixpanel`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Choose how to authenticate to Mixpanel`,
- },
- "date_window_size": schema.Int64Attribute{
- Computed: true,
- Description: `Defines window size in days, that used to slice through data. You can reduce it, if amount of data in each window is too big for your environment.`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date`,
- },
- "project_id": schema.Int64Attribute{
- Computed: true,
- Description: `Your project ID number. See the docs for more information on how to obtain this.`,
- },
- "project_timezone": schema.StringAttribute{
- Computed: true,
- Description: `Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console.`,
- },
- "region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "US",
- "EU",
- ),
- },
- MarkdownDescription: `must be one of ["US", "EU"]` + "\n" +
- `The region of mixpanel domain instance either US or EU.`,
- },
- "select_properties_by_default": schema.BoolAttribute{
- Computed: true,
- Description: `Setting this config parameter to TRUE ensures that new properties on events and engage records are captured. Otherwise new properties will be ignored.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mixpanel",
- ),
- },
- Description: `must be one of ["mixpanel"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The date in the format YYYY-MM-DD. Any data before this date will not be replicated. If this option is not set, the connector will replicate data from up to one year ago by default.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_mixpanel_data_source_sdk.go b/internal/provider/source_mixpanel_data_source_sdk.go
old mode 100755
new mode 100644
index cc5f70289..f5fb44052
--- a/internal/provider/source_mixpanel_data_source_sdk.go
+++ b/internal/provider/source_mixpanel_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMixpanelDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_mixpanel_resource.go b/internal/provider/source_mixpanel_resource.go
old mode 100755
new mode 100644
index ee9121a43..2bb6f2ee1
--- a/internal/provider/source_mixpanel_resource.go
+++ b/internal/provider/source_mixpanel_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceMixpanelResource struct {
// SourceMixpanelResourceModel describes the resource data model.
type SourceMixpanelResourceModel struct {
Configuration SourceMixpanel `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,84 +58,29 @@ func (r *SourceMixpanelResource) Schema(ctx context.Context, req resource.Schema
Required: true,
Attributes: map[string]schema.Attribute{
"attribution_window": schema.Int64Attribute{
- Optional: true,
- Description: ` A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days.`,
+ Optional: true,
+ MarkdownDescription: `Default: 5` + "\n" +
+ `A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days. (This value should be non-negative integer)`,
},
"credentials": schema.SingleNestedAttribute{
- Optional: true,
+ Required: true,
Attributes: map[string]schema.Attribute{
- "source_mixpanel_authentication_wildcard_project_secret": schema.SingleNestedAttribute{
+ "project_secret": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"api_secret": schema.StringAttribute{
Required: true,
Description: `Mixpanel project secret. See the docs for more information on how to obtain this.`,
},
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Project Secret",
- ),
- },
- Description: `must be one of ["Project Secret"]`,
- },
},
Description: `Choose how to authenticate to Mixpanel`,
},
- "source_mixpanel_authentication_wildcard_service_account": schema.SingleNestedAttribute{
+ "service_account": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service Account",
- ),
- },
- Description: `must be one of ["Service Account"]`,
- },
- "secret": schema.StringAttribute{
+ "project_id": schema.Int64Attribute{
Required: true,
- Description: `Mixpanel Service Account Secret. See the docs for more information on how to obtain this.`,
- },
- "username": schema.StringAttribute{
- Required: true,
- Description: `Mixpanel Service Account Username. See the docs for more information on how to obtain this.`,
- },
- },
- Description: `Choose how to authenticate to Mixpanel`,
- },
- "source_mixpanel_update_authentication_wildcard_project_secret": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_secret": schema.StringAttribute{
- Required: true,
- Description: `Mixpanel project secret. See the docs for more information on how to obtain this.`,
- },
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Project Secret",
- ),
- },
- Description: `must be one of ["Project Secret"]`,
- },
- },
- Description: `Choose how to authenticate to Mixpanel`,
- },
- "source_mixpanel_update_authentication_wildcard_service_account": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "option_title": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Service Account",
- ),
- },
- Description: `must be one of ["Service Account"]`,
+ Description: `Your project ID number. See the docs for more information on how to obtain this.`,
},
"secret": schema.StringAttribute{
Required: true,
@@ -147,70 +94,71 @@ func (r *SourceMixpanelResource) Schema(ctx context.Context, req resource.Schema
Description: `Choose how to authenticate to Mixpanel`,
},
},
+ Description: `Choose how to authenticate to Mixpanel`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Choose how to authenticate to Mixpanel`,
},
"date_window_size": schema.Int64Attribute{
- Optional: true,
- Description: `Defines window size in days, that used to slice through data. You can reduce it, if amount of data in each window is too big for your environment.`,
+ Optional: true,
+ MarkdownDescription: `Default: 30` + "\n" +
+ `Defines window size in days, that used to slice through data. You can reduce it, if amount of data in each window is too big for your environment. (This value should be positive integer)`,
},
"end_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date`,
- },
- "project_id": schema.Int64Attribute{
- Optional: true,
- Description: `Your project ID number. See the docs for more information on how to obtain this.`,
},
"project_timezone": schema.StringAttribute{
- Optional: true,
- Description: `Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console.`,
+ Optional: true,
+ MarkdownDescription: `Default: "US/Pacific"` + "\n" +
+ `Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console.`,
},
"region": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["US", "EU"]; Default: "US"` + "\n" +
+ `The region of mixpanel domain instance either US or EU.`,
Validators: []validator.String{
stringvalidator.OneOf(
"US",
"EU",
),
},
- MarkdownDescription: `must be one of ["US", "EU"]` + "\n" +
- `The region of mixpanel domain instance either US or EU.`,
},
"select_properties_by_default": schema.BoolAttribute{
- Optional: true,
- Description: `Setting this config parameter to TRUE ensures that new properties on events and engage records are captured. Otherwise new properties will be ignored.`,
- },
- "source_type": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mixpanel",
- ),
- },
- Description: `must be one of ["mixpanel"]`,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Setting this config parameter to TRUE ensures that new properties on events and engage records are captured. Otherwise new properties will be ignored.`,
},
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `The date in the format YYYY-MM-DD. Any data before this date will not be replicated. If this option is not set, the connector will replicate data from up to one year ago by default.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The date in the format YYYY-MM-DD. Any data before this date will not be replicated. If this option is not set, the connector will replicate data from up to one year ago by default.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -274,7 +222,7 @@ func (r *SourceMixpanelResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMixpanel(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -450,5 +398,5 @@ func (r *SourceMixpanelResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceMixpanelResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_mixpanel_resource_sdk.go b/internal/provider/source_mixpanel_resource_sdk.go
old mode 100755
new mode 100644
index c9610900e..bce7243e8
--- a/internal/provider/source_mixpanel_resource_sdk.go
+++ b/internal/provider/source_mixpanel_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -15,47 +15,33 @@ func (r *SourceMixpanelResourceModel) ToCreateSDKType() *shared.SourceMixpanelCr
} else {
attributionWindow = nil
}
- var credentials *shared.SourceMixpanelAuthenticationWildcard
- if r.Configuration.Credentials != nil {
- var sourceMixpanelAuthenticationWildcardServiceAccount *shared.SourceMixpanelAuthenticationWildcardServiceAccount
- if r.Configuration.Credentials.SourceMixpanelAuthenticationWildcardServiceAccount != nil {
- optionTitle := new(shared.SourceMixpanelAuthenticationWildcardServiceAccountOptionTitle)
- if !r.Configuration.Credentials.SourceMixpanelAuthenticationWildcardServiceAccount.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceMixpanelAuthenticationWildcardServiceAccount.OptionTitle.IsNull() {
- *optionTitle = shared.SourceMixpanelAuthenticationWildcardServiceAccountOptionTitle(r.Configuration.Credentials.SourceMixpanelAuthenticationWildcardServiceAccount.OptionTitle.ValueString())
- } else {
- optionTitle = nil
- }
- secret := r.Configuration.Credentials.SourceMixpanelAuthenticationWildcardServiceAccount.Secret.ValueString()
- username := r.Configuration.Credentials.SourceMixpanelAuthenticationWildcardServiceAccount.Username.ValueString()
- sourceMixpanelAuthenticationWildcardServiceAccount = &shared.SourceMixpanelAuthenticationWildcardServiceAccount{
- OptionTitle: optionTitle,
- Secret: secret,
- Username: username,
- }
+ var credentials shared.SourceMixpanelAuthenticationWildcard
+ var sourceMixpanelServiceAccount *shared.SourceMixpanelServiceAccount
+ if r.Configuration.Credentials.ServiceAccount != nil {
+ projectID := r.Configuration.Credentials.ServiceAccount.ProjectID.ValueInt64()
+ secret := r.Configuration.Credentials.ServiceAccount.Secret.ValueString()
+ username := r.Configuration.Credentials.ServiceAccount.Username.ValueString()
+ sourceMixpanelServiceAccount = &shared.SourceMixpanelServiceAccount{
+ ProjectID: projectID,
+ Secret: secret,
+ Username: username,
}
- if sourceMixpanelAuthenticationWildcardServiceAccount != nil {
- credentials = &shared.SourceMixpanelAuthenticationWildcard{
- SourceMixpanelAuthenticationWildcardServiceAccount: sourceMixpanelAuthenticationWildcardServiceAccount,
- }
+ }
+ if sourceMixpanelServiceAccount != nil {
+ credentials = shared.SourceMixpanelAuthenticationWildcard{
+ SourceMixpanelServiceAccount: sourceMixpanelServiceAccount,
}
- var sourceMixpanelAuthenticationWildcardProjectSecret *shared.SourceMixpanelAuthenticationWildcardProjectSecret
- if r.Configuration.Credentials.SourceMixpanelAuthenticationWildcardProjectSecret != nil {
- apiSecret := r.Configuration.Credentials.SourceMixpanelAuthenticationWildcardProjectSecret.APISecret.ValueString()
- optionTitle1 := new(shared.SourceMixpanelAuthenticationWildcardProjectSecretOptionTitle)
- if !r.Configuration.Credentials.SourceMixpanelAuthenticationWildcardProjectSecret.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceMixpanelAuthenticationWildcardProjectSecret.OptionTitle.IsNull() {
- *optionTitle1 = shared.SourceMixpanelAuthenticationWildcardProjectSecretOptionTitle(r.Configuration.Credentials.SourceMixpanelAuthenticationWildcardProjectSecret.OptionTitle.ValueString())
- } else {
- optionTitle1 = nil
- }
- sourceMixpanelAuthenticationWildcardProjectSecret = &shared.SourceMixpanelAuthenticationWildcardProjectSecret{
- APISecret: apiSecret,
- OptionTitle: optionTitle1,
- }
+ }
+ var sourceMixpanelProjectSecret *shared.SourceMixpanelProjectSecret
+ if r.Configuration.Credentials.ProjectSecret != nil {
+ apiSecret := r.Configuration.Credentials.ProjectSecret.APISecret.ValueString()
+ sourceMixpanelProjectSecret = &shared.SourceMixpanelProjectSecret{
+ APISecret: apiSecret,
}
- if sourceMixpanelAuthenticationWildcardProjectSecret != nil {
- credentials = &shared.SourceMixpanelAuthenticationWildcard{
- SourceMixpanelAuthenticationWildcardProjectSecret: sourceMixpanelAuthenticationWildcardProjectSecret,
- }
+ }
+ if sourceMixpanelProjectSecret != nil {
+ credentials = shared.SourceMixpanelAuthenticationWildcard{
+ SourceMixpanelProjectSecret: sourceMixpanelProjectSecret,
}
}
dateWindowSize := new(int64)
@@ -70,12 +56,6 @@ func (r *SourceMixpanelResourceModel) ToCreateSDKType() *shared.SourceMixpanelCr
} else {
endDate = nil
}
- projectID := new(int64)
- if !r.Configuration.ProjectID.IsUnknown() && !r.Configuration.ProjectID.IsNull() {
- *projectID = r.Configuration.ProjectID.ValueInt64()
- } else {
- projectID = nil
- }
projectTimezone := new(string)
if !r.Configuration.ProjectTimezone.IsUnknown() && !r.Configuration.ProjectTimezone.IsNull() {
*projectTimezone = r.Configuration.ProjectTimezone.ValueString()
@@ -94,12 +74,6 @@ func (r *SourceMixpanelResourceModel) ToCreateSDKType() *shared.SourceMixpanelCr
} else {
selectPropertiesByDefault = nil
}
- sourceType := new(shared.SourceMixpanelMixpanel)
- if !r.Configuration.SourceType.IsUnknown() && !r.Configuration.SourceType.IsNull() {
- *sourceType = shared.SourceMixpanelMixpanel(r.Configuration.SourceType.ValueString())
- } else {
- sourceType = nil
- }
startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
@@ -111,13 +85,17 @@ func (r *SourceMixpanelResourceModel) ToCreateSDKType() *shared.SourceMixpanelCr
Credentials: credentials,
DateWindowSize: dateWindowSize,
EndDate: endDate,
- ProjectID: projectID,
ProjectTimezone: projectTimezone,
Region: region,
SelectPropertiesByDefault: selectPropertiesByDefault,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -128,6 +106,7 @@ func (r *SourceMixpanelResourceModel) ToCreateSDKType() *shared.SourceMixpanelCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMixpanelCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -147,47 +126,33 @@ func (r *SourceMixpanelResourceModel) ToUpdateSDKType() *shared.SourceMixpanelPu
} else {
attributionWindow = nil
}
- var credentials *shared.SourceMixpanelUpdateAuthenticationWildcard
- if r.Configuration.Credentials != nil {
- var sourceMixpanelUpdateAuthenticationWildcardServiceAccount *shared.SourceMixpanelUpdateAuthenticationWildcardServiceAccount
- if r.Configuration.Credentials.SourceMixpanelUpdateAuthenticationWildcardServiceAccount != nil {
- optionTitle := new(shared.SourceMixpanelUpdateAuthenticationWildcardServiceAccountOptionTitle)
- if !r.Configuration.Credentials.SourceMixpanelUpdateAuthenticationWildcardServiceAccount.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceMixpanelUpdateAuthenticationWildcardServiceAccount.OptionTitle.IsNull() {
- *optionTitle = shared.SourceMixpanelUpdateAuthenticationWildcardServiceAccountOptionTitle(r.Configuration.Credentials.SourceMixpanelUpdateAuthenticationWildcardServiceAccount.OptionTitle.ValueString())
- } else {
- optionTitle = nil
- }
- secret := r.Configuration.Credentials.SourceMixpanelUpdateAuthenticationWildcardServiceAccount.Secret.ValueString()
- username := r.Configuration.Credentials.SourceMixpanelUpdateAuthenticationWildcardServiceAccount.Username.ValueString()
- sourceMixpanelUpdateAuthenticationWildcardServiceAccount = &shared.SourceMixpanelUpdateAuthenticationWildcardServiceAccount{
- OptionTitle: optionTitle,
- Secret: secret,
- Username: username,
- }
+ var credentials shared.AuthenticationWildcard
+ var serviceAccount *shared.ServiceAccount
+ if r.Configuration.Credentials.ServiceAccount != nil {
+ projectID := r.Configuration.Credentials.ServiceAccount.ProjectID.ValueInt64()
+ secret := r.Configuration.Credentials.ServiceAccount.Secret.ValueString()
+ username := r.Configuration.Credentials.ServiceAccount.Username.ValueString()
+ serviceAccount = &shared.ServiceAccount{
+ ProjectID: projectID,
+ Secret: secret,
+ Username: username,
}
- if sourceMixpanelUpdateAuthenticationWildcardServiceAccount != nil {
- credentials = &shared.SourceMixpanelUpdateAuthenticationWildcard{
- SourceMixpanelUpdateAuthenticationWildcardServiceAccount: sourceMixpanelUpdateAuthenticationWildcardServiceAccount,
- }
+ }
+ if serviceAccount != nil {
+ credentials = shared.AuthenticationWildcard{
+ ServiceAccount: serviceAccount,
}
- var sourceMixpanelUpdateAuthenticationWildcardProjectSecret *shared.SourceMixpanelUpdateAuthenticationWildcardProjectSecret
- if r.Configuration.Credentials.SourceMixpanelUpdateAuthenticationWildcardProjectSecret != nil {
- apiSecret := r.Configuration.Credentials.SourceMixpanelUpdateAuthenticationWildcardProjectSecret.APISecret.ValueString()
- optionTitle1 := new(shared.SourceMixpanelUpdateAuthenticationWildcardProjectSecretOptionTitle)
- if !r.Configuration.Credentials.SourceMixpanelUpdateAuthenticationWildcardProjectSecret.OptionTitle.IsUnknown() && !r.Configuration.Credentials.SourceMixpanelUpdateAuthenticationWildcardProjectSecret.OptionTitle.IsNull() {
- *optionTitle1 = shared.SourceMixpanelUpdateAuthenticationWildcardProjectSecretOptionTitle(r.Configuration.Credentials.SourceMixpanelUpdateAuthenticationWildcardProjectSecret.OptionTitle.ValueString())
- } else {
- optionTitle1 = nil
- }
- sourceMixpanelUpdateAuthenticationWildcardProjectSecret = &shared.SourceMixpanelUpdateAuthenticationWildcardProjectSecret{
- APISecret: apiSecret,
- OptionTitle: optionTitle1,
- }
+ }
+ var projectSecret *shared.ProjectSecret
+ if r.Configuration.Credentials.ProjectSecret != nil {
+ apiSecret := r.Configuration.Credentials.ProjectSecret.APISecret.ValueString()
+ projectSecret = &shared.ProjectSecret{
+ APISecret: apiSecret,
}
- if sourceMixpanelUpdateAuthenticationWildcardProjectSecret != nil {
- credentials = &shared.SourceMixpanelUpdateAuthenticationWildcard{
- SourceMixpanelUpdateAuthenticationWildcardProjectSecret: sourceMixpanelUpdateAuthenticationWildcardProjectSecret,
- }
+ }
+ if projectSecret != nil {
+ credentials = shared.AuthenticationWildcard{
+ ProjectSecret: projectSecret,
}
}
dateWindowSize := new(int64)
@@ -202,12 +167,6 @@ func (r *SourceMixpanelResourceModel) ToUpdateSDKType() *shared.SourceMixpanelPu
} else {
endDate = nil
}
- projectID := new(int64)
- if !r.Configuration.ProjectID.IsUnknown() && !r.Configuration.ProjectID.IsNull() {
- *projectID = r.Configuration.ProjectID.ValueInt64()
- } else {
- projectID = nil
- }
projectTimezone := new(string)
if !r.Configuration.ProjectTimezone.IsUnknown() && !r.Configuration.ProjectTimezone.IsNull() {
*projectTimezone = r.Configuration.ProjectTimezone.ValueString()
@@ -237,7 +196,6 @@ func (r *SourceMixpanelResourceModel) ToUpdateSDKType() *shared.SourceMixpanelPu
Credentials: credentials,
DateWindowSize: dateWindowSize,
EndDate: endDate,
- ProjectID: projectID,
ProjectTimezone: projectTimezone,
Region: region,
SelectPropertiesByDefault: selectPropertiesByDefault,
diff --git a/internal/provider/source_monday_data_source.go b/internal/provider/source_monday_data_source.go
old mode 100755
new mode 100644
index ecbc54cb8..80f539e45
--- a/internal/provider/source_monday_data_source.go
+++ b/internal/provider/source_monday_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceMondayDataSource struct {
// SourceMondayDataSourceModel describes the data model.
type SourceMondayDataSourceModel struct {
- Configuration SourceMonday `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,134 +47,20 @@ func (r *SourceMondayDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceMonday DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_monday_authorization_method_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `API Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- },
- },
- "source_monday_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- "subdomain": schema.StringAttribute{
- Computed: true,
- Description: `Slug/subdomain of the account, or the first part of the URL that comes before .monday.com`,
- },
- },
- },
- "source_monday_update_authorization_method_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `API Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- },
- },
- "source_monday_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- "subdomain": schema.StringAttribute{
- Computed: true,
- Description: `Slug/subdomain of the account, or the first part of the URL that comes before .monday.com`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "monday",
- ),
- },
- Description: `must be one of ["monday"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_monday_data_source_sdk.go b/internal/provider/source_monday_data_source_sdk.go
old mode 100755
new mode 100644
index d92290ba6..b7d3e2acd
--- a/internal/provider/source_monday_data_source_sdk.go
+++ b/internal/provider/source_monday_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMondayDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_monday_resource.go b/internal/provider/source_monday_resource.go
old mode 100755
new mode 100644
index 0a50cf7a3..ce2c44a5f
--- a/internal/provider/source_monday_resource.go
+++ b/internal/provider/source_monday_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceMondayResource struct {
// SourceMondayResourceModel describes the resource data model.
type SourceMondayResourceModel struct {
Configuration SourceMonday `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,40 +59,24 @@ func (r *SourceMondayResource) Schema(ctx context.Context, req resource.SchemaRe
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_monday_authorization_method_api_token": schema.SingleNestedAttribute{
+ "api_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Token for making authenticated requests.`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
},
},
- "source_monday_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests.`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of your OAuth application.`,
@@ -101,56 +86,9 @@ func (r *SourceMondayResource) Schema(ctx context.Context, req resource.SchemaRe
Description: `The Client Secret of your OAuth application.`,
},
"subdomain": schema.StringAttribute{
- Optional: true,
- Description: `Slug/subdomain of the account, or the first part of the URL that comes before .monday.com`,
- },
- },
- },
- "source_monday_update_authorization_method_api_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Required: true,
- Description: `API Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- },
- },
- "source_monday_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- "subdomain": schema.StringAttribute{
- Optional: true,
- Description: `Slug/subdomain of the account, or the first part of the URL that comes before .monday.com`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `Slug/subdomain of the account, or the first part of the URL that comes before .monday.com`,
},
},
},
@@ -159,24 +97,26 @@ func (r *SourceMondayResource) Schema(ctx context.Context, req resource.SchemaRe
validators.ExactlyOneChild(),
},
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "monday",
- ),
- },
- Description: `must be one of ["monday"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -240,7 +180,7 @@ func (r *SourceMondayResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMonday(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -416,5 +356,5 @@ func (r *SourceMondayResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceMondayResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_monday_resource_sdk.go b/internal/provider/source_monday_resource_sdk.go
old mode 100755
new mode 100644
index fbd649fed..b2342acb4
--- a/internal/provider/source_monday_resource_sdk.go
+++ b/internal/provider/source_monday_resource_sdk.go
@@ -3,57 +3,57 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMondayResourceModel) ToCreateSDKType() *shared.SourceMondayCreateRequest {
var credentials *shared.SourceMondayAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceMondayAuthorizationMethodOAuth20 *shared.SourceMondayAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceMondayAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceMondayAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := shared.SourceMondayAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceMondayAuthorizationMethodOAuth20.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceMondayAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceMondayAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ var sourceMondayOAuth20 *shared.SourceMondayOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
subdomain := new(string)
- if !r.Configuration.Credentials.SourceMondayAuthorizationMethodOAuth20.Subdomain.IsUnknown() && !r.Configuration.Credentials.SourceMondayAuthorizationMethodOAuth20.Subdomain.IsNull() {
- *subdomain = r.Configuration.Credentials.SourceMondayAuthorizationMethodOAuth20.Subdomain.ValueString()
+ if !r.Configuration.Credentials.OAuth20.Subdomain.IsUnknown() && !r.Configuration.Credentials.OAuth20.Subdomain.IsNull() {
+ *subdomain = r.Configuration.Credentials.OAuth20.Subdomain.ValueString()
} else {
subdomain = nil
}
- sourceMondayAuthorizationMethodOAuth20 = &shared.SourceMondayAuthorizationMethodOAuth20{
+ sourceMondayOAuth20 = &shared.SourceMondayOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
Subdomain: subdomain,
}
}
- if sourceMondayAuthorizationMethodOAuth20 != nil {
+ if sourceMondayOAuth20 != nil {
credentials = &shared.SourceMondayAuthorizationMethod{
- SourceMondayAuthorizationMethodOAuth20: sourceMondayAuthorizationMethodOAuth20,
+ SourceMondayOAuth20: sourceMondayOAuth20,
}
}
- var sourceMondayAuthorizationMethodAPIToken *shared.SourceMondayAuthorizationMethodAPIToken
- if r.Configuration.Credentials.SourceMondayAuthorizationMethodAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceMondayAuthorizationMethodAPIToken.APIToken.ValueString()
- authType1 := shared.SourceMondayAuthorizationMethodAPITokenAuthType(r.Configuration.Credentials.SourceMondayAuthorizationMethodAPIToken.AuthType.ValueString())
- sourceMondayAuthorizationMethodAPIToken = &shared.SourceMondayAuthorizationMethodAPIToken{
+ var sourceMondayAPIToken *shared.SourceMondayAPIToken
+ if r.Configuration.Credentials.APIToken != nil {
+ apiToken := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ sourceMondayAPIToken = &shared.SourceMondayAPIToken{
APIToken: apiToken,
- AuthType: authType1,
}
}
- if sourceMondayAuthorizationMethodAPIToken != nil {
+ if sourceMondayAPIToken != nil {
credentials = &shared.SourceMondayAuthorizationMethod{
- SourceMondayAuthorizationMethodAPIToken: sourceMondayAuthorizationMethodAPIToken,
+ SourceMondayAPIToken: sourceMondayAPIToken,
}
}
}
- sourceType := shared.SourceMondayMonday(r.Configuration.SourceType.ValueString())
configuration := shared.SourceMonday{
Credentials: credentials,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -65,6 +65,7 @@ func (r *SourceMondayResourceModel) ToCreateSDKType() *shared.SourceMondayCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMondayCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -80,43 +81,39 @@ func (r *SourceMondayResourceModel) ToGetSDKType() *shared.SourceMondayCreateReq
func (r *SourceMondayResourceModel) ToUpdateSDKType() *shared.SourceMondayPutRequest {
var credentials *shared.SourceMondayUpdateAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceMondayUpdateAuthorizationMethodOAuth20 *shared.SourceMondayUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceMondayUpdateAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceMondayUpdateAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := shared.SourceMondayUpdateAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceMondayUpdateAuthorizationMethodOAuth20.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceMondayUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceMondayUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ var sourceMondayUpdateOAuth20 *shared.SourceMondayUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
subdomain := new(string)
- if !r.Configuration.Credentials.SourceMondayUpdateAuthorizationMethodOAuth20.Subdomain.IsUnknown() && !r.Configuration.Credentials.SourceMondayUpdateAuthorizationMethodOAuth20.Subdomain.IsNull() {
- *subdomain = r.Configuration.Credentials.SourceMondayUpdateAuthorizationMethodOAuth20.Subdomain.ValueString()
+ if !r.Configuration.Credentials.OAuth20.Subdomain.IsUnknown() && !r.Configuration.Credentials.OAuth20.Subdomain.IsNull() {
+ *subdomain = r.Configuration.Credentials.OAuth20.Subdomain.ValueString()
} else {
subdomain = nil
}
- sourceMondayUpdateAuthorizationMethodOAuth20 = &shared.SourceMondayUpdateAuthorizationMethodOAuth20{
+ sourceMondayUpdateOAuth20 = &shared.SourceMondayUpdateOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
Subdomain: subdomain,
}
}
- if sourceMondayUpdateAuthorizationMethodOAuth20 != nil {
+ if sourceMondayUpdateOAuth20 != nil {
credentials = &shared.SourceMondayUpdateAuthorizationMethod{
- SourceMondayUpdateAuthorizationMethodOAuth20: sourceMondayUpdateAuthorizationMethodOAuth20,
+ SourceMondayUpdateOAuth20: sourceMondayUpdateOAuth20,
}
}
- var sourceMondayUpdateAuthorizationMethodAPIToken *shared.SourceMondayUpdateAuthorizationMethodAPIToken
- if r.Configuration.Credentials.SourceMondayUpdateAuthorizationMethodAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceMondayUpdateAuthorizationMethodAPIToken.APIToken.ValueString()
- authType1 := shared.SourceMondayUpdateAuthorizationMethodAPITokenAuthType(r.Configuration.Credentials.SourceMondayUpdateAuthorizationMethodAPIToken.AuthType.ValueString())
- sourceMondayUpdateAuthorizationMethodAPIToken = &shared.SourceMondayUpdateAuthorizationMethodAPIToken{
- APIToken: apiToken,
- AuthType: authType1,
+ var apiToken *shared.APIToken
+ if r.Configuration.Credentials.APIToken != nil {
+ apiToken1 := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ apiToken = &shared.APIToken{
+ APIToken: apiToken1,
}
}
- if sourceMondayUpdateAuthorizationMethodAPIToken != nil {
+ if apiToken != nil {
credentials = &shared.SourceMondayUpdateAuthorizationMethod{
- SourceMondayUpdateAuthorizationMethodAPIToken: sourceMondayUpdateAuthorizationMethodAPIToken,
+ APIToken: apiToken,
}
}
}
diff --git a/internal/provider/source_mongodb_data_source.go b/internal/provider/source_mongodb_data_source.go
deleted file mode 100755
index 907de685e..000000000
--- a/internal/provider/source_mongodb_data_source.go
+++ /dev/null
@@ -1,319 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
- "context"
- "fmt"
-
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ datasource.DataSource = &SourceMongodbDataSource{}
-var _ datasource.DataSourceWithConfigure = &SourceMongodbDataSource{}
-
-func NewSourceMongodbDataSource() datasource.DataSource {
- return &SourceMongodbDataSource{}
-}
-
-// SourceMongodbDataSource is the data source implementation.
-type SourceMongodbDataSource struct {
- client *sdk.SDK
-}
-
-// SourceMongodbDataSourceModel describes the data model.
-type SourceMongodbDataSourceModel struct {
- Configuration SourceMongodb1 `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-// Metadata returns the data source type name.
-func (r *SourceMongodbDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_mongodb"
-}
-
-// Schema defines the schema for the data source.
-func (r *SourceMongodbDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "SourceMongodb DataSource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_source": schema.StringAttribute{
- Computed: true,
- Description: `The authentication source where the user information is stored.`,
- },
- "database": schema.StringAttribute{
- Computed: true,
- Description: `The database you want to replicate.`,
- },
- "instance_type": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_mongodb_mongo_db_instance_type_mongo_db_atlas": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "cluster_url": schema.StringAttribute{
- Computed: true,
- Description: `The URL of a cluster to connect to.`,
- },
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "atlas",
- ),
- },
- Description: `must be one of ["atlas"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "source_mongodb_mongo_db_instance_type_replica_set": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "replica",
- ),
- },
- Description: `must be one of ["replica"]`,
- },
- "replica_set": schema.StringAttribute{
- Computed: true,
- Description: `A replica set in MongoDB is a group of mongod processes that maintain the same data set.`,
- },
- "server_addresses": schema.StringAttribute{
- Computed: true,
- Description: `The members of a replica set. Please specify ` + "`" + `host` + "`" + `:` + "`" + `port` + "`" + ` of each member separated by comma.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The host name of the Mongo database.`,
- },
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "standalone",
- ),
- },
- Description: `must be one of ["standalone"]`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `The port of the Mongo database.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "source_mongodb_update_mongo_db_instance_type_mongo_db_atlas": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "cluster_url": schema.StringAttribute{
- Computed: true,
- Description: `The URL of a cluster to connect to.`,
- },
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "atlas",
- ),
- },
- Description: `must be one of ["atlas"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "source_mongodb_update_mongo_db_instance_type_replica_set": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "replica",
- ),
- },
- Description: `must be one of ["replica"]`,
- },
- "replica_set": schema.StringAttribute{
- Computed: true,
- Description: `A replica set in MongoDB is a group of mongod processes that maintain the same data set.`,
- },
- "server_addresses": schema.StringAttribute{
- Computed: true,
- Description: `The members of a replica set. Please specify ` + "`" + `host` + "`" + `:` + "`" + `port` + "`" + ` of each member separated by comma.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "source_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The host name of the Mongo database.`,
- },
- "instance": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "standalone",
- ),
- },
- Description: `must be one of ["standalone"]`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `The port of the Mongo database.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `The password associated with this username.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mongodb",
- ),
- },
- Description: `must be one of ["mongodb"]`,
- },
- "user": schema.StringAttribute{
- Computed: true,
- Description: `The username which is used to access the database.`,
- },
- },
- },
- "name": schema.StringAttribute{
- Computed: true,
- },
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
- "source_id": schema.StringAttribute{
- Required: true,
- },
- "workspace_id": schema.StringAttribute{
- Computed: true,
- },
- },
- }
-}
-
-func (r *SourceMongodbDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected DataSource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *SourceMongodbDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data *SourceMongodbDataSourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.GetSourceMongodbRequest{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.GetSourceMongodb(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
diff --git a/internal/provider/source_mongodb_data_source_sdk.go b/internal/provider/source_mongodb_data_source_sdk.go
deleted file mode 100755
index 765bde24d..000000000
--- a/internal/provider/source_mongodb_data_source_sdk.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceMongodbDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
diff --git a/internal/provider/source_mongodb_resource.go b/internal/provider/source_mongodb_resource.go
deleted file mode 100755
index ed1431835..000000000
--- a/internal/provider/source_mongodb_resource.go
+++ /dev/null
@@ -1,485 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "context"
- "fmt"
-
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/path"
- "github.com/hashicorp/terraform-plugin-framework/resource"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema"
- "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ resource.Resource = &SourceMongodbResource{}
-var _ resource.ResourceWithImportState = &SourceMongodbResource{}
-
-func NewSourceMongodbResource() resource.Resource {
- return &SourceMongodbResource{}
-}
-
-// SourceMongodbResource defines the resource implementation.
-type SourceMongodbResource struct {
- client *sdk.SDK
-}
-
-// SourceMongodbResourceModel describes the resource data model.
-type SourceMongodbResourceModel struct {
- Configuration SourceMongodb `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-func (r *SourceMongodbResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_mongodb"
-}
-
-func (r *SourceMongodbResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "SourceMongodb Resource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "auth_source": schema.StringAttribute{
- Optional: true,
- Description: `The authentication source where the user information is stored.`,
- },
- "database": schema.StringAttribute{
- Required: true,
- Description: `The database you want to replicate.`,
- },
- "instance_type": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "source_mongodb_mongo_db_instance_type_mongo_db_atlas": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "cluster_url": schema.StringAttribute{
- Required: true,
- Description: `The URL of a cluster to connect to.`,
- },
- "instance": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "atlas",
- ),
- },
- Description: `must be one of ["atlas"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "source_mongodb_mongo_db_instance_type_replica_set": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "instance": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "replica",
- ),
- },
- Description: `must be one of ["replica"]`,
- },
- "replica_set": schema.StringAttribute{
- Optional: true,
- Description: `A replica set in MongoDB is a group of mongod processes that maintain the same data set.`,
- },
- "server_addresses": schema.StringAttribute{
- Required: true,
- Description: `The members of a replica set. Please specify ` + "`" + `host` + "`" + `:` + "`" + `port` + "`" + ` of each member separated by comma.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Required: true,
- Description: `The host name of the Mongo database.`,
- },
- "instance": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "standalone",
- ),
- },
- Description: `must be one of ["standalone"]`,
- },
- "port": schema.Int64Attribute{
- Required: true,
- Description: `The port of the Mongo database.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "source_mongodb_update_mongo_db_instance_type_mongo_db_atlas": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "cluster_url": schema.StringAttribute{
- Required: true,
- Description: `The URL of a cluster to connect to.`,
- },
- "instance": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "atlas",
- ),
- },
- Description: `must be one of ["atlas"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "source_mongodb_update_mongo_db_instance_type_replica_set": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "instance": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "replica",
- ),
- },
- Description: `must be one of ["replica"]`,
- },
- "replica_set": schema.StringAttribute{
- Optional: true,
- Description: `A replica set in MongoDB is a group of mongod processes that maintain the same data set.`,
- },
- "server_addresses": schema.StringAttribute{
- Required: true,
- Description: `The members of a replica set. Please specify ` + "`" + `host` + "`" + `:` + "`" + `port` + "`" + ` of each member separated by comma.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "source_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "host": schema.StringAttribute{
- Required: true,
- Description: `The host name of the Mongo database.`,
- },
- "instance": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "standalone",
- ),
- },
- Description: `must be one of ["standalone"]`,
- },
- "port": schema.Int64Attribute{
- Required: true,
- Description: `The port of the Mongo database.`,
- },
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.`,
- },
- "password": schema.StringAttribute{
- Optional: true,
- Description: `The password associated with this username.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mongodb",
- ),
- },
- Description: `must be one of ["mongodb"]`,
- },
- "user": schema.StringAttribute{
- Optional: true,
- Description: `The username which is used to access the database.`,
- },
- },
- },
- "name": schema.StringAttribute{
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- Required: true,
- },
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
- "source_id": schema.StringAttribute{
- Computed: true,
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- },
- "workspace_id": schema.StringAttribute{
- PlanModifiers: []planmodifier.String{
- speakeasy_stringplanmodifier.SuppressDiff(),
- },
- Required: true,
- },
- },
- }
-}
-
-func (r *SourceMongodbResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected Resource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *SourceMongodbResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data *SourceMongodbResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- request := *data.ToCreateSDKType()
- res, err := r.client.Sources.CreateSourceMongodb(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromCreateResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceMongodbResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data *SourceMongodbResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.GetSourceMongodbRequest{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.GetSourceMongodb(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceMongodbResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data *SourceMongodbResourceModel
- merge(ctx, req, resp, &data)
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceMongodbPutRequest := data.ToUpdateSDKType()
- sourceID := data.SourceID.ValueString()
- request := operations.PutSourceMongodbRequest{
- SourceMongodbPutRequest: sourceMongodbPutRequest,
- SourceID: sourceID,
- }
- res, err := r.client.Sources.PutSourceMongodb(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- sourceId1 := data.SourceID.ValueString()
- getRequest := operations.GetSourceMongodbRequest{
- SourceID: sourceId1,
- }
- getResponse, err := r.client.Sources.GetSourceMongodb(ctx, getRequest)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if getResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse))
- return
- }
- if getResponse.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse))
- return
- }
- if getResponse.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse))
- return
- }
- data.RefreshFromGetResponse(getResponse.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
-
-func (r *SourceMongodbResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data *SourceMongodbResourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.DeleteSourceMongodbRequest{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.DeleteSourceMongodb(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
-
-}
-
-func (r *SourceMongodbResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
-}
diff --git a/internal/provider/source_mongodb_resource_sdk.go b/internal/provider/source_mongodb_resource_sdk.go
deleted file mode 100755
index 018f5e04d..000000000
--- a/internal/provider/source_mongodb_resource_sdk.go
+++ /dev/null
@@ -1,231 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "encoding/json"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceMongodbResourceModel) ToCreateSDKType() *shared.SourceMongodbCreateRequest {
- authSource := new(string)
- if !r.Configuration.AuthSource.IsUnknown() && !r.Configuration.AuthSource.IsNull() {
- *authSource = r.Configuration.AuthSource.ValueString()
- } else {
- authSource = nil
- }
- database := r.Configuration.Database.ValueString()
- var instanceType *shared.SourceMongodbMongoDbInstanceType
- if r.Configuration.InstanceType != nil {
- var sourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance *shared.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance
- if r.Configuration.InstanceType.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
- host := r.Configuration.InstanceType.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance.Host.ValueString()
- instance := shared.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance(r.Configuration.InstanceType.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance.Instance.ValueString())
- port := r.Configuration.InstanceType.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance.Port.ValueInt64()
- sourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance = &shared.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance{
- Host: host,
- Instance: instance,
- Port: port,
- }
- }
- if sourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
- instanceType = &shared.SourceMongodbMongoDbInstanceType{
- SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance: sourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance,
- }
- }
- var sourceMongodbMongoDbInstanceTypeReplicaSet *shared.SourceMongodbMongoDbInstanceTypeReplicaSet
- if r.Configuration.InstanceType.SourceMongodbMongoDbInstanceTypeReplicaSet != nil {
- instance1 := shared.SourceMongodbMongoDbInstanceTypeReplicaSetInstance(r.Configuration.InstanceType.SourceMongodbMongoDbInstanceTypeReplicaSet.Instance.ValueString())
- replicaSet := new(string)
- if !r.Configuration.InstanceType.SourceMongodbMongoDbInstanceTypeReplicaSet.ReplicaSet.IsUnknown() && !r.Configuration.InstanceType.SourceMongodbMongoDbInstanceTypeReplicaSet.ReplicaSet.IsNull() {
- *replicaSet = r.Configuration.InstanceType.SourceMongodbMongoDbInstanceTypeReplicaSet.ReplicaSet.ValueString()
- } else {
- replicaSet = nil
- }
- serverAddresses := r.Configuration.InstanceType.SourceMongodbMongoDbInstanceTypeReplicaSet.ServerAddresses.ValueString()
- sourceMongodbMongoDbInstanceTypeReplicaSet = &shared.SourceMongodbMongoDbInstanceTypeReplicaSet{
- Instance: instance1,
- ReplicaSet: replicaSet,
- ServerAddresses: serverAddresses,
- }
- }
- if sourceMongodbMongoDbInstanceTypeReplicaSet != nil {
- instanceType = &shared.SourceMongodbMongoDbInstanceType{
- SourceMongodbMongoDbInstanceTypeReplicaSet: sourceMongodbMongoDbInstanceTypeReplicaSet,
- }
- }
- var sourceMongodbMongoDBInstanceTypeMongoDBAtlas *shared.SourceMongodbMongoDBInstanceTypeMongoDBAtlas
- if r.Configuration.InstanceType.SourceMongodbMongoDBInstanceTypeMongoDBAtlas != nil {
- clusterURL := r.Configuration.InstanceType.SourceMongodbMongoDBInstanceTypeMongoDBAtlas.ClusterURL.ValueString()
- instance2 := shared.SourceMongodbMongoDBInstanceTypeMongoDBAtlasInstance(r.Configuration.InstanceType.SourceMongodbMongoDBInstanceTypeMongoDBAtlas.Instance.ValueString())
- var additionalProperties interface{}
- if !r.Configuration.InstanceType.SourceMongodbMongoDBInstanceTypeMongoDBAtlas.AdditionalProperties.IsUnknown() && !r.Configuration.InstanceType.SourceMongodbMongoDBInstanceTypeMongoDBAtlas.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.InstanceType.SourceMongodbMongoDBInstanceTypeMongoDBAtlas.AdditionalProperties.ValueString()), &additionalProperties)
- }
- sourceMongodbMongoDBInstanceTypeMongoDBAtlas = &shared.SourceMongodbMongoDBInstanceTypeMongoDBAtlas{
- ClusterURL: clusterURL,
- Instance: instance2,
- AdditionalProperties: additionalProperties,
- }
- }
- if sourceMongodbMongoDBInstanceTypeMongoDBAtlas != nil {
- instanceType = &shared.SourceMongodbMongoDbInstanceType{
- SourceMongodbMongoDBInstanceTypeMongoDBAtlas: sourceMongodbMongoDBInstanceTypeMongoDBAtlas,
- }
- }
- }
- password := new(string)
- if !r.Configuration.Password.IsUnknown() && !r.Configuration.Password.IsNull() {
- *password = r.Configuration.Password.ValueString()
- } else {
- password = nil
- }
- sourceType := shared.SourceMongodbMongodb(r.Configuration.SourceType.ValueString())
- user := new(string)
- if !r.Configuration.User.IsUnknown() && !r.Configuration.User.IsNull() {
- *user = r.Configuration.User.ValueString()
- } else {
- user = nil
- }
- configuration := shared.SourceMongodb{
- AuthSource: authSource,
- Database: database,
- InstanceType: instanceType,
- Password: password,
- SourceType: sourceType,
- User: user,
- }
- name := r.Name.ValueString()
- secretID := new(string)
- if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
- *secretID = r.SecretID.ValueString()
- } else {
- secretID = nil
- }
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceMongodbCreateRequest{
- Configuration: configuration,
- Name: name,
- SecretID: secretID,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceMongodbResourceModel) ToGetSDKType() *shared.SourceMongodbCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceMongodbResourceModel) ToUpdateSDKType() *shared.SourceMongodbPutRequest {
- authSource := new(string)
- if !r.Configuration.AuthSource.IsUnknown() && !r.Configuration.AuthSource.IsNull() {
- *authSource = r.Configuration.AuthSource.ValueString()
- } else {
- authSource = nil
- }
- database := r.Configuration.Database.ValueString()
- var instanceType *shared.SourceMongodbUpdateMongoDbInstanceType
- if r.Configuration.InstanceType != nil {
- var sourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance *shared.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance
- if r.Configuration.InstanceType.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
- host := r.Configuration.InstanceType.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance.Host.ValueString()
- instance := shared.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance(r.Configuration.InstanceType.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance.Instance.ValueString())
- port := r.Configuration.InstanceType.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance.Port.ValueInt64()
- sourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance = &shared.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance{
- Host: host,
- Instance: instance,
- Port: port,
- }
- }
- if sourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
- instanceType = &shared.SourceMongodbUpdateMongoDbInstanceType{
- SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance: sourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance,
- }
- }
- var sourceMongodbUpdateMongoDbInstanceTypeReplicaSet *shared.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet
- if r.Configuration.InstanceType.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet != nil {
- instance1 := shared.SourceMongodbUpdateMongoDbInstanceTypeReplicaSetInstance(r.Configuration.InstanceType.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet.Instance.ValueString())
- replicaSet := new(string)
- if !r.Configuration.InstanceType.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet.ReplicaSet.IsUnknown() && !r.Configuration.InstanceType.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet.ReplicaSet.IsNull() {
- *replicaSet = r.Configuration.InstanceType.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet.ReplicaSet.ValueString()
- } else {
- replicaSet = nil
- }
- serverAddresses := r.Configuration.InstanceType.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet.ServerAddresses.ValueString()
- sourceMongodbUpdateMongoDbInstanceTypeReplicaSet = &shared.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet{
- Instance: instance1,
- ReplicaSet: replicaSet,
- ServerAddresses: serverAddresses,
- }
- }
- if sourceMongodbUpdateMongoDbInstanceTypeReplicaSet != nil {
- instanceType = &shared.SourceMongodbUpdateMongoDbInstanceType{
- SourceMongodbUpdateMongoDbInstanceTypeReplicaSet: sourceMongodbUpdateMongoDbInstanceTypeReplicaSet,
- }
- }
- var sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas *shared.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas
- if r.Configuration.InstanceType.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas != nil {
- clusterURL := r.Configuration.InstanceType.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas.ClusterURL.ValueString()
- instance2 := shared.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance(r.Configuration.InstanceType.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas.Instance.ValueString())
- var additionalProperties interface{}
- if !r.Configuration.InstanceType.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas.AdditionalProperties.IsUnknown() && !r.Configuration.InstanceType.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.InstanceType.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas.AdditionalProperties.ValueString()), &additionalProperties)
- }
- sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas = &shared.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas{
- ClusterURL: clusterURL,
- Instance: instance2,
- AdditionalProperties: additionalProperties,
- }
- }
- if sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas != nil {
- instanceType = &shared.SourceMongodbUpdateMongoDbInstanceType{
- SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas: sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas,
- }
- }
- }
- password := new(string)
- if !r.Configuration.Password.IsUnknown() && !r.Configuration.Password.IsNull() {
- *password = r.Configuration.Password.ValueString()
- } else {
- password = nil
- }
- user := new(string)
- if !r.Configuration.User.IsUnknown() && !r.Configuration.User.IsNull() {
- *user = r.Configuration.User.ValueString()
- } else {
- user = nil
- }
- configuration := shared.SourceMongodbUpdate{
- AuthSource: authSource,
- Database: database,
- InstanceType: instanceType,
- Password: password,
- User: user,
- }
- name := r.Name.ValueString()
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceMongodbPutRequest{
- Configuration: configuration,
- Name: name,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceMongodbResourceModel) ToDeleteSDKType() *shared.SourceMongodbCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceMongodbResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.SourceType = types.StringValue(resp.SourceType)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
-
-func (r *SourceMongodbResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
- r.RefreshFromGetResponse(resp)
-}
diff --git a/internal/provider/source_mongodbinternalpoc_data_source.go b/internal/provider/source_mongodbinternalpoc_data_source.go
old mode 100755
new mode 100644
index 1e06a70e6..f0ce2a3f3
--- a/internal/provider/source_mongodbinternalpoc_data_source.go
+++ b/internal/provider/source_mongodbinternalpoc_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceMongodbInternalPocDataSource struct {
// SourceMongodbInternalPocDataSourceModel describes the data model.
type SourceMongodbInternalPocDataSourceModel struct {
- Configuration SourceMongodbInternalPoc `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,50 +47,20 @@ func (r *SourceMongodbInternalPocDataSource) Schema(ctx context.Context, req dat
MarkdownDescription: "SourceMongodbInternalPoc DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_source": schema.StringAttribute{
- Computed: true,
- Description: `The authentication source where the user information is stored.`,
- },
- "connection_string": schema.StringAttribute{
- Computed: true,
- Description: `The connection string of the database that you want to replicate..`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `The password associated with this username.`,
- },
- "replica_set": schema.StringAttribute{
- Computed: true,
- Description: `The name of the replica set to be replicated.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mongodb-internal-poc",
- ),
- },
- Description: `must be one of ["mongodb-internal-poc"]`,
- },
- "user": schema.StringAttribute{
- Computed: true,
- Description: `The username which is used to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_mongodbinternalpoc_data_source_sdk.go b/internal/provider/source_mongodbinternalpoc_data_source_sdk.go
old mode 100755
new mode 100644
index 6038cd6a4..5a482e1b4
--- a/internal/provider/source_mongodbinternalpoc_data_source_sdk.go
+++ b/internal/provider/source_mongodbinternalpoc_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMongodbInternalPocDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_mongodbinternalpoc_resource.go b/internal/provider/source_mongodbinternalpoc_resource.go
old mode 100755
new mode 100644
index ff1977ff6..9ea8da9ca
--- a/internal/provider/source_mongodbinternalpoc_resource.go
+++ b/internal/provider/source_mongodbinternalpoc_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceMongodbInternalPocResource struct {
// SourceMongodbInternalPocResourceModel describes the resource data model.
type SourceMongodbInternalPocResourceModel struct {
Configuration SourceMongodbInternalPoc `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -55,8 +55,9 @@ func (r *SourceMongodbInternalPocResource) Schema(ctx context.Context, req resou
Required: true,
Attributes: map[string]schema.Attribute{
"auth_source": schema.StringAttribute{
- Optional: true,
- Description: `The authentication source where the user information is stored.`,
+ Optional: true,
+ MarkdownDescription: `Default: "admin"` + "\n" +
+ `The authentication source where the user information is stored.`,
},
"connection_string": schema.StringAttribute{
Optional: true,
@@ -64,34 +65,37 @@ func (r *SourceMongodbInternalPocResource) Schema(ctx context.Context, req resou
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The password associated with this username.`,
},
"replica_set": schema.StringAttribute{
Optional: true,
Description: `The name of the replica set to be replicated.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mongodb-internal-poc",
- ),
- },
- Description: `must be one of ["mongodb-internal-poc"]`,
- },
"user": schema.StringAttribute{
Optional: true,
Description: `The username which is used to access the database.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +159,7 @@ func (r *SourceMongodbInternalPocResource) Create(ctx context.Context, req resou
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMongodbInternalPoc(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +335,5 @@ func (r *SourceMongodbInternalPocResource) Delete(ctx context.Context, req resou
}
func (r *SourceMongodbInternalPocResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_mongodbinternalpoc_resource_sdk.go b/internal/provider/source_mongodbinternalpoc_resource_sdk.go
old mode 100755
new mode 100644
index 9e784063b..0ef07183a
--- a/internal/provider/source_mongodbinternalpoc_resource_sdk.go
+++ b/internal/provider/source_mongodbinternalpoc_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -32,7 +32,6 @@ func (r *SourceMongodbInternalPocResourceModel) ToCreateSDKType() *shared.Source
} else {
replicaSet = nil
}
- sourceType := shared.SourceMongodbInternalPocMongodbInternalPoc(r.Configuration.SourceType.ValueString())
user := new(string)
if !r.Configuration.User.IsUnknown() && !r.Configuration.User.IsNull() {
*user = r.Configuration.User.ValueString()
@@ -44,9 +43,14 @@ func (r *SourceMongodbInternalPocResourceModel) ToCreateSDKType() *shared.Source
ConnectionString: connectionString,
Password: password,
ReplicaSet: replicaSet,
- SourceType: sourceType,
User: user,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -57,6 +61,7 @@ func (r *SourceMongodbInternalPocResourceModel) ToCreateSDKType() *shared.Source
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMongodbInternalPocCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_mongodbv2_data_source.go b/internal/provider/source_mongodbv2_data_source.go
new file mode 100644
index 000000000..101e5dea2
--- /dev/null
+++ b/internal/provider/source_mongodbv2_data_source.go
@@ -0,0 +1,137 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ datasource.DataSource = &SourceMongodbV2DataSource{}
+var _ datasource.DataSourceWithConfigure = &SourceMongodbV2DataSource{}
+
+func NewSourceMongodbV2DataSource() datasource.DataSource {
+ return &SourceMongodbV2DataSource{}
+}
+
+// SourceMongodbV2DataSource is the data source implementation.
+type SourceMongodbV2DataSource struct {
+ client *sdk.SDK
+}
+
+// SourceMongodbV2DataSourceModel describes the data model.
+type SourceMongodbV2DataSourceModel struct {
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+// Metadata returns the data source type name.
+func (r *SourceMongodbV2DataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_source_mongodb_v2"
+}
+
+// Schema defines the schema for the data source.
+func (r *SourceMongodbV2DataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "SourceMongodbV2 DataSource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.StringAttribute{
+ Computed: true,
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ },
+ "source_id": schema.StringAttribute{
+ Required: true,
+ },
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
+ "workspace_id": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ }
+}
+
+func (r *SourceMongodbV2DataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ client, ok := req.ProviderData.(*sdk.SDK)
+
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected DataSource Configure Type",
+ fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = client
+}
+
+func (r *SourceMongodbV2DataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data *SourceMongodbV2DataSourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ sourceID := data.SourceID.ValueString()
+ request := operations.GetSourceMongodbV2Request{
+ SourceID: sourceID,
+ }
+ res, err := r.client.Sources.GetSourceMongodbV2(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.SourceResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(res.SourceResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/internal/provider/source_mongodbv2_data_source_sdk.go b/internal/provider/source_mongodbv2_data_source_sdk.go
new file mode 100644
index 000000000..c0699f9b3
--- /dev/null
+++ b/internal/provider/source_mongodbv2_data_source_sdk.go
@@ -0,0 +1,18 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *SourceMongodbV2DataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
+ r.Name = types.StringValue(resp.Name)
+ r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
diff --git a/internal/provider/source_mongodbv2_resource.go b/internal/provider/source_mongodbv2_resource.go
new file mode 100644
index 000000000..475f893e2
--- /dev/null
+++ b/internal/provider/source_mongodbv2_resource.go
@@ -0,0 +1,413 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ resource.Resource = &SourceMongodbV2Resource{}
+var _ resource.ResourceWithImportState = &SourceMongodbV2Resource{}
+
+func NewSourceMongodbV2Resource() resource.Resource {
+ return &SourceMongodbV2Resource{}
+}
+
+// SourceMongodbV2Resource defines the resource implementation.
+type SourceMongodbV2Resource struct {
+ client *sdk.SDK
+}
+
+// SourceMongodbV2ResourceModel describes the resource data model.
+type SourceMongodbV2ResourceModel struct {
+ Configuration SourceMongodbV2 `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+func (r *SourceMongodbV2Resource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_source_mongodb_v2"
+}
+
+func (r *SourceMongodbV2Resource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "SourceMongodbV2 Resource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "database_config": schema.SingleNestedAttribute{
+ Required: true,
+ Attributes: map[string]schema.Attribute{
+ "mongo_db_atlas_replica_set": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "additional_properties": schema.StringAttribute{
+ Optional: true,
+ Description: `Parsed as JSON.`,
+ Validators: []validator.String{
+ validators.IsValidJSON(),
+ },
+ },
+ "auth_source": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "admin"` + "\n" +
+ `The authentication source where the user information is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource for more details.`,
+ },
+ "connection_string": schema.StringAttribute{
+ Required: true,
+ Description: `The connection string of the cluster that you want to replicate.`,
+ },
+ "database": schema.StringAttribute{
+ Required: true,
+ Description: `The name of the MongoDB database that contains the collection(s) to replicate.`,
+ },
+ "password": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ Description: `The password associated with this username.`,
+ },
+ "username": schema.StringAttribute{
+ Required: true,
+ Description: `The username which is used to access the database.`,
+ },
+ },
+ Description: `MongoDB Atlas-hosted cluster configured as a replica set`,
+ },
+ "self_managed_replica_set": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{
+ "additional_properties": schema.StringAttribute{
+ Optional: true,
+ Description: `Parsed as JSON.`,
+ Validators: []validator.String{
+ validators.IsValidJSON(),
+ },
+ },
+ "auth_source": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `Default: "admin"` + "\n" +
+ `The authentication source where the user information is stored.`,
+ },
+ "connection_string": schema.StringAttribute{
+ Required: true,
+ Description: `The connection string of the cluster that you want to replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string for more information.`,
+ },
+ "database": schema.StringAttribute{
+ Required: true,
+ Description: `The name of the MongoDB database that contains the collection(s) to replicate.`,
+ },
+ "password": schema.StringAttribute{
+ Optional: true,
+ Sensitive: true,
+ Description: `The password associated with this username.`,
+ },
+ "username": schema.StringAttribute{
+ Optional: true,
+ Description: `The username which is used to access the database.`,
+ },
+ },
+ Description: `MongoDB self-hosted cluster configured as a replica set`,
+ },
+ },
+ Description: `Configures the MongoDB cluster type.`,
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "discover_sample_size": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 10000` + "\n" +
+ `The maximum number of documents to sample when attempting to discover the unique fields for a collection.`,
+ },
+ "initial_waiting_seconds": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 300` + "\n" +
+ `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds.`,
+ },
+ "queue_size": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 10000` + "\n" +
+ `The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.`,
+ },
+ },
+ },
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
+ "name": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
+ },
+ "secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
+ },
+ "source_id": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ },
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ },
+ "workspace_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ speakeasy_stringplanmodifier.SuppressDiff(),
+ },
+ Required: true,
+ },
+ },
+ }
+}
+
+func (r *SourceMongodbV2Resource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ client, ok := req.ProviderData.(*sdk.SDK)
+
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected Resource Configure Type",
+ fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = client
+}
+
+func (r *SourceMongodbV2Resource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data *SourceMongodbV2ResourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ request := data.ToCreateSDKType()
+ res, err := r.client.Sources.CreateSourceMongodbV2(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.SourceResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromCreateResponse(res.SourceResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *SourceMongodbV2Resource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var data *SourceMongodbV2ResourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ sourceID := data.SourceID.ValueString()
+ request := operations.GetSourceMongodbV2Request{
+ SourceID: sourceID,
+ }
+ res, err := r.client.Sources.GetSourceMongodbV2(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.SourceResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(res.SourceResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *SourceMongodbV2Resource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data *SourceMongodbV2ResourceModel
+ merge(ctx, req, resp, &data)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ sourceMongodbV2PutRequest := data.ToUpdateSDKType()
+ sourceID := data.SourceID.ValueString()
+ request := operations.PutSourceMongodbV2Request{
+ SourceMongodbV2PutRequest: sourceMongodbV2PutRequest,
+ SourceID: sourceID,
+ }
+ res, err := r.client.Sources.PutSourceMongodbV2(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ sourceId1 := data.SourceID.ValueString()
+ getRequest := operations.GetSourceMongodbV2Request{
+ SourceID: sourceId1,
+ }
+ getResponse, err := r.client.Sources.GetSourceMongodbV2(ctx, getRequest)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if getResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse))
+ return
+ }
+ if getResponse.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse))
+ return
+ }
+ if getResponse.SourceResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(getResponse.SourceResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *SourceMongodbV2Resource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var data *SourceMongodbV2ResourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ sourceID := data.SourceID.ValueString()
+ request := operations.DeleteSourceMongodbV2Request{
+ SourceID: sourceID,
+ }
+ res, err := r.client.Sources.DeleteSourceMongodbV2(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if fmt.Sprintf("%v", res.StatusCode)[0] != '2' {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+
+}
+
+func (r *SourceMongodbV2Resource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
+}
diff --git a/internal/provider/source_mongodbv2_resource_sdk.go b/internal/provider/source_mongodbv2_resource_sdk.go
new file mode 100644
index 000000000..093eb4233
--- /dev/null
+++ b/internal/provider/source_mongodbv2_resource_sdk.go
@@ -0,0 +1,256 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+// ToCreateSDKType converts the Terraform resource model into the SDK request
+// payload used to create a MongoDB v2 source. Exactly one cluster-type branch
+// (Atlas or self-managed) is mapped; if both are somehow set, the self-managed
+// configuration wins, matching the original evaluation order.
+func (r *SourceMongodbV2ResourceModel) ToCreateSDKType() *shared.SourceMongodbV2CreateRequest {
+	var databaseConfig shared.SourceMongodbV2ClusterType
+	if atlas := r.Configuration.DatabaseConfig.MongoDBAtlasReplicaSet; atlas != nil {
+		var extraProps interface{}
+		if !atlas.AdditionalProperties.IsUnknown() && !atlas.AdditionalProperties.IsNull() {
+			// Best-effort decode of the free-form JSON blob; on failure extraProps stays nil.
+			_ = json.Unmarshal([]byte(atlas.AdditionalProperties.ValueString()), &extraProps)
+		}
+		var authSource *string
+		if !atlas.AuthSource.IsUnknown() && !atlas.AuthSource.IsNull() {
+			v := atlas.AuthSource.ValueString()
+			authSource = &v
+		}
+		databaseConfig = shared.SourceMongodbV2ClusterType{
+			SourceMongodbV2MongoDBAtlasReplicaSet: &shared.SourceMongodbV2MongoDBAtlasReplicaSet{
+				AdditionalProperties: extraProps,
+				AuthSource:           authSource,
+				ConnectionString:     atlas.ConnectionString.ValueString(),
+				Database:             atlas.Database.ValueString(),
+				Password:             atlas.Password.ValueString(),
+				Username:             atlas.Username.ValueString(),
+			},
+		}
+	}
+	if selfManaged := r.Configuration.DatabaseConfig.SelfManagedReplicaSet; selfManaged != nil {
+		var extraProps interface{}
+		if !selfManaged.AdditionalProperties.IsUnknown() && !selfManaged.AdditionalProperties.IsNull() {
+			_ = json.Unmarshal([]byte(selfManaged.AdditionalProperties.ValueString()), &extraProps)
+		}
+		var authSource *string
+		if !selfManaged.AuthSource.IsUnknown() && !selfManaged.AuthSource.IsNull() {
+			v := selfManaged.AuthSource.ValueString()
+			authSource = &v
+		}
+		// Unlike the Atlas branch, password and username are optional here.
+		var password *string
+		if !selfManaged.Password.IsUnknown() && !selfManaged.Password.IsNull() {
+			v := selfManaged.Password.ValueString()
+			password = &v
+		}
+		var username *string
+		if !selfManaged.Username.IsUnknown() && !selfManaged.Username.IsNull() {
+			v := selfManaged.Username.ValueString()
+			username = &v
+		}
+		databaseConfig = shared.SourceMongodbV2ClusterType{
+			SourceMongodbV2SelfManagedReplicaSet: &shared.SourceMongodbV2SelfManagedReplicaSet{
+				AdditionalProperties: extraProps,
+				AuthSource:           authSource,
+				ConnectionString:     selfManaged.ConnectionString.ValueString(),
+				Database:             selfManaged.Database.ValueString(),
+				Password:             password,
+				Username:             username,
+			},
+		}
+	}
+	var discoverSampleSize *int64
+	if !r.Configuration.DiscoverSampleSize.IsUnknown() && !r.Configuration.DiscoverSampleSize.IsNull() {
+		v := r.Configuration.DiscoverSampleSize.ValueInt64()
+		discoverSampleSize = &v
+	}
+	var initialWaitingSeconds *int64
+	if !r.Configuration.InitialWaitingSeconds.IsUnknown() && !r.Configuration.InitialWaitingSeconds.IsNull() {
+		v := r.Configuration.InitialWaitingSeconds.ValueInt64()
+		initialWaitingSeconds = &v
+	}
+	var queueSize *int64
+	if !r.Configuration.QueueSize.IsUnknown() && !r.Configuration.QueueSize.IsNull() {
+		v := r.Configuration.QueueSize.ValueInt64()
+		queueSize = &v
+	}
+	var definitionID *string
+	if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+		v := r.DefinitionID.ValueString()
+		definitionID = &v
+	}
+	var secretID *string
+	if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
+		v := r.SecretID.ValueString()
+		secretID = &v
+	}
+	out := shared.SourceMongodbV2CreateRequest{
+		Configuration: shared.SourceMongodbV2{
+			DatabaseConfig:        databaseConfig,
+			DiscoverSampleSize:    discoverSampleSize,
+			InitialWaitingSeconds: initialWaitingSeconds,
+			QueueSize:             queueSize,
+		},
+		DefinitionID: definitionID,
+		Name:         r.Name.ValueString(),
+		SecretID:     secretID,
+		WorkspaceID:  r.WorkspaceID.ValueString(),
+	}
+	return &out
+}
+
+// ToGetSDKType reuses the create-request mapping; the get flow consumes the
+// same payload shape.
+func (r *SourceMongodbV2ResourceModel) ToGetSDKType() *shared.SourceMongodbV2CreateRequest {
+	return r.ToCreateSDKType()
+}
+
+// ToUpdateSDKType converts the Terraform resource model into the SDK payload
+// for updating (PUT) a MongoDB v2 source. The mapping mirrors ToCreateSDKType
+// but targets the update-specific shared types and omits definition/secret IDs.
+func (r *SourceMongodbV2ResourceModel) ToUpdateSDKType() *shared.SourceMongodbV2PutRequest {
+	var databaseConfig shared.ClusterType
+	if atlas := r.Configuration.DatabaseConfig.MongoDBAtlasReplicaSet; atlas != nil {
+		var extraProps interface{}
+		if !atlas.AdditionalProperties.IsUnknown() && !atlas.AdditionalProperties.IsNull() {
+			// Best-effort decode of the free-form JSON blob; on failure extraProps stays nil.
+			_ = json.Unmarshal([]byte(atlas.AdditionalProperties.ValueString()), &extraProps)
+		}
+		var authSource *string
+		if !atlas.AuthSource.IsUnknown() && !atlas.AuthSource.IsNull() {
+			v := atlas.AuthSource.ValueString()
+			authSource = &v
+		}
+		databaseConfig = shared.ClusterType{
+			MongoDBAtlasReplicaSet: &shared.MongoDBAtlasReplicaSet{
+				AdditionalProperties: extraProps,
+				AuthSource:           authSource,
+				ConnectionString:     atlas.ConnectionString.ValueString(),
+				Database:             atlas.Database.ValueString(),
+				Password:             atlas.Password.ValueString(),
+				Username:             atlas.Username.ValueString(),
+			},
+		}
+	}
+	if selfManaged := r.Configuration.DatabaseConfig.SelfManagedReplicaSet; selfManaged != nil {
+		var extraProps interface{}
+		if !selfManaged.AdditionalProperties.IsUnknown() && !selfManaged.AdditionalProperties.IsNull() {
+			_ = json.Unmarshal([]byte(selfManaged.AdditionalProperties.ValueString()), &extraProps)
+		}
+		var authSource *string
+		if !selfManaged.AuthSource.IsUnknown() && !selfManaged.AuthSource.IsNull() {
+			v := selfManaged.AuthSource.ValueString()
+			authSource = &v
+		}
+		// Unlike the Atlas branch, password and username are optional here.
+		var password *string
+		if !selfManaged.Password.IsUnknown() && !selfManaged.Password.IsNull() {
+			v := selfManaged.Password.ValueString()
+			password = &v
+		}
+		var username *string
+		if !selfManaged.Username.IsUnknown() && !selfManaged.Username.IsNull() {
+			v := selfManaged.Username.ValueString()
+			username = &v
+		}
+		databaseConfig = shared.ClusterType{
+			SelfManagedReplicaSet: &shared.SelfManagedReplicaSet{
+				AdditionalProperties: extraProps,
+				AuthSource:           authSource,
+				ConnectionString:     selfManaged.ConnectionString.ValueString(),
+				Database:             selfManaged.Database.ValueString(),
+				Password:             password,
+				Username:             username,
+			},
+		}
+	}
+	var discoverSampleSize *int64
+	if !r.Configuration.DiscoverSampleSize.IsUnknown() && !r.Configuration.DiscoverSampleSize.IsNull() {
+		v := r.Configuration.DiscoverSampleSize.ValueInt64()
+		discoverSampleSize = &v
+	}
+	var initialWaitingSeconds *int64
+	if !r.Configuration.InitialWaitingSeconds.IsUnknown() && !r.Configuration.InitialWaitingSeconds.IsNull() {
+		v := r.Configuration.InitialWaitingSeconds.ValueInt64()
+		initialWaitingSeconds = &v
+	}
+	var queueSize *int64
+	if !r.Configuration.QueueSize.IsUnknown() && !r.Configuration.QueueSize.IsNull() {
+		v := r.Configuration.QueueSize.ValueInt64()
+		queueSize = &v
+	}
+	out := shared.SourceMongodbV2PutRequest{
+		Configuration: shared.SourceMongodbV2Update{
+			DatabaseConfig:        databaseConfig,
+			DiscoverSampleSize:    discoverSampleSize,
+			InitialWaitingSeconds: initialWaitingSeconds,
+			QueueSize:             queueSize,
+		},
+		Name:        r.Name.ValueString(),
+		WorkspaceID: r.WorkspaceID.ValueString(),
+	}
+	return &out
+}
+
+// ToDeleteSDKType reuses the create-request mapping; the delete flow consumes
+// the same payload shape.
+func (r *SourceMongodbV2ResourceModel) ToDeleteSDKType() *shared.SourceMongodbV2CreateRequest {
+	return r.ToCreateSDKType()
+}
+
+// RefreshFromGetResponse copies the API representation of the source into the
+// Terraform resource model.
+func (r *SourceMongodbV2ResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+	r.Name, r.SourceID = types.StringValue(resp.Name), types.StringValue(resp.SourceID)
+	r.SourceType, r.WorkspaceID = types.StringValue(resp.SourceType), types.StringValue(resp.WorkspaceID)
+}
+
+// RefreshFromCreateResponse copies the API representation returned by the
+// create call into the model; the create and get responses share the same
+// SourceResponse shape, so the same four fields are refreshed.
+func (r *SourceMongodbV2ResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
+	r.Name = types.StringValue(resp.Name)
+	r.SourceID = types.StringValue(resp.SourceID)
+	r.SourceType = types.StringValue(resp.SourceType)
+	r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
diff --git a/internal/provider/source_mssql_data_source.go b/internal/provider/source_mssql_data_source.go
old mode 100755
new mode 100644
index fcf445595..211643065
--- a/internal/provider/source_mssql_data_source.go
+++ b/internal/provider/source_mssql_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceMssqlDataSource struct {
// SourceMssqlDataSourceModel describes the data model.
type SourceMssqlDataSourceModel struct {
- Configuration SourceMssql `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,426 +47,20 @@ func (r *SourceMssqlDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceMssql DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `The name of the database.`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The hostname of the database.`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `The password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `The port of the database.`,
- },
- "replication_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_mssql_update_method_read_changes_using_change_data_capture_cdc": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "data_to_sync": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Existing and New",
- "New Changes Only",
- ),
- },
- MarkdownDescription: `must be one of ["Existing and New", "New Changes Only"]` + "\n" +
- `What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.`,
- },
- "initial_waiting_seconds": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "snapshot_isolation": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Snapshot",
- "Read Committed",
- ),
- },
- MarkdownDescription: `must be one of ["Snapshot", "Read Committed"]` + "\n" +
- `Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.`,
- },
- },
- Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database.`,
- },
- "source_mssql_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "STANDARD",
- ),
- },
- Description: `must be one of ["STANDARD"]`,
- },
- },
- Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
- },
- "source_mssql_update_update_method_read_changes_using_change_data_capture_cdc": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "data_to_sync": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Existing and New",
- "New Changes Only",
- ),
- },
- MarkdownDescription: `must be one of ["Existing and New", "New Changes Only"]` + "\n" +
- `What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.`,
- },
- "initial_waiting_seconds": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "snapshot_isolation": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Snapshot",
- "Read Committed",
- ),
- },
- MarkdownDescription: `must be one of ["Snapshot", "Read Committed"]` + "\n" +
- `Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.`,
- },
- },
- Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database.`,
- },
- "source_mssql_update_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "STANDARD",
- ),
- },
- Description: `must be one of ["STANDARD"]`,
- },
- },
- Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Configures how data is extracted from the database.`,
- },
- "schemas": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The list of schemas to sync from. Defaults to user. Case sensitive.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mssql",
- ),
- },
- Description: `must be one of ["mssql"]`,
- },
- "ssl_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_mssql_ssl_method_encrypted_trust_server_certificate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssl_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_trust_server_certificate",
- ),
- },
- Description: `must be one of ["encrypted_trust_server_certificate"]`,
- },
- },
- Description: `Use the certificate provided by the server without verification. (For testing purposes only!)`,
- },
- "source_mssql_ssl_method_encrypted_verify_certificate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host_name_in_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Specifies the host name of the server. The value of this property must match the subject property of the certificate.`,
- },
- "ssl_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
- },
- Description: `Verify and use the certificate provided by the server.`,
- },
- "source_mssql_update_ssl_method_encrypted_trust_server_certificate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssl_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_trust_server_certificate",
- ),
- },
- Description: `must be one of ["encrypted_trust_server_certificate"]`,
- },
- },
- Description: `Use the certificate provided by the server without verification. (For testing purposes only!)`,
- },
- "source_mssql_update_ssl_method_encrypted_verify_certificate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "host_name_in_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Specifies the host name of the server. The value of this property must match the subject property of the certificate.`,
- },
- "ssl_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
- },
- Description: `Verify and use the certificate provided by the server.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The encryption method which is used when communicating with the database.`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_mssql_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mssql_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mssql_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mssql_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mssql_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mssql_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `The username which is used to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_mssql_data_source_sdk.go b/internal/provider/source_mssql_data_source_sdk.go
old mode 100755
new mode 100644
index 3beeb3ed5..294538716
--- a/internal/provider/source_mssql_data_source_sdk.go
+++ b/internal/provider/source_mssql_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
 func (r *SourceMssqlDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+	// Marshal error deliberately discarded — presumably Configuration round-trips
+	// to JSON without error since it arrived from the API; TODO confirm. On
+	// failure, configuration is stored as the empty string.
+	configurationResult, _ := json.Marshal(resp.Configuration)
+	r.Configuration = types.StringValue(string(configurationResult))
 	r.Name = types.StringValue(resp.Name)
 	r.SourceID = types.StringValue(resp.SourceID)
+	r.SourceType = types.StringValue(resp.SourceType)
 	r.WorkspaceID = types.StringValue(resp.WorkspaceID)
 }
diff --git a/internal/provider/source_mssql_resource.go b/internal/provider/source_mssql_resource.go
old mode 100755
new mode 100644
index 81820013d..7449d0641
--- a/internal/provider/source_mssql_resource.go
+++ b/internal/provider/source_mssql_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceMssqlResource struct {
// SourceMssqlResourceModel describes the resource data model.
type SourceMssqlResourceModel struct {
Configuration SourceMssql `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -69,6 +71,7 @@ func (r *SourceMssqlResource) Schema(ctx context.Context, req resource.SchemaReq
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The password associated with the username.`,
},
"port": schema.Int64Attribute{
@@ -78,334 +81,98 @@ func (r *SourceMssqlResource) Schema(ctx context.Context, req resource.SchemaReq
"replication_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_mssql_update_method_read_changes_using_change_data_capture_cdc": schema.SingleNestedAttribute{
+ "read_changes_using_change_data_capture_cdc": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"data_to_sync": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Existing and New",
- "New Changes Only",
- ),
- },
- MarkdownDescription: `must be one of ["Existing and New", "New Changes Only"]` + "\n" +
+ MarkdownDescription: `must be one of ["Existing and New", "New Changes Only"]; Default: "Existing and New"` + "\n" +
`What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.`,
- },
- "initial_waiting_seconds": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "snapshot_isolation": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Snapshot",
- "Read Committed",
- ),
- },
- MarkdownDescription: `must be one of ["Snapshot", "Read Committed"]` + "\n" +
- `Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.`,
- },
- },
- Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database.`,
- },
- "source_mssql_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "STANDARD",
- ),
- },
- Description: `must be one of ["STANDARD"]`,
- },
- },
- Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
- },
- "source_mssql_update_update_method_read_changes_using_change_data_capture_cdc": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "data_to_sync": schema.StringAttribute{
- Optional: true,
Validators: []validator.String{
stringvalidator.OneOf(
"Existing and New",
"New Changes Only",
),
},
- MarkdownDescription: `must be one of ["Existing and New", "New Changes Only"]` + "\n" +
- `What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.`,
},
"initial_waiting_seconds": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 300` + "\n" +
+ `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
},
"snapshot_isolation": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["Snapshot", "Read Committed"]; Default: "Snapshot"` + "\n" +
+ `Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.`,
Validators: []validator.String{
stringvalidator.OneOf(
"Snapshot",
"Read Committed",
),
},
- MarkdownDescription: `must be one of ["Snapshot", "Read Committed"]` + "\n" +
- `Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.`,
},
},
Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database.`,
},
- "source_mssql_update_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "STANDARD",
- ),
- },
- Description: `must be one of ["STANDARD"]`,
- },
- },
+ "scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
},
},
+ Description: `Configures how data is extracted from the database.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Configures how data is extracted from the database.`,
},
"schemas": schema.ListAttribute{
Optional: true,
ElementType: types.StringType,
Description: `The list of schemas to sync from. Defaults to user. Case sensitive.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mssql",
- ),
- },
- Description: `must be one of ["mssql"]`,
- },
"ssl_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_mssql_ssl_method_encrypted_trust_server_certificate": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssl_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_trust_server_certificate",
- ),
- },
- Description: `must be one of ["encrypted_trust_server_certificate"]`,
- },
- },
- Description: `Use the certificate provided by the server without verification. (For testing purposes only!)`,
- },
- "source_mssql_ssl_method_encrypted_verify_certificate": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "host_name_in_certificate": schema.StringAttribute{
- Optional: true,
- Description: `Specifies the host name of the server. The value of this property must match the subject property of the certificate.`,
- },
- "ssl_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
- },
- Description: `Verify and use the certificate provided by the server.`,
- },
- "source_mssql_update_ssl_method_encrypted_trust_server_certificate": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssl_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_trust_server_certificate",
- ),
- },
- Description: `must be one of ["encrypted_trust_server_certificate"]`,
- },
- },
+ "encrypted_trust_server_certificate": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Use the certificate provided by the server without verification. (For testing purposes only!)`,
},
- "source_mssql_update_ssl_method_encrypted_verify_certificate": schema.SingleNestedAttribute{
+ "encrypted_verify_certificate": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"host_name_in_certificate": schema.StringAttribute{
Optional: true,
Description: `Specifies the host name of the server. The value of this property must match the subject property of the certificate.`,
},
- "ssl_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
},
Description: `Verify and use the certificate provided by the server.`,
},
},
+ Description: `The encryption method which is used when communicating with the database.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `The encryption method which is used when communicating with the database.`,
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_mssql_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_mssql_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mssql_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mssql_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mssql_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -413,35 +180,28 @@ func (r *SourceMssqlResource) Schema(ctx context.Context, req resource.SchemaReq
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_mssql_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -451,10 +211,10 @@ func (r *SourceMssqlResource) Schema(ctx context.Context, req resource.SchemaReq
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -462,13 +222,24 @@ func (r *SourceMssqlResource) Schema(ctx context.Context, req resource.SchemaReq
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -532,7 +303,7 @@ func (r *SourceMssqlResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMssql(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -708,5 +479,5 @@ func (r *SourceMssqlResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceMssqlResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_mssql_resource_sdk.go b/internal/provider/source_mssql_resource_sdk.go
old mode 100755
new mode 100644
index 4dfb51698..53f7eb3d1
--- a/internal/provider/source_mssql_resource_sdk.go
+++ b/internal/provider/source_mssql_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -25,49 +25,44 @@ func (r *SourceMssqlResourceModel) ToCreateSDKType() *shared.SourceMssqlCreateRe
port := r.Configuration.Port.ValueInt64()
var replicationMethod *shared.SourceMssqlUpdateMethod
if r.Configuration.ReplicationMethod != nil {
- var sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC *shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC
- if r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil {
- dataToSync := new(shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync)
- if !r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.IsNull() {
- *dataToSync = shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync(r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.ValueString())
+ var sourceMssqlReadChangesUsingChangeDataCaptureCDC *shared.SourceMssqlReadChangesUsingChangeDataCaptureCDC
+ if r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC != nil {
+ dataToSync := new(shared.SourceMssqlDataToSync)
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.DataToSync.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.DataToSync.IsNull() {
+ *dataToSync = shared.SourceMssqlDataToSync(r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.DataToSync.ValueString())
} else {
dataToSync = nil
}
initialWaitingSeconds := new(int64)
- if !r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsNull() {
- *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.ValueInt64()
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsNull() {
+ *initialWaitingSeconds = r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.ValueInt64()
} else {
initialWaitingSeconds = nil
}
- method := shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod(r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.Method.ValueString())
- snapshotIsolation := new(shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel)
- if !r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsNull() {
- *snapshotIsolation = shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel(r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.ValueString())
+ snapshotIsolation := new(shared.SourceMssqlInitialSnapshotIsolationLevel)
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsNull() {
+ *snapshotIsolation = shared.SourceMssqlInitialSnapshotIsolationLevel(r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.ValueString())
} else {
snapshotIsolation = nil
}
- sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC = &shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC{
+ sourceMssqlReadChangesUsingChangeDataCaptureCDC = &shared.SourceMssqlReadChangesUsingChangeDataCaptureCDC{
DataToSync: dataToSync,
InitialWaitingSeconds: initialWaitingSeconds,
- Method: method,
SnapshotIsolation: snapshotIsolation,
}
}
- if sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil {
+ if sourceMssqlReadChangesUsingChangeDataCaptureCDC != nil {
replicationMethod = &shared.SourceMssqlUpdateMethod{
- SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC: sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC,
+ SourceMssqlReadChangesUsingChangeDataCaptureCDC: sourceMssqlReadChangesUsingChangeDataCaptureCDC,
}
}
- var sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor *shared.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor
- if r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor != nil {
- method1 := shared.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod(r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor.Method.ValueString())
- sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor = &shared.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor{
- Method: method1,
- }
+ var sourceMssqlScanChangesWithUserDefinedCursor *shared.SourceMssqlScanChangesWithUserDefinedCursor
+ if r.Configuration.ReplicationMethod.ScanChangesWithUserDefinedCursor != nil {
+ sourceMssqlScanChangesWithUserDefinedCursor = &shared.SourceMssqlScanChangesWithUserDefinedCursor{}
}
- if sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor != nil {
+ if sourceMssqlScanChangesWithUserDefinedCursor != nil {
replicationMethod = &shared.SourceMssqlUpdateMethod{
- SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor: sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor,
+ SourceMssqlScanChangesWithUserDefinedCursor: sourceMssqlScanChangesWithUserDefinedCursor,
}
}
}
@@ -75,93 +70,90 @@ func (r *SourceMssqlResourceModel) ToCreateSDKType() *shared.SourceMssqlCreateRe
for _, schemasItem := range r.Configuration.Schemas {
schemas = append(schemas, schemasItem.ValueString())
}
- sourceType := shared.SourceMssqlMssql(r.Configuration.SourceType.ValueString())
var sslMethod *shared.SourceMssqlSSLMethod
if r.Configuration.SslMethod != nil {
- var sourceMssqlSSLMethodEncryptedTrustServerCertificate *shared.SourceMssqlSSLMethodEncryptedTrustServerCertificate
- if r.Configuration.SslMethod.SourceMssqlSSLMethodEncryptedTrustServerCertificate != nil {
- sslMethod1 := shared.SourceMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod(r.Configuration.SslMethod.SourceMssqlSSLMethodEncryptedTrustServerCertificate.SslMethod.ValueString())
- sourceMssqlSSLMethodEncryptedTrustServerCertificate = &shared.SourceMssqlSSLMethodEncryptedTrustServerCertificate{
- SslMethod: sslMethod1,
- }
+ var sourceMssqlEncryptedTrustServerCertificate *shared.SourceMssqlEncryptedTrustServerCertificate
+ if r.Configuration.SslMethod.EncryptedTrustServerCertificate != nil {
+ sourceMssqlEncryptedTrustServerCertificate = &shared.SourceMssqlEncryptedTrustServerCertificate{}
}
- if sourceMssqlSSLMethodEncryptedTrustServerCertificate != nil {
+ if sourceMssqlEncryptedTrustServerCertificate != nil {
sslMethod = &shared.SourceMssqlSSLMethod{
- SourceMssqlSSLMethodEncryptedTrustServerCertificate: sourceMssqlSSLMethodEncryptedTrustServerCertificate,
+ SourceMssqlEncryptedTrustServerCertificate: sourceMssqlEncryptedTrustServerCertificate,
}
}
- var sourceMssqlSSLMethodEncryptedVerifyCertificate *shared.SourceMssqlSSLMethodEncryptedVerifyCertificate
- if r.Configuration.SslMethod.SourceMssqlSSLMethodEncryptedVerifyCertificate != nil {
+ var sourceMssqlEncryptedVerifyCertificate *shared.SourceMssqlEncryptedVerifyCertificate
+ if r.Configuration.SslMethod.EncryptedVerifyCertificate != nil {
hostNameInCertificate := new(string)
- if !r.Configuration.SslMethod.SourceMssqlSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.IsUnknown() && !r.Configuration.SslMethod.SourceMssqlSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.IsNull() {
- *hostNameInCertificate = r.Configuration.SslMethod.SourceMssqlSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.ValueString()
+ if !r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.IsUnknown() && !r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.IsNull() {
+ *hostNameInCertificate = r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.ValueString()
} else {
hostNameInCertificate = nil
}
- sslMethod2 := shared.SourceMssqlSSLMethodEncryptedVerifyCertificateSSLMethod(r.Configuration.SslMethod.SourceMssqlSSLMethodEncryptedVerifyCertificate.SslMethod.ValueString())
- sourceMssqlSSLMethodEncryptedVerifyCertificate = &shared.SourceMssqlSSLMethodEncryptedVerifyCertificate{
+ sourceMssqlEncryptedVerifyCertificate = &shared.SourceMssqlEncryptedVerifyCertificate{
HostNameInCertificate: hostNameInCertificate,
- SslMethod: sslMethod2,
}
}
- if sourceMssqlSSLMethodEncryptedVerifyCertificate != nil {
+ if sourceMssqlEncryptedVerifyCertificate != nil {
sslMethod = &shared.SourceMssqlSSLMethod{
- SourceMssqlSSLMethodEncryptedVerifyCertificate: sourceMssqlSSLMethodEncryptedVerifyCertificate,
+ SourceMssqlEncryptedVerifyCertificate: sourceMssqlEncryptedVerifyCertificate,
}
}
}
var tunnelMethod *shared.SourceMssqlSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourceMssqlSSHTunnelMethodNoTunnel *shared.SourceMssqlSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourceMssqlSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourceMssqlSSHTunnelMethodNoTunnel = &shared.SourceMssqlSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourceMssqlNoTunnel *shared.SourceMssqlNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourceMssqlNoTunnel = &shared.SourceMssqlNoTunnel{}
}
- if sourceMssqlSSHTunnelMethodNoTunnel != nil {
+ if sourceMssqlNoTunnel != nil {
tunnelMethod = &shared.SourceMssqlSSHTunnelMethod{
- SourceMssqlSSHTunnelMethodNoTunnel: sourceMssqlSSHTunnelMethodNoTunnel,
+ SourceMssqlNoTunnel: sourceMssqlNoTunnel,
}
}
- var sourceMssqlSSHTunnelMethodSSHKeyAuthentication *shared.SourceMssqlSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourceMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourceMssqlSSHTunnelMethodSSHKeyAuthentication = &shared.SourceMssqlSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourceMssqlSSHKeyAuthentication *shared.SourceMssqlSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourceMssqlSSHKeyAuthentication = &shared.SourceMssqlSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourceMssqlSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourceMssqlSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourceMssqlSSHTunnelMethod{
- SourceMssqlSSHTunnelMethodSSHKeyAuthentication: sourceMssqlSSHTunnelMethodSSHKeyAuthentication,
+ SourceMssqlSSHKeyAuthentication: sourceMssqlSSHKeyAuthentication,
}
}
- var sourceMssqlSSHTunnelMethodPasswordAuthentication *shared.SourceMssqlSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourceMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourceMssqlSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourceMssqlSSHTunnelMethodPasswordAuthentication = &shared.SourceMssqlSSHTunnelMethodPasswordAuthentication{
+ var sourceMssqlPasswordAuthentication *shared.SourceMssqlPasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourceMssqlPasswordAuthentication = &shared.SourceMssqlPasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourceMssqlSSHTunnelMethodPasswordAuthentication != nil {
+ if sourceMssqlPasswordAuthentication != nil {
tunnelMethod = &shared.SourceMssqlSSHTunnelMethod{
- SourceMssqlSSHTunnelMethodPasswordAuthentication: sourceMssqlSSHTunnelMethodPasswordAuthentication,
+ SourceMssqlPasswordAuthentication: sourceMssqlPasswordAuthentication,
}
}
}
@@ -174,11 +166,16 @@ func (r *SourceMssqlResourceModel) ToCreateSDKType() *shared.SourceMssqlCreateRe
Port: port,
ReplicationMethod: replicationMethod,
Schemas: schemas,
- SourceType: sourceType,
SslMethod: sslMethod,
TunnelMethod: tunnelMethod,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -189,6 +186,7 @@ func (r *SourceMssqlResourceModel) ToCreateSDKType() *shared.SourceMssqlCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMssqlCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -217,51 +215,46 @@ func (r *SourceMssqlResourceModel) ToUpdateSDKType() *shared.SourceMssqlPutReque
password = nil
}
port := r.Configuration.Port.ValueInt64()
- var replicationMethod *shared.SourceMssqlUpdateUpdateMethod
+ var replicationMethod *shared.UpdateMethod
if r.Configuration.ReplicationMethod != nil {
- var sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC *shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC
- if r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil {
- dataToSync := new(shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync)
- if !r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.IsNull() {
- *dataToSync = shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync(r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.ValueString())
+ var readChangesUsingChangeDataCaptureCDC *shared.ReadChangesUsingChangeDataCaptureCDC
+ if r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC != nil {
+ dataToSync := new(shared.DataToSync)
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.DataToSync.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.DataToSync.IsNull() {
+ *dataToSync = shared.DataToSync(r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.DataToSync.ValueString())
} else {
dataToSync = nil
}
initialWaitingSeconds := new(int64)
- if !r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsNull() {
- *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.ValueInt64()
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsNull() {
+ *initialWaitingSeconds = r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.ValueInt64()
} else {
initialWaitingSeconds = nil
}
- method := shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod(r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.Method.ValueString())
- snapshotIsolation := new(shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel)
- if !r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsNull() {
- *snapshotIsolation = shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel(r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.ValueString())
+ snapshotIsolation := new(shared.InitialSnapshotIsolationLevel)
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsNull() {
+ *snapshotIsolation = shared.InitialSnapshotIsolationLevel(r.Configuration.ReplicationMethod.ReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.ValueString())
} else {
snapshotIsolation = nil
}
- sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC = &shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC{
+ readChangesUsingChangeDataCaptureCDC = &shared.ReadChangesUsingChangeDataCaptureCDC{
DataToSync: dataToSync,
InitialWaitingSeconds: initialWaitingSeconds,
- Method: method,
SnapshotIsolation: snapshotIsolation,
}
}
- if sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil {
- replicationMethod = &shared.SourceMssqlUpdateUpdateMethod{
- SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC: sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC,
+ if readChangesUsingChangeDataCaptureCDC != nil {
+ replicationMethod = &shared.UpdateMethod{
+ ReadChangesUsingChangeDataCaptureCDC: readChangesUsingChangeDataCaptureCDC,
}
}
- var sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor *shared.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor
- if r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil {
- method1 := shared.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod(r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor.Method.ValueString())
- sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor = &shared.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor{
- Method: method1,
- }
+ var scanChangesWithUserDefinedCursor *shared.ScanChangesWithUserDefinedCursor
+ if r.Configuration.ReplicationMethod.ScanChangesWithUserDefinedCursor != nil {
+ scanChangesWithUserDefinedCursor = &shared.ScanChangesWithUserDefinedCursor{}
}
- if sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil {
- replicationMethod = &shared.SourceMssqlUpdateUpdateMethod{
- SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor: sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor,
+ if scanChangesWithUserDefinedCursor != nil {
+ replicationMethod = &shared.UpdateMethod{
+ ScanChangesWithUserDefinedCursor: scanChangesWithUserDefinedCursor,
}
}
}
@@ -271,90 +264,88 @@ func (r *SourceMssqlResourceModel) ToUpdateSDKType() *shared.SourceMssqlPutReque
}
var sslMethod *shared.SourceMssqlUpdateSSLMethod
if r.Configuration.SslMethod != nil {
- var sourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate *shared.SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate
- if r.Configuration.SslMethod.SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate != nil {
- sslMethod1 := shared.SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod(r.Configuration.SslMethod.SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate.SslMethod.ValueString())
- sourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate = &shared.SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate{
- SslMethod: sslMethod1,
- }
+ var sourceMssqlUpdateEncryptedTrustServerCertificate *shared.SourceMssqlUpdateEncryptedTrustServerCertificate
+ if r.Configuration.SslMethod.EncryptedTrustServerCertificate != nil {
+ sourceMssqlUpdateEncryptedTrustServerCertificate = &shared.SourceMssqlUpdateEncryptedTrustServerCertificate{}
}
- if sourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate != nil {
+ if sourceMssqlUpdateEncryptedTrustServerCertificate != nil {
sslMethod = &shared.SourceMssqlUpdateSSLMethod{
- SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate: sourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate,
+ SourceMssqlUpdateEncryptedTrustServerCertificate: sourceMssqlUpdateEncryptedTrustServerCertificate,
}
}
- var sourceMssqlUpdateSSLMethodEncryptedVerifyCertificate *shared.SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate
- if r.Configuration.SslMethod.SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate != nil {
+ var sourceMssqlUpdateEncryptedVerifyCertificate *shared.SourceMssqlUpdateEncryptedVerifyCertificate
+ if r.Configuration.SslMethod.EncryptedVerifyCertificate != nil {
hostNameInCertificate := new(string)
- if !r.Configuration.SslMethod.SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.IsUnknown() && !r.Configuration.SslMethod.SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.IsNull() {
- *hostNameInCertificate = r.Configuration.SslMethod.SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate.HostNameInCertificate.ValueString()
+ if !r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.IsUnknown() && !r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.IsNull() {
+ *hostNameInCertificate = r.Configuration.SslMethod.EncryptedVerifyCertificate.HostNameInCertificate.ValueString()
} else {
hostNameInCertificate = nil
}
- sslMethod2 := shared.SourceMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod(r.Configuration.SslMethod.SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate.SslMethod.ValueString())
- sourceMssqlUpdateSSLMethodEncryptedVerifyCertificate = &shared.SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate{
+ sourceMssqlUpdateEncryptedVerifyCertificate = &shared.SourceMssqlUpdateEncryptedVerifyCertificate{
HostNameInCertificate: hostNameInCertificate,
- SslMethod: sslMethod2,
}
}
- if sourceMssqlUpdateSSLMethodEncryptedVerifyCertificate != nil {
+ if sourceMssqlUpdateEncryptedVerifyCertificate != nil {
sslMethod = &shared.SourceMssqlUpdateSSLMethod{
- SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate: sourceMssqlUpdateSSLMethodEncryptedVerifyCertificate,
+ SourceMssqlUpdateEncryptedVerifyCertificate: sourceMssqlUpdateEncryptedVerifyCertificate,
}
}
}
var tunnelMethod *shared.SourceMssqlUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourceMssqlUpdateSSHTunnelMethodNoTunnel *shared.SourceMssqlUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourceMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourceMssqlUpdateSSHTunnelMethodNoTunnel = &shared.SourceMssqlUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourceMssqlUpdateNoTunnel *shared.SourceMssqlUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourceMssqlUpdateNoTunnel = &shared.SourceMssqlUpdateNoTunnel{}
}
- if sourceMssqlUpdateSSHTunnelMethodNoTunnel != nil {
+ if sourceMssqlUpdateNoTunnel != nil {
tunnelMethod = &shared.SourceMssqlUpdateSSHTunnelMethod{
- SourceMssqlUpdateSSHTunnelMethodNoTunnel: sourceMssqlUpdateSSHTunnelMethodNoTunnel,
+ SourceMssqlUpdateNoTunnel: sourceMssqlUpdateNoTunnel,
}
}
- var sourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication *shared.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourceMssqlUpdateSSHKeyAuthentication *shared.SourceMssqlUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourceMssqlUpdateSSHKeyAuthentication = &shared.SourceMssqlUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourceMssqlUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourceMssqlUpdateSSHTunnelMethod{
- SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication: sourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication,
+ SourceMssqlUpdateSSHKeyAuthentication: sourceMssqlUpdateSSHKeyAuthentication,
}
}
- var sourceMssqlUpdateSSHTunnelMethodPasswordAuthentication *shared.SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourceMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourceMssqlUpdateSSHTunnelMethodPasswordAuthentication = &shared.SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication{
+ var sourceMssqlUpdatePasswordAuthentication *shared.SourceMssqlUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourceMssqlUpdatePasswordAuthentication = &shared.SourceMssqlUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourceMssqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if sourceMssqlUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.SourceMssqlUpdateSSHTunnelMethod{
- SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication: sourceMssqlUpdateSSHTunnelMethodPasswordAuthentication,
+ SourceMssqlUpdatePasswordAuthentication: sourceMssqlUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/source_myhours_data_source.go b/internal/provider/source_myhours_data_source.go
old mode 100755
new mode 100644
index 3d371a099..7f0c37238
--- a/internal/provider/source_myhours_data_source.go
+++ b/internal/provider/source_myhours_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceMyHoursDataSource struct {
// SourceMyHoursDataSourceModel describes the data model.
type SourceMyHoursDataSourceModel struct {
- Configuration SourceMyHours `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,46 +47,20 @@ func (r *SourceMyHoursDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceMyHours DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "email": schema.StringAttribute{
- Computed: true,
- Description: `Your My Hours username`,
- },
- "logs_batch_size": schema.Int64Attribute{
- Computed: true,
- Description: `Pagination size used for retrieving logs in days`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `The password associated to the username`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "my-hours",
- ),
- },
- Description: `must be one of ["my-hours"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `Start date for collecting time logs`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_myhours_data_source_sdk.go b/internal/provider/source_myhours_data_source_sdk.go
old mode 100755
new mode 100644
index 4fd86ffbb..d86783664
--- a/internal/provider/source_myhours_data_source_sdk.go
+++ b/internal/provider/source_myhours_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMyHoursDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_myhours_resource.go b/internal/provider/source_myhours_resource.go
old mode 100755
new mode 100644
index 45b84f1c5..3bc55ea98
--- a/internal/provider/source_myhours_resource.go
+++ b/internal/provider/source_myhours_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceMyHoursResource struct {
// SourceMyHoursResourceModel describes the resource data model.
type SourceMyHoursResourceModel struct {
Configuration SourceMyHours `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -59,35 +59,39 @@ func (r *SourceMyHoursResource) Schema(ctx context.Context, req resource.SchemaR
Description: `Your My Hours username`,
},
"logs_batch_size": schema.Int64Attribute{
- Optional: true,
- Description: `Pagination size used for retrieving logs in days`,
+ Optional: true,
+ MarkdownDescription: `Default: 30` + "\n" +
+ `Pagination size used for retrieving logs in days`,
},
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The password associated to the username`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "my-hours",
- ),
- },
- Description: `must be one of ["my-hours"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `Start date for collecting time logs`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -151,7 +155,7 @@ func (r *SourceMyHoursResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMyHours(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -327,5 +331,5 @@ func (r *SourceMyHoursResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceMyHoursResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_myhours_resource_sdk.go b/internal/provider/source_myhours_resource_sdk.go
old mode 100755
new mode 100644
index 3ecc5dfdd..cf6a01c2c
--- a/internal/provider/source_myhours_resource_sdk.go
+++ b/internal/provider/source_myhours_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -16,15 +16,19 @@ func (r *SourceMyHoursResourceModel) ToCreateSDKType() *shared.SourceMyHoursCrea
logsBatchSize = nil
}
password := r.Configuration.Password.ValueString()
- sourceType := shared.SourceMyHoursMyHours(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
configuration := shared.SourceMyHours{
Email: email,
LogsBatchSize: logsBatchSize,
Password: password,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -35,6 +39,7 @@ func (r *SourceMyHoursResourceModel) ToCreateSDKType() *shared.SourceMyHoursCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMyHoursCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_mysql_data_source.go b/internal/provider/source_mysql_data_source.go
old mode 100755
new mode 100644
index f4c2fad0f..d731c49c8
--- a/internal/provider/source_mysql_data_source.go
+++ b/internal/provider/source_mysql_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceMysqlDataSource struct {
// SourceMysqlDataSourceModel describes the data model.
type SourceMysqlDataSourceModel struct {
- Configuration SourceMysql `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,501 +47,20 @@ func (r *SourceMysqlDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceMysql DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `The database name.`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The host name of the database.`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `The password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `The port to connect to.`,
- },
- "replication_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_mysql_update_method_read_changes_using_binary_log_cdc": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "initial_waiting_seconds": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "server_time_zone": schema.StringAttribute{
- Computed: true,
- Description: `Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.`,
- },
- },
- Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database.`,
- },
- "source_mysql_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "STANDARD",
- ),
- },
- Description: `must be one of ["STANDARD"]`,
- },
- },
- Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
- },
- "source_mysql_update_update_method_read_changes_using_binary_log_cdc": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "initial_waiting_seconds": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "server_time_zone": schema.StringAttribute{
- Computed: true,
- Description: `Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.`,
- },
- },
- Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database.`,
- },
- "source_mysql_update_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "STANDARD",
- ),
- },
- Description: `must be one of ["STANDARD"]`,
- },
- },
- Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Configures how data is extracted from the database.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mysql",
- ),
- },
- Description: `must be one of ["mysql"]`,
- },
- "ssl_mode": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_mysql_ssl_modes_preferred": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "preferred",
- ),
- },
- Description: `must be one of ["preferred"]`,
- },
- },
- Description: `Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection.`,
- },
- "source_mysql_ssl_modes_required": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "required",
- ),
- },
- Description: `must be one of ["required"]`,
- },
- },
- Description: `Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified.`,
- },
- "source_mysql_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify_ca",
- ),
- },
- Description: `must be one of ["verify_ca"]`,
- },
- },
- Description: `Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match.`,
- },
- "source_mysql_ssl_modes_verify_identity": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify_identity",
- ),
- },
- Description: `must be one of ["verify_identity"]`,
- },
- },
- Description: `Always connect with SSL. Verify both CA and Hostname.`,
- },
- "source_mysql_update_ssl_modes_preferred": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "preferred",
- ),
- },
- Description: `must be one of ["preferred"]`,
- },
- },
- Description: `Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection.`,
- },
- "source_mysql_update_ssl_modes_required": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "required",
- ),
- },
- Description: `must be one of ["required"]`,
- },
- },
- Description: `Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified.`,
- },
- "source_mysql_update_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify_ca",
- ),
- },
- Description: `must be one of ["verify_ca"]`,
- },
- },
- Description: `Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match.`,
- },
- "source_mysql_update_ssl_modes_verify_identity": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify_identity",
- ),
- },
- Description: `must be one of ["verify_identity"]`,
- },
- },
- Description: `Always connect with SSL. Verify both CA and Hostname.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `SSL connection modes. Read more in the docs.`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_mysql_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mysql_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mysql_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mysql_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mysql_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mysql_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `The username which is used to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_mysql_data_source_sdk.go b/internal/provider/source_mysql_data_source_sdk.go
old mode 100755
new mode 100644
index 120cf0d7b..49c8d0e9c
--- a/internal/provider/source_mysql_data_source_sdk.go
+++ b/internal/provider/source_mysql_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceMysqlDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_mysql_resource.go b/internal/provider/source_mysql_resource.go
old mode 100755
new mode 100644
index a0ae28168..c401a6b5e
--- a/internal/provider/source_mysql_resource.go
+++ b/internal/provider/source_mysql_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceMysqlResource struct {
// SourceMysqlResourceModel describes the resource data model.
type SourceMysqlResourceModel struct {
Configuration SourceMysql `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -69,68 +70,24 @@ func (r *SourceMysqlResource) Schema(ctx context.Context, req resource.SchemaReq
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The password associated with the username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `The port to connect to.`,
+ Optional: true,
+ MarkdownDescription: `Default: 3306` + "\n" +
+ `The port to connect to.`,
},
"replication_method": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_mysql_update_method_read_changes_using_binary_log_cdc": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "initial_waiting_seconds": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "server_time_zone": schema.StringAttribute{
- Optional: true,
- Description: `Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.`,
- },
- },
- Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database.`,
- },
- "source_mysql_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "STANDARD",
- ),
- },
- Description: `must be one of ["STANDARD"]`,
- },
- },
- Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
- },
- "source_mysql_update_update_method_read_changes_using_binary_log_cdc": schema.SingleNestedAttribute{
+ "read_changes_using_binary_log_cdc": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"initial_waiting_seconds": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 300` + "\n" +
+ `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
},
"server_time_zone": schema.StringAttribute{
Optional: true,
@@ -139,70 +96,31 @@ func (r *SourceMysqlResource) Schema(ctx context.Context, req resource.SchemaReq
},
Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database.`,
},
- "source_mysql_update_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "STANDARD",
- ),
- },
- Description: `must be one of ["STANDARD"]`,
- },
- },
+ "scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
},
},
+ Description: `Configures how data is extracted from the database.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Configures how data is extracted from the database.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "mysql",
- ),
- },
- Description: `must be one of ["mysql"]`,
},
"ssl_mode": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_mysql_ssl_modes_preferred": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "preferred",
- ),
- },
- Description: `must be one of ["preferred"]`,
- },
- },
+ "preferred": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection.`,
},
- "source_mysql_ssl_modes_required": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "required",
- ),
- },
- Description: `must be one of ["required"]`,
- },
- },
+ "required": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified.`,
},
- "source_mysql_ssl_modes_verify_ca": schema.SingleNestedAttribute{
+ "verify_ca": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ca_certificate": schema.StringAttribute{
@@ -215,25 +133,18 @@ func (r *SourceMysqlResource) Schema(ctx context.Context, req resource.SchemaReq
},
"client_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)`,
},
"client_key_password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
},
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify_ca",
- ),
- },
- Description: `must be one of ["verify_ca"]`,
- },
},
Description: `Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match.`,
},
- "source_mysql_ssl_modes_verify_identity": schema.SingleNestedAttribute{
+ "verify_identity": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ca_certificate": schema.StringAttribute{
@@ -246,241 +157,42 @@ func (r *SourceMysqlResource) Schema(ctx context.Context, req resource.SchemaReq
},
"client_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)`,
},
"client_key_password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
},
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify_identity",
- ),
- },
- Description: `must be one of ["verify_identity"]`,
- },
- },
- Description: `Always connect with SSL. Verify both CA and Hostname.`,
- },
- "source_mysql_update_ssl_modes_preferred": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "preferred",
- ),
- },
- Description: `must be one of ["preferred"]`,
- },
- },
- Description: `Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection.`,
- },
- "source_mysql_update_ssl_modes_required": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "required",
- ),
- },
- Description: `must be one of ["required"]`,
- },
- },
- Description: `Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified.`,
- },
- "source_mysql_update_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Required: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Optional: true,
- Description: `Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)`,
- },
- "client_key": schema.StringAttribute{
- Optional: true,
- Description: `Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)`,
- },
- "client_key_password": schema.StringAttribute{
- Optional: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify_ca",
- ),
- },
- Description: `must be one of ["verify_ca"]`,
- },
- },
- Description: `Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match.`,
- },
- "source_mysql_update_ssl_modes_verify_identity": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Required: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Optional: true,
- Description: `Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)`,
- },
- "client_key": schema.StringAttribute{
- Optional: true,
- Description: `Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)`,
- },
- "client_key_password": schema.StringAttribute{
- Optional: true,
- Description: `Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify_identity",
- ),
- },
- Description: `must be one of ["verify_identity"]`,
- },
},
Description: `Always connect with SSL. Verify both CA and Hostname.`,
},
},
+ Description: `SSL connection modes. Read more in the docs.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `SSL connection modes. Read more in the docs.`,
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_mysql_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mysql_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_mysql_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mysql_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_mysql_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -488,35 +200,28 @@ func (r *SourceMysqlResource) Schema(ctx context.Context, req resource.SchemaReq
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_mysql_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -526,10 +231,10 @@ func (r *SourceMysqlResource) Schema(ctx context.Context, req resource.SchemaReq
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -537,13 +242,24 @@ func (r *SourceMysqlResource) Schema(ctx context.Context, req resource.SchemaReq
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -607,7 +323,7 @@ func (r *SourceMysqlResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceMysql(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -783,5 +499,5 @@ func (r *SourceMysqlResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceMysqlResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_mysql_resource_sdk.go b/internal/provider/source_mysql_resource_sdk.go
old mode 100755
new mode 100644
index 25c466314..1f541fc31
--- a/internal/provider/source_mysql_resource_sdk.go
+++ b/internal/provider/source_mysql_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -22,196 +22,188 @@ func (r *SourceMysqlResourceModel) ToCreateSDKType() *shared.SourceMysqlCreateRe
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var replicationMethod shared.SourceMysqlUpdateMethod
- var sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC *shared.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC
- if r.Configuration.ReplicationMethod.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC != nil {
+ var sourceMysqlReadChangesUsingBinaryLogCDC *shared.SourceMysqlReadChangesUsingBinaryLogCDC
+ if r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC != nil {
initialWaitingSeconds := new(int64)
- if !r.Configuration.ReplicationMethod.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.IsNull() {
- *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.ValueInt64()
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.IsNull() {
+ *initialWaitingSeconds = r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.ValueInt64()
} else {
initialWaitingSeconds = nil
}
- method := shared.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDCMethod(r.Configuration.ReplicationMethod.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC.Method.ValueString())
serverTimeZone := new(string)
- if !r.Configuration.ReplicationMethod.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC.ServerTimeZone.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC.ServerTimeZone.IsNull() {
- *serverTimeZone = r.Configuration.ReplicationMethod.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC.ServerTimeZone.ValueString()
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.ServerTimeZone.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.ServerTimeZone.IsNull() {
+ *serverTimeZone = r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.ServerTimeZone.ValueString()
} else {
serverTimeZone = nil
}
- sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC = &shared.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC{
+ sourceMysqlReadChangesUsingBinaryLogCDC = &shared.SourceMysqlReadChangesUsingBinaryLogCDC{
InitialWaitingSeconds: initialWaitingSeconds,
- Method: method,
ServerTimeZone: serverTimeZone,
}
}
- if sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC != nil {
+ if sourceMysqlReadChangesUsingBinaryLogCDC != nil {
replicationMethod = shared.SourceMysqlUpdateMethod{
- SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC: sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC,
+ SourceMysqlReadChangesUsingBinaryLogCDC: sourceMysqlReadChangesUsingBinaryLogCDC,
}
}
- var sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor *shared.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor
- if r.Configuration.ReplicationMethod.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor != nil {
- method1 := shared.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursorMethod(r.Configuration.ReplicationMethod.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor.Method.ValueString())
- sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor = &shared.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor{
- Method: method1,
- }
+ var sourceMysqlScanChangesWithUserDefinedCursor *shared.SourceMysqlScanChangesWithUserDefinedCursor
+ if r.Configuration.ReplicationMethod.ScanChangesWithUserDefinedCursor != nil {
+ sourceMysqlScanChangesWithUserDefinedCursor = &shared.SourceMysqlScanChangesWithUserDefinedCursor{}
}
- if sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor != nil {
+ if sourceMysqlScanChangesWithUserDefinedCursor != nil {
replicationMethod = shared.SourceMysqlUpdateMethod{
- SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor: sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor,
+ SourceMysqlScanChangesWithUserDefinedCursor: sourceMysqlScanChangesWithUserDefinedCursor,
}
}
- sourceType := shared.SourceMysqlMysql(r.Configuration.SourceType.ValueString())
var sslMode *shared.SourceMysqlSSLModes
if r.Configuration.SslMode != nil {
- var sourceMysqlSSLModesPreferred *shared.SourceMysqlSSLModesPreferred
- if r.Configuration.SslMode.SourceMysqlSSLModesPreferred != nil {
- mode := shared.SourceMysqlSSLModesPreferredMode(r.Configuration.SslMode.SourceMysqlSSLModesPreferred.Mode.ValueString())
- sourceMysqlSSLModesPreferred = &shared.SourceMysqlSSLModesPreferred{
- Mode: mode,
- }
+ var sourceMysqlPreferred *shared.SourceMysqlPreferred
+ if r.Configuration.SslMode.Preferred != nil {
+ sourceMysqlPreferred = &shared.SourceMysqlPreferred{}
}
- if sourceMysqlSSLModesPreferred != nil {
+ if sourceMysqlPreferred != nil {
sslMode = &shared.SourceMysqlSSLModes{
- SourceMysqlSSLModesPreferred: sourceMysqlSSLModesPreferred,
+ SourceMysqlPreferred: sourceMysqlPreferred,
}
}
- var sourceMysqlSSLModesRequired *shared.SourceMysqlSSLModesRequired
- if r.Configuration.SslMode.SourceMysqlSSLModesRequired != nil {
- mode1 := shared.SourceMysqlSSLModesRequiredMode(r.Configuration.SslMode.SourceMysqlSSLModesRequired.Mode.ValueString())
- sourceMysqlSSLModesRequired = &shared.SourceMysqlSSLModesRequired{
- Mode: mode1,
- }
+ var sourceMysqlRequired *shared.SourceMysqlRequired
+ if r.Configuration.SslMode.Required != nil {
+ sourceMysqlRequired = &shared.SourceMysqlRequired{}
}
- if sourceMysqlSSLModesRequired != nil {
+ if sourceMysqlRequired != nil {
sslMode = &shared.SourceMysqlSSLModes{
- SourceMysqlSSLModesRequired: sourceMysqlSSLModesRequired,
+ SourceMysqlRequired: sourceMysqlRequired,
}
}
- var sourceMysqlSSLModesVerifyCA *shared.SourceMysqlSSLModesVerifyCA
- if r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA != nil {
- caCertificate := r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA.CaCertificate.ValueString()
+ var sourceMysqlVerifyCA *shared.SourceMysqlVerifyCA
+ if r.Configuration.SslMode.VerifyCA != nil {
+ caCertificate := r.Configuration.SslMode.VerifyCA.CaCertificate.ValueString()
clientCertificate := new(string)
- if !r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA.ClientCertificate.IsNull() {
- *clientCertificate = r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyCA.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyCA.ClientCertificate.IsNull() {
+ *clientCertificate = r.Configuration.SslMode.VerifyCA.ClientCertificate.ValueString()
} else {
clientCertificate = nil
}
clientKey := new(string)
- if !r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA.ClientKey.IsNull() {
- *clientKey = r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyCA.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyCA.ClientKey.IsNull() {
+ *clientKey = r.Configuration.SslMode.VerifyCA.ClientKey.ValueString()
} else {
clientKey = nil
}
clientKeyPassword := new(string)
- if !r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA.ClientKeyPassword.IsNull() {
- *clientKeyPassword = r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyCA.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyCA.ClientKeyPassword.IsNull() {
+ *clientKeyPassword = r.Configuration.SslMode.VerifyCA.ClientKeyPassword.ValueString()
} else {
clientKeyPassword = nil
}
- mode2 := shared.SourceMysqlSSLModesVerifyCAMode(r.Configuration.SslMode.SourceMysqlSSLModesVerifyCA.Mode.ValueString())
- sourceMysqlSSLModesVerifyCA = &shared.SourceMysqlSSLModesVerifyCA{
+ sourceMysqlVerifyCA = &shared.SourceMysqlVerifyCA{
CaCertificate: caCertificate,
ClientCertificate: clientCertificate,
ClientKey: clientKey,
ClientKeyPassword: clientKeyPassword,
- Mode: mode2,
}
}
- if sourceMysqlSSLModesVerifyCA != nil {
+ if sourceMysqlVerifyCA != nil {
sslMode = &shared.SourceMysqlSSLModes{
- SourceMysqlSSLModesVerifyCA: sourceMysqlSSLModesVerifyCA,
+ SourceMysqlVerifyCA: sourceMysqlVerifyCA,
}
}
- var sourceMysqlSSLModesVerifyIdentity *shared.SourceMysqlSSLModesVerifyIdentity
- if r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity != nil {
- caCertificate1 := r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity.CaCertificate.ValueString()
+ var sourceMysqlVerifyIdentity *shared.SourceMysqlVerifyIdentity
+ if r.Configuration.SslMode.VerifyIdentity != nil {
+ caCertificate1 := r.Configuration.SslMode.VerifyIdentity.CaCertificate.ValueString()
clientCertificate1 := new(string)
- if !r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity.ClientCertificate.IsNull() {
- *clientCertificate1 = r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyIdentity.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyIdentity.ClientCertificate.IsNull() {
+ *clientCertificate1 = r.Configuration.SslMode.VerifyIdentity.ClientCertificate.ValueString()
} else {
clientCertificate1 = nil
}
clientKey1 := new(string)
- if !r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity.ClientKey.IsNull() {
- *clientKey1 = r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyIdentity.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyIdentity.ClientKey.IsNull() {
+ *clientKey1 = r.Configuration.SslMode.VerifyIdentity.ClientKey.ValueString()
} else {
clientKey1 = nil
}
clientKeyPassword1 := new(string)
- if !r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity.ClientKeyPassword.IsNull() {
- *clientKeyPassword1 = r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyIdentity.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyIdentity.ClientKeyPassword.IsNull() {
+ *clientKeyPassword1 = r.Configuration.SslMode.VerifyIdentity.ClientKeyPassword.ValueString()
} else {
clientKeyPassword1 = nil
}
- mode3 := shared.SourceMysqlSSLModesVerifyIdentityMode(r.Configuration.SslMode.SourceMysqlSSLModesVerifyIdentity.Mode.ValueString())
- sourceMysqlSSLModesVerifyIdentity = &shared.SourceMysqlSSLModesVerifyIdentity{
+ sourceMysqlVerifyIdentity = &shared.SourceMysqlVerifyIdentity{
CaCertificate: caCertificate1,
ClientCertificate: clientCertificate1,
ClientKey: clientKey1,
ClientKeyPassword: clientKeyPassword1,
- Mode: mode3,
}
}
- if sourceMysqlSSLModesVerifyIdentity != nil {
+ if sourceMysqlVerifyIdentity != nil {
sslMode = &shared.SourceMysqlSSLModes{
- SourceMysqlSSLModesVerifyIdentity: sourceMysqlSSLModesVerifyIdentity,
+ SourceMysqlVerifyIdentity: sourceMysqlVerifyIdentity,
}
}
}
var tunnelMethod *shared.SourceMysqlSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourceMysqlSSHTunnelMethodNoTunnel *shared.SourceMysqlSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourceMysqlSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourceMysqlSSHTunnelMethodNoTunnel = &shared.SourceMysqlSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourceMysqlNoTunnel *shared.SourceMysqlNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourceMysqlNoTunnel = &shared.SourceMysqlNoTunnel{}
}
- if sourceMysqlSSHTunnelMethodNoTunnel != nil {
+ if sourceMysqlNoTunnel != nil {
tunnelMethod = &shared.SourceMysqlSSHTunnelMethod{
- SourceMysqlSSHTunnelMethodNoTunnel: sourceMysqlSSHTunnelMethodNoTunnel,
+ SourceMysqlNoTunnel: sourceMysqlNoTunnel,
}
}
- var sourceMysqlSSHTunnelMethodSSHKeyAuthentication *shared.SourceMysqlSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourceMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourceMysqlSSHTunnelMethodSSHKeyAuthentication = &shared.SourceMysqlSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourceMysqlSSHKeyAuthentication *shared.SourceMysqlSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourceMysqlSSHKeyAuthentication = &shared.SourceMysqlSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourceMysqlSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourceMysqlSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourceMysqlSSHTunnelMethod{
- SourceMysqlSSHTunnelMethodSSHKeyAuthentication: sourceMysqlSSHTunnelMethodSSHKeyAuthentication,
+ SourceMysqlSSHKeyAuthentication: sourceMysqlSSHKeyAuthentication,
}
}
- var sourceMysqlSSHTunnelMethodPasswordAuthentication *shared.SourceMysqlSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourceMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourceMysqlSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourceMysqlSSHTunnelMethodPasswordAuthentication = &shared.SourceMysqlSSHTunnelMethodPasswordAuthentication{
+ var sourceMysqlPasswordAuthentication *shared.SourceMysqlPasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourceMysqlPasswordAuthentication = &shared.SourceMysqlPasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourceMysqlSSHTunnelMethodPasswordAuthentication != nil {
+ if sourceMysqlPasswordAuthentication != nil {
tunnelMethod = &shared.SourceMysqlSSHTunnelMethod{
- SourceMysqlSSHTunnelMethodPasswordAuthentication: sourceMysqlSSHTunnelMethodPasswordAuthentication,
+ SourceMysqlPasswordAuthentication: sourceMysqlPasswordAuthentication,
}
}
}
@@ -223,11 +215,16 @@ func (r *SourceMysqlResourceModel) ToCreateSDKType() *shared.SourceMysqlCreateRe
Password: password,
Port: port,
ReplicationMethod: replicationMethod,
- SourceType: sourceType,
SslMode: sslMode,
TunnelMethod: tunnelMethod,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -238,6 +235,7 @@ func (r *SourceMysqlResourceModel) ToCreateSDKType() *shared.SourceMysqlCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceMysqlCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -265,195 +263,188 @@ func (r *SourceMysqlResourceModel) ToUpdateSDKType() *shared.SourceMysqlPutReque
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var replicationMethod shared.SourceMysqlUpdateUpdateMethod
- var sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC *shared.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC
- if r.Configuration.ReplicationMethod.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC != nil {
+ var readChangesUsingBinaryLogCDC *shared.ReadChangesUsingBinaryLogCDC
+ if r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC != nil {
initialWaitingSeconds := new(int64)
- if !r.Configuration.ReplicationMethod.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.IsNull() {
- *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.ValueInt64()
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.IsNull() {
+ *initialWaitingSeconds = r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.InitialWaitingSeconds.ValueInt64()
} else {
initialWaitingSeconds = nil
}
- method := shared.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDCMethod(r.Configuration.ReplicationMethod.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC.Method.ValueString())
serverTimeZone := new(string)
- if !r.Configuration.ReplicationMethod.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC.ServerTimeZone.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC.ServerTimeZone.IsNull() {
- *serverTimeZone = r.Configuration.ReplicationMethod.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC.ServerTimeZone.ValueString()
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.ServerTimeZone.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.ServerTimeZone.IsNull() {
+ *serverTimeZone = r.Configuration.ReplicationMethod.ReadChangesUsingBinaryLogCDC.ServerTimeZone.ValueString()
} else {
serverTimeZone = nil
}
- sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC = &shared.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC{
+ readChangesUsingBinaryLogCDC = &shared.ReadChangesUsingBinaryLogCDC{
InitialWaitingSeconds: initialWaitingSeconds,
- Method: method,
ServerTimeZone: serverTimeZone,
}
}
- if sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC != nil {
+ if readChangesUsingBinaryLogCDC != nil {
replicationMethod = shared.SourceMysqlUpdateUpdateMethod{
- SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC: sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC,
+ ReadChangesUsingBinaryLogCDC: readChangesUsingBinaryLogCDC,
}
}
- var sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor *shared.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor
- if r.Configuration.ReplicationMethod.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil {
- method1 := shared.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod(r.Configuration.ReplicationMethod.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor.Method.ValueString())
- sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor = &shared.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor{
- Method: method1,
- }
+ var sourceMysqlUpdateScanChangesWithUserDefinedCursor *shared.SourceMysqlUpdateScanChangesWithUserDefinedCursor
+ if r.Configuration.ReplicationMethod.ScanChangesWithUserDefinedCursor != nil {
+ sourceMysqlUpdateScanChangesWithUserDefinedCursor = &shared.SourceMysqlUpdateScanChangesWithUserDefinedCursor{}
}
- if sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil {
+ if sourceMysqlUpdateScanChangesWithUserDefinedCursor != nil {
replicationMethod = shared.SourceMysqlUpdateUpdateMethod{
- SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor: sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor,
+ SourceMysqlUpdateScanChangesWithUserDefinedCursor: sourceMysqlUpdateScanChangesWithUserDefinedCursor,
}
}
var sslMode *shared.SourceMysqlUpdateSSLModes
if r.Configuration.SslMode != nil {
- var sourceMysqlUpdateSSLModesPreferred *shared.SourceMysqlUpdateSSLModesPreferred
- if r.Configuration.SslMode.SourceMysqlUpdateSSLModesPreferred != nil {
- mode := shared.SourceMysqlUpdateSSLModesPreferredMode(r.Configuration.SslMode.SourceMysqlUpdateSSLModesPreferred.Mode.ValueString())
- sourceMysqlUpdateSSLModesPreferred = &shared.SourceMysqlUpdateSSLModesPreferred{
- Mode: mode,
- }
+ var preferred *shared.Preferred
+ if r.Configuration.SslMode.Preferred != nil {
+ preferred = &shared.Preferred{}
}
- if sourceMysqlUpdateSSLModesPreferred != nil {
+ if preferred != nil {
sslMode = &shared.SourceMysqlUpdateSSLModes{
- SourceMysqlUpdateSSLModesPreferred: sourceMysqlUpdateSSLModesPreferred,
+ Preferred: preferred,
}
}
- var sourceMysqlUpdateSSLModesRequired *shared.SourceMysqlUpdateSSLModesRequired
- if r.Configuration.SslMode.SourceMysqlUpdateSSLModesRequired != nil {
- mode1 := shared.SourceMysqlUpdateSSLModesRequiredMode(r.Configuration.SslMode.SourceMysqlUpdateSSLModesRequired.Mode.ValueString())
- sourceMysqlUpdateSSLModesRequired = &shared.SourceMysqlUpdateSSLModesRequired{
- Mode: mode1,
- }
+ var required *shared.Required
+ if r.Configuration.SslMode.Required != nil {
+ required = &shared.Required{}
}
- if sourceMysqlUpdateSSLModesRequired != nil {
+ if required != nil {
sslMode = &shared.SourceMysqlUpdateSSLModes{
- SourceMysqlUpdateSSLModesRequired: sourceMysqlUpdateSSLModesRequired,
+ Required: required,
}
}
- var sourceMysqlUpdateSSLModesVerifyCA *shared.SourceMysqlUpdateSSLModesVerifyCA
- if r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA != nil {
- caCertificate := r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA.CaCertificate.ValueString()
+ var sourceMysqlUpdateVerifyCA *shared.SourceMysqlUpdateVerifyCA
+ if r.Configuration.SslMode.VerifyCA != nil {
+ caCertificate := r.Configuration.SslMode.VerifyCA.CaCertificate.ValueString()
clientCertificate := new(string)
- if !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA.ClientCertificate.IsNull() {
- *clientCertificate = r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyCA.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyCA.ClientCertificate.IsNull() {
+ *clientCertificate = r.Configuration.SslMode.VerifyCA.ClientCertificate.ValueString()
} else {
clientCertificate = nil
}
clientKey := new(string)
- if !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA.ClientKey.IsNull() {
- *clientKey = r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyCA.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyCA.ClientKey.IsNull() {
+ *clientKey = r.Configuration.SslMode.VerifyCA.ClientKey.ValueString()
} else {
clientKey = nil
}
clientKeyPassword := new(string)
- if !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA.ClientKeyPassword.IsNull() {
- *clientKeyPassword = r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyCA.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyCA.ClientKeyPassword.IsNull() {
+ *clientKeyPassword = r.Configuration.SslMode.VerifyCA.ClientKeyPassword.ValueString()
} else {
clientKeyPassword = nil
}
- mode2 := shared.SourceMysqlUpdateSSLModesVerifyCAMode(r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyCA.Mode.ValueString())
- sourceMysqlUpdateSSLModesVerifyCA = &shared.SourceMysqlUpdateSSLModesVerifyCA{
+ sourceMysqlUpdateVerifyCA = &shared.SourceMysqlUpdateVerifyCA{
CaCertificate: caCertificate,
ClientCertificate: clientCertificate,
ClientKey: clientKey,
ClientKeyPassword: clientKeyPassword,
- Mode: mode2,
}
}
- if sourceMysqlUpdateSSLModesVerifyCA != nil {
+ if sourceMysqlUpdateVerifyCA != nil {
sslMode = &shared.SourceMysqlUpdateSSLModes{
- SourceMysqlUpdateSSLModesVerifyCA: sourceMysqlUpdateSSLModesVerifyCA,
+ SourceMysqlUpdateVerifyCA: sourceMysqlUpdateVerifyCA,
}
}
- var sourceMysqlUpdateSSLModesVerifyIdentity *shared.SourceMysqlUpdateSSLModesVerifyIdentity
- if r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity != nil {
- caCertificate1 := r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity.CaCertificate.ValueString()
+ var verifyIdentity *shared.VerifyIdentity
+ if r.Configuration.SslMode.VerifyIdentity != nil {
+ caCertificate1 := r.Configuration.SslMode.VerifyIdentity.CaCertificate.ValueString()
clientCertificate1 := new(string)
- if !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity.ClientCertificate.IsNull() {
- *clientCertificate1 = r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyIdentity.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyIdentity.ClientCertificate.IsNull() {
+ *clientCertificate1 = r.Configuration.SslMode.VerifyIdentity.ClientCertificate.ValueString()
} else {
clientCertificate1 = nil
}
clientKey1 := new(string)
- if !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity.ClientKey.IsNull() {
- *clientKey1 = r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyIdentity.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyIdentity.ClientKey.IsNull() {
+ *clientKey1 = r.Configuration.SslMode.VerifyIdentity.ClientKey.ValueString()
} else {
clientKey1 = nil
}
clientKeyPassword1 := new(string)
- if !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity.ClientKeyPassword.IsNull() {
- *clientKeyPassword1 = r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyIdentity.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyIdentity.ClientKeyPassword.IsNull() {
+ *clientKeyPassword1 = r.Configuration.SslMode.VerifyIdentity.ClientKeyPassword.ValueString()
} else {
clientKeyPassword1 = nil
}
- mode3 := shared.SourceMysqlUpdateSSLModesVerifyIdentityMode(r.Configuration.SslMode.SourceMysqlUpdateSSLModesVerifyIdentity.Mode.ValueString())
- sourceMysqlUpdateSSLModesVerifyIdentity = &shared.SourceMysqlUpdateSSLModesVerifyIdentity{
+ verifyIdentity = &shared.VerifyIdentity{
CaCertificate: caCertificate1,
ClientCertificate: clientCertificate1,
ClientKey: clientKey1,
ClientKeyPassword: clientKeyPassword1,
- Mode: mode3,
}
}
- if sourceMysqlUpdateSSLModesVerifyIdentity != nil {
+ if verifyIdentity != nil {
sslMode = &shared.SourceMysqlUpdateSSLModes{
- SourceMysqlUpdateSSLModesVerifyIdentity: sourceMysqlUpdateSSLModesVerifyIdentity,
+ VerifyIdentity: verifyIdentity,
}
}
}
var tunnelMethod *shared.SourceMysqlUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourceMysqlUpdateSSHTunnelMethodNoTunnel *shared.SourceMysqlUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourceMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourceMysqlUpdateSSHTunnelMethodNoTunnel = &shared.SourceMysqlUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourceMysqlUpdateNoTunnel *shared.SourceMysqlUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourceMysqlUpdateNoTunnel = &shared.SourceMysqlUpdateNoTunnel{}
}
- if sourceMysqlUpdateSSHTunnelMethodNoTunnel != nil {
+ if sourceMysqlUpdateNoTunnel != nil {
tunnelMethod = &shared.SourceMysqlUpdateSSHTunnelMethod{
- SourceMysqlUpdateSSHTunnelMethodNoTunnel: sourceMysqlUpdateSSHTunnelMethodNoTunnel,
+ SourceMysqlUpdateNoTunnel: sourceMysqlUpdateNoTunnel,
}
}
- var sourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication *shared.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourceMysqlUpdateSSHKeyAuthentication *shared.SourceMysqlUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourceMysqlUpdateSSHKeyAuthentication = &shared.SourceMysqlUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourceMysqlUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourceMysqlUpdateSSHTunnelMethod{
- SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication: sourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication,
+ SourceMysqlUpdateSSHKeyAuthentication: sourceMysqlUpdateSSHKeyAuthentication,
}
}
- var sourceMysqlUpdateSSHTunnelMethodPasswordAuthentication *shared.SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourceMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourceMysqlUpdateSSHTunnelMethodPasswordAuthentication = &shared.SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication{
+ var sourceMysqlUpdatePasswordAuthentication *shared.SourceMysqlUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourceMysqlUpdatePasswordAuthentication = &shared.SourceMysqlUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourceMysqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if sourceMysqlUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.SourceMysqlUpdateSSHTunnelMethod{
- SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication: sourceMysqlUpdateSSHTunnelMethodPasswordAuthentication,
+ SourceMysqlUpdatePasswordAuthentication: sourceMysqlUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/source_netsuite_data_source.go b/internal/provider/source_netsuite_data_source.go
old mode 100755
new mode 100644
index d09b27043..bb1e22155
--- a/internal/provider/source_netsuite_data_source.go
+++ b/internal/provider/source_netsuite_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceNetsuiteDataSource struct {
// SourceNetsuiteDataSourceModel describes the data model.
type SourceNetsuiteDataSourceModel struct {
- Configuration SourceNetsuite `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,63 +47,20 @@ func (r *SourceNetsuiteDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceNetsuite DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "consumer_key": schema.StringAttribute{
- Computed: true,
- Description: `Consumer key associated with your integration`,
- },
- "consumer_secret": schema.StringAttribute{
- Computed: true,
- Description: `Consumer secret associated with your integration`,
- },
- "object_types": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The API names of the Netsuite objects you want to sync. Setting this speeds up the connection setup process by limiting the number of schemas that need to be retrieved from Netsuite.`,
- },
- "realm": schema.StringAttribute{
- Computed: true,
- Description: `Netsuite realm e.g. 2344535, as for ` + "`" + `production` + "`" + ` or 2344535_SB1, as for the ` + "`" + `sandbox` + "`" + ``,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "netsuite",
- ),
- },
- Description: `must be one of ["netsuite"]`,
- },
- "start_datetime": schema.StringAttribute{
- Computed: true,
- Description: `Starting point for your data replication, in format of "YYYY-MM-DDTHH:mm:ssZ"`,
- },
- "token_key": schema.StringAttribute{
- Computed: true,
- Description: `Access token key`,
- },
- "token_secret": schema.StringAttribute{
- Computed: true,
- Description: `Access token secret`,
- },
- "window_in_days": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of days used to query the data with date chunks. Set smaller value, if you have lots of data.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_netsuite_data_source_sdk.go b/internal/provider/source_netsuite_data_source_sdk.go
old mode 100755
new mode 100644
index 1b569d1a2..9b494f06d
--- a/internal/provider/source_netsuite_data_source_sdk.go
+++ b/internal/provider/source_netsuite_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceNetsuiteDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_netsuite_resource.go b/internal/provider/source_netsuite_resource.go
old mode 100755
new mode 100644
index fc30631cb..e75a614d0
--- a/internal/provider/source_netsuite_resource.go
+++ b/internal/provider/source_netsuite_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceNetsuiteResource struct {
// SourceNetsuiteResourceModel describes the resource data model.
type SourceNetsuiteResourceModel struct {
Configuration SourceNetsuite `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,6 +56,7 @@ func (r *SourceNetsuiteResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"consumer_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Consumer key associated with your integration`,
},
"consumer_secret": schema.StringAttribute{
@@ -71,40 +72,45 @@ func (r *SourceNetsuiteResource) Schema(ctx context.Context, req resource.Schema
Required: true,
Description: `Netsuite realm e.g. 2344535, as for ` + "`" + `production` + "`" + ` or 2344535_SB1, as for the ` + "`" + `sandbox` + "`" + ``,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "netsuite",
- ),
- },
- Description: `must be one of ["netsuite"]`,
- },
"start_datetime": schema.StringAttribute{
Required: true,
Description: `Starting point for your data replication, in format of "YYYY-MM-DDTHH:mm:ssZ"`,
},
"token_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access token key`,
},
"token_secret": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access token secret`,
},
"window_in_days": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of days used to query the data with date chunks. Set smaller value, if you have lots of data.`,
+ Optional: true,
+ MarkdownDescription: `Default: 30` + "\n" +
+ `The amount of days used to query the data with date chunks. Set smaller value, if you have lots of data.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -168,7 +174,7 @@ func (r *SourceNetsuiteResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceNetsuite(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -344,5 +350,5 @@ func (r *SourceNetsuiteResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceNetsuiteResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_netsuite_resource_sdk.go b/internal/provider/source_netsuite_resource_sdk.go
old mode 100755
new mode 100644
index becf37a80..306307ca1
--- a/internal/provider/source_netsuite_resource_sdk.go
+++ b/internal/provider/source_netsuite_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -15,7 +15,6 @@ func (r *SourceNetsuiteResourceModel) ToCreateSDKType() *shared.SourceNetsuiteCr
objectTypes = append(objectTypes, objectTypesItem.ValueString())
}
realm := r.Configuration.Realm.ValueString()
- sourceType := shared.SourceNetsuiteNetsuite(r.Configuration.SourceType.ValueString())
startDatetime := r.Configuration.StartDatetime.ValueString()
tokenKey := r.Configuration.TokenKey.ValueString()
tokenSecret := r.Configuration.TokenSecret.ValueString()
@@ -30,12 +29,17 @@ func (r *SourceNetsuiteResourceModel) ToCreateSDKType() *shared.SourceNetsuiteCr
ConsumerSecret: consumerSecret,
ObjectTypes: objectTypes,
Realm: realm,
- SourceType: sourceType,
StartDatetime: startDatetime,
TokenKey: tokenKey,
TokenSecret: tokenSecret,
WindowInDays: windowInDays,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -46,6 +50,7 @@ func (r *SourceNetsuiteResourceModel) ToCreateSDKType() *shared.SourceNetsuiteCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceNetsuiteCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_notion_data_source.go b/internal/provider/source_notion_data_source.go
old mode 100755
new mode 100644
index 1feec1771..d151e187d
--- a/internal/provider/source_notion_data_source.go
+++ b/internal/provider/source_notion_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceNotionDataSource struct {
// SourceNotionDataSourceModel describes the data model.
type SourceNotionDataSourceModel struct {
- Configuration SourceNotion `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,138 +47,20 @@ func (r *SourceNotionDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceNotion DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_notion_authenticate_using_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "token",
- ),
- },
- Description: `must be one of ["token"]`,
- },
- "token": schema.StringAttribute{
- Computed: true,
- Description: `Notion API access token, see the docs for more information on how to obtain this token.`,
- },
- },
- Description: `Pick an authentication method.`,
- },
- "source_notion_authenticate_using_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token is a token you received by complete the OauthWebFlow of Notion.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth2.0",
- ),
- },
- Description: `must be one of ["OAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The ClientID of your Notion integration.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The ClientSecret of your Notion integration.`,
- },
- },
- Description: `Pick an authentication method.`,
- },
- "source_notion_update_authenticate_using_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "token",
- ),
- },
- Description: `must be one of ["token"]`,
- },
- "token": schema.StringAttribute{
- Computed: true,
- Description: `Notion API access token, see the docs for more information on how to obtain this token.`,
- },
- },
- Description: `Pick an authentication method.`,
- },
- "source_notion_update_authenticate_using_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token is a token you received by complete the OauthWebFlow of Notion.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth2.0",
- ),
- },
- Description: `must be one of ["OAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The ClientID of your Notion integration.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The ClientSecret of your Notion integration.`,
- },
- },
- Description: `Pick an authentication method.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Pick an authentication method.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "notion",
- ),
- },
- Description: `must be one of ["notion"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00.000Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_notion_data_source_sdk.go b/internal/provider/source_notion_data_source_sdk.go
old mode 100755
new mode 100644
index 720b552b4..3294075b5
--- a/internal/provider/source_notion_data_source_sdk.go
+++ b/internal/provider/source_notion_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceNotionDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_notion_resource.go b/internal/provider/source_notion_resource.go
old mode 100755
new mode 100644
index fa0b495be..c6aae4d09
--- a/internal/provider/source_notion_resource.go
+++ b/internal/provider/source_notion_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceNotionResource struct {
// SourceNotionResourceModel describes the resource data model.
type SourceNotionResourceModel struct {
Configuration SourceNotion `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,131 +57,71 @@ func (r *SourceNotionResource) Schema(ctx context.Context, req resource.SchemaRe
Required: true,
Attributes: map[string]schema.Attribute{
"credentials": schema.SingleNestedAttribute{
- Optional: true,
+ Required: true,
Attributes: map[string]schema.Attribute{
- "source_notion_authenticate_using_access_token": schema.SingleNestedAttribute{
+ "access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "token",
- ),
- },
- Description: `must be one of ["token"]`,
- },
"token": schema.StringAttribute{
Required: true,
- Description: `Notion API access token, see the docs for more information on how to obtain this token.`,
+ Sensitive: true,
+ Description: `The Access Token for your private Notion integration. See the docs for more information on how to obtain this token.`,
},
},
- Description: `Pick an authentication method.`,
+ Description: `Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information.`,
},
- "source_notion_authenticate_using_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
- Description: `Access Token is a token you received by complete the OauthWebFlow of Notion.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth2.0",
- ),
- },
- Description: `must be one of ["OAuth2.0"]`,
+ Sensitive: true,
+ Description: `The Access Token received by completing the OAuth flow for your Notion integration. See our docs for more information.`,
},
"client_id": schema.StringAttribute{
Required: true,
- Description: `The ClientID of your Notion integration.`,
+ Description: `The Client ID of your Notion integration. See our docs for more information.`,
},
"client_secret": schema.StringAttribute{
Required: true,
- Description: `The ClientSecret of your Notion integration.`,
+ Description: `The Client Secret of your Notion integration. See our docs for more information.`,
},
},
- Description: `Pick an authentication method.`,
- },
- "source_notion_update_authenticate_using_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "token",
- ),
- },
- Description: `must be one of ["token"]`,
- },
- "token": schema.StringAttribute{
- Required: true,
- Description: `Notion API access token, see the docs for more information on how to obtain this token.`,
- },
- },
- Description: `Pick an authentication method.`,
- },
- "source_notion_update_authenticate_using_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Access Token is a token you received by complete the OauthWebFlow of Notion.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth2.0",
- ),
- },
- Description: `must be one of ["OAuth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The ClientID of your Notion integration.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The ClientSecret of your Notion integration.`,
- },
- },
- Description: `Pick an authentication method.`,
+ Description: `Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information.`,
},
},
+ Description: `Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Pick an authentication method.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "notion",
- ),
- },
- Description: `must be one of ["notion"]`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z. During incremental sync, any data generated before this date will not be replicated. If left blank, the start date will be set to 2 years before the present date.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00.000Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -244,7 +185,7 @@ func (r *SourceNotionResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceNotion(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -420,5 +361,5 @@ func (r *SourceNotionResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceNotionResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_notion_resource_sdk.go b/internal/provider/source_notion_resource_sdk.go
old mode 100755
new mode 100644
index f769e56fa..2dea2982f
--- a/internal/provider/source_notion_resource_sdk.go
+++ b/internal/provider/source_notion_resource_sdk.go
@@ -3,54 +3,57 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceNotionResourceModel) ToCreateSDKType() *shared.SourceNotionCreateRequest {
- var credentials *shared.SourceNotionAuthenticateUsing
- if r.Configuration.Credentials != nil {
- var sourceNotionAuthenticateUsingOAuth20 *shared.SourceNotionAuthenticateUsingOAuth20
- if r.Configuration.Credentials.SourceNotionAuthenticateUsingOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceNotionAuthenticateUsingOAuth20.AccessToken.ValueString()
- authType := shared.SourceNotionAuthenticateUsingOAuth20AuthType(r.Configuration.Credentials.SourceNotionAuthenticateUsingOAuth20.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceNotionAuthenticateUsingOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceNotionAuthenticateUsingOAuth20.ClientSecret.ValueString()
- sourceNotionAuthenticateUsingOAuth20 = &shared.SourceNotionAuthenticateUsingOAuth20{
- AccessToken: accessToken,
- AuthType: authType,
- ClientID: clientID,
- ClientSecret: clientSecret,
- }
+ var credentials shared.SourceNotionAuthenticationMethod
+ var sourceNotionOAuth20 *shared.SourceNotionOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ sourceNotionOAuth20 = &shared.SourceNotionOAuth20{
+ AccessToken: accessToken,
+ ClientID: clientID,
+ ClientSecret: clientSecret,
}
- if sourceNotionAuthenticateUsingOAuth20 != nil {
- credentials = &shared.SourceNotionAuthenticateUsing{
- SourceNotionAuthenticateUsingOAuth20: sourceNotionAuthenticateUsingOAuth20,
- }
+ }
+ if sourceNotionOAuth20 != nil {
+ credentials = shared.SourceNotionAuthenticationMethod{
+ SourceNotionOAuth20: sourceNotionOAuth20,
}
- var sourceNotionAuthenticateUsingAccessToken *shared.SourceNotionAuthenticateUsingAccessToken
- if r.Configuration.Credentials.SourceNotionAuthenticateUsingAccessToken != nil {
- authType1 := shared.SourceNotionAuthenticateUsingAccessTokenAuthType(r.Configuration.Credentials.SourceNotionAuthenticateUsingAccessToken.AuthType.ValueString())
- token := r.Configuration.Credentials.SourceNotionAuthenticateUsingAccessToken.Token.ValueString()
- sourceNotionAuthenticateUsingAccessToken = &shared.SourceNotionAuthenticateUsingAccessToken{
- AuthType: authType1,
- Token: token,
- }
+ }
+ var sourceNotionAccessToken *shared.SourceNotionAccessToken
+ if r.Configuration.Credentials.AccessToken != nil {
+ token := r.Configuration.Credentials.AccessToken.Token.ValueString()
+ sourceNotionAccessToken = &shared.SourceNotionAccessToken{
+ Token: token,
}
- if sourceNotionAuthenticateUsingAccessToken != nil {
- credentials = &shared.SourceNotionAuthenticateUsing{
- SourceNotionAuthenticateUsingAccessToken: sourceNotionAuthenticateUsingAccessToken,
- }
+ }
+ if sourceNotionAccessToken != nil {
+ credentials = shared.SourceNotionAuthenticationMethod{
+ SourceNotionAccessToken: sourceNotionAccessToken,
}
}
- sourceType := shared.SourceNotionNotion(r.Configuration.SourceType.ValueString())
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceNotion{
Credentials: credentials,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -61,6 +64,7 @@ func (r *SourceNotionResourceModel) ToCreateSDKType() *shared.SourceNotionCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceNotionCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -74,42 +78,41 @@ func (r *SourceNotionResourceModel) ToGetSDKType() *shared.SourceNotionCreateReq
}
func (r *SourceNotionResourceModel) ToUpdateSDKType() *shared.SourceNotionPutRequest {
- var credentials *shared.SourceNotionUpdateAuthenticateUsing
- if r.Configuration.Credentials != nil {
- var sourceNotionUpdateAuthenticateUsingOAuth20 *shared.SourceNotionUpdateAuthenticateUsingOAuth20
- if r.Configuration.Credentials.SourceNotionUpdateAuthenticateUsingOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceNotionUpdateAuthenticateUsingOAuth20.AccessToken.ValueString()
- authType := shared.SourceNotionUpdateAuthenticateUsingOAuth20AuthType(r.Configuration.Credentials.SourceNotionUpdateAuthenticateUsingOAuth20.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceNotionUpdateAuthenticateUsingOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceNotionUpdateAuthenticateUsingOAuth20.ClientSecret.ValueString()
- sourceNotionUpdateAuthenticateUsingOAuth20 = &shared.SourceNotionUpdateAuthenticateUsingOAuth20{
- AccessToken: accessToken,
- AuthType: authType,
- ClientID: clientID,
- ClientSecret: clientSecret,
- }
+ var credentials shared.SourceNotionUpdateAuthenticationMethod
+ var sourceNotionUpdateOAuth20 *shared.SourceNotionUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ sourceNotionUpdateOAuth20 = &shared.SourceNotionUpdateOAuth20{
+ AccessToken: accessToken,
+ ClientID: clientID,
+ ClientSecret: clientSecret,
}
- if sourceNotionUpdateAuthenticateUsingOAuth20 != nil {
- credentials = &shared.SourceNotionUpdateAuthenticateUsing{
- SourceNotionUpdateAuthenticateUsingOAuth20: sourceNotionUpdateAuthenticateUsingOAuth20,
- }
+ }
+ if sourceNotionUpdateOAuth20 != nil {
+ credentials = shared.SourceNotionUpdateAuthenticationMethod{
+ SourceNotionUpdateOAuth20: sourceNotionUpdateOAuth20,
}
- var sourceNotionUpdateAuthenticateUsingAccessToken *shared.SourceNotionUpdateAuthenticateUsingAccessToken
- if r.Configuration.Credentials.SourceNotionUpdateAuthenticateUsingAccessToken != nil {
- authType1 := shared.SourceNotionUpdateAuthenticateUsingAccessTokenAuthType(r.Configuration.Credentials.SourceNotionUpdateAuthenticateUsingAccessToken.AuthType.ValueString())
- token := r.Configuration.Credentials.SourceNotionUpdateAuthenticateUsingAccessToken.Token.ValueString()
- sourceNotionUpdateAuthenticateUsingAccessToken = &shared.SourceNotionUpdateAuthenticateUsingAccessToken{
- AuthType: authType1,
- Token: token,
- }
+ }
+ var sourceNotionUpdateAccessToken *shared.SourceNotionUpdateAccessToken
+ if r.Configuration.Credentials.AccessToken != nil {
+ token := r.Configuration.Credentials.AccessToken.Token.ValueString()
+ sourceNotionUpdateAccessToken = &shared.SourceNotionUpdateAccessToken{
+ Token: token,
}
- if sourceNotionUpdateAuthenticateUsingAccessToken != nil {
- credentials = &shared.SourceNotionUpdateAuthenticateUsing{
- SourceNotionUpdateAuthenticateUsingAccessToken: sourceNotionUpdateAuthenticateUsingAccessToken,
- }
+ }
+ if sourceNotionUpdateAccessToken != nil {
+ credentials = shared.SourceNotionUpdateAuthenticationMethod{
+ SourceNotionUpdateAccessToken: sourceNotionUpdateAccessToken,
}
}
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ startDate := new(time.Time)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
configuration := shared.SourceNotionUpdate{
Credentials: credentials,
StartDate: startDate,
diff --git a/internal/provider/source_nytimes_data_source.go b/internal/provider/source_nytimes_data_source.go
old mode 100755
new mode 100644
index 13e0abc42..ec775eb09
--- a/internal/provider/source_nytimes_data_source.go
+++ b/internal/provider/source_nytimes_data_source.go
@@ -3,17 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -33,11 +29,11 @@ type SourceNytimesDataSource struct {
// SourceNytimesDataSourceModel describes the data model.
type SourceNytimesDataSourceModel struct {
- Configuration SourceNytimes `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -51,72 +47,20 @@ func (r *SourceNytimesDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceNytimes DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `End date to stop the article retrieval (format YYYY-MM)`,
- },
- "period": schema.Int64Attribute{
- Computed: true,
- Validators: []validator.Int64{
- int64validator.OneOf(
- []int64{
- 1,
- 7,
- 30,
- }...,
- ),
- },
- MarkdownDescription: `must be one of ["1", "7", "30"]` + "\n" +
- `Period of time (in days)`,
- },
- "share_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "facebook",
- ),
- },
- MarkdownDescription: `must be one of ["facebook"]` + "\n" +
- `Share Type`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "nytimes",
- ),
- },
- Description: `must be one of ["nytimes"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `Start date to begin the article retrieval (format YYYY-MM)`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_nytimes_data_source_sdk.go b/internal/provider/source_nytimes_data_source_sdk.go
old mode 100755
new mode 100644
index 678f4a2bd..8a4096907
--- a/internal/provider/source_nytimes_data_source_sdk.go
+++ b/internal/provider/source_nytimes_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceNytimesDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_nytimes_resource.go b/internal/provider/source_nytimes_resource.go
old mode 100755
new mode 100644
index a2102b3ec..407db1659
--- a/internal/provider/source_nytimes_resource.go
+++ b/internal/provider/source_nytimes_resource.go
@@ -3,19 +3,20 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -37,6 +38,7 @@ type SourceNytimesResource struct {
// SourceNytimesResourceModel describes the resource data model.
type SourceNytimesResourceModel struct {
Configuration SourceNytimes `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,17 +60,20 @@ func (r *SourceNytimesResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key`,
},
"end_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `End date to stop the article retrieval (format YYYY-MM)`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `End date to stop the article retrieval (format YYYY-MM)`,
},
"period": schema.Int64Attribute{
Required: true,
+ MarkdownDescription: `must be one of ["1", "7", "30"]` + "\n" +
+ `Period of time (in days)`,
Validators: []validator.Int64{
int64validator.OneOf(
[]int64{
@@ -78,44 +83,44 @@ func (r *SourceNytimesResource) Schema(ctx context.Context, req resource.SchemaR
}...,
),
},
- MarkdownDescription: `must be one of ["1", "7", "30"]` + "\n" +
- `Period of time (in days)`,
},
"share_type": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "facebook",
- ),
- },
MarkdownDescription: `must be one of ["facebook"]` + "\n" +
`Share Type`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
Validators: []validator.String{
stringvalidator.OneOf(
- "nytimes",
+ "facebook",
),
},
- Description: `must be one of ["nytimes"]`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `Start date to begin the article retrieval (format YYYY-MM)`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `Start date to begin the article retrieval (format YYYY-MM)`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -179,7 +184,7 @@ func (r *SourceNytimesResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceNytimes(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -355,5 +360,5 @@ func (r *SourceNytimesResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceNytimesResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_nytimes_resource_sdk.go b/internal/provider/source_nytimes_resource_sdk.go
old mode 100755
new mode 100644
index a4c13ca98..63d845fe8
--- a/internal/provider/source_nytimes_resource_sdk.go
+++ b/internal/provider/source_nytimes_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -23,15 +23,19 @@ func (r *SourceNytimesResourceModel) ToCreateSDKType() *shared.SourceNytimesCrea
} else {
shareType = nil
}
- sourceType := shared.SourceNytimesNytimes(r.Configuration.SourceType.ValueString())
startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
configuration := shared.SourceNytimes{
- APIKey: apiKey,
- EndDate: endDate,
- Period: period,
- ShareType: shareType,
- SourceType: sourceType,
- StartDate: startDate,
+ APIKey: apiKey,
+ EndDate: endDate,
+ Period: period,
+ ShareType: shareType,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -43,6 +47,7 @@ func (r *SourceNytimesResourceModel) ToCreateSDKType() *shared.SourceNytimesCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceNytimesCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -63,10 +68,10 @@ func (r *SourceNytimesResourceModel) ToUpdateSDKType() *shared.SourceNytimesPutR
} else {
endDate = nil
}
- period := shared.SourceNytimesUpdatePeriodUsedForMostPopularStreams(r.Configuration.Period.ValueInt64())
- shareType := new(shared.SourceNytimesUpdateShareTypeUsedForMostPopularSharedStream)
+ period := shared.PeriodUsedForMostPopularStreams(r.Configuration.Period.ValueInt64())
+ shareType := new(shared.ShareTypeUsedForMostPopularSharedStream)
if !r.Configuration.ShareType.IsUnknown() && !r.Configuration.ShareType.IsNull() {
- *shareType = shared.SourceNytimesUpdateShareTypeUsedForMostPopularSharedStream(r.Configuration.ShareType.ValueString())
+ *shareType = shared.ShareTypeUsedForMostPopularSharedStream(r.Configuration.ShareType.ValueString())
} else {
shareType = nil
}
diff --git a/internal/provider/source_okta_data_source.go b/internal/provider/source_okta_data_source.go
old mode 100755
new mode 100644
index bb47a0f41..7f1228d04
--- a/internal/provider/source_okta_data_source.go
+++ b/internal/provider/source_okta_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceOktaDataSource struct {
// SourceOktaDataSourceModel describes the data model.
type SourceOktaDataSourceModel struct {
- Configuration SourceOkta `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,134 +47,20 @@ func (r *SourceOktaDataSource) Schema(ctx context.Context, req datasource.Schema
MarkdownDescription: "SourceOkta DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_okta_authorization_method_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `An Okta token. See the docs for instructions on how to generate it.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- },
- },
- "source_okta_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token to obtain new Access Token, when it's expired.`,
- },
- },
- },
- "source_okta_update_authorization_method_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `An Okta token. See the docs for instructions on how to generate it.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- },
- },
- "source_okta_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token to obtain new Access Token, when it's expired.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "domain": schema.StringAttribute{
- Computed: true,
- Description: `The Okta domain. See the docs for instructions on how to find it.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "okta",
- ),
- },
- Description: `must be one of ["okta"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_okta_data_source_sdk.go b/internal/provider/source_okta_data_source_sdk.go
old mode 100755
new mode 100644
index 2c9bcf180..e544b8249
--- a/internal/provider/source_okta_data_source_sdk.go
+++ b/internal/provider/source_okta_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOktaDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_okta_resource.go b/internal/provider/source_okta_resource.go
old mode 100755
new mode 100644
index 8c53197fc..6fb962caa
--- a/internal/provider/source_okta_resource.go
+++ b/internal/provider/source_okta_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceOktaResource struct {
// SourceOktaResourceModel describes the resource data model.
type SourceOktaResourceModel struct {
Configuration SourceOkta `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,80 +59,19 @@ func (r *SourceOktaResource) Schema(ctx context.Context, req resource.SchemaRequ
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_okta_authorization_method_api_token": schema.SingleNestedAttribute{
+ "api_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `An Okta token. See the docs for instructions on how to generate it.`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
},
},
- "source_okta_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `Refresh Token to obtain new Access Token, when it's expired.`,
- },
- },
- },
- "source_okta_update_authorization_method_api_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Required: true,
- Description: `An Okta token. See the docs for instructions on how to generate it.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- },
- },
- "source_okta_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of your OAuth application.`,
@@ -142,6 +82,7 @@ func (r *SourceOktaResource) Schema(ctx context.Context, req resource.SchemaRequ
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Refresh Token to obtain new Access Token, when it's expired.`,
},
},
@@ -155,28 +96,30 @@ func (r *SourceOktaResource) Schema(ctx context.Context, req resource.SchemaRequ
Optional: true,
Description: `The Okta domain. See the docs for instructions on how to find it.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "okta",
- ),
- },
- Description: `must be one of ["okta"]`,
- },
"start_date": schema.StringAttribute{
Optional: true,
Description: `UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -240,7 +183,7 @@ func (r *SourceOktaResource) Create(ctx context.Context, req resource.CreateRequ
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceOkta(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -416,5 +359,5 @@ func (r *SourceOktaResource) Delete(ctx context.Context, req resource.DeleteRequ
}
func (r *SourceOktaResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_okta_resource_sdk.go b/internal/provider/source_okta_resource_sdk.go
old mode 100755
new mode 100644
index f22f66282..91a085312
--- a/internal/provider/source_okta_resource_sdk.go
+++ b/internal/provider/source_okta_resource_sdk.go
@@ -3,43 +3,39 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOktaResourceModel) ToCreateSDKType() *shared.SourceOktaCreateRequest {
var credentials *shared.SourceOktaAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceOktaAuthorizationMethodOAuth20 *shared.SourceOktaAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceOktaAuthorizationMethodOAuth20 != nil {
- authType := shared.SourceOktaAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceOktaAuthorizationMethodOAuth20.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceOktaAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceOktaAuthorizationMethodOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceOktaAuthorizationMethodOAuth20.RefreshToken.ValueString()
- sourceOktaAuthorizationMethodOAuth20 = &shared.SourceOktaAuthorizationMethodOAuth20{
- AuthType: authType,
+ var sourceOktaOAuth20 *shared.SourceOktaOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ sourceOktaOAuth20 = &shared.SourceOktaOAuth20{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceOktaAuthorizationMethodOAuth20 != nil {
+ if sourceOktaOAuth20 != nil {
credentials = &shared.SourceOktaAuthorizationMethod{
- SourceOktaAuthorizationMethodOAuth20: sourceOktaAuthorizationMethodOAuth20,
+ SourceOktaOAuth20: sourceOktaOAuth20,
}
}
- var sourceOktaAuthorizationMethodAPIToken *shared.SourceOktaAuthorizationMethodAPIToken
- if r.Configuration.Credentials.SourceOktaAuthorizationMethodAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceOktaAuthorizationMethodAPIToken.APIToken.ValueString()
- authType1 := shared.SourceOktaAuthorizationMethodAPITokenAuthType(r.Configuration.Credentials.SourceOktaAuthorizationMethodAPIToken.AuthType.ValueString())
- sourceOktaAuthorizationMethodAPIToken = &shared.SourceOktaAuthorizationMethodAPIToken{
+ var sourceOktaAPIToken *shared.SourceOktaAPIToken
+ if r.Configuration.Credentials.APIToken != nil {
+ apiToken := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ sourceOktaAPIToken = &shared.SourceOktaAPIToken{
APIToken: apiToken,
- AuthType: authType1,
}
}
- if sourceOktaAuthorizationMethodAPIToken != nil {
+ if sourceOktaAPIToken != nil {
credentials = &shared.SourceOktaAuthorizationMethod{
- SourceOktaAuthorizationMethodAPIToken: sourceOktaAuthorizationMethodAPIToken,
+ SourceOktaAPIToken: sourceOktaAPIToken,
}
}
}
@@ -49,7 +45,6 @@ func (r *SourceOktaResourceModel) ToCreateSDKType() *shared.SourceOktaCreateRequ
} else {
domain = nil
}
- sourceType := shared.SourceOktaOkta(r.Configuration.SourceType.ValueString())
startDate := new(string)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate = r.Configuration.StartDate.ValueString()
@@ -59,9 +54,14 @@ func (r *SourceOktaResourceModel) ToCreateSDKType() *shared.SourceOktaCreateRequ
configuration := shared.SourceOkta{
Credentials: credentials,
Domain: domain,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -72,6 +72,7 @@ func (r *SourceOktaResourceModel) ToCreateSDKType() *shared.SourceOktaCreateRequ
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceOktaCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -87,36 +88,32 @@ func (r *SourceOktaResourceModel) ToGetSDKType() *shared.SourceOktaCreateRequest
func (r *SourceOktaResourceModel) ToUpdateSDKType() *shared.SourceOktaPutRequest {
var credentials *shared.SourceOktaUpdateAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceOktaUpdateAuthorizationMethodOAuth20 *shared.SourceOktaUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceOktaUpdateAuthorizationMethodOAuth20 != nil {
- authType := shared.SourceOktaUpdateAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceOktaUpdateAuthorizationMethodOAuth20.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceOktaUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceOktaUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceOktaUpdateAuthorizationMethodOAuth20.RefreshToken.ValueString()
- sourceOktaUpdateAuthorizationMethodOAuth20 = &shared.SourceOktaUpdateAuthorizationMethodOAuth20{
- AuthType: authType,
+ var sourceOktaUpdateOAuth20 *shared.SourceOktaUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ sourceOktaUpdateOAuth20 = &shared.SourceOktaUpdateOAuth20{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceOktaUpdateAuthorizationMethodOAuth20 != nil {
+ if sourceOktaUpdateOAuth20 != nil {
credentials = &shared.SourceOktaUpdateAuthorizationMethod{
- SourceOktaUpdateAuthorizationMethodOAuth20: sourceOktaUpdateAuthorizationMethodOAuth20,
+ SourceOktaUpdateOAuth20: sourceOktaUpdateOAuth20,
}
}
- var sourceOktaUpdateAuthorizationMethodAPIToken *shared.SourceOktaUpdateAuthorizationMethodAPIToken
- if r.Configuration.Credentials.SourceOktaUpdateAuthorizationMethodAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceOktaUpdateAuthorizationMethodAPIToken.APIToken.ValueString()
- authType1 := shared.SourceOktaUpdateAuthorizationMethodAPITokenAuthType(r.Configuration.Credentials.SourceOktaUpdateAuthorizationMethodAPIToken.AuthType.ValueString())
- sourceOktaUpdateAuthorizationMethodAPIToken = &shared.SourceOktaUpdateAuthorizationMethodAPIToken{
+ var sourceOktaUpdateAPIToken *shared.SourceOktaUpdateAPIToken
+ if r.Configuration.Credentials.APIToken != nil {
+ apiToken := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ sourceOktaUpdateAPIToken = &shared.SourceOktaUpdateAPIToken{
APIToken: apiToken,
- AuthType: authType1,
}
}
- if sourceOktaUpdateAuthorizationMethodAPIToken != nil {
+ if sourceOktaUpdateAPIToken != nil {
credentials = &shared.SourceOktaUpdateAuthorizationMethod{
- SourceOktaUpdateAuthorizationMethodAPIToken: sourceOktaUpdateAuthorizationMethodAPIToken,
+ SourceOktaUpdateAPIToken: sourceOktaUpdateAPIToken,
}
}
}
diff --git a/internal/provider/source_omnisend_data_source.go b/internal/provider/source_omnisend_data_source.go
old mode 100755
new mode 100644
index 126ccad51..5402eaee1
--- a/internal/provider/source_omnisend_data_source.go
+++ b/internal/provider/source_omnisend_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceOmnisendDataSource struct {
// SourceOmnisendDataSourceModel describes the data model.
type SourceOmnisendDataSourceModel struct {
- Configuration SourceOmnisend `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceOmnisendDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceOmnisend DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "omnisend",
- ),
- },
- Description: `must be one of ["omnisend"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_omnisend_data_source_sdk.go b/internal/provider/source_omnisend_data_source_sdk.go
old mode 100755
new mode 100644
index a16de977f..0958424ab
--- a/internal/provider/source_omnisend_data_source_sdk.go
+++ b/internal/provider/source_omnisend_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOmnisendDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_omnisend_resource.go b/internal/provider/source_omnisend_resource.go
old mode 100755
new mode 100644
index 07ab889c6..e4c65ddff
--- a/internal/provider/source_omnisend_resource.go
+++ b/internal/provider/source_omnisend_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceOmnisendResource struct {
// SourceOmnisendResourceModel describes the resource data model.
type SourceOmnisendResourceModel struct {
- Configuration SourceOmnisend `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceOmnisendResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceOmnisendResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "omnisend",
- ),
- },
- Description: `must be one of ["omnisend"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceOmnisendResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceOmnisend(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceOmnisendResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceOmnisendResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_omnisend_resource_sdk.go b/internal/provider/source_omnisend_resource_sdk.go
old mode 100755
new mode 100644
index 49239e314..03f53a91d
--- a/internal/provider/source_omnisend_resource_sdk.go
+++ b/internal/provider/source_omnisend_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOmnisendResourceModel) ToCreateSDKType() *shared.SourceOmnisendCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceOmnisendOmnisend(r.Configuration.SourceType.ValueString())
configuration := shared.SourceOmnisend{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceOmnisendResourceModel) ToCreateSDKType() *shared.SourceOmnisendCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceOmnisendCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_onesignal_data_source.go b/internal/provider/source_onesignal_data_source.go
old mode 100755
new mode 100644
index d8bdbdada..00ca3a3be
--- a/internal/provider/source_onesignal_data_source.go
+++ b/internal/provider/source_onesignal_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceOnesignalDataSource struct {
// SourceOnesignalDataSourceModel describes the data model.
type SourceOnesignalDataSourceModel struct {
- Configuration SourceOnesignal `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,62 +47,20 @@ func (r *SourceOnesignalDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceOnesignal DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "applications": schema.ListNestedAttribute{
- Computed: true,
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "app_api_key": schema.StringAttribute{
- Computed: true,
- },
- "app_id": schema.StringAttribute{
- Computed: true,
- },
- "app_name": schema.StringAttribute{
- Computed: true,
- },
- },
- },
- Description: `Applications keys, see the docs for more information on how to obtain this data`,
- },
- "outcome_names": schema.StringAttribute{
- Computed: true,
- Description: `Comma-separated list of names and the value (sum/count) for the returned outcome data. See the docs for more details`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "onesignal",
- ),
- },
- Description: `must be one of ["onesignal"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for OneSignal API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
- },
- "user_auth_key": schema.StringAttribute{
- Computed: true,
- Description: `OneSignal User Auth Key, see the docs for more information on how to obtain this key.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_onesignal_data_source_sdk.go b/internal/provider/source_onesignal_data_source_sdk.go
old mode 100755
new mode 100644
index 2ff0e5de0..c73a4ebf3
--- a/internal/provider/source_onesignal_data_source_sdk.go
+++ b/internal/provider/source_onesignal_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOnesignalDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_onesignal_resource.go b/internal/provider/source_onesignal_resource.go
old mode 100755
new mode 100644
index f5ac90772..86ec5f1ea
--- a/internal/provider/source_onesignal_resource.go
+++ b/internal/provider/source_onesignal_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceOnesignalResource struct {
// SourceOnesignalResourceModel describes the resource data model.
type SourceOnesignalResourceModel struct {
Configuration SourceOnesignal `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -60,7 +61,8 @@ func (r *SourceOnesignalResource) Schema(ctx context.Context, req resource.Schem
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"app_api_key": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Sensitive: true,
},
"app_id": schema.StringAttribute{
Required: true,
@@ -76,35 +78,38 @@ func (r *SourceOnesignalResource) Schema(ctx context.Context, req resource.Schem
Required: true,
Description: `Comma-separated list of names and the value (sum/count) for the returned outcome data. See the docs for more details`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "onesignal",
- ),
- },
- Description: `must be one of ["onesignal"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The date from which you'd like to replicate data for OneSignal API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for OneSignal API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
},
"user_auth_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OneSignal User Auth Key, see the docs for more information on how to obtain this key.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -168,7 +173,7 @@ func (r *SourceOnesignalResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceOnesignal(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -344,5 +349,5 @@ func (r *SourceOnesignalResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceOnesignalResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_onesignal_resource_sdk.go b/internal/provider/source_onesignal_resource_sdk.go
old mode 100755
new mode 100644
index a57e1e65d..83f9de6d8
--- a/internal/provider/source_onesignal_resource_sdk.go
+++ b/internal/provider/source_onesignal_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -26,16 +26,20 @@ func (r *SourceOnesignalResourceModel) ToCreateSDKType() *shared.SourceOnesignal
})
}
outcomeNames := r.Configuration.OutcomeNames.ValueString()
- sourceType := shared.SourceOnesignalOnesignal(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
userAuthKey := r.Configuration.UserAuthKey.ValueString()
configuration := shared.SourceOnesignal{
Applications: applications,
OutcomeNames: outcomeNames,
- SourceType: sourceType,
StartDate: startDate,
UserAuthKey: userAuthKey,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -46,6 +50,7 @@ func (r *SourceOnesignalResourceModel) ToCreateSDKType() *shared.SourceOnesignal
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceOnesignalCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -59,7 +64,7 @@ func (r *SourceOnesignalResourceModel) ToGetSDKType() *shared.SourceOnesignalCre
}
func (r *SourceOnesignalResourceModel) ToUpdateSDKType() *shared.SourceOnesignalPutRequest {
- var applications []shared.SourceOnesignalUpdateApplications = nil
+ var applications []shared.Applications = nil
for _, applicationsItem := range r.Configuration.Applications {
appAPIKey := applicationsItem.AppAPIKey.ValueString()
appID := applicationsItem.AppID.ValueString()
@@ -69,7 +74,7 @@ func (r *SourceOnesignalResourceModel) ToUpdateSDKType() *shared.SourceOnesignal
} else {
appName = nil
}
- applications = append(applications, shared.SourceOnesignalUpdateApplications{
+ applications = append(applications, shared.Applications{
AppAPIKey: appAPIKey,
AppID: appID,
AppName: appName,
diff --git a/internal/provider/source_oracle_data_source.go b/internal/provider/source_oracle_data_source.go
old mode 100755
new mode 100644
index 0201ea993..e45ef39b8
--- a/internal/provider/source_oracle_data_source.go
+++ b/internal/provider/source_oracle_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceOracleDataSource struct {
// SourceOracleDataSourceModel describes the data model.
type SourceOracleDataSourceModel struct {
- Configuration SourceOracle `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,409 +47,20 @@ func (r *SourceOracleDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceOracle DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "connection_data": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_oracle_connect_by_service_name": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "connection_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "service_name",
- ),
- },
- Description: `must be one of ["service_name"]`,
- },
- "service_name": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Use service name`,
- },
- "source_oracle_connect_by_system_id_sid": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "connection_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sid",
- ),
- },
- Description: `must be one of ["sid"]`,
- },
- "sid": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Use SID (Oracle System Identifier)`,
- },
- "source_oracle_update_connect_by_service_name": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "connection_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "service_name",
- ),
- },
- Description: `must be one of ["service_name"]`,
- },
- "service_name": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Use service name`,
- },
- "source_oracle_update_connect_by_system_id_sid": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "connection_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sid",
- ),
- },
- Description: `must be one of ["sid"]`,
- },
- "sid": schema.StringAttribute{
- Computed: true,
- },
- },
- Description: `Use SID (Oracle System Identifier)`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Connect data that will be used for DB connection`,
- },
- "encryption": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_oracle_encryption_native_network_encryption_nne": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "encryption_algorithm": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AES256",
- "RC4_56",
- "3DES168",
- ),
- },
- MarkdownDescription: `must be one of ["AES256", "RC4_56", "3DES168"]` + "\n" +
- `This parameter defines what encryption algorithm is used.`,
- },
- "encryption_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "client_nne",
- ),
- },
- Description: `must be one of ["client_nne"]`,
- },
- },
- Description: `The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.`,
- },
- "source_oracle_encryption_tls_encrypted_verify_certificate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "encryption_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
- "ssl_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.`,
- },
- },
- Description: `Verify and use the certificate provided by the server.`,
- },
- "source_oracle_update_encryption_native_network_encryption_nne": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "encryption_algorithm": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AES256",
- "RC4_56",
- "3DES168",
- ),
- },
- MarkdownDescription: `must be one of ["AES256", "RC4_56", "3DES168"]` + "\n" +
- `This parameter defines what encryption algorithm is used.`,
- },
- "encryption_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "client_nne",
- ),
- },
- Description: `must be one of ["client_nne"]`,
- },
- },
- Description: `The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.`,
- },
- "source_oracle_update_encryption_tls_encrypted_verify_certificate": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "encryption_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
- "ssl_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.`,
- },
- },
- Description: `Verify and use the certificate provided by the server.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The encryption method with is used when communicating with the database.`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the database.`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `The password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- MarkdownDescription: `Port of the database.` + "\n" +
- `Oracle Corporations recommends the following port numbers:` + "\n" +
- `1521 - Default listening port for client connections to the listener. ` + "\n" +
- `2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL`,
- },
- "schemas": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The list of schemas to sync from. Defaults to user. Case sensitive.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oracle",
- ),
- },
- Description: `must be one of ["oracle"]`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_oracle_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_oracle_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_oracle_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_oracle_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_oracle_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_oracle_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `The username which is used to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_oracle_data_source_sdk.go b/internal/provider/source_oracle_data_source_sdk.go
old mode 100755
new mode 100644
index 80fad831d..f3bb27691
--- a/internal/provider/source_oracle_data_source_sdk.go
+++ b/internal/provider/source_oracle_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOracleDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_oracle_resource.go b/internal/provider/source_oracle_resource.go
old mode 100755
new mode 100644
index 169821167..2778957b2
--- a/internal/provider/source_oracle_resource.go
+++ b/internal/provider/source_oracle_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceOracleResource struct {
// SourceOracleResourceModel describes the resource data model.
type SourceOracleResourceModel struct {
Configuration SourceOracle `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,72 +60,18 @@ func (r *SourceOracleResource) Schema(ctx context.Context, req resource.SchemaRe
"connection_data": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_oracle_connect_by_service_name": schema.SingleNestedAttribute{
+ "service_name": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "connection_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "service_name",
- ),
- },
- Description: `must be one of ["service_name"]`,
- },
"service_name": schema.StringAttribute{
Required: true,
},
},
Description: `Use service name`,
},
- "source_oracle_connect_by_system_id_sid": schema.SingleNestedAttribute{
+ "system_idsid": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "connection_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sid",
- ),
- },
- Description: `must be one of ["sid"]`,
- },
- "sid": schema.StringAttribute{
- Required: true,
- },
- },
- Description: `Use SID (Oracle System Identifier)`,
- },
- "source_oracle_update_connect_by_service_name": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "connection_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "service_name",
- ),
- },
- Description: `must be one of ["service_name"]`,
- },
- "service_name": schema.StringAttribute{
- Required: true,
- },
- },
- Description: `Use service name`,
- },
- "source_oracle_update_connect_by_system_id_sid": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "connection_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sid",
- ),
- },
- Description: `must be one of ["sid"]`,
- },
"sid": schema.StringAttribute{
Required: true,
},
@@ -131,65 +79,21 @@ func (r *SourceOracleResource) Schema(ctx context.Context, req resource.SchemaRe
Description: `Use SID (Oracle System Identifier)`,
},
},
+ Description: `Connect data that will be used for DB connection`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Connect data that will be used for DB connection`,
},
"encryption": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_oracle_encryption_native_network_encryption_nne": schema.SingleNestedAttribute{
+ "native_network_encryption_nne": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"encryption_algorithm": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "AES256",
- "RC4_56",
- "3DES168",
- ),
- },
- MarkdownDescription: `must be one of ["AES256", "RC4_56", "3DES168"]` + "\n" +
+ MarkdownDescription: `must be one of ["AES256", "RC4_56", "3DES168"]; Default: "AES256"` + "\n" +
`This parameter defines what encryption algorithm is used.`,
- },
- "encryption_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "client_nne",
- ),
- },
- Description: `must be one of ["client_nne"]`,
- },
- },
- Description: `The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.`,
- },
- "source_oracle_encryption_tls_encrypted_verify_certificate": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "encryption_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
- "ssl_certificate": schema.StringAttribute{
- Required: true,
- Description: `Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.`,
- },
- },
- Description: `Verify and use the certificate provided by the server.`,
- },
- "source_oracle_update_encryption_native_network_encryption_nne": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "encryption_algorithm": schema.StringAttribute{
- Optional: true,
Validators: []validator.String{
stringvalidator.OneOf(
"AES256",
@@ -197,33 +101,13 @@ func (r *SourceOracleResource) Schema(ctx context.Context, req resource.SchemaRe
"3DES168",
),
},
- MarkdownDescription: `must be one of ["AES256", "RC4_56", "3DES168"]` + "\n" +
- `This parameter defines what encryption algorithm is used.`,
- },
- "encryption_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "client_nne",
- ),
- },
- Description: `must be one of ["client_nne"]`,
},
},
Description: `The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.`,
},
- "source_oracle_update_encryption_tls_encrypted_verify_certificate": schema.SingleNestedAttribute{
+ "tls_encrypted_verify_certificate": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "encryption_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "encrypted_verify_certificate",
- ),
- },
- Description: `must be one of ["encrypted_verify_certificate"]`,
- },
"ssl_certificate": schema.StringAttribute{
Required: true,
Description: `Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.`,
@@ -232,10 +116,10 @@ func (r *SourceOracleResource) Schema(ctx context.Context, req resource.SchemaRe
Description: `Verify and use the certificate provided by the server.`,
},
},
+ Description: `The encryption method with is used when communicating with the database.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `The encryption method with is used when communicating with the database.`,
},
"host": schema.StringAttribute{
Required: true,
@@ -247,11 +131,13 @@ func (r *SourceOracleResource) Schema(ctx context.Context, req resource.SchemaRe
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The password associated with the username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- MarkdownDescription: `Port of the database.` + "\n" +
+ Optional: true,
+ MarkdownDescription: `Default: 1521` + "\n" +
+ `Port of the database.` + "\n" +
`Oracle Corporations recommends the following port numbers:` + "\n" +
`1521 - Default listening port for client connections to the listener. ` + "\n" +
`2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL`,
@@ -261,134 +147,25 @@ func (r *SourceOracleResource) Schema(ctx context.Context, req resource.SchemaRe
ElementType: types.StringType,
Description: `The list of schemas to sync from. Defaults to user. Case sensitive.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oracle",
- ),
- },
- Description: `must be one of ["oracle"]`,
- },
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_oracle_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_oracle_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_oracle_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_oracle_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_oracle_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -396,35 +173,28 @@ func (r *SourceOracleResource) Schema(ctx context.Context, req resource.SchemaRe
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_oracle_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -434,10 +204,10 @@ func (r *SourceOracleResource) Schema(ctx context.Context, req resource.SchemaRe
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -445,13 +215,24 @@ func (r *SourceOracleResource) Schema(ctx context.Context, req resource.SchemaRe
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -515,7 +296,7 @@ func (r *SourceOracleResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceOracle(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -691,5 +472,5 @@ func (r *SourceOracleResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceOracleResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_oracle_resource_sdk.go b/internal/provider/source_oracle_resource_sdk.go
old mode 100755
new mode 100644
index 5987adc96..8dd0a5e28
--- a/internal/provider/source_oracle_resource_sdk.go
+++ b/internal/provider/source_oracle_resource_sdk.go
@@ -3,84 +3,66 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOracleResourceModel) ToCreateSDKType() *shared.SourceOracleCreateRequest {
var connectionData *shared.SourceOracleConnectBy
if r.Configuration.ConnectionData != nil {
- var sourceOracleConnectByServiceName *shared.SourceOracleConnectByServiceName
- if r.Configuration.ConnectionData.SourceOracleConnectByServiceName != nil {
- connectionType := new(shared.SourceOracleConnectByServiceNameConnectionType)
- if !r.Configuration.ConnectionData.SourceOracleConnectByServiceName.ConnectionType.IsUnknown() && !r.Configuration.ConnectionData.SourceOracleConnectByServiceName.ConnectionType.IsNull() {
- *connectionType = shared.SourceOracleConnectByServiceNameConnectionType(r.Configuration.ConnectionData.SourceOracleConnectByServiceName.ConnectionType.ValueString())
- } else {
- connectionType = nil
- }
- serviceName := r.Configuration.ConnectionData.SourceOracleConnectByServiceName.ServiceName.ValueString()
- sourceOracleConnectByServiceName = &shared.SourceOracleConnectByServiceName{
- ConnectionType: connectionType,
- ServiceName: serviceName,
+ var sourceOracleServiceName *shared.SourceOracleServiceName
+ if r.Configuration.ConnectionData.ServiceName != nil {
+ serviceName := r.Configuration.ConnectionData.ServiceName.ServiceName.ValueString()
+ sourceOracleServiceName = &shared.SourceOracleServiceName{
+ ServiceName: serviceName,
}
}
- if sourceOracleConnectByServiceName != nil {
+ if sourceOracleServiceName != nil {
connectionData = &shared.SourceOracleConnectBy{
- SourceOracleConnectByServiceName: sourceOracleConnectByServiceName,
+ SourceOracleServiceName: sourceOracleServiceName,
}
}
- var sourceOracleConnectBySystemIDSID *shared.SourceOracleConnectBySystemIDSID
- if r.Configuration.ConnectionData.SourceOracleConnectBySystemIDSID != nil {
- connectionType1 := new(shared.SourceOracleConnectBySystemIDSIDConnectionType)
- if !r.Configuration.ConnectionData.SourceOracleConnectBySystemIDSID.ConnectionType.IsUnknown() && !r.Configuration.ConnectionData.SourceOracleConnectBySystemIDSID.ConnectionType.IsNull() {
- *connectionType1 = shared.SourceOracleConnectBySystemIDSIDConnectionType(r.Configuration.ConnectionData.SourceOracleConnectBySystemIDSID.ConnectionType.ValueString())
- } else {
- connectionType1 = nil
- }
- sid := r.Configuration.ConnectionData.SourceOracleConnectBySystemIDSID.Sid.ValueString()
- sourceOracleConnectBySystemIDSID = &shared.SourceOracleConnectBySystemIDSID{
- ConnectionType: connectionType1,
- Sid: sid,
+ var sourceOracleSystemIDSID *shared.SourceOracleSystemIDSID
+ if r.Configuration.ConnectionData.SystemIDSID != nil {
+ sid := r.Configuration.ConnectionData.SystemIDSID.Sid.ValueString()
+ sourceOracleSystemIDSID = &shared.SourceOracleSystemIDSID{
+ Sid: sid,
}
}
- if sourceOracleConnectBySystemIDSID != nil {
+ if sourceOracleSystemIDSID != nil {
connectionData = &shared.SourceOracleConnectBy{
- SourceOracleConnectBySystemIDSID: sourceOracleConnectBySystemIDSID,
+ SourceOracleSystemIDSID: sourceOracleSystemIDSID,
}
}
}
var encryption shared.SourceOracleEncryption
- var sourceOracleEncryptionNativeNetworkEncryptionNNE *shared.SourceOracleEncryptionNativeNetworkEncryptionNNE
- if r.Configuration.Encryption.SourceOracleEncryptionNativeNetworkEncryptionNNE != nil {
- encryptionAlgorithm := new(shared.SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm)
- if !r.Configuration.Encryption.SourceOracleEncryptionNativeNetworkEncryptionNNE.EncryptionAlgorithm.IsUnknown() && !r.Configuration.Encryption.SourceOracleEncryptionNativeNetworkEncryptionNNE.EncryptionAlgorithm.IsNull() {
- *encryptionAlgorithm = shared.SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm(r.Configuration.Encryption.SourceOracleEncryptionNativeNetworkEncryptionNNE.EncryptionAlgorithm.ValueString())
+ var sourceOracleNativeNetworkEncryptionNNE *shared.SourceOracleNativeNetworkEncryptionNNE
+ if r.Configuration.Encryption.NativeNetworkEncryptionNNE != nil {
+ encryptionAlgorithm := new(shared.SourceOracleEncryptionAlgorithm)
+ if !r.Configuration.Encryption.NativeNetworkEncryptionNNE.EncryptionAlgorithm.IsUnknown() && !r.Configuration.Encryption.NativeNetworkEncryptionNNE.EncryptionAlgorithm.IsNull() {
+ *encryptionAlgorithm = shared.SourceOracleEncryptionAlgorithm(r.Configuration.Encryption.NativeNetworkEncryptionNNE.EncryptionAlgorithm.ValueString())
} else {
encryptionAlgorithm = nil
}
- encryptionMethod := shared.SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod(r.Configuration.Encryption.SourceOracleEncryptionNativeNetworkEncryptionNNE.EncryptionMethod.ValueString())
- sourceOracleEncryptionNativeNetworkEncryptionNNE = &shared.SourceOracleEncryptionNativeNetworkEncryptionNNE{
+ sourceOracleNativeNetworkEncryptionNNE = &shared.SourceOracleNativeNetworkEncryptionNNE{
EncryptionAlgorithm: encryptionAlgorithm,
- EncryptionMethod: encryptionMethod,
}
}
- if sourceOracleEncryptionNativeNetworkEncryptionNNE != nil {
+ if sourceOracleNativeNetworkEncryptionNNE != nil {
encryption = shared.SourceOracleEncryption{
- SourceOracleEncryptionNativeNetworkEncryptionNNE: sourceOracleEncryptionNativeNetworkEncryptionNNE,
+ SourceOracleNativeNetworkEncryptionNNE: sourceOracleNativeNetworkEncryptionNNE,
}
}
- var sourceOracleEncryptionTLSEncryptedVerifyCertificate *shared.SourceOracleEncryptionTLSEncryptedVerifyCertificate
- if r.Configuration.Encryption.SourceOracleEncryptionTLSEncryptedVerifyCertificate != nil {
- encryptionMethod1 := shared.SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod(r.Configuration.Encryption.SourceOracleEncryptionTLSEncryptedVerifyCertificate.EncryptionMethod.ValueString())
- sslCertificate := r.Configuration.Encryption.SourceOracleEncryptionTLSEncryptedVerifyCertificate.SslCertificate.ValueString()
- sourceOracleEncryptionTLSEncryptedVerifyCertificate = &shared.SourceOracleEncryptionTLSEncryptedVerifyCertificate{
- EncryptionMethod: encryptionMethod1,
- SslCertificate: sslCertificate,
+ var sourceOracleTLSEncryptedVerifyCertificate *shared.SourceOracleTLSEncryptedVerifyCertificate
+ if r.Configuration.Encryption.TLSEncryptedVerifyCertificate != nil {
+ sslCertificate := r.Configuration.Encryption.TLSEncryptedVerifyCertificate.SslCertificate.ValueString()
+ sourceOracleTLSEncryptedVerifyCertificate = &shared.SourceOracleTLSEncryptedVerifyCertificate{
+ SslCertificate: sslCertificate,
}
}
- if sourceOracleEncryptionTLSEncryptedVerifyCertificate != nil {
+ if sourceOracleTLSEncryptedVerifyCertificate != nil {
encryption = shared.SourceOracleEncryption{
- SourceOracleEncryptionTLSEncryptedVerifyCertificate: sourceOracleEncryptionTLSEncryptedVerifyCertificate,
+ SourceOracleTLSEncryptedVerifyCertificate: sourceOracleTLSEncryptedVerifyCertificate,
}
}
host := r.Configuration.Host.ValueString()
@@ -96,64 +78,71 @@ func (r *SourceOracleResourceModel) ToCreateSDKType() *shared.SourceOracleCreate
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var schemas []string = nil
for _, schemasItem := range r.Configuration.Schemas {
schemas = append(schemas, schemasItem.ValueString())
}
- sourceType := shared.SourceOracleOracle(r.Configuration.SourceType.ValueString())
var tunnelMethod *shared.SourceOracleSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourceOracleSSHTunnelMethodNoTunnel *shared.SourceOracleSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourceOracleSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourceOracleSSHTunnelMethodNoTunnel = &shared.SourceOracleSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourceOracleNoTunnel *shared.SourceOracleNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourceOracleNoTunnel = &shared.SourceOracleNoTunnel{}
}
- if sourceOracleSSHTunnelMethodNoTunnel != nil {
+ if sourceOracleNoTunnel != nil {
tunnelMethod = &shared.SourceOracleSSHTunnelMethod{
- SourceOracleSSHTunnelMethodNoTunnel: sourceOracleSSHTunnelMethodNoTunnel,
+ SourceOracleNoTunnel: sourceOracleNoTunnel,
}
}
- var sourceOracleSSHTunnelMethodSSHKeyAuthentication *shared.SourceOracleSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourceOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourceOracleSSHTunnelMethodSSHKeyAuthentication = &shared.SourceOracleSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourceOracleSSHKeyAuthentication *shared.SourceOracleSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourceOracleSSHKeyAuthentication = &shared.SourceOracleSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourceOracleSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourceOracleSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourceOracleSSHTunnelMethod{
- SourceOracleSSHTunnelMethodSSHKeyAuthentication: sourceOracleSSHTunnelMethodSSHKeyAuthentication,
+ SourceOracleSSHKeyAuthentication: sourceOracleSSHKeyAuthentication,
}
}
- var sourceOracleSSHTunnelMethodPasswordAuthentication *shared.SourceOracleSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourceOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourceOracleSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourceOracleSSHTunnelMethodPasswordAuthentication = &shared.SourceOracleSSHTunnelMethodPasswordAuthentication{
+ var sourceOraclePasswordAuthentication *shared.SourceOraclePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourceOraclePasswordAuthentication = &shared.SourceOraclePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourceOracleSSHTunnelMethodPasswordAuthentication != nil {
+ if sourceOraclePasswordAuthentication != nil {
tunnelMethod = &shared.SourceOracleSSHTunnelMethod{
- SourceOracleSSHTunnelMethodPasswordAuthentication: sourceOracleSSHTunnelMethodPasswordAuthentication,
+ SourceOraclePasswordAuthentication: sourceOraclePasswordAuthentication,
}
}
}
@@ -166,10 +155,15 @@ func (r *SourceOracleResourceModel) ToCreateSDKType() *shared.SourceOracleCreate
Password: password,
Port: port,
Schemas: schemas,
- SourceType: sourceType,
TunnelMethod: tunnelMethod,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -180,6 +174,7 @@ func (r *SourceOracleResourceModel) ToCreateSDKType() *shared.SourceOracleCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceOracleCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -193,79 +188,61 @@ func (r *SourceOracleResourceModel) ToGetSDKType() *shared.SourceOracleCreateReq
}
func (r *SourceOracleResourceModel) ToUpdateSDKType() *shared.SourceOraclePutRequest {
- var connectionData *shared.SourceOracleUpdateConnectBy
+ var connectionData *shared.ConnectBy
if r.Configuration.ConnectionData != nil {
- var sourceOracleUpdateConnectByServiceName *shared.SourceOracleUpdateConnectByServiceName
- if r.Configuration.ConnectionData.SourceOracleUpdateConnectByServiceName != nil {
- connectionType := new(shared.SourceOracleUpdateConnectByServiceNameConnectionType)
- if !r.Configuration.ConnectionData.SourceOracleUpdateConnectByServiceName.ConnectionType.IsUnknown() && !r.Configuration.ConnectionData.SourceOracleUpdateConnectByServiceName.ConnectionType.IsNull() {
- *connectionType = shared.SourceOracleUpdateConnectByServiceNameConnectionType(r.Configuration.ConnectionData.SourceOracleUpdateConnectByServiceName.ConnectionType.ValueString())
- } else {
- connectionType = nil
- }
- serviceName := r.Configuration.ConnectionData.SourceOracleUpdateConnectByServiceName.ServiceName.ValueString()
- sourceOracleUpdateConnectByServiceName = &shared.SourceOracleUpdateConnectByServiceName{
- ConnectionType: connectionType,
- ServiceName: serviceName,
+ var serviceName *shared.ServiceName
+ if r.Configuration.ConnectionData.ServiceName != nil {
+ serviceName1 := r.Configuration.ConnectionData.ServiceName.ServiceName.ValueString()
+ serviceName = &shared.ServiceName{
+ ServiceName: serviceName1,
}
}
- if sourceOracleUpdateConnectByServiceName != nil {
- connectionData = &shared.SourceOracleUpdateConnectBy{
- SourceOracleUpdateConnectByServiceName: sourceOracleUpdateConnectByServiceName,
+ if serviceName != nil {
+ connectionData = &shared.ConnectBy{
+ ServiceName: serviceName,
}
}
- var sourceOracleUpdateConnectBySystemIDSID *shared.SourceOracleUpdateConnectBySystemIDSID
- if r.Configuration.ConnectionData.SourceOracleUpdateConnectBySystemIDSID != nil {
- connectionType1 := new(shared.SourceOracleUpdateConnectBySystemIDSIDConnectionType)
- if !r.Configuration.ConnectionData.SourceOracleUpdateConnectBySystemIDSID.ConnectionType.IsUnknown() && !r.Configuration.ConnectionData.SourceOracleUpdateConnectBySystemIDSID.ConnectionType.IsNull() {
- *connectionType1 = shared.SourceOracleUpdateConnectBySystemIDSIDConnectionType(r.Configuration.ConnectionData.SourceOracleUpdateConnectBySystemIDSID.ConnectionType.ValueString())
- } else {
- connectionType1 = nil
- }
- sid := r.Configuration.ConnectionData.SourceOracleUpdateConnectBySystemIDSID.Sid.ValueString()
- sourceOracleUpdateConnectBySystemIDSID = &shared.SourceOracleUpdateConnectBySystemIDSID{
- ConnectionType: connectionType1,
- Sid: sid,
+ var systemIDSID *shared.SystemIDSID
+ if r.Configuration.ConnectionData.SystemIDSID != nil {
+ sid := r.Configuration.ConnectionData.SystemIDSID.Sid.ValueString()
+ systemIDSID = &shared.SystemIDSID{
+ Sid: sid,
}
}
- if sourceOracleUpdateConnectBySystemIDSID != nil {
- connectionData = &shared.SourceOracleUpdateConnectBy{
- SourceOracleUpdateConnectBySystemIDSID: sourceOracleUpdateConnectBySystemIDSID,
+ if systemIDSID != nil {
+ connectionData = &shared.ConnectBy{
+ SystemIDSID: systemIDSID,
}
}
}
- var encryption shared.SourceOracleUpdateEncryption
- var sourceOracleUpdateEncryptionNativeNetworkEncryptionNNE *shared.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE
- if r.Configuration.Encryption.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE != nil {
- encryptionAlgorithm := new(shared.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm)
- if !r.Configuration.Encryption.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE.EncryptionAlgorithm.IsUnknown() && !r.Configuration.Encryption.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE.EncryptionAlgorithm.IsNull() {
- *encryptionAlgorithm = shared.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm(r.Configuration.Encryption.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE.EncryptionAlgorithm.ValueString())
+ var encryption shared.Encryption
+ var nativeNetworkEncryptionNNE *shared.NativeNetworkEncryptionNNE
+ if r.Configuration.Encryption.NativeNetworkEncryptionNNE != nil {
+ encryptionAlgorithm := new(shared.EncryptionAlgorithm)
+ if !r.Configuration.Encryption.NativeNetworkEncryptionNNE.EncryptionAlgorithm.IsUnknown() && !r.Configuration.Encryption.NativeNetworkEncryptionNNE.EncryptionAlgorithm.IsNull() {
+ *encryptionAlgorithm = shared.EncryptionAlgorithm(r.Configuration.Encryption.NativeNetworkEncryptionNNE.EncryptionAlgorithm.ValueString())
} else {
encryptionAlgorithm = nil
}
- encryptionMethod := shared.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionMethod(r.Configuration.Encryption.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE.EncryptionMethod.ValueString())
- sourceOracleUpdateEncryptionNativeNetworkEncryptionNNE = &shared.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE{
+ nativeNetworkEncryptionNNE = &shared.NativeNetworkEncryptionNNE{
EncryptionAlgorithm: encryptionAlgorithm,
- EncryptionMethod: encryptionMethod,
}
}
- if sourceOracleUpdateEncryptionNativeNetworkEncryptionNNE != nil {
- encryption = shared.SourceOracleUpdateEncryption{
- SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE: sourceOracleUpdateEncryptionNativeNetworkEncryptionNNE,
+ if nativeNetworkEncryptionNNE != nil {
+ encryption = shared.Encryption{
+ NativeNetworkEncryptionNNE: nativeNetworkEncryptionNNE,
}
}
- var sourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate *shared.SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate
- if r.Configuration.Encryption.SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate != nil {
- encryptionMethod1 := shared.SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificateEncryptionMethod(r.Configuration.Encryption.SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate.EncryptionMethod.ValueString())
- sslCertificate := r.Configuration.Encryption.SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate.SslCertificate.ValueString()
- sourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate = &shared.SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate{
- EncryptionMethod: encryptionMethod1,
- SslCertificate: sslCertificate,
+ var tlsEncryptedVerifyCertificate *shared.TLSEncryptedVerifyCertificate
+ if r.Configuration.Encryption.TLSEncryptedVerifyCertificate != nil {
+ sslCertificate := r.Configuration.Encryption.TLSEncryptedVerifyCertificate.SslCertificate.ValueString()
+ tlsEncryptedVerifyCertificate = &shared.TLSEncryptedVerifyCertificate{
+ SslCertificate: sslCertificate,
}
}
- if sourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate != nil {
- encryption = shared.SourceOracleUpdateEncryption{
- SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate: sourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate,
+ if tlsEncryptedVerifyCertificate != nil {
+ encryption = shared.Encryption{
+ TLSEncryptedVerifyCertificate: tlsEncryptedVerifyCertificate,
}
}
host := r.Configuration.Host.ValueString()
@@ -281,63 +258,71 @@ func (r *SourceOracleResourceModel) ToUpdateSDKType() *shared.SourceOraclePutReq
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var schemas []string = nil
for _, schemasItem := range r.Configuration.Schemas {
schemas = append(schemas, schemasItem.ValueString())
}
var tunnelMethod *shared.SourceOracleUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourceOracleUpdateSSHTunnelMethodNoTunnel *shared.SourceOracleUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourceOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourceOracleUpdateSSHTunnelMethodNoTunnel = &shared.SourceOracleUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourceOracleUpdateNoTunnel *shared.SourceOracleUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourceOracleUpdateNoTunnel = &shared.SourceOracleUpdateNoTunnel{}
}
- if sourceOracleUpdateSSHTunnelMethodNoTunnel != nil {
+ if sourceOracleUpdateNoTunnel != nil {
tunnelMethod = &shared.SourceOracleUpdateSSHTunnelMethod{
- SourceOracleUpdateSSHTunnelMethodNoTunnel: sourceOracleUpdateSSHTunnelMethodNoTunnel,
+ SourceOracleUpdateNoTunnel: sourceOracleUpdateNoTunnel,
}
}
- var sourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication *shared.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourceOracleUpdateSSHKeyAuthentication *shared.SourceOracleUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourceOracleUpdateSSHKeyAuthentication = &shared.SourceOracleUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourceOracleUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourceOracleUpdateSSHTunnelMethod{
- SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication: sourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication,
+ SourceOracleUpdateSSHKeyAuthentication: sourceOracleUpdateSSHKeyAuthentication,
}
}
- var sourceOracleUpdateSSHTunnelMethodPasswordAuthentication *shared.SourceOracleUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourceOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourceOracleUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourceOracleUpdateSSHTunnelMethodPasswordAuthentication = &shared.SourceOracleUpdateSSHTunnelMethodPasswordAuthentication{
+ var sourceOracleUpdatePasswordAuthentication *shared.SourceOracleUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourceOracleUpdatePasswordAuthentication = &shared.SourceOracleUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourceOracleUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if sourceOracleUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.SourceOracleUpdateSSHTunnelMethod{
- SourceOracleUpdateSSHTunnelMethodPasswordAuthentication: sourceOracleUpdateSSHTunnelMethodPasswordAuthentication,
+ SourceOracleUpdatePasswordAuthentication: sourceOracleUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/source_orb_data_source.go b/internal/provider/source_orb_data_source.go
old mode 100755
new mode 100644
index a379c0d18..57f52d385
--- a/internal/provider/source_orb_data_source.go
+++ b/internal/provider/source_orb_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceOrbDataSource struct {
// SourceOrbDataSourceModel describes the data model.
type SourceOrbDataSourceModel struct {
- Configuration SourceOrb `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,60 +47,20 @@ func (r *SourceOrbDataSource) Schema(ctx context.Context, req datasource.SchemaR
MarkdownDescription: "SourceOrb DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Orb API Key, issued from the Orb admin console.`,
- },
- "lookback_window_days": schema.Int64Attribute{
- Computed: true,
- Description: `When set to N, the connector will always refresh resources created within the past N days. By default, updated objects that are not newly created are not incrementally synced.`,
- },
- "numeric_event_properties_keys": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.`,
- },
- "plan_id": schema.StringAttribute{
- Computed: true,
- Description: `Orb Plan ID to filter subscriptions that should have usage fetched.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "orb",
- ),
- },
- Description: `must be one of ["orb"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `UTC date and time in the format 2022-03-01T00:00:00Z. Any data with created_at before this data will not be synced. For Subscription Usage, this becomes the ` + "`" + `timeframe_start` + "`" + ` API parameter.`,
- },
- "string_event_properties_keys": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.`,
- },
- "subscription_usage_grouping_key": schema.StringAttribute{
- Computed: true,
- Description: `Property key name to group subscription usage by.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_orb_data_source_sdk.go b/internal/provider/source_orb_data_source_sdk.go
old mode 100755
new mode 100644
index a7bf3fdd3..8e1424172
--- a/internal/provider/source_orb_data_source_sdk.go
+++ b/internal/provider/source_orb_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOrbDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_orb_resource.go b/internal/provider/source_orb_resource.go
old mode 100755
new mode 100644
index ecec3ecb5..4f1ce8ed2
--- a/internal/provider/source_orb_resource.go
+++ b/internal/provider/source_orb_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceOrbResource struct {
// SourceOrbResourceModel describes the resource data model.
type SourceOrbResourceModel struct {
Configuration SourceOrb `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,14 +56,17 @@ func (r *SourceOrbResource) Schema(ctx context.Context, req resource.SchemaReque
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Orb API Key, issued from the Orb admin console.`,
},
"lookback_window_days": schema.Int64Attribute{
- Optional: true,
- Description: `When set to N, the connector will always refresh resources created within the past N days. By default, updated objects that are not newly created are not incrementally synced.`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `When set to N, the connector will always refresh resources created within the past N days. By default, updated objects that are not newly created are not incrementally synced.`,
},
"numeric_event_properties_keys": schema.ListAttribute{
Optional: true,
+ Sensitive: true,
ElementType: types.StringType,
Description: `Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.`,
},
@@ -71,37 +74,41 @@ func (r *SourceOrbResource) Schema(ctx context.Context, req resource.SchemaReque
Optional: true,
Description: `Orb Plan ID to filter subscriptions that should have usage fetched.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "orb",
- ),
- },
- Description: `must be one of ["orb"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `UTC date and time in the format 2022-03-01T00:00:00Z. Any data with created_at before this data will not be synced. For Subscription Usage, this becomes the ` + "`" + `timeframe_start` + "`" + ` API parameter.`,
},
"string_event_properties_keys": schema.ListAttribute{
Optional: true,
+ Sensitive: true,
ElementType: types.StringType,
Description: `Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.`,
},
"subscription_usage_grouping_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Property key name to group subscription usage by.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -165,7 +172,7 @@ func (r *SourceOrbResource) Create(ctx context.Context, req resource.CreateReque
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceOrb(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -341,5 +348,5 @@ func (r *SourceOrbResource) Delete(ctx context.Context, req resource.DeleteReque
}
func (r *SourceOrbResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_orb_resource_sdk.go b/internal/provider/source_orb_resource_sdk.go
old mode 100755
new mode 100644
index 5c249155d..dc2affdb4
--- a/internal/provider/source_orb_resource_sdk.go
+++ b/internal/provider/source_orb_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -25,7 +25,6 @@ func (r *SourceOrbResourceModel) ToCreateSDKType() *shared.SourceOrbCreateReques
} else {
planID = nil
}
- sourceType := shared.SourceOrbOrb(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
var stringEventPropertiesKeys []string = nil
for _, stringEventPropertiesKeysItem := range r.Configuration.StringEventPropertiesKeys {
@@ -42,11 +41,16 @@ func (r *SourceOrbResourceModel) ToCreateSDKType() *shared.SourceOrbCreateReques
LookbackWindowDays: lookbackWindowDays,
NumericEventPropertiesKeys: numericEventPropertiesKeys,
PlanID: planID,
- SourceType: sourceType,
StartDate: startDate,
StringEventPropertiesKeys: stringEventPropertiesKeys,
SubscriptionUsageGroupingKey: subscriptionUsageGroupingKey,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -57,6 +61,7 @@ func (r *SourceOrbResourceModel) ToCreateSDKType() *shared.SourceOrbCreateReques
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceOrbCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_orbit_data_source.go b/internal/provider/source_orbit_data_source.go
old mode 100755
new mode 100644
index 5999c1943..bc395671c
--- a/internal/provider/source_orbit_data_source.go
+++ b/internal/provider/source_orbit_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceOrbitDataSource struct {
// SourceOrbitDataSourceModel describes the data model.
type SourceOrbitDataSourceModel struct {
- Configuration SourceOrbit `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,42 +47,20 @@ func (r *SourceOrbitDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceOrbit DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `Authorizes you to work with Orbit workspaces associated with the token.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "orbit",
- ),
- },
- Description: `must be one of ["orbit"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `Date in the format 2022-06-26. Only load members whose last activities are after this date.`,
- },
- "workspace": schema.StringAttribute{
- Computed: true,
- Description: `The unique name of the workspace that your API token is associated with.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_orbit_data_source_sdk.go b/internal/provider/source_orbit_data_source_sdk.go
old mode 100755
new mode 100644
index ccc6d3c6c..08d18ecf3
--- a/internal/provider/source_orbit_data_source_sdk.go
+++ b/internal/provider/source_orbit_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOrbitDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_orbit_resource.go b/internal/provider/source_orbit_resource.go
old mode 100755
new mode 100644
index f1e8d50e0..d3530ccef
--- a/internal/provider/source_orbit_resource.go
+++ b/internal/provider/source_orbit_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceOrbitResource struct {
// SourceOrbitResourceModel describes the resource data model.
type SourceOrbitResourceModel struct {
Configuration SourceOrbit `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,17 +56,9 @@ func (r *SourceOrbitResource) Schema(ctx context.Context, req resource.SchemaReq
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Authorizes you to work with Orbit workspaces associated with the token.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "orbit",
- ),
- },
- Description: `must be one of ["orbit"]`,
- },
"start_date": schema.StringAttribute{
Optional: true,
Description: `Date in the format 2022-06-26. Only load members whose last activities are after this date.`,
@@ -77,13 +69,24 @@ func (r *SourceOrbitResource) Schema(ctx context.Context, req resource.SchemaReq
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +150,7 @@ func (r *SourceOrbitResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceOrbit(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +326,5 @@ func (r *SourceOrbitResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceOrbitResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_orbit_resource_sdk.go b/internal/provider/source_orbit_resource_sdk.go
old mode 100755
new mode 100644
index 9114b95b6..81663d81c
--- a/internal/provider/source_orbit_resource_sdk.go
+++ b/internal/provider/source_orbit_resource_sdk.go
@@ -3,13 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOrbitResourceModel) ToCreateSDKType() *shared.SourceOrbitCreateRequest {
apiToken := r.Configuration.APIToken.ValueString()
- sourceType := shared.SourceOrbitOrbit(r.Configuration.SourceType.ValueString())
startDate := new(string)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate = r.Configuration.StartDate.ValueString()
@@ -18,10 +17,15 @@ func (r *SourceOrbitResourceModel) ToCreateSDKType() *shared.SourceOrbitCreateRe
}
workspace := r.Configuration.Workspace.ValueString()
configuration := shared.SourceOrbit{
- APIToken: apiToken,
- SourceType: sourceType,
- StartDate: startDate,
- Workspace: workspace,
+ APIToken: apiToken,
+ StartDate: startDate,
+ Workspace: workspace,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -33,6 +37,7 @@ func (r *SourceOrbitResourceModel) ToCreateSDKType() *shared.SourceOrbitCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceOrbitCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_outbrainamplify_data_source.go b/internal/provider/source_outbrainamplify_data_source.go
old mode 100755
new mode 100644
index cc993c0b3..bf21f5154
--- a/internal/provider/source_outbrainamplify_data_source.go
+++ b/internal/provider/source_outbrainamplify_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceOutbrainAmplifyDataSource struct {
// SourceOutbrainAmplifyDataSourceModel describes the data model.
type SourceOutbrainAmplifyDataSourceModel struct {
- Configuration SourceOutbrainAmplify `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,155 +47,20 @@ func (r *SourceOutbrainAmplifyDataSource) Schema(ctx context.Context, req dataso
MarkdownDescription: "SourceOutbrainAmplify DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_outbrain_amplify_authentication_method_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- Description: `Credentials for making authenticated requests requires either username/password or access_token.`,
- },
- "source_outbrain_amplify_authentication_method_username_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Add Password for authentication.`,
- },
- "type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username_password",
- ),
- },
- Description: `must be one of ["username_password"]`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Add Username for authentication.`,
- },
- },
- Description: `Credentials for making authenticated requests requires either username/password or access_token.`,
- },
- "source_outbrain_amplify_update_authentication_method_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- Description: `Credentials for making authenticated requests requires either username/password or access_token.`,
- },
- "source_outbrain_amplify_update_authentication_method_username_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Add Password for authentication.`,
- },
- "type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username_password",
- ),
- },
- Description: `must be one of ["username_password"]`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Add Username for authentication.`,
- },
- },
- Description: `Credentials for making authenticated requests requires either username/password or access_token.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Credentials for making authenticated requests requires either username/password or access_token.`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Description: `Date in the format YYYY-MM-DD.`,
- },
- "geo_location_breakdown": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "country",
- "region",
- "subregion",
- ),
- },
- MarkdownDescription: `must be one of ["country", "region", "subregion"]` + "\n" +
- `The granularity used for geo location data in reports.`,
- },
- "report_granularity": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "daily",
- "weekly",
- "monthly",
- ),
- },
- MarkdownDescription: `must be one of ["daily", "weekly", "monthly"]` + "\n" +
- `The granularity used for periodic data in reports. See the docs.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "outbrain-amplify",
- ),
- },
- Description: `must be one of ["outbrain-amplify"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_outbrainamplify_data_source_sdk.go b/internal/provider/source_outbrainamplify_data_source_sdk.go
old mode 100755
new mode 100644
index 38a10297d..b8407a3cc
--- a/internal/provider/source_outbrainamplify_data_source_sdk.go
+++ b/internal/provider/source_outbrainamplify_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOutbrainAmplifyDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_outbrainamplify_resource.go b/internal/provider/source_outbrainamplify_resource.go
old mode 100755
new mode 100644
index f534ef8be..6799bf36f
--- a/internal/provider/source_outbrainamplify_resource.go
+++ b/internal/provider/source_outbrainamplify_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceOutbrainAmplifyResource struct {
// SourceOutbrainAmplifyResourceModel describes the resource data model.
type SourceOutbrainAmplifyResourceModel struct {
Configuration SourceOutbrainAmplify `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,83 +60,25 @@ func (r *SourceOutbrainAmplifyResource) Schema(ctx context.Context, req resource
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_outbrain_amplify_authentication_method_access_token": schema.SingleNestedAttribute{
+ "access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests.`,
},
- "type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
},
Description: `Credentials for making authenticated requests requires either username/password or access_token.`,
},
- "source_outbrain_amplify_authentication_method_username_password": schema.SingleNestedAttribute{
+ "username_password": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Add Password for authentication.`,
},
- "type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username_password",
- ),
- },
- Description: `must be one of ["username_password"]`,
- },
- "username": schema.StringAttribute{
- Required: true,
- Description: `Add Username for authentication.`,
- },
- },
- Description: `Credentials for making authenticated requests requires either username/password or access_token.`,
- },
- "source_outbrain_amplify_update_authentication_method_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- Description: `Credentials for making authenticated requests requires either username/password or access_token.`,
- },
- "source_outbrain_amplify_update_authentication_method_username_password": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "password": schema.StringAttribute{
- Required: true,
- Description: `Add Password for authentication.`,
- },
- "type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username_password",
- ),
- },
- Description: `must be one of ["username_password"]`,
- },
"username": schema.StringAttribute{
Required: true,
Description: `Add Username for authentication.`,
@@ -143,10 +87,10 @@ func (r *SourceOutbrainAmplifyResource) Schema(ctx context.Context, req resource
Description: `Credentials for making authenticated requests requires either username/password or access_token.`,
},
},
+ Description: `Credentials for making authenticated requests requires either username/password or access_token.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Credentials for making authenticated requests requires either username/password or access_token.`,
},
"end_date": schema.StringAttribute{
Optional: true,
@@ -154,6 +98,8 @@ func (r *SourceOutbrainAmplifyResource) Schema(ctx context.Context, req resource
},
"geo_location_breakdown": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["country", "region", "subregion"]` + "\n" +
+ `The granularity used for geo location data in reports.`,
Validators: []validator.String{
stringvalidator.OneOf(
"country",
@@ -161,11 +107,11 @@ func (r *SourceOutbrainAmplifyResource) Schema(ctx context.Context, req resource
"subregion",
),
},
- MarkdownDescription: `must be one of ["country", "region", "subregion"]` + "\n" +
- `The granularity used for geo location data in reports.`,
},
"report_granularity": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["daily", "weekly", "monthly"]` + "\n" +
+ `The granularity used for periodic data in reports. See the docs.`,
Validators: []validator.String{
stringvalidator.OneOf(
"daily",
@@ -173,17 +119,6 @@ func (r *SourceOutbrainAmplifyResource) Schema(ctx context.Context, req resource
"monthly",
),
},
- MarkdownDescription: `must be one of ["daily", "weekly", "monthly"]` + "\n" +
- `The granularity used for periodic data in reports. See the docs.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "outbrain-amplify",
- ),
- },
- Description: `must be one of ["outbrain-amplify"]`,
},
"start_date": schema.StringAttribute{
Required: true,
@@ -191,13 +126,24 @@ func (r *SourceOutbrainAmplifyResource) Schema(ctx context.Context, req resource
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -261,7 +207,7 @@ func (r *SourceOutbrainAmplifyResource) Create(ctx context.Context, req resource
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceOutbrainAmplify(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -437,5 +383,5 @@ func (r *SourceOutbrainAmplifyResource) Delete(ctx context.Context, req resource
}
func (r *SourceOutbrainAmplifyResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_outbrainamplify_resource_sdk.go b/internal/provider/source_outbrainamplify_resource_sdk.go
old mode 100755
new mode 100644
index 76174f12d..3d2bf6a4d
--- a/internal/provider/source_outbrainamplify_resource_sdk.go
+++ b/internal/provider/source_outbrainamplify_resource_sdk.go
@@ -3,40 +3,36 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOutbrainAmplifyResourceModel) ToCreateSDKType() *shared.SourceOutbrainAmplifyCreateRequest {
var credentials shared.SourceOutbrainAmplifyAuthenticationMethod
- var sourceOutbrainAmplifyAuthenticationMethodAccessToken *shared.SourceOutbrainAmplifyAuthenticationMethodAccessToken
- if r.Configuration.Credentials.SourceOutbrainAmplifyAuthenticationMethodAccessToken != nil {
- accessToken := r.Configuration.Credentials.SourceOutbrainAmplifyAuthenticationMethodAccessToken.AccessToken.ValueString()
- typeVar := shared.SourceOutbrainAmplifyAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests(r.Configuration.Credentials.SourceOutbrainAmplifyAuthenticationMethodAccessToken.Type.ValueString())
- sourceOutbrainAmplifyAuthenticationMethodAccessToken = &shared.SourceOutbrainAmplifyAuthenticationMethodAccessToken{
+ var sourceOutbrainAmplifyAccessToken *shared.SourceOutbrainAmplifyAccessToken
+ if r.Configuration.Credentials.AccessToken != nil {
+ accessToken := r.Configuration.Credentials.AccessToken.AccessToken.ValueString()
+ sourceOutbrainAmplifyAccessToken = &shared.SourceOutbrainAmplifyAccessToken{
AccessToken: accessToken,
- Type: typeVar,
}
}
- if sourceOutbrainAmplifyAuthenticationMethodAccessToken != nil {
+ if sourceOutbrainAmplifyAccessToken != nil {
credentials = shared.SourceOutbrainAmplifyAuthenticationMethod{
- SourceOutbrainAmplifyAuthenticationMethodAccessToken: sourceOutbrainAmplifyAuthenticationMethodAccessToken,
+ SourceOutbrainAmplifyAccessToken: sourceOutbrainAmplifyAccessToken,
}
}
- var sourceOutbrainAmplifyAuthenticationMethodUsernamePassword *shared.SourceOutbrainAmplifyAuthenticationMethodUsernamePassword
- if r.Configuration.Credentials.SourceOutbrainAmplifyAuthenticationMethodUsernamePassword != nil {
- password := r.Configuration.Credentials.SourceOutbrainAmplifyAuthenticationMethodUsernamePassword.Password.ValueString()
- typeVar1 := shared.SourceOutbrainAmplifyAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest(r.Configuration.Credentials.SourceOutbrainAmplifyAuthenticationMethodUsernamePassword.Type.ValueString())
- username := r.Configuration.Credentials.SourceOutbrainAmplifyAuthenticationMethodUsernamePassword.Username.ValueString()
- sourceOutbrainAmplifyAuthenticationMethodUsernamePassword = &shared.SourceOutbrainAmplifyAuthenticationMethodUsernamePassword{
+ var sourceOutbrainAmplifyUsernamePassword *shared.SourceOutbrainAmplifyUsernamePassword
+ if r.Configuration.Credentials.UsernamePassword != nil {
+ password := r.Configuration.Credentials.UsernamePassword.Password.ValueString()
+ username := r.Configuration.Credentials.UsernamePassword.Username.ValueString()
+ sourceOutbrainAmplifyUsernamePassword = &shared.SourceOutbrainAmplifyUsernamePassword{
Password: password,
- Type: typeVar1,
Username: username,
}
}
- if sourceOutbrainAmplifyAuthenticationMethodUsernamePassword != nil {
+ if sourceOutbrainAmplifyUsernamePassword != nil {
credentials = shared.SourceOutbrainAmplifyAuthenticationMethod{
- SourceOutbrainAmplifyAuthenticationMethodUsernamePassword: sourceOutbrainAmplifyAuthenticationMethodUsernamePassword,
+ SourceOutbrainAmplifyUsernamePassword: sourceOutbrainAmplifyUsernamePassword,
}
}
endDate := new(string)
@@ -57,16 +53,20 @@ func (r *SourceOutbrainAmplifyResourceModel) ToCreateSDKType() *shared.SourceOut
} else {
reportGranularity = nil
}
- sourceType := shared.SourceOutbrainAmplifyOutbrainAmplify(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
configuration := shared.SourceOutbrainAmplify{
Credentials: credentials,
EndDate: endDate,
GeoLocationBreakdown: geoLocationBreakdown,
ReportGranularity: reportGranularity,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -77,6 +77,7 @@ func (r *SourceOutbrainAmplifyResourceModel) ToCreateSDKType() *shared.SourceOut
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceOutbrainAmplifyCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -91,34 +92,30 @@ func (r *SourceOutbrainAmplifyResourceModel) ToGetSDKType() *shared.SourceOutbra
func (r *SourceOutbrainAmplifyResourceModel) ToUpdateSDKType() *shared.SourceOutbrainAmplifyPutRequest {
var credentials shared.SourceOutbrainAmplifyUpdateAuthenticationMethod
- var sourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken *shared.SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken
- if r.Configuration.Credentials.SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken != nil {
- accessToken := r.Configuration.Credentials.SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken.AccessToken.ValueString()
- typeVar := shared.SourceOutbrainAmplifyUpdateAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests(r.Configuration.Credentials.SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken.Type.ValueString())
- sourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken = &shared.SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken{
+ var sourceOutbrainAmplifyUpdateAccessToken *shared.SourceOutbrainAmplifyUpdateAccessToken
+ if r.Configuration.Credentials.AccessToken != nil {
+ accessToken := r.Configuration.Credentials.AccessToken.AccessToken.ValueString()
+ sourceOutbrainAmplifyUpdateAccessToken = &shared.SourceOutbrainAmplifyUpdateAccessToken{
AccessToken: accessToken,
- Type: typeVar,
}
}
- if sourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken != nil {
+ if sourceOutbrainAmplifyUpdateAccessToken != nil {
credentials = shared.SourceOutbrainAmplifyUpdateAuthenticationMethod{
- SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken: sourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken,
+ SourceOutbrainAmplifyUpdateAccessToken: sourceOutbrainAmplifyUpdateAccessToken,
}
}
- var sourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword *shared.SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword
- if r.Configuration.Credentials.SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword != nil {
- password := r.Configuration.Credentials.SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword.Password.ValueString()
- typeVar1 := shared.SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest(r.Configuration.Credentials.SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword.Type.ValueString())
- username := r.Configuration.Credentials.SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword.Username.ValueString()
- sourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword = &shared.SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword{
+ var sourceOutbrainAmplifyUpdateUsernamePassword *shared.SourceOutbrainAmplifyUpdateUsernamePassword
+ if r.Configuration.Credentials.UsernamePassword != nil {
+ password := r.Configuration.Credentials.UsernamePassword.Password.ValueString()
+ username := r.Configuration.Credentials.UsernamePassword.Username.ValueString()
+ sourceOutbrainAmplifyUpdateUsernamePassword = &shared.SourceOutbrainAmplifyUpdateUsernamePassword{
Password: password,
- Type: typeVar1,
Username: username,
}
}
- if sourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword != nil {
+ if sourceOutbrainAmplifyUpdateUsernamePassword != nil {
credentials = shared.SourceOutbrainAmplifyUpdateAuthenticationMethod{
- SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword: sourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword,
+ SourceOutbrainAmplifyUpdateUsernamePassword: sourceOutbrainAmplifyUpdateUsernamePassword,
}
}
endDate := new(string)
@@ -127,15 +124,15 @@ func (r *SourceOutbrainAmplifyResourceModel) ToUpdateSDKType() *shared.SourceOut
} else {
endDate = nil
}
- geoLocationBreakdown := new(shared.SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion)
+ geoLocationBreakdown := new(shared.GranularityForGeoLocationRegion)
if !r.Configuration.GeoLocationBreakdown.IsUnknown() && !r.Configuration.GeoLocationBreakdown.IsNull() {
- *geoLocationBreakdown = shared.SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion(r.Configuration.GeoLocationBreakdown.ValueString())
+ *geoLocationBreakdown = shared.GranularityForGeoLocationRegion(r.Configuration.GeoLocationBreakdown.ValueString())
} else {
geoLocationBreakdown = nil
}
- reportGranularity := new(shared.SourceOutbrainAmplifyUpdateGranularityForPeriodicReports)
+ reportGranularity := new(shared.GranularityForPeriodicReports)
if !r.Configuration.ReportGranularity.IsUnknown() && !r.Configuration.ReportGranularity.IsNull() {
- *reportGranularity = shared.SourceOutbrainAmplifyUpdateGranularityForPeriodicReports(r.Configuration.ReportGranularity.ValueString())
+ *reportGranularity = shared.GranularityForPeriodicReports(r.Configuration.ReportGranularity.ValueString())
} else {
reportGranularity = nil
}
diff --git a/internal/provider/source_outreach_data_source.go b/internal/provider/source_outreach_data_source.go
old mode 100755
new mode 100644
index b54e06692..83073d261
--- a/internal/provider/source_outreach_data_source.go
+++ b/internal/provider/source_outreach_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceOutreachDataSource struct {
// SourceOutreachDataSourceModel describes the data model.
type SourceOutreachDataSourceModel struct {
- Configuration SourceOutreach `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,50 +47,20 @@ func (r *SourceOutreachDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceOutreach DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Outreach developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Outreach developer application.`,
- },
- "redirect_uri": schema.StringAttribute{
- Computed: true,
- Description: `A Redirect URI is the location where the authorization server sends the user once the app has been successfully authorized and granted an authorization code or access token.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining the new access token.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "outreach",
- ),
- },
- Description: `must be one of ["outreach"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `The date from which you'd like to replicate data for Outreach API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_outreach_data_source_sdk.go b/internal/provider/source_outreach_data_source_sdk.go
old mode 100755
new mode 100644
index 70672eaba..4eafaaae2
--- a/internal/provider/source_outreach_data_source_sdk.go
+++ b/internal/provider/source_outreach_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceOutreachDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_outreach_resource.go b/internal/provider/source_outreach_resource.go
old mode 100755
new mode 100644
index ac603cf50..e43392db9
--- a/internal/provider/source_outreach_resource.go
+++ b/internal/provider/source_outreach_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceOutreachResource struct {
// SourceOutreachResourceModel describes the resource data model.
type SourceOutreachResourceModel struct {
Configuration SourceOutreach `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -68,30 +68,33 @@ func (r *SourceOutreachResource) Schema(ctx context.Context, req resource.Schema
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The token for obtaining the new access token.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "outreach",
- ),
- },
- Description: `must be one of ["outreach"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `The date from which you'd like to replicate data for Outreach API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +158,7 @@ func (r *SourceOutreachResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceOutreach(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +334,5 @@ func (r *SourceOutreachResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceOutreachResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_outreach_resource_sdk.go b/internal/provider/source_outreach_resource_sdk.go
old mode 100755
new mode 100644
index 4092c82a4..c38f42401
--- a/internal/provider/source_outreach_resource_sdk.go
+++ b/internal/provider/source_outreach_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -12,16 +12,20 @@ func (r *SourceOutreachResourceModel) ToCreateSDKType() *shared.SourceOutreachCr
clientSecret := r.Configuration.ClientSecret.ValueString()
redirectURI := r.Configuration.RedirectURI.ValueString()
refreshToken := r.Configuration.RefreshToken.ValueString()
- sourceType := shared.SourceOutreachOutreach(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
configuration := shared.SourceOutreach{
ClientID: clientID,
ClientSecret: clientSecret,
RedirectURI: redirectURI,
RefreshToken: refreshToken,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -32,6 +36,7 @@ func (r *SourceOutreachResourceModel) ToCreateSDKType() *shared.SourceOutreachCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceOutreachCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_paypaltransaction_data_source.go b/internal/provider/source_paypaltransaction_data_source.go
old mode 100755
new mode 100644
index 378acee90..0360ab12a
--- a/internal/provider/source_paypaltransaction_data_source.go
+++ b/internal/provider/source_paypaltransaction_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourcePaypalTransactionDataSource struct {
// SourcePaypalTransactionDataSourceModel describes the data model.
type SourcePaypalTransactionDataSourceModel struct {
- Configuration SourcePaypalTransaction `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,53 +47,20 @@ func (r *SourcePaypalTransactionDataSource) Schema(ctx context.Context, req data
MarkdownDescription: "SourcePaypalTransaction DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Paypal developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Paypal developer application.`,
- },
- "is_sandbox": schema.BoolAttribute{
- Computed: true,
- Description: `Determines whether to use the sandbox or production environment.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access token.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "paypal-transaction",
- ),
- },
- Description: `must be one of ["paypal-transaction"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before present time.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_paypaltransaction_data_source_sdk.go b/internal/provider/source_paypaltransaction_data_source_sdk.go
old mode 100755
new mode 100644
index 8cfe244fe..936196d01
--- a/internal/provider/source_paypaltransaction_data_source_sdk.go
+++ b/internal/provider/source_paypaltransaction_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePaypalTransactionDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_paypaltransaction_resource.go b/internal/provider/source_paypaltransaction_resource.go
old mode 100755
new mode 100644
index 8502448c7..0eb3f3acf
--- a/internal/provider/source_paypaltransaction_resource.go
+++ b/internal/provider/source_paypaltransaction_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourcePaypalTransactionResource struct {
// SourcePaypalTransactionResourceModel describes the resource data model.
type SourcePaypalTransactionResourceModel struct {
Configuration SourcePaypalTransaction `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -64,38 +65,47 @@ func (r *SourcePaypalTransactionResource) Schema(ctx context.Context, req resour
Description: `The Client Secret of your Paypal developer application.`,
},
"is_sandbox": schema.BoolAttribute{
- Required: true,
- Description: `Determines whether to use the sandbox or production environment.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Determines whether to use the sandbox or production environment.`,
},
"refresh_token": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The key to refresh the expired access token.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "paypal-transaction",
- ),
- },
- Description: `must be one of ["paypal-transaction"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before present time.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before present time.`,
+ },
+ "time_window": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 7` + "\n" +
+ `The number of days per request. Must be a number between 1 and 31.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -159,7 +169,7 @@ func (r *SourcePaypalTransactionResource) Create(ctx context.Context, req resour
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePaypalTransaction(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -335,5 +345,5 @@ func (r *SourcePaypalTransactionResource) Delete(ctx context.Context, req resour
}
func (r *SourcePaypalTransactionResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_paypaltransaction_resource_sdk.go b/internal/provider/source_paypaltransaction_resource_sdk.go
old mode 100755
new mode 100644
index a75107f8d..79bec4dd9
--- a/internal/provider/source_paypaltransaction_resource_sdk.go
+++ b/internal/provider/source_paypaltransaction_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -11,22 +11,38 @@ import (
func (r *SourcePaypalTransactionResourceModel) ToCreateSDKType() *shared.SourcePaypalTransactionCreateRequest {
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
- isSandbox := r.Configuration.IsSandbox.ValueBool()
+ isSandbox := new(bool)
+ if !r.Configuration.IsSandbox.IsUnknown() && !r.Configuration.IsSandbox.IsNull() {
+ *isSandbox = r.Configuration.IsSandbox.ValueBool()
+ } else {
+ isSandbox = nil
+ }
refreshToken := new(string)
if !r.Configuration.RefreshToken.IsUnknown() && !r.Configuration.RefreshToken.IsNull() {
*refreshToken = r.Configuration.RefreshToken.ValueString()
} else {
refreshToken = nil
}
- sourceType := shared.SourcePaypalTransactionPaypalTransaction(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ timeWindow := new(int64)
+ if !r.Configuration.TimeWindow.IsUnknown() && !r.Configuration.TimeWindow.IsNull() {
+ *timeWindow = r.Configuration.TimeWindow.ValueInt64()
+ } else {
+ timeWindow = nil
+ }
configuration := shared.SourcePaypalTransaction{
ClientID: clientID,
ClientSecret: clientSecret,
IsSandbox: isSandbox,
RefreshToken: refreshToken,
- SourceType: sourceType,
StartDate: startDate,
+ TimeWindow: timeWindow,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -38,6 +54,7 @@ func (r *SourcePaypalTransactionResourceModel) ToCreateSDKType() *shared.SourceP
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePaypalTransactionCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -53,7 +70,12 @@ func (r *SourcePaypalTransactionResourceModel) ToGetSDKType() *shared.SourcePayp
func (r *SourcePaypalTransactionResourceModel) ToUpdateSDKType() *shared.SourcePaypalTransactionPutRequest {
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
- isSandbox := r.Configuration.IsSandbox.ValueBool()
+ isSandbox := new(bool)
+ if !r.Configuration.IsSandbox.IsUnknown() && !r.Configuration.IsSandbox.IsNull() {
+ *isSandbox = r.Configuration.IsSandbox.ValueBool()
+ } else {
+ isSandbox = nil
+ }
refreshToken := new(string)
if !r.Configuration.RefreshToken.IsUnknown() && !r.Configuration.RefreshToken.IsNull() {
*refreshToken = r.Configuration.RefreshToken.ValueString()
@@ -61,12 +83,19 @@ func (r *SourcePaypalTransactionResourceModel) ToUpdateSDKType() *shared.SourceP
refreshToken = nil
}
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
+ timeWindow := new(int64)
+ if !r.Configuration.TimeWindow.IsUnknown() && !r.Configuration.TimeWindow.IsNull() {
+ *timeWindow = r.Configuration.TimeWindow.ValueInt64()
+ } else {
+ timeWindow = nil
+ }
configuration := shared.SourcePaypalTransactionUpdate{
ClientID: clientID,
ClientSecret: clientSecret,
IsSandbox: isSandbox,
RefreshToken: refreshToken,
StartDate: startDate,
+ TimeWindow: timeWindow,
}
name := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
diff --git a/internal/provider/source_paystack_data_source.go b/internal/provider/source_paystack_data_source.go
old mode 100755
new mode 100644
index ee2bb7fde..5b335097b
--- a/internal/provider/source_paystack_data_source.go
+++ b/internal/provider/source_paystack_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourcePaystackDataSource struct {
// SourcePaystackDataSourceModel describes the data model.
type SourcePaystackDataSourceModel struct {
- Configuration SourcePaystack `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,45 +47,20 @@ func (r *SourcePaystackDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourcePaystack DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "lookback_window_days": schema.Int64Attribute{
- Computed: true,
- Description: `When set, the connector will always reload data from the past N days, where N is the value set here. This is useful if your data is updated after creation.`,
- },
- "secret_key": schema.StringAttribute{
- Computed: true,
- Description: `The Paystack API key (usually starts with 'sk_live_'; find yours here).`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "paystack",
- ),
- },
- Description: `must be one of ["paystack"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_paystack_data_source_sdk.go b/internal/provider/source_paystack_data_source_sdk.go
old mode 100755
new mode 100644
index b017b1515..32b4a284a
--- a/internal/provider/source_paystack_data_source_sdk.go
+++ b/internal/provider/source_paystack_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePaystackDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_paystack_resource.go b/internal/provider/source_paystack_resource.go
old mode 100755
new mode 100644
index ee57a0993..8faef3c35
--- a/internal/provider/source_paystack_resource.go
+++ b/internal/provider/source_paystack_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourcePaystackResource struct {
// SourcePaystackResourceModel describes the resource data model.
type SourcePaystackResourceModel struct {
Configuration SourcePaystack `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,38 +57,42 @@ func (r *SourcePaystackResource) Schema(ctx context.Context, req resource.Schema
Required: true,
Attributes: map[string]schema.Attribute{
"lookback_window_days": schema.Int64Attribute{
- Optional: true,
- Description: `When set, the connector will always reload data from the past N days, where N is the value set here. This is useful if your data is updated after creation.`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `When set, the connector will always reload data from the past N days, where N is the value set here. This is useful if your data is updated after creation.`,
},
"secret_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Paystack API key (usually starts with 'sk_live_'; find yours here).`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "paystack",
- ),
- },
- Description: `must be one of ["paystack"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -151,7 +156,7 @@ func (r *SourcePaystackResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePaystack(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -327,5 +332,5 @@ func (r *SourcePaystackResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourcePaystackResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_paystack_resource_sdk.go b/internal/provider/source_paystack_resource_sdk.go
old mode 100755
new mode 100644
index 6eb4ce368..b8d76c377
--- a/internal/provider/source_paystack_resource_sdk.go
+++ b/internal/provider/source_paystack_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -16,14 +16,18 @@ func (r *SourcePaystackResourceModel) ToCreateSDKType() *shared.SourcePaystackCr
lookbackWindowDays = nil
}
secretKey := r.Configuration.SecretKey.ValueString()
- sourceType := shared.SourcePaystackPaystack(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourcePaystack{
LookbackWindowDays: lookbackWindowDays,
SecretKey: secretKey,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -34,6 +38,7 @@ func (r *SourcePaystackResourceModel) ToCreateSDKType() *shared.SourcePaystackCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePaystackCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_pendo_data_source.go b/internal/provider/source_pendo_data_source.go
old mode 100755
new mode 100644
index 147c18387..a107f7b9e
--- a/internal/provider/source_pendo_data_source.go
+++ b/internal/provider/source_pendo_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourcePendoDataSource struct {
// SourcePendoDataSourceModel describes the data model.
type SourcePendoDataSourceModel struct {
- Configuration SourcePendo `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,33 +47,20 @@ func (r *SourcePendoDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourcePendo DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pendo",
- ),
- },
- Description: `must be one of ["pendo"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_pendo_data_source_sdk.go b/internal/provider/source_pendo_data_source_sdk.go
old mode 100755
new mode 100644
index fdd02f230..6dcb447b4
--- a/internal/provider/source_pendo_data_source_sdk.go
+++ b/internal/provider/source_pendo_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePendoDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_pendo_resource.go b/internal/provider/source_pendo_resource.go
old mode 100755
new mode 100644
index f6b387046..b0f695d2c
--- a/internal/provider/source_pendo_resource.go
+++ b/internal/provider/source_pendo_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,7 +33,8 @@ type SourcePendoResource struct {
// SourcePendoResourceModel describes the resource data model.
type SourcePendoResourceModel struct {
- Configuration SourcePendo `tfsdk:"configuration"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -55,26 +55,29 @@ func (r *SourcePendoResource) Schema(ctx context.Context, req resource.SchemaReq
Required: true,
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
- Required: true,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pendo",
- ),
- },
- Description: `must be one of ["pendo"]`,
+ Required: true,
+ Sensitive: true,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -138,7 +141,7 @@ func (r *SourcePendoResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePendo(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -314,5 +317,5 @@ func (r *SourcePendoResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourcePendoResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_pendo_resource_sdk.go b/internal/provider/source_pendo_resource_sdk.go
old mode 100755
new mode 100644
index 025fc1cc4..6032dc438
--- a/internal/provider/source_pendo_resource_sdk.go
+++ b/internal/provider/source_pendo_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePendoResourceModel) ToCreateSDKType() *shared.SourcePendoCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourcePendoPendo(r.Configuration.SourceType.ValueString())
configuration := shared.SourcePendo{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourcePendoResourceModel) ToCreateSDKType() *shared.SourcePendoCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePendoCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_persistiq_data_source.go b/internal/provider/source_persistiq_data_source.go
old mode 100755
new mode 100644
index 59dfd87b1..6db623e79
--- a/internal/provider/source_persistiq_data_source.go
+++ b/internal/provider/source_persistiq_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourcePersistiqDataSource struct {
// SourcePersistiqDataSourceModel describes the data model.
type SourcePersistiqDataSourceModel struct {
- Configuration SourcePersistiq `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourcePersistiqDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourcePersistiq DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `PersistIq API Key. See the docs for more information on where to find that key.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "persistiq",
- ),
- },
- Description: `must be one of ["persistiq"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_persistiq_data_source_sdk.go b/internal/provider/source_persistiq_data_source_sdk.go
old mode 100755
new mode 100644
index 6288dfc03..6478059d4
--- a/internal/provider/source_persistiq_data_source_sdk.go
+++ b/internal/provider/source_persistiq_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePersistiqDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_persistiq_resource.go b/internal/provider/source_persistiq_resource.go
old mode 100755
new mode 100644
index f8f1f116b..075a8fc60
--- a/internal/provider/source_persistiq_resource.go
+++ b/internal/provider/source_persistiq_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourcePersistiqResource struct {
// SourcePersistiqResourceModel describes the resource data model.
type SourcePersistiqResourceModel struct {
- Configuration SourcePersistiq `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourcePersistiqResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourcePersistiqResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `PersistIq API Key. See the docs for more information on where to find that key.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "persistiq",
- ),
- },
- Description: `must be one of ["persistiq"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourcePersistiqResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePersistiq(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourcePersistiqResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourcePersistiqResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_persistiq_resource_sdk.go b/internal/provider/source_persistiq_resource_sdk.go
old mode 100755
new mode 100644
index 8001ce73e..22ea2f1ab
--- a/internal/provider/source_persistiq_resource_sdk.go
+++ b/internal/provider/source_persistiq_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePersistiqResourceModel) ToCreateSDKType() *shared.SourcePersistiqCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourcePersistiqPersistiq(r.Configuration.SourceType.ValueString())
configuration := shared.SourcePersistiq{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourcePersistiqResourceModel) ToCreateSDKType() *shared.SourcePersistiq
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePersistiqCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_pexelsapi_data_source.go b/internal/provider/source_pexelsapi_data_source.go
old mode 100755
new mode 100644
index f1c88657f..5ad2d23cb
--- a/internal/provider/source_pexelsapi_data_source.go
+++ b/internal/provider/source_pexelsapi_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourcePexelsAPIDataSource struct {
// SourcePexelsAPIDataSourceModel describes the data model.
type SourcePexelsAPIDataSourceModel struct {
- Configuration SourcePexelsAPI `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,54 +47,20 @@ func (r *SourcePexelsAPIDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourcePexelsAPI DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API key is required to access pexels api, For getting your's goto https://www.pexels.com/api/documentation and create account for free.`,
- },
- "color": schema.StringAttribute{
- Computed: true,
- Description: `Optional, Desired photo color. Supported colors red, orange, yellow, green, turquoise, blue, violet, pink, brown, black, gray, white or any hexidecimal color code.`,
- },
- "locale": schema.StringAttribute{
- Computed: true,
- Description: `Optional, The locale of the search you are performing. The current supported locales are 'en-US' 'pt-BR' 'es-ES' 'ca-ES' 'de-DE' 'it-IT' 'fr-FR' 'sv-SE' 'id-ID' 'pl-PL' 'ja-JP' 'zh-TW' 'zh-CN' 'ko-KR' 'th-TH' 'nl-NL' 'hu-HU' 'vi-VN' 'cs-CZ' 'da-DK' 'fi-FI' 'uk-UA' 'el-GR' 'ro-RO' 'nb-NO' 'sk-SK' 'tr-TR' 'ru-RU'.`,
- },
- "orientation": schema.StringAttribute{
- Computed: true,
- Description: `Optional, Desired photo orientation. The current supported orientations are landscape, portrait or square`,
- },
- "query": schema.StringAttribute{
- Computed: true,
- Description: `Optional, the search query, Example Ocean, Tigers, Pears, etc.`,
- },
- "size": schema.StringAttribute{
- Computed: true,
- Description: `Optional, Minimum photo size. The current supported sizes are large(24MP), medium(12MP) or small(4MP).`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pexels-api",
- ),
- },
- Description: `must be one of ["pexels-api"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_pexelsapi_data_source_sdk.go b/internal/provider/source_pexelsapi_data_source_sdk.go
old mode 100755
new mode 100644
index 43f0561b1..e32124f9e
--- a/internal/provider/source_pexelsapi_data_source_sdk.go
+++ b/internal/provider/source_pexelsapi_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePexelsAPIDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_pexelsapi_resource.go b/internal/provider/source_pexelsapi_resource.go
old mode 100755
new mode 100644
index 7228ff335..96c3de86c
--- a/internal/provider/source_pexelsapi_resource.go
+++ b/internal/provider/source_pexelsapi_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourcePexelsAPIResource struct {
// SourcePexelsAPIResourceModel describes the resource data model.
type SourcePexelsAPIResourceModel struct {
Configuration SourcePexelsAPI `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,6 +56,7 @@ func (r *SourcePexelsAPIResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API key is required to access pexels api, For getting your's goto https://www.pexels.com/api/documentation and create account for free.`,
},
"color": schema.StringAttribute{
@@ -78,24 +79,26 @@ func (r *SourcePexelsAPIResource) Schema(ctx context.Context, req resource.Schem
Optional: true,
Description: `Optional, Minimum photo size. The current supported sizes are large(24MP), medium(12MP) or small(4MP).`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pexels-api",
- ),
- },
- Description: `must be one of ["pexels-api"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -159,7 +162,7 @@ func (r *SourcePexelsAPIResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePexelsAPI(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -335,5 +338,5 @@ func (r *SourcePexelsAPIResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourcePexelsAPIResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_pexelsapi_resource_sdk.go b/internal/provider/source_pexelsapi_resource_sdk.go
old mode 100755
new mode 100644
index fddcdfbcf..20b4b0d71
--- a/internal/provider/source_pexelsapi_resource_sdk.go
+++ b/internal/provider/source_pexelsapi_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -34,7 +34,6 @@ func (r *SourcePexelsAPIResourceModel) ToCreateSDKType() *shared.SourcePexelsAPI
} else {
size = nil
}
- sourceType := shared.SourcePexelsAPIPexelsAPI(r.Configuration.SourceType.ValueString())
configuration := shared.SourcePexelsAPI{
APIKey: apiKey,
Color: color,
@@ -42,7 +41,12 @@ func (r *SourcePexelsAPIResourceModel) ToCreateSDKType() *shared.SourcePexelsAPI
Orientation: orientation,
Query: query,
Size: size,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -54,6 +58,7 @@ func (r *SourcePexelsAPIResourceModel) ToCreateSDKType() *shared.SourcePexelsAPI
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePexelsAPICreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_pinterest_data_source.go b/internal/provider/source_pinterest_data_source.go
old mode 100755
new mode 100644
index b6fbbdf2e..04b573bfa
--- a/internal/provider/source_pinterest_data_source.go
+++ b/internal/provider/source_pinterest_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourcePinterestDataSource struct {
// SourcePinterestDataSourceModel describes the data model.
type SourcePinterestDataSourceModel struct {
- Configuration SourcePinterest `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,138 +47,20 @@ func (r *SourcePinterestDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourcePinterest DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_pinterest_authorization_method_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The Access Token to make authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_pinterest_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token to obtain new Access Token, when it's expired.`,
- },
- },
- },
- "source_pinterest_update_authorization_method_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The Access Token to make authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_pinterest_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token to obtain new Access Token, when it's expired.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pinterest",
- ),
- },
- Description: `must be one of ["pinterest"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by api (89 days from today).`,
- },
- "status": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `Entity statuses based off of campaigns, ad_groups, and ads. If you do not have a status set, it will be ignored completely.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_pinterest_data_source_sdk.go b/internal/provider/source_pinterest_data_source_sdk.go
old mode 100755
new mode 100644
index 0b6f868f9..a719efd90
--- a/internal/provider/source_pinterest_data_source_sdk.go
+++ b/internal/provider/source_pinterest_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePinterestDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_pinterest_resource.go b/internal/provider/source_pinterest_resource.go
old mode 100755
new mode 100644
index 9798839f1..cda5ea948
--- a/internal/provider/source_pinterest_resource.go
+++ b/internal/provider/source_pinterest_resource.go
@@ -3,18 +3,20 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
+ "github.com/hashicorp/terraform-plugin-framework-validators/int64validator"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +38,7 @@ type SourcePinterestResource struct {
// SourcePinterestResourceModel describes the resource data model.
type SourcePinterestResourceModel struct {
Configuration SourcePinterest `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,36 +61,9 @@ func (r *SourcePinterestResource) Schema(ctx context.Context, req resource.Schem
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_pinterest_authorization_method_access_token": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `The Access Token to make authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_pinterest_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Optional: true,
Description: `The Client ID of your OAuth application`,
@@ -98,74 +74,148 @@ func (r *SourcePinterestResource) Schema(ctx context.Context, req resource.Schem
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Refresh Token to obtain new Access Token, when it's expired.`,
},
},
},
- "source_pinterest_update_authorization_method_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `The Access Token to make authenticated requests.`,
+ },
+ Validators: []validator.Object{
+ validators.ExactlyOneChild(),
+ },
+ },
+ "custom_reports": schema.ListNestedAttribute{
+ Optional: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "attribution_types": schema.ListAttribute{
+ Optional: true,
+ ElementType: types.StringType,
+ Description: `List of types of attribution for the conversion report`,
+ },
+ "click_window_days": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["0", "1", "7", "14", "30", "60"]; Default: 30` + "\n" +
+ `Number of days to use as the conversion attribution window for a pin click action.`,
+ Validators: []validator.Int64{
+ int64validator.OneOf(
+ []int64{
+ 0,
+ 1,
+ 7,
+ 14,
+ 30,
+ 60,
+ }...,
+ ),
},
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
+ },
+ "columns": schema.ListAttribute{
+ Required: true,
+ ElementType: types.StringType,
+ Description: `A list of chosen columns`,
+ },
+ "conversion_report_time": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["TIME_OF_AD_ACTION", "TIME_OF_CONVERSION"]; Default: "TIME_OF_AD_ACTION"` + "\n" +
+ `The date by which the conversion metrics returned from this endpoint will be reported. There are two dates associated with a conversion event: the date that the user interacted with the ad, and the date that the user completed a conversion event..`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "TIME_OF_AD_ACTION",
+ "TIME_OF_CONVERSION",
+ ),
},
},
- },
- "source_pinterest_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
+ "engagement_window_days": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["0", "1", "7", "14", "30", "60"]; Default: [30]` + "\n" +
+ `Number of days to use as the conversion attribution window for an engagement action.`,
+ Validators: []validator.Int64{
+ int64validator.OneOf(
+ []int64{
+ 0,
+ 1,
+ 7,
+ 14,
+ 30,
+ 60,
+ }...,
+ ),
},
- "client_id": schema.StringAttribute{
- Optional: true,
- Description: `The Client ID of your OAuth application`,
+ },
+ "granularity": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["TOTAL", "DAY", "HOUR", "WEEK", "MONTH"]; Default: "TOTAL"` + "\n" +
+ `Chosen granularity for API`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "TOTAL",
+ "DAY",
+ "HOUR",
+ "WEEK",
+ "MONTH",
+ ),
},
- "client_secret": schema.StringAttribute{
- Optional: true,
- Description: `The Client Secret of your OAuth application.`,
+ },
+ "level": schema.StringAttribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["ADVERTISER", "ADVERTISER_TARGETING", "CAMPAIGN", "CAMPAIGN_TARGETING", "AD_GROUP", "AD_GROUP_TARGETING", "PIN_PROMOTION", "PIN_PROMOTION_TARGETING", "KEYWORD", "PRODUCT_GROUP", "PRODUCT_GROUP_TARGETING", "PRODUCT_ITEM"]; Default: "ADVERTISER"` + "\n" +
+ `Chosen level for API`,
+ Validators: []validator.String{
+ stringvalidator.OneOf(
+ "ADVERTISER",
+ "ADVERTISER_TARGETING",
+ "CAMPAIGN",
+ "CAMPAIGN_TARGETING",
+ "AD_GROUP",
+ "AD_GROUP_TARGETING",
+ "PIN_PROMOTION",
+ "PIN_PROMOTION_TARGETING",
+ "KEYWORD",
+ "PRODUCT_GROUP",
+ "PRODUCT_GROUP_TARGETING",
+ "PRODUCT_ITEM",
+ ),
},
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `Refresh Token to obtain new Access Token, when it's expired.`,
+ },
+ "name": schema.StringAttribute{
+ Required: true,
+ Description: `The name value of report`,
+ },
+ "start_date": schema.StringAttribute{
+ Optional: true,
+ Description: `A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by report api (913 days from today).`,
+ Validators: []validator.String{
+ validators.IsValidDate(),
+ },
+ },
+ "view_window_days": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `must be one of ["0", "1", "7", "14", "30", "60"]; Default: [30]` + "\n" +
+ `Number of days to use as the conversion attribution window for a view action.`,
+ Validators: []validator.Int64{
+ int64validator.OneOf(
+ []int64{
+ 0,
+ 1,
+ 7,
+ 14,
+ 30,
+ 60,
+ }...,
+ ),
},
},
},
},
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pinterest",
- ),
- },
- Description: `must be one of ["pinterest"]`,
+ Description: `A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on "add" to fill this field.`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by api (89 days from today).`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by api (89 days from today).`,
},
"status": schema.ListAttribute{
Optional: true,
@@ -174,13 +224,24 @@ func (r *SourcePinterestResource) Schema(ctx context.Context, req resource.Schem
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -244,7 +305,7 @@ func (r *SourcePinterestResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePinterest(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -420,5 +481,5 @@ func (r *SourcePinterestResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourcePinterestResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_pinterest_resource_sdk.go b/internal/provider/source_pinterest_resource_sdk.go
old mode 100755
new mode 100644
index c1b2d7017..dbbfae729
--- a/internal/provider/source_pinterest_resource_sdk.go
+++ b/internal/provider/source_pinterest_resource_sdk.go
@@ -3,70 +3,130 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePinterestResourceModel) ToCreateSDKType() *shared.SourcePinterestCreateRequest {
var credentials *shared.SourcePinterestAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourcePinterestAuthorizationMethodOAuth20 *shared.SourcePinterestAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourcePinterestAuthorizationMethodOAuth20 != nil {
- authMethod := shared.SourcePinterestAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourcePinterestAuthorizationMethodOAuth20.AuthMethod.ValueString())
+ var sourcePinterestOAuth20 *shared.SourcePinterestOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
clientID := new(string)
- if !r.Configuration.Credentials.SourcePinterestAuthorizationMethodOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourcePinterestAuthorizationMethodOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourcePinterestAuthorizationMethodOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourcePinterestAuthorizationMethodOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourcePinterestAuthorizationMethodOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourcePinterestAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- refreshToken := r.Configuration.Credentials.SourcePinterestAuthorizationMethodOAuth20.RefreshToken.ValueString()
- sourcePinterestAuthorizationMethodOAuth20 = &shared.SourcePinterestAuthorizationMethodOAuth20{
- AuthMethod: authMethod,
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ sourcePinterestOAuth20 = &shared.SourcePinterestOAuth20{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourcePinterestAuthorizationMethodOAuth20 != nil {
+ if sourcePinterestOAuth20 != nil {
credentials = &shared.SourcePinterestAuthorizationMethod{
- SourcePinterestAuthorizationMethodOAuth20: sourcePinterestAuthorizationMethodOAuth20,
+ SourcePinterestOAuth20: sourcePinterestOAuth20,
}
}
- var sourcePinterestAuthorizationMethodAccessToken *shared.SourcePinterestAuthorizationMethodAccessToken
- if r.Configuration.Credentials.SourcePinterestAuthorizationMethodAccessToken != nil {
- accessToken := r.Configuration.Credentials.SourcePinterestAuthorizationMethodAccessToken.AccessToken.ValueString()
- authMethod1 := shared.SourcePinterestAuthorizationMethodAccessTokenAuthMethod(r.Configuration.Credentials.SourcePinterestAuthorizationMethodAccessToken.AuthMethod.ValueString())
- sourcePinterestAuthorizationMethodAccessToken = &shared.SourcePinterestAuthorizationMethodAccessToken{
- AccessToken: accessToken,
- AuthMethod: authMethod1,
- }
+ }
+ var customReports []shared.SourcePinterestReportConfig = nil
+ for _, customReportsItem := range r.Configuration.CustomReports {
+ var attributionTypes []shared.SourcePinterestValidEnums = nil
+ for _, attributionTypesItem := range customReportsItem.AttributionTypes {
+ attributionTypes = append(attributionTypes, shared.SourcePinterestValidEnums(attributionTypesItem.ValueString()))
}
- if sourcePinterestAuthorizationMethodAccessToken != nil {
- credentials = &shared.SourcePinterestAuthorizationMethod{
- SourcePinterestAuthorizationMethodAccessToken: sourcePinterestAuthorizationMethodAccessToken,
- }
+ clickWindowDays := new(shared.SourcePinterestClickWindowDays)
+ if !customReportsItem.ClickWindowDays.IsUnknown() && !customReportsItem.ClickWindowDays.IsNull() {
+ *clickWindowDays = shared.SourcePinterestClickWindowDays(customReportsItem.ClickWindowDays.ValueInt64())
+ } else {
+ clickWindowDays = nil
+ }
+ var columns []shared.SourcePinterestSchemasValidEnums = nil
+ for _, columnsItem := range customReportsItem.Columns {
+ columns = append(columns, shared.SourcePinterestSchemasValidEnums(columnsItem.ValueString()))
+ }
+ conversionReportTime := new(shared.SourcePinterestConversionReportTime)
+ if !customReportsItem.ConversionReportTime.IsUnknown() && !customReportsItem.ConversionReportTime.IsNull() {
+ *conversionReportTime = shared.SourcePinterestConversionReportTime(customReportsItem.ConversionReportTime.ValueString())
+ } else {
+ conversionReportTime = nil
+ }
+ engagementWindowDays := new(shared.SourcePinterestEngagementWindowDays)
+ if !customReportsItem.EngagementWindowDays.IsUnknown() && !customReportsItem.EngagementWindowDays.IsNull() {
+ *engagementWindowDays = shared.SourcePinterestEngagementWindowDays(customReportsItem.EngagementWindowDays.ValueInt64())
+ } else {
+ engagementWindowDays = nil
}
+ granularity := new(shared.SourcePinterestGranularity)
+ if !customReportsItem.Granularity.IsUnknown() && !customReportsItem.Granularity.IsNull() {
+ *granularity = shared.SourcePinterestGranularity(customReportsItem.Granularity.ValueString())
+ } else {
+ granularity = nil
+ }
+ level := new(shared.SourcePinterestLevel)
+ if !customReportsItem.Level.IsUnknown() && !customReportsItem.Level.IsNull() {
+ *level = shared.SourcePinterestLevel(customReportsItem.Level.ValueString())
+ } else {
+ level = nil
+ }
+ name := customReportsItem.Name.ValueString()
+ startDate := new(customTypes.Date)
+ if !customReportsItem.StartDate.IsUnknown() && !customReportsItem.StartDate.IsNull() {
+ startDate = customTypes.MustNewDateFromString(customReportsItem.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
+ viewWindowDays := new(shared.SourcePinterestViewWindowDays)
+ if !customReportsItem.ViewWindowDays.IsUnknown() && !customReportsItem.ViewWindowDays.IsNull() {
+ *viewWindowDays = shared.SourcePinterestViewWindowDays(customReportsItem.ViewWindowDays.ValueInt64())
+ } else {
+ viewWindowDays = nil
+ }
+ customReports = append(customReports, shared.SourcePinterestReportConfig{
+ AttributionTypes: attributionTypes,
+ ClickWindowDays: clickWindowDays,
+ Columns: columns,
+ ConversionReportTime: conversionReportTime,
+ EngagementWindowDays: engagementWindowDays,
+ Granularity: granularity,
+ Level: level,
+ Name: name,
+ StartDate: startDate,
+ ViewWindowDays: viewWindowDays,
+ })
+ }
+ startDate1 := new(customTypes.Date)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ startDate1 = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
+ } else {
+ startDate1 = nil
}
- sourceType := shared.SourcePinterestPinterest(r.Configuration.SourceType.ValueString())
- startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
var status []shared.SourcePinterestStatus = nil
for _, statusItem := range r.Configuration.Status {
status = append(status, shared.SourcePinterestStatus(statusItem.ValueString()))
}
configuration := shared.SourcePinterest{
- Credentials: credentials,
- SourceType: sourceType,
- StartDate: startDate,
- Status: status,
+ Credentials: credentials,
+ CustomReports: customReports,
+ StartDate: startDate1,
+ Status: status,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
- name := r.Name.ValueString()
+ name1 := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
*secretID = r.SecretID.ValueString()
@@ -76,7 +136,8 @@ func (r *SourcePinterestResourceModel) ToCreateSDKType() *shared.SourcePinterest
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePinterestCreateRequest{
Configuration: configuration,
- Name: name,
+ DefinitionID: definitionID,
+ Name: name1,
SecretID: secretID,
WorkspaceID: workspaceID,
}
@@ -91,64 +152,120 @@ func (r *SourcePinterestResourceModel) ToGetSDKType() *shared.SourcePinterestCre
func (r *SourcePinterestResourceModel) ToUpdateSDKType() *shared.SourcePinterestPutRequest {
var credentials *shared.SourcePinterestUpdateAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourcePinterestUpdateAuthorizationMethodOAuth20 *shared.SourcePinterestUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodOAuth20 != nil {
- authMethod := shared.SourcePinterestUpdateAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodOAuth20.AuthMethod.ValueString())
+ var sourcePinterestUpdateOAuth20 *shared.SourcePinterestUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
clientID := new(string)
- if !r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- refreshToken := r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodOAuth20.RefreshToken.ValueString()
- sourcePinterestUpdateAuthorizationMethodOAuth20 = &shared.SourcePinterestUpdateAuthorizationMethodOAuth20{
- AuthMethod: authMethod,
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ sourcePinterestUpdateOAuth20 = &shared.SourcePinterestUpdateOAuth20{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourcePinterestUpdateAuthorizationMethodOAuth20 != nil {
+ if sourcePinterestUpdateOAuth20 != nil {
credentials = &shared.SourcePinterestUpdateAuthorizationMethod{
- SourcePinterestUpdateAuthorizationMethodOAuth20: sourcePinterestUpdateAuthorizationMethodOAuth20,
+ SourcePinterestUpdateOAuth20: sourcePinterestUpdateOAuth20,
}
}
- var sourcePinterestUpdateAuthorizationMethodAccessToken *shared.SourcePinterestUpdateAuthorizationMethodAccessToken
- if r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodAccessToken != nil {
- accessToken := r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodAccessToken.AccessToken.ValueString()
- authMethod1 := shared.SourcePinterestUpdateAuthorizationMethodAccessTokenAuthMethod(r.Configuration.Credentials.SourcePinterestUpdateAuthorizationMethodAccessToken.AuthMethod.ValueString())
- sourcePinterestUpdateAuthorizationMethodAccessToken = &shared.SourcePinterestUpdateAuthorizationMethodAccessToken{
- AccessToken: accessToken,
- AuthMethod: authMethod1,
- }
+ }
+ var customReports []shared.ReportConfig = nil
+ for _, customReportsItem := range r.Configuration.CustomReports {
+ var attributionTypes []shared.SourcePinterestUpdateValidEnums = nil
+ for _, attributionTypesItem := range customReportsItem.AttributionTypes {
+ attributionTypes = append(attributionTypes, shared.SourcePinterestUpdateValidEnums(attributionTypesItem.ValueString()))
}
- if sourcePinterestUpdateAuthorizationMethodAccessToken != nil {
- credentials = &shared.SourcePinterestUpdateAuthorizationMethod{
- SourcePinterestUpdateAuthorizationMethodAccessToken: sourcePinterestUpdateAuthorizationMethodAccessToken,
- }
+ clickWindowDays := new(shared.ClickWindowDays)
+ if !customReportsItem.ClickWindowDays.IsUnknown() && !customReportsItem.ClickWindowDays.IsNull() {
+ *clickWindowDays = shared.ClickWindowDays(customReportsItem.ClickWindowDays.ValueInt64())
+ } else {
+ clickWindowDays = nil
+ }
+ var columns []shared.SourcePinterestUpdateSchemasValidEnums = nil
+ for _, columnsItem := range customReportsItem.Columns {
+ columns = append(columns, shared.SourcePinterestUpdateSchemasValidEnums(columnsItem.ValueString()))
+ }
+ conversionReportTime := new(shared.ConversionReportTime)
+ if !customReportsItem.ConversionReportTime.IsUnknown() && !customReportsItem.ConversionReportTime.IsNull() {
+ *conversionReportTime = shared.ConversionReportTime(customReportsItem.ConversionReportTime.ValueString())
+ } else {
+ conversionReportTime = nil
}
+ engagementWindowDays := new(shared.EngagementWindowDays)
+ if !customReportsItem.EngagementWindowDays.IsUnknown() && !customReportsItem.EngagementWindowDays.IsNull() {
+ *engagementWindowDays = shared.EngagementWindowDays(customReportsItem.EngagementWindowDays.ValueInt64())
+ } else {
+ engagementWindowDays = nil
+ }
+ granularity := new(shared.Granularity)
+ if !customReportsItem.Granularity.IsUnknown() && !customReportsItem.Granularity.IsNull() {
+ *granularity = shared.Granularity(customReportsItem.Granularity.ValueString())
+ } else {
+ granularity = nil
+ }
+ level := new(shared.SourcePinterestUpdateLevel)
+ if !customReportsItem.Level.IsUnknown() && !customReportsItem.Level.IsNull() {
+ *level = shared.SourcePinterestUpdateLevel(customReportsItem.Level.ValueString())
+ } else {
+ level = nil
+ }
+ name := customReportsItem.Name.ValueString()
+ startDate := new(customTypes.Date)
+ if !customReportsItem.StartDate.IsUnknown() && !customReportsItem.StartDate.IsNull() {
+ startDate = customTypes.MustNewDateFromString(customReportsItem.StartDate.ValueString())
+ } else {
+ startDate = nil
+ }
+ viewWindowDays := new(shared.ViewWindowDays)
+ if !customReportsItem.ViewWindowDays.IsUnknown() && !customReportsItem.ViewWindowDays.IsNull() {
+ *viewWindowDays = shared.ViewWindowDays(customReportsItem.ViewWindowDays.ValueInt64())
+ } else {
+ viewWindowDays = nil
+ }
+ customReports = append(customReports, shared.ReportConfig{
+ AttributionTypes: attributionTypes,
+ ClickWindowDays: clickWindowDays,
+ Columns: columns,
+ ConversionReportTime: conversionReportTime,
+ EngagementWindowDays: engagementWindowDays,
+ Granularity: granularity,
+ Level: level,
+ Name: name,
+ StartDate: startDate,
+ ViewWindowDays: viewWindowDays,
+ })
+ }
+ startDate1 := new(customTypes.Date)
+ if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
+ startDate1 = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
+ } else {
+ startDate1 = nil
}
- startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
- var status []shared.SourcePinterestUpdateStatus = nil
+ var status []shared.Status = nil
for _, statusItem := range r.Configuration.Status {
- status = append(status, shared.SourcePinterestUpdateStatus(statusItem.ValueString()))
+ status = append(status, shared.Status(statusItem.ValueString()))
}
configuration := shared.SourcePinterestUpdate{
- Credentials: credentials,
- StartDate: startDate,
- Status: status,
+ Credentials: credentials,
+ CustomReports: customReports,
+ StartDate: startDate1,
+ Status: status,
}
- name := r.Name.ValueString()
+ name1 := r.Name.ValueString()
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePinterestPutRequest{
Configuration: configuration,
- Name: name,
+ Name: name1,
WorkspaceID: workspaceID,
}
return &out
diff --git a/internal/provider/source_pipedrive_data_source.go b/internal/provider/source_pipedrive_data_source.go
old mode 100755
new mode 100644
index c784099d2..989c23ffc
--- a/internal/provider/source_pipedrive_data_source.go
+++ b/internal/provider/source_pipedrive_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourcePipedriveDataSource struct {
// SourcePipedriveDataSourceModel describes the data model.
type SourcePipedriveDataSourceModel struct {
- Configuration SourcePipedrive `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,55 +47,20 @@ func (r *SourcePipedriveDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourcePipedrive DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "authorization": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `The Pipedrive API Token.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
- },
- },
- "replication_start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. When specified and not None, then stream will behave as incremental`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pipedrive",
- ),
- },
- Description: `must be one of ["pipedrive"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_pipedrive_data_source_sdk.go b/internal/provider/source_pipedrive_data_source_sdk.go
old mode 100755
new mode 100644
index 506667405..8e6d6ad23
--- a/internal/provider/source_pipedrive_data_source_sdk.go
+++ b/internal/provider/source_pipedrive_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePipedriveDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_pipedrive_resource.go b/internal/provider/source_pipedrive_resource.go
old mode 100755
new mode 100644
index e65771de4..22c03ffd8
--- a/internal/provider/source_pipedrive_resource.go
+++ b/internal/provider/source_pipedrive_resource.go
@@ -3,19 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -36,6 +34,7 @@ type SourcePipedriveResource struct {
// SourcePipedriveResourceModel describes the resource data model.
type SourcePipedriveResourceModel struct {
Configuration SourcePipedrive `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -55,49 +54,35 @@ func (r *SourcePipedriveResource) Schema(ctx context.Context, req resource.Schem
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "authorization": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Required: true,
- Description: `The Pipedrive API Token.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
- },
+ "api_token": schema.StringAttribute{
+ Required: true,
+ Sensitive: true,
+ Description: `The Pipedrive API Token.`,
},
"replication_start_date": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
+ Required: true,
Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. When specified and not None, then stream will behave as incremental`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pipedrive",
- ),
- },
- Description: `must be one of ["pipedrive"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -161,7 +146,7 @@ func (r *SourcePipedriveResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePipedrive(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -337,5 +322,5 @@ func (r *SourcePipedriveResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourcePipedriveResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_pipedrive_resource_sdk.go b/internal/provider/source_pipedrive_resource_sdk.go
old mode 100755
new mode 100644
index 6f7391476..77fdfa6a8
--- a/internal/provider/source_pipedrive_resource_sdk.go
+++ b/internal/provider/source_pipedrive_resource_sdk.go
@@ -3,27 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
- "time"
)
func (r *SourcePipedriveResourceModel) ToCreateSDKType() *shared.SourcePipedriveCreateRequest {
- var authorization *shared.SourcePipedriveAPIKeyAuthentication
- if r.Configuration.Authorization != nil {
- apiToken := r.Configuration.Authorization.APIToken.ValueString()
- authType := shared.SourcePipedriveAPIKeyAuthenticationAuthType(r.Configuration.Authorization.AuthType.ValueString())
- authorization = &shared.SourcePipedriveAPIKeyAuthentication{
- APIToken: apiToken,
- AuthType: authType,
- }
- }
- replicationStartDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.ReplicationStartDate.ValueString())
- sourceType := shared.SourcePipedrivePipedrive(r.Configuration.SourceType.ValueString())
+ apiToken := r.Configuration.APIToken.ValueString()
+ replicationStartDate := r.Configuration.ReplicationStartDate.ValueString()
configuration := shared.SourcePipedrive{
- Authorization: authorization,
+ APIToken: apiToken,
ReplicationStartDate: replicationStartDate,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -35,6 +30,7 @@ func (r *SourcePipedriveResourceModel) ToCreateSDKType() *shared.SourcePipedrive
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePipedriveCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -48,18 +44,10 @@ func (r *SourcePipedriveResourceModel) ToGetSDKType() *shared.SourcePipedriveCre
}
func (r *SourcePipedriveResourceModel) ToUpdateSDKType() *shared.SourcePipedrivePutRequest {
- var authorization *shared.SourcePipedriveUpdateAPIKeyAuthentication
- if r.Configuration.Authorization != nil {
- apiToken := r.Configuration.Authorization.APIToken.ValueString()
- authType := shared.SourcePipedriveUpdateAPIKeyAuthenticationAuthType(r.Configuration.Authorization.AuthType.ValueString())
- authorization = &shared.SourcePipedriveUpdateAPIKeyAuthentication{
- APIToken: apiToken,
- AuthType: authType,
- }
- }
- replicationStartDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.ReplicationStartDate.ValueString())
+ apiToken := r.Configuration.APIToken.ValueString()
+ replicationStartDate := r.Configuration.ReplicationStartDate.ValueString()
configuration := shared.SourcePipedriveUpdate{
- Authorization: authorization,
+ APIToken: apiToken,
ReplicationStartDate: replicationStartDate,
}
name := r.Name.ValueString()
diff --git a/internal/provider/source_pocket_data_source.go b/internal/provider/source_pocket_data_source.go
old mode 100755
new mode 100644
index 36f5a9de8..7f6d6c991
--- a/internal/provider/source_pocket_data_source.go
+++ b/internal/provider/source_pocket_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourcePocketDataSource struct {
// SourcePocketDataSourceModel describes the data model.
type SourcePocketDataSourceModel struct {
- Configuration SourcePocket `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,106 +47,20 @@ func (r *SourcePocketDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourcePocket DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The user's Pocket access token.`,
- },
- "consumer_key": schema.StringAttribute{
- Computed: true,
- Description: `Your application's Consumer Key.`,
- },
- "content_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "article",
- "video",
- "image",
- ),
- },
- MarkdownDescription: `must be one of ["article", "video", "image"]` + "\n" +
- `Select the content type of the items to retrieve.`,
- },
- "detail_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "simple",
- "complete",
- ),
- },
- MarkdownDescription: `must be one of ["simple", "complete"]` + "\n" +
- `Select the granularity of the information about each item.`,
- },
- "domain": schema.StringAttribute{
- Computed: true,
- Description: `Only return items from a particular ` + "`" + `domain` + "`" + `.`,
- },
- "favorite": schema.BoolAttribute{
- Computed: true,
- Description: `Retrieve only favorited items.`,
- },
- "search": schema.StringAttribute{
- Computed: true,
- Description: `Only return items whose title or url contain the ` + "`" + `search` + "`" + ` string.`,
- },
- "since": schema.StringAttribute{
- Computed: true,
- Description: `Only return items modified since the given timestamp.`,
- },
- "sort": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "newest",
- "oldest",
- "title",
- "site",
- ),
- },
- MarkdownDescription: `must be one of ["newest", "oldest", "title", "site"]` + "\n" +
- `Sort retrieved items by the given criteria.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pocket",
- ),
- },
- Description: `must be one of ["pocket"]`,
- },
- "state": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "unread",
- "archive",
- "all",
- ),
- },
- MarkdownDescription: `must be one of ["unread", "archive", "all"]` + "\n" +
- `Select the state of the items to retrieve.`,
- },
- "tag": schema.StringAttribute{
- Computed: true,
- Description: `Return only items tagged with this tag name. Use _untagged_ for retrieving only untagged items.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_pocket_data_source_sdk.go b/internal/provider/source_pocket_data_source_sdk.go
old mode 100755
new mode 100644
index cf49c4f0e..618ebd839
--- a/internal/provider/source_pocket_data_source_sdk.go
+++ b/internal/provider/source_pocket_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePocketDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_pocket_resource.go b/internal/provider/source_pocket_resource.go
old mode 100755
new mode 100644
index ce3905b33..cc0022923
--- a/internal/provider/source_pocket_resource.go
+++ b/internal/provider/source_pocket_resource.go
@@ -3,17 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +36,7 @@ type SourcePocketResource struct {
// SourcePocketResourceModel describes the resource data model.
type SourcePocketResourceModel struct {
Configuration SourcePocket `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,14 +58,18 @@ func (r *SourcePocketResource) Schema(ctx context.Context, req resource.SchemaRe
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The user's Pocket access token.`,
},
"consumer_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your application's Consumer Key.`,
},
"content_type": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["article", "video", "image"]` + "\n" +
+ `Select the content type of the items to retrieve.`,
Validators: []validator.String{
stringvalidator.OneOf(
"article",
@@ -71,27 +77,26 @@ func (r *SourcePocketResource) Schema(ctx context.Context, req resource.SchemaRe
"image",
),
},
- MarkdownDescription: `must be one of ["article", "video", "image"]` + "\n" +
- `Select the content type of the items to retrieve.`,
},
"detail_type": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["simple", "complete"]` + "\n" +
+ `Select the granularity of the information about each item.`,
Validators: []validator.String{
stringvalidator.OneOf(
"simple",
"complete",
),
},
- MarkdownDescription: `must be one of ["simple", "complete"]` + "\n" +
- `Select the granularity of the information about each item.`,
},
"domain": schema.StringAttribute{
Optional: true,
Description: `Only return items from a particular ` + "`" + `domain` + "`" + `.`,
},
"favorite": schema.BoolAttribute{
- Optional: true,
- Description: `Retrieve only favorited items.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Retrieve only favorited items.`,
},
"search": schema.StringAttribute{
Optional: true,
@@ -103,6 +108,8 @@ func (r *SourcePocketResource) Schema(ctx context.Context, req resource.SchemaRe
},
"sort": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["newest", "oldest", "title", "site"]` + "\n" +
+ `Sort retrieved items by the given criteria.`,
Validators: []validator.String{
stringvalidator.OneOf(
"newest",
@@ -111,20 +118,11 @@ func (r *SourcePocketResource) Schema(ctx context.Context, req resource.SchemaRe
"site",
),
},
- MarkdownDescription: `must be one of ["newest", "oldest", "title", "site"]` + "\n" +
- `Sort retrieved items by the given criteria.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pocket",
- ),
- },
- Description: `must be one of ["pocket"]`,
},
"state": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["unread", "archive", "all"]` + "\n" +
+ `Select the state of the items to retrieve.`,
Validators: []validator.String{
stringvalidator.OneOf(
"unread",
@@ -132,8 +130,6 @@ func (r *SourcePocketResource) Schema(ctx context.Context, req resource.SchemaRe
"all",
),
},
- MarkdownDescription: `must be one of ["unread", "archive", "all"]` + "\n" +
- `Select the state of the items to retrieve.`,
},
"tag": schema.StringAttribute{
Optional: true,
@@ -141,13 +137,24 @@ func (r *SourcePocketResource) Schema(ctx context.Context, req resource.SchemaRe
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -211,7 +218,7 @@ func (r *SourcePocketResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePocket(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -387,5 +394,5 @@ func (r *SourcePocketResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourcePocketResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_pocket_resource_sdk.go b/internal/provider/source_pocket_resource_sdk.go
old mode 100755
new mode 100644
index 5a3977290..7b6694b39
--- a/internal/provider/source_pocket_resource_sdk.go
+++ b/internal/provider/source_pocket_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -52,7 +52,6 @@ func (r *SourcePocketResourceModel) ToCreateSDKType() *shared.SourcePocketCreate
} else {
sort = nil
}
- sourceType := shared.SourcePocketPocket(r.Configuration.SourceType.ValueString())
state := new(shared.SourcePocketState)
if !r.Configuration.State.IsUnknown() && !r.Configuration.State.IsNull() {
*state = shared.SourcePocketState(r.Configuration.State.ValueString())
@@ -75,10 +74,15 @@ func (r *SourcePocketResourceModel) ToCreateSDKType() *shared.SourcePocketCreate
Search: search,
Since: since,
Sort: sort,
- SourceType: sourceType,
State: state,
Tag: tag,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -89,6 +93,7 @@ func (r *SourcePocketResourceModel) ToCreateSDKType() *shared.SourcePocketCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePocketCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -104,15 +109,15 @@ func (r *SourcePocketResourceModel) ToGetSDKType() *shared.SourcePocketCreateReq
func (r *SourcePocketResourceModel) ToUpdateSDKType() *shared.SourcePocketPutRequest {
accessToken := r.Configuration.AccessToken.ValueString()
consumerKey := r.Configuration.ConsumerKey.ValueString()
- contentType := new(shared.SourcePocketUpdateContentType)
+ contentType := new(shared.ContentType)
if !r.Configuration.ContentType.IsUnknown() && !r.Configuration.ContentType.IsNull() {
- *contentType = shared.SourcePocketUpdateContentType(r.Configuration.ContentType.ValueString())
+ *contentType = shared.ContentType(r.Configuration.ContentType.ValueString())
} else {
contentType = nil
}
- detailType := new(shared.SourcePocketUpdateDetailType)
+ detailType := new(shared.DetailType)
if !r.Configuration.DetailType.IsUnknown() && !r.Configuration.DetailType.IsNull() {
- *detailType = shared.SourcePocketUpdateDetailType(r.Configuration.DetailType.ValueString())
+ *detailType = shared.DetailType(r.Configuration.DetailType.ValueString())
} else {
detailType = nil
}
@@ -146,9 +151,9 @@ func (r *SourcePocketResourceModel) ToUpdateSDKType() *shared.SourcePocketPutReq
} else {
sort = nil
}
- state := new(shared.SourcePocketUpdateState)
+ state := new(shared.State)
if !r.Configuration.State.IsUnknown() && !r.Configuration.State.IsNull() {
- *state = shared.SourcePocketUpdateState(r.Configuration.State.ValueString())
+ *state = shared.State(r.Configuration.State.ValueString())
} else {
state = nil
}
diff --git a/internal/provider/source_pokeapi_data_source.go b/internal/provider/source_pokeapi_data_source.go
old mode 100755
new mode 100644
index 3ed209593..4b02e0c3a
--- a/internal/provider/source_pokeapi_data_source.go
+++ b/internal/provider/source_pokeapi_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourcePokeapiDataSource struct {
// SourcePokeapiDataSourceModel describes the data model.
type SourcePokeapiDataSourceModel struct {
- Configuration SourcePokeapi `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourcePokeapiDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourcePokeapi DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "pokemon_name": schema.StringAttribute{
- Computed: true,
- Description: `Pokemon requested from the API.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pokeapi",
- ),
- },
- Description: `must be one of ["pokeapi"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_pokeapi_data_source_sdk.go b/internal/provider/source_pokeapi_data_source_sdk.go
old mode 100755
new mode 100644
index bc493814d..d0627b989
--- a/internal/provider/source_pokeapi_data_source_sdk.go
+++ b/internal/provider/source_pokeapi_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePokeapiDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_pokeapi_resource.go b/internal/provider/source_pokeapi_resource.go
old mode 100755
new mode 100644
index 749227d6a..d3428dc77
--- a/internal/provider/source_pokeapi_resource.go
+++ b/internal/provider/source_pokeapi_resource.go
@@ -3,17 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +36,7 @@ type SourcePokeapiResource struct {
// SourcePokeapiResourceModel describes the resource data model.
type SourcePokeapiResourceModel struct {
Configuration SourcePokeapi `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -55,27 +57,932 @@ func (r *SourcePokeapiResource) Schema(ctx context.Context, req resource.SchemaR
Required: true,
Attributes: map[string]schema.Attribute{
"pokemon_name": schema.StringAttribute{
- Required: true,
- Description: `Pokemon requested from the API.`,
- },
- "source_type": schema.StringAttribute{
Required: true,
+ MarkdownDescription: `must be one of ["bulbasaur", "ivysaur", "venusaur", "charmander", "charmeleon", "charizard", "squirtle", "wartortle", "blastoise", "caterpie", "metapod", "butterfree", "weedle", "kakuna", "beedrill", "pidgey", "pidgeotto", "pidgeot", "rattata", "raticate", "spearow", "fearow", "ekans", "arbok", "pikachu", "raichu", "sandshrew", "sandslash", "nidoranf", "nidorina", "nidoqueen", "nidoranm", "nidorino", "nidoking", "clefairy", "clefable", "vulpix", "ninetales", "jigglypuff", "wigglytuff", "zubat", "golbat", "oddish", "gloom", "vileplume", "paras", "parasect", "venonat", "venomoth", "diglett", "dugtrio", "meowth", "persian", "psyduck", "golduck", "mankey", "primeape", "growlithe", "arcanine", "poliwag", "poliwhirl", "poliwrath", "abra", "kadabra", "alakazam", "machop", "machoke", "machamp", "bellsprout", "weepinbell", "victreebel", "tentacool", "tentacruel", "geodude", "graveler", "golem", "ponyta", "rapidash", "slowpoke", "slowbro", "magnemite", "magneton", "farfetchd", "doduo", "dodrio", "seel", "dewgong", "grimer", "muk", "shellder", "cloyster", "gastly", "haunter", "gengar", "onix", "drowzee", "hypno", "krabby", "kingler", "voltorb", "electrode", "exeggcute", "exeggutor", "cubone", "marowak", "hitmonlee", "hitmonchan", "lickitung", "koffing", "weezing", "rhyhorn", "rhydon", "chansey", "tangela", "kangaskhan", "horsea", "seadra", "goldeen", "seaking", "staryu", "starmie", "mrmime", "scyther", "jynx", "electabuzz", "magmar", "pinsir", "tauros", "magikarp", "gyarados", "lapras", "ditto", "eevee", "vaporeon", "jolteon", "flareon", "porygon", "omanyte", "omastar", "kabuto", "kabutops", "aerodactyl", "snorlax", "articuno", "zapdos", "moltres", "dratini", "dragonair", "dragonite", "mewtwo", "mew", "chikorita", "bayleef", "meganium", "cyndaquil", "quilava", "typhlosion", "totodile", "croconaw", "feraligatr", "sentret", "furret", "hoothoot", "noctowl", "ledyba", "ledian", "spinarak", "ariados", "crobat", "chinchou", "lanturn", "pichu", "cleffa", 
"igglybuff", "togepi", "togetic", "natu", "xatu", "mareep", "flaaffy", "ampharos", "bellossom", "marill", "azumarill", "sudowoodo", "politoed", "hoppip", "skiploom", "jumpluff", "aipom", "sunkern", "sunflora", "yanma", "wooper", "quagsire", "espeon", "umbreon", "murkrow", "slowking", "misdreavus", "unown", "wobbuffet", "girafarig", "pineco", "forretress", "dunsparce", "gligar", "steelix", "snubbull", "granbull", "qwilfish", "scizor", "shuckle", "heracross", "sneasel", "teddiursa", "ursaring", "slugma", "magcargo", "swinub", "piloswine", "corsola", "remoraid", "octillery", "delibird", "mantine", "skarmory", "houndour", "houndoom", "kingdra", "phanpy", "donphan", "porygon2", "stantler", "smeargle", "tyrogue", "hitmontop", "smoochum", "elekid", "magby", "miltank", "blissey", "raikou", "entei", "suicune", "larvitar", "pupitar", "tyranitar", "lugia", "ho-oh", "celebi", "treecko", "grovyle", "sceptile", "torchic", "combusken", "blaziken", "mudkip", "marshtomp", "swampert", "poochyena", "mightyena", "zigzagoon", "linoone", "wurmple", "silcoon", "beautifly", "cascoon", "dustox", "lotad", "lombre", "ludicolo", "seedot", "nuzleaf", "shiftry", "taillow", "swellow", "wingull", "pelipper", "ralts", "kirlia", "gardevoir", "surskit", "masquerain", "shroomish", "breloom", "slakoth", "vigoroth", "slaking", "nincada", "ninjask", "shedinja", "whismur", "loudred", "exploud", "makuhita", "hariyama", "azurill", "nosepass", "skitty", "delcatty", "sableye", "mawile", "aron", "lairon", "aggron", "meditite", "medicham", "electrike", "manectric", "plusle", "minun", "volbeat", "illumise", "roselia", "gulpin", "swalot", "carvanha", "sharpedo", "wailmer", "wailord", "numel", "camerupt", "torkoal", "spoink", "grumpig", "spinda", "trapinch", "vibrava", "flygon", "cacnea", "cacturne", "swablu", "altaria", "zangoose", "seviper", "lunatone", "solrock", "barboach", "whiscash", "corphish", "crawdaunt", "baltoy", "claydol", "lileep", "cradily", "anorith", "armaldo", "feebas", "milotic", "castform", 
"kecleon", "shuppet", "banette", "duskull", "dusclops", "tropius", "chimecho", "absol", "wynaut", "snorunt", "glalie", "spheal", "sealeo", "walrein", "clamperl", "huntail", "gorebyss", "relicanth", "luvdisc", "bagon", "shelgon", "salamence", "beldum", "metang", "metagross", "regirock", "regice", "registeel", "latias", "latios", "kyogre", "groudon", "rayquaza", "jirachi", "deoxys", "turtwig", "grotle", "torterra", "chimchar", "monferno", "infernape", "piplup", "prinplup", "empoleon", "starly", "staravia", "staraptor", "bidoof", "bibarel", "kricketot", "kricketune", "shinx", "luxio", "luxray", "budew", "roserade", "cranidos", "rampardos", "shieldon", "bastiodon", "burmy", "wormadam", "mothim", "combee", "vespiquen", "pachirisu", "buizel", "floatzel", "cherubi", "cherrim", "shellos", "gastrodon", "ambipom", "drifloon", "drifblim", "buneary", "lopunny", "mismagius", "honchkrow", "glameow", "purugly", "chingling", "stunky", "skuntank", "bronzor", "bronzong", "bonsly", "mimejr", "happiny", "chatot", "spiritomb", "gible", "gabite", "garchomp", "munchlax", "riolu", "lucario", "hippopotas", "hippowdon", "skorupi", "drapion", "croagunk", "toxicroak", "carnivine", "finneon", "lumineon", "mantyke", "snover", "abomasnow", "weavile", "magnezone", "lickilicky", "rhyperior", "tangrowth", "electivire", "magmortar", "togekiss", "yanmega", "leafeon", "glaceon", "gliscor", "mamoswine", "porygon-z", "gallade", "probopass", "dusknoir", "froslass", "rotom", "uxie", "mesprit", "azelf", "dialga", "palkia", "heatran", "regigigas", "giratina", "cresselia", "phione", "manaphy", "darkrai", "shaymin", "arceus", "victini", "snivy", "servine", "serperior", "tepig", "pignite", "emboar", "oshawott", "dewott", "samurott", "patrat", "watchog", "lillipup", "herdier", "stoutland", "purrloin", "liepard", "pansage", "simisage", "pansear", "simisear", "panpour", "simipour", "munna", "musharna", "pidove", "tranquill", "unfezant", "blitzle", "zebstrika", "roggenrola", "boldore", "gigalith", "woobat", 
"swoobat", "drilbur", "excadrill", "audino", "timburr", "gurdurr", "conkeldurr", "tympole", "palpitoad", "seismitoad", "throh", "sawk", "sewaddle", "swadloon", "leavanny", "venipede", "whirlipede", "scolipede", "cottonee", "whimsicott", "petilil", "lilligant", "basculin", "sandile", "krokorok", "krookodile", "darumaka", "darmanitan", "maractus", "dwebble", "crustle", "scraggy", "scrafty", "sigilyph", "yamask", "cofagrigus", "tirtouga", "carracosta", "archen", "archeops", "trubbish", "garbodor", "zorua", "zoroark", "minccino", "cinccino", "gothita", "gothorita", "gothitelle", "solosis", "duosion", "reuniclus", "ducklett", "swanna", "vanillite", "vanillish", "vanilluxe", "deerling", "sawsbuck", "emolga", "karrablast", "escavalier", "foongus", "amoonguss", "frillish", "jellicent", "alomomola", "joltik", "galvantula", "ferroseed", "ferrothorn", "klink", "klang", "klinklang", "tynamo", "eelektrik", "eelektross", "elgyem", "beheeyem", "litwick", "lampent", "chandelure", "axew", "fraxure", "haxorus", "cubchoo", "beartic", "cryogonal", "shelmet", "accelgor", "stunfisk", "mienfoo", "mienshao", "druddigon", "golett", "golurk", "pawniard", "bisharp", "bouffalant", "rufflet", "braviary", "vullaby", "mandibuzz", "heatmor", "durant", "deino", "zweilous", "hydreigon", "larvesta", "volcarona", "cobalion", "terrakion", "virizion", "tornadus", "thundurus", "reshiram", "zekrom", "landorus", "kyurem", "keldeo", "meloetta", "genesect", "chespin", "quilladin", "chesnaught", "fennekin", "braixen", "delphox", "froakie", "frogadier", "greninja", "bunnelby", "diggersby", "fletchling", "fletchinder", "talonflame", "scatterbug", "spewpa", "vivillon", "litleo", "pyroar", "flabebe", "floette", "florges", "skiddo", "gogoat", "pancham", "pangoro", "furfrou", "espurr", "meowstic", "honedge", "doublade", "aegislash", "spritzee", "aromatisse", "swirlix", "slurpuff", "inkay", "malamar", "binacle", "barbaracle", "skrelp", "dragalge", "clauncher", "clawitzer", "helioptile", "heliolisk", "tyrunt", 
"tyrantrum", "amaura", "aurorus", "sylveon", "hawlucha", "dedenne", "carbink", "goomy", "sliggoo", "goodra", "klefki", "phantump", "trevenant", "pumpkaboo", "gourgeist", "bergmite", "avalugg", "noibat", "noivern", "xerneas", "yveltal", "zygarde", "diancie", "hoopa", "volcanion", "rowlet", "dartrix", "decidueye", "litten", "torracat", "incineroar", "popplio", "brionne", "primarina", "pikipek", "trumbeak", "toucannon", "yungoos", "gumshoos", "grubbin", "charjabug", "vikavolt", "crabrawler", "crabominable", "oricorio", "cutiefly", "ribombee", "rockruff", "lycanroc", "wishiwashi", "mareanie", "toxapex", "mudbray", "mudsdale", "dewpider", "araquanid", "fomantis", "lurantis", "morelull", "shiinotic", "salandit", "salazzle", "stufful", "bewear", "bounsweet", "steenee", "tsareena", "comfey", "oranguru", "passimian", "wimpod", "golisopod", "sandygast", "palossand", "pyukumuku", "typenull", "silvally", "minior", "komala", "turtonator", "togedemaru", "mimikyu", "bruxish", "drampa", "dhelmise", "jangmo-o", "hakamo-o", "kommo-o", "tapukoko", "tapulele", "tapubulu", "tapufini", "cosmog", "cosmoem", "solgaleo", "lunala", "nihilego", "buzzwole", "pheromosa", "xurkitree", "celesteela", "kartana", "guzzlord", "necrozma", "magearna", "marshadow", "poipole", "naganadel", "stakataka", "blacephalon", "zeraora", "meltan", "melmetal", "grookey", "thwackey", "rillaboom", "scorbunny", "raboot", "cinderace", "sobble", "drizzile", "inteleon", "skwovet", "greedent", "rookidee", "corvisquire", "corviknight", "blipbug", "dottler", "orbeetle", "nickit", "thievul", "gossifleur", "eldegoss", "wooloo", "dubwool", "chewtle", "drednaw", "yamper", "boltund", "rolycoly", "carkol", "coalossal", "applin", "flapple", "appletun", "silicobra", "sandaconda", "cramorant", "arrokuda", "barraskewda", "toxel", "toxtricity", "sizzlipede", "centiskorch", "clobbopus", "grapploct", "sinistea", "polteageist", "hatenna", "hattrem", "hatterene", "impidimp", "morgrem", "grimmsnarl", "obstagoon", "perrserker", "cursola", 
"sirfetchd", "mrrime", "runerigus", "milcery", "alcremie", "falinks", "pincurchin", "snom", "frosmoth", "stonjourner", "eiscue", "indeedee", "morpeko", "cufant", "copperajah", "dracozolt", "arctozolt", "dracovish", "arctovish", "duraludon", "dreepy", "drakloak", "dragapult", "zacian", "zamazenta", "eternatus", "kubfu", "urshifu", "zarude", "regieleki", "regidrago", "glastrier", "spectrier", "calyrex"]` + "\n" +
+ `Pokemon requested from the API.`,
Validators: []validator.String{
stringvalidator.OneOf(
- "pokeapi",
+ "bulbasaur",
+ "ivysaur",
+ "venusaur",
+ "charmander",
+ "charmeleon",
+ "charizard",
+ "squirtle",
+ "wartortle",
+ "blastoise",
+ "caterpie",
+ "metapod",
+ "butterfree",
+ "weedle",
+ "kakuna",
+ "beedrill",
+ "pidgey",
+ "pidgeotto",
+ "pidgeot",
+ "rattata",
+ "raticate",
+ "spearow",
+ "fearow",
+ "ekans",
+ "arbok",
+ "pikachu",
+ "raichu",
+ "sandshrew",
+ "sandslash",
+ "nidoranf",
+ "nidorina",
+ "nidoqueen",
+ "nidoranm",
+ "nidorino",
+ "nidoking",
+ "clefairy",
+ "clefable",
+ "vulpix",
+ "ninetales",
+ "jigglypuff",
+ "wigglytuff",
+ "zubat",
+ "golbat",
+ "oddish",
+ "gloom",
+ "vileplume",
+ "paras",
+ "parasect",
+ "venonat",
+ "venomoth",
+ "diglett",
+ "dugtrio",
+ "meowth",
+ "persian",
+ "psyduck",
+ "golduck",
+ "mankey",
+ "primeape",
+ "growlithe",
+ "arcanine",
+ "poliwag",
+ "poliwhirl",
+ "poliwrath",
+ "abra",
+ "kadabra",
+ "alakazam",
+ "machop",
+ "machoke",
+ "machamp",
+ "bellsprout",
+ "weepinbell",
+ "victreebel",
+ "tentacool",
+ "tentacruel",
+ "geodude",
+ "graveler",
+ "golem",
+ "ponyta",
+ "rapidash",
+ "slowpoke",
+ "slowbro",
+ "magnemite",
+ "magneton",
+ "farfetchd",
+ "doduo",
+ "dodrio",
+ "seel",
+ "dewgong",
+ "grimer",
+ "muk",
+ "shellder",
+ "cloyster",
+ "gastly",
+ "haunter",
+ "gengar",
+ "onix",
+ "drowzee",
+ "hypno",
+ "krabby",
+ "kingler",
+ "voltorb",
+ "electrode",
+ "exeggcute",
+ "exeggutor",
+ "cubone",
+ "marowak",
+ "hitmonlee",
+ "hitmonchan",
+ "lickitung",
+ "koffing",
+ "weezing",
+ "rhyhorn",
+ "rhydon",
+ "chansey",
+ "tangela",
+ "kangaskhan",
+ "horsea",
+ "seadra",
+ "goldeen",
+ "seaking",
+ "staryu",
+ "starmie",
+ "mrmime",
+ "scyther",
+ "jynx",
+ "electabuzz",
+ "magmar",
+ "pinsir",
+ "tauros",
+ "magikarp",
+ "gyarados",
+ "lapras",
+ "ditto",
+ "eevee",
+ "vaporeon",
+ "jolteon",
+ "flareon",
+ "porygon",
+ "omanyte",
+ "omastar",
+ "kabuto",
+ "kabutops",
+ "aerodactyl",
+ "snorlax",
+ "articuno",
+ "zapdos",
+ "moltres",
+ "dratini",
+ "dragonair",
+ "dragonite",
+ "mewtwo",
+ "mew",
+ "chikorita",
+ "bayleef",
+ "meganium",
+ "cyndaquil",
+ "quilava",
+ "typhlosion",
+ "totodile",
+ "croconaw",
+ "feraligatr",
+ "sentret",
+ "furret",
+ "hoothoot",
+ "noctowl",
+ "ledyba",
+ "ledian",
+ "spinarak",
+ "ariados",
+ "crobat",
+ "chinchou",
+ "lanturn",
+ "pichu",
+ "cleffa",
+ "igglybuff",
+ "togepi",
+ "togetic",
+ "natu",
+ "xatu",
+ "mareep",
+ "flaaffy",
+ "ampharos",
+ "bellossom",
+ "marill",
+ "azumarill",
+ "sudowoodo",
+ "politoed",
+ "hoppip",
+ "skiploom",
+ "jumpluff",
+ "aipom",
+ "sunkern",
+ "sunflora",
+ "yanma",
+ "wooper",
+ "quagsire",
+ "espeon",
+ "umbreon",
+ "murkrow",
+ "slowking",
+ "misdreavus",
+ "unown",
+ "wobbuffet",
+ "girafarig",
+ "pineco",
+ "forretress",
+ "dunsparce",
+ "gligar",
+ "steelix",
+ "snubbull",
+ "granbull",
+ "qwilfish",
+ "scizor",
+ "shuckle",
+ "heracross",
+ "sneasel",
+ "teddiursa",
+ "ursaring",
+ "slugma",
+ "magcargo",
+ "swinub",
+ "piloswine",
+ "corsola",
+ "remoraid",
+ "octillery",
+ "delibird",
+ "mantine",
+ "skarmory",
+ "houndour",
+ "houndoom",
+ "kingdra",
+ "phanpy",
+ "donphan",
+ "porygon2",
+ "stantler",
+ "smeargle",
+ "tyrogue",
+ "hitmontop",
+ "smoochum",
+ "elekid",
+ "magby",
+ "miltank",
+ "blissey",
+ "raikou",
+ "entei",
+ "suicune",
+ "larvitar",
+ "pupitar",
+ "tyranitar",
+ "lugia",
+ "ho-oh",
+ "celebi",
+ "treecko",
+ "grovyle",
+ "sceptile",
+ "torchic",
+ "combusken",
+ "blaziken",
+ "mudkip",
+ "marshtomp",
+ "swampert",
+ "poochyena",
+ "mightyena",
+ "zigzagoon",
+ "linoone",
+ "wurmple",
+ "silcoon",
+ "beautifly",
+ "cascoon",
+ "dustox",
+ "lotad",
+ "lombre",
+ "ludicolo",
+ "seedot",
+ "nuzleaf",
+ "shiftry",
+ "taillow",
+ "swellow",
+ "wingull",
+ "pelipper",
+ "ralts",
+ "kirlia",
+ "gardevoir",
+ "surskit",
+ "masquerain",
+ "shroomish",
+ "breloom",
+ "slakoth",
+ "vigoroth",
+ "slaking",
+ "nincada",
+ "ninjask",
+ "shedinja",
+ "whismur",
+ "loudred",
+ "exploud",
+ "makuhita",
+ "hariyama",
+ "azurill",
+ "nosepass",
+ "skitty",
+ "delcatty",
+ "sableye",
+ "mawile",
+ "aron",
+ "lairon",
+ "aggron",
+ "meditite",
+ "medicham",
+ "electrike",
+ "manectric",
+ "plusle",
+ "minun",
+ "volbeat",
+ "illumise",
+ "roselia",
+ "gulpin",
+ "swalot",
+ "carvanha",
+ "sharpedo",
+ "wailmer",
+ "wailord",
+ "numel",
+ "camerupt",
+ "torkoal",
+ "spoink",
+ "grumpig",
+ "spinda",
+ "trapinch",
+ "vibrava",
+ "flygon",
+ "cacnea",
+ "cacturne",
+ "swablu",
+ "altaria",
+ "zangoose",
+ "seviper",
+ "lunatone",
+ "solrock",
+ "barboach",
+ "whiscash",
+ "corphish",
+ "crawdaunt",
+ "baltoy",
+ "claydol",
+ "lileep",
+ "cradily",
+ "anorith",
+ "armaldo",
+ "feebas",
+ "milotic",
+ "castform",
+ "kecleon",
+ "shuppet",
+ "banette",
+ "duskull",
+ "dusclops",
+ "tropius",
+ "chimecho",
+ "absol",
+ "wynaut",
+ "snorunt",
+ "glalie",
+ "spheal",
+ "sealeo",
+ "walrein",
+ "clamperl",
+ "huntail",
+ "gorebyss",
+ "relicanth",
+ "luvdisc",
+ "bagon",
+ "shelgon",
+ "salamence",
+ "beldum",
+ "metang",
+ "metagross",
+ "regirock",
+ "regice",
+ "registeel",
+ "latias",
+ "latios",
+ "kyogre",
+ "groudon",
+ "rayquaza",
+ "jirachi",
+ "deoxys",
+ "turtwig",
+ "grotle",
+ "torterra",
+ "chimchar",
+ "monferno",
+ "infernape",
+ "piplup",
+ "prinplup",
+ "empoleon",
+ "starly",
+ "staravia",
+ "staraptor",
+ "bidoof",
+ "bibarel",
+ "kricketot",
+ "kricketune",
+ "shinx",
+ "luxio",
+ "luxray",
+ "budew",
+ "roserade",
+ "cranidos",
+ "rampardos",
+ "shieldon",
+ "bastiodon",
+ "burmy",
+ "wormadam",
+ "mothim",
+ "combee",
+ "vespiquen",
+ "pachirisu",
+ "buizel",
+ "floatzel",
+ "cherubi",
+ "cherrim",
+ "shellos",
+ "gastrodon",
+ "ambipom",
+ "drifloon",
+ "drifblim",
+ "buneary",
+ "lopunny",
+ "mismagius",
+ "honchkrow",
+ "glameow",
+ "purugly",
+ "chingling",
+ "stunky",
+ "skuntank",
+ "bronzor",
+ "bronzong",
+ "bonsly",
+ "mimejr",
+ "happiny",
+ "chatot",
+ "spiritomb",
+ "gible",
+ "gabite",
+ "garchomp",
+ "munchlax",
+ "riolu",
+ "lucario",
+ "hippopotas",
+ "hippowdon",
+ "skorupi",
+ "drapion",
+ "croagunk",
+ "toxicroak",
+ "carnivine",
+ "finneon",
+ "lumineon",
+ "mantyke",
+ "snover",
+ "abomasnow",
+ "weavile",
+ "magnezone",
+ "lickilicky",
+ "rhyperior",
+ "tangrowth",
+ "electivire",
+ "magmortar",
+ "togekiss",
+ "yanmega",
+ "leafeon",
+ "glaceon",
+ "gliscor",
+ "mamoswine",
+ "porygon-z",
+ "gallade",
+ "probopass",
+ "dusknoir",
+ "froslass",
+ "rotom",
+ "uxie",
+ "mesprit",
+ "azelf",
+ "dialga",
+ "palkia",
+ "heatran",
+ "regigigas",
+ "giratina",
+ "cresselia",
+ "phione",
+ "manaphy",
+ "darkrai",
+ "shaymin",
+ "arceus",
+ "victini",
+ "snivy",
+ "servine",
+ "serperior",
+ "tepig",
+ "pignite",
+ "emboar",
+ "oshawott",
+ "dewott",
+ "samurott",
+ "patrat",
+ "watchog",
+ "lillipup",
+ "herdier",
+ "stoutland",
+ "purrloin",
+ "liepard",
+ "pansage",
+ "simisage",
+ "pansear",
+ "simisear",
+ "panpour",
+ "simipour",
+ "munna",
+ "musharna",
+ "pidove",
+ "tranquill",
+ "unfezant",
+ "blitzle",
+ "zebstrika",
+ "roggenrola",
+ "boldore",
+ "gigalith",
+ "woobat",
+ "swoobat",
+ "drilbur",
+ "excadrill",
+ "audino",
+ "timburr",
+ "gurdurr",
+ "conkeldurr",
+ "tympole",
+ "palpitoad",
+ "seismitoad",
+ "throh",
+ "sawk",
+ "sewaddle",
+ "swadloon",
+ "leavanny",
+ "venipede",
+ "whirlipede",
+ "scolipede",
+ "cottonee",
+ "whimsicott",
+ "petilil",
+ "lilligant",
+ "basculin",
+ "sandile",
+ "krokorok",
+ "krookodile",
+ "darumaka",
+ "darmanitan",
+ "maractus",
+ "dwebble",
+ "crustle",
+ "scraggy",
+ "scrafty",
+ "sigilyph",
+ "yamask",
+ "cofagrigus",
+ "tirtouga",
+ "carracosta",
+ "archen",
+ "archeops",
+ "trubbish",
+ "garbodor",
+ "zorua",
+ "zoroark",
+ "minccino",
+ "cinccino",
+ "gothita",
+ "gothorita",
+ "gothitelle",
+ "solosis",
+ "duosion",
+ "reuniclus",
+ "ducklett",
+ "swanna",
+ "vanillite",
+ "vanillish",
+ "vanilluxe",
+ "deerling",
+ "sawsbuck",
+ "emolga",
+ "karrablast",
+ "escavalier",
+ "foongus",
+ "amoonguss",
+ "frillish",
+ "jellicent",
+ "alomomola",
+ "joltik",
+ "galvantula",
+ "ferroseed",
+ "ferrothorn",
+ "klink",
+ "klang",
+ "klinklang",
+ "tynamo",
+ "eelektrik",
+ "eelektross",
+ "elgyem",
+ "beheeyem",
+ "litwick",
+ "lampent",
+ "chandelure",
+ "axew",
+ "fraxure",
+ "haxorus",
+ "cubchoo",
+ "beartic",
+ "cryogonal",
+ "shelmet",
+ "accelgor",
+ "stunfisk",
+ "mienfoo",
+ "mienshao",
+ "druddigon",
+ "golett",
+ "golurk",
+ "pawniard",
+ "bisharp",
+ "bouffalant",
+ "rufflet",
+ "braviary",
+ "vullaby",
+ "mandibuzz",
+ "heatmor",
+ "durant",
+ "deino",
+ "zweilous",
+ "hydreigon",
+ "larvesta",
+ "volcarona",
+ "cobalion",
+ "terrakion",
+ "virizion",
+ "tornadus",
+ "thundurus",
+ "reshiram",
+ "zekrom",
+ "landorus",
+ "kyurem",
+ "keldeo",
+ "meloetta",
+ "genesect",
+ "chespin",
+ "quilladin",
+ "chesnaught",
+ "fennekin",
+ "braixen",
+ "delphox",
+ "froakie",
+ "frogadier",
+ "greninja",
+ "bunnelby",
+ "diggersby",
+ "fletchling",
+ "fletchinder",
+ "talonflame",
+ "scatterbug",
+ "spewpa",
+ "vivillon",
+ "litleo",
+ "pyroar",
+ "flabebe",
+ "floette",
+ "florges",
+ "skiddo",
+ "gogoat",
+ "pancham",
+ "pangoro",
+ "furfrou",
+ "espurr",
+ "meowstic",
+ "honedge",
+ "doublade",
+ "aegislash",
+ "spritzee",
+ "aromatisse",
+ "swirlix",
+ "slurpuff",
+ "inkay",
+ "malamar",
+ "binacle",
+ "barbaracle",
+ "skrelp",
+ "dragalge",
+ "clauncher",
+ "clawitzer",
+ "helioptile",
+ "heliolisk",
+ "tyrunt",
+ "tyrantrum",
+ "amaura",
+ "aurorus",
+ "sylveon",
+ "hawlucha",
+ "dedenne",
+ "carbink",
+ "goomy",
+ "sliggoo",
+ "goodra",
+ "klefki",
+ "phantump",
+ "trevenant",
+ "pumpkaboo",
+ "gourgeist",
+ "bergmite",
+ "avalugg",
+ "noibat",
+ "noivern",
+ "xerneas",
+ "yveltal",
+ "zygarde",
+ "diancie",
+ "hoopa",
+ "volcanion",
+ "rowlet",
+ "dartrix",
+ "decidueye",
+ "litten",
+ "torracat",
+ "incineroar",
+ "popplio",
+ "brionne",
+ "primarina",
+ "pikipek",
+ "trumbeak",
+ "toucannon",
+ "yungoos",
+ "gumshoos",
+ "grubbin",
+ "charjabug",
+ "vikavolt",
+ "crabrawler",
+ "crabominable",
+ "oricorio",
+ "cutiefly",
+ "ribombee",
+ "rockruff",
+ "lycanroc",
+ "wishiwashi",
+ "mareanie",
+ "toxapex",
+ "mudbray",
+ "mudsdale",
+ "dewpider",
+ "araquanid",
+ "fomantis",
+ "lurantis",
+ "morelull",
+ "shiinotic",
+ "salandit",
+ "salazzle",
+ "stufful",
+ "bewear",
+ "bounsweet",
+ "steenee",
+ "tsareena",
+ "comfey",
+ "oranguru",
+ "passimian",
+ "wimpod",
+ "golisopod",
+ "sandygast",
+ "palossand",
+ "pyukumuku",
+ "typenull",
+ "silvally",
+ "minior",
+ "komala",
+ "turtonator",
+ "togedemaru",
+ "mimikyu",
+ "bruxish",
+ "drampa",
+ "dhelmise",
+ "jangmo-o",
+ "hakamo-o",
+ "kommo-o",
+ "tapukoko",
+ "tapulele",
+ "tapubulu",
+ "tapufini",
+ "cosmog",
+ "cosmoem",
+ "solgaleo",
+ "lunala",
+ "nihilego",
+ "buzzwole",
+ "pheromosa",
+ "xurkitree",
+ "celesteela",
+ "kartana",
+ "guzzlord",
+ "necrozma",
+ "magearna",
+ "marshadow",
+ "poipole",
+ "naganadel",
+ "stakataka",
+ "blacephalon",
+ "zeraora",
+ "meltan",
+ "melmetal",
+ "grookey",
+ "thwackey",
+ "rillaboom",
+ "scorbunny",
+ "raboot",
+ "cinderace",
+ "sobble",
+ "drizzile",
+ "inteleon",
+ "skwovet",
+ "greedent",
+ "rookidee",
+ "corvisquire",
+ "corviknight",
+ "blipbug",
+ "dottler",
+ "orbeetle",
+ "nickit",
+ "thievul",
+ "gossifleur",
+ "eldegoss",
+ "wooloo",
+ "dubwool",
+ "chewtle",
+ "drednaw",
+ "yamper",
+ "boltund",
+ "rolycoly",
+ "carkol",
+ "coalossal",
+ "applin",
+ "flapple",
+ "appletun",
+ "silicobra",
+ "sandaconda",
+ "cramorant",
+ "arrokuda",
+ "barraskewda",
+ "toxel",
+ "toxtricity",
+ "sizzlipede",
+ "centiskorch",
+ "clobbopus",
+ "grapploct",
+ "sinistea",
+ "polteageist",
+ "hatenna",
+ "hattrem",
+ "hatterene",
+ "impidimp",
+ "morgrem",
+ "grimmsnarl",
+ "obstagoon",
+ "perrserker",
+ "cursola",
+ "sirfetchd",
+ "mrrime",
+ "runerigus",
+ "milcery",
+ "alcremie",
+ "falinks",
+ "pincurchin",
+ "snom",
+ "frosmoth",
+ "stonjourner",
+ "eiscue",
+ "indeedee",
+ "morpeko",
+ "cufant",
+ "copperajah",
+ "dracozolt",
+ "arctozolt",
+ "dracovish",
+ "arctovish",
+ "duraludon",
+ "dreepy",
+ "drakloak",
+ "dragapult",
+ "zacian",
+ "zamazenta",
+ "eternatus",
+ "kubfu",
+ "urshifu",
+ "zarude",
+ "regieleki",
+ "regidrago",
+ "glastrier",
+ "spectrier",
+ "calyrex",
),
},
- Description: `must be one of ["pokeapi"]`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +1046,7 @@ func (r *SourcePokeapiResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePokeapi(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +1222,5 @@ func (r *SourcePokeapiResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourcePokeapiResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_pokeapi_resource_sdk.go b/internal/provider/source_pokeapi_resource_sdk.go
old mode 100755
new mode 100644
index e7fa08720..2cf84644a
--- a/internal/provider/source_pokeapi_resource_sdk.go
+++ b/internal/provider/source_pokeapi_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePokeapiResourceModel) ToCreateSDKType() *shared.SourcePokeapiCreateRequest {
- pokemonName := r.Configuration.PokemonName.ValueString()
- sourceType := shared.SourcePokeapiPokeapi(r.Configuration.SourceType.ValueString())
+ pokemonName := shared.SourcePokeapiPokemonName(r.Configuration.PokemonName.ValueString())
configuration := shared.SourcePokeapi{
PokemonName: pokemonName,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourcePokeapiResourceModel) ToCreateSDKType() *shared.SourcePokeapiCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePokeapiCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -37,7 +42,7 @@ func (r *SourcePokeapiResourceModel) ToGetSDKType() *shared.SourcePokeapiCreateR
}
func (r *SourcePokeapiResourceModel) ToUpdateSDKType() *shared.SourcePokeapiPutRequest {
- pokemonName := r.Configuration.PokemonName.ValueString()
+ pokemonName := shared.PokemonName(r.Configuration.PokemonName.ValueString())
configuration := shared.SourcePokeapiUpdate{
PokemonName: pokemonName,
}
diff --git a/internal/provider/source_polygonstockapi_data_source.go b/internal/provider/source_polygonstockapi_data_source.go
old mode 100755
new mode 100644
index 70663cb3b..ae563887a
--- a/internal/provider/source_polygonstockapi_data_source.go
+++ b/internal/provider/source_polygonstockapi_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourcePolygonStockAPIDataSource struct {
// SourcePolygonStockAPIDataSourceModel describes the data model.
type SourcePolygonStockAPIDataSourceModel struct {
- Configuration SourcePolygonStockAPI `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,72 +47,20 @@ func (r *SourcePolygonStockAPIDataSource) Schema(ctx context.Context, req dataso
MarkdownDescription: "SourcePolygonStockAPI DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "adjusted": schema.StringAttribute{
- Computed: true,
- Description: `Determines whether or not the results are adjusted for splits. By default, results are adjusted and set to true. Set this to false to get results that are NOT adjusted for splits.`,
- },
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Your API ACCESS Key`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The target date for the aggregate window.`,
- },
- "limit": schema.Int64Attribute{
- Computed: true,
- Description: `The target date for the aggregate window.`,
- },
- "multiplier": schema.Int64Attribute{
- Computed: true,
- Description: `The size of the timespan multiplier.`,
- },
- "sort": schema.StringAttribute{
- Computed: true,
- Description: `Sort the results by timestamp. asc will return results in ascending order (oldest at the top), desc will return results in descending order (newest at the top).`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "polygon-stock-api",
- ),
- },
- Description: `must be one of ["polygon-stock-api"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The beginning date for the aggregate window.`,
- },
- "stocks_ticker": schema.StringAttribute{
- Computed: true,
- Description: `The exchange symbol that this item is traded under.`,
- },
- "timespan": schema.StringAttribute{
- Computed: true,
- Description: `The size of the time window.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_polygonstockapi_data_source_sdk.go b/internal/provider/source_polygonstockapi_data_source_sdk.go
old mode 100755
new mode 100644
index 908237ea8..625832317
--- a/internal/provider/source_polygonstockapi_data_source_sdk.go
+++ b/internal/provider/source_polygonstockapi_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePolygonStockAPIDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_polygonstockapi_resource.go b/internal/provider/source_polygonstockapi_resource.go
old mode 100755
new mode 100644
index 36e7da298..acaa69b28
--- a/internal/provider/source_polygonstockapi_resource.go
+++ b/internal/provider/source_polygonstockapi_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourcePolygonStockAPIResource struct {
// SourcePolygonStockAPIResourceModel describes the resource data model.
type SourcePolygonStockAPIResourceModel struct {
Configuration SourcePolygonStockAPI `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -64,11 +65,11 @@ func (r *SourcePolygonStockAPIResource) Schema(ctx context.Context, req resource
Description: `Your API ACCESS Key`,
},
"end_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The target date for the aggregate window.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The target date for the aggregate window.`,
},
"limit": schema.Int64Attribute{
Optional: true,
@@ -82,21 +83,12 @@ func (r *SourcePolygonStockAPIResource) Schema(ctx context.Context, req resource
Optional: true,
Description: `Sort the results by timestamp. asc will return results in ascending order (oldest at the top), desc will return results in descending order (newest at the top).`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "polygon-stock-api",
- ),
- },
- Description: `must be one of ["polygon-stock-api"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The beginning date for the aggregate window.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The beginning date for the aggregate window.`,
},
"stocks_ticker": schema.StringAttribute{
Required: true,
@@ -108,13 +100,24 @@ func (r *SourcePolygonStockAPIResource) Schema(ctx context.Context, req resource
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -178,7 +181,7 @@ func (r *SourcePolygonStockAPIResource) Create(ctx context.Context, req resource
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePolygonStockAPI(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -354,5 +357,5 @@ func (r *SourcePolygonStockAPIResource) Delete(ctx context.Context, req resource
}
func (r *SourcePolygonStockAPIResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_polygonstockapi_resource_sdk.go b/internal/provider/source_polygonstockapi_resource_sdk.go
old mode 100755
new mode 100644
index 82e31a88b..4019c0136
--- a/internal/provider/source_polygonstockapi_resource_sdk.go
+++ b/internal/provider/source_polygonstockapi_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -30,7 +30,6 @@ func (r *SourcePolygonStockAPIResourceModel) ToCreateSDKType() *shared.SourcePol
} else {
sort = nil
}
- sourceType := shared.SourcePolygonStockAPIPolygonStockAPI(r.Configuration.SourceType.ValueString())
startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
stocksTicker := r.Configuration.StocksTicker.ValueString()
timespan := r.Configuration.Timespan.ValueString()
@@ -41,11 +40,16 @@ func (r *SourcePolygonStockAPIResourceModel) ToCreateSDKType() *shared.SourcePol
Limit: limit,
Multiplier: multiplier,
Sort: sort,
- SourceType: sourceType,
StartDate: startDate,
StocksTicker: stocksTicker,
Timespan: timespan,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -56,6 +60,7 @@ func (r *SourcePolygonStockAPIResourceModel) ToCreateSDKType() *shared.SourcePol
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePolygonStockAPICreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_postgres_data_source.go b/internal/provider/source_postgres_data_source.go
old mode 100755
new mode 100644
index 33d3caf26..712991c85
--- a/internal/provider/source_postgres_data_source.go
+++ b/internal/provider/source_postgres_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourcePostgresDataSource struct {
// SourcePostgresDataSourceModel describes the data model.
type SourcePostgresDataSourceModel struct {
- Configuration SourcePostgres1 `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,753 +47,20 @@ func (r *SourcePostgresDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourcePostgres DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `Name of the database.`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the database.`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `Port of the database.`,
- },
- "replication_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_postgres_update_method_detect_changes_with_xmin_system_column": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Xmin",
- ),
- },
- Description: `must be one of ["Xmin"]`,
- },
- },
- Description: `Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB.`,
- },
- "source_postgres_update_method_read_changes_using_write_ahead_log_cdc": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "initial_waiting_seconds": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "lsn_commit_behaviour": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "While reading Data",
- "After loading Data in the destination",
- ),
- },
- MarkdownDescription: `must be one of ["While reading Data", "After loading Data in the destination"]` + "\n" +
- `Determines when Airbtye should flush the LSN of processed WAL logs in the source database. ` + "`" + `After loading Data in the destination` + "`" + ` is default. If ` + "`" + `While reading Data` + "`" + ` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "plugin": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pgoutput",
- ),
- },
- MarkdownDescription: `must be one of ["pgoutput"]` + "\n" +
- `A logical decoding plugin installed on the PostgreSQL server.`,
- },
- "publication": schema.StringAttribute{
- Computed: true,
- Description: `A Postgres publication used for consuming changes. Read about publications and replication identities.`,
- },
- "queue_size": schema.Int64Attribute{
- Computed: true,
- Description: `The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.`,
- },
- "replication_slot": schema.StringAttribute{
- Computed: true,
- Description: `A plugin logical replication slot. Read about replication slots.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.`,
- },
- "source_postgres_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
- },
- "source_postgres_update_update_method_detect_changes_with_xmin_system_column": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Xmin",
- ),
- },
- Description: `must be one of ["Xmin"]`,
- },
- },
- Description: `Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB.`,
- },
- "source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "initial_waiting_seconds": schema.Int64Attribute{
- Computed: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "lsn_commit_behaviour": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "While reading Data",
- "After loading Data in the destination",
- ),
- },
- MarkdownDescription: `must be one of ["While reading Data", "After loading Data in the destination"]` + "\n" +
- `Determines when Airbtye should flush the LSN of processed WAL logs in the source database. ` + "`" + `After loading Data in the destination` + "`" + ` is default. If ` + "`" + `While reading Data` + "`" + ` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.`,
- },
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "plugin": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pgoutput",
- ),
- },
- MarkdownDescription: `must be one of ["pgoutput"]` + "\n" +
- `A logical decoding plugin installed on the PostgreSQL server.`,
- },
- "publication": schema.StringAttribute{
- Computed: true,
- Description: `A Postgres publication used for consuming changes. Read about publications and replication identities.`,
- },
- "queue_size": schema.Int64Attribute{
- Computed: true,
- Description: `The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.`,
- },
- "replication_slot": schema.StringAttribute{
- Computed: true,
- Description: `A plugin logical replication slot. Read about replication slots.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.`,
- },
- "source_postgres_update_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Configures how data is extracted from the database.`,
- },
- "schemas": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The list of schemas (case sensitive) to sync from. Defaults to public.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "postgres",
- ),
- },
- Description: `must be one of ["postgres"]`,
- },
- "ssl_mode": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_postgres_ssl_modes_allow": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Enables encryption only when required by the source database.`,
- },
- "source_postgres_ssl_modes_disable": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Disables encryption of communication between Airbyte and source database.`,
- },
- "source_postgres_ssl_modes_prefer": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Allows unencrypted connection only if the source database does not support encryption.`,
- },
- "source_postgres_ssl_modes_require": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Always require encryption. If the source database server does not support encryption, connection will fail.`,
- },
- "source_postgres_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Always require encryption and verifies that the source database server has a valid SSL certificate.`,
- },
- "source_postgres_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `This is the most secure mode. Always require encryption and verifies the identity of the source database server.`,
- },
- "source_postgres_update_ssl_modes_allow": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Enables encryption only when required by the source database.`,
- },
- "source_postgres_update_ssl_modes_disable": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Disables encryption of communication between Airbyte and source database.`,
- },
- "source_postgres_update_ssl_modes_prefer": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Allows unencrypted connection only if the source database does not support encryption.`,
- },
- "source_postgres_update_ssl_modes_require": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Always require encryption. If the source database server does not support encryption, connection will fail.`,
- },
- "source_postgres_update_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Always require encryption and verifies that the source database server has a valid SSL certificate.`,
- },
- "source_postgres_update_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Computed: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Computed: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Computed: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Computed: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `This is the most secure mode. Always require encryption and verifies the identity of the source database server.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- MarkdownDescription: `SSL connection modes. ` + "\n" +
- ` Read more in the docs.`,
- },
- "tunnel_method": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_postgres_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_postgres_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_postgres_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_postgres_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_postgres_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_postgres_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Computed: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Computed: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Computed: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_postgres_data_source_sdk.go b/internal/provider/source_postgres_data_source_sdk.go
old mode 100755
new mode 100644
index debfd269e..061f2f6d8
--- a/internal/provider/source_postgres_data_source_sdk.go
+++ b/internal/provider/source_postgres_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePostgresDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_postgres_resource.go b/internal/provider/source_postgres_resource.go
old mode 100755
new mode 100644
index cd6641c86..7cf1f18f8
--- a/internal/provider/source_postgres_resource.go
+++ b/internal/provider/source_postgres_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourcePostgresResource struct {
// SourcePostgresResourceModel describes the resource data model.
type SourcePostgresResourceModel struct {
Configuration SourcePostgres `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -69,345 +71,155 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password associated with the username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `Port of the database.`,
+ Optional: true,
+ MarkdownDescription: `Default: 5432` + "\n" +
+ `Port of the database.`,
},
"replication_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_postgres_update_method_detect_changes_with_xmin_system_column": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Xmin",
- ),
- },
- Description: `must be one of ["Xmin"]`,
- },
- },
+ "detect_changes_with_xmin_system_column": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB.`,
},
- "source_postgres_update_method_read_changes_using_write_ahead_log_cdc": schema.SingleNestedAttribute{
+ "read_changes_using_write_ahead_log_cdc": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "initial_waiting_seconds": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
- },
- "lsn_commit_behaviour": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "While reading Data",
- "After loading Data in the destination",
- ),
- },
- MarkdownDescription: `must be one of ["While reading Data", "After loading Data in the destination"]` + "\n" +
- `Determines when Airbtye should flush the LSN of processed WAL logs in the source database. ` + "`" + `After loading Data in the destination` + "`" + ` is default. If ` + "`" + `While reading Data` + "`" + ` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
- },
- "plugin": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pgoutput",
- ),
- },
- MarkdownDescription: `must be one of ["pgoutput"]` + "\n" +
- `A logical decoding plugin installed on the PostgreSQL server.`,
- },
- "publication": schema.StringAttribute{
- Required: true,
- Description: `A Postgres publication used for consuming changes. Read about publications and replication identities.`,
- },
- "queue_size": schema.Int64Attribute{
- Optional: true,
- Description: `The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.`,
- },
- "replication_slot": schema.StringAttribute{
- Required: true,
- Description: `A plugin logical replication slot. Read about replication slots.`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
+ Optional: true,
Description: `Parsed as JSON.`,
- },
- },
- Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.`,
- },
- "source_postgres_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
- Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
- },
- "source_postgres_update_update_method_detect_changes_with_xmin_system_column": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Xmin",
- ),
+ validators.IsValidJSON(),
},
- Description: `must be one of ["Xmin"]`,
},
- },
- Description: `Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB.`,
- },
- "source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
"initial_waiting_seconds": schema.Int64Attribute{
- Optional: true,
- Description: `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
+ Optional: true,
+ MarkdownDescription: `Default: 300` + "\n" +
+ `The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.`,
},
"lsn_commit_behaviour": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["While reading Data", "After loading Data in the destination"]; Default: "After loading Data in the destination"` + "\n" +
+ `Determines when Airbyte should flush the LSN of processed WAL logs in the source database. ` + "`" + `After loading Data in the destination` + "`" + ` is default. If ` + "`" + `While reading Data` + "`" + ` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.`,
Validators: []validator.String{
stringvalidator.OneOf(
"While reading Data",
"After loading Data in the destination",
),
},
- MarkdownDescription: `must be one of ["While reading Data", "After loading Data in the destination"]` + "\n" +
- `Determines when Airbtye should flush the LSN of processed WAL logs in the source database. ` + "`" + `After loading Data in the destination` + "`" + ` is default. If ` + "`" + `While reading Data` + "`" + ` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.`,
- },
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "CDC",
- ),
- },
- Description: `must be one of ["CDC"]`,
},
"plugin": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["pgoutput"]; Default: "pgoutput"` + "\n" +
+ `A logical decoding plugin installed on the PostgreSQL server.`,
Validators: []validator.String{
stringvalidator.OneOf(
"pgoutput",
),
},
- MarkdownDescription: `must be one of ["pgoutput"]` + "\n" +
- `A logical decoding plugin installed on the PostgreSQL server.`,
},
"publication": schema.StringAttribute{
Required: true,
Description: `A Postgres publication used for consuming changes. Read about publications and replication identities.`,
},
"queue_size": schema.Int64Attribute{
- Optional: true,
- Description: `The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.`,
+ Optional: true,
+ MarkdownDescription: `Default: 10000` + "\n" +
+ `The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.`,
},
"replication_slot": schema.StringAttribute{
Required: true,
Description: `A plugin logical replication slot. Read about replication slots.`,
},
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.`,
},
- "source_postgres_update_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Standard",
- ),
- },
- Description: `must be one of ["Standard"]`,
- },
- },
+ "scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`,
},
},
+ Description: `Configures how data is extracted from the database.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Configures how data is extracted from the database.`,
},
"schemas": schema.ListAttribute{
Optional: true,
ElementType: types.StringType,
Description: `The list of schemas (case sensitive) to sync from. Defaults to public.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "postgres",
- ),
- },
- Description: `must be one of ["postgres"]`,
- },
"ssl_mode": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_postgres_ssl_modes_allow": schema.SingleNestedAttribute{
+ "allow": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
},
Description: `Enables encryption only when required by the source database.`,
},
- "source_postgres_ssl_modes_disable": schema.SingleNestedAttribute{
+ "disable": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
},
Description: `Disables encryption of communication between Airbyte and source database.`,
},
- "source_postgres_ssl_modes_prefer": schema.SingleNestedAttribute{
+ "prefer": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
},
Description: `Allows unencrypted connection only if the source database does not support encryption.`,
},
- "source_postgres_ssl_modes_require": schema.SingleNestedAttribute{
+ "require": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
},
Description: `Always require encryption. If the source database server does not support encryption, connection will fail.`,
},
- "source_postgres_ssl_modes_verify_ca": schema.SingleNestedAttribute{
+ "verify_ca": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Required: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Optional: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
- Optional: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Optional: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
- },
- Description: `Always require encryption and verifies that the source database server has a valid SSL certificate.`,
- },
- "source_postgres_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
"ca_certificate": schema.StringAttribute{
Required: true,
Description: `CA certificate`,
@@ -418,160 +230,27 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema
},
"client_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Client key`,
},
"client_key_password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
},
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
- Description: `This is the most secure mode. Always require encryption and verifies the identity of the source database server.`,
- },
- "source_postgres_update_ssl_modes_allow": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "allow",
- ),
- },
- Description: `must be one of ["allow"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Enables encryption only when required by the source database.`,
- },
- "source_postgres_update_ssl_modes_disable": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "disable",
- ),
- },
- Description: `must be one of ["disable"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Disables encryption of communication between Airbyte and source database.`,
- },
- "source_postgres_update_ssl_modes_prefer": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prefer",
- ),
- },
- Description: `must be one of ["prefer"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Allows unencrypted connection only if the source database does not support encryption.`,
+ Description: `Always require encryption and verifies that the source database server has a valid SSL certificate.`,
},
- "source_postgres_update_ssl_modes_require": schema.SingleNestedAttribute{
+ "verify_full": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "require",
- ),
- },
- Description: `must be one of ["require"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Always require encryption. If the source database server does not support encryption, connection will fail.`,
- },
- "source_postgres_update_ssl_modes_verify_ca": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ca_certificate": schema.StringAttribute{
- Required: true,
- Description: `CA certificate`,
- },
- "client_certificate": schema.StringAttribute{
- Optional: true,
- Description: `Client certificate`,
- },
- "client_key": schema.StringAttribute{
Optional: true,
- Description: `Client key`,
- },
- "client_key_password": schema.StringAttribute{
- Optional: true,
- Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
- },
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-ca",
- ),
- },
- Description: `must be one of ["verify-ca"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
- },
- Description: `Always require encryption and verifies that the source database server has a valid SSL certificate.`,
- },
- "source_postgres_update_ssl_modes_verify_full": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
"ca_certificate": schema.StringAttribute{
Required: true,
Description: `CA certificate`,
@@ -582,157 +261,43 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema
},
"client_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Client key`,
},
"client_key_password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Password for keystorage. If you do not add it - the password will be generated automatically.`,
},
- "mode": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "verify-full",
- ),
- },
- Description: `must be one of ["verify-full"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `This is the most secure mode. Always require encryption and verifies the identity of the source database server.`,
},
},
+ MarkdownDescription: `SSL connection modes. ` + "\n" +
+ ` Read more in the docs.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- MarkdownDescription: `SSL connection modes. ` + "\n" +
- ` Read more in the docs.`,
},
"tunnel_method": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_postgres_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
+ "no_tunnel": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_postgres_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host`,
- },
- "tunnel_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_postgres_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
- },
- "tunnel_user": schema.StringAttribute{
- Required: true,
- Description: `OS-level username for logging into the jump server host.`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_postgres_update_ssh_tunnel_method_no_tunnel": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "NO_TUNNEL",
- ),
- },
- MarkdownDescription: `must be one of ["NO_TUNNEL"]` + "\n" +
- `No ssh tunnel needed to connect to database`,
- },
- },
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
- },
- "source_postgres_update_ssh_tunnel_method_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "tunnel_host": schema.StringAttribute{
- Required: true,
- Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
- },
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and password authentication`,
- },
- "tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -740,35 +305,28 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema
},
"tunnel_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
- "source_postgres_update_ssh_tunnel_method_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
"tunnel_host": schema.StringAttribute{
Required: true,
Description: `Hostname of the jump server host that allows inbound ssh tunnel.`,
},
- "tunnel_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through a jump server tunnel host using username and ssh key`,
- },
"tunnel_port": schema.Int64Attribute{
- Required: true,
- Description: `Port on the proxy/jump server that accepts inbound ssh connections.`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `Port on the proxy/jump server that accepts inbound ssh connections.`,
},
"tunnel_user": schema.StringAttribute{
Required: true,
@@ -778,10 +336,10 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema
Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
},
+ Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.`,
},
"username": schema.StringAttribute{
Required: true,
@@ -789,13 +347,24 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -859,7 +428,7 @@ func (r *SourcePostgresResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePostgres(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -1035,5 +604,5 @@ func (r *SourcePostgresResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourcePostgresResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_postgres_resource_sdk.go b/internal/provider/source_postgres_resource_sdk.go
old mode 100755
new mode 100644
index 1623a5ac9..9e1bfc754
--- a/internal/provider/source_postgres_resource_sdk.go
+++ b/internal/provider/source_postgres_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
"encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -23,80 +23,77 @@ func (r *SourcePostgresResourceModel) ToCreateSDKType() *shared.SourcePostgresCr
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var replicationMethod *shared.SourcePostgresUpdateMethod
if r.Configuration.ReplicationMethod != nil {
- var sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC *shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC
- if r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC != nil {
+ var sourcePostgresReadChangesUsingWriteAheadLogCDC *shared.SourcePostgresReadChangesUsingWriteAheadLogCDC
+ if r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC != nil {
+ var additionalProperties interface{}
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.AdditionalProperties.ValueString()), &additionalProperties)
+ }
initialWaitingSeconds := new(int64)
- if !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsNull() {
- *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.ValueInt64()
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsNull() {
+ *initialWaitingSeconds = r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.ValueInt64()
} else {
initialWaitingSeconds = nil
}
- lsnCommitBehaviour := new(shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour)
- if !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsNull() {
- *lsnCommitBehaviour = shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.ValueString())
+ lsnCommitBehaviour := new(shared.SourcePostgresLSNCommitBehaviour)
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsNull() {
+ *lsnCommitBehaviour = shared.SourcePostgresLSNCommitBehaviour(r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.ValueString())
} else {
lsnCommitBehaviour = nil
}
- method := shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.Method.ValueString())
- plugin := new(shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin)
- if !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.IsNull() {
- *plugin = shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.ValueString())
+ plugin := new(shared.SourcePostgresPlugin)
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.Plugin.IsNull() {
+ *plugin = shared.SourcePostgresPlugin(r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.Plugin.ValueString())
} else {
plugin = nil
}
- publication := r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.Publication.ValueString()
+ publication := r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.Publication.ValueString()
queueSize := new(int64)
- if !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.IsNull() {
- *queueSize = r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.ValueInt64()
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.QueueSize.IsNull() {
+ *queueSize = r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.QueueSize.ValueInt64()
} else {
queueSize = nil
}
- replicationSlot := r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.ReplicationSlot.ValueString()
- var additionalProperties interface{}
- if !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.ValueString()), &additionalProperties)
- }
- sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC = &shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC{
+ replicationSlot := r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.ReplicationSlot.ValueString()
+ sourcePostgresReadChangesUsingWriteAheadLogCDC = &shared.SourcePostgresReadChangesUsingWriteAheadLogCDC{
+ AdditionalProperties: additionalProperties,
InitialWaitingSeconds: initialWaitingSeconds,
LsnCommitBehaviour: lsnCommitBehaviour,
- Method: method,
Plugin: plugin,
Publication: publication,
QueueSize: queueSize,
ReplicationSlot: replicationSlot,
- AdditionalProperties: additionalProperties,
}
}
- if sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC != nil {
+ if sourcePostgresReadChangesUsingWriteAheadLogCDC != nil {
replicationMethod = &shared.SourcePostgresUpdateMethod{
- SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC: sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC,
+ SourcePostgresReadChangesUsingWriteAheadLogCDC: sourcePostgresReadChangesUsingWriteAheadLogCDC,
}
}
- var sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn *shared.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn
- if r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn != nil {
- method1 := shared.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn.Method.ValueString())
- sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn = &shared.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn{
- Method: method1,
- }
+ var sourcePostgresDetectChangesWithXminSystemColumn *shared.SourcePostgresDetectChangesWithXminSystemColumn
+ if r.Configuration.ReplicationMethod.DetectChangesWithXminSystemColumn != nil {
+ sourcePostgresDetectChangesWithXminSystemColumn = &shared.SourcePostgresDetectChangesWithXminSystemColumn{}
}
- if sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn != nil {
+ if sourcePostgresDetectChangesWithXminSystemColumn != nil {
replicationMethod = &shared.SourcePostgresUpdateMethod{
- SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn: sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn,
+ SourcePostgresDetectChangesWithXminSystemColumn: sourcePostgresDetectChangesWithXminSystemColumn,
}
}
- var sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor *shared.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor
- if r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor != nil {
- method2 := shared.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor.Method.ValueString())
- sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor = &shared.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor{
- Method: method2,
- }
+ var sourcePostgresScanChangesWithUserDefinedCursor *shared.SourcePostgresScanChangesWithUserDefinedCursor
+ if r.Configuration.ReplicationMethod.ScanChangesWithUserDefinedCursor != nil {
+ sourcePostgresScanChangesWithUserDefinedCursor = &shared.SourcePostgresScanChangesWithUserDefinedCursor{}
}
- if sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor != nil {
+ if sourcePostgresScanChangesWithUserDefinedCursor != nil {
replicationMethod = &shared.SourcePostgresUpdateMethod{
- SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor: sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor,
+ SourcePostgresScanChangesWithUserDefinedCursor: sourcePostgresScanChangesWithUserDefinedCursor,
}
}
}
@@ -104,210 +101,200 @@ func (r *SourcePostgresResourceModel) ToCreateSDKType() *shared.SourcePostgresCr
for _, schemasItem := range r.Configuration.Schemas {
schemas = append(schemas, schemasItem.ValueString())
}
- sourceType := shared.SourcePostgresPostgres(r.Configuration.SourceType.ValueString())
var sslMode *shared.SourcePostgresSSLModes
if r.Configuration.SslMode != nil {
- var sourcePostgresSSLModesDisable *shared.SourcePostgresSSLModesDisable
- if r.Configuration.SslMode.SourcePostgresSSLModesDisable != nil {
- mode := shared.SourcePostgresSSLModesDisableMode(r.Configuration.SslMode.SourcePostgresSSLModesDisable.Mode.ValueString())
+ var sourcePostgresDisable *shared.SourcePostgresDisable
+ if r.Configuration.SslMode.Disable != nil {
var additionalProperties1 interface{}
- if !r.Configuration.SslMode.SourcePostgresSSLModesDisable.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesDisable.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresSSLModesDisable.AdditionalProperties.ValueString()), &additionalProperties1)
+ if !r.Configuration.SslMode.Disable.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Disable.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Disable.AdditionalProperties.ValueString()), &additionalProperties1)
}
- sourcePostgresSSLModesDisable = &shared.SourcePostgresSSLModesDisable{
- Mode: mode,
+ sourcePostgresDisable = &shared.SourcePostgresDisable{
AdditionalProperties: additionalProperties1,
}
}
- if sourcePostgresSSLModesDisable != nil {
+ if sourcePostgresDisable != nil {
sslMode = &shared.SourcePostgresSSLModes{
- SourcePostgresSSLModesDisable: sourcePostgresSSLModesDisable,
+ SourcePostgresDisable: sourcePostgresDisable,
}
}
- var sourcePostgresSSLModesAllow *shared.SourcePostgresSSLModesAllow
- if r.Configuration.SslMode.SourcePostgresSSLModesAllow != nil {
- mode1 := shared.SourcePostgresSSLModesAllowMode(r.Configuration.SslMode.SourcePostgresSSLModesAllow.Mode.ValueString())
+ var sourcePostgresAllow *shared.SourcePostgresAllow
+ if r.Configuration.SslMode.Allow != nil {
var additionalProperties2 interface{}
- if !r.Configuration.SslMode.SourcePostgresSSLModesAllow.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesAllow.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresSSLModesAllow.AdditionalProperties.ValueString()), &additionalProperties2)
+ if !r.Configuration.SslMode.Allow.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Allow.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Allow.AdditionalProperties.ValueString()), &additionalProperties2)
}
- sourcePostgresSSLModesAllow = &shared.SourcePostgresSSLModesAllow{
- Mode: mode1,
+ sourcePostgresAllow = &shared.SourcePostgresAllow{
AdditionalProperties: additionalProperties2,
}
}
- if sourcePostgresSSLModesAllow != nil {
+ if sourcePostgresAllow != nil {
sslMode = &shared.SourcePostgresSSLModes{
- SourcePostgresSSLModesAllow: sourcePostgresSSLModesAllow,
+ SourcePostgresAllow: sourcePostgresAllow,
}
}
- var sourcePostgresSSLModesPrefer *shared.SourcePostgresSSLModesPrefer
- if r.Configuration.SslMode.SourcePostgresSSLModesPrefer != nil {
- mode2 := shared.SourcePostgresSSLModesPreferMode(r.Configuration.SslMode.SourcePostgresSSLModesPrefer.Mode.ValueString())
+ var sourcePostgresPrefer *shared.SourcePostgresPrefer
+ if r.Configuration.SslMode.Prefer != nil {
var additionalProperties3 interface{}
- if !r.Configuration.SslMode.SourcePostgresSSLModesPrefer.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesPrefer.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresSSLModesPrefer.AdditionalProperties.ValueString()), &additionalProperties3)
+ if !r.Configuration.SslMode.Prefer.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Prefer.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Prefer.AdditionalProperties.ValueString()), &additionalProperties3)
}
- sourcePostgresSSLModesPrefer = &shared.SourcePostgresSSLModesPrefer{
- Mode: mode2,
+ sourcePostgresPrefer = &shared.SourcePostgresPrefer{
AdditionalProperties: additionalProperties3,
}
}
- if sourcePostgresSSLModesPrefer != nil {
+ if sourcePostgresPrefer != nil {
sslMode = &shared.SourcePostgresSSLModes{
- SourcePostgresSSLModesPrefer: sourcePostgresSSLModesPrefer,
+ SourcePostgresPrefer: sourcePostgresPrefer,
}
}
- var sourcePostgresSSLModesRequire *shared.SourcePostgresSSLModesRequire
- if r.Configuration.SslMode.SourcePostgresSSLModesRequire != nil {
- mode3 := shared.SourcePostgresSSLModesRequireMode(r.Configuration.SslMode.SourcePostgresSSLModesRequire.Mode.ValueString())
+ var sourcePostgresRequire *shared.SourcePostgresRequire
+ if r.Configuration.SslMode.Require != nil {
var additionalProperties4 interface{}
- if !r.Configuration.SslMode.SourcePostgresSSLModesRequire.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesRequire.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresSSLModesRequire.AdditionalProperties.ValueString()), &additionalProperties4)
+ if !r.Configuration.SslMode.Require.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Require.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Require.AdditionalProperties.ValueString()), &additionalProperties4)
}
- sourcePostgresSSLModesRequire = &shared.SourcePostgresSSLModesRequire{
- Mode: mode3,
+ sourcePostgresRequire = &shared.SourcePostgresRequire{
AdditionalProperties: additionalProperties4,
}
}
- if sourcePostgresSSLModesRequire != nil {
+ if sourcePostgresRequire != nil {
sslMode = &shared.SourcePostgresSSLModes{
- SourcePostgresSSLModesRequire: sourcePostgresSSLModesRequire,
+ SourcePostgresRequire: sourcePostgresRequire,
}
}
- var sourcePostgresSSLModesVerifyCa *shared.SourcePostgresSSLModesVerifyCa
- if r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa != nil {
- caCertificate := r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.CaCertificate.ValueString()
+ var sourcePostgresVerifyCa *shared.SourcePostgresVerifyCa
+ if r.Configuration.SslMode.VerifyCa != nil {
+ var additionalProperties5 interface{}
+ if !r.Configuration.SslMode.VerifyCa.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.VerifyCa.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.VerifyCa.AdditionalProperties.ValueString()), &additionalProperties5)
+ }
+ caCertificate := r.Configuration.SslMode.VerifyCa.CaCertificate.ValueString()
clientCertificate := new(string)
- if !r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.ClientCertificate.IsNull() {
- *clientCertificate = r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientCertificate.IsNull() {
+ *clientCertificate = r.Configuration.SslMode.VerifyCa.ClientCertificate.ValueString()
} else {
clientCertificate = nil
}
clientKey := new(string)
- if !r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.ClientKey.IsNull() {
- *clientKey = r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientKey.IsNull() {
+ *clientKey = r.Configuration.SslMode.VerifyCa.ClientKey.ValueString()
} else {
clientKey = nil
}
clientKeyPassword := new(string)
- if !r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.ClientKeyPassword.IsNull() {
- *clientKeyPassword = r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsNull() {
+ *clientKeyPassword = r.Configuration.SslMode.VerifyCa.ClientKeyPassword.ValueString()
} else {
clientKeyPassword = nil
}
- mode4 := shared.SourcePostgresSSLModesVerifyCaMode(r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.Mode.ValueString())
- var additionalProperties5 interface{}
- if !r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresSSLModesVerifyCa.AdditionalProperties.ValueString()), &additionalProperties5)
- }
- sourcePostgresSSLModesVerifyCa = &shared.SourcePostgresSSLModesVerifyCa{
+ sourcePostgresVerifyCa = &shared.SourcePostgresVerifyCa{
+ AdditionalProperties: additionalProperties5,
CaCertificate: caCertificate,
ClientCertificate: clientCertificate,
ClientKey: clientKey,
ClientKeyPassword: clientKeyPassword,
- Mode: mode4,
- AdditionalProperties: additionalProperties5,
}
}
- if sourcePostgresSSLModesVerifyCa != nil {
+ if sourcePostgresVerifyCa != nil {
sslMode = &shared.SourcePostgresSSLModes{
- SourcePostgresSSLModesVerifyCa: sourcePostgresSSLModesVerifyCa,
+ SourcePostgresVerifyCa: sourcePostgresVerifyCa,
}
}
- var sourcePostgresSSLModesVerifyFull *shared.SourcePostgresSSLModesVerifyFull
- if r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull != nil {
- caCertificate1 := r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.CaCertificate.ValueString()
+ var sourcePostgresVerifyFull *shared.SourcePostgresVerifyFull
+ if r.Configuration.SslMode.VerifyFull != nil {
+ var additionalProperties6 interface{}
+ if !r.Configuration.SslMode.VerifyFull.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.VerifyFull.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.VerifyFull.AdditionalProperties.ValueString()), &additionalProperties6)
+ }
+ caCertificate1 := r.Configuration.SslMode.VerifyFull.CaCertificate.ValueString()
clientCertificate1 := new(string)
- if !r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.ClientCertificate.IsNull() {
- *clientCertificate1 = r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientCertificate.IsNull() {
+ *clientCertificate1 = r.Configuration.SslMode.VerifyFull.ClientCertificate.ValueString()
} else {
clientCertificate1 = nil
}
clientKey1 := new(string)
- if !r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.ClientKey.IsNull() {
- *clientKey1 = r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKey.IsNull() {
+ *clientKey1 = r.Configuration.SslMode.VerifyFull.ClientKey.ValueString()
} else {
clientKey1 = nil
}
clientKeyPassword1 := new(string)
- if !r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.ClientKeyPassword.IsNull() {
- *clientKeyPassword1 = r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsNull() {
+ *clientKeyPassword1 = r.Configuration.SslMode.VerifyFull.ClientKeyPassword.ValueString()
} else {
clientKeyPassword1 = nil
}
- mode5 := shared.SourcePostgresSSLModesVerifyFullMode(r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.Mode.ValueString())
- var additionalProperties6 interface{}
- if !r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresSSLModesVerifyFull.AdditionalProperties.ValueString()), &additionalProperties6)
- }
- sourcePostgresSSLModesVerifyFull = &shared.SourcePostgresSSLModesVerifyFull{
+ sourcePostgresVerifyFull = &shared.SourcePostgresVerifyFull{
+ AdditionalProperties: additionalProperties6,
CaCertificate: caCertificate1,
ClientCertificate: clientCertificate1,
ClientKey: clientKey1,
ClientKeyPassword: clientKeyPassword1,
- Mode: mode5,
- AdditionalProperties: additionalProperties6,
}
}
- if sourcePostgresSSLModesVerifyFull != nil {
+ if sourcePostgresVerifyFull != nil {
sslMode = &shared.SourcePostgresSSLModes{
- SourcePostgresSSLModesVerifyFull: sourcePostgresSSLModesVerifyFull,
+ SourcePostgresVerifyFull: sourcePostgresVerifyFull,
}
}
}
var tunnelMethod *shared.SourcePostgresSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourcePostgresSSHTunnelMethodNoTunnel *shared.SourcePostgresSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourcePostgresSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourcePostgresSSHTunnelMethodNoTunnel = &shared.SourcePostgresSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourcePostgresNoTunnel *shared.SourcePostgresNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourcePostgresNoTunnel = &shared.SourcePostgresNoTunnel{}
}
- if sourcePostgresSSHTunnelMethodNoTunnel != nil {
+ if sourcePostgresNoTunnel != nil {
tunnelMethod = &shared.SourcePostgresSSHTunnelMethod{
- SourcePostgresSSHTunnelMethodNoTunnel: sourcePostgresSSHTunnelMethodNoTunnel,
+ SourcePostgresNoTunnel: sourcePostgresNoTunnel,
}
}
- var sourcePostgresSSHTunnelMethodSSHKeyAuthentication *shared.SourcePostgresSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourcePostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourcePostgresSSHTunnelMethodSSHKeyAuthentication = &shared.SourcePostgresSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourcePostgresSSHKeyAuthentication *shared.SourcePostgresSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourcePostgresSSHKeyAuthentication = &shared.SourcePostgresSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourcePostgresSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourcePostgresSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourcePostgresSSHTunnelMethod{
- SourcePostgresSSHTunnelMethodSSHKeyAuthentication: sourcePostgresSSHTunnelMethodSSHKeyAuthentication,
+ SourcePostgresSSHKeyAuthentication: sourcePostgresSSHKeyAuthentication,
}
}
- var sourcePostgresSSHTunnelMethodPasswordAuthentication *shared.SourcePostgresSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourcePostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourcePostgresSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourcePostgresSSHTunnelMethodPasswordAuthentication = &shared.SourcePostgresSSHTunnelMethodPasswordAuthentication{
+ var sourcePostgresPasswordAuthentication *shared.SourcePostgresPasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourcePostgresPasswordAuthentication = &shared.SourcePostgresPasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourcePostgresSSHTunnelMethodPasswordAuthentication != nil {
+ if sourcePostgresPasswordAuthentication != nil {
tunnelMethod = &shared.SourcePostgresSSHTunnelMethod{
- SourcePostgresSSHTunnelMethodPasswordAuthentication: sourcePostgresSSHTunnelMethodPasswordAuthentication,
+ SourcePostgresPasswordAuthentication: sourcePostgresPasswordAuthentication,
}
}
}
@@ -320,11 +307,16 @@ func (r *SourcePostgresResourceModel) ToCreateSDKType() *shared.SourcePostgresCr
Port: port,
ReplicationMethod: replicationMethod,
Schemas: schemas,
- SourceType: sourceType,
SslMode: sslMode,
TunnelMethod: tunnelMethod,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -335,6 +327,7 @@ func (r *SourcePostgresResourceModel) ToCreateSDKType() *shared.SourcePostgresCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePostgresCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -362,80 +355,77 @@ func (r *SourcePostgresResourceModel) ToUpdateSDKType() *shared.SourcePostgresPu
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var replicationMethod *shared.SourcePostgresUpdateUpdateMethod
if r.Configuration.ReplicationMethod != nil {
- var sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC *shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC
- if r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC != nil {
+ var readChangesUsingWriteAheadLogCDC *shared.ReadChangesUsingWriteAheadLogCDC
+ if r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC != nil {
+ var additionalProperties interface{}
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.AdditionalProperties.ValueString()), &additionalProperties)
+ }
initialWaitingSeconds := new(int64)
- if !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsNull() {
- *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.ValueInt64()
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsNull() {
+ *initialWaitingSeconds = r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.ValueInt64()
} else {
initialWaitingSeconds = nil
}
- lsnCommitBehaviour := new(shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour)
- if !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsNull() {
- *lsnCommitBehaviour = shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.ValueString())
+ lsnCommitBehaviour := new(shared.SourcePostgresUpdateLSNCommitBehaviour)
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsNull() {
+ *lsnCommitBehaviour = shared.SourcePostgresUpdateLSNCommitBehaviour(r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.ValueString())
} else {
lsnCommitBehaviour = nil
}
- method := shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.Method.ValueString())
- plugin := new(shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin)
- if !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.IsNull() {
- *plugin = shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.ValueString())
+ plugin := new(shared.SourcePostgresUpdatePlugin)
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.Plugin.IsNull() {
+ *plugin = shared.SourcePostgresUpdatePlugin(r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.Plugin.ValueString())
} else {
plugin = nil
}
- publication := r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.Publication.ValueString()
+ publication := r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.Publication.ValueString()
queueSize := new(int64)
- if !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.IsNull() {
- *queueSize = r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.ValueInt64()
+ if !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.QueueSize.IsNull() {
+ *queueSize = r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.QueueSize.ValueInt64()
} else {
queueSize = nil
}
- replicationSlot := r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.ReplicationSlot.ValueString()
- var additionalProperties interface{}
- if !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.ValueString()), &additionalProperties)
- }
- sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC = &shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC{
+ replicationSlot := r.Configuration.ReplicationMethod.ReadChangesUsingWriteAheadLogCDC.ReplicationSlot.ValueString()
+ readChangesUsingWriteAheadLogCDC = &shared.ReadChangesUsingWriteAheadLogCDC{
+ AdditionalProperties: additionalProperties,
InitialWaitingSeconds: initialWaitingSeconds,
LsnCommitBehaviour: lsnCommitBehaviour,
- Method: method,
Plugin: plugin,
Publication: publication,
QueueSize: queueSize,
ReplicationSlot: replicationSlot,
- AdditionalProperties: additionalProperties,
}
}
- if sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC != nil {
+ if readChangesUsingWriteAheadLogCDC != nil {
replicationMethod = &shared.SourcePostgresUpdateUpdateMethod{
- SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC: sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC,
+ ReadChangesUsingWriteAheadLogCDC: readChangesUsingWriteAheadLogCDC,
}
}
- var sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn *shared.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn
- if r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn != nil {
- method1 := shared.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn.Method.ValueString())
- sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn = &shared.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn{
- Method: method1,
- }
+ var detectChangesWithXminSystemColumn *shared.DetectChangesWithXminSystemColumn
+ if r.Configuration.ReplicationMethod.DetectChangesWithXminSystemColumn != nil {
+ detectChangesWithXminSystemColumn = &shared.DetectChangesWithXminSystemColumn{}
}
- if sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn != nil {
+ if detectChangesWithXminSystemColumn != nil {
replicationMethod = &shared.SourcePostgresUpdateUpdateMethod{
- SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn: sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn,
+ DetectChangesWithXminSystemColumn: detectChangesWithXminSystemColumn,
}
}
- var sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor *shared.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor
- if r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil {
- method2 := shared.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor.Method.ValueString())
- sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor = &shared.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor{
- Method: method2,
- }
+ var sourcePostgresUpdateScanChangesWithUserDefinedCursor *shared.SourcePostgresUpdateScanChangesWithUserDefinedCursor
+ if r.Configuration.ReplicationMethod.ScanChangesWithUserDefinedCursor != nil {
+ sourcePostgresUpdateScanChangesWithUserDefinedCursor = &shared.SourcePostgresUpdateScanChangesWithUserDefinedCursor{}
}
- if sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil {
+ if sourcePostgresUpdateScanChangesWithUserDefinedCursor != nil {
replicationMethod = &shared.SourcePostgresUpdateUpdateMethod{
- SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor: sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor,
+ SourcePostgresUpdateScanChangesWithUserDefinedCursor: sourcePostgresUpdateScanChangesWithUserDefinedCursor,
}
}
}
@@ -445,207 +435,198 @@ func (r *SourcePostgresResourceModel) ToUpdateSDKType() *shared.SourcePostgresPu
}
var sslMode *shared.SourcePostgresUpdateSSLModes
if r.Configuration.SslMode != nil {
- var sourcePostgresUpdateSSLModesDisable *shared.SourcePostgresUpdateSSLModesDisable
- if r.Configuration.SslMode.SourcePostgresUpdateSSLModesDisable != nil {
- mode := shared.SourcePostgresUpdateSSLModesDisableMode(r.Configuration.SslMode.SourcePostgresUpdateSSLModesDisable.Mode.ValueString())
+ var sourcePostgresUpdateDisable *shared.SourcePostgresUpdateDisable
+ if r.Configuration.SslMode.Disable != nil {
var additionalProperties1 interface{}
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesDisable.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesDisable.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresUpdateSSLModesDisable.AdditionalProperties.ValueString()), &additionalProperties1)
+ if !r.Configuration.SslMode.Disable.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Disable.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Disable.AdditionalProperties.ValueString()), &additionalProperties1)
}
- sourcePostgresUpdateSSLModesDisable = &shared.SourcePostgresUpdateSSLModesDisable{
- Mode: mode,
+ sourcePostgresUpdateDisable = &shared.SourcePostgresUpdateDisable{
AdditionalProperties: additionalProperties1,
}
}
- if sourcePostgresUpdateSSLModesDisable != nil {
+ if sourcePostgresUpdateDisable != nil {
sslMode = &shared.SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesDisable: sourcePostgresUpdateSSLModesDisable,
+ SourcePostgresUpdateDisable: sourcePostgresUpdateDisable,
}
}
- var sourcePostgresUpdateSSLModesAllow *shared.SourcePostgresUpdateSSLModesAllow
- if r.Configuration.SslMode.SourcePostgresUpdateSSLModesAllow != nil {
- mode1 := shared.SourcePostgresUpdateSSLModesAllowMode(r.Configuration.SslMode.SourcePostgresUpdateSSLModesAllow.Mode.ValueString())
+ var sourcePostgresUpdateAllow *shared.SourcePostgresUpdateAllow
+ if r.Configuration.SslMode.Allow != nil {
var additionalProperties2 interface{}
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesAllow.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesAllow.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresUpdateSSLModesAllow.AdditionalProperties.ValueString()), &additionalProperties2)
+ if !r.Configuration.SslMode.Allow.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Allow.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Allow.AdditionalProperties.ValueString()), &additionalProperties2)
}
- sourcePostgresUpdateSSLModesAllow = &shared.SourcePostgresUpdateSSLModesAllow{
- Mode: mode1,
+ sourcePostgresUpdateAllow = &shared.SourcePostgresUpdateAllow{
AdditionalProperties: additionalProperties2,
}
}
- if sourcePostgresUpdateSSLModesAllow != nil {
+ if sourcePostgresUpdateAllow != nil {
sslMode = &shared.SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesAllow: sourcePostgresUpdateSSLModesAllow,
+ SourcePostgresUpdateAllow: sourcePostgresUpdateAllow,
}
}
- var sourcePostgresUpdateSSLModesPrefer *shared.SourcePostgresUpdateSSLModesPrefer
- if r.Configuration.SslMode.SourcePostgresUpdateSSLModesPrefer != nil {
- mode2 := shared.SourcePostgresUpdateSSLModesPreferMode(r.Configuration.SslMode.SourcePostgresUpdateSSLModesPrefer.Mode.ValueString())
+ var sourcePostgresUpdatePrefer *shared.SourcePostgresUpdatePrefer
+ if r.Configuration.SslMode.Prefer != nil {
var additionalProperties3 interface{}
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesPrefer.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesPrefer.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresUpdateSSLModesPrefer.AdditionalProperties.ValueString()), &additionalProperties3)
+ if !r.Configuration.SslMode.Prefer.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Prefer.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Prefer.AdditionalProperties.ValueString()), &additionalProperties3)
}
- sourcePostgresUpdateSSLModesPrefer = &shared.SourcePostgresUpdateSSLModesPrefer{
- Mode: mode2,
+ sourcePostgresUpdatePrefer = &shared.SourcePostgresUpdatePrefer{
AdditionalProperties: additionalProperties3,
}
}
- if sourcePostgresUpdateSSLModesPrefer != nil {
+ if sourcePostgresUpdatePrefer != nil {
sslMode = &shared.SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesPrefer: sourcePostgresUpdateSSLModesPrefer,
+ SourcePostgresUpdatePrefer: sourcePostgresUpdatePrefer,
}
}
- var sourcePostgresUpdateSSLModesRequire *shared.SourcePostgresUpdateSSLModesRequire
- if r.Configuration.SslMode.SourcePostgresUpdateSSLModesRequire != nil {
- mode3 := shared.SourcePostgresUpdateSSLModesRequireMode(r.Configuration.SslMode.SourcePostgresUpdateSSLModesRequire.Mode.ValueString())
+ var sourcePostgresUpdateRequire *shared.SourcePostgresUpdateRequire
+ if r.Configuration.SslMode.Require != nil {
var additionalProperties4 interface{}
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesRequire.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesRequire.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresUpdateSSLModesRequire.AdditionalProperties.ValueString()), &additionalProperties4)
+ if !r.Configuration.SslMode.Require.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.Require.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.Require.AdditionalProperties.ValueString()), &additionalProperties4)
}
- sourcePostgresUpdateSSLModesRequire = &shared.SourcePostgresUpdateSSLModesRequire{
- Mode: mode3,
+ sourcePostgresUpdateRequire = &shared.SourcePostgresUpdateRequire{
AdditionalProperties: additionalProperties4,
}
}
- if sourcePostgresUpdateSSLModesRequire != nil {
+ if sourcePostgresUpdateRequire != nil {
sslMode = &shared.SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesRequire: sourcePostgresUpdateSSLModesRequire,
+ SourcePostgresUpdateRequire: sourcePostgresUpdateRequire,
}
}
- var sourcePostgresUpdateSSLModesVerifyCa *shared.SourcePostgresUpdateSSLModesVerifyCa
- if r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa != nil {
- caCertificate := r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.CaCertificate.ValueString()
+ var sourcePostgresUpdateVerifyCa *shared.SourcePostgresUpdateVerifyCa
+ if r.Configuration.SslMode.VerifyCa != nil {
+ var additionalProperties5 interface{}
+ if !r.Configuration.SslMode.VerifyCa.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.VerifyCa.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.VerifyCa.AdditionalProperties.ValueString()), &additionalProperties5)
+ }
+ caCertificate := r.Configuration.SslMode.VerifyCa.CaCertificate.ValueString()
clientCertificate := new(string)
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.ClientCertificate.IsNull() {
- *clientCertificate = r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientCertificate.IsNull() {
+ *clientCertificate = r.Configuration.SslMode.VerifyCa.ClientCertificate.ValueString()
} else {
clientCertificate = nil
}
clientKey := new(string)
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.ClientKey.IsNull() {
- *clientKey = r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientKey.IsNull() {
+ *clientKey = r.Configuration.SslMode.VerifyCa.ClientKey.ValueString()
} else {
clientKey = nil
}
clientKeyPassword := new(string)
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.ClientKeyPassword.IsNull() {
- *clientKeyPassword = r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyCa.ClientKeyPassword.IsNull() {
+ *clientKeyPassword = r.Configuration.SslMode.VerifyCa.ClientKeyPassword.ValueString()
} else {
clientKeyPassword = nil
}
- mode4 := shared.SourcePostgresUpdateSSLModesVerifyCaMode(r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.Mode.ValueString())
- var additionalProperties5 interface{}
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyCa.AdditionalProperties.ValueString()), &additionalProperties5)
- }
- sourcePostgresUpdateSSLModesVerifyCa = &shared.SourcePostgresUpdateSSLModesVerifyCa{
+ sourcePostgresUpdateVerifyCa = &shared.SourcePostgresUpdateVerifyCa{
+ AdditionalProperties: additionalProperties5,
CaCertificate: caCertificate,
ClientCertificate: clientCertificate,
ClientKey: clientKey,
ClientKeyPassword: clientKeyPassword,
- Mode: mode4,
- AdditionalProperties: additionalProperties5,
}
}
- if sourcePostgresUpdateSSLModesVerifyCa != nil {
+ if sourcePostgresUpdateVerifyCa != nil {
sslMode = &shared.SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesVerifyCa: sourcePostgresUpdateSSLModesVerifyCa,
+ SourcePostgresUpdateVerifyCa: sourcePostgresUpdateVerifyCa,
}
}
- var sourcePostgresUpdateSSLModesVerifyFull *shared.SourcePostgresUpdateSSLModesVerifyFull
- if r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull != nil {
- caCertificate1 := r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.CaCertificate.ValueString()
+ var sourcePostgresUpdateVerifyFull *shared.SourcePostgresUpdateVerifyFull
+ if r.Configuration.SslMode.VerifyFull != nil {
+ var additionalProperties6 interface{}
+ if !r.Configuration.SslMode.VerifyFull.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.VerifyFull.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.SslMode.VerifyFull.AdditionalProperties.ValueString()), &additionalProperties6)
+ }
+ caCertificate1 := r.Configuration.SslMode.VerifyFull.CaCertificate.ValueString()
clientCertificate1 := new(string)
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.ClientCertificate.IsNull() {
- *clientCertificate1 = r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.ClientCertificate.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientCertificate.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientCertificate.IsNull() {
+ *clientCertificate1 = r.Configuration.SslMode.VerifyFull.ClientCertificate.ValueString()
} else {
clientCertificate1 = nil
}
clientKey1 := new(string)
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.ClientKey.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.ClientKey.IsNull() {
- *clientKey1 = r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.ClientKey.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKey.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKey.IsNull() {
+ *clientKey1 = r.Configuration.SslMode.VerifyFull.ClientKey.ValueString()
} else {
clientKey1 = nil
}
clientKeyPassword1 := new(string)
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.ClientKeyPassword.IsNull() {
- *clientKeyPassword1 = r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.ClientKeyPassword.ValueString()
+ if !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsUnknown() && !r.Configuration.SslMode.VerifyFull.ClientKeyPassword.IsNull() {
+ *clientKeyPassword1 = r.Configuration.SslMode.VerifyFull.ClientKeyPassword.ValueString()
} else {
clientKeyPassword1 = nil
}
- mode5 := shared.SourcePostgresUpdateSSLModesVerifyFullMode(r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.Mode.ValueString())
- var additionalProperties6 interface{}
- if !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.AdditionalProperties.IsUnknown() && !r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.SslMode.SourcePostgresUpdateSSLModesVerifyFull.AdditionalProperties.ValueString()), &additionalProperties6)
- }
- sourcePostgresUpdateSSLModesVerifyFull = &shared.SourcePostgresUpdateSSLModesVerifyFull{
+ sourcePostgresUpdateVerifyFull = &shared.SourcePostgresUpdateVerifyFull{
+ AdditionalProperties: additionalProperties6,
CaCertificate: caCertificate1,
ClientCertificate: clientCertificate1,
ClientKey: clientKey1,
ClientKeyPassword: clientKeyPassword1,
- Mode: mode5,
- AdditionalProperties: additionalProperties6,
}
}
- if sourcePostgresUpdateSSLModesVerifyFull != nil {
+ if sourcePostgresUpdateVerifyFull != nil {
sslMode = &shared.SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesVerifyFull: sourcePostgresUpdateSSLModesVerifyFull,
+ SourcePostgresUpdateVerifyFull: sourcePostgresUpdateVerifyFull,
}
}
}
var tunnelMethod *shared.SourcePostgresUpdateSSHTunnelMethod
if r.Configuration.TunnelMethod != nil {
- var sourcePostgresUpdateSSHTunnelMethodNoTunnel *shared.SourcePostgresUpdateSSHTunnelMethodNoTunnel
- if r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodNoTunnel != nil {
- tunnelMethod1 := shared.SourcePostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod(r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodNoTunnel.TunnelMethod.ValueString())
- sourcePostgresUpdateSSHTunnelMethodNoTunnel = &shared.SourcePostgresUpdateSSHTunnelMethodNoTunnel{
- TunnelMethod: tunnelMethod1,
- }
+ var sourcePostgresUpdateNoTunnel *shared.SourcePostgresUpdateNoTunnel
+ if r.Configuration.TunnelMethod.NoTunnel != nil {
+ sourcePostgresUpdateNoTunnel = &shared.SourcePostgresUpdateNoTunnel{}
}
- if sourcePostgresUpdateSSHTunnelMethodNoTunnel != nil {
+ if sourcePostgresUpdateNoTunnel != nil {
tunnelMethod = &shared.SourcePostgresUpdateSSHTunnelMethod{
- SourcePostgresUpdateSSHTunnelMethodNoTunnel: sourcePostgresUpdateSSHTunnelMethodNoTunnel,
+ SourcePostgresUpdateNoTunnel: sourcePostgresUpdateNoTunnel,
}
}
- var sourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication *shared.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication
- if r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- sshKey := r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication.SSHKey.ValueString()
- tunnelHost := r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelHost.ValueString()
- tunnelMethod2 := shared.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelMethod.ValueString())
- tunnelPort := r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelPort.ValueInt64()
- tunnelUser := r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication.TunnelUser.ValueString()
- sourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication = &shared.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication{
- SSHKey: sshKey,
- TunnelHost: tunnelHost,
- TunnelMethod: tunnelMethod2,
- TunnelPort: tunnelPort,
- TunnelUser: tunnelUser,
+ var sourcePostgresUpdateSSHKeyAuthentication *shared.SourcePostgresUpdateSSHKeyAuthentication
+ if r.Configuration.TunnelMethod.SSHKeyAuthentication != nil {
+ sshKey := r.Configuration.TunnelMethod.SSHKeyAuthentication.SSHKey.ValueString()
+ tunnelHost := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelHost.ValueString()
+ tunnelPort := new(int64)
+ if !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.IsNull() {
+ *tunnelPort = r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort = nil
+ }
+ tunnelUser := r.Configuration.TunnelMethod.SSHKeyAuthentication.TunnelUser.ValueString()
+ sourcePostgresUpdateSSHKeyAuthentication = &shared.SourcePostgresUpdateSSHKeyAuthentication{
+ SSHKey: sshKey,
+ TunnelHost: tunnelHost,
+ TunnelPort: tunnelPort,
+ TunnelUser: tunnelUser,
}
}
- if sourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
+ if sourcePostgresUpdateSSHKeyAuthentication != nil {
tunnelMethod = &shared.SourcePostgresUpdateSSHTunnelMethod{
- SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication: sourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication,
+ SourcePostgresUpdateSSHKeyAuthentication: sourcePostgresUpdateSSHKeyAuthentication,
}
}
- var sourcePostgresUpdateSSHTunnelMethodPasswordAuthentication *shared.SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication
- if r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication != nil {
- tunnelHost1 := r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication.TunnelHost.ValueString()
- tunnelMethod3 := shared.SourcePostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication.TunnelMethod.ValueString())
- tunnelPort1 := r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication.TunnelPort.ValueInt64()
- tunnelUser1 := r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication.TunnelUser.ValueString()
- tunnelUserPassword := r.Configuration.TunnelMethod.SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication.TunnelUserPassword.ValueString()
- sourcePostgresUpdateSSHTunnelMethodPasswordAuthentication = &shared.SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication{
+ var sourcePostgresUpdatePasswordAuthentication *shared.SourcePostgresUpdatePasswordAuthentication
+ if r.Configuration.TunnelMethod.PasswordAuthentication != nil {
+ tunnelHost1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelHost.ValueString()
+ tunnelPort1 := new(int64)
+ if !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsUnknown() && !r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.IsNull() {
+ *tunnelPort1 = r.Configuration.TunnelMethod.PasswordAuthentication.TunnelPort.ValueInt64()
+ } else {
+ tunnelPort1 = nil
+ }
+ tunnelUser1 := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUser.ValueString()
+ tunnelUserPassword := r.Configuration.TunnelMethod.PasswordAuthentication.TunnelUserPassword.ValueString()
+ sourcePostgresUpdatePasswordAuthentication = &shared.SourcePostgresUpdatePasswordAuthentication{
TunnelHost: tunnelHost1,
- TunnelMethod: tunnelMethod3,
TunnelPort: tunnelPort1,
TunnelUser: tunnelUser1,
TunnelUserPassword: tunnelUserPassword,
}
}
- if sourcePostgresUpdateSSHTunnelMethodPasswordAuthentication != nil {
+ if sourcePostgresUpdatePasswordAuthentication != nil {
tunnelMethod = &shared.SourcePostgresUpdateSSHTunnelMethod{
- SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication: sourcePostgresUpdateSSHTunnelMethodPasswordAuthentication,
+ SourcePostgresUpdatePasswordAuthentication: sourcePostgresUpdatePasswordAuthentication,
}
}
}
diff --git a/internal/provider/source_posthog_data_source.go b/internal/provider/source_posthog_data_source.go
old mode 100755
new mode 100644
index 8a2130162..df3f2f1cc
--- a/internal/provider/source_posthog_data_source.go
+++ b/internal/provider/source_posthog_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourcePosthogDataSource struct {
// SourcePosthogDataSourceModel describes the data model.
type SourcePosthogDataSourceModel struct {
- Configuration SourcePosthog `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,49 +47,20 @@ func (r *SourcePosthogDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourcePosthog DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key. See the docs for information on how to generate this key.`,
- },
- "base_url": schema.StringAttribute{
- Computed: true,
- Description: `Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com).`,
- },
- "events_time_step": schema.Int64Attribute{
- Computed: true,
- Description: `Set lower value in case of failing long running sync of events stream.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "posthog",
- ),
- },
- Description: `must be one of ["posthog"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate the data. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_posthog_data_source_sdk.go b/internal/provider/source_posthog_data_source_sdk.go
old mode 100755
new mode 100644
index 3b4b2eb08..686b78a41
--- a/internal/provider/source_posthog_data_source_sdk.go
+++ b/internal/provider/source_posthog_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePosthogDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_posthog_resource.go b/internal/provider/source_posthog_resource.go
old mode 100755
new mode 100644
index 7e7f785d3..e7e411c06
--- a/internal/provider/source_posthog_resource.go
+++ b/internal/provider/source_posthog_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourcePosthogResource struct {
// SourcePosthogResourceModel describes the resource data model.
type SourcePosthogResourceModel struct {
Configuration SourcePosthog `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,41 +58,46 @@ func (r *SourcePosthogResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key. See the docs for information on how to generate this key.`,
},
"base_url": schema.StringAttribute{
- Optional: true,
- Description: `Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com).`,
+ Optional: true,
+ MarkdownDescription: `Default: "https://app.posthog.com"` + "\n" +
+ `Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com).`,
},
"events_time_step": schema.Int64Attribute{
- Optional: true,
- Description: `Set lower value in case of failing long running sync of events stream.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "posthog",
- ),
- },
- Description: `must be one of ["posthog"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 30` + "\n" +
+ `Set lower value in case of failing long running sync of events stream.`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The date from which you'd like to replicate the data. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate the data. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +161,7 @@ func (r *SourcePosthogResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePosthog(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +337,5 @@ func (r *SourcePosthogResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourcePosthogResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_posthog_resource_sdk.go b/internal/provider/source_posthog_resource_sdk.go
old mode 100755
new mode 100644
index 9570e4d14..3a62f95d1
--- a/internal/provider/source_posthog_resource_sdk.go
+++ b/internal/provider/source_posthog_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -22,15 +22,19 @@ func (r *SourcePosthogResourceModel) ToCreateSDKType() *shared.SourcePosthogCrea
} else {
eventsTimeStep = nil
}
- sourceType := shared.SourcePosthogPosthog(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourcePosthog{
APIKey: apiKey,
BaseURL: baseURL,
EventsTimeStep: eventsTimeStep,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -41,6 +45,7 @@ func (r *SourcePosthogResourceModel) ToCreateSDKType() *shared.SourcePosthogCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePosthogCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_postmarkapp_data_source.go b/internal/provider/source_postmarkapp_data_source.go
old mode 100755
new mode 100644
index da92392fe..d4585384f
--- a/internal/provider/source_postmarkapp_data_source.go
+++ b/internal/provider/source_postmarkapp_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourcePostmarkappDataSource struct {
// SourcePostmarkappDataSourceModel describes the data model.
type SourcePostmarkappDataSourceModel struct {
- Configuration SourcePostmarkapp `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourcePostmarkappDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "SourcePostmarkapp DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "postmarkapp",
- ),
- },
- Description: `must be one of ["postmarkapp"]`,
- },
- "x_postmark_account_token": schema.StringAttribute{
- Computed: true,
- Description: `API Key for account`,
- },
- "x_postmark_server_token": schema.StringAttribute{
- Computed: true,
- Description: `API Key for server`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_postmarkapp_data_source_sdk.go b/internal/provider/source_postmarkapp_data_source_sdk.go
old mode 100755
new mode 100644
index c328c51a6..759d9c9fc
--- a/internal/provider/source_postmarkapp_data_source_sdk.go
+++ b/internal/provider/source_postmarkapp_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePostmarkappDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_postmarkapp_resource.go b/internal/provider/source_postmarkapp_resource.go
old mode 100755
new mode 100644
index 52a14ef69..1657c6646
--- a/internal/provider/source_postmarkapp_resource.go
+++ b/internal/provider/source_postmarkapp_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourcePostmarkappResource struct {
// SourcePostmarkappResourceModel describes the resource data model.
type SourcePostmarkappResourceModel struct {
Configuration SourcePostmarkapp `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -54,15 +54,6 @@ func (r *SourcePostmarkappResource) Schema(ctx context.Context, req resource.Sch
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "postmarkapp",
- ),
- },
- Description: `must be one of ["postmarkapp"]`,
- },
"x_postmark_account_token": schema.StringAttribute{
Required: true,
Description: `API Key for account`,
@@ -73,13 +64,24 @@ func (r *SourcePostmarkappResource) Schema(ctx context.Context, req resource.Sch
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +145,7 @@ func (r *SourcePostmarkappResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePostmarkapp(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +321,5 @@ func (r *SourcePostmarkappResource) Delete(ctx context.Context, req resource.Del
}
func (r *SourcePostmarkappResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_postmarkapp_resource_sdk.go b/internal/provider/source_postmarkapp_resource_sdk.go
old mode 100755
new mode 100644
index e7de42c82..c6ad73cb1
--- a/internal/provider/source_postmarkapp_resource_sdk.go
+++ b/internal/provider/source_postmarkapp_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePostmarkappResourceModel) ToCreateSDKType() *shared.SourcePostmarkappCreateRequest {
xPostmarkAccountToken := r.Configuration.XPostmarkAccountToken.ValueString()
xPostmarkServerToken := r.Configuration.XPostmarkServerToken.ValueString()
- sourceType := shared.SourcePostmarkappPostmarkapp(r.Configuration.SourceType.ValueString())
configuration := shared.SourcePostmarkapp{
XPostmarkAccountToken: xPostmarkAccountToken,
XPostmarkServerToken: xPostmarkServerToken,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourcePostmarkappResourceModel) ToCreateSDKType() *shared.SourcePostmar
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePostmarkappCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_prestashop_data_source.go b/internal/provider/source_prestashop_data_source.go
old mode 100755
new mode 100644
index 452100f85..c3e2532cd
--- a/internal/provider/source_prestashop_data_source.go
+++ b/internal/provider/source_prestashop_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourcePrestashopDataSource struct {
// SourcePrestashopDataSourceModel describes the data model.
type SourcePrestashopDataSourceModel struct {
- Configuration SourcePrestashop `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,45 +47,20 @@ func (r *SourcePrestashopDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourcePrestashop DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_key": schema.StringAttribute{
- Computed: true,
- Description: `Your PrestaShop access key. See the docs for info on how to obtain this.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prestashop",
- ),
- },
- Description: `must be one of ["prestashop"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The Start date in the format YYYY-MM-DD.`,
- },
- "url": schema.StringAttribute{
- Computed: true,
- Description: `Shop URL without trailing slash.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_prestashop_data_source_sdk.go b/internal/provider/source_prestashop_data_source_sdk.go
old mode 100755
new mode 100644
index 68f083566..214ce92bb
--- a/internal/provider/source_prestashop_data_source_sdk.go
+++ b/internal/provider/source_prestashop_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePrestashopDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_prestashop_resource.go b/internal/provider/source_prestashop_resource.go
old mode 100755
new mode 100644
index 51750ca65..a3f67ed4e
--- a/internal/provider/source_prestashop_resource.go
+++ b/internal/provider/source_prestashop_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourcePrestashopResource struct {
// SourcePrestashopResourceModel describes the resource data model.
type SourcePrestashopResourceModel struct {
Configuration SourcePrestashop `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,23 +58,15 @@ func (r *SourcePrestashopResource) Schema(ctx context.Context, req resource.Sche
Attributes: map[string]schema.Attribute{
"access_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your PrestaShop access key. See the docs for info on how to obtain this.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "prestashop",
- ),
- },
- Description: `must be one of ["prestashop"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The Start date in the format YYYY-MM-DD.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The Start date in the format YYYY-MM-DD.`,
},
"url": schema.StringAttribute{
Required: true,
@@ -81,13 +74,24 @@ func (r *SourcePrestashopResource) Schema(ctx context.Context, req resource.Sche
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -151,7 +155,7 @@ func (r *SourcePrestashopResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePrestashop(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -327,5 +331,5 @@ func (r *SourcePrestashopResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourcePrestashopResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_prestashop_resource_sdk.go b/internal/provider/source_prestashop_resource_sdk.go
old mode 100755
new mode 100644
index 2665f23eb..4a0ae5de1
--- a/internal/provider/source_prestashop_resource_sdk.go
+++ b/internal/provider/source_prestashop_resource_sdk.go
@@ -3,21 +3,25 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePrestashopResourceModel) ToCreateSDKType() *shared.SourcePrestashopCreateRequest {
accessKey := r.Configuration.AccessKey.ValueString()
- sourceType := shared.SourcePrestashopPrestashop(r.Configuration.SourceType.ValueString())
startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
url := r.Configuration.URL.ValueString()
configuration := shared.SourcePrestashop{
- AccessKey: accessKey,
- SourceType: sourceType,
- StartDate: startDate,
- URL: url,
+ AccessKey: accessKey,
+ StartDate: startDate,
+ URL: url,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -29,6 +33,7 @@ func (r *SourcePrestashopResourceModel) ToCreateSDKType() *shared.SourcePrestash
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePrestashopCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_punkapi_data_source.go b/internal/provider/source_punkapi_data_source.go
old mode 100755
new mode 100644
index cc68f0888..554f50fc9
--- a/internal/provider/source_punkapi_data_source.go
+++ b/internal/provider/source_punkapi_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourcePunkAPIDataSource struct {
// SourcePunkAPIDataSourceModel describes the data model.
type SourcePunkAPIDataSourceModel struct {
- Configuration SourcePunkAPI `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,42 +47,20 @@ func (r *SourcePunkAPIDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourcePunkAPI DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "brewed_after": schema.StringAttribute{
- Computed: true,
- Description: `To extract specific data with Unique ID`,
- },
- "brewed_before": schema.StringAttribute{
- Computed: true,
- Description: `To extract specific data with Unique ID`,
- },
- "id": schema.StringAttribute{
- Computed: true,
- Description: `To extract specific data with Unique ID`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "punk-api",
- ),
- },
- Description: `must be one of ["punk-api"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_punkapi_data_source_sdk.go b/internal/provider/source_punkapi_data_source_sdk.go
old mode 100755
new mode 100644
index 949c91cee..3d6ab038f
--- a/internal/provider/source_punkapi_data_source_sdk.go
+++ b/internal/provider/source_punkapi_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePunkAPIDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_punkapi_resource.go b/internal/provider/source_punkapi_resource.go
old mode 100755
new mode 100644
index 31cd48560..31613d14b
--- a/internal/provider/source_punkapi_resource.go
+++ b/internal/provider/source_punkapi_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourcePunkAPIResource struct {
// SourcePunkAPIResourceModel describes the resource data model.
type SourcePunkAPIResourceModel struct {
Configuration SourcePunkAPI `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -66,24 +66,26 @@ func (r *SourcePunkAPIResource) Schema(ctx context.Context, req resource.SchemaR
Optional: true,
Description: `To extract specific data with Unique ID`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "punk-api",
- ),
- },
- Description: `must be one of ["punk-api"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +149,7 @@ func (r *SourcePunkAPIResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePunkAPI(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +325,5 @@ func (r *SourcePunkAPIResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourcePunkAPIResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_punkapi_resource_sdk.go b/internal/provider/source_punkapi_resource_sdk.go
old mode 100755
new mode 100644
index 2ed96e868..32c8183ee
--- a/internal/provider/source_punkapi_resource_sdk.go
+++ b/internal/provider/source_punkapi_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -16,12 +16,16 @@ func (r *SourcePunkAPIResourceModel) ToCreateSDKType() *shared.SourcePunkAPICrea
} else {
id = nil
}
- sourceType := shared.SourcePunkAPIPunkAPI(r.Configuration.SourceType.ValueString())
configuration := shared.SourcePunkAPI{
BrewedAfter: brewedAfter,
BrewedBefore: brewedBefore,
ID: id,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -33,6 +37,7 @@ func (r *SourcePunkAPIResourceModel) ToCreateSDKType() *shared.SourcePunkAPICrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePunkAPICreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_pypi_data_source.go b/internal/provider/source_pypi_data_source.go
old mode 100755
new mode 100644
index 6458a6011..0b8931260
--- a/internal/provider/source_pypi_data_source.go
+++ b/internal/provider/source_pypi_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourcePypiDataSource struct {
// SourcePypiDataSourceModel describes the data model.
type SourcePypiDataSourceModel struct {
- Configuration SourcePypi `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,38 +47,20 @@ func (r *SourcePypiDataSource) Schema(ctx context.Context, req datasource.Schema
MarkdownDescription: "SourcePypi DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "project_name": schema.StringAttribute{
- Computed: true,
- Description: `Name of the project/package. Can only be in lowercase with hyphen. This is the name used using pip command for installing the package.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pypi",
- ),
- },
- Description: `must be one of ["pypi"]`,
- },
- "version": schema.StringAttribute{
- Computed: true,
- Description: `Version of the project/package. Use it to find a particular release instead of all releases.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_pypi_data_source_sdk.go b/internal/provider/source_pypi_data_source_sdk.go
old mode 100755
new mode 100644
index 509e6d1a5..cb857ff6a
--- a/internal/provider/source_pypi_data_source_sdk.go
+++ b/internal/provider/source_pypi_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePypiDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_pypi_resource.go b/internal/provider/source_pypi_resource.go
old mode 100755
new mode 100644
index 0d1cc9ca8..ce36a5f0a
--- a/internal/provider/source_pypi_resource.go
+++ b/internal/provider/source_pypi_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourcePypiResource struct {
// SourcePypiResourceModel describes the resource data model.
type SourcePypiResourceModel struct {
Configuration SourcePypi `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,28 +58,30 @@ func (r *SourcePypiResource) Schema(ctx context.Context, req resource.SchemaRequ
Required: true,
Description: `Name of the project/package. Can only be in lowercase with hyphen. This is the name used using pip command for installing the package.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "pypi",
- ),
- },
- Description: `must be one of ["pypi"]`,
- },
"version": schema.StringAttribute{
Optional: true,
Description: `Version of the project/package. Use it to find a particular release instead of all releases.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +145,7 @@ func (r *SourcePypiResource) Create(ctx context.Context, req resource.CreateRequ
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourcePypi(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +321,5 @@ func (r *SourcePypiResource) Delete(ctx context.Context, req resource.DeleteRequ
}
func (r *SourcePypiResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_pypi_resource_sdk.go b/internal/provider/source_pypi_resource_sdk.go
old mode 100755
new mode 100644
index 7b1cde446..208d20c11
--- a/internal/provider/source_pypi_resource_sdk.go
+++ b/internal/provider/source_pypi_resource_sdk.go
@@ -3,13 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourcePypiResourceModel) ToCreateSDKType() *shared.SourcePypiCreateRequest {
projectName := r.Configuration.ProjectName.ValueString()
- sourceType := shared.SourcePypiPypi(r.Configuration.SourceType.ValueString())
version := new(string)
if !r.Configuration.Version.IsUnknown() && !r.Configuration.Version.IsNull() {
*version = r.Configuration.Version.ValueString()
@@ -18,9 +17,14 @@ func (r *SourcePypiResourceModel) ToCreateSDKType() *shared.SourcePypiCreateRequ
}
configuration := shared.SourcePypi{
ProjectName: projectName,
- SourceType: sourceType,
Version: version,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -31,6 +35,7 @@ func (r *SourcePypiResourceModel) ToCreateSDKType() *shared.SourcePypiCreateRequ
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourcePypiCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_qualaroo_data_source.go b/internal/provider/source_qualaroo_data_source.go
old mode 100755
new mode 100644
index 8eb9adaca..f0d3ef642
--- a/internal/provider/source_qualaroo_data_source.go
+++ b/internal/provider/source_qualaroo_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceQualarooDataSource struct {
// SourceQualarooDataSourceModel describes the data model.
type SourceQualarooDataSourceModel struct {
- Configuration SourceQualaroo `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,47 +47,20 @@ func (r *SourceQualarooDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceQualaroo DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "key": schema.StringAttribute{
- Computed: true,
- Description: `A Qualaroo token. See the docs for instructions on how to generate it.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "qualaroo",
- ),
- },
- Description: `must be one of ["qualaroo"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- "survey_ids": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `IDs of the surveys from which you'd like to replicate data. If left empty, data from all surveys to which you have access will be replicated.`,
- },
- "token": schema.StringAttribute{
- Computed: true,
- Description: `A Qualaroo token. See the docs for instructions on how to generate it.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_qualaroo_data_source_sdk.go b/internal/provider/source_qualaroo_data_source_sdk.go
old mode 100755
new mode 100644
index 4b4bc22d4..3a0ab45a3
--- a/internal/provider/source_qualaroo_data_source_sdk.go
+++ b/internal/provider/source_qualaroo_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceQualarooDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_qualaroo_resource.go b/internal/provider/source_qualaroo_resource.go
old mode 100755
new mode 100644
index e8d992c69..1c6cbeac3
--- a/internal/provider/source_qualaroo_resource.go
+++ b/internal/provider/source_qualaroo_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceQualarooResource struct {
// SourceQualarooResourceModel describes the resource data model.
type SourceQualarooResourceModel struct {
Configuration SourceQualaroo `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,17 +56,9 @@ func (r *SourceQualarooResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `A Qualaroo token. See the docs for instructions on how to generate it.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "qualaroo",
- ),
- },
- Description: `must be one of ["qualaroo"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
@@ -78,17 +70,29 @@ func (r *SourceQualarooResource) Schema(ctx context.Context, req resource.Schema
},
"token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `A Qualaroo token. See the docs for instructions on how to generate it.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -152,7 +156,7 @@ func (r *SourceQualarooResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceQualaroo(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -328,5 +332,5 @@ func (r *SourceQualarooResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceQualarooResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_qualaroo_resource_sdk.go b/internal/provider/source_qualaroo_resource_sdk.go
old mode 100755
new mode 100644
index b0c19cbd3..78f544315
--- a/internal/provider/source_qualaroo_resource_sdk.go
+++ b/internal/provider/source_qualaroo_resource_sdk.go
@@ -3,13 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceQualarooResourceModel) ToCreateSDKType() *shared.SourceQualarooCreateRequest {
key := r.Configuration.Key.ValueString()
- sourceType := shared.SourceQualarooQualaroo(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
var surveyIds []string = nil
for _, surveyIdsItem := range r.Configuration.SurveyIds {
@@ -17,11 +16,16 @@ func (r *SourceQualarooResourceModel) ToCreateSDKType() *shared.SourceQualarooCr
}
token := r.Configuration.Token.ValueString()
configuration := shared.SourceQualaroo{
- Key: key,
- SourceType: sourceType,
- StartDate: startDate,
- SurveyIds: surveyIds,
- Token: token,
+ Key: key,
+ StartDate: startDate,
+ SurveyIds: surveyIds,
+ Token: token,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -33,6 +37,7 @@ func (r *SourceQualarooResourceModel) ToCreateSDKType() *shared.SourceQualarooCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceQualarooCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_quickbooks_data_source.go b/internal/provider/source_quickbooks_data_source.go
old mode 100755
new mode 100644
index da5c49976..f2b59f5b2
--- a/internal/provider/source_quickbooks_data_source.go
+++ b/internal/provider/source_quickbooks_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceQuickbooksDataSource struct {
// SourceQuickbooksDataSourceModel describes the data model.
type SourceQuickbooksDataSourceModel struct {
- Configuration SourceQuickbooks `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,131 +47,20 @@ func (r *SourceQuickbooksDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceQuickbooks DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_quickbooks_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access token fot making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: ` Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.`,
- },
- "realm_id": schema.StringAttribute{
- Computed: true,
- Description: `Labeled Company ID. The Make API Calls panel is populated with the realm id and the current access token.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `A token used when refreshing the access token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_quickbooks_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access token fot making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: ` Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.`,
- },
- "realm_id": schema.StringAttribute{
- Computed: true,
- Description: `Labeled Company ID. The Make API Calls panel is populated with the realm id and the current access token.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `A token used when refreshing the access token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "sandbox": schema.BoolAttribute{
- Computed: true,
- Description: `Determines whether to use the sandbox or production environment.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "quickbooks",
- ),
- },
- Description: `must be one of ["quickbooks"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_quickbooks_data_source_sdk.go b/internal/provider/source_quickbooks_data_source_sdk.go
old mode 100755
new mode 100644
index cc933eea7..69d8b0f3a
--- a/internal/provider/source_quickbooks_data_source_sdk.go
+++ b/internal/provider/source_quickbooks_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceQuickbooksDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_quickbooks_resource.go b/internal/provider/source_quickbooks_resource.go
old mode 100755
new mode 100644
index b4f67374b..3d3f44004
--- a/internal/provider/source_quickbooks_resource.go
+++ b/internal/provider/source_quickbooks_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceQuickbooksResource struct {
// SourceQuickbooksResourceModel describes the resource data model.
type SourceQuickbooksResourceModel struct {
Configuration SourceQuickbooks `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,22 +59,14 @@ func (r *SourceQuickbooksResource) Schema(ctx context.Context, req resource.Sche
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_quickbooks_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access token fot making authenticated requests.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.`,
@@ -88,55 +81,16 @@ func (r *SourceQuickbooksResource) Schema(ctx context.Context, req resource.Sche
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `A token used when refreshing the access token.`,
},
"token_expiry_date": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_quickbooks_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
Required: true,
- Description: `Access token fot making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: ` Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.`,
- },
- "realm_id": schema.StringAttribute{
- Required: true,
- Description: `Labeled Company ID. The Make API Calls panel is populated with the realm id and the current access token.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `A token used when refreshing the access token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Required: true,
+ Sensitive: true,
+ Description: `The date-time when the access token should be refreshed.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date-time when the access token should be refreshed.`,
},
},
},
@@ -146,34 +100,37 @@ func (r *SourceQuickbooksResource) Schema(ctx context.Context, req resource.Sche
},
},
"sandbox": schema.BoolAttribute{
- Required: true,
- Description: `Determines whether to use the sandbox or production environment.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "quickbooks",
- ),
- },
- Description: `must be one of ["quickbooks"]`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Determines whether to use the sandbox or production environment.`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -237,7 +194,7 @@ func (r *SourceQuickbooksResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceQuickbooks(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -413,5 +370,5 @@ func (r *SourceQuickbooksResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceQuickbooksResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_quickbooks_resource_sdk.go b/internal/provider/source_quickbooks_resource_sdk.go
old mode 100755
new mode 100644
index 1bc2ad89d..708f94d1e
--- a/internal/provider/source_quickbooks_resource_sdk.go
+++ b/internal/provider/source_quickbooks_resource_sdk.go
@@ -3,30 +3,23 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceQuickbooksResourceModel) ToCreateSDKType() *shared.SourceQuickbooksCreateRequest {
var credentials shared.SourceQuickbooksAuthorizationMethod
- var sourceQuickbooksAuthorizationMethodOAuth20 *shared.SourceQuickbooksAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceQuickbooksAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceQuickbooksAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.SourceQuickbooksAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceQuickbooksAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceQuickbooksAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceQuickbooksAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceQuickbooksAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceQuickbooksAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceQuickbooksAuthorizationMethodOAuth20.ClientSecret.ValueString()
- realmID := r.Configuration.Credentials.SourceQuickbooksAuthorizationMethodOAuth20.RealmID.ValueString()
- refreshToken := r.Configuration.Credentials.SourceQuickbooksAuthorizationMethodOAuth20.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceQuickbooksAuthorizationMethodOAuth20.TokenExpiryDate.ValueString())
- sourceQuickbooksAuthorizationMethodOAuth20 = &shared.SourceQuickbooksAuthorizationMethodOAuth20{
+ var sourceQuickbooksOAuth20 *shared.SourceQuickbooksOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ realmID := r.Configuration.Credentials.OAuth20.RealmID.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
+ sourceQuickbooksOAuth20 = &shared.SourceQuickbooksOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RealmID: realmID,
@@ -34,20 +27,29 @@ func (r *SourceQuickbooksResourceModel) ToCreateSDKType() *shared.SourceQuickboo
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceQuickbooksAuthorizationMethodOAuth20 != nil {
+ if sourceQuickbooksOAuth20 != nil {
credentials = shared.SourceQuickbooksAuthorizationMethod{
- SourceQuickbooksAuthorizationMethodOAuth20: sourceQuickbooksAuthorizationMethodOAuth20,
+ SourceQuickbooksOAuth20: sourceQuickbooksOAuth20,
}
}
- sandbox := r.Configuration.Sandbox.ValueBool()
- sourceType := shared.SourceQuickbooksQuickbooks(r.Configuration.SourceType.ValueString())
+ sandbox := new(bool)
+ if !r.Configuration.Sandbox.IsUnknown() && !r.Configuration.Sandbox.IsNull() {
+ *sandbox = r.Configuration.Sandbox.ValueBool()
+ } else {
+ sandbox = nil
+ }
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceQuickbooks{
Credentials: credentials,
Sandbox: sandbox,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -58,6 +60,7 @@ func (r *SourceQuickbooksResourceModel) ToCreateSDKType() *shared.SourceQuickboo
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceQuickbooksCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -72,23 +75,16 @@ func (r *SourceQuickbooksResourceModel) ToGetSDKType() *shared.SourceQuickbooksC
func (r *SourceQuickbooksResourceModel) ToUpdateSDKType() *shared.SourceQuickbooksPutRequest {
var credentials shared.SourceQuickbooksUpdateAuthorizationMethod
- var sourceQuickbooksUpdateAuthorizationMethodOAuth20 *shared.SourceQuickbooksUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceQuickbooksUpdateAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceQuickbooksUpdateAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.SourceQuickbooksUpdateAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceQuickbooksUpdateAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceQuickbooksUpdateAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceQuickbooksUpdateAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceQuickbooksUpdateAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceQuickbooksUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceQuickbooksUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
- realmID := r.Configuration.Credentials.SourceQuickbooksUpdateAuthorizationMethodOAuth20.RealmID.ValueString()
- refreshToken := r.Configuration.Credentials.SourceQuickbooksUpdateAuthorizationMethodOAuth20.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceQuickbooksUpdateAuthorizationMethodOAuth20.TokenExpiryDate.ValueString())
- sourceQuickbooksUpdateAuthorizationMethodOAuth20 = &shared.SourceQuickbooksUpdateAuthorizationMethodOAuth20{
+ var sourceQuickbooksUpdateOAuth20 *shared.SourceQuickbooksUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ realmID := r.Configuration.Credentials.OAuth20.RealmID.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
+ sourceQuickbooksUpdateOAuth20 = &shared.SourceQuickbooksUpdateOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RealmID: realmID,
@@ -96,12 +92,17 @@ func (r *SourceQuickbooksResourceModel) ToUpdateSDKType() *shared.SourceQuickboo
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceQuickbooksUpdateAuthorizationMethodOAuth20 != nil {
+ if sourceQuickbooksUpdateOAuth20 != nil {
credentials = shared.SourceQuickbooksUpdateAuthorizationMethod{
- SourceQuickbooksUpdateAuthorizationMethodOAuth20: sourceQuickbooksUpdateAuthorizationMethodOAuth20,
+ SourceQuickbooksUpdateOAuth20: sourceQuickbooksUpdateOAuth20,
}
}
- sandbox := r.Configuration.Sandbox.ValueBool()
+ sandbox := new(bool)
+ if !r.Configuration.Sandbox.IsUnknown() && !r.Configuration.Sandbox.IsNull() {
+ *sandbox = r.Configuration.Sandbox.ValueBool()
+ } else {
+ sandbox = nil
+ }
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceQuickbooksUpdate{
Credentials: credentials,
diff --git a/internal/provider/source_railz_data_source.go b/internal/provider/source_railz_data_source.go
old mode 100755
new mode 100644
index e46847d36..de9fa6cea
--- a/internal/provider/source_railz_data_source.go
+++ b/internal/provider/source_railz_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceRailzDataSource struct {
// SourceRailzDataSourceModel describes the data model.
type SourceRailzDataSourceModel struct {
- Configuration SourceRailz `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,42 +47,20 @@ func (r *SourceRailzDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceRailz DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Client ID (client_id)`,
- },
- "secret_key": schema.StringAttribute{
- Computed: true,
- Description: `Secret key (secret_key)`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "railz",
- ),
- },
- Description: `must be one of ["railz"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `Start date`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_railz_data_source_sdk.go b/internal/provider/source_railz_data_source_sdk.go
old mode 100755
new mode 100644
index d9d0fb401..b767f90b4
--- a/internal/provider/source_railz_data_source_sdk.go
+++ b/internal/provider/source_railz_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRailzDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_railz_resource.go b/internal/provider/source_railz_resource.go
old mode 100755
new mode 100644
index 5386c032c..a9385a8de
--- a/internal/provider/source_railz_resource.go
+++ b/internal/provider/source_railz_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceRailzResource struct {
// SourceRailzResourceModel describes the resource data model.
type SourceRailzResourceModel struct {
Configuration SourceRailz `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -60,30 +60,33 @@ func (r *SourceRailzResource) Schema(ctx context.Context, req resource.SchemaReq
},
"secret_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Secret key (secret_key)`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "railz",
- ),
- },
- Description: `must be one of ["railz"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `Start date`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +150,7 @@ func (r *SourceRailzResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceRailz(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +326,5 @@ func (r *SourceRailzResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceRailzResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_railz_resource_sdk.go b/internal/provider/source_railz_resource_sdk.go
old mode 100755
new mode 100644
index 689001b08..c283e5fae
--- a/internal/provider/source_railz_resource_sdk.go
+++ b/internal/provider/source_railz_resource_sdk.go
@@ -3,20 +3,24 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRailzResourceModel) ToCreateSDKType() *shared.SourceRailzCreateRequest {
clientID := r.Configuration.ClientID.ValueString()
secretKey := r.Configuration.SecretKey.ValueString()
- sourceType := shared.SourceRailzRailz(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
configuration := shared.SourceRailz{
- ClientID: clientID,
- SecretKey: secretKey,
- SourceType: sourceType,
- StartDate: startDate,
+ ClientID: clientID,
+ SecretKey: secretKey,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -28,6 +32,7 @@ func (r *SourceRailzResourceModel) ToCreateSDKType() *shared.SourceRailzCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceRailzCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_recharge_data_source.go b/internal/provider/source_recharge_data_source.go
old mode 100755
new mode 100644
index 7899cbb40..f9215bd32
--- a/internal/provider/source_recharge_data_source.go
+++ b/internal/provider/source_recharge_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceRechargeDataSource struct {
// SourceRechargeDataSourceModel describes the data model.
type SourceRechargeDataSourceModel struct {
- Configuration SourceRecharge `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,41 +47,20 @@ func (r *SourceRechargeDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceRecharge DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The value of the Access Token generated. See the docs for more information.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "recharge",
- ),
- },
- Description: `must be one of ["recharge"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for Recharge API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_recharge_data_source_sdk.go b/internal/provider/source_recharge_data_source_sdk.go
old mode 100755
new mode 100644
index 61182bfb4..46632d8d1
--- a/internal/provider/source_recharge_data_source_sdk.go
+++ b/internal/provider/source_recharge_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRechargeDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_recharge_resource.go b/internal/provider/source_recharge_resource.go
old mode 100755
new mode 100644
index c7cae7815..0f5cb093c
--- a/internal/provider/source_recharge_resource.go
+++ b/internal/provider/source_recharge_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceRechargeResource struct {
// SourceRechargeResourceModel describes the resource data model.
type SourceRechargeResourceModel struct {
Configuration SourceRecharge `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,33 +58,36 @@ func (r *SourceRechargeResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The value of the Access Token generated. See the docs for more information.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "recharge",
- ),
- },
- Description: `must be one of ["recharge"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The date from which you'd like to replicate data for Recharge API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for Recharge API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +151,7 @@ func (r *SourceRechargeResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceRecharge(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +327,5 @@ func (r *SourceRechargeResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceRechargeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_recharge_resource_sdk.go b/internal/provider/source_recharge_resource_sdk.go
old mode 100755
new mode 100644
index b598e5ac0..9deca2667
--- a/internal/provider/source_recharge_resource_sdk.go
+++ b/internal/provider/source_recharge_resource_sdk.go
@@ -3,20 +3,24 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceRechargeResourceModel) ToCreateSDKType() *shared.SourceRechargeCreateRequest {
accessToken := r.Configuration.AccessToken.ValueString()
- sourceType := shared.SourceRechargeRecharge(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceRecharge{
AccessToken: accessToken,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -27,6 +31,7 @@ func (r *SourceRechargeResourceModel) ToCreateSDKType() *shared.SourceRechargeCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceRechargeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_recreation_data_source.go b/internal/provider/source_recreation_data_source.go
old mode 100755
new mode 100644
index 29b9b095c..fb61eb44e
--- a/internal/provider/source_recreation_data_source.go
+++ b/internal/provider/source_recreation_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceRecreationDataSource struct {
// SourceRecreationDataSourceModel describes the data model.
type SourceRecreationDataSourceModel struct {
- Configuration SourceRecreation `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,37 +47,20 @@ func (r *SourceRecreationDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceRecreation DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "apikey": schema.StringAttribute{
- Computed: true,
- Description: `API Key`,
- },
- "query_campsites": schema.StringAttribute{
- Computed: true,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "recreation",
- ),
- },
- Description: `must be one of ["recreation"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_recreation_data_source_sdk.go b/internal/provider/source_recreation_data_source_sdk.go
old mode 100755
new mode 100644
index f2cdbdd8a..dc3d823d1
--- a/internal/provider/source_recreation_data_source_sdk.go
+++ b/internal/provider/source_recreation_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRecreationDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_recreation_resource.go b/internal/provider/source_recreation_resource.go
old mode 100755
new mode 100644
index 0f2aeade9..b79eed53a
--- a/internal/provider/source_recreation_resource.go
+++ b/internal/provider/source_recreation_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceRecreationResource struct {
// SourceRecreationResourceModel describes the resource data model.
type SourceRecreationResourceModel struct {
Configuration SourceRecreation `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,29 +56,32 @@ func (r *SourceRecreationResource) Schema(ctx context.Context, req resource.Sche
Attributes: map[string]schema.Attribute{
"apikey": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key`,
},
"query_campsites": schema.StringAttribute{
Optional: true,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "recreation",
- ),
- },
- Description: `must be one of ["recreation"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -142,7 +145,7 @@ func (r *SourceRecreationResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceRecreation(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -318,5 +321,5 @@ func (r *SourceRecreationResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceRecreationResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_recreation_resource_sdk.go b/internal/provider/source_recreation_resource_sdk.go
old mode 100755
new mode 100644
index 861cd563e..e4e323159
--- a/internal/provider/source_recreation_resource_sdk.go
+++ b/internal/provider/source_recreation_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -15,11 +15,15 @@ func (r *SourceRecreationResourceModel) ToCreateSDKType() *shared.SourceRecreati
} else {
queryCampsites = nil
}
- sourceType := shared.SourceRecreationRecreation(r.Configuration.SourceType.ValueString())
configuration := shared.SourceRecreation{
Apikey: apikey,
QueryCampsites: queryCampsites,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -31,6 +35,7 @@ func (r *SourceRecreationResourceModel) ToCreateSDKType() *shared.SourceRecreati
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceRecreationCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_recruitee_data_source.go b/internal/provider/source_recruitee_data_source.go
old mode 100755
new mode 100644
index a4cff712d..9c29777e4
--- a/internal/provider/source_recruitee_data_source.go
+++ b/internal/provider/source_recruitee_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceRecruiteeDataSource struct {
// SourceRecruiteeDataSourceModel describes the data model.
type SourceRecruiteeDataSourceModel struct {
- Configuration SourceRecruitee `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourceRecruiteeDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceRecruitee DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Recruitee API Key. See here.`,
- },
- "company_id": schema.Int64Attribute{
- Computed: true,
- Description: `Recruitee Company ID. You can also find this ID on the Recruitee API tokens page.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "recruitee",
- ),
- },
- Description: `must be one of ["recruitee"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_recruitee_data_source_sdk.go b/internal/provider/source_recruitee_data_source_sdk.go
old mode 100755
new mode 100644
index 3b0f413e6..6bce48a5b
--- a/internal/provider/source_recruitee_data_source_sdk.go
+++ b/internal/provider/source_recruitee_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRecruiteeDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_recruitee_resource.go b/internal/provider/source_recruitee_resource.go
old mode 100755
new mode 100644
index 79f470d3c..169819cb4
--- a/internal/provider/source_recruitee_resource.go
+++ b/internal/provider/source_recruitee_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceRecruiteeResource struct {
// SourceRecruiteeResourceModel describes the resource data model.
type SourceRecruiteeResourceModel struct {
Configuration SourceRecruitee `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,30 +56,33 @@ func (r *SourceRecruiteeResource) Schema(ctx context.Context, req resource.Schem
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Recruitee API Key. See here.`,
},
"company_id": schema.Int64Attribute{
Required: true,
Description: `Recruitee Company ID. You can also find this ID on the Recruitee API tokens page.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "recruitee",
- ),
- },
- Description: `must be one of ["recruitee"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceRecruiteeResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceRecruitee(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceRecruiteeResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceRecruiteeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_recruitee_resource_sdk.go b/internal/provider/source_recruitee_resource_sdk.go
old mode 100755
new mode 100644
index 3b027efac..c6a1f3336
--- a/internal/provider/source_recruitee_resource_sdk.go
+++ b/internal/provider/source_recruitee_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRecruiteeResourceModel) ToCreateSDKType() *shared.SourceRecruiteeCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
companyID := r.Configuration.CompanyID.ValueInt64()
- sourceType := shared.SourceRecruiteeRecruitee(r.Configuration.SourceType.ValueString())
configuration := shared.SourceRecruitee{
- APIKey: apiKey,
- CompanyID: companyID,
- SourceType: sourceType,
+ APIKey: apiKey,
+ CompanyID: companyID,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceRecruiteeResourceModel) ToCreateSDKType() *shared.SourceRecruitee
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceRecruiteeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_recurly_data_source.go b/internal/provider/source_recurly_data_source.go
old mode 100755
new mode 100644
index 6878dfcea..4025ec8ea
--- a/internal/provider/source_recurly_data_source.go
+++ b/internal/provider/source_recurly_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceRecurlyDataSource struct {
// SourceRecurlyDataSourceModel describes the data model.
type SourceRecurlyDataSourceModel struct {
- Configuration SourceRecurly `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,42 +47,20 @@ func (r *SourceRecurlyDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceRecurly DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Recurly API Key. See the docs for more information on how to generate this key.`,
- },
- "begin_time": schema.StringAttribute{
- Computed: true,
- Description: `ISO8601 timestamp from which the replication from Recurly API will start from.`,
- },
- "end_time": schema.StringAttribute{
- Computed: true,
- Description: `ISO8601 timestamp to which the replication from Recurly API will stop. Records after that date won't be imported.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "recurly",
- ),
- },
- Description: `must be one of ["recurly"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_recurly_data_source_sdk.go b/internal/provider/source_recurly_data_source_sdk.go
old mode 100755
new mode 100644
index 361388677..4abcf5d5e
--- a/internal/provider/source_recurly_data_source_sdk.go
+++ b/internal/provider/source_recurly_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRecurlyDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_recurly_resource.go b/internal/provider/source_recurly_resource.go
old mode 100755
new mode 100644
index 9426f194c..943477c73
--- a/internal/provider/source_recurly_resource.go
+++ b/internal/provider/source_recurly_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceRecurlyResource struct {
// SourceRecurlyResourceModel describes the resource data model.
type SourceRecurlyResourceModel struct {
Configuration SourceRecurly `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,6 +56,7 @@ func (r *SourceRecurlyResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Recurly API Key. See the docs for more information on how to generate this key.`,
},
"begin_time": schema.StringAttribute{
@@ -66,24 +67,26 @@ func (r *SourceRecurlyResource) Schema(ctx context.Context, req resource.SchemaR
Optional: true,
Description: `ISO8601 timestamp to which the replication from Recurly API will stop. Records after that date won't be imported.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "recurly",
- ),
- },
- Description: `must be one of ["recurly"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +150,7 @@ func (r *SourceRecurlyResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceRecurly(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +326,5 @@ func (r *SourceRecurlyResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceRecurlyResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_recurly_resource_sdk.go b/internal/provider/source_recurly_resource_sdk.go
old mode 100755
new mode 100644
index 2a8074376..f6ed099e4
--- a/internal/provider/source_recurly_resource_sdk.go
+++ b/internal/provider/source_recurly_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -21,12 +21,16 @@ func (r *SourceRecurlyResourceModel) ToCreateSDKType() *shared.SourceRecurlyCrea
} else {
endTime = nil
}
- sourceType := shared.SourceRecurlyRecurly(r.Configuration.SourceType.ValueString())
configuration := shared.SourceRecurly{
- APIKey: apiKey,
- BeginTime: beginTime,
- EndTime: endTime,
- SourceType: sourceType,
+ APIKey: apiKey,
+ BeginTime: beginTime,
+ EndTime: endTime,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -38,6 +42,7 @@ func (r *SourceRecurlyResourceModel) ToCreateSDKType() *shared.SourceRecurlyCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceRecurlyCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_redshift_data_source.go b/internal/provider/source_redshift_data_source.go
old mode 100755
new mode 100644
index a5f7c1a9c..00d7f0d26
--- a/internal/provider/source_redshift_data_source.go
+++ b/internal/provider/source_redshift_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceRedshiftDataSource struct {
// SourceRedshiftDataSourceModel describes the data model.
type SourceRedshiftDataSourceModel struct {
- Configuration SourceRedshift `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,59 +47,20 @@ func (r *SourceRedshiftDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceRedshift DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "database": schema.StringAttribute{
- Computed: true,
- Description: `Name of the database.`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com).`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Password associated with the username.`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `Port of the database.`,
- },
- "schemas": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The list of schemas to sync from. Specify one or more explicitly or keep empty to process all schemas. Schema names are case sensitive.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "redshift",
- ),
- },
- Description: `must be one of ["redshift"]`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username to use to access the database.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_redshift_data_source_sdk.go b/internal/provider/source_redshift_data_source_sdk.go
old mode 100755
new mode 100644
index 026087a37..973383688
--- a/internal/provider/source_redshift_data_source_sdk.go
+++ b/internal/provider/source_redshift_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRedshiftDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_redshift_resource.go b/internal/provider/source_redshift_resource.go
old mode 100755
new mode 100644
index cb7d315e6..57c549122
--- a/internal/provider/source_redshift_resource.go
+++ b/internal/provider/source_redshift_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceRedshiftResource struct {
// SourceRedshiftResourceModel describes the resource data model.
type SourceRedshiftResourceModel struct {
Configuration SourceRedshift `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -68,39 +68,43 @@ func (r *SourceRedshiftResource) Schema(ctx context.Context, req resource.Schema
},
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Password associated with the username.`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `Port of the database.`,
+ Optional: true,
+ MarkdownDescription: `Default: 5439` + "\n" +
+ `Port of the database.`,
},
"schemas": schema.ListAttribute{
Optional: true,
ElementType: types.StringType,
Description: `The list of schemas to sync from. Specify one or more explicitly or keep empty to process all schemas. Schema names are case sensitive.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "redshift",
- ),
- },
- Description: `must be one of ["redshift"]`,
- },
"username": schema.StringAttribute{
Required: true,
Description: `Username to use to access the database.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -164,7 +168,7 @@ func (r *SourceRedshiftResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceRedshift(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -340,5 +344,5 @@ func (r *SourceRedshiftResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceRedshiftResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_redshift_resource_sdk.go b/internal/provider/source_redshift_resource_sdk.go
old mode 100755
new mode 100644
index c3a57e86a..e073abee9
--- a/internal/provider/source_redshift_resource_sdk.go
+++ b/internal/provider/source_redshift_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -17,12 +17,16 @@ func (r *SourceRedshiftResourceModel) ToCreateSDKType() *shared.SourceRedshiftCr
jdbcURLParams = nil
}
password := r.Configuration.Password.ValueString()
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var schemas []string = nil
for _, schemasItem := range r.Configuration.Schemas {
schemas = append(schemas, schemasItem.ValueString())
}
- sourceType := shared.SourceRedshiftRedshift(r.Configuration.SourceType.ValueString())
username := r.Configuration.Username.ValueString()
configuration := shared.SourceRedshift{
Database: database,
@@ -31,9 +35,14 @@ func (r *SourceRedshiftResourceModel) ToCreateSDKType() *shared.SourceRedshiftCr
Password: password,
Port: port,
Schemas: schemas,
- SourceType: sourceType,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -44,6 +53,7 @@ func (r *SourceRedshiftResourceModel) ToCreateSDKType() *shared.SourceRedshiftCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceRedshiftCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -66,7 +76,12 @@ func (r *SourceRedshiftResourceModel) ToUpdateSDKType() *shared.SourceRedshiftPu
jdbcURLParams = nil
}
password := r.Configuration.Password.ValueString()
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
var schemas []string = nil
for _, schemasItem := range r.Configuration.Schemas {
schemas = append(schemas, schemasItem.ValueString())
diff --git a/internal/provider/source_retently_data_source.go b/internal/provider/source_retently_data_source.go
old mode 100755
new mode 100644
index 5fe191cc4..7910a6233
--- a/internal/provider/source_retently_data_source.go
+++ b/internal/provider/source_retently_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceRetentlyDataSource struct {
// SourceRetentlyDataSourceModel describes the data model.
type SourceRetentlyDataSourceModel struct {
- Configuration SourceRetently1 `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,159 +47,20 @@ func (r *SourceRetentlyDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceRetently DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_retently_authentication_mechanism_authenticate_via_retently_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Retently developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Retently developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Retently Refresh Token which can be used to fetch new Bearer Tokens when the current one expires.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Choose how to authenticate to Retently`,
- },
- "source_retently_authentication_mechanism_authenticate_with_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Retently API Token. See the docs for more information on how to obtain this key.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Choose how to authenticate to Retently`,
- },
- "source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Retently developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Retently developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Retently Refresh Token which can be used to fetch new Bearer Tokens when the current one expires.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Choose how to authenticate to Retently`,
- },
- "source_retently_update_authentication_mechanism_authenticate_with_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Retently API Token. See the docs for more information on how to obtain this key.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Choose how to authenticate to Retently`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Choose how to authenticate to Retently`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "retently",
- ),
- },
- Description: `must be one of ["retently"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_retently_data_source_sdk.go b/internal/provider/source_retently_data_source_sdk.go
old mode 100755
new mode 100644
index 94e8f08ef..146b49e33
--- a/internal/provider/source_retently_data_source_sdk.go
+++ b/internal/provider/source_retently_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRetentlyDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_retently_resource.go b/internal/provider/source_retently_resource.go
old mode 100755
new mode 100644
index bc531f5b7..73151a6b4
--- a/internal/provider/source_retently_resource.go
+++ b/internal/provider/source_retently_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceRetentlyResource struct {
// SourceRetentlyResourceModel describes the resource data model.
type SourceRetentlyResourceModel struct {
Configuration SourceRetently `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,17 +59,15 @@ func (r *SourceRetentlyResource) Schema(ctx context.Context, req resource.Schema
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_retently_authentication_mechanism_authenticate_via_retently_o_auth": schema.SingleNestedAttribute{
+ "authenticate_via_retently_o_auth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
+ "additional_properties": schema.StringAttribute{
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
+ validators.IsValidJSON(),
},
- Description: `must be one of ["Client"]`,
},
"client_id": schema.StringAttribute{
Required: true,
@@ -80,128 +79,56 @@ func (r *SourceRetentlyResource) Schema(ctx context.Context, req resource.Schema
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Retently Refresh Token which can be used to fetch new Bearer Tokens when the current one expires.`,
},
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `Choose how to authenticate to Retently`,
},
- "source_retently_authentication_mechanism_authenticate_with_api_token": schema.SingleNestedAttribute{
+ "authenticate_with_api_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Required: true,
- Description: `Retently API Token. See the docs for more information on how to obtain this key.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
+ Optional: true,
Description: `Parsed as JSON.`,
- },
- },
- Description: `Choose how to authenticate to Retently`,
- },
- "source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your Retently developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your Retently developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `Retently Refresh Token which can be used to fetch new Bearer Tokens when the current one expires.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
- },
- Description: `Choose how to authenticate to Retently`,
- },
- "source_retently_update_authentication_mechanism_authenticate_with_api_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Retently API Token. See the docs for more information on how to obtain this key.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Token",
- ),
- },
- Description: `must be one of ["Token"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `Choose how to authenticate to Retently`,
},
},
+ Description: `Choose how to authenticate to Retently`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Choose how to authenticate to Retently`,
- },
- "source_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "retently",
- ),
- },
- Description: `must be one of ["retently"]`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -265,7 +192,7 @@ func (r *SourceRetentlyResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceRetently(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -441,5 +368,5 @@ func (r *SourceRetentlyResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceRetentlyResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_retently_resource_sdk.go b/internal/provider/source_retently_resource_sdk.go
old mode 100755
new mode 100644
index 451442bba..3026c9a94
--- a/internal/provider/source_retently_resource_sdk.go
+++ b/internal/provider/source_retently_resource_sdk.go
@@ -3,76 +3,61 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
"encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRetentlyResourceModel) ToCreateSDKType() *shared.SourceRetentlyCreateRequest {
var credentials *shared.SourceRetentlyAuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth *shared.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth
- if r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth != nil {
- authType := new(shared.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType)
- if !r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth.AuthType.IsNull() {
- *authType = shared.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType(r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth.RefreshToken.ValueString()
+ var sourceRetentlyAuthenticateViaRetentlyOAuth *shared.SourceRetentlyAuthenticateViaRetentlyOAuth
+ if r.Configuration.Credentials.AuthenticateViaRetentlyOAuth != nil {
var additionalProperties interface{}
- if !r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth.AdditionalProperties.ValueString()), &additionalProperties)
+ if !r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.AdditionalProperties.ValueString()), &additionalProperties)
}
- sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth = &shared.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth{
- AuthType: authType,
+ clientID := r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.RefreshToken.ValueString()
+ sourceRetentlyAuthenticateViaRetentlyOAuth = &shared.SourceRetentlyAuthenticateViaRetentlyOAuth{
+ AdditionalProperties: additionalProperties,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
- AdditionalProperties: additionalProperties,
}
}
- if sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth != nil {
+ if sourceRetentlyAuthenticateViaRetentlyOAuth != nil {
credentials = &shared.SourceRetentlyAuthenticationMechanism{
- SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth: sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth,
+ SourceRetentlyAuthenticateViaRetentlyOAuth: sourceRetentlyAuthenticateViaRetentlyOAuth,
}
}
- var sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken *shared.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken
- if r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken != nil {
- apiKey := r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken.APIKey.ValueString()
- authType1 := new(shared.SourceRetentlyAuthenticationMechanismAuthenticateWithAPITokenAuthType)
- if !r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken.AuthType.IsNull() {
- *authType1 = shared.SourceRetentlyAuthenticationMechanismAuthenticateWithAPITokenAuthType(r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
+ var sourceRetentlyAuthenticateWithAPIToken *shared.SourceRetentlyAuthenticateWithAPIToken
+ if r.Configuration.Credentials.AuthenticateWithAPIToken != nil {
var additionalProperties1 interface{}
- if !r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken.AdditionalProperties.ValueString()), &additionalProperties1)
+ if !r.Configuration.Credentials.AuthenticateWithAPIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.AuthenticateWithAPIToken.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.AuthenticateWithAPIToken.AdditionalProperties.ValueString()), &additionalProperties1)
}
- sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken = &shared.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken{
- APIKey: apiKey,
- AuthType: authType1,
+ apiKey := r.Configuration.Credentials.AuthenticateWithAPIToken.APIKey.ValueString()
+ sourceRetentlyAuthenticateWithAPIToken = &shared.SourceRetentlyAuthenticateWithAPIToken{
AdditionalProperties: additionalProperties1,
+ APIKey: apiKey,
}
}
- if sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken != nil {
+ if sourceRetentlyAuthenticateWithAPIToken != nil {
credentials = &shared.SourceRetentlyAuthenticationMechanism{
- SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken: sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken,
+ SourceRetentlyAuthenticateWithAPIToken: sourceRetentlyAuthenticateWithAPIToken,
}
}
}
- sourceType := new(shared.SourceRetentlyRetently)
- if !r.Configuration.SourceType.IsUnknown() && !r.Configuration.SourceType.IsNull() {
- *sourceType = shared.SourceRetentlyRetently(r.Configuration.SourceType.ValueString())
- } else {
- sourceType = nil
- }
configuration := shared.SourceRetently{
Credentials: credentials,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -84,6 +69,7 @@ func (r *SourceRetentlyResourceModel) ToCreateSDKType() *shared.SourceRetentlyCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceRetentlyCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -99,56 +85,42 @@ func (r *SourceRetentlyResourceModel) ToGetSDKType() *shared.SourceRetentlyCreat
func (r *SourceRetentlyResourceModel) ToUpdateSDKType() *shared.SourceRetentlyPutRequest {
var credentials *shared.SourceRetentlyUpdateAuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth *shared.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth
- if r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth != nil {
- authType := new(shared.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType)
- if !r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth.AuthType.IsNull() {
- *authType = shared.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType(r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth.RefreshToken.ValueString()
+ var authenticateViaRetentlyOAuth *shared.AuthenticateViaRetentlyOAuth
+ if r.Configuration.Credentials.AuthenticateViaRetentlyOAuth != nil {
var additionalProperties interface{}
- if !r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth.AdditionalProperties.ValueString()), &additionalProperties)
+ if !r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.AdditionalProperties.ValueString()), &additionalProperties)
}
- sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth = &shared.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth{
- AuthType: authType,
+ clientID := r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaRetentlyOAuth.RefreshToken.ValueString()
+ authenticateViaRetentlyOAuth = &shared.AuthenticateViaRetentlyOAuth{
+ AdditionalProperties: additionalProperties,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
- AdditionalProperties: additionalProperties,
}
}
- if sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth != nil {
+ if authenticateViaRetentlyOAuth != nil {
credentials = &shared.SourceRetentlyUpdateAuthenticationMechanism{
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth: sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth,
+ AuthenticateViaRetentlyOAuth: authenticateViaRetentlyOAuth,
}
}
- var sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken *shared.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken
- if r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken != nil {
- apiKey := r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken.APIKey.ValueString()
- authType1 := new(shared.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPITokenAuthType)
- if !r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken.AuthType.IsNull() {
- *authType1 = shared.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPITokenAuthType(r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
+ var authenticateWithAPIToken *shared.AuthenticateWithAPIToken
+ if r.Configuration.Credentials.AuthenticateWithAPIToken != nil {
var additionalProperties1 interface{}
- if !r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken.AdditionalProperties.ValueString()), &additionalProperties1)
+ if !r.Configuration.Credentials.AuthenticateWithAPIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.AuthenticateWithAPIToken.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.AuthenticateWithAPIToken.AdditionalProperties.ValueString()), &additionalProperties1)
}
- sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken = &shared.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken{
- APIKey: apiKey,
- AuthType: authType1,
+ apiKey := r.Configuration.Credentials.AuthenticateWithAPIToken.APIKey.ValueString()
+ authenticateWithAPIToken = &shared.AuthenticateWithAPIToken{
AdditionalProperties: additionalProperties1,
+ APIKey: apiKey,
}
}
- if sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken != nil {
+ if authenticateWithAPIToken != nil {
credentials = &shared.SourceRetentlyUpdateAuthenticationMechanism{
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken: sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken,
+ AuthenticateWithAPIToken: authenticateWithAPIToken,
}
}
}
diff --git a/internal/provider/source_rkicovid_data_source.go b/internal/provider/source_rkicovid_data_source.go
old mode 100755
new mode 100644
index 096d23fa0..ff91970e7
--- a/internal/provider/source_rkicovid_data_source.go
+++ b/internal/provider/source_rkicovid_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceRkiCovidDataSource struct {
// SourceRkiCovidDataSourceModel describes the data model.
type SourceRkiCovidDataSourceModel struct {
- Configuration SourceRkiCovid `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceRkiCovidDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceRkiCovid DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "rki-covid",
- ),
- },
- Description: `must be one of ["rki-covid"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `UTC date in the format 2017-01-25. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_rkicovid_data_source_sdk.go b/internal/provider/source_rkicovid_data_source_sdk.go
old mode 100755
new mode 100644
index a5a15a1dc..26f844801
--- a/internal/provider/source_rkicovid_data_source_sdk.go
+++ b/internal/provider/source_rkicovid_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRkiCovidDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_rkicovid_resource.go b/internal/provider/source_rkicovid_resource.go
old mode 100755
new mode 100644
index 26a7590d0..3a9aeada0
--- a/internal/provider/source_rkicovid_resource.go
+++ b/internal/provider/source_rkicovid_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceRkiCovidResource struct {
// SourceRkiCovidResourceModel describes the resource data model.
type SourceRkiCovidResourceModel struct {
Configuration SourceRkiCovid `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -54,28 +54,30 @@ func (r *SourceRkiCovidResource) Schema(ctx context.Context, req resource.Schema
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "rki-covid",
- ),
- },
- Description: `must be one of ["rki-covid"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `UTC date in the format 2017-01-25. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +141,7 @@ func (r *SourceRkiCovidResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceRkiCovid(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +317,5 @@ func (r *SourceRkiCovidResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceRkiCovidResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_rkicovid_resource_sdk.go b/internal/provider/source_rkicovid_resource_sdk.go
old mode 100755
new mode 100644
index c87fbef8c..652ac6a72
--- a/internal/provider/source_rkicovid_resource_sdk.go
+++ b/internal/provider/source_rkicovid_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRkiCovidResourceModel) ToCreateSDKType() *shared.SourceRkiCovidCreateRequest {
- sourceType := shared.SourceRkiCovidRkiCovid(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
configuration := shared.SourceRkiCovid{
- SourceType: sourceType,
- StartDate: startDate,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceRkiCovidResourceModel) ToCreateSDKType() *shared.SourceRkiCovidCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceRkiCovidCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_rss_data_source.go b/internal/provider/source_rss_data_source.go
old mode 100755
new mode 100644
index fc22fe2a7..83e59748e
--- a/internal/provider/source_rss_data_source.go
+++ b/internal/provider/source_rss_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceRssDataSource struct {
// SourceRssDataSourceModel describes the data model.
type SourceRssDataSourceModel struct {
- Configuration SourceRss `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,34 +47,20 @@ func (r *SourceRssDataSource) Schema(ctx context.Context, req datasource.SchemaR
MarkdownDescription: "SourceRss DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "rss",
- ),
- },
- Description: `must be one of ["rss"]`,
- },
- "url": schema.StringAttribute{
- Computed: true,
- Description: `RSS Feed URL`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_rss_data_source_sdk.go b/internal/provider/source_rss_data_source_sdk.go
old mode 100755
new mode 100644
index 7b3d90d92..44223b9b9
--- a/internal/provider/source_rss_data_source_sdk.go
+++ b/internal/provider/source_rss_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRssDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_rss_resource.go b/internal/provider/source_rss_resource.go
old mode 100755
new mode 100644
index 7edd34f14..809bc7789
--- a/internal/provider/source_rss_resource.go
+++ b/internal/provider/source_rss_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceRssResource struct {
// SourceRssResourceModel describes the resource data model.
type SourceRssResourceModel struct {
Configuration SourceRss `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -54,28 +54,30 @@ func (r *SourceRssResource) Schema(ctx context.Context, req resource.SchemaReque
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "rss",
- ),
- },
- Description: `must be one of ["rss"]`,
- },
"url": schema.StringAttribute{
Required: true,
Description: `RSS Feed URL`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +141,7 @@ func (r *SourceRssResource) Create(ctx context.Context, req resource.CreateReque
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceRss(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +317,5 @@ func (r *SourceRssResource) Delete(ctx context.Context, req resource.DeleteReque
}
func (r *SourceRssResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_rss_resource_sdk.go b/internal/provider/source_rss_resource_sdk.go
old mode 100755
new mode 100644
index 8b5e66bb0..0962c59ae
--- a/internal/provider/source_rss_resource_sdk.go
+++ b/internal/provider/source_rss_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceRssResourceModel) ToCreateSDKType() *shared.SourceRssCreateRequest {
- sourceType := shared.SourceRssRss(r.Configuration.SourceType.ValueString())
url := r.Configuration.URL.ValueString()
configuration := shared.SourceRss{
- SourceType: sourceType,
- URL: url,
+ URL: url,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceRssResourceModel) ToCreateSDKType() *shared.SourceRssCreateReques
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceRssCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_s3_data_source.go b/internal/provider/source_s3_data_source.go
old mode 100755
new mode 100644
index 9cf3e173e..307a4e8d1
--- a/internal/provider/source_s3_data_source.go
+++ b/internal/provider/source_s3_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceS3DataSource struct {
// SourceS3DataSourceModel describes the data model.
type SourceS3DataSourceModel struct {
- Configuration SourceS3 `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,807 +47,20 @@ func (r *SourceS3DataSource) Schema(ctx context.Context, req datasource.SchemaRe
MarkdownDescription: "SourceS3 DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "aws_access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "aws_secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "bucket": schema.StringAttribute{
- Computed: true,
- Description: `Name of the S3 bucket where the file(s) exist.`,
- },
- "dataset": schema.StringAttribute{
- Computed: true,
- Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores.`,
- },
- "endpoint": schema.StringAttribute{
- Computed: true,
- Description: `Endpoint to an S3 compatible service. Leave empty to use AWS.`,
- },
- "format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_s3_file_format_avro": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "avro",
- ),
- },
- Description: `must be one of ["avro"]`,
- },
- },
- Description: `This connector utilises fastavro for Avro parsing.`,
- },
- "source_s3_file_format_csv": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "additional_reader_options": schema.StringAttribute{
- Computed: true,
- Description: `Optionally add a valid JSON string here to provide additional options to the csv reader. Mappings must correspond to options detailed here. 'column_types' is used internally to handle schema so overriding that would likely cause problems.`,
- },
- "advanced_options": schema.StringAttribute{
- Computed: true,
- Description: `Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above.`,
- },
- "block_size": schema.Int64Attribute{
- Computed: true,
- Description: `The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.`,
- },
- "delimiter": schema.StringAttribute{
- Computed: true,
- Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
- },
- "double_quote": schema.BoolAttribute{
- Computed: true,
- Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
- },
- "encoding": schema.StringAttribute{
- Computed: true,
- Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
- },
- "escape_char": schema.StringAttribute{
- Computed: true,
- Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "csv",
- ),
- },
- Description: `must be one of ["csv"]`,
- },
- "infer_datatypes": schema.BoolAttribute{
- Computed: true,
- Description: `Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings`,
- },
- "newlines_in_values": schema.BoolAttribute{
- Computed: true,
- Description: `Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.`,
- },
- "quote_char": schema.StringAttribute{
- Computed: true,
- Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
- },
- },
- Description: `This connector utilises PyArrow (Apache Arrow) for CSV parsing.`,
- },
- "source_s3_file_format_jsonl": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "block_size": schema.Int64Attribute{
- Computed: true,
- Description: `The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "jsonl",
- ),
- },
- Description: `must be one of ["jsonl"]`,
- },
- "newlines_in_values": schema.BoolAttribute{
- Computed: true,
- Description: `Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.`,
- },
- "unexpected_field_behavior": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ignore",
- "infer",
- "error",
- ),
- },
- MarkdownDescription: `must be one of ["ignore", "infer", "error"]` + "\n" +
- `How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details`,
- },
- },
- Description: `This connector uses PyArrow for JSON Lines (jsonl) file parsing.`,
- },
- "source_s3_file_format_parquet": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "batch_size": schema.Int64Attribute{
- Computed: true,
- Description: `Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.`,
- },
- "buffer_size": schema.Int64Attribute{
- Computed: true,
- Description: `Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.`,
- },
- "columns": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "parquet",
- ),
- },
- Description: `must be one of ["parquet"]`,
- },
- },
- Description: `This connector utilises PyArrow (Apache Arrow) for Parquet parsing.`,
- },
- "source_s3_update_file_format_avro": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "avro",
- ),
- },
- Description: `must be one of ["avro"]`,
- },
- },
- Description: `This connector utilises fastavro for Avro parsing.`,
- },
- "source_s3_update_file_format_csv": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "additional_reader_options": schema.StringAttribute{
- Computed: true,
- Description: `Optionally add a valid JSON string here to provide additional options to the csv reader. Mappings must correspond to options detailed here. 'column_types' is used internally to handle schema so overriding that would likely cause problems.`,
- },
- "advanced_options": schema.StringAttribute{
- Computed: true,
- Description: `Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above.`,
- },
- "block_size": schema.Int64Attribute{
- Computed: true,
- Description: `The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.`,
- },
- "delimiter": schema.StringAttribute{
- Computed: true,
- Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
- },
- "double_quote": schema.BoolAttribute{
- Computed: true,
- Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
- },
- "encoding": schema.StringAttribute{
- Computed: true,
- Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
- },
- "escape_char": schema.StringAttribute{
- Computed: true,
- Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "csv",
- ),
- },
- Description: `must be one of ["csv"]`,
- },
- "infer_datatypes": schema.BoolAttribute{
- Computed: true,
- Description: `Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings`,
- },
- "newlines_in_values": schema.BoolAttribute{
- Computed: true,
- Description: `Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.`,
- },
- "quote_char": schema.StringAttribute{
- Computed: true,
- Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
- },
- },
- Description: `This connector utilises PyArrow (Apache Arrow) for CSV parsing.`,
- },
- "source_s3_update_file_format_jsonl": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "block_size": schema.Int64Attribute{
- Computed: true,
- Description: `The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "jsonl",
- ),
- },
- Description: `must be one of ["jsonl"]`,
- },
- "newlines_in_values": schema.BoolAttribute{
- Computed: true,
- Description: `Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.`,
- },
- "unexpected_field_behavior": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ignore",
- "infer",
- "error",
- ),
- },
- MarkdownDescription: `must be one of ["ignore", "infer", "error"]` + "\n" +
- `How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details`,
- },
- },
- Description: `This connector uses PyArrow for JSON Lines (jsonl) file parsing.`,
- },
- "source_s3_update_file_format_parquet": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "batch_size": schema.Int64Attribute{
- Computed: true,
- Description: `Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.`,
- },
- "buffer_size": schema.Int64Attribute{
- Computed: true,
- Description: `Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.`,
- },
- "columns": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "parquet",
- ),
- },
- Description: `must be one of ["parquet"]`,
- },
- },
- Description: `This connector utilises PyArrow (Apache Arrow) for Parquet parsing.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate`,
- },
- "path_pattern": schema.StringAttribute{
- Computed: true,
- Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files.`,
- },
- "provider": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "aws_access_key_id": schema.StringAttribute{
- Computed: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "aws_secret_access_key": schema.StringAttribute{
- Computed: true,
- Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
- },
- "bucket": schema.StringAttribute{
- Computed: true,
- Description: `Name of the S3 bucket where the file(s) exist.`,
- },
- "endpoint": schema.StringAttribute{
- Computed: true,
- Description: `Endpoint to an S3 compatible service. Leave empty to use AWS.`,
- },
- "path_prefix": schema.StringAttribute{
- Computed: true,
- Description: `By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate.`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated.`,
- },
- },
- Description: `Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services`,
- },
- "schema": schema.StringAttribute{
- Computed: true,
- Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "s3",
- ),
- },
- Description: `must be one of ["s3"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.`,
- },
- "streams": schema.ListNestedAttribute{
- Computed: true,
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "days_to_sync_if_history_is_full": schema.Int64Attribute{
- Computed: true,
- Description: `When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.`,
- },
- "file_type": schema.StringAttribute{
- Computed: true,
- Description: `The data file type that is being extracted for a stream.`,
- },
- "format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_s3_file_based_stream_config_format_avro_format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "double_as_string": schema.BoolAttribute{
- Computed: true,
- Description: `Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "avro",
- ),
- },
- Description: `must be one of ["avro"]`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "source_s3_file_based_stream_config_format_csv_format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "delimiter": schema.StringAttribute{
- Computed: true,
- Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
- },
- "double_quote": schema.BoolAttribute{
- Computed: true,
- Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
- },
- "encoding": schema.StringAttribute{
- Computed: true,
- Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
- },
- "escape_char": schema.StringAttribute{
- Computed: true,
- Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`,
- },
- "false_values": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A set of case-sensitive strings that should be interpreted as false values.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "csv",
- ),
- },
- Description: `must be one of ["csv"]`,
- },
- "header_definition": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "header_definition_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "From CSV",
- ),
- },
- Description: `must be one of ["From CSV"]`,
- },
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "header_definition_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Autogenerated",
- ),
- },
- Description: `must be one of ["Autogenerated"]`,
- },
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "column_names": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The column names that will be used while emitting the CSV records`,
- },
- "header_definition_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "User Provided",
- ),
- },
- Description: `must be one of ["User Provided"]`,
- },
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- "inference_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "None",
- "Primitive Types Only",
- ),
- },
- MarkdownDescription: `must be one of ["None", "Primitive Types Only"]` + "\n" +
- `How to infer the types of the columns. If none, inference default to strings.`,
- },
- "null_values": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.`,
- },
- "quote_char": schema.StringAttribute{
- Computed: true,
- Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
- },
- "skip_rows_after_header": schema.Int64Attribute{
- Computed: true,
- Description: `The number of rows to skip after the header row.`,
- },
- "skip_rows_before_header": schema.Int64Attribute{
- Computed: true,
- Description: `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`,
- },
- "strings_can_be_null": schema.BoolAttribute{
- Computed: true,
- Description: `Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`,
- },
- "true_values": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A set of case-sensitive strings that should be interpreted as true values.`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "source_s3_file_based_stream_config_format_jsonl_format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "jsonl",
- ),
- },
- Description: `must be one of ["jsonl"]`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "source_s3_file_based_stream_config_format_parquet_format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "decimal_as_float": schema.BoolAttribute{
- Computed: true,
- Description: `Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "parquet",
- ),
- },
- Description: `must be one of ["parquet"]`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "source_s3_update_file_based_stream_config_format_avro_format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "double_as_string": schema.BoolAttribute{
- Computed: true,
- Description: `Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "avro",
- ),
- },
- Description: `must be one of ["avro"]`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "source_s3_update_file_based_stream_config_format_csv_format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "delimiter": schema.StringAttribute{
- Computed: true,
- Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
- },
- "double_quote": schema.BoolAttribute{
- Computed: true,
- Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
- },
- "encoding": schema.StringAttribute{
- Computed: true,
- Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
- },
- "escape_char": schema.StringAttribute{
- Computed: true,
- Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`,
- },
- "false_values": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A set of case-sensitive strings that should be interpreted as false values.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "csv",
- ),
- },
- Description: `must be one of ["csv"]`,
- },
- "header_definition": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "header_definition_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "From CSV",
- ),
- },
- Description: `must be one of ["From CSV"]`,
- },
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "header_definition_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Autogenerated",
- ),
- },
- Description: `must be one of ["Autogenerated"]`,
- },
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "column_names": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The column names that will be used while emitting the CSV records`,
- },
- "header_definition_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "User Provided",
- ),
- },
- Description: `must be one of ["User Provided"]`,
- },
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- "inference_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "None",
- "Primitive Types Only",
- ),
- },
- MarkdownDescription: `must be one of ["None", "Primitive Types Only"]` + "\n" +
- `How to infer the types of the columns. If none, inference default to strings.`,
- },
- "null_values": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.`,
- },
- "quote_char": schema.StringAttribute{
- Computed: true,
- Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
- },
- "skip_rows_after_header": schema.Int64Attribute{
- Computed: true,
- Description: `The number of rows to skip after the header row.`,
- },
- "skip_rows_before_header": schema.Int64Attribute{
- Computed: true,
- Description: `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`,
- },
- "strings_can_be_null": schema.BoolAttribute{
- Computed: true,
- Description: `Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`,
- },
- "true_values": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A set of case-sensitive strings that should be interpreted as true values.`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "source_s3_update_file_based_stream_config_format_jsonl_format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "jsonl",
- ),
- },
- Description: `must be one of ["jsonl"]`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "source_s3_update_file_based_stream_config_format_parquet_format": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "decimal_as_float": schema.BoolAttribute{
- Computed: true,
- Description: `Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.`,
- },
- "filetype": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "parquet",
- ),
- },
- Description: `must be one of ["parquet"]`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "globs": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.`,
- },
- "input_schema": schema.StringAttribute{
- Computed: true,
- Description: `The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.`,
- },
- "legacy_prefix": schema.StringAttribute{
- Computed: true,
- Description: `The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.`,
- },
- "name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the stream.`,
- },
- "primary_key": schema.StringAttribute{
- Computed: true,
- Description: `The column or columns (for a composite key) that serves as the unique identifier of a record.`,
- },
- "schemaless": schema.BoolAttribute{
- Computed: true,
- Description: `When enabled, syncs will not validate or structure records against the stream's schema.`,
- },
- "validation_policy": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Emit Record",
- "Skip Record",
- "Wait for Discover",
- ),
- },
- MarkdownDescription: `must be one of ["Emit Record", "Skip Record", "Wait for Discover"]` + "\n" +
- `The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.`,
- },
- },
- },
- Description: `Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.`,
- },
- },
- MarkdownDescription: `NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes` + "\n" +
- `because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK.`,
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_s3_data_source_sdk.go b/internal/provider/source_s3_data_source_sdk.go
old mode 100755
new mode 100644
index 5a1bdfb44..e09a44735
--- a/internal/provider/source_s3_data_source_sdk.go
+++ b/internal/provider/source_s3_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceS3DataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_s3_resource.go b/internal/provider/source_s3_resource.go
old mode 100755
new mode 100644
index 3d61c3ce6..6621e1a4a
--- a/internal/provider/source_s3_resource.go
+++ b/internal/provider/source_s3_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceS3Resource struct {
// SourceS3ResourceModel describes the resource data model.
type SourceS3ResourceModel struct {
Configuration SourceS3 `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,10 +59,12 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
Attributes: map[string]schema.Attribute{
"aws_access_key_id": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
},
"aws_secret_access_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
},
"bucket": schema.StringAttribute{
@@ -72,28 +76,19 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores.`,
},
"endpoint": schema.StringAttribute{
- Optional: true,
- Description: `Endpoint to an S3 compatible service. Leave empty to use AWS.`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `Endpoint to an S3 compatible service. Leave empty to use AWS. The custom endpoint must be secure, but the 'https' prefix is not required.`,
},
"format": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_s3_file_format_avro": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "filetype": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "avro",
- ),
- },
- Description: `must be one of ["avro"]`,
- },
- },
+ "avro": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `This connector utilises fastavro for Avro parsing.`,
},
- "source_s3_file_format_csv": schema.SingleNestedAttribute{
+ "csv": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"additional_reader_options": schema.StringAttribute{
@@ -105,204 +100,64 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
Description: `Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above.`,
},
"block_size": schema.Int64Attribute{
- Optional: true,
- Description: `The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.`,
+ Optional: true,
+ MarkdownDescription: `Default: 10000` + "\n" +
+ `The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.`,
},
"delimiter": schema.StringAttribute{
- Optional: true,
- Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
+ Optional: true,
+ MarkdownDescription: `Default: ","` + "\n" +
+ `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
},
"double_quote": schema.BoolAttribute{
- Optional: true,
- Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
},
"encoding": schema.StringAttribute{
- Optional: true,
- Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
+ Optional: true,
+ MarkdownDescription: `Default: "utf8"` + "\n" +
+ `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
},
"escape_char": schema.StringAttribute{
Optional: true,
Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`,
},
- "filetype": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "csv",
- ),
- },
- Description: `must be one of ["csv"]`,
- },
"infer_datatypes": schema.BoolAttribute{
- Optional: true,
- Description: `Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings`,
- },
- "newlines_in_values": schema.BoolAttribute{
- Optional: true,
- Description: `Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.`,
- },
- "quote_char": schema.StringAttribute{
- Optional: true,
- Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
- },
- },
- Description: `This connector utilises PyArrow (Apache Arrow) for CSV parsing.`,
- },
- "source_s3_file_format_jsonl": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "block_size": schema.Int64Attribute{
- Optional: true,
- Description: `The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.`,
- },
- "filetype": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "jsonl",
- ),
- },
- Description: `must be one of ["jsonl"]`,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings`,
},
"newlines_in_values": schema.BoolAttribute{
- Optional: true,
- Description: `Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.`,
- },
- "unexpected_field_behavior": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "ignore",
- "infer",
- "error",
- ),
- },
- MarkdownDescription: `must be one of ["ignore", "infer", "error"]` + "\n" +
- `How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details`,
- },
- },
- Description: `This connector uses PyArrow for JSON Lines (jsonl) file parsing.`,
- },
- "source_s3_file_format_parquet": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "batch_size": schema.Int64Attribute{
- Optional: true,
- Description: `Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.`,
- },
- "buffer_size": schema.Int64Attribute{
- Optional: true,
- Description: `Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.`,
- },
- "columns": schema.ListAttribute{
- Optional: true,
- ElementType: types.StringType,
- Description: `If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns.`,
- },
- "filetype": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "parquet",
- ),
- },
- Description: `must be one of ["parquet"]`,
- },
- },
- Description: `This connector utilises PyArrow (Apache Arrow) for Parquet parsing.`,
- },
- "source_s3_update_file_format_avro": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "filetype": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "avro",
- ),
- },
- Description: `must be one of ["avro"]`,
- },
- },
- Description: `This connector utilises fastavro for Avro parsing.`,
- },
- "source_s3_update_file_format_csv": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "additional_reader_options": schema.StringAttribute{
- Optional: true,
- Description: `Optionally add a valid JSON string here to provide additional options to the csv reader. Mappings must correspond to options detailed here. 'column_types' is used internally to handle schema so overriding that would likely cause problems.`,
- },
- "advanced_options": schema.StringAttribute{
- Optional: true,
- Description: `Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above.`,
- },
- "block_size": schema.Int64Attribute{
- Optional: true,
- Description: `The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.`,
- },
- "delimiter": schema.StringAttribute{
- Optional: true,
- Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
- },
- "double_quote": schema.BoolAttribute{
- Optional: true,
- Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
- },
- "encoding": schema.StringAttribute{
- Optional: true,
- Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
- },
- "escape_char": schema.StringAttribute{
- Optional: true,
- Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`,
- },
- "filetype": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "csv",
- ),
- },
- Description: `must be one of ["csv"]`,
- },
- "infer_datatypes": schema.BoolAttribute{
- Optional: true,
- Description: `Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings`,
- },
- "newlines_in_values": schema.BoolAttribute{
- Optional: true,
- Description: `Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.`,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.`,
},
"quote_char": schema.StringAttribute{
- Optional: true,
- Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
+ Optional: true,
+ MarkdownDescription: `Default: "\""` + "\n" +
+ `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
},
},
Description: `This connector utilises PyArrow (Apache Arrow) for CSV parsing.`,
},
- "source_s3_update_file_format_jsonl": schema.SingleNestedAttribute{
+ "jsonl": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"block_size": schema.Int64Attribute{
- Optional: true,
- Description: `The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.`,
- },
- "filetype": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "jsonl",
- ),
- },
- Description: `must be one of ["jsonl"]`,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.`,
},
"newlines_in_values": schema.BoolAttribute{
- Optional: true,
- Description: `Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.`,
},
"unexpected_field_behavior": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["ignore", "infer", "error"]; Default: "infer"` + "\n" +
+ `How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details`,
Validators: []validator.String{
stringvalidator.OneOf(
"ignore",
@@ -310,45 +165,36 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
"error",
),
},
- MarkdownDescription: `must be one of ["ignore", "infer", "error"]` + "\n" +
- `How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details`,
},
},
Description: `This connector uses PyArrow for JSON Lines (jsonl) file parsing.`,
},
- "source_s3_update_file_format_parquet": schema.SingleNestedAttribute{
+ "parquet": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"batch_size": schema.Int64Attribute{
- Optional: true,
- Description: `Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.`,
+ Optional: true,
+ MarkdownDescription: `Default: 65536` + "\n" +
+ `Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.`,
},
"buffer_size": schema.Int64Attribute{
- Optional: true,
- Description: `Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.`,
+ Optional: true,
+ MarkdownDescription: `Default: 2` + "\n" +
+ `Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.`,
},
"columns": schema.ListAttribute{
Optional: true,
ElementType: types.StringType,
Description: `If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns.`,
},
- "filetype": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "parquet",
- ),
- },
- Description: `must be one of ["parquet"]`,
- },
},
Description: `This connector utilises PyArrow (Apache Arrow) for Parquet parsing.`,
},
},
+ Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate`,
},
"path_pattern": schema.StringAttribute{
Optional: true,
@@ -359,10 +205,12 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
Attributes: map[string]schema.Attribute{
"aws_access_key_id": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
},
"aws_secret_access_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`,
},
"bucket": schema.StringAttribute{
@@ -370,276 +218,77 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
Description: `Name of the S3 bucket where the file(s) exist.`,
},
"endpoint": schema.StringAttribute{
- Optional: true,
- Description: `Endpoint to an S3 compatible service. Leave empty to use AWS.`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `Endpoint to an S3 compatible service. Leave empty to use AWS.`,
},
"path_prefix": schema.StringAttribute{
- Optional: true,
- Description: `By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate.`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate.`,
},
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated.`,
},
},
Description: `Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services`,
},
"schema": schema.StringAttribute{
- Optional: true,
- Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "s3",
- ),
- },
- Description: `must be one of ["s3"]`,
+ Optional: true,
+ MarkdownDescription: `Default: "{}"` + "\n" +
+ `Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.`,
},
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.`,
},
"streams": schema.ListNestedAttribute{
Required: true,
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"days_to_sync_if_history_is_full": schema.Int64Attribute{
- Optional: true,
- Description: `When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.`,
- },
- "file_type": schema.StringAttribute{
- Required: true,
- Description: `The data file type that is being extracted for a stream.`,
+ Optional: true,
+ MarkdownDescription: `Default: 3` + "\n" +
+ `When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.`,
},
"format": schema.SingleNestedAttribute{
- Optional: true,
+ Required: true,
Attributes: map[string]schema.Attribute{
- "source_s3_file_based_stream_config_format_avro_format": schema.SingleNestedAttribute{
+ "avro_format": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"double_as_string": schema.BoolAttribute{
- Optional: true,
- Description: `Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.`,
- },
- "filetype": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "avro",
- ),
- },
- Description: `must be one of ["avro"]`,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.`,
},
},
Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
},
- "source_s3_file_based_stream_config_format_csv_format": schema.SingleNestedAttribute{
+ "csv_format": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"delimiter": schema.StringAttribute{
- Optional: true,
- Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
- },
- "double_quote": schema.BoolAttribute{
- Optional: true,
- Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
- },
- "encoding": schema.StringAttribute{
- Optional: true,
- Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
- },
- "escape_char": schema.StringAttribute{
- Optional: true,
- Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`,
- },
- "false_values": schema.ListAttribute{
- Optional: true,
- ElementType: types.StringType,
- Description: `A set of case-sensitive strings that should be interpreted as false values.`,
- },
- "filetype": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "csv",
- ),
- },
- Description: `must be one of ["csv"]`,
- },
- "header_definition": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "header_definition_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Autogenerated",
- ),
- },
- Description: `must be one of ["Autogenerated"]`,
- },
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "header_definition_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "From CSV",
- ),
- },
- Description: `must be one of ["From CSV"]`,
- },
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "column_names": schema.ListAttribute{
- Required: true,
- ElementType: types.StringType,
- Description: `The column names that will be used while emitting the CSV records`,
- },
- "header_definition_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "User Provided",
- ),
- },
- Description: `must be one of ["User Provided"]`,
- },
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
- },
- "inference_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "None",
- "Primitive Types Only",
- ),
- },
- MarkdownDescription: `must be one of ["None", "Primitive Types Only"]` + "\n" +
- `How to infer the types of the columns. If none, inference default to strings.`,
- },
- "null_values": schema.ListAttribute{
- Optional: true,
- ElementType: types.StringType,
- Description: `A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.`,
- },
- "quote_char": schema.StringAttribute{
- Optional: true,
- Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
- },
- "skip_rows_after_header": schema.Int64Attribute{
- Optional: true,
- Description: `The number of rows to skip after the header row.`,
- },
- "skip_rows_before_header": schema.Int64Attribute{
- Optional: true,
- Description: `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`,
- },
- "strings_can_be_null": schema.BoolAttribute{
- Optional: true,
- Description: `Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`,
- },
- "true_values": schema.ListAttribute{
- Optional: true,
- ElementType: types.StringType,
- Description: `A set of case-sensitive strings that should be interpreted as true values.`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "source_s3_file_based_stream_config_format_jsonl_format": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "filetype": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "jsonl",
- ),
- },
- Description: `must be one of ["jsonl"]`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "source_s3_file_based_stream_config_format_parquet_format": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "decimal_as_float": schema.BoolAttribute{
- Optional: true,
- Description: `Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.`,
- },
- "filetype": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "parquet",
- ),
- },
- Description: `must be one of ["parquet"]`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "source_s3_update_file_based_stream_config_format_avro_format": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "double_as_string": schema.BoolAttribute{
- Optional: true,
- Description: `Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.`,
- },
- "filetype": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "avro",
- ),
- },
- Description: `must be one of ["avro"]`,
- },
- },
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
- },
- "source_s3_update_file_based_stream_config_format_csv_format": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "delimiter": schema.StringAttribute{
- Optional: true,
- Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
+ MarkdownDescription: `Default: ","` + "\n" +
+ `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`,
},
"double_quote": schema.BoolAttribute{
- Optional: true,
- Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether two quotes in a quoted CSV value denote a single quote in the data.`,
},
"encoding": schema.StringAttribute{
- Optional: true,
- Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
+ Optional: true,
+ MarkdownDescription: `Default: "utf8"` + "\n" +
+ `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`,
},
"escape_char": schema.StringAttribute{
Optional: true,
@@ -650,49 +299,20 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
ElementType: types.StringType,
Description: `A set of case-sensitive strings that should be interpreted as false values.`,
},
- "filetype": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "csv",
- ),
- },
- Description: `must be one of ["csv"]`,
- },
"header_definition": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "header_definition_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "From CSV",
- ),
- },
- Description: `must be one of ["From CSV"]`,
- },
- },
+ "autogenerated": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
},
- "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "header_definition_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Autogenerated",
- ),
- },
- Description: `must be one of ["Autogenerated"]`,
- },
- },
+ "from_csv": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
},
- "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided": schema.SingleNestedAttribute{
+ "user_provided": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"column_names": schema.ListAttribute{
@@ -700,34 +320,25 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
ElementType: types.StringType,
Description: `The column names that will be used while emitting the CSV records`,
},
- "header_definition_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "User Provided",
- ),
- },
- Description: `must be one of ["User Provided"]`,
- },
},
Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
},
},
+ Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`,
},
"inference_type": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["None", "Primitive Types Only"]; Default: "None"` + "\n" +
+ `How to infer the types of the columns. If none, inference default to strings.`,
Validators: []validator.String{
stringvalidator.OneOf(
"None",
"Primitive Types Only",
),
},
- MarkdownDescription: `must be one of ["None", "Primitive Types Only"]` + "\n" +
- `How to infer the types of the columns. If none, inference default to strings.`,
},
"null_values": schema.ListAttribute{
Optional: true,
@@ -735,20 +346,24 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
Description: `A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.`,
},
"quote_char": schema.StringAttribute{
- Optional: true,
- Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
+ Optional: true,
+ MarkdownDescription: `Default: "\""` + "\n" +
+ `The character used for quoting CSV values. To disallow quoting, make this field blank.`,
},
"skip_rows_after_header": schema.Int64Attribute{
- Optional: true,
- Description: `The number of rows to skip after the header row.`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `The number of rows to skip after the header row.`,
},
"skip_rows_before_header": schema.Int64Attribute{
- Optional: true,
- Description: `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`,
},
"strings_can_be_null": schema.BoolAttribute{
- Optional: true,
- Description: `Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`,
},
"true_values": schema.ListAttribute{
Optional: true,
@@ -758,45 +373,38 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
},
Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
},
- "source_s3_update_file_based_stream_config_format_jsonl_format": schema.SingleNestedAttribute{
+ "document_file_type_format_experimental": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "filetype": schema.StringAttribute{
+ "skip_unprocessable_file_types": schema.BoolAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "jsonl",
- ),
- },
- Description: `must be one of ["jsonl"]`,
+ MarkdownDescription: `Default: true` + "\n" +
+ `If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.`,
},
},
+ Description: `Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.`,
+ },
+ "jsonl_format": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
},
- "source_s3_update_file_based_stream_config_format_parquet_format": schema.SingleNestedAttribute{
+ "parquet_format": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"decimal_as_float": schema.BoolAttribute{
- Optional: true,
- Description: `Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.`,
- },
- "filetype": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "parquet",
- ),
- },
- Description: `must be one of ["parquet"]`,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.`,
},
},
Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
},
},
+ Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`,
},
"globs": schema.ListAttribute{
Optional: true,
@@ -817,14 +425,18 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
},
"primary_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The column or columns (for a composite key) that serves as the unique identifier of a record.`,
},
"schemaless": schema.BoolAttribute{
- Optional: true,
- Description: `When enabled, syncs will not validate or structure records against the stream's schema.`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `When enabled, syncs will not validate or structure records against the stream's schema.`,
},
"validation_policy": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["Emit Record", "Skip Record", "Wait for Discover"]; Default: "Emit Record"` + "\n" +
+ `The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.`,
Validators: []validator.String{
stringvalidator.OneOf(
"Emit Record",
@@ -832,8 +444,6 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
"Wait for Discover",
),
},
- MarkdownDescription: `must be one of ["Emit Record", "Skip Record", "Wait for Discover"]` + "\n" +
- `The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.`,
},
},
},
@@ -843,13 +453,24 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques
MarkdownDescription: `NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes` + "\n" +
`because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK.`,
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -913,7 +534,7 @@ func (r *SourceS3Resource) Create(ctx context.Context, req resource.CreateReques
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceS3(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -1089,5 +710,5 @@ func (r *SourceS3Resource) Delete(ctx context.Context, req resource.DeleteReques
}
func (r *SourceS3Resource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_s3_resource_sdk.go b/internal/provider/source_s3_resource_sdk.go
old mode 100755
new mode 100644
index b4f6ab052..39f6af1ac
--- a/internal/provider/source_s3_resource_sdk.go
+++ b/internal/provider/source_s3_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -36,75 +36,69 @@ func (r *SourceS3ResourceModel) ToCreateSDKType() *shared.SourceS3CreateRequest
}
var format *shared.SourceS3FileFormat
if r.Configuration.Format != nil {
- var sourceS3FileFormatCSV *shared.SourceS3FileFormatCSV
- if r.Configuration.Format.SourceS3FileFormatCSV != nil {
+ var sourceS3CSV *shared.SourceS3CSV
+ if r.Configuration.Format.Csv != nil {
additionalReaderOptions := new(string)
- if !r.Configuration.Format.SourceS3FileFormatCSV.AdditionalReaderOptions.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatCSV.AdditionalReaderOptions.IsNull() {
- *additionalReaderOptions = r.Configuration.Format.SourceS3FileFormatCSV.AdditionalReaderOptions.ValueString()
+ if !r.Configuration.Format.Csv.AdditionalReaderOptions.IsUnknown() && !r.Configuration.Format.Csv.AdditionalReaderOptions.IsNull() {
+ *additionalReaderOptions = r.Configuration.Format.Csv.AdditionalReaderOptions.ValueString()
} else {
additionalReaderOptions = nil
}
advancedOptions := new(string)
- if !r.Configuration.Format.SourceS3FileFormatCSV.AdvancedOptions.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatCSV.AdvancedOptions.IsNull() {
- *advancedOptions = r.Configuration.Format.SourceS3FileFormatCSV.AdvancedOptions.ValueString()
+ if !r.Configuration.Format.Csv.AdvancedOptions.IsUnknown() && !r.Configuration.Format.Csv.AdvancedOptions.IsNull() {
+ *advancedOptions = r.Configuration.Format.Csv.AdvancedOptions.ValueString()
} else {
advancedOptions = nil
}
blockSize := new(int64)
- if !r.Configuration.Format.SourceS3FileFormatCSV.BlockSize.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatCSV.BlockSize.IsNull() {
- *blockSize = r.Configuration.Format.SourceS3FileFormatCSV.BlockSize.ValueInt64()
+ if !r.Configuration.Format.Csv.BlockSize.IsUnknown() && !r.Configuration.Format.Csv.BlockSize.IsNull() {
+ *blockSize = r.Configuration.Format.Csv.BlockSize.ValueInt64()
} else {
blockSize = nil
}
delimiter := new(string)
- if !r.Configuration.Format.SourceS3FileFormatCSV.Delimiter.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatCSV.Delimiter.IsNull() {
- *delimiter = r.Configuration.Format.SourceS3FileFormatCSV.Delimiter.ValueString()
+ if !r.Configuration.Format.Csv.Delimiter.IsUnknown() && !r.Configuration.Format.Csv.Delimiter.IsNull() {
+ *delimiter = r.Configuration.Format.Csv.Delimiter.ValueString()
} else {
delimiter = nil
}
doubleQuote := new(bool)
- if !r.Configuration.Format.SourceS3FileFormatCSV.DoubleQuote.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatCSV.DoubleQuote.IsNull() {
- *doubleQuote = r.Configuration.Format.SourceS3FileFormatCSV.DoubleQuote.ValueBool()
+ if !r.Configuration.Format.Csv.DoubleQuote.IsUnknown() && !r.Configuration.Format.Csv.DoubleQuote.IsNull() {
+ *doubleQuote = r.Configuration.Format.Csv.DoubleQuote.ValueBool()
} else {
doubleQuote = nil
}
encoding := new(string)
- if !r.Configuration.Format.SourceS3FileFormatCSV.Encoding.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatCSV.Encoding.IsNull() {
- *encoding = r.Configuration.Format.SourceS3FileFormatCSV.Encoding.ValueString()
+ if !r.Configuration.Format.Csv.Encoding.IsUnknown() && !r.Configuration.Format.Csv.Encoding.IsNull() {
+ *encoding = r.Configuration.Format.Csv.Encoding.ValueString()
} else {
encoding = nil
}
escapeChar := new(string)
- if !r.Configuration.Format.SourceS3FileFormatCSV.EscapeChar.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatCSV.EscapeChar.IsNull() {
- *escapeChar = r.Configuration.Format.SourceS3FileFormatCSV.EscapeChar.ValueString()
+ if !r.Configuration.Format.Csv.EscapeChar.IsUnknown() && !r.Configuration.Format.Csv.EscapeChar.IsNull() {
+ *escapeChar = r.Configuration.Format.Csv.EscapeChar.ValueString()
} else {
escapeChar = nil
}
- filetype := new(shared.SourceS3FileFormatCSVFiletype)
- if !r.Configuration.Format.SourceS3FileFormatCSV.Filetype.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatCSV.Filetype.IsNull() {
- *filetype = shared.SourceS3FileFormatCSVFiletype(r.Configuration.Format.SourceS3FileFormatCSV.Filetype.ValueString())
- } else {
- filetype = nil
- }
inferDatatypes := new(bool)
- if !r.Configuration.Format.SourceS3FileFormatCSV.InferDatatypes.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatCSV.InferDatatypes.IsNull() {
- *inferDatatypes = r.Configuration.Format.SourceS3FileFormatCSV.InferDatatypes.ValueBool()
+ if !r.Configuration.Format.Csv.InferDatatypes.IsUnknown() && !r.Configuration.Format.Csv.InferDatatypes.IsNull() {
+ *inferDatatypes = r.Configuration.Format.Csv.InferDatatypes.ValueBool()
} else {
inferDatatypes = nil
}
newlinesInValues := new(bool)
- if !r.Configuration.Format.SourceS3FileFormatCSV.NewlinesInValues.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatCSV.NewlinesInValues.IsNull() {
- *newlinesInValues = r.Configuration.Format.SourceS3FileFormatCSV.NewlinesInValues.ValueBool()
+ if !r.Configuration.Format.Csv.NewlinesInValues.IsUnknown() && !r.Configuration.Format.Csv.NewlinesInValues.IsNull() {
+ *newlinesInValues = r.Configuration.Format.Csv.NewlinesInValues.ValueBool()
} else {
newlinesInValues = nil
}
quoteChar := new(string)
- if !r.Configuration.Format.SourceS3FileFormatCSV.QuoteChar.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatCSV.QuoteChar.IsNull() {
- *quoteChar = r.Configuration.Format.SourceS3FileFormatCSV.QuoteChar.ValueString()
+ if !r.Configuration.Format.Csv.QuoteChar.IsUnknown() && !r.Configuration.Format.Csv.QuoteChar.IsNull() {
+ *quoteChar = r.Configuration.Format.Csv.QuoteChar.ValueString()
} else {
quoteChar = nil
}
- sourceS3FileFormatCSV = &shared.SourceS3FileFormatCSV{
+ sourceS3CSV = &shared.SourceS3CSV{
AdditionalReaderOptions: additionalReaderOptions,
AdvancedOptions: advancedOptions,
BlockSize: blockSize,
@@ -112,106 +106,83 @@ func (r *SourceS3ResourceModel) ToCreateSDKType() *shared.SourceS3CreateRequest
DoubleQuote: doubleQuote,
Encoding: encoding,
EscapeChar: escapeChar,
- Filetype: filetype,
InferDatatypes: inferDatatypes,
NewlinesInValues: newlinesInValues,
QuoteChar: quoteChar,
}
}
- if sourceS3FileFormatCSV != nil {
+ if sourceS3CSV != nil {
format = &shared.SourceS3FileFormat{
- SourceS3FileFormatCSV: sourceS3FileFormatCSV,
+ SourceS3CSV: sourceS3CSV,
}
}
- var sourceS3FileFormatParquet *shared.SourceS3FileFormatParquet
- if r.Configuration.Format.SourceS3FileFormatParquet != nil {
+ var sourceS3Parquet *shared.SourceS3Parquet
+ if r.Configuration.Format.Parquet != nil {
batchSize := new(int64)
- if !r.Configuration.Format.SourceS3FileFormatParquet.BatchSize.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatParquet.BatchSize.IsNull() {
- *batchSize = r.Configuration.Format.SourceS3FileFormatParquet.BatchSize.ValueInt64()
+ if !r.Configuration.Format.Parquet.BatchSize.IsUnknown() && !r.Configuration.Format.Parquet.BatchSize.IsNull() {
+ *batchSize = r.Configuration.Format.Parquet.BatchSize.ValueInt64()
} else {
batchSize = nil
}
bufferSize := new(int64)
- if !r.Configuration.Format.SourceS3FileFormatParquet.BufferSize.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatParquet.BufferSize.IsNull() {
- *bufferSize = r.Configuration.Format.SourceS3FileFormatParquet.BufferSize.ValueInt64()
+ if !r.Configuration.Format.Parquet.BufferSize.IsUnknown() && !r.Configuration.Format.Parquet.BufferSize.IsNull() {
+ *bufferSize = r.Configuration.Format.Parquet.BufferSize.ValueInt64()
} else {
bufferSize = nil
}
var columns []string = nil
- for _, columnsItem := range r.Configuration.Format.SourceS3FileFormatParquet.Columns {
+ for _, columnsItem := range r.Configuration.Format.Parquet.Columns {
columns = append(columns, columnsItem.ValueString())
}
- filetype1 := new(shared.SourceS3FileFormatParquetFiletype)
- if !r.Configuration.Format.SourceS3FileFormatParquet.Filetype.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatParquet.Filetype.IsNull() {
- *filetype1 = shared.SourceS3FileFormatParquetFiletype(r.Configuration.Format.SourceS3FileFormatParquet.Filetype.ValueString())
- } else {
- filetype1 = nil
- }
- sourceS3FileFormatParquet = &shared.SourceS3FileFormatParquet{
+ sourceS3Parquet = &shared.SourceS3Parquet{
BatchSize: batchSize,
BufferSize: bufferSize,
Columns: columns,
- Filetype: filetype1,
}
}
- if sourceS3FileFormatParquet != nil {
+ if sourceS3Parquet != nil {
format = &shared.SourceS3FileFormat{
- SourceS3FileFormatParquet: sourceS3FileFormatParquet,
+ SourceS3Parquet: sourceS3Parquet,
}
}
- var sourceS3FileFormatAvro *shared.SourceS3FileFormatAvro
- if r.Configuration.Format.SourceS3FileFormatAvro != nil {
- filetype2 := new(shared.SourceS3FileFormatAvroFiletype)
- if !r.Configuration.Format.SourceS3FileFormatAvro.Filetype.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatAvro.Filetype.IsNull() {
- *filetype2 = shared.SourceS3FileFormatAvroFiletype(r.Configuration.Format.SourceS3FileFormatAvro.Filetype.ValueString())
- } else {
- filetype2 = nil
- }
- sourceS3FileFormatAvro = &shared.SourceS3FileFormatAvro{
- Filetype: filetype2,
- }
+ var sourceS3Avro *shared.SourceS3Avro
+ if r.Configuration.Format.Avro != nil {
+ sourceS3Avro = &shared.SourceS3Avro{}
}
- if sourceS3FileFormatAvro != nil {
+ if sourceS3Avro != nil {
format = &shared.SourceS3FileFormat{
- SourceS3FileFormatAvro: sourceS3FileFormatAvro,
+ SourceS3Avro: sourceS3Avro,
}
}
- var sourceS3FileFormatJsonl *shared.SourceS3FileFormatJsonl
- if r.Configuration.Format.SourceS3FileFormatJsonl != nil {
+ var sourceS3Jsonl *shared.SourceS3Jsonl
+ if r.Configuration.Format.Jsonl != nil {
blockSize1 := new(int64)
- if !r.Configuration.Format.SourceS3FileFormatJsonl.BlockSize.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatJsonl.BlockSize.IsNull() {
- *blockSize1 = r.Configuration.Format.SourceS3FileFormatJsonl.BlockSize.ValueInt64()
+ if !r.Configuration.Format.Jsonl.BlockSize.IsUnknown() && !r.Configuration.Format.Jsonl.BlockSize.IsNull() {
+ *blockSize1 = r.Configuration.Format.Jsonl.BlockSize.ValueInt64()
} else {
blockSize1 = nil
}
- filetype3 := new(shared.SourceS3FileFormatJsonlFiletype)
- if !r.Configuration.Format.SourceS3FileFormatJsonl.Filetype.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatJsonl.Filetype.IsNull() {
- *filetype3 = shared.SourceS3FileFormatJsonlFiletype(r.Configuration.Format.SourceS3FileFormatJsonl.Filetype.ValueString())
- } else {
- filetype3 = nil
- }
newlinesInValues1 := new(bool)
- if !r.Configuration.Format.SourceS3FileFormatJsonl.NewlinesInValues.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatJsonl.NewlinesInValues.IsNull() {
- *newlinesInValues1 = r.Configuration.Format.SourceS3FileFormatJsonl.NewlinesInValues.ValueBool()
+ if !r.Configuration.Format.Jsonl.NewlinesInValues.IsUnknown() && !r.Configuration.Format.Jsonl.NewlinesInValues.IsNull() {
+ *newlinesInValues1 = r.Configuration.Format.Jsonl.NewlinesInValues.ValueBool()
} else {
newlinesInValues1 = nil
}
- unexpectedFieldBehavior := new(shared.SourceS3FileFormatJsonlUnexpectedFieldBehavior)
- if !r.Configuration.Format.SourceS3FileFormatJsonl.UnexpectedFieldBehavior.IsUnknown() && !r.Configuration.Format.SourceS3FileFormatJsonl.UnexpectedFieldBehavior.IsNull() {
- *unexpectedFieldBehavior = shared.SourceS3FileFormatJsonlUnexpectedFieldBehavior(r.Configuration.Format.SourceS3FileFormatJsonl.UnexpectedFieldBehavior.ValueString())
+ unexpectedFieldBehavior := new(shared.SourceS3UnexpectedFieldBehavior)
+ if !r.Configuration.Format.Jsonl.UnexpectedFieldBehavior.IsUnknown() && !r.Configuration.Format.Jsonl.UnexpectedFieldBehavior.IsNull() {
+ *unexpectedFieldBehavior = shared.SourceS3UnexpectedFieldBehavior(r.Configuration.Format.Jsonl.UnexpectedFieldBehavior.ValueString())
} else {
unexpectedFieldBehavior = nil
}
- sourceS3FileFormatJsonl = &shared.SourceS3FileFormatJsonl{
+ sourceS3Jsonl = &shared.SourceS3Jsonl{
BlockSize: blockSize1,
- Filetype: filetype3,
NewlinesInValues: newlinesInValues1,
UnexpectedFieldBehavior: unexpectedFieldBehavior,
}
}
- if sourceS3FileFormatJsonl != nil {
+ if sourceS3Jsonl != nil {
format = &shared.SourceS3FileFormat{
- SourceS3FileFormatJsonl: sourceS3FileFormatJsonl,
+ SourceS3Jsonl: sourceS3Jsonl,
}
}
}
@@ -274,7 +245,6 @@ func (r *SourceS3ResourceModel) ToCreateSDKType() *shared.SourceS3CreateRequest
} else {
schema = nil
}
- sourceType := shared.SourceS3S3(r.Configuration.SourceType.ValueString())
startDate1 := new(time.Time)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate1, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
@@ -289,228 +259,190 @@ func (r *SourceS3ResourceModel) ToCreateSDKType() *shared.SourceS3CreateRequest
} else {
daysToSyncIfHistoryIsFull = nil
}
- fileType := streamsItem.FileType.ValueString()
- var format1 *shared.SourceS3FileBasedStreamConfigFormat
- if streamsItem.Format != nil {
- var sourceS3FileBasedStreamConfigFormatAvroFormat *shared.SourceS3FileBasedStreamConfigFormatAvroFormat
- if streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat != nil {
- doubleAsString := new(bool)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.DoubleAsString.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.DoubleAsString.IsNull() {
- *doubleAsString = streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.DoubleAsString.ValueBool()
- } else {
- doubleAsString = nil
- }
- filetype4 := new(shared.SourceS3FileBasedStreamConfigFormatAvroFormatFiletype)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.Filetype.IsNull() {
- *filetype4 = shared.SourceS3FileBasedStreamConfigFormatAvroFormatFiletype(streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.Filetype.ValueString())
- } else {
- filetype4 = nil
- }
- sourceS3FileBasedStreamConfigFormatAvroFormat = &shared.SourceS3FileBasedStreamConfigFormatAvroFormat{
- DoubleAsString: doubleAsString,
- Filetype: filetype4,
- }
+ var format1 shared.SourceS3Format
+ var sourceS3AvroFormat *shared.SourceS3AvroFormat
+ if streamsItem.Format.AvroFormat != nil {
+ doubleAsString := new(bool)
+ if !streamsItem.Format.AvroFormat.DoubleAsString.IsUnknown() && !streamsItem.Format.AvroFormat.DoubleAsString.IsNull() {
+ *doubleAsString = streamsItem.Format.AvroFormat.DoubleAsString.ValueBool()
+ } else {
+ doubleAsString = nil
}
- if sourceS3FileBasedStreamConfigFormatAvroFormat != nil {
- format1 = &shared.SourceS3FileBasedStreamConfigFormat{
- SourceS3FileBasedStreamConfigFormatAvroFormat: sourceS3FileBasedStreamConfigFormatAvroFormat,
- }
+ sourceS3AvroFormat = &shared.SourceS3AvroFormat{
+ DoubleAsString: doubleAsString,
}
- var sourceS3FileBasedStreamConfigFormatCSVFormat *shared.SourceS3FileBasedStreamConfigFormatCSVFormat
- if streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat != nil {
- delimiter1 := new(string)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Delimiter.IsNull() {
- *delimiter1 = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Delimiter.ValueString()
- } else {
- delimiter1 = nil
- }
- doubleQuote1 := new(bool)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.DoubleQuote.IsNull() {
- *doubleQuote1 = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.DoubleQuote.ValueBool()
- } else {
- doubleQuote1 = nil
- }
- encoding1 := new(string)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Encoding.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Encoding.IsNull() {
- *encoding1 = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Encoding.ValueString()
- } else {
- encoding1 = nil
- }
- escapeChar1 := new(string)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.EscapeChar.IsNull() {
- *escapeChar1 = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.EscapeChar.ValueString()
- } else {
- escapeChar1 = nil
+ }
+ if sourceS3AvroFormat != nil {
+ format1 = shared.SourceS3Format{
+ SourceS3AvroFormat: sourceS3AvroFormat,
+ }
+ }
+ var sourceS3CSVFormat *shared.SourceS3CSVFormat
+ if streamsItem.Format.CSVFormat != nil {
+ delimiter1 := new(string)
+ if !streamsItem.Format.CSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.CSVFormat.Delimiter.IsNull() {
+ *delimiter1 = streamsItem.Format.CSVFormat.Delimiter.ValueString()
+ } else {
+ delimiter1 = nil
+ }
+ doubleQuote1 := new(bool)
+ if !streamsItem.Format.CSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.CSVFormat.DoubleQuote.IsNull() {
+ *doubleQuote1 = streamsItem.Format.CSVFormat.DoubleQuote.ValueBool()
+ } else {
+ doubleQuote1 = nil
+ }
+ encoding1 := new(string)
+ if !streamsItem.Format.CSVFormat.Encoding.IsUnknown() && !streamsItem.Format.CSVFormat.Encoding.IsNull() {
+ *encoding1 = streamsItem.Format.CSVFormat.Encoding.ValueString()
+ } else {
+ encoding1 = nil
+ }
+ escapeChar1 := new(string)
+ if !streamsItem.Format.CSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.CSVFormat.EscapeChar.IsNull() {
+ *escapeChar1 = streamsItem.Format.CSVFormat.EscapeChar.ValueString()
+ } else {
+ escapeChar1 = nil
+ }
+ var falseValues []string = nil
+ for _, falseValuesItem := range streamsItem.Format.CSVFormat.FalseValues {
+ falseValues = append(falseValues, falseValuesItem.ValueString())
+ }
+ var headerDefinition *shared.SourceS3CSVHeaderDefinition
+ if streamsItem.Format.CSVFormat.HeaderDefinition != nil {
+ var sourceS3FromCSV *shared.SourceS3FromCSV
+ if streamsItem.Format.CSVFormat.HeaderDefinition.FromCSV != nil {
+ sourceS3FromCSV = &shared.SourceS3FromCSV{}
}
- var falseValues []string = nil
- for _, falseValuesItem := range streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.FalseValues {
- falseValues = append(falseValues, falseValuesItem.ValueString())
+ if sourceS3FromCSV != nil {
+ headerDefinition = &shared.SourceS3CSVHeaderDefinition{
+ SourceS3FromCSV: sourceS3FromCSV,
+ }
}
- filetype5 := new(shared.SourceS3FileBasedStreamConfigFormatCSVFormatFiletype)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Filetype.IsNull() {
- *filetype5 = shared.SourceS3FileBasedStreamConfigFormatCSVFormatFiletype(streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Filetype.ValueString())
- } else {
- filetype5 = nil
+ var sourceS3Autogenerated *shared.SourceS3Autogenerated
+ if streamsItem.Format.CSVFormat.HeaderDefinition.Autogenerated != nil {
+ sourceS3Autogenerated = &shared.SourceS3Autogenerated{}
}
- var headerDefinition *shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition
- if streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition != nil {
- var sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV
- if streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil {
- headerDefinitionType := new(shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.IsNull() {
- *headerDefinitionType = shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType(streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.ValueString())
- } else {
- headerDefinitionType = nil
- }
- sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV{
- HeaderDefinitionType: headerDefinitionType,
- }
- }
- if sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil {
- headerDefinition = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV: sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV,
- }
- }
- var sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated
- if streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil {
- headerDefinitionType1 := new(shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.IsNull() {
- *headerDefinitionType1 = shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType(streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.ValueString())
- } else {
- headerDefinitionType1 = nil
- }
- sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated{
- HeaderDefinitionType: headerDefinitionType1,
- }
- }
- if sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil {
- headerDefinition = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated: sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated,
- }
+ if sourceS3Autogenerated != nil {
+ headerDefinition = &shared.SourceS3CSVHeaderDefinition{
+ SourceS3Autogenerated: sourceS3Autogenerated,
}
- var sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided
- if streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil {
- var columnNames []string = nil
- for _, columnNamesItem := range streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.ColumnNames {
- columnNames = append(columnNames, columnNamesItem.ValueString())
- }
- headerDefinitionType2 := new(shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.IsNull() {
- *headerDefinitionType2 = shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType(streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.ValueString())
- } else {
- headerDefinitionType2 = nil
- }
- sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided{
- ColumnNames: columnNames,
- HeaderDefinitionType: headerDefinitionType2,
- }
+ }
+ var sourceS3UserProvided *shared.SourceS3UserProvided
+ if streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided != nil {
+ var columnNames []string = nil
+ for _, columnNamesItem := range streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided.ColumnNames {
+ columnNames = append(columnNames, columnNamesItem.ValueString())
}
- if sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil {
- headerDefinition = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided: sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided,
- }
+ sourceS3UserProvided = &shared.SourceS3UserProvided{
+ ColumnNames: columnNames,
}
}
- inferenceType := new(shared.SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.InferenceType.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.InferenceType.IsNull() {
- *inferenceType = shared.SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType(streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.InferenceType.ValueString())
- } else {
- inferenceType = nil
- }
- var nullValues []string = nil
- for _, nullValuesItem := range streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.NullValues {
- nullValues = append(nullValues, nullValuesItem.ValueString())
- }
- quoteChar1 := new(string)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.QuoteChar.IsNull() {
- *quoteChar1 = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.QuoteChar.ValueString()
- } else {
- quoteChar1 = nil
- }
- skipRowsAfterHeader := new(int64)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.IsNull() {
- *skipRowsAfterHeader = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.ValueInt64()
- } else {
- skipRowsAfterHeader = nil
- }
- skipRowsBeforeHeader := new(int64)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.IsNull() {
- *skipRowsBeforeHeader = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.ValueInt64()
- } else {
- skipRowsBeforeHeader = nil
- }
- stringsCanBeNull := new(bool)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.IsNull() {
- *stringsCanBeNull = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.ValueBool()
- } else {
- stringsCanBeNull = nil
- }
- var trueValues []string = nil
- for _, trueValuesItem := range streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.TrueValues {
- trueValues = append(trueValues, trueValuesItem.ValueString())
- }
- sourceS3FileBasedStreamConfigFormatCSVFormat = &shared.SourceS3FileBasedStreamConfigFormatCSVFormat{
- Delimiter: delimiter1,
- DoubleQuote: doubleQuote1,
- Encoding: encoding1,
- EscapeChar: escapeChar1,
- FalseValues: falseValues,
- Filetype: filetype5,
- HeaderDefinition: headerDefinition,
- InferenceType: inferenceType,
- NullValues: nullValues,
- QuoteChar: quoteChar1,
- SkipRowsAfterHeader: skipRowsAfterHeader,
- SkipRowsBeforeHeader: skipRowsBeforeHeader,
- StringsCanBeNull: stringsCanBeNull,
- TrueValues: trueValues,
+ if sourceS3UserProvided != nil {
+ headerDefinition = &shared.SourceS3CSVHeaderDefinition{
+ SourceS3UserProvided: sourceS3UserProvided,
+ }
}
}
- if sourceS3FileBasedStreamConfigFormatCSVFormat != nil {
- format1 = &shared.SourceS3FileBasedStreamConfigFormat{
- SourceS3FileBasedStreamConfigFormatCSVFormat: sourceS3FileBasedStreamConfigFormatCSVFormat,
- }
+ inferenceType := new(shared.SourceS3InferenceType)
+ if !streamsItem.Format.CSVFormat.InferenceType.IsUnknown() && !streamsItem.Format.CSVFormat.InferenceType.IsNull() {
+ *inferenceType = shared.SourceS3InferenceType(streamsItem.Format.CSVFormat.InferenceType.ValueString())
+ } else {
+ inferenceType = nil
}
- var sourceS3FileBasedStreamConfigFormatJsonlFormat *shared.SourceS3FileBasedStreamConfigFormatJsonlFormat
- if streamsItem.Format.SourceS3FileBasedStreamConfigFormatJsonlFormat != nil {
- filetype6 := new(shared.SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatJsonlFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatJsonlFormat.Filetype.IsNull() {
- *filetype6 = shared.SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype(streamsItem.Format.SourceS3FileBasedStreamConfigFormatJsonlFormat.Filetype.ValueString())
- } else {
- filetype6 = nil
- }
- sourceS3FileBasedStreamConfigFormatJsonlFormat = &shared.SourceS3FileBasedStreamConfigFormatJsonlFormat{
- Filetype: filetype6,
- }
+ var nullValues []string = nil
+ for _, nullValuesItem := range streamsItem.Format.CSVFormat.NullValues {
+ nullValues = append(nullValues, nullValuesItem.ValueString())
}
- if sourceS3FileBasedStreamConfigFormatJsonlFormat != nil {
- format1 = &shared.SourceS3FileBasedStreamConfigFormat{
- SourceS3FileBasedStreamConfigFormatJsonlFormat: sourceS3FileBasedStreamConfigFormatJsonlFormat,
- }
+ quoteChar1 := new(string)
+ if !streamsItem.Format.CSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.CSVFormat.QuoteChar.IsNull() {
+ *quoteChar1 = streamsItem.Format.CSVFormat.QuoteChar.ValueString()
+ } else {
+ quoteChar1 = nil
}
- var sourceS3FileBasedStreamConfigFormatParquetFormat *shared.SourceS3FileBasedStreamConfigFormatParquetFormat
- if streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat != nil {
- decimalAsFloat := new(bool)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.IsNull() {
- *decimalAsFloat = streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.ValueBool()
- } else {
- decimalAsFloat = nil
- }
- filetype7 := new(shared.SourceS3FileBasedStreamConfigFormatParquetFormatFiletype)
- if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.Filetype.IsNull() {
- *filetype7 = shared.SourceS3FileBasedStreamConfigFormatParquetFormatFiletype(streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.Filetype.ValueString())
- } else {
- filetype7 = nil
- }
- sourceS3FileBasedStreamConfigFormatParquetFormat = &shared.SourceS3FileBasedStreamConfigFormatParquetFormat{
- DecimalAsFloat: decimalAsFloat,
- Filetype: filetype7,
- }
+ skipRowsAfterHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsNull() {
+ *skipRowsAfterHeader = streamsItem.Format.CSVFormat.SkipRowsAfterHeader.ValueInt64()
+ } else {
+ skipRowsAfterHeader = nil
}
- if sourceS3FileBasedStreamConfigFormatParquetFormat != nil {
- format1 = &shared.SourceS3FileBasedStreamConfigFormat{
- SourceS3FileBasedStreamConfigFormatParquetFormat: sourceS3FileBasedStreamConfigFormatParquetFormat,
- }
+ skipRowsBeforeHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsNull() {
+ *skipRowsBeforeHeader = streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.ValueInt64()
+ } else {
+ skipRowsBeforeHeader = nil
+ }
+ stringsCanBeNull := new(bool)
+ if !streamsItem.Format.CSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.CSVFormat.StringsCanBeNull.IsNull() {
+ *stringsCanBeNull = streamsItem.Format.CSVFormat.StringsCanBeNull.ValueBool()
+ } else {
+ stringsCanBeNull = nil
+ }
+ var trueValues []string = nil
+ for _, trueValuesItem := range streamsItem.Format.CSVFormat.TrueValues {
+ trueValues = append(trueValues, trueValuesItem.ValueString())
+ }
+ sourceS3CSVFormat = &shared.SourceS3CSVFormat{
+ Delimiter: delimiter1,
+ DoubleQuote: doubleQuote1,
+ Encoding: encoding1,
+ EscapeChar: escapeChar1,
+ FalseValues: falseValues,
+ HeaderDefinition: headerDefinition,
+ InferenceType: inferenceType,
+ NullValues: nullValues,
+ QuoteChar: quoteChar1,
+ SkipRowsAfterHeader: skipRowsAfterHeader,
+ SkipRowsBeforeHeader: skipRowsBeforeHeader,
+ StringsCanBeNull: stringsCanBeNull,
+ TrueValues: trueValues,
+ }
+ }
+ if sourceS3CSVFormat != nil {
+ format1 = shared.SourceS3Format{
+ SourceS3CSVFormat: sourceS3CSVFormat,
+ }
+ }
+ var sourceS3JsonlFormat *shared.SourceS3JsonlFormat
+ if streamsItem.Format.JsonlFormat != nil {
+ sourceS3JsonlFormat = &shared.SourceS3JsonlFormat{}
+ }
+ if sourceS3JsonlFormat != nil {
+ format1 = shared.SourceS3Format{
+ SourceS3JsonlFormat: sourceS3JsonlFormat,
+ }
+ }
+ var sourceS3ParquetFormat *shared.SourceS3ParquetFormat
+ if streamsItem.Format.ParquetFormat != nil {
+ decimalAsFloat := new(bool)
+ if !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsUnknown() && !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsNull() {
+ *decimalAsFloat = streamsItem.Format.ParquetFormat.DecimalAsFloat.ValueBool()
+ } else {
+ decimalAsFloat = nil
+ }
+ sourceS3ParquetFormat = &shared.SourceS3ParquetFormat{
+ DecimalAsFloat: decimalAsFloat,
+ }
+ }
+ if sourceS3ParquetFormat != nil {
+ format1 = shared.SourceS3Format{
+ SourceS3ParquetFormat: sourceS3ParquetFormat,
+ }
+ }
+ var sourceS3DocumentFileTypeFormatExperimental *shared.SourceS3DocumentFileTypeFormatExperimental
+ if streamsItem.Format.DocumentFileTypeFormatExperimental != nil {
+ skipUnprocessableFileTypes := new(bool)
+ if !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsUnknown() && !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsNull() {
+ *skipUnprocessableFileTypes = streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.ValueBool()
+ } else {
+ skipUnprocessableFileTypes = nil
+ }
+ sourceS3DocumentFileTypeFormatExperimental = &shared.SourceS3DocumentFileTypeFormatExperimental{
+ SkipUnprocessableFileTypes: skipUnprocessableFileTypes,
+ }
+ }
+ if sourceS3DocumentFileTypeFormatExperimental != nil {
+ format1 = shared.SourceS3Format{
+ SourceS3DocumentFileTypeFormatExperimental: sourceS3DocumentFileTypeFormatExperimental,
}
}
var globs []string = nil
@@ -542,15 +474,14 @@ func (r *SourceS3ResourceModel) ToCreateSDKType() *shared.SourceS3CreateRequest
} else {
schemaless = nil
}
- validationPolicy := new(shared.SourceS3FileBasedStreamConfigValidationPolicy)
+ validationPolicy := new(shared.SourceS3ValidationPolicy)
if !streamsItem.ValidationPolicy.IsUnknown() && !streamsItem.ValidationPolicy.IsNull() {
- *validationPolicy = shared.SourceS3FileBasedStreamConfigValidationPolicy(streamsItem.ValidationPolicy.ValueString())
+ *validationPolicy = shared.SourceS3ValidationPolicy(streamsItem.ValidationPolicy.ValueString())
} else {
validationPolicy = nil
}
streams = append(streams, shared.SourceS3FileBasedStreamConfig{
DaysToSyncIfHistoryIsFull: daysToSyncIfHistoryIsFull,
- FileType: fileType,
Format: format1,
Globs: globs,
InputSchema: inputSchema,
@@ -571,10 +502,15 @@ func (r *SourceS3ResourceModel) ToCreateSDKType() *shared.SourceS3CreateRequest
PathPattern: pathPattern,
Provider: provider,
Schema: schema,
- SourceType: sourceType,
StartDate: startDate1,
Streams: streams,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name1 := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -585,6 +521,7 @@ func (r *SourceS3ResourceModel) ToCreateSDKType() *shared.SourceS3CreateRequest
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceS3CreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name1,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -625,75 +562,69 @@ func (r *SourceS3ResourceModel) ToUpdateSDKType() *shared.SourceS3PutRequest {
}
var format *shared.SourceS3UpdateFileFormat
if r.Configuration.Format != nil {
- var sourceS3UpdateFileFormatCSV *shared.SourceS3UpdateFileFormatCSV
- if r.Configuration.Format.SourceS3UpdateFileFormatCSV != nil {
+ var csv *shared.Csv
+ if r.Configuration.Format.Csv != nil {
additionalReaderOptions := new(string)
- if !r.Configuration.Format.SourceS3UpdateFileFormatCSV.AdditionalReaderOptions.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatCSV.AdditionalReaderOptions.IsNull() {
- *additionalReaderOptions = r.Configuration.Format.SourceS3UpdateFileFormatCSV.AdditionalReaderOptions.ValueString()
+ if !r.Configuration.Format.Csv.AdditionalReaderOptions.IsUnknown() && !r.Configuration.Format.Csv.AdditionalReaderOptions.IsNull() {
+ *additionalReaderOptions = r.Configuration.Format.Csv.AdditionalReaderOptions.ValueString()
} else {
additionalReaderOptions = nil
}
advancedOptions := new(string)
- if !r.Configuration.Format.SourceS3UpdateFileFormatCSV.AdvancedOptions.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatCSV.AdvancedOptions.IsNull() {
- *advancedOptions = r.Configuration.Format.SourceS3UpdateFileFormatCSV.AdvancedOptions.ValueString()
+ if !r.Configuration.Format.Csv.AdvancedOptions.IsUnknown() && !r.Configuration.Format.Csv.AdvancedOptions.IsNull() {
+ *advancedOptions = r.Configuration.Format.Csv.AdvancedOptions.ValueString()
} else {
advancedOptions = nil
}
blockSize := new(int64)
- if !r.Configuration.Format.SourceS3UpdateFileFormatCSV.BlockSize.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatCSV.BlockSize.IsNull() {
- *blockSize = r.Configuration.Format.SourceS3UpdateFileFormatCSV.BlockSize.ValueInt64()
+ if !r.Configuration.Format.Csv.BlockSize.IsUnknown() && !r.Configuration.Format.Csv.BlockSize.IsNull() {
+ *blockSize = r.Configuration.Format.Csv.BlockSize.ValueInt64()
} else {
blockSize = nil
}
delimiter := new(string)
- if !r.Configuration.Format.SourceS3UpdateFileFormatCSV.Delimiter.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatCSV.Delimiter.IsNull() {
- *delimiter = r.Configuration.Format.SourceS3UpdateFileFormatCSV.Delimiter.ValueString()
+ if !r.Configuration.Format.Csv.Delimiter.IsUnknown() && !r.Configuration.Format.Csv.Delimiter.IsNull() {
+ *delimiter = r.Configuration.Format.Csv.Delimiter.ValueString()
} else {
delimiter = nil
}
doubleQuote := new(bool)
- if !r.Configuration.Format.SourceS3UpdateFileFormatCSV.DoubleQuote.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatCSV.DoubleQuote.IsNull() {
- *doubleQuote = r.Configuration.Format.SourceS3UpdateFileFormatCSV.DoubleQuote.ValueBool()
+ if !r.Configuration.Format.Csv.DoubleQuote.IsUnknown() && !r.Configuration.Format.Csv.DoubleQuote.IsNull() {
+ *doubleQuote = r.Configuration.Format.Csv.DoubleQuote.ValueBool()
} else {
doubleQuote = nil
}
encoding := new(string)
- if !r.Configuration.Format.SourceS3UpdateFileFormatCSV.Encoding.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatCSV.Encoding.IsNull() {
- *encoding = r.Configuration.Format.SourceS3UpdateFileFormatCSV.Encoding.ValueString()
+ if !r.Configuration.Format.Csv.Encoding.IsUnknown() && !r.Configuration.Format.Csv.Encoding.IsNull() {
+ *encoding = r.Configuration.Format.Csv.Encoding.ValueString()
} else {
encoding = nil
}
escapeChar := new(string)
- if !r.Configuration.Format.SourceS3UpdateFileFormatCSV.EscapeChar.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatCSV.EscapeChar.IsNull() {
- *escapeChar = r.Configuration.Format.SourceS3UpdateFileFormatCSV.EscapeChar.ValueString()
+ if !r.Configuration.Format.Csv.EscapeChar.IsUnknown() && !r.Configuration.Format.Csv.EscapeChar.IsNull() {
+ *escapeChar = r.Configuration.Format.Csv.EscapeChar.ValueString()
} else {
escapeChar = nil
}
- filetype := new(shared.SourceS3UpdateFileFormatCSVFiletype)
- if !r.Configuration.Format.SourceS3UpdateFileFormatCSV.Filetype.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatCSV.Filetype.IsNull() {
- *filetype = shared.SourceS3UpdateFileFormatCSVFiletype(r.Configuration.Format.SourceS3UpdateFileFormatCSV.Filetype.ValueString())
- } else {
- filetype = nil
- }
inferDatatypes := new(bool)
- if !r.Configuration.Format.SourceS3UpdateFileFormatCSV.InferDatatypes.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatCSV.InferDatatypes.IsNull() {
- *inferDatatypes = r.Configuration.Format.SourceS3UpdateFileFormatCSV.InferDatatypes.ValueBool()
+ if !r.Configuration.Format.Csv.InferDatatypes.IsUnknown() && !r.Configuration.Format.Csv.InferDatatypes.IsNull() {
+ *inferDatatypes = r.Configuration.Format.Csv.InferDatatypes.ValueBool()
} else {
inferDatatypes = nil
}
newlinesInValues := new(bool)
- if !r.Configuration.Format.SourceS3UpdateFileFormatCSV.NewlinesInValues.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatCSV.NewlinesInValues.IsNull() {
- *newlinesInValues = r.Configuration.Format.SourceS3UpdateFileFormatCSV.NewlinesInValues.ValueBool()
+ if !r.Configuration.Format.Csv.NewlinesInValues.IsUnknown() && !r.Configuration.Format.Csv.NewlinesInValues.IsNull() {
+ *newlinesInValues = r.Configuration.Format.Csv.NewlinesInValues.ValueBool()
} else {
newlinesInValues = nil
}
quoteChar := new(string)
- if !r.Configuration.Format.SourceS3UpdateFileFormatCSV.QuoteChar.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatCSV.QuoteChar.IsNull() {
- *quoteChar = r.Configuration.Format.SourceS3UpdateFileFormatCSV.QuoteChar.ValueString()
+ if !r.Configuration.Format.Csv.QuoteChar.IsUnknown() && !r.Configuration.Format.Csv.QuoteChar.IsNull() {
+ *quoteChar = r.Configuration.Format.Csv.QuoteChar.ValueString()
} else {
quoteChar = nil
}
- sourceS3UpdateFileFormatCSV = &shared.SourceS3UpdateFileFormatCSV{
+ csv = &shared.Csv{
AdditionalReaderOptions: additionalReaderOptions,
AdvancedOptions: advancedOptions,
BlockSize: blockSize,
@@ -701,106 +632,83 @@ func (r *SourceS3ResourceModel) ToUpdateSDKType() *shared.SourceS3PutRequest {
DoubleQuote: doubleQuote,
Encoding: encoding,
EscapeChar: escapeChar,
- Filetype: filetype,
InferDatatypes: inferDatatypes,
NewlinesInValues: newlinesInValues,
QuoteChar: quoteChar,
}
}
- if sourceS3UpdateFileFormatCSV != nil {
+ if csv != nil {
format = &shared.SourceS3UpdateFileFormat{
- SourceS3UpdateFileFormatCSV: sourceS3UpdateFileFormatCSV,
+ Csv: csv,
}
}
- var sourceS3UpdateFileFormatParquet *shared.SourceS3UpdateFileFormatParquet
- if r.Configuration.Format.SourceS3UpdateFileFormatParquet != nil {
+ var parquet *shared.Parquet
+ if r.Configuration.Format.Parquet != nil {
batchSize := new(int64)
- if !r.Configuration.Format.SourceS3UpdateFileFormatParquet.BatchSize.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatParquet.BatchSize.IsNull() {
- *batchSize = r.Configuration.Format.SourceS3UpdateFileFormatParquet.BatchSize.ValueInt64()
+ if !r.Configuration.Format.Parquet.BatchSize.IsUnknown() && !r.Configuration.Format.Parquet.BatchSize.IsNull() {
+ *batchSize = r.Configuration.Format.Parquet.BatchSize.ValueInt64()
} else {
batchSize = nil
}
bufferSize := new(int64)
- if !r.Configuration.Format.SourceS3UpdateFileFormatParquet.BufferSize.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatParquet.BufferSize.IsNull() {
- *bufferSize = r.Configuration.Format.SourceS3UpdateFileFormatParquet.BufferSize.ValueInt64()
+ if !r.Configuration.Format.Parquet.BufferSize.IsUnknown() && !r.Configuration.Format.Parquet.BufferSize.IsNull() {
+ *bufferSize = r.Configuration.Format.Parquet.BufferSize.ValueInt64()
} else {
bufferSize = nil
}
var columns []string = nil
- for _, columnsItem := range r.Configuration.Format.SourceS3UpdateFileFormatParquet.Columns {
+ for _, columnsItem := range r.Configuration.Format.Parquet.Columns {
columns = append(columns, columnsItem.ValueString())
}
- filetype1 := new(shared.SourceS3UpdateFileFormatParquetFiletype)
- if !r.Configuration.Format.SourceS3UpdateFileFormatParquet.Filetype.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatParquet.Filetype.IsNull() {
- *filetype1 = shared.SourceS3UpdateFileFormatParquetFiletype(r.Configuration.Format.SourceS3UpdateFileFormatParquet.Filetype.ValueString())
- } else {
- filetype1 = nil
- }
- sourceS3UpdateFileFormatParquet = &shared.SourceS3UpdateFileFormatParquet{
+ parquet = &shared.Parquet{
BatchSize: batchSize,
BufferSize: bufferSize,
Columns: columns,
- Filetype: filetype1,
}
}
- if sourceS3UpdateFileFormatParquet != nil {
+ if parquet != nil {
format = &shared.SourceS3UpdateFileFormat{
- SourceS3UpdateFileFormatParquet: sourceS3UpdateFileFormatParquet,
+ Parquet: parquet,
}
}
- var sourceS3UpdateFileFormatAvro *shared.SourceS3UpdateFileFormatAvro
- if r.Configuration.Format.SourceS3UpdateFileFormatAvro != nil {
- filetype2 := new(shared.SourceS3UpdateFileFormatAvroFiletype)
- if !r.Configuration.Format.SourceS3UpdateFileFormatAvro.Filetype.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatAvro.Filetype.IsNull() {
- *filetype2 = shared.SourceS3UpdateFileFormatAvroFiletype(r.Configuration.Format.SourceS3UpdateFileFormatAvro.Filetype.ValueString())
- } else {
- filetype2 = nil
- }
- sourceS3UpdateFileFormatAvro = &shared.SourceS3UpdateFileFormatAvro{
- Filetype: filetype2,
- }
+ var avro *shared.Avro
+ if r.Configuration.Format.Avro != nil {
+ avro = &shared.Avro{}
}
- if sourceS3UpdateFileFormatAvro != nil {
+ if avro != nil {
format = &shared.SourceS3UpdateFileFormat{
- SourceS3UpdateFileFormatAvro: sourceS3UpdateFileFormatAvro,
+ Avro: avro,
}
}
- var sourceS3UpdateFileFormatJsonl *shared.SourceS3UpdateFileFormatJsonl
- if r.Configuration.Format.SourceS3UpdateFileFormatJsonl != nil {
+ var jsonl *shared.Jsonl
+ if r.Configuration.Format.Jsonl != nil {
blockSize1 := new(int64)
- if !r.Configuration.Format.SourceS3UpdateFileFormatJsonl.BlockSize.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatJsonl.BlockSize.IsNull() {
- *blockSize1 = r.Configuration.Format.SourceS3UpdateFileFormatJsonl.BlockSize.ValueInt64()
+ if !r.Configuration.Format.Jsonl.BlockSize.IsUnknown() && !r.Configuration.Format.Jsonl.BlockSize.IsNull() {
+ *blockSize1 = r.Configuration.Format.Jsonl.BlockSize.ValueInt64()
} else {
blockSize1 = nil
}
- filetype3 := new(shared.SourceS3UpdateFileFormatJsonlFiletype)
- if !r.Configuration.Format.SourceS3UpdateFileFormatJsonl.Filetype.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatJsonl.Filetype.IsNull() {
- *filetype3 = shared.SourceS3UpdateFileFormatJsonlFiletype(r.Configuration.Format.SourceS3UpdateFileFormatJsonl.Filetype.ValueString())
- } else {
- filetype3 = nil
- }
newlinesInValues1 := new(bool)
- if !r.Configuration.Format.SourceS3UpdateFileFormatJsonl.NewlinesInValues.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatJsonl.NewlinesInValues.IsNull() {
- *newlinesInValues1 = r.Configuration.Format.SourceS3UpdateFileFormatJsonl.NewlinesInValues.ValueBool()
+ if !r.Configuration.Format.Jsonl.NewlinesInValues.IsUnknown() && !r.Configuration.Format.Jsonl.NewlinesInValues.IsNull() {
+ *newlinesInValues1 = r.Configuration.Format.Jsonl.NewlinesInValues.ValueBool()
} else {
newlinesInValues1 = nil
}
- unexpectedFieldBehavior := new(shared.SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior)
- if !r.Configuration.Format.SourceS3UpdateFileFormatJsonl.UnexpectedFieldBehavior.IsUnknown() && !r.Configuration.Format.SourceS3UpdateFileFormatJsonl.UnexpectedFieldBehavior.IsNull() {
- *unexpectedFieldBehavior = shared.SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior(r.Configuration.Format.SourceS3UpdateFileFormatJsonl.UnexpectedFieldBehavior.ValueString())
+ unexpectedFieldBehavior := new(shared.UnexpectedFieldBehavior)
+ if !r.Configuration.Format.Jsonl.UnexpectedFieldBehavior.IsUnknown() && !r.Configuration.Format.Jsonl.UnexpectedFieldBehavior.IsNull() {
+ *unexpectedFieldBehavior = shared.UnexpectedFieldBehavior(r.Configuration.Format.Jsonl.UnexpectedFieldBehavior.ValueString())
} else {
unexpectedFieldBehavior = nil
}
- sourceS3UpdateFileFormatJsonl = &shared.SourceS3UpdateFileFormatJsonl{
+ jsonl = &shared.Jsonl{
BlockSize: blockSize1,
- Filetype: filetype3,
NewlinesInValues: newlinesInValues1,
UnexpectedFieldBehavior: unexpectedFieldBehavior,
}
}
- if sourceS3UpdateFileFormatJsonl != nil {
+ if jsonl != nil {
format = &shared.SourceS3UpdateFileFormat{
- SourceS3UpdateFileFormatJsonl: sourceS3UpdateFileFormatJsonl,
+ Jsonl: jsonl,
}
}
}
@@ -810,7 +718,7 @@ func (r *SourceS3ResourceModel) ToUpdateSDKType() *shared.SourceS3PutRequest {
} else {
pathPattern = nil
}
- var provider *shared.SourceS3UpdateS3AmazonWebServices
+ var provider *shared.S3AmazonWebServices
if r.Configuration.Provider != nil {
awsAccessKeyId1 := new(string)
if !r.Configuration.Provider.AwsAccessKeyID.IsUnknown() && !r.Configuration.Provider.AwsAccessKeyID.IsNull() {
@@ -848,7 +756,7 @@ func (r *SourceS3ResourceModel) ToUpdateSDKType() *shared.SourceS3PutRequest {
} else {
startDate = nil
}
- provider = &shared.SourceS3UpdateS3AmazonWebServices{
+ provider = &shared.S3AmazonWebServices{
AwsAccessKeyID: awsAccessKeyId1,
AwsSecretAccessKey: awsSecretAccessKey1,
Bucket: bucket1,
@@ -877,228 +785,190 @@ func (r *SourceS3ResourceModel) ToUpdateSDKType() *shared.SourceS3PutRequest {
} else {
daysToSyncIfHistoryIsFull = nil
}
- fileType := streamsItem.FileType.ValueString()
- var format1 *shared.SourceS3UpdateFileBasedStreamConfigFormat
- if streamsItem.Format != nil {
- var sourceS3UpdateFileBasedStreamConfigFormatAvroFormat *shared.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat
- if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat != nil {
- doubleAsString := new(bool)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.DoubleAsString.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.DoubleAsString.IsNull() {
- *doubleAsString = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.DoubleAsString.ValueBool()
- } else {
- doubleAsString = nil
- }
- filetype4 := new(shared.SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.Filetype.IsNull() {
- *filetype4 = shared.SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.Filetype.ValueString())
- } else {
- filetype4 = nil
- }
- sourceS3UpdateFileBasedStreamConfigFormatAvroFormat = &shared.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat{
- DoubleAsString: doubleAsString,
- Filetype: filetype4,
- }
+ var format1 shared.SourceS3UpdateFormat
+ var sourceS3UpdateAvroFormat *shared.SourceS3UpdateAvroFormat
+ if streamsItem.Format.AvroFormat != nil {
+ doubleAsString := new(bool)
+ if !streamsItem.Format.AvroFormat.DoubleAsString.IsUnknown() && !streamsItem.Format.AvroFormat.DoubleAsString.IsNull() {
+ *doubleAsString = streamsItem.Format.AvroFormat.DoubleAsString.ValueBool()
+ } else {
+ doubleAsString = nil
}
- if sourceS3UpdateFileBasedStreamConfigFormatAvroFormat != nil {
- format1 = &shared.SourceS3UpdateFileBasedStreamConfigFormat{
- SourceS3UpdateFileBasedStreamConfigFormatAvroFormat: sourceS3UpdateFileBasedStreamConfigFormatAvroFormat,
- }
+ sourceS3UpdateAvroFormat = &shared.SourceS3UpdateAvroFormat{
+ DoubleAsString: doubleAsString,
}
- var sourceS3UpdateFileBasedStreamConfigFormatCSVFormat *shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat
- if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat != nil {
- delimiter1 := new(string)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Delimiter.IsNull() {
- *delimiter1 = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Delimiter.ValueString()
- } else {
- delimiter1 = nil
- }
- doubleQuote1 := new(bool)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.DoubleQuote.IsNull() {
- *doubleQuote1 = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.DoubleQuote.ValueBool()
- } else {
- doubleQuote1 = nil
- }
- encoding1 := new(string)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Encoding.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Encoding.IsNull() {
- *encoding1 = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Encoding.ValueString()
- } else {
- encoding1 = nil
- }
- escapeChar1 := new(string)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.EscapeChar.IsNull() {
- *escapeChar1 = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.EscapeChar.ValueString()
- } else {
- escapeChar1 = nil
+ }
+ if sourceS3UpdateAvroFormat != nil {
+ format1 = shared.SourceS3UpdateFormat{
+ SourceS3UpdateAvroFormat: sourceS3UpdateAvroFormat,
+ }
+ }
+ var sourceS3UpdateCSVFormat *shared.SourceS3UpdateCSVFormat
+ if streamsItem.Format.CSVFormat != nil {
+ delimiter1 := new(string)
+ if !streamsItem.Format.CSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.CSVFormat.Delimiter.IsNull() {
+ *delimiter1 = streamsItem.Format.CSVFormat.Delimiter.ValueString()
+ } else {
+ delimiter1 = nil
+ }
+ doubleQuote1 := new(bool)
+ if !streamsItem.Format.CSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.CSVFormat.DoubleQuote.IsNull() {
+ *doubleQuote1 = streamsItem.Format.CSVFormat.DoubleQuote.ValueBool()
+ } else {
+ doubleQuote1 = nil
+ }
+ encoding1 := new(string)
+ if !streamsItem.Format.CSVFormat.Encoding.IsUnknown() && !streamsItem.Format.CSVFormat.Encoding.IsNull() {
+ *encoding1 = streamsItem.Format.CSVFormat.Encoding.ValueString()
+ } else {
+ encoding1 = nil
+ }
+ escapeChar1 := new(string)
+ if !streamsItem.Format.CSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.CSVFormat.EscapeChar.IsNull() {
+ *escapeChar1 = streamsItem.Format.CSVFormat.EscapeChar.ValueString()
+ } else {
+ escapeChar1 = nil
+ }
+ var falseValues []string = nil
+ for _, falseValuesItem := range streamsItem.Format.CSVFormat.FalseValues {
+ falseValues = append(falseValues, falseValuesItem.ValueString())
+ }
+ var headerDefinition *shared.SourceS3UpdateCSVHeaderDefinition
+ if streamsItem.Format.CSVFormat.HeaderDefinition != nil {
+ var sourceS3UpdateFromCSV *shared.SourceS3UpdateFromCSV
+ if streamsItem.Format.CSVFormat.HeaderDefinition.FromCSV != nil {
+ sourceS3UpdateFromCSV = &shared.SourceS3UpdateFromCSV{}
}
- var falseValues []string = nil
- for _, falseValuesItem := range streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.FalseValues {
- falseValues = append(falseValues, falseValuesItem.ValueString())
+ if sourceS3UpdateFromCSV != nil {
+ headerDefinition = &shared.SourceS3UpdateCSVHeaderDefinition{
+ SourceS3UpdateFromCSV: sourceS3UpdateFromCSV,
+ }
}
- filetype5 := new(shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Filetype.IsNull() {
- *filetype5 = shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Filetype.ValueString())
- } else {
- filetype5 = nil
+ var sourceS3UpdateAutogenerated *shared.SourceS3UpdateAutogenerated
+ if streamsItem.Format.CSVFormat.HeaderDefinition.Autogenerated != nil {
+ sourceS3UpdateAutogenerated = &shared.SourceS3UpdateAutogenerated{}
}
- var headerDefinition *shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition
- if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition != nil {
- var sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV
- if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil {
- headerDefinitionType := new(shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.IsNull() {
- *headerDefinitionType = shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.ValueString())
- } else {
- headerDefinitionType = nil
- }
- sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV{
- HeaderDefinitionType: headerDefinitionType,
- }
- }
- if sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil {
- headerDefinition = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV: sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV,
- }
- }
- var sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated
- if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil {
- headerDefinitionType1 := new(shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.IsNull() {
- *headerDefinitionType1 = shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.ValueString())
- } else {
- headerDefinitionType1 = nil
- }
- sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated{
- HeaderDefinitionType: headerDefinitionType1,
- }
+ if sourceS3UpdateAutogenerated != nil {
+ headerDefinition = &shared.SourceS3UpdateCSVHeaderDefinition{
+ SourceS3UpdateAutogenerated: sourceS3UpdateAutogenerated,
}
- if sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil {
- headerDefinition = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated: sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated,
- }
- }
- var sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided
- if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil {
- var columnNames []string = nil
- for _, columnNamesItem := range streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.ColumnNames {
- columnNames = append(columnNames, columnNamesItem.ValueString())
- }
- headerDefinitionType2 := new(shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.IsNull() {
- *headerDefinitionType2 = shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.ValueString())
- } else {
- headerDefinitionType2 = nil
- }
- sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided{
- ColumnNames: columnNames,
- HeaderDefinitionType: headerDefinitionType2,
- }
+ }
+ var sourceS3UpdateUserProvided *shared.SourceS3UpdateUserProvided
+ if streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided != nil {
+ var columnNames []string = nil
+ for _, columnNamesItem := range streamsItem.Format.CSVFormat.HeaderDefinition.UserProvided.ColumnNames {
+ columnNames = append(columnNames, columnNamesItem.ValueString())
}
- if sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil {
- headerDefinition = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided: sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided,
- }
+ sourceS3UpdateUserProvided = &shared.SourceS3UpdateUserProvided{
+ ColumnNames: columnNames,
}
}
- inferenceType := new(shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.InferenceType.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.InferenceType.IsNull() {
- *inferenceType = shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.InferenceType.ValueString())
- } else {
- inferenceType = nil
- }
- var nullValues []string = nil
- for _, nullValuesItem := range streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.NullValues {
- nullValues = append(nullValues, nullValuesItem.ValueString())
- }
- quoteChar1 := new(string)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.QuoteChar.IsNull() {
- *quoteChar1 = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.QuoteChar.ValueString()
- } else {
- quoteChar1 = nil
- }
- skipRowsAfterHeader := new(int64)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.IsNull() {
- *skipRowsAfterHeader = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.ValueInt64()
- } else {
- skipRowsAfterHeader = nil
- }
- skipRowsBeforeHeader := new(int64)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.IsNull() {
- *skipRowsBeforeHeader = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.ValueInt64()
- } else {
- skipRowsBeforeHeader = nil
- }
- stringsCanBeNull := new(bool)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.IsNull() {
- *stringsCanBeNull = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.ValueBool()
- } else {
- stringsCanBeNull = nil
- }
- var trueValues []string = nil
- for _, trueValuesItem := range streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.TrueValues {
- trueValues = append(trueValues, trueValuesItem.ValueString())
- }
- sourceS3UpdateFileBasedStreamConfigFormatCSVFormat = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat{
- Delimiter: delimiter1,
- DoubleQuote: doubleQuote1,
- Encoding: encoding1,
- EscapeChar: escapeChar1,
- FalseValues: falseValues,
- Filetype: filetype5,
- HeaderDefinition: headerDefinition,
- InferenceType: inferenceType,
- NullValues: nullValues,
- QuoteChar: quoteChar1,
- SkipRowsAfterHeader: skipRowsAfterHeader,
- SkipRowsBeforeHeader: skipRowsBeforeHeader,
- StringsCanBeNull: stringsCanBeNull,
- TrueValues: trueValues,
+ if sourceS3UpdateUserProvided != nil {
+ headerDefinition = &shared.SourceS3UpdateCSVHeaderDefinition{
+ SourceS3UpdateUserProvided: sourceS3UpdateUserProvided,
+ }
}
}
- if sourceS3UpdateFileBasedStreamConfigFormatCSVFormat != nil {
- format1 = &shared.SourceS3UpdateFileBasedStreamConfigFormat{
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormat: sourceS3UpdateFileBasedStreamConfigFormatCSVFormat,
- }
+ inferenceType := new(shared.SourceS3UpdateInferenceType)
+ if !streamsItem.Format.CSVFormat.InferenceType.IsUnknown() && !streamsItem.Format.CSVFormat.InferenceType.IsNull() {
+ *inferenceType = shared.SourceS3UpdateInferenceType(streamsItem.Format.CSVFormat.InferenceType.ValueString())
+ } else {
+ inferenceType = nil
}
- var sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat *shared.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat
- if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat != nil {
- filetype6 := new(shared.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat.Filetype.IsNull() {
- *filetype6 = shared.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat.Filetype.ValueString())
- } else {
- filetype6 = nil
- }
- sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat = &shared.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat{
- Filetype: filetype6,
- }
+ var nullValues []string = nil
+ for _, nullValuesItem := range streamsItem.Format.CSVFormat.NullValues {
+ nullValues = append(nullValues, nullValuesItem.ValueString())
}
- if sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat != nil {
- format1 = &shared.SourceS3UpdateFileBasedStreamConfigFormat{
- SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat: sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat,
- }
+ quoteChar1 := new(string)
+ if !streamsItem.Format.CSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.CSVFormat.QuoteChar.IsNull() {
+ *quoteChar1 = streamsItem.Format.CSVFormat.QuoteChar.ValueString()
+ } else {
+ quoteChar1 = nil
}
- var sourceS3UpdateFileBasedStreamConfigFormatParquetFormat *shared.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat
- if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat != nil {
- decimalAsFloat := new(bool)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.IsNull() {
- *decimalAsFloat = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.ValueBool()
- } else {
- decimalAsFloat = nil
- }
- filetype7 := new(shared.SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype)
- if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.Filetype.IsNull() {
- *filetype7 = shared.SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.Filetype.ValueString())
- } else {
- filetype7 = nil
- }
- sourceS3UpdateFileBasedStreamConfigFormatParquetFormat = &shared.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat{
- DecimalAsFloat: decimalAsFloat,
- Filetype: filetype7,
- }
+ skipRowsAfterHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsAfterHeader.IsNull() {
+ *skipRowsAfterHeader = streamsItem.Format.CSVFormat.SkipRowsAfterHeader.ValueInt64()
+ } else {
+ skipRowsAfterHeader = nil
}
- if sourceS3UpdateFileBasedStreamConfigFormatParquetFormat != nil {
- format1 = &shared.SourceS3UpdateFileBasedStreamConfigFormat{
- SourceS3UpdateFileBasedStreamConfigFormatParquetFormat: sourceS3UpdateFileBasedStreamConfigFormatParquetFormat,
- }
+ skipRowsBeforeHeader := new(int64)
+ if !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.IsNull() {
+ *skipRowsBeforeHeader = streamsItem.Format.CSVFormat.SkipRowsBeforeHeader.ValueInt64()
+ } else {
+ skipRowsBeforeHeader = nil
+ }
+ stringsCanBeNull := new(bool)
+ if !streamsItem.Format.CSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.CSVFormat.StringsCanBeNull.IsNull() {
+ *stringsCanBeNull = streamsItem.Format.CSVFormat.StringsCanBeNull.ValueBool()
+ } else {
+ stringsCanBeNull = nil
+ }
+ var trueValues []string = nil
+ for _, trueValuesItem := range streamsItem.Format.CSVFormat.TrueValues {
+ trueValues = append(trueValues, trueValuesItem.ValueString())
+ }
+ sourceS3UpdateCSVFormat = &shared.SourceS3UpdateCSVFormat{
+ Delimiter: delimiter1,
+ DoubleQuote: doubleQuote1,
+ Encoding: encoding1,
+ EscapeChar: escapeChar1,
+ FalseValues: falseValues,
+ HeaderDefinition: headerDefinition,
+ InferenceType: inferenceType,
+ NullValues: nullValues,
+ QuoteChar: quoteChar1,
+ SkipRowsAfterHeader: skipRowsAfterHeader,
+ SkipRowsBeforeHeader: skipRowsBeforeHeader,
+ StringsCanBeNull: stringsCanBeNull,
+ TrueValues: trueValues,
+ }
+ }
+ if sourceS3UpdateCSVFormat != nil {
+ format1 = shared.SourceS3UpdateFormat{
+ SourceS3UpdateCSVFormat: sourceS3UpdateCSVFormat,
+ }
+ }
+ var sourceS3UpdateJsonlFormat *shared.SourceS3UpdateJsonlFormat
+ if streamsItem.Format.JsonlFormat != nil {
+ sourceS3UpdateJsonlFormat = &shared.SourceS3UpdateJsonlFormat{}
+ }
+ if sourceS3UpdateJsonlFormat != nil {
+ format1 = shared.SourceS3UpdateFormat{
+ SourceS3UpdateJsonlFormat: sourceS3UpdateJsonlFormat,
+ }
+ }
+ var sourceS3UpdateParquetFormat *shared.SourceS3UpdateParquetFormat
+ if streamsItem.Format.ParquetFormat != nil {
+ decimalAsFloat := new(bool)
+ if !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsUnknown() && !streamsItem.Format.ParquetFormat.DecimalAsFloat.IsNull() {
+ *decimalAsFloat = streamsItem.Format.ParquetFormat.DecimalAsFloat.ValueBool()
+ } else {
+ decimalAsFloat = nil
+ }
+ sourceS3UpdateParquetFormat = &shared.SourceS3UpdateParquetFormat{
+ DecimalAsFloat: decimalAsFloat,
+ }
+ }
+ if sourceS3UpdateParquetFormat != nil {
+ format1 = shared.SourceS3UpdateFormat{
+ SourceS3UpdateParquetFormat: sourceS3UpdateParquetFormat,
+ }
+ }
+ var sourceS3UpdateDocumentFileTypeFormatExperimental *shared.SourceS3UpdateDocumentFileTypeFormatExperimental
+ if streamsItem.Format.DocumentFileTypeFormatExperimental != nil {
+ skipUnprocessableFileTypes := new(bool)
+ if !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsUnknown() && !streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.IsNull() {
+ *skipUnprocessableFileTypes = streamsItem.Format.DocumentFileTypeFormatExperimental.SkipUnprocessableFileTypes.ValueBool()
+ } else {
+ skipUnprocessableFileTypes = nil
+ }
+ sourceS3UpdateDocumentFileTypeFormatExperimental = &shared.SourceS3UpdateDocumentFileTypeFormatExperimental{
+ SkipUnprocessableFileTypes: skipUnprocessableFileTypes,
+ }
+ }
+ if sourceS3UpdateDocumentFileTypeFormatExperimental != nil {
+ format1 = shared.SourceS3UpdateFormat{
+ SourceS3UpdateDocumentFileTypeFormatExperimental: sourceS3UpdateDocumentFileTypeFormatExperimental,
}
}
var globs []string = nil
@@ -1130,15 +1000,14 @@ func (r *SourceS3ResourceModel) ToUpdateSDKType() *shared.SourceS3PutRequest {
} else {
schemaless = nil
}
- validationPolicy := new(shared.SourceS3UpdateFileBasedStreamConfigValidationPolicy)
+ validationPolicy := new(shared.SourceS3UpdateValidationPolicy)
if !streamsItem.ValidationPolicy.IsUnknown() && !streamsItem.ValidationPolicy.IsNull() {
- *validationPolicy = shared.SourceS3UpdateFileBasedStreamConfigValidationPolicy(streamsItem.ValidationPolicy.ValueString())
+ *validationPolicy = shared.SourceS3UpdateValidationPolicy(streamsItem.ValidationPolicy.ValueString())
} else {
validationPolicy = nil
}
streams = append(streams, shared.SourceS3UpdateFileBasedStreamConfig{
DaysToSyncIfHistoryIsFull: daysToSyncIfHistoryIsFull,
- FileType: fileType,
Format: format1,
Globs: globs,
InputSchema: inputSchema,
diff --git a/internal/provider/source_salesforce_data_source.go b/internal/provider/source_salesforce_data_source.go
old mode 100755
new mode 100644
index b91b44a0a..dc7c331d1
--- a/internal/provider/source_salesforce_data_source.go
+++ b/internal/provider/source_salesforce_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceSalesforceDataSource struct {
// SourceSalesforceDataSourceModel describes the data model.
type SourceSalesforceDataSourceModel struct {
- Configuration SourceSalesforce `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,93 +47,20 @@ func (r *SourceSalesforceDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceSalesforce DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Salesforce developer application's Client ID`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Salesforce developer application's Client secret`,
- },
- "force_use_bulk_api": schema.BoolAttribute{
- Computed: true,
- Description: `Toggle to use Bulk API (this might cause empty fields for some streams)`,
- },
- "is_sandbox": schema.BoolAttribute{
- Computed: true,
- Description: `Toggle if you're using a Salesforce Sandbox`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "salesforce",
- ),
- },
- Description: `must be one of ["salesforce"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ format. Airbyte will replicate the data updated on and after this date. If this field is blank, Airbyte will replicate the data for last two years.`,
- },
- "streams_criteria": schema.ListNestedAttribute{
- Computed: true,
- NestedObject: schema.NestedAttributeObject{
- Attributes: map[string]schema.Attribute{
- "criteria": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "starts with",
- "ends with",
- "contains",
- "exacts",
- "starts not with",
- "ends not with",
- "not contains",
- "not exacts",
- ),
- },
- Description: `must be one of ["starts with", "ends with", "contains", "exacts", "starts not with", "ends not with", "not contains", "not exacts"]`,
- },
- "value": schema.StringAttribute{
- Computed: true,
- },
- },
- },
- Description: `Add filters to select only required stream based on ` + "`" + `SObject` + "`" + ` name. Use this field to filter which tables are displayed by this connector. This is useful if your Salesforce account has a large number of tables (>1000), in which case you may find it easier to navigate the UI and speed up the connector's performance if you restrict the tables displayed by this connector.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_salesforce_data_source_sdk.go b/internal/provider/source_salesforce_data_source_sdk.go
old mode 100755
new mode 100644
index 8ff4ec204..d56cb5477
--- a/internal/provider/source_salesforce_data_source_sdk.go
+++ b/internal/provider/source_salesforce_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSalesforceDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_salesforce_resource.go b/internal/provider/source_salesforce_resource.go
old mode 100755
new mode 100644
index 75757d0ef..aaa685cdc
--- a/internal/provider/source_salesforce_resource.go
+++ b/internal/provider/source_salesforce_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceSalesforceResource struct {
// SourceSalesforceResourceModel describes the resource data model.
type SourceSalesforceResourceModel struct {
Configuration SourceSalesforce `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -55,15 +57,6 @@ func (r *SourceSalesforceResource) Schema(ctx context.Context, req resource.Sche
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `Enter your Salesforce developer application's Client ID`,
@@ -73,39 +66,34 @@ func (r *SourceSalesforceResource) Schema(ctx context.Context, req resource.Sche
Description: `Enter your Salesforce developer application's Client secret`,
},
"force_use_bulk_api": schema.BoolAttribute{
- Optional: true,
- Description: `Toggle to use Bulk API (this might cause empty fields for some streams)`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Toggle to use Bulk API (this might cause empty fields for some streams)`,
},
"is_sandbox": schema.BoolAttribute{
- Optional: true,
- Description: `Toggle if you're using a Salesforce Sandbox`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Toggle if you're using a Salesforce Sandbox`,
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "salesforce",
- ),
- },
- Description: `must be one of ["salesforce"]`,
- },
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ format. Airbyte will replicate the data updated on and after this date. If this field is blank, Airbyte will replicate the data for last two years.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ format. Airbyte will replicate the data updated on and after this date. If this field is blank, Airbyte will replicate the data for last two years.`,
},
"streams_criteria": schema.ListNestedAttribute{
Optional: true,
NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
"criteria": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ Description: `must be one of ["starts with", "ends with", "contains", "exacts", "starts not with", "ends not with", "not contains", "not exacts"]; Default: "contains"`,
Validators: []validator.String{
stringvalidator.OneOf(
"starts with",
@@ -118,7 +106,6 @@ func (r *SourceSalesforceResource) Schema(ctx context.Context, req resource.Sche
"not exacts",
),
},
- Description: `must be one of ["starts with", "ends with", "contains", "exacts", "starts not with", "ends not with", "not contains", "not exacts"]`,
},
"value": schema.StringAttribute{
Required: true,
@@ -129,13 +116,24 @@ func (r *SourceSalesforceResource) Schema(ctx context.Context, req resource.Sche
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -199,7 +197,7 @@ func (r *SourceSalesforceResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSalesforce(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -375,5 +373,5 @@ func (r *SourceSalesforceResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceSalesforceResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_salesforce_resource_sdk.go b/internal/provider/source_salesforce_resource_sdk.go
old mode 100755
new mode 100644
index 7887ad669..8322ce424
--- a/internal/provider/source_salesforce_resource_sdk.go
+++ b/internal/provider/source_salesforce_resource_sdk.go
@@ -3,18 +3,12 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceSalesforceResourceModel) ToCreateSDKType() *shared.SourceSalesforceCreateRequest {
- authType := new(shared.SourceSalesforceAuthType)
- if !r.Configuration.AuthType.IsUnknown() && !r.Configuration.AuthType.IsNull() {
- *authType = shared.SourceSalesforceAuthType(r.Configuration.AuthType.ValueString())
- } else {
- authType = nil
- }
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
forceUseBulkAPI := new(bool)
@@ -30,7 +24,6 @@ func (r *SourceSalesforceResourceModel) ToCreateSDKType() *shared.SourceSalesfor
isSandbox = nil
}
refreshToken := r.Configuration.RefreshToken.ValueString()
- sourceType := shared.SourceSalesforceSalesforce(r.Configuration.SourceType.ValueString())
startDate := new(time.Time)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
@@ -39,7 +32,12 @@ func (r *SourceSalesforceResourceModel) ToCreateSDKType() *shared.SourceSalesfor
}
var streamsCriteria []shared.SourceSalesforceStreamsCriteria = nil
for _, streamsCriteriaItem := range r.Configuration.StreamsCriteria {
- criteria := shared.SourceSalesforceStreamsCriteriaSearchCriteria(streamsCriteriaItem.Criteria.ValueString())
+ criteria := new(shared.SourceSalesforceSearchCriteria)
+ if !streamsCriteriaItem.Criteria.IsUnknown() && !streamsCriteriaItem.Criteria.IsNull() {
+ *criteria = shared.SourceSalesforceSearchCriteria(streamsCriteriaItem.Criteria.ValueString())
+ } else {
+ criteria = nil
+ }
value := streamsCriteriaItem.Value.ValueString()
streamsCriteria = append(streamsCriteria, shared.SourceSalesforceStreamsCriteria{
Criteria: criteria,
@@ -47,16 +45,20 @@ func (r *SourceSalesforceResourceModel) ToCreateSDKType() *shared.SourceSalesfor
})
}
configuration := shared.SourceSalesforce{
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
ForceUseBulkAPI: forceUseBulkAPI,
IsSandbox: isSandbox,
RefreshToken: refreshToken,
- SourceType: sourceType,
StartDate: startDate,
StreamsCriteria: streamsCriteria,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -67,6 +69,7 @@ func (r *SourceSalesforceResourceModel) ToCreateSDKType() *shared.SourceSalesfor
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSalesforceCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -80,12 +83,6 @@ func (r *SourceSalesforceResourceModel) ToGetSDKType() *shared.SourceSalesforceC
}
func (r *SourceSalesforceResourceModel) ToUpdateSDKType() *shared.SourceSalesforcePutRequest {
- authType := new(shared.SourceSalesforceUpdateAuthType)
- if !r.Configuration.AuthType.IsUnknown() && !r.Configuration.AuthType.IsNull() {
- *authType = shared.SourceSalesforceUpdateAuthType(r.Configuration.AuthType.ValueString())
- } else {
- authType = nil
- }
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
forceUseBulkAPI := new(bool)
@@ -107,17 +104,21 @@ func (r *SourceSalesforceResourceModel) ToUpdateSDKType() *shared.SourceSalesfor
} else {
startDate = nil
}
- var streamsCriteria []shared.SourceSalesforceUpdateStreamsCriteria = nil
+ var streamsCriteria []shared.StreamsCriteria = nil
for _, streamsCriteriaItem := range r.Configuration.StreamsCriteria {
- criteria := shared.SourceSalesforceUpdateStreamsCriteriaSearchCriteria(streamsCriteriaItem.Criteria.ValueString())
+ criteria := new(shared.SearchCriteria)
+ if !streamsCriteriaItem.Criteria.IsUnknown() && !streamsCriteriaItem.Criteria.IsNull() {
+ *criteria = shared.SearchCriteria(streamsCriteriaItem.Criteria.ValueString())
+ } else {
+ criteria = nil
+ }
value := streamsCriteriaItem.Value.ValueString()
- streamsCriteria = append(streamsCriteria, shared.SourceSalesforceUpdateStreamsCriteria{
+ streamsCriteria = append(streamsCriteria, shared.StreamsCriteria{
Criteria: criteria,
Value: value,
})
}
configuration := shared.SourceSalesforceUpdate{
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
ForceUseBulkAPI: forceUseBulkAPI,
diff --git a/internal/provider/source_salesloft_data_source.go b/internal/provider/source_salesloft_data_source.go
old mode 100755
new mode 100644
index 71753bf5f..a21c78c3a
--- a/internal/provider/source_salesloft_data_source.go
+++ b/internal/provider/source_salesloft_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceSalesloftDataSource struct {
// SourceSalesloftDataSourceModel describes the data model.
type SourceSalesloftDataSourceModel struct {
- Configuration SourceSalesloft `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,155 +47,20 @@ func (r *SourceSalesloftDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceSalesloft DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_salesloft_credentials_authenticate_via_api_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key for making authenticated requests. More instruction on how to find this value in our docs`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_key",
- ),
- },
- Description: `must be one of ["api_key"]`,
- },
- },
- },
- "source_salesloft_credentials_authenticate_via_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Salesloft developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Salesloft developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining a new access token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_salesloft_update_credentials_authenticate_via_api_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key for making authenticated requests. More instruction on how to find this value in our docs`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_key",
- ),
- },
- Description: `must be one of ["api_key"]`,
- },
- },
- },
- "source_salesloft_update_credentials_authenticate_via_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Salesloft developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Salesloft developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The token for obtaining a new access token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "salesloft",
- ),
- },
- Description: `must be one of ["salesloft"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for Salesloft API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_salesloft_data_source_sdk.go b/internal/provider/source_salesloft_data_source_sdk.go
old mode 100755
new mode 100644
index 3c2f83698..847c38fe2
--- a/internal/provider/source_salesloft_data_source_sdk.go
+++ b/internal/provider/source_salesloft_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSalesloftDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_salesloft_resource.go b/internal/provider/source_salesloft_resource.go
old mode 100755
new mode 100644
index 06a937da1..4e2b5370a
--- a/internal/provider/source_salesloft_resource.go
+++ b/internal/provider/source_salesloft_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceSalesloftResource struct {
// SourceSalesloftResourceModel describes the resource data model.
type SourceSalesloftResourceModel struct {
Configuration SourceSalesloft `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,40 +59,24 @@ func (r *SourceSalesloftResource) Schema(ctx context.Context, req resource.Schem
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_salesloft_credentials_authenticate_via_api_key": schema.SingleNestedAttribute{
+ "authenticate_via_api_key": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key for making authenticated requests. More instruction on how to find this value in our docs`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_key",
- ),
- },
- Description: `must be one of ["api_key"]`,
- },
},
},
- "source_salesloft_credentials_authenticate_via_o_auth": schema.SingleNestedAttribute{
+ "authenticate_via_o_auth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests.`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of your Salesloft developer application.`,
@@ -102,69 +87,16 @@ func (r *SourceSalesloftResource) Schema(ctx context.Context, req resource.Schem
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The token for obtaining a new access token.`,
},
"token_expiry_date": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_salesloft_update_credentials_authenticate_via_api_key": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
Required: true,
- Description: `API Key for making authenticated requests. More instruction on how to find this value in our docs`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_key",
- ),
- },
- Description: `must be one of ["api_key"]`,
- },
- },
- },
- "source_salesloft_update_credentials_authenticate_via_o_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your Salesloft developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your Salesloft developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The token for obtaining a new access token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Required: true,
+ Sensitive: true,
+ Description: `The date-time when the access token should be refreshed.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date-time when the access token should be refreshed.`,
},
},
},
@@ -173,31 +105,33 @@ func (r *SourceSalesloftResource) Schema(ctx context.Context, req resource.Schem
validators.ExactlyOneChild(),
},
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "salesloft",
- ),
- },
- Description: `must be one of ["salesloft"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The date from which you'd like to replicate data for Salesloft API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for Salesloft API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -261,7 +195,7 @@ func (r *SourceSalesloftResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSalesloft(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -437,5 +371,5 @@ func (r *SourceSalesloftResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceSalesloftResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_salesloft_resource_sdk.go b/internal/provider/source_salesloft_resource_sdk.go
old mode 100755
new mode 100644
index aa6654d17..9aefb924e
--- a/internal/provider/source_salesloft_resource_sdk.go
+++ b/internal/provider/source_salesloft_resource_sdk.go
@@ -3,56 +3,56 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceSalesloftResourceModel) ToCreateSDKType() *shared.SourceSalesloftCreateRequest {
var credentials shared.SourceSalesloftCredentials
- var sourceSalesloftCredentialsAuthenticateViaOAuth *shared.SourceSalesloftCredentialsAuthenticateViaOAuth
- if r.Configuration.Credentials.SourceSalesloftCredentialsAuthenticateViaOAuth != nil {
- accessToken := r.Configuration.Credentials.SourceSalesloftCredentialsAuthenticateViaOAuth.AccessToken.ValueString()
- authType := shared.SourceSalesloftCredentialsAuthenticateViaOAuthAuthType(r.Configuration.Credentials.SourceSalesloftCredentialsAuthenticateViaOAuth.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceSalesloftCredentialsAuthenticateViaOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceSalesloftCredentialsAuthenticateViaOAuth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceSalesloftCredentialsAuthenticateViaOAuth.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceSalesloftCredentialsAuthenticateViaOAuth.TokenExpiryDate.ValueString())
- sourceSalesloftCredentialsAuthenticateViaOAuth = &shared.SourceSalesloftCredentialsAuthenticateViaOAuth{
+ var sourceSalesloftAuthenticateViaOAuth *shared.SourceSalesloftAuthenticateViaOAuth
+ if r.Configuration.Credentials.AuthenticateViaOAuth != nil {
+ accessToken := r.Configuration.Credentials.AuthenticateViaOAuth.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.AuthenticateViaOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaOAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaOAuth.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.AuthenticateViaOAuth.TokenExpiryDate.ValueString())
+ sourceSalesloftAuthenticateViaOAuth = &shared.SourceSalesloftAuthenticateViaOAuth{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceSalesloftCredentialsAuthenticateViaOAuth != nil {
+ if sourceSalesloftAuthenticateViaOAuth != nil {
credentials = shared.SourceSalesloftCredentials{
- SourceSalesloftCredentialsAuthenticateViaOAuth: sourceSalesloftCredentialsAuthenticateViaOAuth,
+ SourceSalesloftAuthenticateViaOAuth: sourceSalesloftAuthenticateViaOAuth,
}
}
- var sourceSalesloftCredentialsAuthenticateViaAPIKey *shared.SourceSalesloftCredentialsAuthenticateViaAPIKey
- if r.Configuration.Credentials.SourceSalesloftCredentialsAuthenticateViaAPIKey != nil {
- apiKey := r.Configuration.Credentials.SourceSalesloftCredentialsAuthenticateViaAPIKey.APIKey.ValueString()
- authType1 := shared.SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType(r.Configuration.Credentials.SourceSalesloftCredentialsAuthenticateViaAPIKey.AuthType.ValueString())
- sourceSalesloftCredentialsAuthenticateViaAPIKey = &shared.SourceSalesloftCredentialsAuthenticateViaAPIKey{
- APIKey: apiKey,
- AuthType: authType1,
+ var sourceSalesloftAuthenticateViaAPIKey *shared.SourceSalesloftAuthenticateViaAPIKey
+ if r.Configuration.Credentials.AuthenticateViaAPIKey != nil {
+ apiKey := r.Configuration.Credentials.AuthenticateViaAPIKey.APIKey.ValueString()
+ sourceSalesloftAuthenticateViaAPIKey = &shared.SourceSalesloftAuthenticateViaAPIKey{
+ APIKey: apiKey,
}
}
- if sourceSalesloftCredentialsAuthenticateViaAPIKey != nil {
+ if sourceSalesloftAuthenticateViaAPIKey != nil {
credentials = shared.SourceSalesloftCredentials{
- SourceSalesloftCredentialsAuthenticateViaAPIKey: sourceSalesloftCredentialsAuthenticateViaAPIKey,
+ SourceSalesloftAuthenticateViaAPIKey: sourceSalesloftAuthenticateViaAPIKey,
}
}
- sourceType := shared.SourceSalesloftSalesloft(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceSalesloft{
Credentials: credentials,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -63,6 +63,7 @@ func (r *SourceSalesloftResourceModel) ToCreateSDKType() *shared.SourceSalesloft
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSalesloftCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -77,40 +78,36 @@ func (r *SourceSalesloftResourceModel) ToGetSDKType() *shared.SourceSalesloftCre
func (r *SourceSalesloftResourceModel) ToUpdateSDKType() *shared.SourceSalesloftPutRequest {
var credentials shared.SourceSalesloftUpdateCredentials
- var sourceSalesloftUpdateCredentialsAuthenticateViaOAuth *shared.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth
- if r.Configuration.Credentials.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth != nil {
- accessToken := r.Configuration.Credentials.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth.AccessToken.ValueString()
- authType := shared.SourceSalesloftUpdateCredentialsAuthenticateViaOAuthAuthType(r.Configuration.Credentials.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth.TokenExpiryDate.ValueString())
- sourceSalesloftUpdateCredentialsAuthenticateViaOAuth = &shared.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth{
+ var authenticateViaOAuth *shared.AuthenticateViaOAuth
+ if r.Configuration.Credentials.AuthenticateViaOAuth != nil {
+ accessToken := r.Configuration.Credentials.AuthenticateViaOAuth.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.AuthenticateViaOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.AuthenticateViaOAuth.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.AuthenticateViaOAuth.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.AuthenticateViaOAuth.TokenExpiryDate.ValueString())
+ authenticateViaOAuth = &shared.AuthenticateViaOAuth{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceSalesloftUpdateCredentialsAuthenticateViaOAuth != nil {
+ if authenticateViaOAuth != nil {
credentials = shared.SourceSalesloftUpdateCredentials{
- SourceSalesloftUpdateCredentialsAuthenticateViaOAuth: sourceSalesloftUpdateCredentialsAuthenticateViaOAuth,
+ AuthenticateViaOAuth: authenticateViaOAuth,
}
}
- var sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey *shared.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey
- if r.Configuration.Credentials.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey != nil {
- apiKey := r.Configuration.Credentials.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey.APIKey.ValueString()
- authType1 := shared.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKeyAuthType(r.Configuration.Credentials.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey.AuthType.ValueString())
- sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey = &shared.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey{
- APIKey: apiKey,
- AuthType: authType1,
+ var authenticateViaAPIKey *shared.AuthenticateViaAPIKey
+ if r.Configuration.Credentials.AuthenticateViaAPIKey != nil {
+ apiKey := r.Configuration.Credentials.AuthenticateViaAPIKey.APIKey.ValueString()
+ authenticateViaAPIKey = &shared.AuthenticateViaAPIKey{
+ APIKey: apiKey,
}
}
- if sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey != nil {
+ if authenticateViaAPIKey != nil {
credentials = shared.SourceSalesloftUpdateCredentials{
- SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey: sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey,
+ AuthenticateViaAPIKey: authenticateViaAPIKey,
}
}
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
diff --git a/internal/provider/source_sapfieldglass_data_source.go b/internal/provider/source_sapfieldglass_data_source.go
old mode 100755
new mode 100644
index b74531ee7..8b5dc407b
--- a/internal/provider/source_sapfieldglass_data_source.go
+++ b/internal/provider/source_sapfieldglass_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceSapFieldglassDataSource struct {
// SourceSapFieldglassDataSourceModel describes the data model.
type SourceSapFieldglassDataSourceModel struct {
- Configuration SourceSapFieldglass `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceSapFieldglassDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "SourceSapFieldglass DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sap-fieldglass",
- ),
- },
- Description: `must be one of ["sap-fieldglass"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_sapfieldglass_data_source_sdk.go b/internal/provider/source_sapfieldglass_data_source_sdk.go
old mode 100755
new mode 100644
index ec39bab61..5c11848e4
--- a/internal/provider/source_sapfieldglass_data_source_sdk.go
+++ b/internal/provider/source_sapfieldglass_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSapFieldglassDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_sapfieldglass_resource.go b/internal/provider/source_sapfieldglass_resource.go
old mode 100755
new mode 100644
index a5e847ed1..2c266e388
--- a/internal/provider/source_sapfieldglass_resource.go
+++ b/internal/provider/source_sapfieldglass_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceSapFieldglassResource struct {
// SourceSapFieldglassResourceModel describes the resource data model.
type SourceSapFieldglassResourceModel struct {
- Configuration SourceSapFieldglass `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceSapFieldglassResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceSapFieldglassResource) Schema(ctx context.Context, req resource.S
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sap-fieldglass",
- ),
- },
- Description: `must be one of ["sap-fieldglass"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceSapFieldglassResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSapFieldglass(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceSapFieldglassResource) Delete(ctx context.Context, req resource.D
}
func (r *SourceSapFieldglassResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_sapfieldglass_resource_sdk.go b/internal/provider/source_sapfieldglass_resource_sdk.go
old mode 100755
new mode 100644
index 3b13be30d..501b2fc61
--- a/internal/provider/source_sapfieldglass_resource_sdk.go
+++ b/internal/provider/source_sapfieldglass_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSapFieldglassResourceModel) ToCreateSDKType() *shared.SourceSapFieldglassCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceSapFieldglassSapFieldglass(r.Configuration.SourceType.ValueString())
configuration := shared.SourceSapFieldglass{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceSapFieldglassResourceModel) ToCreateSDKType() *shared.SourceSapFi
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSapFieldglassCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_secoda_data_source.go b/internal/provider/source_secoda_data_source.go
old mode 100755
new mode 100644
index 4f333f03c..708aa4921
--- a/internal/provider/source_secoda_data_source.go
+++ b/internal/provider/source_secoda_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceSecodaDataSource struct {
// SourceSecodaDataSourceModel describes the data model.
type SourceSecodaDataSourceModel struct {
- Configuration SourceSecoda `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,34 +47,20 @@ func (r *SourceSecodaDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceSecoda DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Your API Access Key. See here. The key is case sensitive.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "secoda",
- ),
- },
- Description: `must be one of ["secoda"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_secoda_data_source_sdk.go b/internal/provider/source_secoda_data_source_sdk.go
old mode 100755
new mode 100644
index a3a828ff4..a8e3d63ae
--- a/internal/provider/source_secoda_data_source_sdk.go
+++ b/internal/provider/source_secoda_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSecodaDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_secoda_resource.go b/internal/provider/source_secoda_resource.go
old mode 100755
new mode 100644
index a3c191236..230dba587
--- a/internal/provider/source_secoda_resource.go
+++ b/internal/provider/source_secoda_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,7 +33,8 @@ type SourceSecodaResource struct {
// SourceSecodaResourceModel describes the resource data model.
type SourceSecodaResourceModel struct {
- Configuration SourceSecoda `tfsdk:"configuration"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,26 +56,29 @@ func (r *SourceSecodaResource) Schema(ctx context.Context, req resource.SchemaRe
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your API Access Key. See here. The key is case sensitive.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "secoda",
- ),
- },
- Description: `must be one of ["secoda"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceSecodaResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSecoda(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceSecodaResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceSecodaResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_secoda_resource_sdk.go b/internal/provider/source_secoda_resource_sdk.go
old mode 100755
new mode 100644
index fa5952a4e..d0a14dc98
--- a/internal/provider/source_secoda_resource_sdk.go
+++ b/internal/provider/source_secoda_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSecodaResourceModel) ToCreateSDKType() *shared.SourceSecodaCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceSecodaSecoda(r.Configuration.SourceType.ValueString())
configuration := shared.SourceSecoda{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceSecodaResourceModel) ToCreateSDKType() *shared.SourceSecodaCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSecodaCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_sendgrid_data_source.go b/internal/provider/source_sendgrid_data_source.go
old mode 100755
new mode 100644
index 0bd9d284c..e54b4fe62
--- a/internal/provider/source_sendgrid_data_source.go
+++ b/internal/provider/source_sendgrid_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceSendgridDataSource struct {
// SourceSendgridDataSourceModel describes the data model.
type SourceSendgridDataSourceModel struct {
- Configuration SourceSendgrid `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,41 +47,20 @@ func (r *SourceSendgridDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceSendgrid DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "apikey": schema.StringAttribute{
- Computed: true,
- Description: `API Key, use admin to generate this key.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sendgrid",
- ),
- },
- Description: `must be one of ["sendgrid"]`,
- },
- "start_time": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `Start time in ISO8601 format. Any data before this time point will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_sendgrid_data_source_sdk.go b/internal/provider/source_sendgrid_data_source_sdk.go
old mode 100755
new mode 100644
index 7e6a99fd2..cc6374b50
--- a/internal/provider/source_sendgrid_data_source_sdk.go
+++ b/internal/provider/source_sendgrid_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSendgridDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_sendgrid_resource.go b/internal/provider/source_sendgrid_resource.go
old mode 100755
new mode 100644
index 826043330..8243e10b3
--- a/internal/provider/source_sendgrid_resource.go
+++ b/internal/provider/source_sendgrid_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceSendgridResource struct {
// SourceSendgridResourceModel describes the resource data model.
type SourceSendgridResourceModel struct {
Configuration SourceSendgrid `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,33 +58,36 @@ func (r *SourceSendgridResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"apikey": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key, use admin to generate this key.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sendgrid",
- ),
- },
- Description: `must be one of ["sendgrid"]`,
- },
"start_time": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Start time in ISO8601 format. Any data before this time point will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `Start time in ISO8601 format. Any data before this time point will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +151,7 @@ func (r *SourceSendgridResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSendgrid(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +327,5 @@ func (r *SourceSendgridResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceSendgridResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_sendgrid_resource_sdk.go b/internal/provider/source_sendgrid_resource_sdk.go
old mode 100755
new mode 100644
index 71c4828c6..5a8d0c5a8
--- a/internal/provider/source_sendgrid_resource_sdk.go
+++ b/internal/provider/source_sendgrid_resource_sdk.go
@@ -3,14 +3,13 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceSendgridResourceModel) ToCreateSDKType() *shared.SourceSendgridCreateRequest {
apikey := r.Configuration.Apikey.ValueString()
- sourceType := shared.SourceSendgridSendgrid(r.Configuration.SourceType.ValueString())
startTime := new(time.Time)
if !r.Configuration.StartTime.IsUnknown() && !r.Configuration.StartTime.IsNull() {
*startTime, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartTime.ValueString())
@@ -18,9 +17,14 @@ func (r *SourceSendgridResourceModel) ToCreateSDKType() *shared.SourceSendgridCr
startTime = nil
}
configuration := shared.SourceSendgrid{
- Apikey: apikey,
- SourceType: sourceType,
- StartTime: startTime,
+ Apikey: apikey,
+ StartTime: startTime,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -32,6 +36,7 @@ func (r *SourceSendgridResourceModel) ToCreateSDKType() *shared.SourceSendgridCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSendgridCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_sendinblue_data_source.go b/internal/provider/source_sendinblue_data_source.go
old mode 100755
new mode 100644
index 9986058fc..2be01c3f6
--- a/internal/provider/source_sendinblue_data_source.go
+++ b/internal/provider/source_sendinblue_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceSendinblueDataSource struct {
// SourceSendinblueDataSourceModel describes the data model.
type SourceSendinblueDataSourceModel struct {
- Configuration SourceSendinblue `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceSendinblueDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceSendinblue DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Your API Key. See here.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sendinblue",
- ),
- },
- Description: `must be one of ["sendinblue"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_sendinblue_data_source_sdk.go b/internal/provider/source_sendinblue_data_source_sdk.go
old mode 100755
new mode 100644
index 5a8f344da..918f0ad04
--- a/internal/provider/source_sendinblue_data_source_sdk.go
+++ b/internal/provider/source_sendinblue_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSendinblueDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_sendinblue_resource.go b/internal/provider/source_sendinblue_resource.go
old mode 100755
new mode 100644
index ef1486f94..2e1417c1f
--- a/internal/provider/source_sendinblue_resource.go
+++ b/internal/provider/source_sendinblue_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceSendinblueResource struct {
// SourceSendinblueResourceModel describes the resource data model.
type SourceSendinblueResourceModel struct {
- Configuration SourceSendinblue `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceSendinblueResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceSendinblueResource) Schema(ctx context.Context, req resource.Sche
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your API Key. See here.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sendinblue",
- ),
- },
- Description: `must be one of ["sendinblue"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceSendinblueResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSendinblue(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceSendinblueResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceSendinblueResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_sendinblue_resource_sdk.go b/internal/provider/source_sendinblue_resource_sdk.go
old mode 100755
new mode 100644
index c502faf60..ecdadbfa9
--- a/internal/provider/source_sendinblue_resource_sdk.go
+++ b/internal/provider/source_sendinblue_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSendinblueResourceModel) ToCreateSDKType() *shared.SourceSendinblueCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceSendinblueSendinblue(r.Configuration.SourceType.ValueString())
configuration := shared.SourceSendinblue{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceSendinblueResourceModel) ToCreateSDKType() *shared.SourceSendinbl
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSendinblueCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_senseforce_data_source.go b/internal/provider/source_senseforce_data_source.go
old mode 100755
new mode 100644
index f0ba49f8c..d2637de64
--- a/internal/provider/source_senseforce_data_source.go
+++ b/internal/provider/source_senseforce_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceSenseforceDataSource struct {
// SourceSenseforceDataSourceModel describes the data model.
type SourceSenseforceDataSourceModel struct {
- Configuration SourceSenseforce `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,53 +47,20 @@ func (r *SourceSenseforceDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceSenseforce DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Your API access token. See here. The toke is case sensitive.`,
- },
- "backend_url": schema.StringAttribute{
- Computed: true,
- Description: `Your Senseforce API backend URL. This is the URL shown during the Login screen. See here for more details. (Note: Most Senseforce backend APIs have the term 'galaxy' in their ULR)`,
- },
- "dataset_id": schema.StringAttribute{
- Computed: true,
- Description: `The ID of the dataset you want to synchronize. The ID can be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to synchronize a specific dataset, each dataset you want to synchronize needs to be implemented as a separate airbyte source).`,
- },
- "slice_range": schema.Int64Attribute{
- Computed: true,
- Description: `The time increment used by the connector when requesting data from the Senseforce API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted and the more likely one could run into rate limites. Furthermore, consider that large chunks of time might take a long time for the Senseforce query to return data - meaning it could take in effect longer than with more smaller time slices. If there are a lot of data per day, set this setting to 1. If there is only very little data per day, you might change the setting to 10 or more.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "senseforce",
- ),
- },
- Description: `must be one of ["senseforce"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `UTC date and time in the format 2017-01-25. Only data with "Timestamp" after this date will be replicated. Important note: This start date must be set to the first day of where your dataset provides data. If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_senseforce_data_source_sdk.go b/internal/provider/source_senseforce_data_source_sdk.go
old mode 100755
new mode 100644
index 6101b6769..1e096be35
--- a/internal/provider/source_senseforce_data_source_sdk.go
+++ b/internal/provider/source_senseforce_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSenseforceDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_senseforce_resource.go b/internal/provider/source_senseforce_resource.go
old mode 100755
new mode 100644
index 7de64fb38..a37d85ec6
--- a/internal/provider/source_senseforce_resource.go
+++ b/internal/provider/source_senseforce_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceSenseforceResource struct {
// SourceSenseforceResourceModel describes the resource data model.
type SourceSenseforceResourceModel struct {
Configuration SourceSenseforce `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,6 +58,7 @@ func (r *SourceSenseforceResource) Schema(ctx context.Context, req resource.Sche
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your API access token. See here. The toke is case sensitive.`,
},
"backend_url": schema.StringAttribute{
@@ -68,34 +70,37 @@ func (r *SourceSenseforceResource) Schema(ctx context.Context, req resource.Sche
Description: `The ID of the dataset you want to synchronize. The ID can be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to synchronize a specific dataset, each dataset you want to synchronize needs to be implemented as a separate airbyte source).`,
},
"slice_range": schema.Int64Attribute{
- Optional: true,
- Description: `The time increment used by the connector when requesting data from the Senseforce API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted and the more likely one could run into rate limites. Furthermore, consider that large chunks of time might take a long time for the Senseforce query to return data - meaning it could take in effect longer than with more smaller time slices. If there are a lot of data per day, set this setting to 1. If there is only very little data per day, you might change the setting to 10 or more.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "senseforce",
- ),
- },
- Description: `must be one of ["senseforce"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 10` + "\n" +
+ `The time increment used by the connector when requesting data from the Senseforce API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted and the more likely one could run into rate limits. Furthermore, consider that large chunks of time might take a long time for the Senseforce query to return data - meaning it could take in effect longer than with more smaller time slices. If there are a lot of data per day, set this setting to 1. If there is only very little data per day, you might change the setting to 10 or more.`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2017-01-25. Only data with "Timestamp" after this date will be replicated. Important note: This start date must be set to the first day of where your dataset provides data. If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `UTC date and time in the format 2017-01-25. Only data with "Timestamp" after this date will be replicated. Important note: This start date must be set to the first day of where your dataset provides data. If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -159,7 +164,7 @@ func (r *SourceSenseforceResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSenseforce(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -335,5 +340,5 @@ func (r *SourceSenseforceResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceSenseforceResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_senseforce_resource_sdk.go b/internal/provider/source_senseforce_resource_sdk.go
old mode 100755
new mode 100644
index 057bc7909..eabb2fec6
--- a/internal/provider/source_senseforce_resource_sdk.go
+++ b/internal/provider/source_senseforce_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -18,16 +18,20 @@ func (r *SourceSenseforceResourceModel) ToCreateSDKType() *shared.SourceSensefor
} else {
sliceRange = nil
}
- sourceType := shared.SourceSenseforceSenseforce(r.Configuration.SourceType.ValueString())
startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
configuration := shared.SourceSenseforce{
AccessToken: accessToken,
BackendURL: backendURL,
DatasetID: datasetID,
SliceRange: sliceRange,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -38,6 +42,7 @@ func (r *SourceSenseforceResourceModel) ToCreateSDKType() *shared.SourceSensefor
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSenseforceCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_sentry_data_source.go b/internal/provider/source_sentry_data_source.go
old mode 100755
new mode 100644
index f64c2cdb5..3399002a5
--- a/internal/provider/source_sentry_data_source.go
+++ b/internal/provider/source_sentry_data_source.go
@@ -3,17 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -33,10 +29,10 @@ type SourceSentryDataSource struct {
// SourceSentryDataSourceModel describes the data model.
type SourceSentryDataSourceModel struct {
- Configuration SourceSentry `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -51,54 +47,20 @@ func (r *SourceSentryDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceSentry DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_token": schema.StringAttribute{
- Computed: true,
- Description: `Log into Sentry and then create authentication tokens.For self-hosted, you can find or create authentication tokens by visiting "{instance_url_prefix}/settings/account/api/auth-tokens/"`,
- },
- "discover_fields": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Validators: []validator.List{
- listvalidator.ValueStringsAre(validators.IsValidJSON()),
- },
- Description: `Fields to retrieve when fetching discover events`,
- },
- "hostname": schema.StringAttribute{
- Computed: true,
- Description: `Host name of Sentry API server.For self-hosted, specify your host name here. Otherwise, leave it empty.`,
- },
- "organization": schema.StringAttribute{
- Computed: true,
- Description: `The slug of the organization the groups belong to.`,
- },
- "project": schema.StringAttribute{
- Computed: true,
- Description: `The name (slug) of the Project you want to sync.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sentry",
- ),
- },
- Description: `must be one of ["sentry"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_sentry_data_source_sdk.go b/internal/provider/source_sentry_data_source_sdk.go
old mode 100755
new mode 100644
index 68af41edc..f46bd84e0
--- a/internal/provider/source_sentry_data_source_sdk.go
+++ b/internal/provider/source_sentry_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSentryDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_sentry_resource.go b/internal/provider/source_sentry_resource.go
old mode 100755
new mode 100644
index 7052ed82b..cd34cf874
--- a/internal/provider/source_sentry_resource.go
+++ b/internal/provider/source_sentry_resource.go
@@ -3,19 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -37,6 +37,7 @@ type SourceSentryResource struct {
// SourceSentryResourceModel describes the resource data model.
type SourceSentryResourceModel struct {
Configuration SourceSentry `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,19 +59,21 @@ func (r *SourceSentryResource) Schema(ctx context.Context, req resource.SchemaRe
Attributes: map[string]schema.Attribute{
"auth_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Log into Sentry and then create authentication tokens.For self-hosted, you can find or create authentication tokens by visiting "{instance_url_prefix}/settings/account/api/auth-tokens/"`,
},
"discover_fields": schema.ListAttribute{
Optional: true,
ElementType: types.StringType,
+ Description: `Fields to retrieve when fetching discover events`,
Validators: []validator.List{
listvalidator.ValueStringsAre(validators.IsValidJSON()),
},
- Description: `Fields to retrieve when fetching discover events`,
},
"hostname": schema.StringAttribute{
- Optional: true,
- Description: `Host name of Sentry API server.For self-hosted, specify your host name here. Otherwise, leave it empty.`,
+ Optional: true,
+ MarkdownDescription: `Default: "sentry.io"` + "\n" +
+ `Host name of Sentry API server.For self-hosted, specify your host name here. Otherwise, leave it empty.`,
},
"organization": schema.StringAttribute{
Required: true,
@@ -80,24 +83,26 @@ func (r *SourceSentryResource) Schema(ctx context.Context, req resource.SchemaRe
Required: true,
Description: `The name (slug) of the Project you want to sync.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sentry",
- ),
- },
- Description: `must be one of ["sentry"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -161,7 +166,7 @@ func (r *SourceSentryResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSentry(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -337,5 +342,5 @@ func (r *SourceSentryResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceSentryResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_sentry_resource_sdk.go b/internal/provider/source_sentry_resource_sdk.go
old mode 100755
new mode 100644
index 18c3200bc..629054905
--- a/internal/provider/source_sentry_resource_sdk.go
+++ b/internal/provider/source_sentry_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
"encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -24,14 +24,18 @@ func (r *SourceSentryResourceModel) ToCreateSDKType() *shared.SourceSentryCreate
}
organization := r.Configuration.Organization.ValueString()
project := r.Configuration.Project.ValueString()
- sourceType := shared.SourceSentrySentry(r.Configuration.SourceType.ValueString())
configuration := shared.SourceSentry{
AuthToken: authToken,
DiscoverFields: discoverFields,
Hostname: hostname,
Organization: organization,
Project: project,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -43,6 +47,7 @@ func (r *SourceSentryResourceModel) ToCreateSDKType() *shared.SourceSentryCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSentryCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_sftp_data_source.go b/internal/provider/source_sftp_data_source.go
old mode 100755
new mode 100644
index 946f72d15..e4651efa4
--- a/internal/provider/source_sftp_data_source.go
+++ b/internal/provider/source_sftp_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceSftpDataSource struct {
// SourceSftpDataSourceModel describes the data model.
type SourceSftpDataSourceModel struct {
- Configuration SourceSftp `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,143 +47,20 @@ func (r *SourceSftpDataSource) Schema(ctx context.Context, req datasource.Schema
MarkdownDescription: "SourceSftp DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_sftp_authentication_wildcard_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through password authentication`,
- },
- "auth_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `The server authentication method`,
- },
- "source_sftp_authentication_wildcard_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through ssh key`,
- },
- "auth_ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- },
- Description: `The server authentication method`,
- },
- "source_sftp_update_authentication_wildcard_password_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through password authentication`,
- },
- "auth_user_password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `The server authentication method`,
- },
- "source_sftp_update_authentication_wildcard_ssh_key_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through ssh key`,
- },
- "auth_ssh_key": schema.StringAttribute{
- Computed: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- },
- Description: `The server authentication method`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The server authentication method`,
- },
- "file_pattern": schema.StringAttribute{
- Computed: true,
- Description: `The regular expression to specify files for sync in a chosen Folder Path`,
- },
- "file_types": schema.StringAttribute{
- Computed: true,
- Description: `Coma separated file types. Currently only 'csv' and 'json' types are supported.`,
- },
- "folder_path": schema.StringAttribute{
- Computed: true,
- Description: `The directory to search files for sync`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The server host address`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `The server port`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sftp",
- ),
- },
- Description: `must be one of ["sftp"]`,
- },
- "user": schema.StringAttribute{
- Computed: true,
- Description: `The server user`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_sftp_data_source_sdk.go b/internal/provider/source_sftp_data_source_sdk.go
old mode 100755
new mode 100644
index 9d58cbd1e..fe6058c71
--- a/internal/provider/source_sftp_data_source_sdk.go
+++ b/internal/provider/source_sftp_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSftpDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_sftp_resource.go b/internal/provider/source_sftp_resource.go
old mode 100755
new mode 100644
index e7b0900f0..b63e5408e
--- a/internal/provider/source_sftp_resource.go
+++ b/internal/provider/source_sftp_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceSftpResource struct {
// SourceSftpResourceModel describes the resource data model.
type SourceSftpResourceModel struct {
Configuration SourceSftp `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,120 +59,57 @@ func (r *SourceSftpResource) Schema(ctx context.Context, req resource.SchemaRequ
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_sftp_authentication_wildcard_password_authentication": schema.SingleNestedAttribute{
+ "password_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through password authentication`,
- },
- "auth_user_password": schema.StringAttribute{
- Required: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- },
- Description: `The server authentication method`,
- },
- "source_sftp_authentication_wildcard_ssh_key_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through ssh key`,
- },
- "auth_ssh_key": schema.StringAttribute{
- Required: true,
- Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
- },
- },
- Description: `The server authentication method`,
- },
- "source_sftp_update_authentication_wildcard_password_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_PASSWORD_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_PASSWORD_AUTH"]` + "\n" +
- `Connect through password authentication`,
- },
"auth_user_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
},
Description: `The server authentication method`,
},
- "source_sftp_update_authentication_wildcard_ssh_key_authentication": schema.SingleNestedAttribute{
+ "ssh_key_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "SSH_KEY_AUTH",
- ),
- },
- MarkdownDescription: `must be one of ["SSH_KEY_AUTH"]` + "\n" +
- `Connect through ssh key`,
- },
"auth_ssh_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )`,
},
},
Description: `The server authentication method`,
},
},
+ Description: `The server authentication method`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `The server authentication method`,
},
"file_pattern": schema.StringAttribute{
- Optional: true,
- Description: `The regular expression to specify files for sync in a chosen Folder Path`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `The regular expression to specify files for sync in a chosen Folder Path`,
},
"file_types": schema.StringAttribute{
- Optional: true,
- Description: `Coma separated file types. Currently only 'csv' and 'json' types are supported.`,
+ Optional: true,
+ MarkdownDescription: `Default: "csv,json"` + "\n" +
+							`Comma separated file types. Currently only 'csv' and 'json' types are supported.`,
},
"folder_path": schema.StringAttribute{
- Optional: true,
- Description: `The directory to search files for sync`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `The directory to search files for sync`,
},
"host": schema.StringAttribute{
Required: true,
Description: `The server host address`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `The server port`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sftp",
- ),
- },
- Description: `must be one of ["sftp"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `The server port`,
},
"user": schema.StringAttribute{
Required: true,
@@ -179,13 +117,24 @@ func (r *SourceSftpResource) Schema(ctx context.Context, req resource.SchemaRequ
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -249,7 +198,7 @@ func (r *SourceSftpResource) Create(ctx context.Context, req resource.CreateRequ
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSftp(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -425,5 +374,5 @@ func (r *SourceSftpResource) Delete(ctx context.Context, req resource.DeleteRequ
}
func (r *SourceSftpResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_sftp_resource_sdk.go b/internal/provider/source_sftp_resource_sdk.go
old mode 100755
new mode 100644
index ba7dd0596..cc85ad6d3
--- a/internal/provider/source_sftp_resource_sdk.go
+++ b/internal/provider/source_sftp_resource_sdk.go
@@ -3,39 +3,35 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSftpResourceModel) ToCreateSDKType() *shared.SourceSftpCreateRequest {
var credentials *shared.SourceSftpAuthenticationWildcard
if r.Configuration.Credentials != nil {
- var sourceSftpAuthenticationWildcardPasswordAuthentication *shared.SourceSftpAuthenticationWildcardPasswordAuthentication
- if r.Configuration.Credentials.SourceSftpAuthenticationWildcardPasswordAuthentication != nil {
- authMethod := shared.SourceSftpAuthenticationWildcardPasswordAuthenticationAuthMethod(r.Configuration.Credentials.SourceSftpAuthenticationWildcardPasswordAuthentication.AuthMethod.ValueString())
- authUserPassword := r.Configuration.Credentials.SourceSftpAuthenticationWildcardPasswordAuthentication.AuthUserPassword.ValueString()
- sourceSftpAuthenticationWildcardPasswordAuthentication = &shared.SourceSftpAuthenticationWildcardPasswordAuthentication{
- AuthMethod: authMethod,
+ var sourceSftpPasswordAuthentication *shared.SourceSftpPasswordAuthentication
+ if r.Configuration.Credentials.PasswordAuthentication != nil {
+ authUserPassword := r.Configuration.Credentials.PasswordAuthentication.AuthUserPassword.ValueString()
+ sourceSftpPasswordAuthentication = &shared.SourceSftpPasswordAuthentication{
AuthUserPassword: authUserPassword,
}
}
- if sourceSftpAuthenticationWildcardPasswordAuthentication != nil {
+ if sourceSftpPasswordAuthentication != nil {
credentials = &shared.SourceSftpAuthenticationWildcard{
- SourceSftpAuthenticationWildcardPasswordAuthentication: sourceSftpAuthenticationWildcardPasswordAuthentication,
+ SourceSftpPasswordAuthentication: sourceSftpPasswordAuthentication,
}
}
- var sourceSftpAuthenticationWildcardSSHKeyAuthentication *shared.SourceSftpAuthenticationWildcardSSHKeyAuthentication
- if r.Configuration.Credentials.SourceSftpAuthenticationWildcardSSHKeyAuthentication != nil {
- authMethod1 := shared.SourceSftpAuthenticationWildcardSSHKeyAuthenticationAuthMethod(r.Configuration.Credentials.SourceSftpAuthenticationWildcardSSHKeyAuthentication.AuthMethod.ValueString())
- authSSHKey := r.Configuration.Credentials.SourceSftpAuthenticationWildcardSSHKeyAuthentication.AuthSSHKey.ValueString()
- sourceSftpAuthenticationWildcardSSHKeyAuthentication = &shared.SourceSftpAuthenticationWildcardSSHKeyAuthentication{
- AuthMethod: authMethod1,
+ var sourceSftpSSHKeyAuthentication *shared.SourceSftpSSHKeyAuthentication
+ if r.Configuration.Credentials.SSHKeyAuthentication != nil {
+ authSSHKey := r.Configuration.Credentials.SSHKeyAuthentication.AuthSSHKey.ValueString()
+ sourceSftpSSHKeyAuthentication = &shared.SourceSftpSSHKeyAuthentication{
AuthSSHKey: authSSHKey,
}
}
- if sourceSftpAuthenticationWildcardSSHKeyAuthentication != nil {
+ if sourceSftpSSHKeyAuthentication != nil {
credentials = &shared.SourceSftpAuthenticationWildcard{
- SourceSftpAuthenticationWildcardSSHKeyAuthentication: sourceSftpAuthenticationWildcardSSHKeyAuthentication,
+ SourceSftpSSHKeyAuthentication: sourceSftpSSHKeyAuthentication,
}
}
}
@@ -58,8 +54,12 @@ func (r *SourceSftpResourceModel) ToCreateSDKType() *shared.SourceSftpCreateRequ
folderPath = nil
}
host := r.Configuration.Host.ValueString()
- port := r.Configuration.Port.ValueInt64()
- sourceType := shared.SourceSftpSftp(r.Configuration.SourceType.ValueString())
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
user := r.Configuration.User.ValueString()
configuration := shared.SourceSftp{
Credentials: credentials,
@@ -68,9 +68,14 @@ func (r *SourceSftpResourceModel) ToCreateSDKType() *shared.SourceSftpCreateRequ
FolderPath: folderPath,
Host: host,
Port: port,
- SourceType: sourceType,
User: user,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -81,6 +86,7 @@ func (r *SourceSftpResourceModel) ToCreateSDKType() *shared.SourceSftpCreateRequ
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSftpCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -96,32 +102,28 @@ func (r *SourceSftpResourceModel) ToGetSDKType() *shared.SourceSftpCreateRequest
func (r *SourceSftpResourceModel) ToUpdateSDKType() *shared.SourceSftpPutRequest {
var credentials *shared.SourceSftpUpdateAuthenticationWildcard
if r.Configuration.Credentials != nil {
- var sourceSftpUpdateAuthenticationWildcardPasswordAuthentication *shared.SourceSftpUpdateAuthenticationWildcardPasswordAuthentication
- if r.Configuration.Credentials.SourceSftpUpdateAuthenticationWildcardPasswordAuthentication != nil {
- authMethod := shared.SourceSftpUpdateAuthenticationWildcardPasswordAuthenticationAuthMethod(r.Configuration.Credentials.SourceSftpUpdateAuthenticationWildcardPasswordAuthentication.AuthMethod.ValueString())
- authUserPassword := r.Configuration.Credentials.SourceSftpUpdateAuthenticationWildcardPasswordAuthentication.AuthUserPassword.ValueString()
- sourceSftpUpdateAuthenticationWildcardPasswordAuthentication = &shared.SourceSftpUpdateAuthenticationWildcardPasswordAuthentication{
- AuthMethod: authMethod,
+ var sourceSftpUpdatePasswordAuthentication *shared.SourceSftpUpdatePasswordAuthentication
+ if r.Configuration.Credentials.PasswordAuthentication != nil {
+ authUserPassword := r.Configuration.Credentials.PasswordAuthentication.AuthUserPassword.ValueString()
+ sourceSftpUpdatePasswordAuthentication = &shared.SourceSftpUpdatePasswordAuthentication{
AuthUserPassword: authUserPassword,
}
}
- if sourceSftpUpdateAuthenticationWildcardPasswordAuthentication != nil {
+ if sourceSftpUpdatePasswordAuthentication != nil {
credentials = &shared.SourceSftpUpdateAuthenticationWildcard{
- SourceSftpUpdateAuthenticationWildcardPasswordAuthentication: sourceSftpUpdateAuthenticationWildcardPasswordAuthentication,
+ SourceSftpUpdatePasswordAuthentication: sourceSftpUpdatePasswordAuthentication,
}
}
- var sourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication *shared.SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication
- if r.Configuration.Credentials.SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication != nil {
- authMethod1 := shared.SourceSftpUpdateAuthenticationWildcardSSHKeyAuthenticationAuthMethod(r.Configuration.Credentials.SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication.AuthMethod.ValueString())
- authSSHKey := r.Configuration.Credentials.SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication.AuthSSHKey.ValueString()
- sourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication = &shared.SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication{
- AuthMethod: authMethod1,
+ var sourceSftpUpdateSSHKeyAuthentication *shared.SourceSftpUpdateSSHKeyAuthentication
+ if r.Configuration.Credentials.SSHKeyAuthentication != nil {
+ authSSHKey := r.Configuration.Credentials.SSHKeyAuthentication.AuthSSHKey.ValueString()
+ sourceSftpUpdateSSHKeyAuthentication = &shared.SourceSftpUpdateSSHKeyAuthentication{
AuthSSHKey: authSSHKey,
}
}
- if sourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication != nil {
+ if sourceSftpUpdateSSHKeyAuthentication != nil {
credentials = &shared.SourceSftpUpdateAuthenticationWildcard{
- SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication: sourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication,
+ SourceSftpUpdateSSHKeyAuthentication: sourceSftpUpdateSSHKeyAuthentication,
}
}
}
@@ -144,7 +146,12 @@ func (r *SourceSftpResourceModel) ToUpdateSDKType() *shared.SourceSftpPutRequest
folderPath = nil
}
host := r.Configuration.Host.ValueString()
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
user := r.Configuration.User.ValueString()
configuration := shared.SourceSftpUpdate{
Credentials: credentials,
diff --git a/internal/provider/source_sftpbulk_data_source.go b/internal/provider/source_sftpbulk_data_source.go
old mode 100755
new mode 100644
index 0c1a9c7b7..f7d0d2414
--- a/internal/provider/source_sftpbulk_data_source.go
+++ b/internal/provider/source_sftpbulk_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceSftpBulkDataSource struct {
// SourceSftpBulkDataSourceModel describes the data model.
type SourceSftpBulkDataSourceModel struct {
- Configuration SourceSftpBulk `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,88 +47,20 @@ func (r *SourceSftpBulkDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceSftpBulk DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "file_most_recent": schema.BoolAttribute{
- Computed: true,
- Description: `Sync only the most recent file for the configured folder path and file pattern`,
- },
- "file_pattern": schema.StringAttribute{
- Computed: true,
- Description: `The regular expression to specify files for sync in a chosen Folder Path`,
- },
- "file_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "csv",
- "json",
- ),
- },
- MarkdownDescription: `must be one of ["csv", "json"]` + "\n" +
- `The file type you want to sync. Currently only 'csv' and 'json' files are supported.`,
- },
- "folder_path": schema.StringAttribute{
- Computed: true,
- Description: `The directory to search files for sync`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The server host address`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `OS-level password for logging into the jump server host`,
- },
- "port": schema.Int64Attribute{
- Computed: true,
- Description: `The server port`,
- },
- "private_key": schema.StringAttribute{
- Computed: true,
- Description: `The private key`,
- },
- "separator": schema.StringAttribute{
- Computed: true,
- Description: `The separator used in the CSV files. Define None if you want to use the Sniffer functionality`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sftp-bulk",
- ),
- },
- Description: `must be one of ["sftp-bulk"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
- },
- "stream_name": schema.StringAttribute{
- Computed: true,
- Description: `The name of the stream or table you want to create`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `The server user`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_sftpbulk_data_source_sdk.go b/internal/provider/source_sftpbulk_data_source_sdk.go
old mode 100755
new mode 100644
index 488e168b9..c3c5fff5c
--- a/internal/provider/source_sftpbulk_data_source_sdk.go
+++ b/internal/provider/source_sftpbulk_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSftpBulkDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_sftpbulk_resource.go b/internal/provider/source_sftpbulk_resource.go
old mode 100755
new mode 100644
index 275cc5ce9..798678658
--- a/internal/provider/source_sftpbulk_resource.go
+++ b/internal/provider/source_sftpbulk_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceSftpBulkResource struct {
// SourceSftpBulkResourceModel describes the resource data model.
type SourceSftpBulkResourceModel struct {
Configuration SourceSftpBulk `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,27 +58,30 @@ func (r *SourceSftpBulkResource) Schema(ctx context.Context, req resource.Schema
Required: true,
Attributes: map[string]schema.Attribute{
"file_most_recent": schema.BoolAttribute{
- Optional: true,
- Description: `Sync only the most recent file for the configured folder path and file pattern`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Sync only the most recent file for the configured folder path and file pattern`,
},
"file_pattern": schema.StringAttribute{
- Optional: true,
- Description: `The regular expression to specify files for sync in a chosen Folder Path`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `The regular expression to specify files for sync in a chosen Folder Path`,
},
"file_type": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["csv", "json"]; Default: "csv"` + "\n" +
+ `The file type you want to sync. Currently only 'csv' and 'json' files are supported.`,
Validators: []validator.String{
stringvalidator.OneOf(
"csv",
"json",
),
},
- MarkdownDescription: `must be one of ["csv", "json"]` + "\n" +
- `The file type you want to sync. Currently only 'csv' and 'json' files are supported.`,
},
"folder_path": schema.StringAttribute{
- Required: true,
- Description: `The directory to search files for sync`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `The directory to search files for sync`,
},
"host": schema.StringAttribute{
Required: true,
@@ -84,35 +89,30 @@ func (r *SourceSftpBulkResource) Schema(ctx context.Context, req resource.Schema
},
"password": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `OS-level password for logging into the jump server host`,
},
"port": schema.Int64Attribute{
- Required: true,
- Description: `The server port`,
+ Optional: true,
+ MarkdownDescription: `Default: 22` + "\n" +
+ `The server port`,
},
"private_key": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The private key`,
},
"separator": schema.StringAttribute{
- Optional: true,
- Description: `The separator used in the CSV files. Define None if you want to use the Sniffer functionality`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sftp-bulk",
- ),
- },
- Description: `must be one of ["sftp-bulk"]`,
+ Optional: true,
+ MarkdownDescription: `Default: ","` + "\n" +
+ `The separator used in the CSV files. Define None if you want to use the Sniffer functionality`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
},
"stream_name": schema.StringAttribute{
Required: true,
@@ -124,13 +124,24 @@ func (r *SourceSftpBulkResource) Schema(ctx context.Context, req resource.Schema
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -194,7 +205,7 @@ func (r *SourceSftpBulkResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSftpBulk(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -370,5 +381,5 @@ func (r *SourceSftpBulkResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceSftpBulkResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_sftpbulk_resource_sdk.go b/internal/provider/source_sftpbulk_resource_sdk.go
old mode 100755
new mode 100644
index f28224df8..070a5585f
--- a/internal/provider/source_sftpbulk_resource_sdk.go
+++ b/internal/provider/source_sftpbulk_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -27,7 +27,12 @@ func (r *SourceSftpBulkResourceModel) ToCreateSDKType() *shared.SourceSftpBulkCr
} else {
fileType = nil
}
- folderPath := r.Configuration.FolderPath.ValueString()
+ folderPath := new(string)
+ if !r.Configuration.FolderPath.IsUnknown() && !r.Configuration.FolderPath.IsNull() {
+ *folderPath = r.Configuration.FolderPath.ValueString()
+ } else {
+ folderPath = nil
+ }
host := r.Configuration.Host.ValueString()
password := new(string)
if !r.Configuration.Password.IsUnknown() && !r.Configuration.Password.IsNull() {
@@ -35,7 +40,12 @@ func (r *SourceSftpBulkResourceModel) ToCreateSDKType() *shared.SourceSftpBulkCr
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
privateKey := new(string)
if !r.Configuration.PrivateKey.IsUnknown() && !r.Configuration.PrivateKey.IsNull() {
*privateKey = r.Configuration.PrivateKey.ValueString()
@@ -48,7 +58,6 @@ func (r *SourceSftpBulkResourceModel) ToCreateSDKType() *shared.SourceSftpBulkCr
} else {
separator = nil
}
- sourceType := shared.SourceSftpBulkSftpBulk(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
streamName := r.Configuration.StreamName.ValueString()
username := r.Configuration.Username.ValueString()
@@ -62,11 +71,16 @@ func (r *SourceSftpBulkResourceModel) ToCreateSDKType() *shared.SourceSftpBulkCr
Port: port,
PrivateKey: privateKey,
Separator: separator,
- SourceType: sourceType,
StartDate: startDate,
StreamName: streamName,
Username: username,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -77,6 +91,7 @@ func (r *SourceSftpBulkResourceModel) ToCreateSDKType() *shared.SourceSftpBulkCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSftpBulkCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -102,13 +117,18 @@ func (r *SourceSftpBulkResourceModel) ToUpdateSDKType() *shared.SourceSftpBulkPu
} else {
filePattern = nil
}
- fileType := new(shared.SourceSftpBulkUpdateFileType)
+ fileType := new(shared.FileType)
if !r.Configuration.FileType.IsUnknown() && !r.Configuration.FileType.IsNull() {
- *fileType = shared.SourceSftpBulkUpdateFileType(r.Configuration.FileType.ValueString())
+ *fileType = shared.FileType(r.Configuration.FileType.ValueString())
} else {
fileType = nil
}
- folderPath := r.Configuration.FolderPath.ValueString()
+ folderPath := new(string)
+ if !r.Configuration.FolderPath.IsUnknown() && !r.Configuration.FolderPath.IsNull() {
+ *folderPath = r.Configuration.FolderPath.ValueString()
+ } else {
+ folderPath = nil
+ }
host := r.Configuration.Host.ValueString()
password := new(string)
if !r.Configuration.Password.IsUnknown() && !r.Configuration.Password.IsNull() {
@@ -116,7 +136,12 @@ func (r *SourceSftpBulkResourceModel) ToUpdateSDKType() *shared.SourceSftpBulkPu
} else {
password = nil
}
- port := r.Configuration.Port.ValueInt64()
+ port := new(int64)
+ if !r.Configuration.Port.IsUnknown() && !r.Configuration.Port.IsNull() {
+ *port = r.Configuration.Port.ValueInt64()
+ } else {
+ port = nil
+ }
privateKey := new(string)
if !r.Configuration.PrivateKey.IsUnknown() && !r.Configuration.PrivateKey.IsNull() {
*privateKey = r.Configuration.PrivateKey.ValueString()
diff --git a/internal/provider/source_shopify_data_source.go b/internal/provider/source_shopify_data_source.go
old mode 100755
new mode 100644
index 8982016a2..0612f57b3
--- a/internal/provider/source_shopify_data_source.go
+++ b/internal/provider/source_shopify_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceShopifyDataSource struct {
// SourceShopifyDataSourceModel describes the data model.
type SourceShopifyDataSourceModel struct {
- Configuration SourceShopify `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,142 +47,20 @@ func (r *SourceShopifyDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceShopify DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_shopify_shopify_authorization_method_api_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_password": schema.StringAttribute{
- Computed: true,
- Description: `The API Password for your private application in the ` + "`" + `Shopify` + "`" + ` store.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_password",
- ),
- },
- Description: `must be one of ["api_password"]`,
- },
- },
- Description: `API Password Auth`,
- },
- "source_shopify_shopify_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The Access Token for making authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of the Shopify developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of the Shopify developer application.`,
- },
- },
- Description: `OAuth2.0`,
- },
- "source_shopify_update_shopify_authorization_method_api_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_password": schema.StringAttribute{
- Computed: true,
- Description: `The API Password for your private application in the ` + "`" + `Shopify` + "`" + ` store.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_password",
- ),
- },
- Description: `must be one of ["api_password"]`,
- },
- },
- Description: `API Password Auth`,
- },
- "source_shopify_update_shopify_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The Access Token for making authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of the Shopify developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of the Shopify developer application.`,
- },
- },
- Description: `OAuth2.0`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `The authorization method to use to retrieve data from Shopify`,
- },
- "shop": schema.StringAttribute{
- Computed: true,
- Description: `The name of your Shopify store found in the URL. For example, if your URL was https://NAME.myshopify.com, then the name would be 'NAME' or 'NAME.myshopify.com'.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "shopify",
- ),
- },
- Description: `must be one of ["shopify"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_shopify_data_source_sdk.go b/internal/provider/source_shopify_data_source_sdk.go
old mode 100755
new mode 100644
index 370244f32..d6b0c89cb
--- a/internal/provider/source_shopify_data_source_sdk.go
+++ b/internal/provider/source_shopify_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceShopifyDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_shopify_resource.go b/internal/provider/source_shopify_resource.go
old mode 100755
new mode 100644
index fe63a8404..0ff50fa96
--- a/internal/provider/source_shopify_resource.go
+++ b/internal/provider/source_shopify_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceShopifyResource struct {
// SourceShopifyResourceModel describes the resource data model.
type SourceShopifyResourceModel struct {
Configuration SourceShopify `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,87 +59,25 @@ func (r *SourceShopifyResource) Schema(ctx context.Context, req resource.SchemaR
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_shopify_shopify_authorization_method_api_password": schema.SingleNestedAttribute{
+ "api_password": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"api_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The API Password for your private application in the ` + "`" + `Shopify` + "`" + ` store.`,
},
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_password",
- ),
- },
- Description: `must be one of ["api_password"]`,
- },
- },
- Description: `API Password Auth`,
- },
- "source_shopify_shopify_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Optional: true,
- Description: `The Access Token for making authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Optional: true,
- Description: `The Client ID of the Shopify developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Optional: true,
- Description: `The Client Secret of the Shopify developer application.`,
- },
- },
- Description: `OAuth2.0`,
- },
- "source_shopify_update_shopify_authorization_method_api_password": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_password": schema.StringAttribute{
- Required: true,
- Description: `The API Password for your private application in the ` + "`" + `Shopify` + "`" + ` store.`,
- },
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_password",
- ),
- },
- Description: `must be one of ["api_password"]`,
- },
},
Description: `API Password Auth`,
},
- "source_shopify_update_shopify_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `The Access Token for making authenticated requests.`,
},
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Optional: true,
Description: `The Client ID of the Shopify developer application.`,
@@ -151,40 +90,43 @@ func (r *SourceShopifyResource) Schema(ctx context.Context, req resource.SchemaR
Description: `OAuth2.0`,
},
},
+ Description: `The authorization method to use to retrieve data from Shopify`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `The authorization method to use to retrieve data from Shopify`,
},
"shop": schema.StringAttribute{
Required: true,
Description: `The name of your Shopify store found in the URL. For example, if your URL was https://NAME.myshopify.com, then the name would be 'NAME' or 'NAME.myshopify.com'.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "shopify",
- ),
- },
- Description: `must be one of ["shopify"]`,
- },
"start_date": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `Default: "2020-01-01"` + "\n" +
+ `The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -248,7 +190,7 @@ func (r *SourceShopifyResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceShopify(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -424,5 +366,5 @@ func (r *SourceShopifyResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceShopifyResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_shopify_resource_sdk.go b/internal/provider/source_shopify_resource_sdk.go
old mode 100755
new mode 100644
index 207b452f8..2b7775153
--- a/internal/provider/source_shopify_resource_sdk.go
+++ b/internal/provider/source_shopify_resource_sdk.go
@@ -3,64 +3,59 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceShopifyResourceModel) ToCreateSDKType() *shared.SourceShopifyCreateRequest {
var credentials *shared.SourceShopifyShopifyAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceShopifyShopifyAuthorizationMethodOAuth20 *shared.SourceShopifyShopifyAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20 != nil {
+ var sourceShopifyOAuth20 *shared.SourceShopifyOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
accessToken := new(string)
- if !r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.AccessToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
} else {
accessToken = nil
}
- authMethod := shared.SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.AuthMethod.ValueString())
clientID := new(string)
- if !r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- sourceShopifyShopifyAuthorizationMethodOAuth20 = &shared.SourceShopifyShopifyAuthorizationMethodOAuth20{
+ sourceShopifyOAuth20 = &shared.SourceShopifyOAuth20{
AccessToken: accessToken,
- AuthMethod: authMethod,
ClientID: clientID,
ClientSecret: clientSecret,
}
}
- if sourceShopifyShopifyAuthorizationMethodOAuth20 != nil {
+ if sourceShopifyOAuth20 != nil {
credentials = &shared.SourceShopifyShopifyAuthorizationMethod{
- SourceShopifyShopifyAuthorizationMethodOAuth20: sourceShopifyShopifyAuthorizationMethodOAuth20,
+ SourceShopifyOAuth20: sourceShopifyOAuth20,
}
}
- var sourceShopifyShopifyAuthorizationMethodAPIPassword *shared.SourceShopifyShopifyAuthorizationMethodAPIPassword
- if r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodAPIPassword != nil {
- apiPassword := r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodAPIPassword.APIPassword.ValueString()
- authMethod1 := shared.SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod(r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodAPIPassword.AuthMethod.ValueString())
- sourceShopifyShopifyAuthorizationMethodAPIPassword = &shared.SourceShopifyShopifyAuthorizationMethodAPIPassword{
+ var sourceShopifyAPIPassword *shared.SourceShopifyAPIPassword
+ if r.Configuration.Credentials.APIPassword != nil {
+ apiPassword := r.Configuration.Credentials.APIPassword.APIPassword.ValueString()
+ sourceShopifyAPIPassword = &shared.SourceShopifyAPIPassword{
APIPassword: apiPassword,
- AuthMethod: authMethod1,
}
}
- if sourceShopifyShopifyAuthorizationMethodAPIPassword != nil {
+ if sourceShopifyAPIPassword != nil {
credentials = &shared.SourceShopifyShopifyAuthorizationMethod{
- SourceShopifyShopifyAuthorizationMethodAPIPassword: sourceShopifyShopifyAuthorizationMethodAPIPassword,
+ SourceShopifyAPIPassword: sourceShopifyAPIPassword,
}
}
}
shop := r.Configuration.Shop.ValueString()
- sourceType := shared.SourceShopifyShopify(r.Configuration.SourceType.ValueString())
startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
@@ -70,9 +65,14 @@ func (r *SourceShopifyResourceModel) ToCreateSDKType() *shared.SourceShopifyCrea
configuration := shared.SourceShopify{
Credentials: credentials,
Shop: shop,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -83,6 +83,7 @@ func (r *SourceShopifyResourceModel) ToCreateSDKType() *shared.SourceShopifyCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceShopifyCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -96,53 +97,49 @@ func (r *SourceShopifyResourceModel) ToGetSDKType() *shared.SourceShopifyCreateR
}
func (r *SourceShopifyResourceModel) ToUpdateSDKType() *shared.SourceShopifyPutRequest {
- var credentials *shared.SourceShopifyUpdateShopifyAuthorizationMethod
+ var credentials *shared.ShopifyAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceShopifyUpdateShopifyAuthorizationMethodOAuth20 *shared.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 != nil {
+ var sourceShopifyUpdateOAuth20 *shared.SourceShopifyUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
accessToken := new(string)
- if !r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.AccessToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
} else {
accessToken = nil
}
- authMethod := shared.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.AuthMethod.ValueString())
clientID := new(string)
- if !r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- sourceShopifyUpdateShopifyAuthorizationMethodOAuth20 = &shared.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20{
+ sourceShopifyUpdateOAuth20 = &shared.SourceShopifyUpdateOAuth20{
AccessToken: accessToken,
- AuthMethod: authMethod,
ClientID: clientID,
ClientSecret: clientSecret,
}
}
- if sourceShopifyUpdateShopifyAuthorizationMethodOAuth20 != nil {
- credentials = &shared.SourceShopifyUpdateShopifyAuthorizationMethod{
- SourceShopifyUpdateShopifyAuthorizationMethodOAuth20: sourceShopifyUpdateShopifyAuthorizationMethodOAuth20,
+ if sourceShopifyUpdateOAuth20 != nil {
+ credentials = &shared.ShopifyAuthorizationMethod{
+ SourceShopifyUpdateOAuth20: sourceShopifyUpdateOAuth20,
}
}
- var sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword *shared.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword
- if r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword != nil {
- apiPassword := r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword.APIPassword.ValueString()
- authMethod1 := shared.SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod(r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword.AuthMethod.ValueString())
- sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword = &shared.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword{
- APIPassword: apiPassword,
- AuthMethod: authMethod1,
+ var apiPassword *shared.APIPassword
+ if r.Configuration.Credentials.APIPassword != nil {
+ apiPassword1 := r.Configuration.Credentials.APIPassword.APIPassword.ValueString()
+ apiPassword = &shared.APIPassword{
+ APIPassword: apiPassword1,
}
}
- if sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword != nil {
- credentials = &shared.SourceShopifyUpdateShopifyAuthorizationMethod{
- SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword: sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword,
+ if apiPassword != nil {
+ credentials = &shared.ShopifyAuthorizationMethod{
+ APIPassword: apiPassword,
}
}
}
diff --git a/internal/provider/source_shortio_data_source.go b/internal/provider/source_shortio_data_source.go
old mode 100755
new mode 100644
index 51feea4bf..83ebf5e20
--- a/internal/provider/source_shortio_data_source.go
+++ b/internal/provider/source_shortio_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceShortioDataSource struct {
// SourceShortioDataSourceModel describes the data model.
type SourceShortioDataSourceModel struct {
- Configuration SourceShortio `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,41 +47,20 @@ func (r *SourceShortioDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceShortio DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "domain_id": schema.StringAttribute{
- Computed: true,
- },
- "secret_key": schema.StringAttribute{
- Computed: true,
- Description: `Short.io Secret Key`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "shortio",
- ),
- },
- Description: `must be one of ["shortio"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_shortio_data_source_sdk.go b/internal/provider/source_shortio_data_source_sdk.go
old mode 100755
new mode 100644
index a86e644c0..a375f8ce1
--- a/internal/provider/source_shortio_data_source_sdk.go
+++ b/internal/provider/source_shortio_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceShortioDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_shortio_resource.go b/internal/provider/source_shortio_resource.go
old mode 100755
new mode 100644
index 6018e6d97..b8909e58d
--- a/internal/provider/source_shortio_resource.go
+++ b/internal/provider/source_shortio_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceShortioResource struct {
// SourceShortioResourceModel describes the resource data model.
type SourceShortioResourceModel struct {
Configuration SourceShortio `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -59,30 +59,33 @@ func (r *SourceShortioResource) Schema(ctx context.Context, req resource.SchemaR
},
"secret_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Short.io Secret Key`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "shortio",
- ),
- },
- Description: `must be one of ["shortio"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -146,7 +149,7 @@ func (r *SourceShortioResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceShortio(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -322,5 +325,5 @@ func (r *SourceShortioResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceShortioResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_shortio_resource_sdk.go b/internal/provider/source_shortio_resource_sdk.go
old mode 100755
new mode 100644
index 06a0ecc42..158394685
--- a/internal/provider/source_shortio_resource_sdk.go
+++ b/internal/provider/source_shortio_resource_sdk.go
@@ -3,20 +3,24 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceShortioResourceModel) ToCreateSDKType() *shared.SourceShortioCreateRequest {
domainID := r.Configuration.DomainID.ValueString()
secretKey := r.Configuration.SecretKey.ValueString()
- sourceType := shared.SourceShortioShortio(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
configuration := shared.SourceShortio{
- DomainID: domainID,
- SecretKey: secretKey,
- SourceType: sourceType,
- StartDate: startDate,
+ DomainID: domainID,
+ SecretKey: secretKey,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -28,6 +32,7 @@ func (r *SourceShortioResourceModel) ToCreateSDKType() *shared.SourceShortioCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceShortioCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_slack_data_source.go b/internal/provider/source_slack_data_source.go
old mode 100755
new mode 100644
index 0bbf6aa12..ef5d6199d
--- a/internal/provider/source_slack_data_source.go
+++ b/internal/provider/source_slack_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceSlackDataSource struct {
// SourceSlackDataSourceModel describes the data model.
type SourceSlackDataSourceModel struct {
- Configuration SourceSlack `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,151 +47,20 @@ func (r *SourceSlackDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceSlack DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "channel_filter": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A channel name list (without leading '#' char) which limit the channels from which you'd like to sync. Empty list means no filter.`,
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_slack_authentication_mechanism_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `A Slack bot token. See the docs for instructions on how to generate it.`,
- },
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "API Token Credentials",
- ),
- },
- Description: `must be one of ["API Token Credentials"]`,
- },
- },
- Description: `Choose how to authenticate into Slack`,
- },
- "source_slack_authentication_mechanism_sign_in_via_slack_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Slack access_token. See our docs if you need help generating the token.`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Slack client_id. See our docs if you need help finding this id.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Slack client_secret. See our docs if you need help finding this secret.`,
- },
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Default OAuth2.0 authorization",
- ),
- },
- Description: `must be one of ["Default OAuth2.0 authorization"]`,
- },
- },
- Description: `Choose how to authenticate into Slack`,
- },
- "source_slack_update_authentication_mechanism_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `A Slack bot token. See the docs for instructions on how to generate it.`,
- },
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "API Token Credentials",
- ),
- },
- Description: `must be one of ["API Token Credentials"]`,
- },
- },
- Description: `Choose how to authenticate into Slack`,
- },
- "source_slack_update_authentication_mechanism_sign_in_via_slack_o_auth": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Slack access_token. See our docs if you need help generating the token.`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Slack client_id. See our docs if you need help finding this id.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Slack client_secret. See our docs if you need help finding this secret.`,
- },
- "option_title": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Default OAuth2.0 authorization",
- ),
- },
- Description: `must be one of ["Default OAuth2.0 authorization"]`,
- },
- },
- Description: `Choose how to authenticate into Slack`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Choose how to authenticate into Slack`,
- },
- "join_channels": schema.BoolAttribute{
- Computed: true,
- Description: `Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages. `,
- },
- "lookback_window": schema.Int64Attribute{
- Computed: true,
- Description: `How far into the past to look for messages in threads, default is 0 days`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "slack",
- ),
- },
- Description: `must be one of ["slack"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_slack_data_source_sdk.go b/internal/provider/source_slack_data_source_sdk.go
old mode 100755
new mode 100644
index c375c37e1..04bd9c85f
--- a/internal/provider/source_slack_data_source_sdk.go
+++ b/internal/provider/source_slack_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSlackDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_slack_resource.go b/internal/provider/source_slack_resource.go
old mode 100755
new mode 100644
index f4a8caea6..838f2161e
--- a/internal/provider/source_slack_resource.go
+++ b/internal/provider/source_slack_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceSlackResource struct {
// SourceSlackResourceModel describes the resource data model.
type SourceSlackResourceModel struct {
Configuration SourceSlack `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -63,30 +64,23 @@ func (r *SourceSlackResource) Schema(ctx context.Context, req resource.SchemaReq
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_slack_authentication_mechanism_api_token": schema.SingleNestedAttribute{
+ "api_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `A Slack bot token. See the docs for instructions on how to generate it.`,
},
- "option_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "API Token Credentials",
- ),
- },
- Description: `must be one of ["API Token Credentials"]`,
- },
},
Description: `Choose how to authenticate into Slack`,
},
- "source_slack_authentication_mechanism_sign_in_via_slack_o_auth": schema.SingleNestedAttribute{
+ "sign_in_via_slack_o_auth": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Slack access_token. See our docs if you need help generating the token.`,
},
"client_id": schema.StringAttribute{
@@ -97,103 +91,52 @@ func (r *SourceSlackResource) Schema(ctx context.Context, req resource.SchemaReq
Required: true,
Description: `Slack client_secret. See our docs if you need help finding this secret.`,
},
- "option_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Default OAuth2.0 authorization",
- ),
- },
- Description: `must be one of ["Default OAuth2.0 authorization"]`,
- },
- },
- Description: `Choose how to authenticate into Slack`,
- },
- "source_slack_update_authentication_mechanism_api_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Required: true,
- Description: `A Slack bot token. See the docs for instructions on how to generate it.`,
- },
- "option_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "API Token Credentials",
- ),
- },
- Description: `must be one of ["API Token Credentials"]`,
- },
- },
- Description: `Choose how to authenticate into Slack`,
- },
- "source_slack_update_authentication_mechanism_sign_in_via_slack_o_auth": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Slack access_token. See our docs if you need help generating the token.`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `Slack client_id. See our docs if you need help finding this id.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `Slack client_secret. See our docs if you need help finding this secret.`,
- },
- "option_title": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Default OAuth2.0 authorization",
- ),
- },
- Description: `must be one of ["Default OAuth2.0 authorization"]`,
- },
},
Description: `Choose how to authenticate into Slack`,
},
},
+ Description: `Choose how to authenticate into Slack`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Choose how to authenticate into Slack`,
},
"join_channels": schema.BoolAttribute{
- Required: true,
- Description: `Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages. `,
+ Optional: true,
+ MarkdownDescription: `Default: true` + "\n" +
+ `Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages. `,
},
"lookback_window": schema.Int64Attribute{
- Required: true,
- Description: `How far into the past to look for messages in threads, default is 0 days`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "slack",
- ),
- },
- Description: `must be one of ["slack"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `How far into the past to look for messages in threads, default is 0 days`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -257,7 +200,7 @@ func (r *SourceSlackResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSlack(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -433,5 +376,5 @@ func (r *SourceSlackResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceSlackResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_slack_resource_sdk.go b/internal/provider/source_slack_resource_sdk.go
old mode 100755
new mode 100644
index 780e8437e..a14b80f88
--- a/internal/provider/source_slack_resource_sdk.go
+++ b/internal/provider/source_slack_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -15,51 +15,61 @@ func (r *SourceSlackResourceModel) ToCreateSDKType() *shared.SourceSlackCreateRe
}
var credentials *shared.SourceSlackAuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceSlackAuthenticationMechanismSignInViaSlackOAuth *shared.SourceSlackAuthenticationMechanismSignInViaSlackOAuth
- if r.Configuration.Credentials.SourceSlackAuthenticationMechanismSignInViaSlackOAuth != nil {
- accessToken := r.Configuration.Credentials.SourceSlackAuthenticationMechanismSignInViaSlackOAuth.AccessToken.ValueString()
- clientID := r.Configuration.Credentials.SourceSlackAuthenticationMechanismSignInViaSlackOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceSlackAuthenticationMechanismSignInViaSlackOAuth.ClientSecret.ValueString()
- optionTitle := shared.SourceSlackAuthenticationMechanismSignInViaSlackOAuthOptionTitle(r.Configuration.Credentials.SourceSlackAuthenticationMechanismSignInViaSlackOAuth.OptionTitle.ValueString())
- sourceSlackAuthenticationMechanismSignInViaSlackOAuth = &shared.SourceSlackAuthenticationMechanismSignInViaSlackOAuth{
+ var sourceSlackSignInViaSlackOAuth *shared.SourceSlackSignInViaSlackOAuth
+ if r.Configuration.Credentials.SignInViaSlackOAuth != nil {
+ accessToken := r.Configuration.Credentials.SignInViaSlackOAuth.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.SignInViaSlackOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.SignInViaSlackOAuth.ClientSecret.ValueString()
+ sourceSlackSignInViaSlackOAuth = &shared.SourceSlackSignInViaSlackOAuth{
AccessToken: accessToken,
ClientID: clientID,
ClientSecret: clientSecret,
- OptionTitle: optionTitle,
}
}
- if sourceSlackAuthenticationMechanismSignInViaSlackOAuth != nil {
+ if sourceSlackSignInViaSlackOAuth != nil {
credentials = &shared.SourceSlackAuthenticationMechanism{
- SourceSlackAuthenticationMechanismSignInViaSlackOAuth: sourceSlackAuthenticationMechanismSignInViaSlackOAuth,
+ SourceSlackSignInViaSlackOAuth: sourceSlackSignInViaSlackOAuth,
}
}
- var sourceSlackAuthenticationMechanismAPIToken *shared.SourceSlackAuthenticationMechanismAPIToken
- if r.Configuration.Credentials.SourceSlackAuthenticationMechanismAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceSlackAuthenticationMechanismAPIToken.APIToken.ValueString()
- optionTitle1 := shared.SourceSlackAuthenticationMechanismAPITokenOptionTitle(r.Configuration.Credentials.SourceSlackAuthenticationMechanismAPIToken.OptionTitle.ValueString())
- sourceSlackAuthenticationMechanismAPIToken = &shared.SourceSlackAuthenticationMechanismAPIToken{
- APIToken: apiToken,
- OptionTitle: optionTitle1,
+ var sourceSlackAPIToken *shared.SourceSlackAPIToken
+ if r.Configuration.Credentials.APIToken != nil {
+ apiToken := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ sourceSlackAPIToken = &shared.SourceSlackAPIToken{
+ APIToken: apiToken,
}
}
- if sourceSlackAuthenticationMechanismAPIToken != nil {
+ if sourceSlackAPIToken != nil {
credentials = &shared.SourceSlackAuthenticationMechanism{
- SourceSlackAuthenticationMechanismAPIToken: sourceSlackAuthenticationMechanismAPIToken,
+ SourceSlackAPIToken: sourceSlackAPIToken,
}
}
}
- joinChannels := r.Configuration.JoinChannels.ValueBool()
- lookbackWindow := r.Configuration.LookbackWindow.ValueInt64()
- sourceType := shared.SourceSlackSlack(r.Configuration.SourceType.ValueString())
+ joinChannels := new(bool)
+ if !r.Configuration.JoinChannels.IsUnknown() && !r.Configuration.JoinChannels.IsNull() {
+ *joinChannels = r.Configuration.JoinChannels.ValueBool()
+ } else {
+ joinChannels = nil
+ }
+ lookbackWindow := new(int64)
+ if !r.Configuration.LookbackWindow.IsUnknown() && !r.Configuration.LookbackWindow.IsNull() {
+ *lookbackWindow = r.Configuration.LookbackWindow.ValueInt64()
+ } else {
+ lookbackWindow = nil
+ }
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceSlack{
ChannelFilter: channelFilter,
Credentials: credentials,
JoinChannels: joinChannels,
LookbackWindow: lookbackWindow,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -70,6 +80,7 @@ func (r *SourceSlackResourceModel) ToCreateSDKType() *shared.SourceSlackCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSlackCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -89,41 +100,47 @@ func (r *SourceSlackResourceModel) ToUpdateSDKType() *shared.SourceSlackPutReque
}
var credentials *shared.SourceSlackUpdateAuthenticationMechanism
if r.Configuration.Credentials != nil {
- var sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth *shared.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth
- if r.Configuration.Credentials.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth != nil {
- accessToken := r.Configuration.Credentials.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth.AccessToken.ValueString()
- clientID := r.Configuration.Credentials.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth.ClientSecret.ValueString()
- optionTitle := shared.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuthOptionTitle(r.Configuration.Credentials.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth.OptionTitle.ValueString())
- sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth = &shared.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth{
+ var signInViaSlackOAuth *shared.SignInViaSlackOAuth
+ if r.Configuration.Credentials.SignInViaSlackOAuth != nil {
+ accessToken := r.Configuration.Credentials.SignInViaSlackOAuth.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.SignInViaSlackOAuth.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.SignInViaSlackOAuth.ClientSecret.ValueString()
+ signInViaSlackOAuth = &shared.SignInViaSlackOAuth{
AccessToken: accessToken,
ClientID: clientID,
ClientSecret: clientSecret,
- OptionTitle: optionTitle,
}
}
- if sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth != nil {
+ if signInViaSlackOAuth != nil {
credentials = &shared.SourceSlackUpdateAuthenticationMechanism{
- SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth: sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth,
+ SignInViaSlackOAuth: signInViaSlackOAuth,
}
}
- var sourceSlackUpdateAuthenticationMechanismAPIToken *shared.SourceSlackUpdateAuthenticationMechanismAPIToken
- if r.Configuration.Credentials.SourceSlackUpdateAuthenticationMechanismAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceSlackUpdateAuthenticationMechanismAPIToken.APIToken.ValueString()
- optionTitle1 := shared.SourceSlackUpdateAuthenticationMechanismAPITokenOptionTitle(r.Configuration.Credentials.SourceSlackUpdateAuthenticationMechanismAPIToken.OptionTitle.ValueString())
- sourceSlackUpdateAuthenticationMechanismAPIToken = &shared.SourceSlackUpdateAuthenticationMechanismAPIToken{
- APIToken: apiToken,
- OptionTitle: optionTitle1,
+ var sourceSlackUpdateAPIToken *shared.SourceSlackUpdateAPIToken
+ if r.Configuration.Credentials.APIToken != nil {
+ apiToken := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ sourceSlackUpdateAPIToken = &shared.SourceSlackUpdateAPIToken{
+ APIToken: apiToken,
}
}
- if sourceSlackUpdateAuthenticationMechanismAPIToken != nil {
+ if sourceSlackUpdateAPIToken != nil {
credentials = &shared.SourceSlackUpdateAuthenticationMechanism{
- SourceSlackUpdateAuthenticationMechanismAPIToken: sourceSlackUpdateAuthenticationMechanismAPIToken,
+ SourceSlackUpdateAPIToken: sourceSlackUpdateAPIToken,
}
}
}
- joinChannels := r.Configuration.JoinChannels.ValueBool()
- lookbackWindow := r.Configuration.LookbackWindow.ValueInt64()
+ joinChannels := new(bool)
+ if !r.Configuration.JoinChannels.IsUnknown() && !r.Configuration.JoinChannels.IsNull() {
+ *joinChannels = r.Configuration.JoinChannels.ValueBool()
+ } else {
+ joinChannels = nil
+ }
+ lookbackWindow := new(int64)
+ if !r.Configuration.LookbackWindow.IsUnknown() && !r.Configuration.LookbackWindow.IsNull() {
+ *lookbackWindow = r.Configuration.LookbackWindow.ValueInt64()
+ } else {
+ lookbackWindow = nil
+ }
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceSlackUpdate{
ChannelFilter: channelFilter,
diff --git a/internal/provider/source_smaily_data_source.go b/internal/provider/source_smaily_data_source.go
old mode 100755
new mode 100644
index 9e707f7bb..31e90cbec
--- a/internal/provider/source_smaily_data_source.go
+++ b/internal/provider/source_smaily_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceSmailyDataSource struct {
// SourceSmailyDataSourceModel describes the data model.
type SourceSmailyDataSourceModel struct {
- Configuration SourceSmaily `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,42 +47,20 @@ func (r *SourceSmailyDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceSmaily DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_password": schema.StringAttribute{
- Computed: true,
- Description: `API user password. See https://smaily.com/help/api/general/create-api-user/`,
- },
- "api_subdomain": schema.StringAttribute{
- Computed: true,
- Description: `API Subdomain. See https://smaily.com/help/api/general/create-api-user/`,
- },
- "api_username": schema.StringAttribute{
- Computed: true,
- Description: `API user username. See https://smaily.com/help/api/general/create-api-user/`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "smaily",
- ),
- },
- Description: `must be one of ["smaily"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_smaily_data_source_sdk.go b/internal/provider/source_smaily_data_source_sdk.go
old mode 100755
new mode 100644
index d47f0c9b4..fb787f38c
--- a/internal/provider/source_smaily_data_source_sdk.go
+++ b/internal/provider/source_smaily_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSmailyDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_smaily_resource.go b/internal/provider/source_smaily_resource.go
old mode 100755
new mode 100644
index 9f1625d70..251344d22
--- a/internal/provider/source_smaily_resource.go
+++ b/internal/provider/source_smaily_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceSmailyResource struct {
// SourceSmailyResourceModel describes the resource data model.
type SourceSmailyResourceModel struct {
Configuration SourceSmaily `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,6 +56,7 @@ func (r *SourceSmailyResource) Schema(ctx context.Context, req resource.SchemaRe
Attributes: map[string]schema.Attribute{
"api_password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API user password. See https://smaily.com/help/api/general/create-api-user/`,
},
"api_subdomain": schema.StringAttribute{
@@ -66,24 +67,26 @@ func (r *SourceSmailyResource) Schema(ctx context.Context, req resource.SchemaRe
Required: true,
Description: `API user username. See https://smaily.com/help/api/general/create-api-user/`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "smaily",
- ),
- },
- Description: `must be one of ["smaily"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +150,7 @@ func (r *SourceSmailyResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSmaily(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +326,5 @@ func (r *SourceSmailyResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceSmailyResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_smaily_resource_sdk.go b/internal/provider/source_smaily_resource_sdk.go
old mode 100755
new mode 100644
index d4f5ac0d0..f0d8b666f
--- a/internal/provider/source_smaily_resource_sdk.go
+++ b/internal/provider/source_smaily_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -11,12 +11,16 @@ func (r *SourceSmailyResourceModel) ToCreateSDKType() *shared.SourceSmailyCreate
apiPassword := r.Configuration.APIPassword.ValueString()
apiSubdomain := r.Configuration.APISubdomain.ValueString()
apiUsername := r.Configuration.APIUsername.ValueString()
- sourceType := shared.SourceSmailySmaily(r.Configuration.SourceType.ValueString())
configuration := shared.SourceSmaily{
APIPassword: apiPassword,
APISubdomain: apiSubdomain,
APIUsername: apiUsername,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -28,6 +32,7 @@ func (r *SourceSmailyResourceModel) ToCreateSDKType() *shared.SourceSmailyCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSmailyCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_smartengage_data_source.go b/internal/provider/source_smartengage_data_source.go
old mode 100755
new mode 100644
index 41828403f..742d1f313
--- a/internal/provider/source_smartengage_data_source.go
+++ b/internal/provider/source_smartengage_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceSmartengageDataSource struct {
// SourceSmartengageDataSourceModel describes the data model.
type SourceSmartengageDataSourceModel struct {
- Configuration SourceSmartengage `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceSmartengageDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "SourceSmartengage DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `API Key`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "smartengage",
- ),
- },
- Description: `must be one of ["smartengage"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_smartengage_data_source_sdk.go b/internal/provider/source_smartengage_data_source_sdk.go
old mode 100755
new mode 100644
index eb001e3f2..88eea724a
--- a/internal/provider/source_smartengage_data_source_sdk.go
+++ b/internal/provider/source_smartengage_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSmartengageDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_smartengage_resource.go b/internal/provider/source_smartengage_resource.go
old mode 100755
new mode 100644
index 412b9f15b..d9d201df5
--- a/internal/provider/source_smartengage_resource.go
+++ b/internal/provider/source_smartengage_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceSmartengageResource struct {
// SourceSmartengageResourceModel describes the resource data model.
type SourceSmartengageResourceModel struct {
- Configuration SourceSmartengage `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration APIKeyAuth `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceSmartengageResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceSmartengageResource) Schema(ctx context.Context, req resource.Sch
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Key`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "smartengage",
- ),
- },
- Description: `must be one of ["smartengage"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceSmartengageResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSmartengage(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceSmartengageResource) Delete(ctx context.Context, req resource.Del
}
func (r *SourceSmartengageResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_smartengage_resource_sdk.go b/internal/provider/source_smartengage_resource_sdk.go
old mode 100755
new mode 100644
index 0a0baa205..55f967cd8
--- a/internal/provider/source_smartengage_resource_sdk.go
+++ b/internal/provider/source_smartengage_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSmartengageResourceModel) ToCreateSDKType() *shared.SourceSmartengageCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
- sourceType := shared.SourceSmartengageSmartengage(r.Configuration.SourceType.ValueString())
configuration := shared.SourceSmartengage{
- APIKey: apiKey,
- SourceType: sourceType,
+ APIKey: apiKey,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceSmartengageResourceModel) ToCreateSDKType() *shared.SourceSmarten
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSmartengageCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_smartsheets_data_source.go b/internal/provider/source_smartsheets_data_source.go
old mode 100755
new mode 100644
index 2eb7b95b1..dc4806652
--- a/internal/provider/source_smartsheets_data_source.go
+++ b/internal/provider/source_smartsheets_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceSmartsheetsDataSource struct {
// SourceSmartsheetsDataSourceModel describes the data model.
type SourceSmartsheetsDataSourceModel struct {
- Configuration SourceSmartsheets `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,164 +47,20 @@ func (r *SourceSmartsheetsDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "SourceSmartsheets DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_smartsheets_authorization_method_api_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_smartsheets_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The API ID of the SmartSheets developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The API Secret the SmartSheets developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_smartsheets_update_authorization_method_api_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_smartsheets_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The API ID of the SmartSheets developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The API Secret the SmartSheets developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "metadata_fields": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `A List of available columns which metadata can be pulled from.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "smartsheets",
- ),
- },
- Description: `must be one of ["smartsheets"]`,
- },
- "spreadsheet_id": schema.StringAttribute{
- Computed: true,
- Description: `The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties`,
- },
- "start_datetime": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `Only rows modified after this date/time will be replicated. This should be an ISO 8601 string, for instance: ` + "`" + `2000-01-01T13:00:00` + "`" + ``,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_smartsheets_data_source_sdk.go b/internal/provider/source_smartsheets_data_source_sdk.go
old mode 100755
new mode 100644
index 5d43c5da7..93ac7a7f2
--- a/internal/provider/source_smartsheets_data_source_sdk.go
+++ b/internal/provider/source_smartsheets_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSmartsheetsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_smartsheets_resource.go b/internal/provider/source_smartsheets_resource.go
old mode 100755
new mode 100644
index bba00fd20..7fa8cf58c
--- a/internal/provider/source_smartsheets_resource.go
+++ b/internal/provider/source_smartsheets_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceSmartsheetsResource struct {
// SourceSmartsheetsResourceModel describes the resource data model.
type SourceSmartsheetsResourceModel struct {
Configuration SourceSmartsheets `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,40 +59,24 @@ func (r *SourceSmartsheetsResource) Schema(ctx context.Context, req resource.Sch
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_smartsheets_authorization_method_api_access_token": schema.SingleNestedAttribute{
+ "api_access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
},
},
- "source_smartsheets_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The API ID of the SmartSheets developer application.`,
@@ -102,69 +87,16 @@ func (r *SourceSmartsheetsResource) Schema(ctx context.Context, req resource.Sch
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The key to refresh the expired access_token.`,
},
"token_expiry_date": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_smartsheets_update_authorization_method_api_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_smartsheets_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
Required: true,
- Description: `The API ID of the SmartSheets developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The API Secret the SmartSheets developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Required: true,
+ Sensitive: true,
+ Description: `The date-time when the access token should be refreshed.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date-time when the access token should be refreshed.`,
},
},
},
@@ -178,35 +110,38 @@ func (r *SourceSmartsheetsResource) Schema(ctx context.Context, req resource.Sch
ElementType: types.StringType,
Description: `A List of available columns which metadata can be pulled from.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "smartsheets",
- ),
- },
- Description: `must be one of ["smartsheets"]`,
- },
"spreadsheet_id": schema.StringAttribute{
Required: true,
Description: `The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties`,
},
"start_datetime": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `Default: "2020-01-01T00:00:00+00:00"` + "\n" +
+ `Only rows modified after this date/time will be replicated. This should be an ISO 8601 string, for instance: ` + "`" + `2000-01-01T13:00:00` + "`" + ``,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `Only rows modified after this date/time will be replicated. This should be an ISO 8601 string, for instance: ` + "`" + `2000-01-01T13:00:00` + "`" + ``,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -270,7 +205,7 @@ func (r *SourceSmartsheetsResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSmartsheets(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -446,5 +381,5 @@ func (r *SourceSmartsheetsResource) Delete(ctx context.Context, req resource.Del
}
func (r *SourceSmartsheetsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_smartsheets_resource_sdk.go b/internal/provider/source_smartsheets_resource_sdk.go
old mode 100755
new mode 100644
index 2321feb00..6abf4137a
--- a/internal/provider/source_smartsheets_resource_sdk.go
+++ b/internal/provider/source_smartsheets_resource_sdk.go
@@ -3,64 +3,49 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceSmartsheetsResourceModel) ToCreateSDKType() *shared.SourceSmartsheetsCreateRequest {
var credentials shared.SourceSmartsheetsAuthorizationMethod
- var sourceSmartsheetsAuthorizationMethodOAuth20 *shared.SourceSmartsheetsAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.SourceSmartsheetsAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceSmartsheetsAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodOAuth20.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodOAuth20.TokenExpiryDate.ValueString())
- sourceSmartsheetsAuthorizationMethodOAuth20 = &shared.SourceSmartsheetsAuthorizationMethodOAuth20{
+ var sourceSmartsheetsOAuth20 *shared.SourceSmartsheetsOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
+ sourceSmartsheetsOAuth20 = &shared.SourceSmartsheetsOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceSmartsheetsAuthorizationMethodOAuth20 != nil {
+ if sourceSmartsheetsOAuth20 != nil {
credentials = shared.SourceSmartsheetsAuthorizationMethod{
- SourceSmartsheetsAuthorizationMethodOAuth20: sourceSmartsheetsAuthorizationMethodOAuth20,
+ SourceSmartsheetsOAuth20: sourceSmartsheetsOAuth20,
}
}
- var sourceSmartsheetsAuthorizationMethodAPIAccessToken *shared.SourceSmartsheetsAuthorizationMethodAPIAccessToken
- if r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodAPIAccessToken != nil {
- accessToken1 := r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodAPIAccessToken.AccessToken.ValueString()
- authType1 := new(shared.SourceSmartsheetsAuthorizationMethodAPIAccessTokenAuthType)
- if !r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodAPIAccessToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodAPIAccessToken.AuthType.IsNull() {
- *authType1 = shared.SourceSmartsheetsAuthorizationMethodAPIAccessTokenAuthType(r.Configuration.Credentials.SourceSmartsheetsAuthorizationMethodAPIAccessToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- sourceSmartsheetsAuthorizationMethodAPIAccessToken = &shared.SourceSmartsheetsAuthorizationMethodAPIAccessToken{
+ var sourceSmartsheetsAPIAccessToken *shared.SourceSmartsheetsAPIAccessToken
+ if r.Configuration.Credentials.APIAccessToken != nil {
+ accessToken1 := r.Configuration.Credentials.APIAccessToken.AccessToken.ValueString()
+ sourceSmartsheetsAPIAccessToken = &shared.SourceSmartsheetsAPIAccessToken{
AccessToken: accessToken1,
- AuthType: authType1,
}
}
- if sourceSmartsheetsAuthorizationMethodAPIAccessToken != nil {
+ if sourceSmartsheetsAPIAccessToken != nil {
credentials = shared.SourceSmartsheetsAuthorizationMethod{
- SourceSmartsheetsAuthorizationMethodAPIAccessToken: sourceSmartsheetsAuthorizationMethodAPIAccessToken,
+ SourceSmartsheetsAPIAccessToken: sourceSmartsheetsAPIAccessToken,
}
}
var metadataFields []shared.SourceSmartsheetsValidenums = nil
for _, metadataFieldsItem := range r.Configuration.MetadataFields {
metadataFields = append(metadataFields, shared.SourceSmartsheetsValidenums(metadataFieldsItem.ValueString()))
}
- sourceType := shared.SourceSmartsheetsSmartsheets(r.Configuration.SourceType.ValueString())
spreadsheetID := r.Configuration.SpreadsheetID.ValueString()
startDatetime := new(time.Time)
if !r.Configuration.StartDatetime.IsUnknown() && !r.Configuration.StartDatetime.IsNull() {
@@ -71,10 +56,15 @@ func (r *SourceSmartsheetsResourceModel) ToCreateSDKType() *shared.SourceSmartsh
configuration := shared.SourceSmartsheets{
Credentials: credentials,
MetadataFields: metadataFields,
- SourceType: sourceType,
SpreadsheetID: spreadsheetID,
StartDatetime: startDatetime,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -85,6 +75,7 @@ func (r *SourceSmartsheetsResourceModel) ToCreateSDKType() *shared.SourceSmartsh
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSmartsheetsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -99,55 +90,41 @@ func (r *SourceSmartsheetsResourceModel) ToGetSDKType() *shared.SourceSmartsheet
func (r *SourceSmartsheetsResourceModel) ToUpdateSDKType() *shared.SourceSmartsheetsPutRequest {
var credentials shared.SourceSmartsheetsUpdateAuthorizationMethod
- var sourceSmartsheetsUpdateAuthorizationMethodOAuth20 *shared.SourceSmartsheetsUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.SourceSmartsheetsUpdateAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceSmartsheetsUpdateAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodOAuth20.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodOAuth20.TokenExpiryDate.ValueString())
- sourceSmartsheetsUpdateAuthorizationMethodOAuth20 = &shared.SourceSmartsheetsUpdateAuthorizationMethodOAuth20{
+ var sourceSmartsheetsUpdateOAuth20 *shared.SourceSmartsheetsUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
+ sourceSmartsheetsUpdateOAuth20 = &shared.SourceSmartsheetsUpdateOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceSmartsheetsUpdateAuthorizationMethodOAuth20 != nil {
+ if sourceSmartsheetsUpdateOAuth20 != nil {
credentials = shared.SourceSmartsheetsUpdateAuthorizationMethod{
- SourceSmartsheetsUpdateAuthorizationMethodOAuth20: sourceSmartsheetsUpdateAuthorizationMethodOAuth20,
+ SourceSmartsheetsUpdateOAuth20: sourceSmartsheetsUpdateOAuth20,
}
}
- var sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken *shared.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken
- if r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken != nil {
- accessToken1 := r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken.AccessToken.ValueString()
- authType1 := new(shared.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessTokenAuthType)
- if !r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken.AuthType.IsNull() {
- *authType1 = shared.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessTokenAuthType(r.Configuration.Credentials.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken = &shared.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken{
+ var apiAccessToken *shared.APIAccessToken
+ if r.Configuration.Credentials.APIAccessToken != nil {
+ accessToken1 := r.Configuration.Credentials.APIAccessToken.AccessToken.ValueString()
+ apiAccessToken = &shared.APIAccessToken{
AccessToken: accessToken1,
- AuthType: authType1,
}
}
- if sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken != nil {
+ if apiAccessToken != nil {
credentials = shared.SourceSmartsheetsUpdateAuthorizationMethod{
- SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken: sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken,
+ APIAccessToken: apiAccessToken,
}
}
- var metadataFields []shared.SourceSmartsheetsUpdateValidenums = nil
+ var metadataFields []shared.Validenums = nil
for _, metadataFieldsItem := range r.Configuration.MetadataFields {
- metadataFields = append(metadataFields, shared.SourceSmartsheetsUpdateValidenums(metadataFieldsItem.ValueString()))
+ metadataFields = append(metadataFields, shared.Validenums(metadataFieldsItem.ValueString()))
}
spreadsheetID := r.Configuration.SpreadsheetID.ValueString()
startDatetime := new(time.Time)
diff --git a/internal/provider/source_snapchatmarketing_data_source.go b/internal/provider/source_snapchatmarketing_data_source.go
old mode 100755
new mode 100644
index ff6c5d09e..089407784
--- a/internal/provider/source_snapchatmarketing_data_source.go
+++ b/internal/provider/source_snapchatmarketing_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceSnapchatMarketingDataSource struct {
// SourceSnapchatMarketingDataSourceModel describes the data model.
type SourceSnapchatMarketingDataSourceModel struct {
- Configuration SourceSnapchatMarketing `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,56 +47,20 @@ func (r *SourceSnapchatMarketingDataSource) Schema(ctx context.Context, req data
MarkdownDescription: "SourceSnapchatMarketing DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Snapchat developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Snapchat developer application.`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `Date in the format 2017-01-25. Any data after this date will not be replicated.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token to renew the expired Access Token.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snapchat-marketing",
- ),
- },
- Description: `must be one of ["snapchat-marketing"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `Date in the format 2022-01-01. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_snapchatmarketing_data_source_sdk.go b/internal/provider/source_snapchatmarketing_data_source_sdk.go
old mode 100755
new mode 100644
index 447da3d17..c3c54855d
--- a/internal/provider/source_snapchatmarketing_data_source_sdk.go
+++ b/internal/provider/source_snapchatmarketing_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSnapchatMarketingDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_snapchatmarketing_resource.go b/internal/provider/source_snapchatmarketing_resource.go
old mode 100755
new mode 100644
index 15d5a5409..c746a9952
--- a/internal/provider/source_snapchatmarketing_resource.go
+++ b/internal/provider/source_snapchatmarketing_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceSnapchatMarketingResource struct {
// SourceSnapchatMarketingResourceModel describes the resource data model.
type SourceSnapchatMarketingResourceModel struct {
Configuration SourceSnapchatMarketing `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -64,41 +65,45 @@ func (r *SourceSnapchatMarketingResource) Schema(ctx context.Context, req resour
Description: `The Client Secret of your Snapchat developer application.`,
},
"end_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `Date in the format 2017-01-25. Any data after this date will not be replicated.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `Date in the format 2017-01-25. Any data after this date will not be replicated.`,
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Refresh Token to renew the expired Access Token.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snapchat-marketing",
- ),
- },
- Description: `must be one of ["snapchat-marketing"]`,
- },
"start_date": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `Default: "2022-01-01"` + "\n" +
+ `Date in the format 2022-01-01. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `Date in the format 2022-01-01. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -162,7 +167,7 @@ func (r *SourceSnapchatMarketingResource) Create(ctx context.Context, req resour
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSnapchatMarketing(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -338,5 +343,5 @@ func (r *SourceSnapchatMarketingResource) Delete(ctx context.Context, req resour
}
func (r *SourceSnapchatMarketingResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_snapchatmarketing_resource_sdk.go b/internal/provider/source_snapchatmarketing_resource_sdk.go
old mode 100755
new mode 100644
index d1c3b99d6..d5b9579e5
--- a/internal/provider/source_snapchatmarketing_resource_sdk.go
+++ b/internal/provider/source_snapchatmarketing_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -18,7 +18,6 @@ func (r *SourceSnapchatMarketingResourceModel) ToCreateSDKType() *shared.SourceS
endDate = nil
}
refreshToken := r.Configuration.RefreshToken.ValueString()
- sourceType := shared.SourceSnapchatMarketingSnapchatMarketing(r.Configuration.SourceType.ValueString())
startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
@@ -30,9 +29,14 @@ func (r *SourceSnapchatMarketingResourceModel) ToCreateSDKType() *shared.SourceS
ClientSecret: clientSecret,
EndDate: endDate,
RefreshToken: refreshToken,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -43,6 +47,7 @@ func (r *SourceSnapchatMarketingResourceModel) ToCreateSDKType() *shared.SourceS
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSnapchatMarketingCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_snowflake_data_source.go b/internal/provider/source_snowflake_data_source.go
old mode 100755
new mode 100644
index 72ca9a2a2..06a06bf1f
--- a/internal/provider/source_snowflake_data_source.go
+++ b/internal/provider/source_snowflake_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceSnowflakeDataSource struct {
// SourceSnowflakeDataSourceModel describes the data model.
type SourceSnowflakeDataSourceModel struct {
- Configuration SourceSnowflake `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,166 +47,20 @@ func (r *SourceSnowflakeDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceSnowflake DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_snowflake_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth",
- ),
- },
- Description: `must be one of ["OAuth"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Snowflake developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Snowflake developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token for making authenticated requests.`,
- },
- },
- },
- "source_snowflake_authorization_method_username_and_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username/password",
- ),
- },
- Description: `must be one of ["username/password"]`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `The password associated with the username.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `The username you created to allow Airbyte to access the database.`,
- },
- },
- },
- "source_snowflake_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth",
- ),
- },
- Description: `must be one of ["OAuth"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Snowflake developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Snowflake developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token for making authenticated requests.`,
- },
- },
- },
- "source_snowflake_update_authorization_method_username_and_password": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username/password",
- ),
- },
- Description: `must be one of ["username/password"]`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `The password associated with the username.`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `The username you created to allow Airbyte to access the database.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "database": schema.StringAttribute{
- Computed: true,
- Description: `The database you created for Airbyte to access data.`,
- },
- "host": schema.StringAttribute{
- Computed: true,
- Description: `The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com).`,
- },
- "jdbc_url_params": schema.StringAttribute{
- Computed: true,
- Description: `Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).`,
- },
- "role": schema.StringAttribute{
- Computed: true,
- Description: `The role you created for Airbyte to access Snowflake.`,
- },
- "schema": schema.StringAttribute{
- Computed: true,
- Description: `The source Snowflake schema tables. Leave empty to access tables from multiple schemas.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snowflake",
- ),
- },
- Description: `must be one of ["snowflake"]`,
- },
- "warehouse": schema.StringAttribute{
- Computed: true,
- Description: `The warehouse you created for Airbyte to access data.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_snowflake_data_source_sdk.go b/internal/provider/source_snowflake_data_source_sdk.go
old mode 100755
new mode 100644
index 4d53fbf9c..28d67fdc2
--- a/internal/provider/source_snowflake_data_source_sdk.go
+++ b/internal/provider/source_snowflake_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSnowflakeDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_snowflake_resource.go b/internal/provider/source_snowflake_resource.go
old mode 100755
new mode 100644
index 37ef603d0..0d229299c
--- a/internal/provider/source_snowflake_resource.go
+++ b/internal/provider/source_snowflake_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceSnowflakeResource struct {
// SourceSnowflakeResourceModel describes the resource data model.
type SourceSnowflakeResourceModel struct {
Configuration SourceSnowflake `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,22 +59,14 @@ func (r *SourceSnowflakeResource) Schema(ctx context.Context, req resource.Schem
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_snowflake_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests.`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth",
- ),
- },
- Description: `must be one of ["OAuth"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of your Snowflake developer application.`,
@@ -84,76 +77,17 @@ func (r *SourceSnowflakeResource) Schema(ctx context.Context, req resource.Schem
},
"refresh_token": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Refresh Token for making authenticated requests.`,
},
},
},
- "source_snowflake_authorization_method_username_and_password": schema.SingleNestedAttribute{
+ "username_and_password": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username/password",
- ),
- },
- Description: `must be one of ["username/password"]`,
- },
- "password": schema.StringAttribute{
- Required: true,
- Description: `The password associated with the username.`,
- },
- "username": schema.StringAttribute{
- Required: true,
- Description: `The username you created to allow Airbyte to access the database.`,
- },
- },
- },
- "source_snowflake_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Optional: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth",
- ),
- },
- Description: `must be one of ["OAuth"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your Snowflake developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your Snowflake developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Optional: true,
- Description: `Refresh Token for making authenticated requests.`,
- },
- },
- },
- "source_snowflake_update_authorization_method_username_and_password": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "username/password",
- ),
- },
- Description: `must be one of ["username/password"]`,
- },
"password": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The password associated with the username.`,
},
"username": schema.StringAttribute{
@@ -187,28 +121,30 @@ func (r *SourceSnowflakeResource) Schema(ctx context.Context, req resource.Schem
Optional: true,
Description: `The source Snowflake schema tables. Leave empty to access tables from multiple schemas.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "snowflake",
- ),
- },
- Description: `must be one of ["snowflake"]`,
- },
"warehouse": schema.StringAttribute{
Required: true,
Description: `The warehouse you created for Airbyte to access data.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -272,7 +208,7 @@ func (r *SourceSnowflakeResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSnowflake(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -448,5 +384,5 @@ func (r *SourceSnowflakeResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceSnowflakeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_snowflake_resource_sdk.go b/internal/provider/source_snowflake_resource_sdk.go
old mode 100755
new mode 100644
index 7f2c67853..e8d4f67f1
--- a/internal/provider/source_snowflake_resource_sdk.go
+++ b/internal/provider/source_snowflake_resource_sdk.go
@@ -3,57 +3,53 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSnowflakeResourceModel) ToCreateSDKType() *shared.SourceSnowflakeCreateRequest {
var credentials *shared.SourceSnowflakeAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceSnowflakeAuthorizationMethodOAuth20 *shared.SourceSnowflakeAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodOAuth20 != nil {
+ var sourceSnowflakeOAuth20 *shared.SourceSnowflakeOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
accessToken := new(string)
- if !r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodOAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodOAuth20.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodOAuth20.AccessToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
} else {
accessToken = nil
}
- authType := shared.SourceSnowflakeAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodOAuth20.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
refreshToken := new(string)
- if !r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodOAuth20.RefreshToken.IsUnknown() && !r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodOAuth20.RefreshToken.IsNull() {
- *refreshToken = r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodOAuth20.RefreshToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.RefreshToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.RefreshToken.IsNull() {
+ *refreshToken = r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
} else {
refreshToken = nil
}
- sourceSnowflakeAuthorizationMethodOAuth20 = &shared.SourceSnowflakeAuthorizationMethodOAuth20{
+ sourceSnowflakeOAuth20 = &shared.SourceSnowflakeOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceSnowflakeAuthorizationMethodOAuth20 != nil {
+ if sourceSnowflakeOAuth20 != nil {
credentials = &shared.SourceSnowflakeAuthorizationMethod{
- SourceSnowflakeAuthorizationMethodOAuth20: sourceSnowflakeAuthorizationMethodOAuth20,
+ SourceSnowflakeOAuth20: sourceSnowflakeOAuth20,
}
}
- var sourceSnowflakeAuthorizationMethodUsernameAndPassword *shared.SourceSnowflakeAuthorizationMethodUsernameAndPassword
- if r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodUsernameAndPassword != nil {
- authType1 := shared.SourceSnowflakeAuthorizationMethodUsernameAndPasswordAuthType(r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodUsernameAndPassword.AuthType.ValueString())
- password := r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodUsernameAndPassword.Password.ValueString()
- username := r.Configuration.Credentials.SourceSnowflakeAuthorizationMethodUsernameAndPassword.Username.ValueString()
- sourceSnowflakeAuthorizationMethodUsernameAndPassword = &shared.SourceSnowflakeAuthorizationMethodUsernameAndPassword{
- AuthType: authType1,
+ var sourceSnowflakeUsernameAndPassword *shared.SourceSnowflakeUsernameAndPassword
+ if r.Configuration.Credentials.UsernameAndPassword != nil {
+ password := r.Configuration.Credentials.UsernameAndPassword.Password.ValueString()
+ username := r.Configuration.Credentials.UsernameAndPassword.Username.ValueString()
+ sourceSnowflakeUsernameAndPassword = &shared.SourceSnowflakeUsernameAndPassword{
Password: password,
Username: username,
}
}
- if sourceSnowflakeAuthorizationMethodUsernameAndPassword != nil {
+ if sourceSnowflakeUsernameAndPassword != nil {
credentials = &shared.SourceSnowflakeAuthorizationMethod{
- SourceSnowflakeAuthorizationMethodUsernameAndPassword: sourceSnowflakeAuthorizationMethodUsernameAndPassword,
+ SourceSnowflakeUsernameAndPassword: sourceSnowflakeUsernameAndPassword,
}
}
}
@@ -72,7 +68,6 @@ func (r *SourceSnowflakeResourceModel) ToCreateSDKType() *shared.SourceSnowflake
} else {
schema = nil
}
- sourceType := shared.SourceSnowflakeSnowflake(r.Configuration.SourceType.ValueString())
warehouse := r.Configuration.Warehouse.ValueString()
configuration := shared.SourceSnowflake{
Credentials: credentials,
@@ -81,9 +76,14 @@ func (r *SourceSnowflakeResourceModel) ToCreateSDKType() *shared.SourceSnowflake
JdbcURLParams: jdbcURLParams,
Role: role,
Schema: schema,
- SourceType: sourceType,
Warehouse: warehouse,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -94,6 +94,7 @@ func (r *SourceSnowflakeResourceModel) ToCreateSDKType() *shared.SourceSnowflake
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSnowflakeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -109,50 +110,46 @@ func (r *SourceSnowflakeResourceModel) ToGetSDKType() *shared.SourceSnowflakeCre
func (r *SourceSnowflakeResourceModel) ToUpdateSDKType() *shared.SourceSnowflakePutRequest {
var credentials *shared.SourceSnowflakeUpdateAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceSnowflakeUpdateAuthorizationMethodOAuth20 *shared.SourceSnowflakeUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodOAuth20 != nil {
+ var sourceSnowflakeUpdateOAuth20 *shared.SourceSnowflakeUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
accessToken := new(string)
- if !r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodOAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodOAuth20.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodOAuth20.AccessToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
} else {
accessToken = nil
}
- authType := shared.SourceSnowflakeUpdateAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodOAuth20.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
refreshToken := new(string)
- if !r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodOAuth20.RefreshToken.IsUnknown() && !r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodOAuth20.RefreshToken.IsNull() {
- *refreshToken = r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodOAuth20.RefreshToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.RefreshToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.RefreshToken.IsNull() {
+ *refreshToken = r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
} else {
refreshToken = nil
}
- sourceSnowflakeUpdateAuthorizationMethodOAuth20 = &shared.SourceSnowflakeUpdateAuthorizationMethodOAuth20{
+ sourceSnowflakeUpdateOAuth20 = &shared.SourceSnowflakeUpdateOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceSnowflakeUpdateAuthorizationMethodOAuth20 != nil {
+ if sourceSnowflakeUpdateOAuth20 != nil {
credentials = &shared.SourceSnowflakeUpdateAuthorizationMethod{
- SourceSnowflakeUpdateAuthorizationMethodOAuth20: sourceSnowflakeUpdateAuthorizationMethodOAuth20,
+ SourceSnowflakeUpdateOAuth20: sourceSnowflakeUpdateOAuth20,
}
}
- var sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword *shared.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword
- if r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword != nil {
- authType1 := shared.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType(r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword.AuthType.ValueString())
- password := r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword.Password.ValueString()
- username := r.Configuration.Credentials.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword.Username.ValueString()
- sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword = &shared.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword{
- AuthType: authType1,
+ var sourceSnowflakeUpdateUsernameAndPassword *shared.SourceSnowflakeUpdateUsernameAndPassword
+ if r.Configuration.Credentials.UsernameAndPassword != nil {
+ password := r.Configuration.Credentials.UsernameAndPassword.Password.ValueString()
+ username := r.Configuration.Credentials.UsernameAndPassword.Username.ValueString()
+ sourceSnowflakeUpdateUsernameAndPassword = &shared.SourceSnowflakeUpdateUsernameAndPassword{
Password: password,
Username: username,
}
}
- if sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword != nil {
+ if sourceSnowflakeUpdateUsernameAndPassword != nil {
credentials = &shared.SourceSnowflakeUpdateAuthorizationMethod{
- SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword: sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword,
+ SourceSnowflakeUpdateUsernameAndPassword: sourceSnowflakeUpdateUsernameAndPassword,
}
}
}
diff --git a/internal/provider/source_sonarcloud_data_source.go b/internal/provider/source_sonarcloud_data_source.go
old mode 100755
new mode 100644
index ec8d2d8b8..f5225d1ec
--- a/internal/provider/source_sonarcloud_data_source.go
+++ b/internal/provider/source_sonarcloud_data_source.go
@@ -3,17 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -33,11 +29,11 @@ type SourceSonarCloudDataSource struct {
// SourceSonarCloudDataSourceModel describes the data model.
type SourceSonarCloudDataSourceModel struct {
- Configuration SourceSonarCloud `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -51,60 +47,20 @@ func (r *SourceSonarCloudDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceSonarCloud DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "component_keys": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Validators: []validator.List{
- listvalidator.ValueStringsAre(validators.IsValidJSON()),
- },
- Description: `Comma-separated list of component keys.`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `To retrieve issues created before the given date (inclusive).`,
- },
- "organization": schema.StringAttribute{
- Computed: true,
- Description: `Organization key. See here.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sonar-cloud",
- ),
- },
- Description: `must be one of ["sonar-cloud"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `To retrieve issues created after the given date (inclusive).`,
- },
- "user_token": schema.StringAttribute{
- Computed: true,
- Description: `Your User Token. See here. The token is case sensitive.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_sonarcloud_data_source_sdk.go b/internal/provider/source_sonarcloud_data_source_sdk.go
old mode 100755
new mode 100644
index e9c30d095..16033e5d1
--- a/internal/provider/source_sonarcloud_data_source_sdk.go
+++ b/internal/provider/source_sonarcloud_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSonarCloudDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_sonarcloud_resource.go b/internal/provider/source_sonarcloud_resource.go
old mode 100755
new mode 100644
index d13095805..4c9d0ff84
--- a/internal/provider/source_sonarcloud_resource.go
+++ b/internal/provider/source_sonarcloud_resource.go
@@ -3,19 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -37,6 +37,7 @@ type SourceSonarCloudResource struct {
// SourceSonarCloudResourceModel describes the resource data model.
type SourceSonarCloudResourceModel struct {
Configuration SourceSonarCloud `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,52 +59,56 @@ func (r *SourceSonarCloudResource) Schema(ctx context.Context, req resource.Sche
Attributes: map[string]schema.Attribute{
"component_keys": schema.ListAttribute{
Required: true,
+ Sensitive: true,
ElementType: types.StringType,
+ Description: `Comma-separated list of component keys.`,
Validators: []validator.List{
listvalidator.ValueStringsAre(validators.IsValidJSON()),
},
- Description: `Comma-separated list of component keys.`,
},
"end_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `To retrieve issues created before the given date (inclusive).`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `To retrieve issues created before the given date (inclusive).`,
},
"organization": schema.StringAttribute{
Required: true,
Description: `Organization key. See here.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sonar-cloud",
- ),
- },
- Description: `must be one of ["sonar-cloud"]`,
- },
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `To retrieve issues created after the given date (inclusive).`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `To retrieve issues created after the given date (inclusive).`,
},
"user_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your User Token. See here. The token is case sensitive.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -167,7 +172,7 @@ func (r *SourceSonarCloudResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSonarCloud(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -343,5 +348,5 @@ func (r *SourceSonarCloudResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceSonarCloudResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_sonarcloud_resource_sdk.go b/internal/provider/source_sonarcloud_resource_sdk.go
old mode 100755
new mode 100644
index 1bfbf7e12..001ebc56d
--- a/internal/provider/source_sonarcloud_resource_sdk.go
+++ b/internal/provider/source_sonarcloud_resource_sdk.go
@@ -3,9 +3,9 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
"encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -23,7 +23,6 @@ func (r *SourceSonarCloudResourceModel) ToCreateSDKType() *shared.SourceSonarClo
endDate = nil
}
organization := r.Configuration.Organization.ValueString()
- sourceType := shared.SourceSonarCloudSonarCloud(r.Configuration.SourceType.ValueString())
startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
@@ -35,10 +34,15 @@ func (r *SourceSonarCloudResourceModel) ToCreateSDKType() *shared.SourceSonarClo
ComponentKeys: componentKeys,
EndDate: endDate,
Organization: organization,
- SourceType: sourceType,
StartDate: startDate,
UserToken: userToken,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -49,6 +53,7 @@ func (r *SourceSonarCloudResourceModel) ToCreateSDKType() *shared.SourceSonarClo
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSonarCloudCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_spacexapi_data_source.go b/internal/provider/source_spacexapi_data_source.go
old mode 100755
new mode 100644
index a4497811d..c85d12a04
--- a/internal/provider/source_spacexapi_data_source.go
+++ b/internal/provider/source_spacexapi_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceSpacexAPIDataSource struct {
// SourceSpacexAPIDataSourceModel describes the data model.
type SourceSpacexAPIDataSourceModel struct {
- Configuration SourceSpacexAPI `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,36 +47,20 @@ func (r *SourceSpacexAPIDataSource) Schema(ctx context.Context, req datasource.S
MarkdownDescription: "SourceSpacexAPI DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "id": schema.StringAttribute{
- Computed: true,
- },
- "options": schema.StringAttribute{
- Computed: true,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "spacex-api",
- ),
- },
- Description: `must be one of ["spacex-api"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_spacexapi_data_source_sdk.go b/internal/provider/source_spacexapi_data_source_sdk.go
old mode 100755
new mode 100644
index 34717b647..72af7409c
--- a/internal/provider/source_spacexapi_data_source_sdk.go
+++ b/internal/provider/source_spacexapi_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSpacexAPIDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_spacexapi_resource.go b/internal/provider/source_spacexapi_resource.go
old mode 100755
new mode 100644
index 694b2b8e7..3452fbdae
--- a/internal/provider/source_spacexapi_resource.go
+++ b/internal/provider/source_spacexapi_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceSpacexAPIResource struct {
// SourceSpacexAPIResourceModel describes the resource data model.
type SourceSpacexAPIResourceModel struct {
Configuration SourceSpacexAPI `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -60,24 +60,26 @@ func (r *SourceSpacexAPIResource) Schema(ctx context.Context, req resource.Schem
"options": schema.StringAttribute{
Optional: true,
},
- "source_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "spacex-api",
- ),
- },
- Description: `must be one of ["spacex-api"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -141,7 +143,7 @@ func (r *SourceSpacexAPIResource) Create(ctx context.Context, req resource.Creat
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSpacexAPI(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -317,5 +319,5 @@ func (r *SourceSpacexAPIResource) Delete(ctx context.Context, req resource.Delet
}
func (r *SourceSpacexAPIResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_spacexapi_resource_sdk.go b/internal/provider/source_spacexapi_resource_sdk.go
old mode 100755
new mode 100644
index 3b49e0792..b3f0d2c08
--- a/internal/provider/source_spacexapi_resource_sdk.go
+++ b/internal/provider/source_spacexapi_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -20,16 +20,15 @@ func (r *SourceSpacexAPIResourceModel) ToCreateSDKType() *shared.SourceSpacexAPI
} else {
options = nil
}
- sourceType := new(shared.SourceSpacexAPISpacexAPI)
- if !r.Configuration.SourceType.IsUnknown() && !r.Configuration.SourceType.IsNull() {
- *sourceType = shared.SourceSpacexAPISpacexAPI(r.Configuration.SourceType.ValueString())
- } else {
- sourceType = nil
- }
configuration := shared.SourceSpacexAPI{
- ID: id,
- Options: options,
- SourceType: sourceType,
+ ID: id,
+ Options: options,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -41,6 +40,7 @@ func (r *SourceSpacexAPIResourceModel) ToCreateSDKType() *shared.SourceSpacexAPI
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSpacexAPICreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_square_data_source.go b/internal/provider/source_square_data_source.go
old mode 100755
new mode 100644
index 7e0e63f4d..aa27e0a78
--- a/internal/provider/source_square_data_source.go
+++ b/internal/provider/source_square_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceSquareDataSource struct {
// SourceSquareDataSourceModel describes the data model.
type SourceSquareDataSourceModel struct {
- Configuration SourceSquare `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,146 +47,20 @@ func (r *SourceSquareDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceSquare DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_square_authentication_api_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `The API key for a Square application`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "API Key",
- ),
- },
- Description: `must be one of ["API Key"]`,
- },
- },
- Description: `Choose how to authenticate to Square.`,
- },
- "source_square_authentication_oauth_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth",
- ),
- },
- Description: `must be one of ["OAuth"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Square-issued ID of your application`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Square-issued application secret for your application`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `A refresh token generated using the above client ID and secret`,
- },
- },
- Description: `Choose how to authenticate to Square.`,
- },
- "source_square_update_authentication_api_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `The API key for a Square application`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "API Key",
- ),
- },
- Description: `must be one of ["API Key"]`,
- },
- },
- Description: `Choose how to authenticate to Square.`,
- },
- "source_square_update_authentication_oauth_authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth",
- ),
- },
- Description: `must be one of ["OAuth"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Square-issued ID of your application`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Square-issued application secret for your application`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `A refresh token generated using the above client ID and secret`,
- },
- },
- Description: `Choose how to authenticate to Square.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Choose how to authenticate to Square.`,
- },
- "include_deleted_objects": schema.BoolAttribute{
- Computed: true,
- Description: `In some streams there is an option to include deleted objects (Items, Categories, Discounts, Taxes)`,
- },
- "is_sandbox": schema.BoolAttribute{
- Computed: true,
- Description: `Determines whether to use the sandbox or production environment.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "square",
- ),
- },
- Description: `must be one of ["square"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. If not set, all data will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_square_data_source_sdk.go b/internal/provider/source_square_data_source_sdk.go
old mode 100755
new mode 100644
index 48e7b29ab..b5ab3567b
--- a/internal/provider/source_square_data_source_sdk.go
+++ b/internal/provider/source_square_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSquareDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_square_resource.go b/internal/provider/source_square_resource.go
old mode 100755
new mode 100644
index 1fd2b5054..5d96c4fb0
--- a/internal/provider/source_square_resource.go
+++ b/internal/provider/source_square_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceSquareResource struct {
// SourceSquareResourceModel describes the resource data model.
type SourceSquareResourceModel struct {
Configuration SourceSquare `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,83 +59,20 @@ func (r *SourceSquareResource) Schema(ctx context.Context, req resource.SchemaRe
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_square_authentication_api_key": schema.SingleNestedAttribute{
+ "api_key": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The API key for a Square application`,
},
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "API Key",
- ),
- },
- Description: `must be one of ["API Key"]`,
- },
- },
- Description: `Choose how to authenticate to Square.`,
- },
- "source_square_authentication_oauth_authentication": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth",
- ),
- },
- Description: `must be one of ["OAuth"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Square-issued ID of your application`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Square-issued application secret for your application`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `A refresh token generated using the above client ID and secret`,
- },
- },
- Description: `Choose how to authenticate to Square.`,
- },
- "source_square_update_authentication_api_key": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Required: true,
- Description: `The API key for a Square application`,
- },
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "API Key",
- ),
- },
- Description: `must be one of ["API Key"]`,
- },
},
Description: `Choose how to authenticate to Square.`,
},
- "source_square_update_authentication_oauth_authentication": schema.SingleNestedAttribute{
+ "oauth_authentication": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "OAuth",
- ),
- },
- Description: `must be one of ["OAuth"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Square-issued ID of your application`,
@@ -145,50 +83,56 @@ func (r *SourceSquareResource) Schema(ctx context.Context, req resource.SchemaRe
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `A refresh token generated using the above client ID and secret`,
},
},
Description: `Choose how to authenticate to Square.`,
},
},
+ Description: `Choose how to authenticate to Square.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Choose how to authenticate to Square.`,
},
"include_deleted_objects": schema.BoolAttribute{
- Optional: true,
- Description: `In some streams there is an option to include deleted objects (Items, Categories, Discounts, Taxes)`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `In some streams there is an option to include deleted objects (Items, Categories, Discounts, Taxes)`,
},
"is_sandbox": schema.BoolAttribute{
- Required: true,
- Description: `Determines whether to use the sandbox or production environment.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "square",
- ),
- },
- Description: `must be one of ["square"]`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Determines whether to use the sandbox or production environment.`,
},
"start_date": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `Default: "2021-01-01"` + "\n" +
+ `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. If not set, all data will be replicated.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. If not set, all data will be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -252,7 +196,7 @@ func (r *SourceSquareResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSquare(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -428,5 +372,5 @@ func (r *SourceSquareResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceSquareResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_square_resource_sdk.go b/internal/provider/source_square_resource_sdk.go
old mode 100755
new mode 100644
index b1d128cbf..fae384917
--- a/internal/provider/source_square_resource_sdk.go
+++ b/internal/provider/source_square_resource_sdk.go
@@ -3,44 +3,40 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSquareResourceModel) ToCreateSDKType() *shared.SourceSquareCreateRequest {
var credentials *shared.SourceSquareAuthentication
if r.Configuration.Credentials != nil {
- var sourceSquareAuthenticationOauthAuthentication *shared.SourceSquareAuthenticationOauthAuthentication
- if r.Configuration.Credentials.SourceSquareAuthenticationOauthAuthentication != nil {
- authType := shared.SourceSquareAuthenticationOauthAuthenticationAuthType(r.Configuration.Credentials.SourceSquareAuthenticationOauthAuthentication.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceSquareAuthenticationOauthAuthentication.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceSquareAuthenticationOauthAuthentication.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceSquareAuthenticationOauthAuthentication.RefreshToken.ValueString()
- sourceSquareAuthenticationOauthAuthentication = &shared.SourceSquareAuthenticationOauthAuthentication{
- AuthType: authType,
+ var sourceSquareOauthAuthentication *shared.SourceSquareOauthAuthentication
+ if r.Configuration.Credentials.OauthAuthentication != nil {
+ clientID := r.Configuration.Credentials.OauthAuthentication.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OauthAuthentication.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OauthAuthentication.RefreshToken.ValueString()
+ sourceSquareOauthAuthentication = &shared.SourceSquareOauthAuthentication{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceSquareAuthenticationOauthAuthentication != nil {
+ if sourceSquareOauthAuthentication != nil {
credentials = &shared.SourceSquareAuthentication{
- SourceSquareAuthenticationOauthAuthentication: sourceSquareAuthenticationOauthAuthentication,
+ SourceSquareOauthAuthentication: sourceSquareOauthAuthentication,
}
}
- var sourceSquareAuthenticationAPIKey *shared.SourceSquareAuthenticationAPIKey
- if r.Configuration.Credentials.SourceSquareAuthenticationAPIKey != nil {
- apiKey := r.Configuration.Credentials.SourceSquareAuthenticationAPIKey.APIKey.ValueString()
- authType1 := shared.SourceSquareAuthenticationAPIKeyAuthType(r.Configuration.Credentials.SourceSquareAuthenticationAPIKey.AuthType.ValueString())
- sourceSquareAuthenticationAPIKey = &shared.SourceSquareAuthenticationAPIKey{
- APIKey: apiKey,
- AuthType: authType1,
+ var sourceSquareAPIKey *shared.SourceSquareAPIKey
+ if r.Configuration.Credentials.APIKey != nil {
+ apiKey := r.Configuration.Credentials.APIKey.APIKey.ValueString()
+ sourceSquareAPIKey = &shared.SourceSquareAPIKey{
+ APIKey: apiKey,
}
}
- if sourceSquareAuthenticationAPIKey != nil {
+ if sourceSquareAPIKey != nil {
credentials = &shared.SourceSquareAuthentication{
- SourceSquareAuthenticationAPIKey: sourceSquareAuthenticationAPIKey,
+ SourceSquareAPIKey: sourceSquareAPIKey,
}
}
}
@@ -50,8 +46,12 @@ func (r *SourceSquareResourceModel) ToCreateSDKType() *shared.SourceSquareCreate
} else {
includeDeletedObjects = nil
}
- isSandbox := r.Configuration.IsSandbox.ValueBool()
- sourceType := shared.SourceSquareSquare(r.Configuration.SourceType.ValueString())
+ isSandbox := new(bool)
+ if !r.Configuration.IsSandbox.IsUnknown() && !r.Configuration.IsSandbox.IsNull() {
+ *isSandbox = r.Configuration.IsSandbox.ValueBool()
+ } else {
+ isSandbox = nil
+ }
startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
@@ -62,9 +62,14 @@ func (r *SourceSquareResourceModel) ToCreateSDKType() *shared.SourceSquareCreate
Credentials: credentials,
IncludeDeletedObjects: includeDeletedObjects,
IsSandbox: isSandbox,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -75,6 +80,7 @@ func (r *SourceSquareResourceModel) ToCreateSDKType() *shared.SourceSquareCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSquareCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -90,36 +96,32 @@ func (r *SourceSquareResourceModel) ToGetSDKType() *shared.SourceSquareCreateReq
func (r *SourceSquareResourceModel) ToUpdateSDKType() *shared.SourceSquarePutRequest {
var credentials *shared.SourceSquareUpdateAuthentication
if r.Configuration.Credentials != nil {
- var sourceSquareUpdateAuthenticationOauthAuthentication *shared.SourceSquareUpdateAuthenticationOauthAuthentication
- if r.Configuration.Credentials.SourceSquareUpdateAuthenticationOauthAuthentication != nil {
- authType := shared.SourceSquareUpdateAuthenticationOauthAuthenticationAuthType(r.Configuration.Credentials.SourceSquareUpdateAuthenticationOauthAuthentication.AuthType.ValueString())
- clientID := r.Configuration.Credentials.SourceSquareUpdateAuthenticationOauthAuthentication.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceSquareUpdateAuthenticationOauthAuthentication.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceSquareUpdateAuthenticationOauthAuthentication.RefreshToken.ValueString()
- sourceSquareUpdateAuthenticationOauthAuthentication = &shared.SourceSquareUpdateAuthenticationOauthAuthentication{
- AuthType: authType,
+ var oauthAuthentication *shared.OauthAuthentication
+ if r.Configuration.Credentials.OauthAuthentication != nil {
+ clientID := r.Configuration.Credentials.OauthAuthentication.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OauthAuthentication.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OauthAuthentication.RefreshToken.ValueString()
+ oauthAuthentication = &shared.OauthAuthentication{
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
}
}
- if sourceSquareUpdateAuthenticationOauthAuthentication != nil {
+ if oauthAuthentication != nil {
credentials = &shared.SourceSquareUpdateAuthentication{
- SourceSquareUpdateAuthenticationOauthAuthentication: sourceSquareUpdateAuthenticationOauthAuthentication,
+ OauthAuthentication: oauthAuthentication,
}
}
- var sourceSquareUpdateAuthenticationAPIKey *shared.SourceSquareUpdateAuthenticationAPIKey
- if r.Configuration.Credentials.SourceSquareUpdateAuthenticationAPIKey != nil {
- apiKey := r.Configuration.Credentials.SourceSquareUpdateAuthenticationAPIKey.APIKey.ValueString()
- authType1 := shared.SourceSquareUpdateAuthenticationAPIKeyAuthType(r.Configuration.Credentials.SourceSquareUpdateAuthenticationAPIKey.AuthType.ValueString())
- sourceSquareUpdateAuthenticationAPIKey = &shared.SourceSquareUpdateAuthenticationAPIKey{
- APIKey: apiKey,
- AuthType: authType1,
+ var sourceSquareUpdateAPIKey *shared.SourceSquareUpdateAPIKey
+ if r.Configuration.Credentials.APIKey != nil {
+ apiKey := r.Configuration.Credentials.APIKey.APIKey.ValueString()
+ sourceSquareUpdateAPIKey = &shared.SourceSquareUpdateAPIKey{
+ APIKey: apiKey,
}
}
- if sourceSquareUpdateAuthenticationAPIKey != nil {
+ if sourceSquareUpdateAPIKey != nil {
credentials = &shared.SourceSquareUpdateAuthentication{
- SourceSquareUpdateAuthenticationAPIKey: sourceSquareUpdateAuthenticationAPIKey,
+ SourceSquareUpdateAPIKey: sourceSquareUpdateAPIKey,
}
}
}
@@ -129,7 +131,12 @@ func (r *SourceSquareResourceModel) ToUpdateSDKType() *shared.SourceSquarePutReq
} else {
includeDeletedObjects = nil
}
- isSandbox := r.Configuration.IsSandbox.ValueBool()
+ isSandbox := new(bool)
+ if !r.Configuration.IsSandbox.IsUnknown() && !r.Configuration.IsSandbox.IsNull() {
+ *isSandbox = r.Configuration.IsSandbox.ValueBool()
+ } else {
+ isSandbox = nil
+ }
startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
diff --git a/internal/provider/source_strava_data_source.go b/internal/provider/source_strava_data_source.go
old mode 100755
new mode 100644
index 18a75ef1e..068a18039
--- a/internal/provider/source_strava_data_source.go
+++ b/internal/provider/source_strava_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceStravaDataSource struct {
// SourceStravaDataSourceModel describes the data model.
type SourceStravaDataSourceModel struct {
- Configuration SourceStrava `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,62 +47,20 @@ func (r *SourceStravaDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceStrava DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "athlete_id": schema.Int64Attribute{
- Computed: true,
- Description: `The Athlete ID of your Strava developer application.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your Strava developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your Strava developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The Refresh Token with the activity: read_all permissions.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "strava",
- ),
- },
- Description: `must be one of ["strava"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_strava_data_source_sdk.go b/internal/provider/source_strava_data_source_sdk.go
old mode 100755
new mode 100644
index 702ae1627..a4259068a
--- a/internal/provider/source_strava_data_source_sdk.go
+++ b/internal/provider/source_strava_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceStravaDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_strava_resource.go b/internal/provider/source_strava_resource.go
old mode 100755
new mode 100644
index d75c4f3c4..f971e76f8
--- a/internal/provider/source_strava_resource.go
+++ b/internal/provider/source_strava_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceStravaResource struct {
// SourceStravaResourceModel describes the resource data model.
type SourceStravaResourceModel struct {
Configuration SourceStrava `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -59,15 +60,6 @@ func (r *SourceStravaResource) Schema(ctx context.Context, req resource.SchemaRe
Required: true,
Description: `The Athlete ID of your Strava developer application.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Client",
- ),
- },
- Description: `must be one of ["Client"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of your Strava developer application.`,
@@ -78,33 +70,36 @@ func (r *SourceStravaResource) Schema(ctx context.Context, req resource.SchemaRe
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Refresh Token with the activity: read_all permissions.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "strava",
- ),
- },
- Description: `must be one of ["strava"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -168,7 +163,7 @@ func (r *SourceStravaResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceStrava(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -344,5 +339,5 @@ func (r *SourceStravaResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceStravaResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_strava_resource_sdk.go b/internal/provider/source_strava_resource_sdk.go
old mode 100755
new mode 100644
index 233b397fa..38ca66e68
--- a/internal/provider/source_strava_resource_sdk.go
+++ b/internal/provider/source_strava_resource_sdk.go
@@ -3,33 +3,30 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceStravaResourceModel) ToCreateSDKType() *shared.SourceStravaCreateRequest {
athleteID := r.Configuration.AthleteID.ValueInt64()
- authType := new(shared.SourceStravaAuthType)
- if !r.Configuration.AuthType.IsUnknown() && !r.Configuration.AuthType.IsNull() {
- *authType = shared.SourceStravaAuthType(r.Configuration.AuthType.ValueString())
- } else {
- authType = nil
- }
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
refreshToken := r.Configuration.RefreshToken.ValueString()
- sourceType := shared.SourceStravaStrava(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceStrava{
AthleteID: athleteID,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -40,6 +37,7 @@ func (r *SourceStravaResourceModel) ToCreateSDKType() *shared.SourceStravaCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceStravaCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -54,19 +52,12 @@ func (r *SourceStravaResourceModel) ToGetSDKType() *shared.SourceStravaCreateReq
func (r *SourceStravaResourceModel) ToUpdateSDKType() *shared.SourceStravaPutRequest {
athleteID := r.Configuration.AthleteID.ValueInt64()
- authType := new(shared.SourceStravaUpdateAuthType)
- if !r.Configuration.AuthType.IsUnknown() && !r.Configuration.AuthType.IsNull() {
- *authType = shared.SourceStravaUpdateAuthType(r.Configuration.AuthType.ValueString())
- } else {
- authType = nil
- }
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
refreshToken := r.Configuration.RefreshToken.ValueString()
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceStravaUpdate{
AthleteID: athleteID,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
diff --git a/internal/provider/source_stripe_data_source.go b/internal/provider/source_stripe_data_source.go
old mode 100755
new mode 100644
index ddc9f603e..e13e01c17
--- a/internal/provider/source_stripe_data_source.go
+++ b/internal/provider/source_stripe_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceStripeDataSource struct {
// SourceStripeDataSourceModel describes the data model.
type SourceStripeDataSourceModel struct {
- Configuration SourceStripe `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,53 +47,20 @@ func (r *SourceStripeDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceStripe DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "account_id": schema.StringAttribute{
- Computed: true,
- Description: `Your Stripe account ID (starts with 'acct_', find yours here).`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Stripe API key (usually starts with 'sk_live_'; find yours here).`,
- },
- "lookback_window_days": schema.Int64Attribute{
- Computed: true,
- Description: `When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. More info here`,
- },
- "slice_range": schema.Int64Attribute{
- Computed: true,
- Description: `The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "stripe",
- ),
- },
- Description: `must be one of ["stripe"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_stripe_data_source_sdk.go b/internal/provider/source_stripe_data_source_sdk.go
old mode 100755
new mode 100644
index a3ee6da9f..2e8a73fd7
--- a/internal/provider/source_stripe_data_source_sdk.go
+++ b/internal/provider/source_stripe_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceStripeDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_stripe_resource.go b/internal/provider/source_stripe_resource.go
old mode 100755
new mode 100644
index 0c0bbd3e6..27c9ca32d
--- a/internal/provider/source_stripe_resource.go
+++ b/internal/provider/source_stripe_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceStripeResource struct {
// SourceStripeResourceModel describes the resource data model.
type SourceStripeResourceModel struct {
Configuration SourceStripe `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -59,43 +60,57 @@ func (r *SourceStripeResource) Schema(ctx context.Context, req resource.SchemaRe
Required: true,
Description: `Your Stripe account ID (starts with 'acct_', find yours here).`,
},
+ "call_rate_limit": schema.Int64Attribute{
+ Optional: true,
+ Description: `The number of API calls per second that you allow the connector to make. This value cannot be bigger than the real API call rate limit (https://stripe.com/docs/rate-limits). If not specified, the default maximum is 25 and 100 calls per second for test and production tokens respectively.`,
+ },
"client_secret": schema.StringAttribute{
Required: true,
Description: `Stripe API key (usually starts with 'sk_live_'; find yours here).`,
},
"lookback_window_days": schema.Int64Attribute{
- Optional: true,
- Description: `When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. More info here`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. The Lookback Window only applies to streams that do not support event-based incremental syncs: Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks, Refunds. More info here`,
},
- "slice_range": schema.Int64Attribute{
- Optional: true,
- Description: `The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted.`,
+ "num_workers": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 10` + "\n" +
+ `The number of worker threads to use for the sync. The performance upper boundary depends on the call_rate_limit setting and type of account.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "stripe",
- ),
- },
- Description: `must be one of ["stripe"]`,
+ "slice_range": schema.Int64Attribute{
+ Optional: true,
+ MarkdownDescription: `Default: 365` + "\n" +
+ `The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the fewer requests will be made and the faster the sync will be. On the other hand, the more seldom the state is persisted.`,
},
"start_date": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `Default: "2017-01-25T00:00:00Z"` + "\n" +
+ `UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -159,7 +174,7 @@ func (r *SourceStripeResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceStripe(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -335,5 +350,5 @@ func (r *SourceStripeResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceStripeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_stripe_resource_sdk.go b/internal/provider/source_stripe_resource_sdk.go
old mode 100755
new mode 100644
index 022bf185e..c6298ebd7
--- a/internal/provider/source_stripe_resource_sdk.go
+++ b/internal/provider/source_stripe_resource_sdk.go
@@ -3,13 +3,19 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceStripeResourceModel) ToCreateSDKType() *shared.SourceStripeCreateRequest {
accountID := r.Configuration.AccountID.ValueString()
+ callRateLimit := new(int64)
+ if !r.Configuration.CallRateLimit.IsUnknown() && !r.Configuration.CallRateLimit.IsNull() {
+ *callRateLimit = r.Configuration.CallRateLimit.ValueInt64()
+ } else {
+ callRateLimit = nil
+ }
clientSecret := r.Configuration.ClientSecret.ValueString()
lookbackWindowDays := new(int64)
if !r.Configuration.LookbackWindowDays.IsUnknown() && !r.Configuration.LookbackWindowDays.IsNull() {
@@ -17,13 +23,18 @@ func (r *SourceStripeResourceModel) ToCreateSDKType() *shared.SourceStripeCreate
} else {
lookbackWindowDays = nil
}
+ numWorkers := new(int64)
+ if !r.Configuration.NumWorkers.IsUnknown() && !r.Configuration.NumWorkers.IsNull() {
+ *numWorkers = r.Configuration.NumWorkers.ValueInt64()
+ } else {
+ numWorkers = nil
+ }
sliceRange := new(int64)
if !r.Configuration.SliceRange.IsUnknown() && !r.Configuration.SliceRange.IsNull() {
*sliceRange = r.Configuration.SliceRange.ValueInt64()
} else {
sliceRange = nil
}
- sourceType := shared.SourceStripeStripe(r.Configuration.SourceType.ValueString())
startDate := new(time.Time)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
@@ -32,12 +43,19 @@ func (r *SourceStripeResourceModel) ToCreateSDKType() *shared.SourceStripeCreate
}
configuration := shared.SourceStripe{
AccountID: accountID,
+ CallRateLimit: callRateLimit,
ClientSecret: clientSecret,
LookbackWindowDays: lookbackWindowDays,
+ NumWorkers: numWorkers,
SliceRange: sliceRange,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -48,6 +66,7 @@ func (r *SourceStripeResourceModel) ToCreateSDKType() *shared.SourceStripeCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceStripeCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -62,6 +81,12 @@ func (r *SourceStripeResourceModel) ToGetSDKType() *shared.SourceStripeCreateReq
func (r *SourceStripeResourceModel) ToUpdateSDKType() *shared.SourceStripePutRequest {
accountID := r.Configuration.AccountID.ValueString()
+ callRateLimit := new(int64)
+ if !r.Configuration.CallRateLimit.IsUnknown() && !r.Configuration.CallRateLimit.IsNull() {
+ *callRateLimit = r.Configuration.CallRateLimit.ValueInt64()
+ } else {
+ callRateLimit = nil
+ }
clientSecret := r.Configuration.ClientSecret.ValueString()
lookbackWindowDays := new(int64)
if !r.Configuration.LookbackWindowDays.IsUnknown() && !r.Configuration.LookbackWindowDays.IsNull() {
@@ -69,6 +94,12 @@ func (r *SourceStripeResourceModel) ToUpdateSDKType() *shared.SourceStripePutReq
} else {
lookbackWindowDays = nil
}
+ numWorkers := new(int64)
+ if !r.Configuration.NumWorkers.IsUnknown() && !r.Configuration.NumWorkers.IsNull() {
+ *numWorkers = r.Configuration.NumWorkers.ValueInt64()
+ } else {
+ numWorkers = nil
+ }
sliceRange := new(int64)
if !r.Configuration.SliceRange.IsUnknown() && !r.Configuration.SliceRange.IsNull() {
*sliceRange = r.Configuration.SliceRange.ValueInt64()
@@ -83,8 +114,10 @@ func (r *SourceStripeResourceModel) ToUpdateSDKType() *shared.SourceStripePutReq
}
configuration := shared.SourceStripeUpdate{
AccountID: accountID,
+ CallRateLimit: callRateLimit,
ClientSecret: clientSecret,
LookbackWindowDays: lookbackWindowDays,
+ NumWorkers: numWorkers,
SliceRange: sliceRange,
StartDate: startDate,
}
diff --git a/internal/provider/source_surveymonkey_data_source.go b/internal/provider/source_surveymonkey_data_source.go
old mode 100755
new mode 100644
index 98614121d..e0b67a0e0
--- a/internal/provider/source_surveymonkey_data_source.go
+++ b/internal/provider/source_surveymonkey_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceSurveymonkeyDataSource struct {
// SourceSurveymonkeyDataSourceModel describes the data model.
type SourceSurveymonkeyDataSourceModel struct {
- Configuration SourceSurveymonkey `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,81 +47,20 @@ func (r *SourceSurveymonkeyDataSource) Schema(ctx context.Context, req datasourc
MarkdownDescription: "SourceSurveymonkey DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests. See the docs for information on how to generate this key.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of the SurveyMonkey developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of the SurveyMonkey developer application.`,
- },
- },
- Description: `The authorization method to use to retrieve data from SurveyMonkey`,
- },
- "origin": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "USA",
- "Europe",
- "Canada",
- ),
- },
- MarkdownDescription: `must be one of ["USA", "Europe", "Canada"]` + "\n" +
- `Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "surveymonkey",
- ),
- },
- Description: `must be one of ["surveymonkey"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- "survey_ids": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `IDs of the surveys from which you'd like to replicate data. If left empty, data from all boards to which you have access will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_surveymonkey_data_source_sdk.go b/internal/provider/source_surveymonkey_data_source_sdk.go
old mode 100755
new mode 100644
index 1e4a33d4d..28d9d8a6a
--- a/internal/provider/source_surveymonkey_data_source_sdk.go
+++ b/internal/provider/source_surveymonkey_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSurveymonkeyDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_surveymonkey_resource.go b/internal/provider/source_surveymonkey_resource.go
old mode 100755
new mode 100644
index 4e9cd80b3..3c1cbcaa3
--- a/internal/provider/source_surveymonkey_resource.go
+++ b/internal/provider/source_surveymonkey_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceSurveymonkeyResource struct {
// SourceSurveymonkeyResourceModel describes the resource data model.
type SourceSurveymonkeyResourceModel struct {
Configuration SourceSurveymonkey `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -60,17 +62,9 @@ func (r *SourceSurveymonkeyResource) Schema(ctx context.Context, req resource.Sc
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests. See the docs for information on how to generate this key.`,
},
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Optional: true,
Description: `The Client ID of the SurveyMonkey developer application.`,
@@ -84,6 +78,8 @@ func (r *SourceSurveymonkeyResource) Schema(ctx context.Context, req resource.Sc
},
"origin": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `must be one of ["USA", "Europe", "Canada"]; Default: "USA"` + "\n" +
+ `Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different.`,
Validators: []validator.String{
stringvalidator.OneOf(
"USA",
@@ -91,24 +87,13 @@ func (r *SourceSurveymonkeyResource) Schema(ctx context.Context, req resource.Sc
"Canada",
),
},
- MarkdownDescription: `must be one of ["USA", "Europe", "Canada"]` + "\n" +
- `Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "surveymonkey",
- ),
- },
- Description: `must be one of ["surveymonkey"]`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
"survey_ids": schema.ListAttribute{
Optional: true,
@@ -117,13 +102,24 @@ func (r *SourceSurveymonkeyResource) Schema(ctx context.Context, req resource.Sc
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -187,7 +183,7 @@ func (r *SourceSurveymonkeyResource) Create(ctx context.Context, req resource.Cr
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSurveymonkey(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -363,5 +359,5 @@ func (r *SourceSurveymonkeyResource) Delete(ctx context.Context, req resource.De
}
func (r *SourceSurveymonkeyResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_surveymonkey_resource_sdk.go b/internal/provider/source_surveymonkey_resource_sdk.go
old mode 100755
new mode 100644
index f852d1b72..b542d4f15
--- a/internal/provider/source_surveymonkey_resource_sdk.go
+++ b/internal/provider/source_surveymonkey_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -12,7 +12,6 @@ func (r *SourceSurveymonkeyResourceModel) ToCreateSDKType() *shared.SourceSurvey
var credentials *shared.SourceSurveymonkeySurveyMonkeyAuthorizationMethod
if r.Configuration.Credentials != nil {
accessToken := r.Configuration.Credentials.AccessToken.ValueString()
- authMethod := shared.SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod(r.Configuration.Credentials.AuthMethod.ValueString())
clientID := new(string)
if !r.Configuration.Credentials.ClientID.IsUnknown() && !r.Configuration.Credentials.ClientID.IsNull() {
*clientID = r.Configuration.Credentials.ClientID.ValueString()
@@ -27,7 +26,6 @@ func (r *SourceSurveymonkeyResourceModel) ToCreateSDKType() *shared.SourceSurvey
}
credentials = &shared.SourceSurveymonkeySurveyMonkeyAuthorizationMethod{
AccessToken: accessToken,
- AuthMethod: authMethod,
ClientID: clientID,
ClientSecret: clientSecret,
}
@@ -38,7 +36,6 @@ func (r *SourceSurveymonkeyResourceModel) ToCreateSDKType() *shared.SourceSurvey
} else {
origin = nil
}
- sourceType := shared.SourceSurveymonkeySurveymonkey(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
var surveyIds []string = nil
for _, surveyIdsItem := range r.Configuration.SurveyIds {
@@ -47,10 +44,15 @@ func (r *SourceSurveymonkeyResourceModel) ToCreateSDKType() *shared.SourceSurvey
configuration := shared.SourceSurveymonkey{
Credentials: credentials,
Origin: origin,
- SourceType: sourceType,
StartDate: startDate,
SurveyIds: surveyIds,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -61,6 +63,7 @@ func (r *SourceSurveymonkeyResourceModel) ToCreateSDKType() *shared.SourceSurvey
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSurveymonkeyCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -74,10 +77,9 @@ func (r *SourceSurveymonkeyResourceModel) ToGetSDKType() *shared.SourceSurveymon
}
func (r *SourceSurveymonkeyResourceModel) ToUpdateSDKType() *shared.SourceSurveymonkeyPutRequest {
- var credentials *shared.SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethod
+ var credentials *shared.SurveyMonkeyAuthorizationMethod
if r.Configuration.Credentials != nil {
accessToken := r.Configuration.Credentials.AccessToken.ValueString()
- authMethod := shared.SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethodAuthMethod(r.Configuration.Credentials.AuthMethod.ValueString())
clientID := new(string)
if !r.Configuration.Credentials.ClientID.IsUnknown() && !r.Configuration.Credentials.ClientID.IsNull() {
*clientID = r.Configuration.Credentials.ClientID.ValueString()
@@ -90,16 +92,15 @@ func (r *SourceSurveymonkeyResourceModel) ToUpdateSDKType() *shared.SourceSurvey
} else {
clientSecret = nil
}
- credentials = &shared.SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethod{
+ credentials = &shared.SurveyMonkeyAuthorizationMethod{
AccessToken: accessToken,
- AuthMethod: authMethod,
ClientID: clientID,
ClientSecret: clientSecret,
}
}
- origin := new(shared.SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount)
+ origin := new(shared.OriginDatacenterOfTheSurveyMonkeyAccount)
if !r.Configuration.Origin.IsUnknown() && !r.Configuration.Origin.IsNull() {
- *origin = shared.SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount(r.Configuration.Origin.ValueString())
+ *origin = shared.OriginDatacenterOfTheSurveyMonkeyAccount(r.Configuration.Origin.ValueString())
} else {
origin = nil
}
diff --git a/internal/provider/source_surveysparrow_data_source.go b/internal/provider/source_surveysparrow_data_source.go
old mode 100755
new mode 100644
index 58da2205f..f71e40c15
--- a/internal/provider/source_surveysparrow_data_source.go
+++ b/internal/provider/source_surveysparrow_data_source.go
@@ -3,17 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -33,11 +29,11 @@ type SourceSurveySparrowDataSource struct {
// SourceSurveySparrowDataSourceModel describes the data model.
type SourceSurveySparrowDataSourceModel struct {
- Configuration SourceSurveySparrow `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -51,111 +47,20 @@ func (r *SourceSurveySparrowDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "SourceSurveySparrow DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Your access token. See here. The key is case sensitive.`,
- },
- "region": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_survey_sparrow_base_url_eu_based_account": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "url_base": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "https://eu-api.surveysparrow.com/v3",
- ),
- },
- Description: `must be one of ["https://eu-api.surveysparrow.com/v3"]`,
- },
- },
- Description: `Is your account location is EU based? If yes, the base url to retrieve data will be different.`,
- },
- "source_survey_sparrow_base_url_global_account": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "url_base": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "https://api.surveysparrow.com/v3",
- ),
- },
- Description: `must be one of ["https://api.surveysparrow.com/v3"]`,
- },
- },
- Description: `Is your account location is EU based? If yes, the base url to retrieve data will be different.`,
- },
- "source_survey_sparrow_update_base_url_eu_based_account": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "url_base": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "https://eu-api.surveysparrow.com/v3",
- ),
- },
- Description: `must be one of ["https://eu-api.surveysparrow.com/v3"]`,
- },
- },
- Description: `Is your account location is EU based? If yes, the base url to retrieve data will be different.`,
- },
- "source_survey_sparrow_update_base_url_global_account": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "url_base": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "https://api.surveysparrow.com/v3",
- ),
- },
- Description: `must be one of ["https://api.surveysparrow.com/v3"]`,
- },
- },
- Description: `Is your account location is EU based? If yes, the base url to retrieve data will be different.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Is your account location is EU based? If yes, the base url to retrieve data will be different.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "survey-sparrow",
- ),
- },
- Description: `must be one of ["survey-sparrow"]`,
- },
- "survey_id": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Validators: []validator.List{
- listvalidator.ValueStringsAre(validators.IsValidJSON()),
- },
- Description: `A List of your survey ids for survey-specific stream`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_surveysparrow_data_source_sdk.go b/internal/provider/source_surveysparrow_data_source_sdk.go
old mode 100755
new mode 100644
index c1f382121..5f8d925f2
--- a/internal/provider/source_surveysparrow_data_source_sdk.go
+++ b/internal/provider/source_surveysparrow_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceSurveySparrowDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_surveysparrow_resource.go b/internal/provider/source_surveysparrow_resource.go
old mode 100755
new mode 100644
index 0edc52258..b613d3daf
--- a/internal/provider/source_surveysparrow_resource.go
+++ b/internal/provider/source_surveysparrow_resource.go
@@ -3,19 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -37,6 +37,7 @@ type SourceSurveySparrowResource struct {
// SourceSurveySparrowResourceModel describes the resource data model.
type SourceSurveySparrowResourceModel struct {
Configuration SourceSurveySparrow `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,103 +59,56 @@ func (r *SourceSurveySparrowResource) Schema(ctx context.Context, req resource.S
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your access token. See here. The key is case sensitive.`,
},
"region": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_survey_sparrow_base_url_eu_based_account": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "url_base": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "https://eu-api.surveysparrow.com/v3",
- ),
- },
- Description: `must be one of ["https://eu-api.surveysparrow.com/v3"]`,
- },
- },
+ "eu_based_account": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Is your account location is EU based? If yes, the base url to retrieve data will be different.`,
},
- "source_survey_sparrow_base_url_global_account": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "url_base": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "https://api.surveysparrow.com/v3",
- ),
- },
- Description: `must be one of ["https://api.surveysparrow.com/v3"]`,
- },
- },
- Description: `Is your account location is EU based? If yes, the base url to retrieve data will be different.`,
- },
- "source_survey_sparrow_update_base_url_eu_based_account": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "url_base": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "https://eu-api.surveysparrow.com/v3",
- ),
- },
- Description: `must be one of ["https://eu-api.surveysparrow.com/v3"]`,
- },
- },
- Description: `Is your account location is EU based? If yes, the base url to retrieve data will be different.`,
- },
- "source_survey_sparrow_update_base_url_global_account": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "url_base": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "https://api.surveysparrow.com/v3",
- ),
- },
- Description: `must be one of ["https://api.surveysparrow.com/v3"]`,
- },
- },
+ "global_account": schema.SingleNestedAttribute{
+ Optional: true,
+ Attributes: map[string]schema.Attribute{},
Description: `Is your account location is EU based? If yes, the base url to retrieve data will be different.`,
},
},
+ Description: `Is your account location is EU based? If yes, the base url to retrieve data will be different.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Is your account location is EU based? If yes, the base url to retrieve data will be different.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "survey-sparrow",
- ),
- },
- Description: `must be one of ["survey-sparrow"]`,
},
"survey_id": schema.ListAttribute{
Optional: true,
ElementType: types.StringType,
+ Description: `A List of your survey ids for survey-specific stream`,
Validators: []validator.List{
listvalidator.ValueStringsAre(validators.IsValidJSON()),
},
- Description: `A List of your survey ids for survey-specific stream`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -218,7 +172,7 @@ func (r *SourceSurveySparrowResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceSurveySparrow(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -394,5 +348,5 @@ func (r *SourceSurveySparrowResource) Delete(ctx context.Context, req resource.D
}
func (r *SourceSurveySparrowResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_surveysparrow_resource_sdk.go b/internal/provider/source_surveysparrow_resource_sdk.go
old mode 100755
new mode 100644
index 576b06f0a..aef893f0b
--- a/internal/provider/source_surveysparrow_resource_sdk.go
+++ b/internal/provider/source_surveysparrow_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
"encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -12,42 +12,25 @@ func (r *SourceSurveySparrowResourceModel) ToCreateSDKType() *shared.SourceSurve
accessToken := r.Configuration.AccessToken.ValueString()
var region *shared.SourceSurveySparrowBaseURL
if r.Configuration.Region != nil {
- var sourceSurveySparrowBaseURLEUBasedAccount *shared.SourceSurveySparrowBaseURLEUBasedAccount
- if r.Configuration.Region.SourceSurveySparrowBaseURLEUBasedAccount != nil {
- urlBase := new(shared.SourceSurveySparrowBaseURLEUBasedAccountURLBase)
- if !r.Configuration.Region.SourceSurveySparrowBaseURLEUBasedAccount.URLBase.IsUnknown() && !r.Configuration.Region.SourceSurveySparrowBaseURLEUBasedAccount.URLBase.IsNull() {
- *urlBase = shared.SourceSurveySparrowBaseURLEUBasedAccountURLBase(r.Configuration.Region.SourceSurveySparrowBaseURLEUBasedAccount.URLBase.ValueString())
- } else {
- urlBase = nil
- }
- sourceSurveySparrowBaseURLEUBasedAccount = &shared.SourceSurveySparrowBaseURLEUBasedAccount{
- URLBase: urlBase,
- }
+ var sourceSurveySparrowEUBasedAccount *shared.SourceSurveySparrowEUBasedAccount
+ if r.Configuration.Region.EUBasedAccount != nil {
+ sourceSurveySparrowEUBasedAccount = &shared.SourceSurveySparrowEUBasedAccount{}
}
- if sourceSurveySparrowBaseURLEUBasedAccount != nil {
+ if sourceSurveySparrowEUBasedAccount != nil {
region = &shared.SourceSurveySparrowBaseURL{
- SourceSurveySparrowBaseURLEUBasedAccount: sourceSurveySparrowBaseURLEUBasedAccount,
+ SourceSurveySparrowEUBasedAccount: sourceSurveySparrowEUBasedAccount,
}
}
- var sourceSurveySparrowBaseURLGlobalAccount *shared.SourceSurveySparrowBaseURLGlobalAccount
- if r.Configuration.Region.SourceSurveySparrowBaseURLGlobalAccount != nil {
- urlBase1 := new(shared.SourceSurveySparrowBaseURLGlobalAccountURLBase)
- if !r.Configuration.Region.SourceSurveySparrowBaseURLGlobalAccount.URLBase.IsUnknown() && !r.Configuration.Region.SourceSurveySparrowBaseURLGlobalAccount.URLBase.IsNull() {
- *urlBase1 = shared.SourceSurveySparrowBaseURLGlobalAccountURLBase(r.Configuration.Region.SourceSurveySparrowBaseURLGlobalAccount.URLBase.ValueString())
- } else {
- urlBase1 = nil
- }
- sourceSurveySparrowBaseURLGlobalAccount = &shared.SourceSurveySparrowBaseURLGlobalAccount{
- URLBase: urlBase1,
- }
+ var sourceSurveySparrowGlobalAccount *shared.SourceSurveySparrowGlobalAccount
+ if r.Configuration.Region.GlobalAccount != nil {
+ sourceSurveySparrowGlobalAccount = &shared.SourceSurveySparrowGlobalAccount{}
}
- if sourceSurveySparrowBaseURLGlobalAccount != nil {
+ if sourceSurveySparrowGlobalAccount != nil {
region = &shared.SourceSurveySparrowBaseURL{
- SourceSurveySparrowBaseURLGlobalAccount: sourceSurveySparrowBaseURLGlobalAccount,
+ SourceSurveySparrowGlobalAccount: sourceSurveySparrowGlobalAccount,
}
}
}
- sourceType := shared.SourceSurveySparrowSurveySparrow(r.Configuration.SourceType.ValueString())
var surveyID []interface{} = nil
for _, surveyIDItem := range r.Configuration.SurveyID {
var surveyIDTmp interface{}
@@ -57,9 +40,14 @@ func (r *SourceSurveySparrowResourceModel) ToCreateSDKType() *shared.SourceSurve
configuration := shared.SourceSurveySparrow{
AccessToken: accessToken,
Region: region,
- SourceType: sourceType,
SurveyID: surveyID,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -70,6 +58,7 @@ func (r *SourceSurveySparrowResourceModel) ToCreateSDKType() *shared.SourceSurve
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceSurveySparrowCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -84,40 +73,24 @@ func (r *SourceSurveySparrowResourceModel) ToGetSDKType() *shared.SourceSurveySp
func (r *SourceSurveySparrowResourceModel) ToUpdateSDKType() *shared.SourceSurveySparrowPutRequest {
accessToken := r.Configuration.AccessToken.ValueString()
- var region *shared.SourceSurveySparrowUpdateBaseURL
+ var region *shared.BaseURL
if r.Configuration.Region != nil {
- var sourceSurveySparrowUpdateBaseURLEUBasedAccount *shared.SourceSurveySparrowUpdateBaseURLEUBasedAccount
- if r.Configuration.Region.SourceSurveySparrowUpdateBaseURLEUBasedAccount != nil {
- urlBase := new(shared.SourceSurveySparrowUpdateBaseURLEUBasedAccountURLBase)
- if !r.Configuration.Region.SourceSurveySparrowUpdateBaseURLEUBasedAccount.URLBase.IsUnknown() && !r.Configuration.Region.SourceSurveySparrowUpdateBaseURLEUBasedAccount.URLBase.IsNull() {
- *urlBase = shared.SourceSurveySparrowUpdateBaseURLEUBasedAccountURLBase(r.Configuration.Region.SourceSurveySparrowUpdateBaseURLEUBasedAccount.URLBase.ValueString())
- } else {
- urlBase = nil
- }
- sourceSurveySparrowUpdateBaseURLEUBasedAccount = &shared.SourceSurveySparrowUpdateBaseURLEUBasedAccount{
- URLBase: urlBase,
- }
+ var euBasedAccount *shared.EUBasedAccount
+ if r.Configuration.Region.EUBasedAccount != nil {
+ euBasedAccount = &shared.EUBasedAccount{}
}
- if sourceSurveySparrowUpdateBaseURLEUBasedAccount != nil {
- region = &shared.SourceSurveySparrowUpdateBaseURL{
- SourceSurveySparrowUpdateBaseURLEUBasedAccount: sourceSurveySparrowUpdateBaseURLEUBasedAccount,
+ if euBasedAccount != nil {
+ region = &shared.BaseURL{
+ EUBasedAccount: euBasedAccount,
}
}
- var sourceSurveySparrowUpdateBaseURLGlobalAccount *shared.SourceSurveySparrowUpdateBaseURLGlobalAccount
- if r.Configuration.Region.SourceSurveySparrowUpdateBaseURLGlobalAccount != nil {
- urlBase1 := new(shared.SourceSurveySparrowUpdateBaseURLGlobalAccountURLBase)
- if !r.Configuration.Region.SourceSurveySparrowUpdateBaseURLGlobalAccount.URLBase.IsUnknown() && !r.Configuration.Region.SourceSurveySparrowUpdateBaseURLGlobalAccount.URLBase.IsNull() {
- *urlBase1 = shared.SourceSurveySparrowUpdateBaseURLGlobalAccountURLBase(r.Configuration.Region.SourceSurveySparrowUpdateBaseURLGlobalAccount.URLBase.ValueString())
- } else {
- urlBase1 = nil
- }
- sourceSurveySparrowUpdateBaseURLGlobalAccount = &shared.SourceSurveySparrowUpdateBaseURLGlobalAccount{
- URLBase: urlBase1,
- }
+ var globalAccount *shared.GlobalAccount
+ if r.Configuration.Region.GlobalAccount != nil {
+ globalAccount = &shared.GlobalAccount{}
}
- if sourceSurveySparrowUpdateBaseURLGlobalAccount != nil {
- region = &shared.SourceSurveySparrowUpdateBaseURL{
- SourceSurveySparrowUpdateBaseURLGlobalAccount: sourceSurveySparrowUpdateBaseURLGlobalAccount,
+ if globalAccount != nil {
+ region = &shared.BaseURL{
+ GlobalAccount: globalAccount,
}
}
}
diff --git a/internal/provider/source_tempo_data_source.go b/internal/provider/source_tempo_data_source.go
old mode 100755
new mode 100644
index 17f2fe36b..5890c9a33
--- a/internal/provider/source_tempo_data_source.go
+++ b/internal/provider/source_tempo_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceTempoDataSource struct {
// SourceTempoDataSourceModel describes the data model.
type SourceTempoDataSourceModel struct {
- Configuration SourceTempo `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,34 +47,20 @@ func (r *SourceTempoDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceTempo DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `Tempo API Token. Go to Tempo>Settings, scroll down to Data Access and select API integration.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "tempo",
- ),
- },
- Description: `must be one of ["tempo"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_tempo_data_source_sdk.go b/internal/provider/source_tempo_data_source_sdk.go
old mode 100755
new mode 100644
index 2ce977e67..acb114bd3
--- a/internal/provider/source_tempo_data_source_sdk.go
+++ b/internal/provider/source_tempo_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTempoDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_tempo_resource.go b/internal/provider/source_tempo_resource.go
old mode 100755
new mode 100644
index 8faaad74f..2d3e21b57
--- a/internal/provider/source_tempo_resource.go
+++ b/internal/provider/source_tempo_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceTempoResource struct {
// SourceTempoResourceModel describes the resource data model.
type SourceTempoResourceModel struct {
- Configuration SourceTempo `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration SourceK6Cloud `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceTempoResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceTempoResource) Schema(ctx context.Context, req resource.SchemaReq
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Tempo API Token. Go to Tempo>Settings, scroll down to Data Access and select API integration.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "tempo",
- ),
- },
- Description: `must be one of ["tempo"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceTempoResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceTempo(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceTempoResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceTempoResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_tempo_resource_sdk.go b/internal/provider/source_tempo_resource_sdk.go
old mode 100755
new mode 100644
index 14bcc28b6..d86bc353e
--- a/internal/provider/source_tempo_resource_sdk.go
+++ b/internal/provider/source_tempo_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTempoResourceModel) ToCreateSDKType() *shared.SourceTempoCreateRequest {
apiToken := r.Configuration.APIToken.ValueString()
- sourceType := shared.SourceTempoTempo(r.Configuration.SourceType.ValueString())
configuration := shared.SourceTempo{
- APIToken: apiToken,
- SourceType: sourceType,
+ APIToken: apiToken,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceTempoResourceModel) ToCreateSDKType() *shared.SourceTempoCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceTempoCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_theguardianapi_data_source.go b/internal/provider/source_theguardianapi_data_source.go
old mode 100755
new mode 100644
index bae9ecf3f..33b908c13
--- a/internal/provider/source_theguardianapi_data_source.go
+++ b/internal/provider/source_theguardianapi_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceTheGuardianAPIDataSource struct {
// SourceTheGuardianAPIDataSourceModel describes the data model.
type SourceTheGuardianAPIDataSourceModel struct {
- Configuration SourceTheGuardianAPI `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,54 +47,20 @@ func (r *SourceTheGuardianAPIDataSource) Schema(ctx context.Context, req datasou
MarkdownDescription: "SourceTheGuardianAPI DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Your API Key. See here. The key is case sensitive.`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Description: `(Optional) Use this to set the maximum date (YYYY-MM-DD) of the results. Results newer than the end_date will not be shown. Default is set to the current date (today) for incremental syncs.`,
- },
- "query": schema.StringAttribute{
- Computed: true,
- Description: `(Optional) The query (q) parameter filters the results to only those that include that search term. The q parameter supports AND, OR and NOT operators.`,
- },
- "section": schema.StringAttribute{
- Computed: true,
- Description: `(Optional) Use this to filter the results by a particular section. See here for a list of all sections, and here for the sections endpoint documentation.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "the-guardian-api",
- ),
- },
- Description: `must be one of ["the-guardian-api"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `Use this to set the minimum date (YYYY-MM-DD) of the results. Results older than the start_date will not be shown.`,
- },
- "tag": schema.StringAttribute{
- Computed: true,
- Description: `(Optional) A tag is a piece of data that is used by The Guardian to categorise content. Use this parameter to filter results by showing only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_theguardianapi_data_source_sdk.go b/internal/provider/source_theguardianapi_data_source_sdk.go
old mode 100755
new mode 100644
index e7b88777c..28668f35e
--- a/internal/provider/source_theguardianapi_data_source_sdk.go
+++ b/internal/provider/source_theguardianapi_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTheGuardianAPIDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_theguardianapi_resource.go b/internal/provider/source_theguardianapi_resource.go
old mode 100755
new mode 100644
index 28cdd0218..a1c11621d
--- a/internal/provider/source_theguardianapi_resource.go
+++ b/internal/provider/source_theguardianapi_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceTheGuardianAPIResource struct {
// SourceTheGuardianAPIResourceModel describes the resource data model.
type SourceTheGuardianAPIResourceModel struct {
Configuration SourceTheGuardianAPI `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,6 +56,7 @@ func (r *SourceTheGuardianAPIResource) Schema(ctx context.Context, req resource.
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your API Key. See here. The key is case sensitive.`,
},
"end_date": schema.StringAttribute{
@@ -70,15 +71,6 @@ func (r *SourceTheGuardianAPIResource) Schema(ctx context.Context, req resource.
Optional: true,
Description: `(Optional) Use this to filter the results by a particular section. See here for a list of all sections, and here for the sections endpoint documentation.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "the-guardian-api",
- ),
- },
- Description: `must be one of ["the-guardian-api"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `Use this to set the minimum date (YYYY-MM-DD) of the results. Results older than the start_date will not be shown.`,
@@ -89,13 +81,24 @@ func (r *SourceTheGuardianAPIResource) Schema(ctx context.Context, req resource.
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -159,7 +162,7 @@ func (r *SourceTheGuardianAPIResource) Create(ctx context.Context, req resource.
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceTheGuardianAPI(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -335,5 +338,5 @@ func (r *SourceTheGuardianAPIResource) Delete(ctx context.Context, req resource.
}
func (r *SourceTheGuardianAPIResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_theguardianapi_resource_sdk.go b/internal/provider/source_theguardianapi_resource_sdk.go
old mode 100755
new mode 100644
index c9cfef7d9..97a73e888
--- a/internal/provider/source_theguardianapi_resource_sdk.go
+++ b/internal/provider/source_theguardianapi_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -27,7 +27,6 @@ func (r *SourceTheGuardianAPIResourceModel) ToCreateSDKType() *shared.SourceTheG
} else {
section = nil
}
- sourceType := shared.SourceTheGuardianAPITheGuardianAPI(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
tag := new(string)
if !r.Configuration.Tag.IsUnknown() && !r.Configuration.Tag.IsNull() {
@@ -36,13 +35,18 @@ func (r *SourceTheGuardianAPIResourceModel) ToCreateSDKType() *shared.SourceTheG
tag = nil
}
configuration := shared.SourceTheGuardianAPI{
- APIKey: apiKey,
- EndDate: endDate,
- Query: query,
- Section: section,
- SourceType: sourceType,
- StartDate: startDate,
- Tag: tag,
+ APIKey: apiKey,
+ EndDate: endDate,
+ Query: query,
+ Section: section,
+ StartDate: startDate,
+ Tag: tag,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -54,6 +58,7 @@ func (r *SourceTheGuardianAPIResourceModel) ToCreateSDKType() *shared.SourceTheG
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceTheGuardianAPICreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_tiktokmarketing_data_source.go b/internal/provider/source_tiktokmarketing_data_source.go
old mode 100755
new mode 100644
index 60bd1cad2..fdc9a03b5
--- a/internal/provider/source_tiktokmarketing_data_source.go
+++ b/internal/provider/source_tiktokmarketing_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceTiktokMarketingDataSource struct {
// SourceTiktokMarketingDataSourceModel describes the data model.
type SourceTiktokMarketingDataSourceModel struct {
- Configuration SourceTiktokMarketing `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,169 +47,20 @@ func (r *SourceTiktokMarketingDataSource) Schema(ctx context.Context, req dataso
MarkdownDescription: "SourceTiktokMarketing DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "attribution_window": schema.Int64Attribute{
- Computed: true,
- Description: `The attribution window in days.`,
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_tiktok_marketing_authentication_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Long-term Authorized Access Token.`,
- },
- "advertiser_id": schema.StringAttribute{
- Computed: true,
- Description: `The Advertiser ID to filter reports and streams. Let this empty to retrieve all.`,
- },
- "app_id": schema.StringAttribute{
- Computed: true,
- Description: `The Developer Application App ID.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "secret": schema.StringAttribute{
- Computed: true,
- Description: `The Developer Application Secret.`,
- },
- },
- Description: `Authentication method`,
- },
- "source_tiktok_marketing_authentication_method_sandbox_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The long-term authorized access token.`,
- },
- "advertiser_id": schema.StringAttribute{
- Computed: true,
- Description: `The Advertiser ID which generated for the developer's Sandbox application.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sandbox_access_token",
- ),
- },
- Description: `must be one of ["sandbox_access_token"]`,
- },
- },
- Description: `Authentication method`,
- },
- "source_tiktok_marketing_update_authentication_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Long-term Authorized Access Token.`,
- },
- "advertiser_id": schema.StringAttribute{
- Computed: true,
- Description: `The Advertiser ID to filter reports and streams. Let this empty to retrieve all.`,
- },
- "app_id": schema.StringAttribute{
- Computed: true,
- Description: `The Developer Application App ID.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "secret": schema.StringAttribute{
- Computed: true,
- Description: `The Developer Application Secret.`,
- },
- },
- Description: `Authentication method`,
- },
- "source_tiktok_marketing_update_authentication_method_sandbox_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The long-term authorized access token.`,
- },
- "advertiser_id": schema.StringAttribute{
- Computed: true,
- Description: `The Advertiser ID which generated for the developer's Sandbox application.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sandbox_access_token",
- ),
- },
- Description: `must be one of ["sandbox_access_token"]`,
- },
- },
- Description: `Authentication method`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Authentication method`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DD. All data generated between start_date and this date will be replicated. Not setting this option will result in always syncing the data till the current date.`,
- },
- "include_deleted": schema.BoolAttribute{
- Computed: true,
- Description: `Set to active if you want to include deleted data in reports.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "tiktok-marketing",
- ),
- },
- Description: `must be one of ["tiktok-marketing"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_tiktokmarketing_data_source_sdk.go b/internal/provider/source_tiktokmarketing_data_source_sdk.go
old mode 100755
new mode 100644
index 8048c7d63..babf9c415
--- a/internal/provider/source_tiktokmarketing_data_source_sdk.go
+++ b/internal/provider/source_tiktokmarketing_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTiktokMarketingDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_tiktokmarketing_resource.go b/internal/provider/source_tiktokmarketing_resource.go
old mode 100755
new mode 100644
index fecfa4929..548e19be1
--- a/internal/provider/source_tiktokmarketing_resource.go
+++ b/internal/provider/source_tiktokmarketing_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceTiktokMarketingResource struct {
// SourceTiktokMarketingResourceModel describes the resource data model.
type SourceTiktokMarketingResourceModel struct {
Configuration SourceTiktokMarketing `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,17 +57,19 @@ func (r *SourceTiktokMarketingResource) Schema(ctx context.Context, req resource
Required: true,
Attributes: map[string]schema.Attribute{
"attribution_window": schema.Int64Attribute{
- Optional: true,
- Description: `The attribution window in days.`,
+ Optional: true,
+ MarkdownDescription: `Default: 3` + "\n" +
+ `The attribution window in days.`,
},
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_tiktok_marketing_authentication_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Long-term Authorized Access Token.`,
},
"advertiser_id": schema.StringAttribute{
@@ -77,15 +80,6 @@ func (r *SourceTiktokMarketingResource) Schema(ctx context.Context, req resource
Required: true,
Description: `The Developer Application App ID.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"secret": schema.StringAttribute{
Required: true,
Description: `The Developer Application Secret.`,
@@ -93,125 +87,67 @@ func (r *SourceTiktokMarketingResource) Schema(ctx context.Context, req resource
},
Description: `Authentication method`,
},
- "source_tiktok_marketing_authentication_method_sandbox_access_token": schema.SingleNestedAttribute{
+ "sandbox_access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The long-term authorized access token.`,
},
"advertiser_id": schema.StringAttribute{
Required: true,
Description: `The Advertiser ID which generated for the developer's Sandbox application.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sandbox_access_token",
- ),
- },
- Description: `must be one of ["sandbox_access_token"]`,
- },
- },
- Description: `Authentication method`,
- },
- "source_tiktok_marketing_update_authentication_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Long-term Authorized Access Token.`,
- },
- "advertiser_id": schema.StringAttribute{
- Optional: true,
- Description: `The Advertiser ID to filter reports and streams. Let this empty to retrieve all.`,
- },
- "app_id": schema.StringAttribute{
- Required: true,
- Description: `The Developer Application App ID.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "secret": schema.StringAttribute{
- Required: true,
- Description: `The Developer Application Secret.`,
- },
- },
- Description: `Authentication method`,
- },
- "source_tiktok_marketing_update_authentication_method_sandbox_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `The long-term authorized access token.`,
- },
- "advertiser_id": schema.StringAttribute{
- Required: true,
- Description: `The Advertiser ID which generated for the developer's Sandbox application.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "sandbox_access_token",
- ),
- },
- Description: `must be one of ["sandbox_access_token"]`,
- },
},
Description: `Authentication method`,
},
},
+ Description: `Authentication method`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Authentication method`,
},
"end_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DD. All data generated between start_date and this date will be replicated. Not setting this option will result in always syncing the data till the current date.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DD. All data generated between start_date and this date will be replicated. Not setting this option will result in always syncing the data till the current date.`,
},
"include_deleted": schema.BoolAttribute{
- Optional: true,
- Description: `Set to active if you want to include deleted data in reports.`,
- },
- "source_type": schema.StringAttribute{
Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "tiktok-marketing",
- ),
- },
- Description: `must be one of ["tiktok-marketing"]`,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Set to active if you want to include deleted data in reports.`,
},
"start_date": schema.StringAttribute{
Optional: true,
+ MarkdownDescription: `Default: "2016-09-01"` + "\n" +
+ `The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -275,7 +211,7 @@ func (r *SourceTiktokMarketingResource) Create(ctx context.Context, req resource
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceTiktokMarketing(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -451,5 +387,5 @@ func (r *SourceTiktokMarketingResource) Delete(ctx context.Context, req resource
}
func (r *SourceTiktokMarketingResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_tiktokmarketing_resource_sdk.go b/internal/provider/source_tiktokmarketing_resource_sdk.go
old mode 100755
new mode 100644
index a1c67081a..67a905be0
--- a/internal/provider/source_tiktokmarketing_resource_sdk.go
+++ b/internal/provider/source_tiktokmarketing_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -17,55 +17,41 @@ func (r *SourceTiktokMarketingResourceModel) ToCreateSDKType() *shared.SourceTik
}
var credentials *shared.SourceTiktokMarketingAuthenticationMethod
if r.Configuration.Credentials != nil {
- var sourceTiktokMarketingAuthenticationMethodOAuth20 *shared.SourceTiktokMarketingAuthenticationMethodOAuth20
- if r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodOAuth20.AccessToken.ValueString()
+ var sourceTiktokMarketingOAuth20 *shared.SourceTiktokMarketingOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
advertiserID := new(string)
- if !r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodOAuth20.AdvertiserID.IsUnknown() && !r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodOAuth20.AdvertiserID.IsNull() {
- *advertiserID = r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodOAuth20.AdvertiserID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.AdvertiserID.IsUnknown() && !r.Configuration.Credentials.OAuth20.AdvertiserID.IsNull() {
+ *advertiserID = r.Configuration.Credentials.OAuth20.AdvertiserID.ValueString()
} else {
advertiserID = nil
}
- appID := r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodOAuth20.AppID.ValueString()
- authType := new(shared.SourceTiktokMarketingAuthenticationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceTiktokMarketingAuthenticationMethodOAuth20AuthType(r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- secret := r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodOAuth20.Secret.ValueString()
- sourceTiktokMarketingAuthenticationMethodOAuth20 = &shared.SourceTiktokMarketingAuthenticationMethodOAuth20{
+ appID := r.Configuration.Credentials.OAuth20.AppID.ValueString()
+ secret := r.Configuration.Credentials.OAuth20.Secret.ValueString()
+ sourceTiktokMarketingOAuth20 = &shared.SourceTiktokMarketingOAuth20{
AccessToken: accessToken,
AdvertiserID: advertiserID,
AppID: appID,
- AuthType: authType,
Secret: secret,
}
}
- if sourceTiktokMarketingAuthenticationMethodOAuth20 != nil {
+ if sourceTiktokMarketingOAuth20 != nil {
credentials = &shared.SourceTiktokMarketingAuthenticationMethod{
- SourceTiktokMarketingAuthenticationMethodOAuth20: sourceTiktokMarketingAuthenticationMethodOAuth20,
+ SourceTiktokMarketingOAuth20: sourceTiktokMarketingOAuth20,
}
}
- var sourceTiktokMarketingAuthenticationMethodSandboxAccessToken *shared.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken
- if r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken != nil {
- accessToken1 := r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken.AccessToken.ValueString()
- advertiserId1 := r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken.AdvertiserID.ValueString()
- authType1 := new(shared.SourceTiktokMarketingAuthenticationMethodSandboxAccessTokenAuthType)
- if !r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken.AuthType.IsNull() {
- *authType1 = shared.SourceTiktokMarketingAuthenticationMethodSandboxAccessTokenAuthType(r.Configuration.Credentials.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- sourceTiktokMarketingAuthenticationMethodSandboxAccessToken = &shared.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken{
+ var sourceTiktokMarketingSandboxAccessToken *shared.SourceTiktokMarketingSandboxAccessToken
+ if r.Configuration.Credentials.SandboxAccessToken != nil {
+ accessToken1 := r.Configuration.Credentials.SandboxAccessToken.AccessToken.ValueString()
+ advertiserId1 := r.Configuration.Credentials.SandboxAccessToken.AdvertiserID.ValueString()
+ sourceTiktokMarketingSandboxAccessToken = &shared.SourceTiktokMarketingSandboxAccessToken{
AccessToken: accessToken1,
AdvertiserID: advertiserId1,
- AuthType: authType1,
}
}
- if sourceTiktokMarketingAuthenticationMethodSandboxAccessToken != nil {
+ if sourceTiktokMarketingSandboxAccessToken != nil {
credentials = &shared.SourceTiktokMarketingAuthenticationMethod{
- SourceTiktokMarketingAuthenticationMethodSandboxAccessToken: sourceTiktokMarketingAuthenticationMethodSandboxAccessToken,
+ SourceTiktokMarketingSandboxAccessToken: sourceTiktokMarketingSandboxAccessToken,
}
}
}
@@ -81,12 +67,6 @@ func (r *SourceTiktokMarketingResourceModel) ToCreateSDKType() *shared.SourceTik
} else {
includeDeleted = nil
}
- sourceType := new(shared.SourceTiktokMarketingTiktokMarketing)
- if !r.Configuration.SourceType.IsUnknown() && !r.Configuration.SourceType.IsNull() {
- *sourceType = shared.SourceTiktokMarketingTiktokMarketing(r.Configuration.SourceType.ValueString())
- } else {
- sourceType = nil
- }
startDate := new(customTypes.Date)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString())
@@ -98,9 +78,14 @@ func (r *SourceTiktokMarketingResourceModel) ToCreateSDKType() *shared.SourceTik
Credentials: credentials,
EndDate: endDate,
IncludeDeleted: includeDeleted,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -111,6 +96,7 @@ func (r *SourceTiktokMarketingResourceModel) ToCreateSDKType() *shared.SourceTik
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceTiktokMarketingCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -132,55 +118,41 @@ func (r *SourceTiktokMarketingResourceModel) ToUpdateSDKType() *shared.SourceTik
}
var credentials *shared.SourceTiktokMarketingUpdateAuthenticationMethod
if r.Configuration.Credentials != nil {
- var sourceTiktokMarketingUpdateAuthenticationMethodOAuth20 *shared.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20
- if r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20.AccessToken.ValueString()
+ var sourceTiktokMarketingUpdateOAuth20 *shared.SourceTiktokMarketingUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
advertiserID := new(string)
- if !r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20.AdvertiserID.IsUnknown() && !r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20.AdvertiserID.IsNull() {
- *advertiserID = r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20.AdvertiserID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.AdvertiserID.IsUnknown() && !r.Configuration.Credentials.OAuth20.AdvertiserID.IsNull() {
+ *advertiserID = r.Configuration.Credentials.OAuth20.AdvertiserID.ValueString()
} else {
advertiserID = nil
}
- appID := r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20.AppID.ValueString()
- authType := new(shared.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20AuthType(r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- secret := r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20.Secret.ValueString()
- sourceTiktokMarketingUpdateAuthenticationMethodOAuth20 = &shared.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20{
+ appID := r.Configuration.Credentials.OAuth20.AppID.ValueString()
+ secret := r.Configuration.Credentials.OAuth20.Secret.ValueString()
+ sourceTiktokMarketingUpdateOAuth20 = &shared.SourceTiktokMarketingUpdateOAuth20{
AccessToken: accessToken,
AdvertiserID: advertiserID,
AppID: appID,
- AuthType: authType,
Secret: secret,
}
}
- if sourceTiktokMarketingUpdateAuthenticationMethodOAuth20 != nil {
+ if sourceTiktokMarketingUpdateOAuth20 != nil {
credentials = &shared.SourceTiktokMarketingUpdateAuthenticationMethod{
- SourceTiktokMarketingUpdateAuthenticationMethodOAuth20: sourceTiktokMarketingUpdateAuthenticationMethodOAuth20,
+ SourceTiktokMarketingUpdateOAuth20: sourceTiktokMarketingUpdateOAuth20,
}
}
- var sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken *shared.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken
- if r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken != nil {
- accessToken1 := r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken.AccessToken.ValueString()
- advertiserId1 := r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken.AdvertiserID.ValueString()
- authType1 := new(shared.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessTokenAuthType)
- if !r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken.AuthType.IsNull() {
- *authType1 = shared.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessTokenAuthType(r.Configuration.Credentials.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken = &shared.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken{
+ var sandboxAccessToken *shared.SandboxAccessToken
+ if r.Configuration.Credentials.SandboxAccessToken != nil {
+ accessToken1 := r.Configuration.Credentials.SandboxAccessToken.AccessToken.ValueString()
+ advertiserId1 := r.Configuration.Credentials.SandboxAccessToken.AdvertiserID.ValueString()
+ sandboxAccessToken = &shared.SandboxAccessToken{
AccessToken: accessToken1,
AdvertiserID: advertiserId1,
- AuthType: authType1,
}
}
- if sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken != nil {
+ if sandboxAccessToken != nil {
credentials = &shared.SourceTiktokMarketingUpdateAuthenticationMethod{
- SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken: sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken,
+ SandboxAccessToken: sandboxAccessToken,
}
}
}
diff --git a/internal/provider/source_todoist_data_source.go b/internal/provider/source_todoist_data_source.go
old mode 100755
new mode 100644
index 87b1c5085..944e1fc21
--- a/internal/provider/source_todoist_data_source.go
+++ b/internal/provider/source_todoist_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceTodoistDataSource struct {
// SourceTodoistDataSourceModel describes the data model.
type SourceTodoistDataSourceModel struct {
- Configuration SourceTodoist `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceTodoistDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceTodoist DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "todoist",
- ),
- },
- Description: `must be one of ["todoist"]`,
- },
- "token": schema.StringAttribute{
- Computed: true,
- Description: `Your API Token. See here. The token is case sensitive.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_todoist_data_source_sdk.go b/internal/provider/source_todoist_data_source_sdk.go
old mode 100755
new mode 100644
index c8f0f80d5..39322047e
--- a/internal/provider/source_todoist_data_source_sdk.go
+++ b/internal/provider/source_todoist_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTodoistDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_todoist_resource.go b/internal/provider/source_todoist_resource.go
old mode 100755
new mode 100644
index b746c98f7..6e66c5a9c
--- a/internal/provider/source_todoist_resource.go
+++ b/internal/provider/source_todoist_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceTodoistResource struct {
// SourceTodoistResourceModel describes the resource data model.
type SourceTodoistResourceModel struct {
- Configuration SourceTodoist `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration DestinationMilvusAPIToken `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceTodoistResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -54,28 +54,31 @@ func (r *SourceTodoistResource) Schema(ctx context.Context, req resource.SchemaR
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "todoist",
- ),
- },
- Description: `must be one of ["todoist"]`,
- },
"token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your API Token. See here. The token is case sensitive.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceTodoistResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceTodoist(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceTodoistResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceTodoistResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_todoist_resource_sdk.go b/internal/provider/source_todoist_resource_sdk.go
old mode 100755
new mode 100644
index b369a05b5..740c84144
--- a/internal/provider/source_todoist_resource_sdk.go
+++ b/internal/provider/source_todoist_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTodoistResourceModel) ToCreateSDKType() *shared.SourceTodoistCreateRequest {
- sourceType := shared.SourceTodoistTodoist(r.Configuration.SourceType.ValueString())
token := r.Configuration.Token.ValueString()
configuration := shared.SourceTodoist{
- SourceType: sourceType,
- Token: token,
+ Token: token,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceTodoistResourceModel) ToCreateSDKType() *shared.SourceTodoistCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceTodoistCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_trello_data_source.go b/internal/provider/source_trello_data_source.go
old mode 100755
new mode 100644
index ec6ff3a91..2220b10e2
--- a/internal/provider/source_trello_data_source.go
+++ b/internal/provider/source_trello_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceTrelloDataSource struct {
// SourceTrelloDataSourceModel describes the data model.
type SourceTrelloDataSourceModel struct {
- Configuration SourceTrello `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,50 +47,20 @@ func (r *SourceTrelloDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceTrello DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "board_ids": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `IDs of the boards to replicate data from. If left empty, data from all boards to which you have access will be replicated.`,
- },
- "key": schema.StringAttribute{
- Computed: true,
- Description: `Trello API key. See the docs for instructions on how to generate it.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "trello",
- ),
- },
- Description: `must be one of ["trello"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
- },
- "token": schema.StringAttribute{
- Computed: true,
- Description: `Trello API token. See the docs for instructions on how to generate it.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_trello_data_source_sdk.go b/internal/provider/source_trello_data_source_sdk.go
old mode 100755
new mode 100644
index 1fda23be5..30ec14bca
--- a/internal/provider/source_trello_data_source_sdk.go
+++ b/internal/provider/source_trello_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTrelloDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_trello_resource.go b/internal/provider/source_trello_resource.go
old mode 100755
new mode 100644
index ef89ff9e8..ed0c98a1f
--- a/internal/provider/source_trello_resource.go
+++ b/internal/provider/source_trello_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceTrelloResource struct {
// SourceTrelloResourceModel describes the resource data model.
type SourceTrelloResourceModel struct {
Configuration SourceTrello `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,41 +59,45 @@ func (r *SourceTrelloResource) Schema(ctx context.Context, req resource.SchemaRe
"board_ids": schema.ListAttribute{
Optional: true,
ElementType: types.StringType,
- Description: `IDs of the boards to replicate data from. If left empty, data from all boards to which you have access will be replicated.`,
+ Description: `IDs of the boards to replicate data from. If left empty, data from all boards to which you have access will be replicated. Please note that this is not the 8-character ID in the board's shortLink (URL of the board). Rather, what is required here is the 24-character ID usually returned by the API`,
},
"key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Trello API key. See the docs for instructions on how to generate it.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "trello",
- ),
- },
- Description: `must be one of ["trello"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`,
},
"token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Trello API token. See the docs for instructions on how to generate it.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -156,7 +161,7 @@ func (r *SourceTrelloResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceTrello(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -332,5 +337,5 @@ func (r *SourceTrelloResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceTrelloResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_trello_resource_sdk.go b/internal/provider/source_trello_resource_sdk.go
old mode 100755
new mode 100644
index 94418a2d9..5816b2636
--- a/internal/provider/source_trello_resource_sdk.go
+++ b/internal/provider/source_trello_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -14,15 +14,19 @@ func (r *SourceTrelloResourceModel) ToCreateSDKType() *shared.SourceTrelloCreate
boardIds = append(boardIds, boardIdsItem.ValueString())
}
key := r.Configuration.Key.ValueString()
- sourceType := shared.SourceTrelloTrello(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
token := r.Configuration.Token.ValueString()
configuration := shared.SourceTrello{
- BoardIds: boardIds,
- Key: key,
- SourceType: sourceType,
- StartDate: startDate,
- Token: token,
+ BoardIds: boardIds,
+ Key: key,
+ StartDate: startDate,
+ Token: token,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -34,6 +38,7 @@ func (r *SourceTrelloResourceModel) ToCreateSDKType() *shared.SourceTrelloCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceTrelloCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_trustpilot_data_source.go b/internal/provider/source_trustpilot_data_source.go
old mode 100755
new mode 100644
index 310f82dd3..b4e651783
--- a/internal/provider/source_trustpilot_data_source.go
+++ b/internal/provider/source_trustpilot_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceTrustpilotDataSource struct {
// SourceTrustpilotDataSourceModel describes the data model.
type SourceTrustpilotDataSourceModel struct {
- Configuration SourceTrustpilot `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,159 +47,20 @@ func (r *SourceTrustpilotDataSource) Schema(ctx context.Context, req datasource.
MarkdownDescription: "SourceTrustpilot DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "business_units": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `The names of business units which shall be synchronized. Some streams e.g. configured_business_units or private_reviews use this configuration.`,
- },
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_trustpilot_authorization_method_api_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "apikey",
- ),
- },
- Description: `must be one of ["apikey"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The API key of the Trustpilot API application.`,
- },
- },
- Description: `The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0.`,
- },
- "source_trustpilot_authorization_method_o_auth_2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The API key of the Trustpilot API application. (represents the OAuth Client ID)`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Secret of the Trustpilot API application. (represents the OAuth Client Secret)`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_trustpilot_update_authorization_method_api_key": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "apikey",
- ),
- },
- Description: `must be one of ["apikey"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The API key of the Trustpilot API application.`,
- },
- },
- Description: `The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0.`,
- },
- "source_trustpilot_update_authorization_method_o_auth_2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The API key of the Trustpilot API application. (represents the OAuth Client ID)`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Secret of the Trustpilot API application. (represents the OAuth Client Secret)`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "trustpilot",
- ),
- },
- Description: `must be one of ["trustpilot"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `For streams with sync. method incremental the start date time to be used`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_trustpilot_data_source_sdk.go b/internal/provider/source_trustpilot_data_source_sdk.go
old mode 100755
new mode 100644
index f7a75c128..97b0e858e
--- a/internal/provider/source_trustpilot_data_source_sdk.go
+++ b/internal/provider/source_trustpilot_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTrustpilotDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_trustpilot_resource.go b/internal/provider/source_trustpilot_resource.go
old mode 100755
new mode 100644
index 067db10e5..2f1a98c58
--- a/internal/provider/source_trustpilot_resource.go
+++ b/internal/provider/source_trustpilot_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceTrustpilotResource struct {
// SourceTrustpilotResourceModel describes the resource data model.
type SourceTrustpilotResourceModel struct {
Configuration SourceTrustpilot `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -63,18 +64,9 @@ func (r *SourceTrustpilotResource) Schema(ctx context.Context, req resource.Sche
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_trustpilot_authorization_method_api_key": schema.SingleNestedAttribute{
+ "api_key": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "apikey",
- ),
- },
- Description: `must be one of ["apikey"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The API key of the Trustpilot API application.`,
@@ -82,22 +74,14 @@ func (r *SourceTrustpilotResource) Schema(ctx context.Context, req resource.Sche
},
Description: `The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0.`,
},
- "source_trustpilot_authorization_method_o_auth_2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The API key of the Trustpilot API application. (represents the OAuth Client ID)`,
@@ -108,70 +92,16 @@ func (r *SourceTrustpilotResource) Schema(ctx context.Context, req resource.Sche
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The key to refresh the expired access_token.`,
},
"token_expiry_date": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_trustpilot_update_authorization_method_api_key": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "apikey",
- ),
- },
- Description: `must be one of ["apikey"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The API key of the Trustpilot API application.`,
- },
- },
- Description: `The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0.`,
- },
- "source_trustpilot_update_authorization_method_o_auth_2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
Required: true,
- Description: `The API key of the Trustpilot API application. (represents the OAuth Client ID)`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Secret of the Trustpilot API application. (represents the OAuth Client Secret)`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Required: true,
+ Sensitive: true,
+ Description: `The date-time when the access token should be refreshed.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date-time when the access token should be refreshed.`,
},
},
},
@@ -180,28 +110,30 @@ func (r *SourceTrustpilotResource) Schema(ctx context.Context, req resource.Sche
validators.ExactlyOneChild(),
},
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "trustpilot",
- ),
- },
- Description: `must be one of ["trustpilot"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `For streams with sync. method incremental the start date time to be used`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -265,7 +197,7 @@ func (r *SourceTrustpilotResource) Create(ctx context.Context, req resource.Crea
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceTrustpilot(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -441,5 +373,5 @@ func (r *SourceTrustpilotResource) Delete(ctx context.Context, req resource.Dele
}
func (r *SourceTrustpilotResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_trustpilot_resource_sdk.go b/internal/provider/source_trustpilot_resource_sdk.go
old mode 100755
new mode 100644
index 36fc73063..08e956845
--- a/internal/provider/source_trustpilot_resource_sdk.go
+++ b/internal/provider/source_trustpilot_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -14,60 +14,50 @@ func (r *SourceTrustpilotResourceModel) ToCreateSDKType() *shared.SourceTrustpil
businessUnits = append(businessUnits, businessUnitsItem.ValueString())
}
var credentials shared.SourceTrustpilotAuthorizationMethod
- var sourceTrustpilotAuthorizationMethodOAuth20 *shared.SourceTrustpilotAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.SourceTrustpilotAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceTrustpilotAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodOAuth20.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodOAuth20.TokenExpiryDate.ValueString())
- sourceTrustpilotAuthorizationMethodOAuth20 = &shared.SourceTrustpilotAuthorizationMethodOAuth20{
+ var sourceTrustpilotOAuth20 *shared.SourceTrustpilotOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
+ sourceTrustpilotOAuth20 = &shared.SourceTrustpilotOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceTrustpilotAuthorizationMethodOAuth20 != nil {
+ if sourceTrustpilotOAuth20 != nil {
credentials = shared.SourceTrustpilotAuthorizationMethod{
- SourceTrustpilotAuthorizationMethodOAuth20: sourceTrustpilotAuthorizationMethodOAuth20,
+ SourceTrustpilotOAuth20: sourceTrustpilotOAuth20,
}
}
- var sourceTrustpilotAuthorizationMethodAPIKey *shared.SourceTrustpilotAuthorizationMethodAPIKey
- if r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodAPIKey != nil {
- authType1 := new(shared.SourceTrustpilotAuthorizationMethodAPIKeyAuthType)
- if !r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodAPIKey.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodAPIKey.AuthType.IsNull() {
- *authType1 = shared.SourceTrustpilotAuthorizationMethodAPIKeyAuthType(r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodAPIKey.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- clientId1 := r.Configuration.Credentials.SourceTrustpilotAuthorizationMethodAPIKey.ClientID.ValueString()
- sourceTrustpilotAuthorizationMethodAPIKey = &shared.SourceTrustpilotAuthorizationMethodAPIKey{
- AuthType: authType1,
+ var sourceTrustpilotAPIKey *shared.SourceTrustpilotAPIKey
+ if r.Configuration.Credentials.APIKey != nil {
+ clientId1 := r.Configuration.Credentials.APIKey.ClientID.ValueString()
+ sourceTrustpilotAPIKey = &shared.SourceTrustpilotAPIKey{
ClientID: clientId1,
}
}
- if sourceTrustpilotAuthorizationMethodAPIKey != nil {
+ if sourceTrustpilotAPIKey != nil {
credentials = shared.SourceTrustpilotAuthorizationMethod{
- SourceTrustpilotAuthorizationMethodAPIKey: sourceTrustpilotAuthorizationMethodAPIKey,
+ SourceTrustpilotAPIKey: sourceTrustpilotAPIKey,
}
}
- sourceType := shared.SourceTrustpilotTrustpilot(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
configuration := shared.SourceTrustpilot{
BusinessUnits: businessUnits,
Credentials: credentials,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -78,6 +68,7 @@ func (r *SourceTrustpilotResourceModel) ToCreateSDKType() *shared.SourceTrustpil
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceTrustpilotCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -96,50 +87,36 @@ func (r *SourceTrustpilotResourceModel) ToUpdateSDKType() *shared.SourceTrustpil
businessUnits = append(businessUnits, businessUnitsItem.ValueString())
}
var credentials shared.SourceTrustpilotUpdateAuthorizationMethod
- var sourceTrustpilotUpdateAuthorizationMethodOAuth20 *shared.SourceTrustpilotUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.SourceTrustpilotUpdateAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceTrustpilotUpdateAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodOAuth20.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodOAuth20.TokenExpiryDate.ValueString())
- sourceTrustpilotUpdateAuthorizationMethodOAuth20 = &shared.SourceTrustpilotUpdateAuthorizationMethodOAuth20{
+ var sourceTrustpilotUpdateOAuth20 *shared.SourceTrustpilotUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
+ sourceTrustpilotUpdateOAuth20 = &shared.SourceTrustpilotUpdateOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceTrustpilotUpdateAuthorizationMethodOAuth20 != nil {
+ if sourceTrustpilotUpdateOAuth20 != nil {
credentials = shared.SourceTrustpilotUpdateAuthorizationMethod{
- SourceTrustpilotUpdateAuthorizationMethodOAuth20: sourceTrustpilotUpdateAuthorizationMethodOAuth20,
+ SourceTrustpilotUpdateOAuth20: sourceTrustpilotUpdateOAuth20,
}
}
- var sourceTrustpilotUpdateAuthorizationMethodAPIKey *shared.SourceTrustpilotUpdateAuthorizationMethodAPIKey
- if r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodAPIKey != nil {
- authType1 := new(shared.SourceTrustpilotUpdateAuthorizationMethodAPIKeyAuthType)
- if !r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodAPIKey.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodAPIKey.AuthType.IsNull() {
- *authType1 = shared.SourceTrustpilotUpdateAuthorizationMethodAPIKeyAuthType(r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodAPIKey.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- clientId1 := r.Configuration.Credentials.SourceTrustpilotUpdateAuthorizationMethodAPIKey.ClientID.ValueString()
- sourceTrustpilotUpdateAuthorizationMethodAPIKey = &shared.SourceTrustpilotUpdateAuthorizationMethodAPIKey{
- AuthType: authType1,
+ var sourceTrustpilotUpdateAPIKey *shared.SourceTrustpilotUpdateAPIKey
+ if r.Configuration.Credentials.APIKey != nil {
+ clientId1 := r.Configuration.Credentials.APIKey.ClientID.ValueString()
+ sourceTrustpilotUpdateAPIKey = &shared.SourceTrustpilotUpdateAPIKey{
ClientID: clientId1,
}
}
- if sourceTrustpilotUpdateAuthorizationMethodAPIKey != nil {
+ if sourceTrustpilotUpdateAPIKey != nil {
credentials = shared.SourceTrustpilotUpdateAuthorizationMethod{
- SourceTrustpilotUpdateAuthorizationMethodAPIKey: sourceTrustpilotUpdateAuthorizationMethodAPIKey,
+ SourceTrustpilotUpdateAPIKey: sourceTrustpilotUpdateAPIKey,
}
}
startDate := r.Configuration.StartDate.ValueString()
diff --git a/internal/provider/source_tvmazeschedule_data_source.go b/internal/provider/source_tvmazeschedule_data_source.go
old mode 100755
new mode 100644
index 87fee0ee3..6383d9714
--- a/internal/provider/source_tvmazeschedule_data_source.go
+++ b/internal/provider/source_tvmazeschedule_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceTvmazeScheduleDataSource struct {
// SourceTvmazeScheduleDataSourceModel describes the data model.
type SourceTvmazeScheduleDataSourceModel struct {
- Configuration SourceTvmazeSchedule `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,50 +47,20 @@ func (r *SourceTvmazeScheduleDataSource) Schema(ctx context.Context, req datasou
MarkdownDescription: "SourceTvmazeSchedule DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "domestic_schedule_country_code": schema.StringAttribute{
- Computed: true,
- Description: `Country code for domestic TV schedule retrieval.`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- MarkdownDescription: `End date for TV schedule retrieval. May be in the future. Optional.` + "\n" +
- ``,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "tvmaze-schedule",
- ),
- },
- Description: `must be one of ["tvmaze-schedule"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `Start date for TV schedule retrieval. May be in the future.`,
- },
- "web_schedule_country_code": schema.StringAttribute{
- Computed: true,
- MarkdownDescription: `ISO 3166-1 country code for web TV schedule retrieval. Leave blank for` + "\n" +
- `all countries plus global web channels (e.g. Netflix). Alternatively,` + "\n" +
- `set to 'global' for just global web channels.` + "\n" +
- ``,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_tvmazeschedule_data_source_sdk.go b/internal/provider/source_tvmazeschedule_data_source_sdk.go
old mode 100755
new mode 100644
index a6af8b294..e128ae30c
--- a/internal/provider/source_tvmazeschedule_data_source_sdk.go
+++ b/internal/provider/source_tvmazeschedule_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTvmazeScheduleDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_tvmazeschedule_resource.go b/internal/provider/source_tvmazeschedule_resource.go
old mode 100755
new mode 100644
index a7298c778..7199da9bb
--- a/internal/provider/source_tvmazeschedule_resource.go
+++ b/internal/provider/source_tvmazeschedule_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceTvmazeScheduleResource struct {
// SourceTvmazeScheduleResourceModel describes the resource data model.
type SourceTvmazeScheduleResourceModel struct {
Configuration SourceTvmazeSchedule `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -63,15 +63,6 @@ func (r *SourceTvmazeScheduleResource) Schema(ctx context.Context, req resource.
MarkdownDescription: `End date for TV schedule retrieval. May be in the future. Optional.` + "\n" +
``,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "tvmaze-schedule",
- ),
- },
- Description: `must be one of ["tvmaze-schedule"]`,
- },
"start_date": schema.StringAttribute{
Required: true,
Description: `Start date for TV schedule retrieval. May be in the future.`,
@@ -85,13 +76,24 @@ func (r *SourceTvmazeScheduleResource) Schema(ctx context.Context, req resource.
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +157,7 @@ func (r *SourceTvmazeScheduleResource) Create(ctx context.Context, req resource.
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceTvmazeSchedule(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +333,5 @@ func (r *SourceTvmazeScheduleResource) Delete(ctx context.Context, req resource.
}
func (r *SourceTvmazeScheduleResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_tvmazeschedule_resource_sdk.go b/internal/provider/source_tvmazeschedule_resource_sdk.go
old mode 100755
new mode 100644
index f66621941..31e2f1acc
--- a/internal/provider/source_tvmazeschedule_resource_sdk.go
+++ b/internal/provider/source_tvmazeschedule_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -15,7 +15,6 @@ func (r *SourceTvmazeScheduleResourceModel) ToCreateSDKType() *shared.SourceTvma
} else {
endDate = nil
}
- sourceType := shared.SourceTvmazeScheduleTvmazeSchedule(r.Configuration.SourceType.ValueString())
startDate := r.Configuration.StartDate.ValueString()
webScheduleCountryCode := new(string)
if !r.Configuration.WebScheduleCountryCode.IsUnknown() && !r.Configuration.WebScheduleCountryCode.IsNull() {
@@ -26,10 +25,15 @@ func (r *SourceTvmazeScheduleResourceModel) ToCreateSDKType() *shared.SourceTvma
configuration := shared.SourceTvmazeSchedule{
DomesticScheduleCountryCode: domesticScheduleCountryCode,
EndDate: endDate,
- SourceType: sourceType,
StartDate: startDate,
WebScheduleCountryCode: webScheduleCountryCode,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -40,6 +44,7 @@ func (r *SourceTvmazeScheduleResourceModel) ToCreateSDKType() *shared.SourceTvma
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceTvmazeScheduleCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_twilio_data_source.go b/internal/provider/source_twilio_data_source.go
old mode 100755
new mode 100644
index b24c12114..ce8a1e00e
--- a/internal/provider/source_twilio_data_source.go
+++ b/internal/provider/source_twilio_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceTwilioDataSource struct {
// SourceTwilioDataSourceModel describes the data model.
type SourceTwilioDataSourceModel struct {
- Configuration SourceTwilio `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,49 +47,20 @@ func (r *SourceTwilioDataSource) Schema(ctx context.Context, req datasource.Sche
MarkdownDescription: "SourceTwilio DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "account_sid": schema.StringAttribute{
- Computed: true,
- Description: `Twilio account SID`,
- },
- "auth_token": schema.StringAttribute{
- Computed: true,
- Description: `Twilio Auth Token.`,
- },
- "lookback_window": schema.Int64Attribute{
- Computed: true,
- Description: `How far into the past to look for records. (in minutes)`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "twilio",
- ),
- },
- Description: `must be one of ["twilio"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format 2020-10-01T00:00:00Z. Any data before this date will not be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_twilio_data_source_sdk.go b/internal/provider/source_twilio_data_source_sdk.go
old mode 100755
new mode 100644
index 953a18bf8..0f559d3ae
--- a/internal/provider/source_twilio_data_source_sdk.go
+++ b/internal/provider/source_twilio_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTwilioDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_twilio_resource.go b/internal/provider/source_twilio_resource.go
old mode 100755
new mode 100644
index 35686e774..abd3ff82a
--- a/internal/provider/source_twilio_resource.go
+++ b/internal/provider/source_twilio_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceTwilioResource struct {
// SourceTwilioResourceModel describes the resource data model.
type SourceTwilioResourceModel struct {
Configuration SourceTwilio `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -61,37 +62,41 @@ func (r *SourceTwilioResource) Schema(ctx context.Context, req resource.SchemaRe
},
"auth_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Twilio Auth Token.`,
},
"lookback_window": schema.Int64Attribute{
- Optional: true,
- Description: `How far into the past to look for records. (in minutes)`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "twilio",
- ),
- },
- Description: `must be one of ["twilio"]`,
+ Optional: true,
+ MarkdownDescription: `Default: 0` + "\n" +
+ `How far into the past to look for records. (in minutes)`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `UTC date and time in the format 2020-10-01T00:00:00Z. Any data before this date will not be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `UTC date and time in the format 2020-10-01T00:00:00Z. Any data before this date will not be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +160,7 @@ func (r *SourceTwilioResource) Create(ctx context.Context, req resource.CreateRe
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceTwilio(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +336,5 @@ func (r *SourceTwilioResource) Delete(ctx context.Context, req resource.DeleteRe
}
func (r *SourceTwilioResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_twilio_resource_sdk.go b/internal/provider/source_twilio_resource_sdk.go
old mode 100755
new mode 100644
index af7e6c208..8a5ac58ef
--- a/internal/provider/source_twilio_resource_sdk.go
+++ b/internal/provider/source_twilio_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -17,15 +17,19 @@ func (r *SourceTwilioResourceModel) ToCreateSDKType() *shared.SourceTwilioCreate
} else {
lookbackWindow = nil
}
- sourceType := shared.SourceTwilioTwilio(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceTwilio{
AccountSid: accountSid,
AuthToken: authToken,
LookbackWindow: lookbackWindow,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -36,6 +40,7 @@ func (r *SourceTwilioResourceModel) ToCreateSDKType() *shared.SourceTwilioCreate
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceTwilioCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_twiliotaskrouter_data_source.go b/internal/provider/source_twiliotaskrouter_data_source.go
old mode 100755
new mode 100644
index 2e954ff24..237237d5c
--- a/internal/provider/source_twiliotaskrouter_data_source.go
+++ b/internal/provider/source_twiliotaskrouter_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceTwilioTaskrouterDataSource struct {
// SourceTwilioTaskrouterDataSourceModel describes the data model.
type SourceTwilioTaskrouterDataSourceModel struct {
- Configuration SourceTwilioTaskrouter `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourceTwilioTaskrouterDataSource) Schema(ctx context.Context, req datas
MarkdownDescription: "SourceTwilioTaskrouter DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "account_sid": schema.StringAttribute{
- Computed: true,
- Description: `Twilio Account ID`,
- },
- "auth_token": schema.StringAttribute{
- Computed: true,
- Description: `Twilio Auth Token`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "twilio-taskrouter",
- ),
- },
- Description: `must be one of ["twilio-taskrouter"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_twiliotaskrouter_data_source_sdk.go b/internal/provider/source_twiliotaskrouter_data_source_sdk.go
old mode 100755
new mode 100644
index 89a600a7e..fbed01f03
--- a/internal/provider/source_twiliotaskrouter_data_source_sdk.go
+++ b/internal/provider/source_twiliotaskrouter_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTwilioTaskrouterDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_twiliotaskrouter_resource.go b/internal/provider/source_twiliotaskrouter_resource.go
old mode 100755
new mode 100644
index 9e7e844b0..4860b94b9
--- a/internal/provider/source_twiliotaskrouter_resource.go
+++ b/internal/provider/source_twiliotaskrouter_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceTwilioTaskrouterResource struct {
// SourceTwilioTaskrouterResourceModel describes the resource data model.
type SourceTwilioTaskrouterResourceModel struct {
Configuration SourceTwilioTaskrouter `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -60,26 +60,29 @@ func (r *SourceTwilioTaskrouterResource) Schema(ctx context.Context, req resourc
},
"auth_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Twilio Auth Token`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "twilio-taskrouter",
- ),
- },
- Description: `must be one of ["twilio-taskrouter"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceTwilioTaskrouterResource) Create(ctx context.Context, req resourc
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceTwilioTaskrouter(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceTwilioTaskrouterResource) Delete(ctx context.Context, req resourc
}
func (r *SourceTwilioTaskrouterResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_twiliotaskrouter_resource_sdk.go b/internal/provider/source_twiliotaskrouter_resource_sdk.go
old mode 100755
new mode 100644
index ecb1d5a02..4efb1d36e
--- a/internal/provider/source_twiliotaskrouter_resource_sdk.go
+++ b/internal/provider/source_twiliotaskrouter_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTwilioTaskrouterResourceModel) ToCreateSDKType() *shared.SourceTwilioTaskrouterCreateRequest {
accountSid := r.Configuration.AccountSid.ValueString()
authToken := r.Configuration.AuthToken.ValueString()
- sourceType := shared.SourceTwilioTaskrouterTwilioTaskrouter(r.Configuration.SourceType.ValueString())
configuration := shared.SourceTwilioTaskrouter{
AccountSid: accountSid,
AuthToken: authToken,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceTwilioTaskrouterResourceModel) ToCreateSDKType() *shared.SourceTw
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceTwilioTaskrouterCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_twitter_data_source.go b/internal/provider/source_twitter_data_source.go
old mode 100755
new mode 100644
index 106843b33..10b1a3442
--- a/internal/provider/source_twitter_data_source.go
+++ b/internal/provider/source_twitter_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceTwitterDataSource struct {
// SourceTwitterDataSourceModel describes the data model.
type SourceTwitterDataSourceModel struct {
- Configuration SourceTwitter `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,52 +47,20 @@ func (r *SourceTwitterDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceTwitter DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `App only Bearer Token. See the docs for more information on how to obtain this token.`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The end date for retrieving tweets must be a minimum of 10 seconds prior to the request time.`,
- },
- "query": schema.StringAttribute{
- Computed: true,
- Description: `Query for matching Tweets. You can learn how to build this query by reading build a query guide .`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "twitter",
- ),
- },
- Description: `must be one of ["twitter"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The start date for retrieving tweets cannot be more than 7 days in the past.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_twitter_data_source_sdk.go b/internal/provider/source_twitter_data_source_sdk.go
old mode 100755
new mode 100644
index e4c9ad2f2..fbe750437
--- a/internal/provider/source_twitter_data_source_sdk.go
+++ b/internal/provider/source_twitter_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTwitterDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_twitter_resource.go b/internal/provider/source_twitter_resource.go
old mode 100755
new mode 100644
index d74a3fe5f..b4d50a6a9
--- a/internal/provider/source_twitter_resource.go
+++ b/internal/provider/source_twitter_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceTwitterResource struct {
// SourceTwitterResourceModel describes the resource data model.
type SourceTwitterResourceModel struct {
Configuration SourceTwitter `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,44 +58,47 @@ func (r *SourceTwitterResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `App only Bearer Token. See the docs for more information on how to obtain this token.`,
},
"end_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `The end date for retrieving tweets must be a minimum of 10 seconds prior to the request time.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The end date for retrieving tweets must be a minimum of 10 seconds prior to the request time.`,
},
"query": schema.StringAttribute{
Required: true,
Description: `Query for matching Tweets. You can learn how to build this query by reading build a query guide .`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "twitter",
- ),
- },
- Description: `must be one of ["twitter"]`,
- },
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `The start date for retrieving tweets cannot be more than 7 days in the past.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The start date for retrieving tweets cannot be more than 7 days in the past.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -158,7 +162,7 @@ func (r *SourceTwitterResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceTwitter(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -334,5 +338,5 @@ func (r *SourceTwitterResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceTwitterResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_twitter_resource_sdk.go b/internal/provider/source_twitter_resource_sdk.go
old mode 100755
new mode 100644
index 83fdb537d..5c3de04c6
--- a/internal/provider/source_twitter_resource_sdk.go
+++ b/internal/provider/source_twitter_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -17,7 +17,6 @@ func (r *SourceTwitterResourceModel) ToCreateSDKType() *shared.SourceTwitterCrea
endDate = nil
}
query := r.Configuration.Query.ValueString()
- sourceType := shared.SourceTwitterTwitter(r.Configuration.SourceType.ValueString())
startDate := new(time.Time)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
@@ -25,11 +24,16 @@ func (r *SourceTwitterResourceModel) ToCreateSDKType() *shared.SourceTwitterCrea
startDate = nil
}
configuration := shared.SourceTwitter{
- APIKey: apiKey,
- EndDate: endDate,
- Query: query,
- SourceType: sourceType,
- StartDate: startDate,
+ APIKey: apiKey,
+ EndDate: endDate,
+ Query: query,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -41,6 +45,7 @@ func (r *SourceTwitterResourceModel) ToCreateSDKType() *shared.SourceTwitterCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceTwitterCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_typeform_data_source.go b/internal/provider/source_typeform_data_source.go
old mode 100755
new mode 100644
index ab331c093..37633e9bc
--- a/internal/provider/source_typeform_data_source.go
+++ b/internal/provider/source_typeform_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceTypeformDataSource struct {
// SourceTypeformDataSourceModel describes the data model.
type SourceTypeformDataSourceModel struct {
- Configuration SourceTypeform `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,160 +47,20 @@ func (r *SourceTypeformDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceTypeform DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_typeform_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of the Typeform developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret the Typeform developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_typeform_authorization_method_private_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Log into your Typeform account and then generate a personal Access Token.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_typeform_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of the Typeform developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret the Typeform developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_typeform_update_authorization_method_private_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Log into your Typeform account and then generate a personal Access Token.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "form_ids": schema.ListAttribute{
- Computed: true,
- ElementType: types.StringType,
- Description: `When this parameter is set, the connector will replicate data only from the input forms. Otherwise, all forms in your Typeform account will be replicated. You can find form IDs in your form URLs. For example, in the URL "https://mysite.typeform.com/to/u6nXL7" the form_id is u6nXL7. You can find form URLs on Share panel`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "typeform",
- ),
- },
- Description: `must be one of ["typeform"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for Typeform API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_typeform_data_source_sdk.go b/internal/provider/source_typeform_data_source_sdk.go
old mode 100755
new mode 100644
index 283867058..e52fb5f17
--- a/internal/provider/source_typeform_data_source_sdk.go
+++ b/internal/provider/source_typeform_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceTypeformDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_typeform_resource.go b/internal/provider/source_typeform_resource.go
old mode 100755
new mode 100644
index d648ddabe..e66e9836a
--- a/internal/provider/source_typeform_resource.go
+++ b/internal/provider/source_typeform_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceTypeformResource struct {
// SourceTypeformResourceModel describes the resource data model.
type SourceTypeformResourceModel struct {
Configuration SourceTypeform `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,22 +59,14 @@ func (r *SourceTypeformResource) Schema(ctx context.Context, req resource.Schema
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "source_typeform_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of the Typeform developer application.`,
@@ -84,88 +77,27 @@ func (r *SourceTypeformResource) Schema(ctx context.Context, req resource.Schema
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The key to refresh the expired access_token.`,
},
"token_expiry_date": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date-time when the access token should be refreshed.`,
- },
- },
- },
- "source_typeform_authorization_method_private_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Log into your Typeform account and then generate a personal Access Token.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_typeform_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
Required: true,
- Description: `The Client ID of the Typeform developer application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret the Typeform developer application.`,
- },
- "refresh_token": schema.StringAttribute{
- Required: true,
- Description: `The key to refresh the expired access_token.`,
- },
- "token_expiry_date": schema.StringAttribute{
- Required: true,
+ Sensitive: true,
+ Description: `The date-time when the access token should be refreshed.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date-time when the access token should be refreshed.`,
},
},
},
- "source_typeform_update_authorization_method_private_token": schema.SingleNestedAttribute{
+ "private_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Log into your Typeform account and then generate a personal Access Token.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
},
},
},
@@ -178,31 +110,33 @@ func (r *SourceTypeformResource) Schema(ctx context.Context, req resource.Schema
ElementType: types.StringType,
Description: `When this parameter is set, the connector will replicate data only from the input forms. Otherwise, all forms in your Typeform account will be replicated. You can find form IDs in your form URLs. For example, in the URL "https://mysite.typeform.com/to/u6nXL7" the form_id is u6nXL7. You can find form URLs on Share panel`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "typeform",
- ),
- },
- Description: `must be one of ["typeform"]`,
- },
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `The date from which you'd like to replicate data for Typeform API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for Typeform API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -266,7 +200,7 @@ func (r *SourceTypeformResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceTypeform(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -442,5 +376,5 @@ func (r *SourceTypeformResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceTypeformResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_typeform_resource_sdk.go b/internal/provider/source_typeform_resource_sdk.go
old mode 100755
new mode 100644
index 3bbcd5996..45da51b91
--- a/internal/provider/source_typeform_resource_sdk.go
+++ b/internal/provider/source_typeform_resource_sdk.go
@@ -3,64 +3,49 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
func (r *SourceTypeformResourceModel) ToCreateSDKType() *shared.SourceTypeformCreateRequest {
var credentials shared.SourceTypeformAuthorizationMethod
- var sourceTypeformAuthorizationMethodOAuth20 *shared.SourceTypeformAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceTypeformAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceTypeformAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.SourceTypeformAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceTypeformAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTypeformAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceTypeformAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceTypeformAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceTypeformAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceTypeformAuthorizationMethodOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceTypeformAuthorizationMethodOAuth20.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceTypeformAuthorizationMethodOAuth20.TokenExpiryDate.ValueString())
- sourceTypeformAuthorizationMethodOAuth20 = &shared.SourceTypeformAuthorizationMethodOAuth20{
+ var sourceTypeformOAuth20 *shared.SourceTypeformOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
+ sourceTypeformOAuth20 = &shared.SourceTypeformOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceTypeformAuthorizationMethodOAuth20 != nil {
+ if sourceTypeformOAuth20 != nil {
credentials = shared.SourceTypeformAuthorizationMethod{
- SourceTypeformAuthorizationMethodOAuth20: sourceTypeformAuthorizationMethodOAuth20,
+ SourceTypeformOAuth20: sourceTypeformOAuth20,
}
}
- var sourceTypeformAuthorizationMethodPrivateToken *shared.SourceTypeformAuthorizationMethodPrivateToken
- if r.Configuration.Credentials.SourceTypeformAuthorizationMethodPrivateToken != nil {
- accessToken1 := r.Configuration.Credentials.SourceTypeformAuthorizationMethodPrivateToken.AccessToken.ValueString()
- authType1 := new(shared.SourceTypeformAuthorizationMethodPrivateTokenAuthType)
- if !r.Configuration.Credentials.SourceTypeformAuthorizationMethodPrivateToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTypeformAuthorizationMethodPrivateToken.AuthType.IsNull() {
- *authType1 = shared.SourceTypeformAuthorizationMethodPrivateTokenAuthType(r.Configuration.Credentials.SourceTypeformAuthorizationMethodPrivateToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- sourceTypeformAuthorizationMethodPrivateToken = &shared.SourceTypeformAuthorizationMethodPrivateToken{
+ var sourceTypeformPrivateToken *shared.SourceTypeformPrivateToken
+ if r.Configuration.Credentials.PrivateToken != nil {
+ accessToken1 := r.Configuration.Credentials.PrivateToken.AccessToken.ValueString()
+ sourceTypeformPrivateToken = &shared.SourceTypeformPrivateToken{
AccessToken: accessToken1,
- AuthType: authType1,
}
}
- if sourceTypeformAuthorizationMethodPrivateToken != nil {
+ if sourceTypeformPrivateToken != nil {
credentials = shared.SourceTypeformAuthorizationMethod{
- SourceTypeformAuthorizationMethodPrivateToken: sourceTypeformAuthorizationMethodPrivateToken,
+ SourceTypeformPrivateToken: sourceTypeformPrivateToken,
}
}
var formIds []string = nil
for _, formIdsItem := range r.Configuration.FormIds {
formIds = append(formIds, formIdsItem.ValueString())
}
- sourceType := shared.SourceTypeformTypeform(r.Configuration.SourceType.ValueString())
startDate := new(time.Time)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
@@ -70,9 +55,14 @@ func (r *SourceTypeformResourceModel) ToCreateSDKType() *shared.SourceTypeformCr
configuration := shared.SourceTypeform{
Credentials: credentials,
FormIds: formIds,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -83,6 +73,7 @@ func (r *SourceTypeformResourceModel) ToCreateSDKType() *shared.SourceTypeformCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceTypeformCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -97,50 +88,36 @@ func (r *SourceTypeformResourceModel) ToGetSDKType() *shared.SourceTypeformCreat
func (r *SourceTypeformResourceModel) ToUpdateSDKType() *shared.SourceTypeformPutRequest {
var credentials shared.SourceTypeformUpdateAuthorizationMethod
- var sourceTypeformUpdateAuthorizationMethodOAuth20 *shared.SourceTypeformUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodOAuth20.AccessToken.ValueString()
- authType := new(shared.SourceTypeformUpdateAuthorizationMethodOAuth20AuthType)
- if !r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodOAuth20.AuthType.IsNull() {
- *authType = shared.SourceTypeformUpdateAuthorizationMethodOAuth20AuthType(r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodOAuth20.AuthType.ValueString())
- } else {
- authType = nil
- }
- clientID := r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodOAuth20.RefreshToken.ValueString()
- tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodOAuth20.TokenExpiryDate.ValueString())
- sourceTypeformUpdateAuthorizationMethodOAuth20 = &shared.SourceTypeformUpdateAuthorizationMethodOAuth20{
+ var sourceTypeformUpdateOAuth20 *shared.SourceTypeformUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
+ tokenExpiryDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.Credentials.OAuth20.TokenExpiryDate.ValueString())
+ sourceTypeformUpdateOAuth20 = &shared.SourceTypeformUpdateOAuth20{
AccessToken: accessToken,
- AuthType: authType,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
TokenExpiryDate: tokenExpiryDate,
}
}
- if sourceTypeformUpdateAuthorizationMethodOAuth20 != nil {
+ if sourceTypeformUpdateOAuth20 != nil {
credentials = shared.SourceTypeformUpdateAuthorizationMethod{
- SourceTypeformUpdateAuthorizationMethodOAuth20: sourceTypeformUpdateAuthorizationMethodOAuth20,
+ SourceTypeformUpdateOAuth20: sourceTypeformUpdateOAuth20,
}
}
- var sourceTypeformUpdateAuthorizationMethodPrivateToken *shared.SourceTypeformUpdateAuthorizationMethodPrivateToken
- if r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodPrivateToken != nil {
- accessToken1 := r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodPrivateToken.AccessToken.ValueString()
- authType1 := new(shared.SourceTypeformUpdateAuthorizationMethodPrivateTokenAuthType)
- if !r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodPrivateToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodPrivateToken.AuthType.IsNull() {
- *authType1 = shared.SourceTypeformUpdateAuthorizationMethodPrivateTokenAuthType(r.Configuration.Credentials.SourceTypeformUpdateAuthorizationMethodPrivateToken.AuthType.ValueString())
- } else {
- authType1 = nil
- }
- sourceTypeformUpdateAuthorizationMethodPrivateToken = &shared.SourceTypeformUpdateAuthorizationMethodPrivateToken{
+ var sourceTypeformUpdatePrivateToken *shared.SourceTypeformUpdatePrivateToken
+ if r.Configuration.Credentials.PrivateToken != nil {
+ accessToken1 := r.Configuration.Credentials.PrivateToken.AccessToken.ValueString()
+ sourceTypeformUpdatePrivateToken = &shared.SourceTypeformUpdatePrivateToken{
AccessToken: accessToken1,
- AuthType: authType1,
}
}
- if sourceTypeformUpdateAuthorizationMethodPrivateToken != nil {
+ if sourceTypeformUpdatePrivateToken != nil {
credentials = shared.SourceTypeformUpdateAuthorizationMethod{
- SourceTypeformUpdateAuthorizationMethodPrivateToken: sourceTypeformUpdateAuthorizationMethodPrivateToken,
+ SourceTypeformUpdatePrivateToken: sourceTypeformUpdatePrivateToken,
}
}
var formIds []string = nil
diff --git a/internal/provider/source_uscensus_data_source.go b/internal/provider/source_uscensus_data_source.go
old mode 100755
new mode 100644
index b5a6be4c9..46ae5e1bd
--- a/internal/provider/source_uscensus_data_source.go
+++ b/internal/provider/source_uscensus_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceUsCensusDataSource struct {
// SourceUsCensusDataSourceModel describes the data model.
type SourceUsCensusDataSourceModel struct {
- Configuration SourceUsCensus `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,42 +47,20 @@ func (r *SourceUsCensusDataSource) Schema(ctx context.Context, req datasource.Sc
MarkdownDescription: "SourceUsCensus DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Your API Key. Get your key here.`,
- },
- "query_params": schema.StringAttribute{
- Computed: true,
- Description: `The query parameters portion of the GET request, without the api key`,
- },
- "query_path": schema.StringAttribute{
- Computed: true,
- Description: `The path portion of the GET request`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "us-census",
- ),
- },
- Description: `must be one of ["us-census"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_uscensus_data_source_sdk.go b/internal/provider/source_uscensus_data_source_sdk.go
old mode 100755
new mode 100644
index 89b30b695..815c6e8d3
--- a/internal/provider/source_uscensus_data_source_sdk.go
+++ b/internal/provider/source_uscensus_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceUsCensusDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_uscensus_resource.go b/internal/provider/source_uscensus_resource.go
old mode 100755
new mode 100644
index 1f6ec805d..1ee27a810
--- a/internal/provider/source_uscensus_resource.go
+++ b/internal/provider/source_uscensus_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceUsCensusResource struct {
// SourceUsCensusResourceModel describes the resource data model.
type SourceUsCensusResourceModel struct {
Configuration SourceUsCensus `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,6 +56,7 @@ func (r *SourceUsCensusResource) Schema(ctx context.Context, req resource.Schema
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your API Key. Get your key here.`,
},
"query_params": schema.StringAttribute{
@@ -66,24 +67,26 @@ func (r *SourceUsCensusResource) Schema(ctx context.Context, req resource.Schema
Required: true,
Description: `The path portion of the GET request`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "us-census",
- ),
- },
- Description: `must be one of ["us-census"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -147,7 +150,7 @@ func (r *SourceUsCensusResource) Create(ctx context.Context, req resource.Create
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceUsCensus(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -323,5 +326,5 @@ func (r *SourceUsCensusResource) Delete(ctx context.Context, req resource.Delete
}
func (r *SourceUsCensusResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_uscensus_resource_sdk.go b/internal/provider/source_uscensus_resource_sdk.go
old mode 100755
new mode 100644
index 17e245f20..bc2468209
--- a/internal/provider/source_uscensus_resource_sdk.go
+++ b/internal/provider/source_uscensus_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -16,12 +16,16 @@ func (r *SourceUsCensusResourceModel) ToCreateSDKType() *shared.SourceUsCensusCr
queryParams = nil
}
queryPath := r.Configuration.QueryPath.ValueString()
- sourceType := shared.SourceUsCensusUsCensus(r.Configuration.SourceType.ValueString())
configuration := shared.SourceUsCensus{
APIKey: apiKey,
QueryParams: queryParams,
QueryPath: queryPath,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -33,6 +37,7 @@ func (r *SourceUsCensusResourceModel) ToCreateSDKType() *shared.SourceUsCensusCr
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceUsCensusCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_vantage_data_source.go b/internal/provider/source_vantage_data_source.go
old mode 100755
new mode 100644
index b228002cc..1c637d5fb
--- a/internal/provider/source_vantage_data_source.go
+++ b/internal/provider/source_vantage_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceVantageDataSource struct {
// SourceVantageDataSourceModel describes the data model.
type SourceVantageDataSourceModel struct {
- Configuration SourceVantage `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,34 +47,20 @@ func (r *SourceVantageDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceVantage DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Your API Access token. See here.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "vantage",
- ),
- },
- Description: `must be one of ["vantage"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_vantage_data_source_sdk.go b/internal/provider/source_vantage_data_source_sdk.go
old mode 100755
new mode 100644
index d7dbfe73e..60eced43b
--- a/internal/provider/source_vantage_data_source_sdk.go
+++ b/internal/provider/source_vantage_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceVantageDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_vantage_resource.go b/internal/provider/source_vantage_resource.go
old mode 100755
new mode 100644
index 9bb5b2d91..c34f4e755
--- a/internal/provider/source_vantage_resource.go
+++ b/internal/provider/source_vantage_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceVantageResource struct {
// SourceVantageResourceModel describes the resource data model.
type SourceVantageResourceModel struct {
- Configuration SourceVantage `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration OAuth2AccessToken `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceVantageResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -56,26 +56,29 @@ func (r *SourceVantageResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your API Access token. See here.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "vantage",
- ),
- },
- Description: `must be one of ["vantage"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceVantageResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceVantage(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceVantageResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceVantageResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_vantage_resource_sdk.go b/internal/provider/source_vantage_resource_sdk.go
old mode 100755
new mode 100644
index 646343033..05f0dc9b4
--- a/internal/provider/source_vantage_resource_sdk.go
+++ b/internal/provider/source_vantage_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceVantageResourceModel) ToCreateSDKType() *shared.SourceVantageCreateRequest {
accessToken := r.Configuration.AccessToken.ValueString()
- sourceType := shared.SourceVantageVantage(r.Configuration.SourceType.ValueString())
configuration := shared.SourceVantage{
AccessToken: accessToken,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceVantageResourceModel) ToCreateSDKType() *shared.SourceVantageCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceVantageCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_webflow_data_source.go b/internal/provider/source_webflow_data_source.go
old mode 100755
new mode 100644
index fa9674bbf..fedd2e25b
--- a/internal/provider/source_webflow_data_source.go
+++ b/internal/provider/source_webflow_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceWebflowDataSource struct {
// SourceWebflowDataSourceModel describes the data model.
type SourceWebflowDataSourceModel struct {
- Configuration SourceWebflow `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,38 +47,20 @@ func (r *SourceWebflowDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceWebflow DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `The API token for authenticating to Webflow. See https://university.webflow.com/lesson/intro-to-the-webflow-api`,
- },
- "site_id": schema.StringAttribute{
- Computed: true,
- Description: `The id of the Webflow site you are requesting data from. See https://developers.webflow.com/#sites`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "webflow",
- ),
- },
- Description: `must be one of ["webflow"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_webflow_data_source_sdk.go b/internal/provider/source_webflow_data_source_sdk.go
old mode 100755
new mode 100644
index 03f3109bc..1f16cb139
--- a/internal/provider/source_webflow_data_source_sdk.go
+++ b/internal/provider/source_webflow_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceWebflowDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_webflow_resource.go b/internal/provider/source_webflow_resource.go
old mode 100755
new mode 100644
index 4b7fc09af..7cae1aa05
--- a/internal/provider/source_webflow_resource.go
+++ b/internal/provider/source_webflow_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceWebflowResource struct {
// SourceWebflowResourceModel describes the resource data model.
type SourceWebflowResourceModel struct {
Configuration SourceWebflow `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,30 +56,33 @@ func (r *SourceWebflowResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The API token for authenticating to Webflow. See https://university.webflow.com/lesson/intro-to-the-webflow-api`,
},
"site_id": schema.StringAttribute{
Required: true,
Description: `The id of the Webflow site you are requesting data from. See https://developers.webflow.com/#sites`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "webflow",
- ),
- },
- Description: `must be one of ["webflow"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -143,7 +146,7 @@ func (r *SourceWebflowResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceWebflow(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -319,5 +322,5 @@ func (r *SourceWebflowResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceWebflowResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_webflow_resource_sdk.go b/internal/provider/source_webflow_resource_sdk.go
old mode 100755
new mode 100644
index a3f013fe5..984d609a1
--- a/internal/provider/source_webflow_resource_sdk.go
+++ b/internal/provider/source_webflow_resource_sdk.go
@@ -3,18 +3,22 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceWebflowResourceModel) ToCreateSDKType() *shared.SourceWebflowCreateRequest {
apiKey := r.Configuration.APIKey.ValueString()
siteID := r.Configuration.SiteID.ValueString()
- sourceType := shared.SourceWebflowWebflow(r.Configuration.SourceType.ValueString())
configuration := shared.SourceWebflow{
- APIKey: apiKey,
- SiteID: siteID,
- SourceType: sourceType,
+ APIKey: apiKey,
+ SiteID: siteID,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -26,6 +30,7 @@ func (r *SourceWebflowResourceModel) ToCreateSDKType() *shared.SourceWebflowCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceWebflowCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_whiskyhunter_data_source.go b/internal/provider/source_whiskyhunter_data_source.go
old mode 100755
new mode 100644
index ad713ea15..e9b1e3750
--- a/internal/provider/source_whiskyhunter_data_source.go
+++ b/internal/provider/source_whiskyhunter_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceWhiskyHunterDataSource struct {
// SourceWhiskyHunterDataSourceModel describes the data model.
type SourceWhiskyHunterDataSourceModel struct {
- Configuration SourceWhiskyHunter `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,30 +47,20 @@ func (r *SourceWhiskyHunterDataSource) Schema(ctx context.Context, req datasourc
MarkdownDescription: "SourceWhiskyHunter DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "whisky-hunter",
- ),
- },
- Description: `must be one of ["whisky-hunter"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_whiskyhunter_data_source_sdk.go b/internal/provider/source_whiskyhunter_data_source_sdk.go
old mode 100755
new mode 100644
index 0f9cb489e..e138f78b4
--- a/internal/provider/source_whiskyhunter_data_source_sdk.go
+++ b/internal/provider/source_whiskyhunter_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceWhiskyHunterDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_whiskyhunter_resource.go b/internal/provider/source_whiskyhunter_resource.go
old mode 100755
new mode 100644
index a18c17682..0072322c3
--- a/internal/provider/source_whiskyhunter_resource.go
+++ b/internal/provider/source_whiskyhunter_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceWhiskyHunterResource struct {
// SourceWhiskyHunterResourceModel describes the resource data model.
type SourceWhiskyHunterResourceModel struct {
- Configuration SourceWhiskyHunter `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceWhiskyHunterResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -52,26 +52,27 @@ func (r *SourceWhiskyHunterResource) Schema(ctx context.Context, req resource.Sc
Attributes: map[string]schema.Attribute{
"configuration": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "whisky-hunter",
- ),
- },
- Description: `must be one of ["whisky-hunter"]`,
- },
+ Required: true,
+ Attributes: map[string]schema.Attribute{},
+ },
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
},
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
},
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -135,7 +136,7 @@ func (r *SourceWhiskyHunterResource) Create(ctx context.Context, req resource.Cr
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceWhiskyHunter(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -311,5 +312,5 @@ func (r *SourceWhiskyHunterResource) Delete(ctx context.Context, req resource.De
}
func (r *SourceWhiskyHunterResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_whiskyhunter_resource_sdk.go b/internal/provider/source_whiskyhunter_resource_sdk.go
old mode 100755
new mode 100644
index 7ee65e27f..b218b230a
--- a/internal/provider/source_whiskyhunter_resource_sdk.go
+++ b/internal/provider/source_whiskyhunter_resource_sdk.go
@@ -3,19 +3,17 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceWhiskyHunterResourceModel) ToCreateSDKType() *shared.SourceWhiskyHunterCreateRequest {
- sourceType := new(shared.SourceWhiskyHunterWhiskyHunter)
- if !r.Configuration.SourceType.IsUnknown() && !r.Configuration.SourceType.IsNull() {
- *sourceType = shared.SourceWhiskyHunterWhiskyHunter(r.Configuration.SourceType.ValueString())
+ configuration := shared.SourceWhiskyHunter{}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
} else {
- sourceType = nil
- }
- configuration := shared.SourceWhiskyHunter{
- SourceType: sourceType,
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -27,6 +25,7 @@ func (r *SourceWhiskyHunterResourceModel) ToCreateSDKType() *shared.SourceWhisky
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceWhiskyHunterCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_wikipediapageviews_data_source.go b/internal/provider/source_wikipediapageviews_data_source.go
old mode 100755
new mode 100644
index 131bbd28e..4553c0ee9
--- a/internal/provider/source_wikipediapageviews_data_source.go
+++ b/internal/provider/source_wikipediapageviews_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceWikipediaPageviewsDataSource struct {
// SourceWikipediaPageviewsDataSourceModel describes the data model.
type SourceWikipediaPageviewsDataSourceModel struct {
- Configuration SourceWikipediaPageviews `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,58 +47,20 @@ func (r *SourceWikipediaPageviewsDataSource) Schema(ctx context.Context, req dat
MarkdownDescription: "SourceWikipediaPageviews DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access": schema.StringAttribute{
- Computed: true,
- Description: `If you want to filter by access method, use one of desktop, mobile-app or mobile-web. If you are interested in pageviews regardless of access method, use all-access.`,
- },
- "agent": schema.StringAttribute{
- Computed: true,
- Description: `If you want to filter by agent type, use one of user, automated or spider. If you are interested in pageviews regardless of agent type, use all-agents.`,
- },
- "article": schema.StringAttribute{
- Computed: true,
- Description: `The title of any article in the specified project. Any spaces should be replaced with underscores. It also should be URI-encoded, so that non-URI-safe characters like %, / or ? are accepted.`,
- },
- "country": schema.StringAttribute{
- Computed: true,
- Description: `The ISO 3166-1 alpha-2 code of a country for which to retrieve top articles.`,
- },
- "end": schema.StringAttribute{
- Computed: true,
- Description: `The date of the last day to include, in YYYYMMDD or YYYYMMDDHH format.`,
- },
- "project": schema.StringAttribute{
- Computed: true,
- Description: `If you want to filter by project, use the domain of any Wikimedia project.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "wikipedia-pageviews",
- ),
- },
- Description: `must be one of ["wikipedia-pageviews"]`,
- },
- "start": schema.StringAttribute{
- Computed: true,
- Description: `The date of the first day to include, in YYYYMMDD or YYYYMMDDHH format.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_wikipediapageviews_data_source_sdk.go b/internal/provider/source_wikipediapageviews_data_source_sdk.go
old mode 100755
new mode 100644
index a51fb19f8..20f9be3b3
--- a/internal/provider/source_wikipediapageviews_data_source_sdk.go
+++ b/internal/provider/source_wikipediapageviews_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceWikipediaPageviewsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_wikipediapageviews_resource.go b/internal/provider/source_wikipediapageviews_resource.go
old mode 100755
new mode 100644
index 2aac17c1d..ed810b44a
--- a/internal/provider/source_wikipediapageviews_resource.go
+++ b/internal/provider/source_wikipediapageviews_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceWikipediaPageviewsResource struct {
// SourceWikipediaPageviewsResourceModel describes the resource data model.
type SourceWikipediaPageviewsResourceModel struct {
Configuration SourceWikipediaPageviews `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -78,28 +78,30 @@ func (r *SourceWikipediaPageviewsResource) Schema(ctx context.Context, req resou
Required: true,
Description: `If you want to filter by project, use the domain of any Wikimedia project.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "wikipedia-pageviews",
- ),
- },
- Description: `must be one of ["wikipedia-pageviews"]`,
- },
"start": schema.StringAttribute{
Required: true,
Description: `The date of the first day to include, in YYYYMMDD or YYYYMMDDHH format.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -163,7 +165,7 @@ func (r *SourceWikipediaPageviewsResource) Create(ctx context.Context, req resou
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceWikipediaPageviews(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -339,5 +341,5 @@ func (r *SourceWikipediaPageviewsResource) Delete(ctx context.Context, req resou
}
func (r *SourceWikipediaPageviewsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_wikipediapageviews_resource_sdk.go b/internal/provider/source_wikipediapageviews_resource_sdk.go
old mode 100755
new mode 100644
index f19adbcc0..9f13ca0f3
--- a/internal/provider/source_wikipediapageviews_resource_sdk.go
+++ b/internal/provider/source_wikipediapageviews_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -14,17 +14,21 @@ func (r *SourceWikipediaPageviewsResourceModel) ToCreateSDKType() *shared.Source
country := r.Configuration.Country.ValueString()
end := r.Configuration.End.ValueString()
project := r.Configuration.Project.ValueString()
- sourceType := shared.SourceWikipediaPageviewsWikipediaPageviews(r.Configuration.SourceType.ValueString())
start := r.Configuration.Start.ValueString()
configuration := shared.SourceWikipediaPageviews{
- Access: access,
- Agent: agent,
- Article: article,
- Country: country,
- End: end,
- Project: project,
- SourceType: sourceType,
- Start: start,
+ Access: access,
+ Agent: agent,
+ Article: article,
+ Country: country,
+ End: end,
+ Project: project,
+ Start: start,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -36,6 +40,7 @@ func (r *SourceWikipediaPageviewsResourceModel) ToCreateSDKType() *shared.Source
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceWikipediaPageviewsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_woocommerce_data_source.go b/internal/provider/source_woocommerce_data_source.go
old mode 100755
new mode 100644
index f5fa9aa58..7a3078be5
--- a/internal/provider/source_woocommerce_data_source.go
+++ b/internal/provider/source_woocommerce_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceWoocommerceDataSource struct {
// SourceWoocommerceDataSourceModel describes the data model.
type SourceWoocommerceDataSourceModel struct {
- Configuration SourceWoocommerce `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,49 +47,20 @@ func (r *SourceWoocommerceDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "SourceWoocommerce DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Computed: true,
- Description: `Customer Key for API in WooCommerce shop`,
- },
- "api_secret": schema.StringAttribute{
- Computed: true,
- Description: `Customer Secret for API in WooCommerce shop`,
- },
- "shop": schema.StringAttribute{
- Computed: true,
- Description: `The name of the store. For https://EXAMPLE.com, the shop name is 'EXAMPLE.com'.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "woocommerce",
- ),
- },
- Description: `must be one of ["woocommerce"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `The date you would like to replicate data from. Format: YYYY-MM-DD`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_woocommerce_data_source_sdk.go b/internal/provider/source_woocommerce_data_source_sdk.go
old mode 100755
new mode 100644
index 536b7828a..553874a45
--- a/internal/provider/source_woocommerce_data_source_sdk.go
+++ b/internal/provider/source_woocommerce_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceWoocommerceDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_woocommerce_resource.go b/internal/provider/source_woocommerce_resource.go
old mode 100755
new mode 100644
index cb9875953..a1adaa194
--- a/internal/provider/source_woocommerce_resource.go
+++ b/internal/provider/source_woocommerce_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceWoocommerceResource struct {
// SourceWoocommerceResourceModel describes the resource data model.
type SourceWoocommerceResourceModel struct {
Configuration SourceWoocommerce `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,6 +58,7 @@ func (r *SourceWoocommerceResource) Schema(ctx context.Context, req resource.Sch
Attributes: map[string]schema.Attribute{
"api_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Customer Key for API in WooCommerce shop`,
},
"api_secret": schema.StringAttribute{
@@ -67,31 +69,33 @@ func (r *SourceWoocommerceResource) Schema(ctx context.Context, req resource.Sch
Required: true,
Description: `The name of the store. For https://EXAMPLE.com, the shop name is 'EXAMPLE.com'.`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "woocommerce",
- ),
- },
- Description: `must be one of ["woocommerce"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The date you would like to replicate data from. Format: YYYY-MM-DD`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `The date you would like to replicate data from. Format: YYYY-MM-DD`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +159,7 @@ func (r *SourceWoocommerceResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceWoocommerce(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +335,5 @@ func (r *SourceWoocommerceResource) Delete(ctx context.Context, req resource.Del
}
func (r *SourceWoocommerceResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_woocommerce_resource_sdk.go b/internal/provider/source_woocommerce_resource_sdk.go
old mode 100755
new mode 100644
index ce0d7a8c2..ed8861c4c
--- a/internal/provider/source_woocommerce_resource_sdk.go
+++ b/internal/provider/source_woocommerce_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -12,14 +12,18 @@ func (r *SourceWoocommerceResourceModel) ToCreateSDKType() *shared.SourceWoocomm
apiKey := r.Configuration.APIKey.ValueString()
apiSecret := r.Configuration.APISecret.ValueString()
shop := r.Configuration.Shop.ValueString()
- sourceType := shared.SourceWoocommerceWoocommerce(r.Configuration.SourceType.ValueString())
startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
configuration := shared.SourceWoocommerce{
- APIKey: apiKey,
- APISecret: apiSecret,
- Shop: shop,
- SourceType: sourceType,
- StartDate: startDate,
+ APIKey: apiKey,
+ APISecret: apiSecret,
+ Shop: shop,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -31,6 +35,7 @@ func (r *SourceWoocommerceResourceModel) ToCreateSDKType() *shared.SourceWoocomm
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceWoocommerceCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_xero_data_source.go b/internal/provider/source_xero_data_source.go
deleted file mode 100755
index 13a3a91ae..000000000
--- a/internal/provider/source_xero_data_source.go
+++ /dev/null
@@ -1,186 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
- "context"
- "fmt"
-
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ datasource.DataSource = &SourceXeroDataSource{}
-var _ datasource.DataSourceWithConfigure = &SourceXeroDataSource{}
-
-func NewSourceXeroDataSource() datasource.DataSource {
- return &SourceXeroDataSource{}
-}
-
-// SourceXeroDataSource is the data source implementation.
-type SourceXeroDataSource struct {
- client *sdk.SDK
-}
-
-// SourceXeroDataSourceModel describes the data model.
-type SourceXeroDataSourceModel struct {
- Configuration SourceXero `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-// Metadata returns the data source type name.
-func (r *SourceXeroDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_xero"
-}
-
-// Schema defines the schema for the data source.
-func (r *SourceXeroDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "SourceXero DataSource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "authentication": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Xero application's access token`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Xero application's Client ID`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Xero application's Client Secret`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Xero application's refresh token`,
- },
- "token_expiry_date": schema.StringAttribute{
- Computed: true,
- Description: `The date-time when the access token should be refreshed`,
- },
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xero",
- ),
- },
- Description: `must be one of ["xero"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `UTC date and time in the format YYYY-MM-DDTHH:mm:ssZ. Any data with created_at before this data will not be synced.`,
- },
- "tenant_id": schema.StringAttribute{
- Computed: true,
- Description: `Enter your Xero organization's Tenant ID`,
- },
- },
- },
- "name": schema.StringAttribute{
- Computed: true,
- },
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
- "source_id": schema.StringAttribute{
- Required: true,
- },
- "workspace_id": schema.StringAttribute{
- Computed: true,
- },
- },
- }
-}
-
-func (r *SourceXeroDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected DataSource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *SourceXeroDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data *SourceXeroDataSourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.GetSourceXeroRequest{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.GetSourceXero(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
diff --git a/internal/provider/source_xero_data_source_sdk.go b/internal/provider/source_xero_data_source_sdk.go
deleted file mode 100755
index bfaeb6b6e..000000000
--- a/internal/provider/source_xero_data_source_sdk.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceXeroDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
diff --git a/internal/provider/source_xero_resource_sdk.go b/internal/provider/source_xero_resource_sdk.go
deleted file mode 100755
index d002727e0..000000000
--- a/internal/provider/source_xero_resource_sdk.go
+++ /dev/null
@@ -1,99 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "time"
-)
-
-func (r *SourceXeroResourceModel) ToCreateSDKType() *shared.SourceXeroCreateRequest {
- accessToken := r.Configuration.Authentication.AccessToken.ValueString()
- clientID := r.Configuration.Authentication.ClientID.ValueString()
- clientSecret := r.Configuration.Authentication.ClientSecret.ValueString()
- refreshToken := r.Configuration.Authentication.RefreshToken.ValueString()
- tokenExpiryDate := r.Configuration.Authentication.TokenExpiryDate.ValueString()
- authentication := shared.SourceXeroAuthenticateViaXeroOAuth{
- AccessToken: accessToken,
- ClientID: clientID,
- ClientSecret: clientSecret,
- RefreshToken: refreshToken,
- TokenExpiryDate: tokenExpiryDate,
- }
- sourceType := shared.SourceXeroXero(r.Configuration.SourceType.ValueString())
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
- tenantID := r.Configuration.TenantID.ValueString()
- configuration := shared.SourceXero{
- Authentication: authentication,
- SourceType: sourceType,
- StartDate: startDate,
- TenantID: tenantID,
- }
- name := r.Name.ValueString()
- secretID := new(string)
- if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
- *secretID = r.SecretID.ValueString()
- } else {
- secretID = nil
- }
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceXeroCreateRequest{
- Configuration: configuration,
- Name: name,
- SecretID: secretID,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceXeroResourceModel) ToGetSDKType() *shared.SourceXeroCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceXeroResourceModel) ToUpdateSDKType() *shared.SourceXeroPutRequest {
- accessToken := r.Configuration.Authentication.AccessToken.ValueString()
- clientID := r.Configuration.Authentication.ClientID.ValueString()
- clientSecret := r.Configuration.Authentication.ClientSecret.ValueString()
- refreshToken := r.Configuration.Authentication.RefreshToken.ValueString()
- tokenExpiryDate := r.Configuration.Authentication.TokenExpiryDate.ValueString()
- authentication := shared.SourceXeroUpdateAuthenticateViaXeroOAuth{
- AccessToken: accessToken,
- ClientID: clientID,
- ClientSecret: clientSecret,
- RefreshToken: refreshToken,
- TokenExpiryDate: tokenExpiryDate,
- }
- startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
- tenantID := r.Configuration.TenantID.ValueString()
- configuration := shared.SourceXeroUpdate{
- Authentication: authentication,
- StartDate: startDate,
- TenantID: tenantID,
- }
- name := r.Name.ValueString()
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceXeroPutRequest{
- Configuration: configuration,
- Name: name,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceXeroResourceModel) ToDeleteSDKType() *shared.SourceXeroCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceXeroResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.SourceType = types.StringValue(resp.SourceType)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
-
-func (r *SourceXeroResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
- r.RefreshFromGetResponse(resp)
-}
diff --git a/internal/provider/source_xkcd_data_source.go b/internal/provider/source_xkcd_data_source.go
old mode 100755
new mode 100644
index c9a3167a4..e09a7065d
--- a/internal/provider/source_xkcd_data_source.go
+++ b/internal/provider/source_xkcd_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceXkcdDataSource struct {
// SourceXkcdDataSourceModel describes the data model.
type SourceXkcdDataSourceModel struct {
- Configuration SourceXkcd `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,30 +47,20 @@ func (r *SourceXkcdDataSource) Schema(ctx context.Context, req datasource.Schema
MarkdownDescription: "SourceXkcd DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xkcd",
- ),
- },
- Description: `must be one of ["xkcd"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_xkcd_data_source_sdk.go b/internal/provider/source_xkcd_data_source_sdk.go
old mode 100755
new mode 100644
index 315212243..5c400f814
--- a/internal/provider/source_xkcd_data_source_sdk.go
+++ b/internal/provider/source_xkcd_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceXkcdDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_xkcd_resource.go b/internal/provider/source_xkcd_resource.go
old mode 100755
new mode 100644
index d621e3eb6..a91cfe69f
--- a/internal/provider/source_xkcd_resource.go
+++ b/internal/provider/source_xkcd_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -34,12 +33,13 @@ type SourceXkcdResource struct {
// SourceXkcdResourceModel describes the resource data model.
type SourceXkcdResourceModel struct {
- Configuration SourceXkcd `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
func (r *SourceXkcdResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
@@ -52,26 +52,27 @@ func (r *SourceXkcdResource) Schema(ctx context.Context, req resource.SchemaRequ
Attributes: map[string]schema.Attribute{
"configuration": schema.SingleNestedAttribute{
- Required: true,
- Attributes: map[string]schema.Attribute{
- "source_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "xkcd",
- ),
- },
- Description: `must be one of ["xkcd"]`,
- },
+ Required: true,
+ Attributes: map[string]schema.Attribute{},
+ },
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
},
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
},
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -135,7 +136,7 @@ func (r *SourceXkcdResource) Create(ctx context.Context, req resource.CreateRequ
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceXkcd(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -311,5 +312,5 @@ func (r *SourceXkcdResource) Delete(ctx context.Context, req resource.DeleteRequ
}
func (r *SourceXkcdResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_xkcd_resource_sdk.go b/internal/provider/source_xkcd_resource_sdk.go
old mode 100755
new mode 100644
index 196c6c242..159b2044f
--- a/internal/provider/source_xkcd_resource_sdk.go
+++ b/internal/provider/source_xkcd_resource_sdk.go
@@ -3,19 +3,17 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceXkcdResourceModel) ToCreateSDKType() *shared.SourceXkcdCreateRequest {
- sourceType := new(shared.SourceXkcdXkcd)
- if !r.Configuration.SourceType.IsUnknown() && !r.Configuration.SourceType.IsNull() {
- *sourceType = shared.SourceXkcdXkcd(r.Configuration.SourceType.ValueString())
+ configuration := shared.SourceXkcd{}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
} else {
- sourceType = nil
- }
- configuration := shared.SourceXkcd{
- SourceType: sourceType,
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -27,6 +25,7 @@ func (r *SourceXkcdResourceModel) ToCreateSDKType() *shared.SourceXkcdCreateRequ
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceXkcdCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_yandexmetrica_data_source.go b/internal/provider/source_yandexmetrica_data_source.go
old mode 100755
new mode 100644
index 206f8e326..f2bd59b5d
--- a/internal/provider/source_yandexmetrica_data_source.go
+++ b/internal/provider/source_yandexmetrica_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceYandexMetricaDataSource struct {
// SourceYandexMetricaDataSourceModel describes the data model.
type SourceYandexMetricaDataSourceModel struct {
- Configuration SourceYandexMetrica `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,52 +47,20 @@ func (r *SourceYandexMetricaDataSource) Schema(ctx context.Context, req datasour
MarkdownDescription: "SourceYandexMetrica DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "auth_token": schema.StringAttribute{
- Computed: true,
- Description: `Your Yandex Metrica API access token`,
- },
- "counter_id": schema.StringAttribute{
- Computed: true,
- Description: `Counter ID`,
- },
- "end_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `Starting point for your data replication, in format of "YYYY-MM-DD". If not provided will sync till most recent date.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "yandex-metrica",
- ),
- },
- Description: `must be one of ["yandex-metrica"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
- Description: `Starting point for your data replication, in format of "YYYY-MM-DD".`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_yandexmetrica_data_source_sdk.go b/internal/provider/source_yandexmetrica_data_source_sdk.go
old mode 100755
new mode 100644
index 863bf98a0..141006fee
--- a/internal/provider/source_yandexmetrica_data_source_sdk.go
+++ b/internal/provider/source_yandexmetrica_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceYandexMetricaDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_yandexmetrica_resource.go b/internal/provider/source_yandexmetrica_resource.go
old mode 100755
new mode 100644
index f8be98af6..a52148f55
--- a/internal/provider/source_yandexmetrica_resource.go
+++ b/internal/provider/source_yandexmetrica_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceYandexMetricaResource struct {
// SourceYandexMetricaResourceModel describes the resource data model.
type SourceYandexMetricaResourceModel struct {
Configuration SourceYandexMetrica `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,6 +58,7 @@ func (r *SourceYandexMetricaResource) Schema(ctx context.Context, req resource.S
Attributes: map[string]schema.Attribute{
"auth_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Your Yandex Metrica API access token`,
},
"counter_id": schema.StringAttribute{
@@ -64,37 +66,39 @@ func (r *SourceYandexMetricaResource) Schema(ctx context.Context, req resource.S
Description: `Counter ID`,
},
"end_date": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidDate(),
- },
+ Optional: true,
Description: `Starting point for your data replication, in format of "YYYY-MM-DD". If not provided will sync till most recent date.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
Validators: []validator.String{
- stringvalidator.OneOf(
- "yandex-metrica",
- ),
+ validators.IsValidDate(),
},
- Description: `must be one of ["yandex-metrica"]`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `Starting point for your data replication, in format of "YYYY-MM-DD".`,
Validators: []validator.String{
validators.IsValidDate(),
},
- Description: `Starting point for your data replication, in format of "YYYY-MM-DD".`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -158,7 +162,7 @@ func (r *SourceYandexMetricaResource) Create(ctx context.Context, req resource.C
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceYandexMetrica(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -334,5 +338,5 @@ func (r *SourceYandexMetricaResource) Delete(ctx context.Context, req resource.D
}
func (r *SourceYandexMetricaResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_yandexmetrica_resource_sdk.go b/internal/provider/source_yandexmetrica_resource_sdk.go
old mode 100755
new mode 100644
index 8501a3ec2..c015b3a0f
--- a/internal/provider/source_yandexmetrica_resource_sdk.go
+++ b/internal/provider/source_yandexmetrica_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
- customTypes "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ customTypes "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -17,14 +17,18 @@ func (r *SourceYandexMetricaResourceModel) ToCreateSDKType() *shared.SourceYande
} else {
endDate = nil
}
- sourceType := shared.SourceYandexMetricaYandexMetrica(r.Configuration.SourceType.ValueString())
startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString())
configuration := shared.SourceYandexMetrica{
- AuthToken: authToken,
- CounterID: counterID,
- EndDate: endDate,
- SourceType: sourceType,
- StartDate: startDate,
+ AuthToken: authToken,
+ CounterID: counterID,
+ EndDate: endDate,
+ StartDate: startDate,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -36,6 +40,7 @@ func (r *SourceYandexMetricaResourceModel) ToCreateSDKType() *shared.SourceYande
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceYandexMetricaCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_yotpo_data_source.go b/internal/provider/source_yotpo_data_source.go
old mode 100755
new mode 100644
index 9eddb6c5b..be19dfc4f
--- a/internal/provider/source_yotpo_data_source.go
+++ b/internal/provider/source_yotpo_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,10 +29,10 @@ type SourceYotpoDataSource struct {
// SourceYotpoDataSourceModel describes the data model.
type SourceYotpoDataSourceModel struct {
- Configuration SourceYotpo `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -50,49 +47,20 @@ func (r *SourceYotpoDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceYotpo DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access token recieved as a result of API call to https://api.yotpo.com/oauth/token (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)`,
- },
- "app_key": schema.StringAttribute{
- Computed: true,
- Description: `App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `Email address registered with yotpo.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "yotpo",
- ),
- },
- Description: `must be one of ["yotpo"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `Date time filter for incremental filter, Specify which date to extract from.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_yotpo_data_source_sdk.go b/internal/provider/source_yotpo_data_source_sdk.go
old mode 100755
new mode 100644
index 24809168c..e29dfab0f
--- a/internal/provider/source_yotpo_data_source_sdk.go
+++ b/internal/provider/source_yotpo_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceYotpoDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_yotpo_resource.go b/internal/provider/source_yotpo_resource.go
old mode 100755
new mode 100644
index 37386725e..406ce0b47
--- a/internal/provider/source_yotpo_resource.go
+++ b/internal/provider/source_yotpo_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceYotpoResource struct {
// SourceYotpoResourceModel describes the resource data model.
type SourceYotpoResourceModel struct {
Configuration SourceYotpo `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -57,41 +58,46 @@ func (r *SourceYotpoResource) Schema(ctx context.Context, req resource.SchemaReq
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Access token recieved as a result of API call to https://api.yotpo.com/oauth/token (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)`,
},
"app_key": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)`,
},
"email": schema.StringAttribute{
- Required: true,
- Description: `Email address registered with yotpo.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "yotpo",
- ),
- },
- Description: `must be one of ["yotpo"]`,
+ Optional: true,
+ MarkdownDescription: `Default: "example@gmail.com"` + "\n" +
+ `Email address registered with yotpo.`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `Date time filter for incremental filter, Specify which date to extract from.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `Date time filter for incremental filter, Specify which date to extract from.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -155,7 +161,7 @@ func (r *SourceYotpoResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceYotpo(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -331,5 +337,5 @@ func (r *SourceYotpoResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceYotpoResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_yotpo_resource_sdk.go b/internal/provider/source_yotpo_resource_sdk.go
old mode 100755
new mode 100644
index a88386bbf..2a6a4af47
--- a/internal/provider/source_yotpo_resource_sdk.go
+++ b/internal/provider/source_yotpo_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -11,16 +11,25 @@ import (
func (r *SourceYotpoResourceModel) ToCreateSDKType() *shared.SourceYotpoCreateRequest {
accessToken := r.Configuration.AccessToken.ValueString()
appKey := r.Configuration.AppKey.ValueString()
- email := r.Configuration.Email.ValueString()
- sourceType := shared.SourceYotpoYotpo(r.Configuration.SourceType.ValueString())
+ email := new(string)
+ if !r.Configuration.Email.IsUnknown() && !r.Configuration.Email.IsNull() {
+ *email = r.Configuration.Email.ValueString()
+ } else {
+ email = nil
+ }
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceYotpo{
AccessToken: accessToken,
AppKey: appKey,
Email: email,
- SourceType: sourceType,
StartDate: startDate,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -31,6 +40,7 @@ func (r *SourceYotpoResourceModel) ToCreateSDKType() *shared.SourceYotpoCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceYotpoCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -46,7 +56,12 @@ func (r *SourceYotpoResourceModel) ToGetSDKType() *shared.SourceYotpoCreateReque
func (r *SourceYotpoResourceModel) ToUpdateSDKType() *shared.SourceYotpoPutRequest {
accessToken := r.Configuration.AccessToken.ValueString()
appKey := r.Configuration.AppKey.ValueString()
- email := r.Configuration.Email.ValueString()
+ email := new(string)
+ if !r.Configuration.Email.IsUnknown() && !r.Configuration.Email.IsNull() {
+ *email = r.Configuration.Email.ValueString()
+ } else {
+ email = nil
+ }
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
configuration := shared.SourceYotpoUpdate{
AccessToken: accessToken,
diff --git a/internal/provider/source_younium_data_source.go b/internal/provider/source_younium_data_source.go
deleted file mode 100755
index 10c82869b..000000000
--- a/internal/provider/source_younium_data_source.go
+++ /dev/null
@@ -1,165 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
- "context"
- "fmt"
-
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
- "github.com/hashicorp/terraform-plugin-framework/datasource"
- "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "github.com/hashicorp/terraform-plugin-framework/types"
- "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
-)
-
-// Ensure provider defined types fully satisfy framework interfaces.
-var _ datasource.DataSource = &SourceYouniumDataSource{}
-var _ datasource.DataSourceWithConfigure = &SourceYouniumDataSource{}
-
-func NewSourceYouniumDataSource() datasource.DataSource {
- return &SourceYouniumDataSource{}
-}
-
-// SourceYouniumDataSource is the data source implementation.
-type SourceYouniumDataSource struct {
- client *sdk.SDK
-}
-
-// SourceYouniumDataSourceModel describes the data model.
-type SourceYouniumDataSourceModel struct {
- Configuration SourceYounium `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
-}
-
-// Metadata returns the data source type name.
-func (r *SourceYouniumDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_younium"
-}
-
-// Schema defines the schema for the data source.
-func (r *SourceYouniumDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
- resp.Schema = schema.Schema{
- MarkdownDescription: "SourceYounium DataSource",
-
- Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "legal_entity": schema.StringAttribute{
- Computed: true,
- Description: `Legal Entity that data should be pulled from`,
- },
- "password": schema.StringAttribute{
- Computed: true,
- Description: `Account password for younium account API key`,
- },
- "playground": schema.BoolAttribute{
- Computed: true,
- Description: `Property defining if connector is used against playground or production environment`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "younium",
- ),
- },
- Description: `must be one of ["younium"]`,
- },
- "username": schema.StringAttribute{
- Computed: true,
- Description: `Username for Younium account`,
- },
- },
- },
- "name": schema.StringAttribute{
- Computed: true,
- },
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
- "source_id": schema.StringAttribute{
- Required: true,
- },
- "workspace_id": schema.StringAttribute{
- Computed: true,
- },
- },
- }
-}
-
-func (r *SourceYouniumDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
- // Prevent panic if the provider has not been configured.
- if req.ProviderData == nil {
- return
- }
-
- client, ok := req.ProviderData.(*sdk.SDK)
-
- if !ok {
- resp.Diagnostics.AddError(
- "Unexpected DataSource Configure Type",
- fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
- )
-
- return
- }
-
- r.client = client
-}
-
-func (r *SourceYouniumDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
- var data *SourceYouniumDataSourceModel
- var item types.Object
-
- resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
- if resp.Diagnostics.HasError() {
- return
- }
-
- resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
- UnhandledNullAsEmpty: true,
- UnhandledUnknownAsEmpty: true,
- })...)
-
- if resp.Diagnostics.HasError() {
- return
- }
-
- sourceID := data.SourceID.ValueString()
- request := operations.GetSourceYouniumRequest{
- SourceID: sourceID,
- }
- res, err := r.client.Sources.GetSourceYounium(ctx, request)
- if err != nil {
- resp.Diagnostics.AddError("failure to invoke API", err.Error())
- if res != nil && res.RawResponse != nil {
- resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
- }
- return
- }
- if res == nil {
- resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
- return
- }
- if res.StatusCode != 200 {
- resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
- return
- }
- if res.SourceResponse == nil {
- resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
- return
- }
- data.RefreshFromGetResponse(res.SourceResponse)
-
- // Save updated data into Terraform state
- resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
-}
diff --git a/internal/provider/source_younium_data_source_sdk.go b/internal/provider/source_younium_data_source_sdk.go
deleted file mode 100755
index b73591c2b..000000000
--- a/internal/provider/source_younium_data_source_sdk.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceYouniumDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
diff --git a/internal/provider/source_younium_resource_sdk.go b/internal/provider/source_younium_resource_sdk.go
deleted file mode 100755
index 2f69bb4e7..000000000
--- a/internal/provider/source_younium_resource_sdk.go
+++ /dev/null
@@ -1,90 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "github.com/hashicorp/terraform-plugin-framework/types"
-)
-
-func (r *SourceYouniumResourceModel) ToCreateSDKType() *shared.SourceYouniumCreateRequest {
- legalEntity := r.Configuration.LegalEntity.ValueString()
- password := r.Configuration.Password.ValueString()
- playground := new(bool)
- if !r.Configuration.Playground.IsUnknown() && !r.Configuration.Playground.IsNull() {
- *playground = r.Configuration.Playground.ValueBool()
- } else {
- playground = nil
- }
- sourceType := shared.SourceYouniumYounium(r.Configuration.SourceType.ValueString())
- username := r.Configuration.Username.ValueString()
- configuration := shared.SourceYounium{
- LegalEntity: legalEntity,
- Password: password,
- Playground: playground,
- SourceType: sourceType,
- Username: username,
- }
- name := r.Name.ValueString()
- secretID := new(string)
- if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
- *secretID = r.SecretID.ValueString()
- } else {
- secretID = nil
- }
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceYouniumCreateRequest{
- Configuration: configuration,
- Name: name,
- SecretID: secretID,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceYouniumResourceModel) ToGetSDKType() *shared.SourceYouniumCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceYouniumResourceModel) ToUpdateSDKType() *shared.SourceYouniumPutRequest {
- legalEntity := r.Configuration.LegalEntity.ValueString()
- password := r.Configuration.Password.ValueString()
- playground := new(bool)
- if !r.Configuration.Playground.IsUnknown() && !r.Configuration.Playground.IsNull() {
- *playground = r.Configuration.Playground.ValueBool()
- } else {
- playground = nil
- }
- username := r.Configuration.Username.ValueString()
- configuration := shared.SourceYouniumUpdate{
- LegalEntity: legalEntity,
- Password: password,
- Playground: playground,
- Username: username,
- }
- name := r.Name.ValueString()
- workspaceID := r.WorkspaceID.ValueString()
- out := shared.SourceYouniumPutRequest{
- Configuration: configuration,
- Name: name,
- WorkspaceID: workspaceID,
- }
- return &out
-}
-
-func (r *SourceYouniumResourceModel) ToDeleteSDKType() *shared.SourceYouniumCreateRequest {
- out := r.ToCreateSDKType()
- return out
-}
-
-func (r *SourceYouniumResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
- r.Name = types.StringValue(resp.Name)
- r.SourceID = types.StringValue(resp.SourceID)
- r.SourceType = types.StringValue(resp.SourceType)
- r.WorkspaceID = types.StringValue(resp.WorkspaceID)
-}
-
-func (r *SourceYouniumResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
- r.RefreshFromGetResponse(resp)
-}
diff --git a/internal/provider/source_youtubeanalytics_data_source.go b/internal/provider/source_youtubeanalytics_data_source.go
old mode 100755
new mode 100644
index 16da34d65..5de9f98ad
--- a/internal/provider/source_youtubeanalytics_data_source.go
+++ b/internal/provider/source_youtubeanalytics_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceYoutubeAnalyticsDataSource struct {
// SourceYoutubeAnalyticsDataSourceModel describes the data model.
type SourceYoutubeAnalyticsDataSourceModel struct {
- Configuration SourceYoutubeAnalytics1 `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,54 +47,20 @@ func (r *SourceYoutubeAnalyticsDataSource) Schema(ctx context.Context, req datas
MarkdownDescription: "SourceYoutubeAnalytics DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your developer application`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The client secret of your developer application`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `A refresh token generated using the above client ID and secret`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "youtube-analytics",
- ),
- },
- Description: `must be one of ["youtube-analytics"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_youtubeanalytics_data_source_sdk.go b/internal/provider/source_youtubeanalytics_data_source_sdk.go
old mode 100755
new mode 100644
index 0c99210ea..0168075d3
--- a/internal/provider/source_youtubeanalytics_data_source_sdk.go
+++ b/internal/provider/source_youtubeanalytics_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceYoutubeAnalyticsDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_youtubeanalytics_resource.go b/internal/provider/source_youtubeanalytics_resource.go
old mode 100755
new mode 100644
index d43c490e6..58aba0b40
--- a/internal/provider/source_youtubeanalytics_resource.go
+++ b/internal/provider/source_youtubeanalytics_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceYoutubeAnalyticsResource struct {
// SourceYoutubeAnalyticsResourceModel describes the resource data model.
type SourceYoutubeAnalyticsResourceModel struct {
Configuration SourceYoutubeAnalytics `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,6 +59,13 @@ func (r *SourceYoutubeAnalyticsResource) Schema(ctx context.Context, req resourc
"credentials": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
+ "additional_properties": schema.StringAttribute{
+ Optional: true,
+ Description: `Parsed as JSON.`,
+ Validators: []validator.String{
+ validators.IsValidJSON(),
+ },
+ },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of your developer application`,
@@ -68,35 +76,31 @@ func (r *SourceYoutubeAnalyticsResource) Schema(ctx context.Context, req resourc
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `A refresh token generated using the above client ID and secret`,
},
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "youtube-analytics",
- ),
},
- Description: `must be one of ["youtube-analytics"]`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -160,7 +164,7 @@ func (r *SourceYoutubeAnalyticsResource) Create(ctx context.Context, req resourc
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceYoutubeAnalytics(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -336,5 +340,5 @@ func (r *SourceYoutubeAnalyticsResource) Delete(ctx context.Context, req resourc
}
func (r *SourceYoutubeAnalyticsResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_youtubeanalytics_resource_sdk.go b/internal/provider/source_youtubeanalytics_resource_sdk.go
old mode 100755
new mode 100644
index a8580f02f..1464e99c5
--- a/internal/provider/source_youtubeanalytics_resource_sdk.go
+++ b/internal/provider/source_youtubeanalytics_resource_sdk.go
@@ -3,29 +3,33 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
"encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceYoutubeAnalyticsResourceModel) ToCreateSDKType() *shared.SourceYoutubeAnalyticsCreateRequest {
- clientID := r.Configuration.Credentials.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.RefreshToken.ValueString()
var additionalProperties interface{}
if !r.Configuration.Credentials.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.AdditionalProperties.IsNull() {
_ = json.Unmarshal([]byte(r.Configuration.Credentials.AdditionalProperties.ValueString()), &additionalProperties)
}
+ clientID := r.Configuration.Credentials.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.RefreshToken.ValueString()
credentials := shared.SourceYoutubeAnalyticsAuthenticateViaOAuth20{
+ AdditionalProperties: additionalProperties,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
- AdditionalProperties: additionalProperties,
}
- sourceType := shared.SourceYoutubeAnalyticsYoutubeAnalytics(r.Configuration.SourceType.ValueString())
configuration := shared.SourceYoutubeAnalytics{
Credentials: credentials,
- SourceType: sourceType,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -37,6 +41,7 @@ func (r *SourceYoutubeAnalyticsResourceModel) ToCreateSDKType() *shared.SourceYo
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceYoutubeAnalyticsCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -50,18 +55,18 @@ func (r *SourceYoutubeAnalyticsResourceModel) ToGetSDKType() *shared.SourceYoutu
}
func (r *SourceYoutubeAnalyticsResourceModel) ToUpdateSDKType() *shared.SourceYoutubeAnalyticsPutRequest {
- clientID := r.Configuration.Credentials.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.ClientSecret.ValueString()
- refreshToken := r.Configuration.Credentials.RefreshToken.ValueString()
var additionalProperties interface{}
if !r.Configuration.Credentials.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.AdditionalProperties.IsNull() {
_ = json.Unmarshal([]byte(r.Configuration.Credentials.AdditionalProperties.ValueString()), &additionalProperties)
}
- credentials := shared.SourceYoutubeAnalyticsUpdateAuthenticateViaOAuth20{
+ clientID := r.Configuration.Credentials.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.ClientSecret.ValueString()
+ refreshToken := r.Configuration.Credentials.RefreshToken.ValueString()
+ credentials := shared.AuthenticateViaOAuth20{
+ AdditionalProperties: additionalProperties,
ClientID: clientID,
ClientSecret: clientSecret,
RefreshToken: refreshToken,
- AdditionalProperties: additionalProperties,
}
configuration := shared.SourceYoutubeAnalyticsUpdate{
Credentials: credentials,
diff --git a/internal/provider/source_zendeskchat_data_source.go b/internal/provider/source_zendeskchat_data_source.go
old mode 100755
new mode 100644
index db95de466..1abeb6cae
--- a/internal/provider/source_zendeskchat_data_source.go
+++ b/internal/provider/source_zendeskchat_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceZendeskChatDataSource struct {
// SourceZendeskChatDataSourceModel describes the data model.
type SourceZendeskChatDataSourceModel struct {
- Configuration SourceZendeskChat `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,145 +47,20 @@ func (r *SourceZendeskChatDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "SourceZendeskChat DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_zendesk_chat_authorization_method_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The Access Token to make authenticated requests.`,
- },
- "credentials": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_zendesk_chat_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- "credentials": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token to obtain new Access Token, when it's expired.`,
- },
- },
- },
- "source_zendesk_chat_update_authorization_method_access_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The Access Token to make authenticated requests.`,
- },
- "credentials": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_zendesk_chat_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- "credentials": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `Refresh Token to obtain new Access Token, when it's expired.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zendesk-chat",
- ),
- },
- Description: `must be one of ["zendesk-chat"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for Zendesk Chat API, in the format YYYY-MM-DDT00:00:00Z.`,
- },
- "subdomain": schema.StringAttribute{
- Computed: true,
- Description: `Required if you access Zendesk Chat from a Zendesk Support subdomain.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_zendeskchat_data_source_sdk.go b/internal/provider/source_zendeskchat_data_source_sdk.go
old mode 100755
new mode 100644
index 347f279bc..5ad0253de
--- a/internal/provider/source_zendeskchat_data_source_sdk.go
+++ b/internal/provider/source_zendeskchat_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceZendeskChatDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_zendeskchat_resource.go b/internal/provider/source_zendeskchat_resource.go
old mode 100755
new mode 100644
index f807856c2..64ca994f6
--- a/internal/provider/source_zendeskchat_resource.go
+++ b/internal/provider/source_zendeskchat_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceZendeskChatResource struct {
// SourceZendeskChatResourceModel describes the resource data model.
type SourceZendeskChatResourceModel struct {
Configuration SourceZendeskChat `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,29 +59,22 @@ func (r *SourceZendeskChatResource) Schema(ctx context.Context, req resource.Sch
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_zendesk_chat_authorization_method_access_token": schema.SingleNestedAttribute{
+ "access_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The Access Token to make authenticated requests.`,
},
- "credentials": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
},
},
- "source_zendesk_chat_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Access Token for making authenticated requests.`,
},
"client_id": schema.StringAttribute{
@@ -91,65 +85,9 @@ func (r *SourceZendeskChatResource) Schema(ctx context.Context, req resource.Sch
Optional: true,
Description: `The Client Secret of your OAuth application.`,
},
- "credentials": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "refresh_token": schema.StringAttribute{
- Optional: true,
- Description: `Refresh Token to obtain new Access Token, when it's expired.`,
- },
- },
- },
- "source_zendesk_chat_update_authorization_method_access_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `The Access Token to make authenticated requests.`,
- },
- "credentials": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "access_token",
- ),
- },
- Description: `must be one of ["access_token"]`,
- },
- },
- },
- "source_zendesk_chat_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Optional: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "client_id": schema.StringAttribute{
- Optional: true,
- Description: `The Client ID of your OAuth application`,
- },
- "client_secret": schema.StringAttribute{
- Optional: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- "credentials": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"refresh_token": schema.StringAttribute{
Optional: true,
+ Sensitive: true,
Description: `Refresh Token to obtain new Access Token, when it's expired.`,
},
},
@@ -159,35 +97,38 @@ func (r *SourceZendeskChatResource) Schema(ctx context.Context, req resource.Sch
validators.ExactlyOneChild(),
},
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zendesk-chat",
- ),
- },
- Description: `must be one of ["zendesk-chat"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The date from which you'd like to replicate data for Zendesk Chat API, in the format YYYY-MM-DDT00:00:00Z.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for Zendesk Chat API, in the format YYYY-MM-DDT00:00:00Z.`,
},
"subdomain": schema.StringAttribute{
- Optional: true,
- Description: `Required if you access Zendesk Chat from a Zendesk Support subdomain.`,
+ Optional: true,
+ MarkdownDescription: `Default: ""` + "\n" +
+ `Required if you access Zendesk Chat from a Zendesk Support subdomain.`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -251,7 +192,7 @@ func (r *SourceZendeskChatResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceZendeskChat(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -427,5 +368,5 @@ func (r *SourceZendeskChatResource) Delete(ctx context.Context, req resource.Del
}
func (r *SourceZendeskChatResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_zendeskchat_resource_sdk.go b/internal/provider/source_zendeskchat_resource_sdk.go
old mode 100755
new mode 100644
index 934bc48c2..5777823c7
--- a/internal/provider/source_zendeskchat_resource_sdk.go
+++ b/internal/provider/source_zendeskchat_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -11,62 +11,57 @@ import (
func (r *SourceZendeskChatResourceModel) ToCreateSDKType() *shared.SourceZendeskChatCreateRequest {
var credentials *shared.SourceZendeskChatAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceZendeskChatAuthorizationMethodOAuth20 *shared.SourceZendeskChatAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20 != nil {
+ var sourceZendeskChatOAuth20 *shared.SourceZendeskChatOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
accessToken := new(string)
- if !r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.AccessToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
} else {
accessToken = nil
}
clientID := new(string)
- if !r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- credentials1 := shared.SourceZendeskChatAuthorizationMethodOAuth20Credentials(r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.Credentials.ValueString())
refreshToken := new(string)
- if !r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.RefreshToken.IsUnknown() && !r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.RefreshToken.IsNull() {
- *refreshToken = r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodOAuth20.RefreshToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.RefreshToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.RefreshToken.IsNull() {
+ *refreshToken = r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
} else {
refreshToken = nil
}
- sourceZendeskChatAuthorizationMethodOAuth20 = &shared.SourceZendeskChatAuthorizationMethodOAuth20{
+ sourceZendeskChatOAuth20 = &shared.SourceZendeskChatOAuth20{
AccessToken: accessToken,
ClientID: clientID,
ClientSecret: clientSecret,
- Credentials: credentials1,
RefreshToken: refreshToken,
}
}
- if sourceZendeskChatAuthorizationMethodOAuth20 != nil {
+ if sourceZendeskChatOAuth20 != nil {
credentials = &shared.SourceZendeskChatAuthorizationMethod{
- SourceZendeskChatAuthorizationMethodOAuth20: sourceZendeskChatAuthorizationMethodOAuth20,
+ SourceZendeskChatOAuth20: sourceZendeskChatOAuth20,
}
}
- var sourceZendeskChatAuthorizationMethodAccessToken *shared.SourceZendeskChatAuthorizationMethodAccessToken
- if r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodAccessToken != nil {
- accessToken1 := r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodAccessToken.AccessToken.ValueString()
- credentials2 := shared.SourceZendeskChatAuthorizationMethodAccessTokenCredentials(r.Configuration.Credentials.SourceZendeskChatAuthorizationMethodAccessToken.Credentials.ValueString())
- sourceZendeskChatAuthorizationMethodAccessToken = &shared.SourceZendeskChatAuthorizationMethodAccessToken{
+ var sourceZendeskChatAccessToken *shared.SourceZendeskChatAccessToken
+ if r.Configuration.Credentials.AccessToken != nil {
+ accessToken1 := r.Configuration.Credentials.AccessToken.AccessToken.ValueString()
+ sourceZendeskChatAccessToken = &shared.SourceZendeskChatAccessToken{
AccessToken: accessToken1,
- Credentials: credentials2,
}
}
- if sourceZendeskChatAuthorizationMethodAccessToken != nil {
+ if sourceZendeskChatAccessToken != nil {
credentials = &shared.SourceZendeskChatAuthorizationMethod{
- SourceZendeskChatAuthorizationMethodAccessToken: sourceZendeskChatAuthorizationMethodAccessToken,
+ SourceZendeskChatAccessToken: sourceZendeskChatAccessToken,
}
}
}
- sourceType := shared.SourceZendeskChatZendeskChat(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
subdomain := new(string)
if !r.Configuration.Subdomain.IsUnknown() && !r.Configuration.Subdomain.IsNull() {
@@ -76,10 +71,15 @@ func (r *SourceZendeskChatResourceModel) ToCreateSDKType() *shared.SourceZendesk
}
configuration := shared.SourceZendeskChat{
Credentials: credentials,
- SourceType: sourceType,
StartDate: startDate,
Subdomain: subdomain,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -90,6 +90,7 @@ func (r *SourceZendeskChatResourceModel) ToCreateSDKType() *shared.SourceZendesk
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceZendeskChatCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -105,58 +106,54 @@ func (r *SourceZendeskChatResourceModel) ToGetSDKType() *shared.SourceZendeskCha
func (r *SourceZendeskChatResourceModel) ToUpdateSDKType() *shared.SourceZendeskChatPutRequest {
var credentials *shared.SourceZendeskChatUpdateAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceZendeskChatUpdateAuthorizationMethodOAuth20 *shared.SourceZendeskChatUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20 != nil {
+ var sourceZendeskChatUpdateOAuth20 *shared.SourceZendeskChatUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
accessToken := new(string)
- if !r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.AccessToken.IsNull() {
- *accessToken = r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.AccessToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.AccessToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.AccessToken.IsNull() {
+ *accessToken = r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
} else {
accessToken = nil
}
clientID := new(string)
- if !r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- credentials1 := shared.SourceZendeskChatUpdateAuthorizationMethodOAuth20Credentials(r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.Credentials.ValueString())
refreshToken := new(string)
- if !r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.RefreshToken.IsUnknown() && !r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.RefreshToken.IsNull() {
- *refreshToken = r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodOAuth20.RefreshToken.ValueString()
+ if !r.Configuration.Credentials.OAuth20.RefreshToken.IsUnknown() && !r.Configuration.Credentials.OAuth20.RefreshToken.IsNull() {
+ *refreshToken = r.Configuration.Credentials.OAuth20.RefreshToken.ValueString()
} else {
refreshToken = nil
}
- sourceZendeskChatUpdateAuthorizationMethodOAuth20 = &shared.SourceZendeskChatUpdateAuthorizationMethodOAuth20{
+ sourceZendeskChatUpdateOAuth20 = &shared.SourceZendeskChatUpdateOAuth20{
AccessToken: accessToken,
ClientID: clientID,
ClientSecret: clientSecret,
- Credentials: credentials1,
RefreshToken: refreshToken,
}
}
- if sourceZendeskChatUpdateAuthorizationMethodOAuth20 != nil {
+ if sourceZendeskChatUpdateOAuth20 != nil {
credentials = &shared.SourceZendeskChatUpdateAuthorizationMethod{
- SourceZendeskChatUpdateAuthorizationMethodOAuth20: sourceZendeskChatUpdateAuthorizationMethodOAuth20,
+ SourceZendeskChatUpdateOAuth20: sourceZendeskChatUpdateOAuth20,
}
}
- var sourceZendeskChatUpdateAuthorizationMethodAccessToken *shared.SourceZendeskChatUpdateAuthorizationMethodAccessToken
- if r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodAccessToken != nil {
- accessToken1 := r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodAccessToken.AccessToken.ValueString()
- credentials2 := shared.SourceZendeskChatUpdateAuthorizationMethodAccessTokenCredentials(r.Configuration.Credentials.SourceZendeskChatUpdateAuthorizationMethodAccessToken.Credentials.ValueString())
- sourceZendeskChatUpdateAuthorizationMethodAccessToken = &shared.SourceZendeskChatUpdateAuthorizationMethodAccessToken{
+ var sourceZendeskChatUpdateAccessToken *shared.SourceZendeskChatUpdateAccessToken
+ if r.Configuration.Credentials.AccessToken != nil {
+ accessToken1 := r.Configuration.Credentials.AccessToken.AccessToken.ValueString()
+ sourceZendeskChatUpdateAccessToken = &shared.SourceZendeskChatUpdateAccessToken{
AccessToken: accessToken1,
- Credentials: credentials2,
}
}
- if sourceZendeskChatUpdateAuthorizationMethodAccessToken != nil {
+ if sourceZendeskChatUpdateAccessToken != nil {
credentials = &shared.SourceZendeskChatUpdateAuthorizationMethod{
- SourceZendeskChatUpdateAuthorizationMethodAccessToken: sourceZendeskChatUpdateAuthorizationMethodAccessToken,
+ SourceZendeskChatUpdateAccessToken: sourceZendeskChatUpdateAccessToken,
}
}
}
diff --git a/internal/provider/source_zendesksell_data_source.go b/internal/provider/source_zendesksell_data_source.go
new file mode 100644
index 000000000..3699e7228
--- /dev/null
+++ b/internal/provider/source_zendesksell_data_source.go
@@ -0,0 +1,137 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+
+ "github.com/hashicorp/terraform-plugin-framework/datasource"
+ "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/hashicorp/terraform-plugin-framework/types/basetypes"
+)
+
+// Ensure provider defined types fully satisfy framework interfaces.
+var _ datasource.DataSource = &SourceZendeskSellDataSource{}
+var _ datasource.DataSourceWithConfigure = &SourceZendeskSellDataSource{}
+
+func NewSourceZendeskSellDataSource() datasource.DataSource {
+ return &SourceZendeskSellDataSource{}
+}
+
+// SourceZendeskSellDataSource is the data source implementation.
+type SourceZendeskSellDataSource struct {
+ client *sdk.SDK
+}
+
+// SourceZendeskSellDataSourceModel describes the data model.
+type SourceZendeskSellDataSourceModel struct {
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
+}
+
+// Metadata returns the data source type name.
+func (r *SourceZendeskSellDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_source_zendesk_sell"
+}
+
+// Schema defines the schema for the data source.
+func (r *SourceZendeskSellDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+ resp.Schema = schema.Schema{
+ MarkdownDescription: "SourceZendeskSell DataSource",
+
+ Attributes: map[string]schema.Attribute{
+ "configuration": schema.StringAttribute{
+ Computed: true,
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
+ },
+ "name": schema.StringAttribute{
+ Computed: true,
+ },
+ "source_id": schema.StringAttribute{
+ Required: true,
+ },
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
+ "workspace_id": schema.StringAttribute{
+ Computed: true,
+ },
+ },
+ }
+}
+
+func (r *SourceZendeskSellDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+ // Prevent panic if the provider has not been configured.
+ if req.ProviderData == nil {
+ return
+ }
+
+ client, ok := req.ProviderData.(*sdk.SDK)
+
+ if !ok {
+ resp.Diagnostics.AddError(
+ "Unexpected DataSource Configure Type",
+ fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData),
+ )
+
+ return
+ }
+
+ r.client = client
+}
+
+func (r *SourceZendeskSellDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+ var data *SourceZendeskSellDataSourceModel
+ var item types.Object
+
+ resp.Diagnostics.Append(req.Config.Get(ctx, &item)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{
+ UnhandledNullAsEmpty: true,
+ UnhandledUnknownAsEmpty: true,
+ })...)
+
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ sourceID := data.SourceID.ValueString()
+ request := operations.GetSourceZendeskSellRequest{
+ SourceID: sourceID,
+ }
+ res, err := r.client.Sources.GetSourceZendeskSell(ctx, request)
+ if err != nil {
+ resp.Diagnostics.AddError("failure to invoke API", err.Error())
+ if res != nil && res.RawResponse != nil {
+ resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse))
+ }
+ return
+ }
+ if res == nil {
+ resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res))
+ return
+ }
+ if res.StatusCode != 200 {
+ resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse))
+ return
+ }
+ if res.SourceResponse == nil {
+ resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse))
+ return
+ }
+ data.RefreshFromGetResponse(res.SourceResponse)
+
+ // Save updated data into Terraform state
+ resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/internal/provider/source_zendesksell_data_source_sdk.go b/internal/provider/source_zendesksell_data_source_sdk.go
new file mode 100644
index 000000000..ceffc76a6
--- /dev/null
+++ b/internal/provider/source_zendesksell_data_source_sdk.go
@@ -0,0 +1,18 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *SourceZendeskSellDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
+ r.Name = types.StringValue(resp.Name)
+ r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
diff --git a/internal/provider/source_bigcommerce_resource.go b/internal/provider/source_zendesksell_resource.go
old mode 100755
new mode 100644
similarity index 69%
rename from internal/provider/source_bigcommerce_resource.go
rename to internal/provider/source_zendesksell_resource.go
index 7ea3462ee..6ae9fb1bc
--- a/internal/provider/source_bigcommerce_resource.go
+++ b/internal/provider/source_zendesksell_resource.go
@@ -3,87 +3,82 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
// Ensure provider defined types fully satisfy framework interfaces.
-var _ resource.Resource = &SourceBigcommerceResource{}
-var _ resource.ResourceWithImportState = &SourceBigcommerceResource{}
+var _ resource.Resource = &SourceZendeskSellResource{}
+var _ resource.ResourceWithImportState = &SourceZendeskSellResource{}
-func NewSourceBigcommerceResource() resource.Resource {
- return &SourceBigcommerceResource{}
+func NewSourceZendeskSellResource() resource.Resource {
+ return &SourceZendeskSellResource{}
}
-// SourceBigcommerceResource defines the resource implementation.
-type SourceBigcommerceResource struct {
+// SourceZendeskSellResource defines the resource implementation.
+type SourceZendeskSellResource struct {
client *sdk.SDK
}
-// SourceBigcommerceResourceModel describes the resource data model.
-type SourceBigcommerceResourceModel struct {
- Configuration SourceBigcommerce `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- SourceType types.String `tfsdk:"source_type"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+// SourceZendeskSellResourceModel describes the resource data model.
+type SourceZendeskSellResourceModel struct {
+ Configuration SourceK6Cloud `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
+ Name types.String `tfsdk:"name"`
+ SecretID types.String `tfsdk:"secret_id"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
-func (r *SourceBigcommerceResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
- resp.TypeName = req.ProviderTypeName + "_source_bigcommerce"
+func (r *SourceZendeskSellResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+ resp.TypeName = req.ProviderTypeName + "_source_zendesk_sell"
}
-func (r *SourceBigcommerceResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+func (r *SourceZendeskSellResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
resp.Schema = schema.Schema{
- MarkdownDescription: "SourceBigcommerce Resource",
+ MarkdownDescription: "SourceZendeskSell Resource",
Attributes: map[string]schema.Attribute{
"configuration": schema.SingleNestedAttribute{
Required: true,
Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
+ "api_token": schema.StringAttribute{
Required: true,
- Description: `Access Token for making authenticated requests.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "bigcommerce",
- ),
- },
- Description: `must be one of ["bigcommerce"]`,
- },
- "start_date": schema.StringAttribute{
- Required: true,
- Description: `The date you would like to replicate data. Format: YYYY-MM-DD.`,
- },
- "store_hash": schema.StringAttribute{
- Required: true,
- Description: `The hash code of the store. For https://api.bigcommerce.com/stores/HASH_CODE/v3/, The store's hash code is 'HASH_CODE'.`,
+ Sensitive: true,
+ Description: `The API token for authenticating to Zendesk Sell`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -109,7 +104,7 @@ func (r *SourceBigcommerceResource) Schema(ctx context.Context, req resource.Sch
}
}
-func (r *SourceBigcommerceResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+func (r *SourceZendeskSellResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
// Prevent panic if the provider has not been configured.
if req.ProviderData == nil {
return
@@ -129,8 +124,8 @@ func (r *SourceBigcommerceResource) Configure(ctx context.Context, req resource.
r.client = client
}
-func (r *SourceBigcommerceResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
- var data *SourceBigcommerceResourceModel
+func (r *SourceZendeskSellResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ var data *SourceZendeskSellResourceModel
var item types.Object
resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...)
@@ -147,8 +142,8 @@ func (r *SourceBigcommerceResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
- res, err := r.client.Sources.CreateSourceBigcommerce(ctx, request)
+ request := data.ToCreateSDKType()
+ res, err := r.client.Sources.CreateSourceZendeskSell(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -174,8 +169,8 @@ func (r *SourceBigcommerceResource) Create(ctx context.Context, req resource.Cre
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
}
-func (r *SourceBigcommerceResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
- var data *SourceBigcommerceResourceModel
+func (r *SourceZendeskSellResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+ var data *SourceZendeskSellResourceModel
var item types.Object
resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
@@ -193,10 +188,10 @@ func (r *SourceBigcommerceResource) Read(ctx context.Context, req resource.ReadR
}
sourceID := data.SourceID.ValueString()
- request := operations.GetSourceBigcommerceRequest{
+ request := operations.GetSourceZendeskSellRequest{
SourceID: sourceID,
}
- res, err := r.client.Sources.GetSourceBigcommerce(ctx, request)
+ res, err := r.client.Sources.GetSourceZendeskSell(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -222,20 +217,20 @@ func (r *SourceBigcommerceResource) Read(ctx context.Context, req resource.ReadR
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
}
-func (r *SourceBigcommerceResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
- var data *SourceBigcommerceResourceModel
+func (r *SourceZendeskSellResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+ var data *SourceZendeskSellResourceModel
merge(ctx, req, resp, &data)
if resp.Diagnostics.HasError() {
return
}
- sourceBigcommercePutRequest := data.ToUpdateSDKType()
+ sourceZendeskSellPutRequest := data.ToUpdateSDKType()
sourceID := data.SourceID.ValueString()
- request := operations.PutSourceBigcommerceRequest{
- SourceBigcommercePutRequest: sourceBigcommercePutRequest,
+ request := operations.PutSourceZendeskSellRequest{
+ SourceZendeskSellPutRequest: sourceZendeskSellPutRequest,
SourceID: sourceID,
}
- res, err := r.client.Sources.PutSourceBigcommerce(ctx, request)
+ res, err := r.client.Sources.PutSourceZendeskSell(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -252,10 +247,10 @@ func (r *SourceBigcommerceResource) Update(ctx context.Context, req resource.Upd
return
}
sourceId1 := data.SourceID.ValueString()
- getRequest := operations.GetSourceBigcommerceRequest{
+ getRequest := operations.GetSourceZendeskSellRequest{
SourceID: sourceId1,
}
- getResponse, err := r.client.Sources.GetSourceBigcommerce(ctx, getRequest)
+ getResponse, err := r.client.Sources.GetSourceZendeskSell(ctx, getRequest)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -281,8 +276,8 @@ func (r *SourceBigcommerceResource) Update(ctx context.Context, req resource.Upd
resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
}
-func (r *SourceBigcommerceResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
- var data *SourceBigcommerceResourceModel
+func (r *SourceZendeskSellResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+ var data *SourceZendeskSellResourceModel
var item types.Object
resp.Diagnostics.Append(req.State.Get(ctx, &item)...)
@@ -300,10 +295,10 @@ func (r *SourceBigcommerceResource) Delete(ctx context.Context, req resource.Del
}
sourceID := data.SourceID.ValueString()
- request := operations.DeleteSourceBigcommerceRequest{
+ request := operations.DeleteSourceZendeskSellRequest{
SourceID: sourceID,
}
- res, err := r.client.Sources.DeleteSourceBigcommerce(ctx, request)
+ res, err := r.client.Sources.DeleteSourceZendeskSell(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
if res != nil && res.RawResponse != nil {
@@ -322,6 +317,6 @@ func (r *SourceBigcommerceResource) Delete(ctx context.Context, req resource.Del
}
-func (r *SourceBigcommerceResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+func (r *SourceZendeskSellResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_zendesksell_resource_sdk.go b/internal/provider/source_zendesksell_resource_sdk.go
new file mode 100644
index 000000000..a954f6c8c
--- /dev/null
+++ b/internal/provider/source_zendesksell_resource_sdk.go
@@ -0,0 +1,73 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *SourceZendeskSellResourceModel) ToCreateSDKType() *shared.SourceZendeskSellCreateRequest {
+ apiToken := r.Configuration.APIToken.ValueString()
+ configuration := shared.SourceZendeskSell{
+ APIToken: apiToken,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
+ name := r.Name.ValueString()
+ secretID := new(string)
+ if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
+ *secretID = r.SecretID.ValueString()
+ } else {
+ secretID = nil
+ }
+ workspaceID := r.WorkspaceID.ValueString()
+ out := shared.SourceZendeskSellCreateRequest{
+ Configuration: configuration,
+ DefinitionID: definitionID,
+ Name: name,
+ SecretID: secretID,
+ WorkspaceID: workspaceID,
+ }
+ return &out
+}
+
+func (r *SourceZendeskSellResourceModel) ToGetSDKType() *shared.SourceZendeskSellCreateRequest {
+ out := r.ToCreateSDKType()
+ return out
+}
+
+func (r *SourceZendeskSellResourceModel) ToUpdateSDKType() *shared.SourceZendeskSellPutRequest {
+ apiToken := r.Configuration.APIToken.ValueString()
+ configuration := shared.SourceZendeskSellUpdate{
+ APIToken: apiToken,
+ }
+ name := r.Name.ValueString()
+ workspaceID := r.WorkspaceID.ValueString()
+ out := shared.SourceZendeskSellPutRequest{
+ Configuration: configuration,
+ Name: name,
+ WorkspaceID: workspaceID,
+ }
+ return &out
+}
+
+func (r *SourceZendeskSellResourceModel) ToDeleteSDKType() *shared.SourceZendeskSellCreateRequest {
+ out := r.ToCreateSDKType()
+ return out
+}
+
+func (r *SourceZendeskSellResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ r.Name = types.StringValue(resp.Name)
+ r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
+ r.WorkspaceID = types.StringValue(resp.WorkspaceID)
+}
+
+func (r *SourceZendeskSellResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) {
+ r.RefreshFromGetResponse(resp)
+}
diff --git a/internal/provider/source_zendesksunshine_data_source.go b/internal/provider/source_zendesksunshine_data_source.go
old mode 100755
new mode 100644
index 03bad6f4a..4c3f8399c
--- a/internal/provider/source_zendesksunshine_data_source.go
+++ b/internal/provider/source_zendesksunshine_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceZendeskSunshineDataSource struct {
// SourceZendeskSunshineDataSourceModel describes the data model.
type SourceZendeskSunshineDataSourceModel struct {
- Configuration SourceZendeskSunshine `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,145 +47,20 @@ func (r *SourceZendeskSunshineDataSource) Schema(ctx context.Context, req dataso
MarkdownDescription: "SourceZendeskSunshine DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_zendesk_sunshine_authorization_method_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `API Token. See the docs for information on how to generate this key.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The user email for your Zendesk account`,
- },
- },
- },
- "source_zendesk_sunshine_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Long-term access Token for making authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- },
- },
- "source_zendesk_sunshine_update_authorization_method_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `API Token. See the docs for information on how to generate this key.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The user email for your Zendesk account`,
- },
- },
- },
- "source_zendesk_sunshine_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `Long-term access Token for making authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- },
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zendesk-sunshine",
- ),
- },
- Description: `must be one of ["zendesk-sunshine"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z.`,
- },
- "subdomain": schema.StringAttribute{
- Computed: true,
- Description: `The subdomain for your Zendesk Account.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_zendesksunshine_data_source_sdk.go b/internal/provider/source_zendesksunshine_data_source_sdk.go
old mode 100755
new mode 100644
index c62994b72..782fc5937
--- a/internal/provider/source_zendesksunshine_data_source_sdk.go
+++ b/internal/provider/source_zendesksunshine_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceZendeskSunshineDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_zendesksunshine_resource.go b/internal/provider/source_zendesksunshine_resource.go
old mode 100755
new mode 100644
index f31c86da9..4fcec4feb
--- a/internal/provider/source_zendesksunshine_resource.go
+++ b/internal/provider/source_zendesksunshine_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceZendeskSunshineResource struct {
// SourceZendeskSunshineResourceModel describes the resource data model.
type SourceZendeskSunshineResourceModel struct {
Configuration SourceZendeskSunshine `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,92 +59,28 @@ func (r *SourceZendeskSunshineResource) Schema(ctx context.Context, req resource
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_zendesk_sunshine_authorization_method_api_token": schema.SingleNestedAttribute{
+ "api_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `API Token. See the docs for information on how to generate this key.`,
},
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
"email": schema.StringAttribute{
Required: true,
Description: `The user email for your Zendesk account`,
},
},
},
- "source_zendesk_sunshine_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Long-term access Token for making authenticated requests.`,
},
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Required: true,
- Description: `The Client ID of your OAuth application.`,
- },
- "client_secret": schema.StringAttribute{
- Required: true,
- Description: `The Client Secret of your OAuth application.`,
- },
- },
- },
- "source_zendesk_sunshine_update_authorization_method_api_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Required: true,
- Description: `API Token. See the docs for information on how to generate this key.`,
- },
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- "email": schema.StringAttribute{
- Required: true,
- Description: `The user email for your Zendesk account`,
- },
- },
- },
- "source_zendesk_sunshine_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `Long-term access Token for making authenticated requests.`,
- },
- "auth_method": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
"client_id": schema.StringAttribute{
Required: true,
Description: `The Client ID of your OAuth application.`,
@@ -159,21 +96,12 @@ func (r *SourceZendeskSunshineResource) Schema(ctx context.Context, req resource
validators.ExactlyOneChild(),
},
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zendesk-sunshine",
- ),
- },
- Description: `must be one of ["zendesk-sunshine"]`,
- },
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z.`,
},
"subdomain": schema.StringAttribute{
Required: true,
@@ -181,13 +109,24 @@ func (r *SourceZendeskSunshineResource) Schema(ctx context.Context, req resource
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -251,7 +190,7 @@ func (r *SourceZendeskSunshineResource) Create(ctx context.Context, req resource
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceZendeskSunshine(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -427,5 +366,5 @@ func (r *SourceZendeskSunshineResource) Delete(ctx context.Context, req resource
}
func (r *SourceZendeskSunshineResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_zendesksunshine_resource_sdk.go b/internal/provider/source_zendesksunshine_resource_sdk.go
old mode 100755
new mode 100644
index 10857e20c..042969cbc
--- a/internal/provider/source_zendesksunshine_resource_sdk.go
+++ b/internal/provider/source_zendesksunshine_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -11,50 +11,50 @@ import (
func (r *SourceZendeskSunshineResourceModel) ToCreateSDKType() *shared.SourceZendeskSunshineCreateRequest {
var credentials *shared.SourceZendeskSunshineAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceZendeskSunshineAuthorizationMethodOAuth20 *shared.SourceZendeskSunshineAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodOAuth20.AccessToken.ValueString()
- authMethod := shared.SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodOAuth20.AuthMethod.ValueString())
- clientID := r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodOAuth20.ClientSecret.ValueString()
- sourceZendeskSunshineAuthorizationMethodOAuth20 = &shared.SourceZendeskSunshineAuthorizationMethodOAuth20{
+ var sourceZendeskSunshineOAuth20 *shared.SourceZendeskSunshineOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ sourceZendeskSunshineOAuth20 = &shared.SourceZendeskSunshineOAuth20{
AccessToken: accessToken,
- AuthMethod: authMethod,
ClientID: clientID,
ClientSecret: clientSecret,
}
}
- if sourceZendeskSunshineAuthorizationMethodOAuth20 != nil {
+ if sourceZendeskSunshineOAuth20 != nil {
credentials = &shared.SourceZendeskSunshineAuthorizationMethod{
- SourceZendeskSunshineAuthorizationMethodOAuth20: sourceZendeskSunshineAuthorizationMethodOAuth20,
+ SourceZendeskSunshineOAuth20: sourceZendeskSunshineOAuth20,
}
}
- var sourceZendeskSunshineAuthorizationMethodAPIToken *shared.SourceZendeskSunshineAuthorizationMethodAPIToken
- if r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodAPIToken.APIToken.ValueString()
- authMethod1 := shared.SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod(r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodAPIToken.AuthMethod.ValueString())
- email := r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodAPIToken.Email.ValueString()
- sourceZendeskSunshineAuthorizationMethodAPIToken = &shared.SourceZendeskSunshineAuthorizationMethodAPIToken{
- APIToken: apiToken,
- AuthMethod: authMethod1,
- Email: email,
+ var sourceZendeskSunshineAPIToken *shared.SourceZendeskSunshineAPIToken
+ if r.Configuration.Credentials.APIToken != nil {
+ apiToken := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ email := r.Configuration.Credentials.APIToken.Email.ValueString()
+ sourceZendeskSunshineAPIToken = &shared.SourceZendeskSunshineAPIToken{
+ APIToken: apiToken,
+ Email: email,
}
}
- if sourceZendeskSunshineAuthorizationMethodAPIToken != nil {
+ if sourceZendeskSunshineAPIToken != nil {
credentials = &shared.SourceZendeskSunshineAuthorizationMethod{
- SourceZendeskSunshineAuthorizationMethodAPIToken: sourceZendeskSunshineAuthorizationMethodAPIToken,
+ SourceZendeskSunshineAPIToken: sourceZendeskSunshineAPIToken,
}
}
}
- sourceType := shared.SourceZendeskSunshineZendeskSunshine(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
subdomain := r.Configuration.Subdomain.ValueString()
configuration := shared.SourceZendeskSunshine{
Credentials: credentials,
- SourceType: sourceType,
StartDate: startDate,
Subdomain: subdomain,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -65,6 +65,7 @@ func (r *SourceZendeskSunshineResourceModel) ToCreateSDKType() *shared.SourceZen
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceZendeskSunshineCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -80,38 +81,34 @@ func (r *SourceZendeskSunshineResourceModel) ToGetSDKType() *shared.SourceZendes
func (r *SourceZendeskSunshineResourceModel) ToUpdateSDKType() *shared.SourceZendeskSunshinePutRequest {
var credentials *shared.SourceZendeskSunshineUpdateAuthorizationMethod
if r.Configuration.Credentials != nil {
- var sourceZendeskSunshineUpdateAuthorizationMethodOAuth20 *shared.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20
- if r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20.AccessToken.ValueString()
- authMethod := shared.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20.AuthMethod.ValueString())
- clientID := r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20.ClientID.ValueString()
- clientSecret := r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString()
- sourceZendeskSunshineUpdateAuthorizationMethodOAuth20 = &shared.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20{
+ var sourceZendeskSunshineUpdateOAuth20 *shared.SourceZendeskSunshineUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
+ clientID := r.Configuration.Credentials.OAuth20.ClientID.ValueString()
+ clientSecret := r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
+ sourceZendeskSunshineUpdateOAuth20 = &shared.SourceZendeskSunshineUpdateOAuth20{
AccessToken: accessToken,
- AuthMethod: authMethod,
ClientID: clientID,
ClientSecret: clientSecret,
}
}
- if sourceZendeskSunshineUpdateAuthorizationMethodOAuth20 != nil {
+ if sourceZendeskSunshineUpdateOAuth20 != nil {
credentials = &shared.SourceZendeskSunshineUpdateAuthorizationMethod{
- SourceZendeskSunshineUpdateAuthorizationMethodOAuth20: sourceZendeskSunshineUpdateAuthorizationMethodOAuth20,
+ SourceZendeskSunshineUpdateOAuth20: sourceZendeskSunshineUpdateOAuth20,
}
}
- var sourceZendeskSunshineUpdateAuthorizationMethodAPIToken *shared.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken
- if r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken.APIToken.ValueString()
- authMethod1 := shared.SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod(r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken.AuthMethod.ValueString())
- email := r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken.Email.ValueString()
- sourceZendeskSunshineUpdateAuthorizationMethodAPIToken = &shared.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken{
- APIToken: apiToken,
- AuthMethod: authMethod1,
- Email: email,
+ var sourceZendeskSunshineUpdateAPIToken *shared.SourceZendeskSunshineUpdateAPIToken
+ if r.Configuration.Credentials.APIToken != nil {
+ apiToken := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ email := r.Configuration.Credentials.APIToken.Email.ValueString()
+ sourceZendeskSunshineUpdateAPIToken = &shared.SourceZendeskSunshineUpdateAPIToken{
+ APIToken: apiToken,
+ Email: email,
}
}
- if sourceZendeskSunshineUpdateAuthorizationMethodAPIToken != nil {
+ if sourceZendeskSunshineUpdateAPIToken != nil {
credentials = &shared.SourceZendeskSunshineUpdateAuthorizationMethod{
- SourceZendeskSunshineUpdateAuthorizationMethodAPIToken: sourceZendeskSunshineUpdateAuthorizationMethodAPIToken,
+ SourceZendeskSunshineUpdateAPIToken: sourceZendeskSunshineUpdateAPIToken,
}
}
}
diff --git a/internal/provider/source_zendesksupport_data_source.go b/internal/provider/source_zendesksupport_data_source.go
old mode 100755
new mode 100644
index 8cc3a59d0..a33e86e0a
--- a/internal/provider/source_zendesksupport_data_source.go
+++ b/internal/provider/source_zendesksupport_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceZendeskSupportDataSource struct {
// SourceZendeskSupportDataSourceModel describes the data model.
type SourceZendeskSupportDataSourceModel struct {
- Configuration SourceZendeskSupport1 `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,182 +47,20 @@ func (r *SourceZendeskSupportDataSource) Schema(ctx context.Context, req datasou
MarkdownDescription: "SourceZendeskSupport DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_zendesk_support_authentication_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `The value of the API token generated. See our full documentation for more information on generating this token.`,
- },
- "credentials": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The user email for your Zendesk account.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Zendesk allows two authentication methods. We recommend using ` + "`" + `OAuth2.0` + "`" + ` for Airbyte Cloud users and ` + "`" + `API token` + "`" + ` for Airbyte Open Source users.`,
- },
- "source_zendesk_support_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The OAuth access token. See the Zendesk docs for more information on generating this token.`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The OAuth client's ID. See this guide for more information.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The OAuth client secret. See this guide for more information.`,
- },
- "credentials": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Zendesk allows two authentication methods. We recommend using ` + "`" + `OAuth2.0` + "`" + ` for Airbyte Cloud users and ` + "`" + `API token` + "`" + ` for Airbyte Open Source users.`,
- },
- "source_zendesk_support_update_authentication_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `The value of the API token generated. See our full documentation for more information on generating this token.`,
- },
- "credentials": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The user email for your Zendesk account.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Zendesk allows two authentication methods. We recommend using ` + "`" + `OAuth2.0` + "`" + ` for Airbyte Cloud users and ` + "`" + `API token` + "`" + ` for Airbyte Open Source users.`,
- },
- "source_zendesk_support_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The OAuth access token. See the Zendesk docs for more information on generating this token.`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `The OAuth client's ID. See this guide for more information.`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `The OAuth client secret. See this guide for more information.`,
- },
- "credentials": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Zendesk allows two authentication methods. We recommend using ` + "`" + `OAuth2.0` + "`" + ` for Airbyte Cloud users and ` + "`" + `API token` + "`" + ` for Airbyte Open Source users.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Zendesk allows two authentication methods. We recommend using ` + "`" + `OAuth2.0` + "`" + ` for Airbyte Cloud users and ` + "`" + `API token` + "`" + ` for Airbyte Open Source users.`,
- },
- "ignore_pagination": schema.BoolAttribute{
- Computed: true,
- Description: `Makes each stream read a single page of data.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zendesk-support",
- ),
- },
- Description: `must be one of ["zendesk-support"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
- },
- "subdomain": schema.StringAttribute{
- Computed: true,
- Description: `This is your unique Zendesk subdomain that can be found in your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/, MY_SUBDOMAIN is the value of your subdomain.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_zendesksupport_data_source_sdk.go b/internal/provider/source_zendesksupport_data_source_sdk.go
old mode 100755
new mode 100644
index a8075f874..547344e94
--- a/internal/provider/source_zendesksupport_data_source_sdk.go
+++ b/internal/provider/source_zendesksupport_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceZendeskSupportDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_zendesksupport_resource.go b/internal/provider/source_zendesksupport_resource.go
old mode 100755
new mode 100644
index 6d0d5a8b6..31e6c8171
--- a/internal/provider/source_zendesksupport_resource.go
+++ b/internal/provider/source_zendesksupport_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceZendeskSupportResource struct {
// SourceZendeskSupportResourceModel describes the resource data model.
type SourceZendeskSupportResourceModel struct {
Configuration SourceZendeskSupport `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,107 +59,43 @@ func (r *SourceZendeskSupportResource) Schema(ctx context.Context, req resource.
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_zendesk_support_authentication_api_token": schema.SingleNestedAttribute{
+ "api_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Required: true,
- Description: `The value of the API token generated. See our full documentation for more information on generating this token.`,
- },
- "credentials": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- "email": schema.StringAttribute{
- Required: true,
- Description: `The user email for your Zendesk account.`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Zendesk allows two authentication methods. We recommend using ` + "`" + `OAuth2.0` + "`" + ` for Airbyte Cloud users and ` + "`" + `API token` + "`" + ` for Airbyte Open Source users.`,
- },
- "source_zendesk_support_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `The OAuth access token. See the Zendesk docs for more information on generating this token.`,
- },
- "client_id": schema.StringAttribute{
Optional: true,
- Description: `The OAuth client's ID. See this guide for more information.`,
- },
- "client_secret": schema.StringAttribute{
- Optional: true,
- Description: `The OAuth client secret. See this guide for more information.`,
- },
- "credentials": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
- },
- Description: `Zendesk allows two authentication methods. We recommend using ` + "`" + `OAuth2.0` + "`" + ` for Airbyte Cloud users and ` + "`" + `API token` + "`" + ` for Airbyte Open Source users.`,
- },
- "source_zendesk_support_update_authentication_api_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The value of the API token generated. See our full documentation for more information on generating this token.`,
},
- "credentials": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
"email": schema.StringAttribute{
Required: true,
Description: `The user email for your Zendesk account.`,
},
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `Zendesk allows two authentication methods. We recommend using ` + "`" + `OAuth2.0` + "`" + ` for Airbyte Cloud users and ` + "`" + `API token` + "`" + ` for Airbyte Open Source users.`,
},
- "source_zendesk_support_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The OAuth access token. See the Zendesk docs for more information on generating this token.`,
},
+ "additional_properties": schema.StringAttribute{
+ Optional: true,
+ Description: `Parsed as JSON.`,
+ Validators: []validator.String{
+ validators.IsValidJSON(),
+ },
+ },
"client_id": schema.StringAttribute{
Optional: true,
Description: `The OAuth client's ID. See this guide for more information.`,
@@ -167,50 +104,26 @@ func (r *SourceZendeskSupportResource) Schema(ctx context.Context, req resource.
Optional: true,
Description: `The OAuth client secret. See this guide for more information.`,
},
- "credentials": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `Zendesk allows two authentication methods. We recommend using ` + "`" + `OAuth2.0` + "`" + ` for Airbyte Cloud users and ` + "`" + `API token` + "`" + ` for Airbyte Open Source users.`,
},
},
+ Description: `Zendesk allows two authentication methods. We recommend using ` + "`" + `OAuth2.0` + "`" + ` for Airbyte Cloud users and ` + "`" + `API token` + "`" + ` for Airbyte Open Source users.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Zendesk allows two authentication methods. We recommend using ` + "`" + `OAuth2.0` + "`" + ` for Airbyte Cloud users and ` + "`" + `API token` + "`" + ` for Airbyte Open Source users.`,
},
"ignore_pagination": schema.BoolAttribute{
- Optional: true,
- Description: `Makes each stream read a single page of data.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zendesk-support",
- ),
- },
- Description: `must be one of ["zendesk-support"]`,
+ Optional: true,
+ MarkdownDescription: `Default: false` + "\n" +
+ `Makes each stream read a single page of data.`,
},
"start_date": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
},
"subdomain": schema.StringAttribute{
Required: true,
@@ -218,13 +131,24 @@ func (r *SourceZendeskSupportResource) Schema(ctx context.Context, req resource.
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -288,7 +212,7 @@ func (r *SourceZendeskSupportResource) Create(ctx context.Context, req resource.
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceZendeskSupport(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -464,5 +388,5 @@ func (r *SourceZendeskSupportResource) Delete(ctx context.Context, req resource.
}
func (r *SourceZendeskSupportResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_zendesksupport_resource_sdk.go b/internal/provider/source_zendesksupport_resource_sdk.go
old mode 100755
new mode 100644
index 2d8b9695b..a76e74d6b
--- a/internal/provider/source_zendesksupport_resource_sdk.go
+++ b/internal/provider/source_zendesksupport_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
"encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -12,68 +12,54 @@ import (
func (r *SourceZendeskSupportResourceModel) ToCreateSDKType() *shared.SourceZendeskSupportCreateRequest {
var credentials *shared.SourceZendeskSupportAuthentication
if r.Configuration.Credentials != nil {
- var sourceZendeskSupportAuthenticationOAuth20 *shared.SourceZendeskSupportAuthenticationOAuth20
- if r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.AccessToken.ValueString()
+ var sourceZendeskSupportOAuth20 *shared.SourceZendeskSupportOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ var additionalProperties interface{}
+ if !r.Configuration.Credentials.OAuth20.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.OAuth20.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.OAuth20.AdditionalProperties.ValueString()), &additionalProperties)
+ }
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
clientID := new(string)
- if !r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- credentials1 := new(shared.SourceZendeskSupportAuthenticationOAuth20Credentials)
- if !r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.Credentials.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.Credentials.IsNull() {
- *credentials1 = shared.SourceZendeskSupportAuthenticationOAuth20Credentials(r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.Credentials.ValueString())
- } else {
- credentials1 = nil
- }
- var additionalProperties interface{}
- if !r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskSupportAuthenticationOAuth20.AdditionalProperties.ValueString()), &additionalProperties)
- }
- sourceZendeskSupportAuthenticationOAuth20 = &shared.SourceZendeskSupportAuthenticationOAuth20{
+ sourceZendeskSupportOAuth20 = &shared.SourceZendeskSupportOAuth20{
+ AdditionalProperties: additionalProperties,
AccessToken: accessToken,
ClientID: clientID,
ClientSecret: clientSecret,
- Credentials: credentials1,
- AdditionalProperties: additionalProperties,
}
}
- if sourceZendeskSupportAuthenticationOAuth20 != nil {
+ if sourceZendeskSupportOAuth20 != nil {
credentials = &shared.SourceZendeskSupportAuthentication{
- SourceZendeskSupportAuthenticationOAuth20: sourceZendeskSupportAuthenticationOAuth20,
+ SourceZendeskSupportOAuth20: sourceZendeskSupportOAuth20,
}
}
- var sourceZendeskSupportAuthenticationAPIToken *shared.SourceZendeskSupportAuthenticationAPIToken
- if r.Configuration.Credentials.SourceZendeskSupportAuthenticationAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceZendeskSupportAuthenticationAPIToken.APIToken.ValueString()
- credentials2 := new(shared.SourceZendeskSupportAuthenticationAPITokenCredentials)
- if !r.Configuration.Credentials.SourceZendeskSupportAuthenticationAPIToken.Credentials.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportAuthenticationAPIToken.Credentials.IsNull() {
- *credentials2 = shared.SourceZendeskSupportAuthenticationAPITokenCredentials(r.Configuration.Credentials.SourceZendeskSupportAuthenticationAPIToken.Credentials.ValueString())
- } else {
- credentials2 = nil
- }
- email := r.Configuration.Credentials.SourceZendeskSupportAuthenticationAPIToken.Email.ValueString()
+ var sourceZendeskSupportAPIToken *shared.SourceZendeskSupportAPIToken
+ if r.Configuration.Credentials.APIToken != nil {
var additionalProperties1 interface{}
- if !r.Configuration.Credentials.SourceZendeskSupportAuthenticationAPIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportAuthenticationAPIToken.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskSupportAuthenticationAPIToken.AdditionalProperties.ValueString()), &additionalProperties1)
+ if !r.Configuration.Credentials.APIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.APIToken.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.APIToken.AdditionalProperties.ValueString()), &additionalProperties1)
}
- sourceZendeskSupportAuthenticationAPIToken = &shared.SourceZendeskSupportAuthenticationAPIToken{
+ apiToken := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ email := r.Configuration.Credentials.APIToken.Email.ValueString()
+ sourceZendeskSupportAPIToken = &shared.SourceZendeskSupportAPIToken{
+ AdditionalProperties: additionalProperties1,
APIToken: apiToken,
- Credentials: credentials2,
Email: email,
- AdditionalProperties: additionalProperties1,
}
}
- if sourceZendeskSupportAuthenticationAPIToken != nil {
+ if sourceZendeskSupportAPIToken != nil {
credentials = &shared.SourceZendeskSupportAuthentication{
- SourceZendeskSupportAuthenticationAPIToken: sourceZendeskSupportAuthenticationAPIToken,
+ SourceZendeskSupportAPIToken: sourceZendeskSupportAPIToken,
}
}
}
@@ -83,7 +69,6 @@ func (r *SourceZendeskSupportResourceModel) ToCreateSDKType() *shared.SourceZend
} else {
ignorePagination = nil
}
- sourceType := shared.SourceZendeskSupportZendeskSupport(r.Configuration.SourceType.ValueString())
startDate := new(time.Time)
if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() {
*startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
@@ -94,10 +79,15 @@ func (r *SourceZendeskSupportResourceModel) ToCreateSDKType() *shared.SourceZend
configuration := shared.SourceZendeskSupport{
Credentials: credentials,
IgnorePagination: ignorePagination,
- SourceType: sourceType,
StartDate: startDate,
Subdomain: subdomain,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -108,6 +98,7 @@ func (r *SourceZendeskSupportResourceModel) ToCreateSDKType() *shared.SourceZend
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceZendeskSupportCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -123,68 +114,54 @@ func (r *SourceZendeskSupportResourceModel) ToGetSDKType() *shared.SourceZendesk
func (r *SourceZendeskSupportResourceModel) ToUpdateSDKType() *shared.SourceZendeskSupportPutRequest {
var credentials *shared.SourceZendeskSupportUpdateAuthentication
if r.Configuration.Credentials != nil {
- var sourceZendeskSupportUpdateAuthenticationOAuth20 *shared.SourceZendeskSupportUpdateAuthenticationOAuth20
- if r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.AccessToken.ValueString()
+ var sourceZendeskSupportUpdateOAuth20 *shared.SourceZendeskSupportUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ var additionalProperties interface{}
+ if !r.Configuration.Credentials.OAuth20.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.OAuth20.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.OAuth20.AdditionalProperties.ValueString()), &additionalProperties)
+ }
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
clientID := new(string)
- if !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- credentials1 := new(shared.SourceZendeskSupportUpdateAuthenticationOAuth20Credentials)
- if !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.Credentials.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.Credentials.IsNull() {
- *credentials1 = shared.SourceZendeskSupportUpdateAuthenticationOAuth20Credentials(r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.Credentials.ValueString())
- } else {
- credentials1 = nil
- }
- var additionalProperties interface{}
- if !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationOAuth20.AdditionalProperties.ValueString()), &additionalProperties)
- }
- sourceZendeskSupportUpdateAuthenticationOAuth20 = &shared.SourceZendeskSupportUpdateAuthenticationOAuth20{
+ sourceZendeskSupportUpdateOAuth20 = &shared.SourceZendeskSupportUpdateOAuth20{
+ AdditionalProperties: additionalProperties,
AccessToken: accessToken,
ClientID: clientID,
ClientSecret: clientSecret,
- Credentials: credentials1,
- AdditionalProperties: additionalProperties,
}
}
- if sourceZendeskSupportUpdateAuthenticationOAuth20 != nil {
+ if sourceZendeskSupportUpdateOAuth20 != nil {
credentials = &shared.SourceZendeskSupportUpdateAuthentication{
- SourceZendeskSupportUpdateAuthenticationOAuth20: sourceZendeskSupportUpdateAuthenticationOAuth20,
+ SourceZendeskSupportUpdateOAuth20: sourceZendeskSupportUpdateOAuth20,
}
}
- var sourceZendeskSupportUpdateAuthenticationAPIToken *shared.SourceZendeskSupportUpdateAuthenticationAPIToken
- if r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationAPIToken.APIToken.ValueString()
- credentials2 := new(shared.SourceZendeskSupportUpdateAuthenticationAPITokenCredentials)
- if !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationAPIToken.Credentials.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationAPIToken.Credentials.IsNull() {
- *credentials2 = shared.SourceZendeskSupportUpdateAuthenticationAPITokenCredentials(r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationAPIToken.Credentials.ValueString())
- } else {
- credentials2 = nil
- }
- email := r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationAPIToken.Email.ValueString()
+ var sourceZendeskSupportUpdateAPIToken *shared.SourceZendeskSupportUpdateAPIToken
+ if r.Configuration.Credentials.APIToken != nil {
var additionalProperties1 interface{}
- if !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationAPIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationAPIToken.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskSupportUpdateAuthenticationAPIToken.AdditionalProperties.ValueString()), &additionalProperties1)
+ if !r.Configuration.Credentials.APIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.APIToken.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.APIToken.AdditionalProperties.ValueString()), &additionalProperties1)
}
- sourceZendeskSupportUpdateAuthenticationAPIToken = &shared.SourceZendeskSupportUpdateAuthenticationAPIToken{
+ apiToken := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ email := r.Configuration.Credentials.APIToken.Email.ValueString()
+ sourceZendeskSupportUpdateAPIToken = &shared.SourceZendeskSupportUpdateAPIToken{
+ AdditionalProperties: additionalProperties1,
APIToken: apiToken,
- Credentials: credentials2,
Email: email,
- AdditionalProperties: additionalProperties1,
}
}
- if sourceZendeskSupportUpdateAuthenticationAPIToken != nil {
+ if sourceZendeskSupportUpdateAPIToken != nil {
credentials = &shared.SourceZendeskSupportUpdateAuthentication{
- SourceZendeskSupportUpdateAuthenticationAPIToken: sourceZendeskSupportUpdateAuthenticationAPIToken,
+ SourceZendeskSupportUpdateAPIToken: sourceZendeskSupportUpdateAPIToken,
}
}
}
diff --git a/internal/provider/source_zendesktalk_data_source.go b/internal/provider/source_zendesktalk_data_source.go
old mode 100755
new mode 100644
index 36b2e7593..09a98e74f
--- a/internal/provider/source_zendesktalk_data_source.go
+++ b/internal/provider/source_zendesktalk_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceZendeskTalkDataSource struct {
// SourceZendeskTalkDataSourceModel describes the data model.
type SourceZendeskTalkDataSourceModel struct {
- Configuration SourceZendeskTalk1 `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,178 +47,20 @@ func (r *SourceZendeskTalkDataSource) Schema(ctx context.Context, req datasource
MarkdownDescription: "SourceZendeskTalk DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "credentials": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "source_zendesk_talk_authentication_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `The value of the API token generated. See the docs for more information.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The user email for your Zendesk account.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Zendesk service provides two authentication methods. Choose between: ` + "`" + `OAuth2.0` + "`" + ` or ` + "`" + `API token` + "`" + `.`,
- },
- "source_zendesk_talk_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The value of the API token generated. See the docs for more information.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Client ID`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Client Secret`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Zendesk service provides two authentication methods. Choose between: ` + "`" + `OAuth2.0` + "`" + ` or ` + "`" + `API token` + "`" + `.`,
- },
- "source_zendesk_talk_update_authentication_api_token": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `The value of the API token generated. See the docs for more information.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- "email": schema.StringAttribute{
- Computed: true,
- Description: `The user email for your Zendesk account.`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Zendesk service provides two authentication methods. Choose between: ` + "`" + `OAuth2.0` + "`" + ` or ` + "`" + `API token` + "`" + `.`,
- },
- "source_zendesk_talk_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Computed: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Computed: true,
- Description: `The value of the API token generated. See the docs for more information.`,
- },
- "auth_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Client ID`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Client Secret`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Zendesk service provides two authentication methods. Choose between: ` + "`" + `OAuth2.0` + "`" + ` or ` + "`" + `API token` + "`" + `.`,
- },
- },
- Validators: []validator.Object{
- validators.ExactlyOneChild(),
- },
- Description: `Zendesk service provides two authentication methods. Choose between: ` + "`" + `OAuth2.0` + "`" + ` or ` + "`" + `API token` + "`" + `.`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zendesk-talk",
- ),
- },
- Description: `must be one of ["zendesk-talk"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `The date from which you'd like to replicate data for Zendesk Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
- },
- "subdomain": schema.StringAttribute{
- Computed: true,
- Description: `This is your Zendesk subdomain that can be found in your account URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN is the value of your subdomain.`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_zendesktalk_data_source_sdk.go b/internal/provider/source_zendesktalk_data_source_sdk.go
old mode 100755
new mode 100644
index 5a15a3d62..b2bd401ed
--- a/internal/provider/source_zendesktalk_data_source_sdk.go
+++ b/internal/provider/source_zendesktalk_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceZendeskTalkDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_zendesktalk_resource.go b/internal/provider/source_zendesktalk_resource.go
old mode 100755
new mode 100644
index 809e57faa..f40131eab
--- a/internal/provider/source_zendesktalk_resource.go
+++ b/internal/provider/source_zendesktalk_resource.go
@@ -3,18 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +36,7 @@ type SourceZendeskTalkResource struct {
// SourceZendeskTalkResourceModel describes the resource data model.
type SourceZendeskTalkResourceModel struct {
Configuration SourceZendeskTalk `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -58,115 +59,42 @@ func (r *SourceZendeskTalkResource) Schema(ctx context.Context, req resource.Sch
"credentials": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "source_zendesk_talk_authentication_api_token": schema.SingleNestedAttribute{
+ "api_token": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Required: true,
- Description: `The value of the API token generated. See the docs for more information.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
- "email": schema.StringAttribute{
- Required: true,
- Description: `The user email for your Zendesk account.`,
- },
"additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
- },
- Description: `Zendesk service provides two authentication methods. Choose between: ` + "`" + `OAuth2.0` + "`" + ` or ` + "`" + `API token` + "`" + `.`,
- },
- "source_zendesk_talk_authentication_o_auth2_0": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
- "access_token": schema.StringAttribute{
- Required: true,
- Description: `The value of the API token generated. See the docs for more information.`,
- },
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
- },
- Description: `must be one of ["oauth2.0"]`,
- },
- "client_id": schema.StringAttribute{
Optional: true,
- Description: `Client ID`,
- },
- "client_secret": schema.StringAttribute{
- Optional: true,
- Description: `Client Secret`,
- },
- "additional_properties": schema.StringAttribute{
- Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
validators.IsValidJSON(),
},
- Description: `Parsed as JSON.`,
},
- },
- Description: `Zendesk service provides two authentication methods. Choose between: ` + "`" + `OAuth2.0` + "`" + ` or ` + "`" + `API token` + "`" + `.`,
- },
- "source_zendesk_talk_update_authentication_api_token": schema.SingleNestedAttribute{
- Optional: true,
- Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The value of the API token generated. See the docs for more information.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "api_token",
- ),
- },
- Description: `must be one of ["api_token"]`,
- },
"email": schema.StringAttribute{
Required: true,
Description: `The user email for your Zendesk account.`,
},
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `Zendesk service provides two authentication methods. Choose between: ` + "`" + `OAuth2.0` + "`" + ` or ` + "`" + `API token` + "`" + `.`,
},
- "source_zendesk_talk_update_authentication_o_auth2_0": schema.SingleNestedAttribute{
+ "o_auth20": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"access_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `The value of the API token generated. See the docs for more information.`,
},
- "auth_type": schema.StringAttribute{
- Optional: true,
+ "additional_properties": schema.StringAttribute{
+ Optional: true,
+ Description: `Parsed as JSON.`,
Validators: []validator.String{
- stringvalidator.OneOf(
- "oauth2.0",
- ),
+ validators.IsValidJSON(),
},
- Description: `must be one of ["oauth2.0"]`,
},
"client_id": schema.StringAttribute{
Optional: true,
@@ -176,37 +104,21 @@ func (r *SourceZendeskTalkResource) Schema(ctx context.Context, req resource.Sch
Optional: true,
Description: `Client Secret`,
},
- "additional_properties": schema.StringAttribute{
- Optional: true,
- Validators: []validator.String{
- validators.IsValidJSON(),
- },
- Description: `Parsed as JSON.`,
- },
},
Description: `Zendesk service provides two authentication methods. Choose between: ` + "`" + `OAuth2.0` + "`" + ` or ` + "`" + `API token` + "`" + `.`,
},
},
+ Description: `Zendesk service provides two authentication methods. Choose between: ` + "`" + `OAuth2.0` + "`" + ` or ` + "`" + `API token` + "`" + `.`,
Validators: []validator.Object{
validators.ExactlyOneChild(),
},
- Description: `Zendesk service provides two authentication methods. Choose between: ` + "`" + `OAuth2.0` + "`" + ` or ` + "`" + `API token` + "`" + `.`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zendesk-talk",
- ),
- },
- Description: `must be one of ["zendesk-talk"]`,
},
"start_date": schema.StringAttribute{
- Required: true,
+ Required: true,
+ Description: `The date from which you'd like to replicate data for Zendesk Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `The date from which you'd like to replicate data for Zendesk Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.`,
},
"subdomain": schema.StringAttribute{
Required: true,
@@ -214,13 +126,24 @@ func (r *SourceZendeskTalkResource) Schema(ctx context.Context, req resource.Sch
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -284,7 +207,7 @@ func (r *SourceZendeskTalkResource) Create(ctx context.Context, req resource.Cre
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceZendeskTalk(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -460,5 +383,5 @@ func (r *SourceZendeskTalkResource) Delete(ctx context.Context, req resource.Del
}
func (r *SourceZendeskTalkResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_zendesktalk_resource_sdk.go b/internal/provider/source_zendesktalk_resource_sdk.go
old mode 100755
new mode 100644
index e7fa5696d..b07302f8c
--- a/internal/provider/source_zendesktalk_resource_sdk.go
+++ b/internal/provider/source_zendesktalk_resource_sdk.go
@@ -3,8 +3,8 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
"encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -12,80 +12,70 @@ import (
func (r *SourceZendeskTalkResourceModel) ToCreateSDKType() *shared.SourceZendeskTalkCreateRequest {
var credentials *shared.SourceZendeskTalkAuthentication
if r.Configuration.Credentials != nil {
- var sourceZendeskTalkAuthenticationAPIToken *shared.SourceZendeskTalkAuthenticationAPIToken
- if r.Configuration.Credentials.SourceZendeskTalkAuthenticationAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceZendeskTalkAuthenticationAPIToken.APIToken.ValueString()
- authType := new(shared.SourceZendeskTalkAuthenticationAPITokenAuthType)
- if !r.Configuration.Credentials.SourceZendeskTalkAuthenticationAPIToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkAuthenticationAPIToken.AuthType.IsNull() {
- *authType = shared.SourceZendeskTalkAuthenticationAPITokenAuthType(r.Configuration.Credentials.SourceZendeskTalkAuthenticationAPIToken.AuthType.ValueString())
- } else {
- authType = nil
- }
- email := r.Configuration.Credentials.SourceZendeskTalkAuthenticationAPIToken.Email.ValueString()
+ var sourceZendeskTalkAPIToken *shared.SourceZendeskTalkAPIToken
+ if r.Configuration.Credentials.APIToken != nil {
var additionalProperties interface{}
- if !r.Configuration.Credentials.SourceZendeskTalkAuthenticationAPIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkAuthenticationAPIToken.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskTalkAuthenticationAPIToken.AdditionalProperties.ValueString()), &additionalProperties)
+ if !r.Configuration.Credentials.APIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.APIToken.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.APIToken.AdditionalProperties.ValueString()), &additionalProperties)
}
- sourceZendeskTalkAuthenticationAPIToken = &shared.SourceZendeskTalkAuthenticationAPIToken{
+ apiToken := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ email := r.Configuration.Credentials.APIToken.Email.ValueString()
+ sourceZendeskTalkAPIToken = &shared.SourceZendeskTalkAPIToken{
+ AdditionalProperties: additionalProperties,
APIToken: apiToken,
- AuthType: authType,
Email: email,
- AdditionalProperties: additionalProperties,
}
}
- if sourceZendeskTalkAuthenticationAPIToken != nil {
+ if sourceZendeskTalkAPIToken != nil {
credentials = &shared.SourceZendeskTalkAuthentication{
- SourceZendeskTalkAuthenticationAPIToken: sourceZendeskTalkAuthenticationAPIToken,
+ SourceZendeskTalkAPIToken: sourceZendeskTalkAPIToken,
}
}
- var sourceZendeskTalkAuthenticationOAuth20 *shared.SourceZendeskTalkAuthenticationOAuth20
- if r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.AccessToken.ValueString()
- authType1 := new(shared.SourceZendeskTalkAuthenticationOAuth20AuthType)
- if !r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.AuthType.IsNull() {
- *authType1 = shared.SourceZendeskTalkAuthenticationOAuth20AuthType(r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.AuthType.ValueString())
- } else {
- authType1 = nil
+ var sourceZendeskTalkOAuth20 *shared.SourceZendeskTalkOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ var additionalProperties1 interface{}
+ if !r.Configuration.Credentials.OAuth20.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.OAuth20.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.OAuth20.AdditionalProperties.ValueString()), &additionalProperties1)
}
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
clientID := new(string)
- if !r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- var additionalProperties1 interface{}
- if !r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskTalkAuthenticationOAuth20.AdditionalProperties.ValueString()), &additionalProperties1)
- }
- sourceZendeskTalkAuthenticationOAuth20 = &shared.SourceZendeskTalkAuthenticationOAuth20{
+ sourceZendeskTalkOAuth20 = &shared.SourceZendeskTalkOAuth20{
+ AdditionalProperties: additionalProperties1,
AccessToken: accessToken,
- AuthType: authType1,
ClientID: clientID,
ClientSecret: clientSecret,
- AdditionalProperties: additionalProperties1,
}
}
- if sourceZendeskTalkAuthenticationOAuth20 != nil {
+ if sourceZendeskTalkOAuth20 != nil {
credentials = &shared.SourceZendeskTalkAuthentication{
- SourceZendeskTalkAuthenticationOAuth20: sourceZendeskTalkAuthenticationOAuth20,
+ SourceZendeskTalkOAuth20: sourceZendeskTalkOAuth20,
}
}
}
- sourceType := shared.SourceZendeskTalkZendeskTalk(r.Configuration.SourceType.ValueString())
startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString())
subdomain := r.Configuration.Subdomain.ValueString()
configuration := shared.SourceZendeskTalk{
Credentials: credentials,
- SourceType: sourceType,
StartDate: startDate,
Subdomain: subdomain,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -96,6 +86,7 @@ func (r *SourceZendeskTalkResourceModel) ToCreateSDKType() *shared.SourceZendesk
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceZendeskTalkCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -111,68 +102,54 @@ func (r *SourceZendeskTalkResourceModel) ToGetSDKType() *shared.SourceZendeskTal
func (r *SourceZendeskTalkResourceModel) ToUpdateSDKType() *shared.SourceZendeskTalkPutRequest {
var credentials *shared.SourceZendeskTalkUpdateAuthentication
if r.Configuration.Credentials != nil {
- var sourceZendeskTalkUpdateAuthenticationAPIToken *shared.SourceZendeskTalkUpdateAuthenticationAPIToken
- if r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationAPIToken != nil {
- apiToken := r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationAPIToken.APIToken.ValueString()
- authType := new(shared.SourceZendeskTalkUpdateAuthenticationAPITokenAuthType)
- if !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationAPIToken.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationAPIToken.AuthType.IsNull() {
- *authType = shared.SourceZendeskTalkUpdateAuthenticationAPITokenAuthType(r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationAPIToken.AuthType.ValueString())
- } else {
- authType = nil
- }
- email := r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationAPIToken.Email.ValueString()
+ var sourceZendeskTalkUpdateAPIToken *shared.SourceZendeskTalkUpdateAPIToken
+ if r.Configuration.Credentials.APIToken != nil {
var additionalProperties interface{}
- if !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationAPIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationAPIToken.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationAPIToken.AdditionalProperties.ValueString()), &additionalProperties)
+ if !r.Configuration.Credentials.APIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.APIToken.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.APIToken.AdditionalProperties.ValueString()), &additionalProperties)
}
- sourceZendeskTalkUpdateAuthenticationAPIToken = &shared.SourceZendeskTalkUpdateAuthenticationAPIToken{
+ apiToken := r.Configuration.Credentials.APIToken.APIToken.ValueString()
+ email := r.Configuration.Credentials.APIToken.Email.ValueString()
+ sourceZendeskTalkUpdateAPIToken = &shared.SourceZendeskTalkUpdateAPIToken{
+ AdditionalProperties: additionalProperties,
APIToken: apiToken,
- AuthType: authType,
Email: email,
- AdditionalProperties: additionalProperties,
}
}
- if sourceZendeskTalkUpdateAuthenticationAPIToken != nil {
+ if sourceZendeskTalkUpdateAPIToken != nil {
credentials = &shared.SourceZendeskTalkUpdateAuthentication{
- SourceZendeskTalkUpdateAuthenticationAPIToken: sourceZendeskTalkUpdateAuthenticationAPIToken,
+ SourceZendeskTalkUpdateAPIToken: sourceZendeskTalkUpdateAPIToken,
}
}
- var sourceZendeskTalkUpdateAuthenticationOAuth20 *shared.SourceZendeskTalkUpdateAuthenticationOAuth20
- if r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20 != nil {
- accessToken := r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.AccessToken.ValueString()
- authType1 := new(shared.SourceZendeskTalkUpdateAuthenticationOAuth20AuthType)
- if !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.AuthType.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.AuthType.IsNull() {
- *authType1 = shared.SourceZendeskTalkUpdateAuthenticationOAuth20AuthType(r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.AuthType.ValueString())
- } else {
- authType1 = nil
+ var sourceZendeskTalkUpdateOAuth20 *shared.SourceZendeskTalkUpdateOAuth20
+ if r.Configuration.Credentials.OAuth20 != nil {
+ var additionalProperties1 interface{}
+ if !r.Configuration.Credentials.OAuth20.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.OAuth20.AdditionalProperties.IsNull() {
+ _ = json.Unmarshal([]byte(r.Configuration.Credentials.OAuth20.AdditionalProperties.ValueString()), &additionalProperties1)
}
+ accessToken := r.Configuration.Credentials.OAuth20.AccessToken.ValueString()
clientID := new(string)
- if !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.ClientID.IsNull() {
- *clientID = r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.ClientID.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientID.IsNull() {
+ *clientID = r.Configuration.Credentials.OAuth20.ClientID.ValueString()
} else {
clientID = nil
}
clientSecret := new(string)
- if !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.ClientSecret.IsNull() {
- *clientSecret = r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.ClientSecret.ValueString()
+ if !r.Configuration.Credentials.OAuth20.ClientSecret.IsUnknown() && !r.Configuration.Credentials.OAuth20.ClientSecret.IsNull() {
+ *clientSecret = r.Configuration.Credentials.OAuth20.ClientSecret.ValueString()
} else {
clientSecret = nil
}
- var additionalProperties1 interface{}
- if !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.AdditionalProperties.IsNull() {
- _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskTalkUpdateAuthenticationOAuth20.AdditionalProperties.ValueString()), &additionalProperties1)
- }
- sourceZendeskTalkUpdateAuthenticationOAuth20 = &shared.SourceZendeskTalkUpdateAuthenticationOAuth20{
+ sourceZendeskTalkUpdateOAuth20 = &shared.SourceZendeskTalkUpdateOAuth20{
+ AdditionalProperties: additionalProperties1,
AccessToken: accessToken,
- AuthType: authType1,
ClientID: clientID,
ClientSecret: clientSecret,
- AdditionalProperties: additionalProperties1,
}
}
- if sourceZendeskTalkUpdateAuthenticationOAuth20 != nil {
+ if sourceZendeskTalkUpdateOAuth20 != nil {
credentials = &shared.SourceZendeskTalkUpdateAuthentication{
- SourceZendeskTalkUpdateAuthenticationOAuth20: sourceZendeskTalkUpdateAuthenticationOAuth20,
+ SourceZendeskTalkUpdateOAuth20: sourceZendeskTalkUpdateOAuth20,
}
}
}
diff --git a/internal/provider/source_zenloop_data_source.go b/internal/provider/source_zenloop_data_source.go
old mode 100755
new mode 100644
index fc6a92b9d..3d5b1eeed
--- a/internal/provider/source_zenloop_data_source.go
+++ b/internal/provider/source_zenloop_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,11 +29,11 @@ type SourceZenloopDataSource struct {
// SourceZenloopDataSourceModel describes the data model.
type SourceZenloopDataSourceModel struct {
- Configuration SourceZenloop `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -49,46 +47,20 @@ func (r *SourceZenloopDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceZenloop DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "api_token": schema.StringAttribute{
- Computed: true,
- Description: `Zenloop API Token. You can get the API token in settings page here `,
- },
- "date_from": schema.StringAttribute{
- Computed: true,
- Description: `Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24. Leave empty if only data from current data should be synced`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zenloop",
- ),
- },
- Description: `must be one of ["zenloop"]`,
- },
- "survey_group_id": schema.StringAttribute{
- Computed: true,
- Description: `Zenloop Survey Group ID. Can be found by pulling All Survey Groups via SurveyGroups stream. Leave empty to pull answers from all survey groups`,
- },
- "survey_id": schema.StringAttribute{
- Computed: true,
- Description: `Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_zenloop_data_source_sdk.go b/internal/provider/source_zenloop_data_source_sdk.go
old mode 100755
new mode 100644
index f29c9d901..fceec6570
--- a/internal/provider/source_zenloop_data_source_sdk.go
+++ b/internal/provider/source_zenloop_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceZenloopDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_zenloop_resource.go b/internal/provider/source_zenloop_resource.go
old mode 100755
new mode 100644
index 829b70359..28029a95c
--- a/internal/provider/source_zenloop_resource.go
+++ b/internal/provider/source_zenloop_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceZenloopResource struct {
// SourceZenloopResourceModel describes the resource data model.
type SourceZenloopResourceModel struct {
Configuration SourceZenloop `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,21 +56,13 @@ func (r *SourceZenloopResource) Schema(ctx context.Context, req resource.SchemaR
Attributes: map[string]schema.Attribute{
"api_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `Zenloop API Token. You can get the API token in settings page here `,
},
"date_from": schema.StringAttribute{
Optional: true,
Description: `Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24. Leave empty if only data from current data should be synced`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zenloop",
- ),
- },
- Description: `must be one of ["zenloop"]`,
- },
"survey_group_id": schema.StringAttribute{
Optional: true,
Description: `Zenloop Survey Group ID. Can be found by pulling All Survey Groups via SurveyGroups stream. Leave empty to pull answers from all survey groups`,
@@ -81,13 +73,24 @@ func (r *SourceZenloopResource) Schema(ctx context.Context, req resource.SchemaR
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -151,7 +154,7 @@ func (r *SourceZenloopResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceZenloop(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -327,5 +330,5 @@ func (r *SourceZenloopResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceZenloopResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_zenloop_resource_sdk.go b/internal/provider/source_zenloop_resource_sdk.go
old mode 100755
new mode 100644
index 141401dfb..0d62cca6c
--- a/internal/provider/source_zenloop_resource_sdk.go
+++ b/internal/provider/source_zenloop_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -15,7 +15,6 @@ func (r *SourceZenloopResourceModel) ToCreateSDKType() *shared.SourceZenloopCrea
} else {
dateFrom = nil
}
- sourceType := shared.SourceZenloopZenloop(r.Configuration.SourceType.ValueString())
surveyGroupID := new(string)
if !r.Configuration.SurveyGroupID.IsUnknown() && !r.Configuration.SurveyGroupID.IsNull() {
*surveyGroupID = r.Configuration.SurveyGroupID.ValueString()
@@ -31,10 +30,15 @@ func (r *SourceZenloopResourceModel) ToCreateSDKType() *shared.SourceZenloopCrea
configuration := shared.SourceZenloop{
APIToken: apiToken,
DateFrom: dateFrom,
- SourceType: sourceType,
SurveyGroupID: surveyGroupID,
SurveyID: surveyID,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -45,6 +49,7 @@ func (r *SourceZenloopResourceModel) ToCreateSDKType() *shared.SourceZenloopCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceZenloopCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_zohocrm_data_source.go b/internal/provider/source_zohocrm_data_source.go
old mode 100755
new mode 100644
index 45e265b48..c9cde6dbc
--- a/internal/provider/source_zohocrm_data_source.go
+++ b/internal/provider/source_zohocrm_data_source.go
@@ -3,16 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -32,11 +29,11 @@ type SourceZohoCrmDataSource struct {
// SourceZohoCrmDataSourceModel describes the data model.
type SourceZohoCrmDataSourceModel struct {
- Configuration SourceZohoCrm `tfsdk:"configuration"`
- Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
- SourceID types.String `tfsdk:"source_id"`
- WorkspaceID types.String `tfsdk:"workspace_id"`
+ Configuration types.String `tfsdk:"configuration"`
+ Name types.String `tfsdk:"name"`
+ SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
+ WorkspaceID types.String `tfsdk:"workspace_id"`
}
// Metadata returns the data source type name.
@@ -50,90 +47,20 @@ func (r *SourceZohoCrmDataSource) Schema(ctx context.Context, req datasource.Sch
MarkdownDescription: "SourceZohoCrm DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `OAuth2.0 Client ID`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `OAuth2.0 Client Secret`,
- },
- "dc_region": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "US",
- "AU",
- "EU",
- "IN",
- "CN",
- "JP",
- ),
- },
- MarkdownDescription: `must be one of ["US", "AU", "EU", "IN", "CN", "JP"]` + "\n" +
- `Please choose the region of your Data Center location. More info by this Link`,
- },
- "edition": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Free",
- "Standard",
- "Professional",
- "Enterprise",
- "Ultimate",
- ),
- },
- MarkdownDescription: `must be one of ["Free", "Standard", "Professional", "Enterprise", "Ultimate"]` + "\n" +
- `Choose your Edition of Zoho CRM to determine API Concurrency Limits`,
- },
- "environment": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Production",
- "Developer",
- "Sandbox",
- ),
- },
- MarkdownDescription: `must be one of ["Production", "Developer", "Sandbox"]` + "\n" +
- `Please choose the environment`,
- },
- "refresh_token": schema.StringAttribute{
- Computed: true,
- Description: `OAuth2.0 Refresh Token`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zoho-crm",
- ),
- },
- Description: `must be one of ["zoho-crm"]`,
- },
- "start_datetime": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- validators.IsRFC3339(),
- },
- Description: `ISO 8601, for instance: ` + "`" + `YYYY-MM-DD` + "`" + `, ` + "`" + `YYYY-MM-DD HH:MM:SS+HH:MM` + "`" + ``,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_zohocrm_data_source_sdk.go b/internal/provider/source_zohocrm_data_source_sdk.go
old mode 100755
new mode 100644
index 02159d59f..bb04e06dd
--- a/internal/provider/source_zohocrm_data_source_sdk.go
+++ b/internal/provider/source_zohocrm_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceZohoCrmDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_zohocrm_resource.go b/internal/provider/source_zohocrm_resource.go
old mode 100755
new mode 100644
index 669cb5c74..fba9b5c18
--- a/internal/provider/source_zohocrm_resource.go
+++ b/internal/provider/source_zohocrm_resource.go
@@ -3,18 +3,19 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/validators"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/validators"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -36,6 +37,7 @@ type SourceZohoCrmResource struct {
// SourceZohoCrmResourceModel describes the resource data model.
type SourceZohoCrmResourceModel struct {
Configuration SourceZohoCrm `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -65,6 +67,8 @@ func (r *SourceZohoCrmResource) Schema(ctx context.Context, req resource.SchemaR
},
"dc_region": schema.StringAttribute{
Required: true,
+ MarkdownDescription: `must be one of ["US", "AU", "EU", "IN", "CN", "JP"]` + "\n" +
+ `Please choose the region of your Data Center location. More info by this Link`,
Validators: []validator.String{
stringvalidator.OneOf(
"US",
@@ -75,11 +79,11 @@ func (r *SourceZohoCrmResource) Schema(ctx context.Context, req resource.SchemaR
"JP",
),
},
- MarkdownDescription: `must be one of ["US", "AU", "EU", "IN", "CN", "JP"]` + "\n" +
- `Please choose the region of your Data Center location. More info by this Link`,
},
"edition": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["Free", "Standard", "Professional", "Enterprise", "Ultimate"]; Default: "Free"` + "\n" +
+ `Choose your Edition of Zoho CRM to determine API Concurrency Limits`,
Validators: []validator.String{
stringvalidator.OneOf(
"Free",
@@ -89,11 +93,11 @@ func (r *SourceZohoCrmResource) Schema(ctx context.Context, req resource.SchemaR
"Ultimate",
),
},
- MarkdownDescription: `must be one of ["Free", "Standard", "Professional", "Enterprise", "Ultimate"]` + "\n" +
- `Choose your Edition of Zoho CRM to determine API Concurrency Limits`,
},
"environment": schema.StringAttribute{
Required: true,
+ MarkdownDescription: `must be one of ["Production", "Developer", "Sandbox"]` + "\n" +
+ `Please choose the environment`,
Validators: []validator.String{
stringvalidator.OneOf(
"Production",
@@ -101,38 +105,39 @@ func (r *SourceZohoCrmResource) Schema(ctx context.Context, req resource.SchemaR
"Sandbox",
),
},
- MarkdownDescription: `must be one of ["Production", "Developer", "Sandbox"]` + "\n" +
- `Please choose the environment`,
},
"refresh_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `OAuth2.0 Refresh Token`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zoho-crm",
- ),
- },
- Description: `must be one of ["zoho-crm"]`,
- },
"start_datetime": schema.StringAttribute{
- Optional: true,
+ Optional: true,
+ Description: `ISO 8601, for instance: ` + "`" + `YYYY-MM-DD` + "`" + `, ` + "`" + `YYYY-MM-DD HH:MM:SS+HH:MM` + "`" + ``,
Validators: []validator.String{
validators.IsRFC3339(),
},
- Description: `ISO 8601, for instance: ` + "`" + `YYYY-MM-DD` + "`" + `, ` + "`" + `YYYY-MM-DD HH:MM:SS+HH:MM` + "`" + ``,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -196,7 +201,7 @@ func (r *SourceZohoCrmResource) Create(ctx context.Context, req resource.CreateR
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceZohoCrm(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -372,5 +377,5 @@ func (r *SourceZohoCrmResource) Delete(ctx context.Context, req resource.DeleteR
}
func (r *SourceZohoCrmResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_zohocrm_resource_sdk.go b/internal/provider/source_zohocrm_resource_sdk.go
old mode 100755
new mode 100644
index ba92cd7fd..185e469e7
--- a/internal/provider/source_zohocrm_resource_sdk.go
+++ b/internal/provider/source_zohocrm_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
"time"
)
@@ -12,10 +12,14 @@ func (r *SourceZohoCrmResourceModel) ToCreateSDKType() *shared.SourceZohoCrmCrea
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
dcRegion := shared.SourceZohoCrmDataCenterLocation(r.Configuration.DcRegion.ValueString())
- edition := shared.SourceZohoCRMZohoCRMEdition(r.Configuration.Edition.ValueString())
+ edition := new(shared.SourceZohoCRMZohoCRMEdition)
+ if !r.Configuration.Edition.IsUnknown() && !r.Configuration.Edition.IsNull() {
+ *edition = shared.SourceZohoCRMZohoCRMEdition(r.Configuration.Edition.ValueString())
+ } else {
+ edition = nil
+ }
environment := shared.SourceZohoCrmEnvironment(r.Configuration.Environment.ValueString())
refreshToken := r.Configuration.RefreshToken.ValueString()
- sourceType := shared.SourceZohoCrmZohoCrm(r.Configuration.SourceType.ValueString())
startDatetime := new(time.Time)
if !r.Configuration.StartDatetime.IsUnknown() && !r.Configuration.StartDatetime.IsNull() {
*startDatetime, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDatetime.ValueString())
@@ -29,9 +33,14 @@ func (r *SourceZohoCrmResourceModel) ToCreateSDKType() *shared.SourceZohoCrmCrea
Edition: edition,
Environment: environment,
RefreshToken: refreshToken,
- SourceType: sourceType,
StartDatetime: startDatetime,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -42,6 +51,7 @@ func (r *SourceZohoCrmResourceModel) ToCreateSDKType() *shared.SourceZohoCrmCrea
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceZohoCrmCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -57,8 +67,13 @@ func (r *SourceZohoCrmResourceModel) ToGetSDKType() *shared.SourceZohoCrmCreateR
func (r *SourceZohoCrmResourceModel) ToUpdateSDKType() *shared.SourceZohoCrmPutRequest {
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
- dcRegion := shared.SourceZohoCrmUpdateDataCenterLocation(r.Configuration.DcRegion.ValueString())
- edition := shared.SourceZohoCRMUpdateZohoCRMEdition(r.Configuration.Edition.ValueString())
+ dcRegion := shared.DataCenterLocation(r.Configuration.DcRegion.ValueString())
+ edition := new(shared.ZohoCRMEdition)
+ if !r.Configuration.Edition.IsUnknown() && !r.Configuration.Edition.IsNull() {
+ *edition = shared.ZohoCRMEdition(r.Configuration.Edition.ValueString())
+ } else {
+ edition = nil
+ }
environment := shared.SourceZohoCrmUpdateEnvironment(r.Configuration.Environment.ValueString())
refreshToken := r.Configuration.RefreshToken.ValueString()
startDatetime := new(time.Time)
diff --git a/internal/provider/source_zoom_data_source.go b/internal/provider/source_zoom_data_source.go
old mode 100755
new mode 100644
index 8344b1ee4..d84849e61
--- a/internal/provider/source_zoom_data_source.go
+++ b/internal/provider/source_zoom_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceZoomDataSource struct {
// SourceZoomDataSourceModel describes the data model.
type SourceZoomDataSourceModel struct {
- Configuration SourceZoom `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,34 +47,20 @@ func (r *SourceZoomDataSource) Schema(ctx context.Context, req datasource.Schema
MarkdownDescription: "SourceZoom DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "jwt_token": schema.StringAttribute{
- Computed: true,
- Description: `JWT Token`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zoom",
- ),
- },
- Description: `must be one of ["zoom"]`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_zoom_data_source_sdk.go b/internal/provider/source_zoom_data_source_sdk.go
old mode 100755
new mode 100644
index 31b18551e..0e8058a37
--- a/internal/provider/source_zoom_data_source_sdk.go
+++ b/internal/provider/source_zoom_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceZoomDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_zoom_resource.go b/internal/provider/source_zoom_resource.go
old mode 100755
new mode 100644
index 2eeaeb39f..a9fae2cea
--- a/internal/provider/source_zoom_resource.go
+++ b/internal/provider/source_zoom_resource.go
@@ -3,18 +3,17 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -35,6 +34,7 @@ type SourceZoomResource struct {
// SourceZoomResourceModel describes the resource data model.
type SourceZoomResourceModel struct {
Configuration SourceZoom `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -56,26 +56,29 @@ func (r *SourceZoomResource) Schema(ctx context.Context, req resource.SchemaRequ
Attributes: map[string]schema.Attribute{
"jwt_token": schema.StringAttribute{
Required: true,
+ Sensitive: true,
Description: `JWT Token`,
},
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zoom",
- ),
- },
- Description: `must be one of ["zoom"]`,
- },
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -139,7 +142,7 @@ func (r *SourceZoomResource) Create(ctx context.Context, req resource.CreateRequ
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceZoom(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -315,5 +318,5 @@ func (r *SourceZoomResource) Delete(ctx context.Context, req resource.DeleteRequ
}
func (r *SourceZoomResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_zoom_resource_sdk.go b/internal/provider/source_zoom_resource_sdk.go
old mode 100755
new mode 100644
index 52d0826dd..67c73f14f
--- a/internal/provider/source_zoom_resource_sdk.go
+++ b/internal/provider/source_zoom_resource_sdk.go
@@ -3,16 +3,20 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceZoomResourceModel) ToCreateSDKType() *shared.SourceZoomCreateRequest {
jwtToken := r.Configuration.JwtToken.ValueString()
- sourceType := shared.SourceZoomZoom(r.Configuration.SourceType.ValueString())
configuration := shared.SourceZoom{
- JwtToken: jwtToken,
- SourceType: sourceType,
+ JwtToken: jwtToken,
+ }
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
}
name := r.Name.ValueString()
secretID := new(string)
@@ -24,6 +28,7 @@ func (r *SourceZoomResourceModel) ToCreateSDKType() *shared.SourceZoomCreateRequ
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceZoomCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
diff --git a/internal/provider/source_zuora_data_source.go b/internal/provider/source_zuora_data_source.go
old mode 100755
new mode 100644
index b2d5cf6d7..d97f53565
--- a/internal/provider/source_zuora_data_source.go
+++ b/internal/provider/source_zuora_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -31,10 +29,10 @@ type SourceZuoraDataSource struct {
// SourceZuoraDataSourceModel describes the data model.
type SourceZuoraDataSourceModel struct {
- Configuration SourceZuora `tfsdk:"configuration"`
+ Configuration types.String `tfsdk:"configuration"`
Name types.String `tfsdk:"name"`
- SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
+ SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
@@ -49,75 +47,20 @@ func (r *SourceZuoraDataSource) Schema(ctx context.Context, req datasource.Schem
MarkdownDescription: "SourceZuora DataSource",
Attributes: map[string]schema.Attribute{
- "configuration": schema.SingleNestedAttribute{
+ "configuration": schema.StringAttribute{
Computed: true,
- Attributes: map[string]schema.Attribute{
- "client_id": schema.StringAttribute{
- Computed: true,
- Description: `Your OAuth user Client ID`,
- },
- "client_secret": schema.StringAttribute{
- Computed: true,
- Description: `Your OAuth user Client Secret`,
- },
- "data_query": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "Live",
- "Unlimited",
- ),
- },
- MarkdownDescription: `must be one of ["Live", "Unlimited"]` + "\n" +
- `Choose between ` + "`" + `Live` + "`" + `, or ` + "`" + `Unlimited` + "`" + ` - the optimized, replicated database at 12 hours freshness for high volume extraction Link`,
- },
- "source_type": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zuora",
- ),
- },
- Description: `must be one of ["zuora"]`,
- },
- "start_date": schema.StringAttribute{
- Computed: true,
- Description: `Start Date in format: YYYY-MM-DD`,
- },
- "tenant_endpoint": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "US Production",
- "US Cloud Production",
- "US API Sandbox",
- "US Cloud API Sandbox",
- "US Central Sandbox",
- "US Performance Test",
- "EU Production",
- "EU API Sandbox",
- "EU Central Sandbox",
- ),
- },
- MarkdownDescription: `must be one of ["US Production", "US Cloud Production", "US API Sandbox", "US Cloud API Sandbox", "US Central Sandbox", "US Performance Test", "EU Production", "EU API Sandbox", "EU Central Sandbox"]` + "\n" +
- `Please choose the right endpoint where your Tenant is located. More info by this Link`,
- },
- "window_in_days": schema.StringAttribute{
- Computed: true,
- Description: `The amount of days for each data-chunk begining from start_date. Bigger the value - faster the fetch. (0.1 - as for couple of hours, 1 - as for a Day; 364 - as for a Year).`,
- },
- },
+ MarkdownDescription: `Parsed as JSON.` + "\n" +
+ `The values required to configure the source.`,
},
"name": schema.StringAttribute{
Computed: true,
},
- "secret_id": schema.StringAttribute{
- Optional: true,
- Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
- },
"source_id": schema.StringAttribute{
Required: true,
},
+ "source_type": schema.StringAttribute{
+ Computed: true,
+ },
"workspace_id": schema.StringAttribute{
Computed: true,
},
diff --git a/internal/provider/source_zuora_data_source_sdk.go b/internal/provider/source_zuora_data_source_sdk.go
old mode 100755
new mode 100644
index 442e83e3c..8a4af82f9
--- a/internal/provider/source_zuora_data_source_sdk.go
+++ b/internal/provider/source_zuora_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceZuoraDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) {
+ configurationResult, _ := json.Marshal(resp.Configuration)
+ r.Configuration = types.StringValue(string(configurationResult))
r.Name = types.StringValue(resp.Name)
r.SourceID = types.StringValue(resp.SourceID)
+ r.SourceType = types.StringValue(resp.SourceType)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/source_zuora_resource.go b/internal/provider/source_zuora_resource.go
old mode 100755
new mode 100644
index a7ed43b6b..0d0f99439
--- a/internal/provider/source_zuora_resource.go
+++ b/internal/provider/source_zuora_resource.go
@@ -3,17 +3,18 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
@@ -35,6 +36,7 @@ type SourceZuoraResource struct {
// SourceZuoraResourceModel describes the resource data model.
type SourceZuoraResourceModel struct {
Configuration SourceZuora `tfsdk:"configuration"`
+ DefinitionID types.String `tfsdk:"definition_id"`
Name types.String `tfsdk:"name"`
SecretID types.String `tfsdk:"secret_id"`
SourceID types.String `tfsdk:"source_id"`
@@ -63,24 +65,15 @@ func (r *SourceZuoraResource) Schema(ctx context.Context, req resource.SchemaReq
Description: `Your OAuth user Client Secret`,
},
"data_query": schema.StringAttribute{
- Required: true,
+ Optional: true,
+ MarkdownDescription: `must be one of ["Live", "Unlimited"]; Default: "Live"` + "\n" +
+ `Choose between ` + "`" + `Live` + "`" + `, or ` + "`" + `Unlimited` + "`" + ` - the optimized, replicated database at 12 hours freshness for high volume extraction Link`,
Validators: []validator.String{
stringvalidator.OneOf(
"Live",
"Unlimited",
),
},
- MarkdownDescription: `must be one of ["Live", "Unlimited"]` + "\n" +
- `Choose between ` + "`" + `Live` + "`" + `, or ` + "`" + `Unlimited` + "`" + ` - the optimized, replicated database at 12 hours freshness for high volume extraction Link`,
- },
- "source_type": schema.StringAttribute{
- Required: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "zuora",
- ),
- },
- Description: `must be one of ["zuora"]`,
},
"start_date": schema.StringAttribute{
Required: true,
@@ -88,6 +81,8 @@ func (r *SourceZuoraResource) Schema(ctx context.Context, req resource.SchemaReq
},
"tenant_endpoint": schema.StringAttribute{
Required: true,
+ MarkdownDescription: `must be one of ["US Production", "US Cloud Production", "US API Sandbox", "US Cloud API Sandbox", "US Central Sandbox", "US Performance Test", "EU Production", "EU API Sandbox", "EU Central Sandbox"]` + "\n" +
+ `Please choose the right endpoint where your Tenant is located. More info by this Link`,
Validators: []validator.String{
stringvalidator.OneOf(
"US Production",
@@ -101,22 +96,32 @@ func (r *SourceZuoraResource) Schema(ctx context.Context, req resource.SchemaReq
"EU Central Sandbox",
),
},
- MarkdownDescription: `must be one of ["US Production", "US Cloud Production", "US API Sandbox", "US Cloud API Sandbox", "US Central Sandbox", "US Performance Test", "EU Production", "EU API Sandbox", "EU Central Sandbox"]` + "\n" +
- `Please choose the right endpoint where your Tenant is located. More info by this Link`,
},
"window_in_days": schema.StringAttribute{
- Optional: true,
- Description: `The amount of days for each data-chunk begining from start_date. Bigger the value - faster the fetch. (0.1 - as for couple of hours, 1 - as for a Day; 364 - as for a Year).`,
+ Optional: true,
+ MarkdownDescription: `Default: "90"` + "\n" +
+ `The amount of days for each data-chunk begining from start_date. Bigger the value - faster the fetch. (0.1 - as for couple of hours, 1 - as for a Day; 364 - as for a Year).`,
},
},
},
+ "definition_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
+ Optional: true,
+ Description: `The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.`,
+ },
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
- Required: true,
+ Required: true,
+ Description: `Name of the source e.g. dev-mysql-instance.`,
},
"secret_id": schema.StringAttribute{
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ },
Optional: true,
Description: `Optional secretID obtained through the public API OAuth redirect flow.`,
},
@@ -180,7 +185,7 @@ func (r *SourceZuoraResource) Create(ctx context.Context, req resource.CreateReq
return
}
- request := *data.ToCreateSDKType()
+ request := data.ToCreateSDKType()
res, err := r.client.Sources.CreateSourceZuora(ctx, request)
if err != nil {
resp.Diagnostics.AddError("failure to invoke API", err.Error())
@@ -356,5 +361,5 @@ func (r *SourceZuoraResource) Delete(ctx context.Context, req resource.DeleteReq
}
func (r *SourceZuoraResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("source_id"), req.ID)...)
}
diff --git a/internal/provider/source_zuora_resource_sdk.go b/internal/provider/source_zuora_resource_sdk.go
old mode 100755
new mode 100644
index c2491b13d..085009329
--- a/internal/provider/source_zuora_resource_sdk.go
+++ b/internal/provider/source_zuora_resource_sdk.go
@@ -3,15 +3,19 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *SourceZuoraResourceModel) ToCreateSDKType() *shared.SourceZuoraCreateRequest {
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
- dataQuery := shared.SourceZuoraDataQueryType(r.Configuration.DataQuery.ValueString())
- sourceType := shared.SourceZuoraZuora(r.Configuration.SourceType.ValueString())
+ dataQuery := new(shared.SourceZuoraDataQueryType)
+ if !r.Configuration.DataQuery.IsUnknown() && !r.Configuration.DataQuery.IsNull() {
+ *dataQuery = shared.SourceZuoraDataQueryType(r.Configuration.DataQuery.ValueString())
+ } else {
+ dataQuery = nil
+ }
startDate := r.Configuration.StartDate.ValueString()
tenantEndpoint := shared.SourceZuoraTenantEndpointLocation(r.Configuration.TenantEndpoint.ValueString())
windowInDays := new(string)
@@ -24,11 +28,16 @@ func (r *SourceZuoraResourceModel) ToCreateSDKType() *shared.SourceZuoraCreateRe
ClientID: clientID,
ClientSecret: clientSecret,
DataQuery: dataQuery,
- SourceType: sourceType,
StartDate: startDate,
TenantEndpoint: tenantEndpoint,
WindowInDays: windowInDays,
}
+ definitionID := new(string)
+ if !r.DefinitionID.IsUnknown() && !r.DefinitionID.IsNull() {
+ *definitionID = r.DefinitionID.ValueString()
+ } else {
+ definitionID = nil
+ }
name := r.Name.ValueString()
secretID := new(string)
if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() {
@@ -39,6 +48,7 @@ func (r *SourceZuoraResourceModel) ToCreateSDKType() *shared.SourceZuoraCreateRe
workspaceID := r.WorkspaceID.ValueString()
out := shared.SourceZuoraCreateRequest{
Configuration: configuration,
+ DefinitionID: definitionID,
Name: name,
SecretID: secretID,
WorkspaceID: workspaceID,
@@ -54,9 +64,14 @@ func (r *SourceZuoraResourceModel) ToGetSDKType() *shared.SourceZuoraCreateReque
func (r *SourceZuoraResourceModel) ToUpdateSDKType() *shared.SourceZuoraPutRequest {
clientID := r.Configuration.ClientID.ValueString()
clientSecret := r.Configuration.ClientSecret.ValueString()
- dataQuery := shared.SourceZuoraUpdateDataQueryType(r.Configuration.DataQuery.ValueString())
+ dataQuery := new(shared.DataQueryType)
+ if !r.Configuration.DataQuery.IsUnknown() && !r.Configuration.DataQuery.IsNull() {
+ *dataQuery = shared.DataQueryType(r.Configuration.DataQuery.ValueString())
+ } else {
+ dataQuery = nil
+ }
startDate := r.Configuration.StartDate.ValueString()
- tenantEndpoint := shared.SourceZuoraUpdateTenantEndpointLocation(r.Configuration.TenantEndpoint.ValueString())
+ tenantEndpoint := shared.TenantEndpointLocation(r.Configuration.TenantEndpoint.ValueString())
windowInDays := new(string)
if !r.Configuration.WindowInDays.IsUnknown() && !r.Configuration.WindowInDays.IsNull() {
*windowInDays = r.Configuration.WindowInDays.ValueString()
diff --git a/internal/provider/type_source_linkedin_ads_ad_analytics_report_configuration.go b/internal/provider/type_ad_analytics_report_configuration.go
old mode 100755
new mode 100644
similarity index 83%
rename from internal/provider/type_source_linkedin_ads_ad_analytics_report_configuration.go
rename to internal/provider/type_ad_analytics_report_configuration.go
index 0c3910108..b3eca9fd4
--- a/internal/provider/type_source_linkedin_ads_ad_analytics_report_configuration.go
+++ b/internal/provider/type_ad_analytics_report_configuration.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceLinkedinAdsAdAnalyticsReportConfiguration struct {
+type AdAnalyticsReportConfiguration struct {
Name types.String `tfsdk:"name"`
PivotBy types.String `tfsdk:"pivot_by"`
TimeGranularity types.String `tfsdk:"time_granularity"`
diff --git a/internal/provider/type_destination_firebolt_loading_method_sql_inserts.go b/internal/provider/type_aescbc_envelope_encryption.go
old mode 100755
new mode 100644
similarity index 61%
rename from internal/provider/type_destination_firebolt_loading_method_sql_inserts.go
rename to internal/provider/type_aescbc_envelope_encryption.go
index 71b297a92..f2182c364
--- a/internal/provider/type_destination_firebolt_loading_method_sql_inserts.go
+++ b/internal/provider/type_aescbc_envelope_encryption.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationFireboltLoadingMethodSQLInserts struct {
- Method types.String `tfsdk:"method"`
+type AESCBCEnvelopeEncryption struct {
+ KeyEncryptingKey types.String `tfsdk:"key_encrypting_key"`
}
diff --git a/internal/provider/type_destination_databricks_data_source_amazon_s3.go b/internal/provider/type_amazon_s3.go
old mode 100755
new mode 100644
similarity index 81%
rename from internal/provider/type_destination_databricks_data_source_amazon_s3.go
rename to internal/provider/type_amazon_s3.go
index 6e806e554..87706e140
--- a/internal/provider/type_destination_databricks_data_source_amazon_s3.go
+++ b/internal/provider/type_amazon_s3.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationDatabricksDataSourceAmazonS3 struct {
- DataSourceType types.String `tfsdk:"data_source_type"`
+type AmazonS3 struct {
FileNamePattern types.String `tfsdk:"file_name_pattern"`
S3AccessKeyID types.String `tfsdk:"s3_access_key_id"`
S3BucketName types.String `tfsdk:"s3_bucket_name"`
diff --git a/internal/provider/type_and_group.go b/internal/provider/type_and_group.go
new file mode 100644
index 000000000..d0477fbc5
--- /dev/null
+++ b/internal/provider/type_and_group.go
@@ -0,0 +1,7 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type AndGroup struct {
+ Expressions []Expression `tfsdk:"expressions"`
+}
diff --git a/internal/provider/type_source_xkcd.go b/internal/provider/type_api_key.go
old mode 100755
new mode 100644
similarity index 68%
rename from internal/provider/type_source_xkcd.go
rename to internal/provider/type_api_key.go
index 04d3e6e4c..5776dccbc
--- a/internal/provider/type_source_xkcd.go
+++ b/internal/provider/type_api_key.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceXkcd struct {
- SourceType types.String `tfsdk:"source_type"`
+type APIKey struct {
+ Apikey types.String `tfsdk:"apikey"`
}
diff --git a/internal/provider/type_source_mysql_ssl_modes_required.go b/internal/provider/type_api_key_auth.go
old mode 100755
new mode 100644
similarity index 67%
rename from internal/provider/type_source_mysql_ssl_modes_required.go
rename to internal/provider/type_api_key_auth.go
index 65e4acb1f..f5cb5054b
--- a/internal/provider/type_source_mysql_ssl_modes_required.go
+++ b/internal/provider/type_api_key_auth.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceMysqlSSLModesRequired struct {
- Mode types.String `tfsdk:"mode"`
+type APIKeyAuth struct {
+ APIKey types.String `tfsdk:"api_key"`
}
diff --git a/internal/provider/type_destination_elasticsearch_authentication_method_api_key_secret.go b/internal/provider/type_api_key_secret.go
old mode 100755
new mode 100644
similarity index 69%
rename from internal/provider/type_destination_elasticsearch_authentication_method_api_key_secret.go
rename to internal/provider/type_api_key_secret.go
index 1da140eb7..3d770e328
--- a/internal/provider/type_destination_elasticsearch_authentication_method_api_key_secret.go
+++ b/internal/provider/type_api_key_secret.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationElasticsearchAuthenticationMethodAPIKeySecret struct {
+type APIKeySecret struct {
APIKeyID types.String `tfsdk:"api_key_id"`
APIKeySecret types.String `tfsdk:"api_key_secret"`
- Method types.String `tfsdk:"method"`
}
diff --git a/internal/provider/type_source_mysql_ssl_modes_preferred.go b/internal/provider/type_api_password.go
old mode 100755
new mode 100644
similarity index 67%
rename from internal/provider/type_source_mysql_ssl_modes_preferred.go
rename to internal/provider/type_api_password.go
index b0e2e9b29..8fd111720
--- a/internal/provider/type_source_mysql_ssl_modes_preferred.go
+++ b/internal/provider/type_api_password.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceMysqlSSLModesPreferred struct {
- Mode types.String `tfsdk:"mode"`
+type APIPassword struct {
+ APIPassword types.String `tfsdk:"api_password"`
}
diff --git a/internal/provider/type_source_onesignal_applications.go b/internal/provider/type_applications.go
old mode 100755
new mode 100644
similarity index 87%
rename from internal/provider/type_source_onesignal_applications.go
rename to internal/provider/type_applications.go
index 3bba34b50..a9ed06199
--- a/internal/provider/type_source_onesignal_applications.go
+++ b/internal/provider/type_applications.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceOnesignalApplications struct {
+type Applications struct {
AppAPIKey types.String `tfsdk:"app_api_key"`
AppID types.String `tfsdk:"app_id"`
AppName types.String `tfsdk:"app_name"`
diff --git a/internal/provider/type_source_snowflake_authorization_method_o_auth20.go b/internal/provider/type_authenticate_via_google_oauth.go
old mode 100755
new mode 100644
similarity index 77%
rename from internal/provider/type_source_snowflake_authorization_method_o_auth20.go
rename to internal/provider/type_authenticate_via_google_oauth.go
index 682eaa68f..0cc23b9b9
--- a/internal/provider/type_source_snowflake_authorization_method_o_auth20.go
+++ b/internal/provider/type_authenticate_via_google_oauth.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceSnowflakeAuthorizationMethodOAuth20 struct {
+type AuthenticateViaGoogleOauth struct {
AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
RefreshToken types.String `tfsdk:"refresh_token"`
diff --git a/internal/provider/type_source_youtube_analytics_authenticate_via_o_auth20.go b/internal/provider/type_authenticate_via_harvest_o_auth.go
old mode 100755
new mode 100644
similarity index 87%
rename from internal/provider/type_source_youtube_analytics_authenticate_via_o_auth20.go
rename to internal/provider/type_authenticate_via_harvest_o_auth.go
index 55e94e069..b3247c882
--- a/internal/provider/type_source_youtube_analytics_authenticate_via_o_auth20.go
+++ b/internal/provider/type_authenticate_via_harvest_o_auth.go
@@ -4,9 +4,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceYoutubeAnalyticsAuthenticateViaOAuth20 struct {
+type AuthenticateViaHarvestOAuth struct {
+ AdditionalProperties types.String `tfsdk:"additional_properties"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
RefreshToken types.String `tfsdk:"refresh_token"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
}
diff --git a/internal/provider/type_source_okta_authorization_method_o_auth20.go b/internal/provider/type_authenticate_via_lever_o_auth.go
old mode 100755
new mode 100644
similarity index 75%
rename from internal/provider/type_source_okta_authorization_method_o_auth20.go
rename to internal/provider/type_authenticate_via_lever_o_auth.go
index beabcd6e3..79fb8db23
--- a/internal/provider/type_source_okta_authorization_method_o_auth20.go
+++ b/internal/provider/type_authenticate_via_lever_o_auth.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceOktaAuthorizationMethodOAuth20 struct {
- AuthType types.String `tfsdk:"auth_type"`
+type AuthenticateViaLeverOAuth struct {
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
RefreshToken types.String `tfsdk:"refresh_token"`
diff --git a/internal/provider/type_source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft.go b/internal/provider/type_authenticate_via_microsoft.go
old mode 100755
new mode 100644
similarity index 70%
rename from internal/provider/type_source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft.go
rename to internal/provider/type_authenticate_via_microsoft.go
index c928da41a..1e5b673b5
--- a/internal/provider/type_source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft.go
+++ b/internal/provider/type_authenticate_via_microsoft.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft struct {
- AuthType types.String `tfsdk:"auth_type"`
+type AuthenticateViaMicrosoft struct {
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
TenantID types.String `tfsdk:"tenant_id"`
diff --git a/internal/provider/type_source_linkedin_ads_authentication_o_auth20.go b/internal/provider/type_authenticate_via_microsoft_o_auth20.go
old mode 100755
new mode 100644
similarity index 75%
rename from internal/provider/type_source_linkedin_ads_authentication_o_auth20.go
rename to internal/provider/type_authenticate_via_microsoft_o_auth20.go
index a206be21c..7481d11ad
--- a/internal/provider/type_source_linkedin_ads_authentication_o_auth20.go
+++ b/internal/provider/type_authenticate_via_microsoft_o_auth20.go
@@ -4,9 +4,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceLinkedinAdsAuthenticationOAuth20 struct {
- AuthMethod types.String `tfsdk:"auth_method"`
+type AuthenticateViaMicrosoftOAuth20 struct {
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
RefreshToken types.String `tfsdk:"refresh_token"`
+ TenantID types.String `tfsdk:"tenant_id"`
}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_allow1.go b/internal/provider/type_authenticate_with_api_token.go
old mode 100755
new mode 100644
similarity index 70%
rename from internal/provider/type_source_alloydb_ssl_modes_allow1.go
rename to internal/provider/type_authenticate_with_api_token.go
index 8517e36e6..5a7011e4a
--- a/internal/provider/type_source_alloydb_ssl_modes_allow1.go
+++ b/internal/provider/type_authenticate_with_api_token.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceAlloydbSSLModesAllow1 struct {
- Mode types.String `tfsdk:"mode"`
+type AuthenticateWithAPIToken struct {
AdditionalProperties types.String `tfsdk:"additional_properties"`
+ APIKey types.String `tfsdk:"api_key"`
}
diff --git a/internal/provider/type_source_github_authentication_personal_access_token.go b/internal/provider/type_authenticate_with_personal_access_token.go
old mode 100755
new mode 100644
similarity index 65%
rename from internal/provider/type_source_github_authentication_personal_access_token.go
rename to internal/provider/type_authenticate_with_personal_access_token.go
index 747282179..90fc515a3
--- a/internal/provider/type_source_github_authentication_personal_access_token.go
+++ b/internal/provider/type_authenticate_with_personal_access_token.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceGithubAuthenticationPersonalAccessToken struct {
- OptionTitle types.String `tfsdk:"option_title"`
+type AuthenticateWithPersonalAccessToken struct {
PersonalAccessToken types.String `tfsdk:"personal_access_token"`
}
diff --git a/internal/provider/type_avro_apache_avro.go b/internal/provider/type_avro_apache_avro.go
new file mode 100644
index 000000000..1f060e5d7
--- /dev/null
+++ b/internal/provider/type_avro_apache_avro.go
@@ -0,0 +1,10 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type AvroApacheAvro struct {
+ CompressionCodec DestinationGcsCompressionCodec `tfsdk:"compression_codec"`
+ FormatType types.String `tfsdk:"format_type"`
+}
diff --git a/internal/provider/type_source_s3_file_format_avro.go b/internal/provider/type_avro_format.go
old mode 100755
new mode 100644
similarity index 66%
rename from internal/provider/type_source_s3_file_format_avro.go
rename to internal/provider/type_avro_format.go
index d33b14a94..cc1487ff6
--- a/internal/provider/type_source_s3_file_format_avro.go
+++ b/internal/provider/type_avro_format.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceS3FileFormatAvro struct {
- Filetype types.String `tfsdk:"filetype"`
+type AvroFormat struct {
+ DoubleAsString types.Bool `tfsdk:"double_as_string"`
}
diff --git a/internal/provider/type_source_file_secure_storage_provider_az_blob_azure_blob_storage.go b/internal/provider/type_az_blob_azure_blob_storage.go
old mode 100755
new mode 100644
similarity index 73%
rename from internal/provider/type_source_file_secure_storage_provider_az_blob_azure_blob_storage.go
rename to internal/provider/type_az_blob_azure_blob_storage.go
index 63ab2b277..2d8aeb7f1
--- a/internal/provider/type_source_file_secure_storage_provider_az_blob_azure_blob_storage.go
+++ b/internal/provider/type_az_blob_azure_blob_storage.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceFileSecureStorageProviderAzBlobAzureBlobStorage struct {
+type AzBlobAzureBlobStorage struct {
SasToken types.String `tfsdk:"sas_token"`
SharedKey types.String `tfsdk:"shared_key"`
- Storage types.String `tfsdk:"storage"`
StorageAccount types.String `tfsdk:"storage_account"`
}
diff --git a/internal/provider/type_azure_open_ai.go b/internal/provider/type_azure_open_ai.go
new file mode 100644
index 000000000..34f2afbbb
--- /dev/null
+++ b/internal/provider/type_azure_open_ai.go
@@ -0,0 +1,11 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type AzureOpenAI struct {
+ APIBase types.String `tfsdk:"api_base"`
+ Deployment types.String `tfsdk:"deployment"`
+ OpenaiKey types.String `tfsdk:"openai_key"`
+}
diff --git a/internal/provider/type_by_markdown_header.go b/internal/provider/type_by_markdown_header.go
new file mode 100644
index 000000000..b80d5707a
--- /dev/null
+++ b/internal/provider/type_by_markdown_header.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type ByMarkdownHeader struct {
+ SplitLevel types.Int64 `tfsdk:"split_level"`
+}
diff --git a/internal/provider/type_by_programming_language.go b/internal/provider/type_by_programming_language.go
new file mode 100644
index 000000000..057644d3d
--- /dev/null
+++ b/internal/provider/type_by_programming_language.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type ByProgrammingLanguage struct {
+ Language types.String `tfsdk:"language"`
+}
diff --git a/internal/provider/type_destination_clickhouse_ssh_tunnel_method_no_tunnel.go b/internal/provider/type_by_separator.go
old mode 100755
new mode 100644
similarity index 54%
rename from internal/provider/type_destination_clickhouse_ssh_tunnel_method_no_tunnel.go
rename to internal/provider/type_by_separator.go
index 5de4913ba..ea77972d4
--- a/internal/provider/type_destination_clickhouse_ssh_tunnel_method_no_tunnel.go
+++ b/internal/provider/type_by_separator.go
@@ -4,6 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationClickhouseSSHTunnelMethodNoTunnel struct {
- TunnelMethod types.String `tfsdk:"tunnel_method"`
+type BySeparator struct {
+ KeepSeparator types.Bool `tfsdk:"keep_separator"`
+ Separators []types.String `tfsdk:"separators"`
}
diff --git a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2.go b/internal/provider/type_bzip2.go
old mode 100755
new mode 100644
similarity index 71%
rename from internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2.go
rename to internal/provider/type_bzip2.go
index dde2c994b..863966a9a
--- a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2.go
+++ b/internal/provider/type_bzip2.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 struct {
+type Bzip2 struct {
Codec types.String `tfsdk:"codec"`
}
diff --git a/internal/provider/type_central_api_router.go b/internal/provider/type_central_api_router.go
new file mode 100644
index 000000000..1f5b0eaf2
--- /dev/null
+++ b/internal/provider/type_central_api_router.go
@@ -0,0 +1,11 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type CentralAPIRouter struct {
+ SiteID types.String `tfsdk:"site_id"`
+ UserName types.String `tfsdk:"user_name"`
+ UserSecret types.String `tfsdk:"user_secret"`
+}
diff --git a/internal/provider/type_destination_langchain_indexing_chroma_local_persistance.go b/internal/provider/type_chroma_local_persistance.go
old mode 100755
new mode 100644
similarity index 71%
rename from internal/provider/type_destination_langchain_indexing_chroma_local_persistance.go
rename to internal/provider/type_chroma_local_persistance.go
index 8b628b9a2..14892c78d
--- a/internal/provider/type_destination_langchain_indexing_chroma_local_persistance.go
+++ b/internal/provider/type_chroma_local_persistance.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationLangchainIndexingChromaLocalPersistance struct {
+type ChromaLocalPersistance struct {
CollectionName types.String `tfsdk:"collection_name"`
DestinationPath types.String `tfsdk:"destination_path"`
- Mode types.String `tfsdk:"mode"`
}
diff --git a/internal/provider/type_destination_milvus_embedding_cohere.go b/internal/provider/type_cohere.go
old mode 100755
new mode 100644
similarity index 70%
rename from internal/provider/type_destination_milvus_embedding_cohere.go
rename to internal/provider/type_cohere.go
index 5c0b2141d..f0b5b90f1
--- a/internal/provider/type_destination_milvus_embedding_cohere.go
+++ b/internal/provider/type_cohere.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationMilvusEmbeddingCohere struct {
+type Cohere struct {
CohereKey types.String `tfsdk:"cohere_key"`
- Mode types.String `tfsdk:"mode"`
}
diff --git a/internal/provider/type_connection_schedule.go b/internal/provider/type_connection_schedule.go
old mode 100755
new mode 100644
diff --git a/internal/provider/type_connection_schedule_response.go b/internal/provider/type_connection_schedule_response.go
new file mode 100644
index 000000000..7d7765434
--- /dev/null
+++ b/internal/provider/type_connection_schedule_response.go
@@ -0,0 +1,11 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type ConnectionScheduleResponse struct {
+ BasicTiming types.String `tfsdk:"basic_timing"`
+ CronExpression types.String `tfsdk:"cron_expression"`
+ ScheduleType types.String `tfsdk:"schedule_type"`
+}
diff --git a/internal/provider/type_source_s3_file_format_csv.go b/internal/provider/type_csv.go
old mode 100755
new mode 100644
similarity index 89%
rename from internal/provider/type_source_s3_file_format_csv.go
rename to internal/provider/type_csv.go
index 86b77a95f..0ae542209
--- a/internal/provider/type_source_s3_file_format_csv.go
+++ b/internal/provider/type_csv.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceS3FileFormatCSV struct {
+type Csv struct {
AdditionalReaderOptions types.String `tfsdk:"additional_reader_options"`
AdvancedOptions types.String `tfsdk:"advanced_options"`
BlockSize types.Int64 `tfsdk:"block_size"`
@@ -12,7 +12,6 @@ type SourceS3FileFormatCSV struct {
DoubleQuote types.Bool `tfsdk:"double_quote"`
Encoding types.String `tfsdk:"encoding"`
EscapeChar types.String `tfsdk:"escape_char"`
- Filetype types.String `tfsdk:"filetype"`
InferDatatypes types.Bool `tfsdk:"infer_datatypes"`
NewlinesInValues types.Bool `tfsdk:"newlines_in_values"`
QuoteChar types.String `tfsdk:"quote_char"`
diff --git a/internal/provider/type_destination_postgres_ssl_modes_allow.go b/internal/provider/type_csv_comma_separated_values.go
old mode 100755
new mode 100644
similarity index 65%
rename from internal/provider/type_destination_postgres_ssl_modes_allow.go
rename to internal/provider/type_csv_comma_separated_values.go
index 038dc1c75..883c8897c
--- a/internal/provider/type_destination_postgres_ssl_modes_allow.go
+++ b/internal/provider/type_csv_comma_separated_values.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationPostgresSSLModesAllow struct {
- Mode types.String `tfsdk:"mode"`
+type CSVCommaSeparatedValues struct {
+ Flattening types.String `tfsdk:"flattening"`
}
diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format.go b/internal/provider/type_csv_format.go
old mode 100755
new mode 100644
similarity index 52%
rename from internal/provider/type_source_s3_file_based_stream_config_format_csv_format.go
rename to internal/provider/type_csv_format.go
index 3eb886dc5..dd10d00ea
--- a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format.go
+++ b/internal/provider/type_csv_format.go
@@ -4,19 +4,18 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceS3FileBasedStreamConfigFormatCSVFormat struct {
- Delimiter types.String `tfsdk:"delimiter"`
- DoubleQuote types.Bool `tfsdk:"double_quote"`
- Encoding types.String `tfsdk:"encoding"`
- EscapeChar types.String `tfsdk:"escape_char"`
- FalseValues []types.String `tfsdk:"false_values"`
- Filetype types.String `tfsdk:"filetype"`
- HeaderDefinition *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition `tfsdk:"header_definition"`
- InferenceType types.String `tfsdk:"inference_type"`
- NullValues []types.String `tfsdk:"null_values"`
- QuoteChar types.String `tfsdk:"quote_char"`
- SkipRowsAfterHeader types.Int64 `tfsdk:"skip_rows_after_header"`
- SkipRowsBeforeHeader types.Int64 `tfsdk:"skip_rows_before_header"`
- StringsCanBeNull types.Bool `tfsdk:"strings_can_be_null"`
- TrueValues []types.String `tfsdk:"true_values"`
+type CSVFormat struct {
+ Delimiter types.String `tfsdk:"delimiter"`
+ DoubleQuote types.Bool `tfsdk:"double_quote"`
+ Encoding types.String `tfsdk:"encoding"`
+ EscapeChar types.String `tfsdk:"escape_char"`
+ FalseValues []types.String `tfsdk:"false_values"`
+ HeaderDefinition *SourceAzureBlobStorageCSVHeaderDefinition `tfsdk:"header_definition"`
+ InferenceType types.String `tfsdk:"inference_type"`
+ NullValues []types.String `tfsdk:"null_values"`
+ QuoteChar types.String `tfsdk:"quote_char"`
+ SkipRowsAfterHeader types.Int64 `tfsdk:"skip_rows_after_header"`
+ SkipRowsBeforeHeader types.Int64 `tfsdk:"skip_rows_before_header"`
+ StringsCanBeNull types.Bool `tfsdk:"strings_can_be_null"`
+ TrueValues []types.String `tfsdk:"true_values"`
}
diff --git a/internal/provider/type_source_google_ads_custom_queries.go b/internal/provider/type_custom_queries.go
old mode 100755
new mode 100644
similarity index 84%
rename from internal/provider/type_source_google_ads_custom_queries.go
rename to internal/provider/type_custom_queries.go
index 28d4d1c88..44b714f53
--- a/internal/provider/type_source_google_ads_custom_queries.go
+++ b/internal/provider/type_custom_queries.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceGoogleAdsCustomQueries struct {
+type CustomQueries struct {
Query types.String `tfsdk:"query"`
TableName types.String `tfsdk:"table_name"`
}
diff --git a/internal/provider/type_custom_report_config.go b/internal/provider/type_custom_report_config.go
new file mode 100644
index 000000000..603affbf5
--- /dev/null
+++ b/internal/provider/type_custom_report_config.go
@@ -0,0 +1,12 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type CustomReportConfig struct {
+ Name types.String `tfsdk:"name"`
+ ReportAggregation types.String `tfsdk:"report_aggregation"`
+ ReportColumns []types.String `tfsdk:"report_columns"`
+ ReportingObject types.String `tfsdk:"reporting_object"`
+}
diff --git a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_xz.go b/internal/provider/type_deflate.go
old mode 100755
new mode 100644
similarity index 78%
rename from internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_xz.go
rename to internal/provider/type_deflate.go
index 7b93f900c..32a0f2fcd
--- a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_xz.go
+++ b/internal/provider/type_deflate.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz struct {
+type Deflate struct {
Codec types.String `tfsdk:"codec"`
CompressionLevel types.Int64 `tfsdk:"compression_level"`
}
diff --git a/internal/provider/type_destination_aws_datalake.go b/internal/provider/type_destination_aws_datalake.go
old mode 100755
new mode 100644
index 6bc6efdd0..9122ba524
--- a/internal/provider/type_destination_aws_datalake.go
+++ b/internal/provider/type_destination_aws_datalake.go
@@ -9,7 +9,6 @@ type DestinationAwsDatalake struct {
BucketName types.String `tfsdk:"bucket_name"`
BucketPrefix types.String `tfsdk:"bucket_prefix"`
Credentials DestinationAwsDatalakeAuthenticationMode `tfsdk:"credentials"`
- DestinationType types.String `tfsdk:"destination_type"`
Format *DestinationAwsDatalakeOutputFormatWildcard `tfsdk:"format"`
GlueCatalogFloatAsDecimal types.Bool `tfsdk:"glue_catalog_float_as_decimal"`
LakeformationDatabaseDefaultTagKey types.String `tfsdk:"lakeformation_database_default_tag_key"`
diff --git a/internal/provider/type_destination_aws_datalake1.go b/internal/provider/type_destination_aws_datalake1.go
deleted file mode 100755
index ec4a15e97..000000000
--- a/internal/provider/type_destination_aws_datalake1.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationAwsDatalake1 struct {
- AwsAccountID types.String `tfsdk:"aws_account_id"`
- BucketName types.String `tfsdk:"bucket_name"`
- BucketPrefix types.String `tfsdk:"bucket_prefix"`
- Credentials DestinationAwsDatalakeAuthenticationMode `tfsdk:"credentials"`
- DestinationType types.String `tfsdk:"destination_type"`
- Format *DestinationAwsDatalakeOutputFormatWildcard `tfsdk:"format"`
- GlueCatalogFloatAsDecimal types.Bool `tfsdk:"glue_catalog_float_as_decimal"`
- LakeformationDatabaseDefaultTagKey types.String `tfsdk:"lakeformation_database_default_tag_key"`
- LakeformationDatabaseDefaultTagValues types.String `tfsdk:"lakeformation_database_default_tag_values"`
- LakeformationDatabaseName types.String `tfsdk:"lakeformation_database_name"`
- LakeformationGovernedTables types.Bool `tfsdk:"lakeformation_governed_tables"`
- Partitioning types.String `tfsdk:"partitioning"`
- Region types.String `tfsdk:"region"`
-}
diff --git a/internal/provider/type_destination_aws_datalake_authentication_mode.go b/internal/provider/type_destination_aws_datalake_authentication_mode.go
old mode 100755
new mode 100644
index 8e2403e52..cb22ff846
--- a/internal/provider/type_destination_aws_datalake_authentication_mode.go
+++ b/internal/provider/type_destination_aws_datalake_authentication_mode.go
@@ -3,8 +3,6 @@
package provider
type DestinationAwsDatalakeAuthenticationMode struct {
- DestinationAwsDatalakeAuthenticationModeIAMRole *DestinationAwsDatalakeAuthenticationModeIAMRole `tfsdk:"destination_aws_datalake_authentication_mode_iam_role"`
- DestinationAwsDatalakeAuthenticationModeIAMUser *DestinationAwsDatalakeAuthenticationModeIAMUser `tfsdk:"destination_aws_datalake_authentication_mode_iam_user"`
- DestinationAwsDatalakeUpdateAuthenticationModeIAMRole *DestinationAwsDatalakeAuthenticationModeIAMRole `tfsdk:"destination_aws_datalake_update_authentication_mode_iam_role"`
- DestinationAwsDatalakeUpdateAuthenticationModeIAMUser *DestinationAwsDatalakeAuthenticationModeIAMUser `tfsdk:"destination_aws_datalake_update_authentication_mode_iam_user"`
+ IAMRole *IAMRole `tfsdk:"iam_role"`
+ IAMUser *IAMUser `tfsdk:"iam_user"`
}
diff --git a/internal/provider/type_destination_aws_datalake_authentication_mode_iam_role.go b/internal/provider/type_destination_aws_datalake_authentication_mode_iam_role.go
deleted file mode 100755
index 48eafe65c..000000000
--- a/internal/provider/type_destination_aws_datalake_authentication_mode_iam_role.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationAwsDatalakeAuthenticationModeIAMRole struct {
- CredentialsTitle types.String `tfsdk:"credentials_title"`
- RoleArn types.String `tfsdk:"role_arn"`
-}
diff --git a/internal/provider/type_destination_aws_datalake_output_format_wildcard.go b/internal/provider/type_destination_aws_datalake_output_format_wildcard.go
old mode 100755
new mode 100644
index 3857bcd27..d46f11508
--- a/internal/provider/type_destination_aws_datalake_output_format_wildcard.go
+++ b/internal/provider/type_destination_aws_datalake_output_format_wildcard.go
@@ -3,8 +3,6 @@
package provider
type DestinationAwsDatalakeOutputFormatWildcard struct {
- DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON *DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON `tfsdk:"destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json"`
- DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage *DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage `tfsdk:"destination_aws_datalake_output_format_wildcard_parquet_columnar_storage"`
- DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON *DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON `tfsdk:"destination_aws_datalake_update_output_format_wildcard_json_lines_newline_delimited_json"`
- DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage *DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage `tfsdk:"destination_aws_datalake_update_output_format_wildcard_parquet_columnar_storage"`
+ JSONLinesNewlineDelimitedJSON *JSONLinesNewlineDelimitedJSON `tfsdk:"json_lines_newline_delimited_json"`
+ ParquetColumnarStorage *ParquetColumnarStorage `tfsdk:"parquet_columnar_storage"`
}
diff --git a/internal/provider/type_destination_azure_blob_storage.go b/internal/provider/type_destination_azure_blob_storage.go
old mode 100755
new mode 100644
index 59579bb49..515806184
--- a/internal/provider/type_destination_azure_blob_storage.go
+++ b/internal/provider/type_destination_azure_blob_storage.go
@@ -11,6 +11,5 @@ type DestinationAzureBlobStorage struct {
AzureBlobStorageEndpointDomainName types.String `tfsdk:"azure_blob_storage_endpoint_domain_name"`
AzureBlobStorageOutputBufferSize types.Int64 `tfsdk:"azure_blob_storage_output_buffer_size"`
AzureBlobStorageSpillSize types.Int64 `tfsdk:"azure_blob_storage_spill_size"`
- DestinationType types.String `tfsdk:"destination_type"`
Format DestinationAzureBlobStorageOutputFormat `tfsdk:"format"`
}
diff --git a/internal/provider/type_destination_azure_blob_storage_json_lines_newline_delimited_json.go b/internal/provider/type_destination_azure_blob_storage_json_lines_newline_delimited_json.go
new file mode 100644
index 000000000..cc2ae0c1a
--- /dev/null
+++ b/internal/provider/type_destination_azure_blob_storage_json_lines_newline_delimited_json.go
@@ -0,0 +1,6 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON struct {
+}
diff --git a/internal/provider/type_destination_azure_blob_storage_output_format.go b/internal/provider/type_destination_azure_blob_storage_output_format.go
old mode 100755
new mode 100644
index e57858278..4de4d38e3
--- a/internal/provider/type_destination_azure_blob_storage_output_format.go
+++ b/internal/provider/type_destination_azure_blob_storage_output_format.go
@@ -3,8 +3,6 @@
package provider
type DestinationAzureBlobStorageOutputFormat struct {
- DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues *DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues `tfsdk:"destination_azure_blob_storage_output_format_csv_comma_separated_values"`
- DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON *DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON `tfsdk:"destination_azure_blob_storage_output_format_json_lines_newline_delimited_json"`
- DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues *DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues `tfsdk:"destination_azure_blob_storage_update_output_format_csv_comma_separated_values"`
- DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON *DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON `tfsdk:"destination_azure_blob_storage_update_output_format_json_lines_newline_delimited_json"`
+ CSVCommaSeparatedValues *CSVCommaSeparatedValues `tfsdk:"csv_comma_separated_values"`
+ JSONLinesNewlineDelimitedJSON *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"json_lines_newline_delimited_json"`
}
diff --git a/internal/provider/type_destination_azure_blob_storage_output_format_csv_comma_separated_values.go b/internal/provider/type_destination_azure_blob_storage_output_format_csv_comma_separated_values.go
deleted file mode 100755
index 65f84f577..000000000
--- a/internal/provider/type_destination_azure_blob_storage_output_format_csv_comma_separated_values.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues struct {
- Flattening types.String `tfsdk:"flattening"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_bigquery.go b/internal/provider/type_destination_bigquery.go
old mode 100755
new mode 100644
index 09022d87a..b527a4bc7
--- a/internal/provider/type_destination_bigquery.go
+++ b/internal/provider/type_destination_bigquery.go
@@ -9,7 +9,7 @@ type DestinationBigquery struct {
CredentialsJSON types.String `tfsdk:"credentials_json"`
DatasetID types.String `tfsdk:"dataset_id"`
DatasetLocation types.String `tfsdk:"dataset_location"`
- DestinationType types.String `tfsdk:"destination_type"`
+ DisableTypeDedupe types.Bool `tfsdk:"disable_type_dedupe"`
LoadingMethod *DestinationBigqueryLoadingMethod `tfsdk:"loading_method"`
ProjectID types.String `tfsdk:"project_id"`
RawDataDataset types.String `tfsdk:"raw_data_dataset"`
diff --git a/internal/provider/type_destination_bigquery_credential.go b/internal/provider/type_destination_bigquery_credential.go
new file mode 100644
index 000000000..87690319e
--- /dev/null
+++ b/internal/provider/type_destination_bigquery_credential.go
@@ -0,0 +1,7 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationBigqueryCredential struct {
+ HMACKey *DestinationBigqueryHMACKey `tfsdk:"hmac_key"`
+}
diff --git a/internal/provider/type_destination_bigquery_denormalized.go b/internal/provider/type_destination_bigquery_denormalized.go
deleted file mode 100755
index 359b5f691..000000000
--- a/internal/provider/type_destination_bigquery_denormalized.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationBigqueryDenormalized struct {
- BigQueryClientBufferSizeMb types.Int64 `tfsdk:"big_query_client_buffer_size_mb"`
- CredentialsJSON types.String `tfsdk:"credentials_json"`
- DatasetID types.String `tfsdk:"dataset_id"`
- DatasetLocation types.String `tfsdk:"dataset_location"`
- DestinationType types.String `tfsdk:"destination_type"`
- LoadingMethod *DestinationBigqueryDenormalizedLoadingMethod `tfsdk:"loading_method"`
- ProjectID types.String `tfsdk:"project_id"`
-}
diff --git a/internal/provider/type_destination_bigquery_denormalized_loading_method.go b/internal/provider/type_destination_bigquery_denormalized_loading_method.go
deleted file mode 100755
index 064d515a1..000000000
--- a/internal/provider/type_destination_bigquery_denormalized_loading_method.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationBigqueryDenormalizedLoadingMethod struct {
- DestinationBigqueryDenormalizedLoadingMethodGCSStaging *DestinationBigqueryDenormalizedLoadingMethodGCSStaging `tfsdk:"destination_bigquery_denormalized_loading_method_gcs_staging"`
- DestinationBigqueryDenormalizedLoadingMethodStandardInserts *DestinationBigqueryLoadingMethodStandardInserts `tfsdk:"destination_bigquery_denormalized_loading_method_standard_inserts"`
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging *DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging `tfsdk:"destination_bigquery_denormalized_update_loading_method_gcs_staging"`
- DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts *DestinationBigqueryLoadingMethodStandardInserts `tfsdk:"destination_bigquery_denormalized_update_loading_method_standard_inserts"`
-}
diff --git a/internal/provider/type_destination_bigquery_denormalized_loading_method_gcs_staging.go b/internal/provider/type_destination_bigquery_denormalized_loading_method_gcs_staging.go
deleted file mode 100755
index e3f515534..000000000
--- a/internal/provider/type_destination_bigquery_denormalized_loading_method_gcs_staging.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationBigqueryDenormalizedLoadingMethodGCSStaging struct {
- Credential DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredential `tfsdk:"credential"`
- FileBufferCount types.Int64 `tfsdk:"file_buffer_count"`
- GcsBucketName types.String `tfsdk:"gcs_bucket_name"`
- GcsBucketPath types.String `tfsdk:"gcs_bucket_path"`
- KeepFilesInGcsBucket types.String `tfsdk:"keep_files_in_gcs_bucket"`
- Method types.String `tfsdk:"method"`
-}
diff --git a/internal/provider/type_destination_bigquery_denormalized_loading_method_gcs_staging_credential.go b/internal/provider/type_destination_bigquery_denormalized_loading_method_gcs_staging_credential.go
deleted file mode 100755
index 7eda2708b..000000000
--- a/internal/provider/type_destination_bigquery_denormalized_loading_method_gcs_staging_credential.go
+++ /dev/null
@@ -1,7 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredential struct {
- DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey *DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey `tfsdk:"destination_bigquery_denormalized_loading_method_gcs_staging_credential_hmac_key"`
-}
diff --git a/internal/provider/type_destination_bigquery_denormalized_update_loading_method_gcs_staging.go b/internal/provider/type_destination_bigquery_denormalized_update_loading_method_gcs_staging.go
deleted file mode 100755
index f27b28539..000000000
--- a/internal/provider/type_destination_bigquery_denormalized_update_loading_method_gcs_staging.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging struct {
- Credential DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredential `tfsdk:"credential"`
- FileBufferCount types.Int64 `tfsdk:"file_buffer_count"`
- GcsBucketName types.String `tfsdk:"gcs_bucket_name"`
- GcsBucketPath types.String `tfsdk:"gcs_bucket_path"`
- KeepFilesInGcsBucket types.String `tfsdk:"keep_files_in_gcs_bucket"`
- Method types.String `tfsdk:"method"`
-}
diff --git a/internal/provider/type_destination_bigquery_denormalized_update_loading_method_gcs_staging_credential.go b/internal/provider/type_destination_bigquery_denormalized_update_loading_method_gcs_staging_credential.go
deleted file mode 100755
index 18ca2c8f5..000000000
--- a/internal/provider/type_destination_bigquery_denormalized_update_loading_method_gcs_staging_credential.go
+++ /dev/null
@@ -1,7 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredential struct {
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey *DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey `tfsdk:"destination_bigquery_denormalized_update_loading_method_gcs_staging_credential_hmac_key"`
-}
diff --git a/internal/provider/type_destination_bigquery_hmac_key.go b/internal/provider/type_destination_bigquery_hmac_key.go
new file mode 100644
index 000000000..ce0a21d81
--- /dev/null
+++ b/internal/provider/type_destination_bigquery_hmac_key.go
@@ -0,0 +1,10 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type DestinationBigqueryHMACKey struct {
+ HmacKeyAccessID types.String `tfsdk:"hmac_key_access_id"`
+ HmacKeySecret types.String `tfsdk:"hmac_key_secret"`
+}
diff --git a/internal/provider/type_destination_bigquery_loading_method.go b/internal/provider/type_destination_bigquery_loading_method.go
old mode 100755
new mode 100644
index 87475b7d4..b3cd1674d
--- a/internal/provider/type_destination_bigquery_loading_method.go
+++ b/internal/provider/type_destination_bigquery_loading_method.go
@@ -3,8 +3,6 @@
package provider
type DestinationBigqueryLoadingMethod struct {
- DestinationBigqueryLoadingMethodGCSStaging *DestinationBigqueryLoadingMethodGCSStaging `tfsdk:"destination_bigquery_loading_method_gcs_staging"`
- DestinationBigqueryLoadingMethodStandardInserts *DestinationBigqueryLoadingMethodStandardInserts `tfsdk:"destination_bigquery_loading_method_standard_inserts"`
- DestinationBigqueryUpdateLoadingMethodGCSStaging *DestinationBigqueryUpdateLoadingMethodGCSStaging `tfsdk:"destination_bigquery_update_loading_method_gcs_staging"`
- DestinationBigqueryUpdateLoadingMethodStandardInserts *DestinationBigqueryLoadingMethodStandardInserts `tfsdk:"destination_bigquery_update_loading_method_standard_inserts"`
+ GCSStaging *GCSStaging `tfsdk:"gcs_staging"`
+ StandardInserts *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"standard_inserts"`
}
diff --git a/internal/provider/type_destination_bigquery_loading_method_gcs_staging.go b/internal/provider/type_destination_bigquery_loading_method_gcs_staging.go
deleted file mode 100755
index ef871f774..000000000
--- a/internal/provider/type_destination_bigquery_loading_method_gcs_staging.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationBigqueryLoadingMethodGCSStaging struct {
- Credential DestinationBigqueryLoadingMethodGCSStagingCredential `tfsdk:"credential"`
- FileBufferCount types.Int64 `tfsdk:"file_buffer_count"`
- GcsBucketName types.String `tfsdk:"gcs_bucket_name"`
- GcsBucketPath types.String `tfsdk:"gcs_bucket_path"`
- KeepFilesInGcsBucket types.String `tfsdk:"keep_files_in_gcs_bucket"`
- Method types.String `tfsdk:"method"`
-}
diff --git a/internal/provider/type_destination_bigquery_loading_method_gcs_staging_credential.go b/internal/provider/type_destination_bigquery_loading_method_gcs_staging_credential.go
deleted file mode 100755
index a5d6455d6..000000000
--- a/internal/provider/type_destination_bigquery_loading_method_gcs_staging_credential.go
+++ /dev/null
@@ -1,7 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationBigqueryLoadingMethodGCSStagingCredential struct {
- DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey *DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey `tfsdk:"destination_bigquery_loading_method_gcs_staging_credential_hmac_key"`
-}
diff --git a/internal/provider/type_destination_bigquery_update_loading_method_gcs_staging.go b/internal/provider/type_destination_bigquery_update_loading_method_gcs_staging.go
deleted file mode 100755
index bee10ad83..000000000
--- a/internal/provider/type_destination_bigquery_update_loading_method_gcs_staging.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationBigqueryUpdateLoadingMethodGCSStaging struct {
- Credential DestinationBigqueryUpdateLoadingMethodGCSStagingCredential `tfsdk:"credential"`
- FileBufferCount types.Int64 `tfsdk:"file_buffer_count"`
- GcsBucketName types.String `tfsdk:"gcs_bucket_name"`
- GcsBucketPath types.String `tfsdk:"gcs_bucket_path"`
- KeepFilesInGcsBucket types.String `tfsdk:"keep_files_in_gcs_bucket"`
- Method types.String `tfsdk:"method"`
-}
diff --git a/internal/provider/type_destination_bigquery_update_loading_method_gcs_staging_credential.go b/internal/provider/type_destination_bigquery_update_loading_method_gcs_staging_credential.go
deleted file mode 100755
index b638579f6..000000000
--- a/internal/provider/type_destination_bigquery_update_loading_method_gcs_staging_credential.go
+++ /dev/null
@@ -1,7 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationBigqueryUpdateLoadingMethodGCSStagingCredential struct {
- DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey *DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey `tfsdk:"destination_bigquery_update_loading_method_gcs_staging_credential_hmac_key"`
-}
diff --git a/internal/provider/type_destination_clickhouse.go b/internal/provider/type_destination_clickhouse.go
old mode 100755
new mode 100644
index c7490d9e7..095fc5ff8
--- a/internal/provider/type_destination_clickhouse.go
+++ b/internal/provider/type_destination_clickhouse.go
@@ -5,12 +5,11 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationClickhouse struct {
- Database types.String `tfsdk:"database"`
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ Database types.String `tfsdk:"database"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_destination_clickhouse_ssh_tunnel_method.go b/internal/provider/type_destination_clickhouse_ssh_tunnel_method.go
old mode 100755
new mode 100644
index 1fdbe6b47..23c21a737
--- a/internal/provider/type_destination_clickhouse_ssh_tunnel_method.go
+++ b/internal/provider/type_destination_clickhouse_ssh_tunnel_method.go
@@ -3,10 +3,7 @@
package provider
type DestinationClickhouseSSHTunnelMethod struct {
- DestinationClickhouseSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_clickhouse_ssh_tunnel_method_no_tunnel"`
- DestinationClickhouseSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_clickhouse_ssh_tunnel_method_password_authentication"`
- DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_clickhouse_ssh_tunnel_method_ssh_key_authentication"`
- DestinationClickhouseUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_clickhouse_update_ssh_tunnel_method_no_tunnel"`
- DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_clickhouse_update_ssh_tunnel_method_password_authentication"`
- DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_clickhouse_update_ssh_tunnel_method_ssh_key_authentication"`
+ NoTunnel *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"no_tunnel"`
+ PasswordAuthentication *PasswordAuthentication `tfsdk:"password_authentication"`
+ SSHKeyAuthentication *SSHKeyAuthentication `tfsdk:"ssh_key_authentication"`
}
diff --git a/internal/provider/type_destination_clickhouse_ssh_tunnel_method_ssh_key_authentication.go b/internal/provider/type_destination_clickhouse_ssh_tunnel_method_ssh_key_authentication.go
deleted file mode 100755
index b77ce0c86..000000000
--- a/internal/provider/type_destination_clickhouse_ssh_tunnel_method_ssh_key_authentication.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication struct {
- SSHKey types.String `tfsdk:"ssh_key"`
- TunnelHost types.String `tfsdk:"tunnel_host"`
- TunnelMethod types.String `tfsdk:"tunnel_method"`
- TunnelPort types.Int64 `tfsdk:"tunnel_port"`
- TunnelUser types.String `tfsdk:"tunnel_user"`
-}
diff --git a/internal/provider/type_destination_convex.go b/internal/provider/type_destination_convex.go
old mode 100755
new mode 100644
index 978590b43..6c3eec30d
--- a/internal/provider/type_destination_convex.go
+++ b/internal/provider/type_destination_convex.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationConvex struct {
- AccessKey types.String `tfsdk:"access_key"`
- DeploymentURL types.String `tfsdk:"deployment_url"`
- DestinationType types.String `tfsdk:"destination_type"`
+ AccessKey types.String `tfsdk:"access_key"`
+ DeploymentURL types.String `tfsdk:"deployment_url"`
}
diff --git a/internal/provider/type_destination_cumulio.go b/internal/provider/type_destination_cumulio.go
old mode 100755
new mode 100644
index 64982426e..7ccceae0e
--- a/internal/provider/type_destination_cumulio.go
+++ b/internal/provider/type_destination_cumulio.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationCumulio struct {
- APIHost types.String `tfsdk:"api_host"`
- APIKey types.String `tfsdk:"api_key"`
- APIToken types.String `tfsdk:"api_token"`
- DestinationType types.String `tfsdk:"destination_type"`
+ APIHost types.String `tfsdk:"api_host"`
+ APIKey types.String `tfsdk:"api_key"`
+ APIToken types.String `tfsdk:"api_token"`
}
diff --git a/internal/provider/type_destination_databend.go b/internal/provider/type_destination_databend.go
old mode 100755
new mode 100644
index 0d4e3fb5c..f76281d03
--- a/internal/provider/type_destination_databend.go
+++ b/internal/provider/type_destination_databend.go
@@ -5,11 +5,10 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationDatabend struct {
- Database types.String `tfsdk:"database"`
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- Table types.String `tfsdk:"table"`
- Username types.String `tfsdk:"username"`
+ Database types.String `tfsdk:"database"`
+ Host types.String `tfsdk:"host"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ Table types.String `tfsdk:"table"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_destination_databricks.go b/internal/provider/type_destination_databricks.go
old mode 100755
new mode 100644
index e3dba160d..dd498f949
--- a/internal/provider/type_destination_databricks.go
+++ b/internal/provider/type_destination_databricks.go
@@ -12,7 +12,6 @@ type DestinationDatabricks struct {
DatabricksPersonalAccessToken types.String `tfsdk:"databricks_personal_access_token"`
DatabricksPort types.String `tfsdk:"databricks_port"`
DatabricksServerHostname types.String `tfsdk:"databricks_server_hostname"`
- DestinationType types.String `tfsdk:"destination_type"`
EnableSchemaEvolution types.Bool `tfsdk:"enable_schema_evolution"`
PurgeStagingData types.Bool `tfsdk:"purge_staging_data"`
Schema types.String `tfsdk:"schema"`
diff --git a/internal/provider/type_destination_databricks1.go b/internal/provider/type_destination_databricks1.go
deleted file mode 100755
index 7008804e8..000000000
--- a/internal/provider/type_destination_databricks1.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationDatabricks1 struct {
- AcceptTerms types.Bool `tfsdk:"accept_terms"`
- DataSource DestinationDatabricksDataSource2 `tfsdk:"data_source"`
- Database types.String `tfsdk:"database"`
- DatabricksHTTPPath types.String `tfsdk:"databricks_http_path"`
- DatabricksPersonalAccessToken types.String `tfsdk:"databricks_personal_access_token"`
- DatabricksPort types.String `tfsdk:"databricks_port"`
- DatabricksServerHostname types.String `tfsdk:"databricks_server_hostname"`
- DestinationType types.String `tfsdk:"destination_type"`
- EnableSchemaEvolution types.Bool `tfsdk:"enable_schema_evolution"`
- PurgeStagingData types.Bool `tfsdk:"purge_staging_data"`
- Schema types.String `tfsdk:"schema"`
-}
diff --git a/internal/provider/type_destination_databricks_data_source_azure_blob_storage.go b/internal/provider/type_destination_databricks_azure_blob_storage.go
old mode 100755
new mode 100644
similarity index 79%
rename from internal/provider/type_destination_databricks_data_source_azure_blob_storage.go
rename to internal/provider/type_destination_databricks_azure_blob_storage.go
index 2ebf0c25a..c22c960a2
--- a/internal/provider/type_destination_databricks_data_source_azure_blob_storage.go
+++ b/internal/provider/type_destination_databricks_azure_blob_storage.go
@@ -4,10 +4,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationDatabricksDataSourceAzureBlobStorage struct {
+type DestinationDatabricksAzureBlobStorage struct {
AzureBlobStorageAccountName types.String `tfsdk:"azure_blob_storage_account_name"`
AzureBlobStorageContainerName types.String `tfsdk:"azure_blob_storage_container_name"`
AzureBlobStorageEndpointDomainName types.String `tfsdk:"azure_blob_storage_endpoint_domain_name"`
AzureBlobStorageSasToken types.String `tfsdk:"azure_blob_storage_sas_token"`
- DataSourceType types.String `tfsdk:"data_source_type"`
}
diff --git a/internal/provider/type_destination_databricks_data_source1.go b/internal/provider/type_destination_databricks_data_source1.go
old mode 100755
new mode 100644
index 0279f57ae..c1a756ebf
--- a/internal/provider/type_destination_databricks_data_source1.go
+++ b/internal/provider/type_destination_databricks_data_source1.go
@@ -3,10 +3,7 @@
package provider
type DestinationDatabricksDataSource1 struct {
- DestinationDatabricksDataSourceRecommendedManagedTables *DestinationDatabricksDataSourceRecommendedManagedTables `tfsdk:"destination_databricks_data_source_recommended_managed_tables"`
- DestinationDatabricksDataSourceAmazonS3 *DestinationDatabricksDataSourceAmazonS3 `tfsdk:"destination_databricks_data_source_amazon_s3"`
- DestinationDatabricksDataSourceAzureBlobStorage *DestinationDatabricksDataSourceAzureBlobStorage `tfsdk:"destination_databricks_data_source_azure_blob_storage"`
- DestinationDatabricksUpdateDataSourceRecommendedManagedTables *DestinationDatabricksDataSourceRecommendedManagedTables `tfsdk:"destination_databricks_update_data_source_recommended_managed_tables"`
- DestinationDatabricksUpdateDataSourceAmazonS3 *DestinationDatabricksUpdateDataSourceAmazonS3 `tfsdk:"destination_databricks_update_data_source_amazon_s3"`
- DestinationDatabricksUpdateDataSourceAzureBlobStorage *DestinationDatabricksDataSourceAzureBlobStorage `tfsdk:"destination_databricks_update_data_source_azure_blob_storage"`
+ RecommendedManagedTables *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"recommended_managed_tables"`
+ AmazonS3 *AmazonS3 `tfsdk:"amazon_s3"`
+ AzureBlobStorage *DestinationDatabricksAzureBlobStorage `tfsdk:"azure_blob_storage"`
}
diff --git a/internal/provider/type_destination_databricks_data_source2.go b/internal/provider/type_destination_databricks_data_source2.go
deleted file mode 100755
index 6c9734cc4..000000000
--- a/internal/provider/type_destination_databricks_data_source2.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationDatabricksDataSource2 struct {
- DestinationDatabricksDataSourceRecommendedManagedTables *DestinationDatabricksDataSourceRecommendedManagedTables `tfsdk:"destination_databricks_data_source_recommended_managed_tables"`
- DestinationDatabricksDataSourceAmazonS3 *DestinationDatabricksDataSourceAmazonS31 `tfsdk:"destination_databricks_data_source_amazon_s3"`
- DestinationDatabricksDataSourceAzureBlobStorage *DestinationDatabricksDataSourceAzureBlobStorage `tfsdk:"destination_databricks_data_source_azure_blob_storage"`
- DestinationDatabricksUpdateDataSourceRecommendedManagedTables *DestinationDatabricksDataSourceRecommendedManagedTables `tfsdk:"destination_databricks_update_data_source_recommended_managed_tables"`
- DestinationDatabricksUpdateDataSourceAmazonS3 *DestinationDatabricksUpdateDataSourceAmazonS31 `tfsdk:"destination_databricks_update_data_source_amazon_s3"`
- DestinationDatabricksUpdateDataSourceAzureBlobStorage *DestinationDatabricksDataSourceAzureBlobStorage `tfsdk:"destination_databricks_update_data_source_azure_blob_storage"`
-}
diff --git a/internal/provider/type_destination_databricks_data_source_amazon_s31.go b/internal/provider/type_destination_databricks_data_source_amazon_s31.go
deleted file mode 100755
index 6cddf270e..000000000
--- a/internal/provider/type_destination_databricks_data_source_amazon_s31.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationDatabricksDataSourceAmazonS31 struct {
- DataSourceType types.String `tfsdk:"data_source_type"`
- FileNamePattern types.String `tfsdk:"file_name_pattern"`
- S3AccessKeyID types.String `tfsdk:"s3_access_key_id"`
- S3BucketName types.String `tfsdk:"s3_bucket_name"`
- S3BucketPath types.String `tfsdk:"s3_bucket_path"`
- S3BucketRegion types.String `tfsdk:"s3_bucket_region"`
- S3SecretAccessKey types.String `tfsdk:"s3_secret_access_key"`
-}
diff --git a/internal/provider/type_destination_databricks_update_data_source_amazon_s3.go b/internal/provider/type_destination_databricks_update_data_source_amazon_s3.go
deleted file mode 100755
index 597611906..000000000
--- a/internal/provider/type_destination_databricks_update_data_source_amazon_s3.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationDatabricksUpdateDataSourceAmazonS3 struct {
- DataSourceType types.String `tfsdk:"data_source_type"`
- FileNamePattern types.String `tfsdk:"file_name_pattern"`
- S3AccessKeyID types.String `tfsdk:"s3_access_key_id"`
- S3BucketName types.String `tfsdk:"s3_bucket_name"`
- S3BucketPath types.String `tfsdk:"s3_bucket_path"`
- S3BucketRegion types.String `tfsdk:"s3_bucket_region"`
- S3SecretAccessKey types.String `tfsdk:"s3_secret_access_key"`
-}
diff --git a/internal/provider/type_destination_databricks_update_data_source_amazon_s31.go b/internal/provider/type_destination_databricks_update_data_source_amazon_s31.go
deleted file mode 100755
index 87c2925af..000000000
--- a/internal/provider/type_destination_databricks_update_data_source_amazon_s31.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationDatabricksUpdateDataSourceAmazonS31 struct {
- DataSourceType types.String `tfsdk:"data_source_type"`
- FileNamePattern types.String `tfsdk:"file_name_pattern"`
- S3AccessKeyID types.String `tfsdk:"s3_access_key_id"`
- S3BucketName types.String `tfsdk:"s3_bucket_name"`
- S3BucketPath types.String `tfsdk:"s3_bucket_path"`
- S3BucketRegion types.String `tfsdk:"s3_bucket_region"`
- S3SecretAccessKey types.String `tfsdk:"s3_secret_access_key"`
-}
diff --git a/internal/provider/type_destination_dev_null.go b/internal/provider/type_destination_dev_null.go
old mode 100755
new mode 100644
index 10e3be114..7bf35de28
--- a/internal/provider/type_destination_dev_null.go
+++ b/internal/provider/type_destination_dev_null.go
@@ -2,9 +2,6 @@
package provider
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
type DestinationDevNull struct {
- DestinationType types.String `tfsdk:"destination_type"`
TestDestination DestinationDevNullTestDestination `tfsdk:"test_destination"`
}
diff --git a/internal/provider/type_destination_dev_null_test_destination.go b/internal/provider/type_destination_dev_null_test_destination.go
old mode 100755
new mode 100644
index 06b78f6b9..6872e78d3
--- a/internal/provider/type_destination_dev_null_test_destination.go
+++ b/internal/provider/type_destination_dev_null_test_destination.go
@@ -3,6 +3,5 @@
package provider
type DestinationDevNullTestDestination struct {
- DestinationDevNullTestDestinationSilent *DestinationDevNullTestDestinationSilent `tfsdk:"destination_dev_null_test_destination_silent"`
- DestinationDevNullUpdateTestDestinationSilent *DestinationDevNullTestDestinationSilent `tfsdk:"destination_dev_null_update_test_destination_silent"`
+ Silent *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"silent"`
}
diff --git a/internal/provider/type_destination_duckdb.go b/internal/provider/type_destination_duckdb.go
new file mode 100644
index 000000000..be3ed78eb
--- /dev/null
+++ b/internal/provider/type_destination_duckdb.go
@@ -0,0 +1,11 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type DestinationDuckdb struct {
+ DestinationPath types.String `tfsdk:"destination_path"`
+ MotherduckAPIKey types.String `tfsdk:"motherduck_api_key"`
+ Schema types.String `tfsdk:"schema"`
+}
diff --git a/internal/provider/type_destination_dynamodb.go b/internal/provider/type_destination_dynamodb.go
old mode 100755
new mode 100644
index f40c2493f..76b2b866d
--- a/internal/provider/type_destination_dynamodb.go
+++ b/internal/provider/type_destination_dynamodb.go
@@ -6,7 +6,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationDynamodb struct {
AccessKeyID types.String `tfsdk:"access_key_id"`
- DestinationType types.String `tfsdk:"destination_type"`
DynamodbEndpoint types.String `tfsdk:"dynamodb_endpoint"`
DynamodbRegion types.String `tfsdk:"dynamodb_region"`
DynamodbTableNamePrefix types.String `tfsdk:"dynamodb_table_name_prefix"`
diff --git a/internal/provider/type_destination_dynamodb1.go b/internal/provider/type_destination_dynamodb1.go
deleted file mode 100755
index 0a2de8bdb..000000000
--- a/internal/provider/type_destination_dynamodb1.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationDynamodb1 struct {
- AccessKeyID types.String `tfsdk:"access_key_id"`
- DestinationType types.String `tfsdk:"destination_type"`
- DynamodbEndpoint types.String `tfsdk:"dynamodb_endpoint"`
- DynamodbRegion types.String `tfsdk:"dynamodb_region"`
- DynamodbTableNamePrefix types.String `tfsdk:"dynamodb_table_name_prefix"`
- SecretAccessKey types.String `tfsdk:"secret_access_key"`
-}
diff --git a/internal/provider/type_destination_elasticsearch.go b/internal/provider/type_destination_elasticsearch.go
old mode 100755
new mode 100644
index c507f60b0..9a0981b10
--- a/internal/provider/type_destination_elasticsearch.go
+++ b/internal/provider/type_destination_elasticsearch.go
@@ -7,7 +7,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationElasticsearch struct {
AuthenticationMethod *DestinationElasticsearchAuthenticationMethod `tfsdk:"authentication_method"`
CaCertificate types.String `tfsdk:"ca_certificate"`
- DestinationType types.String `tfsdk:"destination_type"`
Endpoint types.String `tfsdk:"endpoint"`
Upsert types.Bool `tfsdk:"upsert"`
}
diff --git a/internal/provider/type_destination_elasticsearch_authentication_method.go b/internal/provider/type_destination_elasticsearch_authentication_method.go
old mode 100755
new mode 100644
index 5a690f8c7..a21033a72
--- a/internal/provider/type_destination_elasticsearch_authentication_method.go
+++ b/internal/provider/type_destination_elasticsearch_authentication_method.go
@@ -3,8 +3,6 @@
package provider
type DestinationElasticsearchAuthenticationMethod struct {
- DestinationElasticsearchAuthenticationMethodAPIKeySecret *DestinationElasticsearchAuthenticationMethodAPIKeySecret `tfsdk:"destination_elasticsearch_authentication_method_api_key_secret"`
- DestinationElasticsearchAuthenticationMethodUsernamePassword *DestinationElasticsearchAuthenticationMethodUsernamePassword `tfsdk:"destination_elasticsearch_authentication_method_username_password"`
- DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret *DestinationElasticsearchAuthenticationMethodAPIKeySecret `tfsdk:"destination_elasticsearch_update_authentication_method_api_key_secret"`
- DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword *DestinationElasticsearchAuthenticationMethodUsernamePassword `tfsdk:"destination_elasticsearch_update_authentication_method_username_password"`
+ APIKeySecret *APIKeySecret `tfsdk:"api_key_secret"`
+ UsernamePassword *UsernamePassword `tfsdk:"username_password"`
}
diff --git a/internal/provider/type_destination_elasticsearch_authentication_method_username_password.go b/internal/provider/type_destination_elasticsearch_authentication_method_username_password.go
deleted file mode 100755
index dc74817bd..000000000
--- a/internal/provider/type_destination_elasticsearch_authentication_method_username_password.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationElasticsearchAuthenticationMethodUsernamePassword struct {
- Method types.String `tfsdk:"method"`
- Password types.String `tfsdk:"password"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/internal/provider/type_destination_firebolt.go b/internal/provider/type_destination_firebolt.go
old mode 100755
new mode 100644
index 97dd35bdb..1729de867
--- a/internal/provider/type_destination_firebolt.go
+++ b/internal/provider/type_destination_firebolt.go
@@ -5,12 +5,11 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationFirebolt struct {
- Account types.String `tfsdk:"account"`
- Database types.String `tfsdk:"database"`
- DestinationType types.String `tfsdk:"destination_type"`
- Engine types.String `tfsdk:"engine"`
- Host types.String `tfsdk:"host"`
- LoadingMethod *DestinationFireboltLoadingMethod `tfsdk:"loading_method"`
- Password types.String `tfsdk:"password"`
- Username types.String `tfsdk:"username"`
+ Account types.String `tfsdk:"account"`
+ Database types.String `tfsdk:"database"`
+ Engine types.String `tfsdk:"engine"`
+ Host types.String `tfsdk:"host"`
+ LoadingMethod *DestinationFireboltLoadingMethod `tfsdk:"loading_method"`
+ Password types.String `tfsdk:"password"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_destination_firebolt_loading_method.go b/internal/provider/type_destination_firebolt_loading_method.go
old mode 100755
new mode 100644
index 48174bd9e..12f78e8d9
--- a/internal/provider/type_destination_firebolt_loading_method.go
+++ b/internal/provider/type_destination_firebolt_loading_method.go
@@ -3,8 +3,6 @@
package provider
type DestinationFireboltLoadingMethod struct {
- DestinationFireboltLoadingMethodExternalTableViaS3 *DestinationFireboltLoadingMethodExternalTableViaS3 `tfsdk:"destination_firebolt_loading_method_external_table_via_s3"`
- DestinationFireboltLoadingMethodSQLInserts *DestinationFireboltLoadingMethodSQLInserts `tfsdk:"destination_firebolt_loading_method_sql_inserts"`
- DestinationFireboltUpdateLoadingMethodExternalTableViaS3 *DestinationFireboltLoadingMethodExternalTableViaS3 `tfsdk:"destination_firebolt_update_loading_method_external_table_via_s3"`
- DestinationFireboltUpdateLoadingMethodSQLInserts *DestinationFireboltLoadingMethodSQLInserts `tfsdk:"destination_firebolt_update_loading_method_sql_inserts"`
+ ExternalTableViaS3 *ExternalTableViaS3 `tfsdk:"external_table_via_s3"`
+ SQLInserts *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"sql_inserts"`
}
diff --git a/internal/provider/type_destination_firestore.go b/internal/provider/type_destination_firestore.go
old mode 100755
new mode 100644
index ed91ad048..9f11b614c
--- a/internal/provider/type_destination_firestore.go
+++ b/internal/provider/type_destination_firestore.go
@@ -6,6 +6,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationFirestore struct {
CredentialsJSON types.String `tfsdk:"credentials_json"`
- DestinationType types.String `tfsdk:"destination_type"`
ProjectID types.String `tfsdk:"project_id"`
}
diff --git a/internal/provider/type_destination_gcs.go b/internal/provider/type_destination_gcs.go
old mode 100755
new mode 100644
index d0d156915..ed2c78e8b
--- a/internal/provider/type_destination_gcs.go
+++ b/internal/provider/type_destination_gcs.go
@@ -6,7 +6,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationGcs struct {
Credential DestinationGcsAuthentication `tfsdk:"credential"`
- DestinationType types.String `tfsdk:"destination_type"`
Format DestinationGcsOutputFormat `tfsdk:"format"`
GcsBucketName types.String `tfsdk:"gcs_bucket_name"`
GcsBucketPath types.String `tfsdk:"gcs_bucket_path"`
diff --git a/internal/provider/type_destination_gcs_authentication.go b/internal/provider/type_destination_gcs_authentication.go
old mode 100755
new mode 100644
index 730f79d29..9eb1050ce
--- a/internal/provider/type_destination_gcs_authentication.go
+++ b/internal/provider/type_destination_gcs_authentication.go
@@ -3,6 +3,5 @@
package provider
type DestinationGcsAuthentication struct {
- DestinationGcsAuthenticationHMACKey *DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey `tfsdk:"destination_gcs_authentication_hmac_key"`
- DestinationGcsUpdateAuthenticationHMACKey *DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey `tfsdk:"destination_gcs_update_authentication_hmac_key"`
+ HMACKey *HMACKey `tfsdk:"hmac_key"`
}
diff --git a/internal/provider/type_destination_gcs_compression.go b/internal/provider/type_destination_gcs_compression.go
new file mode 100644
index 000000000..0d8a54c4e
--- /dev/null
+++ b/internal/provider/type_destination_gcs_compression.go
@@ -0,0 +1,8 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationGcsCompression struct {
+ Gzip *Gzip `tfsdk:"gzip"`
+ NoCompression *DestinationGcsUpdateNoCompression `tfsdk:"no_compression"`
+}
diff --git a/internal/provider/type_destination_gcs_compression_codec.go b/internal/provider/type_destination_gcs_compression_codec.go
new file mode 100644
index 000000000..7d0e99a79
--- /dev/null
+++ b/internal/provider/type_destination_gcs_compression_codec.go
@@ -0,0 +1,12 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationGcsCompressionCodec struct {
+ Bzip2 *Bzip2 `tfsdk:"bzip2"`
+ Deflate *Deflate `tfsdk:"deflate"`
+ NoCompression *NoCompression `tfsdk:"no_compression"`
+ Snappy *Snappy `tfsdk:"snappy"`
+ Xz *Xz `tfsdk:"xz"`
+ Zstandard *Zstandard `tfsdk:"zstandard"`
+}
diff --git a/internal/provider/type_destination_gcs_csv_comma_separated_values.go b/internal/provider/type_destination_gcs_csv_comma_separated_values.go
new file mode 100644
index 000000000..e4a60961b
--- /dev/null
+++ b/internal/provider/type_destination_gcs_csv_comma_separated_values.go
@@ -0,0 +1,11 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type DestinationGcsCSVCommaSeparatedValues struct {
+ Compression *DestinationGcsCompression `tfsdk:"compression"`
+ Flattening types.String `tfsdk:"flattening"`
+ FormatType types.String `tfsdk:"format_type"`
+}
diff --git a/internal/provider/type_destination_gcs_json_lines_newline_delimited_json.go b/internal/provider/type_destination_gcs_json_lines_newline_delimited_json.go
new file mode 100644
index 000000000..67e298cda
--- /dev/null
+++ b/internal/provider/type_destination_gcs_json_lines_newline_delimited_json.go
@@ -0,0 +1,10 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type DestinationGcsJSONLinesNewlineDelimitedJSON struct {
+ Compression *DestinationGcsCompression `tfsdk:"compression"`
+ FormatType types.String `tfsdk:"format_type"`
+}
diff --git a/internal/provider/type_destination_gcs_output_format.go b/internal/provider/type_destination_gcs_output_format.go
old mode 100755
new mode 100644
index 01bf27458..892057b6e
--- a/internal/provider/type_destination_gcs_output_format.go
+++ b/internal/provider/type_destination_gcs_output_format.go
@@ -3,12 +3,8 @@
package provider
type DestinationGcsOutputFormat struct {
- DestinationGcsOutputFormatAvroApacheAvro *DestinationGcsOutputFormatAvroApacheAvro `tfsdk:"destination_gcs_output_format_avro_apache_avro"`
- DestinationGcsOutputFormatCSVCommaSeparatedValues *DestinationGcsOutputFormatCSVCommaSeparatedValues `tfsdk:"destination_gcs_output_format_csv_comma_separated_values"`
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON `tfsdk:"destination_gcs_output_format_json_lines_newline_delimited_json"`
- DestinationGcsOutputFormatParquetColumnarStorage *DestinationGcsOutputFormatParquetColumnarStorage `tfsdk:"destination_gcs_output_format_parquet_columnar_storage"`
- DestinationGcsUpdateOutputFormatAvroApacheAvro *DestinationGcsUpdateOutputFormatAvroApacheAvro `tfsdk:"destination_gcs_update_output_format_avro_apache_avro"`
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues `tfsdk:"destination_gcs_update_output_format_csv_comma_separated_values"`
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON `tfsdk:"destination_gcs_update_output_format_json_lines_newline_delimited_json"`
- DestinationGcsUpdateOutputFormatParquetColumnarStorage *DestinationGcsOutputFormatParquetColumnarStorage `tfsdk:"destination_gcs_update_output_format_parquet_columnar_storage"`
+ AvroApacheAvro *AvroApacheAvro `tfsdk:"avro_apache_avro"`
+ CSVCommaSeparatedValues *DestinationGcsCSVCommaSeparatedValues `tfsdk:"csv_comma_separated_values"`
+ JSONLinesNewlineDelimitedJSON *DestinationGcsJSONLinesNewlineDelimitedJSON `tfsdk:"json_lines_newline_delimited_json"`
+ ParquetColumnarStorage *DestinationGcsParquetColumnarStorage `tfsdk:"parquet_columnar_storage"`
}
diff --git a/internal/provider/type_destination_gcs_output_format_avro_apache_avro.go b/internal/provider/type_destination_gcs_output_format_avro_apache_avro.go
deleted file mode 100755
index faab5c593..000000000
--- a/internal/provider/type_destination_gcs_output_format_avro_apache_avro.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationGcsOutputFormatAvroApacheAvro struct {
- CompressionCodec DestinationGcsOutputFormatAvroApacheAvroCompressionCodec `tfsdk:"compression_codec"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec.go b/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec.go
deleted file mode 100755
index 29cb8a633..000000000
--- a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodec struct {
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 `tfsdk:"destination_gcs_output_format_avro_apache_avro_compression_codec_bzip2"`
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate `tfsdk:"destination_gcs_output_format_avro_apache_avro_compression_codec_deflate"`
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression `tfsdk:"destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression"`
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy `tfsdk:"destination_gcs_output_format_avro_apache_avro_compression_codec_snappy"`
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz `tfsdk:"destination_gcs_output_format_avro_apache_avro_compression_codec_xz"`
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard `tfsdk:"destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard"`
-}
diff --git a/internal/provider/type_destination_gcs_output_format_csv_comma_separated_values.go b/internal/provider/type_destination_gcs_output_format_csv_comma_separated_values.go
deleted file mode 100755
index 16bb58ed4..000000000
--- a/internal/provider/type_destination_gcs_output_format_csv_comma_separated_values.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationGcsOutputFormatCSVCommaSeparatedValues struct {
- Compression *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression `tfsdk:"compression"`
- Flattening types.String `tfsdk:"flattening"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression.go b/internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression.go
deleted file mode 100755
index a3b5cdfd4..000000000
--- a/internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression struct {
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP `tfsdk:"destination_gcs_output_format_csv_comma_separated_values_compression_gzip"`
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression `tfsdk:"destination_gcs_output_format_csv_comma_separated_values_compression_no_compression"`
-}
diff --git a/internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression_no_compression.go b/internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression_no_compression.go
deleted file mode 100755
index c9e05742d..000000000
--- a/internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression_no_compression.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression struct {
- CompressionType types.String `tfsdk:"compression_type"`
-}
diff --git a/internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json.go b/internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json.go
deleted file mode 100755
index 3d6fba43d..000000000
--- a/internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON struct {
- Compression *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression `tfsdk:"compression"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression.go b/internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression.go
deleted file mode 100755
index 8015e0087..000000000
--- a/internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP `tfsdk:"destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip"`
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression `tfsdk:"destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression"`
-}
diff --git a/internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression.go b/internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression.go
deleted file mode 100755
index 5b4bde936..000000000
--- a/internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression_no_compression.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression struct {
- CompressionType types.String `tfsdk:"compression_type"`
-}
diff --git a/internal/provider/type_destination_gcs_output_format_parquet_columnar_storage.go b/internal/provider/type_destination_gcs_output_format_parquet_columnar_storage.go
deleted file mode 100755
index 0cbda4c66..000000000
--- a/internal/provider/type_destination_gcs_output_format_parquet_columnar_storage.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationGcsOutputFormatParquetColumnarStorage struct {
- BlockSizeMb types.Int64 `tfsdk:"block_size_mb"`
- CompressionCodec types.String `tfsdk:"compression_codec"`
- DictionaryEncoding types.Bool `tfsdk:"dictionary_encoding"`
- DictionaryPageSizeKb types.Int64 `tfsdk:"dictionary_page_size_kb"`
- FormatType types.String `tfsdk:"format_type"`
- MaxPaddingSizeMb types.Int64 `tfsdk:"max_padding_size_mb"`
- PageSizeKb types.Int64 `tfsdk:"page_size_kb"`
-}
diff --git a/internal/provider/type_destination_s3_output_format_parquet_columnar_storage.go b/internal/provider/type_destination_gcs_parquet_columnar_storage.go
old mode 100755
new mode 100644
similarity index 90%
rename from internal/provider/type_destination_s3_output_format_parquet_columnar_storage.go
rename to internal/provider/type_destination_gcs_parquet_columnar_storage.go
index 9313bd9d0..0a2ad5ed1
--- a/internal/provider/type_destination_s3_output_format_parquet_columnar_storage.go
+++ b/internal/provider/type_destination_gcs_parquet_columnar_storage.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationS3OutputFormatParquetColumnarStorage struct {
+type DestinationGcsParquetColumnarStorage struct {
BlockSizeMb types.Int64 `tfsdk:"block_size_mb"`
CompressionCodec types.String `tfsdk:"compression_codec"`
DictionaryEncoding types.Bool `tfsdk:"dictionary_encoding"`
diff --git a/internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression_gzip.go b/internal/provider/type_destination_gcs_update_no_compression.go
old mode 100755
new mode 100644
similarity index 73%
rename from internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression_gzip.go
rename to internal/provider/type_destination_gcs_update_no_compression.go
index 5d6fe4ab5..d662df88a
--- a/internal/provider/type_destination_gcs_output_format_csv_comma_separated_values_compression_gzip.go
+++ b/internal/provider/type_destination_gcs_update_no_compression.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP struct {
+type DestinationGcsUpdateNoCompression struct {
CompressionType types.String `tfsdk:"compression_type"`
}
diff --git a/internal/provider/type_destination_gcs_update_output_format_avro_apache_avro.go b/internal/provider/type_destination_gcs_update_output_format_avro_apache_avro.go
deleted file mode 100755
index 28a52b2ab..000000000
--- a/internal/provider/type_destination_gcs_update_output_format_avro_apache_avro.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationGcsUpdateOutputFormatAvroApacheAvro struct {
- CompressionCodec DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec `tfsdk:"compression_codec"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_gcs_update_output_format_avro_apache_avro_compression_codec.go b/internal/provider/type_destination_gcs_update_output_format_avro_apache_avro_compression_codec.go
deleted file mode 100755
index 12af98868..000000000
--- a/internal/provider/type_destination_gcs_update_output_format_avro_apache_avro_compression_codec.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec struct {
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression `tfsdk:"destination_gcs_update_output_format_avro_apache_avro_compression_codec_no_compression"`
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate `tfsdk:"destination_gcs_update_output_format_avro_apache_avro_compression_codec_deflate"`
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 `tfsdk:"destination_gcs_update_output_format_avro_apache_avro_compression_codec_bzip2"`
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz `tfsdk:"destination_gcs_update_output_format_avro_apache_avro_compression_codec_xz"`
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard `tfsdk:"destination_gcs_update_output_format_avro_apache_avro_compression_codec_zstandard"`
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy `tfsdk:"destination_gcs_update_output_format_avro_apache_avro_compression_codec_snappy"`
-}
diff --git a/internal/provider/type_destination_gcs_update_output_format_csv_comma_separated_values.go b/internal/provider/type_destination_gcs_update_output_format_csv_comma_separated_values.go
deleted file mode 100755
index c69b8cefa..000000000
--- a/internal/provider/type_destination_gcs_update_output_format_csv_comma_separated_values.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues struct {
- Compression *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression `tfsdk:"compression"`
- Flattening types.String `tfsdk:"flattening"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_gcs_update_output_format_csv_comma_separated_values_compression.go b/internal/provider/type_destination_gcs_update_output_format_csv_comma_separated_values_compression.go
deleted file mode 100755
index 1f4f9901d..000000000
--- a/internal/provider/type_destination_gcs_update_output_format_csv_comma_separated_values_compression.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression struct {
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression `tfsdk:"destination_gcs_update_output_format_csv_comma_separated_values_compression_no_compression"`
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP `tfsdk:"destination_gcs_update_output_format_csv_comma_separated_values_compression_gzip"`
-}
diff --git a/internal/provider/type_destination_gcs_update_output_format_json_lines_newline_delimited_json.go b/internal/provider/type_destination_gcs_update_output_format_json_lines_newline_delimited_json.go
deleted file mode 100755
index cfa4b3393..000000000
--- a/internal/provider/type_destination_gcs_update_output_format_json_lines_newline_delimited_json.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON struct {
- Compression *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression `tfsdk:"compression"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_gcs_update_output_format_json_lines_newline_delimited_json_compression.go b/internal/provider/type_destination_gcs_update_output_format_json_lines_newline_delimited_json_compression.go
deleted file mode 100755
index f8d72764f..000000000
--- a/internal/provider/type_destination_gcs_update_output_format_json_lines_newline_delimited_json_compression.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression `tfsdk:"destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_no_compression"`
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP `tfsdk:"destination_gcs_update_output_format_json_lines_newline_delimited_json_compression_gzip"`
-}
diff --git a/internal/provider/type_destination_google_sheets.go b/internal/provider/type_destination_google_sheets.go
old mode 100755
new mode 100644
index 3d5fabfca..36b403613
--- a/internal/provider/type_destination_google_sheets.go
+++ b/internal/provider/type_destination_google_sheets.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationGoogleSheets struct {
- Credentials DestinationGoogleSheetsAuthenticationViaGoogleOAuth `tfsdk:"credentials"`
- DestinationType types.String `tfsdk:"destination_type"`
- SpreadsheetID types.String `tfsdk:"spreadsheet_id"`
+ Credentials DestinationGoogleSheetsAuthenticationViaGoogleOAuth `tfsdk:"credentials"`
+ SpreadsheetID types.String `tfsdk:"spreadsheet_id"`
}
diff --git a/internal/provider/type_destination_google_sheets_authentication_via_google_o_auth.go b/internal/provider/type_destination_google_sheets_authentication_via_google_o_auth.go
old mode 100755
new mode 100644
diff --git a/internal/provider/type_destination_keen.go b/internal/provider/type_destination_keen.go
old mode 100755
new mode 100644
index fba1809e2..d06803893
--- a/internal/provider/type_destination_keen.go
+++ b/internal/provider/type_destination_keen.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationKeen struct {
- APIKey types.String `tfsdk:"api_key"`
- DestinationType types.String `tfsdk:"destination_type"`
- InferTimestamp types.Bool `tfsdk:"infer_timestamp"`
- ProjectID types.String `tfsdk:"project_id"`
+ APIKey types.String `tfsdk:"api_key"`
+ InferTimestamp types.Bool `tfsdk:"infer_timestamp"`
+ ProjectID types.String `tfsdk:"project_id"`
}
diff --git a/internal/provider/type_destination_kinesis.go b/internal/provider/type_destination_kinesis.go
old mode 100755
new mode 100644
index 40e380026..188fc0b4e
--- a/internal/provider/type_destination_kinesis.go
+++ b/internal/provider/type_destination_kinesis.go
@@ -5,11 +5,10 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationKinesis struct {
- AccessKey types.String `tfsdk:"access_key"`
- BufferSize types.Int64 `tfsdk:"buffer_size"`
- DestinationType types.String `tfsdk:"destination_type"`
- Endpoint types.String `tfsdk:"endpoint"`
- PrivateKey types.String `tfsdk:"private_key"`
- Region types.String `tfsdk:"region"`
- ShardCount types.Int64 `tfsdk:"shard_count"`
+ AccessKey types.String `tfsdk:"access_key"`
+ BufferSize types.Int64 `tfsdk:"buffer_size"`
+ Endpoint types.String `tfsdk:"endpoint"`
+ PrivateKey types.String `tfsdk:"private_key"`
+ Region types.String `tfsdk:"region"`
+ ShardCount types.Int64 `tfsdk:"shard_count"`
}
diff --git a/internal/provider/type_destination_langchain.go b/internal/provider/type_destination_langchain.go
old mode 100755
new mode 100644
index 270f11a9f..6637b7427
--- a/internal/provider/type_destination_langchain.go
+++ b/internal/provider/type_destination_langchain.go
@@ -2,11 +2,8 @@
package provider
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
type DestinationLangchain struct {
- DestinationType types.String `tfsdk:"destination_type"`
- Embedding DestinationLangchainEmbedding `tfsdk:"embedding"`
- Indexing DestinationLangchainIndexing `tfsdk:"indexing"`
- Processing DestinationLangchainProcessingConfigModel `tfsdk:"processing"`
+ Embedding DestinationLangchainEmbedding `tfsdk:"embedding"`
+ Indexing DestinationLangchainIndexing `tfsdk:"indexing"`
+ Processing DestinationLangchainProcessingConfigModel `tfsdk:"processing"`
}
diff --git a/internal/provider/type_destination_langchain_embedding.go b/internal/provider/type_destination_langchain_embedding.go
old mode 100755
new mode 100644
index e87d455c6..9d2b8c435
--- a/internal/provider/type_destination_langchain_embedding.go
+++ b/internal/provider/type_destination_langchain_embedding.go
@@ -3,8 +3,6 @@
package provider
type DestinationLangchainEmbedding struct {
- DestinationLangchainEmbeddingFake *DestinationLangchainEmbeddingFake `tfsdk:"destination_langchain_embedding_fake"`
- DestinationLangchainEmbeddingOpenAI *DestinationLangchainEmbeddingOpenAI `tfsdk:"destination_langchain_embedding_open_ai"`
- DestinationLangchainUpdateEmbeddingFake *DestinationLangchainEmbeddingFake `tfsdk:"destination_langchain_update_embedding_fake"`
- DestinationLangchainUpdateEmbeddingOpenAI *DestinationLangchainEmbeddingOpenAI `tfsdk:"destination_langchain_update_embedding_open_ai"`
+ Fake *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"fake"`
+ OpenAI *OpenAI `tfsdk:"open_ai"`
}
diff --git a/internal/provider/type_destination_langchain_indexing.go b/internal/provider/type_destination_langchain_indexing.go
old mode 100755
new mode 100644
index 5f8d95055..b9ecdcea4
--- a/internal/provider/type_destination_langchain_indexing.go
+++ b/internal/provider/type_destination_langchain_indexing.go
@@ -3,10 +3,7 @@
package provider
type DestinationLangchainIndexing struct {
- DestinationLangchainIndexingChromaLocalPersistance *DestinationLangchainIndexingChromaLocalPersistance `tfsdk:"destination_langchain_indexing_chroma_local_persistance"`
- DestinationLangchainIndexingDocArrayHnswSearch *DestinationLangchainIndexingDocArrayHnswSearch `tfsdk:"destination_langchain_indexing_doc_array_hnsw_search"`
- DestinationLangchainIndexingPinecone *DestinationLangchainIndexingPinecone `tfsdk:"destination_langchain_indexing_pinecone"`
- DestinationLangchainUpdateIndexingChromaLocalPersistance *DestinationLangchainIndexingChromaLocalPersistance `tfsdk:"destination_langchain_update_indexing_chroma_local_persistance"`
- DestinationLangchainUpdateIndexingDocArrayHnswSearch *DestinationLangchainIndexingDocArrayHnswSearch `tfsdk:"destination_langchain_update_indexing_doc_array_hnsw_search"`
- DestinationLangchainUpdateIndexingPinecone *DestinationLangchainIndexingPinecone `tfsdk:"destination_langchain_update_indexing_pinecone"`
+ ChromaLocalPersistance *ChromaLocalPersistance `tfsdk:"chroma_local_persistance"`
+ DocArrayHnswSearch *DocArrayHnswSearch `tfsdk:"doc_array_hnsw_search"`
+ Pinecone *DestinationLangchainPinecone `tfsdk:"pinecone"`
}
diff --git a/internal/provider/type_destination_langchain_indexing_pinecone.go b/internal/provider/type_destination_langchain_indexing_pinecone.go
deleted file mode 100755
index cf6cb1c81..000000000
--- a/internal/provider/type_destination_langchain_indexing_pinecone.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationLangchainIndexingPinecone struct {
- Index types.String `tfsdk:"index"`
- Mode types.String `tfsdk:"mode"`
- PineconeEnvironment types.String `tfsdk:"pinecone_environment"`
- PineconeKey types.String `tfsdk:"pinecone_key"`
-}
diff --git a/internal/provider/type_destination_pinecone_indexing.go b/internal/provider/type_destination_langchain_pinecone.go
old mode 100755
new mode 100644
similarity index 88%
rename from internal/provider/type_destination_pinecone_indexing.go
rename to internal/provider/type_destination_langchain_pinecone.go
index d13f6ba4a..ef3ffac9f
--- a/internal/provider/type_destination_pinecone_indexing.go
+++ b/internal/provider/type_destination_langchain_pinecone.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationPineconeIndexing struct {
+type DestinationLangchainPinecone struct {
Index types.String `tfsdk:"index"`
PineconeEnvironment types.String `tfsdk:"pinecone_environment"`
PineconeKey types.String `tfsdk:"pinecone_key"`
diff --git a/internal/provider/type_destination_langchain_processing_config_model.go b/internal/provider/type_destination_langchain_processing_config_model.go
old mode 100755
new mode 100644
diff --git a/internal/provider/type_destination_milvus.go b/internal/provider/type_destination_milvus.go
old mode 100755
new mode 100644
index 3d3ea9fdf..259da3d79
--- a/internal/provider/type_destination_milvus.go
+++ b/internal/provider/type_destination_milvus.go
@@ -2,11 +2,8 @@
package provider
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
type DestinationMilvus struct {
- DestinationType types.String `tfsdk:"destination_type"`
- Embedding DestinationMilvusEmbedding `tfsdk:"embedding"`
- Indexing DestinationMilvusIndexing `tfsdk:"indexing"`
- Processing DestinationMilvusProcessingConfigModel `tfsdk:"processing"`
+ Embedding DestinationMilvusEmbedding `tfsdk:"embedding"`
+ Indexing DestinationMilvusIndexing `tfsdk:"indexing"`
+ Processing DestinationMilvusProcessingConfigModel `tfsdk:"processing"`
}
diff --git a/internal/provider/type_destination_milvus_indexing_authentication_api_token.go b/internal/provider/type_destination_milvus_api_token.go
old mode 100755
new mode 100644
similarity index 66%
rename from internal/provider/type_destination_milvus_indexing_authentication_api_token.go
rename to internal/provider/type_destination_milvus_api_token.go
index a49094f16..75af6acdb
--- a/internal/provider/type_destination_milvus_indexing_authentication_api_token.go
+++ b/internal/provider/type_destination_milvus_api_token.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationMilvusIndexingAuthenticationAPIToken struct {
- Mode types.String `tfsdk:"mode"`
+type DestinationMilvusAPIToken struct {
Token types.String `tfsdk:"token"`
}
diff --git a/internal/provider/type_destination_milvus_authentication.go b/internal/provider/type_destination_milvus_authentication.go
new file mode 100644
index 000000000..d1ed3bff5
--- /dev/null
+++ b/internal/provider/type_destination_milvus_authentication.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationMilvusAuthentication struct {
+ APIToken *DestinationMilvusAPIToken `tfsdk:"api_token"`
+ NoAuth *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"no_auth"`
+ UsernamePassword *UsernamePassword `tfsdk:"username_password"`
+}
diff --git a/internal/provider/type_destination_milvus_embedding.go b/internal/provider/type_destination_milvus_embedding.go
old mode 100755
new mode 100644
index d282f648f..206a25ef5
--- a/internal/provider/type_destination_milvus_embedding.go
+++ b/internal/provider/type_destination_milvus_embedding.go
@@ -3,12 +3,10 @@
package provider
type DestinationMilvusEmbedding struct {
- DestinationMilvusEmbeddingCohere *DestinationMilvusEmbeddingCohere `tfsdk:"destination_milvus_embedding_cohere"`
- DestinationMilvusEmbeddingFake *DestinationLangchainEmbeddingFake `tfsdk:"destination_milvus_embedding_fake"`
- DestinationMilvusEmbeddingFromField *DestinationMilvusEmbeddingFromField `tfsdk:"destination_milvus_embedding_from_field"`
- DestinationMilvusEmbeddingOpenAI *DestinationLangchainEmbeddingOpenAI `tfsdk:"destination_milvus_embedding_open_ai"`
- DestinationMilvusUpdateEmbeddingCohere *DestinationMilvusEmbeddingCohere `tfsdk:"destination_milvus_update_embedding_cohere"`
- DestinationMilvusUpdateEmbeddingFake *DestinationLangchainEmbeddingFake `tfsdk:"destination_milvus_update_embedding_fake"`
- DestinationMilvusUpdateEmbeddingFromField *DestinationMilvusEmbeddingFromField `tfsdk:"destination_milvus_update_embedding_from_field"`
- DestinationMilvusUpdateEmbeddingOpenAI *DestinationLangchainEmbeddingOpenAI `tfsdk:"destination_milvus_update_embedding_open_ai"`
+ AzureOpenAI *AzureOpenAI `tfsdk:"azure_open_ai"`
+ Cohere *Cohere `tfsdk:"cohere"`
+ Fake *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"fake"`
+ FromField *FromField `tfsdk:"from_field"`
+ OpenAI *OpenAI `tfsdk:"open_ai"`
+ OpenAICompatible *OpenAICompatible `tfsdk:"open_ai_compatible"`
}
diff --git a/internal/provider/type_destination_milvus_indexing.go b/internal/provider/type_destination_milvus_indexing.go
old mode 100755
new mode 100644
index 6aca07861..cf9723f6c
--- a/internal/provider/type_destination_milvus_indexing.go
+++ b/internal/provider/type_destination_milvus_indexing.go
@@ -5,10 +5,10 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationMilvusIndexing struct {
- Auth DestinationMilvusIndexingAuthentication `tfsdk:"auth"`
- Collection types.String `tfsdk:"collection"`
- Db types.String `tfsdk:"db"`
- Host types.String `tfsdk:"host"`
- TextField types.String `tfsdk:"text_field"`
- VectorField types.String `tfsdk:"vector_field"`
+ Auth DestinationMilvusAuthentication `tfsdk:"auth"`
+ Collection types.String `tfsdk:"collection"`
+ Db types.String `tfsdk:"db"`
+ Host types.String `tfsdk:"host"`
+ TextField types.String `tfsdk:"text_field"`
+ VectorField types.String `tfsdk:"vector_field"`
}
diff --git a/internal/provider/type_destination_milvus_indexing_authentication.go b/internal/provider/type_destination_milvus_indexing_authentication.go
deleted file mode 100755
index 260541485..000000000
--- a/internal/provider/type_destination_milvus_indexing_authentication.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationMilvusIndexingAuthentication struct {
- DestinationMilvusIndexingAuthenticationAPIToken *DestinationMilvusIndexingAuthenticationAPIToken `tfsdk:"destination_milvus_indexing_authentication_api_token"`
- DestinationMilvusIndexingAuthenticationNoAuth *DestinationMilvusIndexingAuthenticationNoAuth `tfsdk:"destination_milvus_indexing_authentication_no_auth"`
- DestinationMilvusIndexingAuthenticationUsernamePassword *DestinationMilvusIndexingAuthenticationUsernamePassword `tfsdk:"destination_milvus_indexing_authentication_username_password"`
- DestinationMilvusUpdateIndexingAuthenticationAPIToken *DestinationMilvusIndexingAuthenticationAPIToken `tfsdk:"destination_milvus_update_indexing_authentication_api_token"`
- DestinationMilvusUpdateIndexingAuthenticationNoAuth *DestinationMilvusIndexingAuthenticationNoAuth `tfsdk:"destination_milvus_update_indexing_authentication_no_auth"`
- DestinationMilvusUpdateIndexingAuthenticationUsernamePassword *DestinationMilvusIndexingAuthenticationUsernamePassword `tfsdk:"destination_milvus_update_indexing_authentication_username_password"`
-}
diff --git a/internal/provider/type_destination_milvus_processing_config_model.go b/internal/provider/type_destination_milvus_processing_config_model.go
old mode 100755
new mode 100644
index 4c58a7c62..56a829a91
--- a/internal/provider/type_destination_milvus_processing_config_model.go
+++ b/internal/provider/type_destination_milvus_processing_config_model.go
@@ -5,8 +5,10 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationMilvusProcessingConfigModel struct {
- ChunkOverlap types.Int64 `tfsdk:"chunk_overlap"`
- ChunkSize types.Int64 `tfsdk:"chunk_size"`
- MetadataFields []types.String `tfsdk:"metadata_fields"`
- TextFields []types.String `tfsdk:"text_fields"`
+ ChunkOverlap types.Int64 `tfsdk:"chunk_overlap"`
+ ChunkSize types.Int64 `tfsdk:"chunk_size"`
+ FieldNameMappings []FieldNameMappingConfigModel `tfsdk:"field_name_mappings"`
+ MetadataFields []types.String `tfsdk:"metadata_fields"`
+ TextFields []types.String `tfsdk:"text_fields"`
+ TextSplitter *DestinationMilvusTextSplitter `tfsdk:"text_splitter"`
}
diff --git a/internal/provider/type_destination_milvus_text_splitter.go b/internal/provider/type_destination_milvus_text_splitter.go
new file mode 100644
index 000000000..d39086300
--- /dev/null
+++ b/internal/provider/type_destination_milvus_text_splitter.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationMilvusTextSplitter struct {
+ ByMarkdownHeader *ByMarkdownHeader `tfsdk:"by_markdown_header"`
+ ByProgrammingLanguage *ByProgrammingLanguage `tfsdk:"by_programming_language"`
+ BySeparator *BySeparator `tfsdk:"by_separator"`
+}
diff --git a/internal/provider/type_destination_mongodb.go b/internal/provider/type_destination_mongodb.go
old mode 100755
new mode 100644
index 2fb364c2b..e789e7117
--- a/internal/provider/type_destination_mongodb.go
+++ b/internal/provider/type_destination_mongodb.go
@@ -5,9 +5,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationMongodb struct {
- AuthType DestinationMongodbAuthorizationType `tfsdk:"auth_type"`
- Database types.String `tfsdk:"database"`
- DestinationType types.String `tfsdk:"destination_type"`
- InstanceType *DestinationMongodbMongoDbInstanceType `tfsdk:"instance_type"`
- TunnelMethod *DestinationMongodbSSHTunnelMethod `tfsdk:"tunnel_method"`
+ AuthType DestinationMongodbAuthorizationType `tfsdk:"auth_type"`
+ Database types.String `tfsdk:"database"`
+ InstanceType *DestinationMongodbMongoDbInstanceType `tfsdk:"instance_type"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
}
diff --git a/internal/provider/type_destination_mongodb_authorization_type.go b/internal/provider/type_destination_mongodb_authorization_type.go
old mode 100755
new mode 100644
index 066a80182..adc167487
--- a/internal/provider/type_destination_mongodb_authorization_type.go
+++ b/internal/provider/type_destination_mongodb_authorization_type.go
@@ -3,8 +3,6 @@
package provider
type DestinationMongodbAuthorizationType struct {
- DestinationMongodbAuthorizationTypeLoginPassword *DestinationMongodbAuthorizationTypeLoginPassword `tfsdk:"destination_mongodb_authorization_type_login_password"`
- DestinationMongodbAuthorizationTypeNone *DestinationMongodbAuthorizationTypeNone `tfsdk:"destination_mongodb_authorization_type_none"`
- DestinationMongodbUpdateAuthorizationTypeLoginPassword *DestinationMongodbAuthorizationTypeLoginPassword `tfsdk:"destination_mongodb_update_authorization_type_login_password"`
- DestinationMongodbUpdateAuthorizationTypeNone *DestinationMongodbAuthorizationTypeNone `tfsdk:"destination_mongodb_update_authorization_type_none"`
+ LoginPassword *UsernamePassword `tfsdk:"login_password"`
+ None *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"none"`
}
diff --git a/internal/provider/type_destination_mongodb_authorization_type_login_password.go b/internal/provider/type_destination_mongodb_authorization_type_login_password.go
deleted file mode 100755
index 828b46f31..000000000
--- a/internal/provider/type_destination_mongodb_authorization_type_login_password.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationMongodbAuthorizationTypeLoginPassword struct {
- Authorization types.String `tfsdk:"authorization"`
- Password types.String `tfsdk:"password"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/internal/provider/type_destination_mongodb_authorization_type_none.go b/internal/provider/type_destination_mongodb_authorization_type_none.go
deleted file mode 100755
index 8cf0ca02e..000000000
--- a/internal/provider/type_destination_mongodb_authorization_type_none.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationMongodbAuthorizationTypeNone struct {
- Authorization types.String `tfsdk:"authorization"`
-}
diff --git a/internal/provider/type_destination_mongodb_mongo_db_instance_type.go b/internal/provider/type_destination_mongodb_mongo_db_instance_type.go
old mode 100755
new mode 100644
index 7573cab6a..dd10f585b
--- a/internal/provider/type_destination_mongodb_mongo_db_instance_type.go
+++ b/internal/provider/type_destination_mongodb_mongo_db_instance_type.go
@@ -3,10 +3,7 @@
package provider
type DestinationMongodbMongoDbInstanceType struct {
- DestinationMongodbMongoDBInstanceTypeMongoDBAtlas *DestinationMongodbMongoDBInstanceTypeMongoDBAtlas `tfsdk:"destination_mongodb_mongo_db_instance_type_mongo_db_atlas"`
- DestinationMongodbMongoDbInstanceTypeReplicaSet *DestinationMongodbMongoDbInstanceTypeReplicaSet `tfsdk:"destination_mongodb_mongo_db_instance_type_replica_set"`
- DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance *DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance `tfsdk:"destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance"`
- DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas *DestinationMongodbMongoDBInstanceTypeMongoDBAtlas `tfsdk:"destination_mongodb_update_mongo_db_instance_type_mongo_db_atlas"`
- DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet *DestinationMongodbMongoDbInstanceTypeReplicaSet `tfsdk:"destination_mongodb_update_mongo_db_instance_type_replica_set"`
- DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance *DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance `tfsdk:"destination_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance"`
+ MongoDBAtlas *MongoDBAtlas `tfsdk:"mongo_db_atlas"`
+ ReplicaSet *ReplicaSet `tfsdk:"replica_set"`
+ StandaloneMongoDbInstance *StandaloneMongoDbInstance `tfsdk:"standalone_mongo_db_instance"`
}
diff --git a/internal/provider/type_destination_mongodb_mongo_db_instance_type_replica_set.go b/internal/provider/type_destination_mongodb_mongo_db_instance_type_replica_set.go
deleted file mode 100755
index f3e62d280..000000000
--- a/internal/provider/type_destination_mongodb_mongo_db_instance_type_replica_set.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationMongodbMongoDbInstanceTypeReplicaSet struct {
- Instance types.String `tfsdk:"instance"`
- ReplicaSet types.String `tfsdk:"replica_set"`
- ServerAddresses types.String `tfsdk:"server_addresses"`
-}
diff --git a/internal/provider/type_destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance.go b/internal/provider/type_destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance.go
deleted file mode 100755
index 55d903d69..000000000
--- a/internal/provider/type_destination_mongodb_mongo_db_instance_type_standalone_mongo_db_instance.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance struct {
- Host types.String `tfsdk:"host"`
- Instance types.String `tfsdk:"instance"`
- Port types.Int64 `tfsdk:"port"`
-}
diff --git a/internal/provider/type_destination_mongodb_ssh_tunnel_method.go b/internal/provider/type_destination_mongodb_ssh_tunnel_method.go
deleted file mode 100755
index 15c6f2b33..000000000
--- a/internal/provider/type_destination_mongodb_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationMongodbSSHTunnelMethod struct {
- DestinationMongodbSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_mongodb_ssh_tunnel_method_no_tunnel"`
- DestinationMongodbSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_mongodb_ssh_tunnel_method_password_authentication"`
- DestinationMongodbSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_mongodb_ssh_tunnel_method_ssh_key_authentication"`
- DestinationMongodbUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_mongodb_update_ssh_tunnel_method_no_tunnel"`
- DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_mongodb_update_ssh_tunnel_method_password_authentication"`
- DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_mongodb_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_destination_mssql.go b/internal/provider/type_destination_mssql.go
old mode 100755
new mode 100644
index f6379db85..b625797c7
--- a/internal/provider/type_destination_mssql.go
+++ b/internal/provider/type_destination_mssql.go
@@ -5,14 +5,13 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationMssql struct {
- Database types.String `tfsdk:"database"`
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- Schema types.String `tfsdk:"schema"`
- SslMethod *DestinationMssqlSSLMethod `tfsdk:"ssl_method"`
- TunnelMethod *DestinationMssqlSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ Database types.String `tfsdk:"database"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ Schema types.String `tfsdk:"schema"`
+ SslMethod *DestinationMssqlSSLMethod `tfsdk:"ssl_method"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_destination_mssql_ssh_tunnel_method.go b/internal/provider/type_destination_mssql_ssh_tunnel_method.go
deleted file mode 100755
index 45dffc779..000000000
--- a/internal/provider/type_destination_mssql_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationMssqlSSHTunnelMethod struct {
- DestinationMssqlSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_mssql_ssh_tunnel_method_no_tunnel"`
- DestinationMssqlSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_mssql_ssh_tunnel_method_password_authentication"`
- DestinationMssqlSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_mssql_ssh_tunnel_method_ssh_key_authentication"`
- DestinationMssqlUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_mssql_update_ssh_tunnel_method_no_tunnel"`
- DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_mssql_update_ssh_tunnel_method_password_authentication"`
- DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_mssql_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_destination_mssql_ssl_method.go b/internal/provider/type_destination_mssql_ssl_method.go
old mode 100755
new mode 100644
index 2abead09a..129090e16
--- a/internal/provider/type_destination_mssql_ssl_method.go
+++ b/internal/provider/type_destination_mssql_ssl_method.go
@@ -3,8 +3,6 @@
package provider
type DestinationMssqlSSLMethod struct {
- DestinationMssqlSSLMethodEncryptedTrustServerCertificate *DestinationMssqlSSLMethodEncryptedTrustServerCertificate `tfsdk:"destination_mssql_ssl_method_encrypted_trust_server_certificate"`
- DestinationMssqlSSLMethodEncryptedVerifyCertificate *DestinationMssqlSSLMethodEncryptedVerifyCertificate `tfsdk:"destination_mssql_ssl_method_encrypted_verify_certificate"`
- DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate *DestinationMssqlSSLMethodEncryptedTrustServerCertificate `tfsdk:"destination_mssql_update_ssl_method_encrypted_trust_server_certificate"`
- DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate *DestinationMssqlSSLMethodEncryptedVerifyCertificate `tfsdk:"destination_mssql_update_ssl_method_encrypted_verify_certificate"`
+ EncryptedTrustServerCertificate *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"encrypted_trust_server_certificate"`
+ EncryptedVerifyCertificate *EncryptedVerifyCertificate `tfsdk:"encrypted_verify_certificate"`
}
diff --git a/internal/provider/type_destination_mssql_ssl_method_encrypted_trust_server_certificate.go b/internal/provider/type_destination_mssql_ssl_method_encrypted_trust_server_certificate.go
deleted file mode 100755
index cf706fccd..000000000
--- a/internal/provider/type_destination_mssql_ssl_method_encrypted_trust_server_certificate.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationMssqlSSLMethodEncryptedTrustServerCertificate struct {
- SslMethod types.String `tfsdk:"ssl_method"`
-}
diff --git a/internal/provider/type_destination_mysql.go b/internal/provider/type_destination_mysql.go
deleted file mode 100755
index 90590290a..000000000
--- a/internal/provider/type_destination_mysql.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationMysql struct {
- Database types.String `tfsdk:"database"`
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- TunnelMethod *DestinationMysqlSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/internal/provider/type_destination_mysql_ssh_tunnel_method.go b/internal/provider/type_destination_mysql_ssh_tunnel_method.go
deleted file mode 100755
index 6d173aa1e..000000000
--- a/internal/provider/type_destination_mysql_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationMysqlSSHTunnelMethod struct {
- DestinationMysqlSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_mysql_ssh_tunnel_method_no_tunnel"`
- DestinationMysqlSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_mysql_ssh_tunnel_method_password_authentication"`
- DestinationMysqlSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_mysql_ssh_tunnel_method_ssh_key_authentication"`
- DestinationMysqlUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_mysql_update_ssh_tunnel_method_no_tunnel"`
- DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_mysql_update_ssh_tunnel_method_password_authentication"`
- DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_mysql_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_destination_oracle.go b/internal/provider/type_destination_oracle.go
old mode 100755
new mode 100644
index 57ca13a53..39f294669
--- a/internal/provider/type_destination_oracle.go
+++ b/internal/provider/type_destination_oracle.go
@@ -5,13 +5,12 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationOracle struct {
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- Schema types.String `tfsdk:"schema"`
- Sid types.String `tfsdk:"sid"`
- TunnelMethod *DestinationOracleSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ Schema types.String `tfsdk:"schema"`
+ Sid types.String `tfsdk:"sid"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_destination_oracle_ssh_tunnel_method.go b/internal/provider/type_destination_oracle_ssh_tunnel_method.go
deleted file mode 100755
index f5a695a3c..000000000
--- a/internal/provider/type_destination_oracle_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationOracleSSHTunnelMethod struct {
- DestinationOracleSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_oracle_ssh_tunnel_method_no_tunnel"`
- DestinationOracleSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_oracle_ssh_tunnel_method_password_authentication"`
- DestinationOracleSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_oracle_ssh_tunnel_method_ssh_key_authentication"`
- DestinationOracleUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_oracle_update_ssh_tunnel_method_no_tunnel"`
- DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_oracle_update_ssh_tunnel_method_password_authentication"`
- DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_oracle_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_destination_pinecone.go b/internal/provider/type_destination_pinecone.go
old mode 100755
new mode 100644
index 64eb170f3..713f2e713
--- a/internal/provider/type_destination_pinecone.go
+++ b/internal/provider/type_destination_pinecone.go
@@ -2,11 +2,8 @@
package provider
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
type DestinationPinecone struct {
- DestinationType types.String `tfsdk:"destination_type"`
- Embedding DestinationPineconeEmbedding `tfsdk:"embedding"`
- Indexing DestinationPineconeIndexing `tfsdk:"indexing"`
- Processing DestinationMilvusProcessingConfigModel `tfsdk:"processing"`
+ Embedding DestinationPineconeEmbedding `tfsdk:"embedding"`
+ Indexing DestinationLangchainPinecone `tfsdk:"indexing"`
+ Processing DestinationMilvusProcessingConfigModel `tfsdk:"processing"`
}
diff --git a/internal/provider/type_destination_pinecone_embedding.go b/internal/provider/type_destination_pinecone_embedding.go
old mode 100755
new mode 100644
index abec7c195..f0f0706d9
--- a/internal/provider/type_destination_pinecone_embedding.go
+++ b/internal/provider/type_destination_pinecone_embedding.go
@@ -3,10 +3,9 @@
package provider
type DestinationPineconeEmbedding struct {
- DestinationPineconeEmbeddingCohere *DestinationMilvusEmbeddingCohere `tfsdk:"destination_pinecone_embedding_cohere"`
- DestinationPineconeEmbeddingFake *DestinationLangchainEmbeddingFake `tfsdk:"destination_pinecone_embedding_fake"`
- DestinationPineconeEmbeddingOpenAI *DestinationLangchainEmbeddingOpenAI `tfsdk:"destination_pinecone_embedding_open_ai"`
- DestinationPineconeUpdateEmbeddingCohere *DestinationMilvusEmbeddingCohere `tfsdk:"destination_pinecone_update_embedding_cohere"`
- DestinationPineconeUpdateEmbeddingFake *DestinationLangchainEmbeddingFake `tfsdk:"destination_pinecone_update_embedding_fake"`
- DestinationPineconeUpdateEmbeddingOpenAI *DestinationLangchainEmbeddingOpenAI `tfsdk:"destination_pinecone_update_embedding_open_ai"`
+ AzureOpenAI *AzureOpenAI `tfsdk:"azure_open_ai"`
+ Cohere *Cohere `tfsdk:"cohere"`
+ Fake *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"fake"`
+ OpenAI *OpenAI `tfsdk:"open_ai"`
+ OpenAICompatible *OpenAICompatible `tfsdk:"open_ai_compatible"`
}
diff --git a/internal/provider/type_destination_postgres.go b/internal/provider/type_destination_postgres.go
old mode 100755
new mode 100644
index c3dd06aaa..a02e12313
--- a/internal/provider/type_destination_postgres.go
+++ b/internal/provider/type_destination_postgres.go
@@ -5,14 +5,13 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationPostgres struct {
- Database types.String `tfsdk:"database"`
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- Schema types.String `tfsdk:"schema"`
- SslMode *DestinationPostgresSSLModes `tfsdk:"ssl_mode"`
- TunnelMethod *DestinationPostgresSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ Database types.String `tfsdk:"database"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ Schema types.String `tfsdk:"schema"`
+ SslMode *DestinationPostgresSSLModes `tfsdk:"ssl_mode"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_destination_postgres_ssh_tunnel_method.go b/internal/provider/type_destination_postgres_ssh_tunnel_method.go
deleted file mode 100755
index 71dda83ed..000000000
--- a/internal/provider/type_destination_postgres_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationPostgresSSHTunnelMethod struct {
- DestinationPostgresSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_postgres_ssh_tunnel_method_no_tunnel"`
- DestinationPostgresSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_postgres_ssh_tunnel_method_password_authentication"`
- DestinationPostgresSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_postgres_ssh_tunnel_method_ssh_key_authentication"`
- DestinationPostgresUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_postgres_update_ssh_tunnel_method_no_tunnel"`
- DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_postgres_update_ssh_tunnel_method_password_authentication"`
- DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_postgres_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_destination_postgres_ssl_modes.go b/internal/provider/type_destination_postgres_ssl_modes.go
old mode 100755
new mode 100644
index ad9357c97..b2ca8413b
--- a/internal/provider/type_destination_postgres_ssl_modes.go
+++ b/internal/provider/type_destination_postgres_ssl_modes.go
@@ -3,16 +3,10 @@
package provider
type DestinationPostgresSSLModes struct {
- DestinationPostgresSSLModesAllow *DestinationPostgresSSLModesAllow `tfsdk:"destination_postgres_ssl_modes_allow"`
- DestinationPostgresSSLModesDisable *DestinationPostgresSSLModesDisable `tfsdk:"destination_postgres_ssl_modes_disable"`
- DestinationPostgresSSLModesPrefer *DestinationPostgresSSLModesPrefer `tfsdk:"destination_postgres_ssl_modes_prefer"`
- DestinationPostgresSSLModesRequire *DestinationPostgresSSLModesRequire `tfsdk:"destination_postgres_ssl_modes_require"`
- DestinationPostgresSSLModesVerifyCa *DestinationPostgresSSLModesVerifyCa `tfsdk:"destination_postgres_ssl_modes_verify_ca"`
- DestinationPostgresSSLModesVerifyFull *DestinationPostgresSSLModesVerifyFull `tfsdk:"destination_postgres_ssl_modes_verify_full"`
- DestinationPostgresUpdateSSLModesAllow *DestinationPostgresSSLModesAllow `tfsdk:"destination_postgres_update_ssl_modes_allow"`
- DestinationPostgresUpdateSSLModesDisable *DestinationPostgresSSLModesDisable `tfsdk:"destination_postgres_update_ssl_modes_disable"`
- DestinationPostgresUpdateSSLModesPrefer *DestinationPostgresSSLModesPrefer `tfsdk:"destination_postgres_update_ssl_modes_prefer"`
- DestinationPostgresUpdateSSLModesRequire *DestinationPostgresSSLModesRequire `tfsdk:"destination_postgres_update_ssl_modes_require"`
- DestinationPostgresUpdateSSLModesVerifyCa *DestinationPostgresSSLModesVerifyCa `tfsdk:"destination_postgres_update_ssl_modes_verify_ca"`
- DestinationPostgresUpdateSSLModesVerifyFull *DestinationPostgresSSLModesVerifyFull `tfsdk:"destination_postgres_update_ssl_modes_verify_full"`
+ Allow *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"allow"`
+ Disable *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"disable"`
+ Prefer *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"prefer"`
+ Require *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"require"`
+ VerifyCa *VerifyCa `tfsdk:"verify_ca"`
+ VerifyFull *VerifyFull `tfsdk:"verify_full"`
}
diff --git a/internal/provider/type_destination_postgres_ssl_modes_disable.go b/internal/provider/type_destination_postgres_ssl_modes_disable.go
deleted file mode 100755
index 2e0072d6c..000000000
--- a/internal/provider/type_destination_postgres_ssl_modes_disable.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationPostgresSSLModesDisable struct {
- Mode types.String `tfsdk:"mode"`
-}
diff --git a/internal/provider/type_destination_postgres_ssl_modes_prefer.go b/internal/provider/type_destination_postgres_ssl_modes_prefer.go
deleted file mode 100755
index 564b2ae28..000000000
--- a/internal/provider/type_destination_postgres_ssl_modes_prefer.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationPostgresSSLModesPrefer struct {
- Mode types.String `tfsdk:"mode"`
-}
diff --git a/internal/provider/type_destination_postgres_ssl_modes_require.go b/internal/provider/type_destination_postgres_ssl_modes_require.go
deleted file mode 100755
index e405d447a..000000000
--- a/internal/provider/type_destination_postgres_ssl_modes_require.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationPostgresSSLModesRequire struct {
- Mode types.String `tfsdk:"mode"`
-}
diff --git a/internal/provider/type_destination_postgres_ssl_modes_verify_full.go b/internal/provider/type_destination_postgres_ssl_modes_verify_full.go
deleted file mode 100755
index 77b1b1856..000000000
--- a/internal/provider/type_destination_postgres_ssl_modes_verify_full.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationPostgresSSLModesVerifyFull struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
-}
diff --git a/internal/provider/type_destination_pubsub.go b/internal/provider/type_destination_pubsub.go
old mode 100755
new mode 100644
index c824a851c..12e108463
--- a/internal/provider/type_destination_pubsub.go
+++ b/internal/provider/type_destination_pubsub.go
@@ -10,7 +10,6 @@ type DestinationPubsub struct {
BatchingEnabled types.Bool `tfsdk:"batching_enabled"`
BatchingRequestBytesThreshold types.Int64 `tfsdk:"batching_request_bytes_threshold"`
CredentialsJSON types.String `tfsdk:"credentials_json"`
- DestinationType types.String `tfsdk:"destination_type"`
OrderingEnabled types.Bool `tfsdk:"ordering_enabled"`
ProjectID types.String `tfsdk:"project_id"`
TopicID types.String `tfsdk:"topic_id"`
diff --git a/internal/provider/type_destination_qdrant.go b/internal/provider/type_destination_qdrant.go
new file mode 100644
index 000000000..2714b9f97
--- /dev/null
+++ b/internal/provider/type_destination_qdrant.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationQdrant struct {
+ Embedding DestinationMilvusEmbedding `tfsdk:"embedding"`
+ Indexing DestinationQdrantIndexing `tfsdk:"indexing"`
+ Processing DestinationMilvusProcessingConfigModel `tfsdk:"processing"`
+}
diff --git a/internal/provider/type_destination_qdrant_authentication_method.go b/internal/provider/type_destination_qdrant_authentication_method.go
new file mode 100644
index 000000000..0ed9138d1
--- /dev/null
+++ b/internal/provider/type_destination_qdrant_authentication_method.go
@@ -0,0 +1,8 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationQdrantAuthenticationMethod struct {
+ APIKeyAuth *APIKeyAuth `tfsdk:"api_key_auth"`
+ NoAuth *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"no_auth"`
+}
diff --git a/internal/provider/type_destination_qdrant_distance_metric.go b/internal/provider/type_destination_qdrant_distance_metric.go
new file mode 100644
index 000000000..95688b7ec
--- /dev/null
+++ b/internal/provider/type_destination_qdrant_distance_metric.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationQdrantDistanceMetric struct {
+ Cos *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"cos"`
+ Dot *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"dot"`
+ Euc *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"euc"`
+}
diff --git a/internal/provider/type_destination_qdrant_indexing.go b/internal/provider/type_destination_qdrant_indexing.go
new file mode 100644
index 000000000..c19d64fee
--- /dev/null
+++ b/internal/provider/type_destination_qdrant_indexing.go
@@ -0,0 +1,14 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type DestinationQdrantIndexing struct {
+ AuthMethod *DestinationQdrantAuthenticationMethod `tfsdk:"auth_method"`
+ Collection types.String `tfsdk:"collection"`
+ DistanceMetric *DestinationQdrantDistanceMetric `tfsdk:"distance_metric"`
+ PreferGrpc types.Bool `tfsdk:"prefer_grpc"`
+ TextField types.String `tfsdk:"text_field"`
+ URL types.String `tfsdk:"url"`
+}
diff --git a/internal/provider/type_destination_redis.go b/internal/provider/type_destination_redis.go
old mode 100755
new mode 100644
index 5909bb7ab..beee91f75
--- a/internal/provider/type_destination_redis.go
+++ b/internal/provider/type_destination_redis.go
@@ -5,13 +5,12 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationRedis struct {
- CacheType types.String `tfsdk:"cache_type"`
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- Ssl types.Bool `tfsdk:"ssl"`
- SslMode *DestinationRedisSSLModes `tfsdk:"ssl_mode"`
- TunnelMethod *DestinationRedisSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ CacheType types.String `tfsdk:"cache_type"`
+ Host types.String `tfsdk:"host"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ Ssl types.Bool `tfsdk:"ssl"`
+ SslMode *DestinationRedisSSLModes `tfsdk:"ssl_mode"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_destination_redis_ssh_tunnel_method.go b/internal/provider/type_destination_redis_ssh_tunnel_method.go
deleted file mode 100755
index 797eef12c..000000000
--- a/internal/provider/type_destination_redis_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationRedisSSHTunnelMethod struct {
- DestinationRedisSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_redis_ssh_tunnel_method_no_tunnel"`
- DestinationRedisSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_redis_ssh_tunnel_method_password_authentication"`
- DestinationRedisSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_redis_ssh_tunnel_method_ssh_key_authentication"`
- DestinationRedisUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_redis_update_ssh_tunnel_method_no_tunnel"`
- DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_redis_update_ssh_tunnel_method_password_authentication"`
- DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_redis_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_destination_redis_ssl_modes.go b/internal/provider/type_destination_redis_ssl_modes.go
old mode 100755
new mode 100644
index 13dde8b0c..1881ea470
--- a/internal/provider/type_destination_redis_ssl_modes.go
+++ b/internal/provider/type_destination_redis_ssl_modes.go
@@ -3,8 +3,6 @@
package provider
type DestinationRedisSSLModes struct {
- DestinationRedisSSLModesDisable *DestinationPostgresSSLModesDisable `tfsdk:"destination_redis_ssl_modes_disable"`
- DestinationRedisSSLModesVerifyFull *DestinationPostgresSSLModesVerifyFull `tfsdk:"destination_redis_ssl_modes_verify_full"`
- DestinationRedisUpdateSSLModesDisable *DestinationPostgresSSLModesDisable `tfsdk:"destination_redis_update_ssl_modes_disable"`
- DestinationRedisUpdateSSLModesVerifyFull *DestinationPostgresSSLModesVerifyFull `tfsdk:"destination_redis_update_ssl_modes_verify_full"`
+ Disable *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"disable"`
+ VerifyFull *VerifyFull `tfsdk:"verify_full"`
}
diff --git a/internal/provider/type_destination_redshift.go b/internal/provider/type_destination_redshift.go
old mode 100755
new mode 100644
index fc74b7ac2..971b61873
--- a/internal/provider/type_destination_redshift.go
+++ b/internal/provider/type_destination_redshift.go
@@ -5,14 +5,13 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationRedshift struct {
- Database types.String `tfsdk:"database"`
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- Schema types.String `tfsdk:"schema"`
- TunnelMethod *DestinationRedshiftSSHTunnelMethod `tfsdk:"tunnel_method"`
- UploadingMethod *DestinationRedshiftUploadingMethod `tfsdk:"uploading_method"`
- Username types.String `tfsdk:"username"`
+ Database types.String `tfsdk:"database"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ Schema types.String `tfsdk:"schema"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ UploadingMethod *DestinationRedshiftUploadingMethod `tfsdk:"uploading_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_destination_redshift1.go b/internal/provider/type_destination_redshift1.go
deleted file mode 100755
index 52fbee755..000000000
--- a/internal/provider/type_destination_redshift1.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationRedshift1 struct {
- Database types.String `tfsdk:"database"`
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- Schema types.String `tfsdk:"schema"`
- TunnelMethod *DestinationRedshiftSSHTunnelMethod `tfsdk:"tunnel_method"`
- UploadingMethod *DestinationRedshiftUploadingMethod1 `tfsdk:"uploading_method"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/internal/provider/type_destination_redshift_encryption.go b/internal/provider/type_destination_redshift_encryption.go
new file mode 100644
index 000000000..efce2191b
--- /dev/null
+++ b/internal/provider/type_destination_redshift_encryption.go
@@ -0,0 +1,8 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationRedshiftEncryption struct {
+ AESCBCEnvelopeEncryption *AESCBCEnvelopeEncryption `tfsdk:"aescbc_envelope_encryption"`
+ NoEncryption *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"no_encryption"`
+}
diff --git a/internal/provider/type_destination_redshift_ssh_tunnel_method.go b/internal/provider/type_destination_redshift_ssh_tunnel_method.go
deleted file mode 100755
index bf9c4c31f..000000000
--- a/internal/provider/type_destination_redshift_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationRedshiftSSHTunnelMethod struct {
- DestinationRedshiftSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_redshift_ssh_tunnel_method_no_tunnel"`
- DestinationRedshiftSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_redshift_ssh_tunnel_method_password_authentication"`
- DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_redshift_ssh_tunnel_method_ssh_key_authentication"`
- DestinationRedshiftUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_redshift_update_ssh_tunnel_method_no_tunnel"`
- DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_redshift_update_ssh_tunnel_method_password_authentication"`
- DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_redshift_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_destination_redshift_update_uploading_method_s3_staging.go b/internal/provider/type_destination_redshift_update_uploading_method_s3_staging.go
deleted file mode 100755
index d25f22f03..000000000
--- a/internal/provider/type_destination_redshift_update_uploading_method_s3_staging.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationRedshiftUpdateUploadingMethodS3Staging struct {
- AccessKeyID types.String `tfsdk:"access_key_id"`
- Encryption *DestinationRedshiftUpdateUploadingMethodS3StagingEncryption `tfsdk:"encryption"`
- FileBufferCount types.Int64 `tfsdk:"file_buffer_count"`
- FileNamePattern types.String `tfsdk:"file_name_pattern"`
- Method types.String `tfsdk:"method"`
- PurgeStagingData types.Bool `tfsdk:"purge_staging_data"`
- S3BucketName types.String `tfsdk:"s3_bucket_name"`
- S3BucketPath types.String `tfsdk:"s3_bucket_path"`
- S3BucketRegion types.String `tfsdk:"s3_bucket_region"`
- SecretAccessKey types.String `tfsdk:"secret_access_key"`
-}
diff --git a/internal/provider/type_destination_redshift_update_uploading_method_s3_staging1.go b/internal/provider/type_destination_redshift_update_uploading_method_s3_staging1.go
deleted file mode 100755
index 7cb7027df..000000000
--- a/internal/provider/type_destination_redshift_update_uploading_method_s3_staging1.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationRedshiftUpdateUploadingMethodS3Staging1 struct {
- AccessKeyID types.String `tfsdk:"access_key_id"`
- Encryption *DestinationRedshiftUpdateUploadingMethodS3StagingEncryption `tfsdk:"encryption"`
- FileBufferCount types.Int64 `tfsdk:"file_buffer_count"`
- FileNamePattern types.String `tfsdk:"file_name_pattern"`
- Method types.String `tfsdk:"method"`
- PurgeStagingData types.Bool `tfsdk:"purge_staging_data"`
- S3BucketName types.String `tfsdk:"s3_bucket_name"`
- S3BucketPath types.String `tfsdk:"s3_bucket_path"`
- S3BucketRegion types.String `tfsdk:"s3_bucket_region"`
- SecretAccessKey types.String `tfsdk:"secret_access_key"`
-}
diff --git a/internal/provider/type_destination_redshift_update_uploading_method_s3_staging_encryption.go b/internal/provider/type_destination_redshift_update_uploading_method_s3_staging_encryption.go
deleted file mode 100755
index f494600a3..000000000
--- a/internal/provider/type_destination_redshift_update_uploading_method_s3_staging_encryption.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationRedshiftUpdateUploadingMethodS3StagingEncryption struct {
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption *DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption `tfsdk:"destination_redshift_update_uploading_method_s3_staging_encryption_no_encryption"`
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption *DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption `tfsdk:"destination_redshift_update_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption"`
-}
diff --git a/internal/provider/type_destination_redshift_uploading_method.go b/internal/provider/type_destination_redshift_uploading_method.go
old mode 100755
new mode 100644
index fc9b7da94..b1535a8b6
--- a/internal/provider/type_destination_redshift_uploading_method.go
+++ b/internal/provider/type_destination_redshift_uploading_method.go
@@ -3,8 +3,6 @@
package provider
type DestinationRedshiftUploadingMethod struct {
- DestinationRedshiftUploadingMethodS3Staging *DestinationRedshiftUploadingMethodS3Staging `tfsdk:"destination_redshift_uploading_method_s3_staging"`
- DestinationRedshiftUploadingMethodStandard *DestinationRedshiftUploadingMethodStandard `tfsdk:"destination_redshift_uploading_method_standard"`
- DestinationRedshiftUpdateUploadingMethodS3Staging *DestinationRedshiftUpdateUploadingMethodS3Staging `tfsdk:"destination_redshift_update_uploading_method_s3_staging"`
- DestinationRedshiftUpdateUploadingMethodStandard *DestinationRedshiftUploadingMethodStandard `tfsdk:"destination_redshift_update_uploading_method_standard"`
+ S3Staging *S3Staging `tfsdk:"s3_staging"`
+ Standard *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"standard"`
}
diff --git a/internal/provider/type_destination_redshift_uploading_method1.go b/internal/provider/type_destination_redshift_uploading_method1.go
deleted file mode 100755
index 4f849758a..000000000
--- a/internal/provider/type_destination_redshift_uploading_method1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationRedshiftUploadingMethod1 struct {
- DestinationRedshiftUploadingMethodS3Staging *DestinationRedshiftUploadingMethodS3Staging1 `tfsdk:"destination_redshift_uploading_method_s3_staging"`
- DestinationRedshiftUploadingMethodStandard *DestinationRedshiftUploadingMethodStandard `tfsdk:"destination_redshift_uploading_method_standard"`
- DestinationRedshiftUpdateUploadingMethodS3Staging *DestinationRedshiftUpdateUploadingMethodS3Staging1 `tfsdk:"destination_redshift_update_uploading_method_s3_staging"`
- DestinationRedshiftUpdateUploadingMethodStandard *DestinationRedshiftUploadingMethodStandard `tfsdk:"destination_redshift_update_uploading_method_standard"`
-}
diff --git a/internal/provider/type_destination_redshift_uploading_method_s3_staging.go b/internal/provider/type_destination_redshift_uploading_method_s3_staging.go
deleted file mode 100755
index 16d4e81df..000000000
--- a/internal/provider/type_destination_redshift_uploading_method_s3_staging.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationRedshiftUploadingMethodS3Staging struct {
- AccessKeyID types.String `tfsdk:"access_key_id"`
- Encryption *DestinationRedshiftUploadingMethodS3StagingEncryption `tfsdk:"encryption"`
- FileBufferCount types.Int64 `tfsdk:"file_buffer_count"`
- FileNamePattern types.String `tfsdk:"file_name_pattern"`
- Method types.String `tfsdk:"method"`
- PurgeStagingData types.Bool `tfsdk:"purge_staging_data"`
- S3BucketName types.String `tfsdk:"s3_bucket_name"`
- S3BucketPath types.String `tfsdk:"s3_bucket_path"`
- S3BucketRegion types.String `tfsdk:"s3_bucket_region"`
- SecretAccessKey types.String `tfsdk:"secret_access_key"`
-}
diff --git a/internal/provider/type_destination_redshift_uploading_method_s3_staging1.go b/internal/provider/type_destination_redshift_uploading_method_s3_staging1.go
deleted file mode 100755
index 1e9beb6c1..000000000
--- a/internal/provider/type_destination_redshift_uploading_method_s3_staging1.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationRedshiftUploadingMethodS3Staging1 struct {
- AccessKeyID types.String `tfsdk:"access_key_id"`
- Encryption *DestinationRedshiftUploadingMethodS3StagingEncryption `tfsdk:"encryption"`
- FileBufferCount types.Int64 `tfsdk:"file_buffer_count"`
- FileNamePattern types.String `tfsdk:"file_name_pattern"`
- Method types.String `tfsdk:"method"`
- PurgeStagingData types.Bool `tfsdk:"purge_staging_data"`
- S3BucketName types.String `tfsdk:"s3_bucket_name"`
- S3BucketPath types.String `tfsdk:"s3_bucket_path"`
- S3BucketRegion types.String `tfsdk:"s3_bucket_region"`
- SecretAccessKey types.String `tfsdk:"secret_access_key"`
-}
diff --git a/internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption.go b/internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption.go
deleted file mode 100755
index 16f287340..000000000
--- a/internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationRedshiftUploadingMethodS3StagingEncryption struct {
- DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption *DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption `tfsdk:"destination_redshift_uploading_method_s3_staging_encryption_aes_cbc_envelope_encryption"`
- DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption *DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption `tfsdk:"destination_redshift_uploading_method_s3_staging_encryption_no_encryption"`
-}
diff --git a/internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption_aescbc_envelope_encryption.go b/internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption_aescbc_envelope_encryption.go
deleted file mode 100755
index a7615671f..000000000
--- a/internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption_aescbc_envelope_encryption.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption struct {
- EncryptionType types.String `tfsdk:"encryption_type"`
- KeyEncryptingKey types.String `tfsdk:"key_encrypting_key"`
-}
diff --git a/internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption_no_encryption.go b/internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption_no_encryption.go
deleted file mode 100755
index 38bdbe3a9..000000000
--- a/internal/provider/type_destination_redshift_uploading_method_s3_staging_encryption_no_encryption.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption struct {
- EncryptionType types.String `tfsdk:"encryption_type"`
-}
diff --git a/internal/provider/type_destination_redshift_uploading_method_standard.go b/internal/provider/type_destination_redshift_uploading_method_standard.go
deleted file mode 100755
index 6db071108..000000000
--- a/internal/provider/type_destination_redshift_uploading_method_standard.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationRedshiftUploadingMethodStandard struct {
- Method types.String `tfsdk:"method"`
-}
diff --git a/internal/provider/type_destination_s3.go b/internal/provider/type_destination_s3.go
old mode 100755
new mode 100644
index 0e0d8f2fa..8c671071d
--- a/internal/provider/type_destination_s3.go
+++ b/internal/provider/type_destination_s3.go
@@ -6,7 +6,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationS3 struct {
AccessKeyID types.String `tfsdk:"access_key_id"`
- DestinationType types.String `tfsdk:"destination_type"`
FileNamePattern types.String `tfsdk:"file_name_pattern"`
Format DestinationS3OutputFormat `tfsdk:"format"`
S3BucketName types.String `tfsdk:"s3_bucket_name"`
diff --git a/internal/provider/type_destination_s31.go b/internal/provider/type_destination_s31.go
deleted file mode 100755
index 13e3d8ce7..000000000
--- a/internal/provider/type_destination_s31.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationS31 struct {
- AccessKeyID types.String `tfsdk:"access_key_id"`
- DestinationType types.String `tfsdk:"destination_type"`
- FileNamePattern types.String `tfsdk:"file_name_pattern"`
- Format DestinationS3OutputFormat `tfsdk:"format"`
- S3BucketName types.String `tfsdk:"s3_bucket_name"`
- S3BucketPath types.String `tfsdk:"s3_bucket_path"`
- S3BucketRegion types.String `tfsdk:"s3_bucket_region"`
- S3Endpoint types.String `tfsdk:"s3_endpoint"`
- S3PathFormat types.String `tfsdk:"s3_path_format"`
- SecretAccessKey types.String `tfsdk:"secret_access_key"`
-}
diff --git a/internal/provider/type_destination_s3_glue.go b/internal/provider/type_destination_s3_glue.go
old mode 100755
new mode 100644
index 3696794af..f0b76c25b
--- a/internal/provider/type_destination_s3_glue.go
+++ b/internal/provider/type_destination_s3_glue.go
@@ -6,7 +6,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationS3Glue struct {
AccessKeyID types.String `tfsdk:"access_key_id"`
- DestinationType types.String `tfsdk:"destination_type"`
FileNamePattern types.String `tfsdk:"file_name_pattern"`
Format DestinationS3GlueOutputFormat `tfsdk:"format"`
GlueDatabase types.String `tfsdk:"glue_database"`
diff --git a/internal/provider/type_destination_s3_glue1.go b/internal/provider/type_destination_s3_glue1.go
deleted file mode 100755
index d691c25de..000000000
--- a/internal/provider/type_destination_s3_glue1.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationS3Glue1 struct {
- AccessKeyID types.String `tfsdk:"access_key_id"`
- DestinationType types.String `tfsdk:"destination_type"`
- FileNamePattern types.String `tfsdk:"file_name_pattern"`
- Format DestinationS3GlueOutputFormat `tfsdk:"format"`
- GlueDatabase types.String `tfsdk:"glue_database"`
- GlueSerializationLibrary types.String `tfsdk:"glue_serialization_library"`
- S3BucketName types.String `tfsdk:"s3_bucket_name"`
- S3BucketPath types.String `tfsdk:"s3_bucket_path"`
- S3BucketRegion types.String `tfsdk:"s3_bucket_region"`
- S3Endpoint types.String `tfsdk:"s3_endpoint"`
- S3PathFormat types.String `tfsdk:"s3_path_format"`
- SecretAccessKey types.String `tfsdk:"secret_access_key"`
-}
diff --git a/internal/provider/type_destination_s3_glue_output_format.go b/internal/provider/type_destination_s3_glue_output_format.go
old mode 100755
new mode 100644
index 2d7da42f6..7557e5d8a
--- a/internal/provider/type_destination_s3_glue_output_format.go
+++ b/internal/provider/type_destination_s3_glue_output_format.go
@@ -3,6 +3,5 @@
package provider
type DestinationS3GlueOutputFormat struct {
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON `tfsdk:"destination_s3_glue_output_format_json_lines_newline_delimited_json"`
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON `tfsdk:"destination_s3_glue_update_output_format_json_lines_newline_delimited_json"`
+ JSONLinesNewlineDelimitedJSON *DestinationS3JSONLinesNewlineDelimitedJSON `tfsdk:"json_lines_newline_delimited_json"`
}
diff --git a/internal/provider/type_destination_s3_glue_output_format_json_lines_newline_delimited_json.go b/internal/provider/type_destination_s3_glue_output_format_json_lines_newline_delimited_json.go
deleted file mode 100755
index 3fd09c4ea..000000000
--- a/internal/provider/type_destination_s3_glue_output_format_json_lines_newline_delimited_json.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON struct {
- Compression *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression `tfsdk:"compression"`
- Flattening types.String `tfsdk:"flattening"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_s3_glue_output_format_json_lines_newline_delimited_json_compression.go b/internal/provider/type_destination_s3_glue_output_format_json_lines_newline_delimited_json_compression.go
deleted file mode 100755
index d7c7020c4..000000000
--- a/internal/provider/type_destination_s3_glue_output_format_json_lines_newline_delimited_json_compression.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP `tfsdk:"destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_gzip"`
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression `tfsdk:"destination_s3_glue_output_format_json_lines_newline_delimited_json_compression_no_compression"`
-}
diff --git a/internal/provider/type_destination_s3_glue_update_output_format_json_lines_newline_delimited_json.go b/internal/provider/type_destination_s3_glue_update_output_format_json_lines_newline_delimited_json.go
deleted file mode 100755
index 67f535dc3..000000000
--- a/internal/provider/type_destination_s3_glue_update_output_format_json_lines_newline_delimited_json.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON struct {
- Compression *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression `tfsdk:"compression"`
- Flattening types.String `tfsdk:"flattening"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression.go b/internal/provider/type_destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression.go
deleted file mode 100755
index 400b3911c..000000000
--- a/internal/provider/type_destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression `tfsdk:"destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_no_compression"`
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP `tfsdk:"destination_s3_glue_update_output_format_json_lines_newline_delimited_json_compression_gzip"`
-}
diff --git a/internal/provider/type_destination_s3_json_lines_newline_delimited_json.go b/internal/provider/type_destination_s3_json_lines_newline_delimited_json.go
new file mode 100644
index 000000000..b7216738d
--- /dev/null
+++ b/internal/provider/type_destination_s3_json_lines_newline_delimited_json.go
@@ -0,0 +1,11 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type DestinationS3JSONLinesNewlineDelimitedJSON struct {
+ Compression *DestinationGcsCompression `tfsdk:"compression"`
+ Flattening types.String `tfsdk:"flattening"`
+ FormatType types.String `tfsdk:"format_type"`
+}
diff --git a/internal/provider/type_destination_s3_output_format.go b/internal/provider/type_destination_s3_output_format.go
old mode 100755
new mode 100644
index f8356db06..010c93534
--- a/internal/provider/type_destination_s3_output_format.go
+++ b/internal/provider/type_destination_s3_output_format.go
@@ -3,12 +3,8 @@
package provider
type DestinationS3OutputFormat struct {
- DestinationS3OutputFormatAvroApacheAvro *DestinationS3OutputFormatAvroApacheAvro `tfsdk:"destination_s3_output_format_avro_apache_avro"`
- DestinationS3OutputFormatCSVCommaSeparatedValues *DestinationS3OutputFormatCSVCommaSeparatedValues `tfsdk:"destination_s3_output_format_csv_comma_separated_values"`
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON `tfsdk:"destination_s3_output_format_json_lines_newline_delimited_json"`
- DestinationS3OutputFormatParquetColumnarStorage *DestinationS3OutputFormatParquetColumnarStorage `tfsdk:"destination_s3_output_format_parquet_columnar_storage"`
- DestinationS3UpdateOutputFormatAvroApacheAvro *DestinationS3UpdateOutputFormatAvroApacheAvro `tfsdk:"destination_s3_update_output_format_avro_apache_avro"`
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValues *DestinationS3UpdateOutputFormatCSVCommaSeparatedValues `tfsdk:"destination_s3_update_output_format_csv_comma_separated_values"`
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON `tfsdk:"destination_s3_update_output_format_json_lines_newline_delimited_json"`
- DestinationS3UpdateOutputFormatParquetColumnarStorage *DestinationS3OutputFormatParquetColumnarStorage `tfsdk:"destination_s3_update_output_format_parquet_columnar_storage"`
+ AvroApacheAvro *AvroApacheAvro `tfsdk:"avro_apache_avro"`
+ CSVCommaSeparatedValues *DestinationGcsCSVCommaSeparatedValues `tfsdk:"csv_comma_separated_values"`
+ JSONLinesNewlineDelimitedJSON *DestinationS3JSONLinesNewlineDelimitedJSON `tfsdk:"json_lines_newline_delimited_json"`
+ ParquetColumnarStorage *DestinationGcsParquetColumnarStorage `tfsdk:"parquet_columnar_storage"`
}
diff --git a/internal/provider/type_destination_s3_output_format_avro_apache_avro.go b/internal/provider/type_destination_s3_output_format_avro_apache_avro.go
deleted file mode 100755
index 6239c3616..000000000
--- a/internal/provider/type_destination_s3_output_format_avro_apache_avro.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationS3OutputFormatAvroApacheAvro struct {
- CompressionCodec DestinationS3OutputFormatAvroApacheAvroCompressionCodec `tfsdk:"compression_codec"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_s3_output_format_avro_apache_avro_compression_codec.go b/internal/provider/type_destination_s3_output_format_avro_apache_avro_compression_codec.go
deleted file mode 100755
index 174dd254a..000000000
--- a/internal/provider/type_destination_s3_output_format_avro_apache_avro_compression_codec.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodec struct {
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 `tfsdk:"destination_s3_output_format_avro_apache_avro_compression_codec_bzip2"`
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate `tfsdk:"destination_s3_output_format_avro_apache_avro_compression_codec_deflate"`
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression `tfsdk:"destination_s3_output_format_avro_apache_avro_compression_codec_no_compression"`
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy `tfsdk:"destination_s3_output_format_avro_apache_avro_compression_codec_snappy"`
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz `tfsdk:"destination_s3_output_format_avro_apache_avro_compression_codec_xz"`
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard `tfsdk:"destination_s3_output_format_avro_apache_avro_compression_codec_zstandard"`
-}
diff --git a/internal/provider/type_destination_s3_output_format_csv_comma_separated_values.go b/internal/provider/type_destination_s3_output_format_csv_comma_separated_values.go
deleted file mode 100755
index 13d4d6997..000000000
--- a/internal/provider/type_destination_s3_output_format_csv_comma_separated_values.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationS3OutputFormatCSVCommaSeparatedValues struct {
- Compression *DestinationS3OutputFormatCSVCommaSeparatedValuesCompression `tfsdk:"compression"`
- Flattening types.String `tfsdk:"flattening"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_s3_output_format_csv_comma_separated_values_compression.go b/internal/provider/type_destination_s3_output_format_csv_comma_separated_values_compression.go
deleted file mode 100755
index bc4a7cedc..000000000
--- a/internal/provider/type_destination_s3_output_format_csv_comma_separated_values_compression.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationS3OutputFormatCSVCommaSeparatedValuesCompression struct {
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP `tfsdk:"destination_s3_output_format_csv_comma_separated_values_compression_gzip"`
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression `tfsdk:"destination_s3_output_format_csv_comma_separated_values_compression_no_compression"`
-}
diff --git a/internal/provider/type_destination_s3_output_format_json_lines_newline_delimited_json.go b/internal/provider/type_destination_s3_output_format_json_lines_newline_delimited_json.go
deleted file mode 100755
index 9bf90ad51..000000000
--- a/internal/provider/type_destination_s3_output_format_json_lines_newline_delimited_json.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON struct {
- Compression *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression `tfsdk:"compression"`
- Flattening types.String `tfsdk:"flattening"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_s3_output_format_json_lines_newline_delimited_json_compression.go b/internal/provider/type_destination_s3_output_format_json_lines_newline_delimited_json_compression.go
deleted file mode 100755
index 4b6ffab64..000000000
--- a/internal/provider/type_destination_s3_output_format_json_lines_newline_delimited_json_compression.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP `tfsdk:"destination_s3_output_format_json_lines_newline_delimited_json_compression_gzip"`
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression `tfsdk:"destination_s3_output_format_json_lines_newline_delimited_json_compression_no_compression"`
-}
diff --git a/internal/provider/type_destination_s3_update_output_format_avro_apache_avro.go b/internal/provider/type_destination_s3_update_output_format_avro_apache_avro.go
deleted file mode 100755
index 5b8e4488f..000000000
--- a/internal/provider/type_destination_s3_update_output_format_avro_apache_avro.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationS3UpdateOutputFormatAvroApacheAvro struct {
- CompressionCodec DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec `tfsdk:"compression_codec"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_s3_update_output_format_avro_apache_avro_compression_codec.go b/internal/provider/type_destination_s3_update_output_format_avro_apache_avro_compression_codec.go
deleted file mode 100755
index fcde6f023..000000000
--- a/internal/provider/type_destination_s3_update_output_format_avro_apache_avro_compression_codec.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec struct {
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression `tfsdk:"destination_s3_update_output_format_avro_apache_avro_compression_codec_no_compression"`
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate `tfsdk:"destination_s3_update_output_format_avro_apache_avro_compression_codec_deflate"`
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 `tfsdk:"destination_s3_update_output_format_avro_apache_avro_compression_codec_bzip2"`
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz `tfsdk:"destination_s3_update_output_format_avro_apache_avro_compression_codec_xz"`
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard `tfsdk:"destination_s3_update_output_format_avro_apache_avro_compression_codec_zstandard"`
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy `tfsdk:"destination_s3_update_output_format_avro_apache_avro_compression_codec_snappy"`
-}
diff --git a/internal/provider/type_destination_s3_update_output_format_csv_comma_separated_values.go b/internal/provider/type_destination_s3_update_output_format_csv_comma_separated_values.go
deleted file mode 100755
index 090c07b30..000000000
--- a/internal/provider/type_destination_s3_update_output_format_csv_comma_separated_values.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationS3UpdateOutputFormatCSVCommaSeparatedValues struct {
- Compression *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression `tfsdk:"compression"`
- Flattening types.String `tfsdk:"flattening"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_s3_update_output_format_csv_comma_separated_values_compression.go b/internal/provider/type_destination_s3_update_output_format_csv_comma_separated_values_compression.go
deleted file mode 100755
index d84eebe2a..000000000
--- a/internal/provider/type_destination_s3_update_output_format_csv_comma_separated_values_compression.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression struct {
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression `tfsdk:"destination_s3_update_output_format_csv_comma_separated_values_compression_no_compression"`
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP `tfsdk:"destination_s3_update_output_format_csv_comma_separated_values_compression_gzip"`
-}
diff --git a/internal/provider/type_destination_s3_update_output_format_json_lines_newline_delimited_json.go b/internal/provider/type_destination_s3_update_output_format_json_lines_newline_delimited_json.go
deleted file mode 100755
index 8243c55aa..000000000
--- a/internal/provider/type_destination_s3_update_output_format_json_lines_newline_delimited_json.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON struct {
- Compression *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression `tfsdk:"compression"`
- Flattening types.String `tfsdk:"flattening"`
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_destination_s3_update_output_format_json_lines_newline_delimited_json_compression.go b/internal/provider/type_destination_s3_update_output_format_json_lines_newline_delimited_json_compression.go
deleted file mode 100755
index f15341c03..000000000
--- a/internal/provider/type_destination_s3_update_output_format_json_lines_newline_delimited_json_compression.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression `tfsdk:"destination_s3_update_output_format_json_lines_newline_delimited_json_compression_no_compression"`
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP `tfsdk:"destination_s3_update_output_format_json_lines_newline_delimited_json_compression_gzip"`
-}
diff --git a/internal/provider/type_destination_sftp_json.go b/internal/provider/type_destination_sftp_json.go
old mode 100755
new mode 100644
index 26307d9b1..443bbe5c7
--- a/internal/provider/type_destination_sftp_json.go
+++ b/internal/provider/type_destination_sftp_json.go
@@ -6,7 +6,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationSftpJSON struct {
DestinationPath types.String `tfsdk:"destination_path"`
- DestinationType types.String `tfsdk:"destination_type"`
Host types.String `tfsdk:"host"`
Password types.String `tfsdk:"password"`
Port types.Int64 `tfsdk:"port"`
diff --git a/internal/provider/type_destination_snowflake.go b/internal/provider/type_destination_snowflake.go
old mode 100755
new mode 100644
index 1700af440..c6f755ef2
--- a/internal/provider/type_destination_snowflake.go
+++ b/internal/provider/type_destination_snowflake.go
@@ -5,14 +5,14 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationSnowflake struct {
- Credentials *DestinationSnowflakeAuthorizationMethod `tfsdk:"credentials"`
- Database types.String `tfsdk:"database"`
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- RawDataSchema types.String `tfsdk:"raw_data_schema"`
- Role types.String `tfsdk:"role"`
- Schema types.String `tfsdk:"schema"`
- Username types.String `tfsdk:"username"`
- Warehouse types.String `tfsdk:"warehouse"`
+ Credentials *DestinationSnowflakeAuthorizationMethod `tfsdk:"credentials"`
+ Database types.String `tfsdk:"database"`
+ DisableTypeDedupe types.Bool `tfsdk:"disable_type_dedupe"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ RawDataSchema types.String `tfsdk:"raw_data_schema"`
+ Role types.String `tfsdk:"role"`
+ Schema types.String `tfsdk:"schema"`
+ Username types.String `tfsdk:"username"`
+ Warehouse types.String `tfsdk:"warehouse"`
}
diff --git a/internal/provider/type_destination_snowflake_authorization_method.go b/internal/provider/type_destination_snowflake_authorization_method.go
old mode 100755
new mode 100644
index 62f051069..d92b97cdb
--- a/internal/provider/type_destination_snowflake_authorization_method.go
+++ b/internal/provider/type_destination_snowflake_authorization_method.go
@@ -3,10 +3,7 @@
package provider
type DestinationSnowflakeAuthorizationMethod struct {
- DestinationSnowflakeAuthorizationMethodKeyPairAuthentication *DestinationSnowflakeAuthorizationMethodKeyPairAuthentication `tfsdk:"destination_snowflake_authorization_method_key_pair_authentication"`
- DestinationSnowflakeAuthorizationMethodOAuth20 *DestinationSnowflakeAuthorizationMethodOAuth20 `tfsdk:"destination_snowflake_authorization_method_o_auth2_0"`
- DestinationSnowflakeAuthorizationMethodUsernameAndPassword *DestinationSnowflakeAuthorizationMethodUsernameAndPassword `tfsdk:"destination_snowflake_authorization_method_username_and_password"`
- DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication *DestinationSnowflakeAuthorizationMethodKeyPairAuthentication `tfsdk:"destination_snowflake_update_authorization_method_key_pair_authentication"`
- DestinationSnowflakeUpdateAuthorizationMethodOAuth20 *DestinationSnowflakeAuthorizationMethodOAuth20 `tfsdk:"destination_snowflake_update_authorization_method_o_auth2_0"`
- DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword *DestinationSnowflakeAuthorizationMethodUsernameAndPassword `tfsdk:"destination_snowflake_update_authorization_method_username_and_password"`
+ KeyPairAuthentication *KeyPairAuthentication `tfsdk:"key_pair_authentication"`
+ OAuth20 *OAuth20 `tfsdk:"o_auth20"`
+ UsernameAndPassword *UsernameAndPassword `tfsdk:"username_and_password"`
}
diff --git a/internal/provider/type_destination_snowflake_authorization_method_username_and_password.go b/internal/provider/type_destination_snowflake_authorization_method_username_and_password.go
deleted file mode 100755
index efdf6bf1a..000000000
--- a/internal/provider/type_destination_snowflake_authorization_method_username_and_password.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type DestinationSnowflakeAuthorizationMethodUsernameAndPassword struct {
- AuthType types.String `tfsdk:"auth_type"`
- Password types.String `tfsdk:"password"`
-}
diff --git a/internal/provider/type_destination_timeplus.go b/internal/provider/type_destination_timeplus.go
old mode 100755
new mode 100644
index 6ad57aee2..34c365543
--- a/internal/provider/type_destination_timeplus.go
+++ b/internal/provider/type_destination_timeplus.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationTimeplus struct {
- Apikey types.String `tfsdk:"apikey"`
- DestinationType types.String `tfsdk:"destination_type"`
- Endpoint types.String `tfsdk:"endpoint"`
+ Apikey types.String `tfsdk:"apikey"`
+ Endpoint types.String `tfsdk:"endpoint"`
}
diff --git a/internal/provider/type_destination_typesense.go b/internal/provider/type_destination_typesense.go
old mode 100755
new mode 100644
index c895f570a..ae145bb43
--- a/internal/provider/type_destination_typesense.go
+++ b/internal/provider/type_destination_typesense.go
@@ -5,10 +5,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationTypesense struct {
- APIKey types.String `tfsdk:"api_key"`
- BatchSize types.Int64 `tfsdk:"batch_size"`
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- Port types.String `tfsdk:"port"`
- Protocol types.String `tfsdk:"protocol"`
+ APIKey types.String `tfsdk:"api_key"`
+ BatchSize types.Int64 `tfsdk:"batch_size"`
+ Host types.String `tfsdk:"host"`
+ Port types.String `tfsdk:"port"`
+ Protocol types.String `tfsdk:"protocol"`
}
diff --git a/internal/provider/type_destination_vertica.go b/internal/provider/type_destination_vertica.go
old mode 100755
new mode 100644
index fa5e7c0fa..57e203d71
--- a/internal/provider/type_destination_vertica.go
+++ b/internal/provider/type_destination_vertica.go
@@ -5,13 +5,12 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationVertica struct {
- Database types.String `tfsdk:"database"`
- DestinationType types.String `tfsdk:"destination_type"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- Schema types.String `tfsdk:"schema"`
- TunnelMethod *DestinationVerticaSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ Database types.String `tfsdk:"database"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ Schema types.String `tfsdk:"schema"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_destination_vertica_ssh_tunnel_method.go b/internal/provider/type_destination_vertica_ssh_tunnel_method.go
deleted file mode 100755
index d7b892191..000000000
--- a/internal/provider/type_destination_vertica_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type DestinationVerticaSSHTunnelMethod struct {
- DestinationVerticaSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_vertica_ssh_tunnel_method_no_tunnel"`
- DestinationVerticaSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_vertica_ssh_tunnel_method_password_authentication"`
- DestinationVerticaSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_vertica_ssh_tunnel_method_ssh_key_authentication"`
- DestinationVerticaUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"destination_vertica_update_ssh_tunnel_method_no_tunnel"`
- DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"destination_vertica_update_ssh_tunnel_method_password_authentication"`
- DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"destination_vertica_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_destination_weaviate.go b/internal/provider/type_destination_weaviate.go
new file mode 100644
index 000000000..b18da570d
--- /dev/null
+++ b/internal/provider/type_destination_weaviate.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationWeaviate struct {
+ Embedding DestinationWeaviateEmbedding `tfsdk:"embedding"`
+ Indexing DestinationWeaviateIndexing `tfsdk:"indexing"`
+ Processing DestinationMilvusProcessingConfigModel `tfsdk:"processing"`
+}
diff --git a/internal/provider/type_destination_weaviate_authentication.go b/internal/provider/type_destination_weaviate_authentication.go
new file mode 100644
index 000000000..87672b752
--- /dev/null
+++ b/internal/provider/type_destination_weaviate_authentication.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationWeaviateAuthentication struct {
+ APIToken *DestinationMilvusAPIToken `tfsdk:"api_token"`
+ NoAuthentication *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"no_authentication"`
+ UsernamePassword *UsernamePassword `tfsdk:"username_password"`
+}
diff --git a/internal/provider/type_destination_weaviate_embedding.go b/internal/provider/type_destination_weaviate_embedding.go
new file mode 100644
index 000000000..446f36f8a
--- /dev/null
+++ b/internal/provider/type_destination_weaviate_embedding.go
@@ -0,0 +1,13 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type DestinationWeaviateEmbedding struct {
+ AzureOpenAI *AzureOpenAI `tfsdk:"azure_open_ai"`
+ Cohere *Cohere `tfsdk:"cohere"`
+ Fake *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"fake"`
+ FromField *FromField `tfsdk:"from_field"`
+ NoExternalEmbedding *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"no_external_embedding"`
+ OpenAI *OpenAI `tfsdk:"open_ai"`
+ OpenAICompatible *OpenAICompatible `tfsdk:"open_ai_compatible"`
+}
diff --git a/internal/provider/type_destination_weaviate_indexing.go b/internal/provider/type_destination_weaviate_indexing.go
new file mode 100644
index 000000000..561a304e0
--- /dev/null
+++ b/internal/provider/type_destination_weaviate_indexing.go
@@ -0,0 +1,14 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type DestinationWeaviateIndexing struct {
+ AdditionalHeaders []Header `tfsdk:"additional_headers"`
+ Auth DestinationWeaviateAuthentication `tfsdk:"auth"`
+ BatchSize types.Int64 `tfsdk:"batch_size"`
+ DefaultVectorizer types.String `tfsdk:"default_vectorizer"`
+ Host types.String `tfsdk:"host"`
+ TextField types.String `tfsdk:"text_field"`
+}
diff --git a/internal/provider/type_destination_xata.go b/internal/provider/type_destination_xata.go
old mode 100755
new mode 100644
index 1020ecf11..df1fb11f2
--- a/internal/provider/type_destination_xata.go
+++ b/internal/provider/type_destination_xata.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type DestinationXata struct {
- APIKey types.String `tfsdk:"api_key"`
- DbURL types.String `tfsdk:"db_url"`
- DestinationType types.String `tfsdk:"destination_type"`
+ APIKey types.String `tfsdk:"api_key"`
+ DbURL types.String `tfsdk:"db_url"`
}
diff --git a/internal/provider/type_destination_langchain_indexing_doc_array_hnsw_search.go b/internal/provider/type_doc_array_hnsw_search.go
old mode 100755
new mode 100644
similarity index 66%
rename from internal/provider/type_destination_langchain_indexing_doc_array_hnsw_search.go
rename to internal/provider/type_doc_array_hnsw_search.go
index 8dee2f017..6fe9d5e92
--- a/internal/provider/type_destination_langchain_indexing_doc_array_hnsw_search.go
+++ b/internal/provider/type_doc_array_hnsw_search.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationLangchainIndexingDocArrayHnswSearch struct {
+type DocArrayHnswSearch struct {
DestinationPath types.String `tfsdk:"destination_path"`
- Mode types.String `tfsdk:"mode"`
}
diff --git a/internal/provider/type_destination_databricks_data_source_recommended_managed_tables.go b/internal/provider/type_document_file_type_format_experimental.go
old mode 100755
new mode 100644
similarity index 55%
rename from internal/provider/type_destination_databricks_data_source_recommended_managed_tables.go
rename to internal/provider/type_document_file_type_format_experimental.go
index 304da46b5..86c0ec7b7
--- a/internal/provider/type_destination_databricks_data_source_recommended_managed_tables.go
+++ b/internal/provider/type_document_file_type_format_experimental.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationDatabricksDataSourceRecommendedManagedTables struct {
- DataSourceType types.String `tfsdk:"data_source_type"`
+type DocumentFileTypeFormatExperimental struct {
+ SkipUnprocessableFileTypes types.Bool `tfsdk:"skip_unprocessable_file_types"`
}
diff --git a/internal/provider/type_enabled.go b/internal/provider/type_enabled.go
new file mode 100644
index 000000000..a74d98d7c
--- /dev/null
+++ b/internal/provider/type_enabled.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type Enabled struct {
+ Column types.String `tfsdk:"column"`
+}
diff --git a/internal/provider/type_destination_mssql_ssl_method_encrypted_verify_certificate.go b/internal/provider/type_encrypted_verify_certificate.go
old mode 100755
new mode 100644
similarity index 64%
rename from internal/provider/type_destination_mssql_ssl_method_encrypted_verify_certificate.go
rename to internal/provider/type_encrypted_verify_certificate.go
index dbcd20c6b..0088d4cfe
--- a/internal/provider/type_destination_mssql_ssl_method_encrypted_verify_certificate.go
+++ b/internal/provider/type_encrypted_verify_certificate.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationMssqlSSLMethodEncryptedVerifyCertificate struct {
+type EncryptedVerifyCertificate struct {
HostNameInCertificate types.String `tfsdk:"host_name_in_certificate"`
- SslMethod types.String `tfsdk:"ssl_method"`
}
diff --git a/internal/provider/type_expression.go b/internal/provider/type_expression.go
new file mode 100644
index 000000000..b62140745
--- /dev/null
+++ b/internal/provider/type_expression.go
@@ -0,0 +1,10 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type Expression struct {
+ FieldName types.String `tfsdk:"field_name"`
+ Filter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter `tfsdk:"filter"`
+}
diff --git a/internal/provider/type_destination_firebolt_loading_method_external_table_via_s3.go b/internal/provider/type_external_table_via_s3.go
old mode 100755
new mode 100644
similarity index 76%
rename from internal/provider/type_destination_firebolt_loading_method_external_table_via_s3.go
rename to internal/provider/type_external_table_via_s3.go
index b7ee4c519..fce313e14
--- a/internal/provider/type_destination_firebolt_loading_method_external_table_via_s3.go
+++ b/internal/provider/type_external_table_via_s3.go
@@ -4,10 +4,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationFireboltLoadingMethodExternalTableViaS3 struct {
+type ExternalTableViaS3 struct {
AwsKeyID types.String `tfsdk:"aws_key_id"`
AwsKeySecret types.String `tfsdk:"aws_key_secret"`
- Method types.String `tfsdk:"method"`
S3Bucket types.String `tfsdk:"s3_bucket"`
S3Region types.String `tfsdk:"s3_region"`
}
diff --git a/internal/provider/type_destination_azure_blob_storage_output_format_json_lines_newline_delimited_json.go b/internal/provider/type_field_name_mapping_config_model.go
old mode 100755
new mode 100644
similarity index 54%
rename from internal/provider/type_destination_azure_blob_storage_output_format_json_lines_newline_delimited_json.go
rename to internal/provider/type_field_name_mapping_config_model.go
index 1fee0e377..d85793536
--- a/internal/provider/type_destination_azure_blob_storage_output_format_json_lines_newline_delimited_json.go
+++ b/internal/provider/type_field_name_mapping_config_model.go
@@ -4,6 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON struct {
- FormatType types.String `tfsdk:"format_type"`
+type FieldNameMappingConfigModel struct {
+ FromField types.String `tfsdk:"from_field"`
+ ToField types.String `tfsdk:"to_field"`
}
diff --git a/internal/provider/type_file_based_stream_config.go b/internal/provider/type_file_based_stream_config.go
new file mode 100644
index 000000000..0e95925a8
--- /dev/null
+++ b/internal/provider/type_file_based_stream_config.go
@@ -0,0 +1,17 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type FileBasedStreamConfig struct {
+ DaysToSyncIfHistoryIsFull types.Int64 `tfsdk:"days_to_sync_if_history_is_full"`
+ Format SourceAzureBlobStorageFormat `tfsdk:"format"`
+ Globs []types.String `tfsdk:"globs"`
+ InputSchema types.String `tfsdk:"input_schema"`
+ LegacyPrefix types.String `tfsdk:"legacy_prefix"`
+ Name types.String `tfsdk:"name"`
+ PrimaryKey types.String `tfsdk:"primary_key"`
+ Schemaless types.Bool `tfsdk:"schemaless"`
+ ValidationPolicy types.String `tfsdk:"validation_policy"`
+}
diff --git a/internal/provider/type_destination_milvus_embedding_from_field.go b/internal/provider/type_from_field.go
old mode 100755
new mode 100644
similarity index 73%
rename from internal/provider/type_destination_milvus_embedding_from_field.go
rename to internal/provider/type_from_field.go
index b06084e93..575da0b1f
--- a/internal/provider/type_destination_milvus_embedding_from_field.go
+++ b/internal/provider/type_from_field.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationMilvusEmbeddingFromField struct {
+type FromField struct {
Dimensions types.Int64 `tfsdk:"dimensions"`
FieldName types.String `tfsdk:"field_name"`
- Mode types.String `tfsdk:"mode"`
}
diff --git a/internal/provider/type_source_file_secure_storage_provider_gcs_google_cloud_storage.go b/internal/provider/type_gcs_google_cloud_storage.go
old mode 100755
new mode 100644
similarity index 65%
rename from internal/provider/type_source_file_secure_storage_provider_gcs_google_cloud_storage.go
rename to internal/provider/type_gcs_google_cloud_storage.go
index caab03e25..48c112fcc
--- a/internal/provider/type_source_file_secure_storage_provider_gcs_google_cloud_storage.go
+++ b/internal/provider/type_gcs_google_cloud_storage.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceFileSecureStorageProviderGCSGoogleCloudStorage struct {
+type GCSGoogleCloudStorage struct {
ServiceAccountJSON types.String `tfsdk:"service_account_json"`
- Storage types.String `tfsdk:"storage"`
}
diff --git a/internal/provider/type_gcs_staging.go b/internal/provider/type_gcs_staging.go
new file mode 100644
index 000000000..c6ed313a1
--- /dev/null
+++ b/internal/provider/type_gcs_staging.go
@@ -0,0 +1,12 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type GCSStaging struct {
+ Credential DestinationBigqueryCredential `tfsdk:"credential"`
+ GcsBucketName types.String `tfsdk:"gcs_bucket_name"`
+ GcsBucketPath types.String `tfsdk:"gcs_bucket_path"`
+ KeepFilesInGcsBucket types.String `tfsdk:"keep_files_in_gcs_bucket"`
+}
diff --git a/internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip.go b/internal/provider/type_gzip.go
old mode 100755
new mode 100644
similarity index 71%
rename from internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip.go
rename to internal/provider/type_gzip.go
index 54afb3c98..6d5dac633
--- a/internal/provider/type_destination_gcs_output_format_json_lines_newline_delimited_json_compression_gzip.go
+++ b/internal/provider/type_gzip.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP struct {
+type Gzip struct {
CompressionType types.String `tfsdk:"compression_type"`
}
diff --git a/internal/provider/type_header.go b/internal/provider/type_header.go
new file mode 100644
index 000000000..cd06335ea
--- /dev/null
+++ b/internal/provider/type_header.go
@@ -0,0 +1,10 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type Header struct {
+ HeaderKey types.String `tfsdk:"header_key"`
+ Value types.String `tfsdk:"value"`
+}
diff --git a/internal/provider/type_destination_bigquery_loading_method_gcs_staging_credential_hmac_key.go b/internal/provider/type_hmac_key.go
old mode 100755
new mode 100644
similarity index 81%
rename from internal/provider/type_destination_bigquery_loading_method_gcs_staging_credential_hmac_key.go
rename to internal/provider/type_hmac_key.go
index 92797ef04..77badd8f5
--- a/internal/provider/type_destination_bigquery_loading_method_gcs_staging_credential_hmac_key.go
+++ b/internal/provider/type_hmac_key.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey struct {
+type HMACKey struct {
CredentialType types.String `tfsdk:"credential_type"`
HmacKeyAccessID types.String `tfsdk:"hmac_key_access_id"`
HmacKeySecret types.String `tfsdk:"hmac_key_secret"`
diff --git a/internal/provider/type_https_public_web.go b/internal/provider/type_https_public_web.go
new file mode 100644
index 000000000..2d0d11382
--- /dev/null
+++ b/internal/provider/type_https_public_web.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type HTTPSPublicWeb struct {
+ UserAgent types.Bool `tfsdk:"user_agent"`
+}
diff --git a/internal/provider/type_iam_role.go b/internal/provider/type_iam_role.go
new file mode 100644
index 000000000..09f05a367
--- /dev/null
+++ b/internal/provider/type_iam_role.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type IAMRole struct {
+ RoleArn types.String `tfsdk:"role_arn"`
+}
diff --git a/internal/provider/type_source_file_secure_storage_provider_s3_amazon_web_services.go b/internal/provider/type_iam_user.go
old mode 100755
new mode 100644
similarity index 70%
rename from internal/provider/type_source_file_secure_storage_provider_s3_amazon_web_services.go
rename to internal/provider/type_iam_user.go
index ea3712295..93994fa77
--- a/internal/provider/type_source_file_secure_storage_provider_s3_amazon_web_services.go
+++ b/internal/provider/type_iam_user.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceFileSecureStorageProviderS3AmazonWebServices struct {
+type IAMUser struct {
AwsAccessKeyID types.String `tfsdk:"aws_access_key_id"`
AwsSecretAccessKey types.String `tfsdk:"aws_secret_access_key"`
- Storage types.String `tfsdk:"storage"`
}
diff --git a/internal/provider/type_source_facebook_marketing_insight_config.go b/internal/provider/type_insight_config.go
old mode 100755
new mode 100644
similarity index 93%
rename from internal/provider/type_source_facebook_marketing_insight_config.go
rename to internal/provider/type_insight_config.go
index de579c0ce..d9cea7676
--- a/internal/provider/type_source_facebook_marketing_insight_config.go
+++ b/internal/provider/type_insight_config.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceFacebookMarketingInsightConfig struct {
+type InsightConfig struct {
ActionBreakdowns []types.String `tfsdk:"action_breakdowns"`
ActionReportTime types.String `tfsdk:"action_report_time"`
Breakdowns []types.String `tfsdk:"breakdowns"`
diff --git a/internal/provider/type_destination_aws_datalake_output_format_wildcard_parquet_columnar_storage.go b/internal/provider/type_json_lines_newline_delimited_json.go
old mode 100755
new mode 100644
similarity index 77%
rename from internal/provider/type_destination_aws_datalake_output_format_wildcard_parquet_columnar_storage.go
rename to internal/provider/type_json_lines_newline_delimited_json.go
index f3d68ea9e..f23abfea7
--- a/internal/provider/type_destination_aws_datalake_output_format_wildcard_parquet_columnar_storage.go
+++ b/internal/provider/type_json_lines_newline_delimited_json.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage struct {
+type JSONLinesNewlineDelimitedJSON struct {
CompressionCodec types.String `tfsdk:"compression_codec"`
FormatType types.String `tfsdk:"format_type"`
}
diff --git a/internal/provider/type_source_s3_file_format_jsonl.go b/internal/provider/type_jsonl.go
old mode 100755
new mode 100644
similarity index 78%
rename from internal/provider/type_source_s3_file_format_jsonl.go
rename to internal/provider/type_jsonl.go
index c5607f781..ac6583ad8
--- a/internal/provider/type_source_s3_file_format_jsonl.go
+++ b/internal/provider/type_jsonl.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceS3FileFormatJsonl struct {
+type Jsonl struct {
BlockSize types.Int64 `tfsdk:"block_size"`
- Filetype types.String `tfsdk:"filetype"`
NewlinesInValues types.Bool `tfsdk:"newlines_in_values"`
UnexpectedFieldBehavior types.String `tfsdk:"unexpected_field_behavior"`
}
diff --git a/internal/provider/type_destination_snowflake_authorization_method_key_pair_authentication.go b/internal/provider/type_key_pair_authentication.go
old mode 100755
new mode 100644
similarity index 68%
rename from internal/provider/type_destination_snowflake_authorization_method_key_pair_authentication.go
rename to internal/provider/type_key_pair_authentication.go
index 623d47936..818b3de8e
--- a/internal/provider/type_destination_snowflake_authorization_method_key_pair_authentication.go
+++ b/internal/provider/type_key_pair_authentication.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationSnowflakeAuthorizationMethodKeyPairAuthentication struct {
- AuthType types.String `tfsdk:"auth_type"`
+type KeyPairAuthentication struct {
PrivateKey types.String `tfsdk:"private_key"`
PrivateKeyPassword types.String `tfsdk:"private_key_password"`
}
diff --git a/internal/provider/type_source_alloydb_replication_method_logical_replication_cdc.go b/internal/provider/type_logical_replication_cdc.go
old mode 100755
new mode 100644
similarity index 83%
rename from internal/provider/type_source_alloydb_replication_method_logical_replication_cdc.go
rename to internal/provider/type_logical_replication_cdc.go
index 5460f2f0d..4806361b0
--- a/internal/provider/type_source_alloydb_replication_method_logical_replication_cdc.go
+++ b/internal/provider/type_logical_replication_cdc.go
@@ -4,13 +4,12 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceAlloydbReplicationMethodLogicalReplicationCDC struct {
+type LogicalReplicationCDC struct {
+ AdditionalProperties types.String `tfsdk:"additional_properties"`
InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"`
LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"`
- Method types.String `tfsdk:"method"`
Plugin types.String `tfsdk:"plugin"`
Publication types.String `tfsdk:"publication"`
QueueSize types.Int64 `tfsdk:"queue_size"`
ReplicationSlot types.String `tfsdk:"replication_slot"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
}
diff --git a/internal/provider/type_destination_mongodb_mongo_db_instance_type_mongo_db_atlas.go b/internal/provider/type_mongo_db_atlas.go
old mode 100755
new mode 100644
similarity index 79%
rename from internal/provider/type_destination_mongodb_mongo_db_instance_type_mongo_db_atlas.go
rename to internal/provider/type_mongo_db_atlas.go
index 1e7e62bfc..44bbb08f1
--- a/internal/provider/type_destination_mongodb_mongo_db_instance_type_mongo_db_atlas.go
+++ b/internal/provider/type_mongo_db_atlas.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationMongodbMongoDBInstanceTypeMongoDBAtlas struct {
+type MongoDBAtlas struct {
ClusterURL types.String `tfsdk:"cluster_url"`
Instance types.String `tfsdk:"instance"`
}
diff --git a/internal/provider/type_mongo_db_atlas_replica_set.go b/internal/provider/type_mongo_db_atlas_replica_set.go
new file mode 100644
index 000000000..84ce3b229
--- /dev/null
+++ b/internal/provider/type_mongo_db_atlas_replica_set.go
@@ -0,0 +1,14 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type MongoDBAtlasReplicaSet struct {
+ AdditionalProperties types.String `tfsdk:"additional_properties"`
+ AuthSource types.String `tfsdk:"auth_source"`
+ ConnectionString types.String `tfsdk:"connection_string"`
+ Database types.String `tfsdk:"database"`
+ Password types.String `tfsdk:"password"`
+ Username types.String `tfsdk:"username"`
+}
diff --git a/internal/provider/type_source_oracle_encryption_native_network_encryption_nne.go b/internal/provider/type_native_network_encryption_nne.go
old mode 100755
new mode 100644
similarity index 63%
rename from internal/provider/type_source_oracle_encryption_native_network_encryption_nne.go
rename to internal/provider/type_native_network_encryption_nne.go
index 70a3cf871..f56b98725
--- a/internal/provider/type_source_oracle_encryption_native_network_encryption_nne.go
+++ b/internal/provider/type_native_network_encryption_nne.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceOracleEncryptionNativeNetworkEncryptionNNE struct {
+type NativeNetworkEncryptionNNE struct {
EncryptionAlgorithm types.String `tfsdk:"encryption_algorithm"`
- EncryptionMethod types.String `tfsdk:"encryption_method"`
}
diff --git a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_snappy.go b/internal/provider/type_no_compression.go
old mode 100755
new mode 100644
similarity index 71%
rename from internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_snappy.go
rename to internal/provider/type_no_compression.go
index ef7f3e103..e4a3929a0
--- a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_snappy.go
+++ b/internal/provider/type_no_compression.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy struct {
+type NoCompression struct {
Codec types.String `tfsdk:"codec"`
}
diff --git a/internal/provider/type_not_expression.go b/internal/provider/type_not_expression.go
new file mode 100644
index 000000000..b87f8e032
--- /dev/null
+++ b/internal/provider/type_not_expression.go
@@ -0,0 +1,7 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type NotExpression struct {
+ Expression *Expression `tfsdk:"expression"`
+}
diff --git a/internal/provider/type_source_github_authentication_o_auth.go b/internal/provider/type_o_auth.go
old mode 100755
new mode 100644
similarity index 76%
rename from internal/provider/type_source_github_authentication_o_auth.go
rename to internal/provider/type_o_auth.go
index 37c97fc75..3ff48cd83
--- a/internal/provider/type_source_github_authentication_o_auth.go
+++ b/internal/provider/type_o_auth.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceGithubAuthenticationOAuth struct {
+type OAuth struct {
AccessToken types.String `tfsdk:"access_token"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
- OptionTitle types.String `tfsdk:"option_title"`
}
diff --git a/internal/provider/type_source_zendesk_chat_authorization_method_o_auth20.go b/internal/provider/type_o_auth20.go
old mode 100755
new mode 100644
similarity index 76%
rename from internal/provider/type_source_zendesk_chat_authorization_method_o_auth20.go
rename to internal/provider/type_o_auth20.go
index d3e8cdab4..4a8aa3253
--- a/internal/provider/type_source_zendesk_chat_authorization_method_o_auth20.go
+++ b/internal/provider/type_o_auth20.go
@@ -4,10 +4,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceZendeskChatAuthorizationMethodOAuth20 struct {
+type OAuth20 struct {
AccessToken types.String `tfsdk:"access_token"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
- Credentials types.String `tfsdk:"credentials"`
RefreshToken types.String `tfsdk:"refresh_token"`
}
diff --git a/internal/provider/type_source_vantage.go b/internal/provider/type_o_auth2_access_token.go
old mode 100755
new mode 100644
similarity index 73%
rename from internal/provider/type_source_vantage.go
rename to internal/provider/type_o_auth2_access_token.go
index 3ac33c11d..aa2363908
--- a/internal/provider/type_source_vantage.go
+++ b/internal/provider/type_o_auth2_access_token.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceVantage struct {
+type OAuth2AccessToken struct {
AccessToken types.String `tfsdk:"access_token"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_auth0_authentication_method_o_auth2_confidential_application.go b/internal/provider/type_o_auth2_confidential_application.go
old mode 100755
new mode 100644
similarity index 71%
rename from internal/provider/type_source_auth0_authentication_method_o_auth2_confidential_application.go
rename to internal/provider/type_o_auth2_confidential_application.go
index 20189d393..72df33a94
--- a/internal/provider/type_source_auth0_authentication_method_o_auth2_confidential_application.go
+++ b/internal/provider/type_o_auth2_confidential_application.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication struct {
+type OAuth2ConfidentialApplication struct {
Audience types.String `tfsdk:"audience"`
- AuthType types.String `tfsdk:"auth_type"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
}
diff --git a/internal/provider/type_destination_langchain_embedding_open_ai.go b/internal/provider/type_open_ai.go
old mode 100755
new mode 100644
similarity index 69%
rename from internal/provider/type_destination_langchain_embedding_open_ai.go
rename to internal/provider/type_open_ai.go
index 99fc449fa..64f0709ad
--- a/internal/provider/type_destination_langchain_embedding_open_ai.go
+++ b/internal/provider/type_open_ai.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationLangchainEmbeddingOpenAI struct {
- Mode types.String `tfsdk:"mode"`
+type OpenAI struct {
OpenaiKey types.String `tfsdk:"openai_key"`
}
diff --git a/internal/provider/type_source_airtable_authentication_personal_access_token.go b/internal/provider/type_open_ai_compatible.go
old mode 100755
new mode 100644
similarity index 54%
rename from internal/provider/type_source_airtable_authentication_personal_access_token.go
rename to internal/provider/type_open_ai_compatible.go
index 61af133db..4e821b1fc
--- a/internal/provider/type_source_airtable_authentication_personal_access_token.go
+++ b/internal/provider/type_open_ai_compatible.go
@@ -4,7 +4,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceAirtableAuthenticationPersonalAccessToken struct {
+type OpenAICompatible struct {
APIKey types.String `tfsdk:"api_key"`
- AuthMethod types.String `tfsdk:"auth_method"`
+ BaseURL types.String `tfsdk:"base_url"`
+ Dimensions types.Int64 `tfsdk:"dimensions"`
+ ModelName types.String `tfsdk:"model_name"`
}
diff --git a/internal/provider/type_source_s3_file_format_parquet.go b/internal/provider/type_parquet.go
old mode 100755
new mode 100644
similarity index 77%
rename from internal/provider/type_source_s3_file_format_parquet.go
rename to internal/provider/type_parquet.go
index 0d86b3fc2..424fa0804
--- a/internal/provider/type_source_s3_file_format_parquet.go
+++ b/internal/provider/type_parquet.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceS3FileFormatParquet struct {
+type Parquet struct {
BatchSize types.Int64 `tfsdk:"batch_size"`
BufferSize types.Int64 `tfsdk:"buffer_size"`
Columns []types.String `tfsdk:"columns"`
- Filetype types.String `tfsdk:"filetype"`
}
diff --git a/internal/provider/type_destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json.go b/internal/provider/type_parquet_columnar_storage.go
old mode 100755
new mode 100644
similarity index 75%
rename from internal/provider/type_destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json.go
rename to internal/provider/type_parquet_columnar_storage.go
index f0d5762a8..7676ad7e2
--- a/internal/provider/type_destination_aws_datalake_output_format_wildcard_json_lines_newline_delimited_json.go
+++ b/internal/provider/type_parquet_columnar_storage.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON struct {
+type ParquetColumnarStorage struct {
CompressionCodec types.String `tfsdk:"compression_codec"`
FormatType types.String `tfsdk:"format_type"`
}
diff --git a/internal/provider/type_destination_langchain_embedding_fake.go b/internal/provider/type_parquet_format.go
old mode 100755
new mode 100644
similarity index 65%
rename from internal/provider/type_destination_langchain_embedding_fake.go
rename to internal/provider/type_parquet_format.go
index 3709f2aca..b0ab86e42
--- a/internal/provider/type_destination_langchain_embedding_fake.go
+++ b/internal/provider/type_parquet_format.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationLangchainEmbeddingFake struct {
- Mode types.String `tfsdk:"mode"`
+type ParquetFormat struct {
+ DecimalAsFloat types.Bool `tfsdk:"decimal_as_float"`
}
diff --git a/internal/provider/type_destination_clickhouse_ssh_tunnel_method_password_authentication.go b/internal/provider/type_password_authentication.go
old mode 100755
new mode 100644
similarity index 74%
rename from internal/provider/type_destination_clickhouse_ssh_tunnel_method_password_authentication.go
rename to internal/provider/type_password_authentication.go
index e78b97226..a35c27123
--- a/internal/provider/type_destination_clickhouse_ssh_tunnel_method_password_authentication.go
+++ b/internal/provider/type_password_authentication.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationClickhouseSSHTunnelMethodPasswordAuthentication struct {
+type PasswordAuthentication struct {
TunnelHost types.String `tfsdk:"tunnel_host"`
- TunnelMethod types.String `tfsdk:"tunnel_method"`
TunnelPort types.Int64 `tfsdk:"tunnel_port"`
TunnelUser types.String `tfsdk:"tunnel_user"`
TunnelUserPassword types.String `tfsdk:"tunnel_user_password"`
diff --git a/internal/provider/type_project_secret.go b/internal/provider/type_project_secret.go
new file mode 100644
index 000000000..e2532dc63
--- /dev/null
+++ b/internal/provider/type_project_secret.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type ProjectSecret struct {
+ APISecret types.String `tfsdk:"api_secret"`
+}
diff --git a/internal/provider/type_source_mysql_update_method_read_changes_using_binary_log_cdc.go b/internal/provider/type_read_changes_using_binary_log_cdc.go
old mode 100755
new mode 100644
similarity index 70%
rename from internal/provider/type_source_mysql_update_method_read_changes_using_binary_log_cdc.go
rename to internal/provider/type_read_changes_using_binary_log_cdc.go
index 14cb33229..5fba6dcb5
--- a/internal/provider/type_source_mysql_update_method_read_changes_using_binary_log_cdc.go
+++ b/internal/provider/type_read_changes_using_binary_log_cdc.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC struct {
+type ReadChangesUsingBinaryLogCDC struct {
InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"`
- Method types.String `tfsdk:"method"`
ServerTimeZone types.String `tfsdk:"server_time_zone"`
}
diff --git a/internal/provider/type_source_mssql_update_method_read_changes_using_change_data_capture_cdc.go b/internal/provider/type_read_changes_using_change_data_capture_cdc.go
old mode 100755
new mode 100644
similarity index 73%
rename from internal/provider/type_source_mssql_update_method_read_changes_using_change_data_capture_cdc.go
rename to internal/provider/type_read_changes_using_change_data_capture_cdc.go
index 605edb04d..53efe6b87
--- a/internal/provider/type_source_mssql_update_method_read_changes_using_change_data_capture_cdc.go
+++ b/internal/provider/type_read_changes_using_change_data_capture_cdc.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC struct {
+type ReadChangesUsingChangeDataCaptureCDC struct {
DataToSync types.String `tfsdk:"data_to_sync"`
InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"`
- Method types.String `tfsdk:"method"`
SnapshotIsolation types.String `tfsdk:"snapshot_isolation"`
}
diff --git a/internal/provider/type_source_mongodb_mongo_db_instance_type_replica_set.go b/internal/provider/type_replica_set.go
old mode 100755
new mode 100644
similarity index 84%
rename from internal/provider/type_source_mongodb_mongo_db_instance_type_replica_set.go
rename to internal/provider/type_replica_set.go
index d7219279b..b9e17ea01
--- a/internal/provider/type_source_mongodb_mongo_db_instance_type_replica_set.go
+++ b/internal/provider/type_replica_set.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceMongodbMongoDbInstanceTypeReplicaSet struct {
+type ReplicaSet struct {
Instance types.String `tfsdk:"instance"`
ReplicaSet types.String `tfsdk:"replica_set"`
ServerAddresses types.String `tfsdk:"server_addresses"`
diff --git a/internal/provider/type_report_config.go b/internal/provider/type_report_config.go
new file mode 100644
index 000000000..5e3efcfbf
--- /dev/null
+++ b/internal/provider/type_report_config.go
@@ -0,0 +1,18 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type ReportConfig struct {
+ AttributionTypes []types.String `tfsdk:"attribution_types"`
+ ClickWindowDays types.Int64 `tfsdk:"click_window_days"`
+ Columns []types.String `tfsdk:"columns"`
+ ConversionReportTime types.String `tfsdk:"conversion_report_time"`
+ EngagementWindowDays types.Int64 `tfsdk:"engagement_window_days"`
+ Granularity types.String `tfsdk:"granularity"`
+ Level types.String `tfsdk:"level"`
+ Name types.String `tfsdk:"name"`
+ StartDate types.String `tfsdk:"start_date"`
+ ViewWindowDays types.Int64 `tfsdk:"view_window_days"`
+}
diff --git a/internal/provider/type_s3_staging.go b/internal/provider/type_s3_staging.go
new file mode 100644
index 000000000..6e9e19226
--- /dev/null
+++ b/internal/provider/type_s3_staging.go
@@ -0,0 +1,17 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type S3Staging struct {
+ AccessKeyID types.String `tfsdk:"access_key_id"`
+ Encryption *DestinationRedshiftEncryption `tfsdk:"encryption"`
+ FileBufferCount types.Int64 `tfsdk:"file_buffer_count"`
+ FileNamePattern types.String `tfsdk:"file_name_pattern"`
+ PurgeStagingData types.Bool `tfsdk:"purge_staging_data"`
+ S3BucketName types.String `tfsdk:"s3_bucket_name"`
+ S3BucketPath types.String `tfsdk:"s3_bucket_path"`
+ S3BucketRegion types.String `tfsdk:"s3_bucket_region"`
+ SecretAccessKey types.String `tfsdk:"secret_access_key"`
+}
diff --git a/internal/provider/type_source_tiktok_marketing_authentication_method_sandbox_access_token.go b/internal/provider/type_sandbox_access_token.go
old mode 100755
new mode 100644
similarity index 68%
rename from internal/provider/type_source_tiktok_marketing_authentication_method_sandbox_access_token.go
rename to internal/provider/type_sandbox_access_token.go
index 1fd7a9808..b68770aa1
--- a/internal/provider/type_source_tiktok_marketing_authentication_method_sandbox_access_token.go
+++ b/internal/provider/type_sandbox_access_token.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceTiktokMarketingAuthenticationMethodSandboxAccessToken struct {
+type SandboxAccessToken struct {
AccessToken types.String `tfsdk:"access_token"`
AdvertiserID types.String `tfsdk:"advertiser_id"`
- AuthType types.String `tfsdk:"auth_type"`
}
diff --git a/internal/provider/type_source_file_secure_storage_provider_ssh_secure_shell.go b/internal/provider/type_scp_secure_copy_protocol.go
old mode 100755
new mode 100644
similarity index 75%
rename from internal/provider/type_source_file_secure_storage_provider_ssh_secure_shell.go
rename to internal/provider/type_scp_secure_copy_protocol.go
index a20ab7c4c..73d0d88ac
--- a/internal/provider/type_source_file_secure_storage_provider_ssh_secure_shell.go
+++ b/internal/provider/type_scp_secure_copy_protocol.go
@@ -4,10 +4,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceFileSecureStorageProviderSSHSecureShell struct {
+type SCPSecureCopyProtocol struct {
Host types.String `tfsdk:"host"`
Password types.String `tfsdk:"password"`
Port types.String `tfsdk:"port"`
- Storage types.String `tfsdk:"storage"`
User types.String `tfsdk:"user"`
}
diff --git a/internal/provider/type_self_managed_replica_set.go b/internal/provider/type_self_managed_replica_set.go
new file mode 100644
index 000000000..267e03dab
--- /dev/null
+++ b/internal/provider/type_self_managed_replica_set.go
@@ -0,0 +1,14 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SelfManagedReplicaSet struct {
+ AdditionalProperties types.String `tfsdk:"additional_properties"`
+ AuthSource types.String `tfsdk:"auth_source"`
+ ConnectionString types.String `tfsdk:"connection_string"`
+ Database types.String `tfsdk:"database"`
+ Password types.String `tfsdk:"password"`
+ Username types.String `tfsdk:"username"`
+}
diff --git a/internal/provider/type_service_account.go b/internal/provider/type_service_account.go
new file mode 100644
index 000000000..516332ec3
--- /dev/null
+++ b/internal/provider/type_service_account.go
@@ -0,0 +1,11 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type ServiceAccount struct {
+ ProjectID types.Int64 `tfsdk:"project_id"`
+ Secret types.String `tfsdk:"secret"`
+ Username types.String `tfsdk:"username"`
+}
diff --git a/internal/provider/type_source_google_analytics_data_api_credentials_service_account_key_authentication.go b/internal/provider/type_service_account_key.go
old mode 100755
new mode 100644
similarity index 61%
rename from internal/provider/type_source_google_analytics_data_api_credentials_service_account_key_authentication.go
rename to internal/provider/type_service_account_key.go
index 9617e25ce..2c365a67c
--- a/internal/provider/type_source_google_analytics_data_api_credentials_service_account_key_authentication.go
+++ b/internal/provider/type_service_account_key.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication struct {
- AuthType types.String `tfsdk:"auth_type"`
+type ServiceAccountKey struct {
CredentialsJSON types.String `tfsdk:"credentials_json"`
+ Email types.String `tfsdk:"email"`
}
diff --git a/internal/provider/type_destination_bigquery_loading_method_standard_inserts.go b/internal/provider/type_service_account_key_authentication.go
old mode 100755
new mode 100644
similarity index 60%
rename from internal/provider/type_destination_bigquery_loading_method_standard_inserts.go
rename to internal/provider/type_service_account_key_authentication.go
index 2822a74ba..2033393de
--- a/internal/provider/type_destination_bigquery_loading_method_standard_inserts.go
+++ b/internal/provider/type_service_account_key_authentication.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationBigqueryLoadingMethodStandardInserts struct {
- Method types.String `tfsdk:"method"`
+type ServiceAccountKeyAuthentication struct {
+ CredentialsJSON types.String `tfsdk:"credentials_json"`
}
diff --git a/internal/provider/type_service_name.go b/internal/provider/type_service_name.go
new file mode 100644
index 000000000..7bb4e7b39
--- /dev/null
+++ b/internal/provider/type_service_name.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type ServiceName struct {
+ ServiceName types.String `tfsdk:"service_name"`
+}
diff --git a/internal/provider/type_source_launchdarkly.go b/internal/provider/type_single_store_access_token.go
old mode 100755
new mode 100644
similarity index 71%
rename from internal/provider/type_source_launchdarkly.go
rename to internal/provider/type_single_store_access_token.go
index 48ec884d3..b7f2aef72
--- a/internal/provider/type_source_launchdarkly.go
+++ b/internal/provider/type_single_store_access_token.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceLaunchdarkly struct {
+type SingleStoreAccessToken struct {
AccessToken types.String `tfsdk:"access_token"`
- SourceType types.String `tfsdk:"source_type"`
+ StoreName types.String `tfsdk:"store_name"`
}
diff --git a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression.go b/internal/provider/type_snappy.go
old mode 100755
new mode 100644
similarity index 69%
rename from internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression.go
rename to internal/provider/type_snappy.go
index 271133076..cc79b5370
--- a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_no_compression.go
+++ b/internal/provider/type_snappy.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression struct {
+type Snappy struct {
Codec types.String `tfsdk:"codec"`
}
diff --git a/internal/provider/type_source_aha.go b/internal/provider/type_source_aha.go
old mode 100755
new mode 100644
index 830930055..0dc06149f
--- a/internal/provider/type_source_aha.go
+++ b/internal/provider/type_source_aha.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceAha struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
- URL types.String `tfsdk:"url"`
+ APIKey types.String `tfsdk:"api_key"`
+ URL types.String `tfsdk:"url"`
}
diff --git a/internal/provider/type_source_aircall.go b/internal/provider/type_source_aircall.go
old mode 100755
new mode 100644
index 35f04be5f..606bb4844
--- a/internal/provider/type_source_aircall.go
+++ b/internal/provider/type_source_aircall.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceAircall struct {
- APIID types.String `tfsdk:"api_id"`
- APIToken types.String `tfsdk:"api_token"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIID types.String `tfsdk:"api_id"`
+ APIToken types.String `tfsdk:"api_token"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_airtable.go b/internal/provider/type_source_airtable.go
old mode 100755
new mode 100644
index a49a6143e..47ef7d088
--- a/internal/provider/type_source_airtable.go
+++ b/internal/provider/type_source_airtable.go
@@ -2,9 +2,6 @@
package provider
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
type SourceAirtable struct {
Credentials *SourceAirtableAuthentication `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_airtable_authentication.go b/internal/provider/type_source_airtable_authentication.go
old mode 100755
new mode 100644
index 6cfce3c8f..347e9d8e9
--- a/internal/provider/type_source_airtable_authentication.go
+++ b/internal/provider/type_source_airtable_authentication.go
@@ -3,8 +3,6 @@
package provider
type SourceAirtableAuthentication struct {
- SourceAirtableAuthenticationOAuth20 *SourceAirtableAuthenticationOAuth20 `tfsdk:"source_airtable_authentication_o_auth2_0"`
- SourceAirtableAuthenticationPersonalAccessToken *SourceAirtableAuthenticationPersonalAccessToken `tfsdk:"source_airtable_authentication_personal_access_token"`
- SourceAirtableUpdateAuthenticationOAuth20 *SourceAirtableAuthenticationOAuth20 `tfsdk:"source_airtable_update_authentication_o_auth2_0"`
- SourceAirtableUpdateAuthenticationPersonalAccessToken *SourceAirtableAuthenticationPersonalAccessToken `tfsdk:"source_airtable_update_authentication_personal_access_token"`
+ OAuth20 *SourceAirtableOAuth20 `tfsdk:"o_auth20"`
+ PersonalAccessToken *APIKeyAuth `tfsdk:"personal_access_token"`
}
diff --git a/internal/provider/type_source_xero_authenticate_via_xero_o_auth.go b/internal/provider/type_source_airtable_o_auth20.go
old mode 100755
new mode 100644
similarity index 89%
rename from internal/provider/type_source_xero_authenticate_via_xero_o_auth.go
rename to internal/provider/type_source_airtable_o_auth20.go
index e885dc0d8..269846f59
--- a/internal/provider/type_source_xero_authenticate_via_xero_o_auth.go
+++ b/internal/provider/type_source_airtable_o_auth20.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceXeroAuthenticateViaXeroOAuth struct {
+type SourceAirtableOAuth20 struct {
AccessToken types.String `tfsdk:"access_token"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
diff --git a/internal/provider/type_source_alloydb.go b/internal/provider/type_source_alloydb.go
old mode 100755
new mode 100644
index e1ce96ba4..ec8459be8
--- a/internal/provider/type_source_alloydb.go
+++ b/internal/provider/type_source_alloydb.go
@@ -5,15 +5,14 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceAlloydb struct {
- Database types.String `tfsdk:"database"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- ReplicationMethod *SourceAlloydbReplicationMethod `tfsdk:"replication_method"`
- Schemas []types.String `tfsdk:"schemas"`
- SourceType types.String `tfsdk:"source_type"`
- SslMode *SourceAlloydbSSLModes `tfsdk:"ssl_mode"`
- TunnelMethod *SourceAlloydbSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ Database types.String `tfsdk:"database"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ ReplicationMethod *SourceAlloydbReplicationMethod `tfsdk:"replication_method"`
+ Schemas []types.String `tfsdk:"schemas"`
+ SslMode *SourceAlloydbSSLModes `tfsdk:"ssl_mode"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_source_alloydb1.go b/internal/provider/type_source_alloydb1.go
deleted file mode 100755
index ad7bbb49f..000000000
--- a/internal/provider/type_source_alloydb1.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydb1 struct {
- Database types.String `tfsdk:"database"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- ReplicationMethod *SourceAlloydbReplicationMethod1 `tfsdk:"replication_method"`
- Schemas []types.String `tfsdk:"schemas"`
- SourceType types.String `tfsdk:"source_type"`
- SslMode *SourceAlloydbSSLModes1 `tfsdk:"ssl_mode"`
- TunnelMethod *SourceAlloydbSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_allow.go b/internal/provider/type_source_alloydb_allow.go
old mode 100755
new mode 100644
similarity index 71%
rename from internal/provider/type_source_alloydb_ssl_modes_allow.go
rename to internal/provider/type_source_alloydb_allow.go
index e4a81e174..5f3c23bbd
--- a/internal/provider/type_source_alloydb_ssl_modes_allow.go
+++ b/internal/provider/type_source_alloydb_allow.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceAlloydbSSLModesAllow struct {
- Mode types.String `tfsdk:"mode"`
+type SourceAlloydbAllow struct {
AdditionalProperties types.String `tfsdk:"additional_properties"`
}
diff --git a/internal/provider/type_source_alloydb_replication_method.go b/internal/provider/type_source_alloydb_replication_method.go
old mode 100755
new mode 100644
index 606633cdd..f2892ed4b
--- a/internal/provider/type_source_alloydb_replication_method.go
+++ b/internal/provider/type_source_alloydb_replication_method.go
@@ -3,10 +3,7 @@
package provider
type SourceAlloydbReplicationMethod struct {
- SourceAlloydbReplicationMethodLogicalReplicationCDC *SourceAlloydbReplicationMethodLogicalReplicationCDC `tfsdk:"source_alloydb_replication_method_logical_replication_cdc"`
- SourceAlloydbReplicationMethodStandard *SourceAlloydbReplicationMethodStandard `tfsdk:"source_alloydb_replication_method_standard"`
- SourceAlloydbReplicationMethodStandardXmin *SourceAlloydbReplicationMethodStandardXmin `tfsdk:"source_alloydb_replication_method_standard_xmin"`
- SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC `tfsdk:"source_alloydb_update_replication_method_logical_replication_cdc"`
- SourceAlloydbUpdateReplicationMethodStandard *SourceAlloydbReplicationMethodStandard `tfsdk:"source_alloydb_update_replication_method_standard"`
- SourceAlloydbUpdateReplicationMethodStandardXmin *SourceAlloydbReplicationMethodStandardXmin `tfsdk:"source_alloydb_update_replication_method_standard_xmin"`
+ LogicalReplicationCDC *LogicalReplicationCDC `tfsdk:"logical_replication_cdc"`
+ Standard *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"standard"`
+ StandardXmin *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"standard_xmin"`
}
diff --git a/internal/provider/type_source_alloydb_replication_method1.go b/internal/provider/type_source_alloydb_replication_method1.go
deleted file mode 100755
index 6574f62a7..000000000
--- a/internal/provider/type_source_alloydb_replication_method1.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceAlloydbReplicationMethod1 struct {
- SourceAlloydbReplicationMethodLogicalReplicationCDC *SourceAlloydbReplicationMethodLogicalReplicationCDC1 `tfsdk:"source_alloydb_replication_method_logical_replication_cdc"`
- SourceAlloydbReplicationMethodStandard *SourceAlloydbReplicationMethodStandard `tfsdk:"source_alloydb_replication_method_standard"`
- SourceAlloydbReplicationMethodStandardXmin *SourceAlloydbReplicationMethodStandardXmin `tfsdk:"source_alloydb_replication_method_standard_xmin"`
- SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC1 `tfsdk:"source_alloydb_update_replication_method_logical_replication_cdc"`
- SourceAlloydbUpdateReplicationMethodStandard *SourceAlloydbReplicationMethodStandard `tfsdk:"source_alloydb_update_replication_method_standard"`
- SourceAlloydbUpdateReplicationMethodStandardXmin *SourceAlloydbReplicationMethodStandardXmin `tfsdk:"source_alloydb_update_replication_method_standard_xmin"`
-}
diff --git a/internal/provider/type_source_alloydb_replication_method_logical_replication_cdc1.go b/internal/provider/type_source_alloydb_replication_method_logical_replication_cdc1.go
deleted file mode 100755
index 420566a45..000000000
--- a/internal/provider/type_source_alloydb_replication_method_logical_replication_cdc1.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbReplicationMethodLogicalReplicationCDC1 struct {
- InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"`
- LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"`
- Method types.String `tfsdk:"method"`
- Plugin types.String `tfsdk:"plugin"`
- Publication types.String `tfsdk:"publication"`
- QueueSize types.Int64 `tfsdk:"queue_size"`
- ReplicationSlot types.String `tfsdk:"replication_slot"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_replication_method_standard.go b/internal/provider/type_source_alloydb_replication_method_standard.go
deleted file mode 100755
index b7fdee6f1..000000000
--- a/internal/provider/type_source_alloydb_replication_method_standard.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbReplicationMethodStandard struct {
- Method types.String `tfsdk:"method"`
-}
diff --git a/internal/provider/type_source_alloydb_replication_method_standard_xmin.go b/internal/provider/type_source_alloydb_replication_method_standard_xmin.go
deleted file mode 100755
index 15de383a3..000000000
--- a/internal/provider/type_source_alloydb_replication_method_standard_xmin.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbReplicationMethodStandardXmin struct {
- Method types.String `tfsdk:"method"`
-}
diff --git a/internal/provider/type_source_alloydb_ssh_tunnel_method.go b/internal/provider/type_source_alloydb_ssh_tunnel_method.go
deleted file mode 100755
index fe21fe6c0..000000000
--- a/internal/provider/type_source_alloydb_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceAlloydbSSHTunnelMethod struct {
- SourceAlloydbSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_alloydb_ssh_tunnel_method_no_tunnel"`
- SourceAlloydbSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_alloydb_ssh_tunnel_method_password_authentication"`
- SourceAlloydbSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_alloydb_ssh_tunnel_method_ssh_key_authentication"`
- SourceAlloydbUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_alloydb_update_ssh_tunnel_method_no_tunnel"`
- SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_alloydb_update_ssh_tunnel_method_password_authentication"`
- SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_alloydb_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes.go b/internal/provider/type_source_alloydb_ssl_modes.go
old mode 100755
new mode 100644
index c3b187bc9..d865a3834
--- a/internal/provider/type_source_alloydb_ssl_modes.go
+++ b/internal/provider/type_source_alloydb_ssl_modes.go
@@ -3,16 +3,10 @@
package provider
type SourceAlloydbSSLModes struct {
- SourceAlloydbSSLModesAllow *SourceAlloydbSSLModesAllow `tfsdk:"source_alloydb_ssl_modes_allow"`
- SourceAlloydbSSLModesDisable *SourceAlloydbSSLModesDisable `tfsdk:"source_alloydb_ssl_modes_disable"`
- SourceAlloydbSSLModesPrefer *SourceAlloydbSSLModesPrefer `tfsdk:"source_alloydb_ssl_modes_prefer"`
- SourceAlloydbSSLModesRequire *SourceAlloydbSSLModesRequire `tfsdk:"source_alloydb_ssl_modes_require"`
- SourceAlloydbSSLModesVerifyCa *SourceAlloydbSSLModesVerifyCa `tfsdk:"source_alloydb_ssl_modes_verify_ca"`
- SourceAlloydbSSLModesVerifyFull *SourceAlloydbSSLModesVerifyFull `tfsdk:"source_alloydb_ssl_modes_verify_full"`
- SourceAlloydbUpdateSSLModesAllow *SourceAlloydbUpdateSSLModesAllow `tfsdk:"source_alloydb_update_ssl_modes_allow"`
- SourceAlloydbUpdateSSLModesDisable *SourceAlloydbUpdateSSLModesDisable `tfsdk:"source_alloydb_update_ssl_modes_disable"`
- SourceAlloydbUpdateSSLModesPrefer *SourceAlloydbUpdateSSLModesPrefer `tfsdk:"source_alloydb_update_ssl_modes_prefer"`
- SourceAlloydbUpdateSSLModesRequire *SourceAlloydbUpdateSSLModesRequire `tfsdk:"source_alloydb_update_ssl_modes_require"`
- SourceAlloydbUpdateSSLModesVerifyCa *SourceAlloydbUpdateSSLModesVerifyCa `tfsdk:"source_alloydb_update_ssl_modes_verify_ca"`
- SourceAlloydbUpdateSSLModesVerifyFull *SourceAlloydbUpdateSSLModesVerifyFull `tfsdk:"source_alloydb_update_ssl_modes_verify_full"`
+ Allow *SourceAlloydbAllow `tfsdk:"allow"`
+ Disable *SourceAlloydbAllow `tfsdk:"disable"`
+ Prefer *SourceAlloydbAllow `tfsdk:"prefer"`
+ Require *SourceAlloydbAllow `tfsdk:"require"`
+ VerifyCa *SourceAlloydbVerifyCa `tfsdk:"verify_ca"`
+ VerifyFull *SourceAlloydbVerifyCa `tfsdk:"verify_full"`
}
diff --git a/internal/provider/type_source_alloydb_ssl_modes1.go b/internal/provider/type_source_alloydb_ssl_modes1.go
deleted file mode 100755
index c2a1917f0..000000000
--- a/internal/provider/type_source_alloydb_ssl_modes1.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceAlloydbSSLModes1 struct {
- SourceAlloydbSSLModesAllow *SourceAlloydbSSLModesAllow1 `tfsdk:"source_alloydb_ssl_modes_allow"`
- SourceAlloydbSSLModesDisable *SourceAlloydbSSLModesDisable1 `tfsdk:"source_alloydb_ssl_modes_disable"`
- SourceAlloydbSSLModesPrefer *SourceAlloydbSSLModesPrefer1 `tfsdk:"source_alloydb_ssl_modes_prefer"`
- SourceAlloydbSSLModesRequire *SourceAlloydbSSLModesRequire1 `tfsdk:"source_alloydb_ssl_modes_require"`
- SourceAlloydbSSLModesVerifyCa *SourceAlloydbSSLModesVerifyCa1 `tfsdk:"source_alloydb_ssl_modes_verify_ca"`
- SourceAlloydbSSLModesVerifyFull *SourceAlloydbSSLModesVerifyFull1 `tfsdk:"source_alloydb_ssl_modes_verify_full"`
- SourceAlloydbUpdateSSLModesAllow *SourceAlloydbUpdateSSLModesAllow1 `tfsdk:"source_alloydb_update_ssl_modes_allow"`
- SourceAlloydbUpdateSSLModesDisable *SourceAlloydbUpdateSSLModesDisable1 `tfsdk:"source_alloydb_update_ssl_modes_disable"`
- SourceAlloydbUpdateSSLModesPrefer *SourceAlloydbUpdateSSLModesPrefer1 `tfsdk:"source_alloydb_update_ssl_modes_prefer"`
- SourceAlloydbUpdateSSLModesRequire *SourceAlloydbUpdateSSLModesRequire1 `tfsdk:"source_alloydb_update_ssl_modes_require"`
- SourceAlloydbUpdateSSLModesVerifyCa *SourceAlloydbUpdateSSLModesVerifyCa1 `tfsdk:"source_alloydb_update_ssl_modes_verify_ca"`
- SourceAlloydbUpdateSSLModesVerifyFull *SourceAlloydbUpdateSSLModesVerifyFull1 `tfsdk:"source_alloydb_update_ssl_modes_verify_full"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_disable.go b/internal/provider/type_source_alloydb_ssl_modes_disable.go
deleted file mode 100755
index 5b337fe25..000000000
--- a/internal/provider/type_source_alloydb_ssl_modes_disable.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbSSLModesDisable struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_disable1.go b/internal/provider/type_source_alloydb_ssl_modes_disable1.go
deleted file mode 100755
index 16cdee8af..000000000
--- a/internal/provider/type_source_alloydb_ssl_modes_disable1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbSSLModesDisable1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_prefer.go b/internal/provider/type_source_alloydb_ssl_modes_prefer.go
deleted file mode 100755
index b818b682c..000000000
--- a/internal/provider/type_source_alloydb_ssl_modes_prefer.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbSSLModesPrefer struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_prefer1.go b/internal/provider/type_source_alloydb_ssl_modes_prefer1.go
deleted file mode 100755
index 12561855c..000000000
--- a/internal/provider/type_source_alloydb_ssl_modes_prefer1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbSSLModesPrefer1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_require.go b/internal/provider/type_source_alloydb_ssl_modes_require.go
deleted file mode 100755
index 560977031..000000000
--- a/internal/provider/type_source_alloydb_ssl_modes_require.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbSSLModesRequire struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_require1.go b/internal/provider/type_source_alloydb_ssl_modes_require1.go
deleted file mode 100755
index 3831600b4..000000000
--- a/internal/provider/type_source_alloydb_ssl_modes_require1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbSSLModesRequire1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_verify_ca1.go b/internal/provider/type_source_alloydb_ssl_modes_verify_ca1.go
deleted file mode 100755
index 6fb24b846..000000000
--- a/internal/provider/type_source_alloydb_ssl_modes_verify_ca1.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbSSLModesVerifyCa1 struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_verify_full.go b/internal/provider/type_source_alloydb_ssl_modes_verify_full.go
deleted file mode 100755
index ba81d218b..000000000
--- a/internal/provider/type_source_alloydb_ssl_modes_verify_full.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbSSLModesVerifyFull struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_verify_full1.go b/internal/provider/type_source_alloydb_ssl_modes_verify_full1.go
deleted file mode 100755
index 7bc98717f..000000000
--- a/internal/provider/type_source_alloydb_ssl_modes_verify_full1.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbSSLModesVerifyFull1 struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_replication_method_logical_replication_cdc.go b/internal/provider/type_source_alloydb_update_replication_method_logical_replication_cdc.go
deleted file mode 100755
index 2698a4478..000000000
--- a/internal/provider/type_source_alloydb_update_replication_method_logical_replication_cdc.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC struct {
- InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"`
- LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"`
- Method types.String `tfsdk:"method"`
- Plugin types.String `tfsdk:"plugin"`
- Publication types.String `tfsdk:"publication"`
- QueueSize types.Int64 `tfsdk:"queue_size"`
- ReplicationSlot types.String `tfsdk:"replication_slot"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_replication_method_logical_replication_cdc1.go b/internal/provider/type_source_alloydb_update_replication_method_logical_replication_cdc1.go
deleted file mode 100755
index 35a6b5cda..000000000
--- a/internal/provider/type_source_alloydb_update_replication_method_logical_replication_cdc1.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC1 struct {
- InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"`
- LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"`
- Method types.String `tfsdk:"method"`
- Plugin types.String `tfsdk:"plugin"`
- Publication types.String `tfsdk:"publication"`
- QueueSize types.Int64 `tfsdk:"queue_size"`
- ReplicationSlot types.String `tfsdk:"replication_slot"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_allow.go b/internal/provider/type_source_alloydb_update_ssl_modes_allow.go
deleted file mode 100755
index 83f2f2cc8..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_allow.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesAllow struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_allow1.go b/internal/provider/type_source_alloydb_update_ssl_modes_allow1.go
deleted file mode 100755
index 9589675c5..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_allow1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesAllow1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_disable.go b/internal/provider/type_source_alloydb_update_ssl_modes_disable.go
deleted file mode 100755
index c25c2ca81..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_disable.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesDisable struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_disable1.go b/internal/provider/type_source_alloydb_update_ssl_modes_disable1.go
deleted file mode 100755
index 4fc05caee..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_disable1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesDisable1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_prefer.go b/internal/provider/type_source_alloydb_update_ssl_modes_prefer.go
deleted file mode 100755
index bed68d10b..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_prefer.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesPrefer struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_prefer1.go b/internal/provider/type_source_alloydb_update_ssl_modes_prefer1.go
deleted file mode 100755
index 8196b20b0..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_prefer1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesPrefer1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_require.go b/internal/provider/type_source_alloydb_update_ssl_modes_require.go
deleted file mode 100755
index 04794fdf5..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_require.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesRequire struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_require1.go b/internal/provider/type_source_alloydb_update_ssl_modes_require1.go
deleted file mode 100755
index d4ef9ee9e..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_require1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesRequire1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_verify_ca.go b/internal/provider/type_source_alloydb_update_ssl_modes_verify_ca.go
deleted file mode 100755
index 8046e038e..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_verify_ca.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesVerifyCa struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_verify_ca1.go b/internal/provider/type_source_alloydb_update_ssl_modes_verify_ca1.go
deleted file mode 100755
index 65ed7dece..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_verify_ca1.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesVerifyCa1 struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_verify_full.go b/internal/provider/type_source_alloydb_update_ssl_modes_verify_full.go
deleted file mode 100755
index 7411282cf..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_verify_full.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesVerifyFull struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_update_ssl_modes_verify_full1.go b/internal/provider/type_source_alloydb_update_ssl_modes_verify_full1.go
deleted file mode 100755
index 2d4b291af..000000000
--- a/internal/provider/type_source_alloydb_update_ssl_modes_verify_full1.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAlloydbUpdateSSLModesVerifyFull1 struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_alloydb_ssl_modes_verify_ca.go b/internal/provider/type_source_alloydb_verify_ca.go
old mode 100755
new mode 100644
similarity index 83%
rename from internal/provider/type_source_alloydb_ssl_modes_verify_ca.go
rename to internal/provider/type_source_alloydb_verify_ca.go
index c34252c85..86ab09824
--- a/internal/provider/type_source_alloydb_ssl_modes_verify_ca.go
+++ b/internal/provider/type_source_alloydb_verify_ca.go
@@ -4,11 +4,10 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceAlloydbSSLModesVerifyCa struct {
+type SourceAlloydbVerifyCa struct {
+ AdditionalProperties types.String `tfsdk:"additional_properties"`
CaCertificate types.String `tfsdk:"ca_certificate"`
ClientCertificate types.String `tfsdk:"client_certificate"`
ClientKey types.String `tfsdk:"client_key"`
ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
}
diff --git a/internal/provider/type_source_amazon_ads.go b/internal/provider/type_source_amazon_ads.go
old mode 100755
new mode 100644
index 4c7b03712..0777ac2c5
--- a/internal/provider/type_source_amazon_ads.go
+++ b/internal/provider/type_source_amazon_ads.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceAmazonAds struct {
- AuthType types.String `tfsdk:"auth_type"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
LookBackWindow types.Int64 `tfsdk:"look_back_window"`
@@ -14,7 +13,6 @@ type SourceAmazonAds struct {
RefreshToken types.String `tfsdk:"refresh_token"`
Region types.String `tfsdk:"region"`
ReportRecordTypes []types.String `tfsdk:"report_record_types"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
StateFilter []types.String `tfsdk:"state_filter"`
}
diff --git a/internal/provider/type_source_amazon_seller_partner.go b/internal/provider/type_source_amazon_seller_partner.go
old mode 100755
new mode 100644
index a612edb64..3789f9546
--- a/internal/provider/type_source_amazon_seller_partner.go
+++ b/internal/provider/type_source_amazon_seller_partner.go
@@ -5,20 +5,15 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceAmazonSellerPartner struct {
+ AccountType types.String `tfsdk:"account_type"`
AdvancedStreamOptions types.String `tfsdk:"advanced_stream_options"`
- AuthType types.String `tfsdk:"auth_type"`
- AwsAccessKey types.String `tfsdk:"aws_access_key"`
AwsEnvironment types.String `tfsdk:"aws_environment"`
- AwsSecretKey types.String `tfsdk:"aws_secret_key"`
LwaAppID types.String `tfsdk:"lwa_app_id"`
LwaClientSecret types.String `tfsdk:"lwa_client_secret"`
- MaxWaitSeconds types.Int64 `tfsdk:"max_wait_seconds"`
PeriodInDays types.Int64 `tfsdk:"period_in_days"`
RefreshToken types.String `tfsdk:"refresh_token"`
Region types.String `tfsdk:"region"`
ReplicationEndDate types.String `tfsdk:"replication_end_date"`
ReplicationStartDate types.String `tfsdk:"replication_start_date"`
ReportOptions types.String `tfsdk:"report_options"`
- RoleArn types.String `tfsdk:"role_arn"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_amazon_sqs.go b/internal/provider/type_source_amazon_sqs.go
old mode 100755
new mode 100644
index 64306fcf4..815c047f1
--- a/internal/provider/type_source_amazon_sqs.go
+++ b/internal/provider/type_source_amazon_sqs.go
@@ -13,6 +13,5 @@ type SourceAmazonSqs struct {
QueueURL types.String `tfsdk:"queue_url"`
Region types.String `tfsdk:"region"`
SecretKey types.String `tfsdk:"secret_key"`
- SourceType types.String `tfsdk:"source_type"`
VisibilityTimeout types.Int64 `tfsdk:"visibility_timeout"`
}
diff --git a/internal/provider/type_source_amplitude.go b/internal/provider/type_source_amplitude.go
old mode 100755
new mode 100644
index eae03866f..8512184c0
--- a/internal/provider/type_source_amplitude.go
+++ b/internal/provider/type_source_amplitude.go
@@ -9,6 +9,5 @@ type SourceAmplitude struct {
DataRegion types.String `tfsdk:"data_region"`
RequestTimeRange types.Int64 `tfsdk:"request_time_range"`
SecretKey types.String `tfsdk:"secret_key"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_apify_dataset.go b/internal/provider/type_source_apify_dataset.go
old mode 100755
new mode 100644
index 4f958a86e..3415cee8a
--- a/internal/provider/type_source_apify_dataset.go
+++ b/internal/provider/type_source_apify_dataset.go
@@ -5,8 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceApifyDataset struct {
- Clean types.Bool `tfsdk:"clean"`
- DatasetID types.String `tfsdk:"dataset_id"`
- SourceType types.String `tfsdk:"source_type"`
- Token types.String `tfsdk:"token"`
+ DatasetID types.String `tfsdk:"dataset_id"`
+ Token types.String `tfsdk:"token"`
}
diff --git a/internal/provider/type_source_appfollow.go b/internal/provider/type_source_appfollow.go
old mode 100755
new mode 100644
index aa7ec6ac6..5ac084747
--- a/internal/provider/type_source_appfollow.go
+++ b/internal/provider/type_source_appfollow.go
@@ -5,6 +5,5 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceAppfollow struct {
- APISecret types.String `tfsdk:"api_secret"`
- SourceType types.String `tfsdk:"source_type"`
+ APISecret types.String `tfsdk:"api_secret"`
}
diff --git a/internal/provider/type_source_asana.go b/internal/provider/type_source_asana.go
old mode 100755
new mode 100644
index 164e9ce54..a25ebec19
--- a/internal/provider/type_source_asana.go
+++ b/internal/provider/type_source_asana.go
@@ -5,6 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceAsana struct {
- Credentials *SourceAsanaAuthenticationMechanism `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
+ Credentials *SourceAsanaAuthenticationMechanism `tfsdk:"credentials"`
+ OrganizationExportIds []types.String `tfsdk:"organization_export_ids"`
+ TestMode types.Bool `tfsdk:"test_mode"`
}
diff --git a/internal/provider/type_source_asana_authentication_mechanism.go b/internal/provider/type_source_asana_authentication_mechanism.go
old mode 100755
new mode 100644
index 1ccd08218..08ecf9a95
--- a/internal/provider/type_source_asana_authentication_mechanism.go
+++ b/internal/provider/type_source_asana_authentication_mechanism.go
@@ -3,8 +3,6 @@
package provider
type SourceAsanaAuthenticationMechanism struct {
- SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth *SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth `tfsdk:"source_asana_authentication_mechanism_authenticate_via_asana_oauth"`
- SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken *SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken `tfsdk:"source_asana_authentication_mechanism_authenticate_with_personal_access_token"`
- SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth *SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth `tfsdk:"source_asana_update_authentication_mechanism_authenticate_via_asana_oauth"`
- SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken *SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken `tfsdk:"source_asana_update_authentication_mechanism_authenticate_with_personal_access_token"`
+ AuthenticateViaAsanaOauth *DestinationGoogleSheetsAuthenticationViaGoogleOAuth `tfsdk:"authenticate_via_asana_oauth"`
+ AuthenticateWithPersonalAccessToken *AuthenticateWithPersonalAccessToken `tfsdk:"authenticate_with_personal_access_token"`
}
diff --git a/internal/provider/type_source_asana_authentication_mechanism_authenticate_via_asana_oauth.go b/internal/provider/type_source_asana_authentication_mechanism_authenticate_via_asana_oauth.go
deleted file mode 100755
index d42bab9ed..000000000
--- a/internal/provider/type_source_asana_authentication_mechanism_authenticate_via_asana_oauth.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth struct {
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- OptionTitle types.String `tfsdk:"option_title"`
- RefreshToken types.String `tfsdk:"refresh_token"`
-}
diff --git a/internal/provider/type_source_asana_authentication_mechanism_authenticate_with_personal_access_token.go b/internal/provider/type_source_asana_authentication_mechanism_authenticate_with_personal_access_token.go
deleted file mode 100755
index 6558041fe..000000000
--- a/internal/provider/type_source_asana_authentication_mechanism_authenticate_with_personal_access_token.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken struct {
- OptionTitle types.String `tfsdk:"option_title"`
- PersonalAccessToken types.String `tfsdk:"personal_access_token"`
-}
diff --git a/internal/provider/type_source_auth0.go b/internal/provider/type_source_auth0.go
old mode 100755
new mode 100644
index 70589a65e..39d6970fa
--- a/internal/provider/type_source_auth0.go
+++ b/internal/provider/type_source_auth0.go
@@ -7,6 +7,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceAuth0 struct {
BaseURL types.String `tfsdk:"base_url"`
Credentials SourceAuth0AuthenticationMethod `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_auth0_authentication_method.go b/internal/provider/type_source_auth0_authentication_method.go
old mode 100755
new mode 100644
index 9cd88bb3d..b76cb6cf1
--- a/internal/provider/type_source_auth0_authentication_method.go
+++ b/internal/provider/type_source_auth0_authentication_method.go
@@ -3,8 +3,6 @@
package provider
type SourceAuth0AuthenticationMethod struct {
- SourceAuth0AuthenticationMethodOAuth2AccessToken *SourceAuth0AuthenticationMethodOAuth2AccessToken `tfsdk:"source_auth0_authentication_method_o_auth2_access_token"`
- SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication *SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication `tfsdk:"source_auth0_authentication_method_o_auth2_confidential_application"`
- SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken *SourceAuth0AuthenticationMethodOAuth2AccessToken `tfsdk:"source_auth0_update_authentication_method_o_auth2_access_token"`
- SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication *SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication `tfsdk:"source_auth0_update_authentication_method_o_auth2_confidential_application"`
+ OAuth2AccessToken *OAuth2AccessToken `tfsdk:"o_auth2_access_token"`
+ OAuth2ConfidentialApplication *OAuth2ConfidentialApplication `tfsdk:"o_auth2_confidential_application"`
}
diff --git a/internal/provider/type_source_auth0_authentication_method_o_auth2_access_token.go b/internal/provider/type_source_auth0_authentication_method_o_auth2_access_token.go
deleted file mode 100755
index 9c94a663f..000000000
--- a/internal/provider/type_source_auth0_authentication_method_o_auth2_access_token.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAuth0AuthenticationMethodOAuth2AccessToken struct {
- AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
-}
diff --git a/internal/provider/type_source_aws_cloudtrail.go b/internal/provider/type_source_aws_cloudtrail.go
old mode 100755
new mode 100644
index d3a40daa8..3b7473aa0
--- a/internal/provider/type_source_aws_cloudtrail.go
+++ b/internal/provider/type_source_aws_cloudtrail.go
@@ -8,6 +8,5 @@ type SourceAwsCloudtrail struct {
AwsKeyID types.String `tfsdk:"aws_key_id"`
AwsRegionName types.String `tfsdk:"aws_region_name"`
AwsSecretKey types.String `tfsdk:"aws_secret_key"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_azure_blob_storage.go b/internal/provider/type_source_azure_blob_storage.go
old mode 100755
new mode 100644
index 737e913a3..9e79ae053
--- a/internal/provider/type_source_azure_blob_storage.go
+++ b/internal/provider/type_source_azure_blob_storage.go
@@ -5,12 +5,10 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceAzureBlobStorage struct {
- AzureBlobStorageAccountKey types.String `tfsdk:"azure_blob_storage_account_key"`
- AzureBlobStorageAccountName types.String `tfsdk:"azure_blob_storage_account_name"`
- AzureBlobStorageBlobsPrefix types.String `tfsdk:"azure_blob_storage_blobs_prefix"`
- AzureBlobStorageContainerName types.String `tfsdk:"azure_blob_storage_container_name"`
- AzureBlobStorageEndpoint types.String `tfsdk:"azure_blob_storage_endpoint"`
- AzureBlobStorageSchemaInferenceLimit types.Int64 `tfsdk:"azure_blob_storage_schema_inference_limit"`
- Format SourceAzureBlobStorageInputFormat `tfsdk:"format"`
- SourceType types.String `tfsdk:"source_type"`
+ AzureBlobStorageAccountKey types.String `tfsdk:"azure_blob_storage_account_key"`
+ AzureBlobStorageAccountName types.String `tfsdk:"azure_blob_storage_account_name"`
+ AzureBlobStorageContainerName types.String `tfsdk:"azure_blob_storage_container_name"`
+ AzureBlobStorageEndpoint types.String `tfsdk:"azure_blob_storage_endpoint"`
+ StartDate types.String `tfsdk:"start_date"`
+ Streams []FileBasedStreamConfig `tfsdk:"streams"`
}
diff --git a/internal/provider/type_source_azure_blob_storage_csv_header_definition.go b/internal/provider/type_source_azure_blob_storage_csv_header_definition.go
new file mode 100644
index 000000000..c0054e128
--- /dev/null
+++ b/internal/provider/type_source_azure_blob_storage_csv_header_definition.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceAzureBlobStorageCSVHeaderDefinition struct {
+ Autogenerated *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"autogenerated"`
+ FromCSV *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"from_csv"`
+ UserProvided *UserProvided `tfsdk:"user_provided"`
+}
diff --git a/internal/provider/type_source_azure_blob_storage_format.go b/internal/provider/type_source_azure_blob_storage_format.go
new file mode 100644
index 000000000..8453e6ea3
--- /dev/null
+++ b/internal/provider/type_source_azure_blob_storage_format.go
@@ -0,0 +1,11 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceAzureBlobStorageFormat struct {
+ AvroFormat *AvroFormat `tfsdk:"avro_format"`
+ CSVFormat *CSVFormat `tfsdk:"csv_format"`
+ DocumentFileTypeFormatExperimental *DocumentFileTypeFormatExperimental `tfsdk:"document_file_type_format_experimental"`
+ JsonlFormat *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"jsonl_format"`
+ ParquetFormat *ParquetFormat `tfsdk:"parquet_format"`
+}
diff --git a/internal/provider/type_source_azure_blob_storage_input_format.go b/internal/provider/type_source_azure_blob_storage_input_format.go
deleted file mode 100755
index a39203bf0..000000000
--- a/internal/provider/type_source_azure_blob_storage_input_format.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceAzureBlobStorageInputFormat struct {
- SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON *SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON `tfsdk:"source_azure_blob_storage_input_format_json_lines_newline_delimited_json"`
- SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON *SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON `tfsdk:"source_azure_blob_storage_update_input_format_json_lines_newline_delimited_json"`
-}
diff --git a/internal/provider/type_source_azure_blob_storage_input_format_json_lines_newline_delimited_json.go b/internal/provider/type_source_azure_blob_storage_input_format_json_lines_newline_delimited_json.go
deleted file mode 100755
index 89f2bc7b3..000000000
--- a/internal/provider/type_source_azure_blob_storage_input_format_json_lines_newline_delimited_json.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON struct {
- FormatType types.String `tfsdk:"format_type"`
-}
diff --git a/internal/provider/type_source_azure_table.go b/internal/provider/type_source_azure_table.go
old mode 100755
new mode 100644
index aaa80e07f..07d8a2d10
--- a/internal/provider/type_source_azure_table.go
+++ b/internal/provider/type_source_azure_table.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceAzureTable struct {
- SourceType types.String `tfsdk:"source_type"`
StorageAccessKey types.String `tfsdk:"storage_access_key"`
StorageAccountName types.String `tfsdk:"storage_account_name"`
StorageEndpointSuffix types.String `tfsdk:"storage_endpoint_suffix"`
diff --git a/internal/provider/type_source_bamboo_hr.go b/internal/provider/type_source_bamboo_hr.go
old mode 100755
new mode 100644
index 857e3c89e..7bdd7809e
--- a/internal/provider/type_source_bamboo_hr.go
+++ b/internal/provider/type_source_bamboo_hr.go
@@ -8,6 +8,5 @@ type SourceBambooHr struct {
APIKey types.String `tfsdk:"api_key"`
CustomReportsFields types.String `tfsdk:"custom_reports_fields"`
CustomReportsIncludeDefaultFields types.Bool `tfsdk:"custom_reports_include_default_fields"`
- SourceType types.String `tfsdk:"source_type"`
Subdomain types.String `tfsdk:"subdomain"`
}
diff --git a/internal/provider/type_source_bigcommerce.go b/internal/provider/type_source_bigcommerce.go
deleted file mode 100755
index 28092945e..000000000
--- a/internal/provider/type_source_bigcommerce.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceBigcommerce struct {
- AccessToken types.String `tfsdk:"access_token"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- StoreHash types.String `tfsdk:"store_hash"`
-}
diff --git a/internal/provider/type_source_bigquery.go b/internal/provider/type_source_bigquery.go
old mode 100755
new mode 100644
index 51ef1a39b..0297a3afb
--- a/internal/provider/type_source_bigquery.go
+++ b/internal/provider/type_source_bigquery.go
@@ -8,5 +8,4 @@ type SourceBigquery struct {
CredentialsJSON types.String `tfsdk:"credentials_json"`
DatasetID types.String `tfsdk:"dataset_id"`
ProjectID types.String `tfsdk:"project_id"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_bing_ads.go b/internal/provider/type_source_bing_ads.go
old mode 100755
new mode 100644
index d924f0aa9..303be9f32
--- a/internal/provider/type_source_bing_ads.go
+++ b/internal/provider/type_source_bing_ads.go
@@ -5,13 +5,12 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceBingAds struct {
- AuthMethod types.String `tfsdk:"auth_method"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- DeveloperToken types.String `tfsdk:"developer_token"`
- LookbackWindow types.Int64 `tfsdk:"lookback_window"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- ReportsStartDate types.String `tfsdk:"reports_start_date"`
- SourceType types.String `tfsdk:"source_type"`
- TenantID types.String `tfsdk:"tenant_id"`
+ ClientID types.String `tfsdk:"client_id"`
+ ClientSecret types.String `tfsdk:"client_secret"`
+ CustomReports []CustomReportConfig `tfsdk:"custom_reports"`
+ DeveloperToken types.String `tfsdk:"developer_token"`
+ LookbackWindow types.Int64 `tfsdk:"lookback_window"`
+ RefreshToken types.String `tfsdk:"refresh_token"`
+ ReportsStartDate types.String `tfsdk:"reports_start_date"`
+ TenantID types.String `tfsdk:"tenant_id"`
}
diff --git a/internal/provider/type_source_braintree.go b/internal/provider/type_source_braintree.go
old mode 100755
new mode 100644
index fff853c0a..961b15d68
--- a/internal/provider/type_source_braintree.go
+++ b/internal/provider/type_source_braintree.go
@@ -9,6 +9,5 @@ type SourceBraintree struct {
MerchantID types.String `tfsdk:"merchant_id"`
PrivateKey types.String `tfsdk:"private_key"`
PublicKey types.String `tfsdk:"public_key"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_braze.go b/internal/provider/type_source_braze.go
old mode 100755
new mode 100644
index f0118b52e..d4b295ffe
--- a/internal/provider/type_source_braze.go
+++ b/internal/provider/type_source_braze.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceBraze struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- URL types.String `tfsdk:"url"`
+ APIKey types.String `tfsdk:"api_key"`
+ StartDate types.String `tfsdk:"start_date"`
+ URL types.String `tfsdk:"url"`
}
diff --git a/internal/provider/type_destination_dev_null_test_destination_silent.go b/internal/provider/type_source_cart.go
old mode 100755
new mode 100644
similarity index 50%
rename from internal/provider/type_destination_dev_null_test_destination_silent.go
rename to internal/provider/type_source_cart.go
index 5aa8dea45..e2a46ea19
--- a/internal/provider/type_destination_dev_null_test_destination_silent.go
+++ b/internal/provider/type_source_cart.go
@@ -4,6 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationDevNullTestDestinationSilent struct {
- TestDestinationType types.String `tfsdk:"test_destination_type"`
+type SourceCart struct {
+ Credentials *SourceCartAuthorizationMethod `tfsdk:"credentials"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_cart_authorization_method.go b/internal/provider/type_source_cart_authorization_method.go
new file mode 100644
index 000000000..f3635cf29
--- /dev/null
+++ b/internal/provider/type_source_cart_authorization_method.go
@@ -0,0 +1,8 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceCartAuthorizationMethod struct {
+ CentralAPIRouter *CentralAPIRouter `tfsdk:"central_api_router"`
+ SingleStoreAccessToken *SingleStoreAccessToken `tfsdk:"single_store_access_token"`
+}
diff --git a/internal/provider/type_source_chargebee.go b/internal/provider/type_source_chargebee.go
old mode 100755
new mode 100644
index b6d5a1820..1541e80be
--- a/internal/provider/type_source_chargebee.go
+++ b/internal/provider/type_source_chargebee.go
@@ -8,6 +8,5 @@ type SourceChargebee struct {
ProductCatalog types.String `tfsdk:"product_catalog"`
Site types.String `tfsdk:"site"`
SiteAPIKey types.String `tfsdk:"site_api_key"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_chartmogul.go b/internal/provider/type_source_chartmogul.go
old mode 100755
new mode 100644
index 63d24013c..7a57c5e1c
--- a/internal/provider/type_source_chartmogul.go
+++ b/internal/provider/type_source_chartmogul.go
@@ -5,8 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceChartmogul struct {
- APIKey types.String `tfsdk:"api_key"`
- Interval types.String `tfsdk:"interval"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIKey types.String `tfsdk:"api_key"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_clickhouse.go b/internal/provider/type_source_clickhouse.go
old mode 100755
new mode 100644
index c5bc88969..f21913e65
--- a/internal/provider/type_source_clickhouse.go
+++ b/internal/provider/type_source_clickhouse.go
@@ -5,11 +5,10 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceClickhouse struct {
- Database types.String `tfsdk:"database"`
- Host types.String `tfsdk:"host"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- SourceType types.String `tfsdk:"source_type"`
- TunnelMethod *SourceClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ Database types.String `tfsdk:"database"`
+ Host types.String `tfsdk:"host"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_source_clickhouse_ssh_tunnel_method.go b/internal/provider/type_source_clickhouse_ssh_tunnel_method.go
deleted file mode 100755
index 2acec37e0..000000000
--- a/internal/provider/type_source_clickhouse_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceClickhouseSSHTunnelMethod struct {
- SourceClickhouseSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_clickhouse_ssh_tunnel_method_no_tunnel"`
- SourceClickhouseSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_clickhouse_ssh_tunnel_method_password_authentication"`
- SourceClickhouseSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_clickhouse_ssh_tunnel_method_ssh_key_authentication"`
- SourceClickhouseUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_clickhouse_update_ssh_tunnel_method_no_tunnel"`
- SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_clickhouse_update_ssh_tunnel_method_password_authentication"`
- SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_clickhouse_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_source_clickup_api.go b/internal/provider/type_source_clickup_api.go
old mode 100755
new mode 100644
index ef9600a19..99008464b
--- a/internal/provider/type_source_clickup_api.go
+++ b/internal/provider/type_source_clickup_api.go
@@ -9,7 +9,6 @@ type SourceClickupAPI struct {
FolderID types.String `tfsdk:"folder_id"`
IncludeClosedTasks types.Bool `tfsdk:"include_closed_tasks"`
ListID types.String `tfsdk:"list_id"`
- SourceType types.String `tfsdk:"source_type"`
SpaceID types.String `tfsdk:"space_id"`
TeamID types.String `tfsdk:"team_id"`
}
diff --git a/internal/provider/type_source_clockify.go b/internal/provider/type_source_clockify.go
old mode 100755
new mode 100644
index e0caa273e..773f6ab12
--- a/internal/provider/type_source_clockify.go
+++ b/internal/provider/type_source_clockify.go
@@ -7,6 +7,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceClockify struct {
APIKey types.String `tfsdk:"api_key"`
APIURL types.String `tfsdk:"api_url"`
- SourceType types.String `tfsdk:"source_type"`
WorkspaceID types.String `tfsdk:"workspace_id"`
}
diff --git a/internal/provider/type_source_close_com.go b/internal/provider/type_source_close_com.go
old mode 100755
new mode 100644
index 551001eda..5e31d37cf
--- a/internal/provider/type_source_close_com.go
+++ b/internal/provider/type_source_close_com.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceCloseCom struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIKey types.String `tfsdk:"api_key"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_coda.go b/internal/provider/type_source_coda.go
old mode 100755
new mode 100644
index 3cd3ef7f0..f30bc4670
--- a/internal/provider/type_source_coda.go
+++ b/internal/provider/type_source_coda.go
@@ -5,6 +5,5 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceCoda struct {
- AuthToken types.String `tfsdk:"auth_token"`
- SourceType types.String `tfsdk:"source_type"`
+ AuthToken types.String `tfsdk:"auth_token"`
}
diff --git a/internal/provider/type_source_coin_api.go b/internal/provider/type_source_coin_api.go
old mode 100755
new mode 100644
index b9b6ea518..8a19ee571
--- a/internal/provider/type_source_coin_api.go
+++ b/internal/provider/type_source_coin_api.go
@@ -10,7 +10,6 @@ type SourceCoinAPI struct {
Environment types.String `tfsdk:"environment"`
Limit types.Int64 `tfsdk:"limit"`
Period types.String `tfsdk:"period"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
SymbolID types.String `tfsdk:"symbol_id"`
}
diff --git a/internal/provider/type_source_coinmarketcap.go b/internal/provider/type_source_coinmarketcap.go
old mode 100755
new mode 100644
index b354d07ce..a8154dfd4
--- a/internal/provider/type_source_coinmarketcap.go
+++ b/internal/provider/type_source_coinmarketcap.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceCoinmarketcap struct {
- APIKey types.String `tfsdk:"api_key"`
- DataType types.String `tfsdk:"data_type"`
- SourceType types.String `tfsdk:"source_type"`
- Symbols []types.String `tfsdk:"symbols"`
+ APIKey types.String `tfsdk:"api_key"`
+ DataType types.String `tfsdk:"data_type"`
+ Symbols []types.String `tfsdk:"symbols"`
}
diff --git a/internal/provider/type_source_configcat.go b/internal/provider/type_source_configcat.go
deleted file mode 100755
index 5c498c754..000000000
--- a/internal/provider/type_source_configcat.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceConfigcat struct {
- Password types.String `tfsdk:"password"`
- SourceType types.String `tfsdk:"source_type"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/internal/provider/type_source_confluence.go b/internal/provider/type_source_confluence.go
old mode 100755
new mode 100644
index 39a53e8d8..9d7927cec
--- a/internal/provider/type_source_confluence.go
+++ b/internal/provider/type_source_confluence.go
@@ -8,5 +8,4 @@ type SourceConfluence struct {
APIToken types.String `tfsdk:"api_token"`
DomainName types.String `tfsdk:"domain_name"`
Email types.String `tfsdk:"email"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_convex.go b/internal/provider/type_source_convex.go
deleted file mode 100755
index 19b0c9dbc..000000000
--- a/internal/provider/type_source_convex.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceConvex struct {
- AccessKey types.String `tfsdk:"access_key"`
- DeploymentURL types.String `tfsdk:"deployment_url"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_datascope.go b/internal/provider/type_source_datascope.go
old mode 100755
new mode 100644
index 0e8bb68b8..1e6874ea8
--- a/internal/provider/type_source_datascope.go
+++ b/internal/provider/type_source_datascope.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceDatascope struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIKey types.String `tfsdk:"api_key"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_delighted.go b/internal/provider/type_source_delighted.go
old mode 100755
new mode 100644
index d7156cdb1..1b49cf4f1
--- a/internal/provider/type_source_delighted.go
+++ b/internal/provider/type_source_delighted.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceDelighted struct {
- APIKey types.String `tfsdk:"api_key"`
- Since types.String `tfsdk:"since"`
- SourceType types.String `tfsdk:"source_type"`
+ APIKey types.String `tfsdk:"api_key"`
+ Since types.String `tfsdk:"since"`
}
diff --git a/internal/provider/type_source_dixa.go b/internal/provider/type_source_dixa.go
old mode 100755
new mode 100644
index e7c42e616..f6b8637bb
--- a/internal/provider/type_source_dixa.go
+++ b/internal/provider/type_source_dixa.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceDixa struct {
- APIToken types.String `tfsdk:"api_token"`
- BatchSize types.Int64 `tfsdk:"batch_size"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIToken types.String `tfsdk:"api_token"`
+ BatchSize types.Int64 `tfsdk:"batch_size"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_dockerhub.go b/internal/provider/type_source_dockerhub.go
old mode 100755
new mode 100644
index 5d624c53c..daced560d
--- a/internal/provider/type_source_dockerhub.go
+++ b/internal/provider/type_source_dockerhub.go
@@ -6,5 +6,4 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceDockerhub struct {
DockerUsername types.String `tfsdk:"docker_username"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_dremio.go b/internal/provider/type_source_dremio.go
old mode 100755
new mode 100644
index 8146b2348..1b07a23b5
--- a/internal/provider/type_source_dremio.go
+++ b/internal/provider/type_source_dremio.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceDremio struct {
- APIKey types.String `tfsdk:"api_key"`
- BaseURL types.String `tfsdk:"base_url"`
- SourceType types.String `tfsdk:"source_type"`
+ APIKey types.String `tfsdk:"api_key"`
+ BaseURL types.String `tfsdk:"base_url"`
}
diff --git a/internal/provider/type_source_dynamodb.go b/internal/provider/type_source_dynamodb.go
old mode 100755
new mode 100644
index bffde3517..1eedf736d
--- a/internal/provider/type_source_dynamodb.go
+++ b/internal/provider/type_source_dynamodb.go
@@ -10,5 +10,4 @@ type SourceDynamodb struct {
Region types.String `tfsdk:"region"`
ReservedAttributeNames types.String `tfsdk:"reserved_attribute_names"`
SecretAccessKey types.String `tfsdk:"secret_access_key"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_dynamodb1.go b/internal/provider/type_source_dynamodb1.go
deleted file mode 100755
index ccadf1d43..000000000
--- a/internal/provider/type_source_dynamodb1.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceDynamodb1 struct {
- AccessKeyID types.String `tfsdk:"access_key_id"`
- Endpoint types.String `tfsdk:"endpoint"`
- Region types.String `tfsdk:"region"`
- ReservedAttributeNames types.String `tfsdk:"reserved_attribute_names"`
- SecretAccessKey types.String `tfsdk:"secret_access_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_e2e_test_cloud.go b/internal/provider/type_source_e2e_test_cloud.go
deleted file mode 100755
index e2fe95d0f..000000000
--- a/internal/provider/type_source_e2e_test_cloud.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceE2eTestCloud struct {
- MaxMessages types.Int64 `tfsdk:"max_messages"`
- MessageIntervalMs types.Int64 `tfsdk:"message_interval_ms"`
- MockCatalog SourceE2eTestCloudMockCatalog `tfsdk:"mock_catalog"`
- Seed types.Int64 `tfsdk:"seed"`
- SourceType types.String `tfsdk:"source_type"`
- Type types.String `tfsdk:"type"`
-}
diff --git a/internal/provider/type_source_e2e_test_cloud_mock_catalog.go b/internal/provider/type_source_e2e_test_cloud_mock_catalog.go
deleted file mode 100755
index 99502951c..000000000
--- a/internal/provider/type_source_e2e_test_cloud_mock_catalog.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceE2eTestCloudMockCatalog struct {
- SourceE2eTestCloudMockCatalogMultiSchema *SourceE2eTestCloudMockCatalogMultiSchema `tfsdk:"source_e2e_test_cloud_mock_catalog_multi_schema"`
- SourceE2eTestCloudMockCatalogSingleSchema *SourceE2eTestCloudMockCatalogSingleSchema `tfsdk:"source_e2e_test_cloud_mock_catalog_single_schema"`
- SourceE2eTestCloudUpdateMockCatalogMultiSchema *SourceE2eTestCloudMockCatalogMultiSchema `tfsdk:"source_e2e_test_cloud_update_mock_catalog_multi_schema"`
- SourceE2eTestCloudUpdateMockCatalogSingleSchema *SourceE2eTestCloudMockCatalogSingleSchema `tfsdk:"source_e2e_test_cloud_update_mock_catalog_single_schema"`
-}
diff --git a/internal/provider/type_source_e2e_test_cloud_mock_catalog_multi_schema.go b/internal/provider/type_source_e2e_test_cloud_mock_catalog_multi_schema.go
deleted file mode 100755
index 3945d0934..000000000
--- a/internal/provider/type_source_e2e_test_cloud_mock_catalog_multi_schema.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceE2eTestCloudMockCatalogMultiSchema struct {
- StreamSchemas types.String `tfsdk:"stream_schemas"`
- Type types.String `tfsdk:"type"`
-}
diff --git a/internal/provider/type_source_e2e_test_cloud_mock_catalog_single_schema.go b/internal/provider/type_source_e2e_test_cloud_mock_catalog_single_schema.go
deleted file mode 100755
index 5c92d78c9..000000000
--- a/internal/provider/type_source_e2e_test_cloud_mock_catalog_single_schema.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceE2eTestCloudMockCatalogSingleSchema struct {
- StreamDuplication types.Int64 `tfsdk:"stream_duplication"`
- StreamName types.String `tfsdk:"stream_name"`
- StreamSchema types.String `tfsdk:"stream_schema"`
- Type types.String `tfsdk:"type"`
-}
diff --git a/internal/provider/type_source_emailoctopus.go b/internal/provider/type_source_emailoctopus.go
deleted file mode 100755
index 66ccec37b..000000000
--- a/internal/provider/type_source_emailoctopus.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceEmailoctopus struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_exchange_rates.go b/internal/provider/type_source_exchange_rates.go
old mode 100755
new mode 100644
index 5db1e13f5..914231ce2
--- a/internal/provider/type_source_exchange_rates.go
+++ b/internal/provider/type_source_exchange_rates.go
@@ -8,6 +8,5 @@ type SourceExchangeRates struct {
AccessKey types.String `tfsdk:"access_key"`
Base types.String `tfsdk:"base"`
IgnoreWeekends types.Bool `tfsdk:"ignore_weekends"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_facebook_marketing.go b/internal/provider/type_source_facebook_marketing.go
old mode 100755
new mode 100644
index b44f1c78f..1cac3b6a5
--- a/internal/provider/type_source_facebook_marketing.go
+++ b/internal/provider/type_source_facebook_marketing.go
@@ -5,18 +5,16 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceFacebookMarketing struct {
- AccessToken types.String `tfsdk:"access_token"`
- AccountID types.String `tfsdk:"account_id"`
- ActionBreakdownsAllowEmpty types.Bool `tfsdk:"action_breakdowns_allow_empty"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- CustomInsights []SourceFacebookMarketingInsightConfig `tfsdk:"custom_insights"`
- EndDate types.String `tfsdk:"end_date"`
- FetchThumbnailImages types.Bool `tfsdk:"fetch_thumbnail_images"`
- IncludeDeleted types.Bool `tfsdk:"include_deleted"`
- InsightsLookbackWindow types.Int64 `tfsdk:"insights_lookback_window"`
- MaxBatchSize types.Int64 `tfsdk:"max_batch_size"`
- PageSize types.Int64 `tfsdk:"page_size"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ AccessToken types.String `tfsdk:"access_token"`
+ AccountID types.String `tfsdk:"account_id"`
+ ActionBreakdownsAllowEmpty types.Bool `tfsdk:"action_breakdowns_allow_empty"`
+ ClientID types.String `tfsdk:"client_id"`
+ ClientSecret types.String `tfsdk:"client_secret"`
+ CustomInsights []InsightConfig `tfsdk:"custom_insights"`
+ EndDate types.String `tfsdk:"end_date"`
+ FetchThumbnailImages types.Bool `tfsdk:"fetch_thumbnail_images"`
+ IncludeDeleted types.Bool `tfsdk:"include_deleted"`
+ InsightsLookbackWindow types.Int64 `tfsdk:"insights_lookback_window"`
+ PageSize types.Int64 `tfsdk:"page_size"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_facebook_pages.go b/internal/provider/type_source_facebook_pages.go
old mode 100755
new mode 100644
index bf2a87ba1..f47ed2ff0
--- a/internal/provider/type_source_facebook_pages.go
+++ b/internal/provider/type_source_facebook_pages.go
@@ -7,5 +7,4 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceFacebookPages struct {
AccessToken types.String `tfsdk:"access_token"`
PageID types.String `tfsdk:"page_id"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_faker.go b/internal/provider/type_source_faker.go
old mode 100755
new mode 100644
index babe5d895..c5c8712b5
--- a/internal/provider/type_source_faker.go
+++ b/internal/provider/type_source_faker.go
@@ -5,10 +5,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceFaker struct {
- AlwaysUpdated types.Bool `tfsdk:"always_updated"`
- Count types.Int64 `tfsdk:"count"`
- Parallelism types.Int64 `tfsdk:"parallelism"`
- RecordsPerSlice types.Int64 `tfsdk:"records_per_slice"`
- Seed types.Int64 `tfsdk:"seed"`
- SourceType types.String `tfsdk:"source_type"`
+ AlwaysUpdated types.Bool `tfsdk:"always_updated"`
+ Count types.Int64 `tfsdk:"count"`
+ Parallelism types.Int64 `tfsdk:"parallelism"`
+ RecordsPerSlice types.Int64 `tfsdk:"records_per_slice"`
+ Seed types.Int64 `tfsdk:"seed"`
}
diff --git a/internal/provider/type_source_fauna.go b/internal/provider/type_source_fauna.go
old mode 100755
new mode 100644
index 96c4053c4..a69ecf929
--- a/internal/provider/type_source_fauna.go
+++ b/internal/provider/type_source_fauna.go
@@ -10,5 +10,4 @@ type SourceFauna struct {
Port types.Int64 `tfsdk:"port"`
Scheme types.String `tfsdk:"scheme"`
Secret types.String `tfsdk:"secret"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_fauna_collection.go b/internal/provider/type_source_fauna_collection.go
old mode 100755
new mode 100644
index 2699ab125..64b9ed983
--- a/internal/provider/type_source_fauna_collection.go
+++ b/internal/provider/type_source_fauna_collection.go
@@ -5,6 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceFaunaCollection struct {
- Deletions SourceFaunaCollectionDeletionMode `tfsdk:"deletions"`
- PageSize types.Int64 `tfsdk:"page_size"`
+ Deletions SourceFaunaDeletionMode `tfsdk:"deletions"`
+ PageSize types.Int64 `tfsdk:"page_size"`
}
diff --git a/internal/provider/type_source_fauna_collection_deletion_mode.go b/internal/provider/type_source_fauna_collection_deletion_mode.go
deleted file mode 100755
index 175ddc8dc..000000000
--- a/internal/provider/type_source_fauna_collection_deletion_mode.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceFaunaCollectionDeletionMode struct {
- SourceFaunaCollectionDeletionModeDisabled *SourceFaunaCollectionDeletionModeDisabled `tfsdk:"source_fauna_collection_deletion_mode_disabled"`
- SourceFaunaCollectionDeletionModeEnabled *SourceFaunaCollectionDeletionModeEnabled `tfsdk:"source_fauna_collection_deletion_mode_enabled"`
- SourceFaunaUpdateCollectionDeletionModeDisabled *SourceFaunaCollectionDeletionModeDisabled `tfsdk:"source_fauna_update_collection_deletion_mode_disabled"`
- SourceFaunaUpdateCollectionDeletionModeEnabled *SourceFaunaCollectionDeletionModeEnabled `tfsdk:"source_fauna_update_collection_deletion_mode_enabled"`
-}
diff --git a/internal/provider/type_source_fauna_collection_deletion_mode_disabled.go b/internal/provider/type_source_fauna_collection_deletion_mode_disabled.go
deleted file mode 100755
index 1618d92a2..000000000
--- a/internal/provider/type_source_fauna_collection_deletion_mode_disabled.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceFaunaCollectionDeletionModeDisabled struct {
- DeletionMode types.String `tfsdk:"deletion_mode"`
-}
diff --git a/internal/provider/type_source_fauna_collection_deletion_mode_enabled.go b/internal/provider/type_source_fauna_collection_deletion_mode_enabled.go
deleted file mode 100755
index ca5e2175a..000000000
--- a/internal/provider/type_source_fauna_collection_deletion_mode_enabled.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceFaunaCollectionDeletionModeEnabled struct {
- Column types.String `tfsdk:"column"`
- DeletionMode types.String `tfsdk:"deletion_mode"`
-}
diff --git a/internal/provider/type_source_fauna_deletion_mode.go b/internal/provider/type_source_fauna_deletion_mode.go
new file mode 100644
index 000000000..315e5c414
--- /dev/null
+++ b/internal/provider/type_source_fauna_deletion_mode.go
@@ -0,0 +1,8 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceFaunaDeletionMode struct {
+ Disabled *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"disabled"`
+ Enabled *Enabled `tfsdk:"enabled"`
+}
diff --git a/internal/provider/type_source_file.go b/internal/provider/type_source_file.go
new file mode 100644
index 000000000..48b5ee5f0
--- /dev/null
+++ b/internal/provider/type_source_file.go
@@ -0,0 +1,13 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceFile struct {
+ DatasetName types.String `tfsdk:"dataset_name"`
+ Format types.String `tfsdk:"format"`
+ Provider SourceFileStorageProvider `tfsdk:"provider"`
+ ReaderOptions types.String `tfsdk:"reader_options"`
+ URL types.String `tfsdk:"url"`
+}
diff --git a/internal/provider/type_destination_aws_datalake_authentication_mode_iam_user.go b/internal/provider/type_source_file_s3_amazon_web_services.go
old mode 100755
new mode 100644
similarity index 69%
rename from internal/provider/type_destination_aws_datalake_authentication_mode_iam_user.go
rename to internal/provider/type_source_file_s3_amazon_web_services.go
index c7cdd06c5..50b2534a3
--- a/internal/provider/type_destination_aws_datalake_authentication_mode_iam_user.go
+++ b/internal/provider/type_source_file_s3_amazon_web_services.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationAwsDatalakeAuthenticationModeIAMUser struct {
+type SourceFileS3AmazonWebServices struct {
AwsAccessKeyID types.String `tfsdk:"aws_access_key_id"`
AwsSecretAccessKey types.String `tfsdk:"aws_secret_access_key"`
- CredentialsTitle types.String `tfsdk:"credentials_title"`
}
diff --git a/internal/provider/type_source_file_secure.go b/internal/provider/type_source_file_secure.go
deleted file mode 100755
index 3f66a4ce2..000000000
--- a/internal/provider/type_source_file_secure.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceFileSecure struct {
- DatasetName types.String `tfsdk:"dataset_name"`
- Format types.String `tfsdk:"format"`
- Provider SourceFileSecureStorageProvider `tfsdk:"provider"`
- ReaderOptions types.String `tfsdk:"reader_options"`
- SourceType types.String `tfsdk:"source_type"`
- URL types.String `tfsdk:"url"`
-}
diff --git a/internal/provider/type_source_file_secure_storage_provider.go b/internal/provider/type_source_file_secure_storage_provider.go
deleted file mode 100755
index ad489b5b4..000000000
--- a/internal/provider/type_source_file_secure_storage_provider.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceFileSecureStorageProvider struct {
- SourceFileSecureStorageProviderAzBlobAzureBlobStorage *SourceFileSecureStorageProviderAzBlobAzureBlobStorage `tfsdk:"source_file_secure_storage_provider_az_blob_azure_blob_storage"`
- SourceFileSecureStorageProviderGCSGoogleCloudStorage *SourceFileSecureStorageProviderGCSGoogleCloudStorage `tfsdk:"source_file_secure_storage_provider_gcs_google_cloud_storage"`
- SourceFileSecureStorageProviderHTTPSPublicWeb *SourceFileSecureStorageProviderHTTPSPublicWeb `tfsdk:"source_file_secure_storage_provider_https_public_web"`
- SourceFileSecureStorageProviderS3AmazonWebServices *SourceFileSecureStorageProviderS3AmazonWebServices `tfsdk:"source_file_secure_storage_provider_s3_amazon_web_services"`
- SourceFileSecureStorageProviderSCPSecureCopyProtocol *SourceFileSecureStorageProviderSCPSecureCopyProtocol `tfsdk:"source_file_secure_storage_provider_scp_secure_copy_protocol"`
- SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol *SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol `tfsdk:"source_file_secure_storage_provider_sftp_secure_file_transfer_protocol"`
- SourceFileSecureStorageProviderSSHSecureShell *SourceFileSecureStorageProviderSSHSecureShell `tfsdk:"source_file_secure_storage_provider_ssh_secure_shell"`
- SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage *SourceFileSecureStorageProviderAzBlobAzureBlobStorage `tfsdk:"source_file_secure_update_storage_provider_az_blob_azure_blob_storage"`
- SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage *SourceFileSecureStorageProviderGCSGoogleCloudStorage `tfsdk:"source_file_secure_update_storage_provider_gcs_google_cloud_storage"`
- SourceFileSecureUpdateStorageProviderHTTPSPublicWeb *SourceFileSecureStorageProviderHTTPSPublicWeb `tfsdk:"source_file_secure_update_storage_provider_https_public_web"`
- SourceFileSecureUpdateStorageProviderS3AmazonWebServices *SourceFileSecureStorageProviderS3AmazonWebServices `tfsdk:"source_file_secure_update_storage_provider_s3_amazon_web_services"`
- SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol *SourceFileSecureStorageProviderSCPSecureCopyProtocol `tfsdk:"source_file_secure_update_storage_provider_scp_secure_copy_protocol"`
- SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol *SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol `tfsdk:"source_file_secure_update_storage_provider_sftp_secure_file_transfer_protocol"`
- SourceFileSecureUpdateStorageProviderSSHSecureShell *SourceFileSecureStorageProviderSSHSecureShell `tfsdk:"source_file_secure_update_storage_provider_ssh_secure_shell"`
-}
diff --git a/internal/provider/type_source_file_secure_storage_provider_https_public_web.go b/internal/provider/type_source_file_secure_storage_provider_https_public_web.go
deleted file mode 100755
index 55339366d..000000000
--- a/internal/provider/type_source_file_secure_storage_provider_https_public_web.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceFileSecureStorageProviderHTTPSPublicWeb struct {
- Storage types.String `tfsdk:"storage"`
- UserAgent types.Bool `tfsdk:"user_agent"`
-}
diff --git a/internal/provider/type_source_file_secure_storage_provider_scp_secure_copy_protocol.go b/internal/provider/type_source_file_secure_storage_provider_scp_secure_copy_protocol.go
deleted file mode 100755
index 54b00cc6b..000000000
--- a/internal/provider/type_source_file_secure_storage_provider_scp_secure_copy_protocol.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceFileSecureStorageProviderSCPSecureCopyProtocol struct {
- Host types.String `tfsdk:"host"`
- Password types.String `tfsdk:"password"`
- Port types.String `tfsdk:"port"`
- Storage types.String `tfsdk:"storage"`
- User types.String `tfsdk:"user"`
-}
diff --git a/internal/provider/type_source_file_secure_storage_provider_sftp_secure_file_transfer_protocol.go b/internal/provider/type_source_file_secure_storage_provider_sftp_secure_file_transfer_protocol.go
deleted file mode 100755
index 3f2943309..000000000
--- a/internal/provider/type_source_file_secure_storage_provider_sftp_secure_file_transfer_protocol.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol struct {
- Host types.String `tfsdk:"host"`
- Password types.String `tfsdk:"password"`
- Port types.String `tfsdk:"port"`
- Storage types.String `tfsdk:"storage"`
- User types.String `tfsdk:"user"`
-}
diff --git a/internal/provider/type_source_file_storage_provider.go b/internal/provider/type_source_file_storage_provider.go
new file mode 100644
index 000000000..dd4841048
--- /dev/null
+++ b/internal/provider/type_source_file_storage_provider.go
@@ -0,0 +1,13 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceFileStorageProvider struct {
+ AzBlobAzureBlobStorage *AzBlobAzureBlobStorage `tfsdk:"az_blob_azure_blob_storage"`
+ GCSGoogleCloudStorage *GCSGoogleCloudStorage `tfsdk:"gcs_google_cloud_storage"`
+ HTTPSPublicWeb *HTTPSPublicWeb `tfsdk:"https_public_web"`
+ S3AmazonWebServices *SourceFileS3AmazonWebServices `tfsdk:"s3_amazon_web_services"`
+ SCPSecureCopyProtocol *SCPSecureCopyProtocol `tfsdk:"scp_secure_copy_protocol"`
+ SFTPSecureFileTransferProtocol *SCPSecureCopyProtocol `tfsdk:"sftp_secure_file_transfer_protocol"`
+ SSHSecureShell *SCPSecureCopyProtocol `tfsdk:"ssh_secure_shell"`
+}
diff --git a/internal/provider/type_source_firebolt.go b/internal/provider/type_source_firebolt.go
old mode 100755
new mode 100644
index 65f41b8b3..6f7a68890
--- a/internal/provider/type_source_firebolt.go
+++ b/internal/provider/type_source_firebolt.go
@@ -5,11 +5,10 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceFirebolt struct {
- Account types.String `tfsdk:"account"`
- Database types.String `tfsdk:"database"`
- Engine types.String `tfsdk:"engine"`
- Host types.String `tfsdk:"host"`
- Password types.String `tfsdk:"password"`
- SourceType types.String `tfsdk:"source_type"`
- Username types.String `tfsdk:"username"`
+ Account types.String `tfsdk:"account"`
+ Database types.String `tfsdk:"database"`
+ Engine types.String `tfsdk:"engine"`
+ Host types.String `tfsdk:"host"`
+ Password types.String `tfsdk:"password"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_source_freshcaller.go b/internal/provider/type_source_freshcaller.go
old mode 100755
new mode 100644
index bb6330433..eabcdde75
--- a/internal/provider/type_source_freshcaller.go
+++ b/internal/provider/type_source_freshcaller.go
@@ -8,7 +8,6 @@ type SourceFreshcaller struct {
APIKey types.String `tfsdk:"api_key"`
Domain types.String `tfsdk:"domain"`
RequestsPerMinute types.Int64 `tfsdk:"requests_per_minute"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
SyncLagMinutes types.Int64 `tfsdk:"sync_lag_minutes"`
}
diff --git a/internal/provider/type_source_freshdesk.go b/internal/provider/type_source_freshdesk.go
old mode 100755
new mode 100644
index 6878206f2..9970ebcf1
--- a/internal/provider/type_source_freshdesk.go
+++ b/internal/provider/type_source_freshdesk.go
@@ -8,6 +8,5 @@ type SourceFreshdesk struct {
APIKey types.String `tfsdk:"api_key"`
Domain types.String `tfsdk:"domain"`
RequestsPerMinute types.Int64 `tfsdk:"requests_per_minute"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_freshsales.go b/internal/provider/type_source_freshsales.go
old mode 100755
new mode 100644
index e8d01c69a..7f49a8d55
--- a/internal/provider/type_source_freshsales.go
+++ b/internal/provider/type_source_freshsales.go
@@ -7,5 +7,4 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceFreshsales struct {
APIKey types.String `tfsdk:"api_key"`
DomainName types.String `tfsdk:"domain_name"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_gainsight_px.go b/internal/provider/type_source_gainsight_px.go
deleted file mode 100755
index 7644167db..000000000
--- a/internal/provider/type_source_gainsight_px.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceGainsightPx struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_gcs.go b/internal/provider/type_source_gcs.go
old mode 100755
new mode 100644
index 55bf6bfc7..9e46950e4
--- a/internal/provider/type_source_gcs.go
+++ b/internal/provider/type_source_gcs.go
@@ -5,8 +5,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceGcs struct {
- GcsBucket types.String `tfsdk:"gcs_bucket"`
- GcsPath types.String `tfsdk:"gcs_path"`
- ServiceAccount types.String `tfsdk:"service_account"`
- SourceType types.String `tfsdk:"source_type"`
+ Bucket types.String `tfsdk:"bucket"`
+ ServiceAccount types.String `tfsdk:"service_account"`
+ StartDate types.String `tfsdk:"start_date"`
+ Streams []SourceGCSStreamConfig `tfsdk:"streams"`
}
diff --git a/internal/provider/type_source_gcs_format.go b/internal/provider/type_source_gcs_format.go
new file mode 100644
index 000000000..4bc2f9249
--- /dev/null
+++ b/internal/provider/type_source_gcs_format.go
@@ -0,0 +1,7 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceGcsFormat struct {
+ CSVFormat *CSVFormat `tfsdk:"csv_format"`
+}
diff --git a/internal/provider/type_source_gcs_stream_config.go b/internal/provider/type_source_gcs_stream_config.go
new file mode 100644
index 000000000..c6c9c1325
--- /dev/null
+++ b/internal/provider/type_source_gcs_stream_config.go
@@ -0,0 +1,17 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceGCSStreamConfig struct {
+ DaysToSyncIfHistoryIsFull types.Int64 `tfsdk:"days_to_sync_if_history_is_full"`
+ Format SourceGcsFormat `tfsdk:"format"`
+ Globs []types.String `tfsdk:"globs"`
+ InputSchema types.String `tfsdk:"input_schema"`
+ LegacyPrefix types.String `tfsdk:"legacy_prefix"`
+ Name types.String `tfsdk:"name"`
+ PrimaryKey types.String `tfsdk:"primary_key"`
+ Schemaless types.Bool `tfsdk:"schemaless"`
+ ValidationPolicy types.String `tfsdk:"validation_policy"`
+}
diff --git a/internal/provider/type_source_getlago.go b/internal/provider/type_source_getlago.go
old mode 100755
new mode 100644
index 0211813a4..cf97c9fc0
--- a/internal/provider/type_source_getlago.go
+++ b/internal/provider/type_source_getlago.go
@@ -5,6 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceGetlago struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
+ APIKey types.String `tfsdk:"api_key"`
+ APIURL types.String `tfsdk:"api_url"`
}
diff --git a/internal/provider/type_source_github.go b/internal/provider/type_source_github.go
old mode 100755
new mode 100644
index cbc776589..26779fbeb
--- a/internal/provider/type_source_github.go
+++ b/internal/provider/type_source_github.go
@@ -5,10 +5,12 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceGithub struct {
- Branch types.String `tfsdk:"branch"`
- Credentials *SourceGithubAuthentication `tfsdk:"credentials"`
- Repository types.String `tfsdk:"repository"`
- RequestsPerHour types.Int64 `tfsdk:"requests_per_hour"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIURL types.String `tfsdk:"api_url"`
+ Branch types.String `tfsdk:"branch"`
+ Branches []types.String `tfsdk:"branches"`
+ Credentials SourceGithubAuthentication `tfsdk:"credentials"`
+ Repositories []types.String `tfsdk:"repositories"`
+ Repository types.String `tfsdk:"repository"`
+ RequestsPerHour types.Int64 `tfsdk:"requests_per_hour"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_github_authentication.go b/internal/provider/type_source_github_authentication.go
old mode 100755
new mode 100644
index 398eae28e..20b5ce22f
--- a/internal/provider/type_source_github_authentication.go
+++ b/internal/provider/type_source_github_authentication.go
@@ -3,8 +3,6 @@
package provider
type SourceGithubAuthentication struct {
- SourceGithubAuthenticationOAuth *SourceGithubAuthenticationOAuth `tfsdk:"source_github_authentication_o_auth"`
- SourceGithubAuthenticationPersonalAccessToken *SourceGithubAuthenticationPersonalAccessToken `tfsdk:"source_github_authentication_personal_access_token"`
- SourceGithubUpdateAuthenticationOAuth *SourceGithubAuthenticationOAuth `tfsdk:"source_github_update_authentication_o_auth"`
- SourceGithubUpdateAuthenticationPersonalAccessToken *SourceGithubAuthenticationPersonalAccessToken `tfsdk:"source_github_update_authentication_personal_access_token"`
+ OAuth *OAuth `tfsdk:"o_auth"`
+ PersonalAccessToken *AuthenticateWithPersonalAccessToken `tfsdk:"personal_access_token"`
}
diff --git a/internal/provider/type_source_gitlab.go b/internal/provider/type_source_gitlab.go
old mode 100755
new mode 100644
index af94d4a47..4cfcce74c
--- a/internal/provider/type_source_gitlab.go
+++ b/internal/provider/type_source_gitlab.go
@@ -5,10 +5,11 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceGitlab struct {
- APIURL types.String `tfsdk:"api_url"`
- Credentials SourceGitlabAuthorizationMethod `tfsdk:"credentials"`
- Groups types.String `tfsdk:"groups"`
- Projects types.String `tfsdk:"projects"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIURL types.String `tfsdk:"api_url"`
+ Credentials SourceGitlabAuthorizationMethod `tfsdk:"credentials"`
+ Groups types.String `tfsdk:"groups"`
+ GroupsList []types.String `tfsdk:"groups_list"`
+ Projects types.String `tfsdk:"projects"`
+ ProjectsList []types.String `tfsdk:"projects_list"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_gitlab_authorization_method.go b/internal/provider/type_source_gitlab_authorization_method.go
old mode 100755
new mode 100644
index a031847e9..421efba06
--- a/internal/provider/type_source_gitlab_authorization_method.go
+++ b/internal/provider/type_source_gitlab_authorization_method.go
@@ -3,8 +3,6 @@
package provider
type SourceGitlabAuthorizationMethod struct {
- SourceGitlabAuthorizationMethodOAuth20 *SourceGitlabAuthorizationMethodOAuth20 `tfsdk:"source_gitlab_authorization_method_o_auth2_0"`
- SourceGitlabAuthorizationMethodPrivateToken *SourceGitlabAuthorizationMethodPrivateToken `tfsdk:"source_gitlab_authorization_method_private_token"`
- SourceGitlabUpdateAuthorizationMethodOAuth20 *SourceGitlabAuthorizationMethodOAuth20 `tfsdk:"source_gitlab_update_authorization_method_o_auth2_0"`
- SourceGitlabUpdateAuthorizationMethodPrivateToken *SourceGitlabAuthorizationMethodPrivateToken `tfsdk:"source_gitlab_update_authorization_method_private_token"`
+ OAuth20 *SourceGitlabOAuth20 `tfsdk:"o_auth20"`
+ PrivateToken *OAuth2AccessToken `tfsdk:"private_token"`
}
diff --git a/internal/provider/type_source_gitlab_authorization_method_o_auth20.go b/internal/provider/type_source_gitlab_authorization_method_o_auth20.go
deleted file mode 100755
index ae47b3735..000000000
--- a/internal/provider/type_source_gitlab_authorization_method_o_auth20.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceGitlabAuthorizationMethodOAuth20 struct {
- AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- TokenExpiryDate types.String `tfsdk:"token_expiry_date"`
-}
diff --git a/internal/provider/type_source_gitlab_authorization_method_private_token.go b/internal/provider/type_source_gitlab_authorization_method_private_token.go
deleted file mode 100755
index 78456ba7f..000000000
--- a/internal/provider/type_source_gitlab_authorization_method_private_token.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceGitlabAuthorizationMethodPrivateToken struct {
- AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
-}
diff --git a/internal/provider/type_source_airtable_authentication_o_auth20.go b/internal/provider/type_source_gitlab_o_auth20.go
old mode 100755
new mode 100644
similarity index 80%
rename from internal/provider/type_source_airtable_authentication_o_auth20.go
rename to internal/provider/type_source_gitlab_o_auth20.go
index befe0bf55..7dadf0d1a
--- a/internal/provider/type_source_airtable_authentication_o_auth20.go
+++ b/internal/provider/type_source_gitlab_o_auth20.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceAirtableAuthenticationOAuth20 struct {
+type SourceGitlabOAuth20 struct {
AccessToken types.String `tfsdk:"access_token"`
- AuthMethod types.String `tfsdk:"auth_method"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
RefreshToken types.String `tfsdk:"refresh_token"`
diff --git a/internal/provider/type_source_glassfrog.go b/internal/provider/type_source_glassfrog.go
deleted file mode 100755
index 7b17e8f64..000000000
--- a/internal/provider/type_source_glassfrog.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceGlassfrog struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_gnews.go b/internal/provider/type_source_gnews.go
old mode 100755
new mode 100644
index 974bfb3d6..faa1f8dd6
--- a/internal/provider/type_source_gnews.go
+++ b/internal/provider/type_source_gnews.go
@@ -13,7 +13,6 @@ type SourceGnews struct {
Nullable []types.String `tfsdk:"nullable"`
Query types.String `tfsdk:"query"`
Sortby types.String `tfsdk:"sortby"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
TopHeadlinesQuery types.String `tfsdk:"top_headlines_query"`
TopHeadlinesTopic types.String `tfsdk:"top_headlines_topic"`
diff --git a/internal/provider/type_source_google_ads.go b/internal/provider/type_source_google_ads.go
old mode 100755
new mode 100644
index cdbfe939d..4a730ef2f
--- a/internal/provider/type_source_google_ads.go
+++ b/internal/provider/type_source_google_ads.go
@@ -7,10 +7,9 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceGoogleAds struct {
ConversionWindowDays types.Int64 `tfsdk:"conversion_window_days"`
Credentials SourceGoogleAdsGoogleCredentials `tfsdk:"credentials"`
- CustomQueries []SourceGoogleAdsCustomQueries `tfsdk:"custom_queries"`
+ CustomQueries []CustomQueries `tfsdk:"custom_queries"`
CustomerID types.String `tfsdk:"customer_id"`
EndDate types.String `tfsdk:"end_date"`
LoginCustomerID types.String `tfsdk:"login_customer_id"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_google_ads_google_credentials.go b/internal/provider/type_source_google_ads_google_credentials.go
old mode 100755
new mode 100644
diff --git a/internal/provider/type_source_google_analytics_data_api.go b/internal/provider/type_source_google_analytics_data_api.go
old mode 100755
new mode 100644
index 138d980e8..e15d700dd
--- a/internal/provider/type_source_google_analytics_data_api.go
+++ b/internal/provider/type_source_google_analytics_data_api.go
@@ -5,10 +5,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceGoogleAnalyticsDataAPI struct {
- Credentials *SourceGoogleAnalyticsDataAPICredentials `tfsdk:"credentials"`
- CustomReports types.String `tfsdk:"custom_reports"`
- DateRangesStartDate types.String `tfsdk:"date_ranges_start_date"`
- PropertyID types.String `tfsdk:"property_id"`
- SourceType types.String `tfsdk:"source_type"`
- WindowInDays types.Int64 `tfsdk:"window_in_days"`
+ Credentials *SourceGoogleAnalyticsDataAPICredentials `tfsdk:"credentials"`
+ CustomReportsArray []SourceGoogleAnalyticsDataAPICustomReportConfig `tfsdk:"custom_reports_array"`
+ DateRangesStartDate types.String `tfsdk:"date_ranges_start_date"`
+ PropertyIds []types.String `tfsdk:"property_ids"`
+ WindowInDays types.Int64 `tfsdk:"window_in_days"`
}
diff --git a/internal/provider/type_source_google_analytics_data_api_credentials.go b/internal/provider/type_source_google_analytics_data_api_credentials.go
old mode 100755
new mode 100644
index 20dcf4ca7..d311d8aef
--- a/internal/provider/type_source_google_analytics_data_api_credentials.go
+++ b/internal/provider/type_source_google_analytics_data_api_credentials.go
@@ -3,8 +3,6 @@
package provider
type SourceGoogleAnalyticsDataAPICredentials struct {
- SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth *SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth `tfsdk:"source_google_analytics_data_api_credentials_authenticate_via_google_oauth"`
- SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication *SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication `tfsdk:"source_google_analytics_data_api_credentials_service_account_key_authentication"`
- SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth *SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth `tfsdk:"source_google_analytics_data_api_update_credentials_authenticate_via_google_oauth"`
- SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication *SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication `tfsdk:"source_google_analytics_data_api_update_credentials_service_account_key_authentication"`
+ AuthenticateViaGoogleOauth *AuthenticateViaGoogleOauth `tfsdk:"authenticate_via_google_oauth"`
+ ServiceAccountKeyAuthentication *ServiceAccountKeyAuthentication `tfsdk:"service_account_key_authentication"`
}
diff --git a/internal/provider/type_source_google_analytics_data_api_credentials_authenticate_via_google_oauth.go b/internal/provider/type_source_google_analytics_data_api_credentials_authenticate_via_google_oauth.go
deleted file mode 100755
index de669e103..000000000
--- a/internal/provider/type_source_google_analytics_data_api_credentials_authenticate_via_google_oauth.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth struct {
- AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
-}
diff --git a/internal/provider/type_source_google_analytics_data_api_custom_report_config.go b/internal/provider/type_source_google_analytics_data_api_custom_report_config.go
new file mode 100644
index 000000000..7af1df544
--- /dev/null
+++ b/internal/provider/type_source_google_analytics_data_api_custom_report_config.go
@@ -0,0 +1,13 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceGoogleAnalyticsDataAPICustomReportConfig struct {
+ DimensionFilter *SourceGoogleAnalyticsDataAPIDimensionsFilter `tfsdk:"dimension_filter"`
+ Dimensions []types.String `tfsdk:"dimensions"`
+ MetricFilter *SourceGoogleAnalyticsDataAPIDimensionsFilter `tfsdk:"metric_filter"`
+ Metrics []types.String `tfsdk:"metrics"`
+ Name types.String `tfsdk:"name"`
+}
diff --git a/internal/provider/type_source_google_analytics_data_api_dimensions_filter.go b/internal/provider/type_source_google_analytics_data_api_dimensions_filter.go
new file mode 100644
index 000000000..961a4b272
--- /dev/null
+++ b/internal/provider/type_source_google_analytics_data_api_dimensions_filter.go
@@ -0,0 +1,10 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceGoogleAnalyticsDataAPIDimensionsFilter struct {
+ AndGroup *AndGroup `tfsdk:"and_group"`
+ Filter *Expression `tfsdk:"filter"`
+ NotExpression *NotExpression `tfsdk:"not_expression"`
+ OrGroup *AndGroup `tfsdk:"or_group"`
+}
diff --git a/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter1_expressions_double_value.go b/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter1_expressions_double_value.go
new file mode 100644
index 000000000..48b602265
--- /dev/null
+++ b/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter1_expressions_double_value.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue struct {
+ Value types.Number `tfsdk:"value"`
+}
diff --git a/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter1_expressions_int64_value.go b/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter1_expressions_int64_value.go
new file mode 100644
index 000000000..8cbb18356
--- /dev/null
+++ b/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_dimensions_filter1_expressions_int64_value.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value struct {
+ Value types.String `tfsdk:"value"`
+}
diff --git a/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_filter.go b/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_filter.go
new file mode 100644
index 000000000..1da02f611
--- /dev/null
+++ b/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_filter.go
@@ -0,0 +1,10 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter struct {
+ BetweenFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter `tfsdk:"between_filter"`
+ InListFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter `tfsdk:"in_list_filter"`
+ NumericFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter `tfsdk:"numeric_filter"`
+ StringFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter `tfsdk:"string_filter"`
+}
diff --git a/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_from_value.go b/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_from_value.go
new file mode 100644
index 000000000..e372cadfc
--- /dev/null
+++ b/internal/provider/type_source_google_analytics_data_api_schemas_custom_reports_array_dimension_filter_from_value.go
@@ -0,0 +1,8 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue struct {
+ DoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue `tfsdk:"double_value"`
+ Int64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value `tfsdk:"int64_value"`
+}
diff --git a/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_between_filter.go b/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_between_filter.go
new file mode 100644
index 000000000..532421769
--- /dev/null
+++ b/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_between_filter.go
@@ -0,0 +1,8 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter struct {
+ FromValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue `tfsdk:"from_value"`
+ ToValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue `tfsdk:"to_value"`
+}
diff --git a/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_in_list_filter.go b/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_in_list_filter.go
new file mode 100644
index 000000000..7dc26b451
--- /dev/null
+++ b/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_in_list_filter.go
@@ -0,0 +1,10 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter struct {
+ CaseSensitive types.Bool `tfsdk:"case_sensitive"`
+ Values []types.String `tfsdk:"values"`
+}
diff --git a/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_numeric_filter.go b/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_numeric_filter.go
new file mode 100644
index 000000000..406b96c2e
--- /dev/null
+++ b/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_numeric_filter.go
@@ -0,0 +1,10 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter struct {
+ Operation []types.String `tfsdk:"operation"`
+ Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue `tfsdk:"value"`
+}
diff --git a/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_string_filter.go b/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_string_filter.go
new file mode 100644
index 000000000..17b309085
--- /dev/null
+++ b/internal/provider/type_source_google_analytics_data_api_update_schemas_custom_reports_array_string_filter.go
@@ -0,0 +1,11 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter struct {
+ CaseSensitive types.Bool `tfsdk:"case_sensitive"`
+ MatchType []types.String `tfsdk:"match_type"`
+ Value types.String `tfsdk:"value"`
+}
diff --git a/internal/provider/type_source_google_analytics_v4.go b/internal/provider/type_source_google_analytics_v4.go
deleted file mode 100755
index aaa86f81a..000000000
--- a/internal/provider/type_source_google_analytics_v4.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceGoogleAnalyticsV4 struct {
- Credentials *SourceGoogleAnalyticsV4Credentials `tfsdk:"credentials"`
- CustomReports types.String `tfsdk:"custom_reports"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- ViewID types.String `tfsdk:"view_id"`
- WindowInDays types.Int64 `tfsdk:"window_in_days"`
-}
diff --git a/internal/provider/type_source_google_analytics_v4_credentials.go b/internal/provider/type_source_google_analytics_v4_credentials.go
deleted file mode 100755
index 1ea9cfca3..000000000
--- a/internal/provider/type_source_google_analytics_v4_credentials.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceGoogleAnalyticsV4Credentials struct {
- SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth *SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth `tfsdk:"source_google_analytics_v4_credentials_authenticate_via_google_oauth"`
- SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication *SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication `tfsdk:"source_google_analytics_v4_credentials_service_account_key_authentication"`
- SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth *SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth `tfsdk:"source_google_analytics_v4_update_credentials_authenticate_via_google_oauth"`
- SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication *SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication `tfsdk:"source_google_analytics_v4_update_credentials_service_account_key_authentication"`
-}
diff --git a/internal/provider/type_source_google_directory.go b/internal/provider/type_source_google_directory.go
old mode 100755
new mode 100644
index 6b48799b7..de35860fc
--- a/internal/provider/type_source_google_directory.go
+++ b/internal/provider/type_source_google_directory.go
@@ -2,9 +2,6 @@
package provider
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
type SourceGoogleDirectory struct {
Credentials *SourceGoogleDirectoryGoogleCredentials `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_google_directory_google_credentials.go b/internal/provider/type_source_google_directory_google_credentials.go
old mode 100755
new mode 100644
index 4697c4bc6..7f237cff6
--- a/internal/provider/type_source_google_directory_google_credentials.go
+++ b/internal/provider/type_source_google_directory_google_credentials.go
@@ -3,8 +3,6 @@
package provider
type SourceGoogleDirectoryGoogleCredentials struct {
- SourceGoogleDirectoryGoogleCredentialsServiceAccountKey *SourceGoogleDirectoryGoogleCredentialsServiceAccountKey `tfsdk:"source_google_directory_google_credentials_service_account_key"`
- SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth *SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth `tfsdk:"source_google_directory_google_credentials_sign_in_via_google_o_auth"`
- SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey *SourceGoogleDirectoryGoogleCredentialsServiceAccountKey `tfsdk:"source_google_directory_update_google_credentials_service_account_key"`
- SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth *SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth `tfsdk:"source_google_directory_update_google_credentials_sign_in_via_google_o_auth"`
+ ServiceAccountKey *ServiceAccountKey `tfsdk:"service_account_key"`
+ SignInViaGoogleOAuth *DestinationGoogleSheetsAuthenticationViaGoogleOAuth `tfsdk:"sign_in_via_google_o_auth"`
}
diff --git a/internal/provider/type_source_google_directory_google_credentials_service_account_key.go b/internal/provider/type_source_google_directory_google_credentials_service_account_key.go
deleted file mode 100755
index 8187d870e..000000000
--- a/internal/provider/type_source_google_directory_google_credentials_service_account_key.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceGoogleDirectoryGoogleCredentialsServiceAccountKey struct {
- CredentialsJSON types.String `tfsdk:"credentials_json"`
- CredentialsTitle types.String `tfsdk:"credentials_title"`
- Email types.String `tfsdk:"email"`
-}
diff --git a/internal/provider/type_source_google_directory_google_credentials_sign_in_via_google_o_auth.go b/internal/provider/type_source_google_directory_google_credentials_sign_in_via_google_o_auth.go
deleted file mode 100755
index cc96596a1..000000000
--- a/internal/provider/type_source_google_directory_google_credentials_sign_in_via_google_o_auth.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth struct {
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- CredentialsTitle types.String `tfsdk:"credentials_title"`
- RefreshToken types.String `tfsdk:"refresh_token"`
-}
diff --git a/internal/provider/type_source_google_drive.go b/internal/provider/type_source_google_drive.go
new file mode 100644
index 000000000..781767880
--- /dev/null
+++ b/internal/provider/type_source_google_drive.go
@@ -0,0 +1,12 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceGoogleDrive struct {
+ Credentials SourceGoogleDriveAuthentication `tfsdk:"credentials"`
+ FolderURL types.String `tfsdk:"folder_url"`
+ StartDate types.String `tfsdk:"start_date"`
+ Streams []SourceGoogleDriveFileBasedStreamConfig `tfsdk:"streams"`
+}
diff --git a/internal/provider/type_source_google_drive_authentication.go b/internal/provider/type_source_google_drive_authentication.go
new file mode 100644
index 000000000..230f5bb3c
--- /dev/null
+++ b/internal/provider/type_source_google_drive_authentication.go
@@ -0,0 +1,8 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceGoogleDriveAuthentication struct {
+ AuthenticateViaGoogleOAuth *DestinationGoogleSheetsAuthenticationViaGoogleOAuth `tfsdk:"authenticate_via_google_o_auth"`
+ ServiceAccountKeyAuthentication *SourceGoogleDriveServiceAccountKeyAuthentication `tfsdk:"service_account_key_authentication"`
+}
diff --git a/internal/provider/type_source_google_drive_csv_format.go b/internal/provider/type_source_google_drive_csv_format.go
new file mode 100644
index 000000000..978cc97be
--- /dev/null
+++ b/internal/provider/type_source_google_drive_csv_format.go
@@ -0,0 +1,20 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceGoogleDriveCSVFormat struct {
+ Delimiter types.String `tfsdk:"delimiter"`
+ DoubleQuote types.Bool `tfsdk:"double_quote"`
+ Encoding types.String `tfsdk:"encoding"`
+ EscapeChar types.String `tfsdk:"escape_char"`
+ FalseValues []types.String `tfsdk:"false_values"`
+ HeaderDefinition *SourceAzureBlobStorageCSVHeaderDefinition `tfsdk:"header_definition"`
+ NullValues []types.String `tfsdk:"null_values"`
+ QuoteChar types.String `tfsdk:"quote_char"`
+ SkipRowsAfterHeader types.Int64 `tfsdk:"skip_rows_after_header"`
+ SkipRowsBeforeHeader types.Int64 `tfsdk:"skip_rows_before_header"`
+ StringsCanBeNull types.Bool `tfsdk:"strings_can_be_null"`
+ TrueValues []types.String `tfsdk:"true_values"`
+}
diff --git a/internal/provider/type_source_google_drive_file_based_stream_config.go b/internal/provider/type_source_google_drive_file_based_stream_config.go
new file mode 100644
index 000000000..e516ae656
--- /dev/null
+++ b/internal/provider/type_source_google_drive_file_based_stream_config.go
@@ -0,0 +1,16 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceGoogleDriveFileBasedStreamConfig struct {
+ DaysToSyncIfHistoryIsFull types.Int64 `tfsdk:"days_to_sync_if_history_is_full"`
+ Format SourceGoogleDriveFormat `tfsdk:"format"`
+ Globs []types.String `tfsdk:"globs"`
+ InputSchema types.String `tfsdk:"input_schema"`
+ Name types.String `tfsdk:"name"`
+ PrimaryKey types.String `tfsdk:"primary_key"`
+ Schemaless types.Bool `tfsdk:"schemaless"`
+ ValidationPolicy types.String `tfsdk:"validation_policy"`
+}
diff --git a/internal/provider/type_source_google_drive_format.go b/internal/provider/type_source_google_drive_format.go
new file mode 100644
index 000000000..a797299b9
--- /dev/null
+++ b/internal/provider/type_source_google_drive_format.go
@@ -0,0 +1,11 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceGoogleDriveFormat struct {
+ AvroFormat *AvroFormat `tfsdk:"avro_format"`
+ CSVFormat *SourceGoogleDriveCSVFormat `tfsdk:"csv_format"`
+ DocumentFileTypeFormatExperimental *DocumentFileTypeFormatExperimental `tfsdk:"document_file_type_format_experimental"`
+ JsonlFormat *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"jsonl_format"`
+ ParquetFormat *ParquetFormat `tfsdk:"parquet_format"`
+}
diff --git a/internal/provider/type_source_google_sheets_authentication_service_account_key_authentication.go b/internal/provider/type_source_google_drive_service_account_key_authentication.go
old mode 100755
new mode 100644
similarity index 62%
rename from internal/provider/type_source_google_sheets_authentication_service_account_key_authentication.go
rename to internal/provider/type_source_google_drive_service_account_key_authentication.go
index 7abba9ab1..8533d7221
--- a/internal/provider/type_source_google_sheets_authentication_service_account_key_authentication.go
+++ b/internal/provider/type_source_google_drive_service_account_key_authentication.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication struct {
- AuthType types.String `tfsdk:"auth_type"`
+type SourceGoogleDriveServiceAccountKeyAuthentication struct {
ServiceAccountInfo types.String `tfsdk:"service_account_info"`
}
diff --git a/internal/provider/type_source_google_pagespeed_insights.go b/internal/provider/type_source_google_pagespeed_insights.go
old mode 100755
new mode 100644
index 7ca18ff33..877d534fc
--- a/internal/provider/type_source_google_pagespeed_insights.go
+++ b/internal/provider/type_source_google_pagespeed_insights.go
@@ -7,7 +7,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceGooglePagespeedInsights struct {
APIKey types.String `tfsdk:"api_key"`
Categories []types.String `tfsdk:"categories"`
- SourceType types.String `tfsdk:"source_type"`
Strategies []types.String `tfsdk:"strategies"`
Urls []types.String `tfsdk:"urls"`
}
diff --git a/internal/provider/type_source_google_search_console.go b/internal/provider/type_source_google_search_console.go
old mode 100755
new mode 100644
index 802b69795..1c27f89fc
--- a/internal/provider/type_source_google_search_console.go
+++ b/internal/provider/type_source_google_search_console.go
@@ -11,6 +11,5 @@ type SourceGoogleSearchConsole struct {
DataState types.String `tfsdk:"data_state"`
EndDate types.String `tfsdk:"end_date"`
SiteUrls []types.String `tfsdk:"site_urls"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_google_search_console_authentication_type.go b/internal/provider/type_source_google_search_console_authentication_type.go
old mode 100755
new mode 100644
index 4899e7438..59af6dd0a
--- a/internal/provider/type_source_google_search_console_authentication_type.go
+++ b/internal/provider/type_source_google_search_console_authentication_type.go
@@ -3,8 +3,6 @@
package provider
type SourceGoogleSearchConsoleAuthenticationType struct {
- SourceGoogleSearchConsoleAuthenticationTypeOAuth *SourceGoogleSearchConsoleAuthenticationTypeOAuth `tfsdk:"source_google_search_console_authentication_type_o_auth"`
- SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication *SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication `tfsdk:"source_google_search_console_authentication_type_service_account_key_authentication"`
- SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth *SourceGoogleSearchConsoleAuthenticationTypeOAuth `tfsdk:"source_google_search_console_update_authentication_type_o_auth"`
- SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication *SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication `tfsdk:"source_google_search_console_update_authentication_type_service_account_key_authentication"`
+ OAuth *AuthenticateViaGoogleOauth `tfsdk:"o_auth"`
+ ServiceAccountKeyAuthentication *SourceGoogleSearchConsoleServiceAccountKeyAuthentication `tfsdk:"service_account_key_authentication"`
}
diff --git a/internal/provider/type_source_google_search_console_custom_report_config.go b/internal/provider/type_source_google_search_console_custom_report_config.go
old mode 100755
new mode 100644
diff --git a/internal/provider/type_source_google_search_console_authentication_type_service_account_key_authentication.go b/internal/provider/type_source_google_search_console_service_account_key_authentication.go
old mode 100755
new mode 100644
similarity index 65%
rename from internal/provider/type_source_google_search_console_authentication_type_service_account_key_authentication.go
rename to internal/provider/type_source_google_search_console_service_account_key_authentication.go
index 9279abb95..a886d044b
--- a/internal/provider/type_source_google_search_console_authentication_type_service_account_key_authentication.go
+++ b/internal/provider/type_source_google_search_console_service_account_key_authentication.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication struct {
- AuthType types.String `tfsdk:"auth_type"`
+type SourceGoogleSearchConsoleServiceAccountKeyAuthentication struct {
Email types.String `tfsdk:"email"`
ServiceAccountInfo types.String `tfsdk:"service_account_info"`
}
diff --git a/internal/provider/type_source_google_sheets.go b/internal/provider/type_source_google_sheets.go
old mode 100755
new mode 100644
index 72c0282b1..b316ac540
--- a/internal/provider/type_source_google_sheets.go
+++ b/internal/provider/type_source_google_sheets.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceGoogleSheets struct {
- Credentials SourceGoogleSheetsAuthentication `tfsdk:"credentials"`
- NamesConversion types.Bool `tfsdk:"names_conversion"`
- SourceType types.String `tfsdk:"source_type"`
- SpreadsheetID types.String `tfsdk:"spreadsheet_id"`
+ Credentials SourceGoogleDriveAuthentication `tfsdk:"credentials"`
+ NamesConversion types.Bool `tfsdk:"names_conversion"`
+ SpreadsheetID types.String `tfsdk:"spreadsheet_id"`
}
diff --git a/internal/provider/type_source_google_sheets_authentication.go b/internal/provider/type_source_google_sheets_authentication.go
deleted file mode 100755
index dab904592..000000000
--- a/internal/provider/type_source_google_sheets_authentication.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceGoogleSheetsAuthentication struct {
- SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth *SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth `tfsdk:"source_google_sheets_authentication_authenticate_via_google_o_auth"`
- SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication *SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication `tfsdk:"source_google_sheets_authentication_service_account_key_authentication"`
- SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth *SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth `tfsdk:"source_google_sheets_update_authentication_authenticate_via_google_o_auth"`
- SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication *SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication `tfsdk:"source_google_sheets_update_authentication_service_account_key_authentication"`
-}
diff --git a/internal/provider/type_source_google_sheets_authentication_authenticate_via_google_o_auth.go b/internal/provider/type_source_google_sheets_authentication_authenticate_via_google_o_auth.go
deleted file mode 100755
index e580b10f1..000000000
--- a/internal/provider/type_source_google_sheets_authentication_authenticate_via_google_o_auth.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
-}
diff --git a/internal/provider/type_source_google_webfonts.go b/internal/provider/type_source_google_webfonts.go
old mode 100755
new mode 100644
index 3c97443ee..07e6f9e1f
--- a/internal/provider/type_source_google_webfonts.go
+++ b/internal/provider/type_source_google_webfonts.go
@@ -9,5 +9,4 @@ type SourceGoogleWebfonts struct {
APIKey types.String `tfsdk:"api_key"`
PrettyPrint types.String `tfsdk:"pretty_print"`
Sort types.String `tfsdk:"sort"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_google_workspace_admin_reports.go b/internal/provider/type_source_google_workspace_admin_reports.go
old mode 100755
new mode 100644
index 8f727b4b6..3df22ffc8
--- a/internal/provider/type_source_google_workspace_admin_reports.go
+++ b/internal/provider/type_source_google_workspace_admin_reports.go
@@ -8,5 +8,4 @@ type SourceGoogleWorkspaceAdminReports struct {
CredentialsJSON types.String `tfsdk:"credentials_json"`
Email types.String `tfsdk:"email"`
Lookback types.Int64 `tfsdk:"lookback"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_greenhouse.go b/internal/provider/type_source_greenhouse.go
deleted file mode 100755
index fa1a36f60..000000000
--- a/internal/provider/type_source_greenhouse.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceGreenhouse struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_gridly.go b/internal/provider/type_source_gridly.go
old mode 100755
new mode 100644
index d485221be..2a3b0aede
--- a/internal/provider/type_source_gridly.go
+++ b/internal/provider/type_source_gridly.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceGridly struct {
- APIKey types.String `tfsdk:"api_key"`
- GridID types.String `tfsdk:"grid_id"`
- SourceType types.String `tfsdk:"source_type"`
+ APIKey types.String `tfsdk:"api_key"`
+ GridID types.String `tfsdk:"grid_id"`
}
diff --git a/internal/provider/type_source_harvest.go b/internal/provider/type_source_harvest.go
old mode 100755
new mode 100644
index ce8c20548..56a800e37
--- a/internal/provider/type_source_harvest.go
+++ b/internal/provider/type_source_harvest.go
@@ -9,5 +9,4 @@ type SourceHarvest struct {
Credentials *SourceHarvestAuthenticationMechanism `tfsdk:"credentials"`
ReplicationEndDate types.String `tfsdk:"replication_end_date"`
ReplicationStartDate types.String `tfsdk:"replication_start_date"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_harvest1.go b/internal/provider/type_source_harvest1.go
deleted file mode 100755
index 016ff26f0..000000000
--- a/internal/provider/type_source_harvest1.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceHarvest1 struct {
- AccountID types.String `tfsdk:"account_id"`
- Credentials *SourceHarvestAuthenticationMechanism1 `tfsdk:"credentials"`
- ReplicationEndDate types.String `tfsdk:"replication_end_date"`
- ReplicationStartDate types.String `tfsdk:"replication_start_date"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_harvest_authentication_mechanism_authenticate_with_personal_access_token.go b/internal/provider/type_source_harvest_authenticate_with_personal_access_token.go
old mode 100755
new mode 100644
similarity index 66%
rename from internal/provider/type_source_harvest_authentication_mechanism_authenticate_with_personal_access_token.go
rename to internal/provider/type_source_harvest_authenticate_with_personal_access_token.go
index f2e790608..8d746e984
--- a/internal/provider/type_source_harvest_authentication_mechanism_authenticate_with_personal_access_token.go
+++ b/internal/provider/type_source_harvest_authenticate_with_personal_access_token.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken struct {
- APIToken types.String `tfsdk:"api_token"`
- AuthType types.String `tfsdk:"auth_type"`
+type SourceHarvestAuthenticateWithPersonalAccessToken struct {
AdditionalProperties types.String `tfsdk:"additional_properties"`
+ APIToken types.String `tfsdk:"api_token"`
}
diff --git a/internal/provider/type_source_harvest_authentication_mechanism.go b/internal/provider/type_source_harvest_authentication_mechanism.go
old mode 100755
new mode 100644
index 58c50b85f..7827dc888
--- a/internal/provider/type_source_harvest_authentication_mechanism.go
+++ b/internal/provider/type_source_harvest_authentication_mechanism.go
@@ -3,8 +3,6 @@
package provider
type SourceHarvestAuthenticationMechanism struct {
- SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth *SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth `tfsdk:"source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth"`
- SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken *SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken `tfsdk:"source_harvest_authentication_mechanism_authenticate_with_personal_access_token"`
- SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth *SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth `tfsdk:"source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth"`
- SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken *SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken `tfsdk:"source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token"`
+ AuthenticateViaHarvestOAuth *AuthenticateViaHarvestOAuth `tfsdk:"authenticate_via_harvest_o_auth"`
+ AuthenticateWithPersonalAccessToken *SourceHarvestAuthenticateWithPersonalAccessToken `tfsdk:"authenticate_with_personal_access_token"`
}
diff --git a/internal/provider/type_source_harvest_authentication_mechanism1.go b/internal/provider/type_source_harvest_authentication_mechanism1.go
deleted file mode 100755
index e6948523c..000000000
--- a/internal/provider/type_source_harvest_authentication_mechanism1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceHarvestAuthenticationMechanism1 struct {
- SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth *SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth1 `tfsdk:"source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth"`
- SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken *SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken1 `tfsdk:"source_harvest_authentication_mechanism_authenticate_with_personal_access_token"`
- SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth *SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth1 `tfsdk:"source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth"`
- SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken *SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken1 `tfsdk:"source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token"`
-}
diff --git a/internal/provider/type_source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth.go b/internal/provider/type_source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth.go
deleted file mode 100755
index 438f3c3ed..000000000
--- a/internal/provider/type_source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth1.go b/internal/provider/type_source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth1.go
deleted file mode 100755
index 8fc71f167..000000000
--- a/internal/provider/type_source_harvest_authentication_mechanism_authenticate_via_harvest_o_auth1.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth1 struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_harvest_authentication_mechanism_authenticate_with_personal_access_token1.go b/internal/provider/type_source_harvest_authentication_mechanism_authenticate_with_personal_access_token1.go
deleted file mode 100755
index 44536c7c1..000000000
--- a/internal/provider/type_source_harvest_authentication_mechanism_authenticate_with_personal_access_token1.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken1 struct {
- APIToken types.String `tfsdk:"api_token"`
- AuthType types.String `tfsdk:"auth_type"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth.go b/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth.go
deleted file mode 100755
index ee3c9270f..000000000
--- a/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth1.go b/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth1.go
deleted file mode 100755
index 179de9a6f..000000000
--- a/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_via_harvest_o_auth1.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth1 struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token.go b/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token.go
deleted file mode 100755
index f7987737e..000000000
--- a/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken struct {
- APIToken types.String `tfsdk:"api_token"`
- AuthType types.String `tfsdk:"auth_type"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token1.go b/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token1.go
deleted file mode 100755
index 45c5bdcc4..000000000
--- a/internal/provider/type_source_harvest_update_authentication_mechanism_authenticate_with_personal_access_token1.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken1 struct {
- APIToken types.String `tfsdk:"api_token"`
- AuthType types.String `tfsdk:"auth_type"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_hubplanner.go b/internal/provider/type_source_hubplanner.go
deleted file mode 100755
index 75d24d322..000000000
--- a/internal/provider/type_source_hubplanner.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceHubplanner struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_hubspot.go b/internal/provider/type_source_hubspot.go
old mode 100755
new mode 100644
index 589a3c00f..2544104d5
--- a/internal/provider/type_source_hubspot.go
+++ b/internal/provider/type_source_hubspot.go
@@ -6,6 +6,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceHubspot struct {
Credentials SourceHubspotAuthentication `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_hubspot_authentication.go b/internal/provider/type_source_hubspot_authentication.go
old mode 100755
new mode 100644
index 82b7ee4dd..234a188d8
--- a/internal/provider/type_source_hubspot_authentication.go
+++ b/internal/provider/type_source_hubspot_authentication.go
@@ -3,8 +3,6 @@
package provider
type SourceHubspotAuthentication struct {
- SourceHubspotAuthenticationOAuth *SourceHubspotAuthenticationOAuth `tfsdk:"source_hubspot_authentication_o_auth"`
- SourceHubspotAuthenticationPrivateApp *SourceHubspotAuthenticationPrivateApp `tfsdk:"source_hubspot_authentication_private_app"`
- SourceHubspotUpdateAuthenticationOAuth *SourceHubspotAuthenticationOAuth `tfsdk:"source_hubspot_update_authentication_o_auth"`
- SourceHubspotUpdateAuthenticationPrivateApp *SourceHubspotAuthenticationPrivateApp `tfsdk:"source_hubspot_update_authentication_private_app"`
+ OAuth *DestinationGoogleSheetsAuthenticationViaGoogleOAuth `tfsdk:"o_auth"`
+ PrivateApp *OAuth2AccessToken `tfsdk:"private_app"`
}
diff --git a/internal/provider/type_source_hubspot_authentication_o_auth.go b/internal/provider/type_source_hubspot_authentication_o_auth.go
deleted file mode 100755
index 912b45302..000000000
--- a/internal/provider/type_source_hubspot_authentication_o_auth.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceHubspotAuthenticationOAuth struct {
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- CredentialsTitle types.String `tfsdk:"credentials_title"`
- RefreshToken types.String `tfsdk:"refresh_token"`
-}
diff --git a/internal/provider/type_source_hubspot_authentication_private_app.go b/internal/provider/type_source_hubspot_authentication_private_app.go
deleted file mode 100755
index 16314a52d..000000000
--- a/internal/provider/type_source_hubspot_authentication_private_app.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceHubspotAuthenticationPrivateApp struct {
- AccessToken types.String `tfsdk:"access_token"`
- CredentialsTitle types.String `tfsdk:"credentials_title"`
-}
diff --git a/internal/provider/type_source_insightly.go b/internal/provider/type_source_insightly.go
old mode 100755
new mode 100644
index 1b48c30ba..9d822746c
--- a/internal/provider/type_source_insightly.go
+++ b/internal/provider/type_source_insightly.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceInsightly struct {
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- Token types.String `tfsdk:"token"`
+ StartDate types.String `tfsdk:"start_date"`
+ Token types.String `tfsdk:"token"`
}
diff --git a/internal/provider/type_source_instagram.go b/internal/provider/type_source_instagram.go
old mode 100755
new mode 100644
index 90ecbde49..0403c73e7
--- a/internal/provider/type_source_instagram.go
+++ b/internal/provider/type_source_instagram.go
@@ -8,6 +8,5 @@ type SourceInstagram struct {
AccessToken types.String `tfsdk:"access_token"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_instatus.go b/internal/provider/type_source_instatus.go
deleted file mode 100755
index 70388d815..000000000
--- a/internal/provider/type_source_instatus.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceInstatus struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_intercom.go b/internal/provider/type_source_intercom.go
old mode 100755
new mode 100644
index e1166b90f..c58883c13
--- a/internal/provider/type_source_intercom.go
+++ b/internal/provider/type_source_intercom.go
@@ -8,6 +8,5 @@ type SourceIntercom struct {
AccessToken types.String `tfsdk:"access_token"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_ip2whois.go b/internal/provider/type_source_ip2whois.go
old mode 100755
new mode 100644
index f74c3bc39..919805441
--- a/internal/provider/type_source_ip2whois.go
+++ b/internal/provider/type_source_ip2whois.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceIp2whois struct {
- APIKey types.String `tfsdk:"api_key"`
- Domain types.String `tfsdk:"domain"`
- SourceType types.String `tfsdk:"source_type"`
+ APIKey types.String `tfsdk:"api_key"`
+ Domain types.String `tfsdk:"domain"`
}
diff --git a/internal/provider/type_source_iterable.go b/internal/provider/type_source_iterable.go
deleted file mode 100755
index 0051f2296..000000000
--- a/internal/provider/type_source_iterable.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceIterable struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
-}
diff --git a/internal/provider/type_source_jira.go b/internal/provider/type_source_jira.go
old mode 100755
new mode 100644
index 4095396ab..2a0fa2950
--- a/internal/provider/type_source_jira.go
+++ b/internal/provider/type_source_jira.go
@@ -10,8 +10,9 @@ type SourceJira struct {
Email types.String `tfsdk:"email"`
EnableExperimentalStreams types.Bool `tfsdk:"enable_experimental_streams"`
ExpandIssueChangelog types.Bool `tfsdk:"expand_issue_changelog"`
+ ExpandIssueTransition types.Bool `tfsdk:"expand_issue_transition"`
+ IssuesStreamExpandWith []types.String `tfsdk:"issues_stream_expand_with"`
Projects []types.String `tfsdk:"projects"`
RenderFields types.Bool `tfsdk:"render_fields"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_k6_cloud.go b/internal/provider/type_source_k6_cloud.go
old mode 100755
new mode 100644
index 2940ae56c..4a9ad2c66
--- a/internal/provider/type_source_k6_cloud.go
+++ b/internal/provider/type_source_k6_cloud.go
@@ -5,6 +5,5 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceK6Cloud struct {
- APIToken types.String `tfsdk:"api_token"`
- SourceType types.String `tfsdk:"source_type"`
+ APIToken types.String `tfsdk:"api_token"`
}
diff --git a/internal/provider/type_source_klarna.go b/internal/provider/type_source_klarna.go
old mode 100755
new mode 100644
index b094b10d7..4fd0b074f
--- a/internal/provider/type_source_klarna.go
+++ b/internal/provider/type_source_klarna.go
@@ -8,6 +8,5 @@ type SourceKlarna struct {
Password types.String `tfsdk:"password"`
Playground types.Bool `tfsdk:"playground"`
Region types.String `tfsdk:"region"`
- SourceType types.String `tfsdk:"source_type"`
Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_source_klaviyo.go b/internal/provider/type_source_klaviyo.go
old mode 100755
new mode 100644
index 96972290a..0ee5e8e4a
--- a/internal/provider/type_source_klaviyo.go
+++ b/internal/provider/type_source_klaviyo.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceKlaviyo struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIKey types.String `tfsdk:"api_key"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_kustomer_singer.go b/internal/provider/type_source_kustomer_singer.go
old mode 100755
new mode 100644
index bf1a0111f..183e14e6e
--- a/internal/provider/type_source_kustomer_singer.go
+++ b/internal/provider/type_source_kustomer_singer.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceKustomerSinger struct {
- APIToken types.String `tfsdk:"api_token"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIToken types.String `tfsdk:"api_token"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_kyve.go b/internal/provider/type_source_kyve.go
old mode 100755
new mode 100644
index a65900788..36a72608b
--- a/internal/provider/type_source_kyve.go
+++ b/internal/provider/type_source_kyve.go
@@ -5,10 +5,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceKyve struct {
- MaxPages types.Int64 `tfsdk:"max_pages"`
- PageSize types.Int64 `tfsdk:"page_size"`
- PoolIds types.String `tfsdk:"pool_ids"`
- SourceType types.String `tfsdk:"source_type"`
- StartIds types.String `tfsdk:"start_ids"`
- URLBase types.String `tfsdk:"url_base"`
+ MaxPages types.Int64 `tfsdk:"max_pages"`
+ PageSize types.Int64 `tfsdk:"page_size"`
+ PoolIds types.String `tfsdk:"pool_ids"`
+ StartIds types.String `tfsdk:"start_ids"`
+ URLBase types.String `tfsdk:"url_base"`
}
diff --git a/internal/provider/type_source_lemlist.go b/internal/provider/type_source_lemlist.go
deleted file mode 100755
index 1f9ece86d..000000000
--- a/internal/provider/type_source_lemlist.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceLemlist struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_lever_hiring.go b/internal/provider/type_source_lever_hiring.go
old mode 100755
new mode 100644
index 6ac4e06e8..b41fa8ffc
--- a/internal/provider/type_source_lever_hiring.go
+++ b/internal/provider/type_source_lever_hiring.go
@@ -7,6 +7,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceLeverHiring struct {
Credentials *SourceLeverHiringAuthenticationMechanism `tfsdk:"credentials"`
Environment types.String `tfsdk:"environment"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_lever_hiring_authentication_mechanism.go b/internal/provider/type_source_lever_hiring_authentication_mechanism.go
old mode 100755
new mode 100644
index e3787fcf8..95afa779a
--- a/internal/provider/type_source_lever_hiring_authentication_mechanism.go
+++ b/internal/provider/type_source_lever_hiring_authentication_mechanism.go
@@ -3,8 +3,6 @@
package provider
type SourceLeverHiringAuthenticationMechanism struct {
- SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey `tfsdk:"source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key"`
- SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth `tfsdk:"source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth"`
- SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey `tfsdk:"source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_api_key"`
- SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth `tfsdk:"source_lever_hiring_update_authentication_mechanism_authenticate_via_lever_o_auth"`
+ AuthenticateViaLeverAPIKey *APIKeyAuth `tfsdk:"authenticate_via_lever_api_key"`
+ AuthenticateViaLeverOAuth *AuthenticateViaLeverOAuth `tfsdk:"authenticate_via_lever_o_auth"`
}
diff --git a/internal/provider/type_source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key.go b/internal/provider/type_source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key.go
deleted file mode 100755
index 9d691d4c4..000000000
--- a/internal/provider/type_source_lever_hiring_authentication_mechanism_authenticate_via_lever_api_key.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey struct {
- APIKey types.String `tfsdk:"api_key"`
- AuthType types.String `tfsdk:"auth_type"`
-}
diff --git a/internal/provider/type_source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth.go b/internal/provider/type_source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth.go
deleted file mode 100755
index 48d504aad..000000000
--- a/internal/provider/type_source_lever_hiring_authentication_mechanism_authenticate_via_lever_o_auth.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
-}
diff --git a/internal/provider/type_source_linkedin_ads.go b/internal/provider/type_source_linkedin_ads.go
old mode 100755
new mode 100644
index 5a78e0ba9..c2d8a2fef
--- a/internal/provider/type_source_linkedin_ads.go
+++ b/internal/provider/type_source_linkedin_ads.go
@@ -5,9 +5,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceLinkedinAds struct {
- AccountIds []types.Int64 `tfsdk:"account_ids"`
- AdAnalyticsReports []SourceLinkedinAdsAdAnalyticsReportConfiguration `tfsdk:"ad_analytics_reports"`
- Credentials *SourceLinkedinAdsAuthentication `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ AccountIds []types.Int64 `tfsdk:"account_ids"`
+ AdAnalyticsReports []AdAnalyticsReportConfiguration `tfsdk:"ad_analytics_reports"`
+ Credentials *SourceLinkedinAdsAuthentication `tfsdk:"credentials"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_linkedin_ads_authentication.go b/internal/provider/type_source_linkedin_ads_authentication.go
old mode 100755
new mode 100644
index f69a45416..52970fec4
--- a/internal/provider/type_source_linkedin_ads_authentication.go
+++ b/internal/provider/type_source_linkedin_ads_authentication.go
@@ -3,8 +3,6 @@
package provider
type SourceLinkedinAdsAuthentication struct {
- SourceLinkedinAdsAuthenticationAccessToken *SourceLinkedinAdsAuthenticationAccessToken `tfsdk:"source_linkedin_ads_authentication_access_token"`
- SourceLinkedinAdsAuthenticationOAuth20 *SourceLinkedinAdsAuthenticationOAuth20 `tfsdk:"source_linkedin_ads_authentication_o_auth2_0"`
- SourceLinkedinAdsUpdateAuthenticationAccessToken *SourceLinkedinAdsAuthenticationAccessToken `tfsdk:"source_linkedin_ads_update_authentication_access_token"`
- SourceLinkedinAdsUpdateAuthenticationOAuth20 *SourceLinkedinAdsAuthenticationOAuth20 `tfsdk:"source_linkedin_ads_update_authentication_o_auth2_0"`
+ AccessToken *OAuth2AccessToken `tfsdk:"access_token"`
+ OAuth20 *DestinationGoogleSheetsAuthenticationViaGoogleOAuth `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_linkedin_ads_authentication_access_token.go b/internal/provider/type_source_linkedin_ads_authentication_access_token.go
deleted file mode 100755
index a128e18ba..000000000
--- a/internal/provider/type_source_linkedin_ads_authentication_access_token.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceLinkedinAdsAuthenticationAccessToken struct {
- AccessToken types.String `tfsdk:"access_token"`
- AuthMethod types.String `tfsdk:"auth_method"`
-}
diff --git a/internal/provider/type_source_linkedin_pages.go b/internal/provider/type_source_linkedin_pages.go
old mode 100755
new mode 100644
index 19714d492..583316fdc
--- a/internal/provider/type_source_linkedin_pages.go
+++ b/internal/provider/type_source_linkedin_pages.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceLinkedinPages struct {
- Credentials *SourceLinkedinPagesAuthentication `tfsdk:"credentials"`
- OrgID types.String `tfsdk:"org_id"`
- SourceType types.String `tfsdk:"source_type"`
+ Credentials *SourceLinkedinAdsAuthentication `tfsdk:"credentials"`
+ OrgID types.String `tfsdk:"org_id"`
}
diff --git a/internal/provider/type_source_linkedin_pages_authentication.go b/internal/provider/type_source_linkedin_pages_authentication.go
deleted file mode 100755
index 160b48601..000000000
--- a/internal/provider/type_source_linkedin_pages_authentication.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceLinkedinPagesAuthentication struct {
- SourceLinkedinPagesAuthenticationAccessToken *SourceLinkedinAdsAuthenticationAccessToken `tfsdk:"source_linkedin_pages_authentication_access_token"`
- SourceLinkedinPagesAuthenticationOAuth20 *SourceLinkedinAdsAuthenticationOAuth20 `tfsdk:"source_linkedin_pages_authentication_o_auth2_0"`
- SourceLinkedinPagesUpdateAuthenticationAccessToken *SourceLinkedinAdsAuthenticationAccessToken `tfsdk:"source_linkedin_pages_update_authentication_access_token"`
- SourceLinkedinPagesUpdateAuthenticationOAuth20 *SourceLinkedinAdsAuthenticationOAuth20 `tfsdk:"source_linkedin_pages_update_authentication_o_auth2_0"`
-}
diff --git a/internal/provider/type_source_linnworks.go b/internal/provider/type_source_linnworks.go
old mode 100755
new mode 100644
index 869e64032..289316c59
--- a/internal/provider/type_source_linnworks.go
+++ b/internal/provider/type_source_linnworks.go
@@ -7,7 +7,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceLinnworks struct {
ApplicationID types.String `tfsdk:"application_id"`
ApplicationSecret types.String `tfsdk:"application_secret"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
Token types.String `tfsdk:"token"`
}
diff --git a/internal/provider/type_source_lokalise.go b/internal/provider/type_source_lokalise.go
old mode 100755
new mode 100644
index e7d0454eb..5d1f8b988
--- a/internal/provider/type_source_lokalise.go
+++ b/internal/provider/type_source_lokalise.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceLokalise struct {
- APIKey types.String `tfsdk:"api_key"`
- ProjectID types.String `tfsdk:"project_id"`
- SourceType types.String `tfsdk:"source_type"`
+ APIKey types.String `tfsdk:"api_key"`
+ ProjectID types.String `tfsdk:"project_id"`
}
diff --git a/internal/provider/type_source_mailchimp.go b/internal/provider/type_source_mailchimp.go
old mode 100755
new mode 100644
index fd1af60f1..4cc7fb003
--- a/internal/provider/type_source_mailchimp.go
+++ b/internal/provider/type_source_mailchimp.go
@@ -7,5 +7,4 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceMailchimp struct {
CampaignID types.String `tfsdk:"campaign_id"`
Credentials *SourceMailchimpAuthentication `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_mailchimp_authentication.go b/internal/provider/type_source_mailchimp_authentication.go
old mode 100755
new mode 100644
index 3f25ec668..6ae1784a0
--- a/internal/provider/type_source_mailchimp_authentication.go
+++ b/internal/provider/type_source_mailchimp_authentication.go
@@ -3,8 +3,6 @@
package provider
type SourceMailchimpAuthentication struct {
- SourceMailchimpAuthenticationAPIKey *SourceMailchimpAuthenticationAPIKey `tfsdk:"source_mailchimp_authentication_api_key"`
- SourceMailchimpAuthenticationOAuth20 *SourceMailchimpAuthenticationOAuth20 `tfsdk:"source_mailchimp_authentication_o_auth2_0"`
- SourceMailchimpUpdateAuthenticationAPIKey *SourceMailchimpAuthenticationAPIKey `tfsdk:"source_mailchimp_update_authentication_api_key"`
- SourceMailchimpUpdateAuthenticationOAuth20 *SourceMailchimpAuthenticationOAuth20 `tfsdk:"source_mailchimp_update_authentication_o_auth2_0"`
+ APIKey *APIKey `tfsdk:"api_key"`
+ OAuth20 *OAuth `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_mailchimp_authentication_api_key.go b/internal/provider/type_source_mailchimp_authentication_api_key.go
deleted file mode 100755
index 7e095b9e8..000000000
--- a/internal/provider/type_source_mailchimp_authentication_api_key.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMailchimpAuthenticationAPIKey struct {
- Apikey types.String `tfsdk:"apikey"`
- AuthType types.String `tfsdk:"auth_type"`
-}
diff --git a/internal/provider/type_source_mailgun.go b/internal/provider/type_source_mailgun.go
old mode 100755
new mode 100644
index 9f9edf710..eea1e1972
--- a/internal/provider/type_source_mailgun.go
+++ b/internal/provider/type_source_mailgun.go
@@ -7,6 +7,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceMailgun struct {
DomainRegion types.String `tfsdk:"domain_region"`
PrivateKey types.String `tfsdk:"private_key"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_mailjet_sms.go b/internal/provider/type_source_mailjet_sms.go
old mode 100755
new mode 100644
index 7894af472..23ac7ea4a
--- a/internal/provider/type_source_mailjet_sms.go
+++ b/internal/provider/type_source_mailjet_sms.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceMailjetSms struct {
- EndDate types.Int64 `tfsdk:"end_date"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.Int64 `tfsdk:"start_date"`
- Token types.String `tfsdk:"token"`
+ EndDate types.Int64 `tfsdk:"end_date"`
+ StartDate types.Int64 `tfsdk:"start_date"`
+ Token types.String `tfsdk:"token"`
}
diff --git a/internal/provider/type_source_marketo.go b/internal/provider/type_source_marketo.go
old mode 100755
new mode 100644
index 44879c8b1..b30c02f45
--- a/internal/provider/type_source_marketo.go
+++ b/internal/provider/type_source_marketo.go
@@ -8,6 +8,5 @@ type SourceMarketo struct {
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
DomainURL types.String `tfsdk:"domain_url"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_metabase.go b/internal/provider/type_source_metabase.go
old mode 100755
new mode 100644
index 304488a1b..b685208ce
--- a/internal/provider/type_source_metabase.go
+++ b/internal/provider/type_source_metabase.go
@@ -8,6 +8,5 @@ type SourceMetabase struct {
InstanceAPIURL types.String `tfsdk:"instance_api_url"`
Password types.String `tfsdk:"password"`
SessionToken types.String `tfsdk:"session_token"`
- SourceType types.String `tfsdk:"source_type"`
Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_source_microsoft_teams.go b/internal/provider/type_source_microsoft_teams.go
old mode 100755
new mode 100644
index 932aa84f5..65cdea911
--- a/internal/provider/type_source_microsoft_teams.go
+++ b/internal/provider/type_source_microsoft_teams.go
@@ -7,5 +7,4 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceMicrosoftTeams struct {
Credentials *SourceMicrosoftTeamsAuthenticationMechanism `tfsdk:"credentials"`
Period types.String `tfsdk:"period"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_microsoft_teams_authentication_mechanism.go b/internal/provider/type_source_microsoft_teams_authentication_mechanism.go
old mode 100755
new mode 100644
index aa424b7e7..ef94a1995
--- a/internal/provider/type_source_microsoft_teams_authentication_mechanism.go
+++ b/internal/provider/type_source_microsoft_teams_authentication_mechanism.go
@@ -3,8 +3,6 @@
package provider
type SourceMicrosoftTeamsAuthenticationMechanism struct {
- SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft `tfsdk:"source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft"`
- SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 `tfsdk:"source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0"`
- SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft `tfsdk:"source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft"`
- SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 `tfsdk:"source_microsoft_teams_update_authentication_mechanism_authenticate_via_microsoft_o_auth_2_0"`
+ AuthenticateViaMicrosoft *AuthenticateViaMicrosoft `tfsdk:"authenticate_via_microsoft"`
+ AuthenticateViaMicrosoftOAuth20 *AuthenticateViaMicrosoftOAuth20 `tfsdk:"authenticate_via_microsoft_o_auth20"`
}
diff --git a/internal/provider/type_source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth20.go b/internal/provider/type_source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth20.go
deleted file mode 100755
index bd93fc2c3..000000000
--- a/internal/provider/type_source_microsoft_teams_authentication_mechanism_authenticate_via_microsoft_o_auth20.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- TenantID types.String `tfsdk:"tenant_id"`
-}
diff --git a/internal/provider/type_source_mixpanel.go b/internal/provider/type_source_mixpanel.go
old mode 100755
new mode 100644
index ae0b37080..25a52fab7
--- a/internal/provider/type_source_mixpanel.go
+++ b/internal/provider/type_source_mixpanel.go
@@ -5,14 +5,12 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceMixpanel struct {
- AttributionWindow types.Int64 `tfsdk:"attribution_window"`
- Credentials *SourceMixpanelAuthenticationWildcard `tfsdk:"credentials"`
- DateWindowSize types.Int64 `tfsdk:"date_window_size"`
- EndDate types.String `tfsdk:"end_date"`
- ProjectID types.Int64 `tfsdk:"project_id"`
- ProjectTimezone types.String `tfsdk:"project_timezone"`
- Region types.String `tfsdk:"region"`
- SelectPropertiesByDefault types.Bool `tfsdk:"select_properties_by_default"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ AttributionWindow types.Int64 `tfsdk:"attribution_window"`
+ Credentials SourceMixpanelAuthenticationWildcard `tfsdk:"credentials"`
+ DateWindowSize types.Int64 `tfsdk:"date_window_size"`
+ EndDate types.String `tfsdk:"end_date"`
+ ProjectTimezone types.String `tfsdk:"project_timezone"`
+ Region types.String `tfsdk:"region"`
+ SelectPropertiesByDefault types.Bool `tfsdk:"select_properties_by_default"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_mixpanel_authentication_wildcard.go b/internal/provider/type_source_mixpanel_authentication_wildcard.go
old mode 100755
new mode 100644
index 7b8275473..7ba92e366
--- a/internal/provider/type_source_mixpanel_authentication_wildcard.go
+++ b/internal/provider/type_source_mixpanel_authentication_wildcard.go
@@ -3,8 +3,6 @@
package provider
type SourceMixpanelAuthenticationWildcard struct {
- SourceMixpanelAuthenticationWildcardProjectSecret *SourceMixpanelAuthenticationWildcardProjectSecret `tfsdk:"source_mixpanel_authentication_wildcard_project_secret"`
- SourceMixpanelAuthenticationWildcardServiceAccount *SourceMixpanelAuthenticationWildcardServiceAccount `tfsdk:"source_mixpanel_authentication_wildcard_service_account"`
- SourceMixpanelUpdateAuthenticationWildcardProjectSecret *SourceMixpanelAuthenticationWildcardProjectSecret `tfsdk:"source_mixpanel_update_authentication_wildcard_project_secret"`
- SourceMixpanelUpdateAuthenticationWildcardServiceAccount *SourceMixpanelAuthenticationWildcardServiceAccount `tfsdk:"source_mixpanel_update_authentication_wildcard_service_account"`
+ ProjectSecret *ProjectSecret `tfsdk:"project_secret"`
+ ServiceAccount *ServiceAccount `tfsdk:"service_account"`
}
diff --git a/internal/provider/type_source_mixpanel_authentication_wildcard_project_secret.go b/internal/provider/type_source_mixpanel_authentication_wildcard_project_secret.go
deleted file mode 100755
index bd47cad79..000000000
--- a/internal/provider/type_source_mixpanel_authentication_wildcard_project_secret.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMixpanelAuthenticationWildcardProjectSecret struct {
- APISecret types.String `tfsdk:"api_secret"`
- OptionTitle types.String `tfsdk:"option_title"`
-}
diff --git a/internal/provider/type_source_mixpanel_authentication_wildcard_service_account.go b/internal/provider/type_source_mixpanel_authentication_wildcard_service_account.go
deleted file mode 100755
index 5b36e3f08..000000000
--- a/internal/provider/type_source_mixpanel_authentication_wildcard_service_account.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMixpanelAuthenticationWildcardServiceAccount struct {
- OptionTitle types.String `tfsdk:"option_title"`
- Secret types.String `tfsdk:"secret"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/internal/provider/type_source_monday.go b/internal/provider/type_source_monday.go
old mode 100755
new mode 100644
index 3451a818c..dd4243717
--- a/internal/provider/type_source_monday.go
+++ b/internal/provider/type_source_monday.go
@@ -2,9 +2,6 @@
package provider
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
type SourceMonday struct {
Credentials *SourceMondayAuthorizationMethod `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_monday_authorization_method.go b/internal/provider/type_source_monday_authorization_method.go
old mode 100755
new mode 100644
index e6bbcb368..cb291dd1d
--- a/internal/provider/type_source_monday_authorization_method.go
+++ b/internal/provider/type_source_monday_authorization_method.go
@@ -3,8 +3,6 @@
package provider
type SourceMondayAuthorizationMethod struct {
- SourceMondayAuthorizationMethodAPIToken *SourceMondayAuthorizationMethodAPIToken `tfsdk:"source_monday_authorization_method_api_token"`
- SourceMondayAuthorizationMethodOAuth20 *SourceMondayAuthorizationMethodOAuth20 `tfsdk:"source_monday_authorization_method_o_auth2_0"`
- SourceMondayUpdateAuthorizationMethodAPIToken *SourceMondayAuthorizationMethodAPIToken `tfsdk:"source_monday_update_authorization_method_api_token"`
- SourceMondayUpdateAuthorizationMethodOAuth20 *SourceMondayAuthorizationMethodOAuth20 `tfsdk:"source_monday_update_authorization_method_o_auth2_0"`
+ APIToken *SourceK6Cloud `tfsdk:"api_token"`
+ OAuth20 *SourceMondayOAuth20 `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_monday_authorization_method_api_token.go b/internal/provider/type_source_monday_authorization_method_api_token.go
deleted file mode 100755
index e5270e6dc..000000000
--- a/internal/provider/type_source_monday_authorization_method_api_token.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMondayAuthorizationMethodAPIToken struct {
- APIToken types.String `tfsdk:"api_token"`
- AuthType types.String `tfsdk:"auth_type"`
-}
diff --git a/internal/provider/type_source_monday_authorization_method_o_auth20.go b/internal/provider/type_source_monday_o_auth20.go
old mode 100755
new mode 100644
similarity index 77%
rename from internal/provider/type_source_monday_authorization_method_o_auth20.go
rename to internal/provider/type_source_monday_o_auth20.go
index 6a906dd9c..32d474a8c
--- a/internal/provider/type_source_monday_authorization_method_o_auth20.go
+++ b/internal/provider/type_source_monday_o_auth20.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceMondayAuthorizationMethodOAuth20 struct {
+type SourceMondayOAuth20 struct {
AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
Subdomain types.String `tfsdk:"subdomain"`
diff --git a/internal/provider/type_source_mongodb.go b/internal/provider/type_source_mongodb.go
deleted file mode 100755
index 0f2f5d0d5..000000000
--- a/internal/provider/type_source_mongodb.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMongodb struct {
- AuthSource types.String `tfsdk:"auth_source"`
- Database types.String `tfsdk:"database"`
- InstanceType *SourceMongodbMongoDbInstanceType `tfsdk:"instance_type"`
- Password types.String `tfsdk:"password"`
- SourceType types.String `tfsdk:"source_type"`
- User types.String `tfsdk:"user"`
-}
diff --git a/internal/provider/type_source_mongodb1.go b/internal/provider/type_source_mongodb1.go
deleted file mode 100755
index 3c54b54cc..000000000
--- a/internal/provider/type_source_mongodb1.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMongodb1 struct {
- AuthSource types.String `tfsdk:"auth_source"`
- Database types.String `tfsdk:"database"`
- InstanceType *SourceMongodbMongoDbInstanceType1 `tfsdk:"instance_type"`
- Password types.String `tfsdk:"password"`
- SourceType types.String `tfsdk:"source_type"`
- User types.String `tfsdk:"user"`
-}
diff --git a/internal/provider/type_source_mongodb_internal_poc.go b/internal/provider/type_source_mongodb_internal_poc.go
old mode 100755
new mode 100644
index f668e4c2b..45b1137c4
--- a/internal/provider/type_source_mongodb_internal_poc.go
+++ b/internal/provider/type_source_mongodb_internal_poc.go
@@ -9,6 +9,5 @@ type SourceMongodbInternalPoc struct {
ConnectionString types.String `tfsdk:"connection_string"`
Password types.String `tfsdk:"password"`
ReplicaSet types.String `tfsdk:"replica_set"`
- SourceType types.String `tfsdk:"source_type"`
User types.String `tfsdk:"user"`
}
diff --git a/internal/provider/type_source_mongodb_mongo_db_instance_type.go b/internal/provider/type_source_mongodb_mongo_db_instance_type.go
deleted file mode 100755
index 36c37a0cf..000000000
--- a/internal/provider/type_source_mongodb_mongo_db_instance_type.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceMongodbMongoDbInstanceType struct {
- SourceMongodbMongoDBInstanceTypeMongoDBAtlas *SourceMongodbMongoDBInstanceTypeMongoDBAtlas `tfsdk:"source_mongodb_mongo_db_instance_type_mongo_db_atlas"`
- SourceMongodbMongoDbInstanceTypeReplicaSet *SourceMongodbMongoDbInstanceTypeReplicaSet `tfsdk:"source_mongodb_mongo_db_instance_type_replica_set"`
- SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance *SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance `tfsdk:"source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance"`
- SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas *SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas `tfsdk:"source_mongodb_update_mongo_db_instance_type_mongo_db_atlas"`
- SourceMongodbUpdateMongoDbInstanceTypeReplicaSet *SourceMongodbMongoDbInstanceTypeReplicaSet `tfsdk:"source_mongodb_update_mongo_db_instance_type_replica_set"`
- SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance *SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance `tfsdk:"source_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance"`
-}
diff --git a/internal/provider/type_source_mongodb_mongo_db_instance_type1.go b/internal/provider/type_source_mongodb_mongo_db_instance_type1.go
deleted file mode 100755
index 2a72f67f2..000000000
--- a/internal/provider/type_source_mongodb_mongo_db_instance_type1.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceMongodbMongoDbInstanceType1 struct {
- SourceMongodbMongoDBInstanceTypeMongoDBAtlas *SourceMongodbMongoDBInstanceTypeMongoDBAtlas1 `tfsdk:"source_mongodb_mongo_db_instance_type_mongo_db_atlas"`
- SourceMongodbMongoDbInstanceTypeReplicaSet *SourceMongodbMongoDbInstanceTypeReplicaSet `tfsdk:"source_mongodb_mongo_db_instance_type_replica_set"`
- SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance *SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance `tfsdk:"source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance"`
- SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas *SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas1 `tfsdk:"source_mongodb_update_mongo_db_instance_type_mongo_db_atlas"`
- SourceMongodbUpdateMongoDbInstanceTypeReplicaSet *SourceMongodbMongoDbInstanceTypeReplicaSet `tfsdk:"source_mongodb_update_mongo_db_instance_type_replica_set"`
- SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance *SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance `tfsdk:"source_mongodb_update_mongo_db_instance_type_standalone_mongo_db_instance"`
-}
diff --git a/internal/provider/type_source_mongodb_mongo_db_instance_type_mongo_db_atlas.go b/internal/provider/type_source_mongodb_mongo_db_instance_type_mongo_db_atlas.go
deleted file mode 100755
index b73505ec7..000000000
--- a/internal/provider/type_source_mongodb_mongo_db_instance_type_mongo_db_atlas.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMongodbMongoDBInstanceTypeMongoDBAtlas struct {
- ClusterURL types.String `tfsdk:"cluster_url"`
- Instance types.String `tfsdk:"instance"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_mongodb_mongo_db_instance_type_mongo_db_atlas1.go b/internal/provider/type_source_mongodb_mongo_db_instance_type_mongo_db_atlas1.go
deleted file mode 100755
index 6e21b7c24..000000000
--- a/internal/provider/type_source_mongodb_mongo_db_instance_type_mongo_db_atlas1.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMongodbMongoDBInstanceTypeMongoDBAtlas1 struct {
- ClusterURL types.String `tfsdk:"cluster_url"`
- Instance types.String `tfsdk:"instance"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atlas.go b/internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atlas.go
deleted file mode 100755
index ba0db31cf..000000000
--- a/internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atlas.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas struct {
- ClusterURL types.String `tfsdk:"cluster_url"`
- Instance types.String `tfsdk:"instance"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atlas1.go b/internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atlas1.go
deleted file mode 100755
index e228178fe..000000000
--- a/internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atlas1.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas1 struct {
- ClusterURL types.String `tfsdk:"cluster_url"`
- Instance types.String `tfsdk:"instance"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_mongodb_v2.go b/internal/provider/type_source_mongodb_v2.go
new file mode 100644
index 000000000..f94d1847b
--- /dev/null
+++ b/internal/provider/type_source_mongodb_v2.go
@@ -0,0 +1,12 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SourceMongodbV2 struct {
+ DatabaseConfig SourceMongodbV2ClusterType `tfsdk:"database_config"`
+ DiscoverSampleSize types.Int64 `tfsdk:"discover_sample_size"`
+ InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"`
+ QueueSize types.Int64 `tfsdk:"queue_size"`
+}
diff --git a/internal/provider/type_source_mongodb_v2_cluster_type.go b/internal/provider/type_source_mongodb_v2_cluster_type.go
new file mode 100644
index 000000000..18930c34b
--- /dev/null
+++ b/internal/provider/type_source_mongodb_v2_cluster_type.go
@@ -0,0 +1,8 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceMongodbV2ClusterType struct {
+ MongoDBAtlasReplicaSet *MongoDBAtlasReplicaSet `tfsdk:"mongo_db_atlas_replica_set"`
+ SelfManagedReplicaSet *SelfManagedReplicaSet `tfsdk:"self_managed_replica_set"`
+}
diff --git a/internal/provider/type_source_mssql.go b/internal/provider/type_source_mssql.go
old mode 100755
new mode 100644
index 164d63031..8c108b6a1
--- a/internal/provider/type_source_mssql.go
+++ b/internal/provider/type_source_mssql.go
@@ -5,15 +5,14 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceMssql struct {
- Database types.String `tfsdk:"database"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- ReplicationMethod *SourceMssqlUpdateMethod `tfsdk:"replication_method"`
- Schemas []types.String `tfsdk:"schemas"`
- SourceType types.String `tfsdk:"source_type"`
- SslMethod *SourceMssqlSSLMethod `tfsdk:"ssl_method"`
- TunnelMethod *SourceMssqlSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ Database types.String `tfsdk:"database"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ ReplicationMethod *SourceMssqlUpdateMethod `tfsdk:"replication_method"`
+ Schemas []types.String `tfsdk:"schemas"`
+ SslMethod *DestinationMssqlSSLMethod `tfsdk:"ssl_method"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_source_mssql_ssh_tunnel_method.go b/internal/provider/type_source_mssql_ssh_tunnel_method.go
deleted file mode 100755
index a349a8221..000000000
--- a/internal/provider/type_source_mssql_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceMssqlSSHTunnelMethod struct {
- SourceMssqlSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_mssql_ssh_tunnel_method_no_tunnel"`
- SourceMssqlSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_mssql_ssh_tunnel_method_password_authentication"`
- SourceMssqlSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_mssql_ssh_tunnel_method_ssh_key_authentication"`
- SourceMssqlUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_mssql_update_ssh_tunnel_method_no_tunnel"`
- SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_mssql_update_ssh_tunnel_method_password_authentication"`
- SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_mssql_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_source_mssql_ssl_method.go b/internal/provider/type_source_mssql_ssl_method.go
deleted file mode 100755
index 040ace3a7..000000000
--- a/internal/provider/type_source_mssql_ssl_method.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceMssqlSSLMethod struct {
- SourceMssqlSSLMethodEncryptedTrustServerCertificate *DestinationMssqlSSLMethodEncryptedTrustServerCertificate `tfsdk:"source_mssql_ssl_method_encrypted_trust_server_certificate"`
- SourceMssqlSSLMethodEncryptedVerifyCertificate *DestinationMssqlSSLMethodEncryptedVerifyCertificate `tfsdk:"source_mssql_ssl_method_encrypted_verify_certificate"`
- SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate *DestinationMssqlSSLMethodEncryptedTrustServerCertificate `tfsdk:"source_mssql_update_ssl_method_encrypted_trust_server_certificate"`
- SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate *DestinationMssqlSSLMethodEncryptedVerifyCertificate `tfsdk:"source_mssql_update_ssl_method_encrypted_verify_certificate"`
-}
diff --git a/internal/provider/type_source_mssql_update_method.go b/internal/provider/type_source_mssql_update_method.go
old mode 100755
new mode 100644
index 89e5f0546..2afb175b4
--- a/internal/provider/type_source_mssql_update_method.go
+++ b/internal/provider/type_source_mssql_update_method.go
@@ -3,8 +3,6 @@
package provider
type SourceMssqlUpdateMethod struct {
- SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC `tfsdk:"source_mssql_update_method_read_changes_using_change_data_capture_cdc"`
- SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_mssql_update_method_scan_changes_with_user_defined_cursor"`
- SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC `tfsdk:"source_mssql_update_update_method_read_changes_using_change_data_capture_cdc"`
- SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_mssql_update_update_method_scan_changes_with_user_defined_cursor"`
+ ReadChangesUsingChangeDataCaptureCDC *ReadChangesUsingChangeDataCaptureCDC `tfsdk:"read_changes_using_change_data_capture_cdc"`
+ ScanChangesWithUserDefinedCursor *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"scan_changes_with_user_defined_cursor"`
}
diff --git a/internal/provider/type_source_mssql_update_method_scan_changes_with_user_defined_cursor.go b/internal/provider/type_source_mssql_update_method_scan_changes_with_user_defined_cursor.go
deleted file mode 100755
index bfc9a2d02..000000000
--- a/internal/provider/type_source_mssql_update_method_scan_changes_with_user_defined_cursor.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor struct {
- Method types.String `tfsdk:"method"`
-}
diff --git a/internal/provider/type_source_my_hours.go b/internal/provider/type_source_my_hours.go
old mode 100755
new mode 100644
index e11e97a18..1f9d7fab0
--- a/internal/provider/type_source_my_hours.go
+++ b/internal/provider/type_source_my_hours.go
@@ -8,6 +8,5 @@ type SourceMyHours struct {
Email types.String `tfsdk:"email"`
LogsBatchSize types.Int64 `tfsdk:"logs_batch_size"`
Password types.String `tfsdk:"password"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_mysql.go b/internal/provider/type_source_mysql.go
old mode 100755
new mode 100644
index 7f2ec6e2d..256fa84a5
--- a/internal/provider/type_source_mysql.go
+++ b/internal/provider/type_source_mysql.go
@@ -5,14 +5,13 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceMysql struct {
- Database types.String `tfsdk:"database"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- ReplicationMethod SourceMysqlUpdateMethod `tfsdk:"replication_method"`
- SourceType types.String `tfsdk:"source_type"`
- SslMode *SourceMysqlSSLModes `tfsdk:"ssl_mode"`
- TunnelMethod *SourceMysqlSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ Database types.String `tfsdk:"database"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ ReplicationMethod SourceMysqlUpdateMethod `tfsdk:"replication_method"`
+ SslMode *SourceMysqlSSLModes `tfsdk:"ssl_mode"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_source_mysql_ssh_tunnel_method.go b/internal/provider/type_source_mysql_ssh_tunnel_method.go
deleted file mode 100755
index 230d7621b..000000000
--- a/internal/provider/type_source_mysql_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceMysqlSSHTunnelMethod struct {
- SourceMysqlSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_mysql_ssh_tunnel_method_no_tunnel"`
- SourceMysqlSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_mysql_ssh_tunnel_method_password_authentication"`
- SourceMysqlSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_mysql_ssh_tunnel_method_ssh_key_authentication"`
- SourceMysqlUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_mysql_update_ssh_tunnel_method_no_tunnel"`
- SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_mysql_update_ssh_tunnel_method_password_authentication"`
- SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_mysql_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_source_mysql_ssl_modes.go b/internal/provider/type_source_mysql_ssl_modes.go
old mode 100755
new mode 100644
index abd7d351d..24910ac72
--- a/internal/provider/type_source_mysql_ssl_modes.go
+++ b/internal/provider/type_source_mysql_ssl_modes.go
@@ -3,12 +3,8 @@
package provider
type SourceMysqlSSLModes struct {
- SourceMysqlSSLModesPreferred *SourceMysqlSSLModesPreferred `tfsdk:"source_mysql_ssl_modes_preferred"`
- SourceMysqlSSLModesRequired *SourceMysqlSSLModesRequired `tfsdk:"source_mysql_ssl_modes_required"`
- SourceMysqlSSLModesVerifyCA *SourceMysqlSSLModesVerifyCA `tfsdk:"source_mysql_ssl_modes_verify_ca"`
- SourceMysqlSSLModesVerifyIdentity *SourceMysqlSSLModesVerifyIdentity `tfsdk:"source_mysql_ssl_modes_verify_identity"`
- SourceMysqlUpdateSSLModesPreferred *SourceMysqlSSLModesPreferred `tfsdk:"source_mysql_update_ssl_modes_preferred"`
- SourceMysqlUpdateSSLModesRequired *SourceMysqlSSLModesRequired `tfsdk:"source_mysql_update_ssl_modes_required"`
- SourceMysqlUpdateSSLModesVerifyCA *SourceMysqlSSLModesVerifyCA `tfsdk:"source_mysql_update_ssl_modes_verify_ca"`
- SourceMysqlUpdateSSLModesVerifyIdentity *SourceMysqlSSLModesVerifyIdentity `tfsdk:"source_mysql_update_ssl_modes_verify_identity"`
+ Preferred *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"preferred"`
+ Required *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"required"`
+ VerifyCA *SourceMysqlVerifyCA `tfsdk:"verify_ca"`
+ VerifyIdentity *SourceMysqlVerifyCA `tfsdk:"verify_identity"`
}
diff --git a/internal/provider/type_source_mysql_update_method.go b/internal/provider/type_source_mysql_update_method.go
old mode 100755
new mode 100644
index 0285025fd..75a62b59e
--- a/internal/provider/type_source_mysql_update_method.go
+++ b/internal/provider/type_source_mysql_update_method.go
@@ -3,8 +3,6 @@
package provider
type SourceMysqlUpdateMethod struct {
- SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC *SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC `tfsdk:"source_mysql_update_method_read_changes_using_binary_log_cdc"`
- SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_mysql_update_method_scan_changes_with_user_defined_cursor"`
- SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC *SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC `tfsdk:"source_mysql_update_update_method_read_changes_using_binary_log_cdc"`
- SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_mysql_update_update_method_scan_changes_with_user_defined_cursor"`
+ ReadChangesUsingBinaryLogCDC *ReadChangesUsingBinaryLogCDC `tfsdk:"read_changes_using_binary_log_cdc"`
+ ScanChangesWithUserDefinedCursor *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"scan_changes_with_user_defined_cursor"`
}
diff --git a/internal/provider/type_source_mysql_ssl_modes_verify_ca.go b/internal/provider/type_source_mysql_verify_ca.go
old mode 100755
new mode 100644
similarity index 81%
rename from internal/provider/type_source_mysql_ssl_modes_verify_ca.go
rename to internal/provider/type_source_mysql_verify_ca.go
index c41817e5c..5ceef91dd
--- a/internal/provider/type_source_mysql_ssl_modes_verify_ca.go
+++ b/internal/provider/type_source_mysql_verify_ca.go
@@ -4,10 +4,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceMysqlSSLModesVerifyCA struct {
+type SourceMysqlVerifyCA struct {
CaCertificate types.String `tfsdk:"ca_certificate"`
ClientCertificate types.String `tfsdk:"client_certificate"`
ClientKey types.String `tfsdk:"client_key"`
ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
}
diff --git a/internal/provider/type_source_netsuite.go b/internal/provider/type_source_netsuite.go
old mode 100755
new mode 100644
index e548a45be..4fc0b1cf6
--- a/internal/provider/type_source_netsuite.go
+++ b/internal/provider/type_source_netsuite.go
@@ -9,7 +9,6 @@ type SourceNetsuite struct {
ConsumerSecret types.String `tfsdk:"consumer_secret"`
ObjectTypes []types.String `tfsdk:"object_types"`
Realm types.String `tfsdk:"realm"`
- SourceType types.String `tfsdk:"source_type"`
StartDatetime types.String `tfsdk:"start_datetime"`
TokenKey types.String `tfsdk:"token_key"`
TokenSecret types.String `tfsdk:"token_secret"`
diff --git a/internal/provider/type_source_notion.go b/internal/provider/type_source_notion.go
old mode 100755
new mode 100644
index 7c2b96619..7c59cd9b0
--- a/internal/provider/type_source_notion.go
+++ b/internal/provider/type_source_notion.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceNotion struct {
- Credentials *SourceNotionAuthenticateUsing `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ Credentials SourceNotionAuthenticationMethod `tfsdk:"credentials"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_notion_authenticate_using.go b/internal/provider/type_source_notion_authenticate_using.go
deleted file mode 100755
index ea00e6e68..000000000
--- a/internal/provider/type_source_notion_authenticate_using.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceNotionAuthenticateUsing struct {
- SourceNotionAuthenticateUsingAccessToken *SourceNotionAuthenticateUsingAccessToken `tfsdk:"source_notion_authenticate_using_access_token"`
- SourceNotionAuthenticateUsingOAuth20 *SourceNotionAuthenticateUsingOAuth20 `tfsdk:"source_notion_authenticate_using_o_auth2_0"`
- SourceNotionUpdateAuthenticateUsingAccessToken *SourceNotionAuthenticateUsingAccessToken `tfsdk:"source_notion_update_authenticate_using_access_token"`
- SourceNotionUpdateAuthenticateUsingOAuth20 *SourceNotionAuthenticateUsingOAuth20 `tfsdk:"source_notion_update_authenticate_using_o_auth2_0"`
-}
diff --git a/internal/provider/type_source_notion_authenticate_using_access_token.go b/internal/provider/type_source_notion_authenticate_using_access_token.go
deleted file mode 100755
index 49659af77..000000000
--- a/internal/provider/type_source_notion_authenticate_using_access_token.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceNotionAuthenticateUsingAccessToken struct {
- AuthType types.String `tfsdk:"auth_type"`
- Token types.String `tfsdk:"token"`
-}
diff --git a/internal/provider/type_source_notion_authentication_method.go b/internal/provider/type_source_notion_authentication_method.go
new file mode 100644
index 000000000..5e74845c9
--- /dev/null
+++ b/internal/provider/type_source_notion_authentication_method.go
@@ -0,0 +1,8 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+type SourceNotionAuthenticationMethod struct {
+ AccessToken *DestinationMilvusAPIToken `tfsdk:"access_token"`
+ OAuth20 *SourceNotionOAuth20 `tfsdk:"o_auth20"`
+}
diff --git a/internal/provider/type_source_notion_authenticate_using_o_auth20.go b/internal/provider/type_source_notion_o_auth20.go
old mode 100755
new mode 100644
similarity index 75%
rename from internal/provider/type_source_notion_authenticate_using_o_auth20.go
rename to internal/provider/type_source_notion_o_auth20.go
index 80d0e442e..98d7fbea2
--- a/internal/provider/type_source_notion_authenticate_using_o_auth20.go
+++ b/internal/provider/type_source_notion_o_auth20.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceNotionAuthenticateUsingOAuth20 struct {
+type SourceNotionOAuth20 struct {
AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
}
diff --git a/internal/provider/type_source_nytimes.go b/internal/provider/type_source_nytimes.go
old mode 100755
new mode 100644
index 4fda926f8..e64226d53
--- a/internal/provider/type_source_nytimes.go
+++ b/internal/provider/type_source_nytimes.go
@@ -5,10 +5,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceNytimes struct {
- APIKey types.String `tfsdk:"api_key"`
- EndDate types.String `tfsdk:"end_date"`
- Period types.Int64 `tfsdk:"period"`
- ShareType types.String `tfsdk:"share_type"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIKey types.String `tfsdk:"api_key"`
+ EndDate types.String `tfsdk:"end_date"`
+ Period types.Int64 `tfsdk:"period"`
+ ShareType types.String `tfsdk:"share_type"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_okta.go b/internal/provider/type_source_okta.go
old mode 100755
new mode 100644
index f52ac7c79..76ba79283
--- a/internal/provider/type_source_okta.go
+++ b/internal/provider/type_source_okta.go
@@ -7,6 +7,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceOkta struct {
Credentials *SourceOktaAuthorizationMethod `tfsdk:"credentials"`
Domain types.String `tfsdk:"domain"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_okta_authorization_method.go b/internal/provider/type_source_okta_authorization_method.go
old mode 100755
new mode 100644
index 495ddf36d..c75b35b43
--- a/internal/provider/type_source_okta_authorization_method.go
+++ b/internal/provider/type_source_okta_authorization_method.go
@@ -3,8 +3,6 @@
package provider
type SourceOktaAuthorizationMethod struct {
- SourceOktaAuthorizationMethodAPIToken *SourceMondayAuthorizationMethodAPIToken `tfsdk:"source_okta_authorization_method_api_token"`
- SourceOktaAuthorizationMethodOAuth20 *SourceOktaAuthorizationMethodOAuth20 `tfsdk:"source_okta_authorization_method_o_auth2_0"`
- SourceOktaUpdateAuthorizationMethodAPIToken *SourceMondayAuthorizationMethodAPIToken `tfsdk:"source_okta_update_authorization_method_api_token"`
- SourceOktaUpdateAuthorizationMethodOAuth20 *SourceOktaAuthorizationMethodOAuth20 `tfsdk:"source_okta_update_authorization_method_o_auth2_0"`
+ APIToken *SourceK6Cloud `tfsdk:"api_token"`
+ OAuth20 *DestinationGoogleSheetsAuthenticationViaGoogleOAuth `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_omnisend.go b/internal/provider/type_source_omnisend.go
deleted file mode 100755
index 6bc740e31..000000000
--- a/internal/provider/type_source_omnisend.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceOmnisend struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_onesignal.go b/internal/provider/type_source_onesignal.go
old mode 100755
new mode 100644
index 9520cda81..279bb6036
--- a/internal/provider/type_source_onesignal.go
+++ b/internal/provider/type_source_onesignal.go
@@ -5,9 +5,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceOnesignal struct {
- Applications []SourceOnesignalApplications `tfsdk:"applications"`
- OutcomeNames types.String `tfsdk:"outcome_names"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- UserAuthKey types.String `tfsdk:"user_auth_key"`
+ Applications []Applications `tfsdk:"applications"`
+ OutcomeNames types.String `tfsdk:"outcome_names"`
+ StartDate types.String `tfsdk:"start_date"`
+ UserAuthKey types.String `tfsdk:"user_auth_key"`
}
diff --git a/internal/provider/type_source_oracle.go b/internal/provider/type_source_oracle.go
old mode 100755
new mode 100644
index c74945aa8..52a97070c
--- a/internal/provider/type_source_oracle.go
+++ b/internal/provider/type_source_oracle.go
@@ -5,14 +5,13 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceOracle struct {
- ConnectionData *SourceOracleConnectBy `tfsdk:"connection_data"`
- Encryption SourceOracleEncryption `tfsdk:"encryption"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- Schemas []types.String `tfsdk:"schemas"`
- SourceType types.String `tfsdk:"source_type"`
- TunnelMethod *SourceOracleSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ ConnectionData *SourceOracleConnectBy `tfsdk:"connection_data"`
+ Encryption SourceOracleEncryption `tfsdk:"encryption"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ Schemas []types.String `tfsdk:"schemas"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_source_oracle_connect_by.go b/internal/provider/type_source_oracle_connect_by.go
old mode 100755
new mode 100644
index 03dda5c77..587789fd4
--- a/internal/provider/type_source_oracle_connect_by.go
+++ b/internal/provider/type_source_oracle_connect_by.go
@@ -3,8 +3,6 @@
package provider
type SourceOracleConnectBy struct {
- SourceOracleConnectByServiceName *SourceOracleConnectByServiceName `tfsdk:"source_oracle_connect_by_service_name"`
- SourceOracleConnectBySystemIDSID *SourceOracleConnectBySystemIDSID `tfsdk:"source_oracle_connect_by_system_id_sid"`
- SourceOracleUpdateConnectByServiceName *SourceOracleConnectByServiceName `tfsdk:"source_oracle_update_connect_by_service_name"`
- SourceOracleUpdateConnectBySystemIDSID *SourceOracleConnectBySystemIDSID `tfsdk:"source_oracle_update_connect_by_system_id_sid"`
+ ServiceName *ServiceName `tfsdk:"service_name"`
+ SystemIDSID *SystemIDSID `tfsdk:"system_idsid"`
}
diff --git a/internal/provider/type_source_oracle_connect_by_service_name.go b/internal/provider/type_source_oracle_connect_by_service_name.go
deleted file mode 100755
index 39d394cc5..000000000
--- a/internal/provider/type_source_oracle_connect_by_service_name.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceOracleConnectByServiceName struct {
- ConnectionType types.String `tfsdk:"connection_type"`
- ServiceName types.String `tfsdk:"service_name"`
-}
diff --git a/internal/provider/type_source_oracle_connect_by_system_idsid.go b/internal/provider/type_source_oracle_connect_by_system_idsid.go
deleted file mode 100755
index 8b0bc357f..000000000
--- a/internal/provider/type_source_oracle_connect_by_system_idsid.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceOracleConnectBySystemIDSID struct {
- ConnectionType types.String `tfsdk:"connection_type"`
- Sid types.String `tfsdk:"sid"`
-}
diff --git a/internal/provider/type_source_oracle_encryption.go b/internal/provider/type_source_oracle_encryption.go
old mode 100755
new mode 100644
index 08c0128d2..589a35250
--- a/internal/provider/type_source_oracle_encryption.go
+++ b/internal/provider/type_source_oracle_encryption.go
@@ -3,8 +3,6 @@
package provider
type SourceOracleEncryption struct {
- SourceOracleEncryptionNativeNetworkEncryptionNNE *SourceOracleEncryptionNativeNetworkEncryptionNNE `tfsdk:"source_oracle_encryption_native_network_encryption_nne"`
- SourceOracleEncryptionTLSEncryptedVerifyCertificate *SourceOracleEncryptionTLSEncryptedVerifyCertificate `tfsdk:"source_oracle_encryption_tls_encrypted_verify_certificate"`
- SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE *SourceOracleEncryptionNativeNetworkEncryptionNNE `tfsdk:"source_oracle_update_encryption_native_network_encryption_nne"`
- SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate *SourceOracleEncryptionTLSEncryptedVerifyCertificate `tfsdk:"source_oracle_update_encryption_tls_encrypted_verify_certificate"`
+ NativeNetworkEncryptionNNE *NativeNetworkEncryptionNNE `tfsdk:"native_network_encryption_nne"`
+ TLSEncryptedVerifyCertificate *TLSEncryptedVerifyCertificate `tfsdk:"tls_encrypted_verify_certificate"`
}
diff --git a/internal/provider/type_source_oracle_encryption_tls_encrypted_verify_certificate.go b/internal/provider/type_source_oracle_encryption_tls_encrypted_verify_certificate.go
deleted file mode 100755
index dd927e56e..000000000
--- a/internal/provider/type_source_oracle_encryption_tls_encrypted_verify_certificate.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceOracleEncryptionTLSEncryptedVerifyCertificate struct {
- EncryptionMethod types.String `tfsdk:"encryption_method"`
- SslCertificate types.String `tfsdk:"ssl_certificate"`
-}
diff --git a/internal/provider/type_source_oracle_ssh_tunnel_method.go b/internal/provider/type_source_oracle_ssh_tunnel_method.go
deleted file mode 100755
index e39f4ca4e..000000000
--- a/internal/provider/type_source_oracle_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceOracleSSHTunnelMethod struct {
- SourceOracleSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_oracle_ssh_tunnel_method_no_tunnel"`
- SourceOracleSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_oracle_ssh_tunnel_method_password_authentication"`
- SourceOracleSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_oracle_ssh_tunnel_method_ssh_key_authentication"`
- SourceOracleUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_oracle_update_ssh_tunnel_method_no_tunnel"`
- SourceOracleUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_oracle_update_ssh_tunnel_method_password_authentication"`
- SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_oracle_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_source_orb.go b/internal/provider/type_source_orb.go
old mode 100755
new mode 100644
index 95723d0e4..8aacc8828
--- a/internal/provider/type_source_orb.go
+++ b/internal/provider/type_source_orb.go
@@ -9,7 +9,6 @@ type SourceOrb struct {
LookbackWindowDays types.Int64 `tfsdk:"lookback_window_days"`
NumericEventPropertiesKeys []types.String `tfsdk:"numeric_event_properties_keys"`
PlanID types.String `tfsdk:"plan_id"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
StringEventPropertiesKeys []types.String `tfsdk:"string_event_properties_keys"`
SubscriptionUsageGroupingKey types.String `tfsdk:"subscription_usage_grouping_key"`
diff --git a/internal/provider/type_source_orbit.go b/internal/provider/type_source_orbit.go
old mode 100755
new mode 100644
index 92a125e08..70156ae74
--- a/internal/provider/type_source_orbit.go
+++ b/internal/provider/type_source_orbit.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceOrbit struct {
- APIToken types.String `tfsdk:"api_token"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- Workspace types.String `tfsdk:"workspace"`
+ APIToken types.String `tfsdk:"api_token"`
+ StartDate types.String `tfsdk:"start_date"`
+ Workspace types.String `tfsdk:"workspace"`
}
diff --git a/internal/provider/type_source_outbrain_amplify.go b/internal/provider/type_source_outbrain_amplify.go
old mode 100755
new mode 100644
index 0817f2ac6..1eb937316
--- a/internal/provider/type_source_outbrain_amplify.go
+++ b/internal/provider/type_source_outbrain_amplify.go
@@ -9,6 +9,5 @@ type SourceOutbrainAmplify struct {
EndDate types.String `tfsdk:"end_date"`
GeoLocationBreakdown types.String `tfsdk:"geo_location_breakdown"`
ReportGranularity types.String `tfsdk:"report_granularity"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_outbrain_amplify_authentication_method.go b/internal/provider/type_source_outbrain_amplify_authentication_method.go
old mode 100755
new mode 100644
index 1b065b5be..e99f763b4
--- a/internal/provider/type_source_outbrain_amplify_authentication_method.go
+++ b/internal/provider/type_source_outbrain_amplify_authentication_method.go
@@ -3,8 +3,6 @@
package provider
type SourceOutbrainAmplifyAuthenticationMethod struct {
- SourceOutbrainAmplifyAuthenticationMethodAccessToken *SourceOutbrainAmplifyAuthenticationMethodAccessToken `tfsdk:"source_outbrain_amplify_authentication_method_access_token"`
- SourceOutbrainAmplifyAuthenticationMethodUsernamePassword *SourceOutbrainAmplifyAuthenticationMethodUsernamePassword `tfsdk:"source_outbrain_amplify_authentication_method_username_password"`
- SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken *SourceOutbrainAmplifyAuthenticationMethodAccessToken `tfsdk:"source_outbrain_amplify_update_authentication_method_access_token"`
- SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword *SourceOutbrainAmplifyAuthenticationMethodUsernamePassword `tfsdk:"source_outbrain_amplify_update_authentication_method_username_password"`
+ AccessToken *OAuth2AccessToken `tfsdk:"access_token"`
+ UsernamePassword *UsernamePassword `tfsdk:"username_password"`
}
diff --git a/internal/provider/type_source_outbrain_amplify_authentication_method_access_token.go b/internal/provider/type_source_outbrain_amplify_authentication_method_access_token.go
deleted file mode 100755
index aed3a6f3b..000000000
--- a/internal/provider/type_source_outbrain_amplify_authentication_method_access_token.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceOutbrainAmplifyAuthenticationMethodAccessToken struct {
- AccessToken types.String `tfsdk:"access_token"`
- Type types.String `tfsdk:"type"`
-}
diff --git a/internal/provider/type_source_outbrain_amplify_authentication_method_username_password.go b/internal/provider/type_source_outbrain_amplify_authentication_method_username_password.go
deleted file mode 100755
index ccf767197..000000000
--- a/internal/provider/type_source_outbrain_amplify_authentication_method_username_password.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceOutbrainAmplifyAuthenticationMethodUsernamePassword struct {
- Password types.String `tfsdk:"password"`
- Type types.String `tfsdk:"type"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/internal/provider/type_source_outreach.go b/internal/provider/type_source_outreach.go
old mode 100755
new mode 100644
index 5c240eb91..c02654039
--- a/internal/provider/type_source_outreach.go
+++ b/internal/provider/type_source_outreach.go
@@ -9,6 +9,5 @@ type SourceOutreach struct {
ClientSecret types.String `tfsdk:"client_secret"`
RedirectURI types.String `tfsdk:"redirect_uri"`
RefreshToken types.String `tfsdk:"refresh_token"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_paypal_transaction.go b/internal/provider/type_source_paypal_transaction.go
old mode 100755
new mode 100644
index c4cf7e726..c9f7c5eaf
--- a/internal/provider/type_source_paypal_transaction.go
+++ b/internal/provider/type_source_paypal_transaction.go
@@ -9,6 +9,6 @@ type SourcePaypalTransaction struct {
ClientSecret types.String `tfsdk:"client_secret"`
IsSandbox types.Bool `tfsdk:"is_sandbox"`
RefreshToken types.String `tfsdk:"refresh_token"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
+ TimeWindow types.Int64 `tfsdk:"time_window"`
}
diff --git a/internal/provider/type_source_paystack.go b/internal/provider/type_source_paystack.go
old mode 100755
new mode 100644
index dd6f9aa08..2fa052ccd
--- a/internal/provider/type_source_paystack.go
+++ b/internal/provider/type_source_paystack.go
@@ -7,6 +7,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourcePaystack struct {
LookbackWindowDays types.Int64 `tfsdk:"lookback_window_days"`
SecretKey types.String `tfsdk:"secret_key"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_pendo.go b/internal/provider/type_source_pendo.go
deleted file mode 100755
index 62dc1d2b9..000000000
--- a/internal/provider/type_source_pendo.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePendo struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_persistiq.go b/internal/provider/type_source_persistiq.go
deleted file mode 100755
index a1d9dcfac..000000000
--- a/internal/provider/type_source_persistiq.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePersistiq struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_pexels_api.go b/internal/provider/type_source_pexels_api.go
old mode 100755
new mode 100644
index 1900669a3..f36fb120b
--- a/internal/provider/type_source_pexels_api.go
+++ b/internal/provider/type_source_pexels_api.go
@@ -11,5 +11,4 @@ type SourcePexelsAPI struct {
Orientation types.String `tfsdk:"orientation"`
Query types.String `tfsdk:"query"`
Size types.String `tfsdk:"size"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_pinterest.go b/internal/provider/type_source_pinterest.go
old mode 100755
new mode 100644
index a7de56f35..6b984a63d
--- a/internal/provider/type_source_pinterest.go
+++ b/internal/provider/type_source_pinterest.go
@@ -5,8 +5,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourcePinterest struct {
- Credentials *SourcePinterestAuthorizationMethod `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- Status []types.String `tfsdk:"status"`
+ Credentials *SourcePinterestAuthorizationMethod `tfsdk:"credentials"`
+ CustomReports []ReportConfig `tfsdk:"custom_reports"`
+ StartDate types.String `tfsdk:"start_date"`
+ Status []types.String `tfsdk:"status"`
}
diff --git a/internal/provider/type_source_pinterest_authorization_method.go b/internal/provider/type_source_pinterest_authorization_method.go
old mode 100755
new mode 100644
index ca65c40d2..ca174b4bc
--- a/internal/provider/type_source_pinterest_authorization_method.go
+++ b/internal/provider/type_source_pinterest_authorization_method.go
@@ -3,8 +3,5 @@
package provider
type SourcePinterestAuthorizationMethod struct {
- SourcePinterestAuthorizationMethodAccessToken *SourceLinkedinAdsAuthenticationAccessToken `tfsdk:"source_pinterest_authorization_method_access_token"`
- SourcePinterestAuthorizationMethodOAuth20 *SourcePinterestAuthorizationMethodOAuth20 `tfsdk:"source_pinterest_authorization_method_o_auth2_0"`
- SourcePinterestUpdateAuthorizationMethodAccessToken *SourceLinkedinAdsAuthenticationAccessToken `tfsdk:"source_pinterest_update_authorization_method_access_token"`
- SourcePinterestUpdateAuthorizationMethodOAuth20 *SourcePinterestAuthorizationMethodOAuth20 `tfsdk:"source_pinterest_update_authorization_method_o_auth2_0"`
+ OAuth20 *AuthenticateViaLeverOAuth `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_pinterest_authorization_method_o_auth20.go b/internal/provider/type_source_pinterest_authorization_method_o_auth20.go
deleted file mode 100755
index a7af7f952..000000000
--- a/internal/provider/type_source_pinterest_authorization_method_o_auth20.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePinterestAuthorizationMethodOAuth20 struct {
- AuthMethod types.String `tfsdk:"auth_method"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
-}
diff --git a/internal/provider/type_source_pipedrive.go b/internal/provider/type_source_pipedrive.go
old mode 100755
new mode 100644
index c2e016178..e7b76931b
--- a/internal/provider/type_source_pipedrive.go
+++ b/internal/provider/type_source_pipedrive.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourcePipedrive struct {
- Authorization *SourcePipedriveAPIKeyAuthentication `tfsdk:"authorization"`
- ReplicationStartDate types.String `tfsdk:"replication_start_date"`
- SourceType types.String `tfsdk:"source_type"`
+ APIToken types.String `tfsdk:"api_token"`
+ ReplicationStartDate types.String `tfsdk:"replication_start_date"`
}
diff --git a/internal/provider/type_source_pocket.go b/internal/provider/type_source_pocket.go
old mode 100755
new mode 100644
index 2e2673f3f..68a2a5800
--- a/internal/provider/type_source_pocket.go
+++ b/internal/provider/type_source_pocket.go
@@ -14,7 +14,6 @@ type SourcePocket struct {
Search types.String `tfsdk:"search"`
Since types.String `tfsdk:"since"`
Sort types.String `tfsdk:"sort"`
- SourceType types.String `tfsdk:"source_type"`
State types.String `tfsdk:"state"`
Tag types.String `tfsdk:"tag"`
}
diff --git a/internal/provider/type_source_pokeapi.go b/internal/provider/type_source_pokeapi.go
old mode 100755
new mode 100644
index 97b154641..6a5325e42
--- a/internal/provider/type_source_pokeapi.go
+++ b/internal/provider/type_source_pokeapi.go
@@ -6,5 +6,4 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourcePokeapi struct {
PokemonName types.String `tfsdk:"pokemon_name"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_polygon_stock_api.go b/internal/provider/type_source_polygon_stock_api.go
old mode 100755
new mode 100644
index b14bdb030..e11edce23
--- a/internal/provider/type_source_polygon_stock_api.go
+++ b/internal/provider/type_source_polygon_stock_api.go
@@ -11,7 +11,6 @@ type SourcePolygonStockAPI struct {
Limit types.Int64 `tfsdk:"limit"`
Multiplier types.Int64 `tfsdk:"multiplier"`
Sort types.String `tfsdk:"sort"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
StocksTicker types.String `tfsdk:"stocks_ticker"`
Timespan types.String `tfsdk:"timespan"`
diff --git a/internal/provider/type_source_postgres.go b/internal/provider/type_source_postgres.go
old mode 100755
new mode 100644
index 3ca85ad2e..132eec490
--- a/internal/provider/type_source_postgres.go
+++ b/internal/provider/type_source_postgres.go
@@ -5,15 +5,14 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourcePostgres struct {
- Database types.String `tfsdk:"database"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- ReplicationMethod *SourcePostgresUpdateMethod `tfsdk:"replication_method"`
- Schemas []types.String `tfsdk:"schemas"`
- SourceType types.String `tfsdk:"source_type"`
- SslMode *SourcePostgresSSLModes `tfsdk:"ssl_mode"`
- TunnelMethod *SourcePostgresSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
+ Database types.String `tfsdk:"database"`
+ Host types.String `tfsdk:"host"`
+ JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
+ Password types.String `tfsdk:"password"`
+ Port types.Int64 `tfsdk:"port"`
+ ReplicationMethod *SourcePostgresUpdateMethod `tfsdk:"replication_method"`
+ Schemas []types.String `tfsdk:"schemas"`
+ SslMode *SourceAlloydbSSLModes `tfsdk:"ssl_mode"`
+ TunnelMethod *DestinationClickhouseSSHTunnelMethod `tfsdk:"tunnel_method"`
+ Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_source_postgres1.go b/internal/provider/type_source_postgres1.go
deleted file mode 100755
index c9deba721..000000000
--- a/internal/provider/type_source_postgres1.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgres1 struct {
- Database types.String `tfsdk:"database"`
- Host types.String `tfsdk:"host"`
- JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
- Password types.String `tfsdk:"password"`
- Port types.Int64 `tfsdk:"port"`
- ReplicationMethod *SourcePostgresUpdateMethod1 `tfsdk:"replication_method"`
- Schemas []types.String `tfsdk:"schemas"`
- SourceType types.String `tfsdk:"source_type"`
- SslMode *SourcePostgresSSLModes1 `tfsdk:"ssl_mode"`
- TunnelMethod *SourcePostgresSSHTunnelMethod `tfsdk:"tunnel_method"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/internal/provider/type_source_postgres_ssh_tunnel_method.go b/internal/provider/type_source_postgres_ssh_tunnel_method.go
deleted file mode 100755
index 9906904d7..000000000
--- a/internal/provider/type_source_postgres_ssh_tunnel_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourcePostgresSSHTunnelMethod struct {
- SourcePostgresSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_postgres_ssh_tunnel_method_no_tunnel"`
- SourcePostgresSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_postgres_ssh_tunnel_method_password_authentication"`
- SourcePostgresSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_postgres_ssh_tunnel_method_ssh_key_authentication"`
- SourcePostgresUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel `tfsdk:"source_postgres_update_ssh_tunnel_method_no_tunnel"`
- SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication `tfsdk:"source_postgres_update_ssh_tunnel_method_password_authentication"`
- SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication `tfsdk:"source_postgres_update_ssh_tunnel_method_ssh_key_authentication"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes.go b/internal/provider/type_source_postgres_ssl_modes.go
deleted file mode 100755
index e9583a7ff..000000000
--- a/internal/provider/type_source_postgres_ssl_modes.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourcePostgresSSLModes struct {
- SourcePostgresSSLModesAllow *SourcePostgresSSLModesAllow `tfsdk:"source_postgres_ssl_modes_allow"`
- SourcePostgresSSLModesDisable *SourcePostgresSSLModesDisable `tfsdk:"source_postgres_ssl_modes_disable"`
- SourcePostgresSSLModesPrefer *SourcePostgresSSLModesPrefer `tfsdk:"source_postgres_ssl_modes_prefer"`
- SourcePostgresSSLModesRequire *SourcePostgresSSLModesRequire `tfsdk:"source_postgres_ssl_modes_require"`
- SourcePostgresSSLModesVerifyCa *SourcePostgresSSLModesVerifyCa `tfsdk:"source_postgres_ssl_modes_verify_ca"`
- SourcePostgresSSLModesVerifyFull *SourcePostgresSSLModesVerifyFull `tfsdk:"source_postgres_ssl_modes_verify_full"`
- SourcePostgresUpdateSSLModesAllow *SourcePostgresUpdateSSLModesAllow `tfsdk:"source_postgres_update_ssl_modes_allow"`
- SourcePostgresUpdateSSLModesDisable *SourcePostgresUpdateSSLModesDisable `tfsdk:"source_postgres_update_ssl_modes_disable"`
- SourcePostgresUpdateSSLModesPrefer *SourcePostgresUpdateSSLModesPrefer `tfsdk:"source_postgres_update_ssl_modes_prefer"`
- SourcePostgresUpdateSSLModesRequire *SourcePostgresUpdateSSLModesRequire `tfsdk:"source_postgres_update_ssl_modes_require"`
- SourcePostgresUpdateSSLModesVerifyCa *SourcePostgresUpdateSSLModesVerifyCa `tfsdk:"source_postgres_update_ssl_modes_verify_ca"`
- SourcePostgresUpdateSSLModesVerifyFull *SourcePostgresUpdateSSLModesVerifyFull `tfsdk:"source_postgres_update_ssl_modes_verify_full"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes1.go b/internal/provider/type_source_postgres_ssl_modes1.go
deleted file mode 100755
index e40b71766..000000000
--- a/internal/provider/type_source_postgres_ssl_modes1.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourcePostgresSSLModes1 struct {
- SourcePostgresSSLModesAllow *SourcePostgresSSLModesAllow1 `tfsdk:"source_postgres_ssl_modes_allow"`
- SourcePostgresSSLModesDisable *SourcePostgresSSLModesDisable1 `tfsdk:"source_postgres_ssl_modes_disable"`
- SourcePostgresSSLModesPrefer *SourcePostgresSSLModesPrefer1 `tfsdk:"source_postgres_ssl_modes_prefer"`
- SourcePostgresSSLModesRequire *SourcePostgresSSLModesRequire1 `tfsdk:"source_postgres_ssl_modes_require"`
- SourcePostgresSSLModesVerifyCa *SourcePostgresSSLModesVerifyCa1 `tfsdk:"source_postgres_ssl_modes_verify_ca"`
- SourcePostgresSSLModesVerifyFull *SourcePostgresSSLModesVerifyFull1 `tfsdk:"source_postgres_ssl_modes_verify_full"`
- SourcePostgresUpdateSSLModesAllow *SourcePostgresUpdateSSLModesAllow1 `tfsdk:"source_postgres_update_ssl_modes_allow"`
- SourcePostgresUpdateSSLModesDisable *SourcePostgresUpdateSSLModesDisable1 `tfsdk:"source_postgres_update_ssl_modes_disable"`
- SourcePostgresUpdateSSLModesPrefer *SourcePostgresUpdateSSLModesPrefer1 `tfsdk:"source_postgres_update_ssl_modes_prefer"`
- SourcePostgresUpdateSSLModesRequire *SourcePostgresUpdateSSLModesRequire1 `tfsdk:"source_postgres_update_ssl_modes_require"`
- SourcePostgresUpdateSSLModesVerifyCa *SourcePostgresUpdateSSLModesVerifyCa1 `tfsdk:"source_postgres_update_ssl_modes_verify_ca"`
- SourcePostgresUpdateSSLModesVerifyFull *SourcePostgresUpdateSSLModesVerifyFull1 `tfsdk:"source_postgres_update_ssl_modes_verify_full"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_allow.go b/internal/provider/type_source_postgres_ssl_modes_allow.go
deleted file mode 100755
index c3cdf2e8f..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_allow.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesAllow struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_allow1.go b/internal/provider/type_source_postgres_ssl_modes_allow1.go
deleted file mode 100755
index 1b431d244..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_allow1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesAllow1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_disable.go b/internal/provider/type_source_postgres_ssl_modes_disable.go
deleted file mode 100755
index dcd710daa..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_disable.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesDisable struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_disable1.go b/internal/provider/type_source_postgres_ssl_modes_disable1.go
deleted file mode 100755
index e028a1e12..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_disable1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesDisable1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_prefer.go b/internal/provider/type_source_postgres_ssl_modes_prefer.go
deleted file mode 100755
index dec630456..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_prefer.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesPrefer struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_prefer1.go b/internal/provider/type_source_postgres_ssl_modes_prefer1.go
deleted file mode 100755
index 49ede7980..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_prefer1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesPrefer1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_require.go b/internal/provider/type_source_postgres_ssl_modes_require.go
deleted file mode 100755
index 0a931028b..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_require.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesRequire struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_require1.go b/internal/provider/type_source_postgres_ssl_modes_require1.go
deleted file mode 100755
index 59fbd0e04..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_require1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesRequire1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_verify_ca.go b/internal/provider/type_source_postgres_ssl_modes_verify_ca.go
deleted file mode 100755
index a69d6fd1f..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_verify_ca.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesVerifyCa struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_verify_ca1.go b/internal/provider/type_source_postgres_ssl_modes_verify_ca1.go
deleted file mode 100755
index 33053b8f5..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_verify_ca1.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesVerifyCa1 struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_verify_full.go b/internal/provider/type_source_postgres_ssl_modes_verify_full.go
deleted file mode 100755
index ed4060e54..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_verify_full.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesVerifyFull struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_ssl_modes_verify_full1.go b/internal/provider/type_source_postgres_ssl_modes_verify_full1.go
deleted file mode 100755
index 5d38cbaf0..000000000
--- a/internal/provider/type_source_postgres_ssl_modes_verify_full1.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresSSLModesVerifyFull1 struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_method.go b/internal/provider/type_source_postgres_update_method.go
old mode 100755
new mode 100644
index 717f7ed90..a59ca40a7
--- a/internal/provider/type_source_postgres_update_method.go
+++ b/internal/provider/type_source_postgres_update_method.go
@@ -3,10 +3,7 @@
package provider
type SourcePostgresUpdateMethod struct {
- SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn `tfsdk:"source_postgres_update_method_detect_changes_with_xmin_system_column"`
- SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC `tfsdk:"source_postgres_update_method_read_changes_using_write_ahead_log_cdc"`
- SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_postgres_update_method_scan_changes_with_user_defined_cursor"`
- SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn `tfsdk:"source_postgres_update_update_method_detect_changes_with_xmin_system_column"`
- SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC `tfsdk:"source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc"`
- SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_postgres_update_update_method_scan_changes_with_user_defined_cursor"`
+ DetectChangesWithXminSystemColumn *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"detect_changes_with_xmin_system_column"`
+ ReadChangesUsingWriteAheadLogCDC *LogicalReplicationCDC `tfsdk:"read_changes_using_write_ahead_log_cdc"`
+ ScanChangesWithUserDefinedCursor *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"scan_changes_with_user_defined_cursor"`
}
diff --git a/internal/provider/type_source_postgres_update_method1.go b/internal/provider/type_source_postgres_update_method1.go
deleted file mode 100755
index 1969f03f3..000000000
--- a/internal/provider/type_source_postgres_update_method1.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourcePostgresUpdateMethod1 struct {
- SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn `tfsdk:"source_postgres_update_method_detect_changes_with_xmin_system_column"`
- SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC1 `tfsdk:"source_postgres_update_method_read_changes_using_write_ahead_log_cdc"`
- SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_postgres_update_method_scan_changes_with_user_defined_cursor"`
- SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn `tfsdk:"source_postgres_update_update_method_detect_changes_with_xmin_system_column"`
- SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC1 `tfsdk:"source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc"`
- SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_postgres_update_update_method_scan_changes_with_user_defined_cursor"`
-}
diff --git a/internal/provider/type_source_postgres_update_method_detect_changes_with_xmin_system_column.go b/internal/provider/type_source_postgres_update_method_detect_changes_with_xmin_system_column.go
deleted file mode 100755
index 9a3b9914e..000000000
--- a/internal/provider/type_source_postgres_update_method_detect_changes_with_xmin_system_column.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn struct {
- Method types.String `tfsdk:"method"`
-}
diff --git a/internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc.go b/internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc.go
deleted file mode 100755
index 799c04180..000000000
--- a/internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC struct {
- InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"`
- LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"`
- Method types.String `tfsdk:"method"`
- Plugin types.String `tfsdk:"plugin"`
- Publication types.String `tfsdk:"publication"`
- QueueSize types.Int64 `tfsdk:"queue_size"`
- ReplicationSlot types.String `tfsdk:"replication_slot"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc1.go b/internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc1.go
deleted file mode 100755
index 4cf9424f8..000000000
--- a/internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc1.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC1 struct {
- InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"`
- LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"`
- Method types.String `tfsdk:"method"`
- Plugin types.String `tfsdk:"plugin"`
- Publication types.String `tfsdk:"publication"`
- QueueSize types.Int64 `tfsdk:"queue_size"`
- ReplicationSlot types.String `tfsdk:"replication_slot"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_method_scan_changes_with_user_defined_cursor.go b/internal/provider/type_source_postgres_update_method_scan_changes_with_user_defined_cursor.go
deleted file mode 100755
index c0bf69744..000000000
--- a/internal/provider/type_source_postgres_update_method_scan_changes_with_user_defined_cursor.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor struct {
- Method types.String `tfsdk:"method"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_allow.go b/internal/provider/type_source_postgres_update_ssl_modes_allow.go
deleted file mode 100755
index 304cf8f7c..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_allow.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesAllow struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_allow1.go b/internal/provider/type_source_postgres_update_ssl_modes_allow1.go
deleted file mode 100755
index 4c25eff2c..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_allow1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesAllow1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_disable.go b/internal/provider/type_source_postgres_update_ssl_modes_disable.go
deleted file mode 100755
index 296064fc2..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_disable.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesDisable struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_disable1.go b/internal/provider/type_source_postgres_update_ssl_modes_disable1.go
deleted file mode 100755
index 4268c423d..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_disable1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesDisable1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_prefer.go b/internal/provider/type_source_postgres_update_ssl_modes_prefer.go
deleted file mode 100755
index 309ac44ea..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_prefer.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesPrefer struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_prefer1.go b/internal/provider/type_source_postgres_update_ssl_modes_prefer1.go
deleted file mode 100755
index 2334f0233..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_prefer1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesPrefer1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_require.go b/internal/provider/type_source_postgres_update_ssl_modes_require.go
deleted file mode 100755
index 31f9d429a..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_require.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesRequire struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_require1.go b/internal/provider/type_source_postgres_update_ssl_modes_require1.go
deleted file mode 100755
index 54eb5c61c..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_require1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesRequire1 struct {
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_verify_ca.go b/internal/provider/type_source_postgres_update_ssl_modes_verify_ca.go
deleted file mode 100755
index 199331782..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_verify_ca.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesVerifyCa struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_verify_ca1.go b/internal/provider/type_source_postgres_update_ssl_modes_verify_ca1.go
deleted file mode 100755
index 23cac1753..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_verify_ca1.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesVerifyCa1 struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_verify_full.go b/internal/provider/type_source_postgres_update_ssl_modes_verify_full.go
deleted file mode 100755
index b8e5d8d7e..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_verify_full.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesVerifyFull struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_ssl_modes_verify_full1.go b/internal/provider/type_source_postgres_update_ssl_modes_verify_full1.go
deleted file mode 100755
index 30d12641f..000000000
--- a/internal/provider/type_source_postgres_update_ssl_modes_verify_full1.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateSSLModesVerifyFull1 struct {
- CaCertificate types.String `tfsdk:"ca_certificate"`
- ClientCertificate types.String `tfsdk:"client_certificate"`
- ClientKey types.String `tfsdk:"client_key"`
- ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc.go b/internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc.go
deleted file mode 100755
index b77b1e797..000000000
--- a/internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC struct {
- InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"`
- LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"`
- Method types.String `tfsdk:"method"`
- Plugin types.String `tfsdk:"plugin"`
- Publication types.String `tfsdk:"publication"`
- QueueSize types.Int64 `tfsdk:"queue_size"`
- ReplicationSlot types.String `tfsdk:"replication_slot"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc1.go b/internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc1.go
deleted file mode 100755
index af29faa57..000000000
--- a/internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc1.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC1 struct {
- InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"`
- LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"`
- Method types.String `tfsdk:"method"`
- Plugin types.String `tfsdk:"plugin"`
- Publication types.String `tfsdk:"publication"`
- QueueSize types.Int64 `tfsdk:"queue_size"`
- ReplicationSlot types.String `tfsdk:"replication_slot"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_posthog.go b/internal/provider/type_source_posthog.go
old mode 100755
new mode 100644
index 826d569c4..b71834f7c
--- a/internal/provider/type_source_posthog.go
+++ b/internal/provider/type_source_posthog.go
@@ -8,6 +8,5 @@ type SourcePosthog struct {
APIKey types.String `tfsdk:"api_key"`
BaseURL types.String `tfsdk:"base_url"`
EventsTimeStep types.Int64 `tfsdk:"events_time_step"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_postmarkapp.go b/internal/provider/type_source_postmarkapp.go
old mode 100755
new mode 100644
index ee778df7d..91aafecde
--- a/internal/provider/type_source_postmarkapp.go
+++ b/internal/provider/type_source_postmarkapp.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourcePostmarkapp struct {
- SourceType types.String `tfsdk:"source_type"`
XPostmarkAccountToken types.String `tfsdk:"x_postmark_account_token"`
XPostmarkServerToken types.String `tfsdk:"x_postmark_server_token"`
}
diff --git a/internal/provider/type_source_prestashop.go b/internal/provider/type_source_prestashop.go
old mode 100755
new mode 100644
index eca853889..0420e40d6
--- a/internal/provider/type_source_prestashop.go
+++ b/internal/provider/type_source_prestashop.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourcePrestashop struct {
- AccessKey types.String `tfsdk:"access_key"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- URL types.String `tfsdk:"url"`
+ AccessKey types.String `tfsdk:"access_key"`
+ StartDate types.String `tfsdk:"start_date"`
+ URL types.String `tfsdk:"url"`
}
diff --git a/internal/provider/type_source_punk_api.go b/internal/provider/type_source_punk_api.go
old mode 100755
new mode 100644
index c4e9e8380..56af5ff50
--- a/internal/provider/type_source_punk_api.go
+++ b/internal/provider/type_source_punk_api.go
@@ -8,5 +8,4 @@ type SourcePunkAPI struct {
BrewedAfter types.String `tfsdk:"brewed_after"`
BrewedBefore types.String `tfsdk:"brewed_before"`
ID types.String `tfsdk:"id"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_pypi.go b/internal/provider/type_source_pypi.go
old mode 100755
new mode 100644
index b17f4ca92..6b51932a4
--- a/internal/provider/type_source_pypi.go
+++ b/internal/provider/type_source_pypi.go
@@ -6,6 +6,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourcePypi struct {
ProjectName types.String `tfsdk:"project_name"`
- SourceType types.String `tfsdk:"source_type"`
Version types.String `tfsdk:"version"`
}
diff --git a/internal/provider/type_source_qualaroo.go b/internal/provider/type_source_qualaroo.go
old mode 100755
new mode 100644
index a8765e84f..f1651cf72
--- a/internal/provider/type_source_qualaroo.go
+++ b/internal/provider/type_source_qualaroo.go
@@ -5,9 +5,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceQualaroo struct {
- Key types.String `tfsdk:"key"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- SurveyIds []types.String `tfsdk:"survey_ids"`
- Token types.String `tfsdk:"token"`
+ Key types.String `tfsdk:"key"`
+ StartDate types.String `tfsdk:"start_date"`
+ SurveyIds []types.String `tfsdk:"survey_ids"`
+ Token types.String `tfsdk:"token"`
}
diff --git a/internal/provider/type_source_quickbooks.go b/internal/provider/type_source_quickbooks.go
old mode 100755
new mode 100644
index d1ea1208d..769dc46bd
--- a/internal/provider/type_source_quickbooks.go
+++ b/internal/provider/type_source_quickbooks.go
@@ -7,6 +7,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceQuickbooks struct {
Credentials SourceQuickbooksAuthorizationMethod `tfsdk:"credentials"`
Sandbox types.Bool `tfsdk:"sandbox"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_quickbooks_authorization_method.go b/internal/provider/type_source_quickbooks_authorization_method.go
old mode 100755
new mode 100644
index f04f68fab..1657b14ba
--- a/internal/provider/type_source_quickbooks_authorization_method.go
+++ b/internal/provider/type_source_quickbooks_authorization_method.go
@@ -3,6 +3,5 @@
package provider
type SourceQuickbooksAuthorizationMethod struct {
- SourceQuickbooksAuthorizationMethodOAuth20 *SourceQuickbooksAuthorizationMethodOAuth20 `tfsdk:"source_quickbooks_authorization_method_o_auth2_0"`
- SourceQuickbooksUpdateAuthorizationMethodOAuth20 *SourceQuickbooksAuthorizationMethodOAuth20 `tfsdk:"source_quickbooks_update_authorization_method_o_auth2_0"`
+ OAuth20 *SourceQuickbooksOAuth20 `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_quickbooks_authorization_method_o_auth20.go b/internal/provider/type_source_quickbooks_o_auth20.go
old mode 100755
new mode 100644
similarity index 81%
rename from internal/provider/type_source_quickbooks_authorization_method_o_auth20.go
rename to internal/provider/type_source_quickbooks_o_auth20.go
index c98d13b47..d329199a2
--- a/internal/provider/type_source_quickbooks_authorization_method_o_auth20.go
+++ b/internal/provider/type_source_quickbooks_o_auth20.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceQuickbooksAuthorizationMethodOAuth20 struct {
+type SourceQuickbooksOAuth20 struct {
AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
RealmID types.String `tfsdk:"realm_id"`
diff --git a/internal/provider/type_source_railz.go b/internal/provider/type_source_railz.go
old mode 100755
new mode 100644
index 010aa5948..8097b9bea
--- a/internal/provider/type_source_railz.go
+++ b/internal/provider/type_source_railz.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceRailz struct {
- ClientID types.String `tfsdk:"client_id"`
- SecretKey types.String `tfsdk:"secret_key"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ ClientID types.String `tfsdk:"client_id"`
+ SecretKey types.String `tfsdk:"secret_key"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_recharge.go b/internal/provider/type_source_recharge.go
old mode 100755
new mode 100644
index 896137b43..30e189a91
--- a/internal/provider/type_source_recharge.go
+++ b/internal/provider/type_source_recharge.go
@@ -6,6 +6,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceRecharge struct {
AccessToken types.String `tfsdk:"access_token"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_recreation.go b/internal/provider/type_source_recreation.go
old mode 100755
new mode 100644
index 333eb7753..026a8a44d
--- a/internal/provider/type_source_recreation.go
+++ b/internal/provider/type_source_recreation.go
@@ -7,5 +7,4 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceRecreation struct {
Apikey types.String `tfsdk:"apikey"`
QueryCampsites types.String `tfsdk:"query_campsites"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_recruitee.go b/internal/provider/type_source_recruitee.go
old mode 100755
new mode 100644
index 4b2a4255e..b268377cf
--- a/internal/provider/type_source_recruitee.go
+++ b/internal/provider/type_source_recruitee.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceRecruitee struct {
- APIKey types.String `tfsdk:"api_key"`
- CompanyID types.Int64 `tfsdk:"company_id"`
- SourceType types.String `tfsdk:"source_type"`
+ APIKey types.String `tfsdk:"api_key"`
+ CompanyID types.Int64 `tfsdk:"company_id"`
}
diff --git a/internal/provider/type_source_recurly.go b/internal/provider/type_source_recurly.go
old mode 100755
new mode 100644
index ffefea3d6..1492d62d3
--- a/internal/provider/type_source_recurly.go
+++ b/internal/provider/type_source_recurly.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceRecurly struct {
- APIKey types.String `tfsdk:"api_key"`
- BeginTime types.String `tfsdk:"begin_time"`
- EndTime types.String `tfsdk:"end_time"`
- SourceType types.String `tfsdk:"source_type"`
+ APIKey types.String `tfsdk:"api_key"`
+ BeginTime types.String `tfsdk:"begin_time"`
+ EndTime types.String `tfsdk:"end_time"`
}
diff --git a/internal/provider/type_source_redshift.go b/internal/provider/type_source_redshift.go
old mode 100755
new mode 100644
index 385f74000..0c501d691
--- a/internal/provider/type_source_redshift.go
+++ b/internal/provider/type_source_redshift.go
@@ -11,6 +11,5 @@ type SourceRedshift struct {
Password types.String `tfsdk:"password"`
Port types.Int64 `tfsdk:"port"`
Schemas []types.String `tfsdk:"schemas"`
- SourceType types.String `tfsdk:"source_type"`
Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_source_retently.go b/internal/provider/type_source_retently.go
old mode 100755
new mode 100644
index 8b27b4430..9a3e9bcb6
--- a/internal/provider/type_source_retently.go
+++ b/internal/provider/type_source_retently.go
@@ -2,9 +2,6 @@
package provider
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
type SourceRetently struct {
Credentials *SourceRetentlyAuthenticationMechanism `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_retently1.go b/internal/provider/type_source_retently1.go
deleted file mode 100755
index f48c2639c..000000000
--- a/internal/provider/type_source_retently1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceRetently1 struct {
- Credentials *SourceRetentlyAuthenticationMechanism1 `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_retently_authentication_mechanism.go b/internal/provider/type_source_retently_authentication_mechanism.go
old mode 100755
new mode 100644
index ac899f31c..cc6abf909
--- a/internal/provider/type_source_retently_authentication_mechanism.go
+++ b/internal/provider/type_source_retently_authentication_mechanism.go
@@ -3,8 +3,6 @@
package provider
type SourceRetentlyAuthenticationMechanism struct {
- SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth *SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth `tfsdk:"source_retently_authentication_mechanism_authenticate_via_retently_o_auth"`
- SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken *SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken `tfsdk:"source_retently_authentication_mechanism_authenticate_with_api_token"`
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth *SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth `tfsdk:"source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth"`
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken *SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken `tfsdk:"source_retently_update_authentication_mechanism_authenticate_with_api_token"`
+ AuthenticateViaRetentlyOAuth *AuthenticateViaHarvestOAuth `tfsdk:"authenticate_via_retently_o_auth"`
+ AuthenticateWithAPIToken *AuthenticateWithAPIToken `tfsdk:"authenticate_with_api_token"`
}
diff --git a/internal/provider/type_source_retently_authentication_mechanism1.go b/internal/provider/type_source_retently_authentication_mechanism1.go
deleted file mode 100755
index 424060ed1..000000000
--- a/internal/provider/type_source_retently_authentication_mechanism1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceRetentlyAuthenticationMechanism1 struct {
- SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth *SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth1 `tfsdk:"source_retently_authentication_mechanism_authenticate_via_retently_o_auth"`
- SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken *SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken1 `tfsdk:"source_retently_authentication_mechanism_authenticate_with_api_token"`
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth *SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth1 `tfsdk:"source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth"`
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken *SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken1 `tfsdk:"source_retently_update_authentication_mechanism_authenticate_with_api_token"`
-}
diff --git a/internal/provider/type_source_retently_authentication_mechanism_authenticate_via_retently_o_auth.go b/internal/provider/type_source_retently_authentication_mechanism_authenticate_via_retently_o_auth.go
deleted file mode 100755
index 36aee2c32..000000000
--- a/internal/provider/type_source_retently_authentication_mechanism_authenticate_via_retently_o_auth.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_retently_authentication_mechanism_authenticate_via_retently_o_auth1.go b/internal/provider/type_source_retently_authentication_mechanism_authenticate_via_retently_o_auth1.go
deleted file mode 100755
index 7efbf00c2..000000000
--- a/internal/provider/type_source_retently_authentication_mechanism_authenticate_via_retently_o_auth1.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth1 struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_retently_authentication_mechanism_authenticate_with_api_token.go b/internal/provider/type_source_retently_authentication_mechanism_authenticate_with_api_token.go
deleted file mode 100755
index c37091cd0..000000000
--- a/internal/provider/type_source_retently_authentication_mechanism_authenticate_with_api_token.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken struct {
- APIKey types.String `tfsdk:"api_key"`
- AuthType types.String `tfsdk:"auth_type"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_retently_authentication_mechanism_authenticate_with_api_token1.go b/internal/provider/type_source_retently_authentication_mechanism_authenticate_with_api_token1.go
deleted file mode 100755
index 98ef48e54..000000000
--- a/internal/provider/type_source_retently_authentication_mechanism_authenticate_with_api_token1.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken1 struct {
- APIKey types.String `tfsdk:"api_key"`
- AuthType types.String `tfsdk:"auth_type"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth.go b/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth.go
deleted file mode 100755
index e022c60e1..000000000
--- a/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth1.go b/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth1.go
deleted file mode 100755
index 9b8db7122..000000000
--- a/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_via_retently_o_auth1.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth1 struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_with_api_token.go b/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_with_api_token.go
deleted file mode 100755
index e18b2b0df..000000000
--- a/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_with_api_token.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken struct {
- APIKey types.String `tfsdk:"api_key"`
- AuthType types.String `tfsdk:"auth_type"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_with_api_token1.go b/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_with_api_token1.go
deleted file mode 100755
index 76ff3e9d3..000000000
--- a/internal/provider/type_source_retently_update_authentication_mechanism_authenticate_with_api_token1.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken1 struct {
- APIKey types.String `tfsdk:"api_key"`
- AuthType types.String `tfsdk:"auth_type"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_rki_covid.go b/internal/provider/type_source_rki_covid.go
old mode 100755
new mode 100644
index b045c566f..328a8e7fa
--- a/internal/provider/type_source_rki_covid.go
+++ b/internal/provider/type_source_rki_covid.go
@@ -5,6 +5,5 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceRkiCovid struct {
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_rss.go b/internal/provider/type_source_rss.go
old mode 100755
new mode 100644
index dd6612753..099f52037
--- a/internal/provider/type_source_rss.go
+++ b/internal/provider/type_source_rss.go
@@ -5,6 +5,5 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceRss struct {
- SourceType types.String `tfsdk:"source_type"`
- URL types.String `tfsdk:"url"`
+ URL types.String `tfsdk:"url"`
}
diff --git a/internal/provider/type_source_s3.go b/internal/provider/type_source_s3.go
old mode 100755
new mode 100644
index f810d1468..cacf0cf76
--- a/internal/provider/type_source_s3.go
+++ b/internal/provider/type_source_s3.go
@@ -5,16 +5,15 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceS3 struct {
- AwsAccessKeyID types.String `tfsdk:"aws_access_key_id"`
- AwsSecretAccessKey types.String `tfsdk:"aws_secret_access_key"`
- Bucket types.String `tfsdk:"bucket"`
- Dataset types.String `tfsdk:"dataset"`
- Endpoint types.String `tfsdk:"endpoint"`
- Format *SourceS3FileFormat `tfsdk:"format"`
- PathPattern types.String `tfsdk:"path_pattern"`
- Provider *SourceS3S3AmazonWebServices `tfsdk:"provider"`
- Schema types.String `tfsdk:"schema"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- Streams []SourceS3FileBasedStreamConfig `tfsdk:"streams"`
+ AwsAccessKeyID types.String `tfsdk:"aws_access_key_id"`
+ AwsSecretAccessKey types.String `tfsdk:"aws_secret_access_key"`
+ Bucket types.String `tfsdk:"bucket"`
+ Dataset types.String `tfsdk:"dataset"`
+ Endpoint types.String `tfsdk:"endpoint"`
+ Format *SourceS3FileFormat `tfsdk:"format"`
+ PathPattern types.String `tfsdk:"path_pattern"`
+ Provider *SourceS3S3AmazonWebServices `tfsdk:"provider"`
+ Schema types.String `tfsdk:"schema"`
+ StartDate types.String `tfsdk:"start_date"`
+ Streams []FileBasedStreamConfig `tfsdk:"streams"`
}
diff --git a/internal/provider/type_source_s3_file_based_stream_config.go b/internal/provider/type_source_s3_file_based_stream_config.go
deleted file mode 100755
index 4bb775cd2..000000000
--- a/internal/provider/type_source_s3_file_based_stream_config.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceS3FileBasedStreamConfig struct {
- DaysToSyncIfHistoryIsFull types.Int64 `tfsdk:"days_to_sync_if_history_is_full"`
- FileType types.String `tfsdk:"file_type"`
- Format *SourceS3FileBasedStreamConfigFormat `tfsdk:"format"`
- Globs []types.String `tfsdk:"globs"`
- InputSchema types.String `tfsdk:"input_schema"`
- LegacyPrefix types.String `tfsdk:"legacy_prefix"`
- Name types.String `tfsdk:"name"`
- PrimaryKey types.String `tfsdk:"primary_key"`
- Schemaless types.Bool `tfsdk:"schemaless"`
- ValidationPolicy types.String `tfsdk:"validation_policy"`
-}
diff --git a/internal/provider/type_source_s3_file_based_stream_config_format.go b/internal/provider/type_source_s3_file_based_stream_config_format.go
deleted file mode 100755
index b92efa177..000000000
--- a/internal/provider/type_source_s3_file_based_stream_config_format.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceS3FileBasedStreamConfigFormat struct {
- SourceS3FileBasedStreamConfigFormatAvroFormat *SourceS3FileBasedStreamConfigFormatAvroFormat `tfsdk:"source_s3_file_based_stream_config_format_avro_format"`
- SourceS3FileBasedStreamConfigFormatCSVFormat *SourceS3FileBasedStreamConfigFormatCSVFormat `tfsdk:"source_s3_file_based_stream_config_format_csv_format"`
- SourceS3FileBasedStreamConfigFormatJsonlFormat *SourceS3FileBasedStreamConfigFormatJsonlFormat `tfsdk:"source_s3_file_based_stream_config_format_jsonl_format"`
- SourceS3FileBasedStreamConfigFormatParquetFormat *SourceS3FileBasedStreamConfigFormatParquetFormat `tfsdk:"source_s3_file_based_stream_config_format_parquet_format"`
- SourceS3UpdateFileBasedStreamConfigFormatAvroFormat *SourceS3FileBasedStreamConfigFormatAvroFormat `tfsdk:"source_s3_update_file_based_stream_config_format_avro_format"`
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormat *SourceS3UpdateFileBasedStreamConfigFormatCSVFormat `tfsdk:"source_s3_update_file_based_stream_config_format_csv_format"`
- SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat *SourceS3FileBasedStreamConfigFormatJsonlFormat `tfsdk:"source_s3_update_file_based_stream_config_format_jsonl_format"`
- SourceS3UpdateFileBasedStreamConfigFormatParquetFormat *SourceS3FileBasedStreamConfigFormatParquetFormat `tfsdk:"source_s3_update_file_based_stream_config_format_parquet_format"`
-}
diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_avro_format.go b/internal/provider/type_source_s3_file_based_stream_config_format_avro_format.go
deleted file mode 100755
index b9170e6cb..000000000
--- a/internal/provider/type_source_s3_file_based_stream_config_format_avro_format.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceS3FileBasedStreamConfigFormatAvroFormat struct {
- DoubleAsString types.Bool `tfsdk:"double_as_string"`
- Filetype types.String `tfsdk:"filetype"`
-}
diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition.go b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition.go
deleted file mode 100755
index f9bed06d3..000000000
--- a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition struct {
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated `tfsdk:"source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated"`
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV `tfsdk:"source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv"`
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided `tfsdk:"source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided"`
-}
diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated.go b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated.go
deleted file mode 100755
index 79580fa10..000000000
--- a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated struct {
- HeaderDefinitionType types.String `tfsdk:"header_definition_type"`
-}
diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv.go b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv.go
deleted file mode 100755
index e083f93bd..000000000
--- a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV struct {
- HeaderDefinitionType types.String `tfsdk:"header_definition_type"`
-}
diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided.go b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided.go
deleted file mode 100755
index ace5d88b2..000000000
--- a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided struct {
- ColumnNames []types.String `tfsdk:"column_names"`
- HeaderDefinitionType types.String `tfsdk:"header_definition_type"`
-}
diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_jsonl_format.go b/internal/provider/type_source_s3_file_based_stream_config_format_jsonl_format.go
deleted file mode 100755
index 41910f3b3..000000000
--- a/internal/provider/type_source_s3_file_based_stream_config_format_jsonl_format.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceS3FileBasedStreamConfigFormatJsonlFormat struct {
- Filetype types.String `tfsdk:"filetype"`
-}
diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_parquet_format.go b/internal/provider/type_source_s3_file_based_stream_config_format_parquet_format.go
deleted file mode 100755
index 8553b9023..000000000
--- a/internal/provider/type_source_s3_file_based_stream_config_format_parquet_format.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceS3FileBasedStreamConfigFormatParquetFormat struct {
- DecimalAsFloat types.Bool `tfsdk:"decimal_as_float"`
- Filetype types.String `tfsdk:"filetype"`
-}
diff --git a/internal/provider/type_source_s3_file_format.go b/internal/provider/type_source_s3_file_format.go
old mode 100755
new mode 100644
index fd445a332..6671c688c
--- a/internal/provider/type_source_s3_file_format.go
+++ b/internal/provider/type_source_s3_file_format.go
@@ -3,12 +3,8 @@
package provider
type SourceS3FileFormat struct {
- SourceS3FileFormatAvro *SourceS3FileFormatAvro `tfsdk:"source_s3_file_format_avro"`
- SourceS3FileFormatCSV *SourceS3FileFormatCSV `tfsdk:"source_s3_file_format_csv"`
- SourceS3FileFormatJsonl *SourceS3FileFormatJsonl `tfsdk:"source_s3_file_format_jsonl"`
- SourceS3FileFormatParquet *SourceS3FileFormatParquet `tfsdk:"source_s3_file_format_parquet"`
- SourceS3UpdateFileFormatAvro *SourceS3FileFormatAvro `tfsdk:"source_s3_update_file_format_avro"`
- SourceS3UpdateFileFormatCSV *SourceS3FileFormatCSV `tfsdk:"source_s3_update_file_format_csv"`
- SourceS3UpdateFileFormatJsonl *SourceS3FileFormatJsonl `tfsdk:"source_s3_update_file_format_jsonl"`
- SourceS3UpdateFileFormatParquet *SourceS3FileFormatParquet `tfsdk:"source_s3_update_file_format_parquet"`
+ Avro *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"avro"`
+ Csv *Csv `tfsdk:"csv"`
+ Jsonl *Jsonl `tfsdk:"jsonl"`
+ Parquet *Parquet `tfsdk:"parquet"`
}
diff --git a/internal/provider/type_source_s3_s3_amazon_web_services.go b/internal/provider/type_source_s3_s3_amazon_web_services.go
old mode 100755
new mode 100644
diff --git a/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format.go b/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format.go
deleted file mode 100755
index 00d6f4b8f..000000000
--- a/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format.go
+++ /dev/null
@@ -1,22 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormat struct {
- Delimiter types.String `tfsdk:"delimiter"`
- DoubleQuote types.Bool `tfsdk:"double_quote"`
- Encoding types.String `tfsdk:"encoding"`
- EscapeChar types.String `tfsdk:"escape_char"`
- FalseValues []types.String `tfsdk:"false_values"`
- Filetype types.String `tfsdk:"filetype"`
- HeaderDefinition *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition `tfsdk:"header_definition"`
- InferenceType types.String `tfsdk:"inference_type"`
- NullValues []types.String `tfsdk:"null_values"`
- QuoteChar types.String `tfsdk:"quote_char"`
- SkipRowsAfterHeader types.Int64 `tfsdk:"skip_rows_after_header"`
- SkipRowsBeforeHeader types.Int64 `tfsdk:"skip_rows_before_header"`
- StringsCanBeNull types.Bool `tfsdk:"strings_can_be_null"`
- TrueValues []types.String `tfsdk:"true_values"`
-}
diff --git a/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition.go b/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition.go
deleted file mode 100755
index bfe0c369c..000000000
--- a/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition struct {
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV `tfsdk:"source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv"`
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated `tfsdk:"source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated"`
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided `tfsdk:"source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided"`
-}
diff --git a/internal/provider/type_source_salesforce.go b/internal/provider/type_source_salesforce.go
old mode 100755
new mode 100644
index 479656581..6fd750c8e
--- a/internal/provider/type_source_salesforce.go
+++ b/internal/provider/type_source_salesforce.go
@@ -5,13 +5,11 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceSalesforce struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- ForceUseBulkAPI types.Bool `tfsdk:"force_use_bulk_api"`
- IsSandbox types.Bool `tfsdk:"is_sandbox"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- StreamsCriteria []SourceSalesforceStreamsCriteria `tfsdk:"streams_criteria"`
+ ClientID types.String `tfsdk:"client_id"`
+ ClientSecret types.String `tfsdk:"client_secret"`
+ ForceUseBulkAPI types.Bool `tfsdk:"force_use_bulk_api"`
+ IsSandbox types.Bool `tfsdk:"is_sandbox"`
+ RefreshToken types.String `tfsdk:"refresh_token"`
+ StartDate types.String `tfsdk:"start_date"`
+ StreamsCriteria []StreamsCriteria `tfsdk:"streams_criteria"`
}
diff --git a/internal/provider/type_source_salesloft.go b/internal/provider/type_source_salesloft.go
old mode 100755
new mode 100644
index 9e8fe53d8..f76e193b4
--- a/internal/provider/type_source_salesloft.go
+++ b/internal/provider/type_source_salesloft.go
@@ -6,6 +6,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceSalesloft struct {
Credentials SourceSalesloftCredentials `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_salesloft_credentials.go b/internal/provider/type_source_salesloft_credentials.go
old mode 100755
new mode 100644
index 2b57abb76..f7d8e8d69
--- a/internal/provider/type_source_salesloft_credentials.go
+++ b/internal/provider/type_source_salesloft_credentials.go
@@ -3,8 +3,6 @@
package provider
type SourceSalesloftCredentials struct {
- SourceSalesloftCredentialsAuthenticateViaAPIKey *SourceSalesloftCredentialsAuthenticateViaAPIKey `tfsdk:"source_salesloft_credentials_authenticate_via_api_key"`
- SourceSalesloftCredentialsAuthenticateViaOAuth *SourceGitlabAuthorizationMethodOAuth20 `tfsdk:"source_salesloft_credentials_authenticate_via_o_auth"`
- SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey *SourceSalesloftCredentialsAuthenticateViaAPIKey `tfsdk:"source_salesloft_update_credentials_authenticate_via_api_key"`
- SourceSalesloftUpdateCredentialsAuthenticateViaOAuth *SourceGitlabAuthorizationMethodOAuth20 `tfsdk:"source_salesloft_update_credentials_authenticate_via_o_auth"`
+ AuthenticateViaAPIKey *APIKeyAuth `tfsdk:"authenticate_via_api_key"`
+ AuthenticateViaOAuth *SourceGitlabOAuth20 `tfsdk:"authenticate_via_o_auth"`
}
diff --git a/internal/provider/type_source_salesloft_credentials_authenticate_via_api_key.go b/internal/provider/type_source_salesloft_credentials_authenticate_via_api_key.go
deleted file mode 100755
index 783211f77..000000000
--- a/internal/provider/type_source_salesloft_credentials_authenticate_via_api_key.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSalesloftCredentialsAuthenticateViaAPIKey struct {
- APIKey types.String `tfsdk:"api_key"`
- AuthType types.String `tfsdk:"auth_type"`
-}
diff --git a/internal/provider/type_source_sap_fieldglass.go b/internal/provider/type_source_sap_fieldglass.go
deleted file mode 100755
index caa35b039..000000000
--- a/internal/provider/type_source_sap_fieldglass.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSapFieldglass struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_secoda.go b/internal/provider/type_source_secoda.go
deleted file mode 100755
index 5e584e3e2..000000000
--- a/internal/provider/type_source_secoda.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSecoda struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_sendgrid.go b/internal/provider/type_source_sendgrid.go
old mode 100755
new mode 100644
index 5c735eae5..52418e621
--- a/internal/provider/type_source_sendgrid.go
+++ b/internal/provider/type_source_sendgrid.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceSendgrid struct {
- Apikey types.String `tfsdk:"apikey"`
- SourceType types.String `tfsdk:"source_type"`
- StartTime types.String `tfsdk:"start_time"`
+ Apikey types.String `tfsdk:"apikey"`
+ StartTime types.String `tfsdk:"start_time"`
}
diff --git a/internal/provider/type_source_sendinblue.go b/internal/provider/type_source_sendinblue.go
deleted file mode 100755
index 3fb0195ca..000000000
--- a/internal/provider/type_source_sendinblue.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSendinblue struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_senseforce.go b/internal/provider/type_source_senseforce.go
old mode 100755
new mode 100644
index 02e5d99ce..ea746bf71
--- a/internal/provider/type_source_senseforce.go
+++ b/internal/provider/type_source_senseforce.go
@@ -9,6 +9,5 @@ type SourceSenseforce struct {
BackendURL types.String `tfsdk:"backend_url"`
DatasetID types.String `tfsdk:"dataset_id"`
SliceRange types.Int64 `tfsdk:"slice_range"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_sentry.go b/internal/provider/type_source_sentry.go
old mode 100755
new mode 100644
index ee8bc15c7..f1a8a979e
--- a/internal/provider/type_source_sentry.go
+++ b/internal/provider/type_source_sentry.go
@@ -10,5 +10,4 @@ type SourceSentry struct {
Hostname types.String `tfsdk:"hostname"`
Organization types.String `tfsdk:"organization"`
Project types.String `tfsdk:"project"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_sftp.go b/internal/provider/type_source_sftp.go
old mode 100755
new mode 100644
index d9755a9b1..7e3c8199a
--- a/internal/provider/type_source_sftp.go
+++ b/internal/provider/type_source_sftp.go
@@ -11,6 +11,5 @@ type SourceSftp struct {
FolderPath types.String `tfsdk:"folder_path"`
Host types.String `tfsdk:"host"`
Port types.Int64 `tfsdk:"port"`
- SourceType types.String `tfsdk:"source_type"`
User types.String `tfsdk:"user"`
}
diff --git a/internal/provider/type_source_sftp_authentication_wildcard.go b/internal/provider/type_source_sftp_authentication_wildcard.go
old mode 100755
new mode 100644
index 13817c563..59f361867
--- a/internal/provider/type_source_sftp_authentication_wildcard.go
+++ b/internal/provider/type_source_sftp_authentication_wildcard.go
@@ -3,8 +3,6 @@
package provider
type SourceSftpAuthenticationWildcard struct {
- SourceSftpAuthenticationWildcardPasswordAuthentication *SourceSftpAuthenticationWildcardPasswordAuthentication `tfsdk:"source_sftp_authentication_wildcard_password_authentication"`
- SourceSftpAuthenticationWildcardSSHKeyAuthentication *SourceSftpAuthenticationWildcardSSHKeyAuthentication `tfsdk:"source_sftp_authentication_wildcard_ssh_key_authentication"`
- SourceSftpUpdateAuthenticationWildcardPasswordAuthentication *SourceSftpAuthenticationWildcardPasswordAuthentication `tfsdk:"source_sftp_update_authentication_wildcard_password_authentication"`
- SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication *SourceSftpAuthenticationWildcardSSHKeyAuthentication `tfsdk:"source_sftp_update_authentication_wildcard_ssh_key_authentication"`
+ PasswordAuthentication *SourceSftpPasswordAuthentication `tfsdk:"password_authentication"`
+ SSHKeyAuthentication *SourceSftpSSHKeyAuthentication `tfsdk:"ssh_key_authentication"`
}
diff --git a/internal/provider/type_source_sftp_bulk.go b/internal/provider/type_source_sftp_bulk.go
old mode 100755
new mode 100644
index 1936ce272..a0716520f
--- a/internal/provider/type_source_sftp_bulk.go
+++ b/internal/provider/type_source_sftp_bulk.go
@@ -14,7 +14,6 @@ type SourceSftpBulk struct {
Port types.Int64 `tfsdk:"port"`
PrivateKey types.String `tfsdk:"private_key"`
Separator types.String `tfsdk:"separator"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
StreamName types.String `tfsdk:"stream_name"`
Username types.String `tfsdk:"username"`
diff --git a/internal/provider/type_source_sftp_authentication_wildcard_password_authentication.go b/internal/provider/type_source_sftp_password_authentication.go
old mode 100755
new mode 100644
similarity index 64%
rename from internal/provider/type_source_sftp_authentication_wildcard_password_authentication.go
rename to internal/provider/type_source_sftp_password_authentication.go
index 120737475..577b8ec75
--- a/internal/provider/type_source_sftp_authentication_wildcard_password_authentication.go
+++ b/internal/provider/type_source_sftp_password_authentication.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceSftpAuthenticationWildcardPasswordAuthentication struct {
- AuthMethod types.String `tfsdk:"auth_method"`
+type SourceSftpPasswordAuthentication struct {
AuthUserPassword types.String `tfsdk:"auth_user_password"`
}
diff --git a/internal/provider/type_source_sftp_authentication_wildcard_ssh_key_authentication.go b/internal/provider/type_source_sftp_ssh_key_authentication.go
old mode 100755
new mode 100644
similarity index 64%
rename from internal/provider/type_source_sftp_authentication_wildcard_ssh_key_authentication.go
rename to internal/provider/type_source_sftp_ssh_key_authentication.go
index ab3fd940f..e34788c1a
--- a/internal/provider/type_source_sftp_authentication_wildcard_ssh_key_authentication.go
+++ b/internal/provider/type_source_sftp_ssh_key_authentication.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceSftpAuthenticationWildcardSSHKeyAuthentication struct {
- AuthMethod types.String `tfsdk:"auth_method"`
+type SourceSftpSSHKeyAuthentication struct {
AuthSSHKey types.String `tfsdk:"auth_ssh_key"`
}
diff --git a/internal/provider/type_source_shopify.go b/internal/provider/type_source_shopify.go
old mode 100755
new mode 100644
index 9aafa332b..648982a01
--- a/internal/provider/type_source_shopify.go
+++ b/internal/provider/type_source_shopify.go
@@ -7,6 +7,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceShopify struct {
Credentials *SourceShopifyShopifyAuthorizationMethod `tfsdk:"credentials"`
Shop types.String `tfsdk:"shop"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_mailchimp_authentication_o_auth20.go b/internal/provider/type_source_shopify_o_auth20.go
old mode 100755
new mode 100644
similarity index 75%
rename from internal/provider/type_source_mailchimp_authentication_o_auth20.go
rename to internal/provider/type_source_shopify_o_auth20.go
index 88aa7634a..a4c82fce6
--- a/internal/provider/type_source_mailchimp_authentication_o_auth20.go
+++ b/internal/provider/type_source_shopify_o_auth20.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceMailchimpAuthenticationOAuth20 struct {
+type SourceShopifyOAuth20 struct {
AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
}
diff --git a/internal/provider/type_source_shopify_shopify_authorization_method.go b/internal/provider/type_source_shopify_shopify_authorization_method.go
old mode 100755
new mode 100644
index be5e17bd2..2f734748f
--- a/internal/provider/type_source_shopify_shopify_authorization_method.go
+++ b/internal/provider/type_source_shopify_shopify_authorization_method.go
@@ -3,8 +3,6 @@
package provider
type SourceShopifyShopifyAuthorizationMethod struct {
- SourceShopifyShopifyAuthorizationMethodAPIPassword *SourceShopifyShopifyAuthorizationMethodAPIPassword `tfsdk:"source_shopify_shopify_authorization_method_api_password"`
- SourceShopifyShopifyAuthorizationMethodOAuth20 *SourceShopifyShopifyAuthorizationMethodOAuth20 `tfsdk:"source_shopify_shopify_authorization_method_o_auth2_0"`
- SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword *SourceShopifyShopifyAuthorizationMethodAPIPassword `tfsdk:"source_shopify_update_shopify_authorization_method_api_password"`
- SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 *SourceShopifyShopifyAuthorizationMethodOAuth20 `tfsdk:"source_shopify_update_shopify_authorization_method_o_auth2_0"`
+ APIPassword *APIPassword `tfsdk:"api_password"`
+ OAuth20 *SourceShopifyOAuth20 `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_shopify_shopify_authorization_method_api_password.go b/internal/provider/type_source_shopify_shopify_authorization_method_api_password.go
deleted file mode 100755
index d32dbe9b5..000000000
--- a/internal/provider/type_source_shopify_shopify_authorization_method_api_password.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceShopifyShopifyAuthorizationMethodAPIPassword struct {
- APIPassword types.String `tfsdk:"api_password"`
- AuthMethod types.String `tfsdk:"auth_method"`
-}
diff --git a/internal/provider/type_source_shopify_shopify_authorization_method_o_auth20.go b/internal/provider/type_source_shopify_shopify_authorization_method_o_auth20.go
deleted file mode 100755
index 113b31a99..000000000
--- a/internal/provider/type_source_shopify_shopify_authorization_method_o_auth20.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceShopifyShopifyAuthorizationMethodOAuth20 struct {
- AccessToken types.String `tfsdk:"access_token"`
- AuthMethod types.String `tfsdk:"auth_method"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
-}
diff --git a/internal/provider/type_source_shortio.go b/internal/provider/type_source_shortio.go
old mode 100755
new mode 100644
index a3377140c..ccb357853
--- a/internal/provider/type_source_shortio.go
+++ b/internal/provider/type_source_shortio.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceShortio struct {
- DomainID types.String `tfsdk:"domain_id"`
- SecretKey types.String `tfsdk:"secret_key"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ DomainID types.String `tfsdk:"domain_id"`
+ SecretKey types.String `tfsdk:"secret_key"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_slack.go b/internal/provider/type_source_slack.go
old mode 100755
new mode 100644
index 2a837168e..c4ed434d9
--- a/internal/provider/type_source_slack.go
+++ b/internal/provider/type_source_slack.go
@@ -9,6 +9,5 @@ type SourceSlack struct {
Credentials *SourceSlackAuthenticationMechanism `tfsdk:"credentials"`
JoinChannels types.Bool `tfsdk:"join_channels"`
LookbackWindow types.Int64 `tfsdk:"lookback_window"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_slack_authentication_mechanism.go b/internal/provider/type_source_slack_authentication_mechanism.go
old mode 100755
new mode 100644
index 86caf4531..d3e0ec3cf
--- a/internal/provider/type_source_slack_authentication_mechanism.go
+++ b/internal/provider/type_source_slack_authentication_mechanism.go
@@ -3,8 +3,6 @@
package provider
type SourceSlackAuthenticationMechanism struct {
- SourceSlackAuthenticationMechanismAPIToken *SourceSlackAuthenticationMechanismAPIToken `tfsdk:"source_slack_authentication_mechanism_api_token"`
- SourceSlackAuthenticationMechanismSignInViaSlackOAuth *SourceSlackAuthenticationMechanismSignInViaSlackOAuth `tfsdk:"source_slack_authentication_mechanism_sign_in_via_slack_o_auth"`
- SourceSlackUpdateAuthenticationMechanismAPIToken *SourceSlackAuthenticationMechanismAPIToken `tfsdk:"source_slack_update_authentication_mechanism_api_token"`
- SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth *SourceSlackAuthenticationMechanismSignInViaSlackOAuth `tfsdk:"source_slack_update_authentication_mechanism_sign_in_via_slack_o_auth"`
+ APIToken *SourceK6Cloud `tfsdk:"api_token"`
+ SignInViaSlackOAuth *SourceNotionOAuth20 `tfsdk:"sign_in_via_slack_o_auth"`
}
diff --git a/internal/provider/type_source_slack_authentication_mechanism_api_token.go b/internal/provider/type_source_slack_authentication_mechanism_api_token.go
deleted file mode 100755
index 70b0fe099..000000000
--- a/internal/provider/type_source_slack_authentication_mechanism_api_token.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSlackAuthenticationMechanismAPIToken struct {
- APIToken types.String `tfsdk:"api_token"`
- OptionTitle types.String `tfsdk:"option_title"`
-}
diff --git a/internal/provider/type_source_slack_authentication_mechanism_sign_in_via_slack_o_auth.go b/internal/provider/type_source_slack_authentication_mechanism_sign_in_via_slack_o_auth.go
deleted file mode 100755
index cecf18f87..000000000
--- a/internal/provider/type_source_slack_authentication_mechanism_sign_in_via_slack_o_auth.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSlackAuthenticationMechanismSignInViaSlackOAuth struct {
- AccessToken types.String `tfsdk:"access_token"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- OptionTitle types.String `tfsdk:"option_title"`
-}
diff --git a/internal/provider/type_source_smaily.go b/internal/provider/type_source_smaily.go
old mode 100755
new mode 100644
index 36cdf373a..7db94f327
--- a/internal/provider/type_source_smaily.go
+++ b/internal/provider/type_source_smaily.go
@@ -8,5 +8,4 @@ type SourceSmaily struct {
APIPassword types.String `tfsdk:"api_password"`
APISubdomain types.String `tfsdk:"api_subdomain"`
APIUsername types.String `tfsdk:"api_username"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_smartengage.go b/internal/provider/type_source_smartengage.go
deleted file mode 100755
index 53cc5c24b..000000000
--- a/internal/provider/type_source_smartengage.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSmartengage struct {
- APIKey types.String `tfsdk:"api_key"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_smartsheets.go b/internal/provider/type_source_smartsheets.go
old mode 100755
new mode 100644
index a181611f1..6ff6f9bdb
--- a/internal/provider/type_source_smartsheets.go
+++ b/internal/provider/type_source_smartsheets.go
@@ -7,7 +7,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceSmartsheets struct {
Credentials SourceSmartsheetsAuthorizationMethod `tfsdk:"credentials"`
MetadataFields []types.String `tfsdk:"metadata_fields"`
- SourceType types.String `tfsdk:"source_type"`
SpreadsheetID types.String `tfsdk:"spreadsheet_id"`
StartDatetime types.String `tfsdk:"start_datetime"`
}
diff --git a/internal/provider/type_source_smartsheets_authorization_method.go b/internal/provider/type_source_smartsheets_authorization_method.go
old mode 100755
new mode 100644
index cc020ef37..d6cff1467
--- a/internal/provider/type_source_smartsheets_authorization_method.go
+++ b/internal/provider/type_source_smartsheets_authorization_method.go
@@ -3,8 +3,6 @@
package provider
type SourceSmartsheetsAuthorizationMethod struct {
- SourceSmartsheetsAuthorizationMethodAPIAccessToken *SourceGitlabAuthorizationMethodPrivateToken `tfsdk:"source_smartsheets_authorization_method_api_access_token"`
- SourceSmartsheetsAuthorizationMethodOAuth20 *SourceGitlabAuthorizationMethodOAuth20 `tfsdk:"source_smartsheets_authorization_method_o_auth2_0"`
- SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken *SourceGitlabAuthorizationMethodPrivateToken `tfsdk:"source_smartsheets_update_authorization_method_api_access_token"`
- SourceSmartsheetsUpdateAuthorizationMethodOAuth20 *SourceGitlabAuthorizationMethodOAuth20 `tfsdk:"source_smartsheets_update_authorization_method_o_auth2_0"`
+ APIAccessToken *OAuth2AccessToken `tfsdk:"api_access_token"`
+ OAuth20 *SourceGitlabOAuth20 `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_snapchat_marketing.go b/internal/provider/type_source_snapchat_marketing.go
old mode 100755
new mode 100644
index cd6ff2788..4abcd1699
--- a/internal/provider/type_source_snapchat_marketing.go
+++ b/internal/provider/type_source_snapchat_marketing.go
@@ -9,6 +9,5 @@ type SourceSnapchatMarketing struct {
ClientSecret types.String `tfsdk:"client_secret"`
EndDate types.String `tfsdk:"end_date"`
RefreshToken types.String `tfsdk:"refresh_token"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_snowflake.go b/internal/provider/type_source_snowflake.go
old mode 100755
new mode 100644
index 0814b5578..b386700c6
--- a/internal/provider/type_source_snowflake.go
+++ b/internal/provider/type_source_snowflake.go
@@ -11,6 +11,5 @@ type SourceSnowflake struct {
JdbcURLParams types.String `tfsdk:"jdbc_url_params"`
Role types.String `tfsdk:"role"`
Schema types.String `tfsdk:"schema"`
- SourceType types.String `tfsdk:"source_type"`
Warehouse types.String `tfsdk:"warehouse"`
}
diff --git a/internal/provider/type_source_snowflake_authorization_method.go b/internal/provider/type_source_snowflake_authorization_method.go
old mode 100755
new mode 100644
index 12abdc74c..3ad117c82
--- a/internal/provider/type_source_snowflake_authorization_method.go
+++ b/internal/provider/type_source_snowflake_authorization_method.go
@@ -3,8 +3,6 @@
package provider
type SourceSnowflakeAuthorizationMethod struct {
- SourceSnowflakeAuthorizationMethodOAuth20 *SourceSnowflakeAuthorizationMethodOAuth20 `tfsdk:"source_snowflake_authorization_method_o_auth2_0"`
- SourceSnowflakeAuthorizationMethodUsernameAndPassword *SourceSnowflakeAuthorizationMethodUsernameAndPassword `tfsdk:"source_snowflake_authorization_method_username_and_password"`
- SourceSnowflakeUpdateAuthorizationMethodOAuth20 *SourceSnowflakeAuthorizationMethodOAuth20 `tfsdk:"source_snowflake_update_authorization_method_o_auth2_0"`
- SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword *SourceSnowflakeAuthorizationMethodUsernameAndPassword `tfsdk:"source_snowflake_update_authorization_method_username_and_password"`
+ OAuth20 *SourceSnowflakeOAuth20 `tfsdk:"o_auth20"`
+ UsernameAndPassword *UsernamePassword `tfsdk:"username_and_password"`
}
diff --git a/internal/provider/type_source_snowflake_authorization_method_username_and_password.go b/internal/provider/type_source_snowflake_authorization_method_username_and_password.go
deleted file mode 100755
index 71d0bc17b..000000000
--- a/internal/provider/type_source_snowflake_authorization_method_username_and_password.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSnowflakeAuthorizationMethodUsernameAndPassword struct {
- AuthType types.String `tfsdk:"auth_type"`
- Password types.String `tfsdk:"password"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/internal/provider/type_destination_snowflake_authorization_method_o_auth20.go b/internal/provider/type_source_snowflake_o_auth20.go
old mode 100755
new mode 100644
similarity index 76%
rename from internal/provider/type_destination_snowflake_authorization_method_o_auth20.go
rename to internal/provider/type_source_snowflake_o_auth20.go
index f57da9734..9d4379164
--- a/internal/provider/type_destination_snowflake_authorization_method_o_auth20.go
+++ b/internal/provider/type_source_snowflake_o_auth20.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationSnowflakeAuthorizationMethodOAuth20 struct {
+type SourceSnowflakeOAuth20 struct {
AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
RefreshToken types.String `tfsdk:"refresh_token"`
diff --git a/internal/provider/type_source_sonar_cloud.go b/internal/provider/type_source_sonar_cloud.go
old mode 100755
new mode 100644
index 220497869..e2aebae12
--- a/internal/provider/type_source_sonar_cloud.go
+++ b/internal/provider/type_source_sonar_cloud.go
@@ -8,7 +8,6 @@ type SourceSonarCloud struct {
ComponentKeys []types.String `tfsdk:"component_keys"`
EndDate types.String `tfsdk:"end_date"`
Organization types.String `tfsdk:"organization"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
UserToken types.String `tfsdk:"user_token"`
}
diff --git a/internal/provider/type_source_spacex_api.go b/internal/provider/type_source_spacex_api.go
old mode 100755
new mode 100644
index f4ac31a49..d54bbcdb6
--- a/internal/provider/type_source_spacex_api.go
+++ b/internal/provider/type_source_spacex_api.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceSpacexAPI struct {
- ID types.String `tfsdk:"id"`
- Options types.String `tfsdk:"options"`
- SourceType types.String `tfsdk:"source_type"`
+ ID types.String `tfsdk:"id"`
+ Options types.String `tfsdk:"options"`
}
diff --git a/internal/provider/type_source_square.go b/internal/provider/type_source_square.go
old mode 100755
new mode 100644
index 222a295ea..611ade775
--- a/internal/provider/type_source_square.go
+++ b/internal/provider/type_source_square.go
@@ -8,6 +8,5 @@ type SourceSquare struct {
Credentials *SourceSquareAuthentication `tfsdk:"credentials"`
IncludeDeletedObjects types.Bool `tfsdk:"include_deleted_objects"`
IsSandbox types.Bool `tfsdk:"is_sandbox"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_square_authentication.go b/internal/provider/type_source_square_authentication.go
old mode 100755
new mode 100644
index 3824eaf5c..c77102032
--- a/internal/provider/type_source_square_authentication.go
+++ b/internal/provider/type_source_square_authentication.go
@@ -3,8 +3,6 @@
package provider
type SourceSquareAuthentication struct {
- SourceSquareAuthenticationAPIKey *SourceSquareAuthenticationAPIKey `tfsdk:"source_square_authentication_api_key"`
- SourceSquareAuthenticationOauthAuthentication *SourceSquareAuthenticationOauthAuthentication `tfsdk:"source_square_authentication_oauth_authentication"`
- SourceSquareUpdateAuthenticationAPIKey *SourceSquareAuthenticationAPIKey `tfsdk:"source_square_update_authentication_api_key"`
- SourceSquareUpdateAuthenticationOauthAuthentication *SourceSquareAuthenticationOauthAuthentication `tfsdk:"source_square_update_authentication_oauth_authentication"`
+ APIKey *APIKeyAuth `tfsdk:"api_key"`
+ OauthAuthentication *DestinationGoogleSheetsAuthenticationViaGoogleOAuth `tfsdk:"oauth_authentication"`
}
diff --git a/internal/provider/type_source_square_authentication_api_key.go b/internal/provider/type_source_square_authentication_api_key.go
deleted file mode 100755
index 793b76a10..000000000
--- a/internal/provider/type_source_square_authentication_api_key.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSquareAuthenticationAPIKey struct {
- APIKey types.String `tfsdk:"api_key"`
- AuthType types.String `tfsdk:"auth_type"`
-}
diff --git a/internal/provider/type_source_square_authentication_oauth_authentication.go b/internal/provider/type_source_square_authentication_oauth_authentication.go
deleted file mode 100755
index cd6e7a9ab..000000000
--- a/internal/provider/type_source_square_authentication_oauth_authentication.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSquareAuthenticationOauthAuthentication struct {
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
-}
diff --git a/internal/provider/type_source_strava.go b/internal/provider/type_source_strava.go
old mode 100755
new mode 100644
index 1fcad8656..430699f52
--- a/internal/provider/type_source_strava.go
+++ b/internal/provider/type_source_strava.go
@@ -6,10 +6,8 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceStrava struct {
AthleteID types.Int64 `tfsdk:"athlete_id"`
- AuthType types.String `tfsdk:"auth_type"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
RefreshToken types.String `tfsdk:"refresh_token"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_stripe.go b/internal/provider/type_source_stripe.go
old mode 100755
new mode 100644
index 440e3d3a0..6761631fd
--- a/internal/provider/type_source_stripe.go
+++ b/internal/provider/type_source_stripe.go
@@ -6,9 +6,10 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceStripe struct {
AccountID types.String `tfsdk:"account_id"`
+ CallRateLimit types.Int64 `tfsdk:"call_rate_limit"`
ClientSecret types.String `tfsdk:"client_secret"`
LookbackWindowDays types.Int64 `tfsdk:"lookback_window_days"`
+ NumWorkers types.Int64 `tfsdk:"num_workers"`
SliceRange types.Int64 `tfsdk:"slice_range"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_survey_sparrow.go b/internal/provider/type_source_survey_sparrow.go
old mode 100755
new mode 100644
index 11158a251..aac9bebe2
--- a/internal/provider/type_source_survey_sparrow.go
+++ b/internal/provider/type_source_survey_sparrow.go
@@ -7,6 +7,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceSurveySparrow struct {
AccessToken types.String `tfsdk:"access_token"`
Region *SourceSurveySparrowBaseURL `tfsdk:"region"`
- SourceType types.String `tfsdk:"source_type"`
SurveyID []types.String `tfsdk:"survey_id"`
}
diff --git a/internal/provider/type_source_survey_sparrow_base_url.go b/internal/provider/type_source_survey_sparrow_base_url.go
old mode 100755
new mode 100644
index 7329ebdd4..68be1ba81
--- a/internal/provider/type_source_survey_sparrow_base_url.go
+++ b/internal/provider/type_source_survey_sparrow_base_url.go
@@ -3,8 +3,6 @@
package provider
type SourceSurveySparrowBaseURL struct {
- SourceSurveySparrowBaseURLEUBasedAccount *SourceSurveySparrowBaseURLEUBasedAccount `tfsdk:"source_survey_sparrow_base_url_eu_based_account"`
- SourceSurveySparrowBaseURLGlobalAccount *SourceSurveySparrowBaseURLGlobalAccount `tfsdk:"source_survey_sparrow_base_url_global_account"`
- SourceSurveySparrowUpdateBaseURLEUBasedAccount *SourceSurveySparrowBaseURLEUBasedAccount `tfsdk:"source_survey_sparrow_update_base_url_eu_based_account"`
- SourceSurveySparrowUpdateBaseURLGlobalAccount *SourceSurveySparrowBaseURLGlobalAccount `tfsdk:"source_survey_sparrow_update_base_url_global_account"`
+ EUBasedAccount *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"eu_based_account"`
+ GlobalAccount *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON `tfsdk:"global_account"`
}
diff --git a/internal/provider/type_source_survey_sparrow_base_url_global_account.go b/internal/provider/type_source_survey_sparrow_base_url_global_account.go
deleted file mode 100755
index a4359b7a2..000000000
--- a/internal/provider/type_source_survey_sparrow_base_url_global_account.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSurveySparrowBaseURLGlobalAccount struct {
- URLBase types.String `tfsdk:"url_base"`
-}
diff --git a/internal/provider/type_source_survey_sparrow_base_urleu_based_account.go b/internal/provider/type_source_survey_sparrow_base_urleu_based_account.go
deleted file mode 100755
index c981a8496..000000000
--- a/internal/provider/type_source_survey_sparrow_base_urleu_based_account.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSurveySparrowBaseURLEUBasedAccount struct {
- URLBase types.String `tfsdk:"url_base"`
-}
diff --git a/internal/provider/type_source_surveymonkey.go b/internal/provider/type_source_surveymonkey.go
old mode 100755
new mode 100644
index 8be319f5a..c42a4cb08
--- a/internal/provider/type_source_surveymonkey.go
+++ b/internal/provider/type_source_surveymonkey.go
@@ -5,9 +5,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceSurveymonkey struct {
- Credentials *SourceSurveymonkeySurveyMonkeyAuthorizationMethod `tfsdk:"credentials"`
- Origin types.String `tfsdk:"origin"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- SurveyIds []types.String `tfsdk:"survey_ids"`
+ Credentials *OAuth `tfsdk:"credentials"`
+ Origin types.String `tfsdk:"origin"`
+ StartDate types.String `tfsdk:"start_date"`
+ SurveyIds []types.String `tfsdk:"survey_ids"`
}
diff --git a/internal/provider/type_source_surveymonkey_survey_monkey_authorization_method.go b/internal/provider/type_source_surveymonkey_survey_monkey_authorization_method.go
deleted file mode 100755
index 4d4c18cd2..000000000
--- a/internal/provider/type_source_surveymonkey_survey_monkey_authorization_method.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceSurveymonkeySurveyMonkeyAuthorizationMethod struct {
- AccessToken types.String `tfsdk:"access_token"`
- AuthMethod types.String `tfsdk:"auth_method"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
-}
diff --git a/internal/provider/type_source_tempo.go b/internal/provider/type_source_tempo.go
deleted file mode 100755
index 4cfa73697..000000000
--- a/internal/provider/type_source_tempo.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceTempo struct {
- APIToken types.String `tfsdk:"api_token"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_the_guardian_api.go b/internal/provider/type_source_the_guardian_api.go
old mode 100755
new mode 100644
index 54ddf0972..351f6bba1
--- a/internal/provider/type_source_the_guardian_api.go
+++ b/internal/provider/type_source_the_guardian_api.go
@@ -5,11 +5,10 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceTheGuardianAPI struct {
- APIKey types.String `tfsdk:"api_key"`
- EndDate types.String `tfsdk:"end_date"`
- Query types.String `tfsdk:"query"`
- Section types.String `tfsdk:"section"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- Tag types.String `tfsdk:"tag"`
+ APIKey types.String `tfsdk:"api_key"`
+ EndDate types.String `tfsdk:"end_date"`
+ Query types.String `tfsdk:"query"`
+ Section types.String `tfsdk:"section"`
+ StartDate types.String `tfsdk:"start_date"`
+ Tag types.String `tfsdk:"tag"`
}
diff --git a/internal/provider/type_source_tiktok_marketing.go b/internal/provider/type_source_tiktok_marketing.go
old mode 100755
new mode 100644
index fcd3d740b..1dcc91f0a
--- a/internal/provider/type_source_tiktok_marketing.go
+++ b/internal/provider/type_source_tiktok_marketing.go
@@ -9,6 +9,5 @@ type SourceTiktokMarketing struct {
Credentials *SourceTiktokMarketingAuthenticationMethod `tfsdk:"credentials"`
EndDate types.String `tfsdk:"end_date"`
IncludeDeleted types.Bool `tfsdk:"include_deleted"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_tiktok_marketing_authentication_method.go b/internal/provider/type_source_tiktok_marketing_authentication_method.go
old mode 100755
new mode 100644
index e61b9ff2f..03de5b547
--- a/internal/provider/type_source_tiktok_marketing_authentication_method.go
+++ b/internal/provider/type_source_tiktok_marketing_authentication_method.go
@@ -3,8 +3,6 @@
package provider
type SourceTiktokMarketingAuthenticationMethod struct {
- SourceTiktokMarketingAuthenticationMethodOAuth20 *SourceTiktokMarketingAuthenticationMethodOAuth20 `tfsdk:"source_tiktok_marketing_authentication_method_o_auth2_0"`
- SourceTiktokMarketingAuthenticationMethodSandboxAccessToken *SourceTiktokMarketingAuthenticationMethodSandboxAccessToken `tfsdk:"source_tiktok_marketing_authentication_method_sandbox_access_token"`
- SourceTiktokMarketingUpdateAuthenticationMethodOAuth20 *SourceTiktokMarketingAuthenticationMethodOAuth20 `tfsdk:"source_tiktok_marketing_update_authentication_method_o_auth2_0"`
- SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken *SourceTiktokMarketingAuthenticationMethodSandboxAccessToken `tfsdk:"source_tiktok_marketing_update_authentication_method_sandbox_access_token"`
+ OAuth20 *SourceTiktokMarketingOAuth20 `tfsdk:"o_auth20"`
+ SandboxAccessToken *SandboxAccessToken `tfsdk:"sandbox_access_token"`
}
diff --git a/internal/provider/type_source_tiktok_marketing_authentication_method_o_auth20.go b/internal/provider/type_source_tiktok_marketing_o_auth20.go
old mode 100755
new mode 100644
similarity index 75%
rename from internal/provider/type_source_tiktok_marketing_authentication_method_o_auth20.go
rename to internal/provider/type_source_tiktok_marketing_o_auth20.go
index 486f6deb8..10f6515b5
--- a/internal/provider/type_source_tiktok_marketing_authentication_method_o_auth20.go
+++ b/internal/provider/type_source_tiktok_marketing_o_auth20.go
@@ -4,10 +4,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceTiktokMarketingAuthenticationMethodOAuth20 struct {
+type SourceTiktokMarketingOAuth20 struct {
AccessToken types.String `tfsdk:"access_token"`
AdvertiserID types.String `tfsdk:"advertiser_id"`
AppID types.String `tfsdk:"app_id"`
- AuthType types.String `tfsdk:"auth_type"`
Secret types.String `tfsdk:"secret"`
}
diff --git a/internal/provider/type_source_todoist.go b/internal/provider/type_source_todoist.go
deleted file mode 100755
index d9fc1b2bf..000000000
--- a/internal/provider/type_source_todoist.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceTodoist struct {
- SourceType types.String `tfsdk:"source_type"`
- Token types.String `tfsdk:"token"`
-}
diff --git a/internal/provider/type_source_trello.go b/internal/provider/type_source_trello.go
old mode 100755
new mode 100644
index 903564225..01b81f40b
--- a/internal/provider/type_source_trello.go
+++ b/internal/provider/type_source_trello.go
@@ -5,9 +5,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceTrello struct {
- BoardIds []types.String `tfsdk:"board_ids"`
- Key types.String `tfsdk:"key"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- Token types.String `tfsdk:"token"`
+ BoardIds []types.String `tfsdk:"board_ids"`
+ Key types.String `tfsdk:"key"`
+ StartDate types.String `tfsdk:"start_date"`
+ Token types.String `tfsdk:"token"`
}
diff --git a/internal/provider/type_source_trustpilot.go b/internal/provider/type_source_trustpilot.go
old mode 100755
new mode 100644
index fd0b90d2d..d783dcb15
--- a/internal/provider/type_source_trustpilot.go
+++ b/internal/provider/type_source_trustpilot.go
@@ -7,6 +7,5 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceTrustpilot struct {
BusinessUnits []types.String `tfsdk:"business_units"`
Credentials SourceTrustpilotAuthorizationMethod `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_trustpilot_authorization_method_api_key.go b/internal/provider/type_source_trustpilot_api_key.go
old mode 100755
new mode 100644
similarity index 66%
rename from internal/provider/type_source_trustpilot_authorization_method_api_key.go
rename to internal/provider/type_source_trustpilot_api_key.go
index 0647d67fe..1d6235037
--- a/internal/provider/type_source_trustpilot_authorization_method_api_key.go
+++ b/internal/provider/type_source_trustpilot_api_key.go
@@ -4,7 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceTrustpilotAuthorizationMethodAPIKey struct {
- AuthType types.String `tfsdk:"auth_type"`
+type SourceTrustpilotAPIKey struct {
ClientID types.String `tfsdk:"client_id"`
}
diff --git a/internal/provider/type_source_trustpilot_authorization_method.go b/internal/provider/type_source_trustpilot_authorization_method.go
old mode 100755
new mode 100644
index 2a20211f6..dca6bec89
--- a/internal/provider/type_source_trustpilot_authorization_method.go
+++ b/internal/provider/type_source_trustpilot_authorization_method.go
@@ -3,8 +3,6 @@
package provider
type SourceTrustpilotAuthorizationMethod struct {
- SourceTrustpilotAuthorizationMethodAPIKey *SourceTrustpilotAuthorizationMethodAPIKey `tfsdk:"source_trustpilot_authorization_method_api_key"`
- SourceTrustpilotAuthorizationMethodOAuth20 *SourceGitlabAuthorizationMethodOAuth20 `tfsdk:"source_trustpilot_authorization_method_o_auth_2_0"`
- SourceTrustpilotUpdateAuthorizationMethodAPIKey *SourceTrustpilotAuthorizationMethodAPIKey `tfsdk:"source_trustpilot_update_authorization_method_api_key"`
- SourceTrustpilotUpdateAuthorizationMethodOAuth20 *SourceGitlabAuthorizationMethodOAuth20 `tfsdk:"source_trustpilot_update_authorization_method_o_auth_2_0"`
+ APIKey *SourceTrustpilotAPIKey `tfsdk:"api_key"`
+ OAuth20 *SourceGitlabOAuth20 `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_tvmaze_schedule.go b/internal/provider/type_source_tvmaze_schedule.go
old mode 100755
new mode 100644
index 70eb1bb0f..fa6d9e3cd
--- a/internal/provider/type_source_tvmaze_schedule.go
+++ b/internal/provider/type_source_tvmaze_schedule.go
@@ -7,7 +7,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceTvmazeSchedule struct {
DomesticScheduleCountryCode types.String `tfsdk:"domestic_schedule_country_code"`
EndDate types.String `tfsdk:"end_date"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
WebScheduleCountryCode types.String `tfsdk:"web_schedule_country_code"`
}
diff --git a/internal/provider/type_source_twilio.go b/internal/provider/type_source_twilio.go
old mode 100755
new mode 100644
index 6e2463d70..c6cc6276a
--- a/internal/provider/type_source_twilio.go
+++ b/internal/provider/type_source_twilio.go
@@ -8,6 +8,5 @@ type SourceTwilio struct {
AccountSid types.String `tfsdk:"account_sid"`
AuthToken types.String `tfsdk:"auth_token"`
LookbackWindow types.Int64 `tfsdk:"lookback_window"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_twilio_taskrouter.go b/internal/provider/type_source_twilio_taskrouter.go
old mode 100755
new mode 100644
index 706e05a14..e8c37ffe5
--- a/internal/provider/type_source_twilio_taskrouter.go
+++ b/internal/provider/type_source_twilio_taskrouter.go
@@ -7,5 +7,4 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceTwilioTaskrouter struct {
AccountSid types.String `tfsdk:"account_sid"`
AuthToken types.String `tfsdk:"auth_token"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_twitter.go b/internal/provider/type_source_twitter.go
old mode 100755
new mode 100644
index b782e1451..ded3d63a2
--- a/internal/provider/type_source_twitter.go
+++ b/internal/provider/type_source_twitter.go
@@ -5,9 +5,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceTwitter struct {
- APIKey types.String `tfsdk:"api_key"`
- EndDate types.String `tfsdk:"end_date"`
- Query types.String `tfsdk:"query"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIKey types.String `tfsdk:"api_key"`
+ EndDate types.String `tfsdk:"end_date"`
+ Query types.String `tfsdk:"query"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_typeform.go b/internal/provider/type_source_typeform.go
old mode 100755
new mode 100644
index 75ed6a447..0de557df4
--- a/internal/provider/type_source_typeform.go
+++ b/internal/provider/type_source_typeform.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceTypeform struct {
- Credentials SourceTypeformAuthorizationMethod `tfsdk:"credentials"`
- FormIds []types.String `tfsdk:"form_ids"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ Credentials SourceGitlabAuthorizationMethod `tfsdk:"credentials"`
+ FormIds []types.String `tfsdk:"form_ids"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_typeform_authorization_method.go b/internal/provider/type_source_typeform_authorization_method.go
deleted file mode 100755
index 2fe6bafda..000000000
--- a/internal/provider/type_source_typeform_authorization_method.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceTypeformAuthorizationMethod struct {
- SourceTypeformAuthorizationMethodOAuth20 *SourceGitlabAuthorizationMethodOAuth20 `tfsdk:"source_typeform_authorization_method_o_auth2_0"`
- SourceTypeformAuthorizationMethodPrivateToken *SourceGitlabAuthorizationMethodPrivateToken `tfsdk:"source_typeform_authorization_method_private_token"`
- SourceTypeformUpdateAuthorizationMethodOAuth20 *SourceGitlabAuthorizationMethodOAuth20 `tfsdk:"source_typeform_update_authorization_method_o_auth2_0"`
- SourceTypeformUpdateAuthorizationMethodPrivateToken *SourceGitlabAuthorizationMethodPrivateToken `tfsdk:"source_typeform_update_authorization_method_private_token"`
-}
diff --git a/internal/provider/type_source_us_census.go b/internal/provider/type_source_us_census.go
old mode 100755
new mode 100644
index f2e5a70a9..58020a9d9
--- a/internal/provider/type_source_us_census.go
+++ b/internal/provider/type_source_us_census.go
@@ -8,5 +8,4 @@ type SourceUsCensus struct {
APIKey types.String `tfsdk:"api_key"`
QueryParams types.String `tfsdk:"query_params"`
QueryPath types.String `tfsdk:"query_path"`
- SourceType types.String `tfsdk:"source_type"`
}
diff --git a/internal/provider/type_source_webflow.go b/internal/provider/type_source_webflow.go
old mode 100755
new mode 100644
index 5cfa929be..998df8e44
--- a/internal/provider/type_source_webflow.go
+++ b/internal/provider/type_source_webflow.go
@@ -5,7 +5,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceWebflow struct {
- APIKey types.String `tfsdk:"api_key"`
- SiteID types.String `tfsdk:"site_id"`
- SourceType types.String `tfsdk:"source_type"`
+ APIKey types.String `tfsdk:"api_key"`
+ SiteID types.String `tfsdk:"site_id"`
}
diff --git a/internal/provider/type_source_whisky_hunter.go b/internal/provider/type_source_whisky_hunter.go
deleted file mode 100755
index 58cf1628b..000000000
--- a/internal/provider/type_source_whisky_hunter.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceWhiskyHunter struct {
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_wikipedia_pageviews.go b/internal/provider/type_source_wikipedia_pageviews.go
old mode 100755
new mode 100644
index 1bc5bc7e8..2526017bb
--- a/internal/provider/type_source_wikipedia_pageviews.go
+++ b/internal/provider/type_source_wikipedia_pageviews.go
@@ -5,12 +5,11 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceWikipediaPageviews struct {
- Access types.String `tfsdk:"access"`
- Agent types.String `tfsdk:"agent"`
- Article types.String `tfsdk:"article"`
- Country types.String `tfsdk:"country"`
- End types.String `tfsdk:"end"`
- Project types.String `tfsdk:"project"`
- SourceType types.String `tfsdk:"source_type"`
- Start types.String `tfsdk:"start"`
+ Access types.String `tfsdk:"access"`
+ Agent types.String `tfsdk:"agent"`
+ Article types.String `tfsdk:"article"`
+ Country types.String `tfsdk:"country"`
+ End types.String `tfsdk:"end"`
+ Project types.String `tfsdk:"project"`
+ Start types.String `tfsdk:"start"`
}
diff --git a/internal/provider/type_source_woocommerce.go b/internal/provider/type_source_woocommerce.go
old mode 100755
new mode 100644
index bf893416f..d63637b5d
--- a/internal/provider/type_source_woocommerce.go
+++ b/internal/provider/type_source_woocommerce.go
@@ -5,9 +5,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceWoocommerce struct {
- APIKey types.String `tfsdk:"api_key"`
- APISecret types.String `tfsdk:"api_secret"`
- Shop types.String `tfsdk:"shop"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ APIKey types.String `tfsdk:"api_key"`
+ APISecret types.String `tfsdk:"api_secret"`
+ Shop types.String `tfsdk:"shop"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_xero.go b/internal/provider/type_source_xero.go
deleted file mode 100755
index 7cdbeaf95..000000000
--- a/internal/provider/type_source_xero.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceXero struct {
- Authentication SourceXeroAuthenticateViaXeroOAuth `tfsdk:"authentication"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- TenantID types.String `tfsdk:"tenant_id"`
-}
diff --git a/internal/provider/type_source_yandex_metrica.go b/internal/provider/type_source_yandex_metrica.go
old mode 100755
new mode 100644
index de46f0f47..1b4134aa5
--- a/internal/provider/type_source_yandex_metrica.go
+++ b/internal/provider/type_source_yandex_metrica.go
@@ -5,9 +5,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceYandexMetrica struct {
- AuthToken types.String `tfsdk:"auth_token"`
- CounterID types.String `tfsdk:"counter_id"`
- EndDate types.String `tfsdk:"end_date"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
+ AuthToken types.String `tfsdk:"auth_token"`
+ CounterID types.String `tfsdk:"counter_id"`
+ EndDate types.String `tfsdk:"end_date"`
+ StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_yotpo.go b/internal/provider/type_source_yotpo.go
old mode 100755
new mode 100644
index 531e2dbc9..49e8c77f7
--- a/internal/provider/type_source_yotpo.go
+++ b/internal/provider/type_source_yotpo.go
@@ -8,6 +8,5 @@ type SourceYotpo struct {
AccessToken types.String `tfsdk:"access_token"`
AppKey types.String `tfsdk:"app_key"`
Email types.String `tfsdk:"email"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
}
diff --git a/internal/provider/type_source_younium.go b/internal/provider/type_source_younium.go
deleted file mode 100755
index 5b63b2243..000000000
--- a/internal/provider/type_source_younium.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceYounium struct {
- LegalEntity types.String `tfsdk:"legal_entity"`
- Password types.String `tfsdk:"password"`
- Playground types.Bool `tfsdk:"playground"`
- SourceType types.String `tfsdk:"source_type"`
- Username types.String `tfsdk:"username"`
-}
diff --git a/internal/provider/type_source_youtube_analytics.go b/internal/provider/type_source_youtube_analytics.go
old mode 100755
new mode 100644
index bdd2d7f5f..dcf2afaa2
--- a/internal/provider/type_source_youtube_analytics.go
+++ b/internal/provider/type_source_youtube_analytics.go
@@ -2,9 +2,6 @@
package provider
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
type SourceYoutubeAnalytics struct {
- Credentials SourceYoutubeAnalyticsAuthenticateViaOAuth20 `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
+ Credentials AuthenticateViaHarvestOAuth `tfsdk:"credentials"`
}
diff --git a/internal/provider/type_source_youtube_analytics1.go b/internal/provider/type_source_youtube_analytics1.go
deleted file mode 100755
index 8d4a7ac91..000000000
--- a/internal/provider/type_source_youtube_analytics1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceYoutubeAnalytics1 struct {
- Credentials SourceYoutubeAnalyticsAuthenticateViaOAuth201 `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
-}
diff --git a/internal/provider/type_source_youtube_analytics_authenticate_via_o_auth201.go b/internal/provider/type_source_youtube_analytics_authenticate_via_o_auth201.go
deleted file mode 100755
index c0bfe8d54..000000000
--- a/internal/provider/type_source_youtube_analytics_authenticate_via_o_auth201.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceYoutubeAnalyticsAuthenticateViaOAuth201 struct {
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- RefreshToken types.String `tfsdk:"refresh_token"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_chat.go b/internal/provider/type_source_zendesk_chat.go
old mode 100755
new mode 100644
index e1a25cb5e..95e0d22c5
--- a/internal/provider/type_source_zendesk_chat.go
+++ b/internal/provider/type_source_zendesk_chat.go
@@ -6,7 +6,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceZendeskChat struct {
Credentials *SourceZendeskChatAuthorizationMethod `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
Subdomain types.String `tfsdk:"subdomain"`
}
diff --git a/internal/provider/type_source_zendesk_chat_authorization_method.go b/internal/provider/type_source_zendesk_chat_authorization_method.go
old mode 100755
new mode 100644
index f2e882dcc..5a25a10c2
--- a/internal/provider/type_source_zendesk_chat_authorization_method.go
+++ b/internal/provider/type_source_zendesk_chat_authorization_method.go
@@ -3,8 +3,6 @@
package provider
type SourceZendeskChatAuthorizationMethod struct {
- SourceZendeskChatAuthorizationMethodAccessToken *SourceZendeskChatAuthorizationMethodAccessToken `tfsdk:"source_zendesk_chat_authorization_method_access_token"`
- SourceZendeskChatAuthorizationMethodOAuth20 *SourceZendeskChatAuthorizationMethodOAuth20 `tfsdk:"source_zendesk_chat_authorization_method_o_auth2_0"`
- SourceZendeskChatUpdateAuthorizationMethodAccessToken *SourceZendeskChatAuthorizationMethodAccessToken `tfsdk:"source_zendesk_chat_update_authorization_method_access_token"`
- SourceZendeskChatUpdateAuthorizationMethodOAuth20 *SourceZendeskChatAuthorizationMethodOAuth20 `tfsdk:"source_zendesk_chat_update_authorization_method_o_auth2_0"`
+ AccessToken *OAuth2AccessToken `tfsdk:"access_token"`
+ OAuth20 *SourceZendeskChatOAuth20 `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_zendesk_chat_authorization_method_access_token.go b/internal/provider/type_source_zendesk_chat_authorization_method_access_token.go
deleted file mode 100755
index 51bfe6698..000000000
--- a/internal/provider/type_source_zendesk_chat_authorization_method_access_token.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskChatAuthorizationMethodAccessToken struct {
- AccessToken types.String `tfsdk:"access_token"`
- Credentials types.String `tfsdk:"credentials"`
-}
diff --git a/internal/provider/type_source_google_search_console_authentication_type_o_auth.go b/internal/provider/type_source_zendesk_chat_o_auth20.go
old mode 100755
new mode 100644
similarity index 76%
rename from internal/provider/type_source_google_search_console_authentication_type_o_auth.go
rename to internal/provider/type_source_zendesk_chat_o_auth20.go
index bec489327..2fe36d79e
--- a/internal/provider/type_source_google_search_console_authentication_type_o_auth.go
+++ b/internal/provider/type_source_zendesk_chat_o_auth20.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceGoogleSearchConsoleAuthenticationTypeOAuth struct {
+type SourceZendeskChatOAuth20 struct {
AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
RefreshToken types.String `tfsdk:"refresh_token"`
diff --git a/internal/provider/type_source_zendesk_sunshine.go b/internal/provider/type_source_zendesk_sunshine.go
old mode 100755
new mode 100644
index 71d1a6b1d..e30afcc77
--- a/internal/provider/type_source_zendesk_sunshine.go
+++ b/internal/provider/type_source_zendesk_sunshine.go
@@ -6,7 +6,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceZendeskSunshine struct {
Credentials *SourceZendeskSunshineAuthorizationMethod `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
Subdomain types.String `tfsdk:"subdomain"`
}
diff --git a/internal/provider/type_source_pipedrive_api_key_authentication.go b/internal/provider/type_source_zendesk_sunshine_api_token.go
old mode 100755
new mode 100644
similarity index 68%
rename from internal/provider/type_source_pipedrive_api_key_authentication.go
rename to internal/provider/type_source_zendesk_sunshine_api_token.go
index 339cc48f5..cf1eaed6b
--- a/internal/provider/type_source_pipedrive_api_key_authentication.go
+++ b/internal/provider/type_source_zendesk_sunshine_api_token.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourcePipedriveAPIKeyAuthentication struct {
+type SourceZendeskSunshineAPIToken struct {
APIToken types.String `tfsdk:"api_token"`
- AuthType types.String `tfsdk:"auth_type"`
+ Email types.String `tfsdk:"email"`
}
diff --git a/internal/provider/type_source_zendesk_sunshine_authorization_method.go b/internal/provider/type_source_zendesk_sunshine_authorization_method.go
old mode 100755
new mode 100644
index b63691156..7a6f943f6
--- a/internal/provider/type_source_zendesk_sunshine_authorization_method.go
+++ b/internal/provider/type_source_zendesk_sunshine_authorization_method.go
@@ -3,8 +3,6 @@
package provider
type SourceZendeskSunshineAuthorizationMethod struct {
- SourceZendeskSunshineAuthorizationMethodAPIToken *SourceZendeskSunshineAuthorizationMethodAPIToken `tfsdk:"source_zendesk_sunshine_authorization_method_api_token"`
- SourceZendeskSunshineAuthorizationMethodOAuth20 *SourceZendeskSunshineAuthorizationMethodOAuth20 `tfsdk:"source_zendesk_sunshine_authorization_method_o_auth2_0"`
- SourceZendeskSunshineUpdateAuthorizationMethodAPIToken *SourceZendeskSunshineAuthorizationMethodAPIToken `tfsdk:"source_zendesk_sunshine_update_authorization_method_api_token"`
- SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 *SourceZendeskSunshineAuthorizationMethodOAuth20 `tfsdk:"source_zendesk_sunshine_update_authorization_method_o_auth2_0"`
+ APIToken *SourceZendeskSunshineAPIToken `tfsdk:"api_token"`
+ OAuth20 *SourceNotionOAuth20 `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_zendesk_sunshine_authorization_method_api_token.go b/internal/provider/type_source_zendesk_sunshine_authorization_method_api_token.go
deleted file mode 100755
index 4fca9b95a..000000000
--- a/internal/provider/type_source_zendesk_sunshine_authorization_method_api_token.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskSunshineAuthorizationMethodAPIToken struct {
- APIToken types.String `tfsdk:"api_token"`
- AuthMethod types.String `tfsdk:"auth_method"`
- Email types.String `tfsdk:"email"`
-}
diff --git a/internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth20.go b/internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth20.go
deleted file mode 100755
index 5747b9c21..000000000
--- a/internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth20.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskSunshineAuthorizationMethodOAuth20 struct {
- AccessToken types.String `tfsdk:"access_token"`
- AuthMethod types.String `tfsdk:"auth_method"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
-}
diff --git a/internal/provider/type_source_zendesk_support.go b/internal/provider/type_source_zendesk_support.go
old mode 100755
new mode 100644
index 935b8835d..d3a8bfc21
--- a/internal/provider/type_source_zendesk_support.go
+++ b/internal/provider/type_source_zendesk_support.go
@@ -7,7 +7,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceZendeskSupport struct {
Credentials *SourceZendeskSupportAuthentication `tfsdk:"credentials"`
IgnorePagination types.Bool `tfsdk:"ignore_pagination"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
Subdomain types.String `tfsdk:"subdomain"`
}
diff --git a/internal/provider/type_source_zendesk_support1.go b/internal/provider/type_source_zendesk_support1.go
deleted file mode 100755
index 2dee43513..000000000
--- a/internal/provider/type_source_zendesk_support1.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskSupport1 struct {
- Credentials *SourceZendeskSupportAuthentication1 `tfsdk:"credentials"`
- IgnorePagination types.Bool `tfsdk:"ignore_pagination"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- Subdomain types.String `tfsdk:"subdomain"`
-}
diff --git a/internal/provider/type_source_zendesk_talk_authentication_api_token.go b/internal/provider/type_source_zendesk_support_api_token.go
old mode 100755
new mode 100644
similarity index 75%
rename from internal/provider/type_source_zendesk_talk_authentication_api_token.go
rename to internal/provider/type_source_zendesk_support_api_token.go
index 3b73a8ad9..cd95155b1
--- a/internal/provider/type_source_zendesk_talk_authentication_api_token.go
+++ b/internal/provider/type_source_zendesk_support_api_token.go
@@ -4,9 +4,8 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceZendeskTalkAuthenticationAPIToken struct {
+type SourceZendeskSupportAPIToken struct {
+ AdditionalProperties types.String `tfsdk:"additional_properties"`
APIToken types.String `tfsdk:"api_token"`
- AuthType types.String `tfsdk:"auth_type"`
Email types.String `tfsdk:"email"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
}
diff --git a/internal/provider/type_source_zendesk_support_authentication.go b/internal/provider/type_source_zendesk_support_authentication.go
old mode 100755
new mode 100644
index d7ff61a1d..71c4b0389
--- a/internal/provider/type_source_zendesk_support_authentication.go
+++ b/internal/provider/type_source_zendesk_support_authentication.go
@@ -3,8 +3,6 @@
package provider
type SourceZendeskSupportAuthentication struct {
- SourceZendeskSupportAuthenticationAPIToken *SourceZendeskSupportAuthenticationAPIToken `tfsdk:"source_zendesk_support_authentication_api_token"`
- SourceZendeskSupportAuthenticationOAuth20 *SourceZendeskSupportAuthenticationOAuth20 `tfsdk:"source_zendesk_support_authentication_o_auth2_0"`
- SourceZendeskSupportUpdateAuthenticationAPIToken *SourceZendeskSupportUpdateAuthenticationAPIToken `tfsdk:"source_zendesk_support_update_authentication_api_token"`
- SourceZendeskSupportUpdateAuthenticationOAuth20 *SourceZendeskSupportUpdateAuthenticationOAuth20 `tfsdk:"source_zendesk_support_update_authentication_o_auth2_0"`
+ APIToken *SourceZendeskSupportAPIToken `tfsdk:"api_token"`
+ OAuth20 *SourceZendeskSupportOAuth20 `tfsdk:"o_auth20"`
}
diff --git a/internal/provider/type_source_zendesk_support_authentication1.go b/internal/provider/type_source_zendesk_support_authentication1.go
deleted file mode 100755
index 1cae4440a..000000000
--- a/internal/provider/type_source_zendesk_support_authentication1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceZendeskSupportAuthentication1 struct {
- SourceZendeskSupportAuthenticationAPIToken *SourceZendeskSupportAuthenticationAPIToken1 `tfsdk:"source_zendesk_support_authentication_api_token"`
- SourceZendeskSupportAuthenticationOAuth20 *SourceZendeskSupportAuthenticationOAuth201 `tfsdk:"source_zendesk_support_authentication_o_auth2_0"`
- SourceZendeskSupportUpdateAuthenticationAPIToken *SourceZendeskSupportUpdateAuthenticationAPIToken1 `tfsdk:"source_zendesk_support_update_authentication_api_token"`
- SourceZendeskSupportUpdateAuthenticationOAuth20 *SourceZendeskSupportUpdateAuthenticationOAuth201 `tfsdk:"source_zendesk_support_update_authentication_o_auth2_0"`
-}
diff --git a/internal/provider/type_source_zendesk_support_authentication_api_token.go b/internal/provider/type_source_zendesk_support_authentication_api_token.go
deleted file mode 100755
index 69e45c83b..000000000
--- a/internal/provider/type_source_zendesk_support_authentication_api_token.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskSupportAuthenticationAPIToken struct {
- APIToken types.String `tfsdk:"api_token"`
- Credentials types.String `tfsdk:"credentials"`
- Email types.String `tfsdk:"email"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_support_authentication_api_token1.go b/internal/provider/type_source_zendesk_support_authentication_api_token1.go
deleted file mode 100755
index 1a536558c..000000000
--- a/internal/provider/type_source_zendesk_support_authentication_api_token1.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskSupportAuthenticationAPIToken1 struct {
- APIToken types.String `tfsdk:"api_token"`
- Credentials types.String `tfsdk:"credentials"`
- Email types.String `tfsdk:"email"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_support_authentication_o_auth20.go b/internal/provider/type_source_zendesk_support_authentication_o_auth20.go
deleted file mode 100755
index 13df8eeec..000000000
--- a/internal/provider/type_source_zendesk_support_authentication_o_auth20.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskSupportAuthenticationOAuth20 struct {
- AccessToken types.String `tfsdk:"access_token"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- Credentials types.String `tfsdk:"credentials"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_support_authentication_o_auth201.go b/internal/provider/type_source_zendesk_support_authentication_o_auth201.go
deleted file mode 100755
index 102cb10e6..000000000
--- a/internal/provider/type_source_zendesk_support_authentication_o_auth201.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskSupportAuthenticationOAuth201 struct {
- AccessToken types.String `tfsdk:"access_token"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- Credentials types.String `tfsdk:"credentials"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_talk_authentication_o_auth20.go b/internal/provider/type_source_zendesk_support_o_auth20.go
old mode 100755
new mode 100644
similarity index 78%
rename from internal/provider/type_source_zendesk_talk_authentication_o_auth20.go
rename to internal/provider/type_source_zendesk_support_o_auth20.go
index dcf3fc703..32e858578
--- a/internal/provider/type_source_zendesk_talk_authentication_o_auth20.go
+++ b/internal/provider/type_source_zendesk_support_o_auth20.go
@@ -4,10 +4,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceZendeskTalkAuthenticationOAuth20 struct {
+type SourceZendeskSupportOAuth20 struct {
AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
+ AdditionalProperties types.String `tfsdk:"additional_properties"`
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
}
diff --git a/internal/provider/type_source_zendesk_support_update_authentication_api_token.go b/internal/provider/type_source_zendesk_support_update_authentication_api_token.go
deleted file mode 100755
index 44a51d5ec..000000000
--- a/internal/provider/type_source_zendesk_support_update_authentication_api_token.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskSupportUpdateAuthenticationAPIToken struct {
- APIToken types.String `tfsdk:"api_token"`
- Credentials types.String `tfsdk:"credentials"`
- Email types.String `tfsdk:"email"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_support_update_authentication_api_token1.go b/internal/provider/type_source_zendesk_support_update_authentication_api_token1.go
deleted file mode 100755
index 27e7f29ce..000000000
--- a/internal/provider/type_source_zendesk_support_update_authentication_api_token1.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskSupportUpdateAuthenticationAPIToken1 struct {
- APIToken types.String `tfsdk:"api_token"`
- Credentials types.String `tfsdk:"credentials"`
- Email types.String `tfsdk:"email"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_support_update_authentication_o_auth20.go b/internal/provider/type_source_zendesk_support_update_authentication_o_auth20.go
deleted file mode 100755
index 2389db150..000000000
--- a/internal/provider/type_source_zendesk_support_update_authentication_o_auth20.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskSupportUpdateAuthenticationOAuth20 struct {
- AccessToken types.String `tfsdk:"access_token"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- Credentials types.String `tfsdk:"credentials"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_support_update_authentication_o_auth201.go b/internal/provider/type_source_zendesk_support_update_authentication_o_auth201.go
deleted file mode 100755
index 4b2f30e24..000000000
--- a/internal/provider/type_source_zendesk_support_update_authentication_o_auth201.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskSupportUpdateAuthenticationOAuth201 struct {
- AccessToken types.String `tfsdk:"access_token"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- Credentials types.String `tfsdk:"credentials"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_talk.go b/internal/provider/type_source_zendesk_talk.go
old mode 100755
new mode 100644
index 9ec8b1f98..9c2048251
--- a/internal/provider/type_source_zendesk_talk.go
+++ b/internal/provider/type_source_zendesk_talk.go
@@ -5,8 +5,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceZendeskTalk struct {
- Credentials *SourceZendeskTalkAuthentication `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- Subdomain types.String `tfsdk:"subdomain"`
+ Credentials *SourceZendeskSupportAuthentication `tfsdk:"credentials"`
+ StartDate types.String `tfsdk:"start_date"`
+ Subdomain types.String `tfsdk:"subdomain"`
}
diff --git a/internal/provider/type_source_zendesk_talk1.go b/internal/provider/type_source_zendesk_talk1.go
deleted file mode 100755
index 2740c5a3b..000000000
--- a/internal/provider/type_source_zendesk_talk1.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskTalk1 struct {
- Credentials *SourceZendeskTalkAuthentication1 `tfsdk:"credentials"`
- SourceType types.String `tfsdk:"source_type"`
- StartDate types.String `tfsdk:"start_date"`
- Subdomain types.String `tfsdk:"subdomain"`
-}
diff --git a/internal/provider/type_source_zendesk_talk_authentication.go b/internal/provider/type_source_zendesk_talk_authentication.go
deleted file mode 100755
index b9b1befd8..000000000
--- a/internal/provider/type_source_zendesk_talk_authentication.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceZendeskTalkAuthentication struct {
- SourceZendeskTalkAuthenticationAPIToken *SourceZendeskTalkAuthenticationAPIToken `tfsdk:"source_zendesk_talk_authentication_api_token"`
- SourceZendeskTalkAuthenticationOAuth20 *SourceZendeskTalkAuthenticationOAuth20 `tfsdk:"source_zendesk_talk_authentication_o_auth2_0"`
- SourceZendeskTalkUpdateAuthenticationAPIToken *SourceZendeskTalkUpdateAuthenticationAPIToken `tfsdk:"source_zendesk_talk_update_authentication_api_token"`
- SourceZendeskTalkUpdateAuthenticationOAuth20 *SourceZendeskTalkUpdateAuthenticationOAuth20 `tfsdk:"source_zendesk_talk_update_authentication_o_auth2_0"`
-}
diff --git a/internal/provider/type_source_zendesk_talk_authentication1.go b/internal/provider/type_source_zendesk_talk_authentication1.go
deleted file mode 100755
index a45af85ae..000000000
--- a/internal/provider/type_source_zendesk_talk_authentication1.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-type SourceZendeskTalkAuthentication1 struct {
- SourceZendeskTalkAuthenticationAPIToken *SourceZendeskTalkAuthenticationAPIToken1 `tfsdk:"source_zendesk_talk_authentication_api_token"`
- SourceZendeskTalkAuthenticationOAuth20 *SourceZendeskTalkAuthenticationOAuth201 `tfsdk:"source_zendesk_talk_authentication_o_auth2_0"`
- SourceZendeskTalkUpdateAuthenticationAPIToken *SourceZendeskTalkUpdateAuthenticationAPIToken1 `tfsdk:"source_zendesk_talk_update_authentication_api_token"`
- SourceZendeskTalkUpdateAuthenticationOAuth20 *SourceZendeskTalkUpdateAuthenticationOAuth201 `tfsdk:"source_zendesk_talk_update_authentication_o_auth2_0"`
-}
diff --git a/internal/provider/type_source_zendesk_talk_authentication_api_token1.go b/internal/provider/type_source_zendesk_talk_authentication_api_token1.go
deleted file mode 100755
index 2d692a522..000000000
--- a/internal/provider/type_source_zendesk_talk_authentication_api_token1.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskTalkAuthenticationAPIToken1 struct {
- APIToken types.String `tfsdk:"api_token"`
- AuthType types.String `tfsdk:"auth_type"`
- Email types.String `tfsdk:"email"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_talk_authentication_o_auth201.go b/internal/provider/type_source_zendesk_talk_authentication_o_auth201.go
deleted file mode 100755
index dc240f924..000000000
--- a/internal/provider/type_source_zendesk_talk_authentication_o_auth201.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskTalkAuthenticationOAuth201 struct {
- AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_talk_update_authentication_api_token.go b/internal/provider/type_source_zendesk_talk_update_authentication_api_token.go
deleted file mode 100755
index 60fdbde41..000000000
--- a/internal/provider/type_source_zendesk_talk_update_authentication_api_token.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskTalkUpdateAuthenticationAPIToken struct {
- APIToken types.String `tfsdk:"api_token"`
- AuthType types.String `tfsdk:"auth_type"`
- Email types.String `tfsdk:"email"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_talk_update_authentication_api_token1.go b/internal/provider/type_source_zendesk_talk_update_authentication_api_token1.go
deleted file mode 100755
index a158aeff0..000000000
--- a/internal/provider/type_source_zendesk_talk_update_authentication_api_token1.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskTalkUpdateAuthenticationAPIToken1 struct {
- APIToken types.String `tfsdk:"api_token"`
- AuthType types.String `tfsdk:"auth_type"`
- Email types.String `tfsdk:"email"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_talk_update_authentication_o_auth20.go b/internal/provider/type_source_zendesk_talk_update_authentication_o_auth20.go
deleted file mode 100755
index fde3d87b3..000000000
--- a/internal/provider/type_source_zendesk_talk_update_authentication_o_auth20.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskTalkUpdateAuthenticationOAuth20 struct {
- AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zendesk_talk_update_authentication_o_auth201.go b/internal/provider/type_source_zendesk_talk_update_authentication_o_auth201.go
deleted file mode 100755
index ad2342d72..000000000
--- a/internal/provider/type_source_zendesk_talk_update_authentication_o_auth201.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package provider
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type SourceZendeskTalkUpdateAuthenticationOAuth201 struct {
- AccessToken types.String `tfsdk:"access_token"`
- AuthType types.String `tfsdk:"auth_type"`
- ClientID types.String `tfsdk:"client_id"`
- ClientSecret types.String `tfsdk:"client_secret"`
- AdditionalProperties types.String `tfsdk:"additional_properties"`
-}
diff --git a/internal/provider/type_source_zenloop.go b/internal/provider/type_source_zenloop.go
old mode 100755
new mode 100644
index 979c5660c..cc6048364
--- a/internal/provider/type_source_zenloop.go
+++ b/internal/provider/type_source_zenloop.go
@@ -7,7 +7,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceZenloop struct {
APIToken types.String `tfsdk:"api_token"`
DateFrom types.String `tfsdk:"date_from"`
- SourceType types.String `tfsdk:"source_type"`
SurveyGroupID types.String `tfsdk:"survey_group_id"`
SurveyID types.String `tfsdk:"survey_id"`
}
diff --git a/internal/provider/type_source_zoho_crm.go b/internal/provider/type_source_zoho_crm.go
old mode 100755
new mode 100644
index dd7ac3972..a91de38d0
--- a/internal/provider/type_source_zoho_crm.go
+++ b/internal/provider/type_source_zoho_crm.go
@@ -11,6 +11,5 @@ type SourceZohoCrm struct {
Edition types.String `tfsdk:"edition"`
Environment types.String `tfsdk:"environment"`
RefreshToken types.String `tfsdk:"refresh_token"`
- SourceType types.String `tfsdk:"source_type"`
StartDatetime types.String `tfsdk:"start_datetime"`
}
diff --git a/internal/provider/type_source_zoom.go b/internal/provider/type_source_zoom.go
old mode 100755
new mode 100644
index edd42ec4d..ea1e68dff
--- a/internal/provider/type_source_zoom.go
+++ b/internal/provider/type_source_zoom.go
@@ -5,6 +5,5 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
type SourceZoom struct {
- JwtToken types.String `tfsdk:"jwt_token"`
- SourceType types.String `tfsdk:"source_type"`
+ JwtToken types.String `tfsdk:"jwt_token"`
}
diff --git a/internal/provider/type_source_zuora.go b/internal/provider/type_source_zuora.go
old mode 100755
new mode 100644
index d7ecfde34..5c890a4f6
--- a/internal/provider/type_source_zuora.go
+++ b/internal/provider/type_source_zuora.go
@@ -8,7 +8,6 @@ type SourceZuora struct {
ClientID types.String `tfsdk:"client_id"`
ClientSecret types.String `tfsdk:"client_secret"`
DataQuery types.String `tfsdk:"data_query"`
- SourceType types.String `tfsdk:"source_type"`
StartDate types.String `tfsdk:"start_date"`
TenantEndpoint types.String `tfsdk:"tenant_endpoint"`
WindowInDays types.String `tfsdk:"window_in_days"`
diff --git a/internal/provider/type_ssh_key_authentication.go b/internal/provider/type_ssh_key_authentication.go
new file mode 100644
index 000000000..80f488e01
--- /dev/null
+++ b/internal/provider/type_ssh_key_authentication.go
@@ -0,0 +1,12 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SSHKeyAuthentication struct {
+ SSHKey types.String `tfsdk:"ssh_key"`
+ TunnelHost types.String `tfsdk:"tunnel_host"`
+ TunnelPort types.Int64 `tfsdk:"tunnel_port"`
+ TunnelUser types.String `tfsdk:"tunnel_user"`
+}
diff --git a/internal/provider/type_source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance.go b/internal/provider/type_standalone_mongo_db_instance.go
old mode 100755
new mode 100644
similarity index 79%
rename from internal/provider/type_source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance.go
rename to internal/provider/type_standalone_mongo_db_instance.go
index 71bc2132c..deb427b7a
--- a/internal/provider/type_source_mongodb_mongo_db_instance_type_standalone_mongo_db_instance.go
+++ b/internal/provider/type_standalone_mongo_db_instance.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance struct {
+type StandaloneMongoDbInstance struct {
Host types.String `tfsdk:"host"`
Instance types.String `tfsdk:"instance"`
Port types.Int64 `tfsdk:"port"`
diff --git a/internal/provider/type_stream_configuration.go b/internal/provider/type_stream_configuration.go
old mode 100755
new mode 100644
diff --git a/internal/provider/type_stream_configurations.go b/internal/provider/type_stream_configurations.go
old mode 100755
new mode 100644
diff --git a/internal/provider/type_source_salesforce_streams_criteria.go b/internal/provider/type_streams_criteria.go
old mode 100755
new mode 100644
similarity index 83%
rename from internal/provider/type_source_salesforce_streams_criteria.go
rename to internal/provider/type_streams_criteria.go
index 7285838eb..c2d08b392
--- a/internal/provider/type_source_salesforce_streams_criteria.go
+++ b/internal/provider/type_streams_criteria.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceSalesforceStreamsCriteria struct {
+type StreamsCriteria struct {
Criteria types.String `tfsdk:"criteria"`
Value types.String `tfsdk:"value"`
}
diff --git a/internal/provider/type_system_idsid.go b/internal/provider/type_system_idsid.go
new file mode 100644
index 000000000..8e86e4ad9
--- /dev/null
+++ b/internal/provider/type_system_idsid.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type SystemIDSID struct {
+ Sid types.String `tfsdk:"sid"`
+}
diff --git a/internal/provider/type_destination_milvus_indexing_authentication_no_auth.go b/internal/provider/type_tls_encrypted_verify_certificate.go
old mode 100755
new mode 100644
similarity index 61%
rename from internal/provider/type_destination_milvus_indexing_authentication_no_auth.go
rename to internal/provider/type_tls_encrypted_verify_certificate.go
index 507bdc6cf..5dbd7afd6
--- a/internal/provider/type_destination_milvus_indexing_authentication_no_auth.go
+++ b/internal/provider/type_tls_encrypted_verify_certificate.go
@@ -4,6 +4,6 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationMilvusIndexingAuthenticationNoAuth struct {
- Mode types.String `tfsdk:"mode"`
+type TLSEncryptedVerifyCertificate struct {
+ SslCertificate types.String `tfsdk:"ssl_certificate"`
}
diff --git a/internal/provider/type_user_provided.go b/internal/provider/type_user_provided.go
new file mode 100644
index 000000000..4674ba528
--- /dev/null
+++ b/internal/provider/type_user_provided.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type UserProvided struct {
+ ColumnNames []types.String `tfsdk:"column_names"`
+}
diff --git a/internal/provider/type_username_and_password.go b/internal/provider/type_username_and_password.go
new file mode 100644
index 000000000..9d24a27d6
--- /dev/null
+++ b/internal/provider/type_username_and_password.go
@@ -0,0 +1,9 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package provider
+
+import "github.com/hashicorp/terraform-plugin-framework/types"
+
+type UsernameAndPassword struct {
+ Password types.String `tfsdk:"password"`
+}
diff --git a/internal/provider/type_destination_milvus_indexing_authentication_username_password.go b/internal/provider/type_username_password.go
old mode 100755
new mode 100644
similarity index 69%
rename from internal/provider/type_destination_milvus_indexing_authentication_username_password.go
rename to internal/provider/type_username_password.go
index ffea134d1..0e583aaa5
--- a/internal/provider/type_destination_milvus_indexing_authentication_username_password.go
+++ b/internal/provider/type_username_password.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationMilvusIndexingAuthenticationUsernamePassword struct {
- Mode types.String `tfsdk:"mode"`
+type UsernamePassword struct {
Password types.String `tfsdk:"password"`
Username types.String `tfsdk:"username"`
}
diff --git a/internal/provider/type_destination_postgres_ssl_modes_verify_ca.go b/internal/provider/type_verify_ca.go
old mode 100755
new mode 100644
similarity index 73%
rename from internal/provider/type_destination_postgres_ssl_modes_verify_ca.go
rename to internal/provider/type_verify_ca.go
index 808be3a4a..2f3de4e3a
--- a/internal/provider/type_destination_postgres_ssl_modes_verify_ca.go
+++ b/internal/provider/type_verify_ca.go
@@ -4,8 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationPostgresSSLModesVerifyCa struct {
+type VerifyCa struct {
CaCertificate types.String `tfsdk:"ca_certificate"`
ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
}
diff --git a/internal/provider/type_source_mysql_ssl_modes_verify_identity.go b/internal/provider/type_verify_full.go
old mode 100755
new mode 100644
similarity index 80%
rename from internal/provider/type_source_mysql_ssl_modes_verify_identity.go
rename to internal/provider/type_verify_full.go
index 201dec353..0848fc0a2
--- a/internal/provider/type_source_mysql_ssl_modes_verify_identity.go
+++ b/internal/provider/type_verify_full.go
@@ -4,10 +4,9 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type SourceMysqlSSLModesVerifyIdentity struct {
+type VerifyFull struct {
CaCertificate types.String `tfsdk:"ca_certificate"`
ClientCertificate types.String `tfsdk:"client_certificate"`
ClientKey types.String `tfsdk:"client_key"`
ClientKeyPassword types.String `tfsdk:"client_key_password"`
- Mode types.String `tfsdk:"mode"`
}
diff --git a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_deflate.go b/internal/provider/type_xz.go
old mode 100755
new mode 100644
similarity index 77%
rename from internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_deflate.go
rename to internal/provider/type_xz.go
index 282d1d27b..47ef82ac4
--- a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_deflate.go
+++ b/internal/provider/type_xz.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate struct {
+type Xz struct {
Codec types.String `tfsdk:"codec"`
CompressionLevel types.Int64 `tfsdk:"compression_level"`
}
diff --git a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard.go b/internal/provider/type_zstandard.go
old mode 100755
new mode 100644
similarity index 80%
rename from internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard.go
rename to internal/provider/type_zstandard.go
index dfbf85be8..6c0784449
--- a/internal/provider/type_destination_gcs_output_format_avro_apache_avro_compression_codec_zstandard.go
+++ b/internal/provider/type_zstandard.go
@@ -4,7 +4,7 @@ package provider
import "github.com/hashicorp/terraform-plugin-framework/types"
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard struct {
+type Zstandard struct {
Codec types.String `tfsdk:"codec"`
CompressionLevel types.Int64 `tfsdk:"compression_level"`
IncludeChecksum types.Bool `tfsdk:"include_checksum"`
diff --git a/internal/provider/utils.go b/internal/provider/utils.go
old mode 100755
new mode 100644
index 22f9867c6..468fec85f
--- a/internal/provider/utils.go
+++ b/internal/provider/utils.go
@@ -3,10 +3,10 @@
package provider
import (
- tfReflect "airbyte/internal/provider/reflect"
"context"
"encoding/json"
"fmt"
+ tfReflect "github.com/airbytehq/terraform-provider-airbyte/internal/provider/reflect"
"github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
diff --git a/internal/provider/workspace_data_source.go b/internal/provider/workspace_data_source.go
old mode 100755
new mode 100644
index c2917487e..cc50b84dd
--- a/internal/provider/workspace_data_source.go
+++ b/internal/provider/workspace_data_source.go
@@ -3,15 +3,13 @@
package provider
import (
- "airbyte/internal/sdk"
- "airbyte/internal/sdk/pkg/models/operations"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
- "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/datasource"
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
- "github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
)
@@ -48,19 +46,11 @@ func (r *WorkspaceDataSource) Schema(ctx context.Context, req datasource.SchemaR
Attributes: map[string]schema.Attribute{
"data_residency": schema.StringAttribute{
- Computed: true,
- Validators: []validator.String{
- stringvalidator.OneOf(
- "auto",
- "us",
- "eu",
- ),
- },
- Description: `must be one of ["auto", "us", "eu"]`,
+ Computed: true,
+ Description: `must be one of ["auto", "us", "eu"]; Default: "auto"`,
},
"name": schema.StringAttribute{
- Computed: true,
- Description: `Name of the workspace`,
+ Computed: true,
},
"workspace_id": schema.StringAttribute{
Required: true,
diff --git a/internal/provider/workspace_data_source_sdk.go b/internal/provider/workspace_data_source_sdk.go
old mode 100755
new mode 100644
index 8a1fa6591..41c256f77
--- a/internal/provider/workspace_data_source_sdk.go
+++ b/internal/provider/workspace_data_source_sdk.go
@@ -3,12 +3,16 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
func (r *WorkspaceDataSourceModel) RefreshFromGetResponse(resp *shared.WorkspaceResponse) {
- r.DataResidency = types.StringValue(string(resp.DataResidency))
+ if resp.DataResidency != nil {
+ r.DataResidency = types.StringValue(string(*resp.DataResidency))
+ } else {
+ r.DataResidency = types.StringNull()
+ }
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/provider/workspace_resource.go b/internal/provider/workspace_resource.go
old mode 100755
new mode 100644
index 8ebcfd99a..b7af20b21
--- a/internal/provider/workspace_resource.go
+++ b/internal/provider/workspace_resource.go
@@ -3,12 +3,12 @@
package provider
import (
- "airbyte/internal/sdk"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk"
- speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier"
- "airbyte/internal/sdk/pkg/models/operations"
+ speakeasy_stringplanmodifier "github.com/airbytehq/terraform-provider-airbyte/internal/planmodifiers/stringplanmodifier"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
@@ -53,6 +53,7 @@ func (r *WorkspaceResource) Schema(ctx context.Context, req resource.SchemaReque
PlanModifiers: []planmodifier.String{
speakeasy_stringplanmodifier.SuppressDiff(),
},
+ Description: `must be one of ["auto", "us", "eu"]; Default: "auto"`,
Validators: []validator.String{
stringvalidator.OneOf(
"auto",
@@ -60,7 +61,6 @@ func (r *WorkspaceResource) Schema(ctx context.Context, req resource.SchemaReque
"eu",
),
},
- Description: `must be one of ["auto", "us", "eu"]`,
},
"name": schema.StringAttribute{
PlanModifiers: []planmodifier.String{
@@ -273,5 +273,5 @@ func (r *WorkspaceResource) Delete(ctx context.Context, req resource.DeleteReque
}
func (r *WorkspaceResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
- resource.ImportStatePassthroughID(ctx, path.Root("workspace_id"), req, resp)
+ resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("workspace_id"), req.ID)...)
}
diff --git a/internal/provider/workspace_resource_sdk.go b/internal/provider/workspace_resource_sdk.go
old mode 100755
new mode 100644
index 0281dbdd6..17e6f5731
--- a/internal/provider/workspace_resource_sdk.go
+++ b/internal/provider/workspace_resource_sdk.go
@@ -3,7 +3,7 @@
package provider
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"github.com/hashicorp/terraform-plugin-framework/types"
)
@@ -34,7 +34,11 @@ func (r *WorkspaceResourceModel) ToDeleteSDKType() *shared.WorkspaceCreateReques
}
func (r *WorkspaceResourceModel) RefreshFromGetResponse(resp *shared.WorkspaceResponse) {
- r.DataResidency = types.StringValue(string(resp.DataResidency))
+ if resp.DataResidency != nil {
+ r.DataResidency = types.StringValue(string(*resp.DataResidency))
+ } else {
+ r.DataResidency = types.StringNull()
+ }
r.Name = types.StringValue(resp.Name)
r.WorkspaceID = types.StringValue(resp.WorkspaceID)
}
diff --git a/internal/sdk/connections.go b/internal/sdk/connections.go
old mode 100755
new mode 100644
index 971d0fabe..9bfde4a72
--- a/internal/sdk/connections.go
+++ b/internal/sdk/connections.go
@@ -3,33 +3,34 @@
package sdk
import (
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/sdk/pkg/models/shared"
- "airbyte/internal/sdk/pkg/utils"
"bytes"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/sdkerrors"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"io"
"net/http"
"strings"
)
-type connections struct {
+type Connections struct {
sdkConfiguration sdkConfiguration
}
-func newConnections(sdkConfig sdkConfiguration) *connections {
- return &connections{
+func newConnections(sdkConfig sdkConfiguration) *Connections {
+ return &Connections{
sdkConfiguration: sdkConfig,
}
}
// CreateConnection - Create a connection
-func (s *connections) CreateConnection(ctx context.Context, request shared.ConnectionCreateRequest) (*operations.CreateConnectionResponse, error) {
+func (s *Connections) CreateConnection(ctx context.Context, request shared.ConnectionCreateRequest) (*operations.CreateConnectionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/connections"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, false, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
@@ -45,7 +46,7 @@ func (s *connections) CreateConnection(ctx context.Context, request shared.Conne
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -78,12 +79,14 @@ func (s *connections) CreateConnection(ctx context.Context, request shared.Conne
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.ConnectionResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.ConnectionResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.ConnectionResponse = out
+ res.ConnectionResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -94,7 +97,7 @@ func (s *connections) CreateConnection(ctx context.Context, request shared.Conne
}
// DeleteConnection - Delete a Connection
-func (s *connections) DeleteConnection(ctx context.Context, request operations.DeleteConnectionRequest) (*operations.DeleteConnectionResponse, error) {
+func (s *Connections) DeleteConnection(ctx context.Context, request operations.DeleteConnectionRequest) (*operations.DeleteConnectionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/connections/{connectionId}", request, nil)
if err != nil {
@@ -106,7 +109,7 @@ func (s *connections) DeleteConnection(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -144,7 +147,7 @@ func (s *connections) DeleteConnection(ctx context.Context, request operations.D
}
// GetConnection - Get Connection details
-func (s *connections) GetConnection(ctx context.Context, request operations.GetConnectionRequest) (*operations.GetConnectionResponse, error) {
+func (s *Connections) GetConnection(ctx context.Context, request operations.GetConnectionRequest) (*operations.GetConnectionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/connections/{connectionId}", request, nil)
if err != nil {
@@ -156,7 +159,7 @@ func (s *connections) GetConnection(ctx context.Context, request operations.GetC
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -186,12 +189,14 @@ func (s *connections) GetConnection(ctx context.Context, request operations.GetC
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.ConnectionResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.ConnectionResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.ConnectionResponse = out
+ res.ConnectionResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -202,7 +207,7 @@ func (s *connections) GetConnection(ctx context.Context, request operations.GetC
}
// ListConnections - List connections
-func (s *connections) ListConnections(ctx context.Context, request operations.ListConnectionsRequest) (*operations.ListConnectionsResponse, error) {
+func (s *Connections) ListConnections(ctx context.Context, request operations.ListConnectionsRequest) (*operations.ListConnectionsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/connections"
@@ -211,7 +216,7 @@ func (s *connections) ListConnections(ctx context.Context, request operations.Li
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
if err := utils.PopulateQueryParams(ctx, req, request, nil); err != nil {
return nil, fmt.Errorf("error populating query params: %w", err)
@@ -245,12 +250,14 @@ func (s *connections) ListConnections(ctx context.Context, request operations.Li
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.ConnectionsResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.ConnectionsResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.ConnectionsResponse = out
+ res.ConnectionsResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -261,14 +268,14 @@ func (s *connections) ListConnections(ctx context.Context, request operations.Li
}
// PatchConnection - Update Connection details
-func (s *connections) PatchConnection(ctx context.Context, request operations.PatchConnectionRequest) (*operations.PatchConnectionResponse, error) {
+func (s *Connections) PatchConnection(ctx context.Context, request operations.PatchConnectionRequest) (*operations.PatchConnectionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/connections/{connectionId}", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "ConnectionPatchRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, false, "ConnectionPatchRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
@@ -284,7 +291,7 @@ func (s *connections) PatchConnection(ctx context.Context, request operations.Pa
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -317,12 +324,14 @@ func (s *connections) PatchConnection(ctx context.Context, request operations.Pa
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.ConnectionResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.ConnectionResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.ConnectionResponse = out
+ res.ConnectionResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
diff --git a/internal/sdk/destinations.go b/internal/sdk/destinations.go
old mode 100755
new mode 100644
index aaa586ce9..45af6b44c
--- a/internal/sdk/destinations.go
+++ b/internal/sdk/destinations.go
@@ -3,38 +3,38 @@
package sdk
import (
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/sdk/pkg/models/shared"
- "airbyte/internal/sdk/pkg/utils"
"bytes"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/sdkerrors"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"io"
"net/http"
"strings"
)
-type destinations struct {
+type Destinations struct {
sdkConfiguration sdkConfiguration
}
-func newDestinations(sdkConfig sdkConfiguration) *destinations {
- return &destinations{
+func newDestinations(sdkConfig sdkConfiguration) *Destinations {
+ return &Destinations{
sdkConfiguration: sdkConfig,
}
}
// CreateDestination - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *destinations) CreateDestination(ctx context.Context, request shared.DestinationCreateRequest) (*operations.CreateDestinationResponse, error) {
+func (s *Destinations) CreateDestination(ctx context.Context, request *shared.DestinationCreateRequest) (*operations.CreateDestinationResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43,7 +43,7 @@ func (s *destinations) CreateDestination(ctx context.Context, request shared.Des
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -76,12 +76,14 @@ func (s *destinations) CreateDestination(ctx context.Context, request shared.Des
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -95,15 +97,14 @@ func (s *destinations) CreateDestination(ctx context.Context, request shared.Des
// CreateDestinationAwsDatalake - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationAwsDatalake(ctx context.Context, request shared.DestinationAwsDatalakeCreateRequest) (*operations.CreateDestinationAwsDatalakeResponse, error) {
+func (s *Destinations) CreateDestinationAwsDatalake(ctx context.Context, request *shared.DestinationAwsDatalakeCreateRequest) (*operations.CreateDestinationAwsDatalakeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#AwsDatalake"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -112,7 +113,7 @@ func (s *destinations) CreateDestinationAwsDatalake(ctx context.Context, request
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -145,12 +146,14 @@ func (s *destinations) CreateDestinationAwsDatalake(ctx context.Context, request
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -162,15 +165,14 @@ func (s *destinations) CreateDestinationAwsDatalake(ctx context.Context, request
// CreateDestinationAzureBlobStorage - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationAzureBlobStorage(ctx context.Context, request shared.DestinationAzureBlobStorageCreateRequest) (*operations.CreateDestinationAzureBlobStorageResponse, error) {
+func (s *Destinations) CreateDestinationAzureBlobStorage(ctx context.Context, request *shared.DestinationAzureBlobStorageCreateRequest) (*operations.CreateDestinationAzureBlobStorageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#AzureBlobStorage"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -179,7 +181,7 @@ func (s *destinations) CreateDestinationAzureBlobStorage(ctx context.Context, re
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -212,12 +214,14 @@ func (s *destinations) CreateDestinationAzureBlobStorage(ctx context.Context, re
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -229,15 +233,14 @@ func (s *destinations) CreateDestinationAzureBlobStorage(ctx context.Context, re
// CreateDestinationBigquery - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationBigquery(ctx context.Context, request shared.DestinationBigqueryCreateRequest) (*operations.CreateDestinationBigqueryResponse, error) {
+func (s *Destinations) CreateDestinationBigquery(ctx context.Context, request *shared.DestinationBigqueryCreateRequest) (*operations.CreateDestinationBigqueryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Bigquery"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -246,7 +249,7 @@ func (s *destinations) CreateDestinationBigquery(ctx context.Context, request sh
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -279,12 +282,14 @@ func (s *destinations) CreateDestinationBigquery(ctx context.Context, request sh
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -294,17 +299,16 @@ func (s *destinations) CreateDestinationBigquery(ctx context.Context, request sh
return res, nil
}
-// CreateDestinationBigqueryDenormalized - Create a destination
+// CreateDestinationClickhouse - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationBigqueryDenormalized(ctx context.Context, request shared.DestinationBigqueryDenormalizedCreateRequest) (*operations.CreateDestinationBigqueryDenormalizedResponse, error) {
+func (s *Destinations) CreateDestinationClickhouse(ctx context.Context, request *shared.DestinationClickhouseCreateRequest) (*operations.CreateDestinationClickhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/destinations#BigqueryDenormalized"
+ url := strings.TrimSuffix(baseURL, "/") + "/destinations#Clickhouse"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -313,7 +317,7 @@ func (s *destinations) CreateDestinationBigqueryDenormalized(ctx context.Context
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -337,7 +341,7 @@ func (s *destinations) CreateDestinationBigqueryDenormalized(ctx context.Context
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateDestinationBigqueryDenormalizedResponse{
+ res := &operations.CreateDestinationClickhouseResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -346,12 +350,14 @@ func (s *destinations) CreateDestinationBigqueryDenormalized(ctx context.Context
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -361,17 +367,16 @@ func (s *destinations) CreateDestinationBigqueryDenormalized(ctx context.Context
return res, nil
}
-// CreateDestinationClickhouse - Create a destination
+// CreateDestinationConvex - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationClickhouse(ctx context.Context, request shared.DestinationClickhouseCreateRequest) (*operations.CreateDestinationClickhouseResponse, error) {
+func (s *Destinations) CreateDestinationConvex(ctx context.Context, request *shared.DestinationConvexCreateRequest) (*operations.CreateDestinationConvexResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/destinations#Clickhouse"
+ url := strings.TrimSuffix(baseURL, "/") + "/destinations#Convex"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -380,7 +385,7 @@ func (s *destinations) CreateDestinationClickhouse(ctx context.Context, request
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -404,7 +409,7 @@ func (s *destinations) CreateDestinationClickhouse(ctx context.Context, request
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateDestinationClickhouseResponse{
+ res := &operations.CreateDestinationConvexResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -413,12 +418,14 @@ func (s *destinations) CreateDestinationClickhouse(ctx context.Context, request
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -428,17 +435,16 @@ func (s *destinations) CreateDestinationClickhouse(ctx context.Context, request
return res, nil
}
-// CreateDestinationConvex - Create a destination
+// CreateDestinationCumulio - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationConvex(ctx context.Context, request shared.DestinationConvexCreateRequest) (*operations.CreateDestinationConvexResponse, error) {
+func (s *Destinations) CreateDestinationCumulio(ctx context.Context, request *shared.DestinationCumulioCreateRequest) (*operations.CreateDestinationCumulioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/destinations#Convex"
+ url := strings.TrimSuffix(baseURL, "/") + "/destinations#Cumulio"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -447,7 +453,7 @@ func (s *destinations) CreateDestinationConvex(ctx context.Context, request shar
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -471,7 +477,7 @@ func (s *destinations) CreateDestinationConvex(ctx context.Context, request shar
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateDestinationConvexResponse{
+ res := &operations.CreateDestinationCumulioResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -480,12 +486,14 @@ func (s *destinations) CreateDestinationConvex(ctx context.Context, request shar
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -495,17 +503,16 @@ func (s *destinations) CreateDestinationConvex(ctx context.Context, request shar
return res, nil
}
-// CreateDestinationCumulio - Create a destination
+// CreateDestinationDatabend - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationCumulio(ctx context.Context, request shared.DestinationCumulioCreateRequest) (*operations.CreateDestinationCumulioResponse, error) {
+func (s *Destinations) CreateDestinationDatabend(ctx context.Context, request *shared.DestinationDatabendCreateRequest) (*operations.CreateDestinationDatabendResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/destinations#Cumulio"
+ url := strings.TrimSuffix(baseURL, "/") + "/destinations#Databend"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -514,7 +521,7 @@ func (s *destinations) CreateDestinationCumulio(ctx context.Context, request sha
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -538,7 +545,7 @@ func (s *destinations) CreateDestinationCumulio(ctx context.Context, request sha
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateDestinationCumulioResponse{
+ res := &operations.CreateDestinationDatabendResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -547,12 +554,14 @@ func (s *destinations) CreateDestinationCumulio(ctx context.Context, request sha
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -562,17 +571,16 @@ func (s *destinations) CreateDestinationCumulio(ctx context.Context, request sha
return res, nil
}
-// CreateDestinationDatabend - Create a destination
+// CreateDestinationDatabricks - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationDatabend(ctx context.Context, request shared.DestinationDatabendCreateRequest) (*operations.CreateDestinationDatabendResponse, error) {
+func (s *Destinations) CreateDestinationDatabricks(ctx context.Context, request *shared.DestinationDatabricksCreateRequest) (*operations.CreateDestinationDatabricksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/destinations#Databend"
+ url := strings.TrimSuffix(baseURL, "/") + "/destinations#Databricks"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -581,7 +589,7 @@ func (s *destinations) CreateDestinationDatabend(ctx context.Context, request sh
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -605,7 +613,7 @@ func (s *destinations) CreateDestinationDatabend(ctx context.Context, request sh
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateDestinationDatabendResponse{
+ res := &operations.CreateDestinationDatabricksResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -614,12 +622,14 @@ func (s *destinations) CreateDestinationDatabend(ctx context.Context, request sh
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -629,17 +639,16 @@ func (s *destinations) CreateDestinationDatabend(ctx context.Context, request sh
return res, nil
}
-// CreateDestinationDatabricks - Create a destination
+// CreateDestinationDevNull - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationDatabricks(ctx context.Context, request shared.DestinationDatabricksCreateRequest) (*operations.CreateDestinationDatabricksResponse, error) {
+func (s *Destinations) CreateDestinationDevNull(ctx context.Context, request *shared.DestinationDevNullCreateRequest) (*operations.CreateDestinationDevNullResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/destinations#Databricks"
+ url := strings.TrimSuffix(baseURL, "/") + "/destinations#DevNull"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -648,7 +657,7 @@ func (s *destinations) CreateDestinationDatabricks(ctx context.Context, request
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -672,7 +681,7 @@ func (s *destinations) CreateDestinationDatabricks(ctx context.Context, request
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateDestinationDatabricksResponse{
+ res := &operations.CreateDestinationDevNullResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -681,12 +690,14 @@ func (s *destinations) CreateDestinationDatabricks(ctx context.Context, request
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -696,17 +707,16 @@ func (s *destinations) CreateDestinationDatabricks(ctx context.Context, request
return res, nil
}
-// CreateDestinationDevNull - Create a destination
+// CreateDestinationDuckdb - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationDevNull(ctx context.Context, request shared.DestinationDevNullCreateRequest) (*operations.CreateDestinationDevNullResponse, error) {
+func (s *Destinations) CreateDestinationDuckdb(ctx context.Context, request *shared.DestinationDuckdbCreateRequest) (*operations.CreateDestinationDuckdbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/destinations#DevNull"
+ url := strings.TrimSuffix(baseURL, "/") + "/destinations#Duckdb"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -715,7 +725,7 @@ func (s *destinations) CreateDestinationDevNull(ctx context.Context, request sha
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -739,7 +749,7 @@ func (s *destinations) CreateDestinationDevNull(ctx context.Context, request sha
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateDestinationDevNullResponse{
+ res := &operations.CreateDestinationDuckdbResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -748,12 +758,14 @@ func (s *destinations) CreateDestinationDevNull(ctx context.Context, request sha
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -765,15 +777,14 @@ func (s *destinations) CreateDestinationDevNull(ctx context.Context, request sha
// CreateDestinationDynamodb - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationDynamodb(ctx context.Context, request shared.DestinationDynamodbCreateRequest) (*operations.CreateDestinationDynamodbResponse, error) {
+func (s *Destinations) CreateDestinationDynamodb(ctx context.Context, request *shared.DestinationDynamodbCreateRequest) (*operations.CreateDestinationDynamodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Dynamodb"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -782,7 +793,7 @@ func (s *destinations) CreateDestinationDynamodb(ctx context.Context, request sh
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -815,12 +826,14 @@ func (s *destinations) CreateDestinationDynamodb(ctx context.Context, request sh
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -832,15 +845,14 @@ func (s *destinations) CreateDestinationDynamodb(ctx context.Context, request sh
// CreateDestinationElasticsearch - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationElasticsearch(ctx context.Context, request shared.DestinationElasticsearchCreateRequest) (*operations.CreateDestinationElasticsearchResponse, error) {
+func (s *Destinations) CreateDestinationElasticsearch(ctx context.Context, request *shared.DestinationElasticsearchCreateRequest) (*operations.CreateDestinationElasticsearchResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Elasticsearch"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -849,7 +861,7 @@ func (s *destinations) CreateDestinationElasticsearch(ctx context.Context, reque
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -882,12 +894,14 @@ func (s *destinations) CreateDestinationElasticsearch(ctx context.Context, reque
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -899,15 +913,14 @@ func (s *destinations) CreateDestinationElasticsearch(ctx context.Context, reque
// CreateDestinationFirebolt - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationFirebolt(ctx context.Context, request shared.DestinationFireboltCreateRequest) (*operations.CreateDestinationFireboltResponse, error) {
+func (s *Destinations) CreateDestinationFirebolt(ctx context.Context, request *shared.DestinationFireboltCreateRequest) (*operations.CreateDestinationFireboltResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Firebolt"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -916,7 +929,7 @@ func (s *destinations) CreateDestinationFirebolt(ctx context.Context, request sh
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -949,12 +962,14 @@ func (s *destinations) CreateDestinationFirebolt(ctx context.Context, request sh
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -966,15 +981,14 @@ func (s *destinations) CreateDestinationFirebolt(ctx context.Context, request sh
// CreateDestinationFirestore - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationFirestore(ctx context.Context, request shared.DestinationFirestoreCreateRequest) (*operations.CreateDestinationFirestoreResponse, error) {
+func (s *Destinations) CreateDestinationFirestore(ctx context.Context, request *shared.DestinationFirestoreCreateRequest) (*operations.CreateDestinationFirestoreResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Firestore"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -983,7 +997,7 @@ func (s *destinations) CreateDestinationFirestore(ctx context.Context, request s
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1016,12 +1030,14 @@ func (s *destinations) CreateDestinationFirestore(ctx context.Context, request s
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1033,15 +1049,14 @@ func (s *destinations) CreateDestinationFirestore(ctx context.Context, request s
// CreateDestinationGcs - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationGcs(ctx context.Context, request shared.DestinationGcsCreateRequest) (*operations.CreateDestinationGcsResponse, error) {
+func (s *Destinations) CreateDestinationGcs(ctx context.Context, request *shared.DestinationGcsCreateRequest) (*operations.CreateDestinationGcsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Gcs"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1050,7 +1065,7 @@ func (s *destinations) CreateDestinationGcs(ctx context.Context, request shared.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1083,12 +1098,14 @@ func (s *destinations) CreateDestinationGcs(ctx context.Context, request shared.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1100,15 +1117,14 @@ func (s *destinations) CreateDestinationGcs(ctx context.Context, request shared.
// CreateDestinationGoogleSheets - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationGoogleSheets(ctx context.Context, request shared.DestinationGoogleSheetsCreateRequest) (*operations.CreateDestinationGoogleSheetsResponse, error) {
+func (s *Destinations) CreateDestinationGoogleSheets(ctx context.Context, request *shared.DestinationGoogleSheetsCreateRequest) (*operations.CreateDestinationGoogleSheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#GoogleSheets"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1117,7 +1133,7 @@ func (s *destinations) CreateDestinationGoogleSheets(ctx context.Context, reques
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1150,12 +1166,14 @@ func (s *destinations) CreateDestinationGoogleSheets(ctx context.Context, reques
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1167,15 +1185,14 @@ func (s *destinations) CreateDestinationGoogleSheets(ctx context.Context, reques
// CreateDestinationKeen - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationKeen(ctx context.Context, request shared.DestinationKeenCreateRequest) (*operations.CreateDestinationKeenResponse, error) {
+func (s *Destinations) CreateDestinationKeen(ctx context.Context, request *shared.DestinationKeenCreateRequest) (*operations.CreateDestinationKeenResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Keen"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1184,7 +1201,7 @@ func (s *destinations) CreateDestinationKeen(ctx context.Context, request shared
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1217,12 +1234,14 @@ func (s *destinations) CreateDestinationKeen(ctx context.Context, request shared
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1234,15 +1253,14 @@ func (s *destinations) CreateDestinationKeen(ctx context.Context, request shared
// CreateDestinationKinesis - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationKinesis(ctx context.Context, request shared.DestinationKinesisCreateRequest) (*operations.CreateDestinationKinesisResponse, error) {
+func (s *Destinations) CreateDestinationKinesis(ctx context.Context, request *shared.DestinationKinesisCreateRequest) (*operations.CreateDestinationKinesisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Kinesis"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1251,7 +1269,7 @@ func (s *destinations) CreateDestinationKinesis(ctx context.Context, request sha
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1284,12 +1302,14 @@ func (s *destinations) CreateDestinationKinesis(ctx context.Context, request sha
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1301,15 +1321,14 @@ func (s *destinations) CreateDestinationKinesis(ctx context.Context, request sha
// CreateDestinationLangchain - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationLangchain(ctx context.Context, request shared.DestinationLangchainCreateRequest) (*operations.CreateDestinationLangchainResponse, error) {
+func (s *Destinations) CreateDestinationLangchain(ctx context.Context, request *shared.DestinationLangchainCreateRequest) (*operations.CreateDestinationLangchainResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Langchain"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1318,7 +1337,7 @@ func (s *destinations) CreateDestinationLangchain(ctx context.Context, request s
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1351,12 +1370,14 @@ func (s *destinations) CreateDestinationLangchain(ctx context.Context, request s
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1368,15 +1389,14 @@ func (s *destinations) CreateDestinationLangchain(ctx context.Context, request s
// CreateDestinationMilvus - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationMilvus(ctx context.Context, request shared.DestinationMilvusCreateRequest) (*operations.CreateDestinationMilvusResponse, error) {
+func (s *Destinations) CreateDestinationMilvus(ctx context.Context, request *shared.DestinationMilvusCreateRequest) (*operations.CreateDestinationMilvusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Milvus"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1385,7 +1405,7 @@ func (s *destinations) CreateDestinationMilvus(ctx context.Context, request shar
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1418,12 +1438,14 @@ func (s *destinations) CreateDestinationMilvus(ctx context.Context, request shar
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1435,15 +1457,14 @@ func (s *destinations) CreateDestinationMilvus(ctx context.Context, request shar
// CreateDestinationMongodb - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationMongodb(ctx context.Context, request shared.DestinationMongodbCreateRequest) (*operations.CreateDestinationMongodbResponse, error) {
+func (s *Destinations) CreateDestinationMongodb(ctx context.Context, request *shared.DestinationMongodbCreateRequest) (*operations.CreateDestinationMongodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Mongodb"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1452,7 +1473,7 @@ func (s *destinations) CreateDestinationMongodb(ctx context.Context, request sha
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1485,12 +1506,14 @@ func (s *destinations) CreateDestinationMongodb(ctx context.Context, request sha
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1502,15 +1525,14 @@ func (s *destinations) CreateDestinationMongodb(ctx context.Context, request sha
// CreateDestinationMssql - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationMssql(ctx context.Context, request shared.DestinationMssqlCreateRequest) (*operations.CreateDestinationMssqlResponse, error) {
+func (s *Destinations) CreateDestinationMssql(ctx context.Context, request *shared.DestinationMssqlCreateRequest) (*operations.CreateDestinationMssqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Mssql"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1519,7 +1541,7 @@ func (s *destinations) CreateDestinationMssql(ctx context.Context, request share
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1552,12 +1574,14 @@ func (s *destinations) CreateDestinationMssql(ctx context.Context, request share
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1569,15 +1593,14 @@ func (s *destinations) CreateDestinationMssql(ctx context.Context, request share
// CreateDestinationMysql - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationMysql(ctx context.Context, request shared.DestinationMysqlCreateRequest) (*operations.CreateDestinationMysqlResponse, error) {
+func (s *Destinations) CreateDestinationMysql(ctx context.Context, request *shared.DestinationMysqlCreateRequest) (*operations.CreateDestinationMysqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Mysql"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1586,7 +1609,7 @@ func (s *destinations) CreateDestinationMysql(ctx context.Context, request share
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1619,12 +1642,14 @@ func (s *destinations) CreateDestinationMysql(ctx context.Context, request share
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1636,15 +1661,14 @@ func (s *destinations) CreateDestinationMysql(ctx context.Context, request share
// CreateDestinationOracle - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationOracle(ctx context.Context, request shared.DestinationOracleCreateRequest) (*operations.CreateDestinationOracleResponse, error) {
+func (s *Destinations) CreateDestinationOracle(ctx context.Context, request *shared.DestinationOracleCreateRequest) (*operations.CreateDestinationOracleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Oracle"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1653,7 +1677,7 @@ func (s *destinations) CreateDestinationOracle(ctx context.Context, request shar
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1686,12 +1710,14 @@ func (s *destinations) CreateDestinationOracle(ctx context.Context, request shar
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1703,15 +1729,14 @@ func (s *destinations) CreateDestinationOracle(ctx context.Context, request shar
// CreateDestinationPinecone - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationPinecone(ctx context.Context, request shared.DestinationPineconeCreateRequest) (*operations.CreateDestinationPineconeResponse, error) {
+func (s *Destinations) CreateDestinationPinecone(ctx context.Context, request *shared.DestinationPineconeCreateRequest) (*operations.CreateDestinationPineconeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Pinecone"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1720,7 +1745,7 @@ func (s *destinations) CreateDestinationPinecone(ctx context.Context, request sh
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1753,12 +1778,14 @@ func (s *destinations) CreateDestinationPinecone(ctx context.Context, request sh
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1770,15 +1797,14 @@ func (s *destinations) CreateDestinationPinecone(ctx context.Context, request sh
// CreateDestinationPostgres - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationPostgres(ctx context.Context, request shared.DestinationPostgresCreateRequest) (*operations.CreateDestinationPostgresResponse, error) {
+func (s *Destinations) CreateDestinationPostgres(ctx context.Context, request *shared.DestinationPostgresCreateRequest) (*operations.CreateDestinationPostgresResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Postgres"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1787,7 +1813,7 @@ func (s *destinations) CreateDestinationPostgres(ctx context.Context, request sh
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1820,12 +1846,14 @@ func (s *destinations) CreateDestinationPostgres(ctx context.Context, request sh
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1837,15 +1865,14 @@ func (s *destinations) CreateDestinationPostgres(ctx context.Context, request sh
// CreateDestinationPubsub - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationPubsub(ctx context.Context, request shared.DestinationPubsubCreateRequest) (*operations.CreateDestinationPubsubResponse, error) {
+func (s *Destinations) CreateDestinationPubsub(ctx context.Context, request *shared.DestinationPubsubCreateRequest) (*operations.CreateDestinationPubsubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Pubsub"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1854,7 +1881,7 @@ func (s *destinations) CreateDestinationPubsub(ctx context.Context, request shar
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1887,12 +1914,82 @@ func (s *destinations) CreateDestinationPubsub(ctx context.Context, request shar
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
+ }
+ case httpRes.StatusCode == 400:
+ fallthrough
+ case httpRes.StatusCode == 403:
+ }
+
+ return res, nil
+}
+
+// CreateDestinationQdrant - Create a destination
+// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
+func (s *Destinations) CreateDestinationQdrant(ctx context.Context, request *shared.DestinationQdrantCreateRequest) (*operations.CreateDestinationQdrantResponse, error) {
+ baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
+ url := strings.TrimSuffix(baseURL, "/") + "/destinations#Qdrant"
+
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
+ if err != nil {
+ return nil, fmt.Errorf("error serializing request body: %w", err)
+ }
+ debugBody := bytes.NewBuffer([]byte{})
+ debugReader := io.TeeReader(bodyReader, debugBody)
+
+ req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader)
+ if err != nil {
+ return nil, fmt.Errorf("error creating request: %w", err)
+ }
+ req.Header.Set("Accept", "application/json")
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
+
+ req.Header.Set("Content-Type", reqContentType)
+
+ client := s.sdkConfiguration.SecurityClient
+
+ httpRes, err := client.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("error sending request: %w", err)
+ }
+ if httpRes == nil {
+ return nil, fmt.Errorf("error sending request: no response")
+ }
+
+ rawBody, err := io.ReadAll(httpRes.Body)
+ if err != nil {
+ return nil, fmt.Errorf("error reading response body: %w", err)
+ }
+ httpRes.Request.Body = io.NopCloser(debugBody)
+ httpRes.Body.Close()
+ httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
+
+ contentType := httpRes.Header.Get("Content-Type")
+
+ res := &operations.CreateDestinationQdrantResponse{
+ StatusCode: httpRes.StatusCode,
+ ContentType: contentType,
+ RawResponse: httpRes,
+ }
+ switch {
+ case httpRes.StatusCode == 200:
+ switch {
+ case utils.MatchContentType(contentType, `application/json`):
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
+ }
+
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1904,15 +2001,14 @@ func (s *destinations) CreateDestinationPubsub(ctx context.Context, request shar
// CreateDestinationRedis - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationRedis(ctx context.Context, request shared.DestinationRedisCreateRequest) (*operations.CreateDestinationRedisResponse, error) {
+func (s *Destinations) CreateDestinationRedis(ctx context.Context, request *shared.DestinationRedisCreateRequest) (*operations.CreateDestinationRedisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Redis"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1921,7 +2017,7 @@ func (s *destinations) CreateDestinationRedis(ctx context.Context, request share
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1954,12 +2050,14 @@ func (s *destinations) CreateDestinationRedis(ctx context.Context, request share
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1971,15 +2069,14 @@ func (s *destinations) CreateDestinationRedis(ctx context.Context, request share
// CreateDestinationRedshift - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationRedshift(ctx context.Context, request shared.DestinationRedshiftCreateRequest) (*operations.CreateDestinationRedshiftResponse, error) {
+func (s *Destinations) CreateDestinationRedshift(ctx context.Context, request *shared.DestinationRedshiftCreateRequest) (*operations.CreateDestinationRedshiftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Redshift"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1988,7 +2085,7 @@ func (s *destinations) CreateDestinationRedshift(ctx context.Context, request sh
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2021,12 +2118,14 @@ func (s *destinations) CreateDestinationRedshift(ctx context.Context, request sh
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2038,15 +2137,14 @@ func (s *destinations) CreateDestinationRedshift(ctx context.Context, request sh
// CreateDestinationS3 - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationS3(ctx context.Context, request shared.DestinationS3CreateRequest) (*operations.CreateDestinationS3Response, error) {
+func (s *Destinations) CreateDestinationS3(ctx context.Context, request *shared.DestinationS3CreateRequest) (*operations.CreateDestinationS3Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#S3"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2055,7 +2153,7 @@ func (s *destinations) CreateDestinationS3(ctx context.Context, request shared.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2088,12 +2186,14 @@ func (s *destinations) CreateDestinationS3(ctx context.Context, request shared.D
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2105,15 +2205,14 @@ func (s *destinations) CreateDestinationS3(ctx context.Context, request shared.D
// CreateDestinationS3Glue - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationS3Glue(ctx context.Context, request shared.DestinationS3GlueCreateRequest) (*operations.CreateDestinationS3GlueResponse, error) {
+func (s *Destinations) CreateDestinationS3Glue(ctx context.Context, request *shared.DestinationS3GlueCreateRequest) (*operations.CreateDestinationS3GlueResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#S3Glue"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2122,7 +2221,7 @@ func (s *destinations) CreateDestinationS3Glue(ctx context.Context, request shar
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2155,12 +2254,14 @@ func (s *destinations) CreateDestinationS3Glue(ctx context.Context, request shar
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2172,15 +2273,14 @@ func (s *destinations) CreateDestinationS3Glue(ctx context.Context, request shar
// CreateDestinationSftpJSON - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationSftpJSON(ctx context.Context, request shared.DestinationSftpJSONCreateRequest) (*operations.CreateDestinationSftpJSONResponse, error) {
+func (s *Destinations) CreateDestinationSftpJSON(ctx context.Context, request *shared.DestinationSftpJSONCreateRequest) (*operations.CreateDestinationSftpJSONResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#SftpJson"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2189,7 +2289,7 @@ func (s *destinations) CreateDestinationSftpJSON(ctx context.Context, request sh
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2222,12 +2322,14 @@ func (s *destinations) CreateDestinationSftpJSON(ctx context.Context, request sh
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2239,15 +2341,14 @@ func (s *destinations) CreateDestinationSftpJSON(ctx context.Context, request sh
// CreateDestinationSnowflake - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationSnowflake(ctx context.Context, request shared.DestinationSnowflakeCreateRequest) (*operations.CreateDestinationSnowflakeResponse, error) {
+func (s *Destinations) CreateDestinationSnowflake(ctx context.Context, request *shared.DestinationSnowflakeCreateRequest) (*operations.CreateDestinationSnowflakeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Snowflake"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2256,7 +2357,7 @@ func (s *destinations) CreateDestinationSnowflake(ctx context.Context, request s
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2289,12 +2390,14 @@ func (s *destinations) CreateDestinationSnowflake(ctx context.Context, request s
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2306,15 +2409,14 @@ func (s *destinations) CreateDestinationSnowflake(ctx context.Context, request s
// CreateDestinationTimeplus - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationTimeplus(ctx context.Context, request shared.DestinationTimeplusCreateRequest) (*operations.CreateDestinationTimeplusResponse, error) {
+func (s *Destinations) CreateDestinationTimeplus(ctx context.Context, request *shared.DestinationTimeplusCreateRequest) (*operations.CreateDestinationTimeplusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Timeplus"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2323,7 +2425,7 @@ func (s *destinations) CreateDestinationTimeplus(ctx context.Context, request sh
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2356,12 +2458,14 @@ func (s *destinations) CreateDestinationTimeplus(ctx context.Context, request sh
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2373,15 +2477,14 @@ func (s *destinations) CreateDestinationTimeplus(ctx context.Context, request sh
// CreateDestinationTypesense - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationTypesense(ctx context.Context, request shared.DestinationTypesenseCreateRequest) (*operations.CreateDestinationTypesenseResponse, error) {
+func (s *Destinations) CreateDestinationTypesense(ctx context.Context, request *shared.DestinationTypesenseCreateRequest) (*operations.CreateDestinationTypesenseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Typesense"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2390,7 +2493,7 @@ func (s *destinations) CreateDestinationTypesense(ctx context.Context, request s
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2423,12 +2526,14 @@ func (s *destinations) CreateDestinationTypesense(ctx context.Context, request s
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2440,15 +2545,14 @@ func (s *destinations) CreateDestinationTypesense(ctx context.Context, request s
// CreateDestinationVertica - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationVertica(ctx context.Context, request shared.DestinationVerticaCreateRequest) (*operations.CreateDestinationVerticaResponse, error) {
+func (s *Destinations) CreateDestinationVertica(ctx context.Context, request *shared.DestinationVerticaCreateRequest) (*operations.CreateDestinationVerticaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Vertica"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2457,7 +2561,7 @@ func (s *destinations) CreateDestinationVertica(ctx context.Context, request sha
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2490,12 +2594,82 @@ func (s *destinations) CreateDestinationVertica(ctx context.Context, request sha
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
+ }
+
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
+ }
+ case httpRes.StatusCode == 400:
+ fallthrough
+ case httpRes.StatusCode == 403:
+ }
+
+ return res, nil
+}
+
+// CreateDestinationWeaviate - Create a destination
+// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
+func (s *Destinations) CreateDestinationWeaviate(ctx context.Context, request *shared.DestinationWeaviateCreateRequest) (*operations.CreateDestinationWeaviateResponse, error) {
+ baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
+ url := strings.TrimSuffix(baseURL, "/") + "/destinations#Weaviate"
+
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
+ if err != nil {
+ return nil, fmt.Errorf("error serializing request body: %w", err)
+ }
+ debugBody := bytes.NewBuffer([]byte{})
+ debugReader := io.TeeReader(bodyReader, debugBody)
+
+ req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader)
+ if err != nil {
+ return nil, fmt.Errorf("error creating request: %w", err)
+ }
+ req.Header.Set("Accept", "application/json")
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
+
+ req.Header.Set("Content-Type", reqContentType)
+
+ client := s.sdkConfiguration.SecurityClient
+
+ httpRes, err := client.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("error sending request: %w", err)
+ }
+ if httpRes == nil {
+ return nil, fmt.Errorf("error sending request: no response")
+ }
+
+ rawBody, err := io.ReadAll(httpRes.Body)
+ if err != nil {
+ return nil, fmt.Errorf("error reading response body: %w", err)
+ }
+ httpRes.Request.Body = io.NopCloser(debugBody)
+ httpRes.Body.Close()
+ httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
+
+ contentType := httpRes.Header.Get("Content-Type")
+
+ res := &operations.CreateDestinationWeaviateResponse{
+ StatusCode: httpRes.StatusCode,
+ ContentType: contentType,
+ RawResponse: httpRes,
+ }
+ switch {
+ case httpRes.StatusCode == 200:
+ switch {
+ case utils.MatchContentType(contentType, `application/json`):
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2507,15 +2681,14 @@ func (s *destinations) CreateDestinationVertica(ctx context.Context, request sha
// CreateDestinationXata - Create a destination
// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination.
-func (s *destinations) CreateDestinationXata(ctx context.Context, request shared.DestinationXataCreateRequest) (*operations.CreateDestinationXataResponse, error) {
+func (s *Destinations) CreateDestinationXata(ctx context.Context, request *shared.DestinationXataCreateRequest) (*operations.CreateDestinationXataResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations#Xata"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2524,7 +2697,7 @@ func (s *destinations) CreateDestinationXata(ctx context.Context, request shared
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2557,12 +2730,14 @@ func (s *destinations) CreateDestinationXata(ctx context.Context, request shared
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2573,7 +2748,7 @@ func (s *destinations) CreateDestinationXata(ctx context.Context, request shared
}
// DeleteDestination - Delete a Destination
-func (s *destinations) DeleteDestination(ctx context.Context, request operations.DeleteDestinationRequest) (*operations.DeleteDestinationResponse, error) {
+func (s *Destinations) DeleteDestination(ctx context.Context, request operations.DeleteDestinationRequest) (*operations.DeleteDestinationResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}", request, nil)
if err != nil {
@@ -2585,7 +2760,7 @@ func (s *destinations) DeleteDestination(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -2623,7 +2798,7 @@ func (s *destinations) DeleteDestination(ctx context.Context, request operations
}
// DeleteDestinationAwsDatalake - Delete a Destination
-func (s *destinations) DeleteDestinationAwsDatalake(ctx context.Context, request operations.DeleteDestinationAwsDatalakeRequest) (*operations.DeleteDestinationAwsDatalakeResponse, error) {
+func (s *Destinations) DeleteDestinationAwsDatalake(ctx context.Context, request operations.DeleteDestinationAwsDatalakeRequest) (*operations.DeleteDestinationAwsDatalakeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#AwsDatalake", request, nil)
if err != nil {
@@ -2635,7 +2810,7 @@ func (s *destinations) DeleteDestinationAwsDatalake(ctx context.Context, request
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -2673,7 +2848,7 @@ func (s *destinations) DeleteDestinationAwsDatalake(ctx context.Context, request
}
// DeleteDestinationAzureBlobStorage - Delete a Destination
-func (s *destinations) DeleteDestinationAzureBlobStorage(ctx context.Context, request operations.DeleteDestinationAzureBlobStorageRequest) (*operations.DeleteDestinationAzureBlobStorageResponse, error) {
+func (s *Destinations) DeleteDestinationAzureBlobStorage(ctx context.Context, request operations.DeleteDestinationAzureBlobStorageRequest) (*operations.DeleteDestinationAzureBlobStorageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#AzureBlobStorage", request, nil)
if err != nil {
@@ -2685,7 +2860,7 @@ func (s *destinations) DeleteDestinationAzureBlobStorage(ctx context.Context, re
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -2723,7 +2898,7 @@ func (s *destinations) DeleteDestinationAzureBlobStorage(ctx context.Context, re
}
// DeleteDestinationBigquery - Delete a Destination
-func (s *destinations) DeleteDestinationBigquery(ctx context.Context, request operations.DeleteDestinationBigqueryRequest) (*operations.DeleteDestinationBigqueryResponse, error) {
+func (s *Destinations) DeleteDestinationBigquery(ctx context.Context, request operations.DeleteDestinationBigqueryRequest) (*operations.DeleteDestinationBigqueryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Bigquery", request, nil)
if err != nil {
@@ -2735,7 +2910,7 @@ func (s *destinations) DeleteDestinationBigquery(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -2772,10 +2947,10 @@ func (s *destinations) DeleteDestinationBigquery(ctx context.Context, request op
return res, nil
}
-// DeleteDestinationBigqueryDenormalized - Delete a Destination
-func (s *destinations) DeleteDestinationBigqueryDenormalized(ctx context.Context, request operations.DeleteDestinationBigqueryDenormalizedRequest) (*operations.DeleteDestinationBigqueryDenormalizedResponse, error) {
+// DeleteDestinationClickhouse - Delete a Destination
+func (s *Destinations) DeleteDestinationClickhouse(ctx context.Context, request operations.DeleteDestinationClickhouseRequest) (*operations.DeleteDestinationClickhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#BigqueryDenormalized", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Clickhouse", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -2785,7 +2960,7 @@ func (s *destinations) DeleteDestinationBigqueryDenormalized(ctx context.Context
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -2806,7 +2981,7 @@ func (s *destinations) DeleteDestinationBigqueryDenormalized(ctx context.Context
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationBigqueryDenormalizedResponse{
+ res := &operations.DeleteDestinationClickhouseResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -2822,10 +2997,10 @@ func (s *destinations) DeleteDestinationBigqueryDenormalized(ctx context.Context
return res, nil
}
-// DeleteDestinationClickhouse - Delete a Destination
-func (s *destinations) DeleteDestinationClickhouse(ctx context.Context, request operations.DeleteDestinationClickhouseRequest) (*operations.DeleteDestinationClickhouseResponse, error) {
+// DeleteDestinationConvex - Delete a Destination
+func (s *Destinations) DeleteDestinationConvex(ctx context.Context, request operations.DeleteDestinationConvexRequest) (*operations.DeleteDestinationConvexResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Clickhouse", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Convex", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -2835,7 +3010,7 @@ func (s *destinations) DeleteDestinationClickhouse(ctx context.Context, request
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -2856,7 +3031,7 @@ func (s *destinations) DeleteDestinationClickhouse(ctx context.Context, request
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationClickhouseResponse{
+ res := &operations.DeleteDestinationConvexResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -2872,10 +3047,10 @@ func (s *destinations) DeleteDestinationClickhouse(ctx context.Context, request
return res, nil
}
-// DeleteDestinationConvex - Delete a Destination
-func (s *destinations) DeleteDestinationConvex(ctx context.Context, request operations.DeleteDestinationConvexRequest) (*operations.DeleteDestinationConvexResponse, error) {
+// DeleteDestinationCumulio - Delete a Destination
+func (s *Destinations) DeleteDestinationCumulio(ctx context.Context, request operations.DeleteDestinationCumulioRequest) (*operations.DeleteDestinationCumulioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Convex", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Cumulio", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -2885,7 +3060,7 @@ func (s *destinations) DeleteDestinationConvex(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -2906,7 +3081,7 @@ func (s *destinations) DeleteDestinationConvex(ctx context.Context, request oper
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationConvexResponse{
+ res := &operations.DeleteDestinationCumulioResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -2922,10 +3097,10 @@ func (s *destinations) DeleteDestinationConvex(ctx context.Context, request oper
return res, nil
}
-// DeleteDestinationCumulio - Delete a Destination
-func (s *destinations) DeleteDestinationCumulio(ctx context.Context, request operations.DeleteDestinationCumulioRequest) (*operations.DeleteDestinationCumulioResponse, error) {
+// DeleteDestinationDatabend - Delete a Destination
+func (s *Destinations) DeleteDestinationDatabend(ctx context.Context, request operations.DeleteDestinationDatabendRequest) (*operations.DeleteDestinationDatabendResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Cumulio", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databend", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -2935,7 +3110,7 @@ func (s *destinations) DeleteDestinationCumulio(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -2956,7 +3131,7 @@ func (s *destinations) DeleteDestinationCumulio(ctx context.Context, request ope
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationCumulioResponse{
+ res := &operations.DeleteDestinationDatabendResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -2972,10 +3147,10 @@ func (s *destinations) DeleteDestinationCumulio(ctx context.Context, request ope
return res, nil
}
-// DeleteDestinationDatabend - Delete a Destination
-func (s *destinations) DeleteDestinationDatabend(ctx context.Context, request operations.DeleteDestinationDatabendRequest) (*operations.DeleteDestinationDatabendResponse, error) {
+// DeleteDestinationDatabricks - Delete a Destination
+func (s *Destinations) DeleteDestinationDatabricks(ctx context.Context, request operations.DeleteDestinationDatabricksRequest) (*operations.DeleteDestinationDatabricksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databend", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databricks", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -2985,7 +3160,7 @@ func (s *destinations) DeleteDestinationDatabend(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3006,7 +3181,7 @@ func (s *destinations) DeleteDestinationDatabend(ctx context.Context, request op
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationDatabendResponse{
+ res := &operations.DeleteDestinationDatabricksResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -3022,10 +3197,10 @@ func (s *destinations) DeleteDestinationDatabend(ctx context.Context, request op
return res, nil
}
-// DeleteDestinationDatabricks - Delete a Destination
-func (s *destinations) DeleteDestinationDatabricks(ctx context.Context, request operations.DeleteDestinationDatabricksRequest) (*operations.DeleteDestinationDatabricksResponse, error) {
+// DeleteDestinationDevNull - Delete a Destination
+func (s *Destinations) DeleteDestinationDevNull(ctx context.Context, request operations.DeleteDestinationDevNullRequest) (*operations.DeleteDestinationDevNullResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databricks", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#DevNull", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -3035,7 +3210,7 @@ func (s *destinations) DeleteDestinationDatabricks(ctx context.Context, request
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3056,7 +3231,7 @@ func (s *destinations) DeleteDestinationDatabricks(ctx context.Context, request
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationDatabricksResponse{
+ res := &operations.DeleteDestinationDevNullResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -3072,10 +3247,10 @@ func (s *destinations) DeleteDestinationDatabricks(ctx context.Context, request
return res, nil
}
-// DeleteDestinationDevNull - Delete a Destination
-func (s *destinations) DeleteDestinationDevNull(ctx context.Context, request operations.DeleteDestinationDevNullRequest) (*operations.DeleteDestinationDevNullResponse, error) {
+// DeleteDestinationDuckdb - Delete a Destination
+func (s *Destinations) DeleteDestinationDuckdb(ctx context.Context, request operations.DeleteDestinationDuckdbRequest) (*operations.DeleteDestinationDuckdbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#DevNull", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Duckdb", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -3085,7 +3260,7 @@ func (s *destinations) DeleteDestinationDevNull(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3106,7 +3281,7 @@ func (s *destinations) DeleteDestinationDevNull(ctx context.Context, request ope
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationDevNullResponse{
+ res := &operations.DeleteDestinationDuckdbResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -3123,7 +3298,7 @@ func (s *destinations) DeleteDestinationDevNull(ctx context.Context, request ope
}
// DeleteDestinationDynamodb - Delete a Destination
-func (s *destinations) DeleteDestinationDynamodb(ctx context.Context, request operations.DeleteDestinationDynamodbRequest) (*operations.DeleteDestinationDynamodbResponse, error) {
+func (s *Destinations) DeleteDestinationDynamodb(ctx context.Context, request operations.DeleteDestinationDynamodbRequest) (*operations.DeleteDestinationDynamodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Dynamodb", request, nil)
if err != nil {
@@ -3135,7 +3310,7 @@ func (s *destinations) DeleteDestinationDynamodb(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3173,7 +3348,7 @@ func (s *destinations) DeleteDestinationDynamodb(ctx context.Context, request op
}
// DeleteDestinationElasticsearch - Delete a Destination
-func (s *destinations) DeleteDestinationElasticsearch(ctx context.Context, request operations.DeleteDestinationElasticsearchRequest) (*operations.DeleteDestinationElasticsearchResponse, error) {
+func (s *Destinations) DeleteDestinationElasticsearch(ctx context.Context, request operations.DeleteDestinationElasticsearchRequest) (*operations.DeleteDestinationElasticsearchResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Elasticsearch", request, nil)
if err != nil {
@@ -3185,7 +3360,7 @@ func (s *destinations) DeleteDestinationElasticsearch(ctx context.Context, reque
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3223,7 +3398,7 @@ func (s *destinations) DeleteDestinationElasticsearch(ctx context.Context, reque
}
// DeleteDestinationFirebolt - Delete a Destination
-func (s *destinations) DeleteDestinationFirebolt(ctx context.Context, request operations.DeleteDestinationFireboltRequest) (*operations.DeleteDestinationFireboltResponse, error) {
+func (s *Destinations) DeleteDestinationFirebolt(ctx context.Context, request operations.DeleteDestinationFireboltRequest) (*operations.DeleteDestinationFireboltResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Firebolt", request, nil)
if err != nil {
@@ -3235,7 +3410,7 @@ func (s *destinations) DeleteDestinationFirebolt(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3273,7 +3448,7 @@ func (s *destinations) DeleteDestinationFirebolt(ctx context.Context, request op
}
// DeleteDestinationFirestore - Delete a Destination
-func (s *destinations) DeleteDestinationFirestore(ctx context.Context, request operations.DeleteDestinationFirestoreRequest) (*operations.DeleteDestinationFirestoreResponse, error) {
+func (s *Destinations) DeleteDestinationFirestore(ctx context.Context, request operations.DeleteDestinationFirestoreRequest) (*operations.DeleteDestinationFirestoreResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Firestore", request, nil)
if err != nil {
@@ -3285,7 +3460,7 @@ func (s *destinations) DeleteDestinationFirestore(ctx context.Context, request o
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3323,7 +3498,7 @@ func (s *destinations) DeleteDestinationFirestore(ctx context.Context, request o
}
// DeleteDestinationGcs - Delete a Destination
-func (s *destinations) DeleteDestinationGcs(ctx context.Context, request operations.DeleteDestinationGcsRequest) (*operations.DeleteDestinationGcsResponse, error) {
+func (s *Destinations) DeleteDestinationGcs(ctx context.Context, request operations.DeleteDestinationGcsRequest) (*operations.DeleteDestinationGcsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Gcs", request, nil)
if err != nil {
@@ -3335,7 +3510,7 @@ func (s *destinations) DeleteDestinationGcs(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3373,7 +3548,7 @@ func (s *destinations) DeleteDestinationGcs(ctx context.Context, request operati
}
// DeleteDestinationGoogleSheets - Delete a Destination
-func (s *destinations) DeleteDestinationGoogleSheets(ctx context.Context, request operations.DeleteDestinationGoogleSheetsRequest) (*operations.DeleteDestinationGoogleSheetsResponse, error) {
+func (s *Destinations) DeleteDestinationGoogleSheets(ctx context.Context, request operations.DeleteDestinationGoogleSheetsRequest) (*operations.DeleteDestinationGoogleSheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#GoogleSheets", request, nil)
if err != nil {
@@ -3385,7 +3560,7 @@ func (s *destinations) DeleteDestinationGoogleSheets(ctx context.Context, reques
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3423,7 +3598,7 @@ func (s *destinations) DeleteDestinationGoogleSheets(ctx context.Context, reques
}
// DeleteDestinationKeen - Delete a Destination
-func (s *destinations) DeleteDestinationKeen(ctx context.Context, request operations.DeleteDestinationKeenRequest) (*operations.DeleteDestinationKeenResponse, error) {
+func (s *Destinations) DeleteDestinationKeen(ctx context.Context, request operations.DeleteDestinationKeenRequest) (*operations.DeleteDestinationKeenResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Keen", request, nil)
if err != nil {
@@ -3435,7 +3610,7 @@ func (s *destinations) DeleteDestinationKeen(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3473,7 +3648,7 @@ func (s *destinations) DeleteDestinationKeen(ctx context.Context, request operat
}
// DeleteDestinationKinesis - Delete a Destination
-func (s *destinations) DeleteDestinationKinesis(ctx context.Context, request operations.DeleteDestinationKinesisRequest) (*operations.DeleteDestinationKinesisResponse, error) {
+func (s *Destinations) DeleteDestinationKinesis(ctx context.Context, request operations.DeleteDestinationKinesisRequest) (*operations.DeleteDestinationKinesisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Kinesis", request, nil)
if err != nil {
@@ -3485,7 +3660,7 @@ func (s *destinations) DeleteDestinationKinesis(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3523,7 +3698,7 @@ func (s *destinations) DeleteDestinationKinesis(ctx context.Context, request ope
}
// DeleteDestinationLangchain - Delete a Destination
-func (s *destinations) DeleteDestinationLangchain(ctx context.Context, request operations.DeleteDestinationLangchainRequest) (*operations.DeleteDestinationLangchainResponse, error) {
+func (s *Destinations) DeleteDestinationLangchain(ctx context.Context, request operations.DeleteDestinationLangchainRequest) (*operations.DeleteDestinationLangchainResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Langchain", request, nil)
if err != nil {
@@ -3535,7 +3710,7 @@ func (s *destinations) DeleteDestinationLangchain(ctx context.Context, request o
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3573,7 +3748,7 @@ func (s *destinations) DeleteDestinationLangchain(ctx context.Context, request o
}
// DeleteDestinationMilvus - Delete a Destination
-func (s *destinations) DeleteDestinationMilvus(ctx context.Context, request operations.DeleteDestinationMilvusRequest) (*operations.DeleteDestinationMilvusResponse, error) {
+func (s *Destinations) DeleteDestinationMilvus(ctx context.Context, request operations.DeleteDestinationMilvusRequest) (*operations.DeleteDestinationMilvusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Milvus", request, nil)
if err != nil {
@@ -3585,7 +3760,7 @@ func (s *destinations) DeleteDestinationMilvus(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3623,7 +3798,7 @@ func (s *destinations) DeleteDestinationMilvus(ctx context.Context, request oper
}
// DeleteDestinationMongodb - Delete a Destination
-func (s *destinations) DeleteDestinationMongodb(ctx context.Context, request operations.DeleteDestinationMongodbRequest) (*operations.DeleteDestinationMongodbResponse, error) {
+func (s *Destinations) DeleteDestinationMongodb(ctx context.Context, request operations.DeleteDestinationMongodbRequest) (*operations.DeleteDestinationMongodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Mongodb", request, nil)
if err != nil {
@@ -3635,7 +3810,7 @@ func (s *destinations) DeleteDestinationMongodb(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3673,7 +3848,7 @@ func (s *destinations) DeleteDestinationMongodb(ctx context.Context, request ope
}
// DeleteDestinationMssql - Delete a Destination
-func (s *destinations) DeleteDestinationMssql(ctx context.Context, request operations.DeleteDestinationMssqlRequest) (*operations.DeleteDestinationMssqlResponse, error) {
+func (s *Destinations) DeleteDestinationMssql(ctx context.Context, request operations.DeleteDestinationMssqlRequest) (*operations.DeleteDestinationMssqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Mssql", request, nil)
if err != nil {
@@ -3685,7 +3860,7 @@ func (s *destinations) DeleteDestinationMssql(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3723,7 +3898,7 @@ func (s *destinations) DeleteDestinationMssql(ctx context.Context, request opera
}
// DeleteDestinationMysql - Delete a Destination
-func (s *destinations) DeleteDestinationMysql(ctx context.Context, request operations.DeleteDestinationMysqlRequest) (*operations.DeleteDestinationMysqlResponse, error) {
+func (s *Destinations) DeleteDestinationMysql(ctx context.Context, request operations.DeleteDestinationMysqlRequest) (*operations.DeleteDestinationMysqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Mysql", request, nil)
if err != nil {
@@ -3735,7 +3910,7 @@ func (s *destinations) DeleteDestinationMysql(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3773,7 +3948,7 @@ func (s *destinations) DeleteDestinationMysql(ctx context.Context, request opera
}
// DeleteDestinationOracle - Delete a Destination
-func (s *destinations) DeleteDestinationOracle(ctx context.Context, request operations.DeleteDestinationOracleRequest) (*operations.DeleteDestinationOracleResponse, error) {
+func (s *Destinations) DeleteDestinationOracle(ctx context.Context, request operations.DeleteDestinationOracleRequest) (*operations.DeleteDestinationOracleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Oracle", request, nil)
if err != nil {
@@ -3785,7 +3960,7 @@ func (s *destinations) DeleteDestinationOracle(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3823,7 +3998,7 @@ func (s *destinations) DeleteDestinationOracle(ctx context.Context, request oper
}
// DeleteDestinationPinecone - Delete a Destination
-func (s *destinations) DeleteDestinationPinecone(ctx context.Context, request operations.DeleteDestinationPineconeRequest) (*operations.DeleteDestinationPineconeResponse, error) {
+func (s *Destinations) DeleteDestinationPinecone(ctx context.Context, request operations.DeleteDestinationPineconeRequest) (*operations.DeleteDestinationPineconeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Pinecone", request, nil)
if err != nil {
@@ -3835,7 +4010,7 @@ func (s *destinations) DeleteDestinationPinecone(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3873,7 +4048,7 @@ func (s *destinations) DeleteDestinationPinecone(ctx context.Context, request op
}
// DeleteDestinationPostgres - Delete a Destination
-func (s *destinations) DeleteDestinationPostgres(ctx context.Context, request operations.DeleteDestinationPostgresRequest) (*operations.DeleteDestinationPostgresResponse, error) {
+func (s *Destinations) DeleteDestinationPostgres(ctx context.Context, request operations.DeleteDestinationPostgresRequest) (*operations.DeleteDestinationPostgresResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Postgres", request, nil)
if err != nil {
@@ -3885,7 +4060,7 @@ func (s *destinations) DeleteDestinationPostgres(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3923,7 +4098,7 @@ func (s *destinations) DeleteDestinationPostgres(ctx context.Context, request op
}
// DeleteDestinationPubsub - Delete a Destination
-func (s *destinations) DeleteDestinationPubsub(ctx context.Context, request operations.DeleteDestinationPubsubRequest) (*operations.DeleteDestinationPubsubResponse, error) {
+func (s *Destinations) DeleteDestinationPubsub(ctx context.Context, request operations.DeleteDestinationPubsubRequest) (*operations.DeleteDestinationPubsubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Pubsub", request, nil)
if err != nil {
@@ -3935,7 +4110,7 @@ func (s *destinations) DeleteDestinationPubsub(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -3972,10 +4147,10 @@ func (s *destinations) DeleteDestinationPubsub(ctx context.Context, request oper
return res, nil
}
-// DeleteDestinationRedis - Delete a Destination
-func (s *destinations) DeleteDestinationRedis(ctx context.Context, request operations.DeleteDestinationRedisRequest) (*operations.DeleteDestinationRedisResponse, error) {
+// DeleteDestinationQdrant - Delete a Destination
+func (s *Destinations) DeleteDestinationQdrant(ctx context.Context, request operations.DeleteDestinationQdrantRequest) (*operations.DeleteDestinationQdrantResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Redis", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Qdrant", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -3985,7 +4160,7 @@ func (s *destinations) DeleteDestinationRedis(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4006,7 +4181,7 @@ func (s *destinations) DeleteDestinationRedis(ctx context.Context, request opera
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationRedisResponse{
+ res := &operations.DeleteDestinationQdrantResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4022,10 +4197,10 @@ func (s *destinations) DeleteDestinationRedis(ctx context.Context, request opera
return res, nil
}
-// DeleteDestinationRedshift - Delete a Destination
-func (s *destinations) DeleteDestinationRedshift(ctx context.Context, request operations.DeleteDestinationRedshiftRequest) (*operations.DeleteDestinationRedshiftResponse, error) {
+// DeleteDestinationRedis - Delete a Destination
+func (s *Destinations) DeleteDestinationRedis(ctx context.Context, request operations.DeleteDestinationRedisRequest) (*operations.DeleteDestinationRedisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Redshift", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Redis", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -4035,7 +4210,7 @@ func (s *destinations) DeleteDestinationRedshift(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4056,7 +4231,7 @@ func (s *destinations) DeleteDestinationRedshift(ctx context.Context, request op
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationRedshiftResponse{
+ res := &operations.DeleteDestinationRedisResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4072,10 +4247,10 @@ func (s *destinations) DeleteDestinationRedshift(ctx context.Context, request op
return res, nil
}
-// DeleteDestinationS3 - Delete a Destination
-func (s *destinations) DeleteDestinationS3(ctx context.Context, request operations.DeleteDestinationS3Request) (*operations.DeleteDestinationS3Response, error) {
+// DeleteDestinationRedshift - Delete a Destination
+func (s *Destinations) DeleteDestinationRedshift(ctx context.Context, request operations.DeleteDestinationRedshiftRequest) (*operations.DeleteDestinationRedshiftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#S3", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Redshift", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -4085,7 +4260,7 @@ func (s *destinations) DeleteDestinationS3(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4106,7 +4281,7 @@ func (s *destinations) DeleteDestinationS3(ctx context.Context, request operatio
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationS3Response{
+ res := &operations.DeleteDestinationRedshiftResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4122,10 +4297,10 @@ func (s *destinations) DeleteDestinationS3(ctx context.Context, request operatio
return res, nil
}
-// DeleteDestinationS3Glue - Delete a Destination
-func (s *destinations) DeleteDestinationS3Glue(ctx context.Context, request operations.DeleteDestinationS3GlueRequest) (*operations.DeleteDestinationS3GlueResponse, error) {
+// DeleteDestinationS3 - Delete a Destination
+func (s *Destinations) DeleteDestinationS3(ctx context.Context, request operations.DeleteDestinationS3Request) (*operations.DeleteDestinationS3Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#S3Glue", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#S3", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -4135,7 +4310,7 @@ func (s *destinations) DeleteDestinationS3Glue(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4156,7 +4331,7 @@ func (s *destinations) DeleteDestinationS3Glue(ctx context.Context, request oper
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationS3GlueResponse{
+ res := &operations.DeleteDestinationS3Response{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4172,10 +4347,10 @@ func (s *destinations) DeleteDestinationS3Glue(ctx context.Context, request oper
return res, nil
}
-// DeleteDestinationSftpJSON - Delete a Destination
-func (s *destinations) DeleteDestinationSftpJSON(ctx context.Context, request operations.DeleteDestinationSftpJSONRequest) (*operations.DeleteDestinationSftpJSONResponse, error) {
+// DeleteDestinationS3Glue - Delete a Destination
+func (s *Destinations) DeleteDestinationS3Glue(ctx context.Context, request operations.DeleteDestinationS3GlueRequest) (*operations.DeleteDestinationS3GlueResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#SftpJson", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#S3Glue", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -4185,7 +4360,7 @@ func (s *destinations) DeleteDestinationSftpJSON(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4206,7 +4381,7 @@ func (s *destinations) DeleteDestinationSftpJSON(ctx context.Context, request op
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationSftpJSONResponse{
+ res := &operations.DeleteDestinationS3GlueResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4222,10 +4397,10 @@ func (s *destinations) DeleteDestinationSftpJSON(ctx context.Context, request op
return res, nil
}
-// DeleteDestinationSnowflake - Delete a Destination
-func (s *destinations) DeleteDestinationSnowflake(ctx context.Context, request operations.DeleteDestinationSnowflakeRequest) (*operations.DeleteDestinationSnowflakeResponse, error) {
+// DeleteDestinationSftpJSON - Delete a Destination
+func (s *Destinations) DeleteDestinationSftpJSON(ctx context.Context, request operations.DeleteDestinationSftpJSONRequest) (*operations.DeleteDestinationSftpJSONResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Snowflake", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#SftpJson", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -4235,7 +4410,7 @@ func (s *destinations) DeleteDestinationSnowflake(ctx context.Context, request o
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4256,7 +4431,7 @@ func (s *destinations) DeleteDestinationSnowflake(ctx context.Context, request o
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteDestinationSnowflakeResponse{
+ res := &operations.DeleteDestinationSftpJSONResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4272,8 +4447,58 @@ func (s *destinations) DeleteDestinationSnowflake(ctx context.Context, request o
return res, nil
}
-// DeleteDestinationTimeplus - Delete a Destination
-func (s *destinations) DeleteDestinationTimeplus(ctx context.Context, request operations.DeleteDestinationTimeplusRequest) (*operations.DeleteDestinationTimeplusResponse, error) {
+// DeleteDestinationSnowflake - Delete a Destination
+func (s *Destinations) DeleteDestinationSnowflake(ctx context.Context, request operations.DeleteDestinationSnowflakeRequest) (*operations.DeleteDestinationSnowflakeResponse, error) {
+ baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Snowflake", request, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error generating URL: %w", err)
+ }
+
+ req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error creating request: %w", err)
+ }
+ req.Header.Set("Accept", "*/*")
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
+
+ client := s.sdkConfiguration.SecurityClient
+
+ httpRes, err := client.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("error sending request: %w", err)
+ }
+ if httpRes == nil {
+ return nil, fmt.Errorf("error sending request: no response")
+ }
+
+ rawBody, err := io.ReadAll(httpRes.Body)
+ if err != nil {
+ return nil, fmt.Errorf("error reading response body: %w", err)
+ }
+ httpRes.Body.Close()
+ httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
+
+ contentType := httpRes.Header.Get("Content-Type")
+
+ res := &operations.DeleteDestinationSnowflakeResponse{
+ StatusCode: httpRes.StatusCode,
+ ContentType: contentType,
+ RawResponse: httpRes,
+ }
+ switch {
+ case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300:
+ fallthrough
+ case httpRes.StatusCode == 403:
+ fallthrough
+ case httpRes.StatusCode == 404:
+ }
+
+ return res, nil
+}
+
+// DeleteDestinationTimeplus - Delete a Destination
+func (s *Destinations) DeleteDestinationTimeplus(ctx context.Context, request operations.DeleteDestinationTimeplusRequest) (*operations.DeleteDestinationTimeplusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Timeplus", request, nil)
if err != nil {
@@ -4285,7 +4510,7 @@ func (s *destinations) DeleteDestinationTimeplus(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4323,7 +4548,7 @@ func (s *destinations) DeleteDestinationTimeplus(ctx context.Context, request op
}
// DeleteDestinationTypesense - Delete a Destination
-func (s *destinations) DeleteDestinationTypesense(ctx context.Context, request operations.DeleteDestinationTypesenseRequest) (*operations.DeleteDestinationTypesenseResponse, error) {
+func (s *Destinations) DeleteDestinationTypesense(ctx context.Context, request operations.DeleteDestinationTypesenseRequest) (*operations.DeleteDestinationTypesenseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Typesense", request, nil)
if err != nil {
@@ -4335,7 +4560,7 @@ func (s *destinations) DeleteDestinationTypesense(ctx context.Context, request o
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4373,7 +4598,7 @@ func (s *destinations) DeleteDestinationTypesense(ctx context.Context, request o
}
// DeleteDestinationVertica - Delete a Destination
-func (s *destinations) DeleteDestinationVertica(ctx context.Context, request operations.DeleteDestinationVerticaRequest) (*operations.DeleteDestinationVerticaResponse, error) {
+func (s *Destinations) DeleteDestinationVertica(ctx context.Context, request operations.DeleteDestinationVerticaRequest) (*operations.DeleteDestinationVerticaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Vertica", request, nil)
if err != nil {
@@ -4385,7 +4610,7 @@ func (s *destinations) DeleteDestinationVertica(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4422,8 +4647,58 @@ func (s *destinations) DeleteDestinationVertica(ctx context.Context, request ope
return res, nil
}
+// DeleteDestinationWeaviate - Delete a Destination
+func (s *Destinations) DeleteDestinationWeaviate(ctx context.Context, request operations.DeleteDestinationWeaviateRequest) (*operations.DeleteDestinationWeaviateResponse, error) {
+ baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Weaviate", request, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error generating URL: %w", err)
+ }
+
+ req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error creating request: %w", err)
+ }
+ req.Header.Set("Accept", "*/*")
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
+
+ client := s.sdkConfiguration.SecurityClient
+
+ httpRes, err := client.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("error sending request: %w", err)
+ }
+ if httpRes == nil {
+ return nil, fmt.Errorf("error sending request: no response")
+ }
+
+ rawBody, err := io.ReadAll(httpRes.Body)
+ if err != nil {
+ return nil, fmt.Errorf("error reading response body: %w", err)
+ }
+ httpRes.Body.Close()
+ httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
+
+ contentType := httpRes.Header.Get("Content-Type")
+
+ res := &operations.DeleteDestinationWeaviateResponse{
+ StatusCode: httpRes.StatusCode,
+ ContentType: contentType,
+ RawResponse: httpRes,
+ }
+ switch {
+ case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300:
+ fallthrough
+ case httpRes.StatusCode == 403:
+ fallthrough
+ case httpRes.StatusCode == 404:
+ }
+
+ return res, nil
+}
+
// DeleteDestinationXata - Delete a Destination
-func (s *destinations) DeleteDestinationXata(ctx context.Context, request operations.DeleteDestinationXataRequest) (*operations.DeleteDestinationXataResponse, error) {
+func (s *Destinations) DeleteDestinationXata(ctx context.Context, request operations.DeleteDestinationXataRequest) (*operations.DeleteDestinationXataResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Xata", request, nil)
if err != nil {
@@ -4435,7 +4710,7 @@ func (s *destinations) DeleteDestinationXata(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4473,7 +4748,7 @@ func (s *destinations) DeleteDestinationXata(ctx context.Context, request operat
}
// GetDestination - Get Destination details
-func (s *destinations) GetDestination(ctx context.Context, request operations.GetDestinationRequest) (*operations.GetDestinationResponse, error) {
+func (s *Destinations) GetDestination(ctx context.Context, request operations.GetDestinationRequest) (*operations.GetDestinationResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}", request, nil)
if err != nil {
@@ -4485,7 +4760,7 @@ func (s *destinations) GetDestination(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4515,12 +4790,14 @@ func (s *destinations) GetDestination(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -4531,7 +4808,7 @@ func (s *destinations) GetDestination(ctx context.Context, request operations.Ge
}
// GetDestinationAwsDatalake - Get Destination details
-func (s *destinations) GetDestinationAwsDatalake(ctx context.Context, request operations.GetDestinationAwsDatalakeRequest) (*operations.GetDestinationAwsDatalakeResponse, error) {
+func (s *Destinations) GetDestinationAwsDatalake(ctx context.Context, request operations.GetDestinationAwsDatalakeRequest) (*operations.GetDestinationAwsDatalakeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#AwsDatalake", request, nil)
if err != nil {
@@ -4543,7 +4820,7 @@ func (s *destinations) GetDestinationAwsDatalake(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4573,12 +4850,14 @@ func (s *destinations) GetDestinationAwsDatalake(ctx context.Context, request op
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -4589,7 +4868,7 @@ func (s *destinations) GetDestinationAwsDatalake(ctx context.Context, request op
}
// GetDestinationAzureBlobStorage - Get Destination details
-func (s *destinations) GetDestinationAzureBlobStorage(ctx context.Context, request operations.GetDestinationAzureBlobStorageRequest) (*operations.GetDestinationAzureBlobStorageResponse, error) {
+func (s *Destinations) GetDestinationAzureBlobStorage(ctx context.Context, request operations.GetDestinationAzureBlobStorageRequest) (*operations.GetDestinationAzureBlobStorageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#AzureBlobStorage", request, nil)
if err != nil {
@@ -4601,7 +4880,7 @@ func (s *destinations) GetDestinationAzureBlobStorage(ctx context.Context, reque
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4631,12 +4910,14 @@ func (s *destinations) GetDestinationAzureBlobStorage(ctx context.Context, reque
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -4647,7 +4928,7 @@ func (s *destinations) GetDestinationAzureBlobStorage(ctx context.Context, reque
}
// GetDestinationBigquery - Get Destination details
-func (s *destinations) GetDestinationBigquery(ctx context.Context, request operations.GetDestinationBigqueryRequest) (*operations.GetDestinationBigqueryResponse, error) {
+func (s *Destinations) GetDestinationBigquery(ctx context.Context, request operations.GetDestinationBigqueryRequest) (*operations.GetDestinationBigqueryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Bigquery", request, nil)
if err != nil {
@@ -4659,7 +4940,7 @@ func (s *destinations) GetDestinationBigquery(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4689,12 +4970,14 @@ func (s *destinations) GetDestinationBigquery(ctx context.Context, request opera
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -4704,10 +4987,10 @@ func (s *destinations) GetDestinationBigquery(ctx context.Context, request opera
return res, nil
}
-// GetDestinationBigqueryDenormalized - Get Destination details
-func (s *destinations) GetDestinationBigqueryDenormalized(ctx context.Context, request operations.GetDestinationBigqueryDenormalizedRequest) (*operations.GetDestinationBigqueryDenormalizedResponse, error) {
+// GetDestinationClickhouse - Get Destination details
+func (s *Destinations) GetDestinationClickhouse(ctx context.Context, request operations.GetDestinationClickhouseRequest) (*operations.GetDestinationClickhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#BigqueryDenormalized", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Clickhouse", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -4717,7 +5000,7 @@ func (s *destinations) GetDestinationBigqueryDenormalized(ctx context.Context, r
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4738,7 +5021,7 @@ func (s *destinations) GetDestinationBigqueryDenormalized(ctx context.Context, r
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetDestinationBigqueryDenormalizedResponse{
+ res := &operations.GetDestinationClickhouseResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4747,12 +5030,14 @@ func (s *destinations) GetDestinationBigqueryDenormalized(ctx context.Context, r
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -4762,10 +5047,10 @@ func (s *destinations) GetDestinationBigqueryDenormalized(ctx context.Context, r
return res, nil
}
-// GetDestinationClickhouse - Get Destination details
-func (s *destinations) GetDestinationClickhouse(ctx context.Context, request operations.GetDestinationClickhouseRequest) (*operations.GetDestinationClickhouseResponse, error) {
+// GetDestinationConvex - Get Destination details
+func (s *Destinations) GetDestinationConvex(ctx context.Context, request operations.GetDestinationConvexRequest) (*operations.GetDestinationConvexResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Clickhouse", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Convex", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -4775,7 +5060,7 @@ func (s *destinations) GetDestinationClickhouse(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4796,7 +5081,7 @@ func (s *destinations) GetDestinationClickhouse(ctx context.Context, request ope
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetDestinationClickhouseResponse{
+ res := &operations.GetDestinationConvexResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4805,12 +5090,14 @@ func (s *destinations) GetDestinationClickhouse(ctx context.Context, request ope
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -4820,10 +5107,10 @@ func (s *destinations) GetDestinationClickhouse(ctx context.Context, request ope
return res, nil
}
-// GetDestinationConvex - Get Destination details
-func (s *destinations) GetDestinationConvex(ctx context.Context, request operations.GetDestinationConvexRequest) (*operations.GetDestinationConvexResponse, error) {
+// GetDestinationCumulio - Get Destination details
+func (s *Destinations) GetDestinationCumulio(ctx context.Context, request operations.GetDestinationCumulioRequest) (*operations.GetDestinationCumulioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Convex", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Cumulio", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -4833,7 +5120,7 @@ func (s *destinations) GetDestinationConvex(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4854,7 +5141,7 @@ func (s *destinations) GetDestinationConvex(ctx context.Context, request operati
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetDestinationConvexResponse{
+ res := &operations.GetDestinationCumulioResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4863,12 +5150,14 @@ func (s *destinations) GetDestinationConvex(ctx context.Context, request operati
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -4878,10 +5167,10 @@ func (s *destinations) GetDestinationConvex(ctx context.Context, request operati
return res, nil
}
-// GetDestinationCumulio - Get Destination details
-func (s *destinations) GetDestinationCumulio(ctx context.Context, request operations.GetDestinationCumulioRequest) (*operations.GetDestinationCumulioResponse, error) {
+// GetDestinationDatabend - Get Destination details
+func (s *Destinations) GetDestinationDatabend(ctx context.Context, request operations.GetDestinationDatabendRequest) (*operations.GetDestinationDatabendResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Cumulio", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databend", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -4891,7 +5180,7 @@ func (s *destinations) GetDestinationCumulio(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4912,7 +5201,7 @@ func (s *destinations) GetDestinationCumulio(ctx context.Context, request operat
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetDestinationCumulioResponse{
+ res := &operations.GetDestinationDatabendResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4921,12 +5210,14 @@ func (s *destinations) GetDestinationCumulio(ctx context.Context, request operat
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -4936,10 +5227,10 @@ func (s *destinations) GetDestinationCumulio(ctx context.Context, request operat
return res, nil
}
-// GetDestinationDatabend - Get Destination details
-func (s *destinations) GetDestinationDatabend(ctx context.Context, request operations.GetDestinationDatabendRequest) (*operations.GetDestinationDatabendResponse, error) {
+// GetDestinationDatabricks - Get Destination details
+func (s *Destinations) GetDestinationDatabricks(ctx context.Context, request operations.GetDestinationDatabricksRequest) (*operations.GetDestinationDatabricksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databend", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databricks", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -4949,7 +5240,7 @@ func (s *destinations) GetDestinationDatabend(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -4970,7 +5261,7 @@ func (s *destinations) GetDestinationDatabend(ctx context.Context, request opera
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetDestinationDatabendResponse{
+ res := &operations.GetDestinationDatabricksResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4979,12 +5270,14 @@ func (s *destinations) GetDestinationDatabend(ctx context.Context, request opera
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -4994,10 +5287,10 @@ func (s *destinations) GetDestinationDatabend(ctx context.Context, request opera
return res, nil
}
-// GetDestinationDatabricks - Get Destination details
-func (s *destinations) GetDestinationDatabricks(ctx context.Context, request operations.GetDestinationDatabricksRequest) (*operations.GetDestinationDatabricksResponse, error) {
+// GetDestinationDevNull - Get Destination details
+func (s *Destinations) GetDestinationDevNull(ctx context.Context, request operations.GetDestinationDevNullRequest) (*operations.GetDestinationDevNullResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databricks", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#DevNull", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -5007,7 +5300,7 @@ func (s *destinations) GetDestinationDatabricks(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5028,7 +5321,7 @@ func (s *destinations) GetDestinationDatabricks(ctx context.Context, request ope
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetDestinationDatabricksResponse{
+ res := &operations.GetDestinationDevNullResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -5037,12 +5330,14 @@ func (s *destinations) GetDestinationDatabricks(ctx context.Context, request ope
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5052,10 +5347,10 @@ func (s *destinations) GetDestinationDatabricks(ctx context.Context, request ope
return res, nil
}
-// GetDestinationDevNull - Get Destination details
-func (s *destinations) GetDestinationDevNull(ctx context.Context, request operations.GetDestinationDevNullRequest) (*operations.GetDestinationDevNullResponse, error) {
+// GetDestinationDuckdb - Get Destination details
+func (s *Destinations) GetDestinationDuckdb(ctx context.Context, request operations.GetDestinationDuckdbRequest) (*operations.GetDestinationDuckdbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#DevNull", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Duckdb", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -5065,7 +5360,7 @@ func (s *destinations) GetDestinationDevNull(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5086,7 +5381,7 @@ func (s *destinations) GetDestinationDevNull(ctx context.Context, request operat
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetDestinationDevNullResponse{
+ res := &operations.GetDestinationDuckdbResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -5095,12 +5390,14 @@ func (s *destinations) GetDestinationDevNull(ctx context.Context, request operat
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5111,7 +5408,7 @@ func (s *destinations) GetDestinationDevNull(ctx context.Context, request operat
}
// GetDestinationDynamodb - Get Destination details
-func (s *destinations) GetDestinationDynamodb(ctx context.Context, request operations.GetDestinationDynamodbRequest) (*operations.GetDestinationDynamodbResponse, error) {
+func (s *Destinations) GetDestinationDynamodb(ctx context.Context, request operations.GetDestinationDynamodbRequest) (*operations.GetDestinationDynamodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Dynamodb", request, nil)
if err != nil {
@@ -5123,7 +5420,7 @@ func (s *destinations) GetDestinationDynamodb(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5153,12 +5450,14 @@ func (s *destinations) GetDestinationDynamodb(ctx context.Context, request opera
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5169,7 +5468,7 @@ func (s *destinations) GetDestinationDynamodb(ctx context.Context, request opera
}
// GetDestinationElasticsearch - Get Destination details
-func (s *destinations) GetDestinationElasticsearch(ctx context.Context, request operations.GetDestinationElasticsearchRequest) (*operations.GetDestinationElasticsearchResponse, error) {
+func (s *Destinations) GetDestinationElasticsearch(ctx context.Context, request operations.GetDestinationElasticsearchRequest) (*operations.GetDestinationElasticsearchResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Elasticsearch", request, nil)
if err != nil {
@@ -5181,7 +5480,7 @@ func (s *destinations) GetDestinationElasticsearch(ctx context.Context, request
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5211,12 +5510,14 @@ func (s *destinations) GetDestinationElasticsearch(ctx context.Context, request
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5227,7 +5528,7 @@ func (s *destinations) GetDestinationElasticsearch(ctx context.Context, request
}
// GetDestinationFirebolt - Get Destination details
-func (s *destinations) GetDestinationFirebolt(ctx context.Context, request operations.GetDestinationFireboltRequest) (*operations.GetDestinationFireboltResponse, error) {
+func (s *Destinations) GetDestinationFirebolt(ctx context.Context, request operations.GetDestinationFireboltRequest) (*operations.GetDestinationFireboltResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Firebolt", request, nil)
if err != nil {
@@ -5239,7 +5540,7 @@ func (s *destinations) GetDestinationFirebolt(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5269,12 +5570,14 @@ func (s *destinations) GetDestinationFirebolt(ctx context.Context, request opera
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5285,7 +5588,7 @@ func (s *destinations) GetDestinationFirebolt(ctx context.Context, request opera
}
// GetDestinationFirestore - Get Destination details
-func (s *destinations) GetDestinationFirestore(ctx context.Context, request operations.GetDestinationFirestoreRequest) (*operations.GetDestinationFirestoreResponse, error) {
+func (s *Destinations) GetDestinationFirestore(ctx context.Context, request operations.GetDestinationFirestoreRequest) (*operations.GetDestinationFirestoreResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Firestore", request, nil)
if err != nil {
@@ -5297,7 +5600,7 @@ func (s *destinations) GetDestinationFirestore(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5327,12 +5630,14 @@ func (s *destinations) GetDestinationFirestore(ctx context.Context, request oper
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5343,7 +5648,7 @@ func (s *destinations) GetDestinationFirestore(ctx context.Context, request oper
}
// GetDestinationGcs - Get Destination details
-func (s *destinations) GetDestinationGcs(ctx context.Context, request operations.GetDestinationGcsRequest) (*operations.GetDestinationGcsResponse, error) {
+func (s *Destinations) GetDestinationGcs(ctx context.Context, request operations.GetDestinationGcsRequest) (*operations.GetDestinationGcsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Gcs", request, nil)
if err != nil {
@@ -5355,7 +5660,7 @@ func (s *destinations) GetDestinationGcs(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5385,12 +5690,14 @@ func (s *destinations) GetDestinationGcs(ctx context.Context, request operations
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5401,7 +5708,7 @@ func (s *destinations) GetDestinationGcs(ctx context.Context, request operations
}
// GetDestinationGoogleSheets - Get Destination details
-func (s *destinations) GetDestinationGoogleSheets(ctx context.Context, request operations.GetDestinationGoogleSheetsRequest) (*operations.GetDestinationGoogleSheetsResponse, error) {
+func (s *Destinations) GetDestinationGoogleSheets(ctx context.Context, request operations.GetDestinationGoogleSheetsRequest) (*operations.GetDestinationGoogleSheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#GoogleSheets", request, nil)
if err != nil {
@@ -5413,7 +5720,7 @@ func (s *destinations) GetDestinationGoogleSheets(ctx context.Context, request o
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5443,12 +5750,14 @@ func (s *destinations) GetDestinationGoogleSheets(ctx context.Context, request o
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5459,7 +5768,7 @@ func (s *destinations) GetDestinationGoogleSheets(ctx context.Context, request o
}
// GetDestinationKeen - Get Destination details
-func (s *destinations) GetDestinationKeen(ctx context.Context, request operations.GetDestinationKeenRequest) (*operations.GetDestinationKeenResponse, error) {
+func (s *Destinations) GetDestinationKeen(ctx context.Context, request operations.GetDestinationKeenRequest) (*operations.GetDestinationKeenResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Keen", request, nil)
if err != nil {
@@ -5471,7 +5780,7 @@ func (s *destinations) GetDestinationKeen(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5501,12 +5810,14 @@ func (s *destinations) GetDestinationKeen(ctx context.Context, request operation
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5517,7 +5828,7 @@ func (s *destinations) GetDestinationKeen(ctx context.Context, request operation
}
// GetDestinationKinesis - Get Destination details
-func (s *destinations) GetDestinationKinesis(ctx context.Context, request operations.GetDestinationKinesisRequest) (*operations.GetDestinationKinesisResponse, error) {
+func (s *Destinations) GetDestinationKinesis(ctx context.Context, request operations.GetDestinationKinesisRequest) (*operations.GetDestinationKinesisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Kinesis", request, nil)
if err != nil {
@@ -5529,7 +5840,7 @@ func (s *destinations) GetDestinationKinesis(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5559,12 +5870,14 @@ func (s *destinations) GetDestinationKinesis(ctx context.Context, request operat
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5575,7 +5888,7 @@ func (s *destinations) GetDestinationKinesis(ctx context.Context, request operat
}
// GetDestinationLangchain - Get Destination details
-func (s *destinations) GetDestinationLangchain(ctx context.Context, request operations.GetDestinationLangchainRequest) (*operations.GetDestinationLangchainResponse, error) {
+func (s *Destinations) GetDestinationLangchain(ctx context.Context, request operations.GetDestinationLangchainRequest) (*operations.GetDestinationLangchainResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Langchain", request, nil)
if err != nil {
@@ -5587,7 +5900,7 @@ func (s *destinations) GetDestinationLangchain(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5617,12 +5930,14 @@ func (s *destinations) GetDestinationLangchain(ctx context.Context, request oper
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5633,7 +5948,7 @@ func (s *destinations) GetDestinationLangchain(ctx context.Context, request oper
}
// GetDestinationMilvus - Get Destination details
-func (s *destinations) GetDestinationMilvus(ctx context.Context, request operations.GetDestinationMilvusRequest) (*operations.GetDestinationMilvusResponse, error) {
+func (s *Destinations) GetDestinationMilvus(ctx context.Context, request operations.GetDestinationMilvusRequest) (*operations.GetDestinationMilvusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Milvus", request, nil)
if err != nil {
@@ -5645,7 +5960,7 @@ func (s *destinations) GetDestinationMilvus(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5675,12 +5990,14 @@ func (s *destinations) GetDestinationMilvus(ctx context.Context, request operati
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5691,7 +6008,7 @@ func (s *destinations) GetDestinationMilvus(ctx context.Context, request operati
}
// GetDestinationMongodb - Get Destination details
-func (s *destinations) GetDestinationMongodb(ctx context.Context, request operations.GetDestinationMongodbRequest) (*operations.GetDestinationMongodbResponse, error) {
+func (s *Destinations) GetDestinationMongodb(ctx context.Context, request operations.GetDestinationMongodbRequest) (*operations.GetDestinationMongodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Mongodb", request, nil)
if err != nil {
@@ -5703,7 +6020,7 @@ func (s *destinations) GetDestinationMongodb(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5733,12 +6050,14 @@ func (s *destinations) GetDestinationMongodb(ctx context.Context, request operat
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5749,7 +6068,7 @@ func (s *destinations) GetDestinationMongodb(ctx context.Context, request operat
}
// GetDestinationMssql - Get Destination details
-func (s *destinations) GetDestinationMssql(ctx context.Context, request operations.GetDestinationMssqlRequest) (*operations.GetDestinationMssqlResponse, error) {
+func (s *Destinations) GetDestinationMssql(ctx context.Context, request operations.GetDestinationMssqlRequest) (*operations.GetDestinationMssqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Mssql", request, nil)
if err != nil {
@@ -5761,7 +6080,7 @@ func (s *destinations) GetDestinationMssql(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5791,12 +6110,14 @@ func (s *destinations) GetDestinationMssql(ctx context.Context, request operatio
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5807,7 +6128,7 @@ func (s *destinations) GetDestinationMssql(ctx context.Context, request operatio
}
// GetDestinationMysql - Get Destination details
-func (s *destinations) GetDestinationMysql(ctx context.Context, request operations.GetDestinationMysqlRequest) (*operations.GetDestinationMysqlResponse, error) {
+func (s *Destinations) GetDestinationMysql(ctx context.Context, request operations.GetDestinationMysqlRequest) (*operations.GetDestinationMysqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Mysql", request, nil)
if err != nil {
@@ -5819,7 +6140,7 @@ func (s *destinations) GetDestinationMysql(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5849,12 +6170,14 @@ func (s *destinations) GetDestinationMysql(ctx context.Context, request operatio
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5865,7 +6188,7 @@ func (s *destinations) GetDestinationMysql(ctx context.Context, request operatio
}
// GetDestinationOracle - Get Destination details
-func (s *destinations) GetDestinationOracle(ctx context.Context, request operations.GetDestinationOracleRequest) (*operations.GetDestinationOracleResponse, error) {
+func (s *Destinations) GetDestinationOracle(ctx context.Context, request operations.GetDestinationOracleRequest) (*operations.GetDestinationOracleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Oracle", request, nil)
if err != nil {
@@ -5877,7 +6200,7 @@ func (s *destinations) GetDestinationOracle(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5907,12 +6230,14 @@ func (s *destinations) GetDestinationOracle(ctx context.Context, request operati
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5923,7 +6248,7 @@ func (s *destinations) GetDestinationOracle(ctx context.Context, request operati
}
// GetDestinationPinecone - Get Destination details
-func (s *destinations) GetDestinationPinecone(ctx context.Context, request operations.GetDestinationPineconeRequest) (*operations.GetDestinationPineconeResponse, error) {
+func (s *Destinations) GetDestinationPinecone(ctx context.Context, request operations.GetDestinationPineconeRequest) (*operations.GetDestinationPineconeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Pinecone", request, nil)
if err != nil {
@@ -5935,7 +6260,7 @@ func (s *destinations) GetDestinationPinecone(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -5965,12 +6290,14 @@ func (s *destinations) GetDestinationPinecone(ctx context.Context, request opera
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -5981,7 +6308,7 @@ func (s *destinations) GetDestinationPinecone(ctx context.Context, request opera
}
// GetDestinationPostgres - Get Destination details
-func (s *destinations) GetDestinationPostgres(ctx context.Context, request operations.GetDestinationPostgresRequest) (*operations.GetDestinationPostgresResponse, error) {
+func (s *Destinations) GetDestinationPostgres(ctx context.Context, request operations.GetDestinationPostgresRequest) (*operations.GetDestinationPostgresResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Postgres", request, nil)
if err != nil {
@@ -5993,7 +6320,7 @@ func (s *destinations) GetDestinationPostgres(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6023,12 +6350,14 @@ func (s *destinations) GetDestinationPostgres(ctx context.Context, request opera
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6039,7 +6368,7 @@ func (s *destinations) GetDestinationPostgres(ctx context.Context, request opera
}
// GetDestinationPubsub - Get Destination details
-func (s *destinations) GetDestinationPubsub(ctx context.Context, request operations.GetDestinationPubsubRequest) (*operations.GetDestinationPubsubResponse, error) {
+func (s *Destinations) GetDestinationPubsub(ctx context.Context, request operations.GetDestinationPubsubRequest) (*operations.GetDestinationPubsubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Pubsub", request, nil)
if err != nil {
@@ -6051,7 +6380,7 @@ func (s *destinations) GetDestinationPubsub(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6081,12 +6410,74 @@ func (s *destinations) GetDestinationPubsub(ctx context.Context, request operati
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
+ }
+ case httpRes.StatusCode == 403:
+ fallthrough
+ case httpRes.StatusCode == 404:
+ }
+
+ return res, nil
+}
+
+// GetDestinationQdrant - Get Destination details
+func (s *Destinations) GetDestinationQdrant(ctx context.Context, request operations.GetDestinationQdrantRequest) (*operations.GetDestinationQdrantResponse, error) {
+ baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Qdrant", request, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error generating URL: %w", err)
+ }
+
+ req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error creating request: %w", err)
+ }
+ req.Header.Set("Accept", "application/json")
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
+
+ client := s.sdkConfiguration.SecurityClient
+
+ httpRes, err := client.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("error sending request: %w", err)
+ }
+ if httpRes == nil {
+ return nil, fmt.Errorf("error sending request: no response")
+ }
+
+ rawBody, err := io.ReadAll(httpRes.Body)
+ if err != nil {
+ return nil, fmt.Errorf("error reading response body: %w", err)
+ }
+ httpRes.Body.Close()
+ httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
+
+ contentType := httpRes.Header.Get("Content-Type")
+
+ res := &operations.GetDestinationQdrantResponse{
+ StatusCode: httpRes.StatusCode,
+ ContentType: contentType,
+ RawResponse: httpRes,
+ }
+ switch {
+ case httpRes.StatusCode == 200:
+ switch {
+ case utils.MatchContentType(contentType, `application/json`):
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
+ }
+
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6097,7 +6488,7 @@ func (s *destinations) GetDestinationPubsub(ctx context.Context, request operati
}
// GetDestinationRedis - Get Destination details
-func (s *destinations) GetDestinationRedis(ctx context.Context, request operations.GetDestinationRedisRequest) (*operations.GetDestinationRedisResponse, error) {
+func (s *Destinations) GetDestinationRedis(ctx context.Context, request operations.GetDestinationRedisRequest) (*operations.GetDestinationRedisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Redis", request, nil)
if err != nil {
@@ -6109,7 +6500,7 @@ func (s *destinations) GetDestinationRedis(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6139,12 +6530,14 @@ func (s *destinations) GetDestinationRedis(ctx context.Context, request operatio
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6155,7 +6548,7 @@ func (s *destinations) GetDestinationRedis(ctx context.Context, request operatio
}
// GetDestinationRedshift - Get Destination details
-func (s *destinations) GetDestinationRedshift(ctx context.Context, request operations.GetDestinationRedshiftRequest) (*operations.GetDestinationRedshiftResponse, error) {
+func (s *Destinations) GetDestinationRedshift(ctx context.Context, request operations.GetDestinationRedshiftRequest) (*operations.GetDestinationRedshiftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Redshift", request, nil)
if err != nil {
@@ -6167,7 +6560,7 @@ func (s *destinations) GetDestinationRedshift(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6197,12 +6590,14 @@ func (s *destinations) GetDestinationRedshift(ctx context.Context, request opera
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6213,7 +6608,7 @@ func (s *destinations) GetDestinationRedshift(ctx context.Context, request opera
}
// GetDestinationS3 - Get Destination details
-func (s *destinations) GetDestinationS3(ctx context.Context, request operations.GetDestinationS3Request) (*operations.GetDestinationS3Response, error) {
+func (s *Destinations) GetDestinationS3(ctx context.Context, request operations.GetDestinationS3Request) (*operations.GetDestinationS3Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#S3", request, nil)
if err != nil {
@@ -6225,7 +6620,7 @@ func (s *destinations) GetDestinationS3(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6255,12 +6650,14 @@ func (s *destinations) GetDestinationS3(ctx context.Context, request operations.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6271,7 +6668,7 @@ func (s *destinations) GetDestinationS3(ctx context.Context, request operations.
}
// GetDestinationS3Glue - Get Destination details
-func (s *destinations) GetDestinationS3Glue(ctx context.Context, request operations.GetDestinationS3GlueRequest) (*operations.GetDestinationS3GlueResponse, error) {
+func (s *Destinations) GetDestinationS3Glue(ctx context.Context, request operations.GetDestinationS3GlueRequest) (*operations.GetDestinationS3GlueResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#S3Glue", request, nil)
if err != nil {
@@ -6283,7 +6680,7 @@ func (s *destinations) GetDestinationS3Glue(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6313,12 +6710,14 @@ func (s *destinations) GetDestinationS3Glue(ctx context.Context, request operati
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6329,7 +6728,7 @@ func (s *destinations) GetDestinationS3Glue(ctx context.Context, request operati
}
// GetDestinationSftpJSON - Get Destination details
-func (s *destinations) GetDestinationSftpJSON(ctx context.Context, request operations.GetDestinationSftpJSONRequest) (*operations.GetDestinationSftpJSONResponse, error) {
+func (s *Destinations) GetDestinationSftpJSON(ctx context.Context, request operations.GetDestinationSftpJSONRequest) (*operations.GetDestinationSftpJSONResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#SftpJson", request, nil)
if err != nil {
@@ -6341,7 +6740,7 @@ func (s *destinations) GetDestinationSftpJSON(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6371,12 +6770,14 @@ func (s *destinations) GetDestinationSftpJSON(ctx context.Context, request opera
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6387,7 +6788,7 @@ func (s *destinations) GetDestinationSftpJSON(ctx context.Context, request opera
}
// GetDestinationSnowflake - Get Destination details
-func (s *destinations) GetDestinationSnowflake(ctx context.Context, request operations.GetDestinationSnowflakeRequest) (*operations.GetDestinationSnowflakeResponse, error) {
+func (s *Destinations) GetDestinationSnowflake(ctx context.Context, request operations.GetDestinationSnowflakeRequest) (*operations.GetDestinationSnowflakeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Snowflake", request, nil)
if err != nil {
@@ -6399,7 +6800,7 @@ func (s *destinations) GetDestinationSnowflake(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6429,12 +6830,14 @@ func (s *destinations) GetDestinationSnowflake(ctx context.Context, request oper
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6445,7 +6848,7 @@ func (s *destinations) GetDestinationSnowflake(ctx context.Context, request oper
}
// GetDestinationTimeplus - Get Destination details
-func (s *destinations) GetDestinationTimeplus(ctx context.Context, request operations.GetDestinationTimeplusRequest) (*operations.GetDestinationTimeplusResponse, error) {
+func (s *Destinations) GetDestinationTimeplus(ctx context.Context, request operations.GetDestinationTimeplusRequest) (*operations.GetDestinationTimeplusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Timeplus", request, nil)
if err != nil {
@@ -6457,7 +6860,7 @@ func (s *destinations) GetDestinationTimeplus(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6487,12 +6890,14 @@ func (s *destinations) GetDestinationTimeplus(ctx context.Context, request opera
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6503,7 +6908,7 @@ func (s *destinations) GetDestinationTimeplus(ctx context.Context, request opera
}
// GetDestinationTypesense - Get Destination details
-func (s *destinations) GetDestinationTypesense(ctx context.Context, request operations.GetDestinationTypesenseRequest) (*operations.GetDestinationTypesenseResponse, error) {
+func (s *Destinations) GetDestinationTypesense(ctx context.Context, request operations.GetDestinationTypesenseRequest) (*operations.GetDestinationTypesenseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Typesense", request, nil)
if err != nil {
@@ -6515,7 +6920,7 @@ func (s *destinations) GetDestinationTypesense(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6545,12 +6950,14 @@ func (s *destinations) GetDestinationTypesense(ctx context.Context, request oper
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6561,7 +6968,7 @@ func (s *destinations) GetDestinationTypesense(ctx context.Context, request oper
}
// GetDestinationVertica - Get Destination details
-func (s *destinations) GetDestinationVertica(ctx context.Context, request operations.GetDestinationVerticaRequest) (*operations.GetDestinationVerticaResponse, error) {
+func (s *Destinations) GetDestinationVertica(ctx context.Context, request operations.GetDestinationVerticaRequest) (*operations.GetDestinationVerticaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Vertica", request, nil)
if err != nil {
@@ -6573,7 +6980,7 @@ func (s *destinations) GetDestinationVertica(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6603,12 +7010,74 @@ func (s *destinations) GetDestinationVertica(ctx context.Context, request operat
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
+ }
+ case httpRes.StatusCode == 403:
+ fallthrough
+ case httpRes.StatusCode == 404:
+ }
+
+ return res, nil
+}
+
+// GetDestinationWeaviate - Get Destination details
+func (s *Destinations) GetDestinationWeaviate(ctx context.Context, request operations.GetDestinationWeaviateRequest) (*operations.GetDestinationWeaviateResponse, error) {
+ baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Weaviate", request, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error generating URL: %w", err)
+ }
+
+ req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error creating request: %w", err)
+ }
+ req.Header.Set("Accept", "application/json")
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
+
+ client := s.sdkConfiguration.SecurityClient
+
+ httpRes, err := client.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("error sending request: %w", err)
+ }
+ if httpRes == nil {
+ return nil, fmt.Errorf("error sending request: no response")
+ }
+
+ rawBody, err := io.ReadAll(httpRes.Body)
+ if err != nil {
+ return nil, fmt.Errorf("error reading response body: %w", err)
+ }
+ httpRes.Body.Close()
+ httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
+
+ contentType := httpRes.Header.Get("Content-Type")
+
+ res := &operations.GetDestinationWeaviateResponse{
+ StatusCode: httpRes.StatusCode,
+ ContentType: contentType,
+ RawResponse: httpRes,
+ }
+ switch {
+ case httpRes.StatusCode == 200:
+ switch {
+ case utils.MatchContentType(contentType, `application/json`):
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
+ }
+
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6619,7 +7088,7 @@ func (s *destinations) GetDestinationVertica(ctx context.Context, request operat
}
// GetDestinationXata - Get Destination details
-func (s *destinations) GetDestinationXata(ctx context.Context, request operations.GetDestinationXataRequest) (*operations.GetDestinationXataResponse, error) {
+func (s *Destinations) GetDestinationXata(ctx context.Context, request operations.GetDestinationXataRequest) (*operations.GetDestinationXataResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Xata", request, nil)
if err != nil {
@@ -6631,7 +7100,7 @@ func (s *destinations) GetDestinationXata(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -6661,12 +7130,14 @@ func (s *destinations) GetDestinationXata(ctx context.Context, request operation
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6677,7 +7148,7 @@ func (s *destinations) GetDestinationXata(ctx context.Context, request operation
}
// ListDestinations - List destinations
-func (s *destinations) ListDestinations(ctx context.Context, request operations.ListDestinationsRequest) (*operations.ListDestinationsResponse, error) {
+func (s *Destinations) ListDestinations(ctx context.Context, request operations.ListDestinationsRequest) (*operations.ListDestinationsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/destinations"
@@ -6686,7 +7157,7 @@ func (s *destinations) ListDestinations(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
if err := utils.PopulateQueryParams(ctx, req, request, nil); err != nil {
return nil, fmt.Errorf("error populating query params: %w", err)
@@ -6720,12 +7191,14 @@ func (s *destinations) ListDestinations(ctx context.Context, request operations.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationsResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationsResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationsResponse = out
+ res.DestinationsResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6736,18 +7209,17 @@ func (s *destinations) ListDestinations(ctx context.Context, request operations.
}
// PatchDestination - Update a Destination
-func (s *destinations) PatchDestination(ctx context.Context, request operations.PatchDestinationRequest) (*operations.PatchDestinationResponse, error) {
+func (s *Destinations) PatchDestination(ctx context.Context, request operations.PatchDestinationRequest) (*operations.PatchDestinationResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationPatchRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationPatchRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6756,7 +7228,7 @@ func (s *destinations) PatchDestination(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6789,12 +7261,14 @@ func (s *destinations) PatchDestination(ctx context.Context, request operations.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6805,18 +7279,17 @@ func (s *destinations) PatchDestination(ctx context.Context, request operations.
}
// PutDestination - Update a Destination and fully overwrite it
-func (s *destinations) PutDestination(ctx context.Context, request operations.PutDestinationRequest) (*operations.PutDestinationResponse, error) {
+func (s *Destinations) PutDestination(ctx context.Context, request operations.PutDestinationRequest) (*operations.PutDestinationResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6825,7 +7298,7 @@ func (s *destinations) PutDestination(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6858,12 +7331,14 @@ func (s *destinations) PutDestination(ctx context.Context, request operations.Pu
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.DestinationResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.DestinationResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.DestinationResponse = out
+ res.DestinationResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -6874,18 +7349,17 @@ func (s *destinations) PutDestination(ctx context.Context, request operations.Pu
}
// PutDestinationAwsDatalake - Update a Destination fully
-func (s *destinations) PutDestinationAwsDatalake(ctx context.Context, request operations.PutDestinationAwsDatalakeRequest) (*operations.PutDestinationAwsDatalakeResponse, error) {
+func (s *Destinations) PutDestinationAwsDatalake(ctx context.Context, request operations.PutDestinationAwsDatalakeRequest) (*operations.PutDestinationAwsDatalakeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#AwsDatalake", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationAwsDatalakePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationAwsDatalakePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6894,7 +7368,7 @@ func (s *destinations) PutDestinationAwsDatalake(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6935,18 +7409,17 @@ func (s *destinations) PutDestinationAwsDatalake(ctx context.Context, request op
}
// PutDestinationAzureBlobStorage - Update a Destination fully
-func (s *destinations) PutDestinationAzureBlobStorage(ctx context.Context, request operations.PutDestinationAzureBlobStorageRequest) (*operations.PutDestinationAzureBlobStorageResponse, error) {
+func (s *Destinations) PutDestinationAzureBlobStorage(ctx context.Context, request operations.PutDestinationAzureBlobStorageRequest) (*operations.PutDestinationAzureBlobStorageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#AzureBlobStorage", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationAzureBlobStoragePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationAzureBlobStoragePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6955,7 +7428,7 @@ func (s *destinations) PutDestinationAzureBlobStorage(ctx context.Context, reque
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6996,18 +7469,17 @@ func (s *destinations) PutDestinationAzureBlobStorage(ctx context.Context, reque
}
// PutDestinationBigquery - Update a Destination fully
-func (s *destinations) PutDestinationBigquery(ctx context.Context, request operations.PutDestinationBigqueryRequest) (*operations.PutDestinationBigqueryResponse, error) {
+func (s *Destinations) PutDestinationBigquery(ctx context.Context, request operations.PutDestinationBigqueryRequest) (*operations.PutDestinationBigqueryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Bigquery", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationBigqueryPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationBigqueryPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7016,7 +7488,7 @@ func (s *destinations) PutDestinationBigquery(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7056,19 +7528,18 @@ func (s *destinations) PutDestinationBigquery(ctx context.Context, request opera
return res, nil
}
-// PutDestinationBigqueryDenormalized - Update a Destination fully
-func (s *destinations) PutDestinationBigqueryDenormalized(ctx context.Context, request operations.PutDestinationBigqueryDenormalizedRequest) (*operations.PutDestinationBigqueryDenormalizedResponse, error) {
+// PutDestinationClickhouse - Update a Destination fully
+func (s *Destinations) PutDestinationClickhouse(ctx context.Context, request operations.PutDestinationClickhouseRequest) (*operations.PutDestinationClickhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#BigqueryDenormalized", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Clickhouse", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationBigqueryDenormalizedPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationClickhousePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7077,7 +7548,7 @@ func (s *destinations) PutDestinationBigqueryDenormalized(ctx context.Context, r
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7101,7 +7572,7 @@ func (s *destinations) PutDestinationBigqueryDenormalized(ctx context.Context, r
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutDestinationBigqueryDenormalizedResponse{
+ res := &operations.PutDestinationClickhouseResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -7117,19 +7588,18 @@ func (s *destinations) PutDestinationBigqueryDenormalized(ctx context.Context, r
return res, nil
}
-// PutDestinationClickhouse - Update a Destination fully
-func (s *destinations) PutDestinationClickhouse(ctx context.Context, request operations.PutDestinationClickhouseRequest) (*operations.PutDestinationClickhouseResponse, error) {
+// PutDestinationConvex - Update a Destination fully
+func (s *Destinations) PutDestinationConvex(ctx context.Context, request operations.PutDestinationConvexRequest) (*operations.PutDestinationConvexResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Clickhouse", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Convex", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationClickhousePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationConvexPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7138,7 +7608,7 @@ func (s *destinations) PutDestinationClickhouse(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7162,7 +7632,7 @@ func (s *destinations) PutDestinationClickhouse(ctx context.Context, request ope
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutDestinationClickhouseResponse{
+ res := &operations.PutDestinationConvexResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -7178,19 +7648,18 @@ func (s *destinations) PutDestinationClickhouse(ctx context.Context, request ope
return res, nil
}
-// PutDestinationConvex - Update a Destination fully
-func (s *destinations) PutDestinationConvex(ctx context.Context, request operations.PutDestinationConvexRequest) (*operations.PutDestinationConvexResponse, error) {
+// PutDestinationCumulio - Update a Destination fully
+func (s *Destinations) PutDestinationCumulio(ctx context.Context, request operations.PutDestinationCumulioRequest) (*operations.PutDestinationCumulioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Convex", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Cumulio", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationConvexPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationCumulioPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7199,7 +7668,7 @@ func (s *destinations) PutDestinationConvex(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7223,7 +7692,7 @@ func (s *destinations) PutDestinationConvex(ctx context.Context, request operati
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutDestinationConvexResponse{
+ res := &operations.PutDestinationCumulioResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -7239,19 +7708,18 @@ func (s *destinations) PutDestinationConvex(ctx context.Context, request operati
return res, nil
}
-// PutDestinationCumulio - Update a Destination fully
-func (s *destinations) PutDestinationCumulio(ctx context.Context, request operations.PutDestinationCumulioRequest) (*operations.PutDestinationCumulioResponse, error) {
+// PutDestinationDatabend - Update a Destination fully
+func (s *Destinations) PutDestinationDatabend(ctx context.Context, request operations.PutDestinationDatabendRequest) (*operations.PutDestinationDatabendResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Cumulio", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databend", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationCumulioPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationDatabendPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7260,7 +7728,7 @@ func (s *destinations) PutDestinationCumulio(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7284,7 +7752,7 @@ func (s *destinations) PutDestinationCumulio(ctx context.Context, request operat
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutDestinationCumulioResponse{
+ res := &operations.PutDestinationDatabendResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -7300,19 +7768,18 @@ func (s *destinations) PutDestinationCumulio(ctx context.Context, request operat
return res, nil
}
-// PutDestinationDatabend - Update a Destination fully
-func (s *destinations) PutDestinationDatabend(ctx context.Context, request operations.PutDestinationDatabendRequest) (*operations.PutDestinationDatabendResponse, error) {
+// PutDestinationDatabricks - Update a Destination fully
+func (s *Destinations) PutDestinationDatabricks(ctx context.Context, request operations.PutDestinationDatabricksRequest) (*operations.PutDestinationDatabricksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databend", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databricks", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationDatabendPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationDatabricksPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7321,7 +7788,7 @@ func (s *destinations) PutDestinationDatabend(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7345,7 +7812,7 @@ func (s *destinations) PutDestinationDatabend(ctx context.Context, request opera
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutDestinationDatabendResponse{
+ res := &operations.PutDestinationDatabricksResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -7361,19 +7828,18 @@ func (s *destinations) PutDestinationDatabend(ctx context.Context, request opera
return res, nil
}
-// PutDestinationDatabricks - Update a Destination fully
-func (s *destinations) PutDestinationDatabricks(ctx context.Context, request operations.PutDestinationDatabricksRequest) (*operations.PutDestinationDatabricksResponse, error) {
+// PutDestinationDevNull - Update a Destination fully
+func (s *Destinations) PutDestinationDevNull(ctx context.Context, request operations.PutDestinationDevNullRequest) (*operations.PutDestinationDevNullResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Databricks", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#DevNull", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationDatabricksPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationDevNullPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7382,7 +7848,7 @@ func (s *destinations) PutDestinationDatabricks(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7406,7 +7872,7 @@ func (s *destinations) PutDestinationDatabricks(ctx context.Context, request ope
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutDestinationDatabricksResponse{
+ res := &operations.PutDestinationDevNullResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -7422,19 +7888,18 @@ func (s *destinations) PutDestinationDatabricks(ctx context.Context, request ope
return res, nil
}
-// PutDestinationDevNull - Update a Destination fully
-func (s *destinations) PutDestinationDevNull(ctx context.Context, request operations.PutDestinationDevNullRequest) (*operations.PutDestinationDevNullResponse, error) {
+// PutDestinationDuckdb - Update a Destination fully
+func (s *Destinations) PutDestinationDuckdb(ctx context.Context, request operations.PutDestinationDuckdbRequest) (*operations.PutDestinationDuckdbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#DevNull", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Duckdb", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationDevNullPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationDuckdbPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7443,7 +7908,7 @@ func (s *destinations) PutDestinationDevNull(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7467,7 +7932,7 @@ func (s *destinations) PutDestinationDevNull(ctx context.Context, request operat
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutDestinationDevNullResponse{
+ res := &operations.PutDestinationDuckdbResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -7484,18 +7949,17 @@ func (s *destinations) PutDestinationDevNull(ctx context.Context, request operat
}
// PutDestinationDynamodb - Update a Destination fully
-func (s *destinations) PutDestinationDynamodb(ctx context.Context, request operations.PutDestinationDynamodbRequest) (*operations.PutDestinationDynamodbResponse, error) {
+func (s *Destinations) PutDestinationDynamodb(ctx context.Context, request operations.PutDestinationDynamodbRequest) (*operations.PutDestinationDynamodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Dynamodb", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationDynamodbPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationDynamodbPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7504,7 +7968,7 @@ func (s *destinations) PutDestinationDynamodb(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7545,18 +8009,17 @@ func (s *destinations) PutDestinationDynamodb(ctx context.Context, request opera
}
// PutDestinationElasticsearch - Update a Destination fully
-func (s *destinations) PutDestinationElasticsearch(ctx context.Context, request operations.PutDestinationElasticsearchRequest) (*operations.PutDestinationElasticsearchResponse, error) {
+func (s *Destinations) PutDestinationElasticsearch(ctx context.Context, request operations.PutDestinationElasticsearchRequest) (*operations.PutDestinationElasticsearchResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Elasticsearch", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationElasticsearchPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationElasticsearchPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7565,7 +8028,7 @@ func (s *destinations) PutDestinationElasticsearch(ctx context.Context, request
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7606,18 +8069,17 @@ func (s *destinations) PutDestinationElasticsearch(ctx context.Context, request
}
// PutDestinationFirebolt - Update a Destination fully
-func (s *destinations) PutDestinationFirebolt(ctx context.Context, request operations.PutDestinationFireboltRequest) (*operations.PutDestinationFireboltResponse, error) {
+func (s *Destinations) PutDestinationFirebolt(ctx context.Context, request operations.PutDestinationFireboltRequest) (*operations.PutDestinationFireboltResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Firebolt", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationFireboltPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationFireboltPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7626,7 +8088,7 @@ func (s *destinations) PutDestinationFirebolt(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7667,18 +8129,17 @@ func (s *destinations) PutDestinationFirebolt(ctx context.Context, request opera
}
// PutDestinationFirestore - Update a Destination fully
-func (s *destinations) PutDestinationFirestore(ctx context.Context, request operations.PutDestinationFirestoreRequest) (*operations.PutDestinationFirestoreResponse, error) {
+func (s *Destinations) PutDestinationFirestore(ctx context.Context, request operations.PutDestinationFirestoreRequest) (*operations.PutDestinationFirestoreResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Firestore", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationFirestorePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationFirestorePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7687,7 +8148,7 @@ func (s *destinations) PutDestinationFirestore(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7728,18 +8189,17 @@ func (s *destinations) PutDestinationFirestore(ctx context.Context, request oper
}
// PutDestinationGcs - Update a Destination fully
-func (s *destinations) PutDestinationGcs(ctx context.Context, request operations.PutDestinationGcsRequest) (*operations.PutDestinationGcsResponse, error) {
+func (s *Destinations) PutDestinationGcs(ctx context.Context, request operations.PutDestinationGcsRequest) (*operations.PutDestinationGcsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Gcs", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationGcsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationGcsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7748,7 +8208,7 @@ func (s *destinations) PutDestinationGcs(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7789,18 +8249,17 @@ func (s *destinations) PutDestinationGcs(ctx context.Context, request operations
}
// PutDestinationGoogleSheets - Update a Destination fully
-func (s *destinations) PutDestinationGoogleSheets(ctx context.Context, request operations.PutDestinationGoogleSheetsRequest) (*operations.PutDestinationGoogleSheetsResponse, error) {
+func (s *Destinations) PutDestinationGoogleSheets(ctx context.Context, request operations.PutDestinationGoogleSheetsRequest) (*operations.PutDestinationGoogleSheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#GoogleSheets", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationGoogleSheetsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationGoogleSheetsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7809,7 +8268,7 @@ func (s *destinations) PutDestinationGoogleSheets(ctx context.Context, request o
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7850,18 +8309,17 @@ func (s *destinations) PutDestinationGoogleSheets(ctx context.Context, request o
}
// PutDestinationKeen - Update a Destination fully
-func (s *destinations) PutDestinationKeen(ctx context.Context, request operations.PutDestinationKeenRequest) (*operations.PutDestinationKeenResponse, error) {
+func (s *Destinations) PutDestinationKeen(ctx context.Context, request operations.PutDestinationKeenRequest) (*operations.PutDestinationKeenResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Keen", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationKeenPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationKeenPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7870,7 +8328,7 @@ func (s *destinations) PutDestinationKeen(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7911,18 +8369,17 @@ func (s *destinations) PutDestinationKeen(ctx context.Context, request operation
}
// PutDestinationKinesis - Update a Destination fully
-func (s *destinations) PutDestinationKinesis(ctx context.Context, request operations.PutDestinationKinesisRequest) (*operations.PutDestinationKinesisResponse, error) {
+func (s *Destinations) PutDestinationKinesis(ctx context.Context, request operations.PutDestinationKinesisRequest) (*operations.PutDestinationKinesisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Kinesis", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationKinesisPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationKinesisPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7931,7 +8388,7 @@ func (s *destinations) PutDestinationKinesis(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7972,18 +8429,17 @@ func (s *destinations) PutDestinationKinesis(ctx context.Context, request operat
}
// PutDestinationLangchain - Update a Destination fully
-func (s *destinations) PutDestinationLangchain(ctx context.Context, request operations.PutDestinationLangchainRequest) (*operations.PutDestinationLangchainResponse, error) {
+func (s *Destinations) PutDestinationLangchain(ctx context.Context, request operations.PutDestinationLangchainRequest) (*operations.PutDestinationLangchainResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Langchain", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationLangchainPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationLangchainPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7992,7 +8448,7 @@ func (s *destinations) PutDestinationLangchain(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8033,18 +8489,17 @@ func (s *destinations) PutDestinationLangchain(ctx context.Context, request oper
}
// PutDestinationMilvus - Update a Destination fully
-func (s *destinations) PutDestinationMilvus(ctx context.Context, request operations.PutDestinationMilvusRequest) (*operations.PutDestinationMilvusResponse, error) {
+func (s *Destinations) PutDestinationMilvus(ctx context.Context, request operations.PutDestinationMilvusRequest) (*operations.PutDestinationMilvusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Milvus", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationMilvusPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationMilvusPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8053,7 +8508,7 @@ func (s *destinations) PutDestinationMilvus(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8094,18 +8549,17 @@ func (s *destinations) PutDestinationMilvus(ctx context.Context, request operati
}
// PutDestinationMongodb - Update a Destination fully
-func (s *destinations) PutDestinationMongodb(ctx context.Context, request operations.PutDestinationMongodbRequest) (*operations.PutDestinationMongodbResponse, error) {
+func (s *Destinations) PutDestinationMongodb(ctx context.Context, request operations.PutDestinationMongodbRequest) (*operations.PutDestinationMongodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Mongodb", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationMongodbPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationMongodbPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8114,7 +8568,7 @@ func (s *destinations) PutDestinationMongodb(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8155,18 +8609,17 @@ func (s *destinations) PutDestinationMongodb(ctx context.Context, request operat
}
// PutDestinationMssql - Update a Destination fully
-func (s *destinations) PutDestinationMssql(ctx context.Context, request operations.PutDestinationMssqlRequest) (*operations.PutDestinationMssqlResponse, error) {
+func (s *Destinations) PutDestinationMssql(ctx context.Context, request operations.PutDestinationMssqlRequest) (*operations.PutDestinationMssqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Mssql", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationMssqlPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationMssqlPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8175,7 +8628,7 @@ func (s *destinations) PutDestinationMssql(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8216,18 +8669,17 @@ func (s *destinations) PutDestinationMssql(ctx context.Context, request operatio
}
// PutDestinationMysql - Update a Destination fully
-func (s *destinations) PutDestinationMysql(ctx context.Context, request operations.PutDestinationMysqlRequest) (*operations.PutDestinationMysqlResponse, error) {
+func (s *Destinations) PutDestinationMysql(ctx context.Context, request operations.PutDestinationMysqlRequest) (*operations.PutDestinationMysqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Mysql", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationMysqlPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationMysqlPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8236,7 +8688,7 @@ func (s *destinations) PutDestinationMysql(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8277,18 +8729,17 @@ func (s *destinations) PutDestinationMysql(ctx context.Context, request operatio
}
// PutDestinationOracle - Update a Destination fully
-func (s *destinations) PutDestinationOracle(ctx context.Context, request operations.PutDestinationOracleRequest) (*operations.PutDestinationOracleResponse, error) {
+func (s *Destinations) PutDestinationOracle(ctx context.Context, request operations.PutDestinationOracleRequest) (*operations.PutDestinationOracleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Oracle", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationOraclePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationOraclePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8297,7 +8748,7 @@ func (s *destinations) PutDestinationOracle(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8338,18 +8789,17 @@ func (s *destinations) PutDestinationOracle(ctx context.Context, request operati
}
// PutDestinationPinecone - Update a Destination fully
-func (s *destinations) PutDestinationPinecone(ctx context.Context, request operations.PutDestinationPineconeRequest) (*operations.PutDestinationPineconeResponse, error) {
+func (s *Destinations) PutDestinationPinecone(ctx context.Context, request operations.PutDestinationPineconeRequest) (*operations.PutDestinationPineconeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Pinecone", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationPineconePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationPineconePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8358,7 +8808,7 @@ func (s *destinations) PutDestinationPinecone(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8399,18 +8849,17 @@ func (s *destinations) PutDestinationPinecone(ctx context.Context, request opera
}
// PutDestinationPostgres - Update a Destination fully
-func (s *destinations) PutDestinationPostgres(ctx context.Context, request operations.PutDestinationPostgresRequest) (*operations.PutDestinationPostgresResponse, error) {
+func (s *Destinations) PutDestinationPostgres(ctx context.Context, request operations.PutDestinationPostgresRequest) (*operations.PutDestinationPostgresResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Postgres", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationPostgresPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationPostgresPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8419,7 +8868,7 @@ func (s *destinations) PutDestinationPostgres(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8460,18 +8909,17 @@ func (s *destinations) PutDestinationPostgres(ctx context.Context, request opera
}
// PutDestinationPubsub - Update a Destination fully
-func (s *destinations) PutDestinationPubsub(ctx context.Context, request operations.PutDestinationPubsubRequest) (*operations.PutDestinationPubsubResponse, error) {
+func (s *Destinations) PutDestinationPubsub(ctx context.Context, request operations.PutDestinationPubsubRequest) (*operations.PutDestinationPubsubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Pubsub", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationPubsubPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationPubsubPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8480,7 +8928,7 @@ func (s *destinations) PutDestinationPubsub(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8520,19 +8968,78 @@ func (s *destinations) PutDestinationPubsub(ctx context.Context, request operati
return res, nil
}
+// PutDestinationQdrant - Update a Destination fully
+func (s *Destinations) PutDestinationQdrant(ctx context.Context, request operations.PutDestinationQdrantRequest) (*operations.PutDestinationQdrantResponse, error) {
+ baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Qdrant", request, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error generating URL: %w", err)
+ }
+
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationQdrantPutRequest", "json", `request:"mediaType=application/json"`)
+ if err != nil {
+ return nil, fmt.Errorf("error serializing request body: %w", err)
+ }
+ debugBody := bytes.NewBuffer([]byte{})
+ debugReader := io.TeeReader(bodyReader, debugBody)
+
+ req, err := http.NewRequestWithContext(ctx, "PUT", url, debugReader)
+ if err != nil {
+ return nil, fmt.Errorf("error creating request: %w", err)
+ }
+ req.Header.Set("Accept", "*/*")
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
+
+ req.Header.Set("Content-Type", reqContentType)
+
+ client := s.sdkConfiguration.SecurityClient
+
+ httpRes, err := client.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("error sending request: %w", err)
+ }
+ if httpRes == nil {
+ return nil, fmt.Errorf("error sending request: no response")
+ }
+
+ rawBody, err := io.ReadAll(httpRes.Body)
+ if err != nil {
+ return nil, fmt.Errorf("error reading response body: %w", err)
+ }
+ httpRes.Request.Body = io.NopCloser(debugBody)
+ httpRes.Body.Close()
+ httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
+
+ contentType := httpRes.Header.Get("Content-Type")
+
+ res := &operations.PutDestinationQdrantResponse{
+ StatusCode: httpRes.StatusCode,
+ ContentType: contentType,
+ RawResponse: httpRes,
+ }
+ switch {
+ case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300:
+ fallthrough
+ case httpRes.StatusCode == 403:
+ fallthrough
+ case httpRes.StatusCode == 404:
+ }
+
+ return res, nil
+}
+
// PutDestinationRedis - Update a Destination fully
-func (s *destinations) PutDestinationRedis(ctx context.Context, request operations.PutDestinationRedisRequest) (*operations.PutDestinationRedisResponse, error) {
+func (s *Destinations) PutDestinationRedis(ctx context.Context, request operations.PutDestinationRedisRequest) (*operations.PutDestinationRedisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Redis", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationRedisPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationRedisPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8541,7 +9048,7 @@ func (s *destinations) PutDestinationRedis(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8582,18 +9089,17 @@ func (s *destinations) PutDestinationRedis(ctx context.Context, request operatio
}
// PutDestinationRedshift - Update a Destination fully
-func (s *destinations) PutDestinationRedshift(ctx context.Context, request operations.PutDestinationRedshiftRequest) (*operations.PutDestinationRedshiftResponse, error) {
+func (s *Destinations) PutDestinationRedshift(ctx context.Context, request operations.PutDestinationRedshiftRequest) (*operations.PutDestinationRedshiftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Redshift", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationRedshiftPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationRedshiftPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8602,7 +9108,7 @@ func (s *destinations) PutDestinationRedshift(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8643,18 +9149,17 @@ func (s *destinations) PutDestinationRedshift(ctx context.Context, request opera
}
// PutDestinationS3 - Update a Destination fully
-func (s *destinations) PutDestinationS3(ctx context.Context, request operations.PutDestinationS3Request) (*operations.PutDestinationS3Response, error) {
+func (s *Destinations) PutDestinationS3(ctx context.Context, request operations.PutDestinationS3Request) (*operations.PutDestinationS3Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#S3", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationS3PutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationS3PutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8663,7 +9168,7 @@ func (s *destinations) PutDestinationS3(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8704,18 +9209,17 @@ func (s *destinations) PutDestinationS3(ctx context.Context, request operations.
}
// PutDestinationS3Glue - Update a Destination fully
-func (s *destinations) PutDestinationS3Glue(ctx context.Context, request operations.PutDestinationS3GlueRequest) (*operations.PutDestinationS3GlueResponse, error) {
+func (s *Destinations) PutDestinationS3Glue(ctx context.Context, request operations.PutDestinationS3GlueRequest) (*operations.PutDestinationS3GlueResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#S3Glue", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationS3GluePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationS3GluePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8724,7 +9228,7 @@ func (s *destinations) PutDestinationS3Glue(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8765,18 +9269,17 @@ func (s *destinations) PutDestinationS3Glue(ctx context.Context, request operati
}
// PutDestinationSftpJSON - Update a Destination fully
-func (s *destinations) PutDestinationSftpJSON(ctx context.Context, request operations.PutDestinationSftpJSONRequest) (*operations.PutDestinationSftpJSONResponse, error) {
+func (s *Destinations) PutDestinationSftpJSON(ctx context.Context, request operations.PutDestinationSftpJSONRequest) (*operations.PutDestinationSftpJSONResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#SftpJson", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationSftpJSONPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationSftpJSONPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8785,7 +9288,7 @@ func (s *destinations) PutDestinationSftpJSON(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8826,18 +9329,17 @@ func (s *destinations) PutDestinationSftpJSON(ctx context.Context, request opera
}
// PutDestinationSnowflake - Update a Destination fully
-func (s *destinations) PutDestinationSnowflake(ctx context.Context, request operations.PutDestinationSnowflakeRequest) (*operations.PutDestinationSnowflakeResponse, error) {
+func (s *Destinations) PutDestinationSnowflake(ctx context.Context, request operations.PutDestinationSnowflakeRequest) (*operations.PutDestinationSnowflakeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Snowflake", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationSnowflakePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationSnowflakePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8846,7 +9348,7 @@ func (s *destinations) PutDestinationSnowflake(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8887,18 +9389,17 @@ func (s *destinations) PutDestinationSnowflake(ctx context.Context, request oper
}
// PutDestinationTimeplus - Update a Destination fully
-func (s *destinations) PutDestinationTimeplus(ctx context.Context, request operations.PutDestinationTimeplusRequest) (*operations.PutDestinationTimeplusResponse, error) {
+func (s *Destinations) PutDestinationTimeplus(ctx context.Context, request operations.PutDestinationTimeplusRequest) (*operations.PutDestinationTimeplusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Timeplus", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationTimeplusPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationTimeplusPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8907,7 +9408,7 @@ func (s *destinations) PutDestinationTimeplus(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8948,18 +9449,17 @@ func (s *destinations) PutDestinationTimeplus(ctx context.Context, request opera
}
// PutDestinationTypesense - Update a Destination fully
-func (s *destinations) PutDestinationTypesense(ctx context.Context, request operations.PutDestinationTypesenseRequest) (*operations.PutDestinationTypesenseResponse, error) {
+func (s *Destinations) PutDestinationTypesense(ctx context.Context, request operations.PutDestinationTypesenseRequest) (*operations.PutDestinationTypesenseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Typesense", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationTypesensePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationTypesensePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8968,7 +9468,7 @@ func (s *destinations) PutDestinationTypesense(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9009,18 +9509,17 @@ func (s *destinations) PutDestinationTypesense(ctx context.Context, request oper
}
// PutDestinationVertica - Update a Destination fully
-func (s *destinations) PutDestinationVertica(ctx context.Context, request operations.PutDestinationVerticaRequest) (*operations.PutDestinationVerticaResponse, error) {
+func (s *Destinations) PutDestinationVertica(ctx context.Context, request operations.PutDestinationVerticaRequest) (*operations.PutDestinationVerticaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Vertica", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationVerticaPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationVerticaPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9029,7 +9528,7 @@ func (s *destinations) PutDestinationVertica(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9069,19 +9568,78 @@ func (s *destinations) PutDestinationVertica(ctx context.Context, request operat
return res, nil
}
+// PutDestinationWeaviate - Update a Destination fully
+func (s *Destinations) PutDestinationWeaviate(ctx context.Context, request operations.PutDestinationWeaviateRequest) (*operations.PutDestinationWeaviateResponse, error) {
+ baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
+ url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Weaviate", request, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error generating URL: %w", err)
+ }
+
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationWeaviatePutRequest", "json", `request:"mediaType=application/json"`)
+ if err != nil {
+ return nil, fmt.Errorf("error serializing request body: %w", err)
+ }
+ debugBody := bytes.NewBuffer([]byte{})
+ debugReader := io.TeeReader(bodyReader, debugBody)
+
+ req, err := http.NewRequestWithContext(ctx, "PUT", url, debugReader)
+ if err != nil {
+ return nil, fmt.Errorf("error creating request: %w", err)
+ }
+ req.Header.Set("Accept", "*/*")
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
+
+ req.Header.Set("Content-Type", reqContentType)
+
+ client := s.sdkConfiguration.SecurityClient
+
+ httpRes, err := client.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("error sending request: %w", err)
+ }
+ if httpRes == nil {
+ return nil, fmt.Errorf("error sending request: no response")
+ }
+
+ rawBody, err := io.ReadAll(httpRes.Body)
+ if err != nil {
+ return nil, fmt.Errorf("error reading response body: %w", err)
+ }
+ httpRes.Request.Body = io.NopCloser(debugBody)
+ httpRes.Body.Close()
+ httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
+
+ contentType := httpRes.Header.Get("Content-Type")
+
+ res := &operations.PutDestinationWeaviateResponse{
+ StatusCode: httpRes.StatusCode,
+ ContentType: contentType,
+ RawResponse: httpRes,
+ }
+ switch {
+ case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300:
+ fallthrough
+ case httpRes.StatusCode == 403:
+ fallthrough
+ case httpRes.StatusCode == 404:
+ }
+
+ return res, nil
+}
+
// PutDestinationXata - Update a Destination fully
-func (s *destinations) PutDestinationXata(ctx context.Context, request operations.PutDestinationXataRequest) (*operations.PutDestinationXataResponse, error) {
+func (s *Destinations) PutDestinationXata(ctx context.Context, request operations.PutDestinationXataRequest) (*operations.PutDestinationXataResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Xata", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationXataPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "DestinationXataPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9090,7 +9648,7 @@ func (s *destinations) PutDestinationXata(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
diff --git a/internal/sdk/jobs.go b/internal/sdk/jobs.go
old mode 100755
new mode 100644
index 75cbfcfc3..2a574f0f6
--- a/internal/sdk/jobs.go
+++ b/internal/sdk/jobs.go
@@ -3,29 +3,30 @@
package sdk
import (
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/sdk/pkg/models/shared"
- "airbyte/internal/sdk/pkg/utils"
"bytes"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/sdkerrors"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"io"
"net/http"
"strings"
)
-type jobs struct {
+type Jobs struct {
sdkConfiguration sdkConfiguration
}
-func newJobs(sdkConfig sdkConfiguration) *jobs {
- return &jobs{
+func newJobs(sdkConfig sdkConfiguration) *Jobs {
+ return &Jobs{
sdkConfiguration: sdkConfig,
}
}
// CancelJob - Cancel a running Job
-func (s *jobs) CancelJob(ctx context.Context, request operations.CancelJobRequest) (*operations.CancelJobResponse, error) {
+func (s *Jobs) CancelJob(ctx context.Context, request operations.CancelJobRequest) (*operations.CancelJobResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/jobs/{jobId}", request, nil)
if err != nil {
@@ -37,7 +38,7 @@ func (s *jobs) CancelJob(ctx context.Context, request operations.CancelJobReques
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -67,12 +68,14 @@ func (s *jobs) CancelJob(ctx context.Context, request operations.CancelJobReques
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.JobResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.JobResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.JobResponse = out
+ res.JobResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -83,11 +86,11 @@ func (s *jobs) CancelJob(ctx context.Context, request operations.CancelJobReques
}
// CreateJob - Trigger a sync or reset job of a connection
-func (s *jobs) CreateJob(ctx context.Context, request shared.JobCreateRequest) (*operations.CreateJobResponse, error) {
+func (s *Jobs) CreateJob(ctx context.Context, request shared.JobCreateRequest) (*operations.CreateJobResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/jobs"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, false, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
@@ -103,7 +106,7 @@ func (s *jobs) CreateJob(ctx context.Context, request shared.JobCreateRequest) (
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -136,12 +139,14 @@ func (s *jobs) CreateJob(ctx context.Context, request shared.JobCreateRequest) (
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.JobResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.JobResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.JobResponse = out
+ res.JobResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -152,7 +157,7 @@ func (s *jobs) CreateJob(ctx context.Context, request shared.JobCreateRequest) (
}
// GetJob - Get Job status and details
-func (s *jobs) GetJob(ctx context.Context, request operations.GetJobRequest) (*operations.GetJobResponse, error) {
+func (s *Jobs) GetJob(ctx context.Context, request operations.GetJobRequest) (*operations.GetJobResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/jobs/{jobId}", request, nil)
if err != nil {
@@ -164,7 +169,7 @@ func (s *jobs) GetJob(ctx context.Context, request operations.GetJobRequest) (*o
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -194,12 +199,14 @@ func (s *jobs) GetJob(ctx context.Context, request operations.GetJobRequest) (*o
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.JobResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.JobResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.JobResponse = out
+ res.JobResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -210,7 +217,7 @@ func (s *jobs) GetJob(ctx context.Context, request operations.GetJobRequest) (*o
}
// ListJobs - List Jobs by sync type
-func (s *jobs) ListJobs(ctx context.Context, request operations.ListJobsRequest) (*operations.ListJobsResponse, error) {
+func (s *Jobs) ListJobs(ctx context.Context, request operations.ListJobsRequest) (*operations.ListJobsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/jobs"
@@ -219,7 +226,7 @@ func (s *jobs) ListJobs(ctx context.Context, request operations.ListJobsRequest)
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
if err := utils.PopulateQueryParams(ctx, req, request, nil); err != nil {
return nil, fmt.Errorf("error populating query params: %w", err)
@@ -253,12 +260,14 @@ func (s *jobs) ListJobs(ctx context.Context, request operations.ListJobsRequest)
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.JobsResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.JobsResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.JobsResponse = out
+ res.JobsResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
}
diff --git a/internal/sdk/pkg/models/operations/canceljob.go b/internal/sdk/pkg/models/operations/canceljob.go
old mode 100755
new mode 100644
index e6651dc36..e1013c8a2
--- a/internal/sdk/pkg/models/operations/canceljob.go
+++ b/internal/sdk/pkg/models/operations/canceljob.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type CancelJobRequest struct {
JobID int64 `pathParam:"style=simple,explode=false,name=jobId"`
}
+func (o *CancelJobRequest) GetJobID() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.JobID
+}
+
type CancelJobResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Cancel a Job.
JobResponse *shared.JobResponse
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *CancelJobResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CancelJobResponse) GetJobResponse() *shared.JobResponse {
+ if o == nil {
+ return nil
+ }
+ return o.JobResponse
+}
+
+func (o *CancelJobResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CancelJobResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/createconnection.go b/internal/sdk/pkg/models/operations/createconnection.go
old mode 100755
new mode 100644
index bfbf3110c..72fe445cc
--- a/internal/sdk/pkg/models/operations/createconnection.go
+++ b/internal/sdk/pkg/models/operations/createconnection.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateConnectionResponse struct {
// Successful operation
ConnectionResponse *shared.ConnectionResponse
- ContentType string
- StatusCode int
- RawResponse *http.Response
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateConnectionResponse) GetConnectionResponse() *shared.ConnectionResponse {
+ if o == nil {
+ return nil
+ }
+ return o.ConnectionResponse
+}
+
+func (o *CreateConnectionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateConnectionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateConnectionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestination.go b/internal/sdk/pkg/models/operations/createdestination.go
old mode 100755
new mode 100644
index 85a9219de..b2175c134
--- a/internal/sdk/pkg/models/operations/createdestination.go
+++ b/internal/sdk/pkg/models/operations/createdestination.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationawsdatalake.go b/internal/sdk/pkg/models/operations/createdestinationawsdatalake.go
old mode 100755
new mode 100644
index 4d11e4254..6b40b3df2
--- a/internal/sdk/pkg/models/operations/createdestinationawsdatalake.go
+++ b/internal/sdk/pkg/models/operations/createdestinationawsdatalake.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationAwsDatalakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationAwsDatalakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationAwsDatalakeResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationAwsDatalakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationAwsDatalakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationazureblobstorage.go b/internal/sdk/pkg/models/operations/createdestinationazureblobstorage.go
old mode 100755
new mode 100644
index bea9e73d5..f2ac36502
--- a/internal/sdk/pkg/models/operations/createdestinationazureblobstorage.go
+++ b/internal/sdk/pkg/models/operations/createdestinationazureblobstorage.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationAzureBlobStorageResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationAzureBlobStorageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationAzureBlobStorageResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationAzureBlobStorageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationAzureBlobStorageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationbigquery.go b/internal/sdk/pkg/models/operations/createdestinationbigquery.go
old mode 100755
new mode 100644
index 584216d1e..ead2d7fb4
--- a/internal/sdk/pkg/models/operations/createdestinationbigquery.go
+++ b/internal/sdk/pkg/models/operations/createdestinationbigquery.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationBigqueryResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationBigqueryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationBigqueryResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationBigqueryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationBigqueryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationbigquerydenormalized.go b/internal/sdk/pkg/models/operations/createdestinationbigquerydenormalized.go
deleted file mode 100755
index 746f1c7d4..000000000
--- a/internal/sdk/pkg/models/operations/createdestinationbigquerydenormalized.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type CreateDestinationBigqueryDenormalizedResponse struct {
- ContentType string
- // Successful operation
- DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/createdestinationclickhouse.go b/internal/sdk/pkg/models/operations/createdestinationclickhouse.go
old mode 100755
new mode 100644
index bb773d0d6..7da7aa85a
--- a/internal/sdk/pkg/models/operations/createdestinationclickhouse.go
+++ b/internal/sdk/pkg/models/operations/createdestinationclickhouse.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationClickhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationClickhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationClickhouseResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationClickhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationClickhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationconvex.go b/internal/sdk/pkg/models/operations/createdestinationconvex.go
old mode 100755
new mode 100644
index be30c29f2..32ad30cea
--- a/internal/sdk/pkg/models/operations/createdestinationconvex.go
+++ b/internal/sdk/pkg/models/operations/createdestinationconvex.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationConvexResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationConvexResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationConvexResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationConvexResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationConvexResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationcumulio.go b/internal/sdk/pkg/models/operations/createdestinationcumulio.go
old mode 100755
new mode 100644
index d39ac26de..2ef8bb826
--- a/internal/sdk/pkg/models/operations/createdestinationcumulio.go
+++ b/internal/sdk/pkg/models/operations/createdestinationcumulio.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationCumulioResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationCumulioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationCumulioResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationCumulioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationCumulioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationdatabend.go b/internal/sdk/pkg/models/operations/createdestinationdatabend.go
old mode 100755
new mode 100644
index b73571df6..32ea96671
--- a/internal/sdk/pkg/models/operations/createdestinationdatabend.go
+++ b/internal/sdk/pkg/models/operations/createdestinationdatabend.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationDatabendResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationDatabendResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationDatabendResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationDatabendResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationDatabendResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationdatabricks.go b/internal/sdk/pkg/models/operations/createdestinationdatabricks.go
old mode 100755
new mode 100644
index 26c66f234..b7167c7e9
--- a/internal/sdk/pkg/models/operations/createdestinationdatabricks.go
+++ b/internal/sdk/pkg/models/operations/createdestinationdatabricks.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationDatabricksResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationDatabricksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationDatabricksResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationDatabricksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationDatabricksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationdevnull.go b/internal/sdk/pkg/models/operations/createdestinationdevnull.go
old mode 100755
new mode 100644
index 72c96fe5b..213d8edfb
--- a/internal/sdk/pkg/models/operations/createdestinationdevnull.go
+++ b/internal/sdk/pkg/models/operations/createdestinationdevnull.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationDevNullResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationDevNullResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationDevNullResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationDevNullResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationDevNullResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationduckdb.go b/internal/sdk/pkg/models/operations/createdestinationduckdb.go
new file mode 100644
index 000000000..a910524f1
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/createdestinationduckdb.go
@@ -0,0 +1,47 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type CreateDestinationDuckdbResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Successful operation
+ DestinationResponse *shared.DestinationResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationDuckdbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationDuckdbResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationDuckdbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationDuckdbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/createdestinationdynamodb.go b/internal/sdk/pkg/models/operations/createdestinationdynamodb.go
old mode 100755
new mode 100644
index 726cf5332..46893fb78
--- a/internal/sdk/pkg/models/operations/createdestinationdynamodb.go
+++ b/internal/sdk/pkg/models/operations/createdestinationdynamodb.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationDynamodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationDynamodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationDynamodbResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationDynamodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationDynamodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationelasticsearch.go b/internal/sdk/pkg/models/operations/createdestinationelasticsearch.go
old mode 100755
new mode 100644
index ddd198fbf..e23931019
--- a/internal/sdk/pkg/models/operations/createdestinationelasticsearch.go
+++ b/internal/sdk/pkg/models/operations/createdestinationelasticsearch.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationElasticsearchResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationElasticsearchResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationElasticsearchResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationElasticsearchResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationElasticsearchResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationfirebolt.go b/internal/sdk/pkg/models/operations/createdestinationfirebolt.go
old mode 100755
new mode 100644
index 0d13790c5..0c11712fb
--- a/internal/sdk/pkg/models/operations/createdestinationfirebolt.go
+++ b/internal/sdk/pkg/models/operations/createdestinationfirebolt.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationFireboltResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationFireboltResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationFireboltResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationFireboltResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationFireboltResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationfirestore.go b/internal/sdk/pkg/models/operations/createdestinationfirestore.go
old mode 100755
new mode 100644
index b350a4d8d..f72d1bcbe
--- a/internal/sdk/pkg/models/operations/createdestinationfirestore.go
+++ b/internal/sdk/pkg/models/operations/createdestinationfirestore.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationFirestoreResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationFirestoreResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationFirestoreResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationFirestoreResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationFirestoreResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationgcs.go b/internal/sdk/pkg/models/operations/createdestinationgcs.go
old mode 100755
new mode 100644
index cdec896c4..d6c690f61
--- a/internal/sdk/pkg/models/operations/createdestinationgcs.go
+++ b/internal/sdk/pkg/models/operations/createdestinationgcs.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationGcsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationGcsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationGcsResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationGcsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationGcsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationgooglesheets.go b/internal/sdk/pkg/models/operations/createdestinationgooglesheets.go
old mode 100755
new mode 100644
index 911ccdcd2..0d0d56b46
--- a/internal/sdk/pkg/models/operations/createdestinationgooglesheets.go
+++ b/internal/sdk/pkg/models/operations/createdestinationgooglesheets.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationGoogleSheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationGoogleSheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationGoogleSheetsResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationGoogleSheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationGoogleSheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationkeen.go b/internal/sdk/pkg/models/operations/createdestinationkeen.go
old mode 100755
new mode 100644
index 6c7c9b310..4ca4c47a5
--- a/internal/sdk/pkg/models/operations/createdestinationkeen.go
+++ b/internal/sdk/pkg/models/operations/createdestinationkeen.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationKeenResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationKeenResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationKeenResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationKeenResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationKeenResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationkinesis.go b/internal/sdk/pkg/models/operations/createdestinationkinesis.go
old mode 100755
new mode 100644
index 37e5696a4..cd9e19172
--- a/internal/sdk/pkg/models/operations/createdestinationkinesis.go
+++ b/internal/sdk/pkg/models/operations/createdestinationkinesis.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationKinesisResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationKinesisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationKinesisResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationKinesisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationKinesisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationlangchain.go b/internal/sdk/pkg/models/operations/createdestinationlangchain.go
old mode 100755
new mode 100644
index 36b177d45..3bb049078
--- a/internal/sdk/pkg/models/operations/createdestinationlangchain.go
+++ b/internal/sdk/pkg/models/operations/createdestinationlangchain.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationLangchainResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationLangchainResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationLangchainResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationLangchainResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationLangchainResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationmilvus.go b/internal/sdk/pkg/models/operations/createdestinationmilvus.go
old mode 100755
new mode 100644
index a3f0ee427..ac8f0b6fa
--- a/internal/sdk/pkg/models/operations/createdestinationmilvus.go
+++ b/internal/sdk/pkg/models/operations/createdestinationmilvus.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationMilvusResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationMilvusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationMilvusResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationMilvusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationMilvusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationmongodb.go b/internal/sdk/pkg/models/operations/createdestinationmongodb.go
old mode 100755
new mode 100644
index 6fd6f8469..2a417f6e9
--- a/internal/sdk/pkg/models/operations/createdestinationmongodb.go
+++ b/internal/sdk/pkg/models/operations/createdestinationmongodb.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationMongodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationMongodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationMongodbResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationMongodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationMongodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationmssql.go b/internal/sdk/pkg/models/operations/createdestinationmssql.go
old mode 100755
new mode 100644
index a0c3ec80d..8370be89c
--- a/internal/sdk/pkg/models/operations/createdestinationmssql.go
+++ b/internal/sdk/pkg/models/operations/createdestinationmssql.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationMssqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationMssqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationMssqlResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationMssqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationMssqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationmysql.go b/internal/sdk/pkg/models/operations/createdestinationmysql.go
old mode 100755
new mode 100644
index c64c7276b..ae59c11d5
--- a/internal/sdk/pkg/models/operations/createdestinationmysql.go
+++ b/internal/sdk/pkg/models/operations/createdestinationmysql.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationMysqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationMysqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationMysqlResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationMysqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationMysqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationoracle.go b/internal/sdk/pkg/models/operations/createdestinationoracle.go
old mode 100755
new mode 100644
index b3fc23c7d..1b4400a23
--- a/internal/sdk/pkg/models/operations/createdestinationoracle.go
+++ b/internal/sdk/pkg/models/operations/createdestinationoracle.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationOracleResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationOracleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationOracleResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationOracleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationOracleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationpinecone.go b/internal/sdk/pkg/models/operations/createdestinationpinecone.go
old mode 100755
new mode 100644
index 4cfa6e3be..6199957a4
--- a/internal/sdk/pkg/models/operations/createdestinationpinecone.go
+++ b/internal/sdk/pkg/models/operations/createdestinationpinecone.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationPineconeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationPineconeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationPineconeResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationPineconeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationPineconeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationpostgres.go b/internal/sdk/pkg/models/operations/createdestinationpostgres.go
old mode 100755
new mode 100644
index 095130836..6714f662c
--- a/internal/sdk/pkg/models/operations/createdestinationpostgres.go
+++ b/internal/sdk/pkg/models/operations/createdestinationpostgres.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationPostgresResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationPostgresResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationPostgresResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationPostgresResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationPostgresResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationpubsub.go b/internal/sdk/pkg/models/operations/createdestinationpubsub.go
old mode 100755
new mode 100644
index 2ba7c66c6..9af65fd65
--- a/internal/sdk/pkg/models/operations/createdestinationpubsub.go
+++ b/internal/sdk/pkg/models/operations/createdestinationpubsub.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationPubsubResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationPubsubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationPubsubResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationPubsubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationPubsubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationqdrant.go b/internal/sdk/pkg/models/operations/createdestinationqdrant.go
new file mode 100644
index 000000000..fb1ba997f
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/createdestinationqdrant.go
@@ -0,0 +1,47 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type CreateDestinationQdrantResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Successful operation
+ DestinationResponse *shared.DestinationResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationQdrantResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationQdrantResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationQdrantResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationQdrantResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/createdestinationredis.go b/internal/sdk/pkg/models/operations/createdestinationredis.go
old mode 100755
new mode 100644
index ddb43135e..9b3f02d48
--- a/internal/sdk/pkg/models/operations/createdestinationredis.go
+++ b/internal/sdk/pkg/models/operations/createdestinationredis.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationRedisResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationRedisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationRedisResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationRedisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationRedisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationredshift.go b/internal/sdk/pkg/models/operations/createdestinationredshift.go
old mode 100755
new mode 100644
index 57f520f10..534d5e9ac
--- a/internal/sdk/pkg/models/operations/createdestinationredshift.go
+++ b/internal/sdk/pkg/models/operations/createdestinationredshift.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationRedshiftResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationRedshiftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationRedshiftResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationRedshiftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationRedshiftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinations3.go b/internal/sdk/pkg/models/operations/createdestinations3.go
old mode 100755
new mode 100644
index 93ffdda68..d0d3a4880
--- a/internal/sdk/pkg/models/operations/createdestinations3.go
+++ b/internal/sdk/pkg/models/operations/createdestinations3.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationS3Response struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationS3Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationS3Response) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationS3Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationS3Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinations3glue.go b/internal/sdk/pkg/models/operations/createdestinations3glue.go
old mode 100755
new mode 100644
index 20f6de118..c26f50e62
--- a/internal/sdk/pkg/models/operations/createdestinations3glue.go
+++ b/internal/sdk/pkg/models/operations/createdestinations3glue.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationS3GlueResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationS3GlueResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationS3GlueResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationS3GlueResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationS3GlueResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationsftpjson.go b/internal/sdk/pkg/models/operations/createdestinationsftpjson.go
old mode 100755
new mode 100644
index 4472678c0..e3992e439
--- a/internal/sdk/pkg/models/operations/createdestinationsftpjson.go
+++ b/internal/sdk/pkg/models/operations/createdestinationsftpjson.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationSftpJSONResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationSftpJSONResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationSftpJSONResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationSftpJSONResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationSftpJSONResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationsnowflake.go b/internal/sdk/pkg/models/operations/createdestinationsnowflake.go
old mode 100755
new mode 100644
index dd9e3fb3f..39e2206e0
--- a/internal/sdk/pkg/models/operations/createdestinationsnowflake.go
+++ b/internal/sdk/pkg/models/operations/createdestinationsnowflake.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationSnowflakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationSnowflakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationSnowflakeResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationSnowflakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationSnowflakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationtimeplus.go b/internal/sdk/pkg/models/operations/createdestinationtimeplus.go
old mode 100755
new mode 100644
index f97030231..81dc1c3d3
--- a/internal/sdk/pkg/models/operations/createdestinationtimeplus.go
+++ b/internal/sdk/pkg/models/operations/createdestinationtimeplus.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationTimeplusResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationTimeplusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationTimeplusResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationTimeplusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationTimeplusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationtypesense.go b/internal/sdk/pkg/models/operations/createdestinationtypesense.go
old mode 100755
new mode 100644
index d1498f702..278c89daf
--- a/internal/sdk/pkg/models/operations/createdestinationtypesense.go
+++ b/internal/sdk/pkg/models/operations/createdestinationtypesense.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationTypesenseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationTypesenseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationTypesenseResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationTypesenseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationTypesenseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationvertica.go b/internal/sdk/pkg/models/operations/createdestinationvertica.go
old mode 100755
new mode 100644
index 8e53c2fb9..901497d86
--- a/internal/sdk/pkg/models/operations/createdestinationvertica.go
+++ b/internal/sdk/pkg/models/operations/createdestinationvertica.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationVerticaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationVerticaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationVerticaResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationVerticaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationVerticaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createdestinationweaviate.go b/internal/sdk/pkg/models/operations/createdestinationweaviate.go
new file mode 100644
index 000000000..c088d1068
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/createdestinationweaviate.go
@@ -0,0 +1,47 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type CreateDestinationWeaviateResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Successful operation
+ DestinationResponse *shared.DestinationResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationWeaviateResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationWeaviateResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationWeaviateResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationWeaviateResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/createdestinationxata.go b/internal/sdk/pkg/models/operations/createdestinationxata.go
old mode 100755
new mode 100644
index 39c056a5f..e44e4b253
--- a/internal/sdk/pkg/models/operations/createdestinationxata.go
+++ b/internal/sdk/pkg/models/operations/createdestinationxata.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateDestinationXataResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateDestinationXataResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateDestinationXataResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *CreateDestinationXataResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateDestinationXataResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createjob.go b/internal/sdk/pkg/models/operations/createjob.go
old mode 100755
new mode 100644
index 7b16ee67b..889255cae
--- a/internal/sdk/pkg/models/operations/createjob.go
+++ b/internal/sdk/pkg/models/operations/createjob.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateJobResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Kicks off a new Job based on the JobType. The connectionId is the resource that Job will be run for.
JobResponse *shared.JobResponse
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *CreateJobResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateJobResponse) GetJobResponse() *shared.JobResponse {
+ if o == nil {
+ return nil
+ }
+ return o.JobResponse
+}
+
+func (o *CreateJobResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateJobResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/createorupdateworkspaceoauthcredentials.go b/internal/sdk/pkg/models/operations/createorupdateworkspaceoauthcredentials.go
old mode 100755
new mode 100644
index 61264fdac..0bb5b358c
--- a/internal/sdk/pkg/models/operations/createorupdateworkspaceoauthcredentials.go
+++ b/internal/sdk/pkg/models/operations/createorupdateworkspaceoauthcredentials.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type CreateOrUpdateWorkspaceOAuthCredentialsRequest struct {
WorkspaceID string `pathParam:"style=simple,explode=false,name=workspaceId"`
}
+func (o *CreateOrUpdateWorkspaceOAuthCredentialsRequest) GetWorkspaceOAuthCredentialsRequest() shared.WorkspaceOAuthCredentialsRequest {
+ if o == nil {
+ return shared.WorkspaceOAuthCredentialsRequest{}
+ }
+ return o.WorkspaceOAuthCredentialsRequest
+}
+
+func (o *CreateOrUpdateWorkspaceOAuthCredentialsRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
+
type CreateOrUpdateWorkspaceOAuthCredentialsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *CreateOrUpdateWorkspaceOAuthCredentialsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateOrUpdateWorkspaceOAuthCredentialsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateOrUpdateWorkspaceOAuthCredentialsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/createsource.go b/internal/sdk/pkg/models/operations/createsource.go
old mode 100755
new mode 100644
index 68f8df7d2..b02214310
--- a/internal/sdk/pkg/models/operations/createsource.go
+++ b/internal/sdk/pkg/models/operations/createsource.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceaha.go b/internal/sdk/pkg/models/operations/createsourceaha.go
old mode 100755
new mode 100644
index e7b175a38..7c41645b5
--- a/internal/sdk/pkg/models/operations/createsourceaha.go
+++ b/internal/sdk/pkg/models/operations/createsourceaha.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAhaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAhaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAhaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAhaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAhaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceaircall.go b/internal/sdk/pkg/models/operations/createsourceaircall.go
old mode 100755
new mode 100644
index a62c3393a..0619bdf08
--- a/internal/sdk/pkg/models/operations/createsourceaircall.go
+++ b/internal/sdk/pkg/models/operations/createsourceaircall.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAircallResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAircallResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAircallResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAircallResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAircallResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceairtable.go b/internal/sdk/pkg/models/operations/createsourceairtable.go
old mode 100755
new mode 100644
index 91486da9d..e4776aef6
--- a/internal/sdk/pkg/models/operations/createsourceairtable.go
+++ b/internal/sdk/pkg/models/operations/createsourceairtable.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAirtableResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAirtableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAirtableResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAirtableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAirtableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcealloydb.go b/internal/sdk/pkg/models/operations/createsourcealloydb.go
old mode 100755
new mode 100644
index d27629e1d..813751942
--- a/internal/sdk/pkg/models/operations/createsourcealloydb.go
+++ b/internal/sdk/pkg/models/operations/createsourcealloydb.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAlloydbResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAlloydbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAlloydbResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAlloydbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAlloydbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceamazonads.go b/internal/sdk/pkg/models/operations/createsourceamazonads.go
old mode 100755
new mode 100644
index 5abca19e4..6b062b63c
--- a/internal/sdk/pkg/models/operations/createsourceamazonads.go
+++ b/internal/sdk/pkg/models/operations/createsourceamazonads.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAmazonAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAmazonAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAmazonAdsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAmazonAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAmazonAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceamazonsellerpartner.go b/internal/sdk/pkg/models/operations/createsourceamazonsellerpartner.go
old mode 100755
new mode 100644
index 3beb263b5..34f6c49b2
--- a/internal/sdk/pkg/models/operations/createsourceamazonsellerpartner.go
+++ b/internal/sdk/pkg/models/operations/createsourceamazonsellerpartner.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAmazonSellerPartnerResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAmazonSellerPartnerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAmazonSellerPartnerResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAmazonSellerPartnerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAmazonSellerPartnerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceamazonsqs.go b/internal/sdk/pkg/models/operations/createsourceamazonsqs.go
old mode 100755
new mode 100644
index a157fc97d..11678efa2
--- a/internal/sdk/pkg/models/operations/createsourceamazonsqs.go
+++ b/internal/sdk/pkg/models/operations/createsourceamazonsqs.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAmazonSqsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAmazonSqsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAmazonSqsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAmazonSqsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAmazonSqsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceamplitude.go b/internal/sdk/pkg/models/operations/createsourceamplitude.go
old mode 100755
new mode 100644
index 55ee47862..287340e2d
--- a/internal/sdk/pkg/models/operations/createsourceamplitude.go
+++ b/internal/sdk/pkg/models/operations/createsourceamplitude.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAmplitudeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAmplitudeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAmplitudeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAmplitudeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAmplitudeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceapifydataset.go b/internal/sdk/pkg/models/operations/createsourceapifydataset.go
old mode 100755
new mode 100644
index 703e9eb62..874f5e134
--- a/internal/sdk/pkg/models/operations/createsourceapifydataset.go
+++ b/internal/sdk/pkg/models/operations/createsourceapifydataset.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceApifyDatasetResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceApifyDatasetResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceApifyDatasetResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceApifyDatasetResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceApifyDatasetResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceappfollow.go b/internal/sdk/pkg/models/operations/createsourceappfollow.go
old mode 100755
new mode 100644
index 883bf3085..920968eaa
--- a/internal/sdk/pkg/models/operations/createsourceappfollow.go
+++ b/internal/sdk/pkg/models/operations/createsourceappfollow.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAppfollowResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAppfollowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAppfollowResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAppfollowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAppfollowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceasana.go b/internal/sdk/pkg/models/operations/createsourceasana.go
old mode 100755
new mode 100644
index e11b97800..c37ad5851
--- a/internal/sdk/pkg/models/operations/createsourceasana.go
+++ b/internal/sdk/pkg/models/operations/createsourceasana.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAsanaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAsanaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAsanaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAsanaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAsanaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceauth0.go b/internal/sdk/pkg/models/operations/createsourceauth0.go
old mode 100755
new mode 100644
index 7e43cd71f..2058eebe3
--- a/internal/sdk/pkg/models/operations/createsourceauth0.go
+++ b/internal/sdk/pkg/models/operations/createsourceauth0.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAuth0Response struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAuth0Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAuth0Response) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAuth0Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAuth0Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceawscloudtrail.go b/internal/sdk/pkg/models/operations/createsourceawscloudtrail.go
old mode 100755
new mode 100644
index cd2142d8e..6da5b286f
--- a/internal/sdk/pkg/models/operations/createsourceawscloudtrail.go
+++ b/internal/sdk/pkg/models/operations/createsourceawscloudtrail.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAwsCloudtrailResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAwsCloudtrailResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAwsCloudtrailResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAwsCloudtrailResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAwsCloudtrailResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceazureblobstorage.go b/internal/sdk/pkg/models/operations/createsourceazureblobstorage.go
old mode 100755
new mode 100644
index 10cde2393..446f8ce83
--- a/internal/sdk/pkg/models/operations/createsourceazureblobstorage.go
+++ b/internal/sdk/pkg/models/operations/createsourceazureblobstorage.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAzureBlobStorageResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAzureBlobStorageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAzureBlobStorageResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAzureBlobStorageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAzureBlobStorageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceazuretable.go b/internal/sdk/pkg/models/operations/createsourceazuretable.go
old mode 100755
new mode 100644
index 3fbb24fc5..8bafd3e27
--- a/internal/sdk/pkg/models/operations/createsourceazuretable.go
+++ b/internal/sdk/pkg/models/operations/createsourceazuretable.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceAzureTableResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceAzureTableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceAzureTableResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceAzureTableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceAzureTableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcebamboohr.go b/internal/sdk/pkg/models/operations/createsourcebamboohr.go
old mode 100755
new mode 100644
index 4d7d39389..dfc73543a
--- a/internal/sdk/pkg/models/operations/createsourcebamboohr.go
+++ b/internal/sdk/pkg/models/operations/createsourcebamboohr.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceBambooHrResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceBambooHrResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceBambooHrResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceBambooHrResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceBambooHrResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcebigcommerce.go b/internal/sdk/pkg/models/operations/createsourcebigcommerce.go
deleted file mode 100755
index c8c47424b..000000000
--- a/internal/sdk/pkg/models/operations/createsourcebigcommerce.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type CreateSourceBigcommerceResponse struct {
- ContentType string
- // Successful operation
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/createsourcebigquery.go b/internal/sdk/pkg/models/operations/createsourcebigquery.go
old mode 100755
new mode 100644
index def98452d..2ee58e86f
--- a/internal/sdk/pkg/models/operations/createsourcebigquery.go
+++ b/internal/sdk/pkg/models/operations/createsourcebigquery.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceBigqueryResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceBigqueryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceBigqueryResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceBigqueryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceBigqueryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcebingads.go b/internal/sdk/pkg/models/operations/createsourcebingads.go
old mode 100755
new mode 100644
index 3f15e00c8..6f86107fa
--- a/internal/sdk/pkg/models/operations/createsourcebingads.go
+++ b/internal/sdk/pkg/models/operations/createsourcebingads.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceBingAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceBingAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceBingAdsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceBingAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceBingAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcebraintree.go b/internal/sdk/pkg/models/operations/createsourcebraintree.go
old mode 100755
new mode 100644
index f6d457005..b119366b6
--- a/internal/sdk/pkg/models/operations/createsourcebraintree.go
+++ b/internal/sdk/pkg/models/operations/createsourcebraintree.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceBraintreeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceBraintreeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceBraintreeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceBraintreeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceBraintreeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcebraze.go b/internal/sdk/pkg/models/operations/createsourcebraze.go
old mode 100755
new mode 100644
index 9cc9addc3..91ba5ac34
--- a/internal/sdk/pkg/models/operations/createsourcebraze.go
+++ b/internal/sdk/pkg/models/operations/createsourcebraze.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceBrazeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceBrazeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceBrazeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceBrazeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceBrazeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcecart.go b/internal/sdk/pkg/models/operations/createsourcecart.go
new file mode 100644
index 000000000..875eb8cfd
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/createsourcecart.go
@@ -0,0 +1,47 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type CreateSourceCartResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Successful operation
+ SourceResponse *shared.SourceResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceCartResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceCartResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceCartResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceCartResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/createsourcechargebee.go b/internal/sdk/pkg/models/operations/createsourcechargebee.go
old mode 100755
new mode 100644
index 0a09541b7..3901e0486
--- a/internal/sdk/pkg/models/operations/createsourcechargebee.go
+++ b/internal/sdk/pkg/models/operations/createsourcechargebee.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceChargebeeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceChargebeeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceChargebeeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceChargebeeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceChargebeeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcechartmogul.go b/internal/sdk/pkg/models/operations/createsourcechartmogul.go
old mode 100755
new mode 100644
index 9adc803db..cbf2aabea
--- a/internal/sdk/pkg/models/operations/createsourcechartmogul.go
+++ b/internal/sdk/pkg/models/operations/createsourcechartmogul.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceChartmogulResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceChartmogulResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceChartmogulResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceChartmogulResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceChartmogulResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceclickhouse.go b/internal/sdk/pkg/models/operations/createsourceclickhouse.go
old mode 100755
new mode 100644
index 22e82b94b..7c35eafa4
--- a/internal/sdk/pkg/models/operations/createsourceclickhouse.go
+++ b/internal/sdk/pkg/models/operations/createsourceclickhouse.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceClickhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceClickhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceClickhouseResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceClickhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceClickhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceclickupapi.go b/internal/sdk/pkg/models/operations/createsourceclickupapi.go
old mode 100755
new mode 100644
index 5950e9575..ef677698a
--- a/internal/sdk/pkg/models/operations/createsourceclickupapi.go
+++ b/internal/sdk/pkg/models/operations/createsourceclickupapi.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceClickupAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceClickupAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceClickupAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceClickupAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceClickupAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceclockify.go b/internal/sdk/pkg/models/operations/createsourceclockify.go
old mode 100755
new mode 100644
index ad6f8fba8..18e41fc26
--- a/internal/sdk/pkg/models/operations/createsourceclockify.go
+++ b/internal/sdk/pkg/models/operations/createsourceclockify.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceClockifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceClockifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceClockifyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceClockifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceClockifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceclosecom.go b/internal/sdk/pkg/models/operations/createsourceclosecom.go
old mode 100755
new mode 100644
index c019cc77c..4bf734a7f
--- a/internal/sdk/pkg/models/operations/createsourceclosecom.go
+++ b/internal/sdk/pkg/models/operations/createsourceclosecom.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceCloseComResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceCloseComResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceCloseComResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceCloseComResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceCloseComResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcecoda.go b/internal/sdk/pkg/models/operations/createsourcecoda.go
old mode 100755
new mode 100644
index 78279383b..7719a6379
--- a/internal/sdk/pkg/models/operations/createsourcecoda.go
+++ b/internal/sdk/pkg/models/operations/createsourcecoda.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceCodaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceCodaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceCodaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceCodaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceCodaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcecoinapi.go b/internal/sdk/pkg/models/operations/createsourcecoinapi.go
old mode 100755
new mode 100644
index dd82629aa..69daf656b
--- a/internal/sdk/pkg/models/operations/createsourcecoinapi.go
+++ b/internal/sdk/pkg/models/operations/createsourcecoinapi.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceCoinAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceCoinAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceCoinAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceCoinAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceCoinAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcecoinmarketcap.go b/internal/sdk/pkg/models/operations/createsourcecoinmarketcap.go
old mode 100755
new mode 100644
index 707f0f497..17a0693e2
--- a/internal/sdk/pkg/models/operations/createsourcecoinmarketcap.go
+++ b/internal/sdk/pkg/models/operations/createsourcecoinmarketcap.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceCoinmarketcapResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceCoinmarketcapResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceCoinmarketcapResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceCoinmarketcapResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceCoinmarketcapResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceconfigcat.go b/internal/sdk/pkg/models/operations/createsourceconfigcat.go
old mode 100755
new mode 100644
index e90622556..a7f6b1e13
--- a/internal/sdk/pkg/models/operations/createsourceconfigcat.go
+++ b/internal/sdk/pkg/models/operations/createsourceconfigcat.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceConfigcatResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceConfigcatResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceConfigcatResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceConfigcatResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceConfigcatResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceconfluence.go b/internal/sdk/pkg/models/operations/createsourceconfluence.go
old mode 100755
new mode 100644
index 2e183fdd0..05f23907b
--- a/internal/sdk/pkg/models/operations/createsourceconfluence.go
+++ b/internal/sdk/pkg/models/operations/createsourceconfluence.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceConfluenceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceConfluenceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceConfluenceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceConfluenceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceConfluenceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceconvex.go b/internal/sdk/pkg/models/operations/createsourceconvex.go
old mode 100755
new mode 100644
index 9cd945264..b6eca21c2
--- a/internal/sdk/pkg/models/operations/createsourceconvex.go
+++ b/internal/sdk/pkg/models/operations/createsourceconvex.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceConvexResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceConvexResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceConvexResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceConvexResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceConvexResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcedatascope.go b/internal/sdk/pkg/models/operations/createsourcedatascope.go
old mode 100755
new mode 100644
index 00b749822..0acfe04c9
--- a/internal/sdk/pkg/models/operations/createsourcedatascope.go
+++ b/internal/sdk/pkg/models/operations/createsourcedatascope.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceDatascopeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceDatascopeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceDatascopeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceDatascopeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceDatascopeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcedelighted.go b/internal/sdk/pkg/models/operations/createsourcedelighted.go
old mode 100755
new mode 100644
index 460e407d0..1d5f2fa4c
--- a/internal/sdk/pkg/models/operations/createsourcedelighted.go
+++ b/internal/sdk/pkg/models/operations/createsourcedelighted.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceDelightedResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceDelightedResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceDelightedResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceDelightedResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceDelightedResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcedixa.go b/internal/sdk/pkg/models/operations/createsourcedixa.go
old mode 100755
new mode 100644
index 8f8b1ac7c..04269b547
--- a/internal/sdk/pkg/models/operations/createsourcedixa.go
+++ b/internal/sdk/pkg/models/operations/createsourcedixa.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceDixaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceDixaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceDixaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceDixaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceDixaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcedockerhub.go b/internal/sdk/pkg/models/operations/createsourcedockerhub.go
old mode 100755
new mode 100644
index a7a182ab1..91cba52b8
--- a/internal/sdk/pkg/models/operations/createsourcedockerhub.go
+++ b/internal/sdk/pkg/models/operations/createsourcedockerhub.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceDockerhubResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceDockerhubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceDockerhubResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceDockerhubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceDockerhubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcedremio.go b/internal/sdk/pkg/models/operations/createsourcedremio.go
old mode 100755
new mode 100644
index 534306e78..5a37f9ec6
--- a/internal/sdk/pkg/models/operations/createsourcedremio.go
+++ b/internal/sdk/pkg/models/operations/createsourcedremio.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceDremioResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceDremioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceDremioResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceDremioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceDremioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcedynamodb.go b/internal/sdk/pkg/models/operations/createsourcedynamodb.go
old mode 100755
new mode 100644
index e8aaabb67..238e413d3
--- a/internal/sdk/pkg/models/operations/createsourcedynamodb.go
+++ b/internal/sdk/pkg/models/operations/createsourcedynamodb.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceDynamodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceDynamodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceDynamodbResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceDynamodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceDynamodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcee2etestcloud.go b/internal/sdk/pkg/models/operations/createsourcee2etestcloud.go
deleted file mode 100755
index 3c4bde03f..000000000
--- a/internal/sdk/pkg/models/operations/createsourcee2etestcloud.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type CreateSourceE2eTestCloudResponse struct {
- ContentType string
- // Successful operation
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/createsourceemailoctopus.go b/internal/sdk/pkg/models/operations/createsourceemailoctopus.go
old mode 100755
new mode 100644
index 63e31890d..e51ebb1ac
--- a/internal/sdk/pkg/models/operations/createsourceemailoctopus.go
+++ b/internal/sdk/pkg/models/operations/createsourceemailoctopus.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceEmailoctopusResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceEmailoctopusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceEmailoctopusResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceEmailoctopusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceEmailoctopusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceexchangerates.go b/internal/sdk/pkg/models/operations/createsourceexchangerates.go
old mode 100755
new mode 100644
index e61ed7b49..94ea5890f
--- a/internal/sdk/pkg/models/operations/createsourceexchangerates.go
+++ b/internal/sdk/pkg/models/operations/createsourceexchangerates.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceExchangeRatesResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceExchangeRatesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceExchangeRatesResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceExchangeRatesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceExchangeRatesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcefacebookmarketing.go b/internal/sdk/pkg/models/operations/createsourcefacebookmarketing.go
old mode 100755
new mode 100644
index 694621839..1405718b9
--- a/internal/sdk/pkg/models/operations/createsourcefacebookmarketing.go
+++ b/internal/sdk/pkg/models/operations/createsourcefacebookmarketing.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceFacebookMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceFacebookMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceFacebookMarketingResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceFacebookMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceFacebookMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcefacebookpages.go b/internal/sdk/pkg/models/operations/createsourcefacebookpages.go
old mode 100755
new mode 100644
index 3ee0c54a1..288d36e33
--- a/internal/sdk/pkg/models/operations/createsourcefacebookpages.go
+++ b/internal/sdk/pkg/models/operations/createsourcefacebookpages.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceFacebookPagesResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceFacebookPagesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceFacebookPagesResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceFacebookPagesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceFacebookPagesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcefaker.go b/internal/sdk/pkg/models/operations/createsourcefaker.go
old mode 100755
new mode 100644
index fca1f06e8..99310ed65
--- a/internal/sdk/pkg/models/operations/createsourcefaker.go
+++ b/internal/sdk/pkg/models/operations/createsourcefaker.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceFakerResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceFakerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceFakerResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceFakerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceFakerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcefauna.go b/internal/sdk/pkg/models/operations/createsourcefauna.go
old mode 100755
new mode 100644
index aaa0d26b7..2dda0bcbe
--- a/internal/sdk/pkg/models/operations/createsourcefauna.go
+++ b/internal/sdk/pkg/models/operations/createsourcefauna.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceFaunaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceFaunaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceFaunaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceFaunaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceFaunaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcefile.go b/internal/sdk/pkg/models/operations/createsourcefile.go
new file mode 100644
index 000000000..f4b22819d
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/createsourcefile.go
@@ -0,0 +1,47 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type CreateSourceFileResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Successful operation
+ SourceResponse *shared.SourceResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceFileResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceFileResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceFileResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceFileResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/createsourcefilesecure.go b/internal/sdk/pkg/models/operations/createsourcefilesecure.go
deleted file mode 100755
index e5bbf3db6..000000000
--- a/internal/sdk/pkg/models/operations/createsourcefilesecure.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type CreateSourceFileSecureResponse struct {
- ContentType string
- // Successful operation
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/createsourcefirebolt.go b/internal/sdk/pkg/models/operations/createsourcefirebolt.go
old mode 100755
new mode 100644
index 129fe9055..f55b90aea
--- a/internal/sdk/pkg/models/operations/createsourcefirebolt.go
+++ b/internal/sdk/pkg/models/operations/createsourcefirebolt.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceFireboltResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceFireboltResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceFireboltResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceFireboltResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceFireboltResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcefreshcaller.go b/internal/sdk/pkg/models/operations/createsourcefreshcaller.go
old mode 100755
new mode 100644
index d79b08a7b..bbfa4e61b
--- a/internal/sdk/pkg/models/operations/createsourcefreshcaller.go
+++ b/internal/sdk/pkg/models/operations/createsourcefreshcaller.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceFreshcallerResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceFreshcallerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceFreshcallerResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceFreshcallerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceFreshcallerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcefreshdesk.go b/internal/sdk/pkg/models/operations/createsourcefreshdesk.go
old mode 100755
new mode 100644
index 1afeb94e5..689ad3788
--- a/internal/sdk/pkg/models/operations/createsourcefreshdesk.go
+++ b/internal/sdk/pkg/models/operations/createsourcefreshdesk.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceFreshdeskResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceFreshdeskResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceFreshdeskResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceFreshdeskResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceFreshdeskResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcefreshsales.go b/internal/sdk/pkg/models/operations/createsourcefreshsales.go
old mode 100755
new mode 100644
index 65bac42cd..fba67f653
--- a/internal/sdk/pkg/models/operations/createsourcefreshsales.go
+++ b/internal/sdk/pkg/models/operations/createsourcefreshsales.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceFreshsalesResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceFreshsalesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceFreshsalesResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceFreshsalesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceFreshsalesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegainsightpx.go b/internal/sdk/pkg/models/operations/createsourcegainsightpx.go
old mode 100755
new mode 100644
index 2b1f7cc79..2fcd4e6a9
--- a/internal/sdk/pkg/models/operations/createsourcegainsightpx.go
+++ b/internal/sdk/pkg/models/operations/createsourcegainsightpx.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGainsightPxResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGainsightPxResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGainsightPxResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGainsightPxResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGainsightPxResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegcs.go b/internal/sdk/pkg/models/operations/createsourcegcs.go
old mode 100755
new mode 100644
index 55a837e1e..9e8f84786
--- a/internal/sdk/pkg/models/operations/createsourcegcs.go
+++ b/internal/sdk/pkg/models/operations/createsourcegcs.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGcsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGcsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGcsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGcsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGcsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegetlago.go b/internal/sdk/pkg/models/operations/createsourcegetlago.go
old mode 100755
new mode 100644
index 2d835069a..7a4eca7d1
--- a/internal/sdk/pkg/models/operations/createsourcegetlago.go
+++ b/internal/sdk/pkg/models/operations/createsourcegetlago.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGetlagoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGetlagoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGetlagoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGetlagoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGetlagoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegithub.go b/internal/sdk/pkg/models/operations/createsourcegithub.go
old mode 100755
new mode 100644
index bb6e83d78..16d7045d6
--- a/internal/sdk/pkg/models/operations/createsourcegithub.go
+++ b/internal/sdk/pkg/models/operations/createsourcegithub.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGithubResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGithubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGithubResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGithubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGithubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegitlab.go b/internal/sdk/pkg/models/operations/createsourcegitlab.go
old mode 100755
new mode 100644
index 9493cc915..45f299140
--- a/internal/sdk/pkg/models/operations/createsourcegitlab.go
+++ b/internal/sdk/pkg/models/operations/createsourcegitlab.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGitlabResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGitlabResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGitlabResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGitlabResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGitlabResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceglassfrog.go b/internal/sdk/pkg/models/operations/createsourceglassfrog.go
old mode 100755
new mode 100644
index 21b1c2c36..9ccbcba41
--- a/internal/sdk/pkg/models/operations/createsourceglassfrog.go
+++ b/internal/sdk/pkg/models/operations/createsourceglassfrog.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGlassfrogResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGlassfrogResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGlassfrogResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGlassfrogResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGlassfrogResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegnews.go b/internal/sdk/pkg/models/operations/createsourcegnews.go
old mode 100755
new mode 100644
index 09c8676d4..b68b2e1e9
--- a/internal/sdk/pkg/models/operations/createsourcegnews.go
+++ b/internal/sdk/pkg/models/operations/createsourcegnews.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGnewsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGnewsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGnewsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGnewsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGnewsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegoogleads.go b/internal/sdk/pkg/models/operations/createsourcegoogleads.go
old mode 100755
new mode 100644
index 45a9a57cc..29f7a35dc
--- a/internal/sdk/pkg/models/operations/createsourcegoogleads.go
+++ b/internal/sdk/pkg/models/operations/createsourcegoogleads.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGoogleAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGoogleAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGoogleAdsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGoogleAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGoogleAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegoogleanalyticsdataapi.go b/internal/sdk/pkg/models/operations/createsourcegoogleanalyticsdataapi.go
old mode 100755
new mode 100644
index da42f3955..646e9fa88
--- a/internal/sdk/pkg/models/operations/createsourcegoogleanalyticsdataapi.go
+++ b/internal/sdk/pkg/models/operations/createsourcegoogleanalyticsdataapi.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGoogleAnalyticsDataAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGoogleAnalyticsDataAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGoogleAnalyticsDataAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGoogleAnalyticsDataAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGoogleAnalyticsDataAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegoogleanalyticsv4.go b/internal/sdk/pkg/models/operations/createsourcegoogleanalyticsv4.go
deleted file mode 100755
index 1bd511859..000000000
--- a/internal/sdk/pkg/models/operations/createsourcegoogleanalyticsv4.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type CreateSourceGoogleAnalyticsV4Response struct {
- ContentType string
- // Successful operation
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/createsourcegoogledirectory.go b/internal/sdk/pkg/models/operations/createsourcegoogledirectory.go
old mode 100755
new mode 100644
index 0f6744c20..6e6e70bed
--- a/internal/sdk/pkg/models/operations/createsourcegoogledirectory.go
+++ b/internal/sdk/pkg/models/operations/createsourcegoogledirectory.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGoogleDirectoryResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGoogleDirectoryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGoogleDirectoryResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGoogleDirectoryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGoogleDirectoryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegoogledrive.go b/internal/sdk/pkg/models/operations/createsourcegoogledrive.go
new file mode 100644
index 000000000..2cb901556
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/createsourcegoogledrive.go
@@ -0,0 +1,47 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type CreateSourceGoogleDriveResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Successful operation
+ SourceResponse *shared.SourceResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGoogleDriveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGoogleDriveResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGoogleDriveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGoogleDriveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/createsourcegooglepagespeedinsights.go b/internal/sdk/pkg/models/operations/createsourcegooglepagespeedinsights.go
old mode 100755
new mode 100644
index 1d38e1cb6..c1be51929
--- a/internal/sdk/pkg/models/operations/createsourcegooglepagespeedinsights.go
+++ b/internal/sdk/pkg/models/operations/createsourcegooglepagespeedinsights.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGooglePagespeedInsightsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGooglePagespeedInsightsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGooglePagespeedInsightsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGooglePagespeedInsightsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGooglePagespeedInsightsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegooglesearchconsole.go b/internal/sdk/pkg/models/operations/createsourcegooglesearchconsole.go
old mode 100755
new mode 100644
index 985001b05..28d276522
--- a/internal/sdk/pkg/models/operations/createsourcegooglesearchconsole.go
+++ b/internal/sdk/pkg/models/operations/createsourcegooglesearchconsole.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGoogleSearchConsoleResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGoogleSearchConsoleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGoogleSearchConsoleResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGoogleSearchConsoleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGoogleSearchConsoleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegooglesheets.go b/internal/sdk/pkg/models/operations/createsourcegooglesheets.go
old mode 100755
new mode 100644
index c0a4ab835..aa0db21bf
--- a/internal/sdk/pkg/models/operations/createsourcegooglesheets.go
+++ b/internal/sdk/pkg/models/operations/createsourcegooglesheets.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGoogleSheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGoogleSheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGoogleSheetsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGoogleSheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGoogleSheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegooglewebfonts.go b/internal/sdk/pkg/models/operations/createsourcegooglewebfonts.go
old mode 100755
new mode 100644
index e695b7e5a..27fdab551
--- a/internal/sdk/pkg/models/operations/createsourcegooglewebfonts.go
+++ b/internal/sdk/pkg/models/operations/createsourcegooglewebfonts.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGoogleWebfontsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGoogleWebfontsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGoogleWebfontsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGoogleWebfontsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGoogleWebfontsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegoogleworkspaceadminreports.go b/internal/sdk/pkg/models/operations/createsourcegoogleworkspaceadminreports.go
old mode 100755
new mode 100644
index 42569e5fd..be6dd3f91
--- a/internal/sdk/pkg/models/operations/createsourcegoogleworkspaceadminreports.go
+++ b/internal/sdk/pkg/models/operations/createsourcegoogleworkspaceadminreports.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGoogleWorkspaceAdminReportsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGoogleWorkspaceAdminReportsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGoogleWorkspaceAdminReportsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGoogleWorkspaceAdminReportsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGoogleWorkspaceAdminReportsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegreenhouse.go b/internal/sdk/pkg/models/operations/createsourcegreenhouse.go
old mode 100755
new mode 100644
index 08b17b83a..032444c18
--- a/internal/sdk/pkg/models/operations/createsourcegreenhouse.go
+++ b/internal/sdk/pkg/models/operations/createsourcegreenhouse.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGreenhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGreenhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGreenhouseResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGreenhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGreenhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcegridly.go b/internal/sdk/pkg/models/operations/createsourcegridly.go
old mode 100755
new mode 100644
index d5240a4a2..3d12c7dbf
--- a/internal/sdk/pkg/models/operations/createsourcegridly.go
+++ b/internal/sdk/pkg/models/operations/createsourcegridly.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceGridlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceGridlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceGridlyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceGridlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceGridlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceharvest.go b/internal/sdk/pkg/models/operations/createsourceharvest.go
old mode 100755
new mode 100644
index f6b6327bd..87ce936d9
--- a/internal/sdk/pkg/models/operations/createsourceharvest.go
+++ b/internal/sdk/pkg/models/operations/createsourceharvest.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceHarvestResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceHarvestResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceHarvestResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceHarvestResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceHarvestResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcehubplanner.go b/internal/sdk/pkg/models/operations/createsourcehubplanner.go
old mode 100755
new mode 100644
index 72369494d..0d120c4db
--- a/internal/sdk/pkg/models/operations/createsourcehubplanner.go
+++ b/internal/sdk/pkg/models/operations/createsourcehubplanner.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceHubplannerResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceHubplannerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceHubplannerResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceHubplannerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceHubplannerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcehubspot.go b/internal/sdk/pkg/models/operations/createsourcehubspot.go
old mode 100755
new mode 100644
index a419536ee..227382c46
--- a/internal/sdk/pkg/models/operations/createsourcehubspot.go
+++ b/internal/sdk/pkg/models/operations/createsourcehubspot.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceHubspotResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceHubspotResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceHubspotResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceHubspotResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceHubspotResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceinsightly.go b/internal/sdk/pkg/models/operations/createsourceinsightly.go
old mode 100755
new mode 100644
index 42e23deca..b36978f66
--- a/internal/sdk/pkg/models/operations/createsourceinsightly.go
+++ b/internal/sdk/pkg/models/operations/createsourceinsightly.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceInsightlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceInsightlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceInsightlyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceInsightlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceInsightlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceinstagram.go b/internal/sdk/pkg/models/operations/createsourceinstagram.go
old mode 100755
new mode 100644
index f935b3ed3..1cbf5ceaa
--- a/internal/sdk/pkg/models/operations/createsourceinstagram.go
+++ b/internal/sdk/pkg/models/operations/createsourceinstagram.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceInstagramResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceInstagramResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceInstagramResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceInstagramResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceInstagramResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceinstatus.go b/internal/sdk/pkg/models/operations/createsourceinstatus.go
old mode 100755
new mode 100644
index a20bcd51f..3a84e4308
--- a/internal/sdk/pkg/models/operations/createsourceinstatus.go
+++ b/internal/sdk/pkg/models/operations/createsourceinstatus.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceInstatusResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceInstatusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceInstatusResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceInstatusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceInstatusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceintercom.go b/internal/sdk/pkg/models/operations/createsourceintercom.go
old mode 100755
new mode 100644
index 931f2ccf1..df5b72383
--- a/internal/sdk/pkg/models/operations/createsourceintercom.go
+++ b/internal/sdk/pkg/models/operations/createsourceintercom.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceIntercomResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceIntercomResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceIntercomResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceIntercomResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceIntercomResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceip2whois.go b/internal/sdk/pkg/models/operations/createsourceip2whois.go
old mode 100755
new mode 100644
index 911f13d20..de4b01d4c
--- a/internal/sdk/pkg/models/operations/createsourceip2whois.go
+++ b/internal/sdk/pkg/models/operations/createsourceip2whois.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceIp2whoisResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceIp2whoisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceIp2whoisResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceIp2whoisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceIp2whoisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceiterable.go b/internal/sdk/pkg/models/operations/createsourceiterable.go
old mode 100755
new mode 100644
index 3a586b660..2c15bd035
--- a/internal/sdk/pkg/models/operations/createsourceiterable.go
+++ b/internal/sdk/pkg/models/operations/createsourceiterable.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceIterableResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceIterableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceIterableResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceIterableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceIterableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcejira.go b/internal/sdk/pkg/models/operations/createsourcejira.go
old mode 100755
new mode 100644
index 1cafc4770..a4608680c
--- a/internal/sdk/pkg/models/operations/createsourcejira.go
+++ b/internal/sdk/pkg/models/operations/createsourcejira.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceJiraResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceJiraResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceJiraResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceJiraResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceJiraResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcek6cloud.go b/internal/sdk/pkg/models/operations/createsourcek6cloud.go
old mode 100755
new mode 100644
index d1716f260..bc4cca5ce
--- a/internal/sdk/pkg/models/operations/createsourcek6cloud.go
+++ b/internal/sdk/pkg/models/operations/createsourcek6cloud.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceK6CloudResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceK6CloudResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceK6CloudResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceK6CloudResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceK6CloudResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceklarna.go b/internal/sdk/pkg/models/operations/createsourceklarna.go
old mode 100755
new mode 100644
index f32680d2d..daeb19427
--- a/internal/sdk/pkg/models/operations/createsourceklarna.go
+++ b/internal/sdk/pkg/models/operations/createsourceklarna.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceKlarnaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceKlarnaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceKlarnaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceKlarnaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceKlarnaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceklaviyo.go b/internal/sdk/pkg/models/operations/createsourceklaviyo.go
old mode 100755
new mode 100644
index 5d802c3e1..278b4294d
--- a/internal/sdk/pkg/models/operations/createsourceklaviyo.go
+++ b/internal/sdk/pkg/models/operations/createsourceklaviyo.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceKlaviyoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceKlaviyoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceKlaviyoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceKlaviyoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceKlaviyoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcekustomersinger.go b/internal/sdk/pkg/models/operations/createsourcekustomersinger.go
old mode 100755
new mode 100644
index b1080b325..ff86715cb
--- a/internal/sdk/pkg/models/operations/createsourcekustomersinger.go
+++ b/internal/sdk/pkg/models/operations/createsourcekustomersinger.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceKustomerSingerResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceKustomerSingerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceKustomerSingerResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceKustomerSingerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceKustomerSingerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcekyve.go b/internal/sdk/pkg/models/operations/createsourcekyve.go
old mode 100755
new mode 100644
index b4c2bb7a3..6706cc920
--- a/internal/sdk/pkg/models/operations/createsourcekyve.go
+++ b/internal/sdk/pkg/models/operations/createsourcekyve.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceKyveResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceKyveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceKyveResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceKyveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceKyveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcelaunchdarkly.go b/internal/sdk/pkg/models/operations/createsourcelaunchdarkly.go
old mode 100755
new mode 100644
index aa7e5303d..08e52d211
--- a/internal/sdk/pkg/models/operations/createsourcelaunchdarkly.go
+++ b/internal/sdk/pkg/models/operations/createsourcelaunchdarkly.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceLaunchdarklyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceLaunchdarklyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceLaunchdarklyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceLaunchdarklyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceLaunchdarklyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcelemlist.go b/internal/sdk/pkg/models/operations/createsourcelemlist.go
old mode 100755
new mode 100644
index c5142c27e..cfd371734
--- a/internal/sdk/pkg/models/operations/createsourcelemlist.go
+++ b/internal/sdk/pkg/models/operations/createsourcelemlist.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceLemlistResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceLemlistResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceLemlistResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceLemlistResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceLemlistResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceleverhiring.go b/internal/sdk/pkg/models/operations/createsourceleverhiring.go
old mode 100755
new mode 100644
index 6993a465c..c7160c463
--- a/internal/sdk/pkg/models/operations/createsourceleverhiring.go
+++ b/internal/sdk/pkg/models/operations/createsourceleverhiring.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceLeverHiringResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceLeverHiringResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceLeverHiringResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceLeverHiringResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceLeverHiringResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcelinkedinads.go b/internal/sdk/pkg/models/operations/createsourcelinkedinads.go
old mode 100755
new mode 100644
index c170f3421..78271a591
--- a/internal/sdk/pkg/models/operations/createsourcelinkedinads.go
+++ b/internal/sdk/pkg/models/operations/createsourcelinkedinads.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceLinkedinAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceLinkedinAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceLinkedinAdsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceLinkedinAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceLinkedinAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcelinkedinpages.go b/internal/sdk/pkg/models/operations/createsourcelinkedinpages.go
old mode 100755
new mode 100644
index 00a22de9f..e335db58f
--- a/internal/sdk/pkg/models/operations/createsourcelinkedinpages.go
+++ b/internal/sdk/pkg/models/operations/createsourcelinkedinpages.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceLinkedinPagesResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceLinkedinPagesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceLinkedinPagesResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceLinkedinPagesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceLinkedinPagesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcelinnworks.go b/internal/sdk/pkg/models/operations/createsourcelinnworks.go
old mode 100755
new mode 100644
index 78ea0f96a..93b473e84
--- a/internal/sdk/pkg/models/operations/createsourcelinnworks.go
+++ b/internal/sdk/pkg/models/operations/createsourcelinnworks.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceLinnworksResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceLinnworksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceLinnworksResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceLinnworksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceLinnworksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcelokalise.go b/internal/sdk/pkg/models/operations/createsourcelokalise.go
old mode 100755
new mode 100644
index 9ab6da46b..5b46ddc30
--- a/internal/sdk/pkg/models/operations/createsourcelokalise.go
+++ b/internal/sdk/pkg/models/operations/createsourcelokalise.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceLokaliseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceLokaliseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceLokaliseResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceLokaliseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceLokaliseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemailchimp.go b/internal/sdk/pkg/models/operations/createsourcemailchimp.go
old mode 100755
new mode 100644
index de3d9df51..d109b489d
--- a/internal/sdk/pkg/models/operations/createsourcemailchimp.go
+++ b/internal/sdk/pkg/models/operations/createsourcemailchimp.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMailchimpResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMailchimpResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMailchimpResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMailchimpResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMailchimpResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemailgun.go b/internal/sdk/pkg/models/operations/createsourcemailgun.go
old mode 100755
new mode 100644
index 4d2b3bd2d..70b784096
--- a/internal/sdk/pkg/models/operations/createsourcemailgun.go
+++ b/internal/sdk/pkg/models/operations/createsourcemailgun.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMailgunResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMailgunResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMailgunResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMailgunResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMailgunResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemailjetsms.go b/internal/sdk/pkg/models/operations/createsourcemailjetsms.go
old mode 100755
new mode 100644
index a4dabaa66..f13724c43
--- a/internal/sdk/pkg/models/operations/createsourcemailjetsms.go
+++ b/internal/sdk/pkg/models/operations/createsourcemailjetsms.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMailjetSmsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMailjetSmsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMailjetSmsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMailjetSmsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMailjetSmsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemarketo.go b/internal/sdk/pkg/models/operations/createsourcemarketo.go
old mode 100755
new mode 100644
index 1eadd71e2..089f84304
--- a/internal/sdk/pkg/models/operations/createsourcemarketo.go
+++ b/internal/sdk/pkg/models/operations/createsourcemarketo.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMarketoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMarketoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMarketoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMarketoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMarketoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemetabase.go b/internal/sdk/pkg/models/operations/createsourcemetabase.go
old mode 100755
new mode 100644
index a8528149a..f33d2528a
--- a/internal/sdk/pkg/models/operations/createsourcemetabase.go
+++ b/internal/sdk/pkg/models/operations/createsourcemetabase.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMetabaseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMetabaseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMetabaseResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMetabaseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMetabaseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemicrosoftteams.go b/internal/sdk/pkg/models/operations/createsourcemicrosoftteams.go
old mode 100755
new mode 100644
index 08a8e6285..06599fbcb
--- a/internal/sdk/pkg/models/operations/createsourcemicrosoftteams.go
+++ b/internal/sdk/pkg/models/operations/createsourcemicrosoftteams.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMicrosoftTeamsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMicrosoftTeamsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMicrosoftTeamsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMicrosoftTeamsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMicrosoftTeamsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemixpanel.go b/internal/sdk/pkg/models/operations/createsourcemixpanel.go
old mode 100755
new mode 100644
index 1e5b1253d..fb9df07a8
--- a/internal/sdk/pkg/models/operations/createsourcemixpanel.go
+++ b/internal/sdk/pkg/models/operations/createsourcemixpanel.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMixpanelResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMixpanelResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMixpanelResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMixpanelResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMixpanelResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemonday.go b/internal/sdk/pkg/models/operations/createsourcemonday.go
old mode 100755
new mode 100644
index 862755313..55cb8d3ca
--- a/internal/sdk/pkg/models/operations/createsourcemonday.go
+++ b/internal/sdk/pkg/models/operations/createsourcemonday.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMondayResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMondayResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMondayResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMondayResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMondayResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemongodb.go b/internal/sdk/pkg/models/operations/createsourcemongodb.go
deleted file mode 100755
index 03bce5edd..000000000
--- a/internal/sdk/pkg/models/operations/createsourcemongodb.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type CreateSourceMongodbResponse struct {
- ContentType string
- // Successful operation
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/createsourcemongodbinternalpoc.go b/internal/sdk/pkg/models/operations/createsourcemongodbinternalpoc.go
old mode 100755
new mode 100644
index 71c51babf..eb41703a7
--- a/internal/sdk/pkg/models/operations/createsourcemongodbinternalpoc.go
+++ b/internal/sdk/pkg/models/operations/createsourcemongodbinternalpoc.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMongodbInternalPocResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMongodbInternalPocResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMongodbInternalPocResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMongodbInternalPocResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMongodbInternalPocResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemongodbv2.go b/internal/sdk/pkg/models/operations/createsourcemongodbv2.go
new file mode 100644
index 000000000..d4e9b9136
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/createsourcemongodbv2.go
@@ -0,0 +1,47 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type CreateSourceMongodbV2Response struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Successful operation
+ SourceResponse *shared.SourceResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMongodbV2Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMongodbV2Response) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMongodbV2Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMongodbV2Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/createsourcemssql.go b/internal/sdk/pkg/models/operations/createsourcemssql.go
old mode 100755
new mode 100644
index 317ca3b35..cc621c888
--- a/internal/sdk/pkg/models/operations/createsourcemssql.go
+++ b/internal/sdk/pkg/models/operations/createsourcemssql.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMssqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMssqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMssqlResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMssqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMssqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemyhours.go b/internal/sdk/pkg/models/operations/createsourcemyhours.go
old mode 100755
new mode 100644
index 471456ed9..d291576d1
--- a/internal/sdk/pkg/models/operations/createsourcemyhours.go
+++ b/internal/sdk/pkg/models/operations/createsourcemyhours.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMyHoursResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMyHoursResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMyHoursResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMyHoursResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMyHoursResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcemysql.go b/internal/sdk/pkg/models/operations/createsourcemysql.go
old mode 100755
new mode 100644
index e762e9ed7..bc4792219
--- a/internal/sdk/pkg/models/operations/createsourcemysql.go
+++ b/internal/sdk/pkg/models/operations/createsourcemysql.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceMysqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceMysqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceMysqlResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceMysqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceMysqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcenetsuite.go b/internal/sdk/pkg/models/operations/createsourcenetsuite.go
old mode 100755
new mode 100644
index 2a6377968..de37dbaa6
--- a/internal/sdk/pkg/models/operations/createsourcenetsuite.go
+++ b/internal/sdk/pkg/models/operations/createsourcenetsuite.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceNetsuiteResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceNetsuiteResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceNetsuiteResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceNetsuiteResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceNetsuiteResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcenotion.go b/internal/sdk/pkg/models/operations/createsourcenotion.go
old mode 100755
new mode 100644
index 68cdd0c62..dda35215d
--- a/internal/sdk/pkg/models/operations/createsourcenotion.go
+++ b/internal/sdk/pkg/models/operations/createsourcenotion.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceNotionResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceNotionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceNotionResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceNotionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceNotionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcenytimes.go b/internal/sdk/pkg/models/operations/createsourcenytimes.go
old mode 100755
new mode 100644
index 7a04f882d..fa69eff51
--- a/internal/sdk/pkg/models/operations/createsourcenytimes.go
+++ b/internal/sdk/pkg/models/operations/createsourcenytimes.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceNytimesResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceNytimesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceNytimesResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceNytimesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceNytimesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceokta.go b/internal/sdk/pkg/models/operations/createsourceokta.go
old mode 100755
new mode 100644
index 245c63d9d..65b3b2c8a
--- a/internal/sdk/pkg/models/operations/createsourceokta.go
+++ b/internal/sdk/pkg/models/operations/createsourceokta.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceOktaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceOktaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceOktaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceOktaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceOktaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceomnisend.go b/internal/sdk/pkg/models/operations/createsourceomnisend.go
old mode 100755
new mode 100644
index 33cd1c4cf..a29a805fd
--- a/internal/sdk/pkg/models/operations/createsourceomnisend.go
+++ b/internal/sdk/pkg/models/operations/createsourceomnisend.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceOmnisendResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceOmnisendResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceOmnisendResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceOmnisendResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceOmnisendResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceonesignal.go b/internal/sdk/pkg/models/operations/createsourceonesignal.go
old mode 100755
new mode 100644
index e6d0b42ec..58f84b4f2
--- a/internal/sdk/pkg/models/operations/createsourceonesignal.go
+++ b/internal/sdk/pkg/models/operations/createsourceonesignal.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceOnesignalResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceOnesignalResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceOnesignalResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceOnesignalResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceOnesignalResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceoracle.go b/internal/sdk/pkg/models/operations/createsourceoracle.go
old mode 100755
new mode 100644
index e141c25a3..042bb031f
--- a/internal/sdk/pkg/models/operations/createsourceoracle.go
+++ b/internal/sdk/pkg/models/operations/createsourceoracle.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceOracleResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceOracleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceOracleResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceOracleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceOracleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceorb.go b/internal/sdk/pkg/models/operations/createsourceorb.go
old mode 100755
new mode 100644
index 7d9a6f327..4cfcb3133
--- a/internal/sdk/pkg/models/operations/createsourceorb.go
+++ b/internal/sdk/pkg/models/operations/createsourceorb.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceOrbResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceOrbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceOrbResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceOrbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceOrbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceorbit.go b/internal/sdk/pkg/models/operations/createsourceorbit.go
old mode 100755
new mode 100644
index 2d91dd7c7..f36388a9e
--- a/internal/sdk/pkg/models/operations/createsourceorbit.go
+++ b/internal/sdk/pkg/models/operations/createsourceorbit.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceOrbitResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceOrbitResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceOrbitResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceOrbitResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceOrbitResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceoutbrainamplify.go b/internal/sdk/pkg/models/operations/createsourceoutbrainamplify.go
old mode 100755
new mode 100644
index 5d9d6afbd..5c41fd7f2
--- a/internal/sdk/pkg/models/operations/createsourceoutbrainamplify.go
+++ b/internal/sdk/pkg/models/operations/createsourceoutbrainamplify.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceOutbrainAmplifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceOutbrainAmplifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceOutbrainAmplifyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceOutbrainAmplifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceOutbrainAmplifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceoutreach.go b/internal/sdk/pkg/models/operations/createsourceoutreach.go
old mode 100755
new mode 100644
index 11bdecbaa..2b9f7c683
--- a/internal/sdk/pkg/models/operations/createsourceoutreach.go
+++ b/internal/sdk/pkg/models/operations/createsourceoutreach.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceOutreachResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceOutreachResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceOutreachResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceOutreachResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceOutreachResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepaypaltransaction.go b/internal/sdk/pkg/models/operations/createsourcepaypaltransaction.go
old mode 100755
new mode 100644
index 036b1ce2e..6998636d2
--- a/internal/sdk/pkg/models/operations/createsourcepaypaltransaction.go
+++ b/internal/sdk/pkg/models/operations/createsourcepaypaltransaction.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePaypalTransactionResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePaypalTransactionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePaypalTransactionResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePaypalTransactionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePaypalTransactionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepaystack.go b/internal/sdk/pkg/models/operations/createsourcepaystack.go
old mode 100755
new mode 100644
index 52e65cfc7..e49ec2fb4
--- a/internal/sdk/pkg/models/operations/createsourcepaystack.go
+++ b/internal/sdk/pkg/models/operations/createsourcepaystack.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePaystackResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePaystackResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePaystackResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePaystackResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePaystackResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcependo.go b/internal/sdk/pkg/models/operations/createsourcependo.go
old mode 100755
new mode 100644
index 8143194f0..862694e77
--- a/internal/sdk/pkg/models/operations/createsourcependo.go
+++ b/internal/sdk/pkg/models/operations/createsourcependo.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePendoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePendoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePendoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePendoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePendoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepersistiq.go b/internal/sdk/pkg/models/operations/createsourcepersistiq.go
old mode 100755
new mode 100644
index c2ee3c4b3..e004eb605
--- a/internal/sdk/pkg/models/operations/createsourcepersistiq.go
+++ b/internal/sdk/pkg/models/operations/createsourcepersistiq.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePersistiqResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePersistiqResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePersistiqResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePersistiqResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePersistiqResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepexelsapi.go b/internal/sdk/pkg/models/operations/createsourcepexelsapi.go
old mode 100755
new mode 100644
index a1978aea6..4c9089974
--- a/internal/sdk/pkg/models/operations/createsourcepexelsapi.go
+++ b/internal/sdk/pkg/models/operations/createsourcepexelsapi.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePexelsAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePexelsAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePexelsAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePexelsAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePexelsAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepinterest.go b/internal/sdk/pkg/models/operations/createsourcepinterest.go
old mode 100755
new mode 100644
index 28de6f691..eeaa8604e
--- a/internal/sdk/pkg/models/operations/createsourcepinterest.go
+++ b/internal/sdk/pkg/models/operations/createsourcepinterest.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePinterestResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePinterestResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePinterestResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePinterestResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePinterestResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepipedrive.go b/internal/sdk/pkg/models/operations/createsourcepipedrive.go
old mode 100755
new mode 100644
index 7b6965409..5fa8cc378
--- a/internal/sdk/pkg/models/operations/createsourcepipedrive.go
+++ b/internal/sdk/pkg/models/operations/createsourcepipedrive.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePipedriveResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePipedriveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePipedriveResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePipedriveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePipedriveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepocket.go b/internal/sdk/pkg/models/operations/createsourcepocket.go
old mode 100755
new mode 100644
index 32cd3cb13..a78c6e34b
--- a/internal/sdk/pkg/models/operations/createsourcepocket.go
+++ b/internal/sdk/pkg/models/operations/createsourcepocket.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePocketResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePocketResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePocketResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePocketResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePocketResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepokeapi.go b/internal/sdk/pkg/models/operations/createsourcepokeapi.go
old mode 100755
new mode 100644
index 05a191506..7836f22a5
--- a/internal/sdk/pkg/models/operations/createsourcepokeapi.go
+++ b/internal/sdk/pkg/models/operations/createsourcepokeapi.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePokeapiResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePokeapiResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePokeapiResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePokeapiResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePokeapiResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepolygonstockapi.go b/internal/sdk/pkg/models/operations/createsourcepolygonstockapi.go
old mode 100755
new mode 100644
index 84ee7f572..2ffd5b6cc
--- a/internal/sdk/pkg/models/operations/createsourcepolygonstockapi.go
+++ b/internal/sdk/pkg/models/operations/createsourcepolygonstockapi.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePolygonStockAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePolygonStockAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePolygonStockAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePolygonStockAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePolygonStockAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepostgres.go b/internal/sdk/pkg/models/operations/createsourcepostgres.go
old mode 100755
new mode 100644
index 82dc7bfaf..4194c34ef
--- a/internal/sdk/pkg/models/operations/createsourcepostgres.go
+++ b/internal/sdk/pkg/models/operations/createsourcepostgres.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePostgresResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePostgresResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePostgresResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePostgresResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePostgresResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceposthog.go b/internal/sdk/pkg/models/operations/createsourceposthog.go
old mode 100755
new mode 100644
index d56612b8c..12351b517
--- a/internal/sdk/pkg/models/operations/createsourceposthog.go
+++ b/internal/sdk/pkg/models/operations/createsourceposthog.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePosthogResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePosthogResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePosthogResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePosthogResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePosthogResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepostmarkapp.go b/internal/sdk/pkg/models/operations/createsourcepostmarkapp.go
old mode 100755
new mode 100644
index a23a299a8..75bfc19a0
--- a/internal/sdk/pkg/models/operations/createsourcepostmarkapp.go
+++ b/internal/sdk/pkg/models/operations/createsourcepostmarkapp.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePostmarkappResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePostmarkappResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePostmarkappResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePostmarkappResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePostmarkappResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceprestashop.go b/internal/sdk/pkg/models/operations/createsourceprestashop.go
old mode 100755
new mode 100644
index 636da2fcd..8a9fb146b
--- a/internal/sdk/pkg/models/operations/createsourceprestashop.go
+++ b/internal/sdk/pkg/models/operations/createsourceprestashop.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePrestashopResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePrestashopResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePrestashopResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePrestashopResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePrestashopResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepunkapi.go b/internal/sdk/pkg/models/operations/createsourcepunkapi.go
old mode 100755
new mode 100644
index f2f6ca95b..a1e694446
--- a/internal/sdk/pkg/models/operations/createsourcepunkapi.go
+++ b/internal/sdk/pkg/models/operations/createsourcepunkapi.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePunkAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePunkAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePunkAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePunkAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePunkAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcepypi.go b/internal/sdk/pkg/models/operations/createsourcepypi.go
old mode 100755
new mode 100644
index 3ceba9540..fda9c15ad
--- a/internal/sdk/pkg/models/operations/createsourcepypi.go
+++ b/internal/sdk/pkg/models/operations/createsourcepypi.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourcePypiResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourcePypiResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourcePypiResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourcePypiResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourcePypiResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcequalaroo.go b/internal/sdk/pkg/models/operations/createsourcequalaroo.go
old mode 100755
new mode 100644
index cf3bccd09..3e9cebc93
--- a/internal/sdk/pkg/models/operations/createsourcequalaroo.go
+++ b/internal/sdk/pkg/models/operations/createsourcequalaroo.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceQualarooResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceQualarooResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceQualarooResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceQualarooResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceQualarooResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcequickbooks.go b/internal/sdk/pkg/models/operations/createsourcequickbooks.go
old mode 100755
new mode 100644
index 1fc6e0adf..05cdc1406
--- a/internal/sdk/pkg/models/operations/createsourcequickbooks.go
+++ b/internal/sdk/pkg/models/operations/createsourcequickbooks.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceQuickbooksResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceQuickbooksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceQuickbooksResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceQuickbooksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceQuickbooksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcerailz.go b/internal/sdk/pkg/models/operations/createsourcerailz.go
old mode 100755
new mode 100644
index 0880cd806..4bbb8c741
--- a/internal/sdk/pkg/models/operations/createsourcerailz.go
+++ b/internal/sdk/pkg/models/operations/createsourcerailz.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceRailzResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceRailzResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceRailzResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceRailzResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceRailzResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcerecharge.go b/internal/sdk/pkg/models/operations/createsourcerecharge.go
old mode 100755
new mode 100644
index 6c103308c..7bc1699cd
--- a/internal/sdk/pkg/models/operations/createsourcerecharge.go
+++ b/internal/sdk/pkg/models/operations/createsourcerecharge.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceRechargeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceRechargeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceRechargeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceRechargeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceRechargeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcerecreation.go b/internal/sdk/pkg/models/operations/createsourcerecreation.go
old mode 100755
new mode 100644
index 9aab84775..e778a58ea
--- a/internal/sdk/pkg/models/operations/createsourcerecreation.go
+++ b/internal/sdk/pkg/models/operations/createsourcerecreation.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceRecreationResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceRecreationResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceRecreationResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceRecreationResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceRecreationResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcerecruitee.go b/internal/sdk/pkg/models/operations/createsourcerecruitee.go
old mode 100755
new mode 100644
index 60fe781ed..88617e109
--- a/internal/sdk/pkg/models/operations/createsourcerecruitee.go
+++ b/internal/sdk/pkg/models/operations/createsourcerecruitee.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceRecruiteeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceRecruiteeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceRecruiteeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceRecruiteeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceRecruiteeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcerecurly.go b/internal/sdk/pkg/models/operations/createsourcerecurly.go
old mode 100755
new mode 100644
index 75f4ae860..8c44d9cf3
--- a/internal/sdk/pkg/models/operations/createsourcerecurly.go
+++ b/internal/sdk/pkg/models/operations/createsourcerecurly.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceRecurlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceRecurlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceRecurlyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceRecurlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceRecurlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceredshift.go b/internal/sdk/pkg/models/operations/createsourceredshift.go
old mode 100755
new mode 100644
index d6c633426..d58d9430e
--- a/internal/sdk/pkg/models/operations/createsourceredshift.go
+++ b/internal/sdk/pkg/models/operations/createsourceredshift.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceRedshiftResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceRedshiftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceRedshiftResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceRedshiftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceRedshiftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceretently.go b/internal/sdk/pkg/models/operations/createsourceretently.go
old mode 100755
new mode 100644
index 220e410f9..184a46891
--- a/internal/sdk/pkg/models/operations/createsourceretently.go
+++ b/internal/sdk/pkg/models/operations/createsourceretently.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceRetentlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceRetentlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceRetentlyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceRetentlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceRetentlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcerkicovid.go b/internal/sdk/pkg/models/operations/createsourcerkicovid.go
old mode 100755
new mode 100644
index ed6fb1079..008a85c72
--- a/internal/sdk/pkg/models/operations/createsourcerkicovid.go
+++ b/internal/sdk/pkg/models/operations/createsourcerkicovid.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceRkiCovidResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceRkiCovidResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceRkiCovidResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceRkiCovidResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceRkiCovidResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcerss.go b/internal/sdk/pkg/models/operations/createsourcerss.go
old mode 100755
new mode 100644
index b6c215e83..ec475b4f4
--- a/internal/sdk/pkg/models/operations/createsourcerss.go
+++ b/internal/sdk/pkg/models/operations/createsourcerss.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceRssResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceRssResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceRssResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceRssResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceRssResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsources3.go b/internal/sdk/pkg/models/operations/createsources3.go
old mode 100755
new mode 100644
index 204862d04..55ed70dd9
--- a/internal/sdk/pkg/models/operations/createsources3.go
+++ b/internal/sdk/pkg/models/operations/createsources3.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceS3Response struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceS3Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceS3Response) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceS3Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceS3Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesalesforce.go b/internal/sdk/pkg/models/operations/createsourcesalesforce.go
old mode 100755
new mode 100644
index a8cafdd48..cd451611b
--- a/internal/sdk/pkg/models/operations/createsourcesalesforce.go
+++ b/internal/sdk/pkg/models/operations/createsourcesalesforce.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSalesforceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSalesforceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSalesforceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSalesforceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSalesforceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesalesloft.go b/internal/sdk/pkg/models/operations/createsourcesalesloft.go
old mode 100755
new mode 100644
index 7b9cf6dcb..397ba8b5c
--- a/internal/sdk/pkg/models/operations/createsourcesalesloft.go
+++ b/internal/sdk/pkg/models/operations/createsourcesalesloft.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSalesloftResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSalesloftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSalesloftResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSalesloftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSalesloftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesapfieldglass.go b/internal/sdk/pkg/models/operations/createsourcesapfieldglass.go
old mode 100755
new mode 100644
index c0de0cc0b..98fc0ba1d
--- a/internal/sdk/pkg/models/operations/createsourcesapfieldglass.go
+++ b/internal/sdk/pkg/models/operations/createsourcesapfieldglass.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSapFieldglassResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSapFieldglassResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSapFieldglassResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSapFieldglassResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSapFieldglassResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesecoda.go b/internal/sdk/pkg/models/operations/createsourcesecoda.go
old mode 100755
new mode 100644
index 2db5fa20b..e8d779a49
--- a/internal/sdk/pkg/models/operations/createsourcesecoda.go
+++ b/internal/sdk/pkg/models/operations/createsourcesecoda.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSecodaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSecodaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSecodaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSecodaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSecodaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesendgrid.go b/internal/sdk/pkg/models/operations/createsourcesendgrid.go
old mode 100755
new mode 100644
index 9a07102ee..f73402c5c
--- a/internal/sdk/pkg/models/operations/createsourcesendgrid.go
+++ b/internal/sdk/pkg/models/operations/createsourcesendgrid.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSendgridResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSendgridResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSendgridResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSendgridResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSendgridResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesendinblue.go b/internal/sdk/pkg/models/operations/createsourcesendinblue.go
old mode 100755
new mode 100644
index e85f703ff..d7e3f8e32
--- a/internal/sdk/pkg/models/operations/createsourcesendinblue.go
+++ b/internal/sdk/pkg/models/operations/createsourcesendinblue.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSendinblueResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSendinblueResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSendinblueResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSendinblueResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSendinblueResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesenseforce.go b/internal/sdk/pkg/models/operations/createsourcesenseforce.go
old mode 100755
new mode 100644
index dfae6d9fb..40f4a26b4
--- a/internal/sdk/pkg/models/operations/createsourcesenseforce.go
+++ b/internal/sdk/pkg/models/operations/createsourcesenseforce.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSenseforceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSenseforceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSenseforceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSenseforceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSenseforceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesentry.go b/internal/sdk/pkg/models/operations/createsourcesentry.go
old mode 100755
new mode 100644
index 8b75ea08c..410ba7613
--- a/internal/sdk/pkg/models/operations/createsourcesentry.go
+++ b/internal/sdk/pkg/models/operations/createsourcesentry.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSentryResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSentryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSentryResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSentryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSentryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesftp.go b/internal/sdk/pkg/models/operations/createsourcesftp.go
old mode 100755
new mode 100644
index 2e6974beb..363b07503
--- a/internal/sdk/pkg/models/operations/createsourcesftp.go
+++ b/internal/sdk/pkg/models/operations/createsourcesftp.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSftpResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSftpResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSftpResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSftpResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSftpResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesftpbulk.go b/internal/sdk/pkg/models/operations/createsourcesftpbulk.go
old mode 100755
new mode 100644
index dff73c886..07a44b54d
--- a/internal/sdk/pkg/models/operations/createsourcesftpbulk.go
+++ b/internal/sdk/pkg/models/operations/createsourcesftpbulk.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSftpBulkResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSftpBulkResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSftpBulkResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSftpBulkResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSftpBulkResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceshopify.go b/internal/sdk/pkg/models/operations/createsourceshopify.go
old mode 100755
new mode 100644
index d8eee070e..ec96e3bc4
--- a/internal/sdk/pkg/models/operations/createsourceshopify.go
+++ b/internal/sdk/pkg/models/operations/createsourceshopify.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceShopifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceShopifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceShopifyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceShopifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceShopifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceshortio.go b/internal/sdk/pkg/models/operations/createsourceshortio.go
old mode 100755
new mode 100644
index b3fd47e20..108827ab9
--- a/internal/sdk/pkg/models/operations/createsourceshortio.go
+++ b/internal/sdk/pkg/models/operations/createsourceshortio.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceShortioResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceShortioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceShortioResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceShortioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceShortioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceslack.go b/internal/sdk/pkg/models/operations/createsourceslack.go
old mode 100755
new mode 100644
index 36a5447c0..ed531a93b
--- a/internal/sdk/pkg/models/operations/createsourceslack.go
+++ b/internal/sdk/pkg/models/operations/createsourceslack.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSlackResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSlackResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSlackResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSlackResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSlackResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesmaily.go b/internal/sdk/pkg/models/operations/createsourcesmaily.go
old mode 100755
new mode 100644
index cebea0878..abdf8e94d
--- a/internal/sdk/pkg/models/operations/createsourcesmaily.go
+++ b/internal/sdk/pkg/models/operations/createsourcesmaily.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSmailyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSmailyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSmailyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSmailyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSmailyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesmartengage.go b/internal/sdk/pkg/models/operations/createsourcesmartengage.go
old mode 100755
new mode 100644
index 9fec9e5b9..46f9bb23b
--- a/internal/sdk/pkg/models/operations/createsourcesmartengage.go
+++ b/internal/sdk/pkg/models/operations/createsourcesmartengage.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSmartengageResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSmartengageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSmartengageResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSmartengageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSmartengageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesmartsheets.go b/internal/sdk/pkg/models/operations/createsourcesmartsheets.go
old mode 100755
new mode 100644
index d22d07b5f..1585efc92
--- a/internal/sdk/pkg/models/operations/createsourcesmartsheets.go
+++ b/internal/sdk/pkg/models/operations/createsourcesmartsheets.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSmartsheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSmartsheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSmartsheetsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSmartsheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSmartsheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesnapchatmarketing.go b/internal/sdk/pkg/models/operations/createsourcesnapchatmarketing.go
old mode 100755
new mode 100644
index 2911d37fe..d1627e7af
--- a/internal/sdk/pkg/models/operations/createsourcesnapchatmarketing.go
+++ b/internal/sdk/pkg/models/operations/createsourcesnapchatmarketing.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSnapchatMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSnapchatMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSnapchatMarketingResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSnapchatMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSnapchatMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesnowflake.go b/internal/sdk/pkg/models/operations/createsourcesnowflake.go
old mode 100755
new mode 100644
index 125561376..6d4973930
--- a/internal/sdk/pkg/models/operations/createsourcesnowflake.go
+++ b/internal/sdk/pkg/models/operations/createsourcesnowflake.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSnowflakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSnowflakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSnowflakeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSnowflakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSnowflakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesonarcloud.go b/internal/sdk/pkg/models/operations/createsourcesonarcloud.go
old mode 100755
new mode 100644
index 4598c4aef..dbf4c1bd1
--- a/internal/sdk/pkg/models/operations/createsourcesonarcloud.go
+++ b/internal/sdk/pkg/models/operations/createsourcesonarcloud.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSonarCloudResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSonarCloudResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSonarCloudResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSonarCloudResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSonarCloudResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcespacexapi.go b/internal/sdk/pkg/models/operations/createsourcespacexapi.go
old mode 100755
new mode 100644
index 59ed3a3cb..951ed2779
--- a/internal/sdk/pkg/models/operations/createsourcespacexapi.go
+++ b/internal/sdk/pkg/models/operations/createsourcespacexapi.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSpacexAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSpacexAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSpacexAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSpacexAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSpacexAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesquare.go b/internal/sdk/pkg/models/operations/createsourcesquare.go
old mode 100755
new mode 100644
index 700012ed1..3fc42a677
--- a/internal/sdk/pkg/models/operations/createsourcesquare.go
+++ b/internal/sdk/pkg/models/operations/createsourcesquare.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSquareResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSquareResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSquareResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSquareResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSquareResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcestrava.go b/internal/sdk/pkg/models/operations/createsourcestrava.go
old mode 100755
new mode 100644
index efd211557..7377789e4
--- a/internal/sdk/pkg/models/operations/createsourcestrava.go
+++ b/internal/sdk/pkg/models/operations/createsourcestrava.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceStravaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceStravaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceStravaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceStravaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceStravaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcestripe.go b/internal/sdk/pkg/models/operations/createsourcestripe.go
old mode 100755
new mode 100644
index be5de3990..6531e46ca
--- a/internal/sdk/pkg/models/operations/createsourcestripe.go
+++ b/internal/sdk/pkg/models/operations/createsourcestripe.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceStripeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceStripeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceStripeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceStripeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceStripeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesurveymonkey.go b/internal/sdk/pkg/models/operations/createsourcesurveymonkey.go
old mode 100755
new mode 100644
index 759932911..4fdb4e2e9
--- a/internal/sdk/pkg/models/operations/createsourcesurveymonkey.go
+++ b/internal/sdk/pkg/models/operations/createsourcesurveymonkey.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSurveymonkeyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSurveymonkeyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSurveymonkeyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSurveymonkeyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSurveymonkeyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcesurveysparrow.go b/internal/sdk/pkg/models/operations/createsourcesurveysparrow.go
old mode 100755
new mode 100644
index b72e6ea49..73f5b45aa
--- a/internal/sdk/pkg/models/operations/createsourcesurveysparrow.go
+++ b/internal/sdk/pkg/models/operations/createsourcesurveysparrow.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceSurveySparrowResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceSurveySparrowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceSurveySparrowResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceSurveySparrowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceSurveySparrowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcetempo.go b/internal/sdk/pkg/models/operations/createsourcetempo.go
old mode 100755
new mode 100644
index 331d16ccc..8e11586dd
--- a/internal/sdk/pkg/models/operations/createsourcetempo.go
+++ b/internal/sdk/pkg/models/operations/createsourcetempo.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceTempoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceTempoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceTempoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceTempoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceTempoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcetheguardianapi.go b/internal/sdk/pkg/models/operations/createsourcetheguardianapi.go
old mode 100755
new mode 100644
index fd6b2a870..f2834f568
--- a/internal/sdk/pkg/models/operations/createsourcetheguardianapi.go
+++ b/internal/sdk/pkg/models/operations/createsourcetheguardianapi.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceTheGuardianAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceTheGuardianAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceTheGuardianAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceTheGuardianAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceTheGuardianAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcetiktokmarketing.go b/internal/sdk/pkg/models/operations/createsourcetiktokmarketing.go
old mode 100755
new mode 100644
index 609652112..e51516cc0
--- a/internal/sdk/pkg/models/operations/createsourcetiktokmarketing.go
+++ b/internal/sdk/pkg/models/operations/createsourcetiktokmarketing.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceTiktokMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceTiktokMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceTiktokMarketingResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceTiktokMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceTiktokMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcetodoist.go b/internal/sdk/pkg/models/operations/createsourcetodoist.go
old mode 100755
new mode 100644
index df9b8fb67..ac86a4775
--- a/internal/sdk/pkg/models/operations/createsourcetodoist.go
+++ b/internal/sdk/pkg/models/operations/createsourcetodoist.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceTodoistResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceTodoistResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceTodoistResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceTodoistResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceTodoistResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcetrello.go b/internal/sdk/pkg/models/operations/createsourcetrello.go
old mode 100755
new mode 100644
index e993889fc..728e54303
--- a/internal/sdk/pkg/models/operations/createsourcetrello.go
+++ b/internal/sdk/pkg/models/operations/createsourcetrello.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceTrelloResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceTrelloResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceTrelloResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceTrelloResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceTrelloResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcetrustpilot.go b/internal/sdk/pkg/models/operations/createsourcetrustpilot.go
old mode 100755
new mode 100644
index a7efaeb49..437b50d77
--- a/internal/sdk/pkg/models/operations/createsourcetrustpilot.go
+++ b/internal/sdk/pkg/models/operations/createsourcetrustpilot.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceTrustpilotResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceTrustpilotResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceTrustpilotResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceTrustpilotResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceTrustpilotResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcetvmazeschedule.go b/internal/sdk/pkg/models/operations/createsourcetvmazeschedule.go
old mode 100755
new mode 100644
index a17dfda0e..56fcadb1e
--- a/internal/sdk/pkg/models/operations/createsourcetvmazeschedule.go
+++ b/internal/sdk/pkg/models/operations/createsourcetvmazeschedule.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceTvmazeScheduleResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceTvmazeScheduleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceTvmazeScheduleResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceTvmazeScheduleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceTvmazeScheduleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcetwilio.go b/internal/sdk/pkg/models/operations/createsourcetwilio.go
old mode 100755
new mode 100644
index 8d029b890..2dbe4f0b7
--- a/internal/sdk/pkg/models/operations/createsourcetwilio.go
+++ b/internal/sdk/pkg/models/operations/createsourcetwilio.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceTwilioResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceTwilioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceTwilioResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceTwilioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceTwilioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcetwiliotaskrouter.go b/internal/sdk/pkg/models/operations/createsourcetwiliotaskrouter.go
old mode 100755
new mode 100644
index 211555ab2..62658712c
--- a/internal/sdk/pkg/models/operations/createsourcetwiliotaskrouter.go
+++ b/internal/sdk/pkg/models/operations/createsourcetwiliotaskrouter.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceTwilioTaskrouterResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceTwilioTaskrouterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceTwilioTaskrouterResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceTwilioTaskrouterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceTwilioTaskrouterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcetwitter.go b/internal/sdk/pkg/models/operations/createsourcetwitter.go
old mode 100755
new mode 100644
index 877a88408..12c3ebd25
--- a/internal/sdk/pkg/models/operations/createsourcetwitter.go
+++ b/internal/sdk/pkg/models/operations/createsourcetwitter.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceTwitterResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceTwitterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceTwitterResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceTwitterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceTwitterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcetypeform.go b/internal/sdk/pkg/models/operations/createsourcetypeform.go
old mode 100755
new mode 100644
index cbbb6ad12..92d655437
--- a/internal/sdk/pkg/models/operations/createsourcetypeform.go
+++ b/internal/sdk/pkg/models/operations/createsourcetypeform.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceTypeformResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceTypeformResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceTypeformResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceTypeformResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceTypeformResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceuscensus.go b/internal/sdk/pkg/models/operations/createsourceuscensus.go
old mode 100755
new mode 100644
index cb96cf2a9..593f591d9
--- a/internal/sdk/pkg/models/operations/createsourceuscensus.go
+++ b/internal/sdk/pkg/models/operations/createsourceuscensus.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceUsCensusResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceUsCensusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceUsCensusResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceUsCensusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceUsCensusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcevantage.go b/internal/sdk/pkg/models/operations/createsourcevantage.go
old mode 100755
new mode 100644
index ea0df4640..913abf3cd
--- a/internal/sdk/pkg/models/operations/createsourcevantage.go
+++ b/internal/sdk/pkg/models/operations/createsourcevantage.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceVantageResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceVantageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceVantageResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceVantageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceVantageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcewebflow.go b/internal/sdk/pkg/models/operations/createsourcewebflow.go
old mode 100755
new mode 100644
index 8a472349a..f518ab2d1
--- a/internal/sdk/pkg/models/operations/createsourcewebflow.go
+++ b/internal/sdk/pkg/models/operations/createsourcewebflow.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceWebflowResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceWebflowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceWebflowResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceWebflowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceWebflowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcewhiskyhunter.go b/internal/sdk/pkg/models/operations/createsourcewhiskyhunter.go
old mode 100755
new mode 100644
index d7b6662f5..1bba1864b
--- a/internal/sdk/pkg/models/operations/createsourcewhiskyhunter.go
+++ b/internal/sdk/pkg/models/operations/createsourcewhiskyhunter.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceWhiskyHunterResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceWhiskyHunterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceWhiskyHunterResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceWhiskyHunterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceWhiskyHunterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcewikipediapageviews.go b/internal/sdk/pkg/models/operations/createsourcewikipediapageviews.go
old mode 100755
new mode 100644
index d4535db85..6c202f1f8
--- a/internal/sdk/pkg/models/operations/createsourcewikipediapageviews.go
+++ b/internal/sdk/pkg/models/operations/createsourcewikipediapageviews.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceWikipediaPageviewsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceWikipediaPageviewsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceWikipediaPageviewsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceWikipediaPageviewsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceWikipediaPageviewsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcewoocommerce.go b/internal/sdk/pkg/models/operations/createsourcewoocommerce.go
old mode 100755
new mode 100644
index 0874577fb..6a1fc78ad
--- a/internal/sdk/pkg/models/operations/createsourcewoocommerce.go
+++ b/internal/sdk/pkg/models/operations/createsourcewoocommerce.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceWoocommerceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceWoocommerceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceWoocommerceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceWoocommerceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceWoocommerceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcexero.go b/internal/sdk/pkg/models/operations/createsourcexero.go
deleted file mode 100755
index 344d7bc15..000000000
--- a/internal/sdk/pkg/models/operations/createsourcexero.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type CreateSourceXeroResponse struct {
- ContentType string
- // Successful operation
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/createsourcexkcd.go b/internal/sdk/pkg/models/operations/createsourcexkcd.go
old mode 100755
new mode 100644
index fd5dbe602..584d022f8
--- a/internal/sdk/pkg/models/operations/createsourcexkcd.go
+++ b/internal/sdk/pkg/models/operations/createsourcexkcd.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceXkcdResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceXkcdResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceXkcdResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceXkcdResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceXkcdResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceyandexmetrica.go b/internal/sdk/pkg/models/operations/createsourceyandexmetrica.go
old mode 100755
new mode 100644
index 80e4342f5..e2be0fa95
--- a/internal/sdk/pkg/models/operations/createsourceyandexmetrica.go
+++ b/internal/sdk/pkg/models/operations/createsourceyandexmetrica.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceYandexMetricaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceYandexMetricaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceYandexMetricaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceYandexMetricaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceYandexMetricaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceyotpo.go b/internal/sdk/pkg/models/operations/createsourceyotpo.go
old mode 100755
new mode 100644
index d4a1ce7c4..0f98c0c75
--- a/internal/sdk/pkg/models/operations/createsourceyotpo.go
+++ b/internal/sdk/pkg/models/operations/createsourceyotpo.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceYotpoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceYotpoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceYotpoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceYotpoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceYotpoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourceyounium.go b/internal/sdk/pkg/models/operations/createsourceyounium.go
deleted file mode 100755
index 568434851..000000000
--- a/internal/sdk/pkg/models/operations/createsourceyounium.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type CreateSourceYouniumResponse struct {
- ContentType string
- // Successful operation
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/createsourceyoutubeanalytics.go b/internal/sdk/pkg/models/operations/createsourceyoutubeanalytics.go
old mode 100755
new mode 100644
index 49787f3cc..f166d2398
--- a/internal/sdk/pkg/models/operations/createsourceyoutubeanalytics.go
+++ b/internal/sdk/pkg/models/operations/createsourceyoutubeanalytics.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceYoutubeAnalyticsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceYoutubeAnalyticsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceYoutubeAnalyticsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceYoutubeAnalyticsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceYoutubeAnalyticsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcezendeskchat.go b/internal/sdk/pkg/models/operations/createsourcezendeskchat.go
old mode 100755
new mode 100644
index cd400c0ed..2b856b0e8
--- a/internal/sdk/pkg/models/operations/createsourcezendeskchat.go
+++ b/internal/sdk/pkg/models/operations/createsourcezendeskchat.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceZendeskChatResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceZendeskChatResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceZendeskChatResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceZendeskChatResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceZendeskChatResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcezendesksell.go b/internal/sdk/pkg/models/operations/createsourcezendesksell.go
new file mode 100644
index 000000000..1b1813f34
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/createsourcezendesksell.go
@@ -0,0 +1,47 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type CreateSourceZendeskSellResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Successful operation
+ SourceResponse *shared.SourceResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceZendeskSellResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceZendeskSellResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceZendeskSellResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceZendeskSellResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/createsourcezendesksunshine.go b/internal/sdk/pkg/models/operations/createsourcezendesksunshine.go
old mode 100755
new mode 100644
index 79df388cd..4eaad9aa7
--- a/internal/sdk/pkg/models/operations/createsourcezendesksunshine.go
+++ b/internal/sdk/pkg/models/operations/createsourcezendesksunshine.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceZendeskSunshineResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceZendeskSunshineResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceZendeskSunshineResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceZendeskSunshineResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceZendeskSunshineResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcezendesksupport.go b/internal/sdk/pkg/models/operations/createsourcezendesksupport.go
old mode 100755
new mode 100644
index 8e68feb4a..5d6b4c2e5
--- a/internal/sdk/pkg/models/operations/createsourcezendesksupport.go
+++ b/internal/sdk/pkg/models/operations/createsourcezendesksupport.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceZendeskSupportResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceZendeskSupportResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceZendeskSupportResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceZendeskSupportResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceZendeskSupportResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcezendesktalk.go b/internal/sdk/pkg/models/operations/createsourcezendesktalk.go
old mode 100755
new mode 100644
index 44d7f4d7f..20143289a
--- a/internal/sdk/pkg/models/operations/createsourcezendesktalk.go
+++ b/internal/sdk/pkg/models/operations/createsourcezendesktalk.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceZendeskTalkResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceZendeskTalkResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceZendeskTalkResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceZendeskTalkResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceZendeskTalkResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcezenloop.go b/internal/sdk/pkg/models/operations/createsourcezenloop.go
old mode 100755
new mode 100644
index 5226a4da3..83901a6b6
--- a/internal/sdk/pkg/models/operations/createsourcezenloop.go
+++ b/internal/sdk/pkg/models/operations/createsourcezenloop.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceZenloopResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceZenloopResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceZenloopResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceZenloopResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceZenloopResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcezohocrm.go b/internal/sdk/pkg/models/operations/createsourcezohocrm.go
old mode 100755
new mode 100644
index 40f341e0f..359965267
--- a/internal/sdk/pkg/models/operations/createsourcezohocrm.go
+++ b/internal/sdk/pkg/models/operations/createsourcezohocrm.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceZohoCrmResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceZohoCrmResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceZohoCrmResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceZohoCrmResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceZohoCrmResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcezoom.go b/internal/sdk/pkg/models/operations/createsourcezoom.go
old mode 100755
new mode 100644
index 0b50b899a..708488724
--- a/internal/sdk/pkg/models/operations/createsourcezoom.go
+++ b/internal/sdk/pkg/models/operations/createsourcezoom.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceZoomResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceZoomResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceZoomResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceZoomResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceZoomResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createsourcezuora.go b/internal/sdk/pkg/models/operations/createsourcezuora.go
old mode 100755
new mode 100644
index 08d50e944..d0f4d23b6
--- a/internal/sdk/pkg/models/operations/createsourcezuora.go
+++ b/internal/sdk/pkg/models/operations/createsourcezuora.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateSourceZuoraResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *CreateSourceZuoraResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateSourceZuoraResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *CreateSourceZuoraResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateSourceZuoraResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/createworkspace.go b/internal/sdk/pkg/models/operations/createworkspace.go
old mode 100755
new mode 100644
index b02f83b2c..79f6d2c6d
--- a/internal/sdk/pkg/models/operations/createworkspace.go
+++ b/internal/sdk/pkg/models/operations/createworkspace.go
@@ -3,14 +3,45 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
type CreateWorkspaceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
// Successful operation
WorkspaceResponse *shared.WorkspaceResponse
}
+
+func (o *CreateWorkspaceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *CreateWorkspaceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *CreateWorkspaceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
+
+func (o *CreateWorkspaceResponse) GetWorkspaceResponse() *shared.WorkspaceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.WorkspaceResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deleteconnection.go b/internal/sdk/pkg/models/operations/deleteconnection.go
old mode 100755
new mode 100644
index bd5224b9b..ecedcaad1
--- a/internal/sdk/pkg/models/operations/deleteconnection.go
+++ b/internal/sdk/pkg/models/operations/deleteconnection.go
@@ -10,8 +10,39 @@ type DeleteConnectionRequest struct {
ConnectionID string `pathParam:"style=simple,explode=false,name=connectionId"`
}
+func (o *DeleteConnectionRequest) GetConnectionID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConnectionID
+}
+
type DeleteConnectionResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteConnectionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteConnectionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteConnectionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestination.go b/internal/sdk/pkg/models/operations/deletedestination.go
old mode 100755
new mode 100644
index 1676660b4..c69d003e4
--- a/internal/sdk/pkg/models/operations/deletedestination.go
+++ b/internal/sdk/pkg/models/operations/deletedestination.go
@@ -10,8 +10,39 @@ type DeleteDestinationRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationawsdatalake.go b/internal/sdk/pkg/models/operations/deletedestinationawsdatalake.go
old mode 100755
new mode 100644
index 6a762465b..a4ab986d8
--- a/internal/sdk/pkg/models/operations/deletedestinationawsdatalake.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationawsdatalake.go
@@ -10,8 +10,39 @@ type DeleteDestinationAwsDatalakeRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationAwsDatalakeRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationAwsDatalakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationAwsDatalakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationAwsDatalakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationAwsDatalakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationazureblobstorage.go b/internal/sdk/pkg/models/operations/deletedestinationazureblobstorage.go
old mode 100755
new mode 100644
index da8a66db7..22b035bda
--- a/internal/sdk/pkg/models/operations/deletedestinationazureblobstorage.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationazureblobstorage.go
@@ -10,8 +10,39 @@ type DeleteDestinationAzureBlobStorageRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationAzureBlobStorageRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationAzureBlobStorageResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationAzureBlobStorageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationAzureBlobStorageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationAzureBlobStorageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationbigquery.go b/internal/sdk/pkg/models/operations/deletedestinationbigquery.go
old mode 100755
new mode 100644
index 6d4a051fc..d453df999
--- a/internal/sdk/pkg/models/operations/deletedestinationbigquery.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationbigquery.go
@@ -10,8 +10,39 @@ type DeleteDestinationBigqueryRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationBigqueryRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationBigqueryResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationBigqueryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationBigqueryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationBigqueryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationbigquerydenormalized.go b/internal/sdk/pkg/models/operations/deletedestinationbigquerydenormalized.go
deleted file mode 100755
index 2db785dc2..000000000
--- a/internal/sdk/pkg/models/operations/deletedestinationbigquerydenormalized.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "net/http"
-)
-
-type DeleteDestinationBigqueryDenormalizedRequest struct {
- DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
-}
-
-type DeleteDestinationBigqueryDenormalizedResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationclickhouse.go b/internal/sdk/pkg/models/operations/deletedestinationclickhouse.go
old mode 100755
new mode 100644
index e9d587b54..e4c3d0b2f
--- a/internal/sdk/pkg/models/operations/deletedestinationclickhouse.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationclickhouse.go
@@ -10,8 +10,39 @@ type DeleteDestinationClickhouseRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationClickhouseRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationClickhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationClickhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationClickhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationClickhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationconvex.go b/internal/sdk/pkg/models/operations/deletedestinationconvex.go
old mode 100755
new mode 100644
index 2c2276f3e..490b94698
--- a/internal/sdk/pkg/models/operations/deletedestinationconvex.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationconvex.go
@@ -10,8 +10,39 @@ type DeleteDestinationConvexRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationConvexRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationConvexResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationConvexResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationConvexResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationConvexResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationcumulio.go b/internal/sdk/pkg/models/operations/deletedestinationcumulio.go
old mode 100755
new mode 100644
index 3bfc0a284..34daf4d5e
--- a/internal/sdk/pkg/models/operations/deletedestinationcumulio.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationcumulio.go
@@ -10,8 +10,39 @@ type DeleteDestinationCumulioRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationCumulioRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationCumulioResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationCumulioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationCumulioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationCumulioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationdatabend.go b/internal/sdk/pkg/models/operations/deletedestinationdatabend.go
old mode 100755
new mode 100644
index 4334465a7..91459f8fd
--- a/internal/sdk/pkg/models/operations/deletedestinationdatabend.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationdatabend.go
@@ -10,8 +10,39 @@ type DeleteDestinationDatabendRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationDatabendRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationDatabendResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationDatabendResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationDatabendResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationDatabendResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationdatabricks.go b/internal/sdk/pkg/models/operations/deletedestinationdatabricks.go
old mode 100755
new mode 100644
index ecd645f69..a854fd292
--- a/internal/sdk/pkg/models/operations/deletedestinationdatabricks.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationdatabricks.go
@@ -10,8 +10,39 @@ type DeleteDestinationDatabricksRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationDatabricksRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationDatabricksResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationDatabricksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationDatabricksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationDatabricksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationdevnull.go b/internal/sdk/pkg/models/operations/deletedestinationdevnull.go
old mode 100755
new mode 100644
index 588f8a43d..e16fa666c
--- a/internal/sdk/pkg/models/operations/deletedestinationdevnull.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationdevnull.go
@@ -10,8 +10,39 @@ type DeleteDestinationDevNullRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationDevNullRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationDevNullResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationDevNullResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationDevNullResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationDevNullResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationduckdb.go b/internal/sdk/pkg/models/operations/deletedestinationduckdb.go
new file mode 100644
index 000000000..5e0c728ba
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/deletedestinationduckdb.go
@@ -0,0 +1,48 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "net/http"
+)
+
+type DeleteDestinationDuckdbRequest struct {
+ DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
+}
+
+func (o *DeleteDestinationDuckdbRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+type DeleteDestinationDuckdbResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *DeleteDestinationDuckdbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationDuckdbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationDuckdbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationdynamodb.go b/internal/sdk/pkg/models/operations/deletedestinationdynamodb.go
old mode 100755
new mode 100644
index 1b91f356d..5472c87df
--- a/internal/sdk/pkg/models/operations/deletedestinationdynamodb.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationdynamodb.go
@@ -10,8 +10,39 @@ type DeleteDestinationDynamodbRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationDynamodbRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationDynamodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationDynamodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationDynamodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationDynamodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationelasticsearch.go b/internal/sdk/pkg/models/operations/deletedestinationelasticsearch.go
old mode 100755
new mode 100644
index aa0e05069..40181114e
--- a/internal/sdk/pkg/models/operations/deletedestinationelasticsearch.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationelasticsearch.go
@@ -10,8 +10,39 @@ type DeleteDestinationElasticsearchRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationElasticsearchRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationElasticsearchResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationElasticsearchResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationElasticsearchResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationElasticsearchResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationfirebolt.go b/internal/sdk/pkg/models/operations/deletedestinationfirebolt.go
old mode 100755
new mode 100644
index a386dd911..026de24ad
--- a/internal/sdk/pkg/models/operations/deletedestinationfirebolt.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationfirebolt.go
@@ -10,8 +10,39 @@ type DeleteDestinationFireboltRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationFireboltRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationFireboltResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationFireboltResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationFireboltResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationFireboltResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationfirestore.go b/internal/sdk/pkg/models/operations/deletedestinationfirestore.go
old mode 100755
new mode 100644
index 2aa922bff..8ce70bcbc
--- a/internal/sdk/pkg/models/operations/deletedestinationfirestore.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationfirestore.go
@@ -10,8 +10,39 @@ type DeleteDestinationFirestoreRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationFirestoreRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationFirestoreResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationFirestoreResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationFirestoreResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationFirestoreResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationgcs.go b/internal/sdk/pkg/models/operations/deletedestinationgcs.go
old mode 100755
new mode 100644
index 598089c76..e8accd9ab
--- a/internal/sdk/pkg/models/operations/deletedestinationgcs.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationgcs.go
@@ -10,8 +10,39 @@ type DeleteDestinationGcsRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationGcsRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationGcsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationGcsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationGcsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationGcsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationgooglesheets.go b/internal/sdk/pkg/models/operations/deletedestinationgooglesheets.go
old mode 100755
new mode 100644
index d785b3dd2..5ee58d9b7
--- a/internal/sdk/pkg/models/operations/deletedestinationgooglesheets.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationgooglesheets.go
@@ -10,8 +10,39 @@ type DeleteDestinationGoogleSheetsRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationGoogleSheetsRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationGoogleSheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationGoogleSheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationGoogleSheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationGoogleSheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationkeen.go b/internal/sdk/pkg/models/operations/deletedestinationkeen.go
old mode 100755
new mode 100644
index 7655b07c1..746dd6cfd
--- a/internal/sdk/pkg/models/operations/deletedestinationkeen.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationkeen.go
@@ -10,8 +10,39 @@ type DeleteDestinationKeenRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationKeenRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationKeenResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationKeenResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationKeenResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationKeenResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationkinesis.go b/internal/sdk/pkg/models/operations/deletedestinationkinesis.go
old mode 100755
new mode 100644
index 9eae56853..b1bf4a7d7
--- a/internal/sdk/pkg/models/operations/deletedestinationkinesis.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationkinesis.go
@@ -10,8 +10,39 @@ type DeleteDestinationKinesisRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationKinesisRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationKinesisResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationKinesisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationKinesisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationKinesisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationlangchain.go b/internal/sdk/pkg/models/operations/deletedestinationlangchain.go
old mode 100755
new mode 100644
index 547a61fa4..0e6665605
--- a/internal/sdk/pkg/models/operations/deletedestinationlangchain.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationlangchain.go
@@ -10,8 +10,39 @@ type DeleteDestinationLangchainRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationLangchainRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationLangchainResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationLangchainResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationLangchainResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationLangchainResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationmilvus.go b/internal/sdk/pkg/models/operations/deletedestinationmilvus.go
old mode 100755
new mode 100644
index d79cef4d5..b9a0a528d
--- a/internal/sdk/pkg/models/operations/deletedestinationmilvus.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationmilvus.go
@@ -10,8 +10,39 @@ type DeleteDestinationMilvusRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationMilvusRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationMilvusResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationMilvusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationMilvusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationMilvusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationmongodb.go b/internal/sdk/pkg/models/operations/deletedestinationmongodb.go
old mode 100755
new mode 100644
index 874b9680b..c4b2b17fa
--- a/internal/sdk/pkg/models/operations/deletedestinationmongodb.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationmongodb.go
@@ -10,8 +10,39 @@ type DeleteDestinationMongodbRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationMongodbRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationMongodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationMongodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationMongodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationMongodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationmssql.go b/internal/sdk/pkg/models/operations/deletedestinationmssql.go
old mode 100755
new mode 100644
index 95561a477..228b03b7f
--- a/internal/sdk/pkg/models/operations/deletedestinationmssql.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationmssql.go
@@ -10,8 +10,39 @@ type DeleteDestinationMssqlRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationMssqlRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationMssqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationMssqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationMssqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationMssqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationmysql.go b/internal/sdk/pkg/models/operations/deletedestinationmysql.go
old mode 100755
new mode 100644
index 99967db3a..ec3258ca6
--- a/internal/sdk/pkg/models/operations/deletedestinationmysql.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationmysql.go
@@ -10,8 +10,39 @@ type DeleteDestinationMysqlRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationMysqlRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationMysqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationMysqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationMysqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationMysqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationoracle.go b/internal/sdk/pkg/models/operations/deletedestinationoracle.go
old mode 100755
new mode 100644
index c700b73b5..7bc7962a0
--- a/internal/sdk/pkg/models/operations/deletedestinationoracle.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationoracle.go
@@ -10,8 +10,39 @@ type DeleteDestinationOracleRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationOracleRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationOracleResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationOracleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationOracleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationOracleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationpinecone.go b/internal/sdk/pkg/models/operations/deletedestinationpinecone.go
old mode 100755
new mode 100644
index 936ad92bf..bf278aa91
--- a/internal/sdk/pkg/models/operations/deletedestinationpinecone.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationpinecone.go
@@ -10,8 +10,39 @@ type DeleteDestinationPineconeRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationPineconeRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationPineconeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationPineconeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationPineconeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationPineconeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationpostgres.go b/internal/sdk/pkg/models/operations/deletedestinationpostgres.go
old mode 100755
new mode 100644
index 826a4d995..4646feecc
--- a/internal/sdk/pkg/models/operations/deletedestinationpostgres.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationpostgres.go
@@ -10,8 +10,39 @@ type DeleteDestinationPostgresRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationPostgresRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationPostgresResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationPostgresResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationPostgresResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationPostgresResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationpubsub.go b/internal/sdk/pkg/models/operations/deletedestinationpubsub.go
old mode 100755
new mode 100644
index 8b5df32d6..83b1188fb
--- a/internal/sdk/pkg/models/operations/deletedestinationpubsub.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationpubsub.go
@@ -10,8 +10,39 @@ type DeleteDestinationPubsubRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationPubsubRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationPubsubResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationPubsubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationPubsubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationPubsubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationqdrant.go b/internal/sdk/pkg/models/operations/deletedestinationqdrant.go
new file mode 100644
index 000000000..c45ac5cfe
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/deletedestinationqdrant.go
@@ -0,0 +1,48 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "net/http"
+)
+
+type DeleteDestinationQdrantRequest struct {
+ DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
+}
+
+func (o *DeleteDestinationQdrantRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+type DeleteDestinationQdrantResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *DeleteDestinationQdrantResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationQdrantResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationQdrantResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationredis.go b/internal/sdk/pkg/models/operations/deletedestinationredis.go
old mode 100755
new mode 100644
index 688c9e46e..3c4326fe3
--- a/internal/sdk/pkg/models/operations/deletedestinationredis.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationredis.go
@@ -10,8 +10,39 @@ type DeleteDestinationRedisRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationRedisRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationRedisResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationRedisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationRedisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationRedisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationredshift.go b/internal/sdk/pkg/models/operations/deletedestinationredshift.go
old mode 100755
new mode 100644
index 7623d37f5..6ade712f0
--- a/internal/sdk/pkg/models/operations/deletedestinationredshift.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationredshift.go
@@ -10,8 +10,39 @@ type DeleteDestinationRedshiftRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationRedshiftRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationRedshiftResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationRedshiftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationRedshiftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationRedshiftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinations3.go b/internal/sdk/pkg/models/operations/deletedestinations3.go
old mode 100755
new mode 100644
index 2f38766a1..442a4aa25
--- a/internal/sdk/pkg/models/operations/deletedestinations3.go
+++ b/internal/sdk/pkg/models/operations/deletedestinations3.go
@@ -10,8 +10,39 @@ type DeleteDestinationS3Request struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationS3Request) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationS3Response struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationS3Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationS3Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationS3Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinations3glue.go b/internal/sdk/pkg/models/operations/deletedestinations3glue.go
old mode 100755
new mode 100644
index 925baced6..2471d899e
--- a/internal/sdk/pkg/models/operations/deletedestinations3glue.go
+++ b/internal/sdk/pkg/models/operations/deletedestinations3glue.go
@@ -10,8 +10,39 @@ type DeleteDestinationS3GlueRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationS3GlueRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationS3GlueResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationS3GlueResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationS3GlueResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationS3GlueResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationsftpjson.go b/internal/sdk/pkg/models/operations/deletedestinationsftpjson.go
old mode 100755
new mode 100644
index 992dbb134..37c241897
--- a/internal/sdk/pkg/models/operations/deletedestinationsftpjson.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationsftpjson.go
@@ -10,8 +10,39 @@ type DeleteDestinationSftpJSONRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationSftpJSONRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationSftpJSONResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationSftpJSONResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationSftpJSONResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationSftpJSONResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationsnowflake.go b/internal/sdk/pkg/models/operations/deletedestinationsnowflake.go
old mode 100755
new mode 100644
index 2dd48f1f5..d25149e29
--- a/internal/sdk/pkg/models/operations/deletedestinationsnowflake.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationsnowflake.go
@@ -10,8 +10,39 @@ type DeleteDestinationSnowflakeRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationSnowflakeRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationSnowflakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationSnowflakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationSnowflakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationSnowflakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationtimeplus.go b/internal/sdk/pkg/models/operations/deletedestinationtimeplus.go
old mode 100755
new mode 100644
index 0d0306fce..fb85847c4
--- a/internal/sdk/pkg/models/operations/deletedestinationtimeplus.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationtimeplus.go
@@ -10,8 +10,39 @@ type DeleteDestinationTimeplusRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationTimeplusRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationTimeplusResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationTimeplusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationTimeplusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationTimeplusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationtypesense.go b/internal/sdk/pkg/models/operations/deletedestinationtypesense.go
old mode 100755
new mode 100644
index e9cc4cecf..4452e7c12
--- a/internal/sdk/pkg/models/operations/deletedestinationtypesense.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationtypesense.go
@@ -10,8 +10,39 @@ type DeleteDestinationTypesenseRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationTypesenseRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationTypesenseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationTypesenseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationTypesenseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationTypesenseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationvertica.go b/internal/sdk/pkg/models/operations/deletedestinationvertica.go
old mode 100755
new mode 100644
index 72b429e4f..d86b78aa5
--- a/internal/sdk/pkg/models/operations/deletedestinationvertica.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationvertica.go
@@ -10,8 +10,39 @@ type DeleteDestinationVerticaRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationVerticaRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationVerticaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationVerticaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationVerticaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationVerticaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationweaviate.go b/internal/sdk/pkg/models/operations/deletedestinationweaviate.go
new file mode 100644
index 000000000..381c2d999
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/deletedestinationweaviate.go
@@ -0,0 +1,48 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "net/http"
+)
+
+type DeleteDestinationWeaviateRequest struct {
+ DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
+}
+
+func (o *DeleteDestinationWeaviateRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+type DeleteDestinationWeaviateResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *DeleteDestinationWeaviateResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationWeaviateResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationWeaviateResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletedestinationxata.go b/internal/sdk/pkg/models/operations/deletedestinationxata.go
old mode 100755
new mode 100644
index d13716419..b37bf4e40
--- a/internal/sdk/pkg/models/operations/deletedestinationxata.go
+++ b/internal/sdk/pkg/models/operations/deletedestinationxata.go
@@ -10,8 +10,39 @@ type DeleteDestinationXataRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *DeleteDestinationXataRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type DeleteDestinationXataResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteDestinationXataResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteDestinationXataResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteDestinationXataResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesource.go b/internal/sdk/pkg/models/operations/deletesource.go
old mode 100755
new mode 100644
index 7e0f83559..99ca6b12d
--- a/internal/sdk/pkg/models/operations/deletesource.go
+++ b/internal/sdk/pkg/models/operations/deletesource.go
@@ -10,8 +10,39 @@ type DeleteSourceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceaha.go b/internal/sdk/pkg/models/operations/deletesourceaha.go
old mode 100755
new mode 100644
index 6a35ed9d6..33bb743c7
--- a/internal/sdk/pkg/models/operations/deletesourceaha.go
+++ b/internal/sdk/pkg/models/operations/deletesourceaha.go
@@ -10,8 +10,39 @@ type DeleteSourceAhaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAhaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAhaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAhaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAhaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAhaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceaircall.go b/internal/sdk/pkg/models/operations/deletesourceaircall.go
old mode 100755
new mode 100644
index 1dff28f4b..116a48e90
--- a/internal/sdk/pkg/models/operations/deletesourceaircall.go
+++ b/internal/sdk/pkg/models/operations/deletesourceaircall.go
@@ -10,8 +10,39 @@ type DeleteSourceAircallRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAircallRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAircallResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAircallResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAircallResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAircallResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceairtable.go b/internal/sdk/pkg/models/operations/deletesourceairtable.go
old mode 100755
new mode 100644
index f77e5d286..3083114c9
--- a/internal/sdk/pkg/models/operations/deletesourceairtable.go
+++ b/internal/sdk/pkg/models/operations/deletesourceairtable.go
@@ -10,8 +10,39 @@ type DeleteSourceAirtableRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAirtableRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAirtableResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAirtableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAirtableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAirtableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcealloydb.go b/internal/sdk/pkg/models/operations/deletesourcealloydb.go
old mode 100755
new mode 100644
index ee0834c24..922ef0fff
--- a/internal/sdk/pkg/models/operations/deletesourcealloydb.go
+++ b/internal/sdk/pkg/models/operations/deletesourcealloydb.go
@@ -10,8 +10,39 @@ type DeleteSourceAlloydbRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAlloydbRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAlloydbResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAlloydbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAlloydbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAlloydbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceamazonads.go b/internal/sdk/pkg/models/operations/deletesourceamazonads.go
old mode 100755
new mode 100644
index 84dfbed68..bf2ec7d6d
--- a/internal/sdk/pkg/models/operations/deletesourceamazonads.go
+++ b/internal/sdk/pkg/models/operations/deletesourceamazonads.go
@@ -10,8 +10,39 @@ type DeleteSourceAmazonAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAmazonAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAmazonAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAmazonAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAmazonAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAmazonAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceamazonsellerpartner.go b/internal/sdk/pkg/models/operations/deletesourceamazonsellerpartner.go
old mode 100755
new mode 100644
index 0013158b2..446aa1532
--- a/internal/sdk/pkg/models/operations/deletesourceamazonsellerpartner.go
+++ b/internal/sdk/pkg/models/operations/deletesourceamazonsellerpartner.go
@@ -10,8 +10,39 @@ type DeleteSourceAmazonSellerPartnerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAmazonSellerPartnerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAmazonSellerPartnerResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAmazonSellerPartnerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAmazonSellerPartnerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAmazonSellerPartnerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceamazonsqs.go b/internal/sdk/pkg/models/operations/deletesourceamazonsqs.go
old mode 100755
new mode 100644
index dd2bd3fc2..4e2cd7b67
--- a/internal/sdk/pkg/models/operations/deletesourceamazonsqs.go
+++ b/internal/sdk/pkg/models/operations/deletesourceamazonsqs.go
@@ -10,8 +10,39 @@ type DeleteSourceAmazonSqsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAmazonSqsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAmazonSqsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAmazonSqsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAmazonSqsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAmazonSqsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceamplitude.go b/internal/sdk/pkg/models/operations/deletesourceamplitude.go
old mode 100755
new mode 100644
index d8a912dca..4f32b4033
--- a/internal/sdk/pkg/models/operations/deletesourceamplitude.go
+++ b/internal/sdk/pkg/models/operations/deletesourceamplitude.go
@@ -10,8 +10,39 @@ type DeleteSourceAmplitudeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAmplitudeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAmplitudeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAmplitudeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAmplitudeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAmplitudeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceapifydataset.go b/internal/sdk/pkg/models/operations/deletesourceapifydataset.go
old mode 100755
new mode 100644
index 57cc44017..e39e07db5
--- a/internal/sdk/pkg/models/operations/deletesourceapifydataset.go
+++ b/internal/sdk/pkg/models/operations/deletesourceapifydataset.go
@@ -10,8 +10,39 @@ type DeleteSourceApifyDatasetRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceApifyDatasetRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceApifyDatasetResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceApifyDatasetResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceApifyDatasetResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceApifyDatasetResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceappfollow.go b/internal/sdk/pkg/models/operations/deletesourceappfollow.go
old mode 100755
new mode 100644
index 31ef38c3d..48fbb1a71
--- a/internal/sdk/pkg/models/operations/deletesourceappfollow.go
+++ b/internal/sdk/pkg/models/operations/deletesourceappfollow.go
@@ -10,8 +10,39 @@ type DeleteSourceAppfollowRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAppfollowRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAppfollowResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAppfollowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAppfollowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAppfollowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceasana.go b/internal/sdk/pkg/models/operations/deletesourceasana.go
old mode 100755
new mode 100644
index 11e673626..77dd2c244
--- a/internal/sdk/pkg/models/operations/deletesourceasana.go
+++ b/internal/sdk/pkg/models/operations/deletesourceasana.go
@@ -10,8 +10,39 @@ type DeleteSourceAsanaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAsanaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAsanaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAsanaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAsanaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAsanaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceauth0.go b/internal/sdk/pkg/models/operations/deletesourceauth0.go
old mode 100755
new mode 100644
index 00e56f4dd..c305dc3ee
--- a/internal/sdk/pkg/models/operations/deletesourceauth0.go
+++ b/internal/sdk/pkg/models/operations/deletesourceauth0.go
@@ -10,8 +10,39 @@ type DeleteSourceAuth0Request struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAuth0Request) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAuth0Response struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAuth0Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAuth0Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAuth0Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceawscloudtrail.go b/internal/sdk/pkg/models/operations/deletesourceawscloudtrail.go
old mode 100755
new mode 100644
index e4ba97fca..ba733bc0d
--- a/internal/sdk/pkg/models/operations/deletesourceawscloudtrail.go
+++ b/internal/sdk/pkg/models/operations/deletesourceawscloudtrail.go
@@ -10,8 +10,39 @@ type DeleteSourceAwsCloudtrailRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAwsCloudtrailRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAwsCloudtrailResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAwsCloudtrailResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAwsCloudtrailResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAwsCloudtrailResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceazureblobstorage.go b/internal/sdk/pkg/models/operations/deletesourceazureblobstorage.go
old mode 100755
new mode 100644
index 43d6b6fbb..a3a6500d7
--- a/internal/sdk/pkg/models/operations/deletesourceazureblobstorage.go
+++ b/internal/sdk/pkg/models/operations/deletesourceazureblobstorage.go
@@ -10,8 +10,39 @@ type DeleteSourceAzureBlobStorageRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAzureBlobStorageRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAzureBlobStorageResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAzureBlobStorageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAzureBlobStorageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAzureBlobStorageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceazuretable.go b/internal/sdk/pkg/models/operations/deletesourceazuretable.go
old mode 100755
new mode 100644
index 9725e5dbc..f68e767ab
--- a/internal/sdk/pkg/models/operations/deletesourceazuretable.go
+++ b/internal/sdk/pkg/models/operations/deletesourceazuretable.go
@@ -10,8 +10,39 @@ type DeleteSourceAzureTableRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceAzureTableRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceAzureTableResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceAzureTableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceAzureTableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceAzureTableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcebamboohr.go b/internal/sdk/pkg/models/operations/deletesourcebamboohr.go
old mode 100755
new mode 100644
index cd33da082..2bf30695d
--- a/internal/sdk/pkg/models/operations/deletesourcebamboohr.go
+++ b/internal/sdk/pkg/models/operations/deletesourcebamboohr.go
@@ -10,8 +10,39 @@ type DeleteSourceBambooHrRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceBambooHrRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceBambooHrResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceBambooHrResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceBambooHrResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceBambooHrResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcebigcommerce.go b/internal/sdk/pkg/models/operations/deletesourcebigcommerce.go
deleted file mode 100755
index 6ecf4bcbb..000000000
--- a/internal/sdk/pkg/models/operations/deletesourcebigcommerce.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "net/http"
-)
-
-type DeleteSourceBigcommerceRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type DeleteSourceBigcommerceResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/deletesourcebigquery.go b/internal/sdk/pkg/models/operations/deletesourcebigquery.go
old mode 100755
new mode 100644
index b8bd25ffb..d69bac03b
--- a/internal/sdk/pkg/models/operations/deletesourcebigquery.go
+++ b/internal/sdk/pkg/models/operations/deletesourcebigquery.go
@@ -10,8 +10,39 @@ type DeleteSourceBigqueryRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceBigqueryRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceBigqueryResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceBigqueryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceBigqueryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceBigqueryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcebingads.go b/internal/sdk/pkg/models/operations/deletesourcebingads.go
old mode 100755
new mode 100644
index f015db0f1..148723aaa
--- a/internal/sdk/pkg/models/operations/deletesourcebingads.go
+++ b/internal/sdk/pkg/models/operations/deletesourcebingads.go
@@ -10,8 +10,39 @@ type DeleteSourceBingAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceBingAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceBingAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceBingAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceBingAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceBingAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcebraintree.go b/internal/sdk/pkg/models/operations/deletesourcebraintree.go
old mode 100755
new mode 100644
index 34d84419b..9674173a9
--- a/internal/sdk/pkg/models/operations/deletesourcebraintree.go
+++ b/internal/sdk/pkg/models/operations/deletesourcebraintree.go
@@ -10,8 +10,39 @@ type DeleteSourceBraintreeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceBraintreeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceBraintreeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceBraintreeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceBraintreeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceBraintreeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcebraze.go b/internal/sdk/pkg/models/operations/deletesourcebraze.go
old mode 100755
new mode 100644
index 85bec60f3..485007190
--- a/internal/sdk/pkg/models/operations/deletesourcebraze.go
+++ b/internal/sdk/pkg/models/operations/deletesourcebraze.go
@@ -10,8 +10,39 @@ type DeleteSourceBrazeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceBrazeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceBrazeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceBrazeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceBrazeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceBrazeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcecart.go b/internal/sdk/pkg/models/operations/deletesourcecart.go
new file mode 100644
index 000000000..a1068c932
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/deletesourcecart.go
@@ -0,0 +1,48 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "net/http"
+)
+
+type DeleteSourceCartRequest struct {
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *DeleteSourceCartRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type DeleteSourceCartResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *DeleteSourceCartResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceCartResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceCartResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcechargebee.go b/internal/sdk/pkg/models/operations/deletesourcechargebee.go
old mode 100755
new mode 100644
index d2cd70fb4..9c864550d
--- a/internal/sdk/pkg/models/operations/deletesourcechargebee.go
+++ b/internal/sdk/pkg/models/operations/deletesourcechargebee.go
@@ -10,8 +10,39 @@ type DeleteSourceChargebeeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceChargebeeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceChargebeeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceChargebeeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceChargebeeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceChargebeeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcechartmogul.go b/internal/sdk/pkg/models/operations/deletesourcechartmogul.go
old mode 100755
new mode 100644
index a67326e7b..f74f5ad11
--- a/internal/sdk/pkg/models/operations/deletesourcechartmogul.go
+++ b/internal/sdk/pkg/models/operations/deletesourcechartmogul.go
@@ -10,8 +10,39 @@ type DeleteSourceChartmogulRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceChartmogulRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceChartmogulResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceChartmogulResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceChartmogulResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceChartmogulResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceclickhouse.go b/internal/sdk/pkg/models/operations/deletesourceclickhouse.go
old mode 100755
new mode 100644
index e6c45d5bf..2d27aef74
--- a/internal/sdk/pkg/models/operations/deletesourceclickhouse.go
+++ b/internal/sdk/pkg/models/operations/deletesourceclickhouse.go
@@ -10,8 +10,39 @@ type DeleteSourceClickhouseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceClickhouseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceClickhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceClickhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceClickhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceClickhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceclickupapi.go b/internal/sdk/pkg/models/operations/deletesourceclickupapi.go
old mode 100755
new mode 100644
index ec07abfd6..f4760abf6
--- a/internal/sdk/pkg/models/operations/deletesourceclickupapi.go
+++ b/internal/sdk/pkg/models/operations/deletesourceclickupapi.go
@@ -10,8 +10,39 @@ type DeleteSourceClickupAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceClickupAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceClickupAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceClickupAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceClickupAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceClickupAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceclockify.go b/internal/sdk/pkg/models/operations/deletesourceclockify.go
old mode 100755
new mode 100644
index 62ec37dd5..1ef7e920f
--- a/internal/sdk/pkg/models/operations/deletesourceclockify.go
+++ b/internal/sdk/pkg/models/operations/deletesourceclockify.go
@@ -10,8 +10,39 @@ type DeleteSourceClockifyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceClockifyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceClockifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceClockifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceClockifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceClockifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceclosecom.go b/internal/sdk/pkg/models/operations/deletesourceclosecom.go
old mode 100755
new mode 100644
index 58dfefee7..a4c15c5ed
--- a/internal/sdk/pkg/models/operations/deletesourceclosecom.go
+++ b/internal/sdk/pkg/models/operations/deletesourceclosecom.go
@@ -10,8 +10,39 @@ type DeleteSourceCloseComRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceCloseComRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceCloseComResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceCloseComResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceCloseComResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceCloseComResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcecoda.go b/internal/sdk/pkg/models/operations/deletesourcecoda.go
old mode 100755
new mode 100644
index 485a63527..b32f5f5f4
--- a/internal/sdk/pkg/models/operations/deletesourcecoda.go
+++ b/internal/sdk/pkg/models/operations/deletesourcecoda.go
@@ -10,8 +10,39 @@ type DeleteSourceCodaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceCodaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceCodaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceCodaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceCodaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceCodaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcecoinapi.go b/internal/sdk/pkg/models/operations/deletesourcecoinapi.go
old mode 100755
new mode 100644
index f49d15d52..9ed175d3c
--- a/internal/sdk/pkg/models/operations/deletesourcecoinapi.go
+++ b/internal/sdk/pkg/models/operations/deletesourcecoinapi.go
@@ -10,8 +10,39 @@ type DeleteSourceCoinAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceCoinAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceCoinAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceCoinAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceCoinAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceCoinAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcecoinmarketcap.go b/internal/sdk/pkg/models/operations/deletesourcecoinmarketcap.go
old mode 100755
new mode 100644
index e3bf70a53..dc16f24c7
--- a/internal/sdk/pkg/models/operations/deletesourcecoinmarketcap.go
+++ b/internal/sdk/pkg/models/operations/deletesourcecoinmarketcap.go
@@ -10,8 +10,39 @@ type DeleteSourceCoinmarketcapRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceCoinmarketcapRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceCoinmarketcapResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceCoinmarketcapResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceCoinmarketcapResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceCoinmarketcapResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceconfigcat.go b/internal/sdk/pkg/models/operations/deletesourceconfigcat.go
old mode 100755
new mode 100644
index 815505e1c..7064b666d
--- a/internal/sdk/pkg/models/operations/deletesourceconfigcat.go
+++ b/internal/sdk/pkg/models/operations/deletesourceconfigcat.go
@@ -10,8 +10,39 @@ type DeleteSourceConfigcatRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceConfigcatRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceConfigcatResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceConfigcatResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceConfigcatResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceConfigcatResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceconfluence.go b/internal/sdk/pkg/models/operations/deletesourceconfluence.go
old mode 100755
new mode 100644
index 4f4023b78..1a6e1e251
--- a/internal/sdk/pkg/models/operations/deletesourceconfluence.go
+++ b/internal/sdk/pkg/models/operations/deletesourceconfluence.go
@@ -10,8 +10,39 @@ type DeleteSourceConfluenceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceConfluenceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceConfluenceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceConfluenceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceConfluenceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceConfluenceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceconvex.go b/internal/sdk/pkg/models/operations/deletesourceconvex.go
old mode 100755
new mode 100644
index d768d6010..ed5594e83
--- a/internal/sdk/pkg/models/operations/deletesourceconvex.go
+++ b/internal/sdk/pkg/models/operations/deletesourceconvex.go
@@ -10,8 +10,39 @@ type DeleteSourceConvexRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceConvexRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceConvexResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceConvexResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceConvexResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceConvexResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcedatascope.go b/internal/sdk/pkg/models/operations/deletesourcedatascope.go
old mode 100755
new mode 100644
index 19b7b8c13..84bb7812a
--- a/internal/sdk/pkg/models/operations/deletesourcedatascope.go
+++ b/internal/sdk/pkg/models/operations/deletesourcedatascope.go
@@ -10,8 +10,39 @@ type DeleteSourceDatascopeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceDatascopeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceDatascopeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceDatascopeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceDatascopeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceDatascopeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcedelighted.go b/internal/sdk/pkg/models/operations/deletesourcedelighted.go
old mode 100755
new mode 100644
index 8e0873ccd..126fb47d9
--- a/internal/sdk/pkg/models/operations/deletesourcedelighted.go
+++ b/internal/sdk/pkg/models/operations/deletesourcedelighted.go
@@ -10,8 +10,39 @@ type DeleteSourceDelightedRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceDelightedRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceDelightedResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceDelightedResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceDelightedResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceDelightedResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcedixa.go b/internal/sdk/pkg/models/operations/deletesourcedixa.go
old mode 100755
new mode 100644
index 81d9f43cf..53acd9251
--- a/internal/sdk/pkg/models/operations/deletesourcedixa.go
+++ b/internal/sdk/pkg/models/operations/deletesourcedixa.go
@@ -10,8 +10,39 @@ type DeleteSourceDixaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceDixaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceDixaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceDixaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceDixaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceDixaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcedockerhub.go b/internal/sdk/pkg/models/operations/deletesourcedockerhub.go
old mode 100755
new mode 100644
index 8425f3091..7b12f566d
--- a/internal/sdk/pkg/models/operations/deletesourcedockerhub.go
+++ b/internal/sdk/pkg/models/operations/deletesourcedockerhub.go
@@ -10,8 +10,39 @@ type DeleteSourceDockerhubRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceDockerhubRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceDockerhubResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceDockerhubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceDockerhubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceDockerhubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcedremio.go b/internal/sdk/pkg/models/operations/deletesourcedremio.go
old mode 100755
new mode 100644
index 2fb83b099..abd1f6571
--- a/internal/sdk/pkg/models/operations/deletesourcedremio.go
+++ b/internal/sdk/pkg/models/operations/deletesourcedremio.go
@@ -10,8 +10,39 @@ type DeleteSourceDremioRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceDremioRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceDremioResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceDremioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceDremioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceDremioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcedynamodb.go b/internal/sdk/pkg/models/operations/deletesourcedynamodb.go
old mode 100755
new mode 100644
index ec8461759..64359cb5f
--- a/internal/sdk/pkg/models/operations/deletesourcedynamodb.go
+++ b/internal/sdk/pkg/models/operations/deletesourcedynamodb.go
@@ -10,8 +10,39 @@ type DeleteSourceDynamodbRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceDynamodbRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceDynamodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceDynamodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceDynamodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceDynamodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcee2etestcloud.go b/internal/sdk/pkg/models/operations/deletesourcee2etestcloud.go
deleted file mode 100755
index f1a6a363c..000000000
--- a/internal/sdk/pkg/models/operations/deletesourcee2etestcloud.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "net/http"
-)
-
-type DeleteSourceE2eTestCloudRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type DeleteSourceE2eTestCloudResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/deletesourceemailoctopus.go b/internal/sdk/pkg/models/operations/deletesourceemailoctopus.go
old mode 100755
new mode 100644
index 93014e854..d2589b37c
--- a/internal/sdk/pkg/models/operations/deletesourceemailoctopus.go
+++ b/internal/sdk/pkg/models/operations/deletesourceemailoctopus.go
@@ -10,8 +10,39 @@ type DeleteSourceEmailoctopusRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceEmailoctopusRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceEmailoctopusResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceEmailoctopusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceEmailoctopusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceEmailoctopusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceexchangerates.go b/internal/sdk/pkg/models/operations/deletesourceexchangerates.go
old mode 100755
new mode 100644
index aa9697669..7c84aa9fe
--- a/internal/sdk/pkg/models/operations/deletesourceexchangerates.go
+++ b/internal/sdk/pkg/models/operations/deletesourceexchangerates.go
@@ -10,8 +10,39 @@ type DeleteSourceExchangeRatesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceExchangeRatesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceExchangeRatesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceExchangeRatesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceExchangeRatesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceExchangeRatesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcefacebookmarketing.go b/internal/sdk/pkg/models/operations/deletesourcefacebookmarketing.go
old mode 100755
new mode 100644
index 07a38e91e..d29c85583
--- a/internal/sdk/pkg/models/operations/deletesourcefacebookmarketing.go
+++ b/internal/sdk/pkg/models/operations/deletesourcefacebookmarketing.go
@@ -10,8 +10,39 @@ type DeleteSourceFacebookMarketingRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceFacebookMarketingRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceFacebookMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceFacebookMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceFacebookMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceFacebookMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcefacebookpages.go b/internal/sdk/pkg/models/operations/deletesourcefacebookpages.go
old mode 100755
new mode 100644
index e38a9bdcc..f1cc71277
--- a/internal/sdk/pkg/models/operations/deletesourcefacebookpages.go
+++ b/internal/sdk/pkg/models/operations/deletesourcefacebookpages.go
@@ -10,8 +10,39 @@ type DeleteSourceFacebookPagesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceFacebookPagesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceFacebookPagesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceFacebookPagesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceFacebookPagesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceFacebookPagesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcefaker.go b/internal/sdk/pkg/models/operations/deletesourcefaker.go
old mode 100755
new mode 100644
index b401c3862..c9512100a
--- a/internal/sdk/pkg/models/operations/deletesourcefaker.go
+++ b/internal/sdk/pkg/models/operations/deletesourcefaker.go
@@ -10,8 +10,39 @@ type DeleteSourceFakerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceFakerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceFakerResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceFakerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceFakerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceFakerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcefauna.go b/internal/sdk/pkg/models/operations/deletesourcefauna.go
old mode 100755
new mode 100644
index 4387c3aca..6386cf515
--- a/internal/sdk/pkg/models/operations/deletesourcefauna.go
+++ b/internal/sdk/pkg/models/operations/deletesourcefauna.go
@@ -10,8 +10,39 @@ type DeleteSourceFaunaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceFaunaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceFaunaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceFaunaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceFaunaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceFaunaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcefile.go b/internal/sdk/pkg/models/operations/deletesourcefile.go
new file mode 100644
index 000000000..1ff48c602
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/deletesourcefile.go
@@ -0,0 +1,48 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "net/http"
+)
+
+type DeleteSourceFileRequest struct {
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *DeleteSourceFileRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type DeleteSourceFileResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *DeleteSourceFileResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceFileResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceFileResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcefilesecure.go b/internal/sdk/pkg/models/operations/deletesourcefilesecure.go
deleted file mode 100755
index 487704d6d..000000000
--- a/internal/sdk/pkg/models/operations/deletesourcefilesecure.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "net/http"
-)
-
-type DeleteSourceFileSecureRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type DeleteSourceFileSecureResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/deletesourcefirebolt.go b/internal/sdk/pkg/models/operations/deletesourcefirebolt.go
old mode 100755
new mode 100644
index fe701290e..4ec6ee276
--- a/internal/sdk/pkg/models/operations/deletesourcefirebolt.go
+++ b/internal/sdk/pkg/models/operations/deletesourcefirebolt.go
@@ -10,8 +10,39 @@ type DeleteSourceFireboltRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceFireboltRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceFireboltResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceFireboltResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceFireboltResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceFireboltResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcefreshcaller.go b/internal/sdk/pkg/models/operations/deletesourcefreshcaller.go
old mode 100755
new mode 100644
index a69c8bcf7..8af8b8715
--- a/internal/sdk/pkg/models/operations/deletesourcefreshcaller.go
+++ b/internal/sdk/pkg/models/operations/deletesourcefreshcaller.go
@@ -10,8 +10,39 @@ type DeleteSourceFreshcallerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceFreshcallerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceFreshcallerResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceFreshcallerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceFreshcallerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceFreshcallerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcefreshdesk.go b/internal/sdk/pkg/models/operations/deletesourcefreshdesk.go
old mode 100755
new mode 100644
index c130157a2..ca82a991b
--- a/internal/sdk/pkg/models/operations/deletesourcefreshdesk.go
+++ b/internal/sdk/pkg/models/operations/deletesourcefreshdesk.go
@@ -10,8 +10,39 @@ type DeleteSourceFreshdeskRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceFreshdeskRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceFreshdeskResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceFreshdeskResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceFreshdeskResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceFreshdeskResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcefreshsales.go b/internal/sdk/pkg/models/operations/deletesourcefreshsales.go
old mode 100755
new mode 100644
index 62d1e35fa..bb6bf34a9
--- a/internal/sdk/pkg/models/operations/deletesourcefreshsales.go
+++ b/internal/sdk/pkg/models/operations/deletesourcefreshsales.go
@@ -10,8 +10,39 @@ type DeleteSourceFreshsalesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceFreshsalesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceFreshsalesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceFreshsalesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceFreshsalesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceFreshsalesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegainsightpx.go b/internal/sdk/pkg/models/operations/deletesourcegainsightpx.go
old mode 100755
new mode 100644
index 9aa4b0dc1..026d94d06
--- a/internal/sdk/pkg/models/operations/deletesourcegainsightpx.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegainsightpx.go
@@ -10,8 +10,39 @@ type DeleteSourceGainsightPxRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGainsightPxRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGainsightPxResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGainsightPxResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGainsightPxResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGainsightPxResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegcs.go b/internal/sdk/pkg/models/operations/deletesourcegcs.go
old mode 100755
new mode 100644
index defb2fe76..ada4bc9e3
--- a/internal/sdk/pkg/models/operations/deletesourcegcs.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegcs.go
@@ -10,8 +10,39 @@ type DeleteSourceGcsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGcsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGcsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGcsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGcsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGcsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegetlago.go b/internal/sdk/pkg/models/operations/deletesourcegetlago.go
old mode 100755
new mode 100644
index 99697b6f4..5b54f05d8
--- a/internal/sdk/pkg/models/operations/deletesourcegetlago.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegetlago.go
@@ -10,8 +10,39 @@ type DeleteSourceGetlagoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGetlagoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGetlagoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGetlagoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGetlagoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGetlagoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegithub.go b/internal/sdk/pkg/models/operations/deletesourcegithub.go
old mode 100755
new mode 100644
index 102466350..8e0536e23
--- a/internal/sdk/pkg/models/operations/deletesourcegithub.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegithub.go
@@ -10,8 +10,39 @@ type DeleteSourceGithubRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGithubRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGithubResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGithubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGithubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGithubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegitlab.go b/internal/sdk/pkg/models/operations/deletesourcegitlab.go
old mode 100755
new mode 100644
index 132999eba..0f080dcf8
--- a/internal/sdk/pkg/models/operations/deletesourcegitlab.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegitlab.go
@@ -10,8 +10,39 @@ type DeleteSourceGitlabRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGitlabRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGitlabResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGitlabResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGitlabResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGitlabResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceglassfrog.go b/internal/sdk/pkg/models/operations/deletesourceglassfrog.go
old mode 100755
new mode 100644
index cb8903163..574d12282
--- a/internal/sdk/pkg/models/operations/deletesourceglassfrog.go
+++ b/internal/sdk/pkg/models/operations/deletesourceglassfrog.go
@@ -10,8 +10,39 @@ type DeleteSourceGlassfrogRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGlassfrogRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGlassfrogResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGlassfrogResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGlassfrogResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGlassfrogResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegnews.go b/internal/sdk/pkg/models/operations/deletesourcegnews.go
old mode 100755
new mode 100644
index a4c9a7791..5a9355397
--- a/internal/sdk/pkg/models/operations/deletesourcegnews.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegnews.go
@@ -10,8 +10,39 @@ type DeleteSourceGnewsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGnewsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGnewsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGnewsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGnewsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGnewsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegoogleads.go b/internal/sdk/pkg/models/operations/deletesourcegoogleads.go
old mode 100755
new mode 100644
index 9af3625fd..cf7854fec
--- a/internal/sdk/pkg/models/operations/deletesourcegoogleads.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegoogleads.go
@@ -10,8 +10,39 @@ type DeleteSourceGoogleAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGoogleAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGoogleAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGoogleAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGoogleAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGoogleAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegoogleanalyticsdataapi.go b/internal/sdk/pkg/models/operations/deletesourcegoogleanalyticsdataapi.go
old mode 100755
new mode 100644
index 13fce300f..09d675eed
--- a/internal/sdk/pkg/models/operations/deletesourcegoogleanalyticsdataapi.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegoogleanalyticsdataapi.go
@@ -10,8 +10,39 @@ type DeleteSourceGoogleAnalyticsDataAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGoogleAnalyticsDataAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGoogleAnalyticsDataAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGoogleAnalyticsDataAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGoogleAnalyticsDataAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGoogleAnalyticsDataAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegoogleanalyticsv4.go b/internal/sdk/pkg/models/operations/deletesourcegoogleanalyticsv4.go
deleted file mode 100755
index 333dd81e9..000000000
--- a/internal/sdk/pkg/models/operations/deletesourcegoogleanalyticsv4.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "net/http"
-)
-
-type DeleteSourceGoogleAnalyticsV4Request struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type DeleteSourceGoogleAnalyticsV4Response struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegoogledirectory.go b/internal/sdk/pkg/models/operations/deletesourcegoogledirectory.go
old mode 100755
new mode 100644
index c4cf14daa..bc834b4c1
--- a/internal/sdk/pkg/models/operations/deletesourcegoogledirectory.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegoogledirectory.go
@@ -10,8 +10,39 @@ type DeleteSourceGoogleDirectoryRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGoogleDirectoryRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGoogleDirectoryResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGoogleDirectoryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGoogleDirectoryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGoogleDirectoryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegoogledrive.go b/internal/sdk/pkg/models/operations/deletesourcegoogledrive.go
new file mode 100644
index 000000000..d2e61f619
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/deletesourcegoogledrive.go
@@ -0,0 +1,48 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "net/http"
+)
+
+type DeleteSourceGoogleDriveRequest struct {
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *DeleteSourceGoogleDriveRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type DeleteSourceGoogleDriveResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *DeleteSourceGoogleDriveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGoogleDriveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGoogleDriveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegooglepagespeedinsights.go b/internal/sdk/pkg/models/operations/deletesourcegooglepagespeedinsights.go
old mode 100755
new mode 100644
index af362d8e0..e5754107a
--- a/internal/sdk/pkg/models/operations/deletesourcegooglepagespeedinsights.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegooglepagespeedinsights.go
@@ -10,8 +10,39 @@ type DeleteSourceGooglePagespeedInsightsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGooglePagespeedInsightsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGooglePagespeedInsightsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGooglePagespeedInsightsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGooglePagespeedInsightsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGooglePagespeedInsightsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegooglesearchconsole.go b/internal/sdk/pkg/models/operations/deletesourcegooglesearchconsole.go
old mode 100755
new mode 100644
index d7d704fc4..62a0902ad
--- a/internal/sdk/pkg/models/operations/deletesourcegooglesearchconsole.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegooglesearchconsole.go
@@ -10,8 +10,39 @@ type DeleteSourceGoogleSearchConsoleRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGoogleSearchConsoleRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGoogleSearchConsoleResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGoogleSearchConsoleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGoogleSearchConsoleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGoogleSearchConsoleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegooglesheets.go b/internal/sdk/pkg/models/operations/deletesourcegooglesheets.go
old mode 100755
new mode 100644
index e7ab5f3e8..bbb0d6efd
--- a/internal/sdk/pkg/models/operations/deletesourcegooglesheets.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegooglesheets.go
@@ -10,8 +10,39 @@ type DeleteSourceGoogleSheetsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGoogleSheetsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGoogleSheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGoogleSheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGoogleSheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGoogleSheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegooglewebfonts.go b/internal/sdk/pkg/models/operations/deletesourcegooglewebfonts.go
old mode 100755
new mode 100644
index de17543ea..58adf1ad3
--- a/internal/sdk/pkg/models/operations/deletesourcegooglewebfonts.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegooglewebfonts.go
@@ -10,8 +10,39 @@ type DeleteSourceGoogleWebfontsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGoogleWebfontsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGoogleWebfontsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGoogleWebfontsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGoogleWebfontsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGoogleWebfontsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegoogleworkspaceadminreports.go b/internal/sdk/pkg/models/operations/deletesourcegoogleworkspaceadminreports.go
old mode 100755
new mode 100644
index db728d408..154156206
--- a/internal/sdk/pkg/models/operations/deletesourcegoogleworkspaceadminreports.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegoogleworkspaceadminreports.go
@@ -10,8 +10,39 @@ type DeleteSourceGoogleWorkspaceAdminReportsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGoogleWorkspaceAdminReportsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGoogleWorkspaceAdminReportsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGoogleWorkspaceAdminReportsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGoogleWorkspaceAdminReportsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGoogleWorkspaceAdminReportsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegreenhouse.go b/internal/sdk/pkg/models/operations/deletesourcegreenhouse.go
old mode 100755
new mode 100644
index 64998410a..6df29c5bd
--- a/internal/sdk/pkg/models/operations/deletesourcegreenhouse.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegreenhouse.go
@@ -10,8 +10,39 @@ type DeleteSourceGreenhouseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGreenhouseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGreenhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGreenhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGreenhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGreenhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcegridly.go b/internal/sdk/pkg/models/operations/deletesourcegridly.go
old mode 100755
new mode 100644
index c509083d2..2f9239a5f
--- a/internal/sdk/pkg/models/operations/deletesourcegridly.go
+++ b/internal/sdk/pkg/models/operations/deletesourcegridly.go
@@ -10,8 +10,39 @@ type DeleteSourceGridlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceGridlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceGridlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceGridlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceGridlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceGridlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceharvest.go b/internal/sdk/pkg/models/operations/deletesourceharvest.go
old mode 100755
new mode 100644
index c6093bbe2..62ae1c667
--- a/internal/sdk/pkg/models/operations/deletesourceharvest.go
+++ b/internal/sdk/pkg/models/operations/deletesourceharvest.go
@@ -10,8 +10,39 @@ type DeleteSourceHarvestRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceHarvestRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceHarvestResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceHarvestResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceHarvestResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceHarvestResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcehubplanner.go b/internal/sdk/pkg/models/operations/deletesourcehubplanner.go
old mode 100755
new mode 100644
index e719244a9..6d014c21d
--- a/internal/sdk/pkg/models/operations/deletesourcehubplanner.go
+++ b/internal/sdk/pkg/models/operations/deletesourcehubplanner.go
@@ -10,8 +10,39 @@ type DeleteSourceHubplannerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceHubplannerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceHubplannerResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceHubplannerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceHubplannerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceHubplannerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcehubspot.go b/internal/sdk/pkg/models/operations/deletesourcehubspot.go
old mode 100755
new mode 100644
index 6167da7b6..6225af7b9
--- a/internal/sdk/pkg/models/operations/deletesourcehubspot.go
+++ b/internal/sdk/pkg/models/operations/deletesourcehubspot.go
@@ -10,8 +10,39 @@ type DeleteSourceHubspotRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceHubspotRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceHubspotResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceHubspotResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceHubspotResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceHubspotResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceinsightly.go b/internal/sdk/pkg/models/operations/deletesourceinsightly.go
old mode 100755
new mode 100644
index 7335d6028..8dacd9b76
--- a/internal/sdk/pkg/models/operations/deletesourceinsightly.go
+++ b/internal/sdk/pkg/models/operations/deletesourceinsightly.go
@@ -10,8 +10,39 @@ type DeleteSourceInsightlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceInsightlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceInsightlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceInsightlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceInsightlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceInsightlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceinstagram.go b/internal/sdk/pkg/models/operations/deletesourceinstagram.go
old mode 100755
new mode 100644
index 56eff7877..cb31044fc
--- a/internal/sdk/pkg/models/operations/deletesourceinstagram.go
+++ b/internal/sdk/pkg/models/operations/deletesourceinstagram.go
@@ -10,8 +10,39 @@ type DeleteSourceInstagramRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceInstagramRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceInstagramResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceInstagramResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceInstagramResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceInstagramResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceinstatus.go b/internal/sdk/pkg/models/operations/deletesourceinstatus.go
old mode 100755
new mode 100644
index 4bebb9304..7babc84b5
--- a/internal/sdk/pkg/models/operations/deletesourceinstatus.go
+++ b/internal/sdk/pkg/models/operations/deletesourceinstatus.go
@@ -10,8 +10,39 @@ type DeleteSourceInstatusRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceInstatusRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceInstatusResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceInstatusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceInstatusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceInstatusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceintercom.go b/internal/sdk/pkg/models/operations/deletesourceintercom.go
old mode 100755
new mode 100644
index d87fa746c..67a22d99e
--- a/internal/sdk/pkg/models/operations/deletesourceintercom.go
+++ b/internal/sdk/pkg/models/operations/deletesourceintercom.go
@@ -10,8 +10,39 @@ type DeleteSourceIntercomRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceIntercomRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceIntercomResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceIntercomResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceIntercomResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceIntercomResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceip2whois.go b/internal/sdk/pkg/models/operations/deletesourceip2whois.go
old mode 100755
new mode 100644
index eab50fa9b..f5077eadc
--- a/internal/sdk/pkg/models/operations/deletesourceip2whois.go
+++ b/internal/sdk/pkg/models/operations/deletesourceip2whois.go
@@ -10,8 +10,39 @@ type DeleteSourceIp2whoisRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceIp2whoisRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceIp2whoisResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceIp2whoisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceIp2whoisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceIp2whoisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceiterable.go b/internal/sdk/pkg/models/operations/deletesourceiterable.go
old mode 100755
new mode 100644
index ec7cf6b82..9b747aea4
--- a/internal/sdk/pkg/models/operations/deletesourceiterable.go
+++ b/internal/sdk/pkg/models/operations/deletesourceiterable.go
@@ -10,8 +10,39 @@ type DeleteSourceIterableRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceIterableRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceIterableResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceIterableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceIterableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceIterableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcejira.go b/internal/sdk/pkg/models/operations/deletesourcejira.go
old mode 100755
new mode 100644
index c27ca8feb..115bc9979
--- a/internal/sdk/pkg/models/operations/deletesourcejira.go
+++ b/internal/sdk/pkg/models/operations/deletesourcejira.go
@@ -10,8 +10,39 @@ type DeleteSourceJiraRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceJiraRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceJiraResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceJiraResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceJiraResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceJiraResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcek6cloud.go b/internal/sdk/pkg/models/operations/deletesourcek6cloud.go
old mode 100755
new mode 100644
index 421cedd55..dbe3d709e
--- a/internal/sdk/pkg/models/operations/deletesourcek6cloud.go
+++ b/internal/sdk/pkg/models/operations/deletesourcek6cloud.go
@@ -10,8 +10,39 @@ type DeleteSourceK6CloudRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceK6CloudRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceK6CloudResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceK6CloudResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceK6CloudResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceK6CloudResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceklarna.go b/internal/sdk/pkg/models/operations/deletesourceklarna.go
old mode 100755
new mode 100644
index 6a630ce6d..95d0bb639
--- a/internal/sdk/pkg/models/operations/deletesourceklarna.go
+++ b/internal/sdk/pkg/models/operations/deletesourceklarna.go
@@ -10,8 +10,39 @@ type DeleteSourceKlarnaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceKlarnaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceKlarnaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceKlarnaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceKlarnaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceKlarnaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceklaviyo.go b/internal/sdk/pkg/models/operations/deletesourceklaviyo.go
old mode 100755
new mode 100644
index c222578f6..d484e7cf4
--- a/internal/sdk/pkg/models/operations/deletesourceklaviyo.go
+++ b/internal/sdk/pkg/models/operations/deletesourceklaviyo.go
@@ -10,8 +10,39 @@ type DeleteSourceKlaviyoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceKlaviyoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceKlaviyoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceKlaviyoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceKlaviyoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceKlaviyoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcekustomersinger.go b/internal/sdk/pkg/models/operations/deletesourcekustomersinger.go
old mode 100755
new mode 100644
index 2db1c82de..561f03f2f
--- a/internal/sdk/pkg/models/operations/deletesourcekustomersinger.go
+++ b/internal/sdk/pkg/models/operations/deletesourcekustomersinger.go
@@ -10,8 +10,39 @@ type DeleteSourceKustomerSingerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceKustomerSingerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceKustomerSingerResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceKustomerSingerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceKustomerSingerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceKustomerSingerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcekyve.go b/internal/sdk/pkg/models/operations/deletesourcekyve.go
old mode 100755
new mode 100644
index b69917768..861c7b69c
--- a/internal/sdk/pkg/models/operations/deletesourcekyve.go
+++ b/internal/sdk/pkg/models/operations/deletesourcekyve.go
@@ -10,8 +10,39 @@ type DeleteSourceKyveRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceKyveRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceKyveResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceKyveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceKyveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceKyveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcelaunchdarkly.go b/internal/sdk/pkg/models/operations/deletesourcelaunchdarkly.go
old mode 100755
new mode 100644
index e744b4259..e550a2259
--- a/internal/sdk/pkg/models/operations/deletesourcelaunchdarkly.go
+++ b/internal/sdk/pkg/models/operations/deletesourcelaunchdarkly.go
@@ -10,8 +10,39 @@ type DeleteSourceLaunchdarklyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceLaunchdarklyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceLaunchdarklyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceLaunchdarklyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceLaunchdarklyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceLaunchdarklyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcelemlist.go b/internal/sdk/pkg/models/operations/deletesourcelemlist.go
old mode 100755
new mode 100644
index 4d6c0d6fe..857cab021
--- a/internal/sdk/pkg/models/operations/deletesourcelemlist.go
+++ b/internal/sdk/pkg/models/operations/deletesourcelemlist.go
@@ -10,8 +10,39 @@ type DeleteSourceLemlistRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceLemlistRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceLemlistResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceLemlistResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceLemlistResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceLemlistResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceleverhiring.go b/internal/sdk/pkg/models/operations/deletesourceleverhiring.go
old mode 100755
new mode 100644
index abd47a48d..14862cfaf
--- a/internal/sdk/pkg/models/operations/deletesourceleverhiring.go
+++ b/internal/sdk/pkg/models/operations/deletesourceleverhiring.go
@@ -10,8 +10,39 @@ type DeleteSourceLeverHiringRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceLeverHiringRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceLeverHiringResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceLeverHiringResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceLeverHiringResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceLeverHiringResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcelinkedinads.go b/internal/sdk/pkg/models/operations/deletesourcelinkedinads.go
old mode 100755
new mode 100644
index 9012db8e5..faf4932d5
--- a/internal/sdk/pkg/models/operations/deletesourcelinkedinads.go
+++ b/internal/sdk/pkg/models/operations/deletesourcelinkedinads.go
@@ -10,8 +10,39 @@ type DeleteSourceLinkedinAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceLinkedinAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceLinkedinAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceLinkedinAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceLinkedinAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceLinkedinAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcelinkedinpages.go b/internal/sdk/pkg/models/operations/deletesourcelinkedinpages.go
old mode 100755
new mode 100644
index 890c7129f..de8965434
--- a/internal/sdk/pkg/models/operations/deletesourcelinkedinpages.go
+++ b/internal/sdk/pkg/models/operations/deletesourcelinkedinpages.go
@@ -10,8 +10,39 @@ type DeleteSourceLinkedinPagesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceLinkedinPagesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceLinkedinPagesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceLinkedinPagesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceLinkedinPagesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceLinkedinPagesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcelinnworks.go b/internal/sdk/pkg/models/operations/deletesourcelinnworks.go
old mode 100755
new mode 100644
index 2bb91a679..c31baad94
--- a/internal/sdk/pkg/models/operations/deletesourcelinnworks.go
+++ b/internal/sdk/pkg/models/operations/deletesourcelinnworks.go
@@ -10,8 +10,39 @@ type DeleteSourceLinnworksRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceLinnworksRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceLinnworksResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceLinnworksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceLinnworksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceLinnworksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcelokalise.go b/internal/sdk/pkg/models/operations/deletesourcelokalise.go
old mode 100755
new mode 100644
index 5767d9eea..808ec1185
--- a/internal/sdk/pkg/models/operations/deletesourcelokalise.go
+++ b/internal/sdk/pkg/models/operations/deletesourcelokalise.go
@@ -10,8 +10,39 @@ type DeleteSourceLokaliseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceLokaliseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceLokaliseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceLokaliseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceLokaliseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceLokaliseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemailchimp.go b/internal/sdk/pkg/models/operations/deletesourcemailchimp.go
old mode 100755
new mode 100644
index 11c111590..afe3dc054
--- a/internal/sdk/pkg/models/operations/deletesourcemailchimp.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemailchimp.go
@@ -10,8 +10,39 @@ type DeleteSourceMailchimpRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMailchimpRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMailchimpResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMailchimpResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMailchimpResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMailchimpResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemailgun.go b/internal/sdk/pkg/models/operations/deletesourcemailgun.go
old mode 100755
new mode 100644
index 9fc0c485c..efe058b2e
--- a/internal/sdk/pkg/models/operations/deletesourcemailgun.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemailgun.go
@@ -10,8 +10,39 @@ type DeleteSourceMailgunRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMailgunRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMailgunResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMailgunResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMailgunResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMailgunResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemailjetsms.go b/internal/sdk/pkg/models/operations/deletesourcemailjetsms.go
old mode 100755
new mode 100644
index 0281a023e..8137c8a17
--- a/internal/sdk/pkg/models/operations/deletesourcemailjetsms.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemailjetsms.go
@@ -10,8 +10,39 @@ type DeleteSourceMailjetSmsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMailjetSmsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMailjetSmsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMailjetSmsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMailjetSmsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMailjetSmsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemarketo.go b/internal/sdk/pkg/models/operations/deletesourcemarketo.go
old mode 100755
new mode 100644
index 6b252a9e7..03320db68
--- a/internal/sdk/pkg/models/operations/deletesourcemarketo.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemarketo.go
@@ -10,8 +10,39 @@ type DeleteSourceMarketoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMarketoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMarketoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMarketoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMarketoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMarketoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemetabase.go b/internal/sdk/pkg/models/operations/deletesourcemetabase.go
old mode 100755
new mode 100644
index f5ef2bd2a..b9e171b4a
--- a/internal/sdk/pkg/models/operations/deletesourcemetabase.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemetabase.go
@@ -10,8 +10,39 @@ type DeleteSourceMetabaseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMetabaseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMetabaseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMetabaseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMetabaseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMetabaseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemicrosoftteams.go b/internal/sdk/pkg/models/operations/deletesourcemicrosoftteams.go
old mode 100755
new mode 100644
index cedfc8c99..aa84a92e2
--- a/internal/sdk/pkg/models/operations/deletesourcemicrosoftteams.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemicrosoftteams.go
@@ -10,8 +10,39 @@ type DeleteSourceMicrosoftTeamsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMicrosoftTeamsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMicrosoftTeamsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMicrosoftTeamsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMicrosoftTeamsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMicrosoftTeamsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemixpanel.go b/internal/sdk/pkg/models/operations/deletesourcemixpanel.go
old mode 100755
new mode 100644
index 6a81783d0..fe83b52f4
--- a/internal/sdk/pkg/models/operations/deletesourcemixpanel.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemixpanel.go
@@ -10,8 +10,39 @@ type DeleteSourceMixpanelRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMixpanelRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMixpanelResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMixpanelResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMixpanelResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMixpanelResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemonday.go b/internal/sdk/pkg/models/operations/deletesourcemonday.go
old mode 100755
new mode 100644
index 1eead0d36..bde0caecf
--- a/internal/sdk/pkg/models/operations/deletesourcemonday.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemonday.go
@@ -10,8 +10,39 @@ type DeleteSourceMondayRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMondayRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMondayResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMondayResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMondayResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMondayResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemongodb.go b/internal/sdk/pkg/models/operations/deletesourcemongodb.go
deleted file mode 100755
index 9e77b87ad..000000000
--- a/internal/sdk/pkg/models/operations/deletesourcemongodb.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "net/http"
-)
-
-type DeleteSourceMongodbRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type DeleteSourceMongodbResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemongodbinternalpoc.go b/internal/sdk/pkg/models/operations/deletesourcemongodbinternalpoc.go
old mode 100755
new mode 100644
index db593f146..bbc860d5b
--- a/internal/sdk/pkg/models/operations/deletesourcemongodbinternalpoc.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemongodbinternalpoc.go
@@ -10,8 +10,39 @@ type DeleteSourceMongodbInternalPocRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMongodbInternalPocRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMongodbInternalPocResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMongodbInternalPocResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMongodbInternalPocResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMongodbInternalPocResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemongodbv2.go b/internal/sdk/pkg/models/operations/deletesourcemongodbv2.go
new file mode 100644
index 000000000..87a7d5fef
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/deletesourcemongodbv2.go
@@ -0,0 +1,48 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "net/http"
+)
+
+type DeleteSourceMongodbV2Request struct {
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *DeleteSourceMongodbV2Request) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type DeleteSourceMongodbV2Response struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *DeleteSourceMongodbV2Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMongodbV2Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMongodbV2Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemssql.go b/internal/sdk/pkg/models/operations/deletesourcemssql.go
old mode 100755
new mode 100644
index 57551c2e8..794f633df
--- a/internal/sdk/pkg/models/operations/deletesourcemssql.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemssql.go
@@ -10,8 +10,39 @@ type DeleteSourceMssqlRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMssqlRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMssqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMssqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMssqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMssqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemyhours.go b/internal/sdk/pkg/models/operations/deletesourcemyhours.go
old mode 100755
new mode 100644
index 6d7932ffa..db6aacfe3
--- a/internal/sdk/pkg/models/operations/deletesourcemyhours.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemyhours.go
@@ -10,8 +10,39 @@ type DeleteSourceMyHoursRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMyHoursRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMyHoursResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMyHoursResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMyHoursResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMyHoursResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcemysql.go b/internal/sdk/pkg/models/operations/deletesourcemysql.go
old mode 100755
new mode 100644
index b3d214570..b046ca7c6
--- a/internal/sdk/pkg/models/operations/deletesourcemysql.go
+++ b/internal/sdk/pkg/models/operations/deletesourcemysql.go
@@ -10,8 +10,39 @@ type DeleteSourceMysqlRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceMysqlRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceMysqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceMysqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceMysqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceMysqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcenetsuite.go b/internal/sdk/pkg/models/operations/deletesourcenetsuite.go
old mode 100755
new mode 100644
index bc44a244a..25c4095ba
--- a/internal/sdk/pkg/models/operations/deletesourcenetsuite.go
+++ b/internal/sdk/pkg/models/operations/deletesourcenetsuite.go
@@ -10,8 +10,39 @@ type DeleteSourceNetsuiteRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceNetsuiteRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceNetsuiteResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceNetsuiteResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceNetsuiteResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceNetsuiteResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcenotion.go b/internal/sdk/pkg/models/operations/deletesourcenotion.go
old mode 100755
new mode 100644
index 72b4d161a..ecd2e307b
--- a/internal/sdk/pkg/models/operations/deletesourcenotion.go
+++ b/internal/sdk/pkg/models/operations/deletesourcenotion.go
@@ -10,8 +10,39 @@ type DeleteSourceNotionRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceNotionRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceNotionResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceNotionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceNotionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceNotionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcenytimes.go b/internal/sdk/pkg/models/operations/deletesourcenytimes.go
old mode 100755
new mode 100644
index e4be998be..f96f8e28c
--- a/internal/sdk/pkg/models/operations/deletesourcenytimes.go
+++ b/internal/sdk/pkg/models/operations/deletesourcenytimes.go
@@ -10,8 +10,39 @@ type DeleteSourceNytimesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceNytimesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceNytimesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceNytimesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceNytimesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceNytimesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceokta.go b/internal/sdk/pkg/models/operations/deletesourceokta.go
old mode 100755
new mode 100644
index 19544cb23..6f346767b
--- a/internal/sdk/pkg/models/operations/deletesourceokta.go
+++ b/internal/sdk/pkg/models/operations/deletesourceokta.go
@@ -10,8 +10,39 @@ type DeleteSourceOktaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceOktaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceOktaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceOktaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceOktaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceOktaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceomnisend.go b/internal/sdk/pkg/models/operations/deletesourceomnisend.go
old mode 100755
new mode 100644
index 68dce3686..f0fb31947
--- a/internal/sdk/pkg/models/operations/deletesourceomnisend.go
+++ b/internal/sdk/pkg/models/operations/deletesourceomnisend.go
@@ -10,8 +10,39 @@ type DeleteSourceOmnisendRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceOmnisendRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceOmnisendResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceOmnisendResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceOmnisendResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceOmnisendResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceonesignal.go b/internal/sdk/pkg/models/operations/deletesourceonesignal.go
old mode 100755
new mode 100644
index cdcb8795b..640d75fd1
--- a/internal/sdk/pkg/models/operations/deletesourceonesignal.go
+++ b/internal/sdk/pkg/models/operations/deletesourceonesignal.go
@@ -10,8 +10,39 @@ type DeleteSourceOnesignalRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceOnesignalRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceOnesignalResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceOnesignalResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceOnesignalResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceOnesignalResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceoracle.go b/internal/sdk/pkg/models/operations/deletesourceoracle.go
old mode 100755
new mode 100644
index 98a23bca5..1cc40959b
--- a/internal/sdk/pkg/models/operations/deletesourceoracle.go
+++ b/internal/sdk/pkg/models/operations/deletesourceoracle.go
@@ -10,8 +10,39 @@ type DeleteSourceOracleRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceOracleRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceOracleResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceOracleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceOracleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceOracleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceorb.go b/internal/sdk/pkg/models/operations/deletesourceorb.go
old mode 100755
new mode 100644
index 3ce3df104..62b97e7b5
--- a/internal/sdk/pkg/models/operations/deletesourceorb.go
+++ b/internal/sdk/pkg/models/operations/deletesourceorb.go
@@ -10,8 +10,39 @@ type DeleteSourceOrbRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceOrbRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceOrbResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceOrbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceOrbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceOrbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceorbit.go b/internal/sdk/pkg/models/operations/deletesourceorbit.go
old mode 100755
new mode 100644
index b474709f4..0dd0c7de8
--- a/internal/sdk/pkg/models/operations/deletesourceorbit.go
+++ b/internal/sdk/pkg/models/operations/deletesourceorbit.go
@@ -10,8 +10,39 @@ type DeleteSourceOrbitRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceOrbitRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceOrbitResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceOrbitResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceOrbitResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceOrbitResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceoutbrainamplify.go b/internal/sdk/pkg/models/operations/deletesourceoutbrainamplify.go
old mode 100755
new mode 100644
index 01c7aec5d..548feaacf
--- a/internal/sdk/pkg/models/operations/deletesourceoutbrainamplify.go
+++ b/internal/sdk/pkg/models/operations/deletesourceoutbrainamplify.go
@@ -10,8 +10,39 @@ type DeleteSourceOutbrainAmplifyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceOutbrainAmplifyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceOutbrainAmplifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceOutbrainAmplifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceOutbrainAmplifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceOutbrainAmplifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceoutreach.go b/internal/sdk/pkg/models/operations/deletesourceoutreach.go
old mode 100755
new mode 100644
index 51cde2c1d..7810f9334
--- a/internal/sdk/pkg/models/operations/deletesourceoutreach.go
+++ b/internal/sdk/pkg/models/operations/deletesourceoutreach.go
@@ -10,8 +10,39 @@ type DeleteSourceOutreachRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceOutreachRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceOutreachResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceOutreachResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceOutreachResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceOutreachResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepaypaltransaction.go b/internal/sdk/pkg/models/operations/deletesourcepaypaltransaction.go
old mode 100755
new mode 100644
index 9cd54296c..c3a4376f7
--- a/internal/sdk/pkg/models/operations/deletesourcepaypaltransaction.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepaypaltransaction.go
@@ -10,8 +10,39 @@ type DeleteSourcePaypalTransactionRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePaypalTransactionRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePaypalTransactionResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePaypalTransactionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePaypalTransactionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePaypalTransactionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepaystack.go b/internal/sdk/pkg/models/operations/deletesourcepaystack.go
old mode 100755
new mode 100644
index 2ee022aeb..c2eaa9b0d
--- a/internal/sdk/pkg/models/operations/deletesourcepaystack.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepaystack.go
@@ -10,8 +10,39 @@ type DeleteSourcePaystackRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePaystackRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePaystackResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePaystackResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePaystackResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePaystackResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcependo.go b/internal/sdk/pkg/models/operations/deletesourcependo.go
old mode 100755
new mode 100644
index 612b5f3ba..1983b27ac
--- a/internal/sdk/pkg/models/operations/deletesourcependo.go
+++ b/internal/sdk/pkg/models/operations/deletesourcependo.go
@@ -10,8 +10,39 @@ type DeleteSourcePendoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePendoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePendoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePendoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePendoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePendoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepersistiq.go b/internal/sdk/pkg/models/operations/deletesourcepersistiq.go
old mode 100755
new mode 100644
index e1c9d26d8..68b5db747
--- a/internal/sdk/pkg/models/operations/deletesourcepersistiq.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepersistiq.go
@@ -10,8 +10,39 @@ type DeleteSourcePersistiqRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePersistiqRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePersistiqResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePersistiqResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePersistiqResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePersistiqResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepexelsapi.go b/internal/sdk/pkg/models/operations/deletesourcepexelsapi.go
old mode 100755
new mode 100644
index fe1cc5f99..cf505e4d5
--- a/internal/sdk/pkg/models/operations/deletesourcepexelsapi.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepexelsapi.go
@@ -10,8 +10,39 @@ type DeleteSourcePexelsAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePexelsAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePexelsAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePexelsAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePexelsAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePexelsAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepinterest.go b/internal/sdk/pkg/models/operations/deletesourcepinterest.go
old mode 100755
new mode 100644
index f9a6685e7..c513e3fc0
--- a/internal/sdk/pkg/models/operations/deletesourcepinterest.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepinterest.go
@@ -10,8 +10,39 @@ type DeleteSourcePinterestRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePinterestRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePinterestResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePinterestResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePinterestResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePinterestResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepipedrive.go b/internal/sdk/pkg/models/operations/deletesourcepipedrive.go
old mode 100755
new mode 100644
index a40d7c96b..95c3906fd
--- a/internal/sdk/pkg/models/operations/deletesourcepipedrive.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepipedrive.go
@@ -10,8 +10,39 @@ type DeleteSourcePipedriveRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePipedriveRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePipedriveResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePipedriveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePipedriveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePipedriveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepocket.go b/internal/sdk/pkg/models/operations/deletesourcepocket.go
old mode 100755
new mode 100644
index f59f7ff8d..c65754c99
--- a/internal/sdk/pkg/models/operations/deletesourcepocket.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepocket.go
@@ -10,8 +10,39 @@ type DeleteSourcePocketRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePocketRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePocketResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePocketResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePocketResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePocketResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepokeapi.go b/internal/sdk/pkg/models/operations/deletesourcepokeapi.go
old mode 100755
new mode 100644
index d44c6e4b7..220e81c4d
--- a/internal/sdk/pkg/models/operations/deletesourcepokeapi.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepokeapi.go
@@ -10,8 +10,39 @@ type DeleteSourcePokeapiRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePokeapiRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePokeapiResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePokeapiResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePokeapiResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePokeapiResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepolygonstockapi.go b/internal/sdk/pkg/models/operations/deletesourcepolygonstockapi.go
old mode 100755
new mode 100644
index bb96d457b..e2d57fb26
--- a/internal/sdk/pkg/models/operations/deletesourcepolygonstockapi.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepolygonstockapi.go
@@ -10,8 +10,39 @@ type DeleteSourcePolygonStockAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePolygonStockAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePolygonStockAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePolygonStockAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePolygonStockAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePolygonStockAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepostgres.go b/internal/sdk/pkg/models/operations/deletesourcepostgres.go
old mode 100755
new mode 100644
index c4bfbf80d..94cdd3a96
--- a/internal/sdk/pkg/models/operations/deletesourcepostgres.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepostgres.go
@@ -10,8 +10,39 @@ type DeleteSourcePostgresRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePostgresRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePostgresResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePostgresResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePostgresResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePostgresResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceposthog.go b/internal/sdk/pkg/models/operations/deletesourceposthog.go
old mode 100755
new mode 100644
index 846c3c1f5..e13e2f40e
--- a/internal/sdk/pkg/models/operations/deletesourceposthog.go
+++ b/internal/sdk/pkg/models/operations/deletesourceposthog.go
@@ -10,8 +10,39 @@ type DeleteSourcePosthogRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePosthogRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePosthogResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePosthogResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePosthogResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePosthogResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepostmarkapp.go b/internal/sdk/pkg/models/operations/deletesourcepostmarkapp.go
old mode 100755
new mode 100644
index 025d919c3..af325c79e
--- a/internal/sdk/pkg/models/operations/deletesourcepostmarkapp.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepostmarkapp.go
@@ -10,8 +10,39 @@ type DeleteSourcePostmarkappRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePostmarkappRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePostmarkappResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePostmarkappResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePostmarkappResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePostmarkappResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceprestashop.go b/internal/sdk/pkg/models/operations/deletesourceprestashop.go
old mode 100755
new mode 100644
index e53ce6054..6babd76bb
--- a/internal/sdk/pkg/models/operations/deletesourceprestashop.go
+++ b/internal/sdk/pkg/models/operations/deletesourceprestashop.go
@@ -10,8 +10,39 @@ type DeleteSourcePrestashopRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePrestashopRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePrestashopResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePrestashopResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePrestashopResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePrestashopResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepunkapi.go b/internal/sdk/pkg/models/operations/deletesourcepunkapi.go
old mode 100755
new mode 100644
index 116fe5f75..a779cb183
--- a/internal/sdk/pkg/models/operations/deletesourcepunkapi.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepunkapi.go
@@ -10,8 +10,39 @@ type DeleteSourcePunkAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePunkAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePunkAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePunkAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePunkAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePunkAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcepypi.go b/internal/sdk/pkg/models/operations/deletesourcepypi.go
old mode 100755
new mode 100644
index a53c4725a..5c8627bf2
--- a/internal/sdk/pkg/models/operations/deletesourcepypi.go
+++ b/internal/sdk/pkg/models/operations/deletesourcepypi.go
@@ -10,8 +10,39 @@ type DeleteSourcePypiRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourcePypiRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourcePypiResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourcePypiResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourcePypiResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourcePypiResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcequalaroo.go b/internal/sdk/pkg/models/operations/deletesourcequalaroo.go
old mode 100755
new mode 100644
index 5c4c109ce..7cdfdb1a0
--- a/internal/sdk/pkg/models/operations/deletesourcequalaroo.go
+++ b/internal/sdk/pkg/models/operations/deletesourcequalaroo.go
@@ -10,8 +10,39 @@ type DeleteSourceQualarooRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceQualarooRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceQualarooResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceQualarooResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceQualarooResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceQualarooResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcequickbooks.go b/internal/sdk/pkg/models/operations/deletesourcequickbooks.go
old mode 100755
new mode 100644
index 203de3758..49aaed3ac
--- a/internal/sdk/pkg/models/operations/deletesourcequickbooks.go
+++ b/internal/sdk/pkg/models/operations/deletesourcequickbooks.go
@@ -10,8 +10,39 @@ type DeleteSourceQuickbooksRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceQuickbooksRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceQuickbooksResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceQuickbooksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceQuickbooksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceQuickbooksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcerailz.go b/internal/sdk/pkg/models/operations/deletesourcerailz.go
old mode 100755
new mode 100644
index e03d312bf..bf56bd9fb
--- a/internal/sdk/pkg/models/operations/deletesourcerailz.go
+++ b/internal/sdk/pkg/models/operations/deletesourcerailz.go
@@ -10,8 +10,39 @@ type DeleteSourceRailzRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceRailzRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceRailzResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceRailzResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceRailzResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceRailzResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcerecharge.go b/internal/sdk/pkg/models/operations/deletesourcerecharge.go
old mode 100755
new mode 100644
index 47cfcdbc5..e806e955a
--- a/internal/sdk/pkg/models/operations/deletesourcerecharge.go
+++ b/internal/sdk/pkg/models/operations/deletesourcerecharge.go
@@ -10,8 +10,39 @@ type DeleteSourceRechargeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceRechargeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceRechargeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceRechargeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceRechargeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceRechargeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcerecreation.go b/internal/sdk/pkg/models/operations/deletesourcerecreation.go
old mode 100755
new mode 100644
index 13cd206f4..925741c36
--- a/internal/sdk/pkg/models/operations/deletesourcerecreation.go
+++ b/internal/sdk/pkg/models/operations/deletesourcerecreation.go
@@ -10,8 +10,39 @@ type DeleteSourceRecreationRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceRecreationRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceRecreationResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceRecreationResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceRecreationResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceRecreationResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcerecruitee.go b/internal/sdk/pkg/models/operations/deletesourcerecruitee.go
old mode 100755
new mode 100644
index 46edc3a1a..917139092
--- a/internal/sdk/pkg/models/operations/deletesourcerecruitee.go
+++ b/internal/sdk/pkg/models/operations/deletesourcerecruitee.go
@@ -10,8 +10,39 @@ type DeleteSourceRecruiteeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceRecruiteeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceRecruiteeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceRecruiteeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceRecruiteeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceRecruiteeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcerecurly.go b/internal/sdk/pkg/models/operations/deletesourcerecurly.go
old mode 100755
new mode 100644
index e90bfea63..754aca088
--- a/internal/sdk/pkg/models/operations/deletesourcerecurly.go
+++ b/internal/sdk/pkg/models/operations/deletesourcerecurly.go
@@ -10,8 +10,39 @@ type DeleteSourceRecurlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceRecurlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceRecurlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceRecurlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceRecurlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceRecurlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceredshift.go b/internal/sdk/pkg/models/operations/deletesourceredshift.go
old mode 100755
new mode 100644
index 731a7661f..1cc8820f7
--- a/internal/sdk/pkg/models/operations/deletesourceredshift.go
+++ b/internal/sdk/pkg/models/operations/deletesourceredshift.go
@@ -10,8 +10,39 @@ type DeleteSourceRedshiftRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceRedshiftRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceRedshiftResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceRedshiftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceRedshiftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceRedshiftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceretently.go b/internal/sdk/pkg/models/operations/deletesourceretently.go
old mode 100755
new mode 100644
index 2d61db1bc..f18e68110
--- a/internal/sdk/pkg/models/operations/deletesourceretently.go
+++ b/internal/sdk/pkg/models/operations/deletesourceretently.go
@@ -10,8 +10,39 @@ type DeleteSourceRetentlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceRetentlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceRetentlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceRetentlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceRetentlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceRetentlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcerkicovid.go b/internal/sdk/pkg/models/operations/deletesourcerkicovid.go
old mode 100755
new mode 100644
index c70f9986e..441618080
--- a/internal/sdk/pkg/models/operations/deletesourcerkicovid.go
+++ b/internal/sdk/pkg/models/operations/deletesourcerkicovid.go
@@ -10,8 +10,39 @@ type DeleteSourceRkiCovidRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceRkiCovidRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceRkiCovidResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceRkiCovidResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceRkiCovidResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceRkiCovidResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcerss.go b/internal/sdk/pkg/models/operations/deletesourcerss.go
old mode 100755
new mode 100644
index 6d96e9627..ea9bb45e7
--- a/internal/sdk/pkg/models/operations/deletesourcerss.go
+++ b/internal/sdk/pkg/models/operations/deletesourcerss.go
@@ -10,8 +10,39 @@ type DeleteSourceRssRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceRssRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceRssResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceRssResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceRssResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceRssResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesources3.go b/internal/sdk/pkg/models/operations/deletesources3.go
old mode 100755
new mode 100644
index 161f72353..98d9b60c7
--- a/internal/sdk/pkg/models/operations/deletesources3.go
+++ b/internal/sdk/pkg/models/operations/deletesources3.go
@@ -10,8 +10,39 @@ type DeleteSourceS3Request struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceS3Request) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceS3Response struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceS3Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceS3Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceS3Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesalesforce.go b/internal/sdk/pkg/models/operations/deletesourcesalesforce.go
old mode 100755
new mode 100644
index f0750816e..493ab86d8
--- a/internal/sdk/pkg/models/operations/deletesourcesalesforce.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesalesforce.go
@@ -10,8 +10,39 @@ type DeleteSourceSalesforceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSalesforceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSalesforceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSalesforceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSalesforceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSalesforceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesalesloft.go b/internal/sdk/pkg/models/operations/deletesourcesalesloft.go
old mode 100755
new mode 100644
index 5570670f5..c7f1badac
--- a/internal/sdk/pkg/models/operations/deletesourcesalesloft.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesalesloft.go
@@ -10,8 +10,39 @@ type DeleteSourceSalesloftRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSalesloftRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSalesloftResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSalesloftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSalesloftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSalesloftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesapfieldglass.go b/internal/sdk/pkg/models/operations/deletesourcesapfieldglass.go
old mode 100755
new mode 100644
index ced7d6255..208b522e0
--- a/internal/sdk/pkg/models/operations/deletesourcesapfieldglass.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesapfieldglass.go
@@ -10,8 +10,39 @@ type DeleteSourceSapFieldglassRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSapFieldglassRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSapFieldglassResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSapFieldglassResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSapFieldglassResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSapFieldglassResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesecoda.go b/internal/sdk/pkg/models/operations/deletesourcesecoda.go
old mode 100755
new mode 100644
index 3d91a2e09..27e6b1128
--- a/internal/sdk/pkg/models/operations/deletesourcesecoda.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesecoda.go
@@ -10,8 +10,39 @@ type DeleteSourceSecodaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSecodaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSecodaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSecodaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSecodaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSecodaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesendgrid.go b/internal/sdk/pkg/models/operations/deletesourcesendgrid.go
old mode 100755
new mode 100644
index fc8bc38f7..76213deeb
--- a/internal/sdk/pkg/models/operations/deletesourcesendgrid.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesendgrid.go
@@ -10,8 +10,39 @@ type DeleteSourceSendgridRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSendgridRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSendgridResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSendgridResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSendgridResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSendgridResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesendinblue.go b/internal/sdk/pkg/models/operations/deletesourcesendinblue.go
old mode 100755
new mode 100644
index 78a5ff87b..60830f4f0
--- a/internal/sdk/pkg/models/operations/deletesourcesendinblue.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesendinblue.go
@@ -10,8 +10,39 @@ type DeleteSourceSendinblueRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSendinblueRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSendinblueResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSendinblueResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSendinblueResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSendinblueResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesenseforce.go b/internal/sdk/pkg/models/operations/deletesourcesenseforce.go
old mode 100755
new mode 100644
index 54fa1f96a..31a038eb2
--- a/internal/sdk/pkg/models/operations/deletesourcesenseforce.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesenseforce.go
@@ -10,8 +10,39 @@ type DeleteSourceSenseforceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSenseforceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSenseforceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSenseforceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSenseforceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSenseforceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesentry.go b/internal/sdk/pkg/models/operations/deletesourcesentry.go
old mode 100755
new mode 100644
index 81b24fbc3..159c87c9a
--- a/internal/sdk/pkg/models/operations/deletesourcesentry.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesentry.go
@@ -10,8 +10,39 @@ type DeleteSourceSentryRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSentryRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSentryResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSentryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSentryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSentryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesftp.go b/internal/sdk/pkg/models/operations/deletesourcesftp.go
old mode 100755
new mode 100644
index 4ce7b619e..3849b9933
--- a/internal/sdk/pkg/models/operations/deletesourcesftp.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesftp.go
@@ -10,8 +10,39 @@ type DeleteSourceSftpRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSftpRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSftpResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSftpResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSftpResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSftpResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesftpbulk.go b/internal/sdk/pkg/models/operations/deletesourcesftpbulk.go
old mode 100755
new mode 100644
index 9d8524eb6..c3e48821b
--- a/internal/sdk/pkg/models/operations/deletesourcesftpbulk.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesftpbulk.go
@@ -10,8 +10,39 @@ type DeleteSourceSftpBulkRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSftpBulkRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSftpBulkResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSftpBulkResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSftpBulkResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSftpBulkResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceshopify.go b/internal/sdk/pkg/models/operations/deletesourceshopify.go
old mode 100755
new mode 100644
index d55ea7cce..b9d9eceb6
--- a/internal/sdk/pkg/models/operations/deletesourceshopify.go
+++ b/internal/sdk/pkg/models/operations/deletesourceshopify.go
@@ -10,8 +10,39 @@ type DeleteSourceShopifyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceShopifyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceShopifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceShopifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceShopifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceShopifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceshortio.go b/internal/sdk/pkg/models/operations/deletesourceshortio.go
old mode 100755
new mode 100644
index 354f54017..37289509b
--- a/internal/sdk/pkg/models/operations/deletesourceshortio.go
+++ b/internal/sdk/pkg/models/operations/deletesourceshortio.go
@@ -10,8 +10,39 @@ type DeleteSourceShortioRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceShortioRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceShortioResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceShortioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceShortioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceShortioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceslack.go b/internal/sdk/pkg/models/operations/deletesourceslack.go
old mode 100755
new mode 100644
index d3b28ebcc..e5c9273a4
--- a/internal/sdk/pkg/models/operations/deletesourceslack.go
+++ b/internal/sdk/pkg/models/operations/deletesourceslack.go
@@ -10,8 +10,39 @@ type DeleteSourceSlackRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSlackRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSlackResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSlackResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSlackResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSlackResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesmaily.go b/internal/sdk/pkg/models/operations/deletesourcesmaily.go
old mode 100755
new mode 100644
index 6e0c4b5e2..1aa5744f3
--- a/internal/sdk/pkg/models/operations/deletesourcesmaily.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesmaily.go
@@ -10,8 +10,39 @@ type DeleteSourceSmailyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSmailyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSmailyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSmailyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSmailyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSmailyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesmartengage.go b/internal/sdk/pkg/models/operations/deletesourcesmartengage.go
old mode 100755
new mode 100644
index 1a634e171..4302dd82d
--- a/internal/sdk/pkg/models/operations/deletesourcesmartengage.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesmartengage.go
@@ -10,8 +10,39 @@ type DeleteSourceSmartengageRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSmartengageRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSmartengageResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSmartengageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSmartengageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSmartengageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesmartsheets.go b/internal/sdk/pkg/models/operations/deletesourcesmartsheets.go
old mode 100755
new mode 100644
index a08f48306..38c59c43a
--- a/internal/sdk/pkg/models/operations/deletesourcesmartsheets.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesmartsheets.go
@@ -10,8 +10,39 @@ type DeleteSourceSmartsheetsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSmartsheetsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSmartsheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSmartsheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSmartsheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSmartsheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesnapchatmarketing.go b/internal/sdk/pkg/models/operations/deletesourcesnapchatmarketing.go
old mode 100755
new mode 100644
index 55b6cae79..5d363ac32
--- a/internal/sdk/pkg/models/operations/deletesourcesnapchatmarketing.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesnapchatmarketing.go
@@ -10,8 +10,39 @@ type DeleteSourceSnapchatMarketingRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSnapchatMarketingRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSnapchatMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSnapchatMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSnapchatMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSnapchatMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesnowflake.go b/internal/sdk/pkg/models/operations/deletesourcesnowflake.go
old mode 100755
new mode 100644
index b2285d55a..7a822fbf4
--- a/internal/sdk/pkg/models/operations/deletesourcesnowflake.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesnowflake.go
@@ -10,8 +10,39 @@ type DeleteSourceSnowflakeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSnowflakeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSnowflakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSnowflakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSnowflakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSnowflakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesonarcloud.go b/internal/sdk/pkg/models/operations/deletesourcesonarcloud.go
old mode 100755
new mode 100644
index 9f4c2c593..5ded12e2e
--- a/internal/sdk/pkg/models/operations/deletesourcesonarcloud.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesonarcloud.go
@@ -10,8 +10,39 @@ type DeleteSourceSonarCloudRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSonarCloudRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSonarCloudResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSonarCloudResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSonarCloudResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSonarCloudResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcespacexapi.go b/internal/sdk/pkg/models/operations/deletesourcespacexapi.go
old mode 100755
new mode 100644
index 6a1ddf19a..2bc56f398
--- a/internal/sdk/pkg/models/operations/deletesourcespacexapi.go
+++ b/internal/sdk/pkg/models/operations/deletesourcespacexapi.go
@@ -10,8 +10,39 @@ type DeleteSourceSpacexAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSpacexAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSpacexAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSpacexAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSpacexAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSpacexAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesquare.go b/internal/sdk/pkg/models/operations/deletesourcesquare.go
old mode 100755
new mode 100644
index f60d366b2..931bc9330
--- a/internal/sdk/pkg/models/operations/deletesourcesquare.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesquare.go
@@ -10,8 +10,39 @@ type DeleteSourceSquareRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSquareRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSquareResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSquareResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSquareResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSquareResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcestrava.go b/internal/sdk/pkg/models/operations/deletesourcestrava.go
old mode 100755
new mode 100644
index d0acb1b6c..1f2facaa0
--- a/internal/sdk/pkg/models/operations/deletesourcestrava.go
+++ b/internal/sdk/pkg/models/operations/deletesourcestrava.go
@@ -10,8 +10,39 @@ type DeleteSourceStravaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceStravaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceStravaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceStravaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceStravaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceStravaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcestripe.go b/internal/sdk/pkg/models/operations/deletesourcestripe.go
old mode 100755
new mode 100644
index 3fe93ac88..a2780f02d
--- a/internal/sdk/pkg/models/operations/deletesourcestripe.go
+++ b/internal/sdk/pkg/models/operations/deletesourcestripe.go
@@ -10,8 +10,39 @@ type DeleteSourceStripeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceStripeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceStripeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceStripeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceStripeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceStripeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesurveymonkey.go b/internal/sdk/pkg/models/operations/deletesourcesurveymonkey.go
old mode 100755
new mode 100644
index 45d582695..f6f3725c3
--- a/internal/sdk/pkg/models/operations/deletesourcesurveymonkey.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesurveymonkey.go
@@ -10,8 +10,39 @@ type DeleteSourceSurveymonkeyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSurveymonkeyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSurveymonkeyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSurveymonkeyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSurveymonkeyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSurveymonkeyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcesurveysparrow.go b/internal/sdk/pkg/models/operations/deletesourcesurveysparrow.go
old mode 100755
new mode 100644
index 743bc2a41..d96c40d01
--- a/internal/sdk/pkg/models/operations/deletesourcesurveysparrow.go
+++ b/internal/sdk/pkg/models/operations/deletesourcesurveysparrow.go
@@ -10,8 +10,39 @@ type DeleteSourceSurveySparrowRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceSurveySparrowRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceSurveySparrowResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceSurveySparrowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceSurveySparrowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceSurveySparrowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcetempo.go b/internal/sdk/pkg/models/operations/deletesourcetempo.go
old mode 100755
new mode 100644
index 3cd7777cd..1cedb4246
--- a/internal/sdk/pkg/models/operations/deletesourcetempo.go
+++ b/internal/sdk/pkg/models/operations/deletesourcetempo.go
@@ -10,8 +10,39 @@ type DeleteSourceTempoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceTempoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceTempoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceTempoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceTempoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceTempoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcetheguardianapi.go b/internal/sdk/pkg/models/operations/deletesourcetheguardianapi.go
old mode 100755
new mode 100644
index 583f8d01a..a21513938
--- a/internal/sdk/pkg/models/operations/deletesourcetheguardianapi.go
+++ b/internal/sdk/pkg/models/operations/deletesourcetheguardianapi.go
@@ -10,8 +10,39 @@ type DeleteSourceTheGuardianAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceTheGuardianAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceTheGuardianAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceTheGuardianAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceTheGuardianAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceTheGuardianAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcetiktokmarketing.go b/internal/sdk/pkg/models/operations/deletesourcetiktokmarketing.go
old mode 100755
new mode 100644
index 483bd96dd..a5c8af456
--- a/internal/sdk/pkg/models/operations/deletesourcetiktokmarketing.go
+++ b/internal/sdk/pkg/models/operations/deletesourcetiktokmarketing.go
@@ -10,8 +10,39 @@ type DeleteSourceTiktokMarketingRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceTiktokMarketingRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceTiktokMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceTiktokMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceTiktokMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceTiktokMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcetodoist.go b/internal/sdk/pkg/models/operations/deletesourcetodoist.go
old mode 100755
new mode 100644
index 08d0283d9..0a823bde4
--- a/internal/sdk/pkg/models/operations/deletesourcetodoist.go
+++ b/internal/sdk/pkg/models/operations/deletesourcetodoist.go
@@ -10,8 +10,39 @@ type DeleteSourceTodoistRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceTodoistRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceTodoistResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceTodoistResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceTodoistResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceTodoistResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcetrello.go b/internal/sdk/pkg/models/operations/deletesourcetrello.go
old mode 100755
new mode 100644
index 49ba7893f..330b6b98c
--- a/internal/sdk/pkg/models/operations/deletesourcetrello.go
+++ b/internal/sdk/pkg/models/operations/deletesourcetrello.go
@@ -10,8 +10,39 @@ type DeleteSourceTrelloRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceTrelloRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceTrelloResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceTrelloResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceTrelloResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceTrelloResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcetrustpilot.go b/internal/sdk/pkg/models/operations/deletesourcetrustpilot.go
old mode 100755
new mode 100644
index 1812f181b..a9d1043cf
--- a/internal/sdk/pkg/models/operations/deletesourcetrustpilot.go
+++ b/internal/sdk/pkg/models/operations/deletesourcetrustpilot.go
@@ -10,8 +10,39 @@ type DeleteSourceTrustpilotRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceTrustpilotRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceTrustpilotResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceTrustpilotResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceTrustpilotResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceTrustpilotResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcetvmazeschedule.go b/internal/sdk/pkg/models/operations/deletesourcetvmazeschedule.go
old mode 100755
new mode 100644
index c78cab8c8..6314b2867
--- a/internal/sdk/pkg/models/operations/deletesourcetvmazeschedule.go
+++ b/internal/sdk/pkg/models/operations/deletesourcetvmazeschedule.go
@@ -10,8 +10,39 @@ type DeleteSourceTvmazeScheduleRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceTvmazeScheduleRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceTvmazeScheduleResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceTvmazeScheduleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceTvmazeScheduleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceTvmazeScheduleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcetwilio.go b/internal/sdk/pkg/models/operations/deletesourcetwilio.go
old mode 100755
new mode 100644
index 9acc6a838..e5bdbdc99
--- a/internal/sdk/pkg/models/operations/deletesourcetwilio.go
+++ b/internal/sdk/pkg/models/operations/deletesourcetwilio.go
@@ -10,8 +10,39 @@ type DeleteSourceTwilioRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceTwilioRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceTwilioResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceTwilioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceTwilioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceTwilioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcetwiliotaskrouter.go b/internal/sdk/pkg/models/operations/deletesourcetwiliotaskrouter.go
old mode 100755
new mode 100644
index faa6e15ed..abca321fd
--- a/internal/sdk/pkg/models/operations/deletesourcetwiliotaskrouter.go
+++ b/internal/sdk/pkg/models/operations/deletesourcetwiliotaskrouter.go
@@ -10,8 +10,39 @@ type DeleteSourceTwilioTaskrouterRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceTwilioTaskrouterRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceTwilioTaskrouterResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceTwilioTaskrouterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceTwilioTaskrouterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceTwilioTaskrouterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcetwitter.go b/internal/sdk/pkg/models/operations/deletesourcetwitter.go
old mode 100755
new mode 100644
index 773dde1a6..000cc7809
--- a/internal/sdk/pkg/models/operations/deletesourcetwitter.go
+++ b/internal/sdk/pkg/models/operations/deletesourcetwitter.go
@@ -10,8 +10,39 @@ type DeleteSourceTwitterRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceTwitterRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceTwitterResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceTwitterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceTwitterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceTwitterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcetypeform.go b/internal/sdk/pkg/models/operations/deletesourcetypeform.go
old mode 100755
new mode 100644
index 7919a63b8..5006ea159
--- a/internal/sdk/pkg/models/operations/deletesourcetypeform.go
+++ b/internal/sdk/pkg/models/operations/deletesourcetypeform.go
@@ -10,8 +10,39 @@ type DeleteSourceTypeformRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceTypeformRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceTypeformResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceTypeformResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceTypeformResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceTypeformResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceuscensus.go b/internal/sdk/pkg/models/operations/deletesourceuscensus.go
old mode 100755
new mode 100644
index 749d60876..7285c8344
--- a/internal/sdk/pkg/models/operations/deletesourceuscensus.go
+++ b/internal/sdk/pkg/models/operations/deletesourceuscensus.go
@@ -10,8 +10,39 @@ type DeleteSourceUsCensusRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceUsCensusRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceUsCensusResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceUsCensusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceUsCensusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceUsCensusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcevantage.go b/internal/sdk/pkg/models/operations/deletesourcevantage.go
old mode 100755
new mode 100644
index ee6884993..c45c38d68
--- a/internal/sdk/pkg/models/operations/deletesourcevantage.go
+++ b/internal/sdk/pkg/models/operations/deletesourcevantage.go
@@ -10,8 +10,39 @@ type DeleteSourceVantageRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceVantageRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceVantageResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceVantageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceVantageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceVantageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcewebflow.go b/internal/sdk/pkg/models/operations/deletesourcewebflow.go
old mode 100755
new mode 100644
index 07eb63d56..326186096
--- a/internal/sdk/pkg/models/operations/deletesourcewebflow.go
+++ b/internal/sdk/pkg/models/operations/deletesourcewebflow.go
@@ -10,8 +10,39 @@ type DeleteSourceWebflowRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceWebflowRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceWebflowResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceWebflowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceWebflowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceWebflowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcewhiskyhunter.go b/internal/sdk/pkg/models/operations/deletesourcewhiskyhunter.go
old mode 100755
new mode 100644
index 42aa94b48..e23aa9f14
--- a/internal/sdk/pkg/models/operations/deletesourcewhiskyhunter.go
+++ b/internal/sdk/pkg/models/operations/deletesourcewhiskyhunter.go
@@ -10,8 +10,39 @@ type DeleteSourceWhiskyHunterRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceWhiskyHunterRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceWhiskyHunterResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceWhiskyHunterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceWhiskyHunterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceWhiskyHunterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcewikipediapageviews.go b/internal/sdk/pkg/models/operations/deletesourcewikipediapageviews.go
old mode 100755
new mode 100644
index 8467facc0..607d56d66
--- a/internal/sdk/pkg/models/operations/deletesourcewikipediapageviews.go
+++ b/internal/sdk/pkg/models/operations/deletesourcewikipediapageviews.go
@@ -10,8 +10,39 @@ type DeleteSourceWikipediaPageviewsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceWikipediaPageviewsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceWikipediaPageviewsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceWikipediaPageviewsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceWikipediaPageviewsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceWikipediaPageviewsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcewoocommerce.go b/internal/sdk/pkg/models/operations/deletesourcewoocommerce.go
old mode 100755
new mode 100644
index 1a54918d0..9d638fda3
--- a/internal/sdk/pkg/models/operations/deletesourcewoocommerce.go
+++ b/internal/sdk/pkg/models/operations/deletesourcewoocommerce.go
@@ -10,8 +10,39 @@ type DeleteSourceWoocommerceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceWoocommerceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceWoocommerceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceWoocommerceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceWoocommerceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceWoocommerceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcexero.go b/internal/sdk/pkg/models/operations/deletesourcexero.go
deleted file mode 100755
index 8c88458f4..000000000
--- a/internal/sdk/pkg/models/operations/deletesourcexero.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "net/http"
-)
-
-type DeleteSourceXeroRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type DeleteSourceXeroResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/deletesourcexkcd.go b/internal/sdk/pkg/models/operations/deletesourcexkcd.go
old mode 100755
new mode 100644
index 3c2488656..2a89a3627
--- a/internal/sdk/pkg/models/operations/deletesourcexkcd.go
+++ b/internal/sdk/pkg/models/operations/deletesourcexkcd.go
@@ -10,8 +10,39 @@ type DeleteSourceXkcdRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceXkcdRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceXkcdResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceXkcdResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceXkcdResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceXkcdResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceyandexmetrica.go b/internal/sdk/pkg/models/operations/deletesourceyandexmetrica.go
old mode 100755
new mode 100644
index e8a2b0908..6e9f2d0f4
--- a/internal/sdk/pkg/models/operations/deletesourceyandexmetrica.go
+++ b/internal/sdk/pkg/models/operations/deletesourceyandexmetrica.go
@@ -10,8 +10,39 @@ type DeleteSourceYandexMetricaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceYandexMetricaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceYandexMetricaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceYandexMetricaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceYandexMetricaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceYandexMetricaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceyotpo.go b/internal/sdk/pkg/models/operations/deletesourceyotpo.go
old mode 100755
new mode 100644
index 87abd00b5..37ac72a33
--- a/internal/sdk/pkg/models/operations/deletesourceyotpo.go
+++ b/internal/sdk/pkg/models/operations/deletesourceyotpo.go
@@ -10,8 +10,39 @@ type DeleteSourceYotpoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceYotpoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceYotpoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceYotpoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceYotpoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceYotpoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourceyounium.go b/internal/sdk/pkg/models/operations/deletesourceyounium.go
deleted file mode 100755
index 6588cf81e..000000000
--- a/internal/sdk/pkg/models/operations/deletesourceyounium.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "net/http"
-)
-
-type DeleteSourceYouniumRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type DeleteSourceYouniumResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/deletesourceyoutubeanalytics.go b/internal/sdk/pkg/models/operations/deletesourceyoutubeanalytics.go
old mode 100755
new mode 100644
index 146a09461..57ce97549
--- a/internal/sdk/pkg/models/operations/deletesourceyoutubeanalytics.go
+++ b/internal/sdk/pkg/models/operations/deletesourceyoutubeanalytics.go
@@ -10,8 +10,39 @@ type DeleteSourceYoutubeAnalyticsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceYoutubeAnalyticsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceYoutubeAnalyticsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceYoutubeAnalyticsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceYoutubeAnalyticsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceYoutubeAnalyticsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcezendeskchat.go b/internal/sdk/pkg/models/operations/deletesourcezendeskchat.go
old mode 100755
new mode 100644
index d7b19aaae..632b889a8
--- a/internal/sdk/pkg/models/operations/deletesourcezendeskchat.go
+++ b/internal/sdk/pkg/models/operations/deletesourcezendeskchat.go
@@ -10,8 +10,39 @@ type DeleteSourceZendeskChatRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceZendeskChatRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceZendeskChatResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceZendeskChatResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceZendeskChatResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceZendeskChatResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcezendesksell.go b/internal/sdk/pkg/models/operations/deletesourcezendesksell.go
new file mode 100644
index 000000000..b81dccd20
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/deletesourcezendesksell.go
@@ -0,0 +1,48 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "net/http"
+)
+
+type DeleteSourceZendeskSellRequest struct {
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *DeleteSourceZendeskSellRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type DeleteSourceZendeskSellResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *DeleteSourceZendeskSellResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceZendeskSellResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceZendeskSellResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcezendesksunshine.go b/internal/sdk/pkg/models/operations/deletesourcezendesksunshine.go
old mode 100755
new mode 100644
index e5b486659..f04b2f70b
--- a/internal/sdk/pkg/models/operations/deletesourcezendesksunshine.go
+++ b/internal/sdk/pkg/models/operations/deletesourcezendesksunshine.go
@@ -10,8 +10,39 @@ type DeleteSourceZendeskSunshineRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceZendeskSunshineRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceZendeskSunshineResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceZendeskSunshineResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceZendeskSunshineResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceZendeskSunshineResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcezendesksupport.go b/internal/sdk/pkg/models/operations/deletesourcezendesksupport.go
old mode 100755
new mode 100644
index 19e05fdf8..734c32f74
--- a/internal/sdk/pkg/models/operations/deletesourcezendesksupport.go
+++ b/internal/sdk/pkg/models/operations/deletesourcezendesksupport.go
@@ -10,8 +10,39 @@ type DeleteSourceZendeskSupportRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceZendeskSupportRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceZendeskSupportResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceZendeskSupportResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceZendeskSupportResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceZendeskSupportResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcezendesktalk.go b/internal/sdk/pkg/models/operations/deletesourcezendesktalk.go
old mode 100755
new mode 100644
index 778df4a80..e7823ba6d
--- a/internal/sdk/pkg/models/operations/deletesourcezendesktalk.go
+++ b/internal/sdk/pkg/models/operations/deletesourcezendesktalk.go
@@ -10,8 +10,39 @@ type DeleteSourceZendeskTalkRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceZendeskTalkRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceZendeskTalkResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceZendeskTalkResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceZendeskTalkResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceZendeskTalkResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcezenloop.go b/internal/sdk/pkg/models/operations/deletesourcezenloop.go
old mode 100755
new mode 100644
index ad83ad619..c975b1977
--- a/internal/sdk/pkg/models/operations/deletesourcezenloop.go
+++ b/internal/sdk/pkg/models/operations/deletesourcezenloop.go
@@ -10,8 +10,39 @@ type DeleteSourceZenloopRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceZenloopRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceZenloopResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceZenloopResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceZenloopResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceZenloopResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcezohocrm.go b/internal/sdk/pkg/models/operations/deletesourcezohocrm.go
old mode 100755
new mode 100644
index 5e872f65d..b34d606a8
--- a/internal/sdk/pkg/models/operations/deletesourcezohocrm.go
+++ b/internal/sdk/pkg/models/operations/deletesourcezohocrm.go
@@ -10,8 +10,39 @@ type DeleteSourceZohoCrmRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceZohoCrmRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceZohoCrmResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceZohoCrmResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceZohoCrmResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceZohoCrmResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcezoom.go b/internal/sdk/pkg/models/operations/deletesourcezoom.go
old mode 100755
new mode 100644
index 7b79e21b5..61cf89e2c
--- a/internal/sdk/pkg/models/operations/deletesourcezoom.go
+++ b/internal/sdk/pkg/models/operations/deletesourcezoom.go
@@ -10,8 +10,39 @@ type DeleteSourceZoomRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceZoomRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceZoomResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceZoomResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceZoomResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceZoomResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deletesourcezuora.go b/internal/sdk/pkg/models/operations/deletesourcezuora.go
old mode 100755
new mode 100644
index 538f21c26..a8ff38201
--- a/internal/sdk/pkg/models/operations/deletesourcezuora.go
+++ b/internal/sdk/pkg/models/operations/deletesourcezuora.go
@@ -10,8 +10,39 @@ type DeleteSourceZuoraRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *DeleteSourceZuoraRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type DeleteSourceZuoraResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteSourceZuoraResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteSourceZuoraResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteSourceZuoraResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/deleteworkspace.go b/internal/sdk/pkg/models/operations/deleteworkspace.go
old mode 100755
new mode 100644
index f9c820923..36d011bb9
--- a/internal/sdk/pkg/models/operations/deleteworkspace.go
+++ b/internal/sdk/pkg/models/operations/deleteworkspace.go
@@ -10,8 +10,39 @@ type DeleteWorkspaceRequest struct {
WorkspaceID string `pathParam:"style=simple,explode=false,name=workspaceId"`
}
+func (o *DeleteWorkspaceRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
+
type DeleteWorkspaceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *DeleteWorkspaceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *DeleteWorkspaceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *DeleteWorkspaceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/getconnection.go b/internal/sdk/pkg/models/operations/getconnection.go
old mode 100755
new mode 100644
index df4d6be99..0556ca29e
--- a/internal/sdk/pkg/models/operations/getconnection.go
+++ b/internal/sdk/pkg/models/operations/getconnection.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetConnectionRequest struct {
ConnectionID string `pathParam:"style=simple,explode=false,name=connectionId"`
}
+func (o *GetConnectionRequest) GetConnectionID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConnectionID
+}
+
type GetConnectionResponse struct {
// Get a Connection by the id in the path.
ConnectionResponse *shared.ConnectionResponse
- ContentType string
- StatusCode int
- RawResponse *http.Response
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetConnectionResponse) GetConnectionResponse() *shared.ConnectionResponse {
+ if o == nil {
+ return nil
+ }
+ return o.ConnectionResponse
+}
+
+func (o *GetConnectionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetConnectionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetConnectionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestination.go b/internal/sdk/pkg/models/operations/getdestination.go
old mode 100755
new mode 100644
index aacb3f187..bd1e3db6e
--- a/internal/sdk/pkg/models/operations/getdestination.go
+++ b/internal/sdk/pkg/models/operations/getdestination.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationawsdatalake.go b/internal/sdk/pkg/models/operations/getdestinationawsdatalake.go
old mode 100755
new mode 100644
index 9cc639370..d98a49fb5
--- a/internal/sdk/pkg/models/operations/getdestinationawsdatalake.go
+++ b/internal/sdk/pkg/models/operations/getdestinationawsdatalake.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationAwsDatalakeRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationAwsDatalakeRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationAwsDatalakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationAwsDatalakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationAwsDatalakeResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationAwsDatalakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationAwsDatalakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationazureblobstorage.go b/internal/sdk/pkg/models/operations/getdestinationazureblobstorage.go
old mode 100755
new mode 100644
index 6637ed30b..ac2bf5ceb
--- a/internal/sdk/pkg/models/operations/getdestinationazureblobstorage.go
+++ b/internal/sdk/pkg/models/operations/getdestinationazureblobstorage.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationAzureBlobStorageRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationAzureBlobStorageRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationAzureBlobStorageResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationAzureBlobStorageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationAzureBlobStorageResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationAzureBlobStorageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationAzureBlobStorageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationbigquery.go b/internal/sdk/pkg/models/operations/getdestinationbigquery.go
old mode 100755
new mode 100644
index 709aff2c7..63140e41c
--- a/internal/sdk/pkg/models/operations/getdestinationbigquery.go
+++ b/internal/sdk/pkg/models/operations/getdestinationbigquery.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationBigqueryRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationBigqueryRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationBigqueryResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationBigqueryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationBigqueryResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationBigqueryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationBigqueryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationbigquerydenormalized.go b/internal/sdk/pkg/models/operations/getdestinationbigquerydenormalized.go
deleted file mode 100755
index 0d797b72a..000000000
--- a/internal/sdk/pkg/models/operations/getdestinationbigquerydenormalized.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type GetDestinationBigqueryDenormalizedRequest struct {
- DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
-}
-
-type GetDestinationBigqueryDenormalizedResponse struct {
- ContentType string
- // Get a Destination by the id in the path.
- DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/getdestinationclickhouse.go b/internal/sdk/pkg/models/operations/getdestinationclickhouse.go
old mode 100755
new mode 100644
index a8aa47bd4..7ccbac2b3
--- a/internal/sdk/pkg/models/operations/getdestinationclickhouse.go
+++ b/internal/sdk/pkg/models/operations/getdestinationclickhouse.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationClickhouseRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationClickhouseRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationClickhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationClickhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationClickhouseResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationClickhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationClickhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationconvex.go b/internal/sdk/pkg/models/operations/getdestinationconvex.go
old mode 100755
new mode 100644
index d6ce4de38..d7293e5cf
--- a/internal/sdk/pkg/models/operations/getdestinationconvex.go
+++ b/internal/sdk/pkg/models/operations/getdestinationconvex.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationConvexRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationConvexRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationConvexResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationConvexResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationConvexResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationConvexResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationConvexResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationcumulio.go b/internal/sdk/pkg/models/operations/getdestinationcumulio.go
old mode 100755
new mode 100644
index b8e7b5c97..5f7748531
--- a/internal/sdk/pkg/models/operations/getdestinationcumulio.go
+++ b/internal/sdk/pkg/models/operations/getdestinationcumulio.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationCumulioRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationCumulioRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationCumulioResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationCumulioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationCumulioResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationCumulioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationCumulioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationdatabend.go b/internal/sdk/pkg/models/operations/getdestinationdatabend.go
old mode 100755
new mode 100644
index e1ac4c8d2..0767f9d5a
--- a/internal/sdk/pkg/models/operations/getdestinationdatabend.go
+++ b/internal/sdk/pkg/models/operations/getdestinationdatabend.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationDatabendRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationDatabendRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationDatabendResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationDatabendResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationDatabendResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationDatabendResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationDatabendResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationdatabricks.go b/internal/sdk/pkg/models/operations/getdestinationdatabricks.go
old mode 100755
new mode 100644
index b2bb8d07b..471a7a89e
--- a/internal/sdk/pkg/models/operations/getdestinationdatabricks.go
+++ b/internal/sdk/pkg/models/operations/getdestinationdatabricks.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationDatabricksRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationDatabricksRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationDatabricksResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationDatabricksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationDatabricksResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationDatabricksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationDatabricksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationdevnull.go b/internal/sdk/pkg/models/operations/getdestinationdevnull.go
old mode 100755
new mode 100644
index c99f42f14..198069b4e
--- a/internal/sdk/pkg/models/operations/getdestinationdevnull.go
+++ b/internal/sdk/pkg/models/operations/getdestinationdevnull.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationDevNullRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationDevNullRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationDevNullResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationDevNullResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationDevNullResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationDevNullResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationDevNullResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationduckdb.go b/internal/sdk/pkg/models/operations/getdestinationduckdb.go
new file mode 100644
index 000000000..13408b421
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/getdestinationduckdb.go
@@ -0,0 +1,58 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type GetDestinationDuckdbRequest struct {
+ DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
+}
+
+func (o *GetDestinationDuckdbRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+type GetDestinationDuckdbResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Get a Destination by the id in the path.
+ DestinationResponse *shared.DestinationResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationDuckdbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationDuckdbResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationDuckdbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationDuckdbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/getdestinationdynamodb.go b/internal/sdk/pkg/models/operations/getdestinationdynamodb.go
old mode 100755
new mode 100644
index 8b2647afa..cf3ec7d81
--- a/internal/sdk/pkg/models/operations/getdestinationdynamodb.go
+++ b/internal/sdk/pkg/models/operations/getdestinationdynamodb.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationDynamodbRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationDynamodbRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationDynamodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationDynamodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationDynamodbResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationDynamodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationDynamodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationelasticsearch.go b/internal/sdk/pkg/models/operations/getdestinationelasticsearch.go
old mode 100755
new mode 100644
index 4150d030d..0fd781460
--- a/internal/sdk/pkg/models/operations/getdestinationelasticsearch.go
+++ b/internal/sdk/pkg/models/operations/getdestinationelasticsearch.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationElasticsearchRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationElasticsearchRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationElasticsearchResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationElasticsearchResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationElasticsearchResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationElasticsearchResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationElasticsearchResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationfirebolt.go b/internal/sdk/pkg/models/operations/getdestinationfirebolt.go
old mode 100755
new mode 100644
index 6e78a7ff1..e8477cddc
--- a/internal/sdk/pkg/models/operations/getdestinationfirebolt.go
+++ b/internal/sdk/pkg/models/operations/getdestinationfirebolt.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationFireboltRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationFireboltRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationFireboltResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationFireboltResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationFireboltResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationFireboltResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationFireboltResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationfirestore.go b/internal/sdk/pkg/models/operations/getdestinationfirestore.go
old mode 100755
new mode 100644
index 860e71ef7..fd2d7ceb4
--- a/internal/sdk/pkg/models/operations/getdestinationfirestore.go
+++ b/internal/sdk/pkg/models/operations/getdestinationfirestore.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationFirestoreRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationFirestoreRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationFirestoreResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationFirestoreResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationFirestoreResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationFirestoreResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationFirestoreResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationgcs.go b/internal/sdk/pkg/models/operations/getdestinationgcs.go
old mode 100755
new mode 100644
index 037ecb163..4a3637cda
--- a/internal/sdk/pkg/models/operations/getdestinationgcs.go
+++ b/internal/sdk/pkg/models/operations/getdestinationgcs.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationGcsRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationGcsRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationGcsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationGcsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationGcsResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationGcsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationGcsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationgooglesheets.go b/internal/sdk/pkg/models/operations/getdestinationgooglesheets.go
old mode 100755
new mode 100644
index bde67830e..49dcbbf66
--- a/internal/sdk/pkg/models/operations/getdestinationgooglesheets.go
+++ b/internal/sdk/pkg/models/operations/getdestinationgooglesheets.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationGoogleSheetsRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationGoogleSheetsRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationGoogleSheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationGoogleSheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationGoogleSheetsResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationGoogleSheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationGoogleSheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationkeen.go b/internal/sdk/pkg/models/operations/getdestinationkeen.go
old mode 100755
new mode 100644
index 6af105e65..d8db3f5c6
--- a/internal/sdk/pkg/models/operations/getdestinationkeen.go
+++ b/internal/sdk/pkg/models/operations/getdestinationkeen.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationKeenRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationKeenRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationKeenResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationKeenResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationKeenResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationKeenResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationKeenResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationkinesis.go b/internal/sdk/pkg/models/operations/getdestinationkinesis.go
old mode 100755
new mode 100644
index 14788a94f..a89e5abdf
--- a/internal/sdk/pkg/models/operations/getdestinationkinesis.go
+++ b/internal/sdk/pkg/models/operations/getdestinationkinesis.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationKinesisRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationKinesisRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationKinesisResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationKinesisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationKinesisResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationKinesisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationKinesisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationlangchain.go b/internal/sdk/pkg/models/operations/getdestinationlangchain.go
old mode 100755
new mode 100644
index 5a3fa55a1..23f06d6ef
--- a/internal/sdk/pkg/models/operations/getdestinationlangchain.go
+++ b/internal/sdk/pkg/models/operations/getdestinationlangchain.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationLangchainRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationLangchainRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationLangchainResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationLangchainResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationLangchainResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationLangchainResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationLangchainResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationmilvus.go b/internal/sdk/pkg/models/operations/getdestinationmilvus.go
old mode 100755
new mode 100644
index 1796f623e..29845b6f4
--- a/internal/sdk/pkg/models/operations/getdestinationmilvus.go
+++ b/internal/sdk/pkg/models/operations/getdestinationmilvus.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationMilvusRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationMilvusRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationMilvusResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationMilvusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationMilvusResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationMilvusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationMilvusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationmongodb.go b/internal/sdk/pkg/models/operations/getdestinationmongodb.go
old mode 100755
new mode 100644
index 5b80e1e23..0595041b6
--- a/internal/sdk/pkg/models/operations/getdestinationmongodb.go
+++ b/internal/sdk/pkg/models/operations/getdestinationmongodb.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationMongodbRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationMongodbRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationMongodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationMongodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationMongodbResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationMongodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationMongodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationmssql.go b/internal/sdk/pkg/models/operations/getdestinationmssql.go
old mode 100755
new mode 100644
index 67fdb77aa..2338df9c8
--- a/internal/sdk/pkg/models/operations/getdestinationmssql.go
+++ b/internal/sdk/pkg/models/operations/getdestinationmssql.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationMssqlRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationMssqlRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationMssqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationMssqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationMssqlResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationMssqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationMssqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationmysql.go b/internal/sdk/pkg/models/operations/getdestinationmysql.go
old mode 100755
new mode 100644
index 6ec306ec0..0ee27a995
--- a/internal/sdk/pkg/models/operations/getdestinationmysql.go
+++ b/internal/sdk/pkg/models/operations/getdestinationmysql.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationMysqlRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationMysqlRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationMysqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationMysqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationMysqlResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationMysqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationMysqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationoracle.go b/internal/sdk/pkg/models/operations/getdestinationoracle.go
old mode 100755
new mode 100644
index 95a497834..422227a02
--- a/internal/sdk/pkg/models/operations/getdestinationoracle.go
+++ b/internal/sdk/pkg/models/operations/getdestinationoracle.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationOracleRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationOracleRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationOracleResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationOracleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationOracleResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationOracleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationOracleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationpinecone.go b/internal/sdk/pkg/models/operations/getdestinationpinecone.go
old mode 100755
new mode 100644
index a5b4e2cb4..26bec8612
--- a/internal/sdk/pkg/models/operations/getdestinationpinecone.go
+++ b/internal/sdk/pkg/models/operations/getdestinationpinecone.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationPineconeRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationPineconeRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationPineconeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationPineconeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationPineconeResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationPineconeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationPineconeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationpostgres.go b/internal/sdk/pkg/models/operations/getdestinationpostgres.go
old mode 100755
new mode 100644
index 018f65299..869f58cf5
--- a/internal/sdk/pkg/models/operations/getdestinationpostgres.go
+++ b/internal/sdk/pkg/models/operations/getdestinationpostgres.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationPostgresRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationPostgresRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationPostgresResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationPostgresResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationPostgresResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationPostgresResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationPostgresResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationpubsub.go b/internal/sdk/pkg/models/operations/getdestinationpubsub.go
old mode 100755
new mode 100644
index f2582a5a7..a41b384dc
--- a/internal/sdk/pkg/models/operations/getdestinationpubsub.go
+++ b/internal/sdk/pkg/models/operations/getdestinationpubsub.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationPubsubRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationPubsubRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationPubsubResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationPubsubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationPubsubResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationPubsubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationPubsubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationqdrant.go b/internal/sdk/pkg/models/operations/getdestinationqdrant.go
new file mode 100644
index 000000000..61c9bd9f4
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/getdestinationqdrant.go
@@ -0,0 +1,58 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type GetDestinationQdrantRequest struct {
+ DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
+}
+
+func (o *GetDestinationQdrantRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+type GetDestinationQdrantResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Get a Destination by the id in the path.
+ DestinationResponse *shared.DestinationResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationQdrantResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationQdrantResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationQdrantResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationQdrantResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/getdestinationredis.go b/internal/sdk/pkg/models/operations/getdestinationredis.go
old mode 100755
new mode 100644
index 3c9b2b900..01e608818
--- a/internal/sdk/pkg/models/operations/getdestinationredis.go
+++ b/internal/sdk/pkg/models/operations/getdestinationredis.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationRedisRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationRedisRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationRedisResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationRedisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationRedisResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationRedisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationRedisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationredshift.go b/internal/sdk/pkg/models/operations/getdestinationredshift.go
old mode 100755
new mode 100644
index b43b8c2d2..832ec667c
--- a/internal/sdk/pkg/models/operations/getdestinationredshift.go
+++ b/internal/sdk/pkg/models/operations/getdestinationredshift.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationRedshiftRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationRedshiftRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationRedshiftResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationRedshiftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationRedshiftResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationRedshiftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationRedshiftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinations3.go b/internal/sdk/pkg/models/operations/getdestinations3.go
old mode 100755
new mode 100644
index 8f22c2898..510e76e87
--- a/internal/sdk/pkg/models/operations/getdestinations3.go
+++ b/internal/sdk/pkg/models/operations/getdestinations3.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationS3Request struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationS3Request) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationS3Response struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationS3Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationS3Response) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationS3Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationS3Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinations3glue.go b/internal/sdk/pkg/models/operations/getdestinations3glue.go
old mode 100755
new mode 100644
index 48231910d..03c9564aa
--- a/internal/sdk/pkg/models/operations/getdestinations3glue.go
+++ b/internal/sdk/pkg/models/operations/getdestinations3glue.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationS3GlueRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationS3GlueRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationS3GlueResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationS3GlueResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationS3GlueResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationS3GlueResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationS3GlueResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationsftpjson.go b/internal/sdk/pkg/models/operations/getdestinationsftpjson.go
old mode 100755
new mode 100644
index 30afdc9e6..555d0564f
--- a/internal/sdk/pkg/models/operations/getdestinationsftpjson.go
+++ b/internal/sdk/pkg/models/operations/getdestinationsftpjson.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationSftpJSONRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationSftpJSONRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationSftpJSONResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationSftpJSONResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationSftpJSONResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationSftpJSONResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationSftpJSONResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationsnowflake.go b/internal/sdk/pkg/models/operations/getdestinationsnowflake.go
old mode 100755
new mode 100644
index 61f9ded5a..aad50e6d7
--- a/internal/sdk/pkg/models/operations/getdestinationsnowflake.go
+++ b/internal/sdk/pkg/models/operations/getdestinationsnowflake.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationSnowflakeRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationSnowflakeRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationSnowflakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationSnowflakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationSnowflakeResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationSnowflakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationSnowflakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationtimeplus.go b/internal/sdk/pkg/models/operations/getdestinationtimeplus.go
old mode 100755
new mode 100644
index f76b61f69..56fd0f8d0
--- a/internal/sdk/pkg/models/operations/getdestinationtimeplus.go
+++ b/internal/sdk/pkg/models/operations/getdestinationtimeplus.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationTimeplusRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationTimeplusRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationTimeplusResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationTimeplusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationTimeplusResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationTimeplusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationTimeplusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationtypesense.go b/internal/sdk/pkg/models/operations/getdestinationtypesense.go
old mode 100755
new mode 100644
index 5e27a5672..feb59ba0f
--- a/internal/sdk/pkg/models/operations/getdestinationtypesense.go
+++ b/internal/sdk/pkg/models/operations/getdestinationtypesense.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationTypesenseRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationTypesenseRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationTypesenseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationTypesenseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationTypesenseResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationTypesenseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationTypesenseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationvertica.go b/internal/sdk/pkg/models/operations/getdestinationvertica.go
old mode 100755
new mode 100644
index 6a5e98a16..773c027dd
--- a/internal/sdk/pkg/models/operations/getdestinationvertica.go
+++ b/internal/sdk/pkg/models/operations/getdestinationvertica.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationVerticaRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationVerticaRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationVerticaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationVerticaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationVerticaResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationVerticaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationVerticaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getdestinationweaviate.go b/internal/sdk/pkg/models/operations/getdestinationweaviate.go
new file mode 100644
index 000000000..ccb651cc0
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/getdestinationweaviate.go
@@ -0,0 +1,58 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type GetDestinationWeaviateRequest struct {
+ DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
+}
+
+func (o *GetDestinationWeaviateRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+type GetDestinationWeaviateResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Get a Destination by the id in the path.
+ DestinationResponse *shared.DestinationResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationWeaviateResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationWeaviateResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationWeaviateResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationWeaviateResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/getdestinationxata.go b/internal/sdk/pkg/models/operations/getdestinationxata.go
old mode 100755
new mode 100644
index 6cc078686..b10d36263
--- a/internal/sdk/pkg/models/operations/getdestinationxata.go
+++ b/internal/sdk/pkg/models/operations/getdestinationxata.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetDestinationXataRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *GetDestinationXataRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type GetDestinationXataResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Destination by the id in the path.
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetDestinationXataResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetDestinationXataResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *GetDestinationXataResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetDestinationXataResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getjob.go b/internal/sdk/pkg/models/operations/getjob.go
old mode 100755
new mode 100644
index 9dd4ef96c..e3983ecab
--- a/internal/sdk/pkg/models/operations/getjob.go
+++ b/internal/sdk/pkg/models/operations/getjob.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetJobRequest struct {
JobID int64 `pathParam:"style=simple,explode=false,name=jobId"`
}
+func (o *GetJobRequest) GetJobID() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.JobID
+}
+
type GetJobResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Job by the id in the path.
JobResponse *shared.JobResponse
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *GetJobResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetJobResponse) GetJobResponse() *shared.JobResponse {
+ if o == nil {
+ return nil
+ }
+ return o.JobResponse
+}
+
+func (o *GetJobResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetJobResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/getsource.go b/internal/sdk/pkg/models/operations/getsource.go
old mode 100755
new mode 100644
index cb9242deb..49cf47b86
--- a/internal/sdk/pkg/models/operations/getsource.go
+++ b/internal/sdk/pkg/models/operations/getsource.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceaha.go b/internal/sdk/pkg/models/operations/getsourceaha.go
old mode 100755
new mode 100644
index 1c8f82f71..883eef13c
--- a/internal/sdk/pkg/models/operations/getsourceaha.go
+++ b/internal/sdk/pkg/models/operations/getsourceaha.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAhaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAhaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAhaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAhaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAhaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAhaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAhaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceaircall.go b/internal/sdk/pkg/models/operations/getsourceaircall.go
old mode 100755
new mode 100644
index 3708833e0..e2f542dc9
--- a/internal/sdk/pkg/models/operations/getsourceaircall.go
+++ b/internal/sdk/pkg/models/operations/getsourceaircall.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAircallRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAircallRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAircallResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAircallResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAircallResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAircallResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAircallResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceairtable.go b/internal/sdk/pkg/models/operations/getsourceairtable.go
old mode 100755
new mode 100644
index 645dc0b58..ab43a495c
--- a/internal/sdk/pkg/models/operations/getsourceairtable.go
+++ b/internal/sdk/pkg/models/operations/getsourceairtable.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAirtableRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAirtableRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAirtableResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAirtableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAirtableResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAirtableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAirtableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcealloydb.go b/internal/sdk/pkg/models/operations/getsourcealloydb.go
old mode 100755
new mode 100644
index c7344b11b..b36d102b1
--- a/internal/sdk/pkg/models/operations/getsourcealloydb.go
+++ b/internal/sdk/pkg/models/operations/getsourcealloydb.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAlloydbRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAlloydbRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAlloydbResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAlloydbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAlloydbResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAlloydbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAlloydbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceamazonads.go b/internal/sdk/pkg/models/operations/getsourceamazonads.go
old mode 100755
new mode 100644
index e76b98318..73c22035c
--- a/internal/sdk/pkg/models/operations/getsourceamazonads.go
+++ b/internal/sdk/pkg/models/operations/getsourceamazonads.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAmazonAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAmazonAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAmazonAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAmazonAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAmazonAdsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAmazonAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAmazonAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceamazonsellerpartner.go b/internal/sdk/pkg/models/operations/getsourceamazonsellerpartner.go
old mode 100755
new mode 100644
index 9154785ce..40724b08e
--- a/internal/sdk/pkg/models/operations/getsourceamazonsellerpartner.go
+++ b/internal/sdk/pkg/models/operations/getsourceamazonsellerpartner.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAmazonSellerPartnerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAmazonSellerPartnerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAmazonSellerPartnerResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAmazonSellerPartnerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAmazonSellerPartnerResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAmazonSellerPartnerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAmazonSellerPartnerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceamazonsqs.go b/internal/sdk/pkg/models/operations/getsourceamazonsqs.go
old mode 100755
new mode 100644
index 553324327..66f0f49f3
--- a/internal/sdk/pkg/models/operations/getsourceamazonsqs.go
+++ b/internal/sdk/pkg/models/operations/getsourceamazonsqs.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAmazonSqsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAmazonSqsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAmazonSqsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAmazonSqsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAmazonSqsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAmazonSqsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAmazonSqsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceamplitude.go b/internal/sdk/pkg/models/operations/getsourceamplitude.go
old mode 100755
new mode 100644
index c16c671b9..103409812
--- a/internal/sdk/pkg/models/operations/getsourceamplitude.go
+++ b/internal/sdk/pkg/models/operations/getsourceamplitude.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAmplitudeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAmplitudeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAmplitudeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAmplitudeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAmplitudeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAmplitudeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAmplitudeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceapifydataset.go b/internal/sdk/pkg/models/operations/getsourceapifydataset.go
old mode 100755
new mode 100644
index e7a415b90..8893a13cf
--- a/internal/sdk/pkg/models/operations/getsourceapifydataset.go
+++ b/internal/sdk/pkg/models/operations/getsourceapifydataset.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceApifyDatasetRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceApifyDatasetRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceApifyDatasetResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceApifyDatasetResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceApifyDatasetResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceApifyDatasetResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceApifyDatasetResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceappfollow.go b/internal/sdk/pkg/models/operations/getsourceappfollow.go
old mode 100755
new mode 100644
index 0573daa9b..e66c5af34
--- a/internal/sdk/pkg/models/operations/getsourceappfollow.go
+++ b/internal/sdk/pkg/models/operations/getsourceappfollow.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAppfollowRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAppfollowRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAppfollowResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAppfollowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAppfollowResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAppfollowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAppfollowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceasana.go b/internal/sdk/pkg/models/operations/getsourceasana.go
old mode 100755
new mode 100644
index 982f99de9..3040ed48a
--- a/internal/sdk/pkg/models/operations/getsourceasana.go
+++ b/internal/sdk/pkg/models/operations/getsourceasana.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAsanaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAsanaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAsanaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAsanaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAsanaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAsanaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAsanaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceauth0.go b/internal/sdk/pkg/models/operations/getsourceauth0.go
old mode 100755
new mode 100644
index ffe25fa1e..0b95f8903
--- a/internal/sdk/pkg/models/operations/getsourceauth0.go
+++ b/internal/sdk/pkg/models/operations/getsourceauth0.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAuth0Request struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAuth0Request) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAuth0Response struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAuth0Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAuth0Response) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAuth0Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAuth0Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceawscloudtrail.go b/internal/sdk/pkg/models/operations/getsourceawscloudtrail.go
old mode 100755
new mode 100644
index 205bc3133..13d46bfa1
--- a/internal/sdk/pkg/models/operations/getsourceawscloudtrail.go
+++ b/internal/sdk/pkg/models/operations/getsourceawscloudtrail.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAwsCloudtrailRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAwsCloudtrailRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAwsCloudtrailResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAwsCloudtrailResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAwsCloudtrailResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAwsCloudtrailResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAwsCloudtrailResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceazureblobstorage.go b/internal/sdk/pkg/models/operations/getsourceazureblobstorage.go
old mode 100755
new mode 100644
index 003a689bb..cbb81095b
--- a/internal/sdk/pkg/models/operations/getsourceazureblobstorage.go
+++ b/internal/sdk/pkg/models/operations/getsourceazureblobstorage.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAzureBlobStorageRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAzureBlobStorageRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAzureBlobStorageResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAzureBlobStorageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAzureBlobStorageResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAzureBlobStorageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAzureBlobStorageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceazuretable.go b/internal/sdk/pkg/models/operations/getsourceazuretable.go
old mode 100755
new mode 100644
index 4ae066741..bbb9a6c29
--- a/internal/sdk/pkg/models/operations/getsourceazuretable.go
+++ b/internal/sdk/pkg/models/operations/getsourceazuretable.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceAzureTableRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceAzureTableRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceAzureTableResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceAzureTableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceAzureTableResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceAzureTableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceAzureTableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcebamboohr.go b/internal/sdk/pkg/models/operations/getsourcebamboohr.go
old mode 100755
new mode 100644
index a06389757..57fb46b3c
--- a/internal/sdk/pkg/models/operations/getsourcebamboohr.go
+++ b/internal/sdk/pkg/models/operations/getsourcebamboohr.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceBambooHrRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceBambooHrRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceBambooHrResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceBambooHrResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceBambooHrResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceBambooHrResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceBambooHrResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcebigcommerce.go b/internal/sdk/pkg/models/operations/getsourcebigcommerce.go
deleted file mode 100755
index 452cb7b0e..000000000
--- a/internal/sdk/pkg/models/operations/getsourcebigcommerce.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type GetSourceBigcommerceRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type GetSourceBigcommerceResponse struct {
- ContentType string
- // Get a Source by the id in the path.
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/getsourcebigquery.go b/internal/sdk/pkg/models/operations/getsourcebigquery.go
old mode 100755
new mode 100644
index 8a7578d50..2b62e2f90
--- a/internal/sdk/pkg/models/operations/getsourcebigquery.go
+++ b/internal/sdk/pkg/models/operations/getsourcebigquery.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceBigqueryRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceBigqueryRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceBigqueryResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceBigqueryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceBigqueryResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceBigqueryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceBigqueryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcebingads.go b/internal/sdk/pkg/models/operations/getsourcebingads.go
old mode 100755
new mode 100644
index 455366a11..1c702e977
--- a/internal/sdk/pkg/models/operations/getsourcebingads.go
+++ b/internal/sdk/pkg/models/operations/getsourcebingads.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceBingAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceBingAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceBingAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceBingAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceBingAdsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceBingAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceBingAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcebraintree.go b/internal/sdk/pkg/models/operations/getsourcebraintree.go
old mode 100755
new mode 100644
index 9e0ae5b6e..cffa611eb
--- a/internal/sdk/pkg/models/operations/getsourcebraintree.go
+++ b/internal/sdk/pkg/models/operations/getsourcebraintree.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceBraintreeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceBraintreeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceBraintreeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceBraintreeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceBraintreeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceBraintreeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceBraintreeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcebraze.go b/internal/sdk/pkg/models/operations/getsourcebraze.go
old mode 100755
new mode 100644
index 4d771dd96..7a8aa6000
--- a/internal/sdk/pkg/models/operations/getsourcebraze.go
+++ b/internal/sdk/pkg/models/operations/getsourcebraze.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceBrazeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceBrazeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceBrazeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceBrazeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceBrazeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceBrazeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceBrazeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcecart.go b/internal/sdk/pkg/models/operations/getsourcecart.go
new file mode 100644
index 000000000..329190753
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/getsourcecart.go
@@ -0,0 +1,58 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type GetSourceCartRequest struct {
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *GetSourceCartRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type GetSourceCartResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Get a Source by the id in the path.
+ SourceResponse *shared.SourceResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceCartResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceCartResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceCartResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceCartResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/getsourcechargebee.go b/internal/sdk/pkg/models/operations/getsourcechargebee.go
old mode 100755
new mode 100644
index 825956cc5..c08333f7e
--- a/internal/sdk/pkg/models/operations/getsourcechargebee.go
+++ b/internal/sdk/pkg/models/operations/getsourcechargebee.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceChargebeeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceChargebeeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceChargebeeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceChargebeeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceChargebeeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceChargebeeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceChargebeeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcechartmogul.go b/internal/sdk/pkg/models/operations/getsourcechartmogul.go
old mode 100755
new mode 100644
index 32410df83..1e230d868
--- a/internal/sdk/pkg/models/operations/getsourcechartmogul.go
+++ b/internal/sdk/pkg/models/operations/getsourcechartmogul.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceChartmogulRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceChartmogulRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceChartmogulResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceChartmogulResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceChartmogulResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceChartmogulResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceChartmogulResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceclickhouse.go b/internal/sdk/pkg/models/operations/getsourceclickhouse.go
old mode 100755
new mode 100644
index c43792617..5c5a81d92
--- a/internal/sdk/pkg/models/operations/getsourceclickhouse.go
+++ b/internal/sdk/pkg/models/operations/getsourceclickhouse.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceClickhouseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceClickhouseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceClickhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceClickhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceClickhouseResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceClickhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceClickhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceclickupapi.go b/internal/sdk/pkg/models/operations/getsourceclickupapi.go
old mode 100755
new mode 100644
index 235d13a78..6da1db8a5
--- a/internal/sdk/pkg/models/operations/getsourceclickupapi.go
+++ b/internal/sdk/pkg/models/operations/getsourceclickupapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceClickupAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceClickupAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceClickupAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceClickupAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceClickupAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceClickupAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceClickupAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceclockify.go b/internal/sdk/pkg/models/operations/getsourceclockify.go
old mode 100755
new mode 100644
index d2d51fc80..970c10293
--- a/internal/sdk/pkg/models/operations/getsourceclockify.go
+++ b/internal/sdk/pkg/models/operations/getsourceclockify.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceClockifyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceClockifyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceClockifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceClockifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceClockifyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceClockifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceClockifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceclosecom.go b/internal/sdk/pkg/models/operations/getsourceclosecom.go
old mode 100755
new mode 100644
index f91142fd6..c1cf152b9
--- a/internal/sdk/pkg/models/operations/getsourceclosecom.go
+++ b/internal/sdk/pkg/models/operations/getsourceclosecom.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceCloseComRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceCloseComRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceCloseComResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceCloseComResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceCloseComResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceCloseComResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceCloseComResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcecoda.go b/internal/sdk/pkg/models/operations/getsourcecoda.go
old mode 100755
new mode 100644
index b53a6eea1..7136bc9fa
--- a/internal/sdk/pkg/models/operations/getsourcecoda.go
+++ b/internal/sdk/pkg/models/operations/getsourcecoda.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceCodaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceCodaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceCodaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceCodaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceCodaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceCodaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceCodaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcecoinapi.go b/internal/sdk/pkg/models/operations/getsourcecoinapi.go
old mode 100755
new mode 100644
index 1075e2e9c..f12cc8844
--- a/internal/sdk/pkg/models/operations/getsourcecoinapi.go
+++ b/internal/sdk/pkg/models/operations/getsourcecoinapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceCoinAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceCoinAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceCoinAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceCoinAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceCoinAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceCoinAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceCoinAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcecoinmarketcap.go b/internal/sdk/pkg/models/operations/getsourcecoinmarketcap.go
old mode 100755
new mode 100644
index f1b28a2cb..465ac598f
--- a/internal/sdk/pkg/models/operations/getsourcecoinmarketcap.go
+++ b/internal/sdk/pkg/models/operations/getsourcecoinmarketcap.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceCoinmarketcapRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceCoinmarketcapRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceCoinmarketcapResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceCoinmarketcapResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceCoinmarketcapResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceCoinmarketcapResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceCoinmarketcapResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceconfigcat.go b/internal/sdk/pkg/models/operations/getsourceconfigcat.go
old mode 100755
new mode 100644
index 64c384294..699fbb0db
--- a/internal/sdk/pkg/models/operations/getsourceconfigcat.go
+++ b/internal/sdk/pkg/models/operations/getsourceconfigcat.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceConfigcatRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceConfigcatRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceConfigcatResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceConfigcatResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceConfigcatResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceConfigcatResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceConfigcatResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceconfluence.go b/internal/sdk/pkg/models/operations/getsourceconfluence.go
old mode 100755
new mode 100644
index b727c6cfa..026227e2a
--- a/internal/sdk/pkg/models/operations/getsourceconfluence.go
+++ b/internal/sdk/pkg/models/operations/getsourceconfluence.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceConfluenceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceConfluenceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceConfluenceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceConfluenceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceConfluenceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceConfluenceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceConfluenceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceconvex.go b/internal/sdk/pkg/models/operations/getsourceconvex.go
old mode 100755
new mode 100644
index e93a971a1..547290a45
--- a/internal/sdk/pkg/models/operations/getsourceconvex.go
+++ b/internal/sdk/pkg/models/operations/getsourceconvex.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceConvexRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceConvexRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceConvexResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceConvexResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceConvexResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceConvexResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceConvexResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcedatascope.go b/internal/sdk/pkg/models/operations/getsourcedatascope.go
old mode 100755
new mode 100644
index 40ad8610b..857d12802
--- a/internal/sdk/pkg/models/operations/getsourcedatascope.go
+++ b/internal/sdk/pkg/models/operations/getsourcedatascope.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceDatascopeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceDatascopeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceDatascopeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceDatascopeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceDatascopeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceDatascopeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceDatascopeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcedelighted.go b/internal/sdk/pkg/models/operations/getsourcedelighted.go
old mode 100755
new mode 100644
index 574bdeb90..b29966557
--- a/internal/sdk/pkg/models/operations/getsourcedelighted.go
+++ b/internal/sdk/pkg/models/operations/getsourcedelighted.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceDelightedRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceDelightedRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceDelightedResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceDelightedResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceDelightedResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceDelightedResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceDelightedResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcedixa.go b/internal/sdk/pkg/models/operations/getsourcedixa.go
old mode 100755
new mode 100644
index 7baa94079..6b9fe268d
--- a/internal/sdk/pkg/models/operations/getsourcedixa.go
+++ b/internal/sdk/pkg/models/operations/getsourcedixa.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceDixaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceDixaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceDixaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceDixaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceDixaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceDixaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceDixaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcedockerhub.go b/internal/sdk/pkg/models/operations/getsourcedockerhub.go
old mode 100755
new mode 100644
index 06a35a7b4..3b7091c05
--- a/internal/sdk/pkg/models/operations/getsourcedockerhub.go
+++ b/internal/sdk/pkg/models/operations/getsourcedockerhub.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceDockerhubRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceDockerhubRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceDockerhubResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceDockerhubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceDockerhubResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceDockerhubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceDockerhubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcedremio.go b/internal/sdk/pkg/models/operations/getsourcedremio.go
old mode 100755
new mode 100644
index 512ce570e..cc5b94b6b
--- a/internal/sdk/pkg/models/operations/getsourcedremio.go
+++ b/internal/sdk/pkg/models/operations/getsourcedremio.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceDremioRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceDremioRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceDremioResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceDremioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceDremioResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceDremioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceDremioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcedynamodb.go b/internal/sdk/pkg/models/operations/getsourcedynamodb.go
old mode 100755
new mode 100644
index 408d921f8..bf42ec2cd
--- a/internal/sdk/pkg/models/operations/getsourcedynamodb.go
+++ b/internal/sdk/pkg/models/operations/getsourcedynamodb.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceDynamodbRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceDynamodbRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceDynamodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceDynamodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceDynamodbResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceDynamodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceDynamodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcee2etestcloud.go b/internal/sdk/pkg/models/operations/getsourcee2etestcloud.go
deleted file mode 100755
index 65b3dccfc..000000000
--- a/internal/sdk/pkg/models/operations/getsourcee2etestcloud.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type GetSourceE2eTestCloudRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type GetSourceE2eTestCloudResponse struct {
- ContentType string
- // Get a Source by the id in the path.
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/getsourceemailoctopus.go b/internal/sdk/pkg/models/operations/getsourceemailoctopus.go
old mode 100755
new mode 100644
index 7fcd26a55..24d5c66cf
--- a/internal/sdk/pkg/models/operations/getsourceemailoctopus.go
+++ b/internal/sdk/pkg/models/operations/getsourceemailoctopus.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceEmailoctopusRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceEmailoctopusRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceEmailoctopusResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceEmailoctopusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceEmailoctopusResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceEmailoctopusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceEmailoctopusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceexchangerates.go b/internal/sdk/pkg/models/operations/getsourceexchangerates.go
old mode 100755
new mode 100644
index 1a1dde806..39ea54546
--- a/internal/sdk/pkg/models/operations/getsourceexchangerates.go
+++ b/internal/sdk/pkg/models/operations/getsourceexchangerates.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceExchangeRatesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceExchangeRatesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceExchangeRatesResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceExchangeRatesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceExchangeRatesResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceExchangeRatesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceExchangeRatesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcefacebookmarketing.go b/internal/sdk/pkg/models/operations/getsourcefacebookmarketing.go
old mode 100755
new mode 100644
index f8f805ff2..a3d127f8c
--- a/internal/sdk/pkg/models/operations/getsourcefacebookmarketing.go
+++ b/internal/sdk/pkg/models/operations/getsourcefacebookmarketing.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceFacebookMarketingRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceFacebookMarketingRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceFacebookMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceFacebookMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceFacebookMarketingResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceFacebookMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceFacebookMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcefacebookpages.go b/internal/sdk/pkg/models/operations/getsourcefacebookpages.go
old mode 100755
new mode 100644
index dd077980f..580639a6d
--- a/internal/sdk/pkg/models/operations/getsourcefacebookpages.go
+++ b/internal/sdk/pkg/models/operations/getsourcefacebookpages.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceFacebookPagesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceFacebookPagesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceFacebookPagesResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceFacebookPagesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceFacebookPagesResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceFacebookPagesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceFacebookPagesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcefaker.go b/internal/sdk/pkg/models/operations/getsourcefaker.go
old mode 100755
new mode 100644
index 84a8d74da..6bd1e6d37
--- a/internal/sdk/pkg/models/operations/getsourcefaker.go
+++ b/internal/sdk/pkg/models/operations/getsourcefaker.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceFakerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceFakerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceFakerResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceFakerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceFakerResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceFakerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceFakerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcefauna.go b/internal/sdk/pkg/models/operations/getsourcefauna.go
old mode 100755
new mode 100644
index a81bbb19a..10f1f24d2
--- a/internal/sdk/pkg/models/operations/getsourcefauna.go
+++ b/internal/sdk/pkg/models/operations/getsourcefauna.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceFaunaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceFaunaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceFaunaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceFaunaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceFaunaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceFaunaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceFaunaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcefile.go b/internal/sdk/pkg/models/operations/getsourcefile.go
new file mode 100644
index 000000000..6d4550b7b
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/getsourcefile.go
@@ -0,0 +1,58 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type GetSourceFileRequest struct {
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *GetSourceFileRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type GetSourceFileResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Get a Source by the id in the path.
+ SourceResponse *shared.SourceResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceFileResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceFileResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceFileResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceFileResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/getsourcefilesecure.go b/internal/sdk/pkg/models/operations/getsourcefilesecure.go
deleted file mode 100755
index 0834d7644..000000000
--- a/internal/sdk/pkg/models/operations/getsourcefilesecure.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type GetSourceFileSecureRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type GetSourceFileSecureResponse struct {
- ContentType string
- // Get a Source by the id in the path.
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/getsourcefirebolt.go b/internal/sdk/pkg/models/operations/getsourcefirebolt.go
old mode 100755
new mode 100644
index bfd7f3c20..afb285764
--- a/internal/sdk/pkg/models/operations/getsourcefirebolt.go
+++ b/internal/sdk/pkg/models/operations/getsourcefirebolt.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceFireboltRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceFireboltRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceFireboltResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceFireboltResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceFireboltResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceFireboltResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceFireboltResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcefreshcaller.go b/internal/sdk/pkg/models/operations/getsourcefreshcaller.go
old mode 100755
new mode 100644
index 7938134c1..3ef019ca5
--- a/internal/sdk/pkg/models/operations/getsourcefreshcaller.go
+++ b/internal/sdk/pkg/models/operations/getsourcefreshcaller.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceFreshcallerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceFreshcallerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceFreshcallerResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceFreshcallerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceFreshcallerResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceFreshcallerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceFreshcallerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcefreshdesk.go b/internal/sdk/pkg/models/operations/getsourcefreshdesk.go
old mode 100755
new mode 100644
index e46f59267..6f489d96f
--- a/internal/sdk/pkg/models/operations/getsourcefreshdesk.go
+++ b/internal/sdk/pkg/models/operations/getsourcefreshdesk.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceFreshdeskRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceFreshdeskRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceFreshdeskResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceFreshdeskResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceFreshdeskResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceFreshdeskResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceFreshdeskResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcefreshsales.go b/internal/sdk/pkg/models/operations/getsourcefreshsales.go
old mode 100755
new mode 100644
index 950307f21..dadd8acee
--- a/internal/sdk/pkg/models/operations/getsourcefreshsales.go
+++ b/internal/sdk/pkg/models/operations/getsourcefreshsales.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceFreshsalesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceFreshsalesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceFreshsalesResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceFreshsalesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceFreshsalesResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceFreshsalesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceFreshsalesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegainsightpx.go b/internal/sdk/pkg/models/operations/getsourcegainsightpx.go
old mode 100755
new mode 100644
index 52390a24e..ad559f1ad
--- a/internal/sdk/pkg/models/operations/getsourcegainsightpx.go
+++ b/internal/sdk/pkg/models/operations/getsourcegainsightpx.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGainsightPxRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGainsightPxRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGainsightPxResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGainsightPxResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGainsightPxResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGainsightPxResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGainsightPxResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegcs.go b/internal/sdk/pkg/models/operations/getsourcegcs.go
old mode 100755
new mode 100644
index 7473cbeec..daec2f918
--- a/internal/sdk/pkg/models/operations/getsourcegcs.go
+++ b/internal/sdk/pkg/models/operations/getsourcegcs.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGcsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGcsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGcsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGcsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGcsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGcsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGcsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegetlago.go b/internal/sdk/pkg/models/operations/getsourcegetlago.go
old mode 100755
new mode 100644
index c0b2e3f75..97eb324b1
--- a/internal/sdk/pkg/models/operations/getsourcegetlago.go
+++ b/internal/sdk/pkg/models/operations/getsourcegetlago.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGetlagoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGetlagoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGetlagoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGetlagoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGetlagoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGetlagoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGetlagoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegithub.go b/internal/sdk/pkg/models/operations/getsourcegithub.go
old mode 100755
new mode 100644
index 2b2a7fd35..c8c0cbe6f
--- a/internal/sdk/pkg/models/operations/getsourcegithub.go
+++ b/internal/sdk/pkg/models/operations/getsourcegithub.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGithubRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGithubRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGithubResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGithubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGithubResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGithubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGithubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegitlab.go b/internal/sdk/pkg/models/operations/getsourcegitlab.go
old mode 100755
new mode 100644
index 550649573..7155e0c52
--- a/internal/sdk/pkg/models/operations/getsourcegitlab.go
+++ b/internal/sdk/pkg/models/operations/getsourcegitlab.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGitlabRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGitlabRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGitlabResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGitlabResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGitlabResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGitlabResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGitlabResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceglassfrog.go b/internal/sdk/pkg/models/operations/getsourceglassfrog.go
old mode 100755
new mode 100644
index f2b270252..c9e6e75b7
--- a/internal/sdk/pkg/models/operations/getsourceglassfrog.go
+++ b/internal/sdk/pkg/models/operations/getsourceglassfrog.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGlassfrogRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGlassfrogRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGlassfrogResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGlassfrogResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGlassfrogResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGlassfrogResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGlassfrogResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegnews.go b/internal/sdk/pkg/models/operations/getsourcegnews.go
old mode 100755
new mode 100644
index 943877941..a7c22c17c
--- a/internal/sdk/pkg/models/operations/getsourcegnews.go
+++ b/internal/sdk/pkg/models/operations/getsourcegnews.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGnewsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGnewsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGnewsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGnewsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGnewsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGnewsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGnewsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegoogleads.go b/internal/sdk/pkg/models/operations/getsourcegoogleads.go
old mode 100755
new mode 100644
index 9858499ee..7326ff63c
--- a/internal/sdk/pkg/models/operations/getsourcegoogleads.go
+++ b/internal/sdk/pkg/models/operations/getsourcegoogleads.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGoogleAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGoogleAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGoogleAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGoogleAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGoogleAdsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGoogleAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGoogleAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegoogleanalyticsdataapi.go b/internal/sdk/pkg/models/operations/getsourcegoogleanalyticsdataapi.go
old mode 100755
new mode 100644
index 3815dc8e7..74d432963
--- a/internal/sdk/pkg/models/operations/getsourcegoogleanalyticsdataapi.go
+++ b/internal/sdk/pkg/models/operations/getsourcegoogleanalyticsdataapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGoogleAnalyticsDataAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGoogleAnalyticsDataAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGoogleAnalyticsDataAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGoogleAnalyticsDataAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGoogleAnalyticsDataAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGoogleAnalyticsDataAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGoogleAnalyticsDataAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegoogleanalyticsv4.go b/internal/sdk/pkg/models/operations/getsourcegoogleanalyticsv4.go
deleted file mode 100755
index f12184d80..000000000
--- a/internal/sdk/pkg/models/operations/getsourcegoogleanalyticsv4.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type GetSourceGoogleAnalyticsV4Request struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type GetSourceGoogleAnalyticsV4Response struct {
- ContentType string
- // Get a Source by the id in the path.
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/getsourcegoogledirectory.go b/internal/sdk/pkg/models/operations/getsourcegoogledirectory.go
old mode 100755
new mode 100644
index 1ad39e636..69c115049
--- a/internal/sdk/pkg/models/operations/getsourcegoogledirectory.go
+++ b/internal/sdk/pkg/models/operations/getsourcegoogledirectory.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGoogleDirectoryRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGoogleDirectoryRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGoogleDirectoryResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGoogleDirectoryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGoogleDirectoryResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGoogleDirectoryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGoogleDirectoryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegoogledrive.go b/internal/sdk/pkg/models/operations/getsourcegoogledrive.go
new file mode 100644
index 000000000..fa49fbe19
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/getsourcegoogledrive.go
@@ -0,0 +1,58 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type GetSourceGoogleDriveRequest struct {
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *GetSourceGoogleDriveRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type GetSourceGoogleDriveResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Get a Source by the id in the path.
+ SourceResponse *shared.SourceResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGoogleDriveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGoogleDriveResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGoogleDriveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGoogleDriveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/getsourcegooglepagespeedinsights.go b/internal/sdk/pkg/models/operations/getsourcegooglepagespeedinsights.go
old mode 100755
new mode 100644
index 7094b93c4..c1139d115
--- a/internal/sdk/pkg/models/operations/getsourcegooglepagespeedinsights.go
+++ b/internal/sdk/pkg/models/operations/getsourcegooglepagespeedinsights.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGooglePagespeedInsightsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGooglePagespeedInsightsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGooglePagespeedInsightsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGooglePagespeedInsightsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGooglePagespeedInsightsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGooglePagespeedInsightsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGooglePagespeedInsightsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegooglesearchconsole.go b/internal/sdk/pkg/models/operations/getsourcegooglesearchconsole.go
old mode 100755
new mode 100644
index a57046547..7e2bc9f7c
--- a/internal/sdk/pkg/models/operations/getsourcegooglesearchconsole.go
+++ b/internal/sdk/pkg/models/operations/getsourcegooglesearchconsole.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGoogleSearchConsoleRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGoogleSearchConsoleRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGoogleSearchConsoleResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGoogleSearchConsoleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGoogleSearchConsoleResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGoogleSearchConsoleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGoogleSearchConsoleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegooglesheets.go b/internal/sdk/pkg/models/operations/getsourcegooglesheets.go
old mode 100755
new mode 100644
index 0180e6c42..56995e320
--- a/internal/sdk/pkg/models/operations/getsourcegooglesheets.go
+++ b/internal/sdk/pkg/models/operations/getsourcegooglesheets.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGoogleSheetsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGoogleSheetsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGoogleSheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGoogleSheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGoogleSheetsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGoogleSheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGoogleSheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegooglewebfonts.go b/internal/sdk/pkg/models/operations/getsourcegooglewebfonts.go
old mode 100755
new mode 100644
index 93730d84a..14f6c13bb
--- a/internal/sdk/pkg/models/operations/getsourcegooglewebfonts.go
+++ b/internal/sdk/pkg/models/operations/getsourcegooglewebfonts.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGoogleWebfontsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGoogleWebfontsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGoogleWebfontsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGoogleWebfontsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGoogleWebfontsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGoogleWebfontsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGoogleWebfontsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegoogleworkspaceadminreports.go b/internal/sdk/pkg/models/operations/getsourcegoogleworkspaceadminreports.go
old mode 100755
new mode 100644
index 283a6bf19..9ac0f3c13
--- a/internal/sdk/pkg/models/operations/getsourcegoogleworkspaceadminreports.go
+++ b/internal/sdk/pkg/models/operations/getsourcegoogleworkspaceadminreports.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGoogleWorkspaceAdminReportsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGoogleWorkspaceAdminReportsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGoogleWorkspaceAdminReportsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGoogleWorkspaceAdminReportsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGoogleWorkspaceAdminReportsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGoogleWorkspaceAdminReportsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGoogleWorkspaceAdminReportsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegreenhouse.go b/internal/sdk/pkg/models/operations/getsourcegreenhouse.go
old mode 100755
new mode 100644
index c21943538..8c0d4292a
--- a/internal/sdk/pkg/models/operations/getsourcegreenhouse.go
+++ b/internal/sdk/pkg/models/operations/getsourcegreenhouse.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGreenhouseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGreenhouseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGreenhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGreenhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGreenhouseResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGreenhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGreenhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcegridly.go b/internal/sdk/pkg/models/operations/getsourcegridly.go
old mode 100755
new mode 100644
index 90cdb9d25..0b74e6454
--- a/internal/sdk/pkg/models/operations/getsourcegridly.go
+++ b/internal/sdk/pkg/models/operations/getsourcegridly.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceGridlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceGridlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceGridlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceGridlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceGridlyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceGridlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceGridlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceharvest.go b/internal/sdk/pkg/models/operations/getsourceharvest.go
old mode 100755
new mode 100644
index a3cc286bd..87b05631e
--- a/internal/sdk/pkg/models/operations/getsourceharvest.go
+++ b/internal/sdk/pkg/models/operations/getsourceharvest.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceHarvestRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceHarvestRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceHarvestResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceHarvestResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceHarvestResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceHarvestResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceHarvestResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcehubplanner.go b/internal/sdk/pkg/models/operations/getsourcehubplanner.go
old mode 100755
new mode 100644
index e09b9dfca..4fa280b17
--- a/internal/sdk/pkg/models/operations/getsourcehubplanner.go
+++ b/internal/sdk/pkg/models/operations/getsourcehubplanner.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceHubplannerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceHubplannerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceHubplannerResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceHubplannerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceHubplannerResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceHubplannerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceHubplannerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcehubspot.go b/internal/sdk/pkg/models/operations/getsourcehubspot.go
old mode 100755
new mode 100644
index e072ad702..1684de94c
--- a/internal/sdk/pkg/models/operations/getsourcehubspot.go
+++ b/internal/sdk/pkg/models/operations/getsourcehubspot.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceHubspotRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceHubspotRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceHubspotResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceHubspotResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceHubspotResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceHubspotResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceHubspotResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceinsightly.go b/internal/sdk/pkg/models/operations/getsourceinsightly.go
old mode 100755
new mode 100644
index 0aea61c07..6216179b4
--- a/internal/sdk/pkg/models/operations/getsourceinsightly.go
+++ b/internal/sdk/pkg/models/operations/getsourceinsightly.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceInsightlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceInsightlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceInsightlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceInsightlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceInsightlyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceInsightlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceInsightlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceinstagram.go b/internal/sdk/pkg/models/operations/getsourceinstagram.go
old mode 100755
new mode 100644
index a3dc14646..05055a9ee
--- a/internal/sdk/pkg/models/operations/getsourceinstagram.go
+++ b/internal/sdk/pkg/models/operations/getsourceinstagram.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceInstagramRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceInstagramRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceInstagramResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceInstagramResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceInstagramResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceInstagramResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceInstagramResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceinstatus.go b/internal/sdk/pkg/models/operations/getsourceinstatus.go
old mode 100755
new mode 100644
index d20f607b0..f692a429d
--- a/internal/sdk/pkg/models/operations/getsourceinstatus.go
+++ b/internal/sdk/pkg/models/operations/getsourceinstatus.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceInstatusRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceInstatusRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceInstatusResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceInstatusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceInstatusResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceInstatusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceInstatusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceintercom.go b/internal/sdk/pkg/models/operations/getsourceintercom.go
old mode 100755
new mode 100644
index 90b662265..5bce6caea
--- a/internal/sdk/pkg/models/operations/getsourceintercom.go
+++ b/internal/sdk/pkg/models/operations/getsourceintercom.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceIntercomRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceIntercomRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceIntercomResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceIntercomResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceIntercomResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceIntercomResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceIntercomResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceip2whois.go b/internal/sdk/pkg/models/operations/getsourceip2whois.go
old mode 100755
new mode 100644
index 875284a20..0869a1815
--- a/internal/sdk/pkg/models/operations/getsourceip2whois.go
+++ b/internal/sdk/pkg/models/operations/getsourceip2whois.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceIp2whoisRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceIp2whoisRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceIp2whoisResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceIp2whoisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceIp2whoisResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceIp2whoisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceIp2whoisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceiterable.go b/internal/sdk/pkg/models/operations/getsourceiterable.go
old mode 100755
new mode 100644
index b98204878..c884c0778
--- a/internal/sdk/pkg/models/operations/getsourceiterable.go
+++ b/internal/sdk/pkg/models/operations/getsourceiterable.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceIterableRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceIterableRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceIterableResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceIterableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceIterableResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceIterableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceIterableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcejira.go b/internal/sdk/pkg/models/operations/getsourcejira.go
old mode 100755
new mode 100644
index 81ab85ceb..f6ccbbdb4
--- a/internal/sdk/pkg/models/operations/getsourcejira.go
+++ b/internal/sdk/pkg/models/operations/getsourcejira.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceJiraRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceJiraRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceJiraResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceJiraResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceJiraResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceJiraResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceJiraResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcek6cloud.go b/internal/sdk/pkg/models/operations/getsourcek6cloud.go
old mode 100755
new mode 100644
index 5ab1a2761..819d5b479
--- a/internal/sdk/pkg/models/operations/getsourcek6cloud.go
+++ b/internal/sdk/pkg/models/operations/getsourcek6cloud.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceK6CloudRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceK6CloudRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceK6CloudResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceK6CloudResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceK6CloudResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceK6CloudResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceK6CloudResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceklarna.go b/internal/sdk/pkg/models/operations/getsourceklarna.go
old mode 100755
new mode 100644
index 5744d6c39..57b74d478
--- a/internal/sdk/pkg/models/operations/getsourceklarna.go
+++ b/internal/sdk/pkg/models/operations/getsourceklarna.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceKlarnaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceKlarnaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceKlarnaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceKlarnaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceKlarnaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceKlarnaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceKlarnaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceklaviyo.go b/internal/sdk/pkg/models/operations/getsourceklaviyo.go
old mode 100755
new mode 100644
index c3a7d42ea..4cc903424
--- a/internal/sdk/pkg/models/operations/getsourceklaviyo.go
+++ b/internal/sdk/pkg/models/operations/getsourceklaviyo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceKlaviyoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceKlaviyoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceKlaviyoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceKlaviyoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceKlaviyoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceKlaviyoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceKlaviyoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcekustomersinger.go b/internal/sdk/pkg/models/operations/getsourcekustomersinger.go
old mode 100755
new mode 100644
index 3b077c5dd..ae5eeb77d
--- a/internal/sdk/pkg/models/operations/getsourcekustomersinger.go
+++ b/internal/sdk/pkg/models/operations/getsourcekustomersinger.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceKustomerSingerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceKustomerSingerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceKustomerSingerResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceKustomerSingerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceKustomerSingerResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceKustomerSingerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceKustomerSingerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcekyve.go b/internal/sdk/pkg/models/operations/getsourcekyve.go
old mode 100755
new mode 100644
index 6018c69c1..4c54a27f1
--- a/internal/sdk/pkg/models/operations/getsourcekyve.go
+++ b/internal/sdk/pkg/models/operations/getsourcekyve.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceKyveRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceKyveRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceKyveResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceKyveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceKyveResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceKyveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceKyveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcelaunchdarkly.go b/internal/sdk/pkg/models/operations/getsourcelaunchdarkly.go
old mode 100755
new mode 100644
index 0ee44631d..bb8958bcf
--- a/internal/sdk/pkg/models/operations/getsourcelaunchdarkly.go
+++ b/internal/sdk/pkg/models/operations/getsourcelaunchdarkly.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceLaunchdarklyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceLaunchdarklyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceLaunchdarklyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceLaunchdarklyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceLaunchdarklyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceLaunchdarklyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceLaunchdarklyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcelemlist.go b/internal/sdk/pkg/models/operations/getsourcelemlist.go
old mode 100755
new mode 100644
index ca8f6af9d..13b1525f6
--- a/internal/sdk/pkg/models/operations/getsourcelemlist.go
+++ b/internal/sdk/pkg/models/operations/getsourcelemlist.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceLemlistRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceLemlistRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceLemlistResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceLemlistResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceLemlistResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceLemlistResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceLemlistResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceleverhiring.go b/internal/sdk/pkg/models/operations/getsourceleverhiring.go
old mode 100755
new mode 100644
index 2cb58cc35..27b1274a4
--- a/internal/sdk/pkg/models/operations/getsourceleverhiring.go
+++ b/internal/sdk/pkg/models/operations/getsourceleverhiring.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceLeverHiringRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceLeverHiringRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceLeverHiringResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceLeverHiringResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceLeverHiringResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceLeverHiringResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceLeverHiringResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcelinkedinads.go b/internal/sdk/pkg/models/operations/getsourcelinkedinads.go
old mode 100755
new mode 100644
index bbfdc65a4..bb9acf7d0
--- a/internal/sdk/pkg/models/operations/getsourcelinkedinads.go
+++ b/internal/sdk/pkg/models/operations/getsourcelinkedinads.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceLinkedinAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceLinkedinAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceLinkedinAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceLinkedinAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceLinkedinAdsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceLinkedinAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceLinkedinAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcelinkedinpages.go b/internal/sdk/pkg/models/operations/getsourcelinkedinpages.go
old mode 100755
new mode 100644
index 504e7b12a..e6c87be56
--- a/internal/sdk/pkg/models/operations/getsourcelinkedinpages.go
+++ b/internal/sdk/pkg/models/operations/getsourcelinkedinpages.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceLinkedinPagesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceLinkedinPagesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceLinkedinPagesResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceLinkedinPagesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceLinkedinPagesResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceLinkedinPagesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceLinkedinPagesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcelinnworks.go b/internal/sdk/pkg/models/operations/getsourcelinnworks.go
old mode 100755
new mode 100644
index 85ce66021..ffee085ef
--- a/internal/sdk/pkg/models/operations/getsourcelinnworks.go
+++ b/internal/sdk/pkg/models/operations/getsourcelinnworks.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceLinnworksRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceLinnworksRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceLinnworksResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceLinnworksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceLinnworksResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceLinnworksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceLinnworksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcelokalise.go b/internal/sdk/pkg/models/operations/getsourcelokalise.go
old mode 100755
new mode 100644
index 9ea6be406..73b55a306
--- a/internal/sdk/pkg/models/operations/getsourcelokalise.go
+++ b/internal/sdk/pkg/models/operations/getsourcelokalise.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceLokaliseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceLokaliseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceLokaliseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceLokaliseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceLokaliseResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceLokaliseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceLokaliseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemailchimp.go b/internal/sdk/pkg/models/operations/getsourcemailchimp.go
old mode 100755
new mode 100644
index 3c91ccd25..dc31ff002
--- a/internal/sdk/pkg/models/operations/getsourcemailchimp.go
+++ b/internal/sdk/pkg/models/operations/getsourcemailchimp.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMailchimpRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMailchimpRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMailchimpResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMailchimpResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMailchimpResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMailchimpResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMailchimpResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemailgun.go b/internal/sdk/pkg/models/operations/getsourcemailgun.go
old mode 100755
new mode 100644
index 517016f67..d42f1e171
--- a/internal/sdk/pkg/models/operations/getsourcemailgun.go
+++ b/internal/sdk/pkg/models/operations/getsourcemailgun.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMailgunRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMailgunRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMailgunResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMailgunResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMailgunResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMailgunResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMailgunResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemailjetsms.go b/internal/sdk/pkg/models/operations/getsourcemailjetsms.go
old mode 100755
new mode 100644
index 0d7d9fad4..7a642455d
--- a/internal/sdk/pkg/models/operations/getsourcemailjetsms.go
+++ b/internal/sdk/pkg/models/operations/getsourcemailjetsms.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMailjetSmsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMailjetSmsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMailjetSmsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMailjetSmsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMailjetSmsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMailjetSmsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMailjetSmsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemarketo.go b/internal/sdk/pkg/models/operations/getsourcemarketo.go
old mode 100755
new mode 100644
index e2ac87652..0511099ae
--- a/internal/sdk/pkg/models/operations/getsourcemarketo.go
+++ b/internal/sdk/pkg/models/operations/getsourcemarketo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMarketoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMarketoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMarketoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMarketoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMarketoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMarketoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMarketoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemetabase.go b/internal/sdk/pkg/models/operations/getsourcemetabase.go
old mode 100755
new mode 100644
index efdf55f78..da519d195
--- a/internal/sdk/pkg/models/operations/getsourcemetabase.go
+++ b/internal/sdk/pkg/models/operations/getsourcemetabase.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMetabaseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMetabaseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMetabaseResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMetabaseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMetabaseResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMetabaseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMetabaseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemicrosoftteams.go b/internal/sdk/pkg/models/operations/getsourcemicrosoftteams.go
old mode 100755
new mode 100644
index d21716bcd..e7c0de3ad
--- a/internal/sdk/pkg/models/operations/getsourcemicrosoftteams.go
+++ b/internal/sdk/pkg/models/operations/getsourcemicrosoftteams.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMicrosoftTeamsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMicrosoftTeamsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMicrosoftTeamsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMicrosoftTeamsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMicrosoftTeamsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMicrosoftTeamsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMicrosoftTeamsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemixpanel.go b/internal/sdk/pkg/models/operations/getsourcemixpanel.go
old mode 100755
new mode 100644
index cc1d51bb6..d35b67b31
--- a/internal/sdk/pkg/models/operations/getsourcemixpanel.go
+++ b/internal/sdk/pkg/models/operations/getsourcemixpanel.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMixpanelRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMixpanelRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMixpanelResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMixpanelResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMixpanelResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMixpanelResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMixpanelResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemonday.go b/internal/sdk/pkg/models/operations/getsourcemonday.go
old mode 100755
new mode 100644
index af590d16a..5dd5518e5
--- a/internal/sdk/pkg/models/operations/getsourcemonday.go
+++ b/internal/sdk/pkg/models/operations/getsourcemonday.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMondayRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMondayRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMondayResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMondayResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMondayResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMondayResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMondayResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemongodb.go b/internal/sdk/pkg/models/operations/getsourcemongodb.go
deleted file mode 100755
index a243a572e..000000000
--- a/internal/sdk/pkg/models/operations/getsourcemongodb.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type GetSourceMongodbRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type GetSourceMongodbResponse struct {
- ContentType string
- // Get a Source by the id in the path.
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/getsourcemongodbinternalpoc.go b/internal/sdk/pkg/models/operations/getsourcemongodbinternalpoc.go
old mode 100755
new mode 100644
index 74b93e805..b8fd44b25
--- a/internal/sdk/pkg/models/operations/getsourcemongodbinternalpoc.go
+++ b/internal/sdk/pkg/models/operations/getsourcemongodbinternalpoc.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMongodbInternalPocRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMongodbInternalPocRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMongodbInternalPocResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMongodbInternalPocResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMongodbInternalPocResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMongodbInternalPocResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMongodbInternalPocResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemongodbv2.go b/internal/sdk/pkg/models/operations/getsourcemongodbv2.go
new file mode 100644
index 000000000..9b5cdf76b
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/getsourcemongodbv2.go
@@ -0,0 +1,58 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type GetSourceMongodbV2Request struct {
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *GetSourceMongodbV2Request) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type GetSourceMongodbV2Response struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Get a Source by the id in the path.
+ SourceResponse *shared.SourceResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMongodbV2Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMongodbV2Response) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMongodbV2Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMongodbV2Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/getsourcemssql.go b/internal/sdk/pkg/models/operations/getsourcemssql.go
old mode 100755
new mode 100644
index 6f5b680c8..37dbf8ae0
--- a/internal/sdk/pkg/models/operations/getsourcemssql.go
+++ b/internal/sdk/pkg/models/operations/getsourcemssql.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMssqlRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMssqlRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMssqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMssqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMssqlResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMssqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMssqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemyhours.go b/internal/sdk/pkg/models/operations/getsourcemyhours.go
old mode 100755
new mode 100644
index 6ffc2e4c4..182681692
--- a/internal/sdk/pkg/models/operations/getsourcemyhours.go
+++ b/internal/sdk/pkg/models/operations/getsourcemyhours.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMyHoursRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMyHoursRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMyHoursResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMyHoursResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMyHoursResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMyHoursResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMyHoursResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcemysql.go b/internal/sdk/pkg/models/operations/getsourcemysql.go
old mode 100755
new mode 100644
index 2f93cd13b..424ec9f52
--- a/internal/sdk/pkg/models/operations/getsourcemysql.go
+++ b/internal/sdk/pkg/models/operations/getsourcemysql.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceMysqlRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceMysqlRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceMysqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceMysqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceMysqlResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceMysqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceMysqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcenetsuite.go b/internal/sdk/pkg/models/operations/getsourcenetsuite.go
old mode 100755
new mode 100644
index e2d7965cb..bfb25bd8b
--- a/internal/sdk/pkg/models/operations/getsourcenetsuite.go
+++ b/internal/sdk/pkg/models/operations/getsourcenetsuite.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceNetsuiteRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceNetsuiteRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceNetsuiteResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceNetsuiteResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceNetsuiteResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceNetsuiteResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceNetsuiteResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcenotion.go b/internal/sdk/pkg/models/operations/getsourcenotion.go
old mode 100755
new mode 100644
index 1d9234a1f..3d2c2eb4f
--- a/internal/sdk/pkg/models/operations/getsourcenotion.go
+++ b/internal/sdk/pkg/models/operations/getsourcenotion.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceNotionRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceNotionRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceNotionResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceNotionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceNotionResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceNotionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceNotionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcenytimes.go b/internal/sdk/pkg/models/operations/getsourcenytimes.go
old mode 100755
new mode 100644
index 08a4c12ba..7c5b2c0c7
--- a/internal/sdk/pkg/models/operations/getsourcenytimes.go
+++ b/internal/sdk/pkg/models/operations/getsourcenytimes.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceNytimesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceNytimesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceNytimesResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceNytimesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceNytimesResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceNytimesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceNytimesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceokta.go b/internal/sdk/pkg/models/operations/getsourceokta.go
old mode 100755
new mode 100644
index 6afd90187..64b8cf020
--- a/internal/sdk/pkg/models/operations/getsourceokta.go
+++ b/internal/sdk/pkg/models/operations/getsourceokta.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceOktaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceOktaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceOktaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceOktaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceOktaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceOktaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceOktaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceomnisend.go b/internal/sdk/pkg/models/operations/getsourceomnisend.go
old mode 100755
new mode 100644
index 94b3ed9da..9956232d0
--- a/internal/sdk/pkg/models/operations/getsourceomnisend.go
+++ b/internal/sdk/pkg/models/operations/getsourceomnisend.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceOmnisendRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceOmnisendRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceOmnisendResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceOmnisendResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceOmnisendResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceOmnisendResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceOmnisendResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceonesignal.go b/internal/sdk/pkg/models/operations/getsourceonesignal.go
old mode 100755
new mode 100644
index 8e107d862..b3f758787
--- a/internal/sdk/pkg/models/operations/getsourceonesignal.go
+++ b/internal/sdk/pkg/models/operations/getsourceonesignal.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceOnesignalRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceOnesignalRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceOnesignalResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceOnesignalResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceOnesignalResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceOnesignalResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceOnesignalResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceoracle.go b/internal/sdk/pkg/models/operations/getsourceoracle.go
old mode 100755
new mode 100644
index bca185705..2d113a381
--- a/internal/sdk/pkg/models/operations/getsourceoracle.go
+++ b/internal/sdk/pkg/models/operations/getsourceoracle.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceOracleRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceOracleRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceOracleResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceOracleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceOracleResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceOracleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceOracleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceorb.go b/internal/sdk/pkg/models/operations/getsourceorb.go
old mode 100755
new mode 100644
index 6058da934..e8b12a127
--- a/internal/sdk/pkg/models/operations/getsourceorb.go
+++ b/internal/sdk/pkg/models/operations/getsourceorb.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceOrbRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceOrbRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceOrbResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceOrbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceOrbResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceOrbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceOrbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceorbit.go b/internal/sdk/pkg/models/operations/getsourceorbit.go
old mode 100755
new mode 100644
index e1c3b843f..513af5798
--- a/internal/sdk/pkg/models/operations/getsourceorbit.go
+++ b/internal/sdk/pkg/models/operations/getsourceorbit.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceOrbitRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceOrbitRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceOrbitResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceOrbitResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceOrbitResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceOrbitResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceOrbitResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceoutbrainamplify.go b/internal/sdk/pkg/models/operations/getsourceoutbrainamplify.go
old mode 100755
new mode 100644
index 173ccf6ba..6be9c8b28
--- a/internal/sdk/pkg/models/operations/getsourceoutbrainamplify.go
+++ b/internal/sdk/pkg/models/operations/getsourceoutbrainamplify.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceOutbrainAmplifyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceOutbrainAmplifyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceOutbrainAmplifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceOutbrainAmplifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceOutbrainAmplifyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceOutbrainAmplifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceOutbrainAmplifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceoutreach.go b/internal/sdk/pkg/models/operations/getsourceoutreach.go
old mode 100755
new mode 100644
index 3198facbb..7a319bd83
--- a/internal/sdk/pkg/models/operations/getsourceoutreach.go
+++ b/internal/sdk/pkg/models/operations/getsourceoutreach.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceOutreachRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceOutreachRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceOutreachResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceOutreachResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceOutreachResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceOutreachResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceOutreachResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepaypaltransaction.go b/internal/sdk/pkg/models/operations/getsourcepaypaltransaction.go
old mode 100755
new mode 100644
index c140495da..8ef905b77
--- a/internal/sdk/pkg/models/operations/getsourcepaypaltransaction.go
+++ b/internal/sdk/pkg/models/operations/getsourcepaypaltransaction.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePaypalTransactionRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePaypalTransactionRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePaypalTransactionResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePaypalTransactionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePaypalTransactionResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePaypalTransactionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePaypalTransactionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepaystack.go b/internal/sdk/pkg/models/operations/getsourcepaystack.go
old mode 100755
new mode 100644
index 296759bda..8a636e8d3
--- a/internal/sdk/pkg/models/operations/getsourcepaystack.go
+++ b/internal/sdk/pkg/models/operations/getsourcepaystack.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePaystackRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePaystackRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePaystackResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePaystackResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePaystackResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePaystackResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePaystackResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcependo.go b/internal/sdk/pkg/models/operations/getsourcependo.go
old mode 100755
new mode 100644
index c723d13c2..e921f768d
--- a/internal/sdk/pkg/models/operations/getsourcependo.go
+++ b/internal/sdk/pkg/models/operations/getsourcependo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePendoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePendoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePendoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePendoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePendoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePendoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePendoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepersistiq.go b/internal/sdk/pkg/models/operations/getsourcepersistiq.go
old mode 100755
new mode 100644
index 9a250bb0c..aa8d39ee3
--- a/internal/sdk/pkg/models/operations/getsourcepersistiq.go
+++ b/internal/sdk/pkg/models/operations/getsourcepersistiq.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePersistiqRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePersistiqRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePersistiqResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePersistiqResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePersistiqResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePersistiqResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePersistiqResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepexelsapi.go b/internal/sdk/pkg/models/operations/getsourcepexelsapi.go
old mode 100755
new mode 100644
index 0a2601bf1..fc4d42498
--- a/internal/sdk/pkg/models/operations/getsourcepexelsapi.go
+++ b/internal/sdk/pkg/models/operations/getsourcepexelsapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePexelsAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePexelsAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePexelsAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePexelsAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePexelsAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePexelsAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePexelsAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepinterest.go b/internal/sdk/pkg/models/operations/getsourcepinterest.go
old mode 100755
new mode 100644
index 93bcc5756..bb7fda63a
--- a/internal/sdk/pkg/models/operations/getsourcepinterest.go
+++ b/internal/sdk/pkg/models/operations/getsourcepinterest.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePinterestRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePinterestRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePinterestResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePinterestResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePinterestResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePinterestResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePinterestResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepipedrive.go b/internal/sdk/pkg/models/operations/getsourcepipedrive.go
old mode 100755
new mode 100644
index 57cf1f444..fc5948e1f
--- a/internal/sdk/pkg/models/operations/getsourcepipedrive.go
+++ b/internal/sdk/pkg/models/operations/getsourcepipedrive.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePipedriveRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePipedriveRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePipedriveResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePipedriveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePipedriveResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePipedriveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePipedriveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepocket.go b/internal/sdk/pkg/models/operations/getsourcepocket.go
old mode 100755
new mode 100644
index 03374047f..37f103547
--- a/internal/sdk/pkg/models/operations/getsourcepocket.go
+++ b/internal/sdk/pkg/models/operations/getsourcepocket.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePocketRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePocketRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePocketResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePocketResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePocketResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePocketResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePocketResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepokeapi.go b/internal/sdk/pkg/models/operations/getsourcepokeapi.go
old mode 100755
new mode 100644
index e60afb7be..4a18c2a94
--- a/internal/sdk/pkg/models/operations/getsourcepokeapi.go
+++ b/internal/sdk/pkg/models/operations/getsourcepokeapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePokeapiRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePokeapiRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePokeapiResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePokeapiResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePokeapiResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePokeapiResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePokeapiResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepolygonstockapi.go b/internal/sdk/pkg/models/operations/getsourcepolygonstockapi.go
old mode 100755
new mode 100644
index 245bc8ad3..31af78dee
--- a/internal/sdk/pkg/models/operations/getsourcepolygonstockapi.go
+++ b/internal/sdk/pkg/models/operations/getsourcepolygonstockapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePolygonStockAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePolygonStockAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePolygonStockAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePolygonStockAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePolygonStockAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePolygonStockAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePolygonStockAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepostgres.go b/internal/sdk/pkg/models/operations/getsourcepostgres.go
old mode 100755
new mode 100644
index 8a51277ca..481d95028
--- a/internal/sdk/pkg/models/operations/getsourcepostgres.go
+++ b/internal/sdk/pkg/models/operations/getsourcepostgres.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePostgresRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePostgresRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePostgresResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePostgresResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePostgresResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePostgresResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePostgresResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceposthog.go b/internal/sdk/pkg/models/operations/getsourceposthog.go
old mode 100755
new mode 100644
index cf36635e2..a1519f99f
--- a/internal/sdk/pkg/models/operations/getsourceposthog.go
+++ b/internal/sdk/pkg/models/operations/getsourceposthog.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePosthogRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePosthogRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePosthogResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePosthogResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePosthogResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePosthogResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePosthogResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepostmarkapp.go b/internal/sdk/pkg/models/operations/getsourcepostmarkapp.go
old mode 100755
new mode 100644
index 719fe22c2..0f4dc26e4
--- a/internal/sdk/pkg/models/operations/getsourcepostmarkapp.go
+++ b/internal/sdk/pkg/models/operations/getsourcepostmarkapp.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePostmarkappRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePostmarkappRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePostmarkappResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePostmarkappResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePostmarkappResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePostmarkappResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePostmarkappResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceprestashop.go b/internal/sdk/pkg/models/operations/getsourceprestashop.go
old mode 100755
new mode 100644
index d1ee46341..38949be02
--- a/internal/sdk/pkg/models/operations/getsourceprestashop.go
+++ b/internal/sdk/pkg/models/operations/getsourceprestashop.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePrestashopRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePrestashopRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePrestashopResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePrestashopResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePrestashopResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePrestashopResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePrestashopResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepunkapi.go b/internal/sdk/pkg/models/operations/getsourcepunkapi.go
old mode 100755
new mode 100644
index b3367895f..d9358972f
--- a/internal/sdk/pkg/models/operations/getsourcepunkapi.go
+++ b/internal/sdk/pkg/models/operations/getsourcepunkapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePunkAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePunkAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePunkAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePunkAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePunkAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePunkAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePunkAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcepypi.go b/internal/sdk/pkg/models/operations/getsourcepypi.go
old mode 100755
new mode 100644
index 36f874390..7569ef1b6
--- a/internal/sdk/pkg/models/operations/getsourcepypi.go
+++ b/internal/sdk/pkg/models/operations/getsourcepypi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourcePypiRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourcePypiRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourcePypiResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourcePypiResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourcePypiResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourcePypiResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourcePypiResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcequalaroo.go b/internal/sdk/pkg/models/operations/getsourcequalaroo.go
old mode 100755
new mode 100644
index d8d640f66..c51ed9dc2
--- a/internal/sdk/pkg/models/operations/getsourcequalaroo.go
+++ b/internal/sdk/pkg/models/operations/getsourcequalaroo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceQualarooRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceQualarooRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceQualarooResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceQualarooResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceQualarooResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceQualarooResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceQualarooResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcequickbooks.go b/internal/sdk/pkg/models/operations/getsourcequickbooks.go
old mode 100755
new mode 100644
index 9f53ec29f..1431eedf3
--- a/internal/sdk/pkg/models/operations/getsourcequickbooks.go
+++ b/internal/sdk/pkg/models/operations/getsourcequickbooks.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceQuickbooksRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceQuickbooksRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceQuickbooksResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceQuickbooksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceQuickbooksResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceQuickbooksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceQuickbooksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcerailz.go b/internal/sdk/pkg/models/operations/getsourcerailz.go
old mode 100755
new mode 100644
index 003905e29..d0f9c81d6
--- a/internal/sdk/pkg/models/operations/getsourcerailz.go
+++ b/internal/sdk/pkg/models/operations/getsourcerailz.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceRailzRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceRailzRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceRailzResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceRailzResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceRailzResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceRailzResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceRailzResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcerecharge.go b/internal/sdk/pkg/models/operations/getsourcerecharge.go
old mode 100755
new mode 100644
index a49b390f3..492820e5c
--- a/internal/sdk/pkg/models/operations/getsourcerecharge.go
+++ b/internal/sdk/pkg/models/operations/getsourcerecharge.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceRechargeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceRechargeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceRechargeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceRechargeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceRechargeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceRechargeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceRechargeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcerecreation.go b/internal/sdk/pkg/models/operations/getsourcerecreation.go
old mode 100755
new mode 100644
index f19d21647..b432a6004
--- a/internal/sdk/pkg/models/operations/getsourcerecreation.go
+++ b/internal/sdk/pkg/models/operations/getsourcerecreation.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceRecreationRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceRecreationRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceRecreationResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceRecreationResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceRecreationResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceRecreationResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceRecreationResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcerecruitee.go b/internal/sdk/pkg/models/operations/getsourcerecruitee.go
old mode 100755
new mode 100644
index bd3c37589..baa803b93
--- a/internal/sdk/pkg/models/operations/getsourcerecruitee.go
+++ b/internal/sdk/pkg/models/operations/getsourcerecruitee.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceRecruiteeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceRecruiteeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceRecruiteeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceRecruiteeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceRecruiteeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceRecruiteeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceRecruiteeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcerecurly.go b/internal/sdk/pkg/models/operations/getsourcerecurly.go
old mode 100755
new mode 100644
index 1943691df..dcebe38d7
--- a/internal/sdk/pkg/models/operations/getsourcerecurly.go
+++ b/internal/sdk/pkg/models/operations/getsourcerecurly.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceRecurlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceRecurlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceRecurlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceRecurlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceRecurlyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceRecurlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceRecurlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceredshift.go b/internal/sdk/pkg/models/operations/getsourceredshift.go
old mode 100755
new mode 100644
index be376cfbc..d24341872
--- a/internal/sdk/pkg/models/operations/getsourceredshift.go
+++ b/internal/sdk/pkg/models/operations/getsourceredshift.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceRedshiftRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceRedshiftRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceRedshiftResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceRedshiftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceRedshiftResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceRedshiftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceRedshiftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceretently.go b/internal/sdk/pkg/models/operations/getsourceretently.go
old mode 100755
new mode 100644
index 82da76681..d70ea4394
--- a/internal/sdk/pkg/models/operations/getsourceretently.go
+++ b/internal/sdk/pkg/models/operations/getsourceretently.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceRetentlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceRetentlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceRetentlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceRetentlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceRetentlyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceRetentlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceRetentlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcerkicovid.go b/internal/sdk/pkg/models/operations/getsourcerkicovid.go
old mode 100755
new mode 100644
index 5ba3f9042..c1d4aad3e
--- a/internal/sdk/pkg/models/operations/getsourcerkicovid.go
+++ b/internal/sdk/pkg/models/operations/getsourcerkicovid.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceRkiCovidRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceRkiCovidRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceRkiCovidResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceRkiCovidResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceRkiCovidResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceRkiCovidResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceRkiCovidResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcerss.go b/internal/sdk/pkg/models/operations/getsourcerss.go
old mode 100755
new mode 100644
index e3be32a49..58d5aec48
--- a/internal/sdk/pkg/models/operations/getsourcerss.go
+++ b/internal/sdk/pkg/models/operations/getsourcerss.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceRssRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceRssRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceRssResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceRssResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceRssResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceRssResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceRssResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsources3.go b/internal/sdk/pkg/models/operations/getsources3.go
old mode 100755
new mode 100644
index 2dc87b7f0..0dec02dbd
--- a/internal/sdk/pkg/models/operations/getsources3.go
+++ b/internal/sdk/pkg/models/operations/getsources3.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceS3Request struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceS3Request) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceS3Response struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceS3Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceS3Response) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceS3Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceS3Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesalesforce.go b/internal/sdk/pkg/models/operations/getsourcesalesforce.go
old mode 100755
new mode 100644
index 3e5f055b2..9cea2220c
--- a/internal/sdk/pkg/models/operations/getsourcesalesforce.go
+++ b/internal/sdk/pkg/models/operations/getsourcesalesforce.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSalesforceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSalesforceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSalesforceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSalesforceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSalesforceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSalesforceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSalesforceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesalesloft.go b/internal/sdk/pkg/models/operations/getsourcesalesloft.go
old mode 100755
new mode 100644
index ca01f40d7..ece88fb43
--- a/internal/sdk/pkg/models/operations/getsourcesalesloft.go
+++ b/internal/sdk/pkg/models/operations/getsourcesalesloft.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSalesloftRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSalesloftRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSalesloftResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSalesloftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSalesloftResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSalesloftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSalesloftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesapfieldglass.go b/internal/sdk/pkg/models/operations/getsourcesapfieldglass.go
old mode 100755
new mode 100644
index 52d32bbde..4cefef6c3
--- a/internal/sdk/pkg/models/operations/getsourcesapfieldglass.go
+++ b/internal/sdk/pkg/models/operations/getsourcesapfieldglass.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSapFieldglassRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSapFieldglassRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSapFieldglassResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSapFieldglassResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSapFieldglassResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSapFieldglassResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSapFieldglassResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesecoda.go b/internal/sdk/pkg/models/operations/getsourcesecoda.go
old mode 100755
new mode 100644
index 73ec15fb0..a09eb76eb
--- a/internal/sdk/pkg/models/operations/getsourcesecoda.go
+++ b/internal/sdk/pkg/models/operations/getsourcesecoda.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSecodaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSecodaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSecodaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSecodaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSecodaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSecodaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSecodaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesendgrid.go b/internal/sdk/pkg/models/operations/getsourcesendgrid.go
old mode 100755
new mode 100644
index 006b4686e..ae9c72531
--- a/internal/sdk/pkg/models/operations/getsourcesendgrid.go
+++ b/internal/sdk/pkg/models/operations/getsourcesendgrid.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSendgridRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSendgridRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSendgridResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSendgridResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSendgridResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSendgridResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSendgridResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesendinblue.go b/internal/sdk/pkg/models/operations/getsourcesendinblue.go
old mode 100755
new mode 100644
index 091ba1e53..45c078ffb
--- a/internal/sdk/pkg/models/operations/getsourcesendinblue.go
+++ b/internal/sdk/pkg/models/operations/getsourcesendinblue.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSendinblueRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSendinblueRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSendinblueResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSendinblueResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSendinblueResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSendinblueResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSendinblueResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesenseforce.go b/internal/sdk/pkg/models/operations/getsourcesenseforce.go
old mode 100755
new mode 100644
index fe6d3fd79..6f8c365a8
--- a/internal/sdk/pkg/models/operations/getsourcesenseforce.go
+++ b/internal/sdk/pkg/models/operations/getsourcesenseforce.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSenseforceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSenseforceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSenseforceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSenseforceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSenseforceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSenseforceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSenseforceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesentry.go b/internal/sdk/pkg/models/operations/getsourcesentry.go
old mode 100755
new mode 100644
index 4e0da45e5..ebe794b12
--- a/internal/sdk/pkg/models/operations/getsourcesentry.go
+++ b/internal/sdk/pkg/models/operations/getsourcesentry.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSentryRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSentryRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSentryResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSentryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSentryResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSentryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSentryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesftp.go b/internal/sdk/pkg/models/operations/getsourcesftp.go
old mode 100755
new mode 100644
index 10a67086d..2e480203e
--- a/internal/sdk/pkg/models/operations/getsourcesftp.go
+++ b/internal/sdk/pkg/models/operations/getsourcesftp.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSftpRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSftpRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSftpResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSftpResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSftpResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSftpResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSftpResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesftpbulk.go b/internal/sdk/pkg/models/operations/getsourcesftpbulk.go
old mode 100755
new mode 100644
index 1b70983e1..930320252
--- a/internal/sdk/pkg/models/operations/getsourcesftpbulk.go
+++ b/internal/sdk/pkg/models/operations/getsourcesftpbulk.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSftpBulkRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSftpBulkRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSftpBulkResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSftpBulkResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSftpBulkResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSftpBulkResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSftpBulkResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceshopify.go b/internal/sdk/pkg/models/operations/getsourceshopify.go
old mode 100755
new mode 100644
index 904476b2c..78159ff5a
--- a/internal/sdk/pkg/models/operations/getsourceshopify.go
+++ b/internal/sdk/pkg/models/operations/getsourceshopify.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceShopifyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceShopifyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceShopifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceShopifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceShopifyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceShopifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceShopifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceshortio.go b/internal/sdk/pkg/models/operations/getsourceshortio.go
old mode 100755
new mode 100644
index e4171c609..ffe26dd8c
--- a/internal/sdk/pkg/models/operations/getsourceshortio.go
+++ b/internal/sdk/pkg/models/operations/getsourceshortio.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceShortioRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceShortioRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceShortioResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceShortioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceShortioResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceShortioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceShortioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceslack.go b/internal/sdk/pkg/models/operations/getsourceslack.go
old mode 100755
new mode 100644
index f5f2366e6..2070f7c55
--- a/internal/sdk/pkg/models/operations/getsourceslack.go
+++ b/internal/sdk/pkg/models/operations/getsourceslack.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSlackRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSlackRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSlackResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSlackResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSlackResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSlackResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSlackResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesmaily.go b/internal/sdk/pkg/models/operations/getsourcesmaily.go
old mode 100755
new mode 100644
index 25dcc1de3..01c35404b
--- a/internal/sdk/pkg/models/operations/getsourcesmaily.go
+++ b/internal/sdk/pkg/models/operations/getsourcesmaily.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSmailyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSmailyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSmailyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSmailyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSmailyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSmailyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSmailyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesmartengage.go b/internal/sdk/pkg/models/operations/getsourcesmartengage.go
old mode 100755
new mode 100644
index 2862d0a17..4cfbcbc20
--- a/internal/sdk/pkg/models/operations/getsourcesmartengage.go
+++ b/internal/sdk/pkg/models/operations/getsourcesmartengage.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSmartengageRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSmartengageRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSmartengageResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSmartengageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSmartengageResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSmartengageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSmartengageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesmartsheets.go b/internal/sdk/pkg/models/operations/getsourcesmartsheets.go
old mode 100755
new mode 100644
index be8167aba..9872e2afc
--- a/internal/sdk/pkg/models/operations/getsourcesmartsheets.go
+++ b/internal/sdk/pkg/models/operations/getsourcesmartsheets.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSmartsheetsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSmartsheetsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSmartsheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSmartsheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSmartsheetsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSmartsheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSmartsheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesnapchatmarketing.go b/internal/sdk/pkg/models/operations/getsourcesnapchatmarketing.go
old mode 100755
new mode 100644
index f9214ef0a..0b2059f30
--- a/internal/sdk/pkg/models/operations/getsourcesnapchatmarketing.go
+++ b/internal/sdk/pkg/models/operations/getsourcesnapchatmarketing.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSnapchatMarketingRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSnapchatMarketingRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSnapchatMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSnapchatMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSnapchatMarketingResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSnapchatMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSnapchatMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesnowflake.go b/internal/sdk/pkg/models/operations/getsourcesnowflake.go
old mode 100755
new mode 100644
index 1c1f2b03c..642ca5643
--- a/internal/sdk/pkg/models/operations/getsourcesnowflake.go
+++ b/internal/sdk/pkg/models/operations/getsourcesnowflake.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSnowflakeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSnowflakeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSnowflakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSnowflakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSnowflakeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSnowflakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSnowflakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesonarcloud.go b/internal/sdk/pkg/models/operations/getsourcesonarcloud.go
old mode 100755
new mode 100644
index ff049e256..cb7c78df6
--- a/internal/sdk/pkg/models/operations/getsourcesonarcloud.go
+++ b/internal/sdk/pkg/models/operations/getsourcesonarcloud.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSonarCloudRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSonarCloudRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSonarCloudResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSonarCloudResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSonarCloudResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSonarCloudResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSonarCloudResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcespacexapi.go b/internal/sdk/pkg/models/operations/getsourcespacexapi.go
old mode 100755
new mode 100644
index 1b184ee4d..d8bb8ba7a
--- a/internal/sdk/pkg/models/operations/getsourcespacexapi.go
+++ b/internal/sdk/pkg/models/operations/getsourcespacexapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSpacexAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSpacexAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSpacexAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSpacexAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSpacexAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSpacexAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSpacexAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesquare.go b/internal/sdk/pkg/models/operations/getsourcesquare.go
old mode 100755
new mode 100644
index 07f6c651b..dd7da15d2
--- a/internal/sdk/pkg/models/operations/getsourcesquare.go
+++ b/internal/sdk/pkg/models/operations/getsourcesquare.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSquareRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSquareRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSquareResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSquareResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSquareResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSquareResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSquareResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcestrava.go b/internal/sdk/pkg/models/operations/getsourcestrava.go
old mode 100755
new mode 100644
index cdc072d02..a6e436c27
--- a/internal/sdk/pkg/models/operations/getsourcestrava.go
+++ b/internal/sdk/pkg/models/operations/getsourcestrava.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceStravaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceStravaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceStravaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceStravaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceStravaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceStravaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceStravaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcestripe.go b/internal/sdk/pkg/models/operations/getsourcestripe.go
old mode 100755
new mode 100644
index c7c3c5c35..e41bfa907
--- a/internal/sdk/pkg/models/operations/getsourcestripe.go
+++ b/internal/sdk/pkg/models/operations/getsourcestripe.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceStripeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceStripeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceStripeResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceStripeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceStripeResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceStripeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceStripeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesurveymonkey.go b/internal/sdk/pkg/models/operations/getsourcesurveymonkey.go
old mode 100755
new mode 100644
index cff870895..d7f5fcc1c
--- a/internal/sdk/pkg/models/operations/getsourcesurveymonkey.go
+++ b/internal/sdk/pkg/models/operations/getsourcesurveymonkey.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSurveymonkeyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSurveymonkeyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSurveymonkeyResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSurveymonkeyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSurveymonkeyResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSurveymonkeyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSurveymonkeyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcesurveysparrow.go b/internal/sdk/pkg/models/operations/getsourcesurveysparrow.go
old mode 100755
new mode 100644
index 5ebd22d8e..59e69b36e
--- a/internal/sdk/pkg/models/operations/getsourcesurveysparrow.go
+++ b/internal/sdk/pkg/models/operations/getsourcesurveysparrow.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceSurveySparrowRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceSurveySparrowRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceSurveySparrowResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceSurveySparrowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceSurveySparrowResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceSurveySparrowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceSurveySparrowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcetempo.go b/internal/sdk/pkg/models/operations/getsourcetempo.go
old mode 100755
new mode 100644
index c4cabca76..3e3f0e23f
--- a/internal/sdk/pkg/models/operations/getsourcetempo.go
+++ b/internal/sdk/pkg/models/operations/getsourcetempo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceTempoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceTempoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceTempoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceTempoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceTempoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceTempoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceTempoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcetheguardianapi.go b/internal/sdk/pkg/models/operations/getsourcetheguardianapi.go
old mode 100755
new mode 100644
index 78e0d619b..874d6163c
--- a/internal/sdk/pkg/models/operations/getsourcetheguardianapi.go
+++ b/internal/sdk/pkg/models/operations/getsourcetheguardianapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceTheGuardianAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceTheGuardianAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceTheGuardianAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceTheGuardianAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceTheGuardianAPIResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceTheGuardianAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceTheGuardianAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcetiktokmarketing.go b/internal/sdk/pkg/models/operations/getsourcetiktokmarketing.go
old mode 100755
new mode 100644
index b5523f2a1..42024f989
--- a/internal/sdk/pkg/models/operations/getsourcetiktokmarketing.go
+++ b/internal/sdk/pkg/models/operations/getsourcetiktokmarketing.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceTiktokMarketingRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceTiktokMarketingRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceTiktokMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceTiktokMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceTiktokMarketingResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceTiktokMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceTiktokMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcetodoist.go b/internal/sdk/pkg/models/operations/getsourcetodoist.go
old mode 100755
new mode 100644
index a66c86477..ac3ec9eb6
--- a/internal/sdk/pkg/models/operations/getsourcetodoist.go
+++ b/internal/sdk/pkg/models/operations/getsourcetodoist.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceTodoistRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceTodoistRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceTodoistResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceTodoistResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceTodoistResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceTodoistResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceTodoistResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcetrello.go b/internal/sdk/pkg/models/operations/getsourcetrello.go
old mode 100755
new mode 100644
index 0ba47ecfb..ce65a19ff
--- a/internal/sdk/pkg/models/operations/getsourcetrello.go
+++ b/internal/sdk/pkg/models/operations/getsourcetrello.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceTrelloRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceTrelloRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceTrelloResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceTrelloResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceTrelloResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceTrelloResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceTrelloResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcetrustpilot.go b/internal/sdk/pkg/models/operations/getsourcetrustpilot.go
old mode 100755
new mode 100644
index 11f976f08..86dd253a3
--- a/internal/sdk/pkg/models/operations/getsourcetrustpilot.go
+++ b/internal/sdk/pkg/models/operations/getsourcetrustpilot.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceTrustpilotRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceTrustpilotRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceTrustpilotResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceTrustpilotResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceTrustpilotResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceTrustpilotResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceTrustpilotResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcetvmazeschedule.go b/internal/sdk/pkg/models/operations/getsourcetvmazeschedule.go
old mode 100755
new mode 100644
index 4e7fb0b29..2c7f67471
--- a/internal/sdk/pkg/models/operations/getsourcetvmazeschedule.go
+++ b/internal/sdk/pkg/models/operations/getsourcetvmazeschedule.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceTvmazeScheduleRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceTvmazeScheduleRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceTvmazeScheduleResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceTvmazeScheduleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceTvmazeScheduleResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceTvmazeScheduleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceTvmazeScheduleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcetwilio.go b/internal/sdk/pkg/models/operations/getsourcetwilio.go
old mode 100755
new mode 100644
index 35dc60aae..f95a9b3ba
--- a/internal/sdk/pkg/models/operations/getsourcetwilio.go
+++ b/internal/sdk/pkg/models/operations/getsourcetwilio.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceTwilioRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceTwilioRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceTwilioResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceTwilioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceTwilioResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceTwilioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceTwilioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcetwiliotaskrouter.go b/internal/sdk/pkg/models/operations/getsourcetwiliotaskrouter.go
old mode 100755
new mode 100644
index 3c9d7140c..6f0793359
--- a/internal/sdk/pkg/models/operations/getsourcetwiliotaskrouter.go
+++ b/internal/sdk/pkg/models/operations/getsourcetwiliotaskrouter.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceTwilioTaskrouterRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceTwilioTaskrouterRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceTwilioTaskrouterResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceTwilioTaskrouterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceTwilioTaskrouterResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceTwilioTaskrouterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceTwilioTaskrouterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcetwitter.go b/internal/sdk/pkg/models/operations/getsourcetwitter.go
old mode 100755
new mode 100644
index 03357efda..5dd5165e9
--- a/internal/sdk/pkg/models/operations/getsourcetwitter.go
+++ b/internal/sdk/pkg/models/operations/getsourcetwitter.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceTwitterRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceTwitterRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceTwitterResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceTwitterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceTwitterResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceTwitterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceTwitterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcetypeform.go b/internal/sdk/pkg/models/operations/getsourcetypeform.go
old mode 100755
new mode 100644
index e13b0938c..b3eb5c738
--- a/internal/sdk/pkg/models/operations/getsourcetypeform.go
+++ b/internal/sdk/pkg/models/operations/getsourcetypeform.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceTypeformRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceTypeformRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceTypeformResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceTypeformResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceTypeformResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceTypeformResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceTypeformResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceuscensus.go b/internal/sdk/pkg/models/operations/getsourceuscensus.go
old mode 100755
new mode 100644
index b95797dcc..aed1fc407
--- a/internal/sdk/pkg/models/operations/getsourceuscensus.go
+++ b/internal/sdk/pkg/models/operations/getsourceuscensus.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceUsCensusRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceUsCensusRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceUsCensusResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceUsCensusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceUsCensusResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceUsCensusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceUsCensusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcevantage.go b/internal/sdk/pkg/models/operations/getsourcevantage.go
old mode 100755
new mode 100644
index a5adb0b5f..7ea559245
--- a/internal/sdk/pkg/models/operations/getsourcevantage.go
+++ b/internal/sdk/pkg/models/operations/getsourcevantage.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceVantageRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceVantageRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceVantageResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceVantageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceVantageResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceVantageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceVantageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcewebflow.go b/internal/sdk/pkg/models/operations/getsourcewebflow.go
old mode 100755
new mode 100644
index d3f944580..59bb218ae
--- a/internal/sdk/pkg/models/operations/getsourcewebflow.go
+++ b/internal/sdk/pkg/models/operations/getsourcewebflow.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceWebflowRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceWebflowRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceWebflowResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceWebflowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceWebflowResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceWebflowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceWebflowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcewhiskyhunter.go b/internal/sdk/pkg/models/operations/getsourcewhiskyhunter.go
old mode 100755
new mode 100644
index 32a7dd3f0..e9735a246
--- a/internal/sdk/pkg/models/operations/getsourcewhiskyhunter.go
+++ b/internal/sdk/pkg/models/operations/getsourcewhiskyhunter.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceWhiskyHunterRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceWhiskyHunterRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceWhiskyHunterResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceWhiskyHunterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceWhiskyHunterResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceWhiskyHunterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceWhiskyHunterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcewikipediapageviews.go b/internal/sdk/pkg/models/operations/getsourcewikipediapageviews.go
old mode 100755
new mode 100644
index f0f9d70f2..f22c8e8d4
--- a/internal/sdk/pkg/models/operations/getsourcewikipediapageviews.go
+++ b/internal/sdk/pkg/models/operations/getsourcewikipediapageviews.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceWikipediaPageviewsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceWikipediaPageviewsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceWikipediaPageviewsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceWikipediaPageviewsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceWikipediaPageviewsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceWikipediaPageviewsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceWikipediaPageviewsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcewoocommerce.go b/internal/sdk/pkg/models/operations/getsourcewoocommerce.go
old mode 100755
new mode 100644
index 4477d9ae8..702b5b0b8
--- a/internal/sdk/pkg/models/operations/getsourcewoocommerce.go
+++ b/internal/sdk/pkg/models/operations/getsourcewoocommerce.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceWoocommerceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceWoocommerceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceWoocommerceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceWoocommerceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceWoocommerceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceWoocommerceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceWoocommerceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcexero.go b/internal/sdk/pkg/models/operations/getsourcexero.go
deleted file mode 100755
index 2a941e3c6..000000000
--- a/internal/sdk/pkg/models/operations/getsourcexero.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type GetSourceXeroRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type GetSourceXeroResponse struct {
- ContentType string
- // Get a Source by the id in the path.
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/getsourcexkcd.go b/internal/sdk/pkg/models/operations/getsourcexkcd.go
old mode 100755
new mode 100644
index 7a0c4a5f5..bb16fcac7
--- a/internal/sdk/pkg/models/operations/getsourcexkcd.go
+++ b/internal/sdk/pkg/models/operations/getsourcexkcd.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceXkcdRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceXkcdRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceXkcdResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceXkcdResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceXkcdResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceXkcdResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceXkcdResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceyandexmetrica.go b/internal/sdk/pkg/models/operations/getsourceyandexmetrica.go
old mode 100755
new mode 100644
index 6638ec78d..55ed69aec
--- a/internal/sdk/pkg/models/operations/getsourceyandexmetrica.go
+++ b/internal/sdk/pkg/models/operations/getsourceyandexmetrica.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceYandexMetricaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceYandexMetricaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceYandexMetricaResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceYandexMetricaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceYandexMetricaResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceYandexMetricaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceYandexMetricaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceyotpo.go b/internal/sdk/pkg/models/operations/getsourceyotpo.go
old mode 100755
new mode 100644
index 418ca00b7..ae043621e
--- a/internal/sdk/pkg/models/operations/getsourceyotpo.go
+++ b/internal/sdk/pkg/models/operations/getsourceyotpo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceYotpoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceYotpoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceYotpoResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceYotpoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceYotpoResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceYotpoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceYotpoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourceyounium.go b/internal/sdk/pkg/models/operations/getsourceyounium.go
deleted file mode 100755
index 0f58cbd29..000000000
--- a/internal/sdk/pkg/models/operations/getsourceyounium.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type GetSourceYouniumRequest struct {
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type GetSourceYouniumResponse struct {
- ContentType string
- // Get a Source by the id in the path.
- SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/getsourceyoutubeanalytics.go b/internal/sdk/pkg/models/operations/getsourceyoutubeanalytics.go
old mode 100755
new mode 100644
index adb0d9d06..fa3fab1bc
--- a/internal/sdk/pkg/models/operations/getsourceyoutubeanalytics.go
+++ b/internal/sdk/pkg/models/operations/getsourceyoutubeanalytics.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceYoutubeAnalyticsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceYoutubeAnalyticsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceYoutubeAnalyticsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceYoutubeAnalyticsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceYoutubeAnalyticsResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceYoutubeAnalyticsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceYoutubeAnalyticsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcezendeskchat.go b/internal/sdk/pkg/models/operations/getsourcezendeskchat.go
old mode 100755
new mode 100644
index 903ab750a..7bd2eb42d
--- a/internal/sdk/pkg/models/operations/getsourcezendeskchat.go
+++ b/internal/sdk/pkg/models/operations/getsourcezendeskchat.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceZendeskChatRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceZendeskChatRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceZendeskChatResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceZendeskChatResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceZendeskChatResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceZendeskChatResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceZendeskChatResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcezendesksell.go b/internal/sdk/pkg/models/operations/getsourcezendesksell.go
new file mode 100644
index 000000000..841fb81d9
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/getsourcezendesksell.go
@@ -0,0 +1,58 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type GetSourceZendeskSellRequest struct {
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *GetSourceZendeskSellRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type GetSourceZendeskSellResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // Get a Source by the id in the path.
+ SourceResponse *shared.SourceResponse
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceZendeskSellResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceZendeskSellResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceZendeskSellResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceZendeskSellResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/getsourcezendesksunshine.go b/internal/sdk/pkg/models/operations/getsourcezendesksunshine.go
old mode 100755
new mode 100644
index aced7ce83..353cbcff8
--- a/internal/sdk/pkg/models/operations/getsourcezendesksunshine.go
+++ b/internal/sdk/pkg/models/operations/getsourcezendesksunshine.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceZendeskSunshineRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceZendeskSunshineRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceZendeskSunshineResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceZendeskSunshineResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceZendeskSunshineResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceZendeskSunshineResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceZendeskSunshineResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcezendesksupport.go b/internal/sdk/pkg/models/operations/getsourcezendesksupport.go
old mode 100755
new mode 100644
index b0f27edff..98e1dd17d
--- a/internal/sdk/pkg/models/operations/getsourcezendesksupport.go
+++ b/internal/sdk/pkg/models/operations/getsourcezendesksupport.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceZendeskSupportRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceZendeskSupportRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceZendeskSupportResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceZendeskSupportResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceZendeskSupportResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceZendeskSupportResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceZendeskSupportResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcezendesktalk.go b/internal/sdk/pkg/models/operations/getsourcezendesktalk.go
old mode 100755
new mode 100644
index ea8fcf33d..a6ff5e694
--- a/internal/sdk/pkg/models/operations/getsourcezendesktalk.go
+++ b/internal/sdk/pkg/models/operations/getsourcezendesktalk.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceZendeskTalkRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceZendeskTalkRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceZendeskTalkResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceZendeskTalkResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceZendeskTalkResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceZendeskTalkResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceZendeskTalkResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcezenloop.go b/internal/sdk/pkg/models/operations/getsourcezenloop.go
old mode 100755
new mode 100644
index 775ecb723..bbf45b4e4
--- a/internal/sdk/pkg/models/operations/getsourcezenloop.go
+++ b/internal/sdk/pkg/models/operations/getsourcezenloop.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceZenloopRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceZenloopRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceZenloopResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceZenloopResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceZenloopResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceZenloopResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceZenloopResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcezohocrm.go b/internal/sdk/pkg/models/operations/getsourcezohocrm.go
old mode 100755
new mode 100644
index 7959a4631..8377d2962
--- a/internal/sdk/pkg/models/operations/getsourcezohocrm.go
+++ b/internal/sdk/pkg/models/operations/getsourcezohocrm.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceZohoCrmRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceZohoCrmRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceZohoCrmResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceZohoCrmResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceZohoCrmResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceZohoCrmResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceZohoCrmResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcezoom.go b/internal/sdk/pkg/models/operations/getsourcezoom.go
old mode 100755
new mode 100644
index 0349c9a42..8aa797562
--- a/internal/sdk/pkg/models/operations/getsourcezoom.go
+++ b/internal/sdk/pkg/models/operations/getsourcezoom.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceZoomRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceZoomRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceZoomResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceZoomResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceZoomResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceZoomResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceZoomResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getsourcezuora.go b/internal/sdk/pkg/models/operations/getsourcezuora.go
old mode 100755
new mode 100644
index 8a25639a7..e97c185a9
--- a/internal/sdk/pkg/models/operations/getsourcezuora.go
+++ b/internal/sdk/pkg/models/operations/getsourcezuora.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetSourceZuoraRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *GetSourceZuoraRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetSourceZuoraResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Get a Source by the id in the path.
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *GetSourceZuoraResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetSourceZuoraResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *GetSourceZuoraResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetSourceZuoraResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/getstreamproperties.go b/internal/sdk/pkg/models/operations/getstreamproperties.go
old mode 100755
new mode 100644
index ad2b304c3..35574617b
--- a/internal/sdk/pkg/models/operations/getstreamproperties.go
+++ b/internal/sdk/pkg/models/operations/getstreamproperties.go
@@ -3,7 +3,8 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"net/http"
)
@@ -11,15 +12,78 @@ type GetStreamPropertiesRequest struct {
// ID of the destination
DestinationID string `queryParam:"style=form,explode=true,name=destinationId"`
// If true pull the latest schema from the source, else pull from cache (default false)
- IgnoreCache *bool `queryParam:"style=form,explode=true,name=ignoreCache"`
+ IgnoreCache *bool `default:"false" queryParam:"style=form,explode=true,name=ignoreCache"`
// ID of the source
SourceID string `queryParam:"style=form,explode=true,name=sourceId"`
}
+func (g GetStreamPropertiesRequest) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(g, "", false)
+}
+
+func (g *GetStreamPropertiesRequest) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &g, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *GetStreamPropertiesRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+func (o *GetStreamPropertiesRequest) GetIgnoreCache() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IgnoreCache
+}
+
+func (o *GetStreamPropertiesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type GetStreamPropertiesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
// Get the available streams properties for a source/destination pair.
StreamPropertiesResponse *shared.StreamPropertiesResponse
}
+
+func (o *GetStreamPropertiesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetStreamPropertiesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetStreamPropertiesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
+
+func (o *GetStreamPropertiesResponse) GetStreamPropertiesResponse() *shared.StreamPropertiesResponse {
+ if o == nil {
+ return nil
+ }
+ return o.StreamPropertiesResponse
+}
diff --git a/internal/sdk/pkg/models/operations/getworkspace.go b/internal/sdk/pkg/models/operations/getworkspace.go
old mode 100755
new mode 100644
index 37b1fdf69..8657a5c80
--- a/internal/sdk/pkg/models/operations/getworkspace.go
+++ b/internal/sdk/pkg/models/operations/getworkspace.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -11,10 +11,48 @@ type GetWorkspaceRequest struct {
WorkspaceID string `pathParam:"style=simple,explode=false,name=workspaceId"`
}
+func (o *GetWorkspaceRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
+
type GetWorkspaceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
// Get a Workspace by the id in the path.
WorkspaceResponse *shared.WorkspaceResponse
}
+
+func (o *GetWorkspaceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *GetWorkspaceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *GetWorkspaceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
+
+func (o *GetWorkspaceResponse) GetWorkspaceResponse() *shared.WorkspaceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.WorkspaceResponse
+}
diff --git a/internal/sdk/pkg/models/operations/initiateoauth.go b/internal/sdk/pkg/models/operations/initiateoauth.go
old mode 100755
new mode 100644
index c08ceca5a..b4a9bc1ab
--- a/internal/sdk/pkg/models/operations/initiateoauth.go
+++ b/internal/sdk/pkg/models/operations/initiateoauth.go
@@ -7,7 +7,31 @@ import (
)
type InitiateOAuthResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *InitiateOAuthResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *InitiateOAuthResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *InitiateOAuthResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/listconnections.go b/internal/sdk/pkg/models/operations/listconnections.go
old mode 100755
new mode 100644
index 6978ab5a2..03b46611c
--- a/internal/sdk/pkg/models/operations/listconnections.go
+++ b/internal/sdk/pkg/models/operations/listconnections.go
@@ -3,25 +3,96 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"net/http"
)
type ListConnectionsRequest struct {
// Include deleted connections in the returned results.
- IncludeDeleted *bool `queryParam:"style=form,explode=true,name=includeDeleted"`
+ IncludeDeleted *bool `default:"false" queryParam:"style=form,explode=true,name=includeDeleted"`
// Set the limit on the number of Connections returned. The default is 20.
- Limit *int `queryParam:"style=form,explode=true,name=limit"`
+ Limit *int `default:"20" queryParam:"style=form,explode=true,name=limit"`
// Set the offset to start at when returning Connections. The default is 0
- Offset *int `queryParam:"style=form,explode=true,name=offset"`
+ Offset *int `default:"0" queryParam:"style=form,explode=true,name=offset"`
// The UUIDs of the workspaces you wish to list connections for. Empty list will retrieve all allowed workspaces.
WorkspaceIds []string `queryParam:"style=form,explode=true,name=workspaceIds"`
}
+func (l ListConnectionsRequest) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(l, "", false)
+}
+
+func (l *ListConnectionsRequest) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &l, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ListConnectionsRequest) GetIncludeDeleted() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeDeleted
+}
+
+func (o *ListConnectionsRequest) GetLimit() *int {
+ if o == nil {
+ return nil
+ }
+ return o.Limit
+}
+
+func (o *ListConnectionsRequest) GetOffset() *int {
+ if o == nil {
+ return nil
+ }
+ return o.Offset
+}
+
+func (o *ListConnectionsRequest) GetWorkspaceIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.WorkspaceIds
+}
+
type ListConnectionsResponse struct {
// Successful operation
ConnectionsResponse *shared.ConnectionsResponse
- ContentType string
- StatusCode int
- RawResponse *http.Response
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *ListConnectionsResponse) GetConnectionsResponse() *shared.ConnectionsResponse {
+ if o == nil {
+ return nil
+ }
+ return o.ConnectionsResponse
+}
+
+func (o *ListConnectionsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *ListConnectionsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *ListConnectionsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/listdestinations.go b/internal/sdk/pkg/models/operations/listdestinations.go
old mode 100755
new mode 100644
index af0a543d0..7e6751bc6
--- a/internal/sdk/pkg/models/operations/listdestinations.go
+++ b/internal/sdk/pkg/models/operations/listdestinations.go
@@ -3,25 +3,96 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"net/http"
)
type ListDestinationsRequest struct {
// Include deleted destinations in the returned results.
- IncludeDeleted *bool `queryParam:"style=form,explode=true,name=includeDeleted"`
+ IncludeDeleted *bool `default:"false" queryParam:"style=form,explode=true,name=includeDeleted"`
// Set the limit on the number of destinations returned. The default is 20.
- Limit *int `queryParam:"style=form,explode=true,name=limit"`
+ Limit *int `default:"20" queryParam:"style=form,explode=true,name=limit"`
// Set the offset to start at when returning destinations. The default is 0
- Offset *int `queryParam:"style=form,explode=true,name=offset"`
+ Offset *int `default:"0" queryParam:"style=form,explode=true,name=offset"`
// The UUIDs of the workspaces you wish to list destinations for. Empty list will retrieve all allowed workspaces.
WorkspaceIds []string `queryParam:"style=form,explode=true,name=workspaceIds"`
}
+func (l ListDestinationsRequest) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(l, "", false)
+}
+
+func (l *ListDestinationsRequest) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &l, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ListDestinationsRequest) GetIncludeDeleted() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeDeleted
+}
+
+func (o *ListDestinationsRequest) GetLimit() *int {
+ if o == nil {
+ return nil
+ }
+ return o.Limit
+}
+
+func (o *ListDestinationsRequest) GetOffset() *int {
+ if o == nil {
+ return nil
+ }
+ return o.Offset
+}
+
+func (o *ListDestinationsRequest) GetWorkspaceIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.WorkspaceIds
+}
+
type ListDestinationsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
DestinationsResponse *shared.DestinationsResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *ListDestinationsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *ListDestinationsResponse) GetDestinationsResponse() *shared.DestinationsResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationsResponse
+}
+
+func (o *ListDestinationsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *ListDestinationsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/listjobs.go b/internal/sdk/pkg/models/operations/listjobs.go
old mode 100755
new mode 100644
index 739314789..bf9a4c062
--- a/internal/sdk/pkg/models/operations/listjobs.go
+++ b/internal/sdk/pkg/models/operations/listjobs.go
@@ -3,7 +3,8 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"net/http"
"time"
)
@@ -18,9 +19,9 @@ type ListJobsRequest struct {
// Filter the Jobs by jobType.
JobType *shared.JobTypeEnum `queryParam:"style=form,explode=true,name=jobType"`
// Set the limit on the number of Jobs returned. The default is 20 Jobs.
- Limit *int `queryParam:"style=form,explode=true,name=limit"`
+ Limit *int `default:"20" queryParam:"style=form,explode=true,name=limit"`
// Set the offset to start at when returning Jobs. The default is 0.
- Offset *int `queryParam:"style=form,explode=true,name=offset"`
+ Offset *int `default:"0" queryParam:"style=form,explode=true,name=offset"`
// The field and method to use for ordering. Currently allowed are createdAt and updatedAt.
OrderBy *string `queryParam:"style=form,explode=true,name=orderBy"`
// The Job status you want to filter by
@@ -33,10 +34,129 @@ type ListJobsRequest struct {
WorkspaceIds []string `queryParam:"style=form,explode=true,name=workspaceIds"`
}
+func (l ListJobsRequest) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(l, "", false)
+}
+
+func (l *ListJobsRequest) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &l, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ListJobsRequest) GetConnectionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ConnectionID
+}
+
+func (o *ListJobsRequest) GetCreatedAtEnd() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.CreatedAtEnd
+}
+
+func (o *ListJobsRequest) GetCreatedAtStart() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.CreatedAtStart
+}
+
+func (o *ListJobsRequest) GetJobType() *shared.JobTypeEnum {
+ if o == nil {
+ return nil
+ }
+ return o.JobType
+}
+
+func (o *ListJobsRequest) GetLimit() *int {
+ if o == nil {
+ return nil
+ }
+ return o.Limit
+}
+
+func (o *ListJobsRequest) GetOffset() *int {
+ if o == nil {
+ return nil
+ }
+ return o.Offset
+}
+
+func (o *ListJobsRequest) GetOrderBy() *string {
+ if o == nil {
+ return nil
+ }
+ return o.OrderBy
+}
+
+func (o *ListJobsRequest) GetStatus() *shared.JobStatusEnum {
+ if o == nil {
+ return nil
+ }
+ return o.Status
+}
+
+func (o *ListJobsRequest) GetUpdatedAtEnd() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.UpdatedAtEnd
+}
+
+func (o *ListJobsRequest) GetUpdatedAtStart() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.UpdatedAtStart
+}
+
+func (o *ListJobsRequest) GetWorkspaceIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.WorkspaceIds
+}
+
type ListJobsResponse struct {
+ // HTTP response content type for this operation
ContentType string
// List all the Jobs by connectionId.
JobsResponse *shared.JobsResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *ListJobsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *ListJobsResponse) GetJobsResponse() *shared.JobsResponse {
+ if o == nil {
+ return nil
+ }
+ return o.JobsResponse
+}
+
+func (o *ListJobsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *ListJobsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/listsources.go b/internal/sdk/pkg/models/operations/listsources.go
old mode 100755
new mode 100644
index 5d18b4e8c..430f8aa05
--- a/internal/sdk/pkg/models/operations/listsources.go
+++ b/internal/sdk/pkg/models/operations/listsources.go
@@ -3,25 +3,96 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"net/http"
)
type ListSourcesRequest struct {
// Include deleted sources in the returned results.
- IncludeDeleted *bool `queryParam:"style=form,explode=true,name=includeDeleted"`
+ IncludeDeleted *bool `default:"false" queryParam:"style=form,explode=true,name=includeDeleted"`
// Set the limit on the number of sources returned. The default is 20.
- Limit *int `queryParam:"style=form,explode=true,name=limit"`
+ Limit *int `default:"20" queryParam:"style=form,explode=true,name=limit"`
// Set the offset to start at when returning sources. The default is 0
- Offset *int `queryParam:"style=form,explode=true,name=offset"`
+ Offset *int `default:"0" queryParam:"style=form,explode=true,name=offset"`
// The UUIDs of the workspaces you wish to list sources for. Empty list will retrieve all allowed workspaces.
WorkspaceIds []string `queryParam:"style=form,explode=true,name=workspaceIds"`
}
+func (l ListSourcesRequest) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(l, "", false)
+}
+
+func (l *ListSourcesRequest) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &l, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ListSourcesRequest) GetIncludeDeleted() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeDeleted
+}
+
+func (o *ListSourcesRequest) GetLimit() *int {
+ if o == nil {
+ return nil
+ }
+ return o.Limit
+}
+
+func (o *ListSourcesRequest) GetOffset() *int {
+ if o == nil {
+ return nil
+ }
+ return o.Offset
+}
+
+func (o *ListSourcesRequest) GetWorkspaceIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.WorkspaceIds
+}
+
type ListSourcesResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Successful operation
SourcesResponse *shared.SourcesResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *ListSourcesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *ListSourcesResponse) GetSourcesResponse() *shared.SourcesResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourcesResponse
+}
+
+func (o *ListSourcesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *ListSourcesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/listworkspaces.go b/internal/sdk/pkg/models/operations/listworkspaces.go
old mode 100755
new mode 100644
index fe810db87..52d59667a
--- a/internal/sdk/pkg/models/operations/listworkspaces.go
+++ b/internal/sdk/pkg/models/operations/listworkspaces.go
@@ -3,25 +3,96 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"net/http"
)
type ListWorkspacesRequest struct {
// Include deleted workspaces in the returned results.
- IncludeDeleted *bool `queryParam:"style=form,explode=true,name=includeDeleted"`
+ IncludeDeleted *bool `default:"false" queryParam:"style=form,explode=true,name=includeDeleted"`
// Set the limit on the number of workspaces returned. The default is 20.
- Limit *int `queryParam:"style=form,explode=true,name=limit"`
+ Limit *int `default:"20" queryParam:"style=form,explode=true,name=limit"`
// Set the offset to start at when returning workspaces. The default is 0
- Offset *int `queryParam:"style=form,explode=true,name=offset"`
+ Offset *int `default:"0" queryParam:"style=form,explode=true,name=offset"`
// The UUIDs of the workspaces you wish to fetch. Empty list will retrieve all allowed workspaces.
WorkspaceIds []string `queryParam:"style=form,explode=true,name=workspaceIds"`
}
+func (l ListWorkspacesRequest) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(l, "", false)
+}
+
+func (l *ListWorkspacesRequest) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &l, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ListWorkspacesRequest) GetIncludeDeleted() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeDeleted
+}
+
+func (o *ListWorkspacesRequest) GetLimit() *int {
+ if o == nil {
+ return nil
+ }
+ return o.Limit
+}
+
+func (o *ListWorkspacesRequest) GetOffset() *int {
+ if o == nil {
+ return nil
+ }
+ return o.Offset
+}
+
+func (o *ListWorkspacesRequest) GetWorkspaceIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.WorkspaceIds
+}
+
type ListWorkspacesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
// Successful operation
WorkspacesResponse *shared.WorkspacesResponse
}
+
+func (o *ListWorkspacesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *ListWorkspacesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *ListWorkspacesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
+
+func (o *ListWorkspacesResponse) GetWorkspacesResponse() *shared.WorkspacesResponse {
+ if o == nil {
+ return nil
+ }
+ return o.WorkspacesResponse
+}
diff --git a/internal/sdk/pkg/models/operations/patchconnection.go b/internal/sdk/pkg/models/operations/patchconnection.go
old mode 100755
new mode 100644
index 61b6c23ba..6cacfe301
--- a/internal/sdk/pkg/models/operations/patchconnection.go
+++ b/internal/sdk/pkg/models/operations/patchconnection.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,10 +12,55 @@ type PatchConnectionRequest struct {
ConnectionID string `pathParam:"style=simple,explode=false,name=connectionId"`
}
+func (o *PatchConnectionRequest) GetConnectionPatchRequest() shared.ConnectionPatchRequest {
+ if o == nil {
+ return shared.ConnectionPatchRequest{}
+ }
+ return o.ConnectionPatchRequest
+}
+
+func (o *PatchConnectionRequest) GetConnectionID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConnectionID
+}
+
type PatchConnectionResponse struct {
// Update a Connection by the id in the path.
ConnectionResponse *shared.ConnectionResponse
- ContentType string
- StatusCode int
- RawResponse *http.Response
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PatchConnectionResponse) GetConnectionResponse() *shared.ConnectionResponse {
+ if o == nil {
+ return nil
+ }
+ return o.ConnectionResponse
+}
+
+func (o *PatchConnectionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PatchConnectionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PatchConnectionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/patchdestination.go b/internal/sdk/pkg/models/operations/patchdestination.go
old mode 100755
new mode 100644
index ea8d97db4..0db9fc921
--- a/internal/sdk/pkg/models/operations/patchdestination.go
+++ b/internal/sdk/pkg/models/operations/patchdestination.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,10 +12,55 @@ type PatchDestinationRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PatchDestinationRequest) GetDestinationPatchRequest() *shared.DestinationPatchRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationPatchRequest
+}
+
+func (o *PatchDestinationRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PatchDestinationResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Update a Destination
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PatchDestinationResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PatchDestinationResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *PatchDestinationResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PatchDestinationResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/patchsource.go b/internal/sdk/pkg/models/operations/patchsource.go
old mode 100755
new mode 100644
index eef8a3f9e..e42aef076
--- a/internal/sdk/pkg/models/operations/patchsource.go
+++ b/internal/sdk/pkg/models/operations/patchsource.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,10 +12,55 @@ type PatchSourceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PatchSourceRequest) GetSourcePatchRequest() *shared.SourcePatchRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePatchRequest
+}
+
+func (o *PatchSourceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PatchSourceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Update a Source
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PatchSourceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PatchSourceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *PatchSourceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PatchSourceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/putdestination.go b/internal/sdk/pkg/models/operations/putdestination.go
old mode 100755
new mode 100644
index 87e174876..0cd84ccd0
--- a/internal/sdk/pkg/models/operations/putdestination.go
+++ b/internal/sdk/pkg/models/operations/putdestination.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,10 +12,55 @@ type PutDestinationRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationRequest) GetDestinationPutRequest() *shared.DestinationPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationPutRequest
+}
+
+func (o *PutDestinationRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Update a Destination and fully overwrite it
DestinationResponse *shared.DestinationResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PutDestinationResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationResponse) GetDestinationResponse() *shared.DestinationResponse {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationResponse
+}
+
+func (o *PutDestinationResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/putdestinationawsdatalake.go b/internal/sdk/pkg/models/operations/putdestinationawsdatalake.go
old mode 100755
new mode 100644
index cabafc41b..7fcae0108
--- a/internal/sdk/pkg/models/operations/putdestinationawsdatalake.go
+++ b/internal/sdk/pkg/models/operations/putdestinationawsdatalake.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationAwsDatalakeRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationAwsDatalakeRequest) GetDestinationAwsDatalakePutRequest() *shared.DestinationAwsDatalakePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationAwsDatalakePutRequest
+}
+
+func (o *PutDestinationAwsDatalakeRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationAwsDatalakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationAwsDatalakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationAwsDatalakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationAwsDatalakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationazureblobstorage.go b/internal/sdk/pkg/models/operations/putdestinationazureblobstorage.go
old mode 100755
new mode 100644
index 38d8f40b0..5d108cf7a
--- a/internal/sdk/pkg/models/operations/putdestinationazureblobstorage.go
+++ b/internal/sdk/pkg/models/operations/putdestinationazureblobstorage.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationAzureBlobStorageRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationAzureBlobStorageRequest) GetDestinationAzureBlobStoragePutRequest() *shared.DestinationAzureBlobStoragePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationAzureBlobStoragePutRequest
+}
+
+func (o *PutDestinationAzureBlobStorageRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationAzureBlobStorageResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationAzureBlobStorageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationAzureBlobStorageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationAzureBlobStorageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationbigquery.go b/internal/sdk/pkg/models/operations/putdestinationbigquery.go
old mode 100755
new mode 100644
index 4ed79c7cb..3596f5e4e
--- a/internal/sdk/pkg/models/operations/putdestinationbigquery.go
+++ b/internal/sdk/pkg/models/operations/putdestinationbigquery.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationBigqueryRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationBigqueryRequest) GetDestinationBigqueryPutRequest() *shared.DestinationBigqueryPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationBigqueryPutRequest
+}
+
+func (o *PutDestinationBigqueryRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationBigqueryResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationBigqueryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationBigqueryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationBigqueryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationbigquerydenormalized.go b/internal/sdk/pkg/models/operations/putdestinationbigquerydenormalized.go
deleted file mode 100755
index e82023039..000000000
--- a/internal/sdk/pkg/models/operations/putdestinationbigquerydenormalized.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type PutDestinationBigqueryDenormalizedRequest struct {
- DestinationBigqueryDenormalizedPutRequest *shared.DestinationBigqueryDenormalizedPutRequest `request:"mediaType=application/json"`
- DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
-}
-
-type PutDestinationBigqueryDenormalizedResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/putdestinationclickhouse.go b/internal/sdk/pkg/models/operations/putdestinationclickhouse.go
old mode 100755
new mode 100644
index 967e0d11b..7ee28f114
--- a/internal/sdk/pkg/models/operations/putdestinationclickhouse.go
+++ b/internal/sdk/pkg/models/operations/putdestinationclickhouse.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationClickhouseRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationClickhouseRequest) GetDestinationClickhousePutRequest() *shared.DestinationClickhousePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationClickhousePutRequest
+}
+
+func (o *PutDestinationClickhouseRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationClickhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationClickhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationClickhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationClickhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationconvex.go b/internal/sdk/pkg/models/operations/putdestinationconvex.go
old mode 100755
new mode 100644
index 0eefd9124..a77b5c745
--- a/internal/sdk/pkg/models/operations/putdestinationconvex.go
+++ b/internal/sdk/pkg/models/operations/putdestinationconvex.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationConvexRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationConvexRequest) GetDestinationConvexPutRequest() *shared.DestinationConvexPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationConvexPutRequest
+}
+
+func (o *PutDestinationConvexRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationConvexResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationConvexResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationConvexResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationConvexResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationcumulio.go b/internal/sdk/pkg/models/operations/putdestinationcumulio.go
old mode 100755
new mode 100644
index 2d6348fe7..adf7de119
--- a/internal/sdk/pkg/models/operations/putdestinationcumulio.go
+++ b/internal/sdk/pkg/models/operations/putdestinationcumulio.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationCumulioRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationCumulioRequest) GetDestinationCumulioPutRequest() *shared.DestinationCumulioPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationCumulioPutRequest
+}
+
+func (o *PutDestinationCumulioRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationCumulioResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationCumulioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationCumulioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationCumulioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationdatabend.go b/internal/sdk/pkg/models/operations/putdestinationdatabend.go
old mode 100755
new mode 100644
index 9890e625b..00047446b
--- a/internal/sdk/pkg/models/operations/putdestinationdatabend.go
+++ b/internal/sdk/pkg/models/operations/putdestinationdatabend.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationDatabendRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationDatabendRequest) GetDestinationDatabendPutRequest() *shared.DestinationDatabendPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationDatabendPutRequest
+}
+
+func (o *PutDestinationDatabendRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationDatabendResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationDatabendResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationDatabendResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationDatabendResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationdatabricks.go b/internal/sdk/pkg/models/operations/putdestinationdatabricks.go
old mode 100755
new mode 100644
index 5a1c3a3e3..1ac511d80
--- a/internal/sdk/pkg/models/operations/putdestinationdatabricks.go
+++ b/internal/sdk/pkg/models/operations/putdestinationdatabricks.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationDatabricksRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationDatabricksRequest) GetDestinationDatabricksPutRequest() *shared.DestinationDatabricksPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationDatabricksPutRequest
+}
+
+func (o *PutDestinationDatabricksRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationDatabricksResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationDatabricksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationDatabricksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationDatabricksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationdevnull.go b/internal/sdk/pkg/models/operations/putdestinationdevnull.go
old mode 100755
new mode 100644
index 8959a40ee..831f130b9
--- a/internal/sdk/pkg/models/operations/putdestinationdevnull.go
+++ b/internal/sdk/pkg/models/operations/putdestinationdevnull.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationDevNullRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationDevNullRequest) GetDestinationDevNullPutRequest() *shared.DestinationDevNullPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationDevNullPutRequest
+}
+
+func (o *PutDestinationDevNullRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationDevNullResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationDevNullResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationDevNullResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationDevNullResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationduckdb.go b/internal/sdk/pkg/models/operations/putdestinationduckdb.go
new file mode 100644
index 000000000..b778a03cb
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/putdestinationduckdb.go
@@ -0,0 +1,57 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type PutDestinationDuckdbRequest struct {
+ DestinationDuckdbPutRequest *shared.DestinationDuckdbPutRequest `request:"mediaType=application/json"`
+ DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
+}
+
+func (o *PutDestinationDuckdbRequest) GetDestinationDuckdbPutRequest() *shared.DestinationDuckdbPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationDuckdbPutRequest
+}
+
+func (o *PutDestinationDuckdbRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+type PutDestinationDuckdbResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PutDestinationDuckdbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationDuckdbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationDuckdbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationdynamodb.go b/internal/sdk/pkg/models/operations/putdestinationdynamodb.go
old mode 100755
new mode 100644
index e436900de..373860024
--- a/internal/sdk/pkg/models/operations/putdestinationdynamodb.go
+++ b/internal/sdk/pkg/models/operations/putdestinationdynamodb.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationDynamodbRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationDynamodbRequest) GetDestinationDynamodbPutRequest() *shared.DestinationDynamodbPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationDynamodbPutRequest
+}
+
+func (o *PutDestinationDynamodbRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationDynamodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationDynamodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationDynamodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationDynamodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationelasticsearch.go b/internal/sdk/pkg/models/operations/putdestinationelasticsearch.go
old mode 100755
new mode 100644
index 6a0f535df..c81d72af3
--- a/internal/sdk/pkg/models/operations/putdestinationelasticsearch.go
+++ b/internal/sdk/pkg/models/operations/putdestinationelasticsearch.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationElasticsearchRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationElasticsearchRequest) GetDestinationElasticsearchPutRequest() *shared.DestinationElasticsearchPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationElasticsearchPutRequest
+}
+
+func (o *PutDestinationElasticsearchRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationElasticsearchResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationElasticsearchResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationElasticsearchResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationElasticsearchResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationfirebolt.go b/internal/sdk/pkg/models/operations/putdestinationfirebolt.go
old mode 100755
new mode 100644
index b2b48cd0c..cad341f83
--- a/internal/sdk/pkg/models/operations/putdestinationfirebolt.go
+++ b/internal/sdk/pkg/models/operations/putdestinationfirebolt.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationFireboltRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationFireboltRequest) GetDestinationFireboltPutRequest() *shared.DestinationFireboltPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationFireboltPutRequest
+}
+
+func (o *PutDestinationFireboltRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationFireboltResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationFireboltResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationFireboltResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationFireboltResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationfirestore.go b/internal/sdk/pkg/models/operations/putdestinationfirestore.go
old mode 100755
new mode 100644
index 52bda5a4c..bd82acc17
--- a/internal/sdk/pkg/models/operations/putdestinationfirestore.go
+++ b/internal/sdk/pkg/models/operations/putdestinationfirestore.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationFirestoreRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationFirestoreRequest) GetDestinationFirestorePutRequest() *shared.DestinationFirestorePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationFirestorePutRequest
+}
+
+func (o *PutDestinationFirestoreRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationFirestoreResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationFirestoreResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationFirestoreResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationFirestoreResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationgcs.go b/internal/sdk/pkg/models/operations/putdestinationgcs.go
old mode 100755
new mode 100644
index f2b01e8d4..04ede272b
--- a/internal/sdk/pkg/models/operations/putdestinationgcs.go
+++ b/internal/sdk/pkg/models/operations/putdestinationgcs.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationGcsRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationGcsRequest) GetDestinationGcsPutRequest() *shared.DestinationGcsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationGcsPutRequest
+}
+
+func (o *PutDestinationGcsRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationGcsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationGcsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationGcsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationGcsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationgooglesheets.go b/internal/sdk/pkg/models/operations/putdestinationgooglesheets.go
old mode 100755
new mode 100644
index 9899faa30..0e5bf6da3
--- a/internal/sdk/pkg/models/operations/putdestinationgooglesheets.go
+++ b/internal/sdk/pkg/models/operations/putdestinationgooglesheets.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationGoogleSheetsRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationGoogleSheetsRequest) GetDestinationGoogleSheetsPutRequest() *shared.DestinationGoogleSheetsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationGoogleSheetsPutRequest
+}
+
+func (o *PutDestinationGoogleSheetsRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationGoogleSheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationGoogleSheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationGoogleSheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationGoogleSheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationkeen.go b/internal/sdk/pkg/models/operations/putdestinationkeen.go
old mode 100755
new mode 100644
index 811aab291..8928afd32
--- a/internal/sdk/pkg/models/operations/putdestinationkeen.go
+++ b/internal/sdk/pkg/models/operations/putdestinationkeen.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationKeenRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationKeenRequest) GetDestinationKeenPutRequest() *shared.DestinationKeenPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationKeenPutRequest
+}
+
+func (o *PutDestinationKeenRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationKeenResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationKeenResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationKeenResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationKeenResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationkinesis.go b/internal/sdk/pkg/models/operations/putdestinationkinesis.go
old mode 100755
new mode 100644
index acf8846a8..ccab8a394
--- a/internal/sdk/pkg/models/operations/putdestinationkinesis.go
+++ b/internal/sdk/pkg/models/operations/putdestinationkinesis.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationKinesisRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationKinesisRequest) GetDestinationKinesisPutRequest() *shared.DestinationKinesisPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationKinesisPutRequest
+}
+
+func (o *PutDestinationKinesisRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationKinesisResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationKinesisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationKinesisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationKinesisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationlangchain.go b/internal/sdk/pkg/models/operations/putdestinationlangchain.go
old mode 100755
new mode 100644
index d69314196..4310ecec6
--- a/internal/sdk/pkg/models/operations/putdestinationlangchain.go
+++ b/internal/sdk/pkg/models/operations/putdestinationlangchain.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationLangchainRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationLangchainRequest) GetDestinationLangchainPutRequest() *shared.DestinationLangchainPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationLangchainPutRequest
+}
+
+func (o *PutDestinationLangchainRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationLangchainResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationLangchainResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationLangchainResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationLangchainResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationmilvus.go b/internal/sdk/pkg/models/operations/putdestinationmilvus.go
old mode 100755
new mode 100644
index 7c29dde1e..cb21b7847
--- a/internal/sdk/pkg/models/operations/putdestinationmilvus.go
+++ b/internal/sdk/pkg/models/operations/putdestinationmilvus.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationMilvusRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationMilvusRequest) GetDestinationMilvusPutRequest() *shared.DestinationMilvusPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationMilvusPutRequest
+}
+
+func (o *PutDestinationMilvusRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationMilvusResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationMilvusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationMilvusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationMilvusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationmongodb.go b/internal/sdk/pkg/models/operations/putdestinationmongodb.go
old mode 100755
new mode 100644
index cd5f919ac..4590ef97c
--- a/internal/sdk/pkg/models/operations/putdestinationmongodb.go
+++ b/internal/sdk/pkg/models/operations/putdestinationmongodb.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationMongodbRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationMongodbRequest) GetDestinationMongodbPutRequest() *shared.DestinationMongodbPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationMongodbPutRequest
+}
+
+func (o *PutDestinationMongodbRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationMongodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationMongodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationMongodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationMongodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationmssql.go b/internal/sdk/pkg/models/operations/putdestinationmssql.go
old mode 100755
new mode 100644
index df12a877d..696b4d347
--- a/internal/sdk/pkg/models/operations/putdestinationmssql.go
+++ b/internal/sdk/pkg/models/operations/putdestinationmssql.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationMssqlRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationMssqlRequest) GetDestinationMssqlPutRequest() *shared.DestinationMssqlPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationMssqlPutRequest
+}
+
+func (o *PutDestinationMssqlRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationMssqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationMssqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationMssqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationMssqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationmysql.go b/internal/sdk/pkg/models/operations/putdestinationmysql.go
old mode 100755
new mode 100644
index eb030a2df..c4c09d416
--- a/internal/sdk/pkg/models/operations/putdestinationmysql.go
+++ b/internal/sdk/pkg/models/operations/putdestinationmysql.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationMysqlRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationMysqlRequest) GetDestinationMysqlPutRequest() *shared.DestinationMysqlPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationMysqlPutRequest
+}
+
+func (o *PutDestinationMysqlRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationMysqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationMysqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationMysqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationMysqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationoracle.go b/internal/sdk/pkg/models/operations/putdestinationoracle.go
old mode 100755
new mode 100644
index f370bf717..61e0e2548
--- a/internal/sdk/pkg/models/operations/putdestinationoracle.go
+++ b/internal/sdk/pkg/models/operations/putdestinationoracle.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationOracleRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationOracleRequest) GetDestinationOraclePutRequest() *shared.DestinationOraclePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationOraclePutRequest
+}
+
+func (o *PutDestinationOracleRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationOracleResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationOracleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationOracleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationOracleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationpinecone.go b/internal/sdk/pkg/models/operations/putdestinationpinecone.go
old mode 100755
new mode 100644
index 0af975ad6..0f4c66c88
--- a/internal/sdk/pkg/models/operations/putdestinationpinecone.go
+++ b/internal/sdk/pkg/models/operations/putdestinationpinecone.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationPineconeRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationPineconeRequest) GetDestinationPineconePutRequest() *shared.DestinationPineconePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationPineconePutRequest
+}
+
+func (o *PutDestinationPineconeRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationPineconeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationPineconeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationPineconeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationPineconeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationpostgres.go b/internal/sdk/pkg/models/operations/putdestinationpostgres.go
old mode 100755
new mode 100644
index 98ea3906d..0a22b2b3d
--- a/internal/sdk/pkg/models/operations/putdestinationpostgres.go
+++ b/internal/sdk/pkg/models/operations/putdestinationpostgres.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationPostgresRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationPostgresRequest) GetDestinationPostgresPutRequest() *shared.DestinationPostgresPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationPostgresPutRequest
+}
+
+func (o *PutDestinationPostgresRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationPostgresResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationPostgresResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationPostgresResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationPostgresResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationpubsub.go b/internal/sdk/pkg/models/operations/putdestinationpubsub.go
old mode 100755
new mode 100644
index 19a8ac9ff..00b46b021
--- a/internal/sdk/pkg/models/operations/putdestinationpubsub.go
+++ b/internal/sdk/pkg/models/operations/putdestinationpubsub.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationPubsubRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationPubsubRequest) GetDestinationPubsubPutRequest() *shared.DestinationPubsubPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationPubsubPutRequest
+}
+
+func (o *PutDestinationPubsubRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationPubsubResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationPubsubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationPubsubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationPubsubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationqdrant.go b/internal/sdk/pkg/models/operations/putdestinationqdrant.go
new file mode 100644
index 000000000..7a25d323d
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/putdestinationqdrant.go
@@ -0,0 +1,57 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type PutDestinationQdrantRequest struct {
+ DestinationQdrantPutRequest *shared.DestinationQdrantPutRequest `request:"mediaType=application/json"`
+ DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
+}
+
+func (o *PutDestinationQdrantRequest) GetDestinationQdrantPutRequest() *shared.DestinationQdrantPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationQdrantPutRequest
+}
+
+func (o *PutDestinationQdrantRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+type PutDestinationQdrantResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PutDestinationQdrantResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationQdrantResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationQdrantResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationredis.go b/internal/sdk/pkg/models/operations/putdestinationredis.go
old mode 100755
new mode 100644
index df9e7014e..1a90d5b84
--- a/internal/sdk/pkg/models/operations/putdestinationredis.go
+++ b/internal/sdk/pkg/models/operations/putdestinationredis.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationRedisRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationRedisRequest) GetDestinationRedisPutRequest() *shared.DestinationRedisPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationRedisPutRequest
+}
+
+func (o *PutDestinationRedisRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationRedisResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationRedisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationRedisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationRedisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationredshift.go b/internal/sdk/pkg/models/operations/putdestinationredshift.go
old mode 100755
new mode 100644
index 6627e71b7..2ffaa9d2a
--- a/internal/sdk/pkg/models/operations/putdestinationredshift.go
+++ b/internal/sdk/pkg/models/operations/putdestinationredshift.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationRedshiftRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationRedshiftRequest) GetDestinationRedshiftPutRequest() *shared.DestinationRedshiftPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationRedshiftPutRequest
+}
+
+func (o *PutDestinationRedshiftRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationRedshiftResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationRedshiftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationRedshiftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationRedshiftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinations3.go b/internal/sdk/pkg/models/operations/putdestinations3.go
old mode 100755
new mode 100644
index b5f3b5b0c..5e406fdda
--- a/internal/sdk/pkg/models/operations/putdestinations3.go
+++ b/internal/sdk/pkg/models/operations/putdestinations3.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationS3Request struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationS3Request) GetDestinationS3PutRequest() *shared.DestinationS3PutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationS3PutRequest
+}
+
+func (o *PutDestinationS3Request) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationS3Response struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationS3Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationS3Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationS3Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinations3glue.go b/internal/sdk/pkg/models/operations/putdestinations3glue.go
old mode 100755
new mode 100644
index b07f71a38..796aaefe9
--- a/internal/sdk/pkg/models/operations/putdestinations3glue.go
+++ b/internal/sdk/pkg/models/operations/putdestinations3glue.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationS3GlueRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationS3GlueRequest) GetDestinationS3GluePutRequest() *shared.DestinationS3GluePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationS3GluePutRequest
+}
+
+func (o *PutDestinationS3GlueRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationS3GlueResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationS3GlueResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationS3GlueResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationS3GlueResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationsftpjson.go b/internal/sdk/pkg/models/operations/putdestinationsftpjson.go
old mode 100755
new mode 100644
index 0bc9c9319..d29ca33e8
--- a/internal/sdk/pkg/models/operations/putdestinationsftpjson.go
+++ b/internal/sdk/pkg/models/operations/putdestinationsftpjson.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationSftpJSONRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationSftpJSONRequest) GetDestinationSftpJSONPutRequest() *shared.DestinationSftpJSONPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationSftpJSONPutRequest
+}
+
+func (o *PutDestinationSftpJSONRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationSftpJSONResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationSftpJSONResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationSftpJSONResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationSftpJSONResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationsnowflake.go b/internal/sdk/pkg/models/operations/putdestinationsnowflake.go
old mode 100755
new mode 100644
index ae3b3f0d9..3f697149e
--- a/internal/sdk/pkg/models/operations/putdestinationsnowflake.go
+++ b/internal/sdk/pkg/models/operations/putdestinationsnowflake.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationSnowflakeRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationSnowflakeRequest) GetDestinationSnowflakePutRequest() *shared.DestinationSnowflakePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationSnowflakePutRequest
+}
+
+func (o *PutDestinationSnowflakeRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationSnowflakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationSnowflakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationSnowflakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationSnowflakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationtimeplus.go b/internal/sdk/pkg/models/operations/putdestinationtimeplus.go
old mode 100755
new mode 100644
index 0fd203eef..f4053e39c
--- a/internal/sdk/pkg/models/operations/putdestinationtimeplus.go
+++ b/internal/sdk/pkg/models/operations/putdestinationtimeplus.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationTimeplusRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationTimeplusRequest) GetDestinationTimeplusPutRequest() *shared.DestinationTimeplusPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationTimeplusPutRequest
+}
+
+func (o *PutDestinationTimeplusRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationTimeplusResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationTimeplusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationTimeplusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationTimeplusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationtypesense.go b/internal/sdk/pkg/models/operations/putdestinationtypesense.go
old mode 100755
new mode 100644
index 8c728f6c7..ecbd4200e
--- a/internal/sdk/pkg/models/operations/putdestinationtypesense.go
+++ b/internal/sdk/pkg/models/operations/putdestinationtypesense.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationTypesenseRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationTypesenseRequest) GetDestinationTypesensePutRequest() *shared.DestinationTypesensePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationTypesensePutRequest
+}
+
+func (o *PutDestinationTypesenseRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationTypesenseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationTypesenseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationTypesenseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationTypesenseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationvertica.go b/internal/sdk/pkg/models/operations/putdestinationvertica.go
old mode 100755
new mode 100644
index b437c86f9..b52303cc5
--- a/internal/sdk/pkg/models/operations/putdestinationvertica.go
+++ b/internal/sdk/pkg/models/operations/putdestinationvertica.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationVerticaRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationVerticaRequest) GetDestinationVerticaPutRequest() *shared.DestinationVerticaPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationVerticaPutRequest
+}
+
+func (o *PutDestinationVerticaRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationVerticaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationVerticaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationVerticaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationVerticaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationweaviate.go b/internal/sdk/pkg/models/operations/putdestinationweaviate.go
new file mode 100644
index 000000000..26f491e76
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/putdestinationweaviate.go
@@ -0,0 +1,57 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type PutDestinationWeaviateRequest struct {
+ DestinationWeaviatePutRequest *shared.DestinationWeaviatePutRequest `request:"mediaType=application/json"`
+ DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
+}
+
+func (o *PutDestinationWeaviateRequest) GetDestinationWeaviatePutRequest() *shared.DestinationWeaviatePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationWeaviatePutRequest
+}
+
+func (o *PutDestinationWeaviateRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+type PutDestinationWeaviateResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PutDestinationWeaviateResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationWeaviateResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationWeaviateResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putdestinationxata.go b/internal/sdk/pkg/models/operations/putdestinationxata.go
old mode 100755
new mode 100644
index f45e6ae02..d2c2453f5
--- a/internal/sdk/pkg/models/operations/putdestinationxata.go
+++ b/internal/sdk/pkg/models/operations/putdestinationxata.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutDestinationXataRequest struct {
DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"`
}
+func (o *PutDestinationXataRequest) GetDestinationXataPutRequest() *shared.DestinationXataPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.DestinationXataPutRequest
+}
+
+func (o *PutDestinationXataRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
type PutDestinationXataResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutDestinationXataResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutDestinationXataResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutDestinationXataResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsource.go b/internal/sdk/pkg/models/operations/putsource.go
old mode 100755
new mode 100644
index c80ad92f5..95fbaa196
--- a/internal/sdk/pkg/models/operations/putsource.go
+++ b/internal/sdk/pkg/models/operations/putsource.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,10 +12,55 @@ type PutSourceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceRequest) GetSourcePutRequest() *shared.SourcePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePutRequest
+}
+
+func (o *PutSourceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceResponse struct {
+ // HTTP response content type for this operation
ContentType string
// Update a source and fully overwrite it
SourceResponse *shared.SourceResponse
- StatusCode int
- RawResponse *http.Response
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PutSourceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceResponse) GetSourceResponse() *shared.SourceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.SourceResponse
+}
+
+func (o *PutSourceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
}
diff --git a/internal/sdk/pkg/models/operations/putsourceaha.go b/internal/sdk/pkg/models/operations/putsourceaha.go
old mode 100755
new mode 100644
index d3a213e39..0f04ea231
--- a/internal/sdk/pkg/models/operations/putsourceaha.go
+++ b/internal/sdk/pkg/models/operations/putsourceaha.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAhaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAhaRequest) GetSourceAhaPutRequest() *shared.SourceAhaPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAhaPutRequest
+}
+
+func (o *PutSourceAhaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAhaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAhaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAhaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAhaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceaircall.go b/internal/sdk/pkg/models/operations/putsourceaircall.go
old mode 100755
new mode 100644
index 4915edba6..e05b8fc34
--- a/internal/sdk/pkg/models/operations/putsourceaircall.go
+++ b/internal/sdk/pkg/models/operations/putsourceaircall.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAircallRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAircallRequest) GetSourceAircallPutRequest() *shared.SourceAircallPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAircallPutRequest
+}
+
+func (o *PutSourceAircallRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAircallResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAircallResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAircallResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAircallResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceairtable.go b/internal/sdk/pkg/models/operations/putsourceairtable.go
old mode 100755
new mode 100644
index db714716a..39fac3c74
--- a/internal/sdk/pkg/models/operations/putsourceairtable.go
+++ b/internal/sdk/pkg/models/operations/putsourceairtable.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAirtableRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAirtableRequest) GetSourceAirtablePutRequest() *shared.SourceAirtablePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAirtablePutRequest
+}
+
+func (o *PutSourceAirtableRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAirtableResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAirtableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAirtableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAirtableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcealloydb.go b/internal/sdk/pkg/models/operations/putsourcealloydb.go
old mode 100755
new mode 100644
index 5f7cbbb7a..5937cd6a0
--- a/internal/sdk/pkg/models/operations/putsourcealloydb.go
+++ b/internal/sdk/pkg/models/operations/putsourcealloydb.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAlloydbRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAlloydbRequest) GetSourceAlloydbPutRequest() *shared.SourceAlloydbPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAlloydbPutRequest
+}
+
+func (o *PutSourceAlloydbRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAlloydbResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAlloydbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAlloydbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAlloydbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceamazonads.go b/internal/sdk/pkg/models/operations/putsourceamazonads.go
old mode 100755
new mode 100644
index 688ae6ed6..a630c984d
--- a/internal/sdk/pkg/models/operations/putsourceamazonads.go
+++ b/internal/sdk/pkg/models/operations/putsourceamazonads.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAmazonAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAmazonAdsRequest) GetSourceAmazonAdsPutRequest() *shared.SourceAmazonAdsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAmazonAdsPutRequest
+}
+
+func (o *PutSourceAmazonAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAmazonAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAmazonAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAmazonAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAmazonAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceamazonsellerpartner.go b/internal/sdk/pkg/models/operations/putsourceamazonsellerpartner.go
old mode 100755
new mode 100644
index a7ba24293..343fecab6
--- a/internal/sdk/pkg/models/operations/putsourceamazonsellerpartner.go
+++ b/internal/sdk/pkg/models/operations/putsourceamazonsellerpartner.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAmazonSellerPartnerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAmazonSellerPartnerRequest) GetSourceAmazonSellerPartnerPutRequest() *shared.SourceAmazonSellerPartnerPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAmazonSellerPartnerPutRequest
+}
+
+func (o *PutSourceAmazonSellerPartnerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAmazonSellerPartnerResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAmazonSellerPartnerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAmazonSellerPartnerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAmazonSellerPartnerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceamazonsqs.go b/internal/sdk/pkg/models/operations/putsourceamazonsqs.go
old mode 100755
new mode 100644
index 224c2d2c5..2d4477bff
--- a/internal/sdk/pkg/models/operations/putsourceamazonsqs.go
+++ b/internal/sdk/pkg/models/operations/putsourceamazonsqs.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAmazonSqsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAmazonSqsRequest) GetSourceAmazonSqsPutRequest() *shared.SourceAmazonSqsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAmazonSqsPutRequest
+}
+
+func (o *PutSourceAmazonSqsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAmazonSqsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAmazonSqsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAmazonSqsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAmazonSqsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceamplitude.go b/internal/sdk/pkg/models/operations/putsourceamplitude.go
old mode 100755
new mode 100644
index 30e48a52e..02d5a768e
--- a/internal/sdk/pkg/models/operations/putsourceamplitude.go
+++ b/internal/sdk/pkg/models/operations/putsourceamplitude.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAmplitudeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAmplitudeRequest) GetSourceAmplitudePutRequest() *shared.SourceAmplitudePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAmplitudePutRequest
+}
+
+func (o *PutSourceAmplitudeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAmplitudeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAmplitudeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAmplitudeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAmplitudeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceapifydataset.go b/internal/sdk/pkg/models/operations/putsourceapifydataset.go
old mode 100755
new mode 100644
index 3a3a3f2c7..0d5c5274f
--- a/internal/sdk/pkg/models/operations/putsourceapifydataset.go
+++ b/internal/sdk/pkg/models/operations/putsourceapifydataset.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceApifyDatasetRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceApifyDatasetRequest) GetSourceApifyDatasetPutRequest() *shared.SourceApifyDatasetPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceApifyDatasetPutRequest
+}
+
+func (o *PutSourceApifyDatasetRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceApifyDatasetResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceApifyDatasetResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceApifyDatasetResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceApifyDatasetResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceappfollow.go b/internal/sdk/pkg/models/operations/putsourceappfollow.go
old mode 100755
new mode 100644
index 8013972ed..8df22eb76
--- a/internal/sdk/pkg/models/operations/putsourceappfollow.go
+++ b/internal/sdk/pkg/models/operations/putsourceappfollow.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAppfollowRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAppfollowRequest) GetSourceAppfollowPutRequest() *shared.SourceAppfollowPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAppfollowPutRequest
+}
+
+func (o *PutSourceAppfollowRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAppfollowResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAppfollowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAppfollowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAppfollowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceasana.go b/internal/sdk/pkg/models/operations/putsourceasana.go
old mode 100755
new mode 100644
index aef888f7e..cfac86cc3
--- a/internal/sdk/pkg/models/operations/putsourceasana.go
+++ b/internal/sdk/pkg/models/operations/putsourceasana.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAsanaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAsanaRequest) GetSourceAsanaPutRequest() *shared.SourceAsanaPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAsanaPutRequest
+}
+
+func (o *PutSourceAsanaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAsanaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAsanaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAsanaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAsanaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceauth0.go b/internal/sdk/pkg/models/operations/putsourceauth0.go
old mode 100755
new mode 100644
index 89a8c715f..eed6b2772
--- a/internal/sdk/pkg/models/operations/putsourceauth0.go
+++ b/internal/sdk/pkg/models/operations/putsourceauth0.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAuth0Request struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAuth0Request) GetSourceAuth0PutRequest() *shared.SourceAuth0PutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAuth0PutRequest
+}
+
+func (o *PutSourceAuth0Request) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAuth0Response struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAuth0Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAuth0Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAuth0Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceawscloudtrail.go b/internal/sdk/pkg/models/operations/putsourceawscloudtrail.go
old mode 100755
new mode 100644
index 55dc9457b..8a71efd57
--- a/internal/sdk/pkg/models/operations/putsourceawscloudtrail.go
+++ b/internal/sdk/pkg/models/operations/putsourceawscloudtrail.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAwsCloudtrailRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAwsCloudtrailRequest) GetSourceAwsCloudtrailPutRequest() *shared.SourceAwsCloudtrailPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAwsCloudtrailPutRequest
+}
+
+func (o *PutSourceAwsCloudtrailRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAwsCloudtrailResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAwsCloudtrailResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAwsCloudtrailResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAwsCloudtrailResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceazureblobstorage.go b/internal/sdk/pkg/models/operations/putsourceazureblobstorage.go
old mode 100755
new mode 100644
index 77ce00d6e..6a14756b0
--- a/internal/sdk/pkg/models/operations/putsourceazureblobstorage.go
+++ b/internal/sdk/pkg/models/operations/putsourceazureblobstorage.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAzureBlobStorageRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAzureBlobStorageRequest) GetSourceAzureBlobStoragePutRequest() *shared.SourceAzureBlobStoragePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAzureBlobStoragePutRequest
+}
+
+func (o *PutSourceAzureBlobStorageRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAzureBlobStorageResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAzureBlobStorageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAzureBlobStorageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAzureBlobStorageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceazuretable.go b/internal/sdk/pkg/models/operations/putsourceazuretable.go
old mode 100755
new mode 100644
index 277ed3712..b20f567cf
--- a/internal/sdk/pkg/models/operations/putsourceazuretable.go
+++ b/internal/sdk/pkg/models/operations/putsourceazuretable.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceAzureTableRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceAzureTableRequest) GetSourceAzureTablePutRequest() *shared.SourceAzureTablePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceAzureTablePutRequest
+}
+
+func (o *PutSourceAzureTableRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceAzureTableResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceAzureTableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceAzureTableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceAzureTableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcebamboohr.go b/internal/sdk/pkg/models/operations/putsourcebamboohr.go
old mode 100755
new mode 100644
index a10b6a3d5..e0ed835d6
--- a/internal/sdk/pkg/models/operations/putsourcebamboohr.go
+++ b/internal/sdk/pkg/models/operations/putsourcebamboohr.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceBambooHrRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceBambooHrRequest) GetSourceBambooHrPutRequest() *shared.SourceBambooHrPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceBambooHrPutRequest
+}
+
+func (o *PutSourceBambooHrRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceBambooHrResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceBambooHrResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceBambooHrResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceBambooHrResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcebigcommerce.go b/internal/sdk/pkg/models/operations/putsourcebigcommerce.go
deleted file mode 100755
index 66868e74c..000000000
--- a/internal/sdk/pkg/models/operations/putsourcebigcommerce.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type PutSourceBigcommerceRequest struct {
- SourceBigcommercePutRequest *shared.SourceBigcommercePutRequest `request:"mediaType=application/json"`
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type PutSourceBigcommerceResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/putsourcebigquery.go b/internal/sdk/pkg/models/operations/putsourcebigquery.go
old mode 100755
new mode 100644
index 262022a15..7331dafa8
--- a/internal/sdk/pkg/models/operations/putsourcebigquery.go
+++ b/internal/sdk/pkg/models/operations/putsourcebigquery.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceBigqueryRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceBigqueryRequest) GetSourceBigqueryPutRequest() *shared.SourceBigqueryPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceBigqueryPutRequest
+}
+
+func (o *PutSourceBigqueryRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceBigqueryResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceBigqueryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceBigqueryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceBigqueryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcebingads.go b/internal/sdk/pkg/models/operations/putsourcebingads.go
old mode 100755
new mode 100644
index 518abe6a9..5e4d17825
--- a/internal/sdk/pkg/models/operations/putsourcebingads.go
+++ b/internal/sdk/pkg/models/operations/putsourcebingads.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceBingAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceBingAdsRequest) GetSourceBingAdsPutRequest() *shared.SourceBingAdsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceBingAdsPutRequest
+}
+
+func (o *PutSourceBingAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceBingAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceBingAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceBingAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceBingAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcebraintree.go b/internal/sdk/pkg/models/operations/putsourcebraintree.go
old mode 100755
new mode 100644
index 7dd86ab79..1867b2786
--- a/internal/sdk/pkg/models/operations/putsourcebraintree.go
+++ b/internal/sdk/pkg/models/operations/putsourcebraintree.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceBraintreeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceBraintreeRequest) GetSourceBraintreePutRequest() *shared.SourceBraintreePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceBraintreePutRequest
+}
+
+func (o *PutSourceBraintreeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceBraintreeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceBraintreeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceBraintreeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceBraintreeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcebraze.go b/internal/sdk/pkg/models/operations/putsourcebraze.go
old mode 100755
new mode 100644
index 5f4e7c55a..13b24353d
--- a/internal/sdk/pkg/models/operations/putsourcebraze.go
+++ b/internal/sdk/pkg/models/operations/putsourcebraze.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceBrazeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceBrazeRequest) GetSourceBrazePutRequest() *shared.SourceBrazePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceBrazePutRequest
+}
+
+func (o *PutSourceBrazeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceBrazeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceBrazeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceBrazeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceBrazeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcecart.go b/internal/sdk/pkg/models/operations/putsourcecart.go
new file mode 100644
index 000000000..7e690ac73
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/putsourcecart.go
@@ -0,0 +1,57 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type PutSourceCartRequest struct {
+ SourceCartPutRequest *shared.SourceCartPutRequest `request:"mediaType=application/json"`
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *PutSourceCartRequest) GetSourceCartPutRequest() *shared.SourceCartPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceCartPutRequest
+}
+
+func (o *PutSourceCartRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type PutSourceCartResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PutSourceCartResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceCartResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceCartResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcechargebee.go b/internal/sdk/pkg/models/operations/putsourcechargebee.go
old mode 100755
new mode 100644
index a803798ea..c9af819c8
--- a/internal/sdk/pkg/models/operations/putsourcechargebee.go
+++ b/internal/sdk/pkg/models/operations/putsourcechargebee.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceChargebeeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceChargebeeRequest) GetSourceChargebeePutRequest() *shared.SourceChargebeePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceChargebeePutRequest
+}
+
+func (o *PutSourceChargebeeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceChargebeeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceChargebeeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceChargebeeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceChargebeeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcechartmogul.go b/internal/sdk/pkg/models/operations/putsourcechartmogul.go
old mode 100755
new mode 100644
index 9184735ed..f63d900e9
--- a/internal/sdk/pkg/models/operations/putsourcechartmogul.go
+++ b/internal/sdk/pkg/models/operations/putsourcechartmogul.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceChartmogulRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceChartmogulRequest) GetSourceChartmogulPutRequest() *shared.SourceChartmogulPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceChartmogulPutRequest
+}
+
+func (o *PutSourceChartmogulRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceChartmogulResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceChartmogulResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceChartmogulResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceChartmogulResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceclickhouse.go b/internal/sdk/pkg/models/operations/putsourceclickhouse.go
old mode 100755
new mode 100644
index 3c38746e8..3659a3ca6
--- a/internal/sdk/pkg/models/operations/putsourceclickhouse.go
+++ b/internal/sdk/pkg/models/operations/putsourceclickhouse.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceClickhouseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceClickhouseRequest) GetSourceClickhousePutRequest() *shared.SourceClickhousePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceClickhousePutRequest
+}
+
+func (o *PutSourceClickhouseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceClickhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceClickhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceClickhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceClickhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceclickupapi.go b/internal/sdk/pkg/models/operations/putsourceclickupapi.go
old mode 100755
new mode 100644
index 9ae4abbf9..e8dcb68d4
--- a/internal/sdk/pkg/models/operations/putsourceclickupapi.go
+++ b/internal/sdk/pkg/models/operations/putsourceclickupapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceClickupAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceClickupAPIRequest) GetSourceClickupAPIPutRequest() *shared.SourceClickupAPIPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceClickupAPIPutRequest
+}
+
+func (o *PutSourceClickupAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceClickupAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceClickupAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceClickupAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceClickupAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceclockify.go b/internal/sdk/pkg/models/operations/putsourceclockify.go
old mode 100755
new mode 100644
index 8e69a1055..b657d4e38
--- a/internal/sdk/pkg/models/operations/putsourceclockify.go
+++ b/internal/sdk/pkg/models/operations/putsourceclockify.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceClockifyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceClockifyRequest) GetSourceClockifyPutRequest() *shared.SourceClockifyPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceClockifyPutRequest
+}
+
+func (o *PutSourceClockifyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceClockifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceClockifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceClockifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceClockifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceclosecom.go b/internal/sdk/pkg/models/operations/putsourceclosecom.go
old mode 100755
new mode 100644
index 7630482ea..d00a3d73f
--- a/internal/sdk/pkg/models/operations/putsourceclosecom.go
+++ b/internal/sdk/pkg/models/operations/putsourceclosecom.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceCloseComRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceCloseComRequest) GetSourceCloseComPutRequest() *shared.SourceCloseComPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceCloseComPutRequest
+}
+
+func (o *PutSourceCloseComRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceCloseComResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceCloseComResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceCloseComResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceCloseComResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcecoda.go b/internal/sdk/pkg/models/operations/putsourcecoda.go
old mode 100755
new mode 100644
index 101877875..03fb55f29
--- a/internal/sdk/pkg/models/operations/putsourcecoda.go
+++ b/internal/sdk/pkg/models/operations/putsourcecoda.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceCodaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceCodaRequest) GetSourceCodaPutRequest() *shared.SourceCodaPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceCodaPutRequest
+}
+
+func (o *PutSourceCodaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceCodaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceCodaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceCodaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceCodaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcecoinapi.go b/internal/sdk/pkg/models/operations/putsourcecoinapi.go
old mode 100755
new mode 100644
index 31dba3d83..d83b0cf6c
--- a/internal/sdk/pkg/models/operations/putsourcecoinapi.go
+++ b/internal/sdk/pkg/models/operations/putsourcecoinapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceCoinAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceCoinAPIRequest) GetSourceCoinAPIPutRequest() *shared.SourceCoinAPIPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceCoinAPIPutRequest
+}
+
+func (o *PutSourceCoinAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceCoinAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceCoinAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceCoinAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceCoinAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcecoinmarketcap.go b/internal/sdk/pkg/models/operations/putsourcecoinmarketcap.go
old mode 100755
new mode 100644
index 80b175065..7a8e296e6
--- a/internal/sdk/pkg/models/operations/putsourcecoinmarketcap.go
+++ b/internal/sdk/pkg/models/operations/putsourcecoinmarketcap.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceCoinmarketcapRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceCoinmarketcapRequest) GetSourceCoinmarketcapPutRequest() *shared.SourceCoinmarketcapPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceCoinmarketcapPutRequest
+}
+
+func (o *PutSourceCoinmarketcapRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceCoinmarketcapResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceCoinmarketcapResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceCoinmarketcapResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceCoinmarketcapResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceconfigcat.go b/internal/sdk/pkg/models/operations/putsourceconfigcat.go
old mode 100755
new mode 100644
index b8fe2ed2e..af3520494
--- a/internal/sdk/pkg/models/operations/putsourceconfigcat.go
+++ b/internal/sdk/pkg/models/operations/putsourceconfigcat.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceConfigcatRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceConfigcatRequest) GetSourceConfigcatPutRequest() *shared.SourceConfigcatPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceConfigcatPutRequest
+}
+
+func (o *PutSourceConfigcatRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceConfigcatResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceConfigcatResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceConfigcatResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceConfigcatResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceconfluence.go b/internal/sdk/pkg/models/operations/putsourceconfluence.go
old mode 100755
new mode 100644
index 2be6143e7..29e2c7883
--- a/internal/sdk/pkg/models/operations/putsourceconfluence.go
+++ b/internal/sdk/pkg/models/operations/putsourceconfluence.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceConfluenceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceConfluenceRequest) GetSourceConfluencePutRequest() *shared.SourceConfluencePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceConfluencePutRequest
+}
+
+func (o *PutSourceConfluenceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceConfluenceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceConfluenceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceConfluenceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceConfluenceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceconvex.go b/internal/sdk/pkg/models/operations/putsourceconvex.go
old mode 100755
new mode 100644
index bcf1721ee..dbd48a245
--- a/internal/sdk/pkg/models/operations/putsourceconvex.go
+++ b/internal/sdk/pkg/models/operations/putsourceconvex.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceConvexRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceConvexRequest) GetSourceConvexPutRequest() *shared.SourceConvexPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceConvexPutRequest
+}
+
+func (o *PutSourceConvexRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceConvexResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceConvexResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceConvexResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceConvexResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcedatascope.go b/internal/sdk/pkg/models/operations/putsourcedatascope.go
old mode 100755
new mode 100644
index 091cddfae..3c1b553c1
--- a/internal/sdk/pkg/models/operations/putsourcedatascope.go
+++ b/internal/sdk/pkg/models/operations/putsourcedatascope.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceDatascopeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceDatascopeRequest) GetSourceDatascopePutRequest() *shared.SourceDatascopePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceDatascopePutRequest
+}
+
+func (o *PutSourceDatascopeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceDatascopeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceDatascopeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceDatascopeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceDatascopeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcedelighted.go b/internal/sdk/pkg/models/operations/putsourcedelighted.go
old mode 100755
new mode 100644
index 591ba6f3e..c4d677f55
--- a/internal/sdk/pkg/models/operations/putsourcedelighted.go
+++ b/internal/sdk/pkg/models/operations/putsourcedelighted.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceDelightedRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceDelightedRequest) GetSourceDelightedPutRequest() *shared.SourceDelightedPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceDelightedPutRequest
+}
+
+func (o *PutSourceDelightedRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceDelightedResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceDelightedResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceDelightedResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceDelightedResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcedixa.go b/internal/sdk/pkg/models/operations/putsourcedixa.go
old mode 100755
new mode 100644
index 9405c3d20..8ace142dc
--- a/internal/sdk/pkg/models/operations/putsourcedixa.go
+++ b/internal/sdk/pkg/models/operations/putsourcedixa.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceDixaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceDixaRequest) GetSourceDixaPutRequest() *shared.SourceDixaPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceDixaPutRequest
+}
+
+func (o *PutSourceDixaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceDixaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceDixaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceDixaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceDixaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcedockerhub.go b/internal/sdk/pkg/models/operations/putsourcedockerhub.go
old mode 100755
new mode 100644
index 6b59b34be..8e1c49347
--- a/internal/sdk/pkg/models/operations/putsourcedockerhub.go
+++ b/internal/sdk/pkg/models/operations/putsourcedockerhub.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceDockerhubRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceDockerhubRequest) GetSourceDockerhubPutRequest() *shared.SourceDockerhubPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceDockerhubPutRequest
+}
+
+func (o *PutSourceDockerhubRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceDockerhubResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceDockerhubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceDockerhubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceDockerhubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcedremio.go b/internal/sdk/pkg/models/operations/putsourcedremio.go
old mode 100755
new mode 100644
index 453f35598..eae6c9528
--- a/internal/sdk/pkg/models/operations/putsourcedremio.go
+++ b/internal/sdk/pkg/models/operations/putsourcedremio.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceDremioRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceDremioRequest) GetSourceDremioPutRequest() *shared.SourceDremioPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceDremioPutRequest
+}
+
+func (o *PutSourceDremioRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceDremioResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceDremioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceDremioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceDremioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcedynamodb.go b/internal/sdk/pkg/models/operations/putsourcedynamodb.go
old mode 100755
new mode 100644
index bbfbf7803..3350bd4a7
--- a/internal/sdk/pkg/models/operations/putsourcedynamodb.go
+++ b/internal/sdk/pkg/models/operations/putsourcedynamodb.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceDynamodbRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceDynamodbRequest) GetSourceDynamodbPutRequest() *shared.SourceDynamodbPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceDynamodbPutRequest
+}
+
+func (o *PutSourceDynamodbRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceDynamodbResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceDynamodbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceDynamodbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceDynamodbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcee2etestcloud.go b/internal/sdk/pkg/models/operations/putsourcee2etestcloud.go
deleted file mode 100755
index a49001581..000000000
--- a/internal/sdk/pkg/models/operations/putsourcee2etestcloud.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type PutSourceE2eTestCloudRequest struct {
- SourceE2eTestCloudPutRequest *shared.SourceE2eTestCloudPutRequest `request:"mediaType=application/json"`
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type PutSourceE2eTestCloudResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/putsourceemailoctopus.go b/internal/sdk/pkg/models/operations/putsourceemailoctopus.go
old mode 100755
new mode 100644
index ab9858a30..9554ec7fd
--- a/internal/sdk/pkg/models/operations/putsourceemailoctopus.go
+++ b/internal/sdk/pkg/models/operations/putsourceemailoctopus.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceEmailoctopusRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceEmailoctopusRequest) GetSourceEmailoctopusPutRequest() *shared.SourceEmailoctopusPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceEmailoctopusPutRequest
+}
+
+func (o *PutSourceEmailoctopusRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceEmailoctopusResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceEmailoctopusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceEmailoctopusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceEmailoctopusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceexchangerates.go b/internal/sdk/pkg/models/operations/putsourceexchangerates.go
old mode 100755
new mode 100644
index 79ea9b8f5..3f5fd8637
--- a/internal/sdk/pkg/models/operations/putsourceexchangerates.go
+++ b/internal/sdk/pkg/models/operations/putsourceexchangerates.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceExchangeRatesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceExchangeRatesRequest) GetSourceExchangeRatesPutRequest() *shared.SourceExchangeRatesPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceExchangeRatesPutRequest
+}
+
+func (o *PutSourceExchangeRatesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceExchangeRatesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceExchangeRatesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceExchangeRatesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceExchangeRatesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcefacebookmarketing.go b/internal/sdk/pkg/models/operations/putsourcefacebookmarketing.go
old mode 100755
new mode 100644
index a82e1d86b..8544b75e2
--- a/internal/sdk/pkg/models/operations/putsourcefacebookmarketing.go
+++ b/internal/sdk/pkg/models/operations/putsourcefacebookmarketing.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceFacebookMarketingRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceFacebookMarketingRequest) GetSourceFacebookMarketingPutRequest() *shared.SourceFacebookMarketingPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceFacebookMarketingPutRequest
+}
+
+func (o *PutSourceFacebookMarketingRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceFacebookMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceFacebookMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceFacebookMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceFacebookMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcefacebookpages.go b/internal/sdk/pkg/models/operations/putsourcefacebookpages.go
old mode 100755
new mode 100644
index 43456b3d1..ffc1ac8b1
--- a/internal/sdk/pkg/models/operations/putsourcefacebookpages.go
+++ b/internal/sdk/pkg/models/operations/putsourcefacebookpages.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceFacebookPagesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceFacebookPagesRequest) GetSourceFacebookPagesPutRequest() *shared.SourceFacebookPagesPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceFacebookPagesPutRequest
+}
+
+func (o *PutSourceFacebookPagesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceFacebookPagesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceFacebookPagesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceFacebookPagesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceFacebookPagesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcefaker.go b/internal/sdk/pkg/models/operations/putsourcefaker.go
old mode 100755
new mode 100644
index a03882761..bcc45c6a4
--- a/internal/sdk/pkg/models/operations/putsourcefaker.go
+++ b/internal/sdk/pkg/models/operations/putsourcefaker.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceFakerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceFakerRequest) GetSourceFakerPutRequest() *shared.SourceFakerPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceFakerPutRequest
+}
+
+func (o *PutSourceFakerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceFakerResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceFakerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceFakerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceFakerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcefauna.go b/internal/sdk/pkg/models/operations/putsourcefauna.go
old mode 100755
new mode 100644
index e367e76a9..d06f21711
--- a/internal/sdk/pkg/models/operations/putsourcefauna.go
+++ b/internal/sdk/pkg/models/operations/putsourcefauna.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceFaunaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceFaunaRequest) GetSourceFaunaPutRequest() *shared.SourceFaunaPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceFaunaPutRequest
+}
+
+func (o *PutSourceFaunaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceFaunaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceFaunaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceFaunaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceFaunaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcefile.go b/internal/sdk/pkg/models/operations/putsourcefile.go
new file mode 100644
index 000000000..fa3a1a714
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/putsourcefile.go
@@ -0,0 +1,57 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type PutSourceFileRequest struct {
+ SourceFilePutRequest *shared.SourceFilePutRequest `request:"mediaType=application/json"`
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *PutSourceFileRequest) GetSourceFilePutRequest() *shared.SourceFilePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceFilePutRequest
+}
+
+func (o *PutSourceFileRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type PutSourceFileResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PutSourceFileResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceFileResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceFileResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcefilesecure.go b/internal/sdk/pkg/models/operations/putsourcefilesecure.go
deleted file mode 100755
index caaeb2698..000000000
--- a/internal/sdk/pkg/models/operations/putsourcefilesecure.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type PutSourceFileSecureRequest struct {
- SourceFileSecurePutRequest *shared.SourceFileSecurePutRequest `request:"mediaType=application/json"`
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type PutSourceFileSecureResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/putsourcefirebolt.go b/internal/sdk/pkg/models/operations/putsourcefirebolt.go
old mode 100755
new mode 100644
index 6391f3fe1..1cad77372
--- a/internal/sdk/pkg/models/operations/putsourcefirebolt.go
+++ b/internal/sdk/pkg/models/operations/putsourcefirebolt.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceFireboltRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceFireboltRequest) GetSourceFireboltPutRequest() *shared.SourceFireboltPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceFireboltPutRequest
+}
+
+func (o *PutSourceFireboltRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceFireboltResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceFireboltResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceFireboltResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceFireboltResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcefreshcaller.go b/internal/sdk/pkg/models/operations/putsourcefreshcaller.go
old mode 100755
new mode 100644
index 7e62539e2..bfa13728e
--- a/internal/sdk/pkg/models/operations/putsourcefreshcaller.go
+++ b/internal/sdk/pkg/models/operations/putsourcefreshcaller.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceFreshcallerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceFreshcallerRequest) GetSourceFreshcallerPutRequest() *shared.SourceFreshcallerPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceFreshcallerPutRequest
+}
+
+func (o *PutSourceFreshcallerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceFreshcallerResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceFreshcallerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceFreshcallerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceFreshcallerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcefreshdesk.go b/internal/sdk/pkg/models/operations/putsourcefreshdesk.go
old mode 100755
new mode 100644
index 9bf7ed170..0fa901921
--- a/internal/sdk/pkg/models/operations/putsourcefreshdesk.go
+++ b/internal/sdk/pkg/models/operations/putsourcefreshdesk.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceFreshdeskRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceFreshdeskRequest) GetSourceFreshdeskPutRequest() *shared.SourceFreshdeskPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceFreshdeskPutRequest
+}
+
+func (o *PutSourceFreshdeskRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceFreshdeskResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceFreshdeskResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceFreshdeskResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceFreshdeskResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcefreshsales.go b/internal/sdk/pkg/models/operations/putsourcefreshsales.go
old mode 100755
new mode 100644
index 1df8daaac..eefdf3447
--- a/internal/sdk/pkg/models/operations/putsourcefreshsales.go
+++ b/internal/sdk/pkg/models/operations/putsourcefreshsales.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceFreshsalesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceFreshsalesRequest) GetSourceFreshsalesPutRequest() *shared.SourceFreshsalesPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceFreshsalesPutRequest
+}
+
+func (o *PutSourceFreshsalesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceFreshsalesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceFreshsalesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceFreshsalesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceFreshsalesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegainsightpx.go b/internal/sdk/pkg/models/operations/putsourcegainsightpx.go
old mode 100755
new mode 100644
index 2a9fbd3ae..242f8d4e0
--- a/internal/sdk/pkg/models/operations/putsourcegainsightpx.go
+++ b/internal/sdk/pkg/models/operations/putsourcegainsightpx.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGainsightPxRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGainsightPxRequest) GetSourceGainsightPxPutRequest() *shared.SourceGainsightPxPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGainsightPxPutRequest
+}
+
+func (o *PutSourceGainsightPxRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGainsightPxResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGainsightPxResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGainsightPxResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGainsightPxResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegcs.go b/internal/sdk/pkg/models/operations/putsourcegcs.go
old mode 100755
new mode 100644
index 06ff4ac84..ee8c0e531
--- a/internal/sdk/pkg/models/operations/putsourcegcs.go
+++ b/internal/sdk/pkg/models/operations/putsourcegcs.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGcsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGcsRequest) GetSourceGcsPutRequest() *shared.SourceGcsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGcsPutRequest
+}
+
+func (o *PutSourceGcsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGcsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGcsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGcsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGcsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegetlago.go b/internal/sdk/pkg/models/operations/putsourcegetlago.go
old mode 100755
new mode 100644
index 334462cdc..6b3127b45
--- a/internal/sdk/pkg/models/operations/putsourcegetlago.go
+++ b/internal/sdk/pkg/models/operations/putsourcegetlago.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGetlagoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGetlagoRequest) GetSourceGetlagoPutRequest() *shared.SourceGetlagoPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGetlagoPutRequest
+}
+
+func (o *PutSourceGetlagoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGetlagoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGetlagoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGetlagoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGetlagoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegithub.go b/internal/sdk/pkg/models/operations/putsourcegithub.go
old mode 100755
new mode 100644
index 8f7b109f8..c9e48a2d9
--- a/internal/sdk/pkg/models/operations/putsourcegithub.go
+++ b/internal/sdk/pkg/models/operations/putsourcegithub.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGithubRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGithubRequest) GetSourceGithubPutRequest() *shared.SourceGithubPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGithubPutRequest
+}
+
+func (o *PutSourceGithubRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGithubResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGithubResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGithubResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGithubResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegitlab.go b/internal/sdk/pkg/models/operations/putsourcegitlab.go
old mode 100755
new mode 100644
index e69c0c1cf..4d8240a82
--- a/internal/sdk/pkg/models/operations/putsourcegitlab.go
+++ b/internal/sdk/pkg/models/operations/putsourcegitlab.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGitlabRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGitlabRequest) GetSourceGitlabPutRequest() *shared.SourceGitlabPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGitlabPutRequest
+}
+
+func (o *PutSourceGitlabRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGitlabResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGitlabResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGitlabResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGitlabResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceglassfrog.go b/internal/sdk/pkg/models/operations/putsourceglassfrog.go
old mode 100755
new mode 100644
index aa2772941..5a5daf4b5
--- a/internal/sdk/pkg/models/operations/putsourceglassfrog.go
+++ b/internal/sdk/pkg/models/operations/putsourceglassfrog.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGlassfrogRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGlassfrogRequest) GetSourceGlassfrogPutRequest() *shared.SourceGlassfrogPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGlassfrogPutRequest
+}
+
+func (o *PutSourceGlassfrogRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGlassfrogResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGlassfrogResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGlassfrogResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGlassfrogResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegnews.go b/internal/sdk/pkg/models/operations/putsourcegnews.go
old mode 100755
new mode 100644
index be5657683..a00821ba6
--- a/internal/sdk/pkg/models/operations/putsourcegnews.go
+++ b/internal/sdk/pkg/models/operations/putsourcegnews.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGnewsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGnewsRequest) GetSourceGnewsPutRequest() *shared.SourceGnewsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGnewsPutRequest
+}
+
+func (o *PutSourceGnewsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGnewsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGnewsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGnewsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGnewsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegoogleads.go b/internal/sdk/pkg/models/operations/putsourcegoogleads.go
old mode 100755
new mode 100644
index 4af2b2dd6..2c7d2a417
--- a/internal/sdk/pkg/models/operations/putsourcegoogleads.go
+++ b/internal/sdk/pkg/models/operations/putsourcegoogleads.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGoogleAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGoogleAdsRequest) GetSourceGoogleAdsPutRequest() *shared.SourceGoogleAdsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGoogleAdsPutRequest
+}
+
+func (o *PutSourceGoogleAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGoogleAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGoogleAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGoogleAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGoogleAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegoogleanalyticsdataapi.go b/internal/sdk/pkg/models/operations/putsourcegoogleanalyticsdataapi.go
old mode 100755
new mode 100644
index 2f05d198a..603645cda
--- a/internal/sdk/pkg/models/operations/putsourcegoogleanalyticsdataapi.go
+++ b/internal/sdk/pkg/models/operations/putsourcegoogleanalyticsdataapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGoogleAnalyticsDataAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGoogleAnalyticsDataAPIRequest) GetSourceGoogleAnalyticsDataAPIPutRequest() *shared.SourceGoogleAnalyticsDataAPIPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGoogleAnalyticsDataAPIPutRequest
+}
+
+func (o *PutSourceGoogleAnalyticsDataAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGoogleAnalyticsDataAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGoogleAnalyticsDataAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGoogleAnalyticsDataAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGoogleAnalyticsDataAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegoogleanalyticsv4.go b/internal/sdk/pkg/models/operations/putsourcegoogleanalyticsv4.go
deleted file mode 100755
index c70bff38f..000000000
--- a/internal/sdk/pkg/models/operations/putsourcegoogleanalyticsv4.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type PutSourceGoogleAnalyticsV4Request struct {
- SourceGoogleAnalyticsV4PutRequest *shared.SourceGoogleAnalyticsV4PutRequest `request:"mediaType=application/json"`
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type PutSourceGoogleAnalyticsV4Response struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/putsourcegoogledirectory.go b/internal/sdk/pkg/models/operations/putsourcegoogledirectory.go
old mode 100755
new mode 100644
index 13488123d..23e50e7ee
--- a/internal/sdk/pkg/models/operations/putsourcegoogledirectory.go
+++ b/internal/sdk/pkg/models/operations/putsourcegoogledirectory.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGoogleDirectoryRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGoogleDirectoryRequest) GetSourceGoogleDirectoryPutRequest() *shared.SourceGoogleDirectoryPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGoogleDirectoryPutRequest
+}
+
+func (o *PutSourceGoogleDirectoryRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGoogleDirectoryResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGoogleDirectoryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGoogleDirectoryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGoogleDirectoryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegoogledrive.go b/internal/sdk/pkg/models/operations/putsourcegoogledrive.go
new file mode 100644
index 000000000..4f90eb3e9
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/putsourcegoogledrive.go
@@ -0,0 +1,57 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type PutSourceGoogleDriveRequest struct {
+ SourceGoogleDrivePutRequest *shared.SourceGoogleDrivePutRequest `request:"mediaType=application/json"`
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *PutSourceGoogleDriveRequest) GetSourceGoogleDrivePutRequest() *shared.SourceGoogleDrivePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGoogleDrivePutRequest
+}
+
+func (o *PutSourceGoogleDriveRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type PutSourceGoogleDriveResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PutSourceGoogleDriveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGoogleDriveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGoogleDriveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegooglepagespeedinsights.go b/internal/sdk/pkg/models/operations/putsourcegooglepagespeedinsights.go
old mode 100755
new mode 100644
index 7516d3709..38e475756
--- a/internal/sdk/pkg/models/operations/putsourcegooglepagespeedinsights.go
+++ b/internal/sdk/pkg/models/operations/putsourcegooglepagespeedinsights.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGooglePagespeedInsightsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGooglePagespeedInsightsRequest) GetSourceGooglePagespeedInsightsPutRequest() *shared.SourceGooglePagespeedInsightsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGooglePagespeedInsightsPutRequest
+}
+
+func (o *PutSourceGooglePagespeedInsightsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGooglePagespeedInsightsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGooglePagespeedInsightsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGooglePagespeedInsightsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGooglePagespeedInsightsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegooglesearchconsole.go b/internal/sdk/pkg/models/operations/putsourcegooglesearchconsole.go
old mode 100755
new mode 100644
index 06585dd83..579e235e9
--- a/internal/sdk/pkg/models/operations/putsourcegooglesearchconsole.go
+++ b/internal/sdk/pkg/models/operations/putsourcegooglesearchconsole.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGoogleSearchConsoleRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGoogleSearchConsoleRequest) GetSourceGoogleSearchConsolePutRequest() *shared.SourceGoogleSearchConsolePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGoogleSearchConsolePutRequest
+}
+
+func (o *PutSourceGoogleSearchConsoleRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGoogleSearchConsoleResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGoogleSearchConsoleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGoogleSearchConsoleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGoogleSearchConsoleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegooglesheets.go b/internal/sdk/pkg/models/operations/putsourcegooglesheets.go
old mode 100755
new mode 100644
index 97a0d6c0d..b99e14b68
--- a/internal/sdk/pkg/models/operations/putsourcegooglesheets.go
+++ b/internal/sdk/pkg/models/operations/putsourcegooglesheets.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGoogleSheetsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGoogleSheetsRequest) GetSourceGoogleSheetsPutRequest() *shared.SourceGoogleSheetsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGoogleSheetsPutRequest
+}
+
+func (o *PutSourceGoogleSheetsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGoogleSheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGoogleSheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGoogleSheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGoogleSheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegooglewebfonts.go b/internal/sdk/pkg/models/operations/putsourcegooglewebfonts.go
old mode 100755
new mode 100644
index d595c6a62..8ba1bef73
--- a/internal/sdk/pkg/models/operations/putsourcegooglewebfonts.go
+++ b/internal/sdk/pkg/models/operations/putsourcegooglewebfonts.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGoogleWebfontsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGoogleWebfontsRequest) GetSourceGoogleWebfontsPutRequest() *shared.SourceGoogleWebfontsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGoogleWebfontsPutRequest
+}
+
+func (o *PutSourceGoogleWebfontsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGoogleWebfontsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGoogleWebfontsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGoogleWebfontsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGoogleWebfontsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegoogleworkspaceadminreports.go b/internal/sdk/pkg/models/operations/putsourcegoogleworkspaceadminreports.go
old mode 100755
new mode 100644
index 91f795557..769b89374
--- a/internal/sdk/pkg/models/operations/putsourcegoogleworkspaceadminreports.go
+++ b/internal/sdk/pkg/models/operations/putsourcegoogleworkspaceadminreports.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGoogleWorkspaceAdminReportsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGoogleWorkspaceAdminReportsRequest) GetSourceGoogleWorkspaceAdminReportsPutRequest() *shared.SourceGoogleWorkspaceAdminReportsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGoogleWorkspaceAdminReportsPutRequest
+}
+
+func (o *PutSourceGoogleWorkspaceAdminReportsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGoogleWorkspaceAdminReportsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGoogleWorkspaceAdminReportsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGoogleWorkspaceAdminReportsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGoogleWorkspaceAdminReportsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegreenhouse.go b/internal/sdk/pkg/models/operations/putsourcegreenhouse.go
old mode 100755
new mode 100644
index 31dcbf8ae..a019db661
--- a/internal/sdk/pkg/models/operations/putsourcegreenhouse.go
+++ b/internal/sdk/pkg/models/operations/putsourcegreenhouse.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGreenhouseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGreenhouseRequest) GetSourceGreenhousePutRequest() *shared.SourceGreenhousePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGreenhousePutRequest
+}
+
+func (o *PutSourceGreenhouseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGreenhouseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGreenhouseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGreenhouseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGreenhouseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcegridly.go b/internal/sdk/pkg/models/operations/putsourcegridly.go
old mode 100755
new mode 100644
index 564fb9f55..15d2fc94c
--- a/internal/sdk/pkg/models/operations/putsourcegridly.go
+++ b/internal/sdk/pkg/models/operations/putsourcegridly.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceGridlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceGridlyRequest) GetSourceGridlyPutRequest() *shared.SourceGridlyPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceGridlyPutRequest
+}
+
+func (o *PutSourceGridlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceGridlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceGridlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceGridlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceGridlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceharvest.go b/internal/sdk/pkg/models/operations/putsourceharvest.go
old mode 100755
new mode 100644
index 6207a4278..6dbba33ce
--- a/internal/sdk/pkg/models/operations/putsourceharvest.go
+++ b/internal/sdk/pkg/models/operations/putsourceharvest.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceHarvestRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceHarvestRequest) GetSourceHarvestPutRequest() *shared.SourceHarvestPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceHarvestPutRequest
+}
+
+func (o *PutSourceHarvestRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceHarvestResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceHarvestResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceHarvestResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceHarvestResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcehubplanner.go b/internal/sdk/pkg/models/operations/putsourcehubplanner.go
old mode 100755
new mode 100644
index 7050006db..58947c52e
--- a/internal/sdk/pkg/models/operations/putsourcehubplanner.go
+++ b/internal/sdk/pkg/models/operations/putsourcehubplanner.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceHubplannerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceHubplannerRequest) GetSourceHubplannerPutRequest() *shared.SourceHubplannerPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceHubplannerPutRequest
+}
+
+func (o *PutSourceHubplannerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceHubplannerResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceHubplannerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceHubplannerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceHubplannerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcehubspot.go b/internal/sdk/pkg/models/operations/putsourcehubspot.go
old mode 100755
new mode 100644
index 8870f9f47..8480c690f
--- a/internal/sdk/pkg/models/operations/putsourcehubspot.go
+++ b/internal/sdk/pkg/models/operations/putsourcehubspot.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceHubspotRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceHubspotRequest) GetSourceHubspotPutRequest() *shared.SourceHubspotPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceHubspotPutRequest
+}
+
+func (o *PutSourceHubspotRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceHubspotResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceHubspotResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceHubspotResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceHubspotResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceinsightly.go b/internal/sdk/pkg/models/operations/putsourceinsightly.go
old mode 100755
new mode 100644
index 6b1470472..01286433a
--- a/internal/sdk/pkg/models/operations/putsourceinsightly.go
+++ b/internal/sdk/pkg/models/operations/putsourceinsightly.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceInsightlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceInsightlyRequest) GetSourceInsightlyPutRequest() *shared.SourceInsightlyPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceInsightlyPutRequest
+}
+
+func (o *PutSourceInsightlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceInsightlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceInsightlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceInsightlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceInsightlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceinstagram.go b/internal/sdk/pkg/models/operations/putsourceinstagram.go
old mode 100755
new mode 100644
index f0da785ec..3d0e81a9c
--- a/internal/sdk/pkg/models/operations/putsourceinstagram.go
+++ b/internal/sdk/pkg/models/operations/putsourceinstagram.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceInstagramRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceInstagramRequest) GetSourceInstagramPutRequest() *shared.SourceInstagramPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceInstagramPutRequest
+}
+
+func (o *PutSourceInstagramRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceInstagramResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceInstagramResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceInstagramResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceInstagramResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceinstatus.go b/internal/sdk/pkg/models/operations/putsourceinstatus.go
old mode 100755
new mode 100644
index 362c1cc9d..ed2984412
--- a/internal/sdk/pkg/models/operations/putsourceinstatus.go
+++ b/internal/sdk/pkg/models/operations/putsourceinstatus.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceInstatusRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceInstatusRequest) GetSourceInstatusPutRequest() *shared.SourceInstatusPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceInstatusPutRequest
+}
+
+func (o *PutSourceInstatusRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceInstatusResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceInstatusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceInstatusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceInstatusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceintercom.go b/internal/sdk/pkg/models/operations/putsourceintercom.go
old mode 100755
new mode 100644
index 9ae3192b1..2f4bc66b7
--- a/internal/sdk/pkg/models/operations/putsourceintercom.go
+++ b/internal/sdk/pkg/models/operations/putsourceintercom.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceIntercomRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceIntercomRequest) GetSourceIntercomPutRequest() *shared.SourceIntercomPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceIntercomPutRequest
+}
+
+func (o *PutSourceIntercomRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceIntercomResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceIntercomResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceIntercomResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceIntercomResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceip2whois.go b/internal/sdk/pkg/models/operations/putsourceip2whois.go
old mode 100755
new mode 100644
index d7108a94f..c03ce9386
--- a/internal/sdk/pkg/models/operations/putsourceip2whois.go
+++ b/internal/sdk/pkg/models/operations/putsourceip2whois.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceIp2whoisRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceIp2whoisRequest) GetSourceIp2whoisPutRequest() *shared.SourceIp2whoisPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceIp2whoisPutRequest
+}
+
+func (o *PutSourceIp2whoisRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceIp2whoisResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceIp2whoisResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceIp2whoisResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceIp2whoisResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceiterable.go b/internal/sdk/pkg/models/operations/putsourceiterable.go
old mode 100755
new mode 100644
index 795eb16ec..e920bb916
--- a/internal/sdk/pkg/models/operations/putsourceiterable.go
+++ b/internal/sdk/pkg/models/operations/putsourceiterable.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceIterableRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceIterableRequest) GetSourceIterablePutRequest() *shared.SourceIterablePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceIterablePutRequest
+}
+
+func (o *PutSourceIterableRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceIterableResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceIterableResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceIterableResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceIterableResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcejira.go b/internal/sdk/pkg/models/operations/putsourcejira.go
old mode 100755
new mode 100644
index 0c88402a9..3753b6fde
--- a/internal/sdk/pkg/models/operations/putsourcejira.go
+++ b/internal/sdk/pkg/models/operations/putsourcejira.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceJiraRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceJiraRequest) GetSourceJiraPutRequest() *shared.SourceJiraPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceJiraPutRequest
+}
+
+func (o *PutSourceJiraRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceJiraResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceJiraResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceJiraResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceJiraResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcek6cloud.go b/internal/sdk/pkg/models/operations/putsourcek6cloud.go
old mode 100755
new mode 100644
index 8a4ebb87d..651e246de
--- a/internal/sdk/pkg/models/operations/putsourcek6cloud.go
+++ b/internal/sdk/pkg/models/operations/putsourcek6cloud.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceK6CloudRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceK6CloudRequest) GetSourceK6CloudPutRequest() *shared.SourceK6CloudPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceK6CloudPutRequest
+}
+
+func (o *PutSourceK6CloudRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceK6CloudResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceK6CloudResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceK6CloudResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceK6CloudResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceklarna.go b/internal/sdk/pkg/models/operations/putsourceklarna.go
old mode 100755
new mode 100644
index 802c8b356..2efd00190
--- a/internal/sdk/pkg/models/operations/putsourceklarna.go
+++ b/internal/sdk/pkg/models/operations/putsourceklarna.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceKlarnaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceKlarnaRequest) GetSourceKlarnaPutRequest() *shared.SourceKlarnaPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceKlarnaPutRequest
+}
+
+func (o *PutSourceKlarnaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceKlarnaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceKlarnaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceKlarnaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceKlarnaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceklaviyo.go b/internal/sdk/pkg/models/operations/putsourceklaviyo.go
old mode 100755
new mode 100644
index 16da081c8..a6e2fb235
--- a/internal/sdk/pkg/models/operations/putsourceklaviyo.go
+++ b/internal/sdk/pkg/models/operations/putsourceklaviyo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceKlaviyoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceKlaviyoRequest) GetSourceKlaviyoPutRequest() *shared.SourceKlaviyoPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceKlaviyoPutRequest
+}
+
+func (o *PutSourceKlaviyoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceKlaviyoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceKlaviyoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceKlaviyoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceKlaviyoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcekustomersinger.go b/internal/sdk/pkg/models/operations/putsourcekustomersinger.go
old mode 100755
new mode 100644
index c6e158ac0..557a2d8aa
--- a/internal/sdk/pkg/models/operations/putsourcekustomersinger.go
+++ b/internal/sdk/pkg/models/operations/putsourcekustomersinger.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceKustomerSingerRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceKustomerSingerRequest) GetSourceKustomerSingerPutRequest() *shared.SourceKustomerSingerPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceKustomerSingerPutRequest
+}
+
+func (o *PutSourceKustomerSingerRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceKustomerSingerResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceKustomerSingerResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceKustomerSingerResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceKustomerSingerResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcekyve.go b/internal/sdk/pkg/models/operations/putsourcekyve.go
old mode 100755
new mode 100644
index 5403acb4e..d3d880a28
--- a/internal/sdk/pkg/models/operations/putsourcekyve.go
+++ b/internal/sdk/pkg/models/operations/putsourcekyve.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceKyveRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceKyveRequest) GetSourceKyvePutRequest() *shared.SourceKyvePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceKyvePutRequest
+}
+
+func (o *PutSourceKyveRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceKyveResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceKyveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceKyveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceKyveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcelaunchdarkly.go b/internal/sdk/pkg/models/operations/putsourcelaunchdarkly.go
old mode 100755
new mode 100644
index 625bd652d..2b67671a0
--- a/internal/sdk/pkg/models/operations/putsourcelaunchdarkly.go
+++ b/internal/sdk/pkg/models/operations/putsourcelaunchdarkly.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceLaunchdarklyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceLaunchdarklyRequest) GetSourceLaunchdarklyPutRequest() *shared.SourceLaunchdarklyPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceLaunchdarklyPutRequest
+}
+
+func (o *PutSourceLaunchdarklyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceLaunchdarklyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceLaunchdarklyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceLaunchdarklyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceLaunchdarklyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcelemlist.go b/internal/sdk/pkg/models/operations/putsourcelemlist.go
old mode 100755
new mode 100644
index df02e4ea3..8630c527d
--- a/internal/sdk/pkg/models/operations/putsourcelemlist.go
+++ b/internal/sdk/pkg/models/operations/putsourcelemlist.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceLemlistRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceLemlistRequest) GetSourceLemlistPutRequest() *shared.SourceLemlistPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceLemlistPutRequest
+}
+
+func (o *PutSourceLemlistRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceLemlistResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceLemlistResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceLemlistResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceLemlistResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceleverhiring.go b/internal/sdk/pkg/models/operations/putsourceleverhiring.go
old mode 100755
new mode 100644
index d25e996ce..6fff26f83
--- a/internal/sdk/pkg/models/operations/putsourceleverhiring.go
+++ b/internal/sdk/pkg/models/operations/putsourceleverhiring.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceLeverHiringRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceLeverHiringRequest) GetSourceLeverHiringPutRequest() *shared.SourceLeverHiringPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceLeverHiringPutRequest
+}
+
+func (o *PutSourceLeverHiringRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceLeverHiringResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceLeverHiringResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceLeverHiringResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceLeverHiringResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcelinkedinads.go b/internal/sdk/pkg/models/operations/putsourcelinkedinads.go
old mode 100755
new mode 100644
index 35a309494..8593d4e0a
--- a/internal/sdk/pkg/models/operations/putsourcelinkedinads.go
+++ b/internal/sdk/pkg/models/operations/putsourcelinkedinads.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceLinkedinAdsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceLinkedinAdsRequest) GetSourceLinkedinAdsPutRequest() *shared.SourceLinkedinAdsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceLinkedinAdsPutRequest
+}
+
+func (o *PutSourceLinkedinAdsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceLinkedinAdsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceLinkedinAdsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceLinkedinAdsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceLinkedinAdsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcelinkedinpages.go b/internal/sdk/pkg/models/operations/putsourcelinkedinpages.go
old mode 100755
new mode 100644
index 6300f4882..10cb00f2d
--- a/internal/sdk/pkg/models/operations/putsourcelinkedinpages.go
+++ b/internal/sdk/pkg/models/operations/putsourcelinkedinpages.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceLinkedinPagesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceLinkedinPagesRequest) GetSourceLinkedinPagesPutRequest() *shared.SourceLinkedinPagesPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceLinkedinPagesPutRequest
+}
+
+func (o *PutSourceLinkedinPagesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceLinkedinPagesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceLinkedinPagesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceLinkedinPagesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceLinkedinPagesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcelinnworks.go b/internal/sdk/pkg/models/operations/putsourcelinnworks.go
old mode 100755
new mode 100644
index f48b41fb4..39f0617e2
--- a/internal/sdk/pkg/models/operations/putsourcelinnworks.go
+++ b/internal/sdk/pkg/models/operations/putsourcelinnworks.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceLinnworksRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceLinnworksRequest) GetSourceLinnworksPutRequest() *shared.SourceLinnworksPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceLinnworksPutRequest
+}
+
+func (o *PutSourceLinnworksRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceLinnworksResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceLinnworksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceLinnworksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceLinnworksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcelokalise.go b/internal/sdk/pkg/models/operations/putsourcelokalise.go
old mode 100755
new mode 100644
index a0e736903..a21943e4f
--- a/internal/sdk/pkg/models/operations/putsourcelokalise.go
+++ b/internal/sdk/pkg/models/operations/putsourcelokalise.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceLokaliseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceLokaliseRequest) GetSourceLokalisePutRequest() *shared.SourceLokalisePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceLokalisePutRequest
+}
+
+func (o *PutSourceLokaliseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceLokaliseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceLokaliseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceLokaliseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceLokaliseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemailchimp.go b/internal/sdk/pkg/models/operations/putsourcemailchimp.go
old mode 100755
new mode 100644
index e87383f8f..35908914c
--- a/internal/sdk/pkg/models/operations/putsourcemailchimp.go
+++ b/internal/sdk/pkg/models/operations/putsourcemailchimp.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMailchimpRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMailchimpRequest) GetSourceMailchimpPutRequest() *shared.SourceMailchimpPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMailchimpPutRequest
+}
+
+func (o *PutSourceMailchimpRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMailchimpResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMailchimpResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMailchimpResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMailchimpResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemailgun.go b/internal/sdk/pkg/models/operations/putsourcemailgun.go
old mode 100755
new mode 100644
index 0dfc5cd1a..9d3899252
--- a/internal/sdk/pkg/models/operations/putsourcemailgun.go
+++ b/internal/sdk/pkg/models/operations/putsourcemailgun.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMailgunRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMailgunRequest) GetSourceMailgunPutRequest() *shared.SourceMailgunPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMailgunPutRequest
+}
+
+func (o *PutSourceMailgunRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMailgunResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMailgunResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMailgunResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMailgunResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemailjetsms.go b/internal/sdk/pkg/models/operations/putsourcemailjetsms.go
old mode 100755
new mode 100644
index 99cccbb74..1a681c305
--- a/internal/sdk/pkg/models/operations/putsourcemailjetsms.go
+++ b/internal/sdk/pkg/models/operations/putsourcemailjetsms.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMailjetSmsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMailjetSmsRequest) GetSourceMailjetSmsPutRequest() *shared.SourceMailjetSmsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMailjetSmsPutRequest
+}
+
+func (o *PutSourceMailjetSmsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMailjetSmsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMailjetSmsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMailjetSmsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMailjetSmsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemarketo.go b/internal/sdk/pkg/models/operations/putsourcemarketo.go
old mode 100755
new mode 100644
index d6d8ec51f..403c58251
--- a/internal/sdk/pkg/models/operations/putsourcemarketo.go
+++ b/internal/sdk/pkg/models/operations/putsourcemarketo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMarketoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMarketoRequest) GetSourceMarketoPutRequest() *shared.SourceMarketoPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMarketoPutRequest
+}
+
+func (o *PutSourceMarketoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMarketoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMarketoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMarketoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMarketoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemetabase.go b/internal/sdk/pkg/models/operations/putsourcemetabase.go
old mode 100755
new mode 100644
index a055dc94b..db85dbc0a
--- a/internal/sdk/pkg/models/operations/putsourcemetabase.go
+++ b/internal/sdk/pkg/models/operations/putsourcemetabase.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMetabaseRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMetabaseRequest) GetSourceMetabasePutRequest() *shared.SourceMetabasePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMetabasePutRequest
+}
+
+func (o *PutSourceMetabaseRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMetabaseResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMetabaseResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMetabaseResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMetabaseResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemicrosoftteams.go b/internal/sdk/pkg/models/operations/putsourcemicrosoftteams.go
old mode 100755
new mode 100644
index 716301786..e73bfddc0
--- a/internal/sdk/pkg/models/operations/putsourcemicrosoftteams.go
+++ b/internal/sdk/pkg/models/operations/putsourcemicrosoftteams.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMicrosoftTeamsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMicrosoftTeamsRequest) GetSourceMicrosoftTeamsPutRequest() *shared.SourceMicrosoftTeamsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMicrosoftTeamsPutRequest
+}
+
+func (o *PutSourceMicrosoftTeamsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMicrosoftTeamsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMicrosoftTeamsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMicrosoftTeamsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMicrosoftTeamsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemixpanel.go b/internal/sdk/pkg/models/operations/putsourcemixpanel.go
old mode 100755
new mode 100644
index d9c6d33d0..98572edf9
--- a/internal/sdk/pkg/models/operations/putsourcemixpanel.go
+++ b/internal/sdk/pkg/models/operations/putsourcemixpanel.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMixpanelRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMixpanelRequest) GetSourceMixpanelPutRequest() *shared.SourceMixpanelPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMixpanelPutRequest
+}
+
+func (o *PutSourceMixpanelRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMixpanelResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMixpanelResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMixpanelResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMixpanelResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemonday.go b/internal/sdk/pkg/models/operations/putsourcemonday.go
old mode 100755
new mode 100644
index 18fb6e355..e2431c8ae
--- a/internal/sdk/pkg/models/operations/putsourcemonday.go
+++ b/internal/sdk/pkg/models/operations/putsourcemonday.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMondayRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMondayRequest) GetSourceMondayPutRequest() *shared.SourceMondayPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMondayPutRequest
+}
+
+func (o *PutSourceMondayRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMondayResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMondayResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMondayResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMondayResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemongodb.go b/internal/sdk/pkg/models/operations/putsourcemongodb.go
deleted file mode 100755
index b96fbb9ff..000000000
--- a/internal/sdk/pkg/models/operations/putsourcemongodb.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type PutSourceMongodbRequest struct {
- SourceMongodbPutRequest *shared.SourceMongodbPutRequest `request:"mediaType=application/json"`
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type PutSourceMongodbResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/putsourcemongodbinternalpoc.go b/internal/sdk/pkg/models/operations/putsourcemongodbinternalpoc.go
old mode 100755
new mode 100644
index cef1f8751..a6c0d5ee9
--- a/internal/sdk/pkg/models/operations/putsourcemongodbinternalpoc.go
+++ b/internal/sdk/pkg/models/operations/putsourcemongodbinternalpoc.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMongodbInternalPocRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMongodbInternalPocRequest) GetSourceMongodbInternalPocPutRequest() *shared.SourceMongodbInternalPocPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMongodbInternalPocPutRequest
+}
+
+func (o *PutSourceMongodbInternalPocRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMongodbInternalPocResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMongodbInternalPocResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMongodbInternalPocResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMongodbInternalPocResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemongodbv2.go b/internal/sdk/pkg/models/operations/putsourcemongodbv2.go
new file mode 100644
index 000000000..3007defc9
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/putsourcemongodbv2.go
@@ -0,0 +1,57 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type PutSourceMongodbV2Request struct {
+ SourceMongodbV2PutRequest *shared.SourceMongodbV2PutRequest `request:"mediaType=application/json"`
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *PutSourceMongodbV2Request) GetSourceMongodbV2PutRequest() *shared.SourceMongodbV2PutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMongodbV2PutRequest
+}
+
+func (o *PutSourceMongodbV2Request) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type PutSourceMongodbV2Response struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PutSourceMongodbV2Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMongodbV2Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMongodbV2Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemssql.go b/internal/sdk/pkg/models/operations/putsourcemssql.go
old mode 100755
new mode 100644
index b0cd5fa33..b49a2a06a
--- a/internal/sdk/pkg/models/operations/putsourcemssql.go
+++ b/internal/sdk/pkg/models/operations/putsourcemssql.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMssqlRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMssqlRequest) GetSourceMssqlPutRequest() *shared.SourceMssqlPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMssqlPutRequest
+}
+
+func (o *PutSourceMssqlRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMssqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMssqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMssqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMssqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemyhours.go b/internal/sdk/pkg/models/operations/putsourcemyhours.go
old mode 100755
new mode 100644
index e3bf85883..25c5f2c5c
--- a/internal/sdk/pkg/models/operations/putsourcemyhours.go
+++ b/internal/sdk/pkg/models/operations/putsourcemyhours.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMyHoursRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMyHoursRequest) GetSourceMyHoursPutRequest() *shared.SourceMyHoursPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMyHoursPutRequest
+}
+
+func (o *PutSourceMyHoursRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMyHoursResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMyHoursResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMyHoursResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMyHoursResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcemysql.go b/internal/sdk/pkg/models/operations/putsourcemysql.go
old mode 100755
new mode 100644
index abb7bd8d6..5b3583868
--- a/internal/sdk/pkg/models/operations/putsourcemysql.go
+++ b/internal/sdk/pkg/models/operations/putsourcemysql.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceMysqlRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceMysqlRequest) GetSourceMysqlPutRequest() *shared.SourceMysqlPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceMysqlPutRequest
+}
+
+func (o *PutSourceMysqlRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceMysqlResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceMysqlResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceMysqlResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceMysqlResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcenetsuite.go b/internal/sdk/pkg/models/operations/putsourcenetsuite.go
old mode 100755
new mode 100644
index 21b52913c..393b1ff53
--- a/internal/sdk/pkg/models/operations/putsourcenetsuite.go
+++ b/internal/sdk/pkg/models/operations/putsourcenetsuite.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceNetsuiteRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceNetsuiteRequest) GetSourceNetsuitePutRequest() *shared.SourceNetsuitePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceNetsuitePutRequest
+}
+
+func (o *PutSourceNetsuiteRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceNetsuiteResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceNetsuiteResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceNetsuiteResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceNetsuiteResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcenotion.go b/internal/sdk/pkg/models/operations/putsourcenotion.go
old mode 100755
new mode 100644
index 36ef47ff9..afacc54a0
--- a/internal/sdk/pkg/models/operations/putsourcenotion.go
+++ b/internal/sdk/pkg/models/operations/putsourcenotion.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceNotionRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceNotionRequest) GetSourceNotionPutRequest() *shared.SourceNotionPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceNotionPutRequest
+}
+
+func (o *PutSourceNotionRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceNotionResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceNotionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceNotionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceNotionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcenytimes.go b/internal/sdk/pkg/models/operations/putsourcenytimes.go
old mode 100755
new mode 100644
index 1fccb87d8..e97a05bb2
--- a/internal/sdk/pkg/models/operations/putsourcenytimes.go
+++ b/internal/sdk/pkg/models/operations/putsourcenytimes.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceNytimesRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceNytimesRequest) GetSourceNytimesPutRequest() *shared.SourceNytimesPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceNytimesPutRequest
+}
+
+func (o *PutSourceNytimesRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceNytimesResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceNytimesResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceNytimesResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceNytimesResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceokta.go b/internal/sdk/pkg/models/operations/putsourceokta.go
old mode 100755
new mode 100644
index a89fc86da..35c46e33c
--- a/internal/sdk/pkg/models/operations/putsourceokta.go
+++ b/internal/sdk/pkg/models/operations/putsourceokta.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceOktaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceOktaRequest) GetSourceOktaPutRequest() *shared.SourceOktaPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceOktaPutRequest
+}
+
+func (o *PutSourceOktaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceOktaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceOktaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceOktaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceOktaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceomnisend.go b/internal/sdk/pkg/models/operations/putsourceomnisend.go
old mode 100755
new mode 100644
index ed714a3c2..e4a58e46f
--- a/internal/sdk/pkg/models/operations/putsourceomnisend.go
+++ b/internal/sdk/pkg/models/operations/putsourceomnisend.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceOmnisendRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceOmnisendRequest) GetSourceOmnisendPutRequest() *shared.SourceOmnisendPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceOmnisendPutRequest
+}
+
+func (o *PutSourceOmnisendRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceOmnisendResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceOmnisendResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceOmnisendResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceOmnisendResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceonesignal.go b/internal/sdk/pkg/models/operations/putsourceonesignal.go
old mode 100755
new mode 100644
index d99b65e44..51a5adc65
--- a/internal/sdk/pkg/models/operations/putsourceonesignal.go
+++ b/internal/sdk/pkg/models/operations/putsourceonesignal.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceOnesignalRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceOnesignalRequest) GetSourceOnesignalPutRequest() *shared.SourceOnesignalPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceOnesignalPutRequest
+}
+
+func (o *PutSourceOnesignalRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceOnesignalResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceOnesignalResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceOnesignalResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceOnesignalResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceoracle.go b/internal/sdk/pkg/models/operations/putsourceoracle.go
old mode 100755
new mode 100644
index 18522254a..3f548e7c6
--- a/internal/sdk/pkg/models/operations/putsourceoracle.go
+++ b/internal/sdk/pkg/models/operations/putsourceoracle.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceOracleRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceOracleRequest) GetSourceOraclePutRequest() *shared.SourceOraclePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceOraclePutRequest
+}
+
+func (o *PutSourceOracleRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceOracleResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceOracleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceOracleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceOracleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceorb.go b/internal/sdk/pkg/models/operations/putsourceorb.go
old mode 100755
new mode 100644
index 4caf99021..18db247c6
--- a/internal/sdk/pkg/models/operations/putsourceorb.go
+++ b/internal/sdk/pkg/models/operations/putsourceorb.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceOrbRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceOrbRequest) GetSourceOrbPutRequest() *shared.SourceOrbPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceOrbPutRequest
+}
+
+func (o *PutSourceOrbRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceOrbResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceOrbResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceOrbResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceOrbResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceorbit.go b/internal/sdk/pkg/models/operations/putsourceorbit.go
old mode 100755
new mode 100644
index 66507b641..5280fa1be
--- a/internal/sdk/pkg/models/operations/putsourceorbit.go
+++ b/internal/sdk/pkg/models/operations/putsourceorbit.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceOrbitRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceOrbitRequest) GetSourceOrbitPutRequest() *shared.SourceOrbitPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceOrbitPutRequest
+}
+
+func (o *PutSourceOrbitRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceOrbitResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceOrbitResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceOrbitResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceOrbitResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceoutbrainamplify.go b/internal/sdk/pkg/models/operations/putsourceoutbrainamplify.go
old mode 100755
new mode 100644
index ce9b70764..9f6b8ef1a
--- a/internal/sdk/pkg/models/operations/putsourceoutbrainamplify.go
+++ b/internal/sdk/pkg/models/operations/putsourceoutbrainamplify.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceOutbrainAmplifyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceOutbrainAmplifyRequest) GetSourceOutbrainAmplifyPutRequest() *shared.SourceOutbrainAmplifyPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceOutbrainAmplifyPutRequest
+}
+
+func (o *PutSourceOutbrainAmplifyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceOutbrainAmplifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceOutbrainAmplifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceOutbrainAmplifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceOutbrainAmplifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceoutreach.go b/internal/sdk/pkg/models/operations/putsourceoutreach.go
old mode 100755
new mode 100644
index 05a0e8dd6..453ee5352
--- a/internal/sdk/pkg/models/operations/putsourceoutreach.go
+++ b/internal/sdk/pkg/models/operations/putsourceoutreach.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceOutreachRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceOutreachRequest) GetSourceOutreachPutRequest() *shared.SourceOutreachPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceOutreachPutRequest
+}
+
+func (o *PutSourceOutreachRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceOutreachResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceOutreachResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceOutreachResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceOutreachResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepaypaltransaction.go b/internal/sdk/pkg/models/operations/putsourcepaypaltransaction.go
old mode 100755
new mode 100644
index 56bf1e602..c8249563f
--- a/internal/sdk/pkg/models/operations/putsourcepaypaltransaction.go
+++ b/internal/sdk/pkg/models/operations/putsourcepaypaltransaction.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePaypalTransactionRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePaypalTransactionRequest) GetSourcePaypalTransactionPutRequest() *shared.SourcePaypalTransactionPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePaypalTransactionPutRequest
+}
+
+func (o *PutSourcePaypalTransactionRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePaypalTransactionResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePaypalTransactionResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePaypalTransactionResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePaypalTransactionResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepaystack.go b/internal/sdk/pkg/models/operations/putsourcepaystack.go
old mode 100755
new mode 100644
index 98fe02e22..88772a0a6
--- a/internal/sdk/pkg/models/operations/putsourcepaystack.go
+++ b/internal/sdk/pkg/models/operations/putsourcepaystack.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePaystackRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePaystackRequest) GetSourcePaystackPutRequest() *shared.SourcePaystackPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePaystackPutRequest
+}
+
+func (o *PutSourcePaystackRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePaystackResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePaystackResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePaystackResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePaystackResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcependo.go b/internal/sdk/pkg/models/operations/putsourcependo.go
old mode 100755
new mode 100644
index dc7b5aea3..05b2bc212
--- a/internal/sdk/pkg/models/operations/putsourcependo.go
+++ b/internal/sdk/pkg/models/operations/putsourcependo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePendoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePendoRequest) GetSourcePendoPutRequest() *shared.SourcePendoPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePendoPutRequest
+}
+
+func (o *PutSourcePendoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePendoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePendoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePendoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePendoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepersistiq.go b/internal/sdk/pkg/models/operations/putsourcepersistiq.go
old mode 100755
new mode 100644
index d9d47f8ca..55ff9e03f
--- a/internal/sdk/pkg/models/operations/putsourcepersistiq.go
+++ b/internal/sdk/pkg/models/operations/putsourcepersistiq.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePersistiqRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePersistiqRequest) GetSourcePersistiqPutRequest() *shared.SourcePersistiqPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePersistiqPutRequest
+}
+
+func (o *PutSourcePersistiqRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePersistiqResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePersistiqResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePersistiqResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePersistiqResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepexelsapi.go b/internal/sdk/pkg/models/operations/putsourcepexelsapi.go
old mode 100755
new mode 100644
index 79e0449a2..14959d876
--- a/internal/sdk/pkg/models/operations/putsourcepexelsapi.go
+++ b/internal/sdk/pkg/models/operations/putsourcepexelsapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePexelsAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePexelsAPIRequest) GetSourcePexelsAPIPutRequest() *shared.SourcePexelsAPIPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePexelsAPIPutRequest
+}
+
+func (o *PutSourcePexelsAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePexelsAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePexelsAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePexelsAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePexelsAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepinterest.go b/internal/sdk/pkg/models/operations/putsourcepinterest.go
old mode 100755
new mode 100644
index 21f9153f6..5386007ce
--- a/internal/sdk/pkg/models/operations/putsourcepinterest.go
+++ b/internal/sdk/pkg/models/operations/putsourcepinterest.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePinterestRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePinterestRequest) GetSourcePinterestPutRequest() *shared.SourcePinterestPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePinterestPutRequest
+}
+
+func (o *PutSourcePinterestRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePinterestResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePinterestResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePinterestResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePinterestResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepipedrive.go b/internal/sdk/pkg/models/operations/putsourcepipedrive.go
old mode 100755
new mode 100644
index 38bc7d844..83d17602d
--- a/internal/sdk/pkg/models/operations/putsourcepipedrive.go
+++ b/internal/sdk/pkg/models/operations/putsourcepipedrive.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePipedriveRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePipedriveRequest) GetSourcePipedrivePutRequest() *shared.SourcePipedrivePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePipedrivePutRequest
+}
+
+func (o *PutSourcePipedriveRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePipedriveResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePipedriveResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePipedriveResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePipedriveResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepocket.go b/internal/sdk/pkg/models/operations/putsourcepocket.go
old mode 100755
new mode 100644
index d59991d12..2492b6c37
--- a/internal/sdk/pkg/models/operations/putsourcepocket.go
+++ b/internal/sdk/pkg/models/operations/putsourcepocket.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePocketRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePocketRequest) GetSourcePocketPutRequest() *shared.SourcePocketPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePocketPutRequest
+}
+
+func (o *PutSourcePocketRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePocketResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePocketResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePocketResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePocketResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepokeapi.go b/internal/sdk/pkg/models/operations/putsourcepokeapi.go
old mode 100755
new mode 100644
index 185898eb9..973179cf1
--- a/internal/sdk/pkg/models/operations/putsourcepokeapi.go
+++ b/internal/sdk/pkg/models/operations/putsourcepokeapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePokeapiRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePokeapiRequest) GetSourcePokeapiPutRequest() *shared.SourcePokeapiPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePokeapiPutRequest
+}
+
+func (o *PutSourcePokeapiRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePokeapiResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePokeapiResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePokeapiResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePokeapiResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepolygonstockapi.go b/internal/sdk/pkg/models/operations/putsourcepolygonstockapi.go
old mode 100755
new mode 100644
index 34e8b8695..e0ba593a8
--- a/internal/sdk/pkg/models/operations/putsourcepolygonstockapi.go
+++ b/internal/sdk/pkg/models/operations/putsourcepolygonstockapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePolygonStockAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePolygonStockAPIRequest) GetSourcePolygonStockAPIPutRequest() *shared.SourcePolygonStockAPIPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePolygonStockAPIPutRequest
+}
+
+func (o *PutSourcePolygonStockAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePolygonStockAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePolygonStockAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePolygonStockAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePolygonStockAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepostgres.go b/internal/sdk/pkg/models/operations/putsourcepostgres.go
old mode 100755
new mode 100644
index 087f95b58..2e44b4f9b
--- a/internal/sdk/pkg/models/operations/putsourcepostgres.go
+++ b/internal/sdk/pkg/models/operations/putsourcepostgres.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePostgresRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePostgresRequest) GetSourcePostgresPutRequest() *shared.SourcePostgresPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePostgresPutRequest
+}
+
+func (o *PutSourcePostgresRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePostgresResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePostgresResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePostgresResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePostgresResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceposthog.go b/internal/sdk/pkg/models/operations/putsourceposthog.go
old mode 100755
new mode 100644
index 70f1ae004..63ace6353
--- a/internal/sdk/pkg/models/operations/putsourceposthog.go
+++ b/internal/sdk/pkg/models/operations/putsourceposthog.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePosthogRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePosthogRequest) GetSourcePosthogPutRequest() *shared.SourcePosthogPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePosthogPutRequest
+}
+
+func (o *PutSourcePosthogRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePosthogResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePosthogResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePosthogResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePosthogResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepostmarkapp.go b/internal/sdk/pkg/models/operations/putsourcepostmarkapp.go
old mode 100755
new mode 100644
index 9231c275e..30fa29444
--- a/internal/sdk/pkg/models/operations/putsourcepostmarkapp.go
+++ b/internal/sdk/pkg/models/operations/putsourcepostmarkapp.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePostmarkappRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePostmarkappRequest) GetSourcePostmarkappPutRequest() *shared.SourcePostmarkappPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePostmarkappPutRequest
+}
+
+func (o *PutSourcePostmarkappRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePostmarkappResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePostmarkappResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePostmarkappResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePostmarkappResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceprestashop.go b/internal/sdk/pkg/models/operations/putsourceprestashop.go
old mode 100755
new mode 100644
index 7b4528eee..ff3d52543
--- a/internal/sdk/pkg/models/operations/putsourceprestashop.go
+++ b/internal/sdk/pkg/models/operations/putsourceprestashop.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePrestashopRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePrestashopRequest) GetSourcePrestashopPutRequest() *shared.SourcePrestashopPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePrestashopPutRequest
+}
+
+func (o *PutSourcePrestashopRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePrestashopResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePrestashopResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePrestashopResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePrestashopResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepunkapi.go b/internal/sdk/pkg/models/operations/putsourcepunkapi.go
old mode 100755
new mode 100644
index a5e1ec93c..c6061ea2b
--- a/internal/sdk/pkg/models/operations/putsourcepunkapi.go
+++ b/internal/sdk/pkg/models/operations/putsourcepunkapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePunkAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePunkAPIRequest) GetSourcePunkAPIPutRequest() *shared.SourcePunkAPIPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePunkAPIPutRequest
+}
+
+func (o *PutSourcePunkAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePunkAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePunkAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePunkAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePunkAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcepypi.go b/internal/sdk/pkg/models/operations/putsourcepypi.go
old mode 100755
new mode 100644
index e1f23ce47..4e446de67
--- a/internal/sdk/pkg/models/operations/putsourcepypi.go
+++ b/internal/sdk/pkg/models/operations/putsourcepypi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourcePypiRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourcePypiRequest) GetSourcePypiPutRequest() *shared.SourcePypiPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourcePypiPutRequest
+}
+
+func (o *PutSourcePypiRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourcePypiResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourcePypiResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourcePypiResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourcePypiResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcequalaroo.go b/internal/sdk/pkg/models/operations/putsourcequalaroo.go
old mode 100755
new mode 100644
index ed96b0fb8..56570ee43
--- a/internal/sdk/pkg/models/operations/putsourcequalaroo.go
+++ b/internal/sdk/pkg/models/operations/putsourcequalaroo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceQualarooRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceQualarooRequest) GetSourceQualarooPutRequest() *shared.SourceQualarooPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceQualarooPutRequest
+}
+
+func (o *PutSourceQualarooRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceQualarooResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceQualarooResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceQualarooResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceQualarooResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcequickbooks.go b/internal/sdk/pkg/models/operations/putsourcequickbooks.go
old mode 100755
new mode 100644
index be5e0ede8..21b38b4e3
--- a/internal/sdk/pkg/models/operations/putsourcequickbooks.go
+++ b/internal/sdk/pkg/models/operations/putsourcequickbooks.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceQuickbooksRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceQuickbooksRequest) GetSourceQuickbooksPutRequest() *shared.SourceQuickbooksPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceQuickbooksPutRequest
+}
+
+func (o *PutSourceQuickbooksRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceQuickbooksResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceQuickbooksResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceQuickbooksResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceQuickbooksResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcerailz.go b/internal/sdk/pkg/models/operations/putsourcerailz.go
old mode 100755
new mode 100644
index f885f6d34..b5ad373c0
--- a/internal/sdk/pkg/models/operations/putsourcerailz.go
+++ b/internal/sdk/pkg/models/operations/putsourcerailz.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceRailzRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceRailzRequest) GetSourceRailzPutRequest() *shared.SourceRailzPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceRailzPutRequest
+}
+
+func (o *PutSourceRailzRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceRailzResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceRailzResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceRailzResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceRailzResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcerecharge.go b/internal/sdk/pkg/models/operations/putsourcerecharge.go
old mode 100755
new mode 100644
index 729241379..d796604ac
--- a/internal/sdk/pkg/models/operations/putsourcerecharge.go
+++ b/internal/sdk/pkg/models/operations/putsourcerecharge.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceRechargeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceRechargeRequest) GetSourceRechargePutRequest() *shared.SourceRechargePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceRechargePutRequest
+}
+
+func (o *PutSourceRechargeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceRechargeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceRechargeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceRechargeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceRechargeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcerecreation.go b/internal/sdk/pkg/models/operations/putsourcerecreation.go
old mode 100755
new mode 100644
index 6410312de..b2c037084
--- a/internal/sdk/pkg/models/operations/putsourcerecreation.go
+++ b/internal/sdk/pkg/models/operations/putsourcerecreation.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceRecreationRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceRecreationRequest) GetSourceRecreationPutRequest() *shared.SourceRecreationPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceRecreationPutRequest
+}
+
+func (o *PutSourceRecreationRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceRecreationResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceRecreationResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceRecreationResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceRecreationResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcerecruitee.go b/internal/sdk/pkg/models/operations/putsourcerecruitee.go
old mode 100755
new mode 100644
index 0ceef43de..2655816a1
--- a/internal/sdk/pkg/models/operations/putsourcerecruitee.go
+++ b/internal/sdk/pkg/models/operations/putsourcerecruitee.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceRecruiteeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceRecruiteeRequest) GetSourceRecruiteePutRequest() *shared.SourceRecruiteePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceRecruiteePutRequest
+}
+
+func (o *PutSourceRecruiteeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceRecruiteeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceRecruiteeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceRecruiteeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceRecruiteeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcerecurly.go b/internal/sdk/pkg/models/operations/putsourcerecurly.go
old mode 100755
new mode 100644
index 88436f1cd..6f7b07aa2
--- a/internal/sdk/pkg/models/operations/putsourcerecurly.go
+++ b/internal/sdk/pkg/models/operations/putsourcerecurly.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceRecurlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceRecurlyRequest) GetSourceRecurlyPutRequest() *shared.SourceRecurlyPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceRecurlyPutRequest
+}
+
+func (o *PutSourceRecurlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceRecurlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceRecurlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceRecurlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceRecurlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceredshift.go b/internal/sdk/pkg/models/operations/putsourceredshift.go
old mode 100755
new mode 100644
index 07ccded5e..7e00a69ad
--- a/internal/sdk/pkg/models/operations/putsourceredshift.go
+++ b/internal/sdk/pkg/models/operations/putsourceredshift.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceRedshiftRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceRedshiftRequest) GetSourceRedshiftPutRequest() *shared.SourceRedshiftPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceRedshiftPutRequest
+}
+
+func (o *PutSourceRedshiftRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceRedshiftResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceRedshiftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceRedshiftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceRedshiftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceretently.go b/internal/sdk/pkg/models/operations/putsourceretently.go
old mode 100755
new mode 100644
index ef3ac0f57..87b67ac85
--- a/internal/sdk/pkg/models/operations/putsourceretently.go
+++ b/internal/sdk/pkg/models/operations/putsourceretently.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceRetentlyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceRetentlyRequest) GetSourceRetentlyPutRequest() *shared.SourceRetentlyPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceRetentlyPutRequest
+}
+
+func (o *PutSourceRetentlyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceRetentlyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceRetentlyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceRetentlyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceRetentlyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcerkicovid.go b/internal/sdk/pkg/models/operations/putsourcerkicovid.go
old mode 100755
new mode 100644
index 98106a8ca..610c93790
--- a/internal/sdk/pkg/models/operations/putsourcerkicovid.go
+++ b/internal/sdk/pkg/models/operations/putsourcerkicovid.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceRkiCovidRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceRkiCovidRequest) GetSourceRkiCovidPutRequest() *shared.SourceRkiCovidPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceRkiCovidPutRequest
+}
+
+func (o *PutSourceRkiCovidRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceRkiCovidResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceRkiCovidResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceRkiCovidResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceRkiCovidResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcerss.go b/internal/sdk/pkg/models/operations/putsourcerss.go
old mode 100755
new mode 100644
index d072cd726..325d79df2
--- a/internal/sdk/pkg/models/operations/putsourcerss.go
+++ b/internal/sdk/pkg/models/operations/putsourcerss.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceRssRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceRssRequest) GetSourceRssPutRequest() *shared.SourceRssPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceRssPutRequest
+}
+
+func (o *PutSourceRssRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceRssResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceRssResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceRssResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceRssResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsources3.go b/internal/sdk/pkg/models/operations/putsources3.go
old mode 100755
new mode 100644
index cece454c5..8289a3c97
--- a/internal/sdk/pkg/models/operations/putsources3.go
+++ b/internal/sdk/pkg/models/operations/putsources3.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceS3Request struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceS3Request) GetSourceS3PutRequest() *shared.SourceS3PutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceS3PutRequest
+}
+
+func (o *PutSourceS3Request) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceS3Response struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceS3Response) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceS3Response) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceS3Response) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesalesforce.go b/internal/sdk/pkg/models/operations/putsourcesalesforce.go
old mode 100755
new mode 100644
index 047314162..e85548fb3
--- a/internal/sdk/pkg/models/operations/putsourcesalesforce.go
+++ b/internal/sdk/pkg/models/operations/putsourcesalesforce.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSalesforceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSalesforceRequest) GetSourceSalesforcePutRequest() *shared.SourceSalesforcePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSalesforcePutRequest
+}
+
+func (o *PutSourceSalesforceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSalesforceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSalesforceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSalesforceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSalesforceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesalesloft.go b/internal/sdk/pkg/models/operations/putsourcesalesloft.go
old mode 100755
new mode 100644
index 87d305c97..0fe3d7123
--- a/internal/sdk/pkg/models/operations/putsourcesalesloft.go
+++ b/internal/sdk/pkg/models/operations/putsourcesalesloft.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSalesloftRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSalesloftRequest) GetSourceSalesloftPutRequest() *shared.SourceSalesloftPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSalesloftPutRequest
+}
+
+func (o *PutSourceSalesloftRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSalesloftResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSalesloftResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSalesloftResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSalesloftResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesapfieldglass.go b/internal/sdk/pkg/models/operations/putsourcesapfieldglass.go
old mode 100755
new mode 100644
index ce1606bdf..ed9cbeb7a
--- a/internal/sdk/pkg/models/operations/putsourcesapfieldglass.go
+++ b/internal/sdk/pkg/models/operations/putsourcesapfieldglass.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSapFieldglassRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSapFieldglassRequest) GetSourceSapFieldglassPutRequest() *shared.SourceSapFieldglassPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSapFieldglassPutRequest
+}
+
+func (o *PutSourceSapFieldglassRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSapFieldglassResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSapFieldglassResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSapFieldglassResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSapFieldglassResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesecoda.go b/internal/sdk/pkg/models/operations/putsourcesecoda.go
old mode 100755
new mode 100644
index 37b7b0547..f60e5a7eb
--- a/internal/sdk/pkg/models/operations/putsourcesecoda.go
+++ b/internal/sdk/pkg/models/operations/putsourcesecoda.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSecodaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSecodaRequest) GetSourceSecodaPutRequest() *shared.SourceSecodaPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSecodaPutRequest
+}
+
+func (o *PutSourceSecodaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSecodaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSecodaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSecodaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSecodaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesendgrid.go b/internal/sdk/pkg/models/operations/putsourcesendgrid.go
old mode 100755
new mode 100644
index af3571be8..329ad80ec
--- a/internal/sdk/pkg/models/operations/putsourcesendgrid.go
+++ b/internal/sdk/pkg/models/operations/putsourcesendgrid.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSendgridRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSendgridRequest) GetSourceSendgridPutRequest() *shared.SourceSendgridPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSendgridPutRequest
+}
+
+func (o *PutSourceSendgridRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSendgridResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSendgridResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSendgridResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSendgridResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesendinblue.go b/internal/sdk/pkg/models/operations/putsourcesendinblue.go
old mode 100755
new mode 100644
index 74963f352..1ffb65471
--- a/internal/sdk/pkg/models/operations/putsourcesendinblue.go
+++ b/internal/sdk/pkg/models/operations/putsourcesendinblue.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSendinblueRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSendinblueRequest) GetSourceSendinbluePutRequest() *shared.SourceSendinbluePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSendinbluePutRequest
+}
+
+func (o *PutSourceSendinblueRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSendinblueResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSendinblueResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSendinblueResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSendinblueResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesenseforce.go b/internal/sdk/pkg/models/operations/putsourcesenseforce.go
old mode 100755
new mode 100644
index 20c05ec1f..86bac267b
--- a/internal/sdk/pkg/models/operations/putsourcesenseforce.go
+++ b/internal/sdk/pkg/models/operations/putsourcesenseforce.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSenseforceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSenseforceRequest) GetSourceSenseforcePutRequest() *shared.SourceSenseforcePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSenseforcePutRequest
+}
+
+func (o *PutSourceSenseforceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSenseforceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSenseforceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSenseforceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSenseforceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesentry.go b/internal/sdk/pkg/models/operations/putsourcesentry.go
old mode 100755
new mode 100644
index ebe57e962..1c6f91a1f
--- a/internal/sdk/pkg/models/operations/putsourcesentry.go
+++ b/internal/sdk/pkg/models/operations/putsourcesentry.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSentryRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSentryRequest) GetSourceSentryPutRequest() *shared.SourceSentryPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSentryPutRequest
+}
+
+func (o *PutSourceSentryRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSentryResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSentryResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSentryResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSentryResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesftp.go b/internal/sdk/pkg/models/operations/putsourcesftp.go
old mode 100755
new mode 100644
index 7f4af3837..fcdc69245
--- a/internal/sdk/pkg/models/operations/putsourcesftp.go
+++ b/internal/sdk/pkg/models/operations/putsourcesftp.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSftpRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSftpRequest) GetSourceSftpPutRequest() *shared.SourceSftpPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSftpPutRequest
+}
+
+func (o *PutSourceSftpRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSftpResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSftpResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSftpResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSftpResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesftpbulk.go b/internal/sdk/pkg/models/operations/putsourcesftpbulk.go
old mode 100755
new mode 100644
index e8eac5608..1cff21960
--- a/internal/sdk/pkg/models/operations/putsourcesftpbulk.go
+++ b/internal/sdk/pkg/models/operations/putsourcesftpbulk.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSftpBulkRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSftpBulkRequest) GetSourceSftpBulkPutRequest() *shared.SourceSftpBulkPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSftpBulkPutRequest
+}
+
+func (o *PutSourceSftpBulkRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSftpBulkResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSftpBulkResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSftpBulkResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSftpBulkResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceshopify.go b/internal/sdk/pkg/models/operations/putsourceshopify.go
old mode 100755
new mode 100644
index e0911192f..df29c7cd0
--- a/internal/sdk/pkg/models/operations/putsourceshopify.go
+++ b/internal/sdk/pkg/models/operations/putsourceshopify.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceShopifyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceShopifyRequest) GetSourceShopifyPutRequest() *shared.SourceShopifyPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceShopifyPutRequest
+}
+
+func (o *PutSourceShopifyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceShopifyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceShopifyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceShopifyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceShopifyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceshortio.go b/internal/sdk/pkg/models/operations/putsourceshortio.go
old mode 100755
new mode 100644
index 2e7b54860..eeff461b6
--- a/internal/sdk/pkg/models/operations/putsourceshortio.go
+++ b/internal/sdk/pkg/models/operations/putsourceshortio.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceShortioRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceShortioRequest) GetSourceShortioPutRequest() *shared.SourceShortioPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceShortioPutRequest
+}
+
+func (o *PutSourceShortioRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceShortioResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceShortioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceShortioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceShortioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceslack.go b/internal/sdk/pkg/models/operations/putsourceslack.go
old mode 100755
new mode 100644
index 0b01e6468..7b86c250b
--- a/internal/sdk/pkg/models/operations/putsourceslack.go
+++ b/internal/sdk/pkg/models/operations/putsourceslack.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSlackRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSlackRequest) GetSourceSlackPutRequest() *shared.SourceSlackPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSlackPutRequest
+}
+
+func (o *PutSourceSlackRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSlackResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSlackResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSlackResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSlackResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesmaily.go b/internal/sdk/pkg/models/operations/putsourcesmaily.go
old mode 100755
new mode 100644
index 116907f12..8a43cb351
--- a/internal/sdk/pkg/models/operations/putsourcesmaily.go
+++ b/internal/sdk/pkg/models/operations/putsourcesmaily.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSmailyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSmailyRequest) GetSourceSmailyPutRequest() *shared.SourceSmailyPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSmailyPutRequest
+}
+
+func (o *PutSourceSmailyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSmailyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSmailyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSmailyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSmailyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesmartengage.go b/internal/sdk/pkg/models/operations/putsourcesmartengage.go
old mode 100755
new mode 100644
index 8cb744fd8..c064e4c6c
--- a/internal/sdk/pkg/models/operations/putsourcesmartengage.go
+++ b/internal/sdk/pkg/models/operations/putsourcesmartengage.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSmartengageRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSmartengageRequest) GetSourceSmartengagePutRequest() *shared.SourceSmartengagePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSmartengagePutRequest
+}
+
+func (o *PutSourceSmartengageRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSmartengageResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSmartengageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSmartengageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSmartengageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesmartsheets.go b/internal/sdk/pkg/models/operations/putsourcesmartsheets.go
old mode 100755
new mode 100644
index 93cf5adbb..a68b71e41
--- a/internal/sdk/pkg/models/operations/putsourcesmartsheets.go
+++ b/internal/sdk/pkg/models/operations/putsourcesmartsheets.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSmartsheetsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSmartsheetsRequest) GetSourceSmartsheetsPutRequest() *shared.SourceSmartsheetsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSmartsheetsPutRequest
+}
+
+func (o *PutSourceSmartsheetsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSmartsheetsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSmartsheetsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSmartsheetsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSmartsheetsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesnapchatmarketing.go b/internal/sdk/pkg/models/operations/putsourcesnapchatmarketing.go
old mode 100755
new mode 100644
index 236fa63a0..8f938e9d8
--- a/internal/sdk/pkg/models/operations/putsourcesnapchatmarketing.go
+++ b/internal/sdk/pkg/models/operations/putsourcesnapchatmarketing.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSnapchatMarketingRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSnapchatMarketingRequest) GetSourceSnapchatMarketingPutRequest() *shared.SourceSnapchatMarketingPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSnapchatMarketingPutRequest
+}
+
+func (o *PutSourceSnapchatMarketingRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSnapchatMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSnapchatMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSnapchatMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSnapchatMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesnowflake.go b/internal/sdk/pkg/models/operations/putsourcesnowflake.go
old mode 100755
new mode 100644
index ed2e46e56..739de7e3d
--- a/internal/sdk/pkg/models/operations/putsourcesnowflake.go
+++ b/internal/sdk/pkg/models/operations/putsourcesnowflake.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSnowflakeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSnowflakeRequest) GetSourceSnowflakePutRequest() *shared.SourceSnowflakePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSnowflakePutRequest
+}
+
+func (o *PutSourceSnowflakeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSnowflakeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSnowflakeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSnowflakeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSnowflakeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesonarcloud.go b/internal/sdk/pkg/models/operations/putsourcesonarcloud.go
old mode 100755
new mode 100644
index 0109056d9..364234a59
--- a/internal/sdk/pkg/models/operations/putsourcesonarcloud.go
+++ b/internal/sdk/pkg/models/operations/putsourcesonarcloud.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSonarCloudRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSonarCloudRequest) GetSourceSonarCloudPutRequest() *shared.SourceSonarCloudPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSonarCloudPutRequest
+}
+
+func (o *PutSourceSonarCloudRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSonarCloudResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSonarCloudResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSonarCloudResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSonarCloudResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcespacexapi.go b/internal/sdk/pkg/models/operations/putsourcespacexapi.go
old mode 100755
new mode 100644
index 7b89083a4..0c0a1d3ef
--- a/internal/sdk/pkg/models/operations/putsourcespacexapi.go
+++ b/internal/sdk/pkg/models/operations/putsourcespacexapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSpacexAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSpacexAPIRequest) GetSourceSpacexAPIPutRequest() *shared.SourceSpacexAPIPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSpacexAPIPutRequest
+}
+
+func (o *PutSourceSpacexAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSpacexAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSpacexAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSpacexAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSpacexAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesquare.go b/internal/sdk/pkg/models/operations/putsourcesquare.go
old mode 100755
new mode 100644
index 492135231..7890e6483
--- a/internal/sdk/pkg/models/operations/putsourcesquare.go
+++ b/internal/sdk/pkg/models/operations/putsourcesquare.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSquareRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSquareRequest) GetSourceSquarePutRequest() *shared.SourceSquarePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSquarePutRequest
+}
+
+func (o *PutSourceSquareRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSquareResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSquareResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSquareResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSquareResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcestrava.go b/internal/sdk/pkg/models/operations/putsourcestrava.go
old mode 100755
new mode 100644
index 50f05fbe7..fe9ddb95e
--- a/internal/sdk/pkg/models/operations/putsourcestrava.go
+++ b/internal/sdk/pkg/models/operations/putsourcestrava.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceStravaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceStravaRequest) GetSourceStravaPutRequest() *shared.SourceStravaPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceStravaPutRequest
+}
+
+func (o *PutSourceStravaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceStravaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceStravaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceStravaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceStravaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcestripe.go b/internal/sdk/pkg/models/operations/putsourcestripe.go
old mode 100755
new mode 100644
index 746e1b753..805137ebc
--- a/internal/sdk/pkg/models/operations/putsourcestripe.go
+++ b/internal/sdk/pkg/models/operations/putsourcestripe.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceStripeRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceStripeRequest) GetSourceStripePutRequest() *shared.SourceStripePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceStripePutRequest
+}
+
+func (o *PutSourceStripeRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceStripeResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceStripeResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceStripeResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceStripeResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesurveymonkey.go b/internal/sdk/pkg/models/operations/putsourcesurveymonkey.go
old mode 100755
new mode 100644
index 949eef39d..c072d27da
--- a/internal/sdk/pkg/models/operations/putsourcesurveymonkey.go
+++ b/internal/sdk/pkg/models/operations/putsourcesurveymonkey.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSurveymonkeyRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSurveymonkeyRequest) GetSourceSurveymonkeyPutRequest() *shared.SourceSurveymonkeyPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSurveymonkeyPutRequest
+}
+
+func (o *PutSourceSurveymonkeyRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSurveymonkeyResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSurveymonkeyResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSurveymonkeyResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSurveymonkeyResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcesurveysparrow.go b/internal/sdk/pkg/models/operations/putsourcesurveysparrow.go
old mode 100755
new mode 100644
index ef6c070c9..cb07b885b
--- a/internal/sdk/pkg/models/operations/putsourcesurveysparrow.go
+++ b/internal/sdk/pkg/models/operations/putsourcesurveysparrow.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceSurveySparrowRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceSurveySparrowRequest) GetSourceSurveySparrowPutRequest() *shared.SourceSurveySparrowPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceSurveySparrowPutRequest
+}
+
+func (o *PutSourceSurveySparrowRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceSurveySparrowResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceSurveySparrowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceSurveySparrowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceSurveySparrowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcetempo.go b/internal/sdk/pkg/models/operations/putsourcetempo.go
old mode 100755
new mode 100644
index e86ba8cd6..c7894370e
--- a/internal/sdk/pkg/models/operations/putsourcetempo.go
+++ b/internal/sdk/pkg/models/operations/putsourcetempo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceTempoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceTempoRequest) GetSourceTempoPutRequest() *shared.SourceTempoPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceTempoPutRequest
+}
+
+func (o *PutSourceTempoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceTempoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceTempoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceTempoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceTempoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcetheguardianapi.go b/internal/sdk/pkg/models/operations/putsourcetheguardianapi.go
old mode 100755
new mode 100644
index b3f16e40e..38df86484
--- a/internal/sdk/pkg/models/operations/putsourcetheguardianapi.go
+++ b/internal/sdk/pkg/models/operations/putsourcetheguardianapi.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceTheGuardianAPIRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceTheGuardianAPIRequest) GetSourceTheGuardianAPIPutRequest() *shared.SourceTheGuardianAPIPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceTheGuardianAPIPutRequest
+}
+
+func (o *PutSourceTheGuardianAPIRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceTheGuardianAPIResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceTheGuardianAPIResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceTheGuardianAPIResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceTheGuardianAPIResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcetiktokmarketing.go b/internal/sdk/pkg/models/operations/putsourcetiktokmarketing.go
old mode 100755
new mode 100644
index 38eab168b..ad67b4deb
--- a/internal/sdk/pkg/models/operations/putsourcetiktokmarketing.go
+++ b/internal/sdk/pkg/models/operations/putsourcetiktokmarketing.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceTiktokMarketingRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceTiktokMarketingRequest) GetSourceTiktokMarketingPutRequest() *shared.SourceTiktokMarketingPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceTiktokMarketingPutRequest
+}
+
+func (o *PutSourceTiktokMarketingRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceTiktokMarketingResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceTiktokMarketingResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceTiktokMarketingResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceTiktokMarketingResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcetodoist.go b/internal/sdk/pkg/models/operations/putsourcetodoist.go
old mode 100755
new mode 100644
index 6d52a889c..10d080238
--- a/internal/sdk/pkg/models/operations/putsourcetodoist.go
+++ b/internal/sdk/pkg/models/operations/putsourcetodoist.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceTodoistRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceTodoistRequest) GetSourceTodoistPutRequest() *shared.SourceTodoistPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceTodoistPutRequest
+}
+
+func (o *PutSourceTodoistRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceTodoistResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceTodoistResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceTodoistResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceTodoistResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcetrello.go b/internal/sdk/pkg/models/operations/putsourcetrello.go
old mode 100755
new mode 100644
index e2ecf5245..bc115be6d
--- a/internal/sdk/pkg/models/operations/putsourcetrello.go
+++ b/internal/sdk/pkg/models/operations/putsourcetrello.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceTrelloRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceTrelloRequest) GetSourceTrelloPutRequest() *shared.SourceTrelloPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceTrelloPutRequest
+}
+
+func (o *PutSourceTrelloRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceTrelloResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceTrelloResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceTrelloResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceTrelloResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcetrustpilot.go b/internal/sdk/pkg/models/operations/putsourcetrustpilot.go
old mode 100755
new mode 100644
index 5e4ab2f86..f3aae6821
--- a/internal/sdk/pkg/models/operations/putsourcetrustpilot.go
+++ b/internal/sdk/pkg/models/operations/putsourcetrustpilot.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceTrustpilotRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceTrustpilotRequest) GetSourceTrustpilotPutRequest() *shared.SourceTrustpilotPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceTrustpilotPutRequest
+}
+
+func (o *PutSourceTrustpilotRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceTrustpilotResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceTrustpilotResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceTrustpilotResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceTrustpilotResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcetvmazeschedule.go b/internal/sdk/pkg/models/operations/putsourcetvmazeschedule.go
old mode 100755
new mode 100644
index 609e4b4ea..660293e8a
--- a/internal/sdk/pkg/models/operations/putsourcetvmazeschedule.go
+++ b/internal/sdk/pkg/models/operations/putsourcetvmazeschedule.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceTvmazeScheduleRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceTvmazeScheduleRequest) GetSourceTvmazeSchedulePutRequest() *shared.SourceTvmazeSchedulePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceTvmazeSchedulePutRequest
+}
+
+func (o *PutSourceTvmazeScheduleRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceTvmazeScheduleResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceTvmazeScheduleResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceTvmazeScheduleResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceTvmazeScheduleResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcetwilio.go b/internal/sdk/pkg/models/operations/putsourcetwilio.go
old mode 100755
new mode 100644
index f00b2345d..da5baaf5c
--- a/internal/sdk/pkg/models/operations/putsourcetwilio.go
+++ b/internal/sdk/pkg/models/operations/putsourcetwilio.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceTwilioRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceTwilioRequest) GetSourceTwilioPutRequest() *shared.SourceTwilioPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceTwilioPutRequest
+}
+
+func (o *PutSourceTwilioRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceTwilioResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceTwilioResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceTwilioResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceTwilioResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcetwiliotaskrouter.go b/internal/sdk/pkg/models/operations/putsourcetwiliotaskrouter.go
old mode 100755
new mode 100644
index e0bd7d102..c2e11cbbe
--- a/internal/sdk/pkg/models/operations/putsourcetwiliotaskrouter.go
+++ b/internal/sdk/pkg/models/operations/putsourcetwiliotaskrouter.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceTwilioTaskrouterRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceTwilioTaskrouterRequest) GetSourceTwilioTaskrouterPutRequest() *shared.SourceTwilioTaskrouterPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceTwilioTaskrouterPutRequest
+}
+
+func (o *PutSourceTwilioTaskrouterRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceTwilioTaskrouterResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceTwilioTaskrouterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceTwilioTaskrouterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceTwilioTaskrouterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcetwitter.go b/internal/sdk/pkg/models/operations/putsourcetwitter.go
old mode 100755
new mode 100644
index e936ba3d0..31864e714
--- a/internal/sdk/pkg/models/operations/putsourcetwitter.go
+++ b/internal/sdk/pkg/models/operations/putsourcetwitter.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceTwitterRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceTwitterRequest) GetSourceTwitterPutRequest() *shared.SourceTwitterPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceTwitterPutRequest
+}
+
+func (o *PutSourceTwitterRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceTwitterResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceTwitterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceTwitterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceTwitterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcetypeform.go b/internal/sdk/pkg/models/operations/putsourcetypeform.go
old mode 100755
new mode 100644
index f4b82f142..ea76b213f
--- a/internal/sdk/pkg/models/operations/putsourcetypeform.go
+++ b/internal/sdk/pkg/models/operations/putsourcetypeform.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceTypeformRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceTypeformRequest) GetSourceTypeformPutRequest() *shared.SourceTypeformPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceTypeformPutRequest
+}
+
+func (o *PutSourceTypeformRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceTypeformResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceTypeformResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceTypeformResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceTypeformResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceuscensus.go b/internal/sdk/pkg/models/operations/putsourceuscensus.go
old mode 100755
new mode 100644
index 9847a0232..dbabe373e
--- a/internal/sdk/pkg/models/operations/putsourceuscensus.go
+++ b/internal/sdk/pkg/models/operations/putsourceuscensus.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceUsCensusRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceUsCensusRequest) GetSourceUsCensusPutRequest() *shared.SourceUsCensusPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceUsCensusPutRequest
+}
+
+func (o *PutSourceUsCensusRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceUsCensusResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceUsCensusResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceUsCensusResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceUsCensusResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcevantage.go b/internal/sdk/pkg/models/operations/putsourcevantage.go
old mode 100755
new mode 100644
index 8353ad337..8b3920c79
--- a/internal/sdk/pkg/models/operations/putsourcevantage.go
+++ b/internal/sdk/pkg/models/operations/putsourcevantage.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceVantageRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceVantageRequest) GetSourceVantagePutRequest() *shared.SourceVantagePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceVantagePutRequest
+}
+
+func (o *PutSourceVantageRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceVantageResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceVantageResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceVantageResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceVantageResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcewebflow.go b/internal/sdk/pkg/models/operations/putsourcewebflow.go
old mode 100755
new mode 100644
index ccebcf385..efc5b581a
--- a/internal/sdk/pkg/models/operations/putsourcewebflow.go
+++ b/internal/sdk/pkg/models/operations/putsourcewebflow.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceWebflowRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceWebflowRequest) GetSourceWebflowPutRequest() *shared.SourceWebflowPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceWebflowPutRequest
+}
+
+func (o *PutSourceWebflowRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceWebflowResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceWebflowResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceWebflowResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceWebflowResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcewhiskyhunter.go b/internal/sdk/pkg/models/operations/putsourcewhiskyhunter.go
old mode 100755
new mode 100644
index 74257d007..c1df5b4ce
--- a/internal/sdk/pkg/models/operations/putsourcewhiskyhunter.go
+++ b/internal/sdk/pkg/models/operations/putsourcewhiskyhunter.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceWhiskyHunterRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceWhiskyHunterRequest) GetSourceWhiskyHunterPutRequest() *shared.SourceWhiskyHunterPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceWhiskyHunterPutRequest
+}
+
+func (o *PutSourceWhiskyHunterRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceWhiskyHunterResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceWhiskyHunterResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceWhiskyHunterResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceWhiskyHunterResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcewikipediapageviews.go b/internal/sdk/pkg/models/operations/putsourcewikipediapageviews.go
old mode 100755
new mode 100644
index 98da11650..4694b8797
--- a/internal/sdk/pkg/models/operations/putsourcewikipediapageviews.go
+++ b/internal/sdk/pkg/models/operations/putsourcewikipediapageviews.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceWikipediaPageviewsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceWikipediaPageviewsRequest) GetSourceWikipediaPageviewsPutRequest() *shared.SourceWikipediaPageviewsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceWikipediaPageviewsPutRequest
+}
+
+func (o *PutSourceWikipediaPageviewsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceWikipediaPageviewsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceWikipediaPageviewsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceWikipediaPageviewsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceWikipediaPageviewsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcewoocommerce.go b/internal/sdk/pkg/models/operations/putsourcewoocommerce.go
old mode 100755
new mode 100644
index 4464a24c0..b94472079
--- a/internal/sdk/pkg/models/operations/putsourcewoocommerce.go
+++ b/internal/sdk/pkg/models/operations/putsourcewoocommerce.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceWoocommerceRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceWoocommerceRequest) GetSourceWoocommercePutRequest() *shared.SourceWoocommercePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceWoocommercePutRequest
+}
+
+func (o *PutSourceWoocommerceRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceWoocommerceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceWoocommerceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceWoocommerceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceWoocommerceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcexero.go b/internal/sdk/pkg/models/operations/putsourcexero.go
deleted file mode 100755
index dd8fda6f3..000000000
--- a/internal/sdk/pkg/models/operations/putsourcexero.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type PutSourceXeroRequest struct {
- SourceXeroPutRequest *shared.SourceXeroPutRequest `request:"mediaType=application/json"`
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type PutSourceXeroResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/putsourcexkcd.go b/internal/sdk/pkg/models/operations/putsourcexkcd.go
old mode 100755
new mode 100644
index 70f250114..a69ea9f3d
--- a/internal/sdk/pkg/models/operations/putsourcexkcd.go
+++ b/internal/sdk/pkg/models/operations/putsourcexkcd.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceXkcdRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceXkcdRequest) GetSourceXkcdPutRequest() *shared.SourceXkcdPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceXkcdPutRequest
+}
+
+func (o *PutSourceXkcdRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceXkcdResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceXkcdResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceXkcdResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceXkcdResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceyandexmetrica.go b/internal/sdk/pkg/models/operations/putsourceyandexmetrica.go
old mode 100755
new mode 100644
index 9d24f3e8b..04dd3e092
--- a/internal/sdk/pkg/models/operations/putsourceyandexmetrica.go
+++ b/internal/sdk/pkg/models/operations/putsourceyandexmetrica.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceYandexMetricaRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceYandexMetricaRequest) GetSourceYandexMetricaPutRequest() *shared.SourceYandexMetricaPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceYandexMetricaPutRequest
+}
+
+func (o *PutSourceYandexMetricaRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceYandexMetricaResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceYandexMetricaResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceYandexMetricaResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceYandexMetricaResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceyotpo.go b/internal/sdk/pkg/models/operations/putsourceyotpo.go
old mode 100755
new mode 100644
index 092cde685..6f4220818
--- a/internal/sdk/pkg/models/operations/putsourceyotpo.go
+++ b/internal/sdk/pkg/models/operations/putsourceyotpo.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceYotpoRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceYotpoRequest) GetSourceYotpoPutRequest() *shared.SourceYotpoPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceYotpoPutRequest
+}
+
+func (o *PutSourceYotpoRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceYotpoResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceYotpoResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceYotpoResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceYotpoResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourceyounium.go b/internal/sdk/pkg/models/operations/putsourceyounium.go
deleted file mode 100755
index cc8dff2f7..000000000
--- a/internal/sdk/pkg/models/operations/putsourceyounium.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package operations
-
-import (
- "airbyte/internal/sdk/pkg/models/shared"
- "net/http"
-)
-
-type PutSourceYouniumRequest struct {
- SourceYouniumPutRequest *shared.SourceYouniumPutRequest `request:"mediaType=application/json"`
- SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
-}
-
-type PutSourceYouniumResponse struct {
- ContentType string
- StatusCode int
- RawResponse *http.Response
-}
diff --git a/internal/sdk/pkg/models/operations/putsourceyoutubeanalytics.go b/internal/sdk/pkg/models/operations/putsourceyoutubeanalytics.go
old mode 100755
new mode 100644
index fd5078ad1..fd9575f38
--- a/internal/sdk/pkg/models/operations/putsourceyoutubeanalytics.go
+++ b/internal/sdk/pkg/models/operations/putsourceyoutubeanalytics.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceYoutubeAnalyticsRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceYoutubeAnalyticsRequest) GetSourceYoutubeAnalyticsPutRequest() *shared.SourceYoutubeAnalyticsPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceYoutubeAnalyticsPutRequest
+}
+
+func (o *PutSourceYoutubeAnalyticsRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceYoutubeAnalyticsResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceYoutubeAnalyticsResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceYoutubeAnalyticsResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceYoutubeAnalyticsResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcezendeskchat.go b/internal/sdk/pkg/models/operations/putsourcezendeskchat.go
old mode 100755
new mode 100644
index 3f8c1dde1..e27a05568
--- a/internal/sdk/pkg/models/operations/putsourcezendeskchat.go
+++ b/internal/sdk/pkg/models/operations/putsourcezendeskchat.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceZendeskChatRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceZendeskChatRequest) GetSourceZendeskChatPutRequest() *shared.SourceZendeskChatPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceZendeskChatPutRequest
+}
+
+func (o *PutSourceZendeskChatRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceZendeskChatResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceZendeskChatResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceZendeskChatResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceZendeskChatResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcezendesksell.go b/internal/sdk/pkg/models/operations/putsourcezendesksell.go
new file mode 100644
index 000000000..a7ab10b04
--- /dev/null
+++ b/internal/sdk/pkg/models/operations/putsourcezendesksell.go
@@ -0,0 +1,57 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package operations
+
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "net/http"
+)
+
+type PutSourceZendeskSellRequest struct {
+ SourceZendeskSellPutRequest *shared.SourceZendeskSellPutRequest `request:"mediaType=application/json"`
+ SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
+}
+
+func (o *PutSourceZendeskSellRequest) GetSourceZendeskSellPutRequest() *shared.SourceZendeskSellPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceZendeskSellPutRequest
+}
+
+func (o *PutSourceZendeskSellRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+type PutSourceZendeskSellResponse struct {
+ // HTTP response content type for this operation
+ ContentType string
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
+ RawResponse *http.Response
+}
+
+func (o *PutSourceZendeskSellResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceZendeskSellResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceZendeskSellResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcezendesksunshine.go b/internal/sdk/pkg/models/operations/putsourcezendesksunshine.go
old mode 100755
new mode 100644
index adc1f6d60..0366eba64
--- a/internal/sdk/pkg/models/operations/putsourcezendesksunshine.go
+++ b/internal/sdk/pkg/models/operations/putsourcezendesksunshine.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceZendeskSunshineRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceZendeskSunshineRequest) GetSourceZendeskSunshinePutRequest() *shared.SourceZendeskSunshinePutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceZendeskSunshinePutRequest
+}
+
+func (o *PutSourceZendeskSunshineRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceZendeskSunshineResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceZendeskSunshineResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceZendeskSunshineResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceZendeskSunshineResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcezendesksupport.go b/internal/sdk/pkg/models/operations/putsourcezendesksupport.go
old mode 100755
new mode 100644
index d14d2c207..ac11305cb
--- a/internal/sdk/pkg/models/operations/putsourcezendesksupport.go
+++ b/internal/sdk/pkg/models/operations/putsourcezendesksupport.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceZendeskSupportRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceZendeskSupportRequest) GetSourceZendeskSupportPutRequest() *shared.SourceZendeskSupportPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceZendeskSupportPutRequest
+}
+
+func (o *PutSourceZendeskSupportRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceZendeskSupportResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceZendeskSupportResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceZendeskSupportResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceZendeskSupportResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcezendesktalk.go b/internal/sdk/pkg/models/operations/putsourcezendesktalk.go
old mode 100755
new mode 100644
index 592e949cf..d4b76c3f9
--- a/internal/sdk/pkg/models/operations/putsourcezendesktalk.go
+++ b/internal/sdk/pkg/models/operations/putsourcezendesktalk.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceZendeskTalkRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceZendeskTalkRequest) GetSourceZendeskTalkPutRequest() *shared.SourceZendeskTalkPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceZendeskTalkPutRequest
+}
+
+func (o *PutSourceZendeskTalkRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceZendeskTalkResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceZendeskTalkResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceZendeskTalkResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceZendeskTalkResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcezenloop.go b/internal/sdk/pkg/models/operations/putsourcezenloop.go
old mode 100755
new mode 100644
index 10c4cab0c..0265f0eaf
--- a/internal/sdk/pkg/models/operations/putsourcezenloop.go
+++ b/internal/sdk/pkg/models/operations/putsourcezenloop.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceZenloopRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceZenloopRequest) GetSourceZenloopPutRequest() *shared.SourceZenloopPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceZenloopPutRequest
+}
+
+func (o *PutSourceZenloopRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceZenloopResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceZenloopResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceZenloopResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceZenloopResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcezohocrm.go b/internal/sdk/pkg/models/operations/putsourcezohocrm.go
old mode 100755
new mode 100644
index 95f73325d..285fb24dd
--- a/internal/sdk/pkg/models/operations/putsourcezohocrm.go
+++ b/internal/sdk/pkg/models/operations/putsourcezohocrm.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceZohoCrmRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceZohoCrmRequest) GetSourceZohoCrmPutRequest() *shared.SourceZohoCrmPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceZohoCrmPutRequest
+}
+
+func (o *PutSourceZohoCrmRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceZohoCrmResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceZohoCrmResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceZohoCrmResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceZohoCrmResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcezoom.go b/internal/sdk/pkg/models/operations/putsourcezoom.go
old mode 100755
new mode 100644
index 1b33ba320..6299383d8
--- a/internal/sdk/pkg/models/operations/putsourcezoom.go
+++ b/internal/sdk/pkg/models/operations/putsourcezoom.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceZoomRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceZoomRequest) GetSourceZoomPutRequest() *shared.SourceZoomPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceZoomPutRequest
+}
+
+func (o *PutSourceZoomRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceZoomResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceZoomResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceZoomResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceZoomResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/putsourcezuora.go b/internal/sdk/pkg/models/operations/putsourcezuora.go
old mode 100755
new mode 100644
index f5bf8355d..1ffc5bfcf
--- a/internal/sdk/pkg/models/operations/putsourcezuora.go
+++ b/internal/sdk/pkg/models/operations/putsourcezuora.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,8 +12,46 @@ type PutSourceZuoraRequest struct {
SourceID string `pathParam:"style=simple,explode=false,name=sourceId"`
}
+func (o *PutSourceZuoraRequest) GetSourceZuoraPutRequest() *shared.SourceZuoraPutRequest {
+ if o == nil {
+ return nil
+ }
+ return o.SourceZuoraPutRequest
+}
+
+func (o *PutSourceZuoraRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
type PutSourceZuoraResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
}
+
+func (o *PutSourceZuoraResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *PutSourceZuoraResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *PutSourceZuoraResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
diff --git a/internal/sdk/pkg/models/operations/updateworkspace.go b/internal/sdk/pkg/models/operations/updateworkspace.go
old mode 100755
new mode 100644
index 363692dc4..50b535a13
--- a/internal/sdk/pkg/models/operations/updateworkspace.go
+++ b/internal/sdk/pkg/models/operations/updateworkspace.go
@@ -3,7 +3,7 @@
package operations
import (
- "airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
"net/http"
)
@@ -12,10 +12,55 @@ type UpdateWorkspaceRequest struct {
WorkspaceID string `pathParam:"style=simple,explode=false,name=workspaceId"`
}
+func (o *UpdateWorkspaceRequest) GetWorkspaceUpdateRequest() shared.WorkspaceUpdateRequest {
+ if o == nil {
+ return shared.WorkspaceUpdateRequest{}
+ }
+ return o.WorkspaceUpdateRequest
+}
+
+func (o *UpdateWorkspaceRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
+
type UpdateWorkspaceResponse struct {
+ // HTTP response content type for this operation
ContentType string
- StatusCode int
+ // HTTP response status code for this operation
+ StatusCode int
+ // Raw HTTP response; suitable for custom response parsing
RawResponse *http.Response
// Successful operation
WorkspaceResponse *shared.WorkspaceResponse
}
+
+func (o *UpdateWorkspaceResponse) GetContentType() string {
+ if o == nil {
+ return ""
+ }
+ return o.ContentType
+}
+
+func (o *UpdateWorkspaceResponse) GetStatusCode() int {
+ if o == nil {
+ return 0
+ }
+ return o.StatusCode
+}
+
+func (o *UpdateWorkspaceResponse) GetRawResponse() *http.Response {
+ if o == nil {
+ return nil
+ }
+ return o.RawResponse
+}
+
+func (o *UpdateWorkspaceResponse) GetWorkspaceResponse() *shared.WorkspaceResponse {
+ if o == nil {
+ return nil
+ }
+ return o.WorkspaceResponse
+}
diff --git a/internal/sdk/pkg/models/sdkerrors/sdkerror.go b/internal/sdk/pkg/models/sdkerrors/sdkerror.go
new file mode 100644
index 000000000..5c1affd31
--- /dev/null
+++ b/internal/sdk/pkg/models/sdkerrors/sdkerror.go
@@ -0,0 +1,35 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package sdkerrors
+
+import (
+ "fmt"
+ "net/http"
+)
+
+type SDKError struct {
+ Message string
+ StatusCode int
+ Body string
+ RawResponse *http.Response
+}
+
+var _ error = &SDKError{}
+
+func NewSDKError(message string, statusCode int, body string, httpRes *http.Response) *SDKError {
+ return &SDKError{
+ Message: message,
+ StatusCode: statusCode,
+ Body: body,
+ RawResponse: httpRes,
+ }
+}
+
+func (e *SDKError) Error() string {
+ body := ""
+ if len(e.Body) > 0 {
+ body = fmt.Sprintf("\n%s", e.Body)
+ }
+
+ return fmt.Sprintf("%s: Status %d%s", e.Message, e.StatusCode, body)
+}
diff --git a/internal/sdk/pkg/models/shared/actortypeenum.go b/internal/sdk/pkg/models/shared/actortypeenum.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/connectioncreaterequest.go b/internal/sdk/pkg/models/shared/connectioncreaterequest.go
old mode 100755
new mode 100644
index f537dca7c..148f7f100
--- a/internal/sdk/pkg/models/shared/connectioncreaterequest.go
+++ b/internal/sdk/pkg/models/shared/connectioncreaterequest.go
@@ -2,19 +2,23 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type ConnectionCreateRequest struct {
// A list of configured stream options for a connection.
Configurations *StreamConfigurations `json:"configurations,omitempty"`
- DataResidency *GeographyEnum `json:"dataResidency,omitempty"`
+ DataResidency *GeographyEnum `default:"auto" json:"dataResidency"`
DestinationID string `json:"destinationId"`
// Optional name of the connection
Name *string `json:"name,omitempty"`
// Define the location where the data will be stored in the destination
- NamespaceDefinition *NamespaceDefinitionEnum `json:"namespaceDefinition,omitempty"`
+ NamespaceDefinition *NamespaceDefinitionEnum `default:"destination" json:"namespaceDefinition"`
// Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
- NamespaceFormat *string `json:"namespaceFormat,omitempty"`
+ NamespaceFormat *string `default:"null" json:"namespaceFormat"`
// Set how Airbyte handles syncs when it detects a non-breaking schema change in the source
- NonBreakingSchemaUpdatesBehavior *NonBreakingSchemaUpdatesBehaviorEnum `json:"nonBreakingSchemaUpdatesBehavior,omitempty"`
+ NonBreakingSchemaUpdatesBehavior *NonBreakingSchemaUpdatesBehaviorEnum `default:"ignore" json:"nonBreakingSchemaUpdatesBehavior"`
// Prefix that will be prepended to the name of each stream when it is written to the destination (ex. “airbyte_” causes “projects” => “airbyte_projects”).
Prefix *string `json:"prefix,omitempty"`
// schedule for when the the connection should run, per the schedule type
@@ -22,3 +26,91 @@ type ConnectionCreateRequest struct {
SourceID string `json:"sourceId"`
Status *ConnectionStatusEnum `json:"status,omitempty"`
}
+
+func (c ConnectionCreateRequest) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(c, "", false)
+}
+
+func (c *ConnectionCreateRequest) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &c, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ConnectionCreateRequest) GetConfigurations() *StreamConfigurations {
+ if o == nil {
+ return nil
+ }
+ return o.Configurations
+}
+
+func (o *ConnectionCreateRequest) GetDataResidency() *GeographyEnum {
+ if o == nil {
+ return nil
+ }
+ return o.DataResidency
+}
+
+func (o *ConnectionCreateRequest) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+func (o *ConnectionCreateRequest) GetName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Name
+}
+
+func (o *ConnectionCreateRequest) GetNamespaceDefinition() *NamespaceDefinitionEnum {
+ if o == nil {
+ return nil
+ }
+ return o.NamespaceDefinition
+}
+
+func (o *ConnectionCreateRequest) GetNamespaceFormat() *string {
+ if o == nil {
+ return nil
+ }
+ return o.NamespaceFormat
+}
+
+func (o *ConnectionCreateRequest) GetNonBreakingSchemaUpdatesBehavior() *NonBreakingSchemaUpdatesBehaviorEnum {
+ if o == nil {
+ return nil
+ }
+ return o.NonBreakingSchemaUpdatesBehavior
+}
+
+func (o *ConnectionCreateRequest) GetPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Prefix
+}
+
+func (o *ConnectionCreateRequest) GetSchedule() *ConnectionSchedule {
+ if o == nil {
+ return nil
+ }
+ return o.Schedule
+}
+
+func (o *ConnectionCreateRequest) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+func (o *ConnectionCreateRequest) GetStatus() *ConnectionStatusEnum {
+ if o == nil {
+ return nil
+ }
+ return o.Status
+}
diff --git a/internal/sdk/pkg/models/shared/connectionpatchrequest.go b/internal/sdk/pkg/models/shared/connectionpatchrequest.go
old mode 100755
new mode 100644
index bc188e13c..2072cff46
--- a/internal/sdk/pkg/models/shared/connectionpatchrequest.go
+++ b/internal/sdk/pkg/models/shared/connectionpatchrequest.go
@@ -2,6 +2,10 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type ConnectionPatchRequest struct {
// A list of configured stream options for a connection.
Configurations *StreamConfigurations `json:"configurations,omitempty"`
@@ -11,7 +15,7 @@ type ConnectionPatchRequest struct {
// Define the location where the data will be stored in the destination
NamespaceDefinition *NamespaceDefinitionEnumNoDefault `json:"namespaceDefinition,omitempty"`
// Used when namespaceDefinition is 'custom_format'. If blank then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" then behaves like namespaceDefinition = 'source'.
- NamespaceFormat *string `json:"namespaceFormat,omitempty"`
+ NamespaceFormat *string `default:"null" json:"namespaceFormat"`
// Set how Airbyte handles syncs when it detects a non-breaking schema change in the source
NonBreakingSchemaUpdatesBehavior *NonBreakingSchemaUpdatesBehaviorEnumNoDefault `json:"nonBreakingSchemaUpdatesBehavior,omitempty"`
// Prefix that will be prepended to the name of each stream when it is written to the destination (ex. “airbyte_” causes “projects” => “airbyte_projects”).
@@ -20,3 +24,77 @@ type ConnectionPatchRequest struct {
Schedule *ConnectionSchedule `json:"schedule,omitempty"`
Status *ConnectionStatusEnum `json:"status,omitempty"`
}
+
+func (c ConnectionPatchRequest) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(c, "", false)
+}
+
+func (c *ConnectionPatchRequest) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &c, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ConnectionPatchRequest) GetConfigurations() *StreamConfigurations {
+ if o == nil {
+ return nil
+ }
+ return o.Configurations
+}
+
+func (o *ConnectionPatchRequest) GetDataResidency() *GeographyEnumNoDefault {
+ if o == nil {
+ return nil
+ }
+ return o.DataResidency
+}
+
+func (o *ConnectionPatchRequest) GetName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Name
+}
+
+func (o *ConnectionPatchRequest) GetNamespaceDefinition() *NamespaceDefinitionEnumNoDefault {
+ if o == nil {
+ return nil
+ }
+ return o.NamespaceDefinition
+}
+
+func (o *ConnectionPatchRequest) GetNamespaceFormat() *string {
+ if o == nil {
+ return nil
+ }
+ return o.NamespaceFormat
+}
+
+func (o *ConnectionPatchRequest) GetNonBreakingSchemaUpdatesBehavior() *NonBreakingSchemaUpdatesBehaviorEnumNoDefault {
+ if o == nil {
+ return nil
+ }
+ return o.NonBreakingSchemaUpdatesBehavior
+}
+
+func (o *ConnectionPatchRequest) GetPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Prefix
+}
+
+func (o *ConnectionPatchRequest) GetSchedule() *ConnectionSchedule {
+ if o == nil {
+ return nil
+ }
+ return o.Schedule
+}
+
+func (o *ConnectionPatchRequest) GetStatus() *ConnectionStatusEnum {
+ if o == nil {
+ return nil
+ }
+ return o.Status
+}
diff --git a/internal/sdk/pkg/models/shared/connectionresponse.go b/internal/sdk/pkg/models/shared/connectionresponse.go
old mode 100755
new mode 100644
index 7de532ae1..78a9ec13d
--- a/internal/sdk/pkg/models/shared/connectionresponse.go
+++ b/internal/sdk/pkg/models/shared/connectionresponse.go
@@ -2,19 +2,23 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
// ConnectionResponse - Provides details of a single connection.
type ConnectionResponse struct {
// A list of configured stream options for a connection.
Configurations StreamConfigurations `json:"configurations"`
ConnectionID string `json:"connectionId"`
- DataResidency GeographyEnum `json:"dataResidency"`
+ DataResidency *GeographyEnum `default:"auto" json:"dataResidency"`
DestinationID string `json:"destinationId"`
Name string `json:"name"`
// Define the location where the data will be stored in the destination
- NamespaceDefinition *NamespaceDefinitionEnum `json:"namespaceDefinition,omitempty"`
+ NamespaceDefinition *NamespaceDefinitionEnum `default:"destination" json:"namespaceDefinition"`
NamespaceFormat *string `json:"namespaceFormat,omitempty"`
// Set how Airbyte handles syncs when it detects a non-breaking schema change in the source
- NonBreakingSchemaUpdatesBehavior *NonBreakingSchemaUpdatesBehaviorEnum `json:"nonBreakingSchemaUpdatesBehavior,omitempty"`
+ NonBreakingSchemaUpdatesBehavior *NonBreakingSchemaUpdatesBehaviorEnum `default:"ignore" json:"nonBreakingSchemaUpdatesBehavior"`
Prefix *string `json:"prefix,omitempty"`
// schedule for when the the connection should run, per the schedule type
Schedule ConnectionScheduleResponse `json:"schedule"`
@@ -22,3 +26,105 @@ type ConnectionResponse struct {
Status ConnectionStatusEnum `json:"status"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (c ConnectionResponse) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(c, "", false)
+}
+
+func (c *ConnectionResponse) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &c, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ConnectionResponse) GetConfigurations() StreamConfigurations {
+ if o == nil {
+ return StreamConfigurations{}
+ }
+ return o.Configurations
+}
+
+func (o *ConnectionResponse) GetConnectionID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConnectionID
+}
+
+func (o *ConnectionResponse) GetDataResidency() *GeographyEnum {
+ if o == nil {
+ return nil
+ }
+ return o.DataResidency
+}
+
+func (o *ConnectionResponse) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+func (o *ConnectionResponse) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *ConnectionResponse) GetNamespaceDefinition() *NamespaceDefinitionEnum {
+ if o == nil {
+ return nil
+ }
+ return o.NamespaceDefinition
+}
+
+func (o *ConnectionResponse) GetNamespaceFormat() *string {
+ if o == nil {
+ return nil
+ }
+ return o.NamespaceFormat
+}
+
+func (o *ConnectionResponse) GetNonBreakingSchemaUpdatesBehavior() *NonBreakingSchemaUpdatesBehaviorEnum {
+ if o == nil {
+ return nil
+ }
+ return o.NonBreakingSchemaUpdatesBehavior
+}
+
+func (o *ConnectionResponse) GetPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Prefix
+}
+
+func (o *ConnectionResponse) GetSchedule() ConnectionScheduleResponse {
+ if o == nil {
+ return ConnectionScheduleResponse{}
+ }
+ return o.Schedule
+}
+
+func (o *ConnectionResponse) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+func (o *ConnectionResponse) GetStatus() ConnectionStatusEnum {
+ if o == nil {
+ return ConnectionStatusEnum("")
+ }
+ return o.Status
+}
+
+func (o *ConnectionResponse) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/connectionschedule.go b/internal/sdk/pkg/models/shared/connectionschedule.go
old mode 100755
new mode 100644
index 947c9c29c..3fd85bf70
--- a/internal/sdk/pkg/models/shared/connectionschedule.go
+++ b/internal/sdk/pkg/models/shared/connectionschedule.go
@@ -7,3 +7,17 @@ type ConnectionSchedule struct {
CronExpression *string `json:"cronExpression,omitempty"`
ScheduleType ScheduleTypeEnum `json:"scheduleType"`
}
+
+func (o *ConnectionSchedule) GetCronExpression() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CronExpression
+}
+
+func (o *ConnectionSchedule) GetScheduleType() ScheduleTypeEnum {
+ if o == nil {
+ return ScheduleTypeEnum("")
+ }
+ return o.ScheduleType
+}
diff --git a/internal/sdk/pkg/models/shared/connectionscheduleresponse.go b/internal/sdk/pkg/models/shared/connectionscheduleresponse.go
old mode 100755
new mode 100644
index 5481d5813..837507960
--- a/internal/sdk/pkg/models/shared/connectionscheduleresponse.go
+++ b/internal/sdk/pkg/models/shared/connectionscheduleresponse.go
@@ -8,3 +8,24 @@ type ConnectionScheduleResponse struct {
CronExpression *string `json:"cronExpression,omitempty"`
ScheduleType ScheduleTypeWithBasicEnum `json:"scheduleType"`
}
+
+func (o *ConnectionScheduleResponse) GetBasicTiming() *string {
+ if o == nil {
+ return nil
+ }
+ return o.BasicTiming
+}
+
+func (o *ConnectionScheduleResponse) GetCronExpression() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CronExpression
+}
+
+func (o *ConnectionScheduleResponse) GetScheduleType() ScheduleTypeWithBasicEnum {
+ if o == nil {
+ return ScheduleTypeWithBasicEnum("")
+ }
+ return o.ScheduleType
+}
diff --git a/internal/sdk/pkg/models/shared/connectionsresponse.go b/internal/sdk/pkg/models/shared/connectionsresponse.go
old mode 100755
new mode 100644
index b5fe04eec..718a0b2fe
--- a/internal/sdk/pkg/models/shared/connectionsresponse.go
+++ b/internal/sdk/pkg/models/shared/connectionsresponse.go
@@ -7,3 +7,24 @@ type ConnectionsResponse struct {
Next *string `json:"next,omitempty"`
Previous *string `json:"previous,omitempty"`
}
+
+func (o *ConnectionsResponse) GetData() []ConnectionResponse {
+ if o == nil {
+ return []ConnectionResponse{}
+ }
+ return o.Data
+}
+
+func (o *ConnectionsResponse) GetNext() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Next
+}
+
+func (o *ConnectionsResponse) GetPrevious() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Previous
+}
diff --git a/internal/sdk/pkg/models/shared/connectionstatusenum.go b/internal/sdk/pkg/models/shared/connectionstatusenum.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/connectionsyncmodeenum.go b/internal/sdk/pkg/models/shared/connectionsyncmodeenum.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/destinationawsdatalake.go b/internal/sdk/pkg/models/shared/destinationawsdatalake.go
old mode 100755
new mode 100644
index c2769fc33..a9a16d312
--- a/internal/sdk/pkg/models/shared/destinationawsdatalake.go
+++ b/internal/sdk/pkg/models/shared/destinationawsdatalake.go
@@ -3,130 +3,176 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// DestinationAwsDatalakeAuthenticationModeIAMUserCredentialsTitle - Name of the credentials
-type DestinationAwsDatalakeAuthenticationModeIAMUserCredentialsTitle string
+// DestinationAwsDatalakeSchemasCredentialsTitle - Name of the credentials
+type DestinationAwsDatalakeSchemasCredentialsTitle string
const (
- DestinationAwsDatalakeAuthenticationModeIAMUserCredentialsTitleIamUser DestinationAwsDatalakeAuthenticationModeIAMUserCredentialsTitle = "IAM User"
+ DestinationAwsDatalakeSchemasCredentialsTitleIamUser DestinationAwsDatalakeSchemasCredentialsTitle = "IAM User"
)
-func (e DestinationAwsDatalakeAuthenticationModeIAMUserCredentialsTitle) ToPointer() *DestinationAwsDatalakeAuthenticationModeIAMUserCredentialsTitle {
+func (e DestinationAwsDatalakeSchemasCredentialsTitle) ToPointer() *DestinationAwsDatalakeSchemasCredentialsTitle {
return &e
}
-func (e *DestinationAwsDatalakeAuthenticationModeIAMUserCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *DestinationAwsDatalakeSchemasCredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "IAM User":
- *e = DestinationAwsDatalakeAuthenticationModeIAMUserCredentialsTitle(v)
+ *e = DestinationAwsDatalakeSchemasCredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeAuthenticationModeIAMUserCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for DestinationAwsDatalakeSchemasCredentialsTitle: %v", v)
}
}
-// DestinationAwsDatalakeAuthenticationModeIAMUser - Choose How to Authenticate to AWS.
-type DestinationAwsDatalakeAuthenticationModeIAMUser struct {
+// DestinationAwsDatalakeIAMUser - Choose How to Authenticate to AWS.
+type DestinationAwsDatalakeIAMUser struct {
// AWS User Access Key Id
AwsAccessKeyID string `json:"aws_access_key_id"`
// Secret Access Key
AwsSecretAccessKey string `json:"aws_secret_access_key"`
// Name of the credentials
- CredentialsTitle DestinationAwsDatalakeAuthenticationModeIAMUserCredentialsTitle `json:"credentials_title"`
+ credentialsTitle *DestinationAwsDatalakeSchemasCredentialsTitle `const:"IAM User" json:"credentials_title"`
}
-// DestinationAwsDatalakeAuthenticationModeIAMRoleCredentialsTitle - Name of the credentials
-type DestinationAwsDatalakeAuthenticationModeIAMRoleCredentialsTitle string
+func (d DestinationAwsDatalakeIAMUser) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationAwsDatalakeIAMUser) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationAwsDatalakeIAMUser) GetAwsAccessKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsAccessKeyID
+}
+
+func (o *DestinationAwsDatalakeIAMUser) GetAwsSecretAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsSecretAccessKey
+}
+
+func (o *DestinationAwsDatalakeIAMUser) GetCredentialsTitle() *DestinationAwsDatalakeSchemasCredentialsTitle {
+ return DestinationAwsDatalakeSchemasCredentialsTitleIamUser.ToPointer()
+}
+
+// DestinationAwsDatalakeCredentialsTitle - Name of the credentials
+type DestinationAwsDatalakeCredentialsTitle string
const (
- DestinationAwsDatalakeAuthenticationModeIAMRoleCredentialsTitleIamRole DestinationAwsDatalakeAuthenticationModeIAMRoleCredentialsTitle = "IAM Role"
+ DestinationAwsDatalakeCredentialsTitleIamRole DestinationAwsDatalakeCredentialsTitle = "IAM Role"
)
-func (e DestinationAwsDatalakeAuthenticationModeIAMRoleCredentialsTitle) ToPointer() *DestinationAwsDatalakeAuthenticationModeIAMRoleCredentialsTitle {
+func (e DestinationAwsDatalakeCredentialsTitle) ToPointer() *DestinationAwsDatalakeCredentialsTitle {
return &e
}
-func (e *DestinationAwsDatalakeAuthenticationModeIAMRoleCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *DestinationAwsDatalakeCredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "IAM Role":
- *e = DestinationAwsDatalakeAuthenticationModeIAMRoleCredentialsTitle(v)
+ *e = DestinationAwsDatalakeCredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeAuthenticationModeIAMRoleCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for DestinationAwsDatalakeCredentialsTitle: %v", v)
}
}
-// DestinationAwsDatalakeAuthenticationModeIAMRole - Choose How to Authenticate to AWS.
-type DestinationAwsDatalakeAuthenticationModeIAMRole struct {
+// DestinationAwsDatalakeIAMRole - Choose How to Authenticate to AWS.
+type DestinationAwsDatalakeIAMRole struct {
// Name of the credentials
- CredentialsTitle DestinationAwsDatalakeAuthenticationModeIAMRoleCredentialsTitle `json:"credentials_title"`
+ credentialsTitle *DestinationAwsDatalakeCredentialsTitle `const:"IAM Role" json:"credentials_title"`
// Will assume this role to write data to s3
RoleArn string `json:"role_arn"`
}
+func (d DestinationAwsDatalakeIAMRole) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationAwsDatalakeIAMRole) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationAwsDatalakeIAMRole) GetCredentialsTitle() *DestinationAwsDatalakeCredentialsTitle {
+ return DestinationAwsDatalakeCredentialsTitleIamRole.ToPointer()
+}
+
+func (o *DestinationAwsDatalakeIAMRole) GetRoleArn() string {
+ if o == nil {
+ return ""
+ }
+ return o.RoleArn
+}
+
type DestinationAwsDatalakeAuthenticationModeType string
const (
- DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeAuthenticationModeIAMRole DestinationAwsDatalakeAuthenticationModeType = "destination-aws-datalake_Authentication mode_IAM Role"
- DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeAuthenticationModeIAMUser DestinationAwsDatalakeAuthenticationModeType = "destination-aws-datalake_Authentication mode_IAM User"
+ DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeIAMRole DestinationAwsDatalakeAuthenticationModeType = "destination-aws-datalake_IAM Role"
+ DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeIAMUser DestinationAwsDatalakeAuthenticationModeType = "destination-aws-datalake_IAM User"
)
type DestinationAwsDatalakeAuthenticationMode struct {
- DestinationAwsDatalakeAuthenticationModeIAMRole *DestinationAwsDatalakeAuthenticationModeIAMRole
- DestinationAwsDatalakeAuthenticationModeIAMUser *DestinationAwsDatalakeAuthenticationModeIAMUser
+ DestinationAwsDatalakeIAMRole *DestinationAwsDatalakeIAMRole
+ DestinationAwsDatalakeIAMUser *DestinationAwsDatalakeIAMUser
Type DestinationAwsDatalakeAuthenticationModeType
}
-func CreateDestinationAwsDatalakeAuthenticationModeDestinationAwsDatalakeAuthenticationModeIAMRole(destinationAwsDatalakeAuthenticationModeIAMRole DestinationAwsDatalakeAuthenticationModeIAMRole) DestinationAwsDatalakeAuthenticationMode {
- typ := DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeAuthenticationModeIAMRole
+func CreateDestinationAwsDatalakeAuthenticationModeDestinationAwsDatalakeIAMRole(destinationAwsDatalakeIAMRole DestinationAwsDatalakeIAMRole) DestinationAwsDatalakeAuthenticationMode {
+ typ := DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeIAMRole
return DestinationAwsDatalakeAuthenticationMode{
- DestinationAwsDatalakeAuthenticationModeIAMRole: &destinationAwsDatalakeAuthenticationModeIAMRole,
- Type: typ,
+ DestinationAwsDatalakeIAMRole: &destinationAwsDatalakeIAMRole,
+ Type: typ,
}
}
-func CreateDestinationAwsDatalakeAuthenticationModeDestinationAwsDatalakeAuthenticationModeIAMUser(destinationAwsDatalakeAuthenticationModeIAMUser DestinationAwsDatalakeAuthenticationModeIAMUser) DestinationAwsDatalakeAuthenticationMode {
- typ := DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeAuthenticationModeIAMUser
+func CreateDestinationAwsDatalakeAuthenticationModeDestinationAwsDatalakeIAMUser(destinationAwsDatalakeIAMUser DestinationAwsDatalakeIAMUser) DestinationAwsDatalakeAuthenticationMode {
+ typ := DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeIAMUser
return DestinationAwsDatalakeAuthenticationMode{
- DestinationAwsDatalakeAuthenticationModeIAMUser: &destinationAwsDatalakeAuthenticationModeIAMUser,
- Type: typ,
+ DestinationAwsDatalakeIAMUser: &destinationAwsDatalakeIAMUser,
+ Type: typ,
}
}
func (u *DestinationAwsDatalakeAuthenticationMode) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationAwsDatalakeAuthenticationModeIAMRole := new(DestinationAwsDatalakeAuthenticationModeIAMRole)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAwsDatalakeAuthenticationModeIAMRole); err == nil {
- u.DestinationAwsDatalakeAuthenticationModeIAMRole = destinationAwsDatalakeAuthenticationModeIAMRole
- u.Type = DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeAuthenticationModeIAMRole
+
+ destinationAwsDatalakeIAMRole := new(DestinationAwsDatalakeIAMRole)
+ if err := utils.UnmarshalJSON(data, &destinationAwsDatalakeIAMRole, "", true, true); err == nil {
+ u.DestinationAwsDatalakeIAMRole = destinationAwsDatalakeIAMRole
+ u.Type = DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeIAMRole
return nil
}
- destinationAwsDatalakeAuthenticationModeIAMUser := new(DestinationAwsDatalakeAuthenticationModeIAMUser)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAwsDatalakeAuthenticationModeIAMUser); err == nil {
- u.DestinationAwsDatalakeAuthenticationModeIAMUser = destinationAwsDatalakeAuthenticationModeIAMUser
- u.Type = DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeAuthenticationModeIAMUser
+ destinationAwsDatalakeIAMUser := new(DestinationAwsDatalakeIAMUser)
+ if err := utils.UnmarshalJSON(data, &destinationAwsDatalakeIAMUser, "", true, true); err == nil {
+ u.DestinationAwsDatalakeIAMUser = destinationAwsDatalakeIAMUser
+ u.Type = DestinationAwsDatalakeAuthenticationModeTypeDestinationAwsDatalakeIAMUser
return nil
}
@@ -134,56 +180,56 @@ func (u *DestinationAwsDatalakeAuthenticationMode) UnmarshalJSON(data []byte) er
}
func (u DestinationAwsDatalakeAuthenticationMode) MarshalJSON() ([]byte, error) {
- if u.DestinationAwsDatalakeAuthenticationModeIAMRole != nil {
- return json.Marshal(u.DestinationAwsDatalakeAuthenticationModeIAMRole)
+ if u.DestinationAwsDatalakeIAMRole != nil {
+ return utils.MarshalJSON(u.DestinationAwsDatalakeIAMRole, "", true)
}
- if u.DestinationAwsDatalakeAuthenticationModeIAMUser != nil {
- return json.Marshal(u.DestinationAwsDatalakeAuthenticationModeIAMUser)
+ if u.DestinationAwsDatalakeIAMUser != nil {
+ return utils.MarshalJSON(u.DestinationAwsDatalakeIAMUser, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationAwsDatalakeAwsDatalake string
+type AwsDatalake string
const (
- DestinationAwsDatalakeAwsDatalakeAwsDatalake DestinationAwsDatalakeAwsDatalake = "aws-datalake"
+ AwsDatalakeAwsDatalake AwsDatalake = "aws-datalake"
)
-func (e DestinationAwsDatalakeAwsDatalake) ToPointer() *DestinationAwsDatalakeAwsDatalake {
+func (e AwsDatalake) ToPointer() *AwsDatalake {
return &e
}
-func (e *DestinationAwsDatalakeAwsDatalake) UnmarshalJSON(data []byte) error {
+func (e *AwsDatalake) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "aws-datalake":
- *e = DestinationAwsDatalakeAwsDatalake(v)
+ *e = AwsDatalake(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeAwsDatalake: %v", v)
+ return fmt.Errorf("invalid value for AwsDatalake: %v", v)
}
}
-// DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional - The compression algorithm used to compress data.
-type DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional string
+// DestinationAwsDatalakeSchemasCompressionCodecOptional - The compression algorithm used to compress data.
+type DestinationAwsDatalakeSchemasCompressionCodecOptional string
const (
- DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptionalUncompressed DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional = "UNCOMPRESSED"
- DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptionalSnappy DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional = "SNAPPY"
- DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptionalGzip DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional = "GZIP"
- DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptionalZstd DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional = "ZSTD"
+ DestinationAwsDatalakeSchemasCompressionCodecOptionalUncompressed DestinationAwsDatalakeSchemasCompressionCodecOptional = "UNCOMPRESSED"
+ DestinationAwsDatalakeSchemasCompressionCodecOptionalSnappy DestinationAwsDatalakeSchemasCompressionCodecOptional = "SNAPPY"
+ DestinationAwsDatalakeSchemasCompressionCodecOptionalGzip DestinationAwsDatalakeSchemasCompressionCodecOptional = "GZIP"
+ DestinationAwsDatalakeSchemasCompressionCodecOptionalZstd DestinationAwsDatalakeSchemasCompressionCodecOptional = "ZSTD"
)
-func (e DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional) ToPointer() *DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional {
+func (e DestinationAwsDatalakeSchemasCompressionCodecOptional) ToPointer() *DestinationAwsDatalakeSchemasCompressionCodecOptional {
return &e
}
-func (e *DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional) UnmarshalJSON(data []byte) error {
+func (e *DestinationAwsDatalakeSchemasCompressionCodecOptional) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -196,57 +242,82 @@ func (e *DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompres
case "GZIP":
fallthrough
case "ZSTD":
- *e = DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional(v)
+ *e = DestinationAwsDatalakeSchemasCompressionCodecOptional(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional: %v", v)
+ return fmt.Errorf("invalid value for DestinationAwsDatalakeSchemasCompressionCodecOptional: %v", v)
}
}
-type DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard string
+type DestinationAwsDatalakeSchemasFormatTypeWildcard string
const (
- DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageFormatTypeWildcardParquet DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard = "Parquet"
+ DestinationAwsDatalakeSchemasFormatTypeWildcardParquet DestinationAwsDatalakeSchemasFormatTypeWildcard = "Parquet"
)
-func (e DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard) ToPointer() *DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard {
+func (e DestinationAwsDatalakeSchemasFormatTypeWildcard) ToPointer() *DestinationAwsDatalakeSchemasFormatTypeWildcard {
return &e
}
-func (e *DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard) UnmarshalJSON(data []byte) error {
+func (e *DestinationAwsDatalakeSchemasFormatTypeWildcard) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Parquet":
- *e = DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard(v)
+ *e = DestinationAwsDatalakeSchemasFormatTypeWildcard(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard: %v", v)
+ return fmt.Errorf("invalid value for DestinationAwsDatalakeSchemasFormatTypeWildcard: %v", v)
}
}
-// DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage - Format of the data output.
-type DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage struct {
+// DestinationAwsDatalakeParquetColumnarStorage - Format of the data output.
+type DestinationAwsDatalakeParquetColumnarStorage struct {
// The compression algorithm used to compress data.
- CompressionCodec *DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional `json:"compression_codec,omitempty"`
- FormatType DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard `json:"format_type"`
+ CompressionCodec *DestinationAwsDatalakeSchemasCompressionCodecOptional `default:"SNAPPY" json:"compression_codec"`
+ FormatType *DestinationAwsDatalakeSchemasFormatTypeWildcard `default:"Parquet" json:"format_type"`
}
-// DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional - The compression algorithm used to compress data.
-type DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional string
+func (d DestinationAwsDatalakeParquetColumnarStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationAwsDatalakeParquetColumnarStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationAwsDatalakeParquetColumnarStorage) GetCompressionCodec() *DestinationAwsDatalakeSchemasCompressionCodecOptional {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionCodec
+}
+
+func (o *DestinationAwsDatalakeParquetColumnarStorage) GetFormatType() *DestinationAwsDatalakeSchemasFormatTypeWildcard {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
+}
+
+// DestinationAwsDatalakeCompressionCodecOptional - The compression algorithm used to compress data.
+type DestinationAwsDatalakeCompressionCodecOptional string
const (
- DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptionalUncompressed DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional = "UNCOMPRESSED"
- DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptionalGzip DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional = "GZIP"
+ DestinationAwsDatalakeCompressionCodecOptionalUncompressed DestinationAwsDatalakeCompressionCodecOptional = "UNCOMPRESSED"
+ DestinationAwsDatalakeCompressionCodecOptionalGzip DestinationAwsDatalakeCompressionCodecOptional = "GZIP"
)
-func (e DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional) ToPointer() *DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional {
+func (e DestinationAwsDatalakeCompressionCodecOptional) ToPointer() *DestinationAwsDatalakeCompressionCodecOptional {
return &e
}
-func (e *DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional) UnmarshalJSON(data []byte) error {
+func (e *DestinationAwsDatalakeCompressionCodecOptional) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -255,94 +326,114 @@ func (e *DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON
case "UNCOMPRESSED":
fallthrough
case "GZIP":
- *e = DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional(v)
+ *e = DestinationAwsDatalakeCompressionCodecOptional(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional: %v", v)
+ return fmt.Errorf("invalid value for DestinationAwsDatalakeCompressionCodecOptional: %v", v)
}
}
-type DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard string
+type DestinationAwsDatalakeFormatTypeWildcard string
const (
- DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcardJsonl DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard = "JSONL"
+ DestinationAwsDatalakeFormatTypeWildcardJsonl DestinationAwsDatalakeFormatTypeWildcard = "JSONL"
)
-func (e DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard) ToPointer() *DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard {
+func (e DestinationAwsDatalakeFormatTypeWildcard) ToPointer() *DestinationAwsDatalakeFormatTypeWildcard {
return &e
}
-func (e *DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard) UnmarshalJSON(data []byte) error {
+func (e *DestinationAwsDatalakeFormatTypeWildcard) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "JSONL":
- *e = DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard(v)
+ *e = DestinationAwsDatalakeFormatTypeWildcard(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard: %v", v)
+ return fmt.Errorf("invalid value for DestinationAwsDatalakeFormatTypeWildcard: %v", v)
}
}
-// DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON - Format of the data output.
-type DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON struct {
+// DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON - Format of the data output.
+type DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON struct {
// The compression algorithm used to compress data.
- CompressionCodec *DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional `json:"compression_codec,omitempty"`
- FormatType DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard `json:"format_type"`
+ CompressionCodec *DestinationAwsDatalakeCompressionCodecOptional `default:"UNCOMPRESSED" json:"compression_codec"`
+ FormatType *DestinationAwsDatalakeFormatTypeWildcard `default:"JSONL" json:"format_type"`
+}
+
+func (d DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON) GetCompressionCodec() *DestinationAwsDatalakeCompressionCodecOptional {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionCodec
+}
+
+func (o *DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON) GetFormatType() *DestinationAwsDatalakeFormatTypeWildcard {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
}
type DestinationAwsDatalakeOutputFormatWildcardType string
const (
- DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON DestinationAwsDatalakeOutputFormatWildcardType = "destination-aws-datalake_Output Format *_JSON Lines: Newline-delimited JSON"
- DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage DestinationAwsDatalakeOutputFormatWildcardType = "destination-aws-datalake_Output Format *_Parquet: Columnar Storage"
+ DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeJSONLinesNewlineDelimitedJSON DestinationAwsDatalakeOutputFormatWildcardType = "destination-aws-datalake_JSON Lines: Newline-delimited JSON"
+ DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeParquetColumnarStorage DestinationAwsDatalakeOutputFormatWildcardType = "destination-aws-datalake_Parquet: Columnar Storage"
)
type DestinationAwsDatalakeOutputFormatWildcard struct {
- DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON *DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON
- DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage *DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage
+ DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON *DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON
+ DestinationAwsDatalakeParquetColumnarStorage *DestinationAwsDatalakeParquetColumnarStorage
Type DestinationAwsDatalakeOutputFormatWildcardType
}
-func CreateDestinationAwsDatalakeOutputFormatWildcardDestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON(destinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON) DestinationAwsDatalakeOutputFormatWildcard {
- typ := DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON
+func CreateDestinationAwsDatalakeOutputFormatWildcardDestinationAwsDatalakeJSONLinesNewlineDelimitedJSON(destinationAwsDatalakeJSONLinesNewlineDelimitedJSON DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON) DestinationAwsDatalakeOutputFormatWildcard {
+ typ := DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeJSONLinesNewlineDelimitedJSON
return DestinationAwsDatalakeOutputFormatWildcard{
- DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON: &destinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON,
+ DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON: &destinationAwsDatalakeJSONLinesNewlineDelimitedJSON,
Type: typ,
}
}
-func CreateDestinationAwsDatalakeOutputFormatWildcardDestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage(destinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage) DestinationAwsDatalakeOutputFormatWildcard {
- typ := DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage
+func CreateDestinationAwsDatalakeOutputFormatWildcardDestinationAwsDatalakeParquetColumnarStorage(destinationAwsDatalakeParquetColumnarStorage DestinationAwsDatalakeParquetColumnarStorage) DestinationAwsDatalakeOutputFormatWildcard {
+ typ := DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeParquetColumnarStorage
return DestinationAwsDatalakeOutputFormatWildcard{
- DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage: &destinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage,
+ DestinationAwsDatalakeParquetColumnarStorage: &destinationAwsDatalakeParquetColumnarStorage,
Type: typ,
}
}
func (u *DestinationAwsDatalakeOutputFormatWildcard) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON := new(DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON); err == nil {
- u.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON = destinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON
- u.Type = DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON
+
+ destinationAwsDatalakeJSONLinesNewlineDelimitedJSON := new(DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON)
+ if err := utils.UnmarshalJSON(data, &destinationAwsDatalakeJSONLinesNewlineDelimitedJSON, "", true, true); err == nil {
+ u.DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON = destinationAwsDatalakeJSONLinesNewlineDelimitedJSON
+ u.Type = DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeJSONLinesNewlineDelimitedJSON
return nil
}
- destinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage := new(DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage); err == nil {
- u.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage = destinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage
- u.Type = DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage
+ destinationAwsDatalakeParquetColumnarStorage := new(DestinationAwsDatalakeParquetColumnarStorage)
+ if err := utils.UnmarshalJSON(data, &destinationAwsDatalakeParquetColumnarStorage, "", true, true); err == nil {
+ u.DestinationAwsDatalakeParquetColumnarStorage = destinationAwsDatalakeParquetColumnarStorage
+ u.Type = DestinationAwsDatalakeOutputFormatWildcardTypeDestinationAwsDatalakeParquetColumnarStorage
return nil
}
@@ -350,15 +441,15 @@ func (u *DestinationAwsDatalakeOutputFormatWildcard) UnmarshalJSON(data []byte)
}
func (u DestinationAwsDatalakeOutputFormatWildcard) MarshalJSON() ([]byte, error) {
- if u.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.DestinationAwsDatalakeOutputFormatWildcardJSONLinesNewlineDelimitedJSON)
+ if u.DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON != nil {
+ return utils.MarshalJSON(u.DestinationAwsDatalakeJSONLinesNewlineDelimitedJSON, "", true)
}
- if u.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage != nil {
- return json.Marshal(u.DestinationAwsDatalakeOutputFormatWildcardParquetColumnarStorage)
+ if u.DestinationAwsDatalakeParquetColumnarStorage != nil {
+ return utils.MarshalJSON(u.DestinationAwsDatalakeParquetColumnarStorage, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// DestinationAwsDatalakeChooseHowToPartitionData - Partition data by cursor fields when a cursor field is a date
@@ -513,11 +604,11 @@ type DestinationAwsDatalake struct {
BucketPrefix *string `json:"bucket_prefix,omitempty"`
// Choose How to Authenticate to AWS.
Credentials DestinationAwsDatalakeAuthenticationMode `json:"credentials"`
- DestinationType DestinationAwsDatalakeAwsDatalake `json:"destinationType"`
+ destinationType AwsDatalake `const:"aws-datalake" json:"destinationType"`
// Format of the data output.
Format *DestinationAwsDatalakeOutputFormatWildcard `json:"format,omitempty"`
// Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source.
- GlueCatalogFloatAsDecimal *bool `json:"glue_catalog_float_as_decimal,omitempty"`
+ GlueCatalogFloatAsDecimal *bool `default:"false" json:"glue_catalog_float_as_decimal"`
// Add a default tag key to databases created by this destination
LakeformationDatabaseDefaultTagKey *string `json:"lakeformation_database_default_tag_key,omitempty"`
// Add default values for the `Tag Key` to databases created by this destination. Comma separate for multiple values.
@@ -525,9 +616,108 @@ type DestinationAwsDatalake struct {
// The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.
LakeformationDatabaseName string `json:"lakeformation_database_name"`
// Whether to create tables as LF governed tables.
- LakeformationGovernedTables *bool `json:"lakeformation_governed_tables,omitempty"`
+ LakeformationGovernedTables *bool `default:"false" json:"lakeformation_governed_tables"`
// Partition data by cursor fields when a cursor field is a date
- Partitioning *DestinationAwsDatalakeChooseHowToPartitionData `json:"partitioning,omitempty"`
+ Partitioning *DestinationAwsDatalakeChooseHowToPartitionData `default:"NO PARTITIONING" json:"partitioning"`
// The region of the S3 bucket. See here for all region codes.
- Region DestinationAwsDatalakeS3BucketRegion `json:"region"`
+ Region *DestinationAwsDatalakeS3BucketRegion `default:"" json:"region"`
+}
+
+func (d DestinationAwsDatalake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationAwsDatalake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationAwsDatalake) GetAwsAccountID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsAccountID
+}
+
+func (o *DestinationAwsDatalake) GetBucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.BucketName
+}
+
+func (o *DestinationAwsDatalake) GetBucketPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.BucketPrefix
+}
+
+func (o *DestinationAwsDatalake) GetCredentials() DestinationAwsDatalakeAuthenticationMode {
+ if o == nil {
+ return DestinationAwsDatalakeAuthenticationMode{}
+ }
+ return o.Credentials
+}
+
+func (o *DestinationAwsDatalake) GetDestinationType() AwsDatalake {
+ return AwsDatalakeAwsDatalake
+}
+
+func (o *DestinationAwsDatalake) GetFormat() *DestinationAwsDatalakeOutputFormatWildcard {
+ if o == nil {
+ return nil
+ }
+ return o.Format
+}
+
+func (o *DestinationAwsDatalake) GetGlueCatalogFloatAsDecimal() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.GlueCatalogFloatAsDecimal
+}
+
+func (o *DestinationAwsDatalake) GetLakeformationDatabaseDefaultTagKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LakeformationDatabaseDefaultTagKey
+}
+
+func (o *DestinationAwsDatalake) GetLakeformationDatabaseDefaultTagValues() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LakeformationDatabaseDefaultTagValues
+}
+
+func (o *DestinationAwsDatalake) GetLakeformationDatabaseName() string {
+ if o == nil {
+ return ""
+ }
+ return o.LakeformationDatabaseName
+}
+
+func (o *DestinationAwsDatalake) GetLakeformationGovernedTables() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.LakeformationGovernedTables
+}
+
+func (o *DestinationAwsDatalake) GetPartitioning() *DestinationAwsDatalakeChooseHowToPartitionData {
+ if o == nil {
+ return nil
+ }
+ return o.Partitioning
+}
+
+func (o *DestinationAwsDatalake) GetRegion() *DestinationAwsDatalakeS3BucketRegion {
+ if o == nil {
+ return nil
+ }
+ return o.Region
}
diff --git a/internal/sdk/pkg/models/shared/destinationawsdatalakecreaterequest.go b/internal/sdk/pkg/models/shared/destinationawsdatalakecreaterequest.go
old mode 100755
new mode 100644
index 8706a1fd3..596351dc5
--- a/internal/sdk/pkg/models/shared/destinationawsdatalakecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationawsdatalakecreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationAwsDatalakeCreateRequest struct {
Configuration DestinationAwsDatalake `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationAwsDatalakeCreateRequest) GetConfiguration() DestinationAwsDatalake {
+ if o == nil {
+ return DestinationAwsDatalake{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationAwsDatalakeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationAwsDatalakeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationAwsDatalakeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationawsdatalakeputrequest.go b/internal/sdk/pkg/models/shared/destinationawsdatalakeputrequest.go
old mode 100755
new mode 100644
index b30d48b34..f85a07f80
--- a/internal/sdk/pkg/models/shared/destinationawsdatalakeputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationawsdatalakeputrequest.go
@@ -7,3 +7,24 @@ type DestinationAwsDatalakePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationAwsDatalakePutRequest) GetConfiguration() DestinationAwsDatalakeUpdate {
+ if o == nil {
+ return DestinationAwsDatalakeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationAwsDatalakePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationAwsDatalakePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationawsdatalakeupdate.go b/internal/sdk/pkg/models/shared/destinationawsdatalakeupdate.go
old mode 100755
new mode 100644
index d0ad381b7..db6f00093
--- a/internal/sdk/pkg/models/shared/destinationawsdatalakeupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationawsdatalakeupdate.go
@@ -3,163 +3,209 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// DestinationAwsDatalakeUpdateAuthenticationModeIAMUserCredentialsTitle - Name of the credentials
-type DestinationAwsDatalakeUpdateAuthenticationModeIAMUserCredentialsTitle string
+// DestinationAwsDatalakeUpdateCredentialsTitle - Name of the credentials
+type DestinationAwsDatalakeUpdateCredentialsTitle string
const (
- DestinationAwsDatalakeUpdateAuthenticationModeIAMUserCredentialsTitleIamUser DestinationAwsDatalakeUpdateAuthenticationModeIAMUserCredentialsTitle = "IAM User"
+ DestinationAwsDatalakeUpdateCredentialsTitleIamUser DestinationAwsDatalakeUpdateCredentialsTitle = "IAM User"
)
-func (e DestinationAwsDatalakeUpdateAuthenticationModeIAMUserCredentialsTitle) ToPointer() *DestinationAwsDatalakeUpdateAuthenticationModeIAMUserCredentialsTitle {
+func (e DestinationAwsDatalakeUpdateCredentialsTitle) ToPointer() *DestinationAwsDatalakeUpdateCredentialsTitle {
return &e
}
-func (e *DestinationAwsDatalakeUpdateAuthenticationModeIAMUserCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *DestinationAwsDatalakeUpdateCredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "IAM User":
- *e = DestinationAwsDatalakeUpdateAuthenticationModeIAMUserCredentialsTitle(v)
+ *e = DestinationAwsDatalakeUpdateCredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeUpdateAuthenticationModeIAMUserCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for DestinationAwsDatalakeUpdateCredentialsTitle: %v", v)
}
}
-// DestinationAwsDatalakeUpdateAuthenticationModeIAMUser - Choose How to Authenticate to AWS.
-type DestinationAwsDatalakeUpdateAuthenticationModeIAMUser struct {
+// IAMUser - Choose How to Authenticate to AWS.
+type IAMUser struct {
// AWS User Access Key Id
AwsAccessKeyID string `json:"aws_access_key_id"`
// Secret Access Key
AwsSecretAccessKey string `json:"aws_secret_access_key"`
// Name of the credentials
- CredentialsTitle DestinationAwsDatalakeUpdateAuthenticationModeIAMUserCredentialsTitle `json:"credentials_title"`
+ credentialsTitle *DestinationAwsDatalakeUpdateCredentialsTitle `const:"IAM User" json:"credentials_title"`
}
-// DestinationAwsDatalakeUpdateAuthenticationModeIAMRoleCredentialsTitle - Name of the credentials
-type DestinationAwsDatalakeUpdateAuthenticationModeIAMRoleCredentialsTitle string
+func (i IAMUser) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(i, "", false)
+}
+
+func (i *IAMUser) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &i, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *IAMUser) GetAwsAccessKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsAccessKeyID
+}
+
+func (o *IAMUser) GetAwsSecretAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsSecretAccessKey
+}
+
+func (o *IAMUser) GetCredentialsTitle() *DestinationAwsDatalakeUpdateCredentialsTitle {
+ return DestinationAwsDatalakeUpdateCredentialsTitleIamUser.ToPointer()
+}
+
+// CredentialsTitle - Name of the credentials
+type CredentialsTitle string
const (
- DestinationAwsDatalakeUpdateAuthenticationModeIAMRoleCredentialsTitleIamRole DestinationAwsDatalakeUpdateAuthenticationModeIAMRoleCredentialsTitle = "IAM Role"
+ CredentialsTitleIamRole CredentialsTitle = "IAM Role"
)
-func (e DestinationAwsDatalakeUpdateAuthenticationModeIAMRoleCredentialsTitle) ToPointer() *DestinationAwsDatalakeUpdateAuthenticationModeIAMRoleCredentialsTitle {
+func (e CredentialsTitle) ToPointer() *CredentialsTitle {
return &e
}
-func (e *DestinationAwsDatalakeUpdateAuthenticationModeIAMRoleCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *CredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "IAM Role":
- *e = DestinationAwsDatalakeUpdateAuthenticationModeIAMRoleCredentialsTitle(v)
+ *e = CredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeUpdateAuthenticationModeIAMRoleCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for CredentialsTitle: %v", v)
}
}
-// DestinationAwsDatalakeUpdateAuthenticationModeIAMRole - Choose How to Authenticate to AWS.
-type DestinationAwsDatalakeUpdateAuthenticationModeIAMRole struct {
+// IAMRole - Choose How to Authenticate to AWS.
+type IAMRole struct {
// Name of the credentials
- CredentialsTitle DestinationAwsDatalakeUpdateAuthenticationModeIAMRoleCredentialsTitle `json:"credentials_title"`
+ credentialsTitle *CredentialsTitle `const:"IAM Role" json:"credentials_title"`
// Will assume this role to write data to s3
RoleArn string `json:"role_arn"`
}
-type DestinationAwsDatalakeUpdateAuthenticationModeType string
+func (i IAMRole) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(i, "", false)
+}
+
+func (i *IAMRole) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &i, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *IAMRole) GetCredentialsTitle() *CredentialsTitle {
+ return CredentialsTitleIamRole.ToPointer()
+}
+
+func (o *IAMRole) GetRoleArn() string {
+ if o == nil {
+ return ""
+ }
+ return o.RoleArn
+}
+
+type AuthenticationModeType string
const (
- DestinationAwsDatalakeUpdateAuthenticationModeTypeDestinationAwsDatalakeUpdateAuthenticationModeIAMRole DestinationAwsDatalakeUpdateAuthenticationModeType = "destination-aws-datalake-update_Authentication mode_IAM Role"
- DestinationAwsDatalakeUpdateAuthenticationModeTypeDestinationAwsDatalakeUpdateAuthenticationModeIAMUser DestinationAwsDatalakeUpdateAuthenticationModeType = "destination-aws-datalake-update_Authentication mode_IAM User"
+ AuthenticationModeTypeIAMRole AuthenticationModeType = "IAM Role"
+ AuthenticationModeTypeIAMUser AuthenticationModeType = "IAM User"
)
-type DestinationAwsDatalakeUpdateAuthenticationMode struct {
- DestinationAwsDatalakeUpdateAuthenticationModeIAMRole *DestinationAwsDatalakeUpdateAuthenticationModeIAMRole
- DestinationAwsDatalakeUpdateAuthenticationModeIAMUser *DestinationAwsDatalakeUpdateAuthenticationModeIAMUser
+type AuthenticationMode struct {
+ IAMRole *IAMRole
+ IAMUser *IAMUser
- Type DestinationAwsDatalakeUpdateAuthenticationModeType
+ Type AuthenticationModeType
}
-func CreateDestinationAwsDatalakeUpdateAuthenticationModeDestinationAwsDatalakeUpdateAuthenticationModeIAMRole(destinationAwsDatalakeUpdateAuthenticationModeIAMRole DestinationAwsDatalakeUpdateAuthenticationModeIAMRole) DestinationAwsDatalakeUpdateAuthenticationMode {
- typ := DestinationAwsDatalakeUpdateAuthenticationModeTypeDestinationAwsDatalakeUpdateAuthenticationModeIAMRole
+func CreateAuthenticationModeIAMRole(iamRole IAMRole) AuthenticationMode {
+ typ := AuthenticationModeTypeIAMRole
- return DestinationAwsDatalakeUpdateAuthenticationMode{
- DestinationAwsDatalakeUpdateAuthenticationModeIAMRole: &destinationAwsDatalakeUpdateAuthenticationModeIAMRole,
- Type: typ,
+ return AuthenticationMode{
+ IAMRole: &iamRole,
+ Type: typ,
}
}
-func CreateDestinationAwsDatalakeUpdateAuthenticationModeDestinationAwsDatalakeUpdateAuthenticationModeIAMUser(destinationAwsDatalakeUpdateAuthenticationModeIAMUser DestinationAwsDatalakeUpdateAuthenticationModeIAMUser) DestinationAwsDatalakeUpdateAuthenticationMode {
- typ := DestinationAwsDatalakeUpdateAuthenticationModeTypeDestinationAwsDatalakeUpdateAuthenticationModeIAMUser
+func CreateAuthenticationModeIAMUser(iamUser IAMUser) AuthenticationMode {
+ typ := AuthenticationModeTypeIAMUser
- return DestinationAwsDatalakeUpdateAuthenticationMode{
- DestinationAwsDatalakeUpdateAuthenticationModeIAMUser: &destinationAwsDatalakeUpdateAuthenticationModeIAMUser,
- Type: typ,
+ return AuthenticationMode{
+ IAMUser: &iamUser,
+ Type: typ,
}
}
-func (u *DestinationAwsDatalakeUpdateAuthenticationMode) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *AuthenticationMode) UnmarshalJSON(data []byte) error {
- destinationAwsDatalakeUpdateAuthenticationModeIAMRole := new(DestinationAwsDatalakeUpdateAuthenticationModeIAMRole)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAwsDatalakeUpdateAuthenticationModeIAMRole); err == nil {
- u.DestinationAwsDatalakeUpdateAuthenticationModeIAMRole = destinationAwsDatalakeUpdateAuthenticationModeIAMRole
- u.Type = DestinationAwsDatalakeUpdateAuthenticationModeTypeDestinationAwsDatalakeUpdateAuthenticationModeIAMRole
+ iamRole := new(IAMRole)
+ if err := utils.UnmarshalJSON(data, &iamRole, "", true, true); err == nil {
+ u.IAMRole = iamRole
+ u.Type = AuthenticationModeTypeIAMRole
return nil
}
- destinationAwsDatalakeUpdateAuthenticationModeIAMUser := new(DestinationAwsDatalakeUpdateAuthenticationModeIAMUser)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAwsDatalakeUpdateAuthenticationModeIAMUser); err == nil {
- u.DestinationAwsDatalakeUpdateAuthenticationModeIAMUser = destinationAwsDatalakeUpdateAuthenticationModeIAMUser
- u.Type = DestinationAwsDatalakeUpdateAuthenticationModeTypeDestinationAwsDatalakeUpdateAuthenticationModeIAMUser
+ iamUser := new(IAMUser)
+ if err := utils.UnmarshalJSON(data, &iamUser, "", true, true); err == nil {
+ u.IAMUser = iamUser
+ u.Type = AuthenticationModeTypeIAMUser
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationAwsDatalakeUpdateAuthenticationMode) MarshalJSON() ([]byte, error) {
- if u.DestinationAwsDatalakeUpdateAuthenticationModeIAMRole != nil {
- return json.Marshal(u.DestinationAwsDatalakeUpdateAuthenticationModeIAMRole)
+func (u AuthenticationMode) MarshalJSON() ([]byte, error) {
+ if u.IAMRole != nil {
+ return utils.MarshalJSON(u.IAMRole, "", true)
}
- if u.DestinationAwsDatalakeUpdateAuthenticationModeIAMUser != nil {
- return json.Marshal(u.DestinationAwsDatalakeUpdateAuthenticationModeIAMUser)
+ if u.IAMUser != nil {
+ return utils.MarshalJSON(u.IAMUser, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional - The compression algorithm used to compress data.
-type DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional string
+// DestinationAwsDatalakeUpdateCompressionCodecOptional - The compression algorithm used to compress data.
+type DestinationAwsDatalakeUpdateCompressionCodecOptional string
const (
- DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptionalUncompressed DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional = "UNCOMPRESSED"
- DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptionalSnappy DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional = "SNAPPY"
- DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptionalGzip DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional = "GZIP"
- DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptionalZstd DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional = "ZSTD"
+ DestinationAwsDatalakeUpdateCompressionCodecOptionalUncompressed DestinationAwsDatalakeUpdateCompressionCodecOptional = "UNCOMPRESSED"
+ DestinationAwsDatalakeUpdateCompressionCodecOptionalSnappy DestinationAwsDatalakeUpdateCompressionCodecOptional = "SNAPPY"
+ DestinationAwsDatalakeUpdateCompressionCodecOptionalGzip DestinationAwsDatalakeUpdateCompressionCodecOptional = "GZIP"
+ DestinationAwsDatalakeUpdateCompressionCodecOptionalZstd DestinationAwsDatalakeUpdateCompressionCodecOptional = "ZSTD"
)
-func (e DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional) ToPointer() *DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional {
+func (e DestinationAwsDatalakeUpdateCompressionCodecOptional) ToPointer() *DestinationAwsDatalakeUpdateCompressionCodecOptional {
return &e
}
-func (e *DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional) UnmarshalJSON(data []byte) error {
+func (e *DestinationAwsDatalakeUpdateCompressionCodecOptional) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -172,57 +218,82 @@ func (e *DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageC
case "GZIP":
fallthrough
case "ZSTD":
- *e = DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional(v)
+ *e = DestinationAwsDatalakeUpdateCompressionCodecOptional(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional: %v", v)
+ return fmt.Errorf("invalid value for DestinationAwsDatalakeUpdateCompressionCodecOptional: %v", v)
}
}
-type DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard string
+type DestinationAwsDatalakeUpdateFormatTypeWildcard string
const (
- DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageFormatTypeWildcardParquet DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard = "Parquet"
+ DestinationAwsDatalakeUpdateFormatTypeWildcardParquet DestinationAwsDatalakeUpdateFormatTypeWildcard = "Parquet"
)
-func (e DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard) ToPointer() *DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard {
+func (e DestinationAwsDatalakeUpdateFormatTypeWildcard) ToPointer() *DestinationAwsDatalakeUpdateFormatTypeWildcard {
return &e
}
-func (e *DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard) UnmarshalJSON(data []byte) error {
+func (e *DestinationAwsDatalakeUpdateFormatTypeWildcard) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Parquet":
- *e = DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard(v)
+ *e = DestinationAwsDatalakeUpdateFormatTypeWildcard(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard: %v", v)
+ return fmt.Errorf("invalid value for DestinationAwsDatalakeUpdateFormatTypeWildcard: %v", v)
}
}
-// DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage - Format of the data output.
-type DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage struct {
+// ParquetColumnarStorage - Format of the data output.
+type ParquetColumnarStorage struct {
// The compression algorithm used to compress data.
- CompressionCodec *DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageCompressionCodecOptional `json:"compression_codec,omitempty"`
- FormatType DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorageFormatTypeWildcard `json:"format_type"`
+ CompressionCodec *DestinationAwsDatalakeUpdateCompressionCodecOptional `default:"SNAPPY" json:"compression_codec"`
+ FormatType *DestinationAwsDatalakeUpdateFormatTypeWildcard `default:"Parquet" json:"format_type"`
+}
+
+func (p ParquetColumnarStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(p, "", false)
+}
+
+func (p *ParquetColumnarStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &p, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ParquetColumnarStorage) GetCompressionCodec() *DestinationAwsDatalakeUpdateCompressionCodecOptional {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionCodec
+}
+
+func (o *ParquetColumnarStorage) GetFormatType() *DestinationAwsDatalakeUpdateFormatTypeWildcard {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
}
-// DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional - The compression algorithm used to compress data.
-type DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional string
+// CompressionCodecOptional - The compression algorithm used to compress data.
+type CompressionCodecOptional string
const (
- DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptionalUncompressed DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional = "UNCOMPRESSED"
- DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptionalGzip DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional = "GZIP"
+ CompressionCodecOptionalUncompressed CompressionCodecOptional = "UNCOMPRESSED"
+ CompressionCodecOptionalGzip CompressionCodecOptional = "GZIP"
)
-func (e DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional) ToPointer() *DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional {
+func (e CompressionCodecOptional) ToPointer() *CompressionCodecOptional {
return &e
}
-func (e *DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional) UnmarshalJSON(data []byte) error {
+func (e *CompressionCodecOptional) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -231,130 +302,150 @@ func (e *DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimit
case "UNCOMPRESSED":
fallthrough
case "GZIP":
- *e = DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional(v)
+ *e = CompressionCodecOptional(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional: %v", v)
+ return fmt.Errorf("invalid value for CompressionCodecOptional: %v", v)
}
}
-type DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard string
+type FormatTypeWildcard string
const (
- DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcardJsonl DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard = "JSONL"
+ FormatTypeWildcardJsonl FormatTypeWildcard = "JSONL"
)
-func (e DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard) ToPointer() *DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard {
+func (e FormatTypeWildcard) ToPointer() *FormatTypeWildcard {
return &e
}
-func (e *DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard) UnmarshalJSON(data []byte) error {
+func (e *FormatTypeWildcard) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "JSONL":
- *e = DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard(v)
+ *e = FormatTypeWildcard(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard: %v", v)
+ return fmt.Errorf("invalid value for FormatTypeWildcard: %v", v)
}
}
-// DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON - Format of the data output.
-type DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON struct {
+// JSONLinesNewlineDelimitedJSON - Format of the data output.
+type JSONLinesNewlineDelimitedJSON struct {
// The compression algorithm used to compress data.
- CompressionCodec *DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONCompressionCodecOptional `json:"compression_codec,omitempty"`
- FormatType DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSONFormatTypeWildcard `json:"format_type"`
+ CompressionCodec *CompressionCodecOptional `default:"UNCOMPRESSED" json:"compression_codec"`
+ FormatType *FormatTypeWildcard `default:"JSONL" json:"format_type"`
+}
+
+func (j JSONLinesNewlineDelimitedJSON) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(j, "", false)
+}
+
+func (j *JSONLinesNewlineDelimitedJSON) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &j, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *JSONLinesNewlineDelimitedJSON) GetCompressionCodec() *CompressionCodecOptional {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionCodec
+}
+
+func (o *JSONLinesNewlineDelimitedJSON) GetFormatType() *FormatTypeWildcard {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
}
-type DestinationAwsDatalakeUpdateOutputFormatWildcardType string
+type OutputFormatWildcardType string
const (
- DestinationAwsDatalakeUpdateOutputFormatWildcardTypeDestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON DestinationAwsDatalakeUpdateOutputFormatWildcardType = "destination-aws-datalake-update_Output Format *_JSON Lines: Newline-delimited JSON"
- DestinationAwsDatalakeUpdateOutputFormatWildcardTypeDestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage DestinationAwsDatalakeUpdateOutputFormatWildcardType = "destination-aws-datalake-update_Output Format *_Parquet: Columnar Storage"
+ OutputFormatWildcardTypeJSONLinesNewlineDelimitedJSON OutputFormatWildcardType = "JSON Lines: Newline-delimited JSON"
+ OutputFormatWildcardTypeParquetColumnarStorage OutputFormatWildcardType = "Parquet: Columnar Storage"
)
-type DestinationAwsDatalakeUpdateOutputFormatWildcard struct {
- DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON *DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON
- DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage *DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage
+type OutputFormatWildcard struct {
+ JSONLinesNewlineDelimitedJSON *JSONLinesNewlineDelimitedJSON
+ ParquetColumnarStorage *ParquetColumnarStorage
- Type DestinationAwsDatalakeUpdateOutputFormatWildcardType
+ Type OutputFormatWildcardType
}
-func CreateDestinationAwsDatalakeUpdateOutputFormatWildcardDestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON(destinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON) DestinationAwsDatalakeUpdateOutputFormatWildcard {
- typ := DestinationAwsDatalakeUpdateOutputFormatWildcardTypeDestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON
+func CreateOutputFormatWildcardJSONLinesNewlineDelimitedJSON(jsonLinesNewlineDelimitedJSON JSONLinesNewlineDelimitedJSON) OutputFormatWildcard {
+ typ := OutputFormatWildcardTypeJSONLinesNewlineDelimitedJSON
- return DestinationAwsDatalakeUpdateOutputFormatWildcard{
- DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON: &destinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON,
- Type: typ,
+ return OutputFormatWildcard{
+ JSONLinesNewlineDelimitedJSON: &jsonLinesNewlineDelimitedJSON,
+ Type: typ,
}
}
-func CreateDestinationAwsDatalakeUpdateOutputFormatWildcardDestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage(destinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage) DestinationAwsDatalakeUpdateOutputFormatWildcard {
- typ := DestinationAwsDatalakeUpdateOutputFormatWildcardTypeDestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage
+func CreateOutputFormatWildcardParquetColumnarStorage(parquetColumnarStorage ParquetColumnarStorage) OutputFormatWildcard {
+ typ := OutputFormatWildcardTypeParquetColumnarStorage
- return DestinationAwsDatalakeUpdateOutputFormatWildcard{
- DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage: &destinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage,
- Type: typ,
+ return OutputFormatWildcard{
+ ParquetColumnarStorage: &parquetColumnarStorage,
+ Type: typ,
}
}
-func (u *DestinationAwsDatalakeUpdateOutputFormatWildcard) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *OutputFormatWildcard) UnmarshalJSON(data []byte) error {
- destinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON := new(DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON); err == nil {
- u.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON = destinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON
- u.Type = DestinationAwsDatalakeUpdateOutputFormatWildcardTypeDestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON
+ jsonLinesNewlineDelimitedJSON := new(JSONLinesNewlineDelimitedJSON)
+ if err := utils.UnmarshalJSON(data, &jsonLinesNewlineDelimitedJSON, "", true, true); err == nil {
+ u.JSONLinesNewlineDelimitedJSON = jsonLinesNewlineDelimitedJSON
+ u.Type = OutputFormatWildcardTypeJSONLinesNewlineDelimitedJSON
return nil
}
- destinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage := new(DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage); err == nil {
- u.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage = destinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage
- u.Type = DestinationAwsDatalakeUpdateOutputFormatWildcardTypeDestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage
+ parquetColumnarStorage := new(ParquetColumnarStorage)
+ if err := utils.UnmarshalJSON(data, &parquetColumnarStorage, "", true, true); err == nil {
+ u.ParquetColumnarStorage = parquetColumnarStorage
+ u.Type = OutputFormatWildcardTypeParquetColumnarStorage
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationAwsDatalakeUpdateOutputFormatWildcard) MarshalJSON() ([]byte, error) {
- if u.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.DestinationAwsDatalakeUpdateOutputFormatWildcardJSONLinesNewlineDelimitedJSON)
+func (u OutputFormatWildcard) MarshalJSON() ([]byte, error) {
+ if u.JSONLinesNewlineDelimitedJSON != nil {
+ return utils.MarshalJSON(u.JSONLinesNewlineDelimitedJSON, "", true)
}
- if u.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage != nil {
- return json.Marshal(u.DestinationAwsDatalakeUpdateOutputFormatWildcardParquetColumnarStorage)
+ if u.ParquetColumnarStorage != nil {
+ return utils.MarshalJSON(u.ParquetColumnarStorage, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationAwsDatalakeUpdateChooseHowToPartitionData - Partition data by cursor fields when a cursor field is a date
-type DestinationAwsDatalakeUpdateChooseHowToPartitionData string
+// ChooseHowToPartitionData - Partition data by cursor fields when a cursor field is a date
+type ChooseHowToPartitionData string
const (
- DestinationAwsDatalakeUpdateChooseHowToPartitionDataNoPartitioning DestinationAwsDatalakeUpdateChooseHowToPartitionData = "NO PARTITIONING"
- DestinationAwsDatalakeUpdateChooseHowToPartitionDataDate DestinationAwsDatalakeUpdateChooseHowToPartitionData = "DATE"
- DestinationAwsDatalakeUpdateChooseHowToPartitionDataYear DestinationAwsDatalakeUpdateChooseHowToPartitionData = "YEAR"
- DestinationAwsDatalakeUpdateChooseHowToPartitionDataMonth DestinationAwsDatalakeUpdateChooseHowToPartitionData = "MONTH"
- DestinationAwsDatalakeUpdateChooseHowToPartitionDataDay DestinationAwsDatalakeUpdateChooseHowToPartitionData = "DAY"
- DestinationAwsDatalakeUpdateChooseHowToPartitionDataYearMonth DestinationAwsDatalakeUpdateChooseHowToPartitionData = "YEAR/MONTH"
- DestinationAwsDatalakeUpdateChooseHowToPartitionDataYearMonthDay DestinationAwsDatalakeUpdateChooseHowToPartitionData = "YEAR/MONTH/DAY"
+ ChooseHowToPartitionDataNoPartitioning ChooseHowToPartitionData = "NO PARTITIONING"
+ ChooseHowToPartitionDataDate ChooseHowToPartitionData = "DATE"
+ ChooseHowToPartitionDataYear ChooseHowToPartitionData = "YEAR"
+ ChooseHowToPartitionDataMonth ChooseHowToPartitionData = "MONTH"
+ ChooseHowToPartitionDataDay ChooseHowToPartitionData = "DAY"
+ ChooseHowToPartitionDataYearMonth ChooseHowToPartitionData = "YEAR/MONTH"
+ ChooseHowToPartitionDataYearMonthDay ChooseHowToPartitionData = "YEAR/MONTH/DAY"
)
-func (e DestinationAwsDatalakeUpdateChooseHowToPartitionData) ToPointer() *DestinationAwsDatalakeUpdateChooseHowToPartitionData {
+func (e ChooseHowToPartitionData) ToPointer() *ChooseHowToPartitionData {
return &e
}
-func (e *DestinationAwsDatalakeUpdateChooseHowToPartitionData) UnmarshalJSON(data []byte) error {
+func (e *ChooseHowToPartitionData) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -373,50 +464,50 @@ func (e *DestinationAwsDatalakeUpdateChooseHowToPartitionData) UnmarshalJSON(dat
case "YEAR/MONTH":
fallthrough
case "YEAR/MONTH/DAY":
- *e = DestinationAwsDatalakeUpdateChooseHowToPartitionData(v)
+ *e = ChooseHowToPartitionData(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeUpdateChooseHowToPartitionData: %v", v)
+ return fmt.Errorf("invalid value for ChooseHowToPartitionData: %v", v)
}
}
-// DestinationAwsDatalakeUpdateS3BucketRegion - The region of the S3 bucket. See here for all region codes.
-type DestinationAwsDatalakeUpdateS3BucketRegion string
+// S3BucketRegion - The region of the S3 bucket. See here for all region codes.
+type S3BucketRegion string
const (
- DestinationAwsDatalakeUpdateS3BucketRegionUnknown DestinationAwsDatalakeUpdateS3BucketRegion = ""
- DestinationAwsDatalakeUpdateS3BucketRegionUsEast1 DestinationAwsDatalakeUpdateS3BucketRegion = "us-east-1"
- DestinationAwsDatalakeUpdateS3BucketRegionUsEast2 DestinationAwsDatalakeUpdateS3BucketRegion = "us-east-2"
- DestinationAwsDatalakeUpdateS3BucketRegionUsWest1 DestinationAwsDatalakeUpdateS3BucketRegion = "us-west-1"
- DestinationAwsDatalakeUpdateS3BucketRegionUsWest2 DestinationAwsDatalakeUpdateS3BucketRegion = "us-west-2"
- DestinationAwsDatalakeUpdateS3BucketRegionAfSouth1 DestinationAwsDatalakeUpdateS3BucketRegion = "af-south-1"
- DestinationAwsDatalakeUpdateS3BucketRegionApEast1 DestinationAwsDatalakeUpdateS3BucketRegion = "ap-east-1"
- DestinationAwsDatalakeUpdateS3BucketRegionApSouth1 DestinationAwsDatalakeUpdateS3BucketRegion = "ap-south-1"
- DestinationAwsDatalakeUpdateS3BucketRegionApNortheast1 DestinationAwsDatalakeUpdateS3BucketRegion = "ap-northeast-1"
- DestinationAwsDatalakeUpdateS3BucketRegionApNortheast2 DestinationAwsDatalakeUpdateS3BucketRegion = "ap-northeast-2"
- DestinationAwsDatalakeUpdateS3BucketRegionApNortheast3 DestinationAwsDatalakeUpdateS3BucketRegion = "ap-northeast-3"
- DestinationAwsDatalakeUpdateS3BucketRegionApSoutheast1 DestinationAwsDatalakeUpdateS3BucketRegion = "ap-southeast-1"
- DestinationAwsDatalakeUpdateS3BucketRegionApSoutheast2 DestinationAwsDatalakeUpdateS3BucketRegion = "ap-southeast-2"
- DestinationAwsDatalakeUpdateS3BucketRegionCaCentral1 DestinationAwsDatalakeUpdateS3BucketRegion = "ca-central-1"
- DestinationAwsDatalakeUpdateS3BucketRegionCnNorth1 DestinationAwsDatalakeUpdateS3BucketRegion = "cn-north-1"
- DestinationAwsDatalakeUpdateS3BucketRegionCnNorthwest1 DestinationAwsDatalakeUpdateS3BucketRegion = "cn-northwest-1"
- DestinationAwsDatalakeUpdateS3BucketRegionEuCentral1 DestinationAwsDatalakeUpdateS3BucketRegion = "eu-central-1"
- DestinationAwsDatalakeUpdateS3BucketRegionEuNorth1 DestinationAwsDatalakeUpdateS3BucketRegion = "eu-north-1"
- DestinationAwsDatalakeUpdateS3BucketRegionEuSouth1 DestinationAwsDatalakeUpdateS3BucketRegion = "eu-south-1"
- DestinationAwsDatalakeUpdateS3BucketRegionEuWest1 DestinationAwsDatalakeUpdateS3BucketRegion = "eu-west-1"
- DestinationAwsDatalakeUpdateS3BucketRegionEuWest2 DestinationAwsDatalakeUpdateS3BucketRegion = "eu-west-2"
- DestinationAwsDatalakeUpdateS3BucketRegionEuWest3 DestinationAwsDatalakeUpdateS3BucketRegion = "eu-west-3"
- DestinationAwsDatalakeUpdateS3BucketRegionSaEast1 DestinationAwsDatalakeUpdateS3BucketRegion = "sa-east-1"
- DestinationAwsDatalakeUpdateS3BucketRegionMeSouth1 DestinationAwsDatalakeUpdateS3BucketRegion = "me-south-1"
- DestinationAwsDatalakeUpdateS3BucketRegionUsGovEast1 DestinationAwsDatalakeUpdateS3BucketRegion = "us-gov-east-1"
- DestinationAwsDatalakeUpdateS3BucketRegionUsGovWest1 DestinationAwsDatalakeUpdateS3BucketRegion = "us-gov-west-1"
+ S3BucketRegionUnknown S3BucketRegion = ""
+ S3BucketRegionUsEast1 S3BucketRegion = "us-east-1"
+ S3BucketRegionUsEast2 S3BucketRegion = "us-east-2"
+ S3BucketRegionUsWest1 S3BucketRegion = "us-west-1"
+ S3BucketRegionUsWest2 S3BucketRegion = "us-west-2"
+ S3BucketRegionAfSouth1 S3BucketRegion = "af-south-1"
+ S3BucketRegionApEast1 S3BucketRegion = "ap-east-1"
+ S3BucketRegionApSouth1 S3BucketRegion = "ap-south-1"
+ S3BucketRegionApNortheast1 S3BucketRegion = "ap-northeast-1"
+ S3BucketRegionApNortheast2 S3BucketRegion = "ap-northeast-2"
+ S3BucketRegionApNortheast3 S3BucketRegion = "ap-northeast-3"
+ S3BucketRegionApSoutheast1 S3BucketRegion = "ap-southeast-1"
+ S3BucketRegionApSoutheast2 S3BucketRegion = "ap-southeast-2"
+ S3BucketRegionCaCentral1 S3BucketRegion = "ca-central-1"
+ S3BucketRegionCnNorth1 S3BucketRegion = "cn-north-1"
+ S3BucketRegionCnNorthwest1 S3BucketRegion = "cn-northwest-1"
+ S3BucketRegionEuCentral1 S3BucketRegion = "eu-central-1"
+ S3BucketRegionEuNorth1 S3BucketRegion = "eu-north-1"
+ S3BucketRegionEuSouth1 S3BucketRegion = "eu-south-1"
+ S3BucketRegionEuWest1 S3BucketRegion = "eu-west-1"
+ S3BucketRegionEuWest2 S3BucketRegion = "eu-west-2"
+ S3BucketRegionEuWest3 S3BucketRegion = "eu-west-3"
+ S3BucketRegionSaEast1 S3BucketRegion = "sa-east-1"
+ S3BucketRegionMeSouth1 S3BucketRegion = "me-south-1"
+ S3BucketRegionUsGovEast1 S3BucketRegion = "us-gov-east-1"
+ S3BucketRegionUsGovWest1 S3BucketRegion = "us-gov-west-1"
)
-func (e DestinationAwsDatalakeUpdateS3BucketRegion) ToPointer() *DestinationAwsDatalakeUpdateS3BucketRegion {
+func (e S3BucketRegion) ToPointer() *S3BucketRegion {
return &e
}
-func (e *DestinationAwsDatalakeUpdateS3BucketRegion) UnmarshalJSON(data []byte) error {
+func (e *S3BucketRegion) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -473,10 +564,10 @@ func (e *DestinationAwsDatalakeUpdateS3BucketRegion) UnmarshalJSON(data []byte)
case "us-gov-east-1":
fallthrough
case "us-gov-west-1":
- *e = DestinationAwsDatalakeUpdateS3BucketRegion(v)
+ *e = S3BucketRegion(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAwsDatalakeUpdateS3BucketRegion: %v", v)
+ return fmt.Errorf("invalid value for S3BucketRegion: %v", v)
}
}
@@ -488,11 +579,11 @@ type DestinationAwsDatalakeUpdate struct {
// S3 prefix
BucketPrefix *string `json:"bucket_prefix,omitempty"`
// Choose How to Authenticate to AWS.
- Credentials DestinationAwsDatalakeUpdateAuthenticationMode `json:"credentials"`
+ Credentials AuthenticationMode `json:"credentials"`
// Format of the data output.
- Format *DestinationAwsDatalakeUpdateOutputFormatWildcard `json:"format,omitempty"`
+ Format *OutputFormatWildcard `json:"format,omitempty"`
// Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source.
- GlueCatalogFloatAsDecimal *bool `json:"glue_catalog_float_as_decimal,omitempty"`
+ GlueCatalogFloatAsDecimal *bool `default:"false" json:"glue_catalog_float_as_decimal"`
// Add a default tag key to databases created by this destination
LakeformationDatabaseDefaultTagKey *string `json:"lakeformation_database_default_tag_key,omitempty"`
// Add default values for the `Tag Key` to databases created by this destination. Comma separate for multiple values.
@@ -500,9 +591,104 @@ type DestinationAwsDatalakeUpdate struct {
// The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.
LakeformationDatabaseName string `json:"lakeformation_database_name"`
// Whether to create tables as LF governed tables.
- LakeformationGovernedTables *bool `json:"lakeformation_governed_tables,omitempty"`
+ LakeformationGovernedTables *bool `default:"false" json:"lakeformation_governed_tables"`
// Partition data by cursor fields when a cursor field is a date
- Partitioning *DestinationAwsDatalakeUpdateChooseHowToPartitionData `json:"partitioning,omitempty"`
+ Partitioning *ChooseHowToPartitionData `default:"NO PARTITIONING" json:"partitioning"`
// The region of the S3 bucket. See here for all region codes.
- Region DestinationAwsDatalakeUpdateS3BucketRegion `json:"region"`
+ Region *S3BucketRegion `default:"" json:"region"`
+}
+
+func (d DestinationAwsDatalakeUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationAwsDatalakeUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetAwsAccountID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsAccountID
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetBucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.BucketName
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetBucketPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.BucketPrefix
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetCredentials() AuthenticationMode {
+ if o == nil {
+ return AuthenticationMode{}
+ }
+ return o.Credentials
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetFormat() *OutputFormatWildcard {
+ if o == nil {
+ return nil
+ }
+ return o.Format
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetGlueCatalogFloatAsDecimal() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.GlueCatalogFloatAsDecimal
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetLakeformationDatabaseDefaultTagKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LakeformationDatabaseDefaultTagKey
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetLakeformationDatabaseDefaultTagValues() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LakeformationDatabaseDefaultTagValues
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetLakeformationDatabaseName() string {
+ if o == nil {
+ return ""
+ }
+ return o.LakeformationDatabaseName
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetLakeformationGovernedTables() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.LakeformationGovernedTables
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetPartitioning() *ChooseHowToPartitionData {
+ if o == nil {
+ return nil
+ }
+ return o.Partitioning
+}
+
+func (o *DestinationAwsDatalakeUpdate) GetRegion() *S3BucketRegion {
+ if o == nil {
+ return nil
+ }
+ return o.Region
}
diff --git a/internal/sdk/pkg/models/shared/destinationazureblobstorage.go b/internal/sdk/pkg/models/shared/destinationazureblobstorage.go
old mode 100755
new mode 100644
index 6411e49df..37992b0df
--- a/internal/sdk/pkg/models/shared/destinationazureblobstorage.go
+++ b/internal/sdk/pkg/models/shared/destinationazureblobstorage.go
@@ -3,78 +3,93 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationAzureBlobStorageAzureBlobStorage string
+type AzureBlobStorage string
const (
- DestinationAzureBlobStorageAzureBlobStorageAzureBlobStorage DestinationAzureBlobStorageAzureBlobStorage = "azure-blob-storage"
+ AzureBlobStorageAzureBlobStorage AzureBlobStorage = "azure-blob-storage"
)
-func (e DestinationAzureBlobStorageAzureBlobStorage) ToPointer() *DestinationAzureBlobStorageAzureBlobStorage {
+func (e AzureBlobStorage) ToPointer() *AzureBlobStorage {
return &e
}
-func (e *DestinationAzureBlobStorageAzureBlobStorage) UnmarshalJSON(data []byte) error {
+func (e *AzureBlobStorage) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "azure-blob-storage":
- *e = DestinationAzureBlobStorageAzureBlobStorage(v)
+ *e = AzureBlobStorage(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAzureBlobStorageAzureBlobStorage: %v", v)
+ return fmt.Errorf("invalid value for AzureBlobStorage: %v", v)
}
}
-type DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSONFormatType string
+type DestinationAzureBlobStorageSchemasFormatType string
const (
- DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSONFormatTypeJsonl DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSONFormatType = "JSONL"
+ DestinationAzureBlobStorageSchemasFormatTypeJsonl DestinationAzureBlobStorageSchemasFormatType = "JSONL"
)
-func (e DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSONFormatType) ToPointer() *DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSONFormatType {
+func (e DestinationAzureBlobStorageSchemasFormatType) ToPointer() *DestinationAzureBlobStorageSchemasFormatType {
return &e
}
-func (e *DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSONFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationAzureBlobStorageSchemasFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "JSONL":
- *e = DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSONFormatType(v)
+ *e = DestinationAzureBlobStorageSchemasFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSONFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationAzureBlobStorageSchemasFormatType: %v", v)
}
}
-// DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON - Output data format
-type DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON struct {
- FormatType DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSONFormatType `json:"format_type"`
+// DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON - Output data format
+type DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON struct {
+ formatType DestinationAzureBlobStorageSchemasFormatType `const:"JSONL" json:"format_type"`
}
-// DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlattening - Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-type DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlattening string
+func (d DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON) GetFormatType() DestinationAzureBlobStorageSchemasFormatType {
+ return DestinationAzureBlobStorageSchemasFormatTypeJsonl
+}
+
+// DestinationAzureBlobStorageNormalizationFlattening - Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
+type DestinationAzureBlobStorageNormalizationFlattening string
const (
- DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlatteningNoFlattening DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlattening = "No flattening"
- DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlatteningRootLevelFlattening DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlattening = "Root level flattening"
+ DestinationAzureBlobStorageNormalizationFlatteningNoFlattening DestinationAzureBlobStorageNormalizationFlattening = "No flattening"
+ DestinationAzureBlobStorageNormalizationFlatteningRootLevelFlattening DestinationAzureBlobStorageNormalizationFlattening = "Root level flattening"
)
-func (e DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlattening) ToPointer() *DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlattening {
+func (e DestinationAzureBlobStorageNormalizationFlattening) ToPointer() *DestinationAzureBlobStorageNormalizationFlattening {
return &e
}
-func (e *DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlattening) UnmarshalJSON(data []byte) error {
+func (e *DestinationAzureBlobStorageNormalizationFlattening) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -83,94 +98,111 @@ func (e *DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormaliza
case "No flattening":
fallthrough
case "Root level flattening":
- *e = DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlattening(v)
+ *e = DestinationAzureBlobStorageNormalizationFlattening(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlattening: %v", v)
+ return fmt.Errorf("invalid value for DestinationAzureBlobStorageNormalizationFlattening: %v", v)
}
}
-type DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesFormatType string
+type DestinationAzureBlobStorageFormatType string
const (
- DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesFormatTypeCsv DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesFormatType = "CSV"
+ DestinationAzureBlobStorageFormatTypeCsv DestinationAzureBlobStorageFormatType = "CSV"
)
-func (e DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesFormatType) ToPointer() *DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesFormatType {
+func (e DestinationAzureBlobStorageFormatType) ToPointer() *DestinationAzureBlobStorageFormatType {
return &e
}
-func (e *DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationAzureBlobStorageFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CSV":
- *e = DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesFormatType(v)
+ *e = DestinationAzureBlobStorageFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationAzureBlobStorageFormatType: %v", v)
}
}
-// DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues - Output data format
-type DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues struct {
+// DestinationAzureBlobStorageCSVCommaSeparatedValues - Output data format
+type DestinationAzureBlobStorageCSVCommaSeparatedValues struct {
// Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
- Flattening DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesNormalizationFlattening `json:"flattening"`
- FormatType DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValuesFormatType `json:"format_type"`
+ Flattening *DestinationAzureBlobStorageNormalizationFlattening `default:"No flattening" json:"flattening"`
+ formatType DestinationAzureBlobStorageFormatType `const:"CSV" json:"format_type"`
+}
+
+func (d DestinationAzureBlobStorageCSVCommaSeparatedValues) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationAzureBlobStorageCSVCommaSeparatedValues) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationAzureBlobStorageCSVCommaSeparatedValues) GetFlattening() *DestinationAzureBlobStorageNormalizationFlattening {
+ if o == nil {
+ return nil
+ }
+ return o.Flattening
+}
+
+func (o *DestinationAzureBlobStorageCSVCommaSeparatedValues) GetFormatType() DestinationAzureBlobStorageFormatType {
+ return DestinationAzureBlobStorageFormatTypeCsv
}
type DestinationAzureBlobStorageOutputFormatType string
const (
- DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues DestinationAzureBlobStorageOutputFormatType = "destination-azure-blob-storage_Output Format_CSV: Comma-Separated Values"
- DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON DestinationAzureBlobStorageOutputFormatType = "destination-azure-blob-storage_Output Format_JSON Lines: newline-delimited JSON"
+ DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageCSVCommaSeparatedValues DestinationAzureBlobStorageOutputFormatType = "destination-azure-blob-storage_CSV: Comma-Separated Values"
+ DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON DestinationAzureBlobStorageOutputFormatType = "destination-azure-blob-storage_JSON Lines: newline-delimited JSON"
)
type DestinationAzureBlobStorageOutputFormat struct {
- DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues *DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues
- DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON *DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON
+ DestinationAzureBlobStorageCSVCommaSeparatedValues *DestinationAzureBlobStorageCSVCommaSeparatedValues
+ DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON *DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON
Type DestinationAzureBlobStorageOutputFormatType
}
-func CreateDestinationAzureBlobStorageOutputFormatDestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues(destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues) DestinationAzureBlobStorageOutputFormat {
- typ := DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues
+func CreateDestinationAzureBlobStorageOutputFormatDestinationAzureBlobStorageCSVCommaSeparatedValues(destinationAzureBlobStorageCSVCommaSeparatedValues DestinationAzureBlobStorageCSVCommaSeparatedValues) DestinationAzureBlobStorageOutputFormat {
+ typ := DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageCSVCommaSeparatedValues
return DestinationAzureBlobStorageOutputFormat{
- DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues: &destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues,
+ DestinationAzureBlobStorageCSVCommaSeparatedValues: &destinationAzureBlobStorageCSVCommaSeparatedValues,
Type: typ,
}
}
-func CreateDestinationAzureBlobStorageOutputFormatDestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON(destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON) DestinationAzureBlobStorageOutputFormat {
- typ := DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON
+func CreateDestinationAzureBlobStorageOutputFormatDestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON(destinationAzureBlobStorageJSONLinesNewlineDelimitedJSON DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON) DestinationAzureBlobStorageOutputFormat {
+ typ := DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON
return DestinationAzureBlobStorageOutputFormat{
- DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON: &destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON: &destinationAzureBlobStorageJSONLinesNewlineDelimitedJSON,
Type: typ,
}
}
func (u *DestinationAzureBlobStorageOutputFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON); err == nil {
- u.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON = destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON
- u.Type = DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON
+
+ destinationAzureBlobStorageJSONLinesNewlineDelimitedJSON := new(DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON)
+ if err := utils.UnmarshalJSON(data, &destinationAzureBlobStorageJSONLinesNewlineDelimitedJSON, "", true, true); err == nil {
+ u.DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON = destinationAzureBlobStorageJSONLinesNewlineDelimitedJSON
+ u.Type = DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON
return nil
}
- destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues := new(DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues); err == nil {
- u.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues = destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues
- u.Type = DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues
+ destinationAzureBlobStorageCSVCommaSeparatedValues := new(DestinationAzureBlobStorageCSVCommaSeparatedValues)
+ if err := utils.UnmarshalJSON(data, &destinationAzureBlobStorageCSVCommaSeparatedValues, "", true, true); err == nil {
+ u.DestinationAzureBlobStorageCSVCommaSeparatedValues = destinationAzureBlobStorageCSVCommaSeparatedValues
+ u.Type = DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageCSVCommaSeparatedValues
return nil
}
@@ -178,15 +210,15 @@ func (u *DestinationAzureBlobStorageOutputFormat) UnmarshalJSON(data []byte) err
}
func (u DestinationAzureBlobStorageOutputFormat) MarshalJSON() ([]byte, error) {
- if u.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON)
+ if u.DestinationAzureBlobStorageCSVCommaSeparatedValues != nil {
+ return utils.MarshalJSON(u.DestinationAzureBlobStorageCSVCommaSeparatedValues, "", true)
}
- if u.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues != nil {
- return json.Marshal(u.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues)
+ if u.DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON != nil {
+ return utils.MarshalJSON(u.DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationAzureBlobStorage struct {
@@ -197,12 +229,76 @@ type DestinationAzureBlobStorage struct {
// The name of the Azure blob storage container. If not exists - will be created automatically. May be empty, then will be created automatically airbytecontainer+timestamp
AzureBlobStorageContainerName *string `json:"azure_blob_storage_container_name,omitempty"`
// This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
- AzureBlobStorageEndpointDomainName *string `json:"azure_blob_storage_endpoint_domain_name,omitempty"`
+ AzureBlobStorageEndpointDomainName *string `default:"blob.core.windows.net" json:"azure_blob_storage_endpoint_domain_name"`
// The amount of megabytes to buffer for the output stream to Azure. This will impact memory footprint on workers, but may need adjustment for performance and appropriate block size in Azure.
- AzureBlobStorageOutputBufferSize *int64 `json:"azure_blob_storage_output_buffer_size,omitempty"`
+ AzureBlobStorageOutputBufferSize *int64 `default:"5" json:"azure_blob_storage_output_buffer_size"`
// The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable
- AzureBlobStorageSpillSize *int64 `json:"azure_blob_storage_spill_size,omitempty"`
- DestinationType DestinationAzureBlobStorageAzureBlobStorage `json:"destinationType"`
+ AzureBlobStorageSpillSize *int64 `default:"500" json:"azure_blob_storage_spill_size"`
+ destinationType AzureBlobStorage `const:"azure-blob-storage" json:"destinationType"`
// Output data format
Format DestinationAzureBlobStorageOutputFormat `json:"format"`
}
+
+func (d DestinationAzureBlobStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationAzureBlobStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationAzureBlobStorage) GetAzureBlobStorageAccountKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageAccountKey
+}
+
+func (o *DestinationAzureBlobStorage) GetAzureBlobStorageAccountName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageAccountName
+}
+
+func (o *DestinationAzureBlobStorage) GetAzureBlobStorageContainerName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageContainerName
+}
+
+func (o *DestinationAzureBlobStorage) GetAzureBlobStorageEndpointDomainName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageEndpointDomainName
+}
+
+func (o *DestinationAzureBlobStorage) GetAzureBlobStorageOutputBufferSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageOutputBufferSize
+}
+
+func (o *DestinationAzureBlobStorage) GetAzureBlobStorageSpillSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageSpillSize
+}
+
+func (o *DestinationAzureBlobStorage) GetDestinationType() AzureBlobStorage {
+ return AzureBlobStorageAzureBlobStorage
+}
+
+func (o *DestinationAzureBlobStorage) GetFormat() DestinationAzureBlobStorageOutputFormat {
+ if o == nil {
+ return DestinationAzureBlobStorageOutputFormat{}
+ }
+ return o.Format
+}
diff --git a/internal/sdk/pkg/models/shared/destinationazureblobstoragecreaterequest.go b/internal/sdk/pkg/models/shared/destinationazureblobstoragecreaterequest.go
old mode 100755
new mode 100644
index bd39e8ff3..9524fe55e
--- a/internal/sdk/pkg/models/shared/destinationazureblobstoragecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationazureblobstoragecreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationAzureBlobStorageCreateRequest struct {
Configuration DestinationAzureBlobStorage `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationAzureBlobStorageCreateRequest) GetConfiguration() DestinationAzureBlobStorage {
+ if o == nil {
+ return DestinationAzureBlobStorage{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationAzureBlobStorageCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationAzureBlobStorageCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationAzureBlobStorageCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationazureblobstorageputrequest.go b/internal/sdk/pkg/models/shared/destinationazureblobstorageputrequest.go
old mode 100755
new mode 100644
index 2e90a963e..92bbd2ee4
--- a/internal/sdk/pkg/models/shared/destinationazureblobstorageputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationazureblobstorageputrequest.go
@@ -7,3 +7,24 @@ type DestinationAzureBlobStoragePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationAzureBlobStoragePutRequest) GetConfiguration() DestinationAzureBlobStorageUpdate {
+ if o == nil {
+ return DestinationAzureBlobStorageUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationAzureBlobStoragePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationAzureBlobStoragePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationazureblobstorageupdate.go b/internal/sdk/pkg/models/shared/destinationazureblobstorageupdate.go
old mode 100755
new mode 100644
index 8a55073c0..44b55ac54
--- a/internal/sdk/pkg/models/shared/destinationazureblobstorageupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationazureblobstorageupdate.go
@@ -3,54 +3,69 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType string
+type DestinationAzureBlobStorageUpdateFormatType string
const (
- DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatTypeJsonl DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType = "JSONL"
+ DestinationAzureBlobStorageUpdateFormatTypeJsonl DestinationAzureBlobStorageUpdateFormatType = "JSONL"
)
-func (e DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType) ToPointer() *DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType {
+func (e DestinationAzureBlobStorageUpdateFormatType) ToPointer() *DestinationAzureBlobStorageUpdateFormatType {
return &e
}
-func (e *DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationAzureBlobStorageUpdateFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "JSONL":
- *e = DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType(v)
+ *e = DestinationAzureBlobStorageUpdateFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationAzureBlobStorageUpdateFormatType: %v", v)
}
}
-// DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON - Output data format
-type DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON struct {
- FormatType DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType `json:"format_type"`
+// DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON - Output data format
+type DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON struct {
+ formatType DestinationAzureBlobStorageUpdateFormatType `const:"JSONL" json:"format_type"`
}
-// DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlattening - Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-type DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlattening string
+func (d DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON) GetFormatType() DestinationAzureBlobStorageUpdateFormatType {
+ return DestinationAzureBlobStorageUpdateFormatTypeJsonl
+}
+
+// NormalizationFlattening - Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
+type NormalizationFlattening string
const (
- DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlatteningNoFlattening DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlattening = "No flattening"
- DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlatteningRootLevelFlattening DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlattening = "Root level flattening"
+ NormalizationFlatteningNoFlattening NormalizationFlattening = "No flattening"
+ NormalizationFlatteningRootLevelFlattening NormalizationFlattening = "Root level flattening"
)
-func (e DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlattening) ToPointer() *DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlattening {
+func (e NormalizationFlattening) ToPointer() *NormalizationFlattening {
return &e
}
-func (e *DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlattening) UnmarshalJSON(data []byte) error {
+func (e *NormalizationFlattening) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -59,110 +74,127 @@ func (e *DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNor
case "No flattening":
fallthrough
case "Root level flattening":
- *e = DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlattening(v)
+ *e = NormalizationFlattening(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlattening: %v", v)
+ return fmt.Errorf("invalid value for NormalizationFlattening: %v", v)
}
}
-type DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesFormatType string
+type FormatType string
const (
- DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesFormatTypeCsv DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesFormatType = "CSV"
+ FormatTypeCsv FormatType = "CSV"
)
-func (e DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesFormatType) ToPointer() *DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesFormatType {
+func (e FormatType) ToPointer() *FormatType {
return &e
}
-func (e *DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesFormatType) UnmarshalJSON(data []byte) error {
+func (e *FormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CSV":
- *e = DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesFormatType(v)
+ *e = FormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesFormatType: %v", v)
+ return fmt.Errorf("invalid value for FormatType: %v", v)
}
}
-// DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues - Output data format
-type DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues struct {
+// CSVCommaSeparatedValues - Output data format
+type CSVCommaSeparatedValues struct {
// Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
- Flattening DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesNormalizationFlattening `json:"flattening"`
- FormatType DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValuesFormatType `json:"format_type"`
+ Flattening *NormalizationFlattening `default:"No flattening" json:"flattening"`
+ formatType FormatType `const:"CSV" json:"format_type"`
+}
+
+func (c CSVCommaSeparatedValues) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(c, "", false)
+}
+
+func (c *CSVCommaSeparatedValues) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &c, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *CSVCommaSeparatedValues) GetFlattening() *NormalizationFlattening {
+ if o == nil {
+ return nil
+ }
+ return o.Flattening
+}
+
+func (o *CSVCommaSeparatedValues) GetFormatType() FormatType {
+ return FormatTypeCsv
}
-type DestinationAzureBlobStorageUpdateOutputFormatType string
+type OutputFormatType string
const (
- DestinationAzureBlobStorageUpdateOutputFormatTypeDestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues DestinationAzureBlobStorageUpdateOutputFormatType = "destination-azure-blob-storage-update_Output Format_CSV: Comma-Separated Values"
- DestinationAzureBlobStorageUpdateOutputFormatTypeDestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON DestinationAzureBlobStorageUpdateOutputFormatType = "destination-azure-blob-storage-update_Output Format_JSON Lines: newline-delimited JSON"
+ OutputFormatTypeCSVCommaSeparatedValues OutputFormatType = "CSV: Comma-Separated Values"
+ OutputFormatTypeDestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON OutputFormatType = "destination-azure-blob-storage-update_JSON Lines: newline-delimited JSON"
)
-type DestinationAzureBlobStorageUpdateOutputFormat struct {
- DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues *DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues
- DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON *DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON
+type OutputFormat struct {
+ CSVCommaSeparatedValues *CSVCommaSeparatedValues
+ DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON *DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON
- Type DestinationAzureBlobStorageUpdateOutputFormatType
+ Type OutputFormatType
}
-func CreateDestinationAzureBlobStorageUpdateOutputFormatDestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues(destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues) DestinationAzureBlobStorageUpdateOutputFormat {
- typ := DestinationAzureBlobStorageUpdateOutputFormatTypeDestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues
+func CreateOutputFormatCSVCommaSeparatedValues(csvCommaSeparatedValues CSVCommaSeparatedValues) OutputFormat {
+ typ := OutputFormatTypeCSVCommaSeparatedValues
- return DestinationAzureBlobStorageUpdateOutputFormat{
- DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues: &destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues,
- Type: typ,
+ return OutputFormat{
+ CSVCommaSeparatedValues: &csvCommaSeparatedValues,
+ Type: typ,
}
}
-func CreateDestinationAzureBlobStorageUpdateOutputFormatDestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON(destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON) DestinationAzureBlobStorageUpdateOutputFormat {
- typ := DestinationAzureBlobStorageUpdateOutputFormatTypeDestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON
+func CreateOutputFormatDestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON(destinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON) OutputFormat {
+ typ := OutputFormatTypeDestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON
- return DestinationAzureBlobStorageUpdateOutputFormat{
- DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON: &destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON,
+ return OutputFormat{
+ DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON: &destinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON,
Type: typ,
}
}
-func (u *DestinationAzureBlobStorageUpdateOutputFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *OutputFormat) UnmarshalJSON(data []byte) error {
- destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON); err == nil {
- u.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON = destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON
- u.Type = DestinationAzureBlobStorageUpdateOutputFormatTypeDestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON
+ destinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON := new(DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON)
+ if err := utils.UnmarshalJSON(data, &destinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON, "", true, true); err == nil {
+ u.DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON = destinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON
+ u.Type = OutputFormatTypeDestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON
return nil
}
- destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues := new(DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues); err == nil {
- u.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues = destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues
- u.Type = DestinationAzureBlobStorageUpdateOutputFormatTypeDestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues
+ csvCommaSeparatedValues := new(CSVCommaSeparatedValues)
+ if err := utils.UnmarshalJSON(data, &csvCommaSeparatedValues, "", true, true); err == nil {
+ u.CSVCommaSeparatedValues = csvCommaSeparatedValues
+ u.Type = OutputFormatTypeCSVCommaSeparatedValues
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationAzureBlobStorageUpdateOutputFormat) MarshalJSON() ([]byte, error) {
- if u.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON)
+func (u OutputFormat) MarshalJSON() ([]byte, error) {
+ if u.CSVCommaSeparatedValues != nil {
+ return utils.MarshalJSON(u.CSVCommaSeparatedValues, "", true)
}
- if u.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues != nil {
- return json.Marshal(u.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues)
+ if u.DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON != nil {
+ return utils.MarshalJSON(u.DestinationAzureBlobStorageUpdateJSONLinesNewlineDelimitedJSON, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationAzureBlobStorageUpdate struct {
@@ -173,11 +205,71 @@ type DestinationAzureBlobStorageUpdate struct {
// The name of the Azure blob storage container. If not exists - will be created automatically. May be empty, then will be created automatically airbytecontainer+timestamp
AzureBlobStorageContainerName *string `json:"azure_blob_storage_container_name,omitempty"`
// This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
- AzureBlobStorageEndpointDomainName *string `json:"azure_blob_storage_endpoint_domain_name,omitempty"`
+ AzureBlobStorageEndpointDomainName *string `default:"blob.core.windows.net" json:"azure_blob_storage_endpoint_domain_name"`
// The amount of megabytes to buffer for the output stream to Azure. This will impact memory footprint on workers, but may need adjustment for performance and appropriate block size in Azure.
- AzureBlobStorageOutputBufferSize *int64 `json:"azure_blob_storage_output_buffer_size,omitempty"`
+ AzureBlobStorageOutputBufferSize *int64 `default:"5" json:"azure_blob_storage_output_buffer_size"`
// The amount of megabytes after which the connector should spill the records in a new blob object. Make sure to configure size greater than individual records. Enter 0 if not applicable
- AzureBlobStorageSpillSize *int64 `json:"azure_blob_storage_spill_size,omitempty"`
+ AzureBlobStorageSpillSize *int64 `default:"500" json:"azure_blob_storage_spill_size"`
// Output data format
- Format DestinationAzureBlobStorageUpdateOutputFormat `json:"format"`
+ Format OutputFormat `json:"format"`
+}
+
+func (d DestinationAzureBlobStorageUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationAzureBlobStorageUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationAzureBlobStorageUpdate) GetAzureBlobStorageAccountKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageAccountKey
+}
+
+func (o *DestinationAzureBlobStorageUpdate) GetAzureBlobStorageAccountName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageAccountName
+}
+
+func (o *DestinationAzureBlobStorageUpdate) GetAzureBlobStorageContainerName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageContainerName
+}
+
+func (o *DestinationAzureBlobStorageUpdate) GetAzureBlobStorageEndpointDomainName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageEndpointDomainName
+}
+
+func (o *DestinationAzureBlobStorageUpdate) GetAzureBlobStorageOutputBufferSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageOutputBufferSize
+}
+
+func (o *DestinationAzureBlobStorageUpdate) GetAzureBlobStorageSpillSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageSpillSize
+}
+
+func (o *DestinationAzureBlobStorageUpdate) GetFormat() OutputFormat {
+ if o == nil {
+ return OutputFormat{}
+ }
+ return o.Format
}
diff --git a/internal/sdk/pkg/models/shared/destinationbigquery.go b/internal/sdk/pkg/models/shared/destinationbigquery.go
old mode 100755
new mode 100644
index 06e2bf662..2e5bddc6b
--- a/internal/sdk/pkg/models/shared/destinationbigquery.go
+++ b/internal/sdk/pkg/models/shared/destinationbigquery.go
@@ -3,10 +3,10 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// DestinationBigqueryDatasetLocation - The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.
@@ -38,6 +38,9 @@ const (
DestinationBigqueryDatasetLocationEuropeWest7 DestinationBigqueryDatasetLocation = "europe-west7"
DestinationBigqueryDatasetLocationEuropeWest8 DestinationBigqueryDatasetLocation = "europe-west8"
DestinationBigqueryDatasetLocationEuropeWest9 DestinationBigqueryDatasetLocation = "europe-west9"
+ DestinationBigqueryDatasetLocationEuropeWest12 DestinationBigqueryDatasetLocation = "europe-west12"
+ DestinationBigqueryDatasetLocationMeCentral1 DestinationBigqueryDatasetLocation = "me-central1"
+ DestinationBigqueryDatasetLocationMeCentral2 DestinationBigqueryDatasetLocation = "me-central2"
DestinationBigqueryDatasetLocationMeWest1 DestinationBigqueryDatasetLocation = "me-west1"
DestinationBigqueryDatasetLocationNorthamericaNortheast1 DestinationBigqueryDatasetLocation = "northamerica-northeast1"
DestinationBigqueryDatasetLocationNorthamericaNortheast2 DestinationBigqueryDatasetLocation = "northamerica-northeast2"
@@ -49,6 +52,7 @@ const (
DestinationBigqueryDatasetLocationUsEast3 DestinationBigqueryDatasetLocation = "us-east3"
DestinationBigqueryDatasetLocationUsEast4 DestinationBigqueryDatasetLocation = "us-east4"
DestinationBigqueryDatasetLocationUsEast5 DestinationBigqueryDatasetLocation = "us-east5"
+ DestinationBigqueryDatasetLocationUsSouth1 DestinationBigqueryDatasetLocation = "us-south1"
DestinationBigqueryDatasetLocationUsWest1 DestinationBigqueryDatasetLocation = "us-west1"
DestinationBigqueryDatasetLocationUsWest2 DestinationBigqueryDatasetLocation = "us-west2"
DestinationBigqueryDatasetLocationUsWest3 DestinationBigqueryDatasetLocation = "us-west3"
@@ -115,6 +119,12 @@ func (e *DestinationBigqueryDatasetLocation) UnmarshalJSON(data []byte) error {
fallthrough
case "europe-west9":
fallthrough
+ case "europe-west12":
+ fallthrough
+ case "me-central1":
+ fallthrough
+ case "me-central2":
+ fallthrough
case "me-west1":
fallthrough
case "northamerica-northeast1":
@@ -137,6 +147,8 @@ func (e *DestinationBigqueryDatasetLocation) UnmarshalJSON(data []byte) error {
fallthrough
case "us-east5":
fallthrough
+ case "us-south1":
+ fallthrough
case "us-west1":
fallthrough
case "us-west2":
@@ -151,120 +163,190 @@ func (e *DestinationBigqueryDatasetLocation) UnmarshalJSON(data []byte) error {
}
}
-type DestinationBigqueryBigquery string
+type Bigquery string
const (
- DestinationBigqueryBigqueryBigquery DestinationBigqueryBigquery = "bigquery"
+ BigqueryBigquery Bigquery = "bigquery"
)
-func (e DestinationBigqueryBigquery) ToPointer() *DestinationBigqueryBigquery {
+func (e Bigquery) ToPointer() *Bigquery {
return &e
}
-func (e *DestinationBigqueryBigquery) UnmarshalJSON(data []byte) error {
+func (e *Bigquery) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "bigquery":
- *e = DestinationBigqueryBigquery(v)
+ *e = Bigquery(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationBigqueryBigquery: %v", v)
+ return fmt.Errorf("invalid value for Bigquery: %v", v)
+ }
+}
+
+type DestinationBigquerySchemasMethod string
+
+const (
+ DestinationBigquerySchemasMethodStandard DestinationBigquerySchemasMethod = "Standard"
+)
+
+func (e DestinationBigquerySchemasMethod) ToPointer() *DestinationBigquerySchemasMethod {
+ return &e
+}
+
+func (e *DestinationBigquerySchemasMethod) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Standard":
+ *e = DestinationBigquerySchemasMethod(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationBigquerySchemasMethod: %v", v)
+ }
+}
+
+// DestinationBigqueryStandardInserts - (not recommended) Direct loading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In all other cases, you should use GCS staging.
+type DestinationBigqueryStandardInserts struct {
+ method DestinationBigquerySchemasMethod `const:"Standard" json:"method"`
+}
+
+func (d DestinationBigqueryStandardInserts) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationBigqueryStandardInserts) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-type DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType string
+func (o *DestinationBigqueryStandardInserts) GetMethod() DestinationBigquerySchemasMethod {
+ return DestinationBigquerySchemasMethodStandard
+}
+
+type DestinationBigqueryCredentialType string
const (
- DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialTypeHmacKey DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType = "HMAC_KEY"
+ DestinationBigqueryCredentialTypeHmacKey DestinationBigqueryCredentialType = "HMAC_KEY"
)
-func (e DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType) ToPointer() *DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType {
+func (e DestinationBigqueryCredentialType) ToPointer() *DestinationBigqueryCredentialType {
return &e
}
-func (e *DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType) UnmarshalJSON(data []byte) error {
+func (e *DestinationBigqueryCredentialType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "HMAC_KEY":
- *e = DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType(v)
+ *e = DestinationBigqueryCredentialType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType: %v", v)
+ return fmt.Errorf("invalid value for DestinationBigqueryCredentialType: %v", v)
}
}
-// DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey - An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
-type DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey struct {
- CredentialType DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKeyCredentialType `json:"credential_type"`
+// DestinationBigqueryHMACKey - An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
+type DestinationBigqueryHMACKey struct {
+ credentialType DestinationBigqueryCredentialType `const:"HMAC_KEY" json:"credential_type"`
// HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
HmacKeyAccessID string `json:"hmac_key_access_id"`
// The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
HmacKeySecret string `json:"hmac_key_secret"`
}
-type DestinationBigqueryLoadingMethodGCSStagingCredentialType string
+func (d DestinationBigqueryHMACKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationBigqueryHMACKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationBigqueryHMACKey) GetCredentialType() DestinationBigqueryCredentialType {
+ return DestinationBigqueryCredentialTypeHmacKey
+}
+
+func (o *DestinationBigqueryHMACKey) GetHmacKeyAccessID() string {
+ if o == nil {
+ return ""
+ }
+ return o.HmacKeyAccessID
+}
+
+func (o *DestinationBigqueryHMACKey) GetHmacKeySecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.HmacKeySecret
+}
+
+type DestinationBigqueryCredentialUnionType string
const (
- DestinationBigqueryLoadingMethodGCSStagingCredentialTypeDestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey DestinationBigqueryLoadingMethodGCSStagingCredentialType = "destination-bigquery_Loading Method_GCS Staging_Credential_HMAC key"
+ DestinationBigqueryCredentialUnionTypeDestinationBigqueryHMACKey DestinationBigqueryCredentialUnionType = "destination-bigquery_HMAC key"
)
-type DestinationBigqueryLoadingMethodGCSStagingCredential struct {
- DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey *DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey
+type DestinationBigqueryCredential struct {
+ DestinationBigqueryHMACKey *DestinationBigqueryHMACKey
- Type DestinationBigqueryLoadingMethodGCSStagingCredentialType
+ Type DestinationBigqueryCredentialUnionType
}
-func CreateDestinationBigqueryLoadingMethodGCSStagingCredentialDestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey(destinationBigqueryLoadingMethodGCSStagingCredentialHMACKey DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey) DestinationBigqueryLoadingMethodGCSStagingCredential {
- typ := DestinationBigqueryLoadingMethodGCSStagingCredentialTypeDestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey
+func CreateDestinationBigqueryCredentialDestinationBigqueryHMACKey(destinationBigqueryHMACKey DestinationBigqueryHMACKey) DestinationBigqueryCredential {
+ typ := DestinationBigqueryCredentialUnionTypeDestinationBigqueryHMACKey
- return DestinationBigqueryLoadingMethodGCSStagingCredential{
- DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey: &destinationBigqueryLoadingMethodGCSStagingCredentialHMACKey,
- Type: typ,
+ return DestinationBigqueryCredential{
+ DestinationBigqueryHMACKey: &destinationBigqueryHMACKey,
+ Type: typ,
}
}
-func (u *DestinationBigqueryLoadingMethodGCSStagingCredential) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationBigqueryCredential) UnmarshalJSON(data []byte) error {
- destinationBigqueryLoadingMethodGCSStagingCredentialHMACKey := new(DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryLoadingMethodGCSStagingCredentialHMACKey); err == nil {
- u.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey = destinationBigqueryLoadingMethodGCSStagingCredentialHMACKey
- u.Type = DestinationBigqueryLoadingMethodGCSStagingCredentialTypeDestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey
+ destinationBigqueryHMACKey := new(DestinationBigqueryHMACKey)
+ if err := utils.UnmarshalJSON(data, &destinationBigqueryHMACKey, "", true, true); err == nil {
+ u.DestinationBigqueryHMACKey = destinationBigqueryHMACKey
+ u.Type = DestinationBigqueryCredentialUnionTypeDestinationBigqueryHMACKey
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationBigqueryLoadingMethodGCSStagingCredential) MarshalJSON() ([]byte, error) {
- if u.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey != nil {
- return json.Marshal(u.DestinationBigqueryLoadingMethodGCSStagingCredentialHMACKey)
+func (u DestinationBigqueryCredential) MarshalJSON() ([]byte, error) {
+ if u.DestinationBigqueryHMACKey != nil {
+ return utils.MarshalJSON(u.DestinationBigqueryHMACKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing - This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-type DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing string
+// DestinationBigqueryGCSTmpFilesAfterwardProcessing - This upload method is supposed to temporarily store records in the GCS bucket. With this selection you can choose whether these records should be removed from GCS when the migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
+type DestinationBigqueryGCSTmpFilesAfterwardProcessing string
const (
- DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingDeleteAllTmpFilesFromGcs DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing = "Delete all tmp files from GCS"
- DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingKeepAllTmpFilesInGcs DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing = "Keep all tmp files in GCS"
+ DestinationBigqueryGCSTmpFilesAfterwardProcessingDeleteAllTmpFilesFromGcs DestinationBigqueryGCSTmpFilesAfterwardProcessing = "Delete all tmp files from GCS"
+ DestinationBigqueryGCSTmpFilesAfterwardProcessingKeepAllTmpFilesInGcs DestinationBigqueryGCSTmpFilesAfterwardProcessing = "Keep all tmp files in GCS"
)
-func (e DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing) ToPointer() *DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing {
+func (e DestinationBigqueryGCSTmpFilesAfterwardProcessing) ToPointer() *DestinationBigqueryGCSTmpFilesAfterwardProcessing {
return &e
}
-func (e *DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing) UnmarshalJSON(data []byte) error {
+func (e *DestinationBigqueryGCSTmpFilesAfterwardProcessing) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -273,131 +355,138 @@ func (e *DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessin
case "Delete all tmp files from GCS":
fallthrough
case "Keep all tmp files in GCS":
- *e = DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing(v)
+ *e = DestinationBigqueryGCSTmpFilesAfterwardProcessing(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing: %v", v)
+ return fmt.Errorf("invalid value for DestinationBigqueryGCSTmpFilesAfterwardProcessing: %v", v)
}
}
-type DestinationBigqueryLoadingMethodGCSStagingMethod string
+type DestinationBigqueryMethod string
const (
- DestinationBigqueryLoadingMethodGCSStagingMethodGcsStaging DestinationBigqueryLoadingMethodGCSStagingMethod = "GCS Staging"
+ DestinationBigqueryMethodGcsStaging DestinationBigqueryMethod = "GCS Staging"
)
-func (e DestinationBigqueryLoadingMethodGCSStagingMethod) ToPointer() *DestinationBigqueryLoadingMethodGCSStagingMethod {
+func (e DestinationBigqueryMethod) ToPointer() *DestinationBigqueryMethod {
return &e
}
-func (e *DestinationBigqueryLoadingMethodGCSStagingMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationBigqueryMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GCS Staging":
- *e = DestinationBigqueryLoadingMethodGCSStagingMethod(v)
+ *e = DestinationBigqueryMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationBigqueryLoadingMethodGCSStagingMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationBigqueryMethod: %v", v)
}
}
-// DestinationBigqueryLoadingMethodGCSStaging - Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
-type DestinationBigqueryLoadingMethodGCSStaging struct {
+// DestinationBigqueryGCSStaging - (recommended) Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO to load your data into BigQuery. Provides best-in-class speed, reliability and scalability. Read more about GCS Staging here.
+type DestinationBigqueryGCSStaging struct {
// An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
- Credential DestinationBigqueryLoadingMethodGCSStagingCredential `json:"credential"`
- // Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
- FileBufferCount *int64 `json:"file_buffer_count,omitempty"`
+ Credential DestinationBigqueryCredential `json:"credential"`
// The name of the GCS bucket. Read more here.
GcsBucketName string `json:"gcs_bucket_name"`
// Directory under the GCS bucket where data will be written.
GcsBucketPath string `json:"gcs_bucket_path"`
// This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
- KeepFilesInGcsBucket *DestinationBigqueryLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing `json:"keep_files_in_gcs-bucket,omitempty"`
- Method DestinationBigqueryLoadingMethodGCSStagingMethod `json:"method"`
+ KeepFilesInGcsBucket *DestinationBigqueryGCSTmpFilesAfterwardProcessing `default:"Delete all tmp files from GCS" json:"keep_files_in_gcs-bucket"`
+ method DestinationBigqueryMethod `const:"GCS Staging" json:"method"`
}
-type DestinationBigqueryLoadingMethodStandardInsertsMethod string
+func (d DestinationBigqueryGCSStaging) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
-const (
- DestinationBigqueryLoadingMethodStandardInsertsMethodStandard DestinationBigqueryLoadingMethodStandardInsertsMethod = "Standard"
-)
+func (d *DestinationBigqueryGCSStaging) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
-func (e DestinationBigqueryLoadingMethodStandardInsertsMethod) ToPointer() *DestinationBigqueryLoadingMethodStandardInsertsMethod {
- return &e
+func (o *DestinationBigqueryGCSStaging) GetCredential() DestinationBigqueryCredential {
+ if o == nil {
+ return DestinationBigqueryCredential{}
+ }
+ return o.Credential
}
-func (e *DestinationBigqueryLoadingMethodStandardInsertsMethod) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
+func (o *DestinationBigqueryGCSStaging) GetGcsBucketName() string {
+ if o == nil {
+ return ""
}
- switch v {
- case "Standard":
- *e = DestinationBigqueryLoadingMethodStandardInsertsMethod(v)
+ return o.GcsBucketName
+}
+
+func (o *DestinationBigqueryGCSStaging) GetGcsBucketPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.GcsBucketPath
+}
+
+func (o *DestinationBigqueryGCSStaging) GetKeepFilesInGcsBucket() *DestinationBigqueryGCSTmpFilesAfterwardProcessing {
+ if o == nil {
return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryLoadingMethodStandardInsertsMethod: %v", v)
}
+ return o.KeepFilesInGcsBucket
}
-// DestinationBigqueryLoadingMethodStandardInserts - Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
-type DestinationBigqueryLoadingMethodStandardInserts struct {
- Method DestinationBigqueryLoadingMethodStandardInsertsMethod `json:"method"`
+func (o *DestinationBigqueryGCSStaging) GetMethod() DestinationBigqueryMethod {
+ return DestinationBigqueryMethodGcsStaging
}
type DestinationBigqueryLoadingMethodType string
const (
- DestinationBigqueryLoadingMethodTypeDestinationBigqueryLoadingMethodStandardInserts DestinationBigqueryLoadingMethodType = "destination-bigquery_Loading Method_Standard Inserts"
- DestinationBigqueryLoadingMethodTypeDestinationBigqueryLoadingMethodGCSStaging DestinationBigqueryLoadingMethodType = "destination-bigquery_Loading Method_GCS Staging"
+ DestinationBigqueryLoadingMethodTypeDestinationBigqueryGCSStaging DestinationBigqueryLoadingMethodType = "destination-bigquery_GCS Staging"
+ DestinationBigqueryLoadingMethodTypeDestinationBigqueryStandardInserts DestinationBigqueryLoadingMethodType = "destination-bigquery_Standard Inserts"
)
type DestinationBigqueryLoadingMethod struct {
- DestinationBigqueryLoadingMethodStandardInserts *DestinationBigqueryLoadingMethodStandardInserts
- DestinationBigqueryLoadingMethodGCSStaging *DestinationBigqueryLoadingMethodGCSStaging
+ DestinationBigqueryGCSStaging *DestinationBigqueryGCSStaging
+ DestinationBigqueryStandardInserts *DestinationBigqueryStandardInserts
Type DestinationBigqueryLoadingMethodType
}
-func CreateDestinationBigqueryLoadingMethodDestinationBigqueryLoadingMethodStandardInserts(destinationBigqueryLoadingMethodStandardInserts DestinationBigqueryLoadingMethodStandardInserts) DestinationBigqueryLoadingMethod {
- typ := DestinationBigqueryLoadingMethodTypeDestinationBigqueryLoadingMethodStandardInserts
+func CreateDestinationBigqueryLoadingMethodDestinationBigqueryGCSStaging(destinationBigqueryGCSStaging DestinationBigqueryGCSStaging) DestinationBigqueryLoadingMethod {
+ typ := DestinationBigqueryLoadingMethodTypeDestinationBigqueryGCSStaging
return DestinationBigqueryLoadingMethod{
- DestinationBigqueryLoadingMethodStandardInserts: &destinationBigqueryLoadingMethodStandardInserts,
- Type: typ,
+ DestinationBigqueryGCSStaging: &destinationBigqueryGCSStaging,
+ Type: typ,
}
}
-func CreateDestinationBigqueryLoadingMethodDestinationBigqueryLoadingMethodGCSStaging(destinationBigqueryLoadingMethodGCSStaging DestinationBigqueryLoadingMethodGCSStaging) DestinationBigqueryLoadingMethod {
- typ := DestinationBigqueryLoadingMethodTypeDestinationBigqueryLoadingMethodGCSStaging
+func CreateDestinationBigqueryLoadingMethodDestinationBigqueryStandardInserts(destinationBigqueryStandardInserts DestinationBigqueryStandardInserts) DestinationBigqueryLoadingMethod {
+ typ := DestinationBigqueryLoadingMethodTypeDestinationBigqueryStandardInserts
return DestinationBigqueryLoadingMethod{
- DestinationBigqueryLoadingMethodGCSStaging: &destinationBigqueryLoadingMethodGCSStaging,
- Type: typ,
+ DestinationBigqueryStandardInserts: &destinationBigqueryStandardInserts,
+ Type: typ,
}
}
func (u *DestinationBigqueryLoadingMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationBigqueryLoadingMethodStandardInserts := new(DestinationBigqueryLoadingMethodStandardInserts)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryLoadingMethodStandardInserts); err == nil {
- u.DestinationBigqueryLoadingMethodStandardInserts = destinationBigqueryLoadingMethodStandardInserts
- u.Type = DestinationBigqueryLoadingMethodTypeDestinationBigqueryLoadingMethodStandardInserts
+
+ destinationBigqueryStandardInserts := new(DestinationBigqueryStandardInserts)
+ if err := utils.UnmarshalJSON(data, &destinationBigqueryStandardInserts, "", true, true); err == nil {
+ u.DestinationBigqueryStandardInserts = destinationBigqueryStandardInserts
+ u.Type = DestinationBigqueryLoadingMethodTypeDestinationBigqueryStandardInserts
return nil
}
- destinationBigqueryLoadingMethodGCSStaging := new(DestinationBigqueryLoadingMethodGCSStaging)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryLoadingMethodGCSStaging); err == nil {
- u.DestinationBigqueryLoadingMethodGCSStaging = destinationBigqueryLoadingMethodGCSStaging
- u.Type = DestinationBigqueryLoadingMethodTypeDestinationBigqueryLoadingMethodGCSStaging
+ destinationBigqueryGCSStaging := new(DestinationBigqueryGCSStaging)
+ if err := utils.UnmarshalJSON(data, &destinationBigqueryGCSStaging, "", true, true); err == nil {
+ u.DestinationBigqueryGCSStaging = destinationBigqueryGCSStaging
+ u.Type = DestinationBigqueryLoadingMethodTypeDestinationBigqueryGCSStaging
return nil
}
@@ -405,15 +494,15 @@ func (u *DestinationBigqueryLoadingMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationBigqueryLoadingMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationBigqueryLoadingMethodStandardInserts != nil {
- return json.Marshal(u.DestinationBigqueryLoadingMethodStandardInserts)
+ if u.DestinationBigqueryGCSStaging != nil {
+ return utils.MarshalJSON(u.DestinationBigqueryGCSStaging, "", true)
}
- if u.DestinationBigqueryLoadingMethodGCSStaging != nil {
- return json.Marshal(u.DestinationBigqueryLoadingMethodGCSStaging)
+ if u.DestinationBigqueryStandardInserts != nil {
+ return utils.MarshalJSON(u.DestinationBigqueryStandardInserts, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// DestinationBigqueryTransformationQueryRunType - Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly.
@@ -446,20 +535,100 @@ func (e *DestinationBigqueryTransformationQueryRunType) UnmarshalJSON(data []byt
type DestinationBigquery struct {
// Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.
- BigQueryClientBufferSizeMb *int64 `json:"big_query_client_buffer_size_mb,omitempty"`
+ BigQueryClientBufferSizeMb *int64 `default:"15" json:"big_query_client_buffer_size_mb"`
// The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
CredentialsJSON *string `json:"credentials_json,omitempty"`
// The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
DatasetID string `json:"dataset_id"`
// The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.
DatasetLocation DestinationBigqueryDatasetLocation `json:"dataset_location"`
- DestinationType DestinationBigqueryBigquery `json:"destinationType"`
- // Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
+ destinationType Bigquery `const:"bigquery" json:"destinationType"`
+ // Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions
+ DisableTypeDedupe *bool `default:"false" json:"disable_type_dedupe"`
+ // The way data will be uploaded to BigQuery.
LoadingMethod *DestinationBigqueryLoadingMethod `json:"loading_method,omitempty"`
// The GCP project ID for the project containing the target BigQuery dataset. Read more here.
ProjectID string `json:"project_id"`
- // The dataset to write raw tables into
+ // The dataset to write raw tables into (default: airbyte_internal)
RawDataDataset *string `json:"raw_data_dataset,omitempty"`
// Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly.
- TransformationPriority *DestinationBigqueryTransformationQueryRunType `json:"transformation_priority,omitempty"`
+ TransformationPriority *DestinationBigqueryTransformationQueryRunType `default:"interactive" json:"transformation_priority"`
+}
+
+func (d DestinationBigquery) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationBigquery) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationBigquery) GetBigQueryClientBufferSizeMb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BigQueryClientBufferSizeMb
+}
+
+func (o *DestinationBigquery) GetCredentialsJSON() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CredentialsJSON
+}
+
+func (o *DestinationBigquery) GetDatasetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatasetID
+}
+
+func (o *DestinationBigquery) GetDatasetLocation() DestinationBigqueryDatasetLocation {
+ if o == nil {
+ return DestinationBigqueryDatasetLocation("")
+ }
+ return o.DatasetLocation
+}
+
+func (o *DestinationBigquery) GetDestinationType() Bigquery {
+ return BigqueryBigquery
+}
+
+func (o *DestinationBigquery) GetDisableTypeDedupe() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DisableTypeDedupe
+}
+
+func (o *DestinationBigquery) GetLoadingMethod() *DestinationBigqueryLoadingMethod {
+ if o == nil {
+ return nil
+ }
+ return o.LoadingMethod
+}
+
+func (o *DestinationBigquery) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
+
+func (o *DestinationBigquery) GetRawDataDataset() *string {
+ if o == nil {
+ return nil
+ }
+ return o.RawDataDataset
+}
+
+func (o *DestinationBigquery) GetTransformationPriority() *DestinationBigqueryTransformationQueryRunType {
+ if o == nil {
+ return nil
+ }
+ return o.TransformationPriority
}
diff --git a/internal/sdk/pkg/models/shared/destinationbigquerycreaterequest.go b/internal/sdk/pkg/models/shared/destinationbigquerycreaterequest.go
old mode 100755
new mode 100644
index 7f605018d..9b00baac5
--- a/internal/sdk/pkg/models/shared/destinationbigquerycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationbigquerycreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationBigqueryCreateRequest struct {
Configuration DestinationBigquery `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationBigqueryCreateRequest) GetConfiguration() DestinationBigquery {
+ if o == nil {
+ return DestinationBigquery{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationBigqueryCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationBigqueryCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationBigqueryCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationbigquerydenormalized.go b/internal/sdk/pkg/models/shared/destinationbigquerydenormalized.go
deleted file mode 100755
index 79a9af335..000000000
--- a/internal/sdk/pkg/models/shared/destinationbigquerydenormalized.go
+++ /dev/null
@@ -1,433 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
-)
-
-// DestinationBigqueryDenormalizedDatasetLocation - The location of the dataset. Warning: Changes made after creation will not be applied. The default "US" value is used if not set explicitly. Read more here.
-type DestinationBigqueryDenormalizedDatasetLocation string
-
-const (
- DestinationBigqueryDenormalizedDatasetLocationUs DestinationBigqueryDenormalizedDatasetLocation = "US"
- DestinationBigqueryDenormalizedDatasetLocationEu DestinationBigqueryDenormalizedDatasetLocation = "EU"
- DestinationBigqueryDenormalizedDatasetLocationAsiaEast1 DestinationBigqueryDenormalizedDatasetLocation = "asia-east1"
- DestinationBigqueryDenormalizedDatasetLocationAsiaEast2 DestinationBigqueryDenormalizedDatasetLocation = "asia-east2"
- DestinationBigqueryDenormalizedDatasetLocationAsiaNortheast1 DestinationBigqueryDenormalizedDatasetLocation = "asia-northeast1"
- DestinationBigqueryDenormalizedDatasetLocationAsiaNortheast2 DestinationBigqueryDenormalizedDatasetLocation = "asia-northeast2"
- DestinationBigqueryDenormalizedDatasetLocationAsiaNortheast3 DestinationBigqueryDenormalizedDatasetLocation = "asia-northeast3"
- DestinationBigqueryDenormalizedDatasetLocationAsiaSouth1 DestinationBigqueryDenormalizedDatasetLocation = "asia-south1"
- DestinationBigqueryDenormalizedDatasetLocationAsiaSouth2 DestinationBigqueryDenormalizedDatasetLocation = "asia-south2"
- DestinationBigqueryDenormalizedDatasetLocationAsiaSoutheast1 DestinationBigqueryDenormalizedDatasetLocation = "asia-southeast1"
- DestinationBigqueryDenormalizedDatasetLocationAsiaSoutheast2 DestinationBigqueryDenormalizedDatasetLocation = "asia-southeast2"
- DestinationBigqueryDenormalizedDatasetLocationAustraliaSoutheast1 DestinationBigqueryDenormalizedDatasetLocation = "australia-southeast1"
- DestinationBigqueryDenormalizedDatasetLocationAustraliaSoutheast2 DestinationBigqueryDenormalizedDatasetLocation = "australia-southeast2"
- DestinationBigqueryDenormalizedDatasetLocationEuropeCentral1 DestinationBigqueryDenormalizedDatasetLocation = "europe-central1"
- DestinationBigqueryDenormalizedDatasetLocationEuropeCentral2 DestinationBigqueryDenormalizedDatasetLocation = "europe-central2"
- DestinationBigqueryDenormalizedDatasetLocationEuropeNorth1 DestinationBigqueryDenormalizedDatasetLocation = "europe-north1"
- DestinationBigqueryDenormalizedDatasetLocationEuropeSouthwest1 DestinationBigqueryDenormalizedDatasetLocation = "europe-southwest1"
- DestinationBigqueryDenormalizedDatasetLocationEuropeWest1 DestinationBigqueryDenormalizedDatasetLocation = "europe-west1"
- DestinationBigqueryDenormalizedDatasetLocationEuropeWest2 DestinationBigqueryDenormalizedDatasetLocation = "europe-west2"
- DestinationBigqueryDenormalizedDatasetLocationEuropeWest3 DestinationBigqueryDenormalizedDatasetLocation = "europe-west3"
- DestinationBigqueryDenormalizedDatasetLocationEuropeWest4 DestinationBigqueryDenormalizedDatasetLocation = "europe-west4"
- DestinationBigqueryDenormalizedDatasetLocationEuropeWest6 DestinationBigqueryDenormalizedDatasetLocation = "europe-west6"
- DestinationBigqueryDenormalizedDatasetLocationEuropeWest7 DestinationBigqueryDenormalizedDatasetLocation = "europe-west7"
- DestinationBigqueryDenormalizedDatasetLocationEuropeWest8 DestinationBigqueryDenormalizedDatasetLocation = "europe-west8"
- DestinationBigqueryDenormalizedDatasetLocationEuropeWest9 DestinationBigqueryDenormalizedDatasetLocation = "europe-west9"
- DestinationBigqueryDenormalizedDatasetLocationMeWest1 DestinationBigqueryDenormalizedDatasetLocation = "me-west1"
- DestinationBigqueryDenormalizedDatasetLocationNorthamericaNortheast1 DestinationBigqueryDenormalizedDatasetLocation = "northamerica-northeast1"
- DestinationBigqueryDenormalizedDatasetLocationNorthamericaNortheast2 DestinationBigqueryDenormalizedDatasetLocation = "northamerica-northeast2"
- DestinationBigqueryDenormalizedDatasetLocationSouthamericaEast1 DestinationBigqueryDenormalizedDatasetLocation = "southamerica-east1"
- DestinationBigqueryDenormalizedDatasetLocationSouthamericaWest1 DestinationBigqueryDenormalizedDatasetLocation = "southamerica-west1"
- DestinationBigqueryDenormalizedDatasetLocationUsCentral1 DestinationBigqueryDenormalizedDatasetLocation = "us-central1"
- DestinationBigqueryDenormalizedDatasetLocationUsEast1 DestinationBigqueryDenormalizedDatasetLocation = "us-east1"
- DestinationBigqueryDenormalizedDatasetLocationUsEast2 DestinationBigqueryDenormalizedDatasetLocation = "us-east2"
- DestinationBigqueryDenormalizedDatasetLocationUsEast3 DestinationBigqueryDenormalizedDatasetLocation = "us-east3"
- DestinationBigqueryDenormalizedDatasetLocationUsEast4 DestinationBigqueryDenormalizedDatasetLocation = "us-east4"
- DestinationBigqueryDenormalizedDatasetLocationUsEast5 DestinationBigqueryDenormalizedDatasetLocation = "us-east5"
- DestinationBigqueryDenormalizedDatasetLocationUsWest1 DestinationBigqueryDenormalizedDatasetLocation = "us-west1"
- DestinationBigqueryDenormalizedDatasetLocationUsWest2 DestinationBigqueryDenormalizedDatasetLocation = "us-west2"
- DestinationBigqueryDenormalizedDatasetLocationUsWest3 DestinationBigqueryDenormalizedDatasetLocation = "us-west3"
- DestinationBigqueryDenormalizedDatasetLocationUsWest4 DestinationBigqueryDenormalizedDatasetLocation = "us-west4"
-)
-
-func (e DestinationBigqueryDenormalizedDatasetLocation) ToPointer() *DestinationBigqueryDenormalizedDatasetLocation {
- return &e
-}
-
-func (e *DestinationBigqueryDenormalizedDatasetLocation) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "US":
- fallthrough
- case "EU":
- fallthrough
- case "asia-east1":
- fallthrough
- case "asia-east2":
- fallthrough
- case "asia-northeast1":
- fallthrough
- case "asia-northeast2":
- fallthrough
- case "asia-northeast3":
- fallthrough
- case "asia-south1":
- fallthrough
- case "asia-south2":
- fallthrough
- case "asia-southeast1":
- fallthrough
- case "asia-southeast2":
- fallthrough
- case "australia-southeast1":
- fallthrough
- case "australia-southeast2":
- fallthrough
- case "europe-central1":
- fallthrough
- case "europe-central2":
- fallthrough
- case "europe-north1":
- fallthrough
- case "europe-southwest1":
- fallthrough
- case "europe-west1":
- fallthrough
- case "europe-west2":
- fallthrough
- case "europe-west3":
- fallthrough
- case "europe-west4":
- fallthrough
- case "europe-west6":
- fallthrough
- case "europe-west7":
- fallthrough
- case "europe-west8":
- fallthrough
- case "europe-west9":
- fallthrough
- case "me-west1":
- fallthrough
- case "northamerica-northeast1":
- fallthrough
- case "northamerica-northeast2":
- fallthrough
- case "southamerica-east1":
- fallthrough
- case "southamerica-west1":
- fallthrough
- case "us-central1":
- fallthrough
- case "us-east1":
- fallthrough
- case "us-east2":
- fallthrough
- case "us-east3":
- fallthrough
- case "us-east4":
- fallthrough
- case "us-east5":
- fallthrough
- case "us-west1":
- fallthrough
- case "us-west2":
- fallthrough
- case "us-west3":
- fallthrough
- case "us-west4":
- *e = DestinationBigqueryDenormalizedDatasetLocation(v)
- return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryDenormalizedDatasetLocation: %v", v)
- }
-}
-
-type DestinationBigqueryDenormalizedBigqueryDenormalized string
-
-const (
- DestinationBigqueryDenormalizedBigqueryDenormalizedBigqueryDenormalized DestinationBigqueryDenormalizedBigqueryDenormalized = "bigquery-denormalized"
-)
-
-func (e DestinationBigqueryDenormalizedBigqueryDenormalized) ToPointer() *DestinationBigqueryDenormalizedBigqueryDenormalized {
- return &e
-}
-
-func (e *DestinationBigqueryDenormalizedBigqueryDenormalized) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "bigquery-denormalized":
- *e = DestinationBigqueryDenormalizedBigqueryDenormalized(v)
- return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryDenormalizedBigqueryDenormalized: %v", v)
- }
-}
-
-type DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType string
-
-const (
- DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialTypeHmacKey DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType = "HMAC_KEY"
-)
-
-func (e DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType) ToPointer() *DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType {
- return &e
-}
-
-func (e *DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "HMAC_KEY":
- *e = DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType: %v", v)
- }
-}
-
-// DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey - An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
-type DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey struct {
- CredentialType DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKeyCredentialType `json:"credential_type"`
- // HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
- HmacKeyAccessID string `json:"hmac_key_access_id"`
- // The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
- HmacKeySecret string `json:"hmac_key_secret"`
-}
-
-type DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialType string
-
-const (
- DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialTypeDestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialType = "destination-bigquery-denormalized_Loading Method_GCS Staging_Credential_HMAC key"
-)
-
-type DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredential struct {
- DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey *DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey
-
- Type DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialType
-}
-
-func CreateDestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialDestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey(destinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey) DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredential {
- typ := DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialTypeDestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey
-
- return DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredential{
- DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey: &destinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey,
- Type: typ,
- }
-}
-
-func (u *DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredential) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey := new(DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey); err == nil {
- u.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey = destinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey
- u.Type = DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialTypeDestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredential) MarshalJSON() ([]byte, error) {
- if u.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey != nil {
- return json.Marshal(u.DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredentialHMACKey)
- }
-
- return nil, nil
-}
-
-// DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing - This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-type DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing string
-
-const (
- DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingDeleteAllTmpFilesFromGcs DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing = "Delete all tmp files from GCS"
- DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingKeepAllTmpFilesInGcs DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing = "Keep all tmp files in GCS"
-)
-
-func (e DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing) ToPointer() *DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing {
- return &e
-}
-
-func (e *DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "Delete all tmp files from GCS":
- fallthrough
- case "Keep all tmp files in GCS":
- *e = DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing(v)
- return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing: %v", v)
- }
-}
-
-type DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod string
-
-const (
- DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethodGcsStaging DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod = "GCS Staging"
-)
-
-func (e DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod) ToPointer() *DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod {
- return &e
-}
-
-func (e *DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "GCS Staging":
- *e = DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod(v)
- return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod: %v", v)
- }
-}
-
-// DestinationBigqueryDenormalizedLoadingMethodGCSStaging - Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
-type DestinationBigqueryDenormalizedLoadingMethodGCSStaging struct {
- // An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
- Credential DestinationBigqueryDenormalizedLoadingMethodGCSStagingCredential `json:"credential"`
- // Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
- FileBufferCount *int64 `json:"file_buffer_count,omitempty"`
- // The name of the GCS bucket. Read more here.
- GcsBucketName string `json:"gcs_bucket_name"`
- // Directory under the GCS bucket where data will be written. Read more here.
- GcsBucketPath string `json:"gcs_bucket_path"`
- // This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
- KeepFilesInGcsBucket *DestinationBigqueryDenormalizedLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing `json:"keep_files_in_gcs-bucket,omitempty"`
- Method DestinationBigqueryDenormalizedLoadingMethodGCSStagingMethod `json:"method"`
-}
-
-type DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod string
-
-const (
- DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethodStandard DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod = "Standard"
-)
-
-func (e DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod) ToPointer() *DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod {
- return &e
-}
-
-func (e *DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "Standard":
- *e = DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod(v)
- return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod: %v", v)
- }
-}
-
-// DestinationBigqueryDenormalizedLoadingMethodStandardInserts - Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
-type DestinationBigqueryDenormalizedLoadingMethodStandardInserts struct {
- Method DestinationBigqueryDenormalizedLoadingMethodStandardInsertsMethod `json:"method"`
-}
-
-type DestinationBigqueryDenormalizedLoadingMethodType string
-
-const (
- DestinationBigqueryDenormalizedLoadingMethodTypeDestinationBigqueryDenormalizedLoadingMethodStandardInserts DestinationBigqueryDenormalizedLoadingMethodType = "destination-bigquery-denormalized_Loading Method_Standard Inserts"
- DestinationBigqueryDenormalizedLoadingMethodTypeDestinationBigqueryDenormalizedLoadingMethodGCSStaging DestinationBigqueryDenormalizedLoadingMethodType = "destination-bigquery-denormalized_Loading Method_GCS Staging"
-)
-
-type DestinationBigqueryDenormalizedLoadingMethod struct {
- DestinationBigqueryDenormalizedLoadingMethodStandardInserts *DestinationBigqueryDenormalizedLoadingMethodStandardInserts
- DestinationBigqueryDenormalizedLoadingMethodGCSStaging *DestinationBigqueryDenormalizedLoadingMethodGCSStaging
-
- Type DestinationBigqueryDenormalizedLoadingMethodType
-}
-
-func CreateDestinationBigqueryDenormalizedLoadingMethodDestinationBigqueryDenormalizedLoadingMethodStandardInserts(destinationBigqueryDenormalizedLoadingMethodStandardInserts DestinationBigqueryDenormalizedLoadingMethodStandardInserts) DestinationBigqueryDenormalizedLoadingMethod {
- typ := DestinationBigqueryDenormalizedLoadingMethodTypeDestinationBigqueryDenormalizedLoadingMethodStandardInserts
-
- return DestinationBigqueryDenormalizedLoadingMethod{
- DestinationBigqueryDenormalizedLoadingMethodStandardInserts: &destinationBigqueryDenormalizedLoadingMethodStandardInserts,
- Type: typ,
- }
-}
-
-func CreateDestinationBigqueryDenormalizedLoadingMethodDestinationBigqueryDenormalizedLoadingMethodGCSStaging(destinationBigqueryDenormalizedLoadingMethodGCSStaging DestinationBigqueryDenormalizedLoadingMethodGCSStaging) DestinationBigqueryDenormalizedLoadingMethod {
- typ := DestinationBigqueryDenormalizedLoadingMethodTypeDestinationBigqueryDenormalizedLoadingMethodGCSStaging
-
- return DestinationBigqueryDenormalizedLoadingMethod{
- DestinationBigqueryDenormalizedLoadingMethodGCSStaging: &destinationBigqueryDenormalizedLoadingMethodGCSStaging,
- Type: typ,
- }
-}
-
-func (u *DestinationBigqueryDenormalizedLoadingMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationBigqueryDenormalizedLoadingMethodStandardInserts := new(DestinationBigqueryDenormalizedLoadingMethodStandardInserts)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryDenormalizedLoadingMethodStandardInserts); err == nil {
- u.DestinationBigqueryDenormalizedLoadingMethodStandardInserts = destinationBigqueryDenormalizedLoadingMethodStandardInserts
- u.Type = DestinationBigqueryDenormalizedLoadingMethodTypeDestinationBigqueryDenormalizedLoadingMethodStandardInserts
- return nil
- }
-
- destinationBigqueryDenormalizedLoadingMethodGCSStaging := new(DestinationBigqueryDenormalizedLoadingMethodGCSStaging)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryDenormalizedLoadingMethodGCSStaging); err == nil {
- u.DestinationBigqueryDenormalizedLoadingMethodGCSStaging = destinationBigqueryDenormalizedLoadingMethodGCSStaging
- u.Type = DestinationBigqueryDenormalizedLoadingMethodTypeDestinationBigqueryDenormalizedLoadingMethodGCSStaging
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u DestinationBigqueryDenormalizedLoadingMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationBigqueryDenormalizedLoadingMethodStandardInserts != nil {
- return json.Marshal(u.DestinationBigqueryDenormalizedLoadingMethodStandardInserts)
- }
-
- if u.DestinationBigqueryDenormalizedLoadingMethodGCSStaging != nil {
- return json.Marshal(u.DestinationBigqueryDenormalizedLoadingMethodGCSStaging)
- }
-
- return nil, nil
-}
-
-type DestinationBigqueryDenormalized struct {
- // Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.
- BigQueryClientBufferSizeMb *int64 `json:"big_query_client_buffer_size_mb,omitempty"`
- // The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
- CredentialsJSON *string `json:"credentials_json,omitempty"`
- // The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
- DatasetID string `json:"dataset_id"`
- // The location of the dataset. Warning: Changes made after creation will not be applied. The default "US" value is used if not set explicitly. Read more here.
- DatasetLocation *DestinationBigqueryDenormalizedDatasetLocation `json:"dataset_location,omitempty"`
- DestinationType DestinationBigqueryDenormalizedBigqueryDenormalized `json:"destinationType"`
- // Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
- LoadingMethod *DestinationBigqueryDenormalizedLoadingMethod `json:"loading_method,omitempty"`
- // The GCP project ID for the project containing the target BigQuery dataset. Read more here.
- ProjectID string `json:"project_id"`
-}
diff --git a/internal/sdk/pkg/models/shared/destinationbigquerydenormalizedcreaterequest.go b/internal/sdk/pkg/models/shared/destinationbigquerydenormalizedcreaterequest.go
deleted file mode 100755
index dce40c9d3..000000000
--- a/internal/sdk/pkg/models/shared/destinationbigquerydenormalizedcreaterequest.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type DestinationBigqueryDenormalizedCreateRequest struct {
- Configuration DestinationBigqueryDenormalized `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/destinationbigquerydenormalizedputrequest.go b/internal/sdk/pkg/models/shared/destinationbigquerydenormalizedputrequest.go
deleted file mode 100755
index c6598ecab..000000000
--- a/internal/sdk/pkg/models/shared/destinationbigquerydenormalizedputrequest.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type DestinationBigqueryDenormalizedPutRequest struct {
- Configuration DestinationBigqueryDenormalizedUpdate `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/destinationbigquerydenormalizedupdate.go b/internal/sdk/pkg/models/shared/destinationbigquerydenormalizedupdate.go
deleted file mode 100755
index 64b214e77..000000000
--- a/internal/sdk/pkg/models/shared/destinationbigquerydenormalizedupdate.go
+++ /dev/null
@@ -1,408 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
-)
-
-// DestinationBigqueryDenormalizedUpdateDatasetLocation - The location of the dataset. Warning: Changes made after creation will not be applied. The default "US" value is used if not set explicitly. Read more here.
-type DestinationBigqueryDenormalizedUpdateDatasetLocation string
-
-const (
- DestinationBigqueryDenormalizedUpdateDatasetLocationUs DestinationBigqueryDenormalizedUpdateDatasetLocation = "US"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEu DestinationBigqueryDenormalizedUpdateDatasetLocation = "EU"
- DestinationBigqueryDenormalizedUpdateDatasetLocationAsiaEast1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "asia-east1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationAsiaEast2 DestinationBigqueryDenormalizedUpdateDatasetLocation = "asia-east2"
- DestinationBigqueryDenormalizedUpdateDatasetLocationAsiaNortheast1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "asia-northeast1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationAsiaNortheast2 DestinationBigqueryDenormalizedUpdateDatasetLocation = "asia-northeast2"
- DestinationBigqueryDenormalizedUpdateDatasetLocationAsiaNortheast3 DestinationBigqueryDenormalizedUpdateDatasetLocation = "asia-northeast3"
- DestinationBigqueryDenormalizedUpdateDatasetLocationAsiaSouth1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "asia-south1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationAsiaSouth2 DestinationBigqueryDenormalizedUpdateDatasetLocation = "asia-south2"
- DestinationBigqueryDenormalizedUpdateDatasetLocationAsiaSoutheast1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "asia-southeast1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationAsiaSoutheast2 DestinationBigqueryDenormalizedUpdateDatasetLocation = "asia-southeast2"
- DestinationBigqueryDenormalizedUpdateDatasetLocationAustraliaSoutheast1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "australia-southeast1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationAustraliaSoutheast2 DestinationBigqueryDenormalizedUpdateDatasetLocation = "australia-southeast2"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeCentral1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-central1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeCentral2 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-central2"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeNorth1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-north1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeSouthwest1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-southwest1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeWest1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-west1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeWest2 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-west2"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeWest3 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-west3"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeWest4 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-west4"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeWest6 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-west6"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeWest7 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-west7"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeWest8 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-west8"
- DestinationBigqueryDenormalizedUpdateDatasetLocationEuropeWest9 DestinationBigqueryDenormalizedUpdateDatasetLocation = "europe-west9"
- DestinationBigqueryDenormalizedUpdateDatasetLocationMeWest1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "me-west1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationNorthamericaNortheast1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "northamerica-northeast1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationNorthamericaNortheast2 DestinationBigqueryDenormalizedUpdateDatasetLocation = "northamerica-northeast2"
- DestinationBigqueryDenormalizedUpdateDatasetLocationSouthamericaEast1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "southamerica-east1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationSouthamericaWest1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "southamerica-west1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationUsCentral1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "us-central1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationUsEast1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "us-east1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationUsEast2 DestinationBigqueryDenormalizedUpdateDatasetLocation = "us-east2"
- DestinationBigqueryDenormalizedUpdateDatasetLocationUsEast3 DestinationBigqueryDenormalizedUpdateDatasetLocation = "us-east3"
- DestinationBigqueryDenormalizedUpdateDatasetLocationUsEast4 DestinationBigqueryDenormalizedUpdateDatasetLocation = "us-east4"
- DestinationBigqueryDenormalizedUpdateDatasetLocationUsEast5 DestinationBigqueryDenormalizedUpdateDatasetLocation = "us-east5"
- DestinationBigqueryDenormalizedUpdateDatasetLocationUsWest1 DestinationBigqueryDenormalizedUpdateDatasetLocation = "us-west1"
- DestinationBigqueryDenormalizedUpdateDatasetLocationUsWest2 DestinationBigqueryDenormalizedUpdateDatasetLocation = "us-west2"
- DestinationBigqueryDenormalizedUpdateDatasetLocationUsWest3 DestinationBigqueryDenormalizedUpdateDatasetLocation = "us-west3"
- DestinationBigqueryDenormalizedUpdateDatasetLocationUsWest4 DestinationBigqueryDenormalizedUpdateDatasetLocation = "us-west4"
-)
-
-func (e DestinationBigqueryDenormalizedUpdateDatasetLocation) ToPointer() *DestinationBigqueryDenormalizedUpdateDatasetLocation {
- return &e
-}
-
-func (e *DestinationBigqueryDenormalizedUpdateDatasetLocation) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "US":
- fallthrough
- case "EU":
- fallthrough
- case "asia-east1":
- fallthrough
- case "asia-east2":
- fallthrough
- case "asia-northeast1":
- fallthrough
- case "asia-northeast2":
- fallthrough
- case "asia-northeast3":
- fallthrough
- case "asia-south1":
- fallthrough
- case "asia-south2":
- fallthrough
- case "asia-southeast1":
- fallthrough
- case "asia-southeast2":
- fallthrough
- case "australia-southeast1":
- fallthrough
- case "australia-southeast2":
- fallthrough
- case "europe-central1":
- fallthrough
- case "europe-central2":
- fallthrough
- case "europe-north1":
- fallthrough
- case "europe-southwest1":
- fallthrough
- case "europe-west1":
- fallthrough
- case "europe-west2":
- fallthrough
- case "europe-west3":
- fallthrough
- case "europe-west4":
- fallthrough
- case "europe-west6":
- fallthrough
- case "europe-west7":
- fallthrough
- case "europe-west8":
- fallthrough
- case "europe-west9":
- fallthrough
- case "me-west1":
- fallthrough
- case "northamerica-northeast1":
- fallthrough
- case "northamerica-northeast2":
- fallthrough
- case "southamerica-east1":
- fallthrough
- case "southamerica-west1":
- fallthrough
- case "us-central1":
- fallthrough
- case "us-east1":
- fallthrough
- case "us-east2":
- fallthrough
- case "us-east3":
- fallthrough
- case "us-east4":
- fallthrough
- case "us-east5":
- fallthrough
- case "us-west1":
- fallthrough
- case "us-west2":
- fallthrough
- case "us-west3":
- fallthrough
- case "us-west4":
- *e = DestinationBigqueryDenormalizedUpdateDatasetLocation(v)
- return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryDenormalizedUpdateDatasetLocation: %v", v)
- }
-}
-
-type DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType string
-
-const (
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialTypeHmacKey DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType = "HMAC_KEY"
-)
-
-func (e DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType) ToPointer() *DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType {
- return &e
-}
-
-func (e *DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "HMAC_KEY":
- *e = DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType: %v", v)
- }
-}
-
-// DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey - An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
-type DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey struct {
- CredentialType DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType `json:"credential_type"`
- // HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
- HmacKeyAccessID string `json:"hmac_key_access_id"`
- // The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
- HmacKeySecret string `json:"hmac_key_secret"`
-}
-
-type DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialType string
-
-const (
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialTypeDestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialType = "destination-bigquery-denormalized-update_Loading Method_GCS Staging_Credential_HMAC key"
-)
-
-type DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredential struct {
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey *DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey
-
- Type DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialType
-}
-
-func CreateDestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialDestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey(destinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey) DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredential {
- typ := DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialTypeDestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey
-
- return DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredential{
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey: &destinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey,
- Type: typ,
- }
-}
-
-func (u *DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredential) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey := new(DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey); err == nil {
- u.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey = destinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey
- u.Type = DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialTypeDestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredential) MarshalJSON() ([]byte, error) {
- if u.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey != nil {
- return json.Marshal(u.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredentialHMACKey)
- }
-
- return nil, nil
-}
-
-// DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing - This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-type DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing string
-
-const (
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingDeleteAllTmpFilesFromGcs DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing = "Delete all tmp files from GCS"
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingKeepAllTmpFilesInGcs DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing = "Keep all tmp files in GCS"
-)
-
-func (e DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing) ToPointer() *DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing {
- return &e
-}
-
-func (e *DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "Delete all tmp files from GCS":
- fallthrough
- case "Keep all tmp files in GCS":
- *e = DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing(v)
- return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing: %v", v)
- }
-}
-
-type DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingMethod string
-
-const (
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingMethodGcsStaging DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingMethod = "GCS Staging"
-)
-
-func (e DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingMethod) ToPointer() *DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingMethod {
- return &e
-}
-
-func (e *DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingMethod) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "GCS Staging":
- *e = DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingMethod(v)
- return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingMethod: %v", v)
- }
-}
-
-// DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging - Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
-type DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging struct {
- // An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
- Credential DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingCredential `json:"credential"`
- // Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
- FileBufferCount *int64 `json:"file_buffer_count,omitempty"`
- // The name of the GCS bucket. Read more here.
- GcsBucketName string `json:"gcs_bucket_name"`
- // Directory under the GCS bucket where data will be written. Read more here.
- GcsBucketPath string `json:"gcs_bucket_path"`
- // This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
- KeepFilesInGcsBucket *DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing `json:"keep_files_in_gcs-bucket,omitempty"`
- Method DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStagingMethod `json:"method"`
-}
-
-type DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInsertsMethod string
-
-const (
- DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInsertsMethodStandard DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInsertsMethod = "Standard"
-)
-
-func (e DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInsertsMethod) ToPointer() *DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInsertsMethod {
- return &e
-}
-
-func (e *DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInsertsMethod) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "Standard":
- *e = DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInsertsMethod(v)
- return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInsertsMethod: %v", v)
- }
-}
-
-// DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts - Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
-type DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts struct {
- Method DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInsertsMethod `json:"method"`
-}
-
-type DestinationBigqueryDenormalizedUpdateLoadingMethodType string
-
-const (
- DestinationBigqueryDenormalizedUpdateLoadingMethodTypeDestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts DestinationBigqueryDenormalizedUpdateLoadingMethodType = "destination-bigquery-denormalized-update_Loading Method_Standard Inserts"
- DestinationBigqueryDenormalizedUpdateLoadingMethodTypeDestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging DestinationBigqueryDenormalizedUpdateLoadingMethodType = "destination-bigquery-denormalized-update_Loading Method_GCS Staging"
-)
-
-type DestinationBigqueryDenormalizedUpdateLoadingMethod struct {
- DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts *DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging *DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging
-
- Type DestinationBigqueryDenormalizedUpdateLoadingMethodType
-}
-
-func CreateDestinationBigqueryDenormalizedUpdateLoadingMethodDestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts(destinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts) DestinationBigqueryDenormalizedUpdateLoadingMethod {
- typ := DestinationBigqueryDenormalizedUpdateLoadingMethodTypeDestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts
-
- return DestinationBigqueryDenormalizedUpdateLoadingMethod{
- DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts: &destinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts,
- Type: typ,
- }
-}
-
-func CreateDestinationBigqueryDenormalizedUpdateLoadingMethodDestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging(destinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging) DestinationBigqueryDenormalizedUpdateLoadingMethod {
- typ := DestinationBigqueryDenormalizedUpdateLoadingMethodTypeDestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging
-
- return DestinationBigqueryDenormalizedUpdateLoadingMethod{
- DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging: &destinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging,
- Type: typ,
- }
-}
-
-func (u *DestinationBigqueryDenormalizedUpdateLoadingMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts := new(DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts); err == nil {
- u.DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts = destinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts
- u.Type = DestinationBigqueryDenormalizedUpdateLoadingMethodTypeDestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts
- return nil
- }
-
- destinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging := new(DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging); err == nil {
- u.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging = destinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging
- u.Type = DestinationBigqueryDenormalizedUpdateLoadingMethodTypeDestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u DestinationBigqueryDenormalizedUpdateLoadingMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts != nil {
- return json.Marshal(u.DestinationBigqueryDenormalizedUpdateLoadingMethodStandardInserts)
- }
-
- if u.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging != nil {
- return json.Marshal(u.DestinationBigqueryDenormalizedUpdateLoadingMethodGCSStaging)
- }
-
- return nil, nil
-}
-
-type DestinationBigqueryDenormalizedUpdate struct {
- // Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.
- BigQueryClientBufferSizeMb *int64 `json:"big_query_client_buffer_size_mb,omitempty"`
- // The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
- CredentialsJSON *string `json:"credentials_json,omitempty"`
- // The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
- DatasetID string `json:"dataset_id"`
- // The location of the dataset. Warning: Changes made after creation will not be applied. The default "US" value is used if not set explicitly. Read more here.
- DatasetLocation *DestinationBigqueryDenormalizedUpdateDatasetLocation `json:"dataset_location,omitempty"`
- // Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
- LoadingMethod *DestinationBigqueryDenormalizedUpdateLoadingMethod `json:"loading_method,omitempty"`
- // The GCP project ID for the project containing the target BigQuery dataset. Read more here.
- ProjectID string `json:"project_id"`
-}
diff --git a/internal/sdk/pkg/models/shared/destinationbigqueryputrequest.go b/internal/sdk/pkg/models/shared/destinationbigqueryputrequest.go
old mode 100755
new mode 100644
index 759966aa5..ee58ebdb9
--- a/internal/sdk/pkg/models/shared/destinationbigqueryputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationbigqueryputrequest.go
@@ -7,3 +7,24 @@ type DestinationBigqueryPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationBigqueryPutRequest) GetConfiguration() DestinationBigqueryUpdate {
+ if o == nil {
+ return DestinationBigqueryUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationBigqueryPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationBigqueryPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationbigqueryupdate.go b/internal/sdk/pkg/models/shared/destinationbigqueryupdate.go
old mode 100755
new mode 100644
index ab98a64be..73e2e7c2f
--- a/internal/sdk/pkg/models/shared/destinationbigqueryupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationbigqueryupdate.go
@@ -3,63 +3,67 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// DestinationBigqueryUpdateDatasetLocation - The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.
-type DestinationBigqueryUpdateDatasetLocation string
+// DatasetLocation - The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.
+type DatasetLocation string
const (
- DestinationBigqueryUpdateDatasetLocationUs DestinationBigqueryUpdateDatasetLocation = "US"
- DestinationBigqueryUpdateDatasetLocationEu DestinationBigqueryUpdateDatasetLocation = "EU"
- DestinationBigqueryUpdateDatasetLocationAsiaEast1 DestinationBigqueryUpdateDatasetLocation = "asia-east1"
- DestinationBigqueryUpdateDatasetLocationAsiaEast2 DestinationBigqueryUpdateDatasetLocation = "asia-east2"
- DestinationBigqueryUpdateDatasetLocationAsiaNortheast1 DestinationBigqueryUpdateDatasetLocation = "asia-northeast1"
- DestinationBigqueryUpdateDatasetLocationAsiaNortheast2 DestinationBigqueryUpdateDatasetLocation = "asia-northeast2"
- DestinationBigqueryUpdateDatasetLocationAsiaNortheast3 DestinationBigqueryUpdateDatasetLocation = "asia-northeast3"
- DestinationBigqueryUpdateDatasetLocationAsiaSouth1 DestinationBigqueryUpdateDatasetLocation = "asia-south1"
- DestinationBigqueryUpdateDatasetLocationAsiaSouth2 DestinationBigqueryUpdateDatasetLocation = "asia-south2"
- DestinationBigqueryUpdateDatasetLocationAsiaSoutheast1 DestinationBigqueryUpdateDatasetLocation = "asia-southeast1"
- DestinationBigqueryUpdateDatasetLocationAsiaSoutheast2 DestinationBigqueryUpdateDatasetLocation = "asia-southeast2"
- DestinationBigqueryUpdateDatasetLocationAustraliaSoutheast1 DestinationBigqueryUpdateDatasetLocation = "australia-southeast1"
- DestinationBigqueryUpdateDatasetLocationAustraliaSoutheast2 DestinationBigqueryUpdateDatasetLocation = "australia-southeast2"
- DestinationBigqueryUpdateDatasetLocationEuropeCentral1 DestinationBigqueryUpdateDatasetLocation = "europe-central1"
- DestinationBigqueryUpdateDatasetLocationEuropeCentral2 DestinationBigqueryUpdateDatasetLocation = "europe-central2"
- DestinationBigqueryUpdateDatasetLocationEuropeNorth1 DestinationBigqueryUpdateDatasetLocation = "europe-north1"
- DestinationBigqueryUpdateDatasetLocationEuropeSouthwest1 DestinationBigqueryUpdateDatasetLocation = "europe-southwest1"
- DestinationBigqueryUpdateDatasetLocationEuropeWest1 DestinationBigqueryUpdateDatasetLocation = "europe-west1"
- DestinationBigqueryUpdateDatasetLocationEuropeWest2 DestinationBigqueryUpdateDatasetLocation = "europe-west2"
- DestinationBigqueryUpdateDatasetLocationEuropeWest3 DestinationBigqueryUpdateDatasetLocation = "europe-west3"
- DestinationBigqueryUpdateDatasetLocationEuropeWest4 DestinationBigqueryUpdateDatasetLocation = "europe-west4"
- DestinationBigqueryUpdateDatasetLocationEuropeWest6 DestinationBigqueryUpdateDatasetLocation = "europe-west6"
- DestinationBigqueryUpdateDatasetLocationEuropeWest7 DestinationBigqueryUpdateDatasetLocation = "europe-west7"
- DestinationBigqueryUpdateDatasetLocationEuropeWest8 DestinationBigqueryUpdateDatasetLocation = "europe-west8"
- DestinationBigqueryUpdateDatasetLocationEuropeWest9 DestinationBigqueryUpdateDatasetLocation = "europe-west9"
- DestinationBigqueryUpdateDatasetLocationMeWest1 DestinationBigqueryUpdateDatasetLocation = "me-west1"
- DestinationBigqueryUpdateDatasetLocationNorthamericaNortheast1 DestinationBigqueryUpdateDatasetLocation = "northamerica-northeast1"
- DestinationBigqueryUpdateDatasetLocationNorthamericaNortheast2 DestinationBigqueryUpdateDatasetLocation = "northamerica-northeast2"
- DestinationBigqueryUpdateDatasetLocationSouthamericaEast1 DestinationBigqueryUpdateDatasetLocation = "southamerica-east1"
- DestinationBigqueryUpdateDatasetLocationSouthamericaWest1 DestinationBigqueryUpdateDatasetLocation = "southamerica-west1"
- DestinationBigqueryUpdateDatasetLocationUsCentral1 DestinationBigqueryUpdateDatasetLocation = "us-central1"
- DestinationBigqueryUpdateDatasetLocationUsEast1 DestinationBigqueryUpdateDatasetLocation = "us-east1"
- DestinationBigqueryUpdateDatasetLocationUsEast2 DestinationBigqueryUpdateDatasetLocation = "us-east2"
- DestinationBigqueryUpdateDatasetLocationUsEast3 DestinationBigqueryUpdateDatasetLocation = "us-east3"
- DestinationBigqueryUpdateDatasetLocationUsEast4 DestinationBigqueryUpdateDatasetLocation = "us-east4"
- DestinationBigqueryUpdateDatasetLocationUsEast5 DestinationBigqueryUpdateDatasetLocation = "us-east5"
- DestinationBigqueryUpdateDatasetLocationUsWest1 DestinationBigqueryUpdateDatasetLocation = "us-west1"
- DestinationBigqueryUpdateDatasetLocationUsWest2 DestinationBigqueryUpdateDatasetLocation = "us-west2"
- DestinationBigqueryUpdateDatasetLocationUsWest3 DestinationBigqueryUpdateDatasetLocation = "us-west3"
- DestinationBigqueryUpdateDatasetLocationUsWest4 DestinationBigqueryUpdateDatasetLocation = "us-west4"
+ DatasetLocationUs DatasetLocation = "US"
+ DatasetLocationEu DatasetLocation = "EU"
+ DatasetLocationAsiaEast1 DatasetLocation = "asia-east1"
+ DatasetLocationAsiaEast2 DatasetLocation = "asia-east2"
+ DatasetLocationAsiaNortheast1 DatasetLocation = "asia-northeast1"
+ DatasetLocationAsiaNortheast2 DatasetLocation = "asia-northeast2"
+ DatasetLocationAsiaNortheast3 DatasetLocation = "asia-northeast3"
+ DatasetLocationAsiaSouth1 DatasetLocation = "asia-south1"
+ DatasetLocationAsiaSouth2 DatasetLocation = "asia-south2"
+ DatasetLocationAsiaSoutheast1 DatasetLocation = "asia-southeast1"
+ DatasetLocationAsiaSoutheast2 DatasetLocation = "asia-southeast2"
+ DatasetLocationAustraliaSoutheast1 DatasetLocation = "australia-southeast1"
+ DatasetLocationAustraliaSoutheast2 DatasetLocation = "australia-southeast2"
+ DatasetLocationEuropeCentral1 DatasetLocation = "europe-central1"
+ DatasetLocationEuropeCentral2 DatasetLocation = "europe-central2"
+ DatasetLocationEuropeNorth1 DatasetLocation = "europe-north1"
+ DatasetLocationEuropeSouthwest1 DatasetLocation = "europe-southwest1"
+ DatasetLocationEuropeWest1 DatasetLocation = "europe-west1"
+ DatasetLocationEuropeWest2 DatasetLocation = "europe-west2"
+ DatasetLocationEuropeWest3 DatasetLocation = "europe-west3"
+ DatasetLocationEuropeWest4 DatasetLocation = "europe-west4"
+ DatasetLocationEuropeWest6 DatasetLocation = "europe-west6"
+ DatasetLocationEuropeWest7 DatasetLocation = "europe-west7"
+ DatasetLocationEuropeWest8 DatasetLocation = "europe-west8"
+ DatasetLocationEuropeWest9 DatasetLocation = "europe-west9"
+ DatasetLocationEuropeWest12 DatasetLocation = "europe-west12"
+ DatasetLocationMeCentral1 DatasetLocation = "me-central1"
+ DatasetLocationMeCentral2 DatasetLocation = "me-central2"
+ DatasetLocationMeWest1 DatasetLocation = "me-west1"
+ DatasetLocationNorthamericaNortheast1 DatasetLocation = "northamerica-northeast1"
+ DatasetLocationNorthamericaNortheast2 DatasetLocation = "northamerica-northeast2"
+ DatasetLocationSouthamericaEast1 DatasetLocation = "southamerica-east1"
+ DatasetLocationSouthamericaWest1 DatasetLocation = "southamerica-west1"
+ DatasetLocationUsCentral1 DatasetLocation = "us-central1"
+ DatasetLocationUsEast1 DatasetLocation = "us-east1"
+ DatasetLocationUsEast2 DatasetLocation = "us-east2"
+ DatasetLocationUsEast3 DatasetLocation = "us-east3"
+ DatasetLocationUsEast4 DatasetLocation = "us-east4"
+ DatasetLocationUsEast5 DatasetLocation = "us-east5"
+ DatasetLocationUsSouth1 DatasetLocation = "us-south1"
+ DatasetLocationUsWest1 DatasetLocation = "us-west1"
+ DatasetLocationUsWest2 DatasetLocation = "us-west2"
+ DatasetLocationUsWest3 DatasetLocation = "us-west3"
+ DatasetLocationUsWest4 DatasetLocation = "us-west4"
)
-func (e DestinationBigqueryUpdateDatasetLocation) ToPointer() *DestinationBigqueryUpdateDatasetLocation {
+func (e DatasetLocation) ToPointer() *DatasetLocation {
return &e
}
-func (e *DestinationBigqueryUpdateDatasetLocation) UnmarshalJSON(data []byte) error {
+func (e *DatasetLocation) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -115,6 +119,12 @@ func (e *DestinationBigqueryUpdateDatasetLocation) UnmarshalJSON(data []byte) er
fallthrough
case "europe-west9":
fallthrough
+ case "europe-west12":
+ fallthrough
+ case "me-central1":
+ fallthrough
+ case "me-central2":
+ fallthrough
case "me-west1":
fallthrough
case "northamerica-northeast1":
@@ -137,6 +147,8 @@ func (e *DestinationBigqueryUpdateDatasetLocation) UnmarshalJSON(data []byte) er
fallthrough
case "us-east5":
fallthrough
+ case "us-south1":
+ fallthrough
case "us-west1":
fallthrough
case "us-west2":
@@ -144,103 +156,173 @@ func (e *DestinationBigqueryUpdateDatasetLocation) UnmarshalJSON(data []byte) er
case "us-west3":
fallthrough
case "us-west4":
- *e = DestinationBigqueryUpdateDatasetLocation(v)
+ *e = DatasetLocation(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DatasetLocation: %v", v)
+ }
+}
+
+type DestinationBigqueryUpdateMethod string
+
+const (
+ DestinationBigqueryUpdateMethodStandard DestinationBigqueryUpdateMethod = "Standard"
+)
+
+func (e DestinationBigqueryUpdateMethod) ToPointer() *DestinationBigqueryUpdateMethod {
+ return &e
+}
+
+func (e *DestinationBigqueryUpdateMethod) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Standard":
+ *e = DestinationBigqueryUpdateMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationBigqueryUpdateDatasetLocation: %v", v)
+ return fmt.Errorf("invalid value for DestinationBigqueryUpdateMethod: %v", v)
+ }
+}
+
+// StandardInserts - (not recommended) Direct loading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In all other cases, you should use GCS staging.
+type StandardInserts struct {
+ method DestinationBigqueryUpdateMethod `const:"Standard" json:"method"`
+}
+
+func (s StandardInserts) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *StandardInserts) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-type DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType string
+func (o *StandardInserts) GetMethod() DestinationBigqueryUpdateMethod {
+ return DestinationBigqueryUpdateMethodStandard
+}
+
+type DestinationBigqueryUpdateCredentialType string
const (
- DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialTypeHmacKey DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType = "HMAC_KEY"
+ DestinationBigqueryUpdateCredentialTypeHmacKey DestinationBigqueryUpdateCredentialType = "HMAC_KEY"
)
-func (e DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType) ToPointer() *DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType {
+func (e DestinationBigqueryUpdateCredentialType) ToPointer() *DestinationBigqueryUpdateCredentialType {
return &e
}
-func (e *DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType) UnmarshalJSON(data []byte) error {
+func (e *DestinationBigqueryUpdateCredentialType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "HMAC_KEY":
- *e = DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType(v)
+ *e = DestinationBigqueryUpdateCredentialType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType: %v", v)
+ return fmt.Errorf("invalid value for DestinationBigqueryUpdateCredentialType: %v", v)
}
}
-// DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey - An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
-type DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey struct {
- CredentialType DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKeyCredentialType `json:"credential_type"`
+// DestinationBigqueryUpdateHMACKey - An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
+type DestinationBigqueryUpdateHMACKey struct {
+ credentialType DestinationBigqueryUpdateCredentialType `const:"HMAC_KEY" json:"credential_type"`
// HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
HmacKeyAccessID string `json:"hmac_key_access_id"`
// The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
HmacKeySecret string `json:"hmac_key_secret"`
}
-type DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialType string
+func (d DestinationBigqueryUpdateHMACKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationBigqueryUpdateHMACKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationBigqueryUpdateHMACKey) GetCredentialType() DestinationBigqueryUpdateCredentialType {
+ return DestinationBigqueryUpdateCredentialTypeHmacKey
+}
+
+func (o *DestinationBigqueryUpdateHMACKey) GetHmacKeyAccessID() string {
+ if o == nil {
+ return ""
+ }
+ return o.HmacKeyAccessID
+}
+
+func (o *DestinationBigqueryUpdateHMACKey) GetHmacKeySecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.HmacKeySecret
+}
+
+type CredentialUnionType string
const (
- DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialTypeDestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialType = "destination-bigquery-update_Loading Method_GCS Staging_Credential_HMAC key"
+ CredentialUnionTypeDestinationBigqueryUpdateHMACKey CredentialUnionType = "destination-bigquery-update_HMAC key"
)
-type DestinationBigqueryUpdateLoadingMethodGCSStagingCredential struct {
- DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey *DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey
+type Credential struct {
+ DestinationBigqueryUpdateHMACKey *DestinationBigqueryUpdateHMACKey
- Type DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialType
+ Type CredentialUnionType
}
-func CreateDestinationBigqueryUpdateLoadingMethodGCSStagingCredentialDestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey(destinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey) DestinationBigqueryUpdateLoadingMethodGCSStagingCredential {
- typ := DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialTypeDestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey
+func CreateCredentialDestinationBigqueryUpdateHMACKey(destinationBigqueryUpdateHMACKey DestinationBigqueryUpdateHMACKey) Credential {
+ typ := CredentialUnionTypeDestinationBigqueryUpdateHMACKey
- return DestinationBigqueryUpdateLoadingMethodGCSStagingCredential{
- DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey: &destinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey,
- Type: typ,
+ return Credential{
+ DestinationBigqueryUpdateHMACKey: &destinationBigqueryUpdateHMACKey,
+ Type: typ,
}
}
-func (u *DestinationBigqueryUpdateLoadingMethodGCSStagingCredential) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *Credential) UnmarshalJSON(data []byte) error {
- destinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey := new(DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey); err == nil {
- u.DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey = destinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey
- u.Type = DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialTypeDestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey
+ destinationBigqueryUpdateHMACKey := new(DestinationBigqueryUpdateHMACKey)
+ if err := utils.UnmarshalJSON(data, &destinationBigqueryUpdateHMACKey, "", true, true); err == nil {
+ u.DestinationBigqueryUpdateHMACKey = destinationBigqueryUpdateHMACKey
+ u.Type = CredentialUnionTypeDestinationBigqueryUpdateHMACKey
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationBigqueryUpdateLoadingMethodGCSStagingCredential) MarshalJSON() ([]byte, error) {
- if u.DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey != nil {
- return json.Marshal(u.DestinationBigqueryUpdateLoadingMethodGCSStagingCredentialHMACKey)
+func (u Credential) MarshalJSON() ([]byte, error) {
+ if u.DestinationBigqueryUpdateHMACKey != nil {
+ return utils.MarshalJSON(u.DestinationBigqueryUpdateHMACKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing - This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
-type DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing string
+// GCSTmpFilesAfterwardProcessing - This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
+type GCSTmpFilesAfterwardProcessing string
const (
- DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingDeleteAllTmpFilesFromGcs DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing = "Delete all tmp files from GCS"
- DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessingKeepAllTmpFilesInGcs DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing = "Keep all tmp files in GCS"
+ GCSTmpFilesAfterwardProcessingDeleteAllTmpFilesFromGcs GCSTmpFilesAfterwardProcessing = "Delete all tmp files from GCS"
+ GCSTmpFilesAfterwardProcessingKeepAllTmpFilesInGcs GCSTmpFilesAfterwardProcessing = "Keep all tmp files in GCS"
)
-func (e DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing) ToPointer() *DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing {
+func (e GCSTmpFilesAfterwardProcessing) ToPointer() *GCSTmpFilesAfterwardProcessing {
return &e
}
-func (e *DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing) UnmarshalJSON(data []byte) error {
+func (e *GCSTmpFilesAfterwardProcessing) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -249,162 +331,169 @@ func (e *DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardPro
case "Delete all tmp files from GCS":
fallthrough
case "Keep all tmp files in GCS":
- *e = DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing(v)
+ *e = GCSTmpFilesAfterwardProcessing(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing: %v", v)
+ return fmt.Errorf("invalid value for GCSTmpFilesAfterwardProcessing: %v", v)
}
}
-type DestinationBigqueryUpdateLoadingMethodGCSStagingMethod string
+type Method string
const (
- DestinationBigqueryUpdateLoadingMethodGCSStagingMethodGcsStaging DestinationBigqueryUpdateLoadingMethodGCSStagingMethod = "GCS Staging"
+ MethodGcsStaging Method = "GCS Staging"
)
-func (e DestinationBigqueryUpdateLoadingMethodGCSStagingMethod) ToPointer() *DestinationBigqueryUpdateLoadingMethodGCSStagingMethod {
+func (e Method) ToPointer() *Method {
return &e
}
-func (e *DestinationBigqueryUpdateLoadingMethodGCSStagingMethod) UnmarshalJSON(data []byte) error {
+func (e *Method) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GCS Staging":
- *e = DestinationBigqueryUpdateLoadingMethodGCSStagingMethod(v)
+ *e = Method(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationBigqueryUpdateLoadingMethodGCSStagingMethod: %v", v)
+ return fmt.Errorf("invalid value for Method: %v", v)
}
}
-// DestinationBigqueryUpdateLoadingMethodGCSStaging - Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
-type DestinationBigqueryUpdateLoadingMethodGCSStaging struct {
+// GCSStaging - (recommended) Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO to load your data into BigQuery. Provides best-in-class speed, reliability and scalability. Read more about GCS Staging here.
+type GCSStaging struct {
// An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
- Credential DestinationBigqueryUpdateLoadingMethodGCSStagingCredential `json:"credential"`
- // Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
- FileBufferCount *int64 `json:"file_buffer_count,omitempty"`
+ Credential Credential `json:"credential"`
// The name of the GCS bucket. Read more here.
GcsBucketName string `json:"gcs_bucket_name"`
// Directory under the GCS bucket where data will be written.
GcsBucketPath string `json:"gcs_bucket_path"`
// This upload method is supposed to temporary store records in GCS bucket. By this select you can chose if these records should be removed from GCS when migration has finished. The default "Delete all tmp files from GCS" value is used if not set explicitly.
- KeepFilesInGcsBucket *DestinationBigqueryUpdateLoadingMethodGCSStagingGCSTmpFilesAfterwardProcessing `json:"keep_files_in_gcs-bucket,omitempty"`
- Method DestinationBigqueryUpdateLoadingMethodGCSStagingMethod `json:"method"`
+ KeepFilesInGcsBucket *GCSTmpFilesAfterwardProcessing `default:"Delete all tmp files from GCS" json:"keep_files_in_gcs-bucket"`
+ method Method `const:"GCS Staging" json:"method"`
}
-type DestinationBigqueryUpdateLoadingMethodStandardInsertsMethod string
+func (g GCSStaging) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(g, "", false)
+}
-const (
- DestinationBigqueryUpdateLoadingMethodStandardInsertsMethodStandard DestinationBigqueryUpdateLoadingMethodStandardInsertsMethod = "Standard"
-)
+func (g *GCSStaging) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &g, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
-func (e DestinationBigqueryUpdateLoadingMethodStandardInsertsMethod) ToPointer() *DestinationBigqueryUpdateLoadingMethodStandardInsertsMethod {
- return &e
+func (o *GCSStaging) GetCredential() Credential {
+ if o == nil {
+ return Credential{}
+ }
+ return o.Credential
}
-func (e *DestinationBigqueryUpdateLoadingMethodStandardInsertsMethod) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
+func (o *GCSStaging) GetGcsBucketName() string {
+ if o == nil {
+ return ""
}
- switch v {
- case "Standard":
- *e = DestinationBigqueryUpdateLoadingMethodStandardInsertsMethod(v)
+ return o.GcsBucketName
+}
+
+func (o *GCSStaging) GetGcsBucketPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.GcsBucketPath
+}
+
+func (o *GCSStaging) GetKeepFilesInGcsBucket() *GCSTmpFilesAfterwardProcessing {
+ if o == nil {
return nil
- default:
- return fmt.Errorf("invalid value for DestinationBigqueryUpdateLoadingMethodStandardInsertsMethod: %v", v)
}
+ return o.KeepFilesInGcsBucket
}
-// DestinationBigqueryUpdateLoadingMethodStandardInserts - Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
-type DestinationBigqueryUpdateLoadingMethodStandardInserts struct {
- Method DestinationBigqueryUpdateLoadingMethodStandardInsertsMethod `json:"method"`
+func (o *GCSStaging) GetMethod() Method {
+ return MethodGcsStaging
}
-type DestinationBigqueryUpdateLoadingMethodType string
+type LoadingMethodType string
const (
- DestinationBigqueryUpdateLoadingMethodTypeDestinationBigqueryUpdateLoadingMethodStandardInserts DestinationBigqueryUpdateLoadingMethodType = "destination-bigquery-update_Loading Method_Standard Inserts"
- DestinationBigqueryUpdateLoadingMethodTypeDestinationBigqueryUpdateLoadingMethodGCSStaging DestinationBigqueryUpdateLoadingMethodType = "destination-bigquery-update_Loading Method_GCS Staging"
+ LoadingMethodTypeGCSStaging LoadingMethodType = "GCS Staging"
+ LoadingMethodTypeStandardInserts LoadingMethodType = "Standard Inserts"
)
-type DestinationBigqueryUpdateLoadingMethod struct {
- DestinationBigqueryUpdateLoadingMethodStandardInserts *DestinationBigqueryUpdateLoadingMethodStandardInserts
- DestinationBigqueryUpdateLoadingMethodGCSStaging *DestinationBigqueryUpdateLoadingMethodGCSStaging
+type LoadingMethod struct {
+ GCSStaging *GCSStaging
+ StandardInserts *StandardInserts
- Type DestinationBigqueryUpdateLoadingMethodType
+ Type LoadingMethodType
}
-func CreateDestinationBigqueryUpdateLoadingMethodDestinationBigqueryUpdateLoadingMethodStandardInserts(destinationBigqueryUpdateLoadingMethodStandardInserts DestinationBigqueryUpdateLoadingMethodStandardInserts) DestinationBigqueryUpdateLoadingMethod {
- typ := DestinationBigqueryUpdateLoadingMethodTypeDestinationBigqueryUpdateLoadingMethodStandardInserts
+func CreateLoadingMethodGCSStaging(gcsStaging GCSStaging) LoadingMethod {
+ typ := LoadingMethodTypeGCSStaging
- return DestinationBigqueryUpdateLoadingMethod{
- DestinationBigqueryUpdateLoadingMethodStandardInserts: &destinationBigqueryUpdateLoadingMethodStandardInserts,
- Type: typ,
+ return LoadingMethod{
+ GCSStaging: &gcsStaging,
+ Type: typ,
}
}
-func CreateDestinationBigqueryUpdateLoadingMethodDestinationBigqueryUpdateLoadingMethodGCSStaging(destinationBigqueryUpdateLoadingMethodGCSStaging DestinationBigqueryUpdateLoadingMethodGCSStaging) DestinationBigqueryUpdateLoadingMethod {
- typ := DestinationBigqueryUpdateLoadingMethodTypeDestinationBigqueryUpdateLoadingMethodGCSStaging
+func CreateLoadingMethodStandardInserts(standardInserts StandardInserts) LoadingMethod {
+ typ := LoadingMethodTypeStandardInserts
- return DestinationBigqueryUpdateLoadingMethod{
- DestinationBigqueryUpdateLoadingMethodGCSStaging: &destinationBigqueryUpdateLoadingMethodGCSStaging,
- Type: typ,
+ return LoadingMethod{
+ StandardInserts: &standardInserts,
+ Type: typ,
}
}
-func (u *DestinationBigqueryUpdateLoadingMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *LoadingMethod) UnmarshalJSON(data []byte) error {
- destinationBigqueryUpdateLoadingMethodStandardInserts := new(DestinationBigqueryUpdateLoadingMethodStandardInserts)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryUpdateLoadingMethodStandardInserts); err == nil {
- u.DestinationBigqueryUpdateLoadingMethodStandardInserts = destinationBigqueryUpdateLoadingMethodStandardInserts
- u.Type = DestinationBigqueryUpdateLoadingMethodTypeDestinationBigqueryUpdateLoadingMethodStandardInserts
+ standardInserts := new(StandardInserts)
+ if err := utils.UnmarshalJSON(data, &standardInserts, "", true, true); err == nil {
+ u.StandardInserts = standardInserts
+ u.Type = LoadingMethodTypeStandardInserts
return nil
}
- destinationBigqueryUpdateLoadingMethodGCSStaging := new(DestinationBigqueryUpdateLoadingMethodGCSStaging)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationBigqueryUpdateLoadingMethodGCSStaging); err == nil {
- u.DestinationBigqueryUpdateLoadingMethodGCSStaging = destinationBigqueryUpdateLoadingMethodGCSStaging
- u.Type = DestinationBigqueryUpdateLoadingMethodTypeDestinationBigqueryUpdateLoadingMethodGCSStaging
+ gcsStaging := new(GCSStaging)
+ if err := utils.UnmarshalJSON(data, &gcsStaging, "", true, true); err == nil {
+ u.GCSStaging = gcsStaging
+ u.Type = LoadingMethodTypeGCSStaging
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationBigqueryUpdateLoadingMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationBigqueryUpdateLoadingMethodStandardInserts != nil {
- return json.Marshal(u.DestinationBigqueryUpdateLoadingMethodStandardInserts)
+func (u LoadingMethod) MarshalJSON() ([]byte, error) {
+ if u.GCSStaging != nil {
+ return utils.MarshalJSON(u.GCSStaging, "", true)
}
- if u.DestinationBigqueryUpdateLoadingMethodGCSStaging != nil {
- return json.Marshal(u.DestinationBigqueryUpdateLoadingMethodGCSStaging)
+ if u.StandardInserts != nil {
+ return utils.MarshalJSON(u.StandardInserts, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationBigqueryUpdateTransformationQueryRunType - Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly.
-type DestinationBigqueryUpdateTransformationQueryRunType string
+// TransformationQueryRunType - Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly.
+type TransformationQueryRunType string
const (
- DestinationBigqueryUpdateTransformationQueryRunTypeInteractive DestinationBigqueryUpdateTransformationQueryRunType = "interactive"
- DestinationBigqueryUpdateTransformationQueryRunTypeBatch DestinationBigqueryUpdateTransformationQueryRunType = "batch"
+ TransformationQueryRunTypeInteractive TransformationQueryRunType = "interactive"
+ TransformationQueryRunTypeBatch TransformationQueryRunType = "batch"
)
-func (e DestinationBigqueryUpdateTransformationQueryRunType) ToPointer() *DestinationBigqueryUpdateTransformationQueryRunType {
+func (e TransformationQueryRunType) ToPointer() *TransformationQueryRunType {
return &e
}
-func (e *DestinationBigqueryUpdateTransformationQueryRunType) UnmarshalJSON(data []byte) error {
+func (e *TransformationQueryRunType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -413,28 +502,104 @@ func (e *DestinationBigqueryUpdateTransformationQueryRunType) UnmarshalJSON(data
case "interactive":
fallthrough
case "batch":
- *e = DestinationBigqueryUpdateTransformationQueryRunType(v)
+ *e = TransformationQueryRunType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationBigqueryUpdateTransformationQueryRunType: %v", v)
+ return fmt.Errorf("invalid value for TransformationQueryRunType: %v", v)
}
}
type DestinationBigqueryUpdate struct {
// Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here.
- BigQueryClientBufferSizeMb *int64 `json:"big_query_client_buffer_size_mb,omitempty"`
+ BigQueryClientBufferSizeMb *int64 `default:"15" json:"big_query_client_buffer_size_mb"`
// The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
CredentialsJSON *string `json:"credentials_json,omitempty"`
// The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
DatasetID string `json:"dataset_id"`
// The location of the dataset. Warning: Changes made after creation will not be applied. Read more here.
- DatasetLocation DestinationBigqueryUpdateDatasetLocation `json:"dataset_location"`
- // Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here.
- LoadingMethod *DestinationBigqueryUpdateLoadingMethod `json:"loading_method,omitempty"`
+ DatasetLocation DatasetLocation `json:"dataset_location"`
+ // Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions
+ DisableTypeDedupe *bool `default:"false" json:"disable_type_dedupe"`
+ // The way data will be uploaded to BigQuery.
+ LoadingMethod *LoadingMethod `json:"loading_method,omitempty"`
// The GCP project ID for the project containing the target BigQuery dataset. Read more here.
ProjectID string `json:"project_id"`
- // The dataset to write raw tables into
+ // The dataset to write raw tables into (default: airbyte_internal)
RawDataDataset *string `json:"raw_data_dataset,omitempty"`
// Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly.
- TransformationPriority *DestinationBigqueryUpdateTransformationQueryRunType `json:"transformation_priority,omitempty"`
+ TransformationPriority *TransformationQueryRunType `default:"interactive" json:"transformation_priority"`
+}
+
+func (d DestinationBigqueryUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationBigqueryUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationBigqueryUpdate) GetBigQueryClientBufferSizeMb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BigQueryClientBufferSizeMb
+}
+
+func (o *DestinationBigqueryUpdate) GetCredentialsJSON() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CredentialsJSON
+}
+
+func (o *DestinationBigqueryUpdate) GetDatasetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatasetID
+}
+
+func (o *DestinationBigqueryUpdate) GetDatasetLocation() DatasetLocation {
+ if o == nil {
+ return DatasetLocation("")
+ }
+ return o.DatasetLocation
+}
+
+func (o *DestinationBigqueryUpdate) GetDisableTypeDedupe() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DisableTypeDedupe
+}
+
+func (o *DestinationBigqueryUpdate) GetLoadingMethod() *LoadingMethod {
+ if o == nil {
+ return nil
+ }
+ return o.LoadingMethod
+}
+
+func (o *DestinationBigqueryUpdate) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
+
+func (o *DestinationBigqueryUpdate) GetRawDataDataset() *string {
+ if o == nil {
+ return nil
+ }
+ return o.RawDataDataset
+}
+
+func (o *DestinationBigqueryUpdate) GetTransformationPriority() *TransformationQueryRunType {
+ if o == nil {
+ return nil
+ }
+ return o.TransformationPriority
}
diff --git a/internal/sdk/pkg/models/shared/destinationclickhouse.go b/internal/sdk/pkg/models/shared/destinationclickhouse.go
old mode 100755
new mode 100644
index 7ed90086b..03fd99b27
--- a/internal/sdk/pkg/models/shared/destinationclickhouse.go
+++ b/internal/sdk/pkg/models/shared/destinationclickhouse.go
@@ -3,215 +3,309 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationClickhouseClickhouse string
+type Clickhouse string
const (
- DestinationClickhouseClickhouseClickhouse DestinationClickhouseClickhouse = "clickhouse"
+ ClickhouseClickhouse Clickhouse = "clickhouse"
)
-func (e DestinationClickhouseClickhouse) ToPointer() *DestinationClickhouseClickhouse {
+func (e Clickhouse) ToPointer() *Clickhouse {
return &e
}
-func (e *DestinationClickhouseClickhouse) UnmarshalJSON(data []byte) error {
+func (e *Clickhouse) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "clickhouse":
- *e = DestinationClickhouseClickhouse(v)
+ *e = Clickhouse(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationClickhouseClickhouse: %v", v)
+ return fmt.Errorf("invalid value for Clickhouse: %v", v)
}
}
-// DestinationClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationClickhouseSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationClickhouseSchemasTunnelMethodTunnelMethod string
const (
- DestinationClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationClickhouseSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationClickhouseSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationClickhouseSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationClickhouseSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationClickhouseSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationClickhouseSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationClickhouseSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationClickhouseSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationClickhouseSSHTunnelMethodPasswordAuthentication struct {
+// DestinationClickhousePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationClickhousePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationClickhouseSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationClickhousePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationClickhousePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationClickhousePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationClickhousePasswordAuthentication) GetTunnelMethod() DestinationClickhouseSchemasTunnelMethodTunnelMethod {
+ return DestinationClickhouseSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationClickhousePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationClickhousePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationClickhousePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationClickhouseSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationClickhouseSchemasTunnelMethod string
const (
- DestinationClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationClickhouseSchemasTunnelMethodSSHKeyAuth DestinationClickhouseSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationClickhouseSchemasTunnelMethod) ToPointer() *DestinationClickhouseSchemasTunnelMethod {
return &e
}
-func (e *DestinationClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationClickhouseSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationClickhouseSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationClickhouseSchemasTunnelMethod: %v", v)
}
}
-// DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationClickhouseSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationClickhouseSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationClickhouseSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationClickhouseSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationClickhouseSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationClickhouseSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationClickhouseSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationClickhouseSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationClickhouseSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationClickhouseSSHKeyAuthentication) GetTunnelMethod() DestinationClickhouseSchemasTunnelMethod {
+ return DestinationClickhouseSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationClickhouseSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationClickhouseSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationClickhouseTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationClickhouseTunnelMethod string
const (
- DestinationClickhouseSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationClickhouseSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationClickhouseTunnelMethodNoTunnel DestinationClickhouseTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationClickhouseSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationClickhouseSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationClickhouseTunnelMethod) ToPointer() *DestinationClickhouseTunnelMethod {
return &e
}
-func (e *DestinationClickhouseSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationClickhouseTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationClickhouseSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationClickhouseTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationClickhouseSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationClickhouseTunnelMethod: %v", v)
}
}
-// DestinationClickhouseSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationClickhouseSSHTunnelMethodNoTunnel struct {
+// DestinationClickhouseNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationClickhouseNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationClickhouseSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationClickhouseTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationClickhouseNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationClickhouseNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationClickhouseNoTunnel) GetTunnelMethod() DestinationClickhouseTunnelMethod {
+ return DestinationClickhouseTunnelMethodNoTunnel
}
type DestinationClickhouseSSHTunnelMethodType string
const (
- DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHTunnelMethodNoTunnel DestinationClickhouseSSHTunnelMethodType = "destination-clickhouse_SSH Tunnel Method_No Tunnel"
- DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHTunnelMethodSSHKeyAuthentication DestinationClickhouseSSHTunnelMethodType = "destination-clickhouse_SSH Tunnel Method_SSH Key Authentication"
- DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHTunnelMethodPasswordAuthentication DestinationClickhouseSSHTunnelMethodType = "destination-clickhouse_SSH Tunnel Method_Password Authentication"
+ DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseNoTunnel DestinationClickhouseSSHTunnelMethodType = "destination-clickhouse_No Tunnel"
+ DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHKeyAuthentication DestinationClickhouseSSHTunnelMethodType = "destination-clickhouse_SSH Key Authentication"
+ DestinationClickhouseSSHTunnelMethodTypeDestinationClickhousePasswordAuthentication DestinationClickhouseSSHTunnelMethodType = "destination-clickhouse_Password Authentication"
)
type DestinationClickhouseSSHTunnelMethod struct {
- DestinationClickhouseSSHTunnelMethodNoTunnel *DestinationClickhouseSSHTunnelMethodNoTunnel
- DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication
- DestinationClickhouseSSHTunnelMethodPasswordAuthentication *DestinationClickhouseSSHTunnelMethodPasswordAuthentication
+ DestinationClickhouseNoTunnel *DestinationClickhouseNoTunnel
+ DestinationClickhouseSSHKeyAuthentication *DestinationClickhouseSSHKeyAuthentication
+ DestinationClickhousePasswordAuthentication *DestinationClickhousePasswordAuthentication
Type DestinationClickhouseSSHTunnelMethodType
}
-func CreateDestinationClickhouseSSHTunnelMethodDestinationClickhouseSSHTunnelMethodNoTunnel(destinationClickhouseSSHTunnelMethodNoTunnel DestinationClickhouseSSHTunnelMethodNoTunnel) DestinationClickhouseSSHTunnelMethod {
- typ := DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHTunnelMethodNoTunnel
+func CreateDestinationClickhouseSSHTunnelMethodDestinationClickhouseNoTunnel(destinationClickhouseNoTunnel DestinationClickhouseNoTunnel) DestinationClickhouseSSHTunnelMethod {
+ typ := DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseNoTunnel
return DestinationClickhouseSSHTunnelMethod{
- DestinationClickhouseSSHTunnelMethodNoTunnel: &destinationClickhouseSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationClickhouseNoTunnel: &destinationClickhouseNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationClickhouseSSHTunnelMethodDestinationClickhouseSSHTunnelMethodSSHKeyAuthentication(destinationClickhouseSSHTunnelMethodSSHKeyAuthentication DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication) DestinationClickhouseSSHTunnelMethod {
- typ := DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationClickhouseSSHTunnelMethodDestinationClickhouseSSHKeyAuthentication(destinationClickhouseSSHKeyAuthentication DestinationClickhouseSSHKeyAuthentication) DestinationClickhouseSSHTunnelMethod {
+ typ := DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHKeyAuthentication
return DestinationClickhouseSSHTunnelMethod{
- DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication: &destinationClickhouseSSHTunnelMethodSSHKeyAuthentication,
+ DestinationClickhouseSSHKeyAuthentication: &destinationClickhouseSSHKeyAuthentication,
Type: typ,
}
}
-func CreateDestinationClickhouseSSHTunnelMethodDestinationClickhouseSSHTunnelMethodPasswordAuthentication(destinationClickhouseSSHTunnelMethodPasswordAuthentication DestinationClickhouseSSHTunnelMethodPasswordAuthentication) DestinationClickhouseSSHTunnelMethod {
- typ := DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHTunnelMethodPasswordAuthentication
+func CreateDestinationClickhouseSSHTunnelMethodDestinationClickhousePasswordAuthentication(destinationClickhousePasswordAuthentication DestinationClickhousePasswordAuthentication) DestinationClickhouseSSHTunnelMethod {
+ typ := DestinationClickhouseSSHTunnelMethodTypeDestinationClickhousePasswordAuthentication
return DestinationClickhouseSSHTunnelMethod{
- DestinationClickhouseSSHTunnelMethodPasswordAuthentication: &destinationClickhouseSSHTunnelMethodPasswordAuthentication,
+ DestinationClickhousePasswordAuthentication: &destinationClickhousePasswordAuthentication,
Type: typ,
}
}
func (u *DestinationClickhouseSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationClickhouseSSHTunnelMethodNoTunnel := new(DestinationClickhouseSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationClickhouseSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationClickhouseSSHTunnelMethodNoTunnel = destinationClickhouseSSHTunnelMethodNoTunnel
- u.Type = DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHTunnelMethodNoTunnel
+
+ destinationClickhouseNoTunnel := new(DestinationClickhouseNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationClickhouseNoTunnel, "", true, true); err == nil {
+ u.DestinationClickhouseNoTunnel = destinationClickhouseNoTunnel
+ u.Type = DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseNoTunnel
return nil
}
- destinationClickhouseSSHTunnelMethodSSHKeyAuthentication := new(DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationClickhouseSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication = destinationClickhouseSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHTunnelMethodSSHKeyAuthentication
+ destinationClickhouseSSHKeyAuthentication := new(DestinationClickhouseSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationClickhouseSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationClickhouseSSHKeyAuthentication = destinationClickhouseSSHKeyAuthentication
+ u.Type = DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHKeyAuthentication
return nil
}
- destinationClickhouseSSHTunnelMethodPasswordAuthentication := new(DestinationClickhouseSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationClickhouseSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationClickhouseSSHTunnelMethodPasswordAuthentication = destinationClickhouseSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationClickhouseSSHTunnelMethodTypeDestinationClickhouseSSHTunnelMethodPasswordAuthentication
+ destinationClickhousePasswordAuthentication := new(DestinationClickhousePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationClickhousePasswordAuthentication, "", true, true); err == nil {
+ u.DestinationClickhousePasswordAuthentication = destinationClickhousePasswordAuthentication
+ u.Type = DestinationClickhouseSSHTunnelMethodTypeDestinationClickhousePasswordAuthentication
return nil
}
@@ -219,25 +313,25 @@ func (u *DestinationClickhouseSSHTunnelMethod) UnmarshalJSON(data []byte) error
}
func (u DestinationClickhouseSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationClickhouseSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationClickhouseSSHTunnelMethodNoTunnel)
+ if u.DestinationClickhouseNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationClickhouseNoTunnel, "", true)
}
- if u.DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationClickhouseSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationClickhouseSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationClickhouseSSHKeyAuthentication, "", true)
}
- if u.DestinationClickhouseSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationClickhouseSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationClickhousePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationClickhousePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationClickhouse struct {
// Name of the database.
- Database string `json:"database"`
- DestinationType DestinationClickhouseClickhouse `json:"destinationType"`
+ Database string `json:"database"`
+ destinationType Clickhouse `const:"clickhouse" json:"destinationType"`
// Hostname of the database.
Host string `json:"host"`
// Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
@@ -245,9 +339,73 @@ type DestinationClickhouse struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// HTTP port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"8123" json:"port"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *DestinationClickhouseSSHTunnelMethod `json:"tunnel_method,omitempty"`
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationClickhouse) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationClickhouse) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationClickhouse) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationClickhouse) GetDestinationType() Clickhouse {
+ return ClickhouseClickhouse
+}
+
+func (o *DestinationClickhouse) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationClickhouse) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationClickhouse) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationClickhouse) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationClickhouse) GetTunnelMethod() *DestinationClickhouseSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationClickhouse) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationclickhousecreaterequest.go b/internal/sdk/pkg/models/shared/destinationclickhousecreaterequest.go
old mode 100755
new mode 100644
index 06f5a0b37..12470231a
--- a/internal/sdk/pkg/models/shared/destinationclickhousecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationclickhousecreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationClickhouseCreateRequest struct {
Configuration DestinationClickhouse `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationClickhouseCreateRequest) GetConfiguration() DestinationClickhouse {
+ if o == nil {
+ return DestinationClickhouse{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationClickhouseCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationClickhouseCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationClickhouseCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationclickhouseputrequest.go b/internal/sdk/pkg/models/shared/destinationclickhouseputrequest.go
old mode 100755
new mode 100644
index 8ff88edae..9e71b412c
--- a/internal/sdk/pkg/models/shared/destinationclickhouseputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationclickhouseputrequest.go
@@ -7,3 +7,24 @@ type DestinationClickhousePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationClickhousePutRequest) GetConfiguration() DestinationClickhouseUpdate {
+ if o == nil {
+ return DestinationClickhouseUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationClickhousePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationClickhousePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationclickhouseupdate.go b/internal/sdk/pkg/models/shared/destinationclickhouseupdate.go
old mode 100755
new mode 100644
index bf0c2aa18..61091872b
--- a/internal/sdk/pkg/models/shared/destinationclickhouseupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationclickhouseupdate.go
@@ -3,211 +3,305 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationClickhouseUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationClickhouseUpdateSchemasTunnelMethod string
const (
- DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationClickhouseUpdateSchemasTunnelMethodSSHPasswordAuth DestinationClickhouseUpdateSchemasTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationClickhouseUpdateSchemasTunnelMethod) ToPointer() *DestinationClickhouseUpdateSchemasTunnelMethod {
return &e
}
-func (e *DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationClickhouseUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationClickhouseUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationClickhouseUpdateSchemasTunnelMethod: %v", v)
}
}
-// DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication struct {
+// PasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type PasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationClickhouseUpdateSchemasTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (p PasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(p, "", false)
+}
+
+func (p *PasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &p, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *PasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *PasswordAuthentication) GetTunnelMethod() DestinationClickhouseUpdateSchemasTunnelMethod {
+ return DestinationClickhouseUpdateSchemasTunnelMethodSSHPasswordAuth
+}
+
+func (o *PasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *PasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *PasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationClickhouseUpdateTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationClickhouseUpdateTunnelMethod string
const (
- DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationClickhouseUpdateTunnelMethodSSHKeyAuth DestinationClickhouseUpdateTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationClickhouseUpdateTunnelMethod) ToPointer() *DestinationClickhouseUpdateTunnelMethod {
return &e
}
-func (e *DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationClickhouseUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationClickhouseUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationClickhouseUpdateTunnelMethod: %v", v)
}
}
-// DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// SSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationClickhouseUpdateTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SSHKeyAuthentication) GetTunnelMethod() DestinationClickhouseUpdateTunnelMethod {
+ return DestinationClickhouseUpdateTunnelMethodSSHKeyAuth
+}
+
+func (o *SSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// TunnelMethod - No ssh tunnel needed to connect to database
+type TunnelMethod string
const (
- DestinationClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ TunnelMethodNoTunnel TunnelMethod = "NO_TUNNEL"
)
-func (e DestinationClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e TunnelMethod) ToPointer() *TunnelMethod {
return &e
}
-func (e *DestinationClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *TunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = TunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for TunnelMethod: %v", v)
}
}
-// DestinationClickhouseUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationClickhouseUpdateSSHTunnelMethodNoTunnel struct {
+// NoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type NoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod TunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (n NoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(n, "", false)
+}
+
+func (n *NoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &n, "", false, true); err != nil {
+ return err
+ }
+ return nil
}
-type DestinationClickhouseUpdateSSHTunnelMethodType string
+func (o *NoTunnel) GetTunnelMethod() TunnelMethod {
+ return TunnelMethodNoTunnel
+}
+
+type SSHTunnelMethodType string
const (
- DestinationClickhouseUpdateSSHTunnelMethodTypeDestinationClickhouseUpdateSSHTunnelMethodNoTunnel DestinationClickhouseUpdateSSHTunnelMethodType = "destination-clickhouse-update_SSH Tunnel Method_No Tunnel"
- DestinationClickhouseUpdateSSHTunnelMethodTypeDestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication DestinationClickhouseUpdateSSHTunnelMethodType = "destination-clickhouse-update_SSH Tunnel Method_SSH Key Authentication"
- DestinationClickhouseUpdateSSHTunnelMethodTypeDestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication DestinationClickhouseUpdateSSHTunnelMethodType = "destination-clickhouse-update_SSH Tunnel Method_Password Authentication"
+ SSHTunnelMethodTypeNoTunnel SSHTunnelMethodType = "No Tunnel"
+ SSHTunnelMethodTypeSSHKeyAuthentication SSHTunnelMethodType = "SSH Key Authentication"
+ SSHTunnelMethodTypePasswordAuthentication SSHTunnelMethodType = "Password Authentication"
)
-type DestinationClickhouseUpdateSSHTunnelMethod struct {
- DestinationClickhouseUpdateSSHTunnelMethodNoTunnel *DestinationClickhouseUpdateSSHTunnelMethodNoTunnel
- DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication
- DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication *DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication
+type SSHTunnelMethod struct {
+ NoTunnel *NoTunnel
+ SSHKeyAuthentication *SSHKeyAuthentication
+ PasswordAuthentication *PasswordAuthentication
- Type DestinationClickhouseUpdateSSHTunnelMethodType
+ Type SSHTunnelMethodType
}
-func CreateDestinationClickhouseUpdateSSHTunnelMethodDestinationClickhouseUpdateSSHTunnelMethodNoTunnel(destinationClickhouseUpdateSSHTunnelMethodNoTunnel DestinationClickhouseUpdateSSHTunnelMethodNoTunnel) DestinationClickhouseUpdateSSHTunnelMethod {
- typ := DestinationClickhouseUpdateSSHTunnelMethodTypeDestinationClickhouseUpdateSSHTunnelMethodNoTunnel
+func CreateSSHTunnelMethodNoTunnel(noTunnel NoTunnel) SSHTunnelMethod {
+ typ := SSHTunnelMethodTypeNoTunnel
- return DestinationClickhouseUpdateSSHTunnelMethod{
- DestinationClickhouseUpdateSSHTunnelMethodNoTunnel: &destinationClickhouseUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ return SSHTunnelMethod{
+ NoTunnel: &noTunnel,
+ Type: typ,
}
}
-func CreateDestinationClickhouseUpdateSSHTunnelMethodDestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication(destinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication) DestinationClickhouseUpdateSSHTunnelMethod {
- typ := DestinationClickhouseUpdateSSHTunnelMethodTypeDestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateSSHTunnelMethodSSHKeyAuthentication(sshKeyAuthentication SSHKeyAuthentication) SSHTunnelMethod {
+ typ := SSHTunnelMethodTypeSSHKeyAuthentication
- return DestinationClickhouseUpdateSSHTunnelMethod{
- DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication: &destinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ return SSHTunnelMethod{
+ SSHKeyAuthentication: &sshKeyAuthentication,
+ Type: typ,
}
}
-func CreateDestinationClickhouseUpdateSSHTunnelMethodDestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication(destinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication) DestinationClickhouseUpdateSSHTunnelMethod {
- typ := DestinationClickhouseUpdateSSHTunnelMethodTypeDestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication
+func CreateSSHTunnelMethodPasswordAuthentication(passwordAuthentication PasswordAuthentication) SSHTunnelMethod {
+ typ := SSHTunnelMethodTypePasswordAuthentication
- return DestinationClickhouseUpdateSSHTunnelMethod{
- DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication: &destinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ return SSHTunnelMethod{
+ PasswordAuthentication: &passwordAuthentication,
+ Type: typ,
}
}
-func (u *DestinationClickhouseUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *SSHTunnelMethod) UnmarshalJSON(data []byte) error {
- destinationClickhouseUpdateSSHTunnelMethodNoTunnel := new(DestinationClickhouseUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationClickhouseUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationClickhouseUpdateSSHTunnelMethodNoTunnel = destinationClickhouseUpdateSSHTunnelMethodNoTunnel
- u.Type = DestinationClickhouseUpdateSSHTunnelMethodTypeDestinationClickhouseUpdateSSHTunnelMethodNoTunnel
+ noTunnel := new(NoTunnel)
+ if err := utils.UnmarshalJSON(data, &noTunnel, "", true, true); err == nil {
+ u.NoTunnel = noTunnel
+ u.Type = SSHTunnelMethodTypeNoTunnel
return nil
}
- destinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication := new(DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication = destinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationClickhouseUpdateSSHTunnelMethodTypeDestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication
+ sshKeyAuthentication := new(SSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sshKeyAuthentication, "", true, true); err == nil {
+ u.SSHKeyAuthentication = sshKeyAuthentication
+ u.Type = SSHTunnelMethodTypeSSHKeyAuthentication
return nil
}
- destinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication := new(DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication = destinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationClickhouseUpdateSSHTunnelMethodTypeDestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication
+ passwordAuthentication := new(PasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &passwordAuthentication, "", true, true); err == nil {
+ u.PasswordAuthentication = passwordAuthentication
+ u.Type = SSHTunnelMethodTypePasswordAuthentication
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationClickhouseUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationClickhouseUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationClickhouseUpdateSSHTunnelMethodNoTunnel)
+func (u SSHTunnelMethod) MarshalJSON() ([]byte, error) {
+ if u.NoTunnel != nil {
+ return utils.MarshalJSON(u.NoTunnel, "", true)
}
- if u.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.SSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SSHKeyAuthentication, "", true)
}
- if u.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationClickhouseUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.PasswordAuthentication != nil {
+ return utils.MarshalJSON(u.PasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationClickhouseUpdate struct {
@@ -220,9 +314,69 @@ type DestinationClickhouseUpdate struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// HTTP port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"8123" json:"port"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
- TunnelMethod *DestinationClickhouseUpdateSSHTunnelMethod `json:"tunnel_method,omitempty"`
+ TunnelMethod *SSHTunnelMethod `json:"tunnel_method,omitempty"`
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationClickhouseUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationClickhouseUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationClickhouseUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationClickhouseUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationClickhouseUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationClickhouseUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationClickhouseUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationClickhouseUpdate) GetTunnelMethod() *SSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationClickhouseUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationconvex.go b/internal/sdk/pkg/models/shared/destinationconvex.go
old mode 100755
new mode 100644
index e6d28de91..260b6db89
--- a/internal/sdk/pkg/models/shared/destinationconvex.go
+++ b/internal/sdk/pkg/models/shared/destinationconvex.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationConvexConvex string
+type Convex string
const (
- DestinationConvexConvexConvex DestinationConvexConvex = "convex"
+ ConvexConvex Convex = "convex"
)
-func (e DestinationConvexConvex) ToPointer() *DestinationConvexConvex {
+func (e Convex) ToPointer() *Convex {
return &e
}
-func (e *DestinationConvexConvex) UnmarshalJSON(data []byte) error {
+func (e *Convex) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "convex":
- *e = DestinationConvexConvex(v)
+ *e = Convex(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationConvexConvex: %v", v)
+ return fmt.Errorf("invalid value for Convex: %v", v)
}
}
@@ -35,6 +36,35 @@ type DestinationConvex struct {
// API access key used to send data to a Convex deployment.
AccessKey string `json:"access_key"`
// URL of the Convex deployment that is the destination
- DeploymentURL string `json:"deployment_url"`
- DestinationType DestinationConvexConvex `json:"destinationType"`
+ DeploymentURL string `json:"deployment_url"`
+ destinationType Convex `const:"convex" json:"destinationType"`
+}
+
+func (d DestinationConvex) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationConvex) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationConvex) GetAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKey
+}
+
+func (o *DestinationConvex) GetDeploymentURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.DeploymentURL
+}
+
+func (o *DestinationConvex) GetDestinationType() Convex {
+ return ConvexConvex
}
diff --git a/internal/sdk/pkg/models/shared/destinationconvexcreaterequest.go b/internal/sdk/pkg/models/shared/destinationconvexcreaterequest.go
old mode 100755
new mode 100644
index b56f6f6fa..e41047d20
--- a/internal/sdk/pkg/models/shared/destinationconvexcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationconvexcreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationConvexCreateRequest struct {
Configuration DestinationConvex `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationConvexCreateRequest) GetConfiguration() DestinationConvex {
+ if o == nil {
+ return DestinationConvex{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationConvexCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationConvexCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationConvexCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationconvexputrequest.go b/internal/sdk/pkg/models/shared/destinationconvexputrequest.go
old mode 100755
new mode 100644
index 1747d93ed..af9d6351f
--- a/internal/sdk/pkg/models/shared/destinationconvexputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationconvexputrequest.go
@@ -7,3 +7,24 @@ type DestinationConvexPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationConvexPutRequest) GetConfiguration() DestinationConvexUpdate {
+ if o == nil {
+ return DestinationConvexUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationConvexPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationConvexPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationconvexupdate.go b/internal/sdk/pkg/models/shared/destinationconvexupdate.go
old mode 100755
new mode 100644
index 77cba8f74..4a6bce0ff
--- a/internal/sdk/pkg/models/shared/destinationconvexupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationconvexupdate.go
@@ -8,3 +8,17 @@ type DestinationConvexUpdate struct {
// URL of the Convex deployment that is the destination
DeploymentURL string `json:"deployment_url"`
}
+
+func (o *DestinationConvexUpdate) GetAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKey
+}
+
+func (o *DestinationConvexUpdate) GetDeploymentURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.DeploymentURL
+}
diff --git a/internal/sdk/pkg/models/shared/destinationcreaterequest.go b/internal/sdk/pkg/models/shared/destinationcreaterequest.go
old mode 100755
new mode 100644
index f640eff9c..5a7fe905d
--- a/internal/sdk/pkg/models/shared/destinationcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationcreaterequest.go
@@ -5,6 +5,37 @@ package shared
type DestinationCreateRequest struct {
// The values required to configure the destination.
Configuration interface{} `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationCreateRequest) GetConfiguration() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.Configuration
+}
+
+func (o *DestinationCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationcumulio.go b/internal/sdk/pkg/models/shared/destinationcumulio.go
old mode 100755
new mode 100644
index f86eb1982..616a5f5a4
--- a/internal/sdk/pkg/models/shared/destinationcumulio.go
+++ b/internal/sdk/pkg/models/shared/destinationcumulio.go
@@ -5,38 +5,75 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationCumulioCumulio string
+type Cumulio string
const (
- DestinationCumulioCumulioCumulio DestinationCumulioCumulio = "cumulio"
+ CumulioCumulio Cumulio = "cumulio"
)
-func (e DestinationCumulioCumulio) ToPointer() *DestinationCumulioCumulio {
+func (e Cumulio) ToPointer() *Cumulio {
return &e
}
-func (e *DestinationCumulioCumulio) UnmarshalJSON(data []byte) error {
+func (e *Cumulio) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "cumulio":
- *e = DestinationCumulioCumulio(v)
+ *e = Cumulio(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationCumulioCumulio: %v", v)
+ return fmt.Errorf("invalid value for Cumulio: %v", v)
}
}
type DestinationCumulio struct {
// URL of the Cumul.io API (e.g. 'https://api.cumul.io', 'https://api.us.cumul.io', or VPC-specific API url). Defaults to 'https://api.cumul.io'.
- APIHost string `json:"api_host"`
+ APIHost *string `default:"https://api.cumul.io" json:"api_host"`
// An API key generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).
APIKey string `json:"api_key"`
// The corresponding API token generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).
- APIToken string `json:"api_token"`
- DestinationType DestinationCumulioCumulio `json:"destinationType"`
+ APIToken string `json:"api_token"`
+ destinationType Cumulio `const:"cumulio" json:"destinationType"`
+}
+
+func (d DestinationCumulio) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationCumulio) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationCumulio) GetAPIHost() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIHost
+}
+
+func (o *DestinationCumulio) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *DestinationCumulio) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *DestinationCumulio) GetDestinationType() Cumulio {
+ return CumulioCumulio
}
diff --git a/internal/sdk/pkg/models/shared/destinationcumuliocreaterequest.go b/internal/sdk/pkg/models/shared/destinationcumuliocreaterequest.go
old mode 100755
new mode 100644
index c76a89da5..674968e70
--- a/internal/sdk/pkg/models/shared/destinationcumuliocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationcumuliocreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationCumulioCreateRequest struct {
Configuration DestinationCumulio `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationCumulioCreateRequest) GetConfiguration() DestinationCumulio {
+ if o == nil {
+ return DestinationCumulio{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationCumulioCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationCumulioCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationCumulioCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationcumulioputrequest.go b/internal/sdk/pkg/models/shared/destinationcumulioputrequest.go
old mode 100755
new mode 100644
index e230f904f..d74d87ef9
--- a/internal/sdk/pkg/models/shared/destinationcumulioputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationcumulioputrequest.go
@@ -7,3 +7,24 @@ type DestinationCumulioPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationCumulioPutRequest) GetConfiguration() DestinationCumulioUpdate {
+ if o == nil {
+ return DestinationCumulioUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationCumulioPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationCumulioPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationcumulioupdate.go b/internal/sdk/pkg/models/shared/destinationcumulioupdate.go
old mode 100755
new mode 100644
index 7c29ba187..8fce0864c
--- a/internal/sdk/pkg/models/shared/destinationcumulioupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationcumulioupdate.go
@@ -2,11 +2,47 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type DestinationCumulioUpdate struct {
// URL of the Cumul.io API (e.g. 'https://api.cumul.io', 'https://api.us.cumul.io', or VPC-specific API url). Defaults to 'https://api.cumul.io'.
- APIHost string `json:"api_host"`
+ APIHost *string `default:"https://api.cumul.io" json:"api_host"`
// An API key generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).
APIKey string `json:"api_key"`
// The corresponding API token generated in Cumul.io's platform (can be generated here: https://app.cumul.io/start/profile/integration).
APIToken string `json:"api_token"`
}
+
+func (d DestinationCumulioUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationCumulioUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationCumulioUpdate) GetAPIHost() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIHost
+}
+
+func (o *DestinationCumulioUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *DestinationCumulioUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
diff --git a/internal/sdk/pkg/models/shared/destinationdatabend.go b/internal/sdk/pkg/models/shared/destinationdatabend.go
old mode 100755
new mode 100644
index 65a02f578..721f78716
--- a/internal/sdk/pkg/models/shared/destinationdatabend.go
+++ b/internal/sdk/pkg/models/shared/destinationdatabend.go
@@ -5,44 +5,102 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationDatabendDatabend string
+type Databend string
const (
- DestinationDatabendDatabendDatabend DestinationDatabendDatabend = "databend"
+ DatabendDatabend Databend = "databend"
)
-func (e DestinationDatabendDatabend) ToPointer() *DestinationDatabendDatabend {
+func (e Databend) ToPointer() *Databend {
return &e
}
-func (e *DestinationDatabendDatabend) UnmarshalJSON(data []byte) error {
+func (e *Databend) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "databend":
- *e = DestinationDatabendDatabend(v)
+ *e = Databend(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDatabendDatabend: %v", v)
+ return fmt.Errorf("invalid value for Databend: %v", v)
}
}
type DestinationDatabend struct {
// Name of the database.
- Database string `json:"database"`
- DestinationType DestinationDatabendDatabend `json:"destinationType"`
+ Database string `json:"database"`
+ destinationType Databend `const:"databend" json:"destinationType"`
// Hostname of the database.
Host string `json:"host"`
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port *int64 `json:"port,omitempty"`
+ Port *int64 `default:"443" json:"port"`
// The default table was written to.
- Table *string `json:"table,omitempty"`
+ Table *string `default:"default" json:"table"`
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationDatabend) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDatabend) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDatabend) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationDatabend) GetDestinationType() Databend {
+ return DatabendDatabend
+}
+
+func (o *DestinationDatabend) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationDatabend) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationDatabend) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationDatabend) GetTable() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Table
+}
+
+func (o *DestinationDatabend) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationdatabendcreaterequest.go b/internal/sdk/pkg/models/shared/destinationdatabendcreaterequest.go
old mode 100755
new mode 100644
index d94d2ab4c..4eff01d52
--- a/internal/sdk/pkg/models/shared/destinationdatabendcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationdatabendcreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationDatabendCreateRequest struct {
Configuration DestinationDatabend `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationDatabendCreateRequest) GetConfiguration() DestinationDatabend {
+ if o == nil {
+ return DestinationDatabend{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationDatabendCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationDatabendCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationDatabendCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationdatabendputrequest.go b/internal/sdk/pkg/models/shared/destinationdatabendputrequest.go
old mode 100755
new mode 100644
index 1540b5848..8c954dcf3
--- a/internal/sdk/pkg/models/shared/destinationdatabendputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationdatabendputrequest.go
@@ -7,3 +7,24 @@ type DestinationDatabendPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationDatabendPutRequest) GetConfiguration() DestinationDatabendUpdate {
+ if o == nil {
+ return DestinationDatabendUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationDatabendPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationDatabendPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationdatabendupdate.go b/internal/sdk/pkg/models/shared/destinationdatabendupdate.go
old mode 100755
new mode 100644
index 67ea5a86a..8692c4834
--- a/internal/sdk/pkg/models/shared/destinationdatabendupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationdatabendupdate.go
@@ -2,6 +2,10 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type DestinationDatabendUpdate struct {
// Name of the database.
Database string `json:"database"`
@@ -10,9 +14,62 @@ type DestinationDatabendUpdate struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port *int64 `json:"port,omitempty"`
+ Port *int64 `default:"443" json:"port"`
// The default table was written to.
- Table *string `json:"table,omitempty"`
+ Table *string `default:"default" json:"table"`
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationDatabendUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDatabendUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDatabendUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationDatabendUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationDatabendUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationDatabendUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationDatabendUpdate) GetTable() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Table
+}
+
+func (o *DestinationDatabendUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationdatabricks.go b/internal/sdk/pkg/models/shared/destinationdatabricks.go
old mode 100755
new mode 100644
index e8ae885a8..ce884f49d
--- a/internal/sdk/pkg/models/shared/destinationdatabricks.go
+++ b/internal/sdk/pkg/models/shared/destinationdatabricks.go
@@ -3,110 +3,153 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationDatabricksDataSourceAzureBlobStorageDataSourceType string
+type DestinationDatabricksSchemasDataSourceDataSourceType string
const (
- DestinationDatabricksDataSourceAzureBlobStorageDataSourceTypeAzureBlobStorage DestinationDatabricksDataSourceAzureBlobStorageDataSourceType = "AZURE_BLOB_STORAGE"
+ DestinationDatabricksSchemasDataSourceDataSourceTypeAzureBlobStorage DestinationDatabricksSchemasDataSourceDataSourceType = "AZURE_BLOB_STORAGE"
)
-func (e DestinationDatabricksDataSourceAzureBlobStorageDataSourceType) ToPointer() *DestinationDatabricksDataSourceAzureBlobStorageDataSourceType {
+func (e DestinationDatabricksSchemasDataSourceDataSourceType) ToPointer() *DestinationDatabricksSchemasDataSourceDataSourceType {
return &e
}
-func (e *DestinationDatabricksDataSourceAzureBlobStorageDataSourceType) UnmarshalJSON(data []byte) error {
+func (e *DestinationDatabricksSchemasDataSourceDataSourceType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "AZURE_BLOB_STORAGE":
- *e = DestinationDatabricksDataSourceAzureBlobStorageDataSourceType(v)
+ *e = DestinationDatabricksSchemasDataSourceDataSourceType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDatabricksDataSourceAzureBlobStorageDataSourceType: %v", v)
+ return fmt.Errorf("invalid value for DestinationDatabricksSchemasDataSourceDataSourceType: %v", v)
}
}
-// DestinationDatabricksDataSourceAzureBlobStorage - Storage on which the delta lake is built.
-type DestinationDatabricksDataSourceAzureBlobStorage struct {
+// DestinationDatabricksAzureBlobStorage - Storage on which the delta lake is built.
+type DestinationDatabricksAzureBlobStorage struct {
// The account's name of the Azure Blob Storage.
AzureBlobStorageAccountName string `json:"azure_blob_storage_account_name"`
// The name of the Azure blob storage container.
AzureBlobStorageContainerName string `json:"azure_blob_storage_container_name"`
// This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
- AzureBlobStorageEndpointDomainName *string `json:"azure_blob_storage_endpoint_domain_name,omitempty"`
+ AzureBlobStorageEndpointDomainName *string `default:"blob.core.windows.net" json:"azure_blob_storage_endpoint_domain_name"`
// Shared access signature (SAS) token to grant limited access to objects in your storage account.
- AzureBlobStorageSasToken string `json:"azure_blob_storage_sas_token"`
- DataSourceType DestinationDatabricksDataSourceAzureBlobStorageDataSourceType `json:"data_source_type"`
+ AzureBlobStorageSasToken string `json:"azure_blob_storage_sas_token"`
+ dataSourceType DestinationDatabricksSchemasDataSourceDataSourceType `const:"AZURE_BLOB_STORAGE" json:"data_source_type"`
}
-type DestinationDatabricksDataSourceAmazonS3DataSourceType string
+func (d DestinationDatabricksAzureBlobStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDatabricksAzureBlobStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDatabricksAzureBlobStorage) GetAzureBlobStorageAccountName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageAccountName
+}
+
+func (o *DestinationDatabricksAzureBlobStorage) GetAzureBlobStorageContainerName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageContainerName
+}
+
+func (o *DestinationDatabricksAzureBlobStorage) GetAzureBlobStorageEndpointDomainName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageEndpointDomainName
+}
+
+func (o *DestinationDatabricksAzureBlobStorage) GetAzureBlobStorageSasToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageSasToken
+}
+
+func (o *DestinationDatabricksAzureBlobStorage) GetDataSourceType() DestinationDatabricksSchemasDataSourceDataSourceType {
+ return DestinationDatabricksSchemasDataSourceDataSourceTypeAzureBlobStorage
+}
+
+type DestinationDatabricksSchemasDataSourceType string
const (
- DestinationDatabricksDataSourceAmazonS3DataSourceTypeS3Storage DestinationDatabricksDataSourceAmazonS3DataSourceType = "S3_STORAGE"
+ DestinationDatabricksSchemasDataSourceTypeS3Storage DestinationDatabricksSchemasDataSourceType = "S3_STORAGE"
)
-func (e DestinationDatabricksDataSourceAmazonS3DataSourceType) ToPointer() *DestinationDatabricksDataSourceAmazonS3DataSourceType {
+func (e DestinationDatabricksSchemasDataSourceType) ToPointer() *DestinationDatabricksSchemasDataSourceType {
return &e
}
-func (e *DestinationDatabricksDataSourceAmazonS3DataSourceType) UnmarshalJSON(data []byte) error {
+func (e *DestinationDatabricksSchemasDataSourceType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "S3_STORAGE":
- *e = DestinationDatabricksDataSourceAmazonS3DataSourceType(v)
+ *e = DestinationDatabricksSchemasDataSourceType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDatabricksDataSourceAmazonS3DataSourceType: %v", v)
+ return fmt.Errorf("invalid value for DestinationDatabricksSchemasDataSourceType: %v", v)
}
}
-// DestinationDatabricksDataSourceAmazonS3S3BucketRegion - The region of the S3 staging bucket to use if utilising a copy strategy.
-type DestinationDatabricksDataSourceAmazonS3S3BucketRegion string
+// DestinationDatabricksS3BucketRegion - The region of the S3 staging bucket to use if utilising a copy strategy.
+type DestinationDatabricksS3BucketRegion string
const (
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionUnknown DestinationDatabricksDataSourceAmazonS3S3BucketRegion = ""
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionUsEast1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "us-east-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionUsEast2 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "us-east-2"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionUsWest1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "us-west-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionUsWest2 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "us-west-2"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionAfSouth1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "af-south-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionApEast1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "ap-east-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionApSouth1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "ap-south-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionApNortheast1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "ap-northeast-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionApNortheast2 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "ap-northeast-2"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionApNortheast3 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "ap-northeast-3"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionApSoutheast1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "ap-southeast-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionApSoutheast2 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "ap-southeast-2"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionCaCentral1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "ca-central-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionCnNorth1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "cn-north-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionCnNorthwest1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "cn-northwest-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionEuCentral1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "eu-central-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionEuNorth1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "eu-north-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionEuSouth1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "eu-south-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionEuWest1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "eu-west-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionEuWest2 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "eu-west-2"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionEuWest3 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "eu-west-3"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionSaEast1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "sa-east-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionMeSouth1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "me-south-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionUsGovEast1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "us-gov-east-1"
- DestinationDatabricksDataSourceAmazonS3S3BucketRegionUsGovWest1 DestinationDatabricksDataSourceAmazonS3S3BucketRegion = "us-gov-west-1"
+ DestinationDatabricksS3BucketRegionUnknown DestinationDatabricksS3BucketRegion = ""
+ DestinationDatabricksS3BucketRegionUsEast1 DestinationDatabricksS3BucketRegion = "us-east-1"
+ DestinationDatabricksS3BucketRegionUsEast2 DestinationDatabricksS3BucketRegion = "us-east-2"
+ DestinationDatabricksS3BucketRegionUsWest1 DestinationDatabricksS3BucketRegion = "us-west-1"
+ DestinationDatabricksS3BucketRegionUsWest2 DestinationDatabricksS3BucketRegion = "us-west-2"
+ DestinationDatabricksS3BucketRegionAfSouth1 DestinationDatabricksS3BucketRegion = "af-south-1"
+ DestinationDatabricksS3BucketRegionApEast1 DestinationDatabricksS3BucketRegion = "ap-east-1"
+ DestinationDatabricksS3BucketRegionApSouth1 DestinationDatabricksS3BucketRegion = "ap-south-1"
+ DestinationDatabricksS3BucketRegionApNortheast1 DestinationDatabricksS3BucketRegion = "ap-northeast-1"
+ DestinationDatabricksS3BucketRegionApNortheast2 DestinationDatabricksS3BucketRegion = "ap-northeast-2"
+ DestinationDatabricksS3BucketRegionApNortheast3 DestinationDatabricksS3BucketRegion = "ap-northeast-3"
+ DestinationDatabricksS3BucketRegionApSoutheast1 DestinationDatabricksS3BucketRegion = "ap-southeast-1"
+ DestinationDatabricksS3BucketRegionApSoutheast2 DestinationDatabricksS3BucketRegion = "ap-southeast-2"
+ DestinationDatabricksS3BucketRegionCaCentral1 DestinationDatabricksS3BucketRegion = "ca-central-1"
+ DestinationDatabricksS3BucketRegionCnNorth1 DestinationDatabricksS3BucketRegion = "cn-north-1"
+ DestinationDatabricksS3BucketRegionCnNorthwest1 DestinationDatabricksS3BucketRegion = "cn-northwest-1"
+ DestinationDatabricksS3BucketRegionEuCentral1 DestinationDatabricksS3BucketRegion = "eu-central-1"
+ DestinationDatabricksS3BucketRegionEuNorth1 DestinationDatabricksS3BucketRegion = "eu-north-1"
+ DestinationDatabricksS3BucketRegionEuSouth1 DestinationDatabricksS3BucketRegion = "eu-south-1"
+ DestinationDatabricksS3BucketRegionEuWest1 DestinationDatabricksS3BucketRegion = "eu-west-1"
+ DestinationDatabricksS3BucketRegionEuWest2 DestinationDatabricksS3BucketRegion = "eu-west-2"
+ DestinationDatabricksS3BucketRegionEuWest3 DestinationDatabricksS3BucketRegion = "eu-west-3"
+ DestinationDatabricksS3BucketRegionSaEast1 DestinationDatabricksS3BucketRegion = "sa-east-1"
+ DestinationDatabricksS3BucketRegionMeSouth1 DestinationDatabricksS3BucketRegion = "me-south-1"
+ DestinationDatabricksS3BucketRegionUsGovEast1 DestinationDatabricksS3BucketRegion = "us-gov-east-1"
+ DestinationDatabricksS3BucketRegionUsGovWest1 DestinationDatabricksS3BucketRegion = "us-gov-west-1"
)
-func (e DestinationDatabricksDataSourceAmazonS3S3BucketRegion) ToPointer() *DestinationDatabricksDataSourceAmazonS3S3BucketRegion {
+func (e DestinationDatabricksS3BucketRegion) ToPointer() *DestinationDatabricksS3BucketRegion {
return &e
}
-func (e *DestinationDatabricksDataSourceAmazonS3S3BucketRegion) UnmarshalJSON(data []byte) error {
+func (e *DestinationDatabricksS3BucketRegion) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -163,16 +206,16 @@ func (e *DestinationDatabricksDataSourceAmazonS3S3BucketRegion) UnmarshalJSON(da
case "us-gov-east-1":
fallthrough
case "us-gov-west-1":
- *e = DestinationDatabricksDataSourceAmazonS3S3BucketRegion(v)
+ *e = DestinationDatabricksS3BucketRegion(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDatabricksDataSourceAmazonS3S3BucketRegion: %v", v)
+ return fmt.Errorf("invalid value for DestinationDatabricksS3BucketRegion: %v", v)
}
}
-// DestinationDatabricksDataSourceAmazonS3 - Storage on which the delta lake is built.
-type DestinationDatabricksDataSourceAmazonS3 struct {
- DataSourceType DestinationDatabricksDataSourceAmazonS3DataSourceType `json:"data_source_type"`
+// DestinationDatabricksAmazonS3 - Storage on which the delta lake is built.
+type DestinationDatabricksAmazonS3 struct {
+ dataSourceType DestinationDatabricksSchemasDataSourceType `const:"S3_STORAGE" json:"data_source_type"`
// The pattern allows you to set the file-name format for the S3 staging file(s)
FileNamePattern *string `json:"file_name_pattern,omitempty"`
// The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.
@@ -182,110 +225,175 @@ type DestinationDatabricksDataSourceAmazonS3 struct {
// The directory under the S3 bucket where data will be written.
S3BucketPath string `json:"s3_bucket_path"`
// The region of the S3 staging bucket to use if utilising a copy strategy.
- S3BucketRegion DestinationDatabricksDataSourceAmazonS3S3BucketRegion `json:"s3_bucket_region"`
+ S3BucketRegion *DestinationDatabricksS3BucketRegion `default:"" json:"s3_bucket_region"`
// The corresponding secret to the above access key id.
S3SecretAccessKey string `json:"s3_secret_access_key"`
}
-type DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType string
+func (d DestinationDatabricksAmazonS3) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDatabricksAmazonS3) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDatabricksAmazonS3) GetDataSourceType() DestinationDatabricksSchemasDataSourceType {
+ return DestinationDatabricksSchemasDataSourceTypeS3Storage
+}
+
+func (o *DestinationDatabricksAmazonS3) GetFileNamePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FileNamePattern
+}
+
+func (o *DestinationDatabricksAmazonS3) GetS3AccessKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3AccessKeyID
+}
+
+func (o *DestinationDatabricksAmazonS3) GetS3BucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketName
+}
+
+func (o *DestinationDatabricksAmazonS3) GetS3BucketPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketPath
+}
+
+func (o *DestinationDatabricksAmazonS3) GetS3BucketRegion() *DestinationDatabricksS3BucketRegion {
+ if o == nil {
+ return nil
+ }
+ return o.S3BucketRegion
+}
+
+func (o *DestinationDatabricksAmazonS3) GetS3SecretAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3SecretAccessKey
+}
+
+type DestinationDatabricksDataSourceType string
const (
- DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceTypeManagedTablesStorage DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType = "MANAGED_TABLES_STORAGE"
+ DestinationDatabricksDataSourceTypeManagedTablesStorage DestinationDatabricksDataSourceType = "MANAGED_TABLES_STORAGE"
)
-func (e DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType) ToPointer() *DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType {
+func (e DestinationDatabricksDataSourceType) ToPointer() *DestinationDatabricksDataSourceType {
return &e
}
-func (e *DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType) UnmarshalJSON(data []byte) error {
+func (e *DestinationDatabricksDataSourceType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "MANAGED_TABLES_STORAGE":
- *e = DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType(v)
+ *e = DestinationDatabricksDataSourceType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType: %v", v)
+ return fmt.Errorf("invalid value for DestinationDatabricksDataSourceType: %v", v)
}
}
-// DestinationDatabricksDataSourceRecommendedManagedTables - Storage on which the delta lake is built.
-type DestinationDatabricksDataSourceRecommendedManagedTables struct {
- DataSourceType DestinationDatabricksDataSourceRecommendedManagedTablesDataSourceType `json:"data_source_type"`
+// DestinationDatabricksRecommendedManagedTables - Storage on which the delta lake is built.
+type DestinationDatabricksRecommendedManagedTables struct {
+ dataSourceType DestinationDatabricksDataSourceType `const:"MANAGED_TABLES_STORAGE" json:"data_source_type"`
}
-type DestinationDatabricksDataSourceType string
+func (d DestinationDatabricksRecommendedManagedTables) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDatabricksRecommendedManagedTables) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDatabricksRecommendedManagedTables) GetDataSourceType() DestinationDatabricksDataSourceType {
+ return DestinationDatabricksDataSourceTypeManagedTablesStorage
+}
+
+type DestinationDatabricksDataSourceUnionType string
const (
- DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceRecommendedManagedTables DestinationDatabricksDataSourceType = "destination-databricks_Data Source_[Recommended] Managed tables"
- DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceAmazonS3 DestinationDatabricksDataSourceType = "destination-databricks_Data Source_Amazon S3"
- DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceAzureBlobStorage DestinationDatabricksDataSourceType = "destination-databricks_Data Source_Azure Blob Storage"
+ DestinationDatabricksDataSourceUnionTypeDestinationDatabricksRecommendedManagedTables DestinationDatabricksDataSourceUnionType = "destination-databricks_[Recommended] Managed tables"
+ DestinationDatabricksDataSourceUnionTypeDestinationDatabricksAmazonS3 DestinationDatabricksDataSourceUnionType = "destination-databricks_Amazon S3"
+ DestinationDatabricksDataSourceUnionTypeDestinationDatabricksAzureBlobStorage DestinationDatabricksDataSourceUnionType = "destination-databricks_Azure Blob Storage"
)
type DestinationDatabricksDataSource struct {
- DestinationDatabricksDataSourceRecommendedManagedTables *DestinationDatabricksDataSourceRecommendedManagedTables
- DestinationDatabricksDataSourceAmazonS3 *DestinationDatabricksDataSourceAmazonS3
- DestinationDatabricksDataSourceAzureBlobStorage *DestinationDatabricksDataSourceAzureBlobStorage
+ DestinationDatabricksRecommendedManagedTables *DestinationDatabricksRecommendedManagedTables
+ DestinationDatabricksAmazonS3 *DestinationDatabricksAmazonS3
+ DestinationDatabricksAzureBlobStorage *DestinationDatabricksAzureBlobStorage
- Type DestinationDatabricksDataSourceType
+ Type DestinationDatabricksDataSourceUnionType
}
-func CreateDestinationDatabricksDataSourceDestinationDatabricksDataSourceRecommendedManagedTables(destinationDatabricksDataSourceRecommendedManagedTables DestinationDatabricksDataSourceRecommendedManagedTables) DestinationDatabricksDataSource {
- typ := DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceRecommendedManagedTables
+func CreateDestinationDatabricksDataSourceDestinationDatabricksRecommendedManagedTables(destinationDatabricksRecommendedManagedTables DestinationDatabricksRecommendedManagedTables) DestinationDatabricksDataSource {
+ typ := DestinationDatabricksDataSourceUnionTypeDestinationDatabricksRecommendedManagedTables
return DestinationDatabricksDataSource{
- DestinationDatabricksDataSourceRecommendedManagedTables: &destinationDatabricksDataSourceRecommendedManagedTables,
+ DestinationDatabricksRecommendedManagedTables: &destinationDatabricksRecommendedManagedTables,
Type: typ,
}
}
-func CreateDestinationDatabricksDataSourceDestinationDatabricksDataSourceAmazonS3(destinationDatabricksDataSourceAmazonS3 DestinationDatabricksDataSourceAmazonS3) DestinationDatabricksDataSource {
- typ := DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceAmazonS3
+func CreateDestinationDatabricksDataSourceDestinationDatabricksAmazonS3(destinationDatabricksAmazonS3 DestinationDatabricksAmazonS3) DestinationDatabricksDataSource {
+ typ := DestinationDatabricksDataSourceUnionTypeDestinationDatabricksAmazonS3
return DestinationDatabricksDataSource{
- DestinationDatabricksDataSourceAmazonS3: &destinationDatabricksDataSourceAmazonS3,
- Type: typ,
+ DestinationDatabricksAmazonS3: &destinationDatabricksAmazonS3,
+ Type: typ,
}
}
-func CreateDestinationDatabricksDataSourceDestinationDatabricksDataSourceAzureBlobStorage(destinationDatabricksDataSourceAzureBlobStorage DestinationDatabricksDataSourceAzureBlobStorage) DestinationDatabricksDataSource {
- typ := DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceAzureBlobStorage
+func CreateDestinationDatabricksDataSourceDestinationDatabricksAzureBlobStorage(destinationDatabricksAzureBlobStorage DestinationDatabricksAzureBlobStorage) DestinationDatabricksDataSource {
+ typ := DestinationDatabricksDataSourceUnionTypeDestinationDatabricksAzureBlobStorage
return DestinationDatabricksDataSource{
- DestinationDatabricksDataSourceAzureBlobStorage: &destinationDatabricksDataSourceAzureBlobStorage,
- Type: typ,
+ DestinationDatabricksAzureBlobStorage: &destinationDatabricksAzureBlobStorage,
+ Type: typ,
}
}
func (u *DestinationDatabricksDataSource) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationDatabricksDataSourceRecommendedManagedTables := new(DestinationDatabricksDataSourceRecommendedManagedTables)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationDatabricksDataSourceRecommendedManagedTables); err == nil {
- u.DestinationDatabricksDataSourceRecommendedManagedTables = destinationDatabricksDataSourceRecommendedManagedTables
- u.Type = DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceRecommendedManagedTables
+
+ destinationDatabricksRecommendedManagedTables := new(DestinationDatabricksRecommendedManagedTables)
+ if err := utils.UnmarshalJSON(data, &destinationDatabricksRecommendedManagedTables, "", true, true); err == nil {
+ u.DestinationDatabricksRecommendedManagedTables = destinationDatabricksRecommendedManagedTables
+ u.Type = DestinationDatabricksDataSourceUnionTypeDestinationDatabricksRecommendedManagedTables
return nil
}
- destinationDatabricksDataSourceAzureBlobStorage := new(DestinationDatabricksDataSourceAzureBlobStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationDatabricksDataSourceAzureBlobStorage); err == nil {
- u.DestinationDatabricksDataSourceAzureBlobStorage = destinationDatabricksDataSourceAzureBlobStorage
- u.Type = DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceAzureBlobStorage
+ destinationDatabricksAzureBlobStorage := new(DestinationDatabricksAzureBlobStorage)
+ if err := utils.UnmarshalJSON(data, &destinationDatabricksAzureBlobStorage, "", true, true); err == nil {
+ u.DestinationDatabricksAzureBlobStorage = destinationDatabricksAzureBlobStorage
+ u.Type = DestinationDatabricksDataSourceUnionTypeDestinationDatabricksAzureBlobStorage
return nil
}
- destinationDatabricksDataSourceAmazonS3 := new(DestinationDatabricksDataSourceAmazonS3)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationDatabricksDataSourceAmazonS3); err == nil {
- u.DestinationDatabricksDataSourceAmazonS3 = destinationDatabricksDataSourceAmazonS3
- u.Type = DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceAmazonS3
+ destinationDatabricksAmazonS3 := new(DestinationDatabricksAmazonS3)
+ if err := utils.UnmarshalJSON(data, &destinationDatabricksAmazonS3, "", true, true); err == nil {
+ u.DestinationDatabricksAmazonS3 = destinationDatabricksAmazonS3
+ u.Type = DestinationDatabricksDataSourceUnionTypeDestinationDatabricksAmazonS3
return nil
}
@@ -293,48 +401,48 @@ func (u *DestinationDatabricksDataSource) UnmarshalJSON(data []byte) error {
}
func (u DestinationDatabricksDataSource) MarshalJSON() ([]byte, error) {
- if u.DestinationDatabricksDataSourceRecommendedManagedTables != nil {
- return json.Marshal(u.DestinationDatabricksDataSourceRecommendedManagedTables)
+ if u.DestinationDatabricksRecommendedManagedTables != nil {
+ return utils.MarshalJSON(u.DestinationDatabricksRecommendedManagedTables, "", true)
}
- if u.DestinationDatabricksDataSourceAzureBlobStorage != nil {
- return json.Marshal(u.DestinationDatabricksDataSourceAzureBlobStorage)
+ if u.DestinationDatabricksAmazonS3 != nil {
+ return utils.MarshalJSON(u.DestinationDatabricksAmazonS3, "", true)
}
- if u.DestinationDatabricksDataSourceAmazonS3 != nil {
- return json.Marshal(u.DestinationDatabricksDataSourceAmazonS3)
+ if u.DestinationDatabricksAzureBlobStorage != nil {
+ return utils.MarshalJSON(u.DestinationDatabricksAzureBlobStorage, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationDatabricksDatabricks string
+type Databricks string
const (
- DestinationDatabricksDatabricksDatabricks DestinationDatabricksDatabricks = "databricks"
+ DatabricksDatabricks Databricks = "databricks"
)
-func (e DestinationDatabricksDatabricks) ToPointer() *DestinationDatabricksDatabricks {
+func (e Databricks) ToPointer() *Databricks {
return &e
}
-func (e *DestinationDatabricksDatabricks) UnmarshalJSON(data []byte) error {
+func (e *Databricks) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "databricks":
- *e = DestinationDatabricksDatabricks(v)
+ *e = Databricks(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDatabricksDatabricks: %v", v)
+ return fmt.Errorf("invalid value for Databricks: %v", v)
}
}
type DestinationDatabricks struct {
// You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector.
- AcceptTerms bool `json:"accept_terms"`
+ AcceptTerms *bool `default:"false" json:"accept_terms"`
// Storage on which the delta lake is built.
DataSource DestinationDatabricksDataSource `json:"data_source"`
// The name of the catalog. If not specified otherwise, the "hive_metastore" will be used.
@@ -344,14 +452,99 @@ type DestinationDatabricks struct {
// Databricks Personal Access Token for making authenticated requests.
DatabricksPersonalAccessToken string `json:"databricks_personal_access_token"`
// Databricks Cluster Port.
- DatabricksPort *string `json:"databricks_port,omitempty"`
+ DatabricksPort *string `default:"443" json:"databricks_port"`
// Databricks Cluster Server Hostname.
- DatabricksServerHostname string `json:"databricks_server_hostname"`
- DestinationType DestinationDatabricksDatabricks `json:"destinationType"`
+ DatabricksServerHostname string `json:"databricks_server_hostname"`
+ destinationType Databricks `const:"databricks" json:"destinationType"`
// Support schema evolution for all streams. If "false", the connector might fail when a stream's schema changes.
- EnableSchemaEvolution *bool `json:"enable_schema_evolution,omitempty"`
+ EnableSchemaEvolution *bool `default:"false" json:"enable_schema_evolution"`
// Default to 'true'. Switch it to 'false' for debugging purpose.
- PurgeStagingData *bool `json:"purge_staging_data,omitempty"`
+ PurgeStagingData *bool `default:"true" json:"purge_staging_data"`
// The default schema tables are written. If not specified otherwise, the "default" will be used.
- Schema *string `json:"schema,omitempty"`
+ Schema *string `default:"default" json:"schema"`
+}
+
+func (d DestinationDatabricks) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDatabricks) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDatabricks) GetAcceptTerms() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.AcceptTerms
+}
+
+func (o *DestinationDatabricks) GetDataSource() DestinationDatabricksDataSource {
+ if o == nil {
+ return DestinationDatabricksDataSource{}
+ }
+ return o.DataSource
+}
+
+func (o *DestinationDatabricks) GetDatabase() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Database
+}
+
+func (o *DestinationDatabricks) GetDatabricksHTTPPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatabricksHTTPPath
+}
+
+func (o *DestinationDatabricks) GetDatabricksPersonalAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatabricksPersonalAccessToken
+}
+
+func (o *DestinationDatabricks) GetDatabricksPort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DatabricksPort
+}
+
+func (o *DestinationDatabricks) GetDatabricksServerHostname() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatabricksServerHostname
+}
+
+func (o *DestinationDatabricks) GetDestinationType() Databricks {
+ return DatabricksDatabricks
+}
+
+func (o *DestinationDatabricks) GetEnableSchemaEvolution() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.EnableSchemaEvolution
+}
+
+func (o *DestinationDatabricks) GetPurgeStagingData() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.PurgeStagingData
+}
+
+func (o *DestinationDatabricks) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
}
diff --git a/internal/sdk/pkg/models/shared/destinationdatabrickscreaterequest.go b/internal/sdk/pkg/models/shared/destinationdatabrickscreaterequest.go
old mode 100755
new mode 100644
index 2a81a6b06..efb0ad308
--- a/internal/sdk/pkg/models/shared/destinationdatabrickscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationdatabrickscreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationDatabricksCreateRequest struct {
Configuration DestinationDatabricks `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationDatabricksCreateRequest) GetConfiguration() DestinationDatabricks {
+ if o == nil {
+ return DestinationDatabricks{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationDatabricksCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationDatabricksCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationDatabricksCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationdatabricksputrequest.go b/internal/sdk/pkg/models/shared/destinationdatabricksputrequest.go
old mode 100755
new mode 100644
index 33adaa9ae..ff87c8578
--- a/internal/sdk/pkg/models/shared/destinationdatabricksputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationdatabricksputrequest.go
@@ -7,3 +7,24 @@ type DestinationDatabricksPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationDatabricksPutRequest) GetConfiguration() DestinationDatabricksUpdate {
+ if o == nil {
+ return DestinationDatabricksUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationDatabricksPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationDatabricksPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationdatabricksupdate.go b/internal/sdk/pkg/models/shared/destinationdatabricksupdate.go
old mode 100755
new mode 100644
index f791c2b54..483bb2af8
--- a/internal/sdk/pkg/models/shared/destinationdatabricksupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationdatabricksupdate.go
@@ -3,110 +3,153 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationDatabricksUpdateDataSourceAzureBlobStorageDataSourceType string
+type DestinationDatabricksUpdateSchemasDataSourceType string
const (
- DestinationDatabricksUpdateDataSourceAzureBlobStorageDataSourceTypeAzureBlobStorage DestinationDatabricksUpdateDataSourceAzureBlobStorageDataSourceType = "AZURE_BLOB_STORAGE"
+ DestinationDatabricksUpdateSchemasDataSourceTypeAzureBlobStorage DestinationDatabricksUpdateSchemasDataSourceType = "AZURE_BLOB_STORAGE"
)
-func (e DestinationDatabricksUpdateDataSourceAzureBlobStorageDataSourceType) ToPointer() *DestinationDatabricksUpdateDataSourceAzureBlobStorageDataSourceType {
+func (e DestinationDatabricksUpdateSchemasDataSourceType) ToPointer() *DestinationDatabricksUpdateSchemasDataSourceType {
return &e
}
-func (e *DestinationDatabricksUpdateDataSourceAzureBlobStorageDataSourceType) UnmarshalJSON(data []byte) error {
+func (e *DestinationDatabricksUpdateSchemasDataSourceType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "AZURE_BLOB_STORAGE":
- *e = DestinationDatabricksUpdateDataSourceAzureBlobStorageDataSourceType(v)
+ *e = DestinationDatabricksUpdateSchemasDataSourceType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDatabricksUpdateDataSourceAzureBlobStorageDataSourceType: %v", v)
+ return fmt.Errorf("invalid value for DestinationDatabricksUpdateSchemasDataSourceType: %v", v)
}
}
-// DestinationDatabricksUpdateDataSourceAzureBlobStorage - Storage on which the delta lake is built.
-type DestinationDatabricksUpdateDataSourceAzureBlobStorage struct {
+// DestinationDatabricksUpdateAzureBlobStorage - Storage on which the delta lake is built.
+type DestinationDatabricksUpdateAzureBlobStorage struct {
// The account's name of the Azure Blob Storage.
AzureBlobStorageAccountName string `json:"azure_blob_storage_account_name"`
// The name of the Azure blob storage container.
AzureBlobStorageContainerName string `json:"azure_blob_storage_container_name"`
// This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
- AzureBlobStorageEndpointDomainName *string `json:"azure_blob_storage_endpoint_domain_name,omitempty"`
+ AzureBlobStorageEndpointDomainName *string `default:"blob.core.windows.net" json:"azure_blob_storage_endpoint_domain_name"`
// Shared access signature (SAS) token to grant limited access to objects in your storage account.
- AzureBlobStorageSasToken string `json:"azure_blob_storage_sas_token"`
- DataSourceType DestinationDatabricksUpdateDataSourceAzureBlobStorageDataSourceType `json:"data_source_type"`
+ AzureBlobStorageSasToken string `json:"azure_blob_storage_sas_token"`
+ dataSourceType DestinationDatabricksUpdateSchemasDataSourceType `const:"AZURE_BLOB_STORAGE" json:"data_source_type"`
}
-type DestinationDatabricksUpdateDataSourceAmazonS3DataSourceType string
+func (d DestinationDatabricksUpdateAzureBlobStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDatabricksUpdateAzureBlobStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDatabricksUpdateAzureBlobStorage) GetAzureBlobStorageAccountName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageAccountName
+}
+
+func (o *DestinationDatabricksUpdateAzureBlobStorage) GetAzureBlobStorageContainerName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageContainerName
+}
+
+func (o *DestinationDatabricksUpdateAzureBlobStorage) GetAzureBlobStorageEndpointDomainName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageEndpointDomainName
+}
+
+func (o *DestinationDatabricksUpdateAzureBlobStorage) GetAzureBlobStorageSasToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageSasToken
+}
+
+func (o *DestinationDatabricksUpdateAzureBlobStorage) GetDataSourceType() DestinationDatabricksUpdateSchemasDataSourceType {
+ return DestinationDatabricksUpdateSchemasDataSourceTypeAzureBlobStorage
+}
+
+type DestinationDatabricksUpdateDataSourceType string
const (
- DestinationDatabricksUpdateDataSourceAmazonS3DataSourceTypeS3Storage DestinationDatabricksUpdateDataSourceAmazonS3DataSourceType = "S3_STORAGE"
+ DestinationDatabricksUpdateDataSourceTypeS3Storage DestinationDatabricksUpdateDataSourceType = "S3_STORAGE"
)
-func (e DestinationDatabricksUpdateDataSourceAmazonS3DataSourceType) ToPointer() *DestinationDatabricksUpdateDataSourceAmazonS3DataSourceType {
+func (e DestinationDatabricksUpdateDataSourceType) ToPointer() *DestinationDatabricksUpdateDataSourceType {
return &e
}
-func (e *DestinationDatabricksUpdateDataSourceAmazonS3DataSourceType) UnmarshalJSON(data []byte) error {
+func (e *DestinationDatabricksUpdateDataSourceType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "S3_STORAGE":
- *e = DestinationDatabricksUpdateDataSourceAmazonS3DataSourceType(v)
+ *e = DestinationDatabricksUpdateDataSourceType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDatabricksUpdateDataSourceAmazonS3DataSourceType: %v", v)
+ return fmt.Errorf("invalid value for DestinationDatabricksUpdateDataSourceType: %v", v)
}
}
-// DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion - The region of the S3 staging bucket to use if utilising a copy strategy.
-type DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion string
+// DestinationDatabricksUpdateS3BucketRegion - The region of the S3 staging bucket to use if utilising a copy strategy.
+type DestinationDatabricksUpdateS3BucketRegion string
const (
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionUnknown DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = ""
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionUsEast1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "us-east-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionUsEast2 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "us-east-2"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionUsWest1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "us-west-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionUsWest2 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "us-west-2"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionAfSouth1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "af-south-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionApEast1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "ap-east-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionApSouth1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "ap-south-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionApNortheast1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "ap-northeast-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionApNortheast2 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "ap-northeast-2"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionApNortheast3 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "ap-northeast-3"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionApSoutheast1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "ap-southeast-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionApSoutheast2 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "ap-southeast-2"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionCaCentral1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "ca-central-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionCnNorth1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "cn-north-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionCnNorthwest1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "cn-northwest-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionEuCentral1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "eu-central-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionEuNorth1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "eu-north-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionEuSouth1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "eu-south-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionEuWest1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "eu-west-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionEuWest2 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "eu-west-2"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionEuWest3 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "eu-west-3"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionSaEast1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "sa-east-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionMeSouth1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "me-south-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionUsGovEast1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "us-gov-east-1"
- DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegionUsGovWest1 DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion = "us-gov-west-1"
+ DestinationDatabricksUpdateS3BucketRegionUnknown DestinationDatabricksUpdateS3BucketRegion = ""
+ DestinationDatabricksUpdateS3BucketRegionUsEast1 DestinationDatabricksUpdateS3BucketRegion = "us-east-1"
+ DestinationDatabricksUpdateS3BucketRegionUsEast2 DestinationDatabricksUpdateS3BucketRegion = "us-east-2"
+ DestinationDatabricksUpdateS3BucketRegionUsWest1 DestinationDatabricksUpdateS3BucketRegion = "us-west-1"
+ DestinationDatabricksUpdateS3BucketRegionUsWest2 DestinationDatabricksUpdateS3BucketRegion = "us-west-2"
+ DestinationDatabricksUpdateS3BucketRegionAfSouth1 DestinationDatabricksUpdateS3BucketRegion = "af-south-1"
+ DestinationDatabricksUpdateS3BucketRegionApEast1 DestinationDatabricksUpdateS3BucketRegion = "ap-east-1"
+ DestinationDatabricksUpdateS3BucketRegionApSouth1 DestinationDatabricksUpdateS3BucketRegion = "ap-south-1"
+ DestinationDatabricksUpdateS3BucketRegionApNortheast1 DestinationDatabricksUpdateS3BucketRegion = "ap-northeast-1"
+ DestinationDatabricksUpdateS3BucketRegionApNortheast2 DestinationDatabricksUpdateS3BucketRegion = "ap-northeast-2"
+ DestinationDatabricksUpdateS3BucketRegionApNortheast3 DestinationDatabricksUpdateS3BucketRegion = "ap-northeast-3"
+ DestinationDatabricksUpdateS3BucketRegionApSoutheast1 DestinationDatabricksUpdateS3BucketRegion = "ap-southeast-1"
+ DestinationDatabricksUpdateS3BucketRegionApSoutheast2 DestinationDatabricksUpdateS3BucketRegion = "ap-southeast-2"
+ DestinationDatabricksUpdateS3BucketRegionCaCentral1 DestinationDatabricksUpdateS3BucketRegion = "ca-central-1"
+ DestinationDatabricksUpdateS3BucketRegionCnNorth1 DestinationDatabricksUpdateS3BucketRegion = "cn-north-1"
+ DestinationDatabricksUpdateS3BucketRegionCnNorthwest1 DestinationDatabricksUpdateS3BucketRegion = "cn-northwest-1"
+ DestinationDatabricksUpdateS3BucketRegionEuCentral1 DestinationDatabricksUpdateS3BucketRegion = "eu-central-1"
+ DestinationDatabricksUpdateS3BucketRegionEuNorth1 DestinationDatabricksUpdateS3BucketRegion = "eu-north-1"
+ DestinationDatabricksUpdateS3BucketRegionEuSouth1 DestinationDatabricksUpdateS3BucketRegion = "eu-south-1"
+ DestinationDatabricksUpdateS3BucketRegionEuWest1 DestinationDatabricksUpdateS3BucketRegion = "eu-west-1"
+ DestinationDatabricksUpdateS3BucketRegionEuWest2 DestinationDatabricksUpdateS3BucketRegion = "eu-west-2"
+ DestinationDatabricksUpdateS3BucketRegionEuWest3 DestinationDatabricksUpdateS3BucketRegion = "eu-west-3"
+ DestinationDatabricksUpdateS3BucketRegionSaEast1 DestinationDatabricksUpdateS3BucketRegion = "sa-east-1"
+ DestinationDatabricksUpdateS3BucketRegionMeSouth1 DestinationDatabricksUpdateS3BucketRegion = "me-south-1"
+ DestinationDatabricksUpdateS3BucketRegionUsGovEast1 DestinationDatabricksUpdateS3BucketRegion = "us-gov-east-1"
+ DestinationDatabricksUpdateS3BucketRegionUsGovWest1 DestinationDatabricksUpdateS3BucketRegion = "us-gov-west-1"
)
-func (e DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion) ToPointer() *DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion {
+func (e DestinationDatabricksUpdateS3BucketRegion) ToPointer() *DestinationDatabricksUpdateS3BucketRegion {
return &e
}
-func (e *DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion) UnmarshalJSON(data []byte) error {
+func (e *DestinationDatabricksUpdateS3BucketRegion) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -163,16 +206,16 @@ func (e *DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion) UnmarshalJ
case "us-gov-east-1":
fallthrough
case "us-gov-west-1":
- *e = DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion(v)
+ *e = DestinationDatabricksUpdateS3BucketRegion(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion: %v", v)
+ return fmt.Errorf("invalid value for DestinationDatabricksUpdateS3BucketRegion: %v", v)
}
}
-// DestinationDatabricksUpdateDataSourceAmazonS3 - Storage on which the delta lake is built.
-type DestinationDatabricksUpdateDataSourceAmazonS3 struct {
- DataSourceType DestinationDatabricksUpdateDataSourceAmazonS3DataSourceType `json:"data_source_type"`
+// AmazonS3 - Storage on which the delta lake is built.
+type AmazonS3 struct {
+ dataSourceType DestinationDatabricksUpdateDataSourceType `const:"S3_STORAGE" json:"data_source_type"`
// The pattern allows you to set the file-name format for the S3 staging file(s)
FileNamePattern *string `json:"file_name_pattern,omitempty"`
// The Access Key Id granting allow one to access the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket.
@@ -182,137 +225,202 @@ type DestinationDatabricksUpdateDataSourceAmazonS3 struct {
// The directory under the S3 bucket where data will be written.
S3BucketPath string `json:"s3_bucket_path"`
// The region of the S3 staging bucket to use if utilising a copy strategy.
- S3BucketRegion DestinationDatabricksUpdateDataSourceAmazonS3S3BucketRegion `json:"s3_bucket_region"`
+ S3BucketRegion *DestinationDatabricksUpdateS3BucketRegion `default:"" json:"s3_bucket_region"`
// The corresponding secret to the above access key id.
S3SecretAccessKey string `json:"s3_secret_access_key"`
}
-type DestinationDatabricksUpdateDataSourceRecommendedManagedTablesDataSourceType string
+func (a AmazonS3) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AmazonS3) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AmazonS3) GetDataSourceType() DestinationDatabricksUpdateDataSourceType {
+ return DestinationDatabricksUpdateDataSourceTypeS3Storage
+}
+
+func (o *AmazonS3) GetFileNamePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FileNamePattern
+}
+
+func (o *AmazonS3) GetS3AccessKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3AccessKeyID
+}
+
+func (o *AmazonS3) GetS3BucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketName
+}
+
+func (o *AmazonS3) GetS3BucketPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketPath
+}
+
+func (o *AmazonS3) GetS3BucketRegion() *DestinationDatabricksUpdateS3BucketRegion {
+ if o == nil {
+ return nil
+ }
+ return o.S3BucketRegion
+}
+
+func (o *AmazonS3) GetS3SecretAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3SecretAccessKey
+}
+
+type DataSourceType string
const (
- DestinationDatabricksUpdateDataSourceRecommendedManagedTablesDataSourceTypeManagedTablesStorage DestinationDatabricksUpdateDataSourceRecommendedManagedTablesDataSourceType = "MANAGED_TABLES_STORAGE"
+ DataSourceTypeManagedTablesStorage DataSourceType = "MANAGED_TABLES_STORAGE"
)
-func (e DestinationDatabricksUpdateDataSourceRecommendedManagedTablesDataSourceType) ToPointer() *DestinationDatabricksUpdateDataSourceRecommendedManagedTablesDataSourceType {
+func (e DataSourceType) ToPointer() *DataSourceType {
return &e
}
-func (e *DestinationDatabricksUpdateDataSourceRecommendedManagedTablesDataSourceType) UnmarshalJSON(data []byte) error {
+func (e *DataSourceType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "MANAGED_TABLES_STORAGE":
- *e = DestinationDatabricksUpdateDataSourceRecommendedManagedTablesDataSourceType(v)
+ *e = DataSourceType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDatabricksUpdateDataSourceRecommendedManagedTablesDataSourceType: %v", v)
+ return fmt.Errorf("invalid value for DataSourceType: %v", v)
}
}
-// DestinationDatabricksUpdateDataSourceRecommendedManagedTables - Storage on which the delta lake is built.
-type DestinationDatabricksUpdateDataSourceRecommendedManagedTables struct {
- DataSourceType DestinationDatabricksUpdateDataSourceRecommendedManagedTablesDataSourceType `json:"data_source_type"`
+// RecommendedManagedTables - Storage on which the delta lake is built.
+type RecommendedManagedTables struct {
+ dataSourceType DataSourceType `const:"MANAGED_TABLES_STORAGE" json:"data_source_type"`
}
-type DestinationDatabricksUpdateDataSourceType string
+func (r RecommendedManagedTables) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(r, "", false)
+}
+
+func (r *RecommendedManagedTables) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &r, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *RecommendedManagedTables) GetDataSourceType() DataSourceType {
+ return DataSourceTypeManagedTablesStorage
+}
+
+type DataSourceUnionType string
const (
- DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceRecommendedManagedTables DestinationDatabricksUpdateDataSourceType = "destination-databricks-update_Data Source_[Recommended] Managed tables"
- DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceAmazonS3 DestinationDatabricksUpdateDataSourceType = "destination-databricks-update_Data Source_Amazon S3"
- DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceAzureBlobStorage DestinationDatabricksUpdateDataSourceType = "destination-databricks-update_Data Source_Azure Blob Storage"
+ DataSourceUnionTypeRecommendedManagedTables DataSourceUnionType = "[Recommended] Managed tables"
+ DataSourceUnionTypeAmazonS3 DataSourceUnionType = "Amazon S3"
+ DataSourceUnionTypeDestinationDatabricksUpdateAzureBlobStorage DataSourceUnionType = "destination-databricks-update_Azure Blob Storage"
)
-type DestinationDatabricksUpdateDataSource struct {
- DestinationDatabricksUpdateDataSourceRecommendedManagedTables *DestinationDatabricksUpdateDataSourceRecommendedManagedTables
- DestinationDatabricksUpdateDataSourceAmazonS3 *DestinationDatabricksUpdateDataSourceAmazonS3
- DestinationDatabricksUpdateDataSourceAzureBlobStorage *DestinationDatabricksUpdateDataSourceAzureBlobStorage
+type DataSource struct {
+ RecommendedManagedTables *RecommendedManagedTables
+ AmazonS3 *AmazonS3
+ DestinationDatabricksUpdateAzureBlobStorage *DestinationDatabricksUpdateAzureBlobStorage
- Type DestinationDatabricksUpdateDataSourceType
+ Type DataSourceUnionType
}
-func CreateDestinationDatabricksUpdateDataSourceDestinationDatabricksUpdateDataSourceRecommendedManagedTables(destinationDatabricksUpdateDataSourceRecommendedManagedTables DestinationDatabricksUpdateDataSourceRecommendedManagedTables) DestinationDatabricksUpdateDataSource {
- typ := DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceRecommendedManagedTables
+func CreateDataSourceRecommendedManagedTables(recommendedManagedTables RecommendedManagedTables) DataSource {
+ typ := DataSourceUnionTypeRecommendedManagedTables
- return DestinationDatabricksUpdateDataSource{
- DestinationDatabricksUpdateDataSourceRecommendedManagedTables: &destinationDatabricksUpdateDataSourceRecommendedManagedTables,
- Type: typ,
+ return DataSource{
+ RecommendedManagedTables: &recommendedManagedTables,
+ Type: typ,
}
}
-func CreateDestinationDatabricksUpdateDataSourceDestinationDatabricksUpdateDataSourceAmazonS3(destinationDatabricksUpdateDataSourceAmazonS3 DestinationDatabricksUpdateDataSourceAmazonS3) DestinationDatabricksUpdateDataSource {
- typ := DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceAmazonS3
+func CreateDataSourceAmazonS3(amazonS3 AmazonS3) DataSource {
+ typ := DataSourceUnionTypeAmazonS3
- return DestinationDatabricksUpdateDataSource{
- DestinationDatabricksUpdateDataSourceAmazonS3: &destinationDatabricksUpdateDataSourceAmazonS3,
- Type: typ,
+ return DataSource{
+ AmazonS3: &amazonS3,
+ Type: typ,
}
}
-func CreateDestinationDatabricksUpdateDataSourceDestinationDatabricksUpdateDataSourceAzureBlobStorage(destinationDatabricksUpdateDataSourceAzureBlobStorage DestinationDatabricksUpdateDataSourceAzureBlobStorage) DestinationDatabricksUpdateDataSource {
- typ := DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceAzureBlobStorage
+func CreateDataSourceDestinationDatabricksUpdateAzureBlobStorage(destinationDatabricksUpdateAzureBlobStorage DestinationDatabricksUpdateAzureBlobStorage) DataSource {
+ typ := DataSourceUnionTypeDestinationDatabricksUpdateAzureBlobStorage
- return DestinationDatabricksUpdateDataSource{
- DestinationDatabricksUpdateDataSourceAzureBlobStorage: &destinationDatabricksUpdateDataSourceAzureBlobStorage,
+ return DataSource{
+ DestinationDatabricksUpdateAzureBlobStorage: &destinationDatabricksUpdateAzureBlobStorage,
Type: typ,
}
}
-func (u *DestinationDatabricksUpdateDataSource) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DataSource) UnmarshalJSON(data []byte) error {
- destinationDatabricksUpdateDataSourceRecommendedManagedTables := new(DestinationDatabricksUpdateDataSourceRecommendedManagedTables)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationDatabricksUpdateDataSourceRecommendedManagedTables); err == nil {
- u.DestinationDatabricksUpdateDataSourceRecommendedManagedTables = destinationDatabricksUpdateDataSourceRecommendedManagedTables
- u.Type = DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceRecommendedManagedTables
+ recommendedManagedTables := new(RecommendedManagedTables)
+ if err := utils.UnmarshalJSON(data, &recommendedManagedTables, "", true, true); err == nil {
+ u.RecommendedManagedTables = recommendedManagedTables
+ u.Type = DataSourceUnionTypeRecommendedManagedTables
return nil
}
- destinationDatabricksUpdateDataSourceAzureBlobStorage := new(DestinationDatabricksUpdateDataSourceAzureBlobStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationDatabricksUpdateDataSourceAzureBlobStorage); err == nil {
- u.DestinationDatabricksUpdateDataSourceAzureBlobStorage = destinationDatabricksUpdateDataSourceAzureBlobStorage
- u.Type = DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceAzureBlobStorage
+ destinationDatabricksUpdateAzureBlobStorage := new(DestinationDatabricksUpdateAzureBlobStorage)
+ if err := utils.UnmarshalJSON(data, &destinationDatabricksUpdateAzureBlobStorage, "", true, true); err == nil {
+ u.DestinationDatabricksUpdateAzureBlobStorage = destinationDatabricksUpdateAzureBlobStorage
+ u.Type = DataSourceUnionTypeDestinationDatabricksUpdateAzureBlobStorage
return nil
}
- destinationDatabricksUpdateDataSourceAmazonS3 := new(DestinationDatabricksUpdateDataSourceAmazonS3)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationDatabricksUpdateDataSourceAmazonS3); err == nil {
- u.DestinationDatabricksUpdateDataSourceAmazonS3 = destinationDatabricksUpdateDataSourceAmazonS3
- u.Type = DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceAmazonS3
+ amazonS3 := new(AmazonS3)
+ if err := utils.UnmarshalJSON(data, &amazonS3, "", true, true); err == nil {
+ u.AmazonS3 = amazonS3
+ u.Type = DataSourceUnionTypeAmazonS3
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationDatabricksUpdateDataSource) MarshalJSON() ([]byte, error) {
- if u.DestinationDatabricksUpdateDataSourceRecommendedManagedTables != nil {
- return json.Marshal(u.DestinationDatabricksUpdateDataSourceRecommendedManagedTables)
+func (u DataSource) MarshalJSON() ([]byte, error) {
+ if u.RecommendedManagedTables != nil {
+ return utils.MarshalJSON(u.RecommendedManagedTables, "", true)
}
- if u.DestinationDatabricksUpdateDataSourceAzureBlobStorage != nil {
- return json.Marshal(u.DestinationDatabricksUpdateDataSourceAzureBlobStorage)
+ if u.AmazonS3 != nil {
+ return utils.MarshalJSON(u.AmazonS3, "", true)
}
- if u.DestinationDatabricksUpdateDataSourceAmazonS3 != nil {
- return json.Marshal(u.DestinationDatabricksUpdateDataSourceAmazonS3)
+ if u.DestinationDatabricksUpdateAzureBlobStorage != nil {
+ return utils.MarshalJSON(u.DestinationDatabricksUpdateAzureBlobStorage, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationDatabricksUpdate struct {
// You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector.
- AcceptTerms bool `json:"accept_terms"`
+ AcceptTerms *bool `default:"false" json:"accept_terms"`
// Storage on which the delta lake is built.
- DataSource DestinationDatabricksUpdateDataSource `json:"data_source"`
+ DataSource DataSource `json:"data_source"`
// The name of the catalog. If not specified otherwise, the "hive_metastore" will be used.
Database *string `json:"database,omitempty"`
// Databricks Cluster HTTP Path.
@@ -320,13 +428,94 @@ type DestinationDatabricksUpdate struct {
// Databricks Personal Access Token for making authenticated requests.
DatabricksPersonalAccessToken string `json:"databricks_personal_access_token"`
// Databricks Cluster Port.
- DatabricksPort *string `json:"databricks_port,omitempty"`
+ DatabricksPort *string `default:"443" json:"databricks_port"`
// Databricks Cluster Server Hostname.
DatabricksServerHostname string `json:"databricks_server_hostname"`
// Support schema evolution for all streams. If "false", the connector might fail when a stream's schema changes.
- EnableSchemaEvolution *bool `json:"enable_schema_evolution,omitempty"`
+ EnableSchemaEvolution *bool `default:"false" json:"enable_schema_evolution"`
// Default to 'true'. Switch it to 'false' for debugging purpose.
- PurgeStagingData *bool `json:"purge_staging_data,omitempty"`
+ PurgeStagingData *bool `default:"true" json:"purge_staging_data"`
// The default schema tables are written. If not specified otherwise, the "default" will be used.
- Schema *string `json:"schema,omitempty"`
+ Schema *string `default:"default" json:"schema"`
+}
+
+func (d DestinationDatabricksUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDatabricksUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDatabricksUpdate) GetAcceptTerms() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.AcceptTerms
+}
+
+func (o *DestinationDatabricksUpdate) GetDataSource() DataSource {
+ if o == nil {
+ return DataSource{}
+ }
+ return o.DataSource
+}
+
+func (o *DestinationDatabricksUpdate) GetDatabase() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Database
+}
+
+func (o *DestinationDatabricksUpdate) GetDatabricksHTTPPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatabricksHTTPPath
+}
+
+func (o *DestinationDatabricksUpdate) GetDatabricksPersonalAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatabricksPersonalAccessToken
+}
+
+func (o *DestinationDatabricksUpdate) GetDatabricksPort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DatabricksPort
+}
+
+func (o *DestinationDatabricksUpdate) GetDatabricksServerHostname() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatabricksServerHostname
+}
+
+func (o *DestinationDatabricksUpdate) GetEnableSchemaEvolution() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.EnableSchemaEvolution
+}
+
+func (o *DestinationDatabricksUpdate) GetPurgeStagingData() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.PurgeStagingData
+}
+
+func (o *DestinationDatabricksUpdate) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
}
diff --git a/internal/sdk/pkg/models/shared/destinationdevnull.go b/internal/sdk/pkg/models/shared/destinationdevnull.go
old mode 100755
new mode 100644
index f7bf53c34..fff81e634
--- a/internal/sdk/pkg/models/shared/destinationdevnull.go
+++ b/internal/sdk/pkg/models/shared/destinationdevnull.go
@@ -3,95 +3,107 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationDevNullDevNull string
+type DevNull string
const (
- DestinationDevNullDevNullDevNull DestinationDevNullDevNull = "dev-null"
+ DevNullDevNull DevNull = "dev-null"
)
-func (e DestinationDevNullDevNull) ToPointer() *DestinationDevNullDevNull {
+func (e DevNull) ToPointer() *DevNull {
return &e
}
-func (e *DestinationDevNullDevNull) UnmarshalJSON(data []byte) error {
+func (e *DevNull) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "dev-null":
- *e = DestinationDevNullDevNull(v)
+ *e = DevNull(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDevNullDevNull: %v", v)
+ return fmt.Errorf("invalid value for DevNull: %v", v)
}
}
-type DestinationDevNullTestDestinationSilentTestDestinationType string
+type DestinationDevNullTestDestinationType string
const (
- DestinationDevNullTestDestinationSilentTestDestinationTypeSilent DestinationDevNullTestDestinationSilentTestDestinationType = "SILENT"
+ DestinationDevNullTestDestinationTypeSilent DestinationDevNullTestDestinationType = "SILENT"
)
-func (e DestinationDevNullTestDestinationSilentTestDestinationType) ToPointer() *DestinationDevNullTestDestinationSilentTestDestinationType {
+func (e DestinationDevNullTestDestinationType) ToPointer() *DestinationDevNullTestDestinationType {
return &e
}
-func (e *DestinationDevNullTestDestinationSilentTestDestinationType) UnmarshalJSON(data []byte) error {
+func (e *DestinationDevNullTestDestinationType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SILENT":
- *e = DestinationDevNullTestDestinationSilentTestDestinationType(v)
+ *e = DestinationDevNullTestDestinationType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDevNullTestDestinationSilentTestDestinationType: %v", v)
+ return fmt.Errorf("invalid value for DestinationDevNullTestDestinationType: %v", v)
}
}
-// DestinationDevNullTestDestinationSilent - The type of destination to be used
-type DestinationDevNullTestDestinationSilent struct {
- TestDestinationType DestinationDevNullTestDestinationSilentTestDestinationType `json:"test_destination_type"`
+// DestinationDevNullSilent - The type of destination to be used
+type DestinationDevNullSilent struct {
+ testDestinationType *DestinationDevNullTestDestinationType `const:"SILENT" json:"test_destination_type"`
}
-type DestinationDevNullTestDestinationType string
+func (d DestinationDevNullSilent) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDevNullSilent) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDevNullSilent) GetTestDestinationType() *DestinationDevNullTestDestinationType {
+ return DestinationDevNullTestDestinationTypeSilent.ToPointer()
+}
+
+type DestinationDevNullTestDestinationUnionType string
const (
- DestinationDevNullTestDestinationTypeDestinationDevNullTestDestinationSilent DestinationDevNullTestDestinationType = "destination-dev-null_Test Destination_Silent"
+ DestinationDevNullTestDestinationUnionTypeDestinationDevNullSilent DestinationDevNullTestDestinationUnionType = "destination-dev-null_Silent"
)
type DestinationDevNullTestDestination struct {
- DestinationDevNullTestDestinationSilent *DestinationDevNullTestDestinationSilent
+ DestinationDevNullSilent *DestinationDevNullSilent
- Type DestinationDevNullTestDestinationType
+ Type DestinationDevNullTestDestinationUnionType
}
-func CreateDestinationDevNullTestDestinationDestinationDevNullTestDestinationSilent(destinationDevNullTestDestinationSilent DestinationDevNullTestDestinationSilent) DestinationDevNullTestDestination {
- typ := DestinationDevNullTestDestinationTypeDestinationDevNullTestDestinationSilent
+func CreateDestinationDevNullTestDestinationDestinationDevNullSilent(destinationDevNullSilent DestinationDevNullSilent) DestinationDevNullTestDestination {
+ typ := DestinationDevNullTestDestinationUnionTypeDestinationDevNullSilent
return DestinationDevNullTestDestination{
- DestinationDevNullTestDestinationSilent: &destinationDevNullTestDestinationSilent,
- Type: typ,
+ DestinationDevNullSilent: &destinationDevNullSilent,
+ Type: typ,
}
}
func (u *DestinationDevNullTestDestination) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationDevNullTestDestinationSilent := new(DestinationDevNullTestDestinationSilent)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationDevNullTestDestinationSilent); err == nil {
- u.DestinationDevNullTestDestinationSilent = destinationDevNullTestDestinationSilent
- u.Type = DestinationDevNullTestDestinationTypeDestinationDevNullTestDestinationSilent
+
+ destinationDevNullSilent := new(DestinationDevNullSilent)
+ if err := utils.UnmarshalJSON(data, &destinationDevNullSilent, "", true, true); err == nil {
+ u.DestinationDevNullSilent = destinationDevNullSilent
+ u.Type = DestinationDevNullTestDestinationUnionTypeDestinationDevNullSilent
return nil
}
@@ -99,15 +111,37 @@ func (u *DestinationDevNullTestDestination) UnmarshalJSON(data []byte) error {
}
func (u DestinationDevNullTestDestination) MarshalJSON() ([]byte, error) {
- if u.DestinationDevNullTestDestinationSilent != nil {
- return json.Marshal(u.DestinationDevNullTestDestinationSilent)
+ if u.DestinationDevNullSilent != nil {
+ return utils.MarshalJSON(u.DestinationDevNullSilent, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationDevNull struct {
- DestinationType DestinationDevNullDevNull `json:"destinationType"`
+ destinationType DevNull `const:"dev-null" json:"destinationType"`
// The type of destination to be used
TestDestination DestinationDevNullTestDestination `json:"test_destination"`
}
+
+func (d DestinationDevNull) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDevNull) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDevNull) GetDestinationType() DevNull {
+ return DevNullDevNull
+}
+
+func (o *DestinationDevNull) GetTestDestination() DestinationDevNullTestDestination {
+ if o == nil {
+ return DestinationDevNullTestDestination{}
+ }
+ return o.TestDestination
+}
diff --git a/internal/sdk/pkg/models/shared/destinationdevnullcreaterequest.go b/internal/sdk/pkg/models/shared/destinationdevnullcreaterequest.go
old mode 100755
new mode 100644
index d703d7de1..c55a831d1
--- a/internal/sdk/pkg/models/shared/destinationdevnullcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationdevnullcreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationDevNullCreateRequest struct {
Configuration DestinationDevNull `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationDevNullCreateRequest) GetConfiguration() DestinationDevNull {
+ if o == nil {
+ return DestinationDevNull{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationDevNullCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationDevNullCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationDevNullCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationdevnullputrequest.go b/internal/sdk/pkg/models/shared/destinationdevnullputrequest.go
old mode 100755
new mode 100644
index f602b6d7c..3b4f8cb6e
--- a/internal/sdk/pkg/models/shared/destinationdevnullputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationdevnullputrequest.go
@@ -7,3 +7,24 @@ type DestinationDevNullPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationDevNullPutRequest) GetConfiguration() DestinationDevNullUpdate {
+ if o == nil {
+ return DestinationDevNullUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationDevNullPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationDevNullPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationdevnullupdate.go b/internal/sdk/pkg/models/shared/destinationdevnullupdate.go
old mode 100755
new mode 100644
index 781c65821..7be0ab1e2
--- a/internal/sdk/pkg/models/shared/destinationdevnullupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationdevnullupdate.go
@@ -3,86 +3,105 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationDevNullUpdateTestDestinationSilentTestDestinationType string
+type TestDestinationType string
const (
- DestinationDevNullUpdateTestDestinationSilentTestDestinationTypeSilent DestinationDevNullUpdateTestDestinationSilentTestDestinationType = "SILENT"
+ TestDestinationTypeSilent TestDestinationType = "SILENT"
)
-func (e DestinationDevNullUpdateTestDestinationSilentTestDestinationType) ToPointer() *DestinationDevNullUpdateTestDestinationSilentTestDestinationType {
+func (e TestDestinationType) ToPointer() *TestDestinationType {
return &e
}
-func (e *DestinationDevNullUpdateTestDestinationSilentTestDestinationType) UnmarshalJSON(data []byte) error {
+func (e *TestDestinationType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SILENT":
- *e = DestinationDevNullUpdateTestDestinationSilentTestDestinationType(v)
+ *e = TestDestinationType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDevNullUpdateTestDestinationSilentTestDestinationType: %v", v)
+ return fmt.Errorf("invalid value for TestDestinationType: %v", v)
}
}
-// DestinationDevNullUpdateTestDestinationSilent - The type of destination to be used
-type DestinationDevNullUpdateTestDestinationSilent struct {
- TestDestinationType DestinationDevNullUpdateTestDestinationSilentTestDestinationType `json:"test_destination_type"`
+// Silent - The type of destination to be used
+type Silent struct {
+ testDestinationType *TestDestinationType `const:"SILENT" json:"test_destination_type"`
}
-type DestinationDevNullUpdateTestDestinationType string
+func (s Silent) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *Silent) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Silent) GetTestDestinationType() *TestDestinationType {
+ return TestDestinationTypeSilent.ToPointer()
+}
+
+type TestDestinationUnionType string
const (
- DestinationDevNullUpdateTestDestinationTypeDestinationDevNullUpdateTestDestinationSilent DestinationDevNullUpdateTestDestinationType = "destination-dev-null-update_Test Destination_Silent"
+ TestDestinationUnionTypeSilent TestDestinationUnionType = "Silent"
)
-type DestinationDevNullUpdateTestDestination struct {
- DestinationDevNullUpdateTestDestinationSilent *DestinationDevNullUpdateTestDestinationSilent
+type TestDestination struct {
+ Silent *Silent
- Type DestinationDevNullUpdateTestDestinationType
+ Type TestDestinationUnionType
}
-func CreateDestinationDevNullUpdateTestDestinationDestinationDevNullUpdateTestDestinationSilent(destinationDevNullUpdateTestDestinationSilent DestinationDevNullUpdateTestDestinationSilent) DestinationDevNullUpdateTestDestination {
- typ := DestinationDevNullUpdateTestDestinationTypeDestinationDevNullUpdateTestDestinationSilent
+func CreateTestDestinationSilent(silent Silent) TestDestination {
+ typ := TestDestinationUnionTypeSilent
- return DestinationDevNullUpdateTestDestination{
- DestinationDevNullUpdateTestDestinationSilent: &destinationDevNullUpdateTestDestinationSilent,
- Type: typ,
+ return TestDestination{
+ Silent: &silent,
+ Type: typ,
}
}
-func (u *DestinationDevNullUpdateTestDestination) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *TestDestination) UnmarshalJSON(data []byte) error {
- destinationDevNullUpdateTestDestinationSilent := new(DestinationDevNullUpdateTestDestinationSilent)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationDevNullUpdateTestDestinationSilent); err == nil {
- u.DestinationDevNullUpdateTestDestinationSilent = destinationDevNullUpdateTestDestinationSilent
- u.Type = DestinationDevNullUpdateTestDestinationTypeDestinationDevNullUpdateTestDestinationSilent
+ silent := new(Silent)
+ if err := utils.UnmarshalJSON(data, &silent, "", true, true); err == nil {
+ u.Silent = silent
+ u.Type = TestDestinationUnionTypeSilent
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationDevNullUpdateTestDestination) MarshalJSON() ([]byte, error) {
- if u.DestinationDevNullUpdateTestDestinationSilent != nil {
- return json.Marshal(u.DestinationDevNullUpdateTestDestinationSilent)
+func (u TestDestination) MarshalJSON() ([]byte, error) {
+ if u.Silent != nil {
+ return utils.MarshalJSON(u.Silent, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationDevNullUpdate struct {
// The type of destination to be used
- TestDestination DestinationDevNullUpdateTestDestination `json:"test_destination"`
+ TestDestination TestDestination `json:"test_destination"`
+}
+
+func (o *DestinationDevNullUpdate) GetTestDestination() TestDestination {
+ if o == nil {
+ return TestDestination{}
+ }
+ return o.TestDestination
}
diff --git a/internal/sdk/pkg/models/shared/destinationduckdb.go b/internal/sdk/pkg/models/shared/destinationduckdb.go
new file mode 100644
index 000000000..6cdf2c559
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationduckdb.go
@@ -0,0 +1,79 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+type Duckdb string
+
+const (
+ DuckdbDuckdb Duckdb = "duckdb"
+)
+
+func (e Duckdb) ToPointer() *Duckdb {
+ return &e
+}
+
+func (e *Duckdb) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "duckdb":
+ *e = Duckdb(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for Duckdb: %v", v)
+ }
+}
+
+type DestinationDuckdb struct {
+ destinationType Duckdb `const:"duckdb" json:"destinationType"`
+ // Path to the .duckdb file, or the text 'md:' to connect to MotherDuck. The file will be placed inside that local mount. For more information check out our docs
+ DestinationPath string `json:"destination_path"`
+ // API key to use for authentication to a MotherDuck database.
+ MotherduckAPIKey *string `json:"motherduck_api_key,omitempty"`
+ // Database schema name, default for duckdb is 'main'.
+ Schema *string `json:"schema,omitempty"`
+}
+
+func (d DestinationDuckdb) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDuckdb) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDuckdb) GetDestinationType() Duckdb {
+ return DuckdbDuckdb
+}
+
+func (o *DestinationDuckdb) GetDestinationPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationPath
+}
+
+func (o *DestinationDuckdb) GetMotherduckAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.MotherduckAPIKey
+}
+
+func (o *DestinationDuckdb) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
diff --git a/internal/sdk/pkg/models/shared/destinationduckdbcreaterequest.go b/internal/sdk/pkg/models/shared/destinationduckdbcreaterequest.go
new file mode 100644
index 000000000..845f06664
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationduckdbcreaterequest.go
@@ -0,0 +1,40 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type DestinationDuckdbCreateRequest struct {
+ Configuration DestinationDuckdb `json:"configuration"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationDuckdbCreateRequest) GetConfiguration() DestinationDuckdb {
+ if o == nil {
+ return DestinationDuckdb{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationDuckdbCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationDuckdbCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationDuckdbCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationduckdbputrequest.go b/internal/sdk/pkg/models/shared/destinationduckdbputrequest.go
new file mode 100644
index 000000000..eb0f7c10f
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationduckdbputrequest.go
@@ -0,0 +1,30 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type DestinationDuckdbPutRequest struct {
+ Configuration DestinationDuckdbUpdate `json:"configuration"`
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationDuckdbPutRequest) GetConfiguration() DestinationDuckdbUpdate {
+ if o == nil {
+ return DestinationDuckdbUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationDuckdbPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationDuckdbPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationduckdbupdate.go b/internal/sdk/pkg/models/shared/destinationduckdbupdate.go
new file mode 100644
index 000000000..ca7a8fad1
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationduckdbupdate.go
@@ -0,0 +1,33 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type DestinationDuckdbUpdate struct {
+ // Path to the .duckdb file, or the text 'md:' to connect to MotherDuck. The file will be placed inside that local mount. For more information check out our docs
+ DestinationPath string `json:"destination_path"`
+ // API key to use for authentication to a MotherDuck database.
+ MotherduckAPIKey *string `json:"motherduck_api_key,omitempty"`
+ // Database schema name, default for duckdb is 'main'.
+ Schema *string `json:"schema,omitempty"`
+}
+
+func (o *DestinationDuckdbUpdate) GetDestinationPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationPath
+}
+
+func (o *DestinationDuckdbUpdate) GetMotherduckAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.MotherduckAPIKey
+}
+
+func (o *DestinationDuckdbUpdate) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
diff --git a/internal/sdk/pkg/models/shared/destinationdynamodb.go b/internal/sdk/pkg/models/shared/destinationdynamodb.go
old mode 100755
new mode 100644
index 9fe0ddfd9..967f1d739
--- a/internal/sdk/pkg/models/shared/destinationdynamodb.go
+++ b/internal/sdk/pkg/models/shared/destinationdynamodb.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationDynamodbDynamodb string
+type Dynamodb string
const (
- DestinationDynamodbDynamodbDynamodb DestinationDynamodbDynamodb = "dynamodb"
+ DynamodbDynamodb Dynamodb = "dynamodb"
)
-func (e DestinationDynamodbDynamodb) ToPointer() *DestinationDynamodbDynamodb {
+func (e Dynamodb) ToPointer() *Dynamodb {
return &e
}
-func (e *DestinationDynamodbDynamodb) UnmarshalJSON(data []byte) error {
+func (e *Dynamodb) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "dynamodb":
- *e = DestinationDynamodbDynamodb(v)
+ *e = Dynamodb(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDynamodbDynamodb: %v", v)
+ return fmt.Errorf("invalid value for Dynamodb: %v", v)
}
}
@@ -133,14 +134,64 @@ func (e *DestinationDynamodbDynamoDBRegion) UnmarshalJSON(data []byte) error {
type DestinationDynamodb struct {
// The access key id to access the DynamoDB. Airbyte requires Read and Write permissions to the DynamoDB.
- AccessKeyID string `json:"access_key_id"`
- DestinationType DestinationDynamodbDynamodb `json:"destinationType"`
+ AccessKeyID string `json:"access_key_id"`
+ destinationType Dynamodb `const:"dynamodb" json:"destinationType"`
// This is your DynamoDB endpoint url.(if you are working with AWS DynamoDB, just leave empty).
- DynamodbEndpoint *string `json:"dynamodb_endpoint,omitempty"`
+ DynamodbEndpoint *string `default:"" json:"dynamodb_endpoint"`
// The region of the DynamoDB.
- DynamodbRegion DestinationDynamodbDynamoDBRegion `json:"dynamodb_region"`
+ DynamodbRegion *DestinationDynamodbDynamoDBRegion `default:"" json:"dynamodb_region"`
// The prefix to use when naming DynamoDB tables.
DynamodbTableNamePrefix string `json:"dynamodb_table_name_prefix"`
// The corresponding secret to the access key id.
SecretAccessKey string `json:"secret_access_key"`
}
+
+func (d DestinationDynamodb) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDynamodb) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDynamodb) GetAccessKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKeyID
+}
+
+func (o *DestinationDynamodb) GetDestinationType() Dynamodb {
+ return DynamodbDynamodb
+}
+
+func (o *DestinationDynamodb) GetDynamodbEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DynamodbEndpoint
+}
+
+func (o *DestinationDynamodb) GetDynamodbRegion() *DestinationDynamodbDynamoDBRegion {
+ if o == nil {
+ return nil
+ }
+ return o.DynamodbRegion
+}
+
+func (o *DestinationDynamodb) GetDynamodbTableNamePrefix() string {
+ if o == nil {
+ return ""
+ }
+ return o.DynamodbTableNamePrefix
+}
+
+func (o *DestinationDynamodb) GetSecretAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretAccessKey
+}
diff --git a/internal/sdk/pkg/models/shared/destinationdynamodbcreaterequest.go b/internal/sdk/pkg/models/shared/destinationdynamodbcreaterequest.go
old mode 100755
new mode 100644
index ebfec7a9a..2c783e72c
--- a/internal/sdk/pkg/models/shared/destinationdynamodbcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationdynamodbcreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationDynamodbCreateRequest struct {
Configuration DestinationDynamodb `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationDynamodbCreateRequest) GetConfiguration() DestinationDynamodb {
+ if o == nil {
+ return DestinationDynamodb{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationDynamodbCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationDynamodbCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationDynamodbCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationdynamodbputrequest.go b/internal/sdk/pkg/models/shared/destinationdynamodbputrequest.go
old mode 100755
new mode 100644
index dc01867b3..5036c500e
--- a/internal/sdk/pkg/models/shared/destinationdynamodbputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationdynamodbputrequest.go
@@ -7,3 +7,24 @@ type DestinationDynamodbPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationDynamodbPutRequest) GetConfiguration() DestinationDynamodbUpdate {
+ if o == nil {
+ return DestinationDynamodbUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationDynamodbPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationDynamodbPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationdynamodbupdate.go b/internal/sdk/pkg/models/shared/destinationdynamodbupdate.go
old mode 100755
new mode 100644
index 158f6edd4..4728e8f0d
--- a/internal/sdk/pkg/models/shared/destinationdynamodbupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationdynamodbupdate.go
@@ -5,45 +5,46 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// DestinationDynamodbUpdateDynamoDBRegion - The region of the DynamoDB.
-type DestinationDynamodbUpdateDynamoDBRegion string
+// DynamoDBRegion - The region of the DynamoDB.
+type DynamoDBRegion string
const (
- DestinationDynamodbUpdateDynamoDBRegionUnknown DestinationDynamodbUpdateDynamoDBRegion = ""
- DestinationDynamodbUpdateDynamoDBRegionUsEast1 DestinationDynamodbUpdateDynamoDBRegion = "us-east-1"
- DestinationDynamodbUpdateDynamoDBRegionUsEast2 DestinationDynamodbUpdateDynamoDBRegion = "us-east-2"
- DestinationDynamodbUpdateDynamoDBRegionUsWest1 DestinationDynamodbUpdateDynamoDBRegion = "us-west-1"
- DestinationDynamodbUpdateDynamoDBRegionUsWest2 DestinationDynamodbUpdateDynamoDBRegion = "us-west-2"
- DestinationDynamodbUpdateDynamoDBRegionAfSouth1 DestinationDynamodbUpdateDynamoDBRegion = "af-south-1"
- DestinationDynamodbUpdateDynamoDBRegionApEast1 DestinationDynamodbUpdateDynamoDBRegion = "ap-east-1"
- DestinationDynamodbUpdateDynamoDBRegionApSouth1 DestinationDynamodbUpdateDynamoDBRegion = "ap-south-1"
- DestinationDynamodbUpdateDynamoDBRegionApNortheast1 DestinationDynamodbUpdateDynamoDBRegion = "ap-northeast-1"
- DestinationDynamodbUpdateDynamoDBRegionApNortheast2 DestinationDynamodbUpdateDynamoDBRegion = "ap-northeast-2"
- DestinationDynamodbUpdateDynamoDBRegionApNortheast3 DestinationDynamodbUpdateDynamoDBRegion = "ap-northeast-3"
- DestinationDynamodbUpdateDynamoDBRegionApSoutheast1 DestinationDynamodbUpdateDynamoDBRegion = "ap-southeast-1"
- DestinationDynamodbUpdateDynamoDBRegionApSoutheast2 DestinationDynamodbUpdateDynamoDBRegion = "ap-southeast-2"
- DestinationDynamodbUpdateDynamoDBRegionCaCentral1 DestinationDynamodbUpdateDynamoDBRegion = "ca-central-1"
- DestinationDynamodbUpdateDynamoDBRegionCnNorth1 DestinationDynamodbUpdateDynamoDBRegion = "cn-north-1"
- DestinationDynamodbUpdateDynamoDBRegionCnNorthwest1 DestinationDynamodbUpdateDynamoDBRegion = "cn-northwest-1"
- DestinationDynamodbUpdateDynamoDBRegionEuCentral1 DestinationDynamodbUpdateDynamoDBRegion = "eu-central-1"
- DestinationDynamodbUpdateDynamoDBRegionEuNorth1 DestinationDynamodbUpdateDynamoDBRegion = "eu-north-1"
- DestinationDynamodbUpdateDynamoDBRegionEuSouth1 DestinationDynamodbUpdateDynamoDBRegion = "eu-south-1"
- DestinationDynamodbUpdateDynamoDBRegionEuWest1 DestinationDynamodbUpdateDynamoDBRegion = "eu-west-1"
- DestinationDynamodbUpdateDynamoDBRegionEuWest2 DestinationDynamodbUpdateDynamoDBRegion = "eu-west-2"
- DestinationDynamodbUpdateDynamoDBRegionEuWest3 DestinationDynamodbUpdateDynamoDBRegion = "eu-west-3"
- DestinationDynamodbUpdateDynamoDBRegionSaEast1 DestinationDynamodbUpdateDynamoDBRegion = "sa-east-1"
- DestinationDynamodbUpdateDynamoDBRegionMeSouth1 DestinationDynamodbUpdateDynamoDBRegion = "me-south-1"
- DestinationDynamodbUpdateDynamoDBRegionUsGovEast1 DestinationDynamodbUpdateDynamoDBRegion = "us-gov-east-1"
- DestinationDynamodbUpdateDynamoDBRegionUsGovWest1 DestinationDynamodbUpdateDynamoDBRegion = "us-gov-west-1"
+ DynamoDBRegionUnknown DynamoDBRegion = ""
+ DynamoDBRegionUsEast1 DynamoDBRegion = "us-east-1"
+ DynamoDBRegionUsEast2 DynamoDBRegion = "us-east-2"
+ DynamoDBRegionUsWest1 DynamoDBRegion = "us-west-1"
+ DynamoDBRegionUsWest2 DynamoDBRegion = "us-west-2"
+ DynamoDBRegionAfSouth1 DynamoDBRegion = "af-south-1"
+ DynamoDBRegionApEast1 DynamoDBRegion = "ap-east-1"
+ DynamoDBRegionApSouth1 DynamoDBRegion = "ap-south-1"
+ DynamoDBRegionApNortheast1 DynamoDBRegion = "ap-northeast-1"
+ DynamoDBRegionApNortheast2 DynamoDBRegion = "ap-northeast-2"
+ DynamoDBRegionApNortheast3 DynamoDBRegion = "ap-northeast-3"
+ DynamoDBRegionApSoutheast1 DynamoDBRegion = "ap-southeast-1"
+ DynamoDBRegionApSoutheast2 DynamoDBRegion = "ap-southeast-2"
+ DynamoDBRegionCaCentral1 DynamoDBRegion = "ca-central-1"
+ DynamoDBRegionCnNorth1 DynamoDBRegion = "cn-north-1"
+ DynamoDBRegionCnNorthwest1 DynamoDBRegion = "cn-northwest-1"
+ DynamoDBRegionEuCentral1 DynamoDBRegion = "eu-central-1"
+ DynamoDBRegionEuNorth1 DynamoDBRegion = "eu-north-1"
+ DynamoDBRegionEuSouth1 DynamoDBRegion = "eu-south-1"
+ DynamoDBRegionEuWest1 DynamoDBRegion = "eu-west-1"
+ DynamoDBRegionEuWest2 DynamoDBRegion = "eu-west-2"
+ DynamoDBRegionEuWest3 DynamoDBRegion = "eu-west-3"
+ DynamoDBRegionSaEast1 DynamoDBRegion = "sa-east-1"
+ DynamoDBRegionMeSouth1 DynamoDBRegion = "me-south-1"
+ DynamoDBRegionUsGovEast1 DynamoDBRegion = "us-gov-east-1"
+ DynamoDBRegionUsGovWest1 DynamoDBRegion = "us-gov-west-1"
)
-func (e DestinationDynamodbUpdateDynamoDBRegion) ToPointer() *DestinationDynamodbUpdateDynamoDBRegion {
+func (e DynamoDBRegion) ToPointer() *DynamoDBRegion {
return &e
}
-func (e *DestinationDynamodbUpdateDynamoDBRegion) UnmarshalJSON(data []byte) error {
+func (e *DynamoDBRegion) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -100,10 +101,10 @@ func (e *DestinationDynamodbUpdateDynamoDBRegion) UnmarshalJSON(data []byte) err
case "us-gov-east-1":
fallthrough
case "us-gov-west-1":
- *e = DestinationDynamodbUpdateDynamoDBRegion(v)
+ *e = DynamoDBRegion(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationDynamodbUpdateDynamoDBRegion: %v", v)
+ return fmt.Errorf("invalid value for DynamoDBRegion: %v", v)
}
}
@@ -111,11 +112,57 @@ type DestinationDynamodbUpdate struct {
// The access key id to access the DynamoDB. Airbyte requires Read and Write permissions to the DynamoDB.
AccessKeyID string `json:"access_key_id"`
// This is your DynamoDB endpoint url.(if you are working with AWS DynamoDB, just leave empty).
- DynamodbEndpoint *string `json:"dynamodb_endpoint,omitempty"`
+ DynamodbEndpoint *string `default:"" json:"dynamodb_endpoint"`
// The region of the DynamoDB.
- DynamodbRegion DestinationDynamodbUpdateDynamoDBRegion `json:"dynamodb_region"`
+ DynamodbRegion *DynamoDBRegion `default:"" json:"dynamodb_region"`
// The prefix to use when naming DynamoDB tables.
DynamodbTableNamePrefix string `json:"dynamodb_table_name_prefix"`
// The corresponding secret to the access key id.
SecretAccessKey string `json:"secret_access_key"`
}
+
+func (d DestinationDynamodbUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationDynamodbUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationDynamodbUpdate) GetAccessKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKeyID
+}
+
+func (o *DestinationDynamodbUpdate) GetDynamodbEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DynamodbEndpoint
+}
+
+func (o *DestinationDynamodbUpdate) GetDynamodbRegion() *DynamoDBRegion {
+ if o == nil {
+ return nil
+ }
+ return o.DynamodbRegion
+}
+
+func (o *DestinationDynamodbUpdate) GetDynamodbTableNamePrefix() string {
+ if o == nil {
+ return ""
+ }
+ return o.DynamodbTableNamePrefix
+}
+
+func (o *DestinationDynamodbUpdate) GetSecretAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretAccessKey
+}
diff --git a/internal/sdk/pkg/models/shared/destinationelasticsearch.go b/internal/sdk/pkg/models/shared/destinationelasticsearch.go
old mode 100755
new mode 100644
index c023bb13b..c08f0ed02
--- a/internal/sdk/pkg/models/shared/destinationelasticsearch.go
+++ b/internal/sdk/pkg/models/shared/destinationelasticsearch.go
@@ -3,128 +3,181 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod string
+type DestinationElasticsearchSchemasMethod string
const (
- DestinationElasticsearchAuthenticationMethodUsernamePasswordMethodBasic DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod = "basic"
+ DestinationElasticsearchSchemasMethodBasic DestinationElasticsearchSchemasMethod = "basic"
)
-func (e DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod) ToPointer() *DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod {
+func (e DestinationElasticsearchSchemasMethod) ToPointer() *DestinationElasticsearchSchemasMethod {
return &e
}
-func (e *DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationElasticsearchSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "basic":
- *e = DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod(v)
+ *e = DestinationElasticsearchSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationElasticsearchSchemasMethod: %v", v)
}
}
-// DestinationElasticsearchAuthenticationMethodUsernamePassword - Basic auth header with a username and password
-type DestinationElasticsearchAuthenticationMethodUsernamePassword struct {
- Method DestinationElasticsearchAuthenticationMethodUsernamePasswordMethod `json:"method"`
+// DestinationElasticsearchUsernamePassword - Basic auth header with a username and password
+type DestinationElasticsearchUsernamePassword struct {
+ method DestinationElasticsearchSchemasMethod `const:"basic" json:"method"`
// Basic auth password to access a secure Elasticsearch server
Password string `json:"password"`
// Basic auth username to access a secure Elasticsearch server
Username string `json:"username"`
}
-type DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod string
+func (d DestinationElasticsearchUsernamePassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationElasticsearchUsernamePassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationElasticsearchUsernamePassword) GetMethod() DestinationElasticsearchSchemasMethod {
+ return DestinationElasticsearchSchemasMethodBasic
+}
+
+func (o *DestinationElasticsearchUsernamePassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *DestinationElasticsearchUsernamePassword) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type DestinationElasticsearchMethod string
const (
- DestinationElasticsearchAuthenticationMethodAPIKeySecretMethodSecret DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod = "secret"
+ DestinationElasticsearchMethodSecret DestinationElasticsearchMethod = "secret"
)
-func (e DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod) ToPointer() *DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod {
+func (e DestinationElasticsearchMethod) ToPointer() *DestinationElasticsearchMethod {
return &e
}
-func (e *DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationElasticsearchMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "secret":
- *e = DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod(v)
+ *e = DestinationElasticsearchMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationElasticsearchMethod: %v", v)
}
}
-// DestinationElasticsearchAuthenticationMethodAPIKeySecret - Use a api key and secret combination to authenticate
-type DestinationElasticsearchAuthenticationMethodAPIKeySecret struct {
+// DestinationElasticsearchAPIKeySecret - Use a api key and secret combination to authenticate
+type DestinationElasticsearchAPIKeySecret struct {
// The Key ID to used when accessing an enterprise Elasticsearch instance.
APIKeyID string `json:"apiKeyId"`
// The secret associated with the API Key ID.
- APIKeySecret string `json:"apiKeySecret"`
- Method DestinationElasticsearchAuthenticationMethodAPIKeySecretMethod `json:"method"`
+ APIKeySecret string `json:"apiKeySecret"`
+ method DestinationElasticsearchMethod `const:"secret" json:"method"`
+}
+
+func (d DestinationElasticsearchAPIKeySecret) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationElasticsearchAPIKeySecret) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationElasticsearchAPIKeySecret) GetAPIKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKeyID
+}
+
+func (o *DestinationElasticsearchAPIKeySecret) GetAPIKeySecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKeySecret
+}
+
+func (o *DestinationElasticsearchAPIKeySecret) GetMethod() DestinationElasticsearchMethod {
+ return DestinationElasticsearchMethodSecret
}
type DestinationElasticsearchAuthenticationMethodType string
const (
- DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchAuthenticationMethodAPIKeySecret DestinationElasticsearchAuthenticationMethodType = "destination-elasticsearch_Authentication Method_Api Key/Secret"
- DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchAuthenticationMethodUsernamePassword DestinationElasticsearchAuthenticationMethodType = "destination-elasticsearch_Authentication Method_Username/Password"
+ DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchAPIKeySecret DestinationElasticsearchAuthenticationMethodType = "destination-elasticsearch_Api Key/Secret"
+ DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchUsernamePassword DestinationElasticsearchAuthenticationMethodType = "destination-elasticsearch_Username/Password"
)
type DestinationElasticsearchAuthenticationMethod struct {
- DestinationElasticsearchAuthenticationMethodAPIKeySecret *DestinationElasticsearchAuthenticationMethodAPIKeySecret
- DestinationElasticsearchAuthenticationMethodUsernamePassword *DestinationElasticsearchAuthenticationMethodUsernamePassword
+ DestinationElasticsearchAPIKeySecret *DestinationElasticsearchAPIKeySecret
+ DestinationElasticsearchUsernamePassword *DestinationElasticsearchUsernamePassword
Type DestinationElasticsearchAuthenticationMethodType
}
-func CreateDestinationElasticsearchAuthenticationMethodDestinationElasticsearchAuthenticationMethodAPIKeySecret(destinationElasticsearchAuthenticationMethodAPIKeySecret DestinationElasticsearchAuthenticationMethodAPIKeySecret) DestinationElasticsearchAuthenticationMethod {
- typ := DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchAuthenticationMethodAPIKeySecret
+func CreateDestinationElasticsearchAuthenticationMethodDestinationElasticsearchAPIKeySecret(destinationElasticsearchAPIKeySecret DestinationElasticsearchAPIKeySecret) DestinationElasticsearchAuthenticationMethod {
+ typ := DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchAPIKeySecret
return DestinationElasticsearchAuthenticationMethod{
- DestinationElasticsearchAuthenticationMethodAPIKeySecret: &destinationElasticsearchAuthenticationMethodAPIKeySecret,
- Type: typ,
+ DestinationElasticsearchAPIKeySecret: &destinationElasticsearchAPIKeySecret,
+ Type: typ,
}
}
-func CreateDestinationElasticsearchAuthenticationMethodDestinationElasticsearchAuthenticationMethodUsernamePassword(destinationElasticsearchAuthenticationMethodUsernamePassword DestinationElasticsearchAuthenticationMethodUsernamePassword) DestinationElasticsearchAuthenticationMethod {
- typ := DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchAuthenticationMethodUsernamePassword
+func CreateDestinationElasticsearchAuthenticationMethodDestinationElasticsearchUsernamePassword(destinationElasticsearchUsernamePassword DestinationElasticsearchUsernamePassword) DestinationElasticsearchAuthenticationMethod {
+ typ := DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchUsernamePassword
return DestinationElasticsearchAuthenticationMethod{
- DestinationElasticsearchAuthenticationMethodUsernamePassword: &destinationElasticsearchAuthenticationMethodUsernamePassword,
- Type: typ,
+ DestinationElasticsearchUsernamePassword: &destinationElasticsearchUsernamePassword,
+ Type: typ,
}
}
func (u *DestinationElasticsearchAuthenticationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationElasticsearchAuthenticationMethodAPIKeySecret := new(DestinationElasticsearchAuthenticationMethodAPIKeySecret)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationElasticsearchAuthenticationMethodAPIKeySecret); err == nil {
- u.DestinationElasticsearchAuthenticationMethodAPIKeySecret = destinationElasticsearchAuthenticationMethodAPIKeySecret
- u.Type = DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchAuthenticationMethodAPIKeySecret
+
+ destinationElasticsearchAPIKeySecret := new(DestinationElasticsearchAPIKeySecret)
+ if err := utils.UnmarshalJSON(data, &destinationElasticsearchAPIKeySecret, "", true, true); err == nil {
+ u.DestinationElasticsearchAPIKeySecret = destinationElasticsearchAPIKeySecret
+ u.Type = DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchAPIKeySecret
return nil
}
- destinationElasticsearchAuthenticationMethodUsernamePassword := new(DestinationElasticsearchAuthenticationMethodUsernamePassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationElasticsearchAuthenticationMethodUsernamePassword); err == nil {
- u.DestinationElasticsearchAuthenticationMethodUsernamePassword = destinationElasticsearchAuthenticationMethodUsernamePassword
- u.Type = DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchAuthenticationMethodUsernamePassword
+ destinationElasticsearchUsernamePassword := new(DestinationElasticsearchUsernamePassword)
+ if err := utils.UnmarshalJSON(data, &destinationElasticsearchUsernamePassword, "", true, true); err == nil {
+ u.DestinationElasticsearchUsernamePassword = destinationElasticsearchUsernamePassword
+ u.Type = DestinationElasticsearchAuthenticationMethodTypeDestinationElasticsearchUsernamePassword
return nil
}
@@ -132,38 +185,38 @@ func (u *DestinationElasticsearchAuthenticationMethod) UnmarshalJSON(data []byte
}
func (u DestinationElasticsearchAuthenticationMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationElasticsearchAuthenticationMethodAPIKeySecret != nil {
- return json.Marshal(u.DestinationElasticsearchAuthenticationMethodAPIKeySecret)
+ if u.DestinationElasticsearchAPIKeySecret != nil {
+ return utils.MarshalJSON(u.DestinationElasticsearchAPIKeySecret, "", true)
}
- if u.DestinationElasticsearchAuthenticationMethodUsernamePassword != nil {
- return json.Marshal(u.DestinationElasticsearchAuthenticationMethodUsernamePassword)
+ if u.DestinationElasticsearchUsernamePassword != nil {
+ return utils.MarshalJSON(u.DestinationElasticsearchUsernamePassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationElasticsearchElasticsearch string
+type Elasticsearch string
const (
- DestinationElasticsearchElasticsearchElasticsearch DestinationElasticsearchElasticsearch = "elasticsearch"
+ ElasticsearchElasticsearch Elasticsearch = "elasticsearch"
)
-func (e DestinationElasticsearchElasticsearch) ToPointer() *DestinationElasticsearchElasticsearch {
+func (e Elasticsearch) ToPointer() *Elasticsearch {
return &e
}
-func (e *DestinationElasticsearchElasticsearch) UnmarshalJSON(data []byte) error {
+func (e *Elasticsearch) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "elasticsearch":
- *e = DestinationElasticsearchElasticsearch(v)
+ *e = Elasticsearch(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationElasticsearchElasticsearch: %v", v)
+ return fmt.Errorf("invalid value for Elasticsearch: %v", v)
}
}
@@ -171,10 +224,53 @@ type DestinationElasticsearch struct {
// The type of authentication to be used
AuthenticationMethod *DestinationElasticsearchAuthenticationMethod `json:"authenticationMethod,omitempty"`
// CA certificate
- CaCertificate *string `json:"ca_certificate,omitempty"`
- DestinationType DestinationElasticsearchElasticsearch `json:"destinationType"`
+ CaCertificate *string `json:"ca_certificate,omitempty"`
+ destinationType Elasticsearch `const:"elasticsearch" json:"destinationType"`
// The full url of the Elasticsearch server
Endpoint string `json:"endpoint"`
// If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys.
- Upsert *bool `json:"upsert,omitempty"`
+ Upsert *bool `default:"true" json:"upsert"`
+}
+
+func (d DestinationElasticsearch) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationElasticsearch) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationElasticsearch) GetAuthenticationMethod() *DestinationElasticsearchAuthenticationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.AuthenticationMethod
+}
+
+func (o *DestinationElasticsearch) GetCaCertificate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CaCertificate
+}
+
+func (o *DestinationElasticsearch) GetDestinationType() Elasticsearch {
+ return ElasticsearchElasticsearch
+}
+
+func (o *DestinationElasticsearch) GetEndpoint() string {
+ if o == nil {
+ return ""
+ }
+ return o.Endpoint
+}
+
+func (o *DestinationElasticsearch) GetUpsert() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Upsert
}
diff --git a/internal/sdk/pkg/models/shared/destinationelasticsearchcreaterequest.go b/internal/sdk/pkg/models/shared/destinationelasticsearchcreaterequest.go
old mode 100755
new mode 100644
index 0da25bbe6..42021416f
--- a/internal/sdk/pkg/models/shared/destinationelasticsearchcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationelasticsearchcreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationElasticsearchCreateRequest struct {
Configuration DestinationElasticsearch `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationElasticsearchCreateRequest) GetConfiguration() DestinationElasticsearch {
+ if o == nil {
+ return DestinationElasticsearch{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationElasticsearchCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationElasticsearchCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationElasticsearchCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationelasticsearchputrequest.go b/internal/sdk/pkg/models/shared/destinationelasticsearchputrequest.go
old mode 100755
new mode 100644
index 8dd3ecafa..5496c10e6
--- a/internal/sdk/pkg/models/shared/destinationelasticsearchputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationelasticsearchputrequest.go
@@ -7,3 +7,24 @@ type DestinationElasticsearchPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationElasticsearchPutRequest) GetConfiguration() DestinationElasticsearchUpdate {
+ if o == nil {
+ return DestinationElasticsearchUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationElasticsearchPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationElasticsearchPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationelasticsearchupdate.go b/internal/sdk/pkg/models/shared/destinationelasticsearchupdate.go
old mode 100755
new mode 100644
index 4f94d1241..a9c7d0c6d
--- a/internal/sdk/pkg/models/shared/destinationelasticsearchupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationelasticsearchupdate.go
@@ -3,153 +3,245 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationElasticsearchUpdateAuthenticationMethodUsernamePasswordMethod string
+type DestinationElasticsearchUpdateSchemasMethod string
const (
- DestinationElasticsearchUpdateAuthenticationMethodUsernamePasswordMethodBasic DestinationElasticsearchUpdateAuthenticationMethodUsernamePasswordMethod = "basic"
+ DestinationElasticsearchUpdateSchemasMethodBasic DestinationElasticsearchUpdateSchemasMethod = "basic"
)
-func (e DestinationElasticsearchUpdateAuthenticationMethodUsernamePasswordMethod) ToPointer() *DestinationElasticsearchUpdateAuthenticationMethodUsernamePasswordMethod {
+func (e DestinationElasticsearchUpdateSchemasMethod) ToPointer() *DestinationElasticsearchUpdateSchemasMethod {
return &e
}
-func (e *DestinationElasticsearchUpdateAuthenticationMethodUsernamePasswordMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationElasticsearchUpdateSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "basic":
- *e = DestinationElasticsearchUpdateAuthenticationMethodUsernamePasswordMethod(v)
+ *e = DestinationElasticsearchUpdateSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationElasticsearchUpdateAuthenticationMethodUsernamePasswordMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationElasticsearchUpdateSchemasMethod: %v", v)
}
}
-// DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword - Basic auth header with a username and password
-type DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword struct {
- Method DestinationElasticsearchUpdateAuthenticationMethodUsernamePasswordMethod `json:"method"`
+// UsernamePassword - Basic auth header with a username and password
+type UsernamePassword struct {
+ method DestinationElasticsearchUpdateSchemasMethod `const:"basic" json:"method"`
// Basic auth password to access a secure Elasticsearch server
Password string `json:"password"`
// Basic auth username to access a secure Elasticsearch server
Username string `json:"username"`
}
-type DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecretMethod string
+func (u UsernamePassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(u, "", false)
+}
+
+func (u *UsernamePassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &u, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *UsernamePassword) GetMethod() DestinationElasticsearchUpdateSchemasMethod {
+ return DestinationElasticsearchUpdateSchemasMethodBasic
+}
+
+func (o *UsernamePassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *UsernamePassword) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type DestinationElasticsearchUpdateMethod string
const (
- DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecretMethodSecret DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecretMethod = "secret"
+ DestinationElasticsearchUpdateMethodSecret DestinationElasticsearchUpdateMethod = "secret"
)
-func (e DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecretMethod) ToPointer() *DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecretMethod {
+func (e DestinationElasticsearchUpdateMethod) ToPointer() *DestinationElasticsearchUpdateMethod {
return &e
}
-func (e *DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecretMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationElasticsearchUpdateMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "secret":
- *e = DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecretMethod(v)
+ *e = DestinationElasticsearchUpdateMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecretMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationElasticsearchUpdateMethod: %v", v)
}
}
-// DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret - Use a api key and secret combination to authenticate
-type DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret struct {
+// APIKeySecret - Use a api key and secret combination to authenticate
+type APIKeySecret struct {
// The Key ID to used when accessing an enterprise Elasticsearch instance.
APIKeyID string `json:"apiKeyId"`
// The secret associated with the API Key ID.
- APIKeySecret string `json:"apiKeySecret"`
- Method DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecretMethod `json:"method"`
+ APIKeySecret string `json:"apiKeySecret"`
+ method DestinationElasticsearchUpdateMethod `const:"secret" json:"method"`
+}
+
+func (a APIKeySecret) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *APIKeySecret) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *APIKeySecret) GetAPIKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKeyID
}
-type DestinationElasticsearchUpdateAuthenticationMethodType string
+func (o *APIKeySecret) GetAPIKeySecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKeySecret
+}
+
+func (o *APIKeySecret) GetMethod() DestinationElasticsearchUpdateMethod {
+ return DestinationElasticsearchUpdateMethodSecret
+}
+
+type AuthenticationMethodType string
const (
- DestinationElasticsearchUpdateAuthenticationMethodTypeDestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret DestinationElasticsearchUpdateAuthenticationMethodType = "destination-elasticsearch-update_Authentication Method_Api Key/Secret"
- DestinationElasticsearchUpdateAuthenticationMethodTypeDestinationElasticsearchUpdateAuthenticationMethodUsernamePassword DestinationElasticsearchUpdateAuthenticationMethodType = "destination-elasticsearch-update_Authentication Method_Username/Password"
+ AuthenticationMethodTypeAPIKeySecret AuthenticationMethodType = "Api Key/Secret"
+ AuthenticationMethodTypeUsernamePassword AuthenticationMethodType = "Username/Password"
)
-type DestinationElasticsearchUpdateAuthenticationMethod struct {
- DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret *DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret
- DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword *DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword
+type AuthenticationMethod struct {
+ APIKeySecret *APIKeySecret
+ UsernamePassword *UsernamePassword
- Type DestinationElasticsearchUpdateAuthenticationMethodType
+ Type AuthenticationMethodType
}
-func CreateDestinationElasticsearchUpdateAuthenticationMethodDestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret(destinationElasticsearchUpdateAuthenticationMethodAPIKeySecret DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret) DestinationElasticsearchUpdateAuthenticationMethod {
- typ := DestinationElasticsearchUpdateAuthenticationMethodTypeDestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret
+func CreateAuthenticationMethodAPIKeySecret(apiKeySecret APIKeySecret) AuthenticationMethod {
+ typ := AuthenticationMethodTypeAPIKeySecret
- return DestinationElasticsearchUpdateAuthenticationMethod{
- DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret: &destinationElasticsearchUpdateAuthenticationMethodAPIKeySecret,
- Type: typ,
+ return AuthenticationMethod{
+ APIKeySecret: &apiKeySecret,
+ Type: typ,
}
}
-func CreateDestinationElasticsearchUpdateAuthenticationMethodDestinationElasticsearchUpdateAuthenticationMethodUsernamePassword(destinationElasticsearchUpdateAuthenticationMethodUsernamePassword DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword) DestinationElasticsearchUpdateAuthenticationMethod {
- typ := DestinationElasticsearchUpdateAuthenticationMethodTypeDestinationElasticsearchUpdateAuthenticationMethodUsernamePassword
+func CreateAuthenticationMethodUsernamePassword(usernamePassword UsernamePassword) AuthenticationMethod {
+ typ := AuthenticationMethodTypeUsernamePassword
- return DestinationElasticsearchUpdateAuthenticationMethod{
- DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword: &destinationElasticsearchUpdateAuthenticationMethodUsernamePassword,
- Type: typ,
+ return AuthenticationMethod{
+ UsernamePassword: &usernamePassword,
+ Type: typ,
}
}
-func (u *DestinationElasticsearchUpdateAuthenticationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *AuthenticationMethod) UnmarshalJSON(data []byte) error {
- destinationElasticsearchUpdateAuthenticationMethodAPIKeySecret := new(DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationElasticsearchUpdateAuthenticationMethodAPIKeySecret); err == nil {
- u.DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret = destinationElasticsearchUpdateAuthenticationMethodAPIKeySecret
- u.Type = DestinationElasticsearchUpdateAuthenticationMethodTypeDestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret
+ apiKeySecret := new(APIKeySecret)
+ if err := utils.UnmarshalJSON(data, &apiKeySecret, "", true, true); err == nil {
+ u.APIKeySecret = apiKeySecret
+ u.Type = AuthenticationMethodTypeAPIKeySecret
return nil
}
- destinationElasticsearchUpdateAuthenticationMethodUsernamePassword := new(DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationElasticsearchUpdateAuthenticationMethodUsernamePassword); err == nil {
- u.DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword = destinationElasticsearchUpdateAuthenticationMethodUsernamePassword
- u.Type = DestinationElasticsearchUpdateAuthenticationMethodTypeDestinationElasticsearchUpdateAuthenticationMethodUsernamePassword
+ usernamePassword := new(UsernamePassword)
+ if err := utils.UnmarshalJSON(data, &usernamePassword, "", true, true); err == nil {
+ u.UsernamePassword = usernamePassword
+ u.Type = AuthenticationMethodTypeUsernamePassword
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationElasticsearchUpdateAuthenticationMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret != nil {
- return json.Marshal(u.DestinationElasticsearchUpdateAuthenticationMethodAPIKeySecret)
+func (u AuthenticationMethod) MarshalJSON() ([]byte, error) {
+ if u.APIKeySecret != nil {
+ return utils.MarshalJSON(u.APIKeySecret, "", true)
}
- if u.DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword != nil {
- return json.Marshal(u.DestinationElasticsearchUpdateAuthenticationMethodUsernamePassword)
+ if u.UsernamePassword != nil {
+ return utils.MarshalJSON(u.UsernamePassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationElasticsearchUpdate struct {
// The type of authentication to be used
- AuthenticationMethod *DestinationElasticsearchUpdateAuthenticationMethod `json:"authenticationMethod,omitempty"`
+ AuthenticationMethod *AuthenticationMethod `json:"authenticationMethod,omitempty"`
// CA certificate
CaCertificate *string `json:"ca_certificate,omitempty"`
// The full url of the Elasticsearch server
Endpoint string `json:"endpoint"`
// If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys.
- Upsert *bool `json:"upsert,omitempty"`
+ Upsert *bool `default:"true" json:"upsert"`
+}
+
+func (d DestinationElasticsearchUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationElasticsearchUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationElasticsearchUpdate) GetAuthenticationMethod() *AuthenticationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.AuthenticationMethod
+}
+
+func (o *DestinationElasticsearchUpdate) GetCaCertificate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CaCertificate
+}
+
+func (o *DestinationElasticsearchUpdate) GetEndpoint() string {
+ if o == nil {
+ return ""
+ }
+ return o.Endpoint
+}
+
+func (o *DestinationElasticsearchUpdate) GetUpsert() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Upsert
}
diff --git a/internal/sdk/pkg/models/shared/destinationfirebolt.go b/internal/sdk/pkg/models/shared/destinationfirebolt.go
old mode 100755
new mode 100644
index 4def8ea03..40405b0cb
--- a/internal/sdk/pkg/models/shared/destinationfirebolt.go
+++ b/internal/sdk/pkg/models/shared/destinationfirebolt.go
@@ -3,152 +3,205 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationFireboltFirebolt string
+type Firebolt string
const (
- DestinationFireboltFireboltFirebolt DestinationFireboltFirebolt = "firebolt"
+ FireboltFirebolt Firebolt = "firebolt"
)
-func (e DestinationFireboltFirebolt) ToPointer() *DestinationFireboltFirebolt {
+func (e Firebolt) ToPointer() *Firebolt {
return &e
}
-func (e *DestinationFireboltFirebolt) UnmarshalJSON(data []byte) error {
+func (e *Firebolt) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "firebolt":
- *e = DestinationFireboltFirebolt(v)
+ *e = Firebolt(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationFireboltFirebolt: %v", v)
+ return fmt.Errorf("invalid value for Firebolt: %v", v)
}
}
-type DestinationFireboltLoadingMethodExternalTableViaS3Method string
+type DestinationFireboltSchemasMethod string
const (
- DestinationFireboltLoadingMethodExternalTableViaS3MethodS3 DestinationFireboltLoadingMethodExternalTableViaS3Method = "S3"
+ DestinationFireboltSchemasMethodS3 DestinationFireboltSchemasMethod = "S3"
)
-func (e DestinationFireboltLoadingMethodExternalTableViaS3Method) ToPointer() *DestinationFireboltLoadingMethodExternalTableViaS3Method {
+func (e DestinationFireboltSchemasMethod) ToPointer() *DestinationFireboltSchemasMethod {
return &e
}
-func (e *DestinationFireboltLoadingMethodExternalTableViaS3Method) UnmarshalJSON(data []byte) error {
+func (e *DestinationFireboltSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "S3":
- *e = DestinationFireboltLoadingMethodExternalTableViaS3Method(v)
+ *e = DestinationFireboltSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationFireboltLoadingMethodExternalTableViaS3Method: %v", v)
+ return fmt.Errorf("invalid value for DestinationFireboltSchemasMethod: %v", v)
}
}
-// DestinationFireboltLoadingMethodExternalTableViaS3 - Loading method used to select the way data will be uploaded to Firebolt
-type DestinationFireboltLoadingMethodExternalTableViaS3 struct {
+// DestinationFireboltExternalTableViaS3 - Loading method used to select the way data will be uploaded to Firebolt
+type DestinationFireboltExternalTableViaS3 struct {
// AWS access key granting read and write access to S3.
AwsKeyID string `json:"aws_key_id"`
// Corresponding secret part of the AWS Key
- AwsKeySecret string `json:"aws_key_secret"`
- Method DestinationFireboltLoadingMethodExternalTableViaS3Method `json:"method"`
+ AwsKeySecret string `json:"aws_key_secret"`
+ method DestinationFireboltSchemasMethod `const:"S3" json:"method"`
// The name of the S3 bucket.
S3Bucket string `json:"s3_bucket"`
// Region name of the S3 bucket.
S3Region string `json:"s3_region"`
}
-type DestinationFireboltLoadingMethodSQLInsertsMethod string
+func (d DestinationFireboltExternalTableViaS3) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationFireboltExternalTableViaS3) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationFireboltExternalTableViaS3) GetAwsKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsKeyID
+}
+
+func (o *DestinationFireboltExternalTableViaS3) GetAwsKeySecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsKeySecret
+}
+
+func (o *DestinationFireboltExternalTableViaS3) GetMethod() DestinationFireboltSchemasMethod {
+ return DestinationFireboltSchemasMethodS3
+}
+
+func (o *DestinationFireboltExternalTableViaS3) GetS3Bucket() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3Bucket
+}
+
+func (o *DestinationFireboltExternalTableViaS3) GetS3Region() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3Region
+}
+
+type DestinationFireboltMethod string
const (
- DestinationFireboltLoadingMethodSQLInsertsMethodSQL DestinationFireboltLoadingMethodSQLInsertsMethod = "SQL"
+ DestinationFireboltMethodSQL DestinationFireboltMethod = "SQL"
)
-func (e DestinationFireboltLoadingMethodSQLInsertsMethod) ToPointer() *DestinationFireboltLoadingMethodSQLInsertsMethod {
+func (e DestinationFireboltMethod) ToPointer() *DestinationFireboltMethod {
return &e
}
-func (e *DestinationFireboltLoadingMethodSQLInsertsMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationFireboltMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SQL":
- *e = DestinationFireboltLoadingMethodSQLInsertsMethod(v)
+ *e = DestinationFireboltMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationFireboltLoadingMethodSQLInsertsMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationFireboltMethod: %v", v)
}
}
-// DestinationFireboltLoadingMethodSQLInserts - Loading method used to select the way data will be uploaded to Firebolt
-type DestinationFireboltLoadingMethodSQLInserts struct {
- Method DestinationFireboltLoadingMethodSQLInsertsMethod `json:"method"`
+// DestinationFireboltSQLInserts - Loading method used to select the way data will be uploaded to Firebolt
+type DestinationFireboltSQLInserts struct {
+ method DestinationFireboltMethod `const:"SQL" json:"method"`
+}
+
+func (d DestinationFireboltSQLInserts) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationFireboltSQLInserts) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationFireboltSQLInserts) GetMethod() DestinationFireboltMethod {
+ return DestinationFireboltMethodSQL
}
type DestinationFireboltLoadingMethodType string
const (
- DestinationFireboltLoadingMethodTypeDestinationFireboltLoadingMethodSQLInserts DestinationFireboltLoadingMethodType = "destination-firebolt_Loading Method_SQL Inserts"
- DestinationFireboltLoadingMethodTypeDestinationFireboltLoadingMethodExternalTableViaS3 DestinationFireboltLoadingMethodType = "destination-firebolt_Loading Method_External Table via S3"
+ DestinationFireboltLoadingMethodTypeDestinationFireboltSQLInserts DestinationFireboltLoadingMethodType = "destination-firebolt_SQL Inserts"
+ DestinationFireboltLoadingMethodTypeDestinationFireboltExternalTableViaS3 DestinationFireboltLoadingMethodType = "destination-firebolt_External Table via S3"
)
type DestinationFireboltLoadingMethod struct {
- DestinationFireboltLoadingMethodSQLInserts *DestinationFireboltLoadingMethodSQLInserts
- DestinationFireboltLoadingMethodExternalTableViaS3 *DestinationFireboltLoadingMethodExternalTableViaS3
+ DestinationFireboltSQLInserts *DestinationFireboltSQLInserts
+ DestinationFireboltExternalTableViaS3 *DestinationFireboltExternalTableViaS3
Type DestinationFireboltLoadingMethodType
}
-func CreateDestinationFireboltLoadingMethodDestinationFireboltLoadingMethodSQLInserts(destinationFireboltLoadingMethodSQLInserts DestinationFireboltLoadingMethodSQLInserts) DestinationFireboltLoadingMethod {
- typ := DestinationFireboltLoadingMethodTypeDestinationFireboltLoadingMethodSQLInserts
+func CreateDestinationFireboltLoadingMethodDestinationFireboltSQLInserts(destinationFireboltSQLInserts DestinationFireboltSQLInserts) DestinationFireboltLoadingMethod {
+ typ := DestinationFireboltLoadingMethodTypeDestinationFireboltSQLInserts
return DestinationFireboltLoadingMethod{
- DestinationFireboltLoadingMethodSQLInserts: &destinationFireboltLoadingMethodSQLInserts,
- Type: typ,
+ DestinationFireboltSQLInserts: &destinationFireboltSQLInserts,
+ Type: typ,
}
}
-func CreateDestinationFireboltLoadingMethodDestinationFireboltLoadingMethodExternalTableViaS3(destinationFireboltLoadingMethodExternalTableViaS3 DestinationFireboltLoadingMethodExternalTableViaS3) DestinationFireboltLoadingMethod {
- typ := DestinationFireboltLoadingMethodTypeDestinationFireboltLoadingMethodExternalTableViaS3
+func CreateDestinationFireboltLoadingMethodDestinationFireboltExternalTableViaS3(destinationFireboltExternalTableViaS3 DestinationFireboltExternalTableViaS3) DestinationFireboltLoadingMethod {
+ typ := DestinationFireboltLoadingMethodTypeDestinationFireboltExternalTableViaS3
return DestinationFireboltLoadingMethod{
- DestinationFireboltLoadingMethodExternalTableViaS3: &destinationFireboltLoadingMethodExternalTableViaS3,
- Type: typ,
+ DestinationFireboltExternalTableViaS3: &destinationFireboltExternalTableViaS3,
+ Type: typ,
}
}
func (u *DestinationFireboltLoadingMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationFireboltLoadingMethodSQLInserts := new(DestinationFireboltLoadingMethodSQLInserts)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationFireboltLoadingMethodSQLInserts); err == nil {
- u.DestinationFireboltLoadingMethodSQLInserts = destinationFireboltLoadingMethodSQLInserts
- u.Type = DestinationFireboltLoadingMethodTypeDestinationFireboltLoadingMethodSQLInserts
+
+ destinationFireboltSQLInserts := new(DestinationFireboltSQLInserts)
+ if err := utils.UnmarshalJSON(data, &destinationFireboltSQLInserts, "", true, true); err == nil {
+ u.DestinationFireboltSQLInserts = destinationFireboltSQLInserts
+ u.Type = DestinationFireboltLoadingMethodTypeDestinationFireboltSQLInserts
return nil
}
- destinationFireboltLoadingMethodExternalTableViaS3 := new(DestinationFireboltLoadingMethodExternalTableViaS3)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationFireboltLoadingMethodExternalTableViaS3); err == nil {
- u.DestinationFireboltLoadingMethodExternalTableViaS3 = destinationFireboltLoadingMethodExternalTableViaS3
- u.Type = DestinationFireboltLoadingMethodTypeDestinationFireboltLoadingMethodExternalTableViaS3
+ destinationFireboltExternalTableViaS3 := new(DestinationFireboltExternalTableViaS3)
+ if err := utils.UnmarshalJSON(data, &destinationFireboltExternalTableViaS3, "", true, true); err == nil {
+ u.DestinationFireboltExternalTableViaS3 = destinationFireboltExternalTableViaS3
+ u.Type = DestinationFireboltLoadingMethodTypeDestinationFireboltExternalTableViaS3
return nil
}
@@ -156,23 +209,23 @@ func (u *DestinationFireboltLoadingMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationFireboltLoadingMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationFireboltLoadingMethodSQLInserts != nil {
- return json.Marshal(u.DestinationFireboltLoadingMethodSQLInserts)
+ if u.DestinationFireboltSQLInserts != nil {
+ return utils.MarshalJSON(u.DestinationFireboltSQLInserts, "", true)
}
- if u.DestinationFireboltLoadingMethodExternalTableViaS3 != nil {
- return json.Marshal(u.DestinationFireboltLoadingMethodExternalTableViaS3)
+ if u.DestinationFireboltExternalTableViaS3 != nil {
+ return utils.MarshalJSON(u.DestinationFireboltExternalTableViaS3, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationFirebolt struct {
// Firebolt account to login.
Account *string `json:"account,omitempty"`
// The database to connect to.
- Database string `json:"database"`
- DestinationType DestinationFireboltFirebolt `json:"destinationType"`
+ Database string `json:"database"`
+ destinationType Firebolt `const:"firebolt" json:"destinationType"`
// Engine name or url to connect to.
Engine *string `json:"engine,omitempty"`
// The host name of your Firebolt database.
@@ -184,3 +237,67 @@ type DestinationFirebolt struct {
// Firebolt email address you use to login.
Username string `json:"username"`
}
+
+func (d DestinationFirebolt) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationFirebolt) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationFirebolt) GetAccount() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Account
+}
+
+func (o *DestinationFirebolt) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationFirebolt) GetDestinationType() Firebolt {
+ return FireboltFirebolt
+}
+
+func (o *DestinationFirebolt) GetEngine() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Engine
+}
+
+func (o *DestinationFirebolt) GetHost() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Host
+}
+
+func (o *DestinationFirebolt) GetLoadingMethod() *DestinationFireboltLoadingMethod {
+ if o == nil {
+ return nil
+ }
+ return o.LoadingMethod
+}
+
+func (o *DestinationFirebolt) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *DestinationFirebolt) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationfireboltcreaterequest.go b/internal/sdk/pkg/models/shared/destinationfireboltcreaterequest.go
old mode 100755
new mode 100644
index 26128b9fa..303951f75
--- a/internal/sdk/pkg/models/shared/destinationfireboltcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationfireboltcreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationFireboltCreateRequest struct {
Configuration DestinationFirebolt `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationFireboltCreateRequest) GetConfiguration() DestinationFirebolt {
+ if o == nil {
+ return DestinationFirebolt{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationFireboltCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationFireboltCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationFireboltCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationfireboltputrequest.go b/internal/sdk/pkg/models/shared/destinationfireboltputrequest.go
old mode 100755
new mode 100644
index cb0c96eea..f25213750
--- a/internal/sdk/pkg/models/shared/destinationfireboltputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationfireboltputrequest.go
@@ -7,3 +7,24 @@ type DestinationFireboltPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationFireboltPutRequest) GetConfiguration() DestinationFireboltUpdate {
+ if o == nil {
+ return DestinationFireboltUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationFireboltPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationFireboltPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationfireboltupdate.go b/internal/sdk/pkg/models/shared/destinationfireboltupdate.go
old mode 100755
new mode 100644
index cf7624505..730a2ae54
--- a/internal/sdk/pkg/models/shared/destinationfireboltupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationfireboltupdate.go
@@ -3,128 +3,181 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationFireboltUpdateLoadingMethodExternalTableViaS3Method string
+type DestinationFireboltUpdateSchemasMethod string
const (
- DestinationFireboltUpdateLoadingMethodExternalTableViaS3MethodS3 DestinationFireboltUpdateLoadingMethodExternalTableViaS3Method = "S3"
+ DestinationFireboltUpdateSchemasMethodS3 DestinationFireboltUpdateSchemasMethod = "S3"
)
-func (e DestinationFireboltUpdateLoadingMethodExternalTableViaS3Method) ToPointer() *DestinationFireboltUpdateLoadingMethodExternalTableViaS3Method {
+func (e DestinationFireboltUpdateSchemasMethod) ToPointer() *DestinationFireboltUpdateSchemasMethod {
return &e
}
-func (e *DestinationFireboltUpdateLoadingMethodExternalTableViaS3Method) UnmarshalJSON(data []byte) error {
+func (e *DestinationFireboltUpdateSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "S3":
- *e = DestinationFireboltUpdateLoadingMethodExternalTableViaS3Method(v)
+ *e = DestinationFireboltUpdateSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationFireboltUpdateLoadingMethodExternalTableViaS3Method: %v", v)
+ return fmt.Errorf("invalid value for DestinationFireboltUpdateSchemasMethod: %v", v)
}
}
-// DestinationFireboltUpdateLoadingMethodExternalTableViaS3 - Loading method used to select the way data will be uploaded to Firebolt
-type DestinationFireboltUpdateLoadingMethodExternalTableViaS3 struct {
+// ExternalTableViaS3 - Loading method used to select the way data will be uploaded to Firebolt
+type ExternalTableViaS3 struct {
// AWS access key granting read and write access to S3.
AwsKeyID string `json:"aws_key_id"`
// Corresponding secret part of the AWS Key
- AwsKeySecret string `json:"aws_key_secret"`
- Method DestinationFireboltUpdateLoadingMethodExternalTableViaS3Method `json:"method"`
+ AwsKeySecret string `json:"aws_key_secret"`
+ method DestinationFireboltUpdateSchemasMethod `const:"S3" json:"method"`
// The name of the S3 bucket.
S3Bucket string `json:"s3_bucket"`
// Region name of the S3 bucket.
S3Region string `json:"s3_region"`
}
-type DestinationFireboltUpdateLoadingMethodSQLInsertsMethod string
+func (e ExternalTableViaS3) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(e, "", false)
+}
+
+func (e *ExternalTableViaS3) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &e, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ExternalTableViaS3) GetAwsKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsKeyID
+}
+
+func (o *ExternalTableViaS3) GetAwsKeySecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsKeySecret
+}
+
+func (o *ExternalTableViaS3) GetMethod() DestinationFireboltUpdateSchemasMethod {
+ return DestinationFireboltUpdateSchemasMethodS3
+}
+
+func (o *ExternalTableViaS3) GetS3Bucket() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3Bucket
+}
+
+func (o *ExternalTableViaS3) GetS3Region() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3Region
+}
+
+type DestinationFireboltUpdateMethod string
const (
- DestinationFireboltUpdateLoadingMethodSQLInsertsMethodSQL DestinationFireboltUpdateLoadingMethodSQLInsertsMethod = "SQL"
+ DestinationFireboltUpdateMethodSQL DestinationFireboltUpdateMethod = "SQL"
)
-func (e DestinationFireboltUpdateLoadingMethodSQLInsertsMethod) ToPointer() *DestinationFireboltUpdateLoadingMethodSQLInsertsMethod {
+func (e DestinationFireboltUpdateMethod) ToPointer() *DestinationFireboltUpdateMethod {
return &e
}
-func (e *DestinationFireboltUpdateLoadingMethodSQLInsertsMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationFireboltUpdateMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SQL":
- *e = DestinationFireboltUpdateLoadingMethodSQLInsertsMethod(v)
+ *e = DestinationFireboltUpdateMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationFireboltUpdateLoadingMethodSQLInsertsMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationFireboltUpdateMethod: %v", v)
+ }
+}
+
+// SQLInserts - Loading method used to select the way data will be uploaded to Firebolt
+type SQLInserts struct {
+ method DestinationFireboltUpdateMethod `const:"SQL" json:"method"`
+}
+
+func (s SQLInserts) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SQLInserts) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationFireboltUpdateLoadingMethodSQLInserts - Loading method used to select the way data will be uploaded to Firebolt
-type DestinationFireboltUpdateLoadingMethodSQLInserts struct {
- Method DestinationFireboltUpdateLoadingMethodSQLInsertsMethod `json:"method"`
+func (o *SQLInserts) GetMethod() DestinationFireboltUpdateMethod {
+ return DestinationFireboltUpdateMethodSQL
}
type DestinationFireboltUpdateLoadingMethodType string
const (
- DestinationFireboltUpdateLoadingMethodTypeDestinationFireboltUpdateLoadingMethodSQLInserts DestinationFireboltUpdateLoadingMethodType = "destination-firebolt-update_Loading Method_SQL Inserts"
- DestinationFireboltUpdateLoadingMethodTypeDestinationFireboltUpdateLoadingMethodExternalTableViaS3 DestinationFireboltUpdateLoadingMethodType = "destination-firebolt-update_Loading Method_External Table via S3"
+ DestinationFireboltUpdateLoadingMethodTypeSQLInserts DestinationFireboltUpdateLoadingMethodType = "SQL Inserts"
+ DestinationFireboltUpdateLoadingMethodTypeExternalTableViaS3 DestinationFireboltUpdateLoadingMethodType = "External Table via S3"
)
type DestinationFireboltUpdateLoadingMethod struct {
- DestinationFireboltUpdateLoadingMethodSQLInserts *DestinationFireboltUpdateLoadingMethodSQLInserts
- DestinationFireboltUpdateLoadingMethodExternalTableViaS3 *DestinationFireboltUpdateLoadingMethodExternalTableViaS3
+ SQLInserts *SQLInserts
+ ExternalTableViaS3 *ExternalTableViaS3
Type DestinationFireboltUpdateLoadingMethodType
}
-func CreateDestinationFireboltUpdateLoadingMethodDestinationFireboltUpdateLoadingMethodSQLInserts(destinationFireboltUpdateLoadingMethodSQLInserts DestinationFireboltUpdateLoadingMethodSQLInserts) DestinationFireboltUpdateLoadingMethod {
- typ := DestinationFireboltUpdateLoadingMethodTypeDestinationFireboltUpdateLoadingMethodSQLInserts
+func CreateDestinationFireboltUpdateLoadingMethodSQLInserts(sqlInserts SQLInserts) DestinationFireboltUpdateLoadingMethod {
+ typ := DestinationFireboltUpdateLoadingMethodTypeSQLInserts
return DestinationFireboltUpdateLoadingMethod{
- DestinationFireboltUpdateLoadingMethodSQLInserts: &destinationFireboltUpdateLoadingMethodSQLInserts,
- Type: typ,
+ SQLInserts: &sqlInserts,
+ Type: typ,
}
}
-func CreateDestinationFireboltUpdateLoadingMethodDestinationFireboltUpdateLoadingMethodExternalTableViaS3(destinationFireboltUpdateLoadingMethodExternalTableViaS3 DestinationFireboltUpdateLoadingMethodExternalTableViaS3) DestinationFireboltUpdateLoadingMethod {
- typ := DestinationFireboltUpdateLoadingMethodTypeDestinationFireboltUpdateLoadingMethodExternalTableViaS3
+func CreateDestinationFireboltUpdateLoadingMethodExternalTableViaS3(externalTableViaS3 ExternalTableViaS3) DestinationFireboltUpdateLoadingMethod {
+ typ := DestinationFireboltUpdateLoadingMethodTypeExternalTableViaS3
return DestinationFireboltUpdateLoadingMethod{
- DestinationFireboltUpdateLoadingMethodExternalTableViaS3: &destinationFireboltUpdateLoadingMethodExternalTableViaS3,
- Type: typ,
+ ExternalTableViaS3: &externalTableViaS3,
+ Type: typ,
}
}
func (u *DestinationFireboltUpdateLoadingMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationFireboltUpdateLoadingMethodSQLInserts := new(DestinationFireboltUpdateLoadingMethodSQLInserts)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationFireboltUpdateLoadingMethodSQLInserts); err == nil {
- u.DestinationFireboltUpdateLoadingMethodSQLInserts = destinationFireboltUpdateLoadingMethodSQLInserts
- u.Type = DestinationFireboltUpdateLoadingMethodTypeDestinationFireboltUpdateLoadingMethodSQLInserts
+
+ sqlInserts := new(SQLInserts)
+ if err := utils.UnmarshalJSON(data, &sqlInserts, "", true, true); err == nil {
+ u.SQLInserts = sqlInserts
+ u.Type = DestinationFireboltUpdateLoadingMethodTypeSQLInserts
return nil
}
- destinationFireboltUpdateLoadingMethodExternalTableViaS3 := new(DestinationFireboltUpdateLoadingMethodExternalTableViaS3)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationFireboltUpdateLoadingMethodExternalTableViaS3); err == nil {
- u.DestinationFireboltUpdateLoadingMethodExternalTableViaS3 = destinationFireboltUpdateLoadingMethodExternalTableViaS3
- u.Type = DestinationFireboltUpdateLoadingMethodTypeDestinationFireboltUpdateLoadingMethodExternalTableViaS3
+ externalTableViaS3 := new(ExternalTableViaS3)
+ if err := utils.UnmarshalJSON(data, &externalTableViaS3, "", true, true); err == nil {
+ u.ExternalTableViaS3 = externalTableViaS3
+ u.Type = DestinationFireboltUpdateLoadingMethodTypeExternalTableViaS3
return nil
}
@@ -132,15 +185,15 @@ func (u *DestinationFireboltUpdateLoadingMethod) UnmarshalJSON(data []byte) erro
}
func (u DestinationFireboltUpdateLoadingMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationFireboltUpdateLoadingMethodSQLInserts != nil {
- return json.Marshal(u.DestinationFireboltUpdateLoadingMethodSQLInserts)
+ if u.SQLInserts != nil {
+ return utils.MarshalJSON(u.SQLInserts, "", true)
}
- if u.DestinationFireboltUpdateLoadingMethodExternalTableViaS3 != nil {
- return json.Marshal(u.DestinationFireboltUpdateLoadingMethodExternalTableViaS3)
+ if u.ExternalTableViaS3 != nil {
+ return utils.MarshalJSON(u.ExternalTableViaS3, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationFireboltUpdate struct {
@@ -159,3 +212,52 @@ type DestinationFireboltUpdate struct {
// Firebolt email address you use to login.
Username string `json:"username"`
}
+
+func (o *DestinationFireboltUpdate) GetAccount() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Account
+}
+
+func (o *DestinationFireboltUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationFireboltUpdate) GetEngine() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Engine
+}
+
+func (o *DestinationFireboltUpdate) GetHost() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Host
+}
+
+func (o *DestinationFireboltUpdate) GetLoadingMethod() *DestinationFireboltUpdateLoadingMethod {
+ if o == nil {
+ return nil
+ }
+ return o.LoadingMethod
+}
+
+func (o *DestinationFireboltUpdate) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *DestinationFireboltUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationfirestore.go b/internal/sdk/pkg/models/shared/destinationfirestore.go
old mode 100755
new mode 100644
index b2f808703..3e396c9c7
--- a/internal/sdk/pkg/models/shared/destinationfirestore.go
+++ b/internal/sdk/pkg/models/shared/destinationfirestore.go
@@ -5,36 +5,66 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationFirestoreFirestore string
+type Firestore string
const (
- DestinationFirestoreFirestoreFirestore DestinationFirestoreFirestore = "firestore"
+ FirestoreFirestore Firestore = "firestore"
)
-func (e DestinationFirestoreFirestore) ToPointer() *DestinationFirestoreFirestore {
+func (e Firestore) ToPointer() *Firestore {
return &e
}
-func (e *DestinationFirestoreFirestore) UnmarshalJSON(data []byte) error {
+func (e *Firestore) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "firestore":
- *e = DestinationFirestoreFirestore(v)
+ *e = Firestore(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationFirestoreFirestore: %v", v)
+ return fmt.Errorf("invalid value for Firestore: %v", v)
}
}
type DestinationFirestore struct {
// The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
- CredentialsJSON *string `json:"credentials_json,omitempty"`
- DestinationType DestinationFirestoreFirestore `json:"destinationType"`
+ CredentialsJSON *string `json:"credentials_json,omitempty"`
+ destinationType Firestore `const:"firestore" json:"destinationType"`
// The GCP project ID for the project containing the target BigQuery dataset.
ProjectID string `json:"project_id"`
}
+
+func (d DestinationFirestore) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationFirestore) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationFirestore) GetCredentialsJSON() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CredentialsJSON
+}
+
+func (o *DestinationFirestore) GetDestinationType() Firestore {
+ return FirestoreFirestore
+}
+
+func (o *DestinationFirestore) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationfirestorecreaterequest.go b/internal/sdk/pkg/models/shared/destinationfirestorecreaterequest.go
old mode 100755
new mode 100644
index bf882f3c5..7a57e1195
--- a/internal/sdk/pkg/models/shared/destinationfirestorecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationfirestorecreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationFirestoreCreateRequest struct {
Configuration DestinationFirestore `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationFirestoreCreateRequest) GetConfiguration() DestinationFirestore {
+ if o == nil {
+ return DestinationFirestore{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationFirestoreCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationFirestoreCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationFirestoreCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationfirestoreputrequest.go b/internal/sdk/pkg/models/shared/destinationfirestoreputrequest.go
old mode 100755
new mode 100644
index 45266c929..1a2060769
--- a/internal/sdk/pkg/models/shared/destinationfirestoreputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationfirestoreputrequest.go
@@ -7,3 +7,24 @@ type DestinationFirestorePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationFirestorePutRequest) GetConfiguration() DestinationFirestoreUpdate {
+ if o == nil {
+ return DestinationFirestoreUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationFirestorePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationFirestorePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationfirestoreupdate.go b/internal/sdk/pkg/models/shared/destinationfirestoreupdate.go
old mode 100755
new mode 100644
index 86043b1c1..f2c18ea1d
--- a/internal/sdk/pkg/models/shared/destinationfirestoreupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationfirestoreupdate.go
@@ -8,3 +8,17 @@ type DestinationFirestoreUpdate struct {
// The GCP project ID for the project containing the target BigQuery dataset.
ProjectID string `json:"project_id"`
}
+
+func (o *DestinationFirestoreUpdate) GetCredentialsJSON() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CredentialsJSON
+}
+
+func (o *DestinationFirestoreUpdate) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationgcs.go b/internal/sdk/pkg/models/shared/destinationgcs.go
old mode 100755
new mode 100644
index e0a586d4b..30f754eb6
--- a/internal/sdk/pkg/models/shared/destinationgcs.go
+++ b/internal/sdk/pkg/models/shared/destinationgcs.go
@@ -3,75 +3,104 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationGcsAuthenticationHMACKeyCredentialType string
+type DestinationGcsCredentialType string
const (
- DestinationGcsAuthenticationHMACKeyCredentialTypeHmacKey DestinationGcsAuthenticationHMACKeyCredentialType = "HMAC_KEY"
+ DestinationGcsCredentialTypeHmacKey DestinationGcsCredentialType = "HMAC_KEY"
)
-func (e DestinationGcsAuthenticationHMACKeyCredentialType) ToPointer() *DestinationGcsAuthenticationHMACKeyCredentialType {
+func (e DestinationGcsCredentialType) ToPointer() *DestinationGcsCredentialType {
return &e
}
-func (e *DestinationGcsAuthenticationHMACKeyCredentialType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsCredentialType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "HMAC_KEY":
- *e = DestinationGcsAuthenticationHMACKeyCredentialType(v)
+ *e = DestinationGcsCredentialType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsAuthenticationHMACKeyCredentialType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsCredentialType: %v", v)
}
}
-// DestinationGcsAuthenticationHMACKey - An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
-type DestinationGcsAuthenticationHMACKey struct {
- CredentialType DestinationGcsAuthenticationHMACKeyCredentialType `json:"credential_type"`
+// DestinationGcsHMACKey - An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
+type DestinationGcsHMACKey struct {
+ CredentialType *DestinationGcsCredentialType `default:"HMAC_KEY" json:"credential_type"`
// When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here.
HmacKeyAccessID string `json:"hmac_key_access_id"`
// The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here.
HmacKeySecret string `json:"hmac_key_secret"`
}
+func (d DestinationGcsHMACKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsHMACKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsHMACKey) GetCredentialType() *DestinationGcsCredentialType {
+ if o == nil {
+ return nil
+ }
+ return o.CredentialType
+}
+
+func (o *DestinationGcsHMACKey) GetHmacKeyAccessID() string {
+ if o == nil {
+ return ""
+ }
+ return o.HmacKeyAccessID
+}
+
+func (o *DestinationGcsHMACKey) GetHmacKeySecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.HmacKeySecret
+}
+
type DestinationGcsAuthenticationType string
const (
- DestinationGcsAuthenticationTypeDestinationGcsAuthenticationHMACKey DestinationGcsAuthenticationType = "destination-gcs_Authentication_HMAC Key"
+ DestinationGcsAuthenticationTypeDestinationGcsHMACKey DestinationGcsAuthenticationType = "destination-gcs_HMAC Key"
)
type DestinationGcsAuthentication struct {
- DestinationGcsAuthenticationHMACKey *DestinationGcsAuthenticationHMACKey
+ DestinationGcsHMACKey *DestinationGcsHMACKey
Type DestinationGcsAuthenticationType
}
-func CreateDestinationGcsAuthenticationDestinationGcsAuthenticationHMACKey(destinationGcsAuthenticationHMACKey DestinationGcsAuthenticationHMACKey) DestinationGcsAuthentication {
- typ := DestinationGcsAuthenticationTypeDestinationGcsAuthenticationHMACKey
+func CreateDestinationGcsAuthenticationDestinationGcsHMACKey(destinationGcsHMACKey DestinationGcsHMACKey) DestinationGcsAuthentication {
+ typ := DestinationGcsAuthenticationTypeDestinationGcsHMACKey
return DestinationGcsAuthentication{
- DestinationGcsAuthenticationHMACKey: &destinationGcsAuthenticationHMACKey,
- Type: typ,
+ DestinationGcsHMACKey: &destinationGcsHMACKey,
+ Type: typ,
}
}
func (u *DestinationGcsAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationGcsAuthenticationHMACKey := new(DestinationGcsAuthenticationHMACKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsAuthenticationHMACKey); err == nil {
- u.DestinationGcsAuthenticationHMACKey = destinationGcsAuthenticationHMACKey
- u.Type = DestinationGcsAuthenticationTypeDestinationGcsAuthenticationHMACKey
+
+ destinationGcsHMACKey := new(DestinationGcsHMACKey)
+ if err := utils.UnmarshalJSON(data, &destinationGcsHMACKey, "", true, true); err == nil {
+ u.DestinationGcsHMACKey = destinationGcsHMACKey
+ u.Type = DestinationGcsAuthenticationTypeDestinationGcsHMACKey
return nil
}
@@ -79,55 +108,55 @@ func (u *DestinationGcsAuthentication) UnmarshalJSON(data []byte) error {
}
func (u DestinationGcsAuthentication) MarshalJSON() ([]byte, error) {
- if u.DestinationGcsAuthenticationHMACKey != nil {
- return json.Marshal(u.DestinationGcsAuthenticationHMACKey)
+ if u.DestinationGcsHMACKey != nil {
+ return utils.MarshalJSON(u.DestinationGcsHMACKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationGcsGcs string
+type Gcs string
const (
- DestinationGcsGcsGcs DestinationGcsGcs = "gcs"
+ GcsGcs Gcs = "gcs"
)
-func (e DestinationGcsGcs) ToPointer() *DestinationGcsGcs {
+func (e Gcs) ToPointer() *Gcs {
return &e
}
-func (e *DestinationGcsGcs) UnmarshalJSON(data []byte) error {
+func (e *Gcs) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "gcs":
- *e = DestinationGcsGcs(v)
+ *e = Gcs(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsGcs: %v", v)
+ return fmt.Errorf("invalid value for Gcs: %v", v)
}
}
-// DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec - The compression algorithm used to compress data pages.
-type DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec string
+// DestinationGcsSchemasCompressionCodec - The compression algorithm used to compress data pages.
+type DestinationGcsSchemasCompressionCodec string
const (
- DestinationGcsOutputFormatParquetColumnarStorageCompressionCodecUncompressed DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec = "UNCOMPRESSED"
- DestinationGcsOutputFormatParquetColumnarStorageCompressionCodecSnappy DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec = "SNAPPY"
- DestinationGcsOutputFormatParquetColumnarStorageCompressionCodecGzip DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec = "GZIP"
- DestinationGcsOutputFormatParquetColumnarStorageCompressionCodecLzo DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec = "LZO"
- DestinationGcsOutputFormatParquetColumnarStorageCompressionCodecBrotli DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec = "BROTLI"
- DestinationGcsOutputFormatParquetColumnarStorageCompressionCodecLz4 DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec = "LZ4"
- DestinationGcsOutputFormatParquetColumnarStorageCompressionCodecZstd DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec = "ZSTD"
+ DestinationGcsSchemasCompressionCodecUncompressed DestinationGcsSchemasCompressionCodec = "UNCOMPRESSED"
+ DestinationGcsSchemasCompressionCodecSnappy DestinationGcsSchemasCompressionCodec = "SNAPPY"
+ DestinationGcsSchemasCompressionCodecGzip DestinationGcsSchemasCompressionCodec = "GZIP"
+ DestinationGcsSchemasCompressionCodecLzo DestinationGcsSchemasCompressionCodec = "LZO"
+ DestinationGcsSchemasCompressionCodecBrotli DestinationGcsSchemasCompressionCodec = "BROTLI"
+ DestinationGcsSchemasCompressionCodecLz4 DestinationGcsSchemasCompressionCodec = "LZ4"
+ DestinationGcsSchemasCompressionCodecZstd DestinationGcsSchemasCompressionCodec = "ZSTD"
)
-func (e DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec) ToPointer() *DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec {
+func (e DestinationGcsSchemasCompressionCodec) ToPointer() *DestinationGcsSchemasCompressionCodec {
return &e
}
-func (e *DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasCompressionCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -146,350 +175,497 @@ func (e *DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec) Unmar
case "LZ4":
fallthrough
case "ZSTD":
- *e = DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec(v)
+ *e = DestinationGcsSchemasCompressionCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasCompressionCodec: %v", v)
}
}
-type DestinationGcsOutputFormatParquetColumnarStorageFormatType string
+type DestinationGcsSchemasFormatOutputFormatFormatType string
const (
- DestinationGcsOutputFormatParquetColumnarStorageFormatTypeParquet DestinationGcsOutputFormatParquetColumnarStorageFormatType = "Parquet"
+ DestinationGcsSchemasFormatOutputFormatFormatTypeParquet DestinationGcsSchemasFormatOutputFormatFormatType = "Parquet"
)
-func (e DestinationGcsOutputFormatParquetColumnarStorageFormatType) ToPointer() *DestinationGcsOutputFormatParquetColumnarStorageFormatType {
+func (e DestinationGcsSchemasFormatOutputFormatFormatType) ToPointer() *DestinationGcsSchemasFormatOutputFormatFormatType {
return &e
}
-func (e *DestinationGcsOutputFormatParquetColumnarStorageFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasFormatOutputFormatFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Parquet":
- *e = DestinationGcsOutputFormatParquetColumnarStorageFormatType(v)
+ *e = DestinationGcsSchemasFormatOutputFormatFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatParquetColumnarStorageFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasFormatOutputFormatFormatType: %v", v)
}
}
-// DestinationGcsOutputFormatParquetColumnarStorage - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
-type DestinationGcsOutputFormatParquetColumnarStorage struct {
+// DestinationGcsParquetColumnarStorage - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
+type DestinationGcsParquetColumnarStorage struct {
// This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
- BlockSizeMb *int64 `json:"block_size_mb,omitempty"`
+ BlockSizeMb *int64 `default:"128" json:"block_size_mb"`
// The compression algorithm used to compress data pages.
- CompressionCodec *DestinationGcsOutputFormatParquetColumnarStorageCompressionCodec `json:"compression_codec,omitempty"`
+ CompressionCodec *DestinationGcsSchemasCompressionCodec `default:"UNCOMPRESSED" json:"compression_codec"`
// Default: true.
- DictionaryEncoding *bool `json:"dictionary_encoding,omitempty"`
+ DictionaryEncoding *bool `default:"true" json:"dictionary_encoding"`
// There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
- DictionaryPageSizeKb *int64 `json:"dictionary_page_size_kb,omitempty"`
- FormatType DestinationGcsOutputFormatParquetColumnarStorageFormatType `json:"format_type"`
+ DictionaryPageSizeKb *int64 `default:"1024" json:"dictionary_page_size_kb"`
+ FormatType *DestinationGcsSchemasFormatOutputFormatFormatType `default:"Parquet" json:"format_type"`
// Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
- MaxPaddingSizeMb *int64 `json:"max_padding_size_mb,omitempty"`
+ MaxPaddingSizeMb *int64 `default:"8" json:"max_padding_size_mb"`
// The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
- PageSizeKb *int64 `json:"page_size_kb,omitempty"`
+ PageSizeKb *int64 `default:"1024" json:"page_size_kb"`
+}
+
+func (d DestinationGcsParquetColumnarStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsParquetColumnarStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsParquetColumnarStorage) GetBlockSizeMb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BlockSizeMb
}
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType string
+func (o *DestinationGcsParquetColumnarStorage) GetCompressionCodec() *DestinationGcsSchemasCompressionCodec {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionCodec
+}
+
+func (o *DestinationGcsParquetColumnarStorage) GetDictionaryEncoding() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DictionaryEncoding
+}
+
+func (o *DestinationGcsParquetColumnarStorage) GetDictionaryPageSizeKb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DictionaryPageSizeKb
+}
+
+func (o *DestinationGcsParquetColumnarStorage) GetFormatType() *DestinationGcsSchemasFormatOutputFormatFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
+}
+
+func (o *DestinationGcsParquetColumnarStorage) GetMaxPaddingSizeMb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.MaxPaddingSizeMb
+}
+
+func (o *DestinationGcsParquetColumnarStorage) GetPageSizeKb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PageSizeKb
+}
+
+type DestinationGcsSchemasFormatOutputFormatCompressionType string
const (
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeGzip DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType = "GZIP"
+ DestinationGcsSchemasFormatOutputFormatCompressionTypeGzip DestinationGcsSchemasFormatOutputFormatCompressionType = "GZIP"
)
-func (e DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) ToPointer() *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType {
+func (e DestinationGcsSchemasFormatOutputFormatCompressionType) ToPointer() *DestinationGcsSchemasFormatOutputFormatCompressionType {
return &e
}
-func (e *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasFormatOutputFormatCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GZIP":
- *e = DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(v)
+ *e = DestinationGcsSchemasFormatOutputFormatCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasFormatOutputFormatCompressionType: %v", v)
}
}
-// DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP struct {
- CompressionType *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType `json:"compression_type,omitempty"`
+// DestinationGcsSchemasGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationGcsSchemasGZIP struct {
+ CompressionType *DestinationGcsSchemasFormatOutputFormatCompressionType `default:"GZIP" json:"compression_type"`
+}
+
+func (d DestinationGcsSchemasGZIP) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType string
+func (d *DestinationGcsSchemasGZIP) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsSchemasGZIP) GetCompressionType() *DestinationGcsSchemasFormatOutputFormatCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
+}
+
+type DestinationGcsSchemasFormatCompressionType string
const (
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeNoCompression DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType = "No Compression"
+ DestinationGcsSchemasFormatCompressionTypeNoCompression DestinationGcsSchemasFormatCompressionType = "No Compression"
)
-func (e DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) ToPointer() *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType {
+func (e DestinationGcsSchemasFormatCompressionType) ToPointer() *DestinationGcsSchemasFormatCompressionType {
return &e
}
-func (e *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasFormatCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "No Compression":
- *e = DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(v)
+ *e = DestinationGcsSchemasFormatCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasFormatCompressionType: %v", v)
+ }
+}
+
+// DestinationGcsSchemasFormatNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationGcsSchemasFormatNoCompression struct {
+ CompressionType *DestinationGcsSchemasFormatCompressionType `default:"No Compression" json:"compression_type"`
+}
+
+func (d DestinationGcsSchemasFormatNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsSchemasFormatNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression struct {
- CompressionType *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType `json:"compression_type,omitempty"`
+func (o *DestinationGcsSchemasFormatNoCompression) GetCompressionType() *DestinationGcsSchemasFormatCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
}
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionType string
+type DestinationGcsSchemasCompressionUnionType string
const (
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-gcs_Output Format_JSON Lines: newline-delimited JSON_Compression_No Compression"
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-gcs_Output Format_JSON Lines: newline-delimited JSON_Compression_GZIP"
+ DestinationGcsSchemasCompressionUnionTypeDestinationGcsSchemasFormatNoCompression DestinationGcsSchemasCompressionUnionType = "destination-gcs_Schemas_format_No Compression"
+ DestinationGcsSchemasCompressionUnionTypeDestinationGcsSchemasGZIP DestinationGcsSchemasCompressionUnionType = "destination-gcs_Schemas_GZIP"
)
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+type DestinationGcsSchemasCompression struct {
+ DestinationGcsSchemasFormatNoCompression *DestinationGcsSchemasFormatNoCompression
+ DestinationGcsSchemasGZIP *DestinationGcsSchemasGZIP
- Type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionType
+ Type DestinationGcsSchemasCompressionUnionType
}
-func CreateDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression(destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression) DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+func CreateDestinationGcsSchemasCompressionDestinationGcsSchemasFormatNoCompression(destinationGcsSchemasFormatNoCompression DestinationGcsSchemasFormatNoCompression) DestinationGcsSchemasCompression {
+ typ := DestinationGcsSchemasCompressionUnionTypeDestinationGcsSchemasFormatNoCompression
- return DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: &destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
- Type: typ,
+ return DestinationGcsSchemasCompression{
+ DestinationGcsSchemasFormatNoCompression: &destinationGcsSchemasFormatNoCompression,
+ Type: typ,
}
}
-func CreateDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP(destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP) DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+func CreateDestinationGcsSchemasCompressionDestinationGcsSchemasGZIP(destinationGcsSchemasGZIP DestinationGcsSchemasGZIP) DestinationGcsSchemasCompression {
+ typ := DestinationGcsSchemasCompressionUnionTypeDestinationGcsSchemasGZIP
- return DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: &destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
- Type: typ,
+ return DestinationGcsSchemasCompression{
+ DestinationGcsSchemasGZIP: &destinationGcsSchemasGZIP,
+ Type: typ,
}
}
-func (u *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationGcsSchemasCompression) UnmarshalJSON(data []byte) error {
- destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression := new(DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression); err == nil {
- u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- u.Type = DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+ destinationGcsSchemasFormatNoCompression := new(DestinationGcsSchemasFormatNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationGcsSchemasFormatNoCompression, "", true, true); err == nil {
+ u.DestinationGcsSchemasFormatNoCompression = destinationGcsSchemasFormatNoCompression
+ u.Type = DestinationGcsSchemasCompressionUnionTypeDestinationGcsSchemasFormatNoCompression
return nil
}
- destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP := new(DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP); err == nil {
- u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = destinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- u.Type = DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+ destinationGcsSchemasGZIP := new(DestinationGcsSchemasGZIP)
+ if err := utils.UnmarshalJSON(data, &destinationGcsSchemasGZIP, "", true, true); err == nil {
+ u.DestinationGcsSchemasGZIP = destinationGcsSchemasGZIP
+ u.Type = DestinationGcsSchemasCompressionUnionTypeDestinationGcsSchemasGZIP
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression) MarshalJSON() ([]byte, error) {
- if u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- return json.Marshal(u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
+func (u DestinationGcsSchemasCompression) MarshalJSON() ([]byte, error) {
+ if u.DestinationGcsSchemasFormatNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationGcsSchemasFormatNoCompression, "", true)
}
- if u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- return json.Marshal(u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
+ if u.DestinationGcsSchemasGZIP != nil {
+ return utils.MarshalJSON(u.DestinationGcsSchemasGZIP, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONFormatType string
+type DestinationGcsSchemasFormatFormatType string
const (
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONFormatTypeJsonl DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONFormatType = "JSONL"
+ DestinationGcsSchemasFormatFormatTypeJsonl DestinationGcsSchemasFormatFormatType = "JSONL"
)
-func (e DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONFormatType) ToPointer() *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONFormatType {
+func (e DestinationGcsSchemasFormatFormatType) ToPointer() *DestinationGcsSchemasFormatFormatType {
return &e
}
-func (e *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasFormatFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "JSONL":
- *e = DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONFormatType(v)
+ *e = DestinationGcsSchemasFormatFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasFormatFormatType: %v", v)
}
}
-// DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
-type DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON struct {
+// DestinationGcsJSONLinesNewlineDelimitedJSON - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
+type DestinationGcsJSONLinesNewlineDelimitedJSON struct {
// Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
- Compression *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONCompression `json:"compression,omitempty"`
- FormatType DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSONFormatType `json:"format_type"`
+ Compression *DestinationGcsSchemasCompression `json:"compression,omitempty"`
+ FormatType *DestinationGcsSchemasFormatFormatType `default:"JSONL" json:"format_type"`
+}
+
+func (d DestinationGcsJSONLinesNewlineDelimitedJSON) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsJSONLinesNewlineDelimitedJSON) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsJSONLinesNewlineDelimitedJSON) GetCompression() *DestinationGcsSchemasCompression {
+ if o == nil {
+ return nil
+ }
+ return o.Compression
+}
+
+func (o *DestinationGcsJSONLinesNewlineDelimitedJSON) GetFormatType() *DestinationGcsSchemasFormatFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
}
-type DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType string
+type DestinationGcsSchemasCompressionType string
const (
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeGzip DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType = "GZIP"
+ DestinationGcsSchemasCompressionTypeGzip DestinationGcsSchemasCompressionType = "GZIP"
)
-func (e DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType) ToPointer() *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType {
+func (e DestinationGcsSchemasCompressionType) ToPointer() *DestinationGcsSchemasCompressionType {
return &e
}
-func (e *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GZIP":
- *e = DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType(v)
+ *e = DestinationGcsSchemasCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasCompressionType: %v", v)
}
}
-// DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
-type DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP struct {
- CompressionType *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType `json:"compression_type,omitempty"`
+// DestinationGcsGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
+type DestinationGcsGZIP struct {
+ CompressionType *DestinationGcsSchemasCompressionType `default:"GZIP" json:"compression_type"`
+}
+
+func (d DestinationGcsGZIP) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType string
+func (d *DestinationGcsGZIP) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsGZIP) GetCompressionType() *DestinationGcsSchemasCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
+}
+
+type DestinationGcsCompressionType string
const (
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionTypeNoCompression DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType = "No Compression"
+ DestinationGcsCompressionTypeNoCompression DestinationGcsCompressionType = "No Compression"
)
-func (e DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType) ToPointer() *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType {
+func (e DestinationGcsCompressionType) ToPointer() *DestinationGcsCompressionType {
return &e
}
-func (e *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "No Compression":
- *e = DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType(v)
+ *e = DestinationGcsCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsCompressionType: %v", v)
+ }
+}
+
+// DestinationGcsSchemasNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
+type DestinationGcsSchemasNoCompression struct {
+ CompressionType *DestinationGcsCompressionType `default:"No Compression" json:"compression_type"`
+}
+
+func (d DestinationGcsSchemasNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsSchemasNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
-type DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression struct {
- CompressionType *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType `json:"compression_type,omitempty"`
+func (o *DestinationGcsSchemasNoCompression) GetCompressionType() *DestinationGcsCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
}
-type DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionType string
+type DestinationGcsCompressionUnionType string
const (
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionType = "destination-gcs_Output Format_CSV: Comma-Separated Values_Compression_No Compression"
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionType = "destination-gcs_Output Format_CSV: Comma-Separated Values_Compression_GZIP"
+ DestinationGcsCompressionUnionTypeDestinationGcsSchemasNoCompression DestinationGcsCompressionUnionType = "destination-gcs_Schemas_No Compression"
+ DestinationGcsCompressionUnionTypeDestinationGcsGZIP DestinationGcsCompressionUnionType = "destination-gcs_GZIP"
)
-type DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression struct {
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP
+type DestinationGcsCompression struct {
+ DestinationGcsSchemasNoCompression *DestinationGcsSchemasNoCompression
+ DestinationGcsGZIP *DestinationGcsGZIP
- Type DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionType
+ Type DestinationGcsCompressionUnionType
}
-func CreateDestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionDestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression(destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression) DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression {
- typ := DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
+func CreateDestinationGcsCompressionDestinationGcsSchemasNoCompression(destinationGcsSchemasNoCompression DestinationGcsSchemasNoCompression) DestinationGcsCompression {
+ typ := DestinationGcsCompressionUnionTypeDestinationGcsSchemasNoCompression
- return DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression: &destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression,
- Type: typ,
+ return DestinationGcsCompression{
+ DestinationGcsSchemasNoCompression: &destinationGcsSchemasNoCompression,
+ Type: typ,
}
}
-func CreateDestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionDestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP(destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP) DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression {
- typ := DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP
+func CreateDestinationGcsCompressionDestinationGcsGZIP(destinationGcsGZIP DestinationGcsGZIP) DestinationGcsCompression {
+ typ := DestinationGcsCompressionUnionTypeDestinationGcsGZIP
- return DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP: &destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP,
- Type: typ,
+ return DestinationGcsCompression{
+ DestinationGcsGZIP: &destinationGcsGZIP,
+ Type: typ,
}
}
-func (u *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationGcsCompression) UnmarshalJSON(data []byte) error {
- destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression := new(DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression); err == nil {
- u.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression = destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- u.Type = DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
+ destinationGcsSchemasNoCompression := new(DestinationGcsSchemasNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationGcsSchemasNoCompression, "", true, true); err == nil {
+ u.DestinationGcsSchemasNoCompression = destinationGcsSchemasNoCompression
+ u.Type = DestinationGcsCompressionUnionTypeDestinationGcsSchemasNoCompression
return nil
}
- destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP := new(DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP); err == nil {
- u.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP = destinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP
- u.Type = DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP
+ destinationGcsGZIP := new(DestinationGcsGZIP)
+ if err := utils.UnmarshalJSON(data, &destinationGcsGZIP, "", true, true); err == nil {
+ u.DestinationGcsGZIP = destinationGcsGZIP
+ u.Type = DestinationGcsCompressionUnionTypeDestinationGcsGZIP
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression) MarshalJSON() ([]byte, error) {
- if u.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- return json.Marshal(u.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionNoCompression)
+func (u DestinationGcsCompression) MarshalJSON() ([]byte, error) {
+ if u.DestinationGcsSchemasNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationGcsSchemasNoCompression, "", true)
}
- if u.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- return json.Marshal(u.DestinationGcsOutputFormatCSVCommaSeparatedValuesCompressionGZIP)
+ if u.DestinationGcsGZIP != nil {
+ return utils.MarshalJSON(u.DestinationGcsGZIP, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization - Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-type DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization string
+// DestinationGcsNormalization - Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.
+type DestinationGcsNormalization string
const (
- DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalizationNoFlattening DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization = "No flattening"
- DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalizationRootLevelFlattening DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization = "Root level flattening"
+ DestinationGcsNormalizationNoFlattening DestinationGcsNormalization = "No flattening"
+ DestinationGcsNormalizationRootLevelFlattening DestinationGcsNormalization = "Root level flattening"
)
-func (e DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization) ToPointer() *DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization {
+func (e DestinationGcsNormalization) ToPointer() *DestinationGcsNormalization {
return &e
}
-func (e *DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsNormalization) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -498,513 +674,684 @@ func (e *DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization) Unmarsh
case "No flattening":
fallthrough
case "Root level flattening":
- *e = DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization(v)
+ *e = DestinationGcsNormalization(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsNormalization: %v", v)
}
}
-type DestinationGcsOutputFormatCSVCommaSeparatedValuesFormatType string
+type DestinationGcsSchemasFormatType string
const (
- DestinationGcsOutputFormatCSVCommaSeparatedValuesFormatTypeCsv DestinationGcsOutputFormatCSVCommaSeparatedValuesFormatType = "CSV"
+ DestinationGcsSchemasFormatTypeCsv DestinationGcsSchemasFormatType = "CSV"
)
-func (e DestinationGcsOutputFormatCSVCommaSeparatedValuesFormatType) ToPointer() *DestinationGcsOutputFormatCSVCommaSeparatedValuesFormatType {
+func (e DestinationGcsSchemasFormatType) ToPointer() *DestinationGcsSchemasFormatType {
return &e
}
-func (e *DestinationGcsOutputFormatCSVCommaSeparatedValuesFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CSV":
- *e = DestinationGcsOutputFormatCSVCommaSeparatedValuesFormatType(v)
+ *e = DestinationGcsSchemasFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatCSVCommaSeparatedValuesFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasFormatType: %v", v)
}
}
-// DestinationGcsOutputFormatCSVCommaSeparatedValues - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
-type DestinationGcsOutputFormatCSVCommaSeparatedValues struct {
+// DestinationGcsCSVCommaSeparatedValues - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
+type DestinationGcsCSVCommaSeparatedValues struct {
// Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
- Compression *DestinationGcsOutputFormatCSVCommaSeparatedValuesCompression `json:"compression,omitempty"`
+ Compression *DestinationGcsCompression `json:"compression,omitempty"`
// Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.
- Flattening *DestinationGcsOutputFormatCSVCommaSeparatedValuesNormalization `json:"flattening,omitempty"`
- FormatType DestinationGcsOutputFormatCSVCommaSeparatedValuesFormatType `json:"format_type"`
+ Flattening *DestinationGcsNormalization `default:"No flattening" json:"flattening"`
+ FormatType *DestinationGcsSchemasFormatType `default:"CSV" json:"format_type"`
}
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappyCodec string
+func (d DestinationGcsCSVCommaSeparatedValues) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsCSVCommaSeparatedValues) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsCSVCommaSeparatedValues) GetCompression() *DestinationGcsCompression {
+ if o == nil {
+ return nil
+ }
+ return o.Compression
+}
+
+func (o *DestinationGcsCSVCommaSeparatedValues) GetFlattening() *DestinationGcsNormalization {
+ if o == nil {
+ return nil
+ }
+ return o.Flattening
+}
+
+func (o *DestinationGcsCSVCommaSeparatedValues) GetFormatType() *DestinationGcsSchemasFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
+}
+
+type DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodec string
const (
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappyCodecSnappy DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappyCodec = "snappy"
+ DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodecSnappy DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodec = "snappy"
)
-func (e DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappyCodec) ToPointer() *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappyCodec {
+func (e DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodec) ToPointer() *DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodec {
return &e
}
-func (e *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappyCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "snappy":
- *e = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappyCodec(v)
+ *e = DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappyCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodec: %v", v)
+ }
+}
+
+// DestinationGcsSnappy - The compression algorithm used to compress data. Default to no compression.
+type DestinationGcsSnappy struct {
+ Codec *DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodec `default:"snappy" json:"codec"`
+}
+
+func (d DestinationGcsSnappy) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsSnappy) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy struct {
- Codec DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappyCodec `json:"codec"`
+func (o *DestinationGcsSnappy) GetCodec() *DestinationGcsSchemasFormatOutputFormat1CompressionCodecCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
}
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandardCodec string
+type DestinationGcsSchemasFormatOutputFormat1Codec string
const (
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandardCodecZstandard DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandardCodec = "zstandard"
+ DestinationGcsSchemasFormatOutputFormat1CodecZstandard DestinationGcsSchemasFormatOutputFormat1Codec = "zstandard"
)
-func (e DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandardCodec) ToPointer() *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandardCodec {
+func (e DestinationGcsSchemasFormatOutputFormat1Codec) ToPointer() *DestinationGcsSchemasFormatOutputFormat1Codec {
return &e
}
-func (e *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandardCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasFormatOutputFormat1Codec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zstandard":
- *e = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandardCodec(v)
+ *e = DestinationGcsSchemasFormatOutputFormat1Codec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandardCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasFormatOutputFormat1Codec: %v", v)
}
}
-// DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard struct {
- Codec DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandardCodec `json:"codec"`
+// DestinationGcsZstandard - The compression algorithm used to compress data. Default to no compression.
+type DestinationGcsZstandard struct {
+ Codec *DestinationGcsSchemasFormatOutputFormat1Codec `default:"zstandard" json:"codec"`
// Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
- CompressionLevel *int64 `json:"compression_level,omitempty"`
+ CompressionLevel *int64 `default:"3" json:"compression_level"`
// If true, include a checksum with each data block.
- IncludeChecksum *bool `json:"include_checksum,omitempty"`
+ IncludeChecksum *bool `default:"false" json:"include_checksum"`
+}
+
+func (d DestinationGcsZstandard) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsZstandard) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsZstandard) GetCodec() *DestinationGcsSchemasFormatOutputFormat1Codec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *DestinationGcsZstandard) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
+}
+
+func (o *DestinationGcsZstandard) GetIncludeChecksum() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeChecksum
}
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXzCodec string
+type DestinationGcsSchemasFormatOutputFormatCodec string
const (
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXzCodecXz DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXzCodec = "xz"
+ DestinationGcsSchemasFormatOutputFormatCodecXz DestinationGcsSchemasFormatOutputFormatCodec = "xz"
)
-func (e DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXzCodec) ToPointer() *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXzCodec {
+func (e DestinationGcsSchemasFormatOutputFormatCodec) ToPointer() *DestinationGcsSchemasFormatOutputFormatCodec {
return &e
}
-func (e *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXzCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasFormatOutputFormatCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "xz":
- *e = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXzCodec(v)
+ *e = DestinationGcsSchemasFormatOutputFormatCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXzCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasFormatOutputFormatCodec: %v", v)
}
}
-// DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz struct {
- Codec DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXzCodec `json:"codec"`
+// DestinationGcsXz - The compression algorithm used to compress data. Default to no compression.
+type DestinationGcsXz struct {
+ Codec *DestinationGcsSchemasFormatOutputFormatCodec `default:"xz" json:"codec"`
// The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.
- CompressionLevel *int64 `json:"compression_level,omitempty"`
+ CompressionLevel *int64 `default:"6" json:"compression_level"`
}
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2Codec string
+func (d DestinationGcsXz) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsXz) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsXz) GetCodec() *DestinationGcsSchemasFormatOutputFormatCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *DestinationGcsXz) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
+}
+
+type DestinationGcsSchemasFormatCodec string
const (
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2CodecBzip2 DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2Codec = "bzip2"
+ DestinationGcsSchemasFormatCodecBzip2 DestinationGcsSchemasFormatCodec = "bzip2"
)
-func (e DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2Codec) ToPointer() *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2Codec {
+func (e DestinationGcsSchemasFormatCodec) ToPointer() *DestinationGcsSchemasFormatCodec {
return &e
}
-func (e *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2Codec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasFormatCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "bzip2":
- *e = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2Codec(v)
+ *e = DestinationGcsSchemasFormatCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2Codec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasFormatCodec: %v", v)
}
}
-// DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 struct {
- Codec DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2Codec `json:"codec"`
+// DestinationGcsBzip2 - The compression algorithm used to compress data. Default to no compression.
+type DestinationGcsBzip2 struct {
+ Codec *DestinationGcsSchemasFormatCodec `default:"bzip2" json:"codec"`
}
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflateCodec string
+func (d DestinationGcsBzip2) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsBzip2) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsBzip2) GetCodec() *DestinationGcsSchemasFormatCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+type DestinationGcsSchemasCodec string
const (
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflateCodecDeflate DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflateCodec = "Deflate"
+ DestinationGcsSchemasCodecDeflate DestinationGcsSchemasCodec = "Deflate"
)
-func (e DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflateCodec) ToPointer() *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflateCodec {
+func (e DestinationGcsSchemasCodec) ToPointer() *DestinationGcsSchemasCodec {
return &e
}
-func (e *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflateCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsSchemasCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Deflate":
- *e = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflateCodec(v)
+ *e = DestinationGcsSchemasCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflateCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsSchemasCodec: %v", v)
}
}
-// DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate struct {
- Codec DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflateCodec `json:"codec"`
+// DestinationGcsDeflate - The compression algorithm used to compress data. Default to no compression.
+type DestinationGcsDeflate struct {
+ Codec *DestinationGcsSchemasCodec `default:"Deflate" json:"codec"`
// 0: no compression & fastest, 9: best compression & slowest.
- CompressionLevel *int64 `json:"compression_level,omitempty"`
+ CompressionLevel *int64 `default:"0" json:"compression_level"`
+}
+
+func (d DestinationGcsDeflate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec string
+func (d *DestinationGcsDeflate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsDeflate) GetCodec() *DestinationGcsSchemasCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *DestinationGcsDeflate) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
+}
+
+type DestinationGcsCodec string
const (
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodecNoCompression DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec = "no compression"
+ DestinationGcsCodecNoCompression DestinationGcsCodec = "no compression"
)
-func (e DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec) ToPointer() *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec {
+func (e DestinationGcsCodec) ToPointer() *DestinationGcsCodec {
return &e
}
-func (e *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "no compression":
- *e = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec(v)
+ *e = DestinationGcsCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsCodec: %v", v)
+ }
+}
+
+// DestinationGcsNoCompression - The compression algorithm used to compress data. Default to no compression.
+type DestinationGcsNoCompression struct {
+ Codec *DestinationGcsCodec `default:"no compression" json:"codec"`
+}
+
+func (d DestinationGcsNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression struct {
- Codec DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec `json:"codec"`
+func (o *DestinationGcsNoCompression) GetCodec() *DestinationGcsCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
}
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecType string
+type DestinationGcsCompressionCodecType string
const (
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression DestinationGcsOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs_Output Format_Avro: Apache Avro_Compression Codec_No Compression"
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate DestinationGcsOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs_Output Format_Avro: Apache Avro_Compression Codec_Deflate"
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 DestinationGcsOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs_Output Format_Avro: Apache Avro_Compression Codec_bzip2"
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz DestinationGcsOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs_Output Format_Avro: Apache Avro_Compression Codec_xz"
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard DestinationGcsOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs_Output Format_Avro: Apache Avro_Compression Codec_zstandard"
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy DestinationGcsOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs_Output Format_Avro: Apache Avro_Compression Codec_snappy"
+ DestinationGcsCompressionCodecTypeDestinationGcsNoCompression DestinationGcsCompressionCodecType = "destination-gcs_No Compression"
+ DestinationGcsCompressionCodecTypeDestinationGcsDeflate DestinationGcsCompressionCodecType = "destination-gcs_Deflate"
+ DestinationGcsCompressionCodecTypeDestinationGcsBzip2 DestinationGcsCompressionCodecType = "destination-gcs_bzip2"
+ DestinationGcsCompressionCodecTypeDestinationGcsXz DestinationGcsCompressionCodecType = "destination-gcs_xz"
+ DestinationGcsCompressionCodecTypeDestinationGcsZstandard DestinationGcsCompressionCodecType = "destination-gcs_zstandard"
+ DestinationGcsCompressionCodecTypeDestinationGcsSnappy DestinationGcsCompressionCodecType = "destination-gcs_snappy"
)
-type DestinationGcsOutputFormatAvroApacheAvroCompressionCodec struct {
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy *DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy
+type DestinationGcsCompressionCodec struct {
+ DestinationGcsNoCompression *DestinationGcsNoCompression
+ DestinationGcsDeflate *DestinationGcsDeflate
+ DestinationGcsBzip2 *DestinationGcsBzip2
+ DestinationGcsXz *DestinationGcsXz
+ DestinationGcsZstandard *DestinationGcsZstandard
+ DestinationGcsSnappy *DestinationGcsSnappy
- Type DestinationGcsOutputFormatAvroApacheAvroCompressionCodecType
+ Type DestinationGcsCompressionCodecType
}
-func CreateDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression(destinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression) DestinationGcsOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression
+func CreateDestinationGcsCompressionCodecDestinationGcsNoCompression(destinationGcsNoCompression DestinationGcsNoCompression) DestinationGcsCompressionCodec {
+ typ := DestinationGcsCompressionCodecTypeDestinationGcsNoCompression
- return DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression: &destinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression,
- Type: typ,
+ return DestinationGcsCompressionCodec{
+ DestinationGcsNoCompression: &destinationGcsNoCompression,
+ Type: typ,
}
}
-func CreateDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate(destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate) DestinationGcsOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate
+func CreateDestinationGcsCompressionCodecDestinationGcsDeflate(destinationGcsDeflate DestinationGcsDeflate) DestinationGcsCompressionCodec {
+ typ := DestinationGcsCompressionCodecTypeDestinationGcsDeflate
- return DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate: &destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate,
- Type: typ,
+ return DestinationGcsCompressionCodec{
+ DestinationGcsDeflate: &destinationGcsDeflate,
+ Type: typ,
}
}
-func CreateDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2(destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2) DestinationGcsOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2
+func CreateDestinationGcsCompressionCodecDestinationGcsBzip2(destinationGcsBzip2 DestinationGcsBzip2) DestinationGcsCompressionCodec {
+ typ := DestinationGcsCompressionCodecTypeDestinationGcsBzip2
- return DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2: &destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2,
- Type: typ,
+ return DestinationGcsCompressionCodec{
+ DestinationGcsBzip2: &destinationGcsBzip2,
+ Type: typ,
}
}
-func CreateDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz(destinationGcsOutputFormatAvroApacheAvroCompressionCodecXz DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz) DestinationGcsOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz
+func CreateDestinationGcsCompressionCodecDestinationGcsXz(destinationGcsXz DestinationGcsXz) DestinationGcsCompressionCodec {
+ typ := DestinationGcsCompressionCodecTypeDestinationGcsXz
- return DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz: &destinationGcsOutputFormatAvroApacheAvroCompressionCodecXz,
- Type: typ,
+ return DestinationGcsCompressionCodec{
+ DestinationGcsXz: &destinationGcsXz,
+ Type: typ,
}
}
-func CreateDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard(destinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard) DestinationGcsOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard
+func CreateDestinationGcsCompressionCodecDestinationGcsZstandard(destinationGcsZstandard DestinationGcsZstandard) DestinationGcsCompressionCodec {
+ typ := DestinationGcsCompressionCodecTypeDestinationGcsZstandard
- return DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard: &destinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard,
- Type: typ,
+ return DestinationGcsCompressionCodec{
+ DestinationGcsZstandard: &destinationGcsZstandard,
+ Type: typ,
}
}
-func CreateDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy(destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy) DestinationGcsOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy
+func CreateDestinationGcsCompressionCodecDestinationGcsSnappy(destinationGcsSnappy DestinationGcsSnappy) DestinationGcsCompressionCodec {
+ typ := DestinationGcsCompressionCodecTypeDestinationGcsSnappy
- return DestinationGcsOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy: &destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy,
- Type: typ,
+ return DestinationGcsCompressionCodec{
+ DestinationGcsSnappy: &destinationGcsSnappy,
+ Type: typ,
}
}
-func (u *DestinationGcsOutputFormatAvroApacheAvroCompressionCodec) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationGcsCompressionCodec) UnmarshalJSON(data []byte) error {
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression); err == nil {
- u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression = destinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression
- u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression
+ destinationGcsNoCompression := new(DestinationGcsNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationGcsNoCompression, "", true, true); err == nil {
+ u.DestinationGcsNoCompression = destinationGcsNoCompression
+ u.Type = DestinationGcsCompressionCodecTypeDestinationGcsNoCompression
return nil
}
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil {
- u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2
- u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2
+ destinationGcsBzip2 := new(DestinationGcsBzip2)
+ if err := utils.UnmarshalJSON(data, &destinationGcsBzip2, "", true, true); err == nil {
+ u.DestinationGcsBzip2 = destinationGcsBzip2
+ u.Type = DestinationGcsCompressionCodecTypeDestinationGcsBzip2
return nil
}
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil {
- u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy = destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy
- u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy
+ destinationGcsSnappy := new(DestinationGcsSnappy)
+ if err := utils.UnmarshalJSON(data, &destinationGcsSnappy, "", true, true); err == nil {
+ u.DestinationGcsSnappy = destinationGcsSnappy
+ u.Type = DestinationGcsCompressionCodecTypeDestinationGcsSnappy
return nil
}
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil {
- u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate = destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate
- u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate
+ destinationGcsDeflate := new(DestinationGcsDeflate)
+ if err := utils.UnmarshalJSON(data, &destinationGcsDeflate, "", true, true); err == nil {
+ u.DestinationGcsDeflate = destinationGcsDeflate
+ u.Type = DestinationGcsCompressionCodecTypeDestinationGcsDeflate
return nil
}
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecXz := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecXz); err == nil {
- u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz = destinationGcsOutputFormatAvroApacheAvroCompressionCodecXz
- u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz
+ destinationGcsXz := new(DestinationGcsXz)
+ if err := utils.UnmarshalJSON(data, &destinationGcsXz, "", true, true); err == nil {
+ u.DestinationGcsXz = destinationGcsXz
+ u.Type = DestinationGcsCompressionCodecTypeDestinationGcsXz
return nil
}
- destinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard); err == nil {
- u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard = destinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard
- u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard
+ destinationGcsZstandard := new(DestinationGcsZstandard)
+ if err := utils.UnmarshalJSON(data, &destinationGcsZstandard, "", true, true); err == nil {
+ u.DestinationGcsZstandard = destinationGcsZstandard
+ u.Type = DestinationGcsCompressionCodecTypeDestinationGcsZstandard
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationGcsOutputFormatAvroApacheAvroCompressionCodec) MarshalJSON() ([]byte, error) {
- if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression)
+func (u DestinationGcsCompressionCodec) MarshalJSON() ([]byte, error) {
+ if u.DestinationGcsNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationGcsNoCompression, "", true)
}
- if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2)
+ if u.DestinationGcsDeflate != nil {
+ return utils.MarshalJSON(u.DestinationGcsDeflate, "", true)
}
- if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy)
+ if u.DestinationGcsBzip2 != nil {
+ return utils.MarshalJSON(u.DestinationGcsBzip2, "", true)
}
- if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate)
+ if u.DestinationGcsXz != nil {
+ return utils.MarshalJSON(u.DestinationGcsXz, "", true)
}
- if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz != nil {
- return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz)
+ if u.DestinationGcsZstandard != nil {
+ return utils.MarshalJSON(u.DestinationGcsZstandard, "", true)
}
- if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard)
+ if u.DestinationGcsSnappy != nil {
+ return utils.MarshalJSON(u.DestinationGcsSnappy, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationGcsOutputFormatAvroApacheAvroFormatType string
+type DestinationGcsFormatType string
const (
- DestinationGcsOutputFormatAvroApacheAvroFormatTypeAvro DestinationGcsOutputFormatAvroApacheAvroFormatType = "Avro"
+ DestinationGcsFormatTypeAvro DestinationGcsFormatType = "Avro"
)
-func (e DestinationGcsOutputFormatAvroApacheAvroFormatType) ToPointer() *DestinationGcsOutputFormatAvroApacheAvroFormatType {
+func (e DestinationGcsFormatType) ToPointer() *DestinationGcsFormatType {
return &e
}
-func (e *DestinationGcsOutputFormatAvroApacheAvroFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Avro":
- *e = DestinationGcsOutputFormatAvroApacheAvroFormatType(v)
+ *e = DestinationGcsFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsOutputFormatAvroApacheAvroFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsFormatType: %v", v)
}
}
-// DestinationGcsOutputFormatAvroApacheAvro - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
-type DestinationGcsOutputFormatAvroApacheAvro struct {
+// DestinationGcsAvroApacheAvro - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
+type DestinationGcsAvroApacheAvro struct {
// The compression algorithm used to compress data. Default to no compression.
- CompressionCodec DestinationGcsOutputFormatAvroApacheAvroCompressionCodec `json:"compression_codec"`
- FormatType DestinationGcsOutputFormatAvroApacheAvroFormatType `json:"format_type"`
+ CompressionCodec DestinationGcsCompressionCodec `json:"compression_codec"`
+ FormatType *DestinationGcsFormatType `default:"Avro" json:"format_type"`
+}
+
+func (d DestinationGcsAvroApacheAvro) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsAvroApacheAvro) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsAvroApacheAvro) GetCompressionCodec() DestinationGcsCompressionCodec {
+ if o == nil {
+ return DestinationGcsCompressionCodec{}
+ }
+ return o.CompressionCodec
+}
+
+func (o *DestinationGcsAvroApacheAvro) GetFormatType() *DestinationGcsFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
}
type DestinationGcsOutputFormatType string
const (
- DestinationGcsOutputFormatTypeDestinationGcsOutputFormatAvroApacheAvro DestinationGcsOutputFormatType = "destination-gcs_Output Format_Avro: Apache Avro"
- DestinationGcsOutputFormatTypeDestinationGcsOutputFormatCSVCommaSeparatedValues DestinationGcsOutputFormatType = "destination-gcs_Output Format_CSV: Comma-Separated Values"
- DestinationGcsOutputFormatTypeDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON DestinationGcsOutputFormatType = "destination-gcs_Output Format_JSON Lines: newline-delimited JSON"
- DestinationGcsOutputFormatTypeDestinationGcsOutputFormatParquetColumnarStorage DestinationGcsOutputFormatType = "destination-gcs_Output Format_Parquet: Columnar Storage"
+ DestinationGcsOutputFormatTypeDestinationGcsAvroApacheAvro DestinationGcsOutputFormatType = "destination-gcs_Avro: Apache Avro"
+ DestinationGcsOutputFormatTypeDestinationGcsCSVCommaSeparatedValues DestinationGcsOutputFormatType = "destination-gcs_CSV: Comma-Separated Values"
+ DestinationGcsOutputFormatTypeDestinationGcsJSONLinesNewlineDelimitedJSON DestinationGcsOutputFormatType = "destination-gcs_JSON Lines: newline-delimited JSON"
+ DestinationGcsOutputFormatTypeDestinationGcsParquetColumnarStorage DestinationGcsOutputFormatType = "destination-gcs_Parquet: Columnar Storage"
)
type DestinationGcsOutputFormat struct {
- DestinationGcsOutputFormatAvroApacheAvro *DestinationGcsOutputFormatAvroApacheAvro
- DestinationGcsOutputFormatCSVCommaSeparatedValues *DestinationGcsOutputFormatCSVCommaSeparatedValues
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON *DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON
- DestinationGcsOutputFormatParquetColumnarStorage *DestinationGcsOutputFormatParquetColumnarStorage
+ DestinationGcsAvroApacheAvro *DestinationGcsAvroApacheAvro
+ DestinationGcsCSVCommaSeparatedValues *DestinationGcsCSVCommaSeparatedValues
+ DestinationGcsJSONLinesNewlineDelimitedJSON *DestinationGcsJSONLinesNewlineDelimitedJSON
+ DestinationGcsParquetColumnarStorage *DestinationGcsParquetColumnarStorage
Type DestinationGcsOutputFormatType
}
-func CreateDestinationGcsOutputFormatDestinationGcsOutputFormatAvroApacheAvro(destinationGcsOutputFormatAvroApacheAvro DestinationGcsOutputFormatAvroApacheAvro) DestinationGcsOutputFormat {
- typ := DestinationGcsOutputFormatTypeDestinationGcsOutputFormatAvroApacheAvro
+func CreateDestinationGcsOutputFormatDestinationGcsAvroApacheAvro(destinationGcsAvroApacheAvro DestinationGcsAvroApacheAvro) DestinationGcsOutputFormat {
+ typ := DestinationGcsOutputFormatTypeDestinationGcsAvroApacheAvro
return DestinationGcsOutputFormat{
- DestinationGcsOutputFormatAvroApacheAvro: &destinationGcsOutputFormatAvroApacheAvro,
- Type: typ,
+ DestinationGcsAvroApacheAvro: &destinationGcsAvroApacheAvro,
+ Type: typ,
}
}
-func CreateDestinationGcsOutputFormatDestinationGcsOutputFormatCSVCommaSeparatedValues(destinationGcsOutputFormatCSVCommaSeparatedValues DestinationGcsOutputFormatCSVCommaSeparatedValues) DestinationGcsOutputFormat {
- typ := DestinationGcsOutputFormatTypeDestinationGcsOutputFormatCSVCommaSeparatedValues
+func CreateDestinationGcsOutputFormatDestinationGcsCSVCommaSeparatedValues(destinationGcsCSVCommaSeparatedValues DestinationGcsCSVCommaSeparatedValues) DestinationGcsOutputFormat {
+ typ := DestinationGcsOutputFormatTypeDestinationGcsCSVCommaSeparatedValues
return DestinationGcsOutputFormat{
- DestinationGcsOutputFormatCSVCommaSeparatedValues: &destinationGcsOutputFormatCSVCommaSeparatedValues,
- Type: typ,
+ DestinationGcsCSVCommaSeparatedValues: &destinationGcsCSVCommaSeparatedValues,
+ Type: typ,
}
}
-func CreateDestinationGcsOutputFormatDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON(destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON) DestinationGcsOutputFormat {
- typ := DestinationGcsOutputFormatTypeDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON
+func CreateDestinationGcsOutputFormatDestinationGcsJSONLinesNewlineDelimitedJSON(destinationGcsJSONLinesNewlineDelimitedJSON DestinationGcsJSONLinesNewlineDelimitedJSON) DestinationGcsOutputFormat {
+ typ := DestinationGcsOutputFormatTypeDestinationGcsJSONLinesNewlineDelimitedJSON
return DestinationGcsOutputFormat{
- DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON: &destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationGcsJSONLinesNewlineDelimitedJSON: &destinationGcsJSONLinesNewlineDelimitedJSON,
Type: typ,
}
}
-func CreateDestinationGcsOutputFormatDestinationGcsOutputFormatParquetColumnarStorage(destinationGcsOutputFormatParquetColumnarStorage DestinationGcsOutputFormatParquetColumnarStorage) DestinationGcsOutputFormat {
- typ := DestinationGcsOutputFormatTypeDestinationGcsOutputFormatParquetColumnarStorage
+func CreateDestinationGcsOutputFormatDestinationGcsParquetColumnarStorage(destinationGcsParquetColumnarStorage DestinationGcsParquetColumnarStorage) DestinationGcsOutputFormat {
+ typ := DestinationGcsOutputFormatTypeDestinationGcsParquetColumnarStorage
return DestinationGcsOutputFormat{
- DestinationGcsOutputFormatParquetColumnarStorage: &destinationGcsOutputFormatParquetColumnarStorage,
- Type: typ,
+ DestinationGcsParquetColumnarStorage: &destinationGcsParquetColumnarStorage,
+ Type: typ,
}
}
func (u *DestinationGcsOutputFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationGcsOutputFormatAvroApacheAvro := new(DestinationGcsOutputFormatAvroApacheAvro)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvro); err == nil {
- u.DestinationGcsOutputFormatAvroApacheAvro = destinationGcsOutputFormatAvroApacheAvro
- u.Type = DestinationGcsOutputFormatTypeDestinationGcsOutputFormatAvroApacheAvro
+
+ destinationGcsAvroApacheAvro := new(DestinationGcsAvroApacheAvro)
+ if err := utils.UnmarshalJSON(data, &destinationGcsAvroApacheAvro, "", true, true); err == nil {
+ u.DestinationGcsAvroApacheAvro = destinationGcsAvroApacheAvro
+ u.Type = DestinationGcsOutputFormatTypeDestinationGcsAvroApacheAvro
return nil
}
- destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON); err == nil {
- u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON = destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON
- u.Type = DestinationGcsOutputFormatTypeDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON
+ destinationGcsJSONLinesNewlineDelimitedJSON := new(DestinationGcsJSONLinesNewlineDelimitedJSON)
+ if err := utils.UnmarshalJSON(data, &destinationGcsJSONLinesNewlineDelimitedJSON, "", true, true); err == nil {
+ u.DestinationGcsJSONLinesNewlineDelimitedJSON = destinationGcsJSONLinesNewlineDelimitedJSON
+ u.Type = DestinationGcsOutputFormatTypeDestinationGcsJSONLinesNewlineDelimitedJSON
return nil
}
- destinationGcsOutputFormatCSVCommaSeparatedValues := new(DestinationGcsOutputFormatCSVCommaSeparatedValues)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatCSVCommaSeparatedValues); err == nil {
- u.DestinationGcsOutputFormatCSVCommaSeparatedValues = destinationGcsOutputFormatCSVCommaSeparatedValues
- u.Type = DestinationGcsOutputFormatTypeDestinationGcsOutputFormatCSVCommaSeparatedValues
+ destinationGcsCSVCommaSeparatedValues := new(DestinationGcsCSVCommaSeparatedValues)
+ if err := utils.UnmarshalJSON(data, &destinationGcsCSVCommaSeparatedValues, "", true, true); err == nil {
+ u.DestinationGcsCSVCommaSeparatedValues = destinationGcsCSVCommaSeparatedValues
+ u.Type = DestinationGcsOutputFormatTypeDestinationGcsCSVCommaSeparatedValues
return nil
}
- destinationGcsOutputFormatParquetColumnarStorage := new(DestinationGcsOutputFormatParquetColumnarStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsOutputFormatParquetColumnarStorage); err == nil {
- u.DestinationGcsOutputFormatParquetColumnarStorage = destinationGcsOutputFormatParquetColumnarStorage
- u.Type = DestinationGcsOutputFormatTypeDestinationGcsOutputFormatParquetColumnarStorage
+ destinationGcsParquetColumnarStorage := new(DestinationGcsParquetColumnarStorage)
+ if err := utils.UnmarshalJSON(data, &destinationGcsParquetColumnarStorage, "", true, true); err == nil {
+ u.DestinationGcsParquetColumnarStorage = destinationGcsParquetColumnarStorage
+ u.Type = DestinationGcsOutputFormatTypeDestinationGcsParquetColumnarStorage
return nil
}
@@ -1012,23 +1359,23 @@ func (u *DestinationGcsOutputFormat) UnmarshalJSON(data []byte) error {
}
func (u DestinationGcsOutputFormat) MarshalJSON() ([]byte, error) {
- if u.DestinationGcsOutputFormatAvroApacheAvro != nil {
- return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvro)
+ if u.DestinationGcsAvroApacheAvro != nil {
+ return utils.MarshalJSON(u.DestinationGcsAvroApacheAvro, "", true)
}
- if u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON)
+ if u.DestinationGcsCSVCommaSeparatedValues != nil {
+ return utils.MarshalJSON(u.DestinationGcsCSVCommaSeparatedValues, "", true)
}
- if u.DestinationGcsOutputFormatCSVCommaSeparatedValues != nil {
- return json.Marshal(u.DestinationGcsOutputFormatCSVCommaSeparatedValues)
+ if u.DestinationGcsJSONLinesNewlineDelimitedJSON != nil {
+ return utils.MarshalJSON(u.DestinationGcsJSONLinesNewlineDelimitedJSON, "", true)
}
- if u.DestinationGcsOutputFormatParquetColumnarStorage != nil {
- return json.Marshal(u.DestinationGcsOutputFormatParquetColumnarStorage)
+ if u.DestinationGcsParquetColumnarStorage != nil {
+ return utils.MarshalJSON(u.DestinationGcsParquetColumnarStorage, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// DestinationGCSGCSBucketRegion - Select a Region of the GCS Bucket. Read more here.
@@ -1161,7 +1508,7 @@ func (e *DestinationGCSGCSBucketRegion) UnmarshalJSON(data []byte) error {
type DestinationGcs struct {
// An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
Credential DestinationGcsAuthentication `json:"credential"`
- DestinationType DestinationGcsGcs `json:"destinationType"`
+ destinationType Gcs `const:"gcs" json:"destinationType"`
// Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
Format DestinationGcsOutputFormat `json:"format"`
// You can find the bucket name in the App Engine Admin console Application Settings page, under the label Google Cloud Storage Bucket. Read more here.
@@ -1169,5 +1516,55 @@ type DestinationGcs struct {
// GCS Bucket Path string Subdirectory under the above bucket to sync the data into.
GcsBucketPath string `json:"gcs_bucket_path"`
// Select a Region of the GCS Bucket. Read more here.
- GcsBucketRegion *DestinationGCSGCSBucketRegion `json:"gcs_bucket_region,omitempty"`
+ GcsBucketRegion *DestinationGCSGCSBucketRegion `default:"us" json:"gcs_bucket_region"`
+}
+
+func (d DestinationGcs) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcs) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcs) GetCredential() DestinationGcsAuthentication {
+ if o == nil {
+ return DestinationGcsAuthentication{}
+ }
+ return o.Credential
+}
+
+func (o *DestinationGcs) GetDestinationType() Gcs {
+ return GcsGcs
+}
+
+func (o *DestinationGcs) GetFormat() DestinationGcsOutputFormat {
+ if o == nil {
+ return DestinationGcsOutputFormat{}
+ }
+ return o.Format
+}
+
+func (o *DestinationGcs) GetGcsBucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.GcsBucketName
+}
+
+func (o *DestinationGcs) GetGcsBucketPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.GcsBucketPath
+}
+
+func (o *DestinationGcs) GetGcsBucketRegion() *DestinationGCSGCSBucketRegion {
+ if o == nil {
+ return nil
+ }
+ return o.GcsBucketRegion
}
diff --git a/internal/sdk/pkg/models/shared/destinationgcscreaterequest.go b/internal/sdk/pkg/models/shared/destinationgcscreaterequest.go
old mode 100755
new mode 100644
index 31db59f02..0d06418d5
--- a/internal/sdk/pkg/models/shared/destinationgcscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationgcscreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationGcsCreateRequest struct {
Configuration DestinationGcs `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationGcsCreateRequest) GetConfiguration() DestinationGcs {
+ if o == nil {
+ return DestinationGcs{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationGcsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationGcsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationGcsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationgcsputrequest.go b/internal/sdk/pkg/models/shared/destinationgcsputrequest.go
old mode 100755
new mode 100644
index 8840c7f4b..c91f96c4a
--- a/internal/sdk/pkg/models/shared/destinationgcsputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationgcsputrequest.go
@@ -7,3 +7,24 @@ type DestinationGcsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationGcsPutRequest) GetConfiguration() DestinationGcsUpdate {
+ if o == nil {
+ return DestinationGcsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationGcsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationGcsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationgcsupdate.go b/internal/sdk/pkg/models/shared/destinationgcsupdate.go
old mode 100755
new mode 100644
index e7b48050c..9109750fc
--- a/internal/sdk/pkg/models/shared/destinationgcsupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationgcsupdate.go
@@ -3,107 +3,136 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationGcsUpdateAuthenticationHMACKeyCredentialType string
+type CredentialType string
const (
- DestinationGcsUpdateAuthenticationHMACKeyCredentialTypeHmacKey DestinationGcsUpdateAuthenticationHMACKeyCredentialType = "HMAC_KEY"
+ CredentialTypeHmacKey CredentialType = "HMAC_KEY"
)
-func (e DestinationGcsUpdateAuthenticationHMACKeyCredentialType) ToPointer() *DestinationGcsUpdateAuthenticationHMACKeyCredentialType {
+func (e CredentialType) ToPointer() *CredentialType {
return &e
}
-func (e *DestinationGcsUpdateAuthenticationHMACKeyCredentialType) UnmarshalJSON(data []byte) error {
+func (e *CredentialType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "HMAC_KEY":
- *e = DestinationGcsUpdateAuthenticationHMACKeyCredentialType(v)
+ *e = CredentialType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateAuthenticationHMACKeyCredentialType: %v", v)
+ return fmt.Errorf("invalid value for CredentialType: %v", v)
}
}
-// DestinationGcsUpdateAuthenticationHMACKey - An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
-type DestinationGcsUpdateAuthenticationHMACKey struct {
- CredentialType DestinationGcsUpdateAuthenticationHMACKeyCredentialType `json:"credential_type"`
+// HMACKey - An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
+type HMACKey struct {
+ CredentialType *CredentialType `default:"HMAC_KEY" json:"credential_type"`
// When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long. Read more here.
HmacKeyAccessID string `json:"hmac_key_access_id"`
// The corresponding secret for the access ID. It is a 40-character base-64 encoded string. Read more here.
HmacKeySecret string `json:"hmac_key_secret"`
}
-type DestinationGcsUpdateAuthenticationType string
+func (h HMACKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(h, "", false)
+}
+
+func (h *HMACKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &h, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *HMACKey) GetCredentialType() *CredentialType {
+ if o == nil {
+ return nil
+ }
+ return o.CredentialType
+}
+
+func (o *HMACKey) GetHmacKeyAccessID() string {
+ if o == nil {
+ return ""
+ }
+ return o.HmacKeyAccessID
+}
+
+func (o *HMACKey) GetHmacKeySecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.HmacKeySecret
+}
+
+type AuthenticationUnionType string
const (
- DestinationGcsUpdateAuthenticationTypeDestinationGcsUpdateAuthenticationHMACKey DestinationGcsUpdateAuthenticationType = "destination-gcs-update_Authentication_HMAC Key"
+ AuthenticationUnionTypeHMACKey AuthenticationUnionType = "HMAC Key"
)
-type DestinationGcsUpdateAuthentication struct {
- DestinationGcsUpdateAuthenticationHMACKey *DestinationGcsUpdateAuthenticationHMACKey
+type Authentication struct {
+ HMACKey *HMACKey
- Type DestinationGcsUpdateAuthenticationType
+ Type AuthenticationUnionType
}
-func CreateDestinationGcsUpdateAuthenticationDestinationGcsUpdateAuthenticationHMACKey(destinationGcsUpdateAuthenticationHMACKey DestinationGcsUpdateAuthenticationHMACKey) DestinationGcsUpdateAuthentication {
- typ := DestinationGcsUpdateAuthenticationTypeDestinationGcsUpdateAuthenticationHMACKey
+func CreateAuthenticationHMACKey(hmacKey HMACKey) Authentication {
+ typ := AuthenticationUnionTypeHMACKey
- return DestinationGcsUpdateAuthentication{
- DestinationGcsUpdateAuthenticationHMACKey: &destinationGcsUpdateAuthenticationHMACKey,
- Type: typ,
+ return Authentication{
+ HMACKey: &hmacKey,
+ Type: typ,
}
}
-func (u *DestinationGcsUpdateAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *Authentication) UnmarshalJSON(data []byte) error {
- destinationGcsUpdateAuthenticationHMACKey := new(DestinationGcsUpdateAuthenticationHMACKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateAuthenticationHMACKey); err == nil {
- u.DestinationGcsUpdateAuthenticationHMACKey = destinationGcsUpdateAuthenticationHMACKey
- u.Type = DestinationGcsUpdateAuthenticationTypeDestinationGcsUpdateAuthenticationHMACKey
+ hmacKey := new(HMACKey)
+ if err := utils.UnmarshalJSON(data, &hmacKey, "", true, true); err == nil {
+ u.HMACKey = hmacKey
+ u.Type = AuthenticationUnionTypeHMACKey
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationGcsUpdateAuthentication) MarshalJSON() ([]byte, error) {
- if u.DestinationGcsUpdateAuthenticationHMACKey != nil {
- return json.Marshal(u.DestinationGcsUpdateAuthenticationHMACKey)
+func (u Authentication) MarshalJSON() ([]byte, error) {
+ if u.HMACKey != nil {
+ return utils.MarshalJSON(u.HMACKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec - The compression algorithm used to compress data pages.
-type DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec string
+// DestinationGcsUpdateCompressionCodec - The compression algorithm used to compress data pages.
+type DestinationGcsUpdateCompressionCodec string
const (
- DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodecUncompressed DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec = "UNCOMPRESSED"
- DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodecSnappy DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec = "SNAPPY"
- DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodecGzip DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec = "GZIP"
- DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodecLzo DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec = "LZO"
- DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodecBrotli DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec = "BROTLI"
- DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodecLz4 DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec = "LZ4"
- DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodecZstd DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec = "ZSTD"
+ DestinationGcsUpdateCompressionCodecUncompressed DestinationGcsUpdateCompressionCodec = "UNCOMPRESSED"
+ DestinationGcsUpdateCompressionCodecSnappy DestinationGcsUpdateCompressionCodec = "SNAPPY"
+ DestinationGcsUpdateCompressionCodecGzip DestinationGcsUpdateCompressionCodec = "GZIP"
+ DestinationGcsUpdateCompressionCodecLzo DestinationGcsUpdateCompressionCodec = "LZO"
+ DestinationGcsUpdateCompressionCodecBrotli DestinationGcsUpdateCompressionCodec = "BROTLI"
+ DestinationGcsUpdateCompressionCodecLz4 DestinationGcsUpdateCompressionCodec = "LZ4"
+ DestinationGcsUpdateCompressionCodecZstd DestinationGcsUpdateCompressionCodec = "ZSTD"
)
-func (e DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec) ToPointer() *DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec {
+func (e DestinationGcsUpdateCompressionCodec) ToPointer() *DestinationGcsUpdateCompressionCodec {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateCompressionCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -122,350 +151,497 @@ func (e *DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec)
case "LZ4":
fallthrough
case "ZSTD":
- *e = DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec(v)
+ *e = DestinationGcsUpdateCompressionCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateCompressionCodec: %v", v)
}
}
-type DestinationGcsUpdateOutputFormatParquetColumnarStorageFormatType string
+type DestinationGcsUpdateSchemasFormatOutputFormatFormatType string
const (
- DestinationGcsUpdateOutputFormatParquetColumnarStorageFormatTypeParquet DestinationGcsUpdateOutputFormatParquetColumnarStorageFormatType = "Parquet"
+ DestinationGcsUpdateSchemasFormatOutputFormatFormatTypeParquet DestinationGcsUpdateSchemasFormatOutputFormatFormatType = "Parquet"
)
-func (e DestinationGcsUpdateOutputFormatParquetColumnarStorageFormatType) ToPointer() *DestinationGcsUpdateOutputFormatParquetColumnarStorageFormatType {
+func (e DestinationGcsUpdateSchemasFormatOutputFormatFormatType) ToPointer() *DestinationGcsUpdateSchemasFormatOutputFormatFormatType {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatParquetColumnarStorageFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateSchemasFormatOutputFormatFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Parquet":
- *e = DestinationGcsUpdateOutputFormatParquetColumnarStorageFormatType(v)
+ *e = DestinationGcsUpdateSchemasFormatOutputFormatFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatParquetColumnarStorageFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateSchemasFormatOutputFormatFormatType: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatParquetColumnarStorage - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
-type DestinationGcsUpdateOutputFormatParquetColumnarStorage struct {
+// DestinationGcsUpdateParquetColumnarStorage - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
+type DestinationGcsUpdateParquetColumnarStorage struct {
// This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
- BlockSizeMb *int64 `json:"block_size_mb,omitempty"`
+ BlockSizeMb *int64 `default:"128" json:"block_size_mb"`
// The compression algorithm used to compress data pages.
- CompressionCodec *DestinationGcsUpdateOutputFormatParquetColumnarStorageCompressionCodec `json:"compression_codec,omitempty"`
+ CompressionCodec *DestinationGcsUpdateCompressionCodec `default:"UNCOMPRESSED" json:"compression_codec"`
// Default: true.
- DictionaryEncoding *bool `json:"dictionary_encoding,omitempty"`
+ DictionaryEncoding *bool `default:"true" json:"dictionary_encoding"`
// There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
- DictionaryPageSizeKb *int64 `json:"dictionary_page_size_kb,omitempty"`
- FormatType DestinationGcsUpdateOutputFormatParquetColumnarStorageFormatType `json:"format_type"`
+ DictionaryPageSizeKb *int64 `default:"1024" json:"dictionary_page_size_kb"`
+ FormatType *DestinationGcsUpdateSchemasFormatOutputFormatFormatType `default:"Parquet" json:"format_type"`
// Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
- MaxPaddingSizeMb *int64 `json:"max_padding_size_mb,omitempty"`
+ MaxPaddingSizeMb *int64 `default:"8" json:"max_padding_size_mb"`
// The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
- PageSizeKb *int64 `json:"page_size_kb,omitempty"`
+ PageSizeKb *int64 `default:"1024" json:"page_size_kb"`
+}
+
+func (d DestinationGcsUpdateParquetColumnarStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType string
+func (d *DestinationGcsUpdateParquetColumnarStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsUpdateParquetColumnarStorage) GetBlockSizeMb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BlockSizeMb
+}
+
+func (o *DestinationGcsUpdateParquetColumnarStorage) GetCompressionCodec() *DestinationGcsUpdateCompressionCodec {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionCodec
+}
+
+func (o *DestinationGcsUpdateParquetColumnarStorage) GetDictionaryEncoding() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DictionaryEncoding
+}
+
+func (o *DestinationGcsUpdateParquetColumnarStorage) GetDictionaryPageSizeKb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DictionaryPageSizeKb
+}
+
+func (o *DestinationGcsUpdateParquetColumnarStorage) GetFormatType() *DestinationGcsUpdateSchemasFormatOutputFormatFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
+}
+
+func (o *DestinationGcsUpdateParquetColumnarStorage) GetMaxPaddingSizeMb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.MaxPaddingSizeMb
+}
+
+func (o *DestinationGcsUpdateParquetColumnarStorage) GetPageSizeKb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PageSizeKb
+}
+
+type DestinationGcsUpdateSchemasFormatCompressionType string
const (
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeGzip DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType = "GZIP"
+ DestinationGcsUpdateSchemasFormatCompressionTypeGzip DestinationGcsUpdateSchemasFormatCompressionType = "GZIP"
)
-func (e DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) ToPointer() *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType {
+func (e DestinationGcsUpdateSchemasFormatCompressionType) ToPointer() *DestinationGcsUpdateSchemasFormatCompressionType {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateSchemasFormatCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GZIP":
- *e = DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(v)
+ *e = DestinationGcsUpdateSchemasFormatCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateSchemasFormatCompressionType: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP struct {
- CompressionType *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType `json:"compression_type,omitempty"`
+// DestinationGcsUpdateGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationGcsUpdateGZIP struct {
+ CompressionType *DestinationGcsUpdateSchemasFormatCompressionType `default:"GZIP" json:"compression_type"`
+}
+
+func (d DestinationGcsUpdateGZIP) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsUpdateGZIP) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsUpdateGZIP) GetCompressionType() *DestinationGcsUpdateSchemasFormatCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
}
-type DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType string
+type DestinationGcsUpdateSchemasCompressionType string
const (
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeNoCompression DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType = "No Compression"
+ DestinationGcsUpdateSchemasCompressionTypeNoCompression DestinationGcsUpdateSchemasCompressionType = "No Compression"
)
-func (e DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) ToPointer() *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType {
+func (e DestinationGcsUpdateSchemasCompressionType) ToPointer() *DestinationGcsUpdateSchemasCompressionType {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateSchemasCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "No Compression":
- *e = DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(v)
+ *e = DestinationGcsUpdateSchemasCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateSchemasCompressionType: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression struct {
- CompressionType *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType `json:"compression_type,omitempty"`
+// DestinationGcsUpdateSchemasNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationGcsUpdateSchemasNoCompression struct {
+ CompressionType *DestinationGcsUpdateSchemasCompressionType `default:"No Compression" json:"compression_type"`
}
-type DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType string
+func (d DestinationGcsUpdateSchemasNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsUpdateSchemasNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsUpdateSchemasNoCompression) GetCompressionType() *DestinationGcsUpdateSchemasCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
+}
+
+type DestinationGcsUpdateCompressionUnionType string
const (
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-gcs-update_Output Format_JSON Lines: newline-delimited JSON_Compression_No Compression"
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-gcs-update_Output Format_JSON Lines: newline-delimited JSON_Compression_GZIP"
+ DestinationGcsUpdateCompressionUnionTypeDestinationGcsUpdateSchemasNoCompression DestinationGcsUpdateCompressionUnionType = "destination-gcs-update_Schemas_No Compression"
+ DestinationGcsUpdateCompressionUnionTypeDestinationGcsUpdateGZIP DestinationGcsUpdateCompressionUnionType = "destination-gcs-update_GZIP"
)
-type DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+type DestinationGcsUpdateCompression struct {
+ DestinationGcsUpdateSchemasNoCompression *DestinationGcsUpdateSchemasNoCompression
+ DestinationGcsUpdateGZIP *DestinationGcsUpdateGZIP
- Type DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType
+ Type DestinationGcsUpdateCompressionUnionType
}
-func CreateDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression(destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression) DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+func CreateDestinationGcsUpdateCompressionDestinationGcsUpdateSchemasNoCompression(destinationGcsUpdateSchemasNoCompression DestinationGcsUpdateSchemasNoCompression) DestinationGcsUpdateCompression {
+ typ := DestinationGcsUpdateCompressionUnionTypeDestinationGcsUpdateSchemasNoCompression
- return DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: &destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
- Type: typ,
+ return DestinationGcsUpdateCompression{
+ DestinationGcsUpdateSchemasNoCompression: &destinationGcsUpdateSchemasNoCompression,
+ Type: typ,
}
}
-func CreateDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP(destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP) DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+func CreateDestinationGcsUpdateCompressionDestinationGcsUpdateGZIP(destinationGcsUpdateGZIP DestinationGcsUpdateGZIP) DestinationGcsUpdateCompression {
+ typ := DestinationGcsUpdateCompressionUnionTypeDestinationGcsUpdateGZIP
- return DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: &destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
- Type: typ,
+ return DestinationGcsUpdateCompression{
+ DestinationGcsUpdateGZIP: &destinationGcsUpdateGZIP,
+ Type: typ,
}
}
-func (u *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationGcsUpdateCompression) UnmarshalJSON(data []byte) error {
- destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression := new(DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression); err == nil {
- u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- u.Type = DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+ destinationGcsUpdateSchemasNoCompression := new(DestinationGcsUpdateSchemasNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationGcsUpdateSchemasNoCompression, "", true, true); err == nil {
+ u.DestinationGcsUpdateSchemasNoCompression = destinationGcsUpdateSchemasNoCompression
+ u.Type = DestinationGcsUpdateCompressionUnionTypeDestinationGcsUpdateSchemasNoCompression
return nil
}
- destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP := new(DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP); err == nil {
- u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- u.Type = DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+ destinationGcsUpdateGZIP := new(DestinationGcsUpdateGZIP)
+ if err := utils.UnmarshalJSON(data, &destinationGcsUpdateGZIP, "", true, true); err == nil {
+ u.DestinationGcsUpdateGZIP = destinationGcsUpdateGZIP
+ u.Type = DestinationGcsUpdateCompressionUnionTypeDestinationGcsUpdateGZIP
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression) MarshalJSON() ([]byte, error) {
- if u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
+func (u DestinationGcsUpdateCompression) MarshalJSON() ([]byte, error) {
+ if u.DestinationGcsUpdateSchemasNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationGcsUpdateSchemasNoCompression, "", true)
}
- if u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
+ if u.DestinationGcsUpdateGZIP != nil {
+ return utils.MarshalJSON(u.DestinationGcsUpdateGZIP, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType string
+type DestinationGcsUpdateSchemasFormatFormatType string
const (
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatTypeJsonl DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType = "JSONL"
+ DestinationGcsUpdateSchemasFormatFormatTypeJsonl DestinationGcsUpdateSchemasFormatFormatType = "JSONL"
)
-func (e DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType) ToPointer() *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType {
+func (e DestinationGcsUpdateSchemasFormatFormatType) ToPointer() *DestinationGcsUpdateSchemasFormatFormatType {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateSchemasFormatFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "JSONL":
- *e = DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType(v)
+ *e = DestinationGcsUpdateSchemasFormatFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateSchemasFormatFormatType: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
-type DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON struct {
+// DestinationGcsUpdateJSONLinesNewlineDelimitedJSON - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
+type DestinationGcsUpdateJSONLinesNewlineDelimitedJSON struct {
// Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
- Compression *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression `json:"compression,omitempty"`
- FormatType DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType `json:"format_type"`
+ Compression *DestinationGcsUpdateCompression `json:"compression,omitempty"`
+ FormatType *DestinationGcsUpdateSchemasFormatFormatType `default:"JSONL" json:"format_type"`
}
-type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType string
+func (d DestinationGcsUpdateJSONLinesNewlineDelimitedJSON) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsUpdateJSONLinesNewlineDelimitedJSON) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsUpdateJSONLinesNewlineDelimitedJSON) GetCompression() *DestinationGcsUpdateCompression {
+ if o == nil {
+ return nil
+ }
+ return o.Compression
+}
+
+func (o *DestinationGcsUpdateJSONLinesNewlineDelimitedJSON) GetFormatType() *DestinationGcsUpdateSchemasFormatFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
+}
+
+type DestinationGcsUpdateCompressionType string
const (
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeGzip DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType = "GZIP"
+ DestinationGcsUpdateCompressionTypeGzip DestinationGcsUpdateCompressionType = "GZIP"
)
-func (e DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType) ToPointer() *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType {
+func (e DestinationGcsUpdateCompressionType) ToPointer() *DestinationGcsUpdateCompressionType {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GZIP":
- *e = DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType(v)
+ *e = DestinationGcsUpdateCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateCompressionType: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
-type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP struct {
- CompressionType *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType `json:"compression_type,omitempty"`
+// Gzip - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
+type Gzip struct {
+ CompressionType *DestinationGcsUpdateCompressionType `default:"GZIP" json:"compression_type"`
+}
+
+func (g Gzip) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(g, "", false)
}
-type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType string
+func (g *Gzip) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &g, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Gzip) GetCompressionType() *DestinationGcsUpdateCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
+}
+
+type CompressionType string
const (
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionTypeNoCompression DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType = "No Compression"
+ CompressionTypeNoCompression CompressionType = "No Compression"
)
-func (e DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType) ToPointer() *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType {
+func (e CompressionType) ToPointer() *CompressionType {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType) UnmarshalJSON(data []byte) error {
+func (e *CompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "No Compression":
- *e = DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType(v)
+ *e = CompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType: %v", v)
+ return fmt.Errorf("invalid value for CompressionType: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
-type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression struct {
- CompressionType *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType `json:"compression_type,omitempty"`
+// DestinationGcsUpdateNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
+type DestinationGcsUpdateNoCompression struct {
+ CompressionType *CompressionType `default:"No Compression" json:"compression_type"`
+}
+
+func (d DestinationGcsUpdateNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsUpdateNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsUpdateNoCompression) GetCompressionType() *CompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
}
-type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionType string
+type CompressionUnionType string
const (
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionType = "destination-gcs-update_Output Format_CSV: Comma-Separated Values_Compression_No Compression"
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionType = "destination-gcs-update_Output Format_CSV: Comma-Separated Values_Compression_GZIP"
+ CompressionUnionTypeDestinationGcsUpdateNoCompression CompressionUnionType = "destination-gcs-update_No Compression"
+ CompressionUnionTypeGzip CompressionUnionType = "GZIP"
)
-type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression struct {
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP
+type Compression struct {
+ DestinationGcsUpdateNoCompression *DestinationGcsUpdateNoCompression
+ Gzip *Gzip
- Type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionType
+ Type CompressionUnionType
}
-func CreateDestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionDestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression(destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression) DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression {
- typ := DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
+func CreateCompressionDestinationGcsUpdateNoCompression(destinationGcsUpdateNoCompression DestinationGcsUpdateNoCompression) Compression {
+ typ := CompressionUnionTypeDestinationGcsUpdateNoCompression
- return DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression: &destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression,
- Type: typ,
+ return Compression{
+ DestinationGcsUpdateNoCompression: &destinationGcsUpdateNoCompression,
+ Type: typ,
}
}
-func CreateDestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionDestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP(destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP) DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression {
- typ := DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP
+func CreateCompressionGzip(gzip Gzip) Compression {
+ typ := CompressionUnionTypeGzip
- return DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP: &destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP,
+ return Compression{
+ Gzip: &gzip,
Type: typ,
}
}
-func (u *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *Compression) UnmarshalJSON(data []byte) error {
- destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression := new(DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression); err == nil {
- u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression = destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- u.Type = DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
+ destinationGcsUpdateNoCompression := new(DestinationGcsUpdateNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationGcsUpdateNoCompression, "", true, true); err == nil {
+ u.DestinationGcsUpdateNoCompression = destinationGcsUpdateNoCompression
+ u.Type = CompressionUnionTypeDestinationGcsUpdateNoCompression
return nil
}
- destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP := new(DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP); err == nil {
- u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP = destinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP
- u.Type = DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP
+ gzip := new(Gzip)
+ if err := utils.UnmarshalJSON(data, &gzip, "", true, true); err == nil {
+ u.Gzip = gzip
+ u.Type = CompressionUnionTypeGzip
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression) MarshalJSON() ([]byte, error) {
- if u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression)
+func (u Compression) MarshalJSON() ([]byte, error) {
+ if u.DestinationGcsUpdateNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationGcsUpdateNoCompression, "", true)
}
- if u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP)
+ if u.Gzip != nil {
+ return utils.MarshalJSON(u.Gzip, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization - Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization string
+// Normalization - Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.
+type Normalization string
const (
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalizationNoFlattening DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization = "No flattening"
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalizationRootLevelFlattening DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization = "Root level flattening"
+ NormalizationNoFlattening Normalization = "No flattening"
+ NormalizationRootLevelFlattening Normalization = "Root level flattening"
)
-func (e DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization) ToPointer() *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization {
+func (e Normalization) ToPointer() *Normalization {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization) UnmarshalJSON(data []byte) error {
+func (e *Normalization) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -474,513 +650,684 @@ func (e *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization) U
case "No flattening":
fallthrough
case "Root level flattening":
- *e = DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization(v)
+ *e = Normalization(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization: %v", v)
+ return fmt.Errorf("invalid value for Normalization: %v", v)
}
}
-type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesFormatType string
+type DestinationGcsUpdateSchemasFormatType string
const (
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesFormatTypeCsv DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesFormatType = "CSV"
+ DestinationGcsUpdateSchemasFormatTypeCsv DestinationGcsUpdateSchemasFormatType = "CSV"
)
-func (e DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesFormatType) ToPointer() *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesFormatType {
+func (e DestinationGcsUpdateSchemasFormatType) ToPointer() *DestinationGcsUpdateSchemasFormatType {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateSchemasFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CSV":
- *e = DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesFormatType(v)
+ *e = DestinationGcsUpdateSchemasFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateSchemasFormatType: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
-type DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues struct {
+// DestinationGcsUpdateCSVCommaSeparatedValues - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
+type DestinationGcsUpdateCSVCommaSeparatedValues struct {
// Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
- Compression *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesCompression `json:"compression,omitempty"`
+ Compression *Compression `json:"compression,omitempty"`
// Whether the input JSON data should be normalized (flattened) in the output CSV. Please refer to docs for details.
- Flattening *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesNormalization `json:"flattening,omitempty"`
- FormatType DestinationGcsUpdateOutputFormatCSVCommaSeparatedValuesFormatType `json:"format_type"`
+ Flattening *Normalization `default:"No flattening" json:"flattening"`
+ FormatType *DestinationGcsUpdateSchemasFormatType `default:"CSV" json:"format_type"`
+}
+
+func (d DestinationGcsUpdateCSVCommaSeparatedValues) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsUpdateCSVCommaSeparatedValues) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsUpdateCSVCommaSeparatedValues) GetCompression() *Compression {
+ if o == nil {
+ return nil
+ }
+ return o.Compression
+}
+
+func (o *DestinationGcsUpdateCSVCommaSeparatedValues) GetFlattening() *Normalization {
+ if o == nil {
+ return nil
+ }
+ return o.Flattening
}
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec string
+func (o *DestinationGcsUpdateCSVCommaSeparatedValues) GetFormatType() *DestinationGcsUpdateSchemasFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
+}
+
+type DestinationGcsUpdateSchemasFormatOutputFormat1Codec string
const (
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodecSnappy DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec = "snappy"
+ DestinationGcsUpdateSchemasFormatOutputFormat1CodecSnappy DestinationGcsUpdateSchemasFormatOutputFormat1Codec = "snappy"
)
-func (e DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec) ToPointer() *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec {
+func (e DestinationGcsUpdateSchemasFormatOutputFormat1Codec) ToPointer() *DestinationGcsUpdateSchemasFormatOutputFormat1Codec {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateSchemasFormatOutputFormat1Codec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "snappy":
- *e = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec(v)
+ *e = DestinationGcsUpdateSchemasFormatOutputFormat1Codec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateSchemasFormatOutputFormat1Codec: %v", v)
+ }
+}
+
+// Snappy - The compression algorithm used to compress data. Default to no compression.
+type Snappy struct {
+ Codec *DestinationGcsUpdateSchemasFormatOutputFormat1Codec `default:"snappy" json:"codec"`
+}
+
+func (s Snappy) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *Snappy) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy struct {
- Codec DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec `json:"codec"`
+func (o *Snappy) GetCodec() *DestinationGcsUpdateSchemasFormatOutputFormat1Codec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
}
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec string
+type DestinationGcsUpdateSchemasFormatOutputFormatCodec string
const (
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodecZstandard DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec = "zstandard"
+ DestinationGcsUpdateSchemasFormatOutputFormatCodecZstandard DestinationGcsUpdateSchemasFormatOutputFormatCodec = "zstandard"
)
-func (e DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec) ToPointer() *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec {
+func (e DestinationGcsUpdateSchemasFormatOutputFormatCodec) ToPointer() *DestinationGcsUpdateSchemasFormatOutputFormatCodec {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateSchemasFormatOutputFormatCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zstandard":
- *e = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec(v)
+ *e = DestinationGcsUpdateSchemasFormatOutputFormatCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateSchemasFormatOutputFormatCodec: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard struct {
- Codec DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec `json:"codec"`
+// Zstandard - The compression algorithm used to compress data. Default to no compression.
+type Zstandard struct {
+ Codec *DestinationGcsUpdateSchemasFormatOutputFormatCodec `default:"zstandard" json:"codec"`
// Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
- CompressionLevel *int64 `json:"compression_level,omitempty"`
+ CompressionLevel *int64 `default:"3" json:"compression_level"`
// If true, include a checksum with each data block.
- IncludeChecksum *bool `json:"include_checksum,omitempty"`
+ IncludeChecksum *bool `default:"false" json:"include_checksum"`
+}
+
+func (z Zstandard) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(z, "", false)
}
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec string
+func (z *Zstandard) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &z, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Zstandard) GetCodec() *DestinationGcsUpdateSchemasFormatOutputFormatCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *Zstandard) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
+}
+
+func (o *Zstandard) GetIncludeChecksum() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeChecksum
+}
+
+type DestinationGcsUpdateSchemasFormatCodec string
const (
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXzCodecXz DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec = "xz"
+ DestinationGcsUpdateSchemasFormatCodecXz DestinationGcsUpdateSchemasFormatCodec = "xz"
)
-func (e DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec) ToPointer() *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec {
+func (e DestinationGcsUpdateSchemasFormatCodec) ToPointer() *DestinationGcsUpdateSchemasFormatCodec {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateSchemasFormatCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "xz":
- *e = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec(v)
+ *e = DestinationGcsUpdateSchemasFormatCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateSchemasFormatCodec: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz struct {
- Codec DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec `json:"codec"`
+// Xz - The compression algorithm used to compress data. Default to no compression.
+type Xz struct {
+ Codec *DestinationGcsUpdateSchemasFormatCodec `default:"xz" json:"codec"`
// The presets 0-3 are fast presets with medium compression. The presets 4-6 are fairly slow presets with high compression. The default preset is 6. The presets 7-9 are like the preset 6 but use bigger dictionaries and have higher compressor and decompressor memory requirements. Unless the uncompressed size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is waste of memory to use the presets 7, 8, or 9, respectively. Read more here for details.
- CompressionLevel *int64 `json:"compression_level,omitempty"`
+ CompressionLevel *int64 `default:"6" json:"compression_level"`
+}
+
+func (x Xz) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(x, "", false)
+}
+
+func (x *Xz) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &x, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Xz) GetCodec() *DestinationGcsUpdateSchemasFormatCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *Xz) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
}
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec string
+type DestinationGcsUpdateSchemasCodec string
const (
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2CodecBzip2 DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec = "bzip2"
+ DestinationGcsUpdateSchemasCodecBzip2 DestinationGcsUpdateSchemasCodec = "bzip2"
)
-func (e DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec) ToPointer() *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec {
+func (e DestinationGcsUpdateSchemasCodec) ToPointer() *DestinationGcsUpdateSchemasCodec {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateSchemasCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "bzip2":
- *e = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec(v)
+ *e = DestinationGcsUpdateSchemasCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateSchemasCodec: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 struct {
- Codec DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec `json:"codec"`
+// Bzip2 - The compression algorithm used to compress data. Default to no compression.
+type Bzip2 struct {
+ Codec *DestinationGcsUpdateSchemasCodec `default:"bzip2" json:"codec"`
}
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec string
+func (b Bzip2) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(b, "", false)
+}
+
+func (b *Bzip2) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &b, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Bzip2) GetCodec() *DestinationGcsUpdateSchemasCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+type DestinationGcsUpdateCodec string
const (
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodecDeflate DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec = "Deflate"
+ DestinationGcsUpdateCodecDeflate DestinationGcsUpdateCodec = "Deflate"
)
-func (e DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec) ToPointer() *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec {
+func (e DestinationGcsUpdateCodec) ToPointer() *DestinationGcsUpdateCodec {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Deflate":
- *e = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec(v)
+ *e = DestinationGcsUpdateCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateCodec: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate struct {
- Codec DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec `json:"codec"`
+// Deflate - The compression algorithm used to compress data. Default to no compression.
+type Deflate struct {
+ Codec *DestinationGcsUpdateCodec `default:"Deflate" json:"codec"`
// 0: no compression & fastest, 9: best compression & slowest.
- CompressionLevel *int64 `json:"compression_level,omitempty"`
+ CompressionLevel *int64 `default:"0" json:"compression_level"`
+}
+
+func (d Deflate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *Deflate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Deflate) GetCodec() *DestinationGcsUpdateCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *Deflate) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
}
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec string
+type Codec string
const (
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodecNoCompression DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec = "no compression"
+ CodecNoCompression Codec = "no compression"
)
-func (e DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec) ToPointer() *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec {
+func (e Codec) ToPointer() *Codec {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec) UnmarshalJSON(data []byte) error {
+func (e *Codec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "no compression":
- *e = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec(v)
+ *e = Codec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec: %v", v)
+ return fmt.Errorf("invalid value for Codec: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression - The compression algorithm used to compress data. Default to no compression.
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression struct {
- Codec DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec `json:"codec"`
+// NoCompression - The compression algorithm used to compress data. Default to no compression.
+type NoCompression struct {
+ Codec *Codec `default:"no compression" json:"codec"`
+}
+
+func (n NoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(n, "", false)
}
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecType string
+func (n *NoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &n, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *NoCompression) GetCodec() *Codec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+type CompressionCodecType string
const (
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs-update_Output Format_Avro: Apache Avro_Compression Codec_No Compression"
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs-update_Output Format_Avro: Apache Avro_Compression Codec_Deflate"
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs-update_Output Format_Avro: Apache Avro_Compression Codec_bzip2"
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs-update_Output Format_Avro: Apache Avro_Compression Codec_xz"
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs-update_Output Format_Avro: Apache Avro_Compression Codec_zstandard"
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-gcs-update_Output Format_Avro: Apache Avro_Compression Codec_snappy"
+ CompressionCodecTypeNoCompression CompressionCodecType = "No Compression"
+ CompressionCodecTypeDeflate CompressionCodecType = "Deflate"
+ CompressionCodecTypeBzip2 CompressionCodecType = "bzip2"
+ CompressionCodecTypeXz CompressionCodecType = "xz"
+ CompressionCodecTypeZstandard CompressionCodecType = "zstandard"
+ CompressionCodecTypeSnappy CompressionCodecType = "snappy"
)
-type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec struct {
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy
+type CompressionCodec struct {
+ NoCompression *NoCompression
+ Deflate *Deflate
+ Bzip2 *Bzip2
+ Xz *Xz
+ Zstandard *Zstandard
+ Snappy *Snappy
- Type DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecType
+ Type CompressionCodecType
}
-func CreateDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression(destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression) DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression
+func CreateCompressionCodecNoCompression(noCompression NoCompression) CompressionCodec {
+ typ := CompressionCodecTypeNoCompression
- return DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression: &destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression,
- Type: typ,
+ return CompressionCodec{
+ NoCompression: &noCompression,
+ Type: typ,
}
}
-func CreateDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate(destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate) DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate
+func CreateCompressionCodecDeflate(deflate Deflate) CompressionCodec {
+ typ := CompressionCodecTypeDeflate
- return DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate: &destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate,
- Type: typ,
+ return CompressionCodec{
+ Deflate: &deflate,
+ Type: typ,
}
}
-func CreateDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2(destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2) DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2
+func CreateCompressionCodecBzip2(bzip2 Bzip2) CompressionCodec {
+ typ := CompressionCodecTypeBzip2
- return DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2: &destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2,
- Type: typ,
+ return CompressionCodec{
+ Bzip2: &bzip2,
+ Type: typ,
}
}
-func CreateDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz(destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz) DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz
+func CreateCompressionCodecXz(xz Xz) CompressionCodec {
+ typ := CompressionCodecTypeXz
- return DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz: &destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz,
+ return CompressionCodec{
+ Xz: &xz,
Type: typ,
}
}
-func CreateDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard(destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard) DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard
+func CreateCompressionCodecZstandard(zstandard Zstandard) CompressionCodec {
+ typ := CompressionCodecTypeZstandard
- return DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard: &destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard,
- Type: typ,
+ return CompressionCodec{
+ Zstandard: &zstandard,
+ Type: typ,
}
}
-func CreateDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy(destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy) DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy
+func CreateCompressionCodecSnappy(snappy Snappy) CompressionCodec {
+ typ := CompressionCodecTypeSnappy
- return DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy: &destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy,
- Type: typ,
+ return CompressionCodec{
+ Snappy: &snappy,
+ Type: typ,
}
}
-func (u *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *CompressionCodec) UnmarshalJSON(data []byte) error {
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression); err == nil {
- u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression
- u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression
+ noCompression := new(NoCompression)
+ if err := utils.UnmarshalJSON(data, &noCompression, "", true, true); err == nil {
+ u.NoCompression = noCompression
+ u.Type = CompressionCodecTypeNoCompression
return nil
}
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil {
- u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2
- u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2
+ bzip2 := new(Bzip2)
+ if err := utils.UnmarshalJSON(data, &bzip2, "", true, true); err == nil {
+ u.Bzip2 = bzip2
+ u.Type = CompressionCodecTypeBzip2
return nil
}
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil {
- u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy
- u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy
+ snappy := new(Snappy)
+ if err := utils.UnmarshalJSON(data, &snappy, "", true, true); err == nil {
+ u.Snappy = snappy
+ u.Type = CompressionCodecTypeSnappy
return nil
}
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil {
- u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate
- u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate
+ deflate := new(Deflate)
+ if err := utils.UnmarshalJSON(data, &deflate, "", true, true); err == nil {
+ u.Deflate = deflate
+ u.Type = CompressionCodecTypeDeflate
return nil
}
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz); err == nil {
- u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz
- u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz
+ xz := new(Xz)
+ if err := utils.UnmarshalJSON(data, &xz, "", true, true); err == nil {
+ u.Xz = xz
+ u.Type = CompressionCodecTypeXz
return nil
}
- destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard); err == nil {
- u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard
- u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard
+ zstandard := new(Zstandard)
+ if err := utils.UnmarshalJSON(data, &zstandard, "", true, true); err == nil {
+ u.Zstandard = zstandard
+ u.Type = CompressionCodecTypeZstandard
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec) MarshalJSON() ([]byte, error) {
- if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression)
+func (u CompressionCodec) MarshalJSON() ([]byte, error) {
+ if u.NoCompression != nil {
+ return utils.MarshalJSON(u.NoCompression, "", true)
}
- if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2)
+ if u.Deflate != nil {
+ return utils.MarshalJSON(u.Deflate, "", true)
}
- if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy)
+ if u.Bzip2 != nil {
+ return utils.MarshalJSON(u.Bzip2, "", true)
}
- if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate)
+ if u.Xz != nil {
+ return utils.MarshalJSON(u.Xz, "", true)
}
- if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz)
+ if u.Zstandard != nil {
+ return utils.MarshalJSON(u.Zstandard, "", true)
}
- if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard)
+ if u.Snappy != nil {
+ return utils.MarshalJSON(u.Snappy, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationGcsUpdateOutputFormatAvroApacheAvroFormatType string
+type DestinationGcsUpdateFormatType string
const (
- DestinationGcsUpdateOutputFormatAvroApacheAvroFormatTypeAvro DestinationGcsUpdateOutputFormatAvroApacheAvroFormatType = "Avro"
+ DestinationGcsUpdateFormatTypeAvro DestinationGcsUpdateFormatType = "Avro"
)
-func (e DestinationGcsUpdateOutputFormatAvroApacheAvroFormatType) ToPointer() *DestinationGcsUpdateOutputFormatAvroApacheAvroFormatType {
+func (e DestinationGcsUpdateFormatType) ToPointer() *DestinationGcsUpdateFormatType {
return &e
}
-func (e *DestinationGcsUpdateOutputFormatAvroApacheAvroFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationGcsUpdateFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Avro":
- *e = DestinationGcsUpdateOutputFormatAvroApacheAvroFormatType(v)
+ *e = DestinationGcsUpdateFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGcsUpdateOutputFormatAvroApacheAvroFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationGcsUpdateFormatType: %v", v)
}
}
-// DestinationGcsUpdateOutputFormatAvroApacheAvro - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
-type DestinationGcsUpdateOutputFormatAvroApacheAvro struct {
+// AvroApacheAvro - Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
+type AvroApacheAvro struct {
// The compression algorithm used to compress data. Default to no compression.
- CompressionCodec DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec `json:"compression_codec"`
- FormatType DestinationGcsUpdateOutputFormatAvroApacheAvroFormatType `json:"format_type"`
+ CompressionCodec CompressionCodec `json:"compression_codec"`
+ FormatType *DestinationGcsUpdateFormatType `default:"Avro" json:"format_type"`
+}
+
+func (a AvroApacheAvro) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AvroApacheAvro) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AvroApacheAvro) GetCompressionCodec() CompressionCodec {
+ if o == nil {
+ return CompressionCodec{}
+ }
+ return o.CompressionCodec
+}
+
+func (o *AvroApacheAvro) GetFormatType() *DestinationGcsUpdateFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
}
type DestinationGcsUpdateOutputFormatType string
const (
- DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatAvroApacheAvro DestinationGcsUpdateOutputFormatType = "destination-gcs-update_Output Format_Avro: Apache Avro"
- DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatCSVCommaSeparatedValues DestinationGcsUpdateOutputFormatType = "destination-gcs-update_Output Format_CSV: Comma-Separated Values"
- DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON DestinationGcsUpdateOutputFormatType = "destination-gcs-update_Output Format_JSON Lines: newline-delimited JSON"
- DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatParquetColumnarStorage DestinationGcsUpdateOutputFormatType = "destination-gcs-update_Output Format_Parquet: Columnar Storage"
+ DestinationGcsUpdateOutputFormatTypeAvroApacheAvro DestinationGcsUpdateOutputFormatType = "Avro: Apache Avro"
+ DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateCSVCommaSeparatedValues DestinationGcsUpdateOutputFormatType = "destination-gcs-update_CSV: Comma-Separated Values"
+ DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateJSONLinesNewlineDelimitedJSON DestinationGcsUpdateOutputFormatType = "destination-gcs-update_JSON Lines: newline-delimited JSON"
+ DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateParquetColumnarStorage DestinationGcsUpdateOutputFormatType = "destination-gcs-update_Parquet: Columnar Storage"
)
type DestinationGcsUpdateOutputFormat struct {
- DestinationGcsUpdateOutputFormatAvroApacheAvro *DestinationGcsUpdateOutputFormatAvroApacheAvro
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues *DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON *DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON
- DestinationGcsUpdateOutputFormatParquetColumnarStorage *DestinationGcsUpdateOutputFormatParquetColumnarStorage
+ AvroApacheAvro *AvroApacheAvro
+ DestinationGcsUpdateCSVCommaSeparatedValues *DestinationGcsUpdateCSVCommaSeparatedValues
+ DestinationGcsUpdateJSONLinesNewlineDelimitedJSON *DestinationGcsUpdateJSONLinesNewlineDelimitedJSON
+ DestinationGcsUpdateParquetColumnarStorage *DestinationGcsUpdateParquetColumnarStorage
Type DestinationGcsUpdateOutputFormatType
}
-func CreateDestinationGcsUpdateOutputFormatDestinationGcsUpdateOutputFormatAvroApacheAvro(destinationGcsUpdateOutputFormatAvroApacheAvro DestinationGcsUpdateOutputFormatAvroApacheAvro) DestinationGcsUpdateOutputFormat {
- typ := DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatAvroApacheAvro
+func CreateDestinationGcsUpdateOutputFormatAvroApacheAvro(avroApacheAvro AvroApacheAvro) DestinationGcsUpdateOutputFormat {
+ typ := DestinationGcsUpdateOutputFormatTypeAvroApacheAvro
return DestinationGcsUpdateOutputFormat{
- DestinationGcsUpdateOutputFormatAvroApacheAvro: &destinationGcsUpdateOutputFormatAvroApacheAvro,
- Type: typ,
+ AvroApacheAvro: &avroApacheAvro,
+ Type: typ,
}
}
-func CreateDestinationGcsUpdateOutputFormatDestinationGcsUpdateOutputFormatCSVCommaSeparatedValues(destinationGcsUpdateOutputFormatCSVCommaSeparatedValues DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues) DestinationGcsUpdateOutputFormat {
- typ := DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatCSVCommaSeparatedValues
+func CreateDestinationGcsUpdateOutputFormatDestinationGcsUpdateCSVCommaSeparatedValues(destinationGcsUpdateCSVCommaSeparatedValues DestinationGcsUpdateCSVCommaSeparatedValues) DestinationGcsUpdateOutputFormat {
+ typ := DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateCSVCommaSeparatedValues
return DestinationGcsUpdateOutputFormat{
- DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues: &destinationGcsUpdateOutputFormatCSVCommaSeparatedValues,
+ DestinationGcsUpdateCSVCommaSeparatedValues: &destinationGcsUpdateCSVCommaSeparatedValues,
Type: typ,
}
}
-func CreateDestinationGcsUpdateOutputFormatDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON(destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON) DestinationGcsUpdateOutputFormat {
- typ := DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON
+func CreateDestinationGcsUpdateOutputFormatDestinationGcsUpdateJSONLinesNewlineDelimitedJSON(destinationGcsUpdateJSONLinesNewlineDelimitedJSON DestinationGcsUpdateJSONLinesNewlineDelimitedJSON) DestinationGcsUpdateOutputFormat {
+ typ := DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateJSONLinesNewlineDelimitedJSON
return DestinationGcsUpdateOutputFormat{
- DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON: &destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationGcsUpdateJSONLinesNewlineDelimitedJSON: &destinationGcsUpdateJSONLinesNewlineDelimitedJSON,
Type: typ,
}
}
-func CreateDestinationGcsUpdateOutputFormatDestinationGcsUpdateOutputFormatParquetColumnarStorage(destinationGcsUpdateOutputFormatParquetColumnarStorage DestinationGcsUpdateOutputFormatParquetColumnarStorage) DestinationGcsUpdateOutputFormat {
- typ := DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatParquetColumnarStorage
+func CreateDestinationGcsUpdateOutputFormatDestinationGcsUpdateParquetColumnarStorage(destinationGcsUpdateParquetColumnarStorage DestinationGcsUpdateParquetColumnarStorage) DestinationGcsUpdateOutputFormat {
+ typ := DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateParquetColumnarStorage
return DestinationGcsUpdateOutputFormat{
- DestinationGcsUpdateOutputFormatParquetColumnarStorage: &destinationGcsUpdateOutputFormatParquetColumnarStorage,
+ DestinationGcsUpdateParquetColumnarStorage: &destinationGcsUpdateParquetColumnarStorage,
Type: typ,
}
}
func (u *DestinationGcsUpdateOutputFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationGcsUpdateOutputFormatAvroApacheAvro := new(DestinationGcsUpdateOutputFormatAvroApacheAvro)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvro); err == nil {
- u.DestinationGcsUpdateOutputFormatAvroApacheAvro = destinationGcsUpdateOutputFormatAvroApacheAvro
- u.Type = DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatAvroApacheAvro
+
+ avroApacheAvro := new(AvroApacheAvro)
+ if err := utils.UnmarshalJSON(data, &avroApacheAvro, "", true, true); err == nil {
+ u.AvroApacheAvro = avroApacheAvro
+ u.Type = DestinationGcsUpdateOutputFormatTypeAvroApacheAvro
return nil
}
- destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON); err == nil {
- u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON = destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON
- u.Type = DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON
+ destinationGcsUpdateJSONLinesNewlineDelimitedJSON := new(DestinationGcsUpdateJSONLinesNewlineDelimitedJSON)
+ if err := utils.UnmarshalJSON(data, &destinationGcsUpdateJSONLinesNewlineDelimitedJSON, "", true, true); err == nil {
+ u.DestinationGcsUpdateJSONLinesNewlineDelimitedJSON = destinationGcsUpdateJSONLinesNewlineDelimitedJSON
+ u.Type = DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateJSONLinesNewlineDelimitedJSON
return nil
}
- destinationGcsUpdateOutputFormatCSVCommaSeparatedValues := new(DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatCSVCommaSeparatedValues); err == nil {
- u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues = destinationGcsUpdateOutputFormatCSVCommaSeparatedValues
- u.Type = DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatCSVCommaSeparatedValues
+ destinationGcsUpdateCSVCommaSeparatedValues := new(DestinationGcsUpdateCSVCommaSeparatedValues)
+ if err := utils.UnmarshalJSON(data, &destinationGcsUpdateCSVCommaSeparatedValues, "", true, true); err == nil {
+ u.DestinationGcsUpdateCSVCommaSeparatedValues = destinationGcsUpdateCSVCommaSeparatedValues
+ u.Type = DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateCSVCommaSeparatedValues
return nil
}
- destinationGcsUpdateOutputFormatParquetColumnarStorage := new(DestinationGcsUpdateOutputFormatParquetColumnarStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationGcsUpdateOutputFormatParquetColumnarStorage); err == nil {
- u.DestinationGcsUpdateOutputFormatParquetColumnarStorage = destinationGcsUpdateOutputFormatParquetColumnarStorage
- u.Type = DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatParquetColumnarStorage
+ destinationGcsUpdateParquetColumnarStorage := new(DestinationGcsUpdateParquetColumnarStorage)
+ if err := utils.UnmarshalJSON(data, &destinationGcsUpdateParquetColumnarStorage, "", true, true); err == nil {
+ u.DestinationGcsUpdateParquetColumnarStorage = destinationGcsUpdateParquetColumnarStorage
+ u.Type = DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateParquetColumnarStorage
return nil
}
@@ -988,71 +1335,71 @@ func (u *DestinationGcsUpdateOutputFormat) UnmarshalJSON(data []byte) error {
}
func (u DestinationGcsUpdateOutputFormat) MarshalJSON() ([]byte, error) {
- if u.DestinationGcsUpdateOutputFormatAvroApacheAvro != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvro)
+ if u.AvroApacheAvro != nil {
+ return utils.MarshalJSON(u.AvroApacheAvro, "", true)
}
- if u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON)
+ if u.DestinationGcsUpdateCSVCommaSeparatedValues != nil {
+ return utils.MarshalJSON(u.DestinationGcsUpdateCSVCommaSeparatedValues, "", true)
}
- if u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues)
+ if u.DestinationGcsUpdateJSONLinesNewlineDelimitedJSON != nil {
+ return utils.MarshalJSON(u.DestinationGcsUpdateJSONLinesNewlineDelimitedJSON, "", true)
}
- if u.DestinationGcsUpdateOutputFormatParquetColumnarStorage != nil {
- return json.Marshal(u.DestinationGcsUpdateOutputFormatParquetColumnarStorage)
+ if u.DestinationGcsUpdateParquetColumnarStorage != nil {
+ return utils.MarshalJSON(u.DestinationGcsUpdateParquetColumnarStorage, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationGCSUpdateGCSBucketRegion - Select a Region of the GCS Bucket. Read more here.
-type DestinationGCSUpdateGCSBucketRegion string
+// GCSBucketRegion - Select a Region of the GCS Bucket. Read more here.
+type GCSBucketRegion string
const (
- DestinationGCSUpdateGCSBucketRegionNorthamericaNortheast1 DestinationGCSUpdateGCSBucketRegion = "northamerica-northeast1"
- DestinationGCSUpdateGCSBucketRegionNorthamericaNortheast2 DestinationGCSUpdateGCSBucketRegion = "northamerica-northeast2"
- DestinationGCSUpdateGCSBucketRegionUsCentral1 DestinationGCSUpdateGCSBucketRegion = "us-central1"
- DestinationGCSUpdateGCSBucketRegionUsEast1 DestinationGCSUpdateGCSBucketRegion = "us-east1"
- DestinationGCSUpdateGCSBucketRegionUsEast4 DestinationGCSUpdateGCSBucketRegion = "us-east4"
- DestinationGCSUpdateGCSBucketRegionUsWest1 DestinationGCSUpdateGCSBucketRegion = "us-west1"
- DestinationGCSUpdateGCSBucketRegionUsWest2 DestinationGCSUpdateGCSBucketRegion = "us-west2"
- DestinationGCSUpdateGCSBucketRegionUsWest3 DestinationGCSUpdateGCSBucketRegion = "us-west3"
- DestinationGCSUpdateGCSBucketRegionUsWest4 DestinationGCSUpdateGCSBucketRegion = "us-west4"
- DestinationGCSUpdateGCSBucketRegionSouthamericaEast1 DestinationGCSUpdateGCSBucketRegion = "southamerica-east1"
- DestinationGCSUpdateGCSBucketRegionSouthamericaWest1 DestinationGCSUpdateGCSBucketRegion = "southamerica-west1"
- DestinationGCSUpdateGCSBucketRegionEuropeCentral2 DestinationGCSUpdateGCSBucketRegion = "europe-central2"
- DestinationGCSUpdateGCSBucketRegionEuropeNorth1 DestinationGCSUpdateGCSBucketRegion = "europe-north1"
- DestinationGCSUpdateGCSBucketRegionEuropeWest1 DestinationGCSUpdateGCSBucketRegion = "europe-west1"
- DestinationGCSUpdateGCSBucketRegionEuropeWest2 DestinationGCSUpdateGCSBucketRegion = "europe-west2"
- DestinationGCSUpdateGCSBucketRegionEuropeWest3 DestinationGCSUpdateGCSBucketRegion = "europe-west3"
- DestinationGCSUpdateGCSBucketRegionEuropeWest4 DestinationGCSUpdateGCSBucketRegion = "europe-west4"
- DestinationGCSUpdateGCSBucketRegionEuropeWest6 DestinationGCSUpdateGCSBucketRegion = "europe-west6"
- DestinationGCSUpdateGCSBucketRegionAsiaEast1 DestinationGCSUpdateGCSBucketRegion = "asia-east1"
- DestinationGCSUpdateGCSBucketRegionAsiaEast2 DestinationGCSUpdateGCSBucketRegion = "asia-east2"
- DestinationGCSUpdateGCSBucketRegionAsiaNortheast1 DestinationGCSUpdateGCSBucketRegion = "asia-northeast1"
- DestinationGCSUpdateGCSBucketRegionAsiaNortheast2 DestinationGCSUpdateGCSBucketRegion = "asia-northeast2"
- DestinationGCSUpdateGCSBucketRegionAsiaNortheast3 DestinationGCSUpdateGCSBucketRegion = "asia-northeast3"
- DestinationGCSUpdateGCSBucketRegionAsiaSouth1 DestinationGCSUpdateGCSBucketRegion = "asia-south1"
- DestinationGCSUpdateGCSBucketRegionAsiaSouth2 DestinationGCSUpdateGCSBucketRegion = "asia-south2"
- DestinationGCSUpdateGCSBucketRegionAsiaSoutheast1 DestinationGCSUpdateGCSBucketRegion = "asia-southeast1"
- DestinationGCSUpdateGCSBucketRegionAsiaSoutheast2 DestinationGCSUpdateGCSBucketRegion = "asia-southeast2"
- DestinationGCSUpdateGCSBucketRegionAustraliaSoutheast1 DestinationGCSUpdateGCSBucketRegion = "australia-southeast1"
- DestinationGCSUpdateGCSBucketRegionAustraliaSoutheast2 DestinationGCSUpdateGCSBucketRegion = "australia-southeast2"
- DestinationGCSUpdateGCSBucketRegionAsia DestinationGCSUpdateGCSBucketRegion = "asia"
- DestinationGCSUpdateGCSBucketRegionEu DestinationGCSUpdateGCSBucketRegion = "eu"
- DestinationGCSUpdateGCSBucketRegionUs DestinationGCSUpdateGCSBucketRegion = "us"
- DestinationGCSUpdateGCSBucketRegionAsia1 DestinationGCSUpdateGCSBucketRegion = "asia1"
- DestinationGCSUpdateGCSBucketRegionEur4 DestinationGCSUpdateGCSBucketRegion = "eur4"
- DestinationGCSUpdateGCSBucketRegionNam4 DestinationGCSUpdateGCSBucketRegion = "nam4"
+ GCSBucketRegionNorthamericaNortheast1 GCSBucketRegion = "northamerica-northeast1"
+ GCSBucketRegionNorthamericaNortheast2 GCSBucketRegion = "northamerica-northeast2"
+ GCSBucketRegionUsCentral1 GCSBucketRegion = "us-central1"
+ GCSBucketRegionUsEast1 GCSBucketRegion = "us-east1"
+ GCSBucketRegionUsEast4 GCSBucketRegion = "us-east4"
+ GCSBucketRegionUsWest1 GCSBucketRegion = "us-west1"
+ GCSBucketRegionUsWest2 GCSBucketRegion = "us-west2"
+ GCSBucketRegionUsWest3 GCSBucketRegion = "us-west3"
+ GCSBucketRegionUsWest4 GCSBucketRegion = "us-west4"
+ GCSBucketRegionSouthamericaEast1 GCSBucketRegion = "southamerica-east1"
+ GCSBucketRegionSouthamericaWest1 GCSBucketRegion = "southamerica-west1"
+ GCSBucketRegionEuropeCentral2 GCSBucketRegion = "europe-central2"
+ GCSBucketRegionEuropeNorth1 GCSBucketRegion = "europe-north1"
+ GCSBucketRegionEuropeWest1 GCSBucketRegion = "europe-west1"
+ GCSBucketRegionEuropeWest2 GCSBucketRegion = "europe-west2"
+ GCSBucketRegionEuropeWest3 GCSBucketRegion = "europe-west3"
+ GCSBucketRegionEuropeWest4 GCSBucketRegion = "europe-west4"
+ GCSBucketRegionEuropeWest6 GCSBucketRegion = "europe-west6"
+ GCSBucketRegionAsiaEast1 GCSBucketRegion = "asia-east1"
+ GCSBucketRegionAsiaEast2 GCSBucketRegion = "asia-east2"
+ GCSBucketRegionAsiaNortheast1 GCSBucketRegion = "asia-northeast1"
+ GCSBucketRegionAsiaNortheast2 GCSBucketRegion = "asia-northeast2"
+ GCSBucketRegionAsiaNortheast3 GCSBucketRegion = "asia-northeast3"
+ GCSBucketRegionAsiaSouth1 GCSBucketRegion = "asia-south1"
+ GCSBucketRegionAsiaSouth2 GCSBucketRegion = "asia-south2"
+ GCSBucketRegionAsiaSoutheast1 GCSBucketRegion = "asia-southeast1"
+ GCSBucketRegionAsiaSoutheast2 GCSBucketRegion = "asia-southeast2"
+ GCSBucketRegionAustraliaSoutheast1 GCSBucketRegion = "australia-southeast1"
+ GCSBucketRegionAustraliaSoutheast2 GCSBucketRegion = "australia-southeast2"
+ GCSBucketRegionAsia GCSBucketRegion = "asia"
+ GCSBucketRegionEu GCSBucketRegion = "eu"
+ GCSBucketRegionUs GCSBucketRegion = "us"
+ GCSBucketRegionAsia1 GCSBucketRegion = "asia1"
+ GCSBucketRegionEur4 GCSBucketRegion = "eur4"
+ GCSBucketRegionNam4 GCSBucketRegion = "nam4"
)
-func (e DestinationGCSUpdateGCSBucketRegion) ToPointer() *DestinationGCSUpdateGCSBucketRegion {
+func (e GCSBucketRegion) ToPointer() *GCSBucketRegion {
return &e
}
-func (e *DestinationGCSUpdateGCSBucketRegion) UnmarshalJSON(data []byte) error {
+func (e *GCSBucketRegion) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -1127,16 +1474,16 @@ func (e *DestinationGCSUpdateGCSBucketRegion) UnmarshalJSON(data []byte) error {
case "eur4":
fallthrough
case "nam4":
- *e = DestinationGCSUpdateGCSBucketRegion(v)
+ *e = GCSBucketRegion(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGCSUpdateGCSBucketRegion: %v", v)
+ return fmt.Errorf("invalid value for GCSBucketRegion: %v", v)
}
}
type DestinationGcsUpdate struct {
// An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
- Credential DestinationGcsUpdateAuthentication `json:"credential"`
+ Credential Authentication `json:"credential"`
// Output data format. One of the following formats must be selected - AVRO format, PARQUET format, CSV format, or JSONL format.
Format DestinationGcsUpdateOutputFormat `json:"format"`
// You can find the bucket name in the App Engine Admin console Application Settings page, under the label Google Cloud Storage Bucket. Read more here.
@@ -1144,5 +1491,51 @@ type DestinationGcsUpdate struct {
// GCS Bucket Path string Subdirectory under the above bucket to sync the data into.
GcsBucketPath string `json:"gcs_bucket_path"`
// Select a Region of the GCS Bucket. Read more here.
- GcsBucketRegion *DestinationGCSUpdateGCSBucketRegion `json:"gcs_bucket_region,omitempty"`
+ GcsBucketRegion *GCSBucketRegion `default:"us" json:"gcs_bucket_region"`
+}
+
+func (d DestinationGcsUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGcsUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGcsUpdate) GetCredential() Authentication {
+ if o == nil {
+ return Authentication{}
+ }
+ return o.Credential
+}
+
+func (o *DestinationGcsUpdate) GetFormat() DestinationGcsUpdateOutputFormat {
+ if o == nil {
+ return DestinationGcsUpdateOutputFormat{}
+ }
+ return o.Format
+}
+
+func (o *DestinationGcsUpdate) GetGcsBucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.GcsBucketName
+}
+
+func (o *DestinationGcsUpdate) GetGcsBucketPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.GcsBucketPath
+}
+
+func (o *DestinationGcsUpdate) GetGcsBucketRegion() *GCSBucketRegion {
+ if o == nil {
+ return nil
+ }
+ return o.GcsBucketRegion
}
diff --git a/internal/sdk/pkg/models/shared/destinationgooglesheets.go b/internal/sdk/pkg/models/shared/destinationgooglesheets.go
old mode 100755
new mode 100644
index 79507679b..89e124130
--- a/internal/sdk/pkg/models/shared/destinationgooglesheets.go
+++ b/internal/sdk/pkg/models/shared/destinationgooglesheets.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// DestinationGoogleSheetsAuthenticationViaGoogleOAuth - Google API Credentials for connecting to Google Sheets and Google Drive APIs
@@ -17,34 +18,84 @@ type DestinationGoogleSheetsAuthenticationViaGoogleOAuth struct {
RefreshToken string `json:"refresh_token"`
}
-type DestinationGoogleSheetsGoogleSheets string
+func (o *DestinationGoogleSheetsAuthenticationViaGoogleOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *DestinationGoogleSheetsAuthenticationViaGoogleOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *DestinationGoogleSheetsAuthenticationViaGoogleOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+type GoogleSheets string
const (
- DestinationGoogleSheetsGoogleSheetsGoogleSheets DestinationGoogleSheetsGoogleSheets = "google-sheets"
+ GoogleSheetsGoogleSheets GoogleSheets = "google-sheets"
)
-func (e DestinationGoogleSheetsGoogleSheets) ToPointer() *DestinationGoogleSheetsGoogleSheets {
+func (e GoogleSheets) ToPointer() *GoogleSheets {
return &e
}
-func (e *DestinationGoogleSheetsGoogleSheets) UnmarshalJSON(data []byte) error {
+func (e *GoogleSheets) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "google-sheets":
- *e = DestinationGoogleSheetsGoogleSheets(v)
+ *e = GoogleSheets(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationGoogleSheetsGoogleSheets: %v", v)
+ return fmt.Errorf("invalid value for GoogleSheets: %v", v)
}
}
type DestinationGoogleSheets struct {
// Google API Credentials for connecting to Google Sheets and Google Drive APIs
Credentials DestinationGoogleSheetsAuthenticationViaGoogleOAuth `json:"credentials"`
- DestinationType DestinationGoogleSheetsGoogleSheets `json:"destinationType"`
+ destinationType GoogleSheets `const:"google-sheets" json:"destinationType"`
// The link to your spreadsheet. See this guide for more details.
SpreadsheetID string `json:"spreadsheet_id"`
}
+
+func (d DestinationGoogleSheets) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationGoogleSheets) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationGoogleSheets) GetCredentials() DestinationGoogleSheetsAuthenticationViaGoogleOAuth {
+ if o == nil {
+ return DestinationGoogleSheetsAuthenticationViaGoogleOAuth{}
+ }
+ return o.Credentials
+}
+
+func (o *DestinationGoogleSheets) GetDestinationType() GoogleSheets {
+ return GoogleSheetsGoogleSheets
+}
+
+func (o *DestinationGoogleSheets) GetSpreadsheetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SpreadsheetID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationgooglesheetscreaterequest.go b/internal/sdk/pkg/models/shared/destinationgooglesheetscreaterequest.go
old mode 100755
new mode 100644
index 91a08316b..139d87688
--- a/internal/sdk/pkg/models/shared/destinationgooglesheetscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationgooglesheetscreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationGoogleSheetsCreateRequest struct {
Configuration DestinationGoogleSheets `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationGoogleSheetsCreateRequest) GetConfiguration() DestinationGoogleSheets {
+ if o == nil {
+ return DestinationGoogleSheets{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationGoogleSheetsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationGoogleSheetsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationGoogleSheetsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationgooglesheetsputrequest.go b/internal/sdk/pkg/models/shared/destinationgooglesheetsputrequest.go
old mode 100755
new mode 100644
index 6b1fc5179..07e40d974
--- a/internal/sdk/pkg/models/shared/destinationgooglesheetsputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationgooglesheetsputrequest.go
@@ -7,3 +7,24 @@ type DestinationGoogleSheetsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationGoogleSheetsPutRequest) GetConfiguration() DestinationGoogleSheetsUpdate {
+ if o == nil {
+ return DestinationGoogleSheetsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationGoogleSheetsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationGoogleSheetsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationgooglesheetsupdate.go b/internal/sdk/pkg/models/shared/destinationgooglesheetsupdate.go
old mode 100755
new mode 100644
index 81114c419..a41e95914
--- a/internal/sdk/pkg/models/shared/destinationgooglesheetsupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationgooglesheetsupdate.go
@@ -2,8 +2,8 @@
package shared
-// DestinationGoogleSheetsUpdateAuthenticationViaGoogleOAuth - Google API Credentials for connecting to Google Sheets and Google Drive APIs
-type DestinationGoogleSheetsUpdateAuthenticationViaGoogleOAuth struct {
+// AuthenticationViaGoogleOAuth - Google API Credentials for connecting to Google Sheets and Google Drive APIs
+type AuthenticationViaGoogleOAuth struct {
// The Client ID of your Google Sheets developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Google Sheets developer application.
@@ -12,9 +12,44 @@ type DestinationGoogleSheetsUpdateAuthenticationViaGoogleOAuth struct {
RefreshToken string `json:"refresh_token"`
}
+func (o *AuthenticationViaGoogleOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *AuthenticationViaGoogleOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *AuthenticationViaGoogleOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type DestinationGoogleSheetsUpdate struct {
// Google API Credentials for connecting to Google Sheets and Google Drive APIs
- Credentials DestinationGoogleSheetsUpdateAuthenticationViaGoogleOAuth `json:"credentials"`
+ Credentials AuthenticationViaGoogleOAuth `json:"credentials"`
// The link to your spreadsheet. See this guide for more details.
SpreadsheetID string `json:"spreadsheet_id"`
}
+
+func (o *DestinationGoogleSheetsUpdate) GetCredentials() AuthenticationViaGoogleOAuth {
+ if o == nil {
+ return AuthenticationViaGoogleOAuth{}
+ }
+ return o.Credentials
+}
+
+func (o *DestinationGoogleSheetsUpdate) GetSpreadsheetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SpreadsheetID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationkeen.go b/internal/sdk/pkg/models/shared/destinationkeen.go
old mode 100755
new mode 100644
index 782fb7769..9da4f2c32
--- a/internal/sdk/pkg/models/shared/destinationkeen.go
+++ b/internal/sdk/pkg/models/shared/destinationkeen.go
@@ -5,38 +5,75 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationKeenKeen string
+type Keen string
const (
- DestinationKeenKeenKeen DestinationKeenKeen = "keen"
+ KeenKeen Keen = "keen"
)
-func (e DestinationKeenKeen) ToPointer() *DestinationKeenKeen {
+func (e Keen) ToPointer() *Keen {
return &e
}
-func (e *DestinationKeenKeen) UnmarshalJSON(data []byte) error {
+func (e *Keen) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "keen":
- *e = DestinationKeenKeen(v)
+ *e = Keen(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationKeenKeen: %v", v)
+ return fmt.Errorf("invalid value for Keen: %v", v)
}
}
type DestinationKeen struct {
// To get Keen Master API Key, navigate to the Access tab from the left-hand, side panel and check the Project Details section.
- APIKey string `json:"api_key"`
- DestinationType DestinationKeenKeen `json:"destinationType"`
+ APIKey string `json:"api_key"`
+ destinationType Keen `const:"keen" json:"destinationType"`
// Allow connector to guess keen.timestamp value based on the streamed data.
- InferTimestamp *bool `json:"infer_timestamp,omitempty"`
+ InferTimestamp *bool `default:"true" json:"infer_timestamp"`
// To get Keen Project ID, navigate to the Access tab from the left-hand, side panel and check the Project Details section.
ProjectID string `json:"project_id"`
}
+
+func (d DestinationKeen) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationKeen) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationKeen) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *DestinationKeen) GetDestinationType() Keen {
+ return KeenKeen
+}
+
+func (o *DestinationKeen) GetInferTimestamp() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.InferTimestamp
+}
+
+func (o *DestinationKeen) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationkeencreaterequest.go b/internal/sdk/pkg/models/shared/destinationkeencreaterequest.go
old mode 100755
new mode 100644
index 6890764ed..549623164
--- a/internal/sdk/pkg/models/shared/destinationkeencreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationkeencreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationKeenCreateRequest struct {
Configuration DestinationKeen `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationKeenCreateRequest) GetConfiguration() DestinationKeen {
+ if o == nil {
+ return DestinationKeen{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationKeenCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationKeenCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationKeenCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationkeenputrequest.go b/internal/sdk/pkg/models/shared/destinationkeenputrequest.go
old mode 100755
new mode 100644
index 128c70124..1899c0e61
--- a/internal/sdk/pkg/models/shared/destinationkeenputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationkeenputrequest.go
@@ -7,3 +7,24 @@ type DestinationKeenPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationKeenPutRequest) GetConfiguration() DestinationKeenUpdate {
+ if o == nil {
+ return DestinationKeenUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationKeenPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationKeenPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationkeenupdate.go b/internal/sdk/pkg/models/shared/destinationkeenupdate.go
old mode 100755
new mode 100644
index c7bee0be8..6c9d82783
--- a/internal/sdk/pkg/models/shared/destinationkeenupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationkeenupdate.go
@@ -2,11 +2,47 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type DestinationKeenUpdate struct {
// To get Keen Master API Key, navigate to the Access tab from the left-hand, side panel and check the Project Details section.
APIKey string `json:"api_key"`
// Allow connector to guess keen.timestamp value based on the streamed data.
- InferTimestamp *bool `json:"infer_timestamp,omitempty"`
+ InferTimestamp *bool `default:"true" json:"infer_timestamp"`
// To get Keen Project ID, navigate to the Access tab from the left-hand, side panel and check the Project Details section.
ProjectID string `json:"project_id"`
}
+
+func (d DestinationKeenUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationKeenUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationKeenUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *DestinationKeenUpdate) GetInferTimestamp() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.InferTimestamp
+}
+
+func (o *DestinationKeenUpdate) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationkinesis.go b/internal/sdk/pkg/models/shared/destinationkinesis.go
old mode 100755
new mode 100644
index 04d12eab1..f16f6b6e3
--- a/internal/sdk/pkg/models/shared/destinationkinesis.go
+++ b/internal/sdk/pkg/models/shared/destinationkinesis.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationKinesisKinesis string
+type Kinesis string
const (
- DestinationKinesisKinesisKinesis DestinationKinesisKinesis = "kinesis"
+ KinesisKinesis Kinesis = "kinesis"
)
-func (e DestinationKinesisKinesis) ToPointer() *DestinationKinesisKinesis {
+func (e Kinesis) ToPointer() *Kinesis {
return &e
}
-func (e *DestinationKinesisKinesis) UnmarshalJSON(data []byte) error {
+func (e *Kinesis) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "kinesis":
- *e = DestinationKinesisKinesis(v)
+ *e = Kinesis(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationKinesisKinesis: %v", v)
+ return fmt.Errorf("invalid value for Kinesis: %v", v)
}
}
@@ -35,8 +36,8 @@ type DestinationKinesis struct {
// Generate the AWS Access Key for current user.
AccessKey string `json:"accessKey"`
// Buffer size for storing kinesis records before being batch streamed.
- BufferSize int64 `json:"bufferSize"`
- DestinationType DestinationKinesisKinesis `json:"destinationType"`
+ BufferSize *int64 `default:"100" json:"bufferSize"`
+ destinationType Kinesis `const:"kinesis" json:"destinationType"`
// AWS Kinesis endpoint.
Endpoint string `json:"endpoint"`
// The AWS Private Key - a string of numbers and letters that are unique for each account, also known as a "recovery phrase".
@@ -44,5 +45,62 @@ type DestinationKinesis struct {
// AWS region. Your account determines the Regions that are available to you.
Region string `json:"region"`
// Number of shards to which the data should be streamed.
- ShardCount int64 `json:"shardCount"`
+ ShardCount *int64 `default:"5" json:"shardCount"`
+}
+
+func (d DestinationKinesis) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationKinesis) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationKinesis) GetAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKey
+}
+
+func (o *DestinationKinesis) GetBufferSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BufferSize
+}
+
+func (o *DestinationKinesis) GetDestinationType() Kinesis {
+ return KinesisKinesis
+}
+
+func (o *DestinationKinesis) GetEndpoint() string {
+ if o == nil {
+ return ""
+ }
+ return o.Endpoint
+}
+
+func (o *DestinationKinesis) GetPrivateKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PrivateKey
+}
+
+func (o *DestinationKinesis) GetRegion() string {
+ if o == nil {
+ return ""
+ }
+ return o.Region
+}
+
+func (o *DestinationKinesis) GetShardCount() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ShardCount
}
diff --git a/internal/sdk/pkg/models/shared/destinationkinesiscreaterequest.go b/internal/sdk/pkg/models/shared/destinationkinesiscreaterequest.go
old mode 100755
new mode 100644
index 2dc0177cd..4f85bbcfa
--- a/internal/sdk/pkg/models/shared/destinationkinesiscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationkinesiscreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationKinesisCreateRequest struct {
Configuration DestinationKinesis `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationKinesisCreateRequest) GetConfiguration() DestinationKinesis {
+ if o == nil {
+ return DestinationKinesis{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationKinesisCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationKinesisCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationKinesisCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationkinesisputrequest.go b/internal/sdk/pkg/models/shared/destinationkinesisputrequest.go
old mode 100755
new mode 100644
index 66b481e88..8826436c4
--- a/internal/sdk/pkg/models/shared/destinationkinesisputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationkinesisputrequest.go
@@ -7,3 +7,24 @@ type DestinationKinesisPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationKinesisPutRequest) GetConfiguration() DestinationKinesisUpdate {
+ if o == nil {
+ return DestinationKinesisUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationKinesisPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationKinesisPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationkinesisupdate.go b/internal/sdk/pkg/models/shared/destinationkinesisupdate.go
old mode 100755
new mode 100644
index 4244c67f4..74c1083b4
--- a/internal/sdk/pkg/models/shared/destinationkinesisupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationkinesisupdate.go
@@ -2,11 +2,15 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type DestinationKinesisUpdate struct {
// Generate the AWS Access Key for current user.
AccessKey string `json:"accessKey"`
// Buffer size for storing kinesis records before being batch streamed.
- BufferSize int64 `json:"bufferSize"`
+ BufferSize *int64 `default:"100" json:"bufferSize"`
// AWS Kinesis endpoint.
Endpoint string `json:"endpoint"`
// The AWS Private Key - a string of numbers and letters that are unique for each account, also known as a "recovery phrase".
@@ -14,5 +18,58 @@ type DestinationKinesisUpdate struct {
// AWS region. Your account determines the Regions that are available to you.
Region string `json:"region"`
// Number of shards to which the data should be streamed.
- ShardCount int64 `json:"shardCount"`
+ ShardCount *int64 `default:"5" json:"shardCount"`
+}
+
+func (d DestinationKinesisUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationKinesisUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationKinesisUpdate) GetAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKey
+}
+
+func (o *DestinationKinesisUpdate) GetBufferSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BufferSize
+}
+
+func (o *DestinationKinesisUpdate) GetEndpoint() string {
+ if o == nil {
+ return ""
+ }
+ return o.Endpoint
+}
+
+func (o *DestinationKinesisUpdate) GetPrivateKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PrivateKey
+}
+
+func (o *DestinationKinesisUpdate) GetRegion() string {
+ if o == nil {
+ return ""
+ }
+ return o.Region
+}
+
+func (o *DestinationKinesisUpdate) GetShardCount() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ShardCount
}
diff --git a/internal/sdk/pkg/models/shared/destinationlangchain.go b/internal/sdk/pkg/models/shared/destinationlangchain.go
old mode 100755
new mode 100644
index 8addc3858..27443e36c
--- a/internal/sdk/pkg/models/shared/destinationlangchain.go
+++ b/internal/sdk/pkg/models/shared/destinationlangchain.go
@@ -3,145 +3,177 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationLangchainLangchain string
+type Langchain string
const (
- DestinationLangchainLangchainLangchain DestinationLangchainLangchain = "langchain"
+ LangchainLangchain Langchain = "langchain"
)
-func (e DestinationLangchainLangchain) ToPointer() *DestinationLangchainLangchain {
+func (e Langchain) ToPointer() *Langchain {
return &e
}
-func (e *DestinationLangchainLangchain) UnmarshalJSON(data []byte) error {
+func (e *Langchain) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "langchain":
- *e = DestinationLangchainLangchain(v)
+ *e = Langchain(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationLangchainLangchain: %v", v)
+ return fmt.Errorf("invalid value for Langchain: %v", v)
}
}
-type DestinationLangchainEmbeddingFakeMode string
+type DestinationLangchainSchemasMode string
const (
- DestinationLangchainEmbeddingFakeModeFake DestinationLangchainEmbeddingFakeMode = "fake"
+ DestinationLangchainSchemasModeFake DestinationLangchainSchemasMode = "fake"
)
-func (e DestinationLangchainEmbeddingFakeMode) ToPointer() *DestinationLangchainEmbeddingFakeMode {
+func (e DestinationLangchainSchemasMode) ToPointer() *DestinationLangchainSchemasMode {
return &e
}
-func (e *DestinationLangchainEmbeddingFakeMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationLangchainSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "fake":
- *e = DestinationLangchainEmbeddingFakeMode(v)
+ *e = DestinationLangchainSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationLangchainEmbeddingFakeMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationLangchainSchemasMode: %v", v)
}
}
-// DestinationLangchainEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
-type DestinationLangchainEmbeddingFake struct {
- Mode *DestinationLangchainEmbeddingFakeMode `json:"mode,omitempty"`
+// DestinationLangchainFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
+type DestinationLangchainFake struct {
+ mode *DestinationLangchainSchemasMode `const:"fake" json:"mode"`
}
-type DestinationLangchainEmbeddingOpenAIMode string
+func (d DestinationLangchainFake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationLangchainFake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationLangchainFake) GetMode() *DestinationLangchainSchemasMode {
+ return DestinationLangchainSchemasModeFake.ToPointer()
+}
+
+type DestinationLangchainMode string
const (
- DestinationLangchainEmbeddingOpenAIModeOpenai DestinationLangchainEmbeddingOpenAIMode = "openai"
+ DestinationLangchainModeOpenai DestinationLangchainMode = "openai"
)
-func (e DestinationLangchainEmbeddingOpenAIMode) ToPointer() *DestinationLangchainEmbeddingOpenAIMode {
+func (e DestinationLangchainMode) ToPointer() *DestinationLangchainMode {
return &e
}
-func (e *DestinationLangchainEmbeddingOpenAIMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationLangchainMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "openai":
- *e = DestinationLangchainEmbeddingOpenAIMode(v)
+ *e = DestinationLangchainMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationLangchainEmbeddingOpenAIMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationLangchainMode: %v", v)
}
}
-// DestinationLangchainEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
-type DestinationLangchainEmbeddingOpenAI struct {
- Mode *DestinationLangchainEmbeddingOpenAIMode `json:"mode,omitempty"`
- OpenaiKey string `json:"openai_key"`
+// DestinationLangchainOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationLangchainOpenAI struct {
+ mode *DestinationLangchainMode `const:"openai" json:"mode"`
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationLangchainOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationLangchainOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationLangchainOpenAI) GetMode() *DestinationLangchainMode {
+ return DestinationLangchainModeOpenai.ToPointer()
+}
+
+func (o *DestinationLangchainOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
}
type DestinationLangchainEmbeddingType string
const (
- DestinationLangchainEmbeddingTypeDestinationLangchainEmbeddingOpenAI DestinationLangchainEmbeddingType = "destination-langchain_Embedding_OpenAI"
- DestinationLangchainEmbeddingTypeDestinationLangchainEmbeddingFake DestinationLangchainEmbeddingType = "destination-langchain_Embedding_Fake"
+ DestinationLangchainEmbeddingTypeDestinationLangchainOpenAI DestinationLangchainEmbeddingType = "destination-langchain_OpenAI"
+ DestinationLangchainEmbeddingTypeDestinationLangchainFake DestinationLangchainEmbeddingType = "destination-langchain_Fake"
)
type DestinationLangchainEmbedding struct {
- DestinationLangchainEmbeddingOpenAI *DestinationLangchainEmbeddingOpenAI
- DestinationLangchainEmbeddingFake *DestinationLangchainEmbeddingFake
+ DestinationLangchainOpenAI *DestinationLangchainOpenAI
+ DestinationLangchainFake *DestinationLangchainFake
Type DestinationLangchainEmbeddingType
}
-func CreateDestinationLangchainEmbeddingDestinationLangchainEmbeddingOpenAI(destinationLangchainEmbeddingOpenAI DestinationLangchainEmbeddingOpenAI) DestinationLangchainEmbedding {
- typ := DestinationLangchainEmbeddingTypeDestinationLangchainEmbeddingOpenAI
+func CreateDestinationLangchainEmbeddingDestinationLangchainOpenAI(destinationLangchainOpenAI DestinationLangchainOpenAI) DestinationLangchainEmbedding {
+ typ := DestinationLangchainEmbeddingTypeDestinationLangchainOpenAI
return DestinationLangchainEmbedding{
- DestinationLangchainEmbeddingOpenAI: &destinationLangchainEmbeddingOpenAI,
- Type: typ,
+ DestinationLangchainOpenAI: &destinationLangchainOpenAI,
+ Type: typ,
}
}
-func CreateDestinationLangchainEmbeddingDestinationLangchainEmbeddingFake(destinationLangchainEmbeddingFake DestinationLangchainEmbeddingFake) DestinationLangchainEmbedding {
- typ := DestinationLangchainEmbeddingTypeDestinationLangchainEmbeddingFake
+func CreateDestinationLangchainEmbeddingDestinationLangchainFake(destinationLangchainFake DestinationLangchainFake) DestinationLangchainEmbedding {
+ typ := DestinationLangchainEmbeddingTypeDestinationLangchainFake
return DestinationLangchainEmbedding{
- DestinationLangchainEmbeddingFake: &destinationLangchainEmbeddingFake,
- Type: typ,
+ DestinationLangchainFake: &destinationLangchainFake,
+ Type: typ,
}
}
func (u *DestinationLangchainEmbedding) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationLangchainEmbeddingFake := new(DestinationLangchainEmbeddingFake)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationLangchainEmbeddingFake); err == nil {
- u.DestinationLangchainEmbeddingFake = destinationLangchainEmbeddingFake
- u.Type = DestinationLangchainEmbeddingTypeDestinationLangchainEmbeddingFake
+
+ destinationLangchainFake := new(DestinationLangchainFake)
+ if err := utils.UnmarshalJSON(data, &destinationLangchainFake, "", true, true); err == nil {
+ u.DestinationLangchainFake = destinationLangchainFake
+ u.Type = DestinationLangchainEmbeddingTypeDestinationLangchainFake
return nil
}
- destinationLangchainEmbeddingOpenAI := new(DestinationLangchainEmbeddingOpenAI)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationLangchainEmbeddingOpenAI); err == nil {
- u.DestinationLangchainEmbeddingOpenAI = destinationLangchainEmbeddingOpenAI
- u.Type = DestinationLangchainEmbeddingTypeDestinationLangchainEmbeddingOpenAI
+ destinationLangchainOpenAI := new(DestinationLangchainOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationLangchainOpenAI, "", true, true); err == nil {
+ u.DestinationLangchainOpenAI = destinationLangchainOpenAI
+ u.Type = DestinationLangchainEmbeddingTypeDestinationLangchainOpenAI
return nil
}
@@ -149,185 +181,265 @@ func (u *DestinationLangchainEmbedding) UnmarshalJSON(data []byte) error {
}
func (u DestinationLangchainEmbedding) MarshalJSON() ([]byte, error) {
- if u.DestinationLangchainEmbeddingFake != nil {
- return json.Marshal(u.DestinationLangchainEmbeddingFake)
+ if u.DestinationLangchainOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationLangchainOpenAI, "", true)
}
- if u.DestinationLangchainEmbeddingOpenAI != nil {
- return json.Marshal(u.DestinationLangchainEmbeddingOpenAI)
+ if u.DestinationLangchainFake != nil {
+ return utils.MarshalJSON(u.DestinationLangchainFake, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationLangchainIndexingChromaLocalPersistanceMode string
+type DestinationLangchainSchemasIndexingIndexing3Mode string
const (
- DestinationLangchainIndexingChromaLocalPersistanceModeChromaLocal DestinationLangchainIndexingChromaLocalPersistanceMode = "chroma_local"
+ DestinationLangchainSchemasIndexingIndexing3ModeChromaLocal DestinationLangchainSchemasIndexingIndexing3Mode = "chroma_local"
)
-func (e DestinationLangchainIndexingChromaLocalPersistanceMode) ToPointer() *DestinationLangchainIndexingChromaLocalPersistanceMode {
+func (e DestinationLangchainSchemasIndexingIndexing3Mode) ToPointer() *DestinationLangchainSchemasIndexingIndexing3Mode {
return &e
}
-func (e *DestinationLangchainIndexingChromaLocalPersistanceMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationLangchainSchemasIndexingIndexing3Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "chroma_local":
- *e = DestinationLangchainIndexingChromaLocalPersistanceMode(v)
+ *e = DestinationLangchainSchemasIndexingIndexing3Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationLangchainIndexingChromaLocalPersistanceMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationLangchainSchemasIndexingIndexing3Mode: %v", v)
}
}
-// DestinationLangchainIndexingChromaLocalPersistance - Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync.
-type DestinationLangchainIndexingChromaLocalPersistance struct {
+// DestinationLangchainChromaLocalPersistance - Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync.
+type DestinationLangchainChromaLocalPersistance struct {
// Name of the collection to use.
- CollectionName *string `json:"collection_name,omitempty"`
+ CollectionName *string `default:"langchain" json:"collection_name"`
// Path to the directory where chroma files will be written. The files will be placed inside that local mount.
- DestinationPath string `json:"destination_path"`
- Mode *DestinationLangchainIndexingChromaLocalPersistanceMode `json:"mode,omitempty"`
+ DestinationPath string `json:"destination_path"`
+ mode *DestinationLangchainSchemasIndexingIndexing3Mode `const:"chroma_local" json:"mode"`
+}
+
+func (d DestinationLangchainChromaLocalPersistance) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationLangchainIndexingDocArrayHnswSearchMode string
+func (d *DestinationLangchainChromaLocalPersistance) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationLangchainChromaLocalPersistance) GetCollectionName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CollectionName
+}
+
+func (o *DestinationLangchainChromaLocalPersistance) GetDestinationPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationPath
+}
+
+func (o *DestinationLangchainChromaLocalPersistance) GetMode() *DestinationLangchainSchemasIndexingIndexing3Mode {
+ return DestinationLangchainSchemasIndexingIndexing3ModeChromaLocal.ToPointer()
+}
+
+type DestinationLangchainSchemasIndexingIndexingMode string
const (
- DestinationLangchainIndexingDocArrayHnswSearchModeDocArrayHnswSearch DestinationLangchainIndexingDocArrayHnswSearchMode = "DocArrayHnswSearch"
+ DestinationLangchainSchemasIndexingIndexingModeDocArrayHnswSearch DestinationLangchainSchemasIndexingIndexingMode = "DocArrayHnswSearch"
)
-func (e DestinationLangchainIndexingDocArrayHnswSearchMode) ToPointer() *DestinationLangchainIndexingDocArrayHnswSearchMode {
+func (e DestinationLangchainSchemasIndexingIndexingMode) ToPointer() *DestinationLangchainSchemasIndexingIndexingMode {
return &e
}
-func (e *DestinationLangchainIndexingDocArrayHnswSearchMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationLangchainSchemasIndexingIndexingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "DocArrayHnswSearch":
- *e = DestinationLangchainIndexingDocArrayHnswSearchMode(v)
+ *e = DestinationLangchainSchemasIndexingIndexingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationLangchainIndexingDocArrayHnswSearchMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationLangchainSchemasIndexingIndexingMode: %v", v)
}
}
-// DestinationLangchainIndexingDocArrayHnswSearch - DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite.
-type DestinationLangchainIndexingDocArrayHnswSearch struct {
+// DestinationLangchainDocArrayHnswSearch - DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite.
+type DestinationLangchainDocArrayHnswSearch struct {
// Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run.
- DestinationPath string `json:"destination_path"`
- Mode *DestinationLangchainIndexingDocArrayHnswSearchMode `json:"mode,omitempty"`
+ DestinationPath string `json:"destination_path"`
+ mode *DestinationLangchainSchemasIndexingIndexingMode `const:"DocArrayHnswSearch" json:"mode"`
+}
+
+func (d DestinationLangchainDocArrayHnswSearch) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationLangchainIndexingPineconeMode string
+func (d *DestinationLangchainDocArrayHnswSearch) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationLangchainDocArrayHnswSearch) GetDestinationPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationPath
+}
+
+func (o *DestinationLangchainDocArrayHnswSearch) GetMode() *DestinationLangchainSchemasIndexingIndexingMode {
+ return DestinationLangchainSchemasIndexingIndexingModeDocArrayHnswSearch.ToPointer()
+}
+
+type DestinationLangchainSchemasIndexingMode string
const (
- DestinationLangchainIndexingPineconeModePinecone DestinationLangchainIndexingPineconeMode = "pinecone"
+ DestinationLangchainSchemasIndexingModePinecone DestinationLangchainSchemasIndexingMode = "pinecone"
)
-func (e DestinationLangchainIndexingPineconeMode) ToPointer() *DestinationLangchainIndexingPineconeMode {
+func (e DestinationLangchainSchemasIndexingMode) ToPointer() *DestinationLangchainSchemasIndexingMode {
return &e
}
-func (e *DestinationLangchainIndexingPineconeMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationLangchainSchemasIndexingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pinecone":
- *e = DestinationLangchainIndexingPineconeMode(v)
+ *e = DestinationLangchainSchemasIndexingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationLangchainIndexingPineconeMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationLangchainSchemasIndexingMode: %v", v)
}
}
-// DestinationLangchainIndexingPinecone - Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain.
-type DestinationLangchainIndexingPinecone struct {
+// DestinationLangchainPinecone - Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain.
+type DestinationLangchainPinecone struct {
// Pinecone index to use
- Index string `json:"index"`
- Mode *DestinationLangchainIndexingPineconeMode `json:"mode,omitempty"`
+ Index string `json:"index"`
+ mode *DestinationLangchainSchemasIndexingMode `const:"pinecone" json:"mode"`
// Pinecone environment to use
PineconeEnvironment string `json:"pinecone_environment"`
PineconeKey string `json:"pinecone_key"`
}
+func (d DestinationLangchainPinecone) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationLangchainPinecone) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationLangchainPinecone) GetIndex() string {
+ if o == nil {
+ return ""
+ }
+ return o.Index
+}
+
+func (o *DestinationLangchainPinecone) GetMode() *DestinationLangchainSchemasIndexingMode {
+ return DestinationLangchainSchemasIndexingModePinecone.ToPointer()
+}
+
+func (o *DestinationLangchainPinecone) GetPineconeEnvironment() string {
+ if o == nil {
+ return ""
+ }
+ return o.PineconeEnvironment
+}
+
+func (o *DestinationLangchainPinecone) GetPineconeKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PineconeKey
+}
+
type DestinationLangchainIndexingType string
const (
- DestinationLangchainIndexingTypeDestinationLangchainIndexingPinecone DestinationLangchainIndexingType = "destination-langchain_Indexing_Pinecone"
- DestinationLangchainIndexingTypeDestinationLangchainIndexingDocArrayHnswSearch DestinationLangchainIndexingType = "destination-langchain_Indexing_DocArrayHnswSearch"
- DestinationLangchainIndexingTypeDestinationLangchainIndexingChromaLocalPersistance DestinationLangchainIndexingType = "destination-langchain_Indexing_Chroma (local persistance)"
+ DestinationLangchainIndexingTypeDestinationLangchainPinecone DestinationLangchainIndexingType = "destination-langchain_Pinecone"
+ DestinationLangchainIndexingTypeDestinationLangchainDocArrayHnswSearch DestinationLangchainIndexingType = "destination-langchain_DocArrayHnswSearch"
+ DestinationLangchainIndexingTypeDestinationLangchainChromaLocalPersistance DestinationLangchainIndexingType = "destination-langchain_Chroma (local persistance)"
)
type DestinationLangchainIndexing struct {
- DestinationLangchainIndexingPinecone *DestinationLangchainIndexingPinecone
- DestinationLangchainIndexingDocArrayHnswSearch *DestinationLangchainIndexingDocArrayHnswSearch
- DestinationLangchainIndexingChromaLocalPersistance *DestinationLangchainIndexingChromaLocalPersistance
+ DestinationLangchainPinecone *DestinationLangchainPinecone
+ DestinationLangchainDocArrayHnswSearch *DestinationLangchainDocArrayHnswSearch
+ DestinationLangchainChromaLocalPersistance *DestinationLangchainChromaLocalPersistance
Type DestinationLangchainIndexingType
}
-func CreateDestinationLangchainIndexingDestinationLangchainIndexingPinecone(destinationLangchainIndexingPinecone DestinationLangchainIndexingPinecone) DestinationLangchainIndexing {
- typ := DestinationLangchainIndexingTypeDestinationLangchainIndexingPinecone
+func CreateDestinationLangchainIndexingDestinationLangchainPinecone(destinationLangchainPinecone DestinationLangchainPinecone) DestinationLangchainIndexing {
+ typ := DestinationLangchainIndexingTypeDestinationLangchainPinecone
return DestinationLangchainIndexing{
- DestinationLangchainIndexingPinecone: &destinationLangchainIndexingPinecone,
- Type: typ,
+ DestinationLangchainPinecone: &destinationLangchainPinecone,
+ Type: typ,
}
}
-func CreateDestinationLangchainIndexingDestinationLangchainIndexingDocArrayHnswSearch(destinationLangchainIndexingDocArrayHnswSearch DestinationLangchainIndexingDocArrayHnswSearch) DestinationLangchainIndexing {
- typ := DestinationLangchainIndexingTypeDestinationLangchainIndexingDocArrayHnswSearch
+func CreateDestinationLangchainIndexingDestinationLangchainDocArrayHnswSearch(destinationLangchainDocArrayHnswSearch DestinationLangchainDocArrayHnswSearch) DestinationLangchainIndexing {
+ typ := DestinationLangchainIndexingTypeDestinationLangchainDocArrayHnswSearch
return DestinationLangchainIndexing{
- DestinationLangchainIndexingDocArrayHnswSearch: &destinationLangchainIndexingDocArrayHnswSearch,
- Type: typ,
+ DestinationLangchainDocArrayHnswSearch: &destinationLangchainDocArrayHnswSearch,
+ Type: typ,
}
}
-func CreateDestinationLangchainIndexingDestinationLangchainIndexingChromaLocalPersistance(destinationLangchainIndexingChromaLocalPersistance DestinationLangchainIndexingChromaLocalPersistance) DestinationLangchainIndexing {
- typ := DestinationLangchainIndexingTypeDestinationLangchainIndexingChromaLocalPersistance
+func CreateDestinationLangchainIndexingDestinationLangchainChromaLocalPersistance(destinationLangchainChromaLocalPersistance DestinationLangchainChromaLocalPersistance) DestinationLangchainIndexing {
+ typ := DestinationLangchainIndexingTypeDestinationLangchainChromaLocalPersistance
return DestinationLangchainIndexing{
- DestinationLangchainIndexingChromaLocalPersistance: &destinationLangchainIndexingChromaLocalPersistance,
+ DestinationLangchainChromaLocalPersistance: &destinationLangchainChromaLocalPersistance,
Type: typ,
}
}
func (u *DestinationLangchainIndexing) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationLangchainIndexingDocArrayHnswSearch := new(DestinationLangchainIndexingDocArrayHnswSearch)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationLangchainIndexingDocArrayHnswSearch); err == nil {
- u.DestinationLangchainIndexingDocArrayHnswSearch = destinationLangchainIndexingDocArrayHnswSearch
- u.Type = DestinationLangchainIndexingTypeDestinationLangchainIndexingDocArrayHnswSearch
+
+ destinationLangchainDocArrayHnswSearch := new(DestinationLangchainDocArrayHnswSearch)
+ if err := utils.UnmarshalJSON(data, &destinationLangchainDocArrayHnswSearch, "", true, true); err == nil {
+ u.DestinationLangchainDocArrayHnswSearch = destinationLangchainDocArrayHnswSearch
+ u.Type = DestinationLangchainIndexingTypeDestinationLangchainDocArrayHnswSearch
return nil
}
- destinationLangchainIndexingChromaLocalPersistance := new(DestinationLangchainIndexingChromaLocalPersistance)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationLangchainIndexingChromaLocalPersistance); err == nil {
- u.DestinationLangchainIndexingChromaLocalPersistance = destinationLangchainIndexingChromaLocalPersistance
- u.Type = DestinationLangchainIndexingTypeDestinationLangchainIndexingChromaLocalPersistance
+ destinationLangchainChromaLocalPersistance := new(DestinationLangchainChromaLocalPersistance)
+ if err := utils.UnmarshalJSON(data, &destinationLangchainChromaLocalPersistance, "", true, true); err == nil {
+ u.DestinationLangchainChromaLocalPersistance = destinationLangchainChromaLocalPersistance
+ u.Type = DestinationLangchainIndexingTypeDestinationLangchainChromaLocalPersistance
return nil
}
- destinationLangchainIndexingPinecone := new(DestinationLangchainIndexingPinecone)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationLangchainIndexingPinecone); err == nil {
- u.DestinationLangchainIndexingPinecone = destinationLangchainIndexingPinecone
- u.Type = DestinationLangchainIndexingTypeDestinationLangchainIndexingPinecone
+ destinationLangchainPinecone := new(DestinationLangchainPinecone)
+ if err := utils.UnmarshalJSON(data, &destinationLangchainPinecone, "", true, true); err == nil {
+ u.DestinationLangchainPinecone = destinationLangchainPinecone
+ u.Type = DestinationLangchainIndexingTypeDestinationLangchainPinecone
return nil
}
@@ -335,35 +447,103 @@ func (u *DestinationLangchainIndexing) UnmarshalJSON(data []byte) error {
}
func (u DestinationLangchainIndexing) MarshalJSON() ([]byte, error) {
- if u.DestinationLangchainIndexingDocArrayHnswSearch != nil {
- return json.Marshal(u.DestinationLangchainIndexingDocArrayHnswSearch)
+ if u.DestinationLangchainPinecone != nil {
+ return utils.MarshalJSON(u.DestinationLangchainPinecone, "", true)
}
- if u.DestinationLangchainIndexingChromaLocalPersistance != nil {
- return json.Marshal(u.DestinationLangchainIndexingChromaLocalPersistance)
+ if u.DestinationLangchainDocArrayHnswSearch != nil {
+ return utils.MarshalJSON(u.DestinationLangchainDocArrayHnswSearch, "", true)
}
- if u.DestinationLangchainIndexingPinecone != nil {
- return json.Marshal(u.DestinationLangchainIndexingPinecone)
+ if u.DestinationLangchainChromaLocalPersistance != nil {
+ return utils.MarshalJSON(u.DestinationLangchainChromaLocalPersistance, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationLangchainProcessingConfigModel struct {
// Size of overlap between chunks in tokens to store in vector store to better capture relevant context
- ChunkOverlap *int64 `json:"chunk_overlap,omitempty"`
+ ChunkOverlap *int64 `default:"0" json:"chunk_overlap"`
// Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)
ChunkSize int64 `json:"chunk_size"`
// List of fields in the record that should be used to calculate the embedding. All other fields are passed along as meta fields. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
TextFields []string `json:"text_fields"`
}
+func (d DestinationLangchainProcessingConfigModel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationLangchainProcessingConfigModel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationLangchainProcessingConfigModel) GetChunkOverlap() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ChunkOverlap
+}
+
+func (o *DestinationLangchainProcessingConfigModel) GetChunkSize() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.ChunkSize
+}
+
+func (o *DestinationLangchainProcessingConfigModel) GetTextFields() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.TextFields
+}
+
type DestinationLangchain struct {
- DestinationType DestinationLangchainLangchain `json:"destinationType"`
+ destinationType Langchain `const:"langchain" json:"destinationType"`
// Embedding configuration
Embedding DestinationLangchainEmbedding `json:"embedding"`
// Indexing configuration
Indexing DestinationLangchainIndexing `json:"indexing"`
Processing DestinationLangchainProcessingConfigModel `json:"processing"`
}
+
+func (d DestinationLangchain) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationLangchain) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationLangchain) GetDestinationType() Langchain {
+ return LangchainLangchain
+}
+
+func (o *DestinationLangchain) GetEmbedding() DestinationLangchainEmbedding {
+ if o == nil {
+ return DestinationLangchainEmbedding{}
+ }
+ return o.Embedding
+}
+
+func (o *DestinationLangchain) GetIndexing() DestinationLangchainIndexing {
+ if o == nil {
+ return DestinationLangchainIndexing{}
+ }
+ return o.Indexing
+}
+
+func (o *DestinationLangchain) GetProcessing() DestinationLangchainProcessingConfigModel {
+ if o == nil {
+ return DestinationLangchainProcessingConfigModel{}
+ }
+ return o.Processing
+}
diff --git a/internal/sdk/pkg/models/shared/destinationlangchaincreaterequest.go b/internal/sdk/pkg/models/shared/destinationlangchaincreaterequest.go
old mode 100755
new mode 100644
index 2ccb6daec..b4945cc39
--- a/internal/sdk/pkg/models/shared/destinationlangchaincreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationlangchaincreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationLangchainCreateRequest struct {
Configuration DestinationLangchain `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationLangchainCreateRequest) GetConfiguration() DestinationLangchain {
+ if o == nil {
+ return DestinationLangchain{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationLangchainCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationLangchainCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationLangchainCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationlangchainputrequest.go b/internal/sdk/pkg/models/shared/destinationlangchainputrequest.go
old mode 100755
new mode 100644
index 344fa5cd9..cd3de13c9
--- a/internal/sdk/pkg/models/shared/destinationlangchainputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationlangchainputrequest.go
@@ -7,3 +7,24 @@ type DestinationLangchainPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationLangchainPutRequest) GetConfiguration() DestinationLangchainUpdate {
+ if o == nil {
+ return DestinationLangchainUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationLangchainPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationLangchainPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationlangchainupdate.go b/internal/sdk/pkg/models/shared/destinationlangchainupdate.go
old mode 100755
new mode 100644
index d1c96f7bc..b62d8554e
--- a/internal/sdk/pkg/models/shared/destinationlangchainupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationlangchainupdate.go
@@ -3,342 +3,507 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationLangchainUpdateEmbeddingFakeMode string
+type DestinationLangchainUpdateMode string
const (
- DestinationLangchainUpdateEmbeddingFakeModeFake DestinationLangchainUpdateEmbeddingFakeMode = "fake"
+ DestinationLangchainUpdateModeFake DestinationLangchainUpdateMode = "fake"
)
-func (e DestinationLangchainUpdateEmbeddingFakeMode) ToPointer() *DestinationLangchainUpdateEmbeddingFakeMode {
+func (e DestinationLangchainUpdateMode) ToPointer() *DestinationLangchainUpdateMode {
return &e
}
-func (e *DestinationLangchainUpdateEmbeddingFakeMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationLangchainUpdateMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "fake":
- *e = DestinationLangchainUpdateEmbeddingFakeMode(v)
+ *e = DestinationLangchainUpdateMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationLangchainUpdateEmbeddingFakeMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationLangchainUpdateMode: %v", v)
}
}
-// DestinationLangchainUpdateEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
-type DestinationLangchainUpdateEmbeddingFake struct {
- Mode *DestinationLangchainUpdateEmbeddingFakeMode `json:"mode,omitempty"`
+// Fake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
+type Fake struct {
+ mode *DestinationLangchainUpdateMode `const:"fake" json:"mode"`
}
-type DestinationLangchainUpdateEmbeddingOpenAIMode string
+func (f Fake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(f, "", false)
+}
+
+func (f *Fake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &f, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Fake) GetMode() *DestinationLangchainUpdateMode {
+ return DestinationLangchainUpdateModeFake.ToPointer()
+}
+
+type DestinationLangchainUpdateSchemasEmbeddingMode string
const (
- DestinationLangchainUpdateEmbeddingOpenAIModeOpenai DestinationLangchainUpdateEmbeddingOpenAIMode = "openai"
+ DestinationLangchainUpdateSchemasEmbeddingModeOpenai DestinationLangchainUpdateSchemasEmbeddingMode = "openai"
)
-func (e DestinationLangchainUpdateEmbeddingOpenAIMode) ToPointer() *DestinationLangchainUpdateEmbeddingOpenAIMode {
+func (e DestinationLangchainUpdateSchemasEmbeddingMode) ToPointer() *DestinationLangchainUpdateSchemasEmbeddingMode {
return &e
}
-func (e *DestinationLangchainUpdateEmbeddingOpenAIMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationLangchainUpdateSchemasEmbeddingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "openai":
- *e = DestinationLangchainUpdateEmbeddingOpenAIMode(v)
+ *e = DestinationLangchainUpdateSchemasEmbeddingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationLangchainUpdateEmbeddingOpenAIMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationLangchainUpdateSchemasEmbeddingMode: %v", v)
}
}
-// DestinationLangchainUpdateEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
-type DestinationLangchainUpdateEmbeddingOpenAI struct {
- Mode *DestinationLangchainUpdateEmbeddingOpenAIMode `json:"mode,omitempty"`
- OpenaiKey string `json:"openai_key"`
+// OpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type OpenAI struct {
+ mode *DestinationLangchainUpdateSchemasEmbeddingMode `const:"openai" json:"mode"`
+ OpenaiKey string `json:"openai_key"`
}
-type DestinationLangchainUpdateEmbeddingType string
+func (o OpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(o, "", false)
+}
+
+func (o *OpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &o, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *OpenAI) GetMode() *DestinationLangchainUpdateSchemasEmbeddingMode {
+ return DestinationLangchainUpdateSchemasEmbeddingModeOpenai.ToPointer()
+}
+
+func (o *OpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
+}
+
+type EmbeddingType string
const (
- DestinationLangchainUpdateEmbeddingTypeDestinationLangchainUpdateEmbeddingOpenAI DestinationLangchainUpdateEmbeddingType = "destination-langchain-update_Embedding_OpenAI"
- DestinationLangchainUpdateEmbeddingTypeDestinationLangchainUpdateEmbeddingFake DestinationLangchainUpdateEmbeddingType = "destination-langchain-update_Embedding_Fake"
+ EmbeddingTypeOpenAI EmbeddingType = "OpenAI"
+ EmbeddingTypeFake EmbeddingType = "Fake"
)
-type DestinationLangchainUpdateEmbedding struct {
- DestinationLangchainUpdateEmbeddingOpenAI *DestinationLangchainUpdateEmbeddingOpenAI
- DestinationLangchainUpdateEmbeddingFake *DestinationLangchainUpdateEmbeddingFake
+type Embedding struct {
+ OpenAI *OpenAI
+ Fake *Fake
- Type DestinationLangchainUpdateEmbeddingType
+ Type EmbeddingType
}
-func CreateDestinationLangchainUpdateEmbeddingDestinationLangchainUpdateEmbeddingOpenAI(destinationLangchainUpdateEmbeddingOpenAI DestinationLangchainUpdateEmbeddingOpenAI) DestinationLangchainUpdateEmbedding {
- typ := DestinationLangchainUpdateEmbeddingTypeDestinationLangchainUpdateEmbeddingOpenAI
+func CreateEmbeddingOpenAI(openAI OpenAI) Embedding {
+ typ := EmbeddingTypeOpenAI
- return DestinationLangchainUpdateEmbedding{
- DestinationLangchainUpdateEmbeddingOpenAI: &destinationLangchainUpdateEmbeddingOpenAI,
- Type: typ,
+ return Embedding{
+ OpenAI: &openAI,
+ Type: typ,
}
}
-func CreateDestinationLangchainUpdateEmbeddingDestinationLangchainUpdateEmbeddingFake(destinationLangchainUpdateEmbeddingFake DestinationLangchainUpdateEmbeddingFake) DestinationLangchainUpdateEmbedding {
- typ := DestinationLangchainUpdateEmbeddingTypeDestinationLangchainUpdateEmbeddingFake
+func CreateEmbeddingFake(fake Fake) Embedding {
+ typ := EmbeddingTypeFake
- return DestinationLangchainUpdateEmbedding{
- DestinationLangchainUpdateEmbeddingFake: &destinationLangchainUpdateEmbeddingFake,
- Type: typ,
+ return Embedding{
+ Fake: &fake,
+ Type: typ,
}
}
-func (u *DestinationLangchainUpdateEmbedding) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *Embedding) UnmarshalJSON(data []byte) error {
- destinationLangchainUpdateEmbeddingFake := new(DestinationLangchainUpdateEmbeddingFake)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationLangchainUpdateEmbeddingFake); err == nil {
- u.DestinationLangchainUpdateEmbeddingFake = destinationLangchainUpdateEmbeddingFake
- u.Type = DestinationLangchainUpdateEmbeddingTypeDestinationLangchainUpdateEmbeddingFake
+ fake := new(Fake)
+ if err := utils.UnmarshalJSON(data, &fake, "", true, true); err == nil {
+ u.Fake = fake
+ u.Type = EmbeddingTypeFake
return nil
}
- destinationLangchainUpdateEmbeddingOpenAI := new(DestinationLangchainUpdateEmbeddingOpenAI)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationLangchainUpdateEmbeddingOpenAI); err == nil {
- u.DestinationLangchainUpdateEmbeddingOpenAI = destinationLangchainUpdateEmbeddingOpenAI
- u.Type = DestinationLangchainUpdateEmbeddingTypeDestinationLangchainUpdateEmbeddingOpenAI
+ openAI := new(OpenAI)
+ if err := utils.UnmarshalJSON(data, &openAI, "", true, true); err == nil {
+ u.OpenAI = openAI
+ u.Type = EmbeddingTypeOpenAI
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationLangchainUpdateEmbedding) MarshalJSON() ([]byte, error) {
- if u.DestinationLangchainUpdateEmbeddingFake != nil {
- return json.Marshal(u.DestinationLangchainUpdateEmbeddingFake)
+func (u Embedding) MarshalJSON() ([]byte, error) {
+ if u.OpenAI != nil {
+ return utils.MarshalJSON(u.OpenAI, "", true)
}
- if u.DestinationLangchainUpdateEmbeddingOpenAI != nil {
- return json.Marshal(u.DestinationLangchainUpdateEmbeddingOpenAI)
+ if u.Fake != nil {
+ return utils.MarshalJSON(u.Fake, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationLangchainUpdateIndexingChromaLocalPersistanceMode string
+type DestinationLangchainUpdateSchemasIndexingIndexingMode string
const (
- DestinationLangchainUpdateIndexingChromaLocalPersistanceModeChromaLocal DestinationLangchainUpdateIndexingChromaLocalPersistanceMode = "chroma_local"
+ DestinationLangchainUpdateSchemasIndexingIndexingModeChromaLocal DestinationLangchainUpdateSchemasIndexingIndexingMode = "chroma_local"
)
-func (e DestinationLangchainUpdateIndexingChromaLocalPersistanceMode) ToPointer() *DestinationLangchainUpdateIndexingChromaLocalPersistanceMode {
+func (e DestinationLangchainUpdateSchemasIndexingIndexingMode) ToPointer() *DestinationLangchainUpdateSchemasIndexingIndexingMode {
return &e
}
-func (e *DestinationLangchainUpdateIndexingChromaLocalPersistanceMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationLangchainUpdateSchemasIndexingIndexingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "chroma_local":
- *e = DestinationLangchainUpdateIndexingChromaLocalPersistanceMode(v)
+ *e = DestinationLangchainUpdateSchemasIndexingIndexingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationLangchainUpdateIndexingChromaLocalPersistanceMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationLangchainUpdateSchemasIndexingIndexingMode: %v", v)
}
}
-// DestinationLangchainUpdateIndexingChromaLocalPersistance - Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync.
-type DestinationLangchainUpdateIndexingChromaLocalPersistance struct {
+// ChromaLocalPersistance - Chroma is a popular vector store that can be used to store and retrieve embeddings. It will build its index in memory and persist it to disk by the end of the sync.
+type ChromaLocalPersistance struct {
// Name of the collection to use.
- CollectionName *string `json:"collection_name,omitempty"`
+ CollectionName *string `default:"langchain" json:"collection_name"`
// Path to the directory where chroma files will be written. The files will be placed inside that local mount.
- DestinationPath string `json:"destination_path"`
- Mode *DestinationLangchainUpdateIndexingChromaLocalPersistanceMode `json:"mode,omitempty"`
+ DestinationPath string `json:"destination_path"`
+ mode *DestinationLangchainUpdateSchemasIndexingIndexingMode `const:"chroma_local" json:"mode"`
+}
+
+func (c ChromaLocalPersistance) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(c, "", false)
}
-type DestinationLangchainUpdateIndexingDocArrayHnswSearchMode string
+func (c *ChromaLocalPersistance) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &c, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ChromaLocalPersistance) GetCollectionName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CollectionName
+}
+
+func (o *ChromaLocalPersistance) GetDestinationPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationPath
+}
+
+func (o *ChromaLocalPersistance) GetMode() *DestinationLangchainUpdateSchemasIndexingIndexingMode {
+ return DestinationLangchainUpdateSchemasIndexingIndexingModeChromaLocal.ToPointer()
+}
+
+type DestinationLangchainUpdateSchemasIndexingMode string
const (
- DestinationLangchainUpdateIndexingDocArrayHnswSearchModeDocArrayHnswSearch DestinationLangchainUpdateIndexingDocArrayHnswSearchMode = "DocArrayHnswSearch"
+ DestinationLangchainUpdateSchemasIndexingModeDocArrayHnswSearch DestinationLangchainUpdateSchemasIndexingMode = "DocArrayHnswSearch"
)
-func (e DestinationLangchainUpdateIndexingDocArrayHnswSearchMode) ToPointer() *DestinationLangchainUpdateIndexingDocArrayHnswSearchMode {
+func (e DestinationLangchainUpdateSchemasIndexingMode) ToPointer() *DestinationLangchainUpdateSchemasIndexingMode {
return &e
}
-func (e *DestinationLangchainUpdateIndexingDocArrayHnswSearchMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationLangchainUpdateSchemasIndexingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "DocArrayHnswSearch":
- *e = DestinationLangchainUpdateIndexingDocArrayHnswSearchMode(v)
+ *e = DestinationLangchainUpdateSchemasIndexingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationLangchainUpdateIndexingDocArrayHnswSearchMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationLangchainUpdateSchemasIndexingMode: %v", v)
}
}
-// DestinationLangchainUpdateIndexingDocArrayHnswSearch - DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite.
-type DestinationLangchainUpdateIndexingDocArrayHnswSearch struct {
+// DocArrayHnswSearch is a lightweight Document Index implementation provided by Docarray that runs fully locally and is best suited for small- to medium-sized datasets. It stores vectors on disk in hnswlib, and stores all other data in SQLite.
+type DocArrayHnswSearch struct {
// Path to the directory where hnswlib and meta data files will be written. The files will be placed inside that local mount. All files in the specified destination directory will be deleted on each run.
- DestinationPath string `json:"destination_path"`
- Mode *DestinationLangchainUpdateIndexingDocArrayHnswSearchMode `json:"mode,omitempty"`
+ DestinationPath string `json:"destination_path"`
+ mode *DestinationLangchainUpdateSchemasIndexingMode `const:"DocArrayHnswSearch" json:"mode"`
+}
+
+func (d DocArrayHnswSearch) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DocArrayHnswSearch) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DocArrayHnswSearch) GetDestinationPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationPath
+}
+
+func (o *DocArrayHnswSearch) GetMode() *DestinationLangchainUpdateSchemasIndexingMode {
+ return DestinationLangchainUpdateSchemasIndexingModeDocArrayHnswSearch.ToPointer()
}
-type DestinationLangchainUpdateIndexingPineconeMode string
+type DestinationLangchainUpdateSchemasMode string
const (
- DestinationLangchainUpdateIndexingPineconeModePinecone DestinationLangchainUpdateIndexingPineconeMode = "pinecone"
+ DestinationLangchainUpdateSchemasModePinecone DestinationLangchainUpdateSchemasMode = "pinecone"
)
-func (e DestinationLangchainUpdateIndexingPineconeMode) ToPointer() *DestinationLangchainUpdateIndexingPineconeMode {
+func (e DestinationLangchainUpdateSchemasMode) ToPointer() *DestinationLangchainUpdateSchemasMode {
return &e
}
-func (e *DestinationLangchainUpdateIndexingPineconeMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationLangchainUpdateSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pinecone":
- *e = DestinationLangchainUpdateIndexingPineconeMode(v)
+ *e = DestinationLangchainUpdateSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationLangchainUpdateIndexingPineconeMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationLangchainUpdateSchemasMode: %v", v)
}
}
-// DestinationLangchainUpdateIndexingPinecone - Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain.
-type DestinationLangchainUpdateIndexingPinecone struct {
+// DestinationLangchainUpdatePinecone - Pinecone is a popular vector store that can be used to store and retrieve embeddings. It is a managed service and can also be queried from outside of langchain.
+type DestinationLangchainUpdatePinecone struct {
// Pinecone index to use
- Index string `json:"index"`
- Mode *DestinationLangchainUpdateIndexingPineconeMode `json:"mode,omitempty"`
+ Index string `json:"index"`
+ mode *DestinationLangchainUpdateSchemasMode `const:"pinecone" json:"mode"`
// Pinecone environment to use
PineconeEnvironment string `json:"pinecone_environment"`
PineconeKey string `json:"pinecone_key"`
}
-type DestinationLangchainUpdateIndexingType string
+func (d DestinationLangchainUpdatePinecone) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationLangchainUpdatePinecone) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationLangchainUpdatePinecone) GetIndex() string {
+ if o == nil {
+ return ""
+ }
+ return o.Index
+}
+
+func (o *DestinationLangchainUpdatePinecone) GetMode() *DestinationLangchainUpdateSchemasMode {
+ return DestinationLangchainUpdateSchemasModePinecone.ToPointer()
+}
+
+func (o *DestinationLangchainUpdatePinecone) GetPineconeEnvironment() string {
+ if o == nil {
+ return ""
+ }
+ return o.PineconeEnvironment
+}
+
+func (o *DestinationLangchainUpdatePinecone) GetPineconeKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PineconeKey
+}
+
+type IndexingType string
const (
- DestinationLangchainUpdateIndexingTypeDestinationLangchainUpdateIndexingPinecone DestinationLangchainUpdateIndexingType = "destination-langchain-update_Indexing_Pinecone"
- DestinationLangchainUpdateIndexingTypeDestinationLangchainUpdateIndexingDocArrayHnswSearch DestinationLangchainUpdateIndexingType = "destination-langchain-update_Indexing_DocArrayHnswSearch"
- DestinationLangchainUpdateIndexingTypeDestinationLangchainUpdateIndexingChromaLocalPersistance DestinationLangchainUpdateIndexingType = "destination-langchain-update_Indexing_Chroma (local persistance)"
+ IndexingTypeDestinationLangchainUpdatePinecone IndexingType = "destination-langchain-update_Pinecone"
+ IndexingTypeDocArrayHnswSearch IndexingType = "DocArrayHnswSearch"
+ IndexingTypeChromaLocalPersistance IndexingType = "Chroma (local persistance)"
)
-type DestinationLangchainUpdateIndexing struct {
- DestinationLangchainUpdateIndexingPinecone *DestinationLangchainUpdateIndexingPinecone
- DestinationLangchainUpdateIndexingDocArrayHnswSearch *DestinationLangchainUpdateIndexingDocArrayHnswSearch
- DestinationLangchainUpdateIndexingChromaLocalPersistance *DestinationLangchainUpdateIndexingChromaLocalPersistance
+type Indexing struct {
+ DestinationLangchainUpdatePinecone *DestinationLangchainUpdatePinecone
+ DocArrayHnswSearch *DocArrayHnswSearch
+ ChromaLocalPersistance *ChromaLocalPersistance
- Type DestinationLangchainUpdateIndexingType
+ Type IndexingType
}
-func CreateDestinationLangchainUpdateIndexingDestinationLangchainUpdateIndexingPinecone(destinationLangchainUpdateIndexingPinecone DestinationLangchainUpdateIndexingPinecone) DestinationLangchainUpdateIndexing {
- typ := DestinationLangchainUpdateIndexingTypeDestinationLangchainUpdateIndexingPinecone
+func CreateIndexingDestinationLangchainUpdatePinecone(destinationLangchainUpdatePinecone DestinationLangchainUpdatePinecone) Indexing {
+ typ := IndexingTypeDestinationLangchainUpdatePinecone
- return DestinationLangchainUpdateIndexing{
- DestinationLangchainUpdateIndexingPinecone: &destinationLangchainUpdateIndexingPinecone,
- Type: typ,
+ return Indexing{
+ DestinationLangchainUpdatePinecone: &destinationLangchainUpdatePinecone,
+ Type: typ,
}
}
-func CreateDestinationLangchainUpdateIndexingDestinationLangchainUpdateIndexingDocArrayHnswSearch(destinationLangchainUpdateIndexingDocArrayHnswSearch DestinationLangchainUpdateIndexingDocArrayHnswSearch) DestinationLangchainUpdateIndexing {
- typ := DestinationLangchainUpdateIndexingTypeDestinationLangchainUpdateIndexingDocArrayHnswSearch
+func CreateIndexingDocArrayHnswSearch(docArrayHnswSearch DocArrayHnswSearch) Indexing {
+ typ := IndexingTypeDocArrayHnswSearch
- return DestinationLangchainUpdateIndexing{
- DestinationLangchainUpdateIndexingDocArrayHnswSearch: &destinationLangchainUpdateIndexingDocArrayHnswSearch,
- Type: typ,
+ return Indexing{
+ DocArrayHnswSearch: &docArrayHnswSearch,
+ Type: typ,
}
}
-func CreateDestinationLangchainUpdateIndexingDestinationLangchainUpdateIndexingChromaLocalPersistance(destinationLangchainUpdateIndexingChromaLocalPersistance DestinationLangchainUpdateIndexingChromaLocalPersistance) DestinationLangchainUpdateIndexing {
- typ := DestinationLangchainUpdateIndexingTypeDestinationLangchainUpdateIndexingChromaLocalPersistance
+func CreateIndexingChromaLocalPersistance(chromaLocalPersistance ChromaLocalPersistance) Indexing {
+ typ := IndexingTypeChromaLocalPersistance
- return DestinationLangchainUpdateIndexing{
- DestinationLangchainUpdateIndexingChromaLocalPersistance: &destinationLangchainUpdateIndexingChromaLocalPersistance,
- Type: typ,
+ return Indexing{
+ ChromaLocalPersistance: &chromaLocalPersistance,
+ Type: typ,
}
}
-func (u *DestinationLangchainUpdateIndexing) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *Indexing) UnmarshalJSON(data []byte) error {
- destinationLangchainUpdateIndexingDocArrayHnswSearch := new(DestinationLangchainUpdateIndexingDocArrayHnswSearch)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationLangchainUpdateIndexingDocArrayHnswSearch); err == nil {
- u.DestinationLangchainUpdateIndexingDocArrayHnswSearch = destinationLangchainUpdateIndexingDocArrayHnswSearch
- u.Type = DestinationLangchainUpdateIndexingTypeDestinationLangchainUpdateIndexingDocArrayHnswSearch
+ docArrayHnswSearch := new(DocArrayHnswSearch)
+ if err := utils.UnmarshalJSON(data, &docArrayHnswSearch, "", true, true); err == nil {
+ u.DocArrayHnswSearch = docArrayHnswSearch
+ u.Type = IndexingTypeDocArrayHnswSearch
return nil
}
- destinationLangchainUpdateIndexingChromaLocalPersistance := new(DestinationLangchainUpdateIndexingChromaLocalPersistance)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationLangchainUpdateIndexingChromaLocalPersistance); err == nil {
- u.DestinationLangchainUpdateIndexingChromaLocalPersistance = destinationLangchainUpdateIndexingChromaLocalPersistance
- u.Type = DestinationLangchainUpdateIndexingTypeDestinationLangchainUpdateIndexingChromaLocalPersistance
+ chromaLocalPersistance := new(ChromaLocalPersistance)
+ if err := utils.UnmarshalJSON(data, &chromaLocalPersistance, "", true, true); err == nil {
+ u.ChromaLocalPersistance = chromaLocalPersistance
+ u.Type = IndexingTypeChromaLocalPersistance
return nil
}
- destinationLangchainUpdateIndexingPinecone := new(DestinationLangchainUpdateIndexingPinecone)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationLangchainUpdateIndexingPinecone); err == nil {
- u.DestinationLangchainUpdateIndexingPinecone = destinationLangchainUpdateIndexingPinecone
- u.Type = DestinationLangchainUpdateIndexingTypeDestinationLangchainUpdateIndexingPinecone
+ destinationLangchainUpdatePinecone := new(DestinationLangchainUpdatePinecone)
+ if err := utils.UnmarshalJSON(data, &destinationLangchainUpdatePinecone, "", true, true); err == nil {
+ u.DestinationLangchainUpdatePinecone = destinationLangchainUpdatePinecone
+ u.Type = IndexingTypeDestinationLangchainUpdatePinecone
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationLangchainUpdateIndexing) MarshalJSON() ([]byte, error) {
- if u.DestinationLangchainUpdateIndexingDocArrayHnswSearch != nil {
- return json.Marshal(u.DestinationLangchainUpdateIndexingDocArrayHnswSearch)
+func (u Indexing) MarshalJSON() ([]byte, error) {
+ if u.DestinationLangchainUpdatePinecone != nil {
+ return utils.MarshalJSON(u.DestinationLangchainUpdatePinecone, "", true)
}
- if u.DestinationLangchainUpdateIndexingChromaLocalPersistance != nil {
- return json.Marshal(u.DestinationLangchainUpdateIndexingChromaLocalPersistance)
+ if u.DocArrayHnswSearch != nil {
+ return utils.MarshalJSON(u.DocArrayHnswSearch, "", true)
}
- if u.DestinationLangchainUpdateIndexingPinecone != nil {
- return json.Marshal(u.DestinationLangchainUpdateIndexingPinecone)
+ if u.ChromaLocalPersistance != nil {
+ return utils.MarshalJSON(u.ChromaLocalPersistance, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationLangchainUpdateProcessingConfigModel struct {
+type ProcessingConfigModel struct {
// Size of overlap between chunks in tokens to store in vector store to better capture relevant context
- ChunkOverlap *int64 `json:"chunk_overlap,omitempty"`
+ ChunkOverlap *int64 `default:"0" json:"chunk_overlap"`
// Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)
ChunkSize int64 `json:"chunk_size"`
// List of fields in the record that should be used to calculate the embedding. All other fields are passed along as meta fields. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
TextFields []string `json:"text_fields"`
}
+func (p ProcessingConfigModel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(p, "", false)
+}
+
+func (p *ProcessingConfigModel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &p, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ProcessingConfigModel) GetChunkOverlap() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ChunkOverlap
+}
+
+func (o *ProcessingConfigModel) GetChunkSize() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.ChunkSize
+}
+
+func (o *ProcessingConfigModel) GetTextFields() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.TextFields
+}
+
type DestinationLangchainUpdate struct {
// Embedding configuration
- Embedding DestinationLangchainUpdateEmbedding `json:"embedding"`
+ Embedding Embedding `json:"embedding"`
// Indexing configuration
- Indexing DestinationLangchainUpdateIndexing `json:"indexing"`
- Processing DestinationLangchainUpdateProcessingConfigModel `json:"processing"`
+ Indexing Indexing `json:"indexing"`
+ Processing ProcessingConfigModel `json:"processing"`
+}
+
+func (o *DestinationLangchainUpdate) GetEmbedding() Embedding {
+ if o == nil {
+ return Embedding{}
+ }
+ return o.Embedding
+}
+
+func (o *DestinationLangchainUpdate) GetIndexing() Indexing {
+ if o == nil {
+ return Indexing{}
+ }
+ return o.Indexing
+}
+
+func (o *DestinationLangchainUpdate) GetProcessing() ProcessingConfigModel {
+ if o == nil {
+ return ProcessingConfigModel{}
+ }
+ return o.Processing
}
diff --git a/internal/sdk/pkg/models/shared/destinationmilvus.go b/internal/sdk/pkg/models/shared/destinationmilvus.go
old mode 100755
new mode 100644
index a664a9212..52f251b2f
--- a/internal/sdk/pkg/models/shared/destinationmilvus.go
+++ b/internal/sdk/pkg/models/shared/destinationmilvus.go
@@ -3,248 +3,513 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationMilvusMilvus string
+type Milvus string
const (
- DestinationMilvusMilvusMilvus DestinationMilvusMilvus = "milvus"
+ MilvusMilvus Milvus = "milvus"
)
-func (e DestinationMilvusMilvus) ToPointer() *DestinationMilvusMilvus {
+func (e Milvus) ToPointer() *Milvus {
return &e
}
-func (e *DestinationMilvusMilvus) UnmarshalJSON(data []byte) error {
+func (e *Milvus) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "milvus":
- *e = DestinationMilvusMilvus(v)
+ *e = Milvus(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusMilvus: %v", v)
+ return fmt.Errorf("invalid value for Milvus: %v", v)
}
}
-type DestinationMilvusEmbeddingFromFieldMode string
+type DestinationMilvusSchemasEmbeddingEmbedding6Mode string
const (
- DestinationMilvusEmbeddingFromFieldModeFromField DestinationMilvusEmbeddingFromFieldMode = "from_field"
+ DestinationMilvusSchemasEmbeddingEmbedding6ModeOpenaiCompatible DestinationMilvusSchemasEmbeddingEmbedding6Mode = "openai_compatible"
)
-func (e DestinationMilvusEmbeddingFromFieldMode) ToPointer() *DestinationMilvusEmbeddingFromFieldMode {
+func (e DestinationMilvusSchemasEmbeddingEmbedding6Mode) ToPointer() *DestinationMilvusSchemasEmbeddingEmbedding6Mode {
return &e
}
-func (e *DestinationMilvusEmbeddingFromFieldMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusSchemasEmbeddingEmbedding6Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "openai_compatible":
+ *e = DestinationMilvusSchemasEmbeddingEmbedding6Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationMilvusSchemasEmbeddingEmbedding6Mode: %v", v)
+ }
+}
+
+// DestinationMilvusOpenAICompatible - Use a service that's compatible with the OpenAI API to embed text.
+type DestinationMilvusOpenAICompatible struct {
+ APIKey *string `default:"" json:"api_key"`
+ // The base URL for your OpenAI-compatible service
+ BaseURL string `json:"base_url"`
+ // The number of dimensions the embedding model is generating
+ Dimensions int64 `json:"dimensions"`
+ mode *DestinationMilvusSchemasEmbeddingEmbedding6Mode `const:"openai_compatible" json:"mode"`
+ // The name of the model to use for embedding
+ ModelName *string `default:"text-embedding-ada-002" json:"model_name"`
+}
+
+func (d DestinationMilvusOpenAICompatible) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusOpenAICompatible) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusOpenAICompatible) GetAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIKey
+}
+
+func (o *DestinationMilvusOpenAICompatible) GetBaseURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.BaseURL
+}
+
+func (o *DestinationMilvusOpenAICompatible) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *DestinationMilvusOpenAICompatible) GetMode() *DestinationMilvusSchemasEmbeddingEmbedding6Mode {
+ return DestinationMilvusSchemasEmbeddingEmbedding6ModeOpenaiCompatible.ToPointer()
+}
+
+func (o *DestinationMilvusOpenAICompatible) GetModelName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ModelName
+}
+
+type DestinationMilvusSchemasEmbeddingEmbedding5Mode string
+
+const (
+ DestinationMilvusSchemasEmbeddingEmbedding5ModeAzureOpenai DestinationMilvusSchemasEmbeddingEmbedding5Mode = "azure_openai"
+)
+
+func (e DestinationMilvusSchemasEmbeddingEmbedding5Mode) ToPointer() *DestinationMilvusSchemasEmbeddingEmbedding5Mode {
+ return &e
+}
+
+func (e *DestinationMilvusSchemasEmbeddingEmbedding5Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "azure_openai":
+ *e = DestinationMilvusSchemasEmbeddingEmbedding5Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationMilvusSchemasEmbeddingEmbedding5Mode: %v", v)
+ }
+}
+
+// DestinationMilvusAzureOpenAI - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationMilvusAzureOpenAI struct {
+ // The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ APIBase string `json:"api_base"`
+ // The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ Deployment string `json:"deployment"`
+ mode *DestinationMilvusSchemasEmbeddingEmbedding5Mode `const:"azure_openai" json:"mode"`
+ // The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationMilvusAzureOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusAzureOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusAzureOpenAI) GetAPIBase() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIBase
+}
+
+func (o *DestinationMilvusAzureOpenAI) GetDeployment() string {
+ if o == nil {
+ return ""
+ }
+ return o.Deployment
+}
+
+func (o *DestinationMilvusAzureOpenAI) GetMode() *DestinationMilvusSchemasEmbeddingEmbedding5Mode {
+ return DestinationMilvusSchemasEmbeddingEmbedding5ModeAzureOpenai.ToPointer()
+}
+
+func (o *DestinationMilvusAzureOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
+}
+
+type DestinationMilvusSchemasEmbeddingEmbeddingMode string
+
+const (
+ DestinationMilvusSchemasEmbeddingEmbeddingModeFromField DestinationMilvusSchemasEmbeddingEmbeddingMode = "from_field"
+)
+
+func (e DestinationMilvusSchemasEmbeddingEmbeddingMode) ToPointer() *DestinationMilvusSchemasEmbeddingEmbeddingMode {
+ return &e
+}
+
+func (e *DestinationMilvusSchemasEmbeddingEmbeddingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "from_field":
- *e = DestinationMilvusEmbeddingFromFieldMode(v)
+ *e = DestinationMilvusSchemasEmbeddingEmbeddingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusEmbeddingFromFieldMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusSchemasEmbeddingEmbeddingMode: %v", v)
}
}
-// DestinationMilvusEmbeddingFromField - Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.
-type DestinationMilvusEmbeddingFromField struct {
+// DestinationMilvusFromField - Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.
+type DestinationMilvusFromField struct {
// The number of dimensions the embedding model is generating
Dimensions int64 `json:"dimensions"`
// Name of the field in the record that contains the embedding
- FieldName string `json:"field_name"`
- Mode *DestinationMilvusEmbeddingFromFieldMode `json:"mode,omitempty"`
+ FieldName string `json:"field_name"`
+ mode *DestinationMilvusSchemasEmbeddingEmbeddingMode `const:"from_field" json:"mode"`
}
-type DestinationMilvusEmbeddingFakeMode string
+func (d DestinationMilvusFromField) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusFromField) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusFromField) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *DestinationMilvusFromField) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+func (o *DestinationMilvusFromField) GetMode() *DestinationMilvusSchemasEmbeddingEmbeddingMode {
+ return DestinationMilvusSchemasEmbeddingEmbeddingModeFromField.ToPointer()
+}
+
+type DestinationMilvusSchemasEmbeddingMode string
const (
- DestinationMilvusEmbeddingFakeModeFake DestinationMilvusEmbeddingFakeMode = "fake"
+ DestinationMilvusSchemasEmbeddingModeFake DestinationMilvusSchemasEmbeddingMode = "fake"
)
-func (e DestinationMilvusEmbeddingFakeMode) ToPointer() *DestinationMilvusEmbeddingFakeMode {
+func (e DestinationMilvusSchemasEmbeddingMode) ToPointer() *DestinationMilvusSchemasEmbeddingMode {
return &e
}
-func (e *DestinationMilvusEmbeddingFakeMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusSchemasEmbeddingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "fake":
- *e = DestinationMilvusEmbeddingFakeMode(v)
+ *e = DestinationMilvusSchemasEmbeddingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusEmbeddingFakeMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusSchemasEmbeddingMode: %v", v)
+ }
+}
+
+// DestinationMilvusFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
+type DestinationMilvusFake struct {
+ mode *DestinationMilvusSchemasEmbeddingMode `const:"fake" json:"mode"`
+}
+
+func (d DestinationMilvusFake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusFake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationMilvusEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
-type DestinationMilvusEmbeddingFake struct {
- Mode *DestinationMilvusEmbeddingFakeMode `json:"mode,omitempty"`
+func (o *DestinationMilvusFake) GetMode() *DestinationMilvusSchemasEmbeddingMode {
+ return DestinationMilvusSchemasEmbeddingModeFake.ToPointer()
}
-type DestinationMilvusEmbeddingCohereMode string
+type DestinationMilvusSchemasMode string
const (
- DestinationMilvusEmbeddingCohereModeCohere DestinationMilvusEmbeddingCohereMode = "cohere"
+ DestinationMilvusSchemasModeCohere DestinationMilvusSchemasMode = "cohere"
)
-func (e DestinationMilvusEmbeddingCohereMode) ToPointer() *DestinationMilvusEmbeddingCohereMode {
+func (e DestinationMilvusSchemasMode) ToPointer() *DestinationMilvusSchemasMode {
return &e
}
-func (e *DestinationMilvusEmbeddingCohereMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "cohere":
- *e = DestinationMilvusEmbeddingCohereMode(v)
+ *e = DestinationMilvusSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusEmbeddingCohereMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusSchemasMode: %v", v)
}
}
-// DestinationMilvusEmbeddingCohere - Use the Cohere API to embed text.
-type DestinationMilvusEmbeddingCohere struct {
- CohereKey string `json:"cohere_key"`
- Mode *DestinationMilvusEmbeddingCohereMode `json:"mode,omitempty"`
+// DestinationMilvusCohere - Use the Cohere API to embed text.
+type DestinationMilvusCohere struct {
+ CohereKey string `json:"cohere_key"`
+ mode *DestinationMilvusSchemasMode `const:"cohere" json:"mode"`
}
-type DestinationMilvusEmbeddingOpenAIMode string
+func (d DestinationMilvusCohere) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusCohere) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusCohere) GetCohereKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.CohereKey
+}
+
+func (o *DestinationMilvusCohere) GetMode() *DestinationMilvusSchemasMode {
+ return DestinationMilvusSchemasModeCohere.ToPointer()
+}
+
+type DestinationMilvusMode string
const (
- DestinationMilvusEmbeddingOpenAIModeOpenai DestinationMilvusEmbeddingOpenAIMode = "openai"
+ DestinationMilvusModeOpenai DestinationMilvusMode = "openai"
)
-func (e DestinationMilvusEmbeddingOpenAIMode) ToPointer() *DestinationMilvusEmbeddingOpenAIMode {
+func (e DestinationMilvusMode) ToPointer() *DestinationMilvusMode {
return &e
}
-func (e *DestinationMilvusEmbeddingOpenAIMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "openai":
- *e = DestinationMilvusEmbeddingOpenAIMode(v)
+ *e = DestinationMilvusMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusEmbeddingOpenAIMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusMode: %v", v)
+ }
+}
+
+// DestinationMilvusOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationMilvusOpenAI struct {
+ mode *DestinationMilvusMode `const:"openai" json:"mode"`
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationMilvusOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationMilvusEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
-type DestinationMilvusEmbeddingOpenAI struct {
- Mode *DestinationMilvusEmbeddingOpenAIMode `json:"mode,omitempty"`
- OpenaiKey string `json:"openai_key"`
+func (o *DestinationMilvusOpenAI) GetMode() *DestinationMilvusMode {
+ return DestinationMilvusModeOpenai.ToPointer()
+}
+
+func (o *DestinationMilvusOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
}
type DestinationMilvusEmbeddingType string
const (
- DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingOpenAI DestinationMilvusEmbeddingType = "destination-milvus_Embedding_OpenAI"
- DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingCohere DestinationMilvusEmbeddingType = "destination-milvus_Embedding_Cohere"
- DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFake DestinationMilvusEmbeddingType = "destination-milvus_Embedding_Fake"
- DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFromField DestinationMilvusEmbeddingType = "destination-milvus_Embedding_From Field"
+ DestinationMilvusEmbeddingTypeDestinationMilvusOpenAI DestinationMilvusEmbeddingType = "destination-milvus_OpenAI"
+ DestinationMilvusEmbeddingTypeDestinationMilvusCohere DestinationMilvusEmbeddingType = "destination-milvus_Cohere"
+ DestinationMilvusEmbeddingTypeDestinationMilvusFake DestinationMilvusEmbeddingType = "destination-milvus_Fake"
+ DestinationMilvusEmbeddingTypeDestinationMilvusFromField DestinationMilvusEmbeddingType = "destination-milvus_From Field"
+ DestinationMilvusEmbeddingTypeDestinationMilvusAzureOpenAI DestinationMilvusEmbeddingType = "destination-milvus_Azure OpenAI"
+ DestinationMilvusEmbeddingTypeDestinationMilvusOpenAICompatible DestinationMilvusEmbeddingType = "destination-milvus_OpenAI-compatible"
)
type DestinationMilvusEmbedding struct {
- DestinationMilvusEmbeddingOpenAI *DestinationMilvusEmbeddingOpenAI
- DestinationMilvusEmbeddingCohere *DestinationMilvusEmbeddingCohere
- DestinationMilvusEmbeddingFake *DestinationMilvusEmbeddingFake
- DestinationMilvusEmbeddingFromField *DestinationMilvusEmbeddingFromField
+ DestinationMilvusOpenAI *DestinationMilvusOpenAI
+ DestinationMilvusCohere *DestinationMilvusCohere
+ DestinationMilvusFake *DestinationMilvusFake
+ DestinationMilvusFromField *DestinationMilvusFromField
+ DestinationMilvusAzureOpenAI *DestinationMilvusAzureOpenAI
+ DestinationMilvusOpenAICompatible *DestinationMilvusOpenAICompatible
Type DestinationMilvusEmbeddingType
}
-func CreateDestinationMilvusEmbeddingDestinationMilvusEmbeddingOpenAI(destinationMilvusEmbeddingOpenAI DestinationMilvusEmbeddingOpenAI) DestinationMilvusEmbedding {
- typ := DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingOpenAI
+func CreateDestinationMilvusEmbeddingDestinationMilvusOpenAI(destinationMilvusOpenAI DestinationMilvusOpenAI) DestinationMilvusEmbedding {
+ typ := DestinationMilvusEmbeddingTypeDestinationMilvusOpenAI
return DestinationMilvusEmbedding{
- DestinationMilvusEmbeddingOpenAI: &destinationMilvusEmbeddingOpenAI,
- Type: typ,
+ DestinationMilvusOpenAI: &destinationMilvusOpenAI,
+ Type: typ,
}
}
-func CreateDestinationMilvusEmbeddingDestinationMilvusEmbeddingCohere(destinationMilvusEmbeddingCohere DestinationMilvusEmbeddingCohere) DestinationMilvusEmbedding {
- typ := DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingCohere
+func CreateDestinationMilvusEmbeddingDestinationMilvusCohere(destinationMilvusCohere DestinationMilvusCohere) DestinationMilvusEmbedding {
+ typ := DestinationMilvusEmbeddingTypeDestinationMilvusCohere
return DestinationMilvusEmbedding{
- DestinationMilvusEmbeddingCohere: &destinationMilvusEmbeddingCohere,
- Type: typ,
+ DestinationMilvusCohere: &destinationMilvusCohere,
+ Type: typ,
}
}
-func CreateDestinationMilvusEmbeddingDestinationMilvusEmbeddingFake(destinationMilvusEmbeddingFake DestinationMilvusEmbeddingFake) DestinationMilvusEmbedding {
- typ := DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFake
+func CreateDestinationMilvusEmbeddingDestinationMilvusFake(destinationMilvusFake DestinationMilvusFake) DestinationMilvusEmbedding {
+ typ := DestinationMilvusEmbeddingTypeDestinationMilvusFake
return DestinationMilvusEmbedding{
- DestinationMilvusEmbeddingFake: &destinationMilvusEmbeddingFake,
- Type: typ,
+ DestinationMilvusFake: &destinationMilvusFake,
+ Type: typ,
}
}
-func CreateDestinationMilvusEmbeddingDestinationMilvusEmbeddingFromField(destinationMilvusEmbeddingFromField DestinationMilvusEmbeddingFromField) DestinationMilvusEmbedding {
- typ := DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFromField
+func CreateDestinationMilvusEmbeddingDestinationMilvusFromField(destinationMilvusFromField DestinationMilvusFromField) DestinationMilvusEmbedding {
+ typ := DestinationMilvusEmbeddingTypeDestinationMilvusFromField
return DestinationMilvusEmbedding{
- DestinationMilvusEmbeddingFromField: &destinationMilvusEmbeddingFromField,
- Type: typ,
+ DestinationMilvusFromField: &destinationMilvusFromField,
+ Type: typ,
+ }
+}
+
+func CreateDestinationMilvusEmbeddingDestinationMilvusAzureOpenAI(destinationMilvusAzureOpenAI DestinationMilvusAzureOpenAI) DestinationMilvusEmbedding {
+ typ := DestinationMilvusEmbeddingTypeDestinationMilvusAzureOpenAI
+
+ return DestinationMilvusEmbedding{
+ DestinationMilvusAzureOpenAI: &destinationMilvusAzureOpenAI,
+ Type: typ,
+ }
+}
+
+func CreateDestinationMilvusEmbeddingDestinationMilvusOpenAICompatible(destinationMilvusOpenAICompatible DestinationMilvusOpenAICompatible) DestinationMilvusEmbedding {
+ typ := DestinationMilvusEmbeddingTypeDestinationMilvusOpenAICompatible
+
+ return DestinationMilvusEmbedding{
+ DestinationMilvusOpenAICompatible: &destinationMilvusOpenAICompatible,
+ Type: typ,
}
}
func (u *DestinationMilvusEmbedding) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- destinationMilvusEmbeddingFake := new(DestinationMilvusEmbeddingFake)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusEmbeddingFake); err == nil {
- u.DestinationMilvusEmbeddingFake = destinationMilvusEmbeddingFake
- u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFake
+ destinationMilvusFake := new(DestinationMilvusFake)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusFake, "", true, true); err == nil {
+ u.DestinationMilvusFake = destinationMilvusFake
+ u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusFake
return nil
}
- destinationMilvusEmbeddingOpenAI := new(DestinationMilvusEmbeddingOpenAI)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusEmbeddingOpenAI); err == nil {
- u.DestinationMilvusEmbeddingOpenAI = destinationMilvusEmbeddingOpenAI
- u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingOpenAI
+ destinationMilvusOpenAI := new(DestinationMilvusOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusOpenAI, "", true, true); err == nil {
+ u.DestinationMilvusOpenAI = destinationMilvusOpenAI
+ u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusOpenAI
return nil
}
- destinationMilvusEmbeddingCohere := new(DestinationMilvusEmbeddingCohere)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusEmbeddingCohere); err == nil {
- u.DestinationMilvusEmbeddingCohere = destinationMilvusEmbeddingCohere
- u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingCohere
+ destinationMilvusCohere := new(DestinationMilvusCohere)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusCohere, "", true, true); err == nil {
+ u.DestinationMilvusCohere = destinationMilvusCohere
+ u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusCohere
return nil
}
- destinationMilvusEmbeddingFromField := new(DestinationMilvusEmbeddingFromField)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusEmbeddingFromField); err == nil {
- u.DestinationMilvusEmbeddingFromField = destinationMilvusEmbeddingFromField
- u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFromField
+ destinationMilvusFromField := new(DestinationMilvusFromField)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusFromField, "", true, true); err == nil {
+ u.DestinationMilvusFromField = destinationMilvusFromField
+ u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusFromField
+ return nil
+ }
+
+ destinationMilvusAzureOpenAI := new(DestinationMilvusAzureOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusAzureOpenAI, "", true, true); err == nil {
+ u.DestinationMilvusAzureOpenAI = destinationMilvusAzureOpenAI
+ u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusAzureOpenAI
+ return nil
+ }
+
+ destinationMilvusOpenAICompatible := new(DestinationMilvusOpenAICompatible)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusOpenAICompatible, "", true, true); err == nil {
+ u.DestinationMilvusOpenAICompatible = destinationMilvusOpenAICompatible
+ u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusOpenAICompatible
return nil
}
@@ -252,242 +517,799 @@ func (u *DestinationMilvusEmbedding) UnmarshalJSON(data []byte) error {
}
func (u DestinationMilvusEmbedding) MarshalJSON() ([]byte, error) {
- if u.DestinationMilvusEmbeddingFake != nil {
- return json.Marshal(u.DestinationMilvusEmbeddingFake)
+ if u.DestinationMilvusOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationMilvusOpenAI, "", true)
+ }
+
+ if u.DestinationMilvusCohere != nil {
+ return utils.MarshalJSON(u.DestinationMilvusCohere, "", true)
+ }
+
+ if u.DestinationMilvusFake != nil {
+ return utils.MarshalJSON(u.DestinationMilvusFake, "", true)
}
- if u.DestinationMilvusEmbeddingOpenAI != nil {
- return json.Marshal(u.DestinationMilvusEmbeddingOpenAI)
+ if u.DestinationMilvusFromField != nil {
+ return utils.MarshalJSON(u.DestinationMilvusFromField, "", true)
}
- if u.DestinationMilvusEmbeddingCohere != nil {
- return json.Marshal(u.DestinationMilvusEmbeddingCohere)
+ if u.DestinationMilvusAzureOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationMilvusAzureOpenAI, "", true)
}
- if u.DestinationMilvusEmbeddingFromField != nil {
- return json.Marshal(u.DestinationMilvusEmbeddingFromField)
+ if u.DestinationMilvusOpenAICompatible != nil {
+ return utils.MarshalJSON(u.DestinationMilvusOpenAICompatible, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationMilvusIndexingAuthenticationNoAuthMode string
+type DestinationMilvusSchemasIndexingAuthAuthenticationMode string
const (
- DestinationMilvusIndexingAuthenticationNoAuthModeNoAuth DestinationMilvusIndexingAuthenticationNoAuthMode = "no_auth"
+ DestinationMilvusSchemasIndexingAuthAuthenticationModeNoAuth DestinationMilvusSchemasIndexingAuthAuthenticationMode = "no_auth"
)
-func (e DestinationMilvusIndexingAuthenticationNoAuthMode) ToPointer() *DestinationMilvusIndexingAuthenticationNoAuthMode {
+func (e DestinationMilvusSchemasIndexingAuthAuthenticationMode) ToPointer() *DestinationMilvusSchemasIndexingAuthAuthenticationMode {
return &e
}
-func (e *DestinationMilvusIndexingAuthenticationNoAuthMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusSchemasIndexingAuthAuthenticationMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "no_auth":
- *e = DestinationMilvusIndexingAuthenticationNoAuthMode(v)
+ *e = DestinationMilvusSchemasIndexingAuthAuthenticationMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusIndexingAuthenticationNoAuthMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusSchemasIndexingAuthAuthenticationMode: %v", v)
}
}
-// DestinationMilvusIndexingAuthenticationNoAuth - Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)
-type DestinationMilvusIndexingAuthenticationNoAuth struct {
- Mode *DestinationMilvusIndexingAuthenticationNoAuthMode `json:"mode,omitempty"`
+// DestinationMilvusNoAuth - Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)
+type DestinationMilvusNoAuth struct {
+ mode *DestinationMilvusSchemasIndexingAuthAuthenticationMode `const:"no_auth" json:"mode"`
}
-type DestinationMilvusIndexingAuthenticationUsernamePasswordMode string
+func (d DestinationMilvusNoAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusNoAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusNoAuth) GetMode() *DestinationMilvusSchemasIndexingAuthAuthenticationMode {
+ return DestinationMilvusSchemasIndexingAuthAuthenticationModeNoAuth.ToPointer()
+}
+
+type DestinationMilvusSchemasIndexingAuthMode string
const (
- DestinationMilvusIndexingAuthenticationUsernamePasswordModeUsernamePassword DestinationMilvusIndexingAuthenticationUsernamePasswordMode = "username_password"
+ DestinationMilvusSchemasIndexingAuthModeUsernamePassword DestinationMilvusSchemasIndexingAuthMode = "username_password"
)
-func (e DestinationMilvusIndexingAuthenticationUsernamePasswordMode) ToPointer() *DestinationMilvusIndexingAuthenticationUsernamePasswordMode {
+func (e DestinationMilvusSchemasIndexingAuthMode) ToPointer() *DestinationMilvusSchemasIndexingAuthMode {
return &e
}
-func (e *DestinationMilvusIndexingAuthenticationUsernamePasswordMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusSchemasIndexingAuthMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "username_password":
- *e = DestinationMilvusIndexingAuthenticationUsernamePasswordMode(v)
+ *e = DestinationMilvusSchemasIndexingAuthMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusIndexingAuthenticationUsernamePasswordMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusSchemasIndexingAuthMode: %v", v)
}
}
-// DestinationMilvusIndexingAuthenticationUsernamePassword - Authenticate using username and password (suitable for self-managed Milvus clusters)
-type DestinationMilvusIndexingAuthenticationUsernamePassword struct {
- Mode *DestinationMilvusIndexingAuthenticationUsernamePasswordMode `json:"mode,omitempty"`
+// DestinationMilvusUsernamePassword - Authenticate using username and password (suitable for self-managed Milvus clusters)
+type DestinationMilvusUsernamePassword struct {
+ mode *DestinationMilvusSchemasIndexingAuthMode `const:"username_password" json:"mode"`
// Password for the Milvus instance
Password string `json:"password"`
// Username for the Milvus instance
Username string `json:"username"`
}
-type DestinationMilvusIndexingAuthenticationAPITokenMode string
+func (d DestinationMilvusUsernamePassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusUsernamePassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusUsernamePassword) GetMode() *DestinationMilvusSchemasIndexingAuthMode {
+ return DestinationMilvusSchemasIndexingAuthModeUsernamePassword.ToPointer()
+}
+
+func (o *DestinationMilvusUsernamePassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *DestinationMilvusUsernamePassword) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type DestinationMilvusSchemasIndexingMode string
const (
- DestinationMilvusIndexingAuthenticationAPITokenModeToken DestinationMilvusIndexingAuthenticationAPITokenMode = "token"
+ DestinationMilvusSchemasIndexingModeToken DestinationMilvusSchemasIndexingMode = "token"
)
-func (e DestinationMilvusIndexingAuthenticationAPITokenMode) ToPointer() *DestinationMilvusIndexingAuthenticationAPITokenMode {
+func (e DestinationMilvusSchemasIndexingMode) ToPointer() *DestinationMilvusSchemasIndexingMode {
return &e
}
-func (e *DestinationMilvusIndexingAuthenticationAPITokenMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusSchemasIndexingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "token":
- *e = DestinationMilvusIndexingAuthenticationAPITokenMode(v)
+ *e = DestinationMilvusSchemasIndexingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusIndexingAuthenticationAPITokenMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusSchemasIndexingMode: %v", v)
}
}
-// DestinationMilvusIndexingAuthenticationAPIToken - Authenticate using an API token (suitable for Zilliz Cloud)
-type DestinationMilvusIndexingAuthenticationAPIToken struct {
- Mode *DestinationMilvusIndexingAuthenticationAPITokenMode `json:"mode,omitempty"`
+// DestinationMilvusAPIToken - Authenticate using an API token (suitable for Zilliz Cloud)
+type DestinationMilvusAPIToken struct {
+ mode *DestinationMilvusSchemasIndexingMode `const:"token" json:"mode"`
// API Token for the Milvus instance
Token string `json:"token"`
}
-type DestinationMilvusIndexingAuthenticationType string
+func (d DestinationMilvusAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusAPIToken) GetMode() *DestinationMilvusSchemasIndexingMode {
+ return DestinationMilvusSchemasIndexingModeToken.ToPointer()
+}
+
+func (o *DestinationMilvusAPIToken) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
+
+type DestinationMilvusAuthenticationType string
const (
- DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationAPIToken DestinationMilvusIndexingAuthenticationType = "destination-milvus_Indexing_Authentication_API Token"
- DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationUsernamePassword DestinationMilvusIndexingAuthenticationType = "destination-milvus_Indexing_Authentication_Username/Password"
- DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationNoAuth DestinationMilvusIndexingAuthenticationType = "destination-milvus_Indexing_Authentication_No auth"
+ DestinationMilvusAuthenticationTypeDestinationMilvusAPIToken DestinationMilvusAuthenticationType = "destination-milvus_API Token"
+ DestinationMilvusAuthenticationTypeDestinationMilvusUsernamePassword DestinationMilvusAuthenticationType = "destination-milvus_Username/Password"
+ DestinationMilvusAuthenticationTypeDestinationMilvusNoAuth DestinationMilvusAuthenticationType = "destination-milvus_No auth"
)
-type DestinationMilvusIndexingAuthentication struct {
- DestinationMilvusIndexingAuthenticationAPIToken *DestinationMilvusIndexingAuthenticationAPIToken
- DestinationMilvusIndexingAuthenticationUsernamePassword *DestinationMilvusIndexingAuthenticationUsernamePassword
- DestinationMilvusIndexingAuthenticationNoAuth *DestinationMilvusIndexingAuthenticationNoAuth
+type DestinationMilvusAuthentication struct {
+ DestinationMilvusAPIToken *DestinationMilvusAPIToken
+ DestinationMilvusUsernamePassword *DestinationMilvusUsernamePassword
+ DestinationMilvusNoAuth *DestinationMilvusNoAuth
- Type DestinationMilvusIndexingAuthenticationType
+ Type DestinationMilvusAuthenticationType
}
-func CreateDestinationMilvusIndexingAuthenticationDestinationMilvusIndexingAuthenticationAPIToken(destinationMilvusIndexingAuthenticationAPIToken DestinationMilvusIndexingAuthenticationAPIToken) DestinationMilvusIndexingAuthentication {
- typ := DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationAPIToken
+func CreateDestinationMilvusAuthenticationDestinationMilvusAPIToken(destinationMilvusAPIToken DestinationMilvusAPIToken) DestinationMilvusAuthentication {
+ typ := DestinationMilvusAuthenticationTypeDestinationMilvusAPIToken
- return DestinationMilvusIndexingAuthentication{
- DestinationMilvusIndexingAuthenticationAPIToken: &destinationMilvusIndexingAuthenticationAPIToken,
- Type: typ,
+ return DestinationMilvusAuthentication{
+ DestinationMilvusAPIToken: &destinationMilvusAPIToken,
+ Type: typ,
}
}
-func CreateDestinationMilvusIndexingAuthenticationDestinationMilvusIndexingAuthenticationUsernamePassword(destinationMilvusIndexingAuthenticationUsernamePassword DestinationMilvusIndexingAuthenticationUsernamePassword) DestinationMilvusIndexingAuthentication {
- typ := DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationUsernamePassword
+func CreateDestinationMilvusAuthenticationDestinationMilvusUsernamePassword(destinationMilvusUsernamePassword DestinationMilvusUsernamePassword) DestinationMilvusAuthentication {
+ typ := DestinationMilvusAuthenticationTypeDestinationMilvusUsernamePassword
- return DestinationMilvusIndexingAuthentication{
- DestinationMilvusIndexingAuthenticationUsernamePassword: &destinationMilvusIndexingAuthenticationUsernamePassword,
- Type: typ,
+ return DestinationMilvusAuthentication{
+ DestinationMilvusUsernamePassword: &destinationMilvusUsernamePassword,
+ Type: typ,
}
}
-func CreateDestinationMilvusIndexingAuthenticationDestinationMilvusIndexingAuthenticationNoAuth(destinationMilvusIndexingAuthenticationNoAuth DestinationMilvusIndexingAuthenticationNoAuth) DestinationMilvusIndexingAuthentication {
- typ := DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationNoAuth
+func CreateDestinationMilvusAuthenticationDestinationMilvusNoAuth(destinationMilvusNoAuth DestinationMilvusNoAuth) DestinationMilvusAuthentication {
+ typ := DestinationMilvusAuthenticationTypeDestinationMilvusNoAuth
- return DestinationMilvusIndexingAuthentication{
- DestinationMilvusIndexingAuthenticationNoAuth: &destinationMilvusIndexingAuthenticationNoAuth,
- Type: typ,
+ return DestinationMilvusAuthentication{
+ DestinationMilvusNoAuth: &destinationMilvusNoAuth,
+ Type: typ,
}
}
-func (u *DestinationMilvusIndexingAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationMilvusAuthentication) UnmarshalJSON(data []byte) error {
- destinationMilvusIndexingAuthenticationNoAuth := new(DestinationMilvusIndexingAuthenticationNoAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusIndexingAuthenticationNoAuth); err == nil {
- u.DestinationMilvusIndexingAuthenticationNoAuth = destinationMilvusIndexingAuthenticationNoAuth
- u.Type = DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationNoAuth
+ destinationMilvusNoAuth := new(DestinationMilvusNoAuth)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusNoAuth, "", true, true); err == nil {
+ u.DestinationMilvusNoAuth = destinationMilvusNoAuth
+ u.Type = DestinationMilvusAuthenticationTypeDestinationMilvusNoAuth
return nil
}
- destinationMilvusIndexingAuthenticationAPIToken := new(DestinationMilvusIndexingAuthenticationAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusIndexingAuthenticationAPIToken); err == nil {
- u.DestinationMilvusIndexingAuthenticationAPIToken = destinationMilvusIndexingAuthenticationAPIToken
- u.Type = DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationAPIToken
+ destinationMilvusAPIToken := new(DestinationMilvusAPIToken)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusAPIToken, "", true, true); err == nil {
+ u.DestinationMilvusAPIToken = destinationMilvusAPIToken
+ u.Type = DestinationMilvusAuthenticationTypeDestinationMilvusAPIToken
return nil
}
- destinationMilvusIndexingAuthenticationUsernamePassword := new(DestinationMilvusIndexingAuthenticationUsernamePassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusIndexingAuthenticationUsernamePassword); err == nil {
- u.DestinationMilvusIndexingAuthenticationUsernamePassword = destinationMilvusIndexingAuthenticationUsernamePassword
- u.Type = DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationUsernamePassword
+ destinationMilvusUsernamePassword := new(DestinationMilvusUsernamePassword)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusUsernamePassword, "", true, true); err == nil {
+ u.DestinationMilvusUsernamePassword = destinationMilvusUsernamePassword
+ u.Type = DestinationMilvusAuthenticationTypeDestinationMilvusUsernamePassword
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationMilvusIndexingAuthentication) MarshalJSON() ([]byte, error) {
- if u.DestinationMilvusIndexingAuthenticationNoAuth != nil {
- return json.Marshal(u.DestinationMilvusIndexingAuthenticationNoAuth)
+func (u DestinationMilvusAuthentication) MarshalJSON() ([]byte, error) {
+ if u.DestinationMilvusAPIToken != nil {
+ return utils.MarshalJSON(u.DestinationMilvusAPIToken, "", true)
}
- if u.DestinationMilvusIndexingAuthenticationAPIToken != nil {
- return json.Marshal(u.DestinationMilvusIndexingAuthenticationAPIToken)
+ if u.DestinationMilvusUsernamePassword != nil {
+ return utils.MarshalJSON(u.DestinationMilvusUsernamePassword, "", true)
}
- if u.DestinationMilvusIndexingAuthenticationUsernamePassword != nil {
- return json.Marshal(u.DestinationMilvusIndexingAuthenticationUsernamePassword)
+ if u.DestinationMilvusNoAuth != nil {
+ return utils.MarshalJSON(u.DestinationMilvusNoAuth, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// DestinationMilvusIndexing - Indexing configuration
type DestinationMilvusIndexing struct {
// Authentication method
- Auth DestinationMilvusIndexingAuthentication `json:"auth"`
+ Auth DestinationMilvusAuthentication `json:"auth"`
// The collection to load data into
Collection string `json:"collection"`
// The database to connect to
- Db *string `json:"db,omitempty"`
+ Db *string `default:"" json:"db"`
// The public endpoint of the Milvus instance.
Host string `json:"host"`
// The field in the entity that contains the embedded text
- TextField *string `json:"text_field,omitempty"`
+ TextField *string `default:"text" json:"text_field"`
// The field in the entity that contains the vector
- VectorField *string `json:"vector_field,omitempty"`
+ VectorField *string `default:"vector" json:"vector_field"`
+}
+
+func (d DestinationMilvusIndexing) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusIndexing) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusIndexing) GetAuth() DestinationMilvusAuthentication {
+ if o == nil {
+ return DestinationMilvusAuthentication{}
+ }
+ return o.Auth
+}
+
+func (o *DestinationMilvusIndexing) GetCollection() string {
+ if o == nil {
+ return ""
+ }
+ return o.Collection
+}
+
+func (o *DestinationMilvusIndexing) GetDb() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Db
+}
+
+func (o *DestinationMilvusIndexing) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationMilvusIndexing) GetTextField() *string {
+ if o == nil {
+ return nil
+ }
+ return o.TextField
+}
+
+func (o *DestinationMilvusIndexing) GetVectorField() *string {
+ if o == nil {
+ return nil
+ }
+ return o.VectorField
+}
+
+type DestinationMilvusFieldNameMappingConfigModel struct {
+ // The field name in the source
+ FromField string `json:"from_field"`
+ // The field name to use in the destination
+ ToField string `json:"to_field"`
+}
+
+func (o *DestinationMilvusFieldNameMappingConfigModel) GetFromField() string {
+ if o == nil {
+ return ""
+ }
+ return o.FromField
+}
+
+func (o *DestinationMilvusFieldNameMappingConfigModel) GetToField() string {
+ if o == nil {
+ return ""
+ }
+ return o.ToField
+}
+
+// DestinationMilvusLanguage - Split code in suitable places based on the programming language
+type DestinationMilvusLanguage string
+
+const (
+ DestinationMilvusLanguageCpp DestinationMilvusLanguage = "cpp"
+ DestinationMilvusLanguageGo DestinationMilvusLanguage = "go"
+ DestinationMilvusLanguageJava DestinationMilvusLanguage = "java"
+ DestinationMilvusLanguageJs DestinationMilvusLanguage = "js"
+ DestinationMilvusLanguagePhp DestinationMilvusLanguage = "php"
+ DestinationMilvusLanguageProto DestinationMilvusLanguage = "proto"
+ DestinationMilvusLanguagePython DestinationMilvusLanguage = "python"
+ DestinationMilvusLanguageRst DestinationMilvusLanguage = "rst"
+ DestinationMilvusLanguageRuby DestinationMilvusLanguage = "ruby"
+ DestinationMilvusLanguageRust DestinationMilvusLanguage = "rust"
+ DestinationMilvusLanguageScala DestinationMilvusLanguage = "scala"
+ DestinationMilvusLanguageSwift DestinationMilvusLanguage = "swift"
+ DestinationMilvusLanguageMarkdown DestinationMilvusLanguage = "markdown"
+ DestinationMilvusLanguageLatex DestinationMilvusLanguage = "latex"
+ DestinationMilvusLanguageHTML DestinationMilvusLanguage = "html"
+ DestinationMilvusLanguageSol DestinationMilvusLanguage = "sol"
+)
+
+func (e DestinationMilvusLanguage) ToPointer() *DestinationMilvusLanguage {
+ return &e
+}
+
+func (e *DestinationMilvusLanguage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "cpp":
+ fallthrough
+ case "go":
+ fallthrough
+ case "java":
+ fallthrough
+ case "js":
+ fallthrough
+ case "php":
+ fallthrough
+ case "proto":
+ fallthrough
+ case "python":
+ fallthrough
+ case "rst":
+ fallthrough
+ case "ruby":
+ fallthrough
+ case "rust":
+ fallthrough
+ case "scala":
+ fallthrough
+ case "swift":
+ fallthrough
+ case "markdown":
+ fallthrough
+ case "latex":
+ fallthrough
+ case "html":
+ fallthrough
+ case "sol":
+ *e = DestinationMilvusLanguage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationMilvusLanguage: %v", v)
+ }
+}
+
+type DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode string
+
+const (
+ DestinationMilvusSchemasProcessingTextSplitterTextSplitterModeCode DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode = "code"
+)
+
+func (e DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode) ToPointer() *DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode {
+ return &e
+}
+
+func (e *DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "code":
+ *e = DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode: %v", v)
+ }
+}
+
+// DestinationMilvusByProgrammingLanguage - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
+type DestinationMilvusByProgrammingLanguage struct {
+ // Split code in suitable places based on the programming language
+ Language DestinationMilvusLanguage `json:"language"`
+ mode *DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode `const:"code" json:"mode"`
+}
+
+func (d DestinationMilvusByProgrammingLanguage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusByProgrammingLanguage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusByProgrammingLanguage) GetLanguage() DestinationMilvusLanguage {
+ if o == nil {
+ return DestinationMilvusLanguage("")
+ }
+ return o.Language
+}
+
+func (o *DestinationMilvusByProgrammingLanguage) GetMode() *DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode {
+ return DestinationMilvusSchemasProcessingTextSplitterTextSplitterModeCode.ToPointer()
+}
+
+type DestinationMilvusSchemasProcessingTextSplitterMode string
+
+const (
+ DestinationMilvusSchemasProcessingTextSplitterModeMarkdown DestinationMilvusSchemasProcessingTextSplitterMode = "markdown"
+)
+
+func (e DestinationMilvusSchemasProcessingTextSplitterMode) ToPointer() *DestinationMilvusSchemasProcessingTextSplitterMode {
+ return &e
+}
+
+func (e *DestinationMilvusSchemasProcessingTextSplitterMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "markdown":
+ *e = DestinationMilvusSchemasProcessingTextSplitterMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationMilvusSchemasProcessingTextSplitterMode: %v", v)
+ }
+}
+
+// DestinationMilvusByMarkdownHeader - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
+type DestinationMilvusByMarkdownHeader struct {
+ mode *DestinationMilvusSchemasProcessingTextSplitterMode `const:"markdown" json:"mode"`
+ // Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
+ SplitLevel *int64 `default:"1" json:"split_level"`
+}
+
+func (d DestinationMilvusByMarkdownHeader) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusByMarkdownHeader) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusByMarkdownHeader) GetMode() *DestinationMilvusSchemasProcessingTextSplitterMode {
+ return DestinationMilvusSchemasProcessingTextSplitterModeMarkdown.ToPointer()
+}
+
+func (o *DestinationMilvusByMarkdownHeader) GetSplitLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SplitLevel
+}
+
+type DestinationMilvusSchemasProcessingMode string
+
+const (
+ DestinationMilvusSchemasProcessingModeSeparator DestinationMilvusSchemasProcessingMode = "separator"
+)
+
+func (e DestinationMilvusSchemasProcessingMode) ToPointer() *DestinationMilvusSchemasProcessingMode {
+ return &e
+}
+
+func (e *DestinationMilvusSchemasProcessingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "separator":
+ *e = DestinationMilvusSchemasProcessingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationMilvusSchemasProcessingMode: %v", v)
+ }
+}
+
+// DestinationMilvusBySeparator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
+type DestinationMilvusBySeparator struct {
+ // Whether to keep the separator in the resulting chunks
+ KeepSeparator *bool `default:"false" json:"keep_separator"`
+ mode *DestinationMilvusSchemasProcessingMode `const:"separator" json:"mode"`
+ // List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
+ Separators []string `json:"separators,omitempty"`
+}
+
+func (d DestinationMilvusBySeparator) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusBySeparator) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusBySeparator) GetKeepSeparator() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.KeepSeparator
+}
+
+func (o *DestinationMilvusBySeparator) GetMode() *DestinationMilvusSchemasProcessingMode {
+ return DestinationMilvusSchemasProcessingModeSeparator.ToPointer()
+}
+
+func (o *DestinationMilvusBySeparator) GetSeparators() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Separators
+}
+
+type DestinationMilvusTextSplitterType string
+
+const (
+ DestinationMilvusTextSplitterTypeDestinationMilvusBySeparator DestinationMilvusTextSplitterType = "destination-milvus_By Separator"
+ DestinationMilvusTextSplitterTypeDestinationMilvusByMarkdownHeader DestinationMilvusTextSplitterType = "destination-milvus_By Markdown header"
+ DestinationMilvusTextSplitterTypeDestinationMilvusByProgrammingLanguage DestinationMilvusTextSplitterType = "destination-milvus_By Programming Language"
+)
+
+type DestinationMilvusTextSplitter struct {
+ DestinationMilvusBySeparator *DestinationMilvusBySeparator
+ DestinationMilvusByMarkdownHeader *DestinationMilvusByMarkdownHeader
+ DestinationMilvusByProgrammingLanguage *DestinationMilvusByProgrammingLanguage
+
+ Type DestinationMilvusTextSplitterType
+}
+
+func CreateDestinationMilvusTextSplitterDestinationMilvusBySeparator(destinationMilvusBySeparator DestinationMilvusBySeparator) DestinationMilvusTextSplitter {
+ typ := DestinationMilvusTextSplitterTypeDestinationMilvusBySeparator
+
+ return DestinationMilvusTextSplitter{
+ DestinationMilvusBySeparator: &destinationMilvusBySeparator,
+ Type: typ,
+ }
+}
+
+func CreateDestinationMilvusTextSplitterDestinationMilvusByMarkdownHeader(destinationMilvusByMarkdownHeader DestinationMilvusByMarkdownHeader) DestinationMilvusTextSplitter {
+ typ := DestinationMilvusTextSplitterTypeDestinationMilvusByMarkdownHeader
+
+ return DestinationMilvusTextSplitter{
+ DestinationMilvusByMarkdownHeader: &destinationMilvusByMarkdownHeader,
+ Type: typ,
+ }
+}
+
+func CreateDestinationMilvusTextSplitterDestinationMilvusByProgrammingLanguage(destinationMilvusByProgrammingLanguage DestinationMilvusByProgrammingLanguage) DestinationMilvusTextSplitter {
+ typ := DestinationMilvusTextSplitterTypeDestinationMilvusByProgrammingLanguage
+
+ return DestinationMilvusTextSplitter{
+ DestinationMilvusByProgrammingLanguage: &destinationMilvusByProgrammingLanguage,
+ Type: typ,
+ }
+}
+
+func (u *DestinationMilvusTextSplitter) UnmarshalJSON(data []byte) error {
+
+ destinationMilvusByMarkdownHeader := new(DestinationMilvusByMarkdownHeader)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusByMarkdownHeader, "", true, true); err == nil {
+ u.DestinationMilvusByMarkdownHeader = destinationMilvusByMarkdownHeader
+ u.Type = DestinationMilvusTextSplitterTypeDestinationMilvusByMarkdownHeader
+ return nil
+ }
+
+ destinationMilvusByProgrammingLanguage := new(DestinationMilvusByProgrammingLanguage)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusByProgrammingLanguage, "", true, true); err == nil {
+ u.DestinationMilvusByProgrammingLanguage = destinationMilvusByProgrammingLanguage
+ u.Type = DestinationMilvusTextSplitterTypeDestinationMilvusByProgrammingLanguage
+ return nil
+ }
+
+ destinationMilvusBySeparator := new(DestinationMilvusBySeparator)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusBySeparator, "", true, true); err == nil {
+ u.DestinationMilvusBySeparator = destinationMilvusBySeparator
+ u.Type = DestinationMilvusTextSplitterTypeDestinationMilvusBySeparator
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DestinationMilvusTextSplitter) MarshalJSON() ([]byte, error) {
+ if u.DestinationMilvusBySeparator != nil {
+ return utils.MarshalJSON(u.DestinationMilvusBySeparator, "", true)
+ }
+
+ if u.DestinationMilvusByMarkdownHeader != nil {
+ return utils.MarshalJSON(u.DestinationMilvusByMarkdownHeader, "", true)
+ }
+
+ if u.DestinationMilvusByProgrammingLanguage != nil {
+ return utils.MarshalJSON(u.DestinationMilvusByProgrammingLanguage, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationMilvusProcessingConfigModel struct {
// Size of overlap between chunks in tokens to store in vector store to better capture relevant context
- ChunkOverlap *int64 `json:"chunk_overlap,omitempty"`
+ ChunkOverlap *int64 `default:"0" json:"chunk_overlap"`
// Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)
ChunkSize int64 `json:"chunk_size"`
+ // List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
+ FieldNameMappings []DestinationMilvusFieldNameMappingConfigModel `json:"field_name_mappings,omitempty"`
// List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
MetadataFields []string `json:"metadata_fields,omitempty"`
// List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
TextFields []string `json:"text_fields,omitempty"`
+ // Split text fields into chunks based on the specified method.
+ TextSplitter *DestinationMilvusTextSplitter `json:"text_splitter,omitempty"`
+}
+
+func (d DestinationMilvusProcessingConfigModel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusProcessingConfigModel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusProcessingConfigModel) GetChunkOverlap() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ChunkOverlap
+}
+
+func (o *DestinationMilvusProcessingConfigModel) GetChunkSize() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.ChunkSize
+}
+
+func (o *DestinationMilvusProcessingConfigModel) GetFieldNameMappings() []DestinationMilvusFieldNameMappingConfigModel {
+ if o == nil {
+ return nil
+ }
+ return o.FieldNameMappings
+}
+
+func (o *DestinationMilvusProcessingConfigModel) GetMetadataFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.MetadataFields
+}
+
+func (o *DestinationMilvusProcessingConfigModel) GetTextFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TextFields
+}
+
+func (o *DestinationMilvusProcessingConfigModel) GetTextSplitter() *DestinationMilvusTextSplitter {
+ if o == nil {
+ return nil
+ }
+ return o.TextSplitter
}
type DestinationMilvus struct {
- DestinationType DestinationMilvusMilvus `json:"destinationType"`
+ destinationType Milvus `const:"milvus" json:"destinationType"`
// Embedding configuration
Embedding DestinationMilvusEmbedding `json:"embedding"`
// Indexing configuration
Indexing DestinationMilvusIndexing `json:"indexing"`
Processing DestinationMilvusProcessingConfigModel `json:"processing"`
}
+
+func (d DestinationMilvus) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvus) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvus) GetDestinationType() Milvus {
+ return MilvusMilvus
+}
+
+func (o *DestinationMilvus) GetEmbedding() DestinationMilvusEmbedding {
+ if o == nil {
+ return DestinationMilvusEmbedding{}
+ }
+ return o.Embedding
+}
+
+func (o *DestinationMilvus) GetIndexing() DestinationMilvusIndexing {
+ if o == nil {
+ return DestinationMilvusIndexing{}
+ }
+ return o.Indexing
+}
+
+func (o *DestinationMilvus) GetProcessing() DestinationMilvusProcessingConfigModel {
+ if o == nil {
+ return DestinationMilvusProcessingConfigModel{}
+ }
+ return o.Processing
+}
diff --git a/internal/sdk/pkg/models/shared/destinationmilvuscreaterequest.go b/internal/sdk/pkg/models/shared/destinationmilvuscreaterequest.go
old mode 100755
new mode 100644
index 6f1af0cba..d079ff800
--- a/internal/sdk/pkg/models/shared/destinationmilvuscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationmilvuscreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationMilvusCreateRequest struct {
Configuration DestinationMilvus `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationMilvusCreateRequest) GetConfiguration() DestinationMilvus {
+ if o == nil {
+ return DestinationMilvus{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationMilvusCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationMilvusCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationMilvusCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationmilvusputrequest.go b/internal/sdk/pkg/models/shared/destinationmilvusputrequest.go
old mode 100755
new mode 100644
index ba3946485..9310bcd08
--- a/internal/sdk/pkg/models/shared/destinationmilvusputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationmilvusputrequest.go
@@ -7,3 +7,24 @@ type DestinationMilvusPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationMilvusPutRequest) GetConfiguration() DestinationMilvusUpdate {
+ if o == nil {
+ return DestinationMilvusUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationMilvusPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationMilvusPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationmilvusupdate.go b/internal/sdk/pkg/models/shared/destinationmilvusupdate.go
old mode 100755
new mode 100644
index 4a20582ee..e3a349239
--- a/internal/sdk/pkg/models/shared/destinationmilvusupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationmilvusupdate.go
@@ -3,224 +3,489 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationMilvusUpdateEmbeddingFromFieldMode string
+type DestinationMilvusUpdateSchemasEmbeddingEmbedding6Mode string
const (
- DestinationMilvusUpdateEmbeddingFromFieldModeFromField DestinationMilvusUpdateEmbeddingFromFieldMode = "from_field"
+ DestinationMilvusUpdateSchemasEmbeddingEmbedding6ModeOpenaiCompatible DestinationMilvusUpdateSchemasEmbeddingEmbedding6Mode = "openai_compatible"
)
-func (e DestinationMilvusUpdateEmbeddingFromFieldMode) ToPointer() *DestinationMilvusUpdateEmbeddingFromFieldMode {
+func (e DestinationMilvusUpdateSchemasEmbeddingEmbedding6Mode) ToPointer() *DestinationMilvusUpdateSchemasEmbeddingEmbedding6Mode {
return &e
}
-func (e *DestinationMilvusUpdateEmbeddingFromFieldMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusUpdateSchemasEmbeddingEmbedding6Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "openai_compatible":
+ *e = DestinationMilvusUpdateSchemasEmbeddingEmbedding6Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationMilvusUpdateSchemasEmbeddingEmbedding6Mode: %v", v)
+ }
+}
+
+// OpenAICompatible - Use a service that's compatible with the OpenAI API to embed text.
+type OpenAICompatible struct {
+ APIKey *string `default:"" json:"api_key"`
+ // The base URL for your OpenAI-compatible service
+ BaseURL string `json:"base_url"`
+ // The number of dimensions the embedding model is generating
+ Dimensions int64 `json:"dimensions"`
+ mode *DestinationMilvusUpdateSchemasEmbeddingEmbedding6Mode `const:"openai_compatible" json:"mode"`
+ // The name of the model to use for embedding
+ ModelName *string `default:"text-embedding-ada-002" json:"model_name"`
+}
+
+func (o OpenAICompatible) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(o, "", false)
+}
+
+func (o *OpenAICompatible) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &o, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *OpenAICompatible) GetAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIKey
+}
+
+func (o *OpenAICompatible) GetBaseURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.BaseURL
+}
+
+func (o *OpenAICompatible) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *OpenAICompatible) GetMode() *DestinationMilvusUpdateSchemasEmbeddingEmbedding6Mode {
+ return DestinationMilvusUpdateSchemasEmbeddingEmbedding6ModeOpenaiCompatible.ToPointer()
+}
+
+func (o *OpenAICompatible) GetModelName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ModelName
+}
+
+type DestinationMilvusUpdateSchemasEmbeddingEmbedding5Mode string
+
+const (
+ DestinationMilvusUpdateSchemasEmbeddingEmbedding5ModeAzureOpenai DestinationMilvusUpdateSchemasEmbeddingEmbedding5Mode = "azure_openai"
+)
+
+func (e DestinationMilvusUpdateSchemasEmbeddingEmbedding5Mode) ToPointer() *DestinationMilvusUpdateSchemasEmbeddingEmbedding5Mode {
+ return &e
+}
+
+func (e *DestinationMilvusUpdateSchemasEmbeddingEmbedding5Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "azure_openai":
+ *e = DestinationMilvusUpdateSchemasEmbeddingEmbedding5Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationMilvusUpdateSchemasEmbeddingEmbedding5Mode: %v", v)
+ }
+}
+
+// AzureOpenAI - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type AzureOpenAI struct {
+ // The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ APIBase string `json:"api_base"`
+ // The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ Deployment string `json:"deployment"`
+ mode *DestinationMilvusUpdateSchemasEmbeddingEmbedding5Mode `const:"azure_openai" json:"mode"`
+ // The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (a AzureOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AzureOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AzureOpenAI) GetAPIBase() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIBase
+}
+
+func (o *AzureOpenAI) GetDeployment() string {
+ if o == nil {
+ return ""
+ }
+ return o.Deployment
+}
+
+func (o *AzureOpenAI) GetMode() *DestinationMilvusUpdateSchemasEmbeddingEmbedding5Mode {
+ return DestinationMilvusUpdateSchemasEmbeddingEmbedding5ModeAzureOpenai.ToPointer()
+}
+
+func (o *AzureOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
+}
+
+type DestinationMilvusUpdateSchemasEmbeddingEmbeddingMode string
+
+const (
+ DestinationMilvusUpdateSchemasEmbeddingEmbeddingModeFromField DestinationMilvusUpdateSchemasEmbeddingEmbeddingMode = "from_field"
+)
+
+func (e DestinationMilvusUpdateSchemasEmbeddingEmbeddingMode) ToPointer() *DestinationMilvusUpdateSchemasEmbeddingEmbeddingMode {
+ return &e
+}
+
+func (e *DestinationMilvusUpdateSchemasEmbeddingEmbeddingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "from_field":
- *e = DestinationMilvusUpdateEmbeddingFromFieldMode(v)
+ *e = DestinationMilvusUpdateSchemasEmbeddingEmbeddingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusUpdateEmbeddingFromFieldMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusUpdateSchemasEmbeddingEmbeddingMode: %v", v)
}
}
-// DestinationMilvusUpdateEmbeddingFromField - Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.
-type DestinationMilvusUpdateEmbeddingFromField struct {
+// FromField - Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.
+type FromField struct {
// The number of dimensions the embedding model is generating
Dimensions int64 `json:"dimensions"`
// Name of the field in the record that contains the embedding
- FieldName string `json:"field_name"`
- Mode *DestinationMilvusUpdateEmbeddingFromFieldMode `json:"mode,omitempty"`
+ FieldName string `json:"field_name"`
+ mode *DestinationMilvusUpdateSchemasEmbeddingEmbeddingMode `const:"from_field" json:"mode"`
+}
+
+func (f FromField) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(f, "", false)
}
-type DestinationMilvusUpdateEmbeddingFakeMode string
+func (f *FromField) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &f, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *FromField) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *FromField) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+func (o *FromField) GetMode() *DestinationMilvusUpdateSchemasEmbeddingEmbeddingMode {
+ return DestinationMilvusUpdateSchemasEmbeddingEmbeddingModeFromField.ToPointer()
+}
+
+type DestinationMilvusUpdateSchemasEmbeddingMode string
const (
- DestinationMilvusUpdateEmbeddingFakeModeFake DestinationMilvusUpdateEmbeddingFakeMode = "fake"
+ DestinationMilvusUpdateSchemasEmbeddingModeFake DestinationMilvusUpdateSchemasEmbeddingMode = "fake"
)
-func (e DestinationMilvusUpdateEmbeddingFakeMode) ToPointer() *DestinationMilvusUpdateEmbeddingFakeMode {
+func (e DestinationMilvusUpdateSchemasEmbeddingMode) ToPointer() *DestinationMilvusUpdateSchemasEmbeddingMode {
return &e
}
-func (e *DestinationMilvusUpdateEmbeddingFakeMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusUpdateSchemasEmbeddingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "fake":
- *e = DestinationMilvusUpdateEmbeddingFakeMode(v)
+ *e = DestinationMilvusUpdateSchemasEmbeddingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusUpdateEmbeddingFakeMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusUpdateSchemasEmbeddingMode: %v", v)
+ }
+}
+
+// DestinationMilvusUpdateFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
+type DestinationMilvusUpdateFake struct {
+ mode *DestinationMilvusUpdateSchemasEmbeddingMode `const:"fake" json:"mode"`
+}
+
+func (d DestinationMilvusUpdateFake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusUpdateFake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationMilvusUpdateEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
-type DestinationMilvusUpdateEmbeddingFake struct {
- Mode *DestinationMilvusUpdateEmbeddingFakeMode `json:"mode,omitempty"`
+func (o *DestinationMilvusUpdateFake) GetMode() *DestinationMilvusUpdateSchemasEmbeddingMode {
+ return DestinationMilvusUpdateSchemasEmbeddingModeFake.ToPointer()
}
-type DestinationMilvusUpdateEmbeddingCohereMode string
+type DestinationMilvusUpdateSchemasMode string
const (
- DestinationMilvusUpdateEmbeddingCohereModeCohere DestinationMilvusUpdateEmbeddingCohereMode = "cohere"
+ DestinationMilvusUpdateSchemasModeCohere DestinationMilvusUpdateSchemasMode = "cohere"
)
-func (e DestinationMilvusUpdateEmbeddingCohereMode) ToPointer() *DestinationMilvusUpdateEmbeddingCohereMode {
+func (e DestinationMilvusUpdateSchemasMode) ToPointer() *DestinationMilvusUpdateSchemasMode {
return &e
}
-func (e *DestinationMilvusUpdateEmbeddingCohereMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusUpdateSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "cohere":
- *e = DestinationMilvusUpdateEmbeddingCohereMode(v)
+ *e = DestinationMilvusUpdateSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusUpdateEmbeddingCohereMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusUpdateSchemasMode: %v", v)
}
}
-// DestinationMilvusUpdateEmbeddingCohere - Use the Cohere API to embed text.
-type DestinationMilvusUpdateEmbeddingCohere struct {
- CohereKey string `json:"cohere_key"`
- Mode *DestinationMilvusUpdateEmbeddingCohereMode `json:"mode,omitempty"`
+// Cohere - Use the Cohere API to embed text.
+type Cohere struct {
+ CohereKey string `json:"cohere_key"`
+ mode *DestinationMilvusUpdateSchemasMode `const:"cohere" json:"mode"`
}
-type DestinationMilvusUpdateEmbeddingOpenAIMode string
+func (c Cohere) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(c, "", false)
+}
+
+func (c *Cohere) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &c, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Cohere) GetCohereKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.CohereKey
+}
+
+func (o *Cohere) GetMode() *DestinationMilvusUpdateSchemasMode {
+ return DestinationMilvusUpdateSchemasModeCohere.ToPointer()
+}
+
+type DestinationMilvusUpdateMode string
const (
- DestinationMilvusUpdateEmbeddingOpenAIModeOpenai DestinationMilvusUpdateEmbeddingOpenAIMode = "openai"
+ DestinationMilvusUpdateModeOpenai DestinationMilvusUpdateMode = "openai"
)
-func (e DestinationMilvusUpdateEmbeddingOpenAIMode) ToPointer() *DestinationMilvusUpdateEmbeddingOpenAIMode {
+func (e DestinationMilvusUpdateMode) ToPointer() *DestinationMilvusUpdateMode {
return &e
}
-func (e *DestinationMilvusUpdateEmbeddingOpenAIMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusUpdateMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "openai":
- *e = DestinationMilvusUpdateEmbeddingOpenAIMode(v)
+ *e = DestinationMilvusUpdateMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusUpdateEmbeddingOpenAIMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusUpdateMode: %v", v)
}
}
-// DestinationMilvusUpdateEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
-type DestinationMilvusUpdateEmbeddingOpenAI struct {
- Mode *DestinationMilvusUpdateEmbeddingOpenAIMode `json:"mode,omitempty"`
- OpenaiKey string `json:"openai_key"`
+// DestinationMilvusUpdateOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationMilvusUpdateOpenAI struct {
+ mode *DestinationMilvusUpdateMode `const:"openai" json:"mode"`
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationMilvusUpdateOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusUpdateOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusUpdateOpenAI) GetMode() *DestinationMilvusUpdateMode {
+ return DestinationMilvusUpdateModeOpenai.ToPointer()
+}
+
+func (o *DestinationMilvusUpdateOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
}
type DestinationMilvusUpdateEmbeddingType string
const (
- DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingOpenAI DestinationMilvusUpdateEmbeddingType = "destination-milvus-update_Embedding_OpenAI"
- DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingCohere DestinationMilvusUpdateEmbeddingType = "destination-milvus-update_Embedding_Cohere"
- DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFake DestinationMilvusUpdateEmbeddingType = "destination-milvus-update_Embedding_Fake"
- DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFromField DestinationMilvusUpdateEmbeddingType = "destination-milvus-update_Embedding_From Field"
+ DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateOpenAI DestinationMilvusUpdateEmbeddingType = "destination-milvus-update_OpenAI"
+ DestinationMilvusUpdateEmbeddingTypeCohere DestinationMilvusUpdateEmbeddingType = "Cohere"
+ DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateFake DestinationMilvusUpdateEmbeddingType = "destination-milvus-update_Fake"
+ DestinationMilvusUpdateEmbeddingTypeFromField DestinationMilvusUpdateEmbeddingType = "From Field"
+ DestinationMilvusUpdateEmbeddingTypeAzureOpenAI DestinationMilvusUpdateEmbeddingType = "Azure OpenAI"
+ DestinationMilvusUpdateEmbeddingTypeOpenAICompatible DestinationMilvusUpdateEmbeddingType = "OpenAI-compatible"
)
type DestinationMilvusUpdateEmbedding struct {
- DestinationMilvusUpdateEmbeddingOpenAI *DestinationMilvusUpdateEmbeddingOpenAI
- DestinationMilvusUpdateEmbeddingCohere *DestinationMilvusUpdateEmbeddingCohere
- DestinationMilvusUpdateEmbeddingFake *DestinationMilvusUpdateEmbeddingFake
- DestinationMilvusUpdateEmbeddingFromField *DestinationMilvusUpdateEmbeddingFromField
+ DestinationMilvusUpdateOpenAI *DestinationMilvusUpdateOpenAI
+ Cohere *Cohere
+ DestinationMilvusUpdateFake *DestinationMilvusUpdateFake
+ FromField *FromField
+ AzureOpenAI *AzureOpenAI
+ OpenAICompatible *OpenAICompatible
Type DestinationMilvusUpdateEmbeddingType
}
-func CreateDestinationMilvusUpdateEmbeddingDestinationMilvusUpdateEmbeddingOpenAI(destinationMilvusUpdateEmbeddingOpenAI DestinationMilvusUpdateEmbeddingOpenAI) DestinationMilvusUpdateEmbedding {
- typ := DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingOpenAI
+func CreateDestinationMilvusUpdateEmbeddingDestinationMilvusUpdateOpenAI(destinationMilvusUpdateOpenAI DestinationMilvusUpdateOpenAI) DestinationMilvusUpdateEmbedding {
+ typ := DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateOpenAI
+
+ return DestinationMilvusUpdateEmbedding{
+ DestinationMilvusUpdateOpenAI: &destinationMilvusUpdateOpenAI,
+ Type: typ,
+ }
+}
+
+func CreateDestinationMilvusUpdateEmbeddingCohere(cohere Cohere) DestinationMilvusUpdateEmbedding {
+ typ := DestinationMilvusUpdateEmbeddingTypeCohere
return DestinationMilvusUpdateEmbedding{
- DestinationMilvusUpdateEmbeddingOpenAI: &destinationMilvusUpdateEmbeddingOpenAI,
- Type: typ,
+ Cohere: &cohere,
+ Type: typ,
}
}
-func CreateDestinationMilvusUpdateEmbeddingDestinationMilvusUpdateEmbeddingCohere(destinationMilvusUpdateEmbeddingCohere DestinationMilvusUpdateEmbeddingCohere) DestinationMilvusUpdateEmbedding {
- typ := DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingCohere
+func CreateDestinationMilvusUpdateEmbeddingDestinationMilvusUpdateFake(destinationMilvusUpdateFake DestinationMilvusUpdateFake) DestinationMilvusUpdateEmbedding {
+ typ := DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateFake
return DestinationMilvusUpdateEmbedding{
- DestinationMilvusUpdateEmbeddingCohere: &destinationMilvusUpdateEmbeddingCohere,
- Type: typ,
+ DestinationMilvusUpdateFake: &destinationMilvusUpdateFake,
+ Type: typ,
}
}
-func CreateDestinationMilvusUpdateEmbeddingDestinationMilvusUpdateEmbeddingFake(destinationMilvusUpdateEmbeddingFake DestinationMilvusUpdateEmbeddingFake) DestinationMilvusUpdateEmbedding {
- typ := DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFake
+func CreateDestinationMilvusUpdateEmbeddingFromField(fromField FromField) DestinationMilvusUpdateEmbedding {
+ typ := DestinationMilvusUpdateEmbeddingTypeFromField
return DestinationMilvusUpdateEmbedding{
- DestinationMilvusUpdateEmbeddingFake: &destinationMilvusUpdateEmbeddingFake,
- Type: typ,
+ FromField: &fromField,
+ Type: typ,
}
}
-func CreateDestinationMilvusUpdateEmbeddingDestinationMilvusUpdateEmbeddingFromField(destinationMilvusUpdateEmbeddingFromField DestinationMilvusUpdateEmbeddingFromField) DestinationMilvusUpdateEmbedding {
- typ := DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFromField
+func CreateDestinationMilvusUpdateEmbeddingAzureOpenAI(azureOpenAI AzureOpenAI) DestinationMilvusUpdateEmbedding {
+ typ := DestinationMilvusUpdateEmbeddingTypeAzureOpenAI
return DestinationMilvusUpdateEmbedding{
- DestinationMilvusUpdateEmbeddingFromField: &destinationMilvusUpdateEmbeddingFromField,
- Type: typ,
+ AzureOpenAI: &azureOpenAI,
+ Type: typ,
+ }
+}
+
+func CreateDestinationMilvusUpdateEmbeddingOpenAICompatible(openAICompatible OpenAICompatible) DestinationMilvusUpdateEmbedding {
+ typ := DestinationMilvusUpdateEmbeddingTypeOpenAICompatible
+
+ return DestinationMilvusUpdateEmbedding{
+ OpenAICompatible: &openAICompatible,
+ Type: typ,
}
}
func (u *DestinationMilvusUpdateEmbedding) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- destinationMilvusUpdateEmbeddingFake := new(DestinationMilvusUpdateEmbeddingFake)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusUpdateEmbeddingFake); err == nil {
- u.DestinationMilvusUpdateEmbeddingFake = destinationMilvusUpdateEmbeddingFake
- u.Type = DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFake
+ destinationMilvusUpdateFake := new(DestinationMilvusUpdateFake)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusUpdateFake, "", true, true); err == nil {
+ u.DestinationMilvusUpdateFake = destinationMilvusUpdateFake
+ u.Type = DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateFake
return nil
}
- destinationMilvusUpdateEmbeddingOpenAI := new(DestinationMilvusUpdateEmbeddingOpenAI)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusUpdateEmbeddingOpenAI); err == nil {
- u.DestinationMilvusUpdateEmbeddingOpenAI = destinationMilvusUpdateEmbeddingOpenAI
- u.Type = DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingOpenAI
+ destinationMilvusUpdateOpenAI := new(DestinationMilvusUpdateOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusUpdateOpenAI, "", true, true); err == nil {
+ u.DestinationMilvusUpdateOpenAI = destinationMilvusUpdateOpenAI
+ u.Type = DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateOpenAI
return nil
}
- destinationMilvusUpdateEmbeddingCohere := new(DestinationMilvusUpdateEmbeddingCohere)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusUpdateEmbeddingCohere); err == nil {
- u.DestinationMilvusUpdateEmbeddingCohere = destinationMilvusUpdateEmbeddingCohere
- u.Type = DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingCohere
+ cohere := new(Cohere)
+ if err := utils.UnmarshalJSON(data, &cohere, "", true, true); err == nil {
+ u.Cohere = cohere
+ u.Type = DestinationMilvusUpdateEmbeddingTypeCohere
return nil
}
- destinationMilvusUpdateEmbeddingFromField := new(DestinationMilvusUpdateEmbeddingFromField)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusUpdateEmbeddingFromField); err == nil {
- u.DestinationMilvusUpdateEmbeddingFromField = destinationMilvusUpdateEmbeddingFromField
- u.Type = DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFromField
+ fromField := new(FromField)
+ if err := utils.UnmarshalJSON(data, &fromField, "", true, true); err == nil {
+ u.FromField = fromField
+ u.Type = DestinationMilvusUpdateEmbeddingTypeFromField
+ return nil
+ }
+
+ azureOpenAI := new(AzureOpenAI)
+ if err := utils.UnmarshalJSON(data, &azureOpenAI, "", true, true); err == nil {
+ u.AzureOpenAI = azureOpenAI
+ u.Type = DestinationMilvusUpdateEmbeddingTypeAzureOpenAI
+ return nil
+ }
+
+ openAICompatible := new(OpenAICompatible)
+ if err := utils.UnmarshalJSON(data, &openAICompatible, "", true, true); err == nil {
+ u.OpenAICompatible = openAICompatible
+ u.Type = DestinationMilvusUpdateEmbeddingTypeOpenAICompatible
return nil
}
@@ -228,235 +493,756 @@ func (u *DestinationMilvusUpdateEmbedding) UnmarshalJSON(data []byte) error {
}
func (u DestinationMilvusUpdateEmbedding) MarshalJSON() ([]byte, error) {
- if u.DestinationMilvusUpdateEmbeddingFake != nil {
- return json.Marshal(u.DestinationMilvusUpdateEmbeddingFake)
+ if u.DestinationMilvusUpdateOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationMilvusUpdateOpenAI, "", true)
}
- if u.DestinationMilvusUpdateEmbeddingOpenAI != nil {
- return json.Marshal(u.DestinationMilvusUpdateEmbeddingOpenAI)
+ if u.Cohere != nil {
+ return utils.MarshalJSON(u.Cohere, "", true)
}
- if u.DestinationMilvusUpdateEmbeddingCohere != nil {
- return json.Marshal(u.DestinationMilvusUpdateEmbeddingCohere)
+ if u.DestinationMilvusUpdateFake != nil {
+ return utils.MarshalJSON(u.DestinationMilvusUpdateFake, "", true)
}
- if u.DestinationMilvusUpdateEmbeddingFromField != nil {
- return json.Marshal(u.DestinationMilvusUpdateEmbeddingFromField)
+ if u.FromField != nil {
+ return utils.MarshalJSON(u.FromField, "", true)
}
- return nil, nil
+ if u.AzureOpenAI != nil {
+ return utils.MarshalJSON(u.AzureOpenAI, "", true)
+ }
+
+ if u.OpenAICompatible != nil {
+ return utils.MarshalJSON(u.OpenAICompatible, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationMilvusUpdateIndexingAuthenticationNoAuthMode string
+type DestinationMilvusUpdateSchemasIndexingAuthAuthenticationMode string
const (
- DestinationMilvusUpdateIndexingAuthenticationNoAuthModeNoAuth DestinationMilvusUpdateIndexingAuthenticationNoAuthMode = "no_auth"
+ DestinationMilvusUpdateSchemasIndexingAuthAuthenticationModeNoAuth DestinationMilvusUpdateSchemasIndexingAuthAuthenticationMode = "no_auth"
)
-func (e DestinationMilvusUpdateIndexingAuthenticationNoAuthMode) ToPointer() *DestinationMilvusUpdateIndexingAuthenticationNoAuthMode {
+func (e DestinationMilvusUpdateSchemasIndexingAuthAuthenticationMode) ToPointer() *DestinationMilvusUpdateSchemasIndexingAuthAuthenticationMode {
return &e
}
-func (e *DestinationMilvusUpdateIndexingAuthenticationNoAuthMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusUpdateSchemasIndexingAuthAuthenticationMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "no_auth":
- *e = DestinationMilvusUpdateIndexingAuthenticationNoAuthMode(v)
+ *e = DestinationMilvusUpdateSchemasIndexingAuthAuthenticationMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusUpdateIndexingAuthenticationNoAuthMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusUpdateSchemasIndexingAuthAuthenticationMode: %v", v)
+ }
+}
+
+// NoAuth - Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)
+type NoAuth struct {
+ mode *DestinationMilvusUpdateSchemasIndexingAuthAuthenticationMode `const:"no_auth" json:"mode"`
+}
+
+func (n NoAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(n, "", false)
+}
+
+func (n *NoAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &n, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationMilvusUpdateIndexingAuthenticationNoAuth - Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)
-type DestinationMilvusUpdateIndexingAuthenticationNoAuth struct {
- Mode *DestinationMilvusUpdateIndexingAuthenticationNoAuthMode `json:"mode,omitempty"`
+func (o *NoAuth) GetMode() *DestinationMilvusUpdateSchemasIndexingAuthAuthenticationMode {
+ return DestinationMilvusUpdateSchemasIndexingAuthAuthenticationModeNoAuth.ToPointer()
}
-type DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode string
+type DestinationMilvusUpdateSchemasIndexingAuthMode string
const (
- DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordModeUsernamePassword DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode = "username_password"
+ DestinationMilvusUpdateSchemasIndexingAuthModeUsernamePassword DestinationMilvusUpdateSchemasIndexingAuthMode = "username_password"
)
-func (e DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode) ToPointer() *DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode {
+func (e DestinationMilvusUpdateSchemasIndexingAuthMode) ToPointer() *DestinationMilvusUpdateSchemasIndexingAuthMode {
return &e
}
-func (e *DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusUpdateSchemasIndexingAuthMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "username_password":
- *e = DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode(v)
+ *e = DestinationMilvusUpdateSchemasIndexingAuthMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusUpdateSchemasIndexingAuthMode: %v", v)
}
}
-// DestinationMilvusUpdateIndexingAuthenticationUsernamePassword - Authenticate using username and password (suitable for self-managed Milvus clusters)
-type DestinationMilvusUpdateIndexingAuthenticationUsernamePassword struct {
- Mode *DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode `json:"mode,omitempty"`
+// DestinationMilvusUpdateUsernamePassword - Authenticate using username and password (suitable for self-managed Milvus clusters)
+type DestinationMilvusUpdateUsernamePassword struct {
+ mode *DestinationMilvusUpdateSchemasIndexingAuthMode `const:"username_password" json:"mode"`
// Password for the Milvus instance
Password string `json:"password"`
// Username for the Milvus instance
Username string `json:"username"`
}
-type DestinationMilvusUpdateIndexingAuthenticationAPITokenMode string
+func (d DestinationMilvusUpdateUsernamePassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusUpdateUsernamePassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusUpdateUsernamePassword) GetMode() *DestinationMilvusUpdateSchemasIndexingAuthMode {
+ return DestinationMilvusUpdateSchemasIndexingAuthModeUsernamePassword.ToPointer()
+}
+
+func (o *DestinationMilvusUpdateUsernamePassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *DestinationMilvusUpdateUsernamePassword) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type DestinationMilvusUpdateSchemasIndexingMode string
const (
- DestinationMilvusUpdateIndexingAuthenticationAPITokenModeToken DestinationMilvusUpdateIndexingAuthenticationAPITokenMode = "token"
+ DestinationMilvusUpdateSchemasIndexingModeToken DestinationMilvusUpdateSchemasIndexingMode = "token"
)
-func (e DestinationMilvusUpdateIndexingAuthenticationAPITokenMode) ToPointer() *DestinationMilvusUpdateIndexingAuthenticationAPITokenMode {
+func (e DestinationMilvusUpdateSchemasIndexingMode) ToPointer() *DestinationMilvusUpdateSchemasIndexingMode {
return &e
}
-func (e *DestinationMilvusUpdateIndexingAuthenticationAPITokenMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationMilvusUpdateSchemasIndexingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "token":
- *e = DestinationMilvusUpdateIndexingAuthenticationAPITokenMode(v)
+ *e = DestinationMilvusUpdateSchemasIndexingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMilvusUpdateIndexingAuthenticationAPITokenMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationMilvusUpdateSchemasIndexingMode: %v", v)
}
}
-// DestinationMilvusUpdateIndexingAuthenticationAPIToken - Authenticate using an API token (suitable for Zilliz Cloud)
-type DestinationMilvusUpdateIndexingAuthenticationAPIToken struct {
- Mode *DestinationMilvusUpdateIndexingAuthenticationAPITokenMode `json:"mode,omitempty"`
+// DestinationMilvusUpdateAPIToken - Authenticate using an API token (suitable for Zilliz Cloud)
+type DestinationMilvusUpdateAPIToken struct {
+ mode *DestinationMilvusUpdateSchemasIndexingMode `const:"token" json:"mode"`
// API Token for the Milvus instance
Token string `json:"token"`
}
-type DestinationMilvusUpdateIndexingAuthenticationType string
+func (d DestinationMilvusUpdateAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusUpdateAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMilvusUpdateAPIToken) GetMode() *DestinationMilvusUpdateSchemasIndexingMode {
+ return DestinationMilvusUpdateSchemasIndexingModeToken.ToPointer()
+}
+
+func (o *DestinationMilvusUpdateAPIToken) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
+
+type DestinationMilvusUpdateAuthenticationType string
const (
- DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationAPIToken DestinationMilvusUpdateIndexingAuthenticationType = "destination-milvus-update_Indexing_Authentication_API Token"
- DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationUsernamePassword DestinationMilvusUpdateIndexingAuthenticationType = "destination-milvus-update_Indexing_Authentication_Username/Password"
- DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationNoAuth DestinationMilvusUpdateIndexingAuthenticationType = "destination-milvus-update_Indexing_Authentication_No auth"
+ DestinationMilvusUpdateAuthenticationTypeDestinationMilvusUpdateAPIToken DestinationMilvusUpdateAuthenticationType = "destination-milvus-update_API Token"
+ DestinationMilvusUpdateAuthenticationTypeDestinationMilvusUpdateUsernamePassword DestinationMilvusUpdateAuthenticationType = "destination-milvus-update_Username/Password"
+ DestinationMilvusUpdateAuthenticationTypeNoAuth DestinationMilvusUpdateAuthenticationType = "No auth"
)
-type DestinationMilvusUpdateIndexingAuthentication struct {
- DestinationMilvusUpdateIndexingAuthenticationAPIToken *DestinationMilvusUpdateIndexingAuthenticationAPIToken
- DestinationMilvusUpdateIndexingAuthenticationUsernamePassword *DestinationMilvusUpdateIndexingAuthenticationUsernamePassword
- DestinationMilvusUpdateIndexingAuthenticationNoAuth *DestinationMilvusUpdateIndexingAuthenticationNoAuth
+type DestinationMilvusUpdateAuthentication struct {
+ DestinationMilvusUpdateAPIToken *DestinationMilvusUpdateAPIToken
+ DestinationMilvusUpdateUsernamePassword *DestinationMilvusUpdateUsernamePassword
+ NoAuth *NoAuth
- Type DestinationMilvusUpdateIndexingAuthenticationType
+ Type DestinationMilvusUpdateAuthenticationType
}
-func CreateDestinationMilvusUpdateIndexingAuthenticationDestinationMilvusUpdateIndexingAuthenticationAPIToken(destinationMilvusUpdateIndexingAuthenticationAPIToken DestinationMilvusUpdateIndexingAuthenticationAPIToken) DestinationMilvusUpdateIndexingAuthentication {
- typ := DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationAPIToken
+func CreateDestinationMilvusUpdateAuthenticationDestinationMilvusUpdateAPIToken(destinationMilvusUpdateAPIToken DestinationMilvusUpdateAPIToken) DestinationMilvusUpdateAuthentication {
+ typ := DestinationMilvusUpdateAuthenticationTypeDestinationMilvusUpdateAPIToken
- return DestinationMilvusUpdateIndexingAuthentication{
- DestinationMilvusUpdateIndexingAuthenticationAPIToken: &destinationMilvusUpdateIndexingAuthenticationAPIToken,
- Type: typ,
+ return DestinationMilvusUpdateAuthentication{
+ DestinationMilvusUpdateAPIToken: &destinationMilvusUpdateAPIToken,
+ Type: typ,
}
}
-func CreateDestinationMilvusUpdateIndexingAuthenticationDestinationMilvusUpdateIndexingAuthenticationUsernamePassword(destinationMilvusUpdateIndexingAuthenticationUsernamePassword DestinationMilvusUpdateIndexingAuthenticationUsernamePassword) DestinationMilvusUpdateIndexingAuthentication {
- typ := DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationUsernamePassword
+func CreateDestinationMilvusUpdateAuthenticationDestinationMilvusUpdateUsernamePassword(destinationMilvusUpdateUsernamePassword DestinationMilvusUpdateUsernamePassword) DestinationMilvusUpdateAuthentication {
+ typ := DestinationMilvusUpdateAuthenticationTypeDestinationMilvusUpdateUsernamePassword
- return DestinationMilvusUpdateIndexingAuthentication{
- DestinationMilvusUpdateIndexingAuthenticationUsernamePassword: &destinationMilvusUpdateIndexingAuthenticationUsernamePassword,
- Type: typ,
+ return DestinationMilvusUpdateAuthentication{
+ DestinationMilvusUpdateUsernamePassword: &destinationMilvusUpdateUsernamePassword,
+ Type: typ,
}
}
-func CreateDestinationMilvusUpdateIndexingAuthenticationDestinationMilvusUpdateIndexingAuthenticationNoAuth(destinationMilvusUpdateIndexingAuthenticationNoAuth DestinationMilvusUpdateIndexingAuthenticationNoAuth) DestinationMilvusUpdateIndexingAuthentication {
- typ := DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationNoAuth
+func CreateDestinationMilvusUpdateAuthenticationNoAuth(noAuth NoAuth) DestinationMilvusUpdateAuthentication {
+ typ := DestinationMilvusUpdateAuthenticationTypeNoAuth
- return DestinationMilvusUpdateIndexingAuthentication{
- DestinationMilvusUpdateIndexingAuthenticationNoAuth: &destinationMilvusUpdateIndexingAuthenticationNoAuth,
- Type: typ,
+ return DestinationMilvusUpdateAuthentication{
+ NoAuth: &noAuth,
+ Type: typ,
}
}
-func (u *DestinationMilvusUpdateIndexingAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationMilvusUpdateAuthentication) UnmarshalJSON(data []byte) error {
- destinationMilvusUpdateIndexingAuthenticationNoAuth := new(DestinationMilvusUpdateIndexingAuthenticationNoAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusUpdateIndexingAuthenticationNoAuth); err == nil {
- u.DestinationMilvusUpdateIndexingAuthenticationNoAuth = destinationMilvusUpdateIndexingAuthenticationNoAuth
- u.Type = DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationNoAuth
+ noAuth := new(NoAuth)
+ if err := utils.UnmarshalJSON(data, &noAuth, "", true, true); err == nil {
+ u.NoAuth = noAuth
+ u.Type = DestinationMilvusUpdateAuthenticationTypeNoAuth
return nil
}
- destinationMilvusUpdateIndexingAuthenticationAPIToken := new(DestinationMilvusUpdateIndexingAuthenticationAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusUpdateIndexingAuthenticationAPIToken); err == nil {
- u.DestinationMilvusUpdateIndexingAuthenticationAPIToken = destinationMilvusUpdateIndexingAuthenticationAPIToken
- u.Type = DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationAPIToken
+ destinationMilvusUpdateAPIToken := new(DestinationMilvusUpdateAPIToken)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusUpdateAPIToken, "", true, true); err == nil {
+ u.DestinationMilvusUpdateAPIToken = destinationMilvusUpdateAPIToken
+ u.Type = DestinationMilvusUpdateAuthenticationTypeDestinationMilvusUpdateAPIToken
return nil
}
- destinationMilvusUpdateIndexingAuthenticationUsernamePassword := new(DestinationMilvusUpdateIndexingAuthenticationUsernamePassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMilvusUpdateIndexingAuthenticationUsernamePassword); err == nil {
- u.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword = destinationMilvusUpdateIndexingAuthenticationUsernamePassword
- u.Type = DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationUsernamePassword
+ destinationMilvusUpdateUsernamePassword := new(DestinationMilvusUpdateUsernamePassword)
+ if err := utils.UnmarshalJSON(data, &destinationMilvusUpdateUsernamePassword, "", true, true); err == nil {
+ u.DestinationMilvusUpdateUsernamePassword = destinationMilvusUpdateUsernamePassword
+ u.Type = DestinationMilvusUpdateAuthenticationTypeDestinationMilvusUpdateUsernamePassword
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationMilvusUpdateIndexingAuthentication) MarshalJSON() ([]byte, error) {
- if u.DestinationMilvusUpdateIndexingAuthenticationNoAuth != nil {
- return json.Marshal(u.DestinationMilvusUpdateIndexingAuthenticationNoAuth)
+func (u DestinationMilvusUpdateAuthentication) MarshalJSON() ([]byte, error) {
+ if u.DestinationMilvusUpdateAPIToken != nil {
+ return utils.MarshalJSON(u.DestinationMilvusUpdateAPIToken, "", true)
}
- if u.DestinationMilvusUpdateIndexingAuthenticationAPIToken != nil {
- return json.Marshal(u.DestinationMilvusUpdateIndexingAuthenticationAPIToken)
+ if u.DestinationMilvusUpdateUsernamePassword != nil {
+ return utils.MarshalJSON(u.DestinationMilvusUpdateUsernamePassword, "", true)
}
- if u.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword != nil {
- return json.Marshal(u.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword)
+ if u.NoAuth != nil {
+ return utils.MarshalJSON(u.NoAuth, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
 // DestinationMilvusUpdateIndexing - Indexing configuration
 type DestinationMilvusUpdateIndexing struct {
 	// Authentication method
-	Auth DestinationMilvusUpdateIndexingAuthentication `json:"auth"`
+	Auth DestinationMilvusUpdateAuthentication `json:"auth"` // union: API token, username/password, or no auth
 	// The collection to load data into
 	Collection string `json:"collection"`
 	// The database to connect to
-	Db *string `json:"db,omitempty"`
+	Db *string `default:"" json:"db"` // default "" — NOTE(review): presumably means "use the server's default db"; confirm against connector spec
 	// The public endpoint of the Milvus instance.
 	Host string `json:"host"`
 	// The field in the entity that contains the embedded text
-	TextField *string `json:"text_field,omitempty"`
+	TextField *string `default:"text" json:"text_field"` // defaults below come from the `default` tags, applied by the utils (un)marshal helpers
 	// The field in the entity that contains the vector
-	VectorField *string `json:"vector_field,omitempty"`
+	VectorField *string `default:"vector" json:"vector_field"`
+}
+
+func (d DestinationMilvusUpdateIndexing) MarshalJSON() ([]byte, error) { // NOTE(review): utils.MarshalJSON presumably applies the `default` struct tags — confirm in sdk/pkg/utils
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusUpdateIndexing) UnmarshalJSON(data []byte) error { // NOTE(review): the trailing bools presumably control strict/unknown-field handling — confirm in sdk/pkg/utils
+	if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *DestinationMilvusUpdateIndexing) GetAuth() DestinationMilvusUpdateAuthentication { // nil-safe accessors below: each returns the zero value on a nil receiver
+	if o == nil {
+		return DestinationMilvusUpdateAuthentication{}
+	}
+	return o.Auth
+}
+
+func (o *DestinationMilvusUpdateIndexing) GetCollection() string {
+	if o == nil {
+		return ""
+	}
+	return o.Collection
+}
+
+func (o *DestinationMilvusUpdateIndexing) GetDb() *string {
+	if o == nil {
+		return nil
+	}
+	return o.Db
+}
+
+func (o *DestinationMilvusUpdateIndexing) GetHost() string {
+	if o == nil {
+		return ""
+	}
+	return o.Host
+}
+
+func (o *DestinationMilvusUpdateIndexing) GetTextField() *string {
+	if o == nil {
+		return nil
+	}
+	return o.TextField
+}
+
+func (o *DestinationMilvusUpdateIndexing) GetVectorField() *string {
+	if o == nil {
+		return nil
+	}
+	return o.VectorField
+}
+
+type FieldNameMappingConfigModel struct { // one source-field → destination-field rename rule
+	// The field name in the source
+	FromField string `json:"from_field"`
+	// The field name to use in the destination
+	ToField string `json:"to_field"`
+}
+
+func (o *FieldNameMappingConfigModel) GetFromField() string { // nil-safe accessor
+	if o == nil {
+		return ""
+	}
+	return o.FromField
+}
+
+func (o *FieldNameMappingConfigModel) GetToField() string { // nil-safe accessor
+	if o == nil {
+		return ""
+	}
+	return o.ToField
+}
+
+// DestinationMilvusUpdateLanguage - Split code in suitable places based on the programming language
+type DestinationMilvusUpdateLanguage string // closed enum; UnmarshalJSON below rejects values outside the listed set
+
+const (
+	DestinationMilvusUpdateLanguageCpp DestinationMilvusUpdateLanguage = "cpp"
+	DestinationMilvusUpdateLanguageGo DestinationMilvusUpdateLanguage = "go"
+	DestinationMilvusUpdateLanguageJava DestinationMilvusUpdateLanguage = "java"
+	DestinationMilvusUpdateLanguageJs DestinationMilvusUpdateLanguage = "js"
+	DestinationMilvusUpdateLanguagePhp DestinationMilvusUpdateLanguage = "php"
+	DestinationMilvusUpdateLanguageProto DestinationMilvusUpdateLanguage = "proto"
+	DestinationMilvusUpdateLanguagePython DestinationMilvusUpdateLanguage = "python"
+	DestinationMilvusUpdateLanguageRst DestinationMilvusUpdateLanguage = "rst"
+	DestinationMilvusUpdateLanguageRuby DestinationMilvusUpdateLanguage = "ruby"
+	DestinationMilvusUpdateLanguageRust DestinationMilvusUpdateLanguage = "rust"
+	DestinationMilvusUpdateLanguageScala DestinationMilvusUpdateLanguage = "scala"
+	DestinationMilvusUpdateLanguageSwift DestinationMilvusUpdateLanguage = "swift"
+	DestinationMilvusUpdateLanguageMarkdown DestinationMilvusUpdateLanguage = "markdown"
+	DestinationMilvusUpdateLanguageLatex DestinationMilvusUpdateLanguage = "latex"
+	DestinationMilvusUpdateLanguageHTML DestinationMilvusUpdateLanguage = "html"
+	DestinationMilvusUpdateLanguageSol DestinationMilvusUpdateLanguage = "sol"
+)
+
+func (e DestinationMilvusUpdateLanguage) ToPointer() *DestinationMilvusUpdateLanguage {
+	return &e
+}
+
+func (e *DestinationMilvusUpdateLanguage) UnmarshalJSON(data []byte) error { // validating decode: only the enumerated language strings are accepted
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "cpp":
+		fallthrough
+	case "go":
+		fallthrough
+	case "java":
+		fallthrough
+	case "js":
+		fallthrough
+	case "php":
+		fallthrough
+	case "proto":
+		fallthrough
+	case "python":
+		fallthrough
+	case "rst":
+		fallthrough
+	case "ruby":
+		fallthrough
+	case "rust":
+		fallthrough
+	case "scala":
+		fallthrough
+	case "swift":
+		fallthrough
+	case "markdown":
+		fallthrough
+	case "latex":
+		fallthrough
+	case "html":
+		fallthrough
+	case "sol":
+		*e = DestinationMilvusUpdateLanguage(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationMilvusUpdateLanguage: %v", v)
+	}
+}
+
+type DestinationMilvusUpdateSchemasProcessingTextSplitterTextSplitterMode string // JSON discriminator for the "By Programming Language" splitter; only legal value is "code"
+
+const (
+	DestinationMilvusUpdateSchemasProcessingTextSplitterTextSplitterModeCode DestinationMilvusUpdateSchemasProcessingTextSplitterTextSplitterMode = "code"
+)
+
+func (e DestinationMilvusUpdateSchemasProcessingTextSplitterTextSplitterMode) ToPointer() *DestinationMilvusUpdateSchemasProcessingTextSplitterTextSplitterMode {
+	return &e
+}
+
+func (e *DestinationMilvusUpdateSchemasProcessingTextSplitterTextSplitterMode) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "code":
+		*e = DestinationMilvusUpdateSchemasProcessingTextSplitterTextSplitterMode(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationMilvusUpdateSchemasProcessingTextSplitterTextSplitterMode: %v", v)
+	}
+}
+
+// ByProgrammingLanguage - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
+type ByProgrammingLanguage struct {
+	// Split code in suitable places based on the programming language
+	Language DestinationMilvusUpdateLanguage `json:"language"`
+	mode *DestinationMilvusUpdateSchemasProcessingTextSplitterTextSplitterMode `const:"code" json:"mode"` // unexported const discriminator; NOTE(review): presumably handled by the utils helpers despite being unexported — confirm
+}
+
+func (b ByProgrammingLanguage) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(b, "", false)
+}
+
+func (b *ByProgrammingLanguage) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &b, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *ByProgrammingLanguage) GetLanguage() DestinationMilvusUpdateLanguage {
+	if o == nil {
+		return DestinationMilvusUpdateLanguage("")
+	}
+	return o.Language
+}
+
+func (o *ByProgrammingLanguage) GetMode() *DestinationMilvusUpdateSchemasProcessingTextSplitterTextSplitterMode { // always returns the "code" discriminator, regardless of receiver state
+	return DestinationMilvusUpdateSchemasProcessingTextSplitterTextSplitterModeCode.ToPointer()
+}
+
+type DestinationMilvusUpdateSchemasProcessingTextSplitterMode string // JSON discriminator for the "By Markdown header" splitter; only legal value is "markdown"
+
+const (
+	DestinationMilvusUpdateSchemasProcessingTextSplitterModeMarkdown DestinationMilvusUpdateSchemasProcessingTextSplitterMode = "markdown"
+)
+
+func (e DestinationMilvusUpdateSchemasProcessingTextSplitterMode) ToPointer() *DestinationMilvusUpdateSchemasProcessingTextSplitterMode {
+	return &e
+}
+
+func (e *DestinationMilvusUpdateSchemasProcessingTextSplitterMode) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "markdown":
+		*e = DestinationMilvusUpdateSchemasProcessingTextSplitterMode(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationMilvusUpdateSchemasProcessingTextSplitterMode: %v", v)
+	}
+}
+
+// ByMarkdownHeader - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
+type ByMarkdownHeader struct {
+	mode *DestinationMilvusUpdateSchemasProcessingTextSplitterMode `const:"markdown" json:"mode"` // unexported const discriminator for union matching
+	// Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
+	SplitLevel *int64 `default:"1" json:"split_level"` // defaults to 1 (top-level headings) via the `default` tag
+}
+
+func (b ByMarkdownHeader) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(b, "", false)
+}
+
+func (b *ByMarkdownHeader) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &b, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *ByMarkdownHeader) GetMode() *DestinationMilvusUpdateSchemasProcessingTextSplitterMode { // always returns the "markdown" discriminator, regardless of receiver state
+	return DestinationMilvusUpdateSchemasProcessingTextSplitterModeMarkdown.ToPointer()
+}
+
+func (o *ByMarkdownHeader) GetSplitLevel() *int64 {
+	if o == nil {
+		return nil
+	}
+	return o.SplitLevel
+}
+
+type DestinationMilvusUpdateSchemasProcessingMode string // JSON discriminator for the "By Separator" splitter; only legal value is "separator"
+
+const (
+	DestinationMilvusUpdateSchemasProcessingModeSeparator DestinationMilvusUpdateSchemasProcessingMode = "separator"
+)
+
+func (e DestinationMilvusUpdateSchemasProcessingMode) ToPointer() *DestinationMilvusUpdateSchemasProcessingMode {
+	return &e
+}
+
+func (e *DestinationMilvusUpdateSchemasProcessingMode) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "separator":
+		*e = DestinationMilvusUpdateSchemasProcessingMode(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationMilvusUpdateSchemasProcessingMode: %v", v)
+	}
+}
+
+// BySeparator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
+type BySeparator struct {
+	// Whether to keep the separator in the resulting chunks
+	KeepSeparator *bool `default:"false" json:"keep_separator"` // defaults to false via the `default` tag
+	mode *DestinationMilvusUpdateSchemasProcessingMode `const:"separator" json:"mode"` // unexported const discriminator for union matching
+	// List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
+	Separators []string `json:"separators,omitempty"`
+}
+
+func (b BySeparator) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(b, "", false)
+}
+
+func (b *BySeparator) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &b, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *BySeparator) GetKeepSeparator() *bool {
+	if o == nil {
+		return nil
+	}
+	return o.KeepSeparator
+}
+
+func (o *BySeparator) GetMode() *DestinationMilvusUpdateSchemasProcessingMode { // always returns the "separator" discriminator, regardless of receiver state
+	return DestinationMilvusUpdateSchemasProcessingModeSeparator.ToPointer()
+}
+
+func (o *BySeparator) GetSeparators() []string {
+	if o == nil {
+		return nil
+	}
+	return o.Separators
+}
+
+type TextSplitterType string // internal tag recording which union variant is populated (not serialized)
+
+const (
+	TextSplitterTypeBySeparator TextSplitterType = "By Separator"
+	TextSplitterTypeByMarkdownHeader TextSplitterType = "By Markdown header"
+	TextSplitterTypeByProgrammingLanguage TextSplitterType = "By Programming Language"
+)
+
+type TextSplitter struct { // tagged union: exactly one of the three variant pointers should be non-nil
+	BySeparator *BySeparator
+	ByMarkdownHeader *ByMarkdownHeader
+	ByProgrammingLanguage *ByProgrammingLanguage
+
+	Type TextSplitterType
+}
+
+func CreateTextSplitterBySeparator(bySeparator BySeparator) TextSplitter { // constructor setting both the variant pointer and the Type tag
+	typ := TextSplitterTypeBySeparator
+
+	return TextSplitter{
+		BySeparator: &bySeparator,
+		Type: typ,
+	}
+}
+
+func CreateTextSplitterByMarkdownHeader(byMarkdownHeader ByMarkdownHeader) TextSplitter {
+	typ := TextSplitterTypeByMarkdownHeader
+
+	return TextSplitter{
+		ByMarkdownHeader: &byMarkdownHeader,
+		Type: typ,
+	}
+}
+
+func CreateTextSplitterByProgrammingLanguage(byProgrammingLanguage ByProgrammingLanguage) TextSplitter {
+	typ := TextSplitterTypeByProgrammingLanguage
+
+	return TextSplitter{
+		ByProgrammingLanguage: &byProgrammingLanguage,
+		Type: typ,
+	}
+}
+
+func (u *TextSplitter) UnmarshalJSON(data []byte) error { // NOTE(review): variants are tried in order; the const "mode" discriminator on each variant presumably makes matching unambiguous — confirm strictness in utils.UnmarshalJSON
+
+	byMarkdownHeader := new(ByMarkdownHeader)
+	if err := utils.UnmarshalJSON(data, &byMarkdownHeader, "", true, true); err == nil {
+		u.ByMarkdownHeader = byMarkdownHeader
+		u.Type = TextSplitterTypeByMarkdownHeader
+		return nil
+	}
+
+	byProgrammingLanguage := new(ByProgrammingLanguage)
+	if err := utils.UnmarshalJSON(data, &byProgrammingLanguage, "", true, true); err == nil {
+		u.ByProgrammingLanguage = byProgrammingLanguage
+		u.Type = TextSplitterTypeByProgrammingLanguage
+		return nil
+	}
+
+	bySeparator := new(BySeparator)
+	if err := utils.UnmarshalJSON(data, &bySeparator, "", true, true); err == nil {
+		u.BySeparator = bySeparator
+		u.Type = TextSplitterTypeBySeparator
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+func (u TextSplitter) MarshalJSON() ([]byte, error) { // serialize whichever single union variant is non-nil
+	if u.BySeparator != nil {
+		return utils.MarshalJSON(u.BySeparator, "", true)
+	}
+
+	if u.ByMarkdownHeader != nil {
+		return utils.MarshalJSON(u.ByMarkdownHeader, "", true)
+	}
+
+	if u.ByProgrammingLanguage != nil {
+		return utils.MarshalJSON(u.ByProgrammingLanguage, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
 }
type DestinationMilvusUpdateProcessingConfigModel struct {
// Size of overlap between chunks in tokens to store in vector store to better capture relevant context
- ChunkOverlap *int64 `json:"chunk_overlap,omitempty"`
+ ChunkOverlap *int64 `default:"0" json:"chunk_overlap"`
// Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)
ChunkSize int64 `json:"chunk_size"`
+ // List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
+ FieldNameMappings []FieldNameMappingConfigModel `json:"field_name_mappings,omitempty"`
// List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
MetadataFields []string `json:"metadata_fields,omitempty"`
// List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
TextFields []string `json:"text_fields,omitempty"`
+ // Split text fields into chunks based on the specified method.
+ TextSplitter *TextSplitter `json:"text_splitter,omitempty"`
+}
+
+func (d DestinationMilvusUpdateProcessingConfigModel) MarshalJSON() ([]byte, error) { // NOTE(review): utils.MarshalJSON presumably applies the `default` struct tags — confirm in sdk/pkg/utils
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMilvusUpdateProcessingConfigModel) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *DestinationMilvusUpdateProcessingConfigModel) GetChunkOverlap() *int64 { // nil-safe accessors below: each returns the zero value on a nil receiver
+	if o == nil {
+		return nil
+	}
+	return o.ChunkOverlap
+}
+
+func (o *DestinationMilvusUpdateProcessingConfigModel) GetChunkSize() int64 {
+	if o == nil {
+		return 0
+	}
+	return o.ChunkSize
+}
+
+func (o *DestinationMilvusUpdateProcessingConfigModel) GetFieldNameMappings() []FieldNameMappingConfigModel {
+	if o == nil {
+		return nil
+	}
+	return o.FieldNameMappings
+}
+
+func (o *DestinationMilvusUpdateProcessingConfigModel) GetMetadataFields() []string {
+	if o == nil {
+		return nil
+	}
+	return o.MetadataFields
+}
+
+func (o *DestinationMilvusUpdateProcessingConfigModel) GetTextFields() []string {
+	if o == nil {
+		return nil
+	}
+	return o.TextFields
+}
+
+func (o *DestinationMilvusUpdateProcessingConfigModel) GetTextSplitter() *TextSplitter {
+	if o == nil {
+		return nil
+	}
+	return o.TextSplitter
 }
type DestinationMilvusUpdate struct {
@@ -466,3 +1252,24 @@ type DestinationMilvusUpdate struct {
Indexing DestinationMilvusUpdateIndexing `json:"indexing"`
Processing DestinationMilvusUpdateProcessingConfigModel `json:"processing"`
}
+
+func (o *DestinationMilvusUpdate) GetEmbedding() DestinationMilvusUpdateEmbedding { // nil-safe accessors: zero value on a nil receiver
+	if o == nil {
+		return DestinationMilvusUpdateEmbedding{}
+	}
+	return o.Embedding
+}
+
+func (o *DestinationMilvusUpdate) GetIndexing() DestinationMilvusUpdateIndexing {
+	if o == nil {
+		return DestinationMilvusUpdateIndexing{}
+	}
+	return o.Indexing
+}
+
+func (o *DestinationMilvusUpdate) GetProcessing() DestinationMilvusUpdateProcessingConfigModel {
+	if o == nil {
+		return DestinationMilvusUpdateProcessingConfigModel{}
+	}
+	return o.Processing
+}
diff --git a/internal/sdk/pkg/models/shared/destinationmongodb.go b/internal/sdk/pkg/models/shared/destinationmongodb.go
old mode 100755
new mode 100644
index b3edae1a1..cae92a805
--- a/internal/sdk/pkg/models/shared/destinationmongodb.go
+++ b/internal/sdk/pkg/models/shared/destinationmongodb.go
@@ -3,124 +3,163 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationMongodbAuthorizationTypeLoginPasswordAuthorization string
+type DestinationMongodbSchemasAuthorization string
const (
- DestinationMongodbAuthorizationTypeLoginPasswordAuthorizationLoginPassword DestinationMongodbAuthorizationTypeLoginPasswordAuthorization = "login/password"
+ DestinationMongodbSchemasAuthorizationLoginPassword DestinationMongodbSchemasAuthorization = "login/password"
)
-func (e DestinationMongodbAuthorizationTypeLoginPasswordAuthorization) ToPointer() *DestinationMongodbAuthorizationTypeLoginPasswordAuthorization {
+func (e DestinationMongodbSchemasAuthorization) ToPointer() *DestinationMongodbSchemasAuthorization {
return &e
}
-func (e *DestinationMongodbAuthorizationTypeLoginPasswordAuthorization) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbSchemasAuthorization) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "login/password":
- *e = DestinationMongodbAuthorizationTypeLoginPasswordAuthorization(v)
+ *e = DestinationMongodbSchemasAuthorization(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbAuthorizationTypeLoginPasswordAuthorization: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbSchemasAuthorization: %v", v)
}
}
-// DestinationMongodbAuthorizationTypeLoginPassword - Login/Password.
-type DestinationMongodbAuthorizationTypeLoginPassword struct {
- Authorization DestinationMongodbAuthorizationTypeLoginPasswordAuthorization `json:"authorization"`
+// DestinationMongodbLoginPassword - Login/Password.
+type DestinationMongodbLoginPassword struct {
+ authorization DestinationMongodbSchemasAuthorization `const:"login/password" json:"authorization"`
// Password associated with the username.
Password string `json:"password"`
// Username to use to access the database.
Username string `json:"username"`
}
-type DestinationMongodbAuthorizationTypeNoneAuthorization string
+func (d DestinationMongodbLoginPassword) MarshalJSON() ([]byte, error) { // NOTE(review): utils helper presumably emits the const "authorization" discriminator — confirm in sdk/pkg/utils
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodbLoginPassword) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *DestinationMongodbLoginPassword) GetAuthorization() DestinationMongodbSchemasAuthorization { // always the "login/password" discriminator, regardless of receiver state
+	return DestinationMongodbSchemasAuthorizationLoginPassword
+}
+
+func (o *DestinationMongodbLoginPassword) GetPassword() string { // nil-safe accessor
+	if o == nil {
+		return ""
+	}
+	return o.Password
+}
+
+func (o *DestinationMongodbLoginPassword) GetUsername() string { // nil-safe accessor
+	if o == nil {
+		return ""
+	}
+	return o.Username
+}
+
+type DestinationMongodbAuthorization string
const (
- DestinationMongodbAuthorizationTypeNoneAuthorizationNone DestinationMongodbAuthorizationTypeNoneAuthorization = "none"
+ DestinationMongodbAuthorizationNone DestinationMongodbAuthorization = "none"
)
-func (e DestinationMongodbAuthorizationTypeNoneAuthorization) ToPointer() *DestinationMongodbAuthorizationTypeNoneAuthorization {
+func (e DestinationMongodbAuthorization) ToPointer() *DestinationMongodbAuthorization {
return &e
}
-func (e *DestinationMongodbAuthorizationTypeNoneAuthorization) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbAuthorization) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "none":
- *e = DestinationMongodbAuthorizationTypeNoneAuthorization(v)
+ *e = DestinationMongodbAuthorization(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbAuthorizationTypeNoneAuthorization: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbAuthorization: %v", v)
+ }
+}
+
+// DestinationMongodbNone - None.
+type DestinationMongodbNone struct {
+ authorization DestinationMongodbAuthorization `const:"none" json:"authorization"`
+}
+
+func (d DestinationMongodbNone) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodbNone) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationMongodbAuthorizationTypeNone - None.
-type DestinationMongodbAuthorizationTypeNone struct {
- Authorization DestinationMongodbAuthorizationTypeNoneAuthorization `json:"authorization"`
+func (o *DestinationMongodbNone) GetAuthorization() DestinationMongodbAuthorization {
+ return DestinationMongodbAuthorizationNone
}
type DestinationMongodbAuthorizationTypeType string
const (
- DestinationMongodbAuthorizationTypeTypeDestinationMongodbAuthorizationTypeNone DestinationMongodbAuthorizationTypeType = "destination-mongodb_Authorization type_None"
- DestinationMongodbAuthorizationTypeTypeDestinationMongodbAuthorizationTypeLoginPassword DestinationMongodbAuthorizationTypeType = "destination-mongodb_Authorization type_Login/Password"
+ DestinationMongodbAuthorizationTypeTypeDestinationMongodbNone DestinationMongodbAuthorizationTypeType = "destination-mongodb_None"
+ DestinationMongodbAuthorizationTypeTypeDestinationMongodbLoginPassword DestinationMongodbAuthorizationTypeType = "destination-mongodb_Login/Password"
)
type DestinationMongodbAuthorizationType struct {
- DestinationMongodbAuthorizationTypeNone *DestinationMongodbAuthorizationTypeNone
- DestinationMongodbAuthorizationTypeLoginPassword *DestinationMongodbAuthorizationTypeLoginPassword
+ DestinationMongodbNone *DestinationMongodbNone
+ DestinationMongodbLoginPassword *DestinationMongodbLoginPassword
Type DestinationMongodbAuthorizationTypeType
}
-func CreateDestinationMongodbAuthorizationTypeDestinationMongodbAuthorizationTypeNone(destinationMongodbAuthorizationTypeNone DestinationMongodbAuthorizationTypeNone) DestinationMongodbAuthorizationType {
- typ := DestinationMongodbAuthorizationTypeTypeDestinationMongodbAuthorizationTypeNone
+func CreateDestinationMongodbAuthorizationTypeDestinationMongodbNone(destinationMongodbNone DestinationMongodbNone) DestinationMongodbAuthorizationType {
+ typ := DestinationMongodbAuthorizationTypeTypeDestinationMongodbNone
return DestinationMongodbAuthorizationType{
- DestinationMongodbAuthorizationTypeNone: &destinationMongodbAuthorizationTypeNone,
- Type: typ,
+ DestinationMongodbNone: &destinationMongodbNone,
+ Type: typ,
}
}
-func CreateDestinationMongodbAuthorizationTypeDestinationMongodbAuthorizationTypeLoginPassword(destinationMongodbAuthorizationTypeLoginPassword DestinationMongodbAuthorizationTypeLoginPassword) DestinationMongodbAuthorizationType {
- typ := DestinationMongodbAuthorizationTypeTypeDestinationMongodbAuthorizationTypeLoginPassword
+func CreateDestinationMongodbAuthorizationTypeDestinationMongodbLoginPassword(destinationMongodbLoginPassword DestinationMongodbLoginPassword) DestinationMongodbAuthorizationType {
+ typ := DestinationMongodbAuthorizationTypeTypeDestinationMongodbLoginPassword
return DestinationMongodbAuthorizationType{
- DestinationMongodbAuthorizationTypeLoginPassword: &destinationMongodbAuthorizationTypeLoginPassword,
- Type: typ,
+ DestinationMongodbLoginPassword: &destinationMongodbLoginPassword,
+ Type: typ,
}
}
func (u *DestinationMongodbAuthorizationType) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationMongodbAuthorizationTypeNone := new(DestinationMongodbAuthorizationTypeNone)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbAuthorizationTypeNone); err == nil {
- u.DestinationMongodbAuthorizationTypeNone = destinationMongodbAuthorizationTypeNone
- u.Type = DestinationMongodbAuthorizationTypeTypeDestinationMongodbAuthorizationTypeNone
+
+ destinationMongodbNone := new(DestinationMongodbNone)
+ if err := utils.UnmarshalJSON(data, &destinationMongodbNone, "", true, true); err == nil {
+ u.DestinationMongodbNone = destinationMongodbNone
+ u.Type = DestinationMongodbAuthorizationTypeTypeDestinationMongodbNone
return nil
}
- destinationMongodbAuthorizationTypeLoginPassword := new(DestinationMongodbAuthorizationTypeLoginPassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbAuthorizationTypeLoginPassword); err == nil {
- u.DestinationMongodbAuthorizationTypeLoginPassword = destinationMongodbAuthorizationTypeLoginPassword
- u.Type = DestinationMongodbAuthorizationTypeTypeDestinationMongodbAuthorizationTypeLoginPassword
+ destinationMongodbLoginPassword := new(DestinationMongodbLoginPassword)
+ if err := utils.UnmarshalJSON(data, &destinationMongodbLoginPassword, "", true, true); err == nil {
+ u.DestinationMongodbLoginPassword = destinationMongodbLoginPassword
+ u.Type = DestinationMongodbAuthorizationTypeTypeDestinationMongodbLoginPassword
return nil
}
@@ -128,208 +167,290 @@ func (u *DestinationMongodbAuthorizationType) UnmarshalJSON(data []byte) error {
}
func (u DestinationMongodbAuthorizationType) MarshalJSON() ([]byte, error) {
- if u.DestinationMongodbAuthorizationTypeNone != nil {
- return json.Marshal(u.DestinationMongodbAuthorizationTypeNone)
+ if u.DestinationMongodbNone != nil {
+ return utils.MarshalJSON(u.DestinationMongodbNone, "", true)
}
- if u.DestinationMongodbAuthorizationTypeLoginPassword != nil {
- return json.Marshal(u.DestinationMongodbAuthorizationTypeLoginPassword)
+ if u.DestinationMongodbLoginPassword != nil {
+ return utils.MarshalJSON(u.DestinationMongodbLoginPassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationMongodbMongodb string
+type Mongodb string
const (
- DestinationMongodbMongodbMongodb DestinationMongodbMongodb = "mongodb"
+ MongodbMongodb Mongodb = "mongodb"
)
-func (e DestinationMongodbMongodb) ToPointer() *DestinationMongodbMongodb {
+func (e Mongodb) ToPointer() *Mongodb {
return &e
}
-func (e *DestinationMongodbMongodb) UnmarshalJSON(data []byte) error {
+func (e *Mongodb) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "mongodb":
- *e = DestinationMongodbMongodb(v)
+ *e = Mongodb(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbMongodb: %v", v)
+ return fmt.Errorf("invalid value for Mongodb: %v", v)
}
}
-type DestinationMongodbMongoDBInstanceTypeMongoDBAtlasInstance string
+type DestinationMongodbSchemasInstanceTypeInstance string
const (
- DestinationMongodbMongoDBInstanceTypeMongoDBAtlasInstanceAtlas DestinationMongodbMongoDBInstanceTypeMongoDBAtlasInstance = "atlas"
+ DestinationMongodbSchemasInstanceTypeInstanceAtlas DestinationMongodbSchemasInstanceTypeInstance = "atlas"
)
-func (e DestinationMongodbMongoDBInstanceTypeMongoDBAtlasInstance) ToPointer() *DestinationMongodbMongoDBInstanceTypeMongoDBAtlasInstance {
+func (e DestinationMongodbSchemasInstanceTypeInstance) ToPointer() *DestinationMongodbSchemasInstanceTypeInstance {
return &e
}
-func (e *DestinationMongodbMongoDBInstanceTypeMongoDBAtlasInstance) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbSchemasInstanceTypeInstance) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "atlas":
- *e = DestinationMongodbMongoDBInstanceTypeMongoDBAtlasInstance(v)
+ *e = DestinationMongodbSchemasInstanceTypeInstance(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbMongoDBInstanceTypeMongoDBAtlasInstance: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbSchemasInstanceTypeInstance: %v", v)
}
}
-// DestinationMongodbMongoDBInstanceTypeMongoDBAtlas - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type DestinationMongodbMongoDBInstanceTypeMongoDBAtlas struct {
+// DestinationMongodbMongoDBAtlas - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
+type DestinationMongodbMongoDBAtlas struct {
// URL of a cluster to connect to.
- ClusterURL string `json:"cluster_url"`
- Instance DestinationMongodbMongoDBInstanceTypeMongoDBAtlasInstance `json:"instance"`
+ ClusterURL string `json:"cluster_url"`
+ Instance *DestinationMongodbSchemasInstanceTypeInstance `default:"atlas" json:"instance"`
}
-type DestinationMongodbMongoDbInstanceTypeReplicaSetInstance string
+func (d DestinationMongodbMongoDBAtlas) MarshalJSON() ([]byte, error) { // NOTE(review): utils helper presumably applies the `default:"atlas"` instance tag — confirm in sdk/pkg/utils
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodbMongoDBAtlas) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *DestinationMongodbMongoDBAtlas) GetClusterURL() string { // nil-safe accessor
+	if o == nil {
+		return ""
+	}
+	return o.ClusterURL
+}
+
+func (o *DestinationMongodbMongoDBAtlas) GetInstance() *DestinationMongodbSchemasInstanceTypeInstance { // nil-safe accessor
+	if o == nil {
+		return nil
+	}
+	return o.Instance
+}
+
+type DestinationMongodbSchemasInstance string
const (
- DestinationMongodbMongoDbInstanceTypeReplicaSetInstanceReplica DestinationMongodbMongoDbInstanceTypeReplicaSetInstance = "replica"
+ DestinationMongodbSchemasInstanceReplica DestinationMongodbSchemasInstance = "replica"
)
-func (e DestinationMongodbMongoDbInstanceTypeReplicaSetInstance) ToPointer() *DestinationMongodbMongoDbInstanceTypeReplicaSetInstance {
+func (e DestinationMongodbSchemasInstance) ToPointer() *DestinationMongodbSchemasInstance {
return &e
}
-func (e *DestinationMongodbMongoDbInstanceTypeReplicaSetInstance) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbSchemasInstance) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "replica":
- *e = DestinationMongodbMongoDbInstanceTypeReplicaSetInstance(v)
+ *e = DestinationMongodbSchemasInstance(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbMongoDbInstanceTypeReplicaSetInstance: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbSchemasInstance: %v", v)
}
}
-// DestinationMongodbMongoDbInstanceTypeReplicaSet - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type DestinationMongodbMongoDbInstanceTypeReplicaSet struct {
- Instance DestinationMongodbMongoDbInstanceTypeReplicaSetInstance `json:"instance"`
+// DestinationMongodbReplicaSet - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
+type DestinationMongodbReplicaSet struct {
+ Instance *DestinationMongodbSchemasInstance `default:"replica" json:"instance"`
// A replica set name.
ReplicaSet *string `json:"replica_set,omitempty"`
// The members of a replica set. Please specify `host`:`port` of each member seperated by comma.
ServerAddresses string `json:"server_addresses"`
}
-type DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance string
+func (d DestinationMongodbReplicaSet) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodbReplicaSet) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMongodbReplicaSet) GetInstance() *DestinationMongodbSchemasInstance {
+ if o == nil {
+ return nil
+ }
+ return o.Instance
+}
+
+func (o *DestinationMongodbReplicaSet) GetReplicaSet() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicaSet
+}
+
+func (o *DestinationMongodbReplicaSet) GetServerAddresses() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServerAddresses
+}
+
+type DestinationMongodbInstance string
const (
- DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstanceStandalone DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance = "standalone"
+ DestinationMongodbInstanceStandalone DestinationMongodbInstance = "standalone"
)
-func (e DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance) ToPointer() *DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance {
+func (e DestinationMongodbInstance) ToPointer() *DestinationMongodbInstance {
return &e
}
-func (e *DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbInstance) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "standalone":
- *e = DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance(v)
+ *e = DestinationMongodbInstance(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbInstance: %v", v)
}
}
-// DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance struct {
+// DestinationMongodbStandaloneMongoDbInstance - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
+type DestinationMongodbStandaloneMongoDbInstance struct {
// The Host of a Mongo database to be replicated.
- Host string `json:"host"`
- Instance DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance `json:"instance"`
+ Host string `json:"host"`
+ Instance *DestinationMongodbInstance `default:"standalone" json:"instance"`
// The Port of a Mongo database to be replicated.
- Port int64 `json:"port"`
+ Port *int64 `default:"27017" json:"port"`
+}
+
+func (d DestinationMongodbStandaloneMongoDbInstance) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodbStandaloneMongoDbInstance) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMongodbStandaloneMongoDbInstance) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationMongodbStandaloneMongoDbInstance) GetInstance() *DestinationMongodbInstance {
+ if o == nil {
+ return nil
+ }
+ return o.Instance
+}
+
+func (o *DestinationMongodbStandaloneMongoDbInstance) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
}
type DestinationMongodbMongoDbInstanceTypeType string
const (
- DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance DestinationMongodbMongoDbInstanceTypeType = "destination-mongodb_MongoDb Instance Type_Standalone MongoDb Instance"
- DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDbInstanceTypeReplicaSet DestinationMongodbMongoDbInstanceTypeType = "destination-mongodb_MongoDb Instance Type_Replica Set"
- DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDBInstanceTypeMongoDBAtlas DestinationMongodbMongoDbInstanceTypeType = "destination-mongodb_MongoDb Instance Type_MongoDB Atlas"
+ DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbStandaloneMongoDbInstance DestinationMongodbMongoDbInstanceTypeType = "destination-mongodb_Standalone MongoDb Instance"
+ DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbReplicaSet DestinationMongodbMongoDbInstanceTypeType = "destination-mongodb_Replica Set"
+ DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDBAtlas DestinationMongodbMongoDbInstanceTypeType = "destination-mongodb_MongoDB Atlas"
)
type DestinationMongodbMongoDbInstanceType struct {
- DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance *DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance
- DestinationMongodbMongoDbInstanceTypeReplicaSet *DestinationMongodbMongoDbInstanceTypeReplicaSet
- DestinationMongodbMongoDBInstanceTypeMongoDBAtlas *DestinationMongodbMongoDBInstanceTypeMongoDBAtlas
+ DestinationMongodbStandaloneMongoDbInstance *DestinationMongodbStandaloneMongoDbInstance
+ DestinationMongodbReplicaSet *DestinationMongodbReplicaSet
+ DestinationMongodbMongoDBAtlas *DestinationMongodbMongoDBAtlas
Type DestinationMongodbMongoDbInstanceTypeType
}
-func CreateDestinationMongodbMongoDbInstanceTypeDestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance(destinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance) DestinationMongodbMongoDbInstanceType {
- typ := DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance
+func CreateDestinationMongodbMongoDbInstanceTypeDestinationMongodbStandaloneMongoDbInstance(destinationMongodbStandaloneMongoDbInstance DestinationMongodbStandaloneMongoDbInstance) DestinationMongodbMongoDbInstanceType {
+ typ := DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbStandaloneMongoDbInstance
return DestinationMongodbMongoDbInstanceType{
- DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance: &destinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance,
+ DestinationMongodbStandaloneMongoDbInstance: &destinationMongodbStandaloneMongoDbInstance,
Type: typ,
}
}
-func CreateDestinationMongodbMongoDbInstanceTypeDestinationMongodbMongoDbInstanceTypeReplicaSet(destinationMongodbMongoDbInstanceTypeReplicaSet DestinationMongodbMongoDbInstanceTypeReplicaSet) DestinationMongodbMongoDbInstanceType {
- typ := DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDbInstanceTypeReplicaSet
+func CreateDestinationMongodbMongoDbInstanceTypeDestinationMongodbReplicaSet(destinationMongodbReplicaSet DestinationMongodbReplicaSet) DestinationMongodbMongoDbInstanceType {
+ typ := DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbReplicaSet
return DestinationMongodbMongoDbInstanceType{
- DestinationMongodbMongoDbInstanceTypeReplicaSet: &destinationMongodbMongoDbInstanceTypeReplicaSet,
- Type: typ,
+ DestinationMongodbReplicaSet: &destinationMongodbReplicaSet,
+ Type: typ,
}
}
-func CreateDestinationMongodbMongoDbInstanceTypeDestinationMongodbMongoDBInstanceTypeMongoDBAtlas(destinationMongodbMongoDBInstanceTypeMongoDBAtlas DestinationMongodbMongoDBInstanceTypeMongoDBAtlas) DestinationMongodbMongoDbInstanceType {
- typ := DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDBInstanceTypeMongoDBAtlas
+func CreateDestinationMongodbMongoDbInstanceTypeDestinationMongodbMongoDBAtlas(destinationMongodbMongoDBAtlas DestinationMongodbMongoDBAtlas) DestinationMongodbMongoDbInstanceType {
+ typ := DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDBAtlas
return DestinationMongodbMongoDbInstanceType{
- DestinationMongodbMongoDBInstanceTypeMongoDBAtlas: &destinationMongodbMongoDBInstanceTypeMongoDBAtlas,
- Type: typ,
+ DestinationMongodbMongoDBAtlas: &destinationMongodbMongoDBAtlas,
+ Type: typ,
}
}
func (u *DestinationMongodbMongoDbInstanceType) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationMongodbMongoDBInstanceTypeMongoDBAtlas := new(DestinationMongodbMongoDBInstanceTypeMongoDBAtlas)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbMongoDBInstanceTypeMongoDBAtlas); err == nil {
- u.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas = destinationMongodbMongoDBInstanceTypeMongoDBAtlas
- u.Type = DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDBInstanceTypeMongoDBAtlas
+
+ destinationMongodbMongoDBAtlas := new(DestinationMongodbMongoDBAtlas)
+ if err := utils.UnmarshalJSON(data, &destinationMongodbMongoDBAtlas, "", true, true); err == nil {
+ u.DestinationMongodbMongoDBAtlas = destinationMongodbMongoDBAtlas
+ u.Type = DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDBAtlas
return nil
}
- destinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance := new(DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance); err == nil {
- u.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance = destinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance
- u.Type = DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance
+ destinationMongodbStandaloneMongoDbInstance := new(DestinationMongodbStandaloneMongoDbInstance)
+ if err := utils.UnmarshalJSON(data, &destinationMongodbStandaloneMongoDbInstance, "", true, true); err == nil {
+ u.DestinationMongodbStandaloneMongoDbInstance = destinationMongodbStandaloneMongoDbInstance
+ u.Type = DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbStandaloneMongoDbInstance
return nil
}
- destinationMongodbMongoDbInstanceTypeReplicaSet := new(DestinationMongodbMongoDbInstanceTypeReplicaSet)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbMongoDbInstanceTypeReplicaSet); err == nil {
- u.DestinationMongodbMongoDbInstanceTypeReplicaSet = destinationMongodbMongoDbInstanceTypeReplicaSet
- u.Type = DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDbInstanceTypeReplicaSet
+ destinationMongodbReplicaSet := new(DestinationMongodbReplicaSet)
+ if err := utils.UnmarshalJSON(data, &destinationMongodbReplicaSet, "", true, true); err == nil {
+ u.DestinationMongodbReplicaSet = destinationMongodbReplicaSet
+ u.Type = DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbReplicaSet
return nil
}
@@ -337,200 +458,294 @@ func (u *DestinationMongodbMongoDbInstanceType) UnmarshalJSON(data []byte) error
}
func (u DestinationMongodbMongoDbInstanceType) MarshalJSON() ([]byte, error) {
- if u.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas != nil {
- return json.Marshal(u.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas)
+ if u.DestinationMongodbStandaloneMongoDbInstance != nil {
+ return utils.MarshalJSON(u.DestinationMongodbStandaloneMongoDbInstance, "", true)
}
- if u.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
- return json.Marshal(u.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance)
+ if u.DestinationMongodbReplicaSet != nil {
+ return utils.MarshalJSON(u.DestinationMongodbReplicaSet, "", true)
}
- if u.DestinationMongodbMongoDbInstanceTypeReplicaSet != nil {
- return json.Marshal(u.DestinationMongodbMongoDbInstanceTypeReplicaSet)
+ if u.DestinationMongodbMongoDBAtlas != nil {
+ return utils.MarshalJSON(u.DestinationMongodbMongoDBAtlas, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationMongodbSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationMongodbSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationMongodbSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationMongodbSchemasTunnelMethodTunnelMethod string
const (
- DestinationMongodbSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationMongodbSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationMongodbSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationMongodbSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationMongodbSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationMongodbSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationMongodbSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationMongodbSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationMongodbSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationMongodbSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationMongodbSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationMongodbSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMongodbSSHTunnelMethodPasswordAuthentication struct {
+// DestinationMongodbPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMongodbPasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationMongodbSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMongodbSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationMongodbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationMongodbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationMongodbPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodbPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMongodbPasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMongodbPasswordAuthentication) GetTunnelMethod() DestinationMongodbSchemasTunnelMethodTunnelMethod {
+ return DestinationMongodbSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationMongodbPasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMongodbPasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationMongodbPasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationMongodbSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationMongodbSchemasTunnelMethod string
const (
- DestinationMongodbSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationMongodbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationMongodbSchemasTunnelMethodSSHKeyAuth DestinationMongodbSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationMongodbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationMongodbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationMongodbSchemasTunnelMethod) ToPointer() *DestinationMongodbSchemasTunnelMethod {
return &e
}
-func (e *DestinationMongodbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationMongodbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationMongodbSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbSchemasTunnelMethod: %v", v)
}
}
-// DestinationMongodbSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMongodbSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationMongodbSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMongodbSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationMongodbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMongodbSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationMongodbSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationMongodbSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationMongodbSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodbSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMongodbSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationMongodbSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMongodbSSHKeyAuthentication) GetTunnelMethod() DestinationMongodbSchemasTunnelMethod {
+ return DestinationMongodbSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationMongodbSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMongodbSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationMongodbTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationMongodbTunnelMethod string
const (
- DestinationMongodbSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationMongodbSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationMongodbTunnelMethodNoTunnel DestinationMongodbTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationMongodbSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationMongodbSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationMongodbTunnelMethod) ToPointer() *DestinationMongodbTunnelMethod {
return &e
}
-func (e *DestinationMongodbSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationMongodbSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationMongodbTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbTunnelMethod: %v", v)
}
}
-// DestinationMongodbSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMongodbSSHTunnelMethodNoTunnel struct {
+// DestinationMongodbNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMongodbNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationMongodbSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMongodbTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationMongodbNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodbNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMongodbNoTunnel) GetTunnelMethod() DestinationMongodbTunnelMethod {
+ return DestinationMongodbTunnelMethodNoTunnel
}
type DestinationMongodbSSHTunnelMethodType string
const (
- DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHTunnelMethodNoTunnel DestinationMongodbSSHTunnelMethodType = "destination-mongodb_SSH Tunnel Method_No Tunnel"
- DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHTunnelMethodSSHKeyAuthentication DestinationMongodbSSHTunnelMethodType = "destination-mongodb_SSH Tunnel Method_SSH Key Authentication"
- DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHTunnelMethodPasswordAuthentication DestinationMongodbSSHTunnelMethodType = "destination-mongodb_SSH Tunnel Method_Password Authentication"
+ DestinationMongodbSSHTunnelMethodTypeDestinationMongodbNoTunnel DestinationMongodbSSHTunnelMethodType = "destination-mongodb_No Tunnel"
+ DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHKeyAuthentication DestinationMongodbSSHTunnelMethodType = "destination-mongodb_SSH Key Authentication"
+ DestinationMongodbSSHTunnelMethodTypeDestinationMongodbPasswordAuthentication DestinationMongodbSSHTunnelMethodType = "destination-mongodb_Password Authentication"
)
type DestinationMongodbSSHTunnelMethod struct {
- DestinationMongodbSSHTunnelMethodNoTunnel *DestinationMongodbSSHTunnelMethodNoTunnel
- DestinationMongodbSSHTunnelMethodSSHKeyAuthentication *DestinationMongodbSSHTunnelMethodSSHKeyAuthentication
- DestinationMongodbSSHTunnelMethodPasswordAuthentication *DestinationMongodbSSHTunnelMethodPasswordAuthentication
+ DestinationMongodbNoTunnel *DestinationMongodbNoTunnel
+ DestinationMongodbSSHKeyAuthentication *DestinationMongodbSSHKeyAuthentication
+ DestinationMongodbPasswordAuthentication *DestinationMongodbPasswordAuthentication
Type DestinationMongodbSSHTunnelMethodType
}
-func CreateDestinationMongodbSSHTunnelMethodDestinationMongodbSSHTunnelMethodNoTunnel(destinationMongodbSSHTunnelMethodNoTunnel DestinationMongodbSSHTunnelMethodNoTunnel) DestinationMongodbSSHTunnelMethod {
- typ := DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHTunnelMethodNoTunnel
+func CreateDestinationMongodbSSHTunnelMethodDestinationMongodbNoTunnel(destinationMongodbNoTunnel DestinationMongodbNoTunnel) DestinationMongodbSSHTunnelMethod {
+ typ := DestinationMongodbSSHTunnelMethodTypeDestinationMongodbNoTunnel
return DestinationMongodbSSHTunnelMethod{
- DestinationMongodbSSHTunnelMethodNoTunnel: &destinationMongodbSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationMongodbNoTunnel: &destinationMongodbNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationMongodbSSHTunnelMethodDestinationMongodbSSHTunnelMethodSSHKeyAuthentication(destinationMongodbSSHTunnelMethodSSHKeyAuthentication DestinationMongodbSSHTunnelMethodSSHKeyAuthentication) DestinationMongodbSSHTunnelMethod {
- typ := DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationMongodbSSHTunnelMethodDestinationMongodbSSHKeyAuthentication(destinationMongodbSSHKeyAuthentication DestinationMongodbSSHKeyAuthentication) DestinationMongodbSSHTunnelMethod {
+ typ := DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHKeyAuthentication
return DestinationMongodbSSHTunnelMethod{
- DestinationMongodbSSHTunnelMethodSSHKeyAuthentication: &destinationMongodbSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ DestinationMongodbSSHKeyAuthentication: &destinationMongodbSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateDestinationMongodbSSHTunnelMethodDestinationMongodbSSHTunnelMethodPasswordAuthentication(destinationMongodbSSHTunnelMethodPasswordAuthentication DestinationMongodbSSHTunnelMethodPasswordAuthentication) DestinationMongodbSSHTunnelMethod {
- typ := DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHTunnelMethodPasswordAuthentication
+func CreateDestinationMongodbSSHTunnelMethodDestinationMongodbPasswordAuthentication(destinationMongodbPasswordAuthentication DestinationMongodbPasswordAuthentication) DestinationMongodbSSHTunnelMethod {
+ typ := DestinationMongodbSSHTunnelMethodTypeDestinationMongodbPasswordAuthentication
return DestinationMongodbSSHTunnelMethod{
- DestinationMongodbSSHTunnelMethodPasswordAuthentication: &destinationMongodbSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ DestinationMongodbPasswordAuthentication: &destinationMongodbPasswordAuthentication,
+ Type: typ,
}
}
func (u *DestinationMongodbSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationMongodbSSHTunnelMethodNoTunnel := new(DestinationMongodbSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationMongodbSSHTunnelMethodNoTunnel = destinationMongodbSSHTunnelMethodNoTunnel
- u.Type = DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHTunnelMethodNoTunnel
+
+ destinationMongodbNoTunnel := new(DestinationMongodbNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationMongodbNoTunnel, "", true, true); err == nil {
+ u.DestinationMongodbNoTunnel = destinationMongodbNoTunnel
+ u.Type = DestinationMongodbSSHTunnelMethodTypeDestinationMongodbNoTunnel
return nil
}
- destinationMongodbSSHTunnelMethodSSHKeyAuthentication := new(DestinationMongodbSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationMongodbSSHTunnelMethodSSHKeyAuthentication = destinationMongodbSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHTunnelMethodSSHKeyAuthentication
+ destinationMongodbSSHKeyAuthentication := new(DestinationMongodbSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMongodbSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationMongodbSSHKeyAuthentication = destinationMongodbSSHKeyAuthentication
+ u.Type = DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHKeyAuthentication
return nil
}
- destinationMongodbSSHTunnelMethodPasswordAuthentication := new(DestinationMongodbSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationMongodbSSHTunnelMethodPasswordAuthentication = destinationMongodbSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationMongodbSSHTunnelMethodTypeDestinationMongodbSSHTunnelMethodPasswordAuthentication
+ destinationMongodbPasswordAuthentication := new(DestinationMongodbPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMongodbPasswordAuthentication, "", true, true); err == nil {
+ u.DestinationMongodbPasswordAuthentication = destinationMongodbPasswordAuthentication
+ u.Type = DestinationMongodbSSHTunnelMethodTypeDestinationMongodbPasswordAuthentication
return nil
}
@@ -538,29 +753,72 @@ func (u *DestinationMongodbSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationMongodbSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationMongodbSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationMongodbSSHTunnelMethodNoTunnel)
+ if u.DestinationMongodbNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationMongodbNoTunnel, "", true)
}
- if u.DestinationMongodbSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationMongodbSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationMongodbSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMongodbSSHKeyAuthentication, "", true)
}
- if u.DestinationMongodbSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationMongodbSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationMongodbPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMongodbPasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationMongodb struct {
// Authorization type.
AuthType DestinationMongodbAuthorizationType `json:"auth_type"`
// Name of the database.
- Database string `json:"database"`
- DestinationType DestinationMongodbMongodb `json:"destinationType"`
+ Database string `json:"database"`
+ destinationType Mongodb `const:"mongodb" json:"destinationType"`
// MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
InstanceType *DestinationMongodbMongoDbInstanceType `json:"instance_type,omitempty"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *DestinationMongodbSSHTunnelMethod `json:"tunnel_method,omitempty"`
}
+
+func (d DestinationMongodb) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodb) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMongodb) GetAuthType() DestinationMongodbAuthorizationType {
+ if o == nil {
+ return DestinationMongodbAuthorizationType{}
+ }
+ return o.AuthType
+}
+
+func (o *DestinationMongodb) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationMongodb) GetDestinationType() Mongodb {
+ return MongodbMongodb
+}
+
+func (o *DestinationMongodb) GetInstanceType() *DestinationMongodbMongoDbInstanceType {
+ if o == nil {
+ return nil
+ }
+ return o.InstanceType
+}
+
+func (o *DestinationMongodb) GetTunnelMethod() *DestinationMongodbSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
diff --git a/internal/sdk/pkg/models/shared/destinationmongodbcreaterequest.go b/internal/sdk/pkg/models/shared/destinationmongodbcreaterequest.go
old mode 100755
new mode 100644
index a034f68e7..a8e74d1fd
--- a/internal/sdk/pkg/models/shared/destinationmongodbcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationmongodbcreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationMongodbCreateRequest struct {
Configuration DestinationMongodb `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationMongodbCreateRequest) GetConfiguration() DestinationMongodb {
+ if o == nil {
+ return DestinationMongodb{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationMongodbCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationMongodbCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationMongodbCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationmongodbputrequest.go b/internal/sdk/pkg/models/shared/destinationmongodbputrequest.go
old mode 100755
new mode 100644
index 1ea66bce3..453795406
--- a/internal/sdk/pkg/models/shared/destinationmongodbputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationmongodbputrequest.go
@@ -7,3 +7,24 @@ type DestinationMongodbPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationMongodbPutRequest) GetConfiguration() DestinationMongodbUpdate {
+ if o == nil {
+ return DestinationMongodbUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationMongodbPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationMongodbPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationmongodbupdate.go b/internal/sdk/pkg/models/shared/destinationmongodbupdate.go
old mode 100755
new mode 100644
index acd08fa23..5a01f94cd
--- a/internal/sdk/pkg/models/shared/destinationmongodbupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationmongodbupdate.go
@@ -3,510 +3,725 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationMongodbUpdateAuthorizationTypeLoginPasswordAuthorization string
+type DestinationMongodbUpdateAuthorization string
const (
- DestinationMongodbUpdateAuthorizationTypeLoginPasswordAuthorizationLoginPassword DestinationMongodbUpdateAuthorizationTypeLoginPasswordAuthorization = "login/password"
+ DestinationMongodbUpdateAuthorizationLoginPassword DestinationMongodbUpdateAuthorization = "login/password"
)
-func (e DestinationMongodbUpdateAuthorizationTypeLoginPasswordAuthorization) ToPointer() *DestinationMongodbUpdateAuthorizationTypeLoginPasswordAuthorization {
+func (e DestinationMongodbUpdateAuthorization) ToPointer() *DestinationMongodbUpdateAuthorization {
return &e
}
-func (e *DestinationMongodbUpdateAuthorizationTypeLoginPasswordAuthorization) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbUpdateAuthorization) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "login/password":
- *e = DestinationMongodbUpdateAuthorizationTypeLoginPasswordAuthorization(v)
+ *e = DestinationMongodbUpdateAuthorization(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbUpdateAuthorizationTypeLoginPasswordAuthorization: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbUpdateAuthorization: %v", v)
}
}
-// DestinationMongodbUpdateAuthorizationTypeLoginPassword - Login/Password.
-type DestinationMongodbUpdateAuthorizationTypeLoginPassword struct {
- Authorization DestinationMongodbUpdateAuthorizationTypeLoginPasswordAuthorization `json:"authorization"`
+// LoginPassword - Login/Password.
+type LoginPassword struct {
+ authorization DestinationMongodbUpdateAuthorization `const:"login/password" json:"authorization"`
// Password associated with the username.
Password string `json:"password"`
// Username to use to access the database.
Username string `json:"username"`
}
-type DestinationMongodbUpdateAuthorizationTypeNoneAuthorization string
+func (l LoginPassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(l, "", false)
+}
+
+func (l *LoginPassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &l, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *LoginPassword) GetAuthorization() DestinationMongodbUpdateAuthorization {
+ return DestinationMongodbUpdateAuthorizationLoginPassword
+}
+
+func (o *LoginPassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *LoginPassword) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type Authorization string
const (
- DestinationMongodbUpdateAuthorizationTypeNoneAuthorizationNone DestinationMongodbUpdateAuthorizationTypeNoneAuthorization = "none"
+ AuthorizationNone Authorization = "none"
)
-func (e DestinationMongodbUpdateAuthorizationTypeNoneAuthorization) ToPointer() *DestinationMongodbUpdateAuthorizationTypeNoneAuthorization {
+func (e Authorization) ToPointer() *Authorization {
return &e
}
-func (e *DestinationMongodbUpdateAuthorizationTypeNoneAuthorization) UnmarshalJSON(data []byte) error {
+func (e *Authorization) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "none":
- *e = DestinationMongodbUpdateAuthorizationTypeNoneAuthorization(v)
+ *e = Authorization(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbUpdateAuthorizationTypeNoneAuthorization: %v", v)
+ return fmt.Errorf("invalid value for Authorization: %v", v)
}
}
-// DestinationMongodbUpdateAuthorizationTypeNone - None.
-type DestinationMongodbUpdateAuthorizationTypeNone struct {
- Authorization DestinationMongodbUpdateAuthorizationTypeNoneAuthorization `json:"authorization"`
+// None - None.
+type None struct {
+ authorization Authorization `const:"none" json:"authorization"`
}
-type DestinationMongodbUpdateAuthorizationTypeType string
+func (n None) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(n, "", false)
+}
+
+func (n *None) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &n, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *None) GetAuthorization() Authorization {
+ return AuthorizationNone
+}
+
+type AuthorizationTypeType string
const (
- DestinationMongodbUpdateAuthorizationTypeTypeDestinationMongodbUpdateAuthorizationTypeNone DestinationMongodbUpdateAuthorizationTypeType = "destination-mongodb-update_Authorization type_None"
- DestinationMongodbUpdateAuthorizationTypeTypeDestinationMongodbUpdateAuthorizationTypeLoginPassword DestinationMongodbUpdateAuthorizationTypeType = "destination-mongodb-update_Authorization type_Login/Password"
+ AuthorizationTypeTypeNone AuthorizationTypeType = "None"
+ AuthorizationTypeTypeLoginPassword AuthorizationTypeType = "Login/Password"
)
-type DestinationMongodbUpdateAuthorizationType struct {
- DestinationMongodbUpdateAuthorizationTypeNone *DestinationMongodbUpdateAuthorizationTypeNone
- DestinationMongodbUpdateAuthorizationTypeLoginPassword *DestinationMongodbUpdateAuthorizationTypeLoginPassword
+type AuthorizationType struct {
+ None *None
+ LoginPassword *LoginPassword
- Type DestinationMongodbUpdateAuthorizationTypeType
+ Type AuthorizationTypeType
}
-func CreateDestinationMongodbUpdateAuthorizationTypeDestinationMongodbUpdateAuthorizationTypeNone(destinationMongodbUpdateAuthorizationTypeNone DestinationMongodbUpdateAuthorizationTypeNone) DestinationMongodbUpdateAuthorizationType {
- typ := DestinationMongodbUpdateAuthorizationTypeTypeDestinationMongodbUpdateAuthorizationTypeNone
+func CreateAuthorizationTypeNone(none None) AuthorizationType {
+ typ := AuthorizationTypeTypeNone
- return DestinationMongodbUpdateAuthorizationType{
- DestinationMongodbUpdateAuthorizationTypeNone: &destinationMongodbUpdateAuthorizationTypeNone,
+ return AuthorizationType{
+ None: &none,
Type: typ,
}
}
-func CreateDestinationMongodbUpdateAuthorizationTypeDestinationMongodbUpdateAuthorizationTypeLoginPassword(destinationMongodbUpdateAuthorizationTypeLoginPassword DestinationMongodbUpdateAuthorizationTypeLoginPassword) DestinationMongodbUpdateAuthorizationType {
- typ := DestinationMongodbUpdateAuthorizationTypeTypeDestinationMongodbUpdateAuthorizationTypeLoginPassword
+func CreateAuthorizationTypeLoginPassword(loginPassword LoginPassword) AuthorizationType {
+ typ := AuthorizationTypeTypeLoginPassword
- return DestinationMongodbUpdateAuthorizationType{
- DestinationMongodbUpdateAuthorizationTypeLoginPassword: &destinationMongodbUpdateAuthorizationTypeLoginPassword,
- Type: typ,
+ return AuthorizationType{
+ LoginPassword: &loginPassword,
+ Type: typ,
}
}
-func (u *DestinationMongodbUpdateAuthorizationType) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *AuthorizationType) UnmarshalJSON(data []byte) error {
- destinationMongodbUpdateAuthorizationTypeNone := new(DestinationMongodbUpdateAuthorizationTypeNone)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbUpdateAuthorizationTypeNone); err == nil {
- u.DestinationMongodbUpdateAuthorizationTypeNone = destinationMongodbUpdateAuthorizationTypeNone
- u.Type = DestinationMongodbUpdateAuthorizationTypeTypeDestinationMongodbUpdateAuthorizationTypeNone
+ none := new(None)
+ if err := utils.UnmarshalJSON(data, &none, "", true, true); err == nil {
+ u.None = none
+ u.Type = AuthorizationTypeTypeNone
return nil
}
- destinationMongodbUpdateAuthorizationTypeLoginPassword := new(DestinationMongodbUpdateAuthorizationTypeLoginPassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbUpdateAuthorizationTypeLoginPassword); err == nil {
- u.DestinationMongodbUpdateAuthorizationTypeLoginPassword = destinationMongodbUpdateAuthorizationTypeLoginPassword
- u.Type = DestinationMongodbUpdateAuthorizationTypeTypeDestinationMongodbUpdateAuthorizationTypeLoginPassword
+ loginPassword := new(LoginPassword)
+ if err := utils.UnmarshalJSON(data, &loginPassword, "", true, true); err == nil {
+ u.LoginPassword = loginPassword
+ u.Type = AuthorizationTypeTypeLoginPassword
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationMongodbUpdateAuthorizationType) MarshalJSON() ([]byte, error) {
- if u.DestinationMongodbUpdateAuthorizationTypeNone != nil {
- return json.Marshal(u.DestinationMongodbUpdateAuthorizationTypeNone)
+func (u AuthorizationType) MarshalJSON() ([]byte, error) {
+ if u.None != nil {
+ return utils.MarshalJSON(u.None, "", true)
}
- if u.DestinationMongodbUpdateAuthorizationTypeLoginPassword != nil {
- return json.Marshal(u.DestinationMongodbUpdateAuthorizationTypeLoginPassword)
+ if u.LoginPassword != nil {
+ return utils.MarshalJSON(u.LoginPassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance string
+type DestinationMongodbUpdateSchemasInstance string
const (
- DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstanceAtlas DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance = "atlas"
+ DestinationMongodbUpdateSchemasInstanceAtlas DestinationMongodbUpdateSchemasInstance = "atlas"
)
-func (e DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance) ToPointer() *DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance {
+func (e DestinationMongodbUpdateSchemasInstance) ToPointer() *DestinationMongodbUpdateSchemasInstance {
return &e
}
-func (e *DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbUpdateSchemasInstance) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "atlas":
- *e = DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance(v)
+ *e = DestinationMongodbUpdateSchemasInstance(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbUpdateSchemasInstance: %v", v)
}
}
-// DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas struct {
+// MongoDBAtlas - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
+type MongoDBAtlas struct {
// URL of a cluster to connect to.
- ClusterURL string `json:"cluster_url"`
- Instance DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance `json:"instance"`
+ ClusterURL string `json:"cluster_url"`
+ Instance *DestinationMongodbUpdateSchemasInstance `default:"atlas" json:"instance"`
+}
+
+func (m MongoDBAtlas) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(m, "", false)
+}
+
+func (m *MongoDBAtlas) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &m, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *MongoDBAtlas) GetClusterURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClusterURL
+}
+
+func (o *MongoDBAtlas) GetInstance() *DestinationMongodbUpdateSchemasInstance {
+ if o == nil {
+ return nil
+ }
+ return o.Instance
}
-type DestinationMongodbUpdateMongoDbInstanceTypeReplicaSetInstance string
+type DestinationMongodbUpdateInstance string
const (
- DestinationMongodbUpdateMongoDbInstanceTypeReplicaSetInstanceReplica DestinationMongodbUpdateMongoDbInstanceTypeReplicaSetInstance = "replica"
+ DestinationMongodbUpdateInstanceReplica DestinationMongodbUpdateInstance = "replica"
)
-func (e DestinationMongodbUpdateMongoDbInstanceTypeReplicaSetInstance) ToPointer() *DestinationMongodbUpdateMongoDbInstanceTypeReplicaSetInstance {
+func (e DestinationMongodbUpdateInstance) ToPointer() *DestinationMongodbUpdateInstance {
return &e
}
-func (e *DestinationMongodbUpdateMongoDbInstanceTypeReplicaSetInstance) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbUpdateInstance) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "replica":
- *e = DestinationMongodbUpdateMongoDbInstanceTypeReplicaSetInstance(v)
+ *e = DestinationMongodbUpdateInstance(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbUpdateMongoDbInstanceTypeReplicaSetInstance: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbUpdateInstance: %v", v)
}
}
-// DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet struct {
- Instance DestinationMongodbUpdateMongoDbInstanceTypeReplicaSetInstance `json:"instance"`
+// ReplicaSet - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
+type ReplicaSet struct {
+ Instance *DestinationMongodbUpdateInstance `default:"replica" json:"instance"`
// A replica set name.
ReplicaSet *string `json:"replica_set,omitempty"`
// The members of a replica set. Please specify `host`:`port` of each member seperated by comma.
ServerAddresses string `json:"server_addresses"`
}
-type DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance string
+func (r ReplicaSet) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(r, "", false)
+}
+
+func (r *ReplicaSet) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &r, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ReplicaSet) GetInstance() *DestinationMongodbUpdateInstance {
+ if o == nil {
+ return nil
+ }
+ return o.Instance
+}
+
+func (o *ReplicaSet) GetReplicaSet() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicaSet
+}
+
+func (o *ReplicaSet) GetServerAddresses() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServerAddresses
+}
+
+type Instance string
const (
- DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstanceStandalone DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance = "standalone"
+ InstanceStandalone Instance = "standalone"
)
-func (e DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance) ToPointer() *DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance {
+func (e Instance) ToPointer() *Instance {
return &e
}
-func (e *DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance) UnmarshalJSON(data []byte) error {
+func (e *Instance) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "standalone":
- *e = DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance(v)
+ *e = Instance(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance: %v", v)
+ return fmt.Errorf("invalid value for Instance: %v", v)
}
}
-// DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance struct {
+// StandaloneMongoDbInstance - MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
+type StandaloneMongoDbInstance struct {
// The Host of a Mongo database to be replicated.
- Host string `json:"host"`
- Instance DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance `json:"instance"`
+ Host string `json:"host"`
+ Instance *Instance `default:"standalone" json:"instance"`
// The Port of a Mongo database to be replicated.
- Port int64 `json:"port"`
+ Port *int64 `default:"27017" json:"port"`
+}
+
+func (s StandaloneMongoDbInstance) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *StandaloneMongoDbInstance) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *StandaloneMongoDbInstance) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *StandaloneMongoDbInstance) GetInstance() *Instance {
+ if o == nil {
+ return nil
+ }
+ return o.Instance
+}
+
+func (o *StandaloneMongoDbInstance) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
}
-type DestinationMongodbUpdateMongoDbInstanceTypeType string
+type MongoDbInstanceTypeType string
const (
- DestinationMongodbUpdateMongoDbInstanceTypeTypeDestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance DestinationMongodbUpdateMongoDbInstanceTypeType = "destination-mongodb-update_MongoDb Instance Type_Standalone MongoDb Instance"
- DestinationMongodbUpdateMongoDbInstanceTypeTypeDestinationMongodbUpdateMongoDbInstanceTypeReplicaSet DestinationMongodbUpdateMongoDbInstanceTypeType = "destination-mongodb-update_MongoDb Instance Type_Replica Set"
- DestinationMongodbUpdateMongoDbInstanceTypeTypeDestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas DestinationMongodbUpdateMongoDbInstanceTypeType = "destination-mongodb-update_MongoDb Instance Type_MongoDB Atlas"
+ MongoDbInstanceTypeTypeStandaloneMongoDbInstance MongoDbInstanceTypeType = "Standalone MongoDb Instance"
+ MongoDbInstanceTypeTypeReplicaSet MongoDbInstanceTypeType = "Replica Set"
+ MongoDbInstanceTypeTypeMongoDBAtlas MongoDbInstanceTypeType = "MongoDB Atlas"
)
-type DestinationMongodbUpdateMongoDbInstanceType struct {
- DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance *DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance
- DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet *DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet
- DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas *DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas
+type MongoDbInstanceType struct {
+ StandaloneMongoDbInstance *StandaloneMongoDbInstance
+ ReplicaSet *ReplicaSet
+ MongoDBAtlas *MongoDBAtlas
- Type DestinationMongodbUpdateMongoDbInstanceTypeType
+ Type MongoDbInstanceTypeType
}
-func CreateDestinationMongodbUpdateMongoDbInstanceTypeDestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance(destinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance) DestinationMongodbUpdateMongoDbInstanceType {
- typ := DestinationMongodbUpdateMongoDbInstanceTypeTypeDestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance
+func CreateMongoDbInstanceTypeStandaloneMongoDbInstance(standaloneMongoDbInstance StandaloneMongoDbInstance) MongoDbInstanceType {
+ typ := MongoDbInstanceTypeTypeStandaloneMongoDbInstance
- return DestinationMongodbUpdateMongoDbInstanceType{
- DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance: &destinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance,
- Type: typ,
+ return MongoDbInstanceType{
+ StandaloneMongoDbInstance: &standaloneMongoDbInstance,
+ Type: typ,
}
}
-func CreateDestinationMongodbUpdateMongoDbInstanceTypeDestinationMongodbUpdateMongoDbInstanceTypeReplicaSet(destinationMongodbUpdateMongoDbInstanceTypeReplicaSet DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet) DestinationMongodbUpdateMongoDbInstanceType {
- typ := DestinationMongodbUpdateMongoDbInstanceTypeTypeDestinationMongodbUpdateMongoDbInstanceTypeReplicaSet
+func CreateMongoDbInstanceTypeReplicaSet(replicaSet ReplicaSet) MongoDbInstanceType {
+ typ := MongoDbInstanceTypeTypeReplicaSet
- return DestinationMongodbUpdateMongoDbInstanceType{
- DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet: &destinationMongodbUpdateMongoDbInstanceTypeReplicaSet,
- Type: typ,
+ return MongoDbInstanceType{
+ ReplicaSet: &replicaSet,
+ Type: typ,
}
}
-func CreateDestinationMongodbUpdateMongoDbInstanceTypeDestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas(destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) DestinationMongodbUpdateMongoDbInstanceType {
- typ := DestinationMongodbUpdateMongoDbInstanceTypeTypeDestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas
+func CreateMongoDbInstanceTypeMongoDBAtlas(mongoDBAtlas MongoDBAtlas) MongoDbInstanceType {
+ typ := MongoDbInstanceTypeTypeMongoDBAtlas
- return DestinationMongodbUpdateMongoDbInstanceType{
- DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas: &destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas,
- Type: typ,
+ return MongoDbInstanceType{
+ MongoDBAtlas: &mongoDBAtlas,
+ Type: typ,
}
}
-func (u *DestinationMongodbUpdateMongoDbInstanceType) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *MongoDbInstanceType) UnmarshalJSON(data []byte) error {
- destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas := new(DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas); err == nil {
- u.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas = destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas
- u.Type = DestinationMongodbUpdateMongoDbInstanceTypeTypeDestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas
+ mongoDBAtlas := new(MongoDBAtlas)
+ if err := utils.UnmarshalJSON(data, &mongoDBAtlas, "", true, true); err == nil {
+ u.MongoDBAtlas = mongoDBAtlas
+ u.Type = MongoDbInstanceTypeTypeMongoDBAtlas
return nil
}
- destinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance := new(DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance); err == nil {
- u.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance = destinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance
- u.Type = DestinationMongodbUpdateMongoDbInstanceTypeTypeDestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance
+ standaloneMongoDbInstance := new(StandaloneMongoDbInstance)
+ if err := utils.UnmarshalJSON(data, &standaloneMongoDbInstance, "", true, true); err == nil {
+ u.StandaloneMongoDbInstance = standaloneMongoDbInstance
+ u.Type = MongoDbInstanceTypeTypeStandaloneMongoDbInstance
return nil
}
- destinationMongodbUpdateMongoDbInstanceTypeReplicaSet := new(DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbUpdateMongoDbInstanceTypeReplicaSet); err == nil {
- u.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet = destinationMongodbUpdateMongoDbInstanceTypeReplicaSet
- u.Type = DestinationMongodbUpdateMongoDbInstanceTypeTypeDestinationMongodbUpdateMongoDbInstanceTypeReplicaSet
+ replicaSet := new(ReplicaSet)
+ if err := utils.UnmarshalJSON(data, &replicaSet, "", true, true); err == nil {
+ u.ReplicaSet = replicaSet
+ u.Type = MongoDbInstanceTypeTypeReplicaSet
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationMongodbUpdateMongoDbInstanceType) MarshalJSON() ([]byte, error) {
- if u.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas != nil {
- return json.Marshal(u.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas)
+func (u MongoDbInstanceType) MarshalJSON() ([]byte, error) {
+ if u.StandaloneMongoDbInstance != nil {
+ return utils.MarshalJSON(u.StandaloneMongoDbInstance, "", true)
}
- if u.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
- return json.Marshal(u.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance)
+ if u.ReplicaSet != nil {
+ return utils.MarshalJSON(u.ReplicaSet, "", true)
}
- if u.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet != nil {
- return json.Marshal(u.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet)
+ if u.MongoDBAtlas != nil {
+ return utils.MarshalJSON(u.MongoDBAtlas, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationMongodbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationMongodbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationMongodbUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationMongodbUpdateSchemasTunnelMethodTunnelMethod string
const (
- DestinationMongodbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationMongodbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationMongodbUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationMongodbUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationMongodbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationMongodbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationMongodbUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationMongodbUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationMongodbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationMongodbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationMongodbUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication struct {
+// DestinationMongodbUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMongodbUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationMongodbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMongodbUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationMongodbUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodbUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMongodbUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMongodbUpdatePasswordAuthentication) GetTunnelMethod() DestinationMongodbUpdateSchemasTunnelMethodTunnelMethod {
+ return DestinationMongodbUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationMongodbUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMongodbUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationMongodbUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationMongodbUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationMongodbUpdateSchemasTunnelMethod string
const (
- DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationMongodbUpdateSchemasTunnelMethodSSHKeyAuth DestinationMongodbUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationMongodbUpdateSchemasTunnelMethod) ToPointer() *DestinationMongodbUpdateSchemasTunnelMethod {
return &e
}
-func (e *DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationMongodbUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbUpdateSchemasTunnelMethod: %v", v)
}
}
-// DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationMongodbUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMongodbUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMongodbUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationMongodbUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationMongodbUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationMongodbUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodbUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMongodbUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationMongodbUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMongodbUpdateSSHKeyAuthentication) GetTunnelMethod() DestinationMongodbUpdateSchemasTunnelMethod {
+ return DestinationMongodbUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationMongodbUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMongodbUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationMongodbUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationMongodbUpdateTunnelMethod string
const (
- DestinationMongodbUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationMongodbUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationMongodbUpdateTunnelMethodNoTunnel DestinationMongodbUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationMongodbUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationMongodbUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationMongodbUpdateTunnelMethod) ToPointer() *DestinationMongodbUpdateTunnelMethod {
return &e
}
-func (e *DestinationMongodbUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMongodbUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationMongodbUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationMongodbUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMongodbUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMongodbUpdateTunnelMethod: %v", v)
}
}
-// DestinationMongodbUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMongodbUpdateSSHTunnelMethodNoTunnel struct {
+// DestinationMongodbUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMongodbUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationMongodbUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMongodbUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationMongodbUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMongodbUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMongodbUpdateNoTunnel) GetTunnelMethod() DestinationMongodbUpdateTunnelMethod {
+ return DestinationMongodbUpdateTunnelMethodNoTunnel
}
type DestinationMongodbUpdateSSHTunnelMethodType string
const (
- DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHTunnelMethodNoTunnel DestinationMongodbUpdateSSHTunnelMethodType = "destination-mongodb-update_SSH Tunnel Method_No Tunnel"
- DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication DestinationMongodbUpdateSSHTunnelMethodType = "destination-mongodb-update_SSH Tunnel Method_SSH Key Authentication"
- DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication DestinationMongodbUpdateSSHTunnelMethodType = "destination-mongodb-update_SSH Tunnel Method_Password Authentication"
+ DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateNoTunnel DestinationMongodbUpdateSSHTunnelMethodType = "destination-mongodb-update_No Tunnel"
+ DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHKeyAuthentication DestinationMongodbUpdateSSHTunnelMethodType = "destination-mongodb-update_SSH Key Authentication"
+ DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdatePasswordAuthentication DestinationMongodbUpdateSSHTunnelMethodType = "destination-mongodb-update_Password Authentication"
)
type DestinationMongodbUpdateSSHTunnelMethod struct {
- DestinationMongodbUpdateSSHTunnelMethodNoTunnel *DestinationMongodbUpdateSSHTunnelMethodNoTunnel
- DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication
- DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication *DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication
+ DestinationMongodbUpdateNoTunnel *DestinationMongodbUpdateNoTunnel
+ DestinationMongodbUpdateSSHKeyAuthentication *DestinationMongodbUpdateSSHKeyAuthentication
+ DestinationMongodbUpdatePasswordAuthentication *DestinationMongodbUpdatePasswordAuthentication
Type DestinationMongodbUpdateSSHTunnelMethodType
}
-func CreateDestinationMongodbUpdateSSHTunnelMethodDestinationMongodbUpdateSSHTunnelMethodNoTunnel(destinationMongodbUpdateSSHTunnelMethodNoTunnel DestinationMongodbUpdateSSHTunnelMethodNoTunnel) DestinationMongodbUpdateSSHTunnelMethod {
- typ := DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHTunnelMethodNoTunnel
+func CreateDestinationMongodbUpdateSSHTunnelMethodDestinationMongodbUpdateNoTunnel(destinationMongodbUpdateNoTunnel DestinationMongodbUpdateNoTunnel) DestinationMongodbUpdateSSHTunnelMethod {
+ typ := DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateNoTunnel
return DestinationMongodbUpdateSSHTunnelMethod{
- DestinationMongodbUpdateSSHTunnelMethodNoTunnel: &destinationMongodbUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationMongodbUpdateNoTunnel: &destinationMongodbUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationMongodbUpdateSSHTunnelMethodDestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication(destinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication) DestinationMongodbUpdateSSHTunnelMethod {
- typ := DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationMongodbUpdateSSHTunnelMethodDestinationMongodbUpdateSSHKeyAuthentication(destinationMongodbUpdateSSHKeyAuthentication DestinationMongodbUpdateSSHKeyAuthentication) DestinationMongodbUpdateSSHTunnelMethod {
+ typ := DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHKeyAuthentication
return DestinationMongodbUpdateSSHTunnelMethod{
- DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication: &destinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationMongodbUpdateSSHKeyAuthentication: &destinationMongodbUpdateSSHKeyAuthentication,
Type: typ,
}
}
-func CreateDestinationMongodbUpdateSSHTunnelMethodDestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication(destinationMongodbUpdateSSHTunnelMethodPasswordAuthentication DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication) DestinationMongodbUpdateSSHTunnelMethod {
- typ := DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication
+func CreateDestinationMongodbUpdateSSHTunnelMethodDestinationMongodbUpdatePasswordAuthentication(destinationMongodbUpdatePasswordAuthentication DestinationMongodbUpdatePasswordAuthentication) DestinationMongodbUpdateSSHTunnelMethod {
+ typ := DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdatePasswordAuthentication
return DestinationMongodbUpdateSSHTunnelMethod{
- DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication: &destinationMongodbUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationMongodbUpdatePasswordAuthentication: &destinationMongodbUpdatePasswordAuthentication,
Type: typ,
}
}
func (u *DestinationMongodbUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationMongodbUpdateSSHTunnelMethodNoTunnel := new(DestinationMongodbUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationMongodbUpdateSSHTunnelMethodNoTunnel = destinationMongodbUpdateSSHTunnelMethodNoTunnel
- u.Type = DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHTunnelMethodNoTunnel
+
+ destinationMongodbUpdateNoTunnel := new(DestinationMongodbUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationMongodbUpdateNoTunnel, "", true, true); err == nil {
+ u.DestinationMongodbUpdateNoTunnel = destinationMongodbUpdateNoTunnel
+ u.Type = DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateNoTunnel
return nil
}
- destinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication := new(DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication = destinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication
+ destinationMongodbUpdateSSHKeyAuthentication := new(DestinationMongodbUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMongodbUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationMongodbUpdateSSHKeyAuthentication = destinationMongodbUpdateSSHKeyAuthentication
+ u.Type = DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHKeyAuthentication
return nil
}
- destinationMongodbUpdateSSHTunnelMethodPasswordAuthentication := new(DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMongodbUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication = destinationMongodbUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication
+ destinationMongodbUpdatePasswordAuthentication := new(DestinationMongodbUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMongodbUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.DestinationMongodbUpdatePasswordAuthentication = destinationMongodbUpdatePasswordAuthentication
+ u.Type = DestinationMongodbUpdateSSHTunnelMethodTypeDestinationMongodbUpdatePasswordAuthentication
return nil
}
@@ -514,28 +729,56 @@ func (u *DestinationMongodbUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) err
}
func (u DestinationMongodbUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationMongodbUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationMongodbUpdateSSHTunnelMethodNoTunnel)
+ if u.DestinationMongodbUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationMongodbUpdateNoTunnel, "", true)
}
- if u.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationMongodbUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationMongodbUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMongodbUpdateSSHKeyAuthentication, "", true)
}
- if u.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationMongodbUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationMongodbUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMongodbUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationMongodbUpdate struct {
// Authorization type.
- AuthType DestinationMongodbUpdateAuthorizationType `json:"auth_type"`
+ AuthType AuthorizationType `json:"auth_type"`
// Name of the database.
Database string `json:"database"`
// MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
- InstanceType *DestinationMongodbUpdateMongoDbInstanceType `json:"instance_type,omitempty"`
+ InstanceType *MongoDbInstanceType `json:"instance_type,omitempty"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *DestinationMongodbUpdateSSHTunnelMethod `json:"tunnel_method,omitempty"`
}
+
+func (o *DestinationMongodbUpdate) GetAuthType() AuthorizationType {
+ if o == nil {
+ return AuthorizationType{}
+ }
+ return o.AuthType
+}
+
+func (o *DestinationMongodbUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationMongodbUpdate) GetInstanceType() *MongoDbInstanceType {
+ if o == nil {
+ return nil
+ }
+ return o.InstanceType
+}
+
+func (o *DestinationMongodbUpdate) GetTunnelMethod() *DestinationMongodbUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
diff --git a/internal/sdk/pkg/models/shared/destinationmssql.go b/internal/sdk/pkg/models/shared/destinationmssql.go
old mode 100755
new mode 100644
index 7680ec88b..06351198c
--- a/internal/sdk/pkg/models/shared/destinationmssql.go
+++ b/internal/sdk/pkg/models/shared/destinationmssql.go
@@ -3,146 +3,178 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationMssqlMssql string
+type Mssql string
const (
- DestinationMssqlMssqlMssql DestinationMssqlMssql = "mssql"
+ MssqlMssql Mssql = "mssql"
)
-func (e DestinationMssqlMssql) ToPointer() *DestinationMssqlMssql {
+func (e Mssql) ToPointer() *Mssql {
return &e
}
-func (e *DestinationMssqlMssql) UnmarshalJSON(data []byte) error {
+func (e *Mssql) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "mssql":
- *e = DestinationMssqlMssql(v)
+ *e = Mssql(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMssqlMssql: %v", v)
+ return fmt.Errorf("invalid value for Mssql: %v", v)
}
}
-type DestinationMssqlSSLMethodEncryptedVerifyCertificateSSLMethod string
+type DestinationMssqlSchemasSslMethodSslMethod string
const (
- DestinationMssqlSSLMethodEncryptedVerifyCertificateSSLMethodEncryptedVerifyCertificate DestinationMssqlSSLMethodEncryptedVerifyCertificateSSLMethod = "encrypted_verify_certificate"
+ DestinationMssqlSchemasSslMethodSslMethodEncryptedVerifyCertificate DestinationMssqlSchemasSslMethodSslMethod = "encrypted_verify_certificate"
)
-func (e DestinationMssqlSSLMethodEncryptedVerifyCertificateSSLMethod) ToPointer() *DestinationMssqlSSLMethodEncryptedVerifyCertificateSSLMethod {
+func (e DestinationMssqlSchemasSslMethodSslMethod) ToPointer() *DestinationMssqlSchemasSslMethodSslMethod {
return &e
}
-func (e *DestinationMssqlSSLMethodEncryptedVerifyCertificateSSLMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMssqlSchemasSslMethodSslMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "encrypted_verify_certificate":
- *e = DestinationMssqlSSLMethodEncryptedVerifyCertificateSSLMethod(v)
+ *e = DestinationMssqlSchemasSslMethodSslMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMssqlSSLMethodEncryptedVerifyCertificateSSLMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMssqlSchemasSslMethodSslMethod: %v", v)
}
}
-// DestinationMssqlSSLMethodEncryptedVerifyCertificate - Verify and use the certificate provided by the server.
-type DestinationMssqlSSLMethodEncryptedVerifyCertificate struct {
+// DestinationMssqlEncryptedVerifyCertificate - Verify and use the certificate provided by the server.
+type DestinationMssqlEncryptedVerifyCertificate struct {
// Specifies the host name of the server. The value of this property must match the subject property of the certificate.
- HostNameInCertificate *string `json:"hostNameInCertificate,omitempty"`
- SslMethod DestinationMssqlSSLMethodEncryptedVerifyCertificateSSLMethod `json:"ssl_method"`
+ HostNameInCertificate *string `json:"hostNameInCertificate,omitempty"`
+ sslMethod *DestinationMssqlSchemasSslMethodSslMethod `const:"encrypted_verify_certificate" json:"ssl_method"`
}
-type DestinationMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod string
+func (d DestinationMssqlEncryptedVerifyCertificate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMssqlEncryptedVerifyCertificate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMssqlEncryptedVerifyCertificate) GetHostNameInCertificate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.HostNameInCertificate
+}
+
+func (o *DestinationMssqlEncryptedVerifyCertificate) GetSslMethod() *DestinationMssqlSchemasSslMethodSslMethod {
+ return DestinationMssqlSchemasSslMethodSslMethodEncryptedVerifyCertificate.ToPointer()
+}
+
+type DestinationMssqlSchemasSslMethod string
const (
- DestinationMssqlSSLMethodEncryptedTrustServerCertificateSSLMethodEncryptedTrustServerCertificate DestinationMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod = "encrypted_trust_server_certificate"
+ DestinationMssqlSchemasSslMethodEncryptedTrustServerCertificate DestinationMssqlSchemasSslMethod = "encrypted_trust_server_certificate"
)
-func (e DestinationMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod) ToPointer() *DestinationMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod {
+func (e DestinationMssqlSchemasSslMethod) ToPointer() *DestinationMssqlSchemasSslMethod {
return &e
}
-func (e *DestinationMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMssqlSchemasSslMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "encrypted_trust_server_certificate":
- *e = DestinationMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod(v)
+ *e = DestinationMssqlSchemasSslMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMssqlSchemasSslMethod: %v", v)
}
}
-// DestinationMssqlSSLMethodEncryptedTrustServerCertificate - Use the certificate provided by the server without verification. (For testing purposes only!)
-type DestinationMssqlSSLMethodEncryptedTrustServerCertificate struct {
- SslMethod DestinationMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod `json:"ssl_method"`
+// DestinationMssqlEncryptedTrustServerCertificate - Use the certificate provided by the server without verification. (For testing purposes only!)
+type DestinationMssqlEncryptedTrustServerCertificate struct {
+ sslMethod *DestinationMssqlSchemasSslMethod `const:"encrypted_trust_server_certificate" json:"ssl_method"`
+}
+
+func (d DestinationMssqlEncryptedTrustServerCertificate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMssqlEncryptedTrustServerCertificate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMssqlEncryptedTrustServerCertificate) GetSslMethod() *DestinationMssqlSchemasSslMethod {
+ return DestinationMssqlSchemasSslMethodEncryptedTrustServerCertificate.ToPointer()
}
type DestinationMssqlSSLMethodType string
const (
- DestinationMssqlSSLMethodTypeDestinationMssqlSSLMethodEncryptedTrustServerCertificate DestinationMssqlSSLMethodType = "destination-mssql_SSL Method_Encrypted (trust server certificate)"
- DestinationMssqlSSLMethodTypeDestinationMssqlSSLMethodEncryptedVerifyCertificate DestinationMssqlSSLMethodType = "destination-mssql_SSL Method_Encrypted (verify certificate)"
+ DestinationMssqlSSLMethodTypeDestinationMssqlEncryptedTrustServerCertificate DestinationMssqlSSLMethodType = "destination-mssql_Encrypted (trust server certificate)"
+ DestinationMssqlSSLMethodTypeDestinationMssqlEncryptedVerifyCertificate DestinationMssqlSSLMethodType = "destination-mssql_Encrypted (verify certificate)"
)
type DestinationMssqlSSLMethod struct {
- DestinationMssqlSSLMethodEncryptedTrustServerCertificate *DestinationMssqlSSLMethodEncryptedTrustServerCertificate
- DestinationMssqlSSLMethodEncryptedVerifyCertificate *DestinationMssqlSSLMethodEncryptedVerifyCertificate
+ DestinationMssqlEncryptedTrustServerCertificate *DestinationMssqlEncryptedTrustServerCertificate
+ DestinationMssqlEncryptedVerifyCertificate *DestinationMssqlEncryptedVerifyCertificate
Type DestinationMssqlSSLMethodType
}
-func CreateDestinationMssqlSSLMethodDestinationMssqlSSLMethodEncryptedTrustServerCertificate(destinationMssqlSSLMethodEncryptedTrustServerCertificate DestinationMssqlSSLMethodEncryptedTrustServerCertificate) DestinationMssqlSSLMethod {
- typ := DestinationMssqlSSLMethodTypeDestinationMssqlSSLMethodEncryptedTrustServerCertificate
+func CreateDestinationMssqlSSLMethodDestinationMssqlEncryptedTrustServerCertificate(destinationMssqlEncryptedTrustServerCertificate DestinationMssqlEncryptedTrustServerCertificate) DestinationMssqlSSLMethod {
+ typ := DestinationMssqlSSLMethodTypeDestinationMssqlEncryptedTrustServerCertificate
return DestinationMssqlSSLMethod{
- DestinationMssqlSSLMethodEncryptedTrustServerCertificate: &destinationMssqlSSLMethodEncryptedTrustServerCertificate,
+ DestinationMssqlEncryptedTrustServerCertificate: &destinationMssqlEncryptedTrustServerCertificate,
Type: typ,
}
}
-func CreateDestinationMssqlSSLMethodDestinationMssqlSSLMethodEncryptedVerifyCertificate(destinationMssqlSSLMethodEncryptedVerifyCertificate DestinationMssqlSSLMethodEncryptedVerifyCertificate) DestinationMssqlSSLMethod {
- typ := DestinationMssqlSSLMethodTypeDestinationMssqlSSLMethodEncryptedVerifyCertificate
+func CreateDestinationMssqlSSLMethodDestinationMssqlEncryptedVerifyCertificate(destinationMssqlEncryptedVerifyCertificate DestinationMssqlEncryptedVerifyCertificate) DestinationMssqlSSLMethod {
+ typ := DestinationMssqlSSLMethodTypeDestinationMssqlEncryptedVerifyCertificate
return DestinationMssqlSSLMethod{
- DestinationMssqlSSLMethodEncryptedVerifyCertificate: &destinationMssqlSSLMethodEncryptedVerifyCertificate,
+ DestinationMssqlEncryptedVerifyCertificate: &destinationMssqlEncryptedVerifyCertificate,
Type: typ,
}
}
func (u *DestinationMssqlSSLMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationMssqlSSLMethodEncryptedTrustServerCertificate := new(DestinationMssqlSSLMethodEncryptedTrustServerCertificate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMssqlSSLMethodEncryptedTrustServerCertificate); err == nil {
- u.DestinationMssqlSSLMethodEncryptedTrustServerCertificate = destinationMssqlSSLMethodEncryptedTrustServerCertificate
- u.Type = DestinationMssqlSSLMethodTypeDestinationMssqlSSLMethodEncryptedTrustServerCertificate
+
+ destinationMssqlEncryptedTrustServerCertificate := new(DestinationMssqlEncryptedTrustServerCertificate)
+ if err := utils.UnmarshalJSON(data, &destinationMssqlEncryptedTrustServerCertificate, "", true, true); err == nil {
+ u.DestinationMssqlEncryptedTrustServerCertificate = destinationMssqlEncryptedTrustServerCertificate
+ u.Type = DestinationMssqlSSLMethodTypeDestinationMssqlEncryptedTrustServerCertificate
return nil
}
- destinationMssqlSSLMethodEncryptedVerifyCertificate := new(DestinationMssqlSSLMethodEncryptedVerifyCertificate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMssqlSSLMethodEncryptedVerifyCertificate); err == nil {
- u.DestinationMssqlSSLMethodEncryptedVerifyCertificate = destinationMssqlSSLMethodEncryptedVerifyCertificate
- u.Type = DestinationMssqlSSLMethodTypeDestinationMssqlSSLMethodEncryptedVerifyCertificate
+ destinationMssqlEncryptedVerifyCertificate := new(DestinationMssqlEncryptedVerifyCertificate)
+ if err := utils.UnmarshalJSON(data, &destinationMssqlEncryptedVerifyCertificate, "", true, true); err == nil {
+ u.DestinationMssqlEncryptedVerifyCertificate = destinationMssqlEncryptedVerifyCertificate
+ u.Type = DestinationMssqlSSLMethodTypeDestinationMssqlEncryptedVerifyCertificate
return nil
}
@@ -150,196 +182,290 @@ func (u *DestinationMssqlSSLMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationMssqlSSLMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationMssqlSSLMethodEncryptedTrustServerCertificate != nil {
- return json.Marshal(u.DestinationMssqlSSLMethodEncryptedTrustServerCertificate)
+ if u.DestinationMssqlEncryptedTrustServerCertificate != nil {
+ return utils.MarshalJSON(u.DestinationMssqlEncryptedTrustServerCertificate, "", true)
}
- if u.DestinationMssqlSSLMethodEncryptedVerifyCertificate != nil {
- return json.Marshal(u.DestinationMssqlSSLMethodEncryptedVerifyCertificate)
+ if u.DestinationMssqlEncryptedVerifyCertificate != nil {
+ return utils.MarshalJSON(u.DestinationMssqlEncryptedVerifyCertificate, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationMssqlSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationMssqlSchemasTunnelMethodTunnelMethod string
const (
- DestinationMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationMssqlSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationMssqlSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationMssqlSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationMssqlSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMssqlSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationMssqlSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMssqlSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationMssqlSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMssqlSSHTunnelMethodPasswordAuthentication struct {
+// DestinationMssqlPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMssqlPasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMssqlSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationMssqlPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMssqlPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMssqlPasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMssqlPasswordAuthentication) GetTunnelMethod() DestinationMssqlSchemasTunnelMethodTunnelMethod {
+ return DestinationMssqlSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationMssqlPasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMssqlPasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationMssqlPasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationMssqlSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationMssqlSchemasTunnelMethod string
const (
- DestinationMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationMssqlSchemasTunnelMethodSSHKeyAuth DestinationMssqlSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationMssqlSchemasTunnelMethod) ToPointer() *DestinationMssqlSchemasTunnelMethod {
return &e
}
-func (e *DestinationMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMssqlSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationMssqlSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMssqlSchemasTunnelMethod: %v", v)
}
}
-// DestinationMssqlSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMssqlSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationMssqlSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMssqlSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMssqlSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationMssqlSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationMssqlSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationMssqlSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMssqlSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMssqlSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationMssqlSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMssqlSSHKeyAuthentication) GetTunnelMethod() DestinationMssqlSchemasTunnelMethod {
+ return DestinationMssqlSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationMssqlSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMssqlSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationMssqlTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationMssqlTunnelMethod string
const (
- DestinationMssqlSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationMssqlSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationMssqlTunnelMethodNoTunnel DestinationMssqlTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationMssqlSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationMssqlSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationMssqlTunnelMethod) ToPointer() *DestinationMssqlTunnelMethod {
return &e
}
-func (e *DestinationMssqlSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMssqlTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationMssqlSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationMssqlTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMssqlSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMssqlTunnelMethod: %v", v)
}
}
-// DestinationMssqlSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMssqlSSHTunnelMethodNoTunnel struct {
+// DestinationMssqlNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMssqlNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationMssqlSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMssqlTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationMssqlNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMssqlNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMssqlNoTunnel) GetTunnelMethod() DestinationMssqlTunnelMethod {
+ return DestinationMssqlTunnelMethodNoTunnel
}
type DestinationMssqlSSHTunnelMethodType string
const (
- DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHTunnelMethodNoTunnel DestinationMssqlSSHTunnelMethodType = "destination-mssql_SSH Tunnel Method_No Tunnel"
- DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHTunnelMethodSSHKeyAuthentication DestinationMssqlSSHTunnelMethodType = "destination-mssql_SSH Tunnel Method_SSH Key Authentication"
- DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHTunnelMethodPasswordAuthentication DestinationMssqlSSHTunnelMethodType = "destination-mssql_SSH Tunnel Method_Password Authentication"
+ DestinationMssqlSSHTunnelMethodTypeDestinationMssqlNoTunnel DestinationMssqlSSHTunnelMethodType = "destination-mssql_No Tunnel"
+ DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHKeyAuthentication DestinationMssqlSSHTunnelMethodType = "destination-mssql_SSH Key Authentication"
+ DestinationMssqlSSHTunnelMethodTypeDestinationMssqlPasswordAuthentication DestinationMssqlSSHTunnelMethodType = "destination-mssql_Password Authentication"
)
type DestinationMssqlSSHTunnelMethod struct {
- DestinationMssqlSSHTunnelMethodNoTunnel *DestinationMssqlSSHTunnelMethodNoTunnel
- DestinationMssqlSSHTunnelMethodSSHKeyAuthentication *DestinationMssqlSSHTunnelMethodSSHKeyAuthentication
- DestinationMssqlSSHTunnelMethodPasswordAuthentication *DestinationMssqlSSHTunnelMethodPasswordAuthentication
+ DestinationMssqlNoTunnel *DestinationMssqlNoTunnel
+ DestinationMssqlSSHKeyAuthentication *DestinationMssqlSSHKeyAuthentication
+ DestinationMssqlPasswordAuthentication *DestinationMssqlPasswordAuthentication
Type DestinationMssqlSSHTunnelMethodType
}
-func CreateDestinationMssqlSSHTunnelMethodDestinationMssqlSSHTunnelMethodNoTunnel(destinationMssqlSSHTunnelMethodNoTunnel DestinationMssqlSSHTunnelMethodNoTunnel) DestinationMssqlSSHTunnelMethod {
- typ := DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHTunnelMethodNoTunnel
+func CreateDestinationMssqlSSHTunnelMethodDestinationMssqlNoTunnel(destinationMssqlNoTunnel DestinationMssqlNoTunnel) DestinationMssqlSSHTunnelMethod {
+ typ := DestinationMssqlSSHTunnelMethodTypeDestinationMssqlNoTunnel
return DestinationMssqlSSHTunnelMethod{
- DestinationMssqlSSHTunnelMethodNoTunnel: &destinationMssqlSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationMssqlNoTunnel: &destinationMssqlNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationMssqlSSHTunnelMethodDestinationMssqlSSHTunnelMethodSSHKeyAuthentication(destinationMssqlSSHTunnelMethodSSHKeyAuthentication DestinationMssqlSSHTunnelMethodSSHKeyAuthentication) DestinationMssqlSSHTunnelMethod {
- typ := DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationMssqlSSHTunnelMethodDestinationMssqlSSHKeyAuthentication(destinationMssqlSSHKeyAuthentication DestinationMssqlSSHKeyAuthentication) DestinationMssqlSSHTunnelMethod {
+ typ := DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHKeyAuthentication
return DestinationMssqlSSHTunnelMethod{
- DestinationMssqlSSHTunnelMethodSSHKeyAuthentication: &destinationMssqlSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ DestinationMssqlSSHKeyAuthentication: &destinationMssqlSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateDestinationMssqlSSHTunnelMethodDestinationMssqlSSHTunnelMethodPasswordAuthentication(destinationMssqlSSHTunnelMethodPasswordAuthentication DestinationMssqlSSHTunnelMethodPasswordAuthentication) DestinationMssqlSSHTunnelMethod {
- typ := DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHTunnelMethodPasswordAuthentication
+func CreateDestinationMssqlSSHTunnelMethodDestinationMssqlPasswordAuthentication(destinationMssqlPasswordAuthentication DestinationMssqlPasswordAuthentication) DestinationMssqlSSHTunnelMethod {
+ typ := DestinationMssqlSSHTunnelMethodTypeDestinationMssqlPasswordAuthentication
return DestinationMssqlSSHTunnelMethod{
- DestinationMssqlSSHTunnelMethodPasswordAuthentication: &destinationMssqlSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ DestinationMssqlPasswordAuthentication: &destinationMssqlPasswordAuthentication,
+ Type: typ,
}
}
func (u *DestinationMssqlSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationMssqlSSHTunnelMethodNoTunnel := new(DestinationMssqlSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMssqlSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationMssqlSSHTunnelMethodNoTunnel = destinationMssqlSSHTunnelMethodNoTunnel
- u.Type = DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHTunnelMethodNoTunnel
+
+ destinationMssqlNoTunnel := new(DestinationMssqlNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationMssqlNoTunnel, "", true, true); err == nil {
+ u.DestinationMssqlNoTunnel = destinationMssqlNoTunnel
+ u.Type = DestinationMssqlSSHTunnelMethodTypeDestinationMssqlNoTunnel
return nil
}
- destinationMssqlSSHTunnelMethodSSHKeyAuthentication := new(DestinationMssqlSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMssqlSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationMssqlSSHTunnelMethodSSHKeyAuthentication = destinationMssqlSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHTunnelMethodSSHKeyAuthentication
+ destinationMssqlSSHKeyAuthentication := new(DestinationMssqlSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMssqlSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationMssqlSSHKeyAuthentication = destinationMssqlSSHKeyAuthentication
+ u.Type = DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHKeyAuthentication
return nil
}
- destinationMssqlSSHTunnelMethodPasswordAuthentication := new(DestinationMssqlSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMssqlSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationMssqlSSHTunnelMethodPasswordAuthentication = destinationMssqlSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationMssqlSSHTunnelMethodTypeDestinationMssqlSSHTunnelMethodPasswordAuthentication
+ destinationMssqlPasswordAuthentication := new(DestinationMssqlPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMssqlPasswordAuthentication, "", true, true); err == nil {
+ u.DestinationMssqlPasswordAuthentication = destinationMssqlPasswordAuthentication
+ u.Type = DestinationMssqlSSHTunnelMethodTypeDestinationMssqlPasswordAuthentication
return nil
}
@@ -347,25 +473,25 @@ func (u *DestinationMssqlSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationMssqlSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationMssqlSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationMssqlSSHTunnelMethodNoTunnel)
+ if u.DestinationMssqlNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationMssqlNoTunnel, "", true)
}
- if u.DestinationMssqlSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationMssqlSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationMssqlSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMssqlSSHKeyAuthentication, "", true)
}
- if u.DestinationMssqlSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationMssqlSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationMssqlPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMssqlPasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationMssql struct {
// The name of the MSSQL database.
- Database string `json:"database"`
- DestinationType DestinationMssqlMssql `json:"destinationType"`
+ Database string `json:"database"`
+ destinationType Mssql `const:"mssql" json:"destinationType"`
// The host name of the MSSQL database.
Host string `json:"host"`
// Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
@@ -373,9 +499,9 @@ type DestinationMssql struct {
// The password associated with this username.
Password *string `json:"password,omitempty"`
// The port of the MSSQL database.
- Port int64 `json:"port"`
+ Port *int64 `default:"1433" json:"port"`
// The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".
- Schema string `json:"schema"`
+ Schema *string `default:"public" json:"schema"`
// The encryption method which is used to communicate with the database.
SslMethod *DestinationMssqlSSLMethod `json:"ssl_method,omitempty"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
@@ -383,3 +509,81 @@ type DestinationMssql struct {
// The username which is used to access the database.
Username string `json:"username"`
}
+
+func (d DestinationMssql) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMssql) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMssql) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationMssql) GetDestinationType() Mssql {
+ return MssqlMssql
+}
+
+func (o *DestinationMssql) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationMssql) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationMssql) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationMssql) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationMssql) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *DestinationMssql) GetSslMethod() *DestinationMssqlSSLMethod {
+ if o == nil {
+ return nil
+ }
+ return o.SslMethod
+}
+
+func (o *DestinationMssql) GetTunnelMethod() *DestinationMssqlSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationMssql) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationmssqlcreaterequest.go b/internal/sdk/pkg/models/shared/destinationmssqlcreaterequest.go
old mode 100755
new mode 100644
index d33281301..5fa0091f5
--- a/internal/sdk/pkg/models/shared/destinationmssqlcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationmssqlcreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationMssqlCreateRequest struct {
Configuration DestinationMssql `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationMssqlCreateRequest) GetConfiguration() DestinationMssql {
+ if o == nil {
+ return DestinationMssql{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationMssqlCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationMssqlCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationMssqlCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationmssqlputrequest.go b/internal/sdk/pkg/models/shared/destinationmssqlputrequest.go
old mode 100755
new mode 100644
index 9b049df3b..986b1558d
--- a/internal/sdk/pkg/models/shared/destinationmssqlputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationmssqlputrequest.go
@@ -7,3 +7,24 @@ type DestinationMssqlPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationMssqlPutRequest) GetConfiguration() DestinationMssqlUpdate {
+ if o == nil {
+ return DestinationMssqlUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationMssqlPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationMssqlPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationmssqlupdate.go b/internal/sdk/pkg/models/shared/destinationmssqlupdate.go
old mode 100755
new mode 100644
index dca62a8ff..db963d1fe
--- a/internal/sdk/pkg/models/shared/destinationmssqlupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationmssqlupdate.go
@@ -3,319 +3,445 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod string
+type DestinationMssqlUpdateSchemasSslMethod string
const (
- DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethodEncryptedVerifyCertificate DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod = "encrypted_verify_certificate"
+ DestinationMssqlUpdateSchemasSslMethodEncryptedVerifyCertificate DestinationMssqlUpdateSchemasSslMethod = "encrypted_verify_certificate"
)
-func (e DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod) ToPointer() *DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod {
+func (e DestinationMssqlUpdateSchemasSslMethod) ToPointer() *DestinationMssqlUpdateSchemasSslMethod {
return &e
}
-func (e *DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMssqlUpdateSchemasSslMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "encrypted_verify_certificate":
- *e = DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod(v)
+ *e = DestinationMssqlUpdateSchemasSslMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMssqlUpdateSchemasSslMethod: %v", v)
}
}
-// DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate - Verify and use the certificate provided by the server.
-type DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate struct {
+// EncryptedVerifyCertificate - Verify and use the certificate provided by the server.
+type EncryptedVerifyCertificate struct {
// Specifies the host name of the server. The value of this property must match the subject property of the certificate.
- HostNameInCertificate *string `json:"hostNameInCertificate,omitempty"`
- SslMethod DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod `json:"ssl_method"`
+ HostNameInCertificate *string `json:"hostNameInCertificate,omitempty"`
+ sslMethod *DestinationMssqlUpdateSchemasSslMethod `const:"encrypted_verify_certificate" json:"ssl_method"`
}
-type DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod string
+func (e EncryptedVerifyCertificate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(e, "", false)
+}
+
+func (e *EncryptedVerifyCertificate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &e, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *EncryptedVerifyCertificate) GetHostNameInCertificate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.HostNameInCertificate
+}
+
+func (o *EncryptedVerifyCertificate) GetSslMethod() *DestinationMssqlUpdateSchemasSslMethod {
+ return DestinationMssqlUpdateSchemasSslMethodEncryptedVerifyCertificate.ToPointer()
+}
+
+type DestinationMssqlUpdateSslMethod string
const (
- DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethodEncryptedTrustServerCertificate DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod = "encrypted_trust_server_certificate"
+ DestinationMssqlUpdateSslMethodEncryptedTrustServerCertificate DestinationMssqlUpdateSslMethod = "encrypted_trust_server_certificate"
)
-func (e DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod) ToPointer() *DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod {
+func (e DestinationMssqlUpdateSslMethod) ToPointer() *DestinationMssqlUpdateSslMethod {
return &e
}
-func (e *DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMssqlUpdateSslMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "encrypted_trust_server_certificate":
- *e = DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod(v)
+ *e = DestinationMssqlUpdateSslMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMssqlUpdateSslMethod: %v", v)
}
}
-// DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate - Use the certificate provided by the server without verification. (For testing purposes only!)
-type DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate struct {
- SslMethod DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod `json:"ssl_method"`
+// EncryptedTrustServerCertificate - Use the certificate provided by the server without verification. (For testing purposes only!)
+type EncryptedTrustServerCertificate struct {
+ sslMethod *DestinationMssqlUpdateSslMethod `const:"encrypted_trust_server_certificate" json:"ssl_method"`
}
-type DestinationMssqlUpdateSSLMethodType string
+func (e EncryptedTrustServerCertificate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(e, "", false)
+}
+
+func (e *EncryptedTrustServerCertificate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &e, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *EncryptedTrustServerCertificate) GetSslMethod() *DestinationMssqlUpdateSslMethod {
+ return DestinationMssqlUpdateSslMethodEncryptedTrustServerCertificate.ToPointer()
+}
+
+type SSLMethodType string
const (
- DestinationMssqlUpdateSSLMethodTypeDestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate DestinationMssqlUpdateSSLMethodType = "destination-mssql-update_SSL Method_Encrypted (trust server certificate)"
- DestinationMssqlUpdateSSLMethodTypeDestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate DestinationMssqlUpdateSSLMethodType = "destination-mssql-update_SSL Method_Encrypted (verify certificate)"
+ SSLMethodTypeEncryptedTrustServerCertificate SSLMethodType = "Encrypted (trust server certificate)"
+ SSLMethodTypeEncryptedVerifyCertificate SSLMethodType = "Encrypted (verify certificate)"
)
-type DestinationMssqlUpdateSSLMethod struct {
- DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate *DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate
- DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate *DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate
+type SSLMethod struct {
+ EncryptedTrustServerCertificate *EncryptedTrustServerCertificate
+ EncryptedVerifyCertificate *EncryptedVerifyCertificate
- Type DestinationMssqlUpdateSSLMethodType
+ Type SSLMethodType
}
-func CreateDestinationMssqlUpdateSSLMethodDestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate(destinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate) DestinationMssqlUpdateSSLMethod {
- typ := DestinationMssqlUpdateSSLMethodTypeDestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate
+func CreateSSLMethodEncryptedTrustServerCertificate(encryptedTrustServerCertificate EncryptedTrustServerCertificate) SSLMethod {
+ typ := SSLMethodTypeEncryptedTrustServerCertificate
- return DestinationMssqlUpdateSSLMethod{
- DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate: &destinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate,
- Type: typ,
+ return SSLMethod{
+ EncryptedTrustServerCertificate: &encryptedTrustServerCertificate,
+ Type: typ,
}
}
-func CreateDestinationMssqlUpdateSSLMethodDestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate(destinationMssqlUpdateSSLMethodEncryptedVerifyCertificate DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate) DestinationMssqlUpdateSSLMethod {
- typ := DestinationMssqlUpdateSSLMethodTypeDestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate
+func CreateSSLMethodEncryptedVerifyCertificate(encryptedVerifyCertificate EncryptedVerifyCertificate) SSLMethod {
+ typ := SSLMethodTypeEncryptedVerifyCertificate
- return DestinationMssqlUpdateSSLMethod{
- DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate: &destinationMssqlUpdateSSLMethodEncryptedVerifyCertificate,
- Type: typ,
+ return SSLMethod{
+ EncryptedVerifyCertificate: &encryptedVerifyCertificate,
+ Type: typ,
}
}
-func (u *DestinationMssqlUpdateSSLMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *SSLMethod) UnmarshalJSON(data []byte) error {
- destinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate := new(DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate); err == nil {
- u.DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate = destinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate
- u.Type = DestinationMssqlUpdateSSLMethodTypeDestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate
+ encryptedTrustServerCertificate := new(EncryptedTrustServerCertificate)
+ if err := utils.UnmarshalJSON(data, &encryptedTrustServerCertificate, "", true, true); err == nil {
+ u.EncryptedTrustServerCertificate = encryptedTrustServerCertificate
+ u.Type = SSLMethodTypeEncryptedTrustServerCertificate
return nil
}
- destinationMssqlUpdateSSLMethodEncryptedVerifyCertificate := new(DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMssqlUpdateSSLMethodEncryptedVerifyCertificate); err == nil {
- u.DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate = destinationMssqlUpdateSSLMethodEncryptedVerifyCertificate
- u.Type = DestinationMssqlUpdateSSLMethodTypeDestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate
+ encryptedVerifyCertificate := new(EncryptedVerifyCertificate)
+ if err := utils.UnmarshalJSON(data, &encryptedVerifyCertificate, "", true, true); err == nil {
+ u.EncryptedVerifyCertificate = encryptedVerifyCertificate
+ u.Type = SSLMethodTypeEncryptedVerifyCertificate
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationMssqlUpdateSSLMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate != nil {
- return json.Marshal(u.DestinationMssqlUpdateSSLMethodEncryptedTrustServerCertificate)
+func (u SSLMethod) MarshalJSON() ([]byte, error) {
+ if u.EncryptedTrustServerCertificate != nil {
+ return utils.MarshalJSON(u.EncryptedTrustServerCertificate, "", true)
}
- if u.DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate != nil {
- return json.Marshal(u.DestinationMssqlUpdateSSLMethodEncryptedVerifyCertificate)
+ if u.EncryptedVerifyCertificate != nil {
+ return utils.MarshalJSON(u.EncryptedVerifyCertificate, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationMssqlUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationMssqlUpdateSchemasTunnelMethodTunnelMethod string
const (
- DestinationMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationMssqlUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationMssqlUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationMssqlUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationMssqlUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMssqlUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationMssqlUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMssqlUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication struct {
+// DestinationMssqlUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMssqlUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMssqlUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationMssqlUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMssqlUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMssqlUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMssqlUpdatePasswordAuthentication) GetTunnelMethod() DestinationMssqlUpdateSchemasTunnelMethodTunnelMethod {
+ return DestinationMssqlUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationMssqlUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMssqlUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationMssqlUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationMssqlUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationMssqlUpdateSchemasTunnelMethod string
const (
- DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationMssqlUpdateSchemasTunnelMethodSSHKeyAuth DestinationMssqlUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationMssqlUpdateSchemasTunnelMethod) ToPointer() *DestinationMssqlUpdateSchemasTunnelMethod {
return &e
}
-func (e *DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMssqlUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationMssqlUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMssqlUpdateSchemasTunnelMethod: %v", v)
}
}
-// DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationMssqlUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMssqlUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMssqlUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationMssqlUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMssqlUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMssqlUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationMssqlUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMssqlUpdateSSHKeyAuthentication) GetTunnelMethod() DestinationMssqlUpdateSchemasTunnelMethod {
+ return DestinationMssqlUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationMssqlUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMssqlUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationMssqlUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationMssqlUpdateTunnelMethod string
const (
- DestinationMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationMssqlUpdateTunnelMethodNoTunnel DestinationMssqlUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationMssqlUpdateTunnelMethod) ToPointer() *DestinationMssqlUpdateTunnelMethod {
return &e
}
-func (e *DestinationMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMssqlUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationMssqlUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMssqlUpdateTunnelMethod: %v", v)
}
}
-// DestinationMssqlUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMssqlUpdateSSHTunnelMethodNoTunnel struct {
+// DestinationMssqlUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMssqlUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMssqlUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationMssqlUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMssqlUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMssqlUpdateNoTunnel) GetTunnelMethod() DestinationMssqlUpdateTunnelMethod {
+ return DestinationMssqlUpdateTunnelMethodNoTunnel
}
type DestinationMssqlUpdateSSHTunnelMethodType string
const (
- DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHTunnelMethodNoTunnel DestinationMssqlUpdateSSHTunnelMethodType = "destination-mssql-update_SSH Tunnel Method_No Tunnel"
- DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication DestinationMssqlUpdateSSHTunnelMethodType = "destination-mssql-update_SSH Tunnel Method_SSH Key Authentication"
- DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication DestinationMssqlUpdateSSHTunnelMethodType = "destination-mssql-update_SSH Tunnel Method_Password Authentication"
+ DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateNoTunnel DestinationMssqlUpdateSSHTunnelMethodType = "destination-mssql-update_No Tunnel"
+ DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHKeyAuthentication DestinationMssqlUpdateSSHTunnelMethodType = "destination-mssql-update_SSH Key Authentication"
+ DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdatePasswordAuthentication DestinationMssqlUpdateSSHTunnelMethodType = "destination-mssql-update_Password Authentication"
)
type DestinationMssqlUpdateSSHTunnelMethod struct {
- DestinationMssqlUpdateSSHTunnelMethodNoTunnel *DestinationMssqlUpdateSSHTunnelMethodNoTunnel
- DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication
- DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication *DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication
+ DestinationMssqlUpdateNoTunnel *DestinationMssqlUpdateNoTunnel
+ DestinationMssqlUpdateSSHKeyAuthentication *DestinationMssqlUpdateSSHKeyAuthentication
+ DestinationMssqlUpdatePasswordAuthentication *DestinationMssqlUpdatePasswordAuthentication
Type DestinationMssqlUpdateSSHTunnelMethodType
}
-func CreateDestinationMssqlUpdateSSHTunnelMethodDestinationMssqlUpdateSSHTunnelMethodNoTunnel(destinationMssqlUpdateSSHTunnelMethodNoTunnel DestinationMssqlUpdateSSHTunnelMethodNoTunnel) DestinationMssqlUpdateSSHTunnelMethod {
- typ := DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHTunnelMethodNoTunnel
+func CreateDestinationMssqlUpdateSSHTunnelMethodDestinationMssqlUpdateNoTunnel(destinationMssqlUpdateNoTunnel DestinationMssqlUpdateNoTunnel) DestinationMssqlUpdateSSHTunnelMethod {
+ typ := DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateNoTunnel
return DestinationMssqlUpdateSSHTunnelMethod{
- DestinationMssqlUpdateSSHTunnelMethodNoTunnel: &destinationMssqlUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationMssqlUpdateNoTunnel: &destinationMssqlUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationMssqlUpdateSSHTunnelMethodDestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication(destinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication) DestinationMssqlUpdateSSHTunnelMethod {
- typ := DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationMssqlUpdateSSHTunnelMethodDestinationMssqlUpdateSSHKeyAuthentication(destinationMssqlUpdateSSHKeyAuthentication DestinationMssqlUpdateSSHKeyAuthentication) DestinationMssqlUpdateSSHTunnelMethod {
+ typ := DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHKeyAuthentication
return DestinationMssqlUpdateSSHTunnelMethod{
- DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication: &destinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationMssqlUpdateSSHKeyAuthentication: &destinationMssqlUpdateSSHKeyAuthentication,
Type: typ,
}
}
-func CreateDestinationMssqlUpdateSSHTunnelMethodDestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication(destinationMssqlUpdateSSHTunnelMethodPasswordAuthentication DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication) DestinationMssqlUpdateSSHTunnelMethod {
- typ := DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication
+func CreateDestinationMssqlUpdateSSHTunnelMethodDestinationMssqlUpdatePasswordAuthentication(destinationMssqlUpdatePasswordAuthentication DestinationMssqlUpdatePasswordAuthentication) DestinationMssqlUpdateSSHTunnelMethod {
+ typ := DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdatePasswordAuthentication
return DestinationMssqlUpdateSSHTunnelMethod{
- DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication: &destinationMssqlUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationMssqlUpdatePasswordAuthentication: &destinationMssqlUpdatePasswordAuthentication,
Type: typ,
}
}
func (u *DestinationMssqlUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationMssqlUpdateSSHTunnelMethodNoTunnel := new(DestinationMssqlUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMssqlUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationMssqlUpdateSSHTunnelMethodNoTunnel = destinationMssqlUpdateSSHTunnelMethodNoTunnel
- u.Type = DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHTunnelMethodNoTunnel
+
+ destinationMssqlUpdateNoTunnel := new(DestinationMssqlUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationMssqlUpdateNoTunnel, "", true, true); err == nil {
+ u.DestinationMssqlUpdateNoTunnel = destinationMssqlUpdateNoTunnel
+ u.Type = DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateNoTunnel
return nil
}
- destinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication := new(DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication = destinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication
+ destinationMssqlUpdateSSHKeyAuthentication := new(DestinationMssqlUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMssqlUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationMssqlUpdateSSHKeyAuthentication = destinationMssqlUpdateSSHKeyAuthentication
+ u.Type = DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHKeyAuthentication
return nil
}
- destinationMssqlUpdateSSHTunnelMethodPasswordAuthentication := new(DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMssqlUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication = destinationMssqlUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication
+ destinationMssqlUpdatePasswordAuthentication := new(DestinationMssqlUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMssqlUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.DestinationMssqlUpdatePasswordAuthentication = destinationMssqlUpdatePasswordAuthentication
+ u.Type = DestinationMssqlUpdateSSHTunnelMethodTypeDestinationMssqlUpdatePasswordAuthentication
return nil
}
@@ -323,19 +449,19 @@ func (u *DestinationMssqlUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error
}
func (u DestinationMssqlUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationMssqlUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationMssqlUpdateSSHTunnelMethodNoTunnel)
+ if u.DestinationMssqlUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationMssqlUpdateNoTunnel, "", true)
}
- if u.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationMssqlUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationMssqlUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMssqlUpdateSSHKeyAuthentication, "", true)
}
- if u.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationMssqlUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationMssqlUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMssqlUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationMssqlUpdate struct {
@@ -348,13 +474,87 @@ type DestinationMssqlUpdate struct {
// The password associated with this username.
Password *string `json:"password,omitempty"`
// The port of the MSSQL database.
- Port int64 `json:"port"`
+ Port *int64 `default:"1433" json:"port"`
// The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".
- Schema string `json:"schema"`
+ Schema *string `default:"public" json:"schema"`
// The encryption method which is used to communicate with the database.
- SslMethod *DestinationMssqlUpdateSSLMethod `json:"ssl_method,omitempty"`
+ SslMethod *SSLMethod `json:"ssl_method,omitempty"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *DestinationMssqlUpdateSSHTunnelMethod `json:"tunnel_method,omitempty"`
// The username which is used to access the database.
Username string `json:"username"`
}
+
+func (d DestinationMssqlUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMssqlUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMssqlUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationMssqlUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationMssqlUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationMssqlUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationMssqlUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationMssqlUpdate) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *DestinationMssqlUpdate) GetSslMethod() *SSLMethod {
+ if o == nil {
+ return nil
+ }
+ return o.SslMethod
+}
+
+func (o *DestinationMssqlUpdate) GetTunnelMethod() *DestinationMssqlUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationMssqlUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationmysql.go b/internal/sdk/pkg/models/shared/destinationmysql.go
old mode 100755
new mode 100644
index 737a5cf66..d145768b0
--- a/internal/sdk/pkg/models/shared/destinationmysql.go
+++ b/internal/sdk/pkg/models/shared/destinationmysql.go
@@ -3,215 +3,309 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationMysqlMysql string
+type Mysql string
const (
- DestinationMysqlMysqlMysql DestinationMysqlMysql = "mysql"
+ MysqlMysql Mysql = "mysql"
)
-func (e DestinationMysqlMysql) ToPointer() *DestinationMysqlMysql {
+func (e Mysql) ToPointer() *Mysql {
return &e
}
-func (e *DestinationMysqlMysql) UnmarshalJSON(data []byte) error {
+func (e *Mysql) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "mysql":
- *e = DestinationMysqlMysql(v)
+ *e = Mysql(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMysqlMysql: %v", v)
+ return fmt.Errorf("invalid value for Mysql: %v", v)
}
}
-// DestinationMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationMysqlSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationMysqlSchemasTunnelMethodTunnelMethod string
const (
- DestinationMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationMysqlSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationMysqlSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationMysqlSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationMysqlSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMysqlSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationMysqlSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMysqlSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationMysqlSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMysqlSSHTunnelMethodPasswordAuthentication struct {
+// DestinationMysqlPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMysqlPasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMysqlSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationMysqlPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMysqlPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMysqlPasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMysqlPasswordAuthentication) GetTunnelMethod() DestinationMysqlSchemasTunnelMethodTunnelMethod {
+ return DestinationMysqlSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationMysqlPasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMysqlPasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationMysqlPasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationMysqlSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationMysqlSchemasTunnelMethod string
const (
- DestinationMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationMysqlSchemasTunnelMethodSSHKeyAuth DestinationMysqlSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationMysqlSchemasTunnelMethod) ToPointer() *DestinationMysqlSchemasTunnelMethod {
return &e
}
-func (e *DestinationMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMysqlSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationMysqlSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMysqlSchemasTunnelMethod: %v", v)
}
}
-// DestinationMysqlSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMysqlSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationMysqlSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMysqlSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMysqlSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationMysqlSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationMysqlSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationMysqlSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMysqlSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMysqlSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationMysqlSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMysqlSSHKeyAuthentication) GetTunnelMethod() DestinationMysqlSchemasTunnelMethod {
+ return DestinationMysqlSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationMysqlSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMysqlSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationMysqlTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationMysqlTunnelMethod string
const (
- DestinationMysqlSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationMysqlSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationMysqlTunnelMethodNoTunnel DestinationMysqlTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationMysqlSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationMysqlSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationMysqlTunnelMethod) ToPointer() *DestinationMysqlTunnelMethod {
return &e
}
-func (e *DestinationMysqlSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMysqlTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationMysqlSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationMysqlTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMysqlSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMysqlTunnelMethod: %v", v)
}
}
-// DestinationMysqlSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMysqlSSHTunnelMethodNoTunnel struct {
+// DestinationMysqlNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMysqlNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationMysqlSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMysqlTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationMysqlNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMysqlNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMysqlNoTunnel) GetTunnelMethod() DestinationMysqlTunnelMethod {
+ return DestinationMysqlTunnelMethodNoTunnel
}
type DestinationMysqlSSHTunnelMethodType string
const (
- DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHTunnelMethodNoTunnel DestinationMysqlSSHTunnelMethodType = "destination-mysql_SSH Tunnel Method_No Tunnel"
- DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHTunnelMethodSSHKeyAuthentication DestinationMysqlSSHTunnelMethodType = "destination-mysql_SSH Tunnel Method_SSH Key Authentication"
- DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHTunnelMethodPasswordAuthentication DestinationMysqlSSHTunnelMethodType = "destination-mysql_SSH Tunnel Method_Password Authentication"
+ DestinationMysqlSSHTunnelMethodTypeDestinationMysqlNoTunnel DestinationMysqlSSHTunnelMethodType = "destination-mysql_No Tunnel"
+ DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHKeyAuthentication DestinationMysqlSSHTunnelMethodType = "destination-mysql_SSH Key Authentication"
+ DestinationMysqlSSHTunnelMethodTypeDestinationMysqlPasswordAuthentication DestinationMysqlSSHTunnelMethodType = "destination-mysql_Password Authentication"
)
type DestinationMysqlSSHTunnelMethod struct {
- DestinationMysqlSSHTunnelMethodNoTunnel *DestinationMysqlSSHTunnelMethodNoTunnel
- DestinationMysqlSSHTunnelMethodSSHKeyAuthentication *DestinationMysqlSSHTunnelMethodSSHKeyAuthentication
- DestinationMysqlSSHTunnelMethodPasswordAuthentication *DestinationMysqlSSHTunnelMethodPasswordAuthentication
+ DestinationMysqlNoTunnel *DestinationMysqlNoTunnel
+ DestinationMysqlSSHKeyAuthentication *DestinationMysqlSSHKeyAuthentication
+ DestinationMysqlPasswordAuthentication *DestinationMysqlPasswordAuthentication
Type DestinationMysqlSSHTunnelMethodType
}
-func CreateDestinationMysqlSSHTunnelMethodDestinationMysqlSSHTunnelMethodNoTunnel(destinationMysqlSSHTunnelMethodNoTunnel DestinationMysqlSSHTunnelMethodNoTunnel) DestinationMysqlSSHTunnelMethod {
- typ := DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHTunnelMethodNoTunnel
+func CreateDestinationMysqlSSHTunnelMethodDestinationMysqlNoTunnel(destinationMysqlNoTunnel DestinationMysqlNoTunnel) DestinationMysqlSSHTunnelMethod {
+ typ := DestinationMysqlSSHTunnelMethodTypeDestinationMysqlNoTunnel
return DestinationMysqlSSHTunnelMethod{
- DestinationMysqlSSHTunnelMethodNoTunnel: &destinationMysqlSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationMysqlNoTunnel: &destinationMysqlNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationMysqlSSHTunnelMethodDestinationMysqlSSHTunnelMethodSSHKeyAuthentication(destinationMysqlSSHTunnelMethodSSHKeyAuthentication DestinationMysqlSSHTunnelMethodSSHKeyAuthentication) DestinationMysqlSSHTunnelMethod {
- typ := DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationMysqlSSHTunnelMethodDestinationMysqlSSHKeyAuthentication(destinationMysqlSSHKeyAuthentication DestinationMysqlSSHKeyAuthentication) DestinationMysqlSSHTunnelMethod {
+ typ := DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHKeyAuthentication
return DestinationMysqlSSHTunnelMethod{
- DestinationMysqlSSHTunnelMethodSSHKeyAuthentication: &destinationMysqlSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ DestinationMysqlSSHKeyAuthentication: &destinationMysqlSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateDestinationMysqlSSHTunnelMethodDestinationMysqlSSHTunnelMethodPasswordAuthentication(destinationMysqlSSHTunnelMethodPasswordAuthentication DestinationMysqlSSHTunnelMethodPasswordAuthentication) DestinationMysqlSSHTunnelMethod {
- typ := DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHTunnelMethodPasswordAuthentication
+func CreateDestinationMysqlSSHTunnelMethodDestinationMysqlPasswordAuthentication(destinationMysqlPasswordAuthentication DestinationMysqlPasswordAuthentication) DestinationMysqlSSHTunnelMethod {
+ typ := DestinationMysqlSSHTunnelMethodTypeDestinationMysqlPasswordAuthentication
return DestinationMysqlSSHTunnelMethod{
- DestinationMysqlSSHTunnelMethodPasswordAuthentication: &destinationMysqlSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ DestinationMysqlPasswordAuthentication: &destinationMysqlPasswordAuthentication,
+ Type: typ,
}
}
func (u *DestinationMysqlSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationMysqlSSHTunnelMethodNoTunnel := new(DestinationMysqlSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMysqlSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationMysqlSSHTunnelMethodNoTunnel = destinationMysqlSSHTunnelMethodNoTunnel
- u.Type = DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHTunnelMethodNoTunnel
+
+ destinationMysqlNoTunnel := new(DestinationMysqlNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationMysqlNoTunnel, "", true, true); err == nil {
+ u.DestinationMysqlNoTunnel = destinationMysqlNoTunnel
+ u.Type = DestinationMysqlSSHTunnelMethodTypeDestinationMysqlNoTunnel
return nil
}
- destinationMysqlSSHTunnelMethodSSHKeyAuthentication := new(DestinationMysqlSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMysqlSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationMysqlSSHTunnelMethodSSHKeyAuthentication = destinationMysqlSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHTunnelMethodSSHKeyAuthentication
+ destinationMysqlSSHKeyAuthentication := new(DestinationMysqlSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMysqlSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationMysqlSSHKeyAuthentication = destinationMysqlSSHKeyAuthentication
+ u.Type = DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHKeyAuthentication
return nil
}
- destinationMysqlSSHTunnelMethodPasswordAuthentication := new(DestinationMysqlSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMysqlSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationMysqlSSHTunnelMethodPasswordAuthentication = destinationMysqlSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationMysqlSSHTunnelMethodTypeDestinationMysqlSSHTunnelMethodPasswordAuthentication
+ destinationMysqlPasswordAuthentication := new(DestinationMysqlPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMysqlPasswordAuthentication, "", true, true); err == nil {
+ u.DestinationMysqlPasswordAuthentication = destinationMysqlPasswordAuthentication
+ u.Type = DestinationMysqlSSHTunnelMethodTypeDestinationMysqlPasswordAuthentication
return nil
}
@@ -219,25 +313,25 @@ func (u *DestinationMysqlSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationMysqlSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationMysqlSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationMysqlSSHTunnelMethodNoTunnel)
+ if u.DestinationMysqlNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationMysqlNoTunnel, "", true)
}
- if u.DestinationMysqlSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationMysqlSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationMysqlSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMysqlSSHKeyAuthentication, "", true)
}
- if u.DestinationMysqlSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationMysqlSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationMysqlPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMysqlPasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationMysql struct {
// Name of the database.
- Database string `json:"database"`
- DestinationType DestinationMysqlMysql `json:"destinationType"`
+ Database string `json:"database"`
+ destinationType Mysql `const:"mysql" json:"destinationType"`
// Hostname of the database.
Host string `json:"host"`
// Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
@@ -245,9 +339,73 @@ type DestinationMysql struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"3306" json:"port"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *DestinationMysqlSSHTunnelMethod `json:"tunnel_method,omitempty"`
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationMysql) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMysql) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMysql) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationMysql) GetDestinationType() Mysql {
+ return MysqlMysql
+}
+
+func (o *DestinationMysql) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationMysql) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationMysql) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationMysql) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationMysql) GetTunnelMethod() *DestinationMysqlSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationMysql) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationmysqlcreaterequest.go b/internal/sdk/pkg/models/shared/destinationmysqlcreaterequest.go
old mode 100755
new mode 100644
index a2ff3ac19..649c6e0c5
--- a/internal/sdk/pkg/models/shared/destinationmysqlcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationmysqlcreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationMysqlCreateRequest struct {
Configuration DestinationMysql `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationMysqlCreateRequest) GetConfiguration() DestinationMysql {
+ if o == nil {
+ return DestinationMysql{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationMysqlCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationMysqlCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationMysqlCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationmysqlputrequest.go b/internal/sdk/pkg/models/shared/destinationmysqlputrequest.go
old mode 100755
new mode 100644
index 79a051820..95560d135
--- a/internal/sdk/pkg/models/shared/destinationmysqlputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationmysqlputrequest.go
@@ -7,3 +7,24 @@ type DestinationMysqlPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationMysqlPutRequest) GetConfiguration() DestinationMysqlUpdate {
+ if o == nil {
+ return DestinationMysqlUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationMysqlPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationMysqlPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationmysqlupdate.go b/internal/sdk/pkg/models/shared/destinationmysqlupdate.go
old mode 100755
new mode 100644
index 9d5ce93e0..f75fe6348
--- a/internal/sdk/pkg/models/shared/destinationmysqlupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationmysqlupdate.go
@@ -3,191 +3,285 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// DestinationMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationMysqlUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationMysqlUpdateSchemasTunnelMethodTunnelMethod string
const (
- DestinationMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationMysqlUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationMysqlUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationMysqlUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationMysqlUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMysqlUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationMysqlUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMysqlUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication struct {
+// DestinationMysqlUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMysqlUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMysqlUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationMysqlUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMysqlUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMysqlUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMysqlUpdatePasswordAuthentication) GetTunnelMethod() DestinationMysqlUpdateSchemasTunnelMethodTunnelMethod {
+ return DestinationMysqlUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationMysqlUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMysqlUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationMysqlUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationMysqlUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationMysqlUpdateSchemasTunnelMethod string
const (
- DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationMysqlUpdateSchemasTunnelMethodSSHKeyAuth DestinationMysqlUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationMysqlUpdateSchemasTunnelMethod) ToPointer() *DestinationMysqlUpdateSchemasTunnelMethod {
return &e
}
-func (e *DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMysqlUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationMysqlUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMysqlUpdateSchemasTunnelMethod: %v", v)
}
}
-// DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationMysqlUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMysqlUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMysqlUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationMysqlUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMysqlUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMysqlUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationMysqlUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationMysqlUpdateSSHKeyAuthentication) GetTunnelMethod() DestinationMysqlUpdateSchemasTunnelMethod {
+ return DestinationMysqlUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationMysqlUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationMysqlUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationMysqlUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationMysqlUpdateTunnelMethod string
const (
- DestinationMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationMysqlUpdateTunnelMethodNoTunnel DestinationMysqlUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationMysqlUpdateTunnelMethod) ToPointer() *DestinationMysqlUpdateTunnelMethod {
return &e
}
-func (e *DestinationMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationMysqlUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationMysqlUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationMysqlUpdateTunnelMethod: %v", v)
}
}
-// DestinationMysqlUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationMysqlUpdateSSHTunnelMethodNoTunnel struct {
+// DestinationMysqlUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationMysqlUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationMysqlUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationMysqlUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMysqlUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMysqlUpdateNoTunnel) GetTunnelMethod() DestinationMysqlUpdateTunnelMethod {
+ return DestinationMysqlUpdateTunnelMethodNoTunnel
}
type DestinationMysqlUpdateSSHTunnelMethodType string
const (
- DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHTunnelMethodNoTunnel DestinationMysqlUpdateSSHTunnelMethodType = "destination-mysql-update_SSH Tunnel Method_No Tunnel"
- DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication DestinationMysqlUpdateSSHTunnelMethodType = "destination-mysql-update_SSH Tunnel Method_SSH Key Authentication"
- DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication DestinationMysqlUpdateSSHTunnelMethodType = "destination-mysql-update_SSH Tunnel Method_Password Authentication"
+ DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateNoTunnel DestinationMysqlUpdateSSHTunnelMethodType = "destination-mysql-update_No Tunnel"
+ DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHKeyAuthentication DestinationMysqlUpdateSSHTunnelMethodType = "destination-mysql-update_SSH Key Authentication"
+ DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdatePasswordAuthentication DestinationMysqlUpdateSSHTunnelMethodType = "destination-mysql-update_Password Authentication"
)
type DestinationMysqlUpdateSSHTunnelMethod struct {
- DestinationMysqlUpdateSSHTunnelMethodNoTunnel *DestinationMysqlUpdateSSHTunnelMethodNoTunnel
- DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication
- DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication *DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication
+ DestinationMysqlUpdateNoTunnel *DestinationMysqlUpdateNoTunnel
+ DestinationMysqlUpdateSSHKeyAuthentication *DestinationMysqlUpdateSSHKeyAuthentication
+ DestinationMysqlUpdatePasswordAuthentication *DestinationMysqlUpdatePasswordAuthentication
Type DestinationMysqlUpdateSSHTunnelMethodType
}
-func CreateDestinationMysqlUpdateSSHTunnelMethodDestinationMysqlUpdateSSHTunnelMethodNoTunnel(destinationMysqlUpdateSSHTunnelMethodNoTunnel DestinationMysqlUpdateSSHTunnelMethodNoTunnel) DestinationMysqlUpdateSSHTunnelMethod {
- typ := DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHTunnelMethodNoTunnel
+func CreateDestinationMysqlUpdateSSHTunnelMethodDestinationMysqlUpdateNoTunnel(destinationMysqlUpdateNoTunnel DestinationMysqlUpdateNoTunnel) DestinationMysqlUpdateSSHTunnelMethod {
+ typ := DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateNoTunnel
return DestinationMysqlUpdateSSHTunnelMethod{
- DestinationMysqlUpdateSSHTunnelMethodNoTunnel: &destinationMysqlUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationMysqlUpdateNoTunnel: &destinationMysqlUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationMysqlUpdateSSHTunnelMethodDestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication(destinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication) DestinationMysqlUpdateSSHTunnelMethod {
- typ := DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationMysqlUpdateSSHTunnelMethodDestinationMysqlUpdateSSHKeyAuthentication(destinationMysqlUpdateSSHKeyAuthentication DestinationMysqlUpdateSSHKeyAuthentication) DestinationMysqlUpdateSSHTunnelMethod {
+ typ := DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHKeyAuthentication
return DestinationMysqlUpdateSSHTunnelMethod{
- DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication: &destinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationMysqlUpdateSSHKeyAuthentication: &destinationMysqlUpdateSSHKeyAuthentication,
Type: typ,
}
}
-func CreateDestinationMysqlUpdateSSHTunnelMethodDestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication(destinationMysqlUpdateSSHTunnelMethodPasswordAuthentication DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication) DestinationMysqlUpdateSSHTunnelMethod {
- typ := DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication
+func CreateDestinationMysqlUpdateSSHTunnelMethodDestinationMysqlUpdatePasswordAuthentication(destinationMysqlUpdatePasswordAuthentication DestinationMysqlUpdatePasswordAuthentication) DestinationMysqlUpdateSSHTunnelMethod {
+ typ := DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdatePasswordAuthentication
return DestinationMysqlUpdateSSHTunnelMethod{
- DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication: &destinationMysqlUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationMysqlUpdatePasswordAuthentication: &destinationMysqlUpdatePasswordAuthentication,
Type: typ,
}
}
func (u *DestinationMysqlUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationMysqlUpdateSSHTunnelMethodNoTunnel := new(DestinationMysqlUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMysqlUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationMysqlUpdateSSHTunnelMethodNoTunnel = destinationMysqlUpdateSSHTunnelMethodNoTunnel
- u.Type = DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHTunnelMethodNoTunnel
+
+ destinationMysqlUpdateNoTunnel := new(DestinationMysqlUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationMysqlUpdateNoTunnel, "", true, true); err == nil {
+ u.DestinationMysqlUpdateNoTunnel = destinationMysqlUpdateNoTunnel
+ u.Type = DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateNoTunnel
return nil
}
- destinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication := new(DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication = destinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication
+ destinationMysqlUpdateSSHKeyAuthentication := new(DestinationMysqlUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMysqlUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationMysqlUpdateSSHKeyAuthentication = destinationMysqlUpdateSSHKeyAuthentication
+ u.Type = DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHKeyAuthentication
return nil
}
- destinationMysqlUpdateSSHTunnelMethodPasswordAuthentication := new(DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationMysqlUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication = destinationMysqlUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication
+ destinationMysqlUpdatePasswordAuthentication := new(DestinationMysqlUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationMysqlUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.DestinationMysqlUpdatePasswordAuthentication = destinationMysqlUpdatePasswordAuthentication
+ u.Type = DestinationMysqlUpdateSSHTunnelMethodTypeDestinationMysqlUpdatePasswordAuthentication
return nil
}
@@ -195,19 +289,19 @@ func (u *DestinationMysqlUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error
}
func (u DestinationMysqlUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationMysqlUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationMysqlUpdateSSHTunnelMethodNoTunnel)
+ if u.DestinationMysqlUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationMysqlUpdateNoTunnel, "", true)
}
- if u.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationMysqlUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationMysqlUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMysqlUpdateSSHKeyAuthentication, "", true)
}
- if u.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationMysqlUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationMysqlUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationMysqlUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationMysqlUpdate struct {
@@ -220,9 +314,69 @@ type DestinationMysqlUpdate struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"3306" json:"port"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *DestinationMysqlUpdateSSHTunnelMethod `json:"tunnel_method,omitempty"`
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationMysqlUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationMysqlUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationMysqlUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationMysqlUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationMysqlUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationMysqlUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationMysqlUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationMysqlUpdate) GetTunnelMethod() *DestinationMysqlUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationMysqlUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationoracle.go b/internal/sdk/pkg/models/shared/destinationoracle.go
old mode 100755
new mode 100644
index e519980f4..d84c430f6
--- a/internal/sdk/pkg/models/shared/destinationoracle.go
+++ b/internal/sdk/pkg/models/shared/destinationoracle.go
@@ -3,215 +3,309 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationOracleOracle string
+type Oracle string
const (
- DestinationOracleOracleOracle DestinationOracleOracle = "oracle"
+ OracleOracle Oracle = "oracle"
)
-func (e DestinationOracleOracle) ToPointer() *DestinationOracleOracle {
+func (e Oracle) ToPointer() *Oracle {
return &e
}
-func (e *DestinationOracleOracle) UnmarshalJSON(data []byte) error {
+func (e *Oracle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oracle":
- *e = DestinationOracleOracle(v)
+ *e = Oracle(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationOracleOracle: %v", v)
+ return fmt.Errorf("invalid value for Oracle: %v", v)
}
}
-// DestinationOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationOracleSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationOracleSchemasTunnelMethodTunnelMethod string
const (
- DestinationOracleSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationOracleSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationOracleSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationOracleSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationOracleSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationOracleSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationOracleSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationOracleSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationOracleSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationOracleSSHTunnelMethodPasswordAuthentication struct {
+// DestinationOraclePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationOraclePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationOracleSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationOraclePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationOraclePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationOraclePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationOraclePasswordAuthentication) GetTunnelMethod() DestinationOracleSchemasTunnelMethodTunnelMethod {
+ return DestinationOracleSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationOraclePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationOraclePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationOraclePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationOracleSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationOracleSchemasTunnelMethod string
const (
- DestinationOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationOracleSchemasTunnelMethodSSHKeyAuth DestinationOracleSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationOracleSchemasTunnelMethod) ToPointer() *DestinationOracleSchemasTunnelMethod {
return &e
}
-func (e *DestinationOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationOracleSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationOracleSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationOracleSchemasTunnelMethod: %v", v)
}
}
-// DestinationOracleSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationOracleSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationOracleSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationOracleSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationOracleSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationOracleSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationOracleSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationOracleSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationOracleSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationOracleSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationOracleSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationOracleSSHKeyAuthentication) GetTunnelMethod() DestinationOracleSchemasTunnelMethod {
+ return DestinationOracleSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationOracleSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationOracleSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationOracleTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationOracleTunnelMethod string
const (
- DestinationOracleSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationOracleSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationOracleTunnelMethodNoTunnel DestinationOracleTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationOracleSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationOracleSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationOracleTunnelMethod) ToPointer() *DestinationOracleTunnelMethod {
return &e
}
-func (e *DestinationOracleSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationOracleTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationOracleSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationOracleTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationOracleSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationOracleTunnelMethod: %v", v)
}
}
-// DestinationOracleSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationOracleSSHTunnelMethodNoTunnel struct {
+// DestinationOracleNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationOracleNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationOracleSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationOracleTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationOracleNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationOracleNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationOracleNoTunnel) GetTunnelMethod() DestinationOracleTunnelMethod {
+ return DestinationOracleTunnelMethodNoTunnel
}
type DestinationOracleSSHTunnelMethodType string
const (
- DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHTunnelMethodNoTunnel DestinationOracleSSHTunnelMethodType = "destination-oracle_SSH Tunnel Method_No Tunnel"
- DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHTunnelMethodSSHKeyAuthentication DestinationOracleSSHTunnelMethodType = "destination-oracle_SSH Tunnel Method_SSH Key Authentication"
- DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHTunnelMethodPasswordAuthentication DestinationOracleSSHTunnelMethodType = "destination-oracle_SSH Tunnel Method_Password Authentication"
+ DestinationOracleSSHTunnelMethodTypeDestinationOracleNoTunnel DestinationOracleSSHTunnelMethodType = "destination-oracle_No Tunnel"
+ DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHKeyAuthentication DestinationOracleSSHTunnelMethodType = "destination-oracle_SSH Key Authentication"
+ DestinationOracleSSHTunnelMethodTypeDestinationOraclePasswordAuthentication DestinationOracleSSHTunnelMethodType = "destination-oracle_Password Authentication"
)
type DestinationOracleSSHTunnelMethod struct {
- DestinationOracleSSHTunnelMethodNoTunnel *DestinationOracleSSHTunnelMethodNoTunnel
- DestinationOracleSSHTunnelMethodSSHKeyAuthentication *DestinationOracleSSHTunnelMethodSSHKeyAuthentication
- DestinationOracleSSHTunnelMethodPasswordAuthentication *DestinationOracleSSHTunnelMethodPasswordAuthentication
+ DestinationOracleNoTunnel *DestinationOracleNoTunnel
+ DestinationOracleSSHKeyAuthentication *DestinationOracleSSHKeyAuthentication
+ DestinationOraclePasswordAuthentication *DestinationOraclePasswordAuthentication
Type DestinationOracleSSHTunnelMethodType
}
-func CreateDestinationOracleSSHTunnelMethodDestinationOracleSSHTunnelMethodNoTunnel(destinationOracleSSHTunnelMethodNoTunnel DestinationOracleSSHTunnelMethodNoTunnel) DestinationOracleSSHTunnelMethod {
- typ := DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHTunnelMethodNoTunnel
+func CreateDestinationOracleSSHTunnelMethodDestinationOracleNoTunnel(destinationOracleNoTunnel DestinationOracleNoTunnel) DestinationOracleSSHTunnelMethod {
+ typ := DestinationOracleSSHTunnelMethodTypeDestinationOracleNoTunnel
return DestinationOracleSSHTunnelMethod{
- DestinationOracleSSHTunnelMethodNoTunnel: &destinationOracleSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationOracleNoTunnel: &destinationOracleNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationOracleSSHTunnelMethodDestinationOracleSSHTunnelMethodSSHKeyAuthentication(destinationOracleSSHTunnelMethodSSHKeyAuthentication DestinationOracleSSHTunnelMethodSSHKeyAuthentication) DestinationOracleSSHTunnelMethod {
- typ := DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationOracleSSHTunnelMethodDestinationOracleSSHKeyAuthentication(destinationOracleSSHKeyAuthentication DestinationOracleSSHKeyAuthentication) DestinationOracleSSHTunnelMethod {
+ typ := DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHKeyAuthentication
return DestinationOracleSSHTunnelMethod{
- DestinationOracleSSHTunnelMethodSSHKeyAuthentication: &destinationOracleSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ DestinationOracleSSHKeyAuthentication: &destinationOracleSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateDestinationOracleSSHTunnelMethodDestinationOracleSSHTunnelMethodPasswordAuthentication(destinationOracleSSHTunnelMethodPasswordAuthentication DestinationOracleSSHTunnelMethodPasswordAuthentication) DestinationOracleSSHTunnelMethod {
- typ := DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHTunnelMethodPasswordAuthentication
+func CreateDestinationOracleSSHTunnelMethodDestinationOraclePasswordAuthentication(destinationOraclePasswordAuthentication DestinationOraclePasswordAuthentication) DestinationOracleSSHTunnelMethod {
+ typ := DestinationOracleSSHTunnelMethodTypeDestinationOraclePasswordAuthentication
return DestinationOracleSSHTunnelMethod{
- DestinationOracleSSHTunnelMethodPasswordAuthentication: &destinationOracleSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ DestinationOraclePasswordAuthentication: &destinationOraclePasswordAuthentication,
+ Type: typ,
}
}
func (u *DestinationOracleSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationOracleSSHTunnelMethodNoTunnel := new(DestinationOracleSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationOracleSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationOracleSSHTunnelMethodNoTunnel = destinationOracleSSHTunnelMethodNoTunnel
- u.Type = DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHTunnelMethodNoTunnel
+
+ destinationOracleNoTunnel := new(DestinationOracleNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationOracleNoTunnel, "", true, true); err == nil {
+ u.DestinationOracleNoTunnel = destinationOracleNoTunnel
+ u.Type = DestinationOracleSSHTunnelMethodTypeDestinationOracleNoTunnel
return nil
}
- destinationOracleSSHTunnelMethodSSHKeyAuthentication := new(DestinationOracleSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationOracleSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationOracleSSHTunnelMethodSSHKeyAuthentication = destinationOracleSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHTunnelMethodSSHKeyAuthentication
+ destinationOracleSSHKeyAuthentication := new(DestinationOracleSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationOracleSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationOracleSSHKeyAuthentication = destinationOracleSSHKeyAuthentication
+ u.Type = DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHKeyAuthentication
return nil
}
- destinationOracleSSHTunnelMethodPasswordAuthentication := new(DestinationOracleSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationOracleSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationOracleSSHTunnelMethodPasswordAuthentication = destinationOracleSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationOracleSSHTunnelMethodTypeDestinationOracleSSHTunnelMethodPasswordAuthentication
+ destinationOraclePasswordAuthentication := new(DestinationOraclePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationOraclePasswordAuthentication, "", true, true); err == nil {
+ u.DestinationOraclePasswordAuthentication = destinationOraclePasswordAuthentication
+ u.Type = DestinationOracleSSHTunnelMethodTypeDestinationOraclePasswordAuthentication
return nil
}
@@ -219,23 +313,23 @@ func (u *DestinationOracleSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationOracleSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationOracleSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationOracleSSHTunnelMethodNoTunnel)
+ if u.DestinationOracleNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationOracleNoTunnel, "", true)
}
- if u.DestinationOracleSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationOracleSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationOracleSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationOracleSSHKeyAuthentication, "", true)
}
- if u.DestinationOracleSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationOracleSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationOraclePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationOraclePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationOracle struct {
- DestinationType DestinationOracleOracle `json:"destinationType"`
+ destinationType Oracle `const:"oracle" json:"destinationType"`
// The hostname of the database.
Host string `json:"host"`
// Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
@@ -243,9 +337,9 @@ type DestinationOracle struct {
// The password associated with the username.
Password *string `json:"password,omitempty"`
// The port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"1521" json:"port"`
// The default schema is used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. The usual value for this field is "airbyte". In Oracle, schemas and users are the same thing, so the "user" parameter is used as the login credentials and this is used for the default Airbyte message schema.
- Schema *string `json:"schema,omitempty"`
+ Schema *string `default:"airbyte" json:"schema"`
// The System Identifier uniquely distinguishes the instance from any other instance on the same computer.
Sid string `json:"sid"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
@@ -253,3 +347,74 @@ type DestinationOracle struct {
// The username to access the database. This user must have CREATE USER privileges in the database.
Username string `json:"username"`
}
+
+func (d DestinationOracle) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationOracle) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationOracle) GetDestinationType() Oracle {
+ return OracleOracle
+}
+
+func (o *DestinationOracle) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationOracle) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationOracle) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationOracle) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationOracle) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *DestinationOracle) GetSid() string {
+ if o == nil {
+ return ""
+ }
+ return o.Sid
+}
+
+func (o *DestinationOracle) GetTunnelMethod() *DestinationOracleSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationOracle) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationoraclecreaterequest.go b/internal/sdk/pkg/models/shared/destinationoraclecreaterequest.go
old mode 100755
new mode 100644
index 131626e30..3fa866895
--- a/internal/sdk/pkg/models/shared/destinationoraclecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationoraclecreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationOracleCreateRequest struct {
Configuration DestinationOracle `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationOracleCreateRequest) GetConfiguration() DestinationOracle {
+ if o == nil {
+ return DestinationOracle{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationOracleCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationOracleCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationOracleCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationoracleputrequest.go b/internal/sdk/pkg/models/shared/destinationoracleputrequest.go
old mode 100755
new mode 100644
index 0559c6269..0ca6b2fec
--- a/internal/sdk/pkg/models/shared/destinationoracleputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationoracleputrequest.go
@@ -7,3 +7,24 @@ type DestinationOraclePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationOraclePutRequest) GetConfiguration() DestinationOracleUpdate {
+ if o == nil {
+ return DestinationOracleUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationOraclePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationOraclePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationoracleupdate.go b/internal/sdk/pkg/models/shared/destinationoracleupdate.go
old mode 100755
new mode 100644
index e3d36e223..020a21e11
--- a/internal/sdk/pkg/models/shared/destinationoracleupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationoracleupdate.go
@@ -3,191 +3,285 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// DestinationOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationOracleUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationOracleUpdateSchemasTunnelMethodTunnelMethod string
const (
- DestinationOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationOracleUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationOracleUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationOracleUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationOracleUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationOracleUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationOracleUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationOracleUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication struct {
+// DestinationOracleUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationOracleUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationOracleUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationOracleUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationOracleUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationOracleUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationOracleUpdatePasswordAuthentication) GetTunnelMethod() DestinationOracleUpdateSchemasTunnelMethodTunnelMethod {
+ return DestinationOracleUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationOracleUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationOracleUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationOracleUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationOracleUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationOracleUpdateSchemasTunnelMethod string
const (
- DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationOracleUpdateSchemasTunnelMethodSSHKeyAuth DestinationOracleUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationOracleUpdateSchemasTunnelMethod) ToPointer() *DestinationOracleUpdateSchemasTunnelMethod {
return &e
}
-func (e *DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationOracleUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationOracleUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationOracleUpdateSchemasTunnelMethod: %v", v)
}
}
-// DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationOracleUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationOracleUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationOracleUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationOracleUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationOracleUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationOracleUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationOracleUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationOracleUpdateSSHKeyAuthentication) GetTunnelMethod() DestinationOracleUpdateSchemasTunnelMethod {
+ return DestinationOracleUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationOracleUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationOracleUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationOracleUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationOracleUpdateTunnelMethod string
const (
- DestinationOracleUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationOracleUpdateTunnelMethodNoTunnel DestinationOracleUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationOracleUpdateTunnelMethod) ToPointer() *DestinationOracleUpdateTunnelMethod {
return &e
}
-func (e *DestinationOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationOracleUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationOracleUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationOracleUpdateTunnelMethod: %v", v)
}
}
-// DestinationOracleUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationOracleUpdateSSHTunnelMethodNoTunnel struct {
+// DestinationOracleUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationOracleUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationOracleUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationOracleUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationOracleUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationOracleUpdateNoTunnel) GetTunnelMethod() DestinationOracleUpdateTunnelMethod {
+ return DestinationOracleUpdateTunnelMethodNoTunnel
}
type DestinationOracleUpdateSSHTunnelMethodType string
const (
- DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHTunnelMethodNoTunnel DestinationOracleUpdateSSHTunnelMethodType = "destination-oracle-update_SSH Tunnel Method_No Tunnel"
- DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication DestinationOracleUpdateSSHTunnelMethodType = "destination-oracle-update_SSH Tunnel Method_SSH Key Authentication"
- DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHTunnelMethodPasswordAuthentication DestinationOracleUpdateSSHTunnelMethodType = "destination-oracle-update_SSH Tunnel Method_Password Authentication"
+ DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateNoTunnel DestinationOracleUpdateSSHTunnelMethodType = "destination-oracle-update_No Tunnel"
+ DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHKeyAuthentication DestinationOracleUpdateSSHTunnelMethodType = "destination-oracle-update_SSH Key Authentication"
+ DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdatePasswordAuthentication DestinationOracleUpdateSSHTunnelMethodType = "destination-oracle-update_Password Authentication"
)
type DestinationOracleUpdateSSHTunnelMethod struct {
- DestinationOracleUpdateSSHTunnelMethodNoTunnel *DestinationOracleUpdateSSHTunnelMethodNoTunnel
- DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication
- DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication *DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication
+ DestinationOracleUpdateNoTunnel *DestinationOracleUpdateNoTunnel
+ DestinationOracleUpdateSSHKeyAuthentication *DestinationOracleUpdateSSHKeyAuthentication
+ DestinationOracleUpdatePasswordAuthentication *DestinationOracleUpdatePasswordAuthentication
Type DestinationOracleUpdateSSHTunnelMethodType
}
-func CreateDestinationOracleUpdateSSHTunnelMethodDestinationOracleUpdateSSHTunnelMethodNoTunnel(destinationOracleUpdateSSHTunnelMethodNoTunnel DestinationOracleUpdateSSHTunnelMethodNoTunnel) DestinationOracleUpdateSSHTunnelMethod {
- typ := DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHTunnelMethodNoTunnel
+func CreateDestinationOracleUpdateSSHTunnelMethodDestinationOracleUpdateNoTunnel(destinationOracleUpdateNoTunnel DestinationOracleUpdateNoTunnel) DestinationOracleUpdateSSHTunnelMethod {
+ typ := DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateNoTunnel
return DestinationOracleUpdateSSHTunnelMethod{
- DestinationOracleUpdateSSHTunnelMethodNoTunnel: &destinationOracleUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationOracleUpdateNoTunnel: &destinationOracleUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationOracleUpdateSSHTunnelMethodDestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication(destinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication) DestinationOracleUpdateSSHTunnelMethod {
- typ := DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationOracleUpdateSSHTunnelMethodDestinationOracleUpdateSSHKeyAuthentication(destinationOracleUpdateSSHKeyAuthentication DestinationOracleUpdateSSHKeyAuthentication) DestinationOracleUpdateSSHTunnelMethod {
+ typ := DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHKeyAuthentication
return DestinationOracleUpdateSSHTunnelMethod{
- DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication: &destinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationOracleUpdateSSHKeyAuthentication: &destinationOracleUpdateSSHKeyAuthentication,
Type: typ,
}
}
-func CreateDestinationOracleUpdateSSHTunnelMethodDestinationOracleUpdateSSHTunnelMethodPasswordAuthentication(destinationOracleUpdateSSHTunnelMethodPasswordAuthentication DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication) DestinationOracleUpdateSSHTunnelMethod {
- typ := DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHTunnelMethodPasswordAuthentication
+func CreateDestinationOracleUpdateSSHTunnelMethodDestinationOracleUpdatePasswordAuthentication(destinationOracleUpdatePasswordAuthentication DestinationOracleUpdatePasswordAuthentication) DestinationOracleUpdateSSHTunnelMethod {
+ typ := DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdatePasswordAuthentication
return DestinationOracleUpdateSSHTunnelMethod{
- DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication: &destinationOracleUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationOracleUpdatePasswordAuthentication: &destinationOracleUpdatePasswordAuthentication,
Type: typ,
}
}
func (u *DestinationOracleUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationOracleUpdateSSHTunnelMethodNoTunnel := new(DestinationOracleUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationOracleUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationOracleUpdateSSHTunnelMethodNoTunnel = destinationOracleUpdateSSHTunnelMethodNoTunnel
- u.Type = DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHTunnelMethodNoTunnel
+
+ destinationOracleUpdateNoTunnel := new(DestinationOracleUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationOracleUpdateNoTunnel, "", true, true); err == nil {
+ u.DestinationOracleUpdateNoTunnel = destinationOracleUpdateNoTunnel
+ u.Type = DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateNoTunnel
return nil
}
- destinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication := new(DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication = destinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication
+ destinationOracleUpdateSSHKeyAuthentication := new(DestinationOracleUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationOracleUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationOracleUpdateSSHKeyAuthentication = destinationOracleUpdateSSHKeyAuthentication
+ u.Type = DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHKeyAuthentication
return nil
}
- destinationOracleUpdateSSHTunnelMethodPasswordAuthentication := new(DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationOracleUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication = destinationOracleUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdateSSHTunnelMethodPasswordAuthentication
+ destinationOracleUpdatePasswordAuthentication := new(DestinationOracleUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationOracleUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.DestinationOracleUpdatePasswordAuthentication = destinationOracleUpdatePasswordAuthentication
+ u.Type = DestinationOracleUpdateSSHTunnelMethodTypeDestinationOracleUpdatePasswordAuthentication
return nil
}
@@ -195,19 +289,19 @@ func (u *DestinationOracleUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) erro
}
func (u DestinationOracleUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationOracleUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationOracleUpdateSSHTunnelMethodNoTunnel)
+ if u.DestinationOracleUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationOracleUpdateNoTunnel, "", true)
}
- if u.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationOracleUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationOracleUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationOracleUpdateSSHKeyAuthentication, "", true)
}
- if u.DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationOracleUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationOracleUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationOracleUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationOracleUpdate struct {
@@ -218,9 +312,9 @@ type DestinationOracleUpdate struct {
// The password associated with the username.
Password *string `json:"password,omitempty"`
// The port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"1521" json:"port"`
// The default schema is used as the target schema for all statements issued from the connection that do not explicitly specify a schema name. The usual value for this field is "airbyte". In Oracle, schemas and users are the same thing, so the "user" parameter is used as the login credentials and this is used for the default Airbyte message schema.
- Schema *string `json:"schema,omitempty"`
+ Schema *string `default:"airbyte" json:"schema"`
// The System Identifier uniquely distinguishes the instance from any other instance on the same computer.
Sid string `json:"sid"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
@@ -228,3 +322,70 @@ type DestinationOracleUpdate struct {
// The username to access the database. This user must have CREATE USER privileges in the database.
Username string `json:"username"`
}
+
+func (d DestinationOracleUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationOracleUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationOracleUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationOracleUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationOracleUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationOracleUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationOracleUpdate) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *DestinationOracleUpdate) GetSid() string {
+ if o == nil {
+ return ""
+ }
+ return o.Sid
+}
+
+func (o *DestinationOracleUpdate) GetTunnelMethod() *DestinationOracleUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationOracleUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationpatchrequest.go b/internal/sdk/pkg/models/shared/destinationpatchrequest.go
old mode 100755
new mode 100644
index fcbba4a73..f0fd420ac
--- a/internal/sdk/pkg/models/shared/destinationpatchrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationpatchrequest.go
@@ -7,3 +7,17 @@ type DestinationPatchRequest struct {
Configuration interface{} `json:"configuration,omitempty"`
Name *string `json:"name,omitempty"`
}
+
+func (o *DestinationPatchRequest) GetConfiguration() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.Configuration
+}
+
+func (o *DestinationPatchRequest) GetName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Name
+}
diff --git a/internal/sdk/pkg/models/shared/destinationpinecone.go b/internal/sdk/pkg/models/shared/destinationpinecone.go
old mode 100755
new mode 100644
index 81f24ac74..238feefc8
--- a/internal/sdk/pkg/models/shared/destinationpinecone.go
+++ b/internal/sdk/pkg/models/shared/destinationpinecone.go
@@ -3,195 +3,433 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationPineconePinecone string
+type Pinecone string
const (
- DestinationPineconePineconePinecone DestinationPineconePinecone = "pinecone"
+ PineconePinecone Pinecone = "pinecone"
)
-func (e DestinationPineconePinecone) ToPointer() *DestinationPineconePinecone {
+func (e Pinecone) ToPointer() *Pinecone {
return &e
}
-func (e *DestinationPineconePinecone) UnmarshalJSON(data []byte) error {
+func (e *Pinecone) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pinecone":
- *e = DestinationPineconePinecone(v)
+ *e = Pinecone(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPineconePinecone: %v", v)
+ return fmt.Errorf("invalid value for Pinecone: %v", v)
}
}
-type DestinationPineconeEmbeddingFakeMode string
+type DestinationPineconeSchemasEmbeddingEmbedding5Mode string
const (
- DestinationPineconeEmbeddingFakeModeFake DestinationPineconeEmbeddingFakeMode = "fake"
+ DestinationPineconeSchemasEmbeddingEmbedding5ModeOpenaiCompatible DestinationPineconeSchemasEmbeddingEmbedding5Mode = "openai_compatible"
)
-func (e DestinationPineconeEmbeddingFakeMode) ToPointer() *DestinationPineconeEmbeddingFakeMode {
+func (e DestinationPineconeSchemasEmbeddingEmbedding5Mode) ToPointer() *DestinationPineconeSchemasEmbeddingEmbedding5Mode {
return &e
}
-func (e *DestinationPineconeEmbeddingFakeMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPineconeSchemasEmbeddingEmbedding5Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "openai_compatible":
+ *e = DestinationPineconeSchemasEmbeddingEmbedding5Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationPineconeSchemasEmbeddingEmbedding5Mode: %v", v)
+ }
+}
+
+// DestinationPineconeOpenAICompatible - Use a service that's compatible with the OpenAI API to embed text.
+type DestinationPineconeOpenAICompatible struct {
+ APIKey *string `default:"" json:"api_key"`
+ // The base URL for your OpenAI-compatible service
+ BaseURL string `json:"base_url"`
+ // The number of dimensions the embedding model is generating
+ Dimensions int64 `json:"dimensions"`
+ mode *DestinationPineconeSchemasEmbeddingEmbedding5Mode `const:"openai_compatible" json:"mode"`
+ // The name of the model to use for embedding
+ ModelName *string `default:"text-embedding-ada-002" json:"model_name"`
+}
+
+func (d DestinationPineconeOpenAICompatible) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeOpenAICompatible) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeOpenAICompatible) GetAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIKey
+}
+
+func (o *DestinationPineconeOpenAICompatible) GetBaseURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.BaseURL
+}
+
+func (o *DestinationPineconeOpenAICompatible) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *DestinationPineconeOpenAICompatible) GetMode() *DestinationPineconeSchemasEmbeddingEmbedding5Mode {
+ return DestinationPineconeSchemasEmbeddingEmbedding5ModeOpenaiCompatible.ToPointer()
+}
+
+func (o *DestinationPineconeOpenAICompatible) GetModelName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ModelName
+}
+
+type DestinationPineconeSchemasEmbeddingEmbeddingMode string
+
+const (
+ DestinationPineconeSchemasEmbeddingEmbeddingModeAzureOpenai DestinationPineconeSchemasEmbeddingEmbeddingMode = "azure_openai"
+)
+
+func (e DestinationPineconeSchemasEmbeddingEmbeddingMode) ToPointer() *DestinationPineconeSchemasEmbeddingEmbeddingMode {
+ return &e
+}
+
+func (e *DestinationPineconeSchemasEmbeddingEmbeddingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "azure_openai":
+ *e = DestinationPineconeSchemasEmbeddingEmbeddingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationPineconeSchemasEmbeddingEmbeddingMode: %v", v)
+ }
+}
+
+// DestinationPineconeAzureOpenAI - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationPineconeAzureOpenAI struct {
+ // The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ APIBase string `json:"api_base"`
+ // The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ Deployment string `json:"deployment"`
+ mode *DestinationPineconeSchemasEmbeddingEmbeddingMode `const:"azure_openai" json:"mode"`
+ // The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationPineconeAzureOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeAzureOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeAzureOpenAI) GetAPIBase() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIBase
+}
+
+func (o *DestinationPineconeAzureOpenAI) GetDeployment() string {
+ if o == nil {
+ return ""
+ }
+ return o.Deployment
+}
+
+func (o *DestinationPineconeAzureOpenAI) GetMode() *DestinationPineconeSchemasEmbeddingEmbeddingMode {
+ return DestinationPineconeSchemasEmbeddingEmbeddingModeAzureOpenai.ToPointer()
+}
+
+func (o *DestinationPineconeAzureOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
+}
+
+type DestinationPineconeSchemasEmbeddingMode string
+
+const (
+ DestinationPineconeSchemasEmbeddingModeFake DestinationPineconeSchemasEmbeddingMode = "fake"
+)
+
+func (e DestinationPineconeSchemasEmbeddingMode) ToPointer() *DestinationPineconeSchemasEmbeddingMode {
+ return &e
+}
+
+func (e *DestinationPineconeSchemasEmbeddingMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "fake":
- *e = DestinationPineconeEmbeddingFakeMode(v)
+ *e = DestinationPineconeSchemasEmbeddingMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPineconeEmbeddingFakeMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPineconeSchemasEmbeddingMode: %v", v)
+ }
+}
+
+// DestinationPineconeFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
+type DestinationPineconeFake struct {
+ mode *DestinationPineconeSchemasEmbeddingMode `const:"fake" json:"mode"`
+}
+
+func (d DestinationPineconeFake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeFake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationPineconeEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
-type DestinationPineconeEmbeddingFake struct {
- Mode *DestinationPineconeEmbeddingFakeMode `json:"mode,omitempty"`
+func (o *DestinationPineconeFake) GetMode() *DestinationPineconeSchemasEmbeddingMode {
+ return DestinationPineconeSchemasEmbeddingModeFake.ToPointer()
}
-type DestinationPineconeEmbeddingCohereMode string
+type DestinationPineconeSchemasMode string
const (
- DestinationPineconeEmbeddingCohereModeCohere DestinationPineconeEmbeddingCohereMode = "cohere"
+ DestinationPineconeSchemasModeCohere DestinationPineconeSchemasMode = "cohere"
)
-func (e DestinationPineconeEmbeddingCohereMode) ToPointer() *DestinationPineconeEmbeddingCohereMode {
+func (e DestinationPineconeSchemasMode) ToPointer() *DestinationPineconeSchemasMode {
return &e
}
-func (e *DestinationPineconeEmbeddingCohereMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPineconeSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "cohere":
- *e = DestinationPineconeEmbeddingCohereMode(v)
+ *e = DestinationPineconeSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPineconeEmbeddingCohereMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPineconeSchemasMode: %v", v)
+ }
+}
+
+// DestinationPineconeCohere - Use the Cohere API to embed text.
+type DestinationPineconeCohere struct {
+ CohereKey string `json:"cohere_key"`
+ mode *DestinationPineconeSchemasMode `const:"cohere" json:"mode"`
+}
+
+func (d DestinationPineconeCohere) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeCohere) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationPineconeEmbeddingCohere - Use the Cohere API to embed text.
-type DestinationPineconeEmbeddingCohere struct {
- CohereKey string `json:"cohere_key"`
- Mode *DestinationPineconeEmbeddingCohereMode `json:"mode,omitempty"`
+func (o *DestinationPineconeCohere) GetCohereKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.CohereKey
}
-type DestinationPineconeEmbeddingOpenAIMode string
+func (o *DestinationPineconeCohere) GetMode() *DestinationPineconeSchemasMode {
+ return DestinationPineconeSchemasModeCohere.ToPointer()
+}
+
+type DestinationPineconeMode string
const (
- DestinationPineconeEmbeddingOpenAIModeOpenai DestinationPineconeEmbeddingOpenAIMode = "openai"
+ DestinationPineconeModeOpenai DestinationPineconeMode = "openai"
)
-func (e DestinationPineconeEmbeddingOpenAIMode) ToPointer() *DestinationPineconeEmbeddingOpenAIMode {
+func (e DestinationPineconeMode) ToPointer() *DestinationPineconeMode {
return &e
}
-func (e *DestinationPineconeEmbeddingOpenAIMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPineconeMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "openai":
- *e = DestinationPineconeEmbeddingOpenAIMode(v)
+ *e = DestinationPineconeMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPineconeEmbeddingOpenAIMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPineconeMode: %v", v)
+ }
+}
+
+// DestinationPineconeOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationPineconeOpenAI struct {
+ mode *DestinationPineconeMode `const:"openai" json:"mode"`
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationPineconeOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationPineconeEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
-type DestinationPineconeEmbeddingOpenAI struct {
- Mode *DestinationPineconeEmbeddingOpenAIMode `json:"mode,omitempty"`
- OpenaiKey string `json:"openai_key"`
+func (o *DestinationPineconeOpenAI) GetMode() *DestinationPineconeMode {
+ return DestinationPineconeModeOpenai.ToPointer()
+}
+
+func (o *DestinationPineconeOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
}
type DestinationPineconeEmbeddingType string
const (
- DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingOpenAI DestinationPineconeEmbeddingType = "destination-pinecone_Embedding_OpenAI"
- DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingCohere DestinationPineconeEmbeddingType = "destination-pinecone_Embedding_Cohere"
- DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingFake DestinationPineconeEmbeddingType = "destination-pinecone_Embedding_Fake"
+ DestinationPineconeEmbeddingTypeDestinationPineconeOpenAI DestinationPineconeEmbeddingType = "destination-pinecone_OpenAI"
+ DestinationPineconeEmbeddingTypeDestinationPineconeCohere DestinationPineconeEmbeddingType = "destination-pinecone_Cohere"
+ DestinationPineconeEmbeddingTypeDestinationPineconeFake DestinationPineconeEmbeddingType = "destination-pinecone_Fake"
+ DestinationPineconeEmbeddingTypeDestinationPineconeAzureOpenAI DestinationPineconeEmbeddingType = "destination-pinecone_Azure OpenAI"
+ DestinationPineconeEmbeddingTypeDestinationPineconeOpenAICompatible DestinationPineconeEmbeddingType = "destination-pinecone_OpenAI-compatible"
)
type DestinationPineconeEmbedding struct {
- DestinationPineconeEmbeddingOpenAI *DestinationPineconeEmbeddingOpenAI
- DestinationPineconeEmbeddingCohere *DestinationPineconeEmbeddingCohere
- DestinationPineconeEmbeddingFake *DestinationPineconeEmbeddingFake
+ DestinationPineconeOpenAI *DestinationPineconeOpenAI
+ DestinationPineconeCohere *DestinationPineconeCohere
+ DestinationPineconeFake *DestinationPineconeFake
+ DestinationPineconeAzureOpenAI *DestinationPineconeAzureOpenAI
+ DestinationPineconeOpenAICompatible *DestinationPineconeOpenAICompatible
Type DestinationPineconeEmbeddingType
}
-func CreateDestinationPineconeEmbeddingDestinationPineconeEmbeddingOpenAI(destinationPineconeEmbeddingOpenAI DestinationPineconeEmbeddingOpenAI) DestinationPineconeEmbedding {
- typ := DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingOpenAI
+func CreateDestinationPineconeEmbeddingDestinationPineconeOpenAI(destinationPineconeOpenAI DestinationPineconeOpenAI) DestinationPineconeEmbedding {
+ typ := DestinationPineconeEmbeddingTypeDestinationPineconeOpenAI
return DestinationPineconeEmbedding{
- DestinationPineconeEmbeddingOpenAI: &destinationPineconeEmbeddingOpenAI,
- Type: typ,
+ DestinationPineconeOpenAI: &destinationPineconeOpenAI,
+ Type: typ,
}
}
-func CreateDestinationPineconeEmbeddingDestinationPineconeEmbeddingCohere(destinationPineconeEmbeddingCohere DestinationPineconeEmbeddingCohere) DestinationPineconeEmbedding {
- typ := DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingCohere
+func CreateDestinationPineconeEmbeddingDestinationPineconeCohere(destinationPineconeCohere DestinationPineconeCohere) DestinationPineconeEmbedding {
+ typ := DestinationPineconeEmbeddingTypeDestinationPineconeCohere
return DestinationPineconeEmbedding{
- DestinationPineconeEmbeddingCohere: &destinationPineconeEmbeddingCohere,
- Type: typ,
+ DestinationPineconeCohere: &destinationPineconeCohere,
+ Type: typ,
}
}
-func CreateDestinationPineconeEmbeddingDestinationPineconeEmbeddingFake(destinationPineconeEmbeddingFake DestinationPineconeEmbeddingFake) DestinationPineconeEmbedding {
- typ := DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingFake
+func CreateDestinationPineconeEmbeddingDestinationPineconeFake(destinationPineconeFake DestinationPineconeFake) DestinationPineconeEmbedding {
+ typ := DestinationPineconeEmbeddingTypeDestinationPineconeFake
return DestinationPineconeEmbedding{
- DestinationPineconeEmbeddingFake: &destinationPineconeEmbeddingFake,
- Type: typ,
+ DestinationPineconeFake: &destinationPineconeFake,
+ Type: typ,
+ }
+}
+
+func CreateDestinationPineconeEmbeddingDestinationPineconeAzureOpenAI(destinationPineconeAzureOpenAI DestinationPineconeAzureOpenAI) DestinationPineconeEmbedding {
+ typ := DestinationPineconeEmbeddingTypeDestinationPineconeAzureOpenAI
+
+ return DestinationPineconeEmbedding{
+ DestinationPineconeAzureOpenAI: &destinationPineconeAzureOpenAI,
+ Type: typ,
+ }
+}
+
+func CreateDestinationPineconeEmbeddingDestinationPineconeOpenAICompatible(destinationPineconeOpenAICompatible DestinationPineconeOpenAICompatible) DestinationPineconeEmbedding {
+ typ := DestinationPineconeEmbeddingTypeDestinationPineconeOpenAICompatible
+
+ return DestinationPineconeEmbedding{
+ DestinationPineconeOpenAICompatible: &destinationPineconeOpenAICompatible,
+ Type: typ,
}
}
func (u *DestinationPineconeEmbedding) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- destinationPineconeEmbeddingFake := new(DestinationPineconeEmbeddingFake)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPineconeEmbeddingFake); err == nil {
- u.DestinationPineconeEmbeddingFake = destinationPineconeEmbeddingFake
- u.Type = DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingFake
+ destinationPineconeFake := new(DestinationPineconeFake)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeFake, "", true, true); err == nil {
+ u.DestinationPineconeFake = destinationPineconeFake
+ u.Type = DestinationPineconeEmbeddingTypeDestinationPineconeFake
return nil
}
- destinationPineconeEmbeddingOpenAI := new(DestinationPineconeEmbeddingOpenAI)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPineconeEmbeddingOpenAI); err == nil {
- u.DestinationPineconeEmbeddingOpenAI = destinationPineconeEmbeddingOpenAI
- u.Type = DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingOpenAI
+ destinationPineconeOpenAI := new(DestinationPineconeOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeOpenAI, "", true, true); err == nil {
+ u.DestinationPineconeOpenAI = destinationPineconeOpenAI
+ u.Type = DestinationPineconeEmbeddingTypeDestinationPineconeOpenAI
return nil
}
- destinationPineconeEmbeddingCohere := new(DestinationPineconeEmbeddingCohere)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPineconeEmbeddingCohere); err == nil {
- u.DestinationPineconeEmbeddingCohere = destinationPineconeEmbeddingCohere
- u.Type = DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingCohere
+ destinationPineconeCohere := new(DestinationPineconeCohere)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeCohere, "", true, true); err == nil {
+ u.DestinationPineconeCohere = destinationPineconeCohere
+ u.Type = DestinationPineconeEmbeddingTypeDestinationPineconeCohere
+ return nil
+ }
+
+ destinationPineconeAzureOpenAI := new(DestinationPineconeAzureOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeAzureOpenAI, "", true, true); err == nil {
+ u.DestinationPineconeAzureOpenAI = destinationPineconeAzureOpenAI
+ u.Type = DestinationPineconeEmbeddingTypeDestinationPineconeAzureOpenAI
+ return nil
+ }
+
+ destinationPineconeOpenAICompatible := new(DestinationPineconeOpenAICompatible)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeOpenAICompatible, "", true, true); err == nil {
+ u.DestinationPineconeOpenAICompatible = destinationPineconeOpenAICompatible
+ u.Type = DestinationPineconeEmbeddingTypeDestinationPineconeOpenAICompatible
return nil
}
@@ -199,46 +437,513 @@ func (u *DestinationPineconeEmbedding) UnmarshalJSON(data []byte) error {
}
func (u DestinationPineconeEmbedding) MarshalJSON() ([]byte, error) {
- if u.DestinationPineconeEmbeddingFake != nil {
- return json.Marshal(u.DestinationPineconeEmbeddingFake)
+ if u.DestinationPineconeOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationPineconeOpenAI, "", true)
+ }
+
+ if u.DestinationPineconeCohere != nil {
+ return utils.MarshalJSON(u.DestinationPineconeCohere, "", true)
+ }
+
+ if u.DestinationPineconeFake != nil {
+ return utils.MarshalJSON(u.DestinationPineconeFake, "", true)
}
- if u.DestinationPineconeEmbeddingOpenAI != nil {
- return json.Marshal(u.DestinationPineconeEmbeddingOpenAI)
+ if u.DestinationPineconeAzureOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationPineconeAzureOpenAI, "", true)
}
- if u.DestinationPineconeEmbeddingCohere != nil {
- return json.Marshal(u.DestinationPineconeEmbeddingCohere)
+ if u.DestinationPineconeOpenAICompatible != nil {
+ return utils.MarshalJSON(u.DestinationPineconeOpenAICompatible, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// DestinationPineconeIndexing - Pinecone is a popular vector store that can be used to store and retrieve embeddings.
type DestinationPineconeIndexing struct {
- // Pinecone index to use
+ // Pinecone index in your project to load data into
Index string `json:"index"`
- // Pinecone environment to use
+ // Pinecone Cloud environment to use
PineconeEnvironment string `json:"pinecone_environment"`
- PineconeKey string `json:"pinecone_key"`
+ // The Pinecone API key to use matching the environment (copy from Pinecone console)
+ PineconeKey string `json:"pinecone_key"`
+}
+
+func (o *DestinationPineconeIndexing) GetIndex() string {
+ if o == nil {
+ return ""
+ }
+ return o.Index
+}
+
+func (o *DestinationPineconeIndexing) GetPineconeEnvironment() string {
+ if o == nil {
+ return ""
+ }
+ return o.PineconeEnvironment
+}
+
+func (o *DestinationPineconeIndexing) GetPineconeKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PineconeKey
+}
+
+type DestinationPineconeFieldNameMappingConfigModel struct {
+ // The field name in the source
+ FromField string `json:"from_field"`
+ // The field name to use in the destination
+ ToField string `json:"to_field"`
+}
+
+func (o *DestinationPineconeFieldNameMappingConfigModel) GetFromField() string {
+ if o == nil {
+ return ""
+ }
+ return o.FromField
+}
+
+func (o *DestinationPineconeFieldNameMappingConfigModel) GetToField() string {
+ if o == nil {
+ return ""
+ }
+ return o.ToField
+}
+
+// DestinationPineconeLanguage - Split code in suitable places based on the programming language
+type DestinationPineconeLanguage string
+
+const (
+ DestinationPineconeLanguageCpp DestinationPineconeLanguage = "cpp"
+ DestinationPineconeLanguageGo DestinationPineconeLanguage = "go"
+ DestinationPineconeLanguageJava DestinationPineconeLanguage = "java"
+ DestinationPineconeLanguageJs DestinationPineconeLanguage = "js"
+ DestinationPineconeLanguagePhp DestinationPineconeLanguage = "php"
+ DestinationPineconeLanguageProto DestinationPineconeLanguage = "proto"
+ DestinationPineconeLanguagePython DestinationPineconeLanguage = "python"
+ DestinationPineconeLanguageRst DestinationPineconeLanguage = "rst"
+ DestinationPineconeLanguageRuby DestinationPineconeLanguage = "ruby"
+ DestinationPineconeLanguageRust DestinationPineconeLanguage = "rust"
+ DestinationPineconeLanguageScala DestinationPineconeLanguage = "scala"
+ DestinationPineconeLanguageSwift DestinationPineconeLanguage = "swift"
+ DestinationPineconeLanguageMarkdown DestinationPineconeLanguage = "markdown"
+ DestinationPineconeLanguageLatex DestinationPineconeLanguage = "latex"
+ DestinationPineconeLanguageHTML DestinationPineconeLanguage = "html"
+ DestinationPineconeLanguageSol DestinationPineconeLanguage = "sol"
+)
+
+func (e DestinationPineconeLanguage) ToPointer() *DestinationPineconeLanguage {
+ return &e
+}
+
+func (e *DestinationPineconeLanguage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "cpp":
+ fallthrough
+ case "go":
+ fallthrough
+ case "java":
+ fallthrough
+ case "js":
+ fallthrough
+ case "php":
+ fallthrough
+ case "proto":
+ fallthrough
+ case "python":
+ fallthrough
+ case "rst":
+ fallthrough
+ case "ruby":
+ fallthrough
+ case "rust":
+ fallthrough
+ case "scala":
+ fallthrough
+ case "swift":
+ fallthrough
+ case "markdown":
+ fallthrough
+ case "latex":
+ fallthrough
+ case "html":
+ fallthrough
+ case "sol":
+ *e = DestinationPineconeLanguage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationPineconeLanguage: %v", v)
+ }
+}
+
+type DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode string
+
+const (
+ DestinationPineconeSchemasProcessingTextSplitterTextSplitterModeCode DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode = "code"
+)
+
+func (e DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode) ToPointer() *DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode {
+ return &e
+}
+
+func (e *DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "code":
+ *e = DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode: %v", v)
+ }
+}
+
+// DestinationPineconeByProgrammingLanguage - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
+type DestinationPineconeByProgrammingLanguage struct {
+ // Split code in suitable places based on the programming language
+ Language DestinationPineconeLanguage `json:"language"`
+ mode *DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode `const:"code" json:"mode"`
+}
+
+func (d DestinationPineconeByProgrammingLanguage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeByProgrammingLanguage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeByProgrammingLanguage) GetLanguage() DestinationPineconeLanguage {
+ if o == nil {
+ return DestinationPineconeLanguage("")
+ }
+ return o.Language
+}
+
+func (o *DestinationPineconeByProgrammingLanguage) GetMode() *DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode {
+ return DestinationPineconeSchemasProcessingTextSplitterTextSplitterModeCode.ToPointer()
+}
+
+type DestinationPineconeSchemasProcessingTextSplitterMode string
+
+const (
+ DestinationPineconeSchemasProcessingTextSplitterModeMarkdown DestinationPineconeSchemasProcessingTextSplitterMode = "markdown"
+)
+
+func (e DestinationPineconeSchemasProcessingTextSplitterMode) ToPointer() *DestinationPineconeSchemasProcessingTextSplitterMode {
+ return &e
+}
+
+func (e *DestinationPineconeSchemasProcessingTextSplitterMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "markdown":
+ *e = DestinationPineconeSchemasProcessingTextSplitterMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationPineconeSchemasProcessingTextSplitterMode: %v", v)
+ }
+}
+
+// DestinationPineconeByMarkdownHeader - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
+type DestinationPineconeByMarkdownHeader struct {
+ mode *DestinationPineconeSchemasProcessingTextSplitterMode `const:"markdown" json:"mode"`
+ // Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
+ SplitLevel *int64 `default:"1" json:"split_level"`
+}
+
+func (d DestinationPineconeByMarkdownHeader) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeByMarkdownHeader) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeByMarkdownHeader) GetMode() *DestinationPineconeSchemasProcessingTextSplitterMode {
+ return DestinationPineconeSchemasProcessingTextSplitterModeMarkdown.ToPointer()
+}
+
+func (o *DestinationPineconeByMarkdownHeader) GetSplitLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SplitLevel
+}
+
+type DestinationPineconeSchemasProcessingMode string
+
+const (
+ DestinationPineconeSchemasProcessingModeSeparator DestinationPineconeSchemasProcessingMode = "separator"
+)
+
+func (e DestinationPineconeSchemasProcessingMode) ToPointer() *DestinationPineconeSchemasProcessingMode {
+ return &e
+}
+
+func (e *DestinationPineconeSchemasProcessingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "separator":
+ *e = DestinationPineconeSchemasProcessingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationPineconeSchemasProcessingMode: %v", v)
+ }
+}
+
+// DestinationPineconeBySeparator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
+type DestinationPineconeBySeparator struct {
+ // Whether to keep the separator in the resulting chunks
+ KeepSeparator *bool `default:"false" json:"keep_separator"`
+ mode *DestinationPineconeSchemasProcessingMode `const:"separator" json:"mode"`
+ // List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
+ Separators []string `json:"separators,omitempty"`
+}
+
+func (d DestinationPineconeBySeparator) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeBySeparator) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeBySeparator) GetKeepSeparator() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.KeepSeparator
+}
+
+func (o *DestinationPineconeBySeparator) GetMode() *DestinationPineconeSchemasProcessingMode {
+ return DestinationPineconeSchemasProcessingModeSeparator.ToPointer()
+}
+
+func (o *DestinationPineconeBySeparator) GetSeparators() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Separators
+}
+
+type DestinationPineconeTextSplitterType string
+
+const (
+ DestinationPineconeTextSplitterTypeDestinationPineconeBySeparator DestinationPineconeTextSplitterType = "destination-pinecone_By Separator"
+ DestinationPineconeTextSplitterTypeDestinationPineconeByMarkdownHeader DestinationPineconeTextSplitterType = "destination-pinecone_By Markdown header"
+ DestinationPineconeTextSplitterTypeDestinationPineconeByProgrammingLanguage DestinationPineconeTextSplitterType = "destination-pinecone_By Programming Language"
+)
+
+type DestinationPineconeTextSplitter struct {
+ DestinationPineconeBySeparator *DestinationPineconeBySeparator
+ DestinationPineconeByMarkdownHeader *DestinationPineconeByMarkdownHeader
+ DestinationPineconeByProgrammingLanguage *DestinationPineconeByProgrammingLanguage
+
+ Type DestinationPineconeTextSplitterType
+}
+
+func CreateDestinationPineconeTextSplitterDestinationPineconeBySeparator(destinationPineconeBySeparator DestinationPineconeBySeparator) DestinationPineconeTextSplitter {
+ typ := DestinationPineconeTextSplitterTypeDestinationPineconeBySeparator
+
+ return DestinationPineconeTextSplitter{
+ DestinationPineconeBySeparator: &destinationPineconeBySeparator,
+ Type: typ,
+ }
+}
+
+func CreateDestinationPineconeTextSplitterDestinationPineconeByMarkdownHeader(destinationPineconeByMarkdownHeader DestinationPineconeByMarkdownHeader) DestinationPineconeTextSplitter {
+ typ := DestinationPineconeTextSplitterTypeDestinationPineconeByMarkdownHeader
+
+ return DestinationPineconeTextSplitter{
+ DestinationPineconeByMarkdownHeader: &destinationPineconeByMarkdownHeader,
+ Type: typ,
+ }
+}
+
+func CreateDestinationPineconeTextSplitterDestinationPineconeByProgrammingLanguage(destinationPineconeByProgrammingLanguage DestinationPineconeByProgrammingLanguage) DestinationPineconeTextSplitter {
+ typ := DestinationPineconeTextSplitterTypeDestinationPineconeByProgrammingLanguage
+
+ return DestinationPineconeTextSplitter{
+ DestinationPineconeByProgrammingLanguage: &destinationPineconeByProgrammingLanguage,
+ Type: typ,
+ }
+}
+
+func (u *DestinationPineconeTextSplitter) UnmarshalJSON(data []byte) error {
+
+ destinationPineconeByMarkdownHeader := new(DestinationPineconeByMarkdownHeader)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeByMarkdownHeader, "", true, true); err == nil {
+ u.DestinationPineconeByMarkdownHeader = destinationPineconeByMarkdownHeader
+ u.Type = DestinationPineconeTextSplitterTypeDestinationPineconeByMarkdownHeader
+ return nil
+ }
+
+ destinationPineconeByProgrammingLanguage := new(DestinationPineconeByProgrammingLanguage)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeByProgrammingLanguage, "", true, true); err == nil {
+ u.DestinationPineconeByProgrammingLanguage = destinationPineconeByProgrammingLanguage
+ u.Type = DestinationPineconeTextSplitterTypeDestinationPineconeByProgrammingLanguage
+ return nil
+ }
+
+ destinationPineconeBySeparator := new(DestinationPineconeBySeparator)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeBySeparator, "", true, true); err == nil {
+ u.DestinationPineconeBySeparator = destinationPineconeBySeparator
+ u.Type = DestinationPineconeTextSplitterTypeDestinationPineconeBySeparator
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DestinationPineconeTextSplitter) MarshalJSON() ([]byte, error) {
+ if u.DestinationPineconeBySeparator != nil {
+ return utils.MarshalJSON(u.DestinationPineconeBySeparator, "", true)
+ }
+
+ if u.DestinationPineconeByMarkdownHeader != nil {
+ return utils.MarshalJSON(u.DestinationPineconeByMarkdownHeader, "", true)
+ }
+
+ if u.DestinationPineconeByProgrammingLanguage != nil {
+ return utils.MarshalJSON(u.DestinationPineconeByProgrammingLanguage, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationPineconeProcessingConfigModel struct {
// Size of overlap between chunks in tokens to store in vector store to better capture relevant context
- ChunkOverlap *int64 `json:"chunk_overlap,omitempty"`
+ ChunkOverlap *int64 `default:"0" json:"chunk_overlap"`
// Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)
ChunkSize int64 `json:"chunk_size"`
+ // List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
+ FieldNameMappings []DestinationPineconeFieldNameMappingConfigModel `json:"field_name_mappings,omitempty"`
// List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
MetadataFields []string `json:"metadata_fields,omitempty"`
// List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
TextFields []string `json:"text_fields,omitempty"`
+ // Split text fields into chunks based on the specified method.
+ TextSplitter *DestinationPineconeTextSplitter `json:"text_splitter,omitempty"`
+}
+
+func (d DestinationPineconeProcessingConfigModel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeProcessingConfigModel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeProcessingConfigModel) GetChunkOverlap() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ChunkOverlap
+}
+
+func (o *DestinationPineconeProcessingConfigModel) GetChunkSize() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.ChunkSize
+}
+
+func (o *DestinationPineconeProcessingConfigModel) GetFieldNameMappings() []DestinationPineconeFieldNameMappingConfigModel {
+ if o == nil {
+ return nil
+ }
+ return o.FieldNameMappings
+}
+
+func (o *DestinationPineconeProcessingConfigModel) GetMetadataFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.MetadataFields
+}
+
+func (o *DestinationPineconeProcessingConfigModel) GetTextFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TextFields
+}
+
+func (o *DestinationPineconeProcessingConfigModel) GetTextSplitter() *DestinationPineconeTextSplitter {
+ if o == nil {
+ return nil
+ }
+ return o.TextSplitter
}
type DestinationPinecone struct {
- DestinationType DestinationPineconePinecone `json:"destinationType"`
+ destinationType Pinecone `const:"pinecone" json:"destinationType"`
// Embedding configuration
Embedding DestinationPineconeEmbedding `json:"embedding"`
// Pinecone is a popular vector store that can be used to store and retrieve embeddings.
Indexing DestinationPineconeIndexing `json:"indexing"`
Processing DestinationPineconeProcessingConfigModel `json:"processing"`
}
+
+func (d DestinationPinecone) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPinecone) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPinecone) GetDestinationType() Pinecone {
+ return PineconePinecone
+}
+
+func (o *DestinationPinecone) GetEmbedding() DestinationPineconeEmbedding {
+ if o == nil {
+ return DestinationPineconeEmbedding{}
+ }
+ return o.Embedding
+}
+
+func (o *DestinationPinecone) GetIndexing() DestinationPineconeIndexing {
+ if o == nil {
+ return DestinationPineconeIndexing{}
+ }
+ return o.Indexing
+}
+
+func (o *DestinationPinecone) GetProcessing() DestinationPineconeProcessingConfigModel {
+ if o == nil {
+ return DestinationPineconeProcessingConfigModel{}
+ }
+ return o.Processing
+}
diff --git a/internal/sdk/pkg/models/shared/destinationpineconecreaterequest.go b/internal/sdk/pkg/models/shared/destinationpineconecreaterequest.go
old mode 100755
new mode 100644
index 47c7380cf..d7a917109
--- a/internal/sdk/pkg/models/shared/destinationpineconecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationpineconecreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationPineconeCreateRequest struct {
Configuration DestinationPinecone `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationPineconeCreateRequest) GetConfiguration() DestinationPinecone {
+ if o == nil {
+ return DestinationPinecone{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationPineconeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationPineconeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationPineconeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationpineconeputrequest.go b/internal/sdk/pkg/models/shared/destinationpineconeputrequest.go
old mode 100755
new mode 100644
index d59659835..0033abae6
--- a/internal/sdk/pkg/models/shared/destinationpineconeputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationpineconeputrequest.go
@@ -7,3 +7,24 @@ type DestinationPineconePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationPineconePutRequest) GetConfiguration() DestinationPineconeUpdate {
+ if o == nil {
+ return DestinationPineconeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationPineconePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationPineconePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationpineconeupdate.go b/internal/sdk/pkg/models/shared/destinationpineconeupdate.go
old mode 100755
new mode 100644
index 813b50eb6..8b8f2b234
--- a/internal/sdk/pkg/models/shared/destinationpineconeupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationpineconeupdate.go
@@ -3,171 +3,409 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationPineconeUpdateEmbeddingFakeMode string
+type DestinationPineconeUpdateSchemasEmbeddingEmbedding5Mode string
const (
- DestinationPineconeUpdateEmbeddingFakeModeFake DestinationPineconeUpdateEmbeddingFakeMode = "fake"
+ DestinationPineconeUpdateSchemasEmbeddingEmbedding5ModeOpenaiCompatible DestinationPineconeUpdateSchemasEmbeddingEmbedding5Mode = "openai_compatible"
)
-func (e DestinationPineconeUpdateEmbeddingFakeMode) ToPointer() *DestinationPineconeUpdateEmbeddingFakeMode {
+func (e DestinationPineconeUpdateSchemasEmbeddingEmbedding5Mode) ToPointer() *DestinationPineconeUpdateSchemasEmbeddingEmbedding5Mode {
return &e
}
-func (e *DestinationPineconeUpdateEmbeddingFakeMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPineconeUpdateSchemasEmbeddingEmbedding5Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "openai_compatible":
+ *e = DestinationPineconeUpdateSchemasEmbeddingEmbedding5Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationPineconeUpdateSchemasEmbeddingEmbedding5Mode: %v", v)
+ }
+}
+
+// DestinationPineconeUpdateOpenAICompatible - Use a service that's compatible with the OpenAI API to embed text.
+type DestinationPineconeUpdateOpenAICompatible struct {
+ APIKey *string `default:"" json:"api_key"`
+ // The base URL for your OpenAI-compatible service
+ BaseURL string `json:"base_url"`
+ // The number of dimensions the embedding model is generating
+ Dimensions int64 `json:"dimensions"`
+ mode *DestinationPineconeUpdateSchemasEmbeddingEmbedding5Mode `const:"openai_compatible" json:"mode"`
+ // The name of the model to use for embedding
+ ModelName *string `default:"text-embedding-ada-002" json:"model_name"`
+}
+
+func (d DestinationPineconeUpdateOpenAICompatible) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeUpdateOpenAICompatible) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeUpdateOpenAICompatible) GetAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIKey
+}
+
+func (o *DestinationPineconeUpdateOpenAICompatible) GetBaseURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.BaseURL
+}
+
+func (o *DestinationPineconeUpdateOpenAICompatible) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *DestinationPineconeUpdateOpenAICompatible) GetMode() *DestinationPineconeUpdateSchemasEmbeddingEmbedding5Mode {
+ return DestinationPineconeUpdateSchemasEmbeddingEmbedding5ModeOpenaiCompatible.ToPointer()
+}
+
+func (o *DestinationPineconeUpdateOpenAICompatible) GetModelName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ModelName
+}
+
+type DestinationPineconeUpdateSchemasEmbeddingEmbeddingMode string
+
+const (
+ DestinationPineconeUpdateSchemasEmbeddingEmbeddingModeAzureOpenai DestinationPineconeUpdateSchemasEmbeddingEmbeddingMode = "azure_openai"
+)
+
+func (e DestinationPineconeUpdateSchemasEmbeddingEmbeddingMode) ToPointer() *DestinationPineconeUpdateSchemasEmbeddingEmbeddingMode {
+ return &e
+}
+
+func (e *DestinationPineconeUpdateSchemasEmbeddingEmbeddingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "azure_openai":
+ *e = DestinationPineconeUpdateSchemasEmbeddingEmbeddingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationPineconeUpdateSchemasEmbeddingEmbeddingMode: %v", v)
+ }
+}
+
+// DestinationPineconeUpdateAzureOpenAI - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationPineconeUpdateAzureOpenAI struct {
+ // The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ APIBase string `json:"api_base"`
+ // The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ Deployment string `json:"deployment"`
+ mode *DestinationPineconeUpdateSchemasEmbeddingEmbeddingMode `const:"azure_openai" json:"mode"`
+ // The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationPineconeUpdateAzureOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeUpdateAzureOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeUpdateAzureOpenAI) GetAPIBase() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIBase
+}
+
+func (o *DestinationPineconeUpdateAzureOpenAI) GetDeployment() string {
+ if o == nil {
+ return ""
+ }
+ return o.Deployment
+}
+
+func (o *DestinationPineconeUpdateAzureOpenAI) GetMode() *DestinationPineconeUpdateSchemasEmbeddingEmbeddingMode {
+ return DestinationPineconeUpdateSchemasEmbeddingEmbeddingModeAzureOpenai.ToPointer()
+}
+
+func (o *DestinationPineconeUpdateAzureOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
+}
+
// DestinationPineconeUpdateSchemasEmbeddingMode is the discriminator value for the fake-embedding variant.
type DestinationPineconeUpdateSchemasEmbeddingMode string

const (
	DestinationPineconeUpdateSchemasEmbeddingModeFake DestinationPineconeUpdateSchemasEmbeddingMode = "fake"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e DestinationPineconeUpdateSchemasEmbeddingMode) ToPointer() *DestinationPineconeUpdateSchemasEmbeddingMode {
	out := e
	return &out
}

// UnmarshalJSON accepts only the literal "fake" and rejects everything else.
func (e *DestinationPineconeUpdateSchemasEmbeddingMode) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "fake" {
		return fmt.Errorf("invalid value for DestinationPineconeUpdateSchemasEmbeddingMode: %v", raw)
	}
	*e = DestinationPineconeUpdateSchemasEmbeddingMode(raw)
	return nil
}
+
+// DestinationPineconeUpdateFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
+type DestinationPineconeUpdateFake struct {
+ mode *DestinationPineconeUpdateSchemasEmbeddingMode `const:"fake" json:"mode"`
+}
+
+func (d DestinationPineconeUpdateFake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeUpdateFake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationPineconeUpdateEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
-type DestinationPineconeUpdateEmbeddingFake struct {
- Mode *DestinationPineconeUpdateEmbeddingFakeMode `json:"mode,omitempty"`
+func (o *DestinationPineconeUpdateFake) GetMode() *DestinationPineconeUpdateSchemasEmbeddingMode {
+ return DestinationPineconeUpdateSchemasEmbeddingModeFake.ToPointer()
}
// DestinationPineconeUpdateSchemasMode is the discriminator value for the Cohere variant.
type DestinationPineconeUpdateSchemasMode string

const (
	DestinationPineconeUpdateSchemasModeCohere DestinationPineconeUpdateSchemasMode = "cohere"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e DestinationPineconeUpdateSchemasMode) ToPointer() *DestinationPineconeUpdateSchemasMode {
	out := e
	return &out
}

// UnmarshalJSON accepts only the literal "cohere" and rejects everything else.
func (e *DestinationPineconeUpdateSchemasMode) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "cohere" {
		return fmt.Errorf("invalid value for DestinationPineconeUpdateSchemasMode: %v", raw)
	}
	*e = DestinationPineconeUpdateSchemasMode(raw)
	return nil
}
-// DestinationPineconeUpdateEmbeddingCohere - Use the Cohere API to embed text.
-type DestinationPineconeUpdateEmbeddingCohere struct {
- CohereKey string `json:"cohere_key"`
- Mode *DestinationPineconeUpdateEmbeddingCohereMode `json:"mode,omitempty"`
+// DestinationPineconeUpdateCohere - Use the Cohere API to embed text.
+type DestinationPineconeUpdateCohere struct {
+ CohereKey string `json:"cohere_key"`
+ mode *DestinationPineconeUpdateSchemasMode `const:"cohere" json:"mode"`
+}
+
+func (d DestinationPineconeUpdateCohere) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationPineconeUpdateEmbeddingOpenAIMode string
+func (d *DestinationPineconeUpdateCohere) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeUpdateCohere) GetCohereKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.CohereKey
+}
+
+func (o *DestinationPineconeUpdateCohere) GetMode() *DestinationPineconeUpdateSchemasMode {
+ return DestinationPineconeUpdateSchemasModeCohere.ToPointer()
+}
+
// DestinationPineconeUpdateMode is the discriminator value for the OpenAI variant.
type DestinationPineconeUpdateMode string

const (
	DestinationPineconeUpdateModeOpenai DestinationPineconeUpdateMode = "openai"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e DestinationPineconeUpdateMode) ToPointer() *DestinationPineconeUpdateMode {
	out := e
	return &out
}

// UnmarshalJSON accepts only the literal "openai" and rejects everything else.
func (e *DestinationPineconeUpdateMode) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "openai" {
		return fmt.Errorf("invalid value for DestinationPineconeUpdateMode: %v", raw)
	}
	*e = DestinationPineconeUpdateMode(raw)
	return nil
}
+
+// DestinationPineconeUpdateOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationPineconeUpdateOpenAI struct {
+ mode *DestinationPineconeUpdateMode `const:"openai" json:"mode"`
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationPineconeUpdateOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeUpdateOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
+}
+
+func (o *DestinationPineconeUpdateOpenAI) GetMode() *DestinationPineconeUpdateMode {
+ return DestinationPineconeUpdateModeOpenai.ToPointer()
}
-// DestinationPineconeUpdateEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
-type DestinationPineconeUpdateEmbeddingOpenAI struct {
- Mode *DestinationPineconeUpdateEmbeddingOpenAIMode `json:"mode,omitempty"`
- OpenaiKey string `json:"openai_key"`
+func (o *DestinationPineconeUpdateOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
}
// DestinationPineconeUpdateEmbeddingType names which variant of the
// DestinationPineconeUpdateEmbedding union is populated.
type DestinationPineconeUpdateEmbeddingType string

const (
	DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateOpenAI           DestinationPineconeUpdateEmbeddingType = "destination-pinecone-update_OpenAI"
	DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateCohere           DestinationPineconeUpdateEmbeddingType = "destination-pinecone-update_Cohere"
	DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateFake             DestinationPineconeUpdateEmbeddingType = "destination-pinecone-update_Fake"
	DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateAzureOpenAI      DestinationPineconeUpdateEmbeddingType = "destination-pinecone-update_Azure OpenAI"
	DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateOpenAICompatible DestinationPineconeUpdateEmbeddingType = "destination-pinecone-update_OpenAI-compatible"
)

// DestinationPineconeUpdateEmbedding is a oneOf union: exactly one of the
// variant pointers is expected to be non-nil, and Type records which one
// (set by the Create* constructors and by UnmarshalJSON).
type DestinationPineconeUpdateEmbedding struct {
	DestinationPineconeUpdateOpenAI           *DestinationPineconeUpdateOpenAI
	DestinationPineconeUpdateCohere           *DestinationPineconeUpdateCohere
	DestinationPineconeUpdateFake             *DestinationPineconeUpdateFake
	DestinationPineconeUpdateAzureOpenAI      *DestinationPineconeUpdateAzureOpenAI
	DestinationPineconeUpdateOpenAICompatible *DestinationPineconeUpdateOpenAICompatible

	// Type is the discriminator naming the populated variant.
	Type DestinationPineconeUpdateEmbeddingType
}
-func CreateDestinationPineconeUpdateEmbeddingDestinationPineconeUpdateEmbeddingOpenAI(destinationPineconeUpdateEmbeddingOpenAI DestinationPineconeUpdateEmbeddingOpenAI) DestinationPineconeUpdateEmbedding {
- typ := DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingOpenAI
+func CreateDestinationPineconeUpdateEmbeddingDestinationPineconeUpdateOpenAI(destinationPineconeUpdateOpenAI DestinationPineconeUpdateOpenAI) DestinationPineconeUpdateEmbedding {
+ typ := DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateOpenAI
return DestinationPineconeUpdateEmbedding{
- DestinationPineconeUpdateEmbeddingOpenAI: &destinationPineconeUpdateEmbeddingOpenAI,
- Type: typ,
+ DestinationPineconeUpdateOpenAI: &destinationPineconeUpdateOpenAI,
+ Type: typ,
}
}
-func CreateDestinationPineconeUpdateEmbeddingDestinationPineconeUpdateEmbeddingCohere(destinationPineconeUpdateEmbeddingCohere DestinationPineconeUpdateEmbeddingCohere) DestinationPineconeUpdateEmbedding {
- typ := DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingCohere
+func CreateDestinationPineconeUpdateEmbeddingDestinationPineconeUpdateCohere(destinationPineconeUpdateCohere DestinationPineconeUpdateCohere) DestinationPineconeUpdateEmbedding {
+ typ := DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateCohere
return DestinationPineconeUpdateEmbedding{
- DestinationPineconeUpdateEmbeddingCohere: &destinationPineconeUpdateEmbeddingCohere,
- Type: typ,
+ DestinationPineconeUpdateCohere: &destinationPineconeUpdateCohere,
+ Type: typ,
}
}
-func CreateDestinationPineconeUpdateEmbeddingDestinationPineconeUpdateEmbeddingFake(destinationPineconeUpdateEmbeddingFake DestinationPineconeUpdateEmbeddingFake) DestinationPineconeUpdateEmbedding {
- typ := DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingFake
+func CreateDestinationPineconeUpdateEmbeddingDestinationPineconeUpdateFake(destinationPineconeUpdateFake DestinationPineconeUpdateFake) DestinationPineconeUpdateEmbedding {
+ typ := DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateFake
return DestinationPineconeUpdateEmbedding{
- DestinationPineconeUpdateEmbeddingFake: &destinationPineconeUpdateEmbeddingFake,
- Type: typ,
+ DestinationPineconeUpdateFake: &destinationPineconeUpdateFake,
+ Type: typ,
+ }
+}
+
+func CreateDestinationPineconeUpdateEmbeddingDestinationPineconeUpdateAzureOpenAI(destinationPineconeUpdateAzureOpenAI DestinationPineconeUpdateAzureOpenAI) DestinationPineconeUpdateEmbedding {
+ typ := DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateAzureOpenAI
+
+ return DestinationPineconeUpdateEmbedding{
+ DestinationPineconeUpdateAzureOpenAI: &destinationPineconeUpdateAzureOpenAI,
+ Type: typ,
+ }
+}
+
+func CreateDestinationPineconeUpdateEmbeddingDestinationPineconeUpdateOpenAICompatible(destinationPineconeUpdateOpenAICompatible DestinationPineconeUpdateOpenAICompatible) DestinationPineconeUpdateEmbedding {
+ typ := DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateOpenAICompatible
+
+ return DestinationPineconeUpdateEmbedding{
+ DestinationPineconeUpdateOpenAICompatible: &destinationPineconeUpdateOpenAICompatible,
+ Type: typ,
}
}
func (u *DestinationPineconeUpdateEmbedding) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- destinationPineconeUpdateEmbeddingFake := new(DestinationPineconeUpdateEmbeddingFake)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPineconeUpdateEmbeddingFake); err == nil {
- u.DestinationPineconeUpdateEmbeddingFake = destinationPineconeUpdateEmbeddingFake
- u.Type = DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingFake
+ destinationPineconeUpdateFake := new(DestinationPineconeUpdateFake)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeUpdateFake, "", true, true); err == nil {
+ u.DestinationPineconeUpdateFake = destinationPineconeUpdateFake
+ u.Type = DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateFake
+ return nil
+ }
+
+ destinationPineconeUpdateOpenAI := new(DestinationPineconeUpdateOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeUpdateOpenAI, "", true, true); err == nil {
+ u.DestinationPineconeUpdateOpenAI = destinationPineconeUpdateOpenAI
+ u.Type = DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateOpenAI
+ return nil
+ }
+
+ destinationPineconeUpdateCohere := new(DestinationPineconeUpdateCohere)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeUpdateCohere, "", true, true); err == nil {
+ u.DestinationPineconeUpdateCohere = destinationPineconeUpdateCohere
+ u.Type = DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateCohere
return nil
}
- destinationPineconeUpdateEmbeddingOpenAI := new(DestinationPineconeUpdateEmbeddingOpenAI)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPineconeUpdateEmbeddingOpenAI); err == nil {
- u.DestinationPineconeUpdateEmbeddingOpenAI = destinationPineconeUpdateEmbeddingOpenAI
- u.Type = DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingOpenAI
+ destinationPineconeUpdateAzureOpenAI := new(DestinationPineconeUpdateAzureOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeUpdateAzureOpenAI, "", true, true); err == nil {
+ u.DestinationPineconeUpdateAzureOpenAI = destinationPineconeUpdateAzureOpenAI
+ u.Type = DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateAzureOpenAI
return nil
}
- destinationPineconeUpdateEmbeddingCohere := new(DestinationPineconeUpdateEmbeddingCohere)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPineconeUpdateEmbeddingCohere); err == nil {
- u.DestinationPineconeUpdateEmbeddingCohere = destinationPineconeUpdateEmbeddingCohere
- u.Type = DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingCohere
+ destinationPineconeUpdateOpenAICompatible := new(DestinationPineconeUpdateOpenAICompatible)
+ if err := utils.UnmarshalJSON(data, &destinationPineconeUpdateOpenAICompatible, "", true, true); err == nil {
+ u.DestinationPineconeUpdateOpenAICompatible = destinationPineconeUpdateOpenAICompatible
+ u.Type = DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateOpenAICompatible
return nil
}
@@ -175,39 +413,470 @@ func (u *DestinationPineconeUpdateEmbedding) UnmarshalJSON(data []byte) error {
}
func (u DestinationPineconeUpdateEmbedding) MarshalJSON() ([]byte, error) {
- if u.DestinationPineconeUpdateEmbeddingFake != nil {
- return json.Marshal(u.DestinationPineconeUpdateEmbeddingFake)
+ if u.DestinationPineconeUpdateOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationPineconeUpdateOpenAI, "", true)
+ }
+
+ if u.DestinationPineconeUpdateCohere != nil {
+ return utils.MarshalJSON(u.DestinationPineconeUpdateCohere, "", true)
+ }
+
+ if u.DestinationPineconeUpdateFake != nil {
+ return utils.MarshalJSON(u.DestinationPineconeUpdateFake, "", true)
}
- if u.DestinationPineconeUpdateEmbeddingOpenAI != nil {
- return json.Marshal(u.DestinationPineconeUpdateEmbeddingOpenAI)
+ if u.DestinationPineconeUpdateAzureOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationPineconeUpdateAzureOpenAI, "", true)
}
- if u.DestinationPineconeUpdateEmbeddingCohere != nil {
- return json.Marshal(u.DestinationPineconeUpdateEmbeddingCohere)
+ if u.DestinationPineconeUpdateOpenAICompatible != nil {
+ return utils.MarshalJSON(u.DestinationPineconeUpdateOpenAICompatible, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// DestinationPineconeUpdateIndexing - Pinecone is a popular vector store that can be used to store and retrieve embeddings.
type DestinationPineconeUpdateIndexing struct {
	// Pinecone index in your project to load data into
	Index string `json:"index"`
	// Pinecone Cloud environment to use
	PineconeEnvironment string `json:"pinecone_environment"`
	// The Pinecone API key to use matching the environment (copy from Pinecone console)
	PineconeKey string `json:"pinecone_key"`
}

// GetIndex returns the target index name; safe to call on a nil receiver.
func (o *DestinationPineconeUpdateIndexing) GetIndex() string {
	if o != nil {
		return o.Index
	}
	return ""
}

// GetPineconeEnvironment returns the Pinecone environment; safe to call on a nil receiver.
func (o *DestinationPineconeUpdateIndexing) GetPineconeEnvironment() string {
	if o != nil {
		return o.PineconeEnvironment
	}
	return ""
}

// GetPineconeKey returns the Pinecone API key; safe to call on a nil receiver.
func (o *DestinationPineconeUpdateIndexing) GetPineconeKey() string {
	if o != nil {
		return o.PineconeKey
	}
	return ""
}
+
// DestinationPineconeUpdateFieldNameMappingConfigModel maps a source field
// name to the name it should carry in the destination.
type DestinationPineconeUpdateFieldNameMappingConfigModel struct {
	// The field name in the source
	FromField string `json:"from_field"`
	// The field name to use in the destination
	ToField string `json:"to_field"`
}

// GetFromField returns the source field name; safe to call on a nil receiver.
func (o *DestinationPineconeUpdateFieldNameMappingConfigModel) GetFromField() string {
	if o != nil {
		return o.FromField
	}
	return ""
}

// GetToField returns the destination field name; safe to call on a nil receiver.
func (o *DestinationPineconeUpdateFieldNameMappingConfigModel) GetToField() string {
	if o != nil {
		return o.ToField
	}
	return ""
}
+
// DestinationPineconeUpdateLanguage - Split code in suitable places based on the programming language
type DestinationPineconeUpdateLanguage string

const (
	DestinationPineconeUpdateLanguageCpp      DestinationPineconeUpdateLanguage = "cpp"
	DestinationPineconeUpdateLanguageGo       DestinationPineconeUpdateLanguage = "go"
	DestinationPineconeUpdateLanguageJava     DestinationPineconeUpdateLanguage = "java"
	DestinationPineconeUpdateLanguageJs       DestinationPineconeUpdateLanguage = "js"
	DestinationPineconeUpdateLanguagePhp      DestinationPineconeUpdateLanguage = "php"
	DestinationPineconeUpdateLanguageProto    DestinationPineconeUpdateLanguage = "proto"
	DestinationPineconeUpdateLanguagePython   DestinationPineconeUpdateLanguage = "python"
	DestinationPineconeUpdateLanguageRst      DestinationPineconeUpdateLanguage = "rst"
	DestinationPineconeUpdateLanguageRuby     DestinationPineconeUpdateLanguage = "ruby"
	DestinationPineconeUpdateLanguageRust     DestinationPineconeUpdateLanguage = "rust"
	DestinationPineconeUpdateLanguageScala    DestinationPineconeUpdateLanguage = "scala"
	DestinationPineconeUpdateLanguageSwift    DestinationPineconeUpdateLanguage = "swift"
	DestinationPineconeUpdateLanguageMarkdown DestinationPineconeUpdateLanguage = "markdown"
	DestinationPineconeUpdateLanguageLatex    DestinationPineconeUpdateLanguage = "latex"
	DestinationPineconeUpdateLanguageHTML     DestinationPineconeUpdateLanguage = "html"
	DestinationPineconeUpdateLanguageSol      DestinationPineconeUpdateLanguage = "sol"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e DestinationPineconeUpdateLanguage) ToPointer() *DestinationPineconeUpdateLanguage {
	out := e
	return &out
}

// UnmarshalJSON accepts exactly the enumerated language identifiers and
// rejects any other value.
func (e *DestinationPineconeUpdateLanguage) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	for _, allowed := range []string{
		"cpp", "go", "java", "js", "php", "proto", "python", "rst",
		"ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol",
	} {
		if raw == allowed {
			*e = DestinationPineconeUpdateLanguage(raw)
			return nil
		}
	}
	return fmt.Errorf("invalid value for DestinationPineconeUpdateLanguage: %v", raw)
}
+
// DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterMode is the discriminator value for the by-programming-language splitter.
type DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterMode string

const (
	DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterModeCode DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterMode = "code"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterMode) ToPointer() *DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterMode {
	out := e
	return &out
}

// UnmarshalJSON accepts only the literal "code" and rejects everything else.
func (e *DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterMode) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "code" {
		return fmt.Errorf("invalid value for DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterMode: %v", raw)
	}
	*e = DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterMode(raw)
	return nil
}
+
+// DestinationPineconeUpdateByProgrammingLanguage - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
+type DestinationPineconeUpdateByProgrammingLanguage struct {
+ // Split code in suitable places based on the programming language
+ Language DestinationPineconeUpdateLanguage `json:"language"`
+ mode *DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterMode `const:"code" json:"mode"`
+}
+
+func (d DestinationPineconeUpdateByProgrammingLanguage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeUpdateByProgrammingLanguage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeUpdateByProgrammingLanguage) GetLanguage() DestinationPineconeUpdateLanguage {
+ if o == nil {
+ return DestinationPineconeUpdateLanguage("")
+ }
+ return o.Language
+}
+
+func (o *DestinationPineconeUpdateByProgrammingLanguage) GetMode() *DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterMode {
+ return DestinationPineconeUpdateSchemasProcessingTextSplitterTextSplitterModeCode.ToPointer()
+}
+
// DestinationPineconeUpdateSchemasProcessingTextSplitterMode is the discriminator value for the by-markdown-header splitter.
type DestinationPineconeUpdateSchemasProcessingTextSplitterMode string

const (
	DestinationPineconeUpdateSchemasProcessingTextSplitterModeMarkdown DestinationPineconeUpdateSchemasProcessingTextSplitterMode = "markdown"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e DestinationPineconeUpdateSchemasProcessingTextSplitterMode) ToPointer() *DestinationPineconeUpdateSchemasProcessingTextSplitterMode {
	out := e
	return &out
}

// UnmarshalJSON accepts only the literal "markdown" and rejects everything else.
func (e *DestinationPineconeUpdateSchemasProcessingTextSplitterMode) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "markdown" {
		return fmt.Errorf("invalid value for DestinationPineconeUpdateSchemasProcessingTextSplitterMode: %v", raw)
	}
	*e = DestinationPineconeUpdateSchemasProcessingTextSplitterMode(raw)
	return nil
}
+
+// DestinationPineconeUpdateByMarkdownHeader - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
+type DestinationPineconeUpdateByMarkdownHeader struct {
+ mode *DestinationPineconeUpdateSchemasProcessingTextSplitterMode `const:"markdown" json:"mode"`
+ // Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
+ SplitLevel *int64 `default:"1" json:"split_level"`
+}
+
+func (d DestinationPineconeUpdateByMarkdownHeader) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeUpdateByMarkdownHeader) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeUpdateByMarkdownHeader) GetMode() *DestinationPineconeUpdateSchemasProcessingTextSplitterMode {
+ return DestinationPineconeUpdateSchemasProcessingTextSplitterModeMarkdown.ToPointer()
+}
+
+func (o *DestinationPineconeUpdateByMarkdownHeader) GetSplitLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SplitLevel
+}
+
// DestinationPineconeUpdateSchemasProcessingMode is the discriminator value for the by-separator splitter.
type DestinationPineconeUpdateSchemasProcessingMode string

const (
	DestinationPineconeUpdateSchemasProcessingModeSeparator DestinationPineconeUpdateSchemasProcessingMode = "separator"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e DestinationPineconeUpdateSchemasProcessingMode) ToPointer() *DestinationPineconeUpdateSchemasProcessingMode {
	out := e
	return &out
}

// UnmarshalJSON accepts only the literal "separator" and rejects everything else.
func (e *DestinationPineconeUpdateSchemasProcessingMode) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "separator" {
		return fmt.Errorf("invalid value for DestinationPineconeUpdateSchemasProcessingMode: %v", raw)
	}
	*e = DestinationPineconeUpdateSchemasProcessingMode(raw)
	return nil
}
+
+// DestinationPineconeUpdateBySeparator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
+type DestinationPineconeUpdateBySeparator struct {
+ // Whether to keep the separator in the resulting chunks
+ KeepSeparator *bool `default:"false" json:"keep_separator"`
+ mode *DestinationPineconeUpdateSchemasProcessingMode `const:"separator" json:"mode"`
+ // List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
+ Separators []string `json:"separators,omitempty"`
+}
+
+func (d DestinationPineconeUpdateBySeparator) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeUpdateBySeparator) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeUpdateBySeparator) GetKeepSeparator() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.KeepSeparator
+}
+
+func (o *DestinationPineconeUpdateBySeparator) GetMode() *DestinationPineconeUpdateSchemasProcessingMode {
+ return DestinationPineconeUpdateSchemasProcessingModeSeparator.ToPointer()
+}
+
+func (o *DestinationPineconeUpdateBySeparator) GetSeparators() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Separators
+}
+
// DestinationPineconeUpdateTextSplitterType names which variant of the
// DestinationPineconeUpdateTextSplitter union is populated.
type DestinationPineconeUpdateTextSplitterType string

const (
	DestinationPineconeUpdateTextSplitterTypeDestinationPineconeUpdateBySeparator           DestinationPineconeUpdateTextSplitterType = "destination-pinecone-update_By Separator"
	DestinationPineconeUpdateTextSplitterTypeDestinationPineconeUpdateByMarkdownHeader      DestinationPineconeUpdateTextSplitterType = "destination-pinecone-update_By Markdown header"
	DestinationPineconeUpdateTextSplitterTypeDestinationPineconeUpdateByProgrammingLanguage DestinationPineconeUpdateTextSplitterType = "destination-pinecone-update_By Programming Language"
)

// DestinationPineconeUpdateTextSplitter is a oneOf union: exactly one of the
// variant pointers is expected to be non-nil, and Type records which one
// (set by the Create* constructors and by UnmarshalJSON).
type DestinationPineconeUpdateTextSplitter struct {
	DestinationPineconeUpdateBySeparator           *DestinationPineconeUpdateBySeparator
	DestinationPineconeUpdateByMarkdownHeader      *DestinationPineconeUpdateByMarkdownHeader
	DestinationPineconeUpdateByProgrammingLanguage *DestinationPineconeUpdateByProgrammingLanguage

	// Type is the discriminator naming the populated variant.
	Type DestinationPineconeUpdateTextSplitterType
}
+
+func CreateDestinationPineconeUpdateTextSplitterDestinationPineconeUpdateBySeparator(destinationPineconeUpdateBySeparator DestinationPineconeUpdateBySeparator) DestinationPineconeUpdateTextSplitter {
+ typ := DestinationPineconeUpdateTextSplitterTypeDestinationPineconeUpdateBySeparator
+
+ return DestinationPineconeUpdateTextSplitter{
+ DestinationPineconeUpdateBySeparator: &destinationPineconeUpdateBySeparator,
+ Type: typ,
+ }
+}
+
+func CreateDestinationPineconeUpdateTextSplitterDestinationPineconeUpdateByMarkdownHeader(destinationPineconeUpdateByMarkdownHeader DestinationPineconeUpdateByMarkdownHeader) DestinationPineconeUpdateTextSplitter {
+ typ := DestinationPineconeUpdateTextSplitterTypeDestinationPineconeUpdateByMarkdownHeader
+
+ return DestinationPineconeUpdateTextSplitter{
+ DestinationPineconeUpdateByMarkdownHeader: &destinationPineconeUpdateByMarkdownHeader,
+ Type: typ,
+ }
+}
+
+func CreateDestinationPineconeUpdateTextSplitterDestinationPineconeUpdateByProgrammingLanguage(destinationPineconeUpdateByProgrammingLanguage DestinationPineconeUpdateByProgrammingLanguage) DestinationPineconeUpdateTextSplitter {
+ typ := DestinationPineconeUpdateTextSplitterTypeDestinationPineconeUpdateByProgrammingLanguage
+
+ return DestinationPineconeUpdateTextSplitter{
+ DestinationPineconeUpdateByProgrammingLanguage: &destinationPineconeUpdateByProgrammingLanguage,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON resolves the text_splitter union by probing each variant in a
// fixed order and keeping the first one that unmarshals without error.
func (u *DestinationPineconeUpdateTextSplitter) UnmarshalJSON(data []byte) error {

	// Try the "By Markdown header" variant first.
	destinationPineconeUpdateByMarkdownHeader := new(DestinationPineconeUpdateByMarkdownHeader)
	if err := utils.UnmarshalJSON(data, &destinationPineconeUpdateByMarkdownHeader, "", true, true); err == nil {
		u.DestinationPineconeUpdateByMarkdownHeader = destinationPineconeUpdateByMarkdownHeader
		u.Type = DestinationPineconeUpdateTextSplitterTypeDestinationPineconeUpdateByMarkdownHeader
		return nil
	}

	// Then the "By Programming Language" variant.
	destinationPineconeUpdateByProgrammingLanguage := new(DestinationPineconeUpdateByProgrammingLanguage)
	if err := utils.UnmarshalJSON(data, &destinationPineconeUpdateByProgrammingLanguage, "", true, true); err == nil {
		u.DestinationPineconeUpdateByProgrammingLanguage = destinationPineconeUpdateByProgrammingLanguage
		u.Type = DestinationPineconeUpdateTextSplitterTypeDestinationPineconeUpdateByProgrammingLanguage
		return nil
	}

	// Finally the "By Separator" variant.
	destinationPineconeUpdateBySeparator := new(DestinationPineconeUpdateBySeparator)
	if err := utils.UnmarshalJSON(data, &destinationPineconeUpdateBySeparator, "", true, true); err == nil {
		u.DestinationPineconeUpdateBySeparator = destinationPineconeUpdateBySeparator
		u.Type = DestinationPineconeUpdateTextSplitterTypeDestinationPineconeUpdateBySeparator
		return nil
	}

	// No variant matched the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u DestinationPineconeUpdateTextSplitter) MarshalJSON() ([]byte, error) {
+ if u.DestinationPineconeUpdateBySeparator != nil {
+ return utils.MarshalJSON(u.DestinationPineconeUpdateBySeparator, "", true)
+ }
+
+ if u.DestinationPineconeUpdateByMarkdownHeader != nil {
+ return utils.MarshalJSON(u.DestinationPineconeUpdateByMarkdownHeader, "", true)
+ }
+
+ if u.DestinationPineconeUpdateByProgrammingLanguage != nil {
+ return utils.MarshalJSON(u.DestinationPineconeUpdateByProgrammingLanguage, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationPineconeUpdateProcessingConfigModel struct {
// Size of overlap between chunks in tokens to store in vector store to better capture relevant context
- ChunkOverlap *int64 `json:"chunk_overlap,omitempty"`
+ ChunkOverlap *int64 `default:"0" json:"chunk_overlap"`
// Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)
ChunkSize int64 `json:"chunk_size"`
+ // List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
+ FieldNameMappings []DestinationPineconeUpdateFieldNameMappingConfigModel `json:"field_name_mappings,omitempty"`
// List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
MetadataFields []string `json:"metadata_fields,omitempty"`
// List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
TextFields []string `json:"text_fields,omitempty"`
+ // Split text fields into chunks based on the specified method.
+ TextSplitter *DestinationPineconeUpdateTextSplitter `json:"text_splitter,omitempty"`
+}
+
+func (d DestinationPineconeUpdateProcessingConfigModel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPineconeUpdateProcessingConfigModel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPineconeUpdateProcessingConfigModel) GetChunkOverlap() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ChunkOverlap
+}
+
+func (o *DestinationPineconeUpdateProcessingConfigModel) GetChunkSize() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.ChunkSize
+}
+
+func (o *DestinationPineconeUpdateProcessingConfigModel) GetFieldNameMappings() []DestinationPineconeUpdateFieldNameMappingConfigModel {
+ if o == nil {
+ return nil
+ }
+ return o.FieldNameMappings
+}
+
+func (o *DestinationPineconeUpdateProcessingConfigModel) GetMetadataFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.MetadataFields
+}
+
+func (o *DestinationPineconeUpdateProcessingConfigModel) GetTextFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TextFields
+}
+
+func (o *DestinationPineconeUpdateProcessingConfigModel) GetTextSplitter() *DestinationPineconeUpdateTextSplitter {
+ if o == nil {
+ return nil
+ }
+ return o.TextSplitter
}
type DestinationPineconeUpdate struct {
@@ -217,3 +886,24 @@ type DestinationPineconeUpdate struct {
Indexing DestinationPineconeUpdateIndexing `json:"indexing"`
Processing DestinationPineconeUpdateProcessingConfigModel `json:"processing"`
}
+
+func (o *DestinationPineconeUpdate) GetEmbedding() DestinationPineconeUpdateEmbedding {
+ if o == nil {
+ return DestinationPineconeUpdateEmbedding{}
+ }
+ return o.Embedding
+}
+
+func (o *DestinationPineconeUpdate) GetIndexing() DestinationPineconeUpdateIndexing {
+ if o == nil {
+ return DestinationPineconeUpdateIndexing{}
+ }
+ return o.Indexing
+}
+
+func (o *DestinationPineconeUpdate) GetProcessing() DestinationPineconeUpdateProcessingConfigModel {
+ if o == nil {
+ return DestinationPineconeUpdateProcessingConfigModel{}
+ }
+ return o.Processing
+}
diff --git a/internal/sdk/pkg/models/shared/destinationpostgres.go b/internal/sdk/pkg/models/shared/destinationpostgres.go
old mode 100755
new mode 100644
index 6ffdff971..61d63ef91
--- a/internal/sdk/pkg/models/shared/destinationpostgres.go
+++ b/internal/sdk/pkg/models/shared/destinationpostgres.go
@@ -3,62 +3,62 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationPostgresPostgres string
+type Postgres string
const (
- DestinationPostgresPostgresPostgres DestinationPostgresPostgres = "postgres"
+ PostgresPostgres Postgres = "postgres"
)
-func (e DestinationPostgresPostgres) ToPointer() *DestinationPostgresPostgres {
+func (e Postgres) ToPointer() *Postgres {
return &e
}
-func (e *DestinationPostgresPostgres) UnmarshalJSON(data []byte) error {
+func (e *Postgres) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "postgres":
- *e = DestinationPostgresPostgres(v)
+ *e = Postgres(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresPostgres: %v", v)
+ return fmt.Errorf("invalid value for Postgres: %v", v)
}
}
-type DestinationPostgresSSLModesVerifyFullMode string
+type DestinationPostgresSchemasSSLModeSSLModes6Mode string
const (
- DestinationPostgresSSLModesVerifyFullModeVerifyFull DestinationPostgresSSLModesVerifyFullMode = "verify-full"
+ DestinationPostgresSchemasSSLModeSSLModes6ModeVerifyFull DestinationPostgresSchemasSSLModeSSLModes6Mode = "verify-full"
)
-func (e DestinationPostgresSSLModesVerifyFullMode) ToPointer() *DestinationPostgresSSLModesVerifyFullMode {
+func (e DestinationPostgresSchemasSSLModeSSLModes6Mode) ToPointer() *DestinationPostgresSchemasSSLModeSSLModes6Mode {
return &e
}
-func (e *DestinationPostgresSSLModesVerifyFullMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresSchemasSSLModeSSLModes6Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-full":
- *e = DestinationPostgresSSLModesVerifyFullMode(v)
+ *e = DestinationPostgresSchemasSSLModeSSLModes6Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresSSLModesVerifyFullMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresSchemasSSLModeSSLModes6Mode: %v", v)
}
}
-// DestinationPostgresSSLModesVerifyFull - Verify-full SSL mode.
-type DestinationPostgresSSLModesVerifyFull struct {
+// DestinationPostgresVerifyFull - Verify-full SSL mode.
+type DestinationPostgresVerifyFull struct {
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -66,289 +66,408 @@ type DestinationPostgresSSLModesVerifyFull struct {
// Client key
ClientKey string `json:"client_key"`
// Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode DestinationPostgresSSLModesVerifyFullMode `json:"mode"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode *DestinationPostgresSchemasSSLModeSSLModes6Mode `const:"verify-full" json:"mode"`
}
-type DestinationPostgresSSLModesVerifyCaMode string
+func (d DestinationPostgresVerifyFull) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresVerifyFull) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgresVerifyFull) GetCaCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.CaCertificate
+}
+
+func (o *DestinationPostgresVerifyFull) GetClientCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientCertificate
+}
+
+func (o *DestinationPostgresVerifyFull) GetClientKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientKey
+}
+
+func (o *DestinationPostgresVerifyFull) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKeyPassword
+}
+
+func (o *DestinationPostgresVerifyFull) GetMode() *DestinationPostgresSchemasSSLModeSSLModes6Mode {
+ return DestinationPostgresSchemasSSLModeSSLModes6ModeVerifyFull.ToPointer()
+}
+
+type DestinationPostgresSchemasSSLModeSSLModes5Mode string
const (
- DestinationPostgresSSLModesVerifyCaModeVerifyCa DestinationPostgresSSLModesVerifyCaMode = "verify-ca"
+ DestinationPostgresSchemasSSLModeSSLModes5ModeVerifyCa DestinationPostgresSchemasSSLModeSSLModes5Mode = "verify-ca"
)
-func (e DestinationPostgresSSLModesVerifyCaMode) ToPointer() *DestinationPostgresSSLModesVerifyCaMode {
+func (e DestinationPostgresSchemasSSLModeSSLModes5Mode) ToPointer() *DestinationPostgresSchemasSSLModeSSLModes5Mode {
return &e
}
-func (e *DestinationPostgresSSLModesVerifyCaMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresSchemasSSLModeSSLModes5Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-ca":
- *e = DestinationPostgresSSLModesVerifyCaMode(v)
+ *e = DestinationPostgresSchemasSSLModeSSLModes5Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresSSLModesVerifyCaMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresSchemasSSLModeSSLModes5Mode: %v", v)
}
}
-// DestinationPostgresSSLModesVerifyCa - Verify-ca SSL mode.
-type DestinationPostgresSSLModesVerifyCa struct {
+// DestinationPostgresVerifyCa - Verify-ca SSL mode.
+type DestinationPostgresVerifyCa struct {
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode DestinationPostgresSSLModesVerifyCaMode `json:"mode"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode *DestinationPostgresSchemasSSLModeSSLModes5Mode `const:"verify-ca" json:"mode"`
}
-type DestinationPostgresSSLModesRequireMode string
+func (d DestinationPostgresVerifyCa) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresVerifyCa) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgresVerifyCa) GetCaCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.CaCertificate
+}
+
+func (o *DestinationPostgresVerifyCa) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKeyPassword
+}
+
+func (o *DestinationPostgresVerifyCa) GetMode() *DestinationPostgresSchemasSSLModeSSLModes5Mode {
+ return DestinationPostgresSchemasSSLModeSSLModes5ModeVerifyCa.ToPointer()
+}
+
+type DestinationPostgresSchemasSSLModeSSLModesMode string
const (
- DestinationPostgresSSLModesRequireModeRequire DestinationPostgresSSLModesRequireMode = "require"
+ DestinationPostgresSchemasSSLModeSSLModesModeRequire DestinationPostgresSchemasSSLModeSSLModesMode = "require"
)
-func (e DestinationPostgresSSLModesRequireMode) ToPointer() *DestinationPostgresSSLModesRequireMode {
+func (e DestinationPostgresSchemasSSLModeSSLModesMode) ToPointer() *DestinationPostgresSchemasSSLModeSSLModesMode {
return &e
}
-func (e *DestinationPostgresSSLModesRequireMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresSchemasSSLModeSSLModesMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "require":
- *e = DestinationPostgresSSLModesRequireMode(v)
+ *e = DestinationPostgresSchemasSSLModeSSLModesMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresSSLModesRequireMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresSchemasSSLModeSSLModesMode: %v", v)
+ }
+}
+
+// DestinationPostgresRequire - Require SSL mode.
+type DestinationPostgresRequire struct {
+ mode *DestinationPostgresSchemasSSLModeSSLModesMode `const:"require" json:"mode"`
+}
+
+func (d DestinationPostgresRequire) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresRequire) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationPostgresSSLModesRequire - Require SSL mode.
-type DestinationPostgresSSLModesRequire struct {
- Mode DestinationPostgresSSLModesRequireMode `json:"mode"`
+func (o *DestinationPostgresRequire) GetMode() *DestinationPostgresSchemasSSLModeSSLModesMode {
+ return DestinationPostgresSchemasSSLModeSSLModesModeRequire.ToPointer()
}
-type DestinationPostgresSSLModesPreferMode string
+type DestinationPostgresSchemasSslModeMode string
const (
- DestinationPostgresSSLModesPreferModePrefer DestinationPostgresSSLModesPreferMode = "prefer"
+ DestinationPostgresSchemasSslModeModePrefer DestinationPostgresSchemasSslModeMode = "prefer"
)
-func (e DestinationPostgresSSLModesPreferMode) ToPointer() *DestinationPostgresSSLModesPreferMode {
+func (e DestinationPostgresSchemasSslModeMode) ToPointer() *DestinationPostgresSchemasSslModeMode {
return &e
}
-func (e *DestinationPostgresSSLModesPreferMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresSchemasSslModeMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "prefer":
- *e = DestinationPostgresSSLModesPreferMode(v)
+ *e = DestinationPostgresSchemasSslModeMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresSSLModesPreferMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresSchemasSslModeMode: %v", v)
+ }
+}
+
+// DestinationPostgresPrefer - Prefer SSL mode.
+type DestinationPostgresPrefer struct {
+ mode *DestinationPostgresSchemasSslModeMode `const:"prefer" json:"mode"`
+}
+
+func (d DestinationPostgresPrefer) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresPrefer) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationPostgresSSLModesPrefer - Prefer SSL mode.
-type DestinationPostgresSSLModesPrefer struct {
- Mode DestinationPostgresSSLModesPreferMode `json:"mode"`
+func (o *DestinationPostgresPrefer) GetMode() *DestinationPostgresSchemasSslModeMode {
+ return DestinationPostgresSchemasSslModeModePrefer.ToPointer()
}
-type DestinationPostgresSSLModesAllowMode string
+type DestinationPostgresSchemasMode string
const (
- DestinationPostgresSSLModesAllowModeAllow DestinationPostgresSSLModesAllowMode = "allow"
+ DestinationPostgresSchemasModeAllow DestinationPostgresSchemasMode = "allow"
)
-func (e DestinationPostgresSSLModesAllowMode) ToPointer() *DestinationPostgresSSLModesAllowMode {
+func (e DestinationPostgresSchemasMode) ToPointer() *DestinationPostgresSchemasMode {
return &e
}
-func (e *DestinationPostgresSSLModesAllowMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "allow":
- *e = DestinationPostgresSSLModesAllowMode(v)
+ *e = DestinationPostgresSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresSSLModesAllowMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresSchemasMode: %v", v)
}
}
-// DestinationPostgresSSLModesAllow - Allow SSL mode.
-type DestinationPostgresSSLModesAllow struct {
- Mode DestinationPostgresSSLModesAllowMode `json:"mode"`
+// DestinationPostgresAllow - Allow SSL mode.
+type DestinationPostgresAllow struct {
+ mode *DestinationPostgresSchemasMode `const:"allow" json:"mode"`
}
-type DestinationPostgresSSLModesDisableMode string
+func (d DestinationPostgresAllow) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresAllow) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgresAllow) GetMode() *DestinationPostgresSchemasMode {
+ return DestinationPostgresSchemasModeAllow.ToPointer()
+}
+
+type DestinationPostgresMode string
const (
- DestinationPostgresSSLModesDisableModeDisable DestinationPostgresSSLModesDisableMode = "disable"
+ DestinationPostgresModeDisable DestinationPostgresMode = "disable"
)
-func (e DestinationPostgresSSLModesDisableMode) ToPointer() *DestinationPostgresSSLModesDisableMode {
+func (e DestinationPostgresMode) ToPointer() *DestinationPostgresMode {
return &e
}
-func (e *DestinationPostgresSSLModesDisableMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "disable":
- *e = DestinationPostgresSSLModesDisableMode(v)
+ *e = DestinationPostgresMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresSSLModesDisableMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresMode: %v", v)
}
}
-// DestinationPostgresSSLModesDisable - Disable SSL.
-type DestinationPostgresSSLModesDisable struct {
- Mode DestinationPostgresSSLModesDisableMode `json:"mode"`
+// DestinationPostgresDisable - Disable SSL.
+type DestinationPostgresDisable struct {
+ mode *DestinationPostgresMode `const:"disable" json:"mode"`
+}
+
+func (d DestinationPostgresDisable) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresDisable) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgresDisable) GetMode() *DestinationPostgresMode {
+ return DestinationPostgresModeDisable.ToPointer()
}
type DestinationPostgresSSLModesType string
const (
- DestinationPostgresSSLModesTypeDestinationPostgresSSLModesDisable DestinationPostgresSSLModesType = "destination-postgres_SSL modes_disable"
- DestinationPostgresSSLModesTypeDestinationPostgresSSLModesAllow DestinationPostgresSSLModesType = "destination-postgres_SSL modes_allow"
- DestinationPostgresSSLModesTypeDestinationPostgresSSLModesPrefer DestinationPostgresSSLModesType = "destination-postgres_SSL modes_prefer"
- DestinationPostgresSSLModesTypeDestinationPostgresSSLModesRequire DestinationPostgresSSLModesType = "destination-postgres_SSL modes_require"
- DestinationPostgresSSLModesTypeDestinationPostgresSSLModesVerifyCa DestinationPostgresSSLModesType = "destination-postgres_SSL modes_verify-ca"
- DestinationPostgresSSLModesTypeDestinationPostgresSSLModesVerifyFull DestinationPostgresSSLModesType = "destination-postgres_SSL modes_verify-full"
+ DestinationPostgresSSLModesTypeDestinationPostgresDisable DestinationPostgresSSLModesType = "destination-postgres_disable"
+ DestinationPostgresSSLModesTypeDestinationPostgresAllow DestinationPostgresSSLModesType = "destination-postgres_allow"
+ DestinationPostgresSSLModesTypeDestinationPostgresPrefer DestinationPostgresSSLModesType = "destination-postgres_prefer"
+ DestinationPostgresSSLModesTypeDestinationPostgresRequire DestinationPostgresSSLModesType = "destination-postgres_require"
+ DestinationPostgresSSLModesTypeDestinationPostgresVerifyCa DestinationPostgresSSLModesType = "destination-postgres_verify-ca"
+ DestinationPostgresSSLModesTypeDestinationPostgresVerifyFull DestinationPostgresSSLModesType = "destination-postgres_verify-full"
)
type DestinationPostgresSSLModes struct {
- DestinationPostgresSSLModesDisable *DestinationPostgresSSLModesDisable
- DestinationPostgresSSLModesAllow *DestinationPostgresSSLModesAllow
- DestinationPostgresSSLModesPrefer *DestinationPostgresSSLModesPrefer
- DestinationPostgresSSLModesRequire *DestinationPostgresSSLModesRequire
- DestinationPostgresSSLModesVerifyCa *DestinationPostgresSSLModesVerifyCa
- DestinationPostgresSSLModesVerifyFull *DestinationPostgresSSLModesVerifyFull
+ DestinationPostgresDisable *DestinationPostgresDisable
+ DestinationPostgresAllow *DestinationPostgresAllow
+ DestinationPostgresPrefer *DestinationPostgresPrefer
+ DestinationPostgresRequire *DestinationPostgresRequire
+ DestinationPostgresVerifyCa *DestinationPostgresVerifyCa
+ DestinationPostgresVerifyFull *DestinationPostgresVerifyFull
Type DestinationPostgresSSLModesType
}
-func CreateDestinationPostgresSSLModesDestinationPostgresSSLModesDisable(destinationPostgresSSLModesDisable DestinationPostgresSSLModesDisable) DestinationPostgresSSLModes {
- typ := DestinationPostgresSSLModesTypeDestinationPostgresSSLModesDisable
+func CreateDestinationPostgresSSLModesDestinationPostgresDisable(destinationPostgresDisable DestinationPostgresDisable) DestinationPostgresSSLModes {
+ typ := DestinationPostgresSSLModesTypeDestinationPostgresDisable
return DestinationPostgresSSLModes{
- DestinationPostgresSSLModesDisable: &destinationPostgresSSLModesDisable,
- Type: typ,
+ DestinationPostgresDisable: &destinationPostgresDisable,
+ Type: typ,
}
}
-func CreateDestinationPostgresSSLModesDestinationPostgresSSLModesAllow(destinationPostgresSSLModesAllow DestinationPostgresSSLModesAllow) DestinationPostgresSSLModes {
- typ := DestinationPostgresSSLModesTypeDestinationPostgresSSLModesAllow
+func CreateDestinationPostgresSSLModesDestinationPostgresAllow(destinationPostgresAllow DestinationPostgresAllow) DestinationPostgresSSLModes {
+ typ := DestinationPostgresSSLModesTypeDestinationPostgresAllow
return DestinationPostgresSSLModes{
- DestinationPostgresSSLModesAllow: &destinationPostgresSSLModesAllow,
- Type: typ,
+ DestinationPostgresAllow: &destinationPostgresAllow,
+ Type: typ,
}
}
-func CreateDestinationPostgresSSLModesDestinationPostgresSSLModesPrefer(destinationPostgresSSLModesPrefer DestinationPostgresSSLModesPrefer) DestinationPostgresSSLModes {
- typ := DestinationPostgresSSLModesTypeDestinationPostgresSSLModesPrefer
+func CreateDestinationPostgresSSLModesDestinationPostgresPrefer(destinationPostgresPrefer DestinationPostgresPrefer) DestinationPostgresSSLModes {
+ typ := DestinationPostgresSSLModesTypeDestinationPostgresPrefer
return DestinationPostgresSSLModes{
- DestinationPostgresSSLModesPrefer: &destinationPostgresSSLModesPrefer,
- Type: typ,
+ DestinationPostgresPrefer: &destinationPostgresPrefer,
+ Type: typ,
}
}
-func CreateDestinationPostgresSSLModesDestinationPostgresSSLModesRequire(destinationPostgresSSLModesRequire DestinationPostgresSSLModesRequire) DestinationPostgresSSLModes {
- typ := DestinationPostgresSSLModesTypeDestinationPostgresSSLModesRequire
+func CreateDestinationPostgresSSLModesDestinationPostgresRequire(destinationPostgresRequire DestinationPostgresRequire) DestinationPostgresSSLModes {
+ typ := DestinationPostgresSSLModesTypeDestinationPostgresRequire
return DestinationPostgresSSLModes{
- DestinationPostgresSSLModesRequire: &destinationPostgresSSLModesRequire,
- Type: typ,
+ DestinationPostgresRequire: &destinationPostgresRequire,
+ Type: typ,
}
}
-func CreateDestinationPostgresSSLModesDestinationPostgresSSLModesVerifyCa(destinationPostgresSSLModesVerifyCa DestinationPostgresSSLModesVerifyCa) DestinationPostgresSSLModes {
- typ := DestinationPostgresSSLModesTypeDestinationPostgresSSLModesVerifyCa
+func CreateDestinationPostgresSSLModesDestinationPostgresVerifyCa(destinationPostgresVerifyCa DestinationPostgresVerifyCa) DestinationPostgresSSLModes {
+ typ := DestinationPostgresSSLModesTypeDestinationPostgresVerifyCa
return DestinationPostgresSSLModes{
- DestinationPostgresSSLModesVerifyCa: &destinationPostgresSSLModesVerifyCa,
- Type: typ,
+ DestinationPostgresVerifyCa: &destinationPostgresVerifyCa,
+ Type: typ,
}
}
-func CreateDestinationPostgresSSLModesDestinationPostgresSSLModesVerifyFull(destinationPostgresSSLModesVerifyFull DestinationPostgresSSLModesVerifyFull) DestinationPostgresSSLModes {
- typ := DestinationPostgresSSLModesTypeDestinationPostgresSSLModesVerifyFull
+func CreateDestinationPostgresSSLModesDestinationPostgresVerifyFull(destinationPostgresVerifyFull DestinationPostgresVerifyFull) DestinationPostgresSSLModes {
+ typ := DestinationPostgresSSLModesTypeDestinationPostgresVerifyFull
return DestinationPostgresSSLModes{
- DestinationPostgresSSLModesVerifyFull: &destinationPostgresSSLModesVerifyFull,
- Type: typ,
+ DestinationPostgresVerifyFull: &destinationPostgresVerifyFull,
+ Type: typ,
}
}
func (u *DestinationPostgresSSLModes) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationPostgresSSLModesDisable := new(DestinationPostgresSSLModesDisable)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresSSLModesDisable); err == nil {
- u.DestinationPostgresSSLModesDisable = destinationPostgresSSLModesDisable
- u.Type = DestinationPostgresSSLModesTypeDestinationPostgresSSLModesDisable
+
+ destinationPostgresDisable := new(DestinationPostgresDisable)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresDisable, "", true, true); err == nil {
+ u.DestinationPostgresDisable = destinationPostgresDisable
+ u.Type = DestinationPostgresSSLModesTypeDestinationPostgresDisable
return nil
}
- destinationPostgresSSLModesAllow := new(DestinationPostgresSSLModesAllow)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresSSLModesAllow); err == nil {
- u.DestinationPostgresSSLModesAllow = destinationPostgresSSLModesAllow
- u.Type = DestinationPostgresSSLModesTypeDestinationPostgresSSLModesAllow
+ destinationPostgresAllow := new(DestinationPostgresAllow)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresAllow, "", true, true); err == nil {
+ u.DestinationPostgresAllow = destinationPostgresAllow
+ u.Type = DestinationPostgresSSLModesTypeDestinationPostgresAllow
return nil
}
- destinationPostgresSSLModesPrefer := new(DestinationPostgresSSLModesPrefer)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresSSLModesPrefer); err == nil {
- u.DestinationPostgresSSLModesPrefer = destinationPostgresSSLModesPrefer
- u.Type = DestinationPostgresSSLModesTypeDestinationPostgresSSLModesPrefer
+ destinationPostgresPrefer := new(DestinationPostgresPrefer)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresPrefer, "", true, true); err == nil {
+ u.DestinationPostgresPrefer = destinationPostgresPrefer
+ u.Type = DestinationPostgresSSLModesTypeDestinationPostgresPrefer
return nil
}
- destinationPostgresSSLModesRequire := new(DestinationPostgresSSLModesRequire)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresSSLModesRequire); err == nil {
- u.DestinationPostgresSSLModesRequire = destinationPostgresSSLModesRequire
- u.Type = DestinationPostgresSSLModesTypeDestinationPostgresSSLModesRequire
+ destinationPostgresRequire := new(DestinationPostgresRequire)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresRequire, "", true, true); err == nil {
+ u.DestinationPostgresRequire = destinationPostgresRequire
+ u.Type = DestinationPostgresSSLModesTypeDestinationPostgresRequire
return nil
}
- destinationPostgresSSLModesVerifyCa := new(DestinationPostgresSSLModesVerifyCa)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresSSLModesVerifyCa); err == nil {
- u.DestinationPostgresSSLModesVerifyCa = destinationPostgresSSLModesVerifyCa
- u.Type = DestinationPostgresSSLModesTypeDestinationPostgresSSLModesVerifyCa
+ destinationPostgresVerifyCa := new(DestinationPostgresVerifyCa)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresVerifyCa, "", true, true); err == nil {
+ u.DestinationPostgresVerifyCa = destinationPostgresVerifyCa
+ u.Type = DestinationPostgresSSLModesTypeDestinationPostgresVerifyCa
return nil
}
- destinationPostgresSSLModesVerifyFull := new(DestinationPostgresSSLModesVerifyFull)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresSSLModesVerifyFull); err == nil {
- u.DestinationPostgresSSLModesVerifyFull = destinationPostgresSSLModesVerifyFull
- u.Type = DestinationPostgresSSLModesTypeDestinationPostgresSSLModesVerifyFull
+ destinationPostgresVerifyFull := new(DestinationPostgresVerifyFull)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresVerifyFull, "", true, true); err == nil {
+ u.DestinationPostgresVerifyFull = destinationPostgresVerifyFull
+ u.Type = DestinationPostgresSSLModesTypeDestinationPostgresVerifyFull
return nil
}
@@ -356,212 +475,306 @@ func (u *DestinationPostgresSSLModes) UnmarshalJSON(data []byte) error {
}
func (u DestinationPostgresSSLModes) MarshalJSON() ([]byte, error) {
- if u.DestinationPostgresSSLModesDisable != nil {
- return json.Marshal(u.DestinationPostgresSSLModesDisable)
+ if u.DestinationPostgresDisable != nil {
+ return utils.MarshalJSON(u.DestinationPostgresDisable, "", true)
}
- if u.DestinationPostgresSSLModesAllow != nil {
- return json.Marshal(u.DestinationPostgresSSLModesAllow)
+ if u.DestinationPostgresAllow != nil {
+ return utils.MarshalJSON(u.DestinationPostgresAllow, "", true)
}
- if u.DestinationPostgresSSLModesPrefer != nil {
- return json.Marshal(u.DestinationPostgresSSLModesPrefer)
+ if u.DestinationPostgresPrefer != nil {
+ return utils.MarshalJSON(u.DestinationPostgresPrefer, "", true)
}
- if u.DestinationPostgresSSLModesRequire != nil {
- return json.Marshal(u.DestinationPostgresSSLModesRequire)
+ if u.DestinationPostgresRequire != nil {
+ return utils.MarshalJSON(u.DestinationPostgresRequire, "", true)
}
- if u.DestinationPostgresSSLModesVerifyCa != nil {
- return json.Marshal(u.DestinationPostgresSSLModesVerifyCa)
+ if u.DestinationPostgresVerifyCa != nil {
+ return utils.MarshalJSON(u.DestinationPostgresVerifyCa, "", true)
}
- if u.DestinationPostgresSSLModesVerifyFull != nil {
- return json.Marshal(u.DestinationPostgresSSLModesVerifyFull)
+ if u.DestinationPostgresVerifyFull != nil {
+ return utils.MarshalJSON(u.DestinationPostgresVerifyFull, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationPostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationPostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationPostgresSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationPostgresSchemasTunnelMethodTunnelMethod string
const (
- DestinationPostgresSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationPostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationPostgresSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationPostgresSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationPostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationPostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationPostgresSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationPostgresSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationPostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationPostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationPostgresSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationPostgresSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationPostgresSSHTunnelMethodPasswordAuthentication struct {
+// DestinationPostgresPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationPostgresPasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationPostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationPostgresSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationPostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationPostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationPostgresPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgresPasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationPostgresPasswordAuthentication) GetTunnelMethod() DestinationPostgresSchemasTunnelMethodTunnelMethod {
+ return DestinationPostgresSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationPostgresPasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationPostgresPasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationPostgresPasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationPostgresSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationPostgresSchemasTunnelMethod string
const (
- DestinationPostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationPostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationPostgresSchemasTunnelMethodSSHKeyAuth DestinationPostgresSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationPostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationPostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationPostgresSchemasTunnelMethod) ToPointer() *DestinationPostgresSchemasTunnelMethod {
return &e
}
-func (e *DestinationPostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationPostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationPostgresSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresSchemasTunnelMethod: %v", v)
}
}
-// DestinationPostgresSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationPostgresSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationPostgresSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationPostgresSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationPostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationPostgresSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationPostgresSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationPostgresSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationPostgresSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgresSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationPostgresSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationPostgresSSHKeyAuthentication) GetTunnelMethod() DestinationPostgresSchemasTunnelMethod {
+ return DestinationPostgresSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationPostgresSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationPostgresSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationPostgresTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationPostgresTunnelMethod string
const (
- DestinationPostgresSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationPostgresSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationPostgresTunnelMethodNoTunnel DestinationPostgresTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationPostgresSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationPostgresSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationPostgresTunnelMethod) ToPointer() *DestinationPostgresTunnelMethod {
return &e
}
-func (e *DestinationPostgresSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationPostgresSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationPostgresTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresTunnelMethod: %v", v)
}
}
-// DestinationPostgresSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationPostgresSSHTunnelMethodNoTunnel struct {
+// DestinationPostgresNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationPostgresNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationPostgresSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationPostgresTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationPostgresNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgresNoTunnel) GetTunnelMethod() DestinationPostgresTunnelMethod {
+ return DestinationPostgresTunnelMethodNoTunnel
}
type DestinationPostgresSSHTunnelMethodType string
const (
- DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHTunnelMethodNoTunnel DestinationPostgresSSHTunnelMethodType = "destination-postgres_SSH Tunnel Method_No Tunnel"
- DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHTunnelMethodSSHKeyAuthentication DestinationPostgresSSHTunnelMethodType = "destination-postgres_SSH Tunnel Method_SSH Key Authentication"
- DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHTunnelMethodPasswordAuthentication DestinationPostgresSSHTunnelMethodType = "destination-postgres_SSH Tunnel Method_Password Authentication"
+ DestinationPostgresSSHTunnelMethodTypeDestinationPostgresNoTunnel DestinationPostgresSSHTunnelMethodType = "destination-postgres_No Tunnel"
+ DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHKeyAuthentication DestinationPostgresSSHTunnelMethodType = "destination-postgres_SSH Key Authentication"
+ DestinationPostgresSSHTunnelMethodTypeDestinationPostgresPasswordAuthentication DestinationPostgresSSHTunnelMethodType = "destination-postgres_Password Authentication"
)
type DestinationPostgresSSHTunnelMethod struct {
- DestinationPostgresSSHTunnelMethodNoTunnel *DestinationPostgresSSHTunnelMethodNoTunnel
- DestinationPostgresSSHTunnelMethodSSHKeyAuthentication *DestinationPostgresSSHTunnelMethodSSHKeyAuthentication
- DestinationPostgresSSHTunnelMethodPasswordAuthentication *DestinationPostgresSSHTunnelMethodPasswordAuthentication
+ DestinationPostgresNoTunnel *DestinationPostgresNoTunnel
+ DestinationPostgresSSHKeyAuthentication *DestinationPostgresSSHKeyAuthentication
+ DestinationPostgresPasswordAuthentication *DestinationPostgresPasswordAuthentication
Type DestinationPostgresSSHTunnelMethodType
}
-func CreateDestinationPostgresSSHTunnelMethodDestinationPostgresSSHTunnelMethodNoTunnel(destinationPostgresSSHTunnelMethodNoTunnel DestinationPostgresSSHTunnelMethodNoTunnel) DestinationPostgresSSHTunnelMethod {
- typ := DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHTunnelMethodNoTunnel
+func CreateDestinationPostgresSSHTunnelMethodDestinationPostgresNoTunnel(destinationPostgresNoTunnel DestinationPostgresNoTunnel) DestinationPostgresSSHTunnelMethod {
+ typ := DestinationPostgresSSHTunnelMethodTypeDestinationPostgresNoTunnel
return DestinationPostgresSSHTunnelMethod{
- DestinationPostgresSSHTunnelMethodNoTunnel: &destinationPostgresSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationPostgresNoTunnel: &destinationPostgresNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationPostgresSSHTunnelMethodDestinationPostgresSSHTunnelMethodSSHKeyAuthentication(destinationPostgresSSHTunnelMethodSSHKeyAuthentication DestinationPostgresSSHTunnelMethodSSHKeyAuthentication) DestinationPostgresSSHTunnelMethod {
- typ := DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationPostgresSSHTunnelMethodDestinationPostgresSSHKeyAuthentication(destinationPostgresSSHKeyAuthentication DestinationPostgresSSHKeyAuthentication) DestinationPostgresSSHTunnelMethod {
+ typ := DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHKeyAuthentication
return DestinationPostgresSSHTunnelMethod{
- DestinationPostgresSSHTunnelMethodSSHKeyAuthentication: &destinationPostgresSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ DestinationPostgresSSHKeyAuthentication: &destinationPostgresSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateDestinationPostgresSSHTunnelMethodDestinationPostgresSSHTunnelMethodPasswordAuthentication(destinationPostgresSSHTunnelMethodPasswordAuthentication DestinationPostgresSSHTunnelMethodPasswordAuthentication) DestinationPostgresSSHTunnelMethod {
- typ := DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHTunnelMethodPasswordAuthentication
+func CreateDestinationPostgresSSHTunnelMethodDestinationPostgresPasswordAuthentication(destinationPostgresPasswordAuthentication DestinationPostgresPasswordAuthentication) DestinationPostgresSSHTunnelMethod {
+ typ := DestinationPostgresSSHTunnelMethodTypeDestinationPostgresPasswordAuthentication
return DestinationPostgresSSHTunnelMethod{
- DestinationPostgresSSHTunnelMethodPasswordAuthentication: &destinationPostgresSSHTunnelMethodPasswordAuthentication,
+ DestinationPostgresPasswordAuthentication: &destinationPostgresPasswordAuthentication,
Type: typ,
}
}
func (u *DestinationPostgresSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationPostgresSSHTunnelMethodNoTunnel := new(DestinationPostgresSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationPostgresSSHTunnelMethodNoTunnel = destinationPostgresSSHTunnelMethodNoTunnel
- u.Type = DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHTunnelMethodNoTunnel
+
+ destinationPostgresNoTunnel := new(DestinationPostgresNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresNoTunnel, "", true, true); err == nil {
+ u.DestinationPostgresNoTunnel = destinationPostgresNoTunnel
+ u.Type = DestinationPostgresSSHTunnelMethodTypeDestinationPostgresNoTunnel
return nil
}
- destinationPostgresSSHTunnelMethodSSHKeyAuthentication := new(DestinationPostgresSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationPostgresSSHTunnelMethodSSHKeyAuthentication = destinationPostgresSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHTunnelMethodSSHKeyAuthentication
+ destinationPostgresSSHKeyAuthentication := new(DestinationPostgresSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationPostgresSSHKeyAuthentication = destinationPostgresSSHKeyAuthentication
+ u.Type = DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHKeyAuthentication
return nil
}
- destinationPostgresSSHTunnelMethodPasswordAuthentication := new(DestinationPostgresSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationPostgresSSHTunnelMethodPasswordAuthentication = destinationPostgresSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationPostgresSSHTunnelMethodTypeDestinationPostgresSSHTunnelMethodPasswordAuthentication
+ destinationPostgresPasswordAuthentication := new(DestinationPostgresPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresPasswordAuthentication, "", true, true); err == nil {
+ u.DestinationPostgresPasswordAuthentication = destinationPostgresPasswordAuthentication
+ u.Type = DestinationPostgresSSHTunnelMethodTypeDestinationPostgresPasswordAuthentication
return nil
}
@@ -569,25 +782,25 @@ func (u *DestinationPostgresSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationPostgresSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationPostgresSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationPostgresSSHTunnelMethodNoTunnel)
+ if u.DestinationPostgresNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationPostgresNoTunnel, "", true)
}
- if u.DestinationPostgresSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationPostgresSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationPostgresSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationPostgresSSHKeyAuthentication, "", true)
}
- if u.DestinationPostgresSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationPostgresSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationPostgresPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationPostgresPasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationPostgres struct {
// Name of the database.
- Database string `json:"database"`
- DestinationType DestinationPostgresPostgres `json:"destinationType"`
+ Database string `json:"database"`
+ destinationType Postgres `const:"postgres" json:"destinationType"`
// Hostname of the database.
Host string `json:"host"`
// Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
@@ -595,9 +808,9 @@ type DestinationPostgres struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5432" json:"port"`
// The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".
- Schema string `json:"schema"`
+ Schema *string `default:"public" json:"schema"`
// SSL connection modes.
// disable - Chose this mode to disable encryption of communication between Airbyte and destination database
// allow - Chose this mode to enable encryption only when required by the source database
@@ -612,3 +825,81 @@ type DestinationPostgres struct {
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationPostgres) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgres) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgres) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationPostgres) GetDestinationType() Postgres {
+ return PostgresPostgres
+}
+
+func (o *DestinationPostgres) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationPostgres) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationPostgres) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationPostgres) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationPostgres) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *DestinationPostgres) GetSslMode() *DestinationPostgresSSLModes {
+ if o == nil {
+ return nil
+ }
+ return o.SslMode
+}
+
+func (o *DestinationPostgres) GetTunnelMethod() *DestinationPostgresSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationPostgres) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationpostgrescreaterequest.go b/internal/sdk/pkg/models/shared/destinationpostgrescreaterequest.go
old mode 100755
new mode 100644
index 39a24904a..cfb46ba57
--- a/internal/sdk/pkg/models/shared/destinationpostgrescreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationpostgrescreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationPostgresCreateRequest struct {
Configuration DestinationPostgres `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationPostgresCreateRequest) GetConfiguration() DestinationPostgres {
+ if o == nil {
+ return DestinationPostgres{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationPostgresCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationPostgresCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationPostgresCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationpostgresputrequest.go b/internal/sdk/pkg/models/shared/destinationpostgresputrequest.go
old mode 100755
new mode 100644
index ee308d3fc..fed0fc787
--- a/internal/sdk/pkg/models/shared/destinationpostgresputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationpostgresputrequest.go
@@ -7,3 +7,24 @@ type DestinationPostgresPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationPostgresPutRequest) GetConfiguration() DestinationPostgresUpdate {
+ if o == nil {
+ return DestinationPostgresUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationPostgresPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationPostgresPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationpostgresupdate.go b/internal/sdk/pkg/models/shared/destinationpostgresupdate.go
old mode 100755
new mode 100644
index 79eec9137..ab189797f
--- a/internal/sdk/pkg/models/shared/destinationpostgresupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationpostgresupdate.go
@@ -3,38 +3,38 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationPostgresUpdateSSLModesVerifyFullMode string
+type DestinationPostgresUpdateSchemasSSLModeSSLModes6Mode string
const (
- DestinationPostgresUpdateSSLModesVerifyFullModeVerifyFull DestinationPostgresUpdateSSLModesVerifyFullMode = "verify-full"
+ DestinationPostgresUpdateSchemasSSLModeSSLModes6ModeVerifyFull DestinationPostgresUpdateSchemasSSLModeSSLModes6Mode = "verify-full"
)
-func (e DestinationPostgresUpdateSSLModesVerifyFullMode) ToPointer() *DestinationPostgresUpdateSSLModesVerifyFullMode {
+func (e DestinationPostgresUpdateSchemasSSLModeSSLModes6Mode) ToPointer() *DestinationPostgresUpdateSchemasSSLModeSSLModes6Mode {
return &e
}
-func (e *DestinationPostgresUpdateSSLModesVerifyFullMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresUpdateSchemasSSLModeSSLModes6Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-full":
- *e = DestinationPostgresUpdateSSLModesVerifyFullMode(v)
+ *e = DestinationPostgresUpdateSchemasSSLModeSSLModes6Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresUpdateSSLModesVerifyFullMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresUpdateSchemasSSLModeSSLModes6Mode: %v", v)
}
}
-// DestinationPostgresUpdateSSLModesVerifyFull - Verify-full SSL mode.
-type DestinationPostgresUpdateSSLModesVerifyFull struct {
+// VerifyFull - Verify-full SSL mode.
+type VerifyFull struct {
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -42,502 +42,715 @@ type DestinationPostgresUpdateSSLModesVerifyFull struct {
// Client key
ClientKey string `json:"client_key"`
// Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode DestinationPostgresUpdateSSLModesVerifyFullMode `json:"mode"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode *DestinationPostgresUpdateSchemasSSLModeSSLModes6Mode `const:"verify-full" json:"mode"`
}
-type DestinationPostgresUpdateSSLModesVerifyCaMode string
+func (v VerifyFull) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(v, "", false)
+}
+
+func (v *VerifyFull) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &v, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *VerifyFull) GetCaCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.CaCertificate
+}
+
+func (o *VerifyFull) GetClientCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientCertificate
+}
+
+func (o *VerifyFull) GetClientKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientKey
+}
+
+func (o *VerifyFull) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKeyPassword
+}
+
+func (o *VerifyFull) GetMode() *DestinationPostgresUpdateSchemasSSLModeSSLModes6Mode {
+ return DestinationPostgresUpdateSchemasSSLModeSSLModes6ModeVerifyFull.ToPointer()
+}
+
+type DestinationPostgresUpdateSchemasSSLModeSSLModesMode string
const (
- DestinationPostgresUpdateSSLModesVerifyCaModeVerifyCa DestinationPostgresUpdateSSLModesVerifyCaMode = "verify-ca"
+ DestinationPostgresUpdateSchemasSSLModeSSLModesModeVerifyCa DestinationPostgresUpdateSchemasSSLModeSSLModesMode = "verify-ca"
)
-func (e DestinationPostgresUpdateSSLModesVerifyCaMode) ToPointer() *DestinationPostgresUpdateSSLModesVerifyCaMode {
+func (e DestinationPostgresUpdateSchemasSSLModeSSLModesMode) ToPointer() *DestinationPostgresUpdateSchemasSSLModeSSLModesMode {
return &e
}
-func (e *DestinationPostgresUpdateSSLModesVerifyCaMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresUpdateSchemasSSLModeSSLModesMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-ca":
- *e = DestinationPostgresUpdateSSLModesVerifyCaMode(v)
+ *e = DestinationPostgresUpdateSchemasSSLModeSSLModesMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresUpdateSSLModesVerifyCaMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresUpdateSchemasSSLModeSSLModesMode: %v", v)
}
}
-// DestinationPostgresUpdateSSLModesVerifyCa - Verify-ca SSL mode.
-type DestinationPostgresUpdateSSLModesVerifyCa struct {
+// VerifyCa - Verify-ca SSL mode.
+type VerifyCa struct {
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode DestinationPostgresUpdateSSLModesVerifyCaMode `json:"mode"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode *DestinationPostgresUpdateSchemasSSLModeSSLModesMode `const:"verify-ca" json:"mode"`
}
-type DestinationPostgresUpdateSSLModesRequireMode string
+func (v VerifyCa) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(v, "", false)
+}
+
+func (v *VerifyCa) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &v, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *VerifyCa) GetCaCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.CaCertificate
+}
+
+func (o *VerifyCa) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKeyPassword
+}
+
+func (o *VerifyCa) GetMode() *DestinationPostgresUpdateSchemasSSLModeSSLModesMode {
+ return DestinationPostgresUpdateSchemasSSLModeSSLModesModeVerifyCa.ToPointer()
+}
+
+type DestinationPostgresUpdateSchemasSslModeMode string
const (
- DestinationPostgresUpdateSSLModesRequireModeRequire DestinationPostgresUpdateSSLModesRequireMode = "require"
+ DestinationPostgresUpdateSchemasSslModeModeRequire DestinationPostgresUpdateSchemasSslModeMode = "require"
)
-func (e DestinationPostgresUpdateSSLModesRequireMode) ToPointer() *DestinationPostgresUpdateSSLModesRequireMode {
+func (e DestinationPostgresUpdateSchemasSslModeMode) ToPointer() *DestinationPostgresUpdateSchemasSslModeMode {
return &e
}
-func (e *DestinationPostgresUpdateSSLModesRequireMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresUpdateSchemasSslModeMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "require":
- *e = DestinationPostgresUpdateSSLModesRequireMode(v)
+ *e = DestinationPostgresUpdateSchemasSslModeMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresUpdateSSLModesRequireMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresUpdateSchemasSslModeMode: %v", v)
}
}
-// DestinationPostgresUpdateSSLModesRequire - Require SSL mode.
-type DestinationPostgresUpdateSSLModesRequire struct {
- Mode DestinationPostgresUpdateSSLModesRequireMode `json:"mode"`
+// Require SSL mode.
+type Require struct {
+ mode *DestinationPostgresUpdateSchemasSslModeMode `const:"require" json:"mode"`
}
-type DestinationPostgresUpdateSSLModesPreferMode string
+func (r Require) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(r, "", false)
+}
+
+func (r *Require) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &r, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Require) GetMode() *DestinationPostgresUpdateSchemasSslModeMode {
+ return DestinationPostgresUpdateSchemasSslModeModeRequire.ToPointer()
+}
+
+type DestinationPostgresUpdateSchemasMode string
const (
- DestinationPostgresUpdateSSLModesPreferModePrefer DestinationPostgresUpdateSSLModesPreferMode = "prefer"
+ DestinationPostgresUpdateSchemasModePrefer DestinationPostgresUpdateSchemasMode = "prefer"
)
-func (e DestinationPostgresUpdateSSLModesPreferMode) ToPointer() *DestinationPostgresUpdateSSLModesPreferMode {
+func (e DestinationPostgresUpdateSchemasMode) ToPointer() *DestinationPostgresUpdateSchemasMode {
return &e
}
-func (e *DestinationPostgresUpdateSSLModesPreferMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresUpdateSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "prefer":
- *e = DestinationPostgresUpdateSSLModesPreferMode(v)
+ *e = DestinationPostgresUpdateSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresUpdateSSLModesPreferMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresUpdateSchemasMode: %v", v)
+ }
+}
+
+// Prefer SSL mode.
+type Prefer struct {
+ mode *DestinationPostgresUpdateSchemasMode `const:"prefer" json:"mode"`
+}
+
+func (p Prefer) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(p, "", false)
+}
+
+func (p *Prefer) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &p, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationPostgresUpdateSSLModesPrefer - Prefer SSL mode.
-type DestinationPostgresUpdateSSLModesPrefer struct {
- Mode DestinationPostgresUpdateSSLModesPreferMode `json:"mode"`
+func (o *Prefer) GetMode() *DestinationPostgresUpdateSchemasMode {
+ return DestinationPostgresUpdateSchemasModePrefer.ToPointer()
}
-type DestinationPostgresUpdateSSLModesAllowMode string
+type DestinationPostgresUpdateMode string
const (
- DestinationPostgresUpdateSSLModesAllowModeAllow DestinationPostgresUpdateSSLModesAllowMode = "allow"
+ DestinationPostgresUpdateModeAllow DestinationPostgresUpdateMode = "allow"
)
-func (e DestinationPostgresUpdateSSLModesAllowMode) ToPointer() *DestinationPostgresUpdateSSLModesAllowMode {
+func (e DestinationPostgresUpdateMode) ToPointer() *DestinationPostgresUpdateMode {
return &e
}
-func (e *DestinationPostgresUpdateSSLModesAllowMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresUpdateMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "allow":
- *e = DestinationPostgresUpdateSSLModesAllowMode(v)
+ *e = DestinationPostgresUpdateMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresUpdateSSLModesAllowMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresUpdateMode: %v", v)
+ }
+}
+
+// Allow SSL mode.
+type Allow struct {
+ mode *DestinationPostgresUpdateMode `const:"allow" json:"mode"`
+}
+
+func (a Allow) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *Allow) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationPostgresUpdateSSLModesAllow - Allow SSL mode.
-type DestinationPostgresUpdateSSLModesAllow struct {
- Mode DestinationPostgresUpdateSSLModesAllowMode `json:"mode"`
+func (o *Allow) GetMode() *DestinationPostgresUpdateMode {
+ return DestinationPostgresUpdateModeAllow.ToPointer()
}
-type DestinationPostgresUpdateSSLModesDisableMode string
+type Mode string
const (
- DestinationPostgresUpdateSSLModesDisableModeDisable DestinationPostgresUpdateSSLModesDisableMode = "disable"
+ ModeDisable Mode = "disable"
)
-func (e DestinationPostgresUpdateSSLModesDisableMode) ToPointer() *DestinationPostgresUpdateSSLModesDisableMode {
+func (e Mode) ToPointer() *Mode {
return &e
}
-func (e *DestinationPostgresUpdateSSLModesDisableMode) UnmarshalJSON(data []byte) error {
+func (e *Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "disable":
- *e = DestinationPostgresUpdateSSLModesDisableMode(v)
+ *e = Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresUpdateSSLModesDisableMode: %v", v)
+ return fmt.Errorf("invalid value for Mode: %v", v)
+ }
+}
+
+// Disable SSL.
+type Disable struct {
+ mode *Mode `const:"disable" json:"mode"`
+}
+
+func (d Disable) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *Disable) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationPostgresUpdateSSLModesDisable - Disable SSL.
-type DestinationPostgresUpdateSSLModesDisable struct {
- Mode DestinationPostgresUpdateSSLModesDisableMode `json:"mode"`
+func (o *Disable) GetMode() *Mode {
+ return ModeDisable.ToPointer()
}
-type DestinationPostgresUpdateSSLModesType string
+type SSLModesType string
const (
- DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesDisable DestinationPostgresUpdateSSLModesType = "destination-postgres-update_SSL modes_disable"
- DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesAllow DestinationPostgresUpdateSSLModesType = "destination-postgres-update_SSL modes_allow"
- DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesPrefer DestinationPostgresUpdateSSLModesType = "destination-postgres-update_SSL modes_prefer"
- DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesRequire DestinationPostgresUpdateSSLModesType = "destination-postgres-update_SSL modes_require"
- DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesVerifyCa DestinationPostgresUpdateSSLModesType = "destination-postgres-update_SSL modes_verify-ca"
- DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesVerifyFull DestinationPostgresUpdateSSLModesType = "destination-postgres-update_SSL modes_verify-full"
+ SSLModesTypeDisable SSLModesType = "disable"
+ SSLModesTypeAllow SSLModesType = "allow"
+ SSLModesTypePrefer SSLModesType = "prefer"
+ SSLModesTypeRequire SSLModesType = "require"
+ SSLModesTypeVerifyCa SSLModesType = "verify-ca"
+ SSLModesTypeVerifyFull SSLModesType = "verify-full"
)
-type DestinationPostgresUpdateSSLModes struct {
- DestinationPostgresUpdateSSLModesDisable *DestinationPostgresUpdateSSLModesDisable
- DestinationPostgresUpdateSSLModesAllow *DestinationPostgresUpdateSSLModesAllow
- DestinationPostgresUpdateSSLModesPrefer *DestinationPostgresUpdateSSLModesPrefer
- DestinationPostgresUpdateSSLModesRequire *DestinationPostgresUpdateSSLModesRequire
- DestinationPostgresUpdateSSLModesVerifyCa *DestinationPostgresUpdateSSLModesVerifyCa
- DestinationPostgresUpdateSSLModesVerifyFull *DestinationPostgresUpdateSSLModesVerifyFull
+type SSLModes struct {
+ Disable *Disable
+ Allow *Allow
+ Prefer *Prefer
+ Require *Require
+ VerifyCa *VerifyCa
+ VerifyFull *VerifyFull
- Type DestinationPostgresUpdateSSLModesType
+ Type SSLModesType
}
-func CreateDestinationPostgresUpdateSSLModesDestinationPostgresUpdateSSLModesDisable(destinationPostgresUpdateSSLModesDisable DestinationPostgresUpdateSSLModesDisable) DestinationPostgresUpdateSSLModes {
- typ := DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesDisable
+func CreateSSLModesDisable(disable Disable) SSLModes {
+ typ := SSLModesTypeDisable
- return DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesDisable: &destinationPostgresUpdateSSLModesDisable,
- Type: typ,
+ return SSLModes{
+ Disable: &disable,
+ Type: typ,
}
}
-func CreateDestinationPostgresUpdateSSLModesDestinationPostgresUpdateSSLModesAllow(destinationPostgresUpdateSSLModesAllow DestinationPostgresUpdateSSLModesAllow) DestinationPostgresUpdateSSLModes {
- typ := DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesAllow
+func CreateSSLModesAllow(allow Allow) SSLModes {
+ typ := SSLModesTypeAllow
- return DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesAllow: &destinationPostgresUpdateSSLModesAllow,
- Type: typ,
+ return SSLModes{
+ Allow: &allow,
+ Type: typ,
}
}
-func CreateDestinationPostgresUpdateSSLModesDestinationPostgresUpdateSSLModesPrefer(destinationPostgresUpdateSSLModesPrefer DestinationPostgresUpdateSSLModesPrefer) DestinationPostgresUpdateSSLModes {
- typ := DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesPrefer
+func CreateSSLModesPrefer(prefer Prefer) SSLModes {
+ typ := SSLModesTypePrefer
- return DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesPrefer: &destinationPostgresUpdateSSLModesPrefer,
- Type: typ,
+ return SSLModes{
+ Prefer: &prefer,
+ Type: typ,
}
}
-func CreateDestinationPostgresUpdateSSLModesDestinationPostgresUpdateSSLModesRequire(destinationPostgresUpdateSSLModesRequire DestinationPostgresUpdateSSLModesRequire) DestinationPostgresUpdateSSLModes {
- typ := DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesRequire
+func CreateSSLModesRequire(require Require) SSLModes {
+ typ := SSLModesTypeRequire
- return DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesRequire: &destinationPostgresUpdateSSLModesRequire,
- Type: typ,
+ return SSLModes{
+ Require: &require,
+ Type: typ,
}
}
-func CreateDestinationPostgresUpdateSSLModesDestinationPostgresUpdateSSLModesVerifyCa(destinationPostgresUpdateSSLModesVerifyCa DestinationPostgresUpdateSSLModesVerifyCa) DestinationPostgresUpdateSSLModes {
- typ := DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesVerifyCa
+func CreateSSLModesVerifyCa(verifyCa VerifyCa) SSLModes {
+ typ := SSLModesTypeVerifyCa
- return DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesVerifyCa: &destinationPostgresUpdateSSLModesVerifyCa,
- Type: typ,
+ return SSLModes{
+ VerifyCa: &verifyCa,
+ Type: typ,
}
}
-func CreateDestinationPostgresUpdateSSLModesDestinationPostgresUpdateSSLModesVerifyFull(destinationPostgresUpdateSSLModesVerifyFull DestinationPostgresUpdateSSLModesVerifyFull) DestinationPostgresUpdateSSLModes {
- typ := DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesVerifyFull
+func CreateSSLModesVerifyFull(verifyFull VerifyFull) SSLModes {
+ typ := SSLModesTypeVerifyFull
- return DestinationPostgresUpdateSSLModes{
- DestinationPostgresUpdateSSLModesVerifyFull: &destinationPostgresUpdateSSLModesVerifyFull,
- Type: typ,
+ return SSLModes{
+ VerifyFull: &verifyFull,
+ Type: typ,
}
}
-func (u *DestinationPostgresUpdateSSLModes) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *SSLModes) UnmarshalJSON(data []byte) error {
- destinationPostgresUpdateSSLModesDisable := new(DestinationPostgresUpdateSSLModesDisable)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresUpdateSSLModesDisable); err == nil {
- u.DestinationPostgresUpdateSSLModesDisable = destinationPostgresUpdateSSLModesDisable
- u.Type = DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesDisable
+ disable := new(Disable)
+ if err := utils.UnmarshalJSON(data, &disable, "", true, true); err == nil {
+ u.Disable = disable
+ u.Type = SSLModesTypeDisable
return nil
}
- destinationPostgresUpdateSSLModesAllow := new(DestinationPostgresUpdateSSLModesAllow)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresUpdateSSLModesAllow); err == nil {
- u.DestinationPostgresUpdateSSLModesAllow = destinationPostgresUpdateSSLModesAllow
- u.Type = DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesAllow
+ allow := new(Allow)
+ if err := utils.UnmarshalJSON(data, &allow, "", true, true); err == nil {
+ u.Allow = allow
+ u.Type = SSLModesTypeAllow
return nil
}
- destinationPostgresUpdateSSLModesPrefer := new(DestinationPostgresUpdateSSLModesPrefer)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresUpdateSSLModesPrefer); err == nil {
- u.DestinationPostgresUpdateSSLModesPrefer = destinationPostgresUpdateSSLModesPrefer
- u.Type = DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesPrefer
+ prefer := new(Prefer)
+ if err := utils.UnmarshalJSON(data, &prefer, "", true, true); err == nil {
+ u.Prefer = prefer
+ u.Type = SSLModesTypePrefer
return nil
}
- destinationPostgresUpdateSSLModesRequire := new(DestinationPostgresUpdateSSLModesRequire)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresUpdateSSLModesRequire); err == nil {
- u.DestinationPostgresUpdateSSLModesRequire = destinationPostgresUpdateSSLModesRequire
- u.Type = DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesRequire
+ require := new(Require)
+ if err := utils.UnmarshalJSON(data, &require, "", true, true); err == nil {
+ u.Require = require
+ u.Type = SSLModesTypeRequire
return nil
}
- destinationPostgresUpdateSSLModesVerifyCa := new(DestinationPostgresUpdateSSLModesVerifyCa)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresUpdateSSLModesVerifyCa); err == nil {
- u.DestinationPostgresUpdateSSLModesVerifyCa = destinationPostgresUpdateSSLModesVerifyCa
- u.Type = DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesVerifyCa
+ verifyCa := new(VerifyCa)
+ if err := utils.UnmarshalJSON(data, &verifyCa, "", true, true); err == nil {
+ u.VerifyCa = verifyCa
+ u.Type = SSLModesTypeVerifyCa
return nil
}
- destinationPostgresUpdateSSLModesVerifyFull := new(DestinationPostgresUpdateSSLModesVerifyFull)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresUpdateSSLModesVerifyFull); err == nil {
- u.DestinationPostgresUpdateSSLModesVerifyFull = destinationPostgresUpdateSSLModesVerifyFull
- u.Type = DestinationPostgresUpdateSSLModesTypeDestinationPostgresUpdateSSLModesVerifyFull
+ verifyFull := new(VerifyFull)
+ if err := utils.UnmarshalJSON(data, &verifyFull, "", true, true); err == nil {
+ u.VerifyFull = verifyFull
+ u.Type = SSLModesTypeVerifyFull
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationPostgresUpdateSSLModes) MarshalJSON() ([]byte, error) {
- if u.DestinationPostgresUpdateSSLModesDisable != nil {
- return json.Marshal(u.DestinationPostgresUpdateSSLModesDisable)
+func (u SSLModes) MarshalJSON() ([]byte, error) {
+ if u.Disable != nil {
+ return utils.MarshalJSON(u.Disable, "", true)
}
- if u.DestinationPostgresUpdateSSLModesAllow != nil {
- return json.Marshal(u.DestinationPostgresUpdateSSLModesAllow)
+ if u.Allow != nil {
+ return utils.MarshalJSON(u.Allow, "", true)
}
- if u.DestinationPostgresUpdateSSLModesPrefer != nil {
- return json.Marshal(u.DestinationPostgresUpdateSSLModesPrefer)
+ if u.Prefer != nil {
+ return utils.MarshalJSON(u.Prefer, "", true)
}
- if u.DestinationPostgresUpdateSSLModesRequire != nil {
- return json.Marshal(u.DestinationPostgresUpdateSSLModesRequire)
+ if u.Require != nil {
+ return utils.MarshalJSON(u.Require, "", true)
}
- if u.DestinationPostgresUpdateSSLModesVerifyCa != nil {
- return json.Marshal(u.DestinationPostgresUpdateSSLModesVerifyCa)
+ if u.VerifyCa != nil {
+ return utils.MarshalJSON(u.VerifyCa, "", true)
}
- if u.DestinationPostgresUpdateSSLModesVerifyFull != nil {
- return json.Marshal(u.DestinationPostgresUpdateSSLModesVerifyFull)
+ if u.VerifyFull != nil {
+ return utils.MarshalJSON(u.VerifyFull, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationPostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationPostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationPostgresUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationPostgresUpdateSchemasTunnelMethodTunnelMethod string
const (
- DestinationPostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationPostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationPostgresUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationPostgresUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationPostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationPostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationPostgresUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationPostgresUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationPostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationPostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationPostgresUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication struct {
+// DestinationPostgresUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationPostgresUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationPostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationPostgresUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationPostgresUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgresUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationPostgresUpdatePasswordAuthentication) GetTunnelMethod() DestinationPostgresUpdateSchemasTunnelMethodTunnelMethod {
+ return DestinationPostgresUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationPostgresUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationPostgresUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationPostgresUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationPostgresUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationPostgresUpdateSchemasTunnelMethod string
const (
- DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationPostgresUpdateSchemasTunnelMethodSSHKeyAuth DestinationPostgresUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationPostgresUpdateSchemasTunnelMethod) ToPointer() *DestinationPostgresUpdateSchemasTunnelMethod {
return &e
}
-func (e *DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationPostgresUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresUpdateSchemasTunnelMethod: %v", v)
}
}
-// DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationPostgresUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationPostgresUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationPostgresUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationPostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationPostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationPostgresUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgresUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationPostgresUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationPostgresUpdateSSHKeyAuthentication) GetTunnelMethod() DestinationPostgresUpdateSchemasTunnelMethod {
+ return DestinationPostgresUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationPostgresUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationPostgresUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationPostgresUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationPostgresUpdateTunnelMethod string
const (
- DestinationPostgresUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationPostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationPostgresUpdateTunnelMethodNoTunnel DestinationPostgresUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationPostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationPostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationPostgresUpdateTunnelMethod) ToPointer() *DestinationPostgresUpdateTunnelMethod {
return &e
}
-func (e *DestinationPostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationPostgresUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationPostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationPostgresUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationPostgresUpdateTunnelMethod: %v", v)
}
}
-// DestinationPostgresUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationPostgresUpdateSSHTunnelMethodNoTunnel struct {
+// DestinationPostgresUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationPostgresUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationPostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationPostgresUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationPostgresUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgresUpdateNoTunnel) GetTunnelMethod() DestinationPostgresUpdateTunnelMethod {
+ return DestinationPostgresUpdateTunnelMethodNoTunnel
}
type DestinationPostgresUpdateSSHTunnelMethodType string
const (
- DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHTunnelMethodNoTunnel DestinationPostgresUpdateSSHTunnelMethodType = "destination-postgres-update_SSH Tunnel Method_No Tunnel"
- DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication DestinationPostgresUpdateSSHTunnelMethodType = "destination-postgres-update_SSH Tunnel Method_SSH Key Authentication"
- DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication DestinationPostgresUpdateSSHTunnelMethodType = "destination-postgres-update_SSH Tunnel Method_Password Authentication"
+ DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateNoTunnel DestinationPostgresUpdateSSHTunnelMethodType = "destination-postgres-update_No Tunnel"
+ DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHKeyAuthentication DestinationPostgresUpdateSSHTunnelMethodType = "destination-postgres-update_SSH Key Authentication"
+ DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdatePasswordAuthentication DestinationPostgresUpdateSSHTunnelMethodType = "destination-postgres-update_Password Authentication"
)
type DestinationPostgresUpdateSSHTunnelMethod struct {
- DestinationPostgresUpdateSSHTunnelMethodNoTunnel *DestinationPostgresUpdateSSHTunnelMethodNoTunnel
- DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication
- DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication *DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication
+ DestinationPostgresUpdateNoTunnel *DestinationPostgresUpdateNoTunnel
+ DestinationPostgresUpdateSSHKeyAuthentication *DestinationPostgresUpdateSSHKeyAuthentication
+ DestinationPostgresUpdatePasswordAuthentication *DestinationPostgresUpdatePasswordAuthentication
Type DestinationPostgresUpdateSSHTunnelMethodType
}
-func CreateDestinationPostgresUpdateSSHTunnelMethodDestinationPostgresUpdateSSHTunnelMethodNoTunnel(destinationPostgresUpdateSSHTunnelMethodNoTunnel DestinationPostgresUpdateSSHTunnelMethodNoTunnel) DestinationPostgresUpdateSSHTunnelMethod {
- typ := DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHTunnelMethodNoTunnel
+func CreateDestinationPostgresUpdateSSHTunnelMethodDestinationPostgresUpdateNoTunnel(destinationPostgresUpdateNoTunnel DestinationPostgresUpdateNoTunnel) DestinationPostgresUpdateSSHTunnelMethod {
+ typ := DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateNoTunnel
return DestinationPostgresUpdateSSHTunnelMethod{
- DestinationPostgresUpdateSSHTunnelMethodNoTunnel: &destinationPostgresUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationPostgresUpdateNoTunnel: &destinationPostgresUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationPostgresUpdateSSHTunnelMethodDestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication(destinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication) DestinationPostgresUpdateSSHTunnelMethod {
- typ := DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationPostgresUpdateSSHTunnelMethodDestinationPostgresUpdateSSHKeyAuthentication(destinationPostgresUpdateSSHKeyAuthentication DestinationPostgresUpdateSSHKeyAuthentication) DestinationPostgresUpdateSSHTunnelMethod {
+ typ := DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHKeyAuthentication
return DestinationPostgresUpdateSSHTunnelMethod{
- DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication: &destinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationPostgresUpdateSSHKeyAuthentication: &destinationPostgresUpdateSSHKeyAuthentication,
Type: typ,
}
}
-func CreateDestinationPostgresUpdateSSHTunnelMethodDestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication(destinationPostgresUpdateSSHTunnelMethodPasswordAuthentication DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication) DestinationPostgresUpdateSSHTunnelMethod {
- typ := DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication
+func CreateDestinationPostgresUpdateSSHTunnelMethodDestinationPostgresUpdatePasswordAuthentication(destinationPostgresUpdatePasswordAuthentication DestinationPostgresUpdatePasswordAuthentication) DestinationPostgresUpdateSSHTunnelMethod {
+ typ := DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdatePasswordAuthentication
return DestinationPostgresUpdateSSHTunnelMethod{
- DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication: &destinationPostgresUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationPostgresUpdatePasswordAuthentication: &destinationPostgresUpdatePasswordAuthentication,
Type: typ,
}
}
func (u *DestinationPostgresUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationPostgresUpdateSSHTunnelMethodNoTunnel := new(DestinationPostgresUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationPostgresUpdateSSHTunnelMethodNoTunnel = destinationPostgresUpdateSSHTunnelMethodNoTunnel
- u.Type = DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHTunnelMethodNoTunnel
+
+ destinationPostgresUpdateNoTunnel := new(DestinationPostgresUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresUpdateNoTunnel, "", true, true); err == nil {
+ u.DestinationPostgresUpdateNoTunnel = destinationPostgresUpdateNoTunnel
+ u.Type = DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateNoTunnel
return nil
}
- destinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication := new(DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication = destinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication
+ destinationPostgresUpdateSSHKeyAuthentication := new(DestinationPostgresUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationPostgresUpdateSSHKeyAuthentication = destinationPostgresUpdateSSHKeyAuthentication
+ u.Type = DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHKeyAuthentication
return nil
}
- destinationPostgresUpdateSSHTunnelMethodPasswordAuthentication := new(DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationPostgresUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication = destinationPostgresUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication
+ destinationPostgresUpdatePasswordAuthentication := new(DestinationPostgresUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationPostgresUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.DestinationPostgresUpdatePasswordAuthentication = destinationPostgresUpdatePasswordAuthentication
+ u.Type = DestinationPostgresUpdateSSHTunnelMethodTypeDestinationPostgresUpdatePasswordAuthentication
return nil
}
@@ -545,19 +758,19 @@ func (u *DestinationPostgresUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) er
}
func (u DestinationPostgresUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationPostgresUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationPostgresUpdateSSHTunnelMethodNoTunnel)
+ if u.DestinationPostgresUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationPostgresUpdateNoTunnel, "", true)
}
- if u.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationPostgresUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationPostgresUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationPostgresUpdateSSHKeyAuthentication, "", true)
}
- if u.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationPostgresUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationPostgresUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationPostgresUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationPostgresUpdate struct {
@@ -570,9 +783,9 @@ type DestinationPostgresUpdate struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5432" json:"port"`
// The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public".
- Schema string `json:"schema"`
+ Schema *string `default:"public" json:"schema"`
// SSL connection modes.
// disable - Chose this mode to disable encryption of communication between Airbyte and destination database
// allow - Chose this mode to enable encryption only when required by the source database
@@ -581,9 +794,83 @@ type DestinationPostgresUpdate struct {
// verify-ca - Chose this mode to always require encryption and to verify that the source database server has a valid SSL certificate
// verify-full - This is the most secure mode. Chose this mode to always require encryption and to verify the identity of the source database server
// See more information - in the docs.
- SslMode *DestinationPostgresUpdateSSLModes `json:"ssl_mode,omitempty"`
+ SslMode *SSLModes `json:"ssl_mode,omitempty"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *DestinationPostgresUpdateSSHTunnelMethod `json:"tunnel_method,omitempty"`
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationPostgresUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPostgresUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPostgresUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationPostgresUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationPostgresUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationPostgresUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationPostgresUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationPostgresUpdate) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *DestinationPostgresUpdate) GetSslMode() *SSLModes {
+ if o == nil {
+ return nil
+ }
+ return o.SslMode
+}
+
+func (o *DestinationPostgresUpdate) GetTunnelMethod() *DestinationPostgresUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationPostgresUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationpubsub.go b/internal/sdk/pkg/models/shared/destinationpubsub.go
old mode 100755
new mode 100644
index 5f5cf236a..8c0df30bd
--- a/internal/sdk/pkg/models/shared/destinationpubsub.go
+++ b/internal/sdk/pkg/models/shared/destinationpubsub.go
@@ -5,48 +5,120 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationPubsubPubsub string
+type Pubsub string
const (
- DestinationPubsubPubsubPubsub DestinationPubsubPubsub = "pubsub"
+ PubsubPubsub Pubsub = "pubsub"
)
-func (e DestinationPubsubPubsub) ToPointer() *DestinationPubsubPubsub {
+func (e Pubsub) ToPointer() *Pubsub {
return &e
}
-func (e *DestinationPubsubPubsub) UnmarshalJSON(data []byte) error {
+func (e *Pubsub) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pubsub":
- *e = DestinationPubsubPubsub(v)
+ *e = Pubsub(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationPubsubPubsub: %v", v)
+ return fmt.Errorf("invalid value for Pubsub: %v", v)
}
}
type DestinationPubsub struct {
// Number of ms before the buffer is flushed
- BatchingDelayThreshold *int64 `json:"batching_delay_threshold,omitempty"`
+ BatchingDelayThreshold *int64 `default:"1" json:"batching_delay_threshold"`
// Number of messages before the buffer is flushed
- BatchingElementCountThreshold *int64 `json:"batching_element_count_threshold,omitempty"`
+ BatchingElementCountThreshold *int64 `default:"1" json:"batching_element_count_threshold"`
// If TRUE messages will be buffered instead of sending them one by one
- BatchingEnabled bool `json:"batching_enabled"`
+ BatchingEnabled *bool `default:"false" json:"batching_enabled"`
// Number of bytes before the buffer is flushed
- BatchingRequestBytesThreshold *int64 `json:"batching_request_bytes_threshold,omitempty"`
+ BatchingRequestBytesThreshold *int64 `default:"1" json:"batching_request_bytes_threshold"`
// The contents of the JSON service account key. Check out the docs if you need help generating this key.
- CredentialsJSON string `json:"credentials_json"`
- DestinationType DestinationPubsubPubsub `json:"destinationType"`
+ CredentialsJSON string `json:"credentials_json"`
+ destinationType Pubsub `const:"pubsub" json:"destinationType"`
// If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key of stream
- OrderingEnabled bool `json:"ordering_enabled"`
+ OrderingEnabled *bool `default:"false" json:"ordering_enabled"`
// The GCP project ID for the project containing the target PubSub.
ProjectID string `json:"project_id"`
// The PubSub topic ID in the given GCP project ID.
TopicID string `json:"topic_id"`
}
+
+func (d DestinationPubsub) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPubsub) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPubsub) GetBatchingDelayThreshold() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchingDelayThreshold
+}
+
+func (o *DestinationPubsub) GetBatchingElementCountThreshold() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchingElementCountThreshold
+}
+
+func (o *DestinationPubsub) GetBatchingEnabled() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.BatchingEnabled
+}
+
+func (o *DestinationPubsub) GetBatchingRequestBytesThreshold() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchingRequestBytesThreshold
+}
+
+func (o *DestinationPubsub) GetCredentialsJSON() string {
+ if o == nil {
+ return ""
+ }
+ return o.CredentialsJSON
+}
+
+func (o *DestinationPubsub) GetDestinationType() Pubsub {
+ return PubsubPubsub
+}
+
+func (o *DestinationPubsub) GetOrderingEnabled() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.OrderingEnabled
+}
+
+func (o *DestinationPubsub) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
+
+func (o *DestinationPubsub) GetTopicID() string {
+ if o == nil {
+ return ""
+ }
+ return o.TopicID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationpubsubcreaterequest.go b/internal/sdk/pkg/models/shared/destinationpubsubcreaterequest.go
old mode 100755
new mode 100644
index 4526e9041..b74be2b1b
--- a/internal/sdk/pkg/models/shared/destinationpubsubcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationpubsubcreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationPubsubCreateRequest struct {
Configuration DestinationPubsub `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationPubsubCreateRequest) GetConfiguration() DestinationPubsub {
+ if o == nil {
+ return DestinationPubsub{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationPubsubCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationPubsubCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationPubsubCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationpubsubputrequest.go b/internal/sdk/pkg/models/shared/destinationpubsubputrequest.go
old mode 100755
new mode 100644
index 305980db4..7091ea66d
--- a/internal/sdk/pkg/models/shared/destinationpubsubputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationpubsubputrequest.go
@@ -7,3 +7,24 @@ type DestinationPubsubPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationPubsubPutRequest) GetConfiguration() DestinationPubsubUpdate {
+ if o == nil {
+ return DestinationPubsubUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationPubsubPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationPubsubPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationpubsubupdate.go b/internal/sdk/pkg/models/shared/destinationpubsubupdate.go
old mode 100755
new mode 100644
index 8f3495db4..26395c11a
--- a/internal/sdk/pkg/models/shared/destinationpubsubupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationpubsubupdate.go
@@ -2,21 +2,92 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type DestinationPubsubUpdate struct {
// Number of ms before the buffer is flushed
- BatchingDelayThreshold *int64 `json:"batching_delay_threshold,omitempty"`
+ BatchingDelayThreshold *int64 `default:"1" json:"batching_delay_threshold"`
// Number of messages before the buffer is flushed
- BatchingElementCountThreshold *int64 `json:"batching_element_count_threshold,omitempty"`
+ BatchingElementCountThreshold *int64 `default:"1" json:"batching_element_count_threshold"`
// If TRUE messages will be buffered instead of sending them one by one
- BatchingEnabled bool `json:"batching_enabled"`
+ BatchingEnabled *bool `default:"false" json:"batching_enabled"`
// Number of bytes before the buffer is flushed
- BatchingRequestBytesThreshold *int64 `json:"batching_request_bytes_threshold,omitempty"`
+ BatchingRequestBytesThreshold *int64 `default:"1" json:"batching_request_bytes_threshold"`
// The contents of the JSON service account key. Check out the docs if you need help generating this key.
CredentialsJSON string `json:"credentials_json"`
// If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key of stream
- OrderingEnabled bool `json:"ordering_enabled"`
+ OrderingEnabled *bool `default:"false" json:"ordering_enabled"`
// The GCP project ID for the project containing the target PubSub.
ProjectID string `json:"project_id"`
// The PubSub topic ID in the given GCP project ID.
TopicID string `json:"topic_id"`
}
+
+func (d DestinationPubsubUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationPubsubUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationPubsubUpdate) GetBatchingDelayThreshold() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchingDelayThreshold
+}
+
+func (o *DestinationPubsubUpdate) GetBatchingElementCountThreshold() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchingElementCountThreshold
+}
+
+func (o *DestinationPubsubUpdate) GetBatchingEnabled() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.BatchingEnabled
+}
+
+func (o *DestinationPubsubUpdate) GetBatchingRequestBytesThreshold() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchingRequestBytesThreshold
+}
+
+func (o *DestinationPubsubUpdate) GetCredentialsJSON() string {
+ if o == nil {
+ return ""
+ }
+ return o.CredentialsJSON
+}
+
+func (o *DestinationPubsubUpdate) GetOrderingEnabled() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.OrderingEnabled
+}
+
+func (o *DestinationPubsubUpdate) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
+
+func (o *DestinationPubsubUpdate) GetTopicID() string {
+ if o == nil {
+ return ""
+ }
+ return o.TopicID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationputrequest.go b/internal/sdk/pkg/models/shared/destinationputrequest.go
old mode 100755
new mode 100644
index a4eb95550..a2e792902
--- a/internal/sdk/pkg/models/shared/destinationputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationputrequest.go
@@ -7,3 +7,17 @@ type DestinationPutRequest struct {
Configuration interface{} `json:"configuration"`
Name string `json:"name"`
}
+
+func (o *DestinationPutRequest) GetConfiguration() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.Configuration
+}
+
+func (o *DestinationPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
diff --git a/internal/sdk/pkg/models/shared/destinationqdrant.go b/internal/sdk/pkg/models/shared/destinationqdrant.go
new file mode 100644
index 000000000..ac45529e7
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationqdrant.go
@@ -0,0 +1,1448 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+type Qdrant string
+
+const (
+ QdrantQdrant Qdrant = "qdrant"
+)
+
+func (e Qdrant) ToPointer() *Qdrant {
+ return &e
+}
+
+func (e *Qdrant) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "qdrant":
+ *e = Qdrant(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for Qdrant: %v", v)
+ }
+}
+
+type DestinationQdrantSchemasEmbeddingEmbedding6Mode string
+
+const (
+ DestinationQdrantSchemasEmbeddingEmbedding6ModeOpenaiCompatible DestinationQdrantSchemasEmbeddingEmbedding6Mode = "openai_compatible"
+)
+
+func (e DestinationQdrantSchemasEmbeddingEmbedding6Mode) ToPointer() *DestinationQdrantSchemasEmbeddingEmbedding6Mode {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasEmbeddingEmbedding6Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "openai_compatible":
+ *e = DestinationQdrantSchemasEmbeddingEmbedding6Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasEmbeddingEmbedding6Mode: %v", v)
+ }
+}
+
+// DestinationQdrantOpenAICompatible - Use a service that's compatible with the OpenAI API to embed text.
+type DestinationQdrantOpenAICompatible struct {
+ APIKey *string `default:"" json:"api_key"`
+ // The base URL for your OpenAI-compatible service
+ BaseURL string `json:"base_url"`
+ // The number of dimensions the embedding model is generating
+ Dimensions int64 `json:"dimensions"`
+ mode *DestinationQdrantSchemasEmbeddingEmbedding6Mode `const:"openai_compatible" json:"mode"`
+ // The name of the model to use for embedding
+ ModelName *string `default:"text-embedding-ada-002" json:"model_name"`
+}
+
+func (d DestinationQdrantOpenAICompatible) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantOpenAICompatible) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantOpenAICompatible) GetAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIKey
+}
+
+func (o *DestinationQdrantOpenAICompatible) GetBaseURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.BaseURL
+}
+
+func (o *DestinationQdrantOpenAICompatible) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *DestinationQdrantOpenAICompatible) GetMode() *DestinationQdrantSchemasEmbeddingEmbedding6Mode {
+ return DestinationQdrantSchemasEmbeddingEmbedding6ModeOpenaiCompatible.ToPointer()
+}
+
+func (o *DestinationQdrantOpenAICompatible) GetModelName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ModelName
+}
+
+type DestinationQdrantSchemasEmbeddingEmbedding5Mode string
+
+const (
+ DestinationQdrantSchemasEmbeddingEmbedding5ModeAzureOpenai DestinationQdrantSchemasEmbeddingEmbedding5Mode = "azure_openai"
+)
+
+func (e DestinationQdrantSchemasEmbeddingEmbedding5Mode) ToPointer() *DestinationQdrantSchemasEmbeddingEmbedding5Mode {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasEmbeddingEmbedding5Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "azure_openai":
+ *e = DestinationQdrantSchemasEmbeddingEmbedding5Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasEmbeddingEmbedding5Mode: %v", v)
+ }
+}
+
+// DestinationQdrantAzureOpenAI - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationQdrantAzureOpenAI struct {
+ // The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ APIBase string `json:"api_base"`
+ // The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ Deployment string `json:"deployment"`
+ mode *DestinationQdrantSchemasEmbeddingEmbedding5Mode `const:"azure_openai" json:"mode"`
+ // The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationQdrantAzureOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantAzureOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantAzureOpenAI) GetAPIBase() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIBase
+}
+
+func (o *DestinationQdrantAzureOpenAI) GetDeployment() string {
+ if o == nil {
+ return ""
+ }
+ return o.Deployment
+}
+
+func (o *DestinationQdrantAzureOpenAI) GetMode() *DestinationQdrantSchemasEmbeddingEmbedding5Mode {
+ return DestinationQdrantSchemasEmbeddingEmbedding5ModeAzureOpenai.ToPointer()
+}
+
+func (o *DestinationQdrantAzureOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
+}
+
+type DestinationQdrantSchemasEmbeddingEmbeddingMode string
+
+const (
+ DestinationQdrantSchemasEmbeddingEmbeddingModeFromField DestinationQdrantSchemasEmbeddingEmbeddingMode = "from_field"
+)
+
+func (e DestinationQdrantSchemasEmbeddingEmbeddingMode) ToPointer() *DestinationQdrantSchemasEmbeddingEmbeddingMode {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasEmbeddingEmbeddingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "from_field":
+ *e = DestinationQdrantSchemasEmbeddingEmbeddingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasEmbeddingEmbeddingMode: %v", v)
+ }
+}
+
+// DestinationQdrantFromField - Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.
+type DestinationQdrantFromField struct {
+ // The number of dimensions the embedding model is generating
+ Dimensions int64 `json:"dimensions"`
+ // Name of the field in the record that contains the embedding
+ FieldName string `json:"field_name"`
+ mode *DestinationQdrantSchemasEmbeddingEmbeddingMode `const:"from_field" json:"mode"`
+}
+
+func (d DestinationQdrantFromField) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantFromField) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantFromField) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *DestinationQdrantFromField) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+func (o *DestinationQdrantFromField) GetMode() *DestinationQdrantSchemasEmbeddingEmbeddingMode {
+ return DestinationQdrantSchemasEmbeddingEmbeddingModeFromField.ToPointer()
+}
+
+type DestinationQdrantSchemasEmbeddingMode string
+
+const (
+ DestinationQdrantSchemasEmbeddingModeFake DestinationQdrantSchemasEmbeddingMode = "fake"
+)
+
+func (e DestinationQdrantSchemasEmbeddingMode) ToPointer() *DestinationQdrantSchemasEmbeddingMode {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasEmbeddingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "fake":
+ *e = DestinationQdrantSchemasEmbeddingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasEmbeddingMode: %v", v)
+ }
+}
+
+// DestinationQdrantFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
+type DestinationQdrantFake struct {
+ mode *DestinationQdrantSchemasEmbeddingMode `const:"fake" json:"mode"`
+}
+
+func (d DestinationQdrantFake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantFake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantFake) GetMode() *DestinationQdrantSchemasEmbeddingMode {
+ return DestinationQdrantSchemasEmbeddingModeFake.ToPointer()
+}
+
+type DestinationQdrantSchemasMode string
+
+const (
+ DestinationQdrantSchemasModeCohere DestinationQdrantSchemasMode = "cohere"
+)
+
+func (e DestinationQdrantSchemasMode) ToPointer() *DestinationQdrantSchemasMode {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "cohere":
+ *e = DestinationQdrantSchemasMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasMode: %v", v)
+ }
+}
+
+// DestinationQdrantCohere - Use the Cohere API to embed text.
+type DestinationQdrantCohere struct {
+ CohereKey string `json:"cohere_key"`
+ mode *DestinationQdrantSchemasMode `const:"cohere" json:"mode"`
+}
+
+func (d DestinationQdrantCohere) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantCohere) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantCohere) GetCohereKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.CohereKey
+}
+
+func (o *DestinationQdrantCohere) GetMode() *DestinationQdrantSchemasMode {
+ return DestinationQdrantSchemasModeCohere.ToPointer()
+}
+
+type DestinationQdrantMode string
+
+const (
+ DestinationQdrantModeOpenai DestinationQdrantMode = "openai"
+)
+
+func (e DestinationQdrantMode) ToPointer() *DestinationQdrantMode {
+ return &e
+}
+
+func (e *DestinationQdrantMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "openai":
+ *e = DestinationQdrantMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantMode: %v", v)
+ }
+}
+
+// DestinationQdrantOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationQdrantOpenAI struct {
+ mode *DestinationQdrantMode `const:"openai" json:"mode"`
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationQdrantOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantOpenAI) GetMode() *DestinationQdrantMode {
+ return DestinationQdrantModeOpenai.ToPointer()
+}
+
+func (o *DestinationQdrantOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
+}
+
+type DestinationQdrantEmbeddingType string
+
+const (
+ DestinationQdrantEmbeddingTypeDestinationQdrantOpenAI DestinationQdrantEmbeddingType = "destination-qdrant_OpenAI"
+ DestinationQdrantEmbeddingTypeDestinationQdrantCohere DestinationQdrantEmbeddingType = "destination-qdrant_Cohere"
+ DestinationQdrantEmbeddingTypeDestinationQdrantFake DestinationQdrantEmbeddingType = "destination-qdrant_Fake"
+ DestinationQdrantEmbeddingTypeDestinationQdrantFromField DestinationQdrantEmbeddingType = "destination-qdrant_From Field"
+ DestinationQdrantEmbeddingTypeDestinationQdrantAzureOpenAI DestinationQdrantEmbeddingType = "destination-qdrant_Azure OpenAI"
+ DestinationQdrantEmbeddingTypeDestinationQdrantOpenAICompatible DestinationQdrantEmbeddingType = "destination-qdrant_OpenAI-compatible"
+)
+
+type DestinationQdrantEmbedding struct {
+ DestinationQdrantOpenAI *DestinationQdrantOpenAI
+ DestinationQdrantCohere *DestinationQdrantCohere
+ DestinationQdrantFake *DestinationQdrantFake
+ DestinationQdrantFromField *DestinationQdrantFromField
+ DestinationQdrantAzureOpenAI *DestinationQdrantAzureOpenAI
+ DestinationQdrantOpenAICompatible *DestinationQdrantOpenAICompatible
+
+ Type DestinationQdrantEmbeddingType
+}
+
+func CreateDestinationQdrantEmbeddingDestinationQdrantOpenAI(destinationQdrantOpenAI DestinationQdrantOpenAI) DestinationQdrantEmbedding {
+ typ := DestinationQdrantEmbeddingTypeDestinationQdrantOpenAI
+
+ return DestinationQdrantEmbedding{
+ DestinationQdrantOpenAI: &destinationQdrantOpenAI,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantEmbeddingDestinationQdrantCohere(destinationQdrantCohere DestinationQdrantCohere) DestinationQdrantEmbedding {
+ typ := DestinationQdrantEmbeddingTypeDestinationQdrantCohere
+
+ return DestinationQdrantEmbedding{
+ DestinationQdrantCohere: &destinationQdrantCohere,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantEmbeddingDestinationQdrantFake(destinationQdrantFake DestinationQdrantFake) DestinationQdrantEmbedding {
+ typ := DestinationQdrantEmbeddingTypeDestinationQdrantFake
+
+ return DestinationQdrantEmbedding{
+ DestinationQdrantFake: &destinationQdrantFake,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantEmbeddingDestinationQdrantFromField(destinationQdrantFromField DestinationQdrantFromField) DestinationQdrantEmbedding {
+ typ := DestinationQdrantEmbeddingTypeDestinationQdrantFromField
+
+ return DestinationQdrantEmbedding{
+ DestinationQdrantFromField: &destinationQdrantFromField,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantEmbeddingDestinationQdrantAzureOpenAI(destinationQdrantAzureOpenAI DestinationQdrantAzureOpenAI) DestinationQdrantEmbedding {
+ typ := DestinationQdrantEmbeddingTypeDestinationQdrantAzureOpenAI
+
+ return DestinationQdrantEmbedding{
+ DestinationQdrantAzureOpenAI: &destinationQdrantAzureOpenAI,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantEmbeddingDestinationQdrantOpenAICompatible(destinationQdrantOpenAICompatible DestinationQdrantOpenAICompatible) DestinationQdrantEmbedding {
+ typ := DestinationQdrantEmbeddingTypeDestinationQdrantOpenAICompatible
+
+ return DestinationQdrantEmbedding{
+ DestinationQdrantOpenAICompatible: &destinationQdrantOpenAICompatible,
+ Type: typ,
+ }
+}
+
+func (u *DestinationQdrantEmbedding) UnmarshalJSON(data []byte) error {
+
+ destinationQdrantFake := new(DestinationQdrantFake)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantFake, "", true, true); err == nil {
+ u.DestinationQdrantFake = destinationQdrantFake
+ u.Type = DestinationQdrantEmbeddingTypeDestinationQdrantFake
+ return nil
+ }
+
+ destinationQdrantOpenAI := new(DestinationQdrantOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantOpenAI, "", true, true); err == nil {
+ u.DestinationQdrantOpenAI = destinationQdrantOpenAI
+ u.Type = DestinationQdrantEmbeddingTypeDestinationQdrantOpenAI
+ return nil
+ }
+
+ destinationQdrantCohere := new(DestinationQdrantCohere)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantCohere, "", true, true); err == nil {
+ u.DestinationQdrantCohere = destinationQdrantCohere
+ u.Type = DestinationQdrantEmbeddingTypeDestinationQdrantCohere
+ return nil
+ }
+
+ destinationQdrantFromField := new(DestinationQdrantFromField)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantFromField, "", true, true); err == nil {
+ u.DestinationQdrantFromField = destinationQdrantFromField
+ u.Type = DestinationQdrantEmbeddingTypeDestinationQdrantFromField
+ return nil
+ }
+
+ destinationQdrantAzureOpenAI := new(DestinationQdrantAzureOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantAzureOpenAI, "", true, true); err == nil {
+ u.DestinationQdrantAzureOpenAI = destinationQdrantAzureOpenAI
+ u.Type = DestinationQdrantEmbeddingTypeDestinationQdrantAzureOpenAI
+ return nil
+ }
+
+ destinationQdrantOpenAICompatible := new(DestinationQdrantOpenAICompatible)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantOpenAICompatible, "", true, true); err == nil {
+ u.DestinationQdrantOpenAICompatible = destinationQdrantOpenAICompatible
+ u.Type = DestinationQdrantEmbeddingTypeDestinationQdrantOpenAICompatible
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DestinationQdrantEmbedding) MarshalJSON() ([]byte, error) {
+ if u.DestinationQdrantOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationQdrantOpenAI, "", true)
+ }
+
+ if u.DestinationQdrantCohere != nil {
+ return utils.MarshalJSON(u.DestinationQdrantCohere, "", true)
+ }
+
+ if u.DestinationQdrantFake != nil {
+ return utils.MarshalJSON(u.DestinationQdrantFake, "", true)
+ }
+
+ if u.DestinationQdrantFromField != nil {
+ return utils.MarshalJSON(u.DestinationQdrantFromField, "", true)
+ }
+
+ if u.DestinationQdrantAzureOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationQdrantAzureOpenAI, "", true)
+ }
+
+ if u.DestinationQdrantOpenAICompatible != nil {
+ return utils.MarshalJSON(u.DestinationQdrantOpenAICompatible, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type DestinationQdrantSchemasIndexingAuthMethodMode string
+
+const (
+ DestinationQdrantSchemasIndexingAuthMethodModeNoAuth DestinationQdrantSchemasIndexingAuthMethodMode = "no_auth"
+)
+
+func (e DestinationQdrantSchemasIndexingAuthMethodMode) ToPointer() *DestinationQdrantSchemasIndexingAuthMethodMode {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasIndexingAuthMethodMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "no_auth":
+ *e = DestinationQdrantSchemasIndexingAuthMethodMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasIndexingAuthMethodMode: %v", v)
+ }
+}
+
+// DestinationQdrantNoAuth - Method to authenticate with the Qdrant Instance
+type DestinationQdrantNoAuth struct {
+ mode *DestinationQdrantSchemasIndexingAuthMethodMode `const:"no_auth" json:"mode"`
+}
+
+func (d DestinationQdrantNoAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantNoAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantNoAuth) GetMode() *DestinationQdrantSchemasIndexingAuthMethodMode {
+ return DestinationQdrantSchemasIndexingAuthMethodModeNoAuth.ToPointer()
+}
+
+type DestinationQdrantSchemasIndexingMode string
+
+const (
+ DestinationQdrantSchemasIndexingModeAPIKeyAuth DestinationQdrantSchemasIndexingMode = "api_key_auth"
+)
+
+func (e DestinationQdrantSchemasIndexingMode) ToPointer() *DestinationQdrantSchemasIndexingMode {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasIndexingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "api_key_auth":
+ *e = DestinationQdrantSchemasIndexingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasIndexingMode: %v", v)
+ }
+}
+
+// DestinationQdrantAPIKeyAuth - Method to authenticate with the Qdrant Instance
+type DestinationQdrantAPIKeyAuth struct {
+ // API Key for the Qdrant instance
+ APIKey string `json:"api_key"`
+ mode *DestinationQdrantSchemasIndexingMode `const:"api_key_auth" json:"mode"`
+}
+
+func (d DestinationQdrantAPIKeyAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantAPIKeyAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantAPIKeyAuth) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *DestinationQdrantAPIKeyAuth) GetMode() *DestinationQdrantSchemasIndexingMode {
+ return DestinationQdrantSchemasIndexingModeAPIKeyAuth.ToPointer()
+}
+
+type DestinationQdrantAuthenticationMethodType string
+
+const (
+ DestinationQdrantAuthenticationMethodTypeDestinationQdrantAPIKeyAuth DestinationQdrantAuthenticationMethodType = "destination-qdrant_ApiKeyAuth"
+ DestinationQdrantAuthenticationMethodTypeDestinationQdrantNoAuth DestinationQdrantAuthenticationMethodType = "destination-qdrant_NoAuth"
+)
+
+type DestinationQdrantAuthenticationMethod struct {
+ DestinationQdrantAPIKeyAuth *DestinationQdrantAPIKeyAuth
+ DestinationQdrantNoAuth *DestinationQdrantNoAuth
+
+ Type DestinationQdrantAuthenticationMethodType
+}
+
+func CreateDestinationQdrantAuthenticationMethodDestinationQdrantAPIKeyAuth(destinationQdrantAPIKeyAuth DestinationQdrantAPIKeyAuth) DestinationQdrantAuthenticationMethod {
+ typ := DestinationQdrantAuthenticationMethodTypeDestinationQdrantAPIKeyAuth
+
+ return DestinationQdrantAuthenticationMethod{
+ DestinationQdrantAPIKeyAuth: &destinationQdrantAPIKeyAuth,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantAuthenticationMethodDestinationQdrantNoAuth(destinationQdrantNoAuth DestinationQdrantNoAuth) DestinationQdrantAuthenticationMethod {
+ typ := DestinationQdrantAuthenticationMethodTypeDestinationQdrantNoAuth
+
+ return DestinationQdrantAuthenticationMethod{
+ DestinationQdrantNoAuth: &destinationQdrantNoAuth,
+ Type: typ,
+ }
+}
+
+func (u *DestinationQdrantAuthenticationMethod) UnmarshalJSON(data []byte) error {
+
+ destinationQdrantNoAuth := new(DestinationQdrantNoAuth)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantNoAuth, "", true, true); err == nil {
+ u.DestinationQdrantNoAuth = destinationQdrantNoAuth
+ u.Type = DestinationQdrantAuthenticationMethodTypeDestinationQdrantNoAuth
+ return nil
+ }
+
+ destinationQdrantAPIKeyAuth := new(DestinationQdrantAPIKeyAuth)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantAPIKeyAuth, "", true, true); err == nil {
+ u.DestinationQdrantAPIKeyAuth = destinationQdrantAPIKeyAuth
+ u.Type = DestinationQdrantAuthenticationMethodTypeDestinationQdrantAPIKeyAuth
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DestinationQdrantAuthenticationMethod) MarshalJSON() ([]byte, error) {
+ if u.DestinationQdrantAPIKeyAuth != nil {
+ return utils.MarshalJSON(u.DestinationQdrantAPIKeyAuth, "", true)
+ }
+
+ if u.DestinationQdrantNoAuth != nil {
+ return utils.MarshalJSON(u.DestinationQdrantNoAuth, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type DestinationQdrantSchemasIndexingDistanceMetricDistanceMetric string
+
+const (
+ DestinationQdrantSchemasIndexingDistanceMetricDistanceMetricEuc DestinationQdrantSchemasIndexingDistanceMetricDistanceMetric = "euc"
+)
+
+func (e DestinationQdrantSchemasIndexingDistanceMetricDistanceMetric) ToPointer() *DestinationQdrantSchemasIndexingDistanceMetricDistanceMetric {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasIndexingDistanceMetricDistanceMetric) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "euc":
+ *e = DestinationQdrantSchemasIndexingDistanceMetricDistanceMetric(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasIndexingDistanceMetricDistanceMetric: %v", v)
+ }
+}
+
+// DestinationQdrantEuc - The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the config does not exist yet and is created automatically by the connector.
+type DestinationQdrantEuc struct {
+	distanceMetric *DestinationQdrantSchemasIndexingDistanceMetricDistanceMetric `const:"euc" json:"distance_metric"`
+}
+
+func (d DestinationQdrantEuc) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantEuc) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantEuc) GetDistanceMetric() *DestinationQdrantSchemasIndexingDistanceMetricDistanceMetric {
+ return DestinationQdrantSchemasIndexingDistanceMetricDistanceMetricEuc.ToPointer()
+}
+
+type DestinationQdrantSchemasIndexingDistanceMetric string
+
+const (
+ DestinationQdrantSchemasIndexingDistanceMetricCos DestinationQdrantSchemasIndexingDistanceMetric = "cos"
+)
+
+func (e DestinationQdrantSchemasIndexingDistanceMetric) ToPointer() *DestinationQdrantSchemasIndexingDistanceMetric {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasIndexingDistanceMetric) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "cos":
+ *e = DestinationQdrantSchemasIndexingDistanceMetric(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasIndexingDistanceMetric: %v", v)
+ }
+}
+
+// DestinationQdrantCos - The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the config does not exist yet and is created automatically by the connector.
+type DestinationQdrantCos struct {
+	distanceMetric *DestinationQdrantSchemasIndexingDistanceMetric `const:"cos" json:"distance_metric"`
+}
+
+func (d DestinationQdrantCos) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantCos) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantCos) GetDistanceMetric() *DestinationQdrantSchemasIndexingDistanceMetric {
+ return DestinationQdrantSchemasIndexingDistanceMetricCos.ToPointer()
+}
+
+type DestinationQdrantSchemasDistanceMetric string
+
+const (
+ DestinationQdrantSchemasDistanceMetricDot DestinationQdrantSchemasDistanceMetric = "dot"
+)
+
+func (e DestinationQdrantSchemasDistanceMetric) ToPointer() *DestinationQdrantSchemasDistanceMetric {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasDistanceMetric) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "dot":
+ *e = DestinationQdrantSchemasDistanceMetric(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasDistanceMetric: %v", v)
+ }
+}
+
+// DestinationQdrantDot - The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the config does not exist yet and is created automatically by the connector.
+type DestinationQdrantDot struct {
+	distanceMetric *DestinationQdrantSchemasDistanceMetric `const:"dot" json:"distance_metric"`
+}
+
+func (d DestinationQdrantDot) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantDot) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantDot) GetDistanceMetric() *DestinationQdrantSchemasDistanceMetric {
+ return DestinationQdrantSchemasDistanceMetricDot.ToPointer()
+}
+
+type DestinationQdrantDistanceMetricType string
+
+const (
+ DestinationQdrantDistanceMetricTypeDestinationQdrantDot DestinationQdrantDistanceMetricType = "destination-qdrant_dot"
+ DestinationQdrantDistanceMetricTypeDestinationQdrantCos DestinationQdrantDistanceMetricType = "destination-qdrant_cos"
+ DestinationQdrantDistanceMetricTypeDestinationQdrantEuc DestinationQdrantDistanceMetricType = "destination-qdrant_euc"
+)
+
+type DestinationQdrantDistanceMetric struct {
+ DestinationQdrantDot *DestinationQdrantDot
+ DestinationQdrantCos *DestinationQdrantCos
+ DestinationQdrantEuc *DestinationQdrantEuc
+
+ Type DestinationQdrantDistanceMetricType
+}
+
+func CreateDestinationQdrantDistanceMetricDestinationQdrantDot(destinationQdrantDot DestinationQdrantDot) DestinationQdrantDistanceMetric {
+ typ := DestinationQdrantDistanceMetricTypeDestinationQdrantDot
+
+ return DestinationQdrantDistanceMetric{
+ DestinationQdrantDot: &destinationQdrantDot,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantDistanceMetricDestinationQdrantCos(destinationQdrantCos DestinationQdrantCos) DestinationQdrantDistanceMetric {
+ typ := DestinationQdrantDistanceMetricTypeDestinationQdrantCos
+
+ return DestinationQdrantDistanceMetric{
+ DestinationQdrantCos: &destinationQdrantCos,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantDistanceMetricDestinationQdrantEuc(destinationQdrantEuc DestinationQdrantEuc) DestinationQdrantDistanceMetric {
+ typ := DestinationQdrantDistanceMetricTypeDestinationQdrantEuc
+
+ return DestinationQdrantDistanceMetric{
+ DestinationQdrantEuc: &destinationQdrantEuc,
+ Type: typ,
+ }
+}
+
+func (u *DestinationQdrantDistanceMetric) UnmarshalJSON(data []byte) error {
+
+ destinationQdrantDot := new(DestinationQdrantDot)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantDot, "", true, true); err == nil {
+ u.DestinationQdrantDot = destinationQdrantDot
+ u.Type = DestinationQdrantDistanceMetricTypeDestinationQdrantDot
+ return nil
+ }
+
+ destinationQdrantCos := new(DestinationQdrantCos)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantCos, "", true, true); err == nil {
+ u.DestinationQdrantCos = destinationQdrantCos
+ u.Type = DestinationQdrantDistanceMetricTypeDestinationQdrantCos
+ return nil
+ }
+
+ destinationQdrantEuc := new(DestinationQdrantEuc)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantEuc, "", true, true); err == nil {
+ u.DestinationQdrantEuc = destinationQdrantEuc
+ u.Type = DestinationQdrantDistanceMetricTypeDestinationQdrantEuc
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DestinationQdrantDistanceMetric) MarshalJSON() ([]byte, error) {
+ if u.DestinationQdrantDot != nil {
+ return utils.MarshalJSON(u.DestinationQdrantDot, "", true)
+ }
+
+ if u.DestinationQdrantCos != nil {
+ return utils.MarshalJSON(u.DestinationQdrantCos, "", true)
+ }
+
+ if u.DestinationQdrantEuc != nil {
+ return utils.MarshalJSON(u.DestinationQdrantEuc, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// DestinationQdrantIndexing - Indexing configuration
+type DestinationQdrantIndexing struct {
+	// Method to authenticate with the Qdrant Instance
+	AuthMethod *DestinationQdrantAuthenticationMethod `json:"auth_method,omitempty"`
+	// The collection to load data into
+	Collection string `json:"collection"`
+	// The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the config does not exist yet and is created automatically by the connector.
+	DistanceMetric *DestinationQdrantDistanceMetric `json:"distance_metric,omitempty"`
+	// Whether to prefer gRPC over HTTP. Set to true for Qdrant cloud clusters
+	PreferGrpc *bool `default:"true" json:"prefer_grpc"`
+	// The field in the payload that contains the embedded text
+	TextField *string `default:"text" json:"text_field"`
+	// Public Endpoint of the Qdrant cluster
+	URL string `json:"url"`
+}
+
+func (d DestinationQdrantIndexing) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantIndexing) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantIndexing) GetAuthMethod() *DestinationQdrantAuthenticationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.AuthMethod
+}
+
+func (o *DestinationQdrantIndexing) GetCollection() string {
+ if o == nil {
+ return ""
+ }
+ return o.Collection
+}
+
+func (o *DestinationQdrantIndexing) GetDistanceMetric() *DestinationQdrantDistanceMetric {
+ if o == nil {
+ return nil
+ }
+ return o.DistanceMetric
+}
+
+func (o *DestinationQdrantIndexing) GetPreferGrpc() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.PreferGrpc
+}
+
+func (o *DestinationQdrantIndexing) GetTextField() *string {
+ if o == nil {
+ return nil
+ }
+ return o.TextField
+}
+
+func (o *DestinationQdrantIndexing) GetURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.URL
+}
+
+type DestinationQdrantFieldNameMappingConfigModel struct {
+ // The field name in the source
+ FromField string `json:"from_field"`
+ // The field name to use in the destination
+ ToField string `json:"to_field"`
+}
+
+func (o *DestinationQdrantFieldNameMappingConfigModel) GetFromField() string {
+ if o == nil {
+ return ""
+ }
+ return o.FromField
+}
+
+func (o *DestinationQdrantFieldNameMappingConfigModel) GetToField() string {
+ if o == nil {
+ return ""
+ }
+ return o.ToField
+}
+
+// DestinationQdrantLanguage - Split code in suitable places based on the programming language
+type DestinationQdrantLanguage string
+
+const (
+ DestinationQdrantLanguageCpp DestinationQdrantLanguage = "cpp"
+ DestinationQdrantLanguageGo DestinationQdrantLanguage = "go"
+ DestinationQdrantLanguageJava DestinationQdrantLanguage = "java"
+ DestinationQdrantLanguageJs DestinationQdrantLanguage = "js"
+ DestinationQdrantLanguagePhp DestinationQdrantLanguage = "php"
+ DestinationQdrantLanguageProto DestinationQdrantLanguage = "proto"
+ DestinationQdrantLanguagePython DestinationQdrantLanguage = "python"
+ DestinationQdrantLanguageRst DestinationQdrantLanguage = "rst"
+ DestinationQdrantLanguageRuby DestinationQdrantLanguage = "ruby"
+ DestinationQdrantLanguageRust DestinationQdrantLanguage = "rust"
+ DestinationQdrantLanguageScala DestinationQdrantLanguage = "scala"
+ DestinationQdrantLanguageSwift DestinationQdrantLanguage = "swift"
+ DestinationQdrantLanguageMarkdown DestinationQdrantLanguage = "markdown"
+ DestinationQdrantLanguageLatex DestinationQdrantLanguage = "latex"
+ DestinationQdrantLanguageHTML DestinationQdrantLanguage = "html"
+ DestinationQdrantLanguageSol DestinationQdrantLanguage = "sol"
+)
+
+func (e DestinationQdrantLanguage) ToPointer() *DestinationQdrantLanguage {
+ return &e
+}
+
+func (e *DestinationQdrantLanguage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "cpp":
+ fallthrough
+ case "go":
+ fallthrough
+ case "java":
+ fallthrough
+ case "js":
+ fallthrough
+ case "php":
+ fallthrough
+ case "proto":
+ fallthrough
+ case "python":
+ fallthrough
+ case "rst":
+ fallthrough
+ case "ruby":
+ fallthrough
+ case "rust":
+ fallthrough
+ case "scala":
+ fallthrough
+ case "swift":
+ fallthrough
+ case "markdown":
+ fallthrough
+ case "latex":
+ fallthrough
+ case "html":
+ fallthrough
+ case "sol":
+ *e = DestinationQdrantLanguage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantLanguage: %v", v)
+ }
+}
+
+type DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode string
+
+const (
+ DestinationQdrantSchemasProcessingTextSplitterTextSplitterModeCode DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode = "code"
+)
+
+func (e DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode) ToPointer() *DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "code":
+ *e = DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode: %v", v)
+ }
+}
+
+// DestinationQdrantByProgrammingLanguage - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
+type DestinationQdrantByProgrammingLanguage struct {
+ // Split code in suitable places based on the programming language
+ Language DestinationQdrantLanguage `json:"language"`
+ mode *DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode `const:"code" json:"mode"`
+}
+
+func (d DestinationQdrantByProgrammingLanguage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantByProgrammingLanguage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantByProgrammingLanguage) GetLanguage() DestinationQdrantLanguage {
+ if o == nil {
+ return DestinationQdrantLanguage("")
+ }
+ return o.Language
+}
+
+func (o *DestinationQdrantByProgrammingLanguage) GetMode() *DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode {
+ return DestinationQdrantSchemasProcessingTextSplitterTextSplitterModeCode.ToPointer()
+}
+
+type DestinationQdrantSchemasProcessingTextSplitterMode string
+
+const (
+ DestinationQdrantSchemasProcessingTextSplitterModeMarkdown DestinationQdrantSchemasProcessingTextSplitterMode = "markdown"
+)
+
+func (e DestinationQdrantSchemasProcessingTextSplitterMode) ToPointer() *DestinationQdrantSchemasProcessingTextSplitterMode {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasProcessingTextSplitterMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "markdown":
+ *e = DestinationQdrantSchemasProcessingTextSplitterMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasProcessingTextSplitterMode: %v", v)
+ }
+}
+
+// DestinationQdrantByMarkdownHeader - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
+type DestinationQdrantByMarkdownHeader struct {
+ mode *DestinationQdrantSchemasProcessingTextSplitterMode `const:"markdown" json:"mode"`
+ // Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
+ SplitLevel *int64 `default:"1" json:"split_level"`
+}
+
+func (d DestinationQdrantByMarkdownHeader) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantByMarkdownHeader) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantByMarkdownHeader) GetMode() *DestinationQdrantSchemasProcessingTextSplitterMode {
+ return DestinationQdrantSchemasProcessingTextSplitterModeMarkdown.ToPointer()
+}
+
+func (o *DestinationQdrantByMarkdownHeader) GetSplitLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SplitLevel
+}
+
+type DestinationQdrantSchemasProcessingMode string
+
+const (
+ DestinationQdrantSchemasProcessingModeSeparator DestinationQdrantSchemasProcessingMode = "separator"
+)
+
+func (e DestinationQdrantSchemasProcessingMode) ToPointer() *DestinationQdrantSchemasProcessingMode {
+ return &e
+}
+
+func (e *DestinationQdrantSchemasProcessingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "separator":
+ *e = DestinationQdrantSchemasProcessingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationQdrantSchemasProcessingMode: %v", v)
+ }
+}
+
+// DestinationQdrantBySeparator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
+type DestinationQdrantBySeparator struct {
+ // Whether to keep the separator in the resulting chunks
+ KeepSeparator *bool `default:"false" json:"keep_separator"`
+ mode *DestinationQdrantSchemasProcessingMode `const:"separator" json:"mode"`
+ // List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
+ Separators []string `json:"separators,omitempty"`
+}
+
+func (d DestinationQdrantBySeparator) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantBySeparator) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantBySeparator) GetKeepSeparator() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.KeepSeparator
+}
+
+func (o *DestinationQdrantBySeparator) GetMode() *DestinationQdrantSchemasProcessingMode {
+ return DestinationQdrantSchemasProcessingModeSeparator.ToPointer()
+}
+
+func (o *DestinationQdrantBySeparator) GetSeparators() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Separators
+}
+
+type DestinationQdrantTextSplitterType string
+
+const (
+ DestinationQdrantTextSplitterTypeDestinationQdrantBySeparator DestinationQdrantTextSplitterType = "destination-qdrant_By Separator"
+ DestinationQdrantTextSplitterTypeDestinationQdrantByMarkdownHeader DestinationQdrantTextSplitterType = "destination-qdrant_By Markdown header"
+ DestinationQdrantTextSplitterTypeDestinationQdrantByProgrammingLanguage DestinationQdrantTextSplitterType = "destination-qdrant_By Programming Language"
+)
+
+type DestinationQdrantTextSplitter struct {
+ DestinationQdrantBySeparator *DestinationQdrantBySeparator
+ DestinationQdrantByMarkdownHeader *DestinationQdrantByMarkdownHeader
+ DestinationQdrantByProgrammingLanguage *DestinationQdrantByProgrammingLanguage
+
+ Type DestinationQdrantTextSplitterType
+}
+
+func CreateDestinationQdrantTextSplitterDestinationQdrantBySeparator(destinationQdrantBySeparator DestinationQdrantBySeparator) DestinationQdrantTextSplitter {
+ typ := DestinationQdrantTextSplitterTypeDestinationQdrantBySeparator
+
+ return DestinationQdrantTextSplitter{
+ DestinationQdrantBySeparator: &destinationQdrantBySeparator,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantTextSplitterDestinationQdrantByMarkdownHeader(destinationQdrantByMarkdownHeader DestinationQdrantByMarkdownHeader) DestinationQdrantTextSplitter {
+ typ := DestinationQdrantTextSplitterTypeDestinationQdrantByMarkdownHeader
+
+ return DestinationQdrantTextSplitter{
+ DestinationQdrantByMarkdownHeader: &destinationQdrantByMarkdownHeader,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantTextSplitterDestinationQdrantByProgrammingLanguage(destinationQdrantByProgrammingLanguage DestinationQdrantByProgrammingLanguage) DestinationQdrantTextSplitter {
+ typ := DestinationQdrantTextSplitterTypeDestinationQdrantByProgrammingLanguage
+
+ return DestinationQdrantTextSplitter{
+ DestinationQdrantByProgrammingLanguage: &destinationQdrantByProgrammingLanguage,
+ Type: typ,
+ }
+}
+
+func (u *DestinationQdrantTextSplitter) UnmarshalJSON(data []byte) error {
+
+ destinationQdrantByMarkdownHeader := new(DestinationQdrantByMarkdownHeader)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantByMarkdownHeader, "", true, true); err == nil {
+ u.DestinationQdrantByMarkdownHeader = destinationQdrantByMarkdownHeader
+ u.Type = DestinationQdrantTextSplitterTypeDestinationQdrantByMarkdownHeader
+ return nil
+ }
+
+ destinationQdrantByProgrammingLanguage := new(DestinationQdrantByProgrammingLanguage)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantByProgrammingLanguage, "", true, true); err == nil {
+ u.DestinationQdrantByProgrammingLanguage = destinationQdrantByProgrammingLanguage
+ u.Type = DestinationQdrantTextSplitterTypeDestinationQdrantByProgrammingLanguage
+ return nil
+ }
+
+ destinationQdrantBySeparator := new(DestinationQdrantBySeparator)
+ if err := utils.UnmarshalJSON(data, &destinationQdrantBySeparator, "", true, true); err == nil {
+ u.DestinationQdrantBySeparator = destinationQdrantBySeparator
+ u.Type = DestinationQdrantTextSplitterTypeDestinationQdrantBySeparator
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DestinationQdrantTextSplitter) MarshalJSON() ([]byte, error) {
+ if u.DestinationQdrantBySeparator != nil {
+ return utils.MarshalJSON(u.DestinationQdrantBySeparator, "", true)
+ }
+
+ if u.DestinationQdrantByMarkdownHeader != nil {
+ return utils.MarshalJSON(u.DestinationQdrantByMarkdownHeader, "", true)
+ }
+
+ if u.DestinationQdrantByProgrammingLanguage != nil {
+ return utils.MarshalJSON(u.DestinationQdrantByProgrammingLanguage, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type DestinationQdrantProcessingConfigModel struct {
+	// Size of overlap between chunks in tokens to store in vector store to better capture relevant context
+	ChunkOverlap *int64 `default:"0" json:"chunk_overlap"`
+	// Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
+	ChunkSize int64 `json:"chunk_size"`
+	// List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
+	FieldNameMappings []DestinationQdrantFieldNameMappingConfigModel `json:"field_name_mappings,omitempty"`
+	// List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
+	MetadataFields []string `json:"metadata_fields,omitempty"`
+	// List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array.
+	TextFields []string `json:"text_fields,omitempty"`
+	// Split text fields into chunks based on the specified method.
+	TextSplitter *DestinationQdrantTextSplitter `json:"text_splitter,omitempty"`
+}
+
+func (d DestinationQdrantProcessingConfigModel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantProcessingConfigModel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrantProcessingConfigModel) GetChunkOverlap() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ChunkOverlap
+}
+
+func (o *DestinationQdrantProcessingConfigModel) GetChunkSize() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.ChunkSize
+}
+
+func (o *DestinationQdrantProcessingConfigModel) GetFieldNameMappings() []DestinationQdrantFieldNameMappingConfigModel {
+ if o == nil {
+ return nil
+ }
+ return o.FieldNameMappings
+}
+
+func (o *DestinationQdrantProcessingConfigModel) GetMetadataFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.MetadataFields
+}
+
+func (o *DestinationQdrantProcessingConfigModel) GetTextFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TextFields
+}
+
+func (o *DestinationQdrantProcessingConfigModel) GetTextSplitter() *DestinationQdrantTextSplitter {
+ if o == nil {
+ return nil
+ }
+ return o.TextSplitter
+}
+
+type DestinationQdrant struct {
+ destinationType Qdrant `const:"qdrant" json:"destinationType"`
+ // Embedding configuration
+ Embedding DestinationQdrantEmbedding `json:"embedding"`
+ // Indexing configuration
+ Indexing DestinationQdrantIndexing `json:"indexing"`
+ Processing DestinationQdrantProcessingConfigModel `json:"processing"`
+}
+
+func (d DestinationQdrant) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrant) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationQdrant) GetDestinationType() Qdrant {
+ return QdrantQdrant
+}
+
+func (o *DestinationQdrant) GetEmbedding() DestinationQdrantEmbedding {
+ if o == nil {
+ return DestinationQdrantEmbedding{}
+ }
+ return o.Embedding
+}
+
+func (o *DestinationQdrant) GetIndexing() DestinationQdrantIndexing {
+ if o == nil {
+ return DestinationQdrantIndexing{}
+ }
+ return o.Indexing
+}
+
+func (o *DestinationQdrant) GetProcessing() DestinationQdrantProcessingConfigModel {
+ if o == nil {
+ return DestinationQdrantProcessingConfigModel{}
+ }
+ return o.Processing
+}
diff --git a/internal/sdk/pkg/models/shared/destinationqdrantcreaterequest.go b/internal/sdk/pkg/models/shared/destinationqdrantcreaterequest.go
new file mode 100644
index 000000000..a57bdfb81
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationqdrantcreaterequest.go
@@ -0,0 +1,40 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type DestinationQdrantCreateRequest struct {
+	Configuration DestinationQdrant `json:"configuration"`
+	// The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+	DefinitionID *string `json:"definitionId,omitempty"`
+	// Name of the destination e.g. dev-mysql-instance.
+	Name        string `json:"name"`
+	WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationQdrantCreateRequest) GetConfiguration() DestinationQdrant {
+	if o == nil { // nil-receiver-safe accessor (generated SDK convention)
+		return DestinationQdrant{}
+	}
+	return o.Configuration
+}
+
+func (o *DestinationQdrantCreateRequest) GetDefinitionID() *string {
+	if o == nil {
+		return nil
+	}
+	return o.DefinitionID
+}
+
+func (o *DestinationQdrantCreateRequest) GetName() string {
+	if o == nil {
+		return ""
+	}
+	return o.Name
+}
+
+func (o *DestinationQdrantCreateRequest) GetWorkspaceID() string {
+	if o == nil {
+		return ""
+	}
+	return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationqdrantputrequest.go b/internal/sdk/pkg/models/shared/destinationqdrantputrequest.go
new file mode 100644
index 000000000..de8e580e7
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationqdrantputrequest.go
@@ -0,0 +1,30 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type DestinationQdrantPutRequest struct {
+	Configuration DestinationQdrantUpdate `json:"configuration"` // update payload uses the *Update model variant
+	Name          string                  `json:"name"`
+	WorkspaceID   string                  `json:"workspaceId"`
+}
+
+func (o *DestinationQdrantPutRequest) GetConfiguration() DestinationQdrantUpdate {
+	if o == nil { // nil-receiver-safe accessor (generated SDK convention)
+		return DestinationQdrantUpdate{}
+	}
+	return o.Configuration
+}
+
+func (o *DestinationQdrantPutRequest) GetName() string {
+	if o == nil {
+		return ""
+	}
+	return o.Name
+}
+
+func (o *DestinationQdrantPutRequest) GetWorkspaceID() string {
+	if o == nil {
+		return ""
+	}
+	return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationqdrantupdate.go b/internal/sdk/pkg/models/shared/destinationqdrantupdate.go
new file mode 100644
index 000000000..a73a49c6e
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationqdrantupdate.go
@@ -0,0 +1,1408 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+type DestinationQdrantUpdateSchemasEmbeddingEmbedding6Mode string
+
+const (
+	DestinationQdrantUpdateSchemasEmbeddingEmbedding6ModeOpenaiCompatible DestinationQdrantUpdateSchemasEmbeddingEmbedding6Mode = "openai_compatible" // single allowed value
+)
+
+func (e DestinationQdrantUpdateSchemasEmbeddingEmbedding6Mode) ToPointer() *DestinationQdrantUpdateSchemasEmbeddingEmbedding6Mode {
+	return &e
+}
+
+func (e *DestinationQdrantUpdateSchemasEmbeddingEmbedding6Mode) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v { // validate against the enum's allowed values
+	case "openai_compatible":
+		*e = DestinationQdrantUpdateSchemasEmbeddingEmbedding6Mode(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasEmbeddingEmbedding6Mode: %v", v)
+	}
+}
+
+// DestinationQdrantUpdateOpenAICompatible - Use a service that's compatible with the OpenAI API to embed text.
+type DestinationQdrantUpdateOpenAICompatible struct {
+	APIKey *string `default:"" json:"api_key"` // optional; defaults to the empty string
+	// The base URL for your OpenAI-compatible service
+	BaseURL string `json:"base_url"`
+	// The number of dimensions the embedding model is generating
+	Dimensions int64 `json:"dimensions"`
+	mode       *DestinationQdrantUpdateSchemasEmbeddingEmbedding6Mode `const:"openai_compatible" json:"mode"` // fixed discriminator
+	// The name of the model to use for embedding
+	ModelName *string `default:"text-embedding-ada-002" json:"model_name"`
+}
+
+func (d DestinationQdrantUpdateOpenAICompatible) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantUpdateOpenAICompatible) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *DestinationQdrantUpdateOpenAICompatible) GetAPIKey() *string {
+	if o == nil { // nil-receiver-safe accessor (generated SDK convention)
+		return nil
+	}
+	return o.APIKey
+}
+
+func (o *DestinationQdrantUpdateOpenAICompatible) GetBaseURL() string {
+	if o == nil {
+		return ""
+	}
+	return o.BaseURL
+}
+
+func (o *DestinationQdrantUpdateOpenAICompatible) GetDimensions() int64 {
+	if o == nil {
+		return 0
+	}
+	return o.Dimensions
+}
+
+func (o *DestinationQdrantUpdateOpenAICompatible) GetMode() *DestinationQdrantUpdateSchemasEmbeddingEmbedding6Mode {
+	return DestinationQdrantUpdateSchemasEmbeddingEmbedding6ModeOpenaiCompatible.ToPointer() // always the const discriminator
+}
+
+func (o *DestinationQdrantUpdateOpenAICompatible) GetModelName() *string {
+	if o == nil {
+		return nil
+	}
+	return o.ModelName
+}
+
+type DestinationQdrantUpdateSchemasEmbeddingEmbedding5Mode string
+
+const (
+	DestinationQdrantUpdateSchemasEmbeddingEmbedding5ModeAzureOpenai DestinationQdrantUpdateSchemasEmbeddingEmbedding5Mode = "azure_openai" // single allowed value
+)
+
+func (e DestinationQdrantUpdateSchemasEmbeddingEmbedding5Mode) ToPointer() *DestinationQdrantUpdateSchemasEmbeddingEmbedding5Mode {
+	return &e
+}
+
+func (e *DestinationQdrantUpdateSchemasEmbeddingEmbedding5Mode) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v { // validate against the enum's allowed values
+	case "azure_openai":
+		*e = DestinationQdrantUpdateSchemasEmbeddingEmbedding5Mode(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasEmbeddingEmbedding5Mode: %v", v)
+	}
+}
+
+// DestinationQdrantUpdateAzureOpenAI - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationQdrantUpdateAzureOpenAI struct {
+	// The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+	APIBase string `json:"api_base"`
+	// The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+	Deployment string `json:"deployment"`
+	mode       *DestinationQdrantUpdateSchemasEmbeddingEmbedding5Mode `const:"azure_openai" json:"mode"` // fixed discriminator
+	// The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+	OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationQdrantUpdateAzureOpenAI) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantUpdateAzureOpenAI) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *DestinationQdrantUpdateAzureOpenAI) GetAPIBase() string {
+	if o == nil { // nil-receiver-safe accessor (generated SDK convention)
+		return ""
+	}
+	return o.APIBase
+}
+
+func (o *DestinationQdrantUpdateAzureOpenAI) GetDeployment() string {
+	if o == nil {
+		return ""
+	}
+	return o.Deployment
+}
+
+func (o *DestinationQdrantUpdateAzureOpenAI) GetMode() *DestinationQdrantUpdateSchemasEmbeddingEmbedding5Mode {
+	return DestinationQdrantUpdateSchemasEmbeddingEmbedding5ModeAzureOpenai.ToPointer() // always the const discriminator
+}
+
+func (o *DestinationQdrantUpdateAzureOpenAI) GetOpenaiKey() string {
+	if o == nil {
+		return ""
+	}
+	return o.OpenaiKey
+}
+
+type DestinationQdrantUpdateSchemasEmbeddingEmbeddingMode string
+
+const (
+	DestinationQdrantUpdateSchemasEmbeddingEmbeddingModeFromField DestinationQdrantUpdateSchemasEmbeddingEmbeddingMode = "from_field" // single allowed value
+)
+
+func (e DestinationQdrantUpdateSchemasEmbeddingEmbeddingMode) ToPointer() *DestinationQdrantUpdateSchemasEmbeddingEmbeddingMode {
+	return &e
+}
+
+func (e *DestinationQdrantUpdateSchemasEmbeddingEmbeddingMode) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v { // validate against the enum's allowed values
+	case "from_field":
+		*e = DestinationQdrantUpdateSchemasEmbeddingEmbeddingMode(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasEmbeddingEmbeddingMode: %v", v)
+	}
+}
+
+// DestinationQdrantUpdateFromField - Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.
+type DestinationQdrantUpdateFromField struct {
+	// The number of dimensions the embedding model is generating
+	Dimensions int64 `json:"dimensions"`
+	// Name of the field in the record that contains the embedding
+	FieldName string `json:"field_name"`
+	mode      *DestinationQdrantUpdateSchemasEmbeddingEmbeddingMode `const:"from_field" json:"mode"` // fixed discriminator
+}
+
+func (d DestinationQdrantUpdateFromField) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantUpdateFromField) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *DestinationQdrantUpdateFromField) GetDimensions() int64 {
+	if o == nil { // nil-receiver-safe accessor (generated SDK convention)
+		return 0
+	}
+	return o.Dimensions
+}
+
+func (o *DestinationQdrantUpdateFromField) GetFieldName() string {
+	if o == nil {
+		return ""
+	}
+	return o.FieldName
+}
+
+func (o *DestinationQdrantUpdateFromField) GetMode() *DestinationQdrantUpdateSchemasEmbeddingEmbeddingMode {
+	return DestinationQdrantUpdateSchemasEmbeddingEmbeddingModeFromField.ToPointer() // always the const discriminator
+}
+
+type DestinationQdrantUpdateSchemasEmbeddingMode string
+
+const (
+	DestinationQdrantUpdateSchemasEmbeddingModeFake DestinationQdrantUpdateSchemasEmbeddingMode = "fake" // single allowed value
+)
+
+func (e DestinationQdrantUpdateSchemasEmbeddingMode) ToPointer() *DestinationQdrantUpdateSchemasEmbeddingMode {
+	return &e
+}
+
+func (e *DestinationQdrantUpdateSchemasEmbeddingMode) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v { // validate against the enum's allowed values
+	case "fake":
+		*e = DestinationQdrantUpdateSchemasEmbeddingMode(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasEmbeddingMode: %v", v)
+	}
+}
+
+// DestinationQdrantUpdateFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
+type DestinationQdrantUpdateFake struct {
+	mode *DestinationQdrantUpdateSchemasEmbeddingMode `const:"fake" json:"mode"` // fixed discriminator; no other configuration
+}
+
+func (d DestinationQdrantUpdateFake) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantUpdateFake) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *DestinationQdrantUpdateFake) GetMode() *DestinationQdrantUpdateSchemasEmbeddingMode {
+	return DestinationQdrantUpdateSchemasEmbeddingModeFake.ToPointer() // always the const discriminator
+}
+
+type DestinationQdrantUpdateSchemasMode string
+
+const (
+	DestinationQdrantUpdateSchemasModeCohere DestinationQdrantUpdateSchemasMode = "cohere" // single allowed value
+)
+
+func (e DestinationQdrantUpdateSchemasMode) ToPointer() *DestinationQdrantUpdateSchemasMode {
+	return &e
+}
+
+func (e *DestinationQdrantUpdateSchemasMode) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v { // validate against the enum's allowed values
+	case "cohere":
+		*e = DestinationQdrantUpdateSchemasMode(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasMode: %v", v)
+	}
+}
+
+// DestinationQdrantUpdateCohere - Use the Cohere API to embed text.
+type DestinationQdrantUpdateCohere struct {
+	CohereKey string                              `json:"cohere_key"` // Cohere API key
+	mode      *DestinationQdrantUpdateSchemasMode `const:"cohere" json:"mode"` // fixed discriminator
+}
+
+func (d DestinationQdrantUpdateCohere) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantUpdateCohere) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *DestinationQdrantUpdateCohere) GetCohereKey() string {
+	if o == nil { // nil-receiver-safe accessor (generated SDK convention)
+		return ""
+	}
+	return o.CohereKey
+}
+
+func (o *DestinationQdrantUpdateCohere) GetMode() *DestinationQdrantUpdateSchemasMode {
+	return DestinationQdrantUpdateSchemasModeCohere.ToPointer() // always the const discriminator
+}
+
+type DestinationQdrantUpdateMode string
+
+const (
+	DestinationQdrantUpdateModeOpenai DestinationQdrantUpdateMode = "openai" // single allowed value
+)
+
+func (e DestinationQdrantUpdateMode) ToPointer() *DestinationQdrantUpdateMode {
+	return &e
+}
+
+func (e *DestinationQdrantUpdateMode) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v { // validate against the enum's allowed values
+	case "openai":
+		*e = DestinationQdrantUpdateMode(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationQdrantUpdateMode: %v", v)
+	}
+}
+
+// DestinationQdrantUpdateOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationQdrantUpdateOpenAI struct {
+	mode      *DestinationQdrantUpdateMode `const:"openai" json:"mode"` // fixed discriminator
+	OpenaiKey string                       `json:"openai_key"` // OpenAI API key
+}
+
+func (d DestinationQdrantUpdateOpenAI) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantUpdateOpenAI) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *DestinationQdrantUpdateOpenAI) GetMode() *DestinationQdrantUpdateMode {
+	return DestinationQdrantUpdateModeOpenai.ToPointer() // always the const discriminator
+}
+
+func (o *DestinationQdrantUpdateOpenAI) GetOpenaiKey() string {
+	if o == nil { // nil-receiver-safe accessor (generated SDK convention)
+		return ""
+	}
+	return o.OpenaiKey
+}
+
+type DestinationQdrantUpdateEmbeddingType string
+
+const (
+	DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateOpenAI           DestinationQdrantUpdateEmbeddingType = "destination-qdrant-update_OpenAI"
+	DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateCohere           DestinationQdrantUpdateEmbeddingType = "destination-qdrant-update_Cohere"
+	DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateFake             DestinationQdrantUpdateEmbeddingType = "destination-qdrant-update_Fake"
+	DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateFromField        DestinationQdrantUpdateEmbeddingType = "destination-qdrant-update_From Field"
+	DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateAzureOpenAI      DestinationQdrantUpdateEmbeddingType = "destination-qdrant-update_Azure OpenAI"
+	DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateOpenAICompatible DestinationQdrantUpdateEmbeddingType = "destination-qdrant-update_OpenAI-compatible"
+)
+
+type DestinationQdrantUpdateEmbedding struct { // union type: exactly one variant pointer is non-nil; Type records which
+	DestinationQdrantUpdateOpenAI           *DestinationQdrantUpdateOpenAI
+	DestinationQdrantUpdateCohere           *DestinationQdrantUpdateCohere
+	DestinationQdrantUpdateFake             *DestinationQdrantUpdateFake
+	DestinationQdrantUpdateFromField        *DestinationQdrantUpdateFromField
+	DestinationQdrantUpdateAzureOpenAI      *DestinationQdrantUpdateAzureOpenAI
+	DestinationQdrantUpdateOpenAICompatible *DestinationQdrantUpdateOpenAICompatible
+
+	Type DestinationQdrantUpdateEmbeddingType
+}
+
+func CreateDestinationQdrantUpdateEmbeddingDestinationQdrantUpdateOpenAI(destinationQdrantUpdateOpenAI DestinationQdrantUpdateOpenAI) DestinationQdrantUpdateEmbedding {
+	typ := DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateOpenAI
+
+	return DestinationQdrantUpdateEmbedding{
+		DestinationQdrantUpdateOpenAI: &destinationQdrantUpdateOpenAI,
+		Type:                          typ,
+	}
+}
+
+func CreateDestinationQdrantUpdateEmbeddingDestinationQdrantUpdateCohere(destinationQdrantUpdateCohere DestinationQdrantUpdateCohere) DestinationQdrantUpdateEmbedding {
+	typ := DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateCohere
+
+	return DestinationQdrantUpdateEmbedding{
+		DestinationQdrantUpdateCohere: &destinationQdrantUpdateCohere,
+		Type:                          typ,
+	}
+}
+
+func CreateDestinationQdrantUpdateEmbeddingDestinationQdrantUpdateFake(destinationQdrantUpdateFake DestinationQdrantUpdateFake) DestinationQdrantUpdateEmbedding {
+	typ := DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateFake
+
+	return DestinationQdrantUpdateEmbedding{
+		DestinationQdrantUpdateFake: &destinationQdrantUpdateFake,
+		Type:                        typ,
+	}
+}
+
+func CreateDestinationQdrantUpdateEmbeddingDestinationQdrantUpdateFromField(destinationQdrantUpdateFromField DestinationQdrantUpdateFromField) DestinationQdrantUpdateEmbedding {
+	typ := DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateFromField
+
+	return DestinationQdrantUpdateEmbedding{
+		DestinationQdrantUpdateFromField: &destinationQdrantUpdateFromField,
+		Type:                             typ,
+	}
+}
+
+func CreateDestinationQdrantUpdateEmbeddingDestinationQdrantUpdateAzureOpenAI(destinationQdrantUpdateAzureOpenAI DestinationQdrantUpdateAzureOpenAI) DestinationQdrantUpdateEmbedding {
+	typ := DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateAzureOpenAI
+
+	return DestinationQdrantUpdateEmbedding{
+		DestinationQdrantUpdateAzureOpenAI: &destinationQdrantUpdateAzureOpenAI,
+		Type:                               typ,
+	}
+}
+
+func CreateDestinationQdrantUpdateEmbeddingDestinationQdrantUpdateOpenAICompatible(destinationQdrantUpdateOpenAICompatible DestinationQdrantUpdateOpenAICompatible) DestinationQdrantUpdateEmbedding {
+	typ := DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateOpenAICompatible
+
+	return DestinationQdrantUpdateEmbedding{
+		DestinationQdrantUpdateOpenAICompatible: &destinationQdrantUpdateOpenAICompatible,
+		Type:                                    typ,
+	}
+}
+
+func (u *DestinationQdrantUpdateEmbedding) UnmarshalJSON(data []byte) error {
+
+	destinationQdrantUpdateFake := new(DestinationQdrantUpdateFake) // variants are tried in order; the first successful decode wins
+	if err := utils.UnmarshalJSON(data, &destinationQdrantUpdateFake, "", true, true); err == nil {
+		u.DestinationQdrantUpdateFake = destinationQdrantUpdateFake
+		u.Type = DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateFake
+		return nil
+	}
+
+	destinationQdrantUpdateOpenAI := new(DestinationQdrantUpdateOpenAI)
+	if err := utils.UnmarshalJSON(data, &destinationQdrantUpdateOpenAI, "", true, true); err == nil {
+		u.DestinationQdrantUpdateOpenAI = destinationQdrantUpdateOpenAI
+		u.Type = DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateOpenAI
+		return nil
+	}
+
+	destinationQdrantUpdateCohere := new(DestinationQdrantUpdateCohere)
+	if err := utils.UnmarshalJSON(data, &destinationQdrantUpdateCohere, "", true, true); err == nil {
+		u.DestinationQdrantUpdateCohere = destinationQdrantUpdateCohere
+		u.Type = DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateCohere
+		return nil
+	}
+
+	destinationQdrantUpdateFromField := new(DestinationQdrantUpdateFromField)
+	if err := utils.UnmarshalJSON(data, &destinationQdrantUpdateFromField, "", true, true); err == nil {
+		u.DestinationQdrantUpdateFromField = destinationQdrantUpdateFromField
+		u.Type = DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateFromField
+		return nil
+	}
+
+	destinationQdrantUpdateAzureOpenAI := new(DestinationQdrantUpdateAzureOpenAI)
+	if err := utils.UnmarshalJSON(data, &destinationQdrantUpdateAzureOpenAI, "", true, true); err == nil {
+		u.DestinationQdrantUpdateAzureOpenAI = destinationQdrantUpdateAzureOpenAI
+		u.Type = DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateAzureOpenAI
+		return nil
+	}
+
+	destinationQdrantUpdateOpenAICompatible := new(DestinationQdrantUpdateOpenAICompatible)
+	if err := utils.UnmarshalJSON(data, &destinationQdrantUpdateOpenAICompatible, "", true, true); err == nil {
+		u.DestinationQdrantUpdateOpenAICompatible = destinationQdrantUpdateOpenAICompatible
+		u.Type = DestinationQdrantUpdateEmbeddingTypeDestinationQdrantUpdateOpenAICompatible
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DestinationQdrantUpdateEmbedding) MarshalJSON() ([]byte, error) {
+	if u.DestinationQdrantUpdateOpenAI != nil { // marshal whichever variant is set
+		return utils.MarshalJSON(u.DestinationQdrantUpdateOpenAI, "", true)
+	}
+
+	if u.DestinationQdrantUpdateCohere != nil {
+		return utils.MarshalJSON(u.DestinationQdrantUpdateCohere, "", true)
+	}
+
+	if u.DestinationQdrantUpdateFake != nil {
+		return utils.MarshalJSON(u.DestinationQdrantUpdateFake, "", true)
+	}
+
+	if u.DestinationQdrantUpdateFromField != nil {
+		return utils.MarshalJSON(u.DestinationQdrantUpdateFromField, "", true)
+	}
+
+	if u.DestinationQdrantUpdateAzureOpenAI != nil {
+		return utils.MarshalJSON(u.DestinationQdrantUpdateAzureOpenAI, "", true)
+	}
+
+	if u.DestinationQdrantUpdateOpenAICompatible != nil {
+		return utils.MarshalJSON(u.DestinationQdrantUpdateOpenAICompatible, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type DestinationQdrantUpdateSchemasIndexingAuthMethodMode string
+
+const (
+	DestinationQdrantUpdateSchemasIndexingAuthMethodModeNoAuth DestinationQdrantUpdateSchemasIndexingAuthMethodMode = "no_auth" // single allowed value
+)
+
+func (e DestinationQdrantUpdateSchemasIndexingAuthMethodMode) ToPointer() *DestinationQdrantUpdateSchemasIndexingAuthMethodMode {
+	return &e
+}
+
+func (e *DestinationQdrantUpdateSchemasIndexingAuthMethodMode) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v { // validate against the enum's allowed values
+	case "no_auth":
+		*e = DestinationQdrantUpdateSchemasIndexingAuthMethodMode(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasIndexingAuthMethodMode: %v", v)
+	}
+}
+
+// DestinationQdrantUpdateNoAuth - Method to authenticate with the Qdrant Instance
+type DestinationQdrantUpdateNoAuth struct {
+	mode *DestinationQdrantUpdateSchemasIndexingAuthMethodMode `const:"no_auth" json:"mode"` // fixed discriminator; no credentials required
+}
+
+func (d DestinationQdrantUpdateNoAuth) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationQdrantUpdateNoAuth) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *DestinationQdrantUpdateNoAuth) GetMode() *DestinationQdrantUpdateSchemasIndexingAuthMethodMode {
+	return DestinationQdrantUpdateSchemasIndexingAuthMethodModeNoAuth.ToPointer() // always the const discriminator
+}
+
+type DestinationQdrantUpdateSchemasIndexingMode string
+
+const (
+	DestinationQdrantUpdateSchemasIndexingModeAPIKeyAuth DestinationQdrantUpdateSchemasIndexingMode = "api_key_auth" // single allowed value
+)
+
+func (e DestinationQdrantUpdateSchemasIndexingMode) ToPointer() *DestinationQdrantUpdateSchemasIndexingMode {
+	return &e
+}
+
+func (e *DestinationQdrantUpdateSchemasIndexingMode) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v { // validate against the enum's allowed values
+	case "api_key_auth":
+		*e = DestinationQdrantUpdateSchemasIndexingMode(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasIndexingMode: %v", v)
+	}
+}
+
+// APIKeyAuth - Method to authenticate with the Qdrant Instance
+type APIKeyAuth struct {
+	// API Key for the Qdrant instance
+	APIKey string `json:"api_key"`
+	mode   *DestinationQdrantUpdateSchemasIndexingMode `const:"api_key_auth" json:"mode"` // fixed discriminator
+}
+
+func (a APIKeyAuth) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(a, "", false)
+}
+
+func (a *APIKeyAuth) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *APIKeyAuth) GetAPIKey() string {
+	if o == nil { // nil-receiver-safe accessor (generated SDK convention)
+		return ""
+	}
+	return o.APIKey
+}
+
+func (o *APIKeyAuth) GetMode() *DestinationQdrantUpdateSchemasIndexingMode {
+	return DestinationQdrantUpdateSchemasIndexingModeAPIKeyAuth.ToPointer() // always the const discriminator
+}
+
+type DestinationQdrantUpdateAuthenticationMethodType string
+
+const (
+	DestinationQdrantUpdateAuthenticationMethodTypeAPIKeyAuth                    DestinationQdrantUpdateAuthenticationMethodType = "ApiKeyAuth"
+	DestinationQdrantUpdateAuthenticationMethodTypeDestinationQdrantUpdateNoAuth DestinationQdrantUpdateAuthenticationMethodType = "destination-qdrant-update_NoAuth"
+)
+
+type DestinationQdrantUpdateAuthenticationMethod struct { // union type: exactly one variant pointer is non-nil; Type records which
+	APIKeyAuth                    *APIKeyAuth
+	DestinationQdrantUpdateNoAuth *DestinationQdrantUpdateNoAuth
+
+	Type DestinationQdrantUpdateAuthenticationMethodType
+}
+
+func CreateDestinationQdrantUpdateAuthenticationMethodAPIKeyAuth(apiKeyAuth APIKeyAuth) DestinationQdrantUpdateAuthenticationMethod {
+	typ := DestinationQdrantUpdateAuthenticationMethodTypeAPIKeyAuth
+
+	return DestinationQdrantUpdateAuthenticationMethod{
+		APIKeyAuth: &apiKeyAuth,
+		Type:       typ,
+	}
+}
+
+func CreateDestinationQdrantUpdateAuthenticationMethodDestinationQdrantUpdateNoAuth(destinationQdrantUpdateNoAuth DestinationQdrantUpdateNoAuth) DestinationQdrantUpdateAuthenticationMethod {
+	typ := DestinationQdrantUpdateAuthenticationMethodTypeDestinationQdrantUpdateNoAuth
+
+	return DestinationQdrantUpdateAuthenticationMethod{
+		DestinationQdrantUpdateNoAuth: &destinationQdrantUpdateNoAuth,
+		Type:                          typ,
+	}
+}
+
+func (u *DestinationQdrantUpdateAuthenticationMethod) UnmarshalJSON(data []byte) error {
+
+	destinationQdrantUpdateNoAuth := new(DestinationQdrantUpdateNoAuth) // variants are tried in order; the first successful decode wins
+	if err := utils.UnmarshalJSON(data, &destinationQdrantUpdateNoAuth, "", true, true); err == nil {
+		u.DestinationQdrantUpdateNoAuth = destinationQdrantUpdateNoAuth
+		u.Type = DestinationQdrantUpdateAuthenticationMethodTypeDestinationQdrantUpdateNoAuth
+		return nil
+	}
+
+	apiKeyAuth := new(APIKeyAuth)
+	if err := utils.UnmarshalJSON(data, &apiKeyAuth, "", true, true); err == nil {
+		u.APIKeyAuth = apiKeyAuth
+		u.Type = DestinationQdrantUpdateAuthenticationMethodTypeAPIKeyAuth
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DestinationQdrantUpdateAuthenticationMethod) MarshalJSON() ([]byte, error) {
+	if u.APIKeyAuth != nil { // marshal whichever variant is set
+		return utils.MarshalJSON(u.APIKeyAuth, "", true)
+	}
+
+	if u.DestinationQdrantUpdateNoAuth != nil {
+		return utils.MarshalJSON(u.DestinationQdrantUpdateNoAuth, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type DestinationQdrantUpdateSchemasIndexingDistanceMetric string
+
+const (
+	DestinationQdrantUpdateSchemasIndexingDistanceMetricEuc DestinationQdrantUpdateSchemasIndexingDistanceMetric = "euc" // single allowed value
+)
+
+func (e DestinationQdrantUpdateSchemasIndexingDistanceMetric) ToPointer() *DestinationQdrantUpdateSchemasIndexingDistanceMetric {
+	return &e
+}
+
+func (e *DestinationQdrantUpdateSchemasIndexingDistanceMetric) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v { // validate against the enum's allowed values
+	case "euc":
+		*e = DestinationQdrantUpdateSchemasIndexingDistanceMetric(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasIndexingDistanceMetric: %v", v)
+	}
+}
+
+// Euc - The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the indexing config does not exist yet and is created automatically by the connector.
+type Euc struct {
+	distanceMetric *DestinationQdrantUpdateSchemasIndexingDistanceMetric `const:"euc" json:"distance_metric"` // fixed discriminator: Euclidean distance
+}
+
+func (e Euc) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(e, "", false)
+}
+
+func (e *Euc) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &e, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *Euc) GetDistanceMetric() *DestinationQdrantUpdateSchemasIndexingDistanceMetric {
+	return DestinationQdrantUpdateSchemasIndexingDistanceMetricEuc.ToPointer() // always the const discriminator
+}
+
+type DestinationQdrantUpdateSchemasDistanceMetric string
+
+const (
+	DestinationQdrantUpdateSchemasDistanceMetricCos DestinationQdrantUpdateSchemasDistanceMetric = "cos" // single allowed value
+)
+
+func (e DestinationQdrantUpdateSchemasDistanceMetric) ToPointer() *DestinationQdrantUpdateSchemasDistanceMetric {
+	return &e
+}
+
+func (e *DestinationQdrantUpdateSchemasDistanceMetric) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v { // validate against the enum's allowed values
+	case "cos":
+		*e = DestinationQdrantUpdateSchemasDistanceMetric(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasDistanceMetric: %v", v)
+	}
+}
+
+// Cos - The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the indexing config does not exist yet and is created automatically by the connector.
+type Cos struct {
+	distanceMetric *DestinationQdrantUpdateSchemasDistanceMetric `const:"cos" json:"distance_metric"` // fixed discriminator: cosine similarity
+}
+
+func (c Cos) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(c, "", false)
+}
+
+func (c *Cos) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &c, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *Cos) GetDistanceMetric() *DestinationQdrantUpdateSchemasDistanceMetric {
+	return DestinationQdrantUpdateSchemasDistanceMetricCos.ToPointer() // always the const discriminator
+}
+
+type DestinationQdrantUpdateDistanceMetric string
+
+const (
+	DestinationQdrantUpdateDistanceMetricDot DestinationQdrantUpdateDistanceMetric = "dot" // single allowed value
+)
+
+func (e DestinationQdrantUpdateDistanceMetric) ToPointer() *DestinationQdrantUpdateDistanceMetric {
+	return &e
+}
+
+func (e *DestinationQdrantUpdateDistanceMetric) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v { // validate against the enum's allowed values
+	case "dot":
+		*e = DestinationQdrantUpdateDistanceMetric(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for DestinationQdrantUpdateDistanceMetric: %v", v)
+	}
+}
+
+// Dot - The Distance metric used to measure similarities among vectors. This field is only used if the collection defined in the indexing config does not exist yet and is created automatically by the connector.
+type Dot struct {
+	distanceMetric *DestinationQdrantUpdateDistanceMetric `const:"dot" json:"distance_metric"` // fixed discriminator: dot product
+}
+
+func (d Dot) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(d, "", false)
+}
+
+func (d *Dot) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *Dot) GetDistanceMetric() *DestinationQdrantUpdateDistanceMetric {
+	return DestinationQdrantUpdateDistanceMetricDot.ToPointer() // always the const discriminator
+}
+
+type DistanceMetricType string
+
+const (
+	DistanceMetricTypeDot DistanceMetricType = "dot"
+	DistanceMetricTypeCos DistanceMetricType = "cos"
+	DistanceMetricTypeEuc DistanceMetricType = "euc"
+)
+
+type DistanceMetric struct { // union type: exactly one variant pointer is non-nil; Type records which
+	Dot *Dot
+	Cos *Cos
+	Euc *Euc
+
+	Type DistanceMetricType
+}
+
+func CreateDistanceMetricDot(dot Dot) DistanceMetric {
+	typ := DistanceMetricTypeDot
+
+	return DistanceMetric{
+		Dot:  &dot,
+		Type: typ,
+	}
+}
+
+func CreateDistanceMetricCos(cos Cos) DistanceMetric {
+	typ := DistanceMetricTypeCos
+
+	return DistanceMetric{
+		Cos:  &cos,
+		Type: typ,
+	}
+}
+
+func CreateDistanceMetricEuc(euc Euc) DistanceMetric {
+	typ := DistanceMetricTypeEuc
+
+	return DistanceMetric{
+		Euc:  &euc,
+		Type: typ,
+	}
+}
+
+func (u *DistanceMetric) UnmarshalJSON(data []byte) error {
+
+	dot := new(Dot) // variants are tried in order; the first successful decode wins
+	if err := utils.UnmarshalJSON(data, &dot, "", true, true); err == nil {
+		u.Dot = dot
+		u.Type = DistanceMetricTypeDot
+		return nil
+	}
+
+	cos := new(Cos)
+	if err := utils.UnmarshalJSON(data, &cos, "", true, true); err == nil {
+		u.Cos = cos
+		u.Type = DistanceMetricTypeCos
+		return nil
+	}
+
+	euc := new(Euc)
+	if err := utils.UnmarshalJSON(data, &euc, "", true, true); err == nil {
+		u.Euc = euc
+		u.Type = DistanceMetricTypeEuc
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DistanceMetric) MarshalJSON() ([]byte, error) {
+	if u.Dot != nil { // marshal whichever variant is set
+		return utils.MarshalJSON(u.Dot, "", true)
+	}
+
+	if u.Cos != nil {
+		return utils.MarshalJSON(u.Cos, "", true)
+	}
+
+	if u.Euc != nil {
+		return utils.MarshalJSON(u.Euc, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// DestinationQdrantUpdateIndexing - Indexing configuration
type DestinationQdrantUpdateIndexing struct {
	// Method to authenticate with the Qdrant Instance
	AuthMethod *DestinationQdrantUpdateAuthenticationMethod `json:"auth_method,omitempty"`
	// The collection to load data into
	Collection string `json:"collection"`
	// The Distance metric used to measure similarities among vectors. This field is only used if the configured collection does not exist yet and is created automatically by the connector.
	DistanceMetric *DistanceMetric `json:"distance_metric,omitempty"`
	// Whether to prefer gRPC over HTTP. Set to true for Qdrant cloud clusters
	PreferGrpc *bool `default:"true" json:"prefer_grpc"`
	// The field in the payload that contains the embedded text
	TextField *string `default:"text" json:"text_field"`
	// Public Endpoint of the Qdrant cluster
	URL string `json:"url"`
}

// MarshalJSON serializes the config through the utils helper, which applies
// the `default` struct tags.
func (d DestinationQdrantUpdateIndexing) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON deserializes the config through the utils helper (flag
// semantics are defined by the utils package).
func (d *DestinationQdrantUpdateIndexing) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
		return err
	}
	return nil
}

// GetAuthMethod returns the auth method; safe to call on a nil receiver.
func (o *DestinationQdrantUpdateIndexing) GetAuthMethod() *DestinationQdrantUpdateAuthenticationMethod {
	if o == nil {
		return nil
	}
	return o.AuthMethod
}

// GetCollection returns the target collection name; "" on a nil receiver.
func (o *DestinationQdrantUpdateIndexing) GetCollection() string {
	if o == nil {
		return ""
	}
	return o.Collection
}

// GetDistanceMetric returns the distance metric; safe on a nil receiver.
func (o *DestinationQdrantUpdateIndexing) GetDistanceMetric() *DistanceMetric {
	if o == nil {
		return nil
	}
	return o.DistanceMetric
}

// GetPreferGrpc returns the gRPC preference flag; safe on a nil receiver.
func (o *DestinationQdrantUpdateIndexing) GetPreferGrpc() *bool {
	if o == nil {
		return nil
	}
	return o.PreferGrpc
}

// GetTextField returns the payload text field name; safe on a nil receiver.
func (o *DestinationQdrantUpdateIndexing) GetTextField() *string {
	if o == nil {
		return nil
	}
	return o.TextField
}

// GetURL returns the cluster endpoint; "" on a nil receiver.
func (o *DestinationQdrantUpdateIndexing) GetURL() string {
	if o == nil {
		return ""
	}
	return o.URL
}
+
// DestinationQdrantUpdateFieldNameMappingConfigModel describes one
// source-to-destination field rename.
type DestinationQdrantUpdateFieldNameMappingConfigModel struct {
	// The field name in the source
	FromField string `json:"from_field"`
	// The field name to use in the destination
	ToField string `json:"to_field"`
}

// GetFromField returns the source field name; "" on a nil receiver.
func (o *DestinationQdrantUpdateFieldNameMappingConfigModel) GetFromField() string {
	if o != nil {
		return o.FromField
	}
	return ""
}

// GetToField returns the destination field name; "" on a nil receiver.
func (o *DestinationQdrantUpdateFieldNameMappingConfigModel) GetToField() string {
	if o != nil {
		return o.ToField
	}
	return ""
}
+
// DestinationQdrantUpdateLanguage - Split code in suitable places based on the programming language
type DestinationQdrantUpdateLanguage string

const (
	DestinationQdrantUpdateLanguageCpp      DestinationQdrantUpdateLanguage = "cpp"
	DestinationQdrantUpdateLanguageGo       DestinationQdrantUpdateLanguage = "go"
	DestinationQdrantUpdateLanguageJava     DestinationQdrantUpdateLanguage = "java"
	DestinationQdrantUpdateLanguageJs       DestinationQdrantUpdateLanguage = "js"
	DestinationQdrantUpdateLanguagePhp      DestinationQdrantUpdateLanguage = "php"
	DestinationQdrantUpdateLanguageProto    DestinationQdrantUpdateLanguage = "proto"
	DestinationQdrantUpdateLanguagePython   DestinationQdrantUpdateLanguage = "python"
	DestinationQdrantUpdateLanguageRst      DestinationQdrantUpdateLanguage = "rst"
	DestinationQdrantUpdateLanguageRuby     DestinationQdrantUpdateLanguage = "ruby"
	DestinationQdrantUpdateLanguageRust     DestinationQdrantUpdateLanguage = "rust"
	DestinationQdrantUpdateLanguageScala    DestinationQdrantUpdateLanguage = "scala"
	DestinationQdrantUpdateLanguageSwift    DestinationQdrantUpdateLanguage = "swift"
	DestinationQdrantUpdateLanguageMarkdown DestinationQdrantUpdateLanguage = "markdown"
	DestinationQdrantUpdateLanguageLatex    DestinationQdrantUpdateLanguage = "latex"
	DestinationQdrantUpdateLanguageHTML     DestinationQdrantUpdateLanguage = "html"
	DestinationQdrantUpdateLanguageSol      DestinationQdrantUpdateLanguage = "sol"
)

// ToPointer returns a pointer to a copy of e, convenient for optional fields.
func (e DestinationQdrantUpdateLanguage) ToPointer() *DestinationQdrantUpdateLanguage {
	return &e
}

// UnmarshalJSON decodes a JSON string and validates it against the known set
// of language values, rejecting anything else. The 16 single-case-plus-
// fallthrough arms of the generated form are collapsed into one idiomatic
// multi-value case; behavior is unchanged.
func (e *DestinationQdrantUpdateLanguage) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "cpp", "go", "java", "js", "php", "proto", "python", "rst",
		"ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol":
		*e = DestinationQdrantUpdateLanguage(v)
		return nil
	default:
		return fmt.Errorf("invalid value for DestinationQdrantUpdateLanguage: %v", v)
	}
}
+
// DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterMode is the
// discriminator for the "code" text-splitter variant; "code" is its only
// legal value.
type DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterMode string

const (
	DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterModeCode DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterMode = "code"
)

// ToPointer returns a pointer to a copy of e.
func (e DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterMode) ToPointer() *DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterMode {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "code".
func (e *DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterMode) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "code" {
		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterMode: %v", s)
	}
	*e = DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterMode(s)
	return nil
}
+
// DestinationQdrantUpdateByProgrammingLanguage - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
type DestinationQdrantUpdateByProgrammingLanguage struct {
	// Split code in suitable places based on the programming language
	Language DestinationQdrantUpdateLanguage `json:"language"`
	// mode is unexported and pinned to "code" via the `const` tag; the utils
	// helpers materialize/validate it during (un)marshalling.
	mode *DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterMode `const:"code" json:"mode"`
}

// MarshalJSON serializes through the utils helper so the const-tagged mode is
// emitted.
func (d DestinationQdrantUpdateByProgrammingLanguage) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON deserializes through the utils helper (flag semantics are
// defined by the utils package).
func (d *DestinationQdrantUpdateByProgrammingLanguage) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetLanguage returns the configured language; the zero value on nil receiver.
func (o *DestinationQdrantUpdateByProgrammingLanguage) GetLanguage() DestinationQdrantUpdateLanguage {
	if o == nil {
		return DestinationQdrantUpdateLanguage("")
	}
	return o.Language
}

// GetMode always reports the "code" discriminator; the receiver is ignored.
func (o *DestinationQdrantUpdateByProgrammingLanguage) GetMode() *DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterMode {
	return DestinationQdrantUpdateSchemasProcessingTextSplitterTextSplitterModeCode.ToPointer()
}
+
// DestinationQdrantUpdateSchemasProcessingTextSplitterMode is the
// discriminator for the "markdown" text-splitter variant; "markdown" is its
// only legal value.
type DestinationQdrantUpdateSchemasProcessingTextSplitterMode string

const (
	DestinationQdrantUpdateSchemasProcessingTextSplitterModeMarkdown DestinationQdrantUpdateSchemasProcessingTextSplitterMode = "markdown"
)

// ToPointer returns a pointer to a copy of e.
func (e DestinationQdrantUpdateSchemasProcessingTextSplitterMode) ToPointer() *DestinationQdrantUpdateSchemasProcessingTextSplitterMode {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "markdown".
func (e *DestinationQdrantUpdateSchemasProcessingTextSplitterMode) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "markdown" {
		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasProcessingTextSplitterMode: %v", s)
	}
	*e = DestinationQdrantUpdateSchemasProcessingTextSplitterMode(s)
	return nil
}
+
// DestinationQdrantUpdateByMarkdownHeader - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
type DestinationQdrantUpdateByMarkdownHeader struct {
	// mode is unexported and pinned to "markdown" via the `const` tag; the
	// utils helpers materialize/validate it during (un)marshalling.
	mode *DestinationQdrantUpdateSchemasProcessingTextSplitterMode `const:"markdown" json:"mode"`
	// Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
	SplitLevel *int64 `default:"1" json:"split_level"`
}

// MarshalJSON serializes through the utils helper so const and default tags
// are applied.
func (d DestinationQdrantUpdateByMarkdownHeader) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON deserializes through the utils helper (flag semantics are
// defined by the utils package).
func (d *DestinationQdrantUpdateByMarkdownHeader) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetMode always reports the "markdown" discriminator; the receiver is ignored.
func (o *DestinationQdrantUpdateByMarkdownHeader) GetMode() *DestinationQdrantUpdateSchemasProcessingTextSplitterMode {
	return DestinationQdrantUpdateSchemasProcessingTextSplitterModeMarkdown.ToPointer()
}

// GetSplitLevel returns the configured split level; safe on a nil receiver.
func (o *DestinationQdrantUpdateByMarkdownHeader) GetSplitLevel() *int64 {
	if o == nil {
		return nil
	}
	return o.SplitLevel
}
+
// DestinationQdrantUpdateSchemasProcessingMode is the discriminator for the
// "separator" text-splitter variant; "separator" is its only legal value.
type DestinationQdrantUpdateSchemasProcessingMode string

const (
	DestinationQdrantUpdateSchemasProcessingModeSeparator DestinationQdrantUpdateSchemasProcessingMode = "separator"
)

// ToPointer returns a pointer to a copy of e.
func (e DestinationQdrantUpdateSchemasProcessingMode) ToPointer() *DestinationQdrantUpdateSchemasProcessingMode {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "separator".
func (e *DestinationQdrantUpdateSchemasProcessingMode) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "separator" {
		return fmt.Errorf("invalid value for DestinationQdrantUpdateSchemasProcessingMode: %v", s)
	}
	*e = DestinationQdrantUpdateSchemasProcessingMode(s)
	return nil
}
+
// DestinationQdrantUpdateBySeparator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
type DestinationQdrantUpdateBySeparator struct {
	// Whether to keep the separator in the resulting chunks
	KeepSeparator *bool `default:"false" json:"keep_separator"`
	// mode is unexported and pinned to "separator" via the `const` tag; the
	// utils helpers materialize/validate it during (un)marshalling.
	mode *DestinationQdrantUpdateSchemasProcessingMode `const:"separator" json:"mode"`
	// List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
	Separators []string `json:"separators,omitempty"`
}

// MarshalJSON serializes through the utils helper so const and default tags
// are applied.
func (d DestinationQdrantUpdateBySeparator) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON deserializes through the utils helper (flag semantics are
// defined by the utils package).
func (d *DestinationQdrantUpdateBySeparator) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetKeepSeparator returns the keep-separator flag; safe on a nil receiver.
func (o *DestinationQdrantUpdateBySeparator) GetKeepSeparator() *bool {
	if o == nil {
		return nil
	}
	return o.KeepSeparator
}

// GetMode always reports the "separator" discriminator; the receiver is ignored.
func (o *DestinationQdrantUpdateBySeparator) GetMode() *DestinationQdrantUpdateSchemasProcessingMode {
	return DestinationQdrantUpdateSchemasProcessingModeSeparator.ToPointer()
}

// GetSeparators returns the separator list; safe on a nil receiver.
func (o *DestinationQdrantUpdateBySeparator) GetSeparators() []string {
	if o == nil {
		return nil
	}
	return o.Separators
}
+
// DestinationQdrantUpdateTextSplitterType discriminates which variant of the
// DestinationQdrantUpdateTextSplitter union is populated.
type DestinationQdrantUpdateTextSplitterType string

const (
	DestinationQdrantUpdateTextSplitterTypeDestinationQdrantUpdateBySeparator           DestinationQdrantUpdateTextSplitterType = "destination-qdrant-update_By Separator"
	DestinationQdrantUpdateTextSplitterTypeDestinationQdrantUpdateByMarkdownHeader      DestinationQdrantUpdateTextSplitterType = "destination-qdrant-update_By Markdown header"
	DestinationQdrantUpdateTextSplitterTypeDestinationQdrantUpdateByProgrammingLanguage DestinationQdrantUpdateTextSplitterType = "destination-qdrant-update_By Programming Language"
)

// DestinationQdrantUpdateTextSplitter is a union type: exactly one of the
// pointer fields is expected to be non-nil, with Type naming the active
// variant.
type DestinationQdrantUpdateTextSplitter struct {
	DestinationQdrantUpdateBySeparator           *DestinationQdrantUpdateBySeparator
	DestinationQdrantUpdateByMarkdownHeader      *DestinationQdrantUpdateByMarkdownHeader
	DestinationQdrantUpdateByProgrammingLanguage *DestinationQdrantUpdateByProgrammingLanguage

	Type DestinationQdrantUpdateTextSplitterType
}
+
+func CreateDestinationQdrantUpdateTextSplitterDestinationQdrantUpdateBySeparator(destinationQdrantUpdateBySeparator DestinationQdrantUpdateBySeparator) DestinationQdrantUpdateTextSplitter {
+ typ := DestinationQdrantUpdateTextSplitterTypeDestinationQdrantUpdateBySeparator
+
+ return DestinationQdrantUpdateTextSplitter{
+ DestinationQdrantUpdateBySeparator: &destinationQdrantUpdateBySeparator,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantUpdateTextSplitterDestinationQdrantUpdateByMarkdownHeader(destinationQdrantUpdateByMarkdownHeader DestinationQdrantUpdateByMarkdownHeader) DestinationQdrantUpdateTextSplitter {
+ typ := DestinationQdrantUpdateTextSplitterTypeDestinationQdrantUpdateByMarkdownHeader
+
+ return DestinationQdrantUpdateTextSplitter{
+ DestinationQdrantUpdateByMarkdownHeader: &destinationQdrantUpdateByMarkdownHeader,
+ Type: typ,
+ }
+}
+
+func CreateDestinationQdrantUpdateTextSplitterDestinationQdrantUpdateByProgrammingLanguage(destinationQdrantUpdateByProgrammingLanguage DestinationQdrantUpdateByProgrammingLanguage) DestinationQdrantUpdateTextSplitter {
+ typ := DestinationQdrantUpdateTextSplitterTypeDestinationQdrantUpdateByProgrammingLanguage
+
+ return DestinationQdrantUpdateTextSplitter{
+ DestinationQdrantUpdateByProgrammingLanguage: &destinationQdrantUpdateByProgrammingLanguage,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON resolves the text-splitter union by probing the variants in a
// fixed order: ByMarkdownHeader, then ByProgrammingLanguage, then BySeparator.
// NOTE(review): BySeparator is deliberately probed last — all of its fields
// are optional, so it would otherwise match payloads intended for the other
// variants; confirm against utils' const/unknown-field handling.
func (u *DestinationQdrantUpdateTextSplitter) UnmarshalJSON(data []byte) error {

	destinationQdrantUpdateByMarkdownHeader := new(DestinationQdrantUpdateByMarkdownHeader)
	if err := utils.UnmarshalJSON(data, &destinationQdrantUpdateByMarkdownHeader, "", true, true); err == nil {
		u.DestinationQdrantUpdateByMarkdownHeader = destinationQdrantUpdateByMarkdownHeader
		u.Type = DestinationQdrantUpdateTextSplitterTypeDestinationQdrantUpdateByMarkdownHeader
		return nil
	}

	destinationQdrantUpdateByProgrammingLanguage := new(DestinationQdrantUpdateByProgrammingLanguage)
	if err := utils.UnmarshalJSON(data, &destinationQdrantUpdateByProgrammingLanguage, "", true, true); err == nil {
		u.DestinationQdrantUpdateByProgrammingLanguage = destinationQdrantUpdateByProgrammingLanguage
		u.Type = DestinationQdrantUpdateTextSplitterTypeDestinationQdrantUpdateByProgrammingLanguage
		return nil
	}

	destinationQdrantUpdateBySeparator := new(DestinationQdrantUpdateBySeparator)
	if err := utils.UnmarshalJSON(data, &destinationQdrantUpdateBySeparator, "", true, true); err == nil {
		u.DestinationQdrantUpdateBySeparator = destinationQdrantUpdateBySeparator
		u.Type = DestinationQdrantUpdateTextSplitterTypeDestinationQdrantUpdateBySeparator
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}
+
+func (u DestinationQdrantUpdateTextSplitter) MarshalJSON() ([]byte, error) {
+ if u.DestinationQdrantUpdateBySeparator != nil {
+ return utils.MarshalJSON(u.DestinationQdrantUpdateBySeparator, "", true)
+ }
+
+ if u.DestinationQdrantUpdateByMarkdownHeader != nil {
+ return utils.MarshalJSON(u.DestinationQdrantUpdateByMarkdownHeader, "", true)
+ }
+
+ if u.DestinationQdrantUpdateByProgrammingLanguage != nil {
+ return utils.MarshalJSON(u.DestinationQdrantUpdateByProgrammingLanguage, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// DestinationQdrantUpdateProcessingConfigModel configures how records are
// chunked and which fields become text/metadata before embedding.
type DestinationQdrantUpdateProcessingConfigModel struct {
	// Size of overlap between chunks in tokens to store in vector store to better capture relevant context
	ChunkOverlap *int64 `default:"0" json:"chunk_overlap"`
	// Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
	ChunkSize int64 `json:"chunk_size"`
	// List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
	FieldNameMappings []DestinationQdrantUpdateFieldNameMappingConfigModel `json:"field_name_mappings,omitempty"`
	// List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
	MetadataFields []string `json:"metadata_fields,omitempty"`
	// List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
	TextFields []string `json:"text_fields,omitempty"`
	// Split text fields into chunks based on the specified method.
	TextSplitter *DestinationQdrantUpdateTextSplitter `json:"text_splitter,omitempty"`
}

// MarshalJSON serializes the config through the utils helper, which applies
// the `default` struct tags.
func (d DestinationQdrantUpdateProcessingConfigModel) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON deserializes the config through the utils helper (flag
// semantics are defined by the utils package).
func (d *DestinationQdrantUpdateProcessingConfigModel) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
		return err
	}
	return nil
}

// GetChunkOverlap returns the chunk overlap; safe on a nil receiver.
func (o *DestinationQdrantUpdateProcessingConfigModel) GetChunkOverlap() *int64 {
	if o == nil {
		return nil
	}
	return o.ChunkOverlap
}

// GetChunkSize returns the chunk size; 0 on a nil receiver.
func (o *DestinationQdrantUpdateProcessingConfigModel) GetChunkSize() int64 {
	if o == nil {
		return 0
	}
	return o.ChunkSize
}

// GetFieldNameMappings returns the rename list; safe on a nil receiver.
func (o *DestinationQdrantUpdateProcessingConfigModel) GetFieldNameMappings() []DestinationQdrantUpdateFieldNameMappingConfigModel {
	if o == nil {
		return nil
	}
	return o.FieldNameMappings
}

// GetMetadataFields returns the metadata field list; safe on a nil receiver.
func (o *DestinationQdrantUpdateProcessingConfigModel) GetMetadataFields() []string {
	if o == nil {
		return nil
	}
	return o.MetadataFields
}

// GetTextFields returns the text field list; safe on a nil receiver.
func (o *DestinationQdrantUpdateProcessingConfigModel) GetTextFields() []string {
	if o == nil {
		return nil
	}
	return o.TextFields
}

// GetTextSplitter returns the text splitter config; safe on a nil receiver.
func (o *DestinationQdrantUpdateProcessingConfigModel) GetTextSplitter() *DestinationQdrantUpdateTextSplitter {
	if o == nil {
		return nil
	}
	return o.TextSplitter
}
+
+type DestinationQdrantUpdate struct {
+ // Embedding configuration
+ Embedding DestinationQdrantUpdateEmbedding `json:"embedding"`
+ // Indexing configuration
+ Indexing DestinationQdrantUpdateIndexing `json:"indexing"`
+ Processing DestinationQdrantUpdateProcessingConfigModel `json:"processing"`
+}
+
+func (o *DestinationQdrantUpdate) GetEmbedding() DestinationQdrantUpdateEmbedding {
+ if o == nil {
+ return DestinationQdrantUpdateEmbedding{}
+ }
+ return o.Embedding
+}
+
+func (o *DestinationQdrantUpdate) GetIndexing() DestinationQdrantUpdateIndexing {
+ if o == nil {
+ return DestinationQdrantUpdateIndexing{}
+ }
+ return o.Indexing
+}
+
+func (o *DestinationQdrantUpdate) GetProcessing() DestinationQdrantUpdateProcessingConfigModel {
+ if o == nil {
+ return DestinationQdrantUpdateProcessingConfigModel{}
+ }
+ return o.Processing
+}
diff --git a/internal/sdk/pkg/models/shared/destinationredis.go b/internal/sdk/pkg/models/shared/destinationredis.go
old mode 100755
new mode 100644
index 79d4ea87a..d6be61888
--- a/internal/sdk/pkg/models/shared/destinationredis.go
+++ b/internal/sdk/pkg/models/shared/destinationredis.go
@@ -3,10 +3,10 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// DestinationRedisCacheType - Redis cache type to store data in.
@@ -34,56 +34,56 @@ func (e *DestinationRedisCacheType) UnmarshalJSON(data []byte) error {
}
}
-type DestinationRedisRedis string
+type Redis string
const (
- DestinationRedisRedisRedis DestinationRedisRedis = "redis"
+ RedisRedis Redis = "redis"
)
-func (e DestinationRedisRedis) ToPointer() *DestinationRedisRedis {
+func (e Redis) ToPointer() *Redis {
return &e
}
-func (e *DestinationRedisRedis) UnmarshalJSON(data []byte) error {
+func (e *Redis) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "redis":
- *e = DestinationRedisRedis(v)
+ *e = Redis(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisRedis: %v", v)
+ return fmt.Errorf("invalid value for Redis: %v", v)
}
}
-type DestinationRedisSSLModesVerifyFullMode string
+type DestinationRedisSchemasMode string
const (
- DestinationRedisSSLModesVerifyFullModeVerifyFull DestinationRedisSSLModesVerifyFullMode = "verify-full"
+ DestinationRedisSchemasModeVerifyFull DestinationRedisSchemasMode = "verify-full"
)
-func (e DestinationRedisSSLModesVerifyFullMode) ToPointer() *DestinationRedisSSLModesVerifyFullMode {
+func (e DestinationRedisSchemasMode) ToPointer() *DestinationRedisSchemasMode {
return &e
}
-func (e *DestinationRedisSSLModesVerifyFullMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedisSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-full":
- *e = DestinationRedisSSLModesVerifyFullMode(v)
+ *e = DestinationRedisSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisSSLModesVerifyFullMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedisSchemasMode: %v", v)
}
}
-// DestinationRedisSSLModesVerifyFull - Verify-full SSL mode.
-type DestinationRedisSSLModesVerifyFull struct {
+// DestinationRedisVerifyFull - Verify-full SSL mode.
+type DestinationRedisVerifyFull struct {
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -91,89 +91,142 @@ type DestinationRedisSSLModesVerifyFull struct {
// Client key
ClientKey string `json:"client_key"`
// Password for keystorage. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode DestinationRedisSSLModesVerifyFullMode `json:"mode"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode *DestinationRedisSchemasMode `const:"verify-full" json:"mode"`
}
-type DestinationRedisSSLModesDisableMode string
// MarshalJSON serializes through the utils helper so the const-tagged mode is
// emitted.
func (d DestinationRedisVerifyFull) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON deserializes through the utils helper (flag semantics are
// defined by the utils package).
func (d *DestinationRedisVerifyFull) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetCaCertificate returns the CA certificate; "" on a nil receiver.
func (o *DestinationRedisVerifyFull) GetCaCertificate() string {
	if o == nil {
		return ""
	}
	return o.CaCertificate
}

// GetClientCertificate returns the client certificate; "" on a nil receiver.
func (o *DestinationRedisVerifyFull) GetClientCertificate() string {
	if o == nil {
		return ""
	}
	return o.ClientCertificate
}

// GetClientKey returns the client key; "" on a nil receiver.
func (o *DestinationRedisVerifyFull) GetClientKey() string {
	if o == nil {
		return ""
	}
	return o.ClientKey
}

// GetClientKeyPassword returns the key-storage password; safe on a nil
// receiver.
func (o *DestinationRedisVerifyFull) GetClientKeyPassword() *string {
	if o == nil {
		return nil
	}
	return o.ClientKeyPassword
}

// GetMode always reports the "verify-full" discriminator; the receiver is
// ignored.
func (o *DestinationRedisVerifyFull) GetMode() *DestinationRedisSchemasMode {
	return DestinationRedisSchemasModeVerifyFull.ToPointer()
}
+
+type DestinationRedisMode string
const (
- DestinationRedisSSLModesDisableModeDisable DestinationRedisSSLModesDisableMode = "disable"
+ DestinationRedisModeDisable DestinationRedisMode = "disable"
)
-func (e DestinationRedisSSLModesDisableMode) ToPointer() *DestinationRedisSSLModesDisableMode {
+func (e DestinationRedisMode) ToPointer() *DestinationRedisMode {
return &e
}
-func (e *DestinationRedisSSLModesDisableMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedisMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "disable":
- *e = DestinationRedisSSLModesDisableMode(v)
+ *e = DestinationRedisMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisSSLModesDisableMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedisMode: %v", v)
}
}
-// DestinationRedisSSLModesDisable - Disable SSL.
-type DestinationRedisSSLModesDisable struct {
- Mode DestinationRedisSSLModesDisableMode `json:"mode"`
+// DestinationRedisDisable - Disable SSL.
+type DestinationRedisDisable struct {
+ mode *DestinationRedisMode `const:"disable" json:"mode"`
+}
+
// MarshalJSON serializes through the utils helper so the const-tagged mode is
// emitted.
func (d DestinationRedisDisable) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON deserializes through the utils helper (flag semantics are
// defined by the utils package).
func (d *DestinationRedisDisable) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetMode always reports the "disable" discriminator; the receiver is ignored.
func (o *DestinationRedisDisable) GetMode() *DestinationRedisMode {
	return DestinationRedisModeDisable.ToPointer()
}
type DestinationRedisSSLModesType string
const (
- DestinationRedisSSLModesTypeDestinationRedisSSLModesDisable DestinationRedisSSLModesType = "destination-redis_SSL Modes_disable"
- DestinationRedisSSLModesTypeDestinationRedisSSLModesVerifyFull DestinationRedisSSLModesType = "destination-redis_SSL Modes_verify-full"
+ DestinationRedisSSLModesTypeDestinationRedisDisable DestinationRedisSSLModesType = "destination-redis_disable"
+ DestinationRedisSSLModesTypeDestinationRedisVerifyFull DestinationRedisSSLModesType = "destination-redis_verify-full"
)
type DestinationRedisSSLModes struct {
- DestinationRedisSSLModesDisable *DestinationRedisSSLModesDisable
- DestinationRedisSSLModesVerifyFull *DestinationRedisSSLModesVerifyFull
+ DestinationRedisDisable *DestinationRedisDisable
+ DestinationRedisVerifyFull *DestinationRedisVerifyFull
Type DestinationRedisSSLModesType
}
-func CreateDestinationRedisSSLModesDestinationRedisSSLModesDisable(destinationRedisSSLModesDisable DestinationRedisSSLModesDisable) DestinationRedisSSLModes {
- typ := DestinationRedisSSLModesTypeDestinationRedisSSLModesDisable
+func CreateDestinationRedisSSLModesDestinationRedisDisable(destinationRedisDisable DestinationRedisDisable) DestinationRedisSSLModes {
+ typ := DestinationRedisSSLModesTypeDestinationRedisDisable
return DestinationRedisSSLModes{
- DestinationRedisSSLModesDisable: &destinationRedisSSLModesDisable,
- Type: typ,
+ DestinationRedisDisable: &destinationRedisDisable,
+ Type: typ,
}
}
-func CreateDestinationRedisSSLModesDestinationRedisSSLModesVerifyFull(destinationRedisSSLModesVerifyFull DestinationRedisSSLModesVerifyFull) DestinationRedisSSLModes {
- typ := DestinationRedisSSLModesTypeDestinationRedisSSLModesVerifyFull
+func CreateDestinationRedisSSLModesDestinationRedisVerifyFull(destinationRedisVerifyFull DestinationRedisVerifyFull) DestinationRedisSSLModes {
+ typ := DestinationRedisSSLModesTypeDestinationRedisVerifyFull
return DestinationRedisSSLModes{
- DestinationRedisSSLModesVerifyFull: &destinationRedisSSLModesVerifyFull,
- Type: typ,
+ DestinationRedisVerifyFull: &destinationRedisVerifyFull,
+ Type: typ,
}
}
func (u *DestinationRedisSSLModes) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationRedisSSLModesDisable := new(DestinationRedisSSLModesDisable)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedisSSLModesDisable); err == nil {
- u.DestinationRedisSSLModesDisable = destinationRedisSSLModesDisable
- u.Type = DestinationRedisSSLModesTypeDestinationRedisSSLModesDisable
+
+ destinationRedisDisable := new(DestinationRedisDisable)
+ if err := utils.UnmarshalJSON(data, &destinationRedisDisable, "", true, true); err == nil {
+ u.DestinationRedisDisable = destinationRedisDisable
+ u.Type = DestinationRedisSSLModesTypeDestinationRedisDisable
return nil
}
- destinationRedisSSLModesVerifyFull := new(DestinationRedisSSLModesVerifyFull)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedisSSLModesVerifyFull); err == nil {
- u.DestinationRedisSSLModesVerifyFull = destinationRedisSSLModesVerifyFull
- u.Type = DestinationRedisSSLModesTypeDestinationRedisSSLModesVerifyFull
+ destinationRedisVerifyFull := new(DestinationRedisVerifyFull)
+ if err := utils.UnmarshalJSON(data, &destinationRedisVerifyFull, "", true, true); err == nil {
+ u.DestinationRedisVerifyFull = destinationRedisVerifyFull
+ u.Type = DestinationRedisSSLModesTypeDestinationRedisVerifyFull
return nil
}
@@ -181,196 +234,290 @@ func (u *DestinationRedisSSLModes) UnmarshalJSON(data []byte) error {
}
func (u DestinationRedisSSLModes) MarshalJSON() ([]byte, error) {
- if u.DestinationRedisSSLModesDisable != nil {
- return json.Marshal(u.DestinationRedisSSLModesDisable)
+ if u.DestinationRedisDisable != nil {
+ return utils.MarshalJSON(u.DestinationRedisDisable, "", true)
}
- if u.DestinationRedisSSLModesVerifyFull != nil {
- return json.Marshal(u.DestinationRedisSSLModesVerifyFull)
+ if u.DestinationRedisVerifyFull != nil {
+ return utils.MarshalJSON(u.DestinationRedisVerifyFull, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationRedisSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationRedisSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationRedisSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationRedisSchemasTunnelMethodTunnelMethod string
const (
- DestinationRedisSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationRedisSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationRedisSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationRedisSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationRedisSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationRedisSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationRedisSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationRedisSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationRedisSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedisSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationRedisSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationRedisSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedisSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationRedisSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedisSSHTunnelMethodPasswordAuthentication struct {
+// DestinationRedisPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedisPasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationRedisSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedisSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationRedisSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationRedisSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationRedisPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedisPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedisPasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationRedisPasswordAuthentication) GetTunnelMethod() DestinationRedisSchemasTunnelMethodTunnelMethod {
+ return DestinationRedisSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationRedisPasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationRedisPasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationRedisPasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationRedisSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationRedisSchemasTunnelMethod string
const (
- DestinationRedisSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationRedisSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationRedisSchemasTunnelMethodSSHKeyAuth DestinationRedisSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationRedisSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationRedisSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationRedisSchemasTunnelMethod) ToPointer() *DestinationRedisSchemasTunnelMethod {
return &e
}
-func (e *DestinationRedisSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedisSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationRedisSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationRedisSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedisSchemasTunnelMethod: %v", v)
}
}
-// DestinationRedisSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedisSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationRedisSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedisSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationRedisSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedisSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationRedisSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationRedisSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationRedisSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedisSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedisSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationRedisSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationRedisSSHKeyAuthentication) GetTunnelMethod() DestinationRedisSchemasTunnelMethod {
+ return DestinationRedisSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationRedisSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationRedisSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationRedisTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationRedisTunnelMethod string
const (
- DestinationRedisSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationRedisSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationRedisTunnelMethodNoTunnel DestinationRedisTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationRedisSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationRedisSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationRedisTunnelMethod) ToPointer() *DestinationRedisTunnelMethod {
return &e
}
-func (e *DestinationRedisSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedisTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationRedisSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationRedisTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedisTunnelMethod: %v", v)
}
}
-// DestinationRedisSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedisSSHTunnelMethodNoTunnel struct {
+// DestinationRedisNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedisNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationRedisSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedisTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationRedisNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedisNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedisNoTunnel) GetTunnelMethod() DestinationRedisTunnelMethod {
+ return DestinationRedisTunnelMethodNoTunnel
}
type DestinationRedisSSHTunnelMethodType string
const (
- DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHTunnelMethodNoTunnel DestinationRedisSSHTunnelMethodType = "destination-redis_SSH Tunnel Method_No Tunnel"
- DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHTunnelMethodSSHKeyAuthentication DestinationRedisSSHTunnelMethodType = "destination-redis_SSH Tunnel Method_SSH Key Authentication"
- DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHTunnelMethodPasswordAuthentication DestinationRedisSSHTunnelMethodType = "destination-redis_SSH Tunnel Method_Password Authentication"
+ DestinationRedisSSHTunnelMethodTypeDestinationRedisNoTunnel DestinationRedisSSHTunnelMethodType = "destination-redis_No Tunnel"
+ DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHKeyAuthentication DestinationRedisSSHTunnelMethodType = "destination-redis_SSH Key Authentication"
+ DestinationRedisSSHTunnelMethodTypeDestinationRedisPasswordAuthentication DestinationRedisSSHTunnelMethodType = "destination-redis_Password Authentication"
)
type DestinationRedisSSHTunnelMethod struct {
- DestinationRedisSSHTunnelMethodNoTunnel *DestinationRedisSSHTunnelMethodNoTunnel
- DestinationRedisSSHTunnelMethodSSHKeyAuthentication *DestinationRedisSSHTunnelMethodSSHKeyAuthentication
- DestinationRedisSSHTunnelMethodPasswordAuthentication *DestinationRedisSSHTunnelMethodPasswordAuthentication
+ DestinationRedisNoTunnel *DestinationRedisNoTunnel
+ DestinationRedisSSHKeyAuthentication *DestinationRedisSSHKeyAuthentication
+ DestinationRedisPasswordAuthentication *DestinationRedisPasswordAuthentication
Type DestinationRedisSSHTunnelMethodType
}
-func CreateDestinationRedisSSHTunnelMethodDestinationRedisSSHTunnelMethodNoTunnel(destinationRedisSSHTunnelMethodNoTunnel DestinationRedisSSHTunnelMethodNoTunnel) DestinationRedisSSHTunnelMethod {
- typ := DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHTunnelMethodNoTunnel
+func CreateDestinationRedisSSHTunnelMethodDestinationRedisNoTunnel(destinationRedisNoTunnel DestinationRedisNoTunnel) DestinationRedisSSHTunnelMethod {
+ typ := DestinationRedisSSHTunnelMethodTypeDestinationRedisNoTunnel
return DestinationRedisSSHTunnelMethod{
- DestinationRedisSSHTunnelMethodNoTunnel: &destinationRedisSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationRedisNoTunnel: &destinationRedisNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationRedisSSHTunnelMethodDestinationRedisSSHTunnelMethodSSHKeyAuthentication(destinationRedisSSHTunnelMethodSSHKeyAuthentication DestinationRedisSSHTunnelMethodSSHKeyAuthentication) DestinationRedisSSHTunnelMethod {
- typ := DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationRedisSSHTunnelMethodDestinationRedisSSHKeyAuthentication(destinationRedisSSHKeyAuthentication DestinationRedisSSHKeyAuthentication) DestinationRedisSSHTunnelMethod {
+ typ := DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHKeyAuthentication
return DestinationRedisSSHTunnelMethod{
- DestinationRedisSSHTunnelMethodSSHKeyAuthentication: &destinationRedisSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ DestinationRedisSSHKeyAuthentication: &destinationRedisSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateDestinationRedisSSHTunnelMethodDestinationRedisSSHTunnelMethodPasswordAuthentication(destinationRedisSSHTunnelMethodPasswordAuthentication DestinationRedisSSHTunnelMethodPasswordAuthentication) DestinationRedisSSHTunnelMethod {
- typ := DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHTunnelMethodPasswordAuthentication
+func CreateDestinationRedisSSHTunnelMethodDestinationRedisPasswordAuthentication(destinationRedisPasswordAuthentication DestinationRedisPasswordAuthentication) DestinationRedisSSHTunnelMethod {
+ typ := DestinationRedisSSHTunnelMethodTypeDestinationRedisPasswordAuthentication
return DestinationRedisSSHTunnelMethod{
- DestinationRedisSSHTunnelMethodPasswordAuthentication: &destinationRedisSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ DestinationRedisPasswordAuthentication: &destinationRedisPasswordAuthentication,
+ Type: typ,
}
}
func (u *DestinationRedisSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationRedisSSHTunnelMethodNoTunnel := new(DestinationRedisSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedisSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationRedisSSHTunnelMethodNoTunnel = destinationRedisSSHTunnelMethodNoTunnel
- u.Type = DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHTunnelMethodNoTunnel
+
+ destinationRedisNoTunnel := new(DestinationRedisNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationRedisNoTunnel, "", true, true); err == nil {
+ u.DestinationRedisNoTunnel = destinationRedisNoTunnel
+ u.Type = DestinationRedisSSHTunnelMethodTypeDestinationRedisNoTunnel
return nil
}
- destinationRedisSSHTunnelMethodSSHKeyAuthentication := new(DestinationRedisSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedisSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationRedisSSHTunnelMethodSSHKeyAuthentication = destinationRedisSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHTunnelMethodSSHKeyAuthentication
+ destinationRedisSSHKeyAuthentication := new(DestinationRedisSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationRedisSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationRedisSSHKeyAuthentication = destinationRedisSSHKeyAuthentication
+ u.Type = DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHKeyAuthentication
return nil
}
- destinationRedisSSHTunnelMethodPasswordAuthentication := new(DestinationRedisSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedisSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationRedisSSHTunnelMethodPasswordAuthentication = destinationRedisSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationRedisSSHTunnelMethodTypeDestinationRedisSSHTunnelMethodPasswordAuthentication
+ destinationRedisPasswordAuthentication := new(DestinationRedisPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationRedisPasswordAuthentication, "", true, true); err == nil {
+ u.DestinationRedisPasswordAuthentication = destinationRedisPasswordAuthentication
+ u.Type = DestinationRedisSSHTunnelMethodTypeDestinationRedisPasswordAuthentication
return nil
}
@@ -378,33 +525,33 @@ func (u *DestinationRedisSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationRedisSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationRedisSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationRedisSSHTunnelMethodNoTunnel)
+ if u.DestinationRedisNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationRedisNoTunnel, "", true)
}
- if u.DestinationRedisSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationRedisSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationRedisSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationRedisSSHKeyAuthentication, "", true)
}
- if u.DestinationRedisSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationRedisSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationRedisPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationRedisPasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationRedis struct {
// Redis cache type to store data in.
- CacheType DestinationRedisCacheType `json:"cache_type"`
- DestinationType DestinationRedisRedis `json:"destinationType"`
+ CacheType *DestinationRedisCacheType `default:"hash" json:"cache_type"`
+ destinationType Redis `const:"redis" json:"destinationType"`
// Redis host to connect to.
Host string `json:"host"`
// Password associated with Redis.
Password *string `json:"password,omitempty"`
// Port of Redis.
- Port int64 `json:"port"`
+ Port *int64 `default:"6379" json:"port"`
// Indicates whether SSL encryption protocol will be used to connect to Redis. It is recommended to use SSL connection if possible.
- Ssl *bool `json:"ssl,omitempty"`
+ Ssl *bool `default:"false" json:"ssl"`
// SSL connection modes.
// verify-full - This is the most secure mode. Always require encryption and verifies the identity of the source database server
SslMode *DestinationRedisSSLModes `json:"ssl_mode,omitempty"`
@@ -413,3 +560,74 @@ type DestinationRedis struct {
// Username associated with Redis.
Username string `json:"username"`
}
+
+func (d DestinationRedis) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedis) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedis) GetCacheType() *DestinationRedisCacheType {
+ if o == nil {
+ return nil
+ }
+ return o.CacheType
+}
+
+func (o *DestinationRedis) GetDestinationType() Redis {
+ return RedisRedis
+}
+
+func (o *DestinationRedis) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationRedis) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationRedis) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationRedis) GetSsl() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Ssl
+}
+
+func (o *DestinationRedis) GetSslMode() *DestinationRedisSSLModes {
+ if o == nil {
+ return nil
+ }
+ return o.SslMode
+}
+
+func (o *DestinationRedis) GetTunnelMethod() *DestinationRedisSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationRedis) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationrediscreaterequest.go b/internal/sdk/pkg/models/shared/destinationrediscreaterequest.go
old mode 100755
new mode 100644
index c50521773..bbded6b84
--- a/internal/sdk/pkg/models/shared/destinationrediscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationrediscreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationRedisCreateRequest struct {
Configuration DestinationRedis `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationRedisCreateRequest) GetConfiguration() DestinationRedis {
+ if o == nil {
+ return DestinationRedis{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationRedisCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationRedisCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationRedisCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationredisputrequest.go b/internal/sdk/pkg/models/shared/destinationredisputrequest.go
old mode 100755
new mode 100644
index e6efe3ddb..e86fbf189
--- a/internal/sdk/pkg/models/shared/destinationredisputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationredisputrequest.go
@@ -7,3 +7,24 @@ type DestinationRedisPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationRedisPutRequest) GetConfiguration() DestinationRedisUpdate {
+ if o == nil {
+ return DestinationRedisUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationRedisPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationRedisPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationredisupdate.go b/internal/sdk/pkg/models/shared/destinationredisupdate.go
old mode 100755
new mode 100644
index 62c60f56e..1f1109f4f
--- a/internal/sdk/pkg/models/shared/destinationredisupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationredisupdate.go
@@ -3,63 +3,63 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// DestinationRedisUpdateCacheType - Redis cache type to store data in.
-type DestinationRedisUpdateCacheType string
+// CacheType - Redis cache type to store data in.
+type CacheType string
const (
- DestinationRedisUpdateCacheTypeHash DestinationRedisUpdateCacheType = "hash"
+ CacheTypeHash CacheType = "hash"
)
-func (e DestinationRedisUpdateCacheType) ToPointer() *DestinationRedisUpdateCacheType {
+func (e CacheType) ToPointer() *CacheType {
return &e
}
-func (e *DestinationRedisUpdateCacheType) UnmarshalJSON(data []byte) error {
+func (e *CacheType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "hash":
- *e = DestinationRedisUpdateCacheType(v)
+ *e = CacheType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisUpdateCacheType: %v", v)
+ return fmt.Errorf("invalid value for CacheType: %v", v)
}
}
-type DestinationRedisUpdateSSLModesVerifyFullMode string
+type DestinationRedisUpdateSchemasMode string
const (
- DestinationRedisUpdateSSLModesVerifyFullModeVerifyFull DestinationRedisUpdateSSLModesVerifyFullMode = "verify-full"
+ DestinationRedisUpdateSchemasModeVerifyFull DestinationRedisUpdateSchemasMode = "verify-full"
)
-func (e DestinationRedisUpdateSSLModesVerifyFullMode) ToPointer() *DestinationRedisUpdateSSLModesVerifyFullMode {
+func (e DestinationRedisUpdateSchemasMode) ToPointer() *DestinationRedisUpdateSchemasMode {
return &e
}
-func (e *DestinationRedisUpdateSSLModesVerifyFullMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedisUpdateSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-full":
- *e = DestinationRedisUpdateSSLModesVerifyFullMode(v)
+ *e = DestinationRedisUpdateSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisUpdateSSLModesVerifyFullMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedisUpdateSchemasMode: %v", v)
}
}
-// DestinationRedisUpdateSSLModesVerifyFull - Verify-full SSL mode.
-type DestinationRedisUpdateSSLModesVerifyFull struct {
+// DestinationRedisUpdateVerifyFull - Verify-full SSL mode.
+type DestinationRedisUpdateVerifyFull struct {
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -67,89 +67,142 @@ type DestinationRedisUpdateSSLModesVerifyFull struct {
// Client key
ClientKey string `json:"client_key"`
// Password for keystorage. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode DestinationRedisUpdateSSLModesVerifyFullMode `json:"mode"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode *DestinationRedisUpdateSchemasMode `const:"verify-full" json:"mode"`
}
-type DestinationRedisUpdateSSLModesDisableMode string
+func (d DestinationRedisUpdateVerifyFull) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedisUpdateVerifyFull) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedisUpdateVerifyFull) GetCaCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.CaCertificate
+}
+
+func (o *DestinationRedisUpdateVerifyFull) GetClientCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientCertificate
+}
+
+func (o *DestinationRedisUpdateVerifyFull) GetClientKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientKey
+}
+
+func (o *DestinationRedisUpdateVerifyFull) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKeyPassword
+}
+
+func (o *DestinationRedisUpdateVerifyFull) GetMode() *DestinationRedisUpdateSchemasMode {
+ return DestinationRedisUpdateSchemasModeVerifyFull.ToPointer()
+}
+
+type DestinationRedisUpdateMode string
const (
- DestinationRedisUpdateSSLModesDisableModeDisable DestinationRedisUpdateSSLModesDisableMode = "disable"
+ DestinationRedisUpdateModeDisable DestinationRedisUpdateMode = "disable"
)
-func (e DestinationRedisUpdateSSLModesDisableMode) ToPointer() *DestinationRedisUpdateSSLModesDisableMode {
+func (e DestinationRedisUpdateMode) ToPointer() *DestinationRedisUpdateMode {
return &e
}
-func (e *DestinationRedisUpdateSSLModesDisableMode) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedisUpdateMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "disable":
- *e = DestinationRedisUpdateSSLModesDisableMode(v)
+ *e = DestinationRedisUpdateMode(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisUpdateSSLModesDisableMode: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedisUpdateMode: %v", v)
}
}
-// DestinationRedisUpdateSSLModesDisable - Disable SSL.
-type DestinationRedisUpdateSSLModesDisable struct {
- Mode DestinationRedisUpdateSSLModesDisableMode `json:"mode"`
+// DestinationRedisUpdateDisable - Disable SSL.
+type DestinationRedisUpdateDisable struct {
+ mode *DestinationRedisUpdateMode `const:"disable" json:"mode"`
+}
+
+func (d DestinationRedisUpdateDisable) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedisUpdateDisable) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedisUpdateDisable) GetMode() *DestinationRedisUpdateMode {
+ return DestinationRedisUpdateModeDisable.ToPointer()
}
type DestinationRedisUpdateSSLModesType string
const (
- DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateSSLModesDisable DestinationRedisUpdateSSLModesType = "destination-redis-update_SSL Modes_disable"
- DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateSSLModesVerifyFull DestinationRedisUpdateSSLModesType = "destination-redis-update_SSL Modes_verify-full"
+ DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateDisable DestinationRedisUpdateSSLModesType = "destination-redis-update_disable"
+ DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateVerifyFull DestinationRedisUpdateSSLModesType = "destination-redis-update_verify-full"
)
type DestinationRedisUpdateSSLModes struct {
- DestinationRedisUpdateSSLModesDisable *DestinationRedisUpdateSSLModesDisable
- DestinationRedisUpdateSSLModesVerifyFull *DestinationRedisUpdateSSLModesVerifyFull
+ DestinationRedisUpdateDisable *DestinationRedisUpdateDisable
+ DestinationRedisUpdateVerifyFull *DestinationRedisUpdateVerifyFull
Type DestinationRedisUpdateSSLModesType
}
-func CreateDestinationRedisUpdateSSLModesDestinationRedisUpdateSSLModesDisable(destinationRedisUpdateSSLModesDisable DestinationRedisUpdateSSLModesDisable) DestinationRedisUpdateSSLModes {
- typ := DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateSSLModesDisable
+func CreateDestinationRedisUpdateSSLModesDestinationRedisUpdateDisable(destinationRedisUpdateDisable DestinationRedisUpdateDisable) DestinationRedisUpdateSSLModes {
+ typ := DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateDisable
return DestinationRedisUpdateSSLModes{
- DestinationRedisUpdateSSLModesDisable: &destinationRedisUpdateSSLModesDisable,
- Type: typ,
+ DestinationRedisUpdateDisable: &destinationRedisUpdateDisable,
+ Type: typ,
}
}
-func CreateDestinationRedisUpdateSSLModesDestinationRedisUpdateSSLModesVerifyFull(destinationRedisUpdateSSLModesVerifyFull DestinationRedisUpdateSSLModesVerifyFull) DestinationRedisUpdateSSLModes {
- typ := DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateSSLModesVerifyFull
+func CreateDestinationRedisUpdateSSLModesDestinationRedisUpdateVerifyFull(destinationRedisUpdateVerifyFull DestinationRedisUpdateVerifyFull) DestinationRedisUpdateSSLModes {
+ typ := DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateVerifyFull
return DestinationRedisUpdateSSLModes{
- DestinationRedisUpdateSSLModesVerifyFull: &destinationRedisUpdateSSLModesVerifyFull,
- Type: typ,
+ DestinationRedisUpdateVerifyFull: &destinationRedisUpdateVerifyFull,
+ Type: typ,
}
}
func (u *DestinationRedisUpdateSSLModes) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationRedisUpdateSSLModesDisable := new(DestinationRedisUpdateSSLModesDisable)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedisUpdateSSLModesDisable); err == nil {
- u.DestinationRedisUpdateSSLModesDisable = destinationRedisUpdateSSLModesDisable
- u.Type = DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateSSLModesDisable
+
+ destinationRedisUpdateDisable := new(DestinationRedisUpdateDisable)
+ if err := utils.UnmarshalJSON(data, &destinationRedisUpdateDisable, "", true, true); err == nil {
+ u.DestinationRedisUpdateDisable = destinationRedisUpdateDisable
+ u.Type = DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateDisable
return nil
}
- destinationRedisUpdateSSLModesVerifyFull := new(DestinationRedisUpdateSSLModesVerifyFull)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedisUpdateSSLModesVerifyFull); err == nil {
- u.DestinationRedisUpdateSSLModesVerifyFull = destinationRedisUpdateSSLModesVerifyFull
- u.Type = DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateSSLModesVerifyFull
+ destinationRedisUpdateVerifyFull := new(DestinationRedisUpdateVerifyFull)
+ if err := utils.UnmarshalJSON(data, &destinationRedisUpdateVerifyFull, "", true, true); err == nil {
+ u.DestinationRedisUpdateVerifyFull = destinationRedisUpdateVerifyFull
+ u.Type = DestinationRedisUpdateSSLModesTypeDestinationRedisUpdateVerifyFull
return nil
}
@@ -157,196 +210,290 @@ func (u *DestinationRedisUpdateSSLModes) UnmarshalJSON(data []byte) error {
}
func (u DestinationRedisUpdateSSLModes) MarshalJSON() ([]byte, error) {
- if u.DestinationRedisUpdateSSLModesDisable != nil {
- return json.Marshal(u.DestinationRedisUpdateSSLModesDisable)
+ if u.DestinationRedisUpdateDisable != nil {
+ return utils.MarshalJSON(u.DestinationRedisUpdateDisable, "", true)
}
- if u.DestinationRedisUpdateSSLModesVerifyFull != nil {
- return json.Marshal(u.DestinationRedisUpdateSSLModesVerifyFull)
+ if u.DestinationRedisUpdateVerifyFull != nil {
+ return utils.MarshalJSON(u.DestinationRedisUpdateVerifyFull, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationRedisUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationRedisUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationRedisUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationRedisUpdateSchemasTunnelMethodTunnelMethod string
const (
- DestinationRedisUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationRedisUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationRedisUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationRedisUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationRedisUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationRedisUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationRedisUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationRedisUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationRedisUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedisUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationRedisUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationRedisUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedisUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication struct {
+// DestinationRedisUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedisUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationRedisUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedisUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationRedisUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedisUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedisUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationRedisUpdatePasswordAuthentication) GetTunnelMethod() DestinationRedisUpdateSchemasTunnelMethodTunnelMethod {
+ return DestinationRedisUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationRedisUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationRedisUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationRedisUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationRedisUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationRedisUpdateSchemasTunnelMethod string
const (
- DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationRedisUpdateSchemasTunnelMethodSSHKeyAuth DestinationRedisUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationRedisUpdateSchemasTunnelMethod) ToPointer() *DestinationRedisUpdateSchemasTunnelMethod {
return &e
}
-func (e *DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedisUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationRedisUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedisUpdateSchemasTunnelMethod: %v", v)
}
}
-// DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationRedisUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedisUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedisUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationRedisUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationRedisUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationRedisUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedisUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedisUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationRedisUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationRedisUpdateSSHKeyAuthentication) GetTunnelMethod() DestinationRedisUpdateSchemasTunnelMethod {
+ return DestinationRedisUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationRedisUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationRedisUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationRedisUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationRedisUpdateTunnelMethod string
const (
- DestinationRedisUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationRedisUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationRedisUpdateTunnelMethodNoTunnel DestinationRedisUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationRedisUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationRedisUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationRedisUpdateTunnelMethod) ToPointer() *DestinationRedisUpdateTunnelMethod {
return &e
}
-func (e *DestinationRedisUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedisUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationRedisUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationRedisUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedisUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedisUpdateTunnelMethod: %v", v)
}
}
-// DestinationRedisUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedisUpdateSSHTunnelMethodNoTunnel struct {
+// DestinationRedisUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedisUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationRedisUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedisUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationRedisUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedisUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedisUpdateNoTunnel) GetTunnelMethod() DestinationRedisUpdateTunnelMethod {
+ return DestinationRedisUpdateTunnelMethodNoTunnel
}
type DestinationRedisUpdateSSHTunnelMethodType string
const (
- DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHTunnelMethodNoTunnel DestinationRedisUpdateSSHTunnelMethodType = "destination-redis-update_SSH Tunnel Method_No Tunnel"
- DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication DestinationRedisUpdateSSHTunnelMethodType = "destination-redis-update_SSH Tunnel Method_SSH Key Authentication"
- DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHTunnelMethodPasswordAuthentication DestinationRedisUpdateSSHTunnelMethodType = "destination-redis-update_SSH Tunnel Method_Password Authentication"
+ DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateNoTunnel DestinationRedisUpdateSSHTunnelMethodType = "destination-redis-update_No Tunnel"
+ DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHKeyAuthentication DestinationRedisUpdateSSHTunnelMethodType = "destination-redis-update_SSH Key Authentication"
+ DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdatePasswordAuthentication DestinationRedisUpdateSSHTunnelMethodType = "destination-redis-update_Password Authentication"
)
type DestinationRedisUpdateSSHTunnelMethod struct {
- DestinationRedisUpdateSSHTunnelMethodNoTunnel *DestinationRedisUpdateSSHTunnelMethodNoTunnel
- DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication
- DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication *DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication
+ DestinationRedisUpdateNoTunnel *DestinationRedisUpdateNoTunnel
+ DestinationRedisUpdateSSHKeyAuthentication *DestinationRedisUpdateSSHKeyAuthentication
+ DestinationRedisUpdatePasswordAuthentication *DestinationRedisUpdatePasswordAuthentication
Type DestinationRedisUpdateSSHTunnelMethodType
}
-func CreateDestinationRedisUpdateSSHTunnelMethodDestinationRedisUpdateSSHTunnelMethodNoTunnel(destinationRedisUpdateSSHTunnelMethodNoTunnel DestinationRedisUpdateSSHTunnelMethodNoTunnel) DestinationRedisUpdateSSHTunnelMethod {
- typ := DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHTunnelMethodNoTunnel
+func CreateDestinationRedisUpdateSSHTunnelMethodDestinationRedisUpdateNoTunnel(destinationRedisUpdateNoTunnel DestinationRedisUpdateNoTunnel) DestinationRedisUpdateSSHTunnelMethod {
+ typ := DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateNoTunnel
return DestinationRedisUpdateSSHTunnelMethod{
- DestinationRedisUpdateSSHTunnelMethodNoTunnel: &destinationRedisUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationRedisUpdateNoTunnel: &destinationRedisUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationRedisUpdateSSHTunnelMethodDestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication(destinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication) DestinationRedisUpdateSSHTunnelMethod {
- typ := DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationRedisUpdateSSHTunnelMethodDestinationRedisUpdateSSHKeyAuthentication(destinationRedisUpdateSSHKeyAuthentication DestinationRedisUpdateSSHKeyAuthentication) DestinationRedisUpdateSSHTunnelMethod {
+ typ := DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHKeyAuthentication
return DestinationRedisUpdateSSHTunnelMethod{
- DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication: &destinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationRedisUpdateSSHKeyAuthentication: &destinationRedisUpdateSSHKeyAuthentication,
Type: typ,
}
}
-func CreateDestinationRedisUpdateSSHTunnelMethodDestinationRedisUpdateSSHTunnelMethodPasswordAuthentication(destinationRedisUpdateSSHTunnelMethodPasswordAuthentication DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication) DestinationRedisUpdateSSHTunnelMethod {
- typ := DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHTunnelMethodPasswordAuthentication
+func CreateDestinationRedisUpdateSSHTunnelMethodDestinationRedisUpdatePasswordAuthentication(destinationRedisUpdatePasswordAuthentication DestinationRedisUpdatePasswordAuthentication) DestinationRedisUpdateSSHTunnelMethod {
+ typ := DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdatePasswordAuthentication
return DestinationRedisUpdateSSHTunnelMethod{
- DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication: &destinationRedisUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationRedisUpdatePasswordAuthentication: &destinationRedisUpdatePasswordAuthentication,
Type: typ,
}
}
func (u *DestinationRedisUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationRedisUpdateSSHTunnelMethodNoTunnel := new(DestinationRedisUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedisUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationRedisUpdateSSHTunnelMethodNoTunnel = destinationRedisUpdateSSHTunnelMethodNoTunnel
- u.Type = DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHTunnelMethodNoTunnel
+
+ destinationRedisUpdateNoTunnel := new(DestinationRedisUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationRedisUpdateNoTunnel, "", true, true); err == nil {
+ u.DestinationRedisUpdateNoTunnel = destinationRedisUpdateNoTunnel
+ u.Type = DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateNoTunnel
return nil
}
- destinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication := new(DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication = destinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication
+ destinationRedisUpdateSSHKeyAuthentication := new(DestinationRedisUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationRedisUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationRedisUpdateSSHKeyAuthentication = destinationRedisUpdateSSHKeyAuthentication
+ u.Type = DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHKeyAuthentication
return nil
}
- destinationRedisUpdateSSHTunnelMethodPasswordAuthentication := new(DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedisUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication = destinationRedisUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdateSSHTunnelMethodPasswordAuthentication
+ destinationRedisUpdatePasswordAuthentication := new(DestinationRedisUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationRedisUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.DestinationRedisUpdatePasswordAuthentication = destinationRedisUpdatePasswordAuthentication
+ u.Type = DestinationRedisUpdateSSHTunnelMethodTypeDestinationRedisUpdatePasswordAuthentication
return nil
}
@@ -354,32 +501,32 @@ func (u *DestinationRedisUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error
}
func (u DestinationRedisUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationRedisUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationRedisUpdateSSHTunnelMethodNoTunnel)
+ if u.DestinationRedisUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationRedisUpdateNoTunnel, "", true)
}
- if u.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationRedisUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationRedisUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationRedisUpdateSSHKeyAuthentication, "", true)
}
- if u.DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationRedisUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationRedisUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationRedisUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationRedisUpdate struct {
// Redis cache type to store data in.
- CacheType DestinationRedisUpdateCacheType `json:"cache_type"`
+ CacheType *CacheType `default:"hash" json:"cache_type"`
// Redis host to connect to.
Host string `json:"host"`
// Password associated with Redis.
Password *string `json:"password,omitempty"`
// Port of Redis.
- Port int64 `json:"port"`
+ Port *int64 `default:"6379" json:"port"`
// Indicates whether SSL encryption protocol will be used to connect to Redis. It is recommended to use SSL connection if possible.
- Ssl *bool `json:"ssl,omitempty"`
+ Ssl *bool `default:"false" json:"ssl"`
// SSL connection modes.
// verify-full - This is the most secure mode. Always require encryption and verifies the identity of the source database server
SslMode *DestinationRedisUpdateSSLModes `json:"ssl_mode,omitempty"`
@@ -388,3 +535,70 @@ type DestinationRedisUpdate struct {
// Username associated with Redis.
Username string `json:"username"`
}
+
+func (d DestinationRedisUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedisUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedisUpdate) GetCacheType() *CacheType {
+ if o == nil {
+ return nil
+ }
+ return o.CacheType
+}
+
+func (o *DestinationRedisUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationRedisUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationRedisUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationRedisUpdate) GetSsl() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Ssl
+}
+
+func (o *DestinationRedisUpdate) GetSslMode() *DestinationRedisUpdateSSLModes {
+ if o == nil {
+ return nil
+ }
+ return o.SslMode
+}
+
+func (o *DestinationRedisUpdate) GetTunnelMethod() *DestinationRedisUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationRedisUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationredshift.go b/internal/sdk/pkg/models/shared/destinationredshift.go
old mode 100755
new mode 100644
index e72b77a47..75141ee1c
--- a/internal/sdk/pkg/models/shared/destinationredshift.go
+++ b/internal/sdk/pkg/models/shared/destinationredshift.go
@@ -3,215 +3,309 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationRedshiftRedshift string
+type Redshift string
const (
- DestinationRedshiftRedshiftRedshift DestinationRedshiftRedshift = "redshift"
+ RedshiftRedshift Redshift = "redshift"
)
-func (e DestinationRedshiftRedshift) ToPointer() *DestinationRedshiftRedshift {
+func (e Redshift) ToPointer() *Redshift {
return &e
}
-func (e *DestinationRedshiftRedshift) UnmarshalJSON(data []byte) error {
+func (e *Redshift) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "redshift":
- *e = DestinationRedshiftRedshift(v)
+ *e = Redshift(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftRedshift: %v", v)
+ return fmt.Errorf("invalid value for Redshift: %v", v)
}
}
-// DestinationRedshiftSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationRedshiftSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationRedshiftSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationRedshiftSchemasTunnelMethodTunnelMethod string
const (
- DestinationRedshiftSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationRedshiftSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationRedshiftSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationRedshiftSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationRedshiftSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationRedshiftSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationRedshiftSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationRedshiftSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationRedshiftSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationRedshiftSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationRedshiftSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationRedshiftSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedshiftSSHTunnelMethodPasswordAuthentication struct {
+// DestinationRedshiftPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedshiftPasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationRedshiftSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedshiftSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationRedshiftSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationRedshiftSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationRedshiftPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedshiftPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedshiftPasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationRedshiftPasswordAuthentication) GetTunnelMethod() DestinationRedshiftSchemasTunnelMethodTunnelMethod {
+ return DestinationRedshiftSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationRedshiftPasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationRedshiftPasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationRedshiftPasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationRedshiftSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationRedshiftSchemasTunnelMethod string
const (
- DestinationRedshiftSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationRedshiftSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationRedshiftSchemasTunnelMethodSSHKeyAuth DestinationRedshiftSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationRedshiftSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationRedshiftSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationRedshiftSchemasTunnelMethod) ToPointer() *DestinationRedshiftSchemasTunnelMethod {
return &e
}
-func (e *DestinationRedshiftSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationRedshiftSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationRedshiftSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftSchemasTunnelMethod: %v", v)
}
}
-// DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationRedshiftSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedshiftSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationRedshiftSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedshiftSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationRedshiftSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationRedshiftSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationRedshiftSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedshiftSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedshiftSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationRedshiftSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationRedshiftSSHKeyAuthentication) GetTunnelMethod() DestinationRedshiftSchemasTunnelMethod {
+ return DestinationRedshiftSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationRedshiftSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationRedshiftSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationRedshiftTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationRedshiftTunnelMethod string
const (
- DestinationRedshiftSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationRedshiftSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationRedshiftTunnelMethodNoTunnel DestinationRedshiftTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationRedshiftSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationRedshiftSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationRedshiftTunnelMethod) ToPointer() *DestinationRedshiftTunnelMethod {
return &e
}
-func (e *DestinationRedshiftSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationRedshiftSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationRedshiftTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftTunnelMethod: %v", v)
}
}
-// DestinationRedshiftSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedshiftSSHTunnelMethodNoTunnel struct {
+// DestinationRedshiftNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedshiftNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationRedshiftSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedshiftTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationRedshiftNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedshiftNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedshiftNoTunnel) GetTunnelMethod() DestinationRedshiftTunnelMethod {
+ return DestinationRedshiftTunnelMethodNoTunnel
}
type DestinationRedshiftSSHTunnelMethodType string
const (
- DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHTunnelMethodNoTunnel DestinationRedshiftSSHTunnelMethodType = "destination-redshift_SSH Tunnel Method_No Tunnel"
- DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHTunnelMethodSSHKeyAuthentication DestinationRedshiftSSHTunnelMethodType = "destination-redshift_SSH Tunnel Method_SSH Key Authentication"
- DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHTunnelMethodPasswordAuthentication DestinationRedshiftSSHTunnelMethodType = "destination-redshift_SSH Tunnel Method_Password Authentication"
+ DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftNoTunnel DestinationRedshiftSSHTunnelMethodType = "destination-redshift_No Tunnel"
+ DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHKeyAuthentication DestinationRedshiftSSHTunnelMethodType = "destination-redshift_SSH Key Authentication"
+ DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftPasswordAuthentication DestinationRedshiftSSHTunnelMethodType = "destination-redshift_Password Authentication"
)
type DestinationRedshiftSSHTunnelMethod struct {
- DestinationRedshiftSSHTunnelMethodNoTunnel *DestinationRedshiftSSHTunnelMethodNoTunnel
- DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication *DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication
- DestinationRedshiftSSHTunnelMethodPasswordAuthentication *DestinationRedshiftSSHTunnelMethodPasswordAuthentication
+ DestinationRedshiftNoTunnel *DestinationRedshiftNoTunnel
+ DestinationRedshiftSSHKeyAuthentication *DestinationRedshiftSSHKeyAuthentication
+ DestinationRedshiftPasswordAuthentication *DestinationRedshiftPasswordAuthentication
Type DestinationRedshiftSSHTunnelMethodType
}
-func CreateDestinationRedshiftSSHTunnelMethodDestinationRedshiftSSHTunnelMethodNoTunnel(destinationRedshiftSSHTunnelMethodNoTunnel DestinationRedshiftSSHTunnelMethodNoTunnel) DestinationRedshiftSSHTunnelMethod {
- typ := DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHTunnelMethodNoTunnel
+func CreateDestinationRedshiftSSHTunnelMethodDestinationRedshiftNoTunnel(destinationRedshiftNoTunnel DestinationRedshiftNoTunnel) DestinationRedshiftSSHTunnelMethod {
+ typ := DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftNoTunnel
return DestinationRedshiftSSHTunnelMethod{
- DestinationRedshiftSSHTunnelMethodNoTunnel: &destinationRedshiftSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationRedshiftNoTunnel: &destinationRedshiftNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationRedshiftSSHTunnelMethodDestinationRedshiftSSHTunnelMethodSSHKeyAuthentication(destinationRedshiftSSHTunnelMethodSSHKeyAuthentication DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication) DestinationRedshiftSSHTunnelMethod {
- typ := DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationRedshiftSSHTunnelMethodDestinationRedshiftSSHKeyAuthentication(destinationRedshiftSSHKeyAuthentication DestinationRedshiftSSHKeyAuthentication) DestinationRedshiftSSHTunnelMethod {
+ typ := DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHKeyAuthentication
return DestinationRedshiftSSHTunnelMethod{
- DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication: &destinationRedshiftSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ DestinationRedshiftSSHKeyAuthentication: &destinationRedshiftSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateDestinationRedshiftSSHTunnelMethodDestinationRedshiftSSHTunnelMethodPasswordAuthentication(destinationRedshiftSSHTunnelMethodPasswordAuthentication DestinationRedshiftSSHTunnelMethodPasswordAuthentication) DestinationRedshiftSSHTunnelMethod {
- typ := DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHTunnelMethodPasswordAuthentication
+func CreateDestinationRedshiftSSHTunnelMethodDestinationRedshiftPasswordAuthentication(destinationRedshiftPasswordAuthentication DestinationRedshiftPasswordAuthentication) DestinationRedshiftSSHTunnelMethod {
+ typ := DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftPasswordAuthentication
return DestinationRedshiftSSHTunnelMethod{
- DestinationRedshiftSSHTunnelMethodPasswordAuthentication: &destinationRedshiftSSHTunnelMethodPasswordAuthentication,
+ DestinationRedshiftPasswordAuthentication: &destinationRedshiftPasswordAuthentication,
Type: typ,
}
}
func (u *DestinationRedshiftSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationRedshiftSSHTunnelMethodNoTunnel := new(DestinationRedshiftSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationRedshiftSSHTunnelMethodNoTunnel = destinationRedshiftSSHTunnelMethodNoTunnel
- u.Type = DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHTunnelMethodNoTunnel
+
+ destinationRedshiftNoTunnel := new(DestinationRedshiftNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationRedshiftNoTunnel, "", true, true); err == nil {
+ u.DestinationRedshiftNoTunnel = destinationRedshiftNoTunnel
+ u.Type = DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftNoTunnel
return nil
}
- destinationRedshiftSSHTunnelMethodSSHKeyAuthentication := new(DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication = destinationRedshiftSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHTunnelMethodSSHKeyAuthentication
+ destinationRedshiftSSHKeyAuthentication := new(DestinationRedshiftSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationRedshiftSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationRedshiftSSHKeyAuthentication = destinationRedshiftSSHKeyAuthentication
+ u.Type = DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHKeyAuthentication
return nil
}
- destinationRedshiftSSHTunnelMethodPasswordAuthentication := new(DestinationRedshiftSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationRedshiftSSHTunnelMethodPasswordAuthentication = destinationRedshiftSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftSSHTunnelMethodPasswordAuthentication
+ destinationRedshiftPasswordAuthentication := new(DestinationRedshiftPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationRedshiftPasswordAuthentication, "", true, true); err == nil {
+ u.DestinationRedshiftPasswordAuthentication = destinationRedshiftPasswordAuthentication
+ u.Type = DestinationRedshiftSSHTunnelMethodTypeDestinationRedshiftPasswordAuthentication
return nil
}
@@ -219,208 +313,284 @@ func (u *DestinationRedshiftSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationRedshiftSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationRedshiftSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationRedshiftSSHTunnelMethodNoTunnel)
+ if u.DestinationRedshiftNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationRedshiftNoTunnel, "", true)
+ }
+
+ if u.DestinationRedshiftSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationRedshiftSSHKeyAuthentication, "", true)
}
- if u.DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationRedshiftSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationRedshiftPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationRedshiftPasswordAuthentication, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type DestinationRedshiftSchemasMethod string
+
+const (
+ DestinationRedshiftSchemasMethodStandard DestinationRedshiftSchemasMethod = "Standard"
+)
+
+func (e DestinationRedshiftSchemasMethod) ToPointer() *DestinationRedshiftSchemasMethod {
+ return &e
+}
+
+func (e *DestinationRedshiftSchemasMethod) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
}
+ switch v {
+ case "Standard":
+ *e = DestinationRedshiftSchemasMethod(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationRedshiftSchemasMethod: %v", v)
+ }
+}
- if u.DestinationRedshiftSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationRedshiftSSHTunnelMethodPasswordAuthentication)
+// DestinationRedshiftStandard - (not recommended) Direct loading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In all other cases, you should use S3 uploading.
+type DestinationRedshiftStandard struct {
+ method DestinationRedshiftSchemasMethod `const:"Standard" json:"method"`
+}
+
+func (d DestinationRedshiftStandard) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedshiftStandard) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
+}
- return nil, nil
+func (o *DestinationRedshiftStandard) GetMethod() DestinationRedshiftSchemasMethod {
+ return DestinationRedshiftSchemasMethodStandard
}
-type DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType string
+type DestinationRedshiftSchemasEncryptionType string
const (
- DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionTypeAesCbcEnvelope DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType = "aes_cbc_envelope"
+ DestinationRedshiftSchemasEncryptionTypeAesCbcEnvelope DestinationRedshiftSchemasEncryptionType = "aes_cbc_envelope"
)
-func (e DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType) ToPointer() *DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType {
+func (e DestinationRedshiftSchemasEncryptionType) ToPointer() *DestinationRedshiftSchemasEncryptionType {
return &e
}
-func (e *DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftSchemasEncryptionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "aes_cbc_envelope":
- *e = DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType(v)
+ *e = DestinationRedshiftSchemasEncryptionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftSchemasEncryptionType: %v", v)
}
}
-// DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption - Staging data will be encrypted using AES-CBC envelope encryption.
-type DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption struct {
- EncryptionType DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType `json:"encryption_type"`
+// DestinationRedshiftAESCBCEnvelopeEncryption - Staging data will be encrypted using AES-CBC envelope encryption.
+type DestinationRedshiftAESCBCEnvelopeEncryption struct {
+ encryptionType *DestinationRedshiftSchemasEncryptionType `const:"aes_cbc_envelope" json:"encryption_type"`
// The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.
KeyEncryptingKey *string `json:"key_encrypting_key,omitempty"`
}
-type DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType string
+func (d DestinationRedshiftAESCBCEnvelopeEncryption) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedshiftAESCBCEnvelopeEncryption) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedshiftAESCBCEnvelopeEncryption) GetEncryptionType() *DestinationRedshiftSchemasEncryptionType {
+ return DestinationRedshiftSchemasEncryptionTypeAesCbcEnvelope.ToPointer()
+}
+
+func (o *DestinationRedshiftAESCBCEnvelopeEncryption) GetKeyEncryptingKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.KeyEncryptingKey
+}
+
+type DestinationRedshiftEncryptionType string
const (
- DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionTypeNone DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType = "none"
+ DestinationRedshiftEncryptionTypeNone DestinationRedshiftEncryptionType = "none"
)
-func (e DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType) ToPointer() *DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType {
+func (e DestinationRedshiftEncryptionType) ToPointer() *DestinationRedshiftEncryptionType {
return &e
}
-func (e *DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftEncryptionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "none":
- *e = DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType(v)
+ *e = DestinationRedshiftEncryptionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftEncryptionType: %v", v)
+ }
+}
+
+// DestinationRedshiftNoEncryption - Staging data will be stored in plaintext.
+type DestinationRedshiftNoEncryption struct {
+ encryptionType *DestinationRedshiftEncryptionType `const:"none" json:"encryption_type"`
+}
+
+func (d DestinationRedshiftNoEncryption) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedshiftNoEncryption) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption - Staging data will be stored in plaintext.
-type DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption struct {
- EncryptionType DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType `json:"encryption_type"`
+func (o *DestinationRedshiftNoEncryption) GetEncryptionType() *DestinationRedshiftEncryptionType {
+ return DestinationRedshiftEncryptionTypeNone.ToPointer()
}
-type DestinationRedshiftUploadingMethodS3StagingEncryptionType string
+type DestinationRedshiftEncryptionUnionType string
const (
- DestinationRedshiftUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption DestinationRedshiftUploadingMethodS3StagingEncryptionType = "destination-redshift_Uploading Method_S3 Staging_Encryption_No encryption"
- DestinationRedshiftUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption DestinationRedshiftUploadingMethodS3StagingEncryptionType = "destination-redshift_Uploading Method_S3 Staging_Encryption_AES-CBC envelope encryption"
+ DestinationRedshiftEncryptionUnionTypeDestinationRedshiftNoEncryption DestinationRedshiftEncryptionUnionType = "destination-redshift_No encryption"
+ DestinationRedshiftEncryptionUnionTypeDestinationRedshiftAESCBCEnvelopeEncryption DestinationRedshiftEncryptionUnionType = "destination-redshift_AES-CBC envelope encryption"
)
-type DestinationRedshiftUploadingMethodS3StagingEncryption struct {
- DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption *DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption
- DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption *DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption
+type DestinationRedshiftEncryption struct {
+ DestinationRedshiftNoEncryption *DestinationRedshiftNoEncryption
+ DestinationRedshiftAESCBCEnvelopeEncryption *DestinationRedshiftAESCBCEnvelopeEncryption
- Type DestinationRedshiftUploadingMethodS3StagingEncryptionType
+ Type DestinationRedshiftEncryptionUnionType
}
-func CreateDestinationRedshiftUploadingMethodS3StagingEncryptionDestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption(destinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption) DestinationRedshiftUploadingMethodS3StagingEncryption {
- typ := DestinationRedshiftUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption
+func CreateDestinationRedshiftEncryptionDestinationRedshiftNoEncryption(destinationRedshiftNoEncryption DestinationRedshiftNoEncryption) DestinationRedshiftEncryption {
+ typ := DestinationRedshiftEncryptionUnionTypeDestinationRedshiftNoEncryption
- return DestinationRedshiftUploadingMethodS3StagingEncryption{
- DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption: &destinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption,
- Type: typ,
+ return DestinationRedshiftEncryption{
+ DestinationRedshiftNoEncryption: &destinationRedshiftNoEncryption,
+ Type: typ,
}
}
-func CreateDestinationRedshiftUploadingMethodS3StagingEncryptionDestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption(destinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption) DestinationRedshiftUploadingMethodS3StagingEncryption {
- typ := DestinationRedshiftUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption
+func CreateDestinationRedshiftEncryptionDestinationRedshiftAESCBCEnvelopeEncryption(destinationRedshiftAESCBCEnvelopeEncryption DestinationRedshiftAESCBCEnvelopeEncryption) DestinationRedshiftEncryption {
+ typ := DestinationRedshiftEncryptionUnionTypeDestinationRedshiftAESCBCEnvelopeEncryption
- return DestinationRedshiftUploadingMethodS3StagingEncryption{
- DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption: &destinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption,
+ return DestinationRedshiftEncryption{
+ DestinationRedshiftAESCBCEnvelopeEncryption: &destinationRedshiftAESCBCEnvelopeEncryption,
Type: typ,
}
}
-func (u *DestinationRedshiftUploadingMethodS3StagingEncryption) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationRedshiftEncryption) UnmarshalJSON(data []byte) error {
- destinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption := new(DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption); err == nil {
- u.DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption = destinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption
- u.Type = DestinationRedshiftUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption
+ destinationRedshiftNoEncryption := new(DestinationRedshiftNoEncryption)
+ if err := utils.UnmarshalJSON(data, &destinationRedshiftNoEncryption, "", true, true); err == nil {
+ u.DestinationRedshiftNoEncryption = destinationRedshiftNoEncryption
+ u.Type = DestinationRedshiftEncryptionUnionTypeDestinationRedshiftNoEncryption
return nil
}
- destinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption := new(DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption); err == nil {
- u.DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption = destinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption
- u.Type = DestinationRedshiftUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption
+ destinationRedshiftAESCBCEnvelopeEncryption := new(DestinationRedshiftAESCBCEnvelopeEncryption)
+ if err := utils.UnmarshalJSON(data, &destinationRedshiftAESCBCEnvelopeEncryption, "", true, true); err == nil {
+ u.DestinationRedshiftAESCBCEnvelopeEncryption = destinationRedshiftAESCBCEnvelopeEncryption
+ u.Type = DestinationRedshiftEncryptionUnionTypeDestinationRedshiftAESCBCEnvelopeEncryption
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationRedshiftUploadingMethodS3StagingEncryption) MarshalJSON() ([]byte, error) {
- if u.DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption != nil {
- return json.Marshal(u.DestinationRedshiftUploadingMethodS3StagingEncryptionNoEncryption)
+func (u DestinationRedshiftEncryption) MarshalJSON() ([]byte, error) {
+ if u.DestinationRedshiftNoEncryption != nil {
+ return utils.MarshalJSON(u.DestinationRedshiftNoEncryption, "", true)
}
- if u.DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption != nil {
- return json.Marshal(u.DestinationRedshiftUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption)
+ if u.DestinationRedshiftAESCBCEnvelopeEncryption != nil {
+ return utils.MarshalJSON(u.DestinationRedshiftAESCBCEnvelopeEncryption, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationRedshiftUploadingMethodS3StagingMethod string
+type DestinationRedshiftMethod string
const (
- DestinationRedshiftUploadingMethodS3StagingMethodS3Staging DestinationRedshiftUploadingMethodS3StagingMethod = "S3 Staging"
+ DestinationRedshiftMethodS3Staging DestinationRedshiftMethod = "S3 Staging"
)
-func (e DestinationRedshiftUploadingMethodS3StagingMethod) ToPointer() *DestinationRedshiftUploadingMethodS3StagingMethod {
+func (e DestinationRedshiftMethod) ToPointer() *DestinationRedshiftMethod {
return &e
}
-func (e *DestinationRedshiftUploadingMethodS3StagingMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "S3 Staging":
- *e = DestinationRedshiftUploadingMethodS3StagingMethod(v)
+ *e = DestinationRedshiftMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftUploadingMethodS3StagingMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftMethod: %v", v)
}
}
-// DestinationRedshiftUploadingMethodS3StagingS3BucketRegion - The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.
-type DestinationRedshiftUploadingMethodS3StagingS3BucketRegion string
+// DestinationRedshiftS3BucketRegion - The region of the S3 staging bucket.
+type DestinationRedshiftS3BucketRegion string
const (
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionUnknown DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = ""
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionUsEast1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "us-east-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionUsEast2 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "us-east-2"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionUsWest1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "us-west-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionUsWest2 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "us-west-2"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionAfSouth1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "af-south-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionApEast1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "ap-east-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionApSouth1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "ap-south-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionApNortheast1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "ap-northeast-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionApNortheast2 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "ap-northeast-2"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionApNortheast3 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "ap-northeast-3"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionApSoutheast1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "ap-southeast-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionApSoutheast2 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "ap-southeast-2"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionCaCentral1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "ca-central-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionCnNorth1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "cn-north-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionCnNorthwest1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "cn-northwest-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionEuCentral1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "eu-central-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionEuNorth1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "eu-north-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionEuSouth1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "eu-south-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionEuWest1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "eu-west-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionEuWest2 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "eu-west-2"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionEuWest3 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "eu-west-3"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionSaEast1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "sa-east-1"
- DestinationRedshiftUploadingMethodS3StagingS3BucketRegionMeSouth1 DestinationRedshiftUploadingMethodS3StagingS3BucketRegion = "me-south-1"
+ DestinationRedshiftS3BucketRegionUnknown DestinationRedshiftS3BucketRegion = ""
+ DestinationRedshiftS3BucketRegionUsEast1 DestinationRedshiftS3BucketRegion = "us-east-1"
+ DestinationRedshiftS3BucketRegionUsEast2 DestinationRedshiftS3BucketRegion = "us-east-2"
+ DestinationRedshiftS3BucketRegionUsWest1 DestinationRedshiftS3BucketRegion = "us-west-1"
+ DestinationRedshiftS3BucketRegionUsWest2 DestinationRedshiftS3BucketRegion = "us-west-2"
+ DestinationRedshiftS3BucketRegionAfSouth1 DestinationRedshiftS3BucketRegion = "af-south-1"
+ DestinationRedshiftS3BucketRegionApEast1 DestinationRedshiftS3BucketRegion = "ap-east-1"
+ DestinationRedshiftS3BucketRegionApSouth1 DestinationRedshiftS3BucketRegion = "ap-south-1"
+ DestinationRedshiftS3BucketRegionApNortheast1 DestinationRedshiftS3BucketRegion = "ap-northeast-1"
+ DestinationRedshiftS3BucketRegionApNortheast2 DestinationRedshiftS3BucketRegion = "ap-northeast-2"
+ DestinationRedshiftS3BucketRegionApNortheast3 DestinationRedshiftS3BucketRegion = "ap-northeast-3"
+ DestinationRedshiftS3BucketRegionApSoutheast1 DestinationRedshiftS3BucketRegion = "ap-southeast-1"
+ DestinationRedshiftS3BucketRegionApSoutheast2 DestinationRedshiftS3BucketRegion = "ap-southeast-2"
+ DestinationRedshiftS3BucketRegionCaCentral1 DestinationRedshiftS3BucketRegion = "ca-central-1"
+ DestinationRedshiftS3BucketRegionCnNorth1 DestinationRedshiftS3BucketRegion = "cn-north-1"
+ DestinationRedshiftS3BucketRegionCnNorthwest1 DestinationRedshiftS3BucketRegion = "cn-northwest-1"
+ DestinationRedshiftS3BucketRegionEuCentral1 DestinationRedshiftS3BucketRegion = "eu-central-1"
+ DestinationRedshiftS3BucketRegionEuNorth1 DestinationRedshiftS3BucketRegion = "eu-north-1"
+ DestinationRedshiftS3BucketRegionEuSouth1 DestinationRedshiftS3BucketRegion = "eu-south-1"
+ DestinationRedshiftS3BucketRegionEuWest1 DestinationRedshiftS3BucketRegion = "eu-west-1"
+ DestinationRedshiftS3BucketRegionEuWest2 DestinationRedshiftS3BucketRegion = "eu-west-2"
+ DestinationRedshiftS3BucketRegionEuWest3 DestinationRedshiftS3BucketRegion = "eu-west-3"
+ DestinationRedshiftS3BucketRegionSaEast1 DestinationRedshiftS3BucketRegion = "sa-east-1"
+ DestinationRedshiftS3BucketRegionMeSouth1 DestinationRedshiftS3BucketRegion = "me-south-1"
)
-func (e DestinationRedshiftUploadingMethodS3StagingS3BucketRegion) ToPointer() *DestinationRedshiftUploadingMethodS3StagingS3BucketRegion {
+func (e DestinationRedshiftS3BucketRegion) ToPointer() *DestinationRedshiftS3BucketRegion {
return &e
}
-func (e *DestinationRedshiftUploadingMethodS3StagingS3BucketRegion) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftS3BucketRegion) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -473,115 +643,159 @@ func (e *DestinationRedshiftUploadingMethodS3StagingS3BucketRegion) UnmarshalJSO
case "sa-east-1":
fallthrough
case "me-south-1":
- *e = DestinationRedshiftUploadingMethodS3StagingS3BucketRegion(v)
+ *e = DestinationRedshiftS3BucketRegion(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftUploadingMethodS3StagingS3BucketRegion: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftS3BucketRegion: %v", v)
}
}
-// DestinationRedshiftUploadingMethodS3Staging - The method how the data will be uploaded to the database.
-type DestinationRedshiftUploadingMethodS3Staging struct {
+// DestinationRedshiftS3Staging - (recommended) Uploads data to S3 and then uses a COPY to insert the data into Redshift. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.
+type DestinationRedshiftS3Staging struct {
// This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.
AccessKeyID string `json:"access_key_id"`
// How to encrypt the staging data
- Encryption *DestinationRedshiftUploadingMethodS3StagingEncryption `json:"encryption,omitempty"`
+ Encryption *DestinationRedshiftEncryption `json:"encryption,omitempty"`
// Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
- FileBufferCount *int64 `json:"file_buffer_count,omitempty"`
+ FileBufferCount *int64 `default:"10" json:"file_buffer_count"`
// The pattern allows you to set the file-name format for the S3 staging file(s)
- FileNamePattern *string `json:"file_name_pattern,omitempty"`
- Method DestinationRedshiftUploadingMethodS3StagingMethod `json:"method"`
+ FileNamePattern *string `json:"file_name_pattern,omitempty"`
+ method DestinationRedshiftMethod `const:"S3 Staging" json:"method"`
// Whether to delete the staging files from S3 after completing the sync. See docs for details.
- PurgeStagingData *bool `json:"purge_staging_data,omitempty"`
- // The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.
+ PurgeStagingData *bool `default:"true" json:"purge_staging_data"`
+ // The name of the staging S3 bucket.
S3BucketName string `json:"s3_bucket_name"`
// The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.
S3BucketPath *string `json:"s3_bucket_path,omitempty"`
- // The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.
- S3BucketRegion DestinationRedshiftUploadingMethodS3StagingS3BucketRegion `json:"s3_bucket_region"`
+ // The region of the S3 staging bucket.
+ S3BucketRegion *DestinationRedshiftS3BucketRegion `default:"" json:"s3_bucket_region"`
// The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.
SecretAccessKey string `json:"secret_access_key"`
}
-type DestinationRedshiftUploadingMethodStandardMethod string
+func (d DestinationRedshiftS3Staging) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
-const (
- DestinationRedshiftUploadingMethodStandardMethodStandard DestinationRedshiftUploadingMethodStandardMethod = "Standard"
-)
+func (d *DestinationRedshiftS3Staging) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
-func (e DestinationRedshiftUploadingMethodStandardMethod) ToPointer() *DestinationRedshiftUploadingMethodStandardMethod {
- return &e
+func (o *DestinationRedshiftS3Staging) GetAccessKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKeyID
}
-func (e *DestinationRedshiftUploadingMethodStandardMethod) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
+func (o *DestinationRedshiftS3Staging) GetEncryption() *DestinationRedshiftEncryption {
+ if o == nil {
+ return nil
}
- switch v {
- case "Standard":
- *e = DestinationRedshiftUploadingMethodStandardMethod(v)
+ return o.Encryption
+}
+
+func (o *DestinationRedshiftS3Staging) GetFileBufferCount() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.FileBufferCount
+}
+
+func (o *DestinationRedshiftS3Staging) GetFileNamePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FileNamePattern
+}
+
+func (o *DestinationRedshiftS3Staging) GetMethod() DestinationRedshiftMethod {
+ return DestinationRedshiftMethodS3Staging
+}
+
+func (o *DestinationRedshiftS3Staging) GetPurgeStagingData() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.PurgeStagingData
+}
+
+func (o *DestinationRedshiftS3Staging) GetS3BucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketName
+}
+
+func (o *DestinationRedshiftS3Staging) GetS3BucketPath() *string {
+ if o == nil {
+ return nil
+ }
+ return o.S3BucketPath
+}
+
+func (o *DestinationRedshiftS3Staging) GetS3BucketRegion() *DestinationRedshiftS3BucketRegion {
+ if o == nil {
return nil
- default:
- return fmt.Errorf("invalid value for DestinationRedshiftUploadingMethodStandardMethod: %v", v)
}
+ return o.S3BucketRegion
}
-// DestinationRedshiftUploadingMethodStandard - The method how the data will be uploaded to the database.
-type DestinationRedshiftUploadingMethodStandard struct {
- Method DestinationRedshiftUploadingMethodStandardMethod `json:"method"`
+func (o *DestinationRedshiftS3Staging) GetSecretAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretAccessKey
}
type DestinationRedshiftUploadingMethodType string
const (
- DestinationRedshiftUploadingMethodTypeDestinationRedshiftUploadingMethodStandard DestinationRedshiftUploadingMethodType = "destination-redshift_Uploading Method_Standard"
- DestinationRedshiftUploadingMethodTypeDestinationRedshiftUploadingMethodS3Staging DestinationRedshiftUploadingMethodType = "destination-redshift_Uploading Method_S3 Staging"
+ DestinationRedshiftUploadingMethodTypeDestinationRedshiftS3Staging DestinationRedshiftUploadingMethodType = "destination-redshift_S3 Staging"
+ DestinationRedshiftUploadingMethodTypeDestinationRedshiftStandard DestinationRedshiftUploadingMethodType = "destination-redshift_Standard"
)
type DestinationRedshiftUploadingMethod struct {
- DestinationRedshiftUploadingMethodStandard *DestinationRedshiftUploadingMethodStandard
- DestinationRedshiftUploadingMethodS3Staging *DestinationRedshiftUploadingMethodS3Staging
+ DestinationRedshiftS3Staging *DestinationRedshiftS3Staging
+ DestinationRedshiftStandard *DestinationRedshiftStandard
Type DestinationRedshiftUploadingMethodType
}
-func CreateDestinationRedshiftUploadingMethodDestinationRedshiftUploadingMethodStandard(destinationRedshiftUploadingMethodStandard DestinationRedshiftUploadingMethodStandard) DestinationRedshiftUploadingMethod {
- typ := DestinationRedshiftUploadingMethodTypeDestinationRedshiftUploadingMethodStandard
+func CreateDestinationRedshiftUploadingMethodDestinationRedshiftS3Staging(destinationRedshiftS3Staging DestinationRedshiftS3Staging) DestinationRedshiftUploadingMethod {
+ typ := DestinationRedshiftUploadingMethodTypeDestinationRedshiftS3Staging
return DestinationRedshiftUploadingMethod{
- DestinationRedshiftUploadingMethodStandard: &destinationRedshiftUploadingMethodStandard,
- Type: typ,
+ DestinationRedshiftS3Staging: &destinationRedshiftS3Staging,
+ Type: typ,
}
}
-func CreateDestinationRedshiftUploadingMethodDestinationRedshiftUploadingMethodS3Staging(destinationRedshiftUploadingMethodS3Staging DestinationRedshiftUploadingMethodS3Staging) DestinationRedshiftUploadingMethod {
- typ := DestinationRedshiftUploadingMethodTypeDestinationRedshiftUploadingMethodS3Staging
+func CreateDestinationRedshiftUploadingMethodDestinationRedshiftStandard(destinationRedshiftStandard DestinationRedshiftStandard) DestinationRedshiftUploadingMethod {
+ typ := DestinationRedshiftUploadingMethodTypeDestinationRedshiftStandard
return DestinationRedshiftUploadingMethod{
- DestinationRedshiftUploadingMethodS3Staging: &destinationRedshiftUploadingMethodS3Staging,
- Type: typ,
+ DestinationRedshiftStandard: &destinationRedshiftStandard,
+ Type: typ,
}
}
func (u *DestinationRedshiftUploadingMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationRedshiftUploadingMethodStandard := new(DestinationRedshiftUploadingMethodStandard)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftUploadingMethodStandard); err == nil {
- u.DestinationRedshiftUploadingMethodStandard = destinationRedshiftUploadingMethodStandard
- u.Type = DestinationRedshiftUploadingMethodTypeDestinationRedshiftUploadingMethodStandard
+
+ destinationRedshiftStandard := new(DestinationRedshiftStandard)
+ if err := utils.UnmarshalJSON(data, &destinationRedshiftStandard, "", true, true); err == nil {
+ u.DestinationRedshiftStandard = destinationRedshiftStandard
+ u.Type = DestinationRedshiftUploadingMethodTypeDestinationRedshiftStandard
return nil
}
- destinationRedshiftUploadingMethodS3Staging := new(DestinationRedshiftUploadingMethodS3Staging)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftUploadingMethodS3Staging); err == nil {
- u.DestinationRedshiftUploadingMethodS3Staging = destinationRedshiftUploadingMethodS3Staging
- u.Type = DestinationRedshiftUploadingMethodTypeDestinationRedshiftUploadingMethodS3Staging
+ destinationRedshiftS3Staging := new(DestinationRedshiftS3Staging)
+ if err := utils.UnmarshalJSON(data, &destinationRedshiftS3Staging, "", true, true); err == nil {
+ u.DestinationRedshiftS3Staging = destinationRedshiftS3Staging
+ u.Type = DestinationRedshiftUploadingMethodTypeDestinationRedshiftS3Staging
return nil
}
@@ -589,21 +803,21 @@ func (u *DestinationRedshiftUploadingMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationRedshiftUploadingMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationRedshiftUploadingMethodStandard != nil {
- return json.Marshal(u.DestinationRedshiftUploadingMethodStandard)
+ if u.DestinationRedshiftS3Staging != nil {
+ return utils.MarshalJSON(u.DestinationRedshiftS3Staging, "", true)
}
- if u.DestinationRedshiftUploadingMethodS3Staging != nil {
- return json.Marshal(u.DestinationRedshiftUploadingMethodS3Staging)
+ if u.DestinationRedshiftStandard != nil {
+ return utils.MarshalJSON(u.DestinationRedshiftStandard, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationRedshift struct {
// Name of the database.
- Database string `json:"database"`
- DestinationType DestinationRedshiftRedshift `json:"destinationType"`
+ Database string `json:"database"`
+ destinationType Redshift `const:"redshift" json:"destinationType"`
// Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)
Host string `json:"host"`
// Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
@@ -611,13 +825,91 @@ type DestinationRedshift struct {
// Password associated with the username.
Password string `json:"password"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5439" json:"port"`
// The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public".
- Schema string `json:"schema"`
+ Schema *string `default:"public" json:"schema"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *DestinationRedshiftSSHTunnelMethod `json:"tunnel_method,omitempty"`
- // The method how the data will be uploaded to the database.
+ // The way data will be uploaded to Redshift.
UploadingMethod *DestinationRedshiftUploadingMethod `json:"uploading_method,omitempty"`
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationRedshift) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedshift) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedshift) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationRedshift) GetDestinationType() Redshift {
+ return RedshiftRedshift
+}
+
+func (o *DestinationRedshift) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationRedshift) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationRedshift) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *DestinationRedshift) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationRedshift) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *DestinationRedshift) GetTunnelMethod() *DestinationRedshiftSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationRedshift) GetUploadingMethod() *DestinationRedshiftUploadingMethod {
+ if o == nil {
+ return nil
+ }
+ return o.UploadingMethod
+}
+
+func (o *DestinationRedshift) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationredshiftcreaterequest.go b/internal/sdk/pkg/models/shared/destinationredshiftcreaterequest.go
old mode 100755
new mode 100644
index fd9384c73..6c636c19a
--- a/internal/sdk/pkg/models/shared/destinationredshiftcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationredshiftcreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationRedshiftCreateRequest struct {
Configuration DestinationRedshift `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationRedshiftCreateRequest) GetConfiguration() DestinationRedshift {
+ if o == nil {
+ return DestinationRedshift{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationRedshiftCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationRedshiftCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationRedshiftCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationredshiftputrequest.go b/internal/sdk/pkg/models/shared/destinationredshiftputrequest.go
old mode 100755
new mode 100644
index 263693349..32846e065
--- a/internal/sdk/pkg/models/shared/destinationredshiftputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationredshiftputrequest.go
@@ -7,3 +7,24 @@ type DestinationRedshiftPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationRedshiftPutRequest) GetConfiguration() DestinationRedshiftUpdate {
+ if o == nil {
+ return DestinationRedshiftUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationRedshiftPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationRedshiftPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationredshiftupdate.go b/internal/sdk/pkg/models/shared/destinationredshiftupdate.go
old mode 100755
new mode 100644
index c5c2f5186..dcccae4a5
--- a/internal/sdk/pkg/models/shared/destinationredshiftupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationredshiftupdate.go
@@ -3,191 +3,285 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethod string
const (
- DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication struct {
+// DestinationRedshiftUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedshiftUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationRedshiftUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedshiftUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedshiftUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationRedshiftUpdatePasswordAuthentication) GetTunnelMethod() DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethod {
+ return DestinationRedshiftUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationRedshiftUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationRedshiftUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationRedshiftUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationRedshiftUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationRedshiftUpdateSchemasTunnelMethod string
const (
- DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationRedshiftUpdateSchemasTunnelMethodSSHKeyAuth DestinationRedshiftUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationRedshiftUpdateSchemasTunnelMethod) ToPointer() *DestinationRedshiftUpdateSchemasTunnelMethod {
return &e
}
-func (e *DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationRedshiftUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftUpdateSchemasTunnelMethod: %v", v)
}
}
-// DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationRedshiftUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedshiftUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedshiftUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationRedshiftUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationRedshiftUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationRedshiftUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedshiftUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedshiftUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationRedshiftUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationRedshiftUpdateSSHKeyAuthentication) GetTunnelMethod() DestinationRedshiftUpdateSchemasTunnelMethod {
+ return DestinationRedshiftUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationRedshiftUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationRedshiftUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationRedshiftUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationRedshiftUpdateTunnelMethod string
const (
- DestinationRedshiftUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationRedshiftUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationRedshiftUpdateTunnelMethodNoTunnel DestinationRedshiftUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationRedshiftUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationRedshiftUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationRedshiftUpdateTunnelMethod) ToPointer() *DestinationRedshiftUpdateTunnelMethod {
return &e
}
-func (e *DestinationRedshiftUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationRedshiftUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationRedshiftUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftUpdateTunnelMethod: %v", v)
}
}
-// DestinationRedshiftUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationRedshiftUpdateSSHTunnelMethodNoTunnel struct {
+// DestinationRedshiftUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationRedshiftUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationRedshiftUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationRedshiftUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationRedshiftUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedshiftUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedshiftUpdateNoTunnel) GetTunnelMethod() DestinationRedshiftUpdateTunnelMethod {
+ return DestinationRedshiftUpdateTunnelMethodNoTunnel
}
type DestinationRedshiftUpdateSSHTunnelMethodType string
const (
- DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHTunnelMethodNoTunnel DestinationRedshiftUpdateSSHTunnelMethodType = "destination-redshift-update_SSH Tunnel Method_No Tunnel"
- DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication DestinationRedshiftUpdateSSHTunnelMethodType = "destination-redshift-update_SSH Tunnel Method_SSH Key Authentication"
- DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication DestinationRedshiftUpdateSSHTunnelMethodType = "destination-redshift-update_SSH Tunnel Method_Password Authentication"
+ DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateNoTunnel DestinationRedshiftUpdateSSHTunnelMethodType = "destination-redshift-update_No Tunnel"
+ DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHKeyAuthentication DestinationRedshiftUpdateSSHTunnelMethodType = "destination-redshift-update_SSH Key Authentication"
+ DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdatePasswordAuthentication DestinationRedshiftUpdateSSHTunnelMethodType = "destination-redshift-update_Password Authentication"
)
type DestinationRedshiftUpdateSSHTunnelMethod struct {
- DestinationRedshiftUpdateSSHTunnelMethodNoTunnel *DestinationRedshiftUpdateSSHTunnelMethodNoTunnel
- DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication
- DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication *DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication
+ DestinationRedshiftUpdateNoTunnel *DestinationRedshiftUpdateNoTunnel
+ DestinationRedshiftUpdateSSHKeyAuthentication *DestinationRedshiftUpdateSSHKeyAuthentication
+ DestinationRedshiftUpdatePasswordAuthentication *DestinationRedshiftUpdatePasswordAuthentication
Type DestinationRedshiftUpdateSSHTunnelMethodType
}
-func CreateDestinationRedshiftUpdateSSHTunnelMethodDestinationRedshiftUpdateSSHTunnelMethodNoTunnel(destinationRedshiftUpdateSSHTunnelMethodNoTunnel DestinationRedshiftUpdateSSHTunnelMethodNoTunnel) DestinationRedshiftUpdateSSHTunnelMethod {
- typ := DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHTunnelMethodNoTunnel
+func CreateDestinationRedshiftUpdateSSHTunnelMethodDestinationRedshiftUpdateNoTunnel(destinationRedshiftUpdateNoTunnel DestinationRedshiftUpdateNoTunnel) DestinationRedshiftUpdateSSHTunnelMethod {
+ typ := DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateNoTunnel
return DestinationRedshiftUpdateSSHTunnelMethod{
- DestinationRedshiftUpdateSSHTunnelMethodNoTunnel: &destinationRedshiftUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationRedshiftUpdateNoTunnel: &destinationRedshiftUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationRedshiftUpdateSSHTunnelMethodDestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication(destinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication) DestinationRedshiftUpdateSSHTunnelMethod {
- typ := DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationRedshiftUpdateSSHTunnelMethodDestinationRedshiftUpdateSSHKeyAuthentication(destinationRedshiftUpdateSSHKeyAuthentication DestinationRedshiftUpdateSSHKeyAuthentication) DestinationRedshiftUpdateSSHTunnelMethod {
+ typ := DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHKeyAuthentication
return DestinationRedshiftUpdateSSHTunnelMethod{
- DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication: &destinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationRedshiftUpdateSSHKeyAuthentication: &destinationRedshiftUpdateSSHKeyAuthentication,
Type: typ,
}
}
-func CreateDestinationRedshiftUpdateSSHTunnelMethodDestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication(destinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication) DestinationRedshiftUpdateSSHTunnelMethod {
- typ := DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication
+func CreateDestinationRedshiftUpdateSSHTunnelMethodDestinationRedshiftUpdatePasswordAuthentication(destinationRedshiftUpdatePasswordAuthentication DestinationRedshiftUpdatePasswordAuthentication) DestinationRedshiftUpdateSSHTunnelMethod {
+ typ := DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdatePasswordAuthentication
return DestinationRedshiftUpdateSSHTunnelMethod{
- DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication: &destinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationRedshiftUpdatePasswordAuthentication: &destinationRedshiftUpdatePasswordAuthentication,
Type: typ,
}
}
func (u *DestinationRedshiftUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationRedshiftUpdateSSHTunnelMethodNoTunnel := new(DestinationRedshiftUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationRedshiftUpdateSSHTunnelMethodNoTunnel = destinationRedshiftUpdateSSHTunnelMethodNoTunnel
- u.Type = DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHTunnelMethodNoTunnel
+
+ destinationRedshiftUpdateNoTunnel := new(DestinationRedshiftUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationRedshiftUpdateNoTunnel, "", true, true); err == nil {
+ u.DestinationRedshiftUpdateNoTunnel = destinationRedshiftUpdateNoTunnel
+ u.Type = DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateNoTunnel
return nil
}
- destinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication := new(DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication = destinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication
+ destinationRedshiftUpdateSSHKeyAuthentication := new(DestinationRedshiftUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationRedshiftUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationRedshiftUpdateSSHKeyAuthentication = destinationRedshiftUpdateSSHKeyAuthentication
+ u.Type = DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHKeyAuthentication
return nil
}
- destinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication := new(DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication = destinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication
+ destinationRedshiftUpdatePasswordAuthentication := new(DestinationRedshiftUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationRedshiftUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.DestinationRedshiftUpdatePasswordAuthentication = destinationRedshiftUpdatePasswordAuthentication
+ u.Type = DestinationRedshiftUpdateSSHTunnelMethodTypeDestinationRedshiftUpdatePasswordAuthentication
return nil
}
@@ -195,208 +289,284 @@ func (u *DestinationRedshiftUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) er
}
func (u DestinationRedshiftUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationRedshiftUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationRedshiftUpdateSSHTunnelMethodNoTunnel)
+ if u.DestinationRedshiftUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationRedshiftUpdateNoTunnel, "", true)
}
- if u.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationRedshiftUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationRedshiftUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationRedshiftUpdateSSHKeyAuthentication, "", true)
}
- if u.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationRedshiftUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationRedshiftUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationRedshiftUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType string
+type DestinationRedshiftUpdateSchemasMethod string
const (
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionTypeAesCbcEnvelope DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType = "aes_cbc_envelope"
+ DestinationRedshiftUpdateSchemasMethodStandard DestinationRedshiftUpdateSchemasMethod = "Standard"
)
-func (e DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType) ToPointer() *DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType {
+func (e DestinationRedshiftUpdateSchemasMethod) ToPointer() *DestinationRedshiftUpdateSchemasMethod {
return &e
}
-func (e *DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftUpdateSchemasMethod) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Standard":
+ *e = DestinationRedshiftUpdateSchemasMethod(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationRedshiftUpdateSchemasMethod: %v", v)
+ }
+}
+
+// Standard - (not recommended) Direct loading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In all other cases, you should use S3 uploading.
+type Standard struct {
+ method DestinationRedshiftUpdateSchemasMethod `const:"Standard" json:"method"`
+}
+
+func (s Standard) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *Standard) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Standard) GetMethod() DestinationRedshiftUpdateSchemasMethod {
+ return DestinationRedshiftUpdateSchemasMethodStandard
+}
+
+type DestinationRedshiftUpdateEncryptionType string
+
+const (
+ DestinationRedshiftUpdateEncryptionTypeAesCbcEnvelope DestinationRedshiftUpdateEncryptionType = "aes_cbc_envelope"
+)
+
+func (e DestinationRedshiftUpdateEncryptionType) ToPointer() *DestinationRedshiftUpdateEncryptionType {
+ return &e
+}
+
+func (e *DestinationRedshiftUpdateEncryptionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "aes_cbc_envelope":
- *e = DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType(v)
+ *e = DestinationRedshiftUpdateEncryptionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftUpdateEncryptionType: %v", v)
}
}
-// DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption - Staging data will be encrypted using AES-CBC envelope encryption.
-type DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption struct {
- EncryptionType DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryptionEncryptionType `json:"encryption_type"`
+// AESCBCEnvelopeEncryption - Staging data will be encrypted using AES-CBC envelope encryption.
+type AESCBCEnvelopeEncryption struct {
+ encryptionType *DestinationRedshiftUpdateEncryptionType `const:"aes_cbc_envelope" json:"encryption_type"`
// The key, base64-encoded. Must be either 128, 192, or 256 bits. Leave blank to have Airbyte generate an ephemeral key for each sync.
KeyEncryptingKey *string `json:"key_encrypting_key,omitempty"`
}
-type DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType string
+func (a AESCBCEnvelopeEncryption) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AESCBCEnvelopeEncryption) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AESCBCEnvelopeEncryption) GetEncryptionType() *DestinationRedshiftUpdateEncryptionType {
+ return DestinationRedshiftUpdateEncryptionTypeAesCbcEnvelope.ToPointer()
+}
+
+func (o *AESCBCEnvelopeEncryption) GetKeyEncryptingKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.KeyEncryptingKey
+}
+
+type EncryptionType string
const (
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryptionEncryptionTypeNone DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType = "none"
+ EncryptionTypeNone EncryptionType = "none"
)
-func (e DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType) ToPointer() *DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType {
+func (e EncryptionType) ToPointer() *EncryptionType {
return &e
}
-func (e *DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType) UnmarshalJSON(data []byte) error {
+func (e *EncryptionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "none":
- *e = DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType(v)
+ *e = EncryptionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType: %v", v)
+ return fmt.Errorf("invalid value for EncryptionType: %v", v)
+ }
+}
+
+// NoEncryption - Staging data will be stored in plaintext.
+type NoEncryption struct {
+ encryptionType *EncryptionType `const:"none" json:"encryption_type"`
+}
+
+func (n NoEncryption) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(n, "", false)
+}
+
+func (n *NoEncryption) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &n, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption - Staging data will be stored in plaintext.
-type DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption struct {
- EncryptionType DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryptionEncryptionType `json:"encryption_type"`
+func (o *NoEncryption) GetEncryptionType() *EncryptionType {
+ return EncryptionTypeNone.ToPointer()
}
-type DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionType string
+type DestinationRedshiftUpdateEncryptionUnionType string
const (
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionType = "destination-redshift-update_Uploading Method_S3 Staging_Encryption_No encryption"
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionType = "destination-redshift-update_Uploading Method_S3 Staging_Encryption_AES-CBC envelope encryption"
+ DestinationRedshiftUpdateEncryptionUnionTypeNoEncryption DestinationRedshiftUpdateEncryptionUnionType = "No encryption"
+ DestinationRedshiftUpdateEncryptionUnionTypeAESCBCEnvelopeEncryption DestinationRedshiftUpdateEncryptionUnionType = "AES-CBC envelope encryption"
)
-type DestinationRedshiftUpdateUploadingMethodS3StagingEncryption struct {
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption *DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption *DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption
+type DestinationRedshiftUpdateEncryption struct {
+ NoEncryption *NoEncryption
+ AESCBCEnvelopeEncryption *AESCBCEnvelopeEncryption
- Type DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionType
+ Type DestinationRedshiftUpdateEncryptionUnionType
}
-func CreateDestinationRedshiftUpdateUploadingMethodS3StagingEncryptionDestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption(destinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption) DestinationRedshiftUpdateUploadingMethodS3StagingEncryption {
- typ := DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption
+func CreateDestinationRedshiftUpdateEncryptionNoEncryption(noEncryption NoEncryption) DestinationRedshiftUpdateEncryption {
+ typ := DestinationRedshiftUpdateEncryptionUnionTypeNoEncryption
- return DestinationRedshiftUpdateUploadingMethodS3StagingEncryption{
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption: &destinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption,
- Type: typ,
+ return DestinationRedshiftUpdateEncryption{
+ NoEncryption: &noEncryption,
+ Type: typ,
}
}
-func CreateDestinationRedshiftUpdateUploadingMethodS3StagingEncryptionDestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption(destinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption) DestinationRedshiftUpdateUploadingMethodS3StagingEncryption {
- typ := DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption
+func CreateDestinationRedshiftUpdateEncryptionAESCBCEnvelopeEncryption(aesCBCEnvelopeEncryption AESCBCEnvelopeEncryption) DestinationRedshiftUpdateEncryption {
+ typ := DestinationRedshiftUpdateEncryptionUnionTypeAESCBCEnvelopeEncryption
- return DestinationRedshiftUpdateUploadingMethodS3StagingEncryption{
- DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption: &destinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption,
- Type: typ,
+ return DestinationRedshiftUpdateEncryption{
+ AESCBCEnvelopeEncryption: &aesCBCEnvelopeEncryption,
+ Type: typ,
}
}
-func (u *DestinationRedshiftUpdateUploadingMethodS3StagingEncryption) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationRedshiftUpdateEncryption) UnmarshalJSON(data []byte) error {
- destinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption := new(DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption); err == nil {
- u.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption = destinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption
- u.Type = DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption
+ noEncryption := new(NoEncryption)
+ if err := utils.UnmarshalJSON(data, &noEncryption, "", true, true); err == nil {
+ u.NoEncryption = noEncryption
+ u.Type = DestinationRedshiftUpdateEncryptionUnionTypeNoEncryption
return nil
}
- destinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption := new(DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption); err == nil {
- u.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption = destinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption
- u.Type = DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionTypeDestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption
+ aesCBCEnvelopeEncryption := new(AESCBCEnvelopeEncryption)
+ if err := utils.UnmarshalJSON(data, &aesCBCEnvelopeEncryption, "", true, true); err == nil {
+ u.AESCBCEnvelopeEncryption = aesCBCEnvelopeEncryption
+ u.Type = DestinationRedshiftUpdateEncryptionUnionTypeAESCBCEnvelopeEncryption
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationRedshiftUpdateUploadingMethodS3StagingEncryption) MarshalJSON() ([]byte, error) {
- if u.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption != nil {
- return json.Marshal(u.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionNoEncryption)
+func (u DestinationRedshiftUpdateEncryption) MarshalJSON() ([]byte, error) {
+ if u.NoEncryption != nil {
+ return utils.MarshalJSON(u.NoEncryption, "", true)
}
- if u.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption != nil {
- return json.Marshal(u.DestinationRedshiftUpdateUploadingMethodS3StagingEncryptionAESCBCEnvelopeEncryption)
+ if u.AESCBCEnvelopeEncryption != nil {
+ return utils.MarshalJSON(u.AESCBCEnvelopeEncryption, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationRedshiftUpdateUploadingMethodS3StagingMethod string
+type DestinationRedshiftUpdateMethod string
const (
- DestinationRedshiftUpdateUploadingMethodS3StagingMethodS3Staging DestinationRedshiftUpdateUploadingMethodS3StagingMethod = "S3 Staging"
+ DestinationRedshiftUpdateMethodS3Staging DestinationRedshiftUpdateMethod = "S3 Staging"
)
-func (e DestinationRedshiftUpdateUploadingMethodS3StagingMethod) ToPointer() *DestinationRedshiftUpdateUploadingMethodS3StagingMethod {
+func (e DestinationRedshiftUpdateMethod) ToPointer() *DestinationRedshiftUpdateMethod {
return &e
}
-func (e *DestinationRedshiftUpdateUploadingMethodS3StagingMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftUpdateMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "S3 Staging":
- *e = DestinationRedshiftUpdateUploadingMethodS3StagingMethod(v)
+ *e = DestinationRedshiftUpdateMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftUpdateUploadingMethodS3StagingMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftUpdateMethod: %v", v)
}
}
-// DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion - The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.
-type DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion string
+// DestinationRedshiftUpdateS3BucketRegion - The region of the S3 staging bucket.
+type DestinationRedshiftUpdateS3BucketRegion string
const (
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionUnknown DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = ""
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionUsEast1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "us-east-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionUsEast2 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "us-east-2"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionUsWest1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "us-west-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionUsWest2 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "us-west-2"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionAfSouth1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "af-south-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionApEast1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "ap-east-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionApSouth1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "ap-south-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionApNortheast1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "ap-northeast-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionApNortheast2 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "ap-northeast-2"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionApNortheast3 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "ap-northeast-3"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionApSoutheast1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "ap-southeast-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionApSoutheast2 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "ap-southeast-2"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionCaCentral1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "ca-central-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionCnNorth1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "cn-north-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionCnNorthwest1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "cn-northwest-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionEuCentral1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "eu-central-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionEuNorth1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "eu-north-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionEuSouth1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "eu-south-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionEuWest1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "eu-west-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionEuWest2 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "eu-west-2"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionEuWest3 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "eu-west-3"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionSaEast1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "sa-east-1"
- DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegionMeSouth1 DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion = "me-south-1"
+ DestinationRedshiftUpdateS3BucketRegionUnknown DestinationRedshiftUpdateS3BucketRegion = ""
+ DestinationRedshiftUpdateS3BucketRegionUsEast1 DestinationRedshiftUpdateS3BucketRegion = "us-east-1"
+ DestinationRedshiftUpdateS3BucketRegionUsEast2 DestinationRedshiftUpdateS3BucketRegion = "us-east-2"
+ DestinationRedshiftUpdateS3BucketRegionUsWest1 DestinationRedshiftUpdateS3BucketRegion = "us-west-1"
+ DestinationRedshiftUpdateS3BucketRegionUsWest2 DestinationRedshiftUpdateS3BucketRegion = "us-west-2"
+ DestinationRedshiftUpdateS3BucketRegionAfSouth1 DestinationRedshiftUpdateS3BucketRegion = "af-south-1"
+ DestinationRedshiftUpdateS3BucketRegionApEast1 DestinationRedshiftUpdateS3BucketRegion = "ap-east-1"
+ DestinationRedshiftUpdateS3BucketRegionApSouth1 DestinationRedshiftUpdateS3BucketRegion = "ap-south-1"
+ DestinationRedshiftUpdateS3BucketRegionApNortheast1 DestinationRedshiftUpdateS3BucketRegion = "ap-northeast-1"
+ DestinationRedshiftUpdateS3BucketRegionApNortheast2 DestinationRedshiftUpdateS3BucketRegion = "ap-northeast-2"
+ DestinationRedshiftUpdateS3BucketRegionApNortheast3 DestinationRedshiftUpdateS3BucketRegion = "ap-northeast-3"
+ DestinationRedshiftUpdateS3BucketRegionApSoutheast1 DestinationRedshiftUpdateS3BucketRegion = "ap-southeast-1"
+ DestinationRedshiftUpdateS3BucketRegionApSoutheast2 DestinationRedshiftUpdateS3BucketRegion = "ap-southeast-2"
+ DestinationRedshiftUpdateS3BucketRegionCaCentral1 DestinationRedshiftUpdateS3BucketRegion = "ca-central-1"
+ DestinationRedshiftUpdateS3BucketRegionCnNorth1 DestinationRedshiftUpdateS3BucketRegion = "cn-north-1"
+ DestinationRedshiftUpdateS3BucketRegionCnNorthwest1 DestinationRedshiftUpdateS3BucketRegion = "cn-northwest-1"
+ DestinationRedshiftUpdateS3BucketRegionEuCentral1 DestinationRedshiftUpdateS3BucketRegion = "eu-central-1"
+ DestinationRedshiftUpdateS3BucketRegionEuNorth1 DestinationRedshiftUpdateS3BucketRegion = "eu-north-1"
+ DestinationRedshiftUpdateS3BucketRegionEuSouth1 DestinationRedshiftUpdateS3BucketRegion = "eu-south-1"
+ DestinationRedshiftUpdateS3BucketRegionEuWest1 DestinationRedshiftUpdateS3BucketRegion = "eu-west-1"
+ DestinationRedshiftUpdateS3BucketRegionEuWest2 DestinationRedshiftUpdateS3BucketRegion = "eu-west-2"
+ DestinationRedshiftUpdateS3BucketRegionEuWest3 DestinationRedshiftUpdateS3BucketRegion = "eu-west-3"
+ DestinationRedshiftUpdateS3BucketRegionSaEast1 DestinationRedshiftUpdateS3BucketRegion = "sa-east-1"
+ DestinationRedshiftUpdateS3BucketRegionMeSouth1 DestinationRedshiftUpdateS3BucketRegion = "me-south-1"
)
-func (e DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion) ToPointer() *DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion {
+func (e DestinationRedshiftUpdateS3BucketRegion) ToPointer() *DestinationRedshiftUpdateS3BucketRegion {
return &e
}
-func (e *DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion) UnmarshalJSON(data []byte) error {
+func (e *DestinationRedshiftUpdateS3BucketRegion) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -449,131 +619,175 @@ func (e *DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion) Unmars
case "sa-east-1":
fallthrough
case "me-south-1":
- *e = DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion(v)
+ *e = DestinationRedshiftUpdateS3BucketRegion(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion: %v", v)
+ return fmt.Errorf("invalid value for DestinationRedshiftUpdateS3BucketRegion: %v", v)
}
}
-// DestinationRedshiftUpdateUploadingMethodS3Staging - The method how the data will be uploaded to the database.
-type DestinationRedshiftUpdateUploadingMethodS3Staging struct {
+// S3Staging - (recommended) Uploads data to S3 and then uses a COPY to insert the data into Redshift. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.
+type S3Staging struct {
// This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See AWS docs on how to generate an access key ID and secret access key.
AccessKeyID string `json:"access_key_id"`
// How to encrypt the staging data
- Encryption *DestinationRedshiftUpdateUploadingMethodS3StagingEncryption `json:"encryption,omitempty"`
+ Encryption *DestinationRedshiftUpdateEncryption `json:"encryption,omitempty"`
// Number of file buffers allocated for writing data. Increasing this number is beneficial for connections using Change Data Capture (CDC) and up to the number of streams within a connection. Increasing the number of file buffers past the maximum number of streams has deteriorating effects
- FileBufferCount *int64 `json:"file_buffer_count,omitempty"`
+ FileBufferCount *int64 `default:"10" json:"file_buffer_count"`
// The pattern allows you to set the file-name format for the S3 staging file(s)
- FileNamePattern *string `json:"file_name_pattern,omitempty"`
- Method DestinationRedshiftUpdateUploadingMethodS3StagingMethod `json:"method"`
+ FileNamePattern *string `json:"file_name_pattern,omitempty"`
+ method DestinationRedshiftUpdateMethod `const:"S3 Staging" json:"method"`
// Whether to delete the staging files from S3 after completing the sync. See docs for details.
- PurgeStagingData *bool `json:"purge_staging_data,omitempty"`
- // The name of the staging S3 bucket to use if utilising a COPY strategy. COPY is recommended for production workloads for better speed and scalability. See AWS docs for more details.
+ PurgeStagingData *bool `default:"true" json:"purge_staging_data"`
+ // The name of the staging S3 bucket.
S3BucketName string `json:"s3_bucket_name"`
// The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See path's name recommendations for more details.
S3BucketPath *string `json:"s3_bucket_path,omitempty"`
- // The region of the S3 staging bucket to use if utilising a COPY strategy. See AWS docs for details.
- S3BucketRegion DestinationRedshiftUpdateUploadingMethodS3StagingS3BucketRegion `json:"s3_bucket_region"`
+ // The region of the S3 staging bucket.
+ S3BucketRegion *DestinationRedshiftUpdateS3BucketRegion `default:"" json:"s3_bucket_region"`
// The corresponding secret to the above access key id. See AWS docs on how to generate an access key ID and secret access key.
SecretAccessKey string `json:"secret_access_key"`
}
-type DestinationRedshiftUpdateUploadingMethodStandardMethod string
+func (s S3Staging) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
-const (
- DestinationRedshiftUpdateUploadingMethodStandardMethodStandard DestinationRedshiftUpdateUploadingMethodStandardMethod = "Standard"
-)
+func (s *S3Staging) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
-func (e DestinationRedshiftUpdateUploadingMethodStandardMethod) ToPointer() *DestinationRedshiftUpdateUploadingMethodStandardMethod {
- return &e
+func (o *S3Staging) GetAccessKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKeyID
}
-func (e *DestinationRedshiftUpdateUploadingMethodStandardMethod) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
+func (o *S3Staging) GetEncryption() *DestinationRedshiftUpdateEncryption {
+ if o == nil {
+ return nil
}
- switch v {
- case "Standard":
- *e = DestinationRedshiftUpdateUploadingMethodStandardMethod(v)
+ return o.Encryption
+}
+
+func (o *S3Staging) GetFileBufferCount() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.FileBufferCount
+}
+
+func (o *S3Staging) GetFileNamePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FileNamePattern
+}
+
+func (o *S3Staging) GetMethod() DestinationRedshiftUpdateMethod {
+ return DestinationRedshiftUpdateMethodS3Staging
+}
+
+func (o *S3Staging) GetPurgeStagingData() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.PurgeStagingData
+}
+
+func (o *S3Staging) GetS3BucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketName
+}
+
+func (o *S3Staging) GetS3BucketPath() *string {
+ if o == nil {
return nil
- default:
- return fmt.Errorf("invalid value for DestinationRedshiftUpdateUploadingMethodStandardMethod: %v", v)
}
+ return o.S3BucketPath
}
-// DestinationRedshiftUpdateUploadingMethodStandard - The method how the data will be uploaded to the database.
-type DestinationRedshiftUpdateUploadingMethodStandard struct {
- Method DestinationRedshiftUpdateUploadingMethodStandardMethod `json:"method"`
+func (o *S3Staging) GetS3BucketRegion() *DestinationRedshiftUpdateS3BucketRegion {
+ if o == nil {
+ return nil
+ }
+ return o.S3BucketRegion
+}
+
+func (o *S3Staging) GetSecretAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretAccessKey
}
-type DestinationRedshiftUpdateUploadingMethodType string
+type UploadingMethodType string
const (
- DestinationRedshiftUpdateUploadingMethodTypeDestinationRedshiftUpdateUploadingMethodStandard DestinationRedshiftUpdateUploadingMethodType = "destination-redshift-update_Uploading Method_Standard"
- DestinationRedshiftUpdateUploadingMethodTypeDestinationRedshiftUpdateUploadingMethodS3Staging DestinationRedshiftUpdateUploadingMethodType = "destination-redshift-update_Uploading Method_S3 Staging"
+ UploadingMethodTypeS3Staging UploadingMethodType = "S3 Staging"
+ UploadingMethodTypeStandard UploadingMethodType = "Standard"
)
-type DestinationRedshiftUpdateUploadingMethod struct {
- DestinationRedshiftUpdateUploadingMethodStandard *DestinationRedshiftUpdateUploadingMethodStandard
- DestinationRedshiftUpdateUploadingMethodS3Staging *DestinationRedshiftUpdateUploadingMethodS3Staging
+type UploadingMethod struct {
+ S3Staging *S3Staging
+ Standard *Standard
- Type DestinationRedshiftUpdateUploadingMethodType
+ Type UploadingMethodType
}
-func CreateDestinationRedshiftUpdateUploadingMethodDestinationRedshiftUpdateUploadingMethodStandard(destinationRedshiftUpdateUploadingMethodStandard DestinationRedshiftUpdateUploadingMethodStandard) DestinationRedshiftUpdateUploadingMethod {
- typ := DestinationRedshiftUpdateUploadingMethodTypeDestinationRedshiftUpdateUploadingMethodStandard
+func CreateUploadingMethodS3Staging(s3Staging S3Staging) UploadingMethod {
+ typ := UploadingMethodTypeS3Staging
- return DestinationRedshiftUpdateUploadingMethod{
- DestinationRedshiftUpdateUploadingMethodStandard: &destinationRedshiftUpdateUploadingMethodStandard,
- Type: typ,
+ return UploadingMethod{
+ S3Staging: &s3Staging,
+ Type: typ,
}
}
-func CreateDestinationRedshiftUpdateUploadingMethodDestinationRedshiftUpdateUploadingMethodS3Staging(destinationRedshiftUpdateUploadingMethodS3Staging DestinationRedshiftUpdateUploadingMethodS3Staging) DestinationRedshiftUpdateUploadingMethod {
- typ := DestinationRedshiftUpdateUploadingMethodTypeDestinationRedshiftUpdateUploadingMethodS3Staging
+func CreateUploadingMethodStandard(standard Standard) UploadingMethod {
+ typ := UploadingMethodTypeStandard
- return DestinationRedshiftUpdateUploadingMethod{
- DestinationRedshiftUpdateUploadingMethodS3Staging: &destinationRedshiftUpdateUploadingMethodS3Staging,
- Type: typ,
+ return UploadingMethod{
+ Standard: &standard,
+ Type: typ,
}
}
-func (u *DestinationRedshiftUpdateUploadingMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *UploadingMethod) UnmarshalJSON(data []byte) error {
- destinationRedshiftUpdateUploadingMethodStandard := new(DestinationRedshiftUpdateUploadingMethodStandard)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftUpdateUploadingMethodStandard); err == nil {
- u.DestinationRedshiftUpdateUploadingMethodStandard = destinationRedshiftUpdateUploadingMethodStandard
- u.Type = DestinationRedshiftUpdateUploadingMethodTypeDestinationRedshiftUpdateUploadingMethodStandard
+ standard := new(Standard)
+ if err := utils.UnmarshalJSON(data, &standard, "", true, true); err == nil {
+ u.Standard = standard
+ u.Type = UploadingMethodTypeStandard
return nil
}
- destinationRedshiftUpdateUploadingMethodS3Staging := new(DestinationRedshiftUpdateUploadingMethodS3Staging)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationRedshiftUpdateUploadingMethodS3Staging); err == nil {
- u.DestinationRedshiftUpdateUploadingMethodS3Staging = destinationRedshiftUpdateUploadingMethodS3Staging
- u.Type = DestinationRedshiftUpdateUploadingMethodTypeDestinationRedshiftUpdateUploadingMethodS3Staging
+ s3Staging := new(S3Staging)
+ if err := utils.UnmarshalJSON(data, &s3Staging, "", true, true); err == nil {
+ u.S3Staging = s3Staging
+ u.Type = UploadingMethodTypeS3Staging
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationRedshiftUpdateUploadingMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationRedshiftUpdateUploadingMethodStandard != nil {
- return json.Marshal(u.DestinationRedshiftUpdateUploadingMethodStandard)
+func (u UploadingMethod) MarshalJSON() ([]byte, error) {
+ if u.S3Staging != nil {
+ return utils.MarshalJSON(u.S3Staging, "", true)
}
- if u.DestinationRedshiftUpdateUploadingMethodS3Staging != nil {
- return json.Marshal(u.DestinationRedshiftUpdateUploadingMethodS3Staging)
+ if u.Standard != nil {
+ return utils.MarshalJSON(u.Standard, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationRedshiftUpdate struct {
@@ -586,13 +800,87 @@ type DestinationRedshiftUpdate struct {
// Password associated with the username.
Password string `json:"password"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5439" json:"port"`
// The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public".
- Schema string `json:"schema"`
+ Schema *string `default:"public" json:"schema"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *DestinationRedshiftUpdateSSHTunnelMethod `json:"tunnel_method,omitempty"`
- // The method how the data will be uploaded to the database.
- UploadingMethod *DestinationRedshiftUpdateUploadingMethod `json:"uploading_method,omitempty"`
+ // The way data will be uploaded to Redshift.
+ UploadingMethod *UploadingMethod `json:"uploading_method,omitempty"`
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationRedshiftUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationRedshiftUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationRedshiftUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationRedshiftUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationRedshiftUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationRedshiftUpdate) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *DestinationRedshiftUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationRedshiftUpdate) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *DestinationRedshiftUpdate) GetTunnelMethod() *DestinationRedshiftUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationRedshiftUpdate) GetUploadingMethod() *UploadingMethod {
+ if o == nil {
+ return nil
+ }
+ return o.UploadingMethod
+}
+
+func (o *DestinationRedshiftUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationresponse.go b/internal/sdk/pkg/models/shared/destinationresponse.go
old mode 100755
new mode 100644
index 7f43a8b59..48d7243fb
--- a/internal/sdk/pkg/models/shared/destinationresponse.go
+++ b/internal/sdk/pkg/models/shared/destinationresponse.go
@@ -11,3 +11,38 @@ type DestinationResponse struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationResponse) GetConfiguration() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.Configuration
+}
+
+func (o *DestinationResponse) GetDestinationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationID
+}
+
+func (o *DestinationResponse) GetDestinationType() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationType
+}
+
+func (o *DestinationResponse) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationResponse) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinations3.go b/internal/sdk/pkg/models/shared/destinations3.go
old mode 100755
new mode 100644
index bb6abc638..866016573
--- a/internal/sdk/pkg/models/shared/destinations3.go
+++ b/internal/sdk/pkg/models/shared/destinations3.go
@@ -3,54 +3,54 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationS3S3 string
+type S3 string
const (
- DestinationS3S3S3 DestinationS3S3 = "s3"
+ S3S3 S3 = "s3"
)
-func (e DestinationS3S3) ToPointer() *DestinationS3S3 {
+func (e S3) ToPointer() *S3 {
return &e
}
-func (e *DestinationS3S3) UnmarshalJSON(data []byte) error {
+func (e *S3) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "s3":
- *e = DestinationS3S3(v)
+ *e = S3(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3S3: %v", v)
+ return fmt.Errorf("invalid value for S3: %v", v)
}
}
-// DestinationS3OutputFormatParquetColumnarStorageCompressionCodec - The compression algorithm used to compress data pages.
-type DestinationS3OutputFormatParquetColumnarStorageCompressionCodec string
+// DestinationS3SchemasCompressionCodec - The compression algorithm used to compress data pages.
+type DestinationS3SchemasCompressionCodec string
const (
- DestinationS3OutputFormatParquetColumnarStorageCompressionCodecUncompressed DestinationS3OutputFormatParquetColumnarStorageCompressionCodec = "UNCOMPRESSED"
- DestinationS3OutputFormatParquetColumnarStorageCompressionCodecSnappy DestinationS3OutputFormatParquetColumnarStorageCompressionCodec = "SNAPPY"
- DestinationS3OutputFormatParquetColumnarStorageCompressionCodecGzip DestinationS3OutputFormatParquetColumnarStorageCompressionCodec = "GZIP"
- DestinationS3OutputFormatParquetColumnarStorageCompressionCodecLzo DestinationS3OutputFormatParquetColumnarStorageCompressionCodec = "LZO"
- DestinationS3OutputFormatParquetColumnarStorageCompressionCodecBrotli DestinationS3OutputFormatParquetColumnarStorageCompressionCodec = "BROTLI"
- DestinationS3OutputFormatParquetColumnarStorageCompressionCodecLz4 DestinationS3OutputFormatParquetColumnarStorageCompressionCodec = "LZ4"
- DestinationS3OutputFormatParquetColumnarStorageCompressionCodecZstd DestinationS3OutputFormatParquetColumnarStorageCompressionCodec = "ZSTD"
+ DestinationS3SchemasCompressionCodecUncompressed DestinationS3SchemasCompressionCodec = "UNCOMPRESSED"
+ DestinationS3SchemasCompressionCodecSnappy DestinationS3SchemasCompressionCodec = "SNAPPY"
+ DestinationS3SchemasCompressionCodecGzip DestinationS3SchemasCompressionCodec = "GZIP"
+ DestinationS3SchemasCompressionCodecLzo DestinationS3SchemasCompressionCodec = "LZO"
+ DestinationS3SchemasCompressionCodecBrotli DestinationS3SchemasCompressionCodec = "BROTLI"
+ DestinationS3SchemasCompressionCodecLz4 DestinationS3SchemasCompressionCodec = "LZ4"
+ DestinationS3SchemasCompressionCodecZstd DestinationS3SchemasCompressionCodec = "ZSTD"
)
-func (e DestinationS3OutputFormatParquetColumnarStorageCompressionCodec) ToPointer() *DestinationS3OutputFormatParquetColumnarStorageCompressionCodec {
+func (e DestinationS3SchemasCompressionCodec) ToPointer() *DestinationS3SchemasCompressionCodec {
return &e
}
-func (e *DestinationS3OutputFormatParquetColumnarStorageCompressionCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasCompressionCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -69,193 +69,284 @@ func (e *DestinationS3OutputFormatParquetColumnarStorageCompressionCodec) Unmars
case "LZ4":
fallthrough
case "ZSTD":
- *e = DestinationS3OutputFormatParquetColumnarStorageCompressionCodec(v)
+ *e = DestinationS3SchemasCompressionCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatParquetColumnarStorageCompressionCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasCompressionCodec: %v", v)
}
}
-type DestinationS3OutputFormatParquetColumnarStorageFormatType string
+type DestinationS3SchemasFormatOutputFormatFormatType string
const (
- DestinationS3OutputFormatParquetColumnarStorageFormatTypeParquet DestinationS3OutputFormatParquetColumnarStorageFormatType = "Parquet"
+ DestinationS3SchemasFormatOutputFormatFormatTypeParquet DestinationS3SchemasFormatOutputFormatFormatType = "Parquet"
)
-func (e DestinationS3OutputFormatParquetColumnarStorageFormatType) ToPointer() *DestinationS3OutputFormatParquetColumnarStorageFormatType {
+func (e DestinationS3SchemasFormatOutputFormatFormatType) ToPointer() *DestinationS3SchemasFormatOutputFormatFormatType {
return &e
}
-func (e *DestinationS3OutputFormatParquetColumnarStorageFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasFormatOutputFormatFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Parquet":
- *e = DestinationS3OutputFormatParquetColumnarStorageFormatType(v)
+ *e = DestinationS3SchemasFormatOutputFormatFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatParquetColumnarStorageFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasFormatOutputFormatFormatType: %v", v)
}
}
-// DestinationS3OutputFormatParquetColumnarStorage - Format of the data output. See here for more details
-type DestinationS3OutputFormatParquetColumnarStorage struct {
+// DestinationS3ParquetColumnarStorage - Format of the data output. See here for more details
+type DestinationS3ParquetColumnarStorage struct {
// This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
- BlockSizeMb *int64 `json:"block_size_mb,omitempty"`
+ BlockSizeMb *int64 `default:"128" json:"block_size_mb"`
// The compression algorithm used to compress data pages.
- CompressionCodec *DestinationS3OutputFormatParquetColumnarStorageCompressionCodec `json:"compression_codec,omitempty"`
+ CompressionCodec *DestinationS3SchemasCompressionCodec `default:"UNCOMPRESSED" json:"compression_codec"`
// Default: true.
- DictionaryEncoding *bool `json:"dictionary_encoding,omitempty"`
+ DictionaryEncoding *bool `default:"true" json:"dictionary_encoding"`
// There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
- DictionaryPageSizeKb *int64 `json:"dictionary_page_size_kb,omitempty"`
- FormatType DestinationS3OutputFormatParquetColumnarStorageFormatType `json:"format_type"`
+ DictionaryPageSizeKb *int64 `default:"1024" json:"dictionary_page_size_kb"`
+ FormatType *DestinationS3SchemasFormatOutputFormatFormatType `default:"Parquet" json:"format_type"`
// Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
- MaxPaddingSizeMb *int64 `json:"max_padding_size_mb,omitempty"`
+ MaxPaddingSizeMb *int64 `default:"8" json:"max_padding_size_mb"`
// The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
- PageSizeKb *int64 `json:"page_size_kb,omitempty"`
+ PageSizeKb *int64 `default:"1024" json:"page_size_kb"`
}
-type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType string
+func (d DestinationS3ParquetColumnarStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3ParquetColumnarStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3ParquetColumnarStorage) GetBlockSizeMb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BlockSizeMb
+}
+
+func (o *DestinationS3ParquetColumnarStorage) GetCompressionCodec() *DestinationS3SchemasCompressionCodec {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionCodec
+}
+
+func (o *DestinationS3ParquetColumnarStorage) GetDictionaryEncoding() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DictionaryEncoding
+}
+
+func (o *DestinationS3ParquetColumnarStorage) GetDictionaryPageSizeKb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DictionaryPageSizeKb
+}
+
+func (o *DestinationS3ParquetColumnarStorage) GetFormatType() *DestinationS3SchemasFormatOutputFormatFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
+}
+
+func (o *DestinationS3ParquetColumnarStorage) GetMaxPaddingSizeMb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.MaxPaddingSizeMb
+}
+
+func (o *DestinationS3ParquetColumnarStorage) GetPageSizeKb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PageSizeKb
+}
+
+type DestinationS3SchemasFormatOutputFormatCompressionType string
const (
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeGzip DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType = "GZIP"
+ DestinationS3SchemasFormatOutputFormatCompressionTypeGzip DestinationS3SchemasFormatOutputFormatCompressionType = "GZIP"
)
-func (e DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) ToPointer() *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType {
+func (e DestinationS3SchemasFormatOutputFormatCompressionType) ToPointer() *DestinationS3SchemasFormatOutputFormatCompressionType {
return &e
}
-func (e *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasFormatOutputFormatCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GZIP":
- *e = DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(v)
+ *e = DestinationS3SchemasFormatOutputFormatCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasFormatOutputFormatCompressionType: %v", v)
+ }
+}
+
+// DestinationS3SchemasGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationS3SchemasGZIP struct {
+ CompressionType *DestinationS3SchemasFormatOutputFormatCompressionType `default:"GZIP" json:"compression_type"`
+}
+
+func (d DestinationS3SchemasGZIP) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3SchemasGZIP) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP struct {
- CompressionType *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType `json:"compression_type,omitempty"`
+func (o *DestinationS3SchemasGZIP) GetCompressionType() *DestinationS3SchemasFormatOutputFormatCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
}
-type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType string
+type DestinationS3SchemasFormatCompressionType string
const (
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeNoCompression DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType = "No Compression"
+ DestinationS3SchemasFormatCompressionTypeNoCompression DestinationS3SchemasFormatCompressionType = "No Compression"
)
-func (e DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) ToPointer() *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType {
+func (e DestinationS3SchemasFormatCompressionType) ToPointer() *DestinationS3SchemasFormatCompressionType {
return &e
}
-func (e *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasFormatCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "No Compression":
- *e = DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(v)
+ *e = DestinationS3SchemasFormatCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasFormatCompressionType: %v", v)
+ }
+}
+
+// DestinationS3SchemasFormatNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationS3SchemasFormatNoCompression struct {
+ CompressionType *DestinationS3SchemasFormatCompressionType `default:"No Compression" json:"compression_type"`
+}
+
+func (d DestinationS3SchemasFormatNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3SchemasFormatNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression struct {
- CompressionType *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType `json:"compression_type,omitempty"`
+func (o *DestinationS3SchemasFormatNoCompression) GetCompressionType() *DestinationS3SchemasFormatCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
}
-type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionType string
+type DestinationS3SchemasCompressionUnionType string
const (
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-s3_Output Format_JSON Lines: Newline-delimited JSON_Compression_No Compression"
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-s3_Output Format_JSON Lines: Newline-delimited JSON_Compression_GZIP"
+ DestinationS3SchemasCompressionUnionTypeDestinationS3SchemasFormatNoCompression DestinationS3SchemasCompressionUnionType = "destination-s3_Schemas_format_No Compression"
+ DestinationS3SchemasCompressionUnionTypeDestinationS3SchemasGZIP DestinationS3SchemasCompressionUnionType = "destination-s3_Schemas_GZIP"
)
-type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+type DestinationS3SchemasCompression struct {
+ DestinationS3SchemasFormatNoCompression *DestinationS3SchemasFormatNoCompression
+ DestinationS3SchemasGZIP *DestinationS3SchemasGZIP
- Type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionType
+ Type DestinationS3SchemasCompressionUnionType
}
-func CreateDestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression(destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression) DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+func CreateDestinationS3SchemasCompressionDestinationS3SchemasFormatNoCompression(destinationS3SchemasFormatNoCompression DestinationS3SchemasFormatNoCompression) DestinationS3SchemasCompression {
+ typ := DestinationS3SchemasCompressionUnionTypeDestinationS3SchemasFormatNoCompression
- return DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: &destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
- Type: typ,
+ return DestinationS3SchemasCompression{
+ DestinationS3SchemasFormatNoCompression: &destinationS3SchemasFormatNoCompression,
+ Type: typ,
}
}
-func CreateDestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP(destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP) DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+func CreateDestinationS3SchemasCompressionDestinationS3SchemasGZIP(destinationS3SchemasGZIP DestinationS3SchemasGZIP) DestinationS3SchemasCompression {
+ typ := DestinationS3SchemasCompressionUnionTypeDestinationS3SchemasGZIP
- return DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: &destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
- Type: typ,
+ return DestinationS3SchemasCompression{
+ DestinationS3SchemasGZIP: &destinationS3SchemasGZIP,
+ Type: typ,
}
}
-func (u *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationS3SchemasCompression) UnmarshalJSON(data []byte) error {
- destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression := new(DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression); err == nil {
- u.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- u.Type = DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+ destinationS3SchemasFormatNoCompression := new(DestinationS3SchemasFormatNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationS3SchemasFormatNoCompression, "", true, true); err == nil {
+ u.DestinationS3SchemasFormatNoCompression = destinationS3SchemasFormatNoCompression
+ u.Type = DestinationS3SchemasCompressionUnionTypeDestinationS3SchemasFormatNoCompression
return nil
}
- destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP := new(DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP); err == nil {
- u.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = destinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- u.Type = DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+ destinationS3SchemasGZIP := new(DestinationS3SchemasGZIP)
+ if err := utils.UnmarshalJSON(data, &destinationS3SchemasGZIP, "", true, true); err == nil {
+ u.DestinationS3SchemasGZIP = destinationS3SchemasGZIP
+ u.Type = DestinationS3SchemasCompressionUnionTypeDestinationS3SchemasGZIP
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression) MarshalJSON() ([]byte, error) {
- if u.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- return json.Marshal(u.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
+func (u DestinationS3SchemasCompression) MarshalJSON() ([]byte, error) {
+ if u.DestinationS3SchemasFormatNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationS3SchemasFormatNoCompression, "", true)
}
- if u.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- return json.Marshal(u.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
+ if u.DestinationS3SchemasGZIP != nil {
+ return utils.MarshalJSON(u.DestinationS3SchemasGZIP, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening - Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
-type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening string
+// DestinationS3SchemasFlattening - Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
+type DestinationS3SchemasFlattening string
const (
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlatteningNoFlattening DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening = "No flattening"
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlatteningRootLevelFlattening DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening = "Root level flattening"
+ DestinationS3SchemasFlatteningNoFlattening DestinationS3SchemasFlattening = "No flattening"
+ DestinationS3SchemasFlatteningRootLevelFlattening DestinationS3SchemasFlattening = "Root level flattening"
)
-func (e DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening) ToPointer() *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening {
+func (e DestinationS3SchemasFlattening) ToPointer() *DestinationS3SchemasFlattening {
return &e
}
-func (e *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasFlattening) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -264,185 +355,248 @@ func (e *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening) Unmar
case "No flattening":
fallthrough
case "Root level flattening":
- *e = DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening(v)
+ *e = DestinationS3SchemasFlattening(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasFlattening: %v", v)
}
}
-type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFormatType string
+type DestinationS3SchemasFormatFormatType string
const (
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFormatTypeJsonl DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFormatType = "JSONL"
+ DestinationS3SchemasFormatFormatTypeJsonl DestinationS3SchemasFormatFormatType = "JSONL"
)
-func (e DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFormatType) ToPointer() *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFormatType {
+func (e DestinationS3SchemasFormatFormatType) ToPointer() *DestinationS3SchemasFormatFormatType {
return &e
}
-func (e *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasFormatFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "JSONL":
- *e = DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFormatType(v)
+ *e = DestinationS3SchemasFormatFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasFormatFormatType: %v", v)
}
}
-// DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON - Format of the data output. See here for more details
-type DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON struct {
+// DestinationS3JSONLinesNewlineDelimitedJSON - Format of the data output. See here for more details
+type DestinationS3JSONLinesNewlineDelimitedJSON struct {
// Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
- Compression *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONCompression `json:"compression,omitempty"`
+ Compression *DestinationS3SchemasCompression `json:"compression,omitempty"`
// Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
- Flattening *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFlattening `json:"flattening,omitempty"`
- FormatType DestinationS3OutputFormatJSONLinesNewlineDelimitedJSONFormatType `json:"format_type"`
+ Flattening *DestinationS3SchemasFlattening `default:"No flattening" json:"flattening"`
+ FormatType *DestinationS3SchemasFormatFormatType `default:"JSONL" json:"format_type"`
+}
+
+func (d DestinationS3JSONLinesNewlineDelimitedJSON) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType string
+func (d *DestinationS3JSONLinesNewlineDelimitedJSON) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3JSONLinesNewlineDelimitedJSON) GetCompression() *DestinationS3SchemasCompression {
+ if o == nil {
+ return nil
+ }
+ return o.Compression
+}
+
+func (o *DestinationS3JSONLinesNewlineDelimitedJSON) GetFlattening() *DestinationS3SchemasFlattening {
+ if o == nil {
+ return nil
+ }
+ return o.Flattening
+}
+
+func (o *DestinationS3JSONLinesNewlineDelimitedJSON) GetFormatType() *DestinationS3SchemasFormatFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
+}
+
+type DestinationS3SchemasCompressionType string
const (
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeGzip DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType = "GZIP"
+ DestinationS3SchemasCompressionTypeGzip DestinationS3SchemasCompressionType = "GZIP"
)
-func (e DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType) ToPointer() *DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType {
+func (e DestinationS3SchemasCompressionType) ToPointer() *DestinationS3SchemasCompressionType {
return &e
}
-func (e *DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GZIP":
- *e = DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType(v)
+ *e = DestinationS3SchemasCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasCompressionType: %v", v)
+ }
+}
+
+// DestinationS3GZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
+type DestinationS3GZIP struct {
+ CompressionType *DestinationS3SchemasCompressionType `default:"GZIP" json:"compression_type"`
+}
+
+func (d DestinationS3GZIP) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3GZIP) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
-type DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP struct {
- CompressionType *DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType `json:"compression_type,omitempty"`
+func (o *DestinationS3GZIP) GetCompressionType() *DestinationS3SchemasCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
}
-type DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType string
+type DestinationS3CompressionType string
const (
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionTypeNoCompression DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType = "No Compression"
+ DestinationS3CompressionTypeNoCompression DestinationS3CompressionType = "No Compression"
)
-func (e DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType) ToPointer() *DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType {
+func (e DestinationS3CompressionType) ToPointer() *DestinationS3CompressionType {
return &e
}
-func (e *DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3CompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "No Compression":
- *e = DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType(v)
+ *e = DestinationS3CompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3CompressionType: %v", v)
+ }
+}
+
+// DestinationS3SchemasNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
+type DestinationS3SchemasNoCompression struct {
+ CompressionType *DestinationS3CompressionType `default:"No Compression" json:"compression_type"`
+}
+
+func (d DestinationS3SchemasNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3SchemasNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
-type DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression struct {
- CompressionType *DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType `json:"compression_type,omitempty"`
+func (o *DestinationS3SchemasNoCompression) GetCompressionType() *DestinationS3CompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
}
-type DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionType string
+type DestinationS3CompressionUnionType string
const (
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionType = "destination-s3_Output Format_CSV: Comma-Separated Values_Compression_No Compression"
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionType = "destination-s3_Output Format_CSV: Comma-Separated Values_Compression_GZIP"
+ DestinationS3CompressionUnionTypeDestinationS3SchemasNoCompression DestinationS3CompressionUnionType = "destination-s3_Schemas_No Compression"
+ DestinationS3CompressionUnionTypeDestinationS3GZIP DestinationS3CompressionUnionType = "destination-s3_GZIP"
)
-type DestinationS3OutputFormatCSVCommaSeparatedValuesCompression struct {
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression *DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP *DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP
+type DestinationS3Compression struct {
+ DestinationS3SchemasNoCompression *DestinationS3SchemasNoCompression
+ DestinationS3GZIP *DestinationS3GZIP
- Type DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionType
+ Type DestinationS3CompressionUnionType
}
-func CreateDestinationS3OutputFormatCSVCommaSeparatedValuesCompressionDestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression(destinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression) DestinationS3OutputFormatCSVCommaSeparatedValuesCompression {
- typ := DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression
+func CreateDestinationS3CompressionDestinationS3SchemasNoCompression(destinationS3SchemasNoCompression DestinationS3SchemasNoCompression) DestinationS3Compression {
+ typ := DestinationS3CompressionUnionTypeDestinationS3SchemasNoCompression
- return DestinationS3OutputFormatCSVCommaSeparatedValuesCompression{
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression: &destinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression,
- Type: typ,
+ return DestinationS3Compression{
+ DestinationS3SchemasNoCompression: &destinationS3SchemasNoCompression,
+ Type: typ,
}
}
-func CreateDestinationS3OutputFormatCSVCommaSeparatedValuesCompressionDestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP(destinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP) DestinationS3OutputFormatCSVCommaSeparatedValuesCompression {
- typ := DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP
+func CreateDestinationS3CompressionDestinationS3GZIP(destinationS3GZIP DestinationS3GZIP) DestinationS3Compression {
+ typ := DestinationS3CompressionUnionTypeDestinationS3GZIP
- return DestinationS3OutputFormatCSVCommaSeparatedValuesCompression{
- DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP: &destinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP,
- Type: typ,
+ return DestinationS3Compression{
+ DestinationS3GZIP: &destinationS3GZIP,
+ Type: typ,
}
}
-func (u *DestinationS3OutputFormatCSVCommaSeparatedValuesCompression) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationS3Compression) UnmarshalJSON(data []byte) error {
- destinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression := new(DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression); err == nil {
- u.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression = destinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- u.Type = DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression
+ destinationS3SchemasNoCompression := new(DestinationS3SchemasNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationS3SchemasNoCompression, "", true, true); err == nil {
+ u.DestinationS3SchemasNoCompression = destinationS3SchemasNoCompression
+ u.Type = DestinationS3CompressionUnionTypeDestinationS3SchemasNoCompression
return nil
}
- destinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP := new(DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP); err == nil {
- u.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP = destinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP
- u.Type = DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP
+ destinationS3GZIP := new(DestinationS3GZIP)
+ if err := utils.UnmarshalJSON(data, &destinationS3GZIP, "", true, true); err == nil {
+ u.DestinationS3GZIP = destinationS3GZIP
+ u.Type = DestinationS3CompressionUnionTypeDestinationS3GZIP
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationS3OutputFormatCSVCommaSeparatedValuesCompression) MarshalJSON() ([]byte, error) {
- if u.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- return json.Marshal(u.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionNoCompression)
+func (u DestinationS3Compression) MarshalJSON() ([]byte, error) {
+ if u.DestinationS3SchemasNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationS3SchemasNoCompression, "", true)
}
- if u.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- return json.Marshal(u.DestinationS3OutputFormatCSVCommaSeparatedValuesCompressionGZIP)
+ if u.DestinationS3GZIP != nil {
+ return utils.MarshalJSON(u.DestinationS3GZIP, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening - Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-type DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening string
+// DestinationS3Flattening - Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
+type DestinationS3Flattening string
const (
- DestinationS3OutputFormatCSVCommaSeparatedValuesFlatteningNoFlattening DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening = "No flattening"
- DestinationS3OutputFormatCSVCommaSeparatedValuesFlatteningRootLevelFlattening DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening = "Root level flattening"
+ DestinationS3FlatteningNoFlattening DestinationS3Flattening = "No flattening"
+ DestinationS3FlatteningRootLevelFlattening DestinationS3Flattening = "Root level flattening"
)
-func (e DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening) ToPointer() *DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening {
+func (e DestinationS3Flattening) ToPointer() *DestinationS3Flattening {
return &e
}
-func (e *DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3Flattening) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -451,513 +605,684 @@ func (e *DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening) UnmarshalJS
case "No flattening":
fallthrough
case "Root level flattening":
- *e = DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening(v)
+ *e = DestinationS3Flattening(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3Flattening: %v", v)
}
}
-type DestinationS3OutputFormatCSVCommaSeparatedValuesFormatType string
+type DestinationS3SchemasFormatType string
const (
- DestinationS3OutputFormatCSVCommaSeparatedValuesFormatTypeCsv DestinationS3OutputFormatCSVCommaSeparatedValuesFormatType = "CSV"
+ DestinationS3SchemasFormatTypeCsv DestinationS3SchemasFormatType = "CSV"
)
-func (e DestinationS3OutputFormatCSVCommaSeparatedValuesFormatType) ToPointer() *DestinationS3OutputFormatCSVCommaSeparatedValuesFormatType {
+func (e DestinationS3SchemasFormatType) ToPointer() *DestinationS3SchemasFormatType {
return &e
}
-func (e *DestinationS3OutputFormatCSVCommaSeparatedValuesFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CSV":
- *e = DestinationS3OutputFormatCSVCommaSeparatedValuesFormatType(v)
+ *e = DestinationS3SchemasFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatCSVCommaSeparatedValuesFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasFormatType: %v", v)
}
}
-// DestinationS3OutputFormatCSVCommaSeparatedValues - Format of the data output. See here for more details
-type DestinationS3OutputFormatCSVCommaSeparatedValues struct {
+// DestinationS3CSVCommaSeparatedValues - Format of the data output. See here for more details
+type DestinationS3CSVCommaSeparatedValues struct {
// Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
- Compression *DestinationS3OutputFormatCSVCommaSeparatedValuesCompression `json:"compression,omitempty"`
+ Compression *DestinationS3Compression `json:"compression,omitempty"`
// Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
- Flattening DestinationS3OutputFormatCSVCommaSeparatedValuesFlattening `json:"flattening"`
- FormatType DestinationS3OutputFormatCSVCommaSeparatedValuesFormatType `json:"format_type"`
+ Flattening *DestinationS3Flattening `default:"No flattening" json:"flattening"`
+ FormatType *DestinationS3SchemasFormatType `default:"CSV" json:"format_type"`
+}
+
+func (d DestinationS3CSVCommaSeparatedValues) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3CSVCommaSeparatedValues) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3CSVCommaSeparatedValues) GetCompression() *DestinationS3Compression {
+ if o == nil {
+ return nil
+ }
+ return o.Compression
}
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappyCodec string
+func (o *DestinationS3CSVCommaSeparatedValues) GetFlattening() *DestinationS3Flattening {
+ if o == nil {
+ return nil
+ }
+ return o.Flattening
+}
+
+func (o *DestinationS3CSVCommaSeparatedValues) GetFormatType() *DestinationS3SchemasFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
+}
+
+type DestinationS3SchemasFormatOutputFormat1CompressionCodecCodec string
const (
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappyCodecSnappy DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappyCodec = "snappy"
+ DestinationS3SchemasFormatOutputFormat1CompressionCodecCodecSnappy DestinationS3SchemasFormatOutputFormat1CompressionCodecCodec = "snappy"
)
-func (e DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappyCodec) ToPointer() *DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappyCodec {
+func (e DestinationS3SchemasFormatOutputFormat1CompressionCodecCodec) ToPointer() *DestinationS3SchemasFormatOutputFormat1CompressionCodecCodec {
return &e
}
-func (e *DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappyCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasFormatOutputFormat1CompressionCodecCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "snappy":
- *e = DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappyCodec(v)
+ *e = DestinationS3SchemasFormatOutputFormat1CompressionCodecCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappyCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasFormatOutputFormat1CompressionCodecCodec: %v", v)
}
}
-// DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy struct {
- Codec DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappyCodec `json:"codec"`
+// DestinationS3Snappy - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3Snappy struct {
+ Codec *DestinationS3SchemasFormatOutputFormat1CompressionCodecCodec `default:"snappy" json:"codec"`
+}
+
+func (d DestinationS3Snappy) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandardCodec string
+func (d *DestinationS3Snappy) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3Snappy) GetCodec() *DestinationS3SchemasFormatOutputFormat1CompressionCodecCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+type DestinationS3SchemasFormatOutputFormat1Codec string
const (
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandardCodecZstandard DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandardCodec = "zstandard"
+ DestinationS3SchemasFormatOutputFormat1CodecZstandard DestinationS3SchemasFormatOutputFormat1Codec = "zstandard"
)
-func (e DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandardCodec) ToPointer() *DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandardCodec {
+func (e DestinationS3SchemasFormatOutputFormat1Codec) ToPointer() *DestinationS3SchemasFormatOutputFormat1Codec {
return &e
}
-func (e *DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandardCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasFormatOutputFormat1Codec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zstandard":
- *e = DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandardCodec(v)
+ *e = DestinationS3SchemasFormatOutputFormat1Codec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandardCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasFormatOutputFormat1Codec: %v", v)
}
}
-// DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard struct {
- Codec DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandardCodec `json:"codec"`
+// DestinationS3Zstandard - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3Zstandard struct {
+ Codec *DestinationS3SchemasFormatOutputFormat1Codec `default:"zstandard" json:"codec"`
// Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
- CompressionLevel int64 `json:"compression_level"`
+ CompressionLevel *int64 `default:"3" json:"compression_level"`
// If true, include a checksum with each data block.
- IncludeChecksum *bool `json:"include_checksum,omitempty"`
+ IncludeChecksum *bool `default:"false" json:"include_checksum"`
+}
+
+func (d DestinationS3Zstandard) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecXzCodec string
+func (d *DestinationS3Zstandard) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3Zstandard) GetCodec() *DestinationS3SchemasFormatOutputFormat1Codec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *DestinationS3Zstandard) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
+}
+
+func (o *DestinationS3Zstandard) GetIncludeChecksum() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeChecksum
+}
+
+type DestinationS3SchemasFormatOutputFormatCodec string
const (
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecXzCodecXz DestinationS3OutputFormatAvroApacheAvroCompressionCodecXzCodec = "xz"
+ DestinationS3SchemasFormatOutputFormatCodecXz DestinationS3SchemasFormatOutputFormatCodec = "xz"
)
-func (e DestinationS3OutputFormatAvroApacheAvroCompressionCodecXzCodec) ToPointer() *DestinationS3OutputFormatAvroApacheAvroCompressionCodecXzCodec {
+func (e DestinationS3SchemasFormatOutputFormatCodec) ToPointer() *DestinationS3SchemasFormatOutputFormatCodec {
return &e
}
-func (e *DestinationS3OutputFormatAvroApacheAvroCompressionCodecXzCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasFormatOutputFormatCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "xz":
- *e = DestinationS3OutputFormatAvroApacheAvroCompressionCodecXzCodec(v)
+ *e = DestinationS3SchemasFormatOutputFormatCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatAvroApacheAvroCompressionCodecXzCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasFormatOutputFormatCodec: %v", v)
}
}
-// DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz struct {
- Codec DestinationS3OutputFormatAvroApacheAvroCompressionCodecXzCodec `json:"codec"`
+// DestinationS3Xz - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3Xz struct {
+ Codec *DestinationS3SchemasFormatOutputFormatCodec `default:"xz" json:"codec"`
// See here for details.
- CompressionLevel int64 `json:"compression_level"`
+ CompressionLevel *int64 `default:"6" json:"compression_level"`
}
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2Codec string
+func (d DestinationS3Xz) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3Xz) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3Xz) GetCodec() *DestinationS3SchemasFormatOutputFormatCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *DestinationS3Xz) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
+}
+
+type DestinationS3SchemasFormatCodec string
const (
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2CodecBzip2 DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2Codec = "bzip2"
+ DestinationS3SchemasFormatCodecBzip2 DestinationS3SchemasFormatCodec = "bzip2"
)
-func (e DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2Codec) ToPointer() *DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2Codec {
+func (e DestinationS3SchemasFormatCodec) ToPointer() *DestinationS3SchemasFormatCodec {
return &e
}
-func (e *DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2Codec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasFormatCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "bzip2":
- *e = DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2Codec(v)
+ *e = DestinationS3SchemasFormatCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2Codec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasFormatCodec: %v", v)
+ }
+}
+
+// DestinationS3Bzip2 - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3Bzip2 struct {
+ Codec *DestinationS3SchemasFormatCodec `default:"bzip2" json:"codec"`
+}
+
+func (d DestinationS3Bzip2) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3Bzip2) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 struct {
- Codec DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2Codec `json:"codec"`
+func (o *DestinationS3Bzip2) GetCodec() *DestinationS3SchemasFormatCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
}
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflateCodec string
+type DestinationS3SchemasCodec string
const (
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflateCodecDeflate DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflateCodec = "Deflate"
+ DestinationS3SchemasCodecDeflate DestinationS3SchemasCodec = "Deflate"
)
-func (e DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflateCodec) ToPointer() *DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflateCodec {
+func (e DestinationS3SchemasCodec) ToPointer() *DestinationS3SchemasCodec {
return &e
}
-func (e *DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflateCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3SchemasCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Deflate":
- *e = DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflateCodec(v)
+ *e = DestinationS3SchemasCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflateCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3SchemasCodec: %v", v)
}
}
-// DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate struct {
- Codec DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflateCodec `json:"codec"`
+// DestinationS3Deflate - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3Deflate struct {
+ Codec *DestinationS3SchemasCodec `default:"Deflate" json:"codec"`
// 0: no compression & fastest, 9: best compression & slowest.
- CompressionLevel int64 `json:"compression_level"`
+ CompressionLevel *int64 `default:"0" json:"compression_level"`
+}
+
+func (d DestinationS3Deflate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3Deflate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3Deflate) GetCodec() *DestinationS3SchemasCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *DestinationS3Deflate) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
}
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec string
+type DestinationS3Codec string
const (
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompressionCodecNoCompression DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec = "no compression"
+ DestinationS3CodecNoCompression DestinationS3Codec = "no compression"
)
-func (e DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec) ToPointer() *DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec {
+func (e DestinationS3Codec) ToPointer() *DestinationS3Codec {
return &e
}
-func (e *DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3Codec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "no compression":
- *e = DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec(v)
+ *e = DestinationS3Codec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3Codec: %v", v)
+ }
+}
+
+// DestinationS3NoCompression - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3NoCompression struct {
+ Codec *DestinationS3Codec `default:"no compression" json:"codec"`
+}
+
+func (d DestinationS3NoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3NoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression struct {
- Codec DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec `json:"codec"`
+func (o *DestinationS3NoCompression) GetCodec() *DestinationS3Codec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
}
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodecType string
+type DestinationS3CompressionCodecType string
const (
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression DestinationS3OutputFormatAvroApacheAvroCompressionCodecType = "destination-s3_Output Format_Avro: Apache Avro_Compression Codec_No Compression"
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate DestinationS3OutputFormatAvroApacheAvroCompressionCodecType = "destination-s3_Output Format_Avro: Apache Avro_Compression Codec_Deflate"
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 DestinationS3OutputFormatAvroApacheAvroCompressionCodecType = "destination-s3_Output Format_Avro: Apache Avro_Compression Codec_bzip2"
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecXz DestinationS3OutputFormatAvroApacheAvroCompressionCodecType = "destination-s3_Output Format_Avro: Apache Avro_Compression Codec_xz"
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard DestinationS3OutputFormatAvroApacheAvroCompressionCodecType = "destination-s3_Output Format_Avro: Apache Avro_Compression Codec_zstandard"
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy DestinationS3OutputFormatAvroApacheAvroCompressionCodecType = "destination-s3_Output Format_Avro: Apache Avro_Compression Codec_snappy"
+ DestinationS3CompressionCodecTypeDestinationS3NoCompression DestinationS3CompressionCodecType = "destination-s3_No Compression"
+ DestinationS3CompressionCodecTypeDestinationS3Deflate DestinationS3CompressionCodecType = "destination-s3_Deflate"
+ DestinationS3CompressionCodecTypeDestinationS3Bzip2 DestinationS3CompressionCodecType = "destination-s3_bzip2"
+ DestinationS3CompressionCodecTypeDestinationS3Xz DestinationS3CompressionCodecType = "destination-s3_xz"
+ DestinationS3CompressionCodecTypeDestinationS3Zstandard DestinationS3CompressionCodecType = "destination-s3_zstandard"
+ DestinationS3CompressionCodecTypeDestinationS3Snappy DestinationS3CompressionCodecType = "destination-s3_snappy"
)
-type DestinationS3OutputFormatAvroApacheAvroCompressionCodec struct {
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression *DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate *DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 *DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz *DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard *DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy *DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy
+type DestinationS3CompressionCodec struct {
+ DestinationS3NoCompression *DestinationS3NoCompression
+ DestinationS3Deflate *DestinationS3Deflate
+ DestinationS3Bzip2 *DestinationS3Bzip2
+ DestinationS3Xz *DestinationS3Xz
+ DestinationS3Zstandard *DestinationS3Zstandard
+ DestinationS3Snappy *DestinationS3Snappy
- Type DestinationS3OutputFormatAvroApacheAvroCompressionCodecType
+ Type DestinationS3CompressionCodecType
}
-func CreateDestinationS3OutputFormatAvroApacheAvroCompressionCodecDestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression(destinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression) DestinationS3OutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression
+func CreateDestinationS3CompressionCodecDestinationS3NoCompression(destinationS3NoCompression DestinationS3NoCompression) DestinationS3CompressionCodec {
+ typ := DestinationS3CompressionCodecTypeDestinationS3NoCompression
- return DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression: &destinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression,
- Type: typ,
+ return DestinationS3CompressionCodec{
+ DestinationS3NoCompression: &destinationS3NoCompression,
+ Type: typ,
}
}
-func CreateDestinationS3OutputFormatAvroApacheAvroCompressionCodecDestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate(destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate) DestinationS3OutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate
+func CreateDestinationS3CompressionCodecDestinationS3Deflate(destinationS3Deflate DestinationS3Deflate) DestinationS3CompressionCodec {
+ typ := DestinationS3CompressionCodecTypeDestinationS3Deflate
- return DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate: &destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate,
- Type: typ,
+ return DestinationS3CompressionCodec{
+ DestinationS3Deflate: &destinationS3Deflate,
+ Type: typ,
}
}
-func CreateDestinationS3OutputFormatAvroApacheAvroCompressionCodecDestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2(destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2) DestinationS3OutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2
+func CreateDestinationS3CompressionCodecDestinationS3Bzip2(destinationS3Bzip2 DestinationS3Bzip2) DestinationS3CompressionCodec {
+ typ := DestinationS3CompressionCodecTypeDestinationS3Bzip2
- return DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2: &destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2,
- Type: typ,
+ return DestinationS3CompressionCodec{
+ DestinationS3Bzip2: &destinationS3Bzip2,
+ Type: typ,
}
}
-func CreateDestinationS3OutputFormatAvroApacheAvroCompressionCodecDestinationS3OutputFormatAvroApacheAvroCompressionCodecXz(destinationS3OutputFormatAvroApacheAvroCompressionCodecXz DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz) DestinationS3OutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecXz
+func CreateDestinationS3CompressionCodecDestinationS3Xz(destinationS3Xz DestinationS3Xz) DestinationS3CompressionCodec {
+ typ := DestinationS3CompressionCodecTypeDestinationS3Xz
- return DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz: &destinationS3OutputFormatAvroApacheAvroCompressionCodecXz,
- Type: typ,
+ return DestinationS3CompressionCodec{
+ DestinationS3Xz: &destinationS3Xz,
+ Type: typ,
}
}
-func CreateDestinationS3OutputFormatAvroApacheAvroCompressionCodecDestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard(destinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard) DestinationS3OutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard
+func CreateDestinationS3CompressionCodecDestinationS3Zstandard(destinationS3Zstandard DestinationS3Zstandard) DestinationS3CompressionCodec {
+ typ := DestinationS3CompressionCodecTypeDestinationS3Zstandard
- return DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard: &destinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard,
- Type: typ,
+ return DestinationS3CompressionCodec{
+ DestinationS3Zstandard: &destinationS3Zstandard,
+ Type: typ,
}
}
-func CreateDestinationS3OutputFormatAvroApacheAvroCompressionCodecDestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy(destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy) DestinationS3OutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy
+func CreateDestinationS3CompressionCodecDestinationS3Snappy(destinationS3Snappy DestinationS3Snappy) DestinationS3CompressionCodec {
+ typ := DestinationS3CompressionCodecTypeDestinationS3Snappy
- return DestinationS3OutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy: &destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy,
- Type: typ,
+ return DestinationS3CompressionCodec{
+ DestinationS3Snappy: &destinationS3Snappy,
+ Type: typ,
}
}
-func (u *DestinationS3OutputFormatAvroApacheAvroCompressionCodec) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationS3CompressionCodec) UnmarshalJSON(data []byte) error {
- destinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression); err == nil {
- u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression = destinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression
- u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression
+ destinationS3NoCompression := new(DestinationS3NoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationS3NoCompression, "", true, true); err == nil {
+ u.DestinationS3NoCompression = destinationS3NoCompression
+ u.Type = DestinationS3CompressionCodecTypeDestinationS3NoCompression
return nil
}
- destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil {
- u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2
- u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2
+ destinationS3Bzip2 := new(DestinationS3Bzip2)
+ if err := utils.UnmarshalJSON(data, &destinationS3Bzip2, "", true, true); err == nil {
+ u.DestinationS3Bzip2 = destinationS3Bzip2
+ u.Type = DestinationS3CompressionCodecTypeDestinationS3Bzip2
return nil
}
- destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil {
- u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy = destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy
- u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy
+ destinationS3Snappy := new(DestinationS3Snappy)
+ if err := utils.UnmarshalJSON(data, &destinationS3Snappy, "", true, true); err == nil {
+ u.DestinationS3Snappy = destinationS3Snappy
+ u.Type = DestinationS3CompressionCodecTypeDestinationS3Snappy
return nil
}
- destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil {
- u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate = destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate
- u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate
+ destinationS3Deflate := new(DestinationS3Deflate)
+ if err := utils.UnmarshalJSON(data, &destinationS3Deflate, "", true, true); err == nil {
+ u.DestinationS3Deflate = destinationS3Deflate
+ u.Type = DestinationS3CompressionCodecTypeDestinationS3Deflate
return nil
}
- destinationS3OutputFormatAvroApacheAvroCompressionCodecXz := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecXz); err == nil {
- u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz = destinationS3OutputFormatAvroApacheAvroCompressionCodecXz
- u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecXz
+ destinationS3Xz := new(DestinationS3Xz)
+ if err := utils.UnmarshalJSON(data, &destinationS3Xz, "", true, true); err == nil {
+ u.DestinationS3Xz = destinationS3Xz
+ u.Type = DestinationS3CompressionCodecTypeDestinationS3Xz
return nil
}
- destinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard); err == nil {
- u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard = destinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard
- u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard
+ destinationS3Zstandard := new(DestinationS3Zstandard)
+ if err := utils.UnmarshalJSON(data, &destinationS3Zstandard, "", true, true); err == nil {
+ u.DestinationS3Zstandard = destinationS3Zstandard
+ u.Type = DestinationS3CompressionCodecTypeDestinationS3Zstandard
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationS3OutputFormatAvroApacheAvroCompressionCodec) MarshalJSON() ([]byte, error) {
- if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression)
+func (u DestinationS3CompressionCodec) MarshalJSON() ([]byte, error) {
+ if u.DestinationS3NoCompression != nil {
+ return utils.MarshalJSON(u.DestinationS3NoCompression, "", true)
}
- if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2)
+ if u.DestinationS3Deflate != nil {
+ return utils.MarshalJSON(u.DestinationS3Deflate, "", true)
}
- if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy)
+ if u.DestinationS3Bzip2 != nil {
+ return utils.MarshalJSON(u.DestinationS3Bzip2, "", true)
}
- if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate)
+ if u.DestinationS3Xz != nil {
+ return utils.MarshalJSON(u.DestinationS3Xz, "", true)
}
- if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz != nil {
- return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz)
+ if u.DestinationS3Zstandard != nil {
+ return utils.MarshalJSON(u.DestinationS3Zstandard, "", true)
}
- if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard)
+ if u.DestinationS3Snappy != nil {
+ return utils.MarshalJSON(u.DestinationS3Snappy, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationS3OutputFormatAvroApacheAvroFormatType string
+type DestinationS3FormatType string
const (
- DestinationS3OutputFormatAvroApacheAvroFormatTypeAvro DestinationS3OutputFormatAvroApacheAvroFormatType = "Avro"
+ DestinationS3FormatTypeAvro DestinationS3FormatType = "Avro"
)
-func (e DestinationS3OutputFormatAvroApacheAvroFormatType) ToPointer() *DestinationS3OutputFormatAvroApacheAvroFormatType {
+func (e DestinationS3FormatType) ToPointer() *DestinationS3FormatType {
return &e
}
-func (e *DestinationS3OutputFormatAvroApacheAvroFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3FormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Avro":
- *e = DestinationS3OutputFormatAvroApacheAvroFormatType(v)
+ *e = DestinationS3FormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3OutputFormatAvroApacheAvroFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3FormatType: %v", v)
}
}
-// DestinationS3OutputFormatAvroApacheAvro - Format of the data output. See here for more details
-type DestinationS3OutputFormatAvroApacheAvro struct {
+// DestinationS3AvroApacheAvro - Format of the data output. See here for more details
+type DestinationS3AvroApacheAvro struct {
// The compression algorithm used to compress data. Default to no compression.
- CompressionCodec DestinationS3OutputFormatAvroApacheAvroCompressionCodec `json:"compression_codec"`
- FormatType DestinationS3OutputFormatAvroApacheAvroFormatType `json:"format_type"`
+ CompressionCodec DestinationS3CompressionCodec `json:"compression_codec"`
+ FormatType *DestinationS3FormatType `default:"Avro" json:"format_type"`
+}
+
+func (d DestinationS3AvroApacheAvro) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3AvroApacheAvro) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3AvroApacheAvro) GetCompressionCodec() DestinationS3CompressionCodec {
+ if o == nil {
+ return DestinationS3CompressionCodec{}
+ }
+ return o.CompressionCodec
+}
+
+func (o *DestinationS3AvroApacheAvro) GetFormatType() *DestinationS3FormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
}
type DestinationS3OutputFormatType string
const (
- DestinationS3OutputFormatTypeDestinationS3OutputFormatAvroApacheAvro DestinationS3OutputFormatType = "destination-s3_Output Format_Avro: Apache Avro"
- DestinationS3OutputFormatTypeDestinationS3OutputFormatCSVCommaSeparatedValues DestinationS3OutputFormatType = "destination-s3_Output Format_CSV: Comma-Separated Values"
- DestinationS3OutputFormatTypeDestinationS3OutputFormatJSONLinesNewlineDelimitedJSON DestinationS3OutputFormatType = "destination-s3_Output Format_JSON Lines: Newline-delimited JSON"
- DestinationS3OutputFormatTypeDestinationS3OutputFormatParquetColumnarStorage DestinationS3OutputFormatType = "destination-s3_Output Format_Parquet: Columnar Storage"
+ DestinationS3OutputFormatTypeDestinationS3AvroApacheAvro DestinationS3OutputFormatType = "destination-s3_Avro: Apache Avro"
+ DestinationS3OutputFormatTypeDestinationS3CSVCommaSeparatedValues DestinationS3OutputFormatType = "destination-s3_CSV: Comma-Separated Values"
+ DestinationS3OutputFormatTypeDestinationS3JSONLinesNewlineDelimitedJSON DestinationS3OutputFormatType = "destination-s3_JSON Lines: Newline-delimited JSON"
+ DestinationS3OutputFormatTypeDestinationS3ParquetColumnarStorage DestinationS3OutputFormatType = "destination-s3_Parquet: Columnar Storage"
)
type DestinationS3OutputFormat struct {
- DestinationS3OutputFormatAvroApacheAvro *DestinationS3OutputFormatAvroApacheAvro
- DestinationS3OutputFormatCSVCommaSeparatedValues *DestinationS3OutputFormatCSVCommaSeparatedValues
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON *DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON
- DestinationS3OutputFormatParquetColumnarStorage *DestinationS3OutputFormatParquetColumnarStorage
+ DestinationS3AvroApacheAvro *DestinationS3AvroApacheAvro
+ DestinationS3CSVCommaSeparatedValues *DestinationS3CSVCommaSeparatedValues
+ DestinationS3JSONLinesNewlineDelimitedJSON *DestinationS3JSONLinesNewlineDelimitedJSON
+ DestinationS3ParquetColumnarStorage *DestinationS3ParquetColumnarStorage
Type DestinationS3OutputFormatType
}
-func CreateDestinationS3OutputFormatDestinationS3OutputFormatAvroApacheAvro(destinationS3OutputFormatAvroApacheAvro DestinationS3OutputFormatAvroApacheAvro) DestinationS3OutputFormat {
- typ := DestinationS3OutputFormatTypeDestinationS3OutputFormatAvroApacheAvro
+func CreateDestinationS3OutputFormatDestinationS3AvroApacheAvro(destinationS3AvroApacheAvro DestinationS3AvroApacheAvro) DestinationS3OutputFormat {
+ typ := DestinationS3OutputFormatTypeDestinationS3AvroApacheAvro
return DestinationS3OutputFormat{
- DestinationS3OutputFormatAvroApacheAvro: &destinationS3OutputFormatAvroApacheAvro,
- Type: typ,
+ DestinationS3AvroApacheAvro: &destinationS3AvroApacheAvro,
+ Type: typ,
}
}
-func CreateDestinationS3OutputFormatDestinationS3OutputFormatCSVCommaSeparatedValues(destinationS3OutputFormatCSVCommaSeparatedValues DestinationS3OutputFormatCSVCommaSeparatedValues) DestinationS3OutputFormat {
- typ := DestinationS3OutputFormatTypeDestinationS3OutputFormatCSVCommaSeparatedValues
+func CreateDestinationS3OutputFormatDestinationS3CSVCommaSeparatedValues(destinationS3CSVCommaSeparatedValues DestinationS3CSVCommaSeparatedValues) DestinationS3OutputFormat {
+ typ := DestinationS3OutputFormatTypeDestinationS3CSVCommaSeparatedValues
return DestinationS3OutputFormat{
- DestinationS3OutputFormatCSVCommaSeparatedValues: &destinationS3OutputFormatCSVCommaSeparatedValues,
- Type: typ,
+ DestinationS3CSVCommaSeparatedValues: &destinationS3CSVCommaSeparatedValues,
+ Type: typ,
}
}
-func CreateDestinationS3OutputFormatDestinationS3OutputFormatJSONLinesNewlineDelimitedJSON(destinationS3OutputFormatJSONLinesNewlineDelimitedJSON DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON) DestinationS3OutputFormat {
- typ := DestinationS3OutputFormatTypeDestinationS3OutputFormatJSONLinesNewlineDelimitedJSON
+func CreateDestinationS3OutputFormatDestinationS3JSONLinesNewlineDelimitedJSON(destinationS3JSONLinesNewlineDelimitedJSON DestinationS3JSONLinesNewlineDelimitedJSON) DestinationS3OutputFormat {
+ typ := DestinationS3OutputFormatTypeDestinationS3JSONLinesNewlineDelimitedJSON
return DestinationS3OutputFormat{
- DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON: &destinationS3OutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationS3JSONLinesNewlineDelimitedJSON: &destinationS3JSONLinesNewlineDelimitedJSON,
Type: typ,
}
}
-func CreateDestinationS3OutputFormatDestinationS3OutputFormatParquetColumnarStorage(destinationS3OutputFormatParquetColumnarStorage DestinationS3OutputFormatParquetColumnarStorage) DestinationS3OutputFormat {
- typ := DestinationS3OutputFormatTypeDestinationS3OutputFormatParquetColumnarStorage
+func CreateDestinationS3OutputFormatDestinationS3ParquetColumnarStorage(destinationS3ParquetColumnarStorage DestinationS3ParquetColumnarStorage) DestinationS3OutputFormat {
+ typ := DestinationS3OutputFormatTypeDestinationS3ParquetColumnarStorage
return DestinationS3OutputFormat{
- DestinationS3OutputFormatParquetColumnarStorage: &destinationS3OutputFormatParquetColumnarStorage,
- Type: typ,
+ DestinationS3ParquetColumnarStorage: &destinationS3ParquetColumnarStorage,
+ Type: typ,
}
}
func (u *DestinationS3OutputFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- destinationS3OutputFormatAvroApacheAvro := new(DestinationS3OutputFormatAvroApacheAvro)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatAvroApacheAvro); err == nil {
- u.DestinationS3OutputFormatAvroApacheAvro = destinationS3OutputFormatAvroApacheAvro
- u.Type = DestinationS3OutputFormatTypeDestinationS3OutputFormatAvroApacheAvro
+ destinationS3AvroApacheAvro := new(DestinationS3AvroApacheAvro)
+ if err := utils.UnmarshalJSON(data, &destinationS3AvroApacheAvro, "", true, true); err == nil {
+ u.DestinationS3AvroApacheAvro = destinationS3AvroApacheAvro
+ u.Type = DestinationS3OutputFormatTypeDestinationS3AvroApacheAvro
return nil
}
- destinationS3OutputFormatCSVCommaSeparatedValues := new(DestinationS3OutputFormatCSVCommaSeparatedValues)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatCSVCommaSeparatedValues); err == nil {
- u.DestinationS3OutputFormatCSVCommaSeparatedValues = destinationS3OutputFormatCSVCommaSeparatedValues
- u.Type = DestinationS3OutputFormatTypeDestinationS3OutputFormatCSVCommaSeparatedValues
+ destinationS3CSVCommaSeparatedValues := new(DestinationS3CSVCommaSeparatedValues)
+ if err := utils.UnmarshalJSON(data, &destinationS3CSVCommaSeparatedValues, "", true, true); err == nil {
+ u.DestinationS3CSVCommaSeparatedValues = destinationS3CSVCommaSeparatedValues
+ u.Type = DestinationS3OutputFormatTypeDestinationS3CSVCommaSeparatedValues
return nil
}
- destinationS3OutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatJSONLinesNewlineDelimitedJSON); err == nil {
- u.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON = destinationS3OutputFormatJSONLinesNewlineDelimitedJSON
- u.Type = DestinationS3OutputFormatTypeDestinationS3OutputFormatJSONLinesNewlineDelimitedJSON
+ destinationS3JSONLinesNewlineDelimitedJSON := new(DestinationS3JSONLinesNewlineDelimitedJSON)
+ if err := utils.UnmarshalJSON(data, &destinationS3JSONLinesNewlineDelimitedJSON, "", true, true); err == nil {
+ u.DestinationS3JSONLinesNewlineDelimitedJSON = destinationS3JSONLinesNewlineDelimitedJSON
+ u.Type = DestinationS3OutputFormatTypeDestinationS3JSONLinesNewlineDelimitedJSON
return nil
}
- destinationS3OutputFormatParquetColumnarStorage := new(DestinationS3OutputFormatParquetColumnarStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3OutputFormatParquetColumnarStorage); err == nil {
- u.DestinationS3OutputFormatParquetColumnarStorage = destinationS3OutputFormatParquetColumnarStorage
- u.Type = DestinationS3OutputFormatTypeDestinationS3OutputFormatParquetColumnarStorage
+ destinationS3ParquetColumnarStorage := new(DestinationS3ParquetColumnarStorage)
+ if err := utils.UnmarshalJSON(data, &destinationS3ParquetColumnarStorage, "", true, true); err == nil {
+ u.DestinationS3ParquetColumnarStorage = destinationS3ParquetColumnarStorage
+ u.Type = DestinationS3OutputFormatTypeDestinationS3ParquetColumnarStorage
return nil
}
@@ -965,23 +1290,23 @@ func (u *DestinationS3OutputFormat) UnmarshalJSON(data []byte) error {
}
func (u DestinationS3OutputFormat) MarshalJSON() ([]byte, error) {
- if u.DestinationS3OutputFormatAvroApacheAvro != nil {
- return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvro)
+ if u.DestinationS3AvroApacheAvro != nil {
+ return utils.MarshalJSON(u.DestinationS3AvroApacheAvro, "", true)
}
- if u.DestinationS3OutputFormatCSVCommaSeparatedValues != nil {
- return json.Marshal(u.DestinationS3OutputFormatCSVCommaSeparatedValues)
+ if u.DestinationS3CSVCommaSeparatedValues != nil {
+ return utils.MarshalJSON(u.DestinationS3CSVCommaSeparatedValues, "", true)
}
- if u.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.DestinationS3OutputFormatJSONLinesNewlineDelimitedJSON)
+ if u.DestinationS3JSONLinesNewlineDelimitedJSON != nil {
+ return utils.MarshalJSON(u.DestinationS3JSONLinesNewlineDelimitedJSON, "", true)
}
- if u.DestinationS3OutputFormatParquetColumnarStorage != nil {
- return json.Marshal(u.DestinationS3OutputFormatParquetColumnarStorage)
+ if u.DestinationS3ParquetColumnarStorage != nil {
+ return utils.MarshalJSON(u.DestinationS3ParquetColumnarStorage, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// DestinationS3S3BucketRegion - The region of the S3 bucket. See here for all region codes.
@@ -1086,8 +1411,8 @@ func (e *DestinationS3S3BucketRegion) UnmarshalJSON(data []byte) error {
type DestinationS3 struct {
// The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.
- AccessKeyID *string `json:"access_key_id,omitempty"`
- DestinationType DestinationS3S3 `json:"destinationType"`
+ AccessKeyID *string `json:"access_key_id,omitempty"`
+ destinationType S3 `const:"s3" json:"destinationType"`
// The pattern allows you to set the file-name format for the S3 staging file(s)
FileNamePattern *string `json:"file_name_pattern,omitempty"`
// Format of the data output. See here for more details
@@ -1097,11 +1422,89 @@ type DestinationS3 struct {
// Directory under the S3 bucket where data will be written. Read more here
S3BucketPath string `json:"s3_bucket_path"`
// The region of the S3 bucket. See here for all region codes.
- S3BucketRegion DestinationS3S3BucketRegion `json:"s3_bucket_region"`
+ S3BucketRegion *DestinationS3S3BucketRegion `default:"" json:"s3_bucket_region"`
// Your S3 endpoint url. Read more here
- S3Endpoint *string `json:"s3_endpoint,omitempty"`
+ S3Endpoint *string `default:"" json:"s3_endpoint"`
// Format string on how data will be organized inside the S3 bucket directory. Read more here
S3PathFormat *string `json:"s3_path_format,omitempty"`
// The corresponding secret to the access key ID. Read more here
SecretAccessKey *string `json:"secret_access_key,omitempty"`
}
+
+func (d DestinationS3) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3) GetAccessKeyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessKeyID
+}
+
+func (o *DestinationS3) GetDestinationType() S3 {
+ return S3S3
+}
+
+func (o *DestinationS3) GetFileNamePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FileNamePattern
+}
+
+func (o *DestinationS3) GetFormat() DestinationS3OutputFormat {
+ if o == nil {
+ return DestinationS3OutputFormat{}
+ }
+ return o.Format
+}
+
+func (o *DestinationS3) GetS3BucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketName
+}
+
+func (o *DestinationS3) GetS3BucketPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketPath
+}
+
+func (o *DestinationS3) GetS3BucketRegion() *DestinationS3S3BucketRegion {
+ if o == nil {
+ return nil
+ }
+ return o.S3BucketRegion
+}
+
+func (o *DestinationS3) GetS3Endpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.S3Endpoint
+}
+
+func (o *DestinationS3) GetS3PathFormat() *string {
+ if o == nil {
+ return nil
+ }
+ return o.S3PathFormat
+}
+
+func (o *DestinationS3) GetSecretAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretAccessKey
+}
diff --git a/internal/sdk/pkg/models/shared/destinations3createrequest.go b/internal/sdk/pkg/models/shared/destinations3createrequest.go
old mode 100755
new mode 100644
index 2d5bfc5ba..e484dfe82
--- a/internal/sdk/pkg/models/shared/destinations3createrequest.go
+++ b/internal/sdk/pkg/models/shared/destinations3createrequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationS3CreateRequest struct {
Configuration DestinationS3 `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationS3CreateRequest) GetConfiguration() DestinationS3 {
+ if o == nil {
+ return DestinationS3{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationS3CreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationS3CreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationS3CreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinations3glue.go b/internal/sdk/pkg/models/shared/destinations3glue.go
old mode 100755
new mode 100644
index f6d876d72..1a5e0413b
--- a/internal/sdk/pkg/models/shared/destinations3glue.go
+++ b/internal/sdk/pkg/models/shared/destinations3glue.go
@@ -3,175 +3,206 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationS3GlueS3Glue string
+type S3Glue string
const (
- DestinationS3GlueS3GlueS3Glue DestinationS3GlueS3Glue = "s3-glue"
+ S3GlueS3Glue S3Glue = "s3-glue"
)
-func (e DestinationS3GlueS3Glue) ToPointer() *DestinationS3GlueS3Glue {
+func (e S3Glue) ToPointer() *S3Glue {
return &e
}
-func (e *DestinationS3GlueS3Glue) UnmarshalJSON(data []byte) error {
+func (e *S3Glue) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "s3-glue":
- *e = DestinationS3GlueS3Glue(v)
+ *e = S3Glue(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3GlueS3Glue: %v", v)
+ return fmt.Errorf("invalid value for S3Glue: %v", v)
}
}
-type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType string
+type DestinationS3GlueSchemasCompressionType string
const (
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeGzip DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType = "GZIP"
+ DestinationS3GlueSchemasCompressionTypeGzip DestinationS3GlueSchemasCompressionType = "GZIP"
)
-func (e DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) ToPointer() *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType {
+func (e DestinationS3GlueSchemasCompressionType) ToPointer() *DestinationS3GlueSchemasCompressionType {
return &e
}
-func (e *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3GlueSchemasCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GZIP":
- *e = DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(v)
+ *e = DestinationS3GlueSchemasCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3GlueSchemasCompressionType: %v", v)
}
}
-// DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP struct {
- CompressionType *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType `json:"compression_type,omitempty"`
+// DestinationS3GlueGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationS3GlueGZIP struct {
+ CompressionType *DestinationS3GlueSchemasCompressionType `default:"GZIP" json:"compression_type"`
}
-type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType string
+func (d DestinationS3GlueGZIP) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3GlueGZIP) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3GlueGZIP) GetCompressionType() *DestinationS3GlueSchemasCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
+}
+
+type DestinationS3GlueCompressionType string
const (
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeNoCompression DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType = "No Compression"
+ DestinationS3GlueCompressionTypeNoCompression DestinationS3GlueCompressionType = "No Compression"
)
-func (e DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) ToPointer() *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType {
+func (e DestinationS3GlueCompressionType) ToPointer() *DestinationS3GlueCompressionType {
return &e
}
-func (e *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3GlueCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "No Compression":
- *e = DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(v)
+ *e = DestinationS3GlueCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3GlueCompressionType: %v", v)
}
}
-// DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression struct {
- CompressionType *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType `json:"compression_type,omitempty"`
+// DestinationS3GlueNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationS3GlueNoCompression struct {
+ CompressionType *DestinationS3GlueCompressionType `default:"No Compression" json:"compression_type"`
}
-type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionType string
+func (d DestinationS3GlueNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3GlueNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3GlueNoCompression) GetCompressionType() *DestinationS3GlueCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
+}
+
+type DestinationS3GlueCompressionUnionType string
const (
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-s3-glue_Output Format_JSON Lines: Newline-delimited JSON_Compression_No Compression"
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-s3-glue_Output Format_JSON Lines: Newline-delimited JSON_Compression_GZIP"
+ DestinationS3GlueCompressionUnionTypeDestinationS3GlueNoCompression DestinationS3GlueCompressionUnionType = "destination-s3-glue_No Compression"
+ DestinationS3GlueCompressionUnionTypeDestinationS3GlueGZIP DestinationS3GlueCompressionUnionType = "destination-s3-glue_GZIP"
)
-type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+type DestinationS3GlueCompression struct {
+ DestinationS3GlueNoCompression *DestinationS3GlueNoCompression
+ DestinationS3GlueGZIP *DestinationS3GlueGZIP
- Type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionType
+ Type DestinationS3GlueCompressionUnionType
}
-func CreateDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression(destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression) DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+func CreateDestinationS3GlueCompressionDestinationS3GlueNoCompression(destinationS3GlueNoCompression DestinationS3GlueNoCompression) DestinationS3GlueCompression {
+ typ := DestinationS3GlueCompressionUnionTypeDestinationS3GlueNoCompression
- return DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: &destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
- Type: typ,
+ return DestinationS3GlueCompression{
+ DestinationS3GlueNoCompression: &destinationS3GlueNoCompression,
+ Type: typ,
}
}
-func CreateDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP(destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP) DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+func CreateDestinationS3GlueCompressionDestinationS3GlueGZIP(destinationS3GlueGZIP DestinationS3GlueGZIP) DestinationS3GlueCompression {
+ typ := DestinationS3GlueCompressionUnionTypeDestinationS3GlueGZIP
- return DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: &destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
- Type: typ,
+ return DestinationS3GlueCompression{
+ DestinationS3GlueGZIP: &destinationS3GlueGZIP,
+ Type: typ,
}
}
-func (u *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationS3GlueCompression) UnmarshalJSON(data []byte) error {
- destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression := new(DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression); err == nil {
- u.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- u.Type = DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+ destinationS3GlueNoCompression := new(DestinationS3GlueNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationS3GlueNoCompression, "", true, true); err == nil {
+ u.DestinationS3GlueNoCompression = destinationS3GlueNoCompression
+ u.Type = DestinationS3GlueCompressionUnionTypeDestinationS3GlueNoCompression
return nil
}
- destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP := new(DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP); err == nil {
- u.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- u.Type = DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+ destinationS3GlueGZIP := new(DestinationS3GlueGZIP)
+ if err := utils.UnmarshalJSON(data, &destinationS3GlueGZIP, "", true, true); err == nil {
+ u.DestinationS3GlueGZIP = destinationS3GlueGZIP
+ u.Type = DestinationS3GlueCompressionUnionTypeDestinationS3GlueGZIP
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression) MarshalJSON() ([]byte, error) {
- if u.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- return json.Marshal(u.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
+func (u DestinationS3GlueCompression) MarshalJSON() ([]byte, error) {
+ if u.DestinationS3GlueNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationS3GlueNoCompression, "", true)
}
- if u.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- return json.Marshal(u.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
+ if u.DestinationS3GlueGZIP != nil {
+ return utils.MarshalJSON(u.DestinationS3GlueGZIP, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening - Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
-type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening string
+// DestinationS3GlueFlattening - Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
+type DestinationS3GlueFlattening string
const (
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlatteningNoFlattening DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening = "No flattening"
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlatteningRootLevelFlattening DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening = "Root level flattening"
+ DestinationS3GlueFlatteningNoFlattening DestinationS3GlueFlattening = "No flattening"
+ DestinationS3GlueFlatteningRootLevelFlattening DestinationS3GlueFlattening = "Root level flattening"
)
-func (e DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening) ToPointer() *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening {
+func (e DestinationS3GlueFlattening) ToPointer() *DestinationS3GlueFlattening {
return &e
}
-func (e *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3GlueFlattening) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -180,76 +211,105 @@ func (e *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening) U
case "No flattening":
fallthrough
case "Root level flattening":
- *e = DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening(v)
+ *e = DestinationS3GlueFlattening(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3GlueFlattening: %v", v)
}
}
-type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFormatType string
+type DestinationS3GlueFormatType string
const (
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFormatTypeJsonl DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFormatType = "JSONL"
+ DestinationS3GlueFormatTypeJsonl DestinationS3GlueFormatType = "JSONL"
)
-func (e DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFormatType) ToPointer() *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFormatType {
+func (e DestinationS3GlueFormatType) ToPointer() *DestinationS3GlueFormatType {
return &e
}
-func (e *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3GlueFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "JSONL":
- *e = DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFormatType(v)
+ *e = DestinationS3GlueFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3GlueFormatType: %v", v)
}
}
-// DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON - Format of the data output. See here for more details
-type DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON struct {
+// DestinationS3GlueJSONLinesNewlineDelimitedJSON - Format of the data output. See here for more details
+type DestinationS3GlueJSONLinesNewlineDelimitedJSON struct {
// Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
- Compression *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONCompression `json:"compression,omitempty"`
+ Compression *DestinationS3GlueCompression `json:"compression,omitempty"`
// Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
- Flattening *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFlattening `json:"flattening,omitempty"`
- FormatType DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSONFormatType `json:"format_type"`
+ Flattening *DestinationS3GlueFlattening `default:"Root level flattening" json:"flattening"`
+ FormatType *DestinationS3GlueFormatType `default:"JSONL" json:"format_type"`
+}
+
+func (d DestinationS3GlueJSONLinesNewlineDelimitedJSON) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3GlueJSONLinesNewlineDelimitedJSON) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3GlueJSONLinesNewlineDelimitedJSON) GetCompression() *DestinationS3GlueCompression {
+ if o == nil {
+ return nil
+ }
+ return o.Compression
+}
+
+func (o *DestinationS3GlueJSONLinesNewlineDelimitedJSON) GetFlattening() *DestinationS3GlueFlattening {
+ if o == nil {
+ return nil
+ }
+ return o.Flattening
+}
+
+func (o *DestinationS3GlueJSONLinesNewlineDelimitedJSON) GetFormatType() *DestinationS3GlueFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
}
type DestinationS3GlueOutputFormatType string
const (
- DestinationS3GlueOutputFormatTypeDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON DestinationS3GlueOutputFormatType = "destination-s3-glue_Output Format_JSON Lines: Newline-delimited JSON"
+ DestinationS3GlueOutputFormatTypeDestinationS3GlueJSONLinesNewlineDelimitedJSON DestinationS3GlueOutputFormatType = "destination-s3-glue_JSON Lines: Newline-delimited JSON"
)
type DestinationS3GlueOutputFormat struct {
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON *DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON
+ DestinationS3GlueJSONLinesNewlineDelimitedJSON *DestinationS3GlueJSONLinesNewlineDelimitedJSON
Type DestinationS3GlueOutputFormatType
}
-func CreateDestinationS3GlueOutputFormatDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON(destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON) DestinationS3GlueOutputFormat {
- typ := DestinationS3GlueOutputFormatTypeDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON
+func CreateDestinationS3GlueOutputFormatDestinationS3GlueJSONLinesNewlineDelimitedJSON(destinationS3GlueJSONLinesNewlineDelimitedJSON DestinationS3GlueJSONLinesNewlineDelimitedJSON) DestinationS3GlueOutputFormat {
+ typ := DestinationS3GlueOutputFormatTypeDestinationS3GlueJSONLinesNewlineDelimitedJSON
return DestinationS3GlueOutputFormat{
- DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON: &destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationS3GlueJSONLinesNewlineDelimitedJSON: &destinationS3GlueJSONLinesNewlineDelimitedJSON,
Type: typ,
}
}
func (u *DestinationS3GlueOutputFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON); err == nil {
- u.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON = destinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON
- u.Type = DestinationS3GlueOutputFormatTypeDestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON
+
+ destinationS3GlueJSONLinesNewlineDelimitedJSON := new(DestinationS3GlueJSONLinesNewlineDelimitedJSON)
+ if err := utils.UnmarshalJSON(data, &destinationS3GlueJSONLinesNewlineDelimitedJSON, "", true, true); err == nil {
+ u.DestinationS3GlueJSONLinesNewlineDelimitedJSON = destinationS3GlueJSONLinesNewlineDelimitedJSON
+ u.Type = DestinationS3GlueOutputFormatTypeDestinationS3GlueJSONLinesNewlineDelimitedJSON
return nil
}
@@ -257,11 +317,11 @@ func (u *DestinationS3GlueOutputFormat) UnmarshalJSON(data []byte) error {
}
func (u DestinationS3GlueOutputFormat) MarshalJSON() ([]byte, error) {
- if u.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.DestinationS3GlueOutputFormatJSONLinesNewlineDelimitedJSON)
+ if u.DestinationS3GlueJSONLinesNewlineDelimitedJSON != nil {
+ return utils.MarshalJSON(u.DestinationS3GlueJSONLinesNewlineDelimitedJSON, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// DestinationS3GlueSerializationLibrary - The library that your query engine will use for reading and writing data in your lake.
@@ -394,8 +454,8 @@ func (e *DestinationS3GlueS3BucketRegion) UnmarshalJSON(data []byte) error {
type DestinationS3Glue struct {
// The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.
- AccessKeyID *string `json:"access_key_id,omitempty"`
- DestinationType DestinationS3GlueS3Glue `json:"destinationType"`
+ AccessKeyID *string `json:"access_key_id,omitempty"`
+ destinationType S3Glue `const:"s3-glue" json:"destinationType"`
// The pattern allows you to set the file-name format for the S3 staging file(s)
FileNamePattern *string `json:"file_name_pattern,omitempty"`
// Format of the data output. See here for more details
@@ -403,17 +463,109 @@ type DestinationS3Glue struct {
// Name of the glue database for creating the tables, leave blank if no integration
GlueDatabase string `json:"glue_database"`
// The library that your query engine will use for reading and writing data in your lake.
- GlueSerializationLibrary DestinationS3GlueSerializationLibrary `json:"glue_serialization_library"`
+ GlueSerializationLibrary *DestinationS3GlueSerializationLibrary `default:"org.openx.data.jsonserde.JsonSerDe" json:"glue_serialization_library"`
// The name of the S3 bucket. Read more here.
S3BucketName string `json:"s3_bucket_name"`
// Directory under the S3 bucket where data will be written. Read more here
S3BucketPath string `json:"s3_bucket_path"`
// The region of the S3 bucket. See here for all region codes.
- S3BucketRegion DestinationS3GlueS3BucketRegion `json:"s3_bucket_region"`
+ S3BucketRegion *DestinationS3GlueS3BucketRegion `default:"" json:"s3_bucket_region"`
// Your S3 endpoint url. Read more here
- S3Endpoint *string `json:"s3_endpoint,omitempty"`
+ S3Endpoint *string `default:"" json:"s3_endpoint"`
// Format string on how data will be organized inside the S3 bucket directory. Read more here
S3PathFormat *string `json:"s3_path_format,omitempty"`
// The corresponding secret to the access key ID. Read more here
SecretAccessKey *string `json:"secret_access_key,omitempty"`
}
+
+func (d DestinationS3Glue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3Glue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3Glue) GetAccessKeyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessKeyID
+}
+
+func (o *DestinationS3Glue) GetDestinationType() S3Glue {
+ return S3GlueS3Glue
+}
+
+func (o *DestinationS3Glue) GetFileNamePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FileNamePattern
+}
+
+func (o *DestinationS3Glue) GetFormat() DestinationS3GlueOutputFormat {
+ if o == nil {
+ return DestinationS3GlueOutputFormat{}
+ }
+ return o.Format
+}
+
+func (o *DestinationS3Glue) GetGlueDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.GlueDatabase
+}
+
+func (o *DestinationS3Glue) GetGlueSerializationLibrary() *DestinationS3GlueSerializationLibrary {
+ if o == nil {
+ return nil
+ }
+ return o.GlueSerializationLibrary
+}
+
+func (o *DestinationS3Glue) GetS3BucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketName
+}
+
+func (o *DestinationS3Glue) GetS3BucketPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketPath
+}
+
+func (o *DestinationS3Glue) GetS3BucketRegion() *DestinationS3GlueS3BucketRegion {
+ if o == nil {
+ return nil
+ }
+ return o.S3BucketRegion
+}
+
+func (o *DestinationS3Glue) GetS3Endpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.S3Endpoint
+}
+
+func (o *DestinationS3Glue) GetS3PathFormat() *string {
+ if o == nil {
+ return nil
+ }
+ return o.S3PathFormat
+}
+
+func (o *DestinationS3Glue) GetSecretAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretAccessKey
+}
diff --git a/internal/sdk/pkg/models/shared/destinations3gluecreaterequest.go b/internal/sdk/pkg/models/shared/destinations3gluecreaterequest.go
old mode 100755
new mode 100644
index 4ac7ada4a..369adf57f
--- a/internal/sdk/pkg/models/shared/destinations3gluecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinations3gluecreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationS3GlueCreateRequest struct {
Configuration DestinationS3Glue `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationS3GlueCreateRequest) GetConfiguration() DestinationS3Glue {
+ if o == nil {
+ return DestinationS3Glue{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationS3GlueCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationS3GlueCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationS3GlueCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinations3glueputrequest.go b/internal/sdk/pkg/models/shared/destinations3glueputrequest.go
old mode 100755
new mode 100644
index d09f93455..99d777229
--- a/internal/sdk/pkg/models/shared/destinations3glueputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinations3glueputrequest.go
@@ -7,3 +7,24 @@ type DestinationS3GluePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationS3GluePutRequest) GetConfiguration() DestinationS3GlueUpdate {
+ if o == nil {
+ return DestinationS3GlueUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationS3GluePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationS3GluePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinations3glueupdate.go b/internal/sdk/pkg/models/shared/destinations3glueupdate.go
old mode 100755
new mode 100644
index faf26f12c..061b9c672
--- a/internal/sdk/pkg/models/shared/destinations3glueupdate.go
+++ b/internal/sdk/pkg/models/shared/destinations3glueupdate.go
@@ -3,151 +3,182 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType string
+type DestinationS3GlueUpdateSchemasCompressionType string
const (
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeGzip DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType = "GZIP"
+ DestinationS3GlueUpdateSchemasCompressionTypeGzip DestinationS3GlueUpdateSchemasCompressionType = "GZIP"
)
-func (e DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) ToPointer() *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType {
+func (e DestinationS3GlueUpdateSchemasCompressionType) ToPointer() *DestinationS3GlueUpdateSchemasCompressionType {
return &e
}
-func (e *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3GlueUpdateSchemasCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GZIP":
- *e = DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(v)
+ *e = DestinationS3GlueUpdateSchemasCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3GlueUpdateSchemasCompressionType: %v", v)
}
}
-// DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP struct {
- CompressionType *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType `json:"compression_type,omitempty"`
+// DestinationS3GlueUpdateGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationS3GlueUpdateGZIP struct {
+ CompressionType *DestinationS3GlueUpdateSchemasCompressionType `default:"GZIP" json:"compression_type"`
}
-type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType string
+func (d DestinationS3GlueUpdateGZIP) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3GlueUpdateGZIP) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3GlueUpdateGZIP) GetCompressionType() *DestinationS3GlueUpdateSchemasCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
+}
+
+type DestinationS3GlueUpdateCompressionType string
const (
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeNoCompression DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType = "No Compression"
+ DestinationS3GlueUpdateCompressionTypeNoCompression DestinationS3GlueUpdateCompressionType = "No Compression"
)
-func (e DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) ToPointer() *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType {
+func (e DestinationS3GlueUpdateCompressionType) ToPointer() *DestinationS3GlueUpdateCompressionType {
return &e
}
-func (e *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3GlueUpdateCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "No Compression":
- *e = DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(v)
+ *e = DestinationS3GlueUpdateCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3GlueUpdateCompressionType: %v", v)
+ }
+}
+
+// DestinationS3GlueUpdateNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationS3GlueUpdateNoCompression struct {
+ CompressionType *DestinationS3GlueUpdateCompressionType `default:"No Compression" json:"compression_type"`
+}
+
+func (d DestinationS3GlueUpdateNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3GlueUpdateNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression struct {
- CompressionType *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType `json:"compression_type,omitempty"`
+func (o *DestinationS3GlueUpdateNoCompression) GetCompressionType() *DestinationS3GlueUpdateCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
}
-type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType string
+type DestinationS3GlueUpdateCompressionUnionType string
const (
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-s3-glue-update_Output Format_JSON Lines: Newline-delimited JSON_Compression_No Compression"
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-s3-glue-update_Output Format_JSON Lines: Newline-delimited JSON_Compression_GZIP"
+ DestinationS3GlueUpdateCompressionUnionTypeDestinationS3GlueUpdateNoCompression DestinationS3GlueUpdateCompressionUnionType = "destination-s3-glue-update_No Compression"
+ DestinationS3GlueUpdateCompressionUnionTypeDestinationS3GlueUpdateGZIP DestinationS3GlueUpdateCompressionUnionType = "destination-s3-glue-update_GZIP"
)
-type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+type DestinationS3GlueUpdateCompression struct {
+ DestinationS3GlueUpdateNoCompression *DestinationS3GlueUpdateNoCompression
+ DestinationS3GlueUpdateGZIP *DestinationS3GlueUpdateGZIP
- Type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType
+ Type DestinationS3GlueUpdateCompressionUnionType
}
-func CreateDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression(destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression) DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+func CreateDestinationS3GlueUpdateCompressionDestinationS3GlueUpdateNoCompression(destinationS3GlueUpdateNoCompression DestinationS3GlueUpdateNoCompression) DestinationS3GlueUpdateCompression {
+ typ := DestinationS3GlueUpdateCompressionUnionTypeDestinationS3GlueUpdateNoCompression
- return DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: &destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
- Type: typ,
+ return DestinationS3GlueUpdateCompression{
+ DestinationS3GlueUpdateNoCompression: &destinationS3GlueUpdateNoCompression,
+ Type: typ,
}
}
-func CreateDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP(destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP) DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+func CreateDestinationS3GlueUpdateCompressionDestinationS3GlueUpdateGZIP(destinationS3GlueUpdateGZIP DestinationS3GlueUpdateGZIP) DestinationS3GlueUpdateCompression {
+ typ := DestinationS3GlueUpdateCompressionUnionTypeDestinationS3GlueUpdateGZIP
- return DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: &destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
- Type: typ,
+ return DestinationS3GlueUpdateCompression{
+ DestinationS3GlueUpdateGZIP: &destinationS3GlueUpdateGZIP,
+ Type: typ,
}
}
-func (u *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationS3GlueUpdateCompression) UnmarshalJSON(data []byte) error {
- destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression := new(DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression); err == nil {
- u.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- u.Type = DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+ destinationS3GlueUpdateNoCompression := new(DestinationS3GlueUpdateNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationS3GlueUpdateNoCompression, "", true, true); err == nil {
+ u.DestinationS3GlueUpdateNoCompression = destinationS3GlueUpdateNoCompression
+ u.Type = DestinationS3GlueUpdateCompressionUnionTypeDestinationS3GlueUpdateNoCompression
return nil
}
- destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP := new(DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP); err == nil {
- u.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- u.Type = DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+ destinationS3GlueUpdateGZIP := new(DestinationS3GlueUpdateGZIP)
+ if err := utils.UnmarshalJSON(data, &destinationS3GlueUpdateGZIP, "", true, true); err == nil {
+ u.DestinationS3GlueUpdateGZIP = destinationS3GlueUpdateGZIP
+ u.Type = DestinationS3GlueUpdateCompressionUnionTypeDestinationS3GlueUpdateGZIP
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression) MarshalJSON() ([]byte, error) {
- if u.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- return json.Marshal(u.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
+func (u DestinationS3GlueUpdateCompression) MarshalJSON() ([]byte, error) {
+ if u.DestinationS3GlueUpdateNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationS3GlueUpdateNoCompression, "", true)
}
- if u.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- return json.Marshal(u.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
+ if u.DestinationS3GlueUpdateGZIP != nil {
+ return utils.MarshalJSON(u.DestinationS3GlueUpdateGZIP, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening - Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
-type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening string
+// Flattening - Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
+type Flattening string
const (
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlatteningNoFlattening DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening = "No flattening"
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlatteningRootLevelFlattening DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening = "Root level flattening"
+ FlatteningNoFlattening Flattening = "No flattening"
+ FlatteningRootLevelFlattening Flattening = "Root level flattening"
)
-func (e DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening) ToPointer() *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening {
+func (e Flattening) ToPointer() *Flattening {
return &e
}
-func (e *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening) UnmarshalJSON(data []byte) error {
+func (e *Flattening) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -156,76 +187,105 @@ func (e *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlatten
case "No flattening":
fallthrough
case "Root level flattening":
- *e = DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening(v)
+ *e = Flattening(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening: %v", v)
+ return fmt.Errorf("invalid value for Flattening: %v", v)
}
}
-type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType string
+type DestinationS3GlueUpdateFormatType string
const (
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatTypeJsonl DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType = "JSONL"
+ DestinationS3GlueUpdateFormatTypeJsonl DestinationS3GlueUpdateFormatType = "JSONL"
)
-func (e DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType) ToPointer() *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType {
+func (e DestinationS3GlueUpdateFormatType) ToPointer() *DestinationS3GlueUpdateFormatType {
return &e
}
-func (e *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3GlueUpdateFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "JSONL":
- *e = DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType(v)
+ *e = DestinationS3GlueUpdateFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3GlueUpdateFormatType: %v", v)
}
}
-// DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON - Format of the data output. See here for more details
-type DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON struct {
+// DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON - Format of the data output. See here for more details
+type DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON struct {
// Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
- Compression *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression `json:"compression,omitempty"`
+ Compression *DestinationS3GlueUpdateCompression `json:"compression,omitempty"`
// Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
- Flattening *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening `json:"flattening,omitempty"`
- FormatType DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType `json:"format_type"`
+ Flattening *Flattening `default:"Root level flattening" json:"flattening"`
+ FormatType *DestinationS3GlueUpdateFormatType `default:"JSONL" json:"format_type"`
+}
+
+func (d DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON) GetCompression() *DestinationS3GlueUpdateCompression {
+ if o == nil {
+ return nil
+ }
+ return o.Compression
+}
+
+func (o *DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON) GetFlattening() *Flattening {
+ if o == nil {
+ return nil
+ }
+ return o.Flattening
+}
+
+func (o *DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON) GetFormatType() *DestinationS3GlueUpdateFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
}
type DestinationS3GlueUpdateOutputFormatType string
const (
- DestinationS3GlueUpdateOutputFormatTypeDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON DestinationS3GlueUpdateOutputFormatType = "destination-s3-glue-update_Output Format_JSON Lines: Newline-delimited JSON"
+ DestinationS3GlueUpdateOutputFormatTypeDestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON DestinationS3GlueUpdateOutputFormatType = "destination-s3-glue-update_JSON Lines: Newline-delimited JSON"
)
type DestinationS3GlueUpdateOutputFormat struct {
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON *DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON
+ DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON *DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON
Type DestinationS3GlueUpdateOutputFormatType
}
-func CreateDestinationS3GlueUpdateOutputFormatDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON(destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON) DestinationS3GlueUpdateOutputFormat {
- typ := DestinationS3GlueUpdateOutputFormatTypeDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON
+func CreateDestinationS3GlueUpdateOutputFormatDestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON(destinationS3GlueUpdateJSONLinesNewlineDelimitedJSON DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON) DestinationS3GlueUpdateOutputFormat {
+ typ := DestinationS3GlueUpdateOutputFormatTypeDestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON
return DestinationS3GlueUpdateOutputFormat{
- DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON: &destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON: &destinationS3GlueUpdateJSONLinesNewlineDelimitedJSON,
Type: typ,
}
}
func (u *DestinationS3GlueUpdateOutputFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON); err == nil {
- u.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON = destinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON
- u.Type = DestinationS3GlueUpdateOutputFormatTypeDestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON
+
+ destinationS3GlueUpdateJSONLinesNewlineDelimitedJSON := new(DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON)
+ if err := utils.UnmarshalJSON(data, &destinationS3GlueUpdateJSONLinesNewlineDelimitedJSON, "", true, true); err == nil {
+ u.DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON = destinationS3GlueUpdateJSONLinesNewlineDelimitedJSON
+ u.Type = DestinationS3GlueUpdateOutputFormatTypeDestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON
return nil
}
@@ -233,26 +293,26 @@ func (u *DestinationS3GlueUpdateOutputFormat) UnmarshalJSON(data []byte) error {
}
func (u DestinationS3GlueUpdateOutputFormat) MarshalJSON() ([]byte, error) {
- if u.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.DestinationS3GlueUpdateOutputFormatJSONLinesNewlineDelimitedJSON)
+ if u.DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON != nil {
+ return utils.MarshalJSON(u.DestinationS3GlueUpdateJSONLinesNewlineDelimitedJSON, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationS3GlueUpdateSerializationLibrary - The library that your query engine will use for reading and writing data in your lake.
-type DestinationS3GlueUpdateSerializationLibrary string
+// SerializationLibrary - The library that your query engine will use for reading and writing data in your lake.
+type SerializationLibrary string
const (
- DestinationS3GlueUpdateSerializationLibraryOrgOpenxDataJsonserdeJSONSerDe DestinationS3GlueUpdateSerializationLibrary = "org.openx.data.jsonserde.JsonSerDe"
- DestinationS3GlueUpdateSerializationLibraryOrgApacheHiveHcatalogDataJSONSerDe DestinationS3GlueUpdateSerializationLibrary = "org.apache.hive.hcatalog.data.JsonSerDe"
+ SerializationLibraryOrgOpenxDataJsonserdeJSONSerDe SerializationLibrary = "org.openx.data.jsonserde.JsonSerDe"
+ SerializationLibraryOrgApacheHiveHcatalogDataJSONSerDe SerializationLibrary = "org.apache.hive.hcatalog.data.JsonSerDe"
)
-func (e DestinationS3GlueUpdateSerializationLibrary) ToPointer() *DestinationS3GlueUpdateSerializationLibrary {
+func (e SerializationLibrary) ToPointer() *SerializationLibrary {
return &e
}
-func (e *DestinationS3GlueUpdateSerializationLibrary) UnmarshalJSON(data []byte) error {
+func (e *SerializationLibrary) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -261,10 +321,10 @@ func (e *DestinationS3GlueUpdateSerializationLibrary) UnmarshalJSON(data []byte)
case "org.openx.data.jsonserde.JsonSerDe":
fallthrough
case "org.apache.hive.hcatalog.data.JsonSerDe":
- *e = DestinationS3GlueUpdateSerializationLibrary(v)
+ *e = SerializationLibrary(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3GlueUpdateSerializationLibrary: %v", v)
+ return fmt.Errorf("invalid value for SerializationLibrary: %v", v)
}
}
@@ -378,17 +438,105 @@ type DestinationS3GlueUpdate struct {
// Name of the glue database for creating the tables, leave blank if no integration
GlueDatabase string `json:"glue_database"`
// The library that your query engine will use for reading and writing data in your lake.
- GlueSerializationLibrary DestinationS3GlueUpdateSerializationLibrary `json:"glue_serialization_library"`
+ GlueSerializationLibrary *SerializationLibrary `default:"org.openx.data.jsonserde.JsonSerDe" json:"glue_serialization_library"`
// The name of the S3 bucket. Read more here.
S3BucketName string `json:"s3_bucket_name"`
// Directory under the S3 bucket where data will be written. Read more here
S3BucketPath string `json:"s3_bucket_path"`
// The region of the S3 bucket. See here for all region codes.
- S3BucketRegion DestinationS3GlueUpdateS3BucketRegion `json:"s3_bucket_region"`
+ S3BucketRegion *DestinationS3GlueUpdateS3BucketRegion `default:"" json:"s3_bucket_region"`
// Your S3 endpoint url. Read more here
- S3Endpoint *string `json:"s3_endpoint,omitempty"`
+ S3Endpoint *string `default:"" json:"s3_endpoint"`
// Format string on how data will be organized inside the S3 bucket directory. Read more here
S3PathFormat *string `json:"s3_path_format,omitempty"`
// The corresponding secret to the access key ID. Read more here
SecretAccessKey *string `json:"secret_access_key,omitempty"`
}
+
+func (d DestinationS3GlueUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3GlueUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3GlueUpdate) GetAccessKeyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessKeyID
+}
+
+func (o *DestinationS3GlueUpdate) GetFileNamePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FileNamePattern
+}
+
+func (o *DestinationS3GlueUpdate) GetFormat() DestinationS3GlueUpdateOutputFormat {
+ if o == nil {
+ return DestinationS3GlueUpdateOutputFormat{}
+ }
+ return o.Format
+}
+
+func (o *DestinationS3GlueUpdate) GetGlueDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.GlueDatabase
+}
+
+func (o *DestinationS3GlueUpdate) GetGlueSerializationLibrary() *SerializationLibrary {
+ if o == nil {
+ return nil
+ }
+ return o.GlueSerializationLibrary
+}
+
+func (o *DestinationS3GlueUpdate) GetS3BucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketName
+}
+
+func (o *DestinationS3GlueUpdate) GetS3BucketPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketPath
+}
+
+func (o *DestinationS3GlueUpdate) GetS3BucketRegion() *DestinationS3GlueUpdateS3BucketRegion {
+ if o == nil {
+ return nil
+ }
+ return o.S3BucketRegion
+}
+
+func (o *DestinationS3GlueUpdate) GetS3Endpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.S3Endpoint
+}
+
+func (o *DestinationS3GlueUpdate) GetS3PathFormat() *string {
+ if o == nil {
+ return nil
+ }
+ return o.S3PathFormat
+}
+
+func (o *DestinationS3GlueUpdate) GetSecretAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretAccessKey
+}
diff --git a/internal/sdk/pkg/models/shared/destinations3putrequest.go b/internal/sdk/pkg/models/shared/destinations3putrequest.go
old mode 100755
new mode 100644
index 4141dfe02..fc0f570ec
--- a/internal/sdk/pkg/models/shared/destinations3putrequest.go
+++ b/internal/sdk/pkg/models/shared/destinations3putrequest.go
@@ -7,3 +7,24 @@ type DestinationS3PutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationS3PutRequest) GetConfiguration() DestinationS3Update {
+ if o == nil {
+ return DestinationS3Update{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationS3PutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationS3PutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinations3update.go b/internal/sdk/pkg/models/shared/destinations3update.go
old mode 100755
new mode 100644
index 62ebb26be..04895b043
--- a/internal/sdk/pkg/models/shared/destinations3update.go
+++ b/internal/sdk/pkg/models/shared/destinations3update.go
@@ -3,30 +3,30 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec - The compression algorithm used to compress data pages.
-type DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec string
+// DestinationS3UpdateSchemasCompressionCodec - The compression algorithm used to compress data pages.
+type DestinationS3UpdateSchemasCompressionCodec string
const (
- DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodecUncompressed DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec = "UNCOMPRESSED"
- DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodecSnappy DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec = "SNAPPY"
- DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodecGzip DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec = "GZIP"
- DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodecLzo DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec = "LZO"
- DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodecBrotli DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec = "BROTLI"
- DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodecLz4 DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec = "LZ4"
- DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodecZstd DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec = "ZSTD"
+ DestinationS3UpdateSchemasCompressionCodecUncompressed DestinationS3UpdateSchemasCompressionCodec = "UNCOMPRESSED"
+ DestinationS3UpdateSchemasCompressionCodecSnappy DestinationS3UpdateSchemasCompressionCodec = "SNAPPY"
+ DestinationS3UpdateSchemasCompressionCodecGzip DestinationS3UpdateSchemasCompressionCodec = "GZIP"
+ DestinationS3UpdateSchemasCompressionCodecLzo DestinationS3UpdateSchemasCompressionCodec = "LZO"
+ DestinationS3UpdateSchemasCompressionCodecBrotli DestinationS3UpdateSchemasCompressionCodec = "BROTLI"
+ DestinationS3UpdateSchemasCompressionCodecLz4 DestinationS3UpdateSchemasCompressionCodec = "LZ4"
+ DestinationS3UpdateSchemasCompressionCodecZstd DestinationS3UpdateSchemasCompressionCodec = "ZSTD"
)
-func (e DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec) ToPointer() *DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec {
+func (e DestinationS3UpdateSchemasCompressionCodec) ToPointer() *DestinationS3UpdateSchemasCompressionCodec {
return &e
}
-func (e *DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasCompressionCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -45,193 +45,284 @@ func (e *DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec)
case "LZ4":
fallthrough
case "ZSTD":
- *e = DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec(v)
+ *e = DestinationS3UpdateSchemasCompressionCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasCompressionCodec: %v", v)
}
}
-type DestinationS3UpdateOutputFormatParquetColumnarStorageFormatType string
+type DestinationS3UpdateSchemasFormatOutputFormatFormatType string
const (
- DestinationS3UpdateOutputFormatParquetColumnarStorageFormatTypeParquet DestinationS3UpdateOutputFormatParquetColumnarStorageFormatType = "Parquet"
+ DestinationS3UpdateSchemasFormatOutputFormatFormatTypeParquet DestinationS3UpdateSchemasFormatOutputFormatFormatType = "Parquet"
)
-func (e DestinationS3UpdateOutputFormatParquetColumnarStorageFormatType) ToPointer() *DestinationS3UpdateOutputFormatParquetColumnarStorageFormatType {
+func (e DestinationS3UpdateSchemasFormatOutputFormatFormatType) ToPointer() *DestinationS3UpdateSchemasFormatOutputFormatFormatType {
return &e
}
-func (e *DestinationS3UpdateOutputFormatParquetColumnarStorageFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasFormatOutputFormatFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Parquet":
- *e = DestinationS3UpdateOutputFormatParquetColumnarStorageFormatType(v)
+ *e = DestinationS3UpdateSchemasFormatOutputFormatFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatParquetColumnarStorageFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasFormatOutputFormatFormatType: %v", v)
}
}
-// DestinationS3UpdateOutputFormatParquetColumnarStorage - Format of the data output. See here for more details
-type DestinationS3UpdateOutputFormatParquetColumnarStorage struct {
+// DestinationS3UpdateParquetColumnarStorage - Format of the data output. See here for more details
+type DestinationS3UpdateParquetColumnarStorage struct {
// This is the size of a row group being buffered in memory. It limits the memory usage when writing. Larger values will improve the IO when reading, but consume more memory when writing. Default: 128 MB.
- BlockSizeMb *int64 `json:"block_size_mb,omitempty"`
+ BlockSizeMb *int64 `default:"128" json:"block_size_mb"`
// The compression algorithm used to compress data pages.
- CompressionCodec *DestinationS3UpdateOutputFormatParquetColumnarStorageCompressionCodec `json:"compression_codec,omitempty"`
+ CompressionCodec *DestinationS3UpdateSchemasCompressionCodec `default:"UNCOMPRESSED" json:"compression_codec"`
// Default: true.
- DictionaryEncoding *bool `json:"dictionary_encoding,omitempty"`
+ DictionaryEncoding *bool `default:"true" json:"dictionary_encoding"`
// There is one dictionary page per column per row group when dictionary encoding is used. The dictionary page size works like the page size but for dictionary. Default: 1024 KB.
- DictionaryPageSizeKb *int64 `json:"dictionary_page_size_kb,omitempty"`
- FormatType DestinationS3UpdateOutputFormatParquetColumnarStorageFormatType `json:"format_type"`
+ DictionaryPageSizeKb *int64 `default:"1024" json:"dictionary_page_size_kb"`
+ FormatType *DestinationS3UpdateSchemasFormatOutputFormatFormatType `default:"Parquet" json:"format_type"`
// Maximum size allowed as padding to align row groups. This is also the minimum size of a row group. Default: 8 MB.
- MaxPaddingSizeMb *int64 `json:"max_padding_size_mb,omitempty"`
+ MaxPaddingSizeMb *int64 `default:"8" json:"max_padding_size_mb"`
// The page size is for compression. A block is composed of pages. A page is the smallest unit that must be read fully to access a single record. If this value is too small, the compression will deteriorate. Default: 1024 KB.
- PageSizeKb *int64 `json:"page_size_kb,omitempty"`
+ PageSizeKb *int64 `default:"1024" json:"page_size_kb"`
}
-type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType string
+func (d DestinationS3UpdateParquetColumnarStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateParquetColumnarStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3UpdateParquetColumnarStorage) GetBlockSizeMb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BlockSizeMb
+}
+
+func (o *DestinationS3UpdateParquetColumnarStorage) GetCompressionCodec() *DestinationS3UpdateSchemasCompressionCodec {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionCodec
+}
+
+func (o *DestinationS3UpdateParquetColumnarStorage) GetDictionaryEncoding() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DictionaryEncoding
+}
+
+func (o *DestinationS3UpdateParquetColumnarStorage) GetDictionaryPageSizeKb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DictionaryPageSizeKb
+}
+
+func (o *DestinationS3UpdateParquetColumnarStorage) GetFormatType() *DestinationS3UpdateSchemasFormatOutputFormatFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
+}
+
+func (o *DestinationS3UpdateParquetColumnarStorage) GetMaxPaddingSizeMb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.MaxPaddingSizeMb
+}
+
+func (o *DestinationS3UpdateParquetColumnarStorage) GetPageSizeKb() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PageSizeKb
+}
+
+type DestinationS3UpdateSchemasFormatOutputFormatCompressionType string
const (
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionTypeGzip DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType = "GZIP"
+ DestinationS3UpdateSchemasFormatOutputFormatCompressionTypeGzip DestinationS3UpdateSchemasFormatOutputFormatCompressionType = "GZIP"
)
-func (e DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) ToPointer() *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType {
+func (e DestinationS3UpdateSchemasFormatOutputFormatCompressionType) ToPointer() *DestinationS3UpdateSchemasFormatOutputFormatCompressionType {
return &e
}
-func (e *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasFormatOutputFormatCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GZIP":
- *e = DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType(v)
+ *e = DestinationS3UpdateSchemasFormatOutputFormatCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasFormatOutputFormatCompressionType: %v", v)
+ }
+}
+
+// DestinationS3UpdateSchemasGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationS3UpdateSchemasGZIP struct {
+ CompressionType *DestinationS3UpdateSchemasFormatOutputFormatCompressionType `default:"GZIP" json:"compression_type"`
+}
+
+func (d DestinationS3UpdateSchemasGZIP) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateSchemasGZIP) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP struct {
- CompressionType *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIPCompressionType `json:"compression_type,omitempty"`
+func (o *DestinationS3UpdateSchemasGZIP) GetCompressionType() *DestinationS3UpdateSchemasFormatOutputFormatCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
}
-type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType string
+type DestinationS3UpdateSchemasFormatCompressionType string
const (
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionTypeNoCompression DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType = "No Compression"
+ DestinationS3UpdateSchemasFormatCompressionTypeNoCompression DestinationS3UpdateSchemasFormatCompressionType = "No Compression"
)
-func (e DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) ToPointer() *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType {
+func (e DestinationS3UpdateSchemasFormatCompressionType) ToPointer() *DestinationS3UpdateSchemasFormatCompressionType {
return &e
}
-func (e *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasFormatCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "No Compression":
- *e = DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType(v)
+ *e = DestinationS3UpdateSchemasFormatCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasFormatCompressionType: %v", v)
}
}
-// DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
-type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression struct {
- CompressionType *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompressionCompressionType `json:"compression_type,omitempty"`
+// DestinationS3UpdateSchemasFormatNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
+type DestinationS3UpdateSchemasFormatNoCompression struct {
+ CompressionType *DestinationS3UpdateSchemasFormatCompressionType `default:"No Compression" json:"compression_type"`
}
-type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType string
+func (d DestinationS3UpdateSchemasFormatNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateSchemasFormatNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3UpdateSchemasFormatNoCompression) GetCompressionType() *DestinationS3UpdateSchemasFormatCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
+}
+
+type DestinationS3UpdateSchemasCompressionUnionType string
const (
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-s3-update_Output Format_JSON Lines: Newline-delimited JSON_Compression_No Compression"
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType = "destination-s3-update_Output Format_JSON Lines: Newline-delimited JSON_Compression_GZIP"
+ DestinationS3UpdateSchemasCompressionUnionTypeDestinationS3UpdateSchemasFormatNoCompression DestinationS3UpdateSchemasCompressionUnionType = "destination-s3-update_Schemas_format_No Compression"
+ DestinationS3UpdateSchemasCompressionUnionTypeDestinationS3UpdateSchemasGZIP DestinationS3UpdateSchemasCompressionUnionType = "destination-s3-update_Schemas_GZIP"
)
-type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression struct {
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+type DestinationS3UpdateSchemasCompression struct {
+ DestinationS3UpdateSchemasFormatNoCompression *DestinationS3UpdateSchemasFormatNoCompression
+ DestinationS3UpdateSchemasGZIP *DestinationS3UpdateSchemasGZIP
- Type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionType
+ Type DestinationS3UpdateSchemasCompressionUnionType
}
-func CreateDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression(destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression) DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+func CreateDestinationS3UpdateSchemasCompressionDestinationS3UpdateSchemasFormatNoCompression(destinationS3UpdateSchemasFormatNoCompression DestinationS3UpdateSchemasFormatNoCompression) DestinationS3UpdateSchemasCompression {
+ typ := DestinationS3UpdateSchemasCompressionUnionTypeDestinationS3UpdateSchemasFormatNoCompression
- return DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression: &destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression,
+ return DestinationS3UpdateSchemasCompression{
+ DestinationS3UpdateSchemasFormatNoCompression: &destinationS3UpdateSchemasFormatNoCompression,
Type: typ,
}
}
-func CreateDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP(destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP) DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression {
- typ := DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+func CreateDestinationS3UpdateSchemasCompressionDestinationS3UpdateSchemasGZIP(destinationS3UpdateSchemasGZIP DestinationS3UpdateSchemasGZIP) DestinationS3UpdateSchemasCompression {
+ typ := DestinationS3UpdateSchemasCompressionUnionTypeDestinationS3UpdateSchemasGZIP
- return DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression{
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP: &destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP,
- Type: typ,
+ return DestinationS3UpdateSchemasCompression{
+ DestinationS3UpdateSchemasGZIP: &destinationS3UpdateSchemasGZIP,
+ Type: typ,
}
}
-func (u *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationS3UpdateSchemasCompression) UnmarshalJSON(data []byte) error {
- destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression := new(DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression); err == nil {
- u.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression = destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
- u.Type = DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression
+ destinationS3UpdateSchemasFormatNoCompression := new(DestinationS3UpdateSchemasFormatNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateSchemasFormatNoCompression, "", true, true); err == nil {
+ u.DestinationS3UpdateSchemasFormatNoCompression = destinationS3UpdateSchemasFormatNoCompression
+ u.Type = DestinationS3UpdateSchemasCompressionUnionTypeDestinationS3UpdateSchemasFormatNoCompression
return nil
}
- destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP := new(DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP); err == nil {
- u.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP = destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
- u.Type = DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionTypeDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP
+ destinationS3UpdateSchemasGZIP := new(DestinationS3UpdateSchemasGZIP)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateSchemasGZIP, "", true, true); err == nil {
+ u.DestinationS3UpdateSchemasGZIP = destinationS3UpdateSchemasGZIP
+ u.Type = DestinationS3UpdateSchemasCompressionUnionTypeDestinationS3UpdateSchemasGZIP
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression) MarshalJSON() ([]byte, error) {
- if u.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionNoCompression)
+func (u DestinationS3UpdateSchemasCompression) MarshalJSON() ([]byte, error) {
+ if u.DestinationS3UpdateSchemasFormatNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateSchemasFormatNoCompression, "", true)
}
- if u.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompressionGZIP)
+ if u.DestinationS3UpdateSchemasGZIP != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateSchemasGZIP, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening - Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
-type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening string
+// DestinationS3UpdateSchemasFlattening - Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
+type DestinationS3UpdateSchemasFlattening string
const (
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlatteningNoFlattening DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening = "No flattening"
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlatteningRootLevelFlattening DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening = "Root level flattening"
+ DestinationS3UpdateSchemasFlatteningNoFlattening DestinationS3UpdateSchemasFlattening = "No flattening"
+ DestinationS3UpdateSchemasFlatteningRootLevelFlattening DestinationS3UpdateSchemasFlattening = "Root level flattening"
)
-func (e DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening) ToPointer() *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening {
+func (e DestinationS3UpdateSchemasFlattening) ToPointer() *DestinationS3UpdateSchemasFlattening {
return &e
}
-func (e *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasFlattening) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -240,185 +331,248 @@ func (e *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening)
case "No flattening":
fallthrough
case "Root level flattening":
- *e = DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening(v)
+ *e = DestinationS3UpdateSchemasFlattening(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasFlattening: %v", v)
}
}
-type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType string
+type DestinationS3UpdateSchemasFormatFormatType string
const (
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatTypeJsonl DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType = "JSONL"
+ DestinationS3UpdateSchemasFormatFormatTypeJsonl DestinationS3UpdateSchemasFormatFormatType = "JSONL"
)
-func (e DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType) ToPointer() *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType {
+func (e DestinationS3UpdateSchemasFormatFormatType) ToPointer() *DestinationS3UpdateSchemasFormatFormatType {
return &e
}
-func (e *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasFormatFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "JSONL":
- *e = DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType(v)
+ *e = DestinationS3UpdateSchemasFormatFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasFormatFormatType: %v", v)
}
}
-// DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON - Format of the data output. See here for more details
-type DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON struct {
+// DestinationS3UpdateJSONLinesNewlineDelimitedJSON - Format of the data output. See here for more details
+type DestinationS3UpdateJSONLinesNewlineDelimitedJSON struct {
// Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".jsonl.gz").
- Compression *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONCompression `json:"compression,omitempty"`
+ Compression *DestinationS3UpdateSchemasCompression `json:"compression,omitempty"`
// Whether the input json data should be normalized (flattened) in the output JSON Lines. Please refer to docs for details.
- Flattening *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFlattening `json:"flattening,omitempty"`
- FormatType DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSONFormatType `json:"format_type"`
+ Flattening *DestinationS3UpdateSchemasFlattening `default:"No flattening" json:"flattening"`
+ FormatType *DestinationS3UpdateSchemasFormatFormatType `default:"JSONL" json:"format_type"`
+}
+
+func (d DestinationS3UpdateJSONLinesNewlineDelimitedJSON) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateJSONLinesNewlineDelimitedJSON) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3UpdateJSONLinesNewlineDelimitedJSON) GetCompression() *DestinationS3UpdateSchemasCompression {
+ if o == nil {
+ return nil
+ }
+ return o.Compression
+}
+
+func (o *DestinationS3UpdateJSONLinesNewlineDelimitedJSON) GetFlattening() *DestinationS3UpdateSchemasFlattening {
+ if o == nil {
+ return nil
+ }
+ return o.Flattening
+}
+
+func (o *DestinationS3UpdateJSONLinesNewlineDelimitedJSON) GetFormatType() *DestinationS3UpdateSchemasFormatFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
}
-type DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType string
+type DestinationS3UpdateSchemasCompressionType string
const (
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionTypeGzip DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType = "GZIP"
+ DestinationS3UpdateSchemasCompressionTypeGzip DestinationS3UpdateSchemasCompressionType = "GZIP"
)
-func (e DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType) ToPointer() *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType {
+func (e DestinationS3UpdateSchemasCompressionType) ToPointer() *DestinationS3UpdateSchemasCompressionType {
return &e
}
-func (e *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "GZIP":
- *e = DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType(v)
+ *e = DestinationS3UpdateSchemasCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasCompressionType: %v", v)
}
}
-// DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
-type DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP struct {
- CompressionType *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIPCompressionType `json:"compression_type,omitempty"`
+// DestinationS3UpdateGZIP - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
+type DestinationS3UpdateGZIP struct {
+ CompressionType *DestinationS3UpdateSchemasCompressionType `default:"GZIP" json:"compression_type"`
+}
+
+func (d DestinationS3UpdateGZIP) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType string
+func (d *DestinationS3UpdateGZIP) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3UpdateGZIP) GetCompressionType() *DestinationS3UpdateSchemasCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
+}
+
+type DestinationS3UpdateCompressionType string
const (
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionTypeNoCompression DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType = "No Compression"
+ DestinationS3UpdateCompressionTypeNoCompression DestinationS3UpdateCompressionType = "No Compression"
)
-func (e DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType) ToPointer() *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType {
+func (e DestinationS3UpdateCompressionType) ToPointer() *DestinationS3UpdateCompressionType {
return &e
}
-func (e *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateCompressionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "No Compression":
- *e = DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType(v)
+ *e = DestinationS3UpdateCompressionType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateCompressionType: %v", v)
}
}
-// DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
-type DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression struct {
- CompressionType *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompressionCompressionType `json:"compression_type,omitempty"`
+// DestinationS3UpdateSchemasNoCompression - Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
+type DestinationS3UpdateSchemasNoCompression struct {
+ CompressionType *DestinationS3UpdateCompressionType `default:"No Compression" json:"compression_type"`
+}
+
+func (d DestinationS3UpdateSchemasNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateSchemasNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3UpdateSchemasNoCompression) GetCompressionType() *DestinationS3UpdateCompressionType {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionType
}
-type DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionType string
+type DestinationS3UpdateCompressionUnionType string
const (
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionType = "destination-s3-update_Output Format_CSV: Comma-Separated Values_Compression_No Compression"
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionType = "destination-s3-update_Output Format_CSV: Comma-Separated Values_Compression_GZIP"
+ DestinationS3UpdateCompressionUnionTypeDestinationS3UpdateSchemasNoCompression DestinationS3UpdateCompressionUnionType = "destination-s3-update_Schemas_No Compression"
+ DestinationS3UpdateCompressionUnionTypeDestinationS3UpdateGZIP DestinationS3UpdateCompressionUnionType = "destination-s3-update_GZIP"
)
-type DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression struct {
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP
+type DestinationS3UpdateCompression struct {
+ DestinationS3UpdateSchemasNoCompression *DestinationS3UpdateSchemasNoCompression
+ DestinationS3UpdateGZIP *DestinationS3UpdateGZIP
- Type DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionType
+ Type DestinationS3UpdateCompressionUnionType
}
-func CreateDestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionDestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression(destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression) DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression {
- typ := DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
+func CreateDestinationS3UpdateCompressionDestinationS3UpdateSchemasNoCompression(destinationS3UpdateSchemasNoCompression DestinationS3UpdateSchemasNoCompression) DestinationS3UpdateCompression {
+ typ := DestinationS3UpdateCompressionUnionTypeDestinationS3UpdateSchemasNoCompression
- return DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression: &destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression,
- Type: typ,
+ return DestinationS3UpdateCompression{
+ DestinationS3UpdateSchemasNoCompression: &destinationS3UpdateSchemasNoCompression,
+ Type: typ,
}
}
-func CreateDestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionDestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP(destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP) DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression {
- typ := DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP
+func CreateDestinationS3UpdateCompressionDestinationS3UpdateGZIP(destinationS3UpdateGZIP DestinationS3UpdateGZIP) DestinationS3UpdateCompression {
+ typ := DestinationS3UpdateCompressionUnionTypeDestinationS3UpdateGZIP
- return DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression{
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP: &destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP,
- Type: typ,
+ return DestinationS3UpdateCompression{
+ DestinationS3UpdateGZIP: &destinationS3UpdateGZIP,
+ Type: typ,
}
}
-func (u *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationS3UpdateCompression) UnmarshalJSON(data []byte) error {
- destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression := new(DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression); err == nil {
- u.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression = destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
- u.Type = DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression
+ destinationS3UpdateSchemasNoCompression := new(DestinationS3UpdateSchemasNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateSchemasNoCompression, "", true, true); err == nil {
+ u.DestinationS3UpdateSchemasNoCompression = destinationS3UpdateSchemasNoCompression
+ u.Type = DestinationS3UpdateCompressionUnionTypeDestinationS3UpdateSchemasNoCompression
return nil
}
- destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP := new(DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP); err == nil {
- u.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP = destinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP
- u.Type = DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionTypeDestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP
+ destinationS3UpdateGZIP := new(DestinationS3UpdateGZIP)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateGZIP, "", true, true); err == nil {
+ u.DestinationS3UpdateGZIP = destinationS3UpdateGZIP
+ u.Type = DestinationS3UpdateCompressionUnionTypeDestinationS3UpdateGZIP
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression) MarshalJSON() ([]byte, error) {
- if u.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionNoCompression)
+func (u DestinationS3UpdateCompression) MarshalJSON() ([]byte, error) {
+ if u.DestinationS3UpdateSchemasNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateSchemasNoCompression, "", true)
}
- if u.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompressionGZIP)
+ if u.DestinationS3UpdateGZIP != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateGZIP, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening - Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
-type DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening string
+// DestinationS3UpdateFlattening - Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
+type DestinationS3UpdateFlattening string
const (
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlatteningNoFlattening DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening = "No flattening"
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlatteningRootLevelFlattening DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening = "Root level flattening"
+ DestinationS3UpdateFlatteningNoFlattening DestinationS3UpdateFlattening = "No flattening"
+ DestinationS3UpdateFlatteningRootLevelFlattening DestinationS3UpdateFlattening = "Root level flattening"
)
-func (e DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening) ToPointer() *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening {
+func (e DestinationS3UpdateFlattening) ToPointer() *DestinationS3UpdateFlattening {
return &e
}
-func (e *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateFlattening) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -427,513 +581,684 @@ func (e *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening) Unmar
case "No flattening":
fallthrough
case "Root level flattening":
- *e = DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening(v)
+ *e = DestinationS3UpdateFlattening(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateFlattening: %v", v)
}
}
-type DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFormatType string
+type DestinationS3UpdateSchemasFormatType string
const (
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFormatTypeCsv DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFormatType = "CSV"
+ DestinationS3UpdateSchemasFormatTypeCsv DestinationS3UpdateSchemasFormatType = "CSV"
)
-func (e DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFormatType) ToPointer() *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFormatType {
+func (e DestinationS3UpdateSchemasFormatType) ToPointer() *DestinationS3UpdateSchemasFormatType {
return &e
}
-func (e *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CSV":
- *e = DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFormatType(v)
+ *e = DestinationS3UpdateSchemasFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasFormatType: %v", v)
}
}
-// DestinationS3UpdateOutputFormatCSVCommaSeparatedValues - Format of the data output. See here for more details
-type DestinationS3UpdateOutputFormatCSVCommaSeparatedValues struct {
+// DestinationS3UpdateCSVCommaSeparatedValues - Format of the data output. See here for more details
+type DestinationS3UpdateCSVCommaSeparatedValues struct {
// Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: ".csv.gz").
- Compression *DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesCompression `json:"compression,omitempty"`
+ Compression *DestinationS3UpdateCompression `json:"compression,omitempty"`
// Whether the input json data should be normalized (flattened) in the output CSV. Please refer to docs for details.
- Flattening DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFlattening `json:"flattening"`
- FormatType DestinationS3UpdateOutputFormatCSVCommaSeparatedValuesFormatType `json:"format_type"`
+ Flattening *DestinationS3UpdateFlattening `default:"No flattening" json:"flattening"`
+ FormatType *DestinationS3UpdateSchemasFormatType `default:"CSV" json:"format_type"`
+}
+
+func (d DestinationS3UpdateCSVCommaSeparatedValues) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateCSVCommaSeparatedValues) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
}
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec string
+func (o *DestinationS3UpdateCSVCommaSeparatedValues) GetCompression() *DestinationS3UpdateCompression {
+ if o == nil {
+ return nil
+ }
+ return o.Compression
+}
+
+func (o *DestinationS3UpdateCSVCommaSeparatedValues) GetFlattening() *DestinationS3UpdateFlattening {
+ if o == nil {
+ return nil
+ }
+ return o.Flattening
+}
+
+func (o *DestinationS3UpdateCSVCommaSeparatedValues) GetFormatType() *DestinationS3UpdateSchemasFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
+}
+
+type DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodec string
const (
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodecSnappy DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec = "snappy"
+ DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodecSnappy DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodec = "snappy"
)
-func (e DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec) ToPointer() *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec {
+func (e DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodec) ToPointer() *DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodec {
return &e
}
-func (e *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "snappy":
- *e = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec(v)
+ *e = DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodec: %v", v)
}
}
-// DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy struct {
- Codec DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappyCodec `json:"codec"`
+// DestinationS3UpdateSnappy - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3UpdateSnappy struct {
+ Codec *DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodec `default:"snappy" json:"codec"`
}
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec string
+func (d DestinationS3UpdateSnappy) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateSnappy) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3UpdateSnappy) GetCodec() *DestinationS3UpdateSchemasFormatOutputFormat1CompressionCodecCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+type DestinationS3UpdateSchemasFormatOutputFormat1Codec string
const (
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodecZstandard DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec = "zstandard"
+ DestinationS3UpdateSchemasFormatOutputFormat1CodecZstandard DestinationS3UpdateSchemasFormatOutputFormat1Codec = "zstandard"
)
-func (e DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec) ToPointer() *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec {
+func (e DestinationS3UpdateSchemasFormatOutputFormat1Codec) ToPointer() *DestinationS3UpdateSchemasFormatOutputFormat1Codec {
return &e
}
-func (e *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasFormatOutputFormat1Codec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zstandard":
- *e = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec(v)
+ *e = DestinationS3UpdateSchemasFormatOutputFormat1Codec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasFormatOutputFormat1Codec: %v", v)
}
}
-// DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard struct {
- Codec DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandardCodec `json:"codec"`
+// DestinationS3UpdateZstandard - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3UpdateZstandard struct {
+ Codec *DestinationS3UpdateSchemasFormatOutputFormat1Codec `default:"zstandard" json:"codec"`
// Negative levels are 'fast' modes akin to lz4 or snappy, levels above 9 are generally for archival purposes, and levels above 18 use a lot of memory.
- CompressionLevel int64 `json:"compression_level"`
+ CompressionLevel *int64 `default:"3" json:"compression_level"`
// If true, include a checksum with each data block.
- IncludeChecksum *bool `json:"include_checksum,omitempty"`
+ IncludeChecksum *bool `default:"false" json:"include_checksum"`
+}
+
+func (d DestinationS3UpdateZstandard) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateZstandard) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3UpdateZstandard) GetCodec() *DestinationS3UpdateSchemasFormatOutputFormat1Codec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *DestinationS3UpdateZstandard) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
}
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec string
+func (o *DestinationS3UpdateZstandard) GetIncludeChecksum() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeChecksum
+}
+
+type DestinationS3UpdateSchemasFormatOutputFormatCodec string
const (
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXzCodecXz DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec = "xz"
+ DestinationS3UpdateSchemasFormatOutputFormatCodecXz DestinationS3UpdateSchemasFormatOutputFormatCodec = "xz"
)
-func (e DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec) ToPointer() *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec {
+func (e DestinationS3UpdateSchemasFormatOutputFormatCodec) ToPointer() *DestinationS3UpdateSchemasFormatOutputFormatCodec {
return &e
}
-func (e *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasFormatOutputFormatCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "xz":
- *e = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec(v)
+ *e = DestinationS3UpdateSchemasFormatOutputFormatCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasFormatOutputFormatCodec: %v", v)
}
}
-// DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz struct {
- Codec DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXzCodec `json:"codec"`
+// DestinationS3UpdateXz - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3UpdateXz struct {
+ Codec *DestinationS3UpdateSchemasFormatOutputFormatCodec `default:"xz" json:"codec"`
// See here for details.
- CompressionLevel int64 `json:"compression_level"`
+ CompressionLevel *int64 `default:"6" json:"compression_level"`
+}
+
+func (d DestinationS3UpdateXz) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateXz) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3UpdateXz) GetCodec() *DestinationS3UpdateSchemasFormatOutputFormatCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *DestinationS3UpdateXz) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
}
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec string
+type DestinationS3UpdateSchemasFormatCodec string
const (
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2CodecBzip2 DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec = "bzip2"
+ DestinationS3UpdateSchemasFormatCodecBzip2 DestinationS3UpdateSchemasFormatCodec = "bzip2"
)
-func (e DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec) ToPointer() *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec {
+func (e DestinationS3UpdateSchemasFormatCodec) ToPointer() *DestinationS3UpdateSchemasFormatCodec {
return &e
}
-func (e *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasFormatCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "bzip2":
- *e = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec(v)
+ *e = DestinationS3UpdateSchemasFormatCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasFormatCodec: %v", v)
}
}
-// DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 struct {
- Codec DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2Codec `json:"codec"`
+// DestinationS3UpdateBzip2 - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3UpdateBzip2 struct {
+ Codec *DestinationS3UpdateSchemasFormatCodec `default:"bzip2" json:"codec"`
}
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec string
+func (d DestinationS3UpdateBzip2) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateBzip2) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3UpdateBzip2) GetCodec() *DestinationS3UpdateSchemasFormatCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+type DestinationS3UpdateSchemasCodec string
const (
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodecDeflate DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec = "Deflate"
+ DestinationS3UpdateSchemasCodecDeflate DestinationS3UpdateSchemasCodec = "Deflate"
)
-func (e DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec) ToPointer() *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec {
+func (e DestinationS3UpdateSchemasCodec) ToPointer() *DestinationS3UpdateSchemasCodec {
return &e
}
-func (e *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateSchemasCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Deflate":
- *e = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec(v)
+ *e = DestinationS3UpdateSchemasCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateSchemasCodec: %v", v)
}
}
-// DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate struct {
- Codec DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflateCodec `json:"codec"`
+// DestinationS3UpdateDeflate - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3UpdateDeflate struct {
+ Codec *DestinationS3UpdateSchemasCodec `default:"Deflate" json:"codec"`
// 0: no compression & fastest, 9: best compression & slowest.
- CompressionLevel int64 `json:"compression_level"`
+ CompressionLevel *int64 `default:"0" json:"compression_level"`
+}
+
+func (d DestinationS3UpdateDeflate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec string
+func (d *DestinationS3UpdateDeflate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3UpdateDeflate) GetCodec() *DestinationS3UpdateSchemasCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
+}
+
+func (o *DestinationS3UpdateDeflate) GetCompressionLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CompressionLevel
+}
+
+type DestinationS3UpdateCodec string
const (
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodecNoCompression DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec = "no compression"
+ DestinationS3UpdateCodecNoCompression DestinationS3UpdateCodec = "no compression"
)
-func (e DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec) ToPointer() *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec {
+func (e DestinationS3UpdateCodec) ToPointer() *DestinationS3UpdateCodec {
return &e
}
-func (e *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateCodec) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "no compression":
- *e = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec(v)
+ *e = DestinationS3UpdateCodec(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateCodec: %v", v)
+ }
+}
+
+// DestinationS3UpdateNoCompression - The compression algorithm used to compress data. Default to no compression.
+type DestinationS3UpdateNoCompression struct {
+ Codec *DestinationS3UpdateCodec `default:"no compression" json:"codec"`
+}
+
+func (d DestinationS3UpdateNoCompression) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateNoCompression) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression - The compression algorithm used to compress data. Default to no compression.
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression struct {
- Codec DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompressionCodec `json:"codec"`
+func (o *DestinationS3UpdateNoCompression) GetCodec() *DestinationS3UpdateCodec {
+ if o == nil {
+ return nil
+ }
+ return o.Codec
}
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecType string
+type DestinationS3UpdateCompressionCodecType string
const (
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-s3-update_Output Format_Avro: Apache Avro_Compression Codec_No Compression"
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-s3-update_Output Format_Avro: Apache Avro_Compression Codec_Deflate"
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-s3-update_Output Format_Avro: Apache Avro_Compression Codec_bzip2"
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-s3-update_Output Format_Avro: Apache Avro_Compression Codec_xz"
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-s3-update_Output Format_Avro: Apache Avro_Compression Codec_zstandard"
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecType = "destination-s3-update_Output Format_Avro: Apache Avro_Compression Codec_snappy"
+ DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateNoCompression DestinationS3UpdateCompressionCodecType = "destination-s3-update_No Compression"
+ DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateDeflate DestinationS3UpdateCompressionCodecType = "destination-s3-update_Deflate"
+ DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateBzip2 DestinationS3UpdateCompressionCodecType = "destination-s3-update_bzip2"
+ DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateXz DestinationS3UpdateCompressionCodecType = "destination-s3-update_xz"
+ DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateZstandard DestinationS3UpdateCompressionCodecType = "destination-s3-update_zstandard"
+ DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateSnappy DestinationS3UpdateCompressionCodecType = "destination-s3-update_snappy"
)
-type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec struct {
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy
+type DestinationS3UpdateCompressionCodec struct {
+ DestinationS3UpdateNoCompression *DestinationS3UpdateNoCompression
+ DestinationS3UpdateDeflate *DestinationS3UpdateDeflate
+ DestinationS3UpdateBzip2 *DestinationS3UpdateBzip2
+ DestinationS3UpdateXz *DestinationS3UpdateXz
+ DestinationS3UpdateZstandard *DestinationS3UpdateZstandard
+ DestinationS3UpdateSnappy *DestinationS3UpdateSnappy
- Type DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecType
+ Type DestinationS3UpdateCompressionCodecType
}
-func CreateDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression(destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression) DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression
+func CreateDestinationS3UpdateCompressionCodecDestinationS3UpdateNoCompression(destinationS3UpdateNoCompression DestinationS3UpdateNoCompression) DestinationS3UpdateCompressionCodec {
+ typ := DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateNoCompression
- return DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression: &destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression,
- Type: typ,
+ return DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateNoCompression: &destinationS3UpdateNoCompression,
+ Type: typ,
}
}
-func CreateDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate(destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate) DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate
+func CreateDestinationS3UpdateCompressionCodecDestinationS3UpdateDeflate(destinationS3UpdateDeflate DestinationS3UpdateDeflate) DestinationS3UpdateCompressionCodec {
+ typ := DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateDeflate
- return DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate: &destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate,
- Type: typ,
+ return DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateDeflate: &destinationS3UpdateDeflate,
+ Type: typ,
}
}
-func CreateDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2(destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2) DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2
+func CreateDestinationS3UpdateCompressionCodecDestinationS3UpdateBzip2(destinationS3UpdateBzip2 DestinationS3UpdateBzip2) DestinationS3UpdateCompressionCodec {
+ typ := DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateBzip2
- return DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2: &destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2,
- Type: typ,
+ return DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateBzip2: &destinationS3UpdateBzip2,
+ Type: typ,
}
}
-func CreateDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz(destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz) DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz
+func CreateDestinationS3UpdateCompressionCodecDestinationS3UpdateXz(destinationS3UpdateXz DestinationS3UpdateXz) DestinationS3UpdateCompressionCodec {
+ typ := DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateXz
- return DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz: &destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz,
- Type: typ,
+ return DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateXz: &destinationS3UpdateXz,
+ Type: typ,
}
}
-func CreateDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard(destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard) DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard
+func CreateDestinationS3UpdateCompressionCodecDestinationS3UpdateZstandard(destinationS3UpdateZstandard DestinationS3UpdateZstandard) DestinationS3UpdateCompressionCodec {
+ typ := DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateZstandard
- return DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard: &destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard,
- Type: typ,
+ return DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateZstandard: &destinationS3UpdateZstandard,
+ Type: typ,
}
}
-func CreateDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy(destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy) DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec {
- typ := DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy
+func CreateDestinationS3UpdateCompressionCodecDestinationS3UpdateSnappy(destinationS3UpdateSnappy DestinationS3UpdateSnappy) DestinationS3UpdateCompressionCodec {
+ typ := DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateSnappy
- return DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec{
- DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy: &destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy,
- Type: typ,
+ return DestinationS3UpdateCompressionCodec{
+ DestinationS3UpdateSnappy: &destinationS3UpdateSnappy,
+ Type: typ,
}
}
-func (u *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DestinationS3UpdateCompressionCodec) UnmarshalJSON(data []byte) error {
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression := new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression); err == nil {
- u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression
- u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression
+ destinationS3UpdateNoCompression := new(DestinationS3UpdateNoCompression)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateNoCompression, "", true, true); err == nil {
+ u.DestinationS3UpdateNoCompression = destinationS3UpdateNoCompression
+ u.Type = DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateNoCompression
return nil
}
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 := new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil {
- u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2
- u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2
+ destinationS3UpdateBzip2 := new(DestinationS3UpdateBzip2)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateBzip2, "", true, true); err == nil {
+ u.DestinationS3UpdateBzip2 = destinationS3UpdateBzip2
+ u.Type = DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateBzip2
return nil
}
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil {
- u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy
- u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy
+ destinationS3UpdateSnappy := new(DestinationS3UpdateSnappy)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateSnappy, "", true, true); err == nil {
+ u.DestinationS3UpdateSnappy = destinationS3UpdateSnappy
+ u.Type = DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateSnappy
return nil
}
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil {
- u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate
- u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate
+ destinationS3UpdateDeflate := new(DestinationS3UpdateDeflate)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateDeflate, "", true, true); err == nil {
+ u.DestinationS3UpdateDeflate = destinationS3UpdateDeflate
+ u.Type = DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateDeflate
return nil
}
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz := new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz); err == nil {
- u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz
- u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz
+ destinationS3UpdateXz := new(DestinationS3UpdateXz)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateXz, "", true, true); err == nil {
+ u.DestinationS3UpdateXz = destinationS3UpdateXz
+ u.Type = DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateXz
return nil
}
- destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard := new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard); err == nil {
- u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard
- u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard
+ destinationS3UpdateZstandard := new(DestinationS3UpdateZstandard)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateZstandard, "", true, true); err == nil {
+ u.DestinationS3UpdateZstandard = destinationS3UpdateZstandard
+ u.Type = DestinationS3UpdateCompressionCodecTypeDestinationS3UpdateZstandard
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec) MarshalJSON() ([]byte, error) {
- if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression)
+func (u DestinationS3UpdateCompressionCodec) MarshalJSON() ([]byte, error) {
+ if u.DestinationS3UpdateNoCompression != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateNoCompression, "", true)
}
- if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2)
+ if u.DestinationS3UpdateDeflate != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateDeflate, "", true)
}
- if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy)
+ if u.DestinationS3UpdateBzip2 != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateBzip2, "", true)
}
- if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate)
+ if u.DestinationS3UpdateXz != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateXz, "", true)
}
- if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz)
+ if u.DestinationS3UpdateZstandard != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateZstandard, "", true)
}
- if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard)
+ if u.DestinationS3UpdateSnappy != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateSnappy, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationS3UpdateOutputFormatAvroApacheAvroFormatType string
+type DestinationS3UpdateFormatType string
const (
- DestinationS3UpdateOutputFormatAvroApacheAvroFormatTypeAvro DestinationS3UpdateOutputFormatAvroApacheAvroFormatType = "Avro"
+ DestinationS3UpdateFormatTypeAvro DestinationS3UpdateFormatType = "Avro"
)
-func (e DestinationS3UpdateOutputFormatAvroApacheAvroFormatType) ToPointer() *DestinationS3UpdateOutputFormatAvroApacheAvroFormatType {
+func (e DestinationS3UpdateFormatType) ToPointer() *DestinationS3UpdateFormatType {
return &e
}
-func (e *DestinationS3UpdateOutputFormatAvroApacheAvroFormatType) UnmarshalJSON(data []byte) error {
+func (e *DestinationS3UpdateFormatType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Avro":
- *e = DestinationS3UpdateOutputFormatAvroApacheAvroFormatType(v)
+ *e = DestinationS3UpdateFormatType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationS3UpdateOutputFormatAvroApacheAvroFormatType: %v", v)
+ return fmt.Errorf("invalid value for DestinationS3UpdateFormatType: %v", v)
}
}
-// DestinationS3UpdateOutputFormatAvroApacheAvro - Format of the data output. See here for more details
-type DestinationS3UpdateOutputFormatAvroApacheAvro struct {
+// DestinationS3UpdateAvroApacheAvro - Format of the data output. See here for more details
+type DestinationS3UpdateAvroApacheAvro struct {
// The compression algorithm used to compress data. Default to no compression.
- CompressionCodec DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec `json:"compression_codec"`
- FormatType DestinationS3UpdateOutputFormatAvroApacheAvroFormatType `json:"format_type"`
+ CompressionCodec DestinationS3UpdateCompressionCodec `json:"compression_codec"`
+ FormatType *DestinationS3UpdateFormatType `default:"Avro" json:"format_type"`
+}
+
+func (d DestinationS3UpdateAvroApacheAvro) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3UpdateAvroApacheAvro) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3UpdateAvroApacheAvro) GetCompressionCodec() DestinationS3UpdateCompressionCodec {
+ if o == nil {
+ return DestinationS3UpdateCompressionCodec{}
+ }
+ return o.CompressionCodec
+}
+
+func (o *DestinationS3UpdateAvroApacheAvro) GetFormatType() *DestinationS3UpdateFormatType {
+ if o == nil {
+ return nil
+ }
+ return o.FormatType
}
type DestinationS3UpdateOutputFormatType string
const (
- DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatAvroApacheAvro DestinationS3UpdateOutputFormatType = "destination-s3-update_Output Format_Avro: Apache Avro"
- DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatCSVCommaSeparatedValues DestinationS3UpdateOutputFormatType = "destination-s3-update_Output Format_CSV: Comma-Separated Values"
- DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON DestinationS3UpdateOutputFormatType = "destination-s3-update_Output Format_JSON Lines: Newline-delimited JSON"
- DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatParquetColumnarStorage DestinationS3UpdateOutputFormatType = "destination-s3-update_Output Format_Parquet: Columnar Storage"
+ DestinationS3UpdateOutputFormatTypeDestinationS3UpdateAvroApacheAvro DestinationS3UpdateOutputFormatType = "destination-s3-update_Avro: Apache Avro"
+ DestinationS3UpdateOutputFormatTypeDestinationS3UpdateCSVCommaSeparatedValues DestinationS3UpdateOutputFormatType = "destination-s3-update_CSV: Comma-Separated Values"
+ DestinationS3UpdateOutputFormatTypeDestinationS3UpdateJSONLinesNewlineDelimitedJSON DestinationS3UpdateOutputFormatType = "destination-s3-update_JSON Lines: Newline-delimited JSON"
+ DestinationS3UpdateOutputFormatTypeDestinationS3UpdateParquetColumnarStorage DestinationS3UpdateOutputFormatType = "destination-s3-update_Parquet: Columnar Storage"
)
type DestinationS3UpdateOutputFormat struct {
- DestinationS3UpdateOutputFormatAvroApacheAvro *DestinationS3UpdateOutputFormatAvroApacheAvro
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValues *DestinationS3UpdateOutputFormatCSVCommaSeparatedValues
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON *DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON
- DestinationS3UpdateOutputFormatParquetColumnarStorage *DestinationS3UpdateOutputFormatParquetColumnarStorage
+ DestinationS3UpdateAvroApacheAvro *DestinationS3UpdateAvroApacheAvro
+ DestinationS3UpdateCSVCommaSeparatedValues *DestinationS3UpdateCSVCommaSeparatedValues
+ DestinationS3UpdateJSONLinesNewlineDelimitedJSON *DestinationS3UpdateJSONLinesNewlineDelimitedJSON
+ DestinationS3UpdateParquetColumnarStorage *DestinationS3UpdateParquetColumnarStorage
Type DestinationS3UpdateOutputFormatType
}
-func CreateDestinationS3UpdateOutputFormatDestinationS3UpdateOutputFormatAvroApacheAvro(destinationS3UpdateOutputFormatAvroApacheAvro DestinationS3UpdateOutputFormatAvroApacheAvro) DestinationS3UpdateOutputFormat {
- typ := DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatAvroApacheAvro
+func CreateDestinationS3UpdateOutputFormatDestinationS3UpdateAvroApacheAvro(destinationS3UpdateAvroApacheAvro DestinationS3UpdateAvroApacheAvro) DestinationS3UpdateOutputFormat {
+ typ := DestinationS3UpdateOutputFormatTypeDestinationS3UpdateAvroApacheAvro
return DestinationS3UpdateOutputFormat{
- DestinationS3UpdateOutputFormatAvroApacheAvro: &destinationS3UpdateOutputFormatAvroApacheAvro,
- Type: typ,
+ DestinationS3UpdateAvroApacheAvro: &destinationS3UpdateAvroApacheAvro,
+ Type: typ,
}
}
-func CreateDestinationS3UpdateOutputFormatDestinationS3UpdateOutputFormatCSVCommaSeparatedValues(destinationS3UpdateOutputFormatCSVCommaSeparatedValues DestinationS3UpdateOutputFormatCSVCommaSeparatedValues) DestinationS3UpdateOutputFormat {
- typ := DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatCSVCommaSeparatedValues
+func CreateDestinationS3UpdateOutputFormatDestinationS3UpdateCSVCommaSeparatedValues(destinationS3UpdateCSVCommaSeparatedValues DestinationS3UpdateCSVCommaSeparatedValues) DestinationS3UpdateOutputFormat {
+ typ := DestinationS3UpdateOutputFormatTypeDestinationS3UpdateCSVCommaSeparatedValues
return DestinationS3UpdateOutputFormat{
- DestinationS3UpdateOutputFormatCSVCommaSeparatedValues: &destinationS3UpdateOutputFormatCSVCommaSeparatedValues,
+ DestinationS3UpdateCSVCommaSeparatedValues: &destinationS3UpdateCSVCommaSeparatedValues,
Type: typ,
}
}
-func CreateDestinationS3UpdateOutputFormatDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON(destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON) DestinationS3UpdateOutputFormat {
- typ := DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON
+func CreateDestinationS3UpdateOutputFormatDestinationS3UpdateJSONLinesNewlineDelimitedJSON(destinationS3UpdateJSONLinesNewlineDelimitedJSON DestinationS3UpdateJSONLinesNewlineDelimitedJSON) DestinationS3UpdateOutputFormat {
+ typ := DestinationS3UpdateOutputFormatTypeDestinationS3UpdateJSONLinesNewlineDelimitedJSON
return DestinationS3UpdateOutputFormat{
- DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON: &destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON,
+ DestinationS3UpdateJSONLinesNewlineDelimitedJSON: &destinationS3UpdateJSONLinesNewlineDelimitedJSON,
Type: typ,
}
}
-func CreateDestinationS3UpdateOutputFormatDestinationS3UpdateOutputFormatParquetColumnarStorage(destinationS3UpdateOutputFormatParquetColumnarStorage DestinationS3UpdateOutputFormatParquetColumnarStorage) DestinationS3UpdateOutputFormat {
- typ := DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatParquetColumnarStorage
+func CreateDestinationS3UpdateOutputFormatDestinationS3UpdateParquetColumnarStorage(destinationS3UpdateParquetColumnarStorage DestinationS3UpdateParquetColumnarStorage) DestinationS3UpdateOutputFormat {
+ typ := DestinationS3UpdateOutputFormatTypeDestinationS3UpdateParquetColumnarStorage
return DestinationS3UpdateOutputFormat{
- DestinationS3UpdateOutputFormatParquetColumnarStorage: &destinationS3UpdateOutputFormatParquetColumnarStorage,
+ DestinationS3UpdateParquetColumnarStorage: &destinationS3UpdateParquetColumnarStorage,
Type: typ,
}
}
func (u *DestinationS3UpdateOutputFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- destinationS3UpdateOutputFormatAvroApacheAvro := new(DestinationS3UpdateOutputFormatAvroApacheAvro)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvro); err == nil {
- u.DestinationS3UpdateOutputFormatAvroApacheAvro = destinationS3UpdateOutputFormatAvroApacheAvro
- u.Type = DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatAvroApacheAvro
+ destinationS3UpdateAvroApacheAvro := new(DestinationS3UpdateAvroApacheAvro)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateAvroApacheAvro, "", true, true); err == nil {
+ u.DestinationS3UpdateAvroApacheAvro = destinationS3UpdateAvroApacheAvro
+ u.Type = DestinationS3UpdateOutputFormatTypeDestinationS3UpdateAvroApacheAvro
return nil
}
- destinationS3UpdateOutputFormatCSVCommaSeparatedValues := new(DestinationS3UpdateOutputFormatCSVCommaSeparatedValues)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatCSVCommaSeparatedValues); err == nil {
- u.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues = destinationS3UpdateOutputFormatCSVCommaSeparatedValues
- u.Type = DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatCSVCommaSeparatedValues
+ destinationS3UpdateCSVCommaSeparatedValues := new(DestinationS3UpdateCSVCommaSeparatedValues)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateCSVCommaSeparatedValues, "", true, true); err == nil {
+ u.DestinationS3UpdateCSVCommaSeparatedValues = destinationS3UpdateCSVCommaSeparatedValues
+ u.Type = DestinationS3UpdateOutputFormatTypeDestinationS3UpdateCSVCommaSeparatedValues
return nil
}
- destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON); err == nil {
- u.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON = destinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON
- u.Type = DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON
+ destinationS3UpdateJSONLinesNewlineDelimitedJSON := new(DestinationS3UpdateJSONLinesNewlineDelimitedJSON)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateJSONLinesNewlineDelimitedJSON, "", true, true); err == nil {
+ u.DestinationS3UpdateJSONLinesNewlineDelimitedJSON = destinationS3UpdateJSONLinesNewlineDelimitedJSON
+ u.Type = DestinationS3UpdateOutputFormatTypeDestinationS3UpdateJSONLinesNewlineDelimitedJSON
return nil
}
- destinationS3UpdateOutputFormatParquetColumnarStorage := new(DestinationS3UpdateOutputFormatParquetColumnarStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationS3UpdateOutputFormatParquetColumnarStorage); err == nil {
- u.DestinationS3UpdateOutputFormatParquetColumnarStorage = destinationS3UpdateOutputFormatParquetColumnarStorage
- u.Type = DestinationS3UpdateOutputFormatTypeDestinationS3UpdateOutputFormatParquetColumnarStorage
+ destinationS3UpdateParquetColumnarStorage := new(DestinationS3UpdateParquetColumnarStorage)
+ if err := utils.UnmarshalJSON(data, &destinationS3UpdateParquetColumnarStorage, "", true, true); err == nil {
+ u.DestinationS3UpdateParquetColumnarStorage = destinationS3UpdateParquetColumnarStorage
+ u.Type = DestinationS3UpdateOutputFormatTypeDestinationS3UpdateParquetColumnarStorage
return nil
}
@@ -941,23 +1266,23 @@ func (u *DestinationS3UpdateOutputFormat) UnmarshalJSON(data []byte) error {
}
func (u DestinationS3UpdateOutputFormat) MarshalJSON() ([]byte, error) {
- if u.DestinationS3UpdateOutputFormatAvroApacheAvro != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvro)
+ if u.DestinationS3UpdateAvroApacheAvro != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateAvroApacheAvro, "", true)
}
- if u.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatCSVCommaSeparatedValues)
+ if u.DestinationS3UpdateCSVCommaSeparatedValues != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateCSVCommaSeparatedValues, "", true)
}
- if u.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatJSONLinesNewlineDelimitedJSON)
+ if u.DestinationS3UpdateJSONLinesNewlineDelimitedJSON != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateJSONLinesNewlineDelimitedJSON, "", true)
}
- if u.DestinationS3UpdateOutputFormatParquetColumnarStorage != nil {
- return json.Marshal(u.DestinationS3UpdateOutputFormatParquetColumnarStorage)
+ if u.DestinationS3UpdateParquetColumnarStorage != nil {
+ return utils.MarshalJSON(u.DestinationS3UpdateParquetColumnarStorage, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// DestinationS3UpdateS3BucketRegion - The region of the S3 bucket. See here for all region codes.
@@ -1072,11 +1397,85 @@ type DestinationS3Update struct {
// Directory under the S3 bucket where data will be written. Read more here
S3BucketPath string `json:"s3_bucket_path"`
// The region of the S3 bucket. See here for all region codes.
- S3BucketRegion DestinationS3UpdateS3BucketRegion `json:"s3_bucket_region"`
+ S3BucketRegion *DestinationS3UpdateS3BucketRegion `default:"" json:"s3_bucket_region"`
// Your S3 endpoint url. Read more here
- S3Endpoint *string `json:"s3_endpoint,omitempty"`
+ S3Endpoint *string `default:"" json:"s3_endpoint"`
// Format string on how data will be organized inside the S3 bucket directory. Read more here
S3PathFormat *string `json:"s3_path_format,omitempty"`
// The corresponding secret to the access key ID. Read more here
SecretAccessKey *string `json:"secret_access_key,omitempty"`
}
+
+func (d DestinationS3Update) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationS3Update) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationS3Update) GetAccessKeyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessKeyID
+}
+
+func (o *DestinationS3Update) GetFileNamePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FileNamePattern
+}
+
+func (o *DestinationS3Update) GetFormat() DestinationS3UpdateOutputFormat {
+ if o == nil {
+ return DestinationS3UpdateOutputFormat{}
+ }
+ return o.Format
+}
+
+func (o *DestinationS3Update) GetS3BucketName() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketName
+}
+
+func (o *DestinationS3Update) GetS3BucketPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.S3BucketPath
+}
+
+func (o *DestinationS3Update) GetS3BucketRegion() *DestinationS3UpdateS3BucketRegion {
+ if o == nil {
+ return nil
+ }
+ return o.S3BucketRegion
+}
+
+func (o *DestinationS3Update) GetS3Endpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.S3Endpoint
+}
+
+func (o *DestinationS3Update) GetS3PathFormat() *string {
+ if o == nil {
+ return nil
+ }
+ return o.S3PathFormat
+}
+
+func (o *DestinationS3Update) GetSecretAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretAccessKey
+}
diff --git a/internal/sdk/pkg/models/shared/destinationsftpjson.go b/internal/sdk/pkg/models/shared/destinationsftpjson.go
old mode 100755
new mode 100644
index 37359b863..04b4d4cdf
--- a/internal/sdk/pkg/models/shared/destinationsftpjson.go
+++ b/internal/sdk/pkg/models/shared/destinationsftpjson.go
@@ -5,34 +5,35 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationSftpJSONSftpJSON string
+type SftpJSON string
const (
- DestinationSftpJSONSftpJSONSftpJSON DestinationSftpJSONSftpJSON = "sftp-json"
+ SftpJSONSftpJSON SftpJSON = "sftp-json"
)
-func (e DestinationSftpJSONSftpJSON) ToPointer() *DestinationSftpJSONSftpJSON {
+func (e SftpJSON) ToPointer() *SftpJSON {
return &e
}
-func (e *DestinationSftpJSONSftpJSON) UnmarshalJSON(data []byte) error {
+func (e *SftpJSON) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sftp-json":
- *e = DestinationSftpJSONSftpJSON(v)
+ *e = SftpJSON(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationSftpJSONSftpJSON: %v", v)
+ return fmt.Errorf("invalid value for SftpJSON: %v", v)
}
}
type DestinationSftpJSON struct {
- DestinationType DestinationSftpJSONSftpJSON `json:"destinationType"`
+ destinationType SftpJSON `const:"sftp-json" json:"destinationType"`
// Path to the directory where json files will be written.
DestinationPath string `json:"destination_path"`
// Hostname of the SFTP server.
@@ -40,7 +41,57 @@ type DestinationSftpJSON struct {
// Password associated with the username.
Password string `json:"password"`
// Port of the SFTP server.
- Port *int64 `json:"port,omitempty"`
+ Port *int64 `default:"22" json:"port"`
// Username to use to access the SFTP server.
Username string `json:"username"`
}
+
+func (d DestinationSftpJSON) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationSftpJSON) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationSftpJSON) GetDestinationType() SftpJSON {
+ return SftpJSONSftpJSON
+}
+
+func (o *DestinationSftpJSON) GetDestinationPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationPath
+}
+
+func (o *DestinationSftpJSON) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationSftpJSON) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *DestinationSftpJSON) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationSftpJSON) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationsftpjsoncreaterequest.go b/internal/sdk/pkg/models/shared/destinationsftpjsoncreaterequest.go
old mode 100755
new mode 100644
index eb556c517..4093df0e0
--- a/internal/sdk/pkg/models/shared/destinationsftpjsoncreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationsftpjsoncreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationSftpJSONCreateRequest struct {
Configuration DestinationSftpJSON `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationSftpJSONCreateRequest) GetConfiguration() DestinationSftpJSON {
+ if o == nil {
+ return DestinationSftpJSON{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationSftpJSONCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationSftpJSONCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationSftpJSONCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationsftpjsonputrequest.go b/internal/sdk/pkg/models/shared/destinationsftpjsonputrequest.go
old mode 100755
new mode 100644
index 1a7af86cd..c14396fec
--- a/internal/sdk/pkg/models/shared/destinationsftpjsonputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationsftpjsonputrequest.go
@@ -7,3 +7,24 @@ type DestinationSftpJSONPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationSftpJSONPutRequest) GetConfiguration() DestinationSftpJSONUpdate {
+ if o == nil {
+ return DestinationSftpJSONUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationSftpJSONPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationSftpJSONPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationsftpjsonupdate.go b/internal/sdk/pkg/models/shared/destinationsftpjsonupdate.go
old mode 100755
new mode 100644
index fd1e2635e..f310f3b3c
--- a/internal/sdk/pkg/models/shared/destinationsftpjsonupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationsftpjsonupdate.go
@@ -2,6 +2,10 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type DestinationSftpJSONUpdate struct {
// Path to the directory where json files will be written.
DestinationPath string `json:"destination_path"`
@@ -10,7 +14,53 @@ type DestinationSftpJSONUpdate struct {
// Password associated with the username.
Password string `json:"password"`
// Port of the SFTP server.
- Port *int64 `json:"port,omitempty"`
+ Port *int64 `default:"22" json:"port"`
// Username to use to access the SFTP server.
Username string `json:"username"`
}
+
+func (d DestinationSftpJSONUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationSftpJSONUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationSftpJSONUpdate) GetDestinationPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.DestinationPath
+}
+
+func (o *DestinationSftpJSONUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationSftpJSONUpdate) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *DestinationSftpJSONUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationSftpJSONUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationsnowflake.go b/internal/sdk/pkg/models/shared/destinationsnowflake.go
old mode 100755
new mode 100644
index 8af8ad074..17198f8c5
--- a/internal/sdk/pkg/models/shared/destinationsnowflake.go
+++ b/internal/sdk/pkg/models/shared/destinationsnowflake.go
@@ -3,102 +3,153 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationSnowflakeAuthorizationMethodUsernameAndPasswordAuthType string
+type DestinationSnowflakeSchemasCredentialsAuthType string
const (
- DestinationSnowflakeAuthorizationMethodUsernameAndPasswordAuthTypeUsernameAndPassword DestinationSnowflakeAuthorizationMethodUsernameAndPasswordAuthType = "Username and Password"
+ DestinationSnowflakeSchemasCredentialsAuthTypeUsernameAndPassword DestinationSnowflakeSchemasCredentialsAuthType = "Username and Password"
)
-func (e DestinationSnowflakeAuthorizationMethodUsernameAndPasswordAuthType) ToPointer() *DestinationSnowflakeAuthorizationMethodUsernameAndPasswordAuthType {
+func (e DestinationSnowflakeSchemasCredentialsAuthType) ToPointer() *DestinationSnowflakeSchemasCredentialsAuthType {
return &e
}
-func (e *DestinationSnowflakeAuthorizationMethodUsernameAndPasswordAuthType) UnmarshalJSON(data []byte) error {
+func (e *DestinationSnowflakeSchemasCredentialsAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Username and Password":
- *e = DestinationSnowflakeAuthorizationMethodUsernameAndPasswordAuthType(v)
+ *e = DestinationSnowflakeSchemasCredentialsAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationSnowflakeAuthorizationMethodUsernameAndPasswordAuthType: %v", v)
+ return fmt.Errorf("invalid value for DestinationSnowflakeSchemasCredentialsAuthType: %v", v)
}
}
-type DestinationSnowflakeAuthorizationMethodUsernameAndPassword struct {
- AuthType *DestinationSnowflakeAuthorizationMethodUsernameAndPasswordAuthType `json:"auth_type,omitempty"`
+type DestinationSnowflakeUsernameAndPassword struct {
+ authType *DestinationSnowflakeSchemasCredentialsAuthType `const:"Username and Password" json:"auth_type"`
// Enter the password associated with the username.
Password string `json:"password"`
}
-type DestinationSnowflakeAuthorizationMethodKeyPairAuthenticationAuthType string
+func (d DestinationSnowflakeUsernameAndPassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationSnowflakeUsernameAndPassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationSnowflakeUsernameAndPassword) GetAuthType() *DestinationSnowflakeSchemasCredentialsAuthType {
+ return DestinationSnowflakeSchemasCredentialsAuthTypeUsernameAndPassword.ToPointer()
+}
+
+func (o *DestinationSnowflakeUsernameAndPassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+type DestinationSnowflakeSchemasAuthType string
const (
- DestinationSnowflakeAuthorizationMethodKeyPairAuthenticationAuthTypeKeyPairAuthentication DestinationSnowflakeAuthorizationMethodKeyPairAuthenticationAuthType = "Key Pair Authentication"
+ DestinationSnowflakeSchemasAuthTypeKeyPairAuthentication DestinationSnowflakeSchemasAuthType = "Key Pair Authentication"
)
-func (e DestinationSnowflakeAuthorizationMethodKeyPairAuthenticationAuthType) ToPointer() *DestinationSnowflakeAuthorizationMethodKeyPairAuthenticationAuthType {
+func (e DestinationSnowflakeSchemasAuthType) ToPointer() *DestinationSnowflakeSchemasAuthType {
return &e
}
-func (e *DestinationSnowflakeAuthorizationMethodKeyPairAuthenticationAuthType) UnmarshalJSON(data []byte) error {
+func (e *DestinationSnowflakeSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Key Pair Authentication":
- *e = DestinationSnowflakeAuthorizationMethodKeyPairAuthenticationAuthType(v)
+ *e = DestinationSnowflakeSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationSnowflakeAuthorizationMethodKeyPairAuthenticationAuthType: %v", v)
+ return fmt.Errorf("invalid value for DestinationSnowflakeSchemasAuthType: %v", v)
}
}
-type DestinationSnowflakeAuthorizationMethodKeyPairAuthentication struct {
- AuthType *DestinationSnowflakeAuthorizationMethodKeyPairAuthenticationAuthType `json:"auth_type,omitempty"`
+type DestinationSnowflakeKeyPairAuthentication struct {
+ authType *DestinationSnowflakeSchemasAuthType `const:"Key Pair Authentication" json:"auth_type"`
// RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
PrivateKey string `json:"private_key"`
// Passphrase for private key
PrivateKeyPassword *string `json:"private_key_password,omitempty"`
}
-type DestinationSnowflakeAuthorizationMethodOAuth20AuthType string
+func (d DestinationSnowflakeKeyPairAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationSnowflakeKeyPairAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationSnowflakeKeyPairAuthentication) GetAuthType() *DestinationSnowflakeSchemasAuthType {
+ return DestinationSnowflakeSchemasAuthTypeKeyPairAuthentication.ToPointer()
+}
+
+func (o *DestinationSnowflakeKeyPairAuthentication) GetPrivateKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PrivateKey
+}
+
+func (o *DestinationSnowflakeKeyPairAuthentication) GetPrivateKeyPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrivateKeyPassword
+}
+
+type DestinationSnowflakeAuthType string
const (
- DestinationSnowflakeAuthorizationMethodOAuth20AuthTypeOAuth20 DestinationSnowflakeAuthorizationMethodOAuth20AuthType = "OAuth2.0"
+ DestinationSnowflakeAuthTypeOAuth20 DestinationSnowflakeAuthType = "OAuth2.0"
)
-func (e DestinationSnowflakeAuthorizationMethodOAuth20AuthType) ToPointer() *DestinationSnowflakeAuthorizationMethodOAuth20AuthType {
+func (e DestinationSnowflakeAuthType) ToPointer() *DestinationSnowflakeAuthType {
return &e
}
-func (e *DestinationSnowflakeAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *DestinationSnowflakeAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth2.0":
- *e = DestinationSnowflakeAuthorizationMethodOAuth20AuthType(v)
+ *e = DestinationSnowflakeAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationSnowflakeAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for DestinationSnowflakeAuthType: %v", v)
}
}
-type DestinationSnowflakeAuthorizationMethodOAuth20 struct {
+type DestinationSnowflakeOAuth20 struct {
// Enter you application's Access Token
- AccessToken string `json:"access_token"`
- AuthType *DestinationSnowflakeAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *DestinationSnowflakeAuthType `const:"OAuth2.0" json:"auth_type"`
// Enter your application's Client ID
ClientID *string `json:"client_id,omitempty"`
// Enter your application's Client secret
@@ -107,76 +158,112 @@ type DestinationSnowflakeAuthorizationMethodOAuth20 struct {
RefreshToken string `json:"refresh_token"`
}
+func (d DestinationSnowflakeOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationSnowflakeOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationSnowflakeOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *DestinationSnowflakeOAuth20) GetAuthType() *DestinationSnowflakeAuthType {
+ return DestinationSnowflakeAuthTypeOAuth20.ToPointer()
+}
+
+func (o *DestinationSnowflakeOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *DestinationSnowflakeOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *DestinationSnowflakeOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type DestinationSnowflakeAuthorizationMethodType string
const (
- DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodOAuth20 DestinationSnowflakeAuthorizationMethodType = "destination-snowflake_Authorization Method_OAuth2.0"
- DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodKeyPairAuthentication DestinationSnowflakeAuthorizationMethodType = "destination-snowflake_Authorization Method_Key Pair Authentication"
- DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodUsernameAndPassword DestinationSnowflakeAuthorizationMethodType = "destination-snowflake_Authorization Method_Username and Password"
+ DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeOAuth20 DestinationSnowflakeAuthorizationMethodType = "destination-snowflake_OAuth2.0"
+ DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeKeyPairAuthentication DestinationSnowflakeAuthorizationMethodType = "destination-snowflake_Key Pair Authentication"
+ DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeUsernameAndPassword DestinationSnowflakeAuthorizationMethodType = "destination-snowflake_Username and Password"
)
type DestinationSnowflakeAuthorizationMethod struct {
- DestinationSnowflakeAuthorizationMethodOAuth20 *DestinationSnowflakeAuthorizationMethodOAuth20
- DestinationSnowflakeAuthorizationMethodKeyPairAuthentication *DestinationSnowflakeAuthorizationMethodKeyPairAuthentication
- DestinationSnowflakeAuthorizationMethodUsernameAndPassword *DestinationSnowflakeAuthorizationMethodUsernameAndPassword
+ DestinationSnowflakeOAuth20 *DestinationSnowflakeOAuth20
+ DestinationSnowflakeKeyPairAuthentication *DestinationSnowflakeKeyPairAuthentication
+ DestinationSnowflakeUsernameAndPassword *DestinationSnowflakeUsernameAndPassword
Type DestinationSnowflakeAuthorizationMethodType
}
-func CreateDestinationSnowflakeAuthorizationMethodDestinationSnowflakeAuthorizationMethodOAuth20(destinationSnowflakeAuthorizationMethodOAuth20 DestinationSnowflakeAuthorizationMethodOAuth20) DestinationSnowflakeAuthorizationMethod {
- typ := DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodOAuth20
+func CreateDestinationSnowflakeAuthorizationMethodDestinationSnowflakeOAuth20(destinationSnowflakeOAuth20 DestinationSnowflakeOAuth20) DestinationSnowflakeAuthorizationMethod {
+ typ := DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeOAuth20
return DestinationSnowflakeAuthorizationMethod{
- DestinationSnowflakeAuthorizationMethodOAuth20: &destinationSnowflakeAuthorizationMethodOAuth20,
- Type: typ,
+ DestinationSnowflakeOAuth20: &destinationSnowflakeOAuth20,
+ Type: typ,
}
}
-func CreateDestinationSnowflakeAuthorizationMethodDestinationSnowflakeAuthorizationMethodKeyPairAuthentication(destinationSnowflakeAuthorizationMethodKeyPairAuthentication DestinationSnowflakeAuthorizationMethodKeyPairAuthentication) DestinationSnowflakeAuthorizationMethod {
- typ := DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodKeyPairAuthentication
+func CreateDestinationSnowflakeAuthorizationMethodDestinationSnowflakeKeyPairAuthentication(destinationSnowflakeKeyPairAuthentication DestinationSnowflakeKeyPairAuthentication) DestinationSnowflakeAuthorizationMethod {
+ typ := DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeKeyPairAuthentication
return DestinationSnowflakeAuthorizationMethod{
- DestinationSnowflakeAuthorizationMethodKeyPairAuthentication: &destinationSnowflakeAuthorizationMethodKeyPairAuthentication,
+ DestinationSnowflakeKeyPairAuthentication: &destinationSnowflakeKeyPairAuthentication,
Type: typ,
}
}
-func CreateDestinationSnowflakeAuthorizationMethodDestinationSnowflakeAuthorizationMethodUsernameAndPassword(destinationSnowflakeAuthorizationMethodUsernameAndPassword DestinationSnowflakeAuthorizationMethodUsernameAndPassword) DestinationSnowflakeAuthorizationMethod {
- typ := DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodUsernameAndPassword
+func CreateDestinationSnowflakeAuthorizationMethodDestinationSnowflakeUsernameAndPassword(destinationSnowflakeUsernameAndPassword DestinationSnowflakeUsernameAndPassword) DestinationSnowflakeAuthorizationMethod {
+ typ := DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeUsernameAndPassword
return DestinationSnowflakeAuthorizationMethod{
- DestinationSnowflakeAuthorizationMethodUsernameAndPassword: &destinationSnowflakeAuthorizationMethodUsernameAndPassword,
- Type: typ,
+ DestinationSnowflakeUsernameAndPassword: &destinationSnowflakeUsernameAndPassword,
+ Type: typ,
}
}
func (u *DestinationSnowflakeAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationSnowflakeAuthorizationMethodUsernameAndPassword := new(DestinationSnowflakeAuthorizationMethodUsernameAndPassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationSnowflakeAuthorizationMethodUsernameAndPassword); err == nil {
- u.DestinationSnowflakeAuthorizationMethodUsernameAndPassword = destinationSnowflakeAuthorizationMethodUsernameAndPassword
- u.Type = DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodUsernameAndPassword
+
+ destinationSnowflakeUsernameAndPassword := new(DestinationSnowflakeUsernameAndPassword)
+ if err := utils.UnmarshalJSON(data, &destinationSnowflakeUsernameAndPassword, "", true, true); err == nil {
+ u.DestinationSnowflakeUsernameAndPassword = destinationSnowflakeUsernameAndPassword
+ u.Type = DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeUsernameAndPassword
return nil
}
- destinationSnowflakeAuthorizationMethodKeyPairAuthentication := new(DestinationSnowflakeAuthorizationMethodKeyPairAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationSnowflakeAuthorizationMethodKeyPairAuthentication); err == nil {
- u.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication = destinationSnowflakeAuthorizationMethodKeyPairAuthentication
- u.Type = DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodKeyPairAuthentication
+ destinationSnowflakeKeyPairAuthentication := new(DestinationSnowflakeKeyPairAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationSnowflakeKeyPairAuthentication, "", true, true); err == nil {
+ u.DestinationSnowflakeKeyPairAuthentication = destinationSnowflakeKeyPairAuthentication
+ u.Type = DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeKeyPairAuthentication
return nil
}
- destinationSnowflakeAuthorizationMethodOAuth20 := new(DestinationSnowflakeAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationSnowflakeAuthorizationMethodOAuth20); err == nil {
- u.DestinationSnowflakeAuthorizationMethodOAuth20 = destinationSnowflakeAuthorizationMethodOAuth20
- u.Type = DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodOAuth20
+ destinationSnowflakeOAuth20 := new(DestinationSnowflakeOAuth20)
+ if err := utils.UnmarshalJSON(data, &destinationSnowflakeOAuth20, "", true, true); err == nil {
+ u.DestinationSnowflakeOAuth20 = destinationSnowflakeOAuth20
+ u.Type = DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeOAuth20
return nil
}
@@ -184,55 +271,57 @@ func (u *DestinationSnowflakeAuthorizationMethod) UnmarshalJSON(data []byte) err
}
func (u DestinationSnowflakeAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationSnowflakeAuthorizationMethodUsernameAndPassword != nil {
- return json.Marshal(u.DestinationSnowflakeAuthorizationMethodUsernameAndPassword)
+ if u.DestinationSnowflakeOAuth20 != nil {
+ return utils.MarshalJSON(u.DestinationSnowflakeOAuth20, "", true)
}
- if u.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication != nil {
- return json.Marshal(u.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication)
+ if u.DestinationSnowflakeKeyPairAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationSnowflakeKeyPairAuthentication, "", true)
}
- if u.DestinationSnowflakeAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.DestinationSnowflakeAuthorizationMethodOAuth20)
+ if u.DestinationSnowflakeUsernameAndPassword != nil {
+ return utils.MarshalJSON(u.DestinationSnowflakeUsernameAndPassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type DestinationSnowflakeSnowflake string
+type Snowflake string
const (
- DestinationSnowflakeSnowflakeSnowflake DestinationSnowflakeSnowflake = "snowflake"
+ SnowflakeSnowflake Snowflake = "snowflake"
)
-func (e DestinationSnowflakeSnowflake) ToPointer() *DestinationSnowflakeSnowflake {
+func (e Snowflake) ToPointer() *Snowflake {
return &e
}
-func (e *DestinationSnowflakeSnowflake) UnmarshalJSON(data []byte) error {
+func (e *Snowflake) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "snowflake":
- *e = DestinationSnowflakeSnowflake(v)
+ *e = Snowflake(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationSnowflakeSnowflake: %v", v)
+ return fmt.Errorf("invalid value for Snowflake: %v", v)
}
}
type DestinationSnowflake struct {
Credentials *DestinationSnowflakeAuthorizationMethod `json:"credentials,omitempty"`
// Enter the name of the database you want to sync data into
- Database string `json:"database"`
- DestinationType DestinationSnowflakeSnowflake `json:"destinationType"`
+ Database string `json:"database"`
+ destinationType Snowflake `const:"snowflake" json:"destinationType"`
+ // Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions
+ DisableTypeDedupe *bool `default:"false" json:"disable_type_dedupe"`
// Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)
Host string `json:"host"`
// Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
JdbcURLParams *string `json:"jdbc_url_params,omitempty"`
- // The schema to write raw tables into
+ // The schema to write raw tables into (default: airbyte_internal)
RawDataSchema *string `json:"raw_data_schema,omitempty"`
// Enter the role that you want to use to access Snowflake
Role string `json:"role"`
@@ -243,3 +332,88 @@ type DestinationSnowflake struct {
// Enter the name of the warehouse that you want to sync data into
Warehouse string `json:"warehouse"`
}
+
+func (d DestinationSnowflake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationSnowflake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationSnowflake) GetCredentials() *DestinationSnowflakeAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *DestinationSnowflake) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationSnowflake) GetDestinationType() Snowflake {
+ return SnowflakeSnowflake
+}
+
+func (o *DestinationSnowflake) GetDisableTypeDedupe() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DisableTypeDedupe
+}
+
+func (o *DestinationSnowflake) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationSnowflake) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationSnowflake) GetRawDataSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.RawDataSchema
+}
+
+func (o *DestinationSnowflake) GetRole() string {
+ if o == nil {
+ return ""
+ }
+ return o.Role
+}
+
+func (o *DestinationSnowflake) GetSchema() string {
+ if o == nil {
+ return ""
+ }
+ return o.Schema
+}
+
+func (o *DestinationSnowflake) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+func (o *DestinationSnowflake) GetWarehouse() string {
+ if o == nil {
+ return ""
+ }
+ return o.Warehouse
+}
diff --git a/internal/sdk/pkg/models/shared/destinationsnowflakecreaterequest.go b/internal/sdk/pkg/models/shared/destinationsnowflakecreaterequest.go
old mode 100755
new mode 100644
index 2e063ab87..ebf9230d4
--- a/internal/sdk/pkg/models/shared/destinationsnowflakecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationsnowflakecreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationSnowflakeCreateRequest struct {
Configuration DestinationSnowflake `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationSnowflakeCreateRequest) GetConfiguration() DestinationSnowflake {
+ if o == nil {
+ return DestinationSnowflake{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationSnowflakeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationSnowflakeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationSnowflakeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationsnowflakeputrequest.go b/internal/sdk/pkg/models/shared/destinationsnowflakeputrequest.go
old mode 100755
new mode 100644
index 3dd4e15ac..f00c7d343
--- a/internal/sdk/pkg/models/shared/destinationsnowflakeputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationsnowflakeputrequest.go
@@ -7,3 +7,24 @@ type DestinationSnowflakePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationSnowflakePutRequest) GetConfiguration() DestinationSnowflakeUpdate {
+ if o == nil {
+ return DestinationSnowflakeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationSnowflakePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationSnowflakePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationsnowflakeupdate.go b/internal/sdk/pkg/models/shared/destinationsnowflakeupdate.go
old mode 100755
new mode 100644
index d5f7e7d2e..7523eeffd
--- a/internal/sdk/pkg/models/shared/destinationsnowflakeupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationsnowflakeupdate.go
@@ -3,102 +3,153 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType string
+type DestinationSnowflakeUpdateSchemasAuthType string
const (
- DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthTypeUsernameAndPassword DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType = "Username and Password"
+ DestinationSnowflakeUpdateSchemasAuthTypeUsernameAndPassword DestinationSnowflakeUpdateSchemasAuthType = "Username and Password"
)
-func (e DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType) ToPointer() *DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType {
+func (e DestinationSnowflakeUpdateSchemasAuthType) ToPointer() *DestinationSnowflakeUpdateSchemasAuthType {
return &e
}
-func (e *DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType) UnmarshalJSON(data []byte) error {
+func (e *DestinationSnowflakeUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Username and Password":
- *e = DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType(v)
+ *e = DestinationSnowflakeUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType: %v", v)
+ return fmt.Errorf("invalid value for DestinationSnowflakeUpdateSchemasAuthType: %v", v)
}
}
-type DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword struct {
- AuthType *DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType `json:"auth_type,omitempty"`
+type UsernameAndPassword struct {
+ authType *DestinationSnowflakeUpdateSchemasAuthType `const:"Username and Password" json:"auth_type"`
// Enter the password associated with the username.
Password string `json:"password"`
}
-type DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthenticationAuthType string
+func (u UsernameAndPassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(u, "", false)
+}
+
+func (u *UsernameAndPassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &u, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *UsernameAndPassword) GetAuthType() *DestinationSnowflakeUpdateSchemasAuthType {
+ return DestinationSnowflakeUpdateSchemasAuthTypeUsernameAndPassword.ToPointer()
+}
+
+func (o *UsernameAndPassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+type DestinationSnowflakeUpdateAuthType string
const (
- DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthenticationAuthTypeKeyPairAuthentication DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthenticationAuthType = "Key Pair Authentication"
+ DestinationSnowflakeUpdateAuthTypeKeyPairAuthentication DestinationSnowflakeUpdateAuthType = "Key Pair Authentication"
)
-func (e DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthenticationAuthType) ToPointer() *DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthenticationAuthType {
+func (e DestinationSnowflakeUpdateAuthType) ToPointer() *DestinationSnowflakeUpdateAuthType {
return &e
}
-func (e *DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthenticationAuthType) UnmarshalJSON(data []byte) error {
+func (e *DestinationSnowflakeUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Key Pair Authentication":
- *e = DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthenticationAuthType(v)
+ *e = DestinationSnowflakeUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthenticationAuthType: %v", v)
+ return fmt.Errorf("invalid value for DestinationSnowflakeUpdateAuthType: %v", v)
}
}
-type DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication struct {
- AuthType *DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthenticationAuthType `json:"auth_type,omitempty"`
+type KeyPairAuthentication struct {
+ authType *DestinationSnowflakeUpdateAuthType `const:"Key Pair Authentication" json:"auth_type"`
// RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
PrivateKey string `json:"private_key"`
// Passphrase for private key
PrivateKeyPassword *string `json:"private_key_password,omitempty"`
}
-type DestinationSnowflakeUpdateAuthorizationMethodOAuth20AuthType string
+func (k KeyPairAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(k, "", false)
+}
+
+func (k *KeyPairAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &k, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *KeyPairAuthentication) GetAuthType() *DestinationSnowflakeUpdateAuthType {
+ return DestinationSnowflakeUpdateAuthTypeKeyPairAuthentication.ToPointer()
+}
+
+func (o *KeyPairAuthentication) GetPrivateKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PrivateKey
+}
+
+func (o *KeyPairAuthentication) GetPrivateKeyPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrivateKeyPassword
+}
+
+type DestinationSnowflakeUpdateSchemasCredentialsAuthType string
const (
- DestinationSnowflakeUpdateAuthorizationMethodOAuth20AuthTypeOAuth20 DestinationSnowflakeUpdateAuthorizationMethodOAuth20AuthType = "OAuth2.0"
+ DestinationSnowflakeUpdateSchemasCredentialsAuthTypeOAuth20 DestinationSnowflakeUpdateSchemasCredentialsAuthType = "OAuth2.0"
)
-func (e DestinationSnowflakeUpdateAuthorizationMethodOAuth20AuthType) ToPointer() *DestinationSnowflakeUpdateAuthorizationMethodOAuth20AuthType {
+func (e DestinationSnowflakeUpdateSchemasCredentialsAuthType) ToPointer() *DestinationSnowflakeUpdateSchemasCredentialsAuthType {
return &e
}
-func (e *DestinationSnowflakeUpdateAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *DestinationSnowflakeUpdateSchemasCredentialsAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth2.0":
- *e = DestinationSnowflakeUpdateAuthorizationMethodOAuth20AuthType(v)
+ *e = DestinationSnowflakeUpdateSchemasCredentialsAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationSnowflakeUpdateAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for DestinationSnowflakeUpdateSchemasCredentialsAuthType: %v", v)
}
}
-type DestinationSnowflakeUpdateAuthorizationMethodOAuth20 struct {
+type OAuth20 struct {
// Enter you application's Access Token
- AccessToken string `json:"access_token"`
- AuthType *DestinationSnowflakeUpdateAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *DestinationSnowflakeUpdateSchemasCredentialsAuthType `const:"OAuth2.0" json:"auth_type"`
// Enter your application's Client ID
ClientID *string `json:"client_id,omitempty"`
// Enter your application's Client secret
@@ -107,107 +158,145 @@ type DestinationSnowflakeUpdateAuthorizationMethodOAuth20 struct {
RefreshToken string `json:"refresh_token"`
}
-type DestinationSnowflakeUpdateAuthorizationMethodType string
+func (o OAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(o, "", false)
+}
+
+func (o *OAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &o, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *OAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *OAuth20) GetAuthType() *DestinationSnowflakeUpdateSchemasCredentialsAuthType {
+ return DestinationSnowflakeUpdateSchemasCredentialsAuthTypeOAuth20.ToPointer()
+}
+
+func (o *OAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *OAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *OAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+type AuthorizationMethodType string
const (
- DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodOAuth20 DestinationSnowflakeUpdateAuthorizationMethodType = "destination-snowflake-update_Authorization Method_OAuth2.0"
- DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication DestinationSnowflakeUpdateAuthorizationMethodType = "destination-snowflake-update_Authorization Method_Key Pair Authentication"
- DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword DestinationSnowflakeUpdateAuthorizationMethodType = "destination-snowflake-update_Authorization Method_Username and Password"
+ AuthorizationMethodTypeOAuth20 AuthorizationMethodType = "OAuth2.0"
+ AuthorizationMethodTypeKeyPairAuthentication AuthorizationMethodType = "Key Pair Authentication"
+ AuthorizationMethodTypeUsernameAndPassword AuthorizationMethodType = "Username and Password"
)
-type DestinationSnowflakeUpdateAuthorizationMethod struct {
- DestinationSnowflakeUpdateAuthorizationMethodOAuth20 *DestinationSnowflakeUpdateAuthorizationMethodOAuth20
- DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication *DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication
- DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword *DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword
+type AuthorizationMethod struct {
+ OAuth20 *OAuth20
+ KeyPairAuthentication *KeyPairAuthentication
+ UsernameAndPassword *UsernameAndPassword
- Type DestinationSnowflakeUpdateAuthorizationMethodType
+ Type AuthorizationMethodType
}
-func CreateDestinationSnowflakeUpdateAuthorizationMethodDestinationSnowflakeUpdateAuthorizationMethodOAuth20(destinationSnowflakeUpdateAuthorizationMethodOAuth20 DestinationSnowflakeUpdateAuthorizationMethodOAuth20) DestinationSnowflakeUpdateAuthorizationMethod {
- typ := DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodOAuth20
+func CreateAuthorizationMethodOAuth20(oAuth20 OAuth20) AuthorizationMethod {
+ typ := AuthorizationMethodTypeOAuth20
- return DestinationSnowflakeUpdateAuthorizationMethod{
- DestinationSnowflakeUpdateAuthorizationMethodOAuth20: &destinationSnowflakeUpdateAuthorizationMethodOAuth20,
- Type: typ,
+ return AuthorizationMethod{
+ OAuth20: &oAuth20,
+ Type: typ,
}
}
-func CreateDestinationSnowflakeUpdateAuthorizationMethodDestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication(destinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication) DestinationSnowflakeUpdateAuthorizationMethod {
- typ := DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication
+func CreateAuthorizationMethodKeyPairAuthentication(keyPairAuthentication KeyPairAuthentication) AuthorizationMethod {
+ typ := AuthorizationMethodTypeKeyPairAuthentication
- return DestinationSnowflakeUpdateAuthorizationMethod{
- DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication: &destinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication,
- Type: typ,
+ return AuthorizationMethod{
+ KeyPairAuthentication: &keyPairAuthentication,
+ Type: typ,
}
}
-func CreateDestinationSnowflakeUpdateAuthorizationMethodDestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword(destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword) DestinationSnowflakeUpdateAuthorizationMethod {
- typ := DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword
+func CreateAuthorizationMethodUsernameAndPassword(usernameAndPassword UsernameAndPassword) AuthorizationMethod {
+ typ := AuthorizationMethodTypeUsernameAndPassword
- return DestinationSnowflakeUpdateAuthorizationMethod{
- DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword: &destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword,
- Type: typ,
+ return AuthorizationMethod{
+ UsernameAndPassword: &usernameAndPassword,
+ Type: typ,
}
}
-func (u *DestinationSnowflakeUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *AuthorizationMethod) UnmarshalJSON(data []byte) error {
- destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword := new(DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword); err == nil {
- u.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword = destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword
- u.Type = DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword
+ usernameAndPassword := new(UsernameAndPassword)
+ if err := utils.UnmarshalJSON(data, &usernameAndPassword, "", true, true); err == nil {
+ u.UsernameAndPassword = usernameAndPassword
+ u.Type = AuthorizationMethodTypeUsernameAndPassword
return nil
}
- destinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication := new(DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication); err == nil {
- u.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication = destinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication
- u.Type = DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication
+ keyPairAuthentication := new(KeyPairAuthentication)
+ if err := utils.UnmarshalJSON(data, &keyPairAuthentication, "", true, true); err == nil {
+ u.KeyPairAuthentication = keyPairAuthentication
+ u.Type = AuthorizationMethodTypeKeyPairAuthentication
return nil
}
- destinationSnowflakeUpdateAuthorizationMethodOAuth20 := new(DestinationSnowflakeUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationSnowflakeUpdateAuthorizationMethodOAuth20); err == nil {
- u.DestinationSnowflakeUpdateAuthorizationMethodOAuth20 = destinationSnowflakeUpdateAuthorizationMethodOAuth20
- u.Type = DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodOAuth20
+ oAuth20 := new(OAuth20)
+ if err := utils.UnmarshalJSON(data, &oAuth20, "", true, true); err == nil {
+ u.OAuth20 = oAuth20
+ u.Type = AuthorizationMethodTypeOAuth20
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u DestinationSnowflakeUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword != nil {
- return json.Marshal(u.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword)
+func (u AuthorizationMethod) MarshalJSON() ([]byte, error) {
+ if u.OAuth20 != nil {
+ return utils.MarshalJSON(u.OAuth20, "", true)
}
- if u.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication != nil {
- return json.Marshal(u.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication)
+ if u.KeyPairAuthentication != nil {
+ return utils.MarshalJSON(u.KeyPairAuthentication, "", true)
}
- if u.DestinationSnowflakeUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.DestinationSnowflakeUpdateAuthorizationMethodOAuth20)
+ if u.UsernameAndPassword != nil {
+ return utils.MarshalJSON(u.UsernameAndPassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationSnowflakeUpdate struct {
- Credentials *DestinationSnowflakeUpdateAuthorizationMethod `json:"credentials,omitempty"`
+ Credentials *AuthorizationMethod `json:"credentials,omitempty"`
// Enter the name of the database you want to sync data into
Database string `json:"database"`
+ // Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions
+ DisableTypeDedupe *bool `default:"false" json:"disable_type_dedupe"`
// Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)
Host string `json:"host"`
// Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
JdbcURLParams *string `json:"jdbc_url_params,omitempty"`
- // The schema to write raw tables into
+ // The schema to write raw tables into (default: airbyte_internal)
RawDataSchema *string `json:"raw_data_schema,omitempty"`
// Enter the role that you want to use to access Snowflake
Role string `json:"role"`
@@ -218,3 +307,84 @@ type DestinationSnowflakeUpdate struct {
// Enter the name of the warehouse that you want to sync data into
Warehouse string `json:"warehouse"`
}
+
+func (d DestinationSnowflakeUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationSnowflakeUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationSnowflakeUpdate) GetCredentials() *AuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *DestinationSnowflakeUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationSnowflakeUpdate) GetDisableTypeDedupe() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DisableTypeDedupe
+}
+
+func (o *DestinationSnowflakeUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationSnowflakeUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationSnowflakeUpdate) GetRawDataSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.RawDataSchema
+}
+
+func (o *DestinationSnowflakeUpdate) GetRole() string {
+ if o == nil {
+ return ""
+ }
+ return o.Role
+}
+
+func (o *DestinationSnowflakeUpdate) GetSchema() string {
+ if o == nil {
+ return ""
+ }
+ return o.Schema
+}
+
+func (o *DestinationSnowflakeUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+func (o *DestinationSnowflakeUpdate) GetWarehouse() string {
+ if o == nil {
+ return ""
+ }
+ return o.Warehouse
+}
diff --git a/internal/sdk/pkg/models/shared/destinationsresponse.go b/internal/sdk/pkg/models/shared/destinationsresponse.go
old mode 100755
new mode 100644
index ec9a40233..145dd5e72
--- a/internal/sdk/pkg/models/shared/destinationsresponse.go
+++ b/internal/sdk/pkg/models/shared/destinationsresponse.go
@@ -7,3 +7,24 @@ type DestinationsResponse struct {
Next *string `json:"next,omitempty"`
Previous *string `json:"previous,omitempty"`
}
+
+func (o *DestinationsResponse) GetData() []DestinationResponse {
+ if o == nil {
+ return []DestinationResponse{}
+ }
+ return o.Data
+}
+
+func (o *DestinationsResponse) GetNext() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Next
+}
+
+func (o *DestinationsResponse) GetPrevious() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Previous
+}
diff --git a/internal/sdk/pkg/models/shared/destinationtimeplus.go b/internal/sdk/pkg/models/shared/destinationtimeplus.go
old mode 100755
new mode 100644
index 628f76766..74ce378bd
--- a/internal/sdk/pkg/models/shared/destinationtimeplus.go
+++ b/internal/sdk/pkg/models/shared/destinationtimeplus.go
@@ -5,36 +5,66 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationTimeplusTimeplus string
+type Timeplus string
const (
- DestinationTimeplusTimeplusTimeplus DestinationTimeplusTimeplus = "timeplus"
+ TimeplusTimeplus Timeplus = "timeplus"
)
-func (e DestinationTimeplusTimeplus) ToPointer() *DestinationTimeplusTimeplus {
+func (e Timeplus) ToPointer() *Timeplus {
return &e
}
-func (e *DestinationTimeplusTimeplus) UnmarshalJSON(data []byte) error {
+func (e *Timeplus) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "timeplus":
- *e = DestinationTimeplusTimeplus(v)
+ *e = Timeplus(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationTimeplusTimeplus: %v", v)
+ return fmt.Errorf("invalid value for Timeplus: %v", v)
}
}
type DestinationTimeplus struct {
// Personal API key
- Apikey string `json:"apikey"`
- DestinationType DestinationTimeplusTimeplus `json:"destinationType"`
+ Apikey string `json:"apikey"`
+ destinationType Timeplus `const:"timeplus" json:"destinationType"`
// Timeplus workspace endpoint
- Endpoint string `json:"endpoint"`
+ Endpoint *string `default:"https://us.timeplus.cloud/" json:"endpoint"`
+}
+
+func (d DestinationTimeplus) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationTimeplus) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationTimeplus) GetApikey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Apikey
+}
+
+func (o *DestinationTimeplus) GetDestinationType() Timeplus {
+ return TimeplusTimeplus
+}
+
+func (o *DestinationTimeplus) GetEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Endpoint
}
diff --git a/internal/sdk/pkg/models/shared/destinationtimepluscreaterequest.go b/internal/sdk/pkg/models/shared/destinationtimepluscreaterequest.go
old mode 100755
new mode 100644
index 68690e756..0113eb518
--- a/internal/sdk/pkg/models/shared/destinationtimepluscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationtimepluscreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationTimeplusCreateRequest struct {
Configuration DestinationTimeplus `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationTimeplusCreateRequest) GetConfiguration() DestinationTimeplus {
+ if o == nil {
+ return DestinationTimeplus{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationTimeplusCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationTimeplusCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationTimeplusCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationtimeplusputrequest.go b/internal/sdk/pkg/models/shared/destinationtimeplusputrequest.go
old mode 100755
new mode 100644
index 5a30b1732..6e7da4606
--- a/internal/sdk/pkg/models/shared/destinationtimeplusputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationtimeplusputrequest.go
@@ -7,3 +7,24 @@ type DestinationTimeplusPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationTimeplusPutRequest) GetConfiguration() DestinationTimeplusUpdate {
+ if o == nil {
+ return DestinationTimeplusUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationTimeplusPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationTimeplusPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationtimeplusupdate.go b/internal/sdk/pkg/models/shared/destinationtimeplusupdate.go
old mode 100755
new mode 100644
index 360bceb09..5a7417757
--- a/internal/sdk/pkg/models/shared/destinationtimeplusupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationtimeplusupdate.go
@@ -2,9 +2,38 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type DestinationTimeplusUpdate struct {
// Personal API key
Apikey string `json:"apikey"`
// Timeplus workspace endpoint
- Endpoint string `json:"endpoint"`
+ Endpoint *string `default:"https://us.timeplus.cloud/" json:"endpoint"`
+}
+
+func (d DestinationTimeplusUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationTimeplusUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationTimeplusUpdate) GetApikey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Apikey
+}
+
+func (o *DestinationTimeplusUpdate) GetEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Endpoint
}
diff --git a/internal/sdk/pkg/models/shared/destinationtypesense.go b/internal/sdk/pkg/models/shared/destinationtypesense.go
old mode 100755
new mode 100644
index ed06be1c1..20a24febb
--- a/internal/sdk/pkg/models/shared/destinationtypesense.go
+++ b/internal/sdk/pkg/models/shared/destinationtypesense.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationTypesenseTypesense string
+type Typesense string
const (
- DestinationTypesenseTypesenseTypesense DestinationTypesenseTypesense = "typesense"
+ TypesenseTypesense Typesense = "typesense"
)
-func (e DestinationTypesenseTypesense) ToPointer() *DestinationTypesenseTypesense {
+func (e Typesense) ToPointer() *Typesense {
return &e
}
-func (e *DestinationTypesenseTypesense) UnmarshalJSON(data []byte) error {
+func (e *Typesense) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "typesense":
- *e = DestinationTypesenseTypesense(v)
+ *e = Typesense(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationTypesenseTypesense: %v", v)
+ return fmt.Errorf("invalid value for Typesense: %v", v)
}
}
@@ -35,8 +36,8 @@ type DestinationTypesense struct {
// Typesense API Key
APIKey string `json:"api_key"`
// How many documents should be imported together. Default 1000
- BatchSize *int64 `json:"batch_size,omitempty"`
- DestinationType DestinationTypesenseTypesense `json:"destinationType"`
+ BatchSize *int64 `json:"batch_size,omitempty"`
+ destinationType Typesense `const:"typesense" json:"destinationType"`
// Hostname of the Typesense instance without protocol.
Host string `json:"host"`
// Port of the Typesense instance. Ex: 8108, 80, 443. Default is 443
@@ -44,3 +45,53 @@ type DestinationTypesense struct {
// Protocol of the Typesense instance. Ex: http or https. Default is https
Protocol *string `json:"protocol,omitempty"`
}
+
+func (d DestinationTypesense) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationTypesense) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationTypesense) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *DestinationTypesense) GetBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchSize
+}
+
+func (o *DestinationTypesense) GetDestinationType() Typesense {
+ return TypesenseTypesense
+}
+
+func (o *DestinationTypesense) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationTypesense) GetPort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationTypesense) GetProtocol() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Protocol
+}
diff --git a/internal/sdk/pkg/models/shared/destinationtypesensecreaterequest.go b/internal/sdk/pkg/models/shared/destinationtypesensecreaterequest.go
old mode 100755
new mode 100644
index 92fe586d9..fb5ceb61c
--- a/internal/sdk/pkg/models/shared/destinationtypesensecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationtypesensecreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationTypesenseCreateRequest struct {
Configuration DestinationTypesense `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationTypesenseCreateRequest) GetConfiguration() DestinationTypesense {
+ if o == nil {
+ return DestinationTypesense{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationTypesenseCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationTypesenseCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationTypesenseCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationtypesenseputrequest.go b/internal/sdk/pkg/models/shared/destinationtypesenseputrequest.go
old mode 100755
new mode 100644
index ec224b306..188697844
--- a/internal/sdk/pkg/models/shared/destinationtypesenseputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationtypesenseputrequest.go
@@ -7,3 +7,24 @@ type DestinationTypesensePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationTypesensePutRequest) GetConfiguration() DestinationTypesenseUpdate {
+ if o == nil {
+ return DestinationTypesenseUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationTypesensePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationTypesensePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationtypesenseupdate.go b/internal/sdk/pkg/models/shared/destinationtypesenseupdate.go
old mode 100755
new mode 100644
index e53ad1135..98e801470
--- a/internal/sdk/pkg/models/shared/destinationtypesenseupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationtypesenseupdate.go
@@ -14,3 +14,38 @@ type DestinationTypesenseUpdate struct {
// Protocol of the Typesense instance. Ex: http or https. Default is https
Protocol *string `json:"protocol,omitempty"`
}
+
+func (o *DestinationTypesenseUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *DestinationTypesenseUpdate) GetBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchSize
+}
+
+func (o *DestinationTypesenseUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationTypesenseUpdate) GetPort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationTypesenseUpdate) GetProtocol() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Protocol
+}
diff --git a/internal/sdk/pkg/models/shared/destinationvertica.go b/internal/sdk/pkg/models/shared/destinationvertica.go
old mode 100755
new mode 100644
index 6dfd61d46..32be93154
--- a/internal/sdk/pkg/models/shared/destinationvertica.go
+++ b/internal/sdk/pkg/models/shared/destinationvertica.go
@@ -3,215 +3,309 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationVerticaVertica string
+type Vertica string
const (
- DestinationVerticaVerticaVertica DestinationVerticaVertica = "vertica"
+ VerticaVertica Vertica = "vertica"
)
-func (e DestinationVerticaVertica) ToPointer() *DestinationVerticaVertica {
+func (e Vertica) ToPointer() *Vertica {
return &e
}
-func (e *DestinationVerticaVertica) UnmarshalJSON(data []byte) error {
+func (e *Vertica) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "vertica":
- *e = DestinationVerticaVertica(v)
+ *e = Vertica(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationVerticaVertica: %v", v)
+ return fmt.Errorf("invalid value for Vertica: %v", v)
}
}
-// DestinationVerticaSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationVerticaSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationVerticaSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationVerticaSchemasTunnelMethodTunnelMethod string
const (
- DestinationVerticaSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationVerticaSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationVerticaSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationVerticaSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationVerticaSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationVerticaSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationVerticaSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationVerticaSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationVerticaSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationVerticaSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationVerticaSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationVerticaSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationVerticaSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationVerticaSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationVerticaSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationVerticaSSHTunnelMethodPasswordAuthentication struct {
+// DestinationVerticaPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationVerticaPasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationVerticaSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationVerticaSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationVerticaSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationVerticaSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationVerticaPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationVerticaPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationVerticaPasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationVerticaPasswordAuthentication) GetTunnelMethod() DestinationVerticaSchemasTunnelMethodTunnelMethod {
+ return DestinationVerticaSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationVerticaPasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationVerticaPasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationVerticaPasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationVerticaSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationVerticaSchemasTunnelMethod string
const (
- DestinationVerticaSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationVerticaSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationVerticaSchemasTunnelMethodSSHKeyAuth DestinationVerticaSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationVerticaSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationVerticaSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationVerticaSchemasTunnelMethod) ToPointer() *DestinationVerticaSchemasTunnelMethod {
return &e
}
-func (e *DestinationVerticaSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationVerticaSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationVerticaSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationVerticaSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationVerticaSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationVerticaSchemasTunnelMethod: %v", v)
}
}
-// DestinationVerticaSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationVerticaSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationVerticaSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationVerticaSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationVerticaSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationVerticaSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationVerticaSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationVerticaSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationVerticaSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationVerticaSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationVerticaSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationVerticaSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationVerticaSSHKeyAuthentication) GetTunnelMethod() DestinationVerticaSchemasTunnelMethod {
+ return DestinationVerticaSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationVerticaSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationVerticaSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationVerticaTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationVerticaTunnelMethod string
const (
- DestinationVerticaSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationVerticaSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationVerticaTunnelMethodNoTunnel DestinationVerticaTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationVerticaSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationVerticaSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationVerticaTunnelMethod) ToPointer() *DestinationVerticaTunnelMethod {
return &e
}
-func (e *DestinationVerticaSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationVerticaTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationVerticaSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationVerticaTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationVerticaSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationVerticaTunnelMethod: %v", v)
}
}
-// DestinationVerticaSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationVerticaSSHTunnelMethodNoTunnel struct {
+// DestinationVerticaNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationVerticaNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationVerticaSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationVerticaTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationVerticaNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationVerticaNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationVerticaNoTunnel) GetTunnelMethod() DestinationVerticaTunnelMethod {
+ return DestinationVerticaTunnelMethodNoTunnel
}
type DestinationVerticaSSHTunnelMethodType string
const (
- DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHTunnelMethodNoTunnel DestinationVerticaSSHTunnelMethodType = "destination-vertica_SSH Tunnel Method_No Tunnel"
- DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHTunnelMethodSSHKeyAuthentication DestinationVerticaSSHTunnelMethodType = "destination-vertica_SSH Tunnel Method_SSH Key Authentication"
- DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHTunnelMethodPasswordAuthentication DestinationVerticaSSHTunnelMethodType = "destination-vertica_SSH Tunnel Method_Password Authentication"
+ DestinationVerticaSSHTunnelMethodTypeDestinationVerticaNoTunnel DestinationVerticaSSHTunnelMethodType = "destination-vertica_No Tunnel"
+ DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHKeyAuthentication DestinationVerticaSSHTunnelMethodType = "destination-vertica_SSH Key Authentication"
+ DestinationVerticaSSHTunnelMethodTypeDestinationVerticaPasswordAuthentication DestinationVerticaSSHTunnelMethodType = "destination-vertica_Password Authentication"
)
type DestinationVerticaSSHTunnelMethod struct {
- DestinationVerticaSSHTunnelMethodNoTunnel *DestinationVerticaSSHTunnelMethodNoTunnel
- DestinationVerticaSSHTunnelMethodSSHKeyAuthentication *DestinationVerticaSSHTunnelMethodSSHKeyAuthentication
- DestinationVerticaSSHTunnelMethodPasswordAuthentication *DestinationVerticaSSHTunnelMethodPasswordAuthentication
+ DestinationVerticaNoTunnel *DestinationVerticaNoTunnel
+ DestinationVerticaSSHKeyAuthentication *DestinationVerticaSSHKeyAuthentication
+ DestinationVerticaPasswordAuthentication *DestinationVerticaPasswordAuthentication
Type DestinationVerticaSSHTunnelMethodType
}
-func CreateDestinationVerticaSSHTunnelMethodDestinationVerticaSSHTunnelMethodNoTunnel(destinationVerticaSSHTunnelMethodNoTunnel DestinationVerticaSSHTunnelMethodNoTunnel) DestinationVerticaSSHTunnelMethod {
- typ := DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHTunnelMethodNoTunnel
+func CreateDestinationVerticaSSHTunnelMethodDestinationVerticaNoTunnel(destinationVerticaNoTunnel DestinationVerticaNoTunnel) DestinationVerticaSSHTunnelMethod {
+ typ := DestinationVerticaSSHTunnelMethodTypeDestinationVerticaNoTunnel
return DestinationVerticaSSHTunnelMethod{
- DestinationVerticaSSHTunnelMethodNoTunnel: &destinationVerticaSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationVerticaNoTunnel: &destinationVerticaNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationVerticaSSHTunnelMethodDestinationVerticaSSHTunnelMethodSSHKeyAuthentication(destinationVerticaSSHTunnelMethodSSHKeyAuthentication DestinationVerticaSSHTunnelMethodSSHKeyAuthentication) DestinationVerticaSSHTunnelMethod {
- typ := DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationVerticaSSHTunnelMethodDestinationVerticaSSHKeyAuthentication(destinationVerticaSSHKeyAuthentication DestinationVerticaSSHKeyAuthentication) DestinationVerticaSSHTunnelMethod {
+ typ := DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHKeyAuthentication
return DestinationVerticaSSHTunnelMethod{
- DestinationVerticaSSHTunnelMethodSSHKeyAuthentication: &destinationVerticaSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ DestinationVerticaSSHKeyAuthentication: &destinationVerticaSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateDestinationVerticaSSHTunnelMethodDestinationVerticaSSHTunnelMethodPasswordAuthentication(destinationVerticaSSHTunnelMethodPasswordAuthentication DestinationVerticaSSHTunnelMethodPasswordAuthentication) DestinationVerticaSSHTunnelMethod {
- typ := DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHTunnelMethodPasswordAuthentication
+func CreateDestinationVerticaSSHTunnelMethodDestinationVerticaPasswordAuthentication(destinationVerticaPasswordAuthentication DestinationVerticaPasswordAuthentication) DestinationVerticaSSHTunnelMethod {
+ typ := DestinationVerticaSSHTunnelMethodTypeDestinationVerticaPasswordAuthentication
return DestinationVerticaSSHTunnelMethod{
- DestinationVerticaSSHTunnelMethodPasswordAuthentication: &destinationVerticaSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ DestinationVerticaPasswordAuthentication: &destinationVerticaPasswordAuthentication,
+ Type: typ,
}
}
func (u *DestinationVerticaSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationVerticaSSHTunnelMethodNoTunnel := new(DestinationVerticaSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationVerticaSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationVerticaSSHTunnelMethodNoTunnel = destinationVerticaSSHTunnelMethodNoTunnel
- u.Type = DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHTunnelMethodNoTunnel
+
+ destinationVerticaNoTunnel := new(DestinationVerticaNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationVerticaNoTunnel, "", true, true); err == nil {
+ u.DestinationVerticaNoTunnel = destinationVerticaNoTunnel
+ u.Type = DestinationVerticaSSHTunnelMethodTypeDestinationVerticaNoTunnel
return nil
}
- destinationVerticaSSHTunnelMethodSSHKeyAuthentication := new(DestinationVerticaSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationVerticaSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationVerticaSSHTunnelMethodSSHKeyAuthentication = destinationVerticaSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHTunnelMethodSSHKeyAuthentication
+ destinationVerticaSSHKeyAuthentication := new(DestinationVerticaSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationVerticaSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationVerticaSSHKeyAuthentication = destinationVerticaSSHKeyAuthentication
+ u.Type = DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHKeyAuthentication
return nil
}
- destinationVerticaSSHTunnelMethodPasswordAuthentication := new(DestinationVerticaSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationVerticaSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationVerticaSSHTunnelMethodPasswordAuthentication = destinationVerticaSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationVerticaSSHTunnelMethodTypeDestinationVerticaSSHTunnelMethodPasswordAuthentication
+ destinationVerticaPasswordAuthentication := new(DestinationVerticaPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationVerticaPasswordAuthentication, "", true, true); err == nil {
+ u.DestinationVerticaPasswordAuthentication = destinationVerticaPasswordAuthentication
+ u.Type = DestinationVerticaSSHTunnelMethodTypeDestinationVerticaPasswordAuthentication
return nil
}
@@ -219,25 +313,25 @@ func (u *DestinationVerticaSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u DestinationVerticaSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationVerticaSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationVerticaSSHTunnelMethodNoTunnel)
+ if u.DestinationVerticaNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationVerticaNoTunnel, "", true)
}
- if u.DestinationVerticaSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationVerticaSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationVerticaSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationVerticaSSHKeyAuthentication, "", true)
}
- if u.DestinationVerticaSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationVerticaSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationVerticaPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationVerticaPasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationVertica struct {
// Name of the database.
- Database string `json:"database"`
- DestinationType DestinationVerticaVertica `json:"destinationType"`
+ Database string `json:"database"`
+ destinationType Vertica `const:"vertica" json:"destinationType"`
// Hostname of the database.
Host string `json:"host"`
// Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
@@ -245,7 +339,7 @@ type DestinationVertica struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5433" json:"port"`
// Schema for vertica destination
Schema string `json:"schema"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
@@ -253,3 +347,74 @@ type DestinationVertica struct {
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationVertica) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationVertica) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationVertica) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationVertica) GetDestinationType() Vertica {
+ return VerticaVertica
+}
+
+func (o *DestinationVertica) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationVertica) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationVertica) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationVertica) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationVertica) GetSchema() string {
+ if o == nil {
+ return ""
+ }
+ return o.Schema
+}
+
+func (o *DestinationVertica) GetTunnelMethod() *DestinationVerticaSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationVertica) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationverticacreaterequest.go b/internal/sdk/pkg/models/shared/destinationverticacreaterequest.go
old mode 100755
new mode 100644
index 8c8cd4ec6..70d7b644d
--- a/internal/sdk/pkg/models/shared/destinationverticacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationverticacreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationVerticaCreateRequest struct {
Configuration DestinationVertica `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationVerticaCreateRequest) GetConfiguration() DestinationVertica {
+ if o == nil {
+ return DestinationVertica{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationVerticaCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationVerticaCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationVerticaCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationverticaputrequest.go b/internal/sdk/pkg/models/shared/destinationverticaputrequest.go
old mode 100755
new mode 100644
index f2d38566f..a94f8db8c
--- a/internal/sdk/pkg/models/shared/destinationverticaputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationverticaputrequest.go
@@ -7,3 +7,24 @@ type DestinationVerticaPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationVerticaPutRequest) GetConfiguration() DestinationVerticaUpdate {
+ if o == nil {
+ return DestinationVerticaUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationVerticaPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationVerticaPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationverticaupdate.go b/internal/sdk/pkg/models/shared/destinationverticaupdate.go
old mode 100755
new mode 100644
index 825cd0564..b5e2c9c99
--- a/internal/sdk/pkg/models/shared/destinationverticaupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationverticaupdate.go
@@ -3,191 +3,285 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// DestinationVerticaUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type DestinationVerticaUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// DestinationVerticaUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type DestinationVerticaUpdateSchemasTunnelMethodTunnelMethod string
const (
- DestinationVerticaUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth DestinationVerticaUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ DestinationVerticaUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth DestinationVerticaUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e DestinationVerticaUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *DestinationVerticaUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e DestinationVerticaUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *DestinationVerticaUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *DestinationVerticaUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationVerticaUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = DestinationVerticaUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = DestinationVerticaUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationVerticaUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationVerticaUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication struct {
+// DestinationVerticaUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationVerticaUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod DestinationVerticaUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationVerticaUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (d DestinationVerticaUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationVerticaUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationVerticaUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationVerticaUpdatePasswordAuthentication) GetTunnelMethod() DestinationVerticaUpdateSchemasTunnelMethodTunnelMethod {
+ return DestinationVerticaUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *DestinationVerticaUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationVerticaUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *DestinationVerticaUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// DestinationVerticaUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type DestinationVerticaUpdateSchemasTunnelMethod string
const (
- DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ DestinationVerticaUpdateSchemasTunnelMethodSSHKeyAuth DestinationVerticaUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e DestinationVerticaUpdateSchemasTunnelMethod) ToPointer() *DestinationVerticaUpdateSchemasTunnelMethod {
return &e
}
-func (e *DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationVerticaUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = DestinationVerticaUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationVerticaUpdateSchemasTunnelMethod: %v", v)
}
}
-// DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// DestinationVerticaUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationVerticaUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationVerticaUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// DestinationVerticaUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type DestinationVerticaUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (d DestinationVerticaUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationVerticaUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationVerticaUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *DestinationVerticaUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *DestinationVerticaUpdateSSHKeyAuthentication) GetTunnelMethod() DestinationVerticaUpdateSchemasTunnelMethod {
+ return DestinationVerticaUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *DestinationVerticaUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *DestinationVerticaUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// DestinationVerticaUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type DestinationVerticaUpdateTunnelMethod string
const (
- DestinationVerticaUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel DestinationVerticaUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ DestinationVerticaUpdateTunnelMethodNoTunnel DestinationVerticaUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e DestinationVerticaUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *DestinationVerticaUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e DestinationVerticaUpdateTunnelMethod) ToPointer() *DestinationVerticaUpdateTunnelMethod {
return &e
}
-func (e *DestinationVerticaUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *DestinationVerticaUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = DestinationVerticaUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = DestinationVerticaUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationVerticaUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for DestinationVerticaUpdateTunnelMethod: %v", v)
}
}
-// DestinationVerticaUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type DestinationVerticaUpdateSSHTunnelMethodNoTunnel struct {
+// DestinationVerticaUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type DestinationVerticaUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod DestinationVerticaUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod DestinationVerticaUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (d DestinationVerticaUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationVerticaUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationVerticaUpdateNoTunnel) GetTunnelMethod() DestinationVerticaUpdateTunnelMethod {
+ return DestinationVerticaUpdateTunnelMethodNoTunnel
}
type DestinationVerticaUpdateSSHTunnelMethodType string
const (
- DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHTunnelMethodNoTunnel DestinationVerticaUpdateSSHTunnelMethodType = "destination-vertica-update_SSH Tunnel Method_No Tunnel"
- DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication DestinationVerticaUpdateSSHTunnelMethodType = "destination-vertica-update_SSH Tunnel Method_SSH Key Authentication"
- DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication DestinationVerticaUpdateSSHTunnelMethodType = "destination-vertica-update_SSH Tunnel Method_Password Authentication"
+ DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateNoTunnel DestinationVerticaUpdateSSHTunnelMethodType = "destination-vertica-update_No Tunnel"
+ DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHKeyAuthentication DestinationVerticaUpdateSSHTunnelMethodType = "destination-vertica-update_SSH Key Authentication"
+ DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdatePasswordAuthentication DestinationVerticaUpdateSSHTunnelMethodType = "destination-vertica-update_Password Authentication"
)
type DestinationVerticaUpdateSSHTunnelMethod struct {
- DestinationVerticaUpdateSSHTunnelMethodNoTunnel *DestinationVerticaUpdateSSHTunnelMethodNoTunnel
- DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication *DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication
- DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication *DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication
+ DestinationVerticaUpdateNoTunnel *DestinationVerticaUpdateNoTunnel
+ DestinationVerticaUpdateSSHKeyAuthentication *DestinationVerticaUpdateSSHKeyAuthentication
+ DestinationVerticaUpdatePasswordAuthentication *DestinationVerticaUpdatePasswordAuthentication
Type DestinationVerticaUpdateSSHTunnelMethodType
}
-func CreateDestinationVerticaUpdateSSHTunnelMethodDestinationVerticaUpdateSSHTunnelMethodNoTunnel(destinationVerticaUpdateSSHTunnelMethodNoTunnel DestinationVerticaUpdateSSHTunnelMethodNoTunnel) DestinationVerticaUpdateSSHTunnelMethod {
- typ := DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHTunnelMethodNoTunnel
+func CreateDestinationVerticaUpdateSSHTunnelMethodDestinationVerticaUpdateNoTunnel(destinationVerticaUpdateNoTunnel DestinationVerticaUpdateNoTunnel) DestinationVerticaUpdateSSHTunnelMethod {
+ typ := DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateNoTunnel
return DestinationVerticaUpdateSSHTunnelMethod{
- DestinationVerticaUpdateSSHTunnelMethodNoTunnel: &destinationVerticaUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ DestinationVerticaUpdateNoTunnel: &destinationVerticaUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateDestinationVerticaUpdateSSHTunnelMethodDestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication(destinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication) DestinationVerticaUpdateSSHTunnelMethod {
- typ := DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateDestinationVerticaUpdateSSHTunnelMethodDestinationVerticaUpdateSSHKeyAuthentication(destinationVerticaUpdateSSHKeyAuthentication DestinationVerticaUpdateSSHKeyAuthentication) DestinationVerticaUpdateSSHTunnelMethod {
+ typ := DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHKeyAuthentication
return DestinationVerticaUpdateSSHTunnelMethod{
- DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication: &destinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication,
+ DestinationVerticaUpdateSSHKeyAuthentication: &destinationVerticaUpdateSSHKeyAuthentication,
Type: typ,
}
}
-func CreateDestinationVerticaUpdateSSHTunnelMethodDestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication(destinationVerticaUpdateSSHTunnelMethodPasswordAuthentication DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication) DestinationVerticaUpdateSSHTunnelMethod {
- typ := DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication
+func CreateDestinationVerticaUpdateSSHTunnelMethodDestinationVerticaUpdatePasswordAuthentication(destinationVerticaUpdatePasswordAuthentication DestinationVerticaUpdatePasswordAuthentication) DestinationVerticaUpdateSSHTunnelMethod {
+ typ := DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdatePasswordAuthentication
return DestinationVerticaUpdateSSHTunnelMethod{
- DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication: &destinationVerticaUpdateSSHTunnelMethodPasswordAuthentication,
+ DestinationVerticaUpdatePasswordAuthentication: &destinationVerticaUpdatePasswordAuthentication,
Type: typ,
}
}
func (u *DestinationVerticaUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- destinationVerticaUpdateSSHTunnelMethodNoTunnel := new(DestinationVerticaUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationVerticaUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.DestinationVerticaUpdateSSHTunnelMethodNoTunnel = destinationVerticaUpdateSSHTunnelMethodNoTunnel
- u.Type = DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHTunnelMethodNoTunnel
+
+ destinationVerticaUpdateNoTunnel := new(DestinationVerticaUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &destinationVerticaUpdateNoTunnel, "", true, true); err == nil {
+ u.DestinationVerticaUpdateNoTunnel = destinationVerticaUpdateNoTunnel
+ u.Type = DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateNoTunnel
return nil
}
- destinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication := new(DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication = destinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication
+ destinationVerticaUpdateSSHKeyAuthentication := new(DestinationVerticaUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationVerticaUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.DestinationVerticaUpdateSSHKeyAuthentication = destinationVerticaUpdateSSHKeyAuthentication
+ u.Type = DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHKeyAuthentication
return nil
}
- destinationVerticaUpdateSSHTunnelMethodPasswordAuthentication := new(DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&destinationVerticaUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication = destinationVerticaUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication
+ destinationVerticaUpdatePasswordAuthentication := new(DestinationVerticaUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &destinationVerticaUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.DestinationVerticaUpdatePasswordAuthentication = destinationVerticaUpdatePasswordAuthentication
+ u.Type = DestinationVerticaUpdateSSHTunnelMethodTypeDestinationVerticaUpdatePasswordAuthentication
return nil
}
@@ -195,19 +289,19 @@ func (u *DestinationVerticaUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) err
}
func (u DestinationVerticaUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.DestinationVerticaUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.DestinationVerticaUpdateSSHTunnelMethodNoTunnel)
+ if u.DestinationVerticaUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.DestinationVerticaUpdateNoTunnel, "", true)
}
- if u.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.DestinationVerticaUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.DestinationVerticaUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationVerticaUpdateSSHKeyAuthentication, "", true)
}
- if u.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.DestinationVerticaUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.DestinationVerticaUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationVerticaUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type DestinationVerticaUpdate struct {
@@ -220,7 +314,7 @@ type DestinationVerticaUpdate struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5433" json:"port"`
// Schema for vertica destination
Schema string `json:"schema"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
@@ -228,3 +322,70 @@ type DestinationVerticaUpdate struct {
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (d DestinationVerticaUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationVerticaUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationVerticaUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *DestinationVerticaUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationVerticaUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *DestinationVerticaUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *DestinationVerticaUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *DestinationVerticaUpdate) GetSchema() string {
+ if o == nil {
+ return ""
+ }
+ return o.Schema
+}
+
+func (o *DestinationVerticaUpdate) GetTunnelMethod() *DestinationVerticaUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *DestinationVerticaUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/destinationweaviate.go b/internal/sdk/pkg/models/shared/destinationweaviate.go
new file mode 100644
index 000000000..b5bfd53b3
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationweaviate.go
@@ -0,0 +1,1446 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+type Weaviate string
+
+const (
+ WeaviateWeaviate Weaviate = "weaviate"
+)
+
+func (e Weaviate) ToPointer() *Weaviate {
+ return &e
+}
+
+func (e *Weaviate) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "weaviate":
+ *e = Weaviate(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for Weaviate: %v", v)
+ }
+}
+
+type DestinationWeaviateSchemasEmbeddingEmbedding7Mode string
+
+const (
+ DestinationWeaviateSchemasEmbeddingEmbedding7ModeOpenaiCompatible DestinationWeaviateSchemasEmbeddingEmbedding7Mode = "openai_compatible"
+)
+
+func (e DestinationWeaviateSchemasEmbeddingEmbedding7Mode) ToPointer() *DestinationWeaviateSchemasEmbeddingEmbedding7Mode {
+ return &e
+}
+
+func (e *DestinationWeaviateSchemasEmbeddingEmbedding7Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "openai_compatible":
+ *e = DestinationWeaviateSchemasEmbeddingEmbedding7Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateSchemasEmbeddingEmbedding7Mode: %v", v)
+ }
+}
+
+// DestinationWeaviateOpenAICompatible - Use a service that's compatible with the OpenAI API to embed text.
+type DestinationWeaviateOpenAICompatible struct {
+ APIKey *string `default:"" json:"api_key"`
+ // The base URL for your OpenAI-compatible service
+ BaseURL string `json:"base_url"`
+ // The number of dimensions the embedding model is generating
+ Dimensions int64 `json:"dimensions"`
+ mode *DestinationWeaviateSchemasEmbeddingEmbedding7Mode `const:"openai_compatible" json:"mode"`
+ // The name of the model to use for embedding
+ ModelName *string `default:"text-embedding-ada-002" json:"model_name"`
+}
+
+func (d DestinationWeaviateOpenAICompatible) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateOpenAICompatible) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateOpenAICompatible) GetAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIKey
+}
+
+func (o *DestinationWeaviateOpenAICompatible) GetBaseURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.BaseURL
+}
+
+func (o *DestinationWeaviateOpenAICompatible) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *DestinationWeaviateOpenAICompatible) GetMode() *DestinationWeaviateSchemasEmbeddingEmbedding7Mode {
+ return DestinationWeaviateSchemasEmbeddingEmbedding7ModeOpenaiCompatible.ToPointer()
+}
+
+func (o *DestinationWeaviateOpenAICompatible) GetModelName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ModelName
+}
+
+type DestinationWeaviateSchemasEmbeddingEmbedding6Mode string
+
+const (
+ DestinationWeaviateSchemasEmbeddingEmbedding6ModeFake DestinationWeaviateSchemasEmbeddingEmbedding6Mode = "fake"
+)
+
+func (e DestinationWeaviateSchemasEmbeddingEmbedding6Mode) ToPointer() *DestinationWeaviateSchemasEmbeddingEmbedding6Mode {
+ return &e
+}
+
+func (e *DestinationWeaviateSchemasEmbeddingEmbedding6Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "fake":
+ *e = DestinationWeaviateSchemasEmbeddingEmbedding6Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateSchemasEmbeddingEmbedding6Mode: %v", v)
+ }
+}
+
+// DestinationWeaviateFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
+type DestinationWeaviateFake struct {
+ mode *DestinationWeaviateSchemasEmbeddingEmbedding6Mode `const:"fake" json:"mode"`
+}
+
+func (d DestinationWeaviateFake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateFake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateFake) GetMode() *DestinationWeaviateSchemasEmbeddingEmbedding6Mode {
+ return DestinationWeaviateSchemasEmbeddingEmbedding6ModeFake.ToPointer()
+}
+
+type DestinationWeaviateSchemasEmbeddingEmbedding5Mode string
+
+const (
+ DestinationWeaviateSchemasEmbeddingEmbedding5ModeFromField DestinationWeaviateSchemasEmbeddingEmbedding5Mode = "from_field"
+)
+
+func (e DestinationWeaviateSchemasEmbeddingEmbedding5Mode) ToPointer() *DestinationWeaviateSchemasEmbeddingEmbedding5Mode {
+ return &e
+}
+
+func (e *DestinationWeaviateSchemasEmbeddingEmbedding5Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "from_field":
+ *e = DestinationWeaviateSchemasEmbeddingEmbedding5Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateSchemasEmbeddingEmbedding5Mode: %v", v)
+ }
+}
+
+// DestinationWeaviateFromField - Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.
+type DestinationWeaviateFromField struct {
+ // The number of dimensions the embedding model is generating
+ Dimensions int64 `json:"dimensions"`
+ // Name of the field in the record that contains the embedding
+ FieldName string `json:"field_name"`
+ mode *DestinationWeaviateSchemasEmbeddingEmbedding5Mode `const:"from_field" json:"mode"`
+}
+
+func (d DestinationWeaviateFromField) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateFromField) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateFromField) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *DestinationWeaviateFromField) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+func (o *DestinationWeaviateFromField) GetMode() *DestinationWeaviateSchemasEmbeddingEmbedding5Mode {
+ return DestinationWeaviateSchemasEmbeddingEmbedding5ModeFromField.ToPointer()
+}
+
+type DestinationWeaviateSchemasEmbeddingEmbeddingMode string
+
+const (
+ DestinationWeaviateSchemasEmbeddingEmbeddingModeCohere DestinationWeaviateSchemasEmbeddingEmbeddingMode = "cohere"
+)
+
+func (e DestinationWeaviateSchemasEmbeddingEmbeddingMode) ToPointer() *DestinationWeaviateSchemasEmbeddingEmbeddingMode {
+ return &e
+}
+
+func (e *DestinationWeaviateSchemasEmbeddingEmbeddingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "cohere":
+ *e = DestinationWeaviateSchemasEmbeddingEmbeddingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateSchemasEmbeddingEmbeddingMode: %v", v)
+ }
+}
+
+// DestinationWeaviateCohere - Use the Cohere API to embed text.
+type DestinationWeaviateCohere struct {
+ CohereKey string `json:"cohere_key"`
+ mode *DestinationWeaviateSchemasEmbeddingEmbeddingMode `const:"cohere" json:"mode"`
+}
+
+func (d DestinationWeaviateCohere) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateCohere) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateCohere) GetCohereKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.CohereKey
+}
+
+func (o *DestinationWeaviateCohere) GetMode() *DestinationWeaviateSchemasEmbeddingEmbeddingMode {
+ return DestinationWeaviateSchemasEmbeddingEmbeddingModeCohere.ToPointer()
+}
+
+type DestinationWeaviateSchemasEmbeddingMode string
+
+const (
+ DestinationWeaviateSchemasEmbeddingModeOpenai DestinationWeaviateSchemasEmbeddingMode = "openai"
+)
+
+func (e DestinationWeaviateSchemasEmbeddingMode) ToPointer() *DestinationWeaviateSchemasEmbeddingMode {
+ return &e
+}
+
+func (e *DestinationWeaviateSchemasEmbeddingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "openai":
+ *e = DestinationWeaviateSchemasEmbeddingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateSchemasEmbeddingMode: %v", v)
+ }
+}
+
+// DestinationWeaviateOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationWeaviateOpenAI struct {
+ mode *DestinationWeaviateSchemasEmbeddingMode `const:"openai" json:"mode"`
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationWeaviateOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateOpenAI) GetMode() *DestinationWeaviateSchemasEmbeddingMode {
+ return DestinationWeaviateSchemasEmbeddingModeOpenai.ToPointer()
+}
+
+func (o *DestinationWeaviateOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
+}
+
+type DestinationWeaviateSchemasMode string
+
+const (
+ DestinationWeaviateSchemasModeAzureOpenai DestinationWeaviateSchemasMode = "azure_openai"
+)
+
+func (e DestinationWeaviateSchemasMode) ToPointer() *DestinationWeaviateSchemasMode {
+ return &e
+}
+
+func (e *DestinationWeaviateSchemasMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "azure_openai":
+ *e = DestinationWeaviateSchemasMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateSchemasMode: %v", v)
+ }
+}
+
+// DestinationWeaviateAzureOpenAI - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationWeaviateAzureOpenAI struct {
+ // The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ APIBase string `json:"api_base"`
+ // The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ Deployment string `json:"deployment"`
+ mode *DestinationWeaviateSchemasMode `const:"azure_openai" json:"mode"`
+ // The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationWeaviateAzureOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateAzureOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateAzureOpenAI) GetAPIBase() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIBase
+}
+
+func (o *DestinationWeaviateAzureOpenAI) GetDeployment() string {
+ if o == nil {
+ return ""
+ }
+ return o.Deployment
+}
+
+func (o *DestinationWeaviateAzureOpenAI) GetMode() *DestinationWeaviateSchemasMode {
+ return DestinationWeaviateSchemasModeAzureOpenai.ToPointer()
+}
+
+func (o *DestinationWeaviateAzureOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
+}
+
+type DestinationWeaviateMode string
+
+const (
+ DestinationWeaviateModeNoEmbedding DestinationWeaviateMode = "no_embedding"
+)
+
+func (e DestinationWeaviateMode) ToPointer() *DestinationWeaviateMode {
+ return &e
+}
+
+func (e *DestinationWeaviateMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "no_embedding":
+ *e = DestinationWeaviateMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateMode: %v", v)
+ }
+}
+
+// DestinationWeaviateNoExternalEmbedding - Do not calculate and pass embeddings to Weaviate. Suitable for clusters with configured vectorizers to calculate embeddings within Weaviate or for classes that should only support regular text search.
+type DestinationWeaviateNoExternalEmbedding struct {
+ mode *DestinationWeaviateMode `const:"no_embedding" json:"mode"`
+}
+
+func (d DestinationWeaviateNoExternalEmbedding) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateNoExternalEmbedding) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateNoExternalEmbedding) GetMode() *DestinationWeaviateMode {
+ return DestinationWeaviateModeNoEmbedding.ToPointer()
+}
+
+type DestinationWeaviateEmbeddingType string
+
+const (
+ DestinationWeaviateEmbeddingTypeDestinationWeaviateNoExternalEmbedding DestinationWeaviateEmbeddingType = "destination-weaviate_No external embedding"
+ DestinationWeaviateEmbeddingTypeDestinationWeaviateAzureOpenAI DestinationWeaviateEmbeddingType = "destination-weaviate_Azure OpenAI"
+ DestinationWeaviateEmbeddingTypeDestinationWeaviateOpenAI DestinationWeaviateEmbeddingType = "destination-weaviate_OpenAI"
+ DestinationWeaviateEmbeddingTypeDestinationWeaviateCohere DestinationWeaviateEmbeddingType = "destination-weaviate_Cohere"
+ DestinationWeaviateEmbeddingTypeDestinationWeaviateFromField DestinationWeaviateEmbeddingType = "destination-weaviate_From Field"
+ DestinationWeaviateEmbeddingTypeDestinationWeaviateFake DestinationWeaviateEmbeddingType = "destination-weaviate_Fake"
+ DestinationWeaviateEmbeddingTypeDestinationWeaviateOpenAICompatible DestinationWeaviateEmbeddingType = "destination-weaviate_OpenAI-compatible"
+)
+
+type DestinationWeaviateEmbedding struct {
+ DestinationWeaviateNoExternalEmbedding *DestinationWeaviateNoExternalEmbedding
+ DestinationWeaviateAzureOpenAI *DestinationWeaviateAzureOpenAI
+ DestinationWeaviateOpenAI *DestinationWeaviateOpenAI
+ DestinationWeaviateCohere *DestinationWeaviateCohere
+ DestinationWeaviateFromField *DestinationWeaviateFromField
+ DestinationWeaviateFake *DestinationWeaviateFake
+ DestinationWeaviateOpenAICompatible *DestinationWeaviateOpenAICompatible
+
+ Type DestinationWeaviateEmbeddingType
+}
+
+func CreateDestinationWeaviateEmbeddingDestinationWeaviateNoExternalEmbedding(destinationWeaviateNoExternalEmbedding DestinationWeaviateNoExternalEmbedding) DestinationWeaviateEmbedding {
+ typ := DestinationWeaviateEmbeddingTypeDestinationWeaviateNoExternalEmbedding
+
+ return DestinationWeaviateEmbedding{
+ DestinationWeaviateNoExternalEmbedding: &destinationWeaviateNoExternalEmbedding,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateEmbeddingDestinationWeaviateAzureOpenAI(destinationWeaviateAzureOpenAI DestinationWeaviateAzureOpenAI) DestinationWeaviateEmbedding {
+ typ := DestinationWeaviateEmbeddingTypeDestinationWeaviateAzureOpenAI
+
+ return DestinationWeaviateEmbedding{
+ DestinationWeaviateAzureOpenAI: &destinationWeaviateAzureOpenAI,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateEmbeddingDestinationWeaviateOpenAI(destinationWeaviateOpenAI DestinationWeaviateOpenAI) DestinationWeaviateEmbedding {
+ typ := DestinationWeaviateEmbeddingTypeDestinationWeaviateOpenAI
+
+ return DestinationWeaviateEmbedding{
+ DestinationWeaviateOpenAI: &destinationWeaviateOpenAI,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateEmbeddingDestinationWeaviateCohere(destinationWeaviateCohere DestinationWeaviateCohere) DestinationWeaviateEmbedding {
+ typ := DestinationWeaviateEmbeddingTypeDestinationWeaviateCohere
+
+ return DestinationWeaviateEmbedding{
+ DestinationWeaviateCohere: &destinationWeaviateCohere,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateEmbeddingDestinationWeaviateFromField(destinationWeaviateFromField DestinationWeaviateFromField) DestinationWeaviateEmbedding {
+ typ := DestinationWeaviateEmbeddingTypeDestinationWeaviateFromField
+
+ return DestinationWeaviateEmbedding{
+ DestinationWeaviateFromField: &destinationWeaviateFromField,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateEmbeddingDestinationWeaviateFake(destinationWeaviateFake DestinationWeaviateFake) DestinationWeaviateEmbedding {
+ typ := DestinationWeaviateEmbeddingTypeDestinationWeaviateFake
+
+ return DestinationWeaviateEmbedding{
+ DestinationWeaviateFake: &destinationWeaviateFake,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateEmbeddingDestinationWeaviateOpenAICompatible(destinationWeaviateOpenAICompatible DestinationWeaviateOpenAICompatible) DestinationWeaviateEmbedding {
+ typ := DestinationWeaviateEmbeddingTypeDestinationWeaviateOpenAICompatible
+
+ return DestinationWeaviateEmbedding{
+ DestinationWeaviateOpenAICompatible: &destinationWeaviateOpenAICompatible,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes an embedding-union payload by probing each supported
// variant and keeping the first one that decodes without error.
// NOTE(review): the probe order (NoExternalEmbedding, Fake, OpenAI, Cohere,
// FromField, AzureOpenAI, OpenAICompatible) intentionally differs from the
// struct's field order; presumably utils.UnmarshalJSON is strict enough (see
// its trailing flags) that each variant's const "mode" field disambiguates —
// confirm against the generator's utils package before relying on it.
func (u *DestinationWeaviateEmbedding) UnmarshalJSON(data []byte) error {

	destinationWeaviateNoExternalEmbedding := new(DestinationWeaviateNoExternalEmbedding)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateNoExternalEmbedding, "", true, true); err == nil {
		u.DestinationWeaviateNoExternalEmbedding = destinationWeaviateNoExternalEmbedding
		u.Type = DestinationWeaviateEmbeddingTypeDestinationWeaviateNoExternalEmbedding
		return nil
	}

	destinationWeaviateFake := new(DestinationWeaviateFake)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateFake, "", true, true); err == nil {
		u.DestinationWeaviateFake = destinationWeaviateFake
		u.Type = DestinationWeaviateEmbeddingTypeDestinationWeaviateFake
		return nil
	}

	destinationWeaviateOpenAI := new(DestinationWeaviateOpenAI)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateOpenAI, "", true, true); err == nil {
		u.DestinationWeaviateOpenAI = destinationWeaviateOpenAI
		u.Type = DestinationWeaviateEmbeddingTypeDestinationWeaviateOpenAI
		return nil
	}

	destinationWeaviateCohere := new(DestinationWeaviateCohere)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateCohere, "", true, true); err == nil {
		u.DestinationWeaviateCohere = destinationWeaviateCohere
		u.Type = DestinationWeaviateEmbeddingTypeDestinationWeaviateCohere
		return nil
	}

	destinationWeaviateFromField := new(DestinationWeaviateFromField)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateFromField, "", true, true); err == nil {
		u.DestinationWeaviateFromField = destinationWeaviateFromField
		u.Type = DestinationWeaviateEmbeddingTypeDestinationWeaviateFromField
		return nil
	}

	destinationWeaviateAzureOpenAI := new(DestinationWeaviateAzureOpenAI)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateAzureOpenAI, "", true, true); err == nil {
		u.DestinationWeaviateAzureOpenAI = destinationWeaviateAzureOpenAI
		u.Type = DestinationWeaviateEmbeddingTypeDestinationWeaviateAzureOpenAI
		return nil
	}

	destinationWeaviateOpenAICompatible := new(DestinationWeaviateOpenAICompatible)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateOpenAICompatible, "", true, true); err == nil {
		u.DestinationWeaviateOpenAICompatible = destinationWeaviateOpenAICompatible
		u.Type = DestinationWeaviateEmbeddingTypeDestinationWeaviateOpenAICompatible
		return nil
	}

	// No variant accepted the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u DestinationWeaviateEmbedding) MarshalJSON() ([]byte, error) {
+ if u.DestinationWeaviateNoExternalEmbedding != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateNoExternalEmbedding, "", true)
+ }
+
+ if u.DestinationWeaviateAzureOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateAzureOpenAI, "", true)
+ }
+
+ if u.DestinationWeaviateOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateOpenAI, "", true)
+ }
+
+ if u.DestinationWeaviateCohere != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateCohere, "", true)
+ }
+
+ if u.DestinationWeaviateFromField != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateFromField, "", true)
+ }
+
+ if u.DestinationWeaviateFake != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateFake, "", true)
+ }
+
+ if u.DestinationWeaviateOpenAICompatible != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateOpenAICompatible, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// DestinationWeaviateHeader is a single additional HTTP header (key/value pair)
// to send with requests to the Weaviate cluster.
type DestinationWeaviateHeader struct {
	HeaderKey string `json:"header_key"`
	Value     string `json:"value"`
}

// GetHeaderKey returns the header name; "" for a nil receiver.
func (o *DestinationWeaviateHeader) GetHeaderKey() string {
	if o != nil {
		return o.HeaderKey
	}
	return ""
}

// GetValue returns the header value; "" for a nil receiver.
func (o *DestinationWeaviateHeader) GetValue() string {
	if o != nil {
		return o.Value
	}
	return ""
}
+
// DestinationWeaviateSchemasIndexingAuthAuthenticationMode is the discriminator
// value carried by the "no auth" authentication option.
type DestinationWeaviateSchemasIndexingAuthAuthenticationMode string

const (
	DestinationWeaviateSchemasIndexingAuthAuthenticationModeNoAuth DestinationWeaviateSchemasIndexingAuthAuthenticationMode = "no_auth"
)

// ToPointer returns a pointer to a copy of e.
func (e DestinationWeaviateSchemasIndexingAuthAuthenticationMode) ToPointer() *DestinationWeaviateSchemasIndexingAuthAuthenticationMode {
	p := e
	return &p
}

// UnmarshalJSON accepts only the JSON string "no_auth".
func (e *DestinationWeaviateSchemasIndexingAuthAuthenticationMode) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "no_auth" {
		return fmt.Errorf("invalid value for DestinationWeaviateSchemasIndexingAuthAuthenticationMode: %v", v)
	}
	*e = DestinationWeaviateSchemasIndexingAuthAuthenticationMode(v)
	return nil
}
+
// DestinationWeaviateNoAuthentication - Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)
type DestinationWeaviateNoAuthentication struct {
	// Fixed discriminator; the `const:"no_auth"` tag is interpreted by the
	// generator's utils package, not encoding/json (field is unexported).
	mode *DestinationWeaviateSchemasIndexingAuthAuthenticationMode `const:"no_auth" json:"mode"`
}

// MarshalJSON delegates to utils.MarshalJSON so const-tagged fields are emitted.
func (d DestinationWeaviateNoAuthentication) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (flags match the other models).
func (d *DestinationWeaviateNoAuthentication) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetMode always returns the "no_auth" constant, ignoring the stored field.
func (o *DestinationWeaviateNoAuthentication) GetMode() *DestinationWeaviateSchemasIndexingAuthAuthenticationMode {
	return DestinationWeaviateSchemasIndexingAuthAuthenticationModeNoAuth.ToPointer()
}
+
// DestinationWeaviateSchemasIndexingAuthMode is the discriminator value carried
// by the username/password authentication option.
type DestinationWeaviateSchemasIndexingAuthMode string

const (
	DestinationWeaviateSchemasIndexingAuthModeUsernamePassword DestinationWeaviateSchemasIndexingAuthMode = "username_password"
)

// ToPointer returns a pointer to a copy of e.
func (e DestinationWeaviateSchemasIndexingAuthMode) ToPointer() *DestinationWeaviateSchemasIndexingAuthMode {
	p := e
	return &p
}

// UnmarshalJSON accepts only the JSON string "username_password".
func (e *DestinationWeaviateSchemasIndexingAuthMode) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "username_password" {
		return fmt.Errorf("invalid value for DestinationWeaviateSchemasIndexingAuthMode: %v", v)
	}
	*e = DestinationWeaviateSchemasIndexingAuthMode(v)
	return nil
}
+
// DestinationWeaviateUsernamePassword - Authenticate using username and password (suitable for self-managed Weaviate clusters)
type DestinationWeaviateUsernamePassword struct {
	// Fixed discriminator handled by the generator's utils package.
	mode *DestinationWeaviateSchemasIndexingAuthMode `const:"username_password" json:"mode"`
	// Password for the Weaviate cluster
	// NOTE(review): this is a secret; presumably marked sensitive in the
	// provider schema per this change's intent — confirm in the resource code.
	Password string `json:"password"`
	// Username for the Weaviate cluster
	Username string `json:"username"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const mode field is emitted.
func (d DestinationWeaviateUsernamePassword) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (flags match the other models).
func (d *DestinationWeaviateUsernamePassword) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetMode always returns the "username_password" constant.
func (o *DestinationWeaviateUsernamePassword) GetMode() *DestinationWeaviateSchemasIndexingAuthMode {
	return DestinationWeaviateSchemasIndexingAuthModeUsernamePassword.ToPointer()
}

// GetPassword returns the cluster password; "" for a nil receiver.
func (o *DestinationWeaviateUsernamePassword) GetPassword() string {
	if o == nil {
		return ""
	}
	return o.Password
}

// GetUsername returns the cluster username; "" for a nil receiver.
func (o *DestinationWeaviateUsernamePassword) GetUsername() string {
	if o == nil {
		return ""
	}
	return o.Username
}
+
// DestinationWeaviateSchemasIndexingMode is the discriminator value carried by
// the API-token authentication option.
type DestinationWeaviateSchemasIndexingMode string

const (
	DestinationWeaviateSchemasIndexingModeToken DestinationWeaviateSchemasIndexingMode = "token"
)

// ToPointer returns a pointer to a copy of e.
func (e DestinationWeaviateSchemasIndexingMode) ToPointer() *DestinationWeaviateSchemasIndexingMode {
	p := e
	return &p
}

// UnmarshalJSON accepts only the JSON string "token".
func (e *DestinationWeaviateSchemasIndexingMode) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "token" {
		return fmt.Errorf("invalid value for DestinationWeaviateSchemasIndexingMode: %v", v)
	}
	*e = DestinationWeaviateSchemasIndexingMode(v)
	return nil
}
+
// DestinationWeaviateAPIToken - Authenticate using an API token (suitable for Weaviate Cloud)
type DestinationWeaviateAPIToken struct {
	// Fixed discriminator handled by the generator's utils package.
	mode *DestinationWeaviateSchemasIndexingMode `const:"token" json:"mode"`
	// API Token for the Weaviate instance
	// NOTE(review): secret value; presumably marked sensitive in the provider
	// schema per this change's intent — confirm in the resource code.
	Token string `json:"token"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const mode field is emitted.
func (d DestinationWeaviateAPIToken) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (flags match the other models).
func (d *DestinationWeaviateAPIToken) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetMode always returns the "token" constant.
func (o *DestinationWeaviateAPIToken) GetMode() *DestinationWeaviateSchemasIndexingMode {
	return DestinationWeaviateSchemasIndexingModeToken.ToPointer()
}

// GetToken returns the API token; "" for a nil receiver.
func (o *DestinationWeaviateAPIToken) GetToken() string {
	if o == nil {
		return ""
	}
	return o.Token
}
+
// DestinationWeaviateAuthenticationType discriminates which variant of the
// DestinationWeaviateAuthentication union is populated.
type DestinationWeaviateAuthenticationType string

const (
	DestinationWeaviateAuthenticationTypeDestinationWeaviateAPIToken         DestinationWeaviateAuthenticationType = "destination-weaviate_API Token"
	DestinationWeaviateAuthenticationTypeDestinationWeaviateUsernamePassword DestinationWeaviateAuthenticationType = "destination-weaviate_Username/Password"
	DestinationWeaviateAuthenticationTypeDestinationWeaviateNoAuthentication DestinationWeaviateAuthenticationType = "destination-weaviate_No Authentication"
)

// DestinationWeaviateAuthentication is a tagged union: exactly one of the
// variant pointers is expected to be non-nil, with Type naming the active one.
type DestinationWeaviateAuthentication struct {
	DestinationWeaviateAPIToken         *DestinationWeaviateAPIToken
	DestinationWeaviateUsernamePassword *DestinationWeaviateUsernamePassword
	DestinationWeaviateNoAuthentication *DestinationWeaviateNoAuthentication

	Type DestinationWeaviateAuthenticationType
}
+
+func CreateDestinationWeaviateAuthenticationDestinationWeaviateAPIToken(destinationWeaviateAPIToken DestinationWeaviateAPIToken) DestinationWeaviateAuthentication {
+ typ := DestinationWeaviateAuthenticationTypeDestinationWeaviateAPIToken
+
+ return DestinationWeaviateAuthentication{
+ DestinationWeaviateAPIToken: &destinationWeaviateAPIToken,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateAuthenticationDestinationWeaviateUsernamePassword(destinationWeaviateUsernamePassword DestinationWeaviateUsernamePassword) DestinationWeaviateAuthentication {
+ typ := DestinationWeaviateAuthenticationTypeDestinationWeaviateUsernamePassword
+
+ return DestinationWeaviateAuthentication{
+ DestinationWeaviateUsernamePassword: &destinationWeaviateUsernamePassword,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateAuthenticationDestinationWeaviateNoAuthentication(destinationWeaviateNoAuthentication DestinationWeaviateNoAuthentication) DestinationWeaviateAuthentication {
+ typ := DestinationWeaviateAuthenticationTypeDestinationWeaviateNoAuthentication
+
+ return DestinationWeaviateAuthentication{
+ DestinationWeaviateNoAuthentication: &destinationWeaviateNoAuthentication,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes an authentication-union payload by probing each variant
// and keeping the first that decodes without error.
// NOTE(review): probe order is NoAuthentication, APIToken, UsernamePassword —
// presumably fewest-required-fields first; each variant's const "mode" field
// should make the match unambiguous via utils.UnmarshalJSON's strictness.
func (u *DestinationWeaviateAuthentication) UnmarshalJSON(data []byte) error {

	destinationWeaviateNoAuthentication := new(DestinationWeaviateNoAuthentication)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateNoAuthentication, "", true, true); err == nil {
		u.DestinationWeaviateNoAuthentication = destinationWeaviateNoAuthentication
		u.Type = DestinationWeaviateAuthenticationTypeDestinationWeaviateNoAuthentication
		return nil
	}

	destinationWeaviateAPIToken := new(DestinationWeaviateAPIToken)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateAPIToken, "", true, true); err == nil {
		u.DestinationWeaviateAPIToken = destinationWeaviateAPIToken
		u.Type = DestinationWeaviateAuthenticationTypeDestinationWeaviateAPIToken
		return nil
	}

	destinationWeaviateUsernamePassword := new(DestinationWeaviateUsernamePassword)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateUsernamePassword, "", true, true); err == nil {
		u.DestinationWeaviateUsernamePassword = destinationWeaviateUsernamePassword
		u.Type = DestinationWeaviateAuthenticationTypeDestinationWeaviateUsernamePassword
		return nil
	}

	// No variant accepted the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u DestinationWeaviateAuthentication) MarshalJSON() ([]byte, error) {
+ if u.DestinationWeaviateAPIToken != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateAPIToken, "", true)
+ }
+
+ if u.DestinationWeaviateUsernamePassword != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUsernamePassword, "", true)
+ }
+
+ if u.DestinationWeaviateNoAuthentication != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateNoAuthentication, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// DestinationWeaviateDefaultVectorizer - The vectorizer to use if new classes need to be created
type DestinationWeaviateDefaultVectorizer string

const (
	DestinationWeaviateDefaultVectorizerNone                  DestinationWeaviateDefaultVectorizer = "none"
	DestinationWeaviateDefaultVectorizerText2vecCohere        DestinationWeaviateDefaultVectorizer = "text2vec-cohere"
	DestinationWeaviateDefaultVectorizerText2vecHuggingface   DestinationWeaviateDefaultVectorizer = "text2vec-huggingface"
	DestinationWeaviateDefaultVectorizerText2vecOpenai        DestinationWeaviateDefaultVectorizer = "text2vec-openai"
	DestinationWeaviateDefaultVectorizerText2vecPalm          DestinationWeaviateDefaultVectorizer = "text2vec-palm"
	DestinationWeaviateDefaultVectorizerText2vecContextionary DestinationWeaviateDefaultVectorizer = "text2vec-contextionary"
	DestinationWeaviateDefaultVectorizerText2vecTransformers  DestinationWeaviateDefaultVectorizer = "text2vec-transformers"
	DestinationWeaviateDefaultVectorizerText2vecGpt4all       DestinationWeaviateDefaultVectorizer = "text2vec-gpt4all"
)

// ToPointer returns a pointer to a copy of e.
func (e DestinationWeaviateDefaultVectorizer) ToPointer() *DestinationWeaviateDefaultVectorizer {
	p := e
	return &p
}

// UnmarshalJSON accepts exactly the vectorizer names declared above.
func (e *DestinationWeaviateDefaultVectorizer) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "none",
		"text2vec-cohere",
		"text2vec-huggingface",
		"text2vec-openai",
		"text2vec-palm",
		"text2vec-contextionary",
		"text2vec-transformers",
		"text2vec-gpt4all":
		*e = DestinationWeaviateDefaultVectorizer(v)
		return nil
	default:
		return fmt.Errorf("invalid value for DestinationWeaviateDefaultVectorizer: %v", v)
	}
}
+
// DestinationWeaviateIndexing - Indexing configuration
type DestinationWeaviateIndexing struct {
	// Additional HTTP headers to send with every request.
	AdditionalHeaders []DestinationWeaviateHeader `json:"additional_headers,omitempty"`
	// Authentication method
	Auth DestinationWeaviateAuthentication `json:"auth"`
	// The number of records to send to Weaviate in each batch
	// The `default:"128"` tag is applied by the generator's utils package.
	BatchSize *int64 `default:"128" json:"batch_size"`
	// The vectorizer to use if new classes need to be created
	DefaultVectorizer *DestinationWeaviateDefaultVectorizer `default:"none" json:"default_vectorizer"`
	// The public endpoint of the Weaviate cluster.
	Host string `json:"host"`
	// The field in the object that contains the embedded text
	TextField *string `default:"text" json:"text_field"`
}

// MarshalJSON delegates to utils.MarshalJSON so default-tagged fields are handled.
func (d DestinationWeaviateIndexing) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON; note the final flag here is
// false, unlike the variant models above (presumably lenient about unknown
// fields — confirm against the utils package).
func (d *DestinationWeaviateIndexing) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
		return err
	}
	return nil
}
+
+func (o *DestinationWeaviateIndexing) GetAdditionalHeaders() []DestinationWeaviateHeader {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalHeaders
+}
+
+func (o *DestinationWeaviateIndexing) GetAuth() DestinationWeaviateAuthentication {
+ if o == nil {
+ return DestinationWeaviateAuthentication{}
+ }
+ return o.Auth
+}
+
+func (o *DestinationWeaviateIndexing) GetBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchSize
+}
+
+func (o *DestinationWeaviateIndexing) GetDefaultVectorizer() *DestinationWeaviateDefaultVectorizer {
+ if o == nil {
+ return nil
+ }
+ return o.DefaultVectorizer
+}
+
+func (o *DestinationWeaviateIndexing) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationWeaviateIndexing) GetTextField() *string {
+ if o == nil {
+ return nil
+ }
+ return o.TextField
+}
+
// DestinationWeaviateFieldNameMappingConfigModel renames one source field to a
// destination field.
type DestinationWeaviateFieldNameMappingConfigModel struct {
	// The field name in the source
	FromField string `json:"from_field"`
	// The field name to use in the destination
	ToField string `json:"to_field"`
}

// GetFromField returns the source field name; "" for a nil receiver.
func (o *DestinationWeaviateFieldNameMappingConfigModel) GetFromField() string {
	if o != nil {
		return o.FromField
	}
	return ""
}

// GetToField returns the destination field name; "" for a nil receiver.
func (o *DestinationWeaviateFieldNameMappingConfigModel) GetToField() string {
	if o != nil {
		return o.ToField
	}
	return ""
}
+
// DestinationWeaviateLanguage - Split code in suitable places based on the programming language
type DestinationWeaviateLanguage string

const (
	DestinationWeaviateLanguageCpp      DestinationWeaviateLanguage = "cpp"
	DestinationWeaviateLanguageGo       DestinationWeaviateLanguage = "go"
	DestinationWeaviateLanguageJava     DestinationWeaviateLanguage = "java"
	DestinationWeaviateLanguageJs       DestinationWeaviateLanguage = "js"
	DestinationWeaviateLanguagePhp      DestinationWeaviateLanguage = "php"
	DestinationWeaviateLanguageProto    DestinationWeaviateLanguage = "proto"
	DestinationWeaviateLanguagePython   DestinationWeaviateLanguage = "python"
	DestinationWeaviateLanguageRst      DestinationWeaviateLanguage = "rst"
	DestinationWeaviateLanguageRuby     DestinationWeaviateLanguage = "ruby"
	DestinationWeaviateLanguageRust     DestinationWeaviateLanguage = "rust"
	DestinationWeaviateLanguageScala    DestinationWeaviateLanguage = "scala"
	DestinationWeaviateLanguageSwift    DestinationWeaviateLanguage = "swift"
	DestinationWeaviateLanguageMarkdown DestinationWeaviateLanguage = "markdown"
	DestinationWeaviateLanguageLatex    DestinationWeaviateLanguage = "latex"
	DestinationWeaviateLanguageHTML     DestinationWeaviateLanguage = "html"
	DestinationWeaviateLanguageSol      DestinationWeaviateLanguage = "sol"
)

// ToPointer returns a pointer to a copy of e.
func (e DestinationWeaviateLanguage) ToPointer() *DestinationWeaviateLanguage {
	p := e
	return &p
}

// UnmarshalJSON accepts exactly the language identifiers declared above.
func (e *DestinationWeaviateLanguage) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "cpp",
		"go",
		"java",
		"js",
		"php",
		"proto",
		"python",
		"rst",
		"ruby",
		"rust",
		"scala",
		"swift",
		"markdown",
		"latex",
		"html",
		"sol":
		*e = DestinationWeaviateLanguage(v)
		return nil
	default:
		return fmt.Errorf("invalid value for DestinationWeaviateLanguage: %v", v)
	}
}
+
// DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode is the
// discriminator value carried by the by-programming-language splitter.
type DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode string

const (
	DestinationWeaviateSchemasProcessingTextSplitterTextSplitterModeCode DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode = "code"
)

// ToPointer returns a pointer to a copy of e.
func (e DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode) ToPointer() *DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode {
	p := e
	return &p
}

// UnmarshalJSON accepts only the JSON string "code".
func (e *DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "code" {
		return fmt.Errorf("invalid value for DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode: %v", v)
	}
	*e = DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode(v)
	return nil
}
+
// DestinationWeaviateByProgrammingLanguage - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
type DestinationWeaviateByProgrammingLanguage struct {
	// Split code in suitable places based on the programming language
	Language DestinationWeaviateLanguage `json:"language"`
	// Fixed discriminator handled by the generator's utils package.
	mode *DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode `const:"code" json:"mode"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const mode field is emitted.
func (d DestinationWeaviateByProgrammingLanguage) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (flags match the other models).
func (d *DestinationWeaviateByProgrammingLanguage) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetLanguage returns the configured language; zero value for a nil receiver.
func (o *DestinationWeaviateByProgrammingLanguage) GetLanguage() DestinationWeaviateLanguage {
	if o == nil {
		return DestinationWeaviateLanguage("")
	}
	return o.Language
}

// GetMode always returns the "code" constant.
func (o *DestinationWeaviateByProgrammingLanguage) GetMode() *DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode {
	return DestinationWeaviateSchemasProcessingTextSplitterTextSplitterModeCode.ToPointer()
}
+
// DestinationWeaviateSchemasProcessingTextSplitterMode is the discriminator
// value carried by the by-markdown-header splitter.
type DestinationWeaviateSchemasProcessingTextSplitterMode string

const (
	DestinationWeaviateSchemasProcessingTextSplitterModeMarkdown DestinationWeaviateSchemasProcessingTextSplitterMode = "markdown"
)

// ToPointer returns a pointer to a copy of e.
func (e DestinationWeaviateSchemasProcessingTextSplitterMode) ToPointer() *DestinationWeaviateSchemasProcessingTextSplitterMode {
	p := e
	return &p
}

// UnmarshalJSON accepts only the JSON string "markdown".
func (e *DestinationWeaviateSchemasProcessingTextSplitterMode) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "markdown" {
		return fmt.Errorf("invalid value for DestinationWeaviateSchemasProcessingTextSplitterMode: %v", v)
	}
	*e = DestinationWeaviateSchemasProcessingTextSplitterMode(v)
	return nil
}
+
// DestinationWeaviateByMarkdownHeader - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
type DestinationWeaviateByMarkdownHeader struct {
	// Fixed discriminator handled by the generator's utils package.
	mode *DestinationWeaviateSchemasProcessingTextSplitterMode `const:"markdown" json:"mode"`
	// Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
	// The `default:"1"` tag is applied by the generator's utils package.
	SplitLevel *int64 `default:"1" json:"split_level"`
}

// MarshalJSON delegates to utils.MarshalJSON so const/default tags are handled.
func (d DestinationWeaviateByMarkdownHeader) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (flags match the other models).
func (d *DestinationWeaviateByMarkdownHeader) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetMode always returns the "markdown" constant.
func (o *DestinationWeaviateByMarkdownHeader) GetMode() *DestinationWeaviateSchemasProcessingTextSplitterMode {
	return DestinationWeaviateSchemasProcessingTextSplitterModeMarkdown.ToPointer()
}

// GetSplitLevel returns the header split level; nil for a nil receiver.
func (o *DestinationWeaviateByMarkdownHeader) GetSplitLevel() *int64 {
	if o == nil {
		return nil
	}
	return o.SplitLevel
}
+
// DestinationWeaviateSchemasProcessingMode is the discriminator value carried
// by the by-separator splitter.
type DestinationWeaviateSchemasProcessingMode string

const (
	DestinationWeaviateSchemasProcessingModeSeparator DestinationWeaviateSchemasProcessingMode = "separator"
)

// ToPointer returns a pointer to a copy of e.
func (e DestinationWeaviateSchemasProcessingMode) ToPointer() *DestinationWeaviateSchemasProcessingMode {
	p := e
	return &p
}

// UnmarshalJSON accepts only the JSON string "separator".
func (e *DestinationWeaviateSchemasProcessingMode) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "separator" {
		return fmt.Errorf("invalid value for DestinationWeaviateSchemasProcessingMode: %v", v)
	}
	*e = DestinationWeaviateSchemasProcessingMode(v)
	return nil
}
+
// DestinationWeaviateBySeparator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
type DestinationWeaviateBySeparator struct {
	// Whether to keep the separator in the resulting chunks
	KeepSeparator *bool `default:"false" json:"keep_separator"`
	// Fixed discriminator handled by the generator's utils package.
	mode *DestinationWeaviateSchemasProcessingMode `const:"separator" json:"mode"`
	// List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
	Separators []string `json:"separators,omitempty"`
}

// MarshalJSON delegates to utils.MarshalJSON so const/default tags are handled.
func (d DestinationWeaviateBySeparator) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (flags match the other models).
func (d *DestinationWeaviateBySeparator) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetKeepSeparator reports whether separators are kept; nil for a nil receiver.
func (o *DestinationWeaviateBySeparator) GetKeepSeparator() *bool {
	if o == nil {
		return nil
	}
	return o.KeepSeparator
}

// GetMode always returns the "separator" constant.
func (o *DestinationWeaviateBySeparator) GetMode() *DestinationWeaviateSchemasProcessingMode {
	return DestinationWeaviateSchemasProcessingModeSeparator.ToPointer()
}

// GetSeparators returns the separator list; nil for a nil receiver.
func (o *DestinationWeaviateBySeparator) GetSeparators() []string {
	if o == nil {
		return nil
	}
	return o.Separators
}
+
// DestinationWeaviateTextSplitterType discriminates which variant of the
// DestinationWeaviateTextSplitter union is populated.
type DestinationWeaviateTextSplitterType string

const (
	DestinationWeaviateTextSplitterTypeDestinationWeaviateBySeparator             DestinationWeaviateTextSplitterType = "destination-weaviate_By Separator"
	DestinationWeaviateTextSplitterTypeDestinationWeaviateByMarkdownHeader        DestinationWeaviateTextSplitterType = "destination-weaviate_By Markdown header"
	DestinationWeaviateTextSplitterTypeDestinationWeaviateByProgrammingLanguage   DestinationWeaviateTextSplitterType = "destination-weaviate_By Programming Language"
)

// DestinationWeaviateTextSplitter is a tagged union: exactly one of the variant
// pointers is expected to be non-nil, with Type naming the active one.
type DestinationWeaviateTextSplitter struct {
	DestinationWeaviateBySeparator           *DestinationWeaviateBySeparator
	DestinationWeaviateByMarkdownHeader      *DestinationWeaviateByMarkdownHeader
	DestinationWeaviateByProgrammingLanguage *DestinationWeaviateByProgrammingLanguage

	Type DestinationWeaviateTextSplitterType
}
+
+func CreateDestinationWeaviateTextSplitterDestinationWeaviateBySeparator(destinationWeaviateBySeparator DestinationWeaviateBySeparator) DestinationWeaviateTextSplitter {
+ typ := DestinationWeaviateTextSplitterTypeDestinationWeaviateBySeparator
+
+ return DestinationWeaviateTextSplitter{
+ DestinationWeaviateBySeparator: &destinationWeaviateBySeparator,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateTextSplitterDestinationWeaviateByMarkdownHeader(destinationWeaviateByMarkdownHeader DestinationWeaviateByMarkdownHeader) DestinationWeaviateTextSplitter {
+ typ := DestinationWeaviateTextSplitterTypeDestinationWeaviateByMarkdownHeader
+
+ return DestinationWeaviateTextSplitter{
+ DestinationWeaviateByMarkdownHeader: &destinationWeaviateByMarkdownHeader,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateTextSplitterDestinationWeaviateByProgrammingLanguage(destinationWeaviateByProgrammingLanguage DestinationWeaviateByProgrammingLanguage) DestinationWeaviateTextSplitter {
+ typ := DestinationWeaviateTextSplitterTypeDestinationWeaviateByProgrammingLanguage
+
+ return DestinationWeaviateTextSplitter{
+ DestinationWeaviateByProgrammingLanguage: &destinationWeaviateByProgrammingLanguage,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes a text-splitter-union payload by probing each variant
// and keeping the first that decodes without error.
// NOTE(review): probe order is ByMarkdownHeader, ByProgrammingLanguage,
// BySeparator; each variant's const "mode" field should make the match
// unambiguous via utils.UnmarshalJSON's strictness — confirm in utils.
func (u *DestinationWeaviateTextSplitter) UnmarshalJSON(data []byte) error {

	destinationWeaviateByMarkdownHeader := new(DestinationWeaviateByMarkdownHeader)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateByMarkdownHeader, "", true, true); err == nil {
		u.DestinationWeaviateByMarkdownHeader = destinationWeaviateByMarkdownHeader
		u.Type = DestinationWeaviateTextSplitterTypeDestinationWeaviateByMarkdownHeader
		return nil
	}

	destinationWeaviateByProgrammingLanguage := new(DestinationWeaviateByProgrammingLanguage)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateByProgrammingLanguage, "", true, true); err == nil {
		u.DestinationWeaviateByProgrammingLanguage = destinationWeaviateByProgrammingLanguage
		u.Type = DestinationWeaviateTextSplitterTypeDestinationWeaviateByProgrammingLanguage
		return nil
	}

	destinationWeaviateBySeparator := new(DestinationWeaviateBySeparator)
	if err := utils.UnmarshalJSON(data, &destinationWeaviateBySeparator, "", true, true); err == nil {
		u.DestinationWeaviateBySeparator = destinationWeaviateBySeparator
		u.Type = DestinationWeaviateTextSplitterTypeDestinationWeaviateBySeparator
		return nil
	}

	// No variant accepted the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u DestinationWeaviateTextSplitter) MarshalJSON() ([]byte, error) {
+ if u.DestinationWeaviateBySeparator != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateBySeparator, "", true)
+ }
+
+ if u.DestinationWeaviateByMarkdownHeader != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateByMarkdownHeader, "", true)
+ }
+
+ if u.DestinationWeaviateByProgrammingLanguage != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateByProgrammingLanguage, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// DestinationWeaviateProcessingConfigModel controls how records are chunked and
// which fields feed the embedding vs. metadata.
type DestinationWeaviateProcessingConfigModel struct {
	// Size of overlap between chunks in tokens to store in vector store to better capture relevant context
	ChunkOverlap *int64 `default:"0" json:"chunk_overlap"`
	// Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)
	ChunkSize int64 `json:"chunk_size"`
	// List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
	FieldNameMappings []DestinationWeaviateFieldNameMappingConfigModel `json:"field_name_mappings,omitempty"`
	// List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
	MetadataFields []string `json:"metadata_fields,omitempty"`
	// List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
	TextFields []string `json:"text_fields,omitempty"`
	// Split text fields into chunks based on the specified method.
	TextSplitter *DestinationWeaviateTextSplitter `json:"text_splitter,omitempty"`
}

// MarshalJSON delegates to utils.MarshalJSON so default-tagged fields are handled.
func (d DestinationWeaviateProcessingConfigModel) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON with the lenient flag set
// (final argument false), matching the other top-level config models.
func (d *DestinationWeaviateProcessingConfigModel) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
		return err
	}
	return nil
}
+
+func (o *DestinationWeaviateProcessingConfigModel) GetChunkOverlap() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ChunkOverlap
+}
+
+func (o *DestinationWeaviateProcessingConfigModel) GetChunkSize() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.ChunkSize
+}
+
+func (o *DestinationWeaviateProcessingConfigModel) GetFieldNameMappings() []DestinationWeaviateFieldNameMappingConfigModel {
+ if o == nil {
+ return nil
+ }
+ return o.FieldNameMappings
+}
+
+func (o *DestinationWeaviateProcessingConfigModel) GetMetadataFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.MetadataFields
+}
+
+func (o *DestinationWeaviateProcessingConfigModel) GetTextFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TextFields
+}
+
+func (o *DestinationWeaviateProcessingConfigModel) GetTextSplitter() *DestinationWeaviateTextSplitter {
+ if o == nil {
+ return nil
+ }
+ return o.TextSplitter
+}
+
// DestinationWeaviate is the full connector configuration for the Weaviate destination.
type DestinationWeaviate struct {
	// Fixed connector discriminator handled by the generator's utils package.
	destinationType Weaviate `const:"weaviate" json:"destinationType"`
	// Embedding configuration
	Embedding DestinationWeaviateEmbedding `json:"embedding"`
	// Indexing configuration
	Indexing   DestinationWeaviateIndexing              `json:"indexing"`
	Processing DestinationWeaviateProcessingConfigModel `json:"processing"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const destinationType is emitted.
func (d DestinationWeaviate) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(d, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON with the lenient flag set
// (final argument false), matching the other top-level config models.
func (d *DestinationWeaviate) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
		return err
	}
	return nil
}
+
+func (o *DestinationWeaviate) GetDestinationType() Weaviate {
+ return WeaviateWeaviate
+}
+
+func (o *DestinationWeaviate) GetEmbedding() DestinationWeaviateEmbedding {
+ if o == nil {
+ return DestinationWeaviateEmbedding{}
+ }
+ return o.Embedding
+}
+
+func (o *DestinationWeaviate) GetIndexing() DestinationWeaviateIndexing {
+ if o == nil {
+ return DestinationWeaviateIndexing{}
+ }
+ return o.Indexing
+}
+
+func (o *DestinationWeaviate) GetProcessing() DestinationWeaviateProcessingConfigModel {
+ if o == nil {
+ return DestinationWeaviateProcessingConfigModel{}
+ }
+ return o.Processing
+}
diff --git a/internal/sdk/pkg/models/shared/destinationweaviatecreaterequest.go b/internal/sdk/pkg/models/shared/destinationweaviatecreaterequest.go
new file mode 100644
index 000000000..e62cc1741
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationweaviatecreaterequest.go
@@ -0,0 +1,40 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type DestinationWeaviateCreateRequest struct {
+ Configuration DestinationWeaviate `json:"configuration"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationWeaviateCreateRequest) GetConfiguration() DestinationWeaviate {
+ if o == nil {
+ return DestinationWeaviate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationWeaviateCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationWeaviateCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationWeaviateCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationweaviateputrequest.go b/internal/sdk/pkg/models/shared/destinationweaviateputrequest.go
new file mode 100644
index 000000000..0c816dbfe
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationweaviateputrequest.go
@@ -0,0 +1,30 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type DestinationWeaviatePutRequest struct {
+ Configuration DestinationWeaviateUpdate `json:"configuration"`
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationWeaviatePutRequest) GetConfiguration() DestinationWeaviateUpdate {
+ if o == nil {
+ return DestinationWeaviateUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationWeaviatePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationWeaviatePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationweaviateupdate.go b/internal/sdk/pkg/models/shared/destinationweaviateupdate.go
new file mode 100644
index 000000000..e5c8fc2ac
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/destinationweaviateupdate.go
@@ -0,0 +1,1406 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+type DestinationWeaviateUpdateSchemasEmbeddingEmbedding7Mode string
+
+const (
+ DestinationWeaviateUpdateSchemasEmbeddingEmbedding7ModeOpenaiCompatible DestinationWeaviateUpdateSchemasEmbeddingEmbedding7Mode = "openai_compatible"
+)
+
+func (e DestinationWeaviateUpdateSchemasEmbeddingEmbedding7Mode) ToPointer() *DestinationWeaviateUpdateSchemasEmbeddingEmbedding7Mode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasEmbeddingEmbedding7Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "openai_compatible":
+ *e = DestinationWeaviateUpdateSchemasEmbeddingEmbedding7Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasEmbeddingEmbedding7Mode: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateOpenAICompatible - Use a service that's compatible with the OpenAI API to embed text.
+type DestinationWeaviateUpdateOpenAICompatible struct {
+ APIKey *string `default:"" json:"api_key"`
+ // The base URL for your OpenAI-compatible service
+ BaseURL string `json:"base_url"`
+ // The number of dimensions the embedding model is generating
+ Dimensions int64 `json:"dimensions"`
+ mode *DestinationWeaviateUpdateSchemasEmbeddingEmbedding7Mode `const:"openai_compatible" json:"mode"`
+ // The name of the model to use for embedding
+ ModelName *string `default:"text-embedding-ada-002" json:"model_name"`
+}
+
+func (d DestinationWeaviateUpdateOpenAICompatible) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateOpenAICompatible) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateOpenAICompatible) GetAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIKey
+}
+
+func (o *DestinationWeaviateUpdateOpenAICompatible) GetBaseURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.BaseURL
+}
+
+func (o *DestinationWeaviateUpdateOpenAICompatible) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *DestinationWeaviateUpdateOpenAICompatible) GetMode() *DestinationWeaviateUpdateSchemasEmbeddingEmbedding7Mode {
+ return DestinationWeaviateUpdateSchemasEmbeddingEmbedding7ModeOpenaiCompatible.ToPointer()
+}
+
+func (o *DestinationWeaviateUpdateOpenAICompatible) GetModelName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ModelName
+}
+
+type DestinationWeaviateUpdateSchemasEmbeddingEmbedding6Mode string
+
+const (
+ DestinationWeaviateUpdateSchemasEmbeddingEmbedding6ModeFake DestinationWeaviateUpdateSchemasEmbeddingEmbedding6Mode = "fake"
+)
+
+func (e DestinationWeaviateUpdateSchemasEmbeddingEmbedding6Mode) ToPointer() *DestinationWeaviateUpdateSchemasEmbeddingEmbedding6Mode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasEmbeddingEmbedding6Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "fake":
+ *e = DestinationWeaviateUpdateSchemasEmbeddingEmbedding6Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasEmbeddingEmbedding6Mode: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
+type DestinationWeaviateUpdateFake struct {
+ mode *DestinationWeaviateUpdateSchemasEmbeddingEmbedding6Mode `const:"fake" json:"mode"`
+}
+
+func (d DestinationWeaviateUpdateFake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateFake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateFake) GetMode() *DestinationWeaviateUpdateSchemasEmbeddingEmbedding6Mode {
+ return DestinationWeaviateUpdateSchemasEmbeddingEmbedding6ModeFake.ToPointer()
+}
+
+type DestinationWeaviateUpdateSchemasEmbeddingEmbedding5Mode string
+
+const (
+ DestinationWeaviateUpdateSchemasEmbeddingEmbedding5ModeFromField DestinationWeaviateUpdateSchemasEmbeddingEmbedding5Mode = "from_field"
+)
+
+func (e DestinationWeaviateUpdateSchemasEmbeddingEmbedding5Mode) ToPointer() *DestinationWeaviateUpdateSchemasEmbeddingEmbedding5Mode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasEmbeddingEmbedding5Mode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "from_field":
+ *e = DestinationWeaviateUpdateSchemasEmbeddingEmbedding5Mode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasEmbeddingEmbedding5Mode: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateFromField - Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.
+type DestinationWeaviateUpdateFromField struct {
+ // The number of dimensions the embedding model is generating
+ Dimensions int64 `json:"dimensions"`
+ // Name of the field in the record that contains the embedding
+ FieldName string `json:"field_name"`
+ mode *DestinationWeaviateUpdateSchemasEmbeddingEmbedding5Mode `const:"from_field" json:"mode"`
+}
+
+func (d DestinationWeaviateUpdateFromField) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateFromField) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateFromField) GetDimensions() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Dimensions
+}
+
+func (o *DestinationWeaviateUpdateFromField) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+func (o *DestinationWeaviateUpdateFromField) GetMode() *DestinationWeaviateUpdateSchemasEmbeddingEmbedding5Mode {
+ return DestinationWeaviateUpdateSchemasEmbeddingEmbedding5ModeFromField.ToPointer()
+}
+
+type DestinationWeaviateUpdateSchemasEmbeddingEmbeddingMode string
+
+const (
+ DestinationWeaviateUpdateSchemasEmbeddingEmbeddingModeCohere DestinationWeaviateUpdateSchemasEmbeddingEmbeddingMode = "cohere"
+)
+
+func (e DestinationWeaviateUpdateSchemasEmbeddingEmbeddingMode) ToPointer() *DestinationWeaviateUpdateSchemasEmbeddingEmbeddingMode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasEmbeddingEmbeddingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "cohere":
+ *e = DestinationWeaviateUpdateSchemasEmbeddingEmbeddingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasEmbeddingEmbeddingMode: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateCohere - Use the Cohere API to embed text.
+type DestinationWeaviateUpdateCohere struct {
+ CohereKey string `json:"cohere_key"`
+ mode *DestinationWeaviateUpdateSchemasEmbeddingEmbeddingMode `const:"cohere" json:"mode"`
+}
+
+func (d DestinationWeaviateUpdateCohere) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateCohere) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateCohere) GetCohereKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.CohereKey
+}
+
+func (o *DestinationWeaviateUpdateCohere) GetMode() *DestinationWeaviateUpdateSchemasEmbeddingEmbeddingMode {
+ return DestinationWeaviateUpdateSchemasEmbeddingEmbeddingModeCohere.ToPointer()
+}
+
+type DestinationWeaviateUpdateSchemasEmbeddingMode string
+
+const (
+ DestinationWeaviateUpdateSchemasEmbeddingModeOpenai DestinationWeaviateUpdateSchemasEmbeddingMode = "openai"
+)
+
+func (e DestinationWeaviateUpdateSchemasEmbeddingMode) ToPointer() *DestinationWeaviateUpdateSchemasEmbeddingMode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasEmbeddingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "openai":
+ *e = DestinationWeaviateUpdateSchemasEmbeddingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasEmbeddingMode: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationWeaviateUpdateOpenAI struct {
+ mode *DestinationWeaviateUpdateSchemasEmbeddingMode `const:"openai" json:"mode"`
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationWeaviateUpdateOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateOpenAI) GetMode() *DestinationWeaviateUpdateSchemasEmbeddingMode {
+ return DestinationWeaviateUpdateSchemasEmbeddingModeOpenai.ToPointer()
+}
+
+func (o *DestinationWeaviateUpdateOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
+}
+
+type DestinationWeaviateUpdateSchemasMode string
+
+const (
+ DestinationWeaviateUpdateSchemasModeAzureOpenai DestinationWeaviateUpdateSchemasMode = "azure_openai"
+)
+
+func (e DestinationWeaviateUpdateSchemasMode) ToPointer() *DestinationWeaviateUpdateSchemasMode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "azure_openai":
+ *e = DestinationWeaviateUpdateSchemasMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasMode: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateAzureOpenAI - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
+type DestinationWeaviateUpdateAzureOpenAI struct {
+ // The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ APIBase string `json:"api_base"`
+ // The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ Deployment string `json:"deployment"`
+ mode *DestinationWeaviateUpdateSchemasMode `const:"azure_openai" json:"mode"`
+ // The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
+ OpenaiKey string `json:"openai_key"`
+}
+
+func (d DestinationWeaviateUpdateAzureOpenAI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateAzureOpenAI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateAzureOpenAI) GetAPIBase() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIBase
+}
+
+func (o *DestinationWeaviateUpdateAzureOpenAI) GetDeployment() string {
+ if o == nil {
+ return ""
+ }
+ return o.Deployment
+}
+
+func (o *DestinationWeaviateUpdateAzureOpenAI) GetMode() *DestinationWeaviateUpdateSchemasMode {
+ return DestinationWeaviateUpdateSchemasModeAzureOpenai.ToPointer()
+}
+
+func (o *DestinationWeaviateUpdateAzureOpenAI) GetOpenaiKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.OpenaiKey
+}
+
+type DestinationWeaviateUpdateMode string
+
+const (
+ DestinationWeaviateUpdateModeNoEmbedding DestinationWeaviateUpdateMode = "no_embedding"
+)
+
+func (e DestinationWeaviateUpdateMode) ToPointer() *DestinationWeaviateUpdateMode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "no_embedding":
+ *e = DestinationWeaviateUpdateMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateMode: %v", v)
+ }
+}
+
+// NoExternalEmbedding - Do not calculate and pass embeddings to Weaviate. Suitable for clusters with configured vectorizers to calculate embeddings within Weaviate or for classes that should only support regular text search.
+type NoExternalEmbedding struct {
+ mode *DestinationWeaviateUpdateMode `const:"no_embedding" json:"mode"`
+}
+
+func (n NoExternalEmbedding) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(n, "", false)
+}
+
+func (n *NoExternalEmbedding) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &n, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *NoExternalEmbedding) GetMode() *DestinationWeaviateUpdateMode {
+ return DestinationWeaviateUpdateModeNoEmbedding.ToPointer()
+}
+
+type DestinationWeaviateUpdateEmbeddingType string
+
+const (
+ DestinationWeaviateUpdateEmbeddingTypeNoExternalEmbedding DestinationWeaviateUpdateEmbeddingType = "No external embedding"
+ DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateAzureOpenAI DestinationWeaviateUpdateEmbeddingType = "destination-weaviate-update_Azure OpenAI"
+ DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateOpenAI DestinationWeaviateUpdateEmbeddingType = "destination-weaviate-update_OpenAI"
+ DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateCohere DestinationWeaviateUpdateEmbeddingType = "destination-weaviate-update_Cohere"
+ DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateFromField DestinationWeaviateUpdateEmbeddingType = "destination-weaviate-update_From Field"
+ DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateFake DestinationWeaviateUpdateEmbeddingType = "destination-weaviate-update_Fake"
+ DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateOpenAICompatible DestinationWeaviateUpdateEmbeddingType = "destination-weaviate-update_OpenAI-compatible"
+)
+
+type DestinationWeaviateUpdateEmbedding struct {
+ NoExternalEmbedding *NoExternalEmbedding
+ DestinationWeaviateUpdateAzureOpenAI *DestinationWeaviateUpdateAzureOpenAI
+ DestinationWeaviateUpdateOpenAI *DestinationWeaviateUpdateOpenAI
+ DestinationWeaviateUpdateCohere *DestinationWeaviateUpdateCohere
+ DestinationWeaviateUpdateFromField *DestinationWeaviateUpdateFromField
+ DestinationWeaviateUpdateFake *DestinationWeaviateUpdateFake
+ DestinationWeaviateUpdateOpenAICompatible *DestinationWeaviateUpdateOpenAICompatible
+
+ Type DestinationWeaviateUpdateEmbeddingType
+}
+
+func CreateDestinationWeaviateUpdateEmbeddingNoExternalEmbedding(noExternalEmbedding NoExternalEmbedding) DestinationWeaviateUpdateEmbedding {
+ typ := DestinationWeaviateUpdateEmbeddingTypeNoExternalEmbedding
+
+ return DestinationWeaviateUpdateEmbedding{
+ NoExternalEmbedding: &noExternalEmbedding,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateUpdateEmbeddingDestinationWeaviateUpdateAzureOpenAI(destinationWeaviateUpdateAzureOpenAI DestinationWeaviateUpdateAzureOpenAI) DestinationWeaviateUpdateEmbedding {
+ typ := DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateAzureOpenAI
+
+ return DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateAzureOpenAI: &destinationWeaviateUpdateAzureOpenAI,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateUpdateEmbeddingDestinationWeaviateUpdateOpenAI(destinationWeaviateUpdateOpenAI DestinationWeaviateUpdateOpenAI) DestinationWeaviateUpdateEmbedding {
+ typ := DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateOpenAI
+
+ return DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateOpenAI: &destinationWeaviateUpdateOpenAI,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateUpdateEmbeddingDestinationWeaviateUpdateCohere(destinationWeaviateUpdateCohere DestinationWeaviateUpdateCohere) DestinationWeaviateUpdateEmbedding {
+ typ := DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateCohere
+
+ return DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateCohere: &destinationWeaviateUpdateCohere,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateUpdateEmbeddingDestinationWeaviateUpdateFromField(destinationWeaviateUpdateFromField DestinationWeaviateUpdateFromField) DestinationWeaviateUpdateEmbedding {
+ typ := DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateFromField
+
+ return DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateFromField: &destinationWeaviateUpdateFromField,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateUpdateEmbeddingDestinationWeaviateUpdateFake(destinationWeaviateUpdateFake DestinationWeaviateUpdateFake) DestinationWeaviateUpdateEmbedding {
+ typ := DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateFake
+
+ return DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateFake: &destinationWeaviateUpdateFake,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateUpdateEmbeddingDestinationWeaviateUpdateOpenAICompatible(destinationWeaviateUpdateOpenAICompatible DestinationWeaviateUpdateOpenAICompatible) DestinationWeaviateUpdateEmbedding {
+ typ := DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateOpenAICompatible
+
+ return DestinationWeaviateUpdateEmbedding{
+ DestinationWeaviateUpdateOpenAICompatible: &destinationWeaviateUpdateOpenAICompatible,
+ Type: typ,
+ }
+}
+
+func (u *DestinationWeaviateUpdateEmbedding) UnmarshalJSON(data []byte) error {
+
+ noExternalEmbedding := new(NoExternalEmbedding)
+ if err := utils.UnmarshalJSON(data, &noExternalEmbedding, "", true, true); err == nil {
+ u.NoExternalEmbedding = noExternalEmbedding
+ u.Type = DestinationWeaviateUpdateEmbeddingTypeNoExternalEmbedding
+ return nil
+ }
+
+ destinationWeaviateUpdateFake := new(DestinationWeaviateUpdateFake)
+ if err := utils.UnmarshalJSON(data, &destinationWeaviateUpdateFake, "", true, true); err == nil {
+ u.DestinationWeaviateUpdateFake = destinationWeaviateUpdateFake
+ u.Type = DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateFake
+ return nil
+ }
+
+ destinationWeaviateUpdateOpenAI := new(DestinationWeaviateUpdateOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationWeaviateUpdateOpenAI, "", true, true); err == nil {
+ u.DestinationWeaviateUpdateOpenAI = destinationWeaviateUpdateOpenAI
+ u.Type = DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateOpenAI
+ return nil
+ }
+
+ destinationWeaviateUpdateCohere := new(DestinationWeaviateUpdateCohere)
+ if err := utils.UnmarshalJSON(data, &destinationWeaviateUpdateCohere, "", true, true); err == nil {
+ u.DestinationWeaviateUpdateCohere = destinationWeaviateUpdateCohere
+ u.Type = DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateCohere
+ return nil
+ }
+
+ destinationWeaviateUpdateFromField := new(DestinationWeaviateUpdateFromField)
+ if err := utils.UnmarshalJSON(data, &destinationWeaviateUpdateFromField, "", true, true); err == nil {
+ u.DestinationWeaviateUpdateFromField = destinationWeaviateUpdateFromField
+ u.Type = DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateFromField
+ return nil
+ }
+
+ destinationWeaviateUpdateAzureOpenAI := new(DestinationWeaviateUpdateAzureOpenAI)
+ if err := utils.UnmarshalJSON(data, &destinationWeaviateUpdateAzureOpenAI, "", true, true); err == nil {
+ u.DestinationWeaviateUpdateAzureOpenAI = destinationWeaviateUpdateAzureOpenAI
+ u.Type = DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateAzureOpenAI
+ return nil
+ }
+
+ destinationWeaviateUpdateOpenAICompatible := new(DestinationWeaviateUpdateOpenAICompatible)
+ if err := utils.UnmarshalJSON(data, &destinationWeaviateUpdateOpenAICompatible, "", true, true); err == nil {
+ u.DestinationWeaviateUpdateOpenAICompatible = destinationWeaviateUpdateOpenAICompatible
+ u.Type = DestinationWeaviateUpdateEmbeddingTypeDestinationWeaviateUpdateOpenAICompatible
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DestinationWeaviateUpdateEmbedding) MarshalJSON() ([]byte, error) {
+ if u.NoExternalEmbedding != nil {
+ return utils.MarshalJSON(u.NoExternalEmbedding, "", true)
+ }
+
+ if u.DestinationWeaviateUpdateAzureOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUpdateAzureOpenAI, "", true)
+ }
+
+ if u.DestinationWeaviateUpdateOpenAI != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUpdateOpenAI, "", true)
+ }
+
+ if u.DestinationWeaviateUpdateCohere != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUpdateCohere, "", true)
+ }
+
+ if u.DestinationWeaviateUpdateFromField != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUpdateFromField, "", true)
+ }
+
+ if u.DestinationWeaviateUpdateFake != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUpdateFake, "", true)
+ }
+
+ if u.DestinationWeaviateUpdateOpenAICompatible != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUpdateOpenAICompatible, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type Header struct {
+ HeaderKey string `json:"header_key"`
+ Value string `json:"value"`
+}
+
+func (o *Header) GetHeaderKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.HeaderKey
+}
+
+func (o *Header) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+type DestinationWeaviateUpdateSchemasIndexingAuthAuthenticationMode string
+
+const (
+ DestinationWeaviateUpdateSchemasIndexingAuthAuthenticationModeNoAuth DestinationWeaviateUpdateSchemasIndexingAuthAuthenticationMode = "no_auth"
+)
+
+func (e DestinationWeaviateUpdateSchemasIndexingAuthAuthenticationMode) ToPointer() *DestinationWeaviateUpdateSchemasIndexingAuthAuthenticationMode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasIndexingAuthAuthenticationMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "no_auth":
+ *e = DestinationWeaviateUpdateSchemasIndexingAuthAuthenticationMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasIndexingAuthAuthenticationMode: %v", v)
+ }
+}
+
+// NoAuthentication - Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)
+type NoAuthentication struct {
+ mode *DestinationWeaviateUpdateSchemasIndexingAuthAuthenticationMode `const:"no_auth" json:"mode"`
+}
+
+func (n NoAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(n, "", false)
+}
+
+func (n *NoAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &n, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *NoAuthentication) GetMode() *DestinationWeaviateUpdateSchemasIndexingAuthAuthenticationMode {
+ return DestinationWeaviateUpdateSchemasIndexingAuthAuthenticationModeNoAuth.ToPointer()
+}
+
+type DestinationWeaviateUpdateSchemasIndexingAuthMode string
+
+const (
+ DestinationWeaviateUpdateSchemasIndexingAuthModeUsernamePassword DestinationWeaviateUpdateSchemasIndexingAuthMode = "username_password"
+)
+
+func (e DestinationWeaviateUpdateSchemasIndexingAuthMode) ToPointer() *DestinationWeaviateUpdateSchemasIndexingAuthMode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasIndexingAuthMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "username_password":
+ *e = DestinationWeaviateUpdateSchemasIndexingAuthMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasIndexingAuthMode: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateUsernamePassword - Authenticate using username and password (suitable for self-managed Weaviate clusters)
+type DestinationWeaviateUpdateUsernamePassword struct {
+ mode *DestinationWeaviateUpdateSchemasIndexingAuthMode `const:"username_password" json:"mode"`
+ // Password for the Weaviate cluster
+ Password string `json:"password"`
+ // Username for the Weaviate cluster
+ Username string `json:"username"`
+}
+
+func (d DestinationWeaviateUpdateUsernamePassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateUsernamePassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateUsernamePassword) GetMode() *DestinationWeaviateUpdateSchemasIndexingAuthMode {
+ return DestinationWeaviateUpdateSchemasIndexingAuthModeUsernamePassword.ToPointer()
+}
+
+func (o *DestinationWeaviateUpdateUsernamePassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *DestinationWeaviateUpdateUsernamePassword) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type DestinationWeaviateUpdateSchemasIndexingMode string
+
+const (
+ DestinationWeaviateUpdateSchemasIndexingModeToken DestinationWeaviateUpdateSchemasIndexingMode = "token"
+)
+
+func (e DestinationWeaviateUpdateSchemasIndexingMode) ToPointer() *DestinationWeaviateUpdateSchemasIndexingMode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasIndexingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "token":
+ *e = DestinationWeaviateUpdateSchemasIndexingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasIndexingMode: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateAPIToken - Authenticate using an API token (suitable for Weaviate Cloud)
+type DestinationWeaviateUpdateAPIToken struct {
+ mode *DestinationWeaviateUpdateSchemasIndexingMode `const:"token" json:"mode"`
+ // API Token for the Weaviate instance
+ Token string `json:"token"`
+}
+
+func (d DestinationWeaviateUpdateAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateAPIToken) GetMode() *DestinationWeaviateUpdateSchemasIndexingMode {
+ return DestinationWeaviateUpdateSchemasIndexingModeToken.ToPointer()
+}
+
+func (o *DestinationWeaviateUpdateAPIToken) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
+
+type DestinationWeaviateUpdateAuthenticationType string
+
+const (
+ DestinationWeaviateUpdateAuthenticationTypeDestinationWeaviateUpdateAPIToken DestinationWeaviateUpdateAuthenticationType = "destination-weaviate-update_API Token"
+ DestinationWeaviateUpdateAuthenticationTypeDestinationWeaviateUpdateUsernamePassword DestinationWeaviateUpdateAuthenticationType = "destination-weaviate-update_Username/Password"
+ DestinationWeaviateUpdateAuthenticationTypeNoAuthentication DestinationWeaviateUpdateAuthenticationType = "No Authentication"
+)
+
+type DestinationWeaviateUpdateAuthentication struct {
+ DestinationWeaviateUpdateAPIToken *DestinationWeaviateUpdateAPIToken
+ DestinationWeaviateUpdateUsernamePassword *DestinationWeaviateUpdateUsernamePassword
+ NoAuthentication *NoAuthentication
+
+ Type DestinationWeaviateUpdateAuthenticationType
+}
+
+func CreateDestinationWeaviateUpdateAuthenticationDestinationWeaviateUpdateAPIToken(destinationWeaviateUpdateAPIToken DestinationWeaviateUpdateAPIToken) DestinationWeaviateUpdateAuthentication {
+ typ := DestinationWeaviateUpdateAuthenticationTypeDestinationWeaviateUpdateAPIToken
+
+ return DestinationWeaviateUpdateAuthentication{
+ DestinationWeaviateUpdateAPIToken: &destinationWeaviateUpdateAPIToken,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateUpdateAuthenticationDestinationWeaviateUpdateUsernamePassword(destinationWeaviateUpdateUsernamePassword DestinationWeaviateUpdateUsernamePassword) DestinationWeaviateUpdateAuthentication {
+ typ := DestinationWeaviateUpdateAuthenticationTypeDestinationWeaviateUpdateUsernamePassword
+
+ return DestinationWeaviateUpdateAuthentication{
+ DestinationWeaviateUpdateUsernamePassword: &destinationWeaviateUpdateUsernamePassword,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateUpdateAuthenticationNoAuthentication(noAuthentication NoAuthentication) DestinationWeaviateUpdateAuthentication {
+ typ := DestinationWeaviateUpdateAuthenticationTypeNoAuthentication
+
+ return DestinationWeaviateUpdateAuthentication{
+ NoAuthentication: &noAuthentication,
+ Type: typ,
+ }
+}
+
+func (u *DestinationWeaviateUpdateAuthentication) UnmarshalJSON(data []byte) error {
+
+ noAuthentication := new(NoAuthentication)
+ if err := utils.UnmarshalJSON(data, &noAuthentication, "", true, true); err == nil {
+ u.NoAuthentication = noAuthentication
+ u.Type = DestinationWeaviateUpdateAuthenticationTypeNoAuthentication
+ return nil
+ }
+
+ destinationWeaviateUpdateAPIToken := new(DestinationWeaviateUpdateAPIToken)
+ if err := utils.UnmarshalJSON(data, &destinationWeaviateUpdateAPIToken, "", true, true); err == nil {
+ u.DestinationWeaviateUpdateAPIToken = destinationWeaviateUpdateAPIToken
+ u.Type = DestinationWeaviateUpdateAuthenticationTypeDestinationWeaviateUpdateAPIToken
+ return nil
+ }
+
+ destinationWeaviateUpdateUsernamePassword := new(DestinationWeaviateUpdateUsernamePassword)
+ if err := utils.UnmarshalJSON(data, &destinationWeaviateUpdateUsernamePassword, "", true, true); err == nil {
+ u.DestinationWeaviateUpdateUsernamePassword = destinationWeaviateUpdateUsernamePassword
+ u.Type = DestinationWeaviateUpdateAuthenticationTypeDestinationWeaviateUpdateUsernamePassword
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DestinationWeaviateUpdateAuthentication) MarshalJSON() ([]byte, error) {
+ if u.DestinationWeaviateUpdateAPIToken != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUpdateAPIToken, "", true)
+ }
+
+ if u.DestinationWeaviateUpdateUsernamePassword != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUpdateUsernamePassword, "", true)
+ }
+
+ if u.NoAuthentication != nil {
+ return utils.MarshalJSON(u.NoAuthentication, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// DefaultVectorizer - The vectorizer to use if new classes need to be created
+type DefaultVectorizer string
+
+const (
+ DefaultVectorizerNone DefaultVectorizer = "none"
+ DefaultVectorizerText2vecCohere DefaultVectorizer = "text2vec-cohere"
+ DefaultVectorizerText2vecHuggingface DefaultVectorizer = "text2vec-huggingface"
+ DefaultVectorizerText2vecOpenai DefaultVectorizer = "text2vec-openai"
+ DefaultVectorizerText2vecPalm DefaultVectorizer = "text2vec-palm"
+ DefaultVectorizerText2vecContextionary DefaultVectorizer = "text2vec-contextionary"
+ DefaultVectorizerText2vecTransformers DefaultVectorizer = "text2vec-transformers"
+ DefaultVectorizerText2vecGpt4all DefaultVectorizer = "text2vec-gpt4all"
+)
+
+func (e DefaultVectorizer) ToPointer() *DefaultVectorizer {
+ return &e
+}
+
+func (e *DefaultVectorizer) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "none":
+ fallthrough
+ case "text2vec-cohere":
+ fallthrough
+ case "text2vec-huggingface":
+ fallthrough
+ case "text2vec-openai":
+ fallthrough
+ case "text2vec-palm":
+ fallthrough
+ case "text2vec-contextionary":
+ fallthrough
+ case "text2vec-transformers":
+ fallthrough
+ case "text2vec-gpt4all":
+ *e = DefaultVectorizer(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DefaultVectorizer: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateIndexing - Indexing configuration
+type DestinationWeaviateUpdateIndexing struct {
+ // Additional HTTP headers to send with every request.
+ AdditionalHeaders []Header `json:"additional_headers,omitempty"`
+ // Authentication method
+ Auth DestinationWeaviateUpdateAuthentication `json:"auth"`
+ // The number of records to send to Weaviate in each batch
+ BatchSize *int64 `default:"128" json:"batch_size"`
+ // The vectorizer to use if new classes need to be created
+ DefaultVectorizer *DefaultVectorizer `default:"none" json:"default_vectorizer"`
+ // The public endpoint of the Weaviate cluster.
+ Host string `json:"host"`
+ // The field in the object that contains the embedded text
+ TextField *string `default:"text" json:"text_field"`
+}
+
+func (d DestinationWeaviateUpdateIndexing) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateIndexing) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateIndexing) GetAdditionalHeaders() []Header {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalHeaders
+}
+
+func (o *DestinationWeaviateUpdateIndexing) GetAuth() DestinationWeaviateUpdateAuthentication {
+ if o == nil {
+ return DestinationWeaviateUpdateAuthentication{}
+ }
+ return o.Auth
+}
+
+func (o *DestinationWeaviateUpdateIndexing) GetBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchSize
+}
+
+func (o *DestinationWeaviateUpdateIndexing) GetDefaultVectorizer() *DefaultVectorizer {
+ if o == nil {
+ return nil
+ }
+ return o.DefaultVectorizer
+}
+
+func (o *DestinationWeaviateUpdateIndexing) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *DestinationWeaviateUpdateIndexing) GetTextField() *string {
+ if o == nil {
+ return nil
+ }
+ return o.TextField
+}
+
+type DestinationWeaviateUpdateFieldNameMappingConfigModel struct {
+ // The field name in the source
+ FromField string `json:"from_field"`
+ // The field name to use in the destination
+ ToField string `json:"to_field"`
+}
+
+func (o *DestinationWeaviateUpdateFieldNameMappingConfigModel) GetFromField() string {
+ if o == nil {
+ return ""
+ }
+ return o.FromField
+}
+
+func (o *DestinationWeaviateUpdateFieldNameMappingConfigModel) GetToField() string {
+ if o == nil {
+ return ""
+ }
+ return o.ToField
+}
+
+// DestinationWeaviateUpdateLanguage - Split code in suitable places based on the programming language
+type DestinationWeaviateUpdateLanguage string
+
+const (
+ DestinationWeaviateUpdateLanguageCpp DestinationWeaviateUpdateLanguage = "cpp"
+ DestinationWeaviateUpdateLanguageGo DestinationWeaviateUpdateLanguage = "go"
+ DestinationWeaviateUpdateLanguageJava DestinationWeaviateUpdateLanguage = "java"
+ DestinationWeaviateUpdateLanguageJs DestinationWeaviateUpdateLanguage = "js"
+ DestinationWeaviateUpdateLanguagePhp DestinationWeaviateUpdateLanguage = "php"
+ DestinationWeaviateUpdateLanguageProto DestinationWeaviateUpdateLanguage = "proto"
+ DestinationWeaviateUpdateLanguagePython DestinationWeaviateUpdateLanguage = "python"
+ DestinationWeaviateUpdateLanguageRst DestinationWeaviateUpdateLanguage = "rst"
+ DestinationWeaviateUpdateLanguageRuby DestinationWeaviateUpdateLanguage = "ruby"
+ DestinationWeaviateUpdateLanguageRust DestinationWeaviateUpdateLanguage = "rust"
+ DestinationWeaviateUpdateLanguageScala DestinationWeaviateUpdateLanguage = "scala"
+ DestinationWeaviateUpdateLanguageSwift DestinationWeaviateUpdateLanguage = "swift"
+ DestinationWeaviateUpdateLanguageMarkdown DestinationWeaviateUpdateLanguage = "markdown"
+ DestinationWeaviateUpdateLanguageLatex DestinationWeaviateUpdateLanguage = "latex"
+ DestinationWeaviateUpdateLanguageHTML DestinationWeaviateUpdateLanguage = "html"
+ DestinationWeaviateUpdateLanguageSol DestinationWeaviateUpdateLanguage = "sol"
+)
+
+func (e DestinationWeaviateUpdateLanguage) ToPointer() *DestinationWeaviateUpdateLanguage {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateLanguage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "cpp":
+ fallthrough
+ case "go":
+ fallthrough
+ case "java":
+ fallthrough
+ case "js":
+ fallthrough
+ case "php":
+ fallthrough
+ case "proto":
+ fallthrough
+ case "python":
+ fallthrough
+ case "rst":
+ fallthrough
+ case "ruby":
+ fallthrough
+ case "rust":
+ fallthrough
+ case "scala":
+ fallthrough
+ case "swift":
+ fallthrough
+ case "markdown":
+ fallthrough
+ case "latex":
+ fallthrough
+ case "html":
+ fallthrough
+ case "sol":
+ *e = DestinationWeaviateUpdateLanguage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateLanguage: %v", v)
+ }
+}
+
+type DestinationWeaviateUpdateSchemasProcessingTextSplitterTextSplitterMode string
+
+const (
+ DestinationWeaviateUpdateSchemasProcessingTextSplitterTextSplitterModeCode DestinationWeaviateUpdateSchemasProcessingTextSplitterTextSplitterMode = "code"
+)
+
+func (e DestinationWeaviateUpdateSchemasProcessingTextSplitterTextSplitterMode) ToPointer() *DestinationWeaviateUpdateSchemasProcessingTextSplitterTextSplitterMode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasProcessingTextSplitterTextSplitterMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "code":
+ *e = DestinationWeaviateUpdateSchemasProcessingTextSplitterTextSplitterMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasProcessingTextSplitterTextSplitterMode: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateByProgrammingLanguage - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
+type DestinationWeaviateUpdateByProgrammingLanguage struct {
+ // Split code in suitable places based on the programming language
+ Language DestinationWeaviateUpdateLanguage `json:"language"`
+ mode *DestinationWeaviateUpdateSchemasProcessingTextSplitterTextSplitterMode `const:"code" json:"mode"`
+}
+
+func (d DestinationWeaviateUpdateByProgrammingLanguage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateByProgrammingLanguage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateByProgrammingLanguage) GetLanguage() DestinationWeaviateUpdateLanguage {
+ if o == nil {
+ return DestinationWeaviateUpdateLanguage("")
+ }
+ return o.Language
+}
+
+func (o *DestinationWeaviateUpdateByProgrammingLanguage) GetMode() *DestinationWeaviateUpdateSchemasProcessingTextSplitterTextSplitterMode {
+ return DestinationWeaviateUpdateSchemasProcessingTextSplitterTextSplitterModeCode.ToPointer()
+}
+
+type DestinationWeaviateUpdateSchemasProcessingTextSplitterMode string
+
+const (
+ DestinationWeaviateUpdateSchemasProcessingTextSplitterModeMarkdown DestinationWeaviateUpdateSchemasProcessingTextSplitterMode = "markdown"
+)
+
+func (e DestinationWeaviateUpdateSchemasProcessingTextSplitterMode) ToPointer() *DestinationWeaviateUpdateSchemasProcessingTextSplitterMode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasProcessingTextSplitterMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "markdown":
+ *e = DestinationWeaviateUpdateSchemasProcessingTextSplitterMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasProcessingTextSplitterMode: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateByMarkdownHeader - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
+type DestinationWeaviateUpdateByMarkdownHeader struct {
+ mode *DestinationWeaviateUpdateSchemasProcessingTextSplitterMode `const:"markdown" json:"mode"`
+ // Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points
+ SplitLevel *int64 `default:"1" json:"split_level"`
+}
+
+func (d DestinationWeaviateUpdateByMarkdownHeader) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateByMarkdownHeader) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateByMarkdownHeader) GetMode() *DestinationWeaviateUpdateSchemasProcessingTextSplitterMode {
+ return DestinationWeaviateUpdateSchemasProcessingTextSplitterModeMarkdown.ToPointer()
+}
+
+func (o *DestinationWeaviateUpdateByMarkdownHeader) GetSplitLevel() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SplitLevel
+}
+
+type DestinationWeaviateUpdateSchemasProcessingMode string
+
+const (
+ DestinationWeaviateUpdateSchemasProcessingModeSeparator DestinationWeaviateUpdateSchemasProcessingMode = "separator"
+)
+
+func (e DestinationWeaviateUpdateSchemasProcessingMode) ToPointer() *DestinationWeaviateUpdateSchemasProcessingMode {
+ return &e
+}
+
+func (e *DestinationWeaviateUpdateSchemasProcessingMode) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "separator":
+ *e = DestinationWeaviateUpdateSchemasProcessingMode(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for DestinationWeaviateUpdateSchemasProcessingMode: %v", v)
+ }
+}
+
+// DestinationWeaviateUpdateBySeparator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
+type DestinationWeaviateUpdateBySeparator struct {
+ // Whether to keep the separator in the resulting chunks
+ KeepSeparator *bool `default:"false" json:"keep_separator"`
+ mode *DestinationWeaviateUpdateSchemasProcessingMode `const:"separator" json:"mode"`
+ // List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
+ Separators []string `json:"separators,omitempty"`
+}
+
+func (d DestinationWeaviateUpdateBySeparator) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateBySeparator) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateBySeparator) GetKeepSeparator() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.KeepSeparator
+}
+
+func (o *DestinationWeaviateUpdateBySeparator) GetMode() *DestinationWeaviateUpdateSchemasProcessingMode {
+ return DestinationWeaviateUpdateSchemasProcessingModeSeparator.ToPointer()
+}
+
+func (o *DestinationWeaviateUpdateBySeparator) GetSeparators() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Separators
+}
+
+type DestinationWeaviateUpdateTextSplitterType string
+
+const (
+ DestinationWeaviateUpdateTextSplitterTypeDestinationWeaviateUpdateBySeparator DestinationWeaviateUpdateTextSplitterType = "destination-weaviate-update_By Separator"
+ DestinationWeaviateUpdateTextSplitterTypeDestinationWeaviateUpdateByMarkdownHeader DestinationWeaviateUpdateTextSplitterType = "destination-weaviate-update_By Markdown header"
+ DestinationWeaviateUpdateTextSplitterTypeDestinationWeaviateUpdateByProgrammingLanguage DestinationWeaviateUpdateTextSplitterType = "destination-weaviate-update_By Programming Language"
+)
+
+type DestinationWeaviateUpdateTextSplitter struct {
+ DestinationWeaviateUpdateBySeparator *DestinationWeaviateUpdateBySeparator
+ DestinationWeaviateUpdateByMarkdownHeader *DestinationWeaviateUpdateByMarkdownHeader
+ DestinationWeaviateUpdateByProgrammingLanguage *DestinationWeaviateUpdateByProgrammingLanguage
+
+ Type DestinationWeaviateUpdateTextSplitterType
+}
+
+func CreateDestinationWeaviateUpdateTextSplitterDestinationWeaviateUpdateBySeparator(destinationWeaviateUpdateBySeparator DestinationWeaviateUpdateBySeparator) DestinationWeaviateUpdateTextSplitter {
+ typ := DestinationWeaviateUpdateTextSplitterTypeDestinationWeaviateUpdateBySeparator
+
+ return DestinationWeaviateUpdateTextSplitter{
+ DestinationWeaviateUpdateBySeparator: &destinationWeaviateUpdateBySeparator,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateUpdateTextSplitterDestinationWeaviateUpdateByMarkdownHeader(destinationWeaviateUpdateByMarkdownHeader DestinationWeaviateUpdateByMarkdownHeader) DestinationWeaviateUpdateTextSplitter {
+ typ := DestinationWeaviateUpdateTextSplitterTypeDestinationWeaviateUpdateByMarkdownHeader
+
+ return DestinationWeaviateUpdateTextSplitter{
+ DestinationWeaviateUpdateByMarkdownHeader: &destinationWeaviateUpdateByMarkdownHeader,
+ Type: typ,
+ }
+}
+
+func CreateDestinationWeaviateUpdateTextSplitterDestinationWeaviateUpdateByProgrammingLanguage(destinationWeaviateUpdateByProgrammingLanguage DestinationWeaviateUpdateByProgrammingLanguage) DestinationWeaviateUpdateTextSplitter {
+ typ := DestinationWeaviateUpdateTextSplitterTypeDestinationWeaviateUpdateByProgrammingLanguage
+
+ return DestinationWeaviateUpdateTextSplitter{
+ DestinationWeaviateUpdateByProgrammingLanguage: &destinationWeaviateUpdateByProgrammingLanguage,
+ Type: typ,
+ }
+}
+
+func (u *DestinationWeaviateUpdateTextSplitter) UnmarshalJSON(data []byte) error {
+
+ destinationWeaviateUpdateByMarkdownHeader := new(DestinationWeaviateUpdateByMarkdownHeader)
+ if err := utils.UnmarshalJSON(data, &destinationWeaviateUpdateByMarkdownHeader, "", true, true); err == nil {
+ u.DestinationWeaviateUpdateByMarkdownHeader = destinationWeaviateUpdateByMarkdownHeader
+ u.Type = DestinationWeaviateUpdateTextSplitterTypeDestinationWeaviateUpdateByMarkdownHeader
+ return nil
+ }
+
+ destinationWeaviateUpdateByProgrammingLanguage := new(DestinationWeaviateUpdateByProgrammingLanguage)
+ if err := utils.UnmarshalJSON(data, &destinationWeaviateUpdateByProgrammingLanguage, "", true, true); err == nil {
+ u.DestinationWeaviateUpdateByProgrammingLanguage = destinationWeaviateUpdateByProgrammingLanguage
+ u.Type = DestinationWeaviateUpdateTextSplitterTypeDestinationWeaviateUpdateByProgrammingLanguage
+ return nil
+ }
+
+ destinationWeaviateUpdateBySeparator := new(DestinationWeaviateUpdateBySeparator)
+ if err := utils.UnmarshalJSON(data, &destinationWeaviateUpdateBySeparator, "", true, true); err == nil {
+ u.DestinationWeaviateUpdateBySeparator = destinationWeaviateUpdateBySeparator
+ u.Type = DestinationWeaviateUpdateTextSplitterTypeDestinationWeaviateUpdateBySeparator
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u DestinationWeaviateUpdateTextSplitter) MarshalJSON() ([]byte, error) {
+ if u.DestinationWeaviateUpdateBySeparator != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUpdateBySeparator, "", true)
+ }
+
+ if u.DestinationWeaviateUpdateByMarkdownHeader != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUpdateByMarkdownHeader, "", true)
+ }
+
+ if u.DestinationWeaviateUpdateByProgrammingLanguage != nil {
+ return utils.MarshalJSON(u.DestinationWeaviateUpdateByProgrammingLanguage, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type DestinationWeaviateUpdateProcessingConfigModel struct {
+ // Size of overlap between chunks in tokens to store in vector store to better capture relevant context
+ ChunkOverlap *int64 `default:"0" json:"chunk_overlap"`
+	// Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
+ ChunkSize int64 `json:"chunk_size"`
+ // List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
+ FieldNameMappings []DestinationWeaviateUpdateFieldNameMappingConfigModel `json:"field_name_mappings,omitempty"`
+ // List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
+ MetadataFields []string `json:"metadata_fields,omitempty"`
+ // List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array.
+ TextFields []string `json:"text_fields,omitempty"`
+ // Split text fields into chunks based on the specified method.
+ TextSplitter *DestinationWeaviateUpdateTextSplitter `json:"text_splitter,omitempty"`
+}
+
+func (d DestinationWeaviateUpdateProcessingConfigModel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationWeaviateUpdateProcessingConfigModel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationWeaviateUpdateProcessingConfigModel) GetChunkOverlap() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ChunkOverlap
+}
+
+func (o *DestinationWeaviateUpdateProcessingConfigModel) GetChunkSize() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.ChunkSize
+}
+
+func (o *DestinationWeaviateUpdateProcessingConfigModel) GetFieldNameMappings() []DestinationWeaviateUpdateFieldNameMappingConfigModel {
+ if o == nil {
+ return nil
+ }
+ return o.FieldNameMappings
+}
+
+func (o *DestinationWeaviateUpdateProcessingConfigModel) GetMetadataFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.MetadataFields
+}
+
+func (o *DestinationWeaviateUpdateProcessingConfigModel) GetTextFields() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TextFields
+}
+
+func (o *DestinationWeaviateUpdateProcessingConfigModel) GetTextSplitter() *DestinationWeaviateUpdateTextSplitter {
+ if o == nil {
+ return nil
+ }
+ return o.TextSplitter
+}
+
+type DestinationWeaviateUpdate struct {
+ // Embedding configuration
+ Embedding DestinationWeaviateUpdateEmbedding `json:"embedding"`
+ // Indexing configuration
+ Indexing DestinationWeaviateUpdateIndexing `json:"indexing"`
+ Processing DestinationWeaviateUpdateProcessingConfigModel `json:"processing"`
+}
+
+func (o *DestinationWeaviateUpdate) GetEmbedding() DestinationWeaviateUpdateEmbedding {
+ if o == nil {
+ return DestinationWeaviateUpdateEmbedding{}
+ }
+ return o.Embedding
+}
+
+func (o *DestinationWeaviateUpdate) GetIndexing() DestinationWeaviateUpdateIndexing {
+ if o == nil {
+ return DestinationWeaviateUpdateIndexing{}
+ }
+ return o.Indexing
+}
+
+func (o *DestinationWeaviateUpdate) GetProcessing() DestinationWeaviateUpdateProcessingConfigModel {
+ if o == nil {
+ return DestinationWeaviateUpdateProcessingConfigModel{}
+ }
+ return o.Processing
+}
diff --git a/internal/sdk/pkg/models/shared/destinationxata.go b/internal/sdk/pkg/models/shared/destinationxata.go
old mode 100755
new mode 100644
index a7b20a6b7..b1042e27e
--- a/internal/sdk/pkg/models/shared/destinationxata.go
+++ b/internal/sdk/pkg/models/shared/destinationxata.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type DestinationXataXata string
+type Xata string
const (
- DestinationXataXataXata DestinationXataXata = "xata"
+ XataXata Xata = "xata"
)
-func (e DestinationXataXata) ToPointer() *DestinationXataXata {
+func (e Xata) ToPointer() *Xata {
return &e
}
-func (e *DestinationXataXata) UnmarshalJSON(data []byte) error {
+func (e *Xata) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "xata":
- *e = DestinationXataXata(v)
+ *e = Xata(v)
return nil
default:
- return fmt.Errorf("invalid value for DestinationXataXata: %v", v)
+ return fmt.Errorf("invalid value for Xata: %v", v)
}
}
@@ -35,6 +36,35 @@ type DestinationXata struct {
// API Key to connect.
APIKey string `json:"api_key"`
// URL pointing to your workspace.
- DbURL string `json:"db_url"`
- DestinationType DestinationXataXata `json:"destinationType"`
+ DbURL string `json:"db_url"`
+ destinationType Xata `const:"xata" json:"destinationType"`
+}
+
+func (d DestinationXata) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DestinationXata) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DestinationXata) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *DestinationXata) GetDbURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.DbURL
+}
+
+func (o *DestinationXata) GetDestinationType() Xata {
+ return XataXata
}
diff --git a/internal/sdk/pkg/models/shared/destinationxatacreaterequest.go b/internal/sdk/pkg/models/shared/destinationxatacreaterequest.go
old mode 100755
new mode 100644
index 4641587bb..339017852
--- a/internal/sdk/pkg/models/shared/destinationxatacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/destinationxatacreaterequest.go
@@ -4,6 +4,37 @@ package shared
type DestinationXataCreateRequest struct {
Configuration DestinationXata `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ // The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the destination e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *DestinationXataCreateRequest) GetConfiguration() DestinationXata {
+ if o == nil {
+ return DestinationXata{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationXataCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *DestinationXataCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationXataCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/destinationxataputrequest.go b/internal/sdk/pkg/models/shared/destinationxataputrequest.go
old mode 100755
new mode 100644
index b7e7edfc3..acfbd1e40
--- a/internal/sdk/pkg/models/shared/destinationxataputrequest.go
+++ b/internal/sdk/pkg/models/shared/destinationxataputrequest.go
@@ -7,3 +7,24 @@ type DestinationXataPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *DestinationXataPutRequest) GetConfiguration() DestinationXataUpdate {
+ if o == nil {
+ return DestinationXataUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *DestinationXataPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *DestinationXataPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/destinationxataupdate.go b/internal/sdk/pkg/models/shared/destinationxataupdate.go
old mode 100755
new mode 100644
index 7805ada36..859b2ddbf
--- a/internal/sdk/pkg/models/shared/destinationxataupdate.go
+++ b/internal/sdk/pkg/models/shared/destinationxataupdate.go
@@ -8,3 +8,17 @@ type DestinationXataUpdate struct {
// URL pointing to your workspace.
DbURL string `json:"db_url"`
}
+
+func (o *DestinationXataUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *DestinationXataUpdate) GetDbURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.DbURL
+}
diff --git a/internal/sdk/pkg/models/shared/geographyenum.go b/internal/sdk/pkg/models/shared/geographyenum.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/geographyenumnodefault.go b/internal/sdk/pkg/models/shared/geographyenumnodefault.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/initiateoauthrequest.go b/internal/sdk/pkg/models/shared/initiateoauthrequest.go
old mode 100755
new mode 100644
index e2f2bc8a4..299a6f3a9
--- a/internal/sdk/pkg/models/shared/initiateoauthrequest.go
+++ b/internal/sdk/pkg/models/shared/initiateoauthrequest.go
@@ -4,12 +4,49 @@ package shared
// InitiateOauthRequest - POST body for initiating OAuth via the public API
type InitiateOauthRequest struct {
- // The name of the source to authenticate to
- Name string `json:"name"`
+ // The name of the source to authenticate to. Deprecated - use sourceType instead.
+ Name *string `json:"name,omitempty"`
// Arbitrary vars to pass for OAuth depending on what the source/destination spec requires.
OAuthInputConfiguration *OAuthInputConfiguration `json:"oAuthInputConfiguration,omitempty"`
// The URL to redirect the user to with the OAuth secret stored in the secret_id query string parameter after authentication is complete.
RedirectURL string `json:"redirectUrl"`
+ // The name of the source to authenticate to
+ SourceType *string `json:"sourceType,omitempty"`
// The workspace to create the secret and eventually the full source.
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *InitiateOauthRequest) GetName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Name
+}
+
+func (o *InitiateOauthRequest) GetOAuthInputConfiguration() *OAuthInputConfiguration {
+ if o == nil {
+ return nil
+ }
+ return o.OAuthInputConfiguration
+}
+
+func (o *InitiateOauthRequest) GetRedirectURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.RedirectURL
+}
+
+func (o *InitiateOauthRequest) GetSourceType() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SourceType
+}
+
+func (o *InitiateOauthRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/jobcreaterequest.go b/internal/sdk/pkg/models/shared/jobcreaterequest.go
old mode 100755
new mode 100644
index 9ebedafab..217e820df
--- a/internal/sdk/pkg/models/shared/jobcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/jobcreaterequest.go
@@ -8,3 +8,17 @@ type JobCreateRequest struct {
// Enum that describes the different types of jobs that the platform runs.
JobType JobTypeEnum `json:"jobType"`
}
+
+func (o *JobCreateRequest) GetConnectionID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConnectionID
+}
+
+func (o *JobCreateRequest) GetJobType() JobTypeEnum {
+ if o == nil {
+ return JobTypeEnum("")
+ }
+ return o.JobType
+}
diff --git a/internal/sdk/pkg/models/shared/jobresponse.go b/internal/sdk/pkg/models/shared/jobresponse.go
old mode 100755
new mode 100644
index dd1cae007..17f2f4fd6
--- a/internal/sdk/pkg/models/shared/jobresponse.go
+++ b/internal/sdk/pkg/models/shared/jobresponse.go
@@ -16,3 +16,66 @@ type JobResponse struct {
StartTime string `json:"startTime"`
Status JobStatusEnum `json:"status"`
}
+
+func (o *JobResponse) GetBytesSynced() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BytesSynced
+}
+
+func (o *JobResponse) GetConnectionID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConnectionID
+}
+
+func (o *JobResponse) GetDuration() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Duration
+}
+
+func (o *JobResponse) GetJobID() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.JobID
+}
+
+func (o *JobResponse) GetJobType() JobTypeEnum {
+ if o == nil {
+ return JobTypeEnum("")
+ }
+ return o.JobType
+}
+
+func (o *JobResponse) GetLastUpdatedAt() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LastUpdatedAt
+}
+
+func (o *JobResponse) GetRowsSynced() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.RowsSynced
+}
+
+func (o *JobResponse) GetStartTime() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartTime
+}
+
+func (o *JobResponse) GetStatus() JobStatusEnum {
+ if o == nil {
+ return JobStatusEnum("")
+ }
+ return o.Status
+}
diff --git a/internal/sdk/pkg/models/shared/jobsresponse.go b/internal/sdk/pkg/models/shared/jobsresponse.go
old mode 100755
new mode 100644
index 26501596d..6b9c0eaab
--- a/internal/sdk/pkg/models/shared/jobsresponse.go
+++ b/internal/sdk/pkg/models/shared/jobsresponse.go
@@ -7,3 +7,24 @@ type JobsResponse struct {
Next *string `json:"next,omitempty"`
Previous *string `json:"previous,omitempty"`
}
+
+func (o *JobsResponse) GetData() []JobResponse {
+ if o == nil {
+ return []JobResponse{}
+ }
+ return o.Data
+}
+
+func (o *JobsResponse) GetNext() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Next
+}
+
+func (o *JobsResponse) GetPrevious() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Previous
+}
diff --git a/internal/sdk/pkg/models/shared/jobstatusenum.go b/internal/sdk/pkg/models/shared/jobstatusenum.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/jobtypeenum.go b/internal/sdk/pkg/models/shared/jobtypeenum.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/namespacedefinitionenum.go b/internal/sdk/pkg/models/shared/namespacedefinitionenum.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/namespacedefinitionenumnodefault.go b/internal/sdk/pkg/models/shared/namespacedefinitionenumnodefault.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/nonbreakingschemaupdatesbehaviorenum.go b/internal/sdk/pkg/models/shared/nonbreakingschemaupdatesbehaviorenum.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/nonbreakingschemaupdatesbehaviorenumnodefault.go b/internal/sdk/pkg/models/shared/nonbreakingschemaupdatesbehaviorenumnodefault.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/oauthcredentialsconfiguration.go b/internal/sdk/pkg/models/shared/oauthcredentialsconfiguration.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/oauthinputconfiguration.go b/internal/sdk/pkg/models/shared/oauthinputconfiguration.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/scheduletypeenum.go b/internal/sdk/pkg/models/shared/scheduletypeenum.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/scheduletypewithbasicenum.go b/internal/sdk/pkg/models/shared/scheduletypewithbasicenum.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/schemebasicauth.go b/internal/sdk/pkg/models/shared/schemebasicauth.go
new file mode 100644
index 000000000..a0c4bfa90
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/schemebasicauth.go
@@ -0,0 +1,22 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SchemeBasicAuth struct {
+ Password string `security:"name=password"`
+ Username string `security:"name=username"`
+}
+
+func (o *SchemeBasicAuth) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SchemeBasicAuth) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/security.go b/internal/sdk/pkg/models/shared/security.go
old mode 100755
new mode 100644
index cc19e48f8..6967742d7
--- a/internal/sdk/pkg/models/shared/security.go
+++ b/internal/sdk/pkg/models/shared/security.go
@@ -2,12 +2,21 @@
package shared
-type SchemeBasicAuth struct {
- Password string `security:"name=password"`
- Username string `security:"name=username"`
-}
-
type Security struct {
BasicAuth *SchemeBasicAuth `security:"scheme,type=http,subtype=basic"`
BearerAuth *string `security:"scheme,type=http,subtype=bearer,name=Authorization"`
}
+
+func (o *Security) GetBasicAuth() *SchemeBasicAuth {
+ if o == nil {
+ return nil
+ }
+ return o.BasicAuth
+}
+
+func (o *Security) GetBearerAuth() *string {
+ if o == nil {
+ return nil
+ }
+ return o.BearerAuth
+}
diff --git a/internal/sdk/pkg/models/shared/sourceaha.go b/internal/sdk/pkg/models/shared/sourceaha.go
old mode 100755
new mode 100644
index dc0b144dc..75aa711cd
--- a/internal/sdk/pkg/models/shared/sourceaha.go
+++ b/internal/sdk/pkg/models/shared/sourceaha.go
@@ -5,36 +5,66 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceAhaAha string
+type Aha string
const (
- SourceAhaAhaAha SourceAhaAha = "aha"
+ AhaAha Aha = "aha"
)
-func (e SourceAhaAha) ToPointer() *SourceAhaAha {
+func (e Aha) ToPointer() *Aha {
return &e
}
-func (e *SourceAhaAha) UnmarshalJSON(data []byte) error {
+func (e *Aha) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "aha":
- *e = SourceAhaAha(v)
+ *e = Aha(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAhaAha: %v", v)
+ return fmt.Errorf("invalid value for Aha: %v", v)
}
}
type SourceAha struct {
// API Key
- APIKey string `json:"api_key"`
- SourceType SourceAhaAha `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Aha `const:"aha" json:"sourceType"`
// URL
URL string `json:"url"`
}
+
+func (s SourceAha) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAha) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAha) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceAha) GetSourceType() Aha {
+ return AhaAha
+}
+
+func (o *SourceAha) GetURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.URL
+}
diff --git a/internal/sdk/pkg/models/shared/sourceahacreaterequest.go b/internal/sdk/pkg/models/shared/sourceahacreaterequest.go
old mode 100755
new mode 100644
index e3745d85f..b8130bb7e
--- a/internal/sdk/pkg/models/shared/sourceahacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceahacreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAhaCreateRequest struct {
Configuration SourceAha `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAhaCreateRequest) GetConfiguration() SourceAha {
+ if o == nil {
+ return SourceAha{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAhaCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAhaCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAhaCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAhaCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceahaputrequest.go b/internal/sdk/pkg/models/shared/sourceahaputrequest.go
old mode 100755
new mode 100644
index 48cab7b2a..9a61da97f
--- a/internal/sdk/pkg/models/shared/sourceahaputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceahaputrequest.go
@@ -7,3 +7,24 @@ type SourceAhaPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAhaPutRequest) GetConfiguration() SourceAhaUpdate {
+ if o == nil {
+ return SourceAhaUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAhaPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAhaPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceahaupdate.go b/internal/sdk/pkg/models/shared/sourceahaupdate.go
old mode 100755
new mode 100644
index 24b75f25b..18321094e
--- a/internal/sdk/pkg/models/shared/sourceahaupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceahaupdate.go
@@ -8,3 +8,17 @@ type SourceAhaUpdate struct {
// URL
URL string `json:"url"`
}
+
+func (o *SourceAhaUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceAhaUpdate) GetURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.URL
+}
diff --git a/internal/sdk/pkg/models/shared/sourceaircall.go b/internal/sdk/pkg/models/shared/sourceaircall.go
old mode 100755
new mode 100644
index a37e1cc7f..43dc450c8
--- a/internal/sdk/pkg/models/shared/sourceaircall.go
+++ b/internal/sdk/pkg/models/shared/sourceaircall.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceAircallAircall string
+type Aircall string
const (
- SourceAircallAircallAircall SourceAircallAircall = "aircall"
+ AircallAircall Aircall = "aircall"
)
-func (e SourceAircallAircall) ToPointer() *SourceAircallAircall {
+func (e Aircall) ToPointer() *Aircall {
return &e
}
-func (e *SourceAircallAircall) UnmarshalJSON(data []byte) error {
+func (e *Aircall) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "aircall":
- *e = SourceAircallAircall(v)
+ *e = Aircall(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAircallAircall: %v", v)
+ return fmt.Errorf("invalid value for Aircall: %v", v)
}
}
@@ -36,8 +37,44 @@ type SourceAircall struct {
// App ID found at settings https://dashboard.aircall.io/integrations/api-keys
APIID string `json:"api_id"`
// App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)
- APIToken string `json:"api_token"`
- SourceType SourceAircallAircall `json:"sourceType"`
+ APIToken string `json:"api_token"`
+ sourceType Aircall `const:"aircall" json:"sourceType"`
// Date time filter for incremental filter, Specify which date to extract from.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceAircall) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAircall) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAircall) GetAPIID() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIID
+}
+
+func (o *SourceAircall) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceAircall) GetSourceType() Aircall {
+ return AircallAircall
+}
+
+func (o *SourceAircall) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceaircallcreaterequest.go b/internal/sdk/pkg/models/shared/sourceaircallcreaterequest.go
old mode 100755
new mode 100644
index 9eb22bb4a..e247715b6
--- a/internal/sdk/pkg/models/shared/sourceaircallcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceaircallcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAircallCreateRequest struct {
Configuration SourceAircall `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAircallCreateRequest) GetConfiguration() SourceAircall {
+ if o == nil {
+ return SourceAircall{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAircallCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAircallCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAircallCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAircallCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceaircallputrequest.go b/internal/sdk/pkg/models/shared/sourceaircallputrequest.go
old mode 100755
new mode 100644
index 0b2fb32ef..eb1fd3b04
--- a/internal/sdk/pkg/models/shared/sourceaircallputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceaircallputrequest.go
@@ -7,3 +7,24 @@ type SourceAircallPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAircallPutRequest) GetConfiguration() SourceAircallUpdate {
+ if o == nil {
+ return SourceAircallUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAircallPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAircallPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceaircallupdate.go b/internal/sdk/pkg/models/shared/sourceaircallupdate.go
old mode 100755
new mode 100644
index 57f3aac5b..26b93fc71
--- a/internal/sdk/pkg/models/shared/sourceaircallupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceaircallupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -14,3 +15,35 @@ type SourceAircallUpdate struct {
// Date time filter for incremental filter, Specify which date to extract from.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceAircallUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAircallUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAircallUpdate) GetAPIID() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIID
+}
+
+func (o *SourceAircallUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceAircallUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceairtable.go b/internal/sdk/pkg/models/shared/sourceairtable.go
old mode 100755
new mode 100644
index 21eda702c..5b3a36d81
--- a/internal/sdk/pkg/models/shared/sourceairtable.go
+++ b/internal/sdk/pkg/models/shared/sourceairtable.go
@@ -3,71 +3,93 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceAirtableAuthenticationPersonalAccessTokenAuthMethod string
+type SourceAirtableSchemasAuthMethod string
const (
- SourceAirtableAuthenticationPersonalAccessTokenAuthMethodAPIKey SourceAirtableAuthenticationPersonalAccessTokenAuthMethod = "api_key"
+ SourceAirtableSchemasAuthMethodAPIKey SourceAirtableSchemasAuthMethod = "api_key"
)
-func (e SourceAirtableAuthenticationPersonalAccessTokenAuthMethod) ToPointer() *SourceAirtableAuthenticationPersonalAccessTokenAuthMethod {
+func (e SourceAirtableSchemasAuthMethod) ToPointer() *SourceAirtableSchemasAuthMethod {
return &e
}
-func (e *SourceAirtableAuthenticationPersonalAccessTokenAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAirtableSchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_key":
- *e = SourceAirtableAuthenticationPersonalAccessTokenAuthMethod(v)
+ *e = SourceAirtableSchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAirtableAuthenticationPersonalAccessTokenAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAirtableSchemasAuthMethod: %v", v)
}
}
-type SourceAirtableAuthenticationPersonalAccessToken struct {
+type SourceAirtablePersonalAccessToken struct {
// The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token.
- APIKey string `json:"api_key"`
- AuthMethod *SourceAirtableAuthenticationPersonalAccessTokenAuthMethod `json:"auth_method,omitempty"`
+ APIKey string `json:"api_key"`
+ authMethod *SourceAirtableSchemasAuthMethod `const:"api_key" json:"auth_method,omitempty"`
}
-type SourceAirtableAuthenticationOAuth20AuthMethod string
+func (s SourceAirtablePersonalAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAirtablePersonalAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAirtablePersonalAccessToken) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceAirtablePersonalAccessToken) GetAuthMethod() *SourceAirtableSchemasAuthMethod {
+ return SourceAirtableSchemasAuthMethodAPIKey.ToPointer()
+}
+
+type SourceAirtableAuthMethod string
const (
- SourceAirtableAuthenticationOAuth20AuthMethodOauth20 SourceAirtableAuthenticationOAuth20AuthMethod = "oauth2.0"
+ SourceAirtableAuthMethodOauth20 SourceAirtableAuthMethod = "oauth2.0"
)
-func (e SourceAirtableAuthenticationOAuth20AuthMethod) ToPointer() *SourceAirtableAuthenticationOAuth20AuthMethod {
+func (e SourceAirtableAuthMethod) ToPointer() *SourceAirtableAuthMethod {
return &e
}
-func (e *SourceAirtableAuthenticationOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAirtableAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceAirtableAuthenticationOAuth20AuthMethod(v)
+ *e = SourceAirtableAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAirtableAuthenticationOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAirtableAuthMethod: %v", v)
}
}
-type SourceAirtableAuthenticationOAuth20 struct {
+type SourceAirtableOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken *string `json:"access_token,omitempty"`
- AuthMethod *SourceAirtableAuthenticationOAuth20AuthMethod `json:"auth_method,omitempty"`
+ AccessToken *string `json:"access_token,omitempty"`
+ authMethod *SourceAirtableAuthMethod `const:"oauth2.0" json:"auth_method,omitempty"`
// The client ID of the Airtable developer application.
ClientID string `json:"client_id"`
// The client secret the Airtable developer application.
@@ -78,56 +100,101 @@ type SourceAirtableAuthenticationOAuth20 struct {
TokenExpiryDate *time.Time `json:"token_expiry_date,omitempty"`
}
+func (s SourceAirtableOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAirtableOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAirtableOAuth20) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceAirtableOAuth20) GetAuthMethod() *SourceAirtableAuthMethod {
+ return SourceAirtableAuthMethodOauth20.ToPointer()
+}
+
+func (o *SourceAirtableOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceAirtableOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceAirtableOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceAirtableOAuth20) GetTokenExpiryDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.TokenExpiryDate
+}
+
type SourceAirtableAuthenticationType string
const (
- SourceAirtableAuthenticationTypeSourceAirtableAuthenticationOAuth20 SourceAirtableAuthenticationType = "source-airtable_Authentication_OAuth2.0"
- SourceAirtableAuthenticationTypeSourceAirtableAuthenticationPersonalAccessToken SourceAirtableAuthenticationType = "source-airtable_Authentication_Personal Access Token"
+ SourceAirtableAuthenticationTypeSourceAirtableOAuth20 SourceAirtableAuthenticationType = "source-airtable_OAuth2.0"
+ SourceAirtableAuthenticationTypeSourceAirtablePersonalAccessToken SourceAirtableAuthenticationType = "source-airtable_Personal Access Token"
)
type SourceAirtableAuthentication struct {
- SourceAirtableAuthenticationOAuth20 *SourceAirtableAuthenticationOAuth20
- SourceAirtableAuthenticationPersonalAccessToken *SourceAirtableAuthenticationPersonalAccessToken
+ SourceAirtableOAuth20 *SourceAirtableOAuth20
+ SourceAirtablePersonalAccessToken *SourceAirtablePersonalAccessToken
Type SourceAirtableAuthenticationType
}
-func CreateSourceAirtableAuthenticationSourceAirtableAuthenticationOAuth20(sourceAirtableAuthenticationOAuth20 SourceAirtableAuthenticationOAuth20) SourceAirtableAuthentication {
- typ := SourceAirtableAuthenticationTypeSourceAirtableAuthenticationOAuth20
+func CreateSourceAirtableAuthenticationSourceAirtableOAuth20(sourceAirtableOAuth20 SourceAirtableOAuth20) SourceAirtableAuthentication {
+ typ := SourceAirtableAuthenticationTypeSourceAirtableOAuth20
return SourceAirtableAuthentication{
- SourceAirtableAuthenticationOAuth20: &sourceAirtableAuthenticationOAuth20,
- Type: typ,
+ SourceAirtableOAuth20: &sourceAirtableOAuth20,
+ Type: typ,
}
}
-func CreateSourceAirtableAuthenticationSourceAirtableAuthenticationPersonalAccessToken(sourceAirtableAuthenticationPersonalAccessToken SourceAirtableAuthenticationPersonalAccessToken) SourceAirtableAuthentication {
- typ := SourceAirtableAuthenticationTypeSourceAirtableAuthenticationPersonalAccessToken
+func CreateSourceAirtableAuthenticationSourceAirtablePersonalAccessToken(sourceAirtablePersonalAccessToken SourceAirtablePersonalAccessToken) SourceAirtableAuthentication {
+ typ := SourceAirtableAuthenticationTypeSourceAirtablePersonalAccessToken
return SourceAirtableAuthentication{
- SourceAirtableAuthenticationPersonalAccessToken: &sourceAirtableAuthenticationPersonalAccessToken,
- Type: typ,
+ SourceAirtablePersonalAccessToken: &sourceAirtablePersonalAccessToken,
+ Type: typ,
}
}
func (u *SourceAirtableAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceAirtableAuthenticationPersonalAccessToken := new(SourceAirtableAuthenticationPersonalAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAirtableAuthenticationPersonalAccessToken); err == nil {
- u.SourceAirtableAuthenticationPersonalAccessToken = sourceAirtableAuthenticationPersonalAccessToken
- u.Type = SourceAirtableAuthenticationTypeSourceAirtableAuthenticationPersonalAccessToken
+
+ sourceAirtablePersonalAccessToken := new(SourceAirtablePersonalAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceAirtablePersonalAccessToken, "", true, true); err == nil {
+ u.SourceAirtablePersonalAccessToken = sourceAirtablePersonalAccessToken
+ u.Type = SourceAirtableAuthenticationTypeSourceAirtablePersonalAccessToken
return nil
}
- sourceAirtableAuthenticationOAuth20 := new(SourceAirtableAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAirtableAuthenticationOAuth20); err == nil {
- u.SourceAirtableAuthenticationOAuth20 = sourceAirtableAuthenticationOAuth20
- u.Type = SourceAirtableAuthenticationTypeSourceAirtableAuthenticationOAuth20
+ sourceAirtableOAuth20 := new(SourceAirtableOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceAirtableOAuth20, "", true, true); err == nil {
+ u.SourceAirtableOAuth20 = sourceAirtableOAuth20
+ u.Type = SourceAirtableAuthenticationTypeSourceAirtableOAuth20
return nil
}
@@ -135,42 +202,64 @@ func (u *SourceAirtableAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceAirtableAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceAirtableAuthenticationPersonalAccessToken != nil {
- return json.Marshal(u.SourceAirtableAuthenticationPersonalAccessToken)
+ if u.SourceAirtableOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceAirtableOAuth20, "", true)
}
- if u.SourceAirtableAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceAirtableAuthenticationOAuth20)
+ if u.SourceAirtablePersonalAccessToken != nil {
+ return utils.MarshalJSON(u.SourceAirtablePersonalAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceAirtableAirtable string
+type Airtable string
const (
- SourceAirtableAirtableAirtable SourceAirtableAirtable = "airtable"
+ AirtableAirtable Airtable = "airtable"
)
-func (e SourceAirtableAirtable) ToPointer() *SourceAirtableAirtable {
+func (e Airtable) ToPointer() *Airtable {
return &e
}
-func (e *SourceAirtableAirtable) UnmarshalJSON(data []byte) error {
+func (e *Airtable) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "airtable":
- *e = SourceAirtableAirtable(v)
+ *e = Airtable(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAirtableAirtable: %v", v)
+ return fmt.Errorf("invalid value for Airtable: %v", v)
}
}
type SourceAirtable struct {
Credentials *SourceAirtableAuthentication `json:"credentials,omitempty"`
- SourceType *SourceAirtableAirtable `json:"sourceType,omitempty"`
+ sourceType *Airtable `const:"airtable" json:"sourceType,omitempty"`
+}
+
+func (s SourceAirtable) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAirtable) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAirtable) GetCredentials() *SourceAirtableAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceAirtable) GetSourceType() *Airtable {
+ return AirtableAirtable.ToPointer()
}
diff --git a/internal/sdk/pkg/models/shared/sourceairtablecreaterequest.go b/internal/sdk/pkg/models/shared/sourceairtablecreaterequest.go
old mode 100755
new mode 100644
index 3e7bb1a08..c9aaa9b28
--- a/internal/sdk/pkg/models/shared/sourceairtablecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceairtablecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAirtableCreateRequest struct {
Configuration SourceAirtable `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAirtableCreateRequest) GetConfiguration() SourceAirtable {
+ if o == nil {
+ return SourceAirtable{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAirtableCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAirtableCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAirtableCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAirtableCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceairtableputrequest.go b/internal/sdk/pkg/models/shared/sourceairtableputrequest.go
old mode 100755
new mode 100644
index 1b1eac1df..1bb54c254
--- a/internal/sdk/pkg/models/shared/sourceairtableputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceairtableputrequest.go
@@ -7,3 +7,24 @@ type SourceAirtablePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAirtablePutRequest) GetConfiguration() SourceAirtableUpdate {
+ if o == nil {
+ return SourceAirtableUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAirtablePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAirtablePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceairtableupdate.go b/internal/sdk/pkg/models/shared/sourceairtableupdate.go
old mode 100755
new mode 100644
index 3e58e82e5..9e185c9ad
--- a/internal/sdk/pkg/models/shared/sourceairtableupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceairtableupdate.go
@@ -3,71 +3,93 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceAirtableUpdateAuthenticationPersonalAccessTokenAuthMethod string
+type SourceAirtableUpdateAuthMethod string
const (
- SourceAirtableUpdateAuthenticationPersonalAccessTokenAuthMethodAPIKey SourceAirtableUpdateAuthenticationPersonalAccessTokenAuthMethod = "api_key"
+ SourceAirtableUpdateAuthMethodAPIKey SourceAirtableUpdateAuthMethod = "api_key"
)
-func (e SourceAirtableUpdateAuthenticationPersonalAccessTokenAuthMethod) ToPointer() *SourceAirtableUpdateAuthenticationPersonalAccessTokenAuthMethod {
+func (e SourceAirtableUpdateAuthMethod) ToPointer() *SourceAirtableUpdateAuthMethod {
return &e
}
-func (e *SourceAirtableUpdateAuthenticationPersonalAccessTokenAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAirtableUpdateAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_key":
- *e = SourceAirtableUpdateAuthenticationPersonalAccessTokenAuthMethod(v)
+ *e = SourceAirtableUpdateAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAirtableUpdateAuthenticationPersonalAccessTokenAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAirtableUpdateAuthMethod: %v", v)
}
}
-type SourceAirtableUpdateAuthenticationPersonalAccessToken struct {
+type PersonalAccessToken struct {
// The Personal Access Token for the Airtable account. See the Support Guide for more information on how to obtain this token.
- APIKey string `json:"api_key"`
- AuthMethod *SourceAirtableUpdateAuthenticationPersonalAccessTokenAuthMethod `json:"auth_method,omitempty"`
+ APIKey string `json:"api_key"`
+ authMethod *SourceAirtableUpdateAuthMethod `const:"api_key" json:"auth_method,omitempty"`
}
-type SourceAirtableUpdateAuthenticationOAuth20AuthMethod string
+func (p PersonalAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(p, "", false)
+}
+
+func (p *PersonalAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &p, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *PersonalAccessToken) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *PersonalAccessToken) GetAuthMethod() *SourceAirtableUpdateAuthMethod {
+ return SourceAirtableUpdateAuthMethodAPIKey.ToPointer()
+}
+
+type SourceAirtableUpdateSchemasAuthMethod string
const (
- SourceAirtableUpdateAuthenticationOAuth20AuthMethodOauth20 SourceAirtableUpdateAuthenticationOAuth20AuthMethod = "oauth2.0"
+ SourceAirtableUpdateSchemasAuthMethodOauth20 SourceAirtableUpdateSchemasAuthMethod = "oauth2.0"
)
-func (e SourceAirtableUpdateAuthenticationOAuth20AuthMethod) ToPointer() *SourceAirtableUpdateAuthenticationOAuth20AuthMethod {
+func (e SourceAirtableUpdateSchemasAuthMethod) ToPointer() *SourceAirtableUpdateSchemasAuthMethod {
return &e
}
-func (e *SourceAirtableUpdateAuthenticationOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAirtableUpdateSchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceAirtableUpdateAuthenticationOAuth20AuthMethod(v)
+ *e = SourceAirtableUpdateSchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAirtableUpdateAuthenticationOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAirtableUpdateSchemasAuthMethod: %v", v)
}
}
-type SourceAirtableUpdateAuthenticationOAuth20 struct {
+type SourceAirtableUpdateOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken *string `json:"access_token,omitempty"`
- AuthMethod *SourceAirtableUpdateAuthenticationOAuth20AuthMethod `json:"auth_method,omitempty"`
+ AccessToken *string `json:"access_token,omitempty"`
+ authMethod *SourceAirtableUpdateSchemasAuthMethod `const:"oauth2.0" json:"auth_method,omitempty"`
// The client ID of the Airtable developer application.
ClientID string `json:"client_id"`
// The client secret the Airtable developer application.
@@ -78,56 +100,101 @@ type SourceAirtableUpdateAuthenticationOAuth20 struct {
TokenExpiryDate *time.Time `json:"token_expiry_date,omitempty"`
}
+func (s SourceAirtableUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAirtableUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAirtableUpdateOAuth20) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceAirtableUpdateOAuth20) GetAuthMethod() *SourceAirtableUpdateSchemasAuthMethod {
+ return SourceAirtableUpdateSchemasAuthMethodOauth20.ToPointer()
+}
+
+func (o *SourceAirtableUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceAirtableUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceAirtableUpdateOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceAirtableUpdateOAuth20) GetTokenExpiryDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.TokenExpiryDate
+}
+
type SourceAirtableUpdateAuthenticationType string
const (
- SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateAuthenticationOAuth20 SourceAirtableUpdateAuthenticationType = "source-airtable-update_Authentication_OAuth2.0"
- SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateAuthenticationPersonalAccessToken SourceAirtableUpdateAuthenticationType = "source-airtable-update_Authentication_Personal Access Token"
+ SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateOAuth20 SourceAirtableUpdateAuthenticationType = "source-airtable-update_OAuth2.0"
+ SourceAirtableUpdateAuthenticationTypePersonalAccessToken SourceAirtableUpdateAuthenticationType = "Personal Access Token"
)
type SourceAirtableUpdateAuthentication struct {
- SourceAirtableUpdateAuthenticationOAuth20 *SourceAirtableUpdateAuthenticationOAuth20
- SourceAirtableUpdateAuthenticationPersonalAccessToken *SourceAirtableUpdateAuthenticationPersonalAccessToken
+ SourceAirtableUpdateOAuth20 *SourceAirtableUpdateOAuth20
+ PersonalAccessToken *PersonalAccessToken
Type SourceAirtableUpdateAuthenticationType
}
-func CreateSourceAirtableUpdateAuthenticationSourceAirtableUpdateAuthenticationOAuth20(sourceAirtableUpdateAuthenticationOAuth20 SourceAirtableUpdateAuthenticationOAuth20) SourceAirtableUpdateAuthentication {
- typ := SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateAuthenticationOAuth20
+func CreateSourceAirtableUpdateAuthenticationSourceAirtableUpdateOAuth20(sourceAirtableUpdateOAuth20 SourceAirtableUpdateOAuth20) SourceAirtableUpdateAuthentication {
+ typ := SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateOAuth20
return SourceAirtableUpdateAuthentication{
- SourceAirtableUpdateAuthenticationOAuth20: &sourceAirtableUpdateAuthenticationOAuth20,
- Type: typ,
+ SourceAirtableUpdateOAuth20: &sourceAirtableUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceAirtableUpdateAuthenticationSourceAirtableUpdateAuthenticationPersonalAccessToken(sourceAirtableUpdateAuthenticationPersonalAccessToken SourceAirtableUpdateAuthenticationPersonalAccessToken) SourceAirtableUpdateAuthentication {
- typ := SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateAuthenticationPersonalAccessToken
+func CreateSourceAirtableUpdateAuthenticationPersonalAccessToken(personalAccessToken PersonalAccessToken) SourceAirtableUpdateAuthentication {
+ typ := SourceAirtableUpdateAuthenticationTypePersonalAccessToken
return SourceAirtableUpdateAuthentication{
- SourceAirtableUpdateAuthenticationPersonalAccessToken: &sourceAirtableUpdateAuthenticationPersonalAccessToken,
- Type: typ,
+ PersonalAccessToken: &personalAccessToken,
+ Type: typ,
}
}
func (u *SourceAirtableUpdateAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceAirtableUpdateAuthenticationPersonalAccessToken := new(SourceAirtableUpdateAuthenticationPersonalAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAirtableUpdateAuthenticationPersonalAccessToken); err == nil {
- u.SourceAirtableUpdateAuthenticationPersonalAccessToken = sourceAirtableUpdateAuthenticationPersonalAccessToken
- u.Type = SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateAuthenticationPersonalAccessToken
+
+ personalAccessToken := new(PersonalAccessToken)
+ if err := utils.UnmarshalJSON(data, &personalAccessToken, "", true, true); err == nil {
+ u.PersonalAccessToken = personalAccessToken
+ u.Type = SourceAirtableUpdateAuthenticationTypePersonalAccessToken
return nil
}
- sourceAirtableUpdateAuthenticationOAuth20 := new(SourceAirtableUpdateAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAirtableUpdateAuthenticationOAuth20); err == nil {
- u.SourceAirtableUpdateAuthenticationOAuth20 = sourceAirtableUpdateAuthenticationOAuth20
- u.Type = SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateAuthenticationOAuth20
+ sourceAirtableUpdateOAuth20 := new(SourceAirtableUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceAirtableUpdateOAuth20, "", true, true); err == nil {
+ u.SourceAirtableUpdateOAuth20 = sourceAirtableUpdateOAuth20
+ u.Type = SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateOAuth20
return nil
}
@@ -135,17 +202,24 @@ func (u *SourceAirtableUpdateAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceAirtableUpdateAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceAirtableUpdateAuthenticationPersonalAccessToken != nil {
- return json.Marshal(u.SourceAirtableUpdateAuthenticationPersonalAccessToken)
+ if u.SourceAirtableUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceAirtableUpdateOAuth20, "", true)
}
- if u.SourceAirtableUpdateAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceAirtableUpdateAuthenticationOAuth20)
+ if u.PersonalAccessToken != nil {
+ return utils.MarshalJSON(u.PersonalAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceAirtableUpdate struct {
Credentials *SourceAirtableUpdateAuthentication `json:"credentials,omitempty"`
}
+
+func (o *SourceAirtableUpdate) GetCredentials() *SourceAirtableUpdateAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
diff --git a/internal/sdk/pkg/models/shared/sourcealloydb.go b/internal/sdk/pkg/models/shared/sourcealloydb.go
old mode 100755
new mode 100644
index 58e429820..a971ce534
--- a/internal/sdk/pkg/models/shared/sourcealloydb.go
+++ b/internal/sdk/pkg/models/shared/sourcealloydb.go
@@ -3,54 +3,69 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceAlloydbReplicationMethodStandardMethod string
+type SourceAlloydbSchemasReplicationMethodMethod string
const (
- SourceAlloydbReplicationMethodStandardMethodStandard SourceAlloydbReplicationMethodStandardMethod = "Standard"
+ SourceAlloydbSchemasReplicationMethodMethodStandard SourceAlloydbSchemasReplicationMethodMethod = "Standard"
)
-func (e SourceAlloydbReplicationMethodStandardMethod) ToPointer() *SourceAlloydbReplicationMethodStandardMethod {
+func (e SourceAlloydbSchemasReplicationMethodMethod) ToPointer() *SourceAlloydbSchemasReplicationMethodMethod {
return &e
}
-func (e *SourceAlloydbReplicationMethodStandardMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbSchemasReplicationMethodMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Standard":
- *e = SourceAlloydbReplicationMethodStandardMethod(v)
+ *e = SourceAlloydbSchemasReplicationMethodMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbReplicationMethodStandardMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbSchemasReplicationMethodMethod: %v", v)
}
}
-// SourceAlloydbReplicationMethodStandard - Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.
-type SourceAlloydbReplicationMethodStandard struct {
- Method SourceAlloydbReplicationMethodStandardMethod `json:"method"`
+// SourceAlloydbStandard - Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.
+type SourceAlloydbStandard struct {
+ method SourceAlloydbSchemasReplicationMethodMethod `const:"Standard" json:"method"`
}
-// SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour - Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-type SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour string
+func (s SourceAlloydbStandard) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAlloydbStandard) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAlloydbStandard) GetMethod() SourceAlloydbSchemasReplicationMethodMethod {
+ return SourceAlloydbSchemasReplicationMethodMethodStandard
+}
+
+// SourceAlloydbLSNCommitBehaviour - Determines when Airbyte should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
+type SourceAlloydbLSNCommitBehaviour string
const (
- SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviourWhileReadingData SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour = "While reading Data"
- SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviourAfterLoadingDataInTheDestination SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour = "After loading Data in the destination"
+ SourceAlloydbLSNCommitBehaviourWhileReadingData SourceAlloydbLSNCommitBehaviour = "While reading Data"
+ SourceAlloydbLSNCommitBehaviourAfterLoadingDataInTheDestination SourceAlloydbLSNCommitBehaviour = "After loading Data in the destination"
)
-func (e SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour) ToPointer() *SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour {
+func (e SourceAlloydbLSNCommitBehaviour) ToPointer() *SourceAlloydbLSNCommitBehaviour {
return &e
}
-func (e *SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbLSNCommitBehaviour) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -59,230 +74,251 @@ func (e *SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour)
case "While reading Data":
fallthrough
case "After loading Data in the destination":
- *e = SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour(v)
+ *e = SourceAlloydbLSNCommitBehaviour(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbLSNCommitBehaviour: %v", v)
}
}
-type SourceAlloydbReplicationMethodLogicalReplicationCDCMethod string
+type SourceAlloydbSchemasMethod string
const (
- SourceAlloydbReplicationMethodLogicalReplicationCDCMethodCdc SourceAlloydbReplicationMethodLogicalReplicationCDCMethod = "CDC"
+ SourceAlloydbSchemasMethodCdc SourceAlloydbSchemasMethod = "CDC"
)
-func (e SourceAlloydbReplicationMethodLogicalReplicationCDCMethod) ToPointer() *SourceAlloydbReplicationMethodLogicalReplicationCDCMethod {
+func (e SourceAlloydbSchemasMethod) ToPointer() *SourceAlloydbSchemasMethod {
return &e
}
-func (e *SourceAlloydbReplicationMethodLogicalReplicationCDCMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CDC":
- *e = SourceAlloydbReplicationMethodLogicalReplicationCDCMethod(v)
+ *e = SourceAlloydbSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbReplicationMethodLogicalReplicationCDCMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbSchemasMethod: %v", v)
}
}
-// SourceAlloydbReplicationMethodLogicalReplicationCDCPlugin - A logical decoding plugin installed on the PostgreSQL server.
-type SourceAlloydbReplicationMethodLogicalReplicationCDCPlugin string
+// SourceAlloydbPlugin - A logical decoding plugin installed on the PostgreSQL server.
+type SourceAlloydbPlugin string
const (
- SourceAlloydbReplicationMethodLogicalReplicationCDCPluginPgoutput SourceAlloydbReplicationMethodLogicalReplicationCDCPlugin = "pgoutput"
+ SourceAlloydbPluginPgoutput SourceAlloydbPlugin = "pgoutput"
)
-func (e SourceAlloydbReplicationMethodLogicalReplicationCDCPlugin) ToPointer() *SourceAlloydbReplicationMethodLogicalReplicationCDCPlugin {
+func (e SourceAlloydbPlugin) ToPointer() *SourceAlloydbPlugin {
return &e
}
-func (e *SourceAlloydbReplicationMethodLogicalReplicationCDCPlugin) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbPlugin) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pgoutput":
- *e = SourceAlloydbReplicationMethodLogicalReplicationCDCPlugin(v)
+ *e = SourceAlloydbPlugin(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbReplicationMethodLogicalReplicationCDCPlugin: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbPlugin: %v", v)
}
}
-// SourceAlloydbReplicationMethodLogicalReplicationCDC - Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.
-type SourceAlloydbReplicationMethodLogicalReplicationCDC struct {
+// SourceAlloydbLogicalReplicationCDC - Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.
+type SourceAlloydbLogicalReplicationCDC struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
- InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"`
+ InitialWaitingSeconds *int64 `default:"300" json:"initial_waiting_seconds"`
// Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
- LsnCommitBehaviour *SourceAlloydbReplicationMethodLogicalReplicationCDCLSNCommitBehaviour `json:"lsn_commit_behaviour,omitempty"`
- Method SourceAlloydbReplicationMethodLogicalReplicationCDCMethod `json:"method"`
+ LsnCommitBehaviour *SourceAlloydbLSNCommitBehaviour `default:"After loading Data in the destination" json:"lsn_commit_behaviour"`
+ method SourceAlloydbSchemasMethod `const:"CDC" json:"method"`
// A logical decoding plugin installed on the PostgreSQL server.
- Plugin *SourceAlloydbReplicationMethodLogicalReplicationCDCPlugin `json:"plugin,omitempty"`
+ Plugin *SourceAlloydbPlugin `default:"pgoutput" json:"plugin"`
// A Postgres publication used for consuming changes. Read about publications and replication identities.
Publication string `json:"publication"`
// The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
- QueueSize *int64 `json:"queue_size,omitempty"`
+ QueueSize *int64 `default:"10000" json:"queue_size"`
// A plugin logical replication slot. Read about replication slots.
ReplicationSlot string `json:"replication_slot"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceAlloydbReplicationMethodLogicalReplicationCDC SourceAlloydbReplicationMethodLogicalReplicationCDC
-func (c *SourceAlloydbReplicationMethodLogicalReplicationCDC) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbReplicationMethodLogicalReplicationCDC{}
+func (s SourceAlloydbLogicalReplicationCDC) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (s *SourceAlloydbLogicalReplicationCDC) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- *c = SourceAlloydbReplicationMethodLogicalReplicationCDC(data)
+ return nil
+}
- additionalFields := make(map[string]interface{})
+func (o *SourceAlloydbLogicalReplicationCDC) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *SourceAlloydbLogicalReplicationCDC) GetInitialWaitingSeconds() *int64 {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "initial_waiting_seconds")
- delete(additionalFields, "lsn_commit_behaviour")
- delete(additionalFields, "method")
- delete(additionalFields, "plugin")
- delete(additionalFields, "publication")
- delete(additionalFields, "queue_size")
- delete(additionalFields, "replication_slot")
+ return o.InitialWaitingSeconds
+}
- c.AdditionalProperties = additionalFields
+func (o *SourceAlloydbLogicalReplicationCDC) GetLsnCommitBehaviour() *SourceAlloydbLSNCommitBehaviour {
+ if o == nil {
+ return nil
+ }
+ return o.LsnCommitBehaviour
+}
- return nil
+func (o *SourceAlloydbLogicalReplicationCDC) GetMethod() SourceAlloydbSchemasMethod {
+ return SourceAlloydbSchemasMethodCdc
}
-func (c SourceAlloydbReplicationMethodLogicalReplicationCDC) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbReplicationMethodLogicalReplicationCDC(c))
- if err != nil {
- return nil, err
+func (o *SourceAlloydbLogicalReplicationCDC) GetPlugin() *SourceAlloydbPlugin {
+ if o == nil {
+ return nil
}
+ return o.Plugin
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbLogicalReplicationCDC) GetPublication() string {
+ if o == nil {
+ return ""
}
+ return o.Publication
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceAlloydbLogicalReplicationCDC) GetQueueSize() *int64 {
+ if o == nil {
+ return nil
}
+ return o.QueueSize
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbLogicalReplicationCDC) GetReplicationSlot() string {
+ if o == nil {
+ return ""
}
-
- return json.Marshal(out)
+ return o.ReplicationSlot
}
-type SourceAlloydbReplicationMethodStandardXminMethod string
+type SourceAlloydbMethod string
const (
- SourceAlloydbReplicationMethodStandardXminMethodXmin SourceAlloydbReplicationMethodStandardXminMethod = "Xmin"
+ SourceAlloydbMethodXmin SourceAlloydbMethod = "Xmin"
)
-func (e SourceAlloydbReplicationMethodStandardXminMethod) ToPointer() *SourceAlloydbReplicationMethodStandardXminMethod {
+func (e SourceAlloydbMethod) ToPointer() *SourceAlloydbMethod {
return &e
}
-func (e *SourceAlloydbReplicationMethodStandardXminMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Xmin":
- *e = SourceAlloydbReplicationMethodStandardXminMethod(v)
+ *e = SourceAlloydbMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbReplicationMethodStandardXminMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbMethod: %v", v)
+ }
+}
+
+// SourceAlloydbStandardXmin - Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.
+type SourceAlloydbStandardXmin struct {
+ method SourceAlloydbMethod `const:"Xmin" json:"method"`
+}
+
+func (s SourceAlloydbStandardXmin) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAlloydbStandardXmin) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceAlloydbReplicationMethodStandardXmin - Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.
-type SourceAlloydbReplicationMethodStandardXmin struct {
- Method SourceAlloydbReplicationMethodStandardXminMethod `json:"method"`
+func (o *SourceAlloydbStandardXmin) GetMethod() SourceAlloydbMethod {
+ return SourceAlloydbMethodXmin
}
type SourceAlloydbReplicationMethodType string
const (
- SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodStandardXmin SourceAlloydbReplicationMethodType = "source-alloydb_Replication Method_Standard (Xmin)"
- SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodLogicalReplicationCDC SourceAlloydbReplicationMethodType = "source-alloydb_Replication Method_Logical Replication (CDC)"
- SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodStandard SourceAlloydbReplicationMethodType = "source-alloydb_Replication Method_Standard"
+ SourceAlloydbReplicationMethodTypeSourceAlloydbStandardXmin SourceAlloydbReplicationMethodType = "source-alloydb_Standard (Xmin)"
+ SourceAlloydbReplicationMethodTypeSourceAlloydbLogicalReplicationCDC SourceAlloydbReplicationMethodType = "source-alloydb_Logical Replication (CDC)"
+ SourceAlloydbReplicationMethodTypeSourceAlloydbStandard SourceAlloydbReplicationMethodType = "source-alloydb_Standard"
)
type SourceAlloydbReplicationMethod struct {
- SourceAlloydbReplicationMethodStandardXmin *SourceAlloydbReplicationMethodStandardXmin
- SourceAlloydbReplicationMethodLogicalReplicationCDC *SourceAlloydbReplicationMethodLogicalReplicationCDC
- SourceAlloydbReplicationMethodStandard *SourceAlloydbReplicationMethodStandard
+ SourceAlloydbStandardXmin *SourceAlloydbStandardXmin
+ SourceAlloydbLogicalReplicationCDC *SourceAlloydbLogicalReplicationCDC
+ SourceAlloydbStandard *SourceAlloydbStandard
Type SourceAlloydbReplicationMethodType
}
-func CreateSourceAlloydbReplicationMethodSourceAlloydbReplicationMethodStandardXmin(sourceAlloydbReplicationMethodStandardXmin SourceAlloydbReplicationMethodStandardXmin) SourceAlloydbReplicationMethod {
- typ := SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodStandardXmin
+func CreateSourceAlloydbReplicationMethodSourceAlloydbStandardXmin(sourceAlloydbStandardXmin SourceAlloydbStandardXmin) SourceAlloydbReplicationMethod {
+ typ := SourceAlloydbReplicationMethodTypeSourceAlloydbStandardXmin
return SourceAlloydbReplicationMethod{
- SourceAlloydbReplicationMethodStandardXmin: &sourceAlloydbReplicationMethodStandardXmin,
- Type: typ,
+ SourceAlloydbStandardXmin: &sourceAlloydbStandardXmin,
+ Type: typ,
}
}
-func CreateSourceAlloydbReplicationMethodSourceAlloydbReplicationMethodLogicalReplicationCDC(sourceAlloydbReplicationMethodLogicalReplicationCDC SourceAlloydbReplicationMethodLogicalReplicationCDC) SourceAlloydbReplicationMethod {
- typ := SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodLogicalReplicationCDC
+func CreateSourceAlloydbReplicationMethodSourceAlloydbLogicalReplicationCDC(sourceAlloydbLogicalReplicationCDC SourceAlloydbLogicalReplicationCDC) SourceAlloydbReplicationMethod {
+ typ := SourceAlloydbReplicationMethodTypeSourceAlloydbLogicalReplicationCDC
return SourceAlloydbReplicationMethod{
- SourceAlloydbReplicationMethodLogicalReplicationCDC: &sourceAlloydbReplicationMethodLogicalReplicationCDC,
- Type: typ,
+ SourceAlloydbLogicalReplicationCDC: &sourceAlloydbLogicalReplicationCDC,
+ Type: typ,
}
}
-func CreateSourceAlloydbReplicationMethodSourceAlloydbReplicationMethodStandard(sourceAlloydbReplicationMethodStandard SourceAlloydbReplicationMethodStandard) SourceAlloydbReplicationMethod {
- typ := SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodStandard
+func CreateSourceAlloydbReplicationMethodSourceAlloydbStandard(sourceAlloydbStandard SourceAlloydbStandard) SourceAlloydbReplicationMethod {
+ typ := SourceAlloydbReplicationMethodTypeSourceAlloydbStandard
return SourceAlloydbReplicationMethod{
- SourceAlloydbReplicationMethodStandard: &sourceAlloydbReplicationMethodStandard,
- Type: typ,
+ SourceAlloydbStandard: &sourceAlloydbStandard,
+ Type: typ,
}
}
func (u *SourceAlloydbReplicationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceAlloydbReplicationMethodStandardXmin := new(SourceAlloydbReplicationMethodStandardXmin)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbReplicationMethodStandardXmin); err == nil {
- u.SourceAlloydbReplicationMethodStandardXmin = sourceAlloydbReplicationMethodStandardXmin
- u.Type = SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodStandardXmin
+
+ sourceAlloydbStandardXmin := new(SourceAlloydbStandardXmin)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbStandardXmin, "", true, true); err == nil {
+ u.SourceAlloydbStandardXmin = sourceAlloydbStandardXmin
+ u.Type = SourceAlloydbReplicationMethodTypeSourceAlloydbStandardXmin
return nil
}
- sourceAlloydbReplicationMethodStandard := new(SourceAlloydbReplicationMethodStandard)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbReplicationMethodStandard); err == nil {
- u.SourceAlloydbReplicationMethodStandard = sourceAlloydbReplicationMethodStandard
- u.Type = SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodStandard
+ sourceAlloydbStandard := new(SourceAlloydbStandard)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbStandard, "", true, true); err == nil {
+ u.SourceAlloydbStandard = sourceAlloydbStandard
+ u.Type = SourceAlloydbReplicationMethodTypeSourceAlloydbStandard
return nil
}
- sourceAlloydbReplicationMethodLogicalReplicationCDC := new(SourceAlloydbReplicationMethodLogicalReplicationCDC)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbReplicationMethodLogicalReplicationCDC); err == nil {
- u.SourceAlloydbReplicationMethodLogicalReplicationCDC = sourceAlloydbReplicationMethodLogicalReplicationCDC
- u.Type = SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodLogicalReplicationCDC
+ sourceAlloydbLogicalReplicationCDC := new(SourceAlloydbLogicalReplicationCDC)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbLogicalReplicationCDC, "", true, true); err == nil {
+ u.SourceAlloydbLogicalReplicationCDC = sourceAlloydbLogicalReplicationCDC
+ u.Type = SourceAlloydbReplicationMethodTypeSourceAlloydbLogicalReplicationCDC
return nil
}
@@ -290,71 +326,72 @@ func (u *SourceAlloydbReplicationMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceAlloydbReplicationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceAlloydbReplicationMethodStandardXmin != nil {
- return json.Marshal(u.SourceAlloydbReplicationMethodStandardXmin)
+ if u.SourceAlloydbStandardXmin != nil {
+ return utils.MarshalJSON(u.SourceAlloydbStandardXmin, "", true)
}
- if u.SourceAlloydbReplicationMethodStandard != nil {
- return json.Marshal(u.SourceAlloydbReplicationMethodStandard)
+ if u.SourceAlloydbLogicalReplicationCDC != nil {
+ return utils.MarshalJSON(u.SourceAlloydbLogicalReplicationCDC, "", true)
}
- if u.SourceAlloydbReplicationMethodLogicalReplicationCDC != nil {
- return json.Marshal(u.SourceAlloydbReplicationMethodLogicalReplicationCDC)
+ if u.SourceAlloydbStandard != nil {
+ return utils.MarshalJSON(u.SourceAlloydbStandard, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceAlloydbAlloydb string
+type Alloydb string
const (
- SourceAlloydbAlloydbAlloydb SourceAlloydbAlloydb = "alloydb"
+ AlloydbAlloydb Alloydb = "alloydb"
)
-func (e SourceAlloydbAlloydb) ToPointer() *SourceAlloydbAlloydb {
+func (e Alloydb) ToPointer() *Alloydb {
return &e
}
-func (e *SourceAlloydbAlloydb) UnmarshalJSON(data []byte) error {
+func (e *Alloydb) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "alloydb":
- *e = SourceAlloydbAlloydb(v)
+ *e = Alloydb(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbAlloydb: %v", v)
+ return fmt.Errorf("invalid value for Alloydb: %v", v)
}
}
-type SourceAlloydbSSLModesVerifyFullMode string
+type SourceAlloydbSchemasSSLModeSSLModes6Mode string
const (
- SourceAlloydbSSLModesVerifyFullModeVerifyFull SourceAlloydbSSLModesVerifyFullMode = "verify-full"
+ SourceAlloydbSchemasSSLModeSSLModes6ModeVerifyFull SourceAlloydbSchemasSSLModeSSLModes6Mode = "verify-full"
)
-func (e SourceAlloydbSSLModesVerifyFullMode) ToPointer() *SourceAlloydbSSLModesVerifyFullMode {
+func (e SourceAlloydbSchemasSSLModeSSLModes6Mode) ToPointer() *SourceAlloydbSchemasSSLModeSSLModes6Mode {
return &e
}
-func (e *SourceAlloydbSSLModesVerifyFullMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbSchemasSSLModeSSLModes6Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-full":
- *e = SourceAlloydbSSLModesVerifyFullMode(v)
+ *e = SourceAlloydbSchemasSSLModeSSLModes6Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbSSLModesVerifyFullMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbSchemasSSLModeSSLModes6Mode: %v", v)
}
}
-// SourceAlloydbSSLModesVerifyFull - This is the most secure mode. Always require encryption and verifies the identity of the source database server.
-type SourceAlloydbSSLModesVerifyFull struct {
+// SourceAlloydbVerifyFull - This is the most secure mode. Always require encryption and verifies the identity of the source database server.
+type SourceAlloydbVerifyFull struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -362,86 +399,87 @@ type SourceAlloydbSSLModesVerifyFull struct {
// Client key
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourceAlloydbSSLModesVerifyFullMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode SourceAlloydbSchemasSSLModeSSLModes6Mode `const:"verify-full" json:"mode"`
}
-type _SourceAlloydbSSLModesVerifyFull SourceAlloydbSSLModesVerifyFull
-func (c *SourceAlloydbSSLModesVerifyFull) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbSSLModesVerifyFull{}
+func (s SourceAlloydbVerifyFull) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (s *SourceAlloydbVerifyFull) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- *c = SourceAlloydbSSLModesVerifyFull(data)
-
- additionalFields := make(map[string]interface{})
+ return nil
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *SourceAlloydbVerifyFull) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "ca_certificate")
- delete(additionalFields, "client_certificate")
- delete(additionalFields, "client_key")
- delete(additionalFields, "client_key_password")
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
- return nil
+ return o.AdditionalProperties
}
-func (c SourceAlloydbSSLModesVerifyFull) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbSSLModesVerifyFull(c))
- if err != nil {
- return nil, err
+func (o *SourceAlloydbVerifyFull) GetCaCertificate() string {
+ if o == nil {
+ return ""
}
+ return o.CaCertificate
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbVerifyFull) GetClientCertificate() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientCertificate
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceAlloydbVerifyFull) GetClientKey() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKey
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbVerifyFull) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKeyPassword
+}
- return json.Marshal(out)
+func (o *SourceAlloydbVerifyFull) GetMode() SourceAlloydbSchemasSSLModeSSLModes6Mode {
+ return SourceAlloydbSchemasSSLModeSSLModes6ModeVerifyFull
}
-type SourceAlloydbSSLModesVerifyCaMode string
+type SourceAlloydbSchemasSSLModeSSLModes5Mode string
const (
- SourceAlloydbSSLModesVerifyCaModeVerifyCa SourceAlloydbSSLModesVerifyCaMode = "verify-ca"
+ SourceAlloydbSchemasSSLModeSSLModes5ModeVerifyCa SourceAlloydbSchemasSSLModeSSLModes5Mode = "verify-ca"
)
-func (e SourceAlloydbSSLModesVerifyCaMode) ToPointer() *SourceAlloydbSSLModesVerifyCaMode {
+func (e SourceAlloydbSchemasSSLModeSSLModes5Mode) ToPointer() *SourceAlloydbSchemasSSLModeSSLModes5Mode {
return &e
}
-func (e *SourceAlloydbSSLModesVerifyCaMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbSchemasSSLModeSSLModes5Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-ca":
- *e = SourceAlloydbSSLModesVerifyCaMode(v)
+ *e = SourceAlloydbSchemasSSLModeSSLModes5Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbSSLModesVerifyCaMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbSchemasSSLModeSSLModes5Mode: %v", v)
}
}
-// SourceAlloydbSSLModesVerifyCa - Always require encryption and verifies that the source database server has a valid SSL certificate.
-type SourceAlloydbSSLModesVerifyCa struct {
+// SourceAlloydbVerifyCa - Always require encryption and verifies that the source database server has a valid SSL certificate.
+type SourceAlloydbVerifyCa struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -449,490 +487,385 @@ type SourceAlloydbSSLModesVerifyCa struct {
// Client key
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourceAlloydbSSLModesVerifyCaMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode SourceAlloydbSchemasSSLModeSSLModes5Mode `const:"verify-ca" json:"mode"`
}
-type _SourceAlloydbSSLModesVerifyCa SourceAlloydbSSLModesVerifyCa
-func (c *SourceAlloydbSSLModesVerifyCa) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbSSLModesVerifyCa{}
+func (s SourceAlloydbVerifyCa) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (s *SourceAlloydbVerifyCa) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- *c = SourceAlloydbSSLModesVerifyCa(data)
-
- additionalFields := make(map[string]interface{})
+ return nil
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *SourceAlloydbVerifyCa) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "ca_certificate")
- delete(additionalFields, "client_certificate")
- delete(additionalFields, "client_key")
- delete(additionalFields, "client_key_password")
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
- return nil
+ return o.AdditionalProperties
}
-func (c SourceAlloydbSSLModesVerifyCa) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbSSLModesVerifyCa(c))
- if err != nil {
- return nil, err
+func (o *SourceAlloydbVerifyCa) GetCaCertificate() string {
+ if o == nil {
+ return ""
}
+ return o.CaCertificate
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbVerifyCa) GetClientCertificate() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientCertificate
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceAlloydbVerifyCa) GetClientKey() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKey
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbVerifyCa) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKeyPassword
+}
- return json.Marshal(out)
+func (o *SourceAlloydbVerifyCa) GetMode() SourceAlloydbSchemasSSLModeSSLModes5Mode {
+ return SourceAlloydbSchemasSSLModeSSLModes5ModeVerifyCa
}
-type SourceAlloydbSSLModesRequireMode string
+type SourceAlloydbSchemasSSLModeSSLModesMode string
const (
- SourceAlloydbSSLModesRequireModeRequire SourceAlloydbSSLModesRequireMode = "require"
+ SourceAlloydbSchemasSSLModeSSLModesModeRequire SourceAlloydbSchemasSSLModeSSLModesMode = "require"
)
-func (e SourceAlloydbSSLModesRequireMode) ToPointer() *SourceAlloydbSSLModesRequireMode {
+func (e SourceAlloydbSchemasSSLModeSSLModesMode) ToPointer() *SourceAlloydbSchemasSSLModeSSLModesMode {
return &e
}
-func (e *SourceAlloydbSSLModesRequireMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbSchemasSSLModeSSLModesMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "require":
- *e = SourceAlloydbSSLModesRequireMode(v)
+ *e = SourceAlloydbSchemasSSLModeSSLModesMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbSSLModesRequireMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbSchemasSSLModeSSLModesMode: %v", v)
}
}
-// SourceAlloydbSSLModesRequire - Always require encryption. If the source database server does not support encryption, connection will fail.
-type SourceAlloydbSSLModesRequire struct {
- Mode SourceAlloydbSSLModesRequireMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourceAlloydbRequire - Always require encryption. If the source database server does not support encryption, connection will fail.
+type SourceAlloydbRequire struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourceAlloydbSchemasSSLModeSSLModesMode `const:"require" json:"mode"`
}
-type _SourceAlloydbSSLModesRequire SourceAlloydbSSLModesRequire
-
-func (c *SourceAlloydbSSLModesRequire) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbSSLModesRequire{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceAlloydbSSLModesRequire(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceAlloydbRequire) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceAlloydbRequire) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceAlloydbSSLModesRequire) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbSSLModesRequire(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbRequire) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourceAlloydbRequire) GetMode() SourceAlloydbSchemasSSLModeSSLModesMode {
+ return SourceAlloydbSchemasSSLModeSSLModesModeRequire
}
-type SourceAlloydbSSLModesPreferMode string
+type SourceAlloydbSchemasSslModeMode string
const (
- SourceAlloydbSSLModesPreferModePrefer SourceAlloydbSSLModesPreferMode = "prefer"
+ SourceAlloydbSchemasSslModeModePrefer SourceAlloydbSchemasSslModeMode = "prefer"
)
-func (e SourceAlloydbSSLModesPreferMode) ToPointer() *SourceAlloydbSSLModesPreferMode {
+func (e SourceAlloydbSchemasSslModeMode) ToPointer() *SourceAlloydbSchemasSslModeMode {
return &e
}
-func (e *SourceAlloydbSSLModesPreferMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbSchemasSslModeMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "prefer":
- *e = SourceAlloydbSSLModesPreferMode(v)
+ *e = SourceAlloydbSchemasSslModeMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbSSLModesPreferMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbSchemasSslModeMode: %v", v)
}
}
-// SourceAlloydbSSLModesPrefer - Allows unencrypted connection only if the source database does not support encryption.
-type SourceAlloydbSSLModesPrefer struct {
- Mode SourceAlloydbSSLModesPreferMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourceAlloydbPrefer - Allows unencrypted connection only if the source database does not support encryption.
+type SourceAlloydbPrefer struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourceAlloydbSchemasSslModeMode `const:"prefer" json:"mode"`
}
-type _SourceAlloydbSSLModesPrefer SourceAlloydbSSLModesPrefer
-func (c *SourceAlloydbSSLModesPrefer) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbSSLModesPrefer{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceAlloydbSSLModesPrefer(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceAlloydbPrefer) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceAlloydbPrefer) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceAlloydbSSLModesPrefer) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbSSLModesPrefer(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbPrefer) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourceAlloydbPrefer) GetMode() SourceAlloydbSchemasSslModeMode {
+ return SourceAlloydbSchemasSslModeModePrefer
}
-type SourceAlloydbSSLModesAllowMode string
+type SourceAlloydbSchemasMode string
const (
- SourceAlloydbSSLModesAllowModeAllow SourceAlloydbSSLModesAllowMode = "allow"
+ SourceAlloydbSchemasModeAllow SourceAlloydbSchemasMode = "allow"
)
-func (e SourceAlloydbSSLModesAllowMode) ToPointer() *SourceAlloydbSSLModesAllowMode {
+func (e SourceAlloydbSchemasMode) ToPointer() *SourceAlloydbSchemasMode {
return &e
}
-func (e *SourceAlloydbSSLModesAllowMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "allow":
- *e = SourceAlloydbSSLModesAllowMode(v)
+ *e = SourceAlloydbSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbSSLModesAllowMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbSchemasMode: %v", v)
}
}
-// SourceAlloydbSSLModesAllow - Enables encryption only when required by the source database.
-type SourceAlloydbSSLModesAllow struct {
- Mode SourceAlloydbSSLModesAllowMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourceAlloydbAllow - Enables encryption only when required by the source database.
+type SourceAlloydbAllow struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourceAlloydbSchemasMode `const:"allow" json:"mode"`
}
-type _SourceAlloydbSSLModesAllow SourceAlloydbSSLModesAllow
-
-func (c *SourceAlloydbSSLModesAllow) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbSSLModesAllow{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceAlloydbSSLModesAllow(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceAlloydbAllow) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceAlloydbAllow) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceAlloydbSSLModesAllow) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbSSLModesAllow(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbAllow) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourceAlloydbAllow) GetMode() SourceAlloydbSchemasMode {
+ return SourceAlloydbSchemasModeAllow
}
-type SourceAlloydbSSLModesDisableMode string
+type SourceAlloydbMode string
const (
- SourceAlloydbSSLModesDisableModeDisable SourceAlloydbSSLModesDisableMode = "disable"
+ SourceAlloydbModeDisable SourceAlloydbMode = "disable"
)
-func (e SourceAlloydbSSLModesDisableMode) ToPointer() *SourceAlloydbSSLModesDisableMode {
+func (e SourceAlloydbMode) ToPointer() *SourceAlloydbMode {
return &e
}
-func (e *SourceAlloydbSSLModesDisableMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "disable":
- *e = SourceAlloydbSSLModesDisableMode(v)
+ *e = SourceAlloydbMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbSSLModesDisableMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbMode: %v", v)
}
}
-// SourceAlloydbSSLModesDisable - Disables encryption of communication between Airbyte and source database.
-type SourceAlloydbSSLModesDisable struct {
- Mode SourceAlloydbSSLModesDisableMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourceAlloydbDisable - Disables encryption of communication between Airbyte and source database.
+type SourceAlloydbDisable struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourceAlloydbMode `const:"disable" json:"mode"`
}
-type _SourceAlloydbSSLModesDisable SourceAlloydbSSLModesDisable
-
-func (c *SourceAlloydbSSLModesDisable) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbSSLModesDisable{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceAlloydbSSLModesDisable(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceAlloydbDisable) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceAlloydbDisable) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceAlloydbSSLModesDisable) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbSSLModesDisable(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbDisable) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourceAlloydbDisable) GetMode() SourceAlloydbMode {
+ return SourceAlloydbModeDisable
}
type SourceAlloydbSSLModesType string
const (
- SourceAlloydbSSLModesTypeSourceAlloydbSSLModesDisable SourceAlloydbSSLModesType = "source-alloydb_SSL Modes_disable"
- SourceAlloydbSSLModesTypeSourceAlloydbSSLModesAllow SourceAlloydbSSLModesType = "source-alloydb_SSL Modes_allow"
- SourceAlloydbSSLModesTypeSourceAlloydbSSLModesPrefer SourceAlloydbSSLModesType = "source-alloydb_SSL Modes_prefer"
- SourceAlloydbSSLModesTypeSourceAlloydbSSLModesRequire SourceAlloydbSSLModesType = "source-alloydb_SSL Modes_require"
- SourceAlloydbSSLModesTypeSourceAlloydbSSLModesVerifyCa SourceAlloydbSSLModesType = "source-alloydb_SSL Modes_verify-ca"
- SourceAlloydbSSLModesTypeSourceAlloydbSSLModesVerifyFull SourceAlloydbSSLModesType = "source-alloydb_SSL Modes_verify-full"
+ SourceAlloydbSSLModesTypeSourceAlloydbDisable SourceAlloydbSSLModesType = "source-alloydb_disable"
+ SourceAlloydbSSLModesTypeSourceAlloydbAllow SourceAlloydbSSLModesType = "source-alloydb_allow"
+ SourceAlloydbSSLModesTypeSourceAlloydbPrefer SourceAlloydbSSLModesType = "source-alloydb_prefer"
+ SourceAlloydbSSLModesTypeSourceAlloydbRequire SourceAlloydbSSLModesType = "source-alloydb_require"
+ SourceAlloydbSSLModesTypeSourceAlloydbVerifyCa SourceAlloydbSSLModesType = "source-alloydb_verify-ca"
+ SourceAlloydbSSLModesTypeSourceAlloydbVerifyFull SourceAlloydbSSLModesType = "source-alloydb_verify-full"
)
type SourceAlloydbSSLModes struct {
- SourceAlloydbSSLModesDisable *SourceAlloydbSSLModesDisable
- SourceAlloydbSSLModesAllow *SourceAlloydbSSLModesAllow
- SourceAlloydbSSLModesPrefer *SourceAlloydbSSLModesPrefer
- SourceAlloydbSSLModesRequire *SourceAlloydbSSLModesRequire
- SourceAlloydbSSLModesVerifyCa *SourceAlloydbSSLModesVerifyCa
- SourceAlloydbSSLModesVerifyFull *SourceAlloydbSSLModesVerifyFull
+ SourceAlloydbDisable *SourceAlloydbDisable
+ SourceAlloydbAllow *SourceAlloydbAllow
+ SourceAlloydbPrefer *SourceAlloydbPrefer
+ SourceAlloydbRequire *SourceAlloydbRequire
+ SourceAlloydbVerifyCa *SourceAlloydbVerifyCa
+ SourceAlloydbVerifyFull *SourceAlloydbVerifyFull
Type SourceAlloydbSSLModesType
}
-func CreateSourceAlloydbSSLModesSourceAlloydbSSLModesDisable(sourceAlloydbSSLModesDisable SourceAlloydbSSLModesDisable) SourceAlloydbSSLModes {
- typ := SourceAlloydbSSLModesTypeSourceAlloydbSSLModesDisable
+func CreateSourceAlloydbSSLModesSourceAlloydbDisable(sourceAlloydbDisable SourceAlloydbDisable) SourceAlloydbSSLModes {
+ typ := SourceAlloydbSSLModesTypeSourceAlloydbDisable
return SourceAlloydbSSLModes{
- SourceAlloydbSSLModesDisable: &sourceAlloydbSSLModesDisable,
- Type: typ,
+ SourceAlloydbDisable: &sourceAlloydbDisable,
+ Type: typ,
}
}
-func CreateSourceAlloydbSSLModesSourceAlloydbSSLModesAllow(sourceAlloydbSSLModesAllow SourceAlloydbSSLModesAllow) SourceAlloydbSSLModes {
- typ := SourceAlloydbSSLModesTypeSourceAlloydbSSLModesAllow
+func CreateSourceAlloydbSSLModesSourceAlloydbAllow(sourceAlloydbAllow SourceAlloydbAllow) SourceAlloydbSSLModes {
+ typ := SourceAlloydbSSLModesTypeSourceAlloydbAllow
return SourceAlloydbSSLModes{
- SourceAlloydbSSLModesAllow: &sourceAlloydbSSLModesAllow,
- Type: typ,
+ SourceAlloydbAllow: &sourceAlloydbAllow,
+ Type: typ,
}
}
-func CreateSourceAlloydbSSLModesSourceAlloydbSSLModesPrefer(sourceAlloydbSSLModesPrefer SourceAlloydbSSLModesPrefer) SourceAlloydbSSLModes {
- typ := SourceAlloydbSSLModesTypeSourceAlloydbSSLModesPrefer
+func CreateSourceAlloydbSSLModesSourceAlloydbPrefer(sourceAlloydbPrefer SourceAlloydbPrefer) SourceAlloydbSSLModes {
+ typ := SourceAlloydbSSLModesTypeSourceAlloydbPrefer
return SourceAlloydbSSLModes{
- SourceAlloydbSSLModesPrefer: &sourceAlloydbSSLModesPrefer,
- Type: typ,
+ SourceAlloydbPrefer: &sourceAlloydbPrefer,
+ Type: typ,
}
}
-func CreateSourceAlloydbSSLModesSourceAlloydbSSLModesRequire(sourceAlloydbSSLModesRequire SourceAlloydbSSLModesRequire) SourceAlloydbSSLModes {
- typ := SourceAlloydbSSLModesTypeSourceAlloydbSSLModesRequire
+func CreateSourceAlloydbSSLModesSourceAlloydbRequire(sourceAlloydbRequire SourceAlloydbRequire) SourceAlloydbSSLModes {
+ typ := SourceAlloydbSSLModesTypeSourceAlloydbRequire
return SourceAlloydbSSLModes{
- SourceAlloydbSSLModesRequire: &sourceAlloydbSSLModesRequire,
- Type: typ,
+ SourceAlloydbRequire: &sourceAlloydbRequire,
+ Type: typ,
}
}
-func CreateSourceAlloydbSSLModesSourceAlloydbSSLModesVerifyCa(sourceAlloydbSSLModesVerifyCa SourceAlloydbSSLModesVerifyCa) SourceAlloydbSSLModes {
- typ := SourceAlloydbSSLModesTypeSourceAlloydbSSLModesVerifyCa
+func CreateSourceAlloydbSSLModesSourceAlloydbVerifyCa(sourceAlloydbVerifyCa SourceAlloydbVerifyCa) SourceAlloydbSSLModes {
+ typ := SourceAlloydbSSLModesTypeSourceAlloydbVerifyCa
return SourceAlloydbSSLModes{
- SourceAlloydbSSLModesVerifyCa: &sourceAlloydbSSLModesVerifyCa,
- Type: typ,
+ SourceAlloydbVerifyCa: &sourceAlloydbVerifyCa,
+ Type: typ,
}
}
-func CreateSourceAlloydbSSLModesSourceAlloydbSSLModesVerifyFull(sourceAlloydbSSLModesVerifyFull SourceAlloydbSSLModesVerifyFull) SourceAlloydbSSLModes {
- typ := SourceAlloydbSSLModesTypeSourceAlloydbSSLModesVerifyFull
+func CreateSourceAlloydbSSLModesSourceAlloydbVerifyFull(sourceAlloydbVerifyFull SourceAlloydbVerifyFull) SourceAlloydbSSLModes {
+ typ := SourceAlloydbSSLModesTypeSourceAlloydbVerifyFull
return SourceAlloydbSSLModes{
- SourceAlloydbSSLModesVerifyFull: &sourceAlloydbSSLModesVerifyFull,
- Type: typ,
+ SourceAlloydbVerifyFull: &sourceAlloydbVerifyFull,
+ Type: typ,
}
}
func (u *SourceAlloydbSSLModes) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceAlloydbSSLModesDisable := new(SourceAlloydbSSLModesDisable)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbSSLModesDisable); err == nil {
- u.SourceAlloydbSSLModesDisable = sourceAlloydbSSLModesDisable
- u.Type = SourceAlloydbSSLModesTypeSourceAlloydbSSLModesDisable
+
+ sourceAlloydbDisable := new(SourceAlloydbDisable)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbDisable, "", true, true); err == nil {
+ u.SourceAlloydbDisable = sourceAlloydbDisable
+ u.Type = SourceAlloydbSSLModesTypeSourceAlloydbDisable
return nil
}
- sourceAlloydbSSLModesAllow := new(SourceAlloydbSSLModesAllow)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbSSLModesAllow); err == nil {
- u.SourceAlloydbSSLModesAllow = sourceAlloydbSSLModesAllow
- u.Type = SourceAlloydbSSLModesTypeSourceAlloydbSSLModesAllow
+ sourceAlloydbAllow := new(SourceAlloydbAllow)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbAllow, "", true, true); err == nil {
+ u.SourceAlloydbAllow = sourceAlloydbAllow
+ u.Type = SourceAlloydbSSLModesTypeSourceAlloydbAllow
return nil
}
- sourceAlloydbSSLModesPrefer := new(SourceAlloydbSSLModesPrefer)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbSSLModesPrefer); err == nil {
- u.SourceAlloydbSSLModesPrefer = sourceAlloydbSSLModesPrefer
- u.Type = SourceAlloydbSSLModesTypeSourceAlloydbSSLModesPrefer
+ sourceAlloydbPrefer := new(SourceAlloydbPrefer)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbPrefer, "", true, true); err == nil {
+ u.SourceAlloydbPrefer = sourceAlloydbPrefer
+ u.Type = SourceAlloydbSSLModesTypeSourceAlloydbPrefer
return nil
}
- sourceAlloydbSSLModesRequire := new(SourceAlloydbSSLModesRequire)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbSSLModesRequire); err == nil {
- u.SourceAlloydbSSLModesRequire = sourceAlloydbSSLModesRequire
- u.Type = SourceAlloydbSSLModesTypeSourceAlloydbSSLModesRequire
+ sourceAlloydbRequire := new(SourceAlloydbRequire)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbRequire, "", true, true); err == nil {
+ u.SourceAlloydbRequire = sourceAlloydbRequire
+ u.Type = SourceAlloydbSSLModesTypeSourceAlloydbRequire
return nil
}
- sourceAlloydbSSLModesVerifyCa := new(SourceAlloydbSSLModesVerifyCa)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbSSLModesVerifyCa); err == nil {
- u.SourceAlloydbSSLModesVerifyCa = sourceAlloydbSSLModesVerifyCa
- u.Type = SourceAlloydbSSLModesTypeSourceAlloydbSSLModesVerifyCa
+ sourceAlloydbVerifyCa := new(SourceAlloydbVerifyCa)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbVerifyCa, "", true, true); err == nil {
+ u.SourceAlloydbVerifyCa = sourceAlloydbVerifyCa
+ u.Type = SourceAlloydbSSLModesTypeSourceAlloydbVerifyCa
return nil
}
- sourceAlloydbSSLModesVerifyFull := new(SourceAlloydbSSLModesVerifyFull)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbSSLModesVerifyFull); err == nil {
- u.SourceAlloydbSSLModesVerifyFull = sourceAlloydbSSLModesVerifyFull
- u.Type = SourceAlloydbSSLModesTypeSourceAlloydbSSLModesVerifyFull
+ sourceAlloydbVerifyFull := new(SourceAlloydbVerifyFull)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbVerifyFull, "", true, true); err == nil {
+ u.SourceAlloydbVerifyFull = sourceAlloydbVerifyFull
+ u.Type = SourceAlloydbSSLModesTypeSourceAlloydbVerifyFull
return nil
}
@@ -940,212 +873,306 @@ func (u *SourceAlloydbSSLModes) UnmarshalJSON(data []byte) error {
}
func (u SourceAlloydbSSLModes) MarshalJSON() ([]byte, error) {
- if u.SourceAlloydbSSLModesDisable != nil {
- return json.Marshal(u.SourceAlloydbSSLModesDisable)
+ if u.SourceAlloydbDisable != nil {
+ return utils.MarshalJSON(u.SourceAlloydbDisable, "", true)
}
- if u.SourceAlloydbSSLModesAllow != nil {
- return json.Marshal(u.SourceAlloydbSSLModesAllow)
+ if u.SourceAlloydbAllow != nil {
+ return utils.MarshalJSON(u.SourceAlloydbAllow, "", true)
}
- if u.SourceAlloydbSSLModesPrefer != nil {
- return json.Marshal(u.SourceAlloydbSSLModesPrefer)
+ if u.SourceAlloydbPrefer != nil {
+ return utils.MarshalJSON(u.SourceAlloydbPrefer, "", true)
}
- if u.SourceAlloydbSSLModesRequire != nil {
- return json.Marshal(u.SourceAlloydbSSLModesRequire)
+ if u.SourceAlloydbRequire != nil {
+ return utils.MarshalJSON(u.SourceAlloydbRequire, "", true)
}
- if u.SourceAlloydbSSLModesVerifyCa != nil {
- return json.Marshal(u.SourceAlloydbSSLModesVerifyCa)
+ if u.SourceAlloydbVerifyCa != nil {
+ return utils.MarshalJSON(u.SourceAlloydbVerifyCa, "", true)
}
- if u.SourceAlloydbSSLModesVerifyFull != nil {
- return json.Marshal(u.SourceAlloydbSSLModesVerifyFull)
+ if u.SourceAlloydbVerifyFull != nil {
+ return utils.MarshalJSON(u.SourceAlloydbVerifyFull, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceAlloydbSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type SourceAlloydbSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// SourceAlloydbSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type SourceAlloydbSchemasTunnelMethodTunnelMethod string
const (
- SourceAlloydbSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth SourceAlloydbSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ SourceAlloydbSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourceAlloydbSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourceAlloydbSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *SourceAlloydbSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e SourceAlloydbSchemasTunnelMethodTunnelMethod) ToPointer() *SourceAlloydbSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *SourceAlloydbSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourceAlloydbSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = SourceAlloydbSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// SourceAlloydbSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceAlloydbSSHTunnelMethodPasswordAuthentication struct {
+// SourceAlloydbPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceAlloydbPasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourceAlloydbSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceAlloydbSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourceAlloydbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourceAlloydbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourceAlloydbPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAlloydbPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAlloydbPasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceAlloydbPasswordAuthentication) GetTunnelMethod() SourceAlloydbSchemasTunnelMethodTunnelMethod {
+ return SourceAlloydbSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourceAlloydbPasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceAlloydbPasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourceAlloydbPasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// SourceAlloydbSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type SourceAlloydbSchemasTunnelMethod string
const (
- SourceAlloydbSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth SourceAlloydbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ SourceAlloydbSchemasTunnelMethodSSHKeyAuth SourceAlloydbSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e SourceAlloydbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *SourceAlloydbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e SourceAlloydbSchemasTunnelMethod) ToPointer() *SourceAlloydbSchemasTunnelMethod {
return &e
}
-func (e *SourceAlloydbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourceAlloydbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = SourceAlloydbSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbSchemasTunnelMethod: %v", v)
}
}
-// SourceAlloydbSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceAlloydbSSHTunnelMethodSSHKeyAuthentication struct {
+// SourceAlloydbSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceAlloydbSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourceAlloydbSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceAlloydbSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourceAlloydbSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourceAlloydbSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourceAlloydbSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAlloydbSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAlloydbSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourceAlloydbSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceAlloydbSSHKeyAuthentication) GetTunnelMethod() SourceAlloydbSchemasTunnelMethod {
+ return SourceAlloydbSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourceAlloydbSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceAlloydbSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourceAlloydbTunnelMethod - No ssh tunnel needed to connect to database
+type SourceAlloydbTunnelMethod string
const (
- SourceAlloydbSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourceAlloydbSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourceAlloydbTunnelMethodNoTunnel SourceAlloydbTunnelMethod = "NO_TUNNEL"
)
-func (e SourceAlloydbSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourceAlloydbSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourceAlloydbTunnelMethod) ToPointer() *SourceAlloydbTunnelMethod {
return &e
}
-func (e *SourceAlloydbSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourceAlloydbSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourceAlloydbTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbTunnelMethod: %v", v)
}
}
-// SourceAlloydbSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceAlloydbSSHTunnelMethodNoTunnel struct {
+// SourceAlloydbNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceAlloydbNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourceAlloydbSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceAlloydbTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourceAlloydbNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAlloydbNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAlloydbNoTunnel) GetTunnelMethod() SourceAlloydbTunnelMethod {
+ return SourceAlloydbTunnelMethodNoTunnel
}
type SourceAlloydbSSHTunnelMethodType string
const (
- SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHTunnelMethodNoTunnel SourceAlloydbSSHTunnelMethodType = "source-alloydb_SSH Tunnel Method_No Tunnel"
- SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHTunnelMethodSSHKeyAuthentication SourceAlloydbSSHTunnelMethodType = "source-alloydb_SSH Tunnel Method_SSH Key Authentication"
- SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHTunnelMethodPasswordAuthentication SourceAlloydbSSHTunnelMethodType = "source-alloydb_SSH Tunnel Method_Password Authentication"
+ SourceAlloydbSSHTunnelMethodTypeSourceAlloydbNoTunnel SourceAlloydbSSHTunnelMethodType = "source-alloydb_No Tunnel"
+ SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHKeyAuthentication SourceAlloydbSSHTunnelMethodType = "source-alloydb_SSH Key Authentication"
+ SourceAlloydbSSHTunnelMethodTypeSourceAlloydbPasswordAuthentication SourceAlloydbSSHTunnelMethodType = "source-alloydb_Password Authentication"
)
type SourceAlloydbSSHTunnelMethod struct {
- SourceAlloydbSSHTunnelMethodNoTunnel *SourceAlloydbSSHTunnelMethodNoTunnel
- SourceAlloydbSSHTunnelMethodSSHKeyAuthentication *SourceAlloydbSSHTunnelMethodSSHKeyAuthentication
- SourceAlloydbSSHTunnelMethodPasswordAuthentication *SourceAlloydbSSHTunnelMethodPasswordAuthentication
+ SourceAlloydbNoTunnel *SourceAlloydbNoTunnel
+ SourceAlloydbSSHKeyAuthentication *SourceAlloydbSSHKeyAuthentication
+ SourceAlloydbPasswordAuthentication *SourceAlloydbPasswordAuthentication
Type SourceAlloydbSSHTunnelMethodType
}
-func CreateSourceAlloydbSSHTunnelMethodSourceAlloydbSSHTunnelMethodNoTunnel(sourceAlloydbSSHTunnelMethodNoTunnel SourceAlloydbSSHTunnelMethodNoTunnel) SourceAlloydbSSHTunnelMethod {
- typ := SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHTunnelMethodNoTunnel
+func CreateSourceAlloydbSSHTunnelMethodSourceAlloydbNoTunnel(sourceAlloydbNoTunnel SourceAlloydbNoTunnel) SourceAlloydbSSHTunnelMethod {
+ typ := SourceAlloydbSSHTunnelMethodTypeSourceAlloydbNoTunnel
return SourceAlloydbSSHTunnelMethod{
- SourceAlloydbSSHTunnelMethodNoTunnel: &sourceAlloydbSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourceAlloydbNoTunnel: &sourceAlloydbNoTunnel,
+ Type: typ,
}
}
-func CreateSourceAlloydbSSHTunnelMethodSourceAlloydbSSHTunnelMethodSSHKeyAuthentication(sourceAlloydbSSHTunnelMethodSSHKeyAuthentication SourceAlloydbSSHTunnelMethodSSHKeyAuthentication) SourceAlloydbSSHTunnelMethod {
- typ := SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHTunnelMethodSSHKeyAuthentication
+func CreateSourceAlloydbSSHTunnelMethodSourceAlloydbSSHKeyAuthentication(sourceAlloydbSSHKeyAuthentication SourceAlloydbSSHKeyAuthentication) SourceAlloydbSSHTunnelMethod {
+ typ := SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHKeyAuthentication
return SourceAlloydbSSHTunnelMethod{
- SourceAlloydbSSHTunnelMethodSSHKeyAuthentication: &sourceAlloydbSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ SourceAlloydbSSHKeyAuthentication: &sourceAlloydbSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateSourceAlloydbSSHTunnelMethodSourceAlloydbSSHTunnelMethodPasswordAuthentication(sourceAlloydbSSHTunnelMethodPasswordAuthentication SourceAlloydbSSHTunnelMethodPasswordAuthentication) SourceAlloydbSSHTunnelMethod {
- typ := SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHTunnelMethodPasswordAuthentication
+func CreateSourceAlloydbSSHTunnelMethodSourceAlloydbPasswordAuthentication(sourceAlloydbPasswordAuthentication SourceAlloydbPasswordAuthentication) SourceAlloydbSSHTunnelMethod {
+ typ := SourceAlloydbSSHTunnelMethodTypeSourceAlloydbPasswordAuthentication
return SourceAlloydbSSHTunnelMethod{
- SourceAlloydbSSHTunnelMethodPasswordAuthentication: &sourceAlloydbSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ SourceAlloydbPasswordAuthentication: &sourceAlloydbPasswordAuthentication,
+ Type: typ,
}
}
func (u *SourceAlloydbSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceAlloydbSSHTunnelMethodNoTunnel := new(SourceAlloydbSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbSSHTunnelMethodNoTunnel); err == nil {
- u.SourceAlloydbSSHTunnelMethodNoTunnel = sourceAlloydbSSHTunnelMethodNoTunnel
- u.Type = SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHTunnelMethodNoTunnel
+
+ sourceAlloydbNoTunnel := new(SourceAlloydbNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbNoTunnel, "", true, true); err == nil {
+ u.SourceAlloydbNoTunnel = sourceAlloydbNoTunnel
+ u.Type = SourceAlloydbSSHTunnelMethodTypeSourceAlloydbNoTunnel
return nil
}
- sourceAlloydbSSHTunnelMethodSSHKeyAuthentication := new(SourceAlloydbSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourceAlloydbSSHTunnelMethodSSHKeyAuthentication = sourceAlloydbSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHTunnelMethodSSHKeyAuthentication
+ sourceAlloydbSSHKeyAuthentication := new(SourceAlloydbSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceAlloydbSSHKeyAuthentication = sourceAlloydbSSHKeyAuthentication
+ u.Type = SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHKeyAuthentication
return nil
}
- sourceAlloydbSSHTunnelMethodPasswordAuthentication := new(SourceAlloydbSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourceAlloydbSSHTunnelMethodPasswordAuthentication = sourceAlloydbSSHTunnelMethodPasswordAuthentication
- u.Type = SourceAlloydbSSHTunnelMethodTypeSourceAlloydbSSHTunnelMethodPasswordAuthentication
+ sourceAlloydbPasswordAuthentication := new(SourceAlloydbPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbPasswordAuthentication, "", true, true); err == nil {
+ u.SourceAlloydbPasswordAuthentication = sourceAlloydbPasswordAuthentication
+ u.Type = SourceAlloydbSSHTunnelMethodTypeSourceAlloydbPasswordAuthentication
return nil
}
@@ -1153,19 +1180,19 @@ func (u *SourceAlloydbSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceAlloydbSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourceAlloydbSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourceAlloydbSSHTunnelMethodNoTunnel)
+ if u.SourceAlloydbNoTunnel != nil {
+ return utils.MarshalJSON(u.SourceAlloydbNoTunnel, "", true)
}
- if u.SourceAlloydbSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceAlloydbSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourceAlloydbSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceAlloydbSSHKeyAuthentication, "", true)
}
- if u.SourceAlloydbSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourceAlloydbSSHTunnelMethodPasswordAuthentication)
+ if u.SourceAlloydbPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceAlloydbPasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceAlloydb struct {
@@ -1178,12 +1205,12 @@ type SourceAlloydb struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5432" json:"port"`
// Replication method for extracting data from the database.
ReplicationMethod *SourceAlloydbReplicationMethod `json:"replication_method,omitempty"`
// The list of schemas (case sensitive) to sync from. Defaults to public.
- Schemas []string `json:"schemas,omitempty"`
- SourceType SourceAlloydbAlloydb `json:"sourceType"`
+ Schemas []string `json:"schemas,omitempty"`
+ sourceType Alloydb `const:"alloydb" json:"sourceType"`
// SSL connection modes.
// Read more in the docs.
SslMode *SourceAlloydbSSLModes `json:"ssl_mode,omitempty"`
@@ -1192,3 +1219,88 @@ type SourceAlloydb struct {
// Username to access the database.
Username string `json:"username"`
}
+
+func (s SourceAlloydb) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAlloydb) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAlloydb) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceAlloydb) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceAlloydb) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceAlloydb) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceAlloydb) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceAlloydb) GetReplicationMethod() *SourceAlloydbReplicationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicationMethod
+}
+
+func (o *SourceAlloydb) GetSchemas() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Schemas
+}
+
+func (o *SourceAlloydb) GetSourceType() Alloydb {
+ return AlloydbAlloydb
+}
+
+func (o *SourceAlloydb) GetSslMode() *SourceAlloydbSSLModes {
+ if o == nil {
+ return nil
+ }
+ return o.SslMode
+}
+
+func (o *SourceAlloydb) GetTunnelMethod() *SourceAlloydbSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourceAlloydb) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourcealloydbcreaterequest.go b/internal/sdk/pkg/models/shared/sourcealloydbcreaterequest.go
old mode 100755
new mode 100644
index 0b5fd040e..fe789086d
--- a/internal/sdk/pkg/models/shared/sourcealloydbcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcealloydbcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAlloydbCreateRequest struct {
Configuration SourceAlloydb `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAlloydbCreateRequest) GetConfiguration() SourceAlloydb {
+ if o == nil {
+ return SourceAlloydb{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAlloydbCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAlloydbCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAlloydbCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAlloydbCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcealloydbputrequest.go b/internal/sdk/pkg/models/shared/sourcealloydbputrequest.go
old mode 100755
new mode 100644
index 4fa998a42..5543cdd3e
--- a/internal/sdk/pkg/models/shared/sourcealloydbputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcealloydbputrequest.go
@@ -7,3 +7,24 @@ type SourceAlloydbPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAlloydbPutRequest) GetConfiguration() SourceAlloydbUpdate {
+ if o == nil {
+ return SourceAlloydbUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAlloydbPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAlloydbPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcealloydbupdate.go b/internal/sdk/pkg/models/shared/sourcealloydbupdate.go
old mode 100755
new mode 100644
index a385ff6d1..3f937db10
--- a/internal/sdk/pkg/models/shared/sourcealloydbupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcealloydbupdate.go
@@ -3,54 +3,69 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceAlloydbUpdateReplicationMethodStandardMethod string
+type SourceAlloydbUpdateSchemasReplicationMethodMethod string
const (
- SourceAlloydbUpdateReplicationMethodStandardMethodStandard SourceAlloydbUpdateReplicationMethodStandardMethod = "Standard"
+ SourceAlloydbUpdateSchemasReplicationMethodMethodStandard SourceAlloydbUpdateSchemasReplicationMethodMethod = "Standard"
)
-func (e SourceAlloydbUpdateReplicationMethodStandardMethod) ToPointer() *SourceAlloydbUpdateReplicationMethodStandardMethod {
+func (e SourceAlloydbUpdateSchemasReplicationMethodMethod) ToPointer() *SourceAlloydbUpdateSchemasReplicationMethodMethod {
return &e
}
-func (e *SourceAlloydbUpdateReplicationMethodStandardMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateSchemasReplicationMethodMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Standard":
- *e = SourceAlloydbUpdateReplicationMethodStandardMethod(v)
+ *e = SourceAlloydbUpdateSchemasReplicationMethodMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateReplicationMethodStandardMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateSchemasReplicationMethodMethod: %v", v)
}
}
-// SourceAlloydbUpdateReplicationMethodStandard - Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.
-type SourceAlloydbUpdateReplicationMethodStandard struct {
- Method SourceAlloydbUpdateReplicationMethodStandardMethod `json:"method"`
+// SourceAlloydbUpdateStandard - Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.
+type SourceAlloydbUpdateStandard struct {
+ method SourceAlloydbUpdateSchemasReplicationMethodMethod `const:"Standard" json:"method"`
}
-// SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour - Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-type SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour string
+func (s SourceAlloydbUpdateStandard) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAlloydbUpdateStandard) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAlloydbUpdateStandard) GetMethod() SourceAlloydbUpdateSchemasReplicationMethodMethod {
+ return SourceAlloydbUpdateSchemasReplicationMethodMethodStandard
+}
+
+// LSNCommitBehaviour - Determines when Airbyte should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
+type LSNCommitBehaviour string
const (
- SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviourWhileReadingData SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour = "While reading Data"
- SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviourAfterLoadingDataInTheDestination SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour = "After loading Data in the destination"
+ LSNCommitBehaviourWhileReadingData LSNCommitBehaviour = "While reading Data"
+ LSNCommitBehaviourAfterLoadingDataInTheDestination LSNCommitBehaviour = "After loading Data in the destination"
)
-func (e SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour) ToPointer() *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour {
+func (e LSNCommitBehaviour) ToPointer() *LSNCommitBehaviour {
return &e
}
-func (e *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour) UnmarshalJSON(data []byte) error {
+func (e *LSNCommitBehaviour) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -59,278 +74,300 @@ func (e *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehav
case "While reading Data":
fallthrough
case "After loading Data in the destination":
- *e = SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour(v)
+ *e = LSNCommitBehaviour(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour: %v", v)
+ return fmt.Errorf("invalid value for LSNCommitBehaviour: %v", v)
}
}
-type SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCMethod string
+type SourceAlloydbUpdateSchemasMethod string
const (
- SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCMethodCdc SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCMethod = "CDC"
+ SourceAlloydbUpdateSchemasMethodCdc SourceAlloydbUpdateSchemasMethod = "CDC"
)
-func (e SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCMethod) ToPointer() *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCMethod {
+func (e SourceAlloydbUpdateSchemasMethod) ToPointer() *SourceAlloydbUpdateSchemasMethod {
return &e
}
-func (e *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CDC":
- *e = SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCMethod(v)
+ *e = SourceAlloydbUpdateSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateSchemasMethod: %v", v)
}
}
-// SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPlugin - A logical decoding plugin installed on the PostgreSQL server.
-type SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPlugin string
+// Plugin - A logical decoding plugin installed on the PostgreSQL server.
+type Plugin string
const (
- SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPluginPgoutput SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPlugin = "pgoutput"
+ PluginPgoutput Plugin = "pgoutput"
)
-func (e SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPlugin) ToPointer() *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPlugin {
+func (e Plugin) ToPointer() *Plugin {
return &e
}
-func (e *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPlugin) UnmarshalJSON(data []byte) error {
+func (e *Plugin) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pgoutput":
- *e = SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPlugin(v)
+ *e = Plugin(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPlugin: %v", v)
+ return fmt.Errorf("invalid value for Plugin: %v", v)
}
}
-// SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC - Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.
-type SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC struct {
+// LogicalReplicationCDC - Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.
+type LogicalReplicationCDC struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
- InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"`
+ InitialWaitingSeconds *int64 `default:"300" json:"initial_waiting_seconds"`
// Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
- LsnCommitBehaviour *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour `json:"lsn_commit_behaviour,omitempty"`
- Method SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCMethod `json:"method"`
+ LsnCommitBehaviour *LSNCommitBehaviour `default:"After loading Data in the destination" json:"lsn_commit_behaviour"`
+ method SourceAlloydbUpdateSchemasMethod `const:"CDC" json:"method"`
// A logical decoding plugin installed on the PostgreSQL server.
- Plugin *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDCPlugin `json:"plugin,omitempty"`
+ Plugin *Plugin `default:"pgoutput" json:"plugin"`
// A Postgres publication used for consuming changes. Read about publications and replication identities.
Publication string `json:"publication"`
// The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
- QueueSize *int64 `json:"queue_size,omitempty"`
+ QueueSize *int64 `default:"10000" json:"queue_size"`
// A plugin logical replication slot. Read about replication slots.
ReplicationSlot string `json:"replication_slot"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC
-func (c *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC{}
+func (l LogicalReplicationCDC) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(l, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (l *LogicalReplicationCDC) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &l, "", false, true); err != nil {
return err
}
- *c = SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC(data)
+ return nil
+}
- additionalFields := make(map[string]interface{})
+func (o *LogicalReplicationCDC) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *LogicalReplicationCDC) GetInitialWaitingSeconds() *int64 {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "initial_waiting_seconds")
- delete(additionalFields, "lsn_commit_behaviour")
- delete(additionalFields, "method")
- delete(additionalFields, "plugin")
- delete(additionalFields, "publication")
- delete(additionalFields, "queue_size")
- delete(additionalFields, "replication_slot")
+ return o.InitialWaitingSeconds
+}
- c.AdditionalProperties = additionalFields
+func (o *LogicalReplicationCDC) GetLsnCommitBehaviour() *LSNCommitBehaviour {
+ if o == nil {
+ return nil
+ }
+ return o.LsnCommitBehaviour
+}
- return nil
+func (o *LogicalReplicationCDC) GetMethod() SourceAlloydbUpdateSchemasMethod {
+ return SourceAlloydbUpdateSchemasMethodCdc
}
-func (c SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC(c))
- if err != nil {
- return nil, err
+func (o *LogicalReplicationCDC) GetPlugin() *Plugin {
+ if o == nil {
+ return nil
}
+ return o.Plugin
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *LogicalReplicationCDC) GetPublication() string {
+ if o == nil {
+ return ""
}
+ return o.Publication
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *LogicalReplicationCDC) GetQueueSize() *int64 {
+ if o == nil {
+ return nil
}
+ return o.QueueSize
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *LogicalReplicationCDC) GetReplicationSlot() string {
+ if o == nil {
+ return ""
}
-
- return json.Marshal(out)
+ return o.ReplicationSlot
}
-type SourceAlloydbUpdateReplicationMethodStandardXminMethod string
+type SourceAlloydbUpdateMethod string
const (
- SourceAlloydbUpdateReplicationMethodStandardXminMethodXmin SourceAlloydbUpdateReplicationMethodStandardXminMethod = "Xmin"
+ SourceAlloydbUpdateMethodXmin SourceAlloydbUpdateMethod = "Xmin"
)
-func (e SourceAlloydbUpdateReplicationMethodStandardXminMethod) ToPointer() *SourceAlloydbUpdateReplicationMethodStandardXminMethod {
+func (e SourceAlloydbUpdateMethod) ToPointer() *SourceAlloydbUpdateMethod {
return &e
}
-func (e *SourceAlloydbUpdateReplicationMethodStandardXminMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Xmin":
- *e = SourceAlloydbUpdateReplicationMethodStandardXminMethod(v)
+ *e = SourceAlloydbUpdateMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateReplicationMethodStandardXminMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateMethod: %v", v)
+ }
+}
+
+// StandardXmin - Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.
+type StandardXmin struct {
+ method SourceAlloydbUpdateMethod `const:"Xmin" json:"method"`
+}
+
+func (s StandardXmin) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *StandardXmin) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceAlloydbUpdateReplicationMethodStandardXmin - Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.
-type SourceAlloydbUpdateReplicationMethodStandardXmin struct {
- Method SourceAlloydbUpdateReplicationMethodStandardXminMethod `json:"method"`
+func (o *StandardXmin) GetMethod() SourceAlloydbUpdateMethod {
+ return SourceAlloydbUpdateMethodXmin
}
-type SourceAlloydbUpdateReplicationMethodType string
+type ReplicationMethodType string
const (
- SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodStandardXmin SourceAlloydbUpdateReplicationMethodType = "source-alloydb-update_Replication Method_Standard (Xmin)"
- SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodLogicalReplicationCDC SourceAlloydbUpdateReplicationMethodType = "source-alloydb-update_Replication Method_Logical Replication (CDC)"
- SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodStandard SourceAlloydbUpdateReplicationMethodType = "source-alloydb-update_Replication Method_Standard"
+ ReplicationMethodTypeStandardXmin ReplicationMethodType = "Standard (Xmin)"
+ ReplicationMethodTypeLogicalReplicationCDC ReplicationMethodType = "Logical Replication (CDC)"
+ ReplicationMethodTypeSourceAlloydbUpdateStandard ReplicationMethodType = "source-alloydb-update_Standard"
)
-type SourceAlloydbUpdateReplicationMethod struct {
- SourceAlloydbUpdateReplicationMethodStandardXmin *SourceAlloydbUpdateReplicationMethodStandardXmin
- SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC *SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC
- SourceAlloydbUpdateReplicationMethodStandard *SourceAlloydbUpdateReplicationMethodStandard
+type ReplicationMethod struct {
+ StandardXmin *StandardXmin
+ LogicalReplicationCDC *LogicalReplicationCDC
+ SourceAlloydbUpdateStandard *SourceAlloydbUpdateStandard
- Type SourceAlloydbUpdateReplicationMethodType
+ Type ReplicationMethodType
}
-func CreateSourceAlloydbUpdateReplicationMethodSourceAlloydbUpdateReplicationMethodStandardXmin(sourceAlloydbUpdateReplicationMethodStandardXmin SourceAlloydbUpdateReplicationMethodStandardXmin) SourceAlloydbUpdateReplicationMethod {
- typ := SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodStandardXmin
+func CreateReplicationMethodStandardXmin(standardXmin StandardXmin) ReplicationMethod {
+ typ := ReplicationMethodTypeStandardXmin
- return SourceAlloydbUpdateReplicationMethod{
- SourceAlloydbUpdateReplicationMethodStandardXmin: &sourceAlloydbUpdateReplicationMethodStandardXmin,
- Type: typ,
+ return ReplicationMethod{
+ StandardXmin: &standardXmin,
+ Type: typ,
}
}
-func CreateSourceAlloydbUpdateReplicationMethodSourceAlloydbUpdateReplicationMethodLogicalReplicationCDC(sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC) SourceAlloydbUpdateReplicationMethod {
- typ := SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodLogicalReplicationCDC
+func CreateReplicationMethodLogicalReplicationCDC(logicalReplicationCDC LogicalReplicationCDC) ReplicationMethod {
+ typ := ReplicationMethodTypeLogicalReplicationCDC
- return SourceAlloydbUpdateReplicationMethod{
- SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC: &sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC,
- Type: typ,
+ return ReplicationMethod{
+ LogicalReplicationCDC: &logicalReplicationCDC,
+ Type: typ,
}
}
-func CreateSourceAlloydbUpdateReplicationMethodSourceAlloydbUpdateReplicationMethodStandard(sourceAlloydbUpdateReplicationMethodStandard SourceAlloydbUpdateReplicationMethodStandard) SourceAlloydbUpdateReplicationMethod {
- typ := SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodStandard
+func CreateReplicationMethodSourceAlloydbUpdateStandard(sourceAlloydbUpdateStandard SourceAlloydbUpdateStandard) ReplicationMethod {
+ typ := ReplicationMethodTypeSourceAlloydbUpdateStandard
- return SourceAlloydbUpdateReplicationMethod{
- SourceAlloydbUpdateReplicationMethodStandard: &sourceAlloydbUpdateReplicationMethodStandard,
- Type: typ,
+ return ReplicationMethod{
+ SourceAlloydbUpdateStandard: &sourceAlloydbUpdateStandard,
+ Type: typ,
}
}
-func (u *SourceAlloydbUpdateReplicationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *ReplicationMethod) UnmarshalJSON(data []byte) error {
- sourceAlloydbUpdateReplicationMethodStandardXmin := new(SourceAlloydbUpdateReplicationMethodStandardXmin)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateReplicationMethodStandardXmin); err == nil {
- u.SourceAlloydbUpdateReplicationMethodStandardXmin = sourceAlloydbUpdateReplicationMethodStandardXmin
- u.Type = SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodStandardXmin
+ standardXmin := new(StandardXmin)
+ if err := utils.UnmarshalJSON(data, &standardXmin, "", true, true); err == nil {
+ u.StandardXmin = standardXmin
+ u.Type = ReplicationMethodTypeStandardXmin
return nil
}
- sourceAlloydbUpdateReplicationMethodStandard := new(SourceAlloydbUpdateReplicationMethodStandard)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateReplicationMethodStandard); err == nil {
- u.SourceAlloydbUpdateReplicationMethodStandard = sourceAlloydbUpdateReplicationMethodStandard
- u.Type = SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodStandard
+ sourceAlloydbUpdateStandard := new(SourceAlloydbUpdateStandard)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbUpdateStandard, "", true, true); err == nil {
+ u.SourceAlloydbUpdateStandard = sourceAlloydbUpdateStandard
+ u.Type = ReplicationMethodTypeSourceAlloydbUpdateStandard
return nil
}
- sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC := new(SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC); err == nil {
- u.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC = sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC
- u.Type = SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodLogicalReplicationCDC
+ logicalReplicationCDC := new(LogicalReplicationCDC)
+ if err := utils.UnmarshalJSON(data, &logicalReplicationCDC, "", true, true); err == nil {
+ u.LogicalReplicationCDC = logicalReplicationCDC
+ u.Type = ReplicationMethodTypeLogicalReplicationCDC
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceAlloydbUpdateReplicationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceAlloydbUpdateReplicationMethodStandardXmin != nil {
- return json.Marshal(u.SourceAlloydbUpdateReplicationMethodStandardXmin)
+func (u ReplicationMethod) MarshalJSON() ([]byte, error) {
+ if u.StandardXmin != nil {
+ return utils.MarshalJSON(u.StandardXmin, "", true)
}
- if u.SourceAlloydbUpdateReplicationMethodStandard != nil {
- return json.Marshal(u.SourceAlloydbUpdateReplicationMethodStandard)
+ if u.LogicalReplicationCDC != nil {
+ return utils.MarshalJSON(u.LogicalReplicationCDC, "", true)
}
- if u.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC != nil {
- return json.Marshal(u.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC)
+ if u.SourceAlloydbUpdateStandard != nil {
+ return utils.MarshalJSON(u.SourceAlloydbUpdateStandard, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceAlloydbUpdateSSLModesVerifyFullMode string
+type SourceAlloydbUpdateSchemasSSLModeSSLModes6Mode string
const (
- SourceAlloydbUpdateSSLModesVerifyFullModeVerifyFull SourceAlloydbUpdateSSLModesVerifyFullMode = "verify-full"
+ SourceAlloydbUpdateSchemasSSLModeSSLModes6ModeVerifyFull SourceAlloydbUpdateSchemasSSLModeSSLModes6Mode = "verify-full"
)
-func (e SourceAlloydbUpdateSSLModesVerifyFullMode) ToPointer() *SourceAlloydbUpdateSSLModesVerifyFullMode {
+func (e SourceAlloydbUpdateSchemasSSLModeSSLModes6Mode) ToPointer() *SourceAlloydbUpdateSchemasSSLModeSSLModes6Mode {
return &e
}
-func (e *SourceAlloydbUpdateSSLModesVerifyFullMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateSchemasSSLModeSSLModes6Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-full":
- *e = SourceAlloydbUpdateSSLModesVerifyFullMode(v)
+ *e = SourceAlloydbUpdateSchemasSSLModeSSLModes6Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateSSLModesVerifyFullMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateSchemasSSLModeSSLModes6Mode: %v", v)
}
}
-// SourceAlloydbUpdateSSLModesVerifyFull - This is the most secure mode. Always require encryption and verifies the identity of the source database server.
-type SourceAlloydbUpdateSSLModesVerifyFull struct {
+// SourceAlloydbUpdateVerifyFull - This is the most secure mode. Always require encryption and verifies the identity of the source database server.
+type SourceAlloydbUpdateVerifyFull struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -338,86 +375,87 @@ type SourceAlloydbUpdateSSLModesVerifyFull struct {
// Client key
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourceAlloydbUpdateSSLModesVerifyFullMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode SourceAlloydbUpdateSchemasSSLModeSSLModes6Mode `const:"verify-full" json:"mode"`
}
-type _SourceAlloydbUpdateSSLModesVerifyFull SourceAlloydbUpdateSSLModesVerifyFull
-func (c *SourceAlloydbUpdateSSLModesVerifyFull) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbUpdateSSLModesVerifyFull{}
+func (s SourceAlloydbUpdateVerifyFull) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (s *SourceAlloydbUpdateVerifyFull) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- *c = SourceAlloydbUpdateSSLModesVerifyFull(data)
-
- additionalFields := make(map[string]interface{})
+ return nil
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *SourceAlloydbUpdateVerifyFull) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "ca_certificate")
- delete(additionalFields, "client_certificate")
- delete(additionalFields, "client_key")
- delete(additionalFields, "client_key_password")
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
- return nil
+ return o.AdditionalProperties
}
-func (c SourceAlloydbUpdateSSLModesVerifyFull) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbUpdateSSLModesVerifyFull(c))
- if err != nil {
- return nil, err
+func (o *SourceAlloydbUpdateVerifyFull) GetCaCertificate() string {
+ if o == nil {
+ return ""
}
+ return o.CaCertificate
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbUpdateVerifyFull) GetClientCertificate() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientCertificate
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceAlloydbUpdateVerifyFull) GetClientKey() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKey
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbUpdateVerifyFull) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKeyPassword
+}
- return json.Marshal(out)
+func (o *SourceAlloydbUpdateVerifyFull) GetMode() SourceAlloydbUpdateSchemasSSLModeSSLModes6Mode {
+ return SourceAlloydbUpdateSchemasSSLModeSSLModes6ModeVerifyFull
}
-type SourceAlloydbUpdateSSLModesVerifyCaMode string
+type SourceAlloydbUpdateSchemasSSLModeSSLModes5Mode string
const (
- SourceAlloydbUpdateSSLModesVerifyCaModeVerifyCa SourceAlloydbUpdateSSLModesVerifyCaMode = "verify-ca"
+ SourceAlloydbUpdateSchemasSSLModeSSLModes5ModeVerifyCa SourceAlloydbUpdateSchemasSSLModeSSLModes5Mode = "verify-ca"
)
-func (e SourceAlloydbUpdateSSLModesVerifyCaMode) ToPointer() *SourceAlloydbUpdateSSLModesVerifyCaMode {
+func (e SourceAlloydbUpdateSchemasSSLModeSSLModes5Mode) ToPointer() *SourceAlloydbUpdateSchemasSSLModeSSLModes5Mode {
return &e
}
-func (e *SourceAlloydbUpdateSSLModesVerifyCaMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateSchemasSSLModeSSLModes5Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-ca":
- *e = SourceAlloydbUpdateSSLModesVerifyCaMode(v)
+ *e = SourceAlloydbUpdateSchemasSSLModeSSLModes5Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateSSLModesVerifyCaMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateSchemasSSLModeSSLModes5Mode: %v", v)
}
}
-// SourceAlloydbUpdateSSLModesVerifyCa - Always require encryption and verifies that the source database server has a valid SSL certificate.
-type SourceAlloydbUpdateSSLModesVerifyCa struct {
+// SourceAlloydbUpdateVerifyCa - Always require encryption and verifies that the source database server has a valid SSL certificate.
+type SourceAlloydbUpdateVerifyCa struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -425,490 +463,385 @@ type SourceAlloydbUpdateSSLModesVerifyCa struct {
// Client key
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourceAlloydbUpdateSSLModesVerifyCaMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode SourceAlloydbUpdateSchemasSSLModeSSLModes5Mode `const:"verify-ca" json:"mode"`
}
-type _SourceAlloydbUpdateSSLModesVerifyCa SourceAlloydbUpdateSSLModesVerifyCa
-func (c *SourceAlloydbUpdateSSLModesVerifyCa) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbUpdateSSLModesVerifyCa{}
+func (s SourceAlloydbUpdateVerifyCa) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (s *SourceAlloydbUpdateVerifyCa) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- *c = SourceAlloydbUpdateSSLModesVerifyCa(data)
-
- additionalFields := make(map[string]interface{})
+ return nil
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *SourceAlloydbUpdateVerifyCa) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "ca_certificate")
- delete(additionalFields, "client_certificate")
- delete(additionalFields, "client_key")
- delete(additionalFields, "client_key_password")
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
- return nil
+ return o.AdditionalProperties
}
-func (c SourceAlloydbUpdateSSLModesVerifyCa) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbUpdateSSLModesVerifyCa(c))
- if err != nil {
- return nil, err
+func (o *SourceAlloydbUpdateVerifyCa) GetCaCertificate() string {
+ if o == nil {
+ return ""
}
+ return o.CaCertificate
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbUpdateVerifyCa) GetClientCertificate() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientCertificate
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceAlloydbUpdateVerifyCa) GetClientKey() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKey
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbUpdateVerifyCa) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKeyPassword
+}
- return json.Marshal(out)
+func (o *SourceAlloydbUpdateVerifyCa) GetMode() SourceAlloydbUpdateSchemasSSLModeSSLModes5Mode {
+ return SourceAlloydbUpdateSchemasSSLModeSSLModes5ModeVerifyCa
}
-type SourceAlloydbUpdateSSLModesRequireMode string
+type SourceAlloydbUpdateSchemasSSLModeSSLModesMode string
const (
- SourceAlloydbUpdateSSLModesRequireModeRequire SourceAlloydbUpdateSSLModesRequireMode = "require"
+ SourceAlloydbUpdateSchemasSSLModeSSLModesModeRequire SourceAlloydbUpdateSchemasSSLModeSSLModesMode = "require"
)
-func (e SourceAlloydbUpdateSSLModesRequireMode) ToPointer() *SourceAlloydbUpdateSSLModesRequireMode {
+func (e SourceAlloydbUpdateSchemasSSLModeSSLModesMode) ToPointer() *SourceAlloydbUpdateSchemasSSLModeSSLModesMode {
return &e
}
-func (e *SourceAlloydbUpdateSSLModesRequireMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateSchemasSSLModeSSLModesMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "require":
- *e = SourceAlloydbUpdateSSLModesRequireMode(v)
+ *e = SourceAlloydbUpdateSchemasSSLModeSSLModesMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateSSLModesRequireMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateSchemasSSLModeSSLModesMode: %v", v)
}
}
-// SourceAlloydbUpdateSSLModesRequire - Always require encryption. If the source database server does not support encryption, connection will fail.
-type SourceAlloydbUpdateSSLModesRequire struct {
- Mode SourceAlloydbUpdateSSLModesRequireMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourceAlloydbUpdateRequire - Always require encryption. If the source database server does not support encryption, connection will fail.
+type SourceAlloydbUpdateRequire struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourceAlloydbUpdateSchemasSSLModeSSLModesMode `const:"require" json:"mode"`
}
-type _SourceAlloydbUpdateSSLModesRequire SourceAlloydbUpdateSSLModesRequire
-
-func (c *SourceAlloydbUpdateSSLModesRequire) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbUpdateSSLModesRequire{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceAlloydbUpdateSSLModesRequire(data)
- additionalFields := make(map[string]interface{})
+func (s SourceAlloydbUpdateRequire) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceAlloydbUpdateRequire) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceAlloydbUpdateSSLModesRequire) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbUpdateSSLModesRequire(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbUpdateRequire) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourceAlloydbUpdateRequire) GetMode() SourceAlloydbUpdateSchemasSSLModeSSLModesMode {
+ return SourceAlloydbUpdateSchemasSSLModeSSLModesModeRequire
}
-type SourceAlloydbUpdateSSLModesPreferMode string
+type SourceAlloydbUpdateSchemasSslModeMode string
const (
- SourceAlloydbUpdateSSLModesPreferModePrefer SourceAlloydbUpdateSSLModesPreferMode = "prefer"
+ SourceAlloydbUpdateSchemasSslModeModePrefer SourceAlloydbUpdateSchemasSslModeMode = "prefer"
)
-func (e SourceAlloydbUpdateSSLModesPreferMode) ToPointer() *SourceAlloydbUpdateSSLModesPreferMode {
+func (e SourceAlloydbUpdateSchemasSslModeMode) ToPointer() *SourceAlloydbUpdateSchemasSslModeMode {
return &e
}
-func (e *SourceAlloydbUpdateSSLModesPreferMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateSchemasSslModeMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "prefer":
- *e = SourceAlloydbUpdateSSLModesPreferMode(v)
+ *e = SourceAlloydbUpdateSchemasSslModeMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateSSLModesPreferMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateSchemasSslModeMode: %v", v)
}
}
-// SourceAlloydbUpdateSSLModesPrefer - Allows unencrypted connection only if the source database does not support encryption.
-type SourceAlloydbUpdateSSLModesPrefer struct {
- Mode SourceAlloydbUpdateSSLModesPreferMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourceAlloydbUpdatePrefer - Allows unencrypted connection only if the source database does not support encryption.
+type SourceAlloydbUpdatePrefer struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourceAlloydbUpdateSchemasSslModeMode `const:"prefer" json:"mode"`
}
-type _SourceAlloydbUpdateSSLModesPrefer SourceAlloydbUpdateSSLModesPrefer
-
-func (c *SourceAlloydbUpdateSSLModesPrefer) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbUpdateSSLModesPrefer{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceAlloydbUpdateSSLModesPrefer(data)
- additionalFields := make(map[string]interface{})
+func (s SourceAlloydbUpdatePrefer) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceAlloydbUpdatePrefer) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceAlloydbUpdateSSLModesPrefer) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbUpdateSSLModesPrefer(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbUpdatePrefer) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourceAlloydbUpdatePrefer) GetMode() SourceAlloydbUpdateSchemasSslModeMode {
+ return SourceAlloydbUpdateSchemasSslModeModePrefer
}
-type SourceAlloydbUpdateSSLModesAllowMode string
+type SourceAlloydbUpdateSchemasMode string
const (
- SourceAlloydbUpdateSSLModesAllowModeAllow SourceAlloydbUpdateSSLModesAllowMode = "allow"
+ SourceAlloydbUpdateSchemasModeAllow SourceAlloydbUpdateSchemasMode = "allow"
)
-func (e SourceAlloydbUpdateSSLModesAllowMode) ToPointer() *SourceAlloydbUpdateSSLModesAllowMode {
+func (e SourceAlloydbUpdateSchemasMode) ToPointer() *SourceAlloydbUpdateSchemasMode {
return &e
}
-func (e *SourceAlloydbUpdateSSLModesAllowMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "allow":
- *e = SourceAlloydbUpdateSSLModesAllowMode(v)
+ *e = SourceAlloydbUpdateSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateSSLModesAllowMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateSchemasMode: %v", v)
}
}
-// SourceAlloydbUpdateSSLModesAllow - Enables encryption only when required by the source database.
-type SourceAlloydbUpdateSSLModesAllow struct {
- Mode SourceAlloydbUpdateSSLModesAllowMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourceAlloydbUpdateAllow - Enables encryption only when required by the source database.
+type SourceAlloydbUpdateAllow struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourceAlloydbUpdateSchemasMode `const:"allow" json:"mode"`
}
-type _SourceAlloydbUpdateSSLModesAllow SourceAlloydbUpdateSSLModesAllow
-
-func (c *SourceAlloydbUpdateSSLModesAllow) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbUpdateSSLModesAllow{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceAlloydbUpdateSSLModesAllow(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceAlloydbUpdateAllow) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceAlloydbUpdateAllow) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceAlloydbUpdateSSLModesAllow) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbUpdateSSLModesAllow(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbUpdateAllow) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourceAlloydbUpdateAllow) GetMode() SourceAlloydbUpdateSchemasMode {
+ return SourceAlloydbUpdateSchemasModeAllow
}
-type SourceAlloydbUpdateSSLModesDisableMode string
+type SourceAlloydbUpdateMode string
const (
- SourceAlloydbUpdateSSLModesDisableModeDisable SourceAlloydbUpdateSSLModesDisableMode = "disable"
+ SourceAlloydbUpdateModeDisable SourceAlloydbUpdateMode = "disable"
)
-func (e SourceAlloydbUpdateSSLModesDisableMode) ToPointer() *SourceAlloydbUpdateSSLModesDisableMode {
+func (e SourceAlloydbUpdateMode) ToPointer() *SourceAlloydbUpdateMode {
return &e
}
-func (e *SourceAlloydbUpdateSSLModesDisableMode) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "disable":
- *e = SourceAlloydbUpdateSSLModesDisableMode(v)
+ *e = SourceAlloydbUpdateMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateSSLModesDisableMode: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateMode: %v", v)
}
}
-// SourceAlloydbUpdateSSLModesDisable - Disables encryption of communication between Airbyte and source database.
-type SourceAlloydbUpdateSSLModesDisable struct {
- Mode SourceAlloydbUpdateSSLModesDisableMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourceAlloydbUpdateDisable - Disables encryption of communication between Airbyte and source database.
+type SourceAlloydbUpdateDisable struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourceAlloydbUpdateMode `const:"disable" json:"mode"`
}
-type _SourceAlloydbUpdateSSLModesDisable SourceAlloydbUpdateSSLModesDisable
-
-func (c *SourceAlloydbUpdateSSLModesDisable) UnmarshalJSON(bs []byte) error {
- data := _SourceAlloydbUpdateSSLModesDisable{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceAlloydbUpdateSSLModesDisable(data)
- additionalFields := make(map[string]interface{})
+func (s SourceAlloydbUpdateDisable) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceAlloydbUpdateDisable) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceAlloydbUpdateSSLModesDisable) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceAlloydbUpdateSSLModesDisable(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceAlloydbUpdateDisable) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourceAlloydbUpdateDisable) GetMode() SourceAlloydbUpdateMode {
+ return SourceAlloydbUpdateModeDisable
}
type SourceAlloydbUpdateSSLModesType string
const (
- SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesDisable SourceAlloydbUpdateSSLModesType = "source-alloydb-update_SSL Modes_disable"
- SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesAllow SourceAlloydbUpdateSSLModesType = "source-alloydb-update_SSL Modes_allow"
- SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesPrefer SourceAlloydbUpdateSSLModesType = "source-alloydb-update_SSL Modes_prefer"
- SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesRequire SourceAlloydbUpdateSSLModesType = "source-alloydb-update_SSL Modes_require"
- SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesVerifyCa SourceAlloydbUpdateSSLModesType = "source-alloydb-update_SSL Modes_verify-ca"
- SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesVerifyFull SourceAlloydbUpdateSSLModesType = "source-alloydb-update_SSL Modes_verify-full"
+ SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateDisable SourceAlloydbUpdateSSLModesType = "source-alloydb-update_disable"
+ SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateAllow SourceAlloydbUpdateSSLModesType = "source-alloydb-update_allow"
+ SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdatePrefer SourceAlloydbUpdateSSLModesType = "source-alloydb-update_prefer"
+ SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateRequire SourceAlloydbUpdateSSLModesType = "source-alloydb-update_require"
+ SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateVerifyCa SourceAlloydbUpdateSSLModesType = "source-alloydb-update_verify-ca"
+ SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateVerifyFull SourceAlloydbUpdateSSLModesType = "source-alloydb-update_verify-full"
)
type SourceAlloydbUpdateSSLModes struct {
- SourceAlloydbUpdateSSLModesDisable *SourceAlloydbUpdateSSLModesDisable
- SourceAlloydbUpdateSSLModesAllow *SourceAlloydbUpdateSSLModesAllow
- SourceAlloydbUpdateSSLModesPrefer *SourceAlloydbUpdateSSLModesPrefer
- SourceAlloydbUpdateSSLModesRequire *SourceAlloydbUpdateSSLModesRequire
- SourceAlloydbUpdateSSLModesVerifyCa *SourceAlloydbUpdateSSLModesVerifyCa
- SourceAlloydbUpdateSSLModesVerifyFull *SourceAlloydbUpdateSSLModesVerifyFull
+ SourceAlloydbUpdateDisable *SourceAlloydbUpdateDisable
+ SourceAlloydbUpdateAllow *SourceAlloydbUpdateAllow
+ SourceAlloydbUpdatePrefer *SourceAlloydbUpdatePrefer
+ SourceAlloydbUpdateRequire *SourceAlloydbUpdateRequire
+ SourceAlloydbUpdateVerifyCa *SourceAlloydbUpdateVerifyCa
+ SourceAlloydbUpdateVerifyFull *SourceAlloydbUpdateVerifyFull
Type SourceAlloydbUpdateSSLModesType
}
-func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdateSSLModesDisable(sourceAlloydbUpdateSSLModesDisable SourceAlloydbUpdateSSLModesDisable) SourceAlloydbUpdateSSLModes {
- typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesDisable
+func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdateDisable(sourceAlloydbUpdateDisable SourceAlloydbUpdateDisable) SourceAlloydbUpdateSSLModes {
+ typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateDisable
return SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesDisable: &sourceAlloydbUpdateSSLModesDisable,
- Type: typ,
+ SourceAlloydbUpdateDisable: &sourceAlloydbUpdateDisable,
+ Type: typ,
}
}
-func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdateSSLModesAllow(sourceAlloydbUpdateSSLModesAllow SourceAlloydbUpdateSSLModesAllow) SourceAlloydbUpdateSSLModes {
- typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesAllow
+func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdateAllow(sourceAlloydbUpdateAllow SourceAlloydbUpdateAllow) SourceAlloydbUpdateSSLModes {
+ typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateAllow
return SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesAllow: &sourceAlloydbUpdateSSLModesAllow,
- Type: typ,
+ SourceAlloydbUpdateAllow: &sourceAlloydbUpdateAllow,
+ Type: typ,
}
}
-func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdateSSLModesPrefer(sourceAlloydbUpdateSSLModesPrefer SourceAlloydbUpdateSSLModesPrefer) SourceAlloydbUpdateSSLModes {
- typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesPrefer
+func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdatePrefer(sourceAlloydbUpdatePrefer SourceAlloydbUpdatePrefer) SourceAlloydbUpdateSSLModes {
+ typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdatePrefer
return SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesPrefer: &sourceAlloydbUpdateSSLModesPrefer,
- Type: typ,
+ SourceAlloydbUpdatePrefer: &sourceAlloydbUpdatePrefer,
+ Type: typ,
}
}
-func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdateSSLModesRequire(sourceAlloydbUpdateSSLModesRequire SourceAlloydbUpdateSSLModesRequire) SourceAlloydbUpdateSSLModes {
- typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesRequire
+func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdateRequire(sourceAlloydbUpdateRequire SourceAlloydbUpdateRequire) SourceAlloydbUpdateSSLModes {
+ typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateRequire
return SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesRequire: &sourceAlloydbUpdateSSLModesRequire,
- Type: typ,
+ SourceAlloydbUpdateRequire: &sourceAlloydbUpdateRequire,
+ Type: typ,
}
}
-func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdateSSLModesVerifyCa(sourceAlloydbUpdateSSLModesVerifyCa SourceAlloydbUpdateSSLModesVerifyCa) SourceAlloydbUpdateSSLModes {
- typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesVerifyCa
+func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdateVerifyCa(sourceAlloydbUpdateVerifyCa SourceAlloydbUpdateVerifyCa) SourceAlloydbUpdateSSLModes {
+ typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateVerifyCa
return SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesVerifyCa: &sourceAlloydbUpdateSSLModesVerifyCa,
- Type: typ,
+ SourceAlloydbUpdateVerifyCa: &sourceAlloydbUpdateVerifyCa,
+ Type: typ,
}
}
-func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdateSSLModesVerifyFull(sourceAlloydbUpdateSSLModesVerifyFull SourceAlloydbUpdateSSLModesVerifyFull) SourceAlloydbUpdateSSLModes {
- typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesVerifyFull
+func CreateSourceAlloydbUpdateSSLModesSourceAlloydbUpdateVerifyFull(sourceAlloydbUpdateVerifyFull SourceAlloydbUpdateVerifyFull) SourceAlloydbUpdateSSLModes {
+ typ := SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateVerifyFull
return SourceAlloydbUpdateSSLModes{
- SourceAlloydbUpdateSSLModesVerifyFull: &sourceAlloydbUpdateSSLModesVerifyFull,
- Type: typ,
+ SourceAlloydbUpdateVerifyFull: &sourceAlloydbUpdateVerifyFull,
+ Type: typ,
}
}
func (u *SourceAlloydbUpdateSSLModes) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceAlloydbUpdateSSLModesDisable := new(SourceAlloydbUpdateSSLModesDisable)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateSSLModesDisable); err == nil {
- u.SourceAlloydbUpdateSSLModesDisable = sourceAlloydbUpdateSSLModesDisable
- u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesDisable
+
+ sourceAlloydbUpdateDisable := new(SourceAlloydbUpdateDisable)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbUpdateDisable, "", true, true); err == nil {
+ u.SourceAlloydbUpdateDisable = sourceAlloydbUpdateDisable
+ u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateDisable
return nil
}
- sourceAlloydbUpdateSSLModesAllow := new(SourceAlloydbUpdateSSLModesAllow)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateSSLModesAllow); err == nil {
- u.SourceAlloydbUpdateSSLModesAllow = sourceAlloydbUpdateSSLModesAllow
- u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesAllow
+ sourceAlloydbUpdateAllow := new(SourceAlloydbUpdateAllow)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbUpdateAllow, "", true, true); err == nil {
+ u.SourceAlloydbUpdateAllow = sourceAlloydbUpdateAllow
+ u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateAllow
return nil
}
- sourceAlloydbUpdateSSLModesPrefer := new(SourceAlloydbUpdateSSLModesPrefer)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateSSLModesPrefer); err == nil {
- u.SourceAlloydbUpdateSSLModesPrefer = sourceAlloydbUpdateSSLModesPrefer
- u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesPrefer
+ sourceAlloydbUpdatePrefer := new(SourceAlloydbUpdatePrefer)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbUpdatePrefer, "", true, true); err == nil {
+ u.SourceAlloydbUpdatePrefer = sourceAlloydbUpdatePrefer
+ u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdatePrefer
return nil
}
- sourceAlloydbUpdateSSLModesRequire := new(SourceAlloydbUpdateSSLModesRequire)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateSSLModesRequire); err == nil {
- u.SourceAlloydbUpdateSSLModesRequire = sourceAlloydbUpdateSSLModesRequire
- u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesRequire
+ sourceAlloydbUpdateRequire := new(SourceAlloydbUpdateRequire)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbUpdateRequire, "", true, true); err == nil {
+ u.SourceAlloydbUpdateRequire = sourceAlloydbUpdateRequire
+ u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateRequire
return nil
}
- sourceAlloydbUpdateSSLModesVerifyCa := new(SourceAlloydbUpdateSSLModesVerifyCa)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateSSLModesVerifyCa); err == nil {
- u.SourceAlloydbUpdateSSLModesVerifyCa = sourceAlloydbUpdateSSLModesVerifyCa
- u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesVerifyCa
+ sourceAlloydbUpdateVerifyCa := new(SourceAlloydbUpdateVerifyCa)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbUpdateVerifyCa, "", true, true); err == nil {
+ u.SourceAlloydbUpdateVerifyCa = sourceAlloydbUpdateVerifyCa
+ u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateVerifyCa
return nil
}
- sourceAlloydbUpdateSSLModesVerifyFull := new(SourceAlloydbUpdateSSLModesVerifyFull)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateSSLModesVerifyFull); err == nil {
- u.SourceAlloydbUpdateSSLModesVerifyFull = sourceAlloydbUpdateSSLModesVerifyFull
- u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateSSLModesVerifyFull
+ sourceAlloydbUpdateVerifyFull := new(SourceAlloydbUpdateVerifyFull)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbUpdateVerifyFull, "", true, true); err == nil {
+ u.SourceAlloydbUpdateVerifyFull = sourceAlloydbUpdateVerifyFull
+ u.Type = SourceAlloydbUpdateSSLModesTypeSourceAlloydbUpdateVerifyFull
return nil
}
@@ -916,212 +849,306 @@ func (u *SourceAlloydbUpdateSSLModes) UnmarshalJSON(data []byte) error {
}
func (u SourceAlloydbUpdateSSLModes) MarshalJSON() ([]byte, error) {
- if u.SourceAlloydbUpdateSSLModesDisable != nil {
- return json.Marshal(u.SourceAlloydbUpdateSSLModesDisable)
+ if u.SourceAlloydbUpdateDisable != nil {
+ return utils.MarshalJSON(u.SourceAlloydbUpdateDisable, "", true)
}
- if u.SourceAlloydbUpdateSSLModesAllow != nil {
- return json.Marshal(u.SourceAlloydbUpdateSSLModesAllow)
+ if u.SourceAlloydbUpdateAllow != nil {
+ return utils.MarshalJSON(u.SourceAlloydbUpdateAllow, "", true)
}
- if u.SourceAlloydbUpdateSSLModesPrefer != nil {
- return json.Marshal(u.SourceAlloydbUpdateSSLModesPrefer)
+ if u.SourceAlloydbUpdatePrefer != nil {
+ return utils.MarshalJSON(u.SourceAlloydbUpdatePrefer, "", true)
}
- if u.SourceAlloydbUpdateSSLModesRequire != nil {
- return json.Marshal(u.SourceAlloydbUpdateSSLModesRequire)
+ if u.SourceAlloydbUpdateRequire != nil {
+ return utils.MarshalJSON(u.SourceAlloydbUpdateRequire, "", true)
}
- if u.SourceAlloydbUpdateSSLModesVerifyCa != nil {
- return json.Marshal(u.SourceAlloydbUpdateSSLModesVerifyCa)
+ if u.SourceAlloydbUpdateVerifyCa != nil {
+ return utils.MarshalJSON(u.SourceAlloydbUpdateVerifyCa, "", true)
}
- if u.SourceAlloydbUpdateSSLModesVerifyFull != nil {
- return json.Marshal(u.SourceAlloydbUpdateSSLModesVerifyFull)
+ if u.SourceAlloydbUpdateVerifyFull != nil {
+ return utils.MarshalJSON(u.SourceAlloydbUpdateVerifyFull, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceAlloydbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type SourceAlloydbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// SourceAlloydbUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type SourceAlloydbUpdateSchemasTunnelMethodTunnelMethod string
const (
- SourceAlloydbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth SourceAlloydbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ SourceAlloydbUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourceAlloydbUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourceAlloydbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *SourceAlloydbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e SourceAlloydbUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *SourceAlloydbUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *SourceAlloydbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourceAlloydbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = SourceAlloydbUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication struct {
+// SourceAlloydbUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceAlloydbUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourceAlloydbUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceAlloydbUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourceAlloydbUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAlloydbUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAlloydbUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceAlloydbUpdatePasswordAuthentication) GetTunnelMethod() SourceAlloydbUpdateSchemasTunnelMethodTunnelMethod {
+ return SourceAlloydbUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourceAlloydbUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceAlloydbUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourceAlloydbUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// SourceAlloydbUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type SourceAlloydbUpdateSchemasTunnelMethod string
const (
- SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ SourceAlloydbUpdateSchemasTunnelMethodSSHKeyAuth SourceAlloydbUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e SourceAlloydbUpdateSchemasTunnelMethod) ToPointer() *SourceAlloydbUpdateSchemasTunnelMethod {
return &e
}
-func (e *SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = SourceAlloydbUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateSchemasTunnelMethod: %v", v)
}
}
-// SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// SourceAlloydbUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceAlloydbUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceAlloydbUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourceAlloydbUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourceAlloydbUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourceAlloydbUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAlloydbUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAlloydbUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourceAlloydbUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceAlloydbUpdateSSHKeyAuthentication) GetTunnelMethod() SourceAlloydbUpdateSchemasTunnelMethod {
+ return SourceAlloydbUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourceAlloydbUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceAlloydbUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourceAlloydbUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type SourceAlloydbUpdateTunnelMethod string
const (
- SourceAlloydbUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourceAlloydbUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourceAlloydbUpdateTunnelMethodNoTunnel SourceAlloydbUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e SourceAlloydbUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourceAlloydbUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourceAlloydbUpdateTunnelMethod) ToPointer() *SourceAlloydbUpdateTunnelMethod {
return &e
}
-func (e *SourceAlloydbUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAlloydbUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourceAlloydbUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourceAlloydbUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAlloydbUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAlloydbUpdateTunnelMethod: %v", v)
}
}
-// SourceAlloydbUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceAlloydbUpdateSSHTunnelMethodNoTunnel struct {
+// SourceAlloydbUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceAlloydbUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourceAlloydbUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceAlloydbUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourceAlloydbUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAlloydbUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAlloydbUpdateNoTunnel) GetTunnelMethod() SourceAlloydbUpdateTunnelMethod {
+ return SourceAlloydbUpdateTunnelMethodNoTunnel
}
type SourceAlloydbUpdateSSHTunnelMethodType string
const (
- SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHTunnelMethodNoTunnel SourceAlloydbUpdateSSHTunnelMethodType = "source-alloydb-update_SSH Tunnel Method_No Tunnel"
- SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication SourceAlloydbUpdateSSHTunnelMethodType = "source-alloydb-update_SSH Tunnel Method_SSH Key Authentication"
- SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication SourceAlloydbUpdateSSHTunnelMethodType = "source-alloydb-update_SSH Tunnel Method_Password Authentication"
+ SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateNoTunnel SourceAlloydbUpdateSSHTunnelMethodType = "source-alloydb-update_No Tunnel"
+ SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHKeyAuthentication SourceAlloydbUpdateSSHTunnelMethodType = "source-alloydb-update_SSH Key Authentication"
+ SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdatePasswordAuthentication SourceAlloydbUpdateSSHTunnelMethodType = "source-alloydb-update_Password Authentication"
)
type SourceAlloydbUpdateSSHTunnelMethod struct {
- SourceAlloydbUpdateSSHTunnelMethodNoTunnel *SourceAlloydbUpdateSSHTunnelMethodNoTunnel
- SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication *SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication
- SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication *SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication
+ SourceAlloydbUpdateNoTunnel *SourceAlloydbUpdateNoTunnel
+ SourceAlloydbUpdateSSHKeyAuthentication *SourceAlloydbUpdateSSHKeyAuthentication
+ SourceAlloydbUpdatePasswordAuthentication *SourceAlloydbUpdatePasswordAuthentication
Type SourceAlloydbUpdateSSHTunnelMethodType
}
-func CreateSourceAlloydbUpdateSSHTunnelMethodSourceAlloydbUpdateSSHTunnelMethodNoTunnel(sourceAlloydbUpdateSSHTunnelMethodNoTunnel SourceAlloydbUpdateSSHTunnelMethodNoTunnel) SourceAlloydbUpdateSSHTunnelMethod {
- typ := SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHTunnelMethodNoTunnel
+func CreateSourceAlloydbUpdateSSHTunnelMethodSourceAlloydbUpdateNoTunnel(sourceAlloydbUpdateNoTunnel SourceAlloydbUpdateNoTunnel) SourceAlloydbUpdateSSHTunnelMethod {
+ typ := SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateNoTunnel
return SourceAlloydbUpdateSSHTunnelMethod{
- SourceAlloydbUpdateSSHTunnelMethodNoTunnel: &sourceAlloydbUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourceAlloydbUpdateNoTunnel: &sourceAlloydbUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateSourceAlloydbUpdateSSHTunnelMethodSourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication(sourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication) SourceAlloydbUpdateSSHTunnelMethod {
- typ := SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateSourceAlloydbUpdateSSHTunnelMethodSourceAlloydbUpdateSSHKeyAuthentication(sourceAlloydbUpdateSSHKeyAuthentication SourceAlloydbUpdateSSHKeyAuthentication) SourceAlloydbUpdateSSHTunnelMethod {
+ typ := SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHKeyAuthentication
return SourceAlloydbUpdateSSHTunnelMethod{
- SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication: &sourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ SourceAlloydbUpdateSSHKeyAuthentication: &sourceAlloydbUpdateSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateSourceAlloydbUpdateSSHTunnelMethodSourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication(sourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication) SourceAlloydbUpdateSSHTunnelMethod {
- typ := SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication
+func CreateSourceAlloydbUpdateSSHTunnelMethodSourceAlloydbUpdatePasswordAuthentication(sourceAlloydbUpdatePasswordAuthentication SourceAlloydbUpdatePasswordAuthentication) SourceAlloydbUpdateSSHTunnelMethod {
+ typ := SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdatePasswordAuthentication
return SourceAlloydbUpdateSSHTunnelMethod{
- SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication: &sourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication,
+ SourceAlloydbUpdatePasswordAuthentication: &sourceAlloydbUpdatePasswordAuthentication,
Type: typ,
}
}
func (u *SourceAlloydbUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceAlloydbUpdateSSHTunnelMethodNoTunnel := new(SourceAlloydbUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.SourceAlloydbUpdateSSHTunnelMethodNoTunnel = sourceAlloydbUpdateSSHTunnelMethodNoTunnel
- u.Type = SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHTunnelMethodNoTunnel
+
+ sourceAlloydbUpdateNoTunnel := new(SourceAlloydbUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbUpdateNoTunnel, "", true, true); err == nil {
+ u.SourceAlloydbUpdateNoTunnel = sourceAlloydbUpdateNoTunnel
+ u.Type = SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateNoTunnel
return nil
}
- sourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication := new(SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication = sourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication
+ sourceAlloydbUpdateSSHKeyAuthentication := new(SourceAlloydbUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceAlloydbUpdateSSHKeyAuthentication = sourceAlloydbUpdateSSHKeyAuthentication
+ u.Type = SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHKeyAuthentication
return nil
}
- sourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication := new(SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication = sourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication
+ sourceAlloydbUpdatePasswordAuthentication := new(SourceAlloydbUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceAlloydbUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.SourceAlloydbUpdatePasswordAuthentication = sourceAlloydbUpdatePasswordAuthentication
+ u.Type = SourceAlloydbUpdateSSHTunnelMethodTypeSourceAlloydbUpdatePasswordAuthentication
return nil
}
@@ -1129,19 +1156,19 @@ func (u *SourceAlloydbUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceAlloydbUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourceAlloydbUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourceAlloydbUpdateSSHTunnelMethodNoTunnel)
+ if u.SourceAlloydbUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.SourceAlloydbUpdateNoTunnel, "", true)
}
- if u.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceAlloydbUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourceAlloydbUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceAlloydbUpdateSSHKeyAuthentication, "", true)
}
- if u.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourceAlloydbUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.SourceAlloydbUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceAlloydbUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceAlloydbUpdate struct {
@@ -1154,9 +1181,9 @@ type SourceAlloydbUpdate struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5432" json:"port"`
// Replication method for extracting data from the database.
- ReplicationMethod *SourceAlloydbUpdateReplicationMethod `json:"replication_method,omitempty"`
+ ReplicationMethod *ReplicationMethod `json:"replication_method,omitempty"`
// The list of schemas (case sensitive) to sync from. Defaults to public.
Schemas []string `json:"schemas,omitempty"`
// SSL connection modes.
@@ -1167,3 +1194,84 @@ type SourceAlloydbUpdate struct {
// Username to access the database.
Username string `json:"username"`
}
+
+func (s SourceAlloydbUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAlloydbUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAlloydbUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceAlloydbUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceAlloydbUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceAlloydbUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceAlloydbUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceAlloydbUpdate) GetReplicationMethod() *ReplicationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicationMethod
+}
+
+func (o *SourceAlloydbUpdate) GetSchemas() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Schemas
+}
+
+func (o *SourceAlloydbUpdate) GetSslMode() *SourceAlloydbUpdateSSLModes {
+ if o == nil {
+ return nil
+ }
+ return o.SslMode
+}
+
+func (o *SourceAlloydbUpdate) GetTunnelMethod() *SourceAlloydbUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourceAlloydbUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonads.go b/internal/sdk/pkg/models/shared/sourceamazonads.go
old mode 100755
new mode 100644
index d2fa1f1f9..7a49cd016
--- a/internal/sdk/pkg/models/shared/sourceamazonads.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonads.go
@@ -5,6 +5,8 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceAmazonAdsAuthType string
@@ -107,27 +109,27 @@ func (e *SourceAmazonAdsReportRecordTypes) UnmarshalJSON(data []byte) error {
}
}
-type SourceAmazonAdsAmazonAds string
+type AmazonAds string
const (
- SourceAmazonAdsAmazonAdsAmazonAds SourceAmazonAdsAmazonAds = "amazon-ads"
+ AmazonAdsAmazonAds AmazonAds = "amazon-ads"
)
-func (e SourceAmazonAdsAmazonAds) ToPointer() *SourceAmazonAdsAmazonAds {
+func (e AmazonAds) ToPointer() *AmazonAds {
return &e
}
-func (e *SourceAmazonAdsAmazonAds) UnmarshalJSON(data []byte) error {
+func (e *AmazonAds) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "amazon-ads":
- *e = SourceAmazonAdsAmazonAds(v)
+ *e = AmazonAds(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAmazonAdsAmazonAds: %v", v)
+ return fmt.Errorf("invalid value for AmazonAds: %v", v)
}
}
@@ -162,13 +164,13 @@ func (e *SourceAmazonAdsStateFilter) UnmarshalJSON(data []byte) error {
}
type SourceAmazonAds struct {
- AuthType *SourceAmazonAdsAuthType `json:"auth_type,omitempty"`
+ authType *SourceAmazonAdsAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The client ID of your Amazon Ads developer application. See the docs for more information.
ClientID string `json:"client_id"`
// The client secret of your Amazon Ads developer application. See the docs for more information.
ClientSecret string `json:"client_secret"`
// The amount of days to go back in time to get the updated data from Amazon Ads
- LookBackWindow *int64 `json:"look_back_window,omitempty"`
+ LookBackWindow *int64 `default:"3" json:"look_back_window"`
// Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.
MarketplaceIds []string `json:"marketplace_ids,omitempty"`
// Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.
@@ -176,12 +178,101 @@ type SourceAmazonAds struct {
// Amazon Ads refresh token. See the docs for more information on how to obtain this token.
RefreshToken string `json:"refresh_token"`
// Region to pull data from (EU/NA/FE). See docs for more details.
- Region *SourceAmazonAdsRegion `json:"region,omitempty"`
+ Region *SourceAmazonAdsRegion `default:"NA" json:"region"`
// Optional configuration which accepts an array of string of record types. Leave blank for default behaviour to pull all report types. Use this config option only if you want to pull specific report type(s). See docs for more details
ReportRecordTypes []SourceAmazonAdsReportRecordTypes `json:"report_record_types,omitempty"`
- SourceType SourceAmazonAdsAmazonAds `json:"sourceType"`
+ sourceType AmazonAds `const:"amazon-ads" json:"sourceType"`
// The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format
- StartDate *string `json:"start_date,omitempty"`
+ StartDate *types.Date `json:"start_date,omitempty"`
// Reflects the state of the Display, Product, and Brand Campaign streams as enabled, paused, or archived. If you do not populate this field, it will be ignored completely.
StateFilter []SourceAmazonAdsStateFilter `json:"state_filter,omitempty"`
}
+
+func (s SourceAmazonAds) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAmazonAds) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAmazonAds) GetAuthType() *SourceAmazonAdsAuthType {
+ return SourceAmazonAdsAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceAmazonAds) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceAmazonAds) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceAmazonAds) GetLookBackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookBackWindow
+}
+
+func (o *SourceAmazonAds) GetMarketplaceIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.MarketplaceIds
+}
+
+func (o *SourceAmazonAds) GetProfiles() []int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Profiles
+}
+
+func (o *SourceAmazonAds) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceAmazonAds) GetRegion() *SourceAmazonAdsRegion {
+ if o == nil {
+ return nil
+ }
+ return o.Region
+}
+
+func (o *SourceAmazonAds) GetReportRecordTypes() []SourceAmazonAdsReportRecordTypes {
+ if o == nil {
+ return nil
+ }
+ return o.ReportRecordTypes
+}
+
+func (o *SourceAmazonAds) GetSourceType() AmazonAds {
+ return AmazonAdsAmazonAds
+}
+
+func (o *SourceAmazonAds) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceAmazonAds) GetStateFilter() []SourceAmazonAdsStateFilter {
+ if o == nil {
+ return nil
+ }
+ return o.StateFilter
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonadscreaterequest.go b/internal/sdk/pkg/models/shared/sourceamazonadscreaterequest.go
old mode 100755
new mode 100644
index ac301fb36..2bd45def2
--- a/internal/sdk/pkg/models/shared/sourceamazonadscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonadscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAmazonAdsCreateRequest struct {
Configuration SourceAmazonAds `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAmazonAdsCreateRequest) GetConfiguration() SourceAmazonAds {
+ if o == nil {
+ return SourceAmazonAds{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAmazonAdsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAmazonAdsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAmazonAdsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAmazonAdsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonadsputrequest.go b/internal/sdk/pkg/models/shared/sourceamazonadsputrequest.go
old mode 100755
new mode 100644
index abab7da3d..def2e8775
--- a/internal/sdk/pkg/models/shared/sourceamazonadsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonadsputrequest.go
@@ -7,3 +7,24 @@ type SourceAmazonAdsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAmazonAdsPutRequest) GetConfiguration() SourceAmazonAdsUpdate {
+ if o == nil {
+ return SourceAmazonAdsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAmazonAdsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAmazonAdsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonadsupdate.go b/internal/sdk/pkg/models/shared/sourceamazonadsupdate.go
old mode 100755
new mode 100644
index 4087cc4ae..7826886bc
--- a/internal/sdk/pkg/models/shared/sourceamazonadsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonadsupdate.go
@@ -5,6 +5,8 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceAmazonAdsUpdateAuthType string
@@ -31,20 +33,20 @@ func (e *SourceAmazonAdsUpdateAuthType) UnmarshalJSON(data []byte) error {
}
}
-// SourceAmazonAdsUpdateRegion - Region to pull data from (EU/NA/FE). See docs for more details.
-type SourceAmazonAdsUpdateRegion string
+// Region to pull data from (EU/NA/FE). See docs for more details.
+type Region string
const (
- SourceAmazonAdsUpdateRegionNa SourceAmazonAdsUpdateRegion = "NA"
- SourceAmazonAdsUpdateRegionEu SourceAmazonAdsUpdateRegion = "EU"
- SourceAmazonAdsUpdateRegionFe SourceAmazonAdsUpdateRegion = "FE"
+ RegionNa Region = "NA"
+ RegionEu Region = "EU"
+ RegionFe Region = "FE"
)
-func (e SourceAmazonAdsUpdateRegion) ToPointer() *SourceAmazonAdsUpdateRegion {
+func (e Region) ToPointer() *Region {
return &e
}
-func (e *SourceAmazonAdsUpdateRegion) UnmarshalJSON(data []byte) error {
+func (e *Region) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -55,31 +57,31 @@ func (e *SourceAmazonAdsUpdateRegion) UnmarshalJSON(data []byte) error {
case "EU":
fallthrough
case "FE":
- *e = SourceAmazonAdsUpdateRegion(v)
+ *e = Region(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAmazonAdsUpdateRegion: %v", v)
+ return fmt.Errorf("invalid value for Region: %v", v)
}
}
-type SourceAmazonAdsUpdateReportRecordTypes string
+type ReportRecordTypes string
const (
- SourceAmazonAdsUpdateReportRecordTypesAdGroups SourceAmazonAdsUpdateReportRecordTypes = "adGroups"
- SourceAmazonAdsUpdateReportRecordTypesAsins SourceAmazonAdsUpdateReportRecordTypes = "asins"
- SourceAmazonAdsUpdateReportRecordTypesAsinsKeywords SourceAmazonAdsUpdateReportRecordTypes = "asins_keywords"
- SourceAmazonAdsUpdateReportRecordTypesAsinsTargets SourceAmazonAdsUpdateReportRecordTypes = "asins_targets"
- SourceAmazonAdsUpdateReportRecordTypesCampaigns SourceAmazonAdsUpdateReportRecordTypes = "campaigns"
- SourceAmazonAdsUpdateReportRecordTypesKeywords SourceAmazonAdsUpdateReportRecordTypes = "keywords"
- SourceAmazonAdsUpdateReportRecordTypesProductAds SourceAmazonAdsUpdateReportRecordTypes = "productAds"
- SourceAmazonAdsUpdateReportRecordTypesTargets SourceAmazonAdsUpdateReportRecordTypes = "targets"
+ ReportRecordTypesAdGroups ReportRecordTypes = "adGroups"
+ ReportRecordTypesAsins ReportRecordTypes = "asins"
+ ReportRecordTypesAsinsKeywords ReportRecordTypes = "asins_keywords"
+ ReportRecordTypesAsinsTargets ReportRecordTypes = "asins_targets"
+ ReportRecordTypesCampaigns ReportRecordTypes = "campaigns"
+ ReportRecordTypesKeywords ReportRecordTypes = "keywords"
+ ReportRecordTypesProductAds ReportRecordTypes = "productAds"
+ ReportRecordTypesTargets ReportRecordTypes = "targets"
)
-func (e SourceAmazonAdsUpdateReportRecordTypes) ToPointer() *SourceAmazonAdsUpdateReportRecordTypes {
+func (e ReportRecordTypes) ToPointer() *ReportRecordTypes {
return &e
}
-func (e *SourceAmazonAdsUpdateReportRecordTypes) UnmarshalJSON(data []byte) error {
+func (e *ReportRecordTypes) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -100,26 +102,26 @@ func (e *SourceAmazonAdsUpdateReportRecordTypes) UnmarshalJSON(data []byte) erro
case "productAds":
fallthrough
case "targets":
- *e = SourceAmazonAdsUpdateReportRecordTypes(v)
+ *e = ReportRecordTypes(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAmazonAdsUpdateReportRecordTypes: %v", v)
+ return fmt.Errorf("invalid value for ReportRecordTypes: %v", v)
}
}
-type SourceAmazonAdsUpdateStateFilter string
+type StateFilter string
const (
- SourceAmazonAdsUpdateStateFilterEnabled SourceAmazonAdsUpdateStateFilter = "enabled"
- SourceAmazonAdsUpdateStateFilterPaused SourceAmazonAdsUpdateStateFilter = "paused"
- SourceAmazonAdsUpdateStateFilterArchived SourceAmazonAdsUpdateStateFilter = "archived"
+ StateFilterEnabled StateFilter = "enabled"
+ StateFilterPaused StateFilter = "paused"
+ StateFilterArchived StateFilter = "archived"
)
-func (e SourceAmazonAdsUpdateStateFilter) ToPointer() *SourceAmazonAdsUpdateStateFilter {
+func (e StateFilter) ToPointer() *StateFilter {
return &e
}
-func (e *SourceAmazonAdsUpdateStateFilter) UnmarshalJSON(data []byte) error {
+func (e *StateFilter) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -130,21 +132,21 @@ func (e *SourceAmazonAdsUpdateStateFilter) UnmarshalJSON(data []byte) error {
case "paused":
fallthrough
case "archived":
- *e = SourceAmazonAdsUpdateStateFilter(v)
+ *e = StateFilter(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAmazonAdsUpdateStateFilter: %v", v)
+ return fmt.Errorf("invalid value for StateFilter: %v", v)
}
}
type SourceAmazonAdsUpdate struct {
- AuthType *SourceAmazonAdsUpdateAuthType `json:"auth_type,omitempty"`
+ authType *SourceAmazonAdsUpdateAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The client ID of your Amazon Ads developer application. See the docs for more information.
ClientID string `json:"client_id"`
// The client secret of your Amazon Ads developer application. See the docs for more information.
ClientSecret string `json:"client_secret"`
// The amount of days to go back in time to get the updated data from Amazon Ads
- LookBackWindow *int64 `json:"look_back_window,omitempty"`
+ LookBackWindow *int64 `default:"3" json:"look_back_window"`
// Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.
MarketplaceIds []string `json:"marketplace_ids,omitempty"`
// Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.
@@ -152,11 +154,96 @@ type SourceAmazonAdsUpdate struct {
// Amazon Ads refresh token. See the docs for more information on how to obtain this token.
RefreshToken string `json:"refresh_token"`
// Region to pull data from (EU/NA/FE). See docs for more details.
- Region *SourceAmazonAdsUpdateRegion `json:"region,omitempty"`
+ Region *Region `default:"NA" json:"region"`
// Optional configuration which accepts an array of string of record types. Leave blank for default behaviour to pull all report types. Use this config option only if you want to pull specific report type(s). See docs for more details
- ReportRecordTypes []SourceAmazonAdsUpdateReportRecordTypes `json:"report_record_types,omitempty"`
+ ReportRecordTypes []ReportRecordTypes `json:"report_record_types,omitempty"`
// The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format
- StartDate *string `json:"start_date,omitempty"`
+ StartDate *types.Date `json:"start_date,omitempty"`
// Reflects the state of the Display, Product, and Brand Campaign streams as enabled, paused, or archived. If you do not populate this field, it will be ignored completely.
- StateFilter []SourceAmazonAdsUpdateStateFilter `json:"state_filter,omitempty"`
+ StateFilter []StateFilter `json:"state_filter,omitempty"`
+}
+
+func (s SourceAmazonAdsUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAmazonAdsUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAmazonAdsUpdate) GetAuthType() *SourceAmazonAdsUpdateAuthType {
+ return SourceAmazonAdsUpdateAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceAmazonAdsUpdate) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceAmazonAdsUpdate) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceAmazonAdsUpdate) GetLookBackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookBackWindow
+}
+
+func (o *SourceAmazonAdsUpdate) GetMarketplaceIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.MarketplaceIds
+}
+
+func (o *SourceAmazonAdsUpdate) GetProfiles() []int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Profiles
+}
+
+func (o *SourceAmazonAdsUpdate) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceAmazonAdsUpdate) GetRegion() *Region {
+ if o == nil {
+ return nil
+ }
+ return o.Region
+}
+
+func (o *SourceAmazonAdsUpdate) GetReportRecordTypes() []ReportRecordTypes {
+ if o == nil {
+ return nil
+ }
+ return o.ReportRecordTypes
+}
+
+func (o *SourceAmazonAdsUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceAmazonAdsUpdate) GetStateFilter() []StateFilter {
+ if o == nil {
+ return nil
+ }
+ return o.StateFilter
}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonsellerpartner.go b/internal/sdk/pkg/models/shared/sourceamazonsellerpartner.go
old mode 100755
new mode 100644
index b66a76966..5d952b366
--- a/internal/sdk/pkg/models/shared/sourceamazonsellerpartner.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonsellerpartner.go
@@ -5,8 +5,38 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
)
+// SourceAmazonSellerPartnerAWSSellerPartnerAccountType - Type of the Account you're going to authorize the Airbyte application by
+type SourceAmazonSellerPartnerAWSSellerPartnerAccountType string
+
+const (
+ SourceAmazonSellerPartnerAWSSellerPartnerAccountTypeSeller SourceAmazonSellerPartnerAWSSellerPartnerAccountType = "Seller"
+ SourceAmazonSellerPartnerAWSSellerPartnerAccountTypeVendor SourceAmazonSellerPartnerAWSSellerPartnerAccountType = "Vendor"
+)
+
+func (e SourceAmazonSellerPartnerAWSSellerPartnerAccountType) ToPointer() *SourceAmazonSellerPartnerAWSSellerPartnerAccountType {
+ return &e
+}
+
+func (e *SourceAmazonSellerPartnerAWSSellerPartnerAccountType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Seller":
+ fallthrough
+ case "Vendor":
+ *e = SourceAmazonSellerPartnerAWSSellerPartnerAccountType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAmazonSellerPartnerAWSSellerPartnerAccountType: %v", v)
+ }
+}
+
type SourceAmazonSellerPartnerAuthType string
const (
@@ -147,59 +177,149 @@ func (e *SourceAmazonSellerPartnerAWSRegion) UnmarshalJSON(data []byte) error {
}
}
-type SourceAmazonSellerPartnerAmazonSellerPartner string
+type AmazonSellerPartner string
const (
- SourceAmazonSellerPartnerAmazonSellerPartnerAmazonSellerPartner SourceAmazonSellerPartnerAmazonSellerPartner = "amazon-seller-partner"
+ AmazonSellerPartnerAmazonSellerPartner AmazonSellerPartner = "amazon-seller-partner"
)
-func (e SourceAmazonSellerPartnerAmazonSellerPartner) ToPointer() *SourceAmazonSellerPartnerAmazonSellerPartner {
+func (e AmazonSellerPartner) ToPointer() *AmazonSellerPartner {
return &e
}
-func (e *SourceAmazonSellerPartnerAmazonSellerPartner) UnmarshalJSON(data []byte) error {
+func (e *AmazonSellerPartner) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "amazon-seller-partner":
- *e = SourceAmazonSellerPartnerAmazonSellerPartner(v)
+ *e = AmazonSellerPartner(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAmazonSellerPartnerAmazonSellerPartner: %v", v)
+ return fmt.Errorf("invalid value for AmazonSellerPartner: %v", v)
}
}
type SourceAmazonSellerPartner struct {
+ // Type of the Account you're going to authorize the Airbyte application by
+ AccountType *SourceAmazonSellerPartnerAWSSellerPartnerAccountType `default:"Seller" json:"account_type"`
// Additional information to configure report options. This varies by report type, not every report implement this kind of feature. Must be a valid json string.
AdvancedStreamOptions *string `json:"advanced_stream_options,omitempty"`
- AuthType *SourceAmazonSellerPartnerAuthType `json:"auth_type,omitempty"`
- // Specifies the AWS access key used as part of the credentials to authenticate the user.
- AwsAccessKey *string `json:"aws_access_key,omitempty"`
+ authType *SourceAmazonSellerPartnerAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// Select the AWS Environment.
- AwsEnvironment SourceAmazonSellerPartnerAWSEnvironment `json:"aws_environment"`
- // Specifies the AWS secret key used as part of the credentials to authenticate the user.
- AwsSecretKey *string `json:"aws_secret_key,omitempty"`
+ AwsEnvironment *SourceAmazonSellerPartnerAWSEnvironment `default:"PRODUCTION" json:"aws_environment"`
// Your Login with Amazon Client ID.
LwaAppID string `json:"lwa_app_id"`
// Your Login with Amazon Client Secret.
LwaClientSecret string `json:"lwa_client_secret"`
- // Sometimes report can take up to 30 minutes to generate. This will set the limit for how long to wait for a successful report.
- MaxWaitSeconds *int64 `json:"max_wait_seconds,omitempty"`
// Will be used for stream slicing for initial full_refresh sync when no updated state is present for reports that support sliced incremental sync.
- PeriodInDays *int64 `json:"period_in_days,omitempty"`
+ PeriodInDays *int64 `default:"90" json:"period_in_days"`
// The Refresh Token obtained via OAuth flow authorization.
RefreshToken string `json:"refresh_token"`
// Select the AWS Region.
- Region SourceAmazonSellerPartnerAWSRegion `json:"region"`
+ Region *SourceAmazonSellerPartnerAWSRegion `default:"US" json:"region"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.
- ReplicationEndDate *string `json:"replication_end_date,omitempty"`
+ ReplicationEndDate *time.Time `json:"replication_end_date,omitempty"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
- ReplicationStartDate string `json:"replication_start_date"`
+ ReplicationStartDate time.Time `json:"replication_start_date"`
// Additional information passed to reports. This varies by report type. Must be a valid json string.
- ReportOptions *string `json:"report_options,omitempty"`
- // Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. (Needs permission to 'Assume Role' STS).
- RoleArn *string `json:"role_arn,omitempty"`
- SourceType SourceAmazonSellerPartnerAmazonSellerPartner `json:"sourceType"`
+ ReportOptions *string `json:"report_options,omitempty"`
+ sourceType AmazonSellerPartner `const:"amazon-seller-partner" json:"sourceType"`
+}
+
+func (s SourceAmazonSellerPartner) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAmazonSellerPartner) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAmazonSellerPartner) GetAccountType() *SourceAmazonSellerPartnerAWSSellerPartnerAccountType {
+ if o == nil {
+ return nil
+ }
+ return o.AccountType
+}
+
+func (o *SourceAmazonSellerPartner) GetAdvancedStreamOptions() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AdvancedStreamOptions
+}
+
+func (o *SourceAmazonSellerPartner) GetAuthType() *SourceAmazonSellerPartnerAuthType {
+ return SourceAmazonSellerPartnerAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceAmazonSellerPartner) GetAwsEnvironment() *SourceAmazonSellerPartnerAWSEnvironment {
+ if o == nil {
+ return nil
+ }
+ return o.AwsEnvironment
+}
+
+func (o *SourceAmazonSellerPartner) GetLwaAppID() string {
+ if o == nil {
+ return ""
+ }
+ return o.LwaAppID
+}
+
+func (o *SourceAmazonSellerPartner) GetLwaClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.LwaClientSecret
+}
+
+func (o *SourceAmazonSellerPartner) GetPeriodInDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PeriodInDays
+}
+
+func (o *SourceAmazonSellerPartner) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceAmazonSellerPartner) GetRegion() *SourceAmazonSellerPartnerAWSRegion {
+ if o == nil {
+ return nil
+ }
+ return o.Region
+}
+
+func (o *SourceAmazonSellerPartner) GetReplicationEndDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicationEndDate
+}
+
+func (o *SourceAmazonSellerPartner) GetReplicationStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.ReplicationStartDate
+}
+
+func (o *SourceAmazonSellerPartner) GetReportOptions() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ReportOptions
+}
+
+func (o *SourceAmazonSellerPartner) GetSourceType() AmazonSellerPartner {
+ return AmazonSellerPartnerAmazonSellerPartner
}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonsellerpartnercreaterequest.go b/internal/sdk/pkg/models/shared/sourceamazonsellerpartnercreaterequest.go
old mode 100755
new mode 100644
index 233a0f92b..cca2ef881
--- a/internal/sdk/pkg/models/shared/sourceamazonsellerpartnercreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonsellerpartnercreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAmazonSellerPartnerCreateRequest struct {
Configuration SourceAmazonSellerPartner `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAmazonSellerPartnerCreateRequest) GetConfiguration() SourceAmazonSellerPartner {
+ if o == nil {
+ return SourceAmazonSellerPartner{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAmazonSellerPartnerCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAmazonSellerPartnerCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAmazonSellerPartnerCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAmazonSellerPartnerCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonsellerpartnerputrequest.go b/internal/sdk/pkg/models/shared/sourceamazonsellerpartnerputrequest.go
old mode 100755
new mode 100644
index 77bd4150f..66cbe4444
--- a/internal/sdk/pkg/models/shared/sourceamazonsellerpartnerputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonsellerpartnerputrequest.go
@@ -7,3 +7,24 @@ type SourceAmazonSellerPartnerPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAmazonSellerPartnerPutRequest) GetConfiguration() SourceAmazonSellerPartnerUpdate {
+ if o == nil {
+ return SourceAmazonSellerPartnerUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAmazonSellerPartnerPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAmazonSellerPartnerPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonsellerpartnerupdate.go b/internal/sdk/pkg/models/shared/sourceamazonsellerpartnerupdate.go
old mode 100755
new mode 100644
index 1bfe4e1b5..beb68d84d
--- a/internal/sdk/pkg/models/shared/sourceamazonsellerpartnerupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonsellerpartnerupdate.go
@@ -5,8 +5,38 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
)
+// AWSSellerPartnerAccountType - Type of the Account you're going to authorize the Airbyte application by
+type AWSSellerPartnerAccountType string
+
+const (
+ AWSSellerPartnerAccountTypeSeller AWSSellerPartnerAccountType = "Seller"
+ AWSSellerPartnerAccountTypeVendor AWSSellerPartnerAccountType = "Vendor"
+)
+
+func (e AWSSellerPartnerAccountType) ToPointer() *AWSSellerPartnerAccountType {
+ return &e
+}
+
+func (e *AWSSellerPartnerAccountType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Seller":
+ fallthrough
+ case "Vendor":
+ *e = AWSSellerPartnerAccountType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for AWSSellerPartnerAccountType: %v", v)
+ }
+}
+
type SourceAmazonSellerPartnerUpdateAuthType string
const (
@@ -31,19 +61,19 @@ func (e *SourceAmazonSellerPartnerUpdateAuthType) UnmarshalJSON(data []byte) err
}
}
-// SourceAmazonSellerPartnerUpdateAWSEnvironment - Select the AWS Environment.
-type SourceAmazonSellerPartnerUpdateAWSEnvironment string
+// AWSEnvironment - Select the AWS Environment.
+type AWSEnvironment string
const (
- SourceAmazonSellerPartnerUpdateAWSEnvironmentProduction SourceAmazonSellerPartnerUpdateAWSEnvironment = "PRODUCTION"
- SourceAmazonSellerPartnerUpdateAWSEnvironmentSandbox SourceAmazonSellerPartnerUpdateAWSEnvironment = "SANDBOX"
+ AWSEnvironmentProduction AWSEnvironment = "PRODUCTION"
+ AWSEnvironmentSandbox AWSEnvironment = "SANDBOX"
)
-func (e SourceAmazonSellerPartnerUpdateAWSEnvironment) ToPointer() *SourceAmazonSellerPartnerUpdateAWSEnvironment {
+func (e AWSEnvironment) ToPointer() *AWSEnvironment {
return &e
}
-func (e *SourceAmazonSellerPartnerUpdateAWSEnvironment) UnmarshalJSON(data []byte) error {
+func (e *AWSEnvironment) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -52,46 +82,46 @@ func (e *SourceAmazonSellerPartnerUpdateAWSEnvironment) UnmarshalJSON(data []byt
case "PRODUCTION":
fallthrough
case "SANDBOX":
- *e = SourceAmazonSellerPartnerUpdateAWSEnvironment(v)
+ *e = AWSEnvironment(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAmazonSellerPartnerUpdateAWSEnvironment: %v", v)
+ return fmt.Errorf("invalid value for AWSEnvironment: %v", v)
}
}
-// SourceAmazonSellerPartnerUpdateAWSRegion - Select the AWS Region.
-type SourceAmazonSellerPartnerUpdateAWSRegion string
+// AWSRegion - Select the AWS Region.
+type AWSRegion string
const (
- SourceAmazonSellerPartnerUpdateAWSRegionAe SourceAmazonSellerPartnerUpdateAWSRegion = "AE"
- SourceAmazonSellerPartnerUpdateAWSRegionAu SourceAmazonSellerPartnerUpdateAWSRegion = "AU"
- SourceAmazonSellerPartnerUpdateAWSRegionBe SourceAmazonSellerPartnerUpdateAWSRegion = "BE"
- SourceAmazonSellerPartnerUpdateAWSRegionBr SourceAmazonSellerPartnerUpdateAWSRegion = "BR"
- SourceAmazonSellerPartnerUpdateAWSRegionCa SourceAmazonSellerPartnerUpdateAWSRegion = "CA"
- SourceAmazonSellerPartnerUpdateAWSRegionDe SourceAmazonSellerPartnerUpdateAWSRegion = "DE"
- SourceAmazonSellerPartnerUpdateAWSRegionEg SourceAmazonSellerPartnerUpdateAWSRegion = "EG"
- SourceAmazonSellerPartnerUpdateAWSRegionEs SourceAmazonSellerPartnerUpdateAWSRegion = "ES"
- SourceAmazonSellerPartnerUpdateAWSRegionFr SourceAmazonSellerPartnerUpdateAWSRegion = "FR"
- SourceAmazonSellerPartnerUpdateAWSRegionGb SourceAmazonSellerPartnerUpdateAWSRegion = "GB"
- SourceAmazonSellerPartnerUpdateAWSRegionIn SourceAmazonSellerPartnerUpdateAWSRegion = "IN"
- SourceAmazonSellerPartnerUpdateAWSRegionIt SourceAmazonSellerPartnerUpdateAWSRegion = "IT"
- SourceAmazonSellerPartnerUpdateAWSRegionJp SourceAmazonSellerPartnerUpdateAWSRegion = "JP"
- SourceAmazonSellerPartnerUpdateAWSRegionMx SourceAmazonSellerPartnerUpdateAWSRegion = "MX"
- SourceAmazonSellerPartnerUpdateAWSRegionNl SourceAmazonSellerPartnerUpdateAWSRegion = "NL"
- SourceAmazonSellerPartnerUpdateAWSRegionPl SourceAmazonSellerPartnerUpdateAWSRegion = "PL"
- SourceAmazonSellerPartnerUpdateAWSRegionSa SourceAmazonSellerPartnerUpdateAWSRegion = "SA"
- SourceAmazonSellerPartnerUpdateAWSRegionSe SourceAmazonSellerPartnerUpdateAWSRegion = "SE"
- SourceAmazonSellerPartnerUpdateAWSRegionSg SourceAmazonSellerPartnerUpdateAWSRegion = "SG"
- SourceAmazonSellerPartnerUpdateAWSRegionTr SourceAmazonSellerPartnerUpdateAWSRegion = "TR"
- SourceAmazonSellerPartnerUpdateAWSRegionUk SourceAmazonSellerPartnerUpdateAWSRegion = "UK"
- SourceAmazonSellerPartnerUpdateAWSRegionUs SourceAmazonSellerPartnerUpdateAWSRegion = "US"
+ AWSRegionAe AWSRegion = "AE"
+ AWSRegionAu AWSRegion = "AU"
+ AWSRegionBe AWSRegion = "BE"
+ AWSRegionBr AWSRegion = "BR"
+ AWSRegionCa AWSRegion = "CA"
+ AWSRegionDe AWSRegion = "DE"
+ AWSRegionEg AWSRegion = "EG"
+ AWSRegionEs AWSRegion = "ES"
+ AWSRegionFr AWSRegion = "FR"
+ AWSRegionGb AWSRegion = "GB"
+ AWSRegionIn AWSRegion = "IN"
+ AWSRegionIt AWSRegion = "IT"
+ AWSRegionJp AWSRegion = "JP"
+ AWSRegionMx AWSRegion = "MX"
+ AWSRegionNl AWSRegion = "NL"
+ AWSRegionPl AWSRegion = "PL"
+ AWSRegionSa AWSRegion = "SA"
+ AWSRegionSe AWSRegion = "SE"
+ AWSRegionSg AWSRegion = "SG"
+ AWSRegionTr AWSRegion = "TR"
+ AWSRegionUk AWSRegion = "UK"
+ AWSRegionUs AWSRegion = "US"
)
-func (e SourceAmazonSellerPartnerUpdateAWSRegion) ToPointer() *SourceAmazonSellerPartnerUpdateAWSRegion {
+func (e AWSRegion) ToPointer() *AWSRegion {
return &e
}
-func (e *SourceAmazonSellerPartnerUpdateAWSRegion) UnmarshalJSON(data []byte) error {
+func (e *AWSRegion) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -140,41 +170,127 @@ func (e *SourceAmazonSellerPartnerUpdateAWSRegion) UnmarshalJSON(data []byte) er
case "UK":
fallthrough
case "US":
- *e = SourceAmazonSellerPartnerUpdateAWSRegion(v)
+ *e = AWSRegion(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAmazonSellerPartnerUpdateAWSRegion: %v", v)
+ return fmt.Errorf("invalid value for AWSRegion: %v", v)
}
}
type SourceAmazonSellerPartnerUpdate struct {
+ // Type of the Account you're going to authorize the Airbyte application by
+ AccountType *AWSSellerPartnerAccountType `default:"Seller" json:"account_type"`
// Additional information to configure report options. This varies by report type, not every report implement this kind of feature. Must be a valid json string.
AdvancedStreamOptions *string `json:"advanced_stream_options,omitempty"`
- AuthType *SourceAmazonSellerPartnerUpdateAuthType `json:"auth_type,omitempty"`
- // Specifies the AWS access key used as part of the credentials to authenticate the user.
- AwsAccessKey *string `json:"aws_access_key,omitempty"`
+ authType *SourceAmazonSellerPartnerUpdateAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// Select the AWS Environment.
- AwsEnvironment SourceAmazonSellerPartnerUpdateAWSEnvironment `json:"aws_environment"`
- // Specifies the AWS secret key used as part of the credentials to authenticate the user.
- AwsSecretKey *string `json:"aws_secret_key,omitempty"`
+ AwsEnvironment *AWSEnvironment `default:"PRODUCTION" json:"aws_environment"`
// Your Login with Amazon Client ID.
LwaAppID string `json:"lwa_app_id"`
// Your Login with Amazon Client Secret.
LwaClientSecret string `json:"lwa_client_secret"`
- // Sometimes report can take up to 30 minutes to generate. This will set the limit for how long to wait for a successful report.
- MaxWaitSeconds *int64 `json:"max_wait_seconds,omitempty"`
// Will be used for stream slicing for initial full_refresh sync when no updated state is present for reports that support sliced incremental sync.
- PeriodInDays *int64 `json:"period_in_days,omitempty"`
+ PeriodInDays *int64 `default:"90" json:"period_in_days"`
// The Refresh Token obtained via OAuth flow authorization.
RefreshToken string `json:"refresh_token"`
// Select the AWS Region.
- Region SourceAmazonSellerPartnerUpdateAWSRegion `json:"region"`
+ Region *AWSRegion `default:"US" json:"region"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.
- ReplicationEndDate *string `json:"replication_end_date,omitempty"`
+ ReplicationEndDate *time.Time `json:"replication_end_date,omitempty"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
- ReplicationStartDate string `json:"replication_start_date"`
+ ReplicationStartDate time.Time `json:"replication_start_date"`
// Additional information passed to reports. This varies by report type. Must be a valid json string.
ReportOptions *string `json:"report_options,omitempty"`
- // Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. (Needs permission to 'Assume Role' STS).
- RoleArn *string `json:"role_arn,omitempty"`
+}
+
+func (s SourceAmazonSellerPartnerUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAmazonSellerPartnerUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetAccountType() *AWSSellerPartnerAccountType {
+ if o == nil {
+ return nil
+ }
+ return o.AccountType
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetAdvancedStreamOptions() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AdvancedStreamOptions
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetAuthType() *SourceAmazonSellerPartnerUpdateAuthType {
+ return SourceAmazonSellerPartnerUpdateAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetAwsEnvironment() *AWSEnvironment {
+ if o == nil {
+ return nil
+ }
+ return o.AwsEnvironment
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetLwaAppID() string {
+ if o == nil {
+ return ""
+ }
+ return o.LwaAppID
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetLwaClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.LwaClientSecret
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetPeriodInDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PeriodInDays
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetRegion() *AWSRegion {
+ if o == nil {
+ return nil
+ }
+ return o.Region
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetReplicationEndDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicationEndDate
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetReplicationStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.ReplicationStartDate
+}
+
+func (o *SourceAmazonSellerPartnerUpdate) GetReportOptions() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ReportOptions
}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonsqs.go b/internal/sdk/pkg/models/shared/sourceamazonsqs.go
old mode 100755
new mode 100644
index 3c1ff0dd9..00f077903
--- a/internal/sdk/pkg/models/shared/sourceamazonsqs.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonsqs.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourceAmazonSqsAWSRegion - AWS Region of the SQS Queue
@@ -104,27 +105,27 @@ func (e *SourceAmazonSqsAWSRegion) UnmarshalJSON(data []byte) error {
}
}
-type SourceAmazonSqsAmazonSqs string
+type AmazonSqs string
const (
- SourceAmazonSqsAmazonSqsAmazonSqs SourceAmazonSqsAmazonSqs = "amazon-sqs"
+ AmazonSqsAmazonSqs AmazonSqs = "amazon-sqs"
)
-func (e SourceAmazonSqsAmazonSqs) ToPointer() *SourceAmazonSqsAmazonSqs {
+func (e AmazonSqs) ToPointer() *AmazonSqs {
return &e
}
-func (e *SourceAmazonSqsAmazonSqs) UnmarshalJSON(data []byte) error {
+func (e *AmazonSqs) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "amazon-sqs":
- *e = SourceAmazonSqsAmazonSqs(v)
+ *e = AmazonSqs(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAmazonSqsAmazonSqs: %v", v)
+ return fmt.Errorf("invalid value for AmazonSqs: %v", v)
}
}
@@ -134,7 +135,7 @@ type SourceAmazonSqs struct {
// Comma separated list of Mesage Attribute names to return
AttributesToReturn *string `json:"attributes_to_return,omitempty"`
// If Enabled, messages will be deleted from the SQS Queue after being read. If Disabled, messages are left in the queue and can be read more than once. WARNING: Enabling this option can result in data loss in cases of failure, use with caution, see documentation for more detail.
- DeleteMessages bool `json:"delete_messages"`
+ DeleteMessages *bool `default:"false" json:"delete_messages"`
// Max amount of messages to get in one batch (10 max)
MaxBatchSize *int64 `json:"max_batch_size,omitempty"`
// Max amount of time in seconds to wait for messages in a single poll (20 max)
@@ -144,8 +145,86 @@ type SourceAmazonSqs struct {
// AWS Region of the SQS Queue
Region SourceAmazonSqsAWSRegion `json:"region"`
// The Secret Key of the AWS IAM Role to use for pulling messages
- SecretKey *string `json:"secret_key,omitempty"`
- SourceType SourceAmazonSqsAmazonSqs `json:"sourceType"`
+ SecretKey *string `json:"secret_key,omitempty"`
+ sourceType AmazonSqs `const:"amazon-sqs" json:"sourceType"`
// Modify the Visibility Timeout of the individual message from the Queue's default (seconds).
VisibilityTimeout *int64 `json:"visibility_timeout,omitempty"`
}
+
+func (s SourceAmazonSqs) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAmazonSqs) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAmazonSqs) GetAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessKey
+}
+
+func (o *SourceAmazonSqs) GetAttributesToReturn() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AttributesToReturn
+}
+
+func (o *SourceAmazonSqs) GetDeleteMessages() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DeleteMessages
+}
+
+func (o *SourceAmazonSqs) GetMaxBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.MaxBatchSize
+}
+
+func (o *SourceAmazonSqs) GetMaxWaitTime() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.MaxWaitTime
+}
+
+func (o *SourceAmazonSqs) GetQueueURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.QueueURL
+}
+
+func (o *SourceAmazonSqs) GetRegion() SourceAmazonSqsAWSRegion {
+ if o == nil {
+ return SourceAmazonSqsAWSRegion("")
+ }
+ return o.Region
+}
+
+func (o *SourceAmazonSqs) GetSecretKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretKey
+}
+
+func (o *SourceAmazonSqs) GetSourceType() AmazonSqs {
+ return AmazonSqsAmazonSqs
+}
+
+func (o *SourceAmazonSqs) GetVisibilityTimeout() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.VisibilityTimeout
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonsqscreaterequest.go b/internal/sdk/pkg/models/shared/sourceamazonsqscreaterequest.go
old mode 100755
new mode 100644
index 94ddf5ad1..f3f1e1544
--- a/internal/sdk/pkg/models/shared/sourceamazonsqscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonsqscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAmazonSqsCreateRequest struct {
Configuration SourceAmazonSqs `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAmazonSqsCreateRequest) GetConfiguration() SourceAmazonSqs {
+ if o == nil {
+ return SourceAmazonSqs{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAmazonSqsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAmazonSqsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAmazonSqsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAmazonSqsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonsqsputrequest.go b/internal/sdk/pkg/models/shared/sourceamazonsqsputrequest.go
old mode 100755
new mode 100644
index cfa0a8568..acbbe8961
--- a/internal/sdk/pkg/models/shared/sourceamazonsqsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonsqsputrequest.go
@@ -7,3 +7,24 @@ type SourceAmazonSqsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAmazonSqsPutRequest) GetConfiguration() SourceAmazonSqsUpdate {
+ if o == nil {
+ return SourceAmazonSqsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAmazonSqsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAmazonSqsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamazonsqsupdate.go b/internal/sdk/pkg/models/shared/sourceamazonsqsupdate.go
old mode 100755
new mode 100644
index b1f526441..54c0ade10
--- a/internal/sdk/pkg/models/shared/sourceamazonsqsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceamazonsqsupdate.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourceAmazonSqsUpdateAWSRegion - AWS Region of the SQS Queue
@@ -110,7 +111,7 @@ type SourceAmazonSqsUpdate struct {
// Comma separated list of Mesage Attribute names to return
AttributesToReturn *string `json:"attributes_to_return,omitempty"`
// If Enabled, messages will be deleted from the SQS Queue after being read. If Disabled, messages are left in the queue and can be read more than once. WARNING: Enabling this option can result in data loss in cases of failure, use with caution, see documentation for more detail.
- DeleteMessages bool `json:"delete_messages"`
+ DeleteMessages *bool `default:"false" json:"delete_messages"`
// Max amount of messages to get in one batch (10 max)
MaxBatchSize *int64 `json:"max_batch_size,omitempty"`
// Max amount of time in seconds to wait for messages in a single poll (20 max)
@@ -124,3 +125,77 @@ type SourceAmazonSqsUpdate struct {
// Modify the Visibility Timeout of the individual message from the Queue's default (seconds).
VisibilityTimeout *int64 `json:"visibility_timeout,omitempty"`
}
+
+func (s SourceAmazonSqsUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAmazonSqsUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAmazonSqsUpdate) GetAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessKey
+}
+
+func (o *SourceAmazonSqsUpdate) GetAttributesToReturn() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AttributesToReturn
+}
+
+func (o *SourceAmazonSqsUpdate) GetDeleteMessages() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DeleteMessages
+}
+
+func (o *SourceAmazonSqsUpdate) GetMaxBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.MaxBatchSize
+}
+
+func (o *SourceAmazonSqsUpdate) GetMaxWaitTime() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.MaxWaitTime
+}
+
+func (o *SourceAmazonSqsUpdate) GetQueueURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.QueueURL
+}
+
+func (o *SourceAmazonSqsUpdate) GetRegion() SourceAmazonSqsUpdateAWSRegion {
+ if o == nil {
+ return SourceAmazonSqsUpdateAWSRegion("")
+ }
+ return o.Region
+}
+
+func (o *SourceAmazonSqsUpdate) GetSecretKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretKey
+}
+
+func (o *SourceAmazonSqsUpdate) GetVisibilityTimeout() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.VisibilityTimeout
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamplitude.go b/internal/sdk/pkg/models/shared/sourceamplitude.go
old mode 100755
new mode 100644
index 983821900..f4e032ece
--- a/internal/sdk/pkg/models/shared/sourceamplitude.go
+++ b/internal/sdk/pkg/models/shared/sourceamplitude.go
@@ -5,6 +5,8 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
)
// SourceAmplitudeDataRegion - Amplitude data region server
@@ -35,27 +37,27 @@ func (e *SourceAmplitudeDataRegion) UnmarshalJSON(data []byte) error {
}
}
-type SourceAmplitudeAmplitude string
+type Amplitude string
const (
- SourceAmplitudeAmplitudeAmplitude SourceAmplitudeAmplitude = "amplitude"
+ AmplitudeAmplitude Amplitude = "amplitude"
)
-func (e SourceAmplitudeAmplitude) ToPointer() *SourceAmplitudeAmplitude {
+func (e Amplitude) ToPointer() *Amplitude {
return &e
}
-func (e *SourceAmplitudeAmplitude) UnmarshalJSON(data []byte) error {
+func (e *Amplitude) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "amplitude":
- *e = SourceAmplitudeAmplitude(v)
+ *e = Amplitude(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAmplitudeAmplitude: %v", v)
+ return fmt.Errorf("invalid value for Amplitude: %v", v)
}
}
@@ -63,12 +65,62 @@ type SourceAmplitude struct {
// Amplitude API Key. See the setup guide for more information on how to obtain this key.
APIKey string `json:"api_key"`
// Amplitude data region server
- DataRegion *SourceAmplitudeDataRegion `json:"data_region,omitempty"`
+ DataRegion *SourceAmplitudeDataRegion `default:"Standard Server" json:"data_region"`
// According to Considerations too big time range in request can cause a timeout error. In this case, set shorter time interval in hours.
- RequestTimeRange *int64 `json:"request_time_range,omitempty"`
+ RequestTimeRange *int64 `default:"24" json:"request_time_range"`
// Amplitude Secret Key. See the setup guide for more information on how to obtain this key.
- SecretKey string `json:"secret_key"`
- SourceType SourceAmplitudeAmplitude `json:"sourceType"`
+ SecretKey string `json:"secret_key"`
+ sourceType Amplitude `const:"amplitude" json:"sourceType"`
// UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.
- StartDate string `json:"start_date"`
+ StartDate time.Time `json:"start_date"`
+}
+
+func (s SourceAmplitude) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAmplitude) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAmplitude) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceAmplitude) GetDataRegion() *SourceAmplitudeDataRegion {
+ if o == nil {
+ return nil
+ }
+ return o.DataRegion
+}
+
+func (o *SourceAmplitude) GetRequestTimeRange() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.RequestTimeRange
+}
+
+func (o *SourceAmplitude) GetSecretKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretKey
+}
+
+func (o *SourceAmplitude) GetSourceType() Amplitude {
+ return AmplitudeAmplitude
+}
+
+func (o *SourceAmplitude) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceamplitudecreaterequest.go b/internal/sdk/pkg/models/shared/sourceamplitudecreaterequest.go
old mode 100755
new mode 100644
index df0678050..c92b90914
--- a/internal/sdk/pkg/models/shared/sourceamplitudecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceamplitudecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAmplitudeCreateRequest struct {
Configuration SourceAmplitude `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAmplitudeCreateRequest) GetConfiguration() SourceAmplitude {
+ if o == nil {
+ return SourceAmplitude{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAmplitudeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAmplitudeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAmplitudeCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAmplitudeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamplitudeputrequest.go b/internal/sdk/pkg/models/shared/sourceamplitudeputrequest.go
old mode 100755
new mode 100644
index 82d9b960d..c7bcd7c34
--- a/internal/sdk/pkg/models/shared/sourceamplitudeputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceamplitudeputrequest.go
@@ -7,3 +7,24 @@ type SourceAmplitudePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAmplitudePutRequest) GetConfiguration() SourceAmplitudeUpdate {
+ if o == nil {
+ return SourceAmplitudeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAmplitudePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAmplitudePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceamplitudeupdate.go b/internal/sdk/pkg/models/shared/sourceamplitudeupdate.go
old mode 100755
new mode 100644
index 300712106..2ec6a8916
--- a/internal/sdk/pkg/models/shared/sourceamplitudeupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceamplitudeupdate.go
@@ -5,21 +5,23 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
)
-// SourceAmplitudeUpdateDataRegion - Amplitude data region server
-type SourceAmplitudeUpdateDataRegion string
+// DataRegion - Amplitude data region server
+type DataRegion string
const (
- SourceAmplitudeUpdateDataRegionStandardServer SourceAmplitudeUpdateDataRegion = "Standard Server"
- SourceAmplitudeUpdateDataRegionEuResidencyServer SourceAmplitudeUpdateDataRegion = "EU Residency Server"
+ DataRegionStandardServer DataRegion = "Standard Server"
+ DataRegionEuResidencyServer DataRegion = "EU Residency Server"
)
-func (e SourceAmplitudeUpdateDataRegion) ToPointer() *SourceAmplitudeUpdateDataRegion {
+func (e DataRegion) ToPointer() *DataRegion {
return &e
}
-func (e *SourceAmplitudeUpdateDataRegion) UnmarshalJSON(data []byte) error {
+func (e *DataRegion) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -28,10 +30,10 @@ func (e *SourceAmplitudeUpdateDataRegion) UnmarshalJSON(data []byte) error {
case "Standard Server":
fallthrough
case "EU Residency Server":
- *e = SourceAmplitudeUpdateDataRegion(v)
+ *e = DataRegion(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAmplitudeUpdateDataRegion: %v", v)
+ return fmt.Errorf("invalid value for DataRegion: %v", v)
}
}
@@ -39,11 +41,57 @@ type SourceAmplitudeUpdate struct {
// Amplitude API Key. See the setup guide for more information on how to obtain this key.
APIKey string `json:"api_key"`
// Amplitude data region server
- DataRegion *SourceAmplitudeUpdateDataRegion `json:"data_region,omitempty"`
+ DataRegion *DataRegion `default:"Standard Server" json:"data_region"`
// According to Considerations too big time range in request can cause a timeout error. In this case, set shorter time interval in hours.
- RequestTimeRange *int64 `json:"request_time_range,omitempty"`
+ RequestTimeRange *int64 `default:"24" json:"request_time_range"`
// Amplitude Secret Key. See the setup guide for more information on how to obtain this key.
SecretKey string `json:"secret_key"`
// UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.
- StartDate string `json:"start_date"`
+ StartDate time.Time `json:"start_date"`
+}
+
+func (s SourceAmplitudeUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAmplitudeUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAmplitudeUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceAmplitudeUpdate) GetDataRegion() *DataRegion {
+ if o == nil {
+ return nil
+ }
+ return o.DataRegion
+}
+
+func (o *SourceAmplitudeUpdate) GetRequestTimeRange() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.RequestTimeRange
+}
+
+func (o *SourceAmplitudeUpdate) GetSecretKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretKey
+}
+
+func (o *SourceAmplitudeUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceapifydataset.go b/internal/sdk/pkg/models/shared/sourceapifydataset.go
old mode 100755
new mode 100644
index ac5137d45..cd13f3c8b
--- a/internal/sdk/pkg/models/shared/sourceapifydataset.go
+++ b/internal/sdk/pkg/models/shared/sourceapifydataset.go
@@ -5,38 +5,66 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceApifyDatasetApifyDataset string
+type ApifyDataset string
const (
- SourceApifyDatasetApifyDatasetApifyDataset SourceApifyDatasetApifyDataset = "apify-dataset"
+ ApifyDatasetApifyDataset ApifyDataset = "apify-dataset"
)
-func (e SourceApifyDatasetApifyDataset) ToPointer() *SourceApifyDatasetApifyDataset {
+func (e ApifyDataset) ToPointer() *ApifyDataset {
return &e
}
-func (e *SourceApifyDatasetApifyDataset) UnmarshalJSON(data []byte) error {
+func (e *ApifyDataset) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "apify-dataset":
- *e = SourceApifyDatasetApifyDataset(v)
+ *e = ApifyDataset(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceApifyDatasetApifyDataset: %v", v)
+ return fmt.Errorf("invalid value for ApifyDataset: %v", v)
}
}
type SourceApifyDataset struct {
- // If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false.
- Clean *bool `json:"clean,omitempty"`
- // ID of the dataset you would like to load to Airbyte.
- DatasetID *string `json:"datasetId,omitempty"`
- SourceType SourceApifyDatasetApifyDataset `json:"sourceType"`
- // Your application's Client Secret. You can find this value on the console integrations tab after you login.
+ // ID of the dataset you would like to load to Airbyte. In Apify Console, you can view your datasets in the Storage section under the Datasets tab after you login. See the Apify Docs for more information.
+ DatasetID string `json:"dataset_id"`
+ sourceType ApifyDataset `const:"apify-dataset" json:"sourceType"`
+ // Personal API token of your Apify account. In Apify Console, you can find your API token in the Settings section under the Integrations tab after you login. See the Apify Docs for more information.
Token string `json:"token"`
}
+
+func (s SourceApifyDataset) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceApifyDataset) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceApifyDataset) GetDatasetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatasetID
+}
+
+func (o *SourceApifyDataset) GetSourceType() ApifyDataset {
+ return ApifyDatasetApifyDataset
+}
+
+func (o *SourceApifyDataset) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourceapifydatasetcreaterequest.go b/internal/sdk/pkg/models/shared/sourceapifydatasetcreaterequest.go
old mode 100755
new mode 100644
index 07a166215..726a3e549
--- a/internal/sdk/pkg/models/shared/sourceapifydatasetcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceapifydatasetcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceApifyDatasetCreateRequest struct {
Configuration SourceApifyDataset `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceApifyDatasetCreateRequest) GetConfiguration() SourceApifyDataset {
+ if o == nil {
+ return SourceApifyDataset{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceApifyDatasetCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceApifyDatasetCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceApifyDatasetCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceApifyDatasetCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceapifydatasetputrequest.go b/internal/sdk/pkg/models/shared/sourceapifydatasetputrequest.go
old mode 100755
new mode 100644
index c6e66d240..2309617a7
--- a/internal/sdk/pkg/models/shared/sourceapifydatasetputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceapifydatasetputrequest.go
@@ -7,3 +7,24 @@ type SourceApifyDatasetPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceApifyDatasetPutRequest) GetConfiguration() SourceApifyDatasetUpdate {
+ if o == nil {
+ return SourceApifyDatasetUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceApifyDatasetPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceApifyDatasetPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceapifydatasetupdate.go b/internal/sdk/pkg/models/shared/sourceapifydatasetupdate.go
old mode 100755
new mode 100644
index 750eddd04..75cd252ab
--- a/internal/sdk/pkg/models/shared/sourceapifydatasetupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceapifydatasetupdate.go
@@ -3,10 +3,22 @@
package shared
type SourceApifyDatasetUpdate struct {
- // If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false.
- Clean *bool `json:"clean,omitempty"`
- // ID of the dataset you would like to load to Airbyte.
- DatasetID *string `json:"datasetId,omitempty"`
- // Your application's Client Secret. You can find this value on the console integrations tab after you login.
+ // ID of the dataset you would like to load to Airbyte. In Apify Console, you can view your datasets in the Storage section under the Datasets tab after you login. See the Apify Docs for more information.
+ DatasetID string `json:"dataset_id"`
+ // Personal API token of your Apify account. In Apify Console, you can find your API token in the Settings section under the Integrations tab after you login. See the Apify Docs for more information.
Token string `json:"token"`
}
+
+func (o *SourceApifyDatasetUpdate) GetDatasetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatasetID
+}
+
+func (o *SourceApifyDatasetUpdate) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourceappfollow.go b/internal/sdk/pkg/models/shared/sourceappfollow.go
old mode 100755
new mode 100644
index e5fd9602e..584f59009
--- a/internal/sdk/pkg/models/shared/sourceappfollow.go
+++ b/internal/sdk/pkg/models/shared/sourceappfollow.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceAppfollowAppfollow string
+type Appfollow string
const (
- SourceAppfollowAppfollowAppfollow SourceAppfollowAppfollow = "appfollow"
+ AppfollowAppfollow Appfollow = "appfollow"
)
-func (e SourceAppfollowAppfollow) ToPointer() *SourceAppfollowAppfollow {
+func (e Appfollow) ToPointer() *Appfollow {
return &e
}
-func (e *SourceAppfollowAppfollow) UnmarshalJSON(data []byte) error {
+func (e *Appfollow) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "appfollow":
- *e = SourceAppfollowAppfollow(v)
+ *e = Appfollow(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAppfollowAppfollow: %v", v)
+ return fmt.Errorf("invalid value for Appfollow: %v", v)
}
}
type SourceAppfollow struct {
// API Key provided by Appfollow
- APISecret *string `json:"api_secret,omitempty"`
- SourceType SourceAppfollowAppfollow `json:"sourceType"`
+ APISecret *string `json:"api_secret,omitempty"`
+ sourceType Appfollow `const:"appfollow" json:"sourceType"`
+}
+
+func (s SourceAppfollow) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAppfollow) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAppfollow) GetAPISecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APISecret
+}
+
+func (o *SourceAppfollow) GetSourceType() Appfollow {
+ return AppfollowAppfollow
}
diff --git a/internal/sdk/pkg/models/shared/sourceappfollowcreaterequest.go b/internal/sdk/pkg/models/shared/sourceappfollowcreaterequest.go
old mode 100755
new mode 100644
index 226e71d8f..652894300
--- a/internal/sdk/pkg/models/shared/sourceappfollowcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceappfollowcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAppfollowCreateRequest struct {
Configuration SourceAppfollow `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAppfollowCreateRequest) GetConfiguration() SourceAppfollow {
+ if o == nil {
+ return SourceAppfollow{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAppfollowCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAppfollowCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAppfollowCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAppfollowCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceappfollowputrequest.go b/internal/sdk/pkg/models/shared/sourceappfollowputrequest.go
old mode 100755
new mode 100644
index 926e11394..9d8b57a69
--- a/internal/sdk/pkg/models/shared/sourceappfollowputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceappfollowputrequest.go
@@ -7,3 +7,24 @@ type SourceAppfollowPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAppfollowPutRequest) GetConfiguration() SourceAppfollowUpdate {
+ if o == nil {
+ return SourceAppfollowUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAppfollowPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAppfollowPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceappfollowupdate.go b/internal/sdk/pkg/models/shared/sourceappfollowupdate.go
old mode 100755
new mode 100644
index 3808d89c3..baeab5ebb
--- a/internal/sdk/pkg/models/shared/sourceappfollowupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceappfollowupdate.go
@@ -6,3 +6,10 @@ type SourceAppfollowUpdate struct {
// API Key provided by Appfollow
APISecret *string `json:"api_secret,omitempty"`
}
+
+func (o *SourceAppfollowUpdate) GetAPISecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APISecret
+}
diff --git a/internal/sdk/pkg/models/shared/sourceasana.go b/internal/sdk/pkg/models/shared/sourceasana.go
old mode 100755
new mode 100644
index ced0f41a3..4f48e395b
--- a/internal/sdk/pkg/models/shared/sourceasana.go
+++ b/internal/sdk/pkg/models/shared/sourceasana.go
@@ -3,129 +3,182 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle - PAT Credentials
-type SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle string
+// SourceAsanaSchemasCredentialsTitle - PAT Credentials
+type SourceAsanaSchemasCredentialsTitle string
const (
- SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitlePatCredentials SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle = "PAT Credentials"
+ SourceAsanaSchemasCredentialsTitlePatCredentials SourceAsanaSchemasCredentialsTitle = "PAT Credentials"
)
-func (e SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle) ToPointer() *SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle {
+func (e SourceAsanaSchemasCredentialsTitle) ToPointer() *SourceAsanaSchemasCredentialsTitle {
return &e
}
-func (e *SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceAsanaSchemasCredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "PAT Credentials":
- *e = SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle(v)
+ *e = SourceAsanaSchemasCredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceAsanaSchemasCredentialsTitle: %v", v)
}
}
-// SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken - Choose how to authenticate to Github
-type SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken struct {
+// SourceAsanaAuthenticateWithPersonalAccessToken - Choose how to authenticate to Github
+type SourceAsanaAuthenticateWithPersonalAccessToken struct {
// PAT Credentials
- OptionTitle *SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle `json:"option_title,omitempty"`
+ optionTitle *SourceAsanaSchemasCredentialsTitle `const:"PAT Credentials" json:"option_title,omitempty"`
// Asana Personal Access Token (generate yours here).
PersonalAccessToken string `json:"personal_access_token"`
}
-// SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle - OAuth Credentials
-type SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle string
+func (s SourceAsanaAuthenticateWithPersonalAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAsanaAuthenticateWithPersonalAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAsanaAuthenticateWithPersonalAccessToken) GetOptionTitle() *SourceAsanaSchemasCredentialsTitle {
+ return SourceAsanaSchemasCredentialsTitlePatCredentials.ToPointer()
+}
+
+func (o *SourceAsanaAuthenticateWithPersonalAccessToken) GetPersonalAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.PersonalAccessToken
+}
+
+// SourceAsanaCredentialsTitle - OAuth Credentials
+type SourceAsanaCredentialsTitle string
const (
- SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitleOAuthCredentials SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle = "OAuth Credentials"
+ SourceAsanaCredentialsTitleOAuthCredentials SourceAsanaCredentialsTitle = "OAuth Credentials"
)
-func (e SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle) ToPointer() *SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle {
+func (e SourceAsanaCredentialsTitle) ToPointer() *SourceAsanaCredentialsTitle {
return &e
}
-func (e *SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceAsanaCredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth Credentials":
- *e = SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle(v)
+ *e = SourceAsanaCredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceAsanaCredentialsTitle: %v", v)
}
}
-// SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth - Choose how to authenticate to Github
-type SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth struct {
+// SourceAsanaAuthenticateViaAsanaOauth - Choose how to authenticate to Github
+type SourceAsanaAuthenticateViaAsanaOauth struct {
ClientID string `json:"client_id"`
ClientSecret string `json:"client_secret"`
// OAuth Credentials
- OptionTitle *SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle `json:"option_title,omitempty"`
- RefreshToken string `json:"refresh_token"`
+ optionTitle *SourceAsanaCredentialsTitle `const:"OAuth Credentials" json:"option_title,omitempty"`
+ RefreshToken string `json:"refresh_token"`
+}
+
+func (s SourceAsanaAuthenticateViaAsanaOauth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAsanaAuthenticateViaAsanaOauth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAsanaAuthenticateViaAsanaOauth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceAsanaAuthenticateViaAsanaOauth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceAsanaAuthenticateViaAsanaOauth) GetOptionTitle() *SourceAsanaCredentialsTitle {
+ return SourceAsanaCredentialsTitleOAuthCredentials.ToPointer()
+}
+
+func (o *SourceAsanaAuthenticateViaAsanaOauth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
}
type SourceAsanaAuthenticationMechanismType string
const (
- SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth SourceAsanaAuthenticationMechanismType = "source-asana_Authentication mechanism_Authenticate via Asana (Oauth)"
- SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken SourceAsanaAuthenticationMechanismType = "source-asana_Authentication mechanism_Authenticate with Personal Access Token"
+ SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticateViaAsanaOauth SourceAsanaAuthenticationMechanismType = "source-asana_Authenticate via Asana (Oauth)"
+ SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticateWithPersonalAccessToken SourceAsanaAuthenticationMechanismType = "source-asana_Authenticate with Personal Access Token"
)
type SourceAsanaAuthenticationMechanism struct {
- SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth *SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth
- SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken *SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken
+ SourceAsanaAuthenticateViaAsanaOauth *SourceAsanaAuthenticateViaAsanaOauth
+ SourceAsanaAuthenticateWithPersonalAccessToken *SourceAsanaAuthenticateWithPersonalAccessToken
Type SourceAsanaAuthenticationMechanismType
}
-func CreateSourceAsanaAuthenticationMechanismSourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth(sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth) SourceAsanaAuthenticationMechanism {
- typ := SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth
+func CreateSourceAsanaAuthenticationMechanismSourceAsanaAuthenticateViaAsanaOauth(sourceAsanaAuthenticateViaAsanaOauth SourceAsanaAuthenticateViaAsanaOauth) SourceAsanaAuthenticationMechanism {
+ typ := SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticateViaAsanaOauth
return SourceAsanaAuthenticationMechanism{
- SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth: &sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth,
- Type: typ,
+ SourceAsanaAuthenticateViaAsanaOauth: &sourceAsanaAuthenticateViaAsanaOauth,
+ Type: typ,
}
}
-func CreateSourceAsanaAuthenticationMechanismSourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken(sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken) SourceAsanaAuthenticationMechanism {
- typ := SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken
+func CreateSourceAsanaAuthenticationMechanismSourceAsanaAuthenticateWithPersonalAccessToken(sourceAsanaAuthenticateWithPersonalAccessToken SourceAsanaAuthenticateWithPersonalAccessToken) SourceAsanaAuthenticationMechanism {
+ typ := SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticateWithPersonalAccessToken
return SourceAsanaAuthenticationMechanism{
- SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken: &sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken,
+ SourceAsanaAuthenticateWithPersonalAccessToken: &sourceAsanaAuthenticateWithPersonalAccessToken,
Type: typ,
}
}
func (u *SourceAsanaAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil {
- u.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken
- u.Type = SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken
+
+ sourceAsanaAuthenticateWithPersonalAccessToken := new(SourceAsanaAuthenticateWithPersonalAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceAsanaAuthenticateWithPersonalAccessToken, "", true, true); err == nil {
+ u.SourceAsanaAuthenticateWithPersonalAccessToken = sourceAsanaAuthenticateWithPersonalAccessToken
+ u.Type = SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticateWithPersonalAccessToken
return nil
}
- sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth := new(SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth); err == nil {
- u.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth = sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth
- u.Type = SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth
+ sourceAsanaAuthenticateViaAsanaOauth := new(SourceAsanaAuthenticateViaAsanaOauth)
+ if err := utils.UnmarshalJSON(data, &sourceAsanaAuthenticateViaAsanaOauth, "", true, true); err == nil {
+ u.SourceAsanaAuthenticateViaAsanaOauth = sourceAsanaAuthenticateViaAsanaOauth
+ u.Type = SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticateViaAsanaOauth
return nil
}
@@ -133,43 +186,83 @@ func (u *SourceAsanaAuthenticationMechanism) UnmarshalJSON(data []byte) error {
}
func (u SourceAsanaAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
- return json.Marshal(u.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken)
+ if u.SourceAsanaAuthenticateViaAsanaOauth != nil {
+ return utils.MarshalJSON(u.SourceAsanaAuthenticateViaAsanaOauth, "", true)
}
- if u.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth != nil {
- return json.Marshal(u.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth)
+ if u.SourceAsanaAuthenticateWithPersonalAccessToken != nil {
+ return utils.MarshalJSON(u.SourceAsanaAuthenticateWithPersonalAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceAsanaAsana string
+type Asana string
const (
- SourceAsanaAsanaAsana SourceAsanaAsana = "asana"
+ AsanaAsana Asana = "asana"
)
-func (e SourceAsanaAsana) ToPointer() *SourceAsanaAsana {
+func (e Asana) ToPointer() *Asana {
return &e
}
-func (e *SourceAsanaAsana) UnmarshalJSON(data []byte) error {
+func (e *Asana) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "asana":
- *e = SourceAsanaAsana(v)
+ *e = Asana(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAsanaAsana: %v", v)
+ return fmt.Errorf("invalid value for Asana: %v", v)
}
}
type SourceAsana struct {
// Choose how to authenticate to Github
Credentials *SourceAsanaAuthenticationMechanism `json:"credentials,omitempty"`
- SourceType *SourceAsanaAsana `json:"sourceType,omitempty"`
+ // Globally unique identifiers for the organization exports
+ OrganizationExportIds []interface{} `json:"organization_export_ids,omitempty"`
+ sourceType *Asana `const:"asana" json:"sourceType,omitempty"`
+ // This flag is used for testing purposes for certain streams that return a lot of data. This flag is not meant to be enabled for prod.
+ TestMode *bool `json:"test_mode,omitempty"`
+}
+
+func (s SourceAsana) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAsana) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAsana) GetCredentials() *SourceAsanaAuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceAsana) GetOrganizationExportIds() []interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.OrganizationExportIds
+}
+
+func (o *SourceAsana) GetSourceType() *Asana {
+ return AsanaAsana.ToPointer()
+}
+
+func (o *SourceAsana) GetTestMode() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.TestMode
}
diff --git a/internal/sdk/pkg/models/shared/sourceasanacreaterequest.go b/internal/sdk/pkg/models/shared/sourceasanacreaterequest.go
old mode 100755
new mode 100644
index d209f9578..f64451beb
--- a/internal/sdk/pkg/models/shared/sourceasanacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceasanacreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAsanaCreateRequest struct {
Configuration SourceAsana `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAsanaCreateRequest) GetConfiguration() SourceAsana {
+ if o == nil {
+ return SourceAsana{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAsanaCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAsanaCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAsanaCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAsanaCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceasanaputrequest.go b/internal/sdk/pkg/models/shared/sourceasanaputrequest.go
old mode 100755
new mode 100644
index 150b49d1f..9b041f7e1
--- a/internal/sdk/pkg/models/shared/sourceasanaputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceasanaputrequest.go
@@ -7,3 +7,24 @@ type SourceAsanaPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAsanaPutRequest) GetConfiguration() SourceAsanaUpdate {
+ if o == nil {
+ return SourceAsanaUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAsanaPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAsanaPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceasanaupdate.go b/internal/sdk/pkg/models/shared/sourceasanaupdate.go
old mode 100755
new mode 100644
index 0ef1dc6b2..3daa7e93f
--- a/internal/sdk/pkg/models/shared/sourceasanaupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceasanaupdate.go
@@ -3,148 +3,226 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle - PAT Credentials
-type SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle string
+// SourceAsanaUpdateSchemasCredentialsTitle - PAT Credentials
+type SourceAsanaUpdateSchemasCredentialsTitle string
const (
- SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitlePatCredentials SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle = "PAT Credentials"
+ SourceAsanaUpdateSchemasCredentialsTitlePatCredentials SourceAsanaUpdateSchemasCredentialsTitle = "PAT Credentials"
)
-func (e SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle) ToPointer() *SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle {
+func (e SourceAsanaUpdateSchemasCredentialsTitle) ToPointer() *SourceAsanaUpdateSchemasCredentialsTitle {
return &e
}
-func (e *SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceAsanaUpdateSchemasCredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "PAT Credentials":
- *e = SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle(v)
+ *e = SourceAsanaUpdateSchemasCredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceAsanaUpdateSchemasCredentialsTitle: %v", v)
}
}
-// SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken - Choose how to authenticate to Github
-type SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken struct {
+// AuthenticateWithPersonalAccessToken - Choose how to authenticate to Github
+type AuthenticateWithPersonalAccessToken struct {
// PAT Credentials
- OptionTitle *SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenCredentialsTitle `json:"option_title,omitempty"`
+ optionTitle *SourceAsanaUpdateSchemasCredentialsTitle `const:"PAT Credentials" json:"option_title,omitempty"`
// Asana Personal Access Token (generate yours here).
PersonalAccessToken string `json:"personal_access_token"`
}
-// SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle - OAuth Credentials
-type SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle string
+func (a AuthenticateWithPersonalAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AuthenticateWithPersonalAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AuthenticateWithPersonalAccessToken) GetOptionTitle() *SourceAsanaUpdateSchemasCredentialsTitle {
+ return SourceAsanaUpdateSchemasCredentialsTitlePatCredentials.ToPointer()
+}
+
+func (o *AuthenticateWithPersonalAccessToken) GetPersonalAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.PersonalAccessToken
+}
+
+// SourceAsanaUpdateCredentialsTitle - OAuth Credentials
+type SourceAsanaUpdateCredentialsTitle string
const (
- SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitleOAuthCredentials SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle = "OAuth Credentials"
+ SourceAsanaUpdateCredentialsTitleOAuthCredentials SourceAsanaUpdateCredentialsTitle = "OAuth Credentials"
)
-func (e SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle) ToPointer() *SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle {
+func (e SourceAsanaUpdateCredentialsTitle) ToPointer() *SourceAsanaUpdateCredentialsTitle {
return &e
}
-func (e *SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceAsanaUpdateCredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth Credentials":
- *e = SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle(v)
+ *e = SourceAsanaUpdateCredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceAsanaUpdateCredentialsTitle: %v", v)
}
}
-// SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth - Choose how to authenticate to Github
-type SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth struct {
+// AuthenticateViaAsanaOauth - Choose how to authenticate to Github
+type AuthenticateViaAsanaOauth struct {
ClientID string `json:"client_id"`
ClientSecret string `json:"client_secret"`
// OAuth Credentials
- OptionTitle *SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauthCredentialsTitle `json:"option_title,omitempty"`
- RefreshToken string `json:"refresh_token"`
+ optionTitle *SourceAsanaUpdateCredentialsTitle `const:"OAuth Credentials" json:"option_title,omitempty"`
+ RefreshToken string `json:"refresh_token"`
+}
+
+func (a AuthenticateViaAsanaOauth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AuthenticateViaAsanaOauth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AuthenticateViaAsanaOauth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
}
-type SourceAsanaUpdateAuthenticationMechanismType string
+func (o *AuthenticateViaAsanaOauth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *AuthenticateViaAsanaOauth) GetOptionTitle() *SourceAsanaUpdateCredentialsTitle {
+ return SourceAsanaUpdateCredentialsTitleOAuthCredentials.ToPointer()
+}
+
+func (o *AuthenticateViaAsanaOauth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+type AuthenticationMechanismType string
const (
- SourceAsanaUpdateAuthenticationMechanismTypeSourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth SourceAsanaUpdateAuthenticationMechanismType = "source-asana-update_Authentication mechanism_Authenticate via Asana (Oauth)"
- SourceAsanaUpdateAuthenticationMechanismTypeSourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken SourceAsanaUpdateAuthenticationMechanismType = "source-asana-update_Authentication mechanism_Authenticate with Personal Access Token"
+ AuthenticationMechanismTypeAuthenticateViaAsanaOauth AuthenticationMechanismType = "Authenticate via Asana (Oauth)"
+ AuthenticationMechanismTypeAuthenticateWithPersonalAccessToken AuthenticationMechanismType = "Authenticate with Personal Access Token"
)
-type SourceAsanaUpdateAuthenticationMechanism struct {
- SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth *SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth
- SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken *SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken
+type AuthenticationMechanism struct {
+ AuthenticateViaAsanaOauth *AuthenticateViaAsanaOauth
+ AuthenticateWithPersonalAccessToken *AuthenticateWithPersonalAccessToken
- Type SourceAsanaUpdateAuthenticationMechanismType
+ Type AuthenticationMechanismType
}
-func CreateSourceAsanaUpdateAuthenticationMechanismSourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth(sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth) SourceAsanaUpdateAuthenticationMechanism {
- typ := SourceAsanaUpdateAuthenticationMechanismTypeSourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth
+func CreateAuthenticationMechanismAuthenticateViaAsanaOauth(authenticateViaAsanaOauth AuthenticateViaAsanaOauth) AuthenticationMechanism {
+ typ := AuthenticationMechanismTypeAuthenticateViaAsanaOauth
- return SourceAsanaUpdateAuthenticationMechanism{
- SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth: &sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth,
- Type: typ,
+ return AuthenticationMechanism{
+ AuthenticateViaAsanaOauth: &authenticateViaAsanaOauth,
+ Type: typ,
}
}
-func CreateSourceAsanaUpdateAuthenticationMechanismSourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken(sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken) SourceAsanaUpdateAuthenticationMechanism {
- typ := SourceAsanaUpdateAuthenticationMechanismTypeSourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken
+func CreateAuthenticationMechanismAuthenticateWithPersonalAccessToken(authenticateWithPersonalAccessToken AuthenticateWithPersonalAccessToken) AuthenticationMechanism {
+ typ := AuthenticationMechanismTypeAuthenticateWithPersonalAccessToken
- return SourceAsanaUpdateAuthenticationMechanism{
- SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken: &sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken,
- Type: typ,
+ return AuthenticationMechanism{
+ AuthenticateWithPersonalAccessToken: &authenticateWithPersonalAccessToken,
+ Type: typ,
}
}
-func (u *SourceAsanaUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *AuthenticationMechanism) UnmarshalJSON(data []byte) error {
- sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil {
- u.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken
- u.Type = SourceAsanaUpdateAuthenticationMechanismTypeSourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken
+ authenticateWithPersonalAccessToken := new(AuthenticateWithPersonalAccessToken)
+ if err := utils.UnmarshalJSON(data, &authenticateWithPersonalAccessToken, "", true, true); err == nil {
+ u.AuthenticateWithPersonalAccessToken = authenticateWithPersonalAccessToken
+ u.Type = AuthenticationMechanismTypeAuthenticateWithPersonalAccessToken
return nil
}
- sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth := new(SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth); err == nil {
- u.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth = sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth
- u.Type = SourceAsanaUpdateAuthenticationMechanismTypeSourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth
+ authenticateViaAsanaOauth := new(AuthenticateViaAsanaOauth)
+ if err := utils.UnmarshalJSON(data, &authenticateViaAsanaOauth, "", true, true); err == nil {
+ u.AuthenticateViaAsanaOauth = authenticateViaAsanaOauth
+ u.Type = AuthenticationMechanismTypeAuthenticateViaAsanaOauth
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceAsanaUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
- return json.Marshal(u.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken)
+func (u AuthenticationMechanism) MarshalJSON() ([]byte, error) {
+ if u.AuthenticateViaAsanaOauth != nil {
+ return utils.MarshalJSON(u.AuthenticateViaAsanaOauth, "", true)
}
- if u.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth != nil {
- return json.Marshal(u.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth)
+ if u.AuthenticateWithPersonalAccessToken != nil {
+ return utils.MarshalJSON(u.AuthenticateWithPersonalAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceAsanaUpdate struct {
// Choose how to authenticate to Github
- Credentials *SourceAsanaUpdateAuthenticationMechanism `json:"credentials,omitempty"`
+ Credentials *AuthenticationMechanism `json:"credentials,omitempty"`
+ // Globally unique identifiers for the organization exports
+ OrganizationExportIds []interface{} `json:"organization_export_ids,omitempty"`
+ // This flag is used for testing purposes for certain streams that return a lot of data. This flag is not meant to be enabled for prod.
+ TestMode *bool `json:"test_mode,omitempty"`
+}
+
+func (o *SourceAsanaUpdate) GetCredentials() *AuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceAsanaUpdate) GetOrganizationExportIds() []interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.OrganizationExportIds
+}
+
+func (o *SourceAsanaUpdate) GetTestMode() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.TestMode
}
diff --git a/internal/sdk/pkg/models/shared/sourceauth0.go b/internal/sdk/pkg/models/shared/sourceauth0.go
old mode 100755
new mode 100644
index ee9c68f65..2d491a86d
--- a/internal/sdk/pkg/models/shared/sourceauth0.go
+++ b/internal/sdk/pkg/models/shared/sourceauth0.go
@@ -3,126 +3,179 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceAuth0AuthenticationMethodOAuth2AccessTokenAuthenticationMethod string
+type SourceAuth0SchemasCredentialsAuthenticationMethod string
const (
- SourceAuth0AuthenticationMethodOAuth2AccessTokenAuthenticationMethodOauth2AccessToken SourceAuth0AuthenticationMethodOAuth2AccessTokenAuthenticationMethod = "oauth2_access_token"
+ SourceAuth0SchemasCredentialsAuthenticationMethodOauth2AccessToken SourceAuth0SchemasCredentialsAuthenticationMethod = "oauth2_access_token"
)
-func (e SourceAuth0AuthenticationMethodOAuth2AccessTokenAuthenticationMethod) ToPointer() *SourceAuth0AuthenticationMethodOAuth2AccessTokenAuthenticationMethod {
+func (e SourceAuth0SchemasCredentialsAuthenticationMethod) ToPointer() *SourceAuth0SchemasCredentialsAuthenticationMethod {
return &e
}
-func (e *SourceAuth0AuthenticationMethodOAuth2AccessTokenAuthenticationMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAuth0SchemasCredentialsAuthenticationMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2_access_token":
- *e = SourceAuth0AuthenticationMethodOAuth2AccessTokenAuthenticationMethod(v)
+ *e = SourceAuth0SchemasCredentialsAuthenticationMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAuth0AuthenticationMethodOAuth2AccessTokenAuthenticationMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAuth0SchemasCredentialsAuthenticationMethod: %v", v)
}
}
-type SourceAuth0AuthenticationMethodOAuth2AccessToken struct {
+type SourceAuth0OAuth2AccessToken struct {
// Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes.
- AccessToken string `json:"access_token"`
- AuthType SourceAuth0AuthenticationMethodOAuth2AccessTokenAuthenticationMethod `json:"auth_type"`
+ AccessToken string `json:"access_token"`
+ authType SourceAuth0SchemasCredentialsAuthenticationMethod `const:"oauth2_access_token" json:"auth_type"`
}
-type SourceAuth0AuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod string
+func (s SourceAuth0OAuth2AccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAuth0OAuth2AccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAuth0OAuth2AccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceAuth0OAuth2AccessToken) GetAuthType() SourceAuth0SchemasCredentialsAuthenticationMethod {
+ return SourceAuth0SchemasCredentialsAuthenticationMethodOauth2AccessToken
+}
+
+type SourceAuth0SchemasAuthenticationMethod string
const (
- SourceAuth0AuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethodOauth2ConfidentialApplication SourceAuth0AuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod = "oauth2_confidential_application"
+ SourceAuth0SchemasAuthenticationMethodOauth2ConfidentialApplication SourceAuth0SchemasAuthenticationMethod = "oauth2_confidential_application"
)
-func (e SourceAuth0AuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod) ToPointer() *SourceAuth0AuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod {
+func (e SourceAuth0SchemasAuthenticationMethod) ToPointer() *SourceAuth0SchemasAuthenticationMethod {
return &e
}
-func (e *SourceAuth0AuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAuth0SchemasAuthenticationMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2_confidential_application":
- *e = SourceAuth0AuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod(v)
+ *e = SourceAuth0SchemasAuthenticationMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAuth0AuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAuth0SchemasAuthenticationMethod: %v", v)
}
}
-type SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication struct {
+type SourceAuth0OAuth2ConfidentialApplication struct {
// The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab
- Audience string `json:"audience"`
- AuthType SourceAuth0AuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod `json:"auth_type"`
+ Audience string `json:"audience"`
+ authType SourceAuth0SchemasAuthenticationMethod `const:"oauth2_confidential_application" json:"auth_type"`
// Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal.
ClientID string `json:"client_id"`
// Your application's Client Secret. You can find this value on the application's settings tab after you login the admin portal.
ClientSecret string `json:"client_secret"`
}
+func (s SourceAuth0OAuth2ConfidentialApplication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAuth0OAuth2ConfidentialApplication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAuth0OAuth2ConfidentialApplication) GetAudience() string {
+ if o == nil {
+ return ""
+ }
+ return o.Audience
+}
+
+func (o *SourceAuth0OAuth2ConfidentialApplication) GetAuthType() SourceAuth0SchemasAuthenticationMethod {
+ return SourceAuth0SchemasAuthenticationMethodOauth2ConfidentialApplication
+}
+
+func (o *SourceAuth0OAuth2ConfidentialApplication) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceAuth0OAuth2ConfidentialApplication) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
type SourceAuth0AuthenticationMethodType string
const (
- SourceAuth0AuthenticationMethodTypeSourceAuth0AuthenticationMethodOAuth2ConfidentialApplication SourceAuth0AuthenticationMethodType = "source-auth0_Authentication Method_OAuth2 Confidential Application"
- SourceAuth0AuthenticationMethodTypeSourceAuth0AuthenticationMethodOAuth2AccessToken SourceAuth0AuthenticationMethodType = "source-auth0_Authentication Method_OAuth2 Access Token"
+ SourceAuth0AuthenticationMethodTypeSourceAuth0OAuth2ConfidentialApplication SourceAuth0AuthenticationMethodType = "source-auth0_OAuth2 Confidential Application"
+ SourceAuth0AuthenticationMethodTypeSourceAuth0OAuth2AccessToken SourceAuth0AuthenticationMethodType = "source-auth0_OAuth2 Access Token"
)
type SourceAuth0AuthenticationMethod struct {
- SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication *SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication
- SourceAuth0AuthenticationMethodOAuth2AccessToken *SourceAuth0AuthenticationMethodOAuth2AccessToken
+ SourceAuth0OAuth2ConfidentialApplication *SourceAuth0OAuth2ConfidentialApplication
+ SourceAuth0OAuth2AccessToken *SourceAuth0OAuth2AccessToken
Type SourceAuth0AuthenticationMethodType
}
-func CreateSourceAuth0AuthenticationMethodSourceAuth0AuthenticationMethodOAuth2ConfidentialApplication(sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication) SourceAuth0AuthenticationMethod {
- typ := SourceAuth0AuthenticationMethodTypeSourceAuth0AuthenticationMethodOAuth2ConfidentialApplication
+func CreateSourceAuth0AuthenticationMethodSourceAuth0OAuth2ConfidentialApplication(sourceAuth0OAuth2ConfidentialApplication SourceAuth0OAuth2ConfidentialApplication) SourceAuth0AuthenticationMethod {
+ typ := SourceAuth0AuthenticationMethodTypeSourceAuth0OAuth2ConfidentialApplication
return SourceAuth0AuthenticationMethod{
- SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication: &sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication,
- Type: typ,
+ SourceAuth0OAuth2ConfidentialApplication: &sourceAuth0OAuth2ConfidentialApplication,
+ Type: typ,
}
}
-func CreateSourceAuth0AuthenticationMethodSourceAuth0AuthenticationMethodOAuth2AccessToken(sourceAuth0AuthenticationMethodOAuth2AccessToken SourceAuth0AuthenticationMethodOAuth2AccessToken) SourceAuth0AuthenticationMethod {
- typ := SourceAuth0AuthenticationMethodTypeSourceAuth0AuthenticationMethodOAuth2AccessToken
+func CreateSourceAuth0AuthenticationMethodSourceAuth0OAuth2AccessToken(sourceAuth0OAuth2AccessToken SourceAuth0OAuth2AccessToken) SourceAuth0AuthenticationMethod {
+ typ := SourceAuth0AuthenticationMethodTypeSourceAuth0OAuth2AccessToken
return SourceAuth0AuthenticationMethod{
- SourceAuth0AuthenticationMethodOAuth2AccessToken: &sourceAuth0AuthenticationMethodOAuth2AccessToken,
- Type: typ,
+ SourceAuth0OAuth2AccessToken: &sourceAuth0OAuth2AccessToken,
+ Type: typ,
}
}
func (u *SourceAuth0AuthenticationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceAuth0AuthenticationMethodOAuth2AccessToken := new(SourceAuth0AuthenticationMethodOAuth2AccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAuth0AuthenticationMethodOAuth2AccessToken); err == nil {
- u.SourceAuth0AuthenticationMethodOAuth2AccessToken = sourceAuth0AuthenticationMethodOAuth2AccessToken
- u.Type = SourceAuth0AuthenticationMethodTypeSourceAuth0AuthenticationMethodOAuth2AccessToken
+
+ sourceAuth0OAuth2AccessToken := new(SourceAuth0OAuth2AccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceAuth0OAuth2AccessToken, "", true, true); err == nil {
+ u.SourceAuth0OAuth2AccessToken = sourceAuth0OAuth2AccessToken
+ u.Type = SourceAuth0AuthenticationMethodTypeSourceAuth0OAuth2AccessToken
return nil
}
- sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication := new(SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication); err == nil {
- u.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication = sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication
- u.Type = SourceAuth0AuthenticationMethodTypeSourceAuth0AuthenticationMethodOAuth2ConfidentialApplication
+ sourceAuth0OAuth2ConfidentialApplication := new(SourceAuth0OAuth2ConfidentialApplication)
+ if err := utils.UnmarshalJSON(data, &sourceAuth0OAuth2ConfidentialApplication, "", true, true); err == nil {
+ u.SourceAuth0OAuth2ConfidentialApplication = sourceAuth0OAuth2ConfidentialApplication
+ u.Type = SourceAuth0AuthenticationMethodTypeSourceAuth0OAuth2ConfidentialApplication
return nil
}
@@ -130,38 +183,38 @@ func (u *SourceAuth0AuthenticationMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceAuth0AuthenticationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceAuth0AuthenticationMethodOAuth2AccessToken != nil {
- return json.Marshal(u.SourceAuth0AuthenticationMethodOAuth2AccessToken)
+ if u.SourceAuth0OAuth2ConfidentialApplication != nil {
+ return utils.MarshalJSON(u.SourceAuth0OAuth2ConfidentialApplication, "", true)
}
- if u.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication != nil {
- return json.Marshal(u.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication)
+ if u.SourceAuth0OAuth2AccessToken != nil {
+ return utils.MarshalJSON(u.SourceAuth0OAuth2AccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceAuth0Auth0 string
+type Auth0 string
const (
- SourceAuth0Auth0Auth0 SourceAuth0Auth0 = "auth0"
+ Auth0Auth0 Auth0 = "auth0"
)
-func (e SourceAuth0Auth0) ToPointer() *SourceAuth0Auth0 {
+func (e Auth0) ToPointer() *Auth0 {
return &e
}
-func (e *SourceAuth0Auth0) UnmarshalJSON(data []byte) error {
+func (e *Auth0) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "auth0":
- *e = SourceAuth0Auth0(v)
+ *e = Auth0(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAuth0Auth0: %v", v)
+ return fmt.Errorf("invalid value for Auth0: %v", v)
}
}
@@ -169,7 +222,43 @@ type SourceAuth0 struct {
// The Authentication API is served over HTTPS. All URLs referenced in the documentation have the following base `https://YOUR_DOMAIN`
BaseURL string `json:"base_url"`
Credentials SourceAuth0AuthenticationMethod `json:"credentials"`
- SourceType SourceAuth0Auth0 `json:"sourceType"`
+ sourceType Auth0 `const:"auth0" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
- StartDate *string `json:"start_date,omitempty"`
+ StartDate *string `default:"2023-08-05T00:43:59.244Z" json:"start_date"`
+}
+
+func (s SourceAuth0) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAuth0) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAuth0) GetBaseURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.BaseURL
+}
+
+func (o *SourceAuth0) GetCredentials() SourceAuth0AuthenticationMethod {
+ if o == nil {
+ return SourceAuth0AuthenticationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceAuth0) GetSourceType() Auth0 {
+ return Auth0Auth0
+}
+
+func (o *SourceAuth0) GetStartDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceauth0createrequest.go b/internal/sdk/pkg/models/shared/sourceauth0createrequest.go
old mode 100755
new mode 100644
index 91e72e6bc..794dc0e15
--- a/internal/sdk/pkg/models/shared/sourceauth0createrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceauth0createrequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAuth0CreateRequest struct {
Configuration SourceAuth0 `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAuth0CreateRequest) GetConfiguration() SourceAuth0 {
+ if o == nil {
+ return SourceAuth0{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAuth0CreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAuth0CreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAuth0CreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAuth0CreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceauth0putrequest.go b/internal/sdk/pkg/models/shared/sourceauth0putrequest.go
old mode 100755
new mode 100644
index f47f23e7a..a1e174176
--- a/internal/sdk/pkg/models/shared/sourceauth0putrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceauth0putrequest.go
@@ -7,3 +7,24 @@ type SourceAuth0PutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAuth0PutRequest) GetConfiguration() SourceAuth0Update {
+ if o == nil {
+ return SourceAuth0Update{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAuth0PutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAuth0PutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceauth0update.go b/internal/sdk/pkg/models/shared/sourceauth0update.go
old mode 100755
new mode 100644
index 6b35171d2..6dbdbc370
--- a/internal/sdk/pkg/models/shared/sourceauth0update.go
+++ b/internal/sdk/pkg/models/shared/sourceauth0update.go
@@ -3,126 +3,179 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceAuth0UpdateAuthenticationMethodOAuth2AccessTokenAuthenticationMethod string
+type SourceAuth0UpdateSchemasCredentialsAuthenticationMethod string
const (
- SourceAuth0UpdateAuthenticationMethodOAuth2AccessTokenAuthenticationMethodOauth2AccessToken SourceAuth0UpdateAuthenticationMethodOAuth2AccessTokenAuthenticationMethod = "oauth2_access_token"
+ SourceAuth0UpdateSchemasCredentialsAuthenticationMethodOauth2AccessToken SourceAuth0UpdateSchemasCredentialsAuthenticationMethod = "oauth2_access_token"
)
-func (e SourceAuth0UpdateAuthenticationMethodOAuth2AccessTokenAuthenticationMethod) ToPointer() *SourceAuth0UpdateAuthenticationMethodOAuth2AccessTokenAuthenticationMethod {
+func (e SourceAuth0UpdateSchemasCredentialsAuthenticationMethod) ToPointer() *SourceAuth0UpdateSchemasCredentialsAuthenticationMethod {
return &e
}
-func (e *SourceAuth0UpdateAuthenticationMethodOAuth2AccessTokenAuthenticationMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAuth0UpdateSchemasCredentialsAuthenticationMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2_access_token":
- *e = SourceAuth0UpdateAuthenticationMethodOAuth2AccessTokenAuthenticationMethod(v)
+ *e = SourceAuth0UpdateSchemasCredentialsAuthenticationMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAuth0UpdateAuthenticationMethodOAuth2AccessTokenAuthenticationMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAuth0UpdateSchemasCredentialsAuthenticationMethod: %v", v)
}
}
-type SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken struct {
+type OAuth2AccessToken struct {
// Also called API Access Token The access token used to call the Auth0 Management API Token. It's a JWT that contains specific grant permissions knowns as scopes.
- AccessToken string `json:"access_token"`
- AuthType SourceAuth0UpdateAuthenticationMethodOAuth2AccessTokenAuthenticationMethod `json:"auth_type"`
+ AccessToken string `json:"access_token"`
+ authType SourceAuth0UpdateSchemasCredentialsAuthenticationMethod `const:"oauth2_access_token" json:"auth_type"`
}
-type SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod string
+func (o OAuth2AccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(o, "", false)
+}
+
+func (o *OAuth2AccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &o, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *OAuth2AccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *OAuth2AccessToken) GetAuthType() SourceAuth0UpdateSchemasCredentialsAuthenticationMethod {
+ return SourceAuth0UpdateSchemasCredentialsAuthenticationMethodOauth2AccessToken
+}
+
+type SourceAuth0UpdateSchemasAuthenticationMethod string
const (
- SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethodOauth2ConfidentialApplication SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod = "oauth2_confidential_application"
+ SourceAuth0UpdateSchemasAuthenticationMethodOauth2ConfidentialApplication SourceAuth0UpdateSchemasAuthenticationMethod = "oauth2_confidential_application"
)
-func (e SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod) ToPointer() *SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod {
+func (e SourceAuth0UpdateSchemasAuthenticationMethod) ToPointer() *SourceAuth0UpdateSchemasAuthenticationMethod {
return &e
}
-func (e *SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceAuth0UpdateSchemasAuthenticationMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2_confidential_application":
- *e = SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod(v)
+ *e = SourceAuth0UpdateSchemasAuthenticationMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceAuth0UpdateSchemasAuthenticationMethod: %v", v)
}
}
-type SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication struct {
+type OAuth2ConfidentialApplication struct {
// The audience for the token, which is your API. You can find this in the Identifier field on your API's settings tab
- Audience string `json:"audience"`
- AuthType SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplicationAuthenticationMethod `json:"auth_type"`
+ Audience string `json:"audience"`
+ authType SourceAuth0UpdateSchemasAuthenticationMethod `const:"oauth2_confidential_application" json:"auth_type"`
// Your application's Client ID. You can find this value on the application's settings tab after you login the admin portal.
ClientID string `json:"client_id"`
// Your application's Client Secret. You can find this value on the application's settings tab after you login the admin portal.
ClientSecret string `json:"client_secret"`
}
+func (o OAuth2ConfidentialApplication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(o, "", false)
+}
+
+func (o *OAuth2ConfidentialApplication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &o, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *OAuth2ConfidentialApplication) GetAudience() string {
+ if o == nil {
+ return ""
+ }
+ return o.Audience
+}
+
+func (o *OAuth2ConfidentialApplication) GetAuthType() SourceAuth0UpdateSchemasAuthenticationMethod {
+ return SourceAuth0UpdateSchemasAuthenticationMethodOauth2ConfidentialApplication
+}
+
+func (o *OAuth2ConfidentialApplication) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *OAuth2ConfidentialApplication) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
type SourceAuth0UpdateAuthenticationMethodType string
const (
- SourceAuth0UpdateAuthenticationMethodTypeSourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication SourceAuth0UpdateAuthenticationMethodType = "source-auth0-update_Authentication Method_OAuth2 Confidential Application"
- SourceAuth0UpdateAuthenticationMethodTypeSourceAuth0UpdateAuthenticationMethodOAuth2AccessToken SourceAuth0UpdateAuthenticationMethodType = "source-auth0-update_Authentication Method_OAuth2 Access Token"
+ SourceAuth0UpdateAuthenticationMethodTypeOAuth2ConfidentialApplication SourceAuth0UpdateAuthenticationMethodType = "OAuth2 Confidential Application"
+ SourceAuth0UpdateAuthenticationMethodTypeOAuth2AccessToken SourceAuth0UpdateAuthenticationMethodType = "OAuth2 Access Token"
)
type SourceAuth0UpdateAuthenticationMethod struct {
- SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication *SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication
- SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken *SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken
+ OAuth2ConfidentialApplication *OAuth2ConfidentialApplication
+ OAuth2AccessToken *OAuth2AccessToken
Type SourceAuth0UpdateAuthenticationMethodType
}
-func CreateSourceAuth0UpdateAuthenticationMethodSourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication(sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication) SourceAuth0UpdateAuthenticationMethod {
- typ := SourceAuth0UpdateAuthenticationMethodTypeSourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication
+func CreateSourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication(oAuth2ConfidentialApplication OAuth2ConfidentialApplication) SourceAuth0UpdateAuthenticationMethod {
+ typ := SourceAuth0UpdateAuthenticationMethodTypeOAuth2ConfidentialApplication
return SourceAuth0UpdateAuthenticationMethod{
- SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication: &sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication,
- Type: typ,
+ OAuth2ConfidentialApplication: &oAuth2ConfidentialApplication,
+ Type: typ,
}
}
-func CreateSourceAuth0UpdateAuthenticationMethodSourceAuth0UpdateAuthenticationMethodOAuth2AccessToken(sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken) SourceAuth0UpdateAuthenticationMethod {
- typ := SourceAuth0UpdateAuthenticationMethodTypeSourceAuth0UpdateAuthenticationMethodOAuth2AccessToken
+func CreateSourceAuth0UpdateAuthenticationMethodOAuth2AccessToken(oAuth2AccessToken OAuth2AccessToken) SourceAuth0UpdateAuthenticationMethod {
+ typ := SourceAuth0UpdateAuthenticationMethodTypeOAuth2AccessToken
return SourceAuth0UpdateAuthenticationMethod{
- SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken: &sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken,
- Type: typ,
+ OAuth2AccessToken: &oAuth2AccessToken,
+ Type: typ,
}
}
func (u *SourceAuth0UpdateAuthenticationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken := new(SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken); err == nil {
- u.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken = sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken
- u.Type = SourceAuth0UpdateAuthenticationMethodTypeSourceAuth0UpdateAuthenticationMethodOAuth2AccessToken
+
+ oAuth2AccessToken := new(OAuth2AccessToken)
+ if err := utils.UnmarshalJSON(data, &oAuth2AccessToken, "", true, true); err == nil {
+ u.OAuth2AccessToken = oAuth2AccessToken
+ u.Type = SourceAuth0UpdateAuthenticationMethodTypeOAuth2AccessToken
return nil
}
- sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication := new(SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication); err == nil {
- u.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication = sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication
- u.Type = SourceAuth0UpdateAuthenticationMethodTypeSourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication
+ oAuth2ConfidentialApplication := new(OAuth2ConfidentialApplication)
+ if err := utils.UnmarshalJSON(data, &oAuth2ConfidentialApplication, "", true, true); err == nil {
+ u.OAuth2ConfidentialApplication = oAuth2ConfidentialApplication
+ u.Type = SourceAuth0UpdateAuthenticationMethodTypeOAuth2ConfidentialApplication
return nil
}
@@ -130,15 +183,15 @@ func (u *SourceAuth0UpdateAuthenticationMethod) UnmarshalJSON(data []byte) error
}
func (u SourceAuth0UpdateAuthenticationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken != nil {
- return json.Marshal(u.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken)
+ if u.OAuth2ConfidentialApplication != nil {
+ return utils.MarshalJSON(u.OAuth2ConfidentialApplication, "", true)
}
- if u.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication != nil {
- return json.Marshal(u.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication)
+ if u.OAuth2AccessToken != nil {
+ return utils.MarshalJSON(u.OAuth2AccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceAuth0Update struct {
@@ -146,5 +199,37 @@ type SourceAuth0Update struct {
BaseURL string `json:"base_url"`
Credentials SourceAuth0UpdateAuthenticationMethod `json:"credentials"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
- StartDate *string `json:"start_date,omitempty"`
+ StartDate *string `default:"2023-08-05T00:43:59.244Z" json:"start_date"`
+}
+
+func (s SourceAuth0Update) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAuth0Update) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAuth0Update) GetBaseURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.BaseURL
+}
+
+func (o *SourceAuth0Update) GetCredentials() SourceAuth0UpdateAuthenticationMethod {
+ if o == nil {
+ return SourceAuth0UpdateAuthenticationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceAuth0Update) GetStartDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceawscloudtrail.go b/internal/sdk/pkg/models/shared/sourceawscloudtrail.go
old mode 100755
new mode 100644
index 63815d2aa..4a395bb85
--- a/internal/sdk/pkg/models/shared/sourceawscloudtrail.go
+++ b/internal/sdk/pkg/models/shared/sourceawscloudtrail.go
@@ -3,32 +3,33 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceAwsCloudtrailAwsCloudtrail string
+type AwsCloudtrail string
const (
- SourceAwsCloudtrailAwsCloudtrailAwsCloudtrail SourceAwsCloudtrailAwsCloudtrail = "aws-cloudtrail"
+ AwsCloudtrailAwsCloudtrail AwsCloudtrail = "aws-cloudtrail"
)
-func (e SourceAwsCloudtrailAwsCloudtrail) ToPointer() *SourceAwsCloudtrailAwsCloudtrail {
+func (e AwsCloudtrail) ToPointer() *AwsCloudtrail {
return &e
}
-func (e *SourceAwsCloudtrailAwsCloudtrail) UnmarshalJSON(data []byte) error {
+func (e *AwsCloudtrail) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "aws-cloudtrail":
- *e = SourceAwsCloudtrailAwsCloudtrail(v)
+ *e = AwsCloudtrail(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAwsCloudtrailAwsCloudtrail: %v", v)
+ return fmt.Errorf("invalid value for AwsCloudtrail: %v", v)
}
}
@@ -38,8 +39,51 @@ type SourceAwsCloudtrail struct {
// The default AWS Region to use, for example, us-west-1 or us-west-2. When specifying a Region inline during client initialization, this property is named region_name.
AwsRegionName string `json:"aws_region_name"`
// AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.
- AwsSecretKey string `json:"aws_secret_key"`
- SourceType SourceAwsCloudtrailAwsCloudtrail `json:"sourceType"`
+ AwsSecretKey string `json:"aws_secret_key"`
+ sourceType AwsCloudtrail `const:"aws-cloudtrail" json:"sourceType"`
// The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD.
- StartDate types.Date `json:"start_date"`
+ StartDate *types.Date `default:"1970-01-01" json:"start_date"`
+}
+
+func (s SourceAwsCloudtrail) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAwsCloudtrail) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAwsCloudtrail) GetAwsKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsKeyID
+}
+
+func (o *SourceAwsCloudtrail) GetAwsRegionName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsRegionName
+}
+
+func (o *SourceAwsCloudtrail) GetAwsSecretKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsSecretKey
+}
+
+func (o *SourceAwsCloudtrail) GetSourceType() AwsCloudtrail {
+ return AwsCloudtrailAwsCloudtrail
+}
+
+func (o *SourceAwsCloudtrail) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceawscloudtrailcreaterequest.go b/internal/sdk/pkg/models/shared/sourceawscloudtrailcreaterequest.go
old mode 100755
new mode 100644
index 3cc0b1399..6a8b5b16a
--- a/internal/sdk/pkg/models/shared/sourceawscloudtrailcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceawscloudtrailcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAwsCloudtrailCreateRequest struct {
Configuration SourceAwsCloudtrail `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAwsCloudtrailCreateRequest) GetConfiguration() SourceAwsCloudtrail {
+ if o == nil {
+ return SourceAwsCloudtrail{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAwsCloudtrailCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAwsCloudtrailCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAwsCloudtrailCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAwsCloudtrailCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceawscloudtrailputrequest.go b/internal/sdk/pkg/models/shared/sourceawscloudtrailputrequest.go
old mode 100755
new mode 100644
index 31fbf24eb..bce01c2fa
--- a/internal/sdk/pkg/models/shared/sourceawscloudtrailputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceawscloudtrailputrequest.go
@@ -7,3 +7,24 @@ type SourceAwsCloudtrailPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAwsCloudtrailPutRequest) GetConfiguration() SourceAwsCloudtrailUpdate {
+ if o == nil {
+ return SourceAwsCloudtrailUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAwsCloudtrailPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAwsCloudtrailPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceawscloudtrailupdate.go b/internal/sdk/pkg/models/shared/sourceawscloudtrailupdate.go
old mode 100755
new mode 100644
index d77845da3..bff9e909c
--- a/internal/sdk/pkg/models/shared/sourceawscloudtrailupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceawscloudtrailupdate.go
@@ -3,7 +3,8 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceAwsCloudtrailUpdate struct {
@@ -14,5 +15,44 @@ type SourceAwsCloudtrailUpdate struct {
// AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key.
AwsSecretKey string `json:"aws_secret_key"`
// The date you would like to replicate data. Data in AWS CloudTrail is available for last 90 days only. Format: YYYY-MM-DD.
- StartDate types.Date `json:"start_date"`
+ StartDate *types.Date `default:"1970-01-01" json:"start_date"`
+}
+
+func (s SourceAwsCloudtrailUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAwsCloudtrailUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAwsCloudtrailUpdate) GetAwsKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsKeyID
+}
+
+func (o *SourceAwsCloudtrailUpdate) GetAwsRegionName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsRegionName
+}
+
+func (o *SourceAwsCloudtrailUpdate) GetAwsSecretKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AwsSecretKey
+}
+
+func (o *SourceAwsCloudtrailUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceazureblobstorage.go b/internal/sdk/pkg/models/shared/sourceazureblobstorage.go
old mode 100755
new mode 100644
index 0add41315..1539dab40
--- a/internal/sdk/pkg/models/shared/sourceazureblobstorage.go
+++ b/internal/sdk/pkg/models/shared/sourceazureblobstorage.go
@@ -3,123 +3,981 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
)
-type SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSONFormatType string
+type SourceAzureBlobStorageAzureBlobStorage string
+
+const (
+ SourceAzureBlobStorageAzureBlobStorageAzureBlobStorage SourceAzureBlobStorageAzureBlobStorage = "azure-blob-storage"
+)
+
+func (e SourceAzureBlobStorageAzureBlobStorage) ToPointer() *SourceAzureBlobStorageAzureBlobStorage {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageAzureBlobStorage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "azure-blob-storage":
+ *e = SourceAzureBlobStorageAzureBlobStorage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageAzureBlobStorage: %v", v)
+ }
+}
+
+type SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype string
+
+const (
+ SourceAzureBlobStorageSchemasStreamsFormatFormatFiletypeUnstructured SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype = "unstructured"
+)
+
+func (e SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype) ToPointer() *SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "unstructured":
+ *e = SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype: %v", v)
+ }
+}
+
+// SourceAzureBlobStorageDocumentFileTypeFormatExperimental - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
+type SourceAzureBlobStorageDocumentFileTypeFormatExperimental struct {
+ filetype *SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype `const:"unstructured" json:"filetype"`
+ // If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.
+ SkipUnprocessableFileTypes *bool `default:"true" json:"skip_unprocessable_file_types"`
+}
+
+func (s SourceAzureBlobStorageDocumentFileTypeFormatExperimental) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureBlobStorageDocumentFileTypeFormatExperimental) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureBlobStorageDocumentFileTypeFormatExperimental) GetFiletype() *SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype {
+ return SourceAzureBlobStorageSchemasStreamsFormatFormatFiletypeUnstructured.ToPointer()
+}
+
+func (o *SourceAzureBlobStorageDocumentFileTypeFormatExperimental) GetSkipUnprocessableFileTypes() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.SkipUnprocessableFileTypes
+}
+
+type SourceAzureBlobStorageSchemasStreamsFormatFiletype string
+
+const (
+ SourceAzureBlobStorageSchemasStreamsFormatFiletypeParquet SourceAzureBlobStorageSchemasStreamsFormatFiletype = "parquet"
+)
+
+func (e SourceAzureBlobStorageSchemasStreamsFormatFiletype) ToPointer() *SourceAzureBlobStorageSchemasStreamsFormatFiletype {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageSchemasStreamsFormatFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "parquet":
+ *e = SourceAzureBlobStorageSchemasStreamsFormatFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageSchemasStreamsFormatFiletype: %v", v)
+ }
+}
+
+// SourceAzureBlobStorageParquetFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceAzureBlobStorageParquetFormat struct {
+ // Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
+ DecimalAsFloat *bool `default:"false" json:"decimal_as_float"`
+ filetype *SourceAzureBlobStorageSchemasStreamsFormatFiletype `const:"parquet" json:"filetype"`
+}
+
+func (s SourceAzureBlobStorageParquetFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureBlobStorageParquetFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureBlobStorageParquetFormat) GetDecimalAsFloat() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DecimalAsFloat
+}
+
+func (o *SourceAzureBlobStorageParquetFormat) GetFiletype() *SourceAzureBlobStorageSchemasStreamsFormatFiletype {
+ return SourceAzureBlobStorageSchemasStreamsFormatFiletypeParquet.ToPointer()
+}
+
+type SourceAzureBlobStorageSchemasStreamsFiletype string
+
+const (
+ SourceAzureBlobStorageSchemasStreamsFiletypeJsonl SourceAzureBlobStorageSchemasStreamsFiletype = "jsonl"
+)
+
+func (e SourceAzureBlobStorageSchemasStreamsFiletype) ToPointer() *SourceAzureBlobStorageSchemasStreamsFiletype {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageSchemasStreamsFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "jsonl":
+ *e = SourceAzureBlobStorageSchemasStreamsFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageSchemasStreamsFiletype: %v", v)
+ }
+}
+
+// SourceAzureBlobStorageJsonlFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceAzureBlobStorageJsonlFormat struct {
+ filetype *SourceAzureBlobStorageSchemasStreamsFiletype `const:"jsonl" json:"filetype"`
+}
+
+func (s SourceAzureBlobStorageJsonlFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureBlobStorageJsonlFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureBlobStorageJsonlFormat) GetFiletype() *SourceAzureBlobStorageSchemasStreamsFiletype {
+ return SourceAzureBlobStorageSchemasStreamsFiletypeJsonl.ToPointer()
+}
+
+type SourceAzureBlobStorageSchemasFiletype string
+
+const (
+ SourceAzureBlobStorageSchemasFiletypeCsv SourceAzureBlobStorageSchemasFiletype = "csv"
+)
+
+func (e SourceAzureBlobStorageSchemasFiletype) ToPointer() *SourceAzureBlobStorageSchemasFiletype {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageSchemasFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "csv":
+ *e = SourceAzureBlobStorageSchemasFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageSchemasFiletype: %v", v)
+ }
+}
+
+type SourceAzureBlobStorageSchemasStreamsHeaderDefinitionType string
+
+const (
+ SourceAzureBlobStorageSchemasStreamsHeaderDefinitionTypeUserProvided SourceAzureBlobStorageSchemasStreamsHeaderDefinitionType = "User Provided"
+)
+
+func (e SourceAzureBlobStorageSchemasStreamsHeaderDefinitionType) ToPointer() *SourceAzureBlobStorageSchemasStreamsHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageSchemasStreamsHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "User Provided":
+ *e = SourceAzureBlobStorageSchemasStreamsHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageSchemasStreamsHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceAzureBlobStorageUserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceAzureBlobStorageUserProvided struct {
+ // The column names that will be used while emitting the CSV records
+ ColumnNames []string `json:"column_names"`
+ headerDefinitionType *SourceAzureBlobStorageSchemasStreamsHeaderDefinitionType `const:"User Provided" json:"header_definition_type"`
+}
+
+func (s SourceAzureBlobStorageUserProvided) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureBlobStorageUserProvided) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureBlobStorageUserProvided) GetColumnNames() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.ColumnNames
+}
+
+func (o *SourceAzureBlobStorageUserProvided) GetHeaderDefinitionType() *SourceAzureBlobStorageSchemasStreamsHeaderDefinitionType {
+ return SourceAzureBlobStorageSchemasStreamsHeaderDefinitionTypeUserProvided.ToPointer()
+}
+
+type SourceAzureBlobStorageSchemasHeaderDefinitionType string
+
+const (
+ SourceAzureBlobStorageSchemasHeaderDefinitionTypeAutogenerated SourceAzureBlobStorageSchemasHeaderDefinitionType = "Autogenerated"
+)
+
+func (e SourceAzureBlobStorageSchemasHeaderDefinitionType) ToPointer() *SourceAzureBlobStorageSchemasHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageSchemasHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Autogenerated":
+ *e = SourceAzureBlobStorageSchemasHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageSchemasHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceAzureBlobStorageAutogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceAzureBlobStorageAutogenerated struct {
+ headerDefinitionType *SourceAzureBlobStorageSchemasHeaderDefinitionType `const:"Autogenerated" json:"header_definition_type"`
+}
+
+func (s SourceAzureBlobStorageAutogenerated) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureBlobStorageAutogenerated) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureBlobStorageAutogenerated) GetHeaderDefinitionType() *SourceAzureBlobStorageSchemasHeaderDefinitionType {
+ return SourceAzureBlobStorageSchemasHeaderDefinitionTypeAutogenerated.ToPointer()
+}
+
+type SourceAzureBlobStorageHeaderDefinitionType string
+
+const (
+ SourceAzureBlobStorageHeaderDefinitionTypeFromCsv SourceAzureBlobStorageHeaderDefinitionType = "From CSV"
+)
+
+func (e SourceAzureBlobStorageHeaderDefinitionType) ToPointer() *SourceAzureBlobStorageHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "From CSV":
+ *e = SourceAzureBlobStorageHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceAzureBlobStorageFromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceAzureBlobStorageFromCSV struct {
+ headerDefinitionType *SourceAzureBlobStorageHeaderDefinitionType `const:"From CSV" json:"header_definition_type"`
+}
+
+func (s SourceAzureBlobStorageFromCSV) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureBlobStorageFromCSV) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureBlobStorageFromCSV) GetHeaderDefinitionType() *SourceAzureBlobStorageHeaderDefinitionType {
+ return SourceAzureBlobStorageHeaderDefinitionTypeFromCsv.ToPointer()
+}
+
+type SourceAzureBlobStorageCSVHeaderDefinitionType string
+
+const (
+ SourceAzureBlobStorageCSVHeaderDefinitionTypeSourceAzureBlobStorageFromCSV SourceAzureBlobStorageCSVHeaderDefinitionType = "source-azure-blob-storage_From CSV"
+ SourceAzureBlobStorageCSVHeaderDefinitionTypeSourceAzureBlobStorageAutogenerated SourceAzureBlobStorageCSVHeaderDefinitionType = "source-azure-blob-storage_Autogenerated"
+ SourceAzureBlobStorageCSVHeaderDefinitionTypeSourceAzureBlobStorageUserProvided SourceAzureBlobStorageCSVHeaderDefinitionType = "source-azure-blob-storage_User Provided"
+)
+
+type SourceAzureBlobStorageCSVHeaderDefinition struct {
+ SourceAzureBlobStorageFromCSV *SourceAzureBlobStorageFromCSV
+ SourceAzureBlobStorageAutogenerated *SourceAzureBlobStorageAutogenerated
+ SourceAzureBlobStorageUserProvided *SourceAzureBlobStorageUserProvided
+
+ Type SourceAzureBlobStorageCSVHeaderDefinitionType
+}
+
+func CreateSourceAzureBlobStorageCSVHeaderDefinitionSourceAzureBlobStorageFromCSV(sourceAzureBlobStorageFromCSV SourceAzureBlobStorageFromCSV) SourceAzureBlobStorageCSVHeaderDefinition {
+ typ := SourceAzureBlobStorageCSVHeaderDefinitionTypeSourceAzureBlobStorageFromCSV
+
+ return SourceAzureBlobStorageCSVHeaderDefinition{
+ SourceAzureBlobStorageFromCSV: &sourceAzureBlobStorageFromCSV,
+ Type: typ,
+ }
+}
+
+func CreateSourceAzureBlobStorageCSVHeaderDefinitionSourceAzureBlobStorageAutogenerated(sourceAzureBlobStorageAutogenerated SourceAzureBlobStorageAutogenerated) SourceAzureBlobStorageCSVHeaderDefinition {
+ typ := SourceAzureBlobStorageCSVHeaderDefinitionTypeSourceAzureBlobStorageAutogenerated
+
+ return SourceAzureBlobStorageCSVHeaderDefinition{
+ SourceAzureBlobStorageAutogenerated: &sourceAzureBlobStorageAutogenerated,
+ Type: typ,
+ }
+}
+
+func CreateSourceAzureBlobStorageCSVHeaderDefinitionSourceAzureBlobStorageUserProvided(sourceAzureBlobStorageUserProvided SourceAzureBlobStorageUserProvided) SourceAzureBlobStorageCSVHeaderDefinition {
+ typ := SourceAzureBlobStorageCSVHeaderDefinitionTypeSourceAzureBlobStorageUserProvided
+
+ return SourceAzureBlobStorageCSVHeaderDefinition{
+ SourceAzureBlobStorageUserProvided: &sourceAzureBlobStorageUserProvided,
+ Type: typ,
+ }
+}
+
+func (u *SourceAzureBlobStorageCSVHeaderDefinition) UnmarshalJSON(data []byte) error {
+
+ sourceAzureBlobStorageFromCSV := new(SourceAzureBlobStorageFromCSV)
+ if err := utils.UnmarshalJSON(data, &sourceAzureBlobStorageFromCSV, "", true, true); err == nil {
+ u.SourceAzureBlobStorageFromCSV = sourceAzureBlobStorageFromCSV
+ u.Type = SourceAzureBlobStorageCSVHeaderDefinitionTypeSourceAzureBlobStorageFromCSV
+ return nil
+ }
+
+ sourceAzureBlobStorageAutogenerated := new(SourceAzureBlobStorageAutogenerated)
+ if err := utils.UnmarshalJSON(data, &sourceAzureBlobStorageAutogenerated, "", true, true); err == nil {
+ u.SourceAzureBlobStorageAutogenerated = sourceAzureBlobStorageAutogenerated
+ u.Type = SourceAzureBlobStorageCSVHeaderDefinitionTypeSourceAzureBlobStorageAutogenerated
+ return nil
+ }
+
+ sourceAzureBlobStorageUserProvided := new(SourceAzureBlobStorageUserProvided)
+ if err := utils.UnmarshalJSON(data, &sourceAzureBlobStorageUserProvided, "", true, true); err == nil {
+ u.SourceAzureBlobStorageUserProvided = sourceAzureBlobStorageUserProvided
+ u.Type = SourceAzureBlobStorageCSVHeaderDefinitionTypeSourceAzureBlobStorageUserProvided
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceAzureBlobStorageCSVHeaderDefinition) MarshalJSON() ([]byte, error) {
+ if u.SourceAzureBlobStorageFromCSV != nil {
+ return utils.MarshalJSON(u.SourceAzureBlobStorageFromCSV, "", true)
+ }
+
+ if u.SourceAzureBlobStorageAutogenerated != nil {
+ return utils.MarshalJSON(u.SourceAzureBlobStorageAutogenerated, "", true)
+ }
+
+ if u.SourceAzureBlobStorageUserProvided != nil {
+ return utils.MarshalJSON(u.SourceAzureBlobStorageUserProvided, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceAzureBlobStorageInferenceType - How to infer the types of the columns. If none, inference default to strings.
+type SourceAzureBlobStorageInferenceType string
const (
- SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSONFormatTypeJsonl SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSONFormatType = "JSONL"
+ SourceAzureBlobStorageInferenceTypeNone SourceAzureBlobStorageInferenceType = "None"
+ SourceAzureBlobStorageInferenceTypePrimitiveTypesOnly SourceAzureBlobStorageInferenceType = "Primitive Types Only"
)
-func (e SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSONFormatType) ToPointer() *SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSONFormatType {
+func (e SourceAzureBlobStorageInferenceType) ToPointer() *SourceAzureBlobStorageInferenceType {
return &e
}
-func (e *SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSONFormatType) UnmarshalJSON(data []byte) error {
+func (e *SourceAzureBlobStorageInferenceType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
- case "JSONL":
- *e = SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSONFormatType(v)
+ case "None":
+ fallthrough
+ case "Primitive Types Only":
+ *e = SourceAzureBlobStorageInferenceType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSONFormatType: %v", v)
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageInferenceType: %v", v)
+ }
+}
+
+// SourceAzureBlobStorageCSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceAzureBlobStorageCSVFormat struct {
+ // The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
+ Delimiter *string `default:"," json:"delimiter"`
+ // Whether two quotes in a quoted CSV value denote a single quote in the data.
+ DoubleQuote *bool `default:"true" json:"double_quote"`
+ // The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
+ Encoding *string `default:"utf8" json:"encoding"`
+ // The character used for escaping special characters. To disallow escaping, leave this field blank.
+ EscapeChar *string `json:"escape_char,omitempty"`
+ // A set of case-sensitive strings that should be interpreted as false values.
+ FalseValues []string `json:"false_values,omitempty"`
+ filetype *SourceAzureBlobStorageSchemasFiletype `const:"csv" json:"filetype"`
+ // How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+ HeaderDefinition *SourceAzureBlobStorageCSVHeaderDefinition `json:"header_definition,omitempty"`
+ // How to infer the types of the columns. If none, inference default to strings.
+ InferenceType *SourceAzureBlobStorageInferenceType `default:"None" json:"inference_type"`
+ // A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
+ NullValues []string `json:"null_values,omitempty"`
+ // The character used for quoting CSV values. To disallow quoting, make this field blank.
+	QuoteChar *string `default:"\"" json:"quote_char"`
+ // The number of rows to skip after the header row.
+ SkipRowsAfterHeader *int64 `default:"0" json:"skip_rows_after_header"`
+ // The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
+ SkipRowsBeforeHeader *int64 `default:"0" json:"skip_rows_before_header"`
+ // Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
+ StringsCanBeNull *bool `default:"true" json:"strings_can_be_null"`
+ // A set of case-sensitive strings that should be interpreted as true values.
+ TrueValues []string `json:"true_values,omitempty"`
+}
+
+func (s SourceAzureBlobStorageCSVFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureBlobStorageCSVFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON - Input data format
-type SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON struct {
- FormatType SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSONFormatType `json:"format_type"`
+func (o *SourceAzureBlobStorageCSVFormat) GetDelimiter() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Delimiter
}
-type SourceAzureBlobStorageInputFormatType string
+func (o *SourceAzureBlobStorageCSVFormat) GetDoubleQuote() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleQuote
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetEncoding() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Encoding
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetEscapeChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EscapeChar
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetFalseValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.FalseValues
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetFiletype() *SourceAzureBlobStorageSchemasFiletype {
+ return SourceAzureBlobStorageSchemasFiletypeCsv.ToPointer()
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetHeaderDefinition() *SourceAzureBlobStorageCSVHeaderDefinition {
+ if o == nil {
+ return nil
+ }
+ return o.HeaderDefinition
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetInferenceType() *SourceAzureBlobStorageInferenceType {
+ if o == nil {
+ return nil
+ }
+ return o.InferenceType
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetNullValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.NullValues
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetQuoteChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QuoteChar
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetSkipRowsAfterHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsAfterHeader
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetSkipRowsBeforeHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsBeforeHeader
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetStringsCanBeNull() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.StringsCanBeNull
+}
+
+func (o *SourceAzureBlobStorageCSVFormat) GetTrueValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TrueValues
+}
+
+type SourceAzureBlobStorageFiletype string
const (
- SourceAzureBlobStorageInputFormatTypeSourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON SourceAzureBlobStorageInputFormatType = "source-azure-blob-storage_Input Format_JSON Lines: newline-delimited JSON"
+ SourceAzureBlobStorageFiletypeAvro SourceAzureBlobStorageFiletype = "avro"
)
-type SourceAzureBlobStorageInputFormat struct {
- SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON *SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON
+func (e SourceAzureBlobStorageFiletype) ToPointer() *SourceAzureBlobStorageFiletype {
+ return &e
+}
- Type SourceAzureBlobStorageInputFormatType
+func (e *SourceAzureBlobStorageFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "avro":
+ *e = SourceAzureBlobStorageFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageFiletype: %v", v)
+ }
+}
+
+// SourceAzureBlobStorageAvroFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceAzureBlobStorageAvroFormat struct {
+ // Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.
+ DoubleAsString *bool `default:"false" json:"double_as_string"`
+ filetype *SourceAzureBlobStorageFiletype `const:"avro" json:"filetype"`
+}
+
+func (s SourceAzureBlobStorageAvroFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureBlobStorageAvroFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureBlobStorageAvroFormat) GetDoubleAsString() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleAsString
+}
+
+func (o *SourceAzureBlobStorageAvroFormat) GetFiletype() *SourceAzureBlobStorageFiletype {
+ return SourceAzureBlobStorageFiletypeAvro.ToPointer()
}
-func CreateSourceAzureBlobStorageInputFormatSourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON(sourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON) SourceAzureBlobStorageInputFormat {
- typ := SourceAzureBlobStorageInputFormatTypeSourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON
+type SourceAzureBlobStorageFormatType string
+
+const (
+ SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageAvroFormat SourceAzureBlobStorageFormatType = "source-azure-blob-storage_Avro Format"
+ SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageCSVFormat SourceAzureBlobStorageFormatType = "source-azure-blob-storage_CSV Format"
+ SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageJsonlFormat SourceAzureBlobStorageFormatType = "source-azure-blob-storage_Jsonl Format"
+ SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageParquetFormat SourceAzureBlobStorageFormatType = "source-azure-blob-storage_Parquet Format"
+ SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageDocumentFileTypeFormatExperimental SourceAzureBlobStorageFormatType = "source-azure-blob-storage_Document File Type Format (Experimental)"
+)
+
+type SourceAzureBlobStorageFormat struct {
+ SourceAzureBlobStorageAvroFormat *SourceAzureBlobStorageAvroFormat
+ SourceAzureBlobStorageCSVFormat *SourceAzureBlobStorageCSVFormat
+ SourceAzureBlobStorageJsonlFormat *SourceAzureBlobStorageJsonlFormat
+ SourceAzureBlobStorageParquetFormat *SourceAzureBlobStorageParquetFormat
+ SourceAzureBlobStorageDocumentFileTypeFormatExperimental *SourceAzureBlobStorageDocumentFileTypeFormatExperimental
+
+ Type SourceAzureBlobStorageFormatType
+}
- return SourceAzureBlobStorageInputFormat{
- SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON: &sourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON,
+func CreateSourceAzureBlobStorageFormatSourceAzureBlobStorageAvroFormat(sourceAzureBlobStorageAvroFormat SourceAzureBlobStorageAvroFormat) SourceAzureBlobStorageFormat {
+ typ := SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageAvroFormat
+
+ return SourceAzureBlobStorageFormat{
+ SourceAzureBlobStorageAvroFormat: &sourceAzureBlobStorageAvroFormat,
+ Type: typ,
+ }
+}
+
+func CreateSourceAzureBlobStorageFormatSourceAzureBlobStorageCSVFormat(sourceAzureBlobStorageCSVFormat SourceAzureBlobStorageCSVFormat) SourceAzureBlobStorageFormat {
+ typ := SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageCSVFormat
+
+ return SourceAzureBlobStorageFormat{
+ SourceAzureBlobStorageCSVFormat: &sourceAzureBlobStorageCSVFormat,
+ Type: typ,
+ }
+}
+
+func CreateSourceAzureBlobStorageFormatSourceAzureBlobStorageJsonlFormat(sourceAzureBlobStorageJsonlFormat SourceAzureBlobStorageJsonlFormat) SourceAzureBlobStorageFormat {
+ typ := SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageJsonlFormat
+
+ return SourceAzureBlobStorageFormat{
+ SourceAzureBlobStorageJsonlFormat: &sourceAzureBlobStorageJsonlFormat,
+ Type: typ,
+ }
+}
+
+func CreateSourceAzureBlobStorageFormatSourceAzureBlobStorageParquetFormat(sourceAzureBlobStorageParquetFormat SourceAzureBlobStorageParquetFormat) SourceAzureBlobStorageFormat {
+ typ := SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageParquetFormat
+
+ return SourceAzureBlobStorageFormat{
+ SourceAzureBlobStorageParquetFormat: &sourceAzureBlobStorageParquetFormat,
+ Type: typ,
+ }
+}
+
+func CreateSourceAzureBlobStorageFormatSourceAzureBlobStorageDocumentFileTypeFormatExperimental(sourceAzureBlobStorageDocumentFileTypeFormatExperimental SourceAzureBlobStorageDocumentFileTypeFormatExperimental) SourceAzureBlobStorageFormat {
+ typ := SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageDocumentFileTypeFormatExperimental
+
+ return SourceAzureBlobStorageFormat{
+ SourceAzureBlobStorageDocumentFileTypeFormatExperimental: &sourceAzureBlobStorageDocumentFileTypeFormatExperimental,
Type: typ,
}
}
-func (u *SourceAzureBlobStorageInputFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *SourceAzureBlobStorageFormat) UnmarshalJSON(data []byte) error {
+
+ sourceAzureBlobStorageJsonlFormat := new(SourceAzureBlobStorageJsonlFormat)
+ if err := utils.UnmarshalJSON(data, &sourceAzureBlobStorageJsonlFormat, "", true, true); err == nil {
+ u.SourceAzureBlobStorageJsonlFormat = sourceAzureBlobStorageJsonlFormat
+ u.Type = SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageJsonlFormat
+ return nil
+ }
+
+ sourceAzureBlobStorageAvroFormat := new(SourceAzureBlobStorageAvroFormat)
+ if err := utils.UnmarshalJSON(data, &sourceAzureBlobStorageAvroFormat, "", true, true); err == nil {
+ u.SourceAzureBlobStorageAvroFormat = sourceAzureBlobStorageAvroFormat
+ u.Type = SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageAvroFormat
+ return nil
+ }
+
+ sourceAzureBlobStorageParquetFormat := new(SourceAzureBlobStorageParquetFormat)
+ if err := utils.UnmarshalJSON(data, &sourceAzureBlobStorageParquetFormat, "", true, true); err == nil {
+ u.SourceAzureBlobStorageParquetFormat = sourceAzureBlobStorageParquetFormat
+ u.Type = SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageParquetFormat
+ return nil
+ }
+
+ sourceAzureBlobStorageDocumentFileTypeFormatExperimental := new(SourceAzureBlobStorageDocumentFileTypeFormatExperimental)
+ if err := utils.UnmarshalJSON(data, &sourceAzureBlobStorageDocumentFileTypeFormatExperimental, "", true, true); err == nil {
+ u.SourceAzureBlobStorageDocumentFileTypeFormatExperimental = sourceAzureBlobStorageDocumentFileTypeFormatExperimental
+ u.Type = SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageDocumentFileTypeFormatExperimental
+ return nil
+ }
- sourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON := new(SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON); err == nil {
- u.SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON = sourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON
- u.Type = SourceAzureBlobStorageInputFormatTypeSourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON
+ sourceAzureBlobStorageCSVFormat := new(SourceAzureBlobStorageCSVFormat)
+ if err := utils.UnmarshalJSON(data, &sourceAzureBlobStorageCSVFormat, "", true, true); err == nil {
+ u.SourceAzureBlobStorageCSVFormat = sourceAzureBlobStorageCSVFormat
+ u.Type = SourceAzureBlobStorageFormatTypeSourceAzureBlobStorageCSVFormat
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceAzureBlobStorageInputFormat) MarshalJSON() ([]byte, error) {
- if u.SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.SourceAzureBlobStorageInputFormatJSONLinesNewlineDelimitedJSON)
+func (u SourceAzureBlobStorageFormat) MarshalJSON() ([]byte, error) {
+ if u.SourceAzureBlobStorageAvroFormat != nil {
+ return utils.MarshalJSON(u.SourceAzureBlobStorageAvroFormat, "", true)
}
- return nil, nil
+ if u.SourceAzureBlobStorageCSVFormat != nil {
+ return utils.MarshalJSON(u.SourceAzureBlobStorageCSVFormat, "", true)
+ }
+
+ if u.SourceAzureBlobStorageJsonlFormat != nil {
+ return utils.MarshalJSON(u.SourceAzureBlobStorageJsonlFormat, "", true)
+ }
+
+ if u.SourceAzureBlobStorageParquetFormat != nil {
+ return utils.MarshalJSON(u.SourceAzureBlobStorageParquetFormat, "", true)
+ }
+
+ if u.SourceAzureBlobStorageDocumentFileTypeFormatExperimental != nil {
+ return utils.MarshalJSON(u.SourceAzureBlobStorageDocumentFileTypeFormatExperimental, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceAzureBlobStorageAzureBlobStorage string
+// SourceAzureBlobStorageValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+type SourceAzureBlobStorageValidationPolicy string
const (
- SourceAzureBlobStorageAzureBlobStorageAzureBlobStorage SourceAzureBlobStorageAzureBlobStorage = "azure-blob-storage"
+ SourceAzureBlobStorageValidationPolicyEmitRecord SourceAzureBlobStorageValidationPolicy = "Emit Record"
+ SourceAzureBlobStorageValidationPolicySkipRecord SourceAzureBlobStorageValidationPolicy = "Skip Record"
+ SourceAzureBlobStorageValidationPolicyWaitForDiscover SourceAzureBlobStorageValidationPolicy = "Wait for Discover"
)
-func (e SourceAzureBlobStorageAzureBlobStorage) ToPointer() *SourceAzureBlobStorageAzureBlobStorage {
+func (e SourceAzureBlobStorageValidationPolicy) ToPointer() *SourceAzureBlobStorageValidationPolicy {
return &e
}
-func (e *SourceAzureBlobStorageAzureBlobStorage) UnmarshalJSON(data []byte) error {
+func (e *SourceAzureBlobStorageValidationPolicy) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
- case "azure-blob-storage":
- *e = SourceAzureBlobStorageAzureBlobStorage(v)
+ case "Emit Record":
+ fallthrough
+ case "Skip Record":
+ fallthrough
+ case "Wait for Discover":
+ *e = SourceAzureBlobStorageValidationPolicy(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAzureBlobStorageAzureBlobStorage: %v", v)
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageValidationPolicy: %v", v)
+ }
+}
+
+type SourceAzureBlobStorageFileBasedStreamConfig struct {
+ // When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
+ DaysToSyncIfHistoryIsFull *int64 `default:"3" json:"days_to_sync_if_history_is_full"`
+ // The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+ Format SourceAzureBlobStorageFormat `json:"format"`
+ // The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
+ Globs []string `json:"globs,omitempty"`
+ // The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
+ InputSchema *string `json:"input_schema,omitempty"`
+ // The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.
+ LegacyPrefix *string `json:"legacy_prefix,omitempty"`
+ // The name of the stream.
+ Name string `json:"name"`
+ // The column or columns (for a composite key) that serves as the unique identifier of a record.
+ PrimaryKey *string `json:"primary_key,omitempty"`
+ // When enabled, syncs will not validate or structure records against the stream's schema.
+ Schemaless *bool `default:"false" json:"schemaless"`
+ // The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+ ValidationPolicy *SourceAzureBlobStorageValidationPolicy `default:"Emit Record" json:"validation_policy"`
+}
+
+func (s SourceAzureBlobStorageFileBasedStreamConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureBlobStorageFileBasedStreamConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureBlobStorageFileBasedStreamConfig) GetDaysToSyncIfHistoryIsFull() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DaysToSyncIfHistoryIsFull
+}
+
+func (o *SourceAzureBlobStorageFileBasedStreamConfig) GetFormat() SourceAzureBlobStorageFormat {
+ if o == nil {
+ return SourceAzureBlobStorageFormat{}
+ }
+ return o.Format
+}
+
+func (o *SourceAzureBlobStorageFileBasedStreamConfig) GetGlobs() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Globs
+}
+
+func (o *SourceAzureBlobStorageFileBasedStreamConfig) GetInputSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.InputSchema
+}
+
+func (o *SourceAzureBlobStorageFileBasedStreamConfig) GetLegacyPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LegacyPrefix
+}
+
+func (o *SourceAzureBlobStorageFileBasedStreamConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAzureBlobStorageFileBasedStreamConfig) GetPrimaryKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrimaryKey
+}
+
+func (o *SourceAzureBlobStorageFileBasedStreamConfig) GetSchemaless() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Schemaless
+}
+
+func (o *SourceAzureBlobStorageFileBasedStreamConfig) GetValidationPolicy() *SourceAzureBlobStorageValidationPolicy {
+ if o == nil {
+ return nil
}
+ return o.ValidationPolicy
}
+// SourceAzureBlobStorage - NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes
+// because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
type SourceAzureBlobStorage struct {
// The Azure blob storage account key.
AzureBlobStorageAccountKey string `json:"azure_blob_storage_account_key"`
// The account's name of the Azure Blob Storage.
AzureBlobStorageAccountName string `json:"azure_blob_storage_account_name"`
- // The Azure blob storage prefix to be applied
- AzureBlobStorageBlobsPrefix *string `json:"azure_blob_storage_blobs_prefix,omitempty"`
// The name of the Azure blob storage container.
AzureBlobStorageContainerName string `json:"azure_blob_storage_container_name"`
// This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
- AzureBlobStorageEndpoint *string `json:"azure_blob_storage_endpoint,omitempty"`
- // The Azure blob storage blobs to scan for inferring the schema, useful on large amounts of data with consistent structure
- AzureBlobStorageSchemaInferenceLimit *int64 `json:"azure_blob_storage_schema_inference_limit,omitempty"`
- // Input data format
- Format SourceAzureBlobStorageInputFormat `json:"format"`
- SourceType SourceAzureBlobStorageAzureBlobStorage `json:"sourceType"`
+ AzureBlobStorageEndpoint *string `json:"azure_blob_storage_endpoint,omitempty"`
+ sourceType SourceAzureBlobStorageAzureBlobStorage `const:"azure-blob-storage" json:"sourceType"`
+ // UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
+ StartDate *time.Time `json:"start_date,omitempty"`
+ // Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
+ Streams []SourceAzureBlobStorageFileBasedStreamConfig `json:"streams"`
+}
+
+func (s SourceAzureBlobStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureBlobStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureBlobStorage) GetAzureBlobStorageAccountKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageAccountKey
+}
+
+func (o *SourceAzureBlobStorage) GetAzureBlobStorageAccountName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageAccountName
+}
+
+func (o *SourceAzureBlobStorage) GetAzureBlobStorageContainerName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageContainerName
+}
+
+func (o *SourceAzureBlobStorage) GetAzureBlobStorageEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageEndpoint
+}
+
+func (o *SourceAzureBlobStorage) GetSourceType() SourceAzureBlobStorageAzureBlobStorage {
+ return SourceAzureBlobStorageAzureBlobStorageAzureBlobStorage
+}
+
+func (o *SourceAzureBlobStorage) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceAzureBlobStorage) GetStreams() []SourceAzureBlobStorageFileBasedStreamConfig {
+ if o == nil {
+ return []SourceAzureBlobStorageFileBasedStreamConfig{}
+ }
+ return o.Streams
}
diff --git a/internal/sdk/pkg/models/shared/sourceazureblobstoragecreaterequest.go b/internal/sdk/pkg/models/shared/sourceazureblobstoragecreaterequest.go
old mode 100755
new mode 100644
index 567c09282..f8386e519
--- a/internal/sdk/pkg/models/shared/sourceazureblobstoragecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceazureblobstoragecreaterequest.go
@@ -3,9 +3,49 @@
package shared
type SourceAzureBlobStorageCreateRequest struct {
+ // NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes
+ // because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
Configuration SourceAzureBlobStorage `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAzureBlobStorageCreateRequest) GetConfiguration() SourceAzureBlobStorage {
+ if o == nil {
+ return SourceAzureBlobStorage{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAzureBlobStorageCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAzureBlobStorageCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAzureBlobStorageCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAzureBlobStorageCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceazureblobstorageputrequest.go b/internal/sdk/pkg/models/shared/sourceazureblobstorageputrequest.go
old mode 100755
new mode 100644
index daf47b6e5..ded59c20c
--- a/internal/sdk/pkg/models/shared/sourceazureblobstorageputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceazureblobstorageputrequest.go
@@ -3,7 +3,30 @@
package shared
type SourceAzureBlobStoragePutRequest struct {
+ // NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes
+ // because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
Configuration SourceAzureBlobStorageUpdate `json:"configuration"`
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAzureBlobStoragePutRequest) GetConfiguration() SourceAzureBlobStorageUpdate {
+ if o == nil {
+ return SourceAzureBlobStorageUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAzureBlobStoragePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAzureBlobStoragePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceazureblobstorageupdate.go b/internal/sdk/pkg/models/shared/sourceazureblobstorageupdate.go
old mode 100755
new mode 100644
index 8165b4e49..251ad3f0c
--- a/internal/sdk/pkg/models/shared/sourceazureblobstorageupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceazureblobstorageupdate.go
@@ -3,98 +3,952 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
)
-type SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSONFormatType string
+type SourceAzureBlobStorageUpdateSchemasStreamsFormatFiletype string
const (
- SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSONFormatTypeJsonl SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSONFormatType = "JSONL"
+ SourceAzureBlobStorageUpdateSchemasStreamsFormatFiletypeUnstructured SourceAzureBlobStorageUpdateSchemasStreamsFormatFiletype = "unstructured"
)
-func (e SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSONFormatType) ToPointer() *SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSONFormatType {
+func (e SourceAzureBlobStorageUpdateSchemasStreamsFormatFiletype) ToPointer() *SourceAzureBlobStorageUpdateSchemasStreamsFormatFiletype {
return &e
}
-func (e *SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSONFormatType) UnmarshalJSON(data []byte) error {
+func (e *SourceAzureBlobStorageUpdateSchemasStreamsFormatFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
- case "JSONL":
- *e = SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSONFormatType(v)
+ case "unstructured":
+ *e = SourceAzureBlobStorageUpdateSchemasStreamsFormatFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSONFormatType: %v", v)
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageUpdateSchemasStreamsFormatFiletype: %v", v)
}
}
-// SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON - Input data format
-type SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON struct {
- FormatType SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSONFormatType `json:"format_type"`
+// DocumentFileTypeFormatExperimental - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
+type DocumentFileTypeFormatExperimental struct {
+ filetype *SourceAzureBlobStorageUpdateSchemasStreamsFormatFiletype `const:"unstructured" json:"filetype"`
+ // If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.
+ SkipUnprocessableFileTypes *bool `default:"true" json:"skip_unprocessable_file_types"`
}
-type SourceAzureBlobStorageUpdateInputFormatType string
+func (d DocumentFileTypeFormatExperimental) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *DocumentFileTypeFormatExperimental) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DocumentFileTypeFormatExperimental) GetFiletype() *SourceAzureBlobStorageUpdateSchemasStreamsFormatFiletype {
+ return SourceAzureBlobStorageUpdateSchemasStreamsFormatFiletypeUnstructured.ToPointer()
+}
+
+func (o *DocumentFileTypeFormatExperimental) GetSkipUnprocessableFileTypes() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.SkipUnprocessableFileTypes
+}
+
+type SourceAzureBlobStorageUpdateSchemasStreamsFiletype string
+
+const (
+ SourceAzureBlobStorageUpdateSchemasStreamsFiletypeParquet SourceAzureBlobStorageUpdateSchemasStreamsFiletype = "parquet"
+)
+
+func (e SourceAzureBlobStorageUpdateSchemasStreamsFiletype) ToPointer() *SourceAzureBlobStorageUpdateSchemasStreamsFiletype {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageUpdateSchemasStreamsFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "parquet":
+ *e = SourceAzureBlobStorageUpdateSchemasStreamsFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageUpdateSchemasStreamsFiletype: %v", v)
+ }
+}
+
+// ParquetFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type ParquetFormat struct {
+ // Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
+ DecimalAsFloat *bool `default:"false" json:"decimal_as_float"`
+ filetype *SourceAzureBlobStorageUpdateSchemasStreamsFiletype `const:"parquet" json:"filetype"`
+}
+
+func (p ParquetFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(p, "", false)
+}
+
+func (p *ParquetFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &p, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ParquetFormat) GetDecimalAsFloat() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DecimalAsFloat
+}
+
+func (o *ParquetFormat) GetFiletype() *SourceAzureBlobStorageUpdateSchemasStreamsFiletype {
+ return SourceAzureBlobStorageUpdateSchemasStreamsFiletypeParquet.ToPointer()
+}
+
+type SourceAzureBlobStorageUpdateSchemasFiletype string
+
+const (
+ SourceAzureBlobStorageUpdateSchemasFiletypeJsonl SourceAzureBlobStorageUpdateSchemasFiletype = "jsonl"
+)
+
+func (e SourceAzureBlobStorageUpdateSchemasFiletype) ToPointer() *SourceAzureBlobStorageUpdateSchemasFiletype {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageUpdateSchemasFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "jsonl":
+ *e = SourceAzureBlobStorageUpdateSchemasFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageUpdateSchemasFiletype: %v", v)
+ }
+}
+
+// JsonlFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type JsonlFormat struct {
+ filetype *SourceAzureBlobStorageUpdateSchemasFiletype `const:"jsonl" json:"filetype"`
+}
+
+func (j JsonlFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(j, "", false)
+}
+
+func (j *JsonlFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &j, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *JsonlFormat) GetFiletype() *SourceAzureBlobStorageUpdateSchemasFiletype {
+ return SourceAzureBlobStorageUpdateSchemasFiletypeJsonl.ToPointer()
+}
+
+type SourceAzureBlobStorageUpdateFiletype string
+
+const (
+ SourceAzureBlobStorageUpdateFiletypeCsv SourceAzureBlobStorageUpdateFiletype = "csv"
+)
+
+func (e SourceAzureBlobStorageUpdateFiletype) ToPointer() *SourceAzureBlobStorageUpdateFiletype {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageUpdateFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "csv":
+ *e = SourceAzureBlobStorageUpdateFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageUpdateFiletype: %v", v)
+ }
+}
+
+type SourceAzureBlobStorageUpdateSchemasHeaderDefinitionType string
+
+const (
+ SourceAzureBlobStorageUpdateSchemasHeaderDefinitionTypeUserProvided SourceAzureBlobStorageUpdateSchemasHeaderDefinitionType = "User Provided"
+)
+
+func (e SourceAzureBlobStorageUpdateSchemasHeaderDefinitionType) ToPointer() *SourceAzureBlobStorageUpdateSchemasHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageUpdateSchemasHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "User Provided":
+ *e = SourceAzureBlobStorageUpdateSchemasHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageUpdateSchemasHeaderDefinitionType: %v", v)
+ }
+}
+
+// UserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type UserProvided struct {
+ // The column names that will be used while emitting the CSV records
+ ColumnNames []string `json:"column_names"`
+ headerDefinitionType *SourceAzureBlobStorageUpdateSchemasHeaderDefinitionType `const:"User Provided" json:"header_definition_type"`
+}
+
+func (u UserProvided) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(u, "", false)
+}
+
+func (u *UserProvided) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &u, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *UserProvided) GetColumnNames() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.ColumnNames
+}
+
+func (o *UserProvided) GetHeaderDefinitionType() *SourceAzureBlobStorageUpdateSchemasHeaderDefinitionType {
+ return SourceAzureBlobStorageUpdateSchemasHeaderDefinitionTypeUserProvided.ToPointer()
+}
+
+type SourceAzureBlobStorageUpdateHeaderDefinitionType string
+
+const (
+ SourceAzureBlobStorageUpdateHeaderDefinitionTypeAutogenerated SourceAzureBlobStorageUpdateHeaderDefinitionType = "Autogenerated"
+)
+
+func (e SourceAzureBlobStorageUpdateHeaderDefinitionType) ToPointer() *SourceAzureBlobStorageUpdateHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageUpdateHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Autogenerated":
+ *e = SourceAzureBlobStorageUpdateHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageUpdateHeaderDefinitionType: %v", v)
+ }
+}
+
+// Autogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type Autogenerated struct {
+ headerDefinitionType *SourceAzureBlobStorageUpdateHeaderDefinitionType `const:"Autogenerated" json:"header_definition_type"`
+}
+
+func (a Autogenerated) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *Autogenerated) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Autogenerated) GetHeaderDefinitionType() *SourceAzureBlobStorageUpdateHeaderDefinitionType {
+ return SourceAzureBlobStorageUpdateHeaderDefinitionTypeAutogenerated.ToPointer()
+}
+
+type HeaderDefinitionType string
+
+const (
+ HeaderDefinitionTypeFromCsv HeaderDefinitionType = "From CSV"
+)
+
+func (e HeaderDefinitionType) ToPointer() *HeaderDefinitionType {
+ return &e
+}
+
+func (e *HeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "From CSV":
+ *e = HeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for HeaderDefinitionType: %v", v)
+ }
+}
+
+// FromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type FromCSV struct {
+ headerDefinitionType *HeaderDefinitionType `const:"From CSV" json:"header_definition_type"`
+}
+
+func (f FromCSV) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(f, "", false)
+}
+
+func (f *FromCSV) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &f, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *FromCSV) GetHeaderDefinitionType() *HeaderDefinitionType {
+ return HeaderDefinitionTypeFromCsv.ToPointer()
+}
+
+type CSVHeaderDefinitionType string
+
+const (
+ CSVHeaderDefinitionTypeFromCSV CSVHeaderDefinitionType = "From CSV"
+ CSVHeaderDefinitionTypeAutogenerated CSVHeaderDefinitionType = "Autogenerated"
+ CSVHeaderDefinitionTypeUserProvided CSVHeaderDefinitionType = "User Provided"
+)
+
+type CSVHeaderDefinition struct {
+ FromCSV *FromCSV
+ Autogenerated *Autogenerated
+ UserProvided *UserProvided
+
+ Type CSVHeaderDefinitionType
+}
+
+func CreateCSVHeaderDefinitionFromCSV(fromCSV FromCSV) CSVHeaderDefinition {
+ typ := CSVHeaderDefinitionTypeFromCSV
+
+ return CSVHeaderDefinition{
+ FromCSV: &fromCSV,
+ Type: typ,
+ }
+}
+
+func CreateCSVHeaderDefinitionAutogenerated(autogenerated Autogenerated) CSVHeaderDefinition {
+ typ := CSVHeaderDefinitionTypeAutogenerated
+
+ return CSVHeaderDefinition{
+ Autogenerated: &autogenerated,
+ Type: typ,
+ }
+}
+
+func CreateCSVHeaderDefinitionUserProvided(userProvided UserProvided) CSVHeaderDefinition {
+ typ := CSVHeaderDefinitionTypeUserProvided
+
+ return CSVHeaderDefinition{
+ UserProvided: &userProvided,
+ Type: typ,
+ }
+}
+
+func (u *CSVHeaderDefinition) UnmarshalJSON(data []byte) error {
+
+ fromCSV := new(FromCSV)
+ if err := utils.UnmarshalJSON(data, &fromCSV, "", true, true); err == nil {
+ u.FromCSV = fromCSV
+ u.Type = CSVHeaderDefinitionTypeFromCSV
+ return nil
+ }
+
+ autogenerated := new(Autogenerated)
+ if err := utils.UnmarshalJSON(data, &autogenerated, "", true, true); err == nil {
+ u.Autogenerated = autogenerated
+ u.Type = CSVHeaderDefinitionTypeAutogenerated
+ return nil
+ }
+
+ userProvided := new(UserProvided)
+ if err := utils.UnmarshalJSON(data, &userProvided, "", true, true); err == nil {
+ u.UserProvided = userProvided
+ u.Type = CSVHeaderDefinitionTypeUserProvided
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u CSVHeaderDefinition) MarshalJSON() ([]byte, error) {
+ if u.FromCSV != nil {
+ return utils.MarshalJSON(u.FromCSV, "", true)
+ }
+
+ if u.Autogenerated != nil {
+ return utils.MarshalJSON(u.Autogenerated, "", true)
+ }
+
+ if u.UserProvided != nil {
+ return utils.MarshalJSON(u.UserProvided, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// InferenceType - How to infer the types of the columns. If none, inference default to strings.
+type InferenceType string
+
+const (
+ InferenceTypeNone InferenceType = "None"
+ InferenceTypePrimitiveTypesOnly InferenceType = "Primitive Types Only"
+)
+
+func (e InferenceType) ToPointer() *InferenceType {
+ return &e
+}
+
+func (e *InferenceType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "None":
+ fallthrough
+ case "Primitive Types Only":
+ *e = InferenceType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for InferenceType: %v", v)
+ }
+}
+
+// CSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type CSVFormat struct {
+ // The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
+ Delimiter *string `default:"," json:"delimiter"`
+ // Whether two quotes in a quoted CSV value denote a single quote in the data.
+ DoubleQuote *bool `default:"true" json:"double_quote"`
+ // The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
+ Encoding *string `default:"utf8" json:"encoding"`
+ // The character used for escaping special characters. To disallow escaping, leave this field blank.
+ EscapeChar *string `json:"escape_char,omitempty"`
+ // A set of case-sensitive strings that should be interpreted as false values.
+ FalseValues []string `json:"false_values,omitempty"`
+ filetype *SourceAzureBlobStorageUpdateFiletype `const:"csv" json:"filetype"`
+ // How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+ HeaderDefinition *CSVHeaderDefinition `json:"header_definition,omitempty"`
+ // How to infer the types of the columns. If none, inference default to strings.
+ InferenceType *InferenceType `default:"None" json:"inference_type"`
+ // A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
+ NullValues []string `json:"null_values,omitempty"`
+ // The character used for quoting CSV values. To disallow quoting, make this field blank.
+ QuoteChar *string `default:""" json:"quote_char"`
+ // The number of rows to skip after the header row.
+ SkipRowsAfterHeader *int64 `default:"0" json:"skip_rows_after_header"`
+ // The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
+ SkipRowsBeforeHeader *int64 `default:"0" json:"skip_rows_before_header"`
+ // Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
+ StringsCanBeNull *bool `default:"true" json:"strings_can_be_null"`
+ // A set of case-sensitive strings that should be interpreted as true values.
+ TrueValues []string `json:"true_values,omitempty"`
+}
+
+func (c CSVFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(c, "", false)
+}
+
+func (c *CSVFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &c, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *CSVFormat) GetDelimiter() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Delimiter
+}
+
+func (o *CSVFormat) GetDoubleQuote() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleQuote
+}
+
+func (o *CSVFormat) GetEncoding() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Encoding
+}
+
+func (o *CSVFormat) GetEscapeChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EscapeChar
+}
+
+func (o *CSVFormat) GetFalseValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.FalseValues
+}
+
+func (o *CSVFormat) GetFiletype() *SourceAzureBlobStorageUpdateFiletype {
+ return SourceAzureBlobStorageUpdateFiletypeCsv.ToPointer()
+}
+
+func (o *CSVFormat) GetHeaderDefinition() *CSVHeaderDefinition {
+ if o == nil {
+ return nil
+ }
+ return o.HeaderDefinition
+}
+
+func (o *CSVFormat) GetInferenceType() *InferenceType {
+ if o == nil {
+ return nil
+ }
+ return o.InferenceType
+}
+
+func (o *CSVFormat) GetNullValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.NullValues
+}
+
+func (o *CSVFormat) GetQuoteChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QuoteChar
+}
+
+func (o *CSVFormat) GetSkipRowsAfterHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsAfterHeader
+}
+
+func (o *CSVFormat) GetSkipRowsBeforeHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsBeforeHeader
+}
+
+func (o *CSVFormat) GetStringsCanBeNull() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.StringsCanBeNull
+}
+
+func (o *CSVFormat) GetTrueValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TrueValues
+}
+
+type SourceAzureBlobStorageUpdateSchemasStreamsFormatFormatFiletype string
const (
- SourceAzureBlobStorageUpdateInputFormatTypeSourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON SourceAzureBlobStorageUpdateInputFormatType = "source-azure-blob-storage-update_Input Format_JSON Lines: newline-delimited JSON"
+ SourceAzureBlobStorageUpdateSchemasStreamsFormatFormatFiletypeAvro SourceAzureBlobStorageUpdateSchemasStreamsFormatFormatFiletype = "avro"
)
-type SourceAzureBlobStorageUpdateInputFormat struct {
- SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON *SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON
+func (e SourceAzureBlobStorageUpdateSchemasStreamsFormatFormatFiletype) ToPointer() *SourceAzureBlobStorageUpdateSchemasStreamsFormatFormatFiletype {
+ return &e
+}
+
+func (e *SourceAzureBlobStorageUpdateSchemasStreamsFormatFormatFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "avro":
+ *e = SourceAzureBlobStorageUpdateSchemasStreamsFormatFormatFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceAzureBlobStorageUpdateSchemasStreamsFormatFormatFiletype: %v", v)
+ }
+}
- Type SourceAzureBlobStorageUpdateInputFormatType
+// AvroFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type AvroFormat struct {
+ // Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.
+ DoubleAsString *bool `default:"false" json:"double_as_string"`
+ filetype *SourceAzureBlobStorageUpdateSchemasStreamsFormatFormatFiletype `const:"avro" json:"filetype"`
}
-func CreateSourceAzureBlobStorageUpdateInputFormatSourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON(sourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON) SourceAzureBlobStorageUpdateInputFormat {
- typ := SourceAzureBlobStorageUpdateInputFormatTypeSourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON
+func (a AvroFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
- return SourceAzureBlobStorageUpdateInputFormat{
- SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON: &sourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON,
- Type: typ,
+func (a *AvroFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
}
+ return nil
}
-func (u *SourceAzureBlobStorageUpdateInputFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (o *AvroFormat) GetDoubleAsString() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleAsString
+}
+
+func (o *AvroFormat) GetFiletype() *SourceAzureBlobStorageUpdateSchemasStreamsFormatFormatFiletype {
+ return SourceAzureBlobStorageUpdateSchemasStreamsFormatFormatFiletypeAvro.ToPointer()
+}
+
+type FormatUnionType string
+
+const (
+ FormatUnionTypeAvroFormat FormatUnionType = "Avro Format"
+ FormatUnionTypeCSVFormat FormatUnionType = "CSV Format"
+ FormatUnionTypeJsonlFormat FormatUnionType = "Jsonl Format"
+ FormatUnionTypeParquetFormat FormatUnionType = "Parquet Format"
+ FormatUnionTypeDocumentFileTypeFormatExperimental FormatUnionType = "Document File Type Format (Experimental)"
+)
+
+type Format struct {
+ AvroFormat *AvroFormat
+ CSVFormat *CSVFormat
+ JsonlFormat *JsonlFormat
+ ParquetFormat *ParquetFormat
+ DocumentFileTypeFormatExperimental *DocumentFileTypeFormatExperimental
+
+ Type FormatUnionType
+}
+
+func CreateFormatAvroFormat(avroFormat AvroFormat) Format {
+ typ := FormatUnionTypeAvroFormat
+
+ return Format{
+ AvroFormat: &avroFormat,
+ Type: typ,
+ }
+}
+
+func CreateFormatCSVFormat(csvFormat CSVFormat) Format {
+ typ := FormatUnionTypeCSVFormat
+
+ return Format{
+ CSVFormat: &csvFormat,
+ Type: typ,
+ }
+}
+
+func CreateFormatJsonlFormat(jsonlFormat JsonlFormat) Format {
+ typ := FormatUnionTypeJsonlFormat
+
+ return Format{
+ JsonlFormat: &jsonlFormat,
+ Type: typ,
+ }
+}
+
+func CreateFormatParquetFormat(parquetFormat ParquetFormat) Format {
+ typ := FormatUnionTypeParquetFormat
+
+ return Format{
+ ParquetFormat: &parquetFormat,
+ Type: typ,
+ }
+}
- sourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON := new(SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON); err == nil {
- u.SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON = sourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON
- u.Type = SourceAzureBlobStorageUpdateInputFormatTypeSourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON
+func CreateFormatDocumentFileTypeFormatExperimental(documentFileTypeFormatExperimental DocumentFileTypeFormatExperimental) Format {
+ typ := FormatUnionTypeDocumentFileTypeFormatExperimental
+
+ return Format{
+ DocumentFileTypeFormatExperimental: &documentFileTypeFormatExperimental,
+ Type: typ,
+ }
+}
+
+func (u *Format) UnmarshalJSON(data []byte) error {
+
+ jsonlFormat := new(JsonlFormat)
+ if err := utils.UnmarshalJSON(data, &jsonlFormat, "", true, true); err == nil {
+ u.JsonlFormat = jsonlFormat
+ u.Type = FormatUnionTypeJsonlFormat
+ return nil
+ }
+
+ avroFormat := new(AvroFormat)
+ if err := utils.UnmarshalJSON(data, &avroFormat, "", true, true); err == nil {
+ u.AvroFormat = avroFormat
+ u.Type = FormatUnionTypeAvroFormat
+ return nil
+ }
+
+ parquetFormat := new(ParquetFormat)
+ if err := utils.UnmarshalJSON(data, &parquetFormat, "", true, true); err == nil {
+ u.ParquetFormat = parquetFormat
+ u.Type = FormatUnionTypeParquetFormat
+ return nil
+ }
+
+ documentFileTypeFormatExperimental := new(DocumentFileTypeFormatExperimental)
+ if err := utils.UnmarshalJSON(data, &documentFileTypeFormatExperimental, "", true, true); err == nil {
+ u.DocumentFileTypeFormatExperimental = documentFileTypeFormatExperimental
+ u.Type = FormatUnionTypeDocumentFileTypeFormatExperimental
+ return nil
+ }
+
+ csvFormat := new(CSVFormat)
+ if err := utils.UnmarshalJSON(data, &csvFormat, "", true, true); err == nil {
+ u.CSVFormat = csvFormat
+ u.Type = FormatUnionTypeCSVFormat
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceAzureBlobStorageUpdateInputFormat) MarshalJSON() ([]byte, error) {
- if u.SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON != nil {
- return json.Marshal(u.SourceAzureBlobStorageUpdateInputFormatJSONLinesNewlineDelimitedJSON)
+func (u Format) MarshalJSON() ([]byte, error) {
+ if u.AvroFormat != nil {
+ return utils.MarshalJSON(u.AvroFormat, "", true)
+ }
+
+ if u.CSVFormat != nil {
+ return utils.MarshalJSON(u.CSVFormat, "", true)
+ }
+
+ if u.JsonlFormat != nil {
+ return utils.MarshalJSON(u.JsonlFormat, "", true)
+ }
+
+ if u.ParquetFormat != nil {
+ return utils.MarshalJSON(u.ParquetFormat, "", true)
+ }
+
+ if u.DocumentFileTypeFormatExperimental != nil {
+ return utils.MarshalJSON(u.DocumentFileTypeFormatExperimental, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// ValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+type ValidationPolicy string
+
+const (
+ ValidationPolicyEmitRecord ValidationPolicy = "Emit Record"
+ ValidationPolicySkipRecord ValidationPolicy = "Skip Record"
+ ValidationPolicyWaitForDiscover ValidationPolicy = "Wait for Discover"
+)
+
+func (e ValidationPolicy) ToPointer() *ValidationPolicy {
+ return &e
+}
+
+func (e *ValidationPolicy) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Emit Record":
+ fallthrough
+ case "Skip Record":
+ fallthrough
+ case "Wait for Discover":
+ *e = ValidationPolicy(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for ValidationPolicy: %v", v)
+ }
+}
+
+type FileBasedStreamConfig struct {
+ // When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
+ DaysToSyncIfHistoryIsFull *int64 `default:"3" json:"days_to_sync_if_history_is_full"`
+ // The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+ Format Format `json:"format"`
+ // The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
+ Globs []string `json:"globs,omitempty"`
+ // The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
+ InputSchema *string `json:"input_schema,omitempty"`
+ // The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.
+ LegacyPrefix *string `json:"legacy_prefix,omitempty"`
+ // The name of the stream.
+ Name string `json:"name"`
+ // The column or columns (for a composite key) that serves as the unique identifier of a record.
+ PrimaryKey *string `json:"primary_key,omitempty"`
+ // When enabled, syncs will not validate or structure records against the stream's schema.
+ Schemaless *bool `default:"false" json:"schemaless"`
+ // The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+ ValidationPolicy *ValidationPolicy `default:"Emit Record" json:"validation_policy"`
+}
+
+func (f FileBasedStreamConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(f, "", false)
+}
+
+func (f *FileBasedStreamConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &f, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *FileBasedStreamConfig) GetDaysToSyncIfHistoryIsFull() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DaysToSyncIfHistoryIsFull
+}
+
+func (o *FileBasedStreamConfig) GetFormat() Format {
+ if o == nil {
+ return Format{}
+ }
+ return o.Format
+}
+
+func (o *FileBasedStreamConfig) GetGlobs() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Globs
+}
+
+func (o *FileBasedStreamConfig) GetInputSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.InputSchema
+}
+
+func (o *FileBasedStreamConfig) GetLegacyPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LegacyPrefix
+}
+
+func (o *FileBasedStreamConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *FileBasedStreamConfig) GetPrimaryKey() *string {
+ if o == nil {
+ return nil
}
+ return o.PrimaryKey
+}
- return nil, nil
+func (o *FileBasedStreamConfig) GetSchemaless() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Schemaless
}
+func (o *FileBasedStreamConfig) GetValidationPolicy() *ValidationPolicy {
+ if o == nil {
+ return nil
+ }
+ return o.ValidationPolicy
+}
+
+// SourceAzureBlobStorageUpdate - NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes
+// because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
type SourceAzureBlobStorageUpdate struct {
// The Azure blob storage account key.
AzureBlobStorageAccountKey string `json:"azure_blob_storage_account_key"`
// The account's name of the Azure Blob Storage.
AzureBlobStorageAccountName string `json:"azure_blob_storage_account_name"`
- // The Azure blob storage prefix to be applied
- AzureBlobStorageBlobsPrefix *string `json:"azure_blob_storage_blobs_prefix,omitempty"`
// The name of the Azure blob storage container.
AzureBlobStorageContainerName string `json:"azure_blob_storage_container_name"`
// This is Azure Blob Storage endpoint domain name. Leave default value (or leave it empty if run container from command line) to use Microsoft native from example.
AzureBlobStorageEndpoint *string `json:"azure_blob_storage_endpoint,omitempty"`
- // The Azure blob storage blobs to scan for inferring the schema, useful on large amounts of data with consistent structure
- AzureBlobStorageSchemaInferenceLimit *int64 `json:"azure_blob_storage_schema_inference_limit,omitempty"`
- // Input data format
- Format SourceAzureBlobStorageUpdateInputFormat `json:"format"`
+ // UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
+ StartDate *time.Time `json:"start_date,omitempty"`
+ // Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
+ Streams []FileBasedStreamConfig `json:"streams"`
+}
+
+func (s SourceAzureBlobStorageUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureBlobStorageUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureBlobStorageUpdate) GetAzureBlobStorageAccountKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageAccountKey
+}
+
+func (o *SourceAzureBlobStorageUpdate) GetAzureBlobStorageAccountName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageAccountName
+}
+
+func (o *SourceAzureBlobStorageUpdate) GetAzureBlobStorageContainerName() string {
+ if o == nil {
+ return ""
+ }
+ return o.AzureBlobStorageContainerName
+}
+
+func (o *SourceAzureBlobStorageUpdate) GetAzureBlobStorageEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AzureBlobStorageEndpoint
+}
+
+func (o *SourceAzureBlobStorageUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceAzureBlobStorageUpdate) GetStreams() []FileBasedStreamConfig {
+ if o == nil {
+ return []FileBasedStreamConfig{}
+ }
+ return o.Streams
}
diff --git a/internal/sdk/pkg/models/shared/sourceazuretable.go b/internal/sdk/pkg/models/shared/sourceazuretable.go
old mode 100755
new mode 100644
index f6d5f1a86..1fd58230d
--- a/internal/sdk/pkg/models/shared/sourceazuretable.go
+++ b/internal/sdk/pkg/models/shared/sourceazuretable.go
@@ -5,38 +5,75 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceAzureTableAzureTable string
+type AzureTable string
const (
- SourceAzureTableAzureTableAzureTable SourceAzureTableAzureTable = "azure-table"
+ AzureTableAzureTable AzureTable = "azure-table"
)
-func (e SourceAzureTableAzureTable) ToPointer() *SourceAzureTableAzureTable {
+func (e AzureTable) ToPointer() *AzureTable {
return &e
}
-func (e *SourceAzureTableAzureTable) UnmarshalJSON(data []byte) error {
+func (e *AzureTable) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "azure-table":
- *e = SourceAzureTableAzureTable(v)
+ *e = AzureTable(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceAzureTableAzureTable: %v", v)
+ return fmt.Errorf("invalid value for AzureTable: %v", v)
}
}
type SourceAzureTable struct {
- SourceType SourceAzureTableAzureTable `json:"sourceType"`
+ sourceType AzureTable `const:"azure-table" json:"sourceType"`
// Azure Table Storage Access Key. See the docs for more information on how to obtain this key.
StorageAccessKey string `json:"storage_access_key"`
// The name of your storage account.
StorageAccountName string `json:"storage_account_name"`
// Azure Table Storage service account URL suffix. See the docs for more information on how to obtain endpoint suffix
- StorageEndpointSuffix *string `json:"storage_endpoint_suffix,omitempty"`
+ StorageEndpointSuffix *string `default:"core.windows.net" json:"storage_endpoint_suffix"`
+}
+
+func (s SourceAzureTable) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureTable) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureTable) GetSourceType() AzureTable {
+ return AzureTableAzureTable
+}
+
+func (o *SourceAzureTable) GetStorageAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.StorageAccessKey
+}
+
+func (o *SourceAzureTable) GetStorageAccountName() string {
+ if o == nil {
+ return ""
+ }
+ return o.StorageAccountName
+}
+
+func (o *SourceAzureTable) GetStorageEndpointSuffix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StorageEndpointSuffix
}
diff --git a/internal/sdk/pkg/models/shared/sourceazuretablecreaterequest.go b/internal/sdk/pkg/models/shared/sourceazuretablecreaterequest.go
old mode 100755
new mode 100644
index 2904be1ca..d40bc890e
--- a/internal/sdk/pkg/models/shared/sourceazuretablecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceazuretablecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceAzureTableCreateRequest struct {
Configuration SourceAzureTable `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAzureTableCreateRequest) GetConfiguration() SourceAzureTable {
+ if o == nil {
+ return SourceAzureTable{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAzureTableCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceAzureTableCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAzureTableCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceAzureTableCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceazuretableputrequest.go b/internal/sdk/pkg/models/shared/sourceazuretableputrequest.go
old mode 100755
new mode 100644
index bb4d4f61a..ef288356f
--- a/internal/sdk/pkg/models/shared/sourceazuretableputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceazuretableputrequest.go
@@ -7,3 +7,24 @@ type SourceAzureTablePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceAzureTablePutRequest) GetConfiguration() SourceAzureTableUpdate {
+ if o == nil {
+ return SourceAzureTableUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceAzureTablePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceAzureTablePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceazuretableupdate.go b/internal/sdk/pkg/models/shared/sourceazuretableupdate.go
old mode 100755
new mode 100644
index 8d3a0cd52..195ead52b
--- a/internal/sdk/pkg/models/shared/sourceazuretableupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceazuretableupdate.go
@@ -2,11 +2,47 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceAzureTableUpdate struct {
// Azure Table Storage Access Key. See the docs for more information on how to obtain this key.
StorageAccessKey string `json:"storage_access_key"`
// The name of your storage account.
StorageAccountName string `json:"storage_account_name"`
// Azure Table Storage service account URL suffix. See the docs for more information on how to obtain endpoint suffix
- StorageEndpointSuffix *string `json:"storage_endpoint_suffix,omitempty"`
+ StorageEndpointSuffix *string `default:"core.windows.net" json:"storage_endpoint_suffix"`
+}
+
+func (s SourceAzureTableUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceAzureTableUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceAzureTableUpdate) GetStorageAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.StorageAccessKey
+}
+
+func (o *SourceAzureTableUpdate) GetStorageAccountName() string {
+ if o == nil {
+ return ""
+ }
+ return o.StorageAccountName
+}
+
+func (o *SourceAzureTableUpdate) GetStorageEndpointSuffix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StorageEndpointSuffix
}
diff --git a/internal/sdk/pkg/models/shared/sourcebamboohr.go b/internal/sdk/pkg/models/shared/sourcebamboohr.go
old mode 100755
new mode 100644
index 21c4660d2..36e5d2cb5
--- a/internal/sdk/pkg/models/shared/sourcebamboohr.go
+++ b/internal/sdk/pkg/models/shared/sourcebamboohr.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceBambooHrBambooHr string
+type BambooHr string
const (
- SourceBambooHrBambooHrBambooHr SourceBambooHrBambooHr = "bamboo-hr"
+ BambooHrBambooHr BambooHr = "bamboo-hr"
)
-func (e SourceBambooHrBambooHr) ToPointer() *SourceBambooHrBambooHr {
+func (e BambooHr) ToPointer() *BambooHr {
return &e
}
-func (e *SourceBambooHrBambooHr) UnmarshalJSON(data []byte) error {
+func (e *BambooHr) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "bamboo-hr":
- *e = SourceBambooHrBambooHr(v)
+ *e = BambooHr(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceBambooHrBambooHr: %v", v)
+ return fmt.Errorf("invalid value for BambooHr: %v", v)
}
}
@@ -35,10 +36,53 @@ type SourceBambooHr struct {
// Api key of bamboo hr
APIKey string `json:"api_key"`
// Comma-separated list of fields to include in custom reports.
- CustomReportsFields *string `json:"custom_reports_fields,omitempty"`
+ CustomReportsFields *string `default:"" json:"custom_reports_fields"`
// If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names.
- CustomReportsIncludeDefaultFields *bool `json:"custom_reports_include_default_fields,omitempty"`
- SourceType SourceBambooHrBambooHr `json:"sourceType"`
+ CustomReportsIncludeDefaultFields *bool `default:"true" json:"custom_reports_include_default_fields"`
+ sourceType BambooHr `const:"bamboo-hr" json:"sourceType"`
// Sub Domain of bamboo hr
Subdomain string `json:"subdomain"`
}
+
+func (s SourceBambooHr) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceBambooHr) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceBambooHr) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceBambooHr) GetCustomReportsFields() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReportsFields
+}
+
+func (o *SourceBambooHr) GetCustomReportsIncludeDefaultFields() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReportsIncludeDefaultFields
+}
+
+func (o *SourceBambooHr) GetSourceType() BambooHr {
+ return BambooHrBambooHr
+}
+
+func (o *SourceBambooHr) GetSubdomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Subdomain
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebamboohrcreaterequest.go b/internal/sdk/pkg/models/shared/sourcebamboohrcreaterequest.go
old mode 100755
new mode 100644
index c0d68bc67..ad2f1c5da
--- a/internal/sdk/pkg/models/shared/sourcebamboohrcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcebamboohrcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceBambooHrCreateRequest struct {
Configuration SourceBambooHr `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceBambooHrCreateRequest) GetConfiguration() SourceBambooHr {
+ if o == nil {
+ return SourceBambooHr{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceBambooHrCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceBambooHrCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceBambooHrCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceBambooHrCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebamboohrputrequest.go b/internal/sdk/pkg/models/shared/sourcebamboohrputrequest.go
old mode 100755
new mode 100644
index 22526c97a..1fbf842c0
--- a/internal/sdk/pkg/models/shared/sourcebamboohrputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcebamboohrputrequest.go
@@ -7,3 +7,24 @@ type SourceBambooHrPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceBambooHrPutRequest) GetConfiguration() SourceBambooHrUpdate {
+ if o == nil {
+ return SourceBambooHrUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceBambooHrPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceBambooHrPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebamboohrupdate.go b/internal/sdk/pkg/models/shared/sourcebamboohrupdate.go
old mode 100755
new mode 100644
index 1afb0c1f3..5d0cb7886
--- a/internal/sdk/pkg/models/shared/sourcebamboohrupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcebamboohrupdate.go
@@ -2,13 +2,56 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceBambooHrUpdate struct {
// Api key of bamboo hr
APIKey string `json:"api_key"`
// Comma-separated list of fields to include in custom reports.
- CustomReportsFields *string `json:"custom_reports_fields,omitempty"`
+ CustomReportsFields *string `default:"" json:"custom_reports_fields"`
// If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names.
- CustomReportsIncludeDefaultFields *bool `json:"custom_reports_include_default_fields,omitempty"`
+ CustomReportsIncludeDefaultFields *bool `default:"true" json:"custom_reports_include_default_fields"`
// Sub Domain of bamboo hr
Subdomain string `json:"subdomain"`
}
+
+func (s SourceBambooHrUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceBambooHrUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceBambooHrUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceBambooHrUpdate) GetCustomReportsFields() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReportsFields
+}
+
+func (o *SourceBambooHrUpdate) GetCustomReportsIncludeDefaultFields() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReportsIncludeDefaultFields
+}
+
+func (o *SourceBambooHrUpdate) GetSubdomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Subdomain
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebigcommerce.go b/internal/sdk/pkg/models/shared/sourcebigcommerce.go
deleted file mode 100755
index 5c13554bc..000000000
--- a/internal/sdk/pkg/models/shared/sourcebigcommerce.go
+++ /dev/null
@@ -1,42 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "encoding/json"
- "fmt"
-)
-
-type SourceBigcommerceBigcommerce string
-
-const (
- SourceBigcommerceBigcommerceBigcommerce SourceBigcommerceBigcommerce = "bigcommerce"
-)
-
-func (e SourceBigcommerceBigcommerce) ToPointer() *SourceBigcommerceBigcommerce {
- return &e
-}
-
-func (e *SourceBigcommerceBigcommerce) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "bigcommerce":
- *e = SourceBigcommerceBigcommerce(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceBigcommerceBigcommerce: %v", v)
- }
-}
-
-type SourceBigcommerce struct {
- // Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- SourceType SourceBigcommerceBigcommerce `json:"sourceType"`
- // The date you would like to replicate data. Format: YYYY-MM-DD.
- StartDate string `json:"start_date"`
- // The hash code of the store. For https://api.bigcommerce.com/stores/HASH_CODE/v3/, The store's hash code is 'HASH_CODE'.
- StoreHash string `json:"store_hash"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcebigcommercecreaterequest.go b/internal/sdk/pkg/models/shared/sourcebigcommercecreaterequest.go
deleted file mode 100755
index e1f05ee10..000000000
--- a/internal/sdk/pkg/models/shared/sourcebigcommercecreaterequest.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceBigcommerceCreateRequest struct {
- Configuration SourceBigcommerce `json:"configuration"`
- Name string `json:"name"`
- // Optional secretID obtained through the public API OAuth redirect flow.
- SecretID *string `json:"secretId,omitempty"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcebigcommerceputrequest.go b/internal/sdk/pkg/models/shared/sourcebigcommerceputrequest.go
deleted file mode 100755
index 5976fbfa6..000000000
--- a/internal/sdk/pkg/models/shared/sourcebigcommerceputrequest.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceBigcommercePutRequest struct {
- Configuration SourceBigcommerceUpdate `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcebigcommerceupdate.go b/internal/sdk/pkg/models/shared/sourcebigcommerceupdate.go
deleted file mode 100755
index f9a229a53..000000000
--- a/internal/sdk/pkg/models/shared/sourcebigcommerceupdate.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceBigcommerceUpdate struct {
- // Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- // The date you would like to replicate data. Format: YYYY-MM-DD.
- StartDate string `json:"start_date"`
- // The hash code of the store. For https://api.bigcommerce.com/stores/HASH_CODE/v3/, The store's hash code is 'HASH_CODE'.
- StoreHash string `json:"store_hash"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcebigquery.go b/internal/sdk/pkg/models/shared/sourcebigquery.go
old mode 100755
new mode 100644
index 50eb0dc5a..207558698
--- a/internal/sdk/pkg/models/shared/sourcebigquery.go
+++ b/internal/sdk/pkg/models/shared/sourcebigquery.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceBigqueryBigquery string
@@ -38,5 +39,41 @@ type SourceBigquery struct {
DatasetID *string `json:"dataset_id,omitempty"`
// The GCP project ID for the project containing the target BigQuery dataset.
ProjectID string `json:"project_id"`
- SourceType SourceBigqueryBigquery `json:"sourceType"`
+ sourceType SourceBigqueryBigquery `const:"bigquery" json:"sourceType"`
+}
+
+func (s SourceBigquery) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceBigquery) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceBigquery) GetCredentialsJSON() string {
+ if o == nil {
+ return ""
+ }
+ return o.CredentialsJSON
+}
+
+func (o *SourceBigquery) GetDatasetID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DatasetID
+}
+
+func (o *SourceBigquery) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
+
+func (o *SourceBigquery) GetSourceType() SourceBigqueryBigquery {
+ return SourceBigqueryBigqueryBigquery
}
diff --git a/internal/sdk/pkg/models/shared/sourcebigquerycreaterequest.go b/internal/sdk/pkg/models/shared/sourcebigquerycreaterequest.go
old mode 100755
new mode 100644
index 72e32f97d..886574ed4
--- a/internal/sdk/pkg/models/shared/sourcebigquerycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcebigquerycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceBigqueryCreateRequest struct {
Configuration SourceBigquery `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceBigqueryCreateRequest) GetConfiguration() SourceBigquery {
+ if o == nil {
+ return SourceBigquery{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceBigqueryCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceBigqueryCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceBigqueryCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceBigqueryCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebigqueryputrequest.go b/internal/sdk/pkg/models/shared/sourcebigqueryputrequest.go
old mode 100755
new mode 100644
index e32b3216d..ab02dcf51
--- a/internal/sdk/pkg/models/shared/sourcebigqueryputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcebigqueryputrequest.go
@@ -7,3 +7,24 @@ type SourceBigqueryPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceBigqueryPutRequest) GetConfiguration() SourceBigqueryUpdate {
+ if o == nil {
+ return SourceBigqueryUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceBigqueryPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceBigqueryPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebigqueryupdate.go b/internal/sdk/pkg/models/shared/sourcebigqueryupdate.go
old mode 100755
new mode 100644
index bca927573..f600a88ea
--- a/internal/sdk/pkg/models/shared/sourcebigqueryupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcebigqueryupdate.go
@@ -10,3 +10,24 @@ type SourceBigqueryUpdate struct {
// The GCP project ID for the project containing the target BigQuery dataset.
ProjectID string `json:"project_id"`
}
+
+func (o *SourceBigqueryUpdate) GetCredentialsJSON() string {
+ if o == nil {
+ return ""
+ }
+ return o.CredentialsJSON
+}
+
+func (o *SourceBigqueryUpdate) GetDatasetID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DatasetID
+}
+
+func (o *SourceBigqueryUpdate) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebingads.go b/internal/sdk/pkg/models/shared/sourcebingads.go
old mode 100755
new mode 100644
index 21172e114..b878f4dc8
--- a/internal/sdk/pkg/models/shared/sourcebingads.go
+++ b/internal/sdk/pkg/models/shared/sourcebingads.go
@@ -3,9 +3,10 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceBingAdsAuthMethod string
@@ -32,45 +33,296 @@ func (e *SourceBingAdsAuthMethod) UnmarshalJSON(data []byte) error {
}
}
-type SourceBingAdsBingAds string
+// SourceBingAdsReportingDataObject - The name of the the object derives from the ReportRequest object. You can find it in Bing Ads Api docs - Reporting API - Reporting Data Objects.
+type SourceBingAdsReportingDataObject string
const (
- SourceBingAdsBingAdsBingAds SourceBingAdsBingAds = "bing-ads"
+ SourceBingAdsReportingDataObjectAccountPerformanceReportRequest SourceBingAdsReportingDataObject = "AccountPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectAdDynamicTextPerformanceReportRequest SourceBingAdsReportingDataObject = "AdDynamicTextPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectAdExtensionByAdReportRequest SourceBingAdsReportingDataObject = "AdExtensionByAdReportRequest"
+ SourceBingAdsReportingDataObjectAdExtensionByKeywordReportRequest SourceBingAdsReportingDataObject = "AdExtensionByKeywordReportRequest"
+ SourceBingAdsReportingDataObjectAdExtensionDetailReportRequest SourceBingAdsReportingDataObject = "AdExtensionDetailReportRequest"
+ SourceBingAdsReportingDataObjectAdGroupPerformanceReportRequest SourceBingAdsReportingDataObject = "AdGroupPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectAdPerformanceReportRequest SourceBingAdsReportingDataObject = "AdPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectAgeGenderAudienceReportRequest SourceBingAdsReportingDataObject = "AgeGenderAudienceReportRequest"
+ SourceBingAdsReportingDataObjectAudiencePerformanceReportRequest SourceBingAdsReportingDataObject = "AudiencePerformanceReportRequest"
+ SourceBingAdsReportingDataObjectCallDetailReportRequest SourceBingAdsReportingDataObject = "CallDetailReportRequest"
+ SourceBingAdsReportingDataObjectCampaignPerformanceReportRequest SourceBingAdsReportingDataObject = "CampaignPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectConversionPerformanceReportRequest SourceBingAdsReportingDataObject = "ConversionPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectDestinationURLPerformanceReportRequest SourceBingAdsReportingDataObject = "DestinationUrlPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectDsaAutoTargetPerformanceReportRequest SourceBingAdsReportingDataObject = "DSAAutoTargetPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectDsaCategoryPerformanceReportRequest SourceBingAdsReportingDataObject = "DSACategoryPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectDsaSearchQueryPerformanceReportRequest SourceBingAdsReportingDataObject = "DSASearchQueryPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectGeographicPerformanceReportRequest SourceBingAdsReportingDataObject = "GeographicPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectGoalsAndFunnelsReportRequest SourceBingAdsReportingDataObject = "GoalsAndFunnelsReportRequest"
+ SourceBingAdsReportingDataObjectHotelDimensionPerformanceReportRequest SourceBingAdsReportingDataObject = "HotelDimensionPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectHotelGroupPerformanceReportRequest SourceBingAdsReportingDataObject = "HotelGroupPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectKeywordPerformanceReportRequest SourceBingAdsReportingDataObject = "KeywordPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectNegativeKeywordConflictReportRequest SourceBingAdsReportingDataObject = "NegativeKeywordConflictReportRequest"
+ SourceBingAdsReportingDataObjectProductDimensionPerformanceReportRequest SourceBingAdsReportingDataObject = "ProductDimensionPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectProductMatchCountReportRequest SourceBingAdsReportingDataObject = "ProductMatchCountReportRequest"
+ SourceBingAdsReportingDataObjectProductNegativeKeywordConflictReportRequest SourceBingAdsReportingDataObject = "ProductNegativeKeywordConflictReportRequest"
+ SourceBingAdsReportingDataObjectProductPartitionPerformanceReportRequest SourceBingAdsReportingDataObject = "ProductPartitionPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectProductPartitionUnitPerformanceReportRequest SourceBingAdsReportingDataObject = "ProductPartitionUnitPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectProductSearchQueryPerformanceReportRequest SourceBingAdsReportingDataObject = "ProductSearchQueryPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectProfessionalDemographicsAudienceReportRequest SourceBingAdsReportingDataObject = "ProfessionalDemographicsAudienceReportRequest"
+ SourceBingAdsReportingDataObjectPublisherUsagePerformanceReportRequest SourceBingAdsReportingDataObject = "PublisherUsagePerformanceReportRequest"
+ SourceBingAdsReportingDataObjectSearchCampaignChangeHistoryReportRequest SourceBingAdsReportingDataObject = "SearchCampaignChangeHistoryReportRequest"
+ SourceBingAdsReportingDataObjectSearchQueryPerformanceReportRequest SourceBingAdsReportingDataObject = "SearchQueryPerformanceReportRequest"
+ SourceBingAdsReportingDataObjectShareOfVoiceReportRequest SourceBingAdsReportingDataObject = "ShareOfVoiceReportRequest"
+ SourceBingAdsReportingDataObjectUserLocationPerformanceReportRequest SourceBingAdsReportingDataObject = "UserLocationPerformanceReportRequest"
)
-func (e SourceBingAdsBingAds) ToPointer() *SourceBingAdsBingAds {
+func (e SourceBingAdsReportingDataObject) ToPointer() *SourceBingAdsReportingDataObject {
return &e
}
-func (e *SourceBingAdsBingAds) UnmarshalJSON(data []byte) error {
+func (e *SourceBingAdsReportingDataObject) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "AccountPerformanceReportRequest":
+ fallthrough
+ case "AdDynamicTextPerformanceReportRequest":
+ fallthrough
+ case "AdExtensionByAdReportRequest":
+ fallthrough
+ case "AdExtensionByKeywordReportRequest":
+ fallthrough
+ case "AdExtensionDetailReportRequest":
+ fallthrough
+ case "AdGroupPerformanceReportRequest":
+ fallthrough
+ case "AdPerformanceReportRequest":
+ fallthrough
+ case "AgeGenderAudienceReportRequest":
+ fallthrough
+ case "AudiencePerformanceReportRequest":
+ fallthrough
+ case "CallDetailReportRequest":
+ fallthrough
+ case "CampaignPerformanceReportRequest":
+ fallthrough
+ case "ConversionPerformanceReportRequest":
+ fallthrough
+ case "DestinationUrlPerformanceReportRequest":
+ fallthrough
+ case "DSAAutoTargetPerformanceReportRequest":
+ fallthrough
+ case "DSACategoryPerformanceReportRequest":
+ fallthrough
+ case "DSASearchQueryPerformanceReportRequest":
+ fallthrough
+ case "GeographicPerformanceReportRequest":
+ fallthrough
+ case "GoalsAndFunnelsReportRequest":
+ fallthrough
+ case "HotelDimensionPerformanceReportRequest":
+ fallthrough
+ case "HotelGroupPerformanceReportRequest":
+ fallthrough
+ case "KeywordPerformanceReportRequest":
+ fallthrough
+ case "NegativeKeywordConflictReportRequest":
+ fallthrough
+ case "ProductDimensionPerformanceReportRequest":
+ fallthrough
+ case "ProductMatchCountReportRequest":
+ fallthrough
+ case "ProductNegativeKeywordConflictReportRequest":
+ fallthrough
+ case "ProductPartitionPerformanceReportRequest":
+ fallthrough
+ case "ProductPartitionUnitPerformanceReportRequest":
+ fallthrough
+ case "ProductSearchQueryPerformanceReportRequest":
+ fallthrough
+ case "ProfessionalDemographicsAudienceReportRequest":
+ fallthrough
+ case "PublisherUsagePerformanceReportRequest":
+ fallthrough
+ case "SearchCampaignChangeHistoryReportRequest":
+ fallthrough
+ case "SearchQueryPerformanceReportRequest":
+ fallthrough
+ case "ShareOfVoiceReportRequest":
+ fallthrough
+ case "UserLocationPerformanceReportRequest":
+ *e = SourceBingAdsReportingDataObject(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceBingAdsReportingDataObject: %v", v)
+ }
+}
+
+type SourceBingAdsCustomReportConfig struct {
+ // The name of the custom report, this name would be used as stream name
+ Name string `json:"name"`
+ // A list of available aggregations.
+ ReportAggregation *string `default:"[Hourly]" json:"report_aggregation"`
+ // A list of available report object columns. You can find it in description of reporting object that you want to add to custom report.
+ ReportColumns []string `json:"report_columns"`
+ // The name of the the object derives from the ReportRequest object. You can find it in Bing Ads Api docs - Reporting API - Reporting Data Objects.
+ ReportingObject SourceBingAdsReportingDataObject `json:"reporting_object"`
+}
+
+func (s SourceBingAdsCustomReportConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceBingAdsCustomReportConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceBingAdsCustomReportConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceBingAdsCustomReportConfig) GetReportAggregation() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ReportAggregation
+}
+
+func (o *SourceBingAdsCustomReportConfig) GetReportColumns() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.ReportColumns
+}
+
+func (o *SourceBingAdsCustomReportConfig) GetReportingObject() SourceBingAdsReportingDataObject {
+ if o == nil {
+ return SourceBingAdsReportingDataObject("")
+ }
+ return o.ReportingObject
+}
+
// BingAds is the discriminator value identifying the bing-ads source type.
type BingAds string

const (
	BingAdsBingAds BingAds = "bing-ads"
)

// ToPointer returns a pointer to the receiver value.
func (e BingAds) ToPointer() *BingAds {
	return &e
}

// UnmarshalJSON accepts only the literal "bing-ads"; any other string is
// rejected with an error.
func (e *BingAds) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "bing-ads" {
		return fmt.Errorf("invalid value for BingAds: %v", raw)
	}
	*e = BingAds(raw)
	return nil
}
type SourceBingAds struct {
- AuthMethod *SourceBingAdsAuthMethod `json:"auth_method,omitempty"`
+ authMethod *SourceBingAdsAuthMethod `const:"oauth2.0" json:"auth_method,omitempty"`
// The Client ID of your Microsoft Advertising developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Microsoft Advertising developer application.
- ClientSecret *string `json:"client_secret,omitempty"`
+ ClientSecret *string `default:"" json:"client_secret"`
+ // You can add your Custom Bing Ads report by creating one.
+ CustomReports []SourceBingAdsCustomReportConfig `json:"custom_reports,omitempty"`
// Developer token associated with user. See more info in the docs.
DeveloperToken string `json:"developer_token"`
- // Also known as attribution or conversion window. How far into the past to look for records (in days). If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. Used only for performance report streams in incremental mode.
- LookbackWindow *int64 `json:"lookback_window,omitempty"`
+ // Also known as attribution or conversion window. How far into the past to look for records (in days). If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. Used only for performance report streams in incremental mode without specified Reports Start Date.
+ LookbackWindow *int64 `default:"0" json:"lookback_window"`
// Refresh Token to renew the expired Access Token.
RefreshToken string `json:"refresh_token"`
- // The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format.
- ReportsStartDate types.Date `json:"reports_start_date"`
- SourceType SourceBingAdsBingAds `json:"sourceType"`
+ // The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format. If not set, data from previous and current calendar year will be replicated.
+ ReportsStartDate *types.Date `json:"reports_start_date,omitempty"`
+ sourceType BingAds `const:"bing-ads" json:"sourceType"`
// The Tenant ID of your Microsoft Advertising developer application. Set this to "common" unless you know you need a different value.
- TenantID *string `json:"tenant_id,omitempty"`
+ TenantID *string `default:"common" json:"tenant_id"`
+}
+
+func (s SourceBingAds) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceBingAds) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceBingAds) GetAuthMethod() *SourceBingAdsAuthMethod {
+ return SourceBingAdsAuthMethodOauth20.ToPointer()
+}
+
+func (o *SourceBingAds) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceBingAds) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceBingAds) GetCustomReports() []SourceBingAdsCustomReportConfig {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReports
+}
+
+func (o *SourceBingAds) GetDeveloperToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.DeveloperToken
+}
+
+func (o *SourceBingAds) GetLookbackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindow
+}
+
+func (o *SourceBingAds) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceBingAds) GetReportsStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.ReportsStartDate
+}
+
+func (o *SourceBingAds) GetSourceType() BingAds {
+ return BingAdsBingAds
+}
+
+func (o *SourceBingAds) GetTenantID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.TenantID
}
diff --git a/internal/sdk/pkg/models/shared/sourcebingadscreaterequest.go b/internal/sdk/pkg/models/shared/sourcebingadscreaterequest.go
old mode 100755
new mode 100644
index 5c9e63592..7c09396fc
--- a/internal/sdk/pkg/models/shared/sourcebingadscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcebingadscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceBingAdsCreateRequest struct {
Configuration SourceBingAds `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceBingAdsCreateRequest) GetConfiguration() SourceBingAds {
+ if o == nil {
+ return SourceBingAds{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceBingAdsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceBingAdsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceBingAdsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceBingAdsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebingadsputrequest.go b/internal/sdk/pkg/models/shared/sourcebingadsputrequest.go
old mode 100755
new mode 100644
index 4c0b88770..42f8c9ac8
--- a/internal/sdk/pkg/models/shared/sourcebingadsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcebingadsputrequest.go
@@ -7,3 +7,24 @@ type SourceBingAdsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceBingAdsPutRequest) GetConfiguration() SourceBingAdsUpdate {
+ if o == nil {
+ return SourceBingAdsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceBingAdsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceBingAdsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebingadsupdate.go b/internal/sdk/pkg/models/shared/sourcebingadsupdate.go
old mode 100755
new mode 100644
index 4675a0b6a..6e9a64a69
--- a/internal/sdk/pkg/models/shared/sourcebingadsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcebingadsupdate.go
@@ -3,49 +3,297 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// AuthMethod is the auth-method discriminator for the Bing Ads update model;
// only OAuth 2.0 is supported.
type AuthMethod string

const (
	AuthMethodOauth20 AuthMethod = "oauth2.0"
)

// ToPointer returns a pointer to the receiver value.
func (e AuthMethod) ToPointer() *AuthMethod {
	return &e
}

// UnmarshalJSON accepts only the literal "oauth2.0"; any other string is
// rejected with an error.
func (e *AuthMethod) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "oauth2.0" {
		return fmt.Errorf("invalid value for AuthMethod: %v", raw)
	}
	*e = AuthMethod(raw)
	return nil
}
// ReportingDataObject - The name of the object derives from the ReportRequest object. You can find it in Bing Ads Api docs - Reporting API - Reporting Data Objects.
type ReportingDataObject string

const (
	ReportingDataObjectAccountPerformanceReportRequest               ReportingDataObject = "AccountPerformanceReportRequest"
	ReportingDataObjectAdDynamicTextPerformanceReportRequest         ReportingDataObject = "AdDynamicTextPerformanceReportRequest"
	ReportingDataObjectAdExtensionByAdReportRequest                  ReportingDataObject = "AdExtensionByAdReportRequest"
	ReportingDataObjectAdExtensionByKeywordReportRequest             ReportingDataObject = "AdExtensionByKeywordReportRequest"
	ReportingDataObjectAdExtensionDetailReportRequest                ReportingDataObject = "AdExtensionDetailReportRequest"
	ReportingDataObjectAdGroupPerformanceReportRequest               ReportingDataObject = "AdGroupPerformanceReportRequest"
	ReportingDataObjectAdPerformanceReportRequest                    ReportingDataObject = "AdPerformanceReportRequest"
	ReportingDataObjectAgeGenderAudienceReportRequest                ReportingDataObject = "AgeGenderAudienceReportRequest"
	ReportingDataObjectAudiencePerformanceReportRequest              ReportingDataObject = "AudiencePerformanceReportRequest"
	ReportingDataObjectCallDetailReportRequest                       ReportingDataObject = "CallDetailReportRequest"
	ReportingDataObjectCampaignPerformanceReportRequest              ReportingDataObject = "CampaignPerformanceReportRequest"
	ReportingDataObjectConversionPerformanceReportRequest            ReportingDataObject = "ConversionPerformanceReportRequest"
	ReportingDataObjectDestinationURLPerformanceReportRequest        ReportingDataObject = "DestinationUrlPerformanceReportRequest"
	ReportingDataObjectDsaAutoTargetPerformanceReportRequest         ReportingDataObject = "DSAAutoTargetPerformanceReportRequest"
	ReportingDataObjectDsaCategoryPerformanceReportRequest           ReportingDataObject = "DSACategoryPerformanceReportRequest"
	ReportingDataObjectDsaSearchQueryPerformanceReportRequest        ReportingDataObject = "DSASearchQueryPerformanceReportRequest"
	ReportingDataObjectGeographicPerformanceReportRequest            ReportingDataObject = "GeographicPerformanceReportRequest"
	ReportingDataObjectGoalsAndFunnelsReportRequest                  ReportingDataObject = "GoalsAndFunnelsReportRequest"
	ReportingDataObjectHotelDimensionPerformanceReportRequest        ReportingDataObject = "HotelDimensionPerformanceReportRequest"
	ReportingDataObjectHotelGroupPerformanceReportRequest            ReportingDataObject = "HotelGroupPerformanceReportRequest"
	ReportingDataObjectKeywordPerformanceReportRequest               ReportingDataObject = "KeywordPerformanceReportRequest"
	ReportingDataObjectNegativeKeywordConflictReportRequest          ReportingDataObject = "NegativeKeywordConflictReportRequest"
	ReportingDataObjectProductDimensionPerformanceReportRequest      ReportingDataObject = "ProductDimensionPerformanceReportRequest"
	ReportingDataObjectProductMatchCountReportRequest                ReportingDataObject = "ProductMatchCountReportRequest"
	ReportingDataObjectProductNegativeKeywordConflictReportRequest   ReportingDataObject = "ProductNegativeKeywordConflictReportRequest"
	ReportingDataObjectProductPartitionPerformanceReportRequest      ReportingDataObject = "ProductPartitionPerformanceReportRequest"
	ReportingDataObjectProductPartitionUnitPerformanceReportRequest  ReportingDataObject = "ProductPartitionUnitPerformanceReportRequest"
	ReportingDataObjectProductSearchQueryPerformanceReportRequest    ReportingDataObject = "ProductSearchQueryPerformanceReportRequest"
	ReportingDataObjectProfessionalDemographicsAudienceReportRequest ReportingDataObject = "ProfessionalDemographicsAudienceReportRequest"
	ReportingDataObjectPublisherUsagePerformanceReportRequest        ReportingDataObject = "PublisherUsagePerformanceReportRequest"
	ReportingDataObjectSearchCampaignChangeHistoryReportRequest      ReportingDataObject = "SearchCampaignChangeHistoryReportRequest"
	ReportingDataObjectSearchQueryPerformanceReportRequest           ReportingDataObject = "SearchQueryPerformanceReportRequest"
	ReportingDataObjectShareOfVoiceReportRequest                     ReportingDataObject = "ShareOfVoiceReportRequest"
	ReportingDataObjectUserLocationPerformanceReportRequest          ReportingDataObject = "UserLocationPerformanceReportRequest"
)

// ToPointer returns a pointer to the receiver value.
func (e ReportingDataObject) ToPointer() *ReportingDataObject {
	return &e
}

// UnmarshalJSON validates that the decoded string is one of the known
// reporting data objects before assigning it. The 34 accepted values are
// listed in a single multi-value case (idiomatic Go) instead of a chain of
// fallthrough arms; behavior is unchanged.
func (e *ReportingDataObject) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "AccountPerformanceReportRequest",
		"AdDynamicTextPerformanceReportRequest",
		"AdExtensionByAdReportRequest",
		"AdExtensionByKeywordReportRequest",
		"AdExtensionDetailReportRequest",
		"AdGroupPerformanceReportRequest",
		"AdPerformanceReportRequest",
		"AgeGenderAudienceReportRequest",
		"AudiencePerformanceReportRequest",
		"CallDetailReportRequest",
		"CampaignPerformanceReportRequest",
		"ConversionPerformanceReportRequest",
		"DestinationUrlPerformanceReportRequest",
		"DSAAutoTargetPerformanceReportRequest",
		"DSACategoryPerformanceReportRequest",
		"DSASearchQueryPerformanceReportRequest",
		"GeographicPerformanceReportRequest",
		"GoalsAndFunnelsReportRequest",
		"HotelDimensionPerformanceReportRequest",
		"HotelGroupPerformanceReportRequest",
		"KeywordPerformanceReportRequest",
		"NegativeKeywordConflictReportRequest",
		"ProductDimensionPerformanceReportRequest",
		"ProductMatchCountReportRequest",
		"ProductNegativeKeywordConflictReportRequest",
		"ProductPartitionPerformanceReportRequest",
		"ProductPartitionUnitPerformanceReportRequest",
		"ProductSearchQueryPerformanceReportRequest",
		"ProfessionalDemographicsAudienceReportRequest",
		"PublisherUsagePerformanceReportRequest",
		"SearchCampaignChangeHistoryReportRequest",
		"SearchQueryPerformanceReportRequest",
		"ShareOfVoiceReportRequest",
		"UserLocationPerformanceReportRequest":
		*e = ReportingDataObject(v)
		return nil
	default:
		return fmt.Errorf("invalid value for ReportingDataObject: %v", v)
	}
}
+
+type CustomReportConfig struct {
+ // The name of the custom report, this name would be used as stream name
+ Name string `json:"name"`
+ // A list of available aggregations.
+ ReportAggregation *string `default:"[Hourly]" json:"report_aggregation"`
+ // A list of available report object columns. You can find it in description of reporting object that you want to add to custom report.
+ ReportColumns []string `json:"report_columns"`
+ // The name of the the object derives from the ReportRequest object. You can find it in Bing Ads Api docs - Reporting API - Reporting Data Objects.
+ ReportingObject ReportingDataObject `json:"reporting_object"`
+}
+
+func (c CustomReportConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(c, "", false)
+}
+
+func (c *CustomReportConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &c, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *CustomReportConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *CustomReportConfig) GetReportAggregation() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ReportAggregation
+}
+
+func (o *CustomReportConfig) GetReportColumns() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.ReportColumns
+}
+
+func (o *CustomReportConfig) GetReportingObject() ReportingDataObject {
+ if o == nil {
+ return ReportingDataObject("")
+ }
+ return o.ReportingObject
+}
+
type SourceBingAdsUpdate struct {
- AuthMethod *SourceBingAdsUpdateAuthMethod `json:"auth_method,omitempty"`
+ authMethod *AuthMethod `const:"oauth2.0" json:"auth_method,omitempty"`
// The Client ID of your Microsoft Advertising developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Microsoft Advertising developer application.
- ClientSecret *string `json:"client_secret,omitempty"`
+ ClientSecret *string `default:"" json:"client_secret"`
+ // You can add your Custom Bing Ads report by creating one.
+ CustomReports []CustomReportConfig `json:"custom_reports,omitempty"`
// Developer token associated with user. See more info in the docs.
DeveloperToken string `json:"developer_token"`
- // Also known as attribution or conversion window. How far into the past to look for records (in days). If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. Used only for performance report streams in incremental mode.
- LookbackWindow *int64 `json:"lookback_window,omitempty"`
+ // Also known as attribution or conversion window. How far into the past to look for records (in days). If your conversion window has an hours/minutes granularity, round it up to the number of days exceeding. Used only for performance report streams in incremental mode without specified Reports Start Date.
+ LookbackWindow *int64 `default:"0" json:"lookback_window"`
// Refresh Token to renew the expired Access Token.
RefreshToken string `json:"refresh_token"`
- // The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format.
- ReportsStartDate types.Date `json:"reports_start_date"`
+ // The start date from which to begin replicating report data. Any data generated before this date will not be replicated in reports. This is a UTC date in YYYY-MM-DD format. If not set, data from previous and current calendar year will be replicated.
+ ReportsStartDate *types.Date `json:"reports_start_date,omitempty"`
// The Tenant ID of your Microsoft Advertising developer application. Set this to "common" unless you know you need a different value.
- TenantID *string `json:"tenant_id,omitempty"`
+ TenantID *string `default:"common" json:"tenant_id"`
+}
+
+func (s SourceBingAdsUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceBingAdsUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceBingAdsUpdate) GetAuthMethod() *AuthMethod {
+ return AuthMethodOauth20.ToPointer()
+}
+
+func (o *SourceBingAdsUpdate) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceBingAdsUpdate) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceBingAdsUpdate) GetCustomReports() []CustomReportConfig {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReports
+}
+
+func (o *SourceBingAdsUpdate) GetDeveloperToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.DeveloperToken
+}
+
+func (o *SourceBingAdsUpdate) GetLookbackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindow
+}
+
+func (o *SourceBingAdsUpdate) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceBingAdsUpdate) GetReportsStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.ReportsStartDate
+}
+
+func (o *SourceBingAdsUpdate) GetTenantID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.TenantID
}
diff --git a/internal/sdk/pkg/models/shared/sourcebraintree.go b/internal/sdk/pkg/models/shared/sourcebraintree.go
old mode 100755
new mode 100644
index 3cec98607..87f12f594
--- a/internal/sdk/pkg/models/shared/sourcebraintree.go
+++ b/internal/sdk/pkg/models/shared/sourcebraintree.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -42,27 +43,27 @@ func (e *SourceBraintreeEnvironment) UnmarshalJSON(data []byte) error {
}
}
// Braintree is the discriminator value identifying the braintree source type.
type Braintree string

const (
	BraintreeBraintree Braintree = "braintree"
)

// ToPointer returns a pointer to the receiver value.
func (e Braintree) ToPointer() *Braintree {
	return &e
}

// UnmarshalJSON accepts only the literal "braintree"; any other string is
// rejected with an error.
func (e *Braintree) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "braintree" {
		return fmt.Errorf("invalid value for Braintree: %v", raw)
	}
	*e = Braintree(raw)
	return nil
}
@@ -74,8 +75,58 @@ type SourceBraintree struct {
// Braintree Private Key. See the docs for more information on how to obtain this key.
PrivateKey string `json:"private_key"`
// Braintree Public Key. See the docs for more information on how to obtain this key.
- PublicKey string `json:"public_key"`
- SourceType SourceBraintreeBraintree `json:"sourceType"`
+ PublicKey string `json:"public_key"`
+ sourceType Braintree `const:"braintree" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceBraintree) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceBraintree) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceBraintree) GetEnvironment() SourceBraintreeEnvironment {
+ if o == nil {
+ return SourceBraintreeEnvironment("")
+ }
+ return o.Environment
+}
+
+func (o *SourceBraintree) GetMerchantID() string {
+ if o == nil {
+ return ""
+ }
+ return o.MerchantID
+}
+
+func (o *SourceBraintree) GetPrivateKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PrivateKey
+}
+
+func (o *SourceBraintree) GetPublicKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PublicKey
+}
+
+func (o *SourceBraintree) GetSourceType() Braintree {
+ return BraintreeBraintree
+}
+
+func (o *SourceBraintree) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebraintreecreaterequest.go b/internal/sdk/pkg/models/shared/sourcebraintreecreaterequest.go
old mode 100755
new mode 100644
index 19a2ec179..2577e25d9
--- a/internal/sdk/pkg/models/shared/sourcebraintreecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcebraintreecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceBraintreeCreateRequest struct {
Configuration SourceBraintree `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceBraintreeCreateRequest) GetConfiguration() SourceBraintree {
+ if o == nil {
+ return SourceBraintree{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceBraintreeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceBraintreeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceBraintreeCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceBraintreeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebraintreeputrequest.go b/internal/sdk/pkg/models/shared/sourcebraintreeputrequest.go
old mode 100755
new mode 100644
index 967613ca3..6c460046c
--- a/internal/sdk/pkg/models/shared/sourcebraintreeputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcebraintreeputrequest.go
@@ -7,3 +7,24 @@ type SourceBraintreePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceBraintreePutRequest) GetConfiguration() SourceBraintreeUpdate {
+ if o == nil {
+ return SourceBraintreeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceBraintreePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceBraintreePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebraintreeupdate.go b/internal/sdk/pkg/models/shared/sourcebraintreeupdate.go
old mode 100755
new mode 100644
index 968b54df0..7d6d1e4eb
--- a/internal/sdk/pkg/models/shared/sourcebraintreeupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcebraintreeupdate.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -54,3 +55,49 @@ type SourceBraintreeUpdate struct {
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceBraintreeUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceBraintreeUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceBraintreeUpdate) GetEnvironment() SourceBraintreeUpdateEnvironment {
+ if o == nil {
+ return SourceBraintreeUpdateEnvironment("")
+ }
+ return o.Environment
+}
+
+func (o *SourceBraintreeUpdate) GetMerchantID() string {
+ if o == nil {
+ return ""
+ }
+ return o.MerchantID
+}
+
+func (o *SourceBraintreeUpdate) GetPrivateKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PrivateKey
+}
+
+func (o *SourceBraintreeUpdate) GetPublicKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PublicKey
+}
+
+func (o *SourceBraintreeUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebraze.go b/internal/sdk/pkg/models/shared/sourcebraze.go
old mode 100755
new mode 100644
index 0cbefba3c..a1c942306
--- a/internal/sdk/pkg/models/shared/sourcebraze.go
+++ b/internal/sdk/pkg/models/shared/sourcebraze.go
@@ -3,41 +3,78 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// Braze is the discriminator value identifying the braze source type.
type Braze string

const (
	BrazeBraze Braze = "braze"
)

// ToPointer returns a pointer to the receiver value.
func (e Braze) ToPointer() *Braze {
	return &e
}

// UnmarshalJSON accepts only the literal "braze"; any other string is
// rejected with an error.
func (e *Braze) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "braze" {
		return fmt.Errorf("invalid value for Braze: %v", raw)
	}
	*e = Braze(raw)
	return nil
}
type SourceBraze struct {
// Braze REST API key
- APIKey string `json:"api_key"`
- SourceType SourceBrazeBraze `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Braze `const:"braze" json:"sourceType"`
// Rows after this date will be synced
StartDate types.Date `json:"start_date"`
// Braze REST API endpoint
URL string `json:"url"`
}
+
+func (s SourceBraze) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceBraze) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceBraze) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceBraze) GetSourceType() Braze {
+ return BrazeBraze
+}
+
+func (o *SourceBraze) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceBraze) GetURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.URL
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebrazecreaterequest.go b/internal/sdk/pkg/models/shared/sourcebrazecreaterequest.go
old mode 100755
new mode 100644
index 9e9b5a92a..bf0f6d953
--- a/internal/sdk/pkg/models/shared/sourcebrazecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcebrazecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceBrazeCreateRequest struct {
Configuration SourceBraze `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceBrazeCreateRequest) GetConfiguration() SourceBraze {
+ if o == nil {
+ return SourceBraze{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceBrazeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceBrazeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceBrazeCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceBrazeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebrazeputrequest.go b/internal/sdk/pkg/models/shared/sourcebrazeputrequest.go
old mode 100755
new mode 100644
index 9e52d21b4..2f9e30115
--- a/internal/sdk/pkg/models/shared/sourcebrazeputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcebrazeputrequest.go
@@ -7,3 +7,24 @@ type SourceBrazePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceBrazePutRequest) GetConfiguration() SourceBrazeUpdate {
+ if o == nil {
+ return SourceBrazeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceBrazePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceBrazePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcebrazeupdate.go b/internal/sdk/pkg/models/shared/sourcebrazeupdate.go
old mode 100755
new mode 100644
index 00230ae67..1f6a10784
--- a/internal/sdk/pkg/models/shared/sourcebrazeupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcebrazeupdate.go
@@ -3,7 +3,8 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceBrazeUpdate struct {
@@ -14,3 +15,35 @@ type SourceBrazeUpdate struct {
// Braze REST API endpoint
URL string `json:"url"`
}
+
+func (s SourceBrazeUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceBrazeUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceBrazeUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceBrazeUpdate) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceBrazeUpdate) GetURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.URL
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecart.go b/internal/sdk/pkg/models/shared/sourcecart.go
new file mode 100644
index 000000000..46b043160
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcecart.go
@@ -0,0 +1,264 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+type SourceCartSchemasAuthType string
+
+const (
+ SourceCartSchemasAuthTypeSingleStoreAccessToken SourceCartSchemasAuthType = "SINGLE_STORE_ACCESS_TOKEN"
+)
+
+func (e SourceCartSchemasAuthType) ToPointer() *SourceCartSchemasAuthType {
+ return &e
+}
+
+func (e *SourceCartSchemasAuthType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "SINGLE_STORE_ACCESS_TOKEN":
+ *e = SourceCartSchemasAuthType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceCartSchemasAuthType: %v", v)
+ }
+}
+
+type SourceCartSingleStoreAccessToken struct {
+ // Access Token for making authenticated requests.
+ AccessToken string `json:"access_token"`
+ authType SourceCartSchemasAuthType `const:"SINGLE_STORE_ACCESS_TOKEN" json:"auth_type"`
+ // The name of Cart.com Online Store. All API URLs start with https://[mystorename.com]/api/v1/, where [mystorename.com] is the domain name of your store.
+ StoreName string `json:"store_name"`
+}
+
+func (s SourceCartSingleStoreAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceCartSingleStoreAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceCartSingleStoreAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceCartSingleStoreAccessToken) GetAuthType() SourceCartSchemasAuthType {
+ return SourceCartSchemasAuthTypeSingleStoreAccessToken
+}
+
+func (o *SourceCartSingleStoreAccessToken) GetStoreName() string {
+ if o == nil {
+ return ""
+ }
+ return o.StoreName
+}
+
+type SourceCartAuthType string
+
+const (
+ SourceCartAuthTypeCentralAPIRouter SourceCartAuthType = "CENTRAL_API_ROUTER"
+)
+
+func (e SourceCartAuthType) ToPointer() *SourceCartAuthType {
+ return &e
+}
+
+func (e *SourceCartAuthType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "CENTRAL_API_ROUTER":
+ *e = SourceCartAuthType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceCartAuthType: %v", v)
+ }
+}
+
+type SourceCartCentralAPIRouter struct {
+ authType SourceCartAuthType `const:"CENTRAL_API_ROUTER" json:"auth_type"`
+ // You can determine a site provisioning site Id by hitting https://site.com/store/sitemonitor.aspx and reading the response param PSID
+ SiteID string `json:"site_id"`
+ // Enter your application's User Name
+ UserName string `json:"user_name"`
+ // Enter your application's User Secret
+ UserSecret string `json:"user_secret"`
+}
+
+func (s SourceCartCentralAPIRouter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceCartCentralAPIRouter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceCartCentralAPIRouter) GetAuthType() SourceCartAuthType {
+ return SourceCartAuthTypeCentralAPIRouter
+}
+
+func (o *SourceCartCentralAPIRouter) GetSiteID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SiteID
+}
+
+func (o *SourceCartCentralAPIRouter) GetUserName() string {
+ if o == nil {
+ return ""
+ }
+ return o.UserName
+}
+
+func (o *SourceCartCentralAPIRouter) GetUserSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.UserSecret
+}
+
+type SourceCartAuthorizationMethodType string
+
+const (
+ SourceCartAuthorizationMethodTypeSourceCartCentralAPIRouter SourceCartAuthorizationMethodType = "source-cart_Central API Router"
+ SourceCartAuthorizationMethodTypeSourceCartSingleStoreAccessToken SourceCartAuthorizationMethodType = "source-cart_Single Store Access Token"
+)
+
+type SourceCartAuthorizationMethod struct {
+ SourceCartCentralAPIRouter *SourceCartCentralAPIRouter
+ SourceCartSingleStoreAccessToken *SourceCartSingleStoreAccessToken
+
+ Type SourceCartAuthorizationMethodType
+}
+
+func CreateSourceCartAuthorizationMethodSourceCartCentralAPIRouter(sourceCartCentralAPIRouter SourceCartCentralAPIRouter) SourceCartAuthorizationMethod {
+ typ := SourceCartAuthorizationMethodTypeSourceCartCentralAPIRouter
+
+ return SourceCartAuthorizationMethod{
+ SourceCartCentralAPIRouter: &sourceCartCentralAPIRouter,
+ Type: typ,
+ }
+}
+
+func CreateSourceCartAuthorizationMethodSourceCartSingleStoreAccessToken(sourceCartSingleStoreAccessToken SourceCartSingleStoreAccessToken) SourceCartAuthorizationMethod {
+ typ := SourceCartAuthorizationMethodTypeSourceCartSingleStoreAccessToken
+
+ return SourceCartAuthorizationMethod{
+ SourceCartSingleStoreAccessToken: &sourceCartSingleStoreAccessToken,
+ Type: typ,
+ }
+}
+
+func (u *SourceCartAuthorizationMethod) UnmarshalJSON(data []byte) error {
+
+ sourceCartSingleStoreAccessToken := new(SourceCartSingleStoreAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceCartSingleStoreAccessToken, "", true, true); err == nil {
+ u.SourceCartSingleStoreAccessToken = sourceCartSingleStoreAccessToken
+ u.Type = SourceCartAuthorizationMethodTypeSourceCartSingleStoreAccessToken
+ return nil
+ }
+
+ sourceCartCentralAPIRouter := new(SourceCartCentralAPIRouter)
+ if err := utils.UnmarshalJSON(data, &sourceCartCentralAPIRouter, "", true, true); err == nil {
+ u.SourceCartCentralAPIRouter = sourceCartCentralAPIRouter
+ u.Type = SourceCartAuthorizationMethodTypeSourceCartCentralAPIRouter
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceCartAuthorizationMethod) MarshalJSON() ([]byte, error) {
+ if u.SourceCartCentralAPIRouter != nil {
+ return utils.MarshalJSON(u.SourceCartCentralAPIRouter, "", true)
+ }
+
+ if u.SourceCartSingleStoreAccessToken != nil {
+ return utils.MarshalJSON(u.SourceCartSingleStoreAccessToken, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type Cart string
+
+const (
+ CartCart Cart = "cart"
+)
+
+func (e Cart) ToPointer() *Cart {
+ return &e
+}
+
+func (e *Cart) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "cart":
+ *e = Cart(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for Cart: %v", v)
+ }
+}
+
+type SourceCart struct {
+ Credentials *SourceCartAuthorizationMethod `json:"credentials,omitempty"`
+ sourceType Cart `const:"cart" json:"sourceType"`
+ // The date from which you'd like to replicate the data
+ StartDate string `json:"start_date"`
+}
+
+func (s SourceCart) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceCart) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceCart) GetCredentials() *SourceCartAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceCart) GetSourceType() Cart {
+ return CartCart
+}
+
+func (o *SourceCart) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecartcreaterequest.go b/internal/sdk/pkg/models/shared/sourcecartcreaterequest.go
new file mode 100644
index 000000000..3ef0f2c44
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcecartcreaterequest.go
@@ -0,0 +1,49 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SourceCartCreateRequest struct {
+ Configuration SourceCart `json:"configuration"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ // Optional secretID obtained through the public API OAuth redirect flow.
+ SecretID *string `json:"secretId,omitempty"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *SourceCartCreateRequest) GetConfiguration() SourceCart {
+ if o == nil {
+ return SourceCart{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceCartCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceCartCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceCartCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceCartCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecartputrequest.go b/internal/sdk/pkg/models/shared/sourcecartputrequest.go
new file mode 100644
index 000000000..bbecd9f34
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcecartputrequest.go
@@ -0,0 +1,30 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SourceCartPutRequest struct {
+ Configuration SourceCartUpdate `json:"configuration"`
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *SourceCartPutRequest) GetConfiguration() SourceCartUpdate {
+ if o == nil {
+ return SourceCartUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceCartPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceCartPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecartupdate.go b/internal/sdk/pkg/models/shared/sourcecartupdate.go
new file mode 100644
index 000000000..ec8a90ca5
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcecartupdate.go
@@ -0,0 +1,224 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+type SourceCartUpdateSchemasAuthType string
+
+const (
+ SourceCartUpdateSchemasAuthTypeSingleStoreAccessToken SourceCartUpdateSchemasAuthType = "SINGLE_STORE_ACCESS_TOKEN"
+)
+
+func (e SourceCartUpdateSchemasAuthType) ToPointer() *SourceCartUpdateSchemasAuthType {
+ return &e
+}
+
+func (e *SourceCartUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "SINGLE_STORE_ACCESS_TOKEN":
+ *e = SourceCartUpdateSchemasAuthType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceCartUpdateSchemasAuthType: %v", v)
+ }
+}
+
+type SingleStoreAccessToken struct {
+ // Access Token for making authenticated requests.
+ AccessToken string `json:"access_token"`
+ authType SourceCartUpdateSchemasAuthType `const:"SINGLE_STORE_ACCESS_TOKEN" json:"auth_type"`
+ // The name of Cart.com Online Store. All API URLs start with https://[mystorename.com]/api/v1/, where [mystorename.com] is the domain name of your store.
+ StoreName string `json:"store_name"`
+}
+
+func (s SingleStoreAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SingleStoreAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SingleStoreAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SingleStoreAccessToken) GetAuthType() SourceCartUpdateSchemasAuthType {
+ return SourceCartUpdateSchemasAuthTypeSingleStoreAccessToken
+}
+
+func (o *SingleStoreAccessToken) GetStoreName() string {
+ if o == nil {
+ return ""
+ }
+ return o.StoreName
+}
+
+type SourceCartUpdateAuthType string
+
+const (
+ SourceCartUpdateAuthTypeCentralAPIRouter SourceCartUpdateAuthType = "CENTRAL_API_ROUTER"
+)
+
+func (e SourceCartUpdateAuthType) ToPointer() *SourceCartUpdateAuthType {
+ return &e
+}
+
+func (e *SourceCartUpdateAuthType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "CENTRAL_API_ROUTER":
+ *e = SourceCartUpdateAuthType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceCartUpdateAuthType: %v", v)
+ }
+}
+
+type CentralAPIRouter struct {
+ authType SourceCartUpdateAuthType `const:"CENTRAL_API_ROUTER" json:"auth_type"`
+ // You can determine a site provisioning site Id by hitting https://site.com/store/sitemonitor.aspx and reading the response param PSID
+ SiteID string `json:"site_id"`
+ // Enter your application's User Name
+ UserName string `json:"user_name"`
+ // Enter your application's User Secret
+ UserSecret string `json:"user_secret"`
+}
+
+func (c CentralAPIRouter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(c, "", false)
+}
+
+func (c *CentralAPIRouter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &c, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *CentralAPIRouter) GetAuthType() SourceCartUpdateAuthType {
+ return SourceCartUpdateAuthTypeCentralAPIRouter
+}
+
+func (o *CentralAPIRouter) GetSiteID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SiteID
+}
+
+func (o *CentralAPIRouter) GetUserName() string {
+ if o == nil {
+ return ""
+ }
+ return o.UserName
+}
+
+func (o *CentralAPIRouter) GetUserSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.UserSecret
+}
+
+type SourceCartUpdateAuthorizationMethodType string
+
+const (
+ SourceCartUpdateAuthorizationMethodTypeCentralAPIRouter SourceCartUpdateAuthorizationMethodType = "Central API Router"
+ SourceCartUpdateAuthorizationMethodTypeSingleStoreAccessToken SourceCartUpdateAuthorizationMethodType = "Single Store Access Token"
+)
+
+type SourceCartUpdateAuthorizationMethod struct {
+ CentralAPIRouter *CentralAPIRouter
+ SingleStoreAccessToken *SingleStoreAccessToken
+
+ Type SourceCartUpdateAuthorizationMethodType
+}
+
+func CreateSourceCartUpdateAuthorizationMethodCentralAPIRouter(centralAPIRouter CentralAPIRouter) SourceCartUpdateAuthorizationMethod {
+ typ := SourceCartUpdateAuthorizationMethodTypeCentralAPIRouter
+
+ return SourceCartUpdateAuthorizationMethod{
+		CentralAPIRouter: &centralAPIRouter,
+ Type: typ,
+ }
+}
+
+func CreateSourceCartUpdateAuthorizationMethodSingleStoreAccessToken(singleStoreAccessToken SingleStoreAccessToken) SourceCartUpdateAuthorizationMethod {
+ typ := SourceCartUpdateAuthorizationMethodTypeSingleStoreAccessToken
+
+ return SourceCartUpdateAuthorizationMethod{
+ SingleStoreAccessToken: &singleStoreAccessToken,
+ Type: typ,
+ }
+}
+
+func (u *SourceCartUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
+
+ singleStoreAccessToken := new(SingleStoreAccessToken)
+ if err := utils.UnmarshalJSON(data, &singleStoreAccessToken, "", true, true); err == nil {
+ u.SingleStoreAccessToken = singleStoreAccessToken
+ u.Type = SourceCartUpdateAuthorizationMethodTypeSingleStoreAccessToken
+ return nil
+ }
+
+ centralAPIRouter := new(CentralAPIRouter)
+	if err := utils.UnmarshalJSON(data, &centralAPIRouter, "", true, true); err == nil {
+ u.CentralAPIRouter = centralAPIRouter
+ u.Type = SourceCartUpdateAuthorizationMethodTypeCentralAPIRouter
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceCartUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
+ if u.CentralAPIRouter != nil {
+ return utils.MarshalJSON(u.CentralAPIRouter, "", true)
+ }
+
+ if u.SingleStoreAccessToken != nil {
+ return utils.MarshalJSON(u.SingleStoreAccessToken, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceCartUpdate struct {
+ Credentials *SourceCartUpdateAuthorizationMethod `json:"credentials,omitempty"`
+ // The date from which you'd like to replicate the data
+ StartDate string `json:"start_date"`
+}
+
+func (o *SourceCartUpdate) GetCredentials() *SourceCartUpdateAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceCartUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcechargebee.go b/internal/sdk/pkg/models/shared/sourcechargebee.go
old mode 100755
new mode 100644
index 872dfe814..7e5382bd2
--- a/internal/sdk/pkg/models/shared/sourcechargebee.go
+++ b/internal/sdk/pkg/models/shared/sourcechargebee.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -36,27 +37,27 @@ func (e *SourceChargebeeProductCatalog) UnmarshalJSON(data []byte) error {
}
}
-type SourceChargebeeChargebee string
+type Chargebee string
const (
- SourceChargebeeChargebeeChargebee SourceChargebeeChargebee = "chargebee"
+ ChargebeeChargebee Chargebee = "chargebee"
)
-func (e SourceChargebeeChargebee) ToPointer() *SourceChargebeeChargebee {
+func (e Chargebee) ToPointer() *Chargebee {
return &e
}
-func (e *SourceChargebeeChargebee) UnmarshalJSON(data []byte) error {
+func (e *Chargebee) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "chargebee":
- *e = SourceChargebeeChargebee(v)
+ *e = Chargebee(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceChargebeeChargebee: %v", v)
+ return fmt.Errorf("invalid value for Chargebee: %v", v)
}
}
@@ -66,8 +67,51 @@ type SourceChargebee struct {
// The site prefix for your Chargebee instance.
Site string `json:"site"`
// Chargebee API Key. See the docs for more information on how to obtain this key.
- SiteAPIKey string `json:"site_api_key"`
- SourceType SourceChargebeeChargebee `json:"sourceType"`
+ SiteAPIKey string `json:"site_api_key"`
+ sourceType Chargebee `const:"chargebee" json:"sourceType"`
// UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceChargebee) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceChargebee) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceChargebee) GetProductCatalog() SourceChargebeeProductCatalog {
+ if o == nil {
+ return SourceChargebeeProductCatalog("")
+ }
+ return o.ProductCatalog
+}
+
+func (o *SourceChargebee) GetSite() string {
+ if o == nil {
+ return ""
+ }
+ return o.Site
+}
+
+func (o *SourceChargebee) GetSiteAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SiteAPIKey
+}
+
+func (o *SourceChargebee) GetSourceType() Chargebee {
+ return ChargebeeChargebee
+}
+
+func (o *SourceChargebee) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcechargebeecreaterequest.go b/internal/sdk/pkg/models/shared/sourcechargebeecreaterequest.go
old mode 100755
new mode 100644
index c714ac002..e778775f0
--- a/internal/sdk/pkg/models/shared/sourcechargebeecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcechargebeecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceChargebeeCreateRequest struct {
Configuration SourceChargebee `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceChargebeeCreateRequest) GetConfiguration() SourceChargebee {
+ if o == nil {
+ return SourceChargebee{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceChargebeeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceChargebeeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceChargebeeCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceChargebeeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcechargebeeputrequest.go b/internal/sdk/pkg/models/shared/sourcechargebeeputrequest.go
old mode 100755
new mode 100644
index 02e8d3d4e..a85af0e70
--- a/internal/sdk/pkg/models/shared/sourcechargebeeputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcechargebeeputrequest.go
@@ -7,3 +7,24 @@ type SourceChargebeePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceChargebeePutRequest) GetConfiguration() SourceChargebeeUpdate {
+ if o == nil {
+ return SourceChargebeeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceChargebeePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceChargebeePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcechargebeeupdate.go b/internal/sdk/pkg/models/shared/sourcechargebeeupdate.go
old mode 100755
new mode 100644
index 8cbf8e1ec..f3f77a65f
--- a/internal/sdk/pkg/models/shared/sourcechargebeeupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcechargebeeupdate.go
@@ -5,22 +5,23 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-// SourceChargebeeUpdateProductCatalog - Product Catalog version of your Chargebee site. Instructions on how to find your version you may find here under `API Version` section.
-type SourceChargebeeUpdateProductCatalog string
+// ProductCatalog - Product Catalog version of your Chargebee site. Instructions on how to find your version you may find here under `API Version` section.
+type ProductCatalog string
const (
- SourceChargebeeUpdateProductCatalogOne0 SourceChargebeeUpdateProductCatalog = "1.0"
- SourceChargebeeUpdateProductCatalogTwo0 SourceChargebeeUpdateProductCatalog = "2.0"
+ ProductCatalogOne0 ProductCatalog = "1.0"
+ ProductCatalogTwo0 ProductCatalog = "2.0"
)
-func (e SourceChargebeeUpdateProductCatalog) ToPointer() *SourceChargebeeUpdateProductCatalog {
+func (e ProductCatalog) ToPointer() *ProductCatalog {
return &e
}
-func (e *SourceChargebeeUpdateProductCatalog) UnmarshalJSON(data []byte) error {
+func (e *ProductCatalog) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -29,16 +30,16 @@ func (e *SourceChargebeeUpdateProductCatalog) UnmarshalJSON(data []byte) error {
case "1.0":
fallthrough
case "2.0":
- *e = SourceChargebeeUpdateProductCatalog(v)
+ *e = ProductCatalog(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceChargebeeUpdateProductCatalog: %v", v)
+ return fmt.Errorf("invalid value for ProductCatalog: %v", v)
}
}
type SourceChargebeeUpdate struct {
// Product Catalog version of your Chargebee site. Instructions on how to find your version you may find here under `API Version` section.
- ProductCatalog SourceChargebeeUpdateProductCatalog `json:"product_catalog"`
+ ProductCatalog ProductCatalog `json:"product_catalog"`
// The site prefix for your Chargebee instance.
Site string `json:"site"`
// Chargebee API Key. See the docs for more information on how to obtain this key.
@@ -46,3 +47,42 @@ type SourceChargebeeUpdate struct {
// UTC date and time in the format 2021-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceChargebeeUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceChargebeeUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceChargebeeUpdate) GetProductCatalog() ProductCatalog {
+ if o == nil {
+ return ProductCatalog("")
+ }
+ return o.ProductCatalog
+}
+
+func (o *SourceChargebeeUpdate) GetSite() string {
+ if o == nil {
+ return ""
+ }
+ return o.Site
+}
+
+func (o *SourceChargebeeUpdate) GetSiteAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SiteAPIKey
+}
+
+func (o *SourceChargebeeUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcechartmogul.go b/internal/sdk/pkg/models/shared/sourcechartmogul.go
old mode 100755
new mode 100644
index f37cd4061..79b53860e
--- a/internal/sdk/pkg/models/shared/sourcechartmogul.go
+++ b/internal/sdk/pkg/models/shared/sourcechartmogul.go
@@ -5,73 +5,67 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-// SourceChartmogulInterval - Some APIs such as Metrics require intervals to cluster data.
-type SourceChartmogulInterval string
+type Chartmogul string
const (
- SourceChartmogulIntervalDay SourceChartmogulInterval = "day"
- SourceChartmogulIntervalWeek SourceChartmogulInterval = "week"
- SourceChartmogulIntervalMonth SourceChartmogulInterval = "month"
- SourceChartmogulIntervalQuarter SourceChartmogulInterval = "quarter"
+ ChartmogulChartmogul Chartmogul = "chartmogul"
)
-func (e SourceChartmogulInterval) ToPointer() *SourceChartmogulInterval {
+func (e Chartmogul) ToPointer() *Chartmogul {
return &e
}
-func (e *SourceChartmogulInterval) UnmarshalJSON(data []byte) error {
+func (e *Chartmogul) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
- case "day":
- fallthrough
- case "week":
- fallthrough
- case "month":
- fallthrough
- case "quarter":
- *e = SourceChartmogulInterval(v)
+ case "chartmogul":
+ *e = Chartmogul(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceChartmogulInterval: %v", v)
+ return fmt.Errorf("invalid value for Chartmogul: %v", v)
}
}
-type SourceChartmogulChartmogul string
-
-const (
- SourceChartmogulChartmogulChartmogul SourceChartmogulChartmogul = "chartmogul"
-)
+type SourceChartmogul struct {
+ // Your Chartmogul API key. See the docs for info on how to obtain this.
+ APIKey string `json:"api_key"`
+ sourceType Chartmogul `const:"chartmogul" json:"sourceType"`
+ // UTC date and time in the format 2017-01-25T00:00:00Z. When feasible, any data before this date will not be replicated.
+ StartDate time.Time `json:"start_date"`
+}
-func (e SourceChartmogulChartmogul) ToPointer() *SourceChartmogulChartmogul {
- return &e
+func (s SourceChartmogul) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
}
-func (e *SourceChartmogulChartmogul) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
+func (s *SourceChartmogul) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
return err
}
- switch v {
- case "chartmogul":
- *e = SourceChartmogulChartmogul(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceChartmogulChartmogul: %v", v)
+ return nil
+}
+
+func (o *SourceChartmogul) GetAPIKey() string {
+ if o == nil {
+ return ""
}
+ return o.APIKey
}
-type SourceChartmogul struct {
- // Your Chartmogul API key. See the docs for info on how to obtain this.
- APIKey string `json:"api_key"`
- // Some APIs such as Metrics require intervals to cluster data.
- Interval SourceChartmogulInterval `json:"interval"`
- SourceType SourceChartmogulChartmogul `json:"sourceType"`
- // UTC date and time in the format 2017-01-25T00:00:00Z. When feasible, any data before this date will not be replicated.
- StartDate time.Time `json:"start_date"`
+func (o *SourceChartmogul) GetSourceType() Chartmogul {
+ return ChartmogulChartmogul
+}
+
+func (o *SourceChartmogul) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcechartmogulcreaterequest.go b/internal/sdk/pkg/models/shared/sourcechartmogulcreaterequest.go
old mode 100755
new mode 100644
index c592001c9..cd3ad18d2
--- a/internal/sdk/pkg/models/shared/sourcechartmogulcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcechartmogulcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceChartmogulCreateRequest struct {
Configuration SourceChartmogul `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceChartmogulCreateRequest) GetConfiguration() SourceChartmogul {
+ if o == nil {
+ return SourceChartmogul{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceChartmogulCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceChartmogulCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceChartmogulCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceChartmogulCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcechartmogulputrequest.go b/internal/sdk/pkg/models/shared/sourcechartmogulputrequest.go
old mode 100755
new mode 100644
index 9dbe400c8..68b040914
--- a/internal/sdk/pkg/models/shared/sourcechartmogulputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcechartmogulputrequest.go
@@ -7,3 +7,24 @@ type SourceChartmogulPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceChartmogulPutRequest) GetConfiguration() SourceChartmogulUpdate {
+ if o == nil {
+ return SourceChartmogulUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceChartmogulPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceChartmogulPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcechartmogulupdate.go b/internal/sdk/pkg/models/shared/sourcechartmogulupdate.go
old mode 100755
new mode 100644
index c7ff9b9a6..e55e5cc4a
--- a/internal/sdk/pkg/models/shared/sourcechartmogulupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcechartmogulupdate.go
@@ -3,50 +3,38 @@
package shared
import (
- "encoding/json"
- "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-// SourceChartmogulUpdateInterval - Some APIs such as Metrics require intervals to cluster data.
-type SourceChartmogulUpdateInterval string
-
-const (
- SourceChartmogulUpdateIntervalDay SourceChartmogulUpdateInterval = "day"
- SourceChartmogulUpdateIntervalWeek SourceChartmogulUpdateInterval = "week"
- SourceChartmogulUpdateIntervalMonth SourceChartmogulUpdateInterval = "month"
- SourceChartmogulUpdateIntervalQuarter SourceChartmogulUpdateInterval = "quarter"
-)
+type SourceChartmogulUpdate struct {
+ // Your Chartmogul API key. See the docs for info on how to obtain this.
+ APIKey string `json:"api_key"`
+ // UTC date and time in the format 2017-01-25T00:00:00Z. When feasible, any data before this date will not be replicated.
+ StartDate time.Time `json:"start_date"`
+}
-func (e SourceChartmogulUpdateInterval) ToPointer() *SourceChartmogulUpdateInterval {
- return &e
+func (s SourceChartmogulUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
}
-func (e *SourceChartmogulUpdateInterval) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
+func (s *SourceChartmogulUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
return err
}
- switch v {
- case "day":
- fallthrough
- case "week":
- fallthrough
- case "month":
- fallthrough
- case "quarter":
- *e = SourceChartmogulUpdateInterval(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceChartmogulUpdateInterval: %v", v)
+ return nil
+}
+
+func (o *SourceChartmogulUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
}
+ return o.APIKey
}
-type SourceChartmogulUpdate struct {
- // Your Chartmogul API key. See the docs for info on how to obtain this.
- APIKey string `json:"api_key"`
- // Some APIs such as Metrics require intervals to cluster data.
- Interval SourceChartmogulUpdateInterval `json:"interval"`
- // UTC date and time in the format 2017-01-25T00:00:00Z. When feasible, any data before this date will not be replicated.
- StartDate time.Time `json:"start_date"`
+func (o *SourceChartmogulUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceclickhouse.go b/internal/sdk/pkg/models/shared/sourceclickhouse.go
old mode 100755
new mode 100644
index d9da7fb78..07df600dd
--- a/internal/sdk/pkg/models/shared/sourceclickhouse.go
+++ b/internal/sdk/pkg/models/shared/sourceclickhouse.go
@@ -3,10 +3,10 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceClickhouseClickhouse string
@@ -33,185 +33,279 @@ func (e *SourceClickhouseClickhouse) UnmarshalJSON(data []byte) error {
}
}
-// SourceClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type SourceClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// SourceClickhouseSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type SourceClickhouseSchemasTunnelMethodTunnelMethod string
const (
- SourceClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth SourceClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ SourceClickhouseSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourceClickhouseSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourceClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *SourceClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e SourceClickhouseSchemasTunnelMethodTunnelMethod) ToPointer() *SourceClickhouseSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *SourceClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceClickhouseSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourceClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = SourceClickhouseSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceClickhouseSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// SourceClickhouseSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceClickhouseSSHTunnelMethodPasswordAuthentication struct {
+// SourceClickhousePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceClickhousePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourceClickhouseSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceClickhouseSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourceClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourceClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourceClickhousePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClickhousePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClickhousePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceClickhousePasswordAuthentication) GetTunnelMethod() SourceClickhouseSchemasTunnelMethodTunnelMethod {
+ return SourceClickhouseSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourceClickhousePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceClickhousePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourceClickhousePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// SourceClickhouseSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type SourceClickhouseSchemasTunnelMethod string
const (
- SourceClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth SourceClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ SourceClickhouseSchemasTunnelMethodSSHKeyAuth SourceClickhouseSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e SourceClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *SourceClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e SourceClickhouseSchemasTunnelMethod) ToPointer() *SourceClickhouseSchemasTunnelMethod {
return &e
}
-func (e *SourceClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceClickhouseSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourceClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = SourceClickhouseSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceClickhouseSchemasTunnelMethod: %v", v)
}
}
-// SourceClickhouseSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceClickhouseSSHTunnelMethodSSHKeyAuthentication struct {
+// SourceClickhouseSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceClickhouseSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourceClickhouseSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceClickhouseSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourceClickhouseSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourceClickhouseSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourceClickhouseSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClickhouseSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClickhouseSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourceClickhouseSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceClickhouseSSHKeyAuthentication) GetTunnelMethod() SourceClickhouseSchemasTunnelMethod {
+ return SourceClickhouseSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourceClickhouseSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceClickhouseSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourceClickhouseTunnelMethod - No ssh tunnel needed to connect to database
+type SourceClickhouseTunnelMethod string
const (
- SourceClickhouseSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourceClickhouseSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourceClickhouseTunnelMethodNoTunnel SourceClickhouseTunnelMethod = "NO_TUNNEL"
)
-func (e SourceClickhouseSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourceClickhouseSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourceClickhouseTunnelMethod) ToPointer() *SourceClickhouseTunnelMethod {
return &e
}
-func (e *SourceClickhouseSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceClickhouseTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourceClickhouseSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourceClickhouseTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceClickhouseSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceClickhouseTunnelMethod: %v", v)
}
}
-// SourceClickhouseSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceClickhouseSSHTunnelMethodNoTunnel struct {
+// SourceClickhouseNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceClickhouseNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourceClickhouseSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceClickhouseTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourceClickhouseNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClickhouseNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClickhouseNoTunnel) GetTunnelMethod() SourceClickhouseTunnelMethod {
+ return SourceClickhouseTunnelMethodNoTunnel
}
type SourceClickhouseSSHTunnelMethodType string
const (
- SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHTunnelMethodNoTunnel SourceClickhouseSSHTunnelMethodType = "source-clickhouse_SSH Tunnel Method_No Tunnel"
- SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHTunnelMethodSSHKeyAuthentication SourceClickhouseSSHTunnelMethodType = "source-clickhouse_SSH Tunnel Method_SSH Key Authentication"
- SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHTunnelMethodPasswordAuthentication SourceClickhouseSSHTunnelMethodType = "source-clickhouse_SSH Tunnel Method_Password Authentication"
+ SourceClickhouseSSHTunnelMethodTypeSourceClickhouseNoTunnel SourceClickhouseSSHTunnelMethodType = "source-clickhouse_No Tunnel"
+ SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHKeyAuthentication SourceClickhouseSSHTunnelMethodType = "source-clickhouse_SSH Key Authentication"
+ SourceClickhouseSSHTunnelMethodTypeSourceClickhousePasswordAuthentication SourceClickhouseSSHTunnelMethodType = "source-clickhouse_Password Authentication"
)
type SourceClickhouseSSHTunnelMethod struct {
- SourceClickhouseSSHTunnelMethodNoTunnel *SourceClickhouseSSHTunnelMethodNoTunnel
- SourceClickhouseSSHTunnelMethodSSHKeyAuthentication *SourceClickhouseSSHTunnelMethodSSHKeyAuthentication
- SourceClickhouseSSHTunnelMethodPasswordAuthentication *SourceClickhouseSSHTunnelMethodPasswordAuthentication
+ SourceClickhouseNoTunnel *SourceClickhouseNoTunnel
+ SourceClickhouseSSHKeyAuthentication *SourceClickhouseSSHKeyAuthentication
+ SourceClickhousePasswordAuthentication *SourceClickhousePasswordAuthentication
Type SourceClickhouseSSHTunnelMethodType
}
-func CreateSourceClickhouseSSHTunnelMethodSourceClickhouseSSHTunnelMethodNoTunnel(sourceClickhouseSSHTunnelMethodNoTunnel SourceClickhouseSSHTunnelMethodNoTunnel) SourceClickhouseSSHTunnelMethod {
- typ := SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHTunnelMethodNoTunnel
+func CreateSourceClickhouseSSHTunnelMethodSourceClickhouseNoTunnel(sourceClickhouseNoTunnel SourceClickhouseNoTunnel) SourceClickhouseSSHTunnelMethod {
+ typ := SourceClickhouseSSHTunnelMethodTypeSourceClickhouseNoTunnel
return SourceClickhouseSSHTunnelMethod{
- SourceClickhouseSSHTunnelMethodNoTunnel: &sourceClickhouseSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourceClickhouseNoTunnel: &sourceClickhouseNoTunnel,
+ Type: typ,
}
}
-func CreateSourceClickhouseSSHTunnelMethodSourceClickhouseSSHTunnelMethodSSHKeyAuthentication(sourceClickhouseSSHTunnelMethodSSHKeyAuthentication SourceClickhouseSSHTunnelMethodSSHKeyAuthentication) SourceClickhouseSSHTunnelMethod {
- typ := SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHTunnelMethodSSHKeyAuthentication
+func CreateSourceClickhouseSSHTunnelMethodSourceClickhouseSSHKeyAuthentication(sourceClickhouseSSHKeyAuthentication SourceClickhouseSSHKeyAuthentication) SourceClickhouseSSHTunnelMethod {
+ typ := SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHKeyAuthentication
return SourceClickhouseSSHTunnelMethod{
- SourceClickhouseSSHTunnelMethodSSHKeyAuthentication: &sourceClickhouseSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ SourceClickhouseSSHKeyAuthentication: &sourceClickhouseSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateSourceClickhouseSSHTunnelMethodSourceClickhouseSSHTunnelMethodPasswordAuthentication(sourceClickhouseSSHTunnelMethodPasswordAuthentication SourceClickhouseSSHTunnelMethodPasswordAuthentication) SourceClickhouseSSHTunnelMethod {
- typ := SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHTunnelMethodPasswordAuthentication
+func CreateSourceClickhouseSSHTunnelMethodSourceClickhousePasswordAuthentication(sourceClickhousePasswordAuthentication SourceClickhousePasswordAuthentication) SourceClickhouseSSHTunnelMethod {
+ typ := SourceClickhouseSSHTunnelMethodTypeSourceClickhousePasswordAuthentication
return SourceClickhouseSSHTunnelMethod{
- SourceClickhouseSSHTunnelMethodPasswordAuthentication: &sourceClickhouseSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ SourceClickhousePasswordAuthentication: &sourceClickhousePasswordAuthentication,
+ Type: typ,
}
}
func (u *SourceClickhouseSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceClickhouseSSHTunnelMethodNoTunnel := new(SourceClickhouseSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceClickhouseSSHTunnelMethodNoTunnel); err == nil {
- u.SourceClickhouseSSHTunnelMethodNoTunnel = sourceClickhouseSSHTunnelMethodNoTunnel
- u.Type = SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHTunnelMethodNoTunnel
+
+ sourceClickhouseNoTunnel := new(SourceClickhouseNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourceClickhouseNoTunnel, "", true, true); err == nil {
+ u.SourceClickhouseNoTunnel = sourceClickhouseNoTunnel
+ u.Type = SourceClickhouseSSHTunnelMethodTypeSourceClickhouseNoTunnel
return nil
}
- sourceClickhouseSSHTunnelMethodSSHKeyAuthentication := new(SourceClickhouseSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceClickhouseSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourceClickhouseSSHTunnelMethodSSHKeyAuthentication = sourceClickhouseSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHTunnelMethodSSHKeyAuthentication
+ sourceClickhouseSSHKeyAuthentication := new(SourceClickhouseSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceClickhouseSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceClickhouseSSHKeyAuthentication = sourceClickhouseSSHKeyAuthentication
+ u.Type = SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHKeyAuthentication
return nil
}
- sourceClickhouseSSHTunnelMethodPasswordAuthentication := new(SourceClickhouseSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceClickhouseSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourceClickhouseSSHTunnelMethodPasswordAuthentication = sourceClickhouseSSHTunnelMethodPasswordAuthentication
- u.Type = SourceClickhouseSSHTunnelMethodTypeSourceClickhouseSSHTunnelMethodPasswordAuthentication
+ sourceClickhousePasswordAuthentication := new(SourceClickhousePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceClickhousePasswordAuthentication, "", true, true); err == nil {
+ u.SourceClickhousePasswordAuthentication = sourceClickhousePasswordAuthentication
+ u.Type = SourceClickhouseSSHTunnelMethodTypeSourceClickhousePasswordAuthentication
return nil
}
@@ -219,19 +313,19 @@ func (u *SourceClickhouseSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceClickhouseSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourceClickhouseSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourceClickhouseSSHTunnelMethodNoTunnel)
+ if u.SourceClickhouseNoTunnel != nil {
+ return utils.MarshalJSON(u.SourceClickhouseNoTunnel, "", true)
}
- if u.SourceClickhouseSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceClickhouseSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourceClickhouseSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceClickhouseSSHKeyAuthentication, "", true)
}
- if u.SourceClickhouseSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourceClickhouseSSHTunnelMethodPasswordAuthentication)
+ if u.SourceClickhousePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceClickhousePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceClickhouse struct {
@@ -242,10 +336,67 @@ type SourceClickhouse struct {
// The password associated with this username.
Password *string `json:"password,omitempty"`
// The port of the database.
- Port int64 `json:"port"`
- SourceType SourceClickhouseClickhouse `json:"sourceType"`
+ Port *int64 `default:"8123" json:"port"`
+ sourceType SourceClickhouseClickhouse `const:"clickhouse" json:"sourceType"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *SourceClickhouseSSHTunnelMethod `json:"tunnel_method,omitempty"`
// The username which is used to access the database.
Username string `json:"username"`
}
+
+func (s SourceClickhouse) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClickhouse) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClickhouse) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceClickhouse) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceClickhouse) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceClickhouse) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceClickhouse) GetSourceType() SourceClickhouseClickhouse {
+ return SourceClickhouseClickhouseClickhouse
+}
+
+func (o *SourceClickhouse) GetTunnelMethod() *SourceClickhouseSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourceClickhouse) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclickhousecreaterequest.go b/internal/sdk/pkg/models/shared/sourceclickhousecreaterequest.go
old mode 100755
new mode 100644
index 9ce0eced8..7ee6d8b3d
--- a/internal/sdk/pkg/models/shared/sourceclickhousecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceclickhousecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceClickhouseCreateRequest struct {
Configuration SourceClickhouse `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceClickhouseCreateRequest) GetConfiguration() SourceClickhouse {
+ if o == nil {
+ return SourceClickhouse{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceClickhouseCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceClickhouseCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceClickhouseCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceClickhouseCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclickhouseputrequest.go b/internal/sdk/pkg/models/shared/sourceclickhouseputrequest.go
old mode 100755
new mode 100644
index 8ea3664bd..ffeb3cac6
--- a/internal/sdk/pkg/models/shared/sourceclickhouseputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceclickhouseputrequest.go
@@ -7,3 +7,24 @@ type SourceClickhousePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceClickhousePutRequest) GetConfiguration() SourceClickhouseUpdate {
+ if o == nil {
+ return SourceClickhouseUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceClickhousePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceClickhousePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclickhouseupdate.go b/internal/sdk/pkg/models/shared/sourceclickhouseupdate.go
old mode 100755
new mode 100644
index c883607e3..67c4e33a1
--- a/internal/sdk/pkg/models/shared/sourceclickhouseupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceclickhouseupdate.go
@@ -3,191 +3,285 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type SourceClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// SourceClickhouseUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type SourceClickhouseUpdateSchemasTunnelMethodTunnelMethod string
const (
- SourceClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth SourceClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ SourceClickhouseUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourceClickhouseUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourceClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *SourceClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e SourceClickhouseUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *SourceClickhouseUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *SourceClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceClickhouseUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourceClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = SourceClickhouseUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceClickhouseUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication struct {
+// SourceClickhouseUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceClickhouseUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourceClickhouseUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceClickhouseUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourceClickhouseUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClickhouseUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClickhouseUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceClickhouseUpdatePasswordAuthentication) GetTunnelMethod() SourceClickhouseUpdateSchemasTunnelMethodTunnelMethod {
+ return SourceClickhouseUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourceClickhouseUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceClickhouseUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourceClickhouseUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// SourceClickhouseUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type SourceClickhouseUpdateSchemasTunnelMethod string
const (
- SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ SourceClickhouseUpdateSchemasTunnelMethodSSHKeyAuth SourceClickhouseUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e SourceClickhouseUpdateSchemasTunnelMethod) ToPointer() *SourceClickhouseUpdateSchemasTunnelMethod {
return &e
}
-func (e *SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceClickhouseUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = SourceClickhouseUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceClickhouseUpdateSchemasTunnelMethod: %v", v)
}
}
-// SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// SourceClickhouseUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceClickhouseUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceClickhouseUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourceClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourceClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourceClickhouseUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClickhouseUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClickhouseUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourceClickhouseUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceClickhouseUpdateSSHKeyAuthentication) GetTunnelMethod() SourceClickhouseUpdateSchemasTunnelMethod {
+ return SourceClickhouseUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourceClickhouseUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceClickhouseUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourceClickhouseUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type SourceClickhouseUpdateTunnelMethod string
const (
- SourceClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourceClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourceClickhouseUpdateTunnelMethodNoTunnel SourceClickhouseUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e SourceClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourceClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourceClickhouseUpdateTunnelMethod) ToPointer() *SourceClickhouseUpdateTunnelMethod {
return &e
}
-func (e *SourceClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceClickhouseUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourceClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourceClickhouseUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceClickhouseUpdateTunnelMethod: %v", v)
}
}
-// SourceClickhouseUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceClickhouseUpdateSSHTunnelMethodNoTunnel struct {
+// SourceClickhouseUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceClickhouseUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourceClickhouseUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceClickhouseUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourceClickhouseUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClickhouseUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClickhouseUpdateNoTunnel) GetTunnelMethod() SourceClickhouseUpdateTunnelMethod {
+ return SourceClickhouseUpdateTunnelMethodNoTunnel
}
type SourceClickhouseUpdateSSHTunnelMethodType string
const (
- SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHTunnelMethodNoTunnel SourceClickhouseUpdateSSHTunnelMethodType = "source-clickhouse-update_SSH Tunnel Method_No Tunnel"
- SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication SourceClickhouseUpdateSSHTunnelMethodType = "source-clickhouse-update_SSH Tunnel Method_SSH Key Authentication"
- SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication SourceClickhouseUpdateSSHTunnelMethodType = "source-clickhouse-update_SSH Tunnel Method_Password Authentication"
+ SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateNoTunnel SourceClickhouseUpdateSSHTunnelMethodType = "source-clickhouse-update_No Tunnel"
+ SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHKeyAuthentication SourceClickhouseUpdateSSHTunnelMethodType = "source-clickhouse-update_SSH Key Authentication"
+ SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdatePasswordAuthentication SourceClickhouseUpdateSSHTunnelMethodType = "source-clickhouse-update_Password Authentication"
)
type SourceClickhouseUpdateSSHTunnelMethod struct {
- SourceClickhouseUpdateSSHTunnelMethodNoTunnel *SourceClickhouseUpdateSSHTunnelMethodNoTunnel
- SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication *SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication
- SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication *SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication
+ SourceClickhouseUpdateNoTunnel *SourceClickhouseUpdateNoTunnel
+ SourceClickhouseUpdateSSHKeyAuthentication *SourceClickhouseUpdateSSHKeyAuthentication
+ SourceClickhouseUpdatePasswordAuthentication *SourceClickhouseUpdatePasswordAuthentication
Type SourceClickhouseUpdateSSHTunnelMethodType
}
-func CreateSourceClickhouseUpdateSSHTunnelMethodSourceClickhouseUpdateSSHTunnelMethodNoTunnel(sourceClickhouseUpdateSSHTunnelMethodNoTunnel SourceClickhouseUpdateSSHTunnelMethodNoTunnel) SourceClickhouseUpdateSSHTunnelMethod {
- typ := SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHTunnelMethodNoTunnel
+func CreateSourceClickhouseUpdateSSHTunnelMethodSourceClickhouseUpdateNoTunnel(sourceClickhouseUpdateNoTunnel SourceClickhouseUpdateNoTunnel) SourceClickhouseUpdateSSHTunnelMethod {
+ typ := SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateNoTunnel
return SourceClickhouseUpdateSSHTunnelMethod{
- SourceClickhouseUpdateSSHTunnelMethodNoTunnel: &sourceClickhouseUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourceClickhouseUpdateNoTunnel: &sourceClickhouseUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateSourceClickhouseUpdateSSHTunnelMethodSourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication(sourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication) SourceClickhouseUpdateSSHTunnelMethod {
- typ := SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateSourceClickhouseUpdateSSHTunnelMethodSourceClickhouseUpdateSSHKeyAuthentication(sourceClickhouseUpdateSSHKeyAuthentication SourceClickhouseUpdateSSHKeyAuthentication) SourceClickhouseUpdateSSHTunnelMethod {
+ typ := SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHKeyAuthentication
return SourceClickhouseUpdateSSHTunnelMethod{
- SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication: &sourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication,
+ SourceClickhouseUpdateSSHKeyAuthentication: &sourceClickhouseUpdateSSHKeyAuthentication,
Type: typ,
}
}
-func CreateSourceClickhouseUpdateSSHTunnelMethodSourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication(sourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication) SourceClickhouseUpdateSSHTunnelMethod {
- typ := SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication
+func CreateSourceClickhouseUpdateSSHTunnelMethodSourceClickhouseUpdatePasswordAuthentication(sourceClickhouseUpdatePasswordAuthentication SourceClickhouseUpdatePasswordAuthentication) SourceClickhouseUpdateSSHTunnelMethod {
+ typ := SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdatePasswordAuthentication
return SourceClickhouseUpdateSSHTunnelMethod{
- SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication: &sourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication,
+ SourceClickhouseUpdatePasswordAuthentication: &sourceClickhouseUpdatePasswordAuthentication,
Type: typ,
}
}
func (u *SourceClickhouseUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceClickhouseUpdateSSHTunnelMethodNoTunnel := new(SourceClickhouseUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceClickhouseUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.SourceClickhouseUpdateSSHTunnelMethodNoTunnel = sourceClickhouseUpdateSSHTunnelMethodNoTunnel
- u.Type = SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHTunnelMethodNoTunnel
+
+ sourceClickhouseUpdateNoTunnel := new(SourceClickhouseUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourceClickhouseUpdateNoTunnel, "", true, true); err == nil {
+ u.SourceClickhouseUpdateNoTunnel = sourceClickhouseUpdateNoTunnel
+ u.Type = SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateNoTunnel
return nil
}
- sourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication := new(SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication = sourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication
+ sourceClickhouseUpdateSSHKeyAuthentication := new(SourceClickhouseUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceClickhouseUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceClickhouseUpdateSSHKeyAuthentication = sourceClickhouseUpdateSSHKeyAuthentication
+ u.Type = SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHKeyAuthentication
return nil
}
- sourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication := new(SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication = sourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication
+ sourceClickhouseUpdatePasswordAuthentication := new(SourceClickhouseUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceClickhouseUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.SourceClickhouseUpdatePasswordAuthentication = sourceClickhouseUpdatePasswordAuthentication
+ u.Type = SourceClickhouseUpdateSSHTunnelMethodTypeSourceClickhouseUpdatePasswordAuthentication
return nil
}
@@ -195,19 +289,19 @@ func (u *SourceClickhouseUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error
}
func (u SourceClickhouseUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourceClickhouseUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourceClickhouseUpdateSSHTunnelMethodNoTunnel)
+ if u.SourceClickhouseUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.SourceClickhouseUpdateNoTunnel, "", true)
}
- if u.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceClickhouseUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourceClickhouseUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceClickhouseUpdateSSHKeyAuthentication, "", true)
}
- if u.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourceClickhouseUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.SourceClickhouseUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceClickhouseUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceClickhouseUpdate struct {
@@ -218,9 +312,62 @@ type SourceClickhouseUpdate struct {
// The password associated with this username.
Password *string `json:"password,omitempty"`
// The port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"8123" json:"port"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *SourceClickhouseUpdateSSHTunnelMethod `json:"tunnel_method,omitempty"`
// The username which is used to access the database.
Username string `json:"username"`
}
+
+func (s SourceClickhouseUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClickhouseUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClickhouseUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceClickhouseUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceClickhouseUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceClickhouseUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceClickhouseUpdate) GetTunnelMethod() *SourceClickhouseUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourceClickhouseUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclickupapi.go b/internal/sdk/pkg/models/shared/sourceclickupapi.go
old mode 100755
new mode 100644
index 8a98142ed..6eb8b8650
--- a/internal/sdk/pkg/models/shared/sourceclickupapi.go
+++ b/internal/sdk/pkg/models/shared/sourceclickupapi.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceClickupAPIClickupAPI string
+type ClickupAPI string
const (
- SourceClickupAPIClickupAPIClickupAPI SourceClickupAPIClickupAPI = "clickup-api"
+ ClickupAPIClickupAPI ClickupAPI = "clickup-api"
)
-func (e SourceClickupAPIClickupAPI) ToPointer() *SourceClickupAPIClickupAPI {
+func (e ClickupAPI) ToPointer() *ClickupAPI {
return &e
}
-func (e *SourceClickupAPIClickupAPI) UnmarshalJSON(data []byte) error {
+func (e *ClickupAPI) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "clickup-api":
- *e = SourceClickupAPIClickupAPI(v)
+ *e = ClickupAPI(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceClickupAPIClickupAPI: %v", v)
+ return fmt.Errorf("invalid value for ClickupAPI: %v", v)
}
}
@@ -37,12 +38,69 @@ type SourceClickupAPI struct {
// The ID of your folder in your space. Retrieve it from the `/space/{space_id}/folder` of the ClickUp API. See here.
FolderID *string `json:"folder_id,omitempty"`
// Include or exclude closed tasks. By default, they are excluded. See here.
- IncludeClosedTasks *bool `json:"include_closed_tasks,omitempty"`
+ IncludeClosedTasks *bool `default:"false" json:"include_closed_tasks"`
// The ID of your list in your folder. Retrieve it from the `/folder/{folder_id}/list` of the ClickUp API. See here.
- ListID *string `json:"list_id,omitempty"`
- SourceType SourceClickupAPIClickupAPI `json:"sourceType"`
+ ListID *string `json:"list_id,omitempty"`
+ sourceType ClickupAPI `const:"clickup-api" json:"sourceType"`
// The ID of your space in your workspace. Retrieve it from the `/team/{team_id}/space` of the ClickUp API. See here.
SpaceID *string `json:"space_id,omitempty"`
// The ID of your team in ClickUp. Retrieve it from the `/team` of the ClickUp API. See here.
TeamID *string `json:"team_id,omitempty"`
}
+
+func (s SourceClickupAPI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClickupAPI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClickupAPI) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceClickupAPI) GetFolderID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FolderID
+}
+
+func (o *SourceClickupAPI) GetIncludeClosedTasks() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeClosedTasks
+}
+
+func (o *SourceClickupAPI) GetListID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ListID
+}
+
+func (o *SourceClickupAPI) GetSourceType() ClickupAPI {
+ return ClickupAPIClickupAPI
+}
+
+func (o *SourceClickupAPI) GetSpaceID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SpaceID
+}
+
+func (o *SourceClickupAPI) GetTeamID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.TeamID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclickupapicreaterequest.go b/internal/sdk/pkg/models/shared/sourceclickupapicreaterequest.go
old mode 100755
new mode 100644
index 53b805807..03a09c860
--- a/internal/sdk/pkg/models/shared/sourceclickupapicreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceclickupapicreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceClickupAPICreateRequest struct {
Configuration SourceClickupAPI `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceClickupAPICreateRequest) GetConfiguration() SourceClickupAPI {
+ if o == nil {
+ return SourceClickupAPI{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceClickupAPICreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceClickupAPICreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceClickupAPICreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceClickupAPICreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclickupapiputrequest.go b/internal/sdk/pkg/models/shared/sourceclickupapiputrequest.go
old mode 100755
new mode 100644
index 5ae4f6b4e..b75af12c8
--- a/internal/sdk/pkg/models/shared/sourceclickupapiputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceclickupapiputrequest.go
@@ -7,3 +7,24 @@ type SourceClickupAPIPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceClickupAPIPutRequest) GetConfiguration() SourceClickupAPIUpdate {
+ if o == nil {
+ return SourceClickupAPIUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceClickupAPIPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceClickupAPIPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclickupapiupdate.go b/internal/sdk/pkg/models/shared/sourceclickupapiupdate.go
old mode 100755
new mode 100644
index e2115c537..0d36532b7
--- a/internal/sdk/pkg/models/shared/sourceclickupapiupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceclickupapiupdate.go
@@ -2,13 +2,17 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceClickupAPIUpdate struct {
// Every ClickUp API call required authentication. This field is your personal API token. See here.
APIToken string `json:"api_token"`
// The ID of your folder in your space. Retrieve it from the `/space/{space_id}/folder` of the ClickUp API. See here.
FolderID *string `json:"folder_id,omitempty"`
// Include or exclude closed tasks. By default, they are excluded. See here.
- IncludeClosedTasks *bool `json:"include_closed_tasks,omitempty"`
+ IncludeClosedTasks *bool `default:"false" json:"include_closed_tasks"`
// The ID of your list in your folder. Retrieve it from the `/folder/{folder_id}/list` of the ClickUp API. See here.
ListID *string `json:"list_id,omitempty"`
// The ID of your space in your workspace. Retrieve it from the `/team/{team_id}/space` of the ClickUp API. See here.
@@ -16,3 +20,56 @@ type SourceClickupAPIUpdate struct {
// The ID of your team in ClickUp. Retrieve it from the `/team` of the ClickUp API. See here.
TeamID *string `json:"team_id,omitempty"`
}
+
+func (s SourceClickupAPIUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClickupAPIUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClickupAPIUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceClickupAPIUpdate) GetFolderID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FolderID
+}
+
+func (o *SourceClickupAPIUpdate) GetIncludeClosedTasks() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeClosedTasks
+}
+
+func (o *SourceClickupAPIUpdate) GetListID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ListID
+}
+
+func (o *SourceClickupAPIUpdate) GetSpaceID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SpaceID
+}
+
+func (o *SourceClickupAPIUpdate) GetTeamID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.TeamID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclockify.go b/internal/sdk/pkg/models/shared/sourceclockify.go
old mode 100755
new mode 100644
index 0b00420e4..ed48c2f73
--- a/internal/sdk/pkg/models/shared/sourceclockify.go
+++ b/internal/sdk/pkg/models/shared/sourceclockify.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceClockifyClockify string
+type Clockify string
const (
- SourceClockifyClockifyClockify SourceClockifyClockify = "clockify"
+ ClockifyClockify Clockify = "clockify"
)
-func (e SourceClockifyClockify) ToPointer() *SourceClockifyClockify {
+func (e Clockify) ToPointer() *Clockify {
return &e
}
-func (e *SourceClockifyClockify) UnmarshalJSON(data []byte) error {
+func (e *Clockify) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "clockify":
- *e = SourceClockifyClockify(v)
+ *e = Clockify(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceClockifyClockify: %v", v)
+ return fmt.Errorf("invalid value for Clockify: %v", v)
}
}
@@ -35,8 +36,44 @@ type SourceClockify struct {
// You can get your api access_key here This API is Case Sensitive.
APIKey string `json:"api_key"`
// The URL for the Clockify API. This should only need to be modified if connecting to an enterprise version of Clockify.
- APIURL *string `json:"api_url,omitempty"`
- SourceType SourceClockifyClockify `json:"sourceType"`
+ APIURL *string `default:"https://api.clockify.me" json:"api_url"`
+ sourceType Clockify `const:"clockify" json:"sourceType"`
// WorkSpace Id
WorkspaceID string `json:"workspace_id"`
}
+
+func (s SourceClockify) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClockify) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClockify) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceClockify) GetAPIURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIURL
+}
+
+func (o *SourceClockify) GetSourceType() Clockify {
+ return ClockifyClockify
+}
+
+func (o *SourceClockify) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclockifycreaterequest.go b/internal/sdk/pkg/models/shared/sourceclockifycreaterequest.go
old mode 100755
new mode 100644
index 534a4507c..6b628acf6
--- a/internal/sdk/pkg/models/shared/sourceclockifycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceclockifycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceClockifyCreateRequest struct {
Configuration SourceClockify `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceClockifyCreateRequest) GetConfiguration() SourceClockify {
+ if o == nil {
+ return SourceClockify{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceClockifyCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceClockifyCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceClockifyCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceClockifyCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclockifyputrequest.go b/internal/sdk/pkg/models/shared/sourceclockifyputrequest.go
old mode 100755
new mode 100644
index 56fdecd0f..dc04b5710
--- a/internal/sdk/pkg/models/shared/sourceclockifyputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceclockifyputrequest.go
@@ -7,3 +7,24 @@ type SourceClockifyPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceClockifyPutRequest) GetConfiguration() SourceClockifyUpdate {
+ if o == nil {
+ return SourceClockifyUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceClockifyPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceClockifyPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclockifyupdate.go b/internal/sdk/pkg/models/shared/sourceclockifyupdate.go
old mode 100755
new mode 100644
index 79a1aaca9..d19ddd532
--- a/internal/sdk/pkg/models/shared/sourceclockifyupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceclockifyupdate.go
@@ -2,11 +2,47 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceClockifyUpdate struct {
// You can get your api access_key here This API is Case Sensitive.
APIKey string `json:"api_key"`
// The URL for the Clockify API. This should only need to be modified if connecting to an enterprise version of Clockify.
- APIURL *string `json:"api_url,omitempty"`
+ APIURL *string `default:"https://api.clockify.me" json:"api_url"`
// WorkSpace Id
WorkspaceID string `json:"workspace_id"`
}
+
+func (s SourceClockifyUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceClockifyUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceClockifyUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceClockifyUpdate) GetAPIURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIURL
+}
+
+func (o *SourceClockifyUpdate) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclosecom.go b/internal/sdk/pkg/models/shared/sourceclosecom.go
old mode 100755
new mode 100644
index dbbde85ae..26a2ad42b
--- a/internal/sdk/pkg/models/shared/sourceclosecom.go
+++ b/internal/sdk/pkg/models/shared/sourceclosecom.go
@@ -3,39 +3,69 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceCloseComCloseCom string
+type CloseCom string
const (
- SourceCloseComCloseComCloseCom SourceCloseComCloseCom = "close-com"
+ CloseComCloseCom CloseCom = "close-com"
)
-func (e SourceCloseComCloseCom) ToPointer() *SourceCloseComCloseCom {
+func (e CloseCom) ToPointer() *CloseCom {
return &e
}
-func (e *SourceCloseComCloseCom) UnmarshalJSON(data []byte) error {
+func (e *CloseCom) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "close-com":
- *e = SourceCloseComCloseCom(v)
+ *e = CloseCom(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceCloseComCloseCom: %v", v)
+ return fmt.Errorf("invalid value for CloseCom: %v", v)
}
}
type SourceCloseCom struct {
// Close.com API key (usually starts with 'api_'; find yours here).
- APIKey string `json:"api_key"`
- SourceType SourceCloseComCloseCom `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType CloseCom `const:"close-com" json:"sourceType"`
// The start date to sync data; all data after this date will be replicated. Leave blank to retrieve all the data available in the account. Format: YYYY-MM-DD.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2021-01-01" json:"start_date"`
+}
+
+func (s SourceCloseCom) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceCloseCom) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceCloseCom) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceCloseCom) GetSourceType() CloseCom {
+ return CloseComCloseCom
+}
+
+func (o *SourceCloseCom) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceclosecomcreaterequest.go b/internal/sdk/pkg/models/shared/sourceclosecomcreaterequest.go
old mode 100755
new mode 100644
index 8049ddd74..ad8225ac5
--- a/internal/sdk/pkg/models/shared/sourceclosecomcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceclosecomcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceCloseComCreateRequest struct {
Configuration SourceCloseCom `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceCloseComCreateRequest) GetConfiguration() SourceCloseCom {
+ if o == nil {
+ return SourceCloseCom{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceCloseComCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceCloseComCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceCloseComCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceCloseComCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclosecomputrequest.go b/internal/sdk/pkg/models/shared/sourceclosecomputrequest.go
old mode 100755
new mode 100644
index 2119abcfc..d6e7f72b8
--- a/internal/sdk/pkg/models/shared/sourceclosecomputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceclosecomputrequest.go
@@ -7,3 +7,24 @@ type SourceCloseComPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceCloseComPutRequest) GetConfiguration() SourceCloseComUpdate {
+ if o == nil {
+ return SourceCloseComUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceCloseComPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceCloseComPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceclosecomupdate.go b/internal/sdk/pkg/models/shared/sourceclosecomupdate.go
old mode 100755
new mode 100644
index 59b8a0589..5e0b13491
--- a/internal/sdk/pkg/models/shared/sourceclosecomupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceclosecomupdate.go
@@ -3,12 +3,38 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceCloseComUpdate struct {
// Close.com API key (usually starts with 'api_'; find yours here).
APIKey string `json:"api_key"`
// The start date to sync data; all data after this date will be replicated. Leave blank to retrieve all the data available in the account. Format: YYYY-MM-DD.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2021-01-01" json:"start_date"`
+}
+
+func (s SourceCloseComUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceCloseComUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceCloseComUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceCloseComUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcecoda.go b/internal/sdk/pkg/models/shared/sourcecoda.go
old mode 100755
new mode 100644
index afab7ae92..3376eadd5
--- a/internal/sdk/pkg/models/shared/sourcecoda.go
+++ b/internal/sdk/pkg/models/shared/sourcecoda.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceCodaCoda string
+type Coda string
const (
- SourceCodaCodaCoda SourceCodaCoda = "coda"
+ CodaCoda Coda = "coda"
)
-func (e SourceCodaCoda) ToPointer() *SourceCodaCoda {
+func (e Coda) ToPointer() *Coda {
return &e
}
-func (e *SourceCodaCoda) UnmarshalJSON(data []byte) error {
+func (e *Coda) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "coda":
- *e = SourceCodaCoda(v)
+ *e = Coda(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceCodaCoda: %v", v)
+ return fmt.Errorf("invalid value for Coda: %v", v)
}
}
type SourceCoda struct {
// Bearer token
- AuthToken string `json:"auth_token"`
- SourceType SourceCodaCoda `json:"sourceType"`
+ AuthToken string `json:"auth_token"`
+ sourceType Coda `const:"coda" json:"sourceType"`
+}
+
+func (s SourceCoda) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceCoda) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceCoda) GetAuthToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthToken
+}
+
+func (o *SourceCoda) GetSourceType() Coda {
+ return CodaCoda
}
diff --git a/internal/sdk/pkg/models/shared/sourcecodacreaterequest.go b/internal/sdk/pkg/models/shared/sourcecodacreaterequest.go
old mode 100755
new mode 100644
index f71c078c5..afaaa96ff
--- a/internal/sdk/pkg/models/shared/sourcecodacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcecodacreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceCodaCreateRequest struct {
Configuration SourceCoda `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceCodaCreateRequest) GetConfiguration() SourceCoda {
+ if o == nil {
+ return SourceCoda{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceCodaCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceCodaCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceCodaCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceCodaCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecodaputrequest.go b/internal/sdk/pkg/models/shared/sourcecodaputrequest.go
old mode 100755
new mode 100644
index 88c7c4f6e..4eb694aed
--- a/internal/sdk/pkg/models/shared/sourcecodaputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcecodaputrequest.go
@@ -7,3 +7,24 @@ type SourceCodaPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceCodaPutRequest) GetConfiguration() SourceCodaUpdate {
+ if o == nil {
+ return SourceCodaUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceCodaPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceCodaPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecodaupdate.go b/internal/sdk/pkg/models/shared/sourcecodaupdate.go
old mode 100755
new mode 100644
index d8a4c709c..511ecb08d
--- a/internal/sdk/pkg/models/shared/sourcecodaupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcecodaupdate.go
@@ -6,3 +6,10 @@ type SourceCodaUpdate struct {
// Bearer token
AuthToken string `json:"auth_token"`
}
+
+func (o *SourceCodaUpdate) GetAuthToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthToken
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecoinapi.go b/internal/sdk/pkg/models/shared/sourcecoinapi.go
old mode 100755
new mode 100644
index 7680fb313..5715528f1
--- a/internal/sdk/pkg/models/shared/sourcecoinapi.go
+++ b/internal/sdk/pkg/models/shared/sourcecoinapi.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourceCoinAPIEnvironment - The environment to use. Either sandbox or production.
@@ -35,27 +36,27 @@ func (e *SourceCoinAPIEnvironment) UnmarshalJSON(data []byte) error {
}
}
-type SourceCoinAPICoinAPI string
+type CoinAPI string
const (
- SourceCoinAPICoinAPICoinAPI SourceCoinAPICoinAPI = "coin-api"
+ CoinAPICoinAPI CoinAPI = "coin-api"
)
-func (e SourceCoinAPICoinAPI) ToPointer() *SourceCoinAPICoinAPI {
+func (e CoinAPI) ToPointer() *CoinAPI {
return &e
}
-func (e *SourceCoinAPICoinAPI) UnmarshalJSON(data []byte) error {
+func (e *CoinAPI) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "coin-api":
- *e = SourceCoinAPICoinAPI(v)
+ *e = CoinAPI(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceCoinAPICoinAPI: %v", v)
+ return fmt.Errorf("invalid value for CoinAPI: %v", v)
}
}
@@ -69,15 +70,15 @@ type SourceCoinAPI struct {
EndDate *string `json:"end_date,omitempty"`
// The environment to use. Either sandbox or production.
//
- Environment SourceCoinAPIEnvironment `json:"environment"`
+ Environment *SourceCoinAPIEnvironment `default:"sandbox" json:"environment"`
// The maximum number of elements to return. If not supplied, the default
// is 100. For numbers larger than 100, each 100 items is counted as one
// request for pricing purposes. Maximum value is 100000.
//
- Limit *int64 `json:"limit,omitempty"`
+ Limit *int64 `default:"100" json:"limit"`
// The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get
- Period string `json:"period"`
- SourceType SourceCoinAPICoinAPI `json:"sourceType"`
+ Period string `json:"period"`
+ sourceType CoinAPI `const:"coin-api" json:"sourceType"`
// The start date in ISO 8601 format.
StartDate string `json:"start_date"`
// The symbol ID to use. See the documentation for a list.
@@ -85,3 +86,67 @@ type SourceCoinAPI struct {
//
SymbolID string `json:"symbol_id"`
}
+
+func (s SourceCoinAPI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceCoinAPI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceCoinAPI) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceCoinAPI) GetEndDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceCoinAPI) GetEnvironment() *SourceCoinAPIEnvironment {
+ if o == nil {
+ return nil
+ }
+ return o.Environment
+}
+
+func (o *SourceCoinAPI) GetLimit() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Limit
+}
+
+func (o *SourceCoinAPI) GetPeriod() string {
+ if o == nil {
+ return ""
+ }
+ return o.Period
+}
+
+func (o *SourceCoinAPI) GetSourceType() CoinAPI {
+ return CoinAPICoinAPI
+}
+
+func (o *SourceCoinAPI) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceCoinAPI) GetSymbolID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SymbolID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecoinapicreaterequest.go b/internal/sdk/pkg/models/shared/sourcecoinapicreaterequest.go
old mode 100755
new mode 100644
index b0ce0a896..491fe614e
--- a/internal/sdk/pkg/models/shared/sourcecoinapicreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcecoinapicreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceCoinAPICreateRequest struct {
Configuration SourceCoinAPI `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceCoinAPICreateRequest) GetConfiguration() SourceCoinAPI {
+ if o == nil {
+ return SourceCoinAPI{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceCoinAPICreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceCoinAPICreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceCoinAPICreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceCoinAPICreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecoinapiputrequest.go b/internal/sdk/pkg/models/shared/sourcecoinapiputrequest.go
old mode 100755
new mode 100644
index 484ad1707..263b669f7
--- a/internal/sdk/pkg/models/shared/sourcecoinapiputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcecoinapiputrequest.go
@@ -7,3 +7,24 @@ type SourceCoinAPIPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceCoinAPIPutRequest) GetConfiguration() SourceCoinAPIUpdate {
+ if o == nil {
+ return SourceCoinAPIUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceCoinAPIPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceCoinAPIPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecoinapiupdate.go b/internal/sdk/pkg/models/shared/sourcecoinapiupdate.go
old mode 100755
new mode 100644
index 621577036..ff6c49ab3
--- a/internal/sdk/pkg/models/shared/sourcecoinapiupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcecoinapiupdate.go
@@ -5,21 +5,22 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceCoinAPIUpdateEnvironment - The environment to use. Either sandbox or production.
-type SourceCoinAPIUpdateEnvironment string
+// Environment - The environment to use. Either sandbox or production.
+type Environment string
const (
- SourceCoinAPIUpdateEnvironmentSandbox SourceCoinAPIUpdateEnvironment = "sandbox"
- SourceCoinAPIUpdateEnvironmentProduction SourceCoinAPIUpdateEnvironment = "production"
+ EnvironmentSandbox Environment = "sandbox"
+ EnvironmentProduction Environment = "production"
)
-func (e SourceCoinAPIUpdateEnvironment) ToPointer() *SourceCoinAPIUpdateEnvironment {
+func (e Environment) ToPointer() *Environment {
return &e
}
-func (e *SourceCoinAPIUpdateEnvironment) UnmarshalJSON(data []byte) error {
+func (e *Environment) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -28,10 +29,10 @@ func (e *SourceCoinAPIUpdateEnvironment) UnmarshalJSON(data []byte) error {
case "sandbox":
fallthrough
case "production":
- *e = SourceCoinAPIUpdateEnvironment(v)
+ *e = Environment(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceCoinAPIUpdateEnvironment: %v", v)
+ return fmt.Errorf("invalid value for Environment: %v", v)
}
}
@@ -45,12 +46,12 @@ type SourceCoinAPIUpdate struct {
EndDate *string `json:"end_date,omitempty"`
// The environment to use. Either sandbox or production.
//
- Environment SourceCoinAPIUpdateEnvironment `json:"environment"`
+ Environment *Environment `default:"sandbox" json:"environment"`
// The maximum number of elements to return. If not supplied, the default
// is 100. For numbers larger than 100, each 100 items is counted as one
// request for pricing purposes. Maximum value is 100000.
//
- Limit *int64 `json:"limit,omitempty"`
+ Limit *int64 `default:"100" json:"limit"`
// The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get
Period string `json:"period"`
// The start date in ISO 8601 format.
@@ -60,3 +61,63 @@ type SourceCoinAPIUpdate struct {
//
SymbolID string `json:"symbol_id"`
}
+
+func (s SourceCoinAPIUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceCoinAPIUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceCoinAPIUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceCoinAPIUpdate) GetEndDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceCoinAPIUpdate) GetEnvironment() *Environment {
+ if o == nil {
+ return nil
+ }
+ return o.Environment
+}
+
+func (o *SourceCoinAPIUpdate) GetLimit() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Limit
+}
+
+func (o *SourceCoinAPIUpdate) GetPeriod() string {
+ if o == nil {
+ return ""
+ }
+ return o.Period
+}
+
+func (o *SourceCoinAPIUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceCoinAPIUpdate) GetSymbolID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SymbolID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecoinmarketcap.go b/internal/sdk/pkg/models/shared/sourcecoinmarketcap.go
old mode 100755
new mode 100644
index b3b9c772c..5484ed48e
--- a/internal/sdk/pkg/models/shared/sourcecoinmarketcap.go
+++ b/internal/sdk/pkg/models/shared/sourcecoinmarketcap.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourceCoinmarketcapDataType - /latest: Latest market ticker quotes and averages for cryptocurrencies and exchanges. /historical: Intervals of historic market data like OHLCV data or data for use in charting libraries. See here.
@@ -35,27 +36,27 @@ func (e *SourceCoinmarketcapDataType) UnmarshalJSON(data []byte) error {
}
}
-type SourceCoinmarketcapCoinmarketcap string
+type Coinmarketcap string
const (
- SourceCoinmarketcapCoinmarketcapCoinmarketcap SourceCoinmarketcapCoinmarketcap = "coinmarketcap"
+ CoinmarketcapCoinmarketcap Coinmarketcap = "coinmarketcap"
)
-func (e SourceCoinmarketcapCoinmarketcap) ToPointer() *SourceCoinmarketcapCoinmarketcap {
+func (e Coinmarketcap) ToPointer() *Coinmarketcap {
return &e
}
-func (e *SourceCoinmarketcapCoinmarketcap) UnmarshalJSON(data []byte) error {
+func (e *Coinmarketcap) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "coinmarketcap":
- *e = SourceCoinmarketcapCoinmarketcap(v)
+ *e = Coinmarketcap(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceCoinmarketcapCoinmarketcap: %v", v)
+ return fmt.Errorf("invalid value for Coinmarketcap: %v", v)
}
}
@@ -63,8 +64,44 @@ type SourceCoinmarketcap struct {
// Your API Key. See here. The token is case sensitive.
APIKey string `json:"api_key"`
// /latest: Latest market ticker quotes and averages for cryptocurrencies and exchanges. /historical: Intervals of historic market data like OHLCV data or data for use in charting libraries. See here.
- DataType SourceCoinmarketcapDataType `json:"data_type"`
- SourceType SourceCoinmarketcapCoinmarketcap `json:"sourceType"`
+ DataType SourceCoinmarketcapDataType `json:"data_type"`
+ sourceType Coinmarketcap `const:"coinmarketcap" json:"sourceType"`
// Cryptocurrency symbols. (only used for quotes stream)
Symbols []string `json:"symbols,omitempty"`
}
+
+func (s SourceCoinmarketcap) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceCoinmarketcap) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceCoinmarketcap) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceCoinmarketcap) GetDataType() SourceCoinmarketcapDataType {
+ if o == nil {
+ return SourceCoinmarketcapDataType("")
+ }
+ return o.DataType
+}
+
+func (o *SourceCoinmarketcap) GetSourceType() Coinmarketcap {
+ return CoinmarketcapCoinmarketcap
+}
+
+func (o *SourceCoinmarketcap) GetSymbols() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Symbols
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecoinmarketcapcreaterequest.go b/internal/sdk/pkg/models/shared/sourcecoinmarketcapcreaterequest.go
old mode 100755
new mode 100644
index 9745afc31..b1b779f54
--- a/internal/sdk/pkg/models/shared/sourcecoinmarketcapcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcecoinmarketcapcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceCoinmarketcapCreateRequest struct {
Configuration SourceCoinmarketcap `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceCoinmarketcapCreateRequest) GetConfiguration() SourceCoinmarketcap {
+ if o == nil {
+ return SourceCoinmarketcap{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceCoinmarketcapCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceCoinmarketcapCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceCoinmarketcapCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceCoinmarketcapCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecoinmarketcapputrequest.go b/internal/sdk/pkg/models/shared/sourcecoinmarketcapputrequest.go
old mode 100755
new mode 100644
index 6e6836070..33aefb71d
--- a/internal/sdk/pkg/models/shared/sourcecoinmarketcapputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcecoinmarketcapputrequest.go
@@ -7,3 +7,24 @@ type SourceCoinmarketcapPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceCoinmarketcapPutRequest) GetConfiguration() SourceCoinmarketcapUpdate {
+ if o == nil {
+ return SourceCoinmarketcapUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceCoinmarketcapPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceCoinmarketcapPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecoinmarketcapupdate.go b/internal/sdk/pkg/models/shared/sourcecoinmarketcapupdate.go
old mode 100755
new mode 100644
index bb38d0ed8..f7d6f8e5d
--- a/internal/sdk/pkg/models/shared/sourcecoinmarketcapupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcecoinmarketcapupdate.go
@@ -7,19 +7,19 @@ import (
"fmt"
)
-// SourceCoinmarketcapUpdateDataType - /latest: Latest market ticker quotes and averages for cryptocurrencies and exchanges. /historical: Intervals of historic market data like OHLCV data or data for use in charting libraries. See here.
-type SourceCoinmarketcapUpdateDataType string
+// DataType - /latest: Latest market ticker quotes and averages for cryptocurrencies and exchanges. /historical: Intervals of historic market data like OHLCV data or data for use in charting libraries. See here.
+type DataType string
const (
- SourceCoinmarketcapUpdateDataTypeLatest SourceCoinmarketcapUpdateDataType = "latest"
- SourceCoinmarketcapUpdateDataTypeHistorical SourceCoinmarketcapUpdateDataType = "historical"
+ DataTypeLatest DataType = "latest"
+ DataTypeHistorical DataType = "historical"
)
-func (e SourceCoinmarketcapUpdateDataType) ToPointer() *SourceCoinmarketcapUpdateDataType {
+func (e DataType) ToPointer() *DataType {
return &e
}
-func (e *SourceCoinmarketcapUpdateDataType) UnmarshalJSON(data []byte) error {
+func (e *DataType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -28,10 +28,10 @@ func (e *SourceCoinmarketcapUpdateDataType) UnmarshalJSON(data []byte) error {
case "latest":
fallthrough
case "historical":
- *e = SourceCoinmarketcapUpdateDataType(v)
+ *e = DataType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceCoinmarketcapUpdateDataType: %v", v)
+ return fmt.Errorf("invalid value for DataType: %v", v)
}
}
@@ -39,7 +39,28 @@ type SourceCoinmarketcapUpdate struct {
// Your API Key. See here. The token is case sensitive.
APIKey string `json:"api_key"`
// /latest: Latest market ticker quotes and averages for cryptocurrencies and exchanges. /historical: Intervals of historic market data like OHLCV data or data for use in charting libraries. See here.
- DataType SourceCoinmarketcapUpdateDataType `json:"data_type"`
+ DataType DataType `json:"data_type"`
// Cryptocurrency symbols. (only used for quotes stream)
Symbols []string `json:"symbols,omitempty"`
}
+
+func (o *SourceCoinmarketcapUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceCoinmarketcapUpdate) GetDataType() DataType {
+ if o == nil {
+ return DataType("")
+ }
+ return o.DataType
+}
+
+func (o *SourceCoinmarketcapUpdate) GetSymbols() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Symbols
+}
diff --git a/internal/sdk/pkg/models/shared/sourceconfigcat.go b/internal/sdk/pkg/models/shared/sourceconfigcat.go
old mode 100755
new mode 100644
index 3a063229a..7b9a6a224
--- a/internal/sdk/pkg/models/shared/sourceconfigcat.go
+++ b/internal/sdk/pkg/models/shared/sourceconfigcat.go
@@ -5,36 +5,66 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceConfigcatConfigcat string
+type Configcat string
const (
- SourceConfigcatConfigcatConfigcat SourceConfigcatConfigcat = "configcat"
+ ConfigcatConfigcat Configcat = "configcat"
)
-func (e SourceConfigcatConfigcat) ToPointer() *SourceConfigcatConfigcat {
+func (e Configcat) ToPointer() *Configcat {
return &e
}
-func (e *SourceConfigcatConfigcat) UnmarshalJSON(data []byte) error {
+func (e *Configcat) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "configcat":
- *e = SourceConfigcatConfigcat(v)
+ *e = Configcat(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceConfigcatConfigcat: %v", v)
+ return fmt.Errorf("invalid value for Configcat: %v", v)
}
}
type SourceConfigcat struct {
// Basic auth password. See here.
- Password string `json:"password"`
- SourceType SourceConfigcatConfigcat `json:"sourceType"`
+ Password string `json:"password"`
+ sourceType Configcat `const:"configcat" json:"sourceType"`
// Basic auth user name. See here.
Username string `json:"username"`
}
+
+func (s SourceConfigcat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceConfigcat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceConfigcat) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceConfigcat) GetSourceType() Configcat {
+ return ConfigcatConfigcat
+}
+
+func (o *SourceConfigcat) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceconfigcatcreaterequest.go b/internal/sdk/pkg/models/shared/sourceconfigcatcreaterequest.go
old mode 100755
new mode 100644
index 2e78f6bef..ecb068aa5
--- a/internal/sdk/pkg/models/shared/sourceconfigcatcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceconfigcatcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceConfigcatCreateRequest struct {
Configuration SourceConfigcat `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceConfigcatCreateRequest) GetConfiguration() SourceConfigcat {
+ if o == nil {
+ return SourceConfigcat{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceConfigcatCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceConfigcatCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceConfigcatCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceConfigcatCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceconfigcatputrequest.go b/internal/sdk/pkg/models/shared/sourceconfigcatputrequest.go
old mode 100755
new mode 100644
index 1032cf218..d828eeac1
--- a/internal/sdk/pkg/models/shared/sourceconfigcatputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceconfigcatputrequest.go
@@ -7,3 +7,24 @@ type SourceConfigcatPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceConfigcatPutRequest) GetConfiguration() SourceConfigcatUpdate {
+ if o == nil {
+ return SourceConfigcatUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceConfigcatPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceConfigcatPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceconfigcatupdate.go b/internal/sdk/pkg/models/shared/sourceconfigcatupdate.go
old mode 100755
new mode 100644
index e8610ab33..7d4f2416c
--- a/internal/sdk/pkg/models/shared/sourceconfigcatupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceconfigcatupdate.go
@@ -8,3 +8,17 @@ type SourceConfigcatUpdate struct {
// Basic auth user name. See here.
Username string `json:"username"`
}
+
+func (o *SourceConfigcatUpdate) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceConfigcatUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceconfluence.go b/internal/sdk/pkg/models/shared/sourceconfluence.go
old mode 100755
new mode 100644
index 81df49216..b05d705c2
--- a/internal/sdk/pkg/models/shared/sourceconfluence.go
+++ b/internal/sdk/pkg/models/shared/sourceconfluence.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceConfluenceConfluence string
+type Confluence string
const (
- SourceConfluenceConfluenceConfluence SourceConfluenceConfluence = "confluence"
+ ConfluenceConfluence Confluence = "confluence"
)
-func (e SourceConfluenceConfluence) ToPointer() *SourceConfluenceConfluence {
+func (e Confluence) ToPointer() *Confluence {
return &e
}
-func (e *SourceConfluenceConfluence) UnmarshalJSON(data []byte) error {
+func (e *Confluence) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "confluence":
- *e = SourceConfluenceConfluence(v)
+ *e = Confluence(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceConfluenceConfluence: %v", v)
+ return fmt.Errorf("invalid value for Confluence: %v", v)
}
}
@@ -37,6 +38,42 @@ type SourceConfluence struct {
// Your Confluence domain name
DomainName string `json:"domain_name"`
// Your Confluence login email
- Email string `json:"email"`
- SourceType SourceConfluenceConfluence `json:"sourceType"`
+ Email string `json:"email"`
+ sourceType Confluence `const:"confluence" json:"sourceType"`
+}
+
+func (s SourceConfluence) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceConfluence) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceConfluence) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceConfluence) GetDomainName() string {
+ if o == nil {
+ return ""
+ }
+ return o.DomainName
+}
+
+func (o *SourceConfluence) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+func (o *SourceConfluence) GetSourceType() Confluence {
+ return ConfluenceConfluence
}
diff --git a/internal/sdk/pkg/models/shared/sourceconfluencecreaterequest.go b/internal/sdk/pkg/models/shared/sourceconfluencecreaterequest.go
old mode 100755
new mode 100644
index 550fb26ad..8b7e55546
--- a/internal/sdk/pkg/models/shared/sourceconfluencecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceconfluencecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceConfluenceCreateRequest struct {
Configuration SourceConfluence `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceConfluenceCreateRequest) GetConfiguration() SourceConfluence {
+ if o == nil {
+ return SourceConfluence{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceConfluenceCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceConfluenceCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceConfluenceCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceConfluenceCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceconfluenceputrequest.go b/internal/sdk/pkg/models/shared/sourceconfluenceputrequest.go
old mode 100755
new mode 100644
index 36cb08f7e..4c8586cf6
--- a/internal/sdk/pkg/models/shared/sourceconfluenceputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceconfluenceputrequest.go
@@ -7,3 +7,24 @@ type SourceConfluencePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceConfluencePutRequest) GetConfiguration() SourceConfluenceUpdate {
+ if o == nil {
+ return SourceConfluenceUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceConfluencePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceConfluencePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceconfluenceupdate.go b/internal/sdk/pkg/models/shared/sourceconfluenceupdate.go
old mode 100755
new mode 100644
index f93a84b39..60f8f36c9
--- a/internal/sdk/pkg/models/shared/sourceconfluenceupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceconfluenceupdate.go
@@ -10,3 +10,24 @@ type SourceConfluenceUpdate struct {
// Your Confluence login email
Email string `json:"email"`
}
+
+func (o *SourceConfluenceUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceConfluenceUpdate) GetDomainName() string {
+ if o == nil {
+ return ""
+ }
+ return o.DomainName
+}
+
+func (o *SourceConfluenceUpdate) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
diff --git a/internal/sdk/pkg/models/shared/sourceconvex.go b/internal/sdk/pkg/models/shared/sourceconvex.go
old mode 100755
new mode 100644
index 3d125af8d..53a3a52c0
--- a/internal/sdk/pkg/models/shared/sourceconvex.go
+++ b/internal/sdk/pkg/models/shared/sourceconvex.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceConvexConvex string
@@ -35,5 +36,34 @@ type SourceConvex struct {
// API access key used to retrieve data from Convex.
AccessKey string `json:"access_key"`
DeploymentURL string `json:"deployment_url"`
- SourceType SourceConvexConvex `json:"sourceType"`
+ sourceType SourceConvexConvex `const:"convex" json:"sourceType"`
+}
+
+func (s SourceConvex) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceConvex) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceConvex) GetAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKey
+}
+
+func (o *SourceConvex) GetDeploymentURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.DeploymentURL
+}
+
+func (o *SourceConvex) GetSourceType() SourceConvexConvex {
+ return SourceConvexConvexConvex
}
diff --git a/internal/sdk/pkg/models/shared/sourceconvexcreaterequest.go b/internal/sdk/pkg/models/shared/sourceconvexcreaterequest.go
old mode 100755
new mode 100644
index 991a2f494..278460b4b
--- a/internal/sdk/pkg/models/shared/sourceconvexcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceconvexcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceConvexCreateRequest struct {
Configuration SourceConvex `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceConvexCreateRequest) GetConfiguration() SourceConvex {
+ if o == nil {
+ return SourceConvex{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceConvexCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceConvexCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceConvexCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceConvexCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceconvexputrequest.go b/internal/sdk/pkg/models/shared/sourceconvexputrequest.go
old mode 100755
new mode 100644
index cd4693941..2eb2a809a
--- a/internal/sdk/pkg/models/shared/sourceconvexputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceconvexputrequest.go
@@ -7,3 +7,24 @@ type SourceConvexPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceConvexPutRequest) GetConfiguration() SourceConvexUpdate {
+ if o == nil {
+ return SourceConvexUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceConvexPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceConvexPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceconvexupdate.go b/internal/sdk/pkg/models/shared/sourceconvexupdate.go
old mode 100755
new mode 100644
index b8b50d057..2f5c44297
--- a/internal/sdk/pkg/models/shared/sourceconvexupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceconvexupdate.go
@@ -7,3 +7,17 @@ type SourceConvexUpdate struct {
AccessKey string `json:"access_key"`
DeploymentURL string `json:"deployment_url"`
}
+
+func (o *SourceConvexUpdate) GetAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKey
+}
+
+func (o *SourceConvexUpdate) GetDeploymentURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.DeploymentURL
+}
diff --git a/internal/sdk/pkg/models/shared/sourcecreaterequest.go b/internal/sdk/pkg/models/shared/sourcecreaterequest.go
old mode 100755
new mode 100644
index 015c00358..1481357dd
--- a/internal/sdk/pkg/models/shared/sourcecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcecreaterequest.go
@@ -5,8 +5,46 @@ package shared
type SourceCreateRequest struct {
// The values required to configure the source.
Configuration interface{} `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceCreateRequest) GetConfiguration() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.Configuration
+}
+
+func (o *SourceCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedatascope.go b/internal/sdk/pkg/models/shared/sourcedatascope.go
old mode 100755
new mode 100644
index 34d102733..a45cf37c9
--- a/internal/sdk/pkg/models/shared/sourcedatascope.go
+++ b/internal/sdk/pkg/models/shared/sourcedatascope.go
@@ -5,36 +5,66 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceDatascopeDatascope string
+type Datascope string
const (
- SourceDatascopeDatascopeDatascope SourceDatascopeDatascope = "datascope"
+ DatascopeDatascope Datascope = "datascope"
)
-func (e SourceDatascopeDatascope) ToPointer() *SourceDatascopeDatascope {
+func (e Datascope) ToPointer() *Datascope {
return &e
}
-func (e *SourceDatascopeDatascope) UnmarshalJSON(data []byte) error {
+func (e *Datascope) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "datascope":
- *e = SourceDatascopeDatascope(v)
+ *e = Datascope(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceDatascopeDatascope: %v", v)
+ return fmt.Errorf("invalid value for Datascope: %v", v)
}
}
type SourceDatascope struct {
// API Key
- APIKey string `json:"api_key"`
- SourceType SourceDatascopeDatascope `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Datascope `const:"datascope" json:"sourceType"`
// Start date for the data to be replicated
StartDate string `json:"start_date"`
}
+
+func (s SourceDatascope) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceDatascope) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceDatascope) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceDatascope) GetSourceType() Datascope {
+ return DatascopeDatascope
+}
+
+func (o *SourceDatascope) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedatascopecreaterequest.go b/internal/sdk/pkg/models/shared/sourcedatascopecreaterequest.go
old mode 100755
new mode 100644
index 2c9cf5659..be297c737
--- a/internal/sdk/pkg/models/shared/sourcedatascopecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedatascopecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceDatascopeCreateRequest struct {
Configuration SourceDatascope `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDatascopeCreateRequest) GetConfiguration() SourceDatascope {
+ if o == nil {
+ return SourceDatascope{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDatascopeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceDatascopeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDatascopeCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceDatascopeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedatascopeputrequest.go b/internal/sdk/pkg/models/shared/sourcedatascopeputrequest.go
old mode 100755
new mode 100644
index 1ffde54e8..cb5cf925b
--- a/internal/sdk/pkg/models/shared/sourcedatascopeputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedatascopeputrequest.go
@@ -7,3 +7,24 @@ type SourceDatascopePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDatascopePutRequest) GetConfiguration() SourceDatascopeUpdate {
+ if o == nil {
+ return SourceDatascopeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDatascopePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDatascopePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedatascopeupdate.go b/internal/sdk/pkg/models/shared/sourcedatascopeupdate.go
old mode 100755
new mode 100644
index c66185765..1c9fd04e7
--- a/internal/sdk/pkg/models/shared/sourcedatascopeupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcedatascopeupdate.go
@@ -8,3 +8,17 @@ type SourceDatascopeUpdate struct {
// Start date for the data to be replicated
StartDate string `json:"start_date"`
}
+
+func (o *SourceDatascopeUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceDatascopeUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedelighted.go b/internal/sdk/pkg/models/shared/sourcedelighted.go
old mode 100755
new mode 100644
index 22ffd297d..15b86e2f5
--- a/internal/sdk/pkg/models/shared/sourcedelighted.go
+++ b/internal/sdk/pkg/models/shared/sourcedelighted.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceDelightedDelighted string
+type Delighted string
const (
- SourceDelightedDelightedDelighted SourceDelightedDelighted = "delighted"
+ DelightedDelighted Delighted = "delighted"
)
-func (e SourceDelightedDelighted) ToPointer() *SourceDelightedDelighted {
+func (e Delighted) ToPointer() *Delighted {
return &e
}
-func (e *SourceDelightedDelighted) UnmarshalJSON(data []byte) error {
+func (e *Delighted) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "delighted":
- *e = SourceDelightedDelighted(v)
+ *e = Delighted(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceDelightedDelighted: %v", v)
+ return fmt.Errorf("invalid value for Delighted: %v", v)
}
}
@@ -36,6 +37,35 @@ type SourceDelighted struct {
// A Delighted API key.
APIKey string `json:"api_key"`
// The date from which you'd like to replicate the data
- Since time.Time `json:"since"`
- SourceType SourceDelightedDelighted `json:"sourceType"`
+ Since time.Time `json:"since"`
+ sourceType Delighted `const:"delighted" json:"sourceType"`
+}
+
+func (s SourceDelighted) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceDelighted) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceDelighted) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceDelighted) GetSince() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.Since
+}
+
+func (o *SourceDelighted) GetSourceType() Delighted {
+ return DelightedDelighted
}
diff --git a/internal/sdk/pkg/models/shared/sourcedelightedcreaterequest.go b/internal/sdk/pkg/models/shared/sourcedelightedcreaterequest.go
old mode 100755
new mode 100644
index 09d7ee4dd..629975513
--- a/internal/sdk/pkg/models/shared/sourcedelightedcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedelightedcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceDelightedCreateRequest struct {
Configuration SourceDelighted `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDelightedCreateRequest) GetConfiguration() SourceDelighted {
+ if o == nil {
+ return SourceDelighted{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDelightedCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceDelightedCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDelightedCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceDelightedCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedelightedputrequest.go b/internal/sdk/pkg/models/shared/sourcedelightedputrequest.go
old mode 100755
new mode 100644
index c2b0b58d7..62f899701
--- a/internal/sdk/pkg/models/shared/sourcedelightedputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedelightedputrequest.go
@@ -7,3 +7,24 @@ type SourceDelightedPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDelightedPutRequest) GetConfiguration() SourceDelightedUpdate {
+ if o == nil {
+ return SourceDelightedUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDelightedPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDelightedPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedelightedupdate.go b/internal/sdk/pkg/models/shared/sourcedelightedupdate.go
old mode 100755
new mode 100644
index 7d332d5c4..99d3e6589
--- a/internal/sdk/pkg/models/shared/sourcedelightedupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcedelightedupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -12,3 +13,28 @@ type SourceDelightedUpdate struct {
// The date from which you'd like to replicate the data
Since time.Time `json:"since"`
}
+
+func (s SourceDelightedUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceDelightedUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceDelightedUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceDelightedUpdate) GetSince() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.Since
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedixa.go b/internal/sdk/pkg/models/shared/sourcedixa.go
old mode 100755
new mode 100644
index 1eaf5faa6..241575420
--- a/internal/sdk/pkg/models/shared/sourcedixa.go
+++ b/internal/sdk/pkg/models/shared/sourcedixa.go
@@ -5,29 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
)
-type SourceDixaDixa string
+type Dixa string
const (
- SourceDixaDixaDixa SourceDixaDixa = "dixa"
+ DixaDixa Dixa = "dixa"
)
-func (e SourceDixaDixa) ToPointer() *SourceDixaDixa {
+func (e Dixa) ToPointer() *Dixa {
return &e
}
-func (e *SourceDixaDixa) UnmarshalJSON(data []byte) error {
+func (e *Dixa) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "dixa":
- *e = SourceDixaDixa(v)
+ *e = Dixa(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceDixaDixa: %v", v)
+ return fmt.Errorf("invalid value for Dixa: %v", v)
}
}
@@ -35,8 +37,44 @@ type SourceDixa struct {
// Dixa API token
APIToken string `json:"api_token"`
// Number of days to batch into one request. Max 31.
- BatchSize *int64 `json:"batch_size,omitempty"`
- SourceType SourceDixaDixa `json:"sourceType"`
+ BatchSize *int64 `default:"31" json:"batch_size"`
+ sourceType Dixa `const:"dixa" json:"sourceType"`
// The connector pulls records updated from this date onwards.
- StartDate string `json:"start_date"`
+ StartDate time.Time `json:"start_date"`
+}
+
+func (s SourceDixa) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceDixa) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceDixa) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceDixa) GetBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchSize
+}
+
+func (o *SourceDixa) GetSourceType() Dixa {
+ return DixaDixa
+}
+
+func (o *SourceDixa) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcedixacreaterequest.go b/internal/sdk/pkg/models/shared/sourcedixacreaterequest.go
old mode 100755
new mode 100644
index ca6b59a45..2b847a8d2
--- a/internal/sdk/pkg/models/shared/sourcedixacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedixacreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceDixaCreateRequest struct {
Configuration SourceDixa `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDixaCreateRequest) GetConfiguration() SourceDixa {
+ if o == nil {
+ return SourceDixa{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDixaCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceDixaCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDixaCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceDixaCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedixaputrequest.go b/internal/sdk/pkg/models/shared/sourcedixaputrequest.go
old mode 100755
new mode 100644
index 824d5d121..38f711d90
--- a/internal/sdk/pkg/models/shared/sourcedixaputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedixaputrequest.go
@@ -7,3 +7,24 @@ type SourceDixaPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDixaPutRequest) GetConfiguration() SourceDixaUpdate {
+ if o == nil {
+ return SourceDixaUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDixaPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDixaPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedixaupdate.go b/internal/sdk/pkg/models/shared/sourcedixaupdate.go
old mode 100755
new mode 100644
index 23e4418f5..6fdf89c7c
--- a/internal/sdk/pkg/models/shared/sourcedixaupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcedixaupdate.go
@@ -2,11 +2,48 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
+)
+
type SourceDixaUpdate struct {
// Dixa API token
APIToken string `json:"api_token"`
// Number of days to batch into one request. Max 31.
- BatchSize *int64 `json:"batch_size,omitempty"`
+ BatchSize *int64 `default:"31" json:"batch_size"`
// The connector pulls records updated from this date onwards.
- StartDate string `json:"start_date"`
+ StartDate time.Time `json:"start_date"`
+}
+
+func (s SourceDixaUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceDixaUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceDixaUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceDixaUpdate) GetBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchSize
+}
+
+func (o *SourceDixaUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcedockerhub.go b/internal/sdk/pkg/models/shared/sourcedockerhub.go
old mode 100755
new mode 100644
index 28617edb6..8d65478a9
--- a/internal/sdk/pkg/models/shared/sourcedockerhub.go
+++ b/internal/sdk/pkg/models/shared/sourcedockerhub.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceDockerhubDockerhub string
+type Dockerhub string
const (
- SourceDockerhubDockerhubDockerhub SourceDockerhubDockerhub = "dockerhub"
+ DockerhubDockerhub Dockerhub = "dockerhub"
)
-func (e SourceDockerhubDockerhub) ToPointer() *SourceDockerhubDockerhub {
+func (e Dockerhub) ToPointer() *Dockerhub {
return &e
}
-func (e *SourceDockerhubDockerhub) UnmarshalJSON(data []byte) error {
+func (e *Dockerhub) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "dockerhub":
- *e = SourceDockerhubDockerhub(v)
+ *e = Dockerhub(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceDockerhubDockerhub: %v", v)
+ return fmt.Errorf("invalid value for Dockerhub: %v", v)
}
}
type SourceDockerhub struct {
// Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/ API call)
- DockerUsername string `json:"docker_username"`
- SourceType SourceDockerhubDockerhub `json:"sourceType"`
+ DockerUsername string `json:"docker_username"`
+ sourceType Dockerhub `const:"dockerhub" json:"sourceType"`
+}
+
+func (s SourceDockerhub) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceDockerhub) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceDockerhub) GetDockerUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.DockerUsername
+}
+
+func (o *SourceDockerhub) GetSourceType() Dockerhub {
+ return DockerhubDockerhub
}
diff --git a/internal/sdk/pkg/models/shared/sourcedockerhubcreaterequest.go b/internal/sdk/pkg/models/shared/sourcedockerhubcreaterequest.go
old mode 100755
new mode 100644
index a230d6787..17e544d05
--- a/internal/sdk/pkg/models/shared/sourcedockerhubcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedockerhubcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceDockerhubCreateRequest struct {
Configuration SourceDockerhub `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDockerhubCreateRequest) GetConfiguration() SourceDockerhub {
+ if o == nil {
+ return SourceDockerhub{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDockerhubCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceDockerhubCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDockerhubCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceDockerhubCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedockerhubputrequest.go b/internal/sdk/pkg/models/shared/sourcedockerhubputrequest.go
old mode 100755
new mode 100644
index 36d330631..68f1e7312
--- a/internal/sdk/pkg/models/shared/sourcedockerhubputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedockerhubputrequest.go
@@ -7,3 +7,24 @@ type SourceDockerhubPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDockerhubPutRequest) GetConfiguration() SourceDockerhubUpdate {
+ if o == nil {
+ return SourceDockerhubUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDockerhubPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDockerhubPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedockerhubupdate.go b/internal/sdk/pkg/models/shared/sourcedockerhubupdate.go
old mode 100755
new mode 100644
index a35aa52de..a7b4304c8
--- a/internal/sdk/pkg/models/shared/sourcedockerhubupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcedockerhubupdate.go
@@ -6,3 +6,10 @@ type SourceDockerhubUpdate struct {
// Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/ API call)
DockerUsername string `json:"docker_username"`
}
+
+func (o *SourceDockerhubUpdate) GetDockerUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.DockerUsername
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedremio.go b/internal/sdk/pkg/models/shared/sourcedremio.go
old mode 100755
new mode 100644
index c3707ac7b..6097adcda
--- a/internal/sdk/pkg/models/shared/sourcedremio.go
+++ b/internal/sdk/pkg/models/shared/sourcedremio.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceDremioDremio string
+type Dremio string
const (
- SourceDremioDremioDremio SourceDremioDremio = "dremio"
+ DremioDremio Dremio = "dremio"
)
-func (e SourceDremioDremio) ToPointer() *SourceDremioDremio {
+func (e Dremio) ToPointer() *Dremio {
return &e
}
-func (e *SourceDremioDremio) UnmarshalJSON(data []byte) error {
+func (e *Dremio) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "dremio":
- *e = SourceDremioDremio(v)
+ *e = Dremio(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceDremioDremio: %v", v)
+ return fmt.Errorf("invalid value for Dremio: %v", v)
}
}
@@ -35,6 +36,35 @@ type SourceDremio struct {
// API Key that is generated when you authenticate to Dremio API
APIKey string `json:"api_key"`
// URL of your Dremio instance
- BaseURL string `json:"base_url"`
- SourceType SourceDremioDremio `json:"sourceType"`
+ BaseURL *string `default:"https://app.dremio.cloud" json:"base_url"`
+ sourceType Dremio `const:"dremio" json:"sourceType"`
+}
+
+func (s SourceDremio) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceDremio) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceDremio) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceDremio) GetBaseURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.BaseURL
+}
+
+func (o *SourceDremio) GetSourceType() Dremio {
+ return DremioDremio
}
diff --git a/internal/sdk/pkg/models/shared/sourcedremiocreaterequest.go b/internal/sdk/pkg/models/shared/sourcedremiocreaterequest.go
old mode 100755
new mode 100644
index 9b857241a..b822e4192
--- a/internal/sdk/pkg/models/shared/sourcedremiocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedremiocreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceDremioCreateRequest struct {
Configuration SourceDremio `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDremioCreateRequest) GetConfiguration() SourceDremio {
+ if o == nil {
+ return SourceDremio{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDremioCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceDremioCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDremioCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceDremioCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedremioputrequest.go b/internal/sdk/pkg/models/shared/sourcedremioputrequest.go
old mode 100755
new mode 100644
index 2f35b6d19..5f21b67cc
--- a/internal/sdk/pkg/models/shared/sourcedremioputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedremioputrequest.go
@@ -7,3 +7,24 @@ type SourceDremioPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDremioPutRequest) GetConfiguration() SourceDremioUpdate {
+ if o == nil {
+ return SourceDremioUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDremioPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDremioPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedremioupdate.go b/internal/sdk/pkg/models/shared/sourcedremioupdate.go
old mode 100755
new mode 100644
index 48064785a..e751e164d
--- a/internal/sdk/pkg/models/shared/sourcedremioupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcedremioupdate.go
@@ -2,9 +2,38 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceDremioUpdate struct {
// API Key that is generated when you authenticate to Dremio API
APIKey string `json:"api_key"`
// URL of your Dremio instance
- BaseURL string `json:"base_url"`
+ BaseURL *string `default:"https://app.dremio.cloud" json:"base_url"`
+}
+
+func (s SourceDremioUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceDremioUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceDremioUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceDremioUpdate) GetBaseURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.BaseURL
}
diff --git a/internal/sdk/pkg/models/shared/sourcedynamodb.go b/internal/sdk/pkg/models/shared/sourcedynamodb.go
old mode 100755
new mode 100644
index 5fa433935..bdd9b7cc4
--- a/internal/sdk/pkg/models/shared/sourcedynamodb.go
+++ b/internal/sdk/pkg/models/shared/sourcedynamodb.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourceDynamodbDynamodbRegion - The region of the Dynamodb database
@@ -135,12 +136,62 @@ type SourceDynamodb struct {
// The access key id to access Dynamodb. Airbyte requires read permissions to the database
AccessKeyID string `json:"access_key_id"`
// the URL of the Dynamodb database
- Endpoint *string `json:"endpoint,omitempty"`
+ Endpoint *string `default:"" json:"endpoint"`
// The region of the Dynamodb database
- Region *SourceDynamodbDynamodbRegion `json:"region,omitempty"`
+ Region *SourceDynamodbDynamodbRegion `default:"" json:"region"`
// Comma separated reserved attribute names present in your tables
ReservedAttributeNames *string `json:"reserved_attribute_names,omitempty"`
// The corresponding secret to the access key id.
SecretAccessKey string `json:"secret_access_key"`
- SourceType SourceDynamodbDynamodb `json:"sourceType"`
+ sourceType SourceDynamodbDynamodb `const:"dynamodb" json:"sourceType"`
+}
+
+func (s SourceDynamodb) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceDynamodb) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceDynamodb) GetAccessKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKeyID
+}
+
+func (o *SourceDynamodb) GetEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Endpoint
+}
+
+func (o *SourceDynamodb) GetRegion() *SourceDynamodbDynamodbRegion {
+ if o == nil {
+ return nil
+ }
+ return o.Region
+}
+
+func (o *SourceDynamodb) GetReservedAttributeNames() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ReservedAttributeNames
+}
+
+func (o *SourceDynamodb) GetSecretAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretAccessKey
+}
+
+func (o *SourceDynamodb) GetSourceType() SourceDynamodbDynamodb {
+ return SourceDynamodbDynamodbDynamodb
}
diff --git a/internal/sdk/pkg/models/shared/sourcedynamodbcreaterequest.go b/internal/sdk/pkg/models/shared/sourcedynamodbcreaterequest.go
old mode 100755
new mode 100644
index 2bcc703df..bf76d7a1a
--- a/internal/sdk/pkg/models/shared/sourcedynamodbcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedynamodbcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceDynamodbCreateRequest struct {
Configuration SourceDynamodb `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDynamodbCreateRequest) GetConfiguration() SourceDynamodb {
+ if o == nil {
+ return SourceDynamodb{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDynamodbCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceDynamodbCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDynamodbCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceDynamodbCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedynamodbputrequest.go b/internal/sdk/pkg/models/shared/sourcedynamodbputrequest.go
old mode 100755
new mode 100644
index dfc8448b8..72ea3745e
--- a/internal/sdk/pkg/models/shared/sourcedynamodbputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcedynamodbputrequest.go
@@ -7,3 +7,24 @@ type SourceDynamodbPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceDynamodbPutRequest) GetConfiguration() SourceDynamodbUpdate {
+ if o == nil {
+ return SourceDynamodbUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceDynamodbPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceDynamodbPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcedynamodbupdate.go b/internal/sdk/pkg/models/shared/sourcedynamodbupdate.go
old mode 100755
new mode 100644
index 064074af1..e92cf2e65
--- a/internal/sdk/pkg/models/shared/sourcedynamodbupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcedynamodbupdate.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourceDynamodbUpdateDynamodbRegion - The region of the Dynamodb database
@@ -111,11 +112,57 @@ type SourceDynamodbUpdate struct {
// The access key id to access Dynamodb. Airbyte requires read permissions to the database
AccessKeyID string `json:"access_key_id"`
// the URL of the Dynamodb database
- Endpoint *string `json:"endpoint,omitempty"`
+ Endpoint *string `default:"" json:"endpoint"`
// The region of the Dynamodb database
- Region *SourceDynamodbUpdateDynamodbRegion `json:"region,omitempty"`
+ Region *SourceDynamodbUpdateDynamodbRegion `default:"" json:"region"`
// Comma separated reserved attribute names present in your tables
ReservedAttributeNames *string `json:"reserved_attribute_names,omitempty"`
// The corresponding secret to the access key id.
SecretAccessKey string `json:"secret_access_key"`
}
+
+func (s SourceDynamodbUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceDynamodbUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceDynamodbUpdate) GetAccessKeyID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKeyID
+}
+
+func (o *SourceDynamodbUpdate) GetEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Endpoint
+}
+
+func (o *SourceDynamodbUpdate) GetRegion() *SourceDynamodbUpdateDynamodbRegion {
+ if o == nil {
+ return nil
+ }
+ return o.Region
+}
+
+func (o *SourceDynamodbUpdate) GetReservedAttributeNames() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ReservedAttributeNames
+}
+
+func (o *SourceDynamodbUpdate) GetSecretAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretAccessKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourcee2etestcloud.go b/internal/sdk/pkg/models/shared/sourcee2etestcloud.go
deleted file mode 100755
index fa916fe3a..000000000
--- a/internal/sdk/pkg/models/shared/sourcee2etestcloud.go
+++ /dev/null
@@ -1,204 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
-)
-
-type SourceE2eTestCloudMockCatalogMultiSchemaType string
-
-const (
- SourceE2eTestCloudMockCatalogMultiSchemaTypeMultiStream SourceE2eTestCloudMockCatalogMultiSchemaType = "MULTI_STREAM"
-)
-
-func (e SourceE2eTestCloudMockCatalogMultiSchemaType) ToPointer() *SourceE2eTestCloudMockCatalogMultiSchemaType {
- return &e
-}
-
-func (e *SourceE2eTestCloudMockCatalogMultiSchemaType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "MULTI_STREAM":
- *e = SourceE2eTestCloudMockCatalogMultiSchemaType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceE2eTestCloudMockCatalogMultiSchemaType: %v", v)
- }
-}
-
-// SourceE2eTestCloudMockCatalogMultiSchema - A catalog with multiple data streams, each with a different schema.
-type SourceE2eTestCloudMockCatalogMultiSchema struct {
- // A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples.
- StreamSchemas string `json:"stream_schemas"`
- Type SourceE2eTestCloudMockCatalogMultiSchemaType `json:"type"`
-}
-
-type SourceE2eTestCloudMockCatalogSingleSchemaType string
-
-const (
- SourceE2eTestCloudMockCatalogSingleSchemaTypeSingleStream SourceE2eTestCloudMockCatalogSingleSchemaType = "SINGLE_STREAM"
-)
-
-func (e SourceE2eTestCloudMockCatalogSingleSchemaType) ToPointer() *SourceE2eTestCloudMockCatalogSingleSchemaType {
- return &e
-}
-
-func (e *SourceE2eTestCloudMockCatalogSingleSchemaType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "SINGLE_STREAM":
- *e = SourceE2eTestCloudMockCatalogSingleSchemaType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceE2eTestCloudMockCatalogSingleSchemaType: %v", v)
- }
-}
-
-// SourceE2eTestCloudMockCatalogSingleSchema - A catalog with one or multiple streams that share the same schema.
-type SourceE2eTestCloudMockCatalogSingleSchema struct {
- // Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is "ds", the duplicated streams will be "ds_0", "ds_1", etc.
- StreamDuplication *int64 `json:"stream_duplication,omitempty"`
- // Name of the data stream.
- StreamName string `json:"stream_name"`
- // A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples.
- StreamSchema string `json:"stream_schema"`
- Type SourceE2eTestCloudMockCatalogSingleSchemaType `json:"type"`
-}
-
-type SourceE2eTestCloudMockCatalogType string
-
-const (
- SourceE2eTestCloudMockCatalogTypeSourceE2eTestCloudMockCatalogSingleSchema SourceE2eTestCloudMockCatalogType = "source-e2e-test-cloud_Mock Catalog_Single Schema"
- SourceE2eTestCloudMockCatalogTypeSourceE2eTestCloudMockCatalogMultiSchema SourceE2eTestCloudMockCatalogType = "source-e2e-test-cloud_Mock Catalog_Multi Schema"
-)
-
-type SourceE2eTestCloudMockCatalog struct {
- SourceE2eTestCloudMockCatalogSingleSchema *SourceE2eTestCloudMockCatalogSingleSchema
- SourceE2eTestCloudMockCatalogMultiSchema *SourceE2eTestCloudMockCatalogMultiSchema
-
- Type SourceE2eTestCloudMockCatalogType
-}
-
-func CreateSourceE2eTestCloudMockCatalogSourceE2eTestCloudMockCatalogSingleSchema(sourceE2eTestCloudMockCatalogSingleSchema SourceE2eTestCloudMockCatalogSingleSchema) SourceE2eTestCloudMockCatalog {
- typ := SourceE2eTestCloudMockCatalogTypeSourceE2eTestCloudMockCatalogSingleSchema
-
- return SourceE2eTestCloudMockCatalog{
- SourceE2eTestCloudMockCatalogSingleSchema: &sourceE2eTestCloudMockCatalogSingleSchema,
- Type: typ,
- }
-}
-
-func CreateSourceE2eTestCloudMockCatalogSourceE2eTestCloudMockCatalogMultiSchema(sourceE2eTestCloudMockCatalogMultiSchema SourceE2eTestCloudMockCatalogMultiSchema) SourceE2eTestCloudMockCatalog {
- typ := SourceE2eTestCloudMockCatalogTypeSourceE2eTestCloudMockCatalogMultiSchema
-
- return SourceE2eTestCloudMockCatalog{
- SourceE2eTestCloudMockCatalogMultiSchema: &sourceE2eTestCloudMockCatalogMultiSchema,
- Type: typ,
- }
-}
-
-func (u *SourceE2eTestCloudMockCatalog) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceE2eTestCloudMockCatalogMultiSchema := new(SourceE2eTestCloudMockCatalogMultiSchema)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceE2eTestCloudMockCatalogMultiSchema); err == nil {
- u.SourceE2eTestCloudMockCatalogMultiSchema = sourceE2eTestCloudMockCatalogMultiSchema
- u.Type = SourceE2eTestCloudMockCatalogTypeSourceE2eTestCloudMockCatalogMultiSchema
- return nil
- }
-
- sourceE2eTestCloudMockCatalogSingleSchema := new(SourceE2eTestCloudMockCatalogSingleSchema)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceE2eTestCloudMockCatalogSingleSchema); err == nil {
- u.SourceE2eTestCloudMockCatalogSingleSchema = sourceE2eTestCloudMockCatalogSingleSchema
- u.Type = SourceE2eTestCloudMockCatalogTypeSourceE2eTestCloudMockCatalogSingleSchema
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u SourceE2eTestCloudMockCatalog) MarshalJSON() ([]byte, error) {
- if u.SourceE2eTestCloudMockCatalogMultiSchema != nil {
- return json.Marshal(u.SourceE2eTestCloudMockCatalogMultiSchema)
- }
-
- if u.SourceE2eTestCloudMockCatalogSingleSchema != nil {
- return json.Marshal(u.SourceE2eTestCloudMockCatalogSingleSchema)
- }
-
- return nil, nil
-}
-
-type SourceE2eTestCloudE2eTestCloud string
-
-const (
- SourceE2eTestCloudE2eTestCloudE2eTestCloud SourceE2eTestCloudE2eTestCloud = "e2e-test-cloud"
-)
-
-func (e SourceE2eTestCloudE2eTestCloud) ToPointer() *SourceE2eTestCloudE2eTestCloud {
- return &e
-}
-
-func (e *SourceE2eTestCloudE2eTestCloud) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "e2e-test-cloud":
- *e = SourceE2eTestCloudE2eTestCloud(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceE2eTestCloudE2eTestCloud: %v", v)
- }
-}
-
-type SourceE2eTestCloudType string
-
-const (
- SourceE2eTestCloudTypeContinuousFeed SourceE2eTestCloudType = "CONTINUOUS_FEED"
-)
-
-func (e SourceE2eTestCloudType) ToPointer() *SourceE2eTestCloudType {
- return &e
-}
-
-func (e *SourceE2eTestCloudType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "CONTINUOUS_FEED":
- *e = SourceE2eTestCloudType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceE2eTestCloudType: %v", v)
- }
-}
-
-type SourceE2eTestCloud struct {
- // Number of records to emit per stream. Min 1. Max 100 billion.
- MaxMessages int64 `json:"max_messages"`
- // Interval between messages in ms. Min 0 ms. Max 60000 ms (1 minute).
- MessageIntervalMs *int64 `json:"message_interval_ms,omitempty"`
- MockCatalog SourceE2eTestCloudMockCatalog `json:"mock_catalog"`
- // When the seed is unspecified, the current time millis will be used as the seed. Range: [0, 1000000].
- Seed *int64 `json:"seed,omitempty"`
- SourceType SourceE2eTestCloudE2eTestCloud `json:"sourceType"`
- Type *SourceE2eTestCloudType `json:"type,omitempty"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcee2etestcloudcreaterequest.go b/internal/sdk/pkg/models/shared/sourcee2etestcloudcreaterequest.go
deleted file mode 100755
index 6eb73aa41..000000000
--- a/internal/sdk/pkg/models/shared/sourcee2etestcloudcreaterequest.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceE2eTestCloudCreateRequest struct {
- Configuration SourceE2eTestCloud `json:"configuration"`
- Name string `json:"name"`
- // Optional secretID obtained through the public API OAuth redirect flow.
- SecretID *string `json:"secretId,omitempty"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcee2etestcloudputrequest.go b/internal/sdk/pkg/models/shared/sourcee2etestcloudputrequest.go
deleted file mode 100755
index 4f02a0b73..000000000
--- a/internal/sdk/pkg/models/shared/sourcee2etestcloudputrequest.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceE2eTestCloudPutRequest struct {
- Configuration SourceE2eTestCloudUpdate `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcee2etestcloudupdate.go b/internal/sdk/pkg/models/shared/sourcee2etestcloudupdate.go
deleted file mode 100755
index 314f387b3..000000000
--- a/internal/sdk/pkg/models/shared/sourcee2etestcloudupdate.go
+++ /dev/null
@@ -1,179 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
-)
-
-type SourceE2eTestCloudUpdateMockCatalogMultiSchemaType string
-
-const (
- SourceE2eTestCloudUpdateMockCatalogMultiSchemaTypeMultiStream SourceE2eTestCloudUpdateMockCatalogMultiSchemaType = "MULTI_STREAM"
-)
-
-func (e SourceE2eTestCloudUpdateMockCatalogMultiSchemaType) ToPointer() *SourceE2eTestCloudUpdateMockCatalogMultiSchemaType {
- return &e
-}
-
-func (e *SourceE2eTestCloudUpdateMockCatalogMultiSchemaType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "MULTI_STREAM":
- *e = SourceE2eTestCloudUpdateMockCatalogMultiSchemaType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceE2eTestCloudUpdateMockCatalogMultiSchemaType: %v", v)
- }
-}
-
-// SourceE2eTestCloudUpdateMockCatalogMultiSchema - A catalog with multiple data streams, each with a different schema.
-type SourceE2eTestCloudUpdateMockCatalogMultiSchema struct {
- // A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples.
- StreamSchemas string `json:"stream_schemas"`
- Type SourceE2eTestCloudUpdateMockCatalogMultiSchemaType `json:"type"`
-}
-
-type SourceE2eTestCloudUpdateMockCatalogSingleSchemaType string
-
-const (
- SourceE2eTestCloudUpdateMockCatalogSingleSchemaTypeSingleStream SourceE2eTestCloudUpdateMockCatalogSingleSchemaType = "SINGLE_STREAM"
-)
-
-func (e SourceE2eTestCloudUpdateMockCatalogSingleSchemaType) ToPointer() *SourceE2eTestCloudUpdateMockCatalogSingleSchemaType {
- return &e
-}
-
-func (e *SourceE2eTestCloudUpdateMockCatalogSingleSchemaType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "SINGLE_STREAM":
- *e = SourceE2eTestCloudUpdateMockCatalogSingleSchemaType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceE2eTestCloudUpdateMockCatalogSingleSchemaType: %v", v)
- }
-}
-
-// SourceE2eTestCloudUpdateMockCatalogSingleSchema - A catalog with one or multiple streams that share the same schema.
-type SourceE2eTestCloudUpdateMockCatalogSingleSchema struct {
- // Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is "ds", the duplicated streams will be "ds_0", "ds_1", etc.
- StreamDuplication *int64 `json:"stream_duplication,omitempty"`
- // Name of the data stream.
- StreamName string `json:"stream_name"`
- // A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples.
- StreamSchema string `json:"stream_schema"`
- Type SourceE2eTestCloudUpdateMockCatalogSingleSchemaType `json:"type"`
-}
-
-type SourceE2eTestCloudUpdateMockCatalogType string
-
-const (
- SourceE2eTestCloudUpdateMockCatalogTypeSourceE2eTestCloudUpdateMockCatalogSingleSchema SourceE2eTestCloudUpdateMockCatalogType = "source-e2e-test-cloud-update_Mock Catalog_Single Schema"
- SourceE2eTestCloudUpdateMockCatalogTypeSourceE2eTestCloudUpdateMockCatalogMultiSchema SourceE2eTestCloudUpdateMockCatalogType = "source-e2e-test-cloud-update_Mock Catalog_Multi Schema"
-)
-
-type SourceE2eTestCloudUpdateMockCatalog struct {
- SourceE2eTestCloudUpdateMockCatalogSingleSchema *SourceE2eTestCloudUpdateMockCatalogSingleSchema
- SourceE2eTestCloudUpdateMockCatalogMultiSchema *SourceE2eTestCloudUpdateMockCatalogMultiSchema
-
- Type SourceE2eTestCloudUpdateMockCatalogType
-}
-
-func CreateSourceE2eTestCloudUpdateMockCatalogSourceE2eTestCloudUpdateMockCatalogSingleSchema(sourceE2eTestCloudUpdateMockCatalogSingleSchema SourceE2eTestCloudUpdateMockCatalogSingleSchema) SourceE2eTestCloudUpdateMockCatalog {
- typ := SourceE2eTestCloudUpdateMockCatalogTypeSourceE2eTestCloudUpdateMockCatalogSingleSchema
-
- return SourceE2eTestCloudUpdateMockCatalog{
- SourceE2eTestCloudUpdateMockCatalogSingleSchema: &sourceE2eTestCloudUpdateMockCatalogSingleSchema,
- Type: typ,
- }
-}
-
-func CreateSourceE2eTestCloudUpdateMockCatalogSourceE2eTestCloudUpdateMockCatalogMultiSchema(sourceE2eTestCloudUpdateMockCatalogMultiSchema SourceE2eTestCloudUpdateMockCatalogMultiSchema) SourceE2eTestCloudUpdateMockCatalog {
- typ := SourceE2eTestCloudUpdateMockCatalogTypeSourceE2eTestCloudUpdateMockCatalogMultiSchema
-
- return SourceE2eTestCloudUpdateMockCatalog{
- SourceE2eTestCloudUpdateMockCatalogMultiSchema: &sourceE2eTestCloudUpdateMockCatalogMultiSchema,
- Type: typ,
- }
-}
-
-func (u *SourceE2eTestCloudUpdateMockCatalog) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceE2eTestCloudUpdateMockCatalogMultiSchema := new(SourceE2eTestCloudUpdateMockCatalogMultiSchema)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceE2eTestCloudUpdateMockCatalogMultiSchema); err == nil {
- u.SourceE2eTestCloudUpdateMockCatalogMultiSchema = sourceE2eTestCloudUpdateMockCatalogMultiSchema
- u.Type = SourceE2eTestCloudUpdateMockCatalogTypeSourceE2eTestCloudUpdateMockCatalogMultiSchema
- return nil
- }
-
- sourceE2eTestCloudUpdateMockCatalogSingleSchema := new(SourceE2eTestCloudUpdateMockCatalogSingleSchema)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceE2eTestCloudUpdateMockCatalogSingleSchema); err == nil {
- u.SourceE2eTestCloudUpdateMockCatalogSingleSchema = sourceE2eTestCloudUpdateMockCatalogSingleSchema
- u.Type = SourceE2eTestCloudUpdateMockCatalogTypeSourceE2eTestCloudUpdateMockCatalogSingleSchema
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u SourceE2eTestCloudUpdateMockCatalog) MarshalJSON() ([]byte, error) {
- if u.SourceE2eTestCloudUpdateMockCatalogMultiSchema != nil {
- return json.Marshal(u.SourceE2eTestCloudUpdateMockCatalogMultiSchema)
- }
-
- if u.SourceE2eTestCloudUpdateMockCatalogSingleSchema != nil {
- return json.Marshal(u.SourceE2eTestCloudUpdateMockCatalogSingleSchema)
- }
-
- return nil, nil
-}
-
-type SourceE2eTestCloudUpdateType string
-
-const (
- SourceE2eTestCloudUpdateTypeContinuousFeed SourceE2eTestCloudUpdateType = "CONTINUOUS_FEED"
-)
-
-func (e SourceE2eTestCloudUpdateType) ToPointer() *SourceE2eTestCloudUpdateType {
- return &e
-}
-
-func (e *SourceE2eTestCloudUpdateType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "CONTINUOUS_FEED":
- *e = SourceE2eTestCloudUpdateType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceE2eTestCloudUpdateType: %v", v)
- }
-}
-
-type SourceE2eTestCloudUpdate struct {
- // Number of records to emit per stream. Min 1. Max 100 billion.
- MaxMessages int64 `json:"max_messages"`
- // Interval between messages in ms. Min 0 ms. Max 60000 ms (1 minute).
- MessageIntervalMs *int64 `json:"message_interval_ms,omitempty"`
- MockCatalog SourceE2eTestCloudUpdateMockCatalog `json:"mock_catalog"`
- // When the seed is unspecified, the current time millis will be used as the seed. Range: [0, 1000000].
- Seed *int64 `json:"seed,omitempty"`
- Type *SourceE2eTestCloudUpdateType `json:"type,omitempty"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourceemailoctopus.go b/internal/sdk/pkg/models/shared/sourceemailoctopus.go
old mode 100755
new mode 100644
index 7790bd5cb..da3b7d81b
--- a/internal/sdk/pkg/models/shared/sourceemailoctopus.go
+++ b/internal/sdk/pkg/models/shared/sourceemailoctopus.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceEmailoctopusEmailoctopus string
+type Emailoctopus string
const (
- SourceEmailoctopusEmailoctopusEmailoctopus SourceEmailoctopusEmailoctopus = "emailoctopus"
+ EmailoctopusEmailoctopus Emailoctopus = "emailoctopus"
)
-func (e SourceEmailoctopusEmailoctopus) ToPointer() *SourceEmailoctopusEmailoctopus {
+func (e Emailoctopus) ToPointer() *Emailoctopus {
return &e
}
-func (e *SourceEmailoctopusEmailoctopus) UnmarshalJSON(data []byte) error {
+func (e *Emailoctopus) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "emailoctopus":
- *e = SourceEmailoctopusEmailoctopus(v)
+ *e = Emailoctopus(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceEmailoctopusEmailoctopus: %v", v)
+ return fmt.Errorf("invalid value for Emailoctopus: %v", v)
}
}
type SourceEmailoctopus struct {
// EmailOctopus API Key. See the docs for information on how to generate this key.
- APIKey string `json:"api_key"`
- SourceType SourceEmailoctopusEmailoctopus `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Emailoctopus `const:"emailoctopus" json:"sourceType"`
+}
+
+func (s SourceEmailoctopus) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceEmailoctopus) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceEmailoctopus) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceEmailoctopus) GetSourceType() Emailoctopus {
+ return EmailoctopusEmailoctopus
}
diff --git a/internal/sdk/pkg/models/shared/sourceemailoctopuscreaterequest.go b/internal/sdk/pkg/models/shared/sourceemailoctopuscreaterequest.go
old mode 100755
new mode 100644
index 472d84359..d4768e7c1
--- a/internal/sdk/pkg/models/shared/sourceemailoctopuscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceemailoctopuscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceEmailoctopusCreateRequest struct {
Configuration SourceEmailoctopus `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceEmailoctopusCreateRequest) GetConfiguration() SourceEmailoctopus {
+ if o == nil {
+ return SourceEmailoctopus{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceEmailoctopusCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceEmailoctopusCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceEmailoctopusCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceEmailoctopusCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceemailoctopusputrequest.go b/internal/sdk/pkg/models/shared/sourceemailoctopusputrequest.go
old mode 100755
new mode 100644
index 0b95dce9f..b2dde5441
--- a/internal/sdk/pkg/models/shared/sourceemailoctopusputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceemailoctopusputrequest.go
@@ -7,3 +7,24 @@ type SourceEmailoctopusPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceEmailoctopusPutRequest) GetConfiguration() SourceEmailoctopusUpdate {
+ if o == nil {
+ return SourceEmailoctopusUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceEmailoctopusPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceEmailoctopusPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceemailoctopusupdate.go b/internal/sdk/pkg/models/shared/sourceemailoctopusupdate.go
old mode 100755
new mode 100644
index d5446868f..1a31b8f7b
--- a/internal/sdk/pkg/models/shared/sourceemailoctopusupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceemailoctopusupdate.go
@@ -6,3 +6,10 @@ type SourceEmailoctopusUpdate struct {
// EmailOctopus API Key. See the docs for information on how to generate this key.
APIKey string `json:"api_key"`
}
+
+func (o *SourceEmailoctopusUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourceexchangerates.go b/internal/sdk/pkg/models/shared/sourceexchangerates.go
old mode 100755
new mode 100644
index 24f4ff33c..d0272a311
--- a/internal/sdk/pkg/models/shared/sourceexchangerates.go
+++ b/internal/sdk/pkg/models/shared/sourceexchangerates.go
@@ -3,32 +3,33 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceExchangeRatesExchangeRates string
+type ExchangeRates string
const (
- SourceExchangeRatesExchangeRatesExchangeRates SourceExchangeRatesExchangeRates = "exchange-rates"
+ ExchangeRatesExchangeRates ExchangeRates = "exchange-rates"
)
-func (e SourceExchangeRatesExchangeRates) ToPointer() *SourceExchangeRatesExchangeRates {
+func (e ExchangeRates) ToPointer() *ExchangeRates {
return &e
}
-func (e *SourceExchangeRatesExchangeRates) UnmarshalJSON(data []byte) error {
+func (e *ExchangeRates) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "exchange-rates":
- *e = SourceExchangeRatesExchangeRates(v)
+ *e = ExchangeRates(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceExchangeRatesExchangeRates: %v", v)
+ return fmt.Errorf("invalid value for ExchangeRates: %v", v)
}
}
@@ -38,8 +39,51 @@ type SourceExchangeRates struct {
// ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default base currency is EUR
Base *string `json:"base,omitempty"`
// Ignore weekends? (Exchanges don't run on weekends)
- IgnoreWeekends *bool `json:"ignore_weekends,omitempty"`
- SourceType SourceExchangeRatesExchangeRates `json:"sourceType"`
+ IgnoreWeekends *bool `default:"true" json:"ignore_weekends"`
+ sourceType ExchangeRates `const:"exchange-rates" json:"sourceType"`
// Start getting data from that date.
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceExchangeRates) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceExchangeRates) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceExchangeRates) GetAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKey
+}
+
+func (o *SourceExchangeRates) GetBase() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Base
+}
+
+func (o *SourceExchangeRates) GetIgnoreWeekends() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IgnoreWeekends
+}
+
+func (o *SourceExchangeRates) GetSourceType() ExchangeRates {
+ return ExchangeRatesExchangeRates
+}
+
+func (o *SourceExchangeRates) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceexchangeratescreaterequest.go b/internal/sdk/pkg/models/shared/sourceexchangeratescreaterequest.go
old mode 100755
new mode 100644
index 3d00826fe..61ad6a54f
--- a/internal/sdk/pkg/models/shared/sourceexchangeratescreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceexchangeratescreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceExchangeRatesCreateRequest struct {
Configuration SourceExchangeRates `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceExchangeRatesCreateRequest) GetConfiguration() SourceExchangeRates {
+ if o == nil {
+ return SourceExchangeRates{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceExchangeRatesCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceExchangeRatesCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceExchangeRatesCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceExchangeRatesCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceexchangeratesputrequest.go b/internal/sdk/pkg/models/shared/sourceexchangeratesputrequest.go
old mode 100755
new mode 100644
index 4b268f339..5eb682f2a
--- a/internal/sdk/pkg/models/shared/sourceexchangeratesputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceexchangeratesputrequest.go
@@ -7,3 +7,24 @@ type SourceExchangeRatesPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceExchangeRatesPutRequest) GetConfiguration() SourceExchangeRatesUpdate {
+ if o == nil {
+ return SourceExchangeRatesUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceExchangeRatesPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceExchangeRatesPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceexchangeratesupdate.go b/internal/sdk/pkg/models/shared/sourceexchangeratesupdate.go
old mode 100755
new mode 100644
index f78ede509..2a7264fbd
--- a/internal/sdk/pkg/models/shared/sourceexchangeratesupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceexchangeratesupdate.go
@@ -3,7 +3,8 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceExchangeRatesUpdate struct {
@@ -12,7 +13,46 @@ type SourceExchangeRatesUpdate struct {
// ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default base currency is EUR
Base *string `json:"base,omitempty"`
// Ignore weekends? (Exchanges don't run on weekends)
- IgnoreWeekends *bool `json:"ignore_weekends,omitempty"`
+ IgnoreWeekends *bool `default:"true" json:"ignore_weekends"`
// Start getting data from that date.
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceExchangeRatesUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceExchangeRatesUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceExchangeRatesUpdate) GetAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKey
+}
+
+func (o *SourceExchangeRatesUpdate) GetBase() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Base
+}
+
+func (o *SourceExchangeRatesUpdate) GetIgnoreWeekends() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IgnoreWeekends
+}
+
+func (o *SourceExchangeRatesUpdate) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefacebookmarketing.go b/internal/sdk/pkg/models/shared/sourcefacebookmarketing.go
old mode 100755
new mode 100644
index c6e88d854..7f0c48fd9
--- a/internal/sdk/pkg/models/shared/sourcefacebookmarketing.go
+++ b/internal/sdk/pkg/models/shared/sourcefacebookmarketing.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-// SourceFacebookMarketingInsightConfigValidActionBreakdowns - An enumeration.
-type SourceFacebookMarketingInsightConfigValidActionBreakdowns string
+// SourceFacebookMarketingValidActionBreakdowns - An enumeration.
+type SourceFacebookMarketingValidActionBreakdowns string
const (
- SourceFacebookMarketingInsightConfigValidActionBreakdownsActionCanvasComponentName SourceFacebookMarketingInsightConfigValidActionBreakdowns = "action_canvas_component_name"
- SourceFacebookMarketingInsightConfigValidActionBreakdownsActionCarouselCardID SourceFacebookMarketingInsightConfigValidActionBreakdowns = "action_carousel_card_id"
- SourceFacebookMarketingInsightConfigValidActionBreakdownsActionCarouselCardName SourceFacebookMarketingInsightConfigValidActionBreakdowns = "action_carousel_card_name"
- SourceFacebookMarketingInsightConfigValidActionBreakdownsActionDestination SourceFacebookMarketingInsightConfigValidActionBreakdowns = "action_destination"
- SourceFacebookMarketingInsightConfigValidActionBreakdownsActionDevice SourceFacebookMarketingInsightConfigValidActionBreakdowns = "action_device"
- SourceFacebookMarketingInsightConfigValidActionBreakdownsActionReaction SourceFacebookMarketingInsightConfigValidActionBreakdowns = "action_reaction"
- SourceFacebookMarketingInsightConfigValidActionBreakdownsActionTargetID SourceFacebookMarketingInsightConfigValidActionBreakdowns = "action_target_id"
- SourceFacebookMarketingInsightConfigValidActionBreakdownsActionType SourceFacebookMarketingInsightConfigValidActionBreakdowns = "action_type"
- SourceFacebookMarketingInsightConfigValidActionBreakdownsActionVideoSound SourceFacebookMarketingInsightConfigValidActionBreakdowns = "action_video_sound"
- SourceFacebookMarketingInsightConfigValidActionBreakdownsActionVideoType SourceFacebookMarketingInsightConfigValidActionBreakdowns = "action_video_type"
+ SourceFacebookMarketingValidActionBreakdownsActionCanvasComponentName SourceFacebookMarketingValidActionBreakdowns = "action_canvas_component_name"
+ SourceFacebookMarketingValidActionBreakdownsActionCarouselCardID SourceFacebookMarketingValidActionBreakdowns = "action_carousel_card_id"
+ SourceFacebookMarketingValidActionBreakdownsActionCarouselCardName SourceFacebookMarketingValidActionBreakdowns = "action_carousel_card_name"
+ SourceFacebookMarketingValidActionBreakdownsActionDestination SourceFacebookMarketingValidActionBreakdowns = "action_destination"
+ SourceFacebookMarketingValidActionBreakdownsActionDevice SourceFacebookMarketingValidActionBreakdowns = "action_device"
+ SourceFacebookMarketingValidActionBreakdownsActionReaction SourceFacebookMarketingValidActionBreakdowns = "action_reaction"
+ SourceFacebookMarketingValidActionBreakdownsActionTargetID SourceFacebookMarketingValidActionBreakdowns = "action_target_id"
+ SourceFacebookMarketingValidActionBreakdownsActionType SourceFacebookMarketingValidActionBreakdowns = "action_type"
+ SourceFacebookMarketingValidActionBreakdownsActionVideoSound SourceFacebookMarketingValidActionBreakdowns = "action_video_sound"
+ SourceFacebookMarketingValidActionBreakdownsActionVideoType SourceFacebookMarketingValidActionBreakdowns = "action_video_type"
)
-func (e SourceFacebookMarketingInsightConfigValidActionBreakdowns) ToPointer() *SourceFacebookMarketingInsightConfigValidActionBreakdowns {
+func (e SourceFacebookMarketingValidActionBreakdowns) ToPointer() *SourceFacebookMarketingValidActionBreakdowns {
return &e
}
-func (e *SourceFacebookMarketingInsightConfigValidActionBreakdowns) UnmarshalJSON(data []byte) error {
+func (e *SourceFacebookMarketingValidActionBreakdowns) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -53,27 +54,27 @@ func (e *SourceFacebookMarketingInsightConfigValidActionBreakdowns) UnmarshalJSO
case "action_video_sound":
fallthrough
case "action_video_type":
- *e = SourceFacebookMarketingInsightConfigValidActionBreakdowns(v)
+ *e = SourceFacebookMarketingValidActionBreakdowns(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookMarketingInsightConfigValidActionBreakdowns: %v", v)
+ return fmt.Errorf("invalid value for SourceFacebookMarketingValidActionBreakdowns: %v", v)
}
}
-// SourceFacebookMarketingInsightConfigActionReportTime - Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.
-type SourceFacebookMarketingInsightConfigActionReportTime string
+// SourceFacebookMarketingActionReportTime - Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.
+type SourceFacebookMarketingActionReportTime string
const (
- SourceFacebookMarketingInsightConfigActionReportTimeConversion SourceFacebookMarketingInsightConfigActionReportTime = "conversion"
- SourceFacebookMarketingInsightConfigActionReportTimeImpression SourceFacebookMarketingInsightConfigActionReportTime = "impression"
- SourceFacebookMarketingInsightConfigActionReportTimeMixed SourceFacebookMarketingInsightConfigActionReportTime = "mixed"
+ SourceFacebookMarketingActionReportTimeConversion SourceFacebookMarketingActionReportTime = "conversion"
+ SourceFacebookMarketingActionReportTimeImpression SourceFacebookMarketingActionReportTime = "impression"
+ SourceFacebookMarketingActionReportTimeMixed SourceFacebookMarketingActionReportTime = "mixed"
)
-func (e SourceFacebookMarketingInsightConfigActionReportTime) ToPointer() *SourceFacebookMarketingInsightConfigActionReportTime {
+func (e SourceFacebookMarketingActionReportTime) ToPointer() *SourceFacebookMarketingActionReportTime {
return &e
}
-func (e *SourceFacebookMarketingInsightConfigActionReportTime) UnmarshalJSON(data []byte) error {
+func (e *SourceFacebookMarketingActionReportTime) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -84,56 +85,56 @@ func (e *SourceFacebookMarketingInsightConfigActionReportTime) UnmarshalJSON(dat
case "impression":
fallthrough
case "mixed":
- *e = SourceFacebookMarketingInsightConfigActionReportTime(v)
+ *e = SourceFacebookMarketingActionReportTime(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookMarketingInsightConfigActionReportTime: %v", v)
+ return fmt.Errorf("invalid value for SourceFacebookMarketingActionReportTime: %v", v)
}
}
-// SourceFacebookMarketingInsightConfigValidBreakdowns - An enumeration.
-type SourceFacebookMarketingInsightConfigValidBreakdowns string
+// SourceFacebookMarketingValidBreakdowns - An enumeration.
+type SourceFacebookMarketingValidBreakdowns string
const (
- SourceFacebookMarketingInsightConfigValidBreakdownsAdFormatAsset SourceFacebookMarketingInsightConfigValidBreakdowns = "ad_format_asset"
- SourceFacebookMarketingInsightConfigValidBreakdownsAge SourceFacebookMarketingInsightConfigValidBreakdowns = "age"
- SourceFacebookMarketingInsightConfigValidBreakdownsAppID SourceFacebookMarketingInsightConfigValidBreakdowns = "app_id"
- SourceFacebookMarketingInsightConfigValidBreakdownsBodyAsset SourceFacebookMarketingInsightConfigValidBreakdowns = "body_asset"
- SourceFacebookMarketingInsightConfigValidBreakdownsCallToActionAsset SourceFacebookMarketingInsightConfigValidBreakdowns = "call_to_action_asset"
- SourceFacebookMarketingInsightConfigValidBreakdownsCoarseConversionValue SourceFacebookMarketingInsightConfigValidBreakdowns = "coarse_conversion_value"
- SourceFacebookMarketingInsightConfigValidBreakdownsCountry SourceFacebookMarketingInsightConfigValidBreakdowns = "country"
- SourceFacebookMarketingInsightConfigValidBreakdownsDescriptionAsset SourceFacebookMarketingInsightConfigValidBreakdowns = "description_asset"
- SourceFacebookMarketingInsightConfigValidBreakdownsDevicePlatform SourceFacebookMarketingInsightConfigValidBreakdowns = "device_platform"
- SourceFacebookMarketingInsightConfigValidBreakdownsDma SourceFacebookMarketingInsightConfigValidBreakdowns = "dma"
- SourceFacebookMarketingInsightConfigValidBreakdownsFidelityType SourceFacebookMarketingInsightConfigValidBreakdowns = "fidelity_type"
- SourceFacebookMarketingInsightConfigValidBreakdownsFrequencyValue SourceFacebookMarketingInsightConfigValidBreakdowns = "frequency_value"
- SourceFacebookMarketingInsightConfigValidBreakdownsGender SourceFacebookMarketingInsightConfigValidBreakdowns = "gender"
- SourceFacebookMarketingInsightConfigValidBreakdownsHourlyStatsAggregatedByAdvertiserTimeZone SourceFacebookMarketingInsightConfigValidBreakdowns = "hourly_stats_aggregated_by_advertiser_time_zone"
- SourceFacebookMarketingInsightConfigValidBreakdownsHourlyStatsAggregatedByAudienceTimeZone SourceFacebookMarketingInsightConfigValidBreakdowns = "hourly_stats_aggregated_by_audience_time_zone"
- SourceFacebookMarketingInsightConfigValidBreakdownsHsid SourceFacebookMarketingInsightConfigValidBreakdowns = "hsid"
- SourceFacebookMarketingInsightConfigValidBreakdownsImageAsset SourceFacebookMarketingInsightConfigValidBreakdowns = "image_asset"
- SourceFacebookMarketingInsightConfigValidBreakdownsImpressionDevice SourceFacebookMarketingInsightConfigValidBreakdowns = "impression_device"
- SourceFacebookMarketingInsightConfigValidBreakdownsIsConversionIDModeled SourceFacebookMarketingInsightConfigValidBreakdowns = "is_conversion_id_modeled"
- SourceFacebookMarketingInsightConfigValidBreakdownsLinkURLAsset SourceFacebookMarketingInsightConfigValidBreakdowns = "link_url_asset"
- SourceFacebookMarketingInsightConfigValidBreakdownsMmm SourceFacebookMarketingInsightConfigValidBreakdowns = "mmm"
- SourceFacebookMarketingInsightConfigValidBreakdownsPlacePageID SourceFacebookMarketingInsightConfigValidBreakdowns = "place_page_id"
- SourceFacebookMarketingInsightConfigValidBreakdownsPlatformPosition SourceFacebookMarketingInsightConfigValidBreakdowns = "platform_position"
- SourceFacebookMarketingInsightConfigValidBreakdownsPostbackSequenceIndex SourceFacebookMarketingInsightConfigValidBreakdowns = "postback_sequence_index"
- SourceFacebookMarketingInsightConfigValidBreakdownsProductID SourceFacebookMarketingInsightConfigValidBreakdowns = "product_id"
- SourceFacebookMarketingInsightConfigValidBreakdownsPublisherPlatform SourceFacebookMarketingInsightConfigValidBreakdowns = "publisher_platform"
- SourceFacebookMarketingInsightConfigValidBreakdownsRedownload SourceFacebookMarketingInsightConfigValidBreakdowns = "redownload"
- SourceFacebookMarketingInsightConfigValidBreakdownsRegion SourceFacebookMarketingInsightConfigValidBreakdowns = "region"
- SourceFacebookMarketingInsightConfigValidBreakdownsSkanCampaignID SourceFacebookMarketingInsightConfigValidBreakdowns = "skan_campaign_id"
- SourceFacebookMarketingInsightConfigValidBreakdownsSkanConversionID SourceFacebookMarketingInsightConfigValidBreakdowns = "skan_conversion_id"
- SourceFacebookMarketingInsightConfigValidBreakdownsTitleAsset SourceFacebookMarketingInsightConfigValidBreakdowns = "title_asset"
- SourceFacebookMarketingInsightConfigValidBreakdownsVideoAsset SourceFacebookMarketingInsightConfigValidBreakdowns = "video_asset"
+ SourceFacebookMarketingValidBreakdownsAdFormatAsset SourceFacebookMarketingValidBreakdowns = "ad_format_asset"
+ SourceFacebookMarketingValidBreakdownsAge SourceFacebookMarketingValidBreakdowns = "age"
+ SourceFacebookMarketingValidBreakdownsAppID SourceFacebookMarketingValidBreakdowns = "app_id"
+ SourceFacebookMarketingValidBreakdownsBodyAsset SourceFacebookMarketingValidBreakdowns = "body_asset"
+ SourceFacebookMarketingValidBreakdownsCallToActionAsset SourceFacebookMarketingValidBreakdowns = "call_to_action_asset"
+ SourceFacebookMarketingValidBreakdownsCoarseConversionValue SourceFacebookMarketingValidBreakdowns = "coarse_conversion_value"
+ SourceFacebookMarketingValidBreakdownsCountry SourceFacebookMarketingValidBreakdowns = "country"
+ SourceFacebookMarketingValidBreakdownsDescriptionAsset SourceFacebookMarketingValidBreakdowns = "description_asset"
+ SourceFacebookMarketingValidBreakdownsDevicePlatform SourceFacebookMarketingValidBreakdowns = "device_platform"
+ SourceFacebookMarketingValidBreakdownsDma SourceFacebookMarketingValidBreakdowns = "dma"
+ SourceFacebookMarketingValidBreakdownsFidelityType SourceFacebookMarketingValidBreakdowns = "fidelity_type"
+ SourceFacebookMarketingValidBreakdownsFrequencyValue SourceFacebookMarketingValidBreakdowns = "frequency_value"
+ SourceFacebookMarketingValidBreakdownsGender SourceFacebookMarketingValidBreakdowns = "gender"
+ SourceFacebookMarketingValidBreakdownsHourlyStatsAggregatedByAdvertiserTimeZone SourceFacebookMarketingValidBreakdowns = "hourly_stats_aggregated_by_advertiser_time_zone"
+ SourceFacebookMarketingValidBreakdownsHourlyStatsAggregatedByAudienceTimeZone SourceFacebookMarketingValidBreakdowns = "hourly_stats_aggregated_by_audience_time_zone"
+ SourceFacebookMarketingValidBreakdownsHsid SourceFacebookMarketingValidBreakdowns = "hsid"
+ SourceFacebookMarketingValidBreakdownsImageAsset SourceFacebookMarketingValidBreakdowns = "image_asset"
+ SourceFacebookMarketingValidBreakdownsImpressionDevice SourceFacebookMarketingValidBreakdowns = "impression_device"
+ SourceFacebookMarketingValidBreakdownsIsConversionIDModeled SourceFacebookMarketingValidBreakdowns = "is_conversion_id_modeled"
+ SourceFacebookMarketingValidBreakdownsLinkURLAsset SourceFacebookMarketingValidBreakdowns = "link_url_asset"
+ SourceFacebookMarketingValidBreakdownsMmm SourceFacebookMarketingValidBreakdowns = "mmm"
+ SourceFacebookMarketingValidBreakdownsPlacePageID SourceFacebookMarketingValidBreakdowns = "place_page_id"
+ SourceFacebookMarketingValidBreakdownsPlatformPosition SourceFacebookMarketingValidBreakdowns = "platform_position"
+ SourceFacebookMarketingValidBreakdownsPostbackSequenceIndex SourceFacebookMarketingValidBreakdowns = "postback_sequence_index"
+ SourceFacebookMarketingValidBreakdownsProductID SourceFacebookMarketingValidBreakdowns = "product_id"
+ SourceFacebookMarketingValidBreakdownsPublisherPlatform SourceFacebookMarketingValidBreakdowns = "publisher_platform"
+ SourceFacebookMarketingValidBreakdownsRedownload SourceFacebookMarketingValidBreakdowns = "redownload"
+ SourceFacebookMarketingValidBreakdownsRegion SourceFacebookMarketingValidBreakdowns = "region"
+ SourceFacebookMarketingValidBreakdownsSkanCampaignID SourceFacebookMarketingValidBreakdowns = "skan_campaign_id"
+ SourceFacebookMarketingValidBreakdownsSkanConversionID SourceFacebookMarketingValidBreakdowns = "skan_conversion_id"
+ SourceFacebookMarketingValidBreakdownsTitleAsset SourceFacebookMarketingValidBreakdowns = "title_asset"
+ SourceFacebookMarketingValidBreakdownsVideoAsset SourceFacebookMarketingValidBreakdowns = "video_asset"
)
-func (e SourceFacebookMarketingInsightConfigValidBreakdowns) ToPointer() *SourceFacebookMarketingInsightConfigValidBreakdowns {
+func (e SourceFacebookMarketingValidBreakdowns) ToPointer() *SourceFacebookMarketingValidBreakdowns {
return &e
}
-func (e *SourceFacebookMarketingInsightConfigValidBreakdowns) UnmarshalJSON(data []byte) error {
+func (e *SourceFacebookMarketingValidBreakdowns) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -202,156 +203,156 @@ func (e *SourceFacebookMarketingInsightConfigValidBreakdowns) UnmarshalJSON(data
case "title_asset":
fallthrough
case "video_asset":
- *e = SourceFacebookMarketingInsightConfigValidBreakdowns(v)
+ *e = SourceFacebookMarketingValidBreakdowns(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookMarketingInsightConfigValidBreakdowns: %v", v)
+ return fmt.Errorf("invalid value for SourceFacebookMarketingValidBreakdowns: %v", v)
}
}
-// SourceFacebookMarketingInsightConfigValidEnums - An enumeration.
-type SourceFacebookMarketingInsightConfigValidEnums string
+// SourceFacebookMarketingValidEnums - An enumeration.
+type SourceFacebookMarketingValidEnums string
const (
- SourceFacebookMarketingInsightConfigValidEnumsAccountCurrency SourceFacebookMarketingInsightConfigValidEnums = "account_currency"
- SourceFacebookMarketingInsightConfigValidEnumsAccountID SourceFacebookMarketingInsightConfigValidEnums = "account_id"
- SourceFacebookMarketingInsightConfigValidEnumsAccountName SourceFacebookMarketingInsightConfigValidEnums = "account_name"
- SourceFacebookMarketingInsightConfigValidEnumsActionValues SourceFacebookMarketingInsightConfigValidEnums = "action_values"
- SourceFacebookMarketingInsightConfigValidEnumsActions SourceFacebookMarketingInsightConfigValidEnums = "actions"
- SourceFacebookMarketingInsightConfigValidEnumsAdClickActions SourceFacebookMarketingInsightConfigValidEnums = "ad_click_actions"
- SourceFacebookMarketingInsightConfigValidEnumsAdID SourceFacebookMarketingInsightConfigValidEnums = "ad_id"
- SourceFacebookMarketingInsightConfigValidEnumsAdImpressionActions SourceFacebookMarketingInsightConfigValidEnums = "ad_impression_actions"
- SourceFacebookMarketingInsightConfigValidEnumsAdName SourceFacebookMarketingInsightConfigValidEnums = "ad_name"
- SourceFacebookMarketingInsightConfigValidEnumsAdsetEnd SourceFacebookMarketingInsightConfigValidEnums = "adset_end"
- SourceFacebookMarketingInsightConfigValidEnumsAdsetID SourceFacebookMarketingInsightConfigValidEnums = "adset_id"
- SourceFacebookMarketingInsightConfigValidEnumsAdsetName SourceFacebookMarketingInsightConfigValidEnums = "adset_name"
- SourceFacebookMarketingInsightConfigValidEnumsAdsetStart SourceFacebookMarketingInsightConfigValidEnums = "adset_start"
- SourceFacebookMarketingInsightConfigValidEnumsAgeTargeting SourceFacebookMarketingInsightConfigValidEnums = "age_targeting"
- SourceFacebookMarketingInsightConfigValidEnumsAttributionSetting SourceFacebookMarketingInsightConfigValidEnums = "attribution_setting"
- SourceFacebookMarketingInsightConfigValidEnumsAuctionBid SourceFacebookMarketingInsightConfigValidEnums = "auction_bid"
- SourceFacebookMarketingInsightConfigValidEnumsAuctionCompetitiveness SourceFacebookMarketingInsightConfigValidEnums = "auction_competitiveness"
- SourceFacebookMarketingInsightConfigValidEnumsAuctionMaxCompetitorBid SourceFacebookMarketingInsightConfigValidEnums = "auction_max_competitor_bid"
- SourceFacebookMarketingInsightConfigValidEnumsBuyingType SourceFacebookMarketingInsightConfigValidEnums = "buying_type"
- SourceFacebookMarketingInsightConfigValidEnumsCampaignID SourceFacebookMarketingInsightConfigValidEnums = "campaign_id"
- SourceFacebookMarketingInsightConfigValidEnumsCampaignName SourceFacebookMarketingInsightConfigValidEnums = "campaign_name"
- SourceFacebookMarketingInsightConfigValidEnumsCanvasAvgViewPercent SourceFacebookMarketingInsightConfigValidEnums = "canvas_avg_view_percent"
- SourceFacebookMarketingInsightConfigValidEnumsCanvasAvgViewTime SourceFacebookMarketingInsightConfigValidEnums = "canvas_avg_view_time"
- SourceFacebookMarketingInsightConfigValidEnumsCatalogSegmentActions SourceFacebookMarketingInsightConfigValidEnums = "catalog_segment_actions"
- SourceFacebookMarketingInsightConfigValidEnumsCatalogSegmentValue SourceFacebookMarketingInsightConfigValidEnums = "catalog_segment_value"
- SourceFacebookMarketingInsightConfigValidEnumsCatalogSegmentValueMobilePurchaseRoas SourceFacebookMarketingInsightConfigValidEnums = "catalog_segment_value_mobile_purchase_roas"
- SourceFacebookMarketingInsightConfigValidEnumsCatalogSegmentValueOmniPurchaseRoas SourceFacebookMarketingInsightConfigValidEnums = "catalog_segment_value_omni_purchase_roas"
- SourceFacebookMarketingInsightConfigValidEnumsCatalogSegmentValueWebsitePurchaseRoas SourceFacebookMarketingInsightConfigValidEnums = "catalog_segment_value_website_purchase_roas"
- SourceFacebookMarketingInsightConfigValidEnumsClicks SourceFacebookMarketingInsightConfigValidEnums = "clicks"
- SourceFacebookMarketingInsightConfigValidEnumsConversionRateRanking SourceFacebookMarketingInsightConfigValidEnums = "conversion_rate_ranking"
- SourceFacebookMarketingInsightConfigValidEnumsConversionValues SourceFacebookMarketingInsightConfigValidEnums = "conversion_values"
- SourceFacebookMarketingInsightConfigValidEnumsConversions SourceFacebookMarketingInsightConfigValidEnums = "conversions"
- SourceFacebookMarketingInsightConfigValidEnumsConvertedProductQuantity SourceFacebookMarketingInsightConfigValidEnums = "converted_product_quantity"
- SourceFacebookMarketingInsightConfigValidEnumsConvertedProductValue SourceFacebookMarketingInsightConfigValidEnums = "converted_product_value"
- SourceFacebookMarketingInsightConfigValidEnumsCostPer15SecVideoView SourceFacebookMarketingInsightConfigValidEnums = "cost_per_15_sec_video_view"
- SourceFacebookMarketingInsightConfigValidEnumsCostPer2SecContinuousVideoView SourceFacebookMarketingInsightConfigValidEnums = "cost_per_2_sec_continuous_video_view"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerActionType SourceFacebookMarketingInsightConfigValidEnums = "cost_per_action_type"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerAdClick SourceFacebookMarketingInsightConfigValidEnums = "cost_per_ad_click"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerConversion SourceFacebookMarketingInsightConfigValidEnums = "cost_per_conversion"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerDdaCountbyConvs SourceFacebookMarketingInsightConfigValidEnums = "cost_per_dda_countby_convs"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerEstimatedAdRecallers SourceFacebookMarketingInsightConfigValidEnums = "cost_per_estimated_ad_recallers"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerInlineLinkClick SourceFacebookMarketingInsightConfigValidEnums = "cost_per_inline_link_click"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerInlinePostEngagement SourceFacebookMarketingInsightConfigValidEnums = "cost_per_inline_post_engagement"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerOneThousandAdImpression SourceFacebookMarketingInsightConfigValidEnums = "cost_per_one_thousand_ad_impression"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerOutboundClick SourceFacebookMarketingInsightConfigValidEnums = "cost_per_outbound_click"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerThruplay SourceFacebookMarketingInsightConfigValidEnums = "cost_per_thruplay"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerUniqueActionType SourceFacebookMarketingInsightConfigValidEnums = "cost_per_unique_action_type"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerUniqueClick SourceFacebookMarketingInsightConfigValidEnums = "cost_per_unique_click"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerUniqueConversion SourceFacebookMarketingInsightConfigValidEnums = "cost_per_unique_conversion"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerUniqueInlineLinkClick SourceFacebookMarketingInsightConfigValidEnums = "cost_per_unique_inline_link_click"
- SourceFacebookMarketingInsightConfigValidEnumsCostPerUniqueOutboundClick SourceFacebookMarketingInsightConfigValidEnums = "cost_per_unique_outbound_click"
- SourceFacebookMarketingInsightConfigValidEnumsCpc SourceFacebookMarketingInsightConfigValidEnums = "cpc"
- SourceFacebookMarketingInsightConfigValidEnumsCpm SourceFacebookMarketingInsightConfigValidEnums = "cpm"
- SourceFacebookMarketingInsightConfigValidEnumsCpp SourceFacebookMarketingInsightConfigValidEnums = "cpp"
- SourceFacebookMarketingInsightConfigValidEnumsCreatedTime SourceFacebookMarketingInsightConfigValidEnums = "created_time"
- SourceFacebookMarketingInsightConfigValidEnumsCreativeMediaType SourceFacebookMarketingInsightConfigValidEnums = "creative_media_type"
- SourceFacebookMarketingInsightConfigValidEnumsCtr SourceFacebookMarketingInsightConfigValidEnums = "ctr"
- SourceFacebookMarketingInsightConfigValidEnumsDateStart SourceFacebookMarketingInsightConfigValidEnums = "date_start"
- SourceFacebookMarketingInsightConfigValidEnumsDateStop SourceFacebookMarketingInsightConfigValidEnums = "date_stop"
- SourceFacebookMarketingInsightConfigValidEnumsDdaCountbyConvs SourceFacebookMarketingInsightConfigValidEnums = "dda_countby_convs"
- SourceFacebookMarketingInsightConfigValidEnumsDdaResults SourceFacebookMarketingInsightConfigValidEnums = "dda_results"
- SourceFacebookMarketingInsightConfigValidEnumsEngagementRateRanking SourceFacebookMarketingInsightConfigValidEnums = "engagement_rate_ranking"
- SourceFacebookMarketingInsightConfigValidEnumsEstimatedAdRecallRate SourceFacebookMarketingInsightConfigValidEnums = "estimated_ad_recall_rate"
- SourceFacebookMarketingInsightConfigValidEnumsEstimatedAdRecallRateLowerBound SourceFacebookMarketingInsightConfigValidEnums = "estimated_ad_recall_rate_lower_bound"
- SourceFacebookMarketingInsightConfigValidEnumsEstimatedAdRecallRateUpperBound SourceFacebookMarketingInsightConfigValidEnums = "estimated_ad_recall_rate_upper_bound"
- SourceFacebookMarketingInsightConfigValidEnumsEstimatedAdRecallers SourceFacebookMarketingInsightConfigValidEnums = "estimated_ad_recallers"
- SourceFacebookMarketingInsightConfigValidEnumsEstimatedAdRecallersLowerBound SourceFacebookMarketingInsightConfigValidEnums = "estimated_ad_recallers_lower_bound"
- SourceFacebookMarketingInsightConfigValidEnumsEstimatedAdRecallersUpperBound SourceFacebookMarketingInsightConfigValidEnums = "estimated_ad_recallers_upper_bound"
- SourceFacebookMarketingInsightConfigValidEnumsFrequency SourceFacebookMarketingInsightConfigValidEnums = "frequency"
- SourceFacebookMarketingInsightConfigValidEnumsFullViewImpressions SourceFacebookMarketingInsightConfigValidEnums = "full_view_impressions"
- SourceFacebookMarketingInsightConfigValidEnumsFullViewReach SourceFacebookMarketingInsightConfigValidEnums = "full_view_reach"
- SourceFacebookMarketingInsightConfigValidEnumsGenderTargeting SourceFacebookMarketingInsightConfigValidEnums = "gender_targeting"
- SourceFacebookMarketingInsightConfigValidEnumsImpressions SourceFacebookMarketingInsightConfigValidEnums = "impressions"
- SourceFacebookMarketingInsightConfigValidEnumsInlineLinkClickCtr SourceFacebookMarketingInsightConfigValidEnums = "inline_link_click_ctr"
- SourceFacebookMarketingInsightConfigValidEnumsInlineLinkClicks SourceFacebookMarketingInsightConfigValidEnums = "inline_link_clicks"
- SourceFacebookMarketingInsightConfigValidEnumsInlinePostEngagement SourceFacebookMarketingInsightConfigValidEnums = "inline_post_engagement"
- SourceFacebookMarketingInsightConfigValidEnumsInstagramUpcomingEventRemindersSet SourceFacebookMarketingInsightConfigValidEnums = "instagram_upcoming_event_reminders_set"
- SourceFacebookMarketingInsightConfigValidEnumsInstantExperienceClicksToOpen SourceFacebookMarketingInsightConfigValidEnums = "instant_experience_clicks_to_open"
- SourceFacebookMarketingInsightConfigValidEnumsInstantExperienceClicksToStart SourceFacebookMarketingInsightConfigValidEnums = "instant_experience_clicks_to_start"
- SourceFacebookMarketingInsightConfigValidEnumsInstantExperienceOutboundClicks SourceFacebookMarketingInsightConfigValidEnums = "instant_experience_outbound_clicks"
- SourceFacebookMarketingInsightConfigValidEnumsInteractiveComponentTap SourceFacebookMarketingInsightConfigValidEnums = "interactive_component_tap"
- SourceFacebookMarketingInsightConfigValidEnumsLabels SourceFacebookMarketingInsightConfigValidEnums = "labels"
- SourceFacebookMarketingInsightConfigValidEnumsLocation SourceFacebookMarketingInsightConfigValidEnums = "location"
- SourceFacebookMarketingInsightConfigValidEnumsMobileAppPurchaseRoas SourceFacebookMarketingInsightConfigValidEnums = "mobile_app_purchase_roas"
- SourceFacebookMarketingInsightConfigValidEnumsObjective SourceFacebookMarketingInsightConfigValidEnums = "objective"
- SourceFacebookMarketingInsightConfigValidEnumsOptimizationGoal SourceFacebookMarketingInsightConfigValidEnums = "optimization_goal"
- SourceFacebookMarketingInsightConfigValidEnumsOutboundClicks SourceFacebookMarketingInsightConfigValidEnums = "outbound_clicks"
- SourceFacebookMarketingInsightConfigValidEnumsOutboundClicksCtr SourceFacebookMarketingInsightConfigValidEnums = "outbound_clicks_ctr"
- SourceFacebookMarketingInsightConfigValidEnumsPlacePageName SourceFacebookMarketingInsightConfigValidEnums = "place_page_name"
- SourceFacebookMarketingInsightConfigValidEnumsPurchaseRoas SourceFacebookMarketingInsightConfigValidEnums = "purchase_roas"
- SourceFacebookMarketingInsightConfigValidEnumsQualifyingQuestionQualifyAnswerRate SourceFacebookMarketingInsightConfigValidEnums = "qualifying_question_qualify_answer_rate"
- SourceFacebookMarketingInsightConfigValidEnumsQualityRanking SourceFacebookMarketingInsightConfigValidEnums = "quality_ranking"
- SourceFacebookMarketingInsightConfigValidEnumsQualityScoreEctr SourceFacebookMarketingInsightConfigValidEnums = "quality_score_ectr"
- SourceFacebookMarketingInsightConfigValidEnumsQualityScoreEcvr SourceFacebookMarketingInsightConfigValidEnums = "quality_score_ecvr"
- SourceFacebookMarketingInsightConfigValidEnumsQualityScoreOrganic SourceFacebookMarketingInsightConfigValidEnums = "quality_score_organic"
- SourceFacebookMarketingInsightConfigValidEnumsReach SourceFacebookMarketingInsightConfigValidEnums = "reach"
- SourceFacebookMarketingInsightConfigValidEnumsSocialSpend SourceFacebookMarketingInsightConfigValidEnums = "social_spend"
- SourceFacebookMarketingInsightConfigValidEnumsSpend SourceFacebookMarketingInsightConfigValidEnums = "spend"
- SourceFacebookMarketingInsightConfigValidEnumsTotalPostbacks SourceFacebookMarketingInsightConfigValidEnums = "total_postbacks"
- SourceFacebookMarketingInsightConfigValidEnumsTotalPostbacksDetailed SourceFacebookMarketingInsightConfigValidEnums = "total_postbacks_detailed"
- SourceFacebookMarketingInsightConfigValidEnumsTotalPostbacksDetailedV4 SourceFacebookMarketingInsightConfigValidEnums = "total_postbacks_detailed_v4"
- SourceFacebookMarketingInsightConfigValidEnumsUniqueActions SourceFacebookMarketingInsightConfigValidEnums = "unique_actions"
- SourceFacebookMarketingInsightConfigValidEnumsUniqueClicks SourceFacebookMarketingInsightConfigValidEnums = "unique_clicks"
- SourceFacebookMarketingInsightConfigValidEnumsUniqueConversions SourceFacebookMarketingInsightConfigValidEnums = "unique_conversions"
- SourceFacebookMarketingInsightConfigValidEnumsUniqueCtr SourceFacebookMarketingInsightConfigValidEnums = "unique_ctr"
- SourceFacebookMarketingInsightConfigValidEnumsUniqueInlineLinkClickCtr SourceFacebookMarketingInsightConfigValidEnums = "unique_inline_link_click_ctr"
- SourceFacebookMarketingInsightConfigValidEnumsUniqueInlineLinkClicks SourceFacebookMarketingInsightConfigValidEnums = "unique_inline_link_clicks"
- SourceFacebookMarketingInsightConfigValidEnumsUniqueLinkClicksCtr SourceFacebookMarketingInsightConfigValidEnums = "unique_link_clicks_ctr"
- SourceFacebookMarketingInsightConfigValidEnumsUniqueOutboundClicks SourceFacebookMarketingInsightConfigValidEnums = "unique_outbound_clicks"
- SourceFacebookMarketingInsightConfigValidEnumsUniqueOutboundClicksCtr SourceFacebookMarketingInsightConfigValidEnums = "unique_outbound_clicks_ctr"
- SourceFacebookMarketingInsightConfigValidEnumsUniqueVideoContinuous2SecWatchedActions SourceFacebookMarketingInsightConfigValidEnums = "unique_video_continuous_2_sec_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsUniqueVideoView15Sec SourceFacebookMarketingInsightConfigValidEnums = "unique_video_view_15_sec"
- SourceFacebookMarketingInsightConfigValidEnumsUpdatedTime SourceFacebookMarketingInsightConfigValidEnums = "updated_time"
- SourceFacebookMarketingInsightConfigValidEnumsVideo15SecWatchedActions SourceFacebookMarketingInsightConfigValidEnums = "video_15_sec_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideo30SecWatchedActions SourceFacebookMarketingInsightConfigValidEnums = "video_30_sec_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoAvgTimeWatchedActions SourceFacebookMarketingInsightConfigValidEnums = "video_avg_time_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoContinuous2SecWatchedActions SourceFacebookMarketingInsightConfigValidEnums = "video_continuous_2_sec_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoP100WatchedActions SourceFacebookMarketingInsightConfigValidEnums = "video_p100_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoP25WatchedActions SourceFacebookMarketingInsightConfigValidEnums = "video_p25_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoP50WatchedActions SourceFacebookMarketingInsightConfigValidEnums = "video_p50_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoP75WatchedActions SourceFacebookMarketingInsightConfigValidEnums = "video_p75_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoP95WatchedActions SourceFacebookMarketingInsightConfigValidEnums = "video_p95_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoPlayActions SourceFacebookMarketingInsightConfigValidEnums = "video_play_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoPlayCurveActions SourceFacebookMarketingInsightConfigValidEnums = "video_play_curve_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoPlayRetention0To15sActions SourceFacebookMarketingInsightConfigValidEnums = "video_play_retention_0_to_15s_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoPlayRetention20To60sActions SourceFacebookMarketingInsightConfigValidEnums = "video_play_retention_20_to_60s_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoPlayRetentionGraphActions SourceFacebookMarketingInsightConfigValidEnums = "video_play_retention_graph_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoThruplayWatchedActions SourceFacebookMarketingInsightConfigValidEnums = "video_thruplay_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsVideoTimeWatchedActions SourceFacebookMarketingInsightConfigValidEnums = "video_time_watched_actions"
- SourceFacebookMarketingInsightConfigValidEnumsWebsiteCtr SourceFacebookMarketingInsightConfigValidEnums = "website_ctr"
- SourceFacebookMarketingInsightConfigValidEnumsWebsitePurchaseRoas SourceFacebookMarketingInsightConfigValidEnums = "website_purchase_roas"
- SourceFacebookMarketingInsightConfigValidEnumsWishBid SourceFacebookMarketingInsightConfigValidEnums = "wish_bid"
+ SourceFacebookMarketingValidEnumsAccountCurrency SourceFacebookMarketingValidEnums = "account_currency"
+ SourceFacebookMarketingValidEnumsAccountID SourceFacebookMarketingValidEnums = "account_id"
+ SourceFacebookMarketingValidEnumsAccountName SourceFacebookMarketingValidEnums = "account_name"
+ SourceFacebookMarketingValidEnumsActionValues SourceFacebookMarketingValidEnums = "action_values"
+ SourceFacebookMarketingValidEnumsActions SourceFacebookMarketingValidEnums = "actions"
+ SourceFacebookMarketingValidEnumsAdClickActions SourceFacebookMarketingValidEnums = "ad_click_actions"
+ SourceFacebookMarketingValidEnumsAdID SourceFacebookMarketingValidEnums = "ad_id"
+ SourceFacebookMarketingValidEnumsAdImpressionActions SourceFacebookMarketingValidEnums = "ad_impression_actions"
+ SourceFacebookMarketingValidEnumsAdName SourceFacebookMarketingValidEnums = "ad_name"
+ SourceFacebookMarketingValidEnumsAdsetEnd SourceFacebookMarketingValidEnums = "adset_end"
+ SourceFacebookMarketingValidEnumsAdsetID SourceFacebookMarketingValidEnums = "adset_id"
+ SourceFacebookMarketingValidEnumsAdsetName SourceFacebookMarketingValidEnums = "adset_name"
+ SourceFacebookMarketingValidEnumsAdsetStart SourceFacebookMarketingValidEnums = "adset_start"
+ SourceFacebookMarketingValidEnumsAgeTargeting SourceFacebookMarketingValidEnums = "age_targeting"
+ SourceFacebookMarketingValidEnumsAttributionSetting SourceFacebookMarketingValidEnums = "attribution_setting"
+ SourceFacebookMarketingValidEnumsAuctionBid SourceFacebookMarketingValidEnums = "auction_bid"
+ SourceFacebookMarketingValidEnumsAuctionCompetitiveness SourceFacebookMarketingValidEnums = "auction_competitiveness"
+ SourceFacebookMarketingValidEnumsAuctionMaxCompetitorBid SourceFacebookMarketingValidEnums = "auction_max_competitor_bid"
+ SourceFacebookMarketingValidEnumsBuyingType SourceFacebookMarketingValidEnums = "buying_type"
+ SourceFacebookMarketingValidEnumsCampaignID SourceFacebookMarketingValidEnums = "campaign_id"
+ SourceFacebookMarketingValidEnumsCampaignName SourceFacebookMarketingValidEnums = "campaign_name"
+ SourceFacebookMarketingValidEnumsCanvasAvgViewPercent SourceFacebookMarketingValidEnums = "canvas_avg_view_percent"
+ SourceFacebookMarketingValidEnumsCanvasAvgViewTime SourceFacebookMarketingValidEnums = "canvas_avg_view_time"
+ SourceFacebookMarketingValidEnumsCatalogSegmentActions SourceFacebookMarketingValidEnums = "catalog_segment_actions"
+ SourceFacebookMarketingValidEnumsCatalogSegmentValue SourceFacebookMarketingValidEnums = "catalog_segment_value"
+ SourceFacebookMarketingValidEnumsCatalogSegmentValueMobilePurchaseRoas SourceFacebookMarketingValidEnums = "catalog_segment_value_mobile_purchase_roas"
+ SourceFacebookMarketingValidEnumsCatalogSegmentValueOmniPurchaseRoas SourceFacebookMarketingValidEnums = "catalog_segment_value_omni_purchase_roas"
+ SourceFacebookMarketingValidEnumsCatalogSegmentValueWebsitePurchaseRoas SourceFacebookMarketingValidEnums = "catalog_segment_value_website_purchase_roas"
+ SourceFacebookMarketingValidEnumsClicks SourceFacebookMarketingValidEnums = "clicks"
+ SourceFacebookMarketingValidEnumsConversionRateRanking SourceFacebookMarketingValidEnums = "conversion_rate_ranking"
+ SourceFacebookMarketingValidEnumsConversionValues SourceFacebookMarketingValidEnums = "conversion_values"
+ SourceFacebookMarketingValidEnumsConversions SourceFacebookMarketingValidEnums = "conversions"
+ SourceFacebookMarketingValidEnumsConvertedProductQuantity SourceFacebookMarketingValidEnums = "converted_product_quantity"
+ SourceFacebookMarketingValidEnumsConvertedProductValue SourceFacebookMarketingValidEnums = "converted_product_value"
+ SourceFacebookMarketingValidEnumsCostPer15SecVideoView SourceFacebookMarketingValidEnums = "cost_per_15_sec_video_view"
+ SourceFacebookMarketingValidEnumsCostPer2SecContinuousVideoView SourceFacebookMarketingValidEnums = "cost_per_2_sec_continuous_video_view"
+ SourceFacebookMarketingValidEnumsCostPerActionType SourceFacebookMarketingValidEnums = "cost_per_action_type"
+ SourceFacebookMarketingValidEnumsCostPerAdClick SourceFacebookMarketingValidEnums = "cost_per_ad_click"
+ SourceFacebookMarketingValidEnumsCostPerConversion SourceFacebookMarketingValidEnums = "cost_per_conversion"
+ SourceFacebookMarketingValidEnumsCostPerDdaCountbyConvs SourceFacebookMarketingValidEnums = "cost_per_dda_countby_convs"
+ SourceFacebookMarketingValidEnumsCostPerEstimatedAdRecallers SourceFacebookMarketingValidEnums = "cost_per_estimated_ad_recallers"
+ SourceFacebookMarketingValidEnumsCostPerInlineLinkClick SourceFacebookMarketingValidEnums = "cost_per_inline_link_click"
+ SourceFacebookMarketingValidEnumsCostPerInlinePostEngagement SourceFacebookMarketingValidEnums = "cost_per_inline_post_engagement"
+ SourceFacebookMarketingValidEnumsCostPerOneThousandAdImpression SourceFacebookMarketingValidEnums = "cost_per_one_thousand_ad_impression"
+ SourceFacebookMarketingValidEnumsCostPerOutboundClick SourceFacebookMarketingValidEnums = "cost_per_outbound_click"
+ SourceFacebookMarketingValidEnumsCostPerThruplay SourceFacebookMarketingValidEnums = "cost_per_thruplay"
+ SourceFacebookMarketingValidEnumsCostPerUniqueActionType SourceFacebookMarketingValidEnums = "cost_per_unique_action_type"
+ SourceFacebookMarketingValidEnumsCostPerUniqueClick SourceFacebookMarketingValidEnums = "cost_per_unique_click"
+ SourceFacebookMarketingValidEnumsCostPerUniqueConversion SourceFacebookMarketingValidEnums = "cost_per_unique_conversion"
+ SourceFacebookMarketingValidEnumsCostPerUniqueInlineLinkClick SourceFacebookMarketingValidEnums = "cost_per_unique_inline_link_click"
+ SourceFacebookMarketingValidEnumsCostPerUniqueOutboundClick SourceFacebookMarketingValidEnums = "cost_per_unique_outbound_click"
+ SourceFacebookMarketingValidEnumsCpc SourceFacebookMarketingValidEnums = "cpc"
+ SourceFacebookMarketingValidEnumsCpm SourceFacebookMarketingValidEnums = "cpm"
+ SourceFacebookMarketingValidEnumsCpp SourceFacebookMarketingValidEnums = "cpp"
+ SourceFacebookMarketingValidEnumsCreatedTime SourceFacebookMarketingValidEnums = "created_time"
+ SourceFacebookMarketingValidEnumsCreativeMediaType SourceFacebookMarketingValidEnums = "creative_media_type"
+ SourceFacebookMarketingValidEnumsCtr SourceFacebookMarketingValidEnums = "ctr"
+ SourceFacebookMarketingValidEnumsDateStart SourceFacebookMarketingValidEnums = "date_start"
+ SourceFacebookMarketingValidEnumsDateStop SourceFacebookMarketingValidEnums = "date_stop"
+ SourceFacebookMarketingValidEnumsDdaCountbyConvs SourceFacebookMarketingValidEnums = "dda_countby_convs"
+ SourceFacebookMarketingValidEnumsDdaResults SourceFacebookMarketingValidEnums = "dda_results"
+ SourceFacebookMarketingValidEnumsEngagementRateRanking SourceFacebookMarketingValidEnums = "engagement_rate_ranking"
+ SourceFacebookMarketingValidEnumsEstimatedAdRecallRate SourceFacebookMarketingValidEnums = "estimated_ad_recall_rate"
+ SourceFacebookMarketingValidEnumsEstimatedAdRecallRateLowerBound SourceFacebookMarketingValidEnums = "estimated_ad_recall_rate_lower_bound"
+ SourceFacebookMarketingValidEnumsEstimatedAdRecallRateUpperBound SourceFacebookMarketingValidEnums = "estimated_ad_recall_rate_upper_bound"
+ SourceFacebookMarketingValidEnumsEstimatedAdRecallers SourceFacebookMarketingValidEnums = "estimated_ad_recallers"
+ SourceFacebookMarketingValidEnumsEstimatedAdRecallersLowerBound SourceFacebookMarketingValidEnums = "estimated_ad_recallers_lower_bound"
+ SourceFacebookMarketingValidEnumsEstimatedAdRecallersUpperBound SourceFacebookMarketingValidEnums = "estimated_ad_recallers_upper_bound"
+ SourceFacebookMarketingValidEnumsFrequency SourceFacebookMarketingValidEnums = "frequency"
+ SourceFacebookMarketingValidEnumsFullViewImpressions SourceFacebookMarketingValidEnums = "full_view_impressions"
+ SourceFacebookMarketingValidEnumsFullViewReach SourceFacebookMarketingValidEnums = "full_view_reach"
+ SourceFacebookMarketingValidEnumsGenderTargeting SourceFacebookMarketingValidEnums = "gender_targeting"
+ SourceFacebookMarketingValidEnumsImpressions SourceFacebookMarketingValidEnums = "impressions"
+ SourceFacebookMarketingValidEnumsInlineLinkClickCtr SourceFacebookMarketingValidEnums = "inline_link_click_ctr"
+ SourceFacebookMarketingValidEnumsInlineLinkClicks SourceFacebookMarketingValidEnums = "inline_link_clicks"
+ SourceFacebookMarketingValidEnumsInlinePostEngagement SourceFacebookMarketingValidEnums = "inline_post_engagement"
+ SourceFacebookMarketingValidEnumsInstagramUpcomingEventRemindersSet SourceFacebookMarketingValidEnums = "instagram_upcoming_event_reminders_set"
+ SourceFacebookMarketingValidEnumsInstantExperienceClicksToOpen SourceFacebookMarketingValidEnums = "instant_experience_clicks_to_open"
+ SourceFacebookMarketingValidEnumsInstantExperienceClicksToStart SourceFacebookMarketingValidEnums = "instant_experience_clicks_to_start"
+ SourceFacebookMarketingValidEnumsInstantExperienceOutboundClicks SourceFacebookMarketingValidEnums = "instant_experience_outbound_clicks"
+ SourceFacebookMarketingValidEnumsInteractiveComponentTap SourceFacebookMarketingValidEnums = "interactive_component_tap"
+ SourceFacebookMarketingValidEnumsLabels SourceFacebookMarketingValidEnums = "labels"
+ SourceFacebookMarketingValidEnumsLocation SourceFacebookMarketingValidEnums = "location"
+ SourceFacebookMarketingValidEnumsMobileAppPurchaseRoas SourceFacebookMarketingValidEnums = "mobile_app_purchase_roas"
+ SourceFacebookMarketingValidEnumsObjective SourceFacebookMarketingValidEnums = "objective"
+ SourceFacebookMarketingValidEnumsOptimizationGoal SourceFacebookMarketingValidEnums = "optimization_goal"
+ SourceFacebookMarketingValidEnumsOutboundClicks SourceFacebookMarketingValidEnums = "outbound_clicks"
+ SourceFacebookMarketingValidEnumsOutboundClicksCtr SourceFacebookMarketingValidEnums = "outbound_clicks_ctr"
+ SourceFacebookMarketingValidEnumsPlacePageName SourceFacebookMarketingValidEnums = "place_page_name"
+ SourceFacebookMarketingValidEnumsPurchaseRoas SourceFacebookMarketingValidEnums = "purchase_roas"
+ SourceFacebookMarketingValidEnumsQualifyingQuestionQualifyAnswerRate SourceFacebookMarketingValidEnums = "qualifying_question_qualify_answer_rate"
+ SourceFacebookMarketingValidEnumsQualityRanking SourceFacebookMarketingValidEnums = "quality_ranking"
+ SourceFacebookMarketingValidEnumsQualityScoreEctr SourceFacebookMarketingValidEnums = "quality_score_ectr"
+ SourceFacebookMarketingValidEnumsQualityScoreEcvr SourceFacebookMarketingValidEnums = "quality_score_ecvr"
+ SourceFacebookMarketingValidEnumsQualityScoreOrganic SourceFacebookMarketingValidEnums = "quality_score_organic"
+ SourceFacebookMarketingValidEnumsReach SourceFacebookMarketingValidEnums = "reach"
+ SourceFacebookMarketingValidEnumsSocialSpend SourceFacebookMarketingValidEnums = "social_spend"
+ SourceFacebookMarketingValidEnumsSpend SourceFacebookMarketingValidEnums = "spend"
+ SourceFacebookMarketingValidEnumsTotalPostbacks SourceFacebookMarketingValidEnums = "total_postbacks"
+ SourceFacebookMarketingValidEnumsTotalPostbacksDetailed SourceFacebookMarketingValidEnums = "total_postbacks_detailed"
+ SourceFacebookMarketingValidEnumsTotalPostbacksDetailedV4 SourceFacebookMarketingValidEnums = "total_postbacks_detailed_v4"
+ SourceFacebookMarketingValidEnumsUniqueActions SourceFacebookMarketingValidEnums = "unique_actions"
+ SourceFacebookMarketingValidEnumsUniqueClicks SourceFacebookMarketingValidEnums = "unique_clicks"
+ SourceFacebookMarketingValidEnumsUniqueConversions SourceFacebookMarketingValidEnums = "unique_conversions"
+ SourceFacebookMarketingValidEnumsUniqueCtr SourceFacebookMarketingValidEnums = "unique_ctr"
+ SourceFacebookMarketingValidEnumsUniqueInlineLinkClickCtr SourceFacebookMarketingValidEnums = "unique_inline_link_click_ctr"
+ SourceFacebookMarketingValidEnumsUniqueInlineLinkClicks SourceFacebookMarketingValidEnums = "unique_inline_link_clicks"
+ SourceFacebookMarketingValidEnumsUniqueLinkClicksCtr SourceFacebookMarketingValidEnums = "unique_link_clicks_ctr"
+ SourceFacebookMarketingValidEnumsUniqueOutboundClicks SourceFacebookMarketingValidEnums = "unique_outbound_clicks"
+ SourceFacebookMarketingValidEnumsUniqueOutboundClicksCtr SourceFacebookMarketingValidEnums = "unique_outbound_clicks_ctr"
+ SourceFacebookMarketingValidEnumsUniqueVideoContinuous2SecWatchedActions SourceFacebookMarketingValidEnums = "unique_video_continuous_2_sec_watched_actions"
+ SourceFacebookMarketingValidEnumsUniqueVideoView15Sec SourceFacebookMarketingValidEnums = "unique_video_view_15_sec"
+ SourceFacebookMarketingValidEnumsUpdatedTime SourceFacebookMarketingValidEnums = "updated_time"
+ SourceFacebookMarketingValidEnumsVideo15SecWatchedActions SourceFacebookMarketingValidEnums = "video_15_sec_watched_actions"
+ SourceFacebookMarketingValidEnumsVideo30SecWatchedActions SourceFacebookMarketingValidEnums = "video_30_sec_watched_actions"
+ SourceFacebookMarketingValidEnumsVideoAvgTimeWatchedActions SourceFacebookMarketingValidEnums = "video_avg_time_watched_actions"
+ SourceFacebookMarketingValidEnumsVideoContinuous2SecWatchedActions SourceFacebookMarketingValidEnums = "video_continuous_2_sec_watched_actions"
+ SourceFacebookMarketingValidEnumsVideoP100WatchedActions SourceFacebookMarketingValidEnums = "video_p100_watched_actions"
+ SourceFacebookMarketingValidEnumsVideoP25WatchedActions SourceFacebookMarketingValidEnums = "video_p25_watched_actions"
+ SourceFacebookMarketingValidEnumsVideoP50WatchedActions SourceFacebookMarketingValidEnums = "video_p50_watched_actions"
+ SourceFacebookMarketingValidEnumsVideoP75WatchedActions SourceFacebookMarketingValidEnums = "video_p75_watched_actions"
+ SourceFacebookMarketingValidEnumsVideoP95WatchedActions SourceFacebookMarketingValidEnums = "video_p95_watched_actions"
+ SourceFacebookMarketingValidEnumsVideoPlayActions SourceFacebookMarketingValidEnums = "video_play_actions"
+ SourceFacebookMarketingValidEnumsVideoPlayCurveActions SourceFacebookMarketingValidEnums = "video_play_curve_actions"
+ SourceFacebookMarketingValidEnumsVideoPlayRetention0To15sActions SourceFacebookMarketingValidEnums = "video_play_retention_0_to_15s_actions"
+ SourceFacebookMarketingValidEnumsVideoPlayRetention20To60sActions SourceFacebookMarketingValidEnums = "video_play_retention_20_to_60s_actions"
+ SourceFacebookMarketingValidEnumsVideoPlayRetentionGraphActions SourceFacebookMarketingValidEnums = "video_play_retention_graph_actions"
+ SourceFacebookMarketingValidEnumsVideoThruplayWatchedActions SourceFacebookMarketingValidEnums = "video_thruplay_watched_actions"
+ SourceFacebookMarketingValidEnumsVideoTimeWatchedActions SourceFacebookMarketingValidEnums = "video_time_watched_actions"
+ SourceFacebookMarketingValidEnumsWebsiteCtr SourceFacebookMarketingValidEnums = "website_ctr"
+ SourceFacebookMarketingValidEnumsWebsitePurchaseRoas SourceFacebookMarketingValidEnums = "website_purchase_roas"
+ SourceFacebookMarketingValidEnumsWishBid SourceFacebookMarketingValidEnums = "wish_bid"
)
-func (e SourceFacebookMarketingInsightConfigValidEnums) ToPointer() *SourceFacebookMarketingInsightConfigValidEnums {
+func (e SourceFacebookMarketingValidEnums) ToPointer() *SourceFacebookMarketingValidEnums {
return &e
}
-func (e *SourceFacebookMarketingInsightConfigValidEnums) UnmarshalJSON(data []byte) error {
+func (e *SourceFacebookMarketingValidEnums) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -620,28 +621,28 @@ func (e *SourceFacebookMarketingInsightConfigValidEnums) UnmarshalJSON(data []by
case "website_purchase_roas":
fallthrough
case "wish_bid":
- *e = SourceFacebookMarketingInsightConfigValidEnums(v)
+ *e = SourceFacebookMarketingValidEnums(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookMarketingInsightConfigValidEnums: %v", v)
+ return fmt.Errorf("invalid value for SourceFacebookMarketingValidEnums: %v", v)
}
}
-// SourceFacebookMarketingInsightConfigLevel - Chosen level for API
-type SourceFacebookMarketingInsightConfigLevel string
+// SourceFacebookMarketingLevel - Chosen level for API
+type SourceFacebookMarketingLevel string
const (
- SourceFacebookMarketingInsightConfigLevelAd SourceFacebookMarketingInsightConfigLevel = "ad"
- SourceFacebookMarketingInsightConfigLevelAdset SourceFacebookMarketingInsightConfigLevel = "adset"
- SourceFacebookMarketingInsightConfigLevelCampaign SourceFacebookMarketingInsightConfigLevel = "campaign"
- SourceFacebookMarketingInsightConfigLevelAccount SourceFacebookMarketingInsightConfigLevel = "account"
+ SourceFacebookMarketingLevelAd SourceFacebookMarketingLevel = "ad"
+ SourceFacebookMarketingLevelAdset SourceFacebookMarketingLevel = "adset"
+ SourceFacebookMarketingLevelCampaign SourceFacebookMarketingLevel = "campaign"
+ SourceFacebookMarketingLevelAccount SourceFacebookMarketingLevel = "account"
)
-func (e SourceFacebookMarketingInsightConfigLevel) ToPointer() *SourceFacebookMarketingInsightConfigLevel {
+func (e SourceFacebookMarketingLevel) ToPointer() *SourceFacebookMarketingLevel {
return &e
}
-func (e *SourceFacebookMarketingInsightConfigLevel) UnmarshalJSON(data []byte) error {
+func (e *SourceFacebookMarketingLevel) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -654,68 +655,149 @@ func (e *SourceFacebookMarketingInsightConfigLevel) UnmarshalJSON(data []byte) e
case "campaign":
fallthrough
case "account":
- *e = SourceFacebookMarketingInsightConfigLevel(v)
+ *e = SourceFacebookMarketingLevel(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookMarketingInsightConfigLevel: %v", v)
+ return fmt.Errorf("invalid value for SourceFacebookMarketingLevel: %v", v)
}
}
// SourceFacebookMarketingInsightConfig - Config for custom insights
type SourceFacebookMarketingInsightConfig struct {
// A list of chosen action_breakdowns for action_breakdowns
- ActionBreakdowns []SourceFacebookMarketingInsightConfigValidActionBreakdowns `json:"action_breakdowns,omitempty"`
+ ActionBreakdowns []SourceFacebookMarketingValidActionBreakdowns `json:"action_breakdowns,omitempty"`
// Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.
- ActionReportTime *SourceFacebookMarketingInsightConfigActionReportTime `json:"action_report_time,omitempty"`
+ ActionReportTime *SourceFacebookMarketingActionReportTime `default:"mixed" json:"action_report_time"`
// A list of chosen breakdowns for breakdowns
- Breakdowns []SourceFacebookMarketingInsightConfigValidBreakdowns `json:"breakdowns,omitempty"`
+ Breakdowns []SourceFacebookMarketingValidBreakdowns `json:"breakdowns,omitempty"`
// The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.
EndDate *time.Time `json:"end_date,omitempty"`
// A list of chosen fields for fields parameter
- Fields []SourceFacebookMarketingInsightConfigValidEnums `json:"fields,omitempty"`
+ Fields []SourceFacebookMarketingValidEnums `json:"fields,omitempty"`
// The attribution window
- InsightsLookbackWindow *int64 `json:"insights_lookback_window,omitempty"`
+ InsightsLookbackWindow *int64 `default:"28" json:"insights_lookback_window"`
// Chosen level for API
- Level *SourceFacebookMarketingInsightConfigLevel `json:"level,omitempty"`
+ Level *SourceFacebookMarketingLevel `default:"ad" json:"level"`
// The name value of insight
Name string `json:"name"`
// The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z.
StartDate *time.Time `json:"start_date,omitempty"`
// Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only).
- TimeIncrement *int64 `json:"time_increment,omitempty"`
+ TimeIncrement *int64 `default:"1" json:"time_increment"`
}
-type SourceFacebookMarketingFacebookMarketing string
+func (s SourceFacebookMarketingInsightConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFacebookMarketingInsightConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFacebookMarketingInsightConfig) GetActionBreakdowns() []SourceFacebookMarketingValidActionBreakdowns {
+ if o == nil {
+ return nil
+ }
+ return o.ActionBreakdowns
+}
+
+func (o *SourceFacebookMarketingInsightConfig) GetActionReportTime() *SourceFacebookMarketingActionReportTime {
+ if o == nil {
+ return nil
+ }
+ return o.ActionReportTime
+}
+
+func (o *SourceFacebookMarketingInsightConfig) GetBreakdowns() []SourceFacebookMarketingValidBreakdowns {
+ if o == nil {
+ return nil
+ }
+ return o.Breakdowns
+}
+
+func (o *SourceFacebookMarketingInsightConfig) GetEndDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceFacebookMarketingInsightConfig) GetFields() []SourceFacebookMarketingValidEnums {
+ if o == nil {
+ return nil
+ }
+ return o.Fields
+}
+
+func (o *SourceFacebookMarketingInsightConfig) GetInsightsLookbackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.InsightsLookbackWindow
+}
+
+func (o *SourceFacebookMarketingInsightConfig) GetLevel() *SourceFacebookMarketingLevel {
+ if o == nil {
+ return nil
+ }
+ return o.Level
+}
+
+func (o *SourceFacebookMarketingInsightConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFacebookMarketingInsightConfig) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceFacebookMarketingInsightConfig) GetTimeIncrement() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TimeIncrement
+}
+
+type FacebookMarketing string
const (
- SourceFacebookMarketingFacebookMarketingFacebookMarketing SourceFacebookMarketingFacebookMarketing = "facebook-marketing"
+ FacebookMarketingFacebookMarketing FacebookMarketing = "facebook-marketing"
)
-func (e SourceFacebookMarketingFacebookMarketing) ToPointer() *SourceFacebookMarketingFacebookMarketing {
+func (e FacebookMarketing) ToPointer() *FacebookMarketing {
return &e
}
-func (e *SourceFacebookMarketingFacebookMarketing) UnmarshalJSON(data []byte) error {
+func (e *FacebookMarketing) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "facebook-marketing":
- *e = SourceFacebookMarketingFacebookMarketing(v)
+ *e = FacebookMarketing(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookMarketingFacebookMarketing: %v", v)
+ return fmt.Errorf("invalid value for FacebookMarketing: %v", v)
}
}
type SourceFacebookMarketing struct {
// The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information.
AccessToken string `json:"access_token"`
- // The Facebook Ad account ID to use when pulling data from the Facebook Marketing API. Open your Meta Ads Manager. The Ad account ID number is in the account dropdown menu or in your browser's address bar. See the docs for more information.
+ // The Facebook Ad account ID to use when pulling data from the Facebook Marketing API. The Ad account ID number is in the account dropdown menu or in your browser's address bar of your Meta Ads Manager. See the docs for more information.
AccountID string `json:"account_id"`
// Allows action_breakdowns to be an empty list
- ActionBreakdownsAllowEmpty *bool `json:"action_breakdowns_allow_empty,omitempty"`
+ ActionBreakdownsAllowEmpty *bool `default:"true" json:"action_breakdowns_allow_empty"`
// The Client Id for your OAuth app
ClientID *string `json:"client_id,omitempty"`
// The Client Secret for your OAuth app
@@ -725,16 +807,113 @@ type SourceFacebookMarketing struct {
// The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.
EndDate *time.Time `json:"end_date,omitempty"`
// Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative.
- FetchThumbnailImages *bool `json:"fetch_thumbnail_images,omitempty"`
+ FetchThumbnailImages *bool `default:"false" json:"fetch_thumbnail_images"`
// Set to active if you want to include data from deleted Campaigns, Ads, and AdSets.
- IncludeDeleted *bool `json:"include_deleted,omitempty"`
+ IncludeDeleted *bool `default:"false" json:"include_deleted"`
// The attribution window. Facebook freezes insight data 28 days after it was generated, which means that all data from the past 28 days may have changed since we last emitted it, so you can retrieve refreshed insights from the past by setting this parameter. If you set a custom lookback window value in Facebook account, please provide the same value here.
- InsightsLookbackWindow *int64 `json:"insights_lookback_window,omitempty"`
- // Maximum batch size used when sending batch requests to Facebook API. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.
- MaxBatchSize *int64 `json:"max_batch_size,omitempty"`
+ InsightsLookbackWindow *int64 `default:"28" json:"insights_lookback_window"`
// Page size used when sending requests to Facebook API to specify number of records per page when response has pagination. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.
- PageSize *int64 `json:"page_size,omitempty"`
- SourceType SourceFacebookMarketingFacebookMarketing `json:"sourceType"`
- // The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
- StartDate time.Time `json:"start_date"`
+ PageSize *int64 `default:"100" json:"page_size"`
+ sourceType FacebookMarketing `const:"facebook-marketing" json:"sourceType"`
+ // The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data will be replicated for usual streams and only last 2 years for insight streams.
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceFacebookMarketing) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFacebookMarketing) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFacebookMarketing) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceFacebookMarketing) GetAccountID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccountID
+}
+
+func (o *SourceFacebookMarketing) GetActionBreakdownsAllowEmpty() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.ActionBreakdownsAllowEmpty
+}
+
+func (o *SourceFacebookMarketing) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceFacebookMarketing) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceFacebookMarketing) GetCustomInsights() []SourceFacebookMarketingInsightConfig {
+ if o == nil {
+ return nil
+ }
+ return o.CustomInsights
+}
+
+func (o *SourceFacebookMarketing) GetEndDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceFacebookMarketing) GetFetchThumbnailImages() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.FetchThumbnailImages
+}
+
+func (o *SourceFacebookMarketing) GetIncludeDeleted() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeDeleted
+}
+
+func (o *SourceFacebookMarketing) GetInsightsLookbackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.InsightsLookbackWindow
+}
+
+func (o *SourceFacebookMarketing) GetPageSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PageSize
+}
+
+func (o *SourceFacebookMarketing) GetSourceType() FacebookMarketing {
+ return FacebookMarketingFacebookMarketing
+}
+
+func (o *SourceFacebookMarketing) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcefacebookmarketingcreaterequest.go b/internal/sdk/pkg/models/shared/sourcefacebookmarketingcreaterequest.go
old mode 100755
new mode 100644
index 2084a682a..f6a0e18d7
--- a/internal/sdk/pkg/models/shared/sourcefacebookmarketingcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefacebookmarketingcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceFacebookMarketingCreateRequest struct {
Configuration SourceFacebookMarketing `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFacebookMarketingCreateRequest) GetConfiguration() SourceFacebookMarketing {
+ if o == nil {
+ return SourceFacebookMarketing{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFacebookMarketingCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceFacebookMarketingCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFacebookMarketingCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceFacebookMarketingCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefacebookmarketingputrequest.go b/internal/sdk/pkg/models/shared/sourcefacebookmarketingputrequest.go
old mode 100755
new mode 100644
index b388642df..2e4fda540
--- a/internal/sdk/pkg/models/shared/sourcefacebookmarketingputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefacebookmarketingputrequest.go
@@ -7,3 +7,24 @@ type SourceFacebookMarketingPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFacebookMarketingPutRequest) GetConfiguration() SourceFacebookMarketingUpdate {
+ if o == nil {
+ return SourceFacebookMarketingUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFacebookMarketingPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFacebookMarketingPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefacebookmarketingupdate.go b/internal/sdk/pkg/models/shared/sourcefacebookmarketingupdate.go
old mode 100755
new mode 100644
index c0a31b902..95043d48c
--- a/internal/sdk/pkg/models/shared/sourcefacebookmarketingupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcefacebookmarketingupdate.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-// SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns - An enumeration.
-type SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns string
+// ValidActionBreakdowns - An enumeration.
+type ValidActionBreakdowns string
const (
- SourceFacebookMarketingUpdateInsightConfigValidActionBreakdownsActionCanvasComponentName SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns = "action_canvas_component_name"
- SourceFacebookMarketingUpdateInsightConfigValidActionBreakdownsActionCarouselCardID SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns = "action_carousel_card_id"
- SourceFacebookMarketingUpdateInsightConfigValidActionBreakdownsActionCarouselCardName SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns = "action_carousel_card_name"
- SourceFacebookMarketingUpdateInsightConfigValidActionBreakdownsActionDestination SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns = "action_destination"
- SourceFacebookMarketingUpdateInsightConfigValidActionBreakdownsActionDevice SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns = "action_device"
- SourceFacebookMarketingUpdateInsightConfigValidActionBreakdownsActionReaction SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns = "action_reaction"
- SourceFacebookMarketingUpdateInsightConfigValidActionBreakdownsActionTargetID SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns = "action_target_id"
- SourceFacebookMarketingUpdateInsightConfigValidActionBreakdownsActionType SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns = "action_type"
- SourceFacebookMarketingUpdateInsightConfigValidActionBreakdownsActionVideoSound SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns = "action_video_sound"
- SourceFacebookMarketingUpdateInsightConfigValidActionBreakdownsActionVideoType SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns = "action_video_type"
+ ValidActionBreakdownsActionCanvasComponentName ValidActionBreakdowns = "action_canvas_component_name"
+ ValidActionBreakdownsActionCarouselCardID ValidActionBreakdowns = "action_carousel_card_id"
+ ValidActionBreakdownsActionCarouselCardName ValidActionBreakdowns = "action_carousel_card_name"
+ ValidActionBreakdownsActionDestination ValidActionBreakdowns = "action_destination"
+ ValidActionBreakdownsActionDevice ValidActionBreakdowns = "action_device"
+ ValidActionBreakdownsActionReaction ValidActionBreakdowns = "action_reaction"
+ ValidActionBreakdownsActionTargetID ValidActionBreakdowns = "action_target_id"
+ ValidActionBreakdownsActionType ValidActionBreakdowns = "action_type"
+ ValidActionBreakdownsActionVideoSound ValidActionBreakdowns = "action_video_sound"
+ ValidActionBreakdownsActionVideoType ValidActionBreakdowns = "action_video_type"
)
-func (e SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns) ToPointer() *SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns {
+func (e ValidActionBreakdowns) ToPointer() *ValidActionBreakdowns {
return &e
}
-func (e *SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns) UnmarshalJSON(data []byte) error {
+func (e *ValidActionBreakdowns) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -53,27 +54,27 @@ func (e *SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns) Unmars
case "action_video_sound":
fallthrough
case "action_video_type":
- *e = SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns(v)
+ *e = ValidActionBreakdowns(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns: %v", v)
+ return fmt.Errorf("invalid value for ValidActionBreakdowns: %v", v)
}
}
-// SourceFacebookMarketingUpdateInsightConfigActionReportTime - Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.
-type SourceFacebookMarketingUpdateInsightConfigActionReportTime string
+// ActionReportTime - Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.
+type ActionReportTime string
const (
- SourceFacebookMarketingUpdateInsightConfigActionReportTimeConversion SourceFacebookMarketingUpdateInsightConfigActionReportTime = "conversion"
- SourceFacebookMarketingUpdateInsightConfigActionReportTimeImpression SourceFacebookMarketingUpdateInsightConfigActionReportTime = "impression"
- SourceFacebookMarketingUpdateInsightConfigActionReportTimeMixed SourceFacebookMarketingUpdateInsightConfigActionReportTime = "mixed"
+ ActionReportTimeConversion ActionReportTime = "conversion"
+ ActionReportTimeImpression ActionReportTime = "impression"
+ ActionReportTimeMixed ActionReportTime = "mixed"
)
-func (e SourceFacebookMarketingUpdateInsightConfigActionReportTime) ToPointer() *SourceFacebookMarketingUpdateInsightConfigActionReportTime {
+func (e ActionReportTime) ToPointer() *ActionReportTime {
return &e
}
-func (e *SourceFacebookMarketingUpdateInsightConfigActionReportTime) UnmarshalJSON(data []byte) error {
+func (e *ActionReportTime) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -84,56 +85,56 @@ func (e *SourceFacebookMarketingUpdateInsightConfigActionReportTime) UnmarshalJS
case "impression":
fallthrough
case "mixed":
- *e = SourceFacebookMarketingUpdateInsightConfigActionReportTime(v)
+ *e = ActionReportTime(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookMarketingUpdateInsightConfigActionReportTime: %v", v)
+ return fmt.Errorf("invalid value for ActionReportTime: %v", v)
}
}
-// SourceFacebookMarketingUpdateInsightConfigValidBreakdowns - An enumeration.
-type SourceFacebookMarketingUpdateInsightConfigValidBreakdowns string
+// ValidBreakdowns - An enumeration.
+type ValidBreakdowns string
const (
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsAdFormatAsset SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "ad_format_asset"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsAge SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "age"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsAppID SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "app_id"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsBodyAsset SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "body_asset"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsCallToActionAsset SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "call_to_action_asset"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsCoarseConversionValue SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "coarse_conversion_value"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsCountry SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "country"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsDescriptionAsset SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "description_asset"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsDevicePlatform SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "device_platform"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsDma SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "dma"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsFidelityType SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "fidelity_type"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsFrequencyValue SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "frequency_value"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsGender SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "gender"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsHourlyStatsAggregatedByAdvertiserTimeZone SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "hourly_stats_aggregated_by_advertiser_time_zone"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsHourlyStatsAggregatedByAudienceTimeZone SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "hourly_stats_aggregated_by_audience_time_zone"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsHsid SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "hsid"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsImageAsset SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "image_asset"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsImpressionDevice SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "impression_device"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsIsConversionIDModeled SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "is_conversion_id_modeled"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsLinkURLAsset SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "link_url_asset"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsMmm SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "mmm"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsPlacePageID SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "place_page_id"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsPlatformPosition SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "platform_position"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsPostbackSequenceIndex SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "postback_sequence_index"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsProductID SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "product_id"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsPublisherPlatform SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "publisher_platform"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsRedownload SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "redownload"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsRegion SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "region"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsSkanCampaignID SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "skan_campaign_id"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsSkanConversionID SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "skan_conversion_id"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsTitleAsset SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "title_asset"
- SourceFacebookMarketingUpdateInsightConfigValidBreakdownsVideoAsset SourceFacebookMarketingUpdateInsightConfigValidBreakdowns = "video_asset"
+ ValidBreakdownsAdFormatAsset ValidBreakdowns = "ad_format_asset"
+ ValidBreakdownsAge ValidBreakdowns = "age"
+ ValidBreakdownsAppID ValidBreakdowns = "app_id"
+ ValidBreakdownsBodyAsset ValidBreakdowns = "body_asset"
+ ValidBreakdownsCallToActionAsset ValidBreakdowns = "call_to_action_asset"
+ ValidBreakdownsCoarseConversionValue ValidBreakdowns = "coarse_conversion_value"
+ ValidBreakdownsCountry ValidBreakdowns = "country"
+ ValidBreakdownsDescriptionAsset ValidBreakdowns = "description_asset"
+ ValidBreakdownsDevicePlatform ValidBreakdowns = "device_platform"
+ ValidBreakdownsDma ValidBreakdowns = "dma"
+ ValidBreakdownsFidelityType ValidBreakdowns = "fidelity_type"
+ ValidBreakdownsFrequencyValue ValidBreakdowns = "frequency_value"
+ ValidBreakdownsGender ValidBreakdowns = "gender"
+ ValidBreakdownsHourlyStatsAggregatedByAdvertiserTimeZone ValidBreakdowns = "hourly_stats_aggregated_by_advertiser_time_zone"
+ ValidBreakdownsHourlyStatsAggregatedByAudienceTimeZone ValidBreakdowns = "hourly_stats_aggregated_by_audience_time_zone"
+ ValidBreakdownsHsid ValidBreakdowns = "hsid"
+ ValidBreakdownsImageAsset ValidBreakdowns = "image_asset"
+ ValidBreakdownsImpressionDevice ValidBreakdowns = "impression_device"
+ ValidBreakdownsIsConversionIDModeled ValidBreakdowns = "is_conversion_id_modeled"
+ ValidBreakdownsLinkURLAsset ValidBreakdowns = "link_url_asset"
+ ValidBreakdownsMmm ValidBreakdowns = "mmm"
+ ValidBreakdownsPlacePageID ValidBreakdowns = "place_page_id"
+ ValidBreakdownsPlatformPosition ValidBreakdowns = "platform_position"
+ ValidBreakdownsPostbackSequenceIndex ValidBreakdowns = "postback_sequence_index"
+ ValidBreakdownsProductID ValidBreakdowns = "product_id"
+ ValidBreakdownsPublisherPlatform ValidBreakdowns = "publisher_platform"
+ ValidBreakdownsRedownload ValidBreakdowns = "redownload"
+ ValidBreakdownsRegion ValidBreakdowns = "region"
+ ValidBreakdownsSkanCampaignID ValidBreakdowns = "skan_campaign_id"
+ ValidBreakdownsSkanConversionID ValidBreakdowns = "skan_conversion_id"
+ ValidBreakdownsTitleAsset ValidBreakdowns = "title_asset"
+ ValidBreakdownsVideoAsset ValidBreakdowns = "video_asset"
)
-func (e SourceFacebookMarketingUpdateInsightConfigValidBreakdowns) ToPointer() *SourceFacebookMarketingUpdateInsightConfigValidBreakdowns {
+func (e ValidBreakdowns) ToPointer() *ValidBreakdowns {
return &e
}
-func (e *SourceFacebookMarketingUpdateInsightConfigValidBreakdowns) UnmarshalJSON(data []byte) error {
+func (e *ValidBreakdowns) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -202,156 +203,156 @@ func (e *SourceFacebookMarketingUpdateInsightConfigValidBreakdowns) UnmarshalJSO
case "title_asset":
fallthrough
case "video_asset":
- *e = SourceFacebookMarketingUpdateInsightConfigValidBreakdowns(v)
+ *e = ValidBreakdowns(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookMarketingUpdateInsightConfigValidBreakdowns: %v", v)
+ return fmt.Errorf("invalid value for ValidBreakdowns: %v", v)
}
}
-// SourceFacebookMarketingUpdateInsightConfigValidEnums - An enumeration.
-type SourceFacebookMarketingUpdateInsightConfigValidEnums string
+// SourceFacebookMarketingUpdateValidEnums - An enumeration.
+type SourceFacebookMarketingUpdateValidEnums string
const (
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAccountCurrency SourceFacebookMarketingUpdateInsightConfigValidEnums = "account_currency"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAccountID SourceFacebookMarketingUpdateInsightConfigValidEnums = "account_id"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAccountName SourceFacebookMarketingUpdateInsightConfigValidEnums = "account_name"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsActionValues SourceFacebookMarketingUpdateInsightConfigValidEnums = "action_values"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAdClickActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "ad_click_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAdID SourceFacebookMarketingUpdateInsightConfigValidEnums = "ad_id"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAdImpressionActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "ad_impression_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAdName SourceFacebookMarketingUpdateInsightConfigValidEnums = "ad_name"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAdsetEnd SourceFacebookMarketingUpdateInsightConfigValidEnums = "adset_end"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAdsetID SourceFacebookMarketingUpdateInsightConfigValidEnums = "adset_id"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAdsetName SourceFacebookMarketingUpdateInsightConfigValidEnums = "adset_name"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAdsetStart SourceFacebookMarketingUpdateInsightConfigValidEnums = "adset_start"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAgeTargeting SourceFacebookMarketingUpdateInsightConfigValidEnums = "age_targeting"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAttributionSetting SourceFacebookMarketingUpdateInsightConfigValidEnums = "attribution_setting"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAuctionBid SourceFacebookMarketingUpdateInsightConfigValidEnums = "auction_bid"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAuctionCompetitiveness SourceFacebookMarketingUpdateInsightConfigValidEnums = "auction_competitiveness"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsAuctionMaxCompetitorBid SourceFacebookMarketingUpdateInsightConfigValidEnums = "auction_max_competitor_bid"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsBuyingType SourceFacebookMarketingUpdateInsightConfigValidEnums = "buying_type"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCampaignID SourceFacebookMarketingUpdateInsightConfigValidEnums = "campaign_id"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCampaignName SourceFacebookMarketingUpdateInsightConfigValidEnums = "campaign_name"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCanvasAvgViewPercent SourceFacebookMarketingUpdateInsightConfigValidEnums = "canvas_avg_view_percent"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCanvasAvgViewTime SourceFacebookMarketingUpdateInsightConfigValidEnums = "canvas_avg_view_time"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCatalogSegmentActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "catalog_segment_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCatalogSegmentValue SourceFacebookMarketingUpdateInsightConfigValidEnums = "catalog_segment_value"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCatalogSegmentValueMobilePurchaseRoas SourceFacebookMarketingUpdateInsightConfigValidEnums = "catalog_segment_value_mobile_purchase_roas"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCatalogSegmentValueOmniPurchaseRoas SourceFacebookMarketingUpdateInsightConfigValidEnums = "catalog_segment_value_omni_purchase_roas"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCatalogSegmentValueWebsitePurchaseRoas SourceFacebookMarketingUpdateInsightConfigValidEnums = "catalog_segment_value_website_purchase_roas"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsClicks SourceFacebookMarketingUpdateInsightConfigValidEnums = "clicks"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsConversionRateRanking SourceFacebookMarketingUpdateInsightConfigValidEnums = "conversion_rate_ranking"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsConversionValues SourceFacebookMarketingUpdateInsightConfigValidEnums = "conversion_values"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsConversions SourceFacebookMarketingUpdateInsightConfigValidEnums = "conversions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsConvertedProductQuantity SourceFacebookMarketingUpdateInsightConfigValidEnums = "converted_product_quantity"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsConvertedProductValue SourceFacebookMarketingUpdateInsightConfigValidEnums = "converted_product_value"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPer15SecVideoView SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_15_sec_video_view"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPer2SecContinuousVideoView SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_2_sec_continuous_video_view"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerActionType SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_action_type"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerAdClick SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_ad_click"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerConversion SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_conversion"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerDdaCountbyConvs SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_dda_countby_convs"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerEstimatedAdRecallers SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_estimated_ad_recallers"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerInlineLinkClick SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_inline_link_click"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerInlinePostEngagement SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_inline_post_engagement"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerOneThousandAdImpression SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_one_thousand_ad_impression"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerOutboundClick SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_outbound_click"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerThruplay SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_thruplay"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerUniqueActionType SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_unique_action_type"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerUniqueClick SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_unique_click"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerUniqueConversion SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_unique_conversion"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerUniqueInlineLinkClick SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_unique_inline_link_click"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCostPerUniqueOutboundClick SourceFacebookMarketingUpdateInsightConfigValidEnums = "cost_per_unique_outbound_click"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCpc SourceFacebookMarketingUpdateInsightConfigValidEnums = "cpc"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCpm SourceFacebookMarketingUpdateInsightConfigValidEnums = "cpm"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCpp SourceFacebookMarketingUpdateInsightConfigValidEnums = "cpp"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCreatedTime SourceFacebookMarketingUpdateInsightConfigValidEnums = "created_time"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCreativeMediaType SourceFacebookMarketingUpdateInsightConfigValidEnums = "creative_media_type"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsCtr SourceFacebookMarketingUpdateInsightConfigValidEnums = "ctr"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsDateStart SourceFacebookMarketingUpdateInsightConfigValidEnums = "date_start"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsDateStop SourceFacebookMarketingUpdateInsightConfigValidEnums = "date_stop"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsDdaCountbyConvs SourceFacebookMarketingUpdateInsightConfigValidEnums = "dda_countby_convs"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsDdaResults SourceFacebookMarketingUpdateInsightConfigValidEnums = "dda_results"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsEngagementRateRanking SourceFacebookMarketingUpdateInsightConfigValidEnums = "engagement_rate_ranking"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsEstimatedAdRecallRate SourceFacebookMarketingUpdateInsightConfigValidEnums = "estimated_ad_recall_rate"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsEstimatedAdRecallRateLowerBound SourceFacebookMarketingUpdateInsightConfigValidEnums = "estimated_ad_recall_rate_lower_bound"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsEstimatedAdRecallRateUpperBound SourceFacebookMarketingUpdateInsightConfigValidEnums = "estimated_ad_recall_rate_upper_bound"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsEstimatedAdRecallers SourceFacebookMarketingUpdateInsightConfigValidEnums = "estimated_ad_recallers"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsEstimatedAdRecallersLowerBound SourceFacebookMarketingUpdateInsightConfigValidEnums = "estimated_ad_recallers_lower_bound"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsEstimatedAdRecallersUpperBound SourceFacebookMarketingUpdateInsightConfigValidEnums = "estimated_ad_recallers_upper_bound"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsFrequency SourceFacebookMarketingUpdateInsightConfigValidEnums = "frequency"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsFullViewImpressions SourceFacebookMarketingUpdateInsightConfigValidEnums = "full_view_impressions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsFullViewReach SourceFacebookMarketingUpdateInsightConfigValidEnums = "full_view_reach"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsGenderTargeting SourceFacebookMarketingUpdateInsightConfigValidEnums = "gender_targeting"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsImpressions SourceFacebookMarketingUpdateInsightConfigValidEnums = "impressions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsInlineLinkClickCtr SourceFacebookMarketingUpdateInsightConfigValidEnums = "inline_link_click_ctr"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsInlineLinkClicks SourceFacebookMarketingUpdateInsightConfigValidEnums = "inline_link_clicks"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsInlinePostEngagement SourceFacebookMarketingUpdateInsightConfigValidEnums = "inline_post_engagement"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsInstagramUpcomingEventRemindersSet SourceFacebookMarketingUpdateInsightConfigValidEnums = "instagram_upcoming_event_reminders_set"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsInstantExperienceClicksToOpen SourceFacebookMarketingUpdateInsightConfigValidEnums = "instant_experience_clicks_to_open"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsInstantExperienceClicksToStart SourceFacebookMarketingUpdateInsightConfigValidEnums = "instant_experience_clicks_to_start"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsInstantExperienceOutboundClicks SourceFacebookMarketingUpdateInsightConfigValidEnums = "instant_experience_outbound_clicks"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsInteractiveComponentTap SourceFacebookMarketingUpdateInsightConfigValidEnums = "interactive_component_tap"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsLabels SourceFacebookMarketingUpdateInsightConfigValidEnums = "labels"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsLocation SourceFacebookMarketingUpdateInsightConfigValidEnums = "location"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsMobileAppPurchaseRoas SourceFacebookMarketingUpdateInsightConfigValidEnums = "mobile_app_purchase_roas"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsObjective SourceFacebookMarketingUpdateInsightConfigValidEnums = "objective"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsOptimizationGoal SourceFacebookMarketingUpdateInsightConfigValidEnums = "optimization_goal"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsOutboundClicks SourceFacebookMarketingUpdateInsightConfigValidEnums = "outbound_clicks"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsOutboundClicksCtr SourceFacebookMarketingUpdateInsightConfigValidEnums = "outbound_clicks_ctr"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsPlacePageName SourceFacebookMarketingUpdateInsightConfigValidEnums = "place_page_name"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsPurchaseRoas SourceFacebookMarketingUpdateInsightConfigValidEnums = "purchase_roas"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsQualifyingQuestionQualifyAnswerRate SourceFacebookMarketingUpdateInsightConfigValidEnums = "qualifying_question_qualify_answer_rate"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsQualityRanking SourceFacebookMarketingUpdateInsightConfigValidEnums = "quality_ranking"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsQualityScoreEctr SourceFacebookMarketingUpdateInsightConfigValidEnums = "quality_score_ectr"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsQualityScoreEcvr SourceFacebookMarketingUpdateInsightConfigValidEnums = "quality_score_ecvr"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsQualityScoreOrganic SourceFacebookMarketingUpdateInsightConfigValidEnums = "quality_score_organic"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsReach SourceFacebookMarketingUpdateInsightConfigValidEnums = "reach"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsSocialSpend SourceFacebookMarketingUpdateInsightConfigValidEnums = "social_spend"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsSpend SourceFacebookMarketingUpdateInsightConfigValidEnums = "spend"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsTotalPostbacks SourceFacebookMarketingUpdateInsightConfigValidEnums = "total_postbacks"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsTotalPostbacksDetailed SourceFacebookMarketingUpdateInsightConfigValidEnums = "total_postbacks_detailed"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsTotalPostbacksDetailedV4 SourceFacebookMarketingUpdateInsightConfigValidEnums = "total_postbacks_detailed_v4"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUniqueActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "unique_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUniqueClicks SourceFacebookMarketingUpdateInsightConfigValidEnums = "unique_clicks"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUniqueConversions SourceFacebookMarketingUpdateInsightConfigValidEnums = "unique_conversions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUniqueCtr SourceFacebookMarketingUpdateInsightConfigValidEnums = "unique_ctr"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUniqueInlineLinkClickCtr SourceFacebookMarketingUpdateInsightConfigValidEnums = "unique_inline_link_click_ctr"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUniqueInlineLinkClicks SourceFacebookMarketingUpdateInsightConfigValidEnums = "unique_inline_link_clicks"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUniqueLinkClicksCtr SourceFacebookMarketingUpdateInsightConfigValidEnums = "unique_link_clicks_ctr"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUniqueOutboundClicks SourceFacebookMarketingUpdateInsightConfigValidEnums = "unique_outbound_clicks"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUniqueOutboundClicksCtr SourceFacebookMarketingUpdateInsightConfigValidEnums = "unique_outbound_clicks_ctr"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUniqueVideoContinuous2SecWatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "unique_video_continuous_2_sec_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUniqueVideoView15Sec SourceFacebookMarketingUpdateInsightConfigValidEnums = "unique_video_view_15_sec"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsUpdatedTime SourceFacebookMarketingUpdateInsightConfigValidEnums = "updated_time"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideo15SecWatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_15_sec_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideo30SecWatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_30_sec_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoAvgTimeWatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_avg_time_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoContinuous2SecWatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_continuous_2_sec_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoP100WatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_p100_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoP25WatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_p25_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoP50WatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_p50_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoP75WatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_p75_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoP95WatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_p95_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoPlayActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_play_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoPlayCurveActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_play_curve_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoPlayRetention0To15sActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_play_retention_0_to_15s_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoPlayRetention20To60sActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_play_retention_20_to_60s_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoPlayRetentionGraphActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_play_retention_graph_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoThruplayWatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_thruplay_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsVideoTimeWatchedActions SourceFacebookMarketingUpdateInsightConfigValidEnums = "video_time_watched_actions"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsWebsiteCtr SourceFacebookMarketingUpdateInsightConfigValidEnums = "website_ctr"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsWebsitePurchaseRoas SourceFacebookMarketingUpdateInsightConfigValidEnums = "website_purchase_roas"
- SourceFacebookMarketingUpdateInsightConfigValidEnumsWishBid SourceFacebookMarketingUpdateInsightConfigValidEnums = "wish_bid"
+ SourceFacebookMarketingUpdateValidEnumsAccountCurrency SourceFacebookMarketingUpdateValidEnums = "account_currency"
+ SourceFacebookMarketingUpdateValidEnumsAccountID SourceFacebookMarketingUpdateValidEnums = "account_id"
+ SourceFacebookMarketingUpdateValidEnumsAccountName SourceFacebookMarketingUpdateValidEnums = "account_name"
+ SourceFacebookMarketingUpdateValidEnumsActionValues SourceFacebookMarketingUpdateValidEnums = "action_values"
+ SourceFacebookMarketingUpdateValidEnumsActions SourceFacebookMarketingUpdateValidEnums = "actions"
+ SourceFacebookMarketingUpdateValidEnumsAdClickActions SourceFacebookMarketingUpdateValidEnums = "ad_click_actions"
+ SourceFacebookMarketingUpdateValidEnumsAdID SourceFacebookMarketingUpdateValidEnums = "ad_id"
+ SourceFacebookMarketingUpdateValidEnumsAdImpressionActions SourceFacebookMarketingUpdateValidEnums = "ad_impression_actions"
+ SourceFacebookMarketingUpdateValidEnumsAdName SourceFacebookMarketingUpdateValidEnums = "ad_name"
+ SourceFacebookMarketingUpdateValidEnumsAdsetEnd SourceFacebookMarketingUpdateValidEnums = "adset_end"
+ SourceFacebookMarketingUpdateValidEnumsAdsetID SourceFacebookMarketingUpdateValidEnums = "adset_id"
+ SourceFacebookMarketingUpdateValidEnumsAdsetName SourceFacebookMarketingUpdateValidEnums = "adset_name"
+ SourceFacebookMarketingUpdateValidEnumsAdsetStart SourceFacebookMarketingUpdateValidEnums = "adset_start"
+ SourceFacebookMarketingUpdateValidEnumsAgeTargeting SourceFacebookMarketingUpdateValidEnums = "age_targeting"
+ SourceFacebookMarketingUpdateValidEnumsAttributionSetting SourceFacebookMarketingUpdateValidEnums = "attribution_setting"
+ SourceFacebookMarketingUpdateValidEnumsAuctionBid SourceFacebookMarketingUpdateValidEnums = "auction_bid"
+ SourceFacebookMarketingUpdateValidEnumsAuctionCompetitiveness SourceFacebookMarketingUpdateValidEnums = "auction_competitiveness"
+ SourceFacebookMarketingUpdateValidEnumsAuctionMaxCompetitorBid SourceFacebookMarketingUpdateValidEnums = "auction_max_competitor_bid"
+ SourceFacebookMarketingUpdateValidEnumsBuyingType SourceFacebookMarketingUpdateValidEnums = "buying_type"
+ SourceFacebookMarketingUpdateValidEnumsCampaignID SourceFacebookMarketingUpdateValidEnums = "campaign_id"
+ SourceFacebookMarketingUpdateValidEnumsCampaignName SourceFacebookMarketingUpdateValidEnums = "campaign_name"
+ SourceFacebookMarketingUpdateValidEnumsCanvasAvgViewPercent SourceFacebookMarketingUpdateValidEnums = "canvas_avg_view_percent"
+ SourceFacebookMarketingUpdateValidEnumsCanvasAvgViewTime SourceFacebookMarketingUpdateValidEnums = "canvas_avg_view_time"
+ SourceFacebookMarketingUpdateValidEnumsCatalogSegmentActions SourceFacebookMarketingUpdateValidEnums = "catalog_segment_actions"
+ SourceFacebookMarketingUpdateValidEnumsCatalogSegmentValue SourceFacebookMarketingUpdateValidEnums = "catalog_segment_value"
+ SourceFacebookMarketingUpdateValidEnumsCatalogSegmentValueMobilePurchaseRoas SourceFacebookMarketingUpdateValidEnums = "catalog_segment_value_mobile_purchase_roas"
+ SourceFacebookMarketingUpdateValidEnumsCatalogSegmentValueOmniPurchaseRoas SourceFacebookMarketingUpdateValidEnums = "catalog_segment_value_omni_purchase_roas"
+ SourceFacebookMarketingUpdateValidEnumsCatalogSegmentValueWebsitePurchaseRoas SourceFacebookMarketingUpdateValidEnums = "catalog_segment_value_website_purchase_roas"
+ SourceFacebookMarketingUpdateValidEnumsClicks SourceFacebookMarketingUpdateValidEnums = "clicks"
+ SourceFacebookMarketingUpdateValidEnumsConversionRateRanking SourceFacebookMarketingUpdateValidEnums = "conversion_rate_ranking"
+ SourceFacebookMarketingUpdateValidEnumsConversionValues SourceFacebookMarketingUpdateValidEnums = "conversion_values"
+ SourceFacebookMarketingUpdateValidEnumsConversions SourceFacebookMarketingUpdateValidEnums = "conversions"
+ SourceFacebookMarketingUpdateValidEnumsConvertedProductQuantity SourceFacebookMarketingUpdateValidEnums = "converted_product_quantity"
+ SourceFacebookMarketingUpdateValidEnumsConvertedProductValue SourceFacebookMarketingUpdateValidEnums = "converted_product_value"
+ SourceFacebookMarketingUpdateValidEnumsCostPer15SecVideoView SourceFacebookMarketingUpdateValidEnums = "cost_per_15_sec_video_view"
+ SourceFacebookMarketingUpdateValidEnumsCostPer2SecContinuousVideoView SourceFacebookMarketingUpdateValidEnums = "cost_per_2_sec_continuous_video_view"
+ SourceFacebookMarketingUpdateValidEnumsCostPerActionType SourceFacebookMarketingUpdateValidEnums = "cost_per_action_type"
+ SourceFacebookMarketingUpdateValidEnumsCostPerAdClick SourceFacebookMarketingUpdateValidEnums = "cost_per_ad_click"
+ SourceFacebookMarketingUpdateValidEnumsCostPerConversion SourceFacebookMarketingUpdateValidEnums = "cost_per_conversion"
+ SourceFacebookMarketingUpdateValidEnumsCostPerDdaCountbyConvs SourceFacebookMarketingUpdateValidEnums = "cost_per_dda_countby_convs"
+ SourceFacebookMarketingUpdateValidEnumsCostPerEstimatedAdRecallers SourceFacebookMarketingUpdateValidEnums = "cost_per_estimated_ad_recallers"
+ SourceFacebookMarketingUpdateValidEnumsCostPerInlineLinkClick SourceFacebookMarketingUpdateValidEnums = "cost_per_inline_link_click"
+ SourceFacebookMarketingUpdateValidEnumsCostPerInlinePostEngagement SourceFacebookMarketingUpdateValidEnums = "cost_per_inline_post_engagement"
+ SourceFacebookMarketingUpdateValidEnumsCostPerOneThousandAdImpression SourceFacebookMarketingUpdateValidEnums = "cost_per_one_thousand_ad_impression"
+ SourceFacebookMarketingUpdateValidEnumsCostPerOutboundClick SourceFacebookMarketingUpdateValidEnums = "cost_per_outbound_click"
+ SourceFacebookMarketingUpdateValidEnumsCostPerThruplay SourceFacebookMarketingUpdateValidEnums = "cost_per_thruplay"
+ SourceFacebookMarketingUpdateValidEnumsCostPerUniqueActionType SourceFacebookMarketingUpdateValidEnums = "cost_per_unique_action_type"
+ SourceFacebookMarketingUpdateValidEnumsCostPerUniqueClick SourceFacebookMarketingUpdateValidEnums = "cost_per_unique_click"
+ SourceFacebookMarketingUpdateValidEnumsCostPerUniqueConversion SourceFacebookMarketingUpdateValidEnums = "cost_per_unique_conversion"
+ SourceFacebookMarketingUpdateValidEnumsCostPerUniqueInlineLinkClick SourceFacebookMarketingUpdateValidEnums = "cost_per_unique_inline_link_click"
+ SourceFacebookMarketingUpdateValidEnumsCostPerUniqueOutboundClick SourceFacebookMarketingUpdateValidEnums = "cost_per_unique_outbound_click"
+ SourceFacebookMarketingUpdateValidEnumsCpc SourceFacebookMarketingUpdateValidEnums = "cpc"
+ SourceFacebookMarketingUpdateValidEnumsCpm SourceFacebookMarketingUpdateValidEnums = "cpm"
+ SourceFacebookMarketingUpdateValidEnumsCpp SourceFacebookMarketingUpdateValidEnums = "cpp"
+ SourceFacebookMarketingUpdateValidEnumsCreatedTime SourceFacebookMarketingUpdateValidEnums = "created_time"
+ SourceFacebookMarketingUpdateValidEnumsCreativeMediaType SourceFacebookMarketingUpdateValidEnums = "creative_media_type"
+ SourceFacebookMarketingUpdateValidEnumsCtr SourceFacebookMarketingUpdateValidEnums = "ctr"
+ SourceFacebookMarketingUpdateValidEnumsDateStart SourceFacebookMarketingUpdateValidEnums = "date_start"
+ SourceFacebookMarketingUpdateValidEnumsDateStop SourceFacebookMarketingUpdateValidEnums = "date_stop"
+ SourceFacebookMarketingUpdateValidEnumsDdaCountbyConvs SourceFacebookMarketingUpdateValidEnums = "dda_countby_convs"
+ SourceFacebookMarketingUpdateValidEnumsDdaResults SourceFacebookMarketingUpdateValidEnums = "dda_results"
+ SourceFacebookMarketingUpdateValidEnumsEngagementRateRanking SourceFacebookMarketingUpdateValidEnums = "engagement_rate_ranking"
+ SourceFacebookMarketingUpdateValidEnumsEstimatedAdRecallRate SourceFacebookMarketingUpdateValidEnums = "estimated_ad_recall_rate"
+ SourceFacebookMarketingUpdateValidEnumsEstimatedAdRecallRateLowerBound SourceFacebookMarketingUpdateValidEnums = "estimated_ad_recall_rate_lower_bound"
+ SourceFacebookMarketingUpdateValidEnumsEstimatedAdRecallRateUpperBound SourceFacebookMarketingUpdateValidEnums = "estimated_ad_recall_rate_upper_bound"
+ SourceFacebookMarketingUpdateValidEnumsEstimatedAdRecallers SourceFacebookMarketingUpdateValidEnums = "estimated_ad_recallers"
+ SourceFacebookMarketingUpdateValidEnumsEstimatedAdRecallersLowerBound SourceFacebookMarketingUpdateValidEnums = "estimated_ad_recallers_lower_bound"
+ SourceFacebookMarketingUpdateValidEnumsEstimatedAdRecallersUpperBound SourceFacebookMarketingUpdateValidEnums = "estimated_ad_recallers_upper_bound"
+ SourceFacebookMarketingUpdateValidEnumsFrequency SourceFacebookMarketingUpdateValidEnums = "frequency"
+ SourceFacebookMarketingUpdateValidEnumsFullViewImpressions SourceFacebookMarketingUpdateValidEnums = "full_view_impressions"
+ SourceFacebookMarketingUpdateValidEnumsFullViewReach SourceFacebookMarketingUpdateValidEnums = "full_view_reach"
+ SourceFacebookMarketingUpdateValidEnumsGenderTargeting SourceFacebookMarketingUpdateValidEnums = "gender_targeting"
+ SourceFacebookMarketingUpdateValidEnumsImpressions SourceFacebookMarketingUpdateValidEnums = "impressions"
+ SourceFacebookMarketingUpdateValidEnumsInlineLinkClickCtr SourceFacebookMarketingUpdateValidEnums = "inline_link_click_ctr"
+ SourceFacebookMarketingUpdateValidEnumsInlineLinkClicks SourceFacebookMarketingUpdateValidEnums = "inline_link_clicks"
+ SourceFacebookMarketingUpdateValidEnumsInlinePostEngagement SourceFacebookMarketingUpdateValidEnums = "inline_post_engagement"
+ SourceFacebookMarketingUpdateValidEnumsInstagramUpcomingEventRemindersSet SourceFacebookMarketingUpdateValidEnums = "instagram_upcoming_event_reminders_set"
+ SourceFacebookMarketingUpdateValidEnumsInstantExperienceClicksToOpen SourceFacebookMarketingUpdateValidEnums = "instant_experience_clicks_to_open"
+ SourceFacebookMarketingUpdateValidEnumsInstantExperienceClicksToStart SourceFacebookMarketingUpdateValidEnums = "instant_experience_clicks_to_start"
+ SourceFacebookMarketingUpdateValidEnumsInstantExperienceOutboundClicks SourceFacebookMarketingUpdateValidEnums = "instant_experience_outbound_clicks"
+ SourceFacebookMarketingUpdateValidEnumsInteractiveComponentTap SourceFacebookMarketingUpdateValidEnums = "interactive_component_tap"
+ SourceFacebookMarketingUpdateValidEnumsLabels SourceFacebookMarketingUpdateValidEnums = "labels"
+ SourceFacebookMarketingUpdateValidEnumsLocation SourceFacebookMarketingUpdateValidEnums = "location"
+ SourceFacebookMarketingUpdateValidEnumsMobileAppPurchaseRoas SourceFacebookMarketingUpdateValidEnums = "mobile_app_purchase_roas"
+ SourceFacebookMarketingUpdateValidEnumsObjective SourceFacebookMarketingUpdateValidEnums = "objective"
+ SourceFacebookMarketingUpdateValidEnumsOptimizationGoal SourceFacebookMarketingUpdateValidEnums = "optimization_goal"
+ SourceFacebookMarketingUpdateValidEnumsOutboundClicks SourceFacebookMarketingUpdateValidEnums = "outbound_clicks"
+ SourceFacebookMarketingUpdateValidEnumsOutboundClicksCtr SourceFacebookMarketingUpdateValidEnums = "outbound_clicks_ctr"
+ SourceFacebookMarketingUpdateValidEnumsPlacePageName SourceFacebookMarketingUpdateValidEnums = "place_page_name"
+ SourceFacebookMarketingUpdateValidEnumsPurchaseRoas SourceFacebookMarketingUpdateValidEnums = "purchase_roas"
+ SourceFacebookMarketingUpdateValidEnumsQualifyingQuestionQualifyAnswerRate SourceFacebookMarketingUpdateValidEnums = "qualifying_question_qualify_answer_rate"
+ SourceFacebookMarketingUpdateValidEnumsQualityRanking SourceFacebookMarketingUpdateValidEnums = "quality_ranking"
+ SourceFacebookMarketingUpdateValidEnumsQualityScoreEctr SourceFacebookMarketingUpdateValidEnums = "quality_score_ectr"
+ SourceFacebookMarketingUpdateValidEnumsQualityScoreEcvr SourceFacebookMarketingUpdateValidEnums = "quality_score_ecvr"
+ SourceFacebookMarketingUpdateValidEnumsQualityScoreOrganic SourceFacebookMarketingUpdateValidEnums = "quality_score_organic"
+ SourceFacebookMarketingUpdateValidEnumsReach SourceFacebookMarketingUpdateValidEnums = "reach"
+ SourceFacebookMarketingUpdateValidEnumsSocialSpend SourceFacebookMarketingUpdateValidEnums = "social_spend"
+ SourceFacebookMarketingUpdateValidEnumsSpend SourceFacebookMarketingUpdateValidEnums = "spend"
+ SourceFacebookMarketingUpdateValidEnumsTotalPostbacks SourceFacebookMarketingUpdateValidEnums = "total_postbacks"
+ SourceFacebookMarketingUpdateValidEnumsTotalPostbacksDetailed SourceFacebookMarketingUpdateValidEnums = "total_postbacks_detailed"
+ SourceFacebookMarketingUpdateValidEnumsTotalPostbacksDetailedV4 SourceFacebookMarketingUpdateValidEnums = "total_postbacks_detailed_v4"
+ SourceFacebookMarketingUpdateValidEnumsUniqueActions SourceFacebookMarketingUpdateValidEnums = "unique_actions"
+ SourceFacebookMarketingUpdateValidEnumsUniqueClicks SourceFacebookMarketingUpdateValidEnums = "unique_clicks"
+ SourceFacebookMarketingUpdateValidEnumsUniqueConversions SourceFacebookMarketingUpdateValidEnums = "unique_conversions"
+ SourceFacebookMarketingUpdateValidEnumsUniqueCtr SourceFacebookMarketingUpdateValidEnums = "unique_ctr"
+ SourceFacebookMarketingUpdateValidEnumsUniqueInlineLinkClickCtr SourceFacebookMarketingUpdateValidEnums = "unique_inline_link_click_ctr"
+ SourceFacebookMarketingUpdateValidEnumsUniqueInlineLinkClicks SourceFacebookMarketingUpdateValidEnums = "unique_inline_link_clicks"
+ SourceFacebookMarketingUpdateValidEnumsUniqueLinkClicksCtr SourceFacebookMarketingUpdateValidEnums = "unique_link_clicks_ctr"
+ SourceFacebookMarketingUpdateValidEnumsUniqueOutboundClicks SourceFacebookMarketingUpdateValidEnums = "unique_outbound_clicks"
+ SourceFacebookMarketingUpdateValidEnumsUniqueOutboundClicksCtr SourceFacebookMarketingUpdateValidEnums = "unique_outbound_clicks_ctr"
+ SourceFacebookMarketingUpdateValidEnumsUniqueVideoContinuous2SecWatchedActions SourceFacebookMarketingUpdateValidEnums = "unique_video_continuous_2_sec_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsUniqueVideoView15Sec SourceFacebookMarketingUpdateValidEnums = "unique_video_view_15_sec"
+ SourceFacebookMarketingUpdateValidEnumsUpdatedTime SourceFacebookMarketingUpdateValidEnums = "updated_time"
+ SourceFacebookMarketingUpdateValidEnumsVideo15SecWatchedActions SourceFacebookMarketingUpdateValidEnums = "video_15_sec_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideo30SecWatchedActions SourceFacebookMarketingUpdateValidEnums = "video_30_sec_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoAvgTimeWatchedActions SourceFacebookMarketingUpdateValidEnums = "video_avg_time_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoContinuous2SecWatchedActions SourceFacebookMarketingUpdateValidEnums = "video_continuous_2_sec_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoP100WatchedActions SourceFacebookMarketingUpdateValidEnums = "video_p100_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoP25WatchedActions SourceFacebookMarketingUpdateValidEnums = "video_p25_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoP50WatchedActions SourceFacebookMarketingUpdateValidEnums = "video_p50_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoP75WatchedActions SourceFacebookMarketingUpdateValidEnums = "video_p75_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoP95WatchedActions SourceFacebookMarketingUpdateValidEnums = "video_p95_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoPlayActions SourceFacebookMarketingUpdateValidEnums = "video_play_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoPlayCurveActions SourceFacebookMarketingUpdateValidEnums = "video_play_curve_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoPlayRetention0To15sActions SourceFacebookMarketingUpdateValidEnums = "video_play_retention_0_to_15s_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoPlayRetention20To60sActions SourceFacebookMarketingUpdateValidEnums = "video_play_retention_20_to_60s_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoPlayRetentionGraphActions SourceFacebookMarketingUpdateValidEnums = "video_play_retention_graph_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoThruplayWatchedActions SourceFacebookMarketingUpdateValidEnums = "video_thruplay_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsVideoTimeWatchedActions SourceFacebookMarketingUpdateValidEnums = "video_time_watched_actions"
+ SourceFacebookMarketingUpdateValidEnumsWebsiteCtr SourceFacebookMarketingUpdateValidEnums = "website_ctr"
+ SourceFacebookMarketingUpdateValidEnumsWebsitePurchaseRoas SourceFacebookMarketingUpdateValidEnums = "website_purchase_roas"
+ SourceFacebookMarketingUpdateValidEnumsWishBid SourceFacebookMarketingUpdateValidEnums = "wish_bid"
)
-func (e SourceFacebookMarketingUpdateInsightConfigValidEnums) ToPointer() *SourceFacebookMarketingUpdateInsightConfigValidEnums {
+func (e SourceFacebookMarketingUpdateValidEnums) ToPointer() *SourceFacebookMarketingUpdateValidEnums {
return &e
}
-func (e *SourceFacebookMarketingUpdateInsightConfigValidEnums) UnmarshalJSON(data []byte) error {
+func (e *SourceFacebookMarketingUpdateValidEnums) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -620,28 +621,28 @@ func (e *SourceFacebookMarketingUpdateInsightConfigValidEnums) UnmarshalJSON(dat
case "website_purchase_roas":
fallthrough
case "wish_bid":
- *e = SourceFacebookMarketingUpdateInsightConfigValidEnums(v)
+ *e = SourceFacebookMarketingUpdateValidEnums(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookMarketingUpdateInsightConfigValidEnums: %v", v)
+ return fmt.Errorf("invalid value for SourceFacebookMarketingUpdateValidEnums: %v", v)
}
}
-// SourceFacebookMarketingUpdateInsightConfigLevel - Chosen level for API
-type SourceFacebookMarketingUpdateInsightConfigLevel string
+// Level - Chosen level for API
+type Level string
const (
- SourceFacebookMarketingUpdateInsightConfigLevelAd SourceFacebookMarketingUpdateInsightConfigLevel = "ad"
- SourceFacebookMarketingUpdateInsightConfigLevelAdset SourceFacebookMarketingUpdateInsightConfigLevel = "adset"
- SourceFacebookMarketingUpdateInsightConfigLevelCampaign SourceFacebookMarketingUpdateInsightConfigLevel = "campaign"
- SourceFacebookMarketingUpdateInsightConfigLevelAccount SourceFacebookMarketingUpdateInsightConfigLevel = "account"
+ LevelAd Level = "ad"
+ LevelAdset Level = "adset"
+ LevelCampaign Level = "campaign"
+ LevelAccount Level = "account"
)
-func (e SourceFacebookMarketingUpdateInsightConfigLevel) ToPointer() *SourceFacebookMarketingUpdateInsightConfigLevel {
+func (e Level) ToPointer() *Level {
return &e
}
-func (e *SourceFacebookMarketingUpdateInsightConfigLevel) UnmarshalJSON(data []byte) error {
+func (e *Level) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -654,62 +655,236 @@ func (e *SourceFacebookMarketingUpdateInsightConfigLevel) UnmarshalJSON(data []b
case "campaign":
fallthrough
case "account":
- *e = SourceFacebookMarketingUpdateInsightConfigLevel(v)
+ *e = Level(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookMarketingUpdateInsightConfigLevel: %v", v)
+ return fmt.Errorf("invalid value for Level: %v", v)
}
}
-// SourceFacebookMarketingUpdateInsightConfig - Config for custom insights
-type SourceFacebookMarketingUpdateInsightConfig struct {
+// InsightConfig - Config for custom insights
+type InsightConfig struct {
// A list of chosen action_breakdowns for action_breakdowns
- ActionBreakdowns []SourceFacebookMarketingUpdateInsightConfigValidActionBreakdowns `json:"action_breakdowns,omitempty"`
+ ActionBreakdowns []ValidActionBreakdowns `json:"action_breakdowns,omitempty"`
// Determines the report time of action stats. For example, if a person saw the ad on Jan 1st but converted on Jan 2nd, when you query the API with action_report_time=impression, you see a conversion on Jan 1st. When you query the API with action_report_time=conversion, you see a conversion on Jan 2nd.
- ActionReportTime *SourceFacebookMarketingUpdateInsightConfigActionReportTime `json:"action_report_time,omitempty"`
+ ActionReportTime *ActionReportTime `default:"mixed" json:"action_report_time"`
// A list of chosen breakdowns for breakdowns
- Breakdowns []SourceFacebookMarketingUpdateInsightConfigValidBreakdowns `json:"breakdowns,omitempty"`
+ Breakdowns []ValidBreakdowns `json:"breakdowns,omitempty"`
// The date until which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.
EndDate *time.Time `json:"end_date,omitempty"`
// A list of chosen fields for fields parameter
- Fields []SourceFacebookMarketingUpdateInsightConfigValidEnums `json:"fields,omitempty"`
+ Fields []SourceFacebookMarketingUpdateValidEnums `json:"fields,omitempty"`
// The attribution window
- InsightsLookbackWindow *int64 `json:"insights_lookback_window,omitempty"`
+ InsightsLookbackWindow *int64 `default:"28" json:"insights_lookback_window"`
// Chosen level for API
- Level *SourceFacebookMarketingUpdateInsightConfigLevel `json:"level,omitempty"`
+ Level *Level `default:"ad" json:"level"`
// The name value of insight
Name string `json:"name"`
// The date from which you'd like to replicate data for this stream, in the format YYYY-MM-DDT00:00:00Z.
StartDate *time.Time `json:"start_date,omitempty"`
// Time window in days by which to aggregate statistics. The sync will be chunked into N day intervals, where N is the number of days you specified. For example, if you set this value to 7, then all statistics will be reported as 7-day aggregates by starting from the start_date. If the start and end dates are October 1st and October 30th, then the connector will output 5 records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days only).
- TimeIncrement *int64 `json:"time_increment,omitempty"`
+ TimeIncrement *int64 `default:"1" json:"time_increment"`
+}
+
+func (i InsightConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(i, "", false)
+}
+
+func (i *InsightConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &i, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *InsightConfig) GetActionBreakdowns() []ValidActionBreakdowns {
+ if o == nil {
+ return nil
+ }
+ return o.ActionBreakdowns
+}
+
+func (o *InsightConfig) GetActionReportTime() *ActionReportTime {
+ if o == nil {
+ return nil
+ }
+ return o.ActionReportTime
+}
+
+func (o *InsightConfig) GetBreakdowns() []ValidBreakdowns {
+ if o == nil {
+ return nil
+ }
+ return o.Breakdowns
+}
+
+func (o *InsightConfig) GetEndDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *InsightConfig) GetFields() []SourceFacebookMarketingUpdateValidEnums {
+ if o == nil {
+ return nil
+ }
+ return o.Fields
+}
+
+func (o *InsightConfig) GetInsightsLookbackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.InsightsLookbackWindow
+}
+
+func (o *InsightConfig) GetLevel() *Level {
+ if o == nil {
+ return nil
+ }
+ return o.Level
+}
+
+func (o *InsightConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *InsightConfig) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *InsightConfig) GetTimeIncrement() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TimeIncrement
}
type SourceFacebookMarketingUpdate struct {
// The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information.
AccessToken string `json:"access_token"`
- // The Facebook Ad account ID to use when pulling data from the Facebook Marketing API. Open your Meta Ads Manager. The Ad account ID number is in the account dropdown menu or in your browser's address bar. See the docs for more information.
+ // The Facebook Ad account ID to use when pulling data from the Facebook Marketing API. The Ad account ID number is in the account dropdown menu or in your browser's address bar of your Meta Ads Manager. See the docs for more information.
AccountID string `json:"account_id"`
// Allows action_breakdowns to be an empty list
- ActionBreakdownsAllowEmpty *bool `json:"action_breakdowns_allow_empty,omitempty"`
+ ActionBreakdownsAllowEmpty *bool `default:"true" json:"action_breakdowns_allow_empty"`
// The Client Id for your OAuth app
ClientID *string `json:"client_id,omitempty"`
// The Client Secret for your OAuth app
ClientSecret *string `json:"client_secret,omitempty"`
// A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on "add" to fill this field.
- CustomInsights []SourceFacebookMarketingUpdateInsightConfig `json:"custom_insights,omitempty"`
+ CustomInsights []InsightConfig `json:"custom_insights,omitempty"`
// The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data.
EndDate *time.Time `json:"end_date,omitempty"`
// Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative.
- FetchThumbnailImages *bool `json:"fetch_thumbnail_images,omitempty"`
+ FetchThumbnailImages *bool `default:"false" json:"fetch_thumbnail_images"`
// Set to active if you want to include data from deleted Campaigns, Ads, and AdSets.
- IncludeDeleted *bool `json:"include_deleted,omitempty"`
+ IncludeDeleted *bool `default:"false" json:"include_deleted"`
// The attribution window. Facebook freezes insight data 28 days after it was generated, which means that all data from the past 28 days may have changed since we last emitted it, so you can retrieve refreshed insights from the past by setting this parameter. If you set a custom lookback window value in Facebook account, please provide the same value here.
- InsightsLookbackWindow *int64 `json:"insights_lookback_window,omitempty"`
- // Maximum batch size used when sending batch requests to Facebook API. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.
- MaxBatchSize *int64 `json:"max_batch_size,omitempty"`
+ InsightsLookbackWindow *int64 `default:"28" json:"insights_lookback_window"`
// Page size used when sending requests to Facebook API to specify number of records per page when response has pagination. Most users do not need to set this field unless they specifically need to tune the connector to address specific issues or use cases.
- PageSize *int64 `json:"page_size,omitempty"`
- // The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
- StartDate time.Time `json:"start_date"`
+ PageSize *int64 `default:"100" json:"page_size"`
+ // The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data will be replicated for usual streams and only last 2 years for insight streams.
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceFacebookMarketingUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFacebookMarketingUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFacebookMarketingUpdate) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceFacebookMarketingUpdate) GetAccountID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccountID
+}
+
+func (o *SourceFacebookMarketingUpdate) GetActionBreakdownsAllowEmpty() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.ActionBreakdownsAllowEmpty
+}
+
+func (o *SourceFacebookMarketingUpdate) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceFacebookMarketingUpdate) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceFacebookMarketingUpdate) GetCustomInsights() []InsightConfig {
+ if o == nil {
+ return nil
+ }
+ return o.CustomInsights
+}
+
+func (o *SourceFacebookMarketingUpdate) GetEndDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceFacebookMarketingUpdate) GetFetchThumbnailImages() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.FetchThumbnailImages
+}
+
+func (o *SourceFacebookMarketingUpdate) GetIncludeDeleted() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeDeleted
+}
+
+func (o *SourceFacebookMarketingUpdate) GetInsightsLookbackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.InsightsLookbackWindow
+}
+
+func (o *SourceFacebookMarketingUpdate) GetPageSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PageSize
+}
+
+func (o *SourceFacebookMarketingUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcefacebookpages.go b/internal/sdk/pkg/models/shared/sourcefacebookpages.go
old mode 100755
new mode 100644
index 4f3c0a622..210250e06
--- a/internal/sdk/pkg/models/shared/sourcefacebookpages.go
+++ b/internal/sdk/pkg/models/shared/sourcefacebookpages.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceFacebookPagesFacebookPages string
+type FacebookPages string
const (
- SourceFacebookPagesFacebookPagesFacebookPages SourceFacebookPagesFacebookPages = "facebook-pages"
+ FacebookPagesFacebookPages FacebookPages = "facebook-pages"
)
-func (e SourceFacebookPagesFacebookPages) ToPointer() *SourceFacebookPagesFacebookPages {
+func (e FacebookPages) ToPointer() *FacebookPages {
return &e
}
-func (e *SourceFacebookPagesFacebookPages) UnmarshalJSON(data []byte) error {
+func (e *FacebookPages) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "facebook-pages":
- *e = SourceFacebookPagesFacebookPages(v)
+ *e = FacebookPages(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFacebookPagesFacebookPages: %v", v)
+ return fmt.Errorf("invalid value for FacebookPages: %v", v)
}
}
@@ -35,6 +36,35 @@ type SourceFacebookPages struct {
// Facebook Page Access Token
AccessToken string `json:"access_token"`
// Page ID
- PageID string `json:"page_id"`
- SourceType SourceFacebookPagesFacebookPages `json:"sourceType"`
+ PageID string `json:"page_id"`
+ sourceType FacebookPages `const:"facebook-pages" json:"sourceType"`
+}
+
+func (s SourceFacebookPages) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFacebookPages) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFacebookPages) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceFacebookPages) GetPageID() string {
+ if o == nil {
+ return ""
+ }
+ return o.PageID
+}
+
+func (o *SourceFacebookPages) GetSourceType() FacebookPages {
+ return FacebookPagesFacebookPages
}
diff --git a/internal/sdk/pkg/models/shared/sourcefacebookpagescreaterequest.go b/internal/sdk/pkg/models/shared/sourcefacebookpagescreaterequest.go
old mode 100755
new mode 100644
index 2832b9fb4..df2ff2e25
--- a/internal/sdk/pkg/models/shared/sourcefacebookpagescreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefacebookpagescreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceFacebookPagesCreateRequest struct {
Configuration SourceFacebookPages `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFacebookPagesCreateRequest) GetConfiguration() SourceFacebookPages {
+ if o == nil {
+ return SourceFacebookPages{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFacebookPagesCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceFacebookPagesCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFacebookPagesCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceFacebookPagesCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefacebookpagesputrequest.go b/internal/sdk/pkg/models/shared/sourcefacebookpagesputrequest.go
old mode 100755
new mode 100644
index 6dd315853..857effb0c
--- a/internal/sdk/pkg/models/shared/sourcefacebookpagesputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefacebookpagesputrequest.go
@@ -7,3 +7,24 @@ type SourceFacebookPagesPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFacebookPagesPutRequest) GetConfiguration() SourceFacebookPagesUpdate {
+ if o == nil {
+ return SourceFacebookPagesUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFacebookPagesPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFacebookPagesPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefacebookpagesupdate.go b/internal/sdk/pkg/models/shared/sourcefacebookpagesupdate.go
old mode 100755
new mode 100644
index e820dd999..2b69ab87a
--- a/internal/sdk/pkg/models/shared/sourcefacebookpagesupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcefacebookpagesupdate.go
@@ -8,3 +8,17 @@ type SourceFacebookPagesUpdate struct {
// Page ID
PageID string `json:"page_id"`
}
+
+func (o *SourceFacebookPagesUpdate) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceFacebookPagesUpdate) GetPageID() string {
+ if o == nil {
+ return ""
+ }
+ return o.PageID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefaker.go b/internal/sdk/pkg/models/shared/sourcefaker.go
old mode 100755
new mode 100644
index ab3f5a94a..11f4612a0
--- a/internal/sdk/pkg/models/shared/sourcefaker.go
+++ b/internal/sdk/pkg/models/shared/sourcefaker.go
@@ -5,42 +5,93 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceFakerFaker string
+type Faker string
const (
- SourceFakerFakerFaker SourceFakerFaker = "faker"
+ FakerFaker Faker = "faker"
)
-func (e SourceFakerFaker) ToPointer() *SourceFakerFaker {
+func (e Faker) ToPointer() *Faker {
return &e
}
-func (e *SourceFakerFaker) UnmarshalJSON(data []byte) error {
+func (e *Faker) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "faker":
- *e = SourceFakerFaker(v)
+ *e = Faker(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFakerFaker: %v", v)
+ return fmt.Errorf("invalid value for Faker: %v", v)
}
}
type SourceFaker struct {
// Should the updated_at values for every record be new each sync? Setting this to false will case the source to stop emitting records after COUNT records have been emitted.
- AlwaysUpdated *bool `json:"always_updated,omitempty"`
+ AlwaysUpdated *bool `default:"true" json:"always_updated"`
// How many users should be generated in total. This setting does not apply to the purchases or products stream.
- Count int64 `json:"count"`
+ Count *int64 `default:"1000" json:"count"`
// How many parallel workers should we use to generate fake data? Choose a value equal to the number of CPUs you will allocate to this source.
- Parallelism *int64 `json:"parallelism,omitempty"`
+ Parallelism *int64 `default:"4" json:"parallelism"`
// How many fake records will be in each page (stream slice), before a state message is emitted?
- RecordsPerSlice *int64 `json:"records_per_slice,omitempty"`
+ RecordsPerSlice *int64 `default:"1000" json:"records_per_slice"`
// Manually control the faker random seed to return the same values on subsequent runs (leave -1 for random)
- Seed *int64 `json:"seed,omitempty"`
- SourceType SourceFakerFaker `json:"sourceType"`
+ Seed *int64 `default:"-1" json:"seed"`
+ sourceType Faker `const:"faker" json:"sourceType"`
+}
+
+func (s SourceFaker) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFaker) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFaker) GetAlwaysUpdated() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.AlwaysUpdated
+}
+
+func (o *SourceFaker) GetCount() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Count
+}
+
+func (o *SourceFaker) GetParallelism() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Parallelism
+}
+
+func (o *SourceFaker) GetRecordsPerSlice() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.RecordsPerSlice
+}
+
+func (o *SourceFaker) GetSeed() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Seed
+}
+
+func (o *SourceFaker) GetSourceType() Faker {
+ return FakerFaker
}
diff --git a/internal/sdk/pkg/models/shared/sourcefakercreaterequest.go b/internal/sdk/pkg/models/shared/sourcefakercreaterequest.go
old mode 100755
new mode 100644
index 64bec2988..c127f9dc0
--- a/internal/sdk/pkg/models/shared/sourcefakercreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefakercreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceFakerCreateRequest struct {
Configuration SourceFaker `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFakerCreateRequest) GetConfiguration() SourceFaker {
+ if o == nil {
+ return SourceFaker{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFakerCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceFakerCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFakerCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceFakerCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefakerputrequest.go b/internal/sdk/pkg/models/shared/sourcefakerputrequest.go
old mode 100755
new mode 100644
index d66843419..85af4f46d
--- a/internal/sdk/pkg/models/shared/sourcefakerputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefakerputrequest.go
@@ -7,3 +7,24 @@ type SourceFakerPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFakerPutRequest) GetConfiguration() SourceFakerUpdate {
+ if o == nil {
+ return SourceFakerUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFakerPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFakerPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefakerupdate.go b/internal/sdk/pkg/models/shared/sourcefakerupdate.go
old mode 100755
new mode 100644
index cdb5b5136..80a1a64ee
--- a/internal/sdk/pkg/models/shared/sourcefakerupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcefakerupdate.go
@@ -2,15 +2,65 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceFakerUpdate struct {
// Should the updated_at values for every record be new each sync? Setting this to false will case the source to stop emitting records after COUNT records have been emitted.
- AlwaysUpdated *bool `json:"always_updated,omitempty"`
+ AlwaysUpdated *bool `default:"true" json:"always_updated"`
// How many users should be generated in total. This setting does not apply to the purchases or products stream.
- Count int64 `json:"count"`
+ Count *int64 `default:"1000" json:"count"`
// How many parallel workers should we use to generate fake data? Choose a value equal to the number of CPUs you will allocate to this source.
- Parallelism *int64 `json:"parallelism,omitempty"`
+ Parallelism *int64 `default:"4" json:"parallelism"`
// How many fake records will be in each page (stream slice), before a state message is emitted?
- RecordsPerSlice *int64 `json:"records_per_slice,omitempty"`
+ RecordsPerSlice *int64 `default:"1000" json:"records_per_slice"`
// Manually control the faker random seed to return the same values on subsequent runs (leave -1 for random)
- Seed *int64 `json:"seed,omitempty"`
+ Seed *int64 `default:"-1" json:"seed"`
+}
+
+func (s SourceFakerUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFakerUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFakerUpdate) GetAlwaysUpdated() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.AlwaysUpdated
+}
+
+func (o *SourceFakerUpdate) GetCount() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Count
+}
+
+func (o *SourceFakerUpdate) GetParallelism() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Parallelism
+}
+
+func (o *SourceFakerUpdate) GetRecordsPerSlice() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.RecordsPerSlice
+}
+
+func (o *SourceFakerUpdate) GetSeed() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Seed
}
diff --git a/internal/sdk/pkg/models/shared/sourcefauna.go b/internal/sdk/pkg/models/shared/sourcefauna.go
old mode 100755
new mode 100644
index 0bb41415e..65358d2ea
--- a/internal/sdk/pkg/models/shared/sourcefauna.go
+++ b/internal/sdk/pkg/models/shared/sourcefauna.go
@@ -3,144 +3,176 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceFaunaCollectionDeletionModeEnabledDeletionMode string
+type SourceFaunaSchemasCollectionDeletionMode string
const (
- SourceFaunaCollectionDeletionModeEnabledDeletionModeDeletedField SourceFaunaCollectionDeletionModeEnabledDeletionMode = "deleted_field"
+ SourceFaunaSchemasCollectionDeletionModeDeletedField SourceFaunaSchemasCollectionDeletionMode = "deleted_field"
)
-func (e SourceFaunaCollectionDeletionModeEnabledDeletionMode) ToPointer() *SourceFaunaCollectionDeletionModeEnabledDeletionMode {
+func (e SourceFaunaSchemasCollectionDeletionMode) ToPointer() *SourceFaunaSchemasCollectionDeletionMode {
return &e
}
-func (e *SourceFaunaCollectionDeletionModeEnabledDeletionMode) UnmarshalJSON(data []byte) error {
+func (e *SourceFaunaSchemasCollectionDeletionMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "deleted_field":
- *e = SourceFaunaCollectionDeletionModeEnabledDeletionMode(v)
+ *e = SourceFaunaSchemasCollectionDeletionMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFaunaCollectionDeletionModeEnabledDeletionMode: %v", v)
+ return fmt.Errorf("invalid value for SourceFaunaSchemasCollectionDeletionMode: %v", v)
}
}
-// SourceFaunaCollectionDeletionModeEnabled - This only applies to incremental syncs.
+// SourceFaunaEnabled - This only applies to incremental syncs.
// Enabling deletion mode informs your destination of deleted documents.
// Disabled - Leave this feature disabled, and ignore deleted documents.
// Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.
-type SourceFaunaCollectionDeletionModeEnabled struct {
+type SourceFaunaEnabled struct {
// Name of the "deleted at" column.
- Column string `json:"column"`
- DeletionMode SourceFaunaCollectionDeletionModeEnabledDeletionMode `json:"deletion_mode"`
+ Column *string `default:"deleted_at" json:"column"`
+ deletionMode SourceFaunaSchemasCollectionDeletionMode `const:"deleted_field" json:"deletion_mode"`
}
-type SourceFaunaCollectionDeletionModeDisabledDeletionMode string
+func (s SourceFaunaEnabled) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFaunaEnabled) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFaunaEnabled) GetColumn() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Column
+}
+
+func (o *SourceFaunaEnabled) GetDeletionMode() SourceFaunaSchemasCollectionDeletionMode {
+ return SourceFaunaSchemasCollectionDeletionModeDeletedField
+}
+
+type SourceFaunaSchemasDeletionMode string
const (
- SourceFaunaCollectionDeletionModeDisabledDeletionModeIgnore SourceFaunaCollectionDeletionModeDisabledDeletionMode = "ignore"
+ SourceFaunaSchemasDeletionModeIgnore SourceFaunaSchemasDeletionMode = "ignore"
)
-func (e SourceFaunaCollectionDeletionModeDisabledDeletionMode) ToPointer() *SourceFaunaCollectionDeletionModeDisabledDeletionMode {
+func (e SourceFaunaSchemasDeletionMode) ToPointer() *SourceFaunaSchemasDeletionMode {
return &e
}
-func (e *SourceFaunaCollectionDeletionModeDisabledDeletionMode) UnmarshalJSON(data []byte) error {
+func (e *SourceFaunaSchemasDeletionMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "ignore":
- *e = SourceFaunaCollectionDeletionModeDisabledDeletionMode(v)
+ *e = SourceFaunaSchemasDeletionMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFaunaCollectionDeletionModeDisabledDeletionMode: %v", v)
+ return fmt.Errorf("invalid value for SourceFaunaSchemasDeletionMode: %v", v)
}
}
-// SourceFaunaCollectionDeletionModeDisabled - This only applies to incremental syncs.
+// SourceFaunaDisabled - This only applies to incremental syncs.
// Enabling deletion mode informs your destination of deleted documents.
// Disabled - Leave this feature disabled, and ignore deleted documents.
// Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.
-type SourceFaunaCollectionDeletionModeDisabled struct {
- DeletionMode SourceFaunaCollectionDeletionModeDisabledDeletionMode `json:"deletion_mode"`
+type SourceFaunaDisabled struct {
+ deletionMode SourceFaunaSchemasDeletionMode `const:"ignore" json:"deletion_mode"`
+}
+
+func (s SourceFaunaDisabled) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFaunaDisabled) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFaunaDisabled) GetDeletionMode() SourceFaunaSchemasDeletionMode {
+ return SourceFaunaSchemasDeletionModeIgnore
}
-type SourceFaunaCollectionDeletionModeType string
+type SourceFaunaDeletionModeType string
const (
- SourceFaunaCollectionDeletionModeTypeSourceFaunaCollectionDeletionModeDisabled SourceFaunaCollectionDeletionModeType = "source-fauna_Collection_Deletion Mode_Disabled"
- SourceFaunaCollectionDeletionModeTypeSourceFaunaCollectionDeletionModeEnabled SourceFaunaCollectionDeletionModeType = "source-fauna_Collection_Deletion Mode_Enabled"
+ SourceFaunaDeletionModeTypeSourceFaunaDisabled SourceFaunaDeletionModeType = "source-fauna_Disabled"
+ SourceFaunaDeletionModeTypeSourceFaunaEnabled SourceFaunaDeletionModeType = "source-fauna_Enabled"
)
-type SourceFaunaCollectionDeletionMode struct {
- SourceFaunaCollectionDeletionModeDisabled *SourceFaunaCollectionDeletionModeDisabled
- SourceFaunaCollectionDeletionModeEnabled *SourceFaunaCollectionDeletionModeEnabled
+type SourceFaunaDeletionMode struct {
+ SourceFaunaDisabled *SourceFaunaDisabled
+ SourceFaunaEnabled *SourceFaunaEnabled
- Type SourceFaunaCollectionDeletionModeType
+ Type SourceFaunaDeletionModeType
}
-func CreateSourceFaunaCollectionDeletionModeSourceFaunaCollectionDeletionModeDisabled(sourceFaunaCollectionDeletionModeDisabled SourceFaunaCollectionDeletionModeDisabled) SourceFaunaCollectionDeletionMode {
- typ := SourceFaunaCollectionDeletionModeTypeSourceFaunaCollectionDeletionModeDisabled
+func CreateSourceFaunaDeletionModeSourceFaunaDisabled(sourceFaunaDisabled SourceFaunaDisabled) SourceFaunaDeletionMode {
+ typ := SourceFaunaDeletionModeTypeSourceFaunaDisabled
- return SourceFaunaCollectionDeletionMode{
- SourceFaunaCollectionDeletionModeDisabled: &sourceFaunaCollectionDeletionModeDisabled,
- Type: typ,
+ return SourceFaunaDeletionMode{
+ SourceFaunaDisabled: &sourceFaunaDisabled,
+ Type: typ,
}
}
-func CreateSourceFaunaCollectionDeletionModeSourceFaunaCollectionDeletionModeEnabled(sourceFaunaCollectionDeletionModeEnabled SourceFaunaCollectionDeletionModeEnabled) SourceFaunaCollectionDeletionMode {
- typ := SourceFaunaCollectionDeletionModeTypeSourceFaunaCollectionDeletionModeEnabled
+func CreateSourceFaunaDeletionModeSourceFaunaEnabled(sourceFaunaEnabled SourceFaunaEnabled) SourceFaunaDeletionMode {
+ typ := SourceFaunaDeletionModeTypeSourceFaunaEnabled
- return SourceFaunaCollectionDeletionMode{
- SourceFaunaCollectionDeletionModeEnabled: &sourceFaunaCollectionDeletionModeEnabled,
- Type: typ,
+ return SourceFaunaDeletionMode{
+ SourceFaunaEnabled: &sourceFaunaEnabled,
+ Type: typ,
}
}
-func (u *SourceFaunaCollectionDeletionMode) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *SourceFaunaDeletionMode) UnmarshalJSON(data []byte) error {
- sourceFaunaCollectionDeletionModeDisabled := new(SourceFaunaCollectionDeletionModeDisabled)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFaunaCollectionDeletionModeDisabled); err == nil {
- u.SourceFaunaCollectionDeletionModeDisabled = sourceFaunaCollectionDeletionModeDisabled
- u.Type = SourceFaunaCollectionDeletionModeTypeSourceFaunaCollectionDeletionModeDisabled
+ sourceFaunaDisabled := new(SourceFaunaDisabled)
+ if err := utils.UnmarshalJSON(data, &sourceFaunaDisabled, "", true, true); err == nil {
+ u.SourceFaunaDisabled = sourceFaunaDisabled
+ u.Type = SourceFaunaDeletionModeTypeSourceFaunaDisabled
return nil
}
- sourceFaunaCollectionDeletionModeEnabled := new(SourceFaunaCollectionDeletionModeEnabled)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFaunaCollectionDeletionModeEnabled); err == nil {
- u.SourceFaunaCollectionDeletionModeEnabled = sourceFaunaCollectionDeletionModeEnabled
- u.Type = SourceFaunaCollectionDeletionModeTypeSourceFaunaCollectionDeletionModeEnabled
+ sourceFaunaEnabled := new(SourceFaunaEnabled)
+ if err := utils.UnmarshalJSON(data, &sourceFaunaEnabled, "", true, true); err == nil {
+ u.SourceFaunaEnabled = sourceFaunaEnabled
+ u.Type = SourceFaunaDeletionModeTypeSourceFaunaEnabled
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceFaunaCollectionDeletionMode) MarshalJSON() ([]byte, error) {
- if u.SourceFaunaCollectionDeletionModeDisabled != nil {
- return json.Marshal(u.SourceFaunaCollectionDeletionModeDisabled)
+func (u SourceFaunaDeletionMode) MarshalJSON() ([]byte, error) {
+ if u.SourceFaunaDisabled != nil {
+ return utils.MarshalJSON(u.SourceFaunaDisabled, "", true)
}
- if u.SourceFaunaCollectionDeletionModeEnabled != nil {
- return json.Marshal(u.SourceFaunaCollectionDeletionModeEnabled)
+ if u.SourceFaunaEnabled != nil {
+ return utils.MarshalJSON(u.SourceFaunaEnabled, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// SourceFaunaCollection - Settings for the Fauna Collection.
@@ -149,34 +181,59 @@ type SourceFaunaCollection struct {
// Enabling deletion mode informs your destination of deleted documents.
// Disabled - Leave this feature disabled, and ignore deleted documents.
// Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.
- Deletions SourceFaunaCollectionDeletionMode `json:"deletions"`
+ Deletions SourceFaunaDeletionMode `json:"deletions"`
// The page size used when reading documents from the database. The larger the page size, the faster the connector processes documents. However, if a page is too large, the connector may fail.
// Choose your page size based on how large the documents are.
// See the docs.
- PageSize int64 `json:"page_size"`
+ PageSize *int64 `default:"64" json:"page_size"`
+}
+
+func (s SourceFaunaCollection) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFaunaCollection) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFaunaCollection) GetDeletions() SourceFaunaDeletionMode {
+ if o == nil {
+ return SourceFaunaDeletionMode{}
+ }
+ return o.Deletions
+}
+
+func (o *SourceFaunaCollection) GetPageSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PageSize
}
-type SourceFaunaFauna string
+type Fauna string
const (
- SourceFaunaFaunaFauna SourceFaunaFauna = "fauna"
+ FaunaFauna Fauna = "fauna"
)
-func (e SourceFaunaFauna) ToPointer() *SourceFaunaFauna {
+func (e Fauna) ToPointer() *Fauna {
return &e
}
-func (e *SourceFaunaFauna) UnmarshalJSON(data []byte) error {
+func (e *Fauna) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "fauna":
- *e = SourceFaunaFauna(v)
+ *e = Fauna(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFaunaFauna: %v", v)
+ return fmt.Errorf("invalid value for Fauna: %v", v)
}
}
@@ -184,12 +241,62 @@ type SourceFauna struct {
// Settings for the Fauna Collection.
Collection *SourceFaunaCollection `json:"collection,omitempty"`
// Domain of Fauna to query. Defaults db.fauna.com. See the docs.
- Domain string `json:"domain"`
+ Domain *string `default:"db.fauna.com" json:"domain"`
// Endpoint port.
- Port int64 `json:"port"`
+ Port *int64 `default:"443" json:"port"`
// URL scheme.
- Scheme string `json:"scheme"`
+ Scheme *string `default:"https" json:"scheme"`
// Fauna secret, used when authenticating with the database.
- Secret string `json:"secret"`
- SourceType SourceFaunaFauna `json:"sourceType"`
+ Secret string `json:"secret"`
+ sourceType Fauna `const:"fauna" json:"sourceType"`
+}
+
+func (s SourceFauna) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFauna) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFauna) GetCollection() *SourceFaunaCollection {
+ if o == nil {
+ return nil
+ }
+ return o.Collection
+}
+
+func (o *SourceFauna) GetDomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Domain
+}
+
+func (o *SourceFauna) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceFauna) GetScheme() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Scheme
+}
+
+func (o *SourceFauna) GetSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.Secret
+}
+
+func (o *SourceFauna) GetSourceType() Fauna {
+ return FaunaFauna
}
diff --git a/internal/sdk/pkg/models/shared/sourcefaunacreaterequest.go b/internal/sdk/pkg/models/shared/sourcefaunacreaterequest.go
old mode 100755
new mode 100644
index 52ab377ff..a8c5182bd
--- a/internal/sdk/pkg/models/shared/sourcefaunacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefaunacreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceFaunaCreateRequest struct {
Configuration SourceFauna `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFaunaCreateRequest) GetConfiguration() SourceFauna {
+ if o == nil {
+ return SourceFauna{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFaunaCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceFaunaCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFaunaCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceFaunaCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefaunaputrequest.go b/internal/sdk/pkg/models/shared/sourcefaunaputrequest.go
old mode 100755
new mode 100644
index dfbe6a4ca..e1b2c0cdd
--- a/internal/sdk/pkg/models/shared/sourcefaunaputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefaunaputrequest.go
@@ -7,3 +7,24 @@ type SourceFaunaPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFaunaPutRequest) GetConfiguration() SourceFaunaUpdate {
+ if o == nil {
+ return SourceFaunaUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFaunaPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFaunaPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefaunaupdate.go b/internal/sdk/pkg/models/shared/sourcefaunaupdate.go
old mode 100755
new mode 100644
index 59103b42c..fc9dc9569
--- a/internal/sdk/pkg/models/shared/sourcefaunaupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcefaunaupdate.go
@@ -3,168 +3,271 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceFaunaUpdateCollectionDeletionModeEnabledDeletionMode string
+type SourceFaunaUpdateSchemasDeletionMode string
const (
- SourceFaunaUpdateCollectionDeletionModeEnabledDeletionModeDeletedField SourceFaunaUpdateCollectionDeletionModeEnabledDeletionMode = "deleted_field"
+ SourceFaunaUpdateSchemasDeletionModeDeletedField SourceFaunaUpdateSchemasDeletionMode = "deleted_field"
)
-func (e SourceFaunaUpdateCollectionDeletionModeEnabledDeletionMode) ToPointer() *SourceFaunaUpdateCollectionDeletionModeEnabledDeletionMode {
+func (e SourceFaunaUpdateSchemasDeletionMode) ToPointer() *SourceFaunaUpdateSchemasDeletionMode {
return &e
}
-func (e *SourceFaunaUpdateCollectionDeletionModeEnabledDeletionMode) UnmarshalJSON(data []byte) error {
+func (e *SourceFaunaUpdateSchemasDeletionMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "deleted_field":
- *e = SourceFaunaUpdateCollectionDeletionModeEnabledDeletionMode(v)
+ *e = SourceFaunaUpdateSchemasDeletionMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFaunaUpdateCollectionDeletionModeEnabledDeletionMode: %v", v)
+ return fmt.Errorf("invalid value for SourceFaunaUpdateSchemasDeletionMode: %v", v)
}
}
-// SourceFaunaUpdateCollectionDeletionModeEnabled - This only applies to incremental syncs.
+// Enabled - This only applies to incremental syncs.
// Enabling deletion mode informs your destination of deleted documents.
// Disabled - Leave this feature disabled, and ignore deleted documents.
// Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.
-type SourceFaunaUpdateCollectionDeletionModeEnabled struct {
+type Enabled struct {
// Name of the "deleted at" column.
- Column string `json:"column"`
- DeletionMode SourceFaunaUpdateCollectionDeletionModeEnabledDeletionMode `json:"deletion_mode"`
+ Column *string `default:"deleted_at" json:"column"`
+ deletionMode SourceFaunaUpdateSchemasDeletionMode `const:"deleted_field" json:"deletion_mode"`
}
-type SourceFaunaUpdateCollectionDeletionModeDisabledDeletionMode string
+func (e Enabled) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(e, "", false)
+}
+
+func (e *Enabled) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &e, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Enabled) GetColumn() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Column
+}
+
+func (o *Enabled) GetDeletionMode() SourceFaunaUpdateSchemasDeletionMode {
+ return SourceFaunaUpdateSchemasDeletionModeDeletedField
+}
+
+type SourceFaunaUpdateDeletionMode string
const (
- SourceFaunaUpdateCollectionDeletionModeDisabledDeletionModeIgnore SourceFaunaUpdateCollectionDeletionModeDisabledDeletionMode = "ignore"
+ SourceFaunaUpdateDeletionModeIgnore SourceFaunaUpdateDeletionMode = "ignore"
)
-func (e SourceFaunaUpdateCollectionDeletionModeDisabledDeletionMode) ToPointer() *SourceFaunaUpdateCollectionDeletionModeDisabledDeletionMode {
+func (e SourceFaunaUpdateDeletionMode) ToPointer() *SourceFaunaUpdateDeletionMode {
return &e
}
-func (e *SourceFaunaUpdateCollectionDeletionModeDisabledDeletionMode) UnmarshalJSON(data []byte) error {
+func (e *SourceFaunaUpdateDeletionMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "ignore":
- *e = SourceFaunaUpdateCollectionDeletionModeDisabledDeletionMode(v)
+ *e = SourceFaunaUpdateDeletionMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFaunaUpdateCollectionDeletionModeDisabledDeletionMode: %v", v)
+ return fmt.Errorf("invalid value for SourceFaunaUpdateDeletionMode: %v", v)
}
}
-// SourceFaunaUpdateCollectionDeletionModeDisabled - This only applies to incremental syncs.
+// Disabled - This only applies to incremental syncs.
// Enabling deletion mode informs your destination of deleted documents.
// Disabled - Leave this feature disabled, and ignore deleted documents.
// Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.
-type SourceFaunaUpdateCollectionDeletionModeDisabled struct {
- DeletionMode SourceFaunaUpdateCollectionDeletionModeDisabledDeletionMode `json:"deletion_mode"`
+type Disabled struct {
+ deletionMode SourceFaunaUpdateDeletionMode `const:"ignore" json:"deletion_mode"`
}
-type SourceFaunaUpdateCollectionDeletionModeType string
+func (d Disabled) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
+}
+
+func (d *Disabled) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Disabled) GetDeletionMode() SourceFaunaUpdateDeletionMode {
+ return SourceFaunaUpdateDeletionModeIgnore
+}
+
+type DeletionModeType string
const (
- SourceFaunaUpdateCollectionDeletionModeTypeSourceFaunaUpdateCollectionDeletionModeDisabled SourceFaunaUpdateCollectionDeletionModeType = "source-fauna-update_Collection_Deletion Mode_Disabled"
- SourceFaunaUpdateCollectionDeletionModeTypeSourceFaunaUpdateCollectionDeletionModeEnabled SourceFaunaUpdateCollectionDeletionModeType = "source-fauna-update_Collection_Deletion Mode_Enabled"
+ DeletionModeTypeDisabled DeletionModeType = "Disabled"
+ DeletionModeTypeEnabled DeletionModeType = "Enabled"
)
-type SourceFaunaUpdateCollectionDeletionMode struct {
- SourceFaunaUpdateCollectionDeletionModeDisabled *SourceFaunaUpdateCollectionDeletionModeDisabled
- SourceFaunaUpdateCollectionDeletionModeEnabled *SourceFaunaUpdateCollectionDeletionModeEnabled
+type DeletionMode struct {
+ Disabled *Disabled
+ Enabled *Enabled
- Type SourceFaunaUpdateCollectionDeletionModeType
+ Type DeletionModeType
}
-func CreateSourceFaunaUpdateCollectionDeletionModeSourceFaunaUpdateCollectionDeletionModeDisabled(sourceFaunaUpdateCollectionDeletionModeDisabled SourceFaunaUpdateCollectionDeletionModeDisabled) SourceFaunaUpdateCollectionDeletionMode {
- typ := SourceFaunaUpdateCollectionDeletionModeTypeSourceFaunaUpdateCollectionDeletionModeDisabled
+func CreateDeletionModeDisabled(disabled Disabled) DeletionMode {
+ typ := DeletionModeTypeDisabled
- return SourceFaunaUpdateCollectionDeletionMode{
- SourceFaunaUpdateCollectionDeletionModeDisabled: &sourceFaunaUpdateCollectionDeletionModeDisabled,
- Type: typ,
+ return DeletionMode{
+ Disabled: &disabled,
+ Type: typ,
}
}
-func CreateSourceFaunaUpdateCollectionDeletionModeSourceFaunaUpdateCollectionDeletionModeEnabled(sourceFaunaUpdateCollectionDeletionModeEnabled SourceFaunaUpdateCollectionDeletionModeEnabled) SourceFaunaUpdateCollectionDeletionMode {
- typ := SourceFaunaUpdateCollectionDeletionModeTypeSourceFaunaUpdateCollectionDeletionModeEnabled
+func CreateDeletionModeEnabled(enabled Enabled) DeletionMode {
+ typ := DeletionModeTypeEnabled
- return SourceFaunaUpdateCollectionDeletionMode{
- SourceFaunaUpdateCollectionDeletionModeEnabled: &sourceFaunaUpdateCollectionDeletionModeEnabled,
- Type: typ,
+ return DeletionMode{
+ Enabled: &enabled,
+ Type: typ,
}
}
-func (u *SourceFaunaUpdateCollectionDeletionMode) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *DeletionMode) UnmarshalJSON(data []byte) error {
- sourceFaunaUpdateCollectionDeletionModeDisabled := new(SourceFaunaUpdateCollectionDeletionModeDisabled)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFaunaUpdateCollectionDeletionModeDisabled); err == nil {
- u.SourceFaunaUpdateCollectionDeletionModeDisabled = sourceFaunaUpdateCollectionDeletionModeDisabled
- u.Type = SourceFaunaUpdateCollectionDeletionModeTypeSourceFaunaUpdateCollectionDeletionModeDisabled
+ disabled := new(Disabled)
+ if err := utils.UnmarshalJSON(data, &disabled, "", true, true); err == nil {
+ u.Disabled = disabled
+ u.Type = DeletionModeTypeDisabled
return nil
}
- sourceFaunaUpdateCollectionDeletionModeEnabled := new(SourceFaunaUpdateCollectionDeletionModeEnabled)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFaunaUpdateCollectionDeletionModeEnabled); err == nil {
- u.SourceFaunaUpdateCollectionDeletionModeEnabled = sourceFaunaUpdateCollectionDeletionModeEnabled
- u.Type = SourceFaunaUpdateCollectionDeletionModeTypeSourceFaunaUpdateCollectionDeletionModeEnabled
+ enabled := new(Enabled)
+ if err := utils.UnmarshalJSON(data, &enabled, "", true, true); err == nil {
+ u.Enabled = enabled
+ u.Type = DeletionModeTypeEnabled
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceFaunaUpdateCollectionDeletionMode) MarshalJSON() ([]byte, error) {
- if u.SourceFaunaUpdateCollectionDeletionModeDisabled != nil {
- return json.Marshal(u.SourceFaunaUpdateCollectionDeletionModeDisabled)
+func (u DeletionMode) MarshalJSON() ([]byte, error) {
+ if u.Disabled != nil {
+ return utils.MarshalJSON(u.Disabled, "", true)
}
- if u.SourceFaunaUpdateCollectionDeletionModeEnabled != nil {
- return json.Marshal(u.SourceFaunaUpdateCollectionDeletionModeEnabled)
+ if u.Enabled != nil {
+ return utils.MarshalJSON(u.Enabled, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceFaunaUpdateCollection - Settings for the Fauna Collection.
-type SourceFaunaUpdateCollection struct {
+// Collection - Settings for the Fauna Collection.
+type Collection struct {
// This only applies to incremental syncs.
// Enabling deletion mode informs your destination of deleted documents.
// Disabled - Leave this feature disabled, and ignore deleted documents.
// Enabled - Enables this feature. When a document is deleted, the connector exports a record with a "deleted at" column containing the time that the document was deleted.
- Deletions SourceFaunaUpdateCollectionDeletionMode `json:"deletions"`
+ Deletions DeletionMode `json:"deletions"`
// The page size used when reading documents from the database. The larger the page size, the faster the connector processes documents. However, if a page is too large, the connector may fail.
// Choose your page size based on how large the documents are.
// See the docs.
- PageSize int64 `json:"page_size"`
+ PageSize *int64 `default:"64" json:"page_size"`
+}
+
+func (c Collection) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(c, "", false)
+}
+
+func (c *Collection) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &c, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Collection) GetDeletions() DeletionMode {
+ if o == nil {
+ return DeletionMode{}
+ }
+ return o.Deletions
+}
+
+func (o *Collection) GetPageSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PageSize
}
type SourceFaunaUpdate struct {
// Settings for the Fauna Collection.
- Collection *SourceFaunaUpdateCollection `json:"collection,omitempty"`
+ Collection *Collection `json:"collection,omitempty"`
// Domain of Fauna to query. Defaults db.fauna.com. See the docs.
- Domain string `json:"domain"`
+ Domain *string `default:"db.fauna.com" json:"domain"`
// Endpoint port.
- Port int64 `json:"port"`
+ Port *int64 `default:"443" json:"port"`
// URL scheme.
- Scheme string `json:"scheme"`
+ Scheme *string `default:"https" json:"scheme"`
// Fauna secret, used when authenticating with the database.
Secret string `json:"secret"`
}
+
+func (s SourceFaunaUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFaunaUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFaunaUpdate) GetCollection() *Collection {
+ if o == nil {
+ return nil
+ }
+ return o.Collection
+}
+
+func (o *SourceFaunaUpdate) GetDomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Domain
+}
+
+func (o *SourceFaunaUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceFaunaUpdate) GetScheme() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Scheme
+}
+
+func (o *SourceFaunaUpdate) GetSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.Secret
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefile.go b/internal/sdk/pkg/models/shared/sourcefile.go
new file mode 100644
index 000000000..ea934ba55
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcefile.go
@@ -0,0 +1,784 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+// SourceFileFileFormat - The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
+type SourceFileFileFormat string
+
+const (
+ SourceFileFileFormatCsv SourceFileFileFormat = "csv"
+ SourceFileFileFormatJSON SourceFileFileFormat = "json"
+ SourceFileFileFormatJsonl SourceFileFileFormat = "jsonl"
+ SourceFileFileFormatExcel SourceFileFileFormat = "excel"
+ SourceFileFileFormatExcelBinary SourceFileFileFormat = "excel_binary"
+ SourceFileFileFormatFeather SourceFileFileFormat = "feather"
+ SourceFileFileFormatParquet SourceFileFileFormat = "parquet"
+ SourceFileFileFormatYaml SourceFileFileFormat = "yaml"
+)
+
+func (e SourceFileFileFormat) ToPointer() *SourceFileFileFormat {
+ return &e
+}
+
+func (e *SourceFileFileFormat) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "csv":
+ fallthrough
+ case "json":
+ fallthrough
+ case "jsonl":
+ fallthrough
+ case "excel":
+ fallthrough
+ case "excel_binary":
+ fallthrough
+ case "feather":
+ fallthrough
+ case "parquet":
+ fallthrough
+ case "yaml":
+ *e = SourceFileFileFormat(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileFileFormat: %v", v)
+ }
+}
+
+type SourceFileSchemasProviderStorageProvider7Storage string
+
+const (
+ SourceFileSchemasProviderStorageProvider7StorageSftp SourceFileSchemasProviderStorageProvider7Storage = "SFTP"
+)
+
+func (e SourceFileSchemasProviderStorageProvider7Storage) ToPointer() *SourceFileSchemasProviderStorageProvider7Storage {
+ return &e
+}
+
+func (e *SourceFileSchemasProviderStorageProvider7Storage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "SFTP":
+ *e = SourceFileSchemasProviderStorageProvider7Storage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileSchemasProviderStorageProvider7Storage: %v", v)
+ }
+}
+
+// SourceFileSFTPSecureFileTransferProtocol - The storage Provider or Location of the file(s) which should be replicated.
+type SourceFileSFTPSecureFileTransferProtocol struct {
+ Host string `json:"host"`
+ Password *string `json:"password,omitempty"`
+ Port *string `default:"22" json:"port"`
+ storage SourceFileSchemasProviderStorageProvider7Storage `const:"SFTP" json:"storage"`
+ User string `json:"user"`
+}
+
+func (s SourceFileSFTPSecureFileTransferProtocol) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFileSFTPSecureFileTransferProtocol) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFileSFTPSecureFileTransferProtocol) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceFileSFTPSecureFileTransferProtocol) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceFileSFTPSecureFileTransferProtocol) GetPort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceFileSFTPSecureFileTransferProtocol) GetStorage() SourceFileSchemasProviderStorageProvider7Storage {
+ return SourceFileSchemasProviderStorageProvider7StorageSftp
+}
+
+func (o *SourceFileSFTPSecureFileTransferProtocol) GetUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.User
+}
+
+type SourceFileSchemasProviderStorageProvider6Storage string
+
+const (
+ SourceFileSchemasProviderStorageProvider6StorageScp SourceFileSchemasProviderStorageProvider6Storage = "SCP"
+)
+
+func (e SourceFileSchemasProviderStorageProvider6Storage) ToPointer() *SourceFileSchemasProviderStorageProvider6Storage {
+ return &e
+}
+
+func (e *SourceFileSchemasProviderStorageProvider6Storage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "SCP":
+ *e = SourceFileSchemasProviderStorageProvider6Storage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileSchemasProviderStorageProvider6Storage: %v", v)
+ }
+}
+
+// SourceFileSCPSecureCopyProtocol - The storage Provider or Location of the file(s) which should be replicated.
+type SourceFileSCPSecureCopyProtocol struct {
+ Host string `json:"host"`
+ Password *string `json:"password,omitempty"`
+ Port *string `default:"22" json:"port"`
+ storage SourceFileSchemasProviderStorageProvider6Storage `const:"SCP" json:"storage"`
+ User string `json:"user"`
+}
+
+func (s SourceFileSCPSecureCopyProtocol) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFileSCPSecureCopyProtocol) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFileSCPSecureCopyProtocol) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceFileSCPSecureCopyProtocol) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceFileSCPSecureCopyProtocol) GetPort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceFileSCPSecureCopyProtocol) GetStorage() SourceFileSchemasProviderStorageProvider6Storage {
+ return SourceFileSchemasProviderStorageProvider6StorageScp
+}
+
+func (o *SourceFileSCPSecureCopyProtocol) GetUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.User
+}
+
+type SourceFileSchemasProviderStorageProvider5Storage string
+
+const (
+ SourceFileSchemasProviderStorageProvider5StorageSSH SourceFileSchemasProviderStorageProvider5Storage = "SSH"
+)
+
+func (e SourceFileSchemasProviderStorageProvider5Storage) ToPointer() *SourceFileSchemasProviderStorageProvider5Storage {
+ return &e
+}
+
+func (e *SourceFileSchemasProviderStorageProvider5Storage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "SSH":
+ *e = SourceFileSchemasProviderStorageProvider5Storage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileSchemasProviderStorageProvider5Storage: %v", v)
+ }
+}
+
+// SourceFileSSHSecureShell - The storage Provider or Location of the file(s) which should be replicated.
+type SourceFileSSHSecureShell struct {
+ Host string `json:"host"`
+ Password *string `json:"password,omitempty"`
+ Port *string `default:"22" json:"port"`
+ storage SourceFileSchemasProviderStorageProvider5Storage `const:"SSH" json:"storage"`
+ User string `json:"user"`
+}
+
+func (s SourceFileSSHSecureShell) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFileSSHSecureShell) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFileSSHSecureShell) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceFileSSHSecureShell) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceFileSSHSecureShell) GetPort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceFileSSHSecureShell) GetStorage() SourceFileSchemasProviderStorageProvider5Storage {
+ return SourceFileSchemasProviderStorageProvider5StorageSSH
+}
+
+func (o *SourceFileSSHSecureShell) GetUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.User
+}
+
+type SourceFileSchemasProviderStorageProviderStorage string
+
+const (
+ SourceFileSchemasProviderStorageProviderStorageAzBlob SourceFileSchemasProviderStorageProviderStorage = "AzBlob"
+)
+
+func (e SourceFileSchemasProviderStorageProviderStorage) ToPointer() *SourceFileSchemasProviderStorageProviderStorage {
+ return &e
+}
+
+func (e *SourceFileSchemasProviderStorageProviderStorage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "AzBlob":
+ *e = SourceFileSchemasProviderStorageProviderStorage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileSchemasProviderStorageProviderStorage: %v", v)
+ }
+}
+
+// SourceFileAzBlobAzureBlobStorage - The storage Provider or Location of the file(s) which should be replicated.
+type SourceFileAzBlobAzureBlobStorage struct {
+ // To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
+ SasToken *string `json:"sas_token,omitempty"`
+ // To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
+ SharedKey *string `json:"shared_key,omitempty"`
+ storage SourceFileSchemasProviderStorageProviderStorage `const:"AzBlob" json:"storage"`
+ // The globally unique name of the storage account that the desired blob sits within. See here for more details.
+ StorageAccount string `json:"storage_account"`
+}
+
+func (s SourceFileAzBlobAzureBlobStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFileAzBlobAzureBlobStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFileAzBlobAzureBlobStorage) GetSasToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SasToken
+}
+
+func (o *SourceFileAzBlobAzureBlobStorage) GetSharedKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SharedKey
+}
+
+func (o *SourceFileAzBlobAzureBlobStorage) GetStorage() SourceFileSchemasProviderStorageProviderStorage {
+ return SourceFileSchemasProviderStorageProviderStorageAzBlob
+}
+
+func (o *SourceFileAzBlobAzureBlobStorage) GetStorageAccount() string {
+ if o == nil {
+ return ""
+ }
+ return o.StorageAccount
+}
+
+type SourceFileSchemasProviderStorage string
+
+const (
+ SourceFileSchemasProviderStorageS3 SourceFileSchemasProviderStorage = "S3"
+)
+
+func (e SourceFileSchemasProviderStorage) ToPointer() *SourceFileSchemasProviderStorage {
+ return &e
+}
+
+func (e *SourceFileSchemasProviderStorage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "S3":
+ *e = SourceFileSchemasProviderStorage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileSchemasProviderStorage: %v", v)
+ }
+}
+
+// SourceFileS3AmazonWebServices - The storage Provider or Location of the file(s) which should be replicated.
+type SourceFileS3AmazonWebServices struct {
+ // In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
+ AwsAccessKeyID *string `json:"aws_access_key_id,omitempty"`
+ // In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
+ AwsSecretAccessKey *string `json:"aws_secret_access_key,omitempty"`
+ storage SourceFileSchemasProviderStorage `const:"S3" json:"storage"`
+}
+
+func (s SourceFileS3AmazonWebServices) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFileS3AmazonWebServices) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFileS3AmazonWebServices) GetAwsAccessKeyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsAccessKeyID
+}
+
+func (o *SourceFileS3AmazonWebServices) GetAwsSecretAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsSecretAccessKey
+}
+
+func (o *SourceFileS3AmazonWebServices) GetStorage() SourceFileSchemasProviderStorage {
+ return SourceFileSchemasProviderStorageS3
+}
+
+type SourceFileSchemasStorage string
+
+const (
+ SourceFileSchemasStorageGcs SourceFileSchemasStorage = "GCS"
+)
+
+func (e SourceFileSchemasStorage) ToPointer() *SourceFileSchemasStorage {
+ return &e
+}
+
+func (e *SourceFileSchemasStorage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "GCS":
+ *e = SourceFileSchemasStorage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileSchemasStorage: %v", v)
+ }
+}
+
+// SourceFileGCSGoogleCloudStorage - The storage Provider or Location of the file(s) which should be replicated.
+type SourceFileGCSGoogleCloudStorage struct {
+ // In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.
+ ServiceAccountJSON *string `json:"service_account_json,omitempty"`
+ storage SourceFileSchemasStorage `const:"GCS" json:"storage"`
+}
+
+func (s SourceFileGCSGoogleCloudStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFileGCSGoogleCloudStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFileGCSGoogleCloudStorage) GetServiceAccountJSON() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ServiceAccountJSON
+}
+
+func (o *SourceFileGCSGoogleCloudStorage) GetStorage() SourceFileSchemasStorage {
+ return SourceFileSchemasStorageGcs
+}
+
+type SourceFileStorage string
+
+const (
+ SourceFileStorageHTTPS SourceFileStorage = "HTTPS"
+)
+
+func (e SourceFileStorage) ToPointer() *SourceFileStorage {
+ return &e
+}
+
+func (e *SourceFileStorage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "HTTPS":
+ *e = SourceFileStorage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileStorage: %v", v)
+ }
+}
+
+// SourceFileHTTPSPublicWeb - The storage Provider or Location of the file(s) which should be replicated.
+type SourceFileHTTPSPublicWeb struct {
+ storage SourceFileStorage `const:"HTTPS" json:"storage"`
+ // Add User-Agent to request
+ UserAgent *bool `default:"false" json:"user_agent"`
+}
+
+func (s SourceFileHTTPSPublicWeb) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFileHTTPSPublicWeb) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFileHTTPSPublicWeb) GetStorage() SourceFileStorage {
+ return SourceFileStorageHTTPS
+}
+
+func (o *SourceFileHTTPSPublicWeb) GetUserAgent() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.UserAgent
+}
+
+type SourceFileStorageProviderType string
+
+const (
+ SourceFileStorageProviderTypeSourceFileHTTPSPublicWeb SourceFileStorageProviderType = "source-file_HTTPS: Public Web"
+ SourceFileStorageProviderTypeSourceFileGCSGoogleCloudStorage SourceFileStorageProviderType = "source-file_GCS: Google Cloud Storage"
+ SourceFileStorageProviderTypeSourceFileS3AmazonWebServices SourceFileStorageProviderType = "source-file_S3: Amazon Web Services"
+ SourceFileStorageProviderTypeSourceFileAzBlobAzureBlobStorage SourceFileStorageProviderType = "source-file_AzBlob: Azure Blob Storage"
+ SourceFileStorageProviderTypeSourceFileSSHSecureShell SourceFileStorageProviderType = "source-file_SSH: Secure Shell"
+ SourceFileStorageProviderTypeSourceFileSCPSecureCopyProtocol SourceFileStorageProviderType = "source-file_SCP: Secure copy protocol"
+ SourceFileStorageProviderTypeSourceFileSFTPSecureFileTransferProtocol SourceFileStorageProviderType = "source-file_SFTP: Secure File Transfer Protocol"
+)
+
+type SourceFileStorageProvider struct {
+ SourceFileHTTPSPublicWeb *SourceFileHTTPSPublicWeb
+ SourceFileGCSGoogleCloudStorage *SourceFileGCSGoogleCloudStorage
+ SourceFileS3AmazonWebServices *SourceFileS3AmazonWebServices
+ SourceFileAzBlobAzureBlobStorage *SourceFileAzBlobAzureBlobStorage
+ SourceFileSSHSecureShell *SourceFileSSHSecureShell
+ SourceFileSCPSecureCopyProtocol *SourceFileSCPSecureCopyProtocol
+ SourceFileSFTPSecureFileTransferProtocol *SourceFileSFTPSecureFileTransferProtocol
+
+ Type SourceFileStorageProviderType
+}
+
+func CreateSourceFileStorageProviderSourceFileHTTPSPublicWeb(sourceFileHTTPSPublicWeb SourceFileHTTPSPublicWeb) SourceFileStorageProvider {
+ typ := SourceFileStorageProviderTypeSourceFileHTTPSPublicWeb
+
+ return SourceFileStorageProvider{
+ SourceFileHTTPSPublicWeb: &sourceFileHTTPSPublicWeb,
+ Type: typ,
+ }
+}
+
+func CreateSourceFileStorageProviderSourceFileGCSGoogleCloudStorage(sourceFileGCSGoogleCloudStorage SourceFileGCSGoogleCloudStorage) SourceFileStorageProvider {
+ typ := SourceFileStorageProviderTypeSourceFileGCSGoogleCloudStorage
+
+ return SourceFileStorageProvider{
+ SourceFileGCSGoogleCloudStorage: &sourceFileGCSGoogleCloudStorage,
+ Type: typ,
+ }
+}
+
+func CreateSourceFileStorageProviderSourceFileS3AmazonWebServices(sourceFileS3AmazonWebServices SourceFileS3AmazonWebServices) SourceFileStorageProvider {
+ typ := SourceFileStorageProviderTypeSourceFileS3AmazonWebServices
+
+ return SourceFileStorageProvider{
+ SourceFileS3AmazonWebServices: &sourceFileS3AmazonWebServices,
+ Type: typ,
+ }
+}
+
+func CreateSourceFileStorageProviderSourceFileAzBlobAzureBlobStorage(sourceFileAzBlobAzureBlobStorage SourceFileAzBlobAzureBlobStorage) SourceFileStorageProvider {
+ typ := SourceFileStorageProviderTypeSourceFileAzBlobAzureBlobStorage
+
+ return SourceFileStorageProvider{
+ SourceFileAzBlobAzureBlobStorage: &sourceFileAzBlobAzureBlobStorage,
+ Type: typ,
+ }
+}
+
+func CreateSourceFileStorageProviderSourceFileSSHSecureShell(sourceFileSSHSecureShell SourceFileSSHSecureShell) SourceFileStorageProvider {
+ typ := SourceFileStorageProviderTypeSourceFileSSHSecureShell
+
+ return SourceFileStorageProvider{
+ SourceFileSSHSecureShell: &sourceFileSSHSecureShell,
+ Type: typ,
+ }
+}
+
+func CreateSourceFileStorageProviderSourceFileSCPSecureCopyProtocol(sourceFileSCPSecureCopyProtocol SourceFileSCPSecureCopyProtocol) SourceFileStorageProvider {
+ typ := SourceFileStorageProviderTypeSourceFileSCPSecureCopyProtocol
+
+ return SourceFileStorageProvider{
+ SourceFileSCPSecureCopyProtocol: &sourceFileSCPSecureCopyProtocol,
+ Type: typ,
+ }
+}
+
+func CreateSourceFileStorageProviderSourceFileSFTPSecureFileTransferProtocol(sourceFileSFTPSecureFileTransferProtocol SourceFileSFTPSecureFileTransferProtocol) SourceFileStorageProvider {
+ typ := SourceFileStorageProviderTypeSourceFileSFTPSecureFileTransferProtocol
+
+ return SourceFileStorageProvider{
+ SourceFileSFTPSecureFileTransferProtocol: &sourceFileSFTPSecureFileTransferProtocol,
+ Type: typ,
+ }
+}
+
+func (u *SourceFileStorageProvider) UnmarshalJSON(data []byte) error {
+
+ sourceFileHTTPSPublicWeb := new(SourceFileHTTPSPublicWeb)
+ if err := utils.UnmarshalJSON(data, &sourceFileHTTPSPublicWeb, "", true, true); err == nil {
+ u.SourceFileHTTPSPublicWeb = sourceFileHTTPSPublicWeb
+ u.Type = SourceFileStorageProviderTypeSourceFileHTTPSPublicWeb
+ return nil
+ }
+
+ sourceFileGCSGoogleCloudStorage := new(SourceFileGCSGoogleCloudStorage)
+ if err := utils.UnmarshalJSON(data, &sourceFileGCSGoogleCloudStorage, "", true, true); err == nil {
+ u.SourceFileGCSGoogleCloudStorage = sourceFileGCSGoogleCloudStorage
+ u.Type = SourceFileStorageProviderTypeSourceFileGCSGoogleCloudStorage
+ return nil
+ }
+
+ sourceFileS3AmazonWebServices := new(SourceFileS3AmazonWebServices)
+ if err := utils.UnmarshalJSON(data, &sourceFileS3AmazonWebServices, "", true, true); err == nil {
+ u.SourceFileS3AmazonWebServices = sourceFileS3AmazonWebServices
+ u.Type = SourceFileStorageProviderTypeSourceFileS3AmazonWebServices
+ return nil
+ }
+
+ sourceFileAzBlobAzureBlobStorage := new(SourceFileAzBlobAzureBlobStorage)
+ if err := utils.UnmarshalJSON(data, &sourceFileAzBlobAzureBlobStorage, "", true, true); err == nil {
+ u.SourceFileAzBlobAzureBlobStorage = sourceFileAzBlobAzureBlobStorage
+ u.Type = SourceFileStorageProviderTypeSourceFileAzBlobAzureBlobStorage
+ return nil
+ }
+
+ sourceFileSSHSecureShell := new(SourceFileSSHSecureShell)
+ if err := utils.UnmarshalJSON(data, &sourceFileSSHSecureShell, "", true, true); err == nil {
+ u.SourceFileSSHSecureShell = sourceFileSSHSecureShell
+ u.Type = SourceFileStorageProviderTypeSourceFileSSHSecureShell
+ return nil
+ }
+
+ sourceFileSCPSecureCopyProtocol := new(SourceFileSCPSecureCopyProtocol)
+ if err := utils.UnmarshalJSON(data, &sourceFileSCPSecureCopyProtocol, "", true, true); err == nil {
+ u.SourceFileSCPSecureCopyProtocol = sourceFileSCPSecureCopyProtocol
+ u.Type = SourceFileStorageProviderTypeSourceFileSCPSecureCopyProtocol
+ return nil
+ }
+
+ sourceFileSFTPSecureFileTransferProtocol := new(SourceFileSFTPSecureFileTransferProtocol)
+ if err := utils.UnmarshalJSON(data, &sourceFileSFTPSecureFileTransferProtocol, "", true, true); err == nil {
+ u.SourceFileSFTPSecureFileTransferProtocol = sourceFileSFTPSecureFileTransferProtocol
+ u.Type = SourceFileStorageProviderTypeSourceFileSFTPSecureFileTransferProtocol
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceFileStorageProvider) MarshalJSON() ([]byte, error) {
+ if u.SourceFileHTTPSPublicWeb != nil {
+ return utils.MarshalJSON(u.SourceFileHTTPSPublicWeb, "", true)
+ }
+
+ if u.SourceFileGCSGoogleCloudStorage != nil {
+ return utils.MarshalJSON(u.SourceFileGCSGoogleCloudStorage, "", true)
+ }
+
+ if u.SourceFileS3AmazonWebServices != nil {
+ return utils.MarshalJSON(u.SourceFileS3AmazonWebServices, "", true)
+ }
+
+ if u.SourceFileAzBlobAzureBlobStorage != nil {
+ return utils.MarshalJSON(u.SourceFileAzBlobAzureBlobStorage, "", true)
+ }
+
+ if u.SourceFileSSHSecureShell != nil {
+ return utils.MarshalJSON(u.SourceFileSSHSecureShell, "", true)
+ }
+
+ if u.SourceFileSCPSecureCopyProtocol != nil {
+ return utils.MarshalJSON(u.SourceFileSCPSecureCopyProtocol, "", true)
+ }
+
+ if u.SourceFileSFTPSecureFileTransferProtocol != nil {
+ return utils.MarshalJSON(u.SourceFileSFTPSecureFileTransferProtocol, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type File string
+
+const (
+ FileFile File = "file"
+)
+
+func (e File) ToPointer() *File {
+ return &e
+}
+
+func (e *File) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "file":
+ *e = File(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for File: %v", v)
+ }
+}
+
+type SourceFile struct {
+ // The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
+ DatasetName string `json:"dataset_name"`
+ // The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
+ Format *SourceFileFileFormat `default:"csv" json:"format"`
+ // The storage Provider or Location of the file(s) which should be replicated.
+ Provider SourceFileStorageProvider `json:"provider"`
+ // This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
+ ReaderOptions *string `json:"reader_options,omitempty"`
+ sourceType File `const:"file" json:"sourceType"`
+ // The URL path to access the file which should be replicated.
+ URL string `json:"url"`
+}
+
+func (s SourceFile) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFile) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFile) GetDatasetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatasetName
+}
+
+func (o *SourceFile) GetFormat() *SourceFileFileFormat {
+ if o == nil {
+ return nil
+ }
+ return o.Format
+}
+
+func (o *SourceFile) GetProvider() SourceFileStorageProvider {
+ if o == nil {
+ return SourceFileStorageProvider{}
+ }
+ return o.Provider
+}
+
+func (o *SourceFile) GetReaderOptions() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ReaderOptions
+}
+
+func (o *SourceFile) GetSourceType() File {
+ return FileFile
+}
+
+func (o *SourceFile) GetURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.URL
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefilecreaterequest.go b/internal/sdk/pkg/models/shared/sourcefilecreaterequest.go
new file mode 100644
index 000000000..4298db66d
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcefilecreaterequest.go
@@ -0,0 +1,49 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SourceFileCreateRequest struct {
+ Configuration SourceFile `json:"configuration"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ // Optional secretID obtained through the public API OAuth redirect flow.
+ SecretID *string `json:"secretId,omitempty"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *SourceFileCreateRequest) GetConfiguration() SourceFile {
+ if o == nil {
+ return SourceFile{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFileCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceFileCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFileCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceFileCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefileputrequest.go b/internal/sdk/pkg/models/shared/sourcefileputrequest.go
new file mode 100644
index 000000000..05aeb531c
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcefileputrequest.go
@@ -0,0 +1,30 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SourceFilePutRequest struct {
+ Configuration SourceFileUpdate `json:"configuration"`
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *SourceFilePutRequest) GetConfiguration() SourceFileUpdate {
+ if o == nil {
+ return SourceFileUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFilePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFilePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefilesecure.go b/internal/sdk/pkg/models/shared/sourcefilesecure.go
deleted file mode 100755
index 0a32ef1f4..000000000
--- a/internal/sdk/pkg/models/shared/sourcefilesecure.go
+++ /dev/null
@@ -1,511 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
-)
-
-// SourceFileSecureFileFormat - The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
-type SourceFileSecureFileFormat string
-
-const (
- SourceFileSecureFileFormatCsv SourceFileSecureFileFormat = "csv"
- SourceFileSecureFileFormatJSON SourceFileSecureFileFormat = "json"
- SourceFileSecureFileFormatJsonl SourceFileSecureFileFormat = "jsonl"
- SourceFileSecureFileFormatExcel SourceFileSecureFileFormat = "excel"
- SourceFileSecureFileFormatExcelBinary SourceFileSecureFileFormat = "excel_binary"
- SourceFileSecureFileFormatFeather SourceFileSecureFileFormat = "feather"
- SourceFileSecureFileFormatParquet SourceFileSecureFileFormat = "parquet"
- SourceFileSecureFileFormatYaml SourceFileSecureFileFormat = "yaml"
-)
-
-func (e SourceFileSecureFileFormat) ToPointer() *SourceFileSecureFileFormat {
- return &e
-}
-
-func (e *SourceFileSecureFileFormat) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "csv":
- fallthrough
- case "json":
- fallthrough
- case "jsonl":
- fallthrough
- case "excel":
- fallthrough
- case "excel_binary":
- fallthrough
- case "feather":
- fallthrough
- case "parquet":
- fallthrough
- case "yaml":
- *e = SourceFileSecureFileFormat(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureFileFormat: %v", v)
- }
-}
-
-type SourceFileSecureStorageProviderSFTPSecureFileTransferProtocolStorage string
-
-const (
- SourceFileSecureStorageProviderSFTPSecureFileTransferProtocolStorageSftp SourceFileSecureStorageProviderSFTPSecureFileTransferProtocolStorage = "SFTP"
-)
-
-func (e SourceFileSecureStorageProviderSFTPSecureFileTransferProtocolStorage) ToPointer() *SourceFileSecureStorageProviderSFTPSecureFileTransferProtocolStorage {
- return &e
-}
-
-func (e *SourceFileSecureStorageProviderSFTPSecureFileTransferProtocolStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "SFTP":
- *e = SourceFileSecureStorageProviderSFTPSecureFileTransferProtocolStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureStorageProviderSFTPSecureFileTransferProtocolStorage: %v", v)
- }
-}
-
-// SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol struct {
- Host string `json:"host"`
- Password *string `json:"password,omitempty"`
- Port *string `json:"port,omitempty"`
- Storage SourceFileSecureStorageProviderSFTPSecureFileTransferProtocolStorage `json:"storage"`
- User string `json:"user"`
-}
-
-type SourceFileSecureStorageProviderSCPSecureCopyProtocolStorage string
-
-const (
- SourceFileSecureStorageProviderSCPSecureCopyProtocolStorageScp SourceFileSecureStorageProviderSCPSecureCopyProtocolStorage = "SCP"
-)
-
-func (e SourceFileSecureStorageProviderSCPSecureCopyProtocolStorage) ToPointer() *SourceFileSecureStorageProviderSCPSecureCopyProtocolStorage {
- return &e
-}
-
-func (e *SourceFileSecureStorageProviderSCPSecureCopyProtocolStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "SCP":
- *e = SourceFileSecureStorageProviderSCPSecureCopyProtocolStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureStorageProviderSCPSecureCopyProtocolStorage: %v", v)
- }
-}
-
-// SourceFileSecureStorageProviderSCPSecureCopyProtocol - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureStorageProviderSCPSecureCopyProtocol struct {
- Host string `json:"host"`
- Password *string `json:"password,omitempty"`
- Port *string `json:"port,omitempty"`
- Storage SourceFileSecureStorageProviderSCPSecureCopyProtocolStorage `json:"storage"`
- User string `json:"user"`
-}
-
-type SourceFileSecureStorageProviderSSHSecureShellStorage string
-
-const (
- SourceFileSecureStorageProviderSSHSecureShellStorageSSH SourceFileSecureStorageProviderSSHSecureShellStorage = "SSH"
-)
-
-func (e SourceFileSecureStorageProviderSSHSecureShellStorage) ToPointer() *SourceFileSecureStorageProviderSSHSecureShellStorage {
- return &e
-}
-
-func (e *SourceFileSecureStorageProviderSSHSecureShellStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "SSH":
- *e = SourceFileSecureStorageProviderSSHSecureShellStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureStorageProviderSSHSecureShellStorage: %v", v)
- }
-}
-
-// SourceFileSecureStorageProviderSSHSecureShell - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureStorageProviderSSHSecureShell struct {
- Host string `json:"host"`
- Password *string `json:"password,omitempty"`
- Port *string `json:"port,omitempty"`
- Storage SourceFileSecureStorageProviderSSHSecureShellStorage `json:"storage"`
- User string `json:"user"`
-}
-
-type SourceFileSecureStorageProviderAzBlobAzureBlobStorageStorage string
-
-const (
- SourceFileSecureStorageProviderAzBlobAzureBlobStorageStorageAzBlob SourceFileSecureStorageProviderAzBlobAzureBlobStorageStorage = "AzBlob"
-)
-
-func (e SourceFileSecureStorageProviderAzBlobAzureBlobStorageStorage) ToPointer() *SourceFileSecureStorageProviderAzBlobAzureBlobStorageStorage {
- return &e
-}
-
-func (e *SourceFileSecureStorageProviderAzBlobAzureBlobStorageStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "AzBlob":
- *e = SourceFileSecureStorageProviderAzBlobAzureBlobStorageStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureStorageProviderAzBlobAzureBlobStorageStorage: %v", v)
- }
-}
-
-// SourceFileSecureStorageProviderAzBlobAzureBlobStorage - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureStorageProviderAzBlobAzureBlobStorage struct {
- // To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
- SasToken *string `json:"sas_token,omitempty"`
- // To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
- SharedKey *string `json:"shared_key,omitempty"`
- Storage SourceFileSecureStorageProviderAzBlobAzureBlobStorageStorage `json:"storage"`
- // The globally unique name of the storage account that the desired blob sits within. See here for more details.
- StorageAccount string `json:"storage_account"`
-}
-
-type SourceFileSecureStorageProviderS3AmazonWebServicesStorage string
-
-const (
- SourceFileSecureStorageProviderS3AmazonWebServicesStorageS3 SourceFileSecureStorageProviderS3AmazonWebServicesStorage = "S3"
-)
-
-func (e SourceFileSecureStorageProviderS3AmazonWebServicesStorage) ToPointer() *SourceFileSecureStorageProviderS3AmazonWebServicesStorage {
- return &e
-}
-
-func (e *SourceFileSecureStorageProviderS3AmazonWebServicesStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "S3":
- *e = SourceFileSecureStorageProviderS3AmazonWebServicesStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureStorageProviderS3AmazonWebServicesStorage: %v", v)
- }
-}
-
-// SourceFileSecureStorageProviderS3AmazonWebServices - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureStorageProviderS3AmazonWebServices struct {
- // In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- AwsAccessKeyID *string `json:"aws_access_key_id,omitempty"`
- // In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- AwsSecretAccessKey *string `json:"aws_secret_access_key,omitempty"`
- Storage SourceFileSecureStorageProviderS3AmazonWebServicesStorage `json:"storage"`
-}
-
-type SourceFileSecureStorageProviderGCSGoogleCloudStorageStorage string
-
-const (
- SourceFileSecureStorageProviderGCSGoogleCloudStorageStorageGcs SourceFileSecureStorageProviderGCSGoogleCloudStorageStorage = "GCS"
-)
-
-func (e SourceFileSecureStorageProviderGCSGoogleCloudStorageStorage) ToPointer() *SourceFileSecureStorageProviderGCSGoogleCloudStorageStorage {
- return &e
-}
-
-func (e *SourceFileSecureStorageProviderGCSGoogleCloudStorageStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "GCS":
- *e = SourceFileSecureStorageProviderGCSGoogleCloudStorageStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureStorageProviderGCSGoogleCloudStorageStorage: %v", v)
- }
-}
-
-// SourceFileSecureStorageProviderGCSGoogleCloudStorage - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureStorageProviderGCSGoogleCloudStorage struct {
- // In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.
- ServiceAccountJSON *string `json:"service_account_json,omitempty"`
- Storage SourceFileSecureStorageProviderGCSGoogleCloudStorageStorage `json:"storage"`
-}
-
-type SourceFileSecureStorageProviderHTTPSPublicWebStorage string
-
-const (
- SourceFileSecureStorageProviderHTTPSPublicWebStorageHTTPS SourceFileSecureStorageProviderHTTPSPublicWebStorage = "HTTPS"
-)
-
-func (e SourceFileSecureStorageProviderHTTPSPublicWebStorage) ToPointer() *SourceFileSecureStorageProviderHTTPSPublicWebStorage {
- return &e
-}
-
-func (e *SourceFileSecureStorageProviderHTTPSPublicWebStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "HTTPS":
- *e = SourceFileSecureStorageProviderHTTPSPublicWebStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureStorageProviderHTTPSPublicWebStorage: %v", v)
- }
-}
-
-// SourceFileSecureStorageProviderHTTPSPublicWeb - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureStorageProviderHTTPSPublicWeb struct {
- Storage SourceFileSecureStorageProviderHTTPSPublicWebStorage `json:"storage"`
- // Add User-Agent to request
- UserAgent *bool `json:"user_agent,omitempty"`
-}
-
-type SourceFileSecureStorageProviderType string
-
-const (
- SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderHTTPSPublicWeb SourceFileSecureStorageProviderType = "source-file-secure_Storage Provider_HTTPS: Public Web"
- SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderGCSGoogleCloudStorage SourceFileSecureStorageProviderType = "source-file-secure_Storage Provider_GCS: Google Cloud Storage"
- SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderS3AmazonWebServices SourceFileSecureStorageProviderType = "source-file-secure_Storage Provider_S3: Amazon Web Services"
- SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderAzBlobAzureBlobStorage SourceFileSecureStorageProviderType = "source-file-secure_Storage Provider_AzBlob: Azure Blob Storage"
- SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderSSHSecureShell SourceFileSecureStorageProviderType = "source-file-secure_Storage Provider_SSH: Secure Shell"
- SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderSCPSecureCopyProtocol SourceFileSecureStorageProviderType = "source-file-secure_Storage Provider_SCP: Secure copy protocol"
- SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderSFTPSecureFileTransferProtocol SourceFileSecureStorageProviderType = "source-file-secure_Storage Provider_SFTP: Secure File Transfer Protocol"
-)
-
-type SourceFileSecureStorageProvider struct {
- SourceFileSecureStorageProviderHTTPSPublicWeb *SourceFileSecureStorageProviderHTTPSPublicWeb
- SourceFileSecureStorageProviderGCSGoogleCloudStorage *SourceFileSecureStorageProviderGCSGoogleCloudStorage
- SourceFileSecureStorageProviderS3AmazonWebServices *SourceFileSecureStorageProviderS3AmazonWebServices
- SourceFileSecureStorageProviderAzBlobAzureBlobStorage *SourceFileSecureStorageProviderAzBlobAzureBlobStorage
- SourceFileSecureStorageProviderSSHSecureShell *SourceFileSecureStorageProviderSSHSecureShell
- SourceFileSecureStorageProviderSCPSecureCopyProtocol *SourceFileSecureStorageProviderSCPSecureCopyProtocol
- SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol *SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol
-
- Type SourceFileSecureStorageProviderType
-}
-
-func CreateSourceFileSecureStorageProviderSourceFileSecureStorageProviderHTTPSPublicWeb(sourceFileSecureStorageProviderHTTPSPublicWeb SourceFileSecureStorageProviderHTTPSPublicWeb) SourceFileSecureStorageProvider {
- typ := SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderHTTPSPublicWeb
-
- return SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderHTTPSPublicWeb: &sourceFileSecureStorageProviderHTTPSPublicWeb,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureStorageProviderSourceFileSecureStorageProviderGCSGoogleCloudStorage(sourceFileSecureStorageProviderGCSGoogleCloudStorage SourceFileSecureStorageProviderGCSGoogleCloudStorage) SourceFileSecureStorageProvider {
- typ := SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderGCSGoogleCloudStorage
-
- return SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderGCSGoogleCloudStorage: &sourceFileSecureStorageProviderGCSGoogleCloudStorage,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureStorageProviderSourceFileSecureStorageProviderS3AmazonWebServices(sourceFileSecureStorageProviderS3AmazonWebServices SourceFileSecureStorageProviderS3AmazonWebServices) SourceFileSecureStorageProvider {
- typ := SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderS3AmazonWebServices
-
- return SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderS3AmazonWebServices: &sourceFileSecureStorageProviderS3AmazonWebServices,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureStorageProviderSourceFileSecureStorageProviderAzBlobAzureBlobStorage(sourceFileSecureStorageProviderAzBlobAzureBlobStorage SourceFileSecureStorageProviderAzBlobAzureBlobStorage) SourceFileSecureStorageProvider {
- typ := SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderAzBlobAzureBlobStorage
-
- return SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderAzBlobAzureBlobStorage: &sourceFileSecureStorageProviderAzBlobAzureBlobStorage,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureStorageProviderSourceFileSecureStorageProviderSSHSecureShell(sourceFileSecureStorageProviderSSHSecureShell SourceFileSecureStorageProviderSSHSecureShell) SourceFileSecureStorageProvider {
- typ := SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderSSHSecureShell
-
- return SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderSSHSecureShell: &sourceFileSecureStorageProviderSSHSecureShell,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureStorageProviderSourceFileSecureStorageProviderSCPSecureCopyProtocol(sourceFileSecureStorageProviderSCPSecureCopyProtocol SourceFileSecureStorageProviderSCPSecureCopyProtocol) SourceFileSecureStorageProvider {
- typ := SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderSCPSecureCopyProtocol
-
- return SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderSCPSecureCopyProtocol: &sourceFileSecureStorageProviderSCPSecureCopyProtocol,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureStorageProviderSourceFileSecureStorageProviderSFTPSecureFileTransferProtocol(sourceFileSecureStorageProviderSFTPSecureFileTransferProtocol SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol) SourceFileSecureStorageProvider {
- typ := SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderSFTPSecureFileTransferProtocol
-
- return SourceFileSecureStorageProvider{
- SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol: &sourceFileSecureStorageProviderSFTPSecureFileTransferProtocol,
- Type: typ,
- }
-}
-
-func (u *SourceFileSecureStorageProvider) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceFileSecureStorageProviderHTTPSPublicWeb := new(SourceFileSecureStorageProviderHTTPSPublicWeb)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureStorageProviderHTTPSPublicWeb); err == nil {
- u.SourceFileSecureStorageProviderHTTPSPublicWeb = sourceFileSecureStorageProviderHTTPSPublicWeb
- u.Type = SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderHTTPSPublicWeb
- return nil
- }
-
- sourceFileSecureStorageProviderGCSGoogleCloudStorage := new(SourceFileSecureStorageProviderGCSGoogleCloudStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureStorageProviderGCSGoogleCloudStorage); err == nil {
- u.SourceFileSecureStorageProviderGCSGoogleCloudStorage = sourceFileSecureStorageProviderGCSGoogleCloudStorage
- u.Type = SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderGCSGoogleCloudStorage
- return nil
- }
-
- sourceFileSecureStorageProviderS3AmazonWebServices := new(SourceFileSecureStorageProviderS3AmazonWebServices)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureStorageProviderS3AmazonWebServices); err == nil {
- u.SourceFileSecureStorageProviderS3AmazonWebServices = sourceFileSecureStorageProviderS3AmazonWebServices
- u.Type = SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderS3AmazonWebServices
- return nil
- }
-
- sourceFileSecureStorageProviderAzBlobAzureBlobStorage := new(SourceFileSecureStorageProviderAzBlobAzureBlobStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureStorageProviderAzBlobAzureBlobStorage); err == nil {
- u.SourceFileSecureStorageProviderAzBlobAzureBlobStorage = sourceFileSecureStorageProviderAzBlobAzureBlobStorage
- u.Type = SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderAzBlobAzureBlobStorage
- return nil
- }
-
- sourceFileSecureStorageProviderSSHSecureShell := new(SourceFileSecureStorageProviderSSHSecureShell)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureStorageProviderSSHSecureShell); err == nil {
- u.SourceFileSecureStorageProviderSSHSecureShell = sourceFileSecureStorageProviderSSHSecureShell
- u.Type = SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderSSHSecureShell
- return nil
- }
-
- sourceFileSecureStorageProviderSCPSecureCopyProtocol := new(SourceFileSecureStorageProviderSCPSecureCopyProtocol)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureStorageProviderSCPSecureCopyProtocol); err == nil {
- u.SourceFileSecureStorageProviderSCPSecureCopyProtocol = sourceFileSecureStorageProviderSCPSecureCopyProtocol
- u.Type = SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderSCPSecureCopyProtocol
- return nil
- }
-
- sourceFileSecureStorageProviderSFTPSecureFileTransferProtocol := new(SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureStorageProviderSFTPSecureFileTransferProtocol); err == nil {
- u.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol = sourceFileSecureStorageProviderSFTPSecureFileTransferProtocol
- u.Type = SourceFileSecureStorageProviderTypeSourceFileSecureStorageProviderSFTPSecureFileTransferProtocol
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u SourceFileSecureStorageProvider) MarshalJSON() ([]byte, error) {
- if u.SourceFileSecureStorageProviderHTTPSPublicWeb != nil {
- return json.Marshal(u.SourceFileSecureStorageProviderHTTPSPublicWeb)
- }
-
- if u.SourceFileSecureStorageProviderGCSGoogleCloudStorage != nil {
- return json.Marshal(u.SourceFileSecureStorageProviderGCSGoogleCloudStorage)
- }
-
- if u.SourceFileSecureStorageProviderS3AmazonWebServices != nil {
- return json.Marshal(u.SourceFileSecureStorageProviderS3AmazonWebServices)
- }
-
- if u.SourceFileSecureStorageProviderAzBlobAzureBlobStorage != nil {
- return json.Marshal(u.SourceFileSecureStorageProviderAzBlobAzureBlobStorage)
- }
-
- if u.SourceFileSecureStorageProviderSSHSecureShell != nil {
- return json.Marshal(u.SourceFileSecureStorageProviderSSHSecureShell)
- }
-
- if u.SourceFileSecureStorageProviderSCPSecureCopyProtocol != nil {
- return json.Marshal(u.SourceFileSecureStorageProviderSCPSecureCopyProtocol)
- }
-
- if u.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol != nil {
- return json.Marshal(u.SourceFileSecureStorageProviderSFTPSecureFileTransferProtocol)
- }
-
- return nil, nil
-}
-
-type SourceFileSecureFileSecure string
-
-const (
- SourceFileSecureFileSecureFileSecure SourceFileSecureFileSecure = "file-secure"
-)
-
-func (e SourceFileSecureFileSecure) ToPointer() *SourceFileSecureFileSecure {
- return &e
-}
-
-func (e *SourceFileSecureFileSecure) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "file-secure":
- *e = SourceFileSecureFileSecure(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureFileSecure: %v", v)
- }
-}
-
-type SourceFileSecure struct {
- // The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
- DatasetName string `json:"dataset_name"`
- // The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
- Format SourceFileSecureFileFormat `json:"format"`
- // The storage Provider or Location of the file(s) which should be replicated.
- Provider SourceFileSecureStorageProvider `json:"provider"`
- // This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
- ReaderOptions *string `json:"reader_options,omitempty"`
- SourceType SourceFileSecureFileSecure `json:"sourceType"`
- // The URL path to access the file which should be replicated.
- URL string `json:"url"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcefilesecurecreaterequest.go b/internal/sdk/pkg/models/shared/sourcefilesecurecreaterequest.go
deleted file mode 100755
index 9244320de..000000000
--- a/internal/sdk/pkg/models/shared/sourcefilesecurecreaterequest.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceFileSecureCreateRequest struct {
- Configuration SourceFileSecure `json:"configuration"`
- Name string `json:"name"`
- // Optional secretID obtained through the public API OAuth redirect flow.
- SecretID *string `json:"secretId,omitempty"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcefilesecureputrequest.go b/internal/sdk/pkg/models/shared/sourcefilesecureputrequest.go
deleted file mode 100755
index a881b2c90..000000000
--- a/internal/sdk/pkg/models/shared/sourcefilesecureputrequest.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceFileSecurePutRequest struct {
- Configuration SourceFileSecureUpdate `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcefilesecureupdate.go b/internal/sdk/pkg/models/shared/sourcefilesecureupdate.go
deleted file mode 100755
index 307221974..000000000
--- a/internal/sdk/pkg/models/shared/sourcefilesecureupdate.go
+++ /dev/null
@@ -1,486 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
-)
-
-// SourceFileSecureUpdateFileFormat - The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
-type SourceFileSecureUpdateFileFormat string
-
-const (
- SourceFileSecureUpdateFileFormatCsv SourceFileSecureUpdateFileFormat = "csv"
- SourceFileSecureUpdateFileFormatJSON SourceFileSecureUpdateFileFormat = "json"
- SourceFileSecureUpdateFileFormatJsonl SourceFileSecureUpdateFileFormat = "jsonl"
- SourceFileSecureUpdateFileFormatExcel SourceFileSecureUpdateFileFormat = "excel"
- SourceFileSecureUpdateFileFormatExcelBinary SourceFileSecureUpdateFileFormat = "excel_binary"
- SourceFileSecureUpdateFileFormatFeather SourceFileSecureUpdateFileFormat = "feather"
- SourceFileSecureUpdateFileFormatParquet SourceFileSecureUpdateFileFormat = "parquet"
- SourceFileSecureUpdateFileFormatYaml SourceFileSecureUpdateFileFormat = "yaml"
-)
-
-func (e SourceFileSecureUpdateFileFormat) ToPointer() *SourceFileSecureUpdateFileFormat {
- return &e
-}
-
-func (e *SourceFileSecureUpdateFileFormat) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "csv":
- fallthrough
- case "json":
- fallthrough
- case "jsonl":
- fallthrough
- case "excel":
- fallthrough
- case "excel_binary":
- fallthrough
- case "feather":
- fallthrough
- case "parquet":
- fallthrough
- case "yaml":
- *e = SourceFileSecureUpdateFileFormat(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureUpdateFileFormat: %v", v)
- }
-}
-
-type SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocolStorage string
-
-const (
- SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocolStorageSftp SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocolStorage = "SFTP"
-)
-
-func (e SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocolStorage) ToPointer() *SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocolStorage {
- return &e
-}
-
-func (e *SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocolStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "SFTP":
- *e = SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocolStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocolStorage: %v", v)
- }
-}
-
-// SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol struct {
- Host string `json:"host"`
- Password *string `json:"password,omitempty"`
- Port *string `json:"port,omitempty"`
- Storage SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocolStorage `json:"storage"`
- User string `json:"user"`
-}
-
-type SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocolStorage string
-
-const (
- SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocolStorageScp SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocolStorage = "SCP"
-)
-
-func (e SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocolStorage) ToPointer() *SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocolStorage {
- return &e
-}
-
-func (e *SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocolStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "SCP":
- *e = SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocolStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocolStorage: %v", v)
- }
-}
-
-// SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol struct {
- Host string `json:"host"`
- Password *string `json:"password,omitempty"`
- Port *string `json:"port,omitempty"`
- Storage SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocolStorage `json:"storage"`
- User string `json:"user"`
-}
-
-type SourceFileSecureUpdateStorageProviderSSHSecureShellStorage string
-
-const (
- SourceFileSecureUpdateStorageProviderSSHSecureShellStorageSSH SourceFileSecureUpdateStorageProviderSSHSecureShellStorage = "SSH"
-)
-
-func (e SourceFileSecureUpdateStorageProviderSSHSecureShellStorage) ToPointer() *SourceFileSecureUpdateStorageProviderSSHSecureShellStorage {
- return &e
-}
-
-func (e *SourceFileSecureUpdateStorageProviderSSHSecureShellStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "SSH":
- *e = SourceFileSecureUpdateStorageProviderSSHSecureShellStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureUpdateStorageProviderSSHSecureShellStorage: %v", v)
- }
-}
-
-// SourceFileSecureUpdateStorageProviderSSHSecureShell - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureUpdateStorageProviderSSHSecureShell struct {
- Host string `json:"host"`
- Password *string `json:"password,omitempty"`
- Port *string `json:"port,omitempty"`
- Storage SourceFileSecureUpdateStorageProviderSSHSecureShellStorage `json:"storage"`
- User string `json:"user"`
-}
-
-type SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorageStorage string
-
-const (
- SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorageStorageAzBlob SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorageStorage = "AzBlob"
-)
-
-func (e SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorageStorage) ToPointer() *SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorageStorage {
- return &e
-}
-
-func (e *SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorageStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "AzBlob":
- *e = SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorageStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorageStorage: %v", v)
- }
-}
-
-// SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage struct {
- // To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
- SasToken *string `json:"sas_token,omitempty"`
- // To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
- SharedKey *string `json:"shared_key,omitempty"`
- Storage SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorageStorage `json:"storage"`
- // The globally unique name of the storage account that the desired blob sits within. See here for more details.
- StorageAccount string `json:"storage_account"`
-}
-
-type SourceFileSecureUpdateStorageProviderS3AmazonWebServicesStorage string
-
-const (
- SourceFileSecureUpdateStorageProviderS3AmazonWebServicesStorageS3 SourceFileSecureUpdateStorageProviderS3AmazonWebServicesStorage = "S3"
-)
-
-func (e SourceFileSecureUpdateStorageProviderS3AmazonWebServicesStorage) ToPointer() *SourceFileSecureUpdateStorageProviderS3AmazonWebServicesStorage {
- return &e
-}
-
-func (e *SourceFileSecureUpdateStorageProviderS3AmazonWebServicesStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "S3":
- *e = SourceFileSecureUpdateStorageProviderS3AmazonWebServicesStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureUpdateStorageProviderS3AmazonWebServicesStorage: %v", v)
- }
-}
-
-// SourceFileSecureUpdateStorageProviderS3AmazonWebServices - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureUpdateStorageProviderS3AmazonWebServices struct {
- // In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- AwsAccessKeyID *string `json:"aws_access_key_id,omitempty"`
- // In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- AwsSecretAccessKey *string `json:"aws_secret_access_key,omitempty"`
- Storage SourceFileSecureUpdateStorageProviderS3AmazonWebServicesStorage `json:"storage"`
-}
-
-type SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorageStorage string
-
-const (
- SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorageStorageGcs SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorageStorage = "GCS"
-)
-
-func (e SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorageStorage) ToPointer() *SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorageStorage {
- return &e
-}
-
-func (e *SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorageStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "GCS":
- *e = SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorageStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorageStorage: %v", v)
- }
-}
-
-// SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage struct {
- // In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.
- ServiceAccountJSON *string `json:"service_account_json,omitempty"`
- Storage SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorageStorage `json:"storage"`
-}
-
-type SourceFileSecureUpdateStorageProviderHTTPSPublicWebStorage string
-
-const (
- SourceFileSecureUpdateStorageProviderHTTPSPublicWebStorageHTTPS SourceFileSecureUpdateStorageProviderHTTPSPublicWebStorage = "HTTPS"
-)
-
-func (e SourceFileSecureUpdateStorageProviderHTTPSPublicWebStorage) ToPointer() *SourceFileSecureUpdateStorageProviderHTTPSPublicWebStorage {
- return &e
-}
-
-func (e *SourceFileSecureUpdateStorageProviderHTTPSPublicWebStorage) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "HTTPS":
- *e = SourceFileSecureUpdateStorageProviderHTTPSPublicWebStorage(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceFileSecureUpdateStorageProviderHTTPSPublicWebStorage: %v", v)
- }
-}
-
-// SourceFileSecureUpdateStorageProviderHTTPSPublicWeb - The storage Provider or Location of the file(s) which should be replicated.
-type SourceFileSecureUpdateStorageProviderHTTPSPublicWeb struct {
- Storage SourceFileSecureUpdateStorageProviderHTTPSPublicWebStorage `json:"storage"`
- // Add User-Agent to request
- UserAgent *bool `json:"user_agent,omitempty"`
-}
-
-type SourceFileSecureUpdateStorageProviderType string
-
-const (
- SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderHTTPSPublicWeb SourceFileSecureUpdateStorageProviderType = "source-file-secure-update_Storage Provider_HTTPS: Public Web"
- SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage SourceFileSecureUpdateStorageProviderType = "source-file-secure-update_Storage Provider_GCS: Google Cloud Storage"
- SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderS3AmazonWebServices SourceFileSecureUpdateStorageProviderType = "source-file-secure-update_Storage Provider_S3: Amazon Web Services"
- SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage SourceFileSecureUpdateStorageProviderType = "source-file-secure-update_Storage Provider_AzBlob: Azure Blob Storage"
- SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderSSHSecureShell SourceFileSecureUpdateStorageProviderType = "source-file-secure-update_Storage Provider_SSH: Secure Shell"
- SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol SourceFileSecureUpdateStorageProviderType = "source-file-secure-update_Storage Provider_SCP: Secure copy protocol"
- SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol SourceFileSecureUpdateStorageProviderType = "source-file-secure-update_Storage Provider_SFTP: Secure File Transfer Protocol"
-)
-
-type SourceFileSecureUpdateStorageProvider struct {
- SourceFileSecureUpdateStorageProviderHTTPSPublicWeb *SourceFileSecureUpdateStorageProviderHTTPSPublicWeb
- SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage *SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage
- SourceFileSecureUpdateStorageProviderS3AmazonWebServices *SourceFileSecureUpdateStorageProviderS3AmazonWebServices
- SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage *SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage
- SourceFileSecureUpdateStorageProviderSSHSecureShell *SourceFileSecureUpdateStorageProviderSSHSecureShell
- SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol *SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol
- SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol *SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol
-
- Type SourceFileSecureUpdateStorageProviderType
-}
-
-func CreateSourceFileSecureUpdateStorageProviderSourceFileSecureUpdateStorageProviderHTTPSPublicWeb(sourceFileSecureUpdateStorageProviderHTTPSPublicWeb SourceFileSecureUpdateStorageProviderHTTPSPublicWeb) SourceFileSecureUpdateStorageProvider {
- typ := SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderHTTPSPublicWeb
-
- return SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderHTTPSPublicWeb: &sourceFileSecureUpdateStorageProviderHTTPSPublicWeb,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureUpdateStorageProviderSourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage(sourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage) SourceFileSecureUpdateStorageProvider {
- typ := SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage
-
- return SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage: &sourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureUpdateStorageProviderSourceFileSecureUpdateStorageProviderS3AmazonWebServices(sourceFileSecureUpdateStorageProviderS3AmazonWebServices SourceFileSecureUpdateStorageProviderS3AmazonWebServices) SourceFileSecureUpdateStorageProvider {
- typ := SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderS3AmazonWebServices
-
- return SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderS3AmazonWebServices: &sourceFileSecureUpdateStorageProviderS3AmazonWebServices,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureUpdateStorageProviderSourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage(sourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage) SourceFileSecureUpdateStorageProvider {
- typ := SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage
-
- return SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage: &sourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureUpdateStorageProviderSourceFileSecureUpdateStorageProviderSSHSecureShell(sourceFileSecureUpdateStorageProviderSSHSecureShell SourceFileSecureUpdateStorageProviderSSHSecureShell) SourceFileSecureUpdateStorageProvider {
- typ := SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderSSHSecureShell
-
- return SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderSSHSecureShell: &sourceFileSecureUpdateStorageProviderSSHSecureShell,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureUpdateStorageProviderSourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol(sourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol) SourceFileSecureUpdateStorageProvider {
- typ := SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol
-
- return SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol: &sourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol,
- Type: typ,
- }
-}
-
-func CreateSourceFileSecureUpdateStorageProviderSourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol(sourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol) SourceFileSecureUpdateStorageProvider {
- typ := SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol
-
- return SourceFileSecureUpdateStorageProvider{
- SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol: &sourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol,
- Type: typ,
- }
-}
-
-func (u *SourceFileSecureUpdateStorageProvider) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceFileSecureUpdateStorageProviderHTTPSPublicWeb := new(SourceFileSecureUpdateStorageProviderHTTPSPublicWeb)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureUpdateStorageProviderHTTPSPublicWeb); err == nil {
- u.SourceFileSecureUpdateStorageProviderHTTPSPublicWeb = sourceFileSecureUpdateStorageProviderHTTPSPublicWeb
- u.Type = SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderHTTPSPublicWeb
- return nil
- }
-
- sourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage := new(SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage); err == nil {
- u.SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage = sourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage
- u.Type = SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage
- return nil
- }
-
- sourceFileSecureUpdateStorageProviderS3AmazonWebServices := new(SourceFileSecureUpdateStorageProviderS3AmazonWebServices)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureUpdateStorageProviderS3AmazonWebServices); err == nil {
- u.SourceFileSecureUpdateStorageProviderS3AmazonWebServices = sourceFileSecureUpdateStorageProviderS3AmazonWebServices
- u.Type = SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderS3AmazonWebServices
- return nil
- }
-
- sourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage := new(SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage); err == nil {
- u.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage = sourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage
- u.Type = SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage
- return nil
- }
-
- sourceFileSecureUpdateStorageProviderSSHSecureShell := new(SourceFileSecureUpdateStorageProviderSSHSecureShell)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureUpdateStorageProviderSSHSecureShell); err == nil {
- u.SourceFileSecureUpdateStorageProviderSSHSecureShell = sourceFileSecureUpdateStorageProviderSSHSecureShell
- u.Type = SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderSSHSecureShell
- return nil
- }
-
- sourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol := new(SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol); err == nil {
- u.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol = sourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol
- u.Type = SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol
- return nil
- }
-
- sourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol := new(SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol); err == nil {
- u.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol = sourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol
- u.Type = SourceFileSecureUpdateStorageProviderTypeSourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u SourceFileSecureUpdateStorageProvider) MarshalJSON() ([]byte, error) {
- if u.SourceFileSecureUpdateStorageProviderHTTPSPublicWeb != nil {
- return json.Marshal(u.SourceFileSecureUpdateStorageProviderHTTPSPublicWeb)
- }
-
- if u.SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage != nil {
- return json.Marshal(u.SourceFileSecureUpdateStorageProviderGCSGoogleCloudStorage)
- }
-
- if u.SourceFileSecureUpdateStorageProviderS3AmazonWebServices != nil {
- return json.Marshal(u.SourceFileSecureUpdateStorageProviderS3AmazonWebServices)
- }
-
- if u.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage != nil {
- return json.Marshal(u.SourceFileSecureUpdateStorageProviderAzBlobAzureBlobStorage)
- }
-
- if u.SourceFileSecureUpdateStorageProviderSSHSecureShell != nil {
- return json.Marshal(u.SourceFileSecureUpdateStorageProviderSSHSecureShell)
- }
-
- if u.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol != nil {
- return json.Marshal(u.SourceFileSecureUpdateStorageProviderSCPSecureCopyProtocol)
- }
-
- if u.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol != nil {
- return json.Marshal(u.SourceFileSecureUpdateStorageProviderSFTPSecureFileTransferProtocol)
- }
-
- return nil, nil
-}
-
-type SourceFileSecureUpdate struct {
- // The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
- DatasetName string `json:"dataset_name"`
- // The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
- Format SourceFileSecureUpdateFileFormat `json:"format"`
- // The storage Provider or Location of the file(s) which should be replicated.
- Provider SourceFileSecureUpdateStorageProvider `json:"provider"`
- // This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
- ReaderOptions *string `json:"reader_options,omitempty"`
- // The URL path to access the file which should be replicated.
- URL string `json:"url"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcefileupdate.go b/internal/sdk/pkg/models/shared/sourcefileupdate.go
new file mode 100644
index 000000000..c6ec5ab76
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcefileupdate.go
@@ -0,0 +1,755 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+// FileFormat - The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
+type FileFormat string
+
+const (
+ FileFormatCsv FileFormat = "csv"
+ FileFormatJSON FileFormat = "json"
+ FileFormatJsonl FileFormat = "jsonl"
+ FileFormatExcel FileFormat = "excel"
+ FileFormatExcelBinary FileFormat = "excel_binary"
+ FileFormatFeather FileFormat = "feather"
+ FileFormatParquet FileFormat = "parquet"
+ FileFormatYaml FileFormat = "yaml"
+)
+
+func (e FileFormat) ToPointer() *FileFormat {
+ return &e
+}
+
+func (e *FileFormat) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "csv":
+ fallthrough
+ case "json":
+ fallthrough
+ case "jsonl":
+ fallthrough
+ case "excel":
+ fallthrough
+ case "excel_binary":
+ fallthrough
+ case "feather":
+ fallthrough
+ case "parquet":
+ fallthrough
+ case "yaml":
+ *e = FileFormat(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for FileFormat: %v", v)
+ }
+}
+
+type SourceFileUpdateSchemasProviderStorageProvider7Storage string
+
+const (
+ SourceFileUpdateSchemasProviderStorageProvider7StorageSftp SourceFileUpdateSchemasProviderStorageProvider7Storage = "SFTP"
+)
+
+func (e SourceFileUpdateSchemasProviderStorageProvider7Storage) ToPointer() *SourceFileUpdateSchemasProviderStorageProvider7Storage {
+ return &e
+}
+
+func (e *SourceFileUpdateSchemasProviderStorageProvider7Storage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "SFTP":
+ *e = SourceFileUpdateSchemasProviderStorageProvider7Storage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileUpdateSchemasProviderStorageProvider7Storage: %v", v)
+ }
+}
+
+// SFTPSecureFileTransferProtocol - The storage Provider or Location of the file(s) which should be replicated.
+type SFTPSecureFileTransferProtocol struct {
+ Host string `json:"host"`
+ Password *string `json:"password,omitempty"`
+ Port *string `default:"22" json:"port"`
+ storage SourceFileUpdateSchemasProviderStorageProvider7Storage `const:"SFTP" json:"storage"`
+ User string `json:"user"`
+}
+
+func (s SFTPSecureFileTransferProtocol) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SFTPSecureFileTransferProtocol) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SFTPSecureFileTransferProtocol) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SFTPSecureFileTransferProtocol) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SFTPSecureFileTransferProtocol) GetPort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SFTPSecureFileTransferProtocol) GetStorage() SourceFileUpdateSchemasProviderStorageProvider7Storage {
+ return SourceFileUpdateSchemasProviderStorageProvider7StorageSftp
+}
+
+func (o *SFTPSecureFileTransferProtocol) GetUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.User
+}
+
+type SourceFileUpdateSchemasProviderStorageProvider6Storage string
+
+const (
+ SourceFileUpdateSchemasProviderStorageProvider6StorageScp SourceFileUpdateSchemasProviderStorageProvider6Storage = "SCP"
+)
+
+func (e SourceFileUpdateSchemasProviderStorageProvider6Storage) ToPointer() *SourceFileUpdateSchemasProviderStorageProvider6Storage {
+ return &e
+}
+
+func (e *SourceFileUpdateSchemasProviderStorageProvider6Storage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "SCP":
+ *e = SourceFileUpdateSchemasProviderStorageProvider6Storage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileUpdateSchemasProviderStorageProvider6Storage: %v", v)
+ }
+}
+
+// SCPSecureCopyProtocol - The storage Provider or Location of the file(s) which should be replicated.
+type SCPSecureCopyProtocol struct {
+ Host string `json:"host"`
+ Password *string `json:"password,omitempty"`
+ Port *string `default:"22" json:"port"`
+ storage SourceFileUpdateSchemasProviderStorageProvider6Storage `const:"SCP" json:"storage"`
+ User string `json:"user"`
+}
+
+func (s SCPSecureCopyProtocol) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SCPSecureCopyProtocol) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SCPSecureCopyProtocol) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SCPSecureCopyProtocol) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SCPSecureCopyProtocol) GetPort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SCPSecureCopyProtocol) GetStorage() SourceFileUpdateSchemasProviderStorageProvider6Storage {
+ return SourceFileUpdateSchemasProviderStorageProvider6StorageScp
+}
+
+func (o *SCPSecureCopyProtocol) GetUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.User
+}
+
+type SourceFileUpdateSchemasProviderStorageProviderStorage string
+
+const (
+ SourceFileUpdateSchemasProviderStorageProviderStorageSSH SourceFileUpdateSchemasProviderStorageProviderStorage = "SSH"
+)
+
+func (e SourceFileUpdateSchemasProviderStorageProviderStorage) ToPointer() *SourceFileUpdateSchemasProviderStorageProviderStorage {
+ return &e
+}
+
+func (e *SourceFileUpdateSchemasProviderStorageProviderStorage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "SSH":
+ *e = SourceFileUpdateSchemasProviderStorageProviderStorage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileUpdateSchemasProviderStorageProviderStorage: %v", v)
+ }
+}
+
+// SSHSecureShell - The storage Provider or Location of the file(s) which should be replicated.
+type SSHSecureShell struct {
+ Host string `json:"host"`
+ Password *string `json:"password,omitempty"`
+ Port *string `default:"22" json:"port"`
+ storage SourceFileUpdateSchemasProviderStorageProviderStorage `const:"SSH" json:"storage"`
+ User string `json:"user"`
+}
+
+func (s SSHSecureShell) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SSHSecureShell) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SSHSecureShell) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SSHSecureShell) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SSHSecureShell) GetPort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SSHSecureShell) GetStorage() SourceFileUpdateSchemasProviderStorageProviderStorage {
+ return SourceFileUpdateSchemasProviderStorageProviderStorageSSH
+}
+
+func (o *SSHSecureShell) GetUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.User
+}
+
+type SourceFileUpdateSchemasProviderStorage string
+
+const (
+ SourceFileUpdateSchemasProviderStorageAzBlob SourceFileUpdateSchemasProviderStorage = "AzBlob"
+)
+
+func (e SourceFileUpdateSchemasProviderStorage) ToPointer() *SourceFileUpdateSchemasProviderStorage {
+ return &e
+}
+
+func (e *SourceFileUpdateSchemasProviderStorage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "AzBlob":
+ *e = SourceFileUpdateSchemasProviderStorage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileUpdateSchemasProviderStorage: %v", v)
+ }
+}
+
+// AzBlobAzureBlobStorage - The storage Provider or Location of the file(s) which should be replicated.
+type AzBlobAzureBlobStorage struct {
+ // To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
+ SasToken *string `json:"sas_token,omitempty"`
+ // To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
+ SharedKey *string `json:"shared_key,omitempty"`
+ storage SourceFileUpdateSchemasProviderStorage `const:"AzBlob" json:"storage"`
+ // The globally unique name of the storage account that the desired blob sits within. See here for more details.
+ StorageAccount string `json:"storage_account"`
+}
+
+func (a AzBlobAzureBlobStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AzBlobAzureBlobStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AzBlobAzureBlobStorage) GetSasToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SasToken
+}
+
+func (o *AzBlobAzureBlobStorage) GetSharedKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SharedKey
+}
+
+func (o *AzBlobAzureBlobStorage) GetStorage() SourceFileUpdateSchemasProviderStorage {
+ return SourceFileUpdateSchemasProviderStorageAzBlob
+}
+
+func (o *AzBlobAzureBlobStorage) GetStorageAccount() string {
+ if o == nil {
+ return ""
+ }
+ return o.StorageAccount
+}
+
+type SourceFileUpdateSchemasStorage string
+
+const (
+ SourceFileUpdateSchemasStorageS3 SourceFileUpdateSchemasStorage = "S3"
+)
+
+func (e SourceFileUpdateSchemasStorage) ToPointer() *SourceFileUpdateSchemasStorage {
+ return &e
+}
+
+func (e *SourceFileUpdateSchemasStorage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "S3":
+ *e = SourceFileUpdateSchemasStorage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileUpdateSchemasStorage: %v", v)
+ }
+}
+
+// SourceFileUpdateS3AmazonWebServices - The storage Provider or Location of the file(s) which should be replicated.
+type SourceFileUpdateS3AmazonWebServices struct {
+ // In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
+ AwsAccessKeyID *string `json:"aws_access_key_id,omitempty"`
+ // In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
+ AwsSecretAccessKey *string `json:"aws_secret_access_key,omitempty"`
+ storage SourceFileUpdateSchemasStorage `const:"S3" json:"storage"`
+}
+
+func (s SourceFileUpdateS3AmazonWebServices) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFileUpdateS3AmazonWebServices) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFileUpdateS3AmazonWebServices) GetAwsAccessKeyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsAccessKeyID
+}
+
+func (o *SourceFileUpdateS3AmazonWebServices) GetAwsSecretAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsSecretAccessKey
+}
+
+func (o *SourceFileUpdateS3AmazonWebServices) GetStorage() SourceFileUpdateSchemasStorage {
+ return SourceFileUpdateSchemasStorageS3
+}
+
+type SourceFileUpdateStorage string
+
+const (
+ SourceFileUpdateStorageGcs SourceFileUpdateStorage = "GCS"
+)
+
+func (e SourceFileUpdateStorage) ToPointer() *SourceFileUpdateStorage {
+ return &e
+}
+
+func (e *SourceFileUpdateStorage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "GCS":
+ *e = SourceFileUpdateStorage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceFileUpdateStorage: %v", v)
+ }
+}
+
+// GCSGoogleCloudStorage - The storage Provider or Location of the file(s) which should be replicated.
+type GCSGoogleCloudStorage struct {
+ // In order to access private Buckets stored on Google Cloud, this connector would need a service account json credentials with the proper permissions as described here. Please generate the credentials.json file and copy/paste its content to this field (expecting JSON formats). If accessing publicly available data, this field is not necessary.
+ ServiceAccountJSON *string `json:"service_account_json,omitempty"`
+ storage SourceFileUpdateStorage `const:"GCS" json:"storage"`
+}
+
+func (g GCSGoogleCloudStorage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(g, "", false)
+}
+
+func (g *GCSGoogleCloudStorage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &g, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *GCSGoogleCloudStorage) GetServiceAccountJSON() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ServiceAccountJSON
+}
+
+func (o *GCSGoogleCloudStorage) GetStorage() SourceFileUpdateStorage {
+ return SourceFileUpdateStorageGcs
+}
+
+type Storage string
+
+const (
+ StorageHTTPS Storage = "HTTPS"
+)
+
+func (e Storage) ToPointer() *Storage {
+ return &e
+}
+
+func (e *Storage) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "HTTPS":
+ *e = Storage(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for Storage: %v", v)
+ }
+}
+
+// HTTPSPublicWeb - The storage Provider or Location of the file(s) which should be replicated.
+type HTTPSPublicWeb struct {
+	// storage is the unexported const discriminator; it always serializes
+	// as "HTTPS" (see the const tag).
+	storage Storage `const:"HTTPS" json:"storage"`
+	// Add User-Agent to request
+	UserAgent *bool `default:"false" json:"user_agent"`
+}
+
+// MarshalJSON serializes via the SDK utils helper so the const:"HTTPS"
+// discriminator and the default tag on user_agent are honored.
+func (h HTTPSPublicWeb) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(h, "", false)
+}
+
+// UnmarshalJSON delegates to the SDK utils helper (const/default tag aware;
+// flag semantics defined in the utils package).
+func (h *HTTPSPublicWeb) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &h, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetStorage always reports the const discriminator value "HTTPS".
+func (o *HTTPSPublicWeb) GetStorage() Storage {
+	return StorageHTTPS
+}
+
+// GetUserAgent returns the optional User-Agent toggle, or nil when unset
+// (safe on a nil receiver).
+func (o *HTTPSPublicWeb) GetUserAgent() *bool {
+	if o == nil {
+		return nil
+	}
+	return o.UserAgent
+}
+
+// StorageProviderType names which variant of the StorageProvider union is set.
+type StorageProviderType string
+
+const (
+	StorageProviderTypeHTTPSPublicWeb                      StorageProviderType = "HTTPS: Public Web"
+	StorageProviderTypeGCSGoogleCloudStorage               StorageProviderType = "GCS: Google Cloud Storage"
+	StorageProviderTypeSourceFileUpdateS3AmazonWebServices StorageProviderType = "source-file-update_S3: Amazon Web Services"
+	StorageProviderTypeAzBlobAzureBlobStorage              StorageProviderType = "AzBlob: Azure Blob Storage"
+	StorageProviderTypeSSHSecureShell                      StorageProviderType = "SSH: Secure Shell"
+	StorageProviderTypeSCPSecureCopyProtocol               StorageProviderType = "SCP: Secure copy protocol"
+	StorageProviderTypeSFTPSecureFileTransferProtocol      StorageProviderType = "SFTP: Secure File Transfer Protocol"
+)
+
+// StorageProvider is a tagged union over the supported file-storage
+// providers: exactly one variant pointer is expected to be non-nil, and
+// Type records which one (maintained by the Create* constructors and
+// UnmarshalJSON below).
+type StorageProvider struct {
+	HTTPSPublicWeb                      *HTTPSPublicWeb
+	GCSGoogleCloudStorage               *GCSGoogleCloudStorage
+	SourceFileUpdateS3AmazonWebServices *SourceFileUpdateS3AmazonWebServices
+	AzBlobAzureBlobStorage              *AzBlobAzureBlobStorage
+	SSHSecureShell                      *SSHSecureShell
+	SCPSecureCopyProtocol               *SCPSecureCopyProtocol
+	SFTPSecureFileTransferProtocol      *SFTPSecureFileTransferProtocol
+
+	Type StorageProviderType
+}
+
+// CreateStorageProviderHTTPSPublicWeb wraps an HTTPSPublicWeb value in the
+// StorageProvider union with the matching Type tag.
+func CreateStorageProviderHTTPSPublicWeb(httpsPublicWeb HTTPSPublicWeb) StorageProvider {
+	typ := StorageProviderTypeHTTPSPublicWeb
+
+	return StorageProvider{
+		HTTPSPublicWeb: &httpsPublicWeb,
+		Type:           typ,
+	}
+}
+
+// CreateStorageProviderGCSGoogleCloudStorage wraps a GCS variant in the union.
+func CreateStorageProviderGCSGoogleCloudStorage(gcsGoogleCloudStorage GCSGoogleCloudStorage) StorageProvider {
+	typ := StorageProviderTypeGCSGoogleCloudStorage
+
+	return StorageProvider{
+		GCSGoogleCloudStorage: &gcsGoogleCloudStorage,
+		Type:                  typ,
+	}
+}
+
+// CreateStorageProviderSourceFileUpdateS3AmazonWebServices wraps an S3 variant in the union.
+func CreateStorageProviderSourceFileUpdateS3AmazonWebServices(sourceFileUpdateS3AmazonWebServices SourceFileUpdateS3AmazonWebServices) StorageProvider {
+	typ := StorageProviderTypeSourceFileUpdateS3AmazonWebServices
+
+	return StorageProvider{
+		SourceFileUpdateS3AmazonWebServices: &sourceFileUpdateS3AmazonWebServices,
+		Type:                                typ,
+	}
+}
+
+// CreateStorageProviderAzBlobAzureBlobStorage wraps an Azure Blob variant in the union.
+func CreateStorageProviderAzBlobAzureBlobStorage(azBlobAzureBlobStorage AzBlobAzureBlobStorage) StorageProvider {
+	typ := StorageProviderTypeAzBlobAzureBlobStorage
+
+	return StorageProvider{
+		AzBlobAzureBlobStorage: &azBlobAzureBlobStorage,
+		Type:                   typ,
+	}
+}
+
+// CreateStorageProviderSSHSecureShell wraps an SSH variant in the union.
+func CreateStorageProviderSSHSecureShell(sshSecureShell SSHSecureShell) StorageProvider {
+	typ := StorageProviderTypeSSHSecureShell
+
+	return StorageProvider{
+		SSHSecureShell: &sshSecureShell,
+		Type:           typ,
+	}
+}
+
+// CreateStorageProviderSCPSecureCopyProtocol wraps an SCP variant in the union.
+func CreateStorageProviderSCPSecureCopyProtocol(scpSecureCopyProtocol SCPSecureCopyProtocol) StorageProvider {
+	typ := StorageProviderTypeSCPSecureCopyProtocol
+
+	return StorageProvider{
+		SCPSecureCopyProtocol: &scpSecureCopyProtocol,
+		Type:                  typ,
+	}
+}
+
+// CreateStorageProviderSFTPSecureFileTransferProtocol wraps an SFTP variant in the union.
+func CreateStorageProviderSFTPSecureFileTransferProtocol(sftpSecureFileTransferProtocol SFTPSecureFileTransferProtocol) StorageProvider {
+	typ := StorageProviderTypeSFTPSecureFileTransferProtocol
+
+	return StorageProvider{
+		SFTPSecureFileTransferProtocol: &sftpSecureFileTransferProtocol,
+		Type:                           typ,
+	}
+}
+
+// UnmarshalJSON decodes the payload by attempting each union variant in
+// turn and keeping the first that decodes cleanly, recording the winner in
+// Type. Disambiguation relies on utils.UnmarshalJSON (called here in strict
+// mode) rejecting payloads whose const "storage" discriminator does not
+// match the candidate type — NOTE(review): confirm that behavior in the
+// utils package; otherwise trial order would decide ambiguous payloads.
+func (u *StorageProvider) UnmarshalJSON(data []byte) error {
+
+	httpsPublicWeb := new(HTTPSPublicWeb)
+	if err := utils.UnmarshalJSON(data, &httpsPublicWeb, "", true, true); err == nil {
+		u.HTTPSPublicWeb = httpsPublicWeb
+		u.Type = StorageProviderTypeHTTPSPublicWeb
+		return nil
+	}
+
+	gcsGoogleCloudStorage := new(GCSGoogleCloudStorage)
+	if err := utils.UnmarshalJSON(data, &gcsGoogleCloudStorage, "", true, true); err == nil {
+		u.GCSGoogleCloudStorage = gcsGoogleCloudStorage
+		u.Type = StorageProviderTypeGCSGoogleCloudStorage
+		return nil
+	}
+
+	sourceFileUpdateS3AmazonWebServices := new(SourceFileUpdateS3AmazonWebServices)
+	if err := utils.UnmarshalJSON(data, &sourceFileUpdateS3AmazonWebServices, "", true, true); err == nil {
+		u.SourceFileUpdateS3AmazonWebServices = sourceFileUpdateS3AmazonWebServices
+		u.Type = StorageProviderTypeSourceFileUpdateS3AmazonWebServices
+		return nil
+	}
+
+	azBlobAzureBlobStorage := new(AzBlobAzureBlobStorage)
+	if err := utils.UnmarshalJSON(data, &azBlobAzureBlobStorage, "", true, true); err == nil {
+		u.AzBlobAzureBlobStorage = azBlobAzureBlobStorage
+		u.Type = StorageProviderTypeAzBlobAzureBlobStorage
+		return nil
+	}
+
+	sshSecureShell := new(SSHSecureShell)
+	if err := utils.UnmarshalJSON(data, &sshSecureShell, "", true, true); err == nil {
+		u.SSHSecureShell = sshSecureShell
+		u.Type = StorageProviderTypeSSHSecureShell
+		return nil
+	}
+
+	scpSecureCopyProtocol := new(SCPSecureCopyProtocol)
+	if err := utils.UnmarshalJSON(data, &scpSecureCopyProtocol, "", true, true); err == nil {
+		u.SCPSecureCopyProtocol = scpSecureCopyProtocol
+		u.Type = StorageProviderTypeSCPSecureCopyProtocol
+		return nil
+	}
+
+	sftpSecureFileTransferProtocol := new(SFTPSecureFileTransferProtocol)
+	if err := utils.UnmarshalJSON(data, &sftpSecureFileTransferProtocol, "", true, true); err == nil {
+		u.SFTPSecureFileTransferProtocol = sftpSecureFileTransferProtocol
+		u.Type = StorageProviderTypeSFTPSecureFileTransferProtocol
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is non-nil, checked in
+// declaration order (Type is ignored here); it fails when every variant is nil.
+func (u StorageProvider) MarshalJSON() ([]byte, error) {
+	if u.HTTPSPublicWeb != nil {
+		return utils.MarshalJSON(u.HTTPSPublicWeb, "", true)
+	}
+
+	if u.GCSGoogleCloudStorage != nil {
+		return utils.MarshalJSON(u.GCSGoogleCloudStorage, "", true)
+	}
+
+	if u.SourceFileUpdateS3AmazonWebServices != nil {
+		return utils.MarshalJSON(u.SourceFileUpdateS3AmazonWebServices, "", true)
+	}
+
+	if u.AzBlobAzureBlobStorage != nil {
+		return utils.MarshalJSON(u.AzBlobAzureBlobStorage, "", true)
+	}
+
+	if u.SSHSecureShell != nil {
+		return utils.MarshalJSON(u.SSHSecureShell, "", true)
+	}
+
+	if u.SCPSecureCopyProtocol != nil {
+		return utils.MarshalJSON(u.SCPSecureCopyProtocol, "", true)
+	}
+
+	if u.SFTPSecureFileTransferProtocol != nil {
+		return utils.MarshalJSON(u.SFTPSecureFileTransferProtocol, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceFileUpdate is the update-request configuration for the File source.
+type SourceFileUpdate struct {
+	// The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
+	DatasetName string `json:"dataset_name"`
+	// The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs).
+	Format *FileFormat `default:"csv" json:"format"`
+	// The storage Provider or Location of the file(s) which should be replicated.
+	Provider StorageProvider `json:"provider"`
+	// This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
+	ReaderOptions *string `json:"reader_options,omitempty"`
+	// The URL path to access the file which should be replicated.
+	URL string `json:"url"`
+}
+
+// MarshalJSON serializes via the SDK utils helper so the default:"csv" tag
+// on Format is honored.
+func (s SourceFileUpdate) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the SDK utils helper (default tag aware).
+func (s *SourceFileUpdate) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetDatasetName returns the destination table name ("" on a nil receiver).
+func (o *SourceFileUpdate) GetDatasetName() string {
+	if o == nil {
+		return ""
+	}
+	return o.DatasetName
+}
+
+// GetFormat returns the optional file format (nil on a nil receiver).
+func (o *SourceFileUpdate) GetFormat() *FileFormat {
+	if o == nil {
+		return nil
+	}
+	return o.Format
+}
+
+// GetProvider returns the storage-provider union (zero value on a nil receiver).
+func (o *SourceFileUpdate) GetProvider() StorageProvider {
+	if o == nil {
+		return StorageProvider{}
+	}
+	return o.Provider
+}
+
+// GetReaderOptions returns the optional reader-options JSON string.
+func (o *SourceFileUpdate) GetReaderOptions() *string {
+	if o == nil {
+		return nil
+	}
+	return o.ReaderOptions
+}
+
+// GetURL returns the file URL ("" on a nil receiver).
+func (o *SourceFileUpdate) GetURL() string {
+	if o == nil {
+		return ""
+	}
+	return o.URL
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefirebolt.go b/internal/sdk/pkg/models/shared/sourcefirebolt.go
old mode 100755
new mode 100644
index 915858968..4ad4e257c
--- a/internal/sdk/pkg/models/shared/sourcefirebolt.go
+++ b/internal/sdk/pkg/models/shared/sourcefirebolt.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceFireboltFirebolt string
@@ -42,7 +43,64 @@ type SourceFirebolt struct {
Host *string `json:"host,omitempty"`
// Firebolt password.
Password string `json:"password"`
- SourceType SourceFireboltFirebolt `json:"sourceType"`
+ sourceType SourceFireboltFirebolt `const:"firebolt" json:"sourceType"`
// Firebolt email address you use to login.
Username string `json:"username"`
}
+
+// MarshalJSON serializes via the SDK utils helper so the const:"firebolt"
+// sourceType discriminator is emitted despite being unexported.
+func (s SourceFirebolt) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the SDK utils helper (const tag aware).
+func (s *SourceFirebolt) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetAccount returns the optional Firebolt account (nil receiver safe).
+func (o *SourceFirebolt) GetAccount() *string {
+	if o == nil {
+		return nil
+	}
+	return o.Account
+}
+
+// GetDatabase returns the database name ("" on a nil receiver).
+func (o *SourceFirebolt) GetDatabase() string {
+	if o == nil {
+		return ""
+	}
+	return o.Database
+}
+
+// GetEngine returns the optional engine name.
+func (o *SourceFirebolt) GetEngine() *string {
+	if o == nil {
+		return nil
+	}
+	return o.Engine
+}
+
+// GetHost returns the optional host.
+func (o *SourceFirebolt) GetHost() *string {
+	if o == nil {
+		return nil
+	}
+	return o.Host
+}
+
+// GetPassword returns the configured Firebolt password ("" on a nil receiver).
+func (o *SourceFirebolt) GetPassword() string {
+	if o == nil {
+		return ""
+	}
+	return o.Password
+}
+
+// GetSourceType always reports the const discriminator value "firebolt".
+func (o *SourceFirebolt) GetSourceType() SourceFireboltFirebolt {
+	return SourceFireboltFireboltFirebolt
+}
+
+// GetUsername returns the login email ("" on a nil receiver).
+func (o *SourceFirebolt) GetUsername() string {
+	if o == nil {
+		return ""
+	}
+	return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefireboltcreaterequest.go b/internal/sdk/pkg/models/shared/sourcefireboltcreaterequest.go
old mode 100755
new mode 100644
index 820f56912..0258bcee2
--- a/internal/sdk/pkg/models/shared/sourcefireboltcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefireboltcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceFireboltCreateRequest struct {
Configuration SourceFirebolt `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+// GetConfiguration returns the source configuration (zero value on a nil receiver).
+func (o *SourceFireboltCreateRequest) GetConfiguration() SourceFirebolt {
+	if o == nil {
+		return SourceFirebolt{}
+	}
+	return o.Configuration
+}
+
+// GetDefinitionID returns the optional connector-definition UUID.
+func (o *SourceFireboltCreateRequest) GetDefinitionID() *string {
+	if o == nil {
+		return nil
+	}
+	return o.DefinitionID
+}
+
+// GetName returns the source name ("" on a nil receiver).
+func (o *SourceFireboltCreateRequest) GetName() string {
+	if o == nil {
+		return ""
+	}
+	return o.Name
+}
+
+// GetSecretID returns the optional OAuth secret ID.
+func (o *SourceFireboltCreateRequest) GetSecretID() *string {
+	if o == nil {
+		return nil
+	}
+	return o.SecretID
+}
+
+// GetWorkspaceID returns the workspace ID ("" on a nil receiver).
+func (o *SourceFireboltCreateRequest) GetWorkspaceID() string {
+	if o == nil {
+		return ""
+	}
+	return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefireboltputrequest.go b/internal/sdk/pkg/models/shared/sourcefireboltputrequest.go
old mode 100755
new mode 100644
index e3190d460..a8bcb310e
--- a/internal/sdk/pkg/models/shared/sourcefireboltputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefireboltputrequest.go
@@ -7,3 +7,24 @@ type SourceFireboltPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFireboltPutRequest) GetConfiguration() SourceFireboltUpdate {
+ if o == nil {
+ return SourceFireboltUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFireboltPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFireboltPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefireboltupdate.go b/internal/sdk/pkg/models/shared/sourcefireboltupdate.go
old mode 100755
new mode 100644
index b235f97f9..5d90e8c07
--- a/internal/sdk/pkg/models/shared/sourcefireboltupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcefireboltupdate.go
@@ -16,3 +16,45 @@ type SourceFireboltUpdate struct {
// Firebolt email address you use to login.
Username string `json:"username"`
}
+
+// GetAccount returns the optional Firebolt account (nil receiver safe).
+func (o *SourceFireboltUpdate) GetAccount() *string {
+	if o == nil {
+		return nil
+	}
+	return o.Account
+}
+
+// GetDatabase returns the database name ("" on a nil receiver).
+func (o *SourceFireboltUpdate) GetDatabase() string {
+	if o == nil {
+		return ""
+	}
+	return o.Database
+}
+
+// GetEngine returns the optional engine name.
+func (o *SourceFireboltUpdate) GetEngine() *string {
+	if o == nil {
+		return nil
+	}
+	return o.Engine
+}
+
+// GetHost returns the optional host.
+func (o *SourceFireboltUpdate) GetHost() *string {
+	if o == nil {
+		return nil
+	}
+	return o.Host
+}
+
+// GetPassword returns the configured Firebolt password ("" on a nil receiver).
+func (o *SourceFireboltUpdate) GetPassword() string {
+	if o == nil {
+		return ""
+	}
+	return o.Password
+}
+
+// GetUsername returns the login email ("" on a nil receiver).
+func (o *SourceFireboltUpdate) GetUsername() string {
+	if o == nil {
+		return ""
+	}
+	return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshcaller.go b/internal/sdk/pkg/models/shared/sourcefreshcaller.go
old mode 100755
new mode 100644
index fb7dc5be4..d2153cc72
--- a/internal/sdk/pkg/models/shared/sourcefreshcaller.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshcaller.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceFreshcallerFreshcaller string
+type Freshcaller string
const (
- SourceFreshcallerFreshcallerFreshcaller SourceFreshcallerFreshcaller = "freshcaller"
+ FreshcallerFreshcaller Freshcaller = "freshcaller"
)
-func (e SourceFreshcallerFreshcaller) ToPointer() *SourceFreshcallerFreshcaller {
+func (e Freshcaller) ToPointer() *Freshcaller {
return &e
}
-func (e *SourceFreshcallerFreshcaller) UnmarshalJSON(data []byte) error {
+func (e *Freshcaller) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "freshcaller":
- *e = SourceFreshcallerFreshcaller(v)
+ *e = Freshcaller(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFreshcallerFreshcaller: %v", v)
+ return fmt.Errorf("invalid value for Freshcaller: %v", v)
}
}
@@ -38,10 +39,60 @@ type SourceFreshcaller struct {
// Used to construct Base URL for the Freshcaller APIs
Domain string `json:"domain"`
// The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account.
- RequestsPerMinute *int64 `json:"requests_per_minute,omitempty"`
- SourceType SourceFreshcallerFreshcaller `json:"sourceType"`
+ RequestsPerMinute *int64 `json:"requests_per_minute,omitempty"`
+ sourceType Freshcaller `const:"freshcaller" json:"sourceType"`
// UTC date and time. Any data created after this date will be replicated.
- StartDate time.Time `json:"start_date"`
+ StartDate *time.Time `json:"start_date,omitempty"`
// Lag in minutes for each sync, i.e., at time T, data for the time range [prev_sync_time, T-30] will be fetched
SyncLagMinutes *int64 `json:"sync_lag_minutes,omitempty"`
}
+
+// MarshalJSON serializes via the SDK utils helper so the const:"freshcaller"
+// sourceType discriminator is emitted despite being unexported.
+func (s SourceFreshcaller) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the SDK utils helper (const tag aware).
+func (s *SourceFreshcaller) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetAPIKey returns the configured API key ("" on a nil receiver).
+func (o *SourceFreshcaller) GetAPIKey() string {
+	if o == nil {
+		return ""
+	}
+	return o.APIKey
+}
+
+// GetDomain returns the Freshcaller domain ("" on a nil receiver).
+func (o *SourceFreshcaller) GetDomain() string {
+	if o == nil {
+		return ""
+	}
+	return o.Domain
+}
+
+// GetRequestsPerMinute returns the optional rate-limit setting.
+func (o *SourceFreshcaller) GetRequestsPerMinute() *int64 {
+	if o == nil {
+		return nil
+	}
+	return o.RequestsPerMinute
+}
+
+// GetSourceType always reports the const discriminator value "freshcaller".
+func (o *SourceFreshcaller) GetSourceType() Freshcaller {
+	return FreshcallerFreshcaller
+}
+
+// GetStartDate returns the optional replication start timestamp.
+func (o *SourceFreshcaller) GetStartDate() *time.Time {
+	if o == nil {
+		return nil
+	}
+	return o.StartDate
+}
+
+// GetSyncLagMinutes returns the optional per-sync lag in minutes.
+func (o *SourceFreshcaller) GetSyncLagMinutes() *int64 {
+	if o == nil {
+		return nil
+	}
+	return o.SyncLagMinutes
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshcallercreaterequest.go b/internal/sdk/pkg/models/shared/sourcefreshcallercreaterequest.go
old mode 100755
new mode 100644
index d75de3bec..55c47db68
--- a/internal/sdk/pkg/models/shared/sourcefreshcallercreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshcallercreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceFreshcallerCreateRequest struct {
Configuration SourceFreshcaller `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFreshcallerCreateRequest) GetConfiguration() SourceFreshcaller {
+ if o == nil {
+ return SourceFreshcaller{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFreshcallerCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceFreshcallerCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFreshcallerCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceFreshcallerCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshcallerputrequest.go b/internal/sdk/pkg/models/shared/sourcefreshcallerputrequest.go
old mode 100755
new mode 100644
index 5dde81ca0..8f03f8c1d
--- a/internal/sdk/pkg/models/shared/sourcefreshcallerputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshcallerputrequest.go
@@ -7,3 +7,24 @@ type SourceFreshcallerPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFreshcallerPutRequest) GetConfiguration() SourceFreshcallerUpdate {
+ if o == nil {
+ return SourceFreshcallerUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFreshcallerPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFreshcallerPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshcallerupdate.go b/internal/sdk/pkg/models/shared/sourcefreshcallerupdate.go
old mode 100755
new mode 100644
index 0348cca15..6621f9834
--- a/internal/sdk/pkg/models/shared/sourcefreshcallerupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshcallerupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -14,7 +15,53 @@ type SourceFreshcallerUpdate struct {
// The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account.
RequestsPerMinute *int64 `json:"requests_per_minute,omitempty"`
// UTC date and time. Any data created after this date will be replicated.
- StartDate time.Time `json:"start_date"`
+ StartDate *time.Time `json:"start_date,omitempty"`
// Lag in minutes for each sync, i.e., at time T, data for the time range [prev_sync_time, T-30] will be fetched
SyncLagMinutes *int64 `json:"sync_lag_minutes,omitempty"`
}
+
+func (s SourceFreshcallerUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFreshcallerUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFreshcallerUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceFreshcallerUpdate) GetDomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Domain
+}
+
+func (o *SourceFreshcallerUpdate) GetRequestsPerMinute() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.RequestsPerMinute
+}
+
+func (o *SourceFreshcallerUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceFreshcallerUpdate) GetSyncLagMinutes() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SyncLagMinutes
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshdesk.go b/internal/sdk/pkg/models/shared/sourcefreshdesk.go
old mode 100755
new mode 100644
index f2bd23eb5..e305e8888
--- a/internal/sdk/pkg/models/shared/sourcefreshdesk.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshdesk.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceFreshdeskFreshdesk string
+type Freshdesk string
const (
- SourceFreshdeskFreshdeskFreshdesk SourceFreshdeskFreshdesk = "freshdesk"
+ FreshdeskFreshdesk Freshdesk = "freshdesk"
)
-func (e SourceFreshdeskFreshdesk) ToPointer() *SourceFreshdeskFreshdesk {
+func (e Freshdesk) ToPointer() *Freshdesk {
return &e
}
-func (e *SourceFreshdeskFreshdesk) UnmarshalJSON(data []byte) error {
+func (e *Freshdesk) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "freshdesk":
- *e = SourceFreshdeskFreshdesk(v)
+ *e = Freshdesk(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFreshdeskFreshdesk: %v", v)
+ return fmt.Errorf("invalid value for Freshdesk: %v", v)
}
}
@@ -38,8 +39,51 @@ type SourceFreshdesk struct {
// Freshdesk domain
Domain string `json:"domain"`
// The number of requests per minute that this source allowed to use. There is a rate limit of 50 requests per minute per app per account.
- RequestsPerMinute *int64 `json:"requests_per_minute,omitempty"`
- SourceType SourceFreshdeskFreshdesk `json:"sourceType"`
+ RequestsPerMinute *int64 `json:"requests_per_minute,omitempty"`
+ sourceType Freshdesk `const:"freshdesk" json:"sourceType"`
// UTC date and time. Any data created after this date will be replicated. If this parameter is not set, all data will be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceFreshdesk) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFreshdesk) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFreshdesk) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceFreshdesk) GetDomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Domain
+}
+
+func (o *SourceFreshdesk) GetRequestsPerMinute() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.RequestsPerMinute
+}
+
+func (o *SourceFreshdesk) GetSourceType() Freshdesk {
+ return FreshdeskFreshdesk
+}
+
+func (o *SourceFreshdesk) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshdeskcreaterequest.go b/internal/sdk/pkg/models/shared/sourcefreshdeskcreaterequest.go
old mode 100755
new mode 100644
index c52f63d89..5576389e0
--- a/internal/sdk/pkg/models/shared/sourcefreshdeskcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshdeskcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceFreshdeskCreateRequest struct {
Configuration SourceFreshdesk `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFreshdeskCreateRequest) GetConfiguration() SourceFreshdesk {
+ if o == nil {
+ return SourceFreshdesk{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFreshdeskCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceFreshdeskCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFreshdeskCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceFreshdeskCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshdeskputrequest.go b/internal/sdk/pkg/models/shared/sourcefreshdeskputrequest.go
old mode 100755
new mode 100644
index 2e2a06652..449b87b7d
--- a/internal/sdk/pkg/models/shared/sourcefreshdeskputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshdeskputrequest.go
@@ -7,3 +7,24 @@ type SourceFreshdeskPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFreshdeskPutRequest) GetConfiguration() SourceFreshdeskUpdate {
+ if o == nil {
+ return SourceFreshdeskUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFreshdeskPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFreshdeskPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshdeskupdate.go b/internal/sdk/pkg/models/shared/sourcefreshdeskupdate.go
old mode 100755
new mode 100644
index 3093ea638..aa0be3c37
--- a/internal/sdk/pkg/models/shared/sourcefreshdeskupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshdeskupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -16,3 +17,42 @@ type SourceFreshdeskUpdate struct {
// UTC date and time. Any data created after this date will be replicated. If this parameter is not set, all data will be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceFreshdeskUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFreshdeskUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFreshdeskUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceFreshdeskUpdate) GetDomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Domain
+}
+
+func (o *SourceFreshdeskUpdate) GetRequestsPerMinute() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.RequestsPerMinute
+}
+
+func (o *SourceFreshdeskUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshsales.go b/internal/sdk/pkg/models/shared/sourcefreshsales.go
old mode 100755
new mode 100644
index 7dc19e6dd..f7e728d0d
--- a/internal/sdk/pkg/models/shared/sourcefreshsales.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshsales.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceFreshsalesFreshsales string
+type Freshsales string
const (
- SourceFreshsalesFreshsalesFreshsales SourceFreshsalesFreshsales = "freshsales"
+ FreshsalesFreshsales Freshsales = "freshsales"
)
-func (e SourceFreshsalesFreshsales) ToPointer() *SourceFreshsalesFreshsales {
+func (e Freshsales) ToPointer() *Freshsales {
return &e
}
-func (e *SourceFreshsalesFreshsales) UnmarshalJSON(data []byte) error {
+func (e *Freshsales) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "freshsales":
- *e = SourceFreshsalesFreshsales(v)
+ *e = Freshsales(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceFreshsalesFreshsales: %v", v)
+ return fmt.Errorf("invalid value for Freshsales: %v", v)
}
}
@@ -35,6 +36,35 @@ type SourceFreshsales struct {
// Freshsales API Key. See here. The key is case sensitive.
APIKey string `json:"api_key"`
// The Name of your Freshsales domain
- DomainName string `json:"domain_name"`
- SourceType SourceFreshsalesFreshsales `json:"sourceType"`
+ DomainName string `json:"domain_name"`
+ sourceType Freshsales `const:"freshsales" json:"sourceType"`
+}
+
+func (s SourceFreshsales) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceFreshsales) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceFreshsales) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceFreshsales) GetDomainName() string {
+ if o == nil {
+ return ""
+ }
+ return o.DomainName
+}
+
+func (o *SourceFreshsales) GetSourceType() Freshsales {
+ return FreshsalesFreshsales
}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshsalescreaterequest.go b/internal/sdk/pkg/models/shared/sourcefreshsalescreaterequest.go
old mode 100755
new mode 100644
index d1541cc01..e2625f3b9
--- a/internal/sdk/pkg/models/shared/sourcefreshsalescreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshsalescreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceFreshsalesCreateRequest struct {
Configuration SourceFreshsales `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFreshsalesCreateRequest) GetConfiguration() SourceFreshsales {
+ if o == nil {
+ return SourceFreshsales{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFreshsalesCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceFreshsalesCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFreshsalesCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceFreshsalesCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshsalesputrequest.go b/internal/sdk/pkg/models/shared/sourcefreshsalesputrequest.go
old mode 100755
new mode 100644
index 420fb7b2d..1f7102df1
--- a/internal/sdk/pkg/models/shared/sourcefreshsalesputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshsalesputrequest.go
@@ -7,3 +7,24 @@ type SourceFreshsalesPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceFreshsalesPutRequest) GetConfiguration() SourceFreshsalesUpdate {
+ if o == nil {
+ return SourceFreshsalesUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceFreshsalesPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceFreshsalesPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcefreshsalesupdate.go b/internal/sdk/pkg/models/shared/sourcefreshsalesupdate.go
old mode 100755
new mode 100644
index b2157624b..35a6c3387
--- a/internal/sdk/pkg/models/shared/sourcefreshsalesupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcefreshsalesupdate.go
@@ -8,3 +8,17 @@ type SourceFreshsalesUpdate struct {
// The Name of your Freshsales domain
DomainName string `json:"domain_name"`
}
+
+func (o *SourceFreshsalesUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceFreshsalesUpdate) GetDomainName() string {
+ if o == nil {
+ return ""
+ }
+ return o.DomainName
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegainsightpx.go b/internal/sdk/pkg/models/shared/sourcegainsightpx.go
old mode 100755
new mode 100644
index 33fe990c5..7e2a98b05
--- a/internal/sdk/pkg/models/shared/sourcegainsightpx.go
+++ b/internal/sdk/pkg/models/shared/sourcegainsightpx.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGainsightPxGainsightPx string
+type GainsightPx string
const (
- SourceGainsightPxGainsightPxGainsightPx SourceGainsightPxGainsightPx = "gainsight-px"
+ GainsightPxGainsightPx GainsightPx = "gainsight-px"
)
-func (e SourceGainsightPxGainsightPx) ToPointer() *SourceGainsightPxGainsightPx {
+func (e GainsightPx) ToPointer() *GainsightPx {
return &e
}
-func (e *SourceGainsightPxGainsightPx) UnmarshalJSON(data []byte) error {
+func (e *GainsightPx) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "gainsight-px":
- *e = SourceGainsightPxGainsightPx(v)
+ *e = GainsightPx(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGainsightPxGainsightPx: %v", v)
+ return fmt.Errorf("invalid value for GainsightPx: %v", v)
}
}
type SourceGainsightPx struct {
// The Aptrinsic API Key which is recieved from the dashboard settings (ref - https://app.aptrinsic.com/settings/api-keys)
- APIKey string `json:"api_key"`
- SourceType SourceGainsightPxGainsightPx `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType GainsightPx `const:"gainsight-px" json:"sourceType"`
+}
+
+func (s SourceGainsightPx) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGainsightPx) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGainsightPx) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceGainsightPx) GetSourceType() GainsightPx {
+ return GainsightPxGainsightPx
}
diff --git a/internal/sdk/pkg/models/shared/sourcegainsightpxcreaterequest.go b/internal/sdk/pkg/models/shared/sourcegainsightpxcreaterequest.go
old mode 100755
new mode 100644
index 06792190b..63d800243
--- a/internal/sdk/pkg/models/shared/sourcegainsightpxcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegainsightpxcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGainsightPxCreateRequest struct {
Configuration SourceGainsightPx `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGainsightPxCreateRequest) GetConfiguration() SourceGainsightPx {
+ if o == nil {
+ return SourceGainsightPx{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGainsightPxCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGainsightPxCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGainsightPxCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGainsightPxCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegainsightpxputrequest.go b/internal/sdk/pkg/models/shared/sourcegainsightpxputrequest.go
old mode 100755
new mode 100644
index f6a5d3ae6..90538ef3e
--- a/internal/sdk/pkg/models/shared/sourcegainsightpxputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegainsightpxputrequest.go
@@ -7,3 +7,24 @@ type SourceGainsightPxPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGainsightPxPutRequest) GetConfiguration() SourceGainsightPxUpdate {
+ if o == nil {
+ return SourceGainsightPxUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGainsightPxPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGainsightPxPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegainsightpxupdate.go b/internal/sdk/pkg/models/shared/sourcegainsightpxupdate.go
old mode 100755
new mode 100644
index 07ab98b25..0b6f98091
--- a/internal/sdk/pkg/models/shared/sourcegainsightpxupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegainsightpxupdate.go
@@ -6,3 +6,10 @@ type SourceGainsightPxUpdate struct {
// The Aptrinsic API Key which is recieved from the dashboard settings (ref - https://app.aptrinsic.com/settings/api-keys)
APIKey string `json:"api_key"`
}
+
+func (o *SourceGainsightPxUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegcs.go b/internal/sdk/pkg/models/shared/sourcegcs.go
old mode 100755
new mode 100644
index a71a3d77f..804feccc0
--- a/internal/sdk/pkg/models/shared/sourcegcs.go
+++ b/internal/sdk/pkg/models/shared/sourcegcs.go
@@ -4,7 +4,10 @@ package shared
import (
"encoding/json"
+ "errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
)
type SourceGcsGcs string
@@ -31,12 +34,642 @@ func (e *SourceGcsGcs) UnmarshalJSON(data []byte) error {
}
}
+type SourceGcsFiletype string
+
+const (
+ SourceGcsFiletypeCsv SourceGcsFiletype = "csv"
+)
+
+func (e SourceGcsFiletype) ToPointer() *SourceGcsFiletype {
+ return &e
+}
+
+func (e *SourceGcsFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "csv":
+ *e = SourceGcsFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsFiletype: %v", v)
+ }
+}
+
+type SourceGcsSchemasStreamsHeaderDefinitionType string
+
+const (
+ SourceGcsSchemasStreamsHeaderDefinitionTypeUserProvided SourceGcsSchemasStreamsHeaderDefinitionType = "User Provided"
+)
+
+func (e SourceGcsSchemasStreamsHeaderDefinitionType) ToPointer() *SourceGcsSchemasStreamsHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceGcsSchemasStreamsHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "User Provided":
+ *e = SourceGcsSchemasStreamsHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsSchemasStreamsHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGcsUserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGcsUserProvided struct {
+ // The column names that will be used while emitting the CSV records
+ ColumnNames []string `json:"column_names"`
+ headerDefinitionType *SourceGcsSchemasStreamsHeaderDefinitionType `const:"User Provided" json:"header_definition_type"`
+}
+
+func (s SourceGcsUserProvided) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGcsUserProvided) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGcsUserProvided) GetColumnNames() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.ColumnNames
+}
+
+func (o *SourceGcsUserProvided) GetHeaderDefinitionType() *SourceGcsSchemasStreamsHeaderDefinitionType {
+ return SourceGcsSchemasStreamsHeaderDefinitionTypeUserProvided.ToPointer()
+}
+
+type SourceGcsSchemasHeaderDefinitionType string
+
+const (
+ SourceGcsSchemasHeaderDefinitionTypeAutogenerated SourceGcsSchemasHeaderDefinitionType = "Autogenerated"
+)
+
+func (e SourceGcsSchemasHeaderDefinitionType) ToPointer() *SourceGcsSchemasHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceGcsSchemasHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Autogenerated":
+ *e = SourceGcsSchemasHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsSchemasHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGcsAutogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGcsAutogenerated struct {
+ headerDefinitionType *SourceGcsSchemasHeaderDefinitionType `const:"Autogenerated" json:"header_definition_type"`
+}
+
+func (s SourceGcsAutogenerated) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGcsAutogenerated) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGcsAutogenerated) GetHeaderDefinitionType() *SourceGcsSchemasHeaderDefinitionType {
+ return SourceGcsSchemasHeaderDefinitionTypeAutogenerated.ToPointer()
+}
+
+type SourceGcsHeaderDefinitionType string
+
+const (
+ SourceGcsHeaderDefinitionTypeFromCsv SourceGcsHeaderDefinitionType = "From CSV"
+)
+
+func (e SourceGcsHeaderDefinitionType) ToPointer() *SourceGcsHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceGcsHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "From CSV":
+ *e = SourceGcsHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGcsFromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGcsFromCSV struct {
+ headerDefinitionType *SourceGcsHeaderDefinitionType `const:"From CSV" json:"header_definition_type"`
+}
+
+func (s SourceGcsFromCSV) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGcsFromCSV) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGcsFromCSV) GetHeaderDefinitionType() *SourceGcsHeaderDefinitionType {
+ return SourceGcsHeaderDefinitionTypeFromCsv.ToPointer()
+}
+
+type SourceGcsCSVHeaderDefinitionType string
+
+const (
+ SourceGcsCSVHeaderDefinitionTypeSourceGcsFromCSV SourceGcsCSVHeaderDefinitionType = "source-gcs_From CSV"
+ SourceGcsCSVHeaderDefinitionTypeSourceGcsAutogenerated SourceGcsCSVHeaderDefinitionType = "source-gcs_Autogenerated"
+ SourceGcsCSVHeaderDefinitionTypeSourceGcsUserProvided SourceGcsCSVHeaderDefinitionType = "source-gcs_User Provided"
+)
+
+type SourceGcsCSVHeaderDefinition struct {
+ SourceGcsFromCSV *SourceGcsFromCSV
+ SourceGcsAutogenerated *SourceGcsAutogenerated
+ SourceGcsUserProvided *SourceGcsUserProvided
+
+ Type SourceGcsCSVHeaderDefinitionType
+}
+
+func CreateSourceGcsCSVHeaderDefinitionSourceGcsFromCSV(sourceGcsFromCSV SourceGcsFromCSV) SourceGcsCSVHeaderDefinition {
+ typ := SourceGcsCSVHeaderDefinitionTypeSourceGcsFromCSV
+
+ return SourceGcsCSVHeaderDefinition{
+ SourceGcsFromCSV: &sourceGcsFromCSV,
+ Type: typ,
+ }
+}
+
+func CreateSourceGcsCSVHeaderDefinitionSourceGcsAutogenerated(sourceGcsAutogenerated SourceGcsAutogenerated) SourceGcsCSVHeaderDefinition {
+ typ := SourceGcsCSVHeaderDefinitionTypeSourceGcsAutogenerated
+
+ return SourceGcsCSVHeaderDefinition{
+ SourceGcsAutogenerated: &sourceGcsAutogenerated,
+ Type: typ,
+ }
+}
+
+func CreateSourceGcsCSVHeaderDefinitionSourceGcsUserProvided(sourceGcsUserProvided SourceGcsUserProvided) SourceGcsCSVHeaderDefinition {
+ typ := SourceGcsCSVHeaderDefinitionTypeSourceGcsUserProvided
+
+ return SourceGcsCSVHeaderDefinition{
+ SourceGcsUserProvided: &sourceGcsUserProvided,
+ Type: typ,
+ }
+}
+
+func (u *SourceGcsCSVHeaderDefinition) UnmarshalJSON(data []byte) error {
+
+ sourceGcsFromCSV := new(SourceGcsFromCSV)
+ if err := utils.UnmarshalJSON(data, &sourceGcsFromCSV, "", true, true); err == nil {
+ u.SourceGcsFromCSV = sourceGcsFromCSV
+ u.Type = SourceGcsCSVHeaderDefinitionTypeSourceGcsFromCSV
+ return nil
+ }
+
+ sourceGcsAutogenerated := new(SourceGcsAutogenerated)
+ if err := utils.UnmarshalJSON(data, &sourceGcsAutogenerated, "", true, true); err == nil {
+ u.SourceGcsAutogenerated = sourceGcsAutogenerated
+ u.Type = SourceGcsCSVHeaderDefinitionTypeSourceGcsAutogenerated
+ return nil
+ }
+
+ sourceGcsUserProvided := new(SourceGcsUserProvided)
+ if err := utils.UnmarshalJSON(data, &sourceGcsUserProvided, "", true, true); err == nil {
+ u.SourceGcsUserProvided = sourceGcsUserProvided
+ u.Type = SourceGcsCSVHeaderDefinitionTypeSourceGcsUserProvided
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGcsCSVHeaderDefinition) MarshalJSON() ([]byte, error) {
+ if u.SourceGcsFromCSV != nil {
+ return utils.MarshalJSON(u.SourceGcsFromCSV, "", true)
+ }
+
+ if u.SourceGcsAutogenerated != nil {
+ return utils.MarshalJSON(u.SourceGcsAutogenerated, "", true)
+ }
+
+ if u.SourceGcsUserProvided != nil {
+ return utils.MarshalJSON(u.SourceGcsUserProvided, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGcsInferenceType - How to infer the types of the columns. If none, inference default to strings.
+type SourceGcsInferenceType string
+
+const (
+ SourceGcsInferenceTypeNone SourceGcsInferenceType = "None"
+ SourceGcsInferenceTypePrimitiveTypesOnly SourceGcsInferenceType = "Primitive Types Only"
+)
+
+func (e SourceGcsInferenceType) ToPointer() *SourceGcsInferenceType {
+ return &e
+}
+
+func (e *SourceGcsInferenceType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "None":
+ fallthrough
+ case "Primitive Types Only":
+ *e = SourceGcsInferenceType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsInferenceType: %v", v)
+ }
+}
+
+// SourceGcsCSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceGcsCSVFormat struct {
+ // The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
+ Delimiter *string `default:"," json:"delimiter"`
+ // Whether two quotes in a quoted CSV value denote a single quote in the data.
+ DoubleQuote *bool `default:"true" json:"double_quote"`
+ // The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
+ Encoding *string `default:"utf8" json:"encoding"`
+ // The character used for escaping special characters. To disallow escaping, leave this field blank.
+ EscapeChar *string `json:"escape_char,omitempty"`
+ // A set of case-sensitive strings that should be interpreted as false values.
+ FalseValues []string `json:"false_values,omitempty"`
+ filetype *SourceGcsFiletype `const:"csv" json:"filetype"`
+ // How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+ HeaderDefinition *SourceGcsCSVHeaderDefinition `json:"header_definition,omitempty"`
+ // How to infer the types of the columns. If none, inference default to strings.
+ InferenceType *SourceGcsInferenceType `default:"None" json:"inference_type"`
+ // A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
+ NullValues []string `json:"null_values,omitempty"`
+ // The character used for quoting CSV values. To disallow quoting, make this field blank.
+ QuoteChar *string `default:"\"" json:"quote_char"`
+ // The number of rows to skip after the header row.
+ SkipRowsAfterHeader *int64 `default:"0" json:"skip_rows_after_header"`
+ // The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
+ SkipRowsBeforeHeader *int64 `default:"0" json:"skip_rows_before_header"`
+ // Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
+ StringsCanBeNull *bool `default:"true" json:"strings_can_be_null"`
+ // A set of case-sensitive strings that should be interpreted as true values.
+ TrueValues []string `json:"true_values,omitempty"`
+}
+
+func (s SourceGcsCSVFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGcsCSVFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGcsCSVFormat) GetDelimiter() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Delimiter
+}
+
+func (o *SourceGcsCSVFormat) GetDoubleQuote() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleQuote
+}
+
+func (o *SourceGcsCSVFormat) GetEncoding() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Encoding
+}
+
+func (o *SourceGcsCSVFormat) GetEscapeChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EscapeChar
+}
+
+func (o *SourceGcsCSVFormat) GetFalseValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.FalseValues
+}
+
+func (o *SourceGcsCSVFormat) GetFiletype() *SourceGcsFiletype {
+ return SourceGcsFiletypeCsv.ToPointer()
+}
+
+func (o *SourceGcsCSVFormat) GetHeaderDefinition() *SourceGcsCSVHeaderDefinition {
+ if o == nil {
+ return nil
+ }
+ return o.HeaderDefinition
+}
+
+func (o *SourceGcsCSVFormat) GetInferenceType() *SourceGcsInferenceType {
+ if o == nil {
+ return nil
+ }
+ return o.InferenceType
+}
+
+func (o *SourceGcsCSVFormat) GetNullValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.NullValues
+}
+
+func (o *SourceGcsCSVFormat) GetQuoteChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QuoteChar
+}
+
+func (o *SourceGcsCSVFormat) GetSkipRowsAfterHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsAfterHeader
+}
+
+func (o *SourceGcsCSVFormat) GetSkipRowsBeforeHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsBeforeHeader
+}
+
+func (o *SourceGcsCSVFormat) GetStringsCanBeNull() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.StringsCanBeNull
+}
+
+func (o *SourceGcsCSVFormat) GetTrueValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TrueValues
+}
+
+type SourceGcsFormatType string
+
+const (
+ SourceGcsFormatTypeSourceGcsCSVFormat SourceGcsFormatType = "source-gcs_CSV Format"
+)
+
+type SourceGcsFormat struct {
+ SourceGcsCSVFormat *SourceGcsCSVFormat
+
+ Type SourceGcsFormatType
+}
+
+func CreateSourceGcsFormatSourceGcsCSVFormat(sourceGcsCSVFormat SourceGcsCSVFormat) SourceGcsFormat {
+ typ := SourceGcsFormatTypeSourceGcsCSVFormat
+
+ return SourceGcsFormat{
+ SourceGcsCSVFormat: &sourceGcsCSVFormat,
+ Type: typ,
+ }
+}
+
+func (u *SourceGcsFormat) UnmarshalJSON(data []byte) error {
+
+ sourceGcsCSVFormat := new(SourceGcsCSVFormat)
+ if err := utils.UnmarshalJSON(data, &sourceGcsCSVFormat, "", true, true); err == nil {
+ u.SourceGcsCSVFormat = sourceGcsCSVFormat
+ u.Type = SourceGcsFormatTypeSourceGcsCSVFormat
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGcsFormat) MarshalJSON() ([]byte, error) {
+ if u.SourceGcsCSVFormat != nil {
+ return utils.MarshalJSON(u.SourceGcsCSVFormat, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGcsValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+type SourceGcsValidationPolicy string
+
+const (
+ SourceGcsValidationPolicyEmitRecord SourceGcsValidationPolicy = "Emit Record"
+ SourceGcsValidationPolicySkipRecord SourceGcsValidationPolicy = "Skip Record"
+ SourceGcsValidationPolicyWaitForDiscover SourceGcsValidationPolicy = "Wait for Discover"
+)
+
+func (e SourceGcsValidationPolicy) ToPointer() *SourceGcsValidationPolicy {
+ return &e
+}
+
+func (e *SourceGcsValidationPolicy) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Emit Record":
+ fallthrough
+ case "Skip Record":
+ fallthrough
+ case "Wait for Discover":
+ *e = SourceGcsValidationPolicy(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsValidationPolicy: %v", v)
+ }
+}
+
+type SourceGCSSourceGCSStreamConfig struct {
+ // When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
+ DaysToSyncIfHistoryIsFull *int64 `default:"3" json:"days_to_sync_if_history_is_full"`
+ // The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+ Format SourceGcsFormat `json:"format"`
+ // The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
+ Globs []string `json:"globs,omitempty"`
+ // The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
+ InputSchema *string `json:"input_schema,omitempty"`
+ // The path prefix configured in previous versions of the GCS connector. This option is deprecated in favor of a single glob.
+ LegacyPrefix *string `json:"legacy_prefix,omitempty"`
+ // The name of the stream.
+ Name string `json:"name"`
+ // The column or columns (for a composite key) that serves as the unique identifier of a record.
+ PrimaryKey *string `json:"primary_key,omitempty"`
+ // When enabled, syncs will not validate or structure records against the stream's schema.
+ Schemaless *bool `default:"false" json:"schemaless"`
+ // The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+ ValidationPolicy *SourceGcsValidationPolicy `default:"Emit Record" json:"validation_policy"`
+}
+
+func (s SourceGCSSourceGCSStreamConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGCSSourceGCSStreamConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGCSSourceGCSStreamConfig) GetDaysToSyncIfHistoryIsFull() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DaysToSyncIfHistoryIsFull
+}
+
+func (o *SourceGCSSourceGCSStreamConfig) GetFormat() SourceGcsFormat {
+ if o == nil {
+ return SourceGcsFormat{}
+ }
+ return o.Format
+}
+
+func (o *SourceGCSSourceGCSStreamConfig) GetGlobs() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Globs
+}
+
+func (o *SourceGCSSourceGCSStreamConfig) GetInputSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.InputSchema
+}
+
+func (o *SourceGCSSourceGCSStreamConfig) GetLegacyPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LegacyPrefix
+}
+
+func (o *SourceGCSSourceGCSStreamConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGCSSourceGCSStreamConfig) GetPrimaryKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrimaryKey
+}
+
+func (o *SourceGCSSourceGCSStreamConfig) GetSchemaless() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Schemaless
+}
+
+func (o *SourceGCSSourceGCSStreamConfig) GetValidationPolicy() *SourceGcsValidationPolicy {
+ if o == nil {
+ return nil
+ }
+ return o.ValidationPolicy
+}
+
+// SourceGcs - NOTE: When this Spec is changed, legacy_config_transformer.py must also be
+// modified to uptake the changes because it is responsible for converting
+// legacy GCS configs into file based configs using the File-Based CDK.
type SourceGcs struct {
- // GCS bucket name
- GcsBucket string `json:"gcs_bucket"`
- // GCS path to data
- GcsPath string `json:"gcs_path"`
+ // Name of the GCS bucket where the file(s) exist.
+ Bucket string `json:"bucket"`
// Enter your Google Cloud service account key in JSON format
ServiceAccount string `json:"service_account"`
- SourceType SourceGcsGcs `json:"sourceType"`
+ sourceType SourceGcsGcs `const:"gcs" json:"sourceType"`
+ // UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
+ StartDate *time.Time `json:"start_date,omitempty"`
+ // Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
+ Streams []SourceGCSSourceGCSStreamConfig `json:"streams"`
+}
+
+func (s SourceGcs) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGcs) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGcs) GetBucket() string {
+ if o == nil {
+ return ""
+ }
+ return o.Bucket
+}
+
+func (o *SourceGcs) GetServiceAccount() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServiceAccount
+}
+
+func (o *SourceGcs) GetSourceType() SourceGcsGcs {
+ return SourceGcsGcsGcs
+}
+
+func (o *SourceGcs) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceGcs) GetStreams() []SourceGCSSourceGCSStreamConfig {
+ if o == nil {
+ return []SourceGCSSourceGCSStreamConfig{}
+ }
+ return o.Streams
}
diff --git a/internal/sdk/pkg/models/shared/sourcegcscreaterequest.go b/internal/sdk/pkg/models/shared/sourcegcscreaterequest.go
old mode 100755
new mode 100644
index 121f607c5..b7f3b9e29
--- a/internal/sdk/pkg/models/shared/sourcegcscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegcscreaterequest.go
@@ -3,9 +3,50 @@
package shared
type SourceGcsCreateRequest struct {
+ // NOTE: When this Spec is changed, legacy_config_transformer.py must also be
+ // modified to uptake the changes because it is responsible for converting
+ // legacy GCS configs into file based configs using the File-Based CDK.
Configuration SourceGcs `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGcsCreateRequest) GetConfiguration() SourceGcs {
+ if o == nil {
+ return SourceGcs{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGcsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGcsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGcsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGcsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegcsputrequest.go b/internal/sdk/pkg/models/shared/sourcegcsputrequest.go
old mode 100755
new mode 100644
index 7cf65563b..901fbbe82
--- a/internal/sdk/pkg/models/shared/sourcegcsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegcsputrequest.go
@@ -3,7 +3,31 @@
package shared
type SourceGcsPutRequest struct {
+ // NOTE: When this Spec is changed, legacy_config_transformer.py must also be
+ // modified to uptake the changes because it is responsible for converting
+ // legacy GCS configs into file based configs using the File-Based CDK.
Configuration SourceGcsUpdate `json:"configuration"`
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGcsPutRequest) GetConfiguration() SourceGcsUpdate {
+ if o == nil {
+ return SourceGcsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGcsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGcsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegcsupdate.go b/internal/sdk/pkg/models/shared/sourcegcsupdate.go
old mode 100755
new mode 100644
index fa05e9170..9fb713369
--- a/internal/sdk/pkg/models/shared/sourcegcsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegcsupdate.go
@@ -2,11 +2,645 @@
package shared
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
+)
+
+type SourceGcsUpdateFiletype string
+
+const (
+ SourceGcsUpdateFiletypeCsv SourceGcsUpdateFiletype = "csv"
+)
+
+func (e SourceGcsUpdateFiletype) ToPointer() *SourceGcsUpdateFiletype {
+ return &e
+}
+
+func (e *SourceGcsUpdateFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "csv":
+ *e = SourceGcsUpdateFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsUpdateFiletype: %v", v)
+ }
+}
+
+type SourceGcsUpdateSchemasStreamsHeaderDefinitionType string
+
+const (
+ SourceGcsUpdateSchemasStreamsHeaderDefinitionTypeUserProvided SourceGcsUpdateSchemasStreamsHeaderDefinitionType = "User Provided"
+)
+
+func (e SourceGcsUpdateSchemasStreamsHeaderDefinitionType) ToPointer() *SourceGcsUpdateSchemasStreamsHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceGcsUpdateSchemasStreamsHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "User Provided":
+ *e = SourceGcsUpdateSchemasStreamsHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsUpdateSchemasStreamsHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGcsUpdateUserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGcsUpdateUserProvided struct {
+ // The column names that will be used while emitting the CSV records
+ ColumnNames []string `json:"column_names"`
+ headerDefinitionType *SourceGcsUpdateSchemasStreamsHeaderDefinitionType `const:"User Provided" json:"header_definition_type"`
+}
+
+func (s SourceGcsUpdateUserProvided) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGcsUpdateUserProvided) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGcsUpdateUserProvided) GetColumnNames() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.ColumnNames
+}
+
+func (o *SourceGcsUpdateUserProvided) GetHeaderDefinitionType() *SourceGcsUpdateSchemasStreamsHeaderDefinitionType {
+ return SourceGcsUpdateSchemasStreamsHeaderDefinitionTypeUserProvided.ToPointer()
+}
+
+type SourceGcsUpdateSchemasHeaderDefinitionType string
+
+const (
+ SourceGcsUpdateSchemasHeaderDefinitionTypeAutogenerated SourceGcsUpdateSchemasHeaderDefinitionType = "Autogenerated"
+)
+
+func (e SourceGcsUpdateSchemasHeaderDefinitionType) ToPointer() *SourceGcsUpdateSchemasHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceGcsUpdateSchemasHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Autogenerated":
+ *e = SourceGcsUpdateSchemasHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsUpdateSchemasHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGcsUpdateAutogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGcsUpdateAutogenerated struct {
+ headerDefinitionType *SourceGcsUpdateSchemasHeaderDefinitionType `const:"Autogenerated" json:"header_definition_type"`
+}
+
+func (s SourceGcsUpdateAutogenerated) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGcsUpdateAutogenerated) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGcsUpdateAutogenerated) GetHeaderDefinitionType() *SourceGcsUpdateSchemasHeaderDefinitionType {
+ return SourceGcsUpdateSchemasHeaderDefinitionTypeAutogenerated.ToPointer()
+}
+
+type SourceGcsUpdateHeaderDefinitionType string
+
+const (
+ SourceGcsUpdateHeaderDefinitionTypeFromCsv SourceGcsUpdateHeaderDefinitionType = "From CSV"
+)
+
+func (e SourceGcsUpdateHeaderDefinitionType) ToPointer() *SourceGcsUpdateHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceGcsUpdateHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "From CSV":
+ *e = SourceGcsUpdateHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsUpdateHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGcsUpdateFromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGcsUpdateFromCSV struct {
+ headerDefinitionType *SourceGcsUpdateHeaderDefinitionType `const:"From CSV" json:"header_definition_type"`
+}
+
+func (s SourceGcsUpdateFromCSV) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGcsUpdateFromCSV) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGcsUpdateFromCSV) GetHeaderDefinitionType() *SourceGcsUpdateHeaderDefinitionType {
+ return SourceGcsUpdateHeaderDefinitionTypeFromCsv.ToPointer()
+}
+
+type SourceGcsUpdateCSVHeaderDefinitionType string
+
+const (
+ SourceGcsUpdateCSVHeaderDefinitionTypeSourceGcsUpdateFromCSV SourceGcsUpdateCSVHeaderDefinitionType = "source-gcs-update_From CSV"
+ SourceGcsUpdateCSVHeaderDefinitionTypeSourceGcsUpdateAutogenerated SourceGcsUpdateCSVHeaderDefinitionType = "source-gcs-update_Autogenerated"
+ SourceGcsUpdateCSVHeaderDefinitionTypeSourceGcsUpdateUserProvided SourceGcsUpdateCSVHeaderDefinitionType = "source-gcs-update_User Provided"
+)
+
+type SourceGcsUpdateCSVHeaderDefinition struct {
+ SourceGcsUpdateFromCSV *SourceGcsUpdateFromCSV
+ SourceGcsUpdateAutogenerated *SourceGcsUpdateAutogenerated
+ SourceGcsUpdateUserProvided *SourceGcsUpdateUserProvided
+
+ Type SourceGcsUpdateCSVHeaderDefinitionType
+}
+
+func CreateSourceGcsUpdateCSVHeaderDefinitionSourceGcsUpdateFromCSV(sourceGcsUpdateFromCSV SourceGcsUpdateFromCSV) SourceGcsUpdateCSVHeaderDefinition {
+ typ := SourceGcsUpdateCSVHeaderDefinitionTypeSourceGcsUpdateFromCSV
+
+ return SourceGcsUpdateCSVHeaderDefinition{
+ SourceGcsUpdateFromCSV: &sourceGcsUpdateFromCSV,
+ Type: typ,
+ }
+}
+
+func CreateSourceGcsUpdateCSVHeaderDefinitionSourceGcsUpdateAutogenerated(sourceGcsUpdateAutogenerated SourceGcsUpdateAutogenerated) SourceGcsUpdateCSVHeaderDefinition {
+ typ := SourceGcsUpdateCSVHeaderDefinitionTypeSourceGcsUpdateAutogenerated
+
+ return SourceGcsUpdateCSVHeaderDefinition{
+ SourceGcsUpdateAutogenerated: &sourceGcsUpdateAutogenerated,
+ Type: typ,
+ }
+}
+
+func CreateSourceGcsUpdateCSVHeaderDefinitionSourceGcsUpdateUserProvided(sourceGcsUpdateUserProvided SourceGcsUpdateUserProvided) SourceGcsUpdateCSVHeaderDefinition {
+ typ := SourceGcsUpdateCSVHeaderDefinitionTypeSourceGcsUpdateUserProvided
+
+ return SourceGcsUpdateCSVHeaderDefinition{
+ SourceGcsUpdateUserProvided: &sourceGcsUpdateUserProvided,
+ Type: typ,
+ }
+}
+
+func (u *SourceGcsUpdateCSVHeaderDefinition) UnmarshalJSON(data []byte) error {
+
+ sourceGcsUpdateFromCSV := new(SourceGcsUpdateFromCSV)
+ if err := utils.UnmarshalJSON(data, &sourceGcsUpdateFromCSV, "", true, true); err == nil {
+ u.SourceGcsUpdateFromCSV = sourceGcsUpdateFromCSV
+ u.Type = SourceGcsUpdateCSVHeaderDefinitionTypeSourceGcsUpdateFromCSV
+ return nil
+ }
+
+ sourceGcsUpdateAutogenerated := new(SourceGcsUpdateAutogenerated)
+ if err := utils.UnmarshalJSON(data, &sourceGcsUpdateAutogenerated, "", true, true); err == nil {
+ u.SourceGcsUpdateAutogenerated = sourceGcsUpdateAutogenerated
+ u.Type = SourceGcsUpdateCSVHeaderDefinitionTypeSourceGcsUpdateAutogenerated
+ return nil
+ }
+
+ sourceGcsUpdateUserProvided := new(SourceGcsUpdateUserProvided)
+ if err := utils.UnmarshalJSON(data, &sourceGcsUpdateUserProvided, "", true, true); err == nil {
+ u.SourceGcsUpdateUserProvided = sourceGcsUpdateUserProvided
+ u.Type = SourceGcsUpdateCSVHeaderDefinitionTypeSourceGcsUpdateUserProvided
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGcsUpdateCSVHeaderDefinition) MarshalJSON() ([]byte, error) {
+ if u.SourceGcsUpdateFromCSV != nil {
+ return utils.MarshalJSON(u.SourceGcsUpdateFromCSV, "", true)
+ }
+
+ if u.SourceGcsUpdateAutogenerated != nil {
+ return utils.MarshalJSON(u.SourceGcsUpdateAutogenerated, "", true)
+ }
+
+ if u.SourceGcsUpdateUserProvided != nil {
+ return utils.MarshalJSON(u.SourceGcsUpdateUserProvided, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGcsUpdateInferenceType - How to infer the types of the columns. If none, inference default to strings.
+type SourceGcsUpdateInferenceType string
+
+const (
+ SourceGcsUpdateInferenceTypeNone SourceGcsUpdateInferenceType = "None"
+ SourceGcsUpdateInferenceTypePrimitiveTypesOnly SourceGcsUpdateInferenceType = "Primitive Types Only"
+)
+
+func (e SourceGcsUpdateInferenceType) ToPointer() *SourceGcsUpdateInferenceType {
+ return &e
+}
+
+func (e *SourceGcsUpdateInferenceType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "None":
+ fallthrough
+ case "Primitive Types Only":
+ *e = SourceGcsUpdateInferenceType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsUpdateInferenceType: %v", v)
+ }
+}
+
+// SourceGcsUpdateCSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceGcsUpdateCSVFormat struct {
+ // The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
+ Delimiter *string `default:"," json:"delimiter"`
+ // Whether two quotes in a quoted CSV value denote a single quote in the data.
+ DoubleQuote *bool `default:"true" json:"double_quote"`
+ // The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
+ Encoding *string `default:"utf8" json:"encoding"`
+ // The character used for escaping special characters. To disallow escaping, leave this field blank.
+ EscapeChar *string `json:"escape_char,omitempty"`
+ // A set of case-sensitive strings that should be interpreted as false values.
+ FalseValues []string `json:"false_values,omitempty"`
+ filetype *SourceGcsUpdateFiletype `const:"csv" json:"filetype"`
+ // How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+ HeaderDefinition *SourceGcsUpdateCSVHeaderDefinition `json:"header_definition,omitempty"`
+ // How to infer the types of the columns. If none, inference default to strings.
+ InferenceType *SourceGcsUpdateInferenceType `default:"None" json:"inference_type"`
+ // A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
+ NullValues []string `json:"null_values,omitempty"`
+ // The character used for quoting CSV values. To disallow quoting, make this field blank.
+ QuoteChar *string `default:""" json:"quote_char"`
+ // The number of rows to skip after the header row.
+ SkipRowsAfterHeader *int64 `default:"0" json:"skip_rows_after_header"`
+ // The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
+ SkipRowsBeforeHeader *int64 `default:"0" json:"skip_rows_before_header"`
+ // Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
+ StringsCanBeNull *bool `default:"true" json:"strings_can_be_null"`
+ // A set of case-sensitive strings that should be interpreted as true values.
+ TrueValues []string `json:"true_values,omitempty"`
+}
+
+func (s SourceGcsUpdateCSVFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGcsUpdateCSVFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetDelimiter() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Delimiter
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetDoubleQuote() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleQuote
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetEncoding() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Encoding
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetEscapeChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EscapeChar
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetFalseValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.FalseValues
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetFiletype() *SourceGcsUpdateFiletype {
+ return SourceGcsUpdateFiletypeCsv.ToPointer()
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetHeaderDefinition() *SourceGcsUpdateCSVHeaderDefinition {
+ if o == nil {
+ return nil
+ }
+ return o.HeaderDefinition
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetInferenceType() *SourceGcsUpdateInferenceType {
+ if o == nil {
+ return nil
+ }
+ return o.InferenceType
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetNullValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.NullValues
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetQuoteChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QuoteChar
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetSkipRowsAfterHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsAfterHeader
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetSkipRowsBeforeHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsBeforeHeader
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetStringsCanBeNull() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.StringsCanBeNull
+}
+
+func (o *SourceGcsUpdateCSVFormat) GetTrueValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TrueValues
+}
+
+type SourceGcsUpdateFormatType string
+
+const (
+ SourceGcsUpdateFormatTypeSourceGcsUpdateCSVFormat SourceGcsUpdateFormatType = "source-gcs-update_CSV Format"
+)
+
+type SourceGcsUpdateFormat struct {
+ SourceGcsUpdateCSVFormat *SourceGcsUpdateCSVFormat
+
+ Type SourceGcsUpdateFormatType
+}
+
+func CreateSourceGcsUpdateFormatSourceGcsUpdateCSVFormat(sourceGcsUpdateCSVFormat SourceGcsUpdateCSVFormat) SourceGcsUpdateFormat {
+ typ := SourceGcsUpdateFormatTypeSourceGcsUpdateCSVFormat
+
+ return SourceGcsUpdateFormat{
+ SourceGcsUpdateCSVFormat: &sourceGcsUpdateCSVFormat,
+ Type: typ,
+ }
+}
+
+func (u *SourceGcsUpdateFormat) UnmarshalJSON(data []byte) error {
+
+ sourceGcsUpdateCSVFormat := new(SourceGcsUpdateCSVFormat)
+ if err := utils.UnmarshalJSON(data, &sourceGcsUpdateCSVFormat, "", true, true); err == nil {
+ u.SourceGcsUpdateCSVFormat = sourceGcsUpdateCSVFormat
+ u.Type = SourceGcsUpdateFormatTypeSourceGcsUpdateCSVFormat
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGcsUpdateFormat) MarshalJSON() ([]byte, error) {
+ if u.SourceGcsUpdateCSVFormat != nil {
+ return utils.MarshalJSON(u.SourceGcsUpdateCSVFormat, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGcsUpdateValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+type SourceGcsUpdateValidationPolicy string
+
+const (
+ SourceGcsUpdateValidationPolicyEmitRecord SourceGcsUpdateValidationPolicy = "Emit Record"
+ SourceGcsUpdateValidationPolicySkipRecord SourceGcsUpdateValidationPolicy = "Skip Record"
+ SourceGcsUpdateValidationPolicyWaitForDiscover SourceGcsUpdateValidationPolicy = "Wait for Discover"
+)
+
+func (e SourceGcsUpdateValidationPolicy) ToPointer() *SourceGcsUpdateValidationPolicy {
+ return &e
+}
+
+func (e *SourceGcsUpdateValidationPolicy) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Emit Record":
+ fallthrough
+ case "Skip Record":
+ fallthrough
+ case "Wait for Discover":
+ *e = SourceGcsUpdateValidationPolicy(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGcsUpdateValidationPolicy: %v", v)
+ }
+}
+
+type SourceGCSStreamConfig struct {
+ // When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
+ DaysToSyncIfHistoryIsFull *int64 `default:"3" json:"days_to_sync_if_history_is_full"`
+ // The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+ Format SourceGcsUpdateFormat `json:"format"`
+ // The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
+ Globs []string `json:"globs,omitempty"`
+ // The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
+ InputSchema *string `json:"input_schema,omitempty"`
+ // The path prefix configured in previous versions of the GCS connector. This option is deprecated in favor of a single glob.
+ LegacyPrefix *string `json:"legacy_prefix,omitempty"`
+ // The name of the stream.
+ Name string `json:"name"`
+ // The column or columns (for a composite key) that serves as the unique identifier of a record.
+ PrimaryKey *string `json:"primary_key,omitempty"`
+ // When enabled, syncs will not validate or structure records against the stream's schema.
+ Schemaless *bool `default:"false" json:"schemaless"`
+ // The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+ ValidationPolicy *SourceGcsUpdateValidationPolicy `default:"Emit Record" json:"validation_policy"`
+}
+
+func (s SourceGCSStreamConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGCSStreamConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGCSStreamConfig) GetDaysToSyncIfHistoryIsFull() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DaysToSyncIfHistoryIsFull
+}
+
+func (o *SourceGCSStreamConfig) GetFormat() SourceGcsUpdateFormat {
+ if o == nil {
+ return SourceGcsUpdateFormat{}
+ }
+ return o.Format
+}
+
+func (o *SourceGCSStreamConfig) GetGlobs() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Globs
+}
+
+func (o *SourceGCSStreamConfig) GetInputSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.InputSchema
+}
+
+func (o *SourceGCSStreamConfig) GetLegacyPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LegacyPrefix
+}
+
+func (o *SourceGCSStreamConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGCSStreamConfig) GetPrimaryKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrimaryKey
+}
+
+func (o *SourceGCSStreamConfig) GetSchemaless() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Schemaless
+}
+
+func (o *SourceGCSStreamConfig) GetValidationPolicy() *SourceGcsUpdateValidationPolicy {
+ if o == nil {
+ return nil
+ }
+ return o.ValidationPolicy
+}
+
+// SourceGcsUpdate - NOTE: When this Spec is changed, legacy_config_transformer.py must also be
+// modified to uptake the changes because it is responsible for converting
+// legacy GCS configs into file based configs using the File-Based CDK.
type SourceGcsUpdate struct {
- // GCS bucket name
- GcsBucket string `json:"gcs_bucket"`
- // GCS path to data
- GcsPath string `json:"gcs_path"`
+ // Name of the GCS bucket where the file(s) exist.
+ Bucket string `json:"bucket"`
// Enter your Google Cloud service account key in JSON format
ServiceAccount string `json:"service_account"`
+ // UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
+ StartDate *time.Time `json:"start_date,omitempty"`
+ // Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
+ Streams []SourceGCSStreamConfig `json:"streams"`
+}
+
+func (s SourceGcsUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGcsUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGcsUpdate) GetBucket() string {
+ if o == nil {
+ return ""
+ }
+ return o.Bucket
+}
+
+func (o *SourceGcsUpdate) GetServiceAccount() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServiceAccount
+}
+
+func (o *SourceGcsUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceGcsUpdate) GetStreams() []SourceGCSStreamConfig {
+ if o == nil {
+ return []SourceGCSStreamConfig{}
+ }
+ return o.Streams
}
diff --git a/internal/sdk/pkg/models/shared/sourcegetlago.go b/internal/sdk/pkg/models/shared/sourcegetlago.go
old mode 100755
new mode 100644
index 31965e3ff..e0827cc1b
--- a/internal/sdk/pkg/models/shared/sourcegetlago.go
+++ b/internal/sdk/pkg/models/shared/sourcegetlago.go
@@ -5,34 +5,66 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGetlagoGetlago string
+type Getlago string
const (
- SourceGetlagoGetlagoGetlago SourceGetlagoGetlago = "getlago"
+ GetlagoGetlago Getlago = "getlago"
)
-func (e SourceGetlagoGetlago) ToPointer() *SourceGetlagoGetlago {
+func (e Getlago) ToPointer() *Getlago {
return &e
}
-func (e *SourceGetlagoGetlago) UnmarshalJSON(data []byte) error {
+func (e *Getlago) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "getlago":
- *e = SourceGetlagoGetlago(v)
+ *e = Getlago(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGetlagoGetlago: %v", v)
+ return fmt.Errorf("invalid value for Getlago: %v", v)
}
}
type SourceGetlago struct {
// Your API Key. See here.
- APIKey string `json:"api_key"`
- SourceType SourceGetlagoGetlago `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ // Your Lago API URL
+ APIURL *string `default:"https://api.getlago.com/api/v1" json:"api_url"`
+ sourceType Getlago `const:"getlago" json:"sourceType"`
+}
+
+func (s SourceGetlago) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGetlago) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGetlago) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceGetlago) GetAPIURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIURL
+}
+
+func (o *SourceGetlago) GetSourceType() Getlago {
+ return GetlagoGetlago
}
diff --git a/internal/sdk/pkg/models/shared/sourcegetlagocreaterequest.go b/internal/sdk/pkg/models/shared/sourcegetlagocreaterequest.go
old mode 100755
new mode 100644
index 7faa3f002..1d19c0f79
--- a/internal/sdk/pkg/models/shared/sourcegetlagocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegetlagocreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGetlagoCreateRequest struct {
Configuration SourceGetlago `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGetlagoCreateRequest) GetConfiguration() SourceGetlago {
+ if o == nil {
+ return SourceGetlago{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGetlagoCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGetlagoCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGetlagoCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGetlagoCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegetlagoputrequest.go b/internal/sdk/pkg/models/shared/sourcegetlagoputrequest.go
old mode 100755
new mode 100644
index fa3de88db..1b536121e
--- a/internal/sdk/pkg/models/shared/sourcegetlagoputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegetlagoputrequest.go
@@ -7,3 +7,24 @@ type SourceGetlagoPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGetlagoPutRequest) GetConfiguration() SourceGetlagoUpdate {
+ if o == nil {
+ return SourceGetlagoUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGetlagoPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGetlagoPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegetlagoupdate.go b/internal/sdk/pkg/models/shared/sourcegetlagoupdate.go
old mode 100755
new mode 100644
index cbcac5210..aaf29e1fa
--- a/internal/sdk/pkg/models/shared/sourcegetlagoupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegetlagoupdate.go
@@ -2,7 +2,38 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceGetlagoUpdate struct {
// Your API Key. See here.
APIKey string `json:"api_key"`
+ // Your Lago API URL
+ APIURL *string `default:"https://api.getlago.com/api/v1" json:"api_url"`
+}
+
+func (s SourceGetlagoUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGetlagoUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGetlagoUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceGetlagoUpdate) GetAPIURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIURL
}
diff --git a/internal/sdk/pkg/models/shared/sourcegithub.go b/internal/sdk/pkg/models/shared/sourcegithub.go
old mode 100755
new mode 100644
index 152dca92f..35ed8929b
--- a/internal/sdk/pkg/models/shared/sourcegithub.go
+++ b/internal/sdk/pkg/models/shared/sourcegithub.go
@@ -3,129 +3,182 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceGithubAuthenticationPersonalAccessTokenOptionTitle string
+type SourceGithubSchemasOptionTitle string
const (
- SourceGithubAuthenticationPersonalAccessTokenOptionTitlePatCredentials SourceGithubAuthenticationPersonalAccessTokenOptionTitle = "PAT Credentials"
+ SourceGithubSchemasOptionTitlePatCredentials SourceGithubSchemasOptionTitle = "PAT Credentials"
)
-func (e SourceGithubAuthenticationPersonalAccessTokenOptionTitle) ToPointer() *SourceGithubAuthenticationPersonalAccessTokenOptionTitle {
+func (e SourceGithubSchemasOptionTitle) ToPointer() *SourceGithubSchemasOptionTitle {
return &e
}
-func (e *SourceGithubAuthenticationPersonalAccessTokenOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceGithubSchemasOptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "PAT Credentials":
- *e = SourceGithubAuthenticationPersonalAccessTokenOptionTitle(v)
+ *e = SourceGithubSchemasOptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGithubAuthenticationPersonalAccessTokenOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceGithubSchemasOptionTitle: %v", v)
}
}
-// SourceGithubAuthenticationPersonalAccessToken - Choose how to authenticate to GitHub
-type SourceGithubAuthenticationPersonalAccessToken struct {
- OptionTitle *SourceGithubAuthenticationPersonalAccessTokenOptionTitle `json:"option_title,omitempty"`
+// SourceGithubPersonalAccessToken - Choose how to authenticate to GitHub
+type SourceGithubPersonalAccessToken struct {
+ optionTitle *SourceGithubSchemasOptionTitle `const:"PAT Credentials" json:"option_title,omitempty"`
// Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with ","
PersonalAccessToken string `json:"personal_access_token"`
}
-type SourceGithubAuthenticationOAuthOptionTitle string
+func (s SourceGithubPersonalAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGithubPersonalAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGithubPersonalAccessToken) GetOptionTitle() *SourceGithubSchemasOptionTitle {
+ return SourceGithubSchemasOptionTitlePatCredentials.ToPointer()
+}
+
+func (o *SourceGithubPersonalAccessToken) GetPersonalAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.PersonalAccessToken
+}
+
+type SourceGithubOptionTitle string
const (
- SourceGithubAuthenticationOAuthOptionTitleOAuthCredentials SourceGithubAuthenticationOAuthOptionTitle = "OAuth Credentials"
+ SourceGithubOptionTitleOAuthCredentials SourceGithubOptionTitle = "OAuth Credentials"
)
-func (e SourceGithubAuthenticationOAuthOptionTitle) ToPointer() *SourceGithubAuthenticationOAuthOptionTitle {
+func (e SourceGithubOptionTitle) ToPointer() *SourceGithubOptionTitle {
return &e
}
-func (e *SourceGithubAuthenticationOAuthOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceGithubOptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth Credentials":
- *e = SourceGithubAuthenticationOAuthOptionTitle(v)
+ *e = SourceGithubOptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGithubAuthenticationOAuthOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceGithubOptionTitle: %v", v)
}
}
-// SourceGithubAuthenticationOAuth - Choose how to authenticate to GitHub
-type SourceGithubAuthenticationOAuth struct {
+// SourceGithubOAuth - Choose how to authenticate to GitHub
+type SourceGithubOAuth struct {
// OAuth access token
AccessToken string `json:"access_token"`
// OAuth Client Id
ClientID *string `json:"client_id,omitempty"`
// OAuth Client secret
- ClientSecret *string `json:"client_secret,omitempty"`
- OptionTitle *SourceGithubAuthenticationOAuthOptionTitle `json:"option_title,omitempty"`
+ ClientSecret *string `json:"client_secret,omitempty"`
+ optionTitle *SourceGithubOptionTitle `const:"OAuth Credentials" json:"option_title,omitempty"`
+}
+
+func (s SourceGithubOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGithubOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGithubOAuth) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceGithubOAuth) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceGithubOAuth) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGithubOAuth) GetOptionTitle() *SourceGithubOptionTitle {
+ return SourceGithubOptionTitleOAuthCredentials.ToPointer()
}
type SourceGithubAuthenticationType string
const (
- SourceGithubAuthenticationTypeSourceGithubAuthenticationOAuth SourceGithubAuthenticationType = "source-github_Authentication_OAuth"
- SourceGithubAuthenticationTypeSourceGithubAuthenticationPersonalAccessToken SourceGithubAuthenticationType = "source-github_Authentication_Personal Access Token"
+ SourceGithubAuthenticationTypeSourceGithubOAuth SourceGithubAuthenticationType = "source-github_OAuth"
+ SourceGithubAuthenticationTypeSourceGithubPersonalAccessToken SourceGithubAuthenticationType = "source-github_Personal Access Token"
)
type SourceGithubAuthentication struct {
- SourceGithubAuthenticationOAuth *SourceGithubAuthenticationOAuth
- SourceGithubAuthenticationPersonalAccessToken *SourceGithubAuthenticationPersonalAccessToken
+ SourceGithubOAuth *SourceGithubOAuth
+ SourceGithubPersonalAccessToken *SourceGithubPersonalAccessToken
Type SourceGithubAuthenticationType
}
-func CreateSourceGithubAuthenticationSourceGithubAuthenticationOAuth(sourceGithubAuthenticationOAuth SourceGithubAuthenticationOAuth) SourceGithubAuthentication {
- typ := SourceGithubAuthenticationTypeSourceGithubAuthenticationOAuth
+func CreateSourceGithubAuthenticationSourceGithubOAuth(sourceGithubOAuth SourceGithubOAuth) SourceGithubAuthentication {
+ typ := SourceGithubAuthenticationTypeSourceGithubOAuth
return SourceGithubAuthentication{
- SourceGithubAuthenticationOAuth: &sourceGithubAuthenticationOAuth,
- Type: typ,
+ SourceGithubOAuth: &sourceGithubOAuth,
+ Type: typ,
}
}
-func CreateSourceGithubAuthenticationSourceGithubAuthenticationPersonalAccessToken(sourceGithubAuthenticationPersonalAccessToken SourceGithubAuthenticationPersonalAccessToken) SourceGithubAuthentication {
- typ := SourceGithubAuthenticationTypeSourceGithubAuthenticationPersonalAccessToken
+func CreateSourceGithubAuthenticationSourceGithubPersonalAccessToken(sourceGithubPersonalAccessToken SourceGithubPersonalAccessToken) SourceGithubAuthentication {
+ typ := SourceGithubAuthenticationTypeSourceGithubPersonalAccessToken
return SourceGithubAuthentication{
- SourceGithubAuthenticationPersonalAccessToken: &sourceGithubAuthenticationPersonalAccessToken,
- Type: typ,
+ SourceGithubPersonalAccessToken: &sourceGithubPersonalAccessToken,
+ Type: typ,
}
}
func (u *SourceGithubAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceGithubAuthenticationPersonalAccessToken := new(SourceGithubAuthenticationPersonalAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGithubAuthenticationPersonalAccessToken); err == nil {
- u.SourceGithubAuthenticationPersonalAccessToken = sourceGithubAuthenticationPersonalAccessToken
- u.Type = SourceGithubAuthenticationTypeSourceGithubAuthenticationPersonalAccessToken
+
+ sourceGithubPersonalAccessToken := new(SourceGithubPersonalAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceGithubPersonalAccessToken, "", true, true); err == nil {
+ u.SourceGithubPersonalAccessToken = sourceGithubPersonalAccessToken
+ u.Type = SourceGithubAuthenticationTypeSourceGithubPersonalAccessToken
return nil
}
- sourceGithubAuthenticationOAuth := new(SourceGithubAuthenticationOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGithubAuthenticationOAuth); err == nil {
- u.SourceGithubAuthenticationOAuth = sourceGithubAuthenticationOAuth
- u.Type = SourceGithubAuthenticationTypeSourceGithubAuthenticationOAuth
+ sourceGithubOAuth := new(SourceGithubOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceGithubOAuth, "", true, true); err == nil {
+ u.SourceGithubOAuth = sourceGithubOAuth
+ u.Type = SourceGithubAuthenticationTypeSourceGithubOAuth
return nil
}
@@ -133,51 +186,128 @@ func (u *SourceGithubAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceGithubAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceGithubAuthenticationPersonalAccessToken != nil {
- return json.Marshal(u.SourceGithubAuthenticationPersonalAccessToken)
+ if u.SourceGithubOAuth != nil {
+ return utils.MarshalJSON(u.SourceGithubOAuth, "", true)
}
- if u.SourceGithubAuthenticationOAuth != nil {
- return json.Marshal(u.SourceGithubAuthenticationOAuth)
+ if u.SourceGithubPersonalAccessToken != nil {
+ return utils.MarshalJSON(u.SourceGithubPersonalAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceGithubGithub string
+type Github string
const (
- SourceGithubGithubGithub SourceGithubGithub = "github"
+ GithubGithub Github = "github"
)
-func (e SourceGithubGithub) ToPointer() *SourceGithubGithub {
+func (e Github) ToPointer() *Github {
return &e
}
-func (e *SourceGithubGithub) UnmarshalJSON(data []byte) error {
+func (e *Github) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "github":
- *e = SourceGithubGithub(v)
+ *e = Github(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGithubGithub: %v", v)
+ return fmt.Errorf("invalid value for Github: %v", v)
}
}
type SourceGithub struct {
- // Space-delimited list of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.
+ // Please enter your basic URL from self-hosted GitHub instance or leave it empty to use GitHub.
+ APIURL *string `default:"https://api.github.com/" json:"api_url"`
+ // (DEPRECATED) Space-delimited list of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.
Branch *string `json:"branch,omitempty"`
+ // List of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.
+ Branches []string `json:"branches,omitempty"`
// Choose how to authenticate to GitHub
- Credentials *SourceGithubAuthentication `json:"credentials,omitempty"`
- // Space-delimited list of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories.
- Repository string `json:"repository"`
+ Credentials SourceGithubAuthentication `json:"credentials"`
+ // List of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories.
+ Repositories []string `json:"repositories"`
+ // (DEPRECATED) Space-delimited list of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories.
+ Repository *string `json:"repository,omitempty"`
// The GitHub API allows for a maximum of 5000 requests per hour (15000 for Github Enterprise). You can specify a lower value to limit your use of the API quota.
- RequestsPerHour *int64 `json:"requests_per_hour,omitempty"`
- SourceType SourceGithubGithub `json:"sourceType"`
- // The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info
- StartDate time.Time `json:"start_date"`
+ RequestsPerHour *int64 `json:"requests_per_hour,omitempty"`
+ sourceType Github `const:"github" json:"sourceType"`
+ // The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. If the date is not set, all data will be replicated. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceGithub) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGithub) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGithub) GetAPIURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIURL
+}
+
+func (o *SourceGithub) GetBranch() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Branch
+}
+
+func (o *SourceGithub) GetBranches() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Branches
+}
+
+func (o *SourceGithub) GetCredentials() SourceGithubAuthentication {
+ if o == nil {
+ return SourceGithubAuthentication{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceGithub) GetRepositories() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Repositories
+}
+
+func (o *SourceGithub) GetRepository() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Repository
+}
+
+func (o *SourceGithub) GetRequestsPerHour() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.RequestsPerHour
+}
+
+func (o *SourceGithub) GetSourceType() Github {
+ return GithubGithub
+}
+
+func (o *SourceGithub) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcegithubcreaterequest.go b/internal/sdk/pkg/models/shared/sourcegithubcreaterequest.go
old mode 100755
new mode 100644
index 7377759e0..fddc02e47
--- a/internal/sdk/pkg/models/shared/sourcegithubcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegithubcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGithubCreateRequest struct {
Configuration SourceGithub `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGithubCreateRequest) GetConfiguration() SourceGithub {
+ if o == nil {
+ return SourceGithub{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGithubCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGithubCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGithubCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGithubCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegithubputrequest.go b/internal/sdk/pkg/models/shared/sourcegithubputrequest.go
old mode 100755
new mode 100644
index dc5e37878..44362b20d
--- a/internal/sdk/pkg/models/shared/sourcegithubputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegithubputrequest.go
@@ -7,3 +7,24 @@ type SourceGithubPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGithubPutRequest) GetConfiguration() SourceGithubUpdate {
+ if o == nil {
+ return SourceGithubUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGithubPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGithubPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegithubupdate.go b/internal/sdk/pkg/models/shared/sourcegithubupdate.go
old mode 100755
new mode 100644
index 9f08a7a53..7e6c494de
--- a/internal/sdk/pkg/models/shared/sourcegithubupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegithubupdate.go
@@ -3,129 +3,182 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceGithubUpdateAuthenticationPersonalAccessTokenOptionTitle string
+type SourceGithubUpdateOptionTitle string
const (
- SourceGithubUpdateAuthenticationPersonalAccessTokenOptionTitlePatCredentials SourceGithubUpdateAuthenticationPersonalAccessTokenOptionTitle = "PAT Credentials"
+ SourceGithubUpdateOptionTitlePatCredentials SourceGithubUpdateOptionTitle = "PAT Credentials"
)
-func (e SourceGithubUpdateAuthenticationPersonalAccessTokenOptionTitle) ToPointer() *SourceGithubUpdateAuthenticationPersonalAccessTokenOptionTitle {
+func (e SourceGithubUpdateOptionTitle) ToPointer() *SourceGithubUpdateOptionTitle {
return &e
}
-func (e *SourceGithubUpdateAuthenticationPersonalAccessTokenOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceGithubUpdateOptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "PAT Credentials":
- *e = SourceGithubUpdateAuthenticationPersonalAccessTokenOptionTitle(v)
+ *e = SourceGithubUpdateOptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGithubUpdateAuthenticationPersonalAccessTokenOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceGithubUpdateOptionTitle: %v", v)
}
}
-// SourceGithubUpdateAuthenticationPersonalAccessToken - Choose how to authenticate to GitHub
-type SourceGithubUpdateAuthenticationPersonalAccessToken struct {
- OptionTitle *SourceGithubUpdateAuthenticationPersonalAccessTokenOptionTitle `json:"option_title,omitempty"`
+// SourceGithubUpdatePersonalAccessToken - Choose how to authenticate to GitHub
+type SourceGithubUpdatePersonalAccessToken struct {
+ optionTitle *SourceGithubUpdateOptionTitle `const:"PAT Credentials" json:"option_title,omitempty"`
// Log into GitHub and then generate a personal access token. To load balance your API quota consumption across multiple API tokens, input multiple tokens separated with ","
PersonalAccessToken string `json:"personal_access_token"`
}
-type SourceGithubUpdateAuthenticationOAuthOptionTitle string
+func (s SourceGithubUpdatePersonalAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGithubUpdatePersonalAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGithubUpdatePersonalAccessToken) GetOptionTitle() *SourceGithubUpdateOptionTitle {
+ return SourceGithubUpdateOptionTitlePatCredentials.ToPointer()
+}
+
+func (o *SourceGithubUpdatePersonalAccessToken) GetPersonalAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.PersonalAccessToken
+}
+
+type OptionTitle string
const (
- SourceGithubUpdateAuthenticationOAuthOptionTitleOAuthCredentials SourceGithubUpdateAuthenticationOAuthOptionTitle = "OAuth Credentials"
+ OptionTitleOAuthCredentials OptionTitle = "OAuth Credentials"
)
-func (e SourceGithubUpdateAuthenticationOAuthOptionTitle) ToPointer() *SourceGithubUpdateAuthenticationOAuthOptionTitle {
+func (e OptionTitle) ToPointer() *OptionTitle {
return &e
}
-func (e *SourceGithubUpdateAuthenticationOAuthOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *OptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth Credentials":
- *e = SourceGithubUpdateAuthenticationOAuthOptionTitle(v)
+ *e = OptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGithubUpdateAuthenticationOAuthOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for OptionTitle: %v", v)
}
}
-// SourceGithubUpdateAuthenticationOAuth - Choose how to authenticate to GitHub
-type SourceGithubUpdateAuthenticationOAuth struct {
+// OAuth - Choose how to authenticate to GitHub
+type OAuth struct {
// OAuth access token
AccessToken string `json:"access_token"`
// OAuth Client Id
ClientID *string `json:"client_id,omitempty"`
// OAuth Client secret
- ClientSecret *string `json:"client_secret,omitempty"`
- OptionTitle *SourceGithubUpdateAuthenticationOAuthOptionTitle `json:"option_title,omitempty"`
+ ClientSecret *string `json:"client_secret,omitempty"`
+ optionTitle *OptionTitle `const:"OAuth Credentials" json:"option_title,omitempty"`
+}
+
+func (o OAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(o, "", false)
+}
+
+func (o *OAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &o, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *OAuth) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *OAuth) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *OAuth) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *OAuth) GetOptionTitle() *OptionTitle {
+ return OptionTitleOAuthCredentials.ToPointer()
}
type SourceGithubUpdateAuthenticationType string
const (
- SourceGithubUpdateAuthenticationTypeSourceGithubUpdateAuthenticationOAuth SourceGithubUpdateAuthenticationType = "source-github-update_Authentication_OAuth"
- SourceGithubUpdateAuthenticationTypeSourceGithubUpdateAuthenticationPersonalAccessToken SourceGithubUpdateAuthenticationType = "source-github-update_Authentication_Personal Access Token"
+ SourceGithubUpdateAuthenticationTypeOAuth SourceGithubUpdateAuthenticationType = "OAuth"
+ SourceGithubUpdateAuthenticationTypeSourceGithubUpdatePersonalAccessToken SourceGithubUpdateAuthenticationType = "source-github-update_Personal Access Token"
)
type SourceGithubUpdateAuthentication struct {
- SourceGithubUpdateAuthenticationOAuth *SourceGithubUpdateAuthenticationOAuth
- SourceGithubUpdateAuthenticationPersonalAccessToken *SourceGithubUpdateAuthenticationPersonalAccessToken
+ OAuth *OAuth
+ SourceGithubUpdatePersonalAccessToken *SourceGithubUpdatePersonalAccessToken
Type SourceGithubUpdateAuthenticationType
}
-func CreateSourceGithubUpdateAuthenticationSourceGithubUpdateAuthenticationOAuth(sourceGithubUpdateAuthenticationOAuth SourceGithubUpdateAuthenticationOAuth) SourceGithubUpdateAuthentication {
- typ := SourceGithubUpdateAuthenticationTypeSourceGithubUpdateAuthenticationOAuth
+func CreateSourceGithubUpdateAuthenticationOAuth(oAuth OAuth) SourceGithubUpdateAuthentication {
+ typ := SourceGithubUpdateAuthenticationTypeOAuth
return SourceGithubUpdateAuthentication{
- SourceGithubUpdateAuthenticationOAuth: &sourceGithubUpdateAuthenticationOAuth,
- Type: typ,
+ OAuth: &oAuth,
+ Type: typ,
}
}
-func CreateSourceGithubUpdateAuthenticationSourceGithubUpdateAuthenticationPersonalAccessToken(sourceGithubUpdateAuthenticationPersonalAccessToken SourceGithubUpdateAuthenticationPersonalAccessToken) SourceGithubUpdateAuthentication {
- typ := SourceGithubUpdateAuthenticationTypeSourceGithubUpdateAuthenticationPersonalAccessToken
+func CreateSourceGithubUpdateAuthenticationSourceGithubUpdatePersonalAccessToken(sourceGithubUpdatePersonalAccessToken SourceGithubUpdatePersonalAccessToken) SourceGithubUpdateAuthentication {
+ typ := SourceGithubUpdateAuthenticationTypeSourceGithubUpdatePersonalAccessToken
return SourceGithubUpdateAuthentication{
- SourceGithubUpdateAuthenticationPersonalAccessToken: &sourceGithubUpdateAuthenticationPersonalAccessToken,
- Type: typ,
+ SourceGithubUpdatePersonalAccessToken: &sourceGithubUpdatePersonalAccessToken,
+ Type: typ,
}
}
func (u *SourceGithubUpdateAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceGithubUpdateAuthenticationPersonalAccessToken := new(SourceGithubUpdateAuthenticationPersonalAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGithubUpdateAuthenticationPersonalAccessToken); err == nil {
- u.SourceGithubUpdateAuthenticationPersonalAccessToken = sourceGithubUpdateAuthenticationPersonalAccessToken
- u.Type = SourceGithubUpdateAuthenticationTypeSourceGithubUpdateAuthenticationPersonalAccessToken
+
+ sourceGithubUpdatePersonalAccessToken := new(SourceGithubUpdatePersonalAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceGithubUpdatePersonalAccessToken, "", true, true); err == nil {
+ u.SourceGithubUpdatePersonalAccessToken = sourceGithubUpdatePersonalAccessToken
+ u.Type = SourceGithubUpdateAuthenticationTypeSourceGithubUpdatePersonalAccessToken
return nil
}
- sourceGithubUpdateAuthenticationOAuth := new(SourceGithubUpdateAuthenticationOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGithubUpdateAuthenticationOAuth); err == nil {
- u.SourceGithubUpdateAuthenticationOAuth = sourceGithubUpdateAuthenticationOAuth
- u.Type = SourceGithubUpdateAuthenticationTypeSourceGithubUpdateAuthenticationOAuth
+ oAuth := new(OAuth)
+ if err := utils.UnmarshalJSON(data, &oAuth, "", true, true); err == nil {
+ u.OAuth = oAuth
+ u.Type = SourceGithubUpdateAuthenticationTypeOAuth
return nil
}
@@ -133,26 +186,99 @@ func (u *SourceGithubUpdateAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceGithubUpdateAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceGithubUpdateAuthenticationPersonalAccessToken != nil {
- return json.Marshal(u.SourceGithubUpdateAuthenticationPersonalAccessToken)
+ if u.OAuth != nil {
+ return utils.MarshalJSON(u.OAuth, "", true)
}
- if u.SourceGithubUpdateAuthenticationOAuth != nil {
- return json.Marshal(u.SourceGithubUpdateAuthenticationOAuth)
+ if u.SourceGithubUpdatePersonalAccessToken != nil {
+ return utils.MarshalJSON(u.SourceGithubUpdatePersonalAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceGithubUpdate struct {
- // Space-delimited list of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.
+ // Please enter your basic URL from self-hosted GitHub instance or leave it empty to use GitHub.
+ APIURL *string `default:"https://api.github.com/" json:"api_url"`
+ // (DEPRECATED) Space-delimited list of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.
Branch *string `json:"branch,omitempty"`
+ // List of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.
+ Branches []string `json:"branches,omitempty"`
// Choose how to authenticate to GitHub
- Credentials *SourceGithubUpdateAuthentication `json:"credentials,omitempty"`
- // Space-delimited list of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories.
- Repository string `json:"repository"`
+ Credentials SourceGithubUpdateAuthentication `json:"credentials"`
+ // List of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories.
+ Repositories []string `json:"repositories"`
+ // (DEPRECATED) Space-delimited list of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories.
+ Repository *string `json:"repository,omitempty"`
// The GitHub API allows for a maximum of 5000 requests per hour (15000 for Github Enterprise). You can specify a lower value to limit your use of the API quota.
RequestsPerHour *int64 `json:"requests_per_hour,omitempty"`
- // The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info
- StartDate time.Time `json:"start_date"`
+ // The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. If the date is not set, all data will be replicated. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceGithubUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGithubUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGithubUpdate) GetAPIURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIURL
+}
+
+func (o *SourceGithubUpdate) GetBranch() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Branch
+}
+
+func (o *SourceGithubUpdate) GetBranches() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Branches
+}
+
+func (o *SourceGithubUpdate) GetCredentials() SourceGithubUpdateAuthentication {
+ if o == nil {
+ return SourceGithubUpdateAuthentication{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceGithubUpdate) GetRepositories() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Repositories
+}
+
+func (o *SourceGithubUpdate) GetRepository() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Repository
+}
+
+func (o *SourceGithubUpdate) GetRequestsPerHour() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.RequestsPerHour
+}
+
+func (o *SourceGithubUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcegitlab.go b/internal/sdk/pkg/models/shared/sourcegitlab.go
old mode 100755
new mode 100644
index a8dcf47da..a1a7b10ac
--- a/internal/sdk/pkg/models/shared/sourcegitlab.go
+++ b/internal/sdk/pkg/models/shared/sourcegitlab.go
@@ -3,71 +3,93 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceGitlabAuthorizationMethodPrivateTokenAuthType string
+type SourceGitlabSchemasAuthType string
const (
- SourceGitlabAuthorizationMethodPrivateTokenAuthTypeAccessToken SourceGitlabAuthorizationMethodPrivateTokenAuthType = "access_token"
+ SourceGitlabSchemasAuthTypeAccessToken SourceGitlabSchemasAuthType = "access_token"
)
-func (e SourceGitlabAuthorizationMethodPrivateTokenAuthType) ToPointer() *SourceGitlabAuthorizationMethodPrivateTokenAuthType {
+func (e SourceGitlabSchemasAuthType) ToPointer() *SourceGitlabSchemasAuthType {
return &e
}
-func (e *SourceGitlabAuthorizationMethodPrivateTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGitlabSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceGitlabAuthorizationMethodPrivateTokenAuthType(v)
+ *e = SourceGitlabSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGitlabAuthorizationMethodPrivateTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGitlabSchemasAuthType: %v", v)
}
}
-type SourceGitlabAuthorizationMethodPrivateToken struct {
+type SourceGitlabPrivateToken struct {
// Log into your Gitlab account and then generate a personal Access Token.
- AccessToken string `json:"access_token"`
- AuthType *SourceGitlabAuthorizationMethodPrivateTokenAuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceGitlabSchemasAuthType `const:"access_token" json:"auth_type,omitempty"`
}
-type SourceGitlabAuthorizationMethodOAuth20AuthType string
+func (s SourceGitlabPrivateToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGitlabPrivateToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGitlabPrivateToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceGitlabPrivateToken) GetAuthType() *SourceGitlabSchemasAuthType {
+ return SourceGitlabSchemasAuthTypeAccessToken.ToPointer()
+}
+
+type SourceGitlabAuthType string
const (
- SourceGitlabAuthorizationMethodOAuth20AuthTypeOauth20 SourceGitlabAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceGitlabAuthTypeOauth20 SourceGitlabAuthType = "oauth2.0"
)
-func (e SourceGitlabAuthorizationMethodOAuth20AuthType) ToPointer() *SourceGitlabAuthorizationMethodOAuth20AuthType {
+func (e SourceGitlabAuthType) ToPointer() *SourceGitlabAuthType {
return &e
}
-func (e *SourceGitlabAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGitlabAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceGitlabAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceGitlabAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGitlabAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGitlabAuthType: %v", v)
}
}
-type SourceGitlabAuthorizationMethodOAuth20 struct {
+type SourceGitlabOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType *SourceGitlabAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceGitlabAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The API ID of the Gitlab developer application.
ClientID string `json:"client_id"`
// The API Secret the Gitlab developer application.
@@ -78,56 +100,101 @@ type SourceGitlabAuthorizationMethodOAuth20 struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (s SourceGitlabOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGitlabOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGitlabOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceGitlabOAuth20) GetAuthType() *SourceGitlabAuthType {
+ return SourceGitlabAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceGitlabOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceGitlabOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGitlabOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceGitlabOAuth20) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceGitlabAuthorizationMethodType string
const (
- SourceGitlabAuthorizationMethodTypeSourceGitlabAuthorizationMethodOAuth20 SourceGitlabAuthorizationMethodType = "source-gitlab_Authorization Method_OAuth2.0"
- SourceGitlabAuthorizationMethodTypeSourceGitlabAuthorizationMethodPrivateToken SourceGitlabAuthorizationMethodType = "source-gitlab_Authorization Method_Private Token"
+ SourceGitlabAuthorizationMethodTypeSourceGitlabOAuth20 SourceGitlabAuthorizationMethodType = "source-gitlab_OAuth2.0"
+ SourceGitlabAuthorizationMethodTypeSourceGitlabPrivateToken SourceGitlabAuthorizationMethodType = "source-gitlab_Private Token"
)
type SourceGitlabAuthorizationMethod struct {
- SourceGitlabAuthorizationMethodOAuth20 *SourceGitlabAuthorizationMethodOAuth20
- SourceGitlabAuthorizationMethodPrivateToken *SourceGitlabAuthorizationMethodPrivateToken
+ SourceGitlabOAuth20 *SourceGitlabOAuth20
+ SourceGitlabPrivateToken *SourceGitlabPrivateToken
Type SourceGitlabAuthorizationMethodType
}
-func CreateSourceGitlabAuthorizationMethodSourceGitlabAuthorizationMethodOAuth20(sourceGitlabAuthorizationMethodOAuth20 SourceGitlabAuthorizationMethodOAuth20) SourceGitlabAuthorizationMethod {
- typ := SourceGitlabAuthorizationMethodTypeSourceGitlabAuthorizationMethodOAuth20
+func CreateSourceGitlabAuthorizationMethodSourceGitlabOAuth20(sourceGitlabOAuth20 SourceGitlabOAuth20) SourceGitlabAuthorizationMethod {
+ typ := SourceGitlabAuthorizationMethodTypeSourceGitlabOAuth20
return SourceGitlabAuthorizationMethod{
- SourceGitlabAuthorizationMethodOAuth20: &sourceGitlabAuthorizationMethodOAuth20,
- Type: typ,
+ SourceGitlabOAuth20: &sourceGitlabOAuth20,
+ Type: typ,
}
}
-func CreateSourceGitlabAuthorizationMethodSourceGitlabAuthorizationMethodPrivateToken(sourceGitlabAuthorizationMethodPrivateToken SourceGitlabAuthorizationMethodPrivateToken) SourceGitlabAuthorizationMethod {
- typ := SourceGitlabAuthorizationMethodTypeSourceGitlabAuthorizationMethodPrivateToken
+func CreateSourceGitlabAuthorizationMethodSourceGitlabPrivateToken(sourceGitlabPrivateToken SourceGitlabPrivateToken) SourceGitlabAuthorizationMethod {
+ typ := SourceGitlabAuthorizationMethodTypeSourceGitlabPrivateToken
return SourceGitlabAuthorizationMethod{
- SourceGitlabAuthorizationMethodPrivateToken: &sourceGitlabAuthorizationMethodPrivateToken,
- Type: typ,
+ SourceGitlabPrivateToken: &sourceGitlabPrivateToken,
+ Type: typ,
}
}
func (u *SourceGitlabAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceGitlabAuthorizationMethodPrivateToken := new(SourceGitlabAuthorizationMethodPrivateToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGitlabAuthorizationMethodPrivateToken); err == nil {
- u.SourceGitlabAuthorizationMethodPrivateToken = sourceGitlabAuthorizationMethodPrivateToken
- u.Type = SourceGitlabAuthorizationMethodTypeSourceGitlabAuthorizationMethodPrivateToken
+
+ sourceGitlabPrivateToken := new(SourceGitlabPrivateToken)
+ if err := utils.UnmarshalJSON(data, &sourceGitlabPrivateToken, "", true, true); err == nil {
+ u.SourceGitlabPrivateToken = sourceGitlabPrivateToken
+ u.Type = SourceGitlabAuthorizationMethodTypeSourceGitlabPrivateToken
return nil
}
- sourceGitlabAuthorizationMethodOAuth20 := new(SourceGitlabAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGitlabAuthorizationMethodOAuth20); err == nil {
- u.SourceGitlabAuthorizationMethodOAuth20 = sourceGitlabAuthorizationMethodOAuth20
- u.Type = SourceGitlabAuthorizationMethodTypeSourceGitlabAuthorizationMethodOAuth20
+ sourceGitlabOAuth20 := new(SourceGitlabOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceGitlabOAuth20, "", true, true); err == nil {
+ u.SourceGitlabOAuth20 = sourceGitlabOAuth20
+ u.Type = SourceGitlabAuthorizationMethodTypeSourceGitlabOAuth20
return nil
}
@@ -135,50 +202,118 @@ func (u *SourceGitlabAuthorizationMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceGitlabAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceGitlabAuthorizationMethodPrivateToken != nil {
- return json.Marshal(u.SourceGitlabAuthorizationMethodPrivateToken)
+ if u.SourceGitlabOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceGitlabOAuth20, "", true)
}
- if u.SourceGitlabAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceGitlabAuthorizationMethodOAuth20)
+ if u.SourceGitlabPrivateToken != nil {
+ return utils.MarshalJSON(u.SourceGitlabPrivateToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceGitlabGitlab string
+type Gitlab string
const (
- SourceGitlabGitlabGitlab SourceGitlabGitlab = "gitlab"
+ GitlabGitlab Gitlab = "gitlab"
)
-func (e SourceGitlabGitlab) ToPointer() *SourceGitlabGitlab {
+func (e Gitlab) ToPointer() *Gitlab {
return &e
}
-func (e *SourceGitlabGitlab) UnmarshalJSON(data []byte) error {
+func (e *Gitlab) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "gitlab":
- *e = SourceGitlabGitlab(v)
+ *e = Gitlab(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGitlabGitlab: %v", v)
+ return fmt.Errorf("invalid value for Gitlab: %v", v)
}
}
type SourceGitlab struct {
// Please enter your basic URL from GitLab instance.
- APIURL *string `json:"api_url,omitempty"`
+ APIURL *string `default:"gitlab.com" json:"api_url"`
Credentials SourceGitlabAuthorizationMethod `json:"credentials"`
- // Space-delimited list of groups. e.g. airbyte.io.
+ // [DEPRECATED] Space-delimited list of groups. e.g. airbyte.io.
Groups *string `json:"groups,omitempty"`
+ // List of groups. e.g. airbyte.io.
+ GroupsList []string `json:"groups_list,omitempty"`
+ // [DEPRECATED] Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.
+ Projects *string `json:"projects,omitempty"`
// Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.
- Projects *string `json:"projects,omitempty"`
- SourceType SourceGitlabGitlab `json:"sourceType"`
- // The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
- StartDate time.Time `json:"start_date"`
+ ProjectsList []string `json:"projects_list,omitempty"`
+ sourceType Gitlab `const:"gitlab" json:"sourceType"`
+ // The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data will be replicated. All data generated after this date will be replicated.
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceGitlab) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGitlab) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGitlab) GetAPIURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIURL
+}
+
+func (o *SourceGitlab) GetCredentials() SourceGitlabAuthorizationMethod {
+ if o == nil {
+ return SourceGitlabAuthorizationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceGitlab) GetGroups() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Groups
+}
+
+func (o *SourceGitlab) GetGroupsList() []string {
+ if o == nil {
+ return nil
+ }
+ return o.GroupsList
+}
+
+func (o *SourceGitlab) GetProjects() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Projects
+}
+
+func (o *SourceGitlab) GetProjectsList() []string {
+ if o == nil {
+ return nil
+ }
+ return o.ProjectsList
+}
+
+func (o *SourceGitlab) GetSourceType() Gitlab {
+ return GitlabGitlab
+}
+
+func (o *SourceGitlab) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcegitlabcreaterequest.go b/internal/sdk/pkg/models/shared/sourcegitlabcreaterequest.go
old mode 100755
new mode 100644
index e58e300b8..db83504c5
--- a/internal/sdk/pkg/models/shared/sourcegitlabcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegitlabcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGitlabCreateRequest struct {
Configuration SourceGitlab `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGitlabCreateRequest) GetConfiguration() SourceGitlab {
+ if o == nil {
+ return SourceGitlab{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGitlabCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGitlabCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGitlabCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGitlabCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegitlabputrequest.go b/internal/sdk/pkg/models/shared/sourcegitlabputrequest.go
old mode 100755
new mode 100644
index b90614f7c..66732e516
--- a/internal/sdk/pkg/models/shared/sourcegitlabputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegitlabputrequest.go
@@ -7,3 +7,24 @@ type SourceGitlabPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGitlabPutRequest) GetConfiguration() SourceGitlabUpdate {
+ if o == nil {
+ return SourceGitlabUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGitlabPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGitlabPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegitlabupdate.go b/internal/sdk/pkg/models/shared/sourcegitlabupdate.go
old mode 100755
new mode 100644
index 6cf89870f..8560f2808
--- a/internal/sdk/pkg/models/shared/sourcegitlabupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegitlabupdate.go
@@ -3,71 +3,93 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceGitlabUpdateAuthorizationMethodPrivateTokenAuthType string
+type SourceGitlabUpdateSchemasAuthType string
const (
- SourceGitlabUpdateAuthorizationMethodPrivateTokenAuthTypeAccessToken SourceGitlabUpdateAuthorizationMethodPrivateTokenAuthType = "access_token"
+ SourceGitlabUpdateSchemasAuthTypeAccessToken SourceGitlabUpdateSchemasAuthType = "access_token"
)
-func (e SourceGitlabUpdateAuthorizationMethodPrivateTokenAuthType) ToPointer() *SourceGitlabUpdateAuthorizationMethodPrivateTokenAuthType {
+func (e SourceGitlabUpdateSchemasAuthType) ToPointer() *SourceGitlabUpdateSchemasAuthType {
return &e
}
-func (e *SourceGitlabUpdateAuthorizationMethodPrivateTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGitlabUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceGitlabUpdateAuthorizationMethodPrivateTokenAuthType(v)
+ *e = SourceGitlabUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGitlabUpdateAuthorizationMethodPrivateTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGitlabUpdateSchemasAuthType: %v", v)
}
}
-type SourceGitlabUpdateAuthorizationMethodPrivateToken struct {
+type PrivateToken struct {
// Log into your Gitlab account and then generate a personal Access Token.
- AccessToken string `json:"access_token"`
- AuthType *SourceGitlabUpdateAuthorizationMethodPrivateTokenAuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceGitlabUpdateSchemasAuthType `const:"access_token" json:"auth_type,omitempty"`
}
-type SourceGitlabUpdateAuthorizationMethodOAuth20AuthType string
+func (p PrivateToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(p, "", false)
+}
+
+func (p *PrivateToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &p, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *PrivateToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *PrivateToken) GetAuthType() *SourceGitlabUpdateSchemasAuthType {
+ return SourceGitlabUpdateSchemasAuthTypeAccessToken.ToPointer()
+}
+
+type SourceGitlabUpdateAuthType string
const (
- SourceGitlabUpdateAuthorizationMethodOAuth20AuthTypeOauth20 SourceGitlabUpdateAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceGitlabUpdateAuthTypeOauth20 SourceGitlabUpdateAuthType = "oauth2.0"
)
-func (e SourceGitlabUpdateAuthorizationMethodOAuth20AuthType) ToPointer() *SourceGitlabUpdateAuthorizationMethodOAuth20AuthType {
+func (e SourceGitlabUpdateAuthType) ToPointer() *SourceGitlabUpdateAuthType {
return &e
}
-func (e *SourceGitlabUpdateAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGitlabUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceGitlabUpdateAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceGitlabUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGitlabUpdateAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGitlabUpdateAuthType: %v", v)
}
}
-type SourceGitlabUpdateAuthorizationMethodOAuth20 struct {
+type SourceGitlabUpdateOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType *SourceGitlabUpdateAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceGitlabUpdateAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The API ID of the Gitlab developer application.
ClientID string `json:"client_id"`
// The API Secret the Gitlab developer application.
@@ -78,56 +100,101 @@ type SourceGitlabUpdateAuthorizationMethodOAuth20 struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (s SourceGitlabUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGitlabUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGitlabUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceGitlabUpdateOAuth20) GetAuthType() *SourceGitlabUpdateAuthType {
+ return SourceGitlabUpdateAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceGitlabUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceGitlabUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGitlabUpdateOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceGitlabUpdateOAuth20) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceGitlabUpdateAuthorizationMethodType string
const (
- SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateAuthorizationMethodOAuth20 SourceGitlabUpdateAuthorizationMethodType = "source-gitlab-update_Authorization Method_OAuth2.0"
- SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateAuthorizationMethodPrivateToken SourceGitlabUpdateAuthorizationMethodType = "source-gitlab-update_Authorization Method_Private Token"
+ SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateOAuth20 SourceGitlabUpdateAuthorizationMethodType = "source-gitlab-update_OAuth2.0"
+ SourceGitlabUpdateAuthorizationMethodTypePrivateToken SourceGitlabUpdateAuthorizationMethodType = "Private Token"
)
type SourceGitlabUpdateAuthorizationMethod struct {
- SourceGitlabUpdateAuthorizationMethodOAuth20 *SourceGitlabUpdateAuthorizationMethodOAuth20
- SourceGitlabUpdateAuthorizationMethodPrivateToken *SourceGitlabUpdateAuthorizationMethodPrivateToken
+ SourceGitlabUpdateOAuth20 *SourceGitlabUpdateOAuth20
+ PrivateToken *PrivateToken
Type SourceGitlabUpdateAuthorizationMethodType
}
-func CreateSourceGitlabUpdateAuthorizationMethodSourceGitlabUpdateAuthorizationMethodOAuth20(sourceGitlabUpdateAuthorizationMethodOAuth20 SourceGitlabUpdateAuthorizationMethodOAuth20) SourceGitlabUpdateAuthorizationMethod {
- typ := SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateAuthorizationMethodOAuth20
+func CreateSourceGitlabUpdateAuthorizationMethodSourceGitlabUpdateOAuth20(sourceGitlabUpdateOAuth20 SourceGitlabUpdateOAuth20) SourceGitlabUpdateAuthorizationMethod {
+ typ := SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateOAuth20
return SourceGitlabUpdateAuthorizationMethod{
- SourceGitlabUpdateAuthorizationMethodOAuth20: &sourceGitlabUpdateAuthorizationMethodOAuth20,
- Type: typ,
+ SourceGitlabUpdateOAuth20: &sourceGitlabUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceGitlabUpdateAuthorizationMethodSourceGitlabUpdateAuthorizationMethodPrivateToken(sourceGitlabUpdateAuthorizationMethodPrivateToken SourceGitlabUpdateAuthorizationMethodPrivateToken) SourceGitlabUpdateAuthorizationMethod {
- typ := SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateAuthorizationMethodPrivateToken
+func CreateSourceGitlabUpdateAuthorizationMethodPrivateToken(privateToken PrivateToken) SourceGitlabUpdateAuthorizationMethod {
+ typ := SourceGitlabUpdateAuthorizationMethodTypePrivateToken
return SourceGitlabUpdateAuthorizationMethod{
- SourceGitlabUpdateAuthorizationMethodPrivateToken: &sourceGitlabUpdateAuthorizationMethodPrivateToken,
- Type: typ,
+ PrivateToken: &privateToken,
+ Type: typ,
}
}
func (u *SourceGitlabUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceGitlabUpdateAuthorizationMethodPrivateToken := new(SourceGitlabUpdateAuthorizationMethodPrivateToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGitlabUpdateAuthorizationMethodPrivateToken); err == nil {
- u.SourceGitlabUpdateAuthorizationMethodPrivateToken = sourceGitlabUpdateAuthorizationMethodPrivateToken
- u.Type = SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateAuthorizationMethodPrivateToken
+
+ privateToken := new(PrivateToken)
+ if err := utils.UnmarshalJSON(data, &privateToken, "", true, true); err == nil {
+ u.PrivateToken = privateToken
+ u.Type = SourceGitlabUpdateAuthorizationMethodTypePrivateToken
return nil
}
- sourceGitlabUpdateAuthorizationMethodOAuth20 := new(SourceGitlabUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGitlabUpdateAuthorizationMethodOAuth20); err == nil {
- u.SourceGitlabUpdateAuthorizationMethodOAuth20 = sourceGitlabUpdateAuthorizationMethodOAuth20
- u.Type = SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateAuthorizationMethodOAuth20
+ sourceGitlabUpdateOAuth20 := new(SourceGitlabUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceGitlabUpdateOAuth20, "", true, true); err == nil {
+ u.SourceGitlabUpdateOAuth20 = sourceGitlabUpdateOAuth20
+ u.Type = SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateOAuth20
return nil
}
@@ -135,25 +202,89 @@ func (u *SourceGitlabUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error
}
func (u SourceGitlabUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceGitlabUpdateAuthorizationMethodPrivateToken != nil {
- return json.Marshal(u.SourceGitlabUpdateAuthorizationMethodPrivateToken)
+ if u.SourceGitlabUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceGitlabUpdateOAuth20, "", true)
}
- if u.SourceGitlabUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceGitlabUpdateAuthorizationMethodOAuth20)
+ if u.PrivateToken != nil {
+ return utils.MarshalJSON(u.PrivateToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceGitlabUpdate struct {
// Please enter your basic URL from GitLab instance.
- APIURL *string `json:"api_url,omitempty"`
+ APIURL *string `default:"gitlab.com" json:"api_url"`
Credentials SourceGitlabUpdateAuthorizationMethod `json:"credentials"`
- // Space-delimited list of groups. e.g. airbyte.io.
+ // [DEPRECATED] Space-delimited list of groups. e.g. airbyte.io.
Groups *string `json:"groups,omitempty"`
- // Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.
+ // List of groups. e.g. airbyte.io.
+ GroupsList []string `json:"groups_list,omitempty"`
+ // [DEPRECATED] Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.
Projects *string `json:"projects,omitempty"`
- // The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
- StartDate time.Time `json:"start_date"`
+ // Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.
+ ProjectsList []string `json:"projects_list,omitempty"`
+ // The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data will be replicated. All data generated after this date will be replicated.
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceGitlabUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGitlabUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGitlabUpdate) GetAPIURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIURL
+}
+
+func (o *SourceGitlabUpdate) GetCredentials() SourceGitlabUpdateAuthorizationMethod {
+ if o == nil {
+ return SourceGitlabUpdateAuthorizationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceGitlabUpdate) GetGroups() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Groups
+}
+
+func (o *SourceGitlabUpdate) GetGroupsList() []string {
+ if o == nil {
+ return nil
+ }
+ return o.GroupsList
+}
+
+func (o *SourceGitlabUpdate) GetProjects() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Projects
+}
+
+func (o *SourceGitlabUpdate) GetProjectsList() []string {
+ if o == nil {
+ return nil
+ }
+ return o.ProjectsList
+}
+
+func (o *SourceGitlabUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceglassfrog.go b/internal/sdk/pkg/models/shared/sourceglassfrog.go
old mode 100755
new mode 100644
index 17254d680..df1b185fe
--- a/internal/sdk/pkg/models/shared/sourceglassfrog.go
+++ b/internal/sdk/pkg/models/shared/sourceglassfrog.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGlassfrogGlassfrog string
+type Glassfrog string
const (
- SourceGlassfrogGlassfrogGlassfrog SourceGlassfrogGlassfrog = "glassfrog"
+ GlassfrogGlassfrog Glassfrog = "glassfrog"
)
-func (e SourceGlassfrogGlassfrog) ToPointer() *SourceGlassfrogGlassfrog {
+func (e Glassfrog) ToPointer() *Glassfrog {
return &e
}
-func (e *SourceGlassfrogGlassfrog) UnmarshalJSON(data []byte) error {
+func (e *Glassfrog) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "glassfrog":
- *e = SourceGlassfrogGlassfrog(v)
+ *e = Glassfrog(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGlassfrogGlassfrog: %v", v)
+ return fmt.Errorf("invalid value for Glassfrog: %v", v)
}
}
type SourceGlassfrog struct {
// API key provided by Glassfrog
- APIKey string `json:"api_key"`
- SourceType SourceGlassfrogGlassfrog `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Glassfrog `const:"glassfrog" json:"sourceType"`
+}
+
+func (s SourceGlassfrog) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGlassfrog) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGlassfrog) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceGlassfrog) GetSourceType() Glassfrog {
+ return GlassfrogGlassfrog
}
diff --git a/internal/sdk/pkg/models/shared/sourceglassfrogcreaterequest.go b/internal/sdk/pkg/models/shared/sourceglassfrogcreaterequest.go
old mode 100755
new mode 100644
index 7ff3c660e..3b502a19f
--- a/internal/sdk/pkg/models/shared/sourceglassfrogcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceglassfrogcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGlassfrogCreateRequest struct {
Configuration SourceGlassfrog `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGlassfrogCreateRequest) GetConfiguration() SourceGlassfrog {
+ if o == nil {
+ return SourceGlassfrog{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGlassfrogCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGlassfrogCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGlassfrogCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGlassfrogCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceglassfrogputrequest.go b/internal/sdk/pkg/models/shared/sourceglassfrogputrequest.go
old mode 100755
new mode 100644
index 7bedcd583..08e16a3e3
--- a/internal/sdk/pkg/models/shared/sourceglassfrogputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceglassfrogputrequest.go
@@ -7,3 +7,24 @@ type SourceGlassfrogPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGlassfrogPutRequest) GetConfiguration() SourceGlassfrogUpdate {
+ if o == nil {
+ return SourceGlassfrogUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGlassfrogPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGlassfrogPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceglassfrogupdate.go b/internal/sdk/pkg/models/shared/sourceglassfrogupdate.go
old mode 100755
new mode 100644
index 69e240799..d90e5997b
--- a/internal/sdk/pkg/models/shared/sourceglassfrogupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceglassfrogupdate.go
@@ -6,3 +6,10 @@ type SourceGlassfrogUpdate struct {
// API key provided by Glassfrog
APIKey string `json:"api_key"`
}
+
+func (o *SourceGlassfrogUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegnews.go b/internal/sdk/pkg/models/shared/sourcegnews.go
old mode 100755
new mode 100644
index 04bda8a84..a755acb09
--- a/internal/sdk/pkg/models/shared/sourcegnews.go
+++ b/internal/sdk/pkg/models/shared/sourcegnews.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourceGnewsCountry - This parameter allows you to specify the country where the news articles returned by the API were published, the contents of the articles are not necessarily related to the specified country. You have to set as value the 2 letters code of the country you want to filter.
@@ -296,27 +297,27 @@ func (e *SourceGnewsSortBy) UnmarshalJSON(data []byte) error {
}
}
-type SourceGnewsGnews string
+type Gnews string
const (
- SourceGnewsGnewsGnews SourceGnewsGnews = "gnews"
+ GnewsGnews Gnews = "gnews"
)
-func (e SourceGnewsGnews) ToPointer() *SourceGnewsGnews {
+func (e Gnews) ToPointer() *Gnews {
return &e
}
-func (e *SourceGnewsGnews) UnmarshalJSON(data []byte) error {
+func (e *Gnews) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "gnews":
- *e = SourceGnewsGnews(v)
+ *e = Gnews(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGnewsGnews: %v", v)
+ return fmt.Errorf("invalid value for Gnews: %v", v)
}
}
@@ -400,7 +401,7 @@ type SourceGnews struct {
// - publishedAt = sort by publication date, the articles with the most recent publication date are returned first
// - relevance = sort by best match to keywords, the articles with the best match are returned first
Sortby *SourceGnewsSortBy `json:"sortby,omitempty"`
- SourceType SourceGnewsGnews `json:"sourceType"`
+ sourceType Gnews `const:"gnews" json:"sourceType"`
// This parameter allows you to filter the articles that have a publication date greater than or equal to the specified value. The date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)
StartDate *string `json:"start_date,omitempty"`
// This parameter allows you to specify your search keywords to find the news articles you are looking for. The keywords will be used to return the most relevant articles. It is possible to use logical operators with keywords. - Phrase Search Operator: This operator allows you to make an exact search. Keywords surrounded by
@@ -421,3 +422,95 @@ type SourceGnews struct {
// This parameter allows you to change the category for the request.
TopHeadlinesTopic *SourceGnewsTopHeadlinesTopic `json:"top_headlines_topic,omitempty"`
}
+
+func (s SourceGnews) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGnews) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGnews) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceGnews) GetCountry() *SourceGnewsCountry {
+ if o == nil {
+ return nil
+ }
+ return o.Country
+}
+
+func (o *SourceGnews) GetEndDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceGnews) GetIn() []SourceGnewsIn {
+ if o == nil {
+ return nil
+ }
+ return o.In
+}
+
+func (o *SourceGnews) GetLanguage() *SourceGnewsLanguage {
+ if o == nil {
+ return nil
+ }
+ return o.Language
+}
+
+func (o *SourceGnews) GetNullable() []SourceGnewsNullable {
+ if o == nil {
+ return nil
+ }
+ return o.Nullable
+}
+
+func (o *SourceGnews) GetQuery() string {
+ if o == nil {
+ return ""
+ }
+ return o.Query
+}
+
+func (o *SourceGnews) GetSortby() *SourceGnewsSortBy {
+ if o == nil {
+ return nil
+ }
+ return o.Sortby
+}
+
+func (o *SourceGnews) GetSourceType() Gnews {
+ return GnewsGnews
+}
+
+func (o *SourceGnews) GetStartDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceGnews) GetTopHeadlinesQuery() *string {
+ if o == nil {
+ return nil
+ }
+ return o.TopHeadlinesQuery
+}
+
+func (o *SourceGnews) GetTopHeadlinesTopic() *SourceGnewsTopHeadlinesTopic {
+ if o == nil {
+ return nil
+ }
+ return o.TopHeadlinesTopic
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegnewscreaterequest.go b/internal/sdk/pkg/models/shared/sourcegnewscreaterequest.go
old mode 100755
new mode 100644
index 0756a9d19..00e86398d
--- a/internal/sdk/pkg/models/shared/sourcegnewscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegnewscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGnewsCreateRequest struct {
Configuration SourceGnews `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGnewsCreateRequest) GetConfiguration() SourceGnews {
+ if o == nil {
+ return SourceGnews{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGnewsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGnewsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGnewsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGnewsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegnewsputrequest.go b/internal/sdk/pkg/models/shared/sourcegnewsputrequest.go
old mode 100755
new mode 100644
index 54e679c58..e7ad9f0d2
--- a/internal/sdk/pkg/models/shared/sourcegnewsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegnewsputrequest.go
@@ -7,3 +7,24 @@ type SourceGnewsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGnewsPutRequest) GetConfiguration() SourceGnewsUpdate {
+ if o == nil {
+ return SourceGnewsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGnewsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGnewsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegnewsupdate.go b/internal/sdk/pkg/models/shared/sourcegnewsupdate.go
old mode 100755
new mode 100644
index 36814783d..4f74e25ae
--- a/internal/sdk/pkg/models/shared/sourcegnewsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegnewsupdate.go
@@ -7,47 +7,47 @@ import (
"fmt"
)
-// SourceGnewsUpdateCountry - This parameter allows you to specify the country where the news articles returned by the API were published, the contents of the articles are not necessarily related to the specified country. You have to set as value the 2 letters code of the country you want to filter.
-type SourceGnewsUpdateCountry string
+// Country - This parameter allows you to specify the country where the news articles returned by the API were published, the contents of the articles are not necessarily related to the specified country. You have to set as value the 2 letters code of the country you want to filter.
+type Country string
const (
- SourceGnewsUpdateCountryAu SourceGnewsUpdateCountry = "au"
- SourceGnewsUpdateCountryBr SourceGnewsUpdateCountry = "br"
- SourceGnewsUpdateCountryCa SourceGnewsUpdateCountry = "ca"
- SourceGnewsUpdateCountryCn SourceGnewsUpdateCountry = "cn"
- SourceGnewsUpdateCountryEg SourceGnewsUpdateCountry = "eg"
- SourceGnewsUpdateCountryFr SourceGnewsUpdateCountry = "fr"
- SourceGnewsUpdateCountryDe SourceGnewsUpdateCountry = "de"
- SourceGnewsUpdateCountryGr SourceGnewsUpdateCountry = "gr"
- SourceGnewsUpdateCountryHk SourceGnewsUpdateCountry = "hk"
- SourceGnewsUpdateCountryIn SourceGnewsUpdateCountry = "in"
- SourceGnewsUpdateCountryIe SourceGnewsUpdateCountry = "ie"
- SourceGnewsUpdateCountryIl SourceGnewsUpdateCountry = "il"
- SourceGnewsUpdateCountryIt SourceGnewsUpdateCountry = "it"
- SourceGnewsUpdateCountryJp SourceGnewsUpdateCountry = "jp"
- SourceGnewsUpdateCountryNl SourceGnewsUpdateCountry = "nl"
- SourceGnewsUpdateCountryNo SourceGnewsUpdateCountry = "no"
- SourceGnewsUpdateCountryPk SourceGnewsUpdateCountry = "pk"
- SourceGnewsUpdateCountryPe SourceGnewsUpdateCountry = "pe"
- SourceGnewsUpdateCountryPh SourceGnewsUpdateCountry = "ph"
- SourceGnewsUpdateCountryPt SourceGnewsUpdateCountry = "pt"
- SourceGnewsUpdateCountryRo SourceGnewsUpdateCountry = "ro"
- SourceGnewsUpdateCountryRu SourceGnewsUpdateCountry = "ru"
- SourceGnewsUpdateCountrySg SourceGnewsUpdateCountry = "sg"
- SourceGnewsUpdateCountryEs SourceGnewsUpdateCountry = "es"
- SourceGnewsUpdateCountrySe SourceGnewsUpdateCountry = "se"
- SourceGnewsUpdateCountryCh SourceGnewsUpdateCountry = "ch"
- SourceGnewsUpdateCountryTw SourceGnewsUpdateCountry = "tw"
- SourceGnewsUpdateCountryUa SourceGnewsUpdateCountry = "ua"
- SourceGnewsUpdateCountryGb SourceGnewsUpdateCountry = "gb"
- SourceGnewsUpdateCountryUs SourceGnewsUpdateCountry = "us"
+ CountryAu Country = "au"
+ CountryBr Country = "br"
+ CountryCa Country = "ca"
+ CountryCn Country = "cn"
+ CountryEg Country = "eg"
+ CountryFr Country = "fr"
+ CountryDe Country = "de"
+ CountryGr Country = "gr"
+ CountryHk Country = "hk"
+ CountryIn Country = "in"
+ CountryIe Country = "ie"
+ CountryIl Country = "il"
+ CountryIt Country = "it"
+ CountryJp Country = "jp"
+ CountryNl Country = "nl"
+ CountryNo Country = "no"
+ CountryPk Country = "pk"
+ CountryPe Country = "pe"
+ CountryPh Country = "ph"
+ CountryPt Country = "pt"
+ CountryRo Country = "ro"
+ CountryRu Country = "ru"
+ CountrySg Country = "sg"
+ CountryEs Country = "es"
+ CountrySe Country = "se"
+ CountryCh Country = "ch"
+ CountryTw Country = "tw"
+ CountryUa Country = "ua"
+ CountryGb Country = "gb"
+ CountryUs Country = "us"
)
-func (e SourceGnewsUpdateCountry) ToPointer() *SourceGnewsUpdateCountry {
+func (e Country) ToPointer() *Country {
return &e
}
-func (e *SourceGnewsUpdateCountry) UnmarshalJSON(data []byte) error {
+func (e *Country) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -112,26 +112,26 @@ func (e *SourceGnewsUpdateCountry) UnmarshalJSON(data []byte) error {
case "gb":
fallthrough
case "us":
- *e = SourceGnewsUpdateCountry(v)
+ *e = Country(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGnewsUpdateCountry: %v", v)
+ return fmt.Errorf("invalid value for Country: %v", v)
}
}
-type SourceGnewsUpdateIn string
+type In string
const (
- SourceGnewsUpdateInTitle SourceGnewsUpdateIn = "title"
- SourceGnewsUpdateInDescription SourceGnewsUpdateIn = "description"
- SourceGnewsUpdateInContent SourceGnewsUpdateIn = "content"
+ InTitle In = "title"
+ InDescription In = "description"
+ InContent In = "content"
)
-func (e SourceGnewsUpdateIn) ToPointer() *SourceGnewsUpdateIn {
+func (e In) ToPointer() *In {
return &e
}
-func (e *SourceGnewsUpdateIn) UnmarshalJSON(data []byte) error {
+func (e *In) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -142,45 +142,45 @@ func (e *SourceGnewsUpdateIn) UnmarshalJSON(data []byte) error {
case "description":
fallthrough
case "content":
- *e = SourceGnewsUpdateIn(v)
+ *e = In(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGnewsUpdateIn: %v", v)
+ return fmt.Errorf("invalid value for In: %v", v)
}
}
-type SourceGnewsUpdateLanguage string
+type Language string
const (
- SourceGnewsUpdateLanguageAr SourceGnewsUpdateLanguage = "ar"
- SourceGnewsUpdateLanguageZh SourceGnewsUpdateLanguage = "zh"
- SourceGnewsUpdateLanguageNl SourceGnewsUpdateLanguage = "nl"
- SourceGnewsUpdateLanguageEn SourceGnewsUpdateLanguage = "en"
- SourceGnewsUpdateLanguageFr SourceGnewsUpdateLanguage = "fr"
- SourceGnewsUpdateLanguageDe SourceGnewsUpdateLanguage = "de"
- SourceGnewsUpdateLanguageEl SourceGnewsUpdateLanguage = "el"
- SourceGnewsUpdateLanguageHe SourceGnewsUpdateLanguage = "he"
- SourceGnewsUpdateLanguageHi SourceGnewsUpdateLanguage = "hi"
- SourceGnewsUpdateLanguageIt SourceGnewsUpdateLanguage = "it"
- SourceGnewsUpdateLanguageJa SourceGnewsUpdateLanguage = "ja"
- SourceGnewsUpdateLanguageMl SourceGnewsUpdateLanguage = "ml"
- SourceGnewsUpdateLanguageMr SourceGnewsUpdateLanguage = "mr"
- SourceGnewsUpdateLanguageNo SourceGnewsUpdateLanguage = "no"
- SourceGnewsUpdateLanguagePt SourceGnewsUpdateLanguage = "pt"
- SourceGnewsUpdateLanguageRo SourceGnewsUpdateLanguage = "ro"
- SourceGnewsUpdateLanguageRu SourceGnewsUpdateLanguage = "ru"
- SourceGnewsUpdateLanguageEs SourceGnewsUpdateLanguage = "es"
- SourceGnewsUpdateLanguageSv SourceGnewsUpdateLanguage = "sv"
- SourceGnewsUpdateLanguageTa SourceGnewsUpdateLanguage = "ta"
- SourceGnewsUpdateLanguageTe SourceGnewsUpdateLanguage = "te"
- SourceGnewsUpdateLanguageUk SourceGnewsUpdateLanguage = "uk"
+ LanguageAr Language = "ar"
+ LanguageZh Language = "zh"
+ LanguageNl Language = "nl"
+ LanguageEn Language = "en"
+ LanguageFr Language = "fr"
+ LanguageDe Language = "de"
+ LanguageEl Language = "el"
+ LanguageHe Language = "he"
+ LanguageHi Language = "hi"
+ LanguageIt Language = "it"
+ LanguageJa Language = "ja"
+ LanguageMl Language = "ml"
+ LanguageMr Language = "mr"
+ LanguageNo Language = "no"
+ LanguagePt Language = "pt"
+ LanguageRo Language = "ro"
+ LanguageRu Language = "ru"
+ LanguageEs Language = "es"
+ LanguageSv Language = "sv"
+ LanguageTa Language = "ta"
+ LanguageTe Language = "te"
+ LanguageUk Language = "uk"
)
-func (e SourceGnewsUpdateLanguage) ToPointer() *SourceGnewsUpdateLanguage {
+func (e Language) ToPointer() *Language {
return &e
}
-func (e *SourceGnewsUpdateLanguage) UnmarshalJSON(data []byte) error {
+func (e *Language) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -229,26 +229,26 @@ func (e *SourceGnewsUpdateLanguage) UnmarshalJSON(data []byte) error {
case "te":
fallthrough
case "uk":
- *e = SourceGnewsUpdateLanguage(v)
+ *e = Language(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGnewsUpdateLanguage: %v", v)
+ return fmt.Errorf("invalid value for Language: %v", v)
}
}
-type SourceGnewsUpdateNullable string
+type Nullable string
const (
- SourceGnewsUpdateNullableTitle SourceGnewsUpdateNullable = "title"
- SourceGnewsUpdateNullableDescription SourceGnewsUpdateNullable = "description"
- SourceGnewsUpdateNullableContent SourceGnewsUpdateNullable = "content"
+ NullableTitle Nullable = "title"
+ NullableDescription Nullable = "description"
+ NullableContent Nullable = "content"
)
-func (e SourceGnewsUpdateNullable) ToPointer() *SourceGnewsUpdateNullable {
+func (e Nullable) ToPointer() *Nullable {
return &e
}
-func (e *SourceGnewsUpdateNullable) UnmarshalJSON(data []byte) error {
+func (e *Nullable) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -259,28 +259,28 @@ func (e *SourceGnewsUpdateNullable) UnmarshalJSON(data []byte) error {
case "description":
fallthrough
case "content":
- *e = SourceGnewsUpdateNullable(v)
+ *e = Nullable(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGnewsUpdateNullable: %v", v)
+ return fmt.Errorf("invalid value for Nullable: %v", v)
}
}
-// SourceGnewsUpdateSortBy - This parameter allows you to choose with which type of sorting the articles should be returned. Two values are possible:
+// SortBy - This parameter allows you to choose with which type of sorting the articles should be returned. Two values are possible:
// - publishedAt = sort by publication date, the articles with the most recent publication date are returned first
// - relevance = sort by best match to keywords, the articles with the best match are returned first
-type SourceGnewsUpdateSortBy string
+type SortBy string
const (
- SourceGnewsUpdateSortByPublishedAt SourceGnewsUpdateSortBy = "publishedAt"
- SourceGnewsUpdateSortByRelevance SourceGnewsUpdateSortBy = "relevance"
+ SortByPublishedAt SortBy = "publishedAt"
+ SortByRelevance SortBy = "relevance"
)
-func (e SourceGnewsUpdateSortBy) ToPointer() *SourceGnewsUpdateSortBy {
+func (e SortBy) ToPointer() *SortBy {
return &e
}
-func (e *SourceGnewsUpdateSortBy) UnmarshalJSON(data []byte) error {
+func (e *SortBy) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -289,33 +289,33 @@ func (e *SourceGnewsUpdateSortBy) UnmarshalJSON(data []byte) error {
case "publishedAt":
fallthrough
case "relevance":
- *e = SourceGnewsUpdateSortBy(v)
+ *e = SortBy(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGnewsUpdateSortBy: %v", v)
+ return fmt.Errorf("invalid value for SortBy: %v", v)
}
}
-// SourceGnewsUpdateTopHeadlinesTopic - This parameter allows you to change the category for the request.
-type SourceGnewsUpdateTopHeadlinesTopic string
+// TopHeadlinesTopic - This parameter allows you to change the category for the request.
+type TopHeadlinesTopic string
const (
- SourceGnewsUpdateTopHeadlinesTopicBreakingNews SourceGnewsUpdateTopHeadlinesTopic = "breaking-news"
- SourceGnewsUpdateTopHeadlinesTopicWorld SourceGnewsUpdateTopHeadlinesTopic = "world"
- SourceGnewsUpdateTopHeadlinesTopicNation SourceGnewsUpdateTopHeadlinesTopic = "nation"
- SourceGnewsUpdateTopHeadlinesTopicBusiness SourceGnewsUpdateTopHeadlinesTopic = "business"
- SourceGnewsUpdateTopHeadlinesTopicTechnology SourceGnewsUpdateTopHeadlinesTopic = "technology"
- SourceGnewsUpdateTopHeadlinesTopicEntertainment SourceGnewsUpdateTopHeadlinesTopic = "entertainment"
- SourceGnewsUpdateTopHeadlinesTopicSports SourceGnewsUpdateTopHeadlinesTopic = "sports"
- SourceGnewsUpdateTopHeadlinesTopicScience SourceGnewsUpdateTopHeadlinesTopic = "science"
- SourceGnewsUpdateTopHeadlinesTopicHealth SourceGnewsUpdateTopHeadlinesTopic = "health"
+ TopHeadlinesTopicBreakingNews TopHeadlinesTopic = "breaking-news"
+ TopHeadlinesTopicWorld TopHeadlinesTopic = "world"
+ TopHeadlinesTopicNation TopHeadlinesTopic = "nation"
+ TopHeadlinesTopicBusiness TopHeadlinesTopic = "business"
+ TopHeadlinesTopicTechnology TopHeadlinesTopic = "technology"
+ TopHeadlinesTopicEntertainment TopHeadlinesTopic = "entertainment"
+ TopHeadlinesTopicSports TopHeadlinesTopic = "sports"
+ TopHeadlinesTopicScience TopHeadlinesTopic = "science"
+ TopHeadlinesTopicHealth TopHeadlinesTopic = "health"
)
-func (e SourceGnewsUpdateTopHeadlinesTopic) ToPointer() *SourceGnewsUpdateTopHeadlinesTopic {
+func (e TopHeadlinesTopic) ToPointer() *TopHeadlinesTopic {
return &e
}
-func (e *SourceGnewsUpdateTopHeadlinesTopic) UnmarshalJSON(data []byte) error {
+func (e *TopHeadlinesTopic) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -338,10 +338,10 @@ func (e *SourceGnewsUpdateTopHeadlinesTopic) UnmarshalJSON(data []byte) error {
case "science":
fallthrough
case "health":
- *e = SourceGnewsUpdateTopHeadlinesTopic(v)
+ *e = TopHeadlinesTopic(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGnewsUpdateTopHeadlinesTopic: %v", v)
+ return fmt.Errorf("invalid value for TopHeadlinesTopic: %v", v)
}
}
@@ -349,14 +349,14 @@ type SourceGnewsUpdate struct {
// API Key
APIKey string `json:"api_key"`
// This parameter allows you to specify the country where the news articles returned by the API were published, the contents of the articles are not necessarily related to the specified country. You have to set as value the 2 letters code of the country you want to filter.
- Country *SourceGnewsUpdateCountry `json:"country,omitempty"`
+ Country *Country `json:"country,omitempty"`
// This parameter allows you to filter the articles that have a publication date smaller than or equal to the specified value. The date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)
EndDate *string `json:"end_date,omitempty"`
// This parameter allows you to choose in which attributes the keywords are searched. The attributes that can be set are title, description and content. It is possible to combine several attributes.
- In []SourceGnewsUpdateIn `json:"in,omitempty"`
- Language *SourceGnewsUpdateLanguage `json:"language,omitempty"`
+ In []In `json:"in,omitempty"`
+ Language *Language `json:"language,omitempty"`
// This parameter allows you to specify the attributes that you allow to return null values. The attributes that can be set are title, description and content. It is possible to combine several attributes
- Nullable []SourceGnewsUpdateNullable `json:"nullable,omitempty"`
+ Nullable []Nullable `json:"nullable,omitempty"`
// This parameter allows you to specify your search keywords to find the news articles you are looking for. The keywords will be used to return the most relevant articles. It is possible to use logical operators with keywords. - Phrase Search Operator: This operator allows you to make an exact search. Keywords surrounded by
// quotation marks are used to search for articles with the exact same keyword sequence.
// For example the query: "Apple iPhone" will return articles matching at least once this sequence of keywords.
@@ -375,7 +375,7 @@ type SourceGnewsUpdate struct {
// This parameter allows you to choose with which type of sorting the articles should be returned. Two values are possible:
// - publishedAt = sort by publication date, the articles with the most recent publication date are returned first
// - relevance = sort by best match to keywords, the articles with the best match are returned first
- Sortby *SourceGnewsUpdateSortBy `json:"sortby,omitempty"`
+ Sortby *SortBy `json:"sortby,omitempty"`
// This parameter allows you to filter the articles that have a publication date greater than or equal to the specified value. The date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)
StartDate *string `json:"start_date,omitempty"`
// This parameter allows you to specify your search keywords to find the news articles you are looking for. The keywords will be used to return the most relevant articles. It is possible to use logical operators with keywords. - Phrase Search Operator: This operator allows you to make an exact search. Keywords surrounded by
@@ -394,5 +394,82 @@ type SourceGnewsUpdate struct {
// iPhone
TopHeadlinesQuery *string `json:"top_headlines_query,omitempty"`
// This parameter allows you to change the category for the request.
- TopHeadlinesTopic *SourceGnewsUpdateTopHeadlinesTopic `json:"top_headlines_topic,omitempty"`
+ TopHeadlinesTopic *TopHeadlinesTopic `json:"top_headlines_topic,omitempty"`
+}
+
+func (o *SourceGnewsUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceGnewsUpdate) GetCountry() *Country {
+ if o == nil {
+ return nil
+ }
+ return o.Country
+}
+
+func (o *SourceGnewsUpdate) GetEndDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceGnewsUpdate) GetIn() []In {
+ if o == nil {
+ return nil
+ }
+ return o.In
+}
+
+func (o *SourceGnewsUpdate) GetLanguage() *Language {
+ if o == nil {
+ return nil
+ }
+ return o.Language
+}
+
+func (o *SourceGnewsUpdate) GetNullable() []Nullable {
+ if o == nil {
+ return nil
+ }
+ return o.Nullable
+}
+
+func (o *SourceGnewsUpdate) GetQuery() string {
+ if o == nil {
+ return ""
+ }
+ return o.Query
+}
+
+func (o *SourceGnewsUpdate) GetSortby() *SortBy {
+ if o == nil {
+ return nil
+ }
+ return o.Sortby
+}
+
+func (o *SourceGnewsUpdate) GetStartDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceGnewsUpdate) GetTopHeadlinesQuery() *string {
+ if o == nil {
+ return nil
+ }
+ return o.TopHeadlinesQuery
+}
+
+func (o *SourceGnewsUpdate) GetTopHeadlinesTopic() *TopHeadlinesTopic {
+ if o == nil {
+ return nil
+ }
+ return o.TopHeadlinesTopic
}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleads.go b/internal/sdk/pkg/models/shared/sourcegoogleads.go
old mode 100755
new mode 100644
index d0d401ed9..08b92f3d8
--- a/internal/sdk/pkg/models/shared/sourcegoogleads.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleads.go
@@ -3,9 +3,10 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceGoogleAdsGoogleCredentials struct {
@@ -21,6 +22,41 @@ type SourceGoogleAdsGoogleCredentials struct {
RefreshToken string `json:"refresh_token"`
}
+func (o *SourceGoogleAdsGoogleCredentials) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceGoogleAdsGoogleCredentials) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceGoogleAdsGoogleCredentials) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGoogleAdsGoogleCredentials) GetDeveloperToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.DeveloperToken
+}
+
+func (o *SourceGoogleAdsGoogleCredentials) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceGoogleAdsCustomQueries struct {
// A custom defined GAQL query for building the report. Avoid including the segments.date field; wherever possible, Airbyte will automatically include it for incremental syncs. For more information, refer to Google's documentation.
Query string `json:"query"`
@@ -28,33 +64,47 @@ type SourceGoogleAdsCustomQueries struct {
TableName string `json:"table_name"`
}
-type SourceGoogleAdsGoogleAds string
+func (o *SourceGoogleAdsCustomQueries) GetQuery() string {
+ if o == nil {
+ return ""
+ }
+ return o.Query
+}
+
+func (o *SourceGoogleAdsCustomQueries) GetTableName() string {
+ if o == nil {
+ return ""
+ }
+ return o.TableName
+}
+
+type GoogleAds string
const (
- SourceGoogleAdsGoogleAdsGoogleAds SourceGoogleAdsGoogleAds = "google-ads"
+ GoogleAdsGoogleAds GoogleAds = "google-ads"
)
-func (e SourceGoogleAdsGoogleAds) ToPointer() *SourceGoogleAdsGoogleAds {
+func (e GoogleAds) ToPointer() *GoogleAds {
return &e
}
-func (e *SourceGoogleAdsGoogleAds) UnmarshalJSON(data []byte) error {
+func (e *GoogleAds) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "google-ads":
- *e = SourceGoogleAdsGoogleAds(v)
+ *e = GoogleAds(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleAdsGoogleAds: %v", v)
+ return fmt.Errorf("invalid value for GoogleAds: %v", v)
}
}
type SourceGoogleAds struct {
// A conversion window is the number of days after an ad interaction (such as an ad click or video view) during which a conversion, such as a purchase, is recorded in Google Ads. For more information, see Google's documentation.
- ConversionWindowDays *int64 `json:"conversion_window_days,omitempty"`
+ ConversionWindowDays *int64 `default:"14" json:"conversion_window_days"`
Credentials SourceGoogleAdsGoogleCredentials `json:"credentials"`
CustomQueries []SourceGoogleAdsCustomQueries `json:"custom_queries,omitempty"`
// Comma-separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. For detailed instructions on finding this value, refer to our documentation.
@@ -62,8 +112,72 @@ type SourceGoogleAds struct {
// UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set)
EndDate *types.Date `json:"end_date,omitempty"`
// If your access to the customer account is through a manager account, this field is required, and must be set to the 10-digit customer ID of the manager account. For more information about this field, refer to Google's documentation.
- LoginCustomerID *string `json:"login_customer_id,omitempty"`
- SourceType SourceGoogleAdsGoogleAds `json:"sourceType"`
+ LoginCustomerID *string `json:"login_customer_id,omitempty"`
+ sourceType GoogleAds `const:"google-ads" json:"sourceType"`
// UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. (Default value of two years ago is used if not set)
StartDate *types.Date `json:"start_date,omitempty"`
}
+
+func (s SourceGoogleAds) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAds) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAds) GetConversionWindowDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ConversionWindowDays
+}
+
+func (o *SourceGoogleAds) GetCredentials() SourceGoogleAdsGoogleCredentials {
+ if o == nil {
+ return SourceGoogleAdsGoogleCredentials{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceGoogleAds) GetCustomQueries() []SourceGoogleAdsCustomQueries {
+ if o == nil {
+ return nil
+ }
+ return o.CustomQueries
+}
+
+func (o *SourceGoogleAds) GetCustomerID() string {
+ if o == nil {
+ return ""
+ }
+ return o.CustomerID
+}
+
+func (o *SourceGoogleAds) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceGoogleAds) GetLoginCustomerID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LoginCustomerID
+}
+
+func (o *SourceGoogleAds) GetSourceType() GoogleAds {
+ return GoogleAdsGoogleAds
+}
+
+func (o *SourceGoogleAds) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleadscreaterequest.go b/internal/sdk/pkg/models/shared/sourcegoogleadscreaterequest.go
old mode 100755
new mode 100644
index e4db2238f..818443be3
--- a/internal/sdk/pkg/models/shared/sourcegoogleadscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleadscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGoogleAdsCreateRequest struct {
Configuration SourceGoogleAds `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleAdsCreateRequest) GetConfiguration() SourceGoogleAds {
+ if o == nil {
+ return SourceGoogleAds{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleAdsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGoogleAdsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleAdsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGoogleAdsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleadsputrequest.go b/internal/sdk/pkg/models/shared/sourcegoogleadsputrequest.go
old mode 100755
new mode 100644
index d63194028..584ef4f8d
--- a/internal/sdk/pkg/models/shared/sourcegoogleadsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleadsputrequest.go
@@ -7,3 +7,24 @@ type SourceGoogleAdsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleAdsPutRequest) GetConfiguration() SourceGoogleAdsUpdate {
+ if o == nil {
+ return SourceGoogleAdsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleAdsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleAdsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleadsupdate.go b/internal/sdk/pkg/models/shared/sourcegoogleadsupdate.go
old mode 100755
new mode 100644
index 8da356740..6cd12518b
--- a/internal/sdk/pkg/models/shared/sourcegoogleadsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleadsupdate.go
@@ -3,10 +3,11 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGoogleAdsUpdateGoogleCredentials struct {
+type GoogleCredentials struct {
// The Access Token for making authenticated requests. For detailed instructions on finding this value, refer to our documentation.
AccessToken *string `json:"access_token,omitempty"`
// The Client ID of your Google Ads developer application. For detailed instructions on finding this value, refer to our documentation.
@@ -19,18 +20,67 @@ type SourceGoogleAdsUpdateGoogleCredentials struct {
RefreshToken string `json:"refresh_token"`
}
-type SourceGoogleAdsUpdateCustomQueries struct {
+func (o *GoogleCredentials) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *GoogleCredentials) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *GoogleCredentials) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *GoogleCredentials) GetDeveloperToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.DeveloperToken
+}
+
+func (o *GoogleCredentials) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+type CustomQueries struct {
// A custom defined GAQL query for building the report. Avoid including the segments.date field; wherever possible, Airbyte will automatically include it for incremental syncs. For more information, refer to Google's documentation.
Query string `json:"query"`
// The table name in your destination database for the chosen query.
TableName string `json:"table_name"`
}
+func (o *CustomQueries) GetQuery() string {
+ if o == nil {
+ return ""
+ }
+ return o.Query
+}
+
+func (o *CustomQueries) GetTableName() string {
+ if o == nil {
+ return ""
+ }
+ return o.TableName
+}
+
type SourceGoogleAdsUpdate struct {
// A conversion window is the number of days after an ad interaction (such as an ad click or video view) during which a conversion, such as a purchase, is recorded in Google Ads. For more information, see Google's documentation.
- ConversionWindowDays *int64 `json:"conversion_window_days,omitempty"`
- Credentials SourceGoogleAdsUpdateGoogleCredentials `json:"credentials"`
- CustomQueries []SourceGoogleAdsUpdateCustomQueries `json:"custom_queries,omitempty"`
+ ConversionWindowDays *int64 `default:"14" json:"conversion_window_days"`
+ Credentials GoogleCredentials `json:"credentials"`
+ CustomQueries []CustomQueries `json:"custom_queries,omitempty"`
// Comma-separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. For detailed instructions on finding this value, refer to our documentation.
CustomerID string `json:"customer_id"`
// UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set)
@@ -40,3 +90,63 @@ type SourceGoogleAdsUpdate struct {
// UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. (Default value of two years ago is used if not set)
StartDate *types.Date `json:"start_date,omitempty"`
}
+
+func (s SourceGoogleAdsUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAdsUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAdsUpdate) GetConversionWindowDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.ConversionWindowDays
+}
+
+func (o *SourceGoogleAdsUpdate) GetCredentials() GoogleCredentials {
+ if o == nil {
+ return GoogleCredentials{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceGoogleAdsUpdate) GetCustomQueries() []CustomQueries {
+ if o == nil {
+ return nil
+ }
+ return o.CustomQueries
+}
+
+func (o *SourceGoogleAdsUpdate) GetCustomerID() string {
+ if o == nil {
+ return ""
+ }
+ return o.CustomerID
+}
+
+func (o *SourceGoogleAdsUpdate) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceGoogleAdsUpdate) GetLoginCustomerID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LoginCustomerID
+}
+
+func (o *SourceGoogleAdsUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapi.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapi.go
old mode 100755
new mode 100644
index 7ce86a4bd..4e82f5fc4
--- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapi.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapi.go
@@ -3,73 +3,95 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType string
+type SourceGoogleAnalyticsDataAPISchemasAuthType string
const (
- SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthTypeService SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType = "Service"
+ SourceGoogleAnalyticsDataAPISchemasAuthTypeService SourceGoogleAnalyticsDataAPISchemasAuthType = "Service"
)
-func (e SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType) ToPointer() *SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType {
+func (e SourceGoogleAnalyticsDataAPISchemasAuthType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasAuthType {
return &e
}
-func (e *SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleAnalyticsDataAPISchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Service":
- *e = SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType(v)
+ *e = SourceGoogleAnalyticsDataAPISchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasAuthType: %v", v)
}
}
-// SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication - Credentials for the service
-type SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication struct {
- AuthType *SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthenticationAuthType `json:"auth_type,omitempty"`
+// SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication - Credentials for the service
+type SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication struct {
+ authType *SourceGoogleAnalyticsDataAPISchemasAuthType `const:"Service" json:"auth_type,omitempty"`
// The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide.
CredentialsJSON string `json:"credentials_json"`
}
-type SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType string
+func (s SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication) GetAuthType() *SourceGoogleAnalyticsDataAPISchemasAuthType {
+ return SourceGoogleAnalyticsDataAPISchemasAuthTypeService.ToPointer()
+}
+
+func (o *SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication) GetCredentialsJSON() string {
+ if o == nil {
+ return ""
+ }
+ return o.CredentialsJSON
+}
+
+type SourceGoogleAnalyticsDataAPIAuthType string
const (
- SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthTypeClient SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType = "Client"
+ SourceGoogleAnalyticsDataAPIAuthTypeClient SourceGoogleAnalyticsDataAPIAuthType = "Client"
)
-func (e SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType) ToPointer() *SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType {
+func (e SourceGoogleAnalyticsDataAPIAuthType) ToPointer() *SourceGoogleAnalyticsDataAPIAuthType {
return &e
}
-func (e *SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleAnalyticsDataAPIAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType(v)
+ *e = SourceGoogleAnalyticsDataAPIAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIAuthType: %v", v)
}
}
-// SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth - Credentials for the service
-type SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth struct {
+// SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth - Credentials for the service
+type SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth struct {
// Access Token for making authenticated requests.
- AccessToken *string `json:"access_token,omitempty"`
- AuthType *SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauthAuthType `json:"auth_type,omitempty"`
+ AccessToken *string `json:"access_token,omitempty"`
+ authType *SourceGoogleAnalyticsDataAPIAuthType `const:"Client" json:"auth_type,omitempty"`
// The Client ID of your Google Analytics developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Google Analytics developer application.
@@ -78,56 +100,94 @@ type SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth) GetAuthType() *SourceGoogleAnalyticsDataAPIAuthType {
+ return SourceGoogleAnalyticsDataAPIAuthTypeClient.ToPointer()
+}
+
+func (o *SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceGoogleAnalyticsDataAPICredentialsType string
const (
- SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth SourceGoogleAnalyticsDataAPICredentialsType = "source-google-analytics-data-api_Credentials_Authenticate via Google (Oauth)"
- SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication SourceGoogleAnalyticsDataAPICredentialsType = "source-google-analytics-data-api_Credentials_Service Account Key Authentication"
+ SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth SourceGoogleAnalyticsDataAPICredentialsType = "source-google-analytics-data-api_Authenticate via Google (Oauth)"
+ SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication SourceGoogleAnalyticsDataAPICredentialsType = "source-google-analytics-data-api_Service Account Key Authentication"
)
type SourceGoogleAnalyticsDataAPICredentials struct {
- SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth *SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth
- SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication *SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication
+ SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth *SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth
+ SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication *SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication
Type SourceGoogleAnalyticsDataAPICredentialsType
}
-func CreateSourceGoogleAnalyticsDataAPICredentialsSourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth(sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth) SourceGoogleAnalyticsDataAPICredentials {
- typ := SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth
+func CreateSourceGoogleAnalyticsDataAPICredentialsSourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth(sourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth) SourceGoogleAnalyticsDataAPICredentials {
+ typ := SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth
return SourceGoogleAnalyticsDataAPICredentials{
- SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth: &sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth,
+ SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth: &sourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth,
Type: typ,
}
}
-func CreateSourceGoogleAnalyticsDataAPICredentialsSourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication(sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication) SourceGoogleAnalyticsDataAPICredentials {
- typ := SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication
+func CreateSourceGoogleAnalyticsDataAPICredentialsSourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication(sourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication) SourceGoogleAnalyticsDataAPICredentials {
+ typ := SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication
return SourceGoogleAnalyticsDataAPICredentials{
- SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication: &sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication,
+ SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication: &sourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication,
Type: typ,
}
}
func (u *SourceGoogleAnalyticsDataAPICredentials) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication); err == nil {
- u.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication
- u.Type = SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication
+ sourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication = sourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication
+ u.Type = SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication
return nil
}
- sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth); err == nil {
- u.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth
- u.Type = SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth
+ sourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth = sourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth
+ u.Type = SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth
return nil
}
@@ -135,51 +195,8325 @@ func (u *SourceGoogleAnalyticsDataAPICredentials) UnmarshalJSON(data []byte) err
}
func (u SourceGoogleAnalyticsDataAPICredentials) MarshalJSON() ([]byte, error) {
- if u.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication != nil {
- return json.Marshal(u.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication)
+ if u.SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIAuthenticateViaGoogleOauth, "", true)
}
- if u.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth != nil {
- return json.Marshal(u.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth)
+ if u.SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI string
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName string
const (
- SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI = "google-analytics-data-api"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName = "betweenFilter"
)
-func (e SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI) ToPointer() *SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI {
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName {
return &e
}
-func (e *SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
- case "google-analytics-data-api":
- *e = SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI(v)
+ case "betweenFilter":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName: %v", v)
}
}
-type SourceGoogleAnalyticsDataAPI struct {
- // Credentials for the service
- Credentials *SourceGoogleAnalyticsDataAPICredentials `json:"credentials,omitempty"`
- // A JSON array describing the custom reports you want to sync from Google Analytics. See the documentation for more information about the exact format you can use to fill out this field.
- CustomReports *string `json:"custom_reports,omitempty"`
- // The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports.
- DateRangesStartDate types.Date `json:"date_ranges_start_date"`
- // The Property ID is a unique number assigned to each property in Google Analytics, found in your GA4 property URL. This ID allows the connector to track the specific events associated with your property. Refer to the Google Analytics documentation to locate your property ID.
- PropertyID string `json:"property_id"`
- SourceType SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI `json:"sourceType"`
- // The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. Does not apply to custom Cohort reports. More information is available in the documentation.
- WindowInDays *int64 `json:"window_in_days,omitempty"`
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType = "doubleValue"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "doubleValue":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPISchemasDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueTypeDoubleValue
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType = "int64Value"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "int64Value":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPISchemasInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueTypeInt64Value
+}
+
+type SourceGoogleAnalyticsDataAPIFromValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPIFromValueTypeSourceGoogleAnalyticsDataAPISchemasInt64Value SourceGoogleAnalyticsDataAPIFromValueType = "source-google-analytics-data-api_Schemas_int64Value"
+ SourceGoogleAnalyticsDataAPIFromValueTypeSourceGoogleAnalyticsDataAPISchemasDoubleValue SourceGoogleAnalyticsDataAPIFromValueType = "source-google-analytics-data-api_Schemas_doubleValue"
+)
+
+type SourceGoogleAnalyticsDataAPIFromValue struct {
+ SourceGoogleAnalyticsDataAPISchemasInt64Value *SourceGoogleAnalyticsDataAPISchemasInt64Value
+ SourceGoogleAnalyticsDataAPISchemasDoubleValue *SourceGoogleAnalyticsDataAPISchemasDoubleValue
+
+ Type SourceGoogleAnalyticsDataAPIFromValueType
+}
+
+func CreateSourceGoogleAnalyticsDataAPIFromValueSourceGoogleAnalyticsDataAPISchemasInt64Value(sourceGoogleAnalyticsDataAPISchemasInt64Value SourceGoogleAnalyticsDataAPISchemasInt64Value) SourceGoogleAnalyticsDataAPIFromValue {
+ typ := SourceGoogleAnalyticsDataAPIFromValueTypeSourceGoogleAnalyticsDataAPISchemasInt64Value
+
+ return SourceGoogleAnalyticsDataAPIFromValue{
+ SourceGoogleAnalyticsDataAPISchemasInt64Value: &sourceGoogleAnalyticsDataAPISchemasInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIFromValueSourceGoogleAnalyticsDataAPISchemasDoubleValue(sourceGoogleAnalyticsDataAPISchemasDoubleValue SourceGoogleAnalyticsDataAPISchemasDoubleValue) SourceGoogleAnalyticsDataAPIFromValue {
+ typ := SourceGoogleAnalyticsDataAPIFromValueTypeSourceGoogleAnalyticsDataAPISchemasDoubleValue
+
+ return SourceGoogleAnalyticsDataAPIFromValue{
+ SourceGoogleAnalyticsDataAPISchemasDoubleValue: &sourceGoogleAnalyticsDataAPISchemasDoubleValue,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleAnalyticsDataAPIFromValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasInt64Value := new(SourceGoogleAnalyticsDataAPISchemasInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasInt64Value = sourceGoogleAnalyticsDataAPISchemasInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPIFromValueTypeSourceGoogleAnalyticsDataAPISchemasInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasDoubleValue = sourceGoogleAnalyticsDataAPISchemasDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPIFromValueTypeSourceGoogleAnalyticsDataAPISchemasDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPIFromValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter4ValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter4ValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter4ValueType = "doubleValue"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter4ValueType {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter4ValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "doubleValue":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter4ValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter4ValueType: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter4ValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter4ValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter4ValueTypeDoubleValue
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType = "int64Value"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "int64Value":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPIToValueType names which member of the ToValue
+// union is populated.
+type SourceGoogleAnalyticsDataAPIToValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPIToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value SourceGoogleAnalyticsDataAPIToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_int64Value"
+ SourceGoogleAnalyticsDataAPIToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue SourceGoogleAnalyticsDataAPIToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPIToValue is a oneOf union for the betweenFilter
+// upper bound: exactly one of the two pointers is non-nil and Type records
+// which variant is set.
+type SourceGoogleAnalyticsDataAPIToValue struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue
+
+ Type SourceGoogleAnalyticsDataAPIToValueType
+}
+
+// CreateSourceGoogleAnalyticsDataAPIToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value
+// wraps an int64Value variant in the union with Type preset.
+func CreateSourceGoogleAnalyticsDataAPIToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value) SourceGoogleAnalyticsDataAPIToValue {
+ typ := SourceGoogleAnalyticsDataAPIToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value
+
+ return SourceGoogleAnalyticsDataAPIToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleAnalyticsDataAPIToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue
+// wraps a doubleValue variant in the union with Type preset.
+func CreateSourceGoogleAnalyticsDataAPIToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue) SourceGoogleAnalyticsDataAPIToValue {
+ typ := SourceGoogleAnalyticsDataAPIToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue
+
+ return SourceGoogleAnalyticsDataAPIToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON tries each variant in order and keeps the first that decodes
+// cleanly. NOTE(review): int64Value (string-typed Value) is tried before
+// doubleValue (float64-typed Value); a numeric payload fails the first attempt
+// and falls through to the second — preserve this order as generated.
+func (u *SourceGoogleAnalyticsDataAPIToValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPIToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPIToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON emits whichever variant pointer is non-nil; errors when both are nil.
+func (u SourceGoogleAnalyticsDataAPIToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPIBetweenFilter expresses a GA4 "betweenFilter"
+// dimension filter: matches values between FromValue and ToValue (inclusive,
+// per the Analytics Data API). The unexported filterName pins the
+// "filter_name" discriminator to "betweenFilter" via the `const` tag.
+type SourceGoogleAnalyticsDataAPIBetweenFilter struct {
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName `const:"betweenFilter" json:"filter_name"`
+ FromValue SourceGoogleAnalyticsDataAPIFromValue `json:"fromValue"`
+ ToValue SourceGoogleAnalyticsDataAPIToValue `json:"toValue"`
+}
+
+// MarshalJSON serializes through utils.MarshalJSON so the const discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPIBetweenFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via utils.UnmarshalJSON, validating the const discriminator.
+func (s *SourceGoogleAnalyticsDataAPIBetweenFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetFilterName always reports the fixed "betweenFilter" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterNameBetweenFilter
+}
+
+// GetFromValue returns FromValue, or the zero union on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPIBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPIFromValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIFromValue{}
+ }
+ return o.FromValue
+}
+
+// GetToValue returns ToValue, or the zero union on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPIBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPIToValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIToValue{}
+ }
+ return o.ToValue
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName is a
+// single-value enum: the "filter_name" discriminator for numericFilter.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterNameNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName = "numericFilter"
+)
+
+// ToPointer returns a pointer to a copy of e, for optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName {
+ return &e
+}
+
+// UnmarshalJSON accepts only the literal "numericFilter"; anything else errors.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "numericFilter":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasValidEnums is the closed set of numeric
+// comparison operations accepted by a GA4 numericFilter.
+type SourceGoogleAnalyticsDataAPISchemasValidEnums string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPISchemasValidEnums = "OPERATION_UNSPECIFIED"
+ SourceGoogleAnalyticsDataAPISchemasValidEnumsEqual SourceGoogleAnalyticsDataAPISchemasValidEnums = "EQUAL"
+ SourceGoogleAnalyticsDataAPISchemasValidEnumsLessThan SourceGoogleAnalyticsDataAPISchemasValidEnums = "LESS_THAN"
+ SourceGoogleAnalyticsDataAPISchemasValidEnumsLessThanOrEqual SourceGoogleAnalyticsDataAPISchemasValidEnums = "LESS_THAN_OR_EQUAL"
+ SourceGoogleAnalyticsDataAPISchemasValidEnumsGreaterThan SourceGoogleAnalyticsDataAPISchemasValidEnums = "GREATER_THAN"
+ SourceGoogleAnalyticsDataAPISchemasValidEnumsGreaterThanOrEqual SourceGoogleAnalyticsDataAPISchemasValidEnums = "GREATER_THAN_OR_EQUAL"
+)
+
+// ToPointer returns a pointer to a copy of e, for optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasValidEnums {
+ return &e
+}
+
+// UnmarshalJSON accepts only the enum members above (the fallthrough chain is
+// generator output equivalent to one multi-value case); anything else errors.
+func (e *SourceGoogleAnalyticsDataAPISchemasValidEnums) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "OPERATION_UNSPECIFIED":
+ fallthrough
+ case "EQUAL":
+ fallthrough
+ case "LESS_THAN":
+ fallthrough
+ case "LESS_THAN_OR_EQUAL":
+ fallthrough
+ case "GREATER_THAN":
+ fallthrough
+ case "GREATER_THAN_OR_EQUAL":
+ *e = SourceGoogleAnalyticsDataAPISchemasValidEnums(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasValidEnums: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasValueType is a single-value enum: the
+// "value_type" discriminator for doubleValue.
+type SourceGoogleAnalyticsDataAPISchemasValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of e, for optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasValueType {
+ return &e
+}
+
+// UnmarshalJSON accepts only the literal "doubleValue"; anything else errors.
+func (e *SourceGoogleAnalyticsDataAPISchemasValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "doubleValue":
+ *e = SourceGoogleAnalyticsDataAPISchemasValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasValueType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPIDoubleValue carries a float64 comparison value
+// for a GA4 numericFilter; valueType pins "value_type" to "doubleValue".
+type SourceGoogleAnalyticsDataAPIDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes through utils.MarshalJSON so the const discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPIDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via utils.UnmarshalJSON, validating the const discriminator.
+func (s *SourceGoogleAnalyticsDataAPIDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns Value, or 0.0 on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPIDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+// GetValueType always reports the fixed "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasValueType {
+ return SourceGoogleAnalyticsDataAPISchemasValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPIValueType is a single-value enum: the
+// "value_type" discriminator for int64Value.
+type SourceGoogleAnalyticsDataAPIValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPIValueTypeInt64Value SourceGoogleAnalyticsDataAPIValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to a copy of e, for optional fields.
+func (e SourceGoogleAnalyticsDataAPIValueType) ToPointer() *SourceGoogleAnalyticsDataAPIValueType {
+ return &e
+}
+
+// UnmarshalJSON accepts only the literal "int64Value"; anything else errors.
+func (e *SourceGoogleAnalyticsDataAPIValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "int64Value":
+ *e = SourceGoogleAnalyticsDataAPIValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIValueType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPIInt64Value carries an int64 comparison value for
+// a GA4 numericFilter. The integer is transported as a JSON string in Value;
+// valueType pins "value_type" to "int64Value".
+type SourceGoogleAnalyticsDataAPIInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON serializes through utils.MarshalJSON so the const discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPIInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via utils.UnmarshalJSON, validating the const discriminator.
+func (s *SourceGoogleAnalyticsDataAPIInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns Value, or "" on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPIInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+// GetValueType always reports the fixed "int64Value" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIValueType {
+ return SourceGoogleAnalyticsDataAPIValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPIValueUnionType names which member of the Value
+// union is populated.
+type SourceGoogleAnalyticsDataAPIValueUnionType string
+
+const (
+ SourceGoogleAnalyticsDataAPIValueUnionTypeSourceGoogleAnalyticsDataAPIInt64Value SourceGoogleAnalyticsDataAPIValueUnionType = "source-google-analytics-data-api_int64Value"
+ SourceGoogleAnalyticsDataAPIValueUnionTypeSourceGoogleAnalyticsDataAPIDoubleValue SourceGoogleAnalyticsDataAPIValueUnionType = "source-google-analytics-data-api_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPIValue is a oneOf union (int64Value / doubleValue)
+// for the numericFilter comparison value; exactly one pointer is non-nil and
+// Type records which.
+type SourceGoogleAnalyticsDataAPIValue struct {
+ SourceGoogleAnalyticsDataAPIInt64Value *SourceGoogleAnalyticsDataAPIInt64Value
+ SourceGoogleAnalyticsDataAPIDoubleValue *SourceGoogleAnalyticsDataAPIDoubleValue
+
+ Type SourceGoogleAnalyticsDataAPIValueUnionType
+}
+
+// CreateSourceGoogleAnalyticsDataAPIValueSourceGoogleAnalyticsDataAPIInt64Value
+// wraps an int64Value variant in the union with Type preset.
+func CreateSourceGoogleAnalyticsDataAPIValueSourceGoogleAnalyticsDataAPIInt64Value(sourceGoogleAnalyticsDataAPIInt64Value SourceGoogleAnalyticsDataAPIInt64Value) SourceGoogleAnalyticsDataAPIValue {
+ typ := SourceGoogleAnalyticsDataAPIValueUnionTypeSourceGoogleAnalyticsDataAPIInt64Value
+
+ return SourceGoogleAnalyticsDataAPIValue{
+ SourceGoogleAnalyticsDataAPIInt64Value: &sourceGoogleAnalyticsDataAPIInt64Value,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleAnalyticsDataAPIValueSourceGoogleAnalyticsDataAPIDoubleValue
+// wraps a doubleValue variant in the union with Type preset.
+func CreateSourceGoogleAnalyticsDataAPIValueSourceGoogleAnalyticsDataAPIDoubleValue(sourceGoogleAnalyticsDataAPIDoubleValue SourceGoogleAnalyticsDataAPIDoubleValue) SourceGoogleAnalyticsDataAPIValue {
+ typ := SourceGoogleAnalyticsDataAPIValueUnionTypeSourceGoogleAnalyticsDataAPIDoubleValue
+
+ return SourceGoogleAnalyticsDataAPIValue{
+ SourceGoogleAnalyticsDataAPIDoubleValue: &sourceGoogleAnalyticsDataAPIDoubleValue,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON tries int64Value (string Value) before doubleValue (float64
+// Value); a numeric payload falls through to the second attempt. NOTE(review):
+// preserve this order as generated.
+func (u *SourceGoogleAnalyticsDataAPIValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPIInt64Value := new(SourceGoogleAnalyticsDataAPIInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIInt64Value = sourceGoogleAnalyticsDataAPIInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPIValueUnionTypeSourceGoogleAnalyticsDataAPIInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIDoubleValue := new(SourceGoogleAnalyticsDataAPIDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIDoubleValue = sourceGoogleAnalyticsDataAPIDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPIValueUnionTypeSourceGoogleAnalyticsDataAPIDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON emits whichever variant pointer is non-nil; errors when both are nil.
+func (u SourceGoogleAnalyticsDataAPIValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPINumericFilter expresses a GA4 "numericFilter":
+// compares a dimension against Value using the Operation enum(s). filterName
+// pins the "filter_name" discriminator to "numericFilter".
+type SourceGoogleAnalyticsDataAPINumericFilter struct {
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName `const:"numericFilter" json:"filter_name"`
+ Operation []SourceGoogleAnalyticsDataAPISchemasValidEnums `json:"operation"`
+ Value SourceGoogleAnalyticsDataAPIValue `json:"value"`
+}
+
+// MarshalJSON serializes through utils.MarshalJSON so the const discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPINumericFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via utils.UnmarshalJSON, validating the const discriminator.
+func (s *SourceGoogleAnalyticsDataAPINumericFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetFilterName always reports the fixed "numericFilter" discriminator.
+func (o *SourceGoogleAnalyticsDataAPINumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterNameNumericFilter
+}
+
+// GetOperation returns Operation, or an empty slice on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPINumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPISchemasValidEnums {
+ if o == nil {
+ return []SourceGoogleAnalyticsDataAPISchemasValidEnums{}
+ }
+ return o.Operation
+}
+
+// GetValue returns Value, or the zero union on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPINumericFilter) GetValue() SourceGoogleAnalyticsDataAPIValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIValue{}
+ }
+ return o.Value
+}
+
+// SourceGoogleAnalyticsDataAPISchemasFilterName is a single-value enum: the
+// "filter_name" discriminator for inListFilter.
+type SourceGoogleAnalyticsDataAPISchemasFilterName string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasFilterNameInListFilter SourceGoogleAnalyticsDataAPISchemasFilterName = "inListFilter"
+)
+
+// ToPointer returns a pointer to a copy of e, for optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasFilterName {
+ return &e
+}
+
+// UnmarshalJSON accepts only the literal "inListFilter"; anything else errors.
+func (e *SourceGoogleAnalyticsDataAPISchemasFilterName) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "inListFilter":
+ *e = SourceGoogleAnalyticsDataAPISchemasFilterName(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasFilterName: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPIInListFilter expresses a GA4 "inListFilter":
+// matches a dimension against the list in Values, optionally case-sensitively.
+// filterName pins the "filter_name" discriminator to "inListFilter".
+type SourceGoogleAnalyticsDataAPIInListFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPISchemasFilterName `const:"inListFilter" json:"filter_name"`
+ Values []string `json:"values"`
+}
+
+// MarshalJSON serializes through utils.MarshalJSON so the const discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPIInListFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via utils.UnmarshalJSON, validating the const discriminator.
+func (s *SourceGoogleAnalyticsDataAPIInListFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetCaseSensitive returns CaseSensitive, or nil on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPIInListFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+// GetFilterName always reports the fixed "inListFilter" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasFilterNameInListFilter
+}
+
+// GetValues returns Values, or an empty slice on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPIInListFilter) GetValues() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Values
+}
+
+// SourceGoogleAnalyticsDataAPIFilterName is a single-value enum: the
+// "filter_name" discriminator for stringFilter.
+type SourceGoogleAnalyticsDataAPIFilterName string
+
+const (
+ SourceGoogleAnalyticsDataAPIFilterNameStringFilter SourceGoogleAnalyticsDataAPIFilterName = "stringFilter"
+)
+
+// ToPointer returns a pointer to a copy of e, for optional fields.
+func (e SourceGoogleAnalyticsDataAPIFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIFilterName {
+ return &e
+}
+
+// UnmarshalJSON accepts only the literal "stringFilter"; anything else errors.
+func (e *SourceGoogleAnalyticsDataAPIFilterName) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "stringFilter":
+ *e = SourceGoogleAnalyticsDataAPIFilterName(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIFilterName: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPIValidEnums is the closed set of string match
+// types accepted by a GA4 stringFilter.
+type SourceGoogleAnalyticsDataAPIValidEnums string
+
+const (
+ SourceGoogleAnalyticsDataAPIValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPIValidEnums = "MATCH_TYPE_UNSPECIFIED"
+ SourceGoogleAnalyticsDataAPIValidEnumsExact SourceGoogleAnalyticsDataAPIValidEnums = "EXACT"
+ SourceGoogleAnalyticsDataAPIValidEnumsBeginsWith SourceGoogleAnalyticsDataAPIValidEnums = "BEGINS_WITH"
+ SourceGoogleAnalyticsDataAPIValidEnumsEndsWith SourceGoogleAnalyticsDataAPIValidEnums = "ENDS_WITH"
+ SourceGoogleAnalyticsDataAPIValidEnumsContains SourceGoogleAnalyticsDataAPIValidEnums = "CONTAINS"
+ SourceGoogleAnalyticsDataAPIValidEnumsFullRegexp SourceGoogleAnalyticsDataAPIValidEnums = "FULL_REGEXP"
+ SourceGoogleAnalyticsDataAPIValidEnumsPartialRegexp SourceGoogleAnalyticsDataAPIValidEnums = "PARTIAL_REGEXP"
+)
+
+// ToPointer returns a pointer to a copy of e, for optional fields.
+func (e SourceGoogleAnalyticsDataAPIValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIValidEnums {
+ return &e
+}
+
+// UnmarshalJSON accepts only the enum members above (the fallthrough chain is
+// generator output equivalent to one multi-value case); anything else errors.
+func (e *SourceGoogleAnalyticsDataAPIValidEnums) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "MATCH_TYPE_UNSPECIFIED":
+ fallthrough
+ case "EXACT":
+ fallthrough
+ case "BEGINS_WITH":
+ fallthrough
+ case "ENDS_WITH":
+ fallthrough
+ case "CONTAINS":
+ fallthrough
+ case "FULL_REGEXP":
+ fallthrough
+ case "PARTIAL_REGEXP":
+ *e = SourceGoogleAnalyticsDataAPIValidEnums(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIValidEnums: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPIStringFilter expresses a GA4 "stringFilter":
+// matches a dimension against Value using MatchType, optionally
+// case-sensitively. filterName pins "filter_name" to "stringFilter".
+type SourceGoogleAnalyticsDataAPIStringFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPIFilterName `const:"stringFilter" json:"filter_name"`
+ MatchType []SourceGoogleAnalyticsDataAPIValidEnums `json:"matchType,omitempty"`
+ Value string `json:"value"`
+}
+
+// MarshalJSON serializes through utils.MarshalJSON so the const discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPIStringFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via utils.UnmarshalJSON, validating the const discriminator.
+func (s *SourceGoogleAnalyticsDataAPIStringFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetCaseSensitive returns CaseSensitive, or nil on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPIStringFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+// GetFilterName always reports the fixed "stringFilter" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPIFilterName {
+ return SourceGoogleAnalyticsDataAPIFilterNameStringFilter
+}
+
+// GetMatchType returns MatchType, or nil on a nil receiver (field is optional).
+func (o *SourceGoogleAnalyticsDataAPIStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPIValidEnums {
+ if o == nil {
+ return nil
+ }
+ return o.MatchType
+}
+
+// GetValue returns Value, or "" on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPIStringFilter) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionType names
+// which member of the four-way filter union is populated.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionType = "source-google-analytics-data-api_stringFilter"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionType = "source-google-analytics-data-api_inListFilter"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPINumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionType = "source-google-analytics-data-api_numericFilter"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionType = "source-google-analytics-data-api_betweenFilter"
+)
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter is a oneOf union
+// over the four GA4 primitive filter kinds (string / inList / numeric /
+// between); exactly one pointer is non-nil and Type records which.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter struct {
+ SourceGoogleAnalyticsDataAPIStringFilter *SourceGoogleAnalyticsDataAPIStringFilter
+ SourceGoogleAnalyticsDataAPIInListFilter *SourceGoogleAnalyticsDataAPIInListFilter
+ SourceGoogleAnalyticsDataAPINumericFilter *SourceGoogleAnalyticsDataAPINumericFilter
+ SourceGoogleAnalyticsDataAPIBetweenFilter *SourceGoogleAnalyticsDataAPIBetweenFilter
+
+ Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionType
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPIStringFilter
+// wraps a stringFilter variant in the union with Type preset.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPIStringFilter(sourceGoogleAnalyticsDataAPIStringFilter SourceGoogleAnalyticsDataAPIStringFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIStringFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIStringFilter: &sourceGoogleAnalyticsDataAPIStringFilter,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPIInListFilter
+// wraps an inListFilter variant in the union with Type preset.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPIInListFilter(sourceGoogleAnalyticsDataAPIInListFilter SourceGoogleAnalyticsDataAPIInListFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIInListFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIInListFilter: &sourceGoogleAnalyticsDataAPIInListFilter,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPINumericFilter
+// wraps a numericFilter variant in the union with Type preset.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPINumericFilter(sourceGoogleAnalyticsDataAPINumericFilter SourceGoogleAnalyticsDataAPINumericFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPINumericFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPINumericFilter: &sourceGoogleAnalyticsDataAPINumericFilter,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPIBetweenFilter
+// wraps a betweenFilter variant in the union with Type preset.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPIBetweenFilter(sourceGoogleAnalyticsDataAPIBetweenFilter SourceGoogleAnalyticsDataAPIBetweenFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIBetweenFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIBetweenFilter: &sourceGoogleAnalyticsDataAPIBetweenFilter,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON tries inList, numeric, between, then string. NOTE(review):
+// the try-order differs from the struct's field order and is presumably chosen
+// so the most permissive shape (stringFilter) is attempted last; each attempt
+// also appears to validate the const "filter_name" discriminator. Preserve
+// this order as generated.
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPIInListFilter := new(SourceGoogleAnalyticsDataAPIInListFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIInListFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIInListFilter = sourceGoogleAnalyticsDataAPIInListFilter
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIInListFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPINumericFilter := new(SourceGoogleAnalyticsDataAPINumericFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPINumericFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPINumericFilter = sourceGoogleAnalyticsDataAPINumericFilter
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPINumericFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIBetweenFilter := new(SourceGoogleAnalyticsDataAPIBetweenFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIBetweenFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIBetweenFilter = sourceGoogleAnalyticsDataAPIBetweenFilter
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIBetweenFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIStringFilter := new(SourceGoogleAnalyticsDataAPIStringFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIStringFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIStringFilter = sourceGoogleAnalyticsDataAPIStringFilter
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIStringFilter
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON emits the first non-nil variant; errors when all four are nil.
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIStringFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIStringFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIInListFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIInListFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPINumericFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPINumericFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIBetweenFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIBetweenFilter, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterType
+// is a single-value enum: the "filter_type" discriminator for a primitive filter.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterTypeFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterType = "filter"
+)
+
+// ToPointer returns a pointer to a copy of e, for optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterType {
+ return &e
+}
+
+// UnmarshalJSON accepts only the literal "filter"; anything else errors.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "filter":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPIFilter - A primitive filter. In the same FilterExpression, all of the filter's field names need to be either all dimensions.
+// It binds a FieldName to one of the four filter-kind union variants; the
+// optional filterType pins "filter_type" to "filter" via the `const` tag.
+type SourceGoogleAnalyticsDataAPIFilter struct {
+ FieldName string `json:"field_name"`
+ Filter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter `json:"filter"`
+ filterType *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterType `const:"filter" json:"filter_type,omitempty"`
+}
+
+// MarshalJSON serializes through utils.MarshalJSON so the const discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPIFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via utils.UnmarshalJSON, validating the const discriminator.
+func (s *SourceGoogleAnalyticsDataAPIFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetFieldName returns FieldName, or "" on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPIFilter) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+// GetFilter returns Filter, or the zero union on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPIFilter) GetFilter() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter{}
+ }
+ return o.Filter
+}
+
+// GetFilterType always reports a pointer to the fixed "filter" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIFilter) GetFilterType() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterTypeFilter.ToPointer()
+}
+
+// ...Filter3ExpressionFilterFilterFilterName is a single-value enum: the
+// "filter_name" discriminator for betweenFilter in the Filter3 expression shape.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName = "betweenFilter"
+)
+
+// ToPointer returns a pointer to a copy of e, for optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName {
+ return &e
+}
+
+// UnmarshalJSON accepts only the literal "betweenFilter"; anything else errors.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "betweenFilter":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName: %v", v)
+ }
+}
+
+// ...Filter3ExpressionFilterFilterValueType is a single-value enum: the
+// "value_type" discriminator for doubleValue in the Filter3 expression shape.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of e, for optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType {
+ return &e
+}
+
+// UnmarshalJSON accepts only the literal "doubleValue"; anything else errors.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "doubleValue":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType: %v", v)
+ }
+}
+
+// ...Filter3ExpressionDoubleValue carries a float64 comparison value in the
+// Filter3 expression shape; valueType pins "value_type" to "doubleValue".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes through utils.MarshalJSON so the const discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via utils.UnmarshalJSON, validating the const discriminator.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns Value, or 0.0 on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+// GetValueType always reports the fixed "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueTypeDoubleValue
+}
+
+// ...ExpressionFilterValueType is a single-valued discriminator enum; its only
+// legal wire value is "int64Value".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to the enum value, for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType {
+	return &e
+}
+
+// UnmarshalJSON decodes the enum, rejecting anything but "int64Value".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType: %v", v)
+	}
+}
+
+// ...ExpressionInt64Value is an int64-valued filter operand. The value is
+// carried as a string (matching the GA Data API's int64 JSON encoding) and the
+// unexported value_type is pinned to the const "int64Value".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value struct {
+	Value     string                                                                                                          `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON serializes via the SDK helper, which injects const-tagged fields.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK helper, validating const-tagged fields.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the operand value; nil-safe (returns "" on a nil receiver).
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType always returns the "int64Value" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueTypeInt64Value
+}
+
+// ...FromValueType tags which variant of the FromValue union is populated.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value  SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_expression_int64Value"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_expression_doubleValue"
+)
+
+// ...FromValue is a oneOf union: exactly one variant pointer is non-nil, and
+// Type records which. Use the Create* constructors rather than building it by hand.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue struct {
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value  *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue
+
+	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueType
+}
+
+// Create...FromValue...Int64Value wraps an int64Value operand into the
+// FromValue union with the matching Type discriminator set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value,
+		Type: typ,
+	}
+}
+
+// Create...FromValue...DoubleValue wraps a doubleValue operand into the
+// FromValue union with the matching Type discriminator set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON tries each union variant in declaration order (int64Value,
+// then doubleValue) and keeps the first that decodes cleanly; the const-tagged
+// value_type field is what disambiguates the two shapes.
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is set; errors if none is.
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// ...Filter4ToValueValueType is a single-valued discriminator enum; its only
+// legal wire value is "doubleValue".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to the enum value, for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType {
+	return &e
+}
+
+// UnmarshalJSON decodes the enum, rejecting anything but "doubleValue".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType: %v", v)
+	}
+}
+
+// ...ExpressionFilterDoubleValue is a float-valued filter operand; the
+// unexported value_type is pinned to the const "doubleValue".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue struct {
+	Value     float64                                                                                                                        `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes via the SDK helper, which injects const-tagged fields.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK helper, validating const-tagged fields.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the operand value; nil-safe (returns 0.0 on a nil receiver).
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType always returns the "doubleValue" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueTypeDoubleValue
+}
+
+// ...Filter4ValueType is a single-valued discriminator enum; its only legal
+// wire value is "int64Value".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to the enum value, for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType {
+	return &e
+}
+
+// UnmarshalJSON decodes the enum, rejecting anything but "int64Value".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType: %v", v)
+	}
+}
+
+// ...ExpressionFilterInt64Value is an int64-valued filter operand carried as a
+// string (GA Data API int64 JSON encoding); value_type is pinned to "int64Value".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value struct {
+	Value     string                                                                                                                 `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON serializes via the SDK helper, which injects const-tagged fields.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK helper, validating const-tagged fields.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the operand value; nil-safe (returns "" on a nil receiver).
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType always returns the "int64Value" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueTypeInt64Value
+}
+
+// ...ToValueType tags which variant of the ToValue union is populated.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value  SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_expression_filter_int64Value"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_expression_filter_doubleValue"
+)
+
+// ...ToValue is a oneOf union: exactly one variant pointer is non-nil, and
+// Type records which. Use the Create* constructors rather than building it by hand.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue struct {
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value  *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue
+
+	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueType
+}
+
+// Create...ToValue...FilterInt64Value wraps an int64Value operand into the
+// ToValue union with the matching Type discriminator set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value,
+		Type: typ,
+	}
+}
+
+// Create...ToValue...FilterDoubleValue wraps a doubleValue operand into the
+// ToValue union with the matching Type discriminator set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON tries each union variant in declaration order (int64Value,
+// then doubleValue) and keeps the first that decodes cleanly; the const-tagged
+// value_type field is what disambiguates the two shapes.
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is set; errors if none is.
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// ...BetweenFilter is a range filter with inclusive-style from/to operands;
+// the unexported filter_name is pinned to the const "betweenFilter".
+// NOTE(review): inclusivity of the bounds is not visible here — defined by the
+// GA Data API BetweenFilter semantics.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter struct {
+	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName `const:"betweenFilter" json:"filter_name"`
+	FromValue  SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue                                                        `json:"fromValue"`
+	ToValue    SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue                                                          `json:"toValue"`
+}
+
+// MarshalJSON serializes via the SDK helper, which injects const-tagged fields.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK helper, validating const-tagged fields.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetFilterName always returns the "betweenFilter" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterNameBetweenFilter
+}
+
+// GetFromValue returns the lower bound; nil-safe (zero union on nil receiver).
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue{}
+	}
+	return o.FromValue
+}
+
+// GetToValue returns the upper bound; nil-safe (zero union on nil receiver).
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue{}
+	}
+	return o.ToValue
+}
+
+// ...ExpressionFilterFilterName is a single-valued discriminator enum; its
+// only legal wire value is "numericFilter".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterNameNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName = "numericFilter"
+)
+
+// ToPointer returns a pointer to the enum value, for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName {
+	return &e
+}
+
+// UnmarshalJSON decodes the enum, rejecting anything but "numericFilter".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "numericFilter":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName: %v", v)
+	}
+}
+
+// ...Filter3ValidEnums enumerates the numeric-filter comparison operations
+// accepted by the GA Data API (unspecified, equality, and ordering comparisons).
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "OPERATION_UNSPECIFIED"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsEqual               SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "EQUAL"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsLessThan            SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "LESS_THAN"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsLessThanOrEqual     SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "LESS_THAN_OR_EQUAL"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsGreaterThan         SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "GREATER_THAN"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsGreaterThanOrEqual  SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "GREATER_THAN_OR_EQUAL"
+)
+
+// ToPointer returns a pointer to the enum value, for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums {
+	return &e
+}
+
+// UnmarshalJSON decodes the enum, accepting only the six declared operation
+// names (the fallthrough chain funnels every valid case to one assignment).
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "OPERATION_UNSPECIFIED":
+		fallthrough
+	case "EQUAL":
+		fallthrough
+	case "LESS_THAN":
+		fallthrough
+	case "LESS_THAN_OR_EQUAL":
+		fallthrough
+	case "GREATER_THAN":
+		fallthrough
+	case "GREATER_THAN_OR_EQUAL":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums: %v", v)
+	}
+}
+
+// ...Filter3ExpressionValueType is a single-valued discriminator enum; its
+// only legal wire value is "doubleValue".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to the enum value, for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType {
+	return &e
+}
+
+// UnmarshalJSON decodes the enum, rejecting anything but "doubleValue".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType: %v", v)
+	}
+}
+
+// ...Filter3DoubleValue is a float-valued filter operand; the unexported
+// value_type is pinned to the const "doubleValue".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue struct {
+	Value     float64                                                                                                   `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes via the SDK helper, which injects const-tagged fields.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK helper, validating const-tagged fields.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the operand value; nil-safe (returns 0.0 on a nil receiver).
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType always returns the "doubleValue" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueTypeDoubleValue
+}
+
+// ...Filter3ValueType is a single-valued discriminator enum; its only legal
+// wire value is "int64Value".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to the enum value, for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType {
+	return &e
+}
+
+// UnmarshalJSON decodes the enum, rejecting anything but "int64Value".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType: %v", v)
+	}
+}
+
+// ...Filter3Int64Value is an int64-valued filter operand carried as a string
+// (GA Data API int64 JSON encoding); value_type is pinned to "int64Value".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value struct {
+	Value     string                                                                                          `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON serializes via the SDK helper, which injects const-tagged fields.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK helper, validating const-tagged fields.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the operand value; nil-safe (returns "" on a nil receiver).
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType always returns the "int64Value" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueTypeInt64Value
+}
+
+// ...ValueUnionType tags which variant of the Value union is populated.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueUnionType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value  SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_int64Value"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_doubleValue"
+)
+
+// ...ArrayValue is a oneOf union: exactly one variant pointer is non-nil, and
+// Type records which. Use the Create* constructors rather than building it by hand.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue struct {
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value  *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue
+
+	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueUnionType
+}
+
+// Create...ArrayValue...Int64Value wraps an int64Value operand into the Value
+// union with the matching Type discriminator set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value,
+		Type: typ,
+	}
+}
+
+// Create...ArrayValue...DoubleValue wraps a doubleValue operand into the Value
+// union with the matching Type discriminator set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue,
+		Type: typ,
+	}
+}
+
// UnmarshalJSON decodes data into exactly one variant of the union. Variants are
// probed in a fixed order (int64Value first, then doubleValue); the first one that
// utils.UnmarshalJSON accepts is stored along with its Type tag. The probe order
// is behavior — do not reorder. NOTE(review): exact strictness of the match is
// delegated to the project's utils.UnmarshalJSON; not visible here.
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue
		return nil
	}

	// Neither variant matched.
	return errors.New("could not unmarshal into supported union types")
}
+
// MarshalJSON serializes whichever variant pointer is non-nil, checking the
// int64Value variant first; it errors when no variant is set.
func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter is the
// numericFilter variant of a dimension filter: an operation applied to a value.
// filterName is unexported and pinned to "numericFilter" via the const tag.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter struct {
	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName `const:"numericFilter" json:"filter_name"`
	Operation []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums `json:"operation"`
	Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue `json:"value"`
}

// MarshalJSON delegates to utils.MarshalJSON so const-tagged fields are emitted.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (non-strict; allows unknown fields).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetFilterName always returns the numericFilter discriminator constant.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterNameNumericFilter
}

// GetOperation returns Operation, or an empty slice for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums {
	if o == nil {
		return []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums{}
	}
	return o.Operation
}

// GetValue returns Value, or the zero union for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter) GetValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue{}
	}
	return o.Value
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName
// is the filter_name discriminator for the inListFilter variant; "inListFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterNameInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName = "inListFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "inListFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "inListFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter is the
// inListFilter variant of a dimension filter: matches against a list of values,
// optionally case-sensitively. filterName is pinned to "inListFilter".
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter struct {
	CaseSensitive *bool `json:"caseSensitive,omitempty"`
	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName `const:"inListFilter" json:"filter_name"`
	Values []string `json:"values"`
}

// MarshalJSON delegates to utils.MarshalJSON so const-tagged fields are emitted.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (non-strict; allows unknown fields).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetCaseSensitive returns CaseSensitive, or nil for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter) GetCaseSensitive() *bool {
	if o == nil {
		return nil
	}
	return o.CaseSensitive
}

// GetFilterName always returns the inListFilter discriminator constant.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterNameInListFilter
}

// GetValues returns Values, or an empty slice for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter) GetValues() []string {
	if o == nil {
		return []string{}
	}
	return o.Values
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName
// is the filter_name discriminator for the stringFilter variant; "stringFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterNameStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName = "stringFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "stringFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "stringFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums
// enumerates the accepted string-filter match types.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "MATCH_TYPE_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsExact SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "EXACT"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsBeginsWith SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "BEGINS_WITH"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsEndsWith SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "ENDS_WITH"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsContains SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "CONTAINS"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsFullRegexp SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "FULL_REGEXP"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsPartialRegexp SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "PARTIAL_REGEXP"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only the enumerated match types.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	switch s {
	case "MATCH_TYPE_UNSPECIFIED", "EXACT", "BEGINS_WITH", "ENDS_WITH", "CONTAINS", "FULL_REGEXP", "PARTIAL_REGEXP":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums(s)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums: %v", s)
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter is the
// stringFilter variant of a dimension filter: matches a single value with an
// optional match type and case sensitivity. filterName is pinned to "stringFilter".
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter struct {
	CaseSensitive *bool `json:"caseSensitive,omitempty"`
	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName `const:"stringFilter" json:"filter_name"`
	MatchType []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums `json:"matchType,omitempty"`
	Value string `json:"value"`
}

// MarshalJSON delegates to utils.MarshalJSON so const-tagged fields are emitted.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (non-strict; allows unknown fields).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetCaseSensitive returns CaseSensitive, or nil for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter) GetCaseSensitive() *bool {
	if o == nil {
		return nil
	}
	return o.CaseSensitive
}

// GetFilterName always returns the stringFilter discriminator constant.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterNameStringFilter
}

// GetMatchType returns MatchType, or nil for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums {
	if o == nil {
		return nil
	}
	return o.MatchType
}

// GetValue returns Value, or "" for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterType
// tags which variant of the four-way filter union below is populated.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_stringFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_inListFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_numericFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_betweenFilter"
)

// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter
// is a tagged union over the string/inList/numeric/between filter variants; exactly
// one pointer is expected to be non-nil, named by Type.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter struct {
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter

	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterType
}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes data into exactly one filter variant. Variants are probed
// in a fixed order — inListFilter, numericFilter, betweenFilter, then stringFilter —
// and the first successful match is stored with its Type tag. The probe order is
// behavior — do not reorder. NOTE(review): stringFilter is tried last, presumably
// because its shape matches most inputs; confirm against the generator's intent.
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter
		return nil
	}

	// No variant matched.
	return errors.New("could not unmarshal into supported union types")
}
+
// MarshalJSON serializes the first non-nil variant, checked in declaration order
// (stringFilter, inListFilter, numericFilter, betweenFilter); errors when all are nil.
func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression names a
// dimension field and the filter applied to it.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression struct {
	FieldName string `json:"field_name"`
	Filter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter `json:"filter"`
}

// GetFieldName returns FieldName, or "" for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression) GetFieldName() string {
	if o == nil {
		return ""
	}
	return o.FieldName
}

// GetFilter returns Filter, or the zero union for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression) GetFilter() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter {
	if o == nil {
		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter{}
	}
	return o.Filter
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType is the
// filter_type discriminator for the notExpression variant; "notExpression" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterTypeNotExpression SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType = "notExpression"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "notExpression".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "notExpression" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPINotExpression - The FilterExpression is NOT of expression.
// filterType is unexported and pinned to "notExpression" via the const tag.
type SourceGoogleAnalyticsDataAPINotExpression struct {
	Expression *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression `json:"expression,omitempty"`
	filterType *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType `const:"notExpression" json:"filter_type,omitempty"`
}

// MarshalJSON delegates to utils.MarshalJSON so const-tagged fields are emitted.
func (s SourceGoogleAnalyticsDataAPINotExpression) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (non-strict; allows unknown fields).
func (s *SourceGoogleAnalyticsDataAPINotExpression) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetExpression returns Expression, or nil for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPINotExpression) GetExpression() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression {
	if o == nil {
		return nil
	}
	return o.Expression
}

// GetFilterType always returns a pointer to the notExpression constant;
// the receiver is not inspected.
func (o *SourceGoogleAnalyticsDataAPINotExpression) GetFilterType() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterTypeNotExpression.ToPointer()
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterName
// is the filter_name discriminator for the betweenFilter variant; "betweenFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterName = "betweenFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "betweenFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterName) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "betweenFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterName: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterName(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType
// is the value_type discriminator for the doubleValue variant; "doubleValue" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "doubleValue".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue
// carries a float64 value with its value_type pinned to "doubleValue".
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue struct {
	Value float64 `json:"value"`
	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType `const:"doubleValue" json:"value_type"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const-tagged value_type is emitted.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (non-strict; allows unknown fields).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns Value, or 0.0 for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue) GetValue() float64 {
	if o == nil {
		return 0.0
	}
	return o.Value
}

// GetValueType always returns the doubleValue discriminator constant.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueTypeDoubleValue
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType
// is the value_type discriminator for the int64Value variant; "int64Value" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "int64Value".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
// carries an int64 transported as a JSON string (Value is a Go string), with
// value_type pinned to "int64Value".
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value struct {
	Value string `json:"value"`
	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType `const:"int64Value" json:"value_type"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const-tagged value_type is emitted.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON (non-strict; allows unknown fields).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns Value, or "" for a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}

// GetValueType always returns the int64Value discriminator constant.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueTypeInt64Value
}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueType tags which variant
+// of the "from value" union is populated (int64Value or doubleValue).
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_int64Value"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue is a tagged union:
+// exactly one of the two variant pointers is expected to be non-nil, with Type
+// recording which one.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue
+
+ Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueType
+}
+
+// Constructor wrapping the int64Value variant into the union with its Type tag set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value,
+ Type: typ,
+ }
+}
+
+// Constructor wrapping the doubleValue variant into the union with its Type tag set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON probes the variants in declared order — int64Value first, then
+// doubleValue — and the first decode that succeeds wins.
+// NOTE(review): order matters if a payload could satisfy both variants; the
+// const-tagged value_type presumably disambiguates — confirm in utils.
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is set; errors if none is.
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType
+// is a single-valued enum: the const value_type discriminator "doubleValue".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of e, for populating optional enum fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType {
+ return &e
+}
+
+// UnmarshalJSON decodes the enum from a JSON string, accepting only "doubleValue".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "doubleValue":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue is the doubleValue
+// variant of a filter bound: a float64 "value" plus a const "value_type" of "doubleValue".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON delegates to utils.MarshalJSON — presumably emitting the const-tagged
+// value_type discriminator (confirm in utils).
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to utils.UnmarshalJSON for const-tag-aware decoding.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns the numeric value, or 0.0 for a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+// GetValueType always returns the fixed "doubleValue" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType
+// is a single-valued enum: the const value_type discriminator "int64Value".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to a copy of e, for populating optional enum fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType {
+ return &e
+}
+
+// UnmarshalJSON decodes the enum from a JSON string, accepting only "int64Value".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "int64Value":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value is the int64Value
+// variant of a filter bound; the number travels as a JSON string in "value",
+// with the const "value_type" pinned to "int64Value".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON delegates to utils.MarshalJSON — presumably emitting the const-tagged
+// value_type discriminator (confirm in utils).
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to utils.UnmarshalJSON for const-tag-aware decoding.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns the string-encoded integer value, or "" for a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+// GetValueType always returns the fixed "int64Value" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueType tags which variant
+// of the "to value" union is populated (int64Value or doubleValue).
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_2_int64Value"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_2_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue is a tagged union:
+// exactly one of the two variant pointers is expected to be non-nil, with Type
+// recording which one.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue
+
+ Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueType
+}
+
+// Constructor wrapping the int64Value variant into the union with its Type tag set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value,
+ Type: typ,
+ }
+}
+
+// Constructor wrapping the doubleValue variant into the union with its Type tag set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON probes the variants in declared order — int64Value first, then
+// doubleValue — and the first decode that succeeds wins.
+// NOTE(review): the const-tagged value_type presumably makes the probes mutually
+// exclusive — confirm in utils.
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is set; errors if none is.
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter models a GA4
+// betweenFilter: an inclusive range with a fromValue and a toValue, each a
+// numeric union. filter_name is pinned by its `const` tag to "betweenFilter".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter struct {
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterName `const:"betweenFilter" json:"filter_name"`
+ FromValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue `json:"fromValue"`
+ ToValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue `json:"toValue"`
+}
+
+// MarshalJSON delegates to utils.MarshalJSON — presumably emitting the const-tagged
+// filter_name discriminator (confirm in utils).
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to utils.UnmarshalJSON for const-tag-aware decoding.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetFilterName always returns the fixed "betweenFilter" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterNameBetweenFilter
+}
+
+// GetFromValue returns the lower bound (zero-value union for a nil receiver).
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFromValue{}
+ }
+ return o.FromValue
+}
+
+// GetToValue returns the upper bound (zero-value union for a nil receiver).
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterToValue{}
+ }
+ return o.ToValue
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName is a
+// single-valued enum: the const filter_name discriminator "numericFilter".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterNameNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName = "numericFilter"
+)
+
+// ToPointer returns a pointer to a copy of e, for populating optional enum fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName {
+ return &e
+}
+
+// UnmarshalJSON decodes the enum from a JSON string, accepting only "numericFilter".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "numericFilter":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums enumerates the numeric-filter
+// comparison operations (GA4 NumericFilter.Operation values).
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums = "OPERATION_UNSPECIFIED"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnumsEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums = "EQUAL"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnumsLessThan SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums = "LESS_THAN"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnumsLessThanOrEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums = "LESS_THAN_OR_EQUAL"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnumsGreaterThan SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums = "GREATER_THAN"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnumsGreaterThanOrEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums = "GREATER_THAN_OR_EQUAL"
+)
+
+// ToPointer returns a pointer to a copy of e, for populating optional enum fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums {
+ return &e
+}
+
+// UnmarshalJSON decodes the enum from a JSON string; the fallthrough chain
+// funnels all six accepted operation names into a single assignment.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "OPERATION_UNSPECIFIED":
+ fallthrough
+ case "EQUAL":
+ fallthrough
+ case "LESS_THAN":
+ fallthrough
+ case "LESS_THAN_OR_EQUAL":
+ fallthrough
+ case "GREATER_THAN":
+ fallthrough
+ case "GREATER_THAN_OR_EQUAL":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType is a
+// single-valued enum: the const value_type discriminator "doubleValue".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of e, for populating optional enum fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType {
+ return &e
+}
+
+// UnmarshalJSON decodes the enum from a JSON string, accepting only "doubleValue".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "doubleValue":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue is the doubleValue variant of a
+// numeric-filter comparison value: a float64 "value" plus a const "value_type"
+// of "doubleValue".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON delegates to utils.MarshalJSON — presumably emitting the const-tagged
+// value_type discriminator (confirm in utils).
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to utils.UnmarshalJSON for const-tag-aware decoding.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns the numeric value, or 0.0 for a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+// GetValueType always returns the fixed "doubleValue" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType is a single-valued
+// enum: the const value_type discriminator "int64Value".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to a copy of e, for populating optional enum fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType {
+ return &e
+}
+
+// UnmarshalJSON decodes the enum from a JSON string, accepting only "int64Value".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "int64Value":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value is the int64Value variant of a
+// numeric-filter comparison value; the number travels as a JSON string in
+// "value", with the const "value_type" pinned to "int64Value".
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON delegates to utils.MarshalJSON — presumably emitting the const-tagged
+// value_type discriminator (confirm in utils).
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to utils.UnmarshalJSON for const-tag-aware decoding.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns the string-encoded integer value, or "" for a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+// GetValueType always returns the fixed "int64Value" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueUnionType tags which
+// variant of the numeric-filter value union is populated (int64Value or doubleValue).
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueUnionType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_int64Value"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue is a tagged union:
+// exactly one of the two variant pointers is expected to be non-nil, with Type
+// recording which one.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue
+
+ Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueUnionType
+}
+
+// Constructor wrapping the int64Value variant into the union with its Type tag set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value,
+ Type: typ,
+ }
+}
+
+// Constructor wrapping the doubleValue variant into the union with its Type tag set.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON probes the variants in declared order — int64Value first, then
+// doubleValue — and the first decode that succeeds wins.
+// NOTE(review): the const-tagged value_type presumably makes the probes mutually
+// exclusive — confirm in utils.
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is set; errors if none is.
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter struct { // GA4 dimension numericFilter: an operation applied to a numeric value
+	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName `const:"numericFilter" json:"filter_name"` // unexported; const tag — presumably utils.MarshalJSON injects the constant (confirm in utils)
+	Operation  []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums                           `json:"operation"`
+	Value      SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue                  `json:"value"` // int64Value/doubleValue union
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter) MarshalJSON() ([]byte, error) { // delegates to utils helper so const-tagged fields are emitted
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter) UnmarshalJSON(data []byte) error { // delegates to utils helper (lenient: unknown fields tolerated per final arg — verify semantics in utils)
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName { // always the constant; receiver is ignored
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterNameNumericFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums { // nil-receiver safe: empty slice fallback
+	if o == nil {
+		return []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums{}
+	}
+	return o.Operation
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter) GetValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue { // nil-receiver safe: zero-value union fallback
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValue{}
+	}
+	return o.Value
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName string // discriminator for the inListFilter variant; only one legal value
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterNameInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName = "inListFilter"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName { // convenience for optional-field assignment
+	return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName) UnmarshalJSON(data []byte) error { // rejects any value other than "inListFilter"
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "inListFilter":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName: %v", v)
+	}
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter struct { // GA4 dimension inListFilter: membership test against a string list
+	CaseSensitive *bool                                                                                           `json:"caseSensitive,omitempty"` // optional; nil when unset
+	filterName    SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName `const:"inListFilter" json:"filter_name"` // unexported const discriminator
+	Values        []string                                                                                        `json:"values"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter) MarshalJSON() ([]byte, error) { // utils helper emits the const filter_name field
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter) UnmarshalJSON(data []byte) error { // utils helper validates the const discriminator
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter) GetCaseSensitive() *bool { // nil-receiver safe
+	if o == nil {
+		return nil
+	}
+	return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName { // always the constant; receiver is ignored
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterNameInListFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter) GetValues() []string { // nil-receiver safe: empty slice fallback
+	if o == nil {
+		return []string{}
+	}
+	return o.Values
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName string // discriminator for the stringFilter variant; only one legal value
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterNameStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName = "stringFilter"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName { // convenience for optional-field assignment
+	return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName) UnmarshalJSON(data []byte) error { // rejects any value other than "stringFilter"
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "stringFilter":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName: %v", v)
+	}
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums string // GA4 string-filter match types (EXACT, CONTAINS, regexp, ...)
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "MATCH_TYPE_UNSPECIFIED"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsExact                SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "EXACT"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsBeginsWith           SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "BEGINS_WITH"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsEndsWith             SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "ENDS_WITH"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsContains             SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "CONTAINS"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsFullRegexp           SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "FULL_REGEXP"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsPartialRegexp        SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "PARTIAL_REGEXP"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums { // convenience for optional-field assignment
+	return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums) UnmarshalJSON(data []byte) error { // accepts exactly the seven declared values
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "MATCH_TYPE_UNSPECIFIED":
+		fallthrough
+	case "EXACT":
+		fallthrough
+	case "BEGINS_WITH":
+		fallthrough
+	case "ENDS_WITH":
+		fallthrough
+	case "CONTAINS":
+		fallthrough
+	case "FULL_REGEXP":
+		fallthrough
+	case "PARTIAL_REGEXP":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums: %v", v)
+	}
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter struct { // GA4 dimension stringFilter: match a string value by matchType
+	CaseSensitive *bool                                                                                             `json:"caseSensitive,omitempty"` // optional; nil when unset
+	filterName    SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName    `const:"stringFilter" json:"filter_name"` // unexported const discriminator
+	MatchType     []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums `json:"matchType,omitempty"` // optional list of match types
+	Value         string                                                                                            `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter) MarshalJSON() ([]byte, error) { // utils helper emits the const filter_name field
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter) UnmarshalJSON(data []byte) error { // utils helper validates the const discriminator
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter) GetCaseSensitive() *bool { // nil-receiver safe
+	if o == nil {
+		return nil
+	}
+	return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName { // always the constant; receiver is ignored
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterNameStringFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums { // nil-receiver safe: nil (not empty slice) fallback
+	if o == nil {
+		return nil
+	}
+	return o.MatchType
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter) GetValue() string { // nil-receiver safe: empty string fallback
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType string // tag naming which of the four filter variants is held
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter  SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_stringFilter"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter  SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_inListFilter"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_numericFilter"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_betweenFilter"
+)
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter struct { // one-of union: exactly one variant pointer should be non-nil, matching Type
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter  *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter  *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter
+
+	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter { // union constructor: stringFilter variant
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter,
+		Type: typ,
+	}
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter { // union constructor: inListFilter variant
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter,
+		Type: typ,
+	}
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter { // union constructor: numericFilter variant
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter,
+		Type: typ,
+	}
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter { // union constructor: betweenFilter variant
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter,
+		Type: typ,
+	}
+}
+
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter) UnmarshalJSON(data []byte) error { // probes variants in order inList → numeric → between → string; first clean decode wins (const filter_name tags disambiguate)
+
+	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter) // stringFilter tried last — it has the fewest required fields
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types") // no variant matched
+}
+
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter) MarshalJSON() ([]byte, error) { // serializes the first non-nil variant; ignores Type
+	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterStringFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInListFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterNumericFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterBetweenFilter, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null") // union was never populated
+}
+
+type SourceGoogleAnalyticsDataAPISchemasExpression struct { // one filter expression: a dimension field name plus its filter union
+	FieldName string                                                                                      `json:"field_name"`
+	Filter    SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter `json:"filter"` // string/inList/numeric/between union
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasExpression) GetFieldName() string { // nil-receiver safe: empty string fallback
+	if o == nil {
+		return ""
+	}
+	return o.FieldName
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasExpression) GetFilter() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter { // nil-receiver safe: zero-value union fallback
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{}
+	}
+	return o.Filter
+}
+
+type SourceGoogleAnalyticsDataAPISchemasFilterType string // discriminator for the orGroup filter expression; only one legal value
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasFilterTypeOrGroup SourceGoogleAnalyticsDataAPISchemasFilterType = "orGroup"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasFilterType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasFilterType { // convenience for optional-field assignment
+	return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasFilterType) UnmarshalJSON(data []byte) error { // rejects any value other than "orGroup"
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "orGroup":
+		*e = SourceGoogleAnalyticsDataAPISchemasFilterType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasFilterType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIOrGroup - The FilterExpressions in orGroup have an OR relationship.
+type SourceGoogleAnalyticsDataAPIOrGroup struct {
+	Expressions []SourceGoogleAnalyticsDataAPISchemasExpression `json:"expressions"` // OR-combined sub-expressions
+	filterType  SourceGoogleAnalyticsDataAPISchemasFilterType   `const:"orGroup" json:"filter_type"` // unexported const discriminator
+}
+
+func (s SourceGoogleAnalyticsDataAPIOrGroup) MarshalJSON() ([]byte, error) { // utils helper emits the const filter_type field
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIOrGroup) UnmarshalJSON(data []byte) error { // utils helper validates the const discriminator
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIOrGroup) GetExpressions() []SourceGoogleAnalyticsDataAPISchemasExpression { // nil-receiver safe: empty slice fallback
+	if o == nil {
+		return []SourceGoogleAnalyticsDataAPISchemasExpression{}
+	}
+	return o.Expressions
+}
+
+func (o *SourceGoogleAnalyticsDataAPIOrGroup) GetFilterType() SourceGoogleAnalyticsDataAPISchemasFilterType { // always the constant; receiver is ignored
+	return SourceGoogleAnalyticsDataAPISchemasFilterTypeOrGroup
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName string // discriminator for the betweenFilter variant; only one legal value
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName = "betweenFilter"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName { // convenience for optional-field assignment
+	return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName) UnmarshalJSON(data []byte) error { // rejects any value other than "betweenFilter"
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "betweenFilter":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName: %v", v)
+	}
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType string // discriminator for the doubleValue numeric variant; only one legal value
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType = "doubleValue"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType { // convenience for optional-field assignment
+	return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType) UnmarshalJSON(data []byte) error { // rejects any value other than "doubleValue"
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType: %v", v)
+	}
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue struct { // numeric literal carried as a float64
+	Value     float64                                                                                                                   `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType `const:"doubleValue" json:"value_type"` // unexported const discriminator
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue) MarshalJSON() ([]byte, error) { // utils helper emits the const value_type field
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue) UnmarshalJSON(data []byte) error { // utils helper validates the const discriminator
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue) GetValue() float64 { // nil-receiver safe: zero fallback
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType { // always the constant; receiver is ignored
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueTypeDoubleValue
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType string // discriminator for the int64Value numeric variant; only one legal value
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType = "int64Value"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType { // convenience for optional-field assignment
+	return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType) UnmarshalJSON(data []byte) error { // rejects any value other than "int64Value"
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType: %v", v)
+	}
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value struct { // int64 literal carried as a string (GA4 API encodes int64 as JSON string)
+	Value     string                                                                                                                 `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType `const:"int64Value" json:"value_type"` // unexported const discriminator
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value) MarshalJSON() ([]byte, error) { // utils helper emits the const value_type field
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value) UnmarshalJSON(data []byte) error { // utils helper validates the const discriminator
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value) GetValue() string { // nil-receiver safe: empty string fallback
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType { // always the constant; receiver is ignored
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueType
// is the discriminator recording which variant of the FromValue union below is populated.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_expressions_int64Value"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_expressions_doubleValue"
)

// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue is a
// tagged union: at most one of the two variant pointers is expected to be non-nil,
// and Type records which one was set.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue struct {
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue

	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueType
}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType
// is a single-value enum whose only legal value is "doubleValue".
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType = "doubleValue"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "doubleValue".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType(raw)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType
// is a single-value enum whose only legal value is "int64Value".
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType = "int64Value"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "int64Value".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType(raw)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueType
// is the discriminator recording which variant of the ToValue union below is populated.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_expressions_filter_int64Value"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_expressions_filter_doubleValue"
)

// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue is a
// tagged union: at most one of the two variant pointers is expected to be non-nil,
// and Type records which one was set.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue struct {
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue

	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueType
}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter struct {
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName `const:"betweenFilter" json:"filter_name"`
+ FromValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue `json:"fromValue"`
+ ToValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue `json:"toValue"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterNameBetweenFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue{}
+ }
+ return o.FromValue
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue{}
+ }
+ return o.ToValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName
// is a single-value enum whose only legal value is "numericFilter".
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterNameNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName = "numericFilter"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "numericFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "numericFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums
// enumerates the comparison operations accepted by the numeric filter expression.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "OPERATION_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "EQUAL"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsLessThan SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "LESS_THAN"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsLessThanOrEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "LESS_THAN_OR_EQUAL"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsGreaterThan SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "GREATER_THAN"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsGreaterThanOrEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "GREATER_THAN_OR_EQUAL"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only the known operation names.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch raw {
	case "OPERATION_UNSPECIFIED",
		"EQUAL",
		"LESS_THAN",
		"LESS_THAN_OR_EQUAL",
		"GREATER_THAN",
		"GREATER_THAN_OR_EQUAL":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums: %v", raw)
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType
// is a single-value enum whose only legal value is "doubleValue".
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType = "doubleValue"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "doubleValue".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType(raw)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType
// is a single-value enum whose only legal value is "int64Value".
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType = "int64Value"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "int64Value".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType(raw)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueUnionType
// is the discriminator recording which variant of the Value union below is populated.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueUnionType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_int64Value"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_doubleValue"
)

// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue is a
// tagged union: at most one of the two variant pointers is expected to be non-nil,
// and Type records which one was set.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue struct {
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue

	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueUnionType
}
+
// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value
// wraps an int64-value variant in the Value union and tags it with the
// matching Type discriminant so MarshalJSON knows which field to emit.
func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue {
	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value

	// The parameter is a value; taking its address yields a pointer that
	// outlives this call (Go moves it to the heap).
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue{
		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value,
		Type: typ,
	}
}

// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue
// wraps a double-value variant in the Value union and tags it with the
// matching Type discriminant.
func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue {
	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue

	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue{
		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue,
		Type: typ,
	}
}
+
// UnmarshalJSON probes each union variant in a fixed order — int64 value
// first, then double value — and keeps the first that decodes without
// error, recording the winner in u.Type. Order matters: input that would
// satisfy both variants is claimed by the int64 variant.
// NOTE(review): the semantics of utils.UnmarshalJSON's trailing
// ("", true, true) arguments live in the shared utils package and are not
// visible here — presumably tag-prefix plus strictness flags; confirm there.
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}

// MarshalJSON serializes whichever variant pointer is non-nil, checking the
// int64 variant first; it errors when no variant has been set.
func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter
// is the numeric dimension-filter variant. filterName is unexported and
// const-tagged: it always serializes as "numericFilter" and is not settable
// by callers.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter struct {
	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName `const:"numericFilter" json:"filter_name"`
	Operation []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums `json:"operation"`
	Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue `json:"value"`
}

// MarshalJSON delegates to the shared utils marshaller so the const-tagged
// filterName field is injected into the output.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to the shared utils unmarshaller.
// NOTE(review): &s here is a **T, the pattern used throughout this generated
// file — utils.UnmarshalJSON evidently accepts it; defined in the utils package.
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetFilterName returns the fixed "numericFilter" discriminant; the receiver
// (even nil) is never consulted.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterNameNumericFilter
}

// GetOperation returns the operation list, or an empty slice on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums {
	if o == nil {
		return []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums{}
	}
	return o.Operation
}

// GetValue returns the comparison value union, or its zero value on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter) GetValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue{}
	}
	return o.Value
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName
// is the discriminator for the in-list dimension filter; "inListFilter" is
// its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterNameInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName = "inListFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName {
	out := e
	return &out
}

// UnmarshalJSON accepts only the JSON string "inListFilter"; any other
// value (or non-string JSON) is rejected and e is left unchanged.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	if decoded != "inListFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName: %v", decoded)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName(decoded)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter
// is the in-list dimension-filter variant. filterName is unexported and
// const-tagged: it always serializes as "inListFilter".
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter struct {
	CaseSensitive *bool `json:"caseSensitive,omitempty"`
	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName `const:"inListFilter" json:"filter_name"`
	Values []string `json:"values"`
}

// MarshalJSON delegates to utils so the const-tagged filterName is emitted.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to the shared utils unmarshaller (standard
// generated pattern in this file).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetCaseSensitive returns the optional case-sensitivity flag; nil receiver yields nil.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter) GetCaseSensitive() *bool {
	if o == nil {
		return nil
	}
	return o.CaseSensitive
}

// GetFilterName returns the fixed "inListFilter" discriminant; the receiver is never consulted.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterNameInListFilter
}

// GetValues returns the membership list, or an empty slice on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter) GetValues() []string {
	if o == nil {
		return []string{}
	}
	return o.Values
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName
// is the discriminator for the string dimension filter; "stringFilter" is
// its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterNameStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName = "stringFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName {
	out := e
	return &out
}

// UnmarshalJSON accepts only the JSON string "stringFilter"; anything else
// is rejected and e is left unchanged.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	if decoded != "stringFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName: %v", decoded)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName(decoded)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums
// enumerates the string-filter match types accepted by the API.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "MATCH_TYPE_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsExact SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "EXACT"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsBeginsWith SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "BEGINS_WITH"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsEndsWith SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "ENDS_WITH"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsContains SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "CONTAINS"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsFullRegexp SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "FULL_REGEXP"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsPartialRegexp SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "PARTIAL_REGEXP"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums {
	out := e
	return &out
}

// UnmarshalJSON accepts exactly the seven enum strings above; a multi-value
// case replaces the generated fallthrough ladder with identical behavior.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	switch decoded {
	case "MATCH_TYPE_UNSPECIFIED", "EXACT", "BEGINS_WITH", "ENDS_WITH", "CONTAINS", "FULL_REGEXP", "PARTIAL_REGEXP":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums(decoded)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums: %v", decoded)
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter
// is the string dimension-filter variant. filterName is unexported and
// const-tagged: it always serializes as "stringFilter".
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter struct {
	CaseSensitive *bool `json:"caseSensitive,omitempty"`
	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName `const:"stringFilter" json:"filter_name"`
	MatchType []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums `json:"matchType,omitempty"`
	Value string `json:"value"`
}

// MarshalJSON delegates to utils so the const-tagged filterName is emitted.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to the shared utils unmarshaller (standard
// generated pattern in this file).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetCaseSensitive returns the optional case-sensitivity flag; nil receiver yields nil.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter) GetCaseSensitive() *bool {
	if o == nil {
		return nil
	}
	return o.CaseSensitive
}

// GetFilterName returns the fixed "stringFilter" discriminant; the receiver is never consulted.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterNameStringFilter
}

// GetMatchType returns the optional match-type list; nil receiver yields nil
// (note: nil, not an empty slice, unlike the required-field getters).
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums {
	if o == nil {
		return nil
	}
	return o.MatchType
}

// GetValue returns the comparison string, or "" on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionType
// names the variant currently held by the Filter union below.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_stringFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_inListFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_numericFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_dimensionFilter_betweenFilter"
)

// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter
// is a one-of union over the four dimension-filter variants; exactly one
// variant pointer should be non-nil, with Type identifying it.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter struct {
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter

	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionType
}
+
// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter
// wraps a string-filter variant in the Filter union with the matching Type tag.
func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter {
	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter

	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter{
		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter,
		Type: typ,
	}
}

// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter
// wraps an in-list-filter variant in the Filter union with the matching Type tag.
func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter {
	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter

	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter{
		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter,
		Type: typ,
	}
}

// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter
// wraps a numeric-filter variant in the Filter union with the matching Type tag.
func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter {
	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter

	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter{
		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter,
		Type: typ,
	}
}

// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter
// wraps a between-filter variant in the Filter union with the matching Type tag.
func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter {
	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter

	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter{
		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter,
		Type: typ,
	}
}
+
// UnmarshalJSON decodes data into the first variant that unmarshals
// cleanly, probing in the fixed order inList, numeric, between, string,
// and records the winner in u.Type. The probe order differs from field
// declaration order — string is tried last, which matters because an input
// matching several variants is claimed by the earliest probe.
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}
+
// MarshalJSON serializes the first non-nil variant pointer in declaration
// order (string, inList, numeric, between); it errors when no variant is set.
func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPIExpression pairs a dimension field name with
// the filter union to apply to it.
type SourceGoogleAnalyticsDataAPIExpression struct {
	FieldName string `json:"field_name"`
	Filter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter `json:"filter"`
}

// GetFieldName returns the dimension field name, or "" on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIExpression) GetFieldName() string {
	if o == nil {
		return ""
	}
	return o.FieldName
}

// GetFilter returns the filter union, or its zero value on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIExpression) GetFilter() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter {
	if o == nil {
		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter{}
	}
	return o.Filter
}
+
// SourceGoogleAnalyticsDataAPIFilterType is the discriminator for the
// andGroup filter expression; "andGroup" is its only legal value.
type SourceGoogleAnalyticsDataAPIFilterType string

const (
	SourceGoogleAnalyticsDataAPIFilterTypeAndGroup SourceGoogleAnalyticsDataAPIFilterType = "andGroup"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIFilterType) ToPointer() *SourceGoogleAnalyticsDataAPIFilterType {
	out := e
	return &out
}

// UnmarshalJSON accepts only the JSON string "andGroup"; anything else is
// rejected and e is left unchanged.
func (e *SourceGoogleAnalyticsDataAPIFilterType) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	if decoded != "andGroup" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIFilterType: %v", decoded)
	}
	*e = SourceGoogleAnalyticsDataAPIFilterType(decoded)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIAndGroup - The FilterExpressions in andGroup have an AND relationship.
// filterType is unexported and const-tagged: it always serializes as "andGroup".
type SourceGoogleAnalyticsDataAPIAndGroup struct {
	Expressions []SourceGoogleAnalyticsDataAPIExpression `json:"expressions"`
	filterType SourceGoogleAnalyticsDataAPIFilterType `const:"andGroup" json:"filter_type"`
}

// MarshalJSON delegates to utils so the const-tagged filterType is emitted.
func (s SourceGoogleAnalyticsDataAPIAndGroup) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to the shared utils unmarshaller (standard
// generated pattern in this file).
func (s *SourceGoogleAnalyticsDataAPIAndGroup) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetExpressions returns the ANDed expressions, or an empty slice on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIAndGroup) GetExpressions() []SourceGoogleAnalyticsDataAPIExpression {
	if o == nil {
		return []SourceGoogleAnalyticsDataAPIExpression{}
	}
	return o.Expressions
}

// GetFilterType returns the fixed "andGroup" discriminant; the receiver is never consulted.
func (o *SourceGoogleAnalyticsDataAPIAndGroup) GetFilterType() SourceGoogleAnalyticsDataAPIFilterType {
	return SourceGoogleAnalyticsDataAPIFilterTypeAndGroup
}
+
// SourceGoogleAnalyticsDataAPIDimensionsFilterType names the variant
// currently held by the DimensionsFilter union below.
type SourceGoogleAnalyticsDataAPIDimensionsFilterType string

const (
	SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPIAndGroup SourceGoogleAnalyticsDataAPIDimensionsFilterType = "source-google-analytics-data-api_andGroup"
	SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPIOrGroup SourceGoogleAnalyticsDataAPIDimensionsFilterType = "source-google-analytics-data-api_orGroup"
	SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPINotExpression SourceGoogleAnalyticsDataAPIDimensionsFilterType = "source-google-analytics-data-api_notExpression"
	SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPIFilter SourceGoogleAnalyticsDataAPIDimensionsFilterType = "source-google-analytics-data-api_filter"
)

// SourceGoogleAnalyticsDataAPIDimensionsFilter is a one-of union over the
// four dimension filter-expression shapes (andGroup, orGroup, notExpression,
// filter); exactly one pointer should be non-nil, with Type identifying it.
type SourceGoogleAnalyticsDataAPIDimensionsFilter struct {
	SourceGoogleAnalyticsDataAPIAndGroup *SourceGoogleAnalyticsDataAPIAndGroup
	SourceGoogleAnalyticsDataAPIOrGroup *SourceGoogleAnalyticsDataAPIOrGroup
	SourceGoogleAnalyticsDataAPINotExpression *SourceGoogleAnalyticsDataAPINotExpression
	SourceGoogleAnalyticsDataAPIFilter *SourceGoogleAnalyticsDataAPIFilter

	Type SourceGoogleAnalyticsDataAPIDimensionsFilterType
}
+
// CreateSourceGoogleAnalyticsDataAPIDimensionsFilterSourceGoogleAnalyticsDataAPIAndGroup
// wraps an andGroup variant in the DimensionsFilter union with the matching Type tag.
func CreateSourceGoogleAnalyticsDataAPIDimensionsFilterSourceGoogleAnalyticsDataAPIAndGroup(sourceGoogleAnalyticsDataAPIAndGroup SourceGoogleAnalyticsDataAPIAndGroup) SourceGoogleAnalyticsDataAPIDimensionsFilter {
	typ := SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPIAndGroup

	return SourceGoogleAnalyticsDataAPIDimensionsFilter{
		SourceGoogleAnalyticsDataAPIAndGroup: &sourceGoogleAnalyticsDataAPIAndGroup,
		Type: typ,
	}
}

// CreateSourceGoogleAnalyticsDataAPIDimensionsFilterSourceGoogleAnalyticsDataAPIOrGroup
// wraps an orGroup variant in the DimensionsFilter union with the matching Type tag.
func CreateSourceGoogleAnalyticsDataAPIDimensionsFilterSourceGoogleAnalyticsDataAPIOrGroup(sourceGoogleAnalyticsDataAPIOrGroup SourceGoogleAnalyticsDataAPIOrGroup) SourceGoogleAnalyticsDataAPIDimensionsFilter {
	typ := SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPIOrGroup

	return SourceGoogleAnalyticsDataAPIDimensionsFilter{
		SourceGoogleAnalyticsDataAPIOrGroup: &sourceGoogleAnalyticsDataAPIOrGroup,
		Type: typ,
	}
}

// CreateSourceGoogleAnalyticsDataAPIDimensionsFilterSourceGoogleAnalyticsDataAPINotExpression
// wraps a notExpression variant in the DimensionsFilter union with the matching Type tag.
func CreateSourceGoogleAnalyticsDataAPIDimensionsFilterSourceGoogleAnalyticsDataAPINotExpression(sourceGoogleAnalyticsDataAPINotExpression SourceGoogleAnalyticsDataAPINotExpression) SourceGoogleAnalyticsDataAPIDimensionsFilter {
	typ := SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPINotExpression

	return SourceGoogleAnalyticsDataAPIDimensionsFilter{
		SourceGoogleAnalyticsDataAPINotExpression: &sourceGoogleAnalyticsDataAPINotExpression,
		Type: typ,
	}
}

// CreateSourceGoogleAnalyticsDataAPIDimensionsFilterSourceGoogleAnalyticsDataAPIFilter
// wraps a filter variant in the DimensionsFilter union with the matching Type tag.
func CreateSourceGoogleAnalyticsDataAPIDimensionsFilterSourceGoogleAnalyticsDataAPIFilter(sourceGoogleAnalyticsDataAPIFilter SourceGoogleAnalyticsDataAPIFilter) SourceGoogleAnalyticsDataAPIDimensionsFilter {
	typ := SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPIFilter

	return SourceGoogleAnalyticsDataAPIDimensionsFilter{
		SourceGoogleAnalyticsDataAPIFilter: &sourceGoogleAnalyticsDataAPIFilter,
		Type: typ,
	}
}
+
// UnmarshalJSON decodes data into the first variant that unmarshals
// cleanly, probing in the fixed order andGroup, orGroup, notExpression,
// filter, and records the winner in u.Type. Each variant's own const-tagged
// filter_type field is what disambiguates the probes.
func (u *SourceGoogleAnalyticsDataAPIDimensionsFilter) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIAndGroup := new(SourceGoogleAnalyticsDataAPIAndGroup)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIAndGroup, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIAndGroup = sourceGoogleAnalyticsDataAPIAndGroup
		u.Type = SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPIAndGroup
		return nil
	}

	sourceGoogleAnalyticsDataAPIOrGroup := new(SourceGoogleAnalyticsDataAPIOrGroup)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIOrGroup, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIOrGroup = sourceGoogleAnalyticsDataAPIOrGroup
		u.Type = SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPIOrGroup
		return nil
	}

	sourceGoogleAnalyticsDataAPINotExpression := new(SourceGoogleAnalyticsDataAPINotExpression)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPINotExpression, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPINotExpression = sourceGoogleAnalyticsDataAPINotExpression
		u.Type = SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPINotExpression
		return nil
	}

	sourceGoogleAnalyticsDataAPIFilter := new(SourceGoogleAnalyticsDataAPIFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIFilter = sourceGoogleAnalyticsDataAPIFilter
		u.Type = SourceGoogleAnalyticsDataAPIDimensionsFilterTypeSourceGoogleAnalyticsDataAPIFilter
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}
+
// MarshalJSON serializes the first non-nil variant pointer in declaration
// order (andGroup, orGroup, notExpression, filter); it errors when no
// variant is set.
func (u SourceGoogleAnalyticsDataAPIDimensionsFilter) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPIAndGroup != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIAndGroup, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPIOrGroup != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIOrGroup, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPINotExpression != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPINotExpression, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPIFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIFilter, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName
// is the discriminator for the between metric filter; "betweenFilter" is
// its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName = "betweenFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName {
	out := e
	return &out
}

// UnmarshalJSON accepts only the JSON string "betweenFilter"; anything else
// is rejected and e is left unchanged.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	if decoded != "betweenFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName: %v", decoded)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName(decoded)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType
// is the discriminator for the double-value metric literal; "doubleValue"
// is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType {
	out := e
	return &out
}

// UnmarshalJSON accepts only the JSON string "doubleValue"; anything else
// is rejected and e is left unchanged.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	if decoded != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType: %v", decoded)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType(decoded)
	return nil
}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue wraps a
+// float64 metric value; the unexported valueType field is pinned to "doubleValue" via its const tag.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes via utils.MarshalJSON so the const-tagged value_type field is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the shared utils helper (const-tag handling lives there).
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns the wrapped value, or 0.0 when the receiver is nil.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+// GetValueType always reports the fixed "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType is the
+// "value_type" discriminator for the int64Value wrapper; "int64Value" is its only accepted value.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to e, convenient for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType {
+ return &e
+}
+
+// UnmarshalJSON decodes a JSON string and rejects any value other than "int64Value".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "int64Value":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value wraps an
+// int64 metric value carried as a JSON string; valueType is pinned to "int64Value" via its const tag.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON serializes via utils.MarshalJSON so the const-tagged value_type field is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the shared utils helper (const-tag handling lives there).
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns the wrapped value, or "" when the receiver is nil.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+// GetValueType always reports the fixed "int64Value" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPISchemasFromValueType names the variants of the FromValue union below.
+type SourceGoogleAnalyticsDataAPISchemasFromValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value SourceGoogleAnalyticsDataAPISchemasFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_int64Value"
+ SourceGoogleAnalyticsDataAPISchemasFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPISchemasFromValue is a oneOf union: exactly one of the variant pointers is
+// expected to be set, and Type records which.
+type SourceGoogleAnalyticsDataAPISchemasFromValue struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue
+
+ Type SourceGoogleAnalyticsDataAPISchemasFromValueType
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value
+// builds the union around an int64Value variant.
+func CreateSourceGoogleAnalyticsDataAPISchemasFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value) SourceGoogleAnalyticsDataAPISchemasFromValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue
+// builds the union around a doubleValue variant.
+func CreateSourceGoogleAnalyticsDataAPISchemasFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue) SourceGoogleAnalyticsDataAPISchemasFromValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON tries each variant in order (int64Value first) and keeps the first that decodes
+// without error; it fails only when no variant matches.
+func (u *SourceGoogleAnalyticsDataAPISchemasFromValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPISchemasFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPISchemasFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is non-nil; it errors when both are nil.
+func (u SourceGoogleAnalyticsDataAPISchemasFromValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType
+// is the "value_type" discriminator for the betweenFilter's doubleValue bound; "doubleValue" only.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to e, convenient for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType {
+ return &e
+}
+
+// UnmarshalJSON decodes a JSON string and rejects any value other than "doubleValue".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "doubleValue":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue wraps a
+// float64 bound for the betweenFilter; valueType is pinned to "doubleValue" via its const tag.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes via utils.MarshalJSON so the const-tagged value_type field is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the shared utils helper (const-tag handling lives there).
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns the wrapped value, or 0.0 when the receiver is nil.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+// GetValueType always reports the fixed "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType
+// is the "value_type" discriminator for the betweenFilter's int64Value bound; "int64Value" only.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to e, convenient for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType {
+ return &e
+}
+
+// UnmarshalJSON decodes a JSON string and rejects any value other than "int64Value".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "int64Value":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value wraps an
+// int64 bound carried as a JSON string; valueType is pinned to "int64Value" via its const tag.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON serializes via utils.MarshalJSON so the const-tagged value_type field is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the shared utils helper (const-tag handling lives there).
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns the wrapped value, or "" when the receiver is nil.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+// GetValueType always reports the fixed "int64Value" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPISchemasToValueType names the variants of the ToValue union below.
+type SourceGoogleAnalyticsDataAPISchemasToValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value SourceGoogleAnalyticsDataAPISchemasToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_4_int64Value"
+ SourceGoogleAnalyticsDataAPISchemasToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue SourceGoogleAnalyticsDataAPISchemasToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_4_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPISchemasToValue is a oneOf union: exactly one of the variant pointers is
+// expected to be set, and Type records which.
+type SourceGoogleAnalyticsDataAPISchemasToValue struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue
+
+ Type SourceGoogleAnalyticsDataAPISchemasToValueType
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value
+// builds the union around an int64Value variant.
+func CreateSourceGoogleAnalyticsDataAPISchemasToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value) SourceGoogleAnalyticsDataAPISchemasToValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue
+// builds the union around a doubleValue variant.
+func CreateSourceGoogleAnalyticsDataAPISchemasToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue) SourceGoogleAnalyticsDataAPISchemasToValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON tries each variant in order (int64Value first) and keeps the first that decodes
+// without error; it fails only when no variant matches.
+func (u *SourceGoogleAnalyticsDataAPISchemasToValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value
+ u.Type = SourceGoogleAnalyticsDataAPISchemasToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPISchemasToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is non-nil; it errors when both are nil.
+func (u SourceGoogleAnalyticsDataAPISchemasToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4Int64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4DoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPISchemasBetweenFilter is a metric filter selecting values between
+// FromValue and ToValue; filter_name is pinned to "betweenFilter" via the const tag.
+type SourceGoogleAnalyticsDataAPISchemasBetweenFilter struct {
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName `const:"betweenFilter" json:"filter_name"`
+ FromValue SourceGoogleAnalyticsDataAPISchemasFromValue `json:"fromValue"`
+ ToValue SourceGoogleAnalyticsDataAPISchemasToValue `json:"toValue"`
+}
+
+// MarshalJSON serializes via utils.MarshalJSON so the const-tagged filter_name field is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasBetweenFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the shared utils helper (const-tag handling lives there).
+func (s *SourceGoogleAnalyticsDataAPISchemasBetweenFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetFilterName always reports the fixed "betweenFilter" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterNameBetweenFilter
+}
+
+// GetFromValue returns the lower bound, or a zero union when the receiver is nil.
+func (o *SourceGoogleAnalyticsDataAPISchemasBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPISchemasFromValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasFromValue{}
+ }
+ return o.FromValue
+}
+
+// GetToValue returns the upper bound, or a zero union when the receiver is nil.
+func (o *SourceGoogleAnalyticsDataAPISchemasBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPISchemasToValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasToValue{}
+ }
+ return o.ToValue
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName is the
+// JSON discriminator ("filter_name") for the numericFilter variant; "numericFilter" is its only
+// accepted value.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterNameNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName = "numericFilter"
+)
+
+// ToPointer returns a pointer to e, convenient for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName {
+ return &e
+}
+
+// UnmarshalJSON decodes a JSON string and rejects any value other than "numericFilter".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "numericFilter":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums enumerates the numeric
+// comparison operations accepted by the numericFilter: OPERATION_UNSPECIFIED, EQUAL, LESS_THAN,
+// LESS_THAN_OR_EQUAL, GREATER_THAN, GREATER_THAN_OR_EQUAL.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums = "OPERATION_UNSPECIFIED"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnumsEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums = "EQUAL"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnumsLessThan SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums = "LESS_THAN"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnumsLessThanOrEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums = "LESS_THAN_OR_EQUAL"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnumsGreaterThan SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums = "GREATER_THAN"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnumsGreaterThanOrEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums = "GREATER_THAN_OR_EQUAL"
+)
+
+// ToPointer returns a pointer to e, convenient for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums {
+ return &e
+}
+
+// UnmarshalJSON decodes a JSON string and rejects anything outside the enumerated operations.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "OPERATION_UNSPECIFIED":
+ fallthrough
+ case "EQUAL":
+ fallthrough
+ case "LESS_THAN":
+ fallthrough
+ case "LESS_THAN_OR_EQUAL":
+ fallthrough
+ case "GREATER_THAN":
+ fallthrough
+ case "GREATER_THAN_OR_EQUAL":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType is the
+// "value_type" discriminator for the numericFilter's doubleValue operand; "doubleValue" only.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to e, convenient for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType {
+ return &e
+}
+
+// UnmarshalJSON decodes a JSON string and rejects any value other than "doubleValue".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "doubleValue":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue wraps a float64
+// operand for the numericFilter; valueType is pinned to "doubleValue" via its const tag.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes via utils.MarshalJSON so the const-tagged value_type field is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the shared utils helper (const-tag handling lives there).
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns the wrapped value, or 0.0 when the receiver is nil.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+// GetValueType always reports the fixed "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType is the "value_type"
+// discriminator for the numericFilter's int64Value operand; "int64Value" only.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to e, convenient for populating optional fields.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType {
+ return &e
+}
+
+// UnmarshalJSON decodes a JSON string and rejects any value other than "int64Value".
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "int64Value":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value wraps an int64 operand
+// carried as a JSON string; valueType is pinned to "int64Value" via its const tag.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON serializes via utils.MarshalJSON so the const-tagged value_type field is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the shared utils helper (const-tag handling lives there).
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetValue returns the wrapped value, or "" when the receiver is nil.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+// GetValueType always reports the fixed "int64Value" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPISchemasValueUnionType names the variants of the Value union below.
+type SourceGoogleAnalyticsDataAPISchemasValueUnionType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value SourceGoogleAnalyticsDataAPISchemasValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_int64Value"
+ SourceGoogleAnalyticsDataAPISchemasValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPISchemasValue is a oneOf union: exactly one of the variant pointers is
+// expected to be set, and Type records which.
+type SourceGoogleAnalyticsDataAPISchemasValue struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue
+
+ Type SourceGoogleAnalyticsDataAPISchemasValueUnionType
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value
+// builds the union around an int64Value variant.
+func CreateSourceGoogleAnalyticsDataAPISchemasValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value) SourceGoogleAnalyticsDataAPISchemasValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue
+// builds the union around a doubleValue variant.
+func CreateSourceGoogleAnalyticsDataAPISchemasValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue) SourceGoogleAnalyticsDataAPISchemasValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON tries each variant in order (int64Value first) and keeps the first that decodes
+// without error; it fails only when no variant matches.
+func (u *SourceGoogleAnalyticsDataAPISchemasValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPISchemasValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPISchemasValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is non-nil; it errors when both are nil.
+func (u SourceGoogleAnalyticsDataAPISchemasValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPISchemasNumericFilter is a metric filter that
+// compares a metric against a single numeric value. The unexported filterName
+// field is pinned to "numericFilter" through the `const` tag, which the utils
+// (un)marshal helpers enforce.
+type SourceGoogleAnalyticsDataAPISchemasNumericFilter struct {
+	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName `const:"numericFilter" json:"filter_name"`
+	Operation []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums `json:"operation"`
+	Value SourceGoogleAnalyticsDataAPISchemasValue `json:"value"`
+}
+
+// MarshalJSON serializes the filter via the utils helper so the const-tagged
+// filter_name discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasNumericFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via the utils helper, validating the const-tagged
+// filter_name discriminator.
+func (s *SourceGoogleAnalyticsDataAPISchemasNumericFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetFilterName ignores the receiver and always returns the constant
+// "numericFilter" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterNameNumericFilter
+}
+
+// GetOperation returns the configured operations; safe on a nil receiver
+// (returns an empty slice).
+func (o *SourceGoogleAnalyticsDataAPISchemasNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums {
+	if o == nil {
+		return []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums{}
+	}
+	return o.Operation
+}
+
+// GetValue returns the comparison value; safe on a nil receiver (returns the
+// zero union).
+func (o *SourceGoogleAnalyticsDataAPISchemasNumericFilter) GetValue() SourceGoogleAnalyticsDataAPISchemasValue {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPISchemasValue{}
+	}
+	return o.Value
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName
+// is the discriminator for the in-list metric filter; "inListFilter" is its
+// only legal value.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterNameInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName = "inListFilter"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only the known enum value and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "inListFilter":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPISchemasInListFilter matches a metric against a
+// list of candidate values, optionally case-sensitively. filterName is pinned
+// to "inListFilter" via the `const` tag.
+type SourceGoogleAnalyticsDataAPISchemasInListFilter struct {
+	CaseSensitive *bool `json:"caseSensitive,omitempty"`
+	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName `const:"inListFilter" json:"filter_name"`
+	Values []string `json:"values"`
+}
+
+// MarshalJSON serializes via the utils helper so the const filter_name
+// discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasInListFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via the utils helper, validating the const
+// discriminator.
+func (s *SourceGoogleAnalyticsDataAPISchemasInListFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetCaseSensitive returns the optional case-sensitivity flag; nil-safe.
+func (o *SourceGoogleAnalyticsDataAPISchemasInListFilter) GetCaseSensitive() *bool {
+	if o == nil {
+		return nil
+	}
+	return o.CaseSensitive
+}
+
+// GetFilterName ignores the receiver and always returns the constant
+// "inListFilter" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterNameInListFilter
+}
+
+// GetValues returns the candidate values; safe on a nil receiver (returns an
+// empty slice).
+func (o *SourceGoogleAnalyticsDataAPISchemasInListFilter) GetValues() []string {
+	if o == nil {
+		return []string{}
+	}
+	return o.Values
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName
+// is the discriminator for the string metric filter; "stringFilter" is its
+// only legal value.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterNameStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName = "stringFilter"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only the known enum value and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "stringFilter":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums enumerates
+// the string-filter match types (exact, prefix/suffix, substring, regexp).
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums = "MATCH_TYPE_UNSPECIFIED"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnumsExact SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums = "EXACT"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnumsBeginsWith SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums = "BEGINS_WITH"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnumsEndsWith SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums = "ENDS_WITH"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnumsContains SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums = "CONTAINS"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnumsFullRegexp SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums = "FULL_REGEXP"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnumsPartialRegexp SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums = "PARTIAL_REGEXP"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums {
+	return &e
+}
+
+// UnmarshalJSON accepts only the enumerated match-type strings; any other
+// value is an error.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "MATCH_TYPE_UNSPECIFIED":
+		fallthrough
+	case "EXACT":
+		fallthrough
+	case "BEGINS_WITH":
+		fallthrough
+	case "ENDS_WITH":
+		fallthrough
+	case "CONTAINS":
+		fallthrough
+	case "FULL_REGEXP":
+		fallthrough
+	case "PARTIAL_REGEXP":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPISchemasStringFilter matches a metric against a
+// single string value with an optional match type and case sensitivity.
+// filterName is pinned to "stringFilter" via the `const` tag.
+type SourceGoogleAnalyticsDataAPISchemasStringFilter struct {
+	CaseSensitive *bool `json:"caseSensitive,omitempty"`
+	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName `const:"stringFilter" json:"filter_name"`
+	MatchType []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums `json:"matchType,omitempty"`
+	Value string `json:"value"`
+}
+
+// MarshalJSON serializes via the utils helper so the const filter_name
+// discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasStringFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via the utils helper, validating the const
+// discriminator.
+func (s *SourceGoogleAnalyticsDataAPISchemasStringFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetCaseSensitive returns the optional case-sensitivity flag; nil-safe.
+func (o *SourceGoogleAnalyticsDataAPISchemasStringFilter) GetCaseSensitive() *bool {
+	if o == nil {
+		return nil
+	}
+	return o.CaseSensitive
+}
+
+// GetFilterName ignores the receiver and always returns the constant
+// "stringFilter" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterNameStringFilter
+}
+
+// GetMatchType returns the optional match types; nil-safe (returns nil).
+func (o *SourceGoogleAnalyticsDataAPISchemasStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums {
+	if o == nil {
+		return nil
+	}
+	return o.MatchType
+}
+
+// GetValue returns the string to match; safe on a nil receiver (returns "").
+func (o *SourceGoogleAnalyticsDataAPISchemasStringFilter) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionType
+// tags which member of the metric-filter union is populated.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionType = "source-google-analytics-data-api_Schemas_stringFilter"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionType = "source-google-analytics-data-api_Schemas_inListFilter"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionType = "source-google-analytics-data-api_Schemas_numericFilter"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionType = "source-google-analytics-data-api_Schemas_betweenFilter"
+)
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter is a
+// oneOf union over the four metric filter shapes; exactly one pointer field is
+// expected to be non-nil, with Type recording which.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter struct {
+	SourceGoogleAnalyticsDataAPISchemasStringFilter *SourceGoogleAnalyticsDataAPISchemasStringFilter
+	SourceGoogleAnalyticsDataAPISchemasInListFilter *SourceGoogleAnalyticsDataAPISchemasInListFilter
+	SourceGoogleAnalyticsDataAPISchemasNumericFilter *SourceGoogleAnalyticsDataAPISchemasNumericFilter
+	SourceGoogleAnalyticsDataAPISchemasBetweenFilter *SourceGoogleAnalyticsDataAPISchemasBetweenFilter
+
+	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionType
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPISchemasStringFilter
+// builds the union holding a string filter.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPISchemasStringFilter(sourceGoogleAnalyticsDataAPISchemasStringFilter SourceGoogleAnalyticsDataAPISchemasStringFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasStringFilter
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter{
+		SourceGoogleAnalyticsDataAPISchemasStringFilter: &sourceGoogleAnalyticsDataAPISchemasStringFilter,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPISchemasInListFilter
+// builds the union holding an in-list filter.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPISchemasInListFilter(sourceGoogleAnalyticsDataAPISchemasInListFilter SourceGoogleAnalyticsDataAPISchemasInListFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasInListFilter
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter{
+		SourceGoogleAnalyticsDataAPISchemasInListFilter: &sourceGoogleAnalyticsDataAPISchemasInListFilter,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPISchemasNumericFilter
+// builds the union holding a numeric filter.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPISchemasNumericFilter(sourceGoogleAnalyticsDataAPISchemasNumericFilter SourceGoogleAnalyticsDataAPISchemasNumericFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasNumericFilter
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter{
+		SourceGoogleAnalyticsDataAPISchemasNumericFilter: &sourceGoogleAnalyticsDataAPISchemasNumericFilter,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPISchemasBetweenFilter
+// builds the union holding a between filter.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPISchemasBetweenFilter(sourceGoogleAnalyticsDataAPISchemasBetweenFilter SourceGoogleAnalyticsDataAPISchemasBetweenFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasBetweenFilter
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter{
+		SourceGoogleAnalyticsDataAPISchemasBetweenFilter: &sourceGoogleAnalyticsDataAPISchemasBetweenFilter,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON probes the union members in order: inList, numeric, between,
+// and string last. NOTE(review): the string filter appears to be tried last
+// presumably because its shape is the most permissive; the const-tagged
+// filter_name discriminator is what actually disambiguates — confirm against
+// utils.UnmarshalJSON's strict-matching behavior.
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPISchemasInListFilter := new(SourceGoogleAnalyticsDataAPISchemasInListFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasInListFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasInListFilter = sourceGoogleAnalyticsDataAPISchemasInListFilter
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasInListFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPISchemasNumericFilter := new(SourceGoogleAnalyticsDataAPISchemasNumericFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasNumericFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasNumericFilter = sourceGoogleAnalyticsDataAPISchemasNumericFilter
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasNumericFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPISchemasBetweenFilter := new(SourceGoogleAnalyticsDataAPISchemasBetweenFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasBetweenFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasBetweenFilter = sourceGoogleAnalyticsDataAPISchemasBetweenFilter
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasBetweenFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPISchemasStringFilter := new(SourceGoogleAnalyticsDataAPISchemasStringFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasStringFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasStringFilter = sourceGoogleAnalyticsDataAPISchemasStringFilter
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasStringFilter
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes the first non-nil union member (string, inList,
+// numeric, between, in that order) and errors when all are nil.
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPISchemasStringFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasStringFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPISchemasInListFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasInListFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPISchemasNumericFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasNumericFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPISchemasBetweenFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasBetweenFilter, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType
+// is the discriminator for the primitive-filter wrapper; "filter" is its only
+// legal value.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterTypeFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType = "filter"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the known enum value and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "filter":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPISchemasFilter - A primitive filter. In the same
+// FilterExpression, all of the filter's field names need to be either all
+// dimensions or all metrics.
+type SourceGoogleAnalyticsDataAPISchemasFilter struct {
+	FieldName string `json:"field_name"`
+	Filter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter `json:"filter"`
+	filterType *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType `const:"filter" json:"filter_type,omitempty"`
+}
+
+// MarshalJSON serializes via the utils helper so the const filter_type
+// discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via the utils helper, validating the const
+// discriminator.
+func (s *SourceGoogleAnalyticsDataAPISchemasFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetFieldName returns the filtered field name; safe on a nil receiver
+// (returns "").
+func (o *SourceGoogleAnalyticsDataAPISchemasFilter) GetFieldName() string {
+	if o == nil {
+		return ""
+	}
+	return o.FieldName
+}
+
+// GetFilter returns the filter union; safe on a nil receiver (returns the
+// zero union).
+func (o *SourceGoogleAnalyticsDataAPISchemasFilter) GetFilter() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter{}
+	}
+	return o.Filter
+}
+
+// GetFilterType ignores the receiver and always returns a pointer to the
+// constant "filter" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasFilter) GetFilterType() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterTypeFilter.ToPointer()
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName
+// is the discriminator for the between filter; "betweenFilter" is its only
+// legal value.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName = "betweenFilter"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only the known enum value and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "betweenFilter":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType
+// is the discriminator for the double-valued numeric literal; "doubleValue" is
+// its only legal value.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the known enum value and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
+// is a float64 numeric literal; valueType is pinned to "doubleValue" via the
+// `const` tag.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue struct {
+	Value float64 `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes via the utils helper so the const value_type
+// discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via the utils helper, validating the const
+// discriminator.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the numeric value; safe on a nil receiver (returns 0.0).
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType ignores the receiver and always returns the constant
+// "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType
+// is the discriminator for the int64-valued numeric literal; "int64Value" is
+// its only legal value.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the known enum value and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
+// is an int64 numeric literal carried as a JSON string (the customary wire
+// form for 64-bit integers, which do not fit losslessly in a JSON number).
+// valueType is pinned to "int64Value" via the `const` tag.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value struct {
+	Value string `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON serializes via the utils helper so the const value_type
+// discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via the utils helper, validating the const
+// discriminator.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the string-encoded integer; safe on a nil receiver
+// (returns "").
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType ignores the receiver and always returns the constant
+// "int64Value" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueType
+// tags which member of the between-filter from-value union is populated.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_3_expression_int64Value"
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_3_expression_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue
+// is a oneOf union over the int64 and double numeric literals; exactly one
+// pointer field is expected to be non-nil, with Type recording which.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue struct {
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
+
+	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueType
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
+// builds the union holding an int64 literal.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
+// builds the union holding a double literal.
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue {
+	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
+
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue{
+		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON probes the int64 variant first, then the double variant, and
+// keeps the first that decodes cleanly.
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
+		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever union member is non-nil (int64 variant
+// takes precedence) and errors when no member is set.
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType
+// is the discriminator for the between filter's double-valued to-value;
+// "doubleValue" is its only legal value.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the known enum value and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue
+// is a float64 literal for the between filter's to-value; valueType is pinned
+// to "doubleValue" via the `const` tag.
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue struct {
+	Value float64 `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes via the utils helper so the const value_type
+// discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON decodes via the utils helper, validating the const
+// discriminator.
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the numeric value; safe on a nil receiver (returns 0.0).
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType ignores the receiver and always returns the constant
+// "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType {
+	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType is the
// wire discriminator for the betweenFilter int64Value variant; "int64Value" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType string

const (
	// The sole accepted discriminator value for this variant.
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "int64Value".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "int64Value":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value is the
// int64Value variant of a betweenFilter bound. Per the GA4 wire format the integer is carried as a string.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value struct {
	Value string `json:"value"`
	// valueType is unexported; its `const` tag fixes the wire value to "int64Value".
	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType `const:"int64Value" json:"value_type"`
}

// MarshalJSON serializes s via the SDK's reflection-based marshaller.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON populates s via utils.UnmarshalJSON; the flag semantics are defined in the utils package (not visible here).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns the wrapped string-encoded integer, or "" when the receiver is nil.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}

// GetValueType always reports the constant "int64Value" discriminator.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueTypeInt64Value
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueType identifies which variant a
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue union currently holds.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_3_expression_filter_int64Value"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_3_expression_filter_doubleValue"
)

// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue is a oneOf wrapper for the
// betweenFilter "toValue" bound: exactly one of the two variant pointers is expected to be non-nil, and Type records which.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue struct {
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue

	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueType
}

// Create…ExpressionFilterInt64Value builds a ToValue union holding the int64Value variant, with Type set accordingly.
func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue {
	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value

	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue{
		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value,
		Type: typ,
	}
}

// Create…ExpressionFilterDoubleValue builds a ToValue union holding the doubleValue variant, with Type set accordingly.
func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue {
	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue

	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue{
		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue,
		Type: typ,
	}
}

// UnmarshalJSON tries each variant in declaration order (int64Value first) and keeps the first that decodes cleanly.
// NOTE(review): first-match ordering means inputs valid for both variants resolve to int64Value — ordering appears deliberate (generated).
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}

// MarshalJSON emits whichever variant pointer is non-nil (int64Value wins if both are set); errors when neither is set.
func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter models a GA4 metric
// betweenFilter: an inclusive numeric range with lower (FromValue) and upper (ToValue) bounds.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter struct {
	// filterName is unexported; its `const` tag fixes the wire value to "betweenFilter".
	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName `const:"betweenFilter" json:"filter_name"`
	FromValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue `json:"fromValue"`
	ToValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue `json:"toValue"`
}

// MarshalJSON serializes s via the SDK's reflection-based marshaller.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON populates s via utils.UnmarshalJSON; the flag semantics are defined in the utils package (not visible here).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetFilterName always reports the constant "betweenFilter" discriminator.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterNameBetweenFilter
}

// GetFromValue returns the lower bound, or its zero value when the receiver is nil.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue{}
	}
	return o.FromValue
}

// GetToValue returns the upper bound, or its zero value when the receiver is nil.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue{}
	}
	return o.ToValue
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName is the wire
// discriminator for the numericFilter expression; "numericFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName string

const (
	// The sole accepted discriminator value for this filter shape.
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterNameNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName = "numericFilter"
)

// ToPointer returns a pointer to a copy of e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "numericFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "numericFilter":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums enumerates the
// comparison operations a numericFilter accepts on the wire.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "OPERATION_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "EQUAL"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsLessThan SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "LESS_THAN"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsLessThanOrEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "LESS_THAN_OR_EQUAL"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsGreaterThan SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "GREATER_THAN"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsGreaterThanOrEqual SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "GREATER_THAN_OR_EQUAL"
)

// ToPointer returns a pointer to a copy of e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string and accepts only one of the known operation names.
// A single multi-value case replaces the original fallthrough ladder; behavior is identical.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch raw {
	case "OPERATION_UNSPECIFIED",
		"EQUAL",
		"LESS_THAN",
		"LESS_THAN_OR_EQUAL",
		"GREATER_THAN",
		"GREATER_THAN_OR_EQUAL":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums: %v", raw)
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType is the wire
// discriminator for the numericFilter doubleValue variant; "doubleValue" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType string

const (
	// The sole accepted discriminator value for this variant.
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "doubleValue".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "doubleValue":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue is the doubleValue variant
// of a numericFilter comparison value: a float payload with a fixed "doubleValue" type discriminator.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue struct {
	Value float64 `json:"value"`
	// valueType is unexported; its `const` tag fixes the wire value to "doubleValue".
	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType `const:"doubleValue" json:"value_type"`
}

// MarshalJSON serializes s via the SDK's reflection-based marshaller.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON populates s via utils.UnmarshalJSON; the flag semantics are defined in the utils package (not visible here).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns the wrapped float, or 0.0 when the receiver is nil.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue) GetValue() float64 {
	if o == nil {
		return 0.0
	}
	return o.Value
}

// GetValueType always reports the constant "doubleValue" discriminator.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueTypeDoubleValue
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType is the wire discriminator
// for the numericFilter int64Value variant; "int64Value" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType string

const (
	// The sole accepted discriminator value for this variant.
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "int64Value".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "int64Value":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value is the int64Value variant
// of a numericFilter comparison value. Per the GA4 wire format the integer is carried as a string.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value struct {
	Value string `json:"value"`
	// valueType is unexported; its `const` tag fixes the wire value to "int64Value".
	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType `const:"int64Value" json:"value_type"`
}

// MarshalJSON serializes s via the SDK's reflection-based marshaller.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON populates s via utils.UnmarshalJSON; the flag semantics are defined in the utils package (not visible here).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns the wrapped string-encoded integer, or "" when the receiver is nil.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}

// GetValueType always reports the constant "int64Value" discriminator.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueTypeInt64Value
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionType identifies which variant
// a SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value union currently holds.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_3_int64Value"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_3_doubleValue"
)

// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value is a oneOf wrapper for the
// numericFilter comparison value: exactly one of the two variant pointers is expected to be non-nil, and Type records which.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value struct {
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue

	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionType
}

// Create…Int64Value builds a Value union holding the int64Value variant, with Type set accordingly.
func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value {
	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value

	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value{
		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value,
		Type: typ,
	}
}

// Create…DoubleValue builds a Value union holding the doubleValue variant, with Type set accordingly.
func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value {
	typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue

	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value{
		SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue,
		Type: typ,
	}
}

// UnmarshalJSON tries each variant in declaration order (int64Value first) and keeps the first that decodes cleanly.
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}

// MarshalJSON emits whichever variant pointer is non-nil (int64Value wins if both are set); errors when neither is set.
func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter models a GA4 metric
// numericFilter: a comparison operation applied against a single numeric value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter struct {
	// filterName is unexported; its `const` tag fixes the wire value to "numericFilter".
	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName `const:"numericFilter" json:"filter_name"`
	Operation []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums `json:"operation"`
	Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value `json:"value"`
}

// MarshalJSON serializes s via the SDK's reflection-based marshaller.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON populates s via utils.UnmarshalJSON; the flag semantics are defined in the utils package (not visible here).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetFilterName always reports the constant "numericFilter" discriminator.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterNameNumericFilter
}

// GetOperation returns the comparison operations, or an empty (non-nil) slice when the receiver is nil.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums {
	if o == nil {
		return []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums{}
	}
	return o.Operation
}

// GetValue returns the comparison value union, or its zero value when the receiver is nil.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) GetValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value {
	if o == nil {
		return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value{}
	}
	return o.Value
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName is the wire
// discriminator for the inListFilter expression; "inListFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName string

const (
	// The sole accepted discriminator value for this filter shape.
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterNameInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName = "inListFilter"
)

// ToPointer returns a pointer to a copy of e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "inListFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "inListFilter":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter models a GA4 inListFilter:
// matches when the metric value is one of Values, optionally case-sensitively.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter struct {
	CaseSensitive *bool `json:"caseSensitive,omitempty"`
	// filterName is unexported; its `const` tag fixes the wire value to "inListFilter".
	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName `const:"inListFilter" json:"filter_name"`
	Values []string `json:"values"`
}

// MarshalJSON serializes s via the SDK's reflection-based marshaller.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON populates s via utils.UnmarshalJSON; the flag semantics are defined in the utils package (not visible here).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetCaseSensitive returns the optional case-sensitivity flag (nil when unset or on a nil receiver).
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) GetCaseSensitive() *bool {
	if o == nil {
		return nil
	}
	return o.CaseSensitive
}

// GetFilterName always reports the constant "inListFilter" discriminator.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName {
	return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterNameInListFilter
}

// GetValues returns the candidate values, or an empty (non-nil) slice when the receiver is nil.
func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) GetValues() []string {
	if o == nil {
		return []string{}
	}
	return o.Values
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName is the wire discriminator
// for the stringFilter expression; "stringFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName string

const (
	// The sole accepted discriminator value for this filter shape.
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterNameStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName = "stringFilter"
)

// ToPointer returns a pointer to a copy of e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "stringFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "stringFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums
// enumerates the string-filter match types accepted by the API.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "MATCH_TYPE_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsExact                SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "EXACT"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsBeginsWith           SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "BEGINS_WITH"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsEndsWith             SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "ENDS_WITH"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsContains             SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "CONTAINS"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsFullRegexp           SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "FULL_REGEXP"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsPartialRegexp        SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "PARTIAL_REGEXP"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums {
	return &e
}

// UnmarshalJSON accepts exactly the enum's declared string values and
// rejects anything else.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "MATCH_TYPE_UNSPECIFIED",
		"EXACT",
		"BEGINS_WITH",
		"ENDS_WITH",
		"CONTAINS",
		"FULL_REGEXP",
		"PARTIAL_REGEXP":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter
// is the stringFilter variant of the metric-filter union.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter struct {
	// CaseSensitive optionally toggles case-sensitive matching.
	CaseSensitive *bool `json:"caseSensitive,omitempty"`
	// filterName is the const discriminator ("stringFilter"); unexported, so
	// callers read it via GetFilterName and the utils marshaller emits it.
	filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName `const:"stringFilter" json:"filter_name"`
	// MatchType optionally lists the match modes (EXACT, CONTAINS, ...).
	MatchType []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums `json:"matchType,omitempty"`
	// Value is the required string to match against.
	Value string `json:"value"`
}

// MarshalJSON serializes the filter through the SDK's shared utils
// marshaller, which also emits the `const:`-tagged discriminator.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes the filter through the SDK's shared utils
// unmarshaller (flag semantics defined by the project's utils package).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterNameStringFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums {
+ if o == nil {
+ return nil
+ }
+ return o.MatchType
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType
// tags which variant of the metric-filter union is populated.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_3_stringFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_3_inListFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_3_numericFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_3_betweenFilter"
)

// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter
// is a one-of union over the four filter variants; exactly one pointer field
// is expected to be non-nil, with Type recording which one.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter struct {
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter

	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType
}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes the metric-filter union by probing each variant in a
// fixed order and keeping the first one that decodes without error.
//
// NOTE(review): the probe order (inList -> numeric -> between -> string) is
// emitted by the code generator; presumably variants with stricter required
// fields are tried before looser shapes so they cannot be shadowed — confirm
// against the generator before reordering by hand.
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter
		return nil
	}

	// No variant matched the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
// MarshalJSON serializes whichever union variant is populated, checking the
// pointer fields in declaration order (string, inList, numeric, between) and
// emitting the first non-nil one. Errors if no variant is set.
func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Expression
// pairs a metric field name with the filter to apply to it.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Expression struct {
	// FieldName names the metric being filtered; required.
	FieldName string `json:"field_name"`
	// Filter is the one-of filter union applied to FieldName; required.
	Filter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter `json:"filter"`
}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Expression) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Expression) GetFilter() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{}
+ }
+ return o.Filter
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType
// is the discriminator for the notExpression filter; "notExpression" is its
// only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterTypeNotExpression SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType = "notExpression"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType {
	return &e
}

// UnmarshalJSON accepts only the JSON string "notExpression".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "notExpression" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType(v)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasNotExpression - The FilterExpression is NOT of notExpression.
type SourceGoogleAnalyticsDataAPISchemasNotExpression struct {
	// Expression is the inner expression being negated; optional.
	Expression *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Expression `json:"expression,omitempty"`
	// filterType is the const discriminator ("notExpression"); unexported, so
	// callers read it via GetFilterType and the utils marshaller emits it.
	filterType *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType `const:"notExpression" json:"filter_type,omitempty"`
}

// MarshalJSON serializes the expression through the SDK's shared utils
// marshaller, which also emits the `const:`-tagged discriminator.
func (s SourceGoogleAnalyticsDataAPISchemasNotExpression) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes the expression through the SDK's shared utils
// unmarshaller (flag semantics defined by the project's utils package).
func (s *SourceGoogleAnalyticsDataAPISchemasNotExpression) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasNotExpression) GetExpression() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Expression {
+ if o == nil {
+ return nil
+ }
+ return o.Expression
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasNotExpression) GetFilterType() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterTypeNotExpression.ToPointer()
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName
// is the discriminator for the betweenFilter variant; "betweenFilter" is its
// only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName = "betweenFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName {
	return &e
}

// UnmarshalJSON accepts only the JSON string "betweenFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "betweenFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName(v)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType
// is the discriminator for the doubleValue variant; "doubleValue" is its
// only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType {
	return &e
}

// UnmarshalJSON accepts only the JSON string "doubleValue".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType(v)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
// holds a float64 comparison value; valueType is the const discriminator
// ("doubleValue") handled by the utils (un)marshaller.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue struct {
	// Value is the required numeric value.
	Value float64 `json:"value"`
	// valueType is fixed to "doubleValue"; unexported so callers use GetValueType.
	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType `const:"doubleValue" json:"value_type"`
}

// MarshalJSON serializes the value through the SDK's shared utils marshaller,
// which also emits the `const:`-tagged discriminator.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes the value through the SDK's shared utils
// unmarshaller (flag semantics defined by the project's utils package).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType
// is the discriminator for the int64Value variant; "int64Value" is its only
// legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType {
	return &e
}

// UnmarshalJSON accepts only the JSON string "int64Value".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType(v)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
// holds an int64 comparison value carried as a JSON string (the API's
// convention for 64-bit integers); valueType is the const discriminator.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value struct {
	// Value is the required integer value, encoded as a string.
	Value string `json:"value"`
	// valueType is fixed to "int64Value"; unexported so callers use GetValueType.
	valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType `const:"int64Value" json:"value_type"`
}

// MarshalJSON serializes the value through the SDK's shared utils marshaller,
// which also emits the `const:`-tagged discriminator.
func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes the value through the SDK's shared utils
// unmarshaller (flag semantics defined by the project's utils package).
func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueType
// tags which variant of the from-value union is populated.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value  SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_2_expressions_int64Value"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_2_expressions_doubleValue"
)

// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue
// is a one-of union over int64Value and doubleValue; exactly one pointer
// field is expected to be non-nil, with Type recording which one.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue struct {
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value  *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue

	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueType
}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes the from-value union by probing int64Value first,
// then doubleValue, keeping the first variant that decodes without error.
//
// NOTE(review): probe order is generator-emitted; int64Value (string-typed
// value) is tried before doubleValue (number-typed value) — confirm against
// the generator before reordering by hand.
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
		return nil
	}

	// No variant matched the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
// MarshalJSON serializes whichever from-value variant is populated, checking
// int64Value then doubleValue and emitting the first non-nil one. Errors if
// no variant is set.
func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType
// is the discriminator for the to-value doubleValue variant; "doubleValue"
// is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType {
	return &e
}

// UnmarshalJSON accepts only the JSON string "doubleValue".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType(v)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType
// is the fixed "value_type" discriminator for the int64 variant of a
// between-filter value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType {
	p := e
	return &p
}

// UnmarshalJSON accepts only the literal "int64Value"; any other JSON string
// is rejected with a descriptive error.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw == "int64Value" {
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType: %v", raw)
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueTypeInt64Value
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_2_expressions_filter_int64Value"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_2_expressions_filter_doubleValue"
+)
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
+
+ Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueType
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes the "toValue" union by attempting each supported
// variant in declaration order — int64 first, then double — and keeping the
// first one that decodes cleanly. The variants' own UnmarshalJSON enforces
// the const "value_type" discriminator, which is what disambiguates them.
// NOTE(review): the "", true, true arguments to utils.UnmarshalJSON are
// presumably tag-prefix plus strictness flags — confirm against the utils package.
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue) UnmarshalJSON(data []byte) error {

	// First candidate: the int64 variant (value_type == "int64Value").
	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
		return nil
	}

	// Fallback candidate: the double variant (value_type == "doubleValue").
	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
		return nil
	}

	// No variant accepted the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter struct {
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName `const:"betweenFilter" json:"filter_name"`
+ FromValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue `json:"fromValue"`
+ ToValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue `json:"toValue"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterNameBetweenFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue{}
+ }
+ return o.FromValue
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue{}
+ }
+ return o.ToValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName
// is the fixed "filter_name" discriminator for a numeric metric filter.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterNameNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName = "numericFilter"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName {
	p := e
	return &p
}

// UnmarshalJSON accepts only the literal "numericFilter"; any other JSON
// string is rejected with a descriptive error.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw == "numericFilter" {
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName: %v", raw)
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums
// enumerates the comparison operations accepted by a numeric metric filter.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "OPERATION_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsEqual                SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "EQUAL"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsLessThan             SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "LESS_THAN"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsLessThanOrEqual      SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "LESS_THAN_OR_EQUAL"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsGreaterThan          SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "GREATER_THAN"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsGreaterThanOrEqual   SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "GREATER_THAN_OR_EQUAL"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums {
	p := e
	return &p
}

// UnmarshalJSON accepts exactly the six operation names listed above and
// rejects any other JSON string with a descriptive error.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch raw {
	case "OPERATION_UNSPECIFIED", "EQUAL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "GREATER_THAN", "GREATER_THAN_OR_EQUAL":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums: %v", raw)
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType
// is the fixed "value_type" discriminator for the double numeric-filter value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType {
	p := e
	return &p
}

// UnmarshalJSON accepts only the literal "doubleValue"; any other JSON string
// is rejected with a descriptive error.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw == "doubleValue" {
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType: %v", raw)
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType
// is the fixed "value_type" discriminator for the int64 numeric-filter value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType {
	p := e
	return &p
}

// UnmarshalJSON accepts only the literal "int64Value"; any other JSON string
// is rejected with a descriptive error.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw == "int64Value" {
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType: %v", raw)
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueTypeInt64Value
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_2_int64Value"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_2_doubleValue"
+)
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue
+
+ Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionType
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes the numeric-filter value union by attempting each
// supported variant in declaration order — int64 first, then double — and
// keeping the first one that decodes cleanly. The variants' own UnmarshalJSON
// enforces the const "value_type" discriminator, which is what disambiguates
// them. NOTE(review): the "", true, true arguments to utils.UnmarshalJSON are
// presumably tag-prefix plus strictness flags — confirm against the utils package.
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue) UnmarshalJSON(data []byte) error {

	// First candidate: the int64 variant (value_type == "int64Value").
	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value
		return nil
	}

	// Fallback candidate: the double variant (value_type == "doubleValue").
	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue
		return nil
	}

	// No variant accepted the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter struct {
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName `const:"numericFilter" json:"filter_name"`
+ Operation []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums `json:"operation"`
+ Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterNameNumericFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums {
+ if o == nil {
+ return []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums{}
+ }
+ return o.Operation
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) GetValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue{}
+ }
+ return o.Value
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName
// is the fixed "filter_name" discriminator for an in-list metric filter.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterNameInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName = "inListFilter"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName {
	p := e
	return &p
}

// UnmarshalJSON accepts only the literal "inListFilter"; any other JSON
// string is rejected with a descriptive error.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw == "inListFilter" {
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName: %v", raw)
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName `const:"inListFilter" json:"filter_name"`
+ Values []string `json:"values"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterNameInListFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) GetValues() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Values
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName
// is the fixed "filter_name" discriminator for a string metric filter.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterNameStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName = "stringFilter"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName {
	p := e
	return &p
}

// UnmarshalJSON accepts only the literal "stringFilter"; any other JSON
// string is rejected with a descriptive error.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw == "stringFilter" {
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName: %v", raw)
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums enumerates
// the Google Analytics Data API string-filter match types.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "MATCH_TYPE_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsExact                SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "EXACT"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsBeginsWith           SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "BEGINS_WITH"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsEndsWith             SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "ENDS_WITH"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsContains             SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "CONTAINS"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsFullRegexp           SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "FULL_REGEXP"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsPartialRegexp        SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "PARTIAL_REGEXP"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums {
	return &e
}

// UnmarshalJSON validates that the decoded string is one of the accepted match
// types. The accepted values are listed in a single comma-separated case —
// the idiomatic Go form of the original fallthrough ladder, with identical behavior.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "MATCH_TYPE_UNSPECIFIED",
		"EXACT",
		"BEGINS_WITH",
		"ENDS_WITH",
		"CONTAINS",
		"FULL_REGEXP",
		"PARTIAL_REGEXP":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums: %v", v)
	}
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName `const:"stringFilter" json:"filter_name"`
+ MatchType []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums `json:"matchType,omitempty"`
+ Value string `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterNameStringFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums {
+ if o == nil {
+ return nil
+ }
+ return o.MatchType
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType is the
// discriminator tag recording which variant of the metric-filter union is populated.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_stringFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_inListFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_numericFilter"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_betweenFilter"
)

// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter is a tagged union
// over the four metric-filter variants (string, in-list, numeric, between).
// Exactly one of the variant pointers is expected to be non-nil, as recorded by Type.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter struct {
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter

	// Type records which variant pointer above is populated.
	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType
}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON probes the union's variants in a fixed order — inListFilter,
// numericFilter, betweenFilter, stringFilter — and keeps the first one that the
// SDK helper unmarshals without error, recording the matching Type tag.
// NOTE(review): the probe order is load-bearing — an earlier variant that
// happens to accept the payload shadows later ones. Left byte-identical.
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression struct {
+ FieldName string `json:"field_name"`
+ Filter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter `json:"filter"`
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression) GetFilter() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter{}
+ }
+ return o.Filter
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType is the
// discriminator for the orGroup filter-expression variant; "orGroup" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterTypeOrGroup SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType = "orGroup"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType {
	v := e
	return &v
}

// UnmarshalJSON accepts only the literal JSON string "orGroup".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw == "orGroup" {
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType: %v", raw)
}
+
+// SourceGoogleAnalyticsDataAPISchemasOrGroup - The FilterExpressions in orGroup have an OR relationship.
+type SourceGoogleAnalyticsDataAPISchemasOrGroup struct {
+ Expressions []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression `json:"expressions"`
+ filterType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType `const:"orGroup" json:"filter_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasOrGroup) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasOrGroup) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasOrGroup) GetExpressions() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression {
+ if o == nil {
+ return []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression{}
+ }
+ return o.Expressions
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasOrGroup) GetFilterType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterTypeOrGroup
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName is
// the discriminator for the betweenFilter variant; "betweenFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterNameBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName = "betweenFilter"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName {
	v := e
	return &v
}

// UnmarshalJSON accepts only the literal JSON string "betweenFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw == "betweenFilter" {
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName: %v", raw)
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType is
// the discriminator for the doubleValue variant; "doubleValue" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType {
	v := e
	return &v
}

// UnmarshalJSON accepts only the literal JSON string "doubleValue".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw == "doubleValue" {
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType: %v", raw)
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType is the
// discriminator for the int64Value variant; "int64Value" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType {
	v := e
	return &v
}

// UnmarshalJSON accepts only the literal JSON string "int64Value".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw == "int64Value" {
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType: %v", raw)
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueType is the discriminator
// tag recording which variant of the between-filter "fromValue" union is populated.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_1_expressions_filter_int64Value"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_1_expressions_filter_doubleValue"
)

// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue is a tagged union over the
// two numeric operand encodings (int64-as-string, double). Exactly one of the
// variant pointers is expected to be non-nil, as recorded by Type.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue struct {
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue

	// Type records which variant pointer above is populated.
	Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueType
}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON probes the fromValue union's variants in a fixed order —
// int64Value first, then doubleValue — and keeps the first one that the SDK
// helper unmarshals without error, recording the matching Type tag.
// NOTE(review): probe order is load-bearing; left byte-identical.
func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue
		u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType
// is the discriminator for the doubleValue variant; "doubleValue" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType {
	v := e
	return &v
}

// UnmarshalJSON accepts only the literal JSON string "doubleValue".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw == "doubleValue" {
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType: %v", raw)
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType is the
// const discriminator for the betweenFilter fromValue int64 variant; "int64Value" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType = "int64Value"
)

// ToPointer returns a pointer to e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType {
	return &e
}

// UnmarshalJSON accepts only the literal string "int64Value".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType(raw)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueTypeInt64Value
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_1_int64Value"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_1_doubleValue"
+)
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue
+
+ Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueType
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter struct {
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName `const:"betweenFilter" json:"filter_name"`
+ FromValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue `json:"fromValue"`
+ ToValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue `json:"toValue"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterNameBetweenFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue{}
+ }
+ return o.FromValue
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue{}
+ }
+ return o.ToValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName is the const discriminator
// for the numeric filter variant; "numericFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterNameNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName = "numericFilter"
)

// ToPointer returns a pointer to e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName {
	return &e
}

// UnmarshalJSON accepts only the literal string "numericFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "numericFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums enumerates the comparison
// operations accepted by the numeric metric filter.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "OPERATION_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsEqual                SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "EQUAL"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsLessThan             SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "LESS_THAN"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsLessThanOrEqual      SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "LESS_THAN_OR_EQUAL"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsGreaterThan          SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "GREATER_THAN"
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsGreaterThanOrEqual   SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "GREATER_THAN_OR_EQUAL"
)

// ToPointer returns a pointer to e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums {
	return &e
}

// UnmarshalJSON accepts exactly the six documented operation strings.
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch raw {
	case "OPERATION_UNSPECIFIED", "EQUAL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "GREATER_THAN", "GREATER_THAN_OR_EQUAL":
		*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums(raw)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums: %v", raw)
	}
}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType
// is the const discriminator for the numericFilter double variant; "doubleValue" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueTypeDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType = "doubleValue"
)

// ToPointer returns a pointer to e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType {
	return &e
}

// UnmarshalJSON accepts only the literal string "doubleValue".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType(raw)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType is
// the const discriminator for the numericFilter int64 variant; "int64Value" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueTypeInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType = "int64Value"
)

// ToPointer returns a pointer to e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType {
	return &e
}

// UnmarshalJSON accepts only the literal string "int64Value".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType(raw)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value) GetValueType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueTypeInt64Value
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueUnionType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_1_expressions_int64Value"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueUnionType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_Metrics filter_1_expressions_doubleValue"
+)
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+
+ Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueUnionType
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter struct {
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName `const:"numericFilter" json:"filter_name"`
+ Operation []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums `json:"operation"`
+ Value SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterNameNumericFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums {
+ if o == nil {
+ return []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums{}
+ }
+ return o.Operation
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter) GetValue() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue{}
+ }
+ return o.Value
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName is
// the const discriminator for the in-list filter variant; "inListFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterNameInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName = "inListFilter"
)

// ToPointer returns a pointer to e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName {
	return &e
}

// UnmarshalJSON accepts only the literal string "inListFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "inListFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName(raw)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName `const:"inListFilter" json:"filter_name"`
+ Values []string `json:"values"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterNameInListFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter) GetValues() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Values
+}
+
// SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName is the
// const discriminator for the string filter variant; "stringFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterNameStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName = "stringFilter"
)

// ToPointer returns a pointer to e, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName {
	return &e
}

// UnmarshalJSON accepts only the literal string "stringFilter".
func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "stringFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName(raw)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "MATCH_TYPE_UNSPECIFIED"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsExact SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "EXACT"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsBeginsWith SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "BEGINS_WITH"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsEndsWith SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "ENDS_WITH"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsContains SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "CONTAINS"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsFullRegexp SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "FULL_REGEXP"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsPartialRegexp SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "PARTIAL_REGEXP"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "MATCH_TYPE_UNSPECIFIED":
+ fallthrough
+ case "EXACT":
+ fallthrough
+ case "BEGINS_WITH":
+ fallthrough
+ case "ENDS_WITH":
+ fallthrough
+ case "CONTAINS":
+ fallthrough
+ case "FULL_REGEXP":
+ fallthrough
+ case "PARTIAL_REGEXP":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName `const:"stringFilter" json:"filter_name"`
+ MatchType []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums `json:"matchType,omitempty"`
+ Value string `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterNameStringFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums {
+ if o == nil {
+ return nil
+ }
+ return o.MatchType
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_stringFilter"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_inListFilter"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_numericFilter"
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterType = "source-google-analytics-data-api_Schemas_custom_reports_array_metricFilter_betweenFilter"
+)
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter struct {
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter
+
+ Type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterType
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter(sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter) SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter {
+ typ := SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter
+
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter{
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter: &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter := new(SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter = sourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter
+ u.Type = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterTypeSourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression struct {
+ FieldName string `json:"field_name"`
+ Filter SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter `json:"filter"`
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression) GetFilter() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter{}
+ }
+ return o.Filter
+}
+
+type SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType string
+
+const (
+ SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterTypeAndGroup SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType = "andGroup"
+)
+
+func (e SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType) ToPointer() *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "andGroup":
+ *e = SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPISchemasAndGroup - The FilterExpressions in andGroup have an AND relationship.
+type SourceGoogleAnalyticsDataAPISchemasAndGroup struct {
+ Expressions []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression `json:"expressions"`
+ filterType SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType `const:"andGroup" json:"filter_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPISchemasAndGroup) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPISchemasAndGroup) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasAndGroup) GetExpressions() []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression {
+ if o == nil {
+ return []SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression{}
+ }
+ return o.Expressions
+}
+
+func (o *SourceGoogleAnalyticsDataAPISchemasAndGroup) GetFilterType() SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType {
+ return SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterTypeAndGroup
+}
+
+type SourceGoogleAnalyticsDataAPIMetricsFilterType string
+
+const (
+ SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasAndGroup SourceGoogleAnalyticsDataAPIMetricsFilterType = "source-google-analytics-data-api_Schemas_andGroup"
+ SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasOrGroup SourceGoogleAnalyticsDataAPIMetricsFilterType = "source-google-analytics-data-api_Schemas_orGroup"
+ SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasNotExpression SourceGoogleAnalyticsDataAPIMetricsFilterType = "source-google-analytics-data-api_Schemas_notExpression"
+ SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasFilter SourceGoogleAnalyticsDataAPIMetricsFilterType = "source-google-analytics-data-api_Schemas_filter"
+)
+
+type SourceGoogleAnalyticsDataAPIMetricsFilter struct {
+ SourceGoogleAnalyticsDataAPISchemasAndGroup *SourceGoogleAnalyticsDataAPISchemasAndGroup
+ SourceGoogleAnalyticsDataAPISchemasOrGroup *SourceGoogleAnalyticsDataAPISchemasOrGroup
+ SourceGoogleAnalyticsDataAPISchemasNotExpression *SourceGoogleAnalyticsDataAPISchemasNotExpression
+ SourceGoogleAnalyticsDataAPISchemasFilter *SourceGoogleAnalyticsDataAPISchemasFilter
+
+ Type SourceGoogleAnalyticsDataAPIMetricsFilterType
+}
+
+func CreateSourceGoogleAnalyticsDataAPIMetricsFilterSourceGoogleAnalyticsDataAPISchemasAndGroup(sourceGoogleAnalyticsDataAPISchemasAndGroup SourceGoogleAnalyticsDataAPISchemasAndGroup) SourceGoogleAnalyticsDataAPIMetricsFilter {
+ typ := SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasAndGroup
+
+ return SourceGoogleAnalyticsDataAPIMetricsFilter{
+ SourceGoogleAnalyticsDataAPISchemasAndGroup: &sourceGoogleAnalyticsDataAPISchemasAndGroup,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIMetricsFilterSourceGoogleAnalyticsDataAPISchemasOrGroup(sourceGoogleAnalyticsDataAPISchemasOrGroup SourceGoogleAnalyticsDataAPISchemasOrGroup) SourceGoogleAnalyticsDataAPIMetricsFilter {
+ typ := SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasOrGroup
+
+ return SourceGoogleAnalyticsDataAPIMetricsFilter{
+ SourceGoogleAnalyticsDataAPISchemasOrGroup: &sourceGoogleAnalyticsDataAPISchemasOrGroup,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIMetricsFilterSourceGoogleAnalyticsDataAPISchemasNotExpression(sourceGoogleAnalyticsDataAPISchemasNotExpression SourceGoogleAnalyticsDataAPISchemasNotExpression) SourceGoogleAnalyticsDataAPIMetricsFilter {
+ typ := SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasNotExpression
+
+ return SourceGoogleAnalyticsDataAPIMetricsFilter{
+ SourceGoogleAnalyticsDataAPISchemasNotExpression: &sourceGoogleAnalyticsDataAPISchemasNotExpression,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIMetricsFilterSourceGoogleAnalyticsDataAPISchemasFilter(sourceGoogleAnalyticsDataAPISchemasFilter SourceGoogleAnalyticsDataAPISchemasFilter) SourceGoogleAnalyticsDataAPIMetricsFilter {
+ typ := SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasFilter
+
+ return SourceGoogleAnalyticsDataAPIMetricsFilter{
+ SourceGoogleAnalyticsDataAPISchemasFilter: &sourceGoogleAnalyticsDataAPISchemasFilter,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleAnalyticsDataAPIMetricsFilter) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPISchemasAndGroup := new(SourceGoogleAnalyticsDataAPISchemasAndGroup)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasAndGroup, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasAndGroup = sourceGoogleAnalyticsDataAPISchemasAndGroup
+ u.Type = SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasAndGroup
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasOrGroup := new(SourceGoogleAnalyticsDataAPISchemasOrGroup)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasOrGroup, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasOrGroup = sourceGoogleAnalyticsDataAPISchemasOrGroup
+ u.Type = SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasOrGroup
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasNotExpression := new(SourceGoogleAnalyticsDataAPISchemasNotExpression)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasNotExpression, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasNotExpression = sourceGoogleAnalyticsDataAPISchemasNotExpression
+ u.Type = SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasNotExpression
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPISchemasFilter := new(SourceGoogleAnalyticsDataAPISchemasFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPISchemasFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPISchemasFilter = sourceGoogleAnalyticsDataAPISchemasFilter
+ u.Type = SourceGoogleAnalyticsDataAPIMetricsFilterTypeSourceGoogleAnalyticsDataAPISchemasFilter
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPIMetricsFilter) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPISchemasAndGroup != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasAndGroup, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasOrGroup != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasOrGroup, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasNotExpression != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasNotExpression, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPISchemasFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPISchemasFilter, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPICustomReportConfig struct {
+ // Dimensions filter
+ DimensionFilter *SourceGoogleAnalyticsDataAPIDimensionsFilter `json:"dimensionFilter,omitempty"`
+ // A list of dimensions.
+ Dimensions []string `json:"dimensions"`
+ // Metrics filter
+ MetricFilter *SourceGoogleAnalyticsDataAPIMetricsFilter `json:"metricFilter,omitempty"`
+ // A list of metrics.
+ Metrics []string `json:"metrics"`
+ // The name of the custom report, this name would be used as stream name.
+ Name string `json:"name"`
+}
+
+func (o *SourceGoogleAnalyticsDataAPICustomReportConfig) GetDimensionFilter() *SourceGoogleAnalyticsDataAPIDimensionsFilter {
+ if o == nil {
+ return nil
+ }
+ return o.DimensionFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPICustomReportConfig) GetDimensions() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Dimensions
+}
+
+func (o *SourceGoogleAnalyticsDataAPICustomReportConfig) GetMetricFilter() *SourceGoogleAnalyticsDataAPIMetricsFilter {
+ if o == nil {
+ return nil
+ }
+ return o.MetricFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPICustomReportConfig) GetMetrics() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Metrics
+}
+
+func (o *SourceGoogleAnalyticsDataAPICustomReportConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+type GoogleAnalyticsDataAPI string
+
+const (
+ GoogleAnalyticsDataAPIGoogleAnalyticsDataAPI GoogleAnalyticsDataAPI = "google-analytics-data-api"
+)
+
+func (e GoogleAnalyticsDataAPI) ToPointer() *GoogleAnalyticsDataAPI {
+ return &e
+}
+
+func (e *GoogleAnalyticsDataAPI) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "google-analytics-data-api":
+ *e = GoogleAnalyticsDataAPI(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for GoogleAnalyticsDataAPI: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPI struct {
+ // Credentials for the service
+ Credentials *SourceGoogleAnalyticsDataAPICredentials `json:"credentials,omitempty"`
+ // You can add your Custom Analytics report by creating one.
+ CustomReportsArray []SourceGoogleAnalyticsDataAPICustomReportConfig `json:"custom_reports_array,omitempty"`
+ // The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports.
+ DateRangesStartDate *types.Date `json:"date_ranges_start_date,omitempty"`
+ // A list of your Property IDs. The Property ID is a unique number assigned to each property in Google Analytics, found in your GA4 property URL. This ID allows the connector to track the specific events associated with your property. Refer to the Google Analytics documentation to locate your property ID.
+ PropertyIds []string `json:"property_ids"`
+ sourceType GoogleAnalyticsDataAPI `const:"google-analytics-data-api" json:"sourceType"`
+ // The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. Does not apply to custom Cohort reports. More information is available in the documentation.
+ WindowInDays *int64 `default:"1" json:"window_in_days"`
+}
+
+func (s SourceGoogleAnalyticsDataAPI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPI) GetCredentials() *SourceGoogleAnalyticsDataAPICredentials {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceGoogleAnalyticsDataAPI) GetCustomReportsArray() []SourceGoogleAnalyticsDataAPICustomReportConfig {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReportsArray
+}
+
+func (o *SourceGoogleAnalyticsDataAPI) GetDateRangesStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.DateRangesStartDate
+}
+
+func (o *SourceGoogleAnalyticsDataAPI) GetPropertyIds() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.PropertyIds
+}
+
+func (o *SourceGoogleAnalyticsDataAPI) GetSourceType() GoogleAnalyticsDataAPI {
+ return GoogleAnalyticsDataAPIGoogleAnalyticsDataAPI
+}
+
+func (o *SourceGoogleAnalyticsDataAPI) GetWindowInDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.WindowInDays
}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapicreaterequest.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapicreaterequest.go
old mode 100755
new mode 100644
index 936d00b85..e74548e8c
--- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapicreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapicreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGoogleAnalyticsDataAPICreateRequest struct {
Configuration SourceGoogleAnalyticsDataAPI `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleAnalyticsDataAPICreateRequest) GetConfiguration() SourceGoogleAnalyticsDataAPI {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPI{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleAnalyticsDataAPICreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGoogleAnalyticsDataAPICreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleAnalyticsDataAPICreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGoogleAnalyticsDataAPICreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiputrequest.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiputrequest.go
old mode 100755
new mode 100644
index f037ee121..1666eec08
--- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiputrequest.go
@@ -7,3 +7,24 @@ type SourceGoogleAnalyticsDataAPIPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleAnalyticsDataAPIPutRequest) GetConfiguration() SourceGoogleAnalyticsDataAPIUpdate {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleAnalyticsDataAPIPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleAnalyticsDataAPIPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiupdate.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiupdate.go
old mode 100755
new mode 100644
index 9cbd8b504..fae69b780
--- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiupdate.go
@@ -3,73 +3,95 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthenticationAuthType string
+type SourceGoogleAnalyticsDataAPIUpdateSchemasAuthType string
const (
- SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthenticationAuthTypeService SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthenticationAuthType = "Service"
+ SourceGoogleAnalyticsDataAPIUpdateSchemasAuthTypeService SourceGoogleAnalyticsDataAPIUpdateSchemasAuthType = "Service"
)
-func (e SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthenticationAuthType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthenticationAuthType {
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasAuthType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasAuthType {
return &e
}
-func (e *SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthenticationAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Service":
- *e = SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthenticationAuthType(v)
+ *e = SourceGoogleAnalyticsDataAPIUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthenticationAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasAuthType: %v", v)
}
}
-// SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication - Credentials for the service
-type SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication struct {
- AuthType *SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthenticationAuthType `json:"auth_type,omitempty"`
+// ServiceAccountKeyAuthentication - Credentials for the service
+type ServiceAccountKeyAuthentication struct {
+ authType *SourceGoogleAnalyticsDataAPIUpdateSchemasAuthType `const:"Service" json:"auth_type,omitempty"`
// The JSON key linked to the service account used for authorization. For steps on obtaining this key, refer to the setup guide.
CredentialsJSON string `json:"credentials_json"`
}
-type SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauthAuthType string
+func (s ServiceAccountKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *ServiceAccountKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ServiceAccountKeyAuthentication) GetAuthType() *SourceGoogleAnalyticsDataAPIUpdateSchemasAuthType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasAuthTypeService.ToPointer()
+}
+
+func (o *ServiceAccountKeyAuthentication) GetCredentialsJSON() string {
+ if o == nil {
+ return ""
+ }
+ return o.CredentialsJSON
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateAuthType string
const (
- SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauthAuthTypeClient SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauthAuthType = "Client"
+ SourceGoogleAnalyticsDataAPIUpdateAuthTypeClient SourceGoogleAnalyticsDataAPIUpdateAuthType = "Client"
)
-func (e SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauthAuthType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauthAuthType {
+func (e SourceGoogleAnalyticsDataAPIUpdateAuthType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateAuthType {
return &e
}
-func (e *SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleAnalyticsDataAPIUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauthAuthType(v)
+ *e = SourceGoogleAnalyticsDataAPIUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateAuthType: %v", v)
}
}
-// SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth - Credentials for the service
-type SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth struct {
+// AuthenticateViaGoogleOauth - Credentials for the service
+type AuthenticateViaGoogleOauth struct {
// Access Token for making authenticated requests.
- AccessToken *string `json:"access_token,omitempty"`
- AuthType *SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauthAuthType `json:"auth_type,omitempty"`
+ AccessToken *string `json:"access_token,omitempty"`
+ authType *SourceGoogleAnalyticsDataAPIUpdateAuthType `const:"Client" json:"auth_type,omitempty"`
// The Client ID of your Google Analytics developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Google Analytics developer application.
@@ -78,83 +100,8391 @@ type SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth str
RefreshToken string `json:"refresh_token"`
}
-type SourceGoogleAnalyticsDataAPIUpdateCredentialsType string
+func (a AuthenticateViaGoogleOauth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AuthenticateViaGoogleOauth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AuthenticateViaGoogleOauth) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *AuthenticateViaGoogleOauth) GetAuthType() *SourceGoogleAnalyticsDataAPIUpdateAuthType {
+ return SourceGoogleAnalyticsDataAPIUpdateAuthTypeClient.ToPointer()
+}
+
+func (o *AuthenticateViaGoogleOauth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *AuthenticateViaGoogleOauth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *AuthenticateViaGoogleOauth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+type CredentialsType string
+
+const (
+ CredentialsTypeAuthenticateViaGoogleOauth CredentialsType = "Authenticate via Google (Oauth)"
+ CredentialsTypeServiceAccountKeyAuthentication CredentialsType = "Service Account Key Authentication"
+)
+
+type Credentials struct {
+ AuthenticateViaGoogleOauth *AuthenticateViaGoogleOauth
+ ServiceAccountKeyAuthentication *ServiceAccountKeyAuthentication
+
+ Type CredentialsType
+}
+
+func CreateCredentialsAuthenticateViaGoogleOauth(authenticateViaGoogleOauth AuthenticateViaGoogleOauth) Credentials {
+ typ := CredentialsTypeAuthenticateViaGoogleOauth
+
+ return Credentials{
+ AuthenticateViaGoogleOauth: &authenticateViaGoogleOauth,
+ Type: typ,
+ }
+}
+
+func CreateCredentialsServiceAccountKeyAuthentication(serviceAccountKeyAuthentication ServiceAccountKeyAuthentication) Credentials {
+ typ := CredentialsTypeServiceAccountKeyAuthentication
+
+ return Credentials{
+ ServiceAccountKeyAuthentication: &serviceAccountKeyAuthentication,
+ Type: typ,
+ }
+}
+
+func (u *Credentials) UnmarshalJSON(data []byte) error {
+
+ serviceAccountKeyAuthentication := new(ServiceAccountKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &serviceAccountKeyAuthentication, "", true, true); err == nil {
+ u.ServiceAccountKeyAuthentication = serviceAccountKeyAuthentication
+ u.Type = CredentialsTypeServiceAccountKeyAuthentication
+ return nil
+ }
+
+ authenticateViaGoogleOauth := new(AuthenticateViaGoogleOauth)
+ if err := utils.UnmarshalJSON(data, &authenticateViaGoogleOauth, "", true, true); err == nil {
+ u.AuthenticateViaGoogleOauth = authenticateViaGoogleOauth
+ u.Type = CredentialsTypeAuthenticateViaGoogleOauth
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u Credentials) MarshalJSON() ([]byte, error) {
+ if u.AuthenticateViaGoogleOauth != nil {
+ return utils.MarshalJSON(u.AuthenticateViaGoogleOauth, "", true)
+ }
+
+ if u.ServiceAccountKeyAuthentication != nil {
+ return utils.MarshalJSON(u.ServiceAccountKeyAuthentication, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterName string
+
+const (
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterNameBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterName = "betweenFilter"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterName {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterName) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "betweenFilter":
+ *e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterName(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterName: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueType = "doubleValue"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueType {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "doubleValue":
+ *e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueType: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueTypeDoubleValue
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPIUpdateSchemasValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasValueType = "int64Value"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasValueType {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "int64Value":
+ *e = SourceGoogleAnalyticsDataAPIUpdateSchemasValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasValueType: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasValueTypeInt64Value
+}
+
+type FromValueType string
const (
- SourceGoogleAnalyticsDataAPIUpdateCredentialsTypeSourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth SourceGoogleAnalyticsDataAPIUpdateCredentialsType = "source-google-analytics-data-api-update_Credentials_Authenticate via Google (Oauth)"
- SourceGoogleAnalyticsDataAPIUpdateCredentialsTypeSourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication SourceGoogleAnalyticsDataAPIUpdateCredentialsType = "source-google-analytics-data-api-update_Credentials_Service Account Key Authentication"
+ FromValueTypeSourceGoogleAnalyticsDataAPIUpdateInt64Value FromValueType = "source-google-analytics-data-api-update_int64Value"
+ FromValueTypeSourceGoogleAnalyticsDataAPIUpdateDoubleValue FromValueType = "source-google-analytics-data-api-update_doubleValue"
)
-type SourceGoogleAnalyticsDataAPIUpdateCredentials struct {
- SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth *SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth
- SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication *SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication
+type FromValue struct {
+ SourceGoogleAnalyticsDataAPIUpdateInt64Value *SourceGoogleAnalyticsDataAPIUpdateInt64Value
+ SourceGoogleAnalyticsDataAPIUpdateDoubleValue *SourceGoogleAnalyticsDataAPIUpdateDoubleValue
- Type SourceGoogleAnalyticsDataAPIUpdateCredentialsType
+ Type FromValueType
}
-func CreateSourceGoogleAnalyticsDataAPIUpdateCredentialsSourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth(sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth) SourceGoogleAnalyticsDataAPIUpdateCredentials {
- typ := SourceGoogleAnalyticsDataAPIUpdateCredentialsTypeSourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth
+func CreateFromValueSourceGoogleAnalyticsDataAPIUpdateInt64Value(sourceGoogleAnalyticsDataAPIUpdateInt64Value SourceGoogleAnalyticsDataAPIUpdateInt64Value) FromValue {
+ typ := FromValueTypeSourceGoogleAnalyticsDataAPIUpdateInt64Value
- return SourceGoogleAnalyticsDataAPIUpdateCredentials{
- SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth: &sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth,
+ return FromValue{
+ SourceGoogleAnalyticsDataAPIUpdateInt64Value: &sourceGoogleAnalyticsDataAPIUpdateInt64Value,
Type: typ,
}
}
-func CreateSourceGoogleAnalyticsDataAPIUpdateCredentialsSourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication(sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication) SourceGoogleAnalyticsDataAPIUpdateCredentials {
- typ := SourceGoogleAnalyticsDataAPIUpdateCredentialsTypeSourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication
+func CreateFromValueSourceGoogleAnalyticsDataAPIUpdateDoubleValue(sourceGoogleAnalyticsDataAPIUpdateDoubleValue SourceGoogleAnalyticsDataAPIUpdateDoubleValue) FromValue {
+ typ := FromValueTypeSourceGoogleAnalyticsDataAPIUpdateDoubleValue
- return SourceGoogleAnalyticsDataAPIUpdateCredentials{
- SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication: &sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication,
+ return FromValue{
+ SourceGoogleAnalyticsDataAPIUpdateDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateDoubleValue,
Type: typ,
}
}
-func (u *SourceGoogleAnalyticsDataAPIUpdateCredentials) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *FromValue) UnmarshalJSON(data []byte) error {
- sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication); err == nil {
- u.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication
- u.Type = SourceGoogleAnalyticsDataAPIUpdateCredentialsTypeSourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication
+ sourceGoogleAnalyticsDataAPIUpdateInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateInt64Value = sourceGoogleAnalyticsDataAPIUpdateInt64Value
+ u.Type = FromValueTypeSourceGoogleAnalyticsDataAPIUpdateInt64Value
return nil
}
- sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth); err == nil {
- u.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth
- u.Type = SourceGoogleAnalyticsDataAPIUpdateCredentialsTypeSourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth
+ sourceGoogleAnalyticsDataAPIUpdateDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateDoubleValue = sourceGoogleAnalyticsDataAPIUpdateDoubleValue
+ u.Type = FromValueTypeSourceGoogleAnalyticsDataAPIUpdateDoubleValue
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceGoogleAnalyticsDataAPIUpdateCredentials) MarshalJSON() ([]byte, error) {
- if u.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication != nil {
- return json.Marshal(u.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication)
+func (u FromValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateInt64Value, "", true)
}
- if u.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth != nil {
- return json.Marshal(u.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth)
+ if u.SourceGoogleAnalyticsDataAPIUpdateDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateDoubleValue, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceGoogleAnalyticsDataAPIUpdate struct {
- // Credentials for the service
- Credentials *SourceGoogleAnalyticsDataAPIUpdateCredentials `json:"credentials,omitempty"`
- // A JSON array describing the custom reports you want to sync from Google Analytics. See the documentation for more information about the exact format you can use to fill out this field.
- CustomReports *string `json:"custom_reports,omitempty"`
- // The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports.
- DateRangesStartDate types.Date `json:"date_ranges_start_date"`
- // The Property ID is a unique number assigned to each property in Google Analytics, found in your GA4 property URL. This ID allows the connector to track the specific events associated with your property. Refer to the Google Analytics documentation to locate your property ID.
- PropertyID string `json:"property_id"`
- // The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. Does not apply to custom Cohort reports. More information is available in the documentation.
- WindowInDays *int64 `json:"window_in_days,omitempty"`
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType = "doubleValue"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "doubleValue":
+ *e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValueTypeDoubleValue
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueType string
+
+const (
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueType = "int64Value"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueType {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "int64Value":
+ *e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueType: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueTypeInt64Value
+}
+
+type ToValueType string
+
+const (
+ ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value ToValueType = "source-google-analytics-data-api-update_Schemas_int64Value"
+ ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue ToValueType = "source-google-analytics-data-api-update_Schemas_doubleValue"
+)
+
+type ToValue struct {
+ SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value
+ SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue
+
+ Type ToValueType
+}
+
+func CreateToValueSourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value) ToValue {
+ typ := ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value
+
+ return ToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateToValueSourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue) ToValue {
+ typ := ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue
+
+ return ToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue,
+ Type: typ,
+ }
+}
+
+func (u *ToValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value
+ u.Type = ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue
+ u.Type = ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u ToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type BetweenFilter struct {
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterName `const:"betweenFilter" json:"filter_name"`
+ FromValue FromValue `json:"fromValue"`
+ ToValue ToValue `json:"toValue"`
+}
+
+func (b BetweenFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(b, "", false)
+}
+
+func (b *BetweenFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &b, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *BetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterNameBetweenFilter
+}
+
+func (o *BetweenFilter) GetFromValue() FromValue {
+ if o == nil {
+ return FromValue{}
+ }
+ return o.FromValue
+}
+
+func (o *BetweenFilter) GetToValue() ToValue {
+ if o == nil {
+ return ToValue{}
+ }
+ return o.ToValue
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasFilterName string
+
+const (
+ SourceGoogleAnalyticsDataAPIUpdateSchemasFilterNameNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasFilterName = "numericFilter"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasFilterName {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasFilterName) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "numericFilter":
+ *e = SourceGoogleAnalyticsDataAPIUpdateSchemasFilterName(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasFilterName: %v", v)
+ }
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums string
+
+const (
+ SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums = "OPERATION_UNSPECIFIED"
+ SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnumsEqual SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums = "EQUAL"
+ SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnumsLessThan SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums = "LESS_THAN"
+ SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnumsLessThanOrEqual SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums = "LESS_THAN_OR_EQUAL"
+ SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnumsGreaterThan SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums = "GREATER_THAN"
+ SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnumsGreaterThanOrEqual SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums = "GREATER_THAN_OR_EQUAL"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "OPERATION_UNSPECIFIED":
+ fallthrough
+ case "EQUAL":
+ fallthrough
+ case "LESS_THAN":
+ fallthrough
+ case "LESS_THAN_OR_EQUAL":
+ fallthrough
+ case "GREATER_THAN":
+ fallthrough
+ case "GREATER_THAN_OR_EQUAL":
+ *e = SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateValueType is the const discriminator
+// ("doubleValue") carried by the DoubleValue variant.
+type SourceGoogleAnalyticsDataAPIUpdateValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPIUpdateValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal string "doubleValue".
+func (e *SourceGoogleAnalyticsDataAPIUpdateValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPIUpdateValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateValueType: %v", v)
+	}
+}
+
+// DoubleValue wraps a float64 filter value together with its fixed
+// "doubleValue" discriminator (injected via the const struct tag).
+type DoubleValue struct {
+	Value float64 `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes via the shared utils helper, which emits the
+// const-tagged value_type field (utils semantics defined elsewhere in the SDK).
+func (d DoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(d, "", false)
+}
+
+// UnmarshalJSON delegates to the utils helper, which presumably also
+// validates the const value_type discriminator — confirm in the utils package.
+func (d *DoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is a nil-safe accessor; a nil receiver yields 0.0.
+func (o *DoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType always reports this variant's fixed discriminator.
+func (o *DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateValueTypeDoubleValue
+}
+
+// ValueType is the const discriminator ("int64Value") carried by the
+// Int64Value variant.
+type ValueType string
+
+const (
+	ValueTypeInt64Value ValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e ValueType) ToPointer() *ValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal string "int64Value".
+func (e *ValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = ValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for ValueType: %v", v)
+	}
+}
+
+// Int64Value wraps an integer filter value with its fixed "int64Value"
+// discriminator. Value is a string — presumably the GA4 API carries int64s
+// as decimal strings to avoid JSON number precision loss; confirm against
+// the upstream spec.
+type Int64Value struct {
+	Value string `json:"value"`
+	valueType ValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON serializes via the shared utils helper, which emits the
+// const-tagged value_type field.
+func (i Int64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(i, "", false)
+}
+
+// UnmarshalJSON delegates to the utils helper.
+func (i *Int64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &i, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is a nil-safe accessor; a nil receiver yields "".
+func (o *Int64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType always reports this variant's fixed discriminator.
+func (o *Int64Value) GetValueType() ValueType {
+	return ValueTypeInt64Value
+}
+
+// ValueUnionType names which variant of the Value union is populated.
+type ValueUnionType string
+
+const (
+	ValueUnionTypeInt64Value ValueUnionType = "int64Value"
+	ValueUnionTypeDoubleValue ValueUnionType = "doubleValue"
+)
+
+// Value is a tagged union: exactly one of Int64Value/DoubleValue is expected
+// to be set, with Type recording which variant it is.
+type Value struct {
+	Int64Value *Int64Value
+	DoubleValue *DoubleValue
+
+	Type ValueUnionType
+}
+
+// CreateValueInt64Value builds a Value holding the int64Value variant.
+func CreateValueInt64Value(int64Value Int64Value) Value {
+	typ := ValueUnionTypeInt64Value
+
+	return Value{
+		Int64Value: &int64Value,
+		Type: typ,
+	}
+}
+
+// CreateValueDoubleValue builds a Value holding the doubleValue variant.
+func CreateValueDoubleValue(doubleValue DoubleValue) Value {
+	typ := ValueUnionTypeDoubleValue
+
+	return Value{
+		DoubleValue: &doubleValue,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON tries each variant in a fixed order (Int64Value first) and
+// keeps the first that decodes cleanly — presumably disambiguated by the
+// const value_type tag checked inside utils.UnmarshalJSON.
+func (u *Value) UnmarshalJSON(data []byte) error {
+
+	int64Value := new(Int64Value)
+	if err := utils.UnmarshalJSON(data, &int64Value, "", true, true); err == nil {
+		u.Int64Value = int64Value
+		u.Type = ValueUnionTypeInt64Value
+		return nil
+	}
+
+	doubleValue := new(DoubleValue)
+	if err := utils.UnmarshalJSON(data, &doubleValue, "", true, true); err == nil {
+		u.DoubleValue = doubleValue
+		u.Type = ValueUnionTypeDoubleValue
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON emits the first non-nil variant (Int64Value takes precedence)
+// and errors when the union is empty.
+func (u Value) MarshalJSON() ([]byte, error) {
+	if u.Int64Value != nil {
+		return utils.MarshalJSON(u.Int64Value, "", true)
+	}
+
+	if u.DoubleValue != nil {
+		return utils.MarshalJSON(u.DoubleValue, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// NumericFilter compares a field against a numeric Value using one of the
+// operation enums; filter_name is fixed to "numericFilter" via the const tag.
+type NumericFilter struct {
+	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasFilterName `const:"numericFilter" json:"filter_name"`
+	Operation []SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums `json:"operation"`
+	Value Value `json:"value"`
+}
+
+// MarshalJSON serializes via the shared utils helper, which emits the
+// const-tagged filter_name field.
+func (n NumericFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(n, "", false)
+}
+
+// UnmarshalJSON delegates to the utils helper.
+func (n *NumericFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &n, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetFilterName always reports the fixed "numericFilter" discriminator.
+func (o *NumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasFilterName {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasFilterNameNumericFilter
+}
+
+// GetOperation is nil-safe; a nil receiver yields an empty (non-nil) slice.
+func (o *NumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums {
+	if o == nil {
+		return []SourceGoogleAnalyticsDataAPIUpdateSchemasValidEnums{}
+	}
+	return o.Operation
+}
+
+// GetValue is nil-safe; a nil receiver yields the zero Value.
+func (o *NumericFilter) GetValue() Value {
+	if o == nil {
+		return Value{}
+	}
+	return o.Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateFilterName is the const discriminator
+// ("inListFilter") carried by InListFilter.
+type SourceGoogleAnalyticsDataAPIUpdateFilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateFilterNameInListFilter SourceGoogleAnalyticsDataAPIUpdateFilterName = "inListFilter"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPIUpdateFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateFilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal string "inListFilter".
+func (e *SourceGoogleAnalyticsDataAPIUpdateFilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "inListFilter":
+		*e = SourceGoogleAnalyticsDataAPIUpdateFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateFilterName: %v", v)
+	}
+}
+
+// InListFilter matches a field against a list of candidate Values, with an
+// optional case-sensitivity flag; filter_name is fixed to "inListFilter".
+type InListFilter struct {
+	CaseSensitive *bool `json:"caseSensitive,omitempty"`
+	filterName SourceGoogleAnalyticsDataAPIUpdateFilterName `const:"inListFilter" json:"filter_name"`
+	Values []string `json:"values"`
+}
+
+// MarshalJSON serializes via the shared utils helper, which emits the
+// const-tagged filter_name field.
+func (i InListFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(i, "", false)
+}
+
+// UnmarshalJSON delegates to the utils helper.
+func (i *InListFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &i, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetCaseSensitive is nil-safe; nil receiver (or unset field) yields nil.
+func (o *InListFilter) GetCaseSensitive() *bool {
+	if o == nil {
+		return nil
+	}
+	return o.CaseSensitive
+}
+
+// GetFilterName always reports the fixed "inListFilter" discriminator.
+func (o *InListFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateFilterName {
+	return SourceGoogleAnalyticsDataAPIUpdateFilterNameInListFilter
+}
+
+// GetValues is nil-safe; a nil receiver yields an empty (non-nil) slice.
+func (o *InListFilter) GetValues() []string {
+	if o == nil {
+		return []string{}
+	}
+	return o.Values
+}
+
+// FilterName is the const discriminator ("stringFilter") carried by
+// StringFilter.
+type FilterName string
+
+const (
+	FilterNameStringFilter FilterName = "stringFilter"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e FilterName) ToPointer() *FilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal string "stringFilter".
+func (e *FilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "stringFilter":
+		*e = FilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for FilterName: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateValidEnums enumerates the string match
+// types accepted by StringFilter (exact, prefix/suffix, substring, regexp).
+type SourceGoogleAnalyticsDataAPIUpdateValidEnums string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPIUpdateValidEnums = "MATCH_TYPE_UNSPECIFIED"
+	SourceGoogleAnalyticsDataAPIUpdateValidEnumsExact SourceGoogleAnalyticsDataAPIUpdateValidEnums = "EXACT"
+	SourceGoogleAnalyticsDataAPIUpdateValidEnumsBeginsWith SourceGoogleAnalyticsDataAPIUpdateValidEnums = "BEGINS_WITH"
+	SourceGoogleAnalyticsDataAPIUpdateValidEnumsEndsWith SourceGoogleAnalyticsDataAPIUpdateValidEnums = "ENDS_WITH"
+	SourceGoogleAnalyticsDataAPIUpdateValidEnumsContains SourceGoogleAnalyticsDataAPIUpdateValidEnums = "CONTAINS"
+	SourceGoogleAnalyticsDataAPIUpdateValidEnumsFullRegexp SourceGoogleAnalyticsDataAPIUpdateValidEnums = "FULL_REGEXP"
+	SourceGoogleAnalyticsDataAPIUpdateValidEnumsPartialRegexp SourceGoogleAnalyticsDataAPIUpdateValidEnums = "PARTIAL_REGEXP"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPIUpdateValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateValidEnums {
+	return &e
+}
+
+// UnmarshalJSON decodes a JSON string and accepts only the known match-type
+// members; any other value is rejected with an error.
+func (e *SourceGoogleAnalyticsDataAPIUpdateValidEnums) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "MATCH_TYPE_UNSPECIFIED":
+		fallthrough
+	case "EXACT":
+		fallthrough
+	case "BEGINS_WITH":
+		fallthrough
+	case "ENDS_WITH":
+		fallthrough
+	case "CONTAINS":
+		fallthrough
+	case "FULL_REGEXP":
+		fallthrough
+	case "PARTIAL_REGEXP":
+		*e = SourceGoogleAnalyticsDataAPIUpdateValidEnums(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateValidEnums: %v", v)
+	}
+}
+
+// StringFilter matches a field against a single string Value using an
+// optional list of match types; filter_name is fixed to "stringFilter".
+type StringFilter struct {
+	CaseSensitive *bool `json:"caseSensitive,omitempty"`
+	filterName FilterName `const:"stringFilter" json:"filter_name"`
+	MatchType []SourceGoogleAnalyticsDataAPIUpdateValidEnums `json:"matchType,omitempty"`
+	Value string `json:"value"`
+}
+
+// MarshalJSON serializes via the shared utils helper, which emits the
+// const-tagged filter_name field.
+func (s StringFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the utils helper.
+func (s *StringFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetCaseSensitive is nil-safe; nil receiver (or unset field) yields nil.
+func (o *StringFilter) GetCaseSensitive() *bool {
+	if o == nil {
+		return nil
+	}
+	return o.CaseSensitive
+}
+
+// GetFilterName always reports the fixed "stringFilter" discriminator.
+func (o *StringFilter) GetFilterName() FilterName {
+	return FilterNameStringFilter
+}
+
+// GetMatchType is nil-safe; note it returns nil (not an empty slice) on a
+// nil receiver, unlike some sibling getters.
+func (o *StringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPIUpdateValidEnums {
+	if o == nil {
+		return nil
+	}
+	return o.MatchType
+}
+
+// GetValue is nil-safe; a nil receiver yields "".
+func (o *StringFilter) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionType names which
+// variant of the filter union is populated.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionType = "stringFilter"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionType = "inListFilter"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionType = "numericFilter"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionType = "betweenFilter"
+)
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasFilter is a tagged union over the
+// four primitive filter kinds; exactly one pointer field is expected to be
+// set, with Type recording which. BetweenFilter is declared elsewhere.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasFilter struct {
+	StringFilter *StringFilter
+	InListFilter *InListFilter
+	NumericFilter *NumericFilter
+	BetweenFilter *BetweenFilter
+
+	Type SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionType
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasFilterStringFilter builds
+// the union holding a StringFilter.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasFilterStringFilter(stringFilter StringFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasFilter {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeStringFilter
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasFilter{
+		StringFilter: &stringFilter,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasFilterInListFilter builds
+// the union holding an InListFilter.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasFilterInListFilter(inListFilter InListFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasFilter {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeInListFilter
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasFilter{
+		InListFilter: &inListFilter,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasFilterNumericFilter builds
+// the union holding a NumericFilter.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasFilterNumericFilter(numericFilter NumericFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasFilter {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeNumericFilter
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasFilter{
+		NumericFilter: &numericFilter,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasFilterBetweenFilter builds
+// the union holding a BetweenFilter.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasFilterBetweenFilter(betweenFilter BetweenFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasFilter {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeBetweenFilter
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasFilter{
+		BetweenFilter: &betweenFilter,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON tries variants in a fixed order (inList, numeric, between,
+// string last) and keeps the first that decodes cleanly — presumably
+// disambiguated by each variant's const filter_name tag inside utils.
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasFilter) UnmarshalJSON(data []byte) error {
+
+	inListFilter := new(InListFilter)
+	if err := utils.UnmarshalJSON(data, &inListFilter, "", true, true); err == nil {
+		u.InListFilter = inListFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeInListFilter
+		return nil
+	}
+
+	numericFilter := new(NumericFilter)
+	if err := utils.UnmarshalJSON(data, &numericFilter, "", true, true); err == nil {
+		u.NumericFilter = numericFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeNumericFilter
+		return nil
+	}
+
+	betweenFilter := new(BetweenFilter)
+	if err := utils.UnmarshalJSON(data, &betweenFilter, "", true, true); err == nil {
+		u.BetweenFilter = betweenFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeBetweenFilter
+		return nil
+	}
+
+	stringFilter := new(StringFilter)
+	if err := utils.UnmarshalJSON(data, &stringFilter, "", true, true); err == nil {
+		u.StringFilter = stringFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasFilterUnionTypeStringFilter
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON emits the first non-nil variant in declaration order and
+// errors when the union is empty.
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasFilter) MarshalJSON() ([]byte, error) {
+	if u.StringFilter != nil {
+		return utils.MarshalJSON(u.StringFilter, "", true)
+	}
+
+	if u.InListFilter != nil {
+		return utils.MarshalJSON(u.InListFilter, "", true)
+	}
+
+	if u.NumericFilter != nil {
+		return utils.MarshalJSON(u.NumericFilter, "", true)
+	}
+
+	if u.BetweenFilter != nil {
+		return utils.MarshalJSON(u.BetweenFilter, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterType is
+// the const discriminator ("filter") carried by Filter.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterTypeFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterType = "filter"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal string "filter".
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "filter":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterType: %v", v)
+	}
+}
+
+// Filter - A primitive filter binding a field name to one concrete filter
+// kind. Per the upstream GA4 API, all field names within one
+// FilterExpression must be of the same kind — the generated comment reads
+// "need to be either all dimensions" and appears truncated (likely
+// "all dimensions or all metrics"); confirm against the GA4 Data API spec.
+type Filter struct {
+	FieldName string `json:"field_name"`
+	Filter SourceGoogleAnalyticsDataAPIUpdateSchemasFilter `json:"filter"`
+	filterType *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterType `const:"filter" json:"filter_type,omitempty"`
+}
+
+// MarshalJSON serializes via the shared utils helper, which emits the
+// const-tagged filter_type field.
+func (f Filter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(f, "", false)
+}
+
+// UnmarshalJSON delegates to the utils helper.
+func (f *Filter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &f, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetFieldName is nil-safe; a nil receiver yields "".
+func (o *Filter) GetFieldName() string {
+	if o == nil {
+		return ""
+	}
+	return o.FieldName
+}
+
+// GetFilter is nil-safe; a nil receiver yields the zero union value.
+func (o *Filter) GetFilter() SourceGoogleAnalyticsDataAPIUpdateSchemasFilter {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPIUpdateSchemasFilter{}
+	}
+	return o.Filter
+}
+
+// GetFilterType always reports a pointer to the fixed "filter" discriminator.
+func (o *Filter) GetFilterType() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterTypeFilter.ToPointer()
+}
+
+// ...ExpressionFilterFilterFilterName is the const discriminator
+// ("betweenFilter") for the between-filter variant of this expression.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName = "betweenFilter"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal string "betweenFilter".
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "betweenFilter":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName: %v", v)
+	}
+}
+
+// ...ExpressionFilterFilterValueType is the const discriminator
+// ("doubleValue") for this expression's DoubleValue "from" bound.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal string "doubleValue".
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType: %v", v)
+	}
+}
+
+// ...Filter3ExpressionDoubleValue wraps a float64 bound with its fixed
+// "doubleValue" discriminator; same shape as DoubleValue, regenerated per
+// schema location.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue struct {
+	Value float64 `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes via the shared utils helper, which emits the
+// const-tagged value_type field.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the utils helper.
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is nil-safe; a nil receiver yields 0.0.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType always reports the fixed "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueTypeDoubleValue
+}
+
+// ...ExpressionFilterValueType is the const discriminator ("int64Value")
+// for this expression's Int64Value "from" bound.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal string "int64Value".
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType: %v", v)
+	}
+}
+
+// ...Filter3ExpressionInt64Value wraps an integer bound (carried as a
+// decimal string — presumably to avoid JSON number precision loss; confirm
+// against the GA4 spec) with its fixed "int64Value" discriminator.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value struct {
+	Value string `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON serializes via the shared utils helper, which emits the
+// const-tagged value_type field.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the utils helper.
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is nil-safe; a nil receiver yields "".
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType always reports the fixed "int64Value" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasFromValueType names which variant
+// of the "from" bound union is populated.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasFromValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_expression_int64Value"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_expression_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue is a tagged union over
+// the int64/double "from" bound variants; exactly one field is expected to
+// be set, with Type recording which.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue struct {
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue
+
+	Type SourceGoogleAnalyticsDataAPIUpdateSchemasFromValueType
+}
+
+// Create...Int64Value builds the union holding the int64Value variant.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value,
+		Type: typ,
+	}
+}
+
+// Create...DoubleValue builds the union holding the doubleValue variant.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON tries the int64Value variant first, then doubleValue,
+// keeping the first that decodes cleanly — presumably disambiguated by the
+// const value_type tag inside utils.
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON emits the first non-nil variant (int64Value takes precedence)
+// and errors when the union is empty.
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// ...Filter4ToValueValueType is the const discriminator ("doubleValue")
+// for this expression's DoubleValue "to" bound.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal string "doubleValue".
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType: %v", v)
+	}
+}
+
+// ...ExpressionFilterDoubleValue wraps a float64 "to" bound with its fixed
+// "doubleValue" discriminator.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue struct {
+	Value float64 `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON serializes via the shared utils helper, which emits the
+// const-tagged value_type field.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the utils helper.
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is nil-safe; a nil receiver yields 0.0.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType always reports the fixed "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueTypeDoubleValue
+}
+
+// ...Filter4ValueType is the const discriminator ("int64Value") for this
+// expression's Int64Value "to" bound.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to a copy of e.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal string "int64Value".
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType: %v", v)
+	}
+}
+
+// ...ExpressionFilterInt64Value wraps an integer "to" bound (carried as a
+// decimal string, as elsewhere in this schema) with its fixed "int64Value"
+// discriminator.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value struct {
+	Value string `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON serializes via the shared utils helper, which emits the
+// const-tagged value_type field.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the utils helper.
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is nil-safe; a nil receiver yields "".
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType always reports the fixed "int64Value" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasToValueType names which member of the
// "toValue" union is populated. The string values are generated discriminators
// (they intentionally contain spaces/underscores from the OpenAPI schema titles).
type SourceGoogleAnalyticsDataAPIUpdateSchemasToValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_expression_filter_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateSchemasToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_expression_filter_doubleValue"
)
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasToValue is a tagged union: exactly one of
// the pointer members is expected to be non-nil, with Type recording which one.
type SourceGoogleAnalyticsDataAPIUpdateSchemasToValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue

	// Type identifies which member above is populated.
	Type SourceGoogleAnalyticsDataAPIUpdateSchemasToValueType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasToValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasToValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasToValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter struct {
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName `const:"betweenFilter" json:"filter_name"`
+ FromValue SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue `json:"fromValue"`
+ ToValue SourceGoogleAnalyticsDataAPIUpdateSchemasToValue `json:"toValue"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterNameBetweenFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasFromValue{}
+ }
+ return o.FromValue
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPIUpdateSchemasToValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasToValue{}
+ }
+ return o.ToValue
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName is the
// single-valued discriminator for the numeric-filter variant.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterNameNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName = "numericFilter"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string and rejects anything other than the
// single allowed value "numericFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch raw {
	case "numericFilter":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName: %v", raw)
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums enumerates the numeric
// comparison operations accepted by the filter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "OPERATION_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsEqual SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsLessThan SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "LESS_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsLessThanOrEqual SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "LESS_THAN_OR_EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsGreaterThan SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "GREATER_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnumsGreaterThanOrEqual SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums = "GREATER_THAN_OR_EQUAL"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string and accepts only the declared
// comparison-operation values.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch raw {
	case "OPERATION_UNSPECIFIED",
		"EQUAL",
		"LESS_THAN",
		"LESS_THAN_OR_EQUAL",
		"GREATER_THAN",
		"GREATER_THAN_OR_EQUAL":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums: %v", raw)
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType is the
// single-valued discriminator for the double variant of this filter value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string and rejects anything other than the
// single allowed value "doubleValue".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch raw {
	case "doubleValue":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType: %v", raw)
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType is the single-valued
// discriminator for the int64 variant of the numeric-filter value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string and rejects anything other than the
// single allowed value "int64Value".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch raw {
	case "int64Value":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType: %v", raw)
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasValueUnionType names which member of the
// numeric-filter "value" union is populated. The string values are generated
// discriminators (they intentionally contain spaces/underscores from the OpenAPI schema titles).
type SourceGoogleAnalyticsDataAPIUpdateSchemasValueUnionType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value SourceGoogleAnalyticsDataAPIUpdateSchemasValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateSchemasValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_3_doubleValue"
)
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasValue is a tagged union: exactly one of
// the pointer members is expected to be non-nil, with Type recording which one.
type SourceGoogleAnalyticsDataAPIUpdateSchemasValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue

	// Type identifies which member above is populated.
	Type SourceGoogleAnalyticsDataAPIUpdateSchemasValueUnionType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter struct {
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName `const:"numericFilter" json:"filter_name"`
+ Operation []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums `json:"operation"`
+ Value SourceGoogleAnalyticsDataAPIUpdateSchemasValue `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterNameNumericFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums {
+ if o == nil {
+ return []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums{}
+ }
+ return o.Operation
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter) GetValue() SourceGoogleAnalyticsDataAPIUpdateSchemasValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasValue{}
+ }
+ return o.Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName is the
// single-valued discriminator for the in-list-filter variant.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterNameInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName = "inListFilter"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string and rejects anything other than the
// single allowed value "inListFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch raw {
	case "inListFilter":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName: %v", raw)
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName `const:"inListFilter" json:"filter_name"`
+ Values []string `json:"values"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterNameInListFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter) GetValues() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Values
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName is the single-valued
// discriminator for the string-filter variant.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterNameStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName = "stringFilter"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string and rejects anything other than the
// single allowed value "stringFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch raw {
	case "stringFilter":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName: %v", raw)
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums enumerates the string
// match types accepted by the string filter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "MATCH_TYPE_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsExact SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "EXACT"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsBeginsWith SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "BEGINS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsEndsWith SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "ENDS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsContains SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "CONTAINS"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsFullRegexp SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "FULL_REGEXP"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnumsPartialRegexp SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums = "PARTIAL_REGEXP"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string and accepts only the declared match types.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch raw {
	case "MATCH_TYPE_UNSPECIFIED",
		"EXACT",
		"BEGINS_WITH",
		"ENDS_WITH",
		"CONTAINS",
		"FULL_REGEXP",
		"PARTIAL_REGEXP":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums(raw)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums: %v", raw)
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter describes a filter on a
// string-valued dimension. filterName is const-tagged, so the SDK serializer
// presumably always writes the fixed discriminator "stringFilter" — see utils.MarshalJSON.
type SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter struct {
	// CaseSensitive toggles case-sensitive matching; nil omits the field from JSON.
	CaseSensitive *bool `json:"caseSensitive,omitempty"`
	// filterName is the fixed variant discriminator emitted as "filter_name".
	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName `const:"stringFilter" json:"filter_name"`
	// MatchType optionally restricts how Value is compared (e.g. "EXACT", "CONTAINS").
	MatchType []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums `json:"matchType,omitempty"`
	// Value is the string the dimension is matched against (required).
	Value string `json:"value"`
}
+
// MarshalJSON serializes through the SDK's utils helper so struct tags
// (including the const-tagged filter_name) drive the output.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes through the SDK's utils helper; the trailing
// boolean presumably controls tolerance of unknown fields — see utils.UnmarshalJSON.
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterNameStringFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums {
+ if o == nil {
+ return nil
+ }
+ return o.MatchType
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType
// discriminates which variant pointer of the filter union struct below is populated.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType = "source-google-analytics-data-api-update_Schemas_stringFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType = "source-google-analytics-data-api-update_Schemas_inListFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType = "source-google-analytics-data-api-update_Schemas_numericFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType = "source-google-analytics-data-api-update_Schemas_betweenFilter"
)
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter
// is a tagged union: intended to hold exactly one of the variant pointers,
// with Type recording which one (MarshalJSON errors when all are nil).
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter
	SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter
	SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter
	SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter

	// Type names the populated variant.
	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON tries each union variant in a fixed order (inList, numeric,
// between, then string); the first strict decode that succeeds sets the
// variant pointer and Type. NOTE(review): order matters — a payload decodable
// by two variants resolves to the earlier one; do not reorder.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter
		return nil
	}

	// No variant matched the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
// MarshalJSON serializes the first non-nil variant pointer (checked in
// declaration order) and errors when none is set. The Type tag is not
// consulted here — only the populated pointer.
func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasStringFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasInListFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasNumericFilter, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasBetweenFilter, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasExpression pairs a dimension field
// name with the filter union applied to it.
type SourceGoogleAnalyticsDataAPIUpdateSchemasExpression struct {
	// FieldName is the dimension the filter targets.
	FieldName string `json:"field_name"`
	// Filter is the filter union (string/inList/numeric/between) to apply.
	Filter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter `json:"filter"`
}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasExpression) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasExpression) GetFilter() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter{}
+ }
+ return o.Filter
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasFilterType is the discriminator
// carried by NotExpression; its only valid value is "notExpression".
type SourceGoogleAnalyticsDataAPIUpdateSchemasFilterType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasFilterTypeNotExpression SourceGoogleAnalyticsDataAPIUpdateSchemasFilterType = "notExpression"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasFilterType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasFilterType {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string, accepting only "notExpression".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasFilterType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "notExpression" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasFilterType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasFilterType(raw)
	return nil
}
+
// NotExpression - The FilterExpression is NOT of notExpression.
// filterType is const-tagged, so the serializer presumably pins
// "filter_type" to "notExpression" when it is emitted.
type NotExpression struct {
	// Expression is the inner expression being negated; optional.
	Expression *SourceGoogleAnalyticsDataAPIUpdateSchemasExpression `json:"expression,omitempty"`
	// filterType is the fixed discriminator for this wrapper.
	filterType *SourceGoogleAnalyticsDataAPIUpdateSchemasFilterType `const:"notExpression" json:"filter_type,omitempty"`
}

// MarshalJSON serializes through the SDK's utils helper so the const-tagged
// discriminator is handled.
func (n NotExpression) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(n, "", false)
}

// UnmarshalJSON deserializes through the SDK's utils helper.
func (n *NotExpression) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &n, "", false, true); err != nil {
		return err
	}
	return nil
}
+
+func (o *NotExpression) GetExpression() *SourceGoogleAnalyticsDataAPIUpdateSchemasExpression {
+ if o == nil {
+ return nil
+ }
+ return o.Expression
+}
+
+func (o *NotExpression) GetFilterType() *SourceGoogleAnalyticsDataAPIUpdateSchemasFilterType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasFilterTypeNotExpression.ToPointer()
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName
// is a single-valued discriminator; its only valid value is "betweenFilter".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterNameBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName = "betweenFilter"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string, accepting only "betweenFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "betweenFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType
// is a single-valued discriminator; its only valid value is "doubleValue".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string, accepting only "doubleValue".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue
// carries a float64 payload tagged with the fixed value_type "doubleValue".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue struct {
	// Value is the numeric payload.
	Value float64 `json:"value"`
	// valueType is the const-tagged discriminator for this variant.
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType `const:"doubleValue" json:"value_type"`
}

// MarshalJSON serializes through the SDK's utils helper (handles the const tag).
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes through the SDK's utils helper.
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns the numeric payload, or 0.0 on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue) GetValue() float64 {
	if o == nil {
		return 0.0
	}
	return o.Value
}

// GetValueType reports the fixed discriminator ("doubleValue").
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueTypeDoubleValue
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType
// is a single-valued discriminator; its only valid value is "int64Value".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string, accepting only "int64Value".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value
// carries an int64 payload encoded as a JSON string (standard for 64-bit ints
// in Google APIs), tagged with the fixed value_type "int64Value".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value struct {
	// Value is the integer payload as a decimal string.
	Value string `json:"value"`
	// valueType is the const-tagged discriminator for this variant.
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType `const:"int64Value" json:"value_type"`
}

// MarshalJSON serializes through the SDK's utils helper (handles the const tag).
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes through the SDK's utils helper.
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns the string-encoded integer payload, or "" on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}

// GetValueType reports the fixed discriminator ("int64Value").
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueTypeInt64Value
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueType
// discriminates the int64/double variants of the FromValue union below.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_doubleValue"
)

// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue
// is a tagged union for a between-filter's lower bound: intended to hold
// exactly one variant pointer, with Type recording which.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue

	// Type names the populated variant.
	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON tries the int64 variant first, then the double variant; the
// first strict decode that succeeds sets the variant pointer and Type.
// NOTE(review): order matters — do not reorder the attempts.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue
		return nil
	}

	// No variant matched the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
// MarshalJSON serializes the first non-nil variant pointer (int64, then
// double) and errors when neither is set. Type is not consulted here.
func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInt64Value, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDoubleValue, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType
// is a single-valued discriminator; its only valid value is "doubleValue".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string, accepting only "doubleValue".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue
// carries a float64 payload tagged with the fixed value_type "doubleValue".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue struct {
	// Value is the numeric payload.
	Value float64 `json:"value"`
	// valueType is the const-tagged discriminator for this variant.
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType `const:"doubleValue" json:"value_type"`
}

// MarshalJSON serializes through the SDK's utils helper (handles the const tag).
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes through the SDK's utils helper.
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns the numeric payload, or 0.0 on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue) GetValue() float64 {
	if o == nil {
		return 0.0
	}
	return o.Value
}

// GetValueType reports the fixed discriminator ("doubleValue").
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueTypeDoubleValue
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType
// is a single-valued discriminator; its only valid value is "int64Value".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType {
	out := e
	return &out
}

// UnmarshalJSON decodes a JSON string, accepting only "int64Value".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
// carries an int64 payload encoded as a JSON string, tagged with the fixed
// value_type "int64Value".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value struct {
	// Value is the integer payload as a decimal string.
	Value string `json:"value"`
	// valueType is the const-tagged discriminator for this variant.
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType `const:"int64Value" json:"value_type"`
}

// MarshalJSON serializes through the SDK's utils helper (handles the const tag).
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes through the SDK's utils helper.
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns the string-encoded integer payload, or "" on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}

// GetValueType reports the fixed discriminator ("int64Value").
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueTypeInt64Value
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueType
// discriminates the int64/double variants of the ToValue union below.
// NOTE(review): the tag strings contain a literal space ("Dimensions filter");
// generated as-is from the spec — do not "fix" without regenerating consumers.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_doubleValue"
)

// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue
// is a tagged union for a between-filter's upper bound: intended to hold
// exactly one variant pointer, with Type recording which.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue

	// Type names the populated variant.
	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes the ToValue union by attempting each variant in declared order
// (int64 first, then double) and keeping the first that succeeds. Disambiguation relies on
// the variants' const-tagged "value_type" discriminator fields, which utils.UnmarshalJSON
// presumably enforces via the trailing boolean flags — confirm against the utils package.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value
		return nil
	}

	// int64 variant did not match; fall back to the double variant.
	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue
		return nil
	}

	// Neither variant accepted the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter models the
// "betweenFilter" dimension filter: a range bounded below by FromValue and above by ToValue.
// filterName is unexported and const-tagged so serialization always emits "betweenFilter".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter struct {
	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName `const:"betweenFilter" json:"filter_name"`
	FromValue  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue                              `json:"fromValue"`
	ToValue    SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue                                `json:"toValue"`
}

// MarshalJSON serializes via utils.MarshalJSON so the const-tagged filterName is emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes via utils.UnmarshalJSON (presumably validating the
// const-tagged filterName — confirm against the utils package).
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetFilterName ignores the receiver and always returns the "betweenFilter" constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterNameBetweenFilter
}

// GetFromValue returns the lower bound; zero value on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFromValue{}
	}
	return o.FromValue
}

// GetToValue returns the upper bound; zero value on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterToValue{}
	}
	return o.ToValue
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName is the
// discriminator string identifying a numeric dimension filter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName string

const (
	// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterNameNumericFilter is the only accepted value.
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterNameNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName = "numericFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName {
	v := e
	return &v
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "numericFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s == "numericFilter" {
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName(s)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName: %v", s)
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums enumerates the
// comparison operations accepted by a numeric dimension filter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums = "OPERATION_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnumsEqual               SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums = "EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnumsLessThan            SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums = "LESS_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnumsLessThanOrEqual     SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums = "LESS_THAN_OR_EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnumsGreaterThan         SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums = "GREATER_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnumsGreaterThanOrEqual  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums = "GREATER_THAN_OR_EQUAL"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only the declared operation names.
// The generated fallthrough chain is collapsed into the idiomatic multi-value case list;
// behavior is unchanged.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "OPERATION_UNSPECIFIED",
		"EQUAL",
		"LESS_THAN",
		"LESS_THAN_OR_EQUAL",
		"GREATER_THAN",
		"GREATER_THAN_OR_EQUAL":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType
// is the discriminator string identifying a double-valued numeric literal.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType string

const (
	// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueTypeDoubleValue is the only accepted value.
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType {
	v := e
	return &v
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "doubleValue".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s == "doubleValue" {
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType(s)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType: %v", s)
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue carries a
// float64 numeric-filter literal. valueType is unexported and const-tagged so serialization
// always emits "doubleValue".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue struct {
	Value     float64                                                                                                           `json:"value"`
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType `const:"doubleValue" json:"value_type"`
}

// MarshalJSON serializes via utils.MarshalJSON so the const-tagged valueType is emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes via utils.UnmarshalJSON (presumably validating the
// const-tagged valueType — confirm against the utils package).
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns the numeric payload; 0.0 on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue) GetValue() float64 {
	if o == nil {
		return 0.0
	}
	return o.Value
}

// GetValueType ignores the receiver and always returns the "doubleValue" constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueTypeDoubleValue
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType is the
// discriminator string identifying an int64-valued numeric literal.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType string

const (
	// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueTypeInt64Value is the only accepted value.
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType {
	v := e
	return &v
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "int64Value".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s == "int64Value" {
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType(s)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType: %v", s)
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value carries an
// int64 numeric-filter literal; the payload is transported as a string (Value is string-typed).
// valueType is unexported and const-tagged so serialization always emits "int64Value".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value struct {
	Value     string                                                                                                  `json:"value"`
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType `const:"int64Value" json:"value_type"`
}

// MarshalJSON serializes via utils.MarshalJSON so the const-tagged valueType is emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes via utils.UnmarshalJSON (presumably validating the
// const-tagged valueType — confirm against the utils package).
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns the string-encoded payload; "" on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}

// GetValueType ignores the receiver and always returns the "int64Value" constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueTypeInt64Value
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueUnionType names which variant of
// the Value union is populated.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueUnionType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_2_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_2_doubleValue"
)

// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue is a tagged union for a numeric
// filter's comparison value: at most one variant pointer is expected to be non-nil, and Type
// records which (see the Create* constructors and MarshalJSON below).
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value  *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue

	// Type is the discriminator naming the populated variant.
	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueUnionType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes the Value union by attempting each variant in declared order
// (int64 first, then double) and keeping the first that succeeds. Disambiguation relies on
// the variants' const-tagged "value_type" discriminator fields, which utils.UnmarshalJSON
// presumably enforces via the trailing boolean flags — confirm against the utils package.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value
		return nil
	}

	// int64 variant did not match; fall back to the double variant.
	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue
		return nil
	}

	// Neither variant accepted the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter models the
// "numericFilter" dimension filter: a list of comparison operations applied against a single
// numeric Value. filterName is unexported and const-tagged so serialization always emits "numericFilter".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter struct {
	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName `const:"numericFilter" json:"filter_name"`
	Operation  []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums                `json:"operation"`
	Value      SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue                       `json:"value"`
}

// MarshalJSON serializes via utils.MarshalJSON so the const-tagged filterName is emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes via utils.UnmarshalJSON (presumably validating the
// const-tagged filterName — confirm against the utils package).
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetFilterName ignores the receiver and always returns the "numericFilter" constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterNameNumericFilter
}

// GetOperation returns the operation list; an empty (non-nil) slice on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums {
	if o == nil {
		return []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValidEnums{}
	}
	return o.Operation
}

// GetValue returns the comparison value union; zero value on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter) GetValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterValue{}
	}
	return o.Value
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName is the
// discriminator string identifying an in-list dimension filter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName string

const (
	// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterNameInListFilter is the only accepted value.
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterNameInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName = "inListFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName {
	v := e
	return &v
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "inListFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s == "inListFilter" {
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName(s)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName: %v", s)
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter models the
// "inListFilter" dimension filter: membership in Values, with optional case sensitivity.
// filterName is unexported and const-tagged so serialization always emits "inListFilter".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter struct {
	CaseSensitive *bool                                                                                                   `json:"caseSensitive,omitempty"`
	filterName    SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName `const:"inListFilter" json:"filter_name"`
	Values        []string                                                                                                `json:"values"`
}

// MarshalJSON serializes via utils.MarshalJSON so the const-tagged filterName is emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes via utils.UnmarshalJSON (presumably validating the
// const-tagged filterName — confirm against the utils package).
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetCaseSensitive returns the optional case-sensitivity flag; nil on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter) GetCaseSensitive() *bool {
	if o == nil {
		return nil
	}
	return o.CaseSensitive
}

// GetFilterName ignores the receiver and always returns the "inListFilter" constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterNameInListFilter
}

// GetValues returns the membership list; an empty (non-nil) slice on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter) GetValues() []string {
	if o == nil {
		return []string{}
	}
	return o.Values
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterName is the discriminator
// string identifying a string dimension filter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterName string

const (
	// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterNameStringFilter is the only accepted value.
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterNameStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterName = "stringFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterName {
	v := e
	return &v
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "stringFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterName) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s == "stringFilter" {
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterName(s)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterName: %v", s)
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums enumerates
// the match types accepted by a string dimension filter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "MATCH_TYPE_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsExact                SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "EXACT"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsBeginsWith           SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "BEGINS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsEndsWith             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "ENDS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsContains             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "CONTAINS"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsFullRegexp           SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "FULL_REGEXP"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnumsPartialRegexp        SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums = "PARTIAL_REGEXP"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only the declared match-type names.
// The generated fallthrough chain is collapsed into the idiomatic multi-value case list;
// behavior is unchanged.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "MATCH_TYPE_UNSPECIFIED",
		"EXACT",
		"BEGINS_WITH",
		"ENDS_WITH",
		"CONTAINS",
		"FULL_REGEXP",
		"PARTIAL_REGEXP":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter models the
// "stringFilter" dimension filter: a string Value with optional match types and case sensitivity.
// filterName is unexported and const-tagged so serialization always emits "stringFilter".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter struct {
	CaseSensitive *bool                                                                                                     `json:"caseSensitive,omitempty"`
	filterName    SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterName                    `const:"stringFilter" json:"filter_name"`
	MatchType     []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums `json:"matchType,omitempty"`
	Value         string                                                                                                    `json:"value"`
}

// MarshalJSON serializes via utils.MarshalJSON so the const-tagged filterName is emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON deserializes via utils.UnmarshalJSON (presumably validating the
// const-tagged filterName — confirm against the utils package).
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetCaseSensitive returns the optional case-sensitivity flag; nil on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter) GetCaseSensitive() *bool {
	if o == nil {
		return nil
	}
	return o.CaseSensitive
}

// GetFilterName ignores the receiver and always returns the "stringFilter" constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterName {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterNameStringFilter
}

// GetMatchType returns the optional match-type list; nil on a nil receiver (note: unlike
// some sibling getters, this one returns nil rather than an empty slice).
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums {
	if o == nil {
		return nil
	}
	return o.MatchType
}

// GetValue returns the filter's string payload; "" on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterType names which variant of the
// Filter union is populated (string, in-list, numeric, or between filter).
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_stringFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_inListFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_numericFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_betweenFilter"
)
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter is a tagged union over the four
// dimension-filter kinds: at most one variant pointer is expected to be non-nil, and Type records which.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter  *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter  *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter

	// Type is the discriminator naming the populated variant.
	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterType
}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter
+// wraps a string-filter variant in the union and tags it accordingly.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter(v SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter: &v,
+		Type: SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter
+// wraps an in-list-filter variant in the union and tags it accordingly.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter(v SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter: &v,
+		Type: SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter
+// wraps a numeric-filter variant in the union and tags it accordingly.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter(v SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter: &v,
+		Type: SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter
+// wraps a between-filter variant in the union and tags it accordingly.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter(v SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter: &v,
+		Type: SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter,
+	}
+}
+
+// UnmarshalJSON decodes the union by probing each variant in a fixed,
+// generator-chosen order (in-list, numeric, between, then string); the first
+// variant that decodes without error wins and sets Type.
+// NOTE(review): the probe order is load-bearing — a more permissive variant
+// tried earlier would shadow later ones. Decoding is delegated to the SDK's
+// utils helper; its flag arguments are generator conventions.
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is non-nil, checked in
+// struct declaration order; it errors when every variant is nil.
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterStringFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterInListFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterNumericFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterBetweenFilter, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateExpression is one dimension-filter
+// expression: the dimension's field name plus the filter applied to it.
+type SourceGoogleAnalyticsDataAPIUpdateExpression struct {
+	FieldName string                                                                       `json:"field_name"`
+	Filter    SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter `json:"filter"`
+}
+
+// GetFieldName returns the dimension field name; a nil receiver yields "".
+func (o *SourceGoogleAnalyticsDataAPIUpdateExpression) GetFieldName() string {
+	if o != nil {
+		return o.FieldName
+	}
+	return ""
+}
+
+// GetFilter returns the expression's filter union; a nil receiver yields the
+// zero value.
+func (o *SourceGoogleAnalyticsDataAPIUpdateExpression) GetFilter() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter {
+	if o != nil {
+		return o.Filter
+	}
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterFilter{}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateFilterType enumerates the legal
+// filter_type discriminator values for OrGroup; "orGroup" is the only member.
+type SourceGoogleAnalyticsDataAPIUpdateFilterType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateFilterTypeOrGroup SourceGoogleAnalyticsDataAPIUpdateFilterType = "orGroup"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateFilterType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateFilterType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the JSON string "orGroup"; any other value is
+// rejected with a descriptive error.
+func (e *SourceGoogleAnalyticsDataAPIUpdateFilterType) UnmarshalJSON(data []byte) error {
+	var raw string
+	if err := json.Unmarshal(data, &raw); err != nil {
+		return err
+	}
+	if raw != "orGroup" {
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateFilterType: %v", raw)
+	}
+	*e = SourceGoogleAnalyticsDataAPIUpdateFilterType(raw)
+	return nil
+}
+
+// OrGroup - The FilterExpressions in orGroup have an OR relationship.
+type OrGroup struct {
+	Expressions []SourceGoogleAnalyticsDataAPIUpdateExpression `json:"expressions"`
+	// filterType is the const-tagged discriminator ("orGroup"). It is
+	// unexported and exposed via GetFilterType; the SDK utils (de)serializers
+	// below handle it, since encoding/json alone ignores unexported fields.
+	filterType SourceGoogleAnalyticsDataAPIUpdateFilterType `const:"orGroup" json:"filter_type"`
+}
+
+// MarshalJSON delegates to the SDK utils marshaler so const-tagged fields are
+// emitted.
+func (o OrGroup) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(o, "", false)
+}
+
+// UnmarshalJSON delegates to the SDK utils unmarshaler (validates the
+// const-tagged discriminator).
+func (o *OrGroup) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &o, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetExpressions returns the group's expressions; a nil receiver yields an
+// empty (non-nil) slice.
+func (o *OrGroup) GetExpressions() []SourceGoogleAnalyticsDataAPIUpdateExpression {
+	if o != nil {
+		return o.Expressions
+	}
+	return []SourceGoogleAnalyticsDataAPIUpdateExpression{}
+}
+
+// GetFilterType always reports the constant "orGroup" discriminator,
+// regardless of the receiver.
+func (o *OrGroup) GetFilterType() SourceGoogleAnalyticsDataAPIUpdateFilterType {
+	return SourceGoogleAnalyticsDataAPIUpdateFilterTypeOrGroup
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName
+// enumerates the legal filter_name values for the between filter;
+// "betweenFilter" is the only member.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName = "betweenFilter"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only the JSON string "betweenFilter"; any other value
+// is rejected with a descriptive error.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName) UnmarshalJSON(data []byte) error {
+	var raw string
+	if err := json.Unmarshal(data, &raw); err != nil {
+		return err
+	}
+	if raw != "betweenFilter" {
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName: %v", raw)
+	}
+	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName(raw)
+	return nil
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType
+// enumerates the value_type discriminator for the double-valued variant;
+// "doubleValue" is the only member.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the JSON string "doubleValue"; any other value
+// is rejected with a descriptive error.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType) UnmarshalJSON(data []byte) error {
+	var raw string
+	if err := json.Unmarshal(data, &raw); err != nil {
+		return err
+	}
+	if raw != "doubleValue" {
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType: %v", raw)
+	}
+	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType(raw)
+	return nil
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue
+// carries a float64 value with the const "doubleValue" discriminator.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue struct {
+	Value float64 `json:"value"`
+	// valueType is const-tagged; handled by the utils (de)serializers below.
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON delegates to the SDK utils marshaler so the const-tagged
+// discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the SDK utils unmarshaler.
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the wrapped double value; a nil receiver yields 0.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue) GetValue() float64 {
+	if o != nil {
+		return o.Value
+	}
+	return 0.0
+}
+
+// GetValueType always reports the constant "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType
+// enumerates the value_type discriminator for the int64-valued variant;
+// "int64Value" is the only member.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the JSON string "int64Value"; any other value is
+// rejected with a descriptive error.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType) UnmarshalJSON(data []byte) error {
+	var raw string
+	if err := json.Unmarshal(data, &raw); err != nil {
+		return err
+	}
+	if raw != "int64Value" {
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType: %v", raw)
+	}
+	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType(raw)
+	return nil
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value
+// carries an int64 with the const "int64Value" discriminator. Value is a
+// string — presumably because the API transports 64-bit ints as JSON strings;
+// confirm against the GA Data API spec.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value struct {
+	Value string `json:"value"`
+	// valueType is const-tagged; handled by the utils (de)serializers below.
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON delegates to the SDK utils marshaler so the const-tagged
+// discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the SDK utils unmarshaler.
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the wrapped int64 (as a string); a nil receiver yields "".
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value) GetValue() string {
+	if o != nil {
+		return o.Value
+	}
+	return ""
+}
+
+// GetValueType always reports the constant "int64Value" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueType is
+// the discriminator naming which variant of the fromValue union is set.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_expressions_int64Value"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_expressions_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue is a
+// oneOf union (int64 or double): at most one variant pointer is non-nil, and
+// Type records which.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue struct {
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue
+
+	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueType
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value
+// wraps an int64-value variant in the fromValue union and tags it accordingly.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value(v SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value: &v,
+		Type: SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue
+// wraps a double-value variant in the fromValue union and tags it accordingly.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue(v SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue: &v,
+		Type: SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue,
+	}
+}
+
+// UnmarshalJSON decodes the fromValue union by probing int64 first, then
+// double; the first variant that decodes without error wins and sets Type.
+// NOTE(review): probe order is generator-chosen and load-bearing.
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is non-nil (int64 checked
+// first); it errors when both are nil.
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType
+// enumerates the value_type discriminator for the toValue double variant;
+// "doubleValue" is the only member.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the JSON string "doubleValue"; any other value
+// is rejected with a descriptive error.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType) UnmarshalJSON(data []byte) error {
+	var raw string
+	if err := json.Unmarshal(data, &raw); err != nil {
+		return err
+	}
+	if raw != "doubleValue" {
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType: %v", raw)
+	}
+	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType(raw)
+	return nil
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue
+// carries a float64 value with the const "doubleValue" discriminator
+// (toValue side of the between filter).
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue struct {
+	Value float64 `json:"value"`
+	// valueType is const-tagged; handled by the utils (de)serializers below.
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON delegates to the SDK utils marshaler so the const-tagged
+// discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the SDK utils unmarshaler.
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the wrapped double value; a nil receiver yields 0.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue) GetValue() float64 {
+	if o != nil {
+		return o.Value
+	}
+	return 0.0
+}
+
+// GetValueType always reports the constant "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType
+// enumerates the value_type discriminator for the toValue int64 variant;
+// "int64Value" is the only member.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the JSON string "int64Value"; any other value is
+// rejected with a descriptive error.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType) UnmarshalJSON(data []byte) error {
+	var raw string
+	if err := json.Unmarshal(data, &raw); err != nil {
+		return err
+	}
+	if raw != "int64Value" {
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType: %v", raw)
+	}
+	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType(raw)
+	return nil
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value
+// carries an int64 (as a string — presumably the API's JSON encoding of
+// 64-bit ints; confirm against the GA Data API spec) with the const
+// "int64Value" discriminator.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value struct {
+	Value string `json:"value"`
+	// valueType is const-tagged; handled by the utils (de)serializers below.
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON delegates to the SDK utils marshaler so the const-tagged
+// discriminator is emitted.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the SDK utils unmarshaler.
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns the wrapped int64 (as a string); a nil receiver yields "".
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value) GetValue() string {
+	if o != nil {
+		return o.Value
+	}
+	return ""
+}
+
+// GetValueType always reports the constant "int64Value" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueType is
+// the discriminator naming which variant of the toValue union is set.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_expressions_filter_int64Value"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_expressions_filter_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue is a
+// oneOf union (int64 or double): at most one variant pointer is non-nil, and
+// Type records which.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue struct {
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue
+
+	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueType
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value
+// wraps an int64-value variant in the toValue union and tags it accordingly.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value(v SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value: &v,
+		Type: SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue
+// wraps a double-value variant in the toValue union and tags it accordingly.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue(v SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue: &v,
+		Type: SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue,
+	}
+}
+
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter is a dimension-filter
// expression with a lower (fromValue) and upper (toValue) bound.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter struct {
	// filterName is a fixed discriminator; the `const` struct tag tells the utils marshal
	// helpers to emit/expect "filter_name": "betweenFilter".
	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName `const:"betweenFilter" json:"filter_name"`
	FromValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue `json:"fromValue"`
	ToValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue `json:"toValue"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const-tagged discriminator is included.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON, which understands the `const` tag.
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetFilterName always returns the "betweenFilter" discriminator constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterNameBetweenFilter
}

// GetFromValue returns the lower bound; safe on a nil receiver (returns the zero value).
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFromValue{}
	}
	return o.FromValue
}

// GetToValue returns the upper bound; safe on a nil receiver (returns the zero value).
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayToValue{}
	}
	return o.ToValue
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName
// is the discriminator tag for the numericFilter variant; "numericFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterNameNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName = "numericFilter"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName {
	p := e
	return &p
}

// UnmarshalJSON accepts only the JSON string "numericFilter"; any other payload is rejected.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "numericFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums
// enumerates the comparison operations accepted by a numericFilter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "OPERATION_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsEqual SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsLessThan SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "LESS_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsLessThanOrEqual SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "LESS_THAN_OR_EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsGreaterThan SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "GREATER_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnumsGreaterThanOrEqual SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums = "GREATER_THAN_OR_EQUAL"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums {
	return &e
}

// UnmarshalJSON validates the decoded string against the enum's membership and assigns it.
// The multi-value case clause replaces the generated six-deep fallthrough chain — same
// semantics, idiomatic Go.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "OPERATION_UNSPECIFIED", "EQUAL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "GREATER_THAN", "GREATER_THAN_OR_EQUAL":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType
// is the discriminator tag for the doubleValue variant; "doubleValue" is its only legal value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType {
	p := e
	return &p
}

// UnmarshalJSON accepts only the JSON string "doubleValue"; any other payload is rejected.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue
// is the doubleValue variant of a numeric filter value: a float64 tagged "value_type": "doubleValue".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue struct {
	Value float64 `json:"value"`
	// valueType is a fixed discriminator; the `const` tag makes the utils helpers
	// emit/expect "value_type": "doubleValue".
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType `const:"doubleValue" json:"value_type"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const-tagged discriminator is included.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON, which understands the `const` tag.
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns the numeric value; safe on a nil receiver (returns 0.0).
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue) GetValue() float64 {
	if o == nil {
		return 0.0
	}
	return o.Value
}

// GetValueType always returns the "doubleValue" discriminator constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueTypeDoubleValue
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType
// is the discriminator tag for the int64Value variant; "int64Value" is its only legal value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType {
	p := e
	return &p
}

// UnmarshalJSON accepts only the JSON string "int64Value"; any other payload is rejected.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value
// is the int64Value variant of a numeric filter value. The 64-bit integer is carried as a JSON
// string (the Value field is a Go string), presumably to avoid precision loss in JSON numbers —
// confirm against the GA4 Data API schema.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value struct {
	Value string `json:"value"`
	// valueType is a fixed discriminator; the `const` tag makes the utils helpers
	// emit/expect "value_type": "int64Value".
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType `const:"int64Value" json:"value_type"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const-tagged discriminator is included.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON, which understands the `const` tag.
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns the string-encoded integer; safe on a nil receiver (returns "").
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}

// GetValueType always returns the "int64Value" discriminator constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueTypeInt64Value
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueUnionType tags which variant
// of the Value union is populated.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueUnionType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_dimensionFilter_Dimensions filter_1_doubleValue"
)

// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue is the "value" of a
// numericFilter: a union holding exactly one of the int64Value or doubleValue variants,
// with Type recording which variant pointer is populated.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue

	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueUnionType
}

// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value
// wraps an int64Value variant into the union and tags Type accordingly.
func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue {
	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value

	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue{
		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value,
		Type: typ,
	}
}

// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue
// wraps a doubleValue variant into the union and tags Type accordingly.
func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue {
	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue

	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue{
		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue,
		Type: typ,
	}
}

// UnmarshalJSON assigns the payload to the first variant that utils.UnmarshalJSON accepts,
// trying int64Value before doubleValue (order matters for overlapping payloads), and sets
// Type to match. Errors only if neither variant accepts the data.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}

// MarshalJSON serializes whichever variant pointer is non-nil (int64Value checked first;
// Type is not consulted). Errors if both variants are nil.
func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter is a dimension-filter
// expression comparing a dimension against a numeric value using one of the ValidEnums operations.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter struct {
	// filterName is a fixed discriminator; the `const` tag makes the utils helpers
	// emit/expect "filter_name": "numericFilter".
	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName `const:"numericFilter" json:"filter_name"`
	Operation []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums `json:"operation"`
	Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue `json:"value"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const-tagged discriminator is included.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON, which understands the `const` tag.
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetFilterName always returns the "numericFilter" discriminator constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterNameNumericFilter
}

// GetOperation returns the comparison operations; safe on a nil receiver (returns an empty slice).
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums {
	if o == nil {
		return []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums{}
	}
	return o.Operation
}

// GetValue returns the comparison value union; safe on a nil receiver (returns the zero value).
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter) GetValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValue{}
	}
	return o.Value
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName
// is the discriminator tag for the inListFilter variant; "inListFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterNameInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName = "inListFilter"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName {
	p := e
	return &p
}

// UnmarshalJSON accepts only the JSON string "inListFilter"; any other payload is rejected.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "inListFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter is a dimension-filter
// expression matching a dimension against a list of candidate string values, with optional
// case sensitivity.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter struct {
	CaseSensitive *bool `json:"caseSensitive,omitempty"`
	// filterName is a fixed discriminator; the `const` tag makes the utils helpers
	// emit/expect "filter_name": "inListFilter".
	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName `const:"inListFilter" json:"filter_name"`
	Values []string `json:"values"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const-tagged discriminator is included.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON, which understands the `const` tag.
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetCaseSensitive returns the optional case-sensitivity flag; nil when unset or on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter) GetCaseSensitive() *bool {
	if o == nil {
		return nil
	}
	return o.CaseSensitive
}

// GetFilterName always returns the "inListFilter" discriminator constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterNameInListFilter
}

// GetValues returns the candidate values; safe on a nil receiver (returns an empty slice).
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter) GetValues() []string {
	if o == nil {
		return []string{}
	}
	return o.Values
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName
// is the discriminator tag for the stringFilter variant; "stringFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterNameStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName = "stringFilter"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName {
	p := e
	return &p
}

// UnmarshalJSON accepts only the JSON string "stringFilter"; any other payload is rejected.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "stringFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName: %v", raw)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName(raw)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums
// enumerates the match types accepted by a stringFilter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "MATCH_TYPE_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsExact SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "EXACT"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsBeginsWith SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "BEGINS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsEndsWith SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "ENDS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsContains SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "CONTAINS"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsFullRegexp SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "FULL_REGEXP"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnumsPartialRegexp SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums = "PARTIAL_REGEXP"
)

// ToPointer returns a pointer to a copy of the enum value.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums {
	return &e
}

// UnmarshalJSON validates the decoded string against the enum's membership and assigns it.
// The multi-value case clause replaces the generated seven-deep fallthrough chain — same
// semantics, idiomatic Go.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "MATCH_TYPE_UNSPECIFIED", "EXACT", "BEGINS_WITH", "ENDS_WITH", "CONTAINS", "FULL_REGEXP", "PARTIAL_REGEXP":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter is a dimension-filter
// expression matching a dimension against a single string value, with optional match types
// (EXACT, CONTAINS, regexp, ...) and case sensitivity.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter struct {
	CaseSensitive *bool `json:"caseSensitive,omitempty"`
	// filterName is a fixed discriminator; the `const` tag makes the utils helpers
	// emit/expect "filter_name": "stringFilter".
	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName `const:"stringFilter" json:"filter_name"`
	MatchType []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums `json:"matchType,omitempty"`
	Value string `json:"value"`
}

// MarshalJSON delegates to utils.MarshalJSON so the const-tagged discriminator is included.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to utils.UnmarshalJSON, which understands the `const` tag.
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetCaseSensitive returns the optional case-sensitivity flag; nil when unset or on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter) GetCaseSensitive() *bool {
	if o == nil {
		return nil
	}
	return o.CaseSensitive
}

// GetFilterName always returns the "stringFilter" discriminator constant.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterNameStringFilter
}

// GetMatchType returns the optional match types; nil when unset or on a nil receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums {
	if o == nil {
		return nil
	}
	return o.MatchType
}

// GetValue returns the string to match against; safe on a nil receiver (returns "").
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterType
// tags which variant of the Filter union below is populated (string, in-list, numeric, or between).
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_stringFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_inListFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_numericFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_betweenFilter"
)
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter struct {
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter
+
+ Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterType
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON decodes the oneOf by attempting each variant in a fixed
+// generator-chosen order (inList, numeric, between, string last) and keeping
+// the first that decodes without error. Presumably the helper's trailing
+// boolean args enforce strict/const-field matching so wrong variants are
+// rejected — confirm in internal/utils before relying on the try order.
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter
+		return nil
+	}
+
+	// No variant matched; per-variant errors are intentionally discarded.
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant pointer is set, checked in field
+// declaration order; the Type tag itself is never emitted. Errors when the
+// union is empty (all variants nil).
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayStringFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInListFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayNumericFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayBetweenFilter, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// Expression is one member of an andGroup: a field name plus the filter
+// applied to that field.
+type Expression struct {
+	FieldName string `json:"field_name"`
+	Filter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter `json:"filter"`
+}
+
+// GetFieldName is a nil-safe accessor ("" on nil receiver).
+func (o *Expression) GetFieldName() string {
+	if o == nil {
+		return ""
+	}
+	return o.FieldName
+}
+
+// GetFilter is a nil-safe accessor (zero-value union on nil receiver).
+func (o *Expression) GetFilter() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter{}
+	}
+	return o.Filter
+}
+
+// FilterType is the const discriminator for AndGroup; "andGroup" is the only
+// value the schema allows.
+type FilterType string
+
+const (
+	FilterTypeAndGroup FilterType = "andGroup"
+)
+
+func (e FilterType) ToPointer() *FilterType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the single schema-allowed value "andGroup".
+func (e *FilterType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "andGroup":
+		*e = FilterType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for FilterType: %v", v)
+	}
+}
+
+// AndGroup - The FilterExpressions in andGroup have an AND relationship.
+// filterType is unexported and const-tagged: it is fixed to "andGroup" and
+// surfaced only through GetFilterType.
+type AndGroup struct {
+	Expressions []Expression `json:"expressions"`
+	filterType FilterType `const:"andGroup" json:"filter_type"`
+}
+
+// MarshalJSON uses the SDK helper so the const filter_type field is emitted
+// despite being unexported.
+func (a AndGroup) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AndGroup) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetExpressions is a nil-safe accessor (empty slice on nil receiver).
+func (o *AndGroup) GetExpressions() []Expression {
+	if o == nil {
+		return []Expression{}
+	}
+	return o.Expressions
+}
+
+// GetFilterType always returns the "andGroup" constant.
+func (o *AndGroup) GetFilterType() FilterType {
+	return FilterTypeAndGroup
+}
+
+// DimensionsFilterType tags which variant of the DimensionsFilter union is
+// populated.
+type DimensionsFilterType string
+
+const (
+	DimensionsFilterTypeAndGroup DimensionsFilterType = "andGroup"
+	DimensionsFilterTypeOrGroup DimensionsFilterType = "orGroup"
+	DimensionsFilterTypeNotExpression DimensionsFilterType = "notExpression"
+	DimensionsFilterTypeFilter DimensionsFilterType = "filter"
+)
+
+// DimensionsFilter is a oneOf wrapper over the four expression shapes; exactly
+// one pointer should be non-nil. OrGroup, NotExpression and Filter are
+// declared elsewhere in this file.
+type DimensionsFilter struct {
+	AndGroup *AndGroup
+	OrGroup *OrGroup
+	NotExpression *NotExpression
+	Filter *Filter
+
+	Type DimensionsFilterType
+}
+
+// Constructor: wraps an AndGroup with the matching Type tag.
+func CreateDimensionsFilterAndGroup(andGroup AndGroup) DimensionsFilter {
+	typ := DimensionsFilterTypeAndGroup
+
+	return DimensionsFilter{
+		AndGroup: &andGroup,
+		Type: typ,
+	}
+}
+
+// Constructor: wraps an OrGroup with the matching Type tag.
+func CreateDimensionsFilterOrGroup(orGroup OrGroup) DimensionsFilter {
+	typ := DimensionsFilterTypeOrGroup
+
+	return DimensionsFilter{
+		OrGroup: &orGroup,
+		Type: typ,
+	}
+}
+
+// Constructor: wraps a NotExpression with the matching Type tag.
+func CreateDimensionsFilterNotExpression(notExpression NotExpression) DimensionsFilter {
+	typ := DimensionsFilterTypeNotExpression
+
+	return DimensionsFilter{
+		NotExpression: &notExpression,
+		Type: typ,
+	}
+}
+
+// Constructor: wraps a single Filter with the matching Type tag.
+func CreateDimensionsFilterFilter(filter Filter) DimensionsFilter {
+	typ := DimensionsFilterTypeFilter
+
+	return DimensionsFilter{
+		Filter: &filter,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON tries each variant in order (andGroup, orGroup, notExpression,
+// filter) and keeps the first that decodes cleanly. The variants carry const
+// filter_type discriminators, which presumably makes the first-match strategy
+// unambiguous — confirm against internal/utils' strict-matching behavior.
+func (u *DimensionsFilter) UnmarshalJSON(data []byte) error {
+
+	andGroup := new(AndGroup)
+	if err := utils.UnmarshalJSON(data, &andGroup, "", true, true); err == nil {
+		u.AndGroup = andGroup
+		u.Type = DimensionsFilterTypeAndGroup
+		return nil
+	}
+
+	orGroup := new(OrGroup)
+	if err := utils.UnmarshalJSON(data, &orGroup, "", true, true); err == nil {
+		u.OrGroup = orGroup
+		u.Type = DimensionsFilterTypeOrGroup
+		return nil
+	}
+
+	notExpression := new(NotExpression)
+	if err := utils.UnmarshalJSON(data, &notExpression, "", true, true); err == nil {
+		u.NotExpression = notExpression
+		u.Type = DimensionsFilterTypeNotExpression
+		return nil
+	}
+
+	filter := new(Filter)
+	if err := utils.UnmarshalJSON(data, &filter, "", true, true); err == nil {
+		u.Filter = filter
+		u.Type = DimensionsFilterTypeFilter
+		return nil
+	}
+
+	// No variant matched; individual decode errors are discarded.
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON emits whichever variant pointer is set, checked in declaration
+// order; errors when all variants are nil. The Type tag is not serialized.
+func (u DimensionsFilter) MarshalJSON() ([]byte, error) {
+	if u.AndGroup != nil {
+		return utils.MarshalJSON(u.AndGroup, "", true)
+	}
+
+	if u.OrGroup != nil {
+		return utils.MarshalJSON(u.OrGroup, "", true)
+	}
+
+	if u.NotExpression != nil {
+		return utils.MarshalJSON(u.NotExpression, "", true)
+	}
+
+	if u.Filter != nil {
+		return utils.MarshalJSON(u.Filter, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// FilterName discriminator for the metric betweenFilter; "betweenFilter" is
+// the only schema-allowed value.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName = "betweenFilter"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only "betweenFilter".
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "betweenFilter":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName: %v", v)
+	}
+}
+
+// ValueType discriminator for the betweenFilter "from" doubleValue variant.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType = "doubleValue"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only "doubleValue".
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType: %v", v)
+	}
+}
+
+// DoubleValue variant of the betweenFilter "from" bound. valueType is
+// unexported and const-tagged to "doubleValue".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue struct {
+	Value float64 `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is a nil-safe accessor (0.0 on nil receiver).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType always returns the "doubleValue" constant.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueTypeDoubleValue
+}
+
+// ValueType discriminator for the betweenFilter "from" int64Value variant.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType = "int64Value"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only "int64Value".
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType: %v", v)
+	}
+}
+
+// Int64Value variant of the betweenFilter "from" bound. Value is a string
+// per the API schema — presumably an int64 carried as a decimal string to
+// avoid JSON number precision loss; confirm against the GA4 Data API spec.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value struct {
+	Value string `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is a nil-safe accessor ("" on nil receiver).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType always returns the "int64Value" constant.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4ValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateFromValueType tags which variant of the
+// betweenFilter "from" bound is populated.
+type SourceGoogleAnalyticsDataAPIUpdateFromValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_int64Value"
+	SourceGoogleAnalyticsDataAPIUpdateFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPIUpdateFromValue is a oneOf wrapper over the
+// int64/double "from" bound; exactly one pointer should be non-nil.
+type SourceGoogleAnalyticsDataAPIUpdateFromValue struct {
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue
+
+	Type SourceGoogleAnalyticsDataAPIUpdateFromValueType
+}
+
+// Constructor: wraps an int64Value bound with the matching Type tag.
+func CreateSourceGoogleAnalyticsDataAPIUpdateFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value) SourceGoogleAnalyticsDataAPIUpdateFromValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value
+
+	return SourceGoogleAnalyticsDataAPIUpdateFromValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value,
+		Type: typ,
+	}
+}
+
+// Constructor: wraps a doubleValue bound with the matching Type tag.
+func CreateSourceGoogleAnalyticsDataAPIUpdateFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue) SourceGoogleAnalyticsDataAPIUpdateFromValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue
+
+	return SourceGoogleAnalyticsDataAPIUpdateFromValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON tries int64Value first, then doubleValue, keeping the first
+// that decodes cleanly; const value_type discriminators presumably keep this
+// unambiguous (confirm in internal/utils).
+func (u *SourceGoogleAnalyticsDataAPIUpdateFromValue) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON emits whichever variant pointer is set (int64 checked first);
+// errors when both are nil.
+func (u SourceGoogleAnalyticsDataAPIUpdateFromValue) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInt64Value, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterDoubleValue, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// ValueType discriminator for the betweenFilter "to" doubleValue variant.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType = "doubleValue"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only "doubleValue".
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType: %v", v)
+	}
+}
+
+// DoubleValue variant of the betweenFilter "to" bound; valueType is
+// const-tagged to "doubleValue".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue struct {
+	Value float64 `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is a nil-safe accessor (0.0 on nil receiver).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType always returns the "doubleValue" constant.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueTypeDoubleValue
+}
+
+// ValueType discriminator for the betweenFilter "to" int64Value variant.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType = "int64Value"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only "int64Value".
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType: %v", v)
+	}
+}
+
+// Int64Value variant of the betweenFilter "to" bound. Value is a string per
+// the API schema — presumably an int64 carried as a decimal string; confirm
+// against the GA4 Data API spec.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value struct {
+	Value string `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is a nil-safe accessor ("" on nil receiver).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType always returns the "int64Value" constant.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateToValueType tags which variant of the
+// betweenFilter "to" bound is populated.
+type SourceGoogleAnalyticsDataAPIUpdateToValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_int64Value"
+	SourceGoogleAnalyticsDataAPIUpdateToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPIUpdateToValue is a oneOf wrapper over the
+// int64/double "to" bound; exactly one pointer should be non-nil.
+type SourceGoogleAnalyticsDataAPIUpdateToValue struct {
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue
+
+	Type SourceGoogleAnalyticsDataAPIUpdateToValueType
+}
+
+// Constructor: wraps an int64Value bound with the matching Type tag.
+func CreateSourceGoogleAnalyticsDataAPIUpdateToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value) SourceGoogleAnalyticsDataAPIUpdateToValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value
+
+	return SourceGoogleAnalyticsDataAPIUpdateToValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value,
+		Type: typ,
+	}
+}
+
+// Constructor: wraps a doubleValue bound with the matching Type tag.
+func CreateSourceGoogleAnalyticsDataAPIUpdateToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue) SourceGoogleAnalyticsDataAPIUpdateToValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue
+
+	return SourceGoogleAnalyticsDataAPIUpdateToValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON tries int64Value first, then doubleValue, keeping the first
+// that decodes cleanly (same strategy as the FromValue union).
+func (u *SourceGoogleAnalyticsDataAPIUpdateToValue) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateBetweenFilter struct {
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName `const:"betweenFilter" json:"filter_name"`
+ FromValue SourceGoogleAnalyticsDataAPIUpdateFromValue `json:"fromValue"`
+ ToValue SourceGoogleAnalyticsDataAPIUpdateToValue `json:"toValue"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateBetweenFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateBetweenFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterNameBetweenFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPIUpdateFromValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdateFromValue{}
+ }
+ return o.FromValue
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPIUpdateToValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdateToValue{}
+ }
+ return o.ToValue
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName
// is the const discriminator for the numeric filter variant.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterNameNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName = "numericFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName {
	return &e
}

// UnmarshalJSON accepts only the literal "numericFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "numericFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName(v)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums
// enumerates the comparison operations accepted by a numeric metric filter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums = "OPERATION_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnumsEqual                SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums = "EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnumsLessThan             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums = "LESS_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnumsLessThanOrEqual      SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums = "LESS_THAN_OR_EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnumsGreaterThan          SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums = "GREATER_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnumsGreaterThanOrEqual   SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums = "GREATER_THAN_OR_EQUAL"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums {
	return &e
}

// UnmarshalJSON validates that the payload is one of the known operation
// strings before assigning it. The allowed values are listed in a single
// case clause (idiomatic Go) instead of a fallthrough ladder.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "OPERATION_UNSPECIFIED",
		"EQUAL",
		"LESS_THAN",
		"LESS_THAN_OR_EQUAL",
		"GREATER_THAN",
		"GREATER_THAN_OR_EQUAL":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueType
// is the const discriminator for the double-valued variant.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueType {
	return &e
}

// UnmarshalJSON accepts only the literal "doubleValue".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueType: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueType(v)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueType
// is the const discriminator for the int64-valued variant.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueType {
	return &e
}

// UnmarshalJSON accepts only the literal "int64Value".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueType: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueType(v)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateValueUnionType names which member of the
// Value union is populated.
type SourceGoogleAnalyticsDataAPIUpdateValueUnionType string

const (
	SourceGoogleAnalyticsDataAPIUpdateValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value  SourceGoogleAnalyticsDataAPIUpdateValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue SourceGoogleAnalyticsDataAPIUpdateValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_doubleValue"
)

// SourceGoogleAnalyticsDataAPIUpdateValue is a union of the int64 and double
// value variants; exactly one pointer field is expected to be non-nil, with
// Type naming it.
type SourceGoogleAnalyticsDataAPIUpdateValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value  *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue

	Type SourceGoogleAnalyticsDataAPIUpdateValueUnionType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value) SourceGoogleAnalyticsDataAPIUpdateValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value
+
+ return SourceGoogleAnalyticsDataAPIUpdateValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue) SourceGoogleAnalyticsDataAPIUpdateValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue
+
+ return SourceGoogleAnalyticsDataAPIUpdateValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes data into the first union member that accepts it.
// The int64 variant is tried before the double variant; the first strict
// decode that succeeds sets both the member pointer and Type, so the try
// order here is load-bearing.
func (u *SourceGoogleAnalyticsDataAPIUpdateValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value
		u.Type = SourceGoogleAnalyticsDataAPIUpdateValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue
		u.Type = SourceGoogleAnalyticsDataAPIUpdateValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue
		return nil
	}

	// No variant matched the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateNumericFilter struct {
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName `const:"numericFilter" json:"filter_name"`
+ Operation []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums `json:"operation"`
+ Value SourceGoogleAnalyticsDataAPIUpdateValue `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateNumericFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateNumericFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterNameNumericFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums {
+ if o == nil {
+ return []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValidEnums{}
+ }
+ return o.Operation
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateNumericFilter) GetValue() SourceGoogleAnalyticsDataAPIUpdateValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdateValue{}
+ }
+ return o.Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterName
// is the const discriminator for the in-list filter variant.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterNameInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterName = "inListFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterName {
	return &e
}

// UnmarshalJSON accepts only the literal "inListFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterName) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "inListFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterName: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterName(v)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPIUpdateInListFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterName `const:"inListFilter" json:"filter_name"`
+ Values []string `json:"values"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateInListFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateInListFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateInListFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterNameInListFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateInListFilter) GetValues() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Values
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterName
// is the const discriminator for the string filter variant.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterNameStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterName = "stringFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterName {
	return &e
}

// UnmarshalJSON accepts only the literal "stringFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterName) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "stringFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterName: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterName(v)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums
// enumerates the match types accepted by a string filter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums = "MATCH_TYPE_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnumsExact                SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums = "EXACT"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnumsBeginsWith           SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums = "BEGINS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnumsEndsWith             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums = "ENDS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnumsContains             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums = "CONTAINS"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnumsFullRegexp           SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums = "FULL_REGEXP"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnumsPartialRegexp        SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums = "PARTIAL_REGEXP"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums {
	return &e
}

// UnmarshalJSON validates that the payload is one of the known match-type
// strings before assigning it. The allowed values are listed in a single
// case clause (idiomatic Go) instead of a fallthrough ladder.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "MATCH_TYPE_UNSPECIFIED",
		"EXACT",
		"BEGINS_WITH",
		"ENDS_WITH",
		"CONTAINS",
		"FULL_REGEXP",
		"PARTIAL_REGEXP":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums: %v", v)
	}
}
+
+type SourceGoogleAnalyticsDataAPIUpdateStringFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterName `const:"stringFilter" json:"filter_name"`
+ MatchType []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums `json:"matchType,omitempty"`
+ Value string `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateStringFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateStringFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateStringFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterNameStringFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayValidEnums {
+ if o == nil {
+ return nil
+ }
+ return o.MatchType
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateStringFilter) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionType names
// which member of the filter union is populated.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateStringFilter  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionType = "source-google-analytics-data-api-update_stringFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateInListFilter  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionType = "source-google-analytics-data-api-update_inListFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionType = "source-google-analytics-data-api-update_numericFilter"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionType = "source-google-analytics-data-api-update_betweenFilter"
)

// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter is a union of
// the four primitive filter shapes; exactly one pointer field is expected to be
// non-nil, with Type naming it.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter struct {
	SourceGoogleAnalyticsDataAPIUpdateStringFilter  *SourceGoogleAnalyticsDataAPIUpdateStringFilter
	SourceGoogleAnalyticsDataAPIUpdateInListFilter  *SourceGoogleAnalyticsDataAPIUpdateInListFilter
	SourceGoogleAnalyticsDataAPIUpdateNumericFilter *SourceGoogleAnalyticsDataAPIUpdateNumericFilter
	SourceGoogleAnalyticsDataAPIUpdateBetweenFilter *SourceGoogleAnalyticsDataAPIUpdateBetweenFilter

	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPIUpdateStringFilter(sourceGoogleAnalyticsDataAPIUpdateStringFilter SourceGoogleAnalyticsDataAPIUpdateStringFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateStringFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIUpdateStringFilter: &sourceGoogleAnalyticsDataAPIUpdateStringFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPIUpdateInListFilter(sourceGoogleAnalyticsDataAPIUpdateInListFilter SourceGoogleAnalyticsDataAPIUpdateInListFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateInListFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIUpdateInListFilter: &sourceGoogleAnalyticsDataAPIUpdateInListFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPIUpdateNumericFilter(sourceGoogleAnalyticsDataAPIUpdateNumericFilter SourceGoogleAnalyticsDataAPIUpdateNumericFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateNumericFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIUpdateNumericFilter: &sourceGoogleAnalyticsDataAPIUpdateNumericFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterSourceGoogleAnalyticsDataAPIUpdateBetweenFilter(sourceGoogleAnalyticsDataAPIUpdateBetweenFilter SourceGoogleAnalyticsDataAPIUpdateBetweenFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateBetweenFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter{
+ SourceGoogleAnalyticsDataAPIUpdateBetweenFilter: &sourceGoogleAnalyticsDataAPIUpdateBetweenFilter,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes data into the first filter variant that accepts it.
// Variants are tried in order inList -> numeric -> between -> string; the
// first strict decode that succeeds sets the member pointer and Type.
// NOTE(review): stringFilter is tried last, presumably because its shape is
// the most permissive of the four — confirm against the generator's ordering
// heuristic before relying on it.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateInListFilter := new(SourceGoogleAnalyticsDataAPIUpdateInListFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateInListFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateInListFilter = sourceGoogleAnalyticsDataAPIUpdateInListFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateInListFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateNumericFilter := new(SourceGoogleAnalyticsDataAPIUpdateNumericFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateNumericFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateNumericFilter = sourceGoogleAnalyticsDataAPIUpdateNumericFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateNumericFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateBetweenFilter := new(SourceGoogleAnalyticsDataAPIUpdateBetweenFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateBetweenFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateBetweenFilter = sourceGoogleAnalyticsDataAPIUpdateBetweenFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateBetweenFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateStringFilter := new(SourceGoogleAnalyticsDataAPIUpdateStringFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateStringFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateStringFilter = sourceGoogleAnalyticsDataAPIUpdateStringFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateStringFilter
		return nil
	}

	// No variant matched the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateStringFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateStringFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateInListFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateInListFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateNumericFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateNumericFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateBetweenFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateBetweenFilter, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType
// is the const discriminator ("filter") for the primitive filter expression.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterTypeFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType = "filter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType {
	return &e
}

// UnmarshalJSON accepts only the literal "filter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "filter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType(v)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateFilter - A primitive filter. In the same FilterExpression, all of the filter's field names need to be either all metrics.
type SourceGoogleAnalyticsDataAPIUpdateFilter struct {
	// FieldName is the field the filter applies to.
	FieldName string `json:"field_name"`
	// Filter is a union of the supported primitive filter shapes (string / inList / numeric / between).
	Filter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter `json:"filter"`
	// filterType is a fixed discriminator; the `const` tag is presumably injected/validated by the SDK's utils package.
	filterType *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType `const:"filter" json:"filter_type,omitempty"`
}

// MarshalJSON delegates to the SDK utils marshaller so const-tagged fields are emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to the SDK utils unmarshaller (handles const/default tags).
func (s *SourceGoogleAnalyticsDataAPIUpdateFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetFieldName returns FieldName; safe to call on a nil receiver (returns "").
func (o *SourceGoogleAnalyticsDataAPIUpdateFilter) GetFieldName() string {
	if o == nil {
		return ""
	}
	return o.FieldName
}

// GetFilter returns the filter union; safe to call on a nil receiver (returns the zero value).
func (o *SourceGoogleAnalyticsDataAPIUpdateFilter) GetFilter() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter {
	if o == nil {
		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayFilter{}
	}
	return o.Filter
}

// GetFilterType always returns the const discriminator value, regardless of the receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateFilter) GetFilterType() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter4FilterTypeFilter.ToPointer()
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName is a
// string enum with the single allowed value "betweenFilter", used as a const discriminator field.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName = "betweenFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than the allowed value.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "betweenFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType is a
// string enum with the single allowed value "doubleValue", used as a const discriminator field.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than the allowed value.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
// carries a float64 value plus a fixed "doubleValue" discriminator.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue struct {
	Value float64 `json:"value"`
	// valueType is a fixed discriminator; the `const` tag is presumably handled by the SDK's utils package.
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType `const:"doubleValue" json:"value_type"`
}

// MarshalJSON delegates to the SDK utils marshaller so const-tagged fields are emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to the SDK utils unmarshaller (handles const/default tags).
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns Value; safe to call on a nil receiver (returns 0.0).
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue) GetValue() float64 {
	if o == nil {
		return 0.0
	}
	return o.Value
}

// GetValueType always returns the const discriminator value, regardless of the receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueTypeDoubleValue
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType is a
// string enum with the single allowed value "int64Value", used as a const discriminator field.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than the allowed value.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
// carries an int64 value (represented as a string in JSON) plus a fixed "int64Value" discriminator.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value struct {
	// Value is the integer carried as a string.
	Value string `json:"value"`
	// valueType is a fixed discriminator; the `const` tag is presumably handled by the SDK's utils package.
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType `const:"int64Value" json:"value_type"`
}

// MarshalJSON delegates to the SDK utils marshaller so const-tagged fields are emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to the SDK utils unmarshaller (handles const/default tags).
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns Value; safe to call on a nil receiver (returns "").
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}

// GetValueType always returns the const discriminator value, regardless of the receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueTypeInt64Value
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueType
// identifies which member of the FromValue union is populated.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_3_expression_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_3_expression_doubleValue"
)
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue is a union:
// exactly one of the member pointers is set, and Type records which one.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value  *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue

	// Type is the discriminator for the populated member.
	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes the union by attempting each member type in turn and
// keeping the first that parses cleanly via the SDK utils unmarshaller.
// NOTE(review): int64Value is tried before doubleValue, so the try order is
// significant for payloads that could match both.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value, "", true, true); err == nil {
		// Success: record the member and its discriminator.
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue, "", true, true); err == nil {
		// Success: record the member and its discriminator.
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue
		return nil
	}

	// Neither member matched the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType is a
// string enum with the single allowed value "doubleValue", used as a const discriminator field.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than the allowed value.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue
// carries a float64 value plus a fixed "doubleValue" discriminator.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue struct {
	Value float64 `json:"value"`
	// valueType is a fixed discriminator; the `const` tag is presumably handled by the SDK's utils package.
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType `const:"doubleValue" json:"value_type"`
}

// MarshalJSON delegates to the SDK utils marshaller so const-tagged fields are emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to the SDK utils unmarshaller (handles const/default tags).
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns Value; safe to call on a nil receiver (returns 0.0).
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue) GetValue() float64 {
	if o == nil {
		return 0.0
	}
	return o.Value
}

// GetValueType always returns the const discriminator value, regardless of the receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueTypeDoubleValue
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType is a
// string enum with the single allowed value "int64Value", used as a const discriminator field.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than the allowed value.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value
// carries an int64 value (represented as a string in JSON) plus a fixed "int64Value" discriminator.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value struct {
	// Value is the integer carried as a string.
	Value string `json:"value"`
	// valueType is a fixed discriminator; the `const` tag is presumably handled by the SDK's utils package.
	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType `const:"int64Value" json:"value_type"`
}

// MarshalJSON delegates to the SDK utils marshaller so const-tagged fields are emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to the SDK utils unmarshaller (handles const/default tags).
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetValue returns Value; safe to call on a nil receiver (returns "").
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value) GetValue() string {
	if o == nil {
		return ""
	}
	return o.Value
}

// GetValueType always returns the const discriminator value, regardless of the receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueTypeInt64Value
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueType
// identifies which member of the ToValue union is populated.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_3_expression_filter_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_3_expression_filter_doubleValue"
)
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue is a union:
// exactly one of the member pointers is set, and Type records which one.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value  *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue

	// Type is the discriminator for the populated member.
	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes the union by attempting each member type in turn and
// keeping the first that parses cleanly via the SDK utils unmarshaller.
// NOTE(review): int64Value is tried before doubleValue, so the try order is
// significant for payloads that could match both.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value, "", true, true); err == nil {
		// Success: record the member and its discriminator.
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue, "", true, true); err == nil {
		// Success: record the member and its discriminator.
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue
		return nil
	}

	// Neither member matched the payload.
	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter is an
// inclusive range filter with a lower (FromValue) and upper (ToValue) bound.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter struct {
	// filterName is a fixed discriminator; the `const` tag is presumably handled by the SDK's utils package.
	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName `const:"betweenFilter" json:"filter_name"`
	// FromValue is the lower bound (int64Value or doubleValue union).
	FromValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue `json:"fromValue"`
	// ToValue is the upper bound (int64Value or doubleValue union).
	ToValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue `json:"toValue"`
}

// MarshalJSON delegates to the SDK utils marshaller so const-tagged fields are emitted.
func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) MarshalJSON() ([]byte, error) {
	return utils.MarshalJSON(s, "", false)
}

// UnmarshalJSON delegates to the SDK utils unmarshaller (handles const/default tags).
func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) UnmarshalJSON(data []byte) error {
	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
		return err
	}
	return nil
}

// GetFilterName always returns the const discriminator value, regardless of the receiver.
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName {
	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterNameBetweenFilter
}

// GetFromValue returns FromValue; safe to call on a nil receiver (returns the zero value).
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue{}
	}
	return o.FromValue
}

// GetToValue returns ToValue; safe to call on a nil receiver (returns the zero value).
func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue {
	if o == nil {
		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue{}
	}
	return o.ToValue
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName is a
// string enum with the single allowed value "numericFilter", used as a const discriminator field.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterNameNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName = "numericFilter"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than the allowed value.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err != nil {
		return err
	}
	if s != "numericFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName: %v", s)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName(s)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums is a
// string enum of comparison operations (EQUAL, LESS_THAN, ... — see the const block below).
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "OPERATION_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsEqual                SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsLessThan             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "LESS_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsLessThanOrEqual      SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "LESS_THAN_OR_EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsGreaterThan          SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "GREATER_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnumsGreaterThanOrEqual   SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums = "GREATER_THAN_OR_EQUAL"
)

// ToPointer returns a pointer to a copy of e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything outside the allowed set.
// The fallthrough ladder of the generated original is replaced by Go's idiomatic
// multi-value case list; behavior is unchanged.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "OPERATION_UNSPECIFIED",
		"EQUAL",
		"LESS_THAN",
		"LESS_THAN_OR_EQUAL",
		"GREATER_THAN",
		"GREATER_THAN_OR_EQUAL":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType is the
// single-valued discriminator for the double-typed comparison value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType = "doubleValue"
)

// ToPointer returns a pointer to e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType {
	return &e
}

// UnmarshalJSON accepts only the literal string "doubleValue".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType(v)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType is the single-valued
// discriminator for the int64-typed comparison value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType = "int64Value"
)

// ToPointer returns a pointer to e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType {
	return &e
}

// UnmarshalJSON accepts only the literal string "int64Value".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType(v)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionType names which variant
// of the Value union is populated. The string values are generator-derived
// discriminator tags; do not edit them by hand.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionType string

const (
	// Tag for the int64 variant (its value is carried as a JSON string).
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_3_int64Value"
	// Tag for the double (float64) variant.
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_3_doubleValue"
)
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value struct {
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue
+
+ Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionType
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON tries each union variant in a fixed order and keeps the first
// one that decodes without error, recording the matched variant in Type.
// NOTE(review): try order is load-bearing — int64 (string-valued) is attempted
// before double; do not reorder without checking generator intent.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}

// MarshalJSON serializes whichever variant pointer is non-nil; it is an error
// for the union to be empty.
func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter struct {
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName `const:"numericFilter" json:"filter_name"`
+ Operation []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums `json:"operation"`
+ Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterNameNumericFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums {
+ if o == nil {
+ return []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums{}
+ }
+ return o.Operation
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) GetValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Value{}
+ }
+ return o.Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName is the
// single-valued discriminator for the in-list filter variant.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterNameInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName = "inListFilter"
)

// ToPointer returns a pointer to e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName {
	return &e
}

// UnmarshalJSON accepts only the literal string "inListFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "inListFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName(v)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName `const:"inListFilter" json:"filter_name"`
+ Values []string `json:"values"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterNameInListFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) GetValues() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Values
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName is the single-valued
// discriminator for the string filter variant.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterNameStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName = "stringFilter"
)

// ToPointer returns a pointer to e.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName {
	return &e
}

// UnmarshalJSON accepts only the literal string "stringFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v != "stringFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName: %v", v)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName(v)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums enumerates the string
// match types accepted by the string filter's matchType field.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "MATCH_TYPE_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsExact SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "EXACT"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsBeginsWith SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "BEGINS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsEndsWith SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "ENDS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsContains SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "CONTAINS"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsFullRegexp SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "FULL_REGEXP"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnumsPartialRegexp SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums = "PARTIAL_REGEXP"
)

// ToPointer returns a pointer to e, convenient for populating optional fields.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums {
	return &e
}

// UnmarshalJSON decodes a JSON string into e, rejecting any value outside the
// declared enum set. A comma-separated case list replaces the generated
// fallthrough chain — same behavior, idiomatic Go.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "MATCH_TYPE_UNSPECIFIED",
		"EXACT",
		"BEGINS_WITH",
		"ENDS_WITH",
		"CONTAINS",
		"FULL_REGEXP",
		"PARTIAL_REGEXP":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums: %v", v)
	}
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName `const:"stringFilter" json:"filter_name"`
+ MatchType []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums `json:"matchType,omitempty"`
+ Value string `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterNameStringFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums {
+ if o == nil {
+ return nil
+ }
+ return o.MatchType
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType names which
// variant of the Filter union is populated. The string values are
// generator-derived discriminator tags; do not edit them by hand.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType string

const (
	// Tag for the string-filter variant.
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_3_stringFilter"
	// Tag for the in-list-filter variant.
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_3_inListFilter"
	// Tag for the numeric-filter variant.
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_3_numericFilter"
	// Tag for the between-filter variant.
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_3_betweenFilter"
)
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter struct {
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter
+
+ Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionType
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes the Filter union by emitting the first non-nil
+// variant, checked in order: string, inList, numeric, between. It errors
+// when every variant pointer is nil.
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterExpression
+// pairs a metric field name with the filter applied to that field.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterExpression struct {
+	FieldName string `json:"field_name"`
+	Filter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter `json:"filter"`
+}
+
+// GetFieldName returns FieldName; safe to call on a nil receiver (returns "").
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterExpression) GetFieldName() string {
+	if o == nil {
+		return ""
+	}
+	return o.FieldName
+}
+
+// GetFilter returns Filter; safe to call on a nil receiver (returns the zero union).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterExpression) GetFilter() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3Filter{}
+	}
+	return o.Filter
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType
+// is a string enum whose only valid value is "notExpression".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterTypeNotExpression SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType = "notExpression"
+)
+
+// ToPointer returns a pointer to the enum value, for use in optional fields.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal "notExpression"; any other string errors.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "notExpression":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateNotExpression - The FilterExpression is NOT of notExpression.
+// The unexported filterType field carries the constant discriminator
+// "notExpression" (emitted/validated via the `const` struct tag, which is
+// presumably interpreted by utils.MarshalJSON/UnmarshalJSON — confirm in
+// the utils package).
+type SourceGoogleAnalyticsDataAPIUpdateNotExpression struct {
+	Expression *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterExpression `json:"expression,omitempty"`
+	filterType *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType `const:"notExpression" json:"filter_type,omitempty"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateNotExpression) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateNotExpression) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetExpression returns Expression; safe to call on a nil receiver.
+func (o *SourceGoogleAnalyticsDataAPIUpdateNotExpression) GetExpression() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterExpression {
+	if o == nil {
+		return nil
+	}
+	return o.Expression
+}
+
+// GetFilterType always returns the constant "notExpression" discriminator,
+// ignoring the receiver entirely.
+func (o *SourceGoogleAnalyticsDataAPIUpdateNotExpression) GetFilterType() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter3FilterTypeNotExpression.ToPointer()
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName
+// is a string enum whose only valid value is "betweenFilter".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterNameBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName = "betweenFilter"
+)
+
+// ToPointer returns a pointer to the enum value, for use in optional fields.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal "betweenFilter"; any other string errors.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "betweenFilter":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType
+// is a string enum whose only valid value is "doubleValue".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to the enum value, for use in optional fields.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal "doubleValue"; any other string errors.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
+// holds a float64 comparison value; the unexported valueType field pins the
+// "doubleValue" discriminator via the `const` struct tag.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue struct {
+	Value float64 `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns Value; safe to call on a nil receiver (returns 0.0).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType always returns the constant "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType
+// is a string enum whose only valid value is "int64Value".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to the enum value, for use in optional fields.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal "int64Value"; any other string errors.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
+// holds an int64 comparison value. Note Value is typed string — presumably
+// because the upstream API carries int64 as a JSON string; confirm against
+// the Analytics Data API spec. The unexported valueType pins "int64Value".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value struct {
+	Value string `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns Value; safe to call on a nil receiver (returns "").
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType always returns the constant "int64Value" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueType
+// discriminates which variant of the FromValue union is populated.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_2_expressions_int64Value"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_2_expressions_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue
+// is a oneOf union of the int64Value and doubleValue variants; exactly one
+// variant pointer is set, recorded in Type.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue struct {
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
+
+	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueType
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
+// builds the union with the int64Value variant set.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
+// builds the union with the doubleValue variant set.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON tries each variant in declaration order (int64Value first,
+// then doubleValue); the first variant that unmarshals cleanly wins, so the
+// try order is load-bearing for inputs both variants would accept.
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON emits the first non-nil variant; errors when both are nil.
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType
+// is a string enum whose only valid value is "doubleValue".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to the enum value, for use in optional fields.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal "doubleValue"; any other string errors.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
+// holds a float64 comparison value; the unexported valueType field pins the
+// "doubleValue" discriminator via the `const` struct tag.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue struct {
+	Value float64 `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns Value; safe to call on a nil receiver (returns 0.0).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType always returns the constant "doubleValue" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType
+// is a string enum whose only valid value is "int64Value".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to the enum value, for use in optional fields.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal "int64Value"; any other string errors.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
+// holds an int64 comparison value carried as a string (Value is typed
+// string in the generated model); the unexported valueType pins "int64Value".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value struct {
+	Value string `json:"value"`
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue returns Value; safe to call on a nil receiver (returns "").
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType always returns the constant "int64Value" discriminator.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueType
+// discriminates which variant of the ToValue union is populated.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_2_expressions_filter_int64Value"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_2_expressions_filter_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue
+// is a oneOf union of the int64Value and doubleValue variants; exactly one
+// variant pointer is set, recorded in Type.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue struct {
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
+
+	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueType
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
+// builds the union with the int64Value variant set.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
+// builds the union with the doubleValue variant set.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON tries each variant in declaration order (int64Value first,
+// then doubleValue); the first variant that unmarshals cleanly wins, so the
+// try order is load-bearing for inputs both variants would accept.
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON emits the first non-nil variant; errors when both are nil.
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter
+// models a betweenFilter: the metric must lie between FromValue and ToValue.
+// The unexported filterName field pins the "betweenFilter" discriminator
+// via the `const` struct tag.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter struct {
+	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName `const:"betweenFilter" json:"filter_name"`
+	FromValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue `json:"fromValue"`
+	ToValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue `json:"toValue"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetFilterName always returns the constant "betweenFilter" discriminator,
+// ignoring the receiver (no nil check needed).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterNameBetweenFilter
+}
+
+// GetFromValue returns FromValue; safe to call on a nil receiver (zero union).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFromValue{}
+	}
+	return o.FromValue
+}
+
+// GetToValue returns ToValue; safe to call on a nil receiver (zero union).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterToValue{}
+	}
+	return o.ToValue
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName is the
// discriminator for a numeric filter expression; "numericFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterNameNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName = "numericFilter"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "numericFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v == "numericFilter" {
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName(v)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName: %v", v)
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums enumerates
// the comparison operations accepted by a numeric filter expression.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "OPERATION_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsEqual                SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsLessThan             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "LESS_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsLessThanOrEqual      SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "LESS_THAN_OR_EQUAL"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsGreaterThan          SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "GREATER_THAN"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnumsGreaterThanOrEqual   SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums = "GREATER_THAN_OR_EQUAL"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects values outside the known operation set.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "OPERATION_UNSPECIFIED", "EQUAL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "GREATER_THAN", "GREATER_THAN_OR_EQUAL":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums: %v", v)
	}
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType is the
// discriminator for a double-typed filter value; "doubleValue" is its only legal value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType = "doubleValue"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "doubleValue".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v == "doubleValue" {
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType(v)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType: %v", v)
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType is the discriminator
// for an int64-typed filter value; "int64Value" is its only legal value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType = "int64Value"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "int64Value".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v == "int64Value" {
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType(v)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType: %v", v)
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionType names which
// variant of the numeric-filter value union is populated.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_2_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_2_doubleValue"
)

// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue is a one-of wrapper:
// exactly one of the variant pointers is set, and Type records which one.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue

	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionType
}

// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value
// wraps an int64 (string-encoded) value variant into the union, tagging Type accordingly.
func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue {
	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value

	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue{
		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value,
		Type: typ,
	}
}

// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue
// wraps a float64 value variant into the union, tagging Type accordingly.
func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue {
	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue

	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue{
		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue,
		Type: typ,
	}
}

// UnmarshalJSON probes each variant in a fixed order — Int64Value (string value) first, then DoubleValue (float64
// value) — and keeps the first that decodes cleanly under the helper's strict/const validation. NOTE(review): the
// probe order is significant for inputs that could satisfy both shapes; do not reorder without checking generator
// intent.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}

// MarshalJSON serializes whichever variant pointer is non-nil; errors when the union is empty.
func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue) MarshalJSON() ([]byte, error) {
	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value, "", true)
	}

	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue != nil {
		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue, "", true)
	}

	return nil, errors.New("could not marshal union type: all fields are null")
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter struct {
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName `const:"numericFilter" json:"filter_name"`
+ Operation []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums `json:"operation"`
+ Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterNameNumericFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums {
+ if o == nil {
+ return []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums{}
+ }
+ return o.Operation
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) GetValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValue{}
+ }
+ return o.Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName is the
// discriminator for an in-list filter expression; "inListFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterNameInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName = "inListFilter"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "inListFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v == "inListFilter" {
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName(v)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName: %v", v)
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName `const:"inListFilter" json:"filter_name"`
+ Values []string `json:"values"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterNameInListFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) GetValues() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Values
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName is the discriminator
// for a string filter expression; "stringFilter" is its only legal value.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterNameStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName = "stringFilter"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything other than "stringFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	if v == "stringFilter" {
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName(v)
		return nil
	}
	return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName: %v", v)
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums enumerates the
// match types accepted by a string filter.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "MATCH_TYPE_UNSPECIFIED"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsExact                SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "EXACT"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsBeginsWith           SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "BEGINS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsEndsWith             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "ENDS_WITH"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsContains             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "CONTAINS"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsFullRegexp           SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "FULL_REGEXP"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnumsPartialRegexp        SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums = "PARTIAL_REGEXP"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects values outside the known match-type set.
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "MATCH_TYPE_UNSPECIFIED", "EXACT", "BEGINS_WITH", "ENDS_WITH", "CONTAINS", "FULL_REGEXP", "PARTIAL_REGEXP":
		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums(v)
		return nil
	default:
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums: %v", v)
	}
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter struct {
+ CaseSensitive *bool `json:"caseSensitive,omitempty"`
+ filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName `const:"stringFilter" json:"filter_name"`
+ MatchType []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums `json:"matchType,omitempty"`
+ Value string `json:"value"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) GetCaseSensitive() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.CaseSensitive
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2FilterNameStringFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums {
+ if o == nil {
+ return nil
+ }
+ return o.MatchType
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType string
+
+const (
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_stringFilter"
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_inListFilter"
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_numericFilter"
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_betweenFilter"
+)
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter struct {
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter
+
+ Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionType
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes data into exactly one variant of the Filter union.
// Candidates are probed in a fixed order (InList, Numeric, Between, String)
// using utils.UnmarshalJSON in strict mode; the first candidate that decodes
// without error is stored and Type is set to the matching discriminator.
// NOTE(review): probe order decides the winner for ambiguous payloads —
// presumably each variant's const filter_name tag makes strict decoding
// reject mismatches; confirm against utils.UnmarshalJSON before relying on it.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterExpression struct {
+ FieldName string `json:"field_name"`
+ Filter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter `json:"filter"`
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterExpression) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterExpression) GetFilter() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilter{}
+ }
+ return o.Filter
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterType
// is a string enum whose only legal value is "orGroup".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterTypeOrGroup SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterType = "orGroup"
)

// ToPointer returns a pointer to a copy of the receiver.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything but "orGroup".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterType) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	if decoded != "orGroup" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterType: %v", decoded)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterType(decoded)
	return nil
}
+
+// SourceGoogleAnalyticsDataAPIUpdateOrGroup - The FilterExpressions in orGroup have an OR relationship.
+type SourceGoogleAnalyticsDataAPIUpdateOrGroup struct {
+ Expressions []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterExpression `json:"expressions"`
+ filterType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterType `const:"orGroup" json:"filter_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateOrGroup) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateOrGroup) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateOrGroup) GetExpressions() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterExpression {
+ if o == nil {
+ return []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterExpression{}
+ }
+ return o.Expressions
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateOrGroup) GetFilterType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterFilterTypeOrGroup
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName
// is a string enum whose only legal value is "betweenFilter".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterNameBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName = "betweenFilter"
)

// ToPointer returns a pointer to a copy of the receiver.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything but "betweenFilter".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	if decoded != "betweenFilter" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName: %v", decoded)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName(decoded)
	return nil
}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType
// is a string enum whose only legal value is "doubleValue".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of the receiver.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything but "doubleValue".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	if decoded != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType: %v", decoded)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType(decoded)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType
// is a string enum whose only legal value is "int64Value".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of the receiver.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything but "int64Value".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	if decoded != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType: %v", decoded)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType(decoded)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueType
// discriminates which variant of the FromValue union is populated.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_1_expressions_filter_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_1_expressions_filter_doubleValue"
)

// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue
// is a union over the int64Value and doubleValue numeric representations of a
// between-filter lower bound; exactly one variant pointer is expected to be
// non-nil, with Type recording which.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value  *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue

	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes data into exactly one variant of the FromValue union.
// Int64Value is probed before DoubleValue with utils.UnmarshalJSON in strict
// mode; the first successful decode wins and Type records the variant.
// NOTE(review): probe order decides ambiguous payloads — presumably the const
// value_type tag disambiguates under strict decoding; confirm against
// utils.UnmarshalJSON.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType
// is a string enum whose only legal value is "doubleValue".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType = "doubleValue"
)

// ToPointer returns a pointer to a copy of the receiver.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything but "doubleValue".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	if decoded != "doubleValue" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType: %v", decoded)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType(decoded)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue struct {
+ Value float64 `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType `const:"doubleValue" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue) GetValue() float64 {
+ if o == nil {
+ return 0.0
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueTypeDoubleValue
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType
// is a string enum whose only legal value is "int64Value".
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType = "int64Value"
)

// ToPointer returns a pointer to a copy of the receiver.
func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType {
	return &e
}

// UnmarshalJSON decodes a JSON string and rejects anything but "int64Value".
func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType) UnmarshalJSON(data []byte) error {
	var decoded string
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	if decoded != "int64Value" {
		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType: %v", decoded)
	}
	*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType(decoded)
	return nil
}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value struct {
+ Value string `json:"value"`
+ valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType `const:"int64Value" json:"value_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueTypeInt64Value
+}
+
// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueType
// discriminates which variant of the ToValue union is populated.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueType string

const (
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_1_int64Value"
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_1_doubleValue"
)

// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue
// is a union over the int64Value and doubleValue numeric representations of a
// between-filter upper bound; exactly one variant pointer is expected to be
// non-nil, with Type recording which.
type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue struct {
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value  *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value
	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue

	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueType
}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue {
+ typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue
+
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue{
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue,
+ Type: typ,
+ }
+}
+
// UnmarshalJSON decodes data into exactly one variant of the ToValue union.
// Int64Value is probed before DoubleValue with utils.UnmarshalJSON in strict
// mode; the first successful decode wins and Type records the variant.
// NOTE(review): probe order decides ambiguous payloads — presumably the const
// value_type tag disambiguates under strict decoding; confirm against
// utils.UnmarshalJSON.
func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue) UnmarshalJSON(data []byte) error {

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value
		return nil
	}

	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue)
	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue, "", true, true); err == nil {
		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue
		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValueTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}
+
+// MarshalJSON serializes whichever variant of the ToValue union is set,
+// checking the int64 variant before the double variant; it errors when
+// neither pointer is non-nil.
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter is the
+// generated model for the "betweenFilter" metric-filter variant: a range check with
+// inclusive fromValue/toValue bounds.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter struct {
+	// filterName is unexported; the `const` tag tells the utils (de)serializer to
+	// emit/require the fixed discriminator value "betweenFilter".
+	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName `const:"betweenFilter" json:"filter_name"`
+	FromValue  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue                           `json:"fromValue"`
+	ToValue    SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue                             `json:"toValue"`
+}
+
+// MarshalJSON delegates to the tag-aware utils marshaler (non-strict const handling).
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the tag-aware utils unmarshaler, which also
+// validates the const discriminator field.
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetFilterName always returns the "betweenFilter" discriminator constant,
+// independent of the receiver (which may be nil).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterNameBetweenFilter
+}
+
+// GetFromValue is a nil-safe accessor; a nil receiver yields the zero value.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter) GetFromValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFromValue{}
+	}
+	return o.FromValue
+}
+
+// GetToValue is a nil-safe accessor; a nil receiver yields the zero value.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter) GetToValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterToValue{}
+	}
+	return o.ToValue
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName
+// is the single-valued discriminator enum for the "numericFilter" variant.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterNameNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName = "numericFilter"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal "numericFilter" and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "numericFilter":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums
+// enumerates the comparison operations allowed in a numeric metric filter.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsOperationUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "OPERATION_UNSPECIFIED"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsEqual                SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "EQUAL"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsLessThan             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "LESS_THAN"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsLessThanOrEqual      SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "LESS_THAN_OR_EQUAL"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsGreaterThan          SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "GREATER_THAN"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnumsGreaterThanOrEqual   SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums = "GREATER_THAN_OR_EQUAL"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums {
+	return &e
+}
+
+// UnmarshalJSON validates the string against the declared operation set;
+// the fallthrough chain funnels every accepted literal into one assignment.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "OPERATION_UNSPECIFIED":
+		fallthrough
+	case "EQUAL":
+		fallthrough
+	case "LESS_THAN":
+		fallthrough
+	case "LESS_THAN_OR_EQUAL":
+		fallthrough
+	case "GREATER_THAN":
+		fallthrough
+	case "GREATER_THAN_OR_EQUAL":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType
+// is the single-valued discriminator enum for the "doubleValue" variant.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueTypeDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType = "doubleValue"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal "doubleValue" and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "doubleValue":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+// wraps a float64 metric value tagged with the fixed value_type "doubleValue".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue struct {
+	Value float64 `json:"value"`
+	// valueType is unexported; the `const` tag makes the utils (de)serializer
+	// emit/require the fixed discriminator "doubleValue".
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType `const:"doubleValue" json:"value_type"`
+}
+
+// MarshalJSON delegates to the tag-aware utils marshaler.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the tag-aware utils unmarshaler (validates the const tag).
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is a nil-safe accessor; a nil receiver yields 0.0.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue) GetValue() float64 {
+	if o == nil {
+		return 0.0
+	}
+	return o.Value
+}
+
+// GetValueType always returns the "doubleValue" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueTypeDoubleValue
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType
+// is the single-valued discriminator enum for the "int64Value" variant.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueTypeInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType = "int64Value"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal "int64Value" and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "int64Value":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+// wraps an int64 metric value carried as a string (JSON int64s are quoted),
+// tagged with the fixed value_type "int64Value".
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value struct {
+	Value string `json:"value"`
+	// valueType is unexported; the `const` tag makes the utils (de)serializer
+	// emit/require the fixed discriminator "int64Value".
+	valueType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType `const:"int64Value" json:"value_type"`
+}
+
+// MarshalJSON delegates to the tag-aware utils marshaler.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the tag-aware utils unmarshaler (validates the const tag).
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetValue is a nil-safe accessor; a nil receiver yields "".
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// GetValueType always returns the "int64Value" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value) GetValueType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueTypeInt64Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueUnionType tags
+// which variant of the Value union is populated.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueUnionType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_1_expressions_int64Value"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_Metrics filter_1_expressions_doubleValue"
+)
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue is a oneOf
+// union: exactly one of the variant pointers is expected to be set, with Type recording which.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue struct {
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value  *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+
+	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueUnionType
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+// builds the union holding the int64 variant with the matching Type tag.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+// builds the union holding the double variant with the matching Type tag.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue,
+		Type: typ,
+	}
+}
+
+// UnmarshalJSON tries each variant in declaration order (int64 first, then double)
+// and keeps the first that decodes cleanly; note the int64 variant's const
+// value_type tag is what disambiguates the two payload shapes.
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue) UnmarshalJSON(data []byte) error {
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value
+		return nil
+	}
+
+	sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue)
+	if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue, "", true, true); err == nil {
+		u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+		u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValueUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue
+		return nil
+	}
+
+	return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes the first non-nil variant; errors if none is set.
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue) MarshalJSON() ([]byte, error) {
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value, "", true)
+	}
+
+	if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue != nil {
+		return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue, "", true)
+	}
+
+	return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter is the
+// generated model for the "numericFilter" variant: a comparison operation applied
+// to a single numeric value.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter struct {
+	// filterName is unexported; the `const` tag fixes the discriminator "numericFilter".
+	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName  `const:"numericFilter" json:"filter_name"`
+	Operation  []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums `json:"operation"`
+	Value      SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue                     `json:"value"`
+}
+
+// MarshalJSON delegates to the tag-aware utils marshaler.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the tag-aware utils unmarshaler (validates the const tag).
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetFilterName always returns the "numericFilter" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1FilterNameNumericFilter
+}
+
+// GetOperation is a nil-safe accessor; a nil receiver yields an empty slice.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter) GetOperation() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums {
+	if o == nil {
+		return []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums{}
+	}
+	return o.Operation
+}
+
+// GetValue is a nil-safe accessor; a nil receiver yields the zero union.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter) GetValue() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue {
+	if o == nil {
+		return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterValue{}
+	}
+	return o.Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName
+// is the single-valued discriminator enum for the "inListFilter" variant.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterNameInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName = "inListFilter"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal "inListFilter" and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "inListFilter":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter is the
+// generated model for the "inListFilter" variant: membership in a list of string
+// values, optionally case-sensitive.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter struct {
+	CaseSensitive *bool `json:"caseSensitive,omitempty"`
+	// filterName is unexported; the `const` tag fixes the discriminator "inListFilter".
+	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName `const:"inListFilter" json:"filter_name"`
+	Values     []string                                                                                                              `json:"values"`
+}
+
+// MarshalJSON delegates to the tag-aware utils marshaler.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the tag-aware utils unmarshaler (validates the const tag).
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetCaseSensitive is a nil-safe accessor for the optional flag.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter) GetCaseSensitive() *bool {
+	if o == nil {
+		return nil
+	}
+	return o.CaseSensitive
+}
+
+// GetFilterName always returns the "inListFilter" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterNameInListFilter
+}
+
+// GetValues is a nil-safe accessor; a nil receiver yields an empty slice.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter) GetValues() []string {
+	if o == nil {
+		return []string{}
+	}
+	return o.Values
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName
+// is the single-valued discriminator enum for the "stringFilter" variant.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterNameStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName = "stringFilter"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName {
+	return &e
+}
+
+// UnmarshalJSON accepts only the literal "stringFilter" and rejects anything else.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "stringFilter":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums
+// enumerates the string match types accepted by a string metric filter.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsMatchTypeUnspecified SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "MATCH_TYPE_UNSPECIFIED"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsExact                SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "EXACT"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsBeginsWith           SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "BEGINS_WITH"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsEndsWith             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "ENDS_WITH"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsContains             SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "CONTAINS"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsFullRegexp           SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "FULL_REGEXP"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnumsPartialRegexp        SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums = "PARTIAL_REGEXP"
+)
+
+// ToPointer returns a pointer to a copy of the enum value.
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums {
+	return &e
+}
+
+// UnmarshalJSON validates the string against the declared match-type set;
+// the fallthrough chain funnels every accepted literal into one assignment.
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums) UnmarshalJSON(data []byte) error {
+	var v string
+	if err := json.Unmarshal(data, &v); err != nil {
+		return err
+	}
+	switch v {
+	case "MATCH_TYPE_UNSPECIFIED":
+		fallthrough
+	case "EXACT":
+		fallthrough
+	case "BEGINS_WITH":
+		fallthrough
+	case "ENDS_WITH":
+		fallthrough
+	case "CONTAINS":
+		fallthrough
+	case "FULL_REGEXP":
+		fallthrough
+	case "PARTIAL_REGEXP":
+		*e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums(v)
+		return nil
+	default:
+		return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums: %v", v)
+	}
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter is the
+// generated model for the "stringFilter" variant: match a single string value with
+// optional match types and case sensitivity.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter struct {
+	CaseSensitive *bool `json:"caseSensitive,omitempty"`
+	// filterName is unexported; the `const` tag fixes the discriminator "stringFilter".
+	filterName SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName `const:"stringFilter" json:"filter_name"`
+	MatchType  []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums                `json:"matchType,omitempty"`
+	Value      string                                                                                                           `json:"value"`
+}
+
+// MarshalJSON delegates to the tag-aware utils marshaler.
+func (s SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter) MarshalJSON() ([]byte, error) {
+	return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON delegates to the tag-aware utils unmarshaler (validates the const tag).
+func (s *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter) UnmarshalJSON(data []byte) error {
+	if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetCaseSensitive is a nil-safe accessor for the optional flag.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter) GetCaseSensitive() *bool {
+	if o == nil {
+		return nil
+	}
+	return o.CaseSensitive
+}
+
+// GetFilterName always returns the "stringFilter" discriminator constant.
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter) GetFilterName() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName {
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterNameStringFilter
+}
+
+// GetMatchType is a nil-safe accessor; nil receiver yields nil (field is optional).
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter) GetMatchType() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums {
+	if o == nil {
+		return nil
+	}
+	return o.MatchType
+}
+
+// GetValue is a nil-safe accessor; a nil receiver yields "".
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter) GetValue() string {
+	if o == nil {
+		return ""
+	}
+	return o.Value
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionType tags
+// which of the four metric-filter variants the Filter union holds.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionType string
+
+const (
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_stringFilter"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter  SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_inListFilter"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_numericFilter"
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionType = "source-google-analytics-data-api-update_Schemas_custom_reports_array_metricFilter_betweenFilter"
+)
+
+// SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter is a oneOf
+// union over the string/inList/numeric/between filter variants; exactly one pointer
+// is expected to be set, with Type recording which.
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter struct {
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter  *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter  *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter
+	SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter
+
+	Type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionType
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter
+// builds the union holding the stringFilter variant with the matching Type tag.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter
+// builds the union holding the inListFilter variant with the matching Type tag.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter
+// builds the union holding the numericFilter variant with the matching Type tag.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter,
+		Type: typ,
+	}
+}
+
+// CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter
+// builds the union holding the betweenFilter variant with the matching Type tag.
+func CreateSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter(sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter) SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter {
+	typ := SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter
+
+	return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter{
+		SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter: &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter,
+		Type: typ,
+	}
+}
+
+func (u *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter := new(SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter = sourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter
+ u.Type = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterUnionTypeSourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterStringFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterInListFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterNumericFilter, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterBetweenFilter, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayExpression struct {
+ FieldName string `json:"field_name"`
+ Filter SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter `json:"filter"`
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayExpression) GetFieldName() string {
+ if o == nil {
+ return ""
+ }
+ return o.FieldName
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayExpression) GetFilter() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter {
+ if o == nil {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilter{}
+ }
+ return o.Filter
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterType string
+
+const (
+ SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterTypeAndGroup SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterType = "andGroup"
+)
+
+func (e SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterType) ToPointer() *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterType {
+ return &e
+}
+
+func (e *SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "andGroup":
+ *e = SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterType: %v", v)
+ }
+}
+
+// SourceGoogleAnalyticsDataAPIUpdateAndGroup - The FilterExpressions in andGroup have an AND relationship.
+type SourceGoogleAnalyticsDataAPIUpdateAndGroup struct {
+ Expressions []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayExpression `json:"expressions"`
+ filterType SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterType `const:"andGroup" json:"filter_type"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdateAndGroup) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdateAndGroup) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateAndGroup) GetExpressions() []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayExpression {
+ if o == nil {
+ return []SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayExpression{}
+ }
+ return o.Expressions
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateAndGroup) GetFilterType() SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterType {
+ return SourceGoogleAnalyticsDataAPIUpdateSchemasCustomReportsArrayMetricFilterFilterTypeAndGroup
+}
+
+type MetricsFilterType string
+
+const (
+ MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateAndGroup MetricsFilterType = "source-google-analytics-data-api-update_andGroup"
+ MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateOrGroup MetricsFilterType = "source-google-analytics-data-api-update_orGroup"
+ MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateNotExpression MetricsFilterType = "source-google-analytics-data-api-update_notExpression"
+ MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateFilter MetricsFilterType = "source-google-analytics-data-api-update_filter"
+)
+
+type MetricsFilter struct {
+ SourceGoogleAnalyticsDataAPIUpdateAndGroup *SourceGoogleAnalyticsDataAPIUpdateAndGroup
+ SourceGoogleAnalyticsDataAPIUpdateOrGroup *SourceGoogleAnalyticsDataAPIUpdateOrGroup
+ SourceGoogleAnalyticsDataAPIUpdateNotExpression *SourceGoogleAnalyticsDataAPIUpdateNotExpression
+ SourceGoogleAnalyticsDataAPIUpdateFilter *SourceGoogleAnalyticsDataAPIUpdateFilter
+
+ Type MetricsFilterType
+}
+
+func CreateMetricsFilterSourceGoogleAnalyticsDataAPIUpdateAndGroup(sourceGoogleAnalyticsDataAPIUpdateAndGroup SourceGoogleAnalyticsDataAPIUpdateAndGroup) MetricsFilter {
+ typ := MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateAndGroup
+
+ return MetricsFilter{
+ SourceGoogleAnalyticsDataAPIUpdateAndGroup: &sourceGoogleAnalyticsDataAPIUpdateAndGroup,
+ Type: typ,
+ }
+}
+
+func CreateMetricsFilterSourceGoogleAnalyticsDataAPIUpdateOrGroup(sourceGoogleAnalyticsDataAPIUpdateOrGroup SourceGoogleAnalyticsDataAPIUpdateOrGroup) MetricsFilter {
+ typ := MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateOrGroup
+
+ return MetricsFilter{
+ SourceGoogleAnalyticsDataAPIUpdateOrGroup: &sourceGoogleAnalyticsDataAPIUpdateOrGroup,
+ Type: typ,
+ }
+}
+
+func CreateMetricsFilterSourceGoogleAnalyticsDataAPIUpdateNotExpression(sourceGoogleAnalyticsDataAPIUpdateNotExpression SourceGoogleAnalyticsDataAPIUpdateNotExpression) MetricsFilter {
+ typ := MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateNotExpression
+
+ return MetricsFilter{
+ SourceGoogleAnalyticsDataAPIUpdateNotExpression: &sourceGoogleAnalyticsDataAPIUpdateNotExpression,
+ Type: typ,
+ }
+}
+
+func CreateMetricsFilterSourceGoogleAnalyticsDataAPIUpdateFilter(sourceGoogleAnalyticsDataAPIUpdateFilter SourceGoogleAnalyticsDataAPIUpdateFilter) MetricsFilter {
+ typ := MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateFilter
+
+ return MetricsFilter{
+ SourceGoogleAnalyticsDataAPIUpdateFilter: &sourceGoogleAnalyticsDataAPIUpdateFilter,
+ Type: typ,
+ }
+}
+
+func (u *MetricsFilter) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleAnalyticsDataAPIUpdateAndGroup := new(SourceGoogleAnalyticsDataAPIUpdateAndGroup)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateAndGroup, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateAndGroup = sourceGoogleAnalyticsDataAPIUpdateAndGroup
+ u.Type = MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateAndGroup
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateOrGroup := new(SourceGoogleAnalyticsDataAPIUpdateOrGroup)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateOrGroup, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateOrGroup = sourceGoogleAnalyticsDataAPIUpdateOrGroup
+ u.Type = MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateOrGroup
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateNotExpression := new(SourceGoogleAnalyticsDataAPIUpdateNotExpression)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateNotExpression, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateNotExpression = sourceGoogleAnalyticsDataAPIUpdateNotExpression
+ u.Type = MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateNotExpression
+ return nil
+ }
+
+ sourceGoogleAnalyticsDataAPIUpdateFilter := new(SourceGoogleAnalyticsDataAPIUpdateFilter)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleAnalyticsDataAPIUpdateFilter, "", true, true); err == nil {
+ u.SourceGoogleAnalyticsDataAPIUpdateFilter = sourceGoogleAnalyticsDataAPIUpdateFilter
+ u.Type = MetricsFilterTypeSourceGoogleAnalyticsDataAPIUpdateFilter
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u MetricsFilter) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleAnalyticsDataAPIUpdateAndGroup != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateAndGroup, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateOrGroup != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateOrGroup, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateNotExpression != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateNotExpression, "", true)
+ }
+
+ if u.SourceGoogleAnalyticsDataAPIUpdateFilter != nil {
+ return utils.MarshalJSON(u.SourceGoogleAnalyticsDataAPIUpdateFilter, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleAnalyticsDataAPIUpdateCustomReportConfig struct {
+ // Dimensions filter
+ DimensionFilter *DimensionsFilter `json:"dimensionFilter,omitempty"`
+ // A list of dimensions.
+ Dimensions []string `json:"dimensions"`
+ // Metrics filter
+ MetricFilter *MetricsFilter `json:"metricFilter,omitempty"`
+ // A list of metrics.
+ Metrics []string `json:"metrics"`
+ // The name of the custom report, this name would be used as stream name.
+ Name string `json:"name"`
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateCustomReportConfig) GetDimensionFilter() *DimensionsFilter {
+ if o == nil {
+ return nil
+ }
+ return o.DimensionFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateCustomReportConfig) GetDimensions() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Dimensions
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateCustomReportConfig) GetMetricFilter() *MetricsFilter {
+ if o == nil {
+ return nil
+ }
+ return o.MetricFilter
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateCustomReportConfig) GetMetrics() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Metrics
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdateCustomReportConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+type SourceGoogleAnalyticsDataAPIUpdate struct {
+ // Credentials for the service
+ Credentials *Credentials `json:"credentials,omitempty"`
+ // You can add your Custom Analytics report by creating one.
+ CustomReportsArray []SourceGoogleAnalyticsDataAPIUpdateCustomReportConfig `json:"custom_reports_array,omitempty"`
+ // The start date from which to replicate report data in the format YYYY-MM-DD. Data generated before this date will not be included in the report. Not applied to custom Cohort reports.
+ DateRangesStartDate *types.Date `json:"date_ranges_start_date,omitempty"`
+ // A list of your Property IDs. The Property ID is a unique number assigned to each property in Google Analytics, found in your GA4 property URL. This ID allows the connector to track the specific events associated with your property. Refer to the Google Analytics documentation to locate your property ID.
+ PropertyIds []string `json:"property_ids"`
+ // The interval in days for each data request made to the Google Analytics API. A larger value speeds up data sync, but increases the chance of data sampling, which may result in inaccuracies. We recommend a value of 1 to minimize sampling, unless speed is an absolute priority over accuracy. Acceptable values range from 1 to 364. Does not apply to custom Cohort reports. More information is available in the documentation.
+ WindowInDays *int64 `default:"1" json:"window_in_days"`
+}
+
+func (s SourceGoogleAnalyticsDataAPIUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleAnalyticsDataAPIUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdate) GetCredentials() *Credentials {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdate) GetCustomReportsArray() []SourceGoogleAnalyticsDataAPIUpdateCustomReportConfig {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReportsArray
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdate) GetDateRangesStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.DateRangesStartDate
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdate) GetPropertyIds() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.PropertyIds
+}
+
+func (o *SourceGoogleAnalyticsDataAPIUpdate) GetWindowInDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.WindowInDays
}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4.go
deleted file mode 100755
index 7a376a55d..000000000
--- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4.go
+++ /dev/null
@@ -1,185 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
-)
-
-type SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType string
-
-const (
- SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthTypeService SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType = "Service"
-)
-
-func (e SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType) ToPointer() *SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType {
- return &e
-}
-
-func (e *SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "Service":
- *e = SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType: %v", v)
- }
-}
-
-// SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication - Credentials for the service
-type SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication struct {
- AuthType *SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthenticationAuthType `json:"auth_type,omitempty"`
- // The JSON key of the service account to use for authorization
- CredentialsJSON string `json:"credentials_json"`
-}
-
-type SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType string
-
-const (
- SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthTypeClient SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType = "Client"
-)
-
-func (e SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType) ToPointer() *SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType {
- return &e
-}
-
-func (e *SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "Client":
- *e = SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType: %v", v)
- }
-}
-
-// SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth - Credentials for the service
-type SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth struct {
- // Access Token for making authenticated requests.
- AccessToken *string `json:"access_token,omitempty"`
- AuthType *SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauthAuthType `json:"auth_type,omitempty"`
- // The Client ID of your Google Analytics developer application.
- ClientID string `json:"client_id"`
- // The Client Secret of your Google Analytics developer application.
- ClientSecret string `json:"client_secret"`
- // The token for obtaining a new access token.
- RefreshToken string `json:"refresh_token"`
-}
-
-type SourceGoogleAnalyticsV4CredentialsType string
-
-const (
- SourceGoogleAnalyticsV4CredentialsTypeSourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth SourceGoogleAnalyticsV4CredentialsType = "source-google-analytics-v4_Credentials_Authenticate via Google (Oauth)"
- SourceGoogleAnalyticsV4CredentialsTypeSourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication SourceGoogleAnalyticsV4CredentialsType = "source-google-analytics-v4_Credentials_Service Account Key Authentication"
-)
-
-type SourceGoogleAnalyticsV4Credentials struct {
- SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth *SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth
- SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication *SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication
-
- Type SourceGoogleAnalyticsV4CredentialsType
-}
-
-func CreateSourceGoogleAnalyticsV4CredentialsSourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth(sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth) SourceGoogleAnalyticsV4Credentials {
- typ := SourceGoogleAnalyticsV4CredentialsTypeSourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth
-
- return SourceGoogleAnalyticsV4Credentials{
- SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth: &sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth,
- Type: typ,
- }
-}
-
-func CreateSourceGoogleAnalyticsV4CredentialsSourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication(sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication) SourceGoogleAnalyticsV4Credentials {
- typ := SourceGoogleAnalyticsV4CredentialsTypeSourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication
-
- return SourceGoogleAnalyticsV4Credentials{
- SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication: &sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication,
- Type: typ,
- }
-}
-
-func (u *SourceGoogleAnalyticsV4Credentials) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication); err == nil {
- u.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication
- u.Type = SourceGoogleAnalyticsV4CredentialsTypeSourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication
- return nil
- }
-
- sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth); err == nil {
- u.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth
- u.Type = SourceGoogleAnalyticsV4CredentialsTypeSourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u SourceGoogleAnalyticsV4Credentials) MarshalJSON() ([]byte, error) {
- if u.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication != nil {
- return json.Marshal(u.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication)
- }
-
- if u.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth != nil {
- return json.Marshal(u.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth)
- }
-
- return nil, nil
-}
-
-type SourceGoogleAnalyticsV4GoogleAnalyticsV4 string
-
-const (
- SourceGoogleAnalyticsV4GoogleAnalyticsV4GoogleAnalyticsV4 SourceGoogleAnalyticsV4GoogleAnalyticsV4 = "google-analytics-v4"
-)
-
-func (e SourceGoogleAnalyticsV4GoogleAnalyticsV4) ToPointer() *SourceGoogleAnalyticsV4GoogleAnalyticsV4 {
- return &e
-}
-
-func (e *SourceGoogleAnalyticsV4GoogleAnalyticsV4) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "google-analytics-v4":
- *e = SourceGoogleAnalyticsV4GoogleAnalyticsV4(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceGoogleAnalyticsV4GoogleAnalyticsV4: %v", v)
- }
-}
-
-type SourceGoogleAnalyticsV4 struct {
- // Credentials for the service
- Credentials *SourceGoogleAnalyticsV4Credentials `json:"credentials,omitempty"`
- // A JSON array describing the custom reports you want to sync from Google Analytics. See the docs for more information about the exact format you can use to fill out this field.
- CustomReports *string `json:"custom_reports,omitempty"`
- SourceType SourceGoogleAnalyticsV4GoogleAnalyticsV4 `json:"sourceType"`
- // The date in the format YYYY-MM-DD. Any data before this date will not be replicated.
- StartDate types.Date `json:"start_date"`
- // The ID for the Google Analytics View you want to fetch data from. This can be found from the Google Analytics Account Explorer.
- ViewID string `json:"view_id"`
- // The time increment used by the connector when requesting data from the Google Analytics API. More information is available in the the docs. The bigger this value is, the faster the sync will be, but the more likely that sampling will be applied to your data, potentially causing inaccuracies in the returned results. We recommend setting this to 1 unless you have a hard requirement to make the sync faster at the expense of accuracy. The minimum allowed value for this field is 1, and the maximum is 364.
- WindowInDays *int64 `json:"window_in_days,omitempty"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4createrequest.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4createrequest.go
deleted file mode 100755
index ffde30a8d..000000000
--- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4createrequest.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceGoogleAnalyticsV4CreateRequest struct {
- Configuration SourceGoogleAnalyticsV4 `json:"configuration"`
- Name string `json:"name"`
- // Optional secretID obtained through the public API OAuth redirect flow.
- SecretID *string `json:"secretId,omitempty"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4putrequest.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4putrequest.go
deleted file mode 100755
index be06730b3..000000000
--- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4putrequest.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceGoogleAnalyticsV4PutRequest struct {
- Configuration SourceGoogleAnalyticsV4Update `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4update.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4update.go
deleted file mode 100755
index 3889f22a0..000000000
--- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4update.go
+++ /dev/null
@@ -1,160 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
-)
-
-type SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthenticationAuthType string
-
-const (
- SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthenticationAuthTypeService SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthenticationAuthType = "Service"
-)
-
-func (e SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthenticationAuthType) ToPointer() *SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthenticationAuthType {
- return &e
-}
-
-func (e *SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthenticationAuthType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "Service":
- *e = SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthenticationAuthType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthenticationAuthType: %v", v)
- }
-}
-
-// SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication - Credentials for the service
-type SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication struct {
- AuthType *SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthenticationAuthType `json:"auth_type,omitempty"`
- // The JSON key of the service account to use for authorization
- CredentialsJSON string `json:"credentials_json"`
-}
-
-type SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauthAuthType string
-
-const (
- SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauthAuthTypeClient SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauthAuthType = "Client"
-)
-
-func (e SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauthAuthType) ToPointer() *SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauthAuthType {
- return &e
-}
-
-func (e *SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauthAuthType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "Client":
- *e = SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauthAuthType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauthAuthType: %v", v)
- }
-}
-
-// SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth - Credentials for the service
-type SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth struct {
- // Access Token for making authenticated requests.
- AccessToken *string `json:"access_token,omitempty"`
- AuthType *SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauthAuthType `json:"auth_type,omitempty"`
- // The Client ID of your Google Analytics developer application.
- ClientID string `json:"client_id"`
- // The Client Secret of your Google Analytics developer application.
- ClientSecret string `json:"client_secret"`
- // The token for obtaining a new access token.
- RefreshToken string `json:"refresh_token"`
-}
-
-type SourceGoogleAnalyticsV4UpdateCredentialsType string
-
-const (
- SourceGoogleAnalyticsV4UpdateCredentialsTypeSourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth SourceGoogleAnalyticsV4UpdateCredentialsType = "source-google-analytics-v4-update_Credentials_Authenticate via Google (Oauth)"
- SourceGoogleAnalyticsV4UpdateCredentialsTypeSourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication SourceGoogleAnalyticsV4UpdateCredentialsType = "source-google-analytics-v4-update_Credentials_Service Account Key Authentication"
-)
-
-type SourceGoogleAnalyticsV4UpdateCredentials struct {
- SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth *SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth
- SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication *SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication
-
- Type SourceGoogleAnalyticsV4UpdateCredentialsType
-}
-
-func CreateSourceGoogleAnalyticsV4UpdateCredentialsSourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth(sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth) SourceGoogleAnalyticsV4UpdateCredentials {
- typ := SourceGoogleAnalyticsV4UpdateCredentialsTypeSourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth
-
- return SourceGoogleAnalyticsV4UpdateCredentials{
- SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth: &sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth,
- Type: typ,
- }
-}
-
-func CreateSourceGoogleAnalyticsV4UpdateCredentialsSourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication(sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication) SourceGoogleAnalyticsV4UpdateCredentials {
- typ := SourceGoogleAnalyticsV4UpdateCredentialsTypeSourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication
-
- return SourceGoogleAnalyticsV4UpdateCredentials{
- SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication: &sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication,
- Type: typ,
- }
-}
-
-func (u *SourceGoogleAnalyticsV4UpdateCredentials) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication); err == nil {
- u.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication
- u.Type = SourceGoogleAnalyticsV4UpdateCredentialsTypeSourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication
- return nil
- }
-
- sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth); err == nil {
- u.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth
- u.Type = SourceGoogleAnalyticsV4UpdateCredentialsTypeSourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u SourceGoogleAnalyticsV4UpdateCredentials) MarshalJSON() ([]byte, error) {
- if u.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication != nil {
- return json.Marshal(u.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication)
- }
-
- if u.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth != nil {
- return json.Marshal(u.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth)
- }
-
- return nil, nil
-}
-
-type SourceGoogleAnalyticsV4Update struct {
- // Credentials for the service
- Credentials *SourceGoogleAnalyticsV4UpdateCredentials `json:"credentials,omitempty"`
- // A JSON array describing the custom reports you want to sync from Google Analytics. See the docs for more information about the exact format you can use to fill out this field.
- CustomReports *string `json:"custom_reports,omitempty"`
- // The date in the format YYYY-MM-DD. Any data before this date will not be replicated.
- StartDate types.Date `json:"start_date"`
- // The ID for the Google Analytics View you want to fetch data from. This can be found from the Google Analytics Account Explorer.
- ViewID string `json:"view_id"`
- // The time increment used by the connector when requesting data from the Google Analytics API. More information is available in the the docs. The bigger this value is, the faster the sync will be, but the more likely that sampling will be applied to your data, potentially causing inaccuracies in the returned results. We recommend setting this to 1 unless you have a hard requirement to make the sync faster at the expense of accuracy. The minimum allowed value for this field is 1, and the maximum is 364.
- WindowInDays *int64 `json:"window_in_days,omitempty"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogledirectory.go b/internal/sdk/pkg/models/shared/sourcegoogledirectory.go
old mode 100755
new mode 100644
index d794413b5..1d6e7c1b6
--- a/internal/sdk/pkg/models/shared/sourcegoogledirectory.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogledirectory.go
@@ -3,134 +3,194 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitle - Authentication Scenario
-type SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitle string
+// SourceGoogleDirectorySchemasCredentialsTitle - Authentication Scenario
+type SourceGoogleDirectorySchemasCredentialsTitle string
const (
- SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitleServiceAccounts SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitle = "Service accounts"
+ SourceGoogleDirectorySchemasCredentialsTitleServiceAccounts SourceGoogleDirectorySchemasCredentialsTitle = "Service accounts"
)
-func (e SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitle) ToPointer() *SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitle {
+func (e SourceGoogleDirectorySchemasCredentialsTitle) ToPointer() *SourceGoogleDirectorySchemasCredentialsTitle {
return &e
}
-func (e *SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleDirectorySchemasCredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Service accounts":
- *e = SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitle(v)
+ *e = SourceGoogleDirectorySchemasCredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleDirectorySchemasCredentialsTitle: %v", v)
}
}
-// SourceGoogleDirectoryGoogleCredentialsServiceAccountKey - For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email.
-type SourceGoogleDirectoryGoogleCredentialsServiceAccountKey struct {
+// SourceGoogleDirectoryServiceAccountKey - For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email.
+type SourceGoogleDirectoryServiceAccountKey struct {
// The contents of the JSON service account key. See the docs for more information on how to generate this key.
CredentialsJSON string `json:"credentials_json"`
// Authentication Scenario
- CredentialsTitle *SourceGoogleDirectoryGoogleCredentialsServiceAccountKeyCredentialsTitle `json:"credentials_title,omitempty"`
+ credentialsTitle *SourceGoogleDirectorySchemasCredentialsTitle `const:"Service accounts" json:"credentials_title,omitempty"`
// The email of the user, which has permissions to access the Google Workspace Admin APIs.
Email string `json:"email"`
}
-// SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle - Authentication Scenario
-type SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle string
+func (s SourceGoogleDirectoryServiceAccountKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDirectoryServiceAccountKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDirectoryServiceAccountKey) GetCredentialsJSON() string {
+ if o == nil {
+ return ""
+ }
+ return o.CredentialsJSON
+}
+
+func (o *SourceGoogleDirectoryServiceAccountKey) GetCredentialsTitle() *SourceGoogleDirectorySchemasCredentialsTitle {
+ return SourceGoogleDirectorySchemasCredentialsTitleServiceAccounts.ToPointer()
+}
+
+func (o *SourceGoogleDirectoryServiceAccountKey) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+// SourceGoogleDirectoryCredentialsTitle - Authentication Scenario
+type SourceGoogleDirectoryCredentialsTitle string
const (
- SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitleWebServerApp SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle = "Web server app"
+ SourceGoogleDirectoryCredentialsTitleWebServerApp SourceGoogleDirectoryCredentialsTitle = "Web server app"
)
-func (e SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle) ToPointer() *SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle {
+func (e SourceGoogleDirectoryCredentialsTitle) ToPointer() *SourceGoogleDirectoryCredentialsTitle {
return &e
}
-func (e *SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleDirectoryCredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Web server app":
- *e = SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle(v)
+ *e = SourceGoogleDirectoryCredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleDirectoryCredentialsTitle: %v", v)
}
}
-// SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth - For these scenario user only needs to give permission to read Google Directory data.
-type SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth struct {
+// SourceGoogleDirectorySignInViaGoogleOAuth - For these scenario user only needs to give permission to read Google Directory data.
+type SourceGoogleDirectorySignInViaGoogleOAuth struct {
// The Client ID of the developer application.
ClientID string `json:"client_id"`
// The Client Secret of the developer application.
ClientSecret string `json:"client_secret"`
// Authentication Scenario
- CredentialsTitle *SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle `json:"credentials_title,omitempty"`
+ credentialsTitle *SourceGoogleDirectoryCredentialsTitle `const:"Web server app" json:"credentials_title,omitempty"`
// The Token for obtaining a new access token.
RefreshToken string `json:"refresh_token"`
}
+func (s SourceGoogleDirectorySignInViaGoogleOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDirectorySignInViaGoogleOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDirectorySignInViaGoogleOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceGoogleDirectorySignInViaGoogleOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGoogleDirectorySignInViaGoogleOAuth) GetCredentialsTitle() *SourceGoogleDirectoryCredentialsTitle {
+ return SourceGoogleDirectoryCredentialsTitleWebServerApp.ToPointer()
+}
+
+func (o *SourceGoogleDirectorySignInViaGoogleOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceGoogleDirectoryGoogleCredentialsType string
const (
- SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth SourceGoogleDirectoryGoogleCredentialsType = "source-google-directory_Google Credentials_Sign in via Google (OAuth)"
- SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryGoogleCredentialsServiceAccountKey SourceGoogleDirectoryGoogleCredentialsType = "source-google-directory_Google Credentials_Service Account Key"
+ SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectorySignInViaGoogleOAuth SourceGoogleDirectoryGoogleCredentialsType = "source-google-directory_Sign in via Google (OAuth)"
+ SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryServiceAccountKey SourceGoogleDirectoryGoogleCredentialsType = "source-google-directory_Service Account Key"
)
type SourceGoogleDirectoryGoogleCredentials struct {
- SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth *SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth
- SourceGoogleDirectoryGoogleCredentialsServiceAccountKey *SourceGoogleDirectoryGoogleCredentialsServiceAccountKey
+ SourceGoogleDirectorySignInViaGoogleOAuth *SourceGoogleDirectorySignInViaGoogleOAuth
+ SourceGoogleDirectoryServiceAccountKey *SourceGoogleDirectoryServiceAccountKey
Type SourceGoogleDirectoryGoogleCredentialsType
}
-func CreateSourceGoogleDirectoryGoogleCredentialsSourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth(sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth) SourceGoogleDirectoryGoogleCredentials {
- typ := SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth
+func CreateSourceGoogleDirectoryGoogleCredentialsSourceGoogleDirectorySignInViaGoogleOAuth(sourceGoogleDirectorySignInViaGoogleOAuth SourceGoogleDirectorySignInViaGoogleOAuth) SourceGoogleDirectoryGoogleCredentials {
+ typ := SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectorySignInViaGoogleOAuth
return SourceGoogleDirectoryGoogleCredentials{
- SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth: &sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth,
+ SourceGoogleDirectorySignInViaGoogleOAuth: &sourceGoogleDirectorySignInViaGoogleOAuth,
Type: typ,
}
}
-func CreateSourceGoogleDirectoryGoogleCredentialsSourceGoogleDirectoryGoogleCredentialsServiceAccountKey(sourceGoogleDirectoryGoogleCredentialsServiceAccountKey SourceGoogleDirectoryGoogleCredentialsServiceAccountKey) SourceGoogleDirectoryGoogleCredentials {
- typ := SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryGoogleCredentialsServiceAccountKey
+func CreateSourceGoogleDirectoryGoogleCredentialsSourceGoogleDirectoryServiceAccountKey(sourceGoogleDirectoryServiceAccountKey SourceGoogleDirectoryServiceAccountKey) SourceGoogleDirectoryGoogleCredentials {
+ typ := SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryServiceAccountKey
return SourceGoogleDirectoryGoogleCredentials{
- SourceGoogleDirectoryGoogleCredentialsServiceAccountKey: &sourceGoogleDirectoryGoogleCredentialsServiceAccountKey,
- Type: typ,
+ SourceGoogleDirectoryServiceAccountKey: &sourceGoogleDirectoryServiceAccountKey,
+ Type: typ,
}
}
func (u *SourceGoogleDirectoryGoogleCredentials) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceGoogleDirectoryGoogleCredentialsServiceAccountKey := new(SourceGoogleDirectoryGoogleCredentialsServiceAccountKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleDirectoryGoogleCredentialsServiceAccountKey); err == nil {
- u.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey = sourceGoogleDirectoryGoogleCredentialsServiceAccountKey
- u.Type = SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryGoogleCredentialsServiceAccountKey
+
+ sourceGoogleDirectoryServiceAccountKey := new(SourceGoogleDirectoryServiceAccountKey)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDirectoryServiceAccountKey, "", true, true); err == nil {
+ u.SourceGoogleDirectoryServiceAccountKey = sourceGoogleDirectoryServiceAccountKey
+ u.Type = SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryServiceAccountKey
return nil
}
- sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth := new(SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth); err == nil {
- u.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth = sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth
- u.Type = SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth
+ sourceGoogleDirectorySignInViaGoogleOAuth := new(SourceGoogleDirectorySignInViaGoogleOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDirectorySignInViaGoogleOAuth, "", true, true); err == nil {
+ u.SourceGoogleDirectorySignInViaGoogleOAuth = sourceGoogleDirectorySignInViaGoogleOAuth
+ u.Type = SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectorySignInViaGoogleOAuth
return nil
}
@@ -138,43 +198,65 @@ func (u *SourceGoogleDirectoryGoogleCredentials) UnmarshalJSON(data []byte) erro
}
func (u SourceGoogleDirectoryGoogleCredentials) MarshalJSON() ([]byte, error) {
- if u.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey != nil {
- return json.Marshal(u.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey)
+ if u.SourceGoogleDirectorySignInViaGoogleOAuth != nil {
+ return utils.MarshalJSON(u.SourceGoogleDirectorySignInViaGoogleOAuth, "", true)
}
- if u.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth != nil {
- return json.Marshal(u.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth)
+ if u.SourceGoogleDirectoryServiceAccountKey != nil {
+ return utils.MarshalJSON(u.SourceGoogleDirectoryServiceAccountKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceGoogleDirectoryGoogleDirectory string
+type GoogleDirectory string
const (
- SourceGoogleDirectoryGoogleDirectoryGoogleDirectory SourceGoogleDirectoryGoogleDirectory = "google-directory"
+ GoogleDirectoryGoogleDirectory GoogleDirectory = "google-directory"
)
-func (e SourceGoogleDirectoryGoogleDirectory) ToPointer() *SourceGoogleDirectoryGoogleDirectory {
+func (e GoogleDirectory) ToPointer() *GoogleDirectory {
return &e
}
-func (e *SourceGoogleDirectoryGoogleDirectory) UnmarshalJSON(data []byte) error {
+func (e *GoogleDirectory) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "google-directory":
- *e = SourceGoogleDirectoryGoogleDirectory(v)
+ *e = GoogleDirectory(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleDirectoryGoogleDirectory: %v", v)
+ return fmt.Errorf("invalid value for GoogleDirectory: %v", v)
}
}
type SourceGoogleDirectory struct {
// Google APIs use the OAuth 2.0 protocol for authentication and authorization. The Source supports Web server application and Service accounts scenarios.
Credentials *SourceGoogleDirectoryGoogleCredentials `json:"credentials,omitempty"`
- SourceType SourceGoogleDirectoryGoogleDirectory `json:"sourceType"`
+ sourceType GoogleDirectory `const:"google-directory" json:"sourceType"`
+}
+
+func (s SourceGoogleDirectory) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDirectory) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDirectory) GetCredentials() *SourceGoogleDirectoryGoogleCredentials {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceGoogleDirectory) GetSourceType() GoogleDirectory {
+ return GoogleDirectoryGoogleDirectory
}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogledirectorycreaterequest.go b/internal/sdk/pkg/models/shared/sourcegoogledirectorycreaterequest.go
old mode 100755
new mode 100644
index 0683f8e59..ba52a3512
--- a/internal/sdk/pkg/models/shared/sourcegoogledirectorycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogledirectorycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGoogleDirectoryCreateRequest struct {
Configuration SourceGoogleDirectory `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleDirectoryCreateRequest) GetConfiguration() SourceGoogleDirectory {
+ if o == nil {
+ return SourceGoogleDirectory{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleDirectoryCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGoogleDirectoryCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleDirectoryCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGoogleDirectoryCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogledirectoryputrequest.go b/internal/sdk/pkg/models/shared/sourcegoogledirectoryputrequest.go
old mode 100755
new mode 100644
index 7069e3c24..37e306e6f
--- a/internal/sdk/pkg/models/shared/sourcegoogledirectoryputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogledirectoryputrequest.go
@@ -7,3 +7,24 @@ type SourceGoogleDirectoryPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleDirectoryPutRequest) GetConfiguration() SourceGoogleDirectoryUpdate {
+ if o == nil {
+ return SourceGoogleDirectoryUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleDirectoryPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleDirectoryPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogledirectoryupdate.go b/internal/sdk/pkg/models/shared/sourcegoogledirectoryupdate.go
old mode 100755
new mode 100644
index cc2e16331..88569b18d
--- a/internal/sdk/pkg/models/shared/sourcegoogledirectoryupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogledirectoryupdate.go
@@ -3,134 +3,194 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitle - Authentication Scenario
-type SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitle string
+// SourceGoogleDirectoryUpdateSchemasCredentialsTitle - Authentication Scenario
+type SourceGoogleDirectoryUpdateSchemasCredentialsTitle string
const (
- SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitleServiceAccounts SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitle = "Service accounts"
+ SourceGoogleDirectoryUpdateSchemasCredentialsTitleServiceAccounts SourceGoogleDirectoryUpdateSchemasCredentialsTitle = "Service accounts"
)
-func (e SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitle) ToPointer() *SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitle {
+func (e SourceGoogleDirectoryUpdateSchemasCredentialsTitle) ToPointer() *SourceGoogleDirectoryUpdateSchemasCredentialsTitle {
return &e
}
-func (e *SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleDirectoryUpdateSchemasCredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Service accounts":
- *e = SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitle(v)
+ *e = SourceGoogleDirectoryUpdateSchemasCredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleDirectoryUpdateSchemasCredentialsTitle: %v", v)
}
}
-// SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey - For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email.
-type SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey struct {
+// ServiceAccountKey - For these scenario user should obtain service account's credentials from the Google API Console and provide delegated email.
+type ServiceAccountKey struct {
// The contents of the JSON service account key. See the docs for more information on how to generate this key.
CredentialsJSON string `json:"credentials_json"`
// Authentication Scenario
- CredentialsTitle *SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKeyCredentialsTitle `json:"credentials_title,omitempty"`
+ credentialsTitle *SourceGoogleDirectoryUpdateSchemasCredentialsTitle `const:"Service accounts" json:"credentials_title,omitempty"`
// The email of the user, which has permissions to access the Google Workspace Admin APIs.
Email string `json:"email"`
}
-// SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle - Authentication Scenario
-type SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle string
+func (s ServiceAccountKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *ServiceAccountKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ServiceAccountKey) GetCredentialsJSON() string {
+ if o == nil {
+ return ""
+ }
+ return o.CredentialsJSON
+}
+
+func (o *ServiceAccountKey) GetCredentialsTitle() *SourceGoogleDirectoryUpdateSchemasCredentialsTitle {
+ return SourceGoogleDirectoryUpdateSchemasCredentialsTitleServiceAccounts.ToPointer()
+}
+
+func (o *ServiceAccountKey) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+// SourceGoogleDirectoryUpdateCredentialsTitle - Authentication Scenario
+type SourceGoogleDirectoryUpdateCredentialsTitle string
const (
- SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitleWebServerApp SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle = "Web server app"
+ SourceGoogleDirectoryUpdateCredentialsTitleWebServerApp SourceGoogleDirectoryUpdateCredentialsTitle = "Web server app"
)
-func (e SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle) ToPointer() *SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle {
+func (e SourceGoogleDirectoryUpdateCredentialsTitle) ToPointer() *SourceGoogleDirectoryUpdateCredentialsTitle {
return &e
}
-func (e *SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleDirectoryUpdateCredentialsTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Web server app":
- *e = SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle(v)
+ *e = SourceGoogleDirectoryUpdateCredentialsTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleDirectoryUpdateCredentialsTitle: %v", v)
}
}
-// SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth - For these scenario user only needs to give permission to read Google Directory data.
-type SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth struct {
+// SignInViaGoogleOAuth - For these scenario user only needs to give permission to read Google Directory data.
+type SignInViaGoogleOAuth struct {
// The Client ID of the developer application.
ClientID string `json:"client_id"`
// The Client Secret of the developer application.
ClientSecret string `json:"client_secret"`
// Authentication Scenario
- CredentialsTitle *SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuthCredentialsTitle `json:"credentials_title,omitempty"`
+ credentialsTitle *SourceGoogleDirectoryUpdateCredentialsTitle `const:"Web server app" json:"credentials_title,omitempty"`
// The Token for obtaining a new access token.
RefreshToken string `json:"refresh_token"`
}
+func (s SignInViaGoogleOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SignInViaGoogleOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SignInViaGoogleOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SignInViaGoogleOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SignInViaGoogleOAuth) GetCredentialsTitle() *SourceGoogleDirectoryUpdateCredentialsTitle {
+ return SourceGoogleDirectoryUpdateCredentialsTitleWebServerApp.ToPointer()
+}
+
+func (o *SignInViaGoogleOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceGoogleDirectoryUpdateGoogleCredentialsType string
const (
- SourceGoogleDirectoryUpdateGoogleCredentialsTypeSourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth SourceGoogleDirectoryUpdateGoogleCredentialsType = "source-google-directory-update_Google Credentials_Sign in via Google (OAuth)"
- SourceGoogleDirectoryUpdateGoogleCredentialsTypeSourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey SourceGoogleDirectoryUpdateGoogleCredentialsType = "source-google-directory-update_Google Credentials_Service Account Key"
+ SourceGoogleDirectoryUpdateGoogleCredentialsTypeSignInViaGoogleOAuth SourceGoogleDirectoryUpdateGoogleCredentialsType = "Sign in via Google (OAuth)"
+ SourceGoogleDirectoryUpdateGoogleCredentialsTypeServiceAccountKey SourceGoogleDirectoryUpdateGoogleCredentialsType = "Service Account Key"
)
type SourceGoogleDirectoryUpdateGoogleCredentials struct {
- SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth *SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth
- SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey *SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey
+ SignInViaGoogleOAuth *SignInViaGoogleOAuth
+ ServiceAccountKey *ServiceAccountKey
Type SourceGoogleDirectoryUpdateGoogleCredentialsType
}
-func CreateSourceGoogleDirectoryUpdateGoogleCredentialsSourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth(sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth) SourceGoogleDirectoryUpdateGoogleCredentials {
- typ := SourceGoogleDirectoryUpdateGoogleCredentialsTypeSourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth
+func CreateSourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth(signInViaGoogleOAuth SignInViaGoogleOAuth) SourceGoogleDirectoryUpdateGoogleCredentials {
+ typ := SourceGoogleDirectoryUpdateGoogleCredentialsTypeSignInViaGoogleOAuth
return SourceGoogleDirectoryUpdateGoogleCredentials{
- SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth: &sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth,
- Type: typ,
+ SignInViaGoogleOAuth: &signInViaGoogleOAuth,
+ Type: typ,
}
}
-func CreateSourceGoogleDirectoryUpdateGoogleCredentialsSourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey(sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey) SourceGoogleDirectoryUpdateGoogleCredentials {
- typ := SourceGoogleDirectoryUpdateGoogleCredentialsTypeSourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey
+func CreateSourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey(serviceAccountKey ServiceAccountKey) SourceGoogleDirectoryUpdateGoogleCredentials {
+ typ := SourceGoogleDirectoryUpdateGoogleCredentialsTypeServiceAccountKey
return SourceGoogleDirectoryUpdateGoogleCredentials{
- SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey: &sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey,
- Type: typ,
+ ServiceAccountKey: &serviceAccountKey,
+ Type: typ,
}
}
func (u *SourceGoogleDirectoryUpdateGoogleCredentials) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey := new(SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey); err == nil {
- u.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey = sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey
- u.Type = SourceGoogleDirectoryUpdateGoogleCredentialsTypeSourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey
+
+ serviceAccountKey := new(ServiceAccountKey)
+ if err := utils.UnmarshalJSON(data, &serviceAccountKey, "", true, true); err == nil {
+ u.ServiceAccountKey = serviceAccountKey
+ u.Type = SourceGoogleDirectoryUpdateGoogleCredentialsTypeServiceAccountKey
return nil
}
- sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth := new(SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth); err == nil {
- u.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth = sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth
- u.Type = SourceGoogleDirectoryUpdateGoogleCredentialsTypeSourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth
+ signInViaGoogleOAuth := new(SignInViaGoogleOAuth)
+ if err := utils.UnmarshalJSON(data, &signInViaGoogleOAuth, "", true, true); err == nil {
+ u.SignInViaGoogleOAuth = signInViaGoogleOAuth
+ u.Type = SourceGoogleDirectoryUpdateGoogleCredentialsTypeSignInViaGoogleOAuth
return nil
}
@@ -138,18 +198,25 @@ func (u *SourceGoogleDirectoryUpdateGoogleCredentials) UnmarshalJSON(data []byte
}
func (u SourceGoogleDirectoryUpdateGoogleCredentials) MarshalJSON() ([]byte, error) {
- if u.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey != nil {
- return json.Marshal(u.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey)
+ if u.SignInViaGoogleOAuth != nil {
+ return utils.MarshalJSON(u.SignInViaGoogleOAuth, "", true)
}
- if u.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth != nil {
- return json.Marshal(u.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth)
+ if u.ServiceAccountKey != nil {
+ return utils.MarshalJSON(u.ServiceAccountKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceGoogleDirectoryUpdate struct {
// Google APIs use the OAuth 2.0 protocol for authentication and authorization. The Source supports Web server application and Service accounts scenarios.
Credentials *SourceGoogleDirectoryUpdateGoogleCredentials `json:"credentials,omitempty"`
}
+
+func (o *SourceGoogleDirectoryUpdate) GetCredentials() *SourceGoogleDirectoryUpdateGoogleCredentials {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogledrive.go b/internal/sdk/pkg/models/shared/sourcegoogledrive.go
new file mode 100644
index 000000000..4b766bb56
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcegoogledrive.go
@@ -0,0 +1,1106 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
+)
+
+type SourceGoogleDriveSchemasAuthType string
+
+const (
+ SourceGoogleDriveSchemasAuthTypeService SourceGoogleDriveSchemasAuthType = "Service"
+)
+
+func (e SourceGoogleDriveSchemasAuthType) ToPointer() *SourceGoogleDriveSchemasAuthType {
+ return &e
+}
+
+func (e *SourceGoogleDriveSchemasAuthType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Service":
+ *e = SourceGoogleDriveSchemasAuthType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveSchemasAuthType: %v", v)
+ }
+}
+
+// SourceGoogleDriveServiceAccountKeyAuthentication - Credentials for connecting to the Google Drive API
+type SourceGoogleDriveServiceAccountKeyAuthentication struct {
+ authType *SourceGoogleDriveSchemasAuthType `const:"Service" json:"auth_type"`
+ // The JSON key of the service account to use for authorization. Read more here.
+ ServiceAccountInfo string `json:"service_account_info"`
+}
+
+func (s SourceGoogleDriveServiceAccountKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveServiceAccountKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveServiceAccountKeyAuthentication) GetAuthType() *SourceGoogleDriveSchemasAuthType {
+ return SourceGoogleDriveSchemasAuthTypeService.ToPointer()
+}
+
+func (o *SourceGoogleDriveServiceAccountKeyAuthentication) GetServiceAccountInfo() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServiceAccountInfo
+}
+
+type SourceGoogleDriveAuthType string
+
+const (
+ SourceGoogleDriveAuthTypeClient SourceGoogleDriveAuthType = "Client"
+)
+
+func (e SourceGoogleDriveAuthType) ToPointer() *SourceGoogleDriveAuthType {
+ return &e
+}
+
+func (e *SourceGoogleDriveAuthType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Client":
+ *e = SourceGoogleDriveAuthType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveAuthType: %v", v)
+ }
+}
+
+// SourceGoogleDriveAuthenticateViaGoogleOAuth - Credentials for connecting to the Google Drive API
+type SourceGoogleDriveAuthenticateViaGoogleOAuth struct {
+ authType *SourceGoogleDriveAuthType `const:"Client" json:"auth_type"`
+ // Client ID for the Google Drive API
+ ClientID string `json:"client_id"`
+ // Client Secret for the Google Drive API
+ ClientSecret string `json:"client_secret"`
+ // Refresh Token for the Google Drive API
+ RefreshToken string `json:"refresh_token"`
+}
+
+func (s SourceGoogleDriveAuthenticateViaGoogleOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveAuthenticateViaGoogleOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveAuthenticateViaGoogleOAuth) GetAuthType() *SourceGoogleDriveAuthType {
+ return SourceGoogleDriveAuthTypeClient.ToPointer()
+}
+
+func (o *SourceGoogleDriveAuthenticateViaGoogleOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceGoogleDriveAuthenticateViaGoogleOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGoogleDriveAuthenticateViaGoogleOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+type SourceGoogleDriveAuthenticationType string
+
+const (
+ SourceGoogleDriveAuthenticationTypeSourceGoogleDriveAuthenticateViaGoogleOAuth SourceGoogleDriveAuthenticationType = "source-google-drive_Authenticate via Google (OAuth)"
+ SourceGoogleDriveAuthenticationTypeSourceGoogleDriveServiceAccountKeyAuthentication SourceGoogleDriveAuthenticationType = "source-google-drive_Service Account Key Authentication"
+)
+
+type SourceGoogleDriveAuthentication struct {
+ SourceGoogleDriveAuthenticateViaGoogleOAuth *SourceGoogleDriveAuthenticateViaGoogleOAuth
+ SourceGoogleDriveServiceAccountKeyAuthentication *SourceGoogleDriveServiceAccountKeyAuthentication
+
+ Type SourceGoogleDriveAuthenticationType
+}
+
+func CreateSourceGoogleDriveAuthenticationSourceGoogleDriveAuthenticateViaGoogleOAuth(sourceGoogleDriveAuthenticateViaGoogleOAuth SourceGoogleDriveAuthenticateViaGoogleOAuth) SourceGoogleDriveAuthentication {
+ typ := SourceGoogleDriveAuthenticationTypeSourceGoogleDriveAuthenticateViaGoogleOAuth
+
+ return SourceGoogleDriveAuthentication{
+ SourceGoogleDriveAuthenticateViaGoogleOAuth: &sourceGoogleDriveAuthenticateViaGoogleOAuth,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleDriveAuthenticationSourceGoogleDriveServiceAccountKeyAuthentication(sourceGoogleDriveServiceAccountKeyAuthentication SourceGoogleDriveServiceAccountKeyAuthentication) SourceGoogleDriveAuthentication {
+ typ := SourceGoogleDriveAuthenticationTypeSourceGoogleDriveServiceAccountKeyAuthentication
+
+ return SourceGoogleDriveAuthentication{
+ SourceGoogleDriveServiceAccountKeyAuthentication: &sourceGoogleDriveServiceAccountKeyAuthentication,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleDriveAuthentication) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleDriveServiceAccountKeyAuthentication := new(SourceGoogleDriveServiceAccountKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveServiceAccountKeyAuthentication, "", true, true); err == nil {
+ u.SourceGoogleDriveServiceAccountKeyAuthentication = sourceGoogleDriveServiceAccountKeyAuthentication
+ u.Type = SourceGoogleDriveAuthenticationTypeSourceGoogleDriveServiceAccountKeyAuthentication
+ return nil
+ }
+
+ sourceGoogleDriveAuthenticateViaGoogleOAuth := new(SourceGoogleDriveAuthenticateViaGoogleOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveAuthenticateViaGoogleOAuth, "", true, true); err == nil {
+ u.SourceGoogleDriveAuthenticateViaGoogleOAuth = sourceGoogleDriveAuthenticateViaGoogleOAuth
+ u.Type = SourceGoogleDriveAuthenticationTypeSourceGoogleDriveAuthenticateViaGoogleOAuth
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleDriveAuthentication) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleDriveAuthenticateViaGoogleOAuth != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveAuthenticateViaGoogleOAuth, "", true)
+ }
+
+ if u.SourceGoogleDriveServiceAccountKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveServiceAccountKeyAuthentication, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type GoogleDrive string
+
+const (
+ GoogleDriveGoogleDrive GoogleDrive = "google-drive"
+)
+
+func (e GoogleDrive) ToPointer() *GoogleDrive {
+ return &e
+}
+
+func (e *GoogleDrive) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "google-drive":
+ *e = GoogleDrive(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for GoogleDrive: %v", v)
+ }
+}
+
+type SourceGoogleDriveSchemasStreamsFormatFormatFiletype string
+
+const (
+ SourceGoogleDriveSchemasStreamsFormatFormatFiletypeUnstructured SourceGoogleDriveSchemasStreamsFormatFormatFiletype = "unstructured"
+)
+
+func (e SourceGoogleDriveSchemasStreamsFormatFormatFiletype) ToPointer() *SourceGoogleDriveSchemasStreamsFormatFormatFiletype {
+ return &e
+}
+
+func (e *SourceGoogleDriveSchemasStreamsFormatFormatFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "unstructured":
+ *e = SourceGoogleDriveSchemasStreamsFormatFormatFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveSchemasStreamsFormatFormatFiletype: %v", v)
+ }
+}
+
+// SourceGoogleDriveDocumentFileTypeFormatExperimental - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
+type SourceGoogleDriveDocumentFileTypeFormatExperimental struct {
+ filetype *SourceGoogleDriveSchemasStreamsFormatFormatFiletype `const:"unstructured" json:"filetype"`
+ // If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.
+ SkipUnprocessableFileTypes *bool `default:"true" json:"skip_unprocessable_file_types"`
+}
+
+func (s SourceGoogleDriveDocumentFileTypeFormatExperimental) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveDocumentFileTypeFormatExperimental) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveDocumentFileTypeFormatExperimental) GetFiletype() *SourceGoogleDriveSchemasStreamsFormatFormatFiletype {
+ return SourceGoogleDriveSchemasStreamsFormatFormatFiletypeUnstructured.ToPointer()
+}
+
+func (o *SourceGoogleDriveDocumentFileTypeFormatExperimental) GetSkipUnprocessableFileTypes() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.SkipUnprocessableFileTypes
+}
+
+type SourceGoogleDriveSchemasStreamsFormatFiletype string
+
+const (
+ SourceGoogleDriveSchemasStreamsFormatFiletypeParquet SourceGoogleDriveSchemasStreamsFormatFiletype = "parquet"
+)
+
+func (e SourceGoogleDriveSchemasStreamsFormatFiletype) ToPointer() *SourceGoogleDriveSchemasStreamsFormatFiletype {
+ return &e
+}
+
+func (e *SourceGoogleDriveSchemasStreamsFormatFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "parquet":
+ *e = SourceGoogleDriveSchemasStreamsFormatFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveSchemasStreamsFormatFiletype: %v", v)
+ }
+}
+
+// SourceGoogleDriveParquetFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceGoogleDriveParquetFormat struct {
+ // Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
+ DecimalAsFloat *bool `default:"false" json:"decimal_as_float"`
+ filetype *SourceGoogleDriveSchemasStreamsFormatFiletype `const:"parquet" json:"filetype"`
+}
+
+func (s SourceGoogleDriveParquetFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveParquetFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveParquetFormat) GetDecimalAsFloat() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DecimalAsFloat
+}
+
+func (o *SourceGoogleDriveParquetFormat) GetFiletype() *SourceGoogleDriveSchemasStreamsFormatFiletype {
+ return SourceGoogleDriveSchemasStreamsFormatFiletypeParquet.ToPointer()
+}
+
+type SourceGoogleDriveSchemasStreamsFiletype string
+
+const (
+ SourceGoogleDriveSchemasStreamsFiletypeJsonl SourceGoogleDriveSchemasStreamsFiletype = "jsonl"
+)
+
+func (e SourceGoogleDriveSchemasStreamsFiletype) ToPointer() *SourceGoogleDriveSchemasStreamsFiletype {
+ return &e
+}
+
+func (e *SourceGoogleDriveSchemasStreamsFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "jsonl":
+ *e = SourceGoogleDriveSchemasStreamsFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveSchemasStreamsFiletype: %v", v)
+ }
+}
+
+// SourceGoogleDriveJsonlFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceGoogleDriveJsonlFormat struct {
+ filetype *SourceGoogleDriveSchemasStreamsFiletype `const:"jsonl" json:"filetype"`
+}
+
+func (s SourceGoogleDriveJsonlFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveJsonlFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveJsonlFormat) GetFiletype() *SourceGoogleDriveSchemasStreamsFiletype {
+ return SourceGoogleDriveSchemasStreamsFiletypeJsonl.ToPointer()
+}
+
+type SourceGoogleDriveSchemasFiletype string
+
+const (
+ SourceGoogleDriveSchemasFiletypeCsv SourceGoogleDriveSchemasFiletype = "csv"
+)
+
+func (e SourceGoogleDriveSchemasFiletype) ToPointer() *SourceGoogleDriveSchemasFiletype {
+ return &e
+}
+
+func (e *SourceGoogleDriveSchemasFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "csv":
+ *e = SourceGoogleDriveSchemasFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveSchemasFiletype: %v", v)
+ }
+}
+
+type SourceGoogleDriveSchemasStreamsHeaderDefinitionType string
+
+const (
+ SourceGoogleDriveSchemasStreamsHeaderDefinitionTypeUserProvided SourceGoogleDriveSchemasStreamsHeaderDefinitionType = "User Provided"
+)
+
+func (e SourceGoogleDriveSchemasStreamsHeaderDefinitionType) ToPointer() *SourceGoogleDriveSchemasStreamsHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceGoogleDriveSchemasStreamsHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "User Provided":
+ *e = SourceGoogleDriveSchemasStreamsHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveSchemasStreamsHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGoogleDriveUserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGoogleDriveUserProvided struct {
+ // The column names that will be used while emitting the CSV records
+ ColumnNames []string `json:"column_names"`
+ headerDefinitionType *SourceGoogleDriveSchemasStreamsHeaderDefinitionType `const:"User Provided" json:"header_definition_type"`
+}
+
+func (s SourceGoogleDriveUserProvided) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveUserProvided) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveUserProvided) GetColumnNames() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.ColumnNames
+}
+
+func (o *SourceGoogleDriveUserProvided) GetHeaderDefinitionType() *SourceGoogleDriveSchemasStreamsHeaderDefinitionType {
+ return SourceGoogleDriveSchemasStreamsHeaderDefinitionTypeUserProvided.ToPointer()
+}
+
+type SourceGoogleDriveSchemasHeaderDefinitionType string
+
+const (
+ SourceGoogleDriveSchemasHeaderDefinitionTypeAutogenerated SourceGoogleDriveSchemasHeaderDefinitionType = "Autogenerated"
+)
+
+func (e SourceGoogleDriveSchemasHeaderDefinitionType) ToPointer() *SourceGoogleDriveSchemasHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceGoogleDriveSchemasHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Autogenerated":
+ *e = SourceGoogleDriveSchemasHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveSchemasHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGoogleDriveAutogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGoogleDriveAutogenerated struct {
+ headerDefinitionType *SourceGoogleDriveSchemasHeaderDefinitionType `const:"Autogenerated" json:"header_definition_type"`
+}
+
+func (s SourceGoogleDriveAutogenerated) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveAutogenerated) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveAutogenerated) GetHeaderDefinitionType() *SourceGoogleDriveSchemasHeaderDefinitionType {
+ return SourceGoogleDriveSchemasHeaderDefinitionTypeAutogenerated.ToPointer()
+}
+
+type SourceGoogleDriveHeaderDefinitionType string
+
+const (
+ SourceGoogleDriveHeaderDefinitionTypeFromCsv SourceGoogleDriveHeaderDefinitionType = "From CSV"
+)
+
+func (e SourceGoogleDriveHeaderDefinitionType) ToPointer() *SourceGoogleDriveHeaderDefinitionType {
+ return &e
+}
+
+func (e *SourceGoogleDriveHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "From CSV":
+ *e = SourceGoogleDriveHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGoogleDriveFromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGoogleDriveFromCSV struct {
+ headerDefinitionType *SourceGoogleDriveHeaderDefinitionType `const:"From CSV" json:"header_definition_type"`
+}
+
+func (s SourceGoogleDriveFromCSV) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveFromCSV) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveFromCSV) GetHeaderDefinitionType() *SourceGoogleDriveHeaderDefinitionType {
+ return SourceGoogleDriveHeaderDefinitionTypeFromCsv.ToPointer()
+}
+
+type SourceGoogleDriveCSVHeaderDefinitionType string
+
+const (
+ SourceGoogleDriveCSVHeaderDefinitionTypeSourceGoogleDriveFromCSV SourceGoogleDriveCSVHeaderDefinitionType = "source-google-drive_From CSV"
+ SourceGoogleDriveCSVHeaderDefinitionTypeSourceGoogleDriveAutogenerated SourceGoogleDriveCSVHeaderDefinitionType = "source-google-drive_Autogenerated"
+ SourceGoogleDriveCSVHeaderDefinitionTypeSourceGoogleDriveUserProvided SourceGoogleDriveCSVHeaderDefinitionType = "source-google-drive_User Provided"
+)
+
+type SourceGoogleDriveCSVHeaderDefinition struct {
+ SourceGoogleDriveFromCSV *SourceGoogleDriveFromCSV
+ SourceGoogleDriveAutogenerated *SourceGoogleDriveAutogenerated
+ SourceGoogleDriveUserProvided *SourceGoogleDriveUserProvided
+
+ Type SourceGoogleDriveCSVHeaderDefinitionType
+}
+
+func CreateSourceGoogleDriveCSVHeaderDefinitionSourceGoogleDriveFromCSV(sourceGoogleDriveFromCSV SourceGoogleDriveFromCSV) SourceGoogleDriveCSVHeaderDefinition {
+ typ := SourceGoogleDriveCSVHeaderDefinitionTypeSourceGoogleDriveFromCSV
+
+ return SourceGoogleDriveCSVHeaderDefinition{
+ SourceGoogleDriveFromCSV: &sourceGoogleDriveFromCSV,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleDriveCSVHeaderDefinitionSourceGoogleDriveAutogenerated(sourceGoogleDriveAutogenerated SourceGoogleDriveAutogenerated) SourceGoogleDriveCSVHeaderDefinition {
+ typ := SourceGoogleDriveCSVHeaderDefinitionTypeSourceGoogleDriveAutogenerated
+
+ return SourceGoogleDriveCSVHeaderDefinition{
+ SourceGoogleDriveAutogenerated: &sourceGoogleDriveAutogenerated,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleDriveCSVHeaderDefinitionSourceGoogleDriveUserProvided(sourceGoogleDriveUserProvided SourceGoogleDriveUserProvided) SourceGoogleDriveCSVHeaderDefinition {
+ typ := SourceGoogleDriveCSVHeaderDefinitionTypeSourceGoogleDriveUserProvided
+
+ return SourceGoogleDriveCSVHeaderDefinition{
+ SourceGoogleDriveUserProvided: &sourceGoogleDriveUserProvided,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleDriveCSVHeaderDefinition) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleDriveFromCSV := new(SourceGoogleDriveFromCSV)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveFromCSV, "", true, true); err == nil {
+ u.SourceGoogleDriveFromCSV = sourceGoogleDriveFromCSV
+ u.Type = SourceGoogleDriveCSVHeaderDefinitionTypeSourceGoogleDriveFromCSV
+ return nil
+ }
+
+ sourceGoogleDriveAutogenerated := new(SourceGoogleDriveAutogenerated)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveAutogenerated, "", true, true); err == nil {
+ u.SourceGoogleDriveAutogenerated = sourceGoogleDriveAutogenerated
+ u.Type = SourceGoogleDriveCSVHeaderDefinitionTypeSourceGoogleDriveAutogenerated
+ return nil
+ }
+
+ sourceGoogleDriveUserProvided := new(SourceGoogleDriveUserProvided)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveUserProvided, "", true, true); err == nil {
+ u.SourceGoogleDriveUserProvided = sourceGoogleDriveUserProvided
+ u.Type = SourceGoogleDriveCSVHeaderDefinitionTypeSourceGoogleDriveUserProvided
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleDriveCSVHeaderDefinition) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleDriveFromCSV != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveFromCSV, "", true)
+ }
+
+ if u.SourceGoogleDriveAutogenerated != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveAutogenerated, "", true)
+ }
+
+ if u.SourceGoogleDriveUserProvided != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveUserProvided, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleDriveCSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceGoogleDriveCSVFormat struct {
+ // The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
+ Delimiter *string `default:"," json:"delimiter"`
+ // Whether two quotes in a quoted CSV value denote a single quote in the data.
+ DoubleQuote *bool `default:"true" json:"double_quote"`
+ // The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
+ Encoding *string `default:"utf8" json:"encoding"`
+ // The character used for escaping special characters. To disallow escaping, leave this field blank.
+ EscapeChar *string `json:"escape_char,omitempty"`
+ // A set of case-sensitive strings that should be interpreted as false values.
+ FalseValues []string `json:"false_values,omitempty"`
+ filetype *SourceGoogleDriveSchemasFiletype `const:"csv" json:"filetype"`
+ // How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+ HeaderDefinition *SourceGoogleDriveCSVHeaderDefinition `json:"header_definition,omitempty"`
+ // A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
+ NullValues []string `json:"null_values,omitempty"`
+ // The character used for quoting CSV values. To disallow quoting, make this field blank.
+	QuoteChar *string `default:"\"" json:"quote_char"`
+ // The number of rows to skip after the header row.
+ SkipRowsAfterHeader *int64 `default:"0" json:"skip_rows_after_header"`
+ // The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
+ SkipRowsBeforeHeader *int64 `default:"0" json:"skip_rows_before_header"`
+ // Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
+ StringsCanBeNull *bool `default:"true" json:"strings_can_be_null"`
+ // A set of case-sensitive strings that should be interpreted as true values.
+ TrueValues []string `json:"true_values,omitempty"`
+}
+
+func (s SourceGoogleDriveCSVFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveCSVFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetDelimiter() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Delimiter
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetDoubleQuote() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleQuote
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetEncoding() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Encoding
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetEscapeChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EscapeChar
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetFalseValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.FalseValues
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetFiletype() *SourceGoogleDriveSchemasFiletype {
+ return SourceGoogleDriveSchemasFiletypeCsv.ToPointer()
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetHeaderDefinition() *SourceGoogleDriveCSVHeaderDefinition {
+ if o == nil {
+ return nil
+ }
+ return o.HeaderDefinition
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetNullValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.NullValues
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetQuoteChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QuoteChar
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetSkipRowsAfterHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsAfterHeader
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetSkipRowsBeforeHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsBeforeHeader
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetStringsCanBeNull() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.StringsCanBeNull
+}
+
+func (o *SourceGoogleDriveCSVFormat) GetTrueValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TrueValues
+}
+
+type SourceGoogleDriveFiletype string
+
+const (
+ SourceGoogleDriveFiletypeAvro SourceGoogleDriveFiletype = "avro"
+)
+
+func (e SourceGoogleDriveFiletype) ToPointer() *SourceGoogleDriveFiletype {
+ return &e
+}
+
+func (e *SourceGoogleDriveFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "avro":
+ *e = SourceGoogleDriveFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveFiletype: %v", v)
+ }
+}
+
+// SourceGoogleDriveAvroFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceGoogleDriveAvroFormat struct {
+ // Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.
+ DoubleAsString *bool `default:"false" json:"double_as_string"`
+ filetype *SourceGoogleDriveFiletype `const:"avro" json:"filetype"`
+}
+
+func (s SourceGoogleDriveAvroFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveAvroFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveAvroFormat) GetDoubleAsString() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleAsString
+}
+
+func (o *SourceGoogleDriveAvroFormat) GetFiletype() *SourceGoogleDriveFiletype {
+ return SourceGoogleDriveFiletypeAvro.ToPointer()
+}
+
+type SourceGoogleDriveFormatType string
+
+const (
+ SourceGoogleDriveFormatTypeSourceGoogleDriveAvroFormat SourceGoogleDriveFormatType = "source-google-drive_Avro Format"
+ SourceGoogleDriveFormatTypeSourceGoogleDriveCSVFormat SourceGoogleDriveFormatType = "source-google-drive_CSV Format"
+ SourceGoogleDriveFormatTypeSourceGoogleDriveJsonlFormat SourceGoogleDriveFormatType = "source-google-drive_Jsonl Format"
+ SourceGoogleDriveFormatTypeSourceGoogleDriveParquetFormat SourceGoogleDriveFormatType = "source-google-drive_Parquet Format"
+ SourceGoogleDriveFormatTypeSourceGoogleDriveDocumentFileTypeFormatExperimental SourceGoogleDriveFormatType = "source-google-drive_Document File Type Format (Experimental)"
+)
+
+type SourceGoogleDriveFormat struct {
+ SourceGoogleDriveAvroFormat *SourceGoogleDriveAvroFormat
+ SourceGoogleDriveCSVFormat *SourceGoogleDriveCSVFormat
+ SourceGoogleDriveJsonlFormat *SourceGoogleDriveJsonlFormat
+ SourceGoogleDriveParquetFormat *SourceGoogleDriveParquetFormat
+ SourceGoogleDriveDocumentFileTypeFormatExperimental *SourceGoogleDriveDocumentFileTypeFormatExperimental
+
+ Type SourceGoogleDriveFormatType
+}
+
+func CreateSourceGoogleDriveFormatSourceGoogleDriveAvroFormat(sourceGoogleDriveAvroFormat SourceGoogleDriveAvroFormat) SourceGoogleDriveFormat {
+ typ := SourceGoogleDriveFormatTypeSourceGoogleDriveAvroFormat
+
+ return SourceGoogleDriveFormat{
+ SourceGoogleDriveAvroFormat: &sourceGoogleDriveAvroFormat,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleDriveFormatSourceGoogleDriveCSVFormat(sourceGoogleDriveCSVFormat SourceGoogleDriveCSVFormat) SourceGoogleDriveFormat {
+ typ := SourceGoogleDriveFormatTypeSourceGoogleDriveCSVFormat
+
+ return SourceGoogleDriveFormat{
+ SourceGoogleDriveCSVFormat: &sourceGoogleDriveCSVFormat,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleDriveFormatSourceGoogleDriveJsonlFormat(sourceGoogleDriveJsonlFormat SourceGoogleDriveJsonlFormat) SourceGoogleDriveFormat {
+ typ := SourceGoogleDriveFormatTypeSourceGoogleDriveJsonlFormat
+
+ return SourceGoogleDriveFormat{
+ SourceGoogleDriveJsonlFormat: &sourceGoogleDriveJsonlFormat,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleDriveFormatSourceGoogleDriveParquetFormat(sourceGoogleDriveParquetFormat SourceGoogleDriveParquetFormat) SourceGoogleDriveFormat {
+ typ := SourceGoogleDriveFormatTypeSourceGoogleDriveParquetFormat
+
+ return SourceGoogleDriveFormat{
+ SourceGoogleDriveParquetFormat: &sourceGoogleDriveParquetFormat,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleDriveFormatSourceGoogleDriveDocumentFileTypeFormatExperimental(sourceGoogleDriveDocumentFileTypeFormatExperimental SourceGoogleDriveDocumentFileTypeFormatExperimental) SourceGoogleDriveFormat {
+ typ := SourceGoogleDriveFormatTypeSourceGoogleDriveDocumentFileTypeFormatExperimental
+
+ return SourceGoogleDriveFormat{
+ SourceGoogleDriveDocumentFileTypeFormatExperimental: &sourceGoogleDriveDocumentFileTypeFormatExperimental,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleDriveFormat) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleDriveJsonlFormat := new(SourceGoogleDriveJsonlFormat)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveJsonlFormat, "", true, true); err == nil {
+ u.SourceGoogleDriveJsonlFormat = sourceGoogleDriveJsonlFormat
+ u.Type = SourceGoogleDriveFormatTypeSourceGoogleDriveJsonlFormat
+ return nil
+ }
+
+ sourceGoogleDriveAvroFormat := new(SourceGoogleDriveAvroFormat)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveAvroFormat, "", true, true); err == nil {
+ u.SourceGoogleDriveAvroFormat = sourceGoogleDriveAvroFormat
+ u.Type = SourceGoogleDriveFormatTypeSourceGoogleDriveAvroFormat
+ return nil
+ }
+
+ sourceGoogleDriveParquetFormat := new(SourceGoogleDriveParquetFormat)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveParquetFormat, "", true, true); err == nil {
+ u.SourceGoogleDriveParquetFormat = sourceGoogleDriveParquetFormat
+ u.Type = SourceGoogleDriveFormatTypeSourceGoogleDriveParquetFormat
+ return nil
+ }
+
+ sourceGoogleDriveDocumentFileTypeFormatExperimental := new(SourceGoogleDriveDocumentFileTypeFormatExperimental)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveDocumentFileTypeFormatExperimental, "", true, true); err == nil {
+ u.SourceGoogleDriveDocumentFileTypeFormatExperimental = sourceGoogleDriveDocumentFileTypeFormatExperimental
+ u.Type = SourceGoogleDriveFormatTypeSourceGoogleDriveDocumentFileTypeFormatExperimental
+ return nil
+ }
+
+ sourceGoogleDriveCSVFormat := new(SourceGoogleDriveCSVFormat)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveCSVFormat, "", true, true); err == nil {
+ u.SourceGoogleDriveCSVFormat = sourceGoogleDriveCSVFormat
+ u.Type = SourceGoogleDriveFormatTypeSourceGoogleDriveCSVFormat
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleDriveFormat) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleDriveAvroFormat != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveAvroFormat, "", true)
+ }
+
+ if u.SourceGoogleDriveCSVFormat != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveCSVFormat, "", true)
+ }
+
+ if u.SourceGoogleDriveJsonlFormat != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveJsonlFormat, "", true)
+ }
+
+ if u.SourceGoogleDriveParquetFormat != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveParquetFormat, "", true)
+ }
+
+ if u.SourceGoogleDriveDocumentFileTypeFormatExperimental != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveDocumentFileTypeFormatExperimental, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleDriveValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+type SourceGoogleDriveValidationPolicy string
+
+const (
+ SourceGoogleDriveValidationPolicyEmitRecord SourceGoogleDriveValidationPolicy = "Emit Record"
+ SourceGoogleDriveValidationPolicySkipRecord SourceGoogleDriveValidationPolicy = "Skip Record"
+ SourceGoogleDriveValidationPolicyWaitForDiscover SourceGoogleDriveValidationPolicy = "Wait for Discover"
+)
+
+func (e SourceGoogleDriveValidationPolicy) ToPointer() *SourceGoogleDriveValidationPolicy {
+ return &e
+}
+
+func (e *SourceGoogleDriveValidationPolicy) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Emit Record":
+ fallthrough
+ case "Skip Record":
+ fallthrough
+ case "Wait for Discover":
+ *e = SourceGoogleDriveValidationPolicy(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveValidationPolicy: %v", v)
+ }
+}
+
+type SourceGoogleDriveFileBasedStreamConfig struct {
+ // When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
+ DaysToSyncIfHistoryIsFull *int64 `default:"3" json:"days_to_sync_if_history_is_full"`
+ // The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+ Format SourceGoogleDriveFormat `json:"format"`
+ // The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
+ Globs []string `json:"globs,omitempty"`
+ // The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
+ InputSchema *string `json:"input_schema,omitempty"`
+ // The name of the stream.
+ Name string `json:"name"`
+ // The column or columns (for a composite key) that serves as the unique identifier of a record.
+ PrimaryKey *string `json:"primary_key,omitempty"`
+ // When enabled, syncs will not validate or structure records against the stream's schema.
+ Schemaless *bool `default:"false" json:"schemaless"`
+ // The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+ ValidationPolicy *SourceGoogleDriveValidationPolicy `default:"Emit Record" json:"validation_policy"`
+}
+
+func (s SourceGoogleDriveFileBasedStreamConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveFileBasedStreamConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveFileBasedStreamConfig) GetDaysToSyncIfHistoryIsFull() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DaysToSyncIfHistoryIsFull
+}
+
+func (o *SourceGoogleDriveFileBasedStreamConfig) GetFormat() SourceGoogleDriveFormat {
+ if o == nil {
+ return SourceGoogleDriveFormat{}
+ }
+ return o.Format
+}
+
+func (o *SourceGoogleDriveFileBasedStreamConfig) GetGlobs() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Globs
+}
+
+func (o *SourceGoogleDriveFileBasedStreamConfig) GetInputSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.InputSchema
+}
+
+func (o *SourceGoogleDriveFileBasedStreamConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleDriveFileBasedStreamConfig) GetPrimaryKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrimaryKey
+}
+
+func (o *SourceGoogleDriveFileBasedStreamConfig) GetSchemaless() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Schemaless
+}
+
+func (o *SourceGoogleDriveFileBasedStreamConfig) GetValidationPolicy() *SourceGoogleDriveValidationPolicy {
+ if o == nil {
+ return nil
+ }
+ return o.ValidationPolicy
+}
+
+// SourceGoogleDrive - Used during spec; allows the developer to configure the cloud provider specific options
+// that are needed when users configure a file-based source.
+type SourceGoogleDrive struct {
+ // Credentials for connecting to the Google Drive API
+ Credentials SourceGoogleDriveAuthentication `json:"credentials"`
+ // URL for the folder you want to sync. Using individual streams and glob patterns, it's possible to only sync a subset of all files located in the folder.
+ FolderURL string `json:"folder_url"`
+ sourceType GoogleDrive `const:"google-drive" json:"sourceType"`
+ // UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
+ StartDate *time.Time `json:"start_date,omitempty"`
+ // Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
+ Streams []SourceGoogleDriveFileBasedStreamConfig `json:"streams"`
+}
+
+func (s SourceGoogleDrive) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDrive) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDrive) GetCredentials() SourceGoogleDriveAuthentication {
+ if o == nil {
+ return SourceGoogleDriveAuthentication{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceGoogleDrive) GetFolderURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.FolderURL
+}
+
+func (o *SourceGoogleDrive) GetSourceType() GoogleDrive {
+ return GoogleDriveGoogleDrive
+}
+
+func (o *SourceGoogleDrive) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceGoogleDrive) GetStreams() []SourceGoogleDriveFileBasedStreamConfig {
+ if o == nil {
+ return []SourceGoogleDriveFileBasedStreamConfig{}
+ }
+ return o.Streams
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogledrivecreaterequest.go b/internal/sdk/pkg/models/shared/sourcegoogledrivecreaterequest.go
new file mode 100644
index 000000000..93a38b0ac
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcegoogledrivecreaterequest.go
@@ -0,0 +1,51 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SourceGoogleDriveCreateRequest struct {
+ // Used during spec; allows the developer to configure the cloud provider specific options
+ // that are needed when users configure a file-based source.
+ Configuration SourceGoogleDrive `json:"configuration"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ // Optional secretID obtained through the public API OAuth redirect flow.
+ SecretID *string `json:"secretId,omitempty"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *SourceGoogleDriveCreateRequest) GetConfiguration() SourceGoogleDrive {
+ if o == nil {
+ return SourceGoogleDrive{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleDriveCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGoogleDriveCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleDriveCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGoogleDriveCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogledriveputrequest.go b/internal/sdk/pkg/models/shared/sourcegoogledriveputrequest.go
new file mode 100644
index 000000000..89370e43b
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcegoogledriveputrequest.go
@@ -0,0 +1,32 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SourceGoogleDrivePutRequest struct {
+ // Used during spec; allows the developer to configure the cloud provider specific options
+ // that are needed when users configure a file-based source.
+ Configuration SourceGoogleDriveUpdate `json:"configuration"`
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *SourceGoogleDrivePutRequest) GetConfiguration() SourceGoogleDriveUpdate {
+ if o == nil {
+ return SourceGoogleDriveUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleDrivePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleDrivePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogledriveupdate.go b/internal/sdk/pkg/models/shared/sourcegoogledriveupdate.go
new file mode 100644
index 000000000..89ffe0ffa
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcegoogledriveupdate.go
@@ -0,0 +1,1077 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+ "time"
+)
+
+type SourceGoogleDriveUpdateSchemasAuthType string
+
+const (
+ SourceGoogleDriveUpdateSchemasAuthTypeService SourceGoogleDriveUpdateSchemasAuthType = "Service"
+)
+
+func (e SourceGoogleDriveUpdateSchemasAuthType) ToPointer() *SourceGoogleDriveUpdateSchemasAuthType {
+ return &e
+}
+
+func (e *SourceGoogleDriveUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Service":
+ *e = SourceGoogleDriveUpdateSchemasAuthType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveUpdateSchemasAuthType: %v", v)
+ }
+}
+
+// SourceGoogleDriveUpdateServiceAccountKeyAuthentication - Credentials for connecting to the Google Drive API
+type SourceGoogleDriveUpdateServiceAccountKeyAuthentication struct {
+ authType *SourceGoogleDriveUpdateSchemasAuthType `const:"Service" json:"auth_type"`
+ // The JSON key of the service account to use for authorization. Read more here.
+ ServiceAccountInfo string `json:"service_account_info"`
+}
+
+func (s SourceGoogleDriveUpdateServiceAccountKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveUpdateServiceAccountKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveUpdateServiceAccountKeyAuthentication) GetAuthType() *SourceGoogleDriveUpdateSchemasAuthType {
+ return SourceGoogleDriveUpdateSchemasAuthTypeService.ToPointer()
+}
+
+func (o *SourceGoogleDriveUpdateServiceAccountKeyAuthentication) GetServiceAccountInfo() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServiceAccountInfo
+}
+
+type SourceGoogleDriveUpdateAuthType string
+
+const (
+ SourceGoogleDriveUpdateAuthTypeClient SourceGoogleDriveUpdateAuthType = "Client"
+)
+
+func (e SourceGoogleDriveUpdateAuthType) ToPointer() *SourceGoogleDriveUpdateAuthType {
+ return &e
+}
+
+func (e *SourceGoogleDriveUpdateAuthType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Client":
+ *e = SourceGoogleDriveUpdateAuthType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveUpdateAuthType: %v", v)
+ }
+}
+
+// SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth - Credentials for connecting to the Google Drive API
+type SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth struct {
+ authType *SourceGoogleDriveUpdateAuthType `const:"Client" json:"auth_type"`
+ // Client ID for the Google Drive API
+ ClientID string `json:"client_id"`
+ // Client Secret for the Google Drive API
+ ClientSecret string `json:"client_secret"`
+ // Refresh Token for the Google Drive API
+ RefreshToken string `json:"refresh_token"`
+}
+
+func (s SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth) GetAuthType() *SourceGoogleDriveUpdateAuthType {
+ return SourceGoogleDriveUpdateAuthTypeClient.ToPointer()
+}
+
+func (o *SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+type SourceGoogleDriveUpdateAuthenticationType string
+
+const (
+ SourceGoogleDriveUpdateAuthenticationTypeSourceGoogleDriveUpdateAuthenticateViaGoogleOAuth SourceGoogleDriveUpdateAuthenticationType = "source-google-drive-update_Authenticate via Google (OAuth)"
+ SourceGoogleDriveUpdateAuthenticationTypeSourceGoogleDriveUpdateServiceAccountKeyAuthentication SourceGoogleDriveUpdateAuthenticationType = "source-google-drive-update_Service Account Key Authentication"
+)
+
+type SourceGoogleDriveUpdateAuthentication struct {
+ SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth *SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth
+ SourceGoogleDriveUpdateServiceAccountKeyAuthentication *SourceGoogleDriveUpdateServiceAccountKeyAuthentication
+
+ Type SourceGoogleDriveUpdateAuthenticationType
+}
+
+func CreateSourceGoogleDriveUpdateAuthenticationSourceGoogleDriveUpdateAuthenticateViaGoogleOAuth(sourceGoogleDriveUpdateAuthenticateViaGoogleOAuth SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth) SourceGoogleDriveUpdateAuthentication {
+ typ := SourceGoogleDriveUpdateAuthenticationTypeSourceGoogleDriveUpdateAuthenticateViaGoogleOAuth
+
+ return SourceGoogleDriveUpdateAuthentication{
+ SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth: &sourceGoogleDriveUpdateAuthenticateViaGoogleOAuth,
+ Type: typ,
+ }
+}
+
+func CreateSourceGoogleDriveUpdateAuthenticationSourceGoogleDriveUpdateServiceAccountKeyAuthentication(sourceGoogleDriveUpdateServiceAccountKeyAuthentication SourceGoogleDriveUpdateServiceAccountKeyAuthentication) SourceGoogleDriveUpdateAuthentication {
+ typ := SourceGoogleDriveUpdateAuthenticationTypeSourceGoogleDriveUpdateServiceAccountKeyAuthentication
+
+ return SourceGoogleDriveUpdateAuthentication{
+ SourceGoogleDriveUpdateServiceAccountKeyAuthentication: &sourceGoogleDriveUpdateServiceAccountKeyAuthentication,
+ Type: typ,
+ }
+}
+
+func (u *SourceGoogleDriveUpdateAuthentication) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleDriveUpdateServiceAccountKeyAuthentication := new(SourceGoogleDriveUpdateServiceAccountKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveUpdateServiceAccountKeyAuthentication, "", true, true); err == nil {
+ u.SourceGoogleDriveUpdateServiceAccountKeyAuthentication = sourceGoogleDriveUpdateServiceAccountKeyAuthentication
+ u.Type = SourceGoogleDriveUpdateAuthenticationTypeSourceGoogleDriveUpdateServiceAccountKeyAuthentication
+ return nil
+ }
+
+ sourceGoogleDriveUpdateAuthenticateViaGoogleOAuth := new(SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveUpdateAuthenticateViaGoogleOAuth, "", true, true); err == nil {
+ u.SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth = sourceGoogleDriveUpdateAuthenticateViaGoogleOAuth
+ u.Type = SourceGoogleDriveUpdateAuthenticationTypeSourceGoogleDriveUpdateAuthenticateViaGoogleOAuth
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceGoogleDriveUpdateAuthentication) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveUpdateAuthenticateViaGoogleOAuth, "", true)
+ }
+
+ if u.SourceGoogleDriveUpdateServiceAccountKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveUpdateServiceAccountKeyAuthentication, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceGoogleDriveUpdateSchemasStreamsFormatFormatFiletype string
+
+const (
+ SourceGoogleDriveUpdateSchemasStreamsFormatFormatFiletypeUnstructured SourceGoogleDriveUpdateSchemasStreamsFormatFormatFiletype = "unstructured"
+)
+
+func (e SourceGoogleDriveUpdateSchemasStreamsFormatFormatFiletype) ToPointer() *SourceGoogleDriveUpdateSchemasStreamsFormatFormatFiletype {
+ return &e
+}
+
+func (e *SourceGoogleDriveUpdateSchemasStreamsFormatFormatFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "unstructured":
+ *e = SourceGoogleDriveUpdateSchemasStreamsFormatFormatFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveUpdateSchemasStreamsFormatFormatFiletype: %v", v)
+ }
+}
+
+// SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
+type SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental struct {
+ filetype *SourceGoogleDriveUpdateSchemasStreamsFormatFormatFiletype `const:"unstructured" json:"filetype"`
+ // If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.
+ SkipUnprocessableFileTypes *bool `default:"true" json:"skip_unprocessable_file_types"`
+}
+
+func (s SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental) GetFiletype() *SourceGoogleDriveUpdateSchemasStreamsFormatFormatFiletype {
+ return SourceGoogleDriveUpdateSchemasStreamsFormatFormatFiletypeUnstructured.ToPointer()
+}
+
+func (o *SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental) GetSkipUnprocessableFileTypes() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.SkipUnprocessableFileTypes
+}
+
+type SourceGoogleDriveUpdateSchemasStreamsFormatFiletype string
+
+const (
+ SourceGoogleDriveUpdateSchemasStreamsFormatFiletypeParquet SourceGoogleDriveUpdateSchemasStreamsFormatFiletype = "parquet"
+)
+
+func (e SourceGoogleDriveUpdateSchemasStreamsFormatFiletype) ToPointer() *SourceGoogleDriveUpdateSchemasStreamsFormatFiletype {
+ return &e
+}
+
+func (e *SourceGoogleDriveUpdateSchemasStreamsFormatFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "parquet":
+ *e = SourceGoogleDriveUpdateSchemasStreamsFormatFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveUpdateSchemasStreamsFormatFiletype: %v", v)
+ }
+}
+
+// SourceGoogleDriveUpdateParquetFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceGoogleDriveUpdateParquetFormat struct {
+ // Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
+ DecimalAsFloat *bool `default:"false" json:"decimal_as_float"`
+ filetype *SourceGoogleDriveUpdateSchemasStreamsFormatFiletype `const:"parquet" json:"filetype"`
+}
+
+func (s SourceGoogleDriveUpdateParquetFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveUpdateParquetFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveUpdateParquetFormat) GetDecimalAsFloat() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DecimalAsFloat
+}
+
+func (o *SourceGoogleDriveUpdateParquetFormat) GetFiletype() *SourceGoogleDriveUpdateSchemasStreamsFormatFiletype {
+ return SourceGoogleDriveUpdateSchemasStreamsFormatFiletypeParquet.ToPointer()
+}
+
+type SourceGoogleDriveUpdateSchemasStreamsFiletype string
+
+const (
+ SourceGoogleDriveUpdateSchemasStreamsFiletypeJsonl SourceGoogleDriveUpdateSchemasStreamsFiletype = "jsonl"
+)
+
+func (e SourceGoogleDriveUpdateSchemasStreamsFiletype) ToPointer() *SourceGoogleDriveUpdateSchemasStreamsFiletype {
+ return &e
+}
+
+func (e *SourceGoogleDriveUpdateSchemasStreamsFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "jsonl":
+ *e = SourceGoogleDriveUpdateSchemasStreamsFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveUpdateSchemasStreamsFiletype: %v", v)
+ }
+}
+
+// SourceGoogleDriveUpdateJsonlFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceGoogleDriveUpdateJsonlFormat struct {
+ filetype *SourceGoogleDriveUpdateSchemasStreamsFiletype `const:"jsonl" json:"filetype"`
+}
+
+func (s SourceGoogleDriveUpdateJsonlFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleDriveUpdateJsonlFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleDriveUpdateJsonlFormat) GetFiletype() *SourceGoogleDriveUpdateSchemasStreamsFiletype {
+ return SourceGoogleDriveUpdateSchemasStreamsFiletypeJsonl.ToPointer()
+}
+
+type SourceGoogleDriveUpdateSchemasFiletype string
+
+const (
+ SourceGoogleDriveUpdateSchemasFiletypeCsv SourceGoogleDriveUpdateSchemasFiletype = "csv"
+)
+
+func (e SourceGoogleDriveUpdateSchemasFiletype) ToPointer() *SourceGoogleDriveUpdateSchemasFiletype {
+ return &e
+}
+
+func (e *SourceGoogleDriveUpdateSchemasFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "csv":
+ *e = SourceGoogleDriveUpdateSchemasFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveUpdateSchemasFiletype: %v", v)
+ }
+}
+
+// SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionType - discriminator for the "User Provided" CSV header option.
+type SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionType string
+
+const (
+ SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionTypeUserProvided SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionType = "User Provided"
+)
+
+// ToPointer returns a pointer to the enum value, convenient for optional struct fields.
+func (e SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionType) ToPointer() *SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionType {
+ return &e
+}
+
+// UnmarshalJSON rejects any JSON string that is not an allowed enum value.
+func (e *SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "User Provided":
+ *e = SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGoogleDriveUpdateUserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGoogleDriveUpdateUserProvided struct {
+ // The column names that will be used while emitting the CSV records
+ ColumnNames []string `json:"column_names"`
+ // headerDefinitionType is a constant discriminator ("User Provided"); unexported so callers cannot overwrite it.
+ headerDefinitionType *SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionType `const:"User Provided" json:"header_definition_type"`
+}
+
+// MarshalJSON serializes via the SDK utils helper so const-tagged fields are emitted.
+func (s SourceGoogleDriveUpdateUserProvided) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK utils helper, enforcing the const tag.
+func (s *SourceGoogleDriveUpdateUserProvided) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetColumnNames returns the configured column names; empty slice on a nil receiver.
+func (o *SourceGoogleDriveUpdateUserProvided) GetColumnNames() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.ColumnNames
+}
+
+// GetHeaderDefinitionType always returns the constant "User Provided" discriminator.
+func (o *SourceGoogleDriveUpdateUserProvided) GetHeaderDefinitionType() *SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionType {
+ return SourceGoogleDriveUpdateSchemasStreamsHeaderDefinitionTypeUserProvided.ToPointer()
+}
+
+// SourceGoogleDriveUpdateSchemasHeaderDefinitionType - discriminator for the "Autogenerated" CSV header option.
+type SourceGoogleDriveUpdateSchemasHeaderDefinitionType string
+
+const (
+ SourceGoogleDriveUpdateSchemasHeaderDefinitionTypeAutogenerated SourceGoogleDriveUpdateSchemasHeaderDefinitionType = "Autogenerated"
+)
+
+// ToPointer returns a pointer to the enum value, convenient for optional struct fields.
+func (e SourceGoogleDriveUpdateSchemasHeaderDefinitionType) ToPointer() *SourceGoogleDriveUpdateSchemasHeaderDefinitionType {
+ return &e
+}
+
+// UnmarshalJSON rejects any JSON string that is not an allowed enum value.
+func (e *SourceGoogleDriveUpdateSchemasHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Autogenerated":
+ *e = SourceGoogleDriveUpdateSchemasHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveUpdateSchemasHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGoogleDriveUpdateAutogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGoogleDriveUpdateAutogenerated struct {
+ // headerDefinitionType is a constant discriminator ("Autogenerated"); unexported so callers cannot overwrite it.
+ headerDefinitionType *SourceGoogleDriveUpdateSchemasHeaderDefinitionType `const:"Autogenerated" json:"header_definition_type"`
+}
+
+// MarshalJSON serializes via the SDK utils helper so const-tagged fields are emitted.
+func (s SourceGoogleDriveUpdateAutogenerated) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK utils helper, enforcing the const tag.
+func (s *SourceGoogleDriveUpdateAutogenerated) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetHeaderDefinitionType always returns the constant "Autogenerated" discriminator.
+func (o *SourceGoogleDriveUpdateAutogenerated) GetHeaderDefinitionType() *SourceGoogleDriveUpdateSchemasHeaderDefinitionType {
+ return SourceGoogleDriveUpdateSchemasHeaderDefinitionTypeAutogenerated.ToPointer()
+}
+
+// SourceGoogleDriveUpdateHeaderDefinitionType - discriminator for the "From CSV" header option.
+type SourceGoogleDriveUpdateHeaderDefinitionType string
+
+const (
+ SourceGoogleDriveUpdateHeaderDefinitionTypeFromCsv SourceGoogleDriveUpdateHeaderDefinitionType = "From CSV"
+)
+
+// ToPointer returns a pointer to the enum value, convenient for optional struct fields.
+func (e SourceGoogleDriveUpdateHeaderDefinitionType) ToPointer() *SourceGoogleDriveUpdateHeaderDefinitionType {
+ return &e
+}
+
+// UnmarshalJSON rejects any JSON string that is not an allowed enum value.
+func (e *SourceGoogleDriveUpdateHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "From CSV":
+ *e = SourceGoogleDriveUpdateHeaderDefinitionType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveUpdateHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceGoogleDriveUpdateFromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceGoogleDriveUpdateFromCSV struct {
+ // headerDefinitionType is a constant discriminator ("From CSV"); unexported so callers cannot overwrite it.
+ headerDefinitionType *SourceGoogleDriveUpdateHeaderDefinitionType `const:"From CSV" json:"header_definition_type"`
+}
+
+// MarshalJSON serializes via the SDK utils helper so const-tagged fields are emitted.
+func (s SourceGoogleDriveUpdateFromCSV) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK utils helper, enforcing the const tag.
+func (s *SourceGoogleDriveUpdateFromCSV) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetHeaderDefinitionType always returns the constant "From CSV" discriminator.
+func (o *SourceGoogleDriveUpdateFromCSV) GetHeaderDefinitionType() *SourceGoogleDriveUpdateHeaderDefinitionType {
+ return SourceGoogleDriveUpdateHeaderDefinitionTypeFromCsv.ToPointer()
+}
+
+// SourceGoogleDriveUpdateCSVHeaderDefinitionType tags which variant of the
+// CSVHeaderDefinition union is populated.
+type SourceGoogleDriveUpdateCSVHeaderDefinitionType string
+
+const (
+ SourceGoogleDriveUpdateCSVHeaderDefinitionTypeSourceGoogleDriveUpdateFromCSV SourceGoogleDriveUpdateCSVHeaderDefinitionType = "source-google-drive-update_From CSV"
+ SourceGoogleDriveUpdateCSVHeaderDefinitionTypeSourceGoogleDriveUpdateAutogenerated SourceGoogleDriveUpdateCSVHeaderDefinitionType = "source-google-drive-update_Autogenerated"
+ SourceGoogleDriveUpdateCSVHeaderDefinitionTypeSourceGoogleDriveUpdateUserProvided SourceGoogleDriveUpdateCSVHeaderDefinitionType = "source-google-drive-update_User Provided"
+)
+
+// SourceGoogleDriveUpdateCSVHeaderDefinition is a tagged union: exactly one of
+// the variant pointers is non-nil, and Type records which one.
+type SourceGoogleDriveUpdateCSVHeaderDefinition struct {
+ SourceGoogleDriveUpdateFromCSV *SourceGoogleDriveUpdateFromCSV
+ SourceGoogleDriveUpdateAutogenerated *SourceGoogleDriveUpdateAutogenerated
+ SourceGoogleDriveUpdateUserProvided *SourceGoogleDriveUpdateUserProvided
+
+ Type SourceGoogleDriveUpdateCSVHeaderDefinitionType
+}
+
+// CreateSourceGoogleDriveUpdateCSVHeaderDefinitionSourceGoogleDriveUpdateFromCSV builds the union holding the FromCSV variant.
+func CreateSourceGoogleDriveUpdateCSVHeaderDefinitionSourceGoogleDriveUpdateFromCSV(sourceGoogleDriveUpdateFromCSV SourceGoogleDriveUpdateFromCSV) SourceGoogleDriveUpdateCSVHeaderDefinition {
+ typ := SourceGoogleDriveUpdateCSVHeaderDefinitionTypeSourceGoogleDriveUpdateFromCSV
+
+ return SourceGoogleDriveUpdateCSVHeaderDefinition{
+ SourceGoogleDriveUpdateFromCSV: &sourceGoogleDriveUpdateFromCSV,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleDriveUpdateCSVHeaderDefinitionSourceGoogleDriveUpdateAutogenerated builds the union holding the Autogenerated variant.
+func CreateSourceGoogleDriveUpdateCSVHeaderDefinitionSourceGoogleDriveUpdateAutogenerated(sourceGoogleDriveUpdateAutogenerated SourceGoogleDriveUpdateAutogenerated) SourceGoogleDriveUpdateCSVHeaderDefinition {
+ typ := SourceGoogleDriveUpdateCSVHeaderDefinitionTypeSourceGoogleDriveUpdateAutogenerated
+
+ return SourceGoogleDriveUpdateCSVHeaderDefinition{
+ SourceGoogleDriveUpdateAutogenerated: &sourceGoogleDriveUpdateAutogenerated,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleDriveUpdateCSVHeaderDefinitionSourceGoogleDriveUpdateUserProvided builds the union holding the UserProvided variant.
+func CreateSourceGoogleDriveUpdateCSVHeaderDefinitionSourceGoogleDriveUpdateUserProvided(sourceGoogleDriveUpdateUserProvided SourceGoogleDriveUpdateUserProvided) SourceGoogleDriveUpdateCSVHeaderDefinition {
+ typ := SourceGoogleDriveUpdateCSVHeaderDefinitionTypeSourceGoogleDriveUpdateUserProvided
+
+ return SourceGoogleDriveUpdateCSVHeaderDefinition{
+ SourceGoogleDriveUpdateUserProvided: &sourceGoogleDriveUpdateUserProvided,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON tries each variant in order and keeps the first that decodes
+// cleanly (the const-tagged discriminator field disambiguates the variants).
+func (u *SourceGoogleDriveUpdateCSVHeaderDefinition) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleDriveUpdateFromCSV := new(SourceGoogleDriveUpdateFromCSV)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveUpdateFromCSV, "", true, true); err == nil {
+ u.SourceGoogleDriveUpdateFromCSV = sourceGoogleDriveUpdateFromCSV
+ u.Type = SourceGoogleDriveUpdateCSVHeaderDefinitionTypeSourceGoogleDriveUpdateFromCSV
+ return nil
+ }
+
+ sourceGoogleDriveUpdateAutogenerated := new(SourceGoogleDriveUpdateAutogenerated)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveUpdateAutogenerated, "", true, true); err == nil {
+ u.SourceGoogleDriveUpdateAutogenerated = sourceGoogleDriveUpdateAutogenerated
+ u.Type = SourceGoogleDriveUpdateCSVHeaderDefinitionTypeSourceGoogleDriveUpdateAutogenerated
+ return nil
+ }
+
+ sourceGoogleDriveUpdateUserProvided := new(SourceGoogleDriveUpdateUserProvided)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveUpdateUserProvided, "", true, true); err == nil {
+ u.SourceGoogleDriveUpdateUserProvided = sourceGoogleDriveUpdateUserProvided
+ u.Type = SourceGoogleDriveUpdateCSVHeaderDefinitionTypeSourceGoogleDriveUpdateUserProvided
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant is populated; errors if all are nil.
+func (u SourceGoogleDriveUpdateCSVHeaderDefinition) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleDriveUpdateFromCSV != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveUpdateFromCSV, "", true)
+ }
+
+ if u.SourceGoogleDriveUpdateAutogenerated != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveUpdateAutogenerated, "", true)
+ }
+
+ if u.SourceGoogleDriveUpdateUserProvided != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveUpdateUserProvided, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleDriveUpdateCSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceGoogleDriveUpdateCSVFormat struct {
+ // The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
+ Delimiter *string `default:"," json:"delimiter"`
+ // Whether two quotes in a quoted CSV value denote a single quote in the data.
+ DoubleQuote *bool `default:"true" json:"double_quote"`
+ // The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
+ Encoding *string `default:"utf8" json:"encoding"`
+ // The character used for escaping special characters. To disallow escaping, leave this field blank.
+ EscapeChar *string `json:"escape_char,omitempty"`
+ // A set of case-sensitive strings that should be interpreted as false values.
+ FalseValues []string `json:"false_values,omitempty"`
+ // filetype is a constant discriminator ("csv"); unexported so callers cannot overwrite it.
+ filetype *SourceGoogleDriveUpdateSchemasFiletype `const:"csv" json:"filetype"`
+ // How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+ HeaderDefinition *SourceGoogleDriveUpdateCSVHeaderDefinition `json:"header_definition,omitempty"`
+ // A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
+ NullValues []string `json:"null_values,omitempty"`
+ // The character used for quoting CSV values. To disallow quoting, make this field blank.
+ // NOTE: the default is a literal double-quote character; it must be escaped as \" inside
+ // the struct tag, otherwise reflect.StructTag parsing breaks at the unbalanced quote and
+ // both the `default` and `json` keys become unreadable.
+ QuoteChar *string `default:"\"" json:"quote_char"`
+ // The number of rows to skip after the header row.
+ SkipRowsAfterHeader *int64 `default:"0" json:"skip_rows_after_header"`
+ // The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
+ SkipRowsBeforeHeader *int64 `default:"0" json:"skip_rows_before_header"`
+ // Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
+ StringsCanBeNull *bool `default:"true" json:"strings_can_be_null"`
+ // A set of case-sensitive strings that should be interpreted as true values.
+ TrueValues []string `json:"true_values,omitempty"`
+}
+
+// MarshalJSON serializes via the SDK utils helper so const/default tags are honored.
+func (s SourceGoogleDriveUpdateCSVFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK utils helper, applying defaults and the const tag.
+func (s *SourceGoogleDriveUpdateCSVFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetDelimiter returns the configured delimiter; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetDelimiter() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Delimiter
+}
+
+// GetDoubleQuote reports whether doubled quotes denote a literal quote; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetDoubleQuote() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleQuote
+}
+
+// GetEncoding returns the configured character encoding; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetEncoding() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Encoding
+}
+
+// GetEscapeChar returns the configured escape character; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetEscapeChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EscapeChar
+}
+
+// GetFalseValues returns the strings interpreted as false; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetFalseValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.FalseValues
+}
+
+// GetFiletype always returns the constant "csv" discriminator, ignoring the stored field.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetFiletype() *SourceGoogleDriveUpdateSchemasFiletype {
+ return SourceGoogleDriveUpdateSchemasFiletypeCsv.ToPointer()
+}
+
+// GetHeaderDefinition returns the header-definition union; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetHeaderDefinition() *SourceGoogleDriveUpdateCSVHeaderDefinition {
+ if o == nil {
+ return nil
+ }
+ return o.HeaderDefinition
+}
+
+// GetNullValues returns the strings interpreted as null; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetNullValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.NullValues
+}
+
+// GetQuoteChar returns the configured quote character; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetQuoteChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QuoteChar
+}
+
+// GetSkipRowsAfterHeader returns the post-header skip count; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetSkipRowsAfterHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsAfterHeader
+}
+
+// GetSkipRowsBeforeHeader returns the pre-header skip count; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetSkipRowsBeforeHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsBeforeHeader
+}
+
+// GetStringsCanBeNull reports whether matching strings may be read as null; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetStringsCanBeNull() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.StringsCanBeNull
+}
+
+// GetTrueValues returns the strings interpreted as true; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateCSVFormat) GetTrueValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TrueValues
+}
+
+// SourceGoogleDriveUpdateFiletype - discriminator for the Avro format option; "avro" is the only valid value.
+type SourceGoogleDriveUpdateFiletype string
+
+const (
+ SourceGoogleDriveUpdateFiletypeAvro SourceGoogleDriveUpdateFiletype = "avro"
+)
+
+// ToPointer returns a pointer to the enum value, convenient for optional struct fields.
+func (e SourceGoogleDriveUpdateFiletype) ToPointer() *SourceGoogleDriveUpdateFiletype {
+ return &e
+}
+
+// UnmarshalJSON rejects any JSON string that is not an allowed enum value.
+func (e *SourceGoogleDriveUpdateFiletype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "avro":
+ *e = SourceGoogleDriveUpdateFiletype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveUpdateFiletype: %v", v)
+ }
+}
+
+// SourceGoogleDriveUpdateAvroFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceGoogleDriveUpdateAvroFormat struct {
+ // Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.
+ DoubleAsString *bool `default:"false" json:"double_as_string"`
+ // filetype is a constant discriminator ("avro"); unexported so callers cannot overwrite it.
+ filetype *SourceGoogleDriveUpdateFiletype `const:"avro" json:"filetype"`
+}
+
+// MarshalJSON serializes via the SDK utils helper so const/default tags are honored.
+func (s SourceGoogleDriveUpdateAvroFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK utils helper, applying defaults and the const tag.
+func (s *SourceGoogleDriveUpdateAvroFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetDoubleAsString reports whether doubles are emitted as strings; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateAvroFormat) GetDoubleAsString() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleAsString
+}
+
+// GetFiletype always returns the constant "avro" discriminator, ignoring the stored field.
+func (o *SourceGoogleDriveUpdateAvroFormat) GetFiletype() *SourceGoogleDriveUpdateFiletype {
+ return SourceGoogleDriveUpdateFiletypeAvro.ToPointer()
+}
+
+// SourceGoogleDriveUpdateFormatType tags which variant of the Format union is populated.
+type SourceGoogleDriveUpdateFormatType string
+
+const (
+ SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateAvroFormat SourceGoogleDriveUpdateFormatType = "source-google-drive-update_Avro Format"
+ SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateCSVFormat SourceGoogleDriveUpdateFormatType = "source-google-drive-update_CSV Format"
+ SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateJsonlFormat SourceGoogleDriveUpdateFormatType = "source-google-drive-update_Jsonl Format"
+ SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateParquetFormat SourceGoogleDriveUpdateFormatType = "source-google-drive-update_Parquet Format"
+ SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateDocumentFileTypeFormatExperimental SourceGoogleDriveUpdateFormatType = "source-google-drive-update_Document File Type Format (Experimental)"
+)
+
+// SourceGoogleDriveUpdateFormat is a tagged union: exactly one of the variant
+// pointers is non-nil, and Type records which one.
+type SourceGoogleDriveUpdateFormat struct {
+ SourceGoogleDriveUpdateAvroFormat *SourceGoogleDriveUpdateAvroFormat
+ SourceGoogleDriveUpdateCSVFormat *SourceGoogleDriveUpdateCSVFormat
+ SourceGoogleDriveUpdateJsonlFormat *SourceGoogleDriveUpdateJsonlFormat
+ SourceGoogleDriveUpdateParquetFormat *SourceGoogleDriveUpdateParquetFormat
+ SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental *SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental
+
+ Type SourceGoogleDriveUpdateFormatType
+}
+
+// CreateSourceGoogleDriveUpdateFormatSourceGoogleDriveUpdateAvroFormat builds the union holding the Avro variant.
+func CreateSourceGoogleDriveUpdateFormatSourceGoogleDriveUpdateAvroFormat(sourceGoogleDriveUpdateAvroFormat SourceGoogleDriveUpdateAvroFormat) SourceGoogleDriveUpdateFormat {
+ typ := SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateAvroFormat
+
+ return SourceGoogleDriveUpdateFormat{
+ SourceGoogleDriveUpdateAvroFormat: &sourceGoogleDriveUpdateAvroFormat,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleDriveUpdateFormatSourceGoogleDriveUpdateCSVFormat builds the union holding the CSV variant.
+func CreateSourceGoogleDriveUpdateFormatSourceGoogleDriveUpdateCSVFormat(sourceGoogleDriveUpdateCSVFormat SourceGoogleDriveUpdateCSVFormat) SourceGoogleDriveUpdateFormat {
+ typ := SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateCSVFormat
+
+ return SourceGoogleDriveUpdateFormat{
+ SourceGoogleDriveUpdateCSVFormat: &sourceGoogleDriveUpdateCSVFormat,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleDriveUpdateFormatSourceGoogleDriveUpdateJsonlFormat builds the union holding the Jsonl variant.
+func CreateSourceGoogleDriveUpdateFormatSourceGoogleDriveUpdateJsonlFormat(sourceGoogleDriveUpdateJsonlFormat SourceGoogleDriveUpdateJsonlFormat) SourceGoogleDriveUpdateFormat {
+ typ := SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateJsonlFormat
+
+ return SourceGoogleDriveUpdateFormat{
+ SourceGoogleDriveUpdateJsonlFormat: &sourceGoogleDriveUpdateJsonlFormat,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleDriveUpdateFormatSourceGoogleDriveUpdateParquetFormat builds the union holding the Parquet variant.
+func CreateSourceGoogleDriveUpdateFormatSourceGoogleDriveUpdateParquetFormat(sourceGoogleDriveUpdateParquetFormat SourceGoogleDriveUpdateParquetFormat) SourceGoogleDriveUpdateFormat {
+ typ := SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateParquetFormat
+
+ return SourceGoogleDriveUpdateFormat{
+ SourceGoogleDriveUpdateParquetFormat: &sourceGoogleDriveUpdateParquetFormat,
+ Type: typ,
+ }
+}
+
+// CreateSourceGoogleDriveUpdateFormatSourceGoogleDriveUpdateDocumentFileTypeFormatExperimental builds the union holding the document-file-type variant.
+func CreateSourceGoogleDriveUpdateFormatSourceGoogleDriveUpdateDocumentFileTypeFormatExperimental(sourceGoogleDriveUpdateDocumentFileTypeFormatExperimental SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental) SourceGoogleDriveUpdateFormat {
+ typ := SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateDocumentFileTypeFormatExperimental
+
+ return SourceGoogleDriveUpdateFormat{
+ SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental: &sourceGoogleDriveUpdateDocumentFileTypeFormatExperimental,
+ Type: typ,
+ }
+}
+
+// UnmarshalJSON tries each variant in order and keeps the first that decodes
+// cleanly. CSV is tried last because its fields are mostly optional, which
+// makes it the most permissive variant.
+func (u *SourceGoogleDriveUpdateFormat) UnmarshalJSON(data []byte) error {
+
+ sourceGoogleDriveUpdateJsonlFormat := new(SourceGoogleDriveUpdateJsonlFormat)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveUpdateJsonlFormat, "", true, true); err == nil {
+ u.SourceGoogleDriveUpdateJsonlFormat = sourceGoogleDriveUpdateJsonlFormat
+ u.Type = SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateJsonlFormat
+ return nil
+ }
+
+ sourceGoogleDriveUpdateAvroFormat := new(SourceGoogleDriveUpdateAvroFormat)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveUpdateAvroFormat, "", true, true); err == nil {
+ u.SourceGoogleDriveUpdateAvroFormat = sourceGoogleDriveUpdateAvroFormat
+ u.Type = SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateAvroFormat
+ return nil
+ }
+
+ sourceGoogleDriveUpdateParquetFormat := new(SourceGoogleDriveUpdateParquetFormat)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveUpdateParquetFormat, "", true, true); err == nil {
+ u.SourceGoogleDriveUpdateParquetFormat = sourceGoogleDriveUpdateParquetFormat
+ u.Type = SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateParquetFormat
+ return nil
+ }
+
+ sourceGoogleDriveUpdateDocumentFileTypeFormatExperimental := new(SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveUpdateDocumentFileTypeFormatExperimental, "", true, true); err == nil {
+ u.SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental = sourceGoogleDriveUpdateDocumentFileTypeFormatExperimental
+ u.Type = SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateDocumentFileTypeFormatExperimental
+ return nil
+ }
+
+ sourceGoogleDriveUpdateCSVFormat := new(SourceGoogleDriveUpdateCSVFormat)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleDriveUpdateCSVFormat, "", true, true); err == nil {
+ u.SourceGoogleDriveUpdateCSVFormat = sourceGoogleDriveUpdateCSVFormat
+ u.Type = SourceGoogleDriveUpdateFormatTypeSourceGoogleDriveUpdateCSVFormat
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+// MarshalJSON serializes whichever variant is populated; errors if all are nil.
+func (u SourceGoogleDriveUpdateFormat) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleDriveUpdateAvroFormat != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveUpdateAvroFormat, "", true)
+ }
+
+ if u.SourceGoogleDriveUpdateCSVFormat != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveUpdateCSVFormat, "", true)
+ }
+
+ if u.SourceGoogleDriveUpdateJsonlFormat != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveUpdateJsonlFormat, "", true)
+ }
+
+ if u.SourceGoogleDriveUpdateParquetFormat != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveUpdateParquetFormat, "", true)
+ }
+
+ if u.SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental != nil {
+ return utils.MarshalJSON(u.SourceGoogleDriveUpdateDocumentFileTypeFormatExperimental, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourceGoogleDriveUpdateValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+type SourceGoogleDriveUpdateValidationPolicy string
+
+const (
+ SourceGoogleDriveUpdateValidationPolicyEmitRecord SourceGoogleDriveUpdateValidationPolicy = "Emit Record"
+ SourceGoogleDriveUpdateValidationPolicySkipRecord SourceGoogleDriveUpdateValidationPolicy = "Skip Record"
+ SourceGoogleDriveUpdateValidationPolicyWaitForDiscover SourceGoogleDriveUpdateValidationPolicy = "Wait for Discover"
+)
+
+// ToPointer returns a pointer to the enum value, convenient for optional struct fields.
+func (e SourceGoogleDriveUpdateValidationPolicy) ToPointer() *SourceGoogleDriveUpdateValidationPolicy {
+ return &e
+}
+
+// UnmarshalJSON rejects any JSON string that is not one of the three allowed
+// policy values (the fallthrough chain funnels all valid values to one assignment).
+func (e *SourceGoogleDriveUpdateValidationPolicy) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "Emit Record":
+ fallthrough
+ case "Skip Record":
+ fallthrough
+ case "Wait for Discover":
+ *e = SourceGoogleDriveUpdateValidationPolicy(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceGoogleDriveUpdateValidationPolicy: %v", v)
+ }
+}
+
+// SourceGoogleDriveUpdateFileBasedStreamConfig defines a single file-based stream:
+// which files belong to it, their format, and how records are validated.
+type SourceGoogleDriveUpdateFileBasedStreamConfig struct {
+ // When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
+ DaysToSyncIfHistoryIsFull *int64 `default:"3" json:"days_to_sync_if_history_is_full"`
+ // The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+ Format SourceGoogleDriveUpdateFormat `json:"format"`
+ // The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
+ Globs []string `json:"globs,omitempty"`
+ // The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
+ InputSchema *string `json:"input_schema,omitempty"`
+ // The name of the stream.
+ Name string `json:"name"`
+ // The column or columns (for a composite key) that serves as the unique identifier of a record.
+ PrimaryKey *string `json:"primary_key,omitempty"`
+ // When enabled, syncs will not validate or structure records against the stream's schema.
+ Schemaless *bool `default:"false" json:"schemaless"`
+ // The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+ ValidationPolicy *SourceGoogleDriveUpdateValidationPolicy `default:"Emit Record" json:"validation_policy"`
+}
+
+// MarshalJSON serializes via the SDK utils helper so default tags are honored.
+func (s SourceGoogleDriveUpdateFileBasedStreamConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK utils helper, applying field defaults.
+func (s *SourceGoogleDriveUpdateFileBasedStreamConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetDaysToSyncIfHistoryIsFull returns the day range; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateFileBasedStreamConfig) GetDaysToSyncIfHistoryIsFull() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DaysToSyncIfHistoryIsFull
+}
+
+// GetFormat returns the stream's file format union; zero value on a nil receiver.
+func (o *SourceGoogleDriveUpdateFileBasedStreamConfig) GetFormat() SourceGoogleDriveUpdateFormat {
+ if o == nil {
+ return SourceGoogleDriveUpdateFormat{}
+ }
+ return o.Format
+}
+
+// GetGlobs returns the file-selection glob patterns; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateFileBasedStreamConfig) GetGlobs() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Globs
+}
+
+// GetInputSchema returns the override schema; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateFileBasedStreamConfig) GetInputSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.InputSchema
+}
+
+// GetName returns the stream name; empty string on a nil receiver.
+func (o *SourceGoogleDriveUpdateFileBasedStreamConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+// GetPrimaryKey returns the record primary key; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateFileBasedStreamConfig) GetPrimaryKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrimaryKey
+}
+
+// GetSchemaless reports whether schema validation is disabled; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateFileBasedStreamConfig) GetSchemaless() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Schemaless
+}
+
+// GetValidationPolicy returns the configured validation policy; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdateFileBasedStreamConfig) GetValidationPolicy() *SourceGoogleDriveUpdateValidationPolicy {
+ if o == nil {
+ return nil
+ }
+ return o.ValidationPolicy
+}
+
+// SourceGoogleDriveUpdate - Used during spec; allows the developer to configure the cloud provider specific options
+// that are needed when users configure a file-based source.
+type SourceGoogleDriveUpdate struct {
+ // Credentials for connecting to the Google Drive API
+ Credentials SourceGoogleDriveUpdateAuthentication `json:"credentials"`
+ // URL for the folder you want to sync. Using individual streams and glob patterns, it's possible to only sync a subset of all files located in the folder.
+ FolderURL string `json:"folder_url"`
+ // UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
+ StartDate *time.Time `json:"start_date,omitempty"`
+ // Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
+ Streams []SourceGoogleDriveUpdateFileBasedStreamConfig `json:"streams"`
+}
+
+// MarshalJSON serializes via the SDK utils helper.
+func (s SourceGoogleDriveUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+// UnmarshalJSON deserializes via the SDK utils helper.
+func (s *SourceGoogleDriveUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+// GetCredentials returns the Google Drive credentials; zero value on a nil receiver.
+func (o *SourceGoogleDriveUpdate) GetCredentials() SourceGoogleDriveUpdateAuthentication {
+ if o == nil {
+ return SourceGoogleDriveUpdateAuthentication{}
+ }
+ return o.Credentials
+}
+
+// GetFolderURL returns the folder URL to sync; empty string on a nil receiver.
+func (o *SourceGoogleDriveUpdate) GetFolderURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.FolderURL
+}
+
+// GetStartDate returns the replication cutoff time; nil on a nil receiver.
+func (o *SourceGoogleDriveUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+// GetStreams returns the configured stream definitions; empty slice on a nil receiver.
+func (o *SourceGoogleDriveUpdate) GetStreams() []SourceGoogleDriveUpdateFileBasedStreamConfig {
+ if o == nil {
+ return []SourceGoogleDriveUpdateFileBasedStreamConfig{}
+ }
+ return o.Streams
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsights.go b/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsights.go
old mode 100755
new mode 100644
index e956e6e02..a1486bb64
--- a/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsights.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsights.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceGooglePagespeedInsightsCategories string
@@ -43,27 +44,27 @@ func (e *SourceGooglePagespeedInsightsCategories) UnmarshalJSON(data []byte) err
}
}
-type SourceGooglePagespeedInsightsGooglePagespeedInsights string
+// GooglePagespeedInsights - const source-type discriminator ("google-pagespeed-insights").
+type GooglePagespeedInsights string
 
 const (
-	SourceGooglePagespeedInsightsGooglePagespeedInsightsGooglePagespeedInsights SourceGooglePagespeedInsightsGooglePagespeedInsights = "google-pagespeed-insights"
+	GooglePagespeedInsightsGooglePagespeedInsights GooglePagespeedInsights = "google-pagespeed-insights"
 )
 
-func (e SourceGooglePagespeedInsightsGooglePagespeedInsights) ToPointer() *SourceGooglePagespeedInsightsGooglePagespeedInsights {
+// ToPointer returns a pointer to the enum value, convenient for optional struct fields.
+func (e GooglePagespeedInsights) ToPointer() *GooglePagespeedInsights {
 	return &e
 }
 
-func (e *SourceGooglePagespeedInsightsGooglePagespeedInsights) UnmarshalJSON(data []byte) error {
+// UnmarshalJSON rejects any JSON string that is not the allowed enum value.
+func (e *GooglePagespeedInsights) UnmarshalJSON(data []byte) error {
 	var v string
 	if err := json.Unmarshal(data, &v); err != nil {
 		return err
 	}
 	switch v {
 	case "google-pagespeed-insights":
-		*e = SourceGooglePagespeedInsightsGooglePagespeedInsights(v)
+		*e = GooglePagespeedInsights(v)
 		return nil
 	default:
-		return fmt.Errorf("invalid value for SourceGooglePagespeedInsightsGooglePagespeedInsights: %v", v)
+		return fmt.Errorf("invalid value for GooglePagespeedInsights: %v", v)
 	}
 }
@@ -98,10 +99,53 @@ type SourceGooglePagespeedInsights struct {
// Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited when using without API Key. Creating and using the API key therefore is recommended. The key is case sensitive.
APIKey *string `json:"api_key,omitempty"`
// Defines which Lighthouse category to run. One or many of: "accessibility", "best-practices", "performance", "pwa", "seo".
- Categories []SourceGooglePagespeedInsightsCategories `json:"categories"`
- SourceType SourceGooglePagespeedInsightsGooglePagespeedInsights `json:"sourceType"`
+ Categories []SourceGooglePagespeedInsightsCategories `json:"categories"`
+ sourceType GooglePagespeedInsights `const:"google-pagespeed-insights" json:"sourceType"`
// The analyses strategy to use. Either "desktop" or "mobile".
Strategies []SourceGooglePagespeedInsightsStrategies `json:"strategies"`
// The URLs to retrieve pagespeed information from. The connector will attempt to sync PageSpeed reports for all the defined URLs. Format: https://(www.)url.domain
Urls []string `json:"urls"`
}
+
+func (s SourceGooglePagespeedInsights) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGooglePagespeedInsights) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGooglePagespeedInsights) GetAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIKey
+}
+
+func (o *SourceGooglePagespeedInsights) GetCategories() []SourceGooglePagespeedInsightsCategories {
+ if o == nil {
+ return []SourceGooglePagespeedInsightsCategories{}
+ }
+ return o.Categories
+}
+
+func (o *SourceGooglePagespeedInsights) GetSourceType() GooglePagespeedInsights {
+ return GooglePagespeedInsightsGooglePagespeedInsights
+}
+
+func (o *SourceGooglePagespeedInsights) GetStrategies() []SourceGooglePagespeedInsightsStrategies {
+ if o == nil {
+ return []SourceGooglePagespeedInsightsStrategies{}
+ }
+ return o.Strategies
+}
+
+func (o *SourceGooglePagespeedInsights) GetUrls() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Urls
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightscreaterequest.go b/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightscreaterequest.go
old mode 100755
new mode 100644
index 281736516..337b04528
--- a/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGooglePagespeedInsightsCreateRequest struct {
Configuration SourceGooglePagespeedInsights `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGooglePagespeedInsightsCreateRequest) GetConfiguration() SourceGooglePagespeedInsights {
+ if o == nil {
+ return SourceGooglePagespeedInsights{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGooglePagespeedInsightsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGooglePagespeedInsightsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGooglePagespeedInsightsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGooglePagespeedInsightsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsputrequest.go b/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsputrequest.go
old mode 100755
new mode 100644
index fa504b269..2a9ceaa5f
--- a/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsputrequest.go
@@ -7,3 +7,24 @@ type SourceGooglePagespeedInsightsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGooglePagespeedInsightsPutRequest) GetConfiguration() SourceGooglePagespeedInsightsUpdate {
+ if o == nil {
+ return SourceGooglePagespeedInsightsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGooglePagespeedInsightsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGooglePagespeedInsightsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsupdate.go b/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsupdate.go
old mode 100755
new mode 100644
index 88e7c88b8..356b91a18
--- a/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglepagespeedinsightsupdate.go
@@ -7,21 +7,21 @@ import (
"fmt"
)
-type SourceGooglePagespeedInsightsUpdateCategories string
+type Categories string
const (
- SourceGooglePagespeedInsightsUpdateCategoriesAccessibility SourceGooglePagespeedInsightsUpdateCategories = "accessibility"
- SourceGooglePagespeedInsightsUpdateCategoriesBestPractices SourceGooglePagespeedInsightsUpdateCategories = "best-practices"
- SourceGooglePagespeedInsightsUpdateCategoriesPerformance SourceGooglePagespeedInsightsUpdateCategories = "performance"
- SourceGooglePagespeedInsightsUpdateCategoriesPwa SourceGooglePagespeedInsightsUpdateCategories = "pwa"
- SourceGooglePagespeedInsightsUpdateCategoriesSeo SourceGooglePagespeedInsightsUpdateCategories = "seo"
+ CategoriesAccessibility Categories = "accessibility"
+ CategoriesBestPractices Categories = "best-practices"
+ CategoriesPerformance Categories = "performance"
+ CategoriesPwa Categories = "pwa"
+ CategoriesSeo Categories = "seo"
)
-func (e SourceGooglePagespeedInsightsUpdateCategories) ToPointer() *SourceGooglePagespeedInsightsUpdateCategories {
+func (e Categories) ToPointer() *Categories {
return &e
}
-func (e *SourceGooglePagespeedInsightsUpdateCategories) UnmarshalJSON(data []byte) error {
+func (e *Categories) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -36,25 +36,25 @@ func (e *SourceGooglePagespeedInsightsUpdateCategories) UnmarshalJSON(data []byt
case "pwa":
fallthrough
case "seo":
- *e = SourceGooglePagespeedInsightsUpdateCategories(v)
+ *e = Categories(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGooglePagespeedInsightsUpdateCategories: %v", v)
+ return fmt.Errorf("invalid value for Categories: %v", v)
}
}
-type SourceGooglePagespeedInsightsUpdateStrategies string
+type Strategies string
const (
- SourceGooglePagespeedInsightsUpdateStrategiesDesktop SourceGooglePagespeedInsightsUpdateStrategies = "desktop"
- SourceGooglePagespeedInsightsUpdateStrategiesMobile SourceGooglePagespeedInsightsUpdateStrategies = "mobile"
+ StrategiesDesktop Strategies = "desktop"
+ StrategiesMobile Strategies = "mobile"
)
-func (e SourceGooglePagespeedInsightsUpdateStrategies) ToPointer() *SourceGooglePagespeedInsightsUpdateStrategies {
+func (e Strategies) ToPointer() *Strategies {
return &e
}
-func (e *SourceGooglePagespeedInsightsUpdateStrategies) UnmarshalJSON(data []byte) error {
+func (e *Strategies) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -63,10 +63,10 @@ func (e *SourceGooglePagespeedInsightsUpdateStrategies) UnmarshalJSON(data []byt
case "desktop":
fallthrough
case "mobile":
- *e = SourceGooglePagespeedInsightsUpdateStrategies(v)
+ *e = Strategies(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGooglePagespeedInsightsUpdateStrategies: %v", v)
+ return fmt.Errorf("invalid value for Strategies: %v", v)
}
}
@@ -74,9 +74,37 @@ type SourceGooglePagespeedInsightsUpdate struct {
// Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited when using without API Key. Creating and using the API key therefore is recommended. The key is case sensitive.
APIKey *string `json:"api_key,omitempty"`
// Defines which Lighthouse category to run. One or many of: "accessibility", "best-practices", "performance", "pwa", "seo".
- Categories []SourceGooglePagespeedInsightsUpdateCategories `json:"categories"`
+ Categories []Categories `json:"categories"`
// The analyses strategy to use. Either "desktop" or "mobile".
- Strategies []SourceGooglePagespeedInsightsUpdateStrategies `json:"strategies"`
+ Strategies []Strategies `json:"strategies"`
// The URLs to retrieve pagespeed information from. The connector will attempt to sync PageSpeed reports for all the defined URLs. Format: https://(www.)url.domain
Urls []string `json:"urls"`
}
+
+func (o *SourceGooglePagespeedInsightsUpdate) GetAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIKey
+}
+
+func (o *SourceGooglePagespeedInsightsUpdate) GetCategories() []Categories {
+ if o == nil {
+ return []Categories{}
+ }
+ return o.Categories
+}
+
+func (o *SourceGooglePagespeedInsightsUpdate) GetStrategies() []Strategies {
+ if o == nil {
+ return []Strategies{}
+ }
+ return o.Strategies
+}
+
+func (o *SourceGooglePagespeedInsightsUpdate) GetUrls() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.Urls
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglesearchconsole.go b/internal/sdk/pkg/models/shared/sourcegooglesearchconsole.go
old mode 100755
new mode 100644
index 7c9e662f0..231ee7276
--- a/internal/sdk/pkg/models/shared/sourcegooglesearchconsole.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglesearchconsole.go
@@ -3,73 +3,102 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthenticationAuthType string
+type SourceGoogleSearchConsoleSchemasAuthType string
const (
- SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthenticationAuthTypeService SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthenticationAuthType = "Service"
+ SourceGoogleSearchConsoleSchemasAuthTypeService SourceGoogleSearchConsoleSchemasAuthType = "Service"
)
-func (e SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthenticationAuthType) ToPointer() *SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthenticationAuthType {
+func (e SourceGoogleSearchConsoleSchemasAuthType) ToPointer() *SourceGoogleSearchConsoleSchemasAuthType {
return &e
}
-func (e *SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthenticationAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleSearchConsoleSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Service":
- *e = SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthenticationAuthType(v)
+ *e = SourceGoogleSearchConsoleSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthenticationAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleSearchConsoleSchemasAuthType: %v", v)
}
}
-type SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication struct {
- AuthType SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthenticationAuthType `json:"auth_type"`
+type SourceGoogleSearchConsoleServiceAccountKeyAuthentication struct {
+ authType SourceGoogleSearchConsoleSchemasAuthType `const:"Service" json:"auth_type"`
// The email of the user which has permissions to access the Google Workspace Admin APIs.
Email string `json:"email"`
// The JSON key of the service account to use for authorization. Read more here.
ServiceAccountInfo string `json:"service_account_info"`
}
-type SourceGoogleSearchConsoleAuthenticationTypeOAuthAuthType string
+func (s SourceGoogleSearchConsoleServiceAccountKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSearchConsoleServiceAccountKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSearchConsoleServiceAccountKeyAuthentication) GetAuthType() SourceGoogleSearchConsoleSchemasAuthType {
+ return SourceGoogleSearchConsoleSchemasAuthTypeService
+}
+
+func (o *SourceGoogleSearchConsoleServiceAccountKeyAuthentication) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+func (o *SourceGoogleSearchConsoleServiceAccountKeyAuthentication) GetServiceAccountInfo() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServiceAccountInfo
+}
+
+type SourceGoogleSearchConsoleAuthType string
const (
- SourceGoogleSearchConsoleAuthenticationTypeOAuthAuthTypeClient SourceGoogleSearchConsoleAuthenticationTypeOAuthAuthType = "Client"
+ SourceGoogleSearchConsoleAuthTypeClient SourceGoogleSearchConsoleAuthType = "Client"
)
-func (e SourceGoogleSearchConsoleAuthenticationTypeOAuthAuthType) ToPointer() *SourceGoogleSearchConsoleAuthenticationTypeOAuthAuthType {
+func (e SourceGoogleSearchConsoleAuthType) ToPointer() *SourceGoogleSearchConsoleAuthType {
return &e
}
-func (e *SourceGoogleSearchConsoleAuthenticationTypeOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleSearchConsoleAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceGoogleSearchConsoleAuthenticationTypeOAuthAuthType(v)
+ *e = SourceGoogleSearchConsoleAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSearchConsoleAuthenticationTypeOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleSearchConsoleAuthType: %v", v)
}
}
-type SourceGoogleSearchConsoleAuthenticationTypeOAuth struct {
+type SourceGoogleSearchConsoleOAuth struct {
// Access token for making authenticated requests. Read more here.
- AccessToken *string `json:"access_token,omitempty"`
- AuthType SourceGoogleSearchConsoleAuthenticationTypeOAuthAuthType `json:"auth_type"`
+ AccessToken *string `json:"access_token,omitempty"`
+ authType SourceGoogleSearchConsoleAuthType `const:"Client" json:"auth_type"`
// The client ID of your Google Search Console developer application. Read more here.
ClientID string `json:"client_id"`
// The client secret of your Google Search Console developer application. Read more here.
@@ -78,56 +107,94 @@ type SourceGoogleSearchConsoleAuthenticationTypeOAuth struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceGoogleSearchConsoleOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSearchConsoleOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSearchConsoleOAuth) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceGoogleSearchConsoleOAuth) GetAuthType() SourceGoogleSearchConsoleAuthType {
+ return SourceGoogleSearchConsoleAuthTypeClient
+}
+
+func (o *SourceGoogleSearchConsoleOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceGoogleSearchConsoleOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGoogleSearchConsoleOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceGoogleSearchConsoleAuthenticationTypeType string
const (
- SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleAuthenticationTypeOAuth SourceGoogleSearchConsoleAuthenticationTypeType = "source-google-search-console_Authentication Type_OAuth"
- SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication SourceGoogleSearchConsoleAuthenticationTypeType = "source-google-search-console_Authentication Type_Service Account Key Authentication"
+ SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleOAuth SourceGoogleSearchConsoleAuthenticationTypeType = "source-google-search-console_OAuth"
+ SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleServiceAccountKeyAuthentication SourceGoogleSearchConsoleAuthenticationTypeType = "source-google-search-console_Service Account Key Authentication"
)
type SourceGoogleSearchConsoleAuthenticationType struct {
- SourceGoogleSearchConsoleAuthenticationTypeOAuth *SourceGoogleSearchConsoleAuthenticationTypeOAuth
- SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication *SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication
+ SourceGoogleSearchConsoleOAuth *SourceGoogleSearchConsoleOAuth
+ SourceGoogleSearchConsoleServiceAccountKeyAuthentication *SourceGoogleSearchConsoleServiceAccountKeyAuthentication
Type SourceGoogleSearchConsoleAuthenticationTypeType
}
-func CreateSourceGoogleSearchConsoleAuthenticationTypeSourceGoogleSearchConsoleAuthenticationTypeOAuth(sourceGoogleSearchConsoleAuthenticationTypeOAuth SourceGoogleSearchConsoleAuthenticationTypeOAuth) SourceGoogleSearchConsoleAuthenticationType {
- typ := SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleAuthenticationTypeOAuth
+func CreateSourceGoogleSearchConsoleAuthenticationTypeSourceGoogleSearchConsoleOAuth(sourceGoogleSearchConsoleOAuth SourceGoogleSearchConsoleOAuth) SourceGoogleSearchConsoleAuthenticationType {
+ typ := SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleOAuth
return SourceGoogleSearchConsoleAuthenticationType{
- SourceGoogleSearchConsoleAuthenticationTypeOAuth: &sourceGoogleSearchConsoleAuthenticationTypeOAuth,
- Type: typ,
+ SourceGoogleSearchConsoleOAuth: &sourceGoogleSearchConsoleOAuth,
+ Type: typ,
}
}
-func CreateSourceGoogleSearchConsoleAuthenticationTypeSourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication(sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication) SourceGoogleSearchConsoleAuthenticationType {
- typ := SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication
+func CreateSourceGoogleSearchConsoleAuthenticationTypeSourceGoogleSearchConsoleServiceAccountKeyAuthentication(sourceGoogleSearchConsoleServiceAccountKeyAuthentication SourceGoogleSearchConsoleServiceAccountKeyAuthentication) SourceGoogleSearchConsoleAuthenticationType {
+ typ := SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleServiceAccountKeyAuthentication
return SourceGoogleSearchConsoleAuthenticationType{
- SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication: &sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication,
+ SourceGoogleSearchConsoleServiceAccountKeyAuthentication: &sourceGoogleSearchConsoleServiceAccountKeyAuthentication,
Type: typ,
}
}
func (u *SourceGoogleSearchConsoleAuthenticationType) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication := new(SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication); err == nil {
- u.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication = sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication
- u.Type = SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication
+
+ sourceGoogleSearchConsoleServiceAccountKeyAuthentication := new(SourceGoogleSearchConsoleServiceAccountKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleSearchConsoleServiceAccountKeyAuthentication, "", true, true); err == nil {
+ u.SourceGoogleSearchConsoleServiceAccountKeyAuthentication = sourceGoogleSearchConsoleServiceAccountKeyAuthentication
+ u.Type = SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleServiceAccountKeyAuthentication
return nil
}
- sourceGoogleSearchConsoleAuthenticationTypeOAuth := new(SourceGoogleSearchConsoleAuthenticationTypeOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleSearchConsoleAuthenticationTypeOAuth); err == nil {
- u.SourceGoogleSearchConsoleAuthenticationTypeOAuth = sourceGoogleSearchConsoleAuthenticationTypeOAuth
- u.Type = SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleAuthenticationTypeOAuth
+ sourceGoogleSearchConsoleOAuth := new(SourceGoogleSearchConsoleOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleSearchConsoleOAuth, "", true, true); err == nil {
+ u.SourceGoogleSearchConsoleOAuth = sourceGoogleSearchConsoleOAuth
+ u.Type = SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleOAuth
return nil
}
@@ -135,33 +202,33 @@ func (u *SourceGoogleSearchConsoleAuthenticationType) UnmarshalJSON(data []byte)
}
func (u SourceGoogleSearchConsoleAuthenticationType) MarshalJSON() ([]byte, error) {
- if u.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication != nil {
- return json.Marshal(u.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication)
+ if u.SourceGoogleSearchConsoleOAuth != nil {
+ return utils.MarshalJSON(u.SourceGoogleSearchConsoleOAuth, "", true)
}
- if u.SourceGoogleSearchConsoleAuthenticationTypeOAuth != nil {
- return json.Marshal(u.SourceGoogleSearchConsoleAuthenticationTypeOAuth)
+ if u.SourceGoogleSearchConsoleServiceAccountKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceGoogleSearchConsoleServiceAccountKeyAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceGoogleSearchConsoleCustomReportConfigValidEnums - An enumeration of dimensions.
-type SourceGoogleSearchConsoleCustomReportConfigValidEnums string
+// SourceGoogleSearchConsoleValidEnums - An enumeration of dimensions.
+type SourceGoogleSearchConsoleValidEnums string
const (
- SourceGoogleSearchConsoleCustomReportConfigValidEnumsCountry SourceGoogleSearchConsoleCustomReportConfigValidEnums = "country"
- SourceGoogleSearchConsoleCustomReportConfigValidEnumsDate SourceGoogleSearchConsoleCustomReportConfigValidEnums = "date"
- SourceGoogleSearchConsoleCustomReportConfigValidEnumsDevice SourceGoogleSearchConsoleCustomReportConfigValidEnums = "device"
- SourceGoogleSearchConsoleCustomReportConfigValidEnumsPage SourceGoogleSearchConsoleCustomReportConfigValidEnums = "page"
- SourceGoogleSearchConsoleCustomReportConfigValidEnumsQuery SourceGoogleSearchConsoleCustomReportConfigValidEnums = "query"
+ SourceGoogleSearchConsoleValidEnumsCountry SourceGoogleSearchConsoleValidEnums = "country"
+ SourceGoogleSearchConsoleValidEnumsDate SourceGoogleSearchConsoleValidEnums = "date"
+ SourceGoogleSearchConsoleValidEnumsDevice SourceGoogleSearchConsoleValidEnums = "device"
+ SourceGoogleSearchConsoleValidEnumsPage SourceGoogleSearchConsoleValidEnums = "page"
+ SourceGoogleSearchConsoleValidEnumsQuery SourceGoogleSearchConsoleValidEnums = "query"
)
-func (e SourceGoogleSearchConsoleCustomReportConfigValidEnums) ToPointer() *SourceGoogleSearchConsoleCustomReportConfigValidEnums {
+func (e SourceGoogleSearchConsoleValidEnums) ToPointer() *SourceGoogleSearchConsoleValidEnums {
return &e
}
-func (e *SourceGoogleSearchConsoleCustomReportConfigValidEnums) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleSearchConsoleValidEnums) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -176,20 +243,34 @@ func (e *SourceGoogleSearchConsoleCustomReportConfigValidEnums) UnmarshalJSON(da
case "page":
fallthrough
case "query":
- *e = SourceGoogleSearchConsoleCustomReportConfigValidEnums(v)
+ *e = SourceGoogleSearchConsoleValidEnums(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSearchConsoleCustomReportConfigValidEnums: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleSearchConsoleValidEnums: %v", v)
}
}
type SourceGoogleSearchConsoleCustomReportConfig struct {
- // A list of dimensions (country, date, device, page, query)
- Dimensions []SourceGoogleSearchConsoleCustomReportConfigValidEnums `json:"dimensions"`
+ // A list of available dimensions. Please note, that for technical reasons `date` is the default dimension which will be included in your query whether you specify it or not. Primary key will consist of your custom dimensions and the default dimension along with `site_url` and `search_type`.
+ Dimensions []SourceGoogleSearchConsoleValidEnums `json:"dimensions"`
// The name of the custom report, this name would be used as stream name
Name string `json:"name"`
}
+func (o *SourceGoogleSearchConsoleCustomReportConfig) GetDimensions() []SourceGoogleSearchConsoleValidEnums {
+ if o == nil {
+ return []SourceGoogleSearchConsoleValidEnums{}
+ }
+ return o.Dimensions
+}
+
+func (o *SourceGoogleSearchConsoleCustomReportConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
// SourceGoogleSearchConsoleDataFreshness - If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.
type SourceGoogleSearchConsoleDataFreshness string
@@ -218,27 +299,27 @@ func (e *SourceGoogleSearchConsoleDataFreshness) UnmarshalJSON(data []byte) erro
}
}
-type SourceGoogleSearchConsoleGoogleSearchConsole string
+type GoogleSearchConsole string
const (
- SourceGoogleSearchConsoleGoogleSearchConsoleGoogleSearchConsole SourceGoogleSearchConsoleGoogleSearchConsole = "google-search-console"
+ GoogleSearchConsoleGoogleSearchConsole GoogleSearchConsole = "google-search-console"
)
-func (e SourceGoogleSearchConsoleGoogleSearchConsole) ToPointer() *SourceGoogleSearchConsoleGoogleSearchConsole {
+func (e GoogleSearchConsole) ToPointer() *GoogleSearchConsole {
return &e
}
-func (e *SourceGoogleSearchConsoleGoogleSearchConsole) UnmarshalJSON(data []byte) error {
+func (e *GoogleSearchConsole) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "google-search-console":
- *e = SourceGoogleSearchConsoleGoogleSearchConsole(v)
+ *e = GoogleSearchConsole(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSearchConsoleGoogleSearchConsole: %v", v)
+ return fmt.Errorf("invalid value for GoogleSearchConsole: %v", v)
}
}
@@ -249,12 +330,76 @@ type SourceGoogleSearchConsole struct {
// You can add your Custom Analytics report by creating one.
CustomReportsArray []SourceGoogleSearchConsoleCustomReportConfig `json:"custom_reports_array,omitempty"`
// If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.
- DataState *SourceGoogleSearchConsoleDataFreshness `json:"data_state,omitempty"`
+ DataState *SourceGoogleSearchConsoleDataFreshness `default:"final" json:"data_state"`
// UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward.
EndDate *types.Date `json:"end_date,omitempty"`
// The URLs of the website property attached to your GSC account. Learn more about properties here.
- SiteUrls []string `json:"site_urls"`
- SourceType SourceGoogleSearchConsoleGoogleSearchConsole `json:"sourceType"`
+ SiteUrls []string `json:"site_urls"`
+ sourceType GoogleSearchConsole `const:"google-search-console" json:"sourceType"`
// UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2021-01-01" json:"start_date"`
+}
+
+func (s SourceGoogleSearchConsole) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSearchConsole) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSearchConsole) GetAuthorization() SourceGoogleSearchConsoleAuthenticationType {
+ if o == nil {
+ return SourceGoogleSearchConsoleAuthenticationType{}
+ }
+ return o.Authorization
+}
+
+func (o *SourceGoogleSearchConsole) GetCustomReports() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReports
+}
+
+func (o *SourceGoogleSearchConsole) GetCustomReportsArray() []SourceGoogleSearchConsoleCustomReportConfig {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReportsArray
+}
+
+func (o *SourceGoogleSearchConsole) GetDataState() *SourceGoogleSearchConsoleDataFreshness {
+ if o == nil {
+ return nil
+ }
+ return o.DataState
+}
+
+func (o *SourceGoogleSearchConsole) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceGoogleSearchConsole) GetSiteUrls() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.SiteUrls
+}
+
+func (o *SourceGoogleSearchConsole) GetSourceType() GoogleSearchConsole {
+ return GoogleSearchConsoleGoogleSearchConsole
+}
+
+func (o *SourceGoogleSearchConsole) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglesearchconsolecreaterequest.go b/internal/sdk/pkg/models/shared/sourcegooglesearchconsolecreaterequest.go
old mode 100755
new mode 100644
index c1eb08057..0dc76d63d
--- a/internal/sdk/pkg/models/shared/sourcegooglesearchconsolecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglesearchconsolecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGoogleSearchConsoleCreateRequest struct {
Configuration SourceGoogleSearchConsole `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleSearchConsoleCreateRequest) GetConfiguration() SourceGoogleSearchConsole {
+ if o == nil {
+ return SourceGoogleSearchConsole{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleSearchConsoleCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGoogleSearchConsoleCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleSearchConsoleCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGoogleSearchConsoleCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleputrequest.go b/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleputrequest.go
old mode 100755
new mode 100644
index af1449419..471aec515
--- a/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleputrequest.go
@@ -7,3 +7,24 @@ type SourceGoogleSearchConsolePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleSearchConsolePutRequest) GetConfiguration() SourceGoogleSearchConsoleUpdate {
+ if o == nil {
+ return SourceGoogleSearchConsoleUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleSearchConsolePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleSearchConsolePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleupdate.go b/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleupdate.go
old mode 100755
new mode 100644
index cd5ebed57..5d4580bea
--- a/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleupdate.go
@@ -3,73 +3,102 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthenticationAuthType string
+type SourceGoogleSearchConsoleUpdateSchemasAuthType string
const (
- SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthenticationAuthTypeService SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthenticationAuthType = "Service"
+ SourceGoogleSearchConsoleUpdateSchemasAuthTypeService SourceGoogleSearchConsoleUpdateSchemasAuthType = "Service"
)
-func (e SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthenticationAuthType) ToPointer() *SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthenticationAuthType {
+func (e SourceGoogleSearchConsoleUpdateSchemasAuthType) ToPointer() *SourceGoogleSearchConsoleUpdateSchemasAuthType {
return &e
}
-func (e *SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthenticationAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleSearchConsoleUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Service":
- *e = SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthenticationAuthType(v)
+ *e = SourceGoogleSearchConsoleUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthenticationAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleSearchConsoleUpdateSchemasAuthType: %v", v)
}
}
-type SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication struct {
- AuthType SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthenticationAuthType `json:"auth_type"`
+type SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication struct {
+ authType SourceGoogleSearchConsoleUpdateSchemasAuthType `const:"Service" json:"auth_type"`
// The email of the user which has permissions to access the Google Workspace Admin APIs.
Email string `json:"email"`
// The JSON key of the service account to use for authorization. Read more here.
ServiceAccountInfo string `json:"service_account_info"`
}
-type SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuthAuthType string
+func (s SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication) GetAuthType() SourceGoogleSearchConsoleUpdateSchemasAuthType {
+ return SourceGoogleSearchConsoleUpdateSchemasAuthTypeService
+}
+
+func (o *SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+func (o *SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication) GetServiceAccountInfo() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServiceAccountInfo
+}
+
+type SourceGoogleSearchConsoleUpdateAuthType string
const (
- SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuthAuthTypeClient SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuthAuthType = "Client"
+ SourceGoogleSearchConsoleUpdateAuthTypeClient SourceGoogleSearchConsoleUpdateAuthType = "Client"
)
-func (e SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuthAuthType) ToPointer() *SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuthAuthType {
+func (e SourceGoogleSearchConsoleUpdateAuthType) ToPointer() *SourceGoogleSearchConsoleUpdateAuthType {
return &e
}
-func (e *SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleSearchConsoleUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuthAuthType(v)
+ *e = SourceGoogleSearchConsoleUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleSearchConsoleUpdateAuthType: %v", v)
}
}
-type SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth struct {
+type SourceGoogleSearchConsoleUpdateOAuth struct {
// Access token for making authenticated requests. Read more here.
- AccessToken *string `json:"access_token,omitempty"`
- AuthType SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuthAuthType `json:"auth_type"`
+ AccessToken *string `json:"access_token,omitempty"`
+ authType SourceGoogleSearchConsoleUpdateAuthType `const:"Client" json:"auth_type"`
// The client ID of your Google Search Console developer application. Read more here.
ClientID string `json:"client_id"`
// The client secret of your Google Search Console developer application. Read more here.
@@ -78,90 +107,128 @@ type SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth struct {
RefreshToken string `json:"refresh_token"`
}
-type SourceGoogleSearchConsoleUpdateAuthenticationTypeType string
+func (s SourceGoogleSearchConsoleUpdateOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSearchConsoleUpdateOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSearchConsoleUpdateOAuth) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceGoogleSearchConsoleUpdateOAuth) GetAuthType() SourceGoogleSearchConsoleUpdateAuthType {
+ return SourceGoogleSearchConsoleUpdateAuthTypeClient
+}
+
+func (o *SourceGoogleSearchConsoleUpdateOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceGoogleSearchConsoleUpdateOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGoogleSearchConsoleUpdateOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+type AuthenticationTypeType string
const (
- SourceGoogleSearchConsoleUpdateAuthenticationTypeTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth SourceGoogleSearchConsoleUpdateAuthenticationTypeType = "source-google-search-console-update_Authentication Type_OAuth"
- SourceGoogleSearchConsoleUpdateAuthenticationTypeTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication SourceGoogleSearchConsoleUpdateAuthenticationTypeType = "source-google-search-console-update_Authentication Type_Service Account Key Authentication"
+ AuthenticationTypeTypeSourceGoogleSearchConsoleUpdateOAuth AuthenticationTypeType = "source-google-search-console-update_OAuth"
+ AuthenticationTypeTypeSourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication AuthenticationTypeType = "source-google-search-console-update_Service Account Key Authentication"
)
-type SourceGoogleSearchConsoleUpdateAuthenticationType struct {
- SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth *SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth
- SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication *SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication
+type AuthenticationType struct {
+ SourceGoogleSearchConsoleUpdateOAuth *SourceGoogleSearchConsoleUpdateOAuth
+ SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication *SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication
- Type SourceGoogleSearchConsoleUpdateAuthenticationTypeType
+ Type AuthenticationTypeType
}
-func CreateSourceGoogleSearchConsoleUpdateAuthenticationTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth(sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth) SourceGoogleSearchConsoleUpdateAuthenticationType {
- typ := SourceGoogleSearchConsoleUpdateAuthenticationTypeTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth
+func CreateAuthenticationTypeSourceGoogleSearchConsoleUpdateOAuth(sourceGoogleSearchConsoleUpdateOAuth SourceGoogleSearchConsoleUpdateOAuth) AuthenticationType {
+ typ := AuthenticationTypeTypeSourceGoogleSearchConsoleUpdateOAuth
- return SourceGoogleSearchConsoleUpdateAuthenticationType{
- SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth: &sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth,
- Type: typ,
+ return AuthenticationType{
+ SourceGoogleSearchConsoleUpdateOAuth: &sourceGoogleSearchConsoleUpdateOAuth,
+ Type: typ,
}
}
-func CreateSourceGoogleSearchConsoleUpdateAuthenticationTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication(sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication) SourceGoogleSearchConsoleUpdateAuthenticationType {
- typ := SourceGoogleSearchConsoleUpdateAuthenticationTypeTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication
+func CreateAuthenticationTypeSourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication(sourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication) AuthenticationType {
+ typ := AuthenticationTypeTypeSourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication
- return SourceGoogleSearchConsoleUpdateAuthenticationType{
- SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication: &sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication,
+ return AuthenticationType{
+ SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication: &sourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication,
Type: typ,
}
}
-func (u *SourceGoogleSearchConsoleUpdateAuthenticationType) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *AuthenticationType) UnmarshalJSON(data []byte) error {
- sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication := new(SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication); err == nil {
- u.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication = sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication
- u.Type = SourceGoogleSearchConsoleUpdateAuthenticationTypeTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication
+ sourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication := new(SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication, "", true, true); err == nil {
+ u.SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication = sourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication
+ u.Type = AuthenticationTypeTypeSourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication
return nil
}
- sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth := new(SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth); err == nil {
- u.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth = sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth
- u.Type = SourceGoogleSearchConsoleUpdateAuthenticationTypeTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth
+ sourceGoogleSearchConsoleUpdateOAuth := new(SourceGoogleSearchConsoleUpdateOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleSearchConsoleUpdateOAuth, "", true, true); err == nil {
+ u.SourceGoogleSearchConsoleUpdateOAuth = sourceGoogleSearchConsoleUpdateOAuth
+ u.Type = AuthenticationTypeTypeSourceGoogleSearchConsoleUpdateOAuth
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceGoogleSearchConsoleUpdateAuthenticationType) MarshalJSON() ([]byte, error) {
- if u.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication != nil {
- return json.Marshal(u.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication)
+func (u AuthenticationType) MarshalJSON() ([]byte, error) {
+ if u.SourceGoogleSearchConsoleUpdateOAuth != nil {
+ return utils.MarshalJSON(u.SourceGoogleSearchConsoleUpdateOAuth, "", true)
}
- if u.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth != nil {
- return json.Marshal(u.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth)
+ if u.SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceGoogleSearchConsoleUpdateServiceAccountKeyAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums - An enumeration of dimensions.
-type SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums string
+// SourceGoogleSearchConsoleUpdateValidEnums - An enumeration of dimensions.
+type SourceGoogleSearchConsoleUpdateValidEnums string
const (
- SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnumsCountry SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = "country"
- SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnumsDate SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = "date"
- SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnumsDevice SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = "device"
- SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnumsPage SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = "page"
- SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnumsQuery SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = "query"
+ SourceGoogleSearchConsoleUpdateValidEnumsCountry SourceGoogleSearchConsoleUpdateValidEnums = "country"
+ SourceGoogleSearchConsoleUpdateValidEnumsDate SourceGoogleSearchConsoleUpdateValidEnums = "date"
+ SourceGoogleSearchConsoleUpdateValidEnumsDevice SourceGoogleSearchConsoleUpdateValidEnums = "device"
+ SourceGoogleSearchConsoleUpdateValidEnumsPage SourceGoogleSearchConsoleUpdateValidEnums = "page"
+ SourceGoogleSearchConsoleUpdateValidEnumsQuery SourceGoogleSearchConsoleUpdateValidEnums = "query"
)
-func (e SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums) ToPointer() *SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums {
+func (e SourceGoogleSearchConsoleUpdateValidEnums) ToPointer() *SourceGoogleSearchConsoleUpdateValidEnums {
return &e
}
-func (e *SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleSearchConsoleUpdateValidEnums) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -176,33 +243,47 @@ func (e *SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums) UnmarshalJ
case "page":
fallthrough
case "query":
- *e = SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums(v)
+ *e = SourceGoogleSearchConsoleUpdateValidEnums(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleSearchConsoleUpdateValidEnums: %v", v)
}
}
type SourceGoogleSearchConsoleUpdateCustomReportConfig struct {
- // A list of dimensions (country, date, device, page, query)
- Dimensions []SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums `json:"dimensions"`
+ // A list of available dimensions. Please note, that for technical reasons `date` is the default dimension which will be included in your query whether you specify it or not. Primary key will consist of your custom dimensions and the default dimension along with `site_url` and `search_type`.
+ Dimensions []SourceGoogleSearchConsoleUpdateValidEnums `json:"dimensions"`
// The name of the custom report, this name would be used as stream name
Name string `json:"name"`
}
-// SourceGoogleSearchConsoleUpdateDataFreshness - If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.
-type SourceGoogleSearchConsoleUpdateDataFreshness string
+func (o *SourceGoogleSearchConsoleUpdateCustomReportConfig) GetDimensions() []SourceGoogleSearchConsoleUpdateValidEnums {
+ if o == nil {
+ return []SourceGoogleSearchConsoleUpdateValidEnums{}
+ }
+ return o.Dimensions
+}
+
+func (o *SourceGoogleSearchConsoleUpdateCustomReportConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+// DataFreshness - If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.
+type DataFreshness string
const (
- SourceGoogleSearchConsoleUpdateDataFreshnessFinal SourceGoogleSearchConsoleUpdateDataFreshness = "final"
- SourceGoogleSearchConsoleUpdateDataFreshnessAll SourceGoogleSearchConsoleUpdateDataFreshness = "all"
+ DataFreshnessFinal DataFreshness = "final"
+ DataFreshnessAll DataFreshness = "all"
)
-func (e SourceGoogleSearchConsoleUpdateDataFreshness) ToPointer() *SourceGoogleSearchConsoleUpdateDataFreshness {
+func (e DataFreshness) ToPointer() *DataFreshness {
return &e
}
-func (e *SourceGoogleSearchConsoleUpdateDataFreshness) UnmarshalJSON(data []byte) error {
+func (e *DataFreshness) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -211,25 +292,85 @@ func (e *SourceGoogleSearchConsoleUpdateDataFreshness) UnmarshalJSON(data []byte
case "final":
fallthrough
case "all":
- *e = SourceGoogleSearchConsoleUpdateDataFreshness(v)
+ *e = DataFreshness(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSearchConsoleUpdateDataFreshness: %v", v)
+ return fmt.Errorf("invalid value for DataFreshness: %v", v)
}
}
type SourceGoogleSearchConsoleUpdate struct {
- Authorization SourceGoogleSearchConsoleUpdateAuthenticationType `json:"authorization"`
+ Authorization AuthenticationType `json:"authorization"`
// (DEPRCATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports.
CustomReports *string `json:"custom_reports,omitempty"`
// You can add your Custom Analytics report by creating one.
CustomReportsArray []SourceGoogleSearchConsoleUpdateCustomReportConfig `json:"custom_reports_array,omitempty"`
// If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.
- DataState *SourceGoogleSearchConsoleUpdateDataFreshness `json:"data_state,omitempty"`
+ DataState *DataFreshness `default:"final" json:"data_state"`
// UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward.
EndDate *types.Date `json:"end_date,omitempty"`
// The URLs of the website property attached to your GSC account. Learn more about properties here.
SiteUrls []string `json:"site_urls"`
// UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2021-01-01" json:"start_date"`
+}
+
+func (s SourceGoogleSearchConsoleUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSearchConsoleUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSearchConsoleUpdate) GetAuthorization() AuthenticationType {
+ if o == nil {
+ return AuthenticationType{}
+ }
+ return o.Authorization
+}
+
+func (o *SourceGoogleSearchConsoleUpdate) GetCustomReports() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReports
+}
+
+func (o *SourceGoogleSearchConsoleUpdate) GetCustomReportsArray() []SourceGoogleSearchConsoleUpdateCustomReportConfig {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReportsArray
+}
+
+func (o *SourceGoogleSearchConsoleUpdate) GetDataState() *DataFreshness {
+ if o == nil {
+ return nil
+ }
+ return o.DataState
+}
+
+func (o *SourceGoogleSearchConsoleUpdate) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceGoogleSearchConsoleUpdate) GetSiteUrls() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.SiteUrls
+}
+
+func (o *SourceGoogleSearchConsoleUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglesheets.go b/internal/sdk/pkg/models/shared/sourcegooglesheets.go
old mode 100755
new mode 100644
index b2b5941e8..b7edb7578
--- a/internal/sdk/pkg/models/shared/sourcegooglesheets.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglesheets.go
@@ -3,70 +3,92 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGoogleSheetsAuthenticationServiceAccountKeyAuthenticationAuthType string
+type SourceGoogleSheetsSchemasAuthType string
const (
- SourceGoogleSheetsAuthenticationServiceAccountKeyAuthenticationAuthTypeService SourceGoogleSheetsAuthenticationServiceAccountKeyAuthenticationAuthType = "Service"
+ SourceGoogleSheetsSchemasAuthTypeService SourceGoogleSheetsSchemasAuthType = "Service"
)
-func (e SourceGoogleSheetsAuthenticationServiceAccountKeyAuthenticationAuthType) ToPointer() *SourceGoogleSheetsAuthenticationServiceAccountKeyAuthenticationAuthType {
+func (e SourceGoogleSheetsSchemasAuthType) ToPointer() *SourceGoogleSheetsSchemasAuthType {
return &e
}
-func (e *SourceGoogleSheetsAuthenticationServiceAccountKeyAuthenticationAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleSheetsSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Service":
- *e = SourceGoogleSheetsAuthenticationServiceAccountKeyAuthenticationAuthType(v)
+ *e = SourceGoogleSheetsSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSheetsAuthenticationServiceAccountKeyAuthenticationAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleSheetsSchemasAuthType: %v", v)
}
}
-// SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication - Credentials for connecting to the Google Sheets API
-type SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication struct {
- AuthType SourceGoogleSheetsAuthenticationServiceAccountKeyAuthenticationAuthType `json:"auth_type"`
+// SourceGoogleSheetsServiceAccountKeyAuthentication - Credentials for connecting to the Google Sheets API
+type SourceGoogleSheetsServiceAccountKeyAuthentication struct {
+ authType SourceGoogleSheetsSchemasAuthType `const:"Service" json:"auth_type"`
// The JSON key of the service account to use for authorization. Read more here.
ServiceAccountInfo string `json:"service_account_info"`
}
-type SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuthAuthType string
+func (s SourceGoogleSheetsServiceAccountKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSheetsServiceAccountKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSheetsServiceAccountKeyAuthentication) GetAuthType() SourceGoogleSheetsSchemasAuthType {
+ return SourceGoogleSheetsSchemasAuthTypeService
+}
+
+func (o *SourceGoogleSheetsServiceAccountKeyAuthentication) GetServiceAccountInfo() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServiceAccountInfo
+}
+
+type SourceGoogleSheetsAuthType string
const (
- SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuthAuthTypeClient SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuthAuthType = "Client"
+ SourceGoogleSheetsAuthTypeClient SourceGoogleSheetsAuthType = "Client"
)
-func (e SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuthAuthType) ToPointer() *SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuthAuthType {
+func (e SourceGoogleSheetsAuthType) ToPointer() *SourceGoogleSheetsAuthType {
return &e
}
-func (e *SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleSheetsAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuthAuthType(v)
+ *e = SourceGoogleSheetsAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleSheetsAuthType: %v", v)
}
}
-// SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth - Credentials for connecting to the Google Sheets API
-type SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth struct {
- AuthType SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuthAuthType `json:"auth_type"`
+// SourceGoogleSheetsAuthenticateViaGoogleOAuth - Credentials for connecting to the Google Sheets API
+type SourceGoogleSheetsAuthenticateViaGoogleOAuth struct {
+ authType SourceGoogleSheetsAuthType `const:"Client" json:"auth_type"`
// Enter your Google application's Client ID. See Google's documentation for more information.
ClientID string `json:"client_id"`
// Enter your Google application's Client Secret. See Google's documentation for more information.
@@ -75,56 +97,87 @@ type SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceGoogleSheetsAuthenticateViaGoogleOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSheetsAuthenticateViaGoogleOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSheetsAuthenticateViaGoogleOAuth) GetAuthType() SourceGoogleSheetsAuthType {
+ return SourceGoogleSheetsAuthTypeClient
+}
+
+func (o *SourceGoogleSheetsAuthenticateViaGoogleOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceGoogleSheetsAuthenticateViaGoogleOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGoogleSheetsAuthenticateViaGoogleOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceGoogleSheetsAuthenticationType string
const (
- SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth SourceGoogleSheetsAuthenticationType = "source-google-sheets_Authentication_Authenticate via Google (OAuth)"
- SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication SourceGoogleSheetsAuthenticationType = "source-google-sheets_Authentication_Service Account Key Authentication"
+ SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticateViaGoogleOAuth SourceGoogleSheetsAuthenticationType = "source-google-sheets_Authenticate via Google (OAuth)"
+ SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsServiceAccountKeyAuthentication SourceGoogleSheetsAuthenticationType = "source-google-sheets_Service Account Key Authentication"
)
type SourceGoogleSheetsAuthentication struct {
- SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth *SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth
- SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication *SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication
+ SourceGoogleSheetsAuthenticateViaGoogleOAuth *SourceGoogleSheetsAuthenticateViaGoogleOAuth
+ SourceGoogleSheetsServiceAccountKeyAuthentication *SourceGoogleSheetsServiceAccountKeyAuthentication
Type SourceGoogleSheetsAuthenticationType
}
-func CreateSourceGoogleSheetsAuthenticationSourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth(sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth) SourceGoogleSheetsAuthentication {
- typ := SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth
+func CreateSourceGoogleSheetsAuthenticationSourceGoogleSheetsAuthenticateViaGoogleOAuth(sourceGoogleSheetsAuthenticateViaGoogleOAuth SourceGoogleSheetsAuthenticateViaGoogleOAuth) SourceGoogleSheetsAuthentication {
+ typ := SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticateViaGoogleOAuth
return SourceGoogleSheetsAuthentication{
- SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth: &sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth,
+ SourceGoogleSheetsAuthenticateViaGoogleOAuth: &sourceGoogleSheetsAuthenticateViaGoogleOAuth,
Type: typ,
}
}
-func CreateSourceGoogleSheetsAuthenticationSourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication(sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication) SourceGoogleSheetsAuthentication {
- typ := SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication
+func CreateSourceGoogleSheetsAuthenticationSourceGoogleSheetsServiceAccountKeyAuthentication(sourceGoogleSheetsServiceAccountKeyAuthentication SourceGoogleSheetsServiceAccountKeyAuthentication) SourceGoogleSheetsAuthentication {
+ typ := SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsServiceAccountKeyAuthentication
return SourceGoogleSheetsAuthentication{
- SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication: &sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication,
+ SourceGoogleSheetsServiceAccountKeyAuthentication: &sourceGoogleSheetsServiceAccountKeyAuthentication,
Type: typ,
}
}
func (u *SourceGoogleSheetsAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication := new(SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication); err == nil {
- u.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication = sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication
- u.Type = SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication
+
+ sourceGoogleSheetsServiceAccountKeyAuthentication := new(SourceGoogleSheetsServiceAccountKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleSheetsServiceAccountKeyAuthentication, "", true, true); err == nil {
+ u.SourceGoogleSheetsServiceAccountKeyAuthentication = sourceGoogleSheetsServiceAccountKeyAuthentication
+ u.Type = SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsServiceAccountKeyAuthentication
return nil
}
- sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth := new(SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth); err == nil {
- u.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth = sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth
- u.Type = SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth
+ sourceGoogleSheetsAuthenticateViaGoogleOAuth := new(SourceGoogleSheetsAuthenticateViaGoogleOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleSheetsAuthenticateViaGoogleOAuth, "", true, true); err == nil {
+ u.SourceGoogleSheetsAuthenticateViaGoogleOAuth = sourceGoogleSheetsAuthenticateViaGoogleOAuth
+ u.Type = SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticateViaGoogleOAuth
return nil
}
@@ -132,15 +185,15 @@ func (u *SourceGoogleSheetsAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceGoogleSheetsAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication != nil {
- return json.Marshal(u.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication)
+ if u.SourceGoogleSheetsAuthenticateViaGoogleOAuth != nil {
+ return utils.MarshalJSON(u.SourceGoogleSheetsAuthenticateViaGoogleOAuth, "", true)
}
- if u.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth != nil {
- return json.Marshal(u.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth)
+ if u.SourceGoogleSheetsServiceAccountKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceGoogleSheetsServiceAccountKeyAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceGoogleSheetsGoogleSheets string
@@ -171,8 +224,44 @@ type SourceGoogleSheets struct {
// Credentials for connecting to the Google Sheets API
Credentials SourceGoogleSheetsAuthentication `json:"credentials"`
// Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based.
- NamesConversion *bool `json:"names_conversion,omitempty"`
- SourceType SourceGoogleSheetsGoogleSheets `json:"sourceType"`
+ NamesConversion *bool `default:"false" json:"names_conversion"`
+ sourceType SourceGoogleSheetsGoogleSheets `const:"google-sheets" json:"sourceType"`
// Enter the link to the Google spreadsheet you want to sync. To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'.
SpreadsheetID string `json:"spreadsheet_id"`
}
+
+func (s SourceGoogleSheets) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSheets) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSheets) GetCredentials() SourceGoogleSheetsAuthentication {
+ if o == nil {
+ return SourceGoogleSheetsAuthentication{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceGoogleSheets) GetNamesConversion() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.NamesConversion
+}
+
+func (o *SourceGoogleSheets) GetSourceType() SourceGoogleSheetsGoogleSheets {
+ return SourceGoogleSheetsGoogleSheetsGoogleSheets
+}
+
+func (o *SourceGoogleSheets) GetSpreadsheetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SpreadsheetID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglesheetscreaterequest.go b/internal/sdk/pkg/models/shared/sourcegooglesheetscreaterequest.go
old mode 100755
new mode 100644
index 3eef2454f..f8431af63
--- a/internal/sdk/pkg/models/shared/sourcegooglesheetscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglesheetscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGoogleSheetsCreateRequest struct {
Configuration SourceGoogleSheets `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleSheetsCreateRequest) GetConfiguration() SourceGoogleSheets {
+ if o == nil {
+ return SourceGoogleSheets{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleSheetsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGoogleSheetsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleSheetsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGoogleSheetsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglesheetsputrequest.go b/internal/sdk/pkg/models/shared/sourcegooglesheetsputrequest.go
old mode 100755
new mode 100644
index f614731ae..7e85c788d
--- a/internal/sdk/pkg/models/shared/sourcegooglesheetsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglesheetsputrequest.go
@@ -7,3 +7,24 @@ type SourceGoogleSheetsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleSheetsPutRequest) GetConfiguration() SourceGoogleSheetsUpdate {
+ if o == nil {
+ return SourceGoogleSheetsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleSheetsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleSheetsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglesheetsupdate.go b/internal/sdk/pkg/models/shared/sourcegooglesheetsupdate.go
old mode 100755
new mode 100644
index 88fb73b21..bf277f4b6
--- a/internal/sdk/pkg/models/shared/sourcegooglesheetsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglesheetsupdate.go
@@ -3,70 +3,92 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthenticationAuthType string
+type SourceGoogleSheetsUpdateSchemasAuthType string
const (
- SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthenticationAuthTypeService SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthenticationAuthType = "Service"
+ SourceGoogleSheetsUpdateSchemasAuthTypeService SourceGoogleSheetsUpdateSchemasAuthType = "Service"
)
-func (e SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthenticationAuthType) ToPointer() *SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthenticationAuthType {
+func (e SourceGoogleSheetsUpdateSchemasAuthType) ToPointer() *SourceGoogleSheetsUpdateSchemasAuthType {
return &e
}
-func (e *SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthenticationAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleSheetsUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Service":
- *e = SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthenticationAuthType(v)
+ *e = SourceGoogleSheetsUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthenticationAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleSheetsUpdateSchemasAuthType: %v", v)
}
}
-// SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication - Credentials for connecting to the Google Sheets API
-type SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication struct {
- AuthType SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthenticationAuthType `json:"auth_type"`
+// SourceGoogleSheetsUpdateServiceAccountKeyAuthentication - Credentials for connecting to the Google Sheets API
+type SourceGoogleSheetsUpdateServiceAccountKeyAuthentication struct {
+ authType SourceGoogleSheetsUpdateSchemasAuthType `const:"Service" json:"auth_type"`
// The JSON key of the service account to use for authorization. Read more here.
ServiceAccountInfo string `json:"service_account_info"`
}
-type SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuthAuthType string
+func (s SourceGoogleSheetsUpdateServiceAccountKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSheetsUpdateServiceAccountKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSheetsUpdateServiceAccountKeyAuthentication) GetAuthType() SourceGoogleSheetsUpdateSchemasAuthType {
+ return SourceGoogleSheetsUpdateSchemasAuthTypeService
+}
+
+func (o *SourceGoogleSheetsUpdateServiceAccountKeyAuthentication) GetServiceAccountInfo() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServiceAccountInfo
+}
+
+type SourceGoogleSheetsUpdateAuthType string
const (
- SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuthAuthTypeClient SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuthAuthType = "Client"
+ SourceGoogleSheetsUpdateAuthTypeClient SourceGoogleSheetsUpdateAuthType = "Client"
)
-func (e SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuthAuthType) ToPointer() *SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuthAuthType {
+func (e SourceGoogleSheetsUpdateAuthType) ToPointer() *SourceGoogleSheetsUpdateAuthType {
return &e
}
-func (e *SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceGoogleSheetsUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuthAuthType(v)
+ *e = SourceGoogleSheetsUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceGoogleSheetsUpdateAuthType: %v", v)
}
}
-// SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth - Credentials for connecting to the Google Sheets API
-type SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth struct {
- AuthType SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuthAuthType `json:"auth_type"`
+// SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth - Credentials for connecting to the Google Sheets API
+type SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth struct {
+ authType SourceGoogleSheetsUpdateAuthType `const:"Client" json:"auth_type"`
// Enter your Google application's Client ID. See Google's documentation for more information.
ClientID string `json:"client_id"`
// Enter your Google application's Client Secret. See Google's documentation for more information.
@@ -75,56 +97,87 @@ type SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth) GetAuthType() SourceGoogleSheetsUpdateAuthType {
+ return SourceGoogleSheetsUpdateAuthTypeClient
+}
+
+func (o *SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceGoogleSheetsUpdateAuthenticationType string
const (
- SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth SourceGoogleSheetsUpdateAuthenticationType = "source-google-sheets-update_Authentication_Authenticate via Google (OAuth)"
- SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication SourceGoogleSheetsUpdateAuthenticationType = "source-google-sheets-update_Authentication_Service Account Key Authentication"
+ SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth SourceGoogleSheetsUpdateAuthenticationType = "source-google-sheets-update_Authenticate via Google (OAuth)"
+ SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateServiceAccountKeyAuthentication SourceGoogleSheetsUpdateAuthenticationType = "source-google-sheets-update_Service Account Key Authentication"
)
type SourceGoogleSheetsUpdateAuthentication struct {
- SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth *SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth
- SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication *SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication
+ SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth *SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth
+ SourceGoogleSheetsUpdateServiceAccountKeyAuthentication *SourceGoogleSheetsUpdateServiceAccountKeyAuthentication
Type SourceGoogleSheetsUpdateAuthenticationType
}
-func CreateSourceGoogleSheetsUpdateAuthenticationSourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth(sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth) SourceGoogleSheetsUpdateAuthentication {
- typ := SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth
+func CreateSourceGoogleSheetsUpdateAuthenticationSourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth(sourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth) SourceGoogleSheetsUpdateAuthentication {
+ typ := SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth
return SourceGoogleSheetsUpdateAuthentication{
- SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth: &sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth,
+ SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth: &sourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth,
Type: typ,
}
}
-func CreateSourceGoogleSheetsUpdateAuthenticationSourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication(sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication) SourceGoogleSheetsUpdateAuthentication {
- typ := SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication
+func CreateSourceGoogleSheetsUpdateAuthenticationSourceGoogleSheetsUpdateServiceAccountKeyAuthentication(sourceGoogleSheetsUpdateServiceAccountKeyAuthentication SourceGoogleSheetsUpdateServiceAccountKeyAuthentication) SourceGoogleSheetsUpdateAuthentication {
+ typ := SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateServiceAccountKeyAuthentication
return SourceGoogleSheetsUpdateAuthentication{
- SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication: &sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication,
+ SourceGoogleSheetsUpdateServiceAccountKeyAuthentication: &sourceGoogleSheetsUpdateServiceAccountKeyAuthentication,
Type: typ,
}
}
func (u *SourceGoogleSheetsUpdateAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication := new(SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication); err == nil {
- u.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication = sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication
- u.Type = SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication
+
+ sourceGoogleSheetsUpdateServiceAccountKeyAuthentication := new(SourceGoogleSheetsUpdateServiceAccountKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleSheetsUpdateServiceAccountKeyAuthentication, "", true, true); err == nil {
+ u.SourceGoogleSheetsUpdateServiceAccountKeyAuthentication = sourceGoogleSheetsUpdateServiceAccountKeyAuthentication
+ u.Type = SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateServiceAccountKeyAuthentication
return nil
}
- sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth := new(SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth); err == nil {
- u.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth = sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth
- u.Type = SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth
+ sourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth := new(SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth, "", true, true); err == nil {
+ u.SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth = sourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth
+ u.Type = SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth
return nil
}
@@ -132,22 +185,54 @@ func (u *SourceGoogleSheetsUpdateAuthentication) UnmarshalJSON(data []byte) erro
}
func (u SourceGoogleSheetsUpdateAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication != nil {
- return json.Marshal(u.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication)
+ if u.SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth != nil {
+ return utils.MarshalJSON(u.SourceGoogleSheetsUpdateAuthenticateViaGoogleOAuth, "", true)
}
- if u.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth != nil {
- return json.Marshal(u.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth)
+ if u.SourceGoogleSheetsUpdateServiceAccountKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceGoogleSheetsUpdateServiceAccountKeyAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceGoogleSheetsUpdate struct {
// Credentials for connecting to the Google Sheets API
Credentials SourceGoogleSheetsUpdateAuthentication `json:"credentials"`
// Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based.
- NamesConversion *bool `json:"names_conversion,omitempty"`
+ NamesConversion *bool `default:"false" json:"names_conversion"`
// Enter the link to the Google spreadsheet you want to sync. To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'.
SpreadsheetID string `json:"spreadsheet_id"`
}
+
+func (s SourceGoogleSheetsUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleSheetsUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleSheetsUpdate) GetCredentials() SourceGoogleSheetsUpdateAuthentication {
+ if o == nil {
+ return SourceGoogleSheetsUpdateAuthentication{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceGoogleSheetsUpdate) GetNamesConversion() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.NamesConversion
+}
+
+func (o *SourceGoogleSheetsUpdate) GetSpreadsheetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SpreadsheetID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglewebfonts.go b/internal/sdk/pkg/models/shared/sourcegooglewebfonts.go
old mode 100755
new mode 100644
index f982f198d..627ab1e15
--- a/internal/sdk/pkg/models/shared/sourcegooglewebfonts.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglewebfonts.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGoogleWebfontsGoogleWebfonts string
+type GoogleWebfonts string
const (
- SourceGoogleWebfontsGoogleWebfontsGoogleWebfonts SourceGoogleWebfontsGoogleWebfonts = "google-webfonts"
+ GoogleWebfontsGoogleWebfonts GoogleWebfonts = "google-webfonts"
)
-func (e SourceGoogleWebfontsGoogleWebfonts) ToPointer() *SourceGoogleWebfontsGoogleWebfonts {
+func (e GoogleWebfonts) ToPointer() *GoogleWebfonts {
return &e
}
-func (e *SourceGoogleWebfontsGoogleWebfonts) UnmarshalJSON(data []byte) error {
+func (e *GoogleWebfonts) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "google-webfonts":
- *e = SourceGoogleWebfontsGoogleWebfonts(v)
+ *e = GoogleWebfonts(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleWebfontsGoogleWebfonts: %v", v)
+ return fmt.Errorf("invalid value for GoogleWebfonts: %v", v)
}
}
@@ -39,6 +40,49 @@ type SourceGoogleWebfonts struct {
// Optional, boolean type
PrettyPrint *string `json:"prettyPrint,omitempty"`
// Optional, to find how to sort
- Sort *string `json:"sort,omitempty"`
- SourceType SourceGoogleWebfontsGoogleWebfonts `json:"sourceType"`
+ Sort *string `json:"sort,omitempty"`
+ sourceType GoogleWebfonts `const:"google-webfonts" json:"sourceType"`
+}
+
+func (s SourceGoogleWebfonts) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleWebfonts) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleWebfonts) GetAlt() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Alt
+}
+
+func (o *SourceGoogleWebfonts) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceGoogleWebfonts) GetPrettyPrint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrettyPrint
+}
+
+func (o *SourceGoogleWebfonts) GetSort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Sort
+}
+
+func (o *SourceGoogleWebfonts) GetSourceType() GoogleWebfonts {
+ return GoogleWebfontsGoogleWebfonts
}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglewebfontscreaterequest.go b/internal/sdk/pkg/models/shared/sourcegooglewebfontscreaterequest.go
old mode 100755
new mode 100644
index 89395ebee..7f1f21e8b
--- a/internal/sdk/pkg/models/shared/sourcegooglewebfontscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglewebfontscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGoogleWebfontsCreateRequest struct {
Configuration SourceGoogleWebfonts `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleWebfontsCreateRequest) GetConfiguration() SourceGoogleWebfonts {
+ if o == nil {
+ return SourceGoogleWebfonts{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleWebfontsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGoogleWebfontsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleWebfontsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGoogleWebfontsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglewebfontsputrequest.go b/internal/sdk/pkg/models/shared/sourcegooglewebfontsputrequest.go
old mode 100755
new mode 100644
index fe344e9cc..992c5f753
--- a/internal/sdk/pkg/models/shared/sourcegooglewebfontsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglewebfontsputrequest.go
@@ -7,3 +7,24 @@ type SourceGoogleWebfontsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleWebfontsPutRequest) GetConfiguration() SourceGoogleWebfontsUpdate {
+ if o == nil {
+ return SourceGoogleWebfontsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleWebfontsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleWebfontsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegooglewebfontsupdate.go b/internal/sdk/pkg/models/shared/sourcegooglewebfontsupdate.go
old mode 100755
new mode 100644
index f03ff5d57..5c4a4659e
--- a/internal/sdk/pkg/models/shared/sourcegooglewebfontsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegooglewebfontsupdate.go
@@ -12,3 +12,31 @@ type SourceGoogleWebfontsUpdate struct {
// Optional, to find how to sort
Sort *string `json:"sort,omitempty"`
}
+
+func (o *SourceGoogleWebfontsUpdate) GetAlt() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Alt
+}
+
+func (o *SourceGoogleWebfontsUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceGoogleWebfontsUpdate) GetPrettyPrint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrettyPrint
+}
+
+func (o *SourceGoogleWebfontsUpdate) GetSort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Sort
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreports.go b/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreports.go
old mode 100755
new mode 100644
index 25892c4d6..ead3844d2
--- a/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreports.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreports.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports string
+type GoogleWorkspaceAdminReports string
const (
- SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports = "google-workspace-admin-reports"
+ GoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports GoogleWorkspaceAdminReports = "google-workspace-admin-reports"
)
-func (e SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports) ToPointer() *SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports {
+func (e GoogleWorkspaceAdminReports) ToPointer() *GoogleWorkspaceAdminReports {
return &e
}
-func (e *SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports) UnmarshalJSON(data []byte) error {
+func (e *GoogleWorkspaceAdminReports) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "google-workspace-admin-reports":
- *e = SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports(v)
+ *e = GoogleWorkspaceAdminReports(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports: %v", v)
+ return fmt.Errorf("invalid value for GoogleWorkspaceAdminReports: %v", v)
}
}
@@ -37,6 +38,42 @@ type SourceGoogleWorkspaceAdminReports struct {
// The email of the user, which has permissions to access the Google Workspace Admin APIs.
Email string `json:"email"`
// Sets the range of time shown in the report. Reports API allows from up to 180 days ago.
- Lookback *int64 `json:"lookback,omitempty"`
- SourceType SourceGoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports `json:"sourceType"`
+ Lookback *int64 `json:"lookback,omitempty"`
+ sourceType GoogleWorkspaceAdminReports `const:"google-workspace-admin-reports" json:"sourceType"`
+}
+
+func (s SourceGoogleWorkspaceAdminReports) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGoogleWorkspaceAdminReports) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGoogleWorkspaceAdminReports) GetCredentialsJSON() string {
+ if o == nil {
+ return ""
+ }
+ return o.CredentialsJSON
+}
+
+func (o *SourceGoogleWorkspaceAdminReports) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+func (o *SourceGoogleWorkspaceAdminReports) GetLookback() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Lookback
+}
+
+func (o *SourceGoogleWorkspaceAdminReports) GetSourceType() GoogleWorkspaceAdminReports {
+ return GoogleWorkspaceAdminReportsGoogleWorkspaceAdminReports
}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportscreaterequest.go b/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportscreaterequest.go
old mode 100755
new mode 100644
index a92453ad1..8959dc954
--- a/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGoogleWorkspaceAdminReportsCreateRequest struct {
Configuration SourceGoogleWorkspaceAdminReports `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleWorkspaceAdminReportsCreateRequest) GetConfiguration() SourceGoogleWorkspaceAdminReports {
+ if o == nil {
+ return SourceGoogleWorkspaceAdminReports{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleWorkspaceAdminReportsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGoogleWorkspaceAdminReportsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleWorkspaceAdminReportsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGoogleWorkspaceAdminReportsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportsputrequest.go b/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportsputrequest.go
old mode 100755
new mode 100644
index b7a2b701c..b7c02b800
--- a/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportsputrequest.go
@@ -7,3 +7,24 @@ type SourceGoogleWorkspaceAdminReportsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGoogleWorkspaceAdminReportsPutRequest) GetConfiguration() SourceGoogleWorkspaceAdminReportsUpdate {
+ if o == nil {
+ return SourceGoogleWorkspaceAdminReportsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGoogleWorkspaceAdminReportsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGoogleWorkspaceAdminReportsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportsupdate.go b/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportsupdate.go
old mode 100755
new mode 100644
index 9c7abe673..1ca63880e
--- a/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegoogleworkspaceadminreportsupdate.go
@@ -10,3 +10,24 @@ type SourceGoogleWorkspaceAdminReportsUpdate struct {
// Sets the range of time shown in the report. Reports API allows from up to 180 days ago.
Lookback *int64 `json:"lookback,omitempty"`
}
+
+func (o *SourceGoogleWorkspaceAdminReportsUpdate) GetCredentialsJSON() string {
+ if o == nil {
+ return ""
+ }
+ return o.CredentialsJSON
+}
+
+func (o *SourceGoogleWorkspaceAdminReportsUpdate) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+func (o *SourceGoogleWorkspaceAdminReportsUpdate) GetLookback() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Lookback
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegreenhouse.go b/internal/sdk/pkg/models/shared/sourcegreenhouse.go
old mode 100755
new mode 100644
index 291d7de32..2e10f4067
--- a/internal/sdk/pkg/models/shared/sourcegreenhouse.go
+++ b/internal/sdk/pkg/models/shared/sourcegreenhouse.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGreenhouseGreenhouse string
+type Greenhouse string
const (
- SourceGreenhouseGreenhouseGreenhouse SourceGreenhouseGreenhouse = "greenhouse"
+ GreenhouseGreenhouse Greenhouse = "greenhouse"
)
-func (e SourceGreenhouseGreenhouse) ToPointer() *SourceGreenhouseGreenhouse {
+func (e Greenhouse) ToPointer() *Greenhouse {
return &e
}
-func (e *SourceGreenhouseGreenhouse) UnmarshalJSON(data []byte) error {
+func (e *Greenhouse) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "greenhouse":
- *e = SourceGreenhouseGreenhouse(v)
+ *e = Greenhouse(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGreenhouseGreenhouse: %v", v)
+ return fmt.Errorf("invalid value for Greenhouse: %v", v)
}
}
type SourceGreenhouse struct {
// Greenhouse API Key. See the docs for more information on how to generate this key.
- APIKey string `json:"api_key"`
- SourceType SourceGreenhouseGreenhouse `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Greenhouse `const:"greenhouse" json:"sourceType"`
+}
+
+func (s SourceGreenhouse) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGreenhouse) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGreenhouse) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceGreenhouse) GetSourceType() Greenhouse {
+ return GreenhouseGreenhouse
}
diff --git a/internal/sdk/pkg/models/shared/sourcegreenhousecreaterequest.go b/internal/sdk/pkg/models/shared/sourcegreenhousecreaterequest.go
old mode 100755
new mode 100644
index 98103be85..b2e83863d
--- a/internal/sdk/pkg/models/shared/sourcegreenhousecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegreenhousecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGreenhouseCreateRequest struct {
Configuration SourceGreenhouse `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGreenhouseCreateRequest) GetConfiguration() SourceGreenhouse {
+ if o == nil {
+ return SourceGreenhouse{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGreenhouseCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGreenhouseCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGreenhouseCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGreenhouseCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegreenhouseputrequest.go b/internal/sdk/pkg/models/shared/sourcegreenhouseputrequest.go
old mode 100755
new mode 100644
index 8d3667ce6..71c2bc58a
--- a/internal/sdk/pkg/models/shared/sourcegreenhouseputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegreenhouseputrequest.go
@@ -7,3 +7,24 @@ type SourceGreenhousePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGreenhousePutRequest) GetConfiguration() SourceGreenhouseUpdate {
+ if o == nil {
+ return SourceGreenhouseUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGreenhousePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGreenhousePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegreenhouseupdate.go b/internal/sdk/pkg/models/shared/sourcegreenhouseupdate.go
old mode 100755
new mode 100644
index fd4f82397..8a11ad8e2
--- a/internal/sdk/pkg/models/shared/sourcegreenhouseupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegreenhouseupdate.go
@@ -6,3 +6,10 @@ type SourceGreenhouseUpdate struct {
// Greenhouse API Key. See the docs for more information on how to generate this key.
APIKey string `json:"api_key"`
}
+
+func (o *SourceGreenhouseUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegridly.go b/internal/sdk/pkg/models/shared/sourcegridly.go
old mode 100755
new mode 100644
index f114a81fc..787ea2aa6
--- a/internal/sdk/pkg/models/shared/sourcegridly.go
+++ b/internal/sdk/pkg/models/shared/sourcegridly.go
@@ -5,35 +5,65 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceGridlyGridly string
+type Gridly string
const (
- SourceGridlyGridlyGridly SourceGridlyGridly = "gridly"
+ GridlyGridly Gridly = "gridly"
)
-func (e SourceGridlyGridly) ToPointer() *SourceGridlyGridly {
+func (e Gridly) ToPointer() *Gridly {
return &e
}
-func (e *SourceGridlyGridly) UnmarshalJSON(data []byte) error {
+func (e *Gridly) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "gridly":
- *e = SourceGridlyGridly(v)
+ *e = Gridly(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceGridlyGridly: %v", v)
+ return fmt.Errorf("invalid value for Gridly: %v", v)
}
}
type SourceGridly struct {
APIKey string `json:"api_key"`
// ID of a grid, or can be ID of a branch
- GridID string `json:"grid_id"`
- SourceType SourceGridlyGridly `json:"sourceType"`
+ GridID string `json:"grid_id"`
+ sourceType Gridly `const:"gridly" json:"sourceType"`
+}
+
+func (s SourceGridly) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceGridly) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceGridly) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceGridly) GetGridID() string {
+ if o == nil {
+ return ""
+ }
+ return o.GridID
+}
+
+func (o *SourceGridly) GetSourceType() Gridly {
+ return GridlyGridly
}
diff --git a/internal/sdk/pkg/models/shared/sourcegridlycreaterequest.go b/internal/sdk/pkg/models/shared/sourcegridlycreaterequest.go
old mode 100755
new mode 100644
index 90ab7fe0a..f0ea43c1b
--- a/internal/sdk/pkg/models/shared/sourcegridlycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegridlycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceGridlyCreateRequest struct {
Configuration SourceGridly `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGridlyCreateRequest) GetConfiguration() SourceGridly {
+ if o == nil {
+ return SourceGridly{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGridlyCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceGridlyCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGridlyCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceGridlyCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegridlyputrequest.go b/internal/sdk/pkg/models/shared/sourcegridlyputrequest.go
old mode 100755
new mode 100644
index c038856d6..dc31b852e
--- a/internal/sdk/pkg/models/shared/sourcegridlyputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcegridlyputrequest.go
@@ -7,3 +7,24 @@ type SourceGridlyPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceGridlyPutRequest) GetConfiguration() SourceGridlyUpdate {
+ if o == nil {
+ return SourceGridlyUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceGridlyPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceGridlyPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcegridlyupdate.go b/internal/sdk/pkg/models/shared/sourcegridlyupdate.go
old mode 100755
new mode 100644
index 490bc109a..7b1c1a927
--- a/internal/sdk/pkg/models/shared/sourcegridlyupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcegridlyupdate.go
@@ -7,3 +7,17 @@ type SourceGridlyUpdate struct {
// ID of a grid, or can be ID of a branch
GridID string `json:"grid_id"`
}
+
+func (o *SourceGridlyUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceGridlyUpdate) GetGridID() string {
+ if o == nil {
+ return ""
+ }
+ return o.GridID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceharvest.go b/internal/sdk/pkg/models/shared/sourceharvest.go
old mode 100755
new mode 100644
index 81c717acf..b20701ed3
--- a/internal/sdk/pkg/models/shared/sourceharvest.go
+++ b/internal/sdk/pkg/models/shared/sourceharvest.go
@@ -3,225 +3,198 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType string
+type SourceHarvestSchemasAuthType string
const (
- SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthTypeToken SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType = "Token"
+ SourceHarvestSchemasAuthTypeToken SourceHarvestSchemasAuthType = "Token"
)
-func (e SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType) ToPointer() *SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType {
+func (e SourceHarvestSchemasAuthType) ToPointer() *SourceHarvestSchemasAuthType {
return &e
}
-func (e *SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceHarvestSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Token":
- *e = SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType(v)
+ *e = SourceHarvestSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceHarvestSchemasAuthType: %v", v)
}
}
-// SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken - Choose how to authenticate to Harvest.
-type SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken struct {
+// SourceHarvestAuthenticateWithPersonalAccessToken - Choose how to authenticate to Harvest.
+type SourceHarvestAuthenticateWithPersonalAccessToken struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// Log into Harvest and then create new personal access token.
- APIToken string `json:"api_token"`
- AuthType *SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType `json:"auth_type,omitempty"`
-
- AdditionalProperties interface{} `json:"-"`
+ APIToken string `json:"api_token"`
+ authType *SourceHarvestSchemasAuthType `const:"Token" json:"auth_type,omitempty"`
}
-type _SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken
-
-func (c *SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken) UnmarshalJSON(bs []byte) error {
- data := _SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceHarvestAuthenticateWithPersonalAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceHarvestAuthenticateWithPersonalAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "api_token")
- delete(additionalFields, "auth_type")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceHarvestAuthenticateWithPersonalAccessToken) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceHarvestAuthenticateWithPersonalAccessToken) GetAPIToken() string {
+ if o == nil {
+ return ""
}
+ return o.APIToken
+}
- return json.Marshal(out)
+func (o *SourceHarvestAuthenticateWithPersonalAccessToken) GetAuthType() *SourceHarvestSchemasAuthType {
+ return SourceHarvestSchemasAuthTypeToken.ToPointer()
}
-type SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType string
+type SourceHarvestAuthType string
const (
- SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuthAuthTypeClient SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType = "Client"
+ SourceHarvestAuthTypeClient SourceHarvestAuthType = "Client"
)
-func (e SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType) ToPointer() *SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType {
+func (e SourceHarvestAuthType) ToPointer() *SourceHarvestAuthType {
return &e
}
-func (e *SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceHarvestAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType(v)
+ *e = SourceHarvestAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceHarvestAuthType: %v", v)
}
}
-// SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth - Choose how to authenticate to Harvest.
-type SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth struct {
- AuthType *SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType `json:"auth_type,omitempty"`
+// SourceHarvestAuthenticateViaHarvestOAuth - Choose how to authenticate to Harvest.
+type SourceHarvestAuthenticateViaHarvestOAuth struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ authType *SourceHarvestAuthType `const:"Client" json:"auth_type,omitempty"`
// The Client ID of your Harvest developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Harvest developer application.
ClientSecret string `json:"client_secret"`
// Refresh Token to renew the expired Access Token.
RefreshToken string `json:"refresh_token"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth
-func (c *SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth) UnmarshalJSON(bs []byte) error {
- data := _SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceHarvestAuthenticateViaHarvestOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceHarvestAuthenticateViaHarvestOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "auth_type")
- delete(additionalFields, "client_id")
- delete(additionalFields, "client_secret")
- delete(additionalFields, "refresh_token")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth(c))
- if err != nil {
- return nil, err
+func (o *SourceHarvestAuthenticateViaHarvestOAuth) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
+func (o *SourceHarvestAuthenticateViaHarvestOAuth) GetAuthType() *SourceHarvestAuthType {
+ return SourceHarvestAuthTypeClient.ToPointer()
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceHarvestAuthenticateViaHarvestOAuth) GetClientID() string {
+ if o == nil {
+ return ""
}
+ return o.ClientID
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceHarvestAuthenticateViaHarvestOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
}
+ return o.ClientSecret
+}
- return json.Marshal(out)
+func (o *SourceHarvestAuthenticateViaHarvestOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
}
type SourceHarvestAuthenticationMechanismType string
const (
- SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth SourceHarvestAuthenticationMechanismType = "source-harvest_Authentication mechanism_Authenticate via Harvest (OAuth)"
- SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken SourceHarvestAuthenticationMechanismType = "source-harvest_Authentication mechanism_Authenticate with Personal Access Token"
+ SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticateViaHarvestOAuth SourceHarvestAuthenticationMechanismType = "source-harvest_Authenticate via Harvest (OAuth)"
+ SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticateWithPersonalAccessToken SourceHarvestAuthenticationMechanismType = "source-harvest_Authenticate with Personal Access Token"
)
type SourceHarvestAuthenticationMechanism struct {
- SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth *SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth
- SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken *SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken
+ SourceHarvestAuthenticateViaHarvestOAuth *SourceHarvestAuthenticateViaHarvestOAuth
+ SourceHarvestAuthenticateWithPersonalAccessToken *SourceHarvestAuthenticateWithPersonalAccessToken
Type SourceHarvestAuthenticationMechanismType
}
-func CreateSourceHarvestAuthenticationMechanismSourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth(sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth) SourceHarvestAuthenticationMechanism {
- typ := SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth
+func CreateSourceHarvestAuthenticationMechanismSourceHarvestAuthenticateViaHarvestOAuth(sourceHarvestAuthenticateViaHarvestOAuth SourceHarvestAuthenticateViaHarvestOAuth) SourceHarvestAuthenticationMechanism {
+ typ := SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticateViaHarvestOAuth
return SourceHarvestAuthenticationMechanism{
- SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth: &sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth,
- Type: typ,
+ SourceHarvestAuthenticateViaHarvestOAuth: &sourceHarvestAuthenticateViaHarvestOAuth,
+ Type: typ,
}
}
-func CreateSourceHarvestAuthenticationMechanismSourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken(sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken) SourceHarvestAuthenticationMechanism {
- typ := SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken
+func CreateSourceHarvestAuthenticationMechanismSourceHarvestAuthenticateWithPersonalAccessToken(sourceHarvestAuthenticateWithPersonalAccessToken SourceHarvestAuthenticateWithPersonalAccessToken) SourceHarvestAuthenticationMechanism {
+ typ := SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticateWithPersonalAccessToken
return SourceHarvestAuthenticationMechanism{
- SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken: &sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken,
+ SourceHarvestAuthenticateWithPersonalAccessToken: &sourceHarvestAuthenticateWithPersonalAccessToken,
Type: typ,
}
}
func (u *SourceHarvestAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil {
- u.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken
- u.Type = SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken
+
+ sourceHarvestAuthenticateWithPersonalAccessToken := new(SourceHarvestAuthenticateWithPersonalAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceHarvestAuthenticateWithPersonalAccessToken, "", true, true); err == nil {
+ u.SourceHarvestAuthenticateWithPersonalAccessToken = sourceHarvestAuthenticateWithPersonalAccessToken
+ u.Type = SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticateWithPersonalAccessToken
return nil
}
- sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth := new(SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth); err == nil {
- u.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth = sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth
- u.Type = SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth
+ sourceHarvestAuthenticateViaHarvestOAuth := new(SourceHarvestAuthenticateViaHarvestOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceHarvestAuthenticateViaHarvestOAuth, "", true, true); err == nil {
+ u.SourceHarvestAuthenticateViaHarvestOAuth = sourceHarvestAuthenticateViaHarvestOAuth
+ u.Type = SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticateViaHarvestOAuth
return nil
}
@@ -229,38 +202,38 @@ func (u *SourceHarvestAuthenticationMechanism) UnmarshalJSON(data []byte) error
}
func (u SourceHarvestAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
- return json.Marshal(u.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken)
+ if u.SourceHarvestAuthenticateViaHarvestOAuth != nil {
+ return utils.MarshalJSON(u.SourceHarvestAuthenticateViaHarvestOAuth, "", true)
}
- if u.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth != nil {
- return json.Marshal(u.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth)
+ if u.SourceHarvestAuthenticateWithPersonalAccessToken != nil {
+ return utils.MarshalJSON(u.SourceHarvestAuthenticateWithPersonalAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceHarvestHarvest string
+type Harvest string
const (
- SourceHarvestHarvestHarvest SourceHarvestHarvest = "harvest"
+ HarvestHarvest Harvest = "harvest"
)
-func (e SourceHarvestHarvest) ToPointer() *SourceHarvestHarvest {
+func (e Harvest) ToPointer() *Harvest {
return &e
}
-func (e *SourceHarvestHarvest) UnmarshalJSON(data []byte) error {
+func (e *Harvest) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "harvest":
- *e = SourceHarvestHarvest(v)
+ *e = Harvest(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceHarvestHarvest: %v", v)
+ return fmt.Errorf("invalid value for Harvest: %v", v)
}
}
@@ -272,6 +245,49 @@ type SourceHarvest struct {
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.
ReplicationEndDate *time.Time `json:"replication_end_date,omitempty"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
- ReplicationStartDate time.Time `json:"replication_start_date"`
- SourceType SourceHarvestHarvest `json:"sourceType"`
+ ReplicationStartDate time.Time `json:"replication_start_date"`
+ sourceType Harvest `const:"harvest" json:"sourceType"`
+}
+
+func (s SourceHarvest) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceHarvest) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceHarvest) GetAccountID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccountID
+}
+
+func (o *SourceHarvest) GetCredentials() *SourceHarvestAuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceHarvest) GetReplicationEndDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicationEndDate
+}
+
+func (o *SourceHarvest) GetReplicationStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.ReplicationStartDate
+}
+
+func (o *SourceHarvest) GetSourceType() Harvest {
+ return HarvestHarvest
}
diff --git a/internal/sdk/pkg/models/shared/sourceharvestcreaterequest.go b/internal/sdk/pkg/models/shared/sourceharvestcreaterequest.go
old mode 100755
new mode 100644
index c2de3745e..f093c5d23
--- a/internal/sdk/pkg/models/shared/sourceharvestcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceharvestcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceHarvestCreateRequest struct {
Configuration SourceHarvest `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceHarvestCreateRequest) GetConfiguration() SourceHarvest {
+ if o == nil {
+ return SourceHarvest{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceHarvestCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceHarvestCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceHarvestCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceHarvestCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceharvestputrequest.go b/internal/sdk/pkg/models/shared/sourceharvestputrequest.go
old mode 100755
new mode 100644
index bb8146b30..2e2f0316f
--- a/internal/sdk/pkg/models/shared/sourceharvestputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceharvestputrequest.go
@@ -7,3 +7,24 @@ type SourceHarvestPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceHarvestPutRequest) GetConfiguration() SourceHarvestUpdate {
+ if o == nil {
+ return SourceHarvestUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceHarvestPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceHarvestPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceharvestupdate.go b/internal/sdk/pkg/models/shared/sourceharvestupdate.go
old mode 100755
new mode 100644
index bc5a19c06..3c046935e
--- a/internal/sdk/pkg/models/shared/sourceharvestupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceharvestupdate.go
@@ -3,225 +3,198 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType string
+type SourceHarvestUpdateSchemasAuthType string
const (
- SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthTypeToken SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType = "Token"
+ SourceHarvestUpdateSchemasAuthTypeToken SourceHarvestUpdateSchemasAuthType = "Token"
)
-func (e SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType) ToPointer() *SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType {
+func (e SourceHarvestUpdateSchemasAuthType) ToPointer() *SourceHarvestUpdateSchemasAuthType {
return &e
}
-func (e *SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceHarvestUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Token":
- *e = SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType(v)
+ *e = SourceHarvestUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceHarvestUpdateSchemasAuthType: %v", v)
}
}
-// SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken - Choose how to authenticate to Harvest.
-type SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken struct {
+// SourceHarvestUpdateAuthenticateWithPersonalAccessToken - Choose how to authenticate to Harvest.
+type SourceHarvestUpdateAuthenticateWithPersonalAccessToken struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// Log into Harvest and then create new personal access token.
- APIToken string `json:"api_token"`
- AuthType *SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessTokenAuthType `json:"auth_type,omitempty"`
-
- AdditionalProperties interface{} `json:"-"`
+ APIToken string `json:"api_token"`
+ authType *SourceHarvestUpdateSchemasAuthType `const:"Token" json:"auth_type,omitempty"`
}
-type _SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken
-
-func (c *SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken) UnmarshalJSON(bs []byte) error {
- data := _SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceHarvestUpdateAuthenticateWithPersonalAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceHarvestUpdateAuthenticateWithPersonalAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "api_token")
- delete(additionalFields, "auth_type")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceHarvestUpdateAuthenticateWithPersonalAccessToken) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceHarvestUpdateAuthenticateWithPersonalAccessToken) GetAPIToken() string {
+ if o == nil {
+ return ""
}
+ return o.APIToken
+}
- return json.Marshal(out)
+func (o *SourceHarvestUpdateAuthenticateWithPersonalAccessToken) GetAuthType() *SourceHarvestUpdateSchemasAuthType {
+ return SourceHarvestUpdateSchemasAuthTypeToken.ToPointer()
}
-type SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType string
+type SourceHarvestUpdateAuthType string
const (
- SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuthAuthTypeClient SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType = "Client"
+ SourceHarvestUpdateAuthTypeClient SourceHarvestUpdateAuthType = "Client"
)
-func (e SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType) ToPointer() *SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType {
+func (e SourceHarvestUpdateAuthType) ToPointer() *SourceHarvestUpdateAuthType {
return &e
}
-func (e *SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceHarvestUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType(v)
+ *e = SourceHarvestUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceHarvestUpdateAuthType: %v", v)
}
}
-// SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth - Choose how to authenticate to Harvest.
-type SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth struct {
- AuthType *SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuthAuthType `json:"auth_type,omitempty"`
+// AuthenticateViaHarvestOAuth - Choose how to authenticate to Harvest.
+type AuthenticateViaHarvestOAuth struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ authType *SourceHarvestUpdateAuthType `const:"Client" json:"auth_type,omitempty"`
// The Client ID of your Harvest developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Harvest developer application.
ClientSecret string `json:"client_secret"`
// Refresh Token to renew the expired Access Token.
RefreshToken string `json:"refresh_token"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth
-func (c *SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth) UnmarshalJSON(bs []byte) error {
- data := _SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth(data)
-
- additionalFields := make(map[string]interface{})
+func (a AuthenticateViaHarvestOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (a *AuthenticateViaHarvestOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
return err
}
- delete(additionalFields, "auth_type")
- delete(additionalFields, "client_id")
- delete(additionalFields, "client_secret")
- delete(additionalFields, "refresh_token")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth(c))
- if err != nil {
- return nil, err
+func (o *AuthenticateViaHarvestOAuth) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
+func (o *AuthenticateViaHarvestOAuth) GetAuthType() *SourceHarvestUpdateAuthType {
+ return SourceHarvestUpdateAuthTypeClient.ToPointer()
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *AuthenticateViaHarvestOAuth) GetClientID() string {
+ if o == nil {
+ return ""
}
+ return o.ClientID
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *AuthenticateViaHarvestOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
}
+ return o.ClientSecret
+}
- return json.Marshal(out)
+func (o *AuthenticateViaHarvestOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
}
type SourceHarvestUpdateAuthenticationMechanismType string
const (
- SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth SourceHarvestUpdateAuthenticationMechanismType = "source-harvest-update_Authentication mechanism_Authenticate via Harvest (OAuth)"
- SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken SourceHarvestUpdateAuthenticationMechanismType = "source-harvest-update_Authentication mechanism_Authenticate with Personal Access Token"
+ SourceHarvestUpdateAuthenticationMechanismTypeAuthenticateViaHarvestOAuth SourceHarvestUpdateAuthenticationMechanismType = "Authenticate via Harvest (OAuth)"
+ SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticateWithPersonalAccessToken SourceHarvestUpdateAuthenticationMechanismType = "source-harvest-update_Authenticate with Personal Access Token"
)
type SourceHarvestUpdateAuthenticationMechanism struct {
- SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth *SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth
- SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken *SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken
+ AuthenticateViaHarvestOAuth *AuthenticateViaHarvestOAuth
+ SourceHarvestUpdateAuthenticateWithPersonalAccessToken *SourceHarvestUpdateAuthenticateWithPersonalAccessToken
Type SourceHarvestUpdateAuthenticationMechanismType
}
-func CreateSourceHarvestUpdateAuthenticationMechanismSourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth(sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth) SourceHarvestUpdateAuthenticationMechanism {
- typ := SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth
+func CreateSourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth(authenticateViaHarvestOAuth AuthenticateViaHarvestOAuth) SourceHarvestUpdateAuthenticationMechanism {
+ typ := SourceHarvestUpdateAuthenticationMechanismTypeAuthenticateViaHarvestOAuth
return SourceHarvestUpdateAuthenticationMechanism{
- SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth: &sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth,
- Type: typ,
+ AuthenticateViaHarvestOAuth: &authenticateViaHarvestOAuth,
+ Type: typ,
}
}
-func CreateSourceHarvestUpdateAuthenticationMechanismSourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken(sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken) SourceHarvestUpdateAuthenticationMechanism {
- typ := SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken
+func CreateSourceHarvestUpdateAuthenticationMechanismSourceHarvestUpdateAuthenticateWithPersonalAccessToken(sourceHarvestUpdateAuthenticateWithPersonalAccessToken SourceHarvestUpdateAuthenticateWithPersonalAccessToken) SourceHarvestUpdateAuthenticationMechanism {
+ typ := SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticateWithPersonalAccessToken
return SourceHarvestUpdateAuthenticationMechanism{
- SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken: &sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken,
+ SourceHarvestUpdateAuthenticateWithPersonalAccessToken: &sourceHarvestUpdateAuthenticateWithPersonalAccessToken,
Type: typ,
}
}
func (u *SourceHarvestUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil {
- u.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken
- u.Type = SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken
+
+ sourceHarvestUpdateAuthenticateWithPersonalAccessToken := new(SourceHarvestUpdateAuthenticateWithPersonalAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceHarvestUpdateAuthenticateWithPersonalAccessToken, "", true, true); err == nil {
+ u.SourceHarvestUpdateAuthenticateWithPersonalAccessToken = sourceHarvestUpdateAuthenticateWithPersonalAccessToken
+ u.Type = SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticateWithPersonalAccessToken
return nil
}
- sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth := new(SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth); err == nil {
- u.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth = sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth
- u.Type = SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth
+ authenticateViaHarvestOAuth := new(AuthenticateViaHarvestOAuth)
+ if err := utils.UnmarshalJSON(data, &authenticateViaHarvestOAuth, "", true, true); err == nil {
+ u.AuthenticateViaHarvestOAuth = authenticateViaHarvestOAuth
+ u.Type = SourceHarvestUpdateAuthenticationMechanismTypeAuthenticateViaHarvestOAuth
return nil
}
@@ -229,15 +202,15 @@ func (u *SourceHarvestUpdateAuthenticationMechanism) UnmarshalJSON(data []byte)
}
func (u SourceHarvestUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil {
- return json.Marshal(u.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken)
+ if u.AuthenticateViaHarvestOAuth != nil {
+ return utils.MarshalJSON(u.AuthenticateViaHarvestOAuth, "", true)
}
- if u.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth != nil {
- return json.Marshal(u.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth)
+ if u.SourceHarvestUpdateAuthenticateWithPersonalAccessToken != nil {
+ return utils.MarshalJSON(u.SourceHarvestUpdateAuthenticateWithPersonalAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceHarvestUpdate struct {
@@ -250,3 +223,42 @@ type SourceHarvestUpdate struct {
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
ReplicationStartDate time.Time `json:"replication_start_date"`
}
+
+func (s SourceHarvestUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceHarvestUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceHarvestUpdate) GetAccountID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccountID
+}
+
+func (o *SourceHarvestUpdate) GetCredentials() *SourceHarvestUpdateAuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceHarvestUpdate) GetReplicationEndDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicationEndDate
+}
+
+func (o *SourceHarvestUpdate) GetReplicationStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.ReplicationStartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcehubplanner.go b/internal/sdk/pkg/models/shared/sourcehubplanner.go
old mode 100755
new mode 100644
index 230cb7d00..26c29bcfc
--- a/internal/sdk/pkg/models/shared/sourcehubplanner.go
+++ b/internal/sdk/pkg/models/shared/sourcehubplanner.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceHubplannerHubplanner string
+type Hubplanner string
const (
- SourceHubplannerHubplannerHubplanner SourceHubplannerHubplanner = "hubplanner"
+ HubplannerHubplanner Hubplanner = "hubplanner"
)
-func (e SourceHubplannerHubplanner) ToPointer() *SourceHubplannerHubplanner {
+func (e Hubplanner) ToPointer() *Hubplanner {
return &e
}
-func (e *SourceHubplannerHubplanner) UnmarshalJSON(data []byte) error {
+func (e *Hubplanner) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "hubplanner":
- *e = SourceHubplannerHubplanner(v)
+ *e = Hubplanner(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceHubplannerHubplanner: %v", v)
+ return fmt.Errorf("invalid value for Hubplanner: %v", v)
}
}
type SourceHubplanner struct {
// Hubplanner API key. See https://github.com/hubplanner/API#authentication for more details.
- APIKey string `json:"api_key"`
- SourceType SourceHubplannerHubplanner `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Hubplanner `const:"hubplanner" json:"sourceType"`
+}
+
+func (s SourceHubplanner) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceHubplanner) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceHubplanner) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceHubplanner) GetSourceType() Hubplanner {
+ return HubplannerHubplanner
}
diff --git a/internal/sdk/pkg/models/shared/sourcehubplannercreaterequest.go b/internal/sdk/pkg/models/shared/sourcehubplannercreaterequest.go
old mode 100755
new mode 100644
index 43eff1535..d98d24186
--- a/internal/sdk/pkg/models/shared/sourcehubplannercreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcehubplannercreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceHubplannerCreateRequest struct {
Configuration SourceHubplanner `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceHubplannerCreateRequest) GetConfiguration() SourceHubplanner {
+ if o == nil {
+ return SourceHubplanner{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceHubplannerCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceHubplannerCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceHubplannerCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceHubplannerCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcehubplannerputrequest.go b/internal/sdk/pkg/models/shared/sourcehubplannerputrequest.go
old mode 100755
new mode 100644
index 8dbbf52f4..86e2b5bf8
--- a/internal/sdk/pkg/models/shared/sourcehubplannerputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcehubplannerputrequest.go
@@ -7,3 +7,24 @@ type SourceHubplannerPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceHubplannerPutRequest) GetConfiguration() SourceHubplannerUpdate {
+ if o == nil {
+ return SourceHubplannerUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceHubplannerPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceHubplannerPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcehubplannerupdate.go b/internal/sdk/pkg/models/shared/sourcehubplannerupdate.go
old mode 100755
new mode 100644
index eaf4fbc3c..cc16f0b8c
--- a/internal/sdk/pkg/models/shared/sourcehubplannerupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcehubplannerupdate.go
@@ -6,3 +6,10 @@ type SourceHubplannerUpdate struct {
// Hubplanner API key. See https://github.com/hubplanner/API#authentication for more details.
APIKey string `json:"api_key"`
}
+
+func (o *SourceHubplannerUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourcehubspot.go b/internal/sdk/pkg/models/shared/sourcehubspot.go
old mode 100755
new mode 100644
index edd2dffda..7cbebce5c
--- a/internal/sdk/pkg/models/shared/sourcehubspot.go
+++ b/internal/sdk/pkg/models/shared/sourcehubspot.go
@@ -3,133 +3,186 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-// SourceHubspotAuthenticationPrivateAppAuthType - Name of the credentials set
-type SourceHubspotAuthenticationPrivateAppAuthType string
+// SourceHubspotSchemasAuthType - Name of the credentials set
+type SourceHubspotSchemasAuthType string
const (
- SourceHubspotAuthenticationPrivateAppAuthTypePrivateAppCredentials SourceHubspotAuthenticationPrivateAppAuthType = "Private App Credentials"
+ SourceHubspotSchemasAuthTypePrivateAppCredentials SourceHubspotSchemasAuthType = "Private App Credentials"
)
-func (e SourceHubspotAuthenticationPrivateAppAuthType) ToPointer() *SourceHubspotAuthenticationPrivateAppAuthType {
+func (e SourceHubspotSchemasAuthType) ToPointer() *SourceHubspotSchemasAuthType {
return &e
}
-func (e *SourceHubspotAuthenticationPrivateAppAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceHubspotSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Private App Credentials":
- *e = SourceHubspotAuthenticationPrivateAppAuthType(v)
+ *e = SourceHubspotSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceHubspotAuthenticationPrivateAppAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceHubspotSchemasAuthType: %v", v)
}
}
-// SourceHubspotAuthenticationPrivateApp - Choose how to authenticate to HubSpot.
-type SourceHubspotAuthenticationPrivateApp struct {
+// SourceHubspotPrivateApp - Choose how to authenticate to HubSpot.
+type SourceHubspotPrivateApp struct {
// HubSpot Access token. See the Hubspot docs if you need help finding this token.
AccessToken string `json:"access_token"`
// Name of the credentials set
- CredentialsTitle SourceHubspotAuthenticationPrivateAppAuthType `json:"credentials_title"`
+ credentialsTitle SourceHubspotSchemasAuthType `const:"Private App Credentials" json:"credentials_title"`
}
-// SourceHubspotAuthenticationOAuthAuthType - Name of the credentials
-type SourceHubspotAuthenticationOAuthAuthType string
+func (s SourceHubspotPrivateApp) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceHubspotPrivateApp) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceHubspotPrivateApp) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceHubspotPrivateApp) GetCredentialsTitle() SourceHubspotSchemasAuthType {
+ return SourceHubspotSchemasAuthTypePrivateAppCredentials
+}
+
+// SourceHubspotAuthType - Name of the credentials
+type SourceHubspotAuthType string
const (
- SourceHubspotAuthenticationOAuthAuthTypeOAuthCredentials SourceHubspotAuthenticationOAuthAuthType = "OAuth Credentials"
+ SourceHubspotAuthTypeOAuthCredentials SourceHubspotAuthType = "OAuth Credentials"
)
-func (e SourceHubspotAuthenticationOAuthAuthType) ToPointer() *SourceHubspotAuthenticationOAuthAuthType {
+func (e SourceHubspotAuthType) ToPointer() *SourceHubspotAuthType {
return &e
}
-func (e *SourceHubspotAuthenticationOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceHubspotAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth Credentials":
- *e = SourceHubspotAuthenticationOAuthAuthType(v)
+ *e = SourceHubspotAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceHubspotAuthenticationOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceHubspotAuthType: %v", v)
}
}
-// SourceHubspotAuthenticationOAuth - Choose how to authenticate to HubSpot.
-type SourceHubspotAuthenticationOAuth struct {
+// SourceHubspotOAuth - Choose how to authenticate to HubSpot.
+type SourceHubspotOAuth struct {
// The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID.
ClientID string `json:"client_id"`
// The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret.
ClientSecret string `json:"client_secret"`
// Name of the credentials
- CredentialsTitle SourceHubspotAuthenticationOAuthAuthType `json:"credentials_title"`
+ credentialsTitle SourceHubspotAuthType `const:"OAuth Credentials" json:"credentials_title"`
// Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.
RefreshToken string `json:"refresh_token"`
}
+func (s SourceHubspotOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceHubspotOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceHubspotOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceHubspotOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceHubspotOAuth) GetCredentialsTitle() SourceHubspotAuthType {
+ return SourceHubspotAuthTypeOAuthCredentials
+}
+
+func (o *SourceHubspotOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceHubspotAuthenticationType string
const (
- SourceHubspotAuthenticationTypeSourceHubspotAuthenticationOAuth SourceHubspotAuthenticationType = "source-hubspot_Authentication_OAuth"
- SourceHubspotAuthenticationTypeSourceHubspotAuthenticationPrivateApp SourceHubspotAuthenticationType = "source-hubspot_Authentication_Private App"
+ SourceHubspotAuthenticationTypeSourceHubspotOAuth SourceHubspotAuthenticationType = "source-hubspot_OAuth"
+ SourceHubspotAuthenticationTypeSourceHubspotPrivateApp SourceHubspotAuthenticationType = "source-hubspot_Private App"
)
type SourceHubspotAuthentication struct {
- SourceHubspotAuthenticationOAuth *SourceHubspotAuthenticationOAuth
- SourceHubspotAuthenticationPrivateApp *SourceHubspotAuthenticationPrivateApp
+ SourceHubspotOAuth *SourceHubspotOAuth
+ SourceHubspotPrivateApp *SourceHubspotPrivateApp
Type SourceHubspotAuthenticationType
}
-func CreateSourceHubspotAuthenticationSourceHubspotAuthenticationOAuth(sourceHubspotAuthenticationOAuth SourceHubspotAuthenticationOAuth) SourceHubspotAuthentication {
- typ := SourceHubspotAuthenticationTypeSourceHubspotAuthenticationOAuth
+func CreateSourceHubspotAuthenticationSourceHubspotOAuth(sourceHubspotOAuth SourceHubspotOAuth) SourceHubspotAuthentication {
+ typ := SourceHubspotAuthenticationTypeSourceHubspotOAuth
return SourceHubspotAuthentication{
- SourceHubspotAuthenticationOAuth: &sourceHubspotAuthenticationOAuth,
- Type: typ,
+ SourceHubspotOAuth: &sourceHubspotOAuth,
+ Type: typ,
}
}
-func CreateSourceHubspotAuthenticationSourceHubspotAuthenticationPrivateApp(sourceHubspotAuthenticationPrivateApp SourceHubspotAuthenticationPrivateApp) SourceHubspotAuthentication {
- typ := SourceHubspotAuthenticationTypeSourceHubspotAuthenticationPrivateApp
+func CreateSourceHubspotAuthenticationSourceHubspotPrivateApp(sourceHubspotPrivateApp SourceHubspotPrivateApp) SourceHubspotAuthentication {
+ typ := SourceHubspotAuthenticationTypeSourceHubspotPrivateApp
return SourceHubspotAuthentication{
- SourceHubspotAuthenticationPrivateApp: &sourceHubspotAuthenticationPrivateApp,
- Type: typ,
+ SourceHubspotPrivateApp: &sourceHubspotPrivateApp,
+ Type: typ,
}
}
func (u *SourceHubspotAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceHubspotAuthenticationPrivateApp := new(SourceHubspotAuthenticationPrivateApp)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceHubspotAuthenticationPrivateApp); err == nil {
- u.SourceHubspotAuthenticationPrivateApp = sourceHubspotAuthenticationPrivateApp
- u.Type = SourceHubspotAuthenticationTypeSourceHubspotAuthenticationPrivateApp
+
+ sourceHubspotPrivateApp := new(SourceHubspotPrivateApp)
+ if err := utils.UnmarshalJSON(data, &sourceHubspotPrivateApp, "", true, true); err == nil {
+ u.SourceHubspotPrivateApp = sourceHubspotPrivateApp
+ u.Type = SourceHubspotAuthenticationTypeSourceHubspotPrivateApp
return nil
}
- sourceHubspotAuthenticationOAuth := new(SourceHubspotAuthenticationOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceHubspotAuthenticationOAuth); err == nil {
- u.SourceHubspotAuthenticationOAuth = sourceHubspotAuthenticationOAuth
- u.Type = SourceHubspotAuthenticationTypeSourceHubspotAuthenticationOAuth
+ sourceHubspotOAuth := new(SourceHubspotOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceHubspotOAuth, "", true, true); err == nil {
+ u.SourceHubspotOAuth = sourceHubspotOAuth
+ u.Type = SourceHubspotAuthenticationTypeSourceHubspotOAuth
return nil
}
@@ -137,45 +190,74 @@ func (u *SourceHubspotAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceHubspotAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceHubspotAuthenticationPrivateApp != nil {
- return json.Marshal(u.SourceHubspotAuthenticationPrivateApp)
+ if u.SourceHubspotOAuth != nil {
+ return utils.MarshalJSON(u.SourceHubspotOAuth, "", true)
}
- if u.SourceHubspotAuthenticationOAuth != nil {
- return json.Marshal(u.SourceHubspotAuthenticationOAuth)
+ if u.SourceHubspotPrivateApp != nil {
+ return utils.MarshalJSON(u.SourceHubspotPrivateApp, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceHubspotHubspot string
+type Hubspot string
const (
- SourceHubspotHubspotHubspot SourceHubspotHubspot = "hubspot"
+ HubspotHubspot Hubspot = "hubspot"
)
-func (e SourceHubspotHubspot) ToPointer() *SourceHubspotHubspot {
+func (e Hubspot) ToPointer() *Hubspot {
return &e
}
-func (e *SourceHubspotHubspot) UnmarshalJSON(data []byte) error {
+func (e *Hubspot) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "hubspot":
- *e = SourceHubspotHubspot(v)
+ *e = Hubspot(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceHubspotHubspot: %v", v)
+ return fmt.Errorf("invalid value for Hubspot: %v", v)
}
}
type SourceHubspot struct {
// Choose how to authenticate to HubSpot.
Credentials SourceHubspotAuthentication `json:"credentials"`
- SourceType SourceHubspotHubspot `json:"sourceType"`
+ sourceType Hubspot `const:"hubspot" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceHubspot) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceHubspot) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceHubspot) GetCredentials() SourceHubspotAuthentication {
+ if o == nil {
+ return SourceHubspotAuthentication{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceHubspot) GetSourceType() Hubspot {
+ return HubspotHubspot
+}
+
+func (o *SourceHubspot) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcehubspotcreaterequest.go b/internal/sdk/pkg/models/shared/sourcehubspotcreaterequest.go
old mode 100755
new mode 100644
index 2148a5023..9e82a4e2c
--- a/internal/sdk/pkg/models/shared/sourcehubspotcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcehubspotcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceHubspotCreateRequest struct {
Configuration SourceHubspot `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceHubspotCreateRequest) GetConfiguration() SourceHubspot {
+ if o == nil {
+ return SourceHubspot{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceHubspotCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceHubspotCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceHubspotCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceHubspotCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcehubspotputrequest.go b/internal/sdk/pkg/models/shared/sourcehubspotputrequest.go
old mode 100755
new mode 100644
index d6acbf8d2..51221fe09
--- a/internal/sdk/pkg/models/shared/sourcehubspotputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcehubspotputrequest.go
@@ -7,3 +7,24 @@ type SourceHubspotPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceHubspotPutRequest) GetConfiguration() SourceHubspotUpdate {
+ if o == nil {
+ return SourceHubspotUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceHubspotPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceHubspotPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcehubspotupdate.go b/internal/sdk/pkg/models/shared/sourcehubspotupdate.go
old mode 100755
new mode 100644
index b754f54f8..be7147549
--- a/internal/sdk/pkg/models/shared/sourcehubspotupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcehubspotupdate.go
@@ -3,133 +3,186 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-// SourceHubspotUpdateAuthenticationPrivateAppAuthType - Name of the credentials set
-type SourceHubspotUpdateAuthenticationPrivateAppAuthType string
+// SourceHubspotUpdateSchemasAuthType - Name of the credentials set
+type SourceHubspotUpdateSchemasAuthType string
const (
- SourceHubspotUpdateAuthenticationPrivateAppAuthTypePrivateAppCredentials SourceHubspotUpdateAuthenticationPrivateAppAuthType = "Private App Credentials"
+ SourceHubspotUpdateSchemasAuthTypePrivateAppCredentials SourceHubspotUpdateSchemasAuthType = "Private App Credentials"
)
-func (e SourceHubspotUpdateAuthenticationPrivateAppAuthType) ToPointer() *SourceHubspotUpdateAuthenticationPrivateAppAuthType {
+func (e SourceHubspotUpdateSchemasAuthType) ToPointer() *SourceHubspotUpdateSchemasAuthType {
return &e
}
-func (e *SourceHubspotUpdateAuthenticationPrivateAppAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceHubspotUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Private App Credentials":
- *e = SourceHubspotUpdateAuthenticationPrivateAppAuthType(v)
+ *e = SourceHubspotUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceHubspotUpdateAuthenticationPrivateAppAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceHubspotUpdateSchemasAuthType: %v", v)
}
}
-// SourceHubspotUpdateAuthenticationPrivateApp - Choose how to authenticate to HubSpot.
-type SourceHubspotUpdateAuthenticationPrivateApp struct {
+// PrivateApp - Choose how to authenticate to HubSpot.
+type PrivateApp struct {
// HubSpot Access token. See the Hubspot docs if you need help finding this token.
AccessToken string `json:"access_token"`
// Name of the credentials set
- CredentialsTitle SourceHubspotUpdateAuthenticationPrivateAppAuthType `json:"credentials_title"`
+ credentialsTitle SourceHubspotUpdateSchemasAuthType `const:"Private App Credentials" json:"credentials_title"`
}
-// SourceHubspotUpdateAuthenticationOAuthAuthType - Name of the credentials
-type SourceHubspotUpdateAuthenticationOAuthAuthType string
+func (p PrivateApp) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(p, "", false)
+}
+
+func (p *PrivateApp) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &p, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *PrivateApp) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *PrivateApp) GetCredentialsTitle() SourceHubspotUpdateSchemasAuthType {
+ return SourceHubspotUpdateSchemasAuthTypePrivateAppCredentials
+}
+
+// SourceHubspotUpdateAuthType - Name of the credentials
+type SourceHubspotUpdateAuthType string
const (
- SourceHubspotUpdateAuthenticationOAuthAuthTypeOAuthCredentials SourceHubspotUpdateAuthenticationOAuthAuthType = "OAuth Credentials"
+ SourceHubspotUpdateAuthTypeOAuthCredentials SourceHubspotUpdateAuthType = "OAuth Credentials"
)
-func (e SourceHubspotUpdateAuthenticationOAuthAuthType) ToPointer() *SourceHubspotUpdateAuthenticationOAuthAuthType {
+func (e SourceHubspotUpdateAuthType) ToPointer() *SourceHubspotUpdateAuthType {
return &e
}
-func (e *SourceHubspotUpdateAuthenticationOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceHubspotUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth Credentials":
- *e = SourceHubspotUpdateAuthenticationOAuthAuthType(v)
+ *e = SourceHubspotUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceHubspotUpdateAuthenticationOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceHubspotUpdateAuthType: %v", v)
}
}
-// SourceHubspotUpdateAuthenticationOAuth - Choose how to authenticate to HubSpot.
-type SourceHubspotUpdateAuthenticationOAuth struct {
+// SourceHubspotUpdateOAuth - Choose how to authenticate to HubSpot.
+type SourceHubspotUpdateOAuth struct {
// The Client ID of your HubSpot developer application. See the Hubspot docs if you need help finding this ID.
ClientID string `json:"client_id"`
// The client secret for your HubSpot developer application. See the Hubspot docs if you need help finding this secret.
ClientSecret string `json:"client_secret"`
// Name of the credentials
- CredentialsTitle SourceHubspotUpdateAuthenticationOAuthAuthType `json:"credentials_title"`
+ credentialsTitle SourceHubspotUpdateAuthType `const:"OAuth Credentials" json:"credentials_title"`
// Refresh token to renew an expired access token. See the Hubspot docs if you need help finding this token.
RefreshToken string `json:"refresh_token"`
}
+func (s SourceHubspotUpdateOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceHubspotUpdateOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceHubspotUpdateOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceHubspotUpdateOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceHubspotUpdateOAuth) GetCredentialsTitle() SourceHubspotUpdateAuthType {
+ return SourceHubspotUpdateAuthTypeOAuthCredentials
+}
+
+func (o *SourceHubspotUpdateOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceHubspotUpdateAuthenticationType string
const (
- SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateAuthenticationOAuth SourceHubspotUpdateAuthenticationType = "source-hubspot-update_Authentication_OAuth"
- SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateAuthenticationPrivateApp SourceHubspotUpdateAuthenticationType = "source-hubspot-update_Authentication_Private App"
+ SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateOAuth SourceHubspotUpdateAuthenticationType = "source-hubspot-update_OAuth"
+ SourceHubspotUpdateAuthenticationTypePrivateApp SourceHubspotUpdateAuthenticationType = "Private App"
)
type SourceHubspotUpdateAuthentication struct {
- SourceHubspotUpdateAuthenticationOAuth *SourceHubspotUpdateAuthenticationOAuth
- SourceHubspotUpdateAuthenticationPrivateApp *SourceHubspotUpdateAuthenticationPrivateApp
+ SourceHubspotUpdateOAuth *SourceHubspotUpdateOAuth
+ PrivateApp *PrivateApp
Type SourceHubspotUpdateAuthenticationType
}
-func CreateSourceHubspotUpdateAuthenticationSourceHubspotUpdateAuthenticationOAuth(sourceHubspotUpdateAuthenticationOAuth SourceHubspotUpdateAuthenticationOAuth) SourceHubspotUpdateAuthentication {
- typ := SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateAuthenticationOAuth
+func CreateSourceHubspotUpdateAuthenticationSourceHubspotUpdateOAuth(sourceHubspotUpdateOAuth SourceHubspotUpdateOAuth) SourceHubspotUpdateAuthentication {
+ typ := SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateOAuth
return SourceHubspotUpdateAuthentication{
- SourceHubspotUpdateAuthenticationOAuth: &sourceHubspotUpdateAuthenticationOAuth,
- Type: typ,
+ SourceHubspotUpdateOAuth: &sourceHubspotUpdateOAuth,
+ Type: typ,
}
}
-func CreateSourceHubspotUpdateAuthenticationSourceHubspotUpdateAuthenticationPrivateApp(sourceHubspotUpdateAuthenticationPrivateApp SourceHubspotUpdateAuthenticationPrivateApp) SourceHubspotUpdateAuthentication {
- typ := SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateAuthenticationPrivateApp
+func CreateSourceHubspotUpdateAuthenticationPrivateApp(privateApp PrivateApp) SourceHubspotUpdateAuthentication {
+ typ := SourceHubspotUpdateAuthenticationTypePrivateApp
return SourceHubspotUpdateAuthentication{
- SourceHubspotUpdateAuthenticationPrivateApp: &sourceHubspotUpdateAuthenticationPrivateApp,
- Type: typ,
+ PrivateApp: &privateApp,
+ Type: typ,
}
}
func (u *SourceHubspotUpdateAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceHubspotUpdateAuthenticationPrivateApp := new(SourceHubspotUpdateAuthenticationPrivateApp)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceHubspotUpdateAuthenticationPrivateApp); err == nil {
- u.SourceHubspotUpdateAuthenticationPrivateApp = sourceHubspotUpdateAuthenticationPrivateApp
- u.Type = SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateAuthenticationPrivateApp
+
+ privateApp := new(PrivateApp)
+ if err := utils.UnmarshalJSON(data, &privateApp, "", true, true); err == nil {
+ u.PrivateApp = privateApp
+ u.Type = SourceHubspotUpdateAuthenticationTypePrivateApp
return nil
}
- sourceHubspotUpdateAuthenticationOAuth := new(SourceHubspotUpdateAuthenticationOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceHubspotUpdateAuthenticationOAuth); err == nil {
- u.SourceHubspotUpdateAuthenticationOAuth = sourceHubspotUpdateAuthenticationOAuth
- u.Type = SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateAuthenticationOAuth
+ sourceHubspotUpdateOAuth := new(SourceHubspotUpdateOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceHubspotUpdateOAuth, "", true, true); err == nil {
+ u.SourceHubspotUpdateOAuth = sourceHubspotUpdateOAuth
+ u.Type = SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateOAuth
return nil
}
@@ -137,15 +190,15 @@ func (u *SourceHubspotUpdateAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceHubspotUpdateAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceHubspotUpdateAuthenticationPrivateApp != nil {
- return json.Marshal(u.SourceHubspotUpdateAuthenticationPrivateApp)
+ if u.SourceHubspotUpdateOAuth != nil {
+ return utils.MarshalJSON(u.SourceHubspotUpdateOAuth, "", true)
}
- if u.SourceHubspotUpdateAuthenticationOAuth != nil {
- return json.Marshal(u.SourceHubspotUpdateAuthenticationOAuth)
+ if u.PrivateApp != nil {
+ return utils.MarshalJSON(u.PrivateApp, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceHubspotUpdate struct {
@@ -154,3 +207,28 @@ type SourceHubspotUpdate struct {
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceHubspotUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceHubspotUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceHubspotUpdate) GetCredentials() SourceHubspotUpdateAuthentication {
+ if o == nil {
+ return SourceHubspotUpdateAuthentication{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceHubspotUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceinsightly.go b/internal/sdk/pkg/models/shared/sourceinsightly.go
old mode 100755
new mode 100644
index 2f01f463f..f8664d24b
--- a/internal/sdk/pkg/models/shared/sourceinsightly.go
+++ b/internal/sdk/pkg/models/shared/sourceinsightly.go
@@ -5,36 +5,66 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceInsightlyInsightly string
+type Insightly string
const (
- SourceInsightlyInsightlyInsightly SourceInsightlyInsightly = "insightly"
+ InsightlyInsightly Insightly = "insightly"
)
-func (e SourceInsightlyInsightly) ToPointer() *SourceInsightlyInsightly {
+func (e Insightly) ToPointer() *Insightly {
return &e
}
-func (e *SourceInsightlyInsightly) UnmarshalJSON(data []byte) error {
+func (e *Insightly) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "insightly":
- *e = SourceInsightlyInsightly(v)
+ *e = Insightly(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceInsightlyInsightly: %v", v)
+ return fmt.Errorf("invalid value for Insightly: %v", v)
}
}
type SourceInsightly struct {
- SourceType SourceInsightlyInsightly `json:"sourceType"`
+ sourceType Insightly `const:"insightly" json:"sourceType"`
// The date from which you'd like to replicate data for Insightly in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. Note that it will be used only for incremental streams.
StartDate *string `json:"start_date"`
// Your Insightly API token.
Token *string `json:"token"`
}
+
+func (s SourceInsightly) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceInsightly) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceInsightly) GetSourceType() Insightly {
+ return InsightlyInsightly
+}
+
+func (o *SourceInsightly) GetStartDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceInsightly) GetToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourceinsightlycreaterequest.go b/internal/sdk/pkg/models/shared/sourceinsightlycreaterequest.go
old mode 100755
new mode 100644
index 83693fb7a..4381f936a
--- a/internal/sdk/pkg/models/shared/sourceinsightlycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceinsightlycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceInsightlyCreateRequest struct {
Configuration SourceInsightly `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceInsightlyCreateRequest) GetConfiguration() SourceInsightly {
+ if o == nil {
+ return SourceInsightly{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceInsightlyCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceInsightlyCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceInsightlyCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceInsightlyCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceinsightlyputrequest.go b/internal/sdk/pkg/models/shared/sourceinsightlyputrequest.go
old mode 100755
new mode 100644
index c3dee3c1a..65bb12b71
--- a/internal/sdk/pkg/models/shared/sourceinsightlyputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceinsightlyputrequest.go
@@ -7,3 +7,24 @@ type SourceInsightlyPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceInsightlyPutRequest) GetConfiguration() SourceInsightlyUpdate {
+ if o == nil {
+ return SourceInsightlyUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceInsightlyPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceInsightlyPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceinsightlyupdate.go b/internal/sdk/pkg/models/shared/sourceinsightlyupdate.go
old mode 100755
new mode 100644
index 57e3a7dfb..055a205d7
--- a/internal/sdk/pkg/models/shared/sourceinsightlyupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceinsightlyupdate.go
@@ -8,3 +8,17 @@ type SourceInsightlyUpdate struct {
// Your Insightly API token.
Token *string `json:"token"`
}
+
+func (o *SourceInsightlyUpdate) GetStartDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceInsightlyUpdate) GetToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourceinstagram.go b/internal/sdk/pkg/models/shared/sourceinstagram.go
old mode 100755
new mode 100644
index d372f1352..f541025dd
--- a/internal/sdk/pkg/models/shared/sourceinstagram.go
+++ b/internal/sdk/pkg/models/shared/sourceinstagram.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceInstagramInstagram string
+type Instagram string
const (
- SourceInstagramInstagramInstagram SourceInstagramInstagram = "instagram"
+ InstagramInstagram Instagram = "instagram"
)
-func (e SourceInstagramInstagram) ToPointer() *SourceInstagramInstagram {
+func (e Instagram) ToPointer() *Instagram {
return &e
}
-func (e *SourceInstagramInstagram) UnmarshalJSON(data []byte) error {
+func (e *Instagram) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "instagram":
- *e = SourceInstagramInstagram(v)
+ *e = Instagram(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceInstagramInstagram: %v", v)
+ return fmt.Errorf("invalid value for Instagram: %v", v)
}
}
@@ -38,8 +39,51 @@ type SourceInstagram struct {
// The Client ID for your Oauth application
ClientID *string `json:"client_id,omitempty"`
// The Client Secret for your Oauth application
- ClientSecret *string `json:"client_secret,omitempty"`
- SourceType SourceInstagramInstagram `json:"sourceType"`
- // The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
- StartDate time.Time `json:"start_date"`
+ ClientSecret *string `json:"client_secret,omitempty"`
+ sourceType Instagram `const:"instagram" json:"sourceType"`
+ // The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. If left blank, the start date will be set to 2 years before the present date.
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceInstagram) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceInstagram) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceInstagram) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceInstagram) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceInstagram) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceInstagram) GetSourceType() Instagram {
+ return InstagramInstagram
+}
+
+func (o *SourceInstagram) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceinstagramcreaterequest.go b/internal/sdk/pkg/models/shared/sourceinstagramcreaterequest.go
old mode 100755
new mode 100644
index 6cf9cad52..cd4346669
--- a/internal/sdk/pkg/models/shared/sourceinstagramcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceinstagramcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceInstagramCreateRequest struct {
Configuration SourceInstagram `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceInstagramCreateRequest) GetConfiguration() SourceInstagram {
+ if o == nil {
+ return SourceInstagram{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceInstagramCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceInstagramCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceInstagramCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceInstagramCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceinstagramputrequest.go b/internal/sdk/pkg/models/shared/sourceinstagramputrequest.go
old mode 100755
new mode 100644
index 2535ebdfb..84d12130b
--- a/internal/sdk/pkg/models/shared/sourceinstagramputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceinstagramputrequest.go
@@ -7,3 +7,24 @@ type SourceInstagramPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceInstagramPutRequest) GetConfiguration() SourceInstagramUpdate {
+ if o == nil {
+ return SourceInstagramUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceInstagramPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceInstagramPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceinstagramupdate.go b/internal/sdk/pkg/models/shared/sourceinstagramupdate.go
old mode 100755
new mode 100644
index ba9e6ba1a..f6ca72990
--- a/internal/sdk/pkg/models/shared/sourceinstagramupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceinstagramupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -13,6 +14,45 @@ type SourceInstagramUpdate struct {
ClientID *string `json:"client_id,omitempty"`
// The Client Secret for your Oauth application
ClientSecret *string `json:"client_secret,omitempty"`
- // The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
- StartDate time.Time `json:"start_date"`
+ // The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. If left blank, the start date will be set to 2 years before the present date.
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceInstagramUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceInstagramUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceInstagramUpdate) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceInstagramUpdate) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceInstagramUpdate) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceInstagramUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceinstatus.go b/internal/sdk/pkg/models/shared/sourceinstatus.go
old mode 100755
new mode 100644
index 43f1f32fa..0f411c361
--- a/internal/sdk/pkg/models/shared/sourceinstatus.go
+++ b/internal/sdk/pkg/models/shared/sourceinstatus.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceInstatusInstatus string
+type Instatus string
const (
- SourceInstatusInstatusInstatus SourceInstatusInstatus = "instatus"
+ InstatusInstatus Instatus = "instatus"
)
-func (e SourceInstatusInstatus) ToPointer() *SourceInstatusInstatus {
+func (e Instatus) ToPointer() *Instatus {
return &e
}
-func (e *SourceInstatusInstatus) UnmarshalJSON(data []byte) error {
+func (e *Instatus) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "instatus":
- *e = SourceInstatusInstatus(v)
+ *e = Instatus(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceInstatusInstatus: %v", v)
+ return fmt.Errorf("invalid value for Instatus: %v", v)
}
}
type SourceInstatus struct {
// Instatus REST API key
- APIKey string `json:"api_key"`
- SourceType SourceInstatusInstatus `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Instatus `const:"instatus" json:"sourceType"`
+}
+
+func (s SourceInstatus) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceInstatus) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceInstatus) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceInstatus) GetSourceType() Instatus {
+ return InstatusInstatus
}
diff --git a/internal/sdk/pkg/models/shared/sourceinstatuscreaterequest.go b/internal/sdk/pkg/models/shared/sourceinstatuscreaterequest.go
old mode 100755
new mode 100644
index 5c69321e4..81689e9df
--- a/internal/sdk/pkg/models/shared/sourceinstatuscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceinstatuscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceInstatusCreateRequest struct {
Configuration SourceInstatus `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceInstatusCreateRequest) GetConfiguration() SourceInstatus {
+ if o == nil {
+ return SourceInstatus{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceInstatusCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceInstatusCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceInstatusCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceInstatusCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceinstatusputrequest.go b/internal/sdk/pkg/models/shared/sourceinstatusputrequest.go
old mode 100755
new mode 100644
index f62b4179f..6d5e9e833
--- a/internal/sdk/pkg/models/shared/sourceinstatusputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceinstatusputrequest.go
@@ -7,3 +7,24 @@ type SourceInstatusPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceInstatusPutRequest) GetConfiguration() SourceInstatusUpdate {
+ if o == nil {
+ return SourceInstatusUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceInstatusPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceInstatusPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceinstatusupdate.go b/internal/sdk/pkg/models/shared/sourceinstatusupdate.go
old mode 100755
new mode 100644
index 2b714285e..e9fb2b07e
--- a/internal/sdk/pkg/models/shared/sourceinstatusupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceinstatusupdate.go
@@ -6,3 +6,10 @@ type SourceInstatusUpdate struct {
// Instatus REST API key
APIKey string `json:"api_key"`
}
+
+func (o *SourceInstatusUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourceintercom.go b/internal/sdk/pkg/models/shared/sourceintercom.go
old mode 100755
new mode 100644
index 375394f1b..4d8d513c5
--- a/internal/sdk/pkg/models/shared/sourceintercom.go
+++ b/internal/sdk/pkg/models/shared/sourceintercom.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceIntercomIntercom string
+type Intercom string
const (
- SourceIntercomIntercomIntercom SourceIntercomIntercom = "intercom"
+ IntercomIntercom Intercom = "intercom"
)
-func (e SourceIntercomIntercom) ToPointer() *SourceIntercomIntercom {
+func (e Intercom) ToPointer() *Intercom {
return &e
}
-func (e *SourceIntercomIntercom) UnmarshalJSON(data []byte) error {
+func (e *Intercom) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "intercom":
- *e = SourceIntercomIntercom(v)
+ *e = Intercom(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceIntercomIntercom: %v", v)
+ return fmt.Errorf("invalid value for Intercom: %v", v)
}
}
@@ -38,8 +39,51 @@ type SourceIntercom struct {
// Client Id for your Intercom application.
ClientID *string `json:"client_id,omitempty"`
// Client Secret for your Intercom application.
- ClientSecret *string `json:"client_secret,omitempty"`
- SourceType SourceIntercomIntercom `json:"sourceType"`
+ ClientSecret *string `json:"client_secret,omitempty"`
+ sourceType Intercom `const:"intercom" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceIntercom) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceIntercom) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceIntercom) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceIntercom) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceIntercom) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceIntercom) GetSourceType() Intercom {
+ return IntercomIntercom
+}
+
+func (o *SourceIntercom) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceintercomcreaterequest.go b/internal/sdk/pkg/models/shared/sourceintercomcreaterequest.go
old mode 100755
new mode 100644
index e6ae1bc17..24c9011f8
--- a/internal/sdk/pkg/models/shared/sourceintercomcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceintercomcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceIntercomCreateRequest struct {
Configuration SourceIntercom `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceIntercomCreateRequest) GetConfiguration() SourceIntercom {
+ if o == nil {
+ return SourceIntercom{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceIntercomCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceIntercomCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceIntercomCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceIntercomCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceintercomputrequest.go b/internal/sdk/pkg/models/shared/sourceintercomputrequest.go
old mode 100755
new mode 100644
index c038869ed..75636765a
--- a/internal/sdk/pkg/models/shared/sourceintercomputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceintercomputrequest.go
@@ -7,3 +7,24 @@ type SourceIntercomPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceIntercomPutRequest) GetConfiguration() SourceIntercomUpdate {
+ if o == nil {
+ return SourceIntercomUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceIntercomPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceIntercomPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceintercomupdate.go b/internal/sdk/pkg/models/shared/sourceintercomupdate.go
old mode 100755
new mode 100644
index f9fe5214e..f6bce5117
--- a/internal/sdk/pkg/models/shared/sourceintercomupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceintercomupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -16,3 +17,42 @@ type SourceIntercomUpdate struct {
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceIntercomUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceIntercomUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceIntercomUpdate) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceIntercomUpdate) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceIntercomUpdate) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceIntercomUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceip2whois.go b/internal/sdk/pkg/models/shared/sourceip2whois.go
old mode 100755
new mode 100644
index 0498f9193..ff3485bc7
--- a/internal/sdk/pkg/models/shared/sourceip2whois.go
+++ b/internal/sdk/pkg/models/shared/sourceip2whois.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceIp2whoisIp2whois string
+type Ip2whois string
const (
- SourceIp2whoisIp2whoisIp2whois SourceIp2whoisIp2whois = "ip2whois"
+ Ip2whoisIp2whois Ip2whois = "ip2whois"
)
-func (e SourceIp2whoisIp2whois) ToPointer() *SourceIp2whoisIp2whois {
+func (e Ip2whois) ToPointer() *Ip2whois {
return &e
}
-func (e *SourceIp2whoisIp2whois) UnmarshalJSON(data []byte) error {
+func (e *Ip2whois) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "ip2whois":
- *e = SourceIp2whoisIp2whois(v)
+ *e = Ip2whois(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceIp2whoisIp2whois: %v", v)
+ return fmt.Errorf("invalid value for Ip2whois: %v", v)
}
}
@@ -35,6 +36,35 @@ type SourceIp2whois struct {
// Your API Key. See here.
APIKey *string `json:"api_key,omitempty"`
// Domain name. See here.
- Domain *string `json:"domain,omitempty"`
- SourceType *SourceIp2whoisIp2whois `json:"sourceType,omitempty"`
+ Domain *string `json:"domain,omitempty"`
+ sourceType *Ip2whois `const:"ip2whois" json:"sourceType,omitempty"`
+}
+
+func (s SourceIp2whois) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceIp2whois) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceIp2whois) GetAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIKey
+}
+
+func (o *SourceIp2whois) GetDomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Domain
+}
+
+func (o *SourceIp2whois) GetSourceType() *Ip2whois {
+ return Ip2whoisIp2whois.ToPointer()
}
diff --git a/internal/sdk/pkg/models/shared/sourceip2whoiscreaterequest.go b/internal/sdk/pkg/models/shared/sourceip2whoiscreaterequest.go
old mode 100755
new mode 100644
index 41c469430..1035fa230
--- a/internal/sdk/pkg/models/shared/sourceip2whoiscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceip2whoiscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceIp2whoisCreateRequest struct {
Configuration SourceIp2whois `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceIp2whoisCreateRequest) GetConfiguration() SourceIp2whois {
+ if o == nil {
+ return SourceIp2whois{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceIp2whoisCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceIp2whoisCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceIp2whoisCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceIp2whoisCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceip2whoisputrequest.go b/internal/sdk/pkg/models/shared/sourceip2whoisputrequest.go
old mode 100755
new mode 100644
index dd0be5a64..9d4f64d74
--- a/internal/sdk/pkg/models/shared/sourceip2whoisputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceip2whoisputrequest.go
@@ -7,3 +7,24 @@ type SourceIp2whoisPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceIp2whoisPutRequest) GetConfiguration() SourceIp2whoisUpdate {
+ if o == nil {
+ return SourceIp2whoisUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceIp2whoisPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceIp2whoisPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceip2whoisupdate.go b/internal/sdk/pkg/models/shared/sourceip2whoisupdate.go
old mode 100755
new mode 100644
index bb1616e82..fb3e85bd8
--- a/internal/sdk/pkg/models/shared/sourceip2whoisupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceip2whoisupdate.go
@@ -8,3 +8,17 @@ type SourceIp2whoisUpdate struct {
// Domain name. See here.
Domain *string `json:"domain,omitempty"`
}
+
+func (o *SourceIp2whoisUpdate) GetAPIKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.APIKey
+}
+
+func (o *SourceIp2whoisUpdate) GetDomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Domain
+}
diff --git a/internal/sdk/pkg/models/shared/sourceiterable.go b/internal/sdk/pkg/models/shared/sourceiterable.go
old mode 100755
new mode 100644
index cf9085b6c..49180fbe5
--- a/internal/sdk/pkg/models/shared/sourceiterable.go
+++ b/internal/sdk/pkg/models/shared/sourceiterable.go
@@ -5,37 +5,67 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceIterableIterable string
+type Iterable string
const (
- SourceIterableIterableIterable SourceIterableIterable = "iterable"
+ IterableIterable Iterable = "iterable"
)
-func (e SourceIterableIterable) ToPointer() *SourceIterableIterable {
+func (e Iterable) ToPointer() *Iterable {
return &e
}
-func (e *SourceIterableIterable) UnmarshalJSON(data []byte) error {
+func (e *Iterable) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "iterable":
- *e = SourceIterableIterable(v)
+ *e = Iterable(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceIterableIterable: %v", v)
+ return fmt.Errorf("invalid value for Iterable: %v", v)
}
}
type SourceIterable struct {
// Iterable API Key. See the docs for more information on how to obtain this key.
- APIKey string `json:"api_key"`
- SourceType SourceIterableIterable `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Iterable `const:"iterable" json:"sourceType"`
// The date from which you'd like to replicate data for Iterable, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceIterable) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceIterable) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceIterable) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceIterable) GetSourceType() Iterable {
+ return IterableIterable
+}
+
+func (o *SourceIterable) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceiterablecreaterequest.go b/internal/sdk/pkg/models/shared/sourceiterablecreaterequest.go
old mode 100755
new mode 100644
index aa100742f..3ad488bff
--- a/internal/sdk/pkg/models/shared/sourceiterablecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceiterablecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceIterableCreateRequest struct {
Configuration SourceIterable `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceIterableCreateRequest) GetConfiguration() SourceIterable {
+ if o == nil {
+ return SourceIterable{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceIterableCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceIterableCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceIterableCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceIterableCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceiterableputrequest.go b/internal/sdk/pkg/models/shared/sourceiterableputrequest.go
old mode 100755
new mode 100644
index 1da6460de..f526ba95d
--- a/internal/sdk/pkg/models/shared/sourceiterableputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceiterableputrequest.go
@@ -7,3 +7,24 @@ type SourceIterablePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceIterablePutRequest) GetConfiguration() SourceIterableUpdate {
+ if o == nil {
+ return SourceIterableUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceIterablePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceIterablePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceiterableupdate.go b/internal/sdk/pkg/models/shared/sourceiterableupdate.go
old mode 100755
new mode 100644
index 4ae48114f..2305174e7
--- a/internal/sdk/pkg/models/shared/sourceiterableupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceiterableupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -12,3 +13,28 @@ type SourceIterableUpdate struct {
// The date from which you'd like to replicate data for Iterable, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceIterableUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceIterableUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceIterableUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceIterableUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcejira.go b/internal/sdk/pkg/models/shared/sourcejira.go
old mode 100755
new mode 100644
index 9b1d6b720..8578a7ea2
--- a/internal/sdk/pkg/models/shared/sourcejira.go
+++ b/internal/sdk/pkg/models/shared/sourcejira.go
@@ -5,30 +5,61 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceJiraJira string
+type SourceJiraIssuesStreamExpandWith string
const (
- SourceJiraJiraJira SourceJiraJira = "jira"
+ SourceJiraIssuesStreamExpandWithRenderedFields SourceJiraIssuesStreamExpandWith = "renderedFields"
+ SourceJiraIssuesStreamExpandWithTransitions SourceJiraIssuesStreamExpandWith = "transitions"
+ SourceJiraIssuesStreamExpandWithChangelog SourceJiraIssuesStreamExpandWith = "changelog"
)
-func (e SourceJiraJira) ToPointer() *SourceJiraJira {
+func (e SourceJiraIssuesStreamExpandWith) ToPointer() *SourceJiraIssuesStreamExpandWith {
return &e
}
-func (e *SourceJiraJira) UnmarshalJSON(data []byte) error {
+func (e *SourceJiraIssuesStreamExpandWith) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "renderedFields":
+ fallthrough
+ case "transitions":
+ fallthrough
+ case "changelog":
+ *e = SourceJiraIssuesStreamExpandWith(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceJiraIssuesStreamExpandWith: %v", v)
+ }
+}
+
+type Jira string
+
+const (
+ JiraJira Jira = "jira"
+)
+
+func (e Jira) ToPointer() *Jira {
+ return &e
+}
+
+func (e *Jira) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "jira":
- *e = SourceJiraJira(v)
+ *e = Jira(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceJiraJira: %v", v)
+ return fmt.Errorf("invalid value for Jira: %v", v)
}
}
@@ -40,14 +71,103 @@ type SourceJira struct {
// The user email for your Jira account which you used to generate the API token. This field is used for Authorization to your account by BasicAuth.
Email string `json:"email"`
// Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info.
- EnableExperimentalStreams *bool `json:"enable_experimental_streams,omitempty"`
- // Expand the changelog when replicating issues.
- ExpandIssueChangelog *bool `json:"expand_issue_changelog,omitempty"`
+ EnableExperimentalStreams *bool `default:"false" json:"enable_experimental_streams"`
+ // (DEPRECATED) Expand the changelog when replicating issues.
+ ExpandIssueChangelog *bool `default:"false" json:"expand_issue_changelog"`
+ // (DEPRECATED) Expand the transitions when replicating issues.
+ ExpandIssueTransition *bool `default:"false" json:"expand_issue_transition"`
+ // Select fields to Expand the `Issues` stream when replicating with:
+ IssuesStreamExpandWith []SourceJiraIssuesStreamExpandWith `json:"issues_stream_expand_with,omitempty"`
// List of Jira project keys to replicate data for, or leave it empty if you want to replicate data for all projects.
Projects []string `json:"projects,omitempty"`
- // Render issue fields in HTML format in addition to Jira JSON-like format.
- RenderFields *bool `json:"render_fields,omitempty"`
- SourceType SourceJiraJira `json:"sourceType"`
+ // (DEPRECATED) Render issue fields in HTML format in addition to Jira JSON-like format.
+ RenderFields *bool `default:"false" json:"render_fields"`
+ sourceType Jira `const:"jira" json:"sourceType"`
// The date from which you want to replicate data from Jira, use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies to certain streams, and only data generated on or after the start date will be replicated. Or leave it empty if you want to replicate all data. For more information, refer to the documentation.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceJira) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceJira) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceJira) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceJira) GetDomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Domain
+}
+
+func (o *SourceJira) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+func (o *SourceJira) GetEnableExperimentalStreams() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.EnableExperimentalStreams
+}
+
+func (o *SourceJira) GetExpandIssueChangelog() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.ExpandIssueChangelog
+}
+
+func (o *SourceJira) GetExpandIssueTransition() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.ExpandIssueTransition
+}
+
+func (o *SourceJira) GetIssuesStreamExpandWith() []SourceJiraIssuesStreamExpandWith {
+ if o == nil {
+ return nil
+ }
+ return o.IssuesStreamExpandWith
+}
+
+func (o *SourceJira) GetProjects() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Projects
+}
+
+func (o *SourceJira) GetRenderFields() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.RenderFields
+}
+
+func (o *SourceJira) GetSourceType() Jira {
+ return JiraJira
+}
+
+func (o *SourceJira) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcejiracreaterequest.go b/internal/sdk/pkg/models/shared/sourcejiracreaterequest.go
old mode 100755
new mode 100644
index 5758f0800..5699d5761
--- a/internal/sdk/pkg/models/shared/sourcejiracreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcejiracreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceJiraCreateRequest struct {
Configuration SourceJira `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceJiraCreateRequest) GetConfiguration() SourceJira {
+ if o == nil {
+ return SourceJira{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceJiraCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceJiraCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceJiraCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceJiraCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcejiraputrequest.go b/internal/sdk/pkg/models/shared/sourcejiraputrequest.go
old mode 100755
new mode 100644
index 3bbc1d2f8..c7ea04140
--- a/internal/sdk/pkg/models/shared/sourcejiraputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcejiraputrequest.go
@@ -7,3 +7,24 @@ type SourceJiraPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceJiraPutRequest) GetConfiguration() SourceJiraUpdate {
+ if o == nil {
+ return SourceJiraUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceJiraPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceJiraPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcejiraupdate.go b/internal/sdk/pkg/models/shared/sourcejiraupdate.go
old mode 100755
new mode 100644
index 449571b0a..3aebb90ef
--- a/internal/sdk/pkg/models/shared/sourcejiraupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcejiraupdate.go
@@ -3,9 +3,42 @@
package shared
import (
+ "encoding/json"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
+type IssuesStreamExpandWith string
+
+const (
+ IssuesStreamExpandWithRenderedFields IssuesStreamExpandWith = "renderedFields"
+ IssuesStreamExpandWithTransitions IssuesStreamExpandWith = "transitions"
+ IssuesStreamExpandWithChangelog IssuesStreamExpandWith = "changelog"
+)
+
+func (e IssuesStreamExpandWith) ToPointer() *IssuesStreamExpandWith {
+ return &e
+}
+
+func (e *IssuesStreamExpandWith) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "renderedFields":
+ fallthrough
+ case "transitions":
+ fallthrough
+ case "changelog":
+ *e = IssuesStreamExpandWith(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for IssuesStreamExpandWith: %v", v)
+ }
+}
+
type SourceJiraUpdate struct {
// Jira API Token. See the docs for more information on how to generate this key. API Token is used for Authorization to your account by BasicAuth.
APIToken string `json:"api_token"`
@@ -14,13 +47,98 @@ type SourceJiraUpdate struct {
// The user email for your Jira account which you used to generate the API token. This field is used for Authorization to your account by BasicAuth.
Email string `json:"email"`
// Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info.
- EnableExperimentalStreams *bool `json:"enable_experimental_streams,omitempty"`
- // Expand the changelog when replicating issues.
- ExpandIssueChangelog *bool `json:"expand_issue_changelog,omitempty"`
+ EnableExperimentalStreams *bool `default:"false" json:"enable_experimental_streams"`
+ // (DEPRECATED) Expand the changelog when replicating issues.
+ ExpandIssueChangelog *bool `default:"false" json:"expand_issue_changelog"`
+ // (DEPRECATED) Expand the transitions when replicating issues.
+ ExpandIssueTransition *bool `default:"false" json:"expand_issue_transition"`
+ // Select fields to Expand the `Issues` stream when replicating with:
+ IssuesStreamExpandWith []IssuesStreamExpandWith `json:"issues_stream_expand_with,omitempty"`
// List of Jira project keys to replicate data for, or leave it empty if you want to replicate data for all projects.
Projects []string `json:"projects,omitempty"`
- // Render issue fields in HTML format in addition to Jira JSON-like format.
- RenderFields *bool `json:"render_fields,omitempty"`
+ // (DEPRECATED) Render issue fields in HTML format in addition to Jira JSON-like format.
+ RenderFields *bool `default:"false" json:"render_fields"`
// The date from which you want to replicate data from Jira, use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies to certain streams, and only data generated on or after the start date will be replicated. Or leave it empty if you want to replicate all data. For more information, refer to the documentation.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceJiraUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceJiraUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceJiraUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceJiraUpdate) GetDomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Domain
+}
+
+func (o *SourceJiraUpdate) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+func (o *SourceJiraUpdate) GetEnableExperimentalStreams() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.EnableExperimentalStreams
+}
+
+func (o *SourceJiraUpdate) GetExpandIssueChangelog() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.ExpandIssueChangelog
+}
+
+func (o *SourceJiraUpdate) GetExpandIssueTransition() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.ExpandIssueTransition
+}
+
+func (o *SourceJiraUpdate) GetIssuesStreamExpandWith() []IssuesStreamExpandWith {
+ if o == nil {
+ return nil
+ }
+ return o.IssuesStreamExpandWith
+}
+
+func (o *SourceJiraUpdate) GetProjects() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Projects
+}
+
+func (o *SourceJiraUpdate) GetRenderFields() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.RenderFields
+}
+
+func (o *SourceJiraUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcek6cloud.go b/internal/sdk/pkg/models/shared/sourcek6cloud.go
old mode 100755
new mode 100644
index 597f6c8fc..d15271513
--- a/internal/sdk/pkg/models/shared/sourcek6cloud.go
+++ b/internal/sdk/pkg/models/shared/sourcek6cloud.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceK6CloudK6Cloud string
+type K6Cloud string
const (
- SourceK6CloudK6CloudK6Cloud SourceK6CloudK6Cloud = "k6-cloud"
+ K6CloudK6Cloud K6Cloud = "k6-cloud"
)
-func (e SourceK6CloudK6Cloud) ToPointer() *SourceK6CloudK6Cloud {
+func (e K6Cloud) ToPointer() *K6Cloud {
return &e
}
-func (e *SourceK6CloudK6Cloud) UnmarshalJSON(data []byte) error {
+func (e *K6Cloud) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "k6-cloud":
- *e = SourceK6CloudK6Cloud(v)
+ *e = K6Cloud(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceK6CloudK6Cloud: %v", v)
+ return fmt.Errorf("invalid value for K6Cloud: %v", v)
}
}
type SourceK6Cloud struct {
// Your API Token. See here. The key is case sensitive.
- APIToken string `json:"api_token"`
- SourceType SourceK6CloudK6Cloud `json:"sourceType"`
+ APIToken string `json:"api_token"`
+ sourceType K6Cloud `const:"k6-cloud" json:"sourceType"`
+}
+
+func (s SourceK6Cloud) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceK6Cloud) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceK6Cloud) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceK6Cloud) GetSourceType() K6Cloud {
+ return K6CloudK6Cloud
}
diff --git a/internal/sdk/pkg/models/shared/sourcek6cloudcreaterequest.go b/internal/sdk/pkg/models/shared/sourcek6cloudcreaterequest.go
old mode 100755
new mode 100644
index ad56678a5..46ffa24dd
--- a/internal/sdk/pkg/models/shared/sourcek6cloudcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcek6cloudcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceK6CloudCreateRequest struct {
Configuration SourceK6Cloud `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceK6CloudCreateRequest) GetConfiguration() SourceK6Cloud {
+ if o == nil {
+ return SourceK6Cloud{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceK6CloudCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceK6CloudCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceK6CloudCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceK6CloudCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcek6cloudputrequest.go b/internal/sdk/pkg/models/shared/sourcek6cloudputrequest.go
old mode 100755
new mode 100644
index eb2177108..8cdef846b
--- a/internal/sdk/pkg/models/shared/sourcek6cloudputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcek6cloudputrequest.go
@@ -7,3 +7,24 @@ type SourceK6CloudPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceK6CloudPutRequest) GetConfiguration() SourceK6CloudUpdate {
+ if o == nil {
+ return SourceK6CloudUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceK6CloudPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceK6CloudPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcek6cloudupdate.go b/internal/sdk/pkg/models/shared/sourcek6cloudupdate.go
old mode 100755
new mode 100644
index 96a73f3b3..bc710ace5
--- a/internal/sdk/pkg/models/shared/sourcek6cloudupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcek6cloudupdate.go
@@ -6,3 +6,10 @@ type SourceK6CloudUpdate struct {
// Your API Token. See here. The key is case sensitive.
APIToken string `json:"api_token"`
}
+
+func (o *SourceK6CloudUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
diff --git a/internal/sdk/pkg/models/shared/sourceklarna.go b/internal/sdk/pkg/models/shared/sourceklarna.go
old mode 100755
new mode 100644
index b1b7278c9..8427ed76c
--- a/internal/sdk/pkg/models/shared/sourceklarna.go
+++ b/internal/sdk/pkg/models/shared/sourceklarna.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourceKlarnaRegion - Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'
@@ -38,27 +39,27 @@ func (e *SourceKlarnaRegion) UnmarshalJSON(data []byte) error {
}
}
-type SourceKlarnaKlarna string
+type Klarna string
const (
- SourceKlarnaKlarnaKlarna SourceKlarnaKlarna = "klarna"
+ KlarnaKlarna Klarna = "klarna"
)
-func (e SourceKlarnaKlarna) ToPointer() *SourceKlarnaKlarna {
+func (e Klarna) ToPointer() *Klarna {
return &e
}
-func (e *SourceKlarnaKlarna) UnmarshalJSON(data []byte) error {
+func (e *Klarna) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "klarna":
- *e = SourceKlarnaKlarna(v)
+ *e = Klarna(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceKlarnaKlarna: %v", v)
+ return fmt.Errorf("invalid value for Klarna: %v", v)
}
}
@@ -66,10 +67,53 @@ type SourceKlarna struct {
// A string which is associated with your Merchant ID and is used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)
Password string `json:"password"`
// Propertie defining if connector is used against playground or production environment
- Playground bool `json:"playground"`
+ Playground *bool `default:"false" json:"playground"`
// Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'
Region SourceKlarnaRegion `json:"region"`
- SourceType SourceKlarnaKlarna `json:"sourceType"`
+ sourceType Klarna `const:"klarna" json:"sourceType"`
// Consists of your Merchant ID (eid) - a unique number that identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)
Username string `json:"username"`
}
+
+func (s SourceKlarna) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceKlarna) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceKlarna) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceKlarna) GetPlayground() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Playground
+}
+
+func (o *SourceKlarna) GetRegion() SourceKlarnaRegion {
+ if o == nil {
+ return SourceKlarnaRegion("")
+ }
+ return o.Region
+}
+
+func (o *SourceKlarna) GetSourceType() Klarna {
+ return KlarnaKlarna
+}
+
+func (o *SourceKlarna) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceklarnacreaterequest.go b/internal/sdk/pkg/models/shared/sourceklarnacreaterequest.go
old mode 100755
new mode 100644
index a9b019b58..31ee12ba7
--- a/internal/sdk/pkg/models/shared/sourceklarnacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceklarnacreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceKlarnaCreateRequest struct {
Configuration SourceKlarna `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceKlarnaCreateRequest) GetConfiguration() SourceKlarna {
+ if o == nil {
+ return SourceKlarna{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceKlarnaCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceKlarnaCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceKlarnaCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceKlarnaCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceklarnaputrequest.go b/internal/sdk/pkg/models/shared/sourceklarnaputrequest.go
old mode 100755
new mode 100644
index d4f7120ae..4bf122c6e
--- a/internal/sdk/pkg/models/shared/sourceklarnaputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceklarnaputrequest.go
@@ -7,3 +7,24 @@ type SourceKlarnaPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceKlarnaPutRequest) GetConfiguration() SourceKlarnaUpdate {
+ if o == nil {
+ return SourceKlarnaUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceKlarnaPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceKlarnaPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceklarnaupdate.go b/internal/sdk/pkg/models/shared/sourceklarnaupdate.go
old mode 100755
new mode 100644
index 568788ba1..8794db1ed
--- a/internal/sdk/pkg/models/shared/sourceklarnaupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceklarnaupdate.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourceKlarnaUpdateRegion - Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'
@@ -42,9 +43,48 @@ type SourceKlarnaUpdate struct {
// A string which is associated with your Merchant ID and is used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)
Password string `json:"password"`
// Propertie defining if connector is used against playground or production environment
- Playground bool `json:"playground"`
+ Playground *bool `default:"false" json:"playground"`
// Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc'
Region SourceKlarnaUpdateRegion `json:"region"`
// Consists of your Merchant ID (eid) - a unique number that identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)
Username string `json:"username"`
}
+
+func (s SourceKlarnaUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceKlarnaUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceKlarnaUpdate) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceKlarnaUpdate) GetPlayground() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Playground
+}
+
+func (o *SourceKlarnaUpdate) GetRegion() SourceKlarnaUpdateRegion {
+ if o == nil {
+ return SourceKlarnaUpdateRegion("")
+ }
+ return o.Region
+}
+
+func (o *SourceKlarnaUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceklaviyo.go b/internal/sdk/pkg/models/shared/sourceklaviyo.go
old mode 100755
new mode 100644
index f7d941bdf..46181bc84
--- a/internal/sdk/pkg/models/shared/sourceklaviyo.go
+++ b/internal/sdk/pkg/models/shared/sourceklaviyo.go
@@ -5,37 +5,67 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceKlaviyoKlaviyo string
+type Klaviyo string
const (
- SourceKlaviyoKlaviyoKlaviyo SourceKlaviyoKlaviyo = "klaviyo"
+ KlaviyoKlaviyo Klaviyo = "klaviyo"
)
-func (e SourceKlaviyoKlaviyo) ToPointer() *SourceKlaviyoKlaviyo {
+func (e Klaviyo) ToPointer() *Klaviyo {
return &e
}
-func (e *SourceKlaviyoKlaviyo) UnmarshalJSON(data []byte) error {
+func (e *Klaviyo) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "klaviyo":
- *e = SourceKlaviyoKlaviyo(v)
+ *e = Klaviyo(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceKlaviyoKlaviyo: %v", v)
+ return fmt.Errorf("invalid value for Klaviyo: %v", v)
}
}
type SourceKlaviyo struct {
// Klaviyo API Key. See our docs if you need help finding this key.
- APIKey string `json:"api_key"`
- SourceType SourceKlaviyoKlaviyo `json:"sourceType"`
- // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
- StartDate time.Time `json:"start_date"`
+ APIKey string `json:"api_key"`
+ sourceType Klaviyo `const:"klaviyo" json:"sourceType"`
+ // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This field is optional - if not provided, all data will be replicated.
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceKlaviyo) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceKlaviyo) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceKlaviyo) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceKlaviyo) GetSourceType() Klaviyo {
+ return KlaviyoKlaviyo
+}
+
+func (o *SourceKlaviyo) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceklaviyocreaterequest.go b/internal/sdk/pkg/models/shared/sourceklaviyocreaterequest.go
old mode 100755
new mode 100644
index 4b4ae3d49..7c6e1c7d0
--- a/internal/sdk/pkg/models/shared/sourceklaviyocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceklaviyocreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceKlaviyoCreateRequest struct {
Configuration SourceKlaviyo `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceKlaviyoCreateRequest) GetConfiguration() SourceKlaviyo {
+ if o == nil {
+ return SourceKlaviyo{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceKlaviyoCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceKlaviyoCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceKlaviyoCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceKlaviyoCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceklaviyoputrequest.go b/internal/sdk/pkg/models/shared/sourceklaviyoputrequest.go
old mode 100755
new mode 100644
index 515e98c58..9acb470a4
--- a/internal/sdk/pkg/models/shared/sourceklaviyoputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceklaviyoputrequest.go
@@ -7,3 +7,24 @@ type SourceKlaviyoPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceKlaviyoPutRequest) GetConfiguration() SourceKlaviyoUpdate {
+ if o == nil {
+ return SourceKlaviyoUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceKlaviyoPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceKlaviyoPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceklaviyoupdate.go b/internal/sdk/pkg/models/shared/sourceklaviyoupdate.go
old mode 100755
new mode 100644
index a9ab14571..49464167f
--- a/internal/sdk/pkg/models/shared/sourceklaviyoupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceklaviyoupdate.go
@@ -3,12 +3,38 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
type SourceKlaviyoUpdate struct {
// Klaviyo API Key. See our docs if you need help finding this key.
APIKey string `json:"api_key"`
- // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
- StartDate time.Time `json:"start_date"`
+ // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This field is optional - if not provided, all data will be replicated.
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceKlaviyoUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceKlaviyoUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceKlaviyoUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceKlaviyoUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcekustomersinger.go b/internal/sdk/pkg/models/shared/sourcekustomersinger.go
old mode 100755
new mode 100644
index 92b39833e..99d4c8c3e
--- a/internal/sdk/pkg/models/shared/sourcekustomersinger.go
+++ b/internal/sdk/pkg/models/shared/sourcekustomersinger.go
@@ -5,36 +5,66 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceKustomerSingerKustomerSinger string
+type KustomerSinger string
const (
- SourceKustomerSingerKustomerSingerKustomerSinger SourceKustomerSingerKustomerSinger = "kustomer-singer"
+ KustomerSingerKustomerSinger KustomerSinger = "kustomer-singer"
)
-func (e SourceKustomerSingerKustomerSinger) ToPointer() *SourceKustomerSingerKustomerSinger {
+func (e KustomerSinger) ToPointer() *KustomerSinger {
return &e
}
-func (e *SourceKustomerSingerKustomerSinger) UnmarshalJSON(data []byte) error {
+func (e *KustomerSinger) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "kustomer-singer":
- *e = SourceKustomerSingerKustomerSinger(v)
+ *e = KustomerSinger(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceKustomerSingerKustomerSinger: %v", v)
+ return fmt.Errorf("invalid value for KustomerSinger: %v", v)
}
}
type SourceKustomerSinger struct {
// Kustomer API Token. See the docs on how to obtain this
- APIToken string `json:"api_token"`
- SourceType SourceKustomerSingerKustomerSinger `json:"sourceType"`
+ APIToken string `json:"api_token"`
+ sourceType KustomerSinger `const:"kustomer-singer" json:"sourceType"`
// The date from which you'd like to replicate the data
StartDate string `json:"start_date"`
}
+
+func (s SourceKustomerSinger) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceKustomerSinger) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceKustomerSinger) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceKustomerSinger) GetSourceType() KustomerSinger {
+ return KustomerSingerKustomerSinger
+}
+
+func (o *SourceKustomerSinger) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcekustomersingercreaterequest.go b/internal/sdk/pkg/models/shared/sourcekustomersingercreaterequest.go
old mode 100755
new mode 100644
index 1101425e4..0e4f77995
--- a/internal/sdk/pkg/models/shared/sourcekustomersingercreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcekustomersingercreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceKustomerSingerCreateRequest struct {
Configuration SourceKustomerSinger `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceKustomerSingerCreateRequest) GetConfiguration() SourceKustomerSinger {
+ if o == nil {
+ return SourceKustomerSinger{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceKustomerSingerCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceKustomerSingerCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceKustomerSingerCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceKustomerSingerCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcekustomersingerputrequest.go b/internal/sdk/pkg/models/shared/sourcekustomersingerputrequest.go
old mode 100755
new mode 100644
index c9bc81237..130c0ddaa
--- a/internal/sdk/pkg/models/shared/sourcekustomersingerputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcekustomersingerputrequest.go
@@ -7,3 +7,24 @@ type SourceKustomerSingerPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceKustomerSingerPutRequest) GetConfiguration() SourceKustomerSingerUpdate {
+ if o == nil {
+ return SourceKustomerSingerUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceKustomerSingerPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceKustomerSingerPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcekustomersingerupdate.go b/internal/sdk/pkg/models/shared/sourcekustomersingerupdate.go
old mode 100755
new mode 100644
index f13c4e9c4..affca8d64
--- a/internal/sdk/pkg/models/shared/sourcekustomersingerupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcekustomersingerupdate.go
@@ -8,3 +8,17 @@ type SourceKustomerSingerUpdate struct {
// The date from which you'd like to replicate the data
StartDate string `json:"start_date"`
}
+
+func (o *SourceKustomerSingerUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceKustomerSingerUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcekyve.go b/internal/sdk/pkg/models/shared/sourcekyve.go
old mode 100755
new mode 100644
index cade8b226..d59bef39c
--- a/internal/sdk/pkg/models/shared/sourcekyve.go
+++ b/internal/sdk/pkg/models/shared/sourcekyve.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceKyveKyve string
+type Kyve string
const (
- SourceKyveKyveKyve SourceKyveKyve = "kyve"
+ KyveKyve Kyve = "kyve"
)
-func (e SourceKyveKyve) ToPointer() *SourceKyveKyve {
+func (e Kyve) ToPointer() *Kyve {
return &e
}
-func (e *SourceKyveKyve) UnmarshalJSON(data []byte) error {
+func (e *Kyve) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "kyve":
- *e = SourceKyveKyve(v)
+ *e = Kyve(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceKyveKyve: %v", v)
+ return fmt.Errorf("invalid value for Kyve: %v", v)
}
}
@@ -35,12 +36,62 @@ type SourceKyve struct {
// The maximum amount of pages to go trough. Set to 'null' for all pages.
MaxPages *int64 `json:"max_pages,omitempty"`
// The pagesize for pagination, smaller numbers are used in integration tests.
- PageSize *int64 `json:"page_size,omitempty"`
+ PageSize *int64 `default:"100" json:"page_size"`
// The IDs of the KYVE storage pool you want to archive. (Comma separated)
- PoolIds string `json:"pool_ids"`
- SourceType SourceKyveKyve `json:"sourceType"`
+ PoolIds string `json:"pool_ids"`
+ sourceType Kyve `const:"kyve" json:"sourceType"`
// The start-id defines, from which bundle id the pipeline should start to extract the data (Comma separated)
StartIds string `json:"start_ids"`
// URL to the KYVE Chain API.
- URLBase *string `json:"url_base,omitempty"`
+ URLBase *string `default:"https://api.korellia.kyve.network" json:"url_base"`
+}
+
+func (s SourceKyve) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceKyve) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceKyve) GetMaxPages() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.MaxPages
+}
+
+func (o *SourceKyve) GetPageSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PageSize
+}
+
+func (o *SourceKyve) GetPoolIds() string {
+ if o == nil {
+ return ""
+ }
+ return o.PoolIds
+}
+
+func (o *SourceKyve) GetSourceType() Kyve {
+ return KyveKyve
+}
+
+func (o *SourceKyve) GetStartIds() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartIds
+}
+
+func (o *SourceKyve) GetURLBase() *string {
+ if o == nil {
+ return nil
+ }
+ return o.URLBase
}
diff --git a/internal/sdk/pkg/models/shared/sourcekyvecreaterequest.go b/internal/sdk/pkg/models/shared/sourcekyvecreaterequest.go
old mode 100755
new mode 100644
index a36767b3b..a4ef09309
--- a/internal/sdk/pkg/models/shared/sourcekyvecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcekyvecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceKyveCreateRequest struct {
Configuration SourceKyve `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceKyveCreateRequest) GetConfiguration() SourceKyve {
+ if o == nil {
+ return SourceKyve{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceKyveCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceKyveCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceKyveCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceKyveCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcekyveputrequest.go b/internal/sdk/pkg/models/shared/sourcekyveputrequest.go
old mode 100755
new mode 100644
index 654f6e58e..5eef08f34
--- a/internal/sdk/pkg/models/shared/sourcekyveputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcekyveputrequest.go
@@ -7,3 +7,24 @@ type SourceKyvePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceKyvePutRequest) GetConfiguration() SourceKyveUpdate {
+ if o == nil {
+ return SourceKyveUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceKyvePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceKyvePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcekyveupdate.go b/internal/sdk/pkg/models/shared/sourcekyveupdate.go
old mode 100755
new mode 100644
index 774b0be67..f6d61bd7e
--- a/internal/sdk/pkg/models/shared/sourcekyveupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcekyveupdate.go
@@ -2,15 +2,65 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceKyveUpdate struct {
// The maximum amount of pages to go trough. Set to 'null' for all pages.
MaxPages *int64 `json:"max_pages,omitempty"`
// The pagesize for pagination, smaller numbers are used in integration tests.
- PageSize *int64 `json:"page_size,omitempty"`
+ PageSize *int64 `default:"100" json:"page_size"`
// The IDs of the KYVE storage pool you want to archive. (Comma separated)
PoolIds string `json:"pool_ids"`
// The start-id defines, from which bundle id the pipeline should start to extract the data (Comma separated)
StartIds string `json:"start_ids"`
// URL to the KYVE Chain API.
- URLBase *string `json:"url_base,omitempty"`
+ URLBase *string `default:"https://api.korellia.kyve.network" json:"url_base"`
+}
+
+func (s SourceKyveUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceKyveUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceKyveUpdate) GetMaxPages() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.MaxPages
+}
+
+func (o *SourceKyveUpdate) GetPageSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.PageSize
+}
+
+func (o *SourceKyveUpdate) GetPoolIds() string {
+ if o == nil {
+ return ""
+ }
+ return o.PoolIds
+}
+
+func (o *SourceKyveUpdate) GetStartIds() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartIds
+}
+
+func (o *SourceKyveUpdate) GetURLBase() *string {
+ if o == nil {
+ return nil
+ }
+ return o.URLBase
}
diff --git a/internal/sdk/pkg/models/shared/sourcelaunchdarkly.go b/internal/sdk/pkg/models/shared/sourcelaunchdarkly.go
old mode 100755
new mode 100644
index 79c2982b2..72b588cfd
--- a/internal/sdk/pkg/models/shared/sourcelaunchdarkly.go
+++ b/internal/sdk/pkg/models/shared/sourcelaunchdarkly.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceLaunchdarklyLaunchdarkly string
+type Launchdarkly string
const (
- SourceLaunchdarklyLaunchdarklyLaunchdarkly SourceLaunchdarklyLaunchdarkly = "launchdarkly"
+ LaunchdarklyLaunchdarkly Launchdarkly = "launchdarkly"
)
-func (e SourceLaunchdarklyLaunchdarkly) ToPointer() *SourceLaunchdarklyLaunchdarkly {
+func (e Launchdarkly) ToPointer() *Launchdarkly {
return &e
}
-func (e *SourceLaunchdarklyLaunchdarkly) UnmarshalJSON(data []byte) error {
+func (e *Launchdarkly) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "launchdarkly":
- *e = SourceLaunchdarklyLaunchdarkly(v)
+ *e = Launchdarkly(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLaunchdarklyLaunchdarkly: %v", v)
+ return fmt.Errorf("invalid value for Launchdarkly: %v", v)
}
}
type SourceLaunchdarkly struct {
// Your Access token. See here.
- AccessToken string `json:"access_token"`
- SourceType SourceLaunchdarklyLaunchdarkly `json:"sourceType"`
+ AccessToken string `json:"access_token"`
+ sourceType Launchdarkly `const:"launchdarkly" json:"sourceType"`
+}
+
+func (s SourceLaunchdarkly) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLaunchdarkly) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLaunchdarkly) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceLaunchdarkly) GetSourceType() Launchdarkly {
+ return LaunchdarklyLaunchdarkly
}
diff --git a/internal/sdk/pkg/models/shared/sourcelaunchdarklycreaterequest.go b/internal/sdk/pkg/models/shared/sourcelaunchdarklycreaterequest.go
old mode 100755
new mode 100644
index 2857579a8..33a458e03
--- a/internal/sdk/pkg/models/shared/sourcelaunchdarklycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelaunchdarklycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceLaunchdarklyCreateRequest struct {
Configuration SourceLaunchdarkly `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLaunchdarklyCreateRequest) GetConfiguration() SourceLaunchdarkly {
+ if o == nil {
+ return SourceLaunchdarkly{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLaunchdarklyCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceLaunchdarklyCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLaunchdarklyCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceLaunchdarklyCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelaunchdarklyputrequest.go b/internal/sdk/pkg/models/shared/sourcelaunchdarklyputrequest.go
old mode 100755
new mode 100644
index 3df017408..9da60a2bd
--- a/internal/sdk/pkg/models/shared/sourcelaunchdarklyputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelaunchdarklyputrequest.go
@@ -7,3 +7,24 @@ type SourceLaunchdarklyPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLaunchdarklyPutRequest) GetConfiguration() SourceLaunchdarklyUpdate {
+ if o == nil {
+ return SourceLaunchdarklyUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLaunchdarklyPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLaunchdarklyPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelaunchdarklyupdate.go b/internal/sdk/pkg/models/shared/sourcelaunchdarklyupdate.go
old mode 100755
new mode 100644
index 20308055e..de238ecd5
--- a/internal/sdk/pkg/models/shared/sourcelaunchdarklyupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcelaunchdarklyupdate.go
@@ -6,3 +6,10 @@ type SourceLaunchdarklyUpdate struct {
// Your Access token. See here.
AccessToken string `json:"access_token"`
}
+
+func (o *SourceLaunchdarklyUpdate) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelemlist.go b/internal/sdk/pkg/models/shared/sourcelemlist.go
old mode 100755
new mode 100644
index 1b132a68e..52e778512
--- a/internal/sdk/pkg/models/shared/sourcelemlist.go
+++ b/internal/sdk/pkg/models/shared/sourcelemlist.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceLemlistLemlist string
+type Lemlist string
const (
- SourceLemlistLemlistLemlist SourceLemlistLemlist = "lemlist"
+ LemlistLemlist Lemlist = "lemlist"
)
-func (e SourceLemlistLemlist) ToPointer() *SourceLemlistLemlist {
+func (e Lemlist) ToPointer() *Lemlist {
return &e
}
-func (e *SourceLemlistLemlist) UnmarshalJSON(data []byte) error {
+func (e *Lemlist) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "lemlist":
- *e = SourceLemlistLemlist(v)
+ *e = Lemlist(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLemlistLemlist: %v", v)
+ return fmt.Errorf("invalid value for Lemlist: %v", v)
}
}
type SourceLemlist struct {
// Lemlist API key,
- APIKey string `json:"api_key"`
- SourceType SourceLemlistLemlist `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Lemlist `const:"lemlist" json:"sourceType"`
+}
+
+func (s SourceLemlist) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLemlist) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLemlist) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceLemlist) GetSourceType() Lemlist {
+ return LemlistLemlist
}
diff --git a/internal/sdk/pkg/models/shared/sourcelemlistcreaterequest.go b/internal/sdk/pkg/models/shared/sourcelemlistcreaterequest.go
old mode 100755
new mode 100644
index af51ce024..7e78ba780
--- a/internal/sdk/pkg/models/shared/sourcelemlistcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelemlistcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceLemlistCreateRequest struct {
Configuration SourceLemlist `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLemlistCreateRequest) GetConfiguration() SourceLemlist {
+ if o == nil {
+ return SourceLemlist{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLemlistCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceLemlistCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLemlistCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceLemlistCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelemlistputrequest.go b/internal/sdk/pkg/models/shared/sourcelemlistputrequest.go
old mode 100755
new mode 100644
index 2b784f31d..a48691503
--- a/internal/sdk/pkg/models/shared/sourcelemlistputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelemlistputrequest.go
@@ -7,3 +7,24 @@ type SourceLemlistPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLemlistPutRequest) GetConfiguration() SourceLemlistUpdate {
+ if o == nil {
+ return SourceLemlistUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLemlistPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLemlistPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelemlistupdate.go b/internal/sdk/pkg/models/shared/sourcelemlistupdate.go
old mode 100755
new mode 100644
index 8781739b8..a9dd0594f
--- a/internal/sdk/pkg/models/shared/sourcelemlistupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcelemlistupdate.go
@@ -6,3 +6,10 @@ type SourceLemlistUpdate struct {
// Lemlist API key,
APIKey string `json:"api_key"`
}
+
+func (o *SourceLemlistUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourceleverhiring.go b/internal/sdk/pkg/models/shared/sourceleverhiring.go
old mode 100755
new mode 100644
index 7cb461607..9860b8e32
--- a/internal/sdk/pkg/models/shared/sourceleverhiring.go
+++ b/internal/sdk/pkg/models/shared/sourceleverhiring.go
@@ -3,70 +3,92 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType string
+type SourceLeverHiringSchemasAuthType string
const (
- SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthTypeAPIKey SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType = "Api Key"
+ SourceLeverHiringSchemasAuthTypeAPIKey SourceLeverHiringSchemasAuthType = "Api Key"
)
-func (e SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType) ToPointer() *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType {
+func (e SourceLeverHiringSchemasAuthType) ToPointer() *SourceLeverHiringSchemasAuthType {
return &e
}
-func (e *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceLeverHiringSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Api Key":
- *e = SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType(v)
+ *e = SourceLeverHiringSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceLeverHiringSchemasAuthType: %v", v)
}
}
-// SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey - Choose how to authenticate to Lever Hiring.
-type SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey struct {
+// SourceLeverHiringAuthenticateViaLeverAPIKey - Choose how to authenticate to Lever Hiring.
+type SourceLeverHiringAuthenticateViaLeverAPIKey struct {
// The Api Key of your Lever Hiring account.
- APIKey string `json:"api_key"`
- AuthType *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType `json:"auth_type,omitempty"`
+ APIKey string `json:"api_key"`
+ authType *SourceLeverHiringSchemasAuthType `const:"Api Key" json:"auth_type,omitempty"`
}
-type SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuthAuthType string
+func (s SourceLeverHiringAuthenticateViaLeverAPIKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLeverHiringAuthenticateViaLeverAPIKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLeverHiringAuthenticateViaLeverAPIKey) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceLeverHiringAuthenticateViaLeverAPIKey) GetAuthType() *SourceLeverHiringSchemasAuthType {
+ return SourceLeverHiringSchemasAuthTypeAPIKey.ToPointer()
+}
+
+type SourceLeverHiringAuthType string
const (
- SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuthAuthTypeClient SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuthAuthType = "Client"
+ SourceLeverHiringAuthTypeClient SourceLeverHiringAuthType = "Client"
)
-func (e SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuthAuthType) ToPointer() *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuthAuthType {
+func (e SourceLeverHiringAuthType) ToPointer() *SourceLeverHiringAuthType {
return &e
}
-func (e *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceLeverHiringAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuthAuthType(v)
+ *e = SourceLeverHiringAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceLeverHiringAuthType: %v", v)
}
}
-// SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth - Choose how to authenticate to Lever Hiring.
-type SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth struct {
- AuthType *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuthAuthType `json:"auth_type,omitempty"`
+// SourceLeverHiringAuthenticateViaLeverOAuth - Choose how to authenticate to Lever Hiring.
+type SourceLeverHiringAuthenticateViaLeverOAuth struct {
+ authType *SourceLeverHiringAuthType `const:"Client" json:"auth_type,omitempty"`
// The Client ID of your Lever Hiring developer application.
ClientID *string `json:"client_id,omitempty"`
// The Client Secret of your Lever Hiring developer application.
@@ -75,56 +97,87 @@ type SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceLeverHiringAuthenticateViaLeverOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLeverHiringAuthenticateViaLeverOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLeverHiringAuthenticateViaLeverOAuth) GetAuthType() *SourceLeverHiringAuthType {
+ return SourceLeverHiringAuthTypeClient.ToPointer()
+}
+
+func (o *SourceLeverHiringAuthenticateViaLeverOAuth) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceLeverHiringAuthenticateViaLeverOAuth) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceLeverHiringAuthenticateViaLeverOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceLeverHiringAuthenticationMechanismType string
const (
- SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth SourceLeverHiringAuthenticationMechanismType = "source-lever-hiring_Authentication Mechanism_Authenticate via Lever (OAuth)"
- SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey SourceLeverHiringAuthenticationMechanismType = "source-lever-hiring_Authentication Mechanism_Authenticate via Lever (Api Key)"
+ SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticateViaLeverOAuth SourceLeverHiringAuthenticationMechanismType = "source-lever-hiring_Authenticate via Lever (OAuth)"
+ SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticateViaLeverAPIKey SourceLeverHiringAuthenticationMechanismType = "source-lever-hiring_Authenticate via Lever (Api Key)"
)
type SourceLeverHiringAuthenticationMechanism struct {
- SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth
- SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey *SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey
+ SourceLeverHiringAuthenticateViaLeverOAuth *SourceLeverHiringAuthenticateViaLeverOAuth
+ SourceLeverHiringAuthenticateViaLeverAPIKey *SourceLeverHiringAuthenticateViaLeverAPIKey
Type SourceLeverHiringAuthenticationMechanismType
}
-func CreateSourceLeverHiringAuthenticationMechanismSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth(sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth) SourceLeverHiringAuthenticationMechanism {
- typ := SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth
+func CreateSourceLeverHiringAuthenticationMechanismSourceLeverHiringAuthenticateViaLeverOAuth(sourceLeverHiringAuthenticateViaLeverOAuth SourceLeverHiringAuthenticateViaLeverOAuth) SourceLeverHiringAuthenticationMechanism {
+ typ := SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticateViaLeverOAuth
return SourceLeverHiringAuthenticationMechanism{
- SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth: &sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth,
+ SourceLeverHiringAuthenticateViaLeverOAuth: &sourceLeverHiringAuthenticateViaLeverOAuth,
Type: typ,
}
}
-func CreateSourceLeverHiringAuthenticationMechanismSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey(sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey) SourceLeverHiringAuthenticationMechanism {
- typ := SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey
+func CreateSourceLeverHiringAuthenticationMechanismSourceLeverHiringAuthenticateViaLeverAPIKey(sourceLeverHiringAuthenticateViaLeverAPIKey SourceLeverHiringAuthenticateViaLeverAPIKey) SourceLeverHiringAuthenticationMechanism {
+ typ := SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticateViaLeverAPIKey
return SourceLeverHiringAuthenticationMechanism{
- SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey: &sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey,
+ SourceLeverHiringAuthenticateViaLeverAPIKey: &sourceLeverHiringAuthenticateViaLeverAPIKey,
Type: typ,
}
}
func (u *SourceLeverHiringAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey := new(SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey); err == nil {
- u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey = sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey
- u.Type = SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey
+
+ sourceLeverHiringAuthenticateViaLeverAPIKey := new(SourceLeverHiringAuthenticateViaLeverAPIKey)
+ if err := utils.UnmarshalJSON(data, &sourceLeverHiringAuthenticateViaLeverAPIKey, "", true, true); err == nil {
+ u.SourceLeverHiringAuthenticateViaLeverAPIKey = sourceLeverHiringAuthenticateViaLeverAPIKey
+ u.Type = SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticateViaLeverAPIKey
return nil
}
- sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth := new(SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth); err == nil {
- u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth = sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth
- u.Type = SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth
+ sourceLeverHiringAuthenticateViaLeverOAuth := new(SourceLeverHiringAuthenticateViaLeverOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceLeverHiringAuthenticateViaLeverOAuth, "", true, true); err == nil {
+ u.SourceLeverHiringAuthenticateViaLeverOAuth = sourceLeverHiringAuthenticateViaLeverOAuth
+ u.Type = SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticateViaLeverOAuth
return nil
}
@@ -132,15 +185,15 @@ func (u *SourceLeverHiringAuthenticationMechanism) UnmarshalJSON(data []byte) er
}
func (u SourceLeverHiringAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey != nil {
- return json.Marshal(u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey)
+ if u.SourceLeverHiringAuthenticateViaLeverOAuth != nil {
+ return utils.MarshalJSON(u.SourceLeverHiringAuthenticateViaLeverOAuth, "", true)
}
- if u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth != nil {
- return json.Marshal(u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth)
+ if u.SourceLeverHiringAuthenticateViaLeverAPIKey != nil {
+ return utils.MarshalJSON(u.SourceLeverHiringAuthenticateViaLeverAPIKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// SourceLeverHiringEnvironment - The environment in which you'd like to replicate data for Lever. This is used to determine which Lever API endpoint to use.
@@ -171,27 +224,27 @@ func (e *SourceLeverHiringEnvironment) UnmarshalJSON(data []byte) error {
}
}
-type SourceLeverHiringLeverHiring string
+type LeverHiring string
const (
- SourceLeverHiringLeverHiringLeverHiring SourceLeverHiringLeverHiring = "lever-hiring"
+ LeverHiringLeverHiring LeverHiring = "lever-hiring"
)
-func (e SourceLeverHiringLeverHiring) ToPointer() *SourceLeverHiringLeverHiring {
+func (e LeverHiring) ToPointer() *LeverHiring {
return &e
}
-func (e *SourceLeverHiringLeverHiring) UnmarshalJSON(data []byte) error {
+func (e *LeverHiring) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "lever-hiring":
- *e = SourceLeverHiringLeverHiring(v)
+ *e = LeverHiring(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLeverHiringLeverHiring: %v", v)
+ return fmt.Errorf("invalid value for LeverHiring: %v", v)
}
}
@@ -199,8 +252,44 @@ type SourceLeverHiring struct {
// Choose how to authenticate to Lever Hiring.
Credentials *SourceLeverHiringAuthenticationMechanism `json:"credentials,omitempty"`
// The environment in which you'd like to replicate data for Lever. This is used to determine which Lever API endpoint to use.
- Environment *SourceLeverHiringEnvironment `json:"environment,omitempty"`
- SourceType SourceLeverHiringLeverHiring `json:"sourceType"`
+ Environment *SourceLeverHiringEnvironment `default:"Sandbox" json:"environment"`
+ sourceType LeverHiring `const:"lever-hiring" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. Note that it will be used only in the following incremental streams: comments, commits, and issues.
StartDate string `json:"start_date"`
}
+
+func (s SourceLeverHiring) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLeverHiring) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLeverHiring) GetCredentials() *SourceLeverHiringAuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceLeverHiring) GetEnvironment() *SourceLeverHiringEnvironment {
+ if o == nil {
+ return nil
+ }
+ return o.Environment
+}
+
+func (o *SourceLeverHiring) GetSourceType() LeverHiring {
+ return LeverHiringLeverHiring
+}
+
+func (o *SourceLeverHiring) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceleverhiringcreaterequest.go b/internal/sdk/pkg/models/shared/sourceleverhiringcreaterequest.go
old mode 100755
new mode 100644
index 45462f3de..aa96b5fb8
--- a/internal/sdk/pkg/models/shared/sourceleverhiringcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceleverhiringcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceLeverHiringCreateRequest struct {
Configuration SourceLeverHiring `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLeverHiringCreateRequest) GetConfiguration() SourceLeverHiring {
+ if o == nil {
+ return SourceLeverHiring{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLeverHiringCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceLeverHiringCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLeverHiringCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceLeverHiringCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceleverhiringputrequest.go b/internal/sdk/pkg/models/shared/sourceleverhiringputrequest.go
old mode 100755
new mode 100644
index 0ddac21de..d9bda455e
--- a/internal/sdk/pkg/models/shared/sourceleverhiringputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceleverhiringputrequest.go
@@ -7,3 +7,24 @@ type SourceLeverHiringPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLeverHiringPutRequest) GetConfiguration() SourceLeverHiringUpdate {
+ if o == nil {
+ return SourceLeverHiringUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLeverHiringPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLeverHiringPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceleverhiringupdate.go b/internal/sdk/pkg/models/shared/sourceleverhiringupdate.go
old mode 100755
new mode 100644
index 2e93832b9..8b6d728a8
--- a/internal/sdk/pkg/models/shared/sourceleverhiringupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceleverhiringupdate.go
@@ -3,70 +3,92 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType string
+type SourceLeverHiringUpdateSchemasAuthType string
const (
- SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthTypeAPIKey SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType = "Api Key"
+ SourceLeverHiringUpdateSchemasAuthTypeAPIKey SourceLeverHiringUpdateSchemasAuthType = "Api Key"
)
-func (e SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType) ToPointer() *SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType {
+func (e SourceLeverHiringUpdateSchemasAuthType) ToPointer() *SourceLeverHiringUpdateSchemasAuthType {
return &e
}
-func (e *SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceLeverHiringUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Api Key":
- *e = SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType(v)
+ *e = SourceLeverHiringUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceLeverHiringUpdateSchemasAuthType: %v", v)
}
}
-// SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey - Choose how to authenticate to Lever Hiring.
-type SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey struct {
+// AuthenticateViaLeverAPIKey - Choose how to authenticate to Lever Hiring.
+type AuthenticateViaLeverAPIKey struct {
// The Api Key of your Lever Hiring account.
- APIKey string `json:"api_key"`
- AuthType *SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKeyAuthType `json:"auth_type,omitempty"`
+ APIKey string `json:"api_key"`
+ authType *SourceLeverHiringUpdateSchemasAuthType `const:"Api Key" json:"auth_type,omitempty"`
}
-type SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuthAuthType string
+func (a AuthenticateViaLeverAPIKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AuthenticateViaLeverAPIKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AuthenticateViaLeverAPIKey) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *AuthenticateViaLeverAPIKey) GetAuthType() *SourceLeverHiringUpdateSchemasAuthType {
+ return SourceLeverHiringUpdateSchemasAuthTypeAPIKey.ToPointer()
+}
+
+type SourceLeverHiringUpdateAuthType string
const (
- SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuthAuthTypeClient SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuthAuthType = "Client"
+ SourceLeverHiringUpdateAuthTypeClient SourceLeverHiringUpdateAuthType = "Client"
)
-func (e SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuthAuthType) ToPointer() *SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuthAuthType {
+func (e SourceLeverHiringUpdateAuthType) ToPointer() *SourceLeverHiringUpdateAuthType {
return &e
}
-func (e *SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceLeverHiringUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuthAuthType(v)
+ *e = SourceLeverHiringUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceLeverHiringUpdateAuthType: %v", v)
}
}
-// SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth - Choose how to authenticate to Lever Hiring.
-type SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth struct {
- AuthType *SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuthAuthType `json:"auth_type,omitempty"`
+// AuthenticateViaLeverOAuth - Choose how to authenticate to Lever Hiring.
+type AuthenticateViaLeverOAuth struct {
+ authType *SourceLeverHiringUpdateAuthType `const:"Client" json:"auth_type,omitempty"`
// The Client ID of your Lever Hiring developer application.
ClientID *string `json:"client_id,omitempty"`
// The Client Secret of your Lever Hiring developer application.
@@ -75,56 +97,87 @@ type SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth str
RefreshToken string `json:"refresh_token"`
}
+func (a AuthenticateViaLeverOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AuthenticateViaLeverOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AuthenticateViaLeverOAuth) GetAuthType() *SourceLeverHiringUpdateAuthType {
+ return SourceLeverHiringUpdateAuthTypeClient.ToPointer()
+}
+
+func (o *AuthenticateViaLeverOAuth) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *AuthenticateViaLeverOAuth) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *AuthenticateViaLeverOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceLeverHiringUpdateAuthenticationMechanismType string
const (
- SourceLeverHiringUpdateAuthenticationMechanismTypeSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth SourceLeverHiringUpdateAuthenticationMechanismType = "source-lever-hiring-update_Authentication Mechanism_Authenticate via Lever (OAuth)"
- SourceLeverHiringUpdateAuthenticationMechanismTypeSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey SourceLeverHiringUpdateAuthenticationMechanismType = "source-lever-hiring-update_Authentication Mechanism_Authenticate via Lever (Api Key)"
+ SourceLeverHiringUpdateAuthenticationMechanismTypeAuthenticateViaLeverOAuth SourceLeverHiringUpdateAuthenticationMechanismType = "Authenticate via Lever (OAuth)"
+ SourceLeverHiringUpdateAuthenticationMechanismTypeAuthenticateViaLeverAPIKey SourceLeverHiringUpdateAuthenticationMechanismType = "Authenticate via Lever (Api Key)"
)
type SourceLeverHiringUpdateAuthenticationMechanism struct {
- SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth *SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth
- SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey *SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey
+ AuthenticateViaLeverOAuth *AuthenticateViaLeverOAuth
+ AuthenticateViaLeverAPIKey *AuthenticateViaLeverAPIKey
Type SourceLeverHiringUpdateAuthenticationMechanismType
}
-func CreateSourceLeverHiringUpdateAuthenticationMechanismSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth(sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth) SourceLeverHiringUpdateAuthenticationMechanism {
- typ := SourceLeverHiringUpdateAuthenticationMechanismTypeSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth
+func CreateSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth(authenticateViaLeverOAuth AuthenticateViaLeverOAuth) SourceLeverHiringUpdateAuthenticationMechanism {
+ typ := SourceLeverHiringUpdateAuthenticationMechanismTypeAuthenticateViaLeverOAuth
return SourceLeverHiringUpdateAuthenticationMechanism{
- SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth: &sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth,
- Type: typ,
+ AuthenticateViaLeverOAuth: &authenticateViaLeverOAuth,
+ Type: typ,
}
}
-func CreateSourceLeverHiringUpdateAuthenticationMechanismSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey(sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey) SourceLeverHiringUpdateAuthenticationMechanism {
- typ := SourceLeverHiringUpdateAuthenticationMechanismTypeSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey
+func CreateSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey(authenticateViaLeverAPIKey AuthenticateViaLeverAPIKey) SourceLeverHiringUpdateAuthenticationMechanism {
+ typ := SourceLeverHiringUpdateAuthenticationMechanismTypeAuthenticateViaLeverAPIKey
return SourceLeverHiringUpdateAuthenticationMechanism{
- SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey: &sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey,
- Type: typ,
+ AuthenticateViaLeverAPIKey: &authenticateViaLeverAPIKey,
+ Type: typ,
}
}
func (u *SourceLeverHiringUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey := new(SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey); err == nil {
- u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey = sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey
- u.Type = SourceLeverHiringUpdateAuthenticationMechanismTypeSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey
+
+ authenticateViaLeverAPIKey := new(AuthenticateViaLeverAPIKey)
+ if err := utils.UnmarshalJSON(data, &authenticateViaLeverAPIKey, "", true, true); err == nil {
+ u.AuthenticateViaLeverAPIKey = authenticateViaLeverAPIKey
+ u.Type = SourceLeverHiringUpdateAuthenticationMechanismTypeAuthenticateViaLeverAPIKey
return nil
}
- sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth := new(SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth); err == nil {
- u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth = sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth
- u.Type = SourceLeverHiringUpdateAuthenticationMechanismTypeSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth
+ authenticateViaLeverOAuth := new(AuthenticateViaLeverOAuth)
+ if err := utils.UnmarshalJSON(data, &authenticateViaLeverOAuth, "", true, true); err == nil {
+ u.AuthenticateViaLeverOAuth = authenticateViaLeverOAuth
+ u.Type = SourceLeverHiringUpdateAuthenticationMechanismTypeAuthenticateViaLeverOAuth
return nil
}
@@ -132,15 +185,15 @@ func (u *SourceLeverHiringUpdateAuthenticationMechanism) UnmarshalJSON(data []by
}
func (u SourceLeverHiringUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey != nil {
- return json.Marshal(u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey)
+ if u.AuthenticateViaLeverOAuth != nil {
+ return utils.MarshalJSON(u.AuthenticateViaLeverOAuth, "", true)
}
- if u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth != nil {
- return json.Marshal(u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth)
+ if u.AuthenticateViaLeverAPIKey != nil {
+ return utils.MarshalJSON(u.AuthenticateViaLeverAPIKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// SourceLeverHiringUpdateEnvironment - The environment in which you'd like to replicate data for Lever. This is used to determine which Lever API endpoint to use.
@@ -175,7 +228,39 @@ type SourceLeverHiringUpdate struct {
// Choose how to authenticate to Lever Hiring.
Credentials *SourceLeverHiringUpdateAuthenticationMechanism `json:"credentials,omitempty"`
// The environment in which you'd like to replicate data for Lever. This is used to determine which Lever API endpoint to use.
- Environment *SourceLeverHiringUpdateEnvironment `json:"environment,omitempty"`
+ Environment *SourceLeverHiringUpdateEnvironment `default:"Sandbox" json:"environment"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. Note that it will be used only in the following incremental streams: comments, commits, and issues.
StartDate string `json:"start_date"`
}
+
+func (s SourceLeverHiringUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLeverHiringUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLeverHiringUpdate) GetCredentials() *SourceLeverHiringUpdateAuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceLeverHiringUpdate) GetEnvironment() *SourceLeverHiringUpdateEnvironment {
+ if o == nil {
+ return nil
+ }
+ return o.Environment
+}
+
+func (o *SourceLeverHiringUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinads.go b/internal/sdk/pkg/models/shared/sourcelinkedinads.go
old mode 100755
new mode 100644
index a397e7309..f2a632511
--- a/internal/sdk/pkg/models/shared/sourcelinkedinads.go
+++ b/internal/sdk/pkg/models/shared/sourcelinkedinads.go
@@ -3,45 +3,45 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory - Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.
-type SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory string
+// SourceLinkedinAdsPivotCategory - Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.
+type SourceLinkedinAdsPivotCategory string
const (
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryCompany SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "COMPANY"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryAccount SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "ACCOUNT"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryShare SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "SHARE"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryCampaign SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CAMPAIGN"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryCreative SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CREATIVE"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryCampaignGroup SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CAMPAIGN_GROUP"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryConversion SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CONVERSION"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryConversationNode SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CONVERSATION_NODE"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryConversationNodeOptionIndex SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CONVERSATION_NODE_OPTION_INDEX"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryServingLocation SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "SERVING_LOCATION"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryCardIndex SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CARD_INDEX"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberCompanySize SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COMPANY_SIZE"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberIndustry SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_INDUSTRY"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberSeniority SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_SENIORITY"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberJobTitle SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_JOB_TITLE "
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberJobFunction SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_JOB_FUNCTION "
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberCountryV2 SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COUNTRY_V2 "
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberRegionV2 SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_REGION_V2"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberCompany SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COMPANY"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryPlacementName SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "PLACEMENT_NAME"
- SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryImpressionDeviceType SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "IMPRESSION_DEVICE_TYPE"
+ SourceLinkedinAdsPivotCategoryCompany SourceLinkedinAdsPivotCategory = "COMPANY"
+ SourceLinkedinAdsPivotCategoryAccount SourceLinkedinAdsPivotCategory = "ACCOUNT"
+ SourceLinkedinAdsPivotCategoryShare SourceLinkedinAdsPivotCategory = "SHARE"
+ SourceLinkedinAdsPivotCategoryCampaign SourceLinkedinAdsPivotCategory = "CAMPAIGN"
+ SourceLinkedinAdsPivotCategoryCreative SourceLinkedinAdsPivotCategory = "CREATIVE"
+ SourceLinkedinAdsPivotCategoryCampaignGroup SourceLinkedinAdsPivotCategory = "CAMPAIGN_GROUP"
+ SourceLinkedinAdsPivotCategoryConversion SourceLinkedinAdsPivotCategory = "CONVERSION"
+ SourceLinkedinAdsPivotCategoryConversationNode SourceLinkedinAdsPivotCategory = "CONVERSATION_NODE"
+ SourceLinkedinAdsPivotCategoryConversationNodeOptionIndex SourceLinkedinAdsPivotCategory = "CONVERSATION_NODE_OPTION_INDEX"
+ SourceLinkedinAdsPivotCategoryServingLocation SourceLinkedinAdsPivotCategory = "SERVING_LOCATION"
+ SourceLinkedinAdsPivotCategoryCardIndex SourceLinkedinAdsPivotCategory = "CARD_INDEX"
+ SourceLinkedinAdsPivotCategoryMemberCompanySize SourceLinkedinAdsPivotCategory = "MEMBER_COMPANY_SIZE"
+ SourceLinkedinAdsPivotCategoryMemberIndustry SourceLinkedinAdsPivotCategory = "MEMBER_INDUSTRY"
+ SourceLinkedinAdsPivotCategoryMemberSeniority SourceLinkedinAdsPivotCategory = "MEMBER_SENIORITY"
+ SourceLinkedinAdsPivotCategoryMemberJobTitle SourceLinkedinAdsPivotCategory = "MEMBER_JOB_TITLE "
+ SourceLinkedinAdsPivotCategoryMemberJobFunction SourceLinkedinAdsPivotCategory = "MEMBER_JOB_FUNCTION "
+ SourceLinkedinAdsPivotCategoryMemberCountryV2 SourceLinkedinAdsPivotCategory = "MEMBER_COUNTRY_V2 "
+ SourceLinkedinAdsPivotCategoryMemberRegionV2 SourceLinkedinAdsPivotCategory = "MEMBER_REGION_V2"
+ SourceLinkedinAdsPivotCategoryMemberCompany SourceLinkedinAdsPivotCategory = "MEMBER_COMPANY"
+ SourceLinkedinAdsPivotCategoryPlacementName SourceLinkedinAdsPivotCategory = "PLACEMENT_NAME"
+ SourceLinkedinAdsPivotCategoryImpressionDeviceType SourceLinkedinAdsPivotCategory = "IMPRESSION_DEVICE_TYPE"
)
-func (e SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory) ToPointer() *SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory {
+func (e SourceLinkedinAdsPivotCategory) ToPointer() *SourceLinkedinAdsPivotCategory {
return &e
}
-func (e *SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory) UnmarshalJSON(data []byte) error {
+func (e *SourceLinkedinAdsPivotCategory) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -88,28 +88,28 @@ func (e *SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory) Unmarshal
case "PLACEMENT_NAME":
fallthrough
case "IMPRESSION_DEVICE_TYPE":
- *e = SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory(v)
+ *e = SourceLinkedinAdsPivotCategory(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory: %v", v)
+ return fmt.Errorf("invalid value for SourceLinkedinAdsPivotCategory: %v", v)
}
}
-// SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity - Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.
-type SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity string
+// SourceLinkedinAdsTimeGranularity - Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.
+type SourceLinkedinAdsTimeGranularity string
const (
- SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularityAll SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity = "ALL"
- SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularityDaily SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity = "DAILY"
- SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularityMonthly SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity = "MONTHLY"
- SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularityYearly SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity = "YEARLY"
+ SourceLinkedinAdsTimeGranularityAll SourceLinkedinAdsTimeGranularity = "ALL"
+ SourceLinkedinAdsTimeGranularityDaily SourceLinkedinAdsTimeGranularity = "DAILY"
+ SourceLinkedinAdsTimeGranularityMonthly SourceLinkedinAdsTimeGranularity = "MONTHLY"
+ SourceLinkedinAdsTimeGranularityYearly SourceLinkedinAdsTimeGranularity = "YEARLY"
)
-func (e SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity) ToPointer() *SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity {
+func (e SourceLinkedinAdsTimeGranularity) ToPointer() *SourceLinkedinAdsTimeGranularity {
return &e
}
-func (e *SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity) UnmarshalJSON(data []byte) error {
+func (e *SourceLinkedinAdsTimeGranularity) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -122,10 +122,10 @@ func (e *SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity) Unmarsh
case "MONTHLY":
fallthrough
case "YEARLY":
- *e = SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity(v)
+ *e = SourceLinkedinAdsTimeGranularity(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity: %v", v)
+ return fmt.Errorf("invalid value for SourceLinkedinAdsTimeGranularity: %v", v)
}
}
@@ -134,67 +134,110 @@ type SourceLinkedinAdsAdAnalyticsReportConfiguration struct {
// The name for the custom report.
Name string `json:"name"`
// Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.
- PivotBy SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory `json:"pivot_by"`
+ PivotBy SourceLinkedinAdsPivotCategory `json:"pivot_by"`
// Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.
- TimeGranularity SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity `json:"time_granularity"`
+ TimeGranularity SourceLinkedinAdsTimeGranularity `json:"time_granularity"`
}
-type SourceLinkedinAdsAuthenticationAccessTokenAuthMethod string
+func (o *SourceLinkedinAdsAdAnalyticsReportConfiguration) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLinkedinAdsAdAnalyticsReportConfiguration) GetPivotBy() SourceLinkedinAdsPivotCategory {
+ if o == nil {
+ return SourceLinkedinAdsPivotCategory("")
+ }
+ return o.PivotBy
+}
+
+func (o *SourceLinkedinAdsAdAnalyticsReportConfiguration) GetTimeGranularity() SourceLinkedinAdsTimeGranularity {
+ if o == nil {
+ return SourceLinkedinAdsTimeGranularity("")
+ }
+ return o.TimeGranularity
+}
+
+type SourceLinkedinAdsSchemasAuthMethod string
const (
- SourceLinkedinAdsAuthenticationAccessTokenAuthMethodAccessToken SourceLinkedinAdsAuthenticationAccessTokenAuthMethod = "access_token"
+ SourceLinkedinAdsSchemasAuthMethodAccessToken SourceLinkedinAdsSchemasAuthMethod = "access_token"
)
-func (e SourceLinkedinAdsAuthenticationAccessTokenAuthMethod) ToPointer() *SourceLinkedinAdsAuthenticationAccessTokenAuthMethod {
+func (e SourceLinkedinAdsSchemasAuthMethod) ToPointer() *SourceLinkedinAdsSchemasAuthMethod {
return &e
}
-func (e *SourceLinkedinAdsAuthenticationAccessTokenAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceLinkedinAdsSchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceLinkedinAdsAuthenticationAccessTokenAuthMethod(v)
+ *e = SourceLinkedinAdsSchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinAdsAuthenticationAccessTokenAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceLinkedinAdsSchemasAuthMethod: %v", v)
}
}
-type SourceLinkedinAdsAuthenticationAccessToken struct {
+type SourceLinkedinAdsAccessToken struct {
// The access token generated for your developer application. Refer to our documentation for more information.
- AccessToken string `json:"access_token"`
- AuthMethod *SourceLinkedinAdsAuthenticationAccessTokenAuthMethod `json:"auth_method,omitempty"`
+ AccessToken string `json:"access_token"`
+ authMethod *SourceLinkedinAdsSchemasAuthMethod `const:"access_token" json:"auth_method,omitempty"`
+}
+
+func (s SourceLinkedinAdsAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
}
-type SourceLinkedinAdsAuthenticationOAuth20AuthMethod string
+func (s *SourceLinkedinAdsAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinkedinAdsAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceLinkedinAdsAccessToken) GetAuthMethod() *SourceLinkedinAdsSchemasAuthMethod {
+ return SourceLinkedinAdsSchemasAuthMethodAccessToken.ToPointer()
+}
+
+type SourceLinkedinAdsAuthMethod string
const (
- SourceLinkedinAdsAuthenticationOAuth20AuthMethodOAuth20 SourceLinkedinAdsAuthenticationOAuth20AuthMethod = "oAuth2.0"
+ SourceLinkedinAdsAuthMethodOAuth20 SourceLinkedinAdsAuthMethod = "oAuth2.0"
)
-func (e SourceLinkedinAdsAuthenticationOAuth20AuthMethod) ToPointer() *SourceLinkedinAdsAuthenticationOAuth20AuthMethod {
+func (e SourceLinkedinAdsAuthMethod) ToPointer() *SourceLinkedinAdsAuthMethod {
return &e
}
-func (e *SourceLinkedinAdsAuthenticationOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceLinkedinAdsAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oAuth2.0":
- *e = SourceLinkedinAdsAuthenticationOAuth20AuthMethod(v)
+ *e = SourceLinkedinAdsAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinAdsAuthenticationOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceLinkedinAdsAuthMethod: %v", v)
}
}
-type SourceLinkedinAdsAuthenticationOAuth20 struct {
- AuthMethod *SourceLinkedinAdsAuthenticationOAuth20AuthMethod `json:"auth_method,omitempty"`
+type SourceLinkedinAdsOAuth20 struct {
+ authMethod *SourceLinkedinAdsAuthMethod `const:"oAuth2.0" json:"auth_method,omitempty"`
// The client ID of your developer application. Refer to our documentation for more information.
ClientID string `json:"client_id"`
// The client secret of your developer application. Refer to our documentation for more information.
@@ -203,56 +246,87 @@ type SourceLinkedinAdsAuthenticationOAuth20 struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceLinkedinAdsOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLinkedinAdsOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinkedinAdsOAuth20) GetAuthMethod() *SourceLinkedinAdsAuthMethod {
+ return SourceLinkedinAdsAuthMethodOAuth20.ToPointer()
+}
+
+func (o *SourceLinkedinAdsOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceLinkedinAdsOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceLinkedinAdsOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceLinkedinAdsAuthenticationType string
const (
- SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationOAuth20 SourceLinkedinAdsAuthenticationType = "source-linkedin-ads_Authentication_OAuth2.0"
- SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationAccessToken SourceLinkedinAdsAuthenticationType = "source-linkedin-ads_Authentication_Access Token"
+ SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsOAuth20 SourceLinkedinAdsAuthenticationType = "source-linkedin-ads_OAuth2.0"
+ SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAccessToken SourceLinkedinAdsAuthenticationType = "source-linkedin-ads_Access Token"
)
type SourceLinkedinAdsAuthentication struct {
- SourceLinkedinAdsAuthenticationOAuth20 *SourceLinkedinAdsAuthenticationOAuth20
- SourceLinkedinAdsAuthenticationAccessToken *SourceLinkedinAdsAuthenticationAccessToken
+ SourceLinkedinAdsOAuth20 *SourceLinkedinAdsOAuth20
+ SourceLinkedinAdsAccessToken *SourceLinkedinAdsAccessToken
Type SourceLinkedinAdsAuthenticationType
}
-func CreateSourceLinkedinAdsAuthenticationSourceLinkedinAdsAuthenticationOAuth20(sourceLinkedinAdsAuthenticationOAuth20 SourceLinkedinAdsAuthenticationOAuth20) SourceLinkedinAdsAuthentication {
- typ := SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationOAuth20
+func CreateSourceLinkedinAdsAuthenticationSourceLinkedinAdsOAuth20(sourceLinkedinAdsOAuth20 SourceLinkedinAdsOAuth20) SourceLinkedinAdsAuthentication {
+ typ := SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsOAuth20
return SourceLinkedinAdsAuthentication{
- SourceLinkedinAdsAuthenticationOAuth20: &sourceLinkedinAdsAuthenticationOAuth20,
- Type: typ,
+ SourceLinkedinAdsOAuth20: &sourceLinkedinAdsOAuth20,
+ Type: typ,
}
}
-func CreateSourceLinkedinAdsAuthenticationSourceLinkedinAdsAuthenticationAccessToken(sourceLinkedinAdsAuthenticationAccessToken SourceLinkedinAdsAuthenticationAccessToken) SourceLinkedinAdsAuthentication {
- typ := SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationAccessToken
+func CreateSourceLinkedinAdsAuthenticationSourceLinkedinAdsAccessToken(sourceLinkedinAdsAccessToken SourceLinkedinAdsAccessToken) SourceLinkedinAdsAuthentication {
+ typ := SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAccessToken
return SourceLinkedinAdsAuthentication{
- SourceLinkedinAdsAuthenticationAccessToken: &sourceLinkedinAdsAuthenticationAccessToken,
- Type: typ,
+ SourceLinkedinAdsAccessToken: &sourceLinkedinAdsAccessToken,
+ Type: typ,
}
}
func (u *SourceLinkedinAdsAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceLinkedinAdsAuthenticationAccessToken := new(SourceLinkedinAdsAuthenticationAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLinkedinAdsAuthenticationAccessToken); err == nil {
- u.SourceLinkedinAdsAuthenticationAccessToken = sourceLinkedinAdsAuthenticationAccessToken
- u.Type = SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationAccessToken
+
+ sourceLinkedinAdsAccessToken := new(SourceLinkedinAdsAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceLinkedinAdsAccessToken, "", true, true); err == nil {
+ u.SourceLinkedinAdsAccessToken = sourceLinkedinAdsAccessToken
+ u.Type = SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAccessToken
return nil
}
- sourceLinkedinAdsAuthenticationOAuth20 := new(SourceLinkedinAdsAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLinkedinAdsAuthenticationOAuth20); err == nil {
- u.SourceLinkedinAdsAuthenticationOAuth20 = sourceLinkedinAdsAuthenticationOAuth20
- u.Type = SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationOAuth20
+ sourceLinkedinAdsOAuth20 := new(SourceLinkedinAdsOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceLinkedinAdsOAuth20, "", true, true); err == nil {
+ u.SourceLinkedinAdsOAuth20 = sourceLinkedinAdsOAuth20
+ u.Type = SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsOAuth20
return nil
}
@@ -260,38 +334,38 @@ func (u *SourceLinkedinAdsAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceLinkedinAdsAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceLinkedinAdsAuthenticationAccessToken != nil {
- return json.Marshal(u.SourceLinkedinAdsAuthenticationAccessToken)
+ if u.SourceLinkedinAdsOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceLinkedinAdsOAuth20, "", true)
}
- if u.SourceLinkedinAdsAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceLinkedinAdsAuthenticationOAuth20)
+ if u.SourceLinkedinAdsAccessToken != nil {
+ return utils.MarshalJSON(u.SourceLinkedinAdsAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceLinkedinAdsLinkedinAds string
+type LinkedinAds string
const (
- SourceLinkedinAdsLinkedinAdsLinkedinAds SourceLinkedinAdsLinkedinAds = "linkedin-ads"
+ LinkedinAdsLinkedinAds LinkedinAds = "linkedin-ads"
)
-func (e SourceLinkedinAdsLinkedinAds) ToPointer() *SourceLinkedinAdsLinkedinAds {
+func (e LinkedinAds) ToPointer() *LinkedinAds {
return &e
}
-func (e *SourceLinkedinAdsLinkedinAds) UnmarshalJSON(data []byte) error {
+func (e *LinkedinAds) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "linkedin-ads":
- *e = SourceLinkedinAdsLinkedinAds(v)
+ *e = LinkedinAds(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinAdsLinkedinAds: %v", v)
+ return fmt.Errorf("invalid value for LinkedinAds: %v", v)
}
}
@@ -300,7 +374,50 @@ type SourceLinkedinAds struct {
AccountIds []int64 `json:"account_ids,omitempty"`
AdAnalyticsReports []SourceLinkedinAdsAdAnalyticsReportConfiguration `json:"ad_analytics_reports,omitempty"`
Credentials *SourceLinkedinAdsAuthentication `json:"credentials,omitempty"`
- SourceType SourceLinkedinAdsLinkedinAds `json:"sourceType"`
+ sourceType LinkedinAds `const:"linkedin-ads" json:"sourceType"`
// UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceLinkedinAds) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLinkedinAds) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinkedinAds) GetAccountIds() []int64 {
+ if o == nil {
+ return nil
+ }
+ return o.AccountIds
+}
+
+func (o *SourceLinkedinAds) GetAdAnalyticsReports() []SourceLinkedinAdsAdAnalyticsReportConfiguration {
+ if o == nil {
+ return nil
+ }
+ return o.AdAnalyticsReports
+}
+
+func (o *SourceLinkedinAds) GetCredentials() *SourceLinkedinAdsAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceLinkedinAds) GetSourceType() LinkedinAds {
+ return LinkedinAdsLinkedinAds
+}
+
+func (o *SourceLinkedinAds) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinadscreaterequest.go b/internal/sdk/pkg/models/shared/sourcelinkedinadscreaterequest.go
old mode 100755
new mode 100644
index 99a742bab..b45a57db9
--- a/internal/sdk/pkg/models/shared/sourcelinkedinadscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelinkedinadscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceLinkedinAdsCreateRequest struct {
Configuration SourceLinkedinAds `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLinkedinAdsCreateRequest) GetConfiguration() SourceLinkedinAds {
+ if o == nil {
+ return SourceLinkedinAds{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLinkedinAdsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceLinkedinAdsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLinkedinAdsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceLinkedinAdsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinadsputrequest.go b/internal/sdk/pkg/models/shared/sourcelinkedinadsputrequest.go
old mode 100755
new mode 100644
index 2bbdbf9a8..e5100c905
--- a/internal/sdk/pkg/models/shared/sourcelinkedinadsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelinkedinadsputrequest.go
@@ -7,3 +7,24 @@ type SourceLinkedinAdsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLinkedinAdsPutRequest) GetConfiguration() SourceLinkedinAdsUpdate {
+ if o == nil {
+ return SourceLinkedinAdsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLinkedinAdsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLinkedinAdsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinadsupdate.go b/internal/sdk/pkg/models/shared/sourcelinkedinadsupdate.go
old mode 100755
new mode 100644
index 27c2d63fc..014650c37
--- a/internal/sdk/pkg/models/shared/sourcelinkedinadsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcelinkedinadsupdate.go
@@ -3,45 +3,45 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory - Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.
-type SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory string
+// PivotCategory - Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.
+type PivotCategory string
const (
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryCompany SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "COMPANY"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryAccount SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "ACCOUNT"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryShare SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "SHARE"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryCampaign SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CAMPAIGN"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryCreative SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CREATIVE"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryCampaignGroup SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CAMPAIGN_GROUP"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryConversion SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CONVERSION"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryConversationNode SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CONVERSATION_NODE"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryConversationNodeOptionIndex SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CONVERSATION_NODE_OPTION_INDEX"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryServingLocation SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "SERVING_LOCATION"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryCardIndex SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CARD_INDEX"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberCompanySize SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COMPANY_SIZE"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberIndustry SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_INDUSTRY"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberSeniority SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_SENIORITY"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberJobTitle SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_JOB_TITLE "
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberJobFunction SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_JOB_FUNCTION "
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberCountryV2 SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COUNTRY_V2 "
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberRegionV2 SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_REGION_V2"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberCompany SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COMPANY"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryPlacementName SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "PLACEMENT_NAME"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryImpressionDeviceType SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "IMPRESSION_DEVICE_TYPE"
+ PivotCategoryCompany PivotCategory = "COMPANY"
+ PivotCategoryAccount PivotCategory = "ACCOUNT"
+ PivotCategoryShare PivotCategory = "SHARE"
+ PivotCategoryCampaign PivotCategory = "CAMPAIGN"
+ PivotCategoryCreative PivotCategory = "CREATIVE"
+ PivotCategoryCampaignGroup PivotCategory = "CAMPAIGN_GROUP"
+ PivotCategoryConversion PivotCategory = "CONVERSION"
+ PivotCategoryConversationNode PivotCategory = "CONVERSATION_NODE"
+ PivotCategoryConversationNodeOptionIndex PivotCategory = "CONVERSATION_NODE_OPTION_INDEX"
+ PivotCategoryServingLocation PivotCategory = "SERVING_LOCATION"
+ PivotCategoryCardIndex PivotCategory = "CARD_INDEX"
+ PivotCategoryMemberCompanySize PivotCategory = "MEMBER_COMPANY_SIZE"
+ PivotCategoryMemberIndustry PivotCategory = "MEMBER_INDUSTRY"
+ PivotCategoryMemberSeniority PivotCategory = "MEMBER_SENIORITY"
+ PivotCategoryMemberJobTitle PivotCategory = "MEMBER_JOB_TITLE "
+ PivotCategoryMemberJobFunction PivotCategory = "MEMBER_JOB_FUNCTION "
+ PivotCategoryMemberCountryV2 PivotCategory = "MEMBER_COUNTRY_V2 "
+ PivotCategoryMemberRegionV2 PivotCategory = "MEMBER_REGION_V2"
+ PivotCategoryMemberCompany PivotCategory = "MEMBER_COMPANY"
+ PivotCategoryPlacementName PivotCategory = "PLACEMENT_NAME"
+ PivotCategoryImpressionDeviceType PivotCategory = "IMPRESSION_DEVICE_TYPE"
)
-func (e SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory) ToPointer() *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory {
+func (e PivotCategory) ToPointer() *PivotCategory {
return &e
}
-func (e *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory) UnmarshalJSON(data []byte) error {
+func (e *PivotCategory) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -88,28 +88,28 @@ func (e *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory) Unm
case "PLACEMENT_NAME":
fallthrough
case "IMPRESSION_DEVICE_TYPE":
- *e = SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory(v)
+ *e = PivotCategory(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory: %v", v)
+ return fmt.Errorf("invalid value for PivotCategory: %v", v)
}
}
-// SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity - Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.
-type SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity string
+// TimeGranularity - Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.
+type TimeGranularity string
const (
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularityAll SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity = "ALL"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularityDaily SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity = "DAILY"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularityMonthly SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity = "MONTHLY"
- SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularityYearly SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity = "YEARLY"
+ TimeGranularityAll TimeGranularity = "ALL"
+ TimeGranularityDaily TimeGranularity = "DAILY"
+ TimeGranularityMonthly TimeGranularity = "MONTHLY"
+ TimeGranularityYearly TimeGranularity = "YEARLY"
)
-func (e SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity) ToPointer() *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity {
+func (e TimeGranularity) ToPointer() *TimeGranularity {
return &e
}
-func (e *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity) UnmarshalJSON(data []byte) error {
+func (e *TimeGranularity) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -122,79 +122,122 @@ func (e *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity) U
case "MONTHLY":
fallthrough
case "YEARLY":
- *e = SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity(v)
+ *e = TimeGranularity(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity: %v", v)
+ return fmt.Errorf("invalid value for TimeGranularity: %v", v)
}
}
-// SourceLinkedinAdsUpdateAdAnalyticsReportConfiguration - Config for custom ad Analytics Report
-type SourceLinkedinAdsUpdateAdAnalyticsReportConfiguration struct {
+// AdAnalyticsReportConfiguration - Config for custom ad Analytics Report
+type AdAnalyticsReportConfiguration struct {
// The name for the custom report.
Name string `json:"name"`
// Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.
- PivotBy SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory `json:"pivot_by"`
+ PivotBy PivotCategory `json:"pivot_by"`
// Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.
- TimeGranularity SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity `json:"time_granularity"`
+ TimeGranularity TimeGranularity `json:"time_granularity"`
}
-type SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod string
+func (o *AdAnalyticsReportConfiguration) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *AdAnalyticsReportConfiguration) GetPivotBy() PivotCategory {
+ if o == nil {
+ return PivotCategory("")
+ }
+ return o.PivotBy
+}
+
+func (o *AdAnalyticsReportConfiguration) GetTimeGranularity() TimeGranularity {
+ if o == nil {
+ return TimeGranularity("")
+ }
+ return o.TimeGranularity
+}
+
+type SourceLinkedinAdsUpdateSchemasAuthMethod string
const (
- SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethodAccessToken SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod = "access_token"
+ SourceLinkedinAdsUpdateSchemasAuthMethodAccessToken SourceLinkedinAdsUpdateSchemasAuthMethod = "access_token"
)
-func (e SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod) ToPointer() *SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod {
+func (e SourceLinkedinAdsUpdateSchemasAuthMethod) ToPointer() *SourceLinkedinAdsUpdateSchemasAuthMethod {
return &e
}
-func (e *SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceLinkedinAdsUpdateSchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod(v)
+ *e = SourceLinkedinAdsUpdateSchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceLinkedinAdsUpdateSchemasAuthMethod: %v", v)
}
}
-type SourceLinkedinAdsUpdateAuthenticationAccessToken struct {
+type AccessToken struct {
// The access token generated for your developer application. Refer to our documentation for more information.
- AccessToken string `json:"access_token"`
- AuthMethod *SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod `json:"auth_method,omitempty"`
+ AccessToken string `json:"access_token"`
+ authMethod *SourceLinkedinAdsUpdateSchemasAuthMethod `const:"access_token" json:"auth_method,omitempty"`
+}
+
+func (a AccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
}
-type SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod string
+func (o *AccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *AccessToken) GetAuthMethod() *SourceLinkedinAdsUpdateSchemasAuthMethod {
+ return SourceLinkedinAdsUpdateSchemasAuthMethodAccessToken.ToPointer()
+}
+
+type SourceLinkedinAdsUpdateAuthMethod string
const (
- SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethodOAuth20 SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod = "oAuth2.0"
+ SourceLinkedinAdsUpdateAuthMethodOAuth20 SourceLinkedinAdsUpdateAuthMethod = "oAuth2.0"
)
-func (e SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod) ToPointer() *SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod {
+func (e SourceLinkedinAdsUpdateAuthMethod) ToPointer() *SourceLinkedinAdsUpdateAuthMethod {
return &e
}
-func (e *SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceLinkedinAdsUpdateAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oAuth2.0":
- *e = SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod(v)
+ *e = SourceLinkedinAdsUpdateAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceLinkedinAdsUpdateAuthMethod: %v", v)
}
}
-type SourceLinkedinAdsUpdateAuthenticationOAuth20 struct {
- AuthMethod *SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod `json:"auth_method,omitempty"`
+type SourceLinkedinAdsUpdateOAuth20 struct {
+ authMethod *SourceLinkedinAdsUpdateAuthMethod `const:"oAuth2.0" json:"auth_method,omitempty"`
// The client ID of your developer application. Refer to our documentation for more information.
ClientID string `json:"client_id"`
// The client secret of your developer application. Refer to our documentation for more information.
@@ -203,56 +246,87 @@ type SourceLinkedinAdsUpdateAuthenticationOAuth20 struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceLinkedinAdsUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLinkedinAdsUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinkedinAdsUpdateOAuth20) GetAuthMethod() *SourceLinkedinAdsUpdateAuthMethod {
+ return SourceLinkedinAdsUpdateAuthMethodOAuth20.ToPointer()
+}
+
+func (o *SourceLinkedinAdsUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceLinkedinAdsUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceLinkedinAdsUpdateOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceLinkedinAdsUpdateAuthenticationType string
const (
- SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationOAuth20 SourceLinkedinAdsUpdateAuthenticationType = "source-linkedin-ads-update_Authentication_OAuth2.0"
- SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationAccessToken SourceLinkedinAdsUpdateAuthenticationType = "source-linkedin-ads-update_Authentication_Access Token"
+ SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateOAuth20 SourceLinkedinAdsUpdateAuthenticationType = "source-linkedin-ads-update_OAuth2.0"
+ SourceLinkedinAdsUpdateAuthenticationTypeAccessToken SourceLinkedinAdsUpdateAuthenticationType = "Access Token"
)
type SourceLinkedinAdsUpdateAuthentication struct {
- SourceLinkedinAdsUpdateAuthenticationOAuth20 *SourceLinkedinAdsUpdateAuthenticationOAuth20
- SourceLinkedinAdsUpdateAuthenticationAccessToken *SourceLinkedinAdsUpdateAuthenticationAccessToken
+ SourceLinkedinAdsUpdateOAuth20 *SourceLinkedinAdsUpdateOAuth20
+ AccessToken *AccessToken
Type SourceLinkedinAdsUpdateAuthenticationType
}
-func CreateSourceLinkedinAdsUpdateAuthenticationSourceLinkedinAdsUpdateAuthenticationOAuth20(sourceLinkedinAdsUpdateAuthenticationOAuth20 SourceLinkedinAdsUpdateAuthenticationOAuth20) SourceLinkedinAdsUpdateAuthentication {
- typ := SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationOAuth20
+func CreateSourceLinkedinAdsUpdateAuthenticationSourceLinkedinAdsUpdateOAuth20(sourceLinkedinAdsUpdateOAuth20 SourceLinkedinAdsUpdateOAuth20) SourceLinkedinAdsUpdateAuthentication {
+ typ := SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateOAuth20
return SourceLinkedinAdsUpdateAuthentication{
- SourceLinkedinAdsUpdateAuthenticationOAuth20: &sourceLinkedinAdsUpdateAuthenticationOAuth20,
- Type: typ,
+ SourceLinkedinAdsUpdateOAuth20: &sourceLinkedinAdsUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceLinkedinAdsUpdateAuthenticationSourceLinkedinAdsUpdateAuthenticationAccessToken(sourceLinkedinAdsUpdateAuthenticationAccessToken SourceLinkedinAdsUpdateAuthenticationAccessToken) SourceLinkedinAdsUpdateAuthentication {
- typ := SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationAccessToken
+func CreateSourceLinkedinAdsUpdateAuthenticationAccessToken(accessToken AccessToken) SourceLinkedinAdsUpdateAuthentication {
+ typ := SourceLinkedinAdsUpdateAuthenticationTypeAccessToken
return SourceLinkedinAdsUpdateAuthentication{
- SourceLinkedinAdsUpdateAuthenticationAccessToken: &sourceLinkedinAdsUpdateAuthenticationAccessToken,
- Type: typ,
+ AccessToken: &accessToken,
+ Type: typ,
}
}
func (u *SourceLinkedinAdsUpdateAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceLinkedinAdsUpdateAuthenticationAccessToken := new(SourceLinkedinAdsUpdateAuthenticationAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLinkedinAdsUpdateAuthenticationAccessToken); err == nil {
- u.SourceLinkedinAdsUpdateAuthenticationAccessToken = sourceLinkedinAdsUpdateAuthenticationAccessToken
- u.Type = SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationAccessToken
+
+ accessToken := new(AccessToken)
+ if err := utils.UnmarshalJSON(data, &accessToken, "", true, true); err == nil {
+ u.AccessToken = accessToken
+ u.Type = SourceLinkedinAdsUpdateAuthenticationTypeAccessToken
return nil
}
- sourceLinkedinAdsUpdateAuthenticationOAuth20 := new(SourceLinkedinAdsUpdateAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLinkedinAdsUpdateAuthenticationOAuth20); err == nil {
- u.SourceLinkedinAdsUpdateAuthenticationOAuth20 = sourceLinkedinAdsUpdateAuthenticationOAuth20
- u.Type = SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationOAuth20
+ sourceLinkedinAdsUpdateOAuth20 := new(SourceLinkedinAdsUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceLinkedinAdsUpdateOAuth20, "", true, true); err == nil {
+ u.SourceLinkedinAdsUpdateOAuth20 = sourceLinkedinAdsUpdateOAuth20
+ u.Type = SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateOAuth20
return nil
}
@@ -260,22 +334,61 @@ func (u *SourceLinkedinAdsUpdateAuthentication) UnmarshalJSON(data []byte) error
}
func (u SourceLinkedinAdsUpdateAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceLinkedinAdsUpdateAuthenticationAccessToken != nil {
- return json.Marshal(u.SourceLinkedinAdsUpdateAuthenticationAccessToken)
+ if u.SourceLinkedinAdsUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceLinkedinAdsUpdateOAuth20, "", true)
}
- if u.SourceLinkedinAdsUpdateAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceLinkedinAdsUpdateAuthenticationOAuth20)
+ if u.AccessToken != nil {
+ return utils.MarshalJSON(u.AccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceLinkedinAdsUpdate struct {
// Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs.
- AccountIds []int64 `json:"account_ids,omitempty"`
- AdAnalyticsReports []SourceLinkedinAdsUpdateAdAnalyticsReportConfiguration `json:"ad_analytics_reports,omitempty"`
- Credentials *SourceLinkedinAdsUpdateAuthentication `json:"credentials,omitempty"`
+ AccountIds []int64 `json:"account_ids,omitempty"`
+ AdAnalyticsReports []AdAnalyticsReportConfiguration `json:"ad_analytics_reports,omitempty"`
+ Credentials *SourceLinkedinAdsUpdateAuthentication `json:"credentials,omitempty"`
// UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceLinkedinAdsUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLinkedinAdsUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinkedinAdsUpdate) GetAccountIds() []int64 {
+ if o == nil {
+ return nil
+ }
+ return o.AccountIds
+}
+
+func (o *SourceLinkedinAdsUpdate) GetAdAnalyticsReports() []AdAnalyticsReportConfiguration {
+ if o == nil {
+ return nil
+ }
+ return o.AdAnalyticsReports
+}
+
+func (o *SourceLinkedinAdsUpdate) GetCredentials() *SourceLinkedinAdsUpdateAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceLinkedinAdsUpdate) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinpages.go b/internal/sdk/pkg/models/shared/sourcelinkedinpages.go
old mode 100755
new mode 100644
index d57609355..ef707a058
--- a/internal/sdk/pkg/models/shared/sourcelinkedinpages.go
+++ b/internal/sdk/pkg/models/shared/sourcelinkedinpages.go
@@ -3,68 +3,90 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceLinkedinPagesAuthenticationAccessTokenAuthMethod string
+type SourceLinkedinPagesSchemasAuthMethod string
const (
- SourceLinkedinPagesAuthenticationAccessTokenAuthMethodAccessToken SourceLinkedinPagesAuthenticationAccessTokenAuthMethod = "access_token"
+ SourceLinkedinPagesSchemasAuthMethodAccessToken SourceLinkedinPagesSchemasAuthMethod = "access_token"
)
-func (e SourceLinkedinPagesAuthenticationAccessTokenAuthMethod) ToPointer() *SourceLinkedinPagesAuthenticationAccessTokenAuthMethod {
+func (e SourceLinkedinPagesSchemasAuthMethod) ToPointer() *SourceLinkedinPagesSchemasAuthMethod {
return &e
}
-func (e *SourceLinkedinPagesAuthenticationAccessTokenAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceLinkedinPagesSchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceLinkedinPagesAuthenticationAccessTokenAuthMethod(v)
+ *e = SourceLinkedinPagesSchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinPagesAuthenticationAccessTokenAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceLinkedinPagesSchemasAuthMethod: %v", v)
}
}
-type SourceLinkedinPagesAuthenticationAccessToken struct {
+type SourceLinkedinPagesAccessToken struct {
// The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
- AccessToken string `json:"access_token"`
- AuthMethod *SourceLinkedinPagesAuthenticationAccessTokenAuthMethod `json:"auth_method,omitempty"`
+ AccessToken string `json:"access_token"`
+ authMethod *SourceLinkedinPagesSchemasAuthMethod `const:"access_token" json:"auth_method,omitempty"`
}
-type SourceLinkedinPagesAuthenticationOAuth20AuthMethod string
+func (s SourceLinkedinPagesAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLinkedinPagesAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinkedinPagesAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceLinkedinPagesAccessToken) GetAuthMethod() *SourceLinkedinPagesSchemasAuthMethod {
+ return SourceLinkedinPagesSchemasAuthMethodAccessToken.ToPointer()
+}
+
+type SourceLinkedinPagesAuthMethod string
const (
- SourceLinkedinPagesAuthenticationOAuth20AuthMethodOAuth20 SourceLinkedinPagesAuthenticationOAuth20AuthMethod = "oAuth2.0"
+ SourceLinkedinPagesAuthMethodOAuth20 SourceLinkedinPagesAuthMethod = "oAuth2.0"
)
-func (e SourceLinkedinPagesAuthenticationOAuth20AuthMethod) ToPointer() *SourceLinkedinPagesAuthenticationOAuth20AuthMethod {
+func (e SourceLinkedinPagesAuthMethod) ToPointer() *SourceLinkedinPagesAuthMethod {
return &e
}
-func (e *SourceLinkedinPagesAuthenticationOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceLinkedinPagesAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oAuth2.0":
- *e = SourceLinkedinPagesAuthenticationOAuth20AuthMethod(v)
+ *e = SourceLinkedinPagesAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinPagesAuthenticationOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceLinkedinPagesAuthMethod: %v", v)
}
}
-type SourceLinkedinPagesAuthenticationOAuth20 struct {
- AuthMethod *SourceLinkedinPagesAuthenticationOAuth20AuthMethod `json:"auth_method,omitempty"`
+type SourceLinkedinPagesOAuth20 struct {
+ authMethod *SourceLinkedinPagesAuthMethod `const:"oAuth2.0" json:"auth_method,omitempty"`
// The client ID of the LinkedIn developer application.
ClientID string `json:"client_id"`
// The client secret of the LinkedIn developer application.
@@ -73,56 +95,87 @@ type SourceLinkedinPagesAuthenticationOAuth20 struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceLinkedinPagesOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLinkedinPagesOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinkedinPagesOAuth20) GetAuthMethod() *SourceLinkedinPagesAuthMethod {
+ return SourceLinkedinPagesAuthMethodOAuth20.ToPointer()
+}
+
+func (o *SourceLinkedinPagesOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceLinkedinPagesOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceLinkedinPagesOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceLinkedinPagesAuthenticationType string
const (
- SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAuthenticationOAuth20 SourceLinkedinPagesAuthenticationType = "source-linkedin-pages_Authentication_OAuth2.0"
- SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAuthenticationAccessToken SourceLinkedinPagesAuthenticationType = "source-linkedin-pages_Authentication_Access token"
+ SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesOAuth20 SourceLinkedinPagesAuthenticationType = "source-linkedin-pages_OAuth2.0"
+ SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAccessToken SourceLinkedinPagesAuthenticationType = "source-linkedin-pages_Access token"
)
type SourceLinkedinPagesAuthentication struct {
- SourceLinkedinPagesAuthenticationOAuth20 *SourceLinkedinPagesAuthenticationOAuth20
- SourceLinkedinPagesAuthenticationAccessToken *SourceLinkedinPagesAuthenticationAccessToken
+ SourceLinkedinPagesOAuth20 *SourceLinkedinPagesOAuth20
+ SourceLinkedinPagesAccessToken *SourceLinkedinPagesAccessToken
Type SourceLinkedinPagesAuthenticationType
}
-func CreateSourceLinkedinPagesAuthenticationSourceLinkedinPagesAuthenticationOAuth20(sourceLinkedinPagesAuthenticationOAuth20 SourceLinkedinPagesAuthenticationOAuth20) SourceLinkedinPagesAuthentication {
- typ := SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAuthenticationOAuth20
+func CreateSourceLinkedinPagesAuthenticationSourceLinkedinPagesOAuth20(sourceLinkedinPagesOAuth20 SourceLinkedinPagesOAuth20) SourceLinkedinPagesAuthentication {
+ typ := SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesOAuth20
return SourceLinkedinPagesAuthentication{
- SourceLinkedinPagesAuthenticationOAuth20: &sourceLinkedinPagesAuthenticationOAuth20,
- Type: typ,
+ SourceLinkedinPagesOAuth20: &sourceLinkedinPagesOAuth20,
+ Type: typ,
}
}
-func CreateSourceLinkedinPagesAuthenticationSourceLinkedinPagesAuthenticationAccessToken(sourceLinkedinPagesAuthenticationAccessToken SourceLinkedinPagesAuthenticationAccessToken) SourceLinkedinPagesAuthentication {
- typ := SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAuthenticationAccessToken
+func CreateSourceLinkedinPagesAuthenticationSourceLinkedinPagesAccessToken(sourceLinkedinPagesAccessToken SourceLinkedinPagesAccessToken) SourceLinkedinPagesAuthentication {
+ typ := SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAccessToken
return SourceLinkedinPagesAuthentication{
- SourceLinkedinPagesAuthenticationAccessToken: &sourceLinkedinPagesAuthenticationAccessToken,
- Type: typ,
+ SourceLinkedinPagesAccessToken: &sourceLinkedinPagesAccessToken,
+ Type: typ,
}
}
func (u *SourceLinkedinPagesAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceLinkedinPagesAuthenticationAccessToken := new(SourceLinkedinPagesAuthenticationAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLinkedinPagesAuthenticationAccessToken); err == nil {
- u.SourceLinkedinPagesAuthenticationAccessToken = sourceLinkedinPagesAuthenticationAccessToken
- u.Type = SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAuthenticationAccessToken
+
+ sourceLinkedinPagesAccessToken := new(SourceLinkedinPagesAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceLinkedinPagesAccessToken, "", true, true); err == nil {
+ u.SourceLinkedinPagesAccessToken = sourceLinkedinPagesAccessToken
+ u.Type = SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAccessToken
return nil
}
- sourceLinkedinPagesAuthenticationOAuth20 := new(SourceLinkedinPagesAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLinkedinPagesAuthenticationOAuth20); err == nil {
- u.SourceLinkedinPagesAuthenticationOAuth20 = sourceLinkedinPagesAuthenticationOAuth20
- u.Type = SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAuthenticationOAuth20
+ sourceLinkedinPagesOAuth20 := new(SourceLinkedinPagesOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceLinkedinPagesOAuth20, "", true, true); err == nil {
+ u.SourceLinkedinPagesOAuth20 = sourceLinkedinPagesOAuth20
+ u.Type = SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesOAuth20
return nil
}
@@ -130,44 +183,73 @@ func (u *SourceLinkedinPagesAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceLinkedinPagesAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceLinkedinPagesAuthenticationAccessToken != nil {
- return json.Marshal(u.SourceLinkedinPagesAuthenticationAccessToken)
+ if u.SourceLinkedinPagesOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceLinkedinPagesOAuth20, "", true)
}
- if u.SourceLinkedinPagesAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceLinkedinPagesAuthenticationOAuth20)
+ if u.SourceLinkedinPagesAccessToken != nil {
+ return utils.MarshalJSON(u.SourceLinkedinPagesAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceLinkedinPagesLinkedinPages string
+type LinkedinPages string
const (
- SourceLinkedinPagesLinkedinPagesLinkedinPages SourceLinkedinPagesLinkedinPages = "linkedin-pages"
+ LinkedinPagesLinkedinPages LinkedinPages = "linkedin-pages"
)
-func (e SourceLinkedinPagesLinkedinPages) ToPointer() *SourceLinkedinPagesLinkedinPages {
+func (e LinkedinPages) ToPointer() *LinkedinPages {
return &e
}
-func (e *SourceLinkedinPagesLinkedinPages) UnmarshalJSON(data []byte) error {
+func (e *LinkedinPages) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "linkedin-pages":
- *e = SourceLinkedinPagesLinkedinPages(v)
+ *e = LinkedinPages(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinPagesLinkedinPages: %v", v)
+ return fmt.Errorf("invalid value for LinkedinPages: %v", v)
}
}
type SourceLinkedinPages struct {
Credentials *SourceLinkedinPagesAuthentication `json:"credentials,omitempty"`
// Specify the Organization ID
- OrgID string `json:"org_id"`
- SourceType SourceLinkedinPagesLinkedinPages `json:"sourceType"`
+ OrgID string `json:"org_id"`
+ sourceType LinkedinPages `const:"linkedin-pages" json:"sourceType"`
+}
+
+func (s SourceLinkedinPages) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLinkedinPages) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinkedinPages) GetCredentials() *SourceLinkedinPagesAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceLinkedinPages) GetOrgID() string {
+ if o == nil {
+ return ""
+ }
+ return o.OrgID
+}
+
+func (o *SourceLinkedinPages) GetSourceType() LinkedinPages {
+ return LinkedinPagesLinkedinPages
}
diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinpagescreaterequest.go b/internal/sdk/pkg/models/shared/sourcelinkedinpagescreaterequest.go
old mode 100755
new mode 100644
index 80193e1c2..c982034f7
--- a/internal/sdk/pkg/models/shared/sourcelinkedinpagescreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelinkedinpagescreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceLinkedinPagesCreateRequest struct {
Configuration SourceLinkedinPages `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLinkedinPagesCreateRequest) GetConfiguration() SourceLinkedinPages {
+ if o == nil {
+ return SourceLinkedinPages{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLinkedinPagesCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceLinkedinPagesCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLinkedinPagesCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceLinkedinPagesCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinpagesputrequest.go b/internal/sdk/pkg/models/shared/sourcelinkedinpagesputrequest.go
old mode 100755
new mode 100644
index 948253496..be23b2efe
--- a/internal/sdk/pkg/models/shared/sourcelinkedinpagesputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelinkedinpagesputrequest.go
@@ -7,3 +7,24 @@ type SourceLinkedinPagesPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLinkedinPagesPutRequest) GetConfiguration() SourceLinkedinPagesUpdate {
+ if o == nil {
+ return SourceLinkedinPagesUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLinkedinPagesPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLinkedinPagesPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinpagesupdate.go b/internal/sdk/pkg/models/shared/sourcelinkedinpagesupdate.go
old mode 100755
new mode 100644
index d6272b51c..481c85d5a
--- a/internal/sdk/pkg/models/shared/sourcelinkedinpagesupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcelinkedinpagesupdate.go
@@ -3,68 +3,90 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceLinkedinPagesUpdateAuthenticationAccessTokenAuthMethod string
+type SourceLinkedinPagesUpdateSchemasAuthMethod string
const (
- SourceLinkedinPagesUpdateAuthenticationAccessTokenAuthMethodAccessToken SourceLinkedinPagesUpdateAuthenticationAccessTokenAuthMethod = "access_token"
+ SourceLinkedinPagesUpdateSchemasAuthMethodAccessToken SourceLinkedinPagesUpdateSchemasAuthMethod = "access_token"
)
-func (e SourceLinkedinPagesUpdateAuthenticationAccessTokenAuthMethod) ToPointer() *SourceLinkedinPagesUpdateAuthenticationAccessTokenAuthMethod {
+func (e SourceLinkedinPagesUpdateSchemasAuthMethod) ToPointer() *SourceLinkedinPagesUpdateSchemasAuthMethod {
return &e
}
-func (e *SourceLinkedinPagesUpdateAuthenticationAccessTokenAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceLinkedinPagesUpdateSchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceLinkedinPagesUpdateAuthenticationAccessTokenAuthMethod(v)
+ *e = SourceLinkedinPagesUpdateSchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinPagesUpdateAuthenticationAccessTokenAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceLinkedinPagesUpdateSchemasAuthMethod: %v", v)
}
}
-type SourceLinkedinPagesUpdateAuthenticationAccessToken struct {
+type SourceLinkedinPagesUpdateAccessToken struct {
// The token value generated using the LinkedIn Developers OAuth Token Tools. See the docs to obtain yours.
- AccessToken string `json:"access_token"`
- AuthMethod *SourceLinkedinPagesUpdateAuthenticationAccessTokenAuthMethod `json:"auth_method,omitempty"`
+ AccessToken string `json:"access_token"`
+ authMethod *SourceLinkedinPagesUpdateSchemasAuthMethod `const:"access_token" json:"auth_method,omitempty"`
}
-type SourceLinkedinPagesUpdateAuthenticationOAuth20AuthMethod string
+func (s SourceLinkedinPagesUpdateAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLinkedinPagesUpdateAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinkedinPagesUpdateAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceLinkedinPagesUpdateAccessToken) GetAuthMethod() *SourceLinkedinPagesUpdateSchemasAuthMethod {
+ return SourceLinkedinPagesUpdateSchemasAuthMethodAccessToken.ToPointer()
+}
+
+type SourceLinkedinPagesUpdateAuthMethod string
const (
- SourceLinkedinPagesUpdateAuthenticationOAuth20AuthMethodOAuth20 SourceLinkedinPagesUpdateAuthenticationOAuth20AuthMethod = "oAuth2.0"
+ SourceLinkedinPagesUpdateAuthMethodOAuth20 SourceLinkedinPagesUpdateAuthMethod = "oAuth2.0"
)
-func (e SourceLinkedinPagesUpdateAuthenticationOAuth20AuthMethod) ToPointer() *SourceLinkedinPagesUpdateAuthenticationOAuth20AuthMethod {
+func (e SourceLinkedinPagesUpdateAuthMethod) ToPointer() *SourceLinkedinPagesUpdateAuthMethod {
return &e
}
-func (e *SourceLinkedinPagesUpdateAuthenticationOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceLinkedinPagesUpdateAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oAuth2.0":
- *e = SourceLinkedinPagesUpdateAuthenticationOAuth20AuthMethod(v)
+ *e = SourceLinkedinPagesUpdateAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinkedinPagesUpdateAuthenticationOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceLinkedinPagesUpdateAuthMethod: %v", v)
}
}
-type SourceLinkedinPagesUpdateAuthenticationOAuth20 struct {
- AuthMethod *SourceLinkedinPagesUpdateAuthenticationOAuth20AuthMethod `json:"auth_method,omitempty"`
+type SourceLinkedinPagesUpdateOAuth20 struct {
+ authMethod *SourceLinkedinPagesUpdateAuthMethod `const:"oAuth2.0" json:"auth_method,omitempty"`
// The client ID of the LinkedIn developer application.
ClientID string `json:"client_id"`
// The client secret of the LinkedIn developer application.
@@ -73,56 +95,87 @@ type SourceLinkedinPagesUpdateAuthenticationOAuth20 struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceLinkedinPagesUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLinkedinPagesUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinkedinPagesUpdateOAuth20) GetAuthMethod() *SourceLinkedinPagesUpdateAuthMethod {
+ return SourceLinkedinPagesUpdateAuthMethodOAuth20.ToPointer()
+}
+
+func (o *SourceLinkedinPagesUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceLinkedinPagesUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceLinkedinPagesUpdateOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceLinkedinPagesUpdateAuthenticationType string
const (
- SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAuthenticationOAuth20 SourceLinkedinPagesUpdateAuthenticationType = "source-linkedin-pages-update_Authentication_OAuth2.0"
- SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAuthenticationAccessToken SourceLinkedinPagesUpdateAuthenticationType = "source-linkedin-pages-update_Authentication_Access token"
+ SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateOAuth20 SourceLinkedinPagesUpdateAuthenticationType = "source-linkedin-pages-update_OAuth2.0"
+ SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAccessToken SourceLinkedinPagesUpdateAuthenticationType = "source-linkedin-pages-update_Access token"
)
type SourceLinkedinPagesUpdateAuthentication struct {
- SourceLinkedinPagesUpdateAuthenticationOAuth20 *SourceLinkedinPagesUpdateAuthenticationOAuth20
- SourceLinkedinPagesUpdateAuthenticationAccessToken *SourceLinkedinPagesUpdateAuthenticationAccessToken
+ SourceLinkedinPagesUpdateOAuth20 *SourceLinkedinPagesUpdateOAuth20
+ SourceLinkedinPagesUpdateAccessToken *SourceLinkedinPagesUpdateAccessToken
Type SourceLinkedinPagesUpdateAuthenticationType
}
-func CreateSourceLinkedinPagesUpdateAuthenticationSourceLinkedinPagesUpdateAuthenticationOAuth20(sourceLinkedinPagesUpdateAuthenticationOAuth20 SourceLinkedinPagesUpdateAuthenticationOAuth20) SourceLinkedinPagesUpdateAuthentication {
- typ := SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAuthenticationOAuth20
+func CreateSourceLinkedinPagesUpdateAuthenticationSourceLinkedinPagesUpdateOAuth20(sourceLinkedinPagesUpdateOAuth20 SourceLinkedinPagesUpdateOAuth20) SourceLinkedinPagesUpdateAuthentication {
+ typ := SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateOAuth20
return SourceLinkedinPagesUpdateAuthentication{
- SourceLinkedinPagesUpdateAuthenticationOAuth20: &sourceLinkedinPagesUpdateAuthenticationOAuth20,
- Type: typ,
+ SourceLinkedinPagesUpdateOAuth20: &sourceLinkedinPagesUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceLinkedinPagesUpdateAuthenticationSourceLinkedinPagesUpdateAuthenticationAccessToken(sourceLinkedinPagesUpdateAuthenticationAccessToken SourceLinkedinPagesUpdateAuthenticationAccessToken) SourceLinkedinPagesUpdateAuthentication {
- typ := SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAuthenticationAccessToken
+func CreateSourceLinkedinPagesUpdateAuthenticationSourceLinkedinPagesUpdateAccessToken(sourceLinkedinPagesUpdateAccessToken SourceLinkedinPagesUpdateAccessToken) SourceLinkedinPagesUpdateAuthentication {
+ typ := SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAccessToken
return SourceLinkedinPagesUpdateAuthentication{
- SourceLinkedinPagesUpdateAuthenticationAccessToken: &sourceLinkedinPagesUpdateAuthenticationAccessToken,
- Type: typ,
+ SourceLinkedinPagesUpdateAccessToken: &sourceLinkedinPagesUpdateAccessToken,
+ Type: typ,
}
}
func (u *SourceLinkedinPagesUpdateAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceLinkedinPagesUpdateAuthenticationAccessToken := new(SourceLinkedinPagesUpdateAuthenticationAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLinkedinPagesUpdateAuthenticationAccessToken); err == nil {
- u.SourceLinkedinPagesUpdateAuthenticationAccessToken = sourceLinkedinPagesUpdateAuthenticationAccessToken
- u.Type = SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAuthenticationAccessToken
+
+ sourceLinkedinPagesUpdateAccessToken := new(SourceLinkedinPagesUpdateAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceLinkedinPagesUpdateAccessToken, "", true, true); err == nil {
+ u.SourceLinkedinPagesUpdateAccessToken = sourceLinkedinPagesUpdateAccessToken
+ u.Type = SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAccessToken
return nil
}
- sourceLinkedinPagesUpdateAuthenticationOAuth20 := new(SourceLinkedinPagesUpdateAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceLinkedinPagesUpdateAuthenticationOAuth20); err == nil {
- u.SourceLinkedinPagesUpdateAuthenticationOAuth20 = sourceLinkedinPagesUpdateAuthenticationOAuth20
- u.Type = SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAuthenticationOAuth20
+ sourceLinkedinPagesUpdateOAuth20 := new(SourceLinkedinPagesUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceLinkedinPagesUpdateOAuth20, "", true, true); err == nil {
+ u.SourceLinkedinPagesUpdateOAuth20 = sourceLinkedinPagesUpdateOAuth20
+ u.Type = SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateOAuth20
return nil
}
@@ -130,15 +183,15 @@ func (u *SourceLinkedinPagesUpdateAuthentication) UnmarshalJSON(data []byte) err
}
func (u SourceLinkedinPagesUpdateAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceLinkedinPagesUpdateAuthenticationAccessToken != nil {
- return json.Marshal(u.SourceLinkedinPagesUpdateAuthenticationAccessToken)
+ if u.SourceLinkedinPagesUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceLinkedinPagesUpdateOAuth20, "", true)
}
- if u.SourceLinkedinPagesUpdateAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceLinkedinPagesUpdateAuthenticationOAuth20)
+ if u.SourceLinkedinPagesUpdateAccessToken != nil {
+ return utils.MarshalJSON(u.SourceLinkedinPagesUpdateAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceLinkedinPagesUpdate struct {
@@ -146,3 +199,17 @@ type SourceLinkedinPagesUpdate struct {
// Specify the Organization ID
OrgID string `json:"org_id"`
}
+
+func (o *SourceLinkedinPagesUpdate) GetCredentials() *SourceLinkedinPagesUpdateAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceLinkedinPagesUpdate) GetOrgID() string {
+ if o == nil {
+ return ""
+ }
+ return o.OrgID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelinnworks.go b/internal/sdk/pkg/models/shared/sourcelinnworks.go
old mode 100755
new mode 100644
index aec60ff9e..628c963fa
--- a/internal/sdk/pkg/models/shared/sourcelinnworks.go
+++ b/internal/sdk/pkg/models/shared/sourcelinnworks.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceLinnworksLinnworks string
+type Linnworks string
const (
- SourceLinnworksLinnworksLinnworks SourceLinnworksLinnworks = "linnworks"
+ LinnworksLinnworks Linnworks = "linnworks"
)
-func (e SourceLinnworksLinnworks) ToPointer() *SourceLinnworksLinnworks {
+func (e Linnworks) ToPointer() *Linnworks {
return &e
}
-func (e *SourceLinnworksLinnworks) UnmarshalJSON(data []byte) error {
+func (e *Linnworks) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "linnworks":
- *e = SourceLinnworksLinnworks(v)
+ *e = Linnworks(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLinnworksLinnworks: %v", v)
+ return fmt.Errorf("invalid value for Linnworks: %v", v)
}
}
@@ -36,9 +37,52 @@ type SourceLinnworks struct {
// Linnworks Application ID
ApplicationID string `json:"application_id"`
// Linnworks Application Secret
- ApplicationSecret string `json:"application_secret"`
- SourceType SourceLinnworksLinnworks `json:"sourceType"`
+ ApplicationSecret string `json:"application_secret"`
+ sourceType Linnworks `const:"linnworks" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
Token string `json:"token"`
}
+
+func (s SourceLinnworks) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLinnworks) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinnworks) GetApplicationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ApplicationID
+}
+
+func (o *SourceLinnworks) GetApplicationSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ApplicationSecret
+}
+
+func (o *SourceLinnworks) GetSourceType() Linnworks {
+ return LinnworksLinnworks
+}
+
+func (o *SourceLinnworks) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceLinnworks) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelinnworkscreaterequest.go b/internal/sdk/pkg/models/shared/sourcelinnworkscreaterequest.go
old mode 100755
new mode 100644
index c8722ff2d..1f5165a38
--- a/internal/sdk/pkg/models/shared/sourcelinnworkscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelinnworkscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceLinnworksCreateRequest struct {
Configuration SourceLinnworks `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLinnworksCreateRequest) GetConfiguration() SourceLinnworks {
+ if o == nil {
+ return SourceLinnworks{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLinnworksCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceLinnworksCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLinnworksCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceLinnworksCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelinnworksputrequest.go b/internal/sdk/pkg/models/shared/sourcelinnworksputrequest.go
old mode 100755
new mode 100644
index b7ceaf0a6..6a5ec6b73
--- a/internal/sdk/pkg/models/shared/sourcelinnworksputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelinnworksputrequest.go
@@ -7,3 +7,24 @@ type SourceLinnworksPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLinnworksPutRequest) GetConfiguration() SourceLinnworksUpdate {
+ if o == nil {
+ return SourceLinnworksUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLinnworksPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLinnworksPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelinnworksupdate.go b/internal/sdk/pkg/models/shared/sourcelinnworksupdate.go
old mode 100755
new mode 100644
index 4e9ede65e..81a2e6d36
--- a/internal/sdk/pkg/models/shared/sourcelinnworksupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcelinnworksupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -15,3 +16,42 @@ type SourceLinnworksUpdate struct {
StartDate time.Time `json:"start_date"`
Token string `json:"token"`
}
+
+func (s SourceLinnworksUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLinnworksUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLinnworksUpdate) GetApplicationID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ApplicationID
+}
+
+func (o *SourceLinnworksUpdate) GetApplicationSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ApplicationSecret
+}
+
+func (o *SourceLinnworksUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceLinnworksUpdate) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelokalise.go b/internal/sdk/pkg/models/shared/sourcelokalise.go
old mode 100755
new mode 100644
index c5bf13db7..39313d53c
--- a/internal/sdk/pkg/models/shared/sourcelokalise.go
+++ b/internal/sdk/pkg/models/shared/sourcelokalise.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceLokaliseLokalise string
+type Lokalise string
const (
- SourceLokaliseLokaliseLokalise SourceLokaliseLokalise = "lokalise"
+ LokaliseLokalise Lokalise = "lokalise"
)
-func (e SourceLokaliseLokalise) ToPointer() *SourceLokaliseLokalise {
+func (e Lokalise) ToPointer() *Lokalise {
return &e
}
-func (e *SourceLokaliseLokalise) UnmarshalJSON(data []byte) error {
+func (e *Lokalise) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "lokalise":
- *e = SourceLokaliseLokalise(v)
+ *e = Lokalise(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceLokaliseLokalise: %v", v)
+ return fmt.Errorf("invalid value for Lokalise: %v", v)
}
}
@@ -35,6 +36,35 @@ type SourceLokalise struct {
// Lokalise API Key with read-access. Available at Profile settings > API tokens. See here.
APIKey string `json:"api_key"`
// Lokalise project ID. Available at Project Settings > General.
- ProjectID string `json:"project_id"`
- SourceType SourceLokaliseLokalise `json:"sourceType"`
+ ProjectID string `json:"project_id"`
+ sourceType Lokalise `const:"lokalise" json:"sourceType"`
+}
+
+func (s SourceLokalise) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceLokalise) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceLokalise) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceLokalise) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
+
+func (o *SourceLokalise) GetSourceType() Lokalise {
+ return LokaliseLokalise
}
diff --git a/internal/sdk/pkg/models/shared/sourcelokalisecreaterequest.go b/internal/sdk/pkg/models/shared/sourcelokalisecreaterequest.go
old mode 100755
new mode 100644
index a4148de0f..2db2babf6
--- a/internal/sdk/pkg/models/shared/sourcelokalisecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelokalisecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceLokaliseCreateRequest struct {
Configuration SourceLokalise `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLokaliseCreateRequest) GetConfiguration() SourceLokalise {
+ if o == nil {
+ return SourceLokalise{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLokaliseCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceLokaliseCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLokaliseCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceLokaliseCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelokaliseputrequest.go b/internal/sdk/pkg/models/shared/sourcelokaliseputrequest.go
old mode 100755
new mode 100644
index 04a057c3e..b28aff4c7
--- a/internal/sdk/pkg/models/shared/sourcelokaliseputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcelokaliseputrequest.go
@@ -7,3 +7,24 @@ type SourceLokalisePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceLokalisePutRequest) GetConfiguration() SourceLokaliseUpdate {
+ if o == nil {
+ return SourceLokaliseUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceLokalisePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceLokalisePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcelokaliseupdate.go b/internal/sdk/pkg/models/shared/sourcelokaliseupdate.go
old mode 100755
new mode 100644
index 2f83fa8ee..4a51aee99
--- a/internal/sdk/pkg/models/shared/sourcelokaliseupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcelokaliseupdate.go
@@ -8,3 +8,17 @@ type SourceLokaliseUpdate struct {
// Lokalise project ID. Available at Project Settings > General.
ProjectID string `json:"project_id"`
}
+
+func (o *SourceLokaliseUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceLokaliseUpdate) GetProjectID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemailchimp.go b/internal/sdk/pkg/models/shared/sourcemailchimp.go
old mode 100755
new mode 100644
index 1bf0ec2a8..45b3ff259
--- a/internal/sdk/pkg/models/shared/sourcemailchimp.go
+++ b/internal/sdk/pkg/models/shared/sourcemailchimp.go
@@ -3,126 +3,179 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMailchimpAuthenticationAPIKeyAuthType string
+type SourceMailchimpSchemasAuthType string
const (
- SourceMailchimpAuthenticationAPIKeyAuthTypeApikey SourceMailchimpAuthenticationAPIKeyAuthType = "apikey"
+ SourceMailchimpSchemasAuthTypeApikey SourceMailchimpSchemasAuthType = "apikey"
)
-func (e SourceMailchimpAuthenticationAPIKeyAuthType) ToPointer() *SourceMailchimpAuthenticationAPIKeyAuthType {
+func (e SourceMailchimpSchemasAuthType) ToPointer() *SourceMailchimpSchemasAuthType {
return &e
}
-func (e *SourceMailchimpAuthenticationAPIKeyAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMailchimpSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "apikey":
- *e = SourceMailchimpAuthenticationAPIKeyAuthType(v)
+ *e = SourceMailchimpSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMailchimpAuthenticationAPIKeyAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMailchimpSchemasAuthType: %v", v)
}
}
-type SourceMailchimpAuthenticationAPIKey struct {
+type SourceMailchimpAPIKey struct {
// Mailchimp API Key. See the docs for information on how to generate this key.
- Apikey string `json:"apikey"`
- AuthType SourceMailchimpAuthenticationAPIKeyAuthType `json:"auth_type"`
+ Apikey string `json:"apikey"`
+ authType SourceMailchimpSchemasAuthType `const:"apikey" json:"auth_type"`
}
-type SourceMailchimpAuthenticationOAuth20AuthType string
+func (s SourceMailchimpAPIKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMailchimpAPIKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMailchimpAPIKey) GetApikey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Apikey
+}
+
+func (o *SourceMailchimpAPIKey) GetAuthType() SourceMailchimpSchemasAuthType {
+ return SourceMailchimpSchemasAuthTypeApikey
+}
+
+type SourceMailchimpAuthType string
const (
- SourceMailchimpAuthenticationOAuth20AuthTypeOauth20 SourceMailchimpAuthenticationOAuth20AuthType = "oauth2.0"
+ SourceMailchimpAuthTypeOauth20 SourceMailchimpAuthType = "oauth2.0"
)
-func (e SourceMailchimpAuthenticationOAuth20AuthType) ToPointer() *SourceMailchimpAuthenticationOAuth20AuthType {
+func (e SourceMailchimpAuthType) ToPointer() *SourceMailchimpAuthType {
return &e
}
-func (e *SourceMailchimpAuthenticationOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMailchimpAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceMailchimpAuthenticationOAuth20AuthType(v)
+ *e = SourceMailchimpAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMailchimpAuthenticationOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMailchimpAuthType: %v", v)
}
}
-type SourceMailchimpAuthenticationOAuth20 struct {
+type SourceMailchimpOAuth20 struct {
// An access token generated using the above client ID and secret.
- AccessToken string `json:"access_token"`
- AuthType SourceMailchimpAuthenticationOAuth20AuthType `json:"auth_type"`
+ AccessToken string `json:"access_token"`
+ authType SourceMailchimpAuthType `const:"oauth2.0" json:"auth_type"`
// The Client ID of your OAuth application.
ClientID *string `json:"client_id,omitempty"`
// The Client Secret of your OAuth application.
ClientSecret *string `json:"client_secret,omitempty"`
}
+func (s SourceMailchimpOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMailchimpOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMailchimpOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceMailchimpOAuth20) GetAuthType() SourceMailchimpAuthType {
+ return SourceMailchimpAuthTypeOauth20
+}
+
+func (o *SourceMailchimpOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceMailchimpOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
type SourceMailchimpAuthenticationType string
const (
- SourceMailchimpAuthenticationTypeSourceMailchimpAuthenticationOAuth20 SourceMailchimpAuthenticationType = "source-mailchimp_Authentication_OAuth2.0"
- SourceMailchimpAuthenticationTypeSourceMailchimpAuthenticationAPIKey SourceMailchimpAuthenticationType = "source-mailchimp_Authentication_API Key"
+ SourceMailchimpAuthenticationTypeSourceMailchimpOAuth20 SourceMailchimpAuthenticationType = "source-mailchimp_OAuth2.0"
+ SourceMailchimpAuthenticationTypeSourceMailchimpAPIKey SourceMailchimpAuthenticationType = "source-mailchimp_API Key"
)
type SourceMailchimpAuthentication struct {
- SourceMailchimpAuthenticationOAuth20 *SourceMailchimpAuthenticationOAuth20
- SourceMailchimpAuthenticationAPIKey *SourceMailchimpAuthenticationAPIKey
+ SourceMailchimpOAuth20 *SourceMailchimpOAuth20
+ SourceMailchimpAPIKey *SourceMailchimpAPIKey
Type SourceMailchimpAuthenticationType
}
-func CreateSourceMailchimpAuthenticationSourceMailchimpAuthenticationOAuth20(sourceMailchimpAuthenticationOAuth20 SourceMailchimpAuthenticationOAuth20) SourceMailchimpAuthentication {
- typ := SourceMailchimpAuthenticationTypeSourceMailchimpAuthenticationOAuth20
+func CreateSourceMailchimpAuthenticationSourceMailchimpOAuth20(sourceMailchimpOAuth20 SourceMailchimpOAuth20) SourceMailchimpAuthentication {
+ typ := SourceMailchimpAuthenticationTypeSourceMailchimpOAuth20
return SourceMailchimpAuthentication{
- SourceMailchimpAuthenticationOAuth20: &sourceMailchimpAuthenticationOAuth20,
- Type: typ,
+ SourceMailchimpOAuth20: &sourceMailchimpOAuth20,
+ Type: typ,
}
}
-func CreateSourceMailchimpAuthenticationSourceMailchimpAuthenticationAPIKey(sourceMailchimpAuthenticationAPIKey SourceMailchimpAuthenticationAPIKey) SourceMailchimpAuthentication {
- typ := SourceMailchimpAuthenticationTypeSourceMailchimpAuthenticationAPIKey
+func CreateSourceMailchimpAuthenticationSourceMailchimpAPIKey(sourceMailchimpAPIKey SourceMailchimpAPIKey) SourceMailchimpAuthentication {
+ typ := SourceMailchimpAuthenticationTypeSourceMailchimpAPIKey
return SourceMailchimpAuthentication{
- SourceMailchimpAuthenticationAPIKey: &sourceMailchimpAuthenticationAPIKey,
- Type: typ,
+ SourceMailchimpAPIKey: &sourceMailchimpAPIKey,
+ Type: typ,
}
}
func (u *SourceMailchimpAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMailchimpAuthenticationAPIKey := new(SourceMailchimpAuthenticationAPIKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMailchimpAuthenticationAPIKey); err == nil {
- u.SourceMailchimpAuthenticationAPIKey = sourceMailchimpAuthenticationAPIKey
- u.Type = SourceMailchimpAuthenticationTypeSourceMailchimpAuthenticationAPIKey
+
+ sourceMailchimpAPIKey := new(SourceMailchimpAPIKey)
+ if err := utils.UnmarshalJSON(data, &sourceMailchimpAPIKey, "", true, true); err == nil {
+ u.SourceMailchimpAPIKey = sourceMailchimpAPIKey
+ u.Type = SourceMailchimpAuthenticationTypeSourceMailchimpAPIKey
return nil
}
- sourceMailchimpAuthenticationOAuth20 := new(SourceMailchimpAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMailchimpAuthenticationOAuth20); err == nil {
- u.SourceMailchimpAuthenticationOAuth20 = sourceMailchimpAuthenticationOAuth20
- u.Type = SourceMailchimpAuthenticationTypeSourceMailchimpAuthenticationOAuth20
+ sourceMailchimpOAuth20 := new(SourceMailchimpOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceMailchimpOAuth20, "", true, true); err == nil {
+ u.SourceMailchimpOAuth20 = sourceMailchimpOAuth20
+ u.Type = SourceMailchimpAuthenticationTypeSourceMailchimpOAuth20
return nil
}
@@ -130,43 +183,72 @@ func (u *SourceMailchimpAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceMailchimpAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceMailchimpAuthenticationAPIKey != nil {
- return json.Marshal(u.SourceMailchimpAuthenticationAPIKey)
+ if u.SourceMailchimpOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceMailchimpOAuth20, "", true)
}
- if u.SourceMailchimpAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceMailchimpAuthenticationOAuth20)
+ if u.SourceMailchimpAPIKey != nil {
+ return utils.MarshalJSON(u.SourceMailchimpAPIKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceMailchimpMailchimp string
+type Mailchimp string
const (
- SourceMailchimpMailchimpMailchimp SourceMailchimpMailchimp = "mailchimp"
+ MailchimpMailchimp Mailchimp = "mailchimp"
)
-func (e SourceMailchimpMailchimp) ToPointer() *SourceMailchimpMailchimp {
+func (e Mailchimp) ToPointer() *Mailchimp {
return &e
}
-func (e *SourceMailchimpMailchimp) UnmarshalJSON(data []byte) error {
+func (e *Mailchimp) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "mailchimp":
- *e = SourceMailchimpMailchimp(v)
+ *e = Mailchimp(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMailchimpMailchimp: %v", v)
+ return fmt.Errorf("invalid value for Mailchimp: %v", v)
}
}
type SourceMailchimp struct {
CampaignID *string `json:"campaign_id,omitempty"`
Credentials *SourceMailchimpAuthentication `json:"credentials,omitempty"`
- SourceType SourceMailchimpMailchimp `json:"sourceType"`
+ sourceType Mailchimp `const:"mailchimp" json:"sourceType"`
+}
+
+func (s SourceMailchimp) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMailchimp) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMailchimp) GetCampaignID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CampaignID
+}
+
+func (o *SourceMailchimp) GetCredentials() *SourceMailchimpAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceMailchimp) GetSourceType() Mailchimp {
+ return MailchimpMailchimp
}
diff --git a/internal/sdk/pkg/models/shared/sourcemailchimpcreaterequest.go b/internal/sdk/pkg/models/shared/sourcemailchimpcreaterequest.go
old mode 100755
new mode 100644
index 6e0a8dc45..2e5eb534d
--- a/internal/sdk/pkg/models/shared/sourcemailchimpcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemailchimpcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMailchimpCreateRequest struct {
Configuration SourceMailchimp `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMailchimpCreateRequest) GetConfiguration() SourceMailchimp {
+ if o == nil {
+ return SourceMailchimp{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMailchimpCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMailchimpCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMailchimpCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMailchimpCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemailchimpputrequest.go b/internal/sdk/pkg/models/shared/sourcemailchimpputrequest.go
old mode 100755
new mode 100644
index df35b9542..4fd63b3f3
--- a/internal/sdk/pkg/models/shared/sourcemailchimpputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemailchimpputrequest.go
@@ -7,3 +7,24 @@ type SourceMailchimpPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMailchimpPutRequest) GetConfiguration() SourceMailchimpUpdate {
+ if o == nil {
+ return SourceMailchimpUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMailchimpPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMailchimpPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemailchimpupdate.go b/internal/sdk/pkg/models/shared/sourcemailchimpupdate.go
old mode 100755
new mode 100644
index beaab7dda..94a5f9091
--- a/internal/sdk/pkg/models/shared/sourcemailchimpupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemailchimpupdate.go
@@ -3,126 +3,179 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMailchimpUpdateAuthenticationAPIKeyAuthType string
+type SourceMailchimpUpdateSchemasAuthType string
const (
- SourceMailchimpUpdateAuthenticationAPIKeyAuthTypeApikey SourceMailchimpUpdateAuthenticationAPIKeyAuthType = "apikey"
+ SourceMailchimpUpdateSchemasAuthTypeApikey SourceMailchimpUpdateSchemasAuthType = "apikey"
)
-func (e SourceMailchimpUpdateAuthenticationAPIKeyAuthType) ToPointer() *SourceMailchimpUpdateAuthenticationAPIKeyAuthType {
+func (e SourceMailchimpUpdateSchemasAuthType) ToPointer() *SourceMailchimpUpdateSchemasAuthType {
return &e
}
-func (e *SourceMailchimpUpdateAuthenticationAPIKeyAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMailchimpUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "apikey":
- *e = SourceMailchimpUpdateAuthenticationAPIKeyAuthType(v)
+ *e = SourceMailchimpUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMailchimpUpdateAuthenticationAPIKeyAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMailchimpUpdateSchemasAuthType: %v", v)
}
}
-type SourceMailchimpUpdateAuthenticationAPIKey struct {
+type APIKey struct {
// Mailchimp API Key. See the docs for information on how to generate this key.
- Apikey string `json:"apikey"`
- AuthType SourceMailchimpUpdateAuthenticationAPIKeyAuthType `json:"auth_type"`
+ Apikey string `json:"apikey"`
+ authType SourceMailchimpUpdateSchemasAuthType `const:"apikey" json:"auth_type"`
}
-type SourceMailchimpUpdateAuthenticationOAuth20AuthType string
+func (a APIKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *APIKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *APIKey) GetApikey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Apikey
+}
+
+func (o *APIKey) GetAuthType() SourceMailchimpUpdateSchemasAuthType {
+ return SourceMailchimpUpdateSchemasAuthTypeApikey
+}
+
+type SourceMailchimpUpdateAuthType string
const (
- SourceMailchimpUpdateAuthenticationOAuth20AuthTypeOauth20 SourceMailchimpUpdateAuthenticationOAuth20AuthType = "oauth2.0"
+ SourceMailchimpUpdateAuthTypeOauth20 SourceMailchimpUpdateAuthType = "oauth2.0"
)
-func (e SourceMailchimpUpdateAuthenticationOAuth20AuthType) ToPointer() *SourceMailchimpUpdateAuthenticationOAuth20AuthType {
+func (e SourceMailchimpUpdateAuthType) ToPointer() *SourceMailchimpUpdateAuthType {
return &e
}
-func (e *SourceMailchimpUpdateAuthenticationOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMailchimpUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceMailchimpUpdateAuthenticationOAuth20AuthType(v)
+ *e = SourceMailchimpUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMailchimpUpdateAuthenticationOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMailchimpUpdateAuthType: %v", v)
}
}
-type SourceMailchimpUpdateAuthenticationOAuth20 struct {
+type SourceMailchimpUpdateOAuth20 struct {
// An access token generated using the above client ID and secret.
- AccessToken string `json:"access_token"`
- AuthType SourceMailchimpUpdateAuthenticationOAuth20AuthType `json:"auth_type"`
+ AccessToken string `json:"access_token"`
+ authType SourceMailchimpUpdateAuthType `const:"oauth2.0" json:"auth_type"`
// The Client ID of your OAuth application.
ClientID *string `json:"client_id,omitempty"`
// The Client Secret of your OAuth application.
ClientSecret *string `json:"client_secret,omitempty"`
}
+func (s SourceMailchimpUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMailchimpUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMailchimpUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceMailchimpUpdateOAuth20) GetAuthType() SourceMailchimpUpdateAuthType {
+ return SourceMailchimpUpdateAuthTypeOauth20
+}
+
+func (o *SourceMailchimpUpdateOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceMailchimpUpdateOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
type SourceMailchimpUpdateAuthenticationType string
const (
- SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateAuthenticationOAuth20 SourceMailchimpUpdateAuthenticationType = "source-mailchimp-update_Authentication_OAuth2.0"
- SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateAuthenticationAPIKey SourceMailchimpUpdateAuthenticationType = "source-mailchimp-update_Authentication_API Key"
+ SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateOAuth20 SourceMailchimpUpdateAuthenticationType = "source-mailchimp-update_OAuth2.0"
+ SourceMailchimpUpdateAuthenticationTypeAPIKey SourceMailchimpUpdateAuthenticationType = "API Key"
)
type SourceMailchimpUpdateAuthentication struct {
- SourceMailchimpUpdateAuthenticationOAuth20 *SourceMailchimpUpdateAuthenticationOAuth20
- SourceMailchimpUpdateAuthenticationAPIKey *SourceMailchimpUpdateAuthenticationAPIKey
+ SourceMailchimpUpdateOAuth20 *SourceMailchimpUpdateOAuth20
+ APIKey *APIKey
Type SourceMailchimpUpdateAuthenticationType
}
-func CreateSourceMailchimpUpdateAuthenticationSourceMailchimpUpdateAuthenticationOAuth20(sourceMailchimpUpdateAuthenticationOAuth20 SourceMailchimpUpdateAuthenticationOAuth20) SourceMailchimpUpdateAuthentication {
- typ := SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateAuthenticationOAuth20
+func CreateSourceMailchimpUpdateAuthenticationSourceMailchimpUpdateOAuth20(sourceMailchimpUpdateOAuth20 SourceMailchimpUpdateOAuth20) SourceMailchimpUpdateAuthentication {
+ typ := SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateOAuth20
return SourceMailchimpUpdateAuthentication{
- SourceMailchimpUpdateAuthenticationOAuth20: &sourceMailchimpUpdateAuthenticationOAuth20,
- Type: typ,
+ SourceMailchimpUpdateOAuth20: &sourceMailchimpUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceMailchimpUpdateAuthenticationSourceMailchimpUpdateAuthenticationAPIKey(sourceMailchimpUpdateAuthenticationAPIKey SourceMailchimpUpdateAuthenticationAPIKey) SourceMailchimpUpdateAuthentication {
- typ := SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateAuthenticationAPIKey
+func CreateSourceMailchimpUpdateAuthenticationAPIKey(apiKey APIKey) SourceMailchimpUpdateAuthentication {
+ typ := SourceMailchimpUpdateAuthenticationTypeAPIKey
return SourceMailchimpUpdateAuthentication{
- SourceMailchimpUpdateAuthenticationAPIKey: &sourceMailchimpUpdateAuthenticationAPIKey,
- Type: typ,
+ APIKey: &apiKey,
+ Type: typ,
}
}
func (u *SourceMailchimpUpdateAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMailchimpUpdateAuthenticationAPIKey := new(SourceMailchimpUpdateAuthenticationAPIKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMailchimpUpdateAuthenticationAPIKey); err == nil {
- u.SourceMailchimpUpdateAuthenticationAPIKey = sourceMailchimpUpdateAuthenticationAPIKey
- u.Type = SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateAuthenticationAPIKey
+
+ apiKey := new(APIKey)
+ if err := utils.UnmarshalJSON(data, &apiKey, "", true, true); err == nil {
+ u.APIKey = apiKey
+ u.Type = SourceMailchimpUpdateAuthenticationTypeAPIKey
return nil
}
- sourceMailchimpUpdateAuthenticationOAuth20 := new(SourceMailchimpUpdateAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMailchimpUpdateAuthenticationOAuth20); err == nil {
- u.SourceMailchimpUpdateAuthenticationOAuth20 = sourceMailchimpUpdateAuthenticationOAuth20
- u.Type = SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateAuthenticationOAuth20
+ sourceMailchimpUpdateOAuth20 := new(SourceMailchimpUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceMailchimpUpdateOAuth20, "", true, true); err == nil {
+ u.SourceMailchimpUpdateOAuth20 = sourceMailchimpUpdateOAuth20
+ u.Type = SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateOAuth20
return nil
}
@@ -130,18 +183,32 @@ func (u *SourceMailchimpUpdateAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceMailchimpUpdateAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceMailchimpUpdateAuthenticationAPIKey != nil {
- return json.Marshal(u.SourceMailchimpUpdateAuthenticationAPIKey)
+ if u.SourceMailchimpUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceMailchimpUpdateOAuth20, "", true)
}
- if u.SourceMailchimpUpdateAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceMailchimpUpdateAuthenticationOAuth20)
+ if u.APIKey != nil {
+ return utils.MarshalJSON(u.APIKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceMailchimpUpdate struct {
CampaignID *string `json:"campaign_id,omitempty"`
Credentials *SourceMailchimpUpdateAuthentication `json:"credentials,omitempty"`
}
+
+func (o *SourceMailchimpUpdate) GetCampaignID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.CampaignID
+}
+
+func (o *SourceMailchimpUpdate) GetCredentials() *SourceMailchimpUpdateAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemailgun.go b/internal/sdk/pkg/models/shared/sourcemailgun.go
old mode 100755
new mode 100644
index a54eb2abd..e04500c55
--- a/internal/sdk/pkg/models/shared/sourcemailgun.go
+++ b/internal/sdk/pkg/models/shared/sourcemailgun.go
@@ -5,39 +5,76 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceMailgunMailgun string
+type Mailgun string
const (
- SourceMailgunMailgunMailgun SourceMailgunMailgun = "mailgun"
+ MailgunMailgun Mailgun = "mailgun"
)
-func (e SourceMailgunMailgun) ToPointer() *SourceMailgunMailgun {
+func (e Mailgun) ToPointer() *Mailgun {
return &e
}
-func (e *SourceMailgunMailgun) UnmarshalJSON(data []byte) error {
+func (e *Mailgun) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "mailgun":
- *e = SourceMailgunMailgun(v)
+ *e = Mailgun(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMailgunMailgun: %v", v)
+ return fmt.Errorf("invalid value for Mailgun: %v", v)
}
}
type SourceMailgun struct {
// Domain region code. 'EU' or 'US' are possible values. The default is 'US'.
- DomainRegion *string `json:"domain_region,omitempty"`
+ DomainRegion *string `default:"US" json:"domain_region"`
// Primary account API key to access your Mailgun data.
- PrivateKey string `json:"private_key"`
- SourceType SourceMailgunMailgun `json:"sourceType"`
+ PrivateKey string `json:"private_key"`
+ sourceType Mailgun `const:"mailgun" json:"sourceType"`
// UTC date and time in the format 2020-10-01 00:00:00. Any data before this date will not be replicated. If omitted, defaults to 3 days ago.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceMailgun) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMailgun) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMailgun) GetDomainRegion() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DomainRegion
+}
+
+func (o *SourceMailgun) GetPrivateKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PrivateKey
+}
+
+func (o *SourceMailgun) GetSourceType() Mailgun {
+ return MailgunMailgun
+}
+
+func (o *SourceMailgun) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemailguncreaterequest.go b/internal/sdk/pkg/models/shared/sourcemailguncreaterequest.go
old mode 100755
new mode 100644
index 0481153cf..8cb39d940
--- a/internal/sdk/pkg/models/shared/sourcemailguncreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemailguncreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMailgunCreateRequest struct {
Configuration SourceMailgun `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMailgunCreateRequest) GetConfiguration() SourceMailgun {
+ if o == nil {
+ return SourceMailgun{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMailgunCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMailgunCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMailgunCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMailgunCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemailgunputrequest.go b/internal/sdk/pkg/models/shared/sourcemailgunputrequest.go
old mode 100755
new mode 100644
index 12016dd59..386898287
--- a/internal/sdk/pkg/models/shared/sourcemailgunputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemailgunputrequest.go
@@ -7,3 +7,24 @@ type SourceMailgunPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMailgunPutRequest) GetConfiguration() SourceMailgunUpdate {
+ if o == nil {
+ return SourceMailgunUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMailgunPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMailgunPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemailgunupdate.go b/internal/sdk/pkg/models/shared/sourcemailgunupdate.go
old mode 100755
new mode 100644
index 220f35bdc..c8c81724e
--- a/internal/sdk/pkg/models/shared/sourcemailgunupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemailgunupdate.go
@@ -3,14 +3,47 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
type SourceMailgunUpdate struct {
// Domain region code. 'EU' or 'US' are possible values. The default is 'US'.
- DomainRegion *string `json:"domain_region,omitempty"`
+ DomainRegion *string `default:"US" json:"domain_region"`
// Primary account API key to access your Mailgun data.
PrivateKey string `json:"private_key"`
// UTC date and time in the format 2020-10-01 00:00:00. Any data before this date will not be replicated. If omitted, defaults to 3 days ago.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceMailgunUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMailgunUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMailgunUpdate) GetDomainRegion() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DomainRegion
+}
+
+func (o *SourceMailgunUpdate) GetPrivateKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.PrivateKey
+}
+
+func (o *SourceMailgunUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemailjetsms.go b/internal/sdk/pkg/models/shared/sourcemailjetsms.go
old mode 100755
new mode 100644
index a42f40cb4..05ab4758c
--- a/internal/sdk/pkg/models/shared/sourcemailjetsms.go
+++ b/internal/sdk/pkg/models/shared/sourcemailjetsms.go
@@ -5,38 +5,75 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMailjetSmsMailjetSms string
+type MailjetSms string
const (
- SourceMailjetSmsMailjetSmsMailjetSms SourceMailjetSmsMailjetSms = "mailjet-sms"
+ MailjetSmsMailjetSms MailjetSms = "mailjet-sms"
)
-func (e SourceMailjetSmsMailjetSms) ToPointer() *SourceMailjetSmsMailjetSms {
+func (e MailjetSms) ToPointer() *MailjetSms {
return &e
}
-func (e *SourceMailjetSmsMailjetSms) UnmarshalJSON(data []byte) error {
+func (e *MailjetSms) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "mailjet-sms":
- *e = SourceMailjetSmsMailjetSms(v)
+ *e = MailjetSms(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMailjetSmsMailjetSms: %v", v)
+ return fmt.Errorf("invalid value for MailjetSms: %v", v)
}
}
type SourceMailjetSms struct {
// Retrieve SMS messages created before the specified timestamp. Required format - Unix timestamp.
- EndDate *int64 `json:"end_date,omitempty"`
- SourceType SourceMailjetSmsMailjetSms `json:"sourceType"`
+ EndDate *int64 `json:"end_date,omitempty"`
+ sourceType MailjetSms `const:"mailjet-sms" json:"sourceType"`
// Retrieve SMS messages created after the specified timestamp. Required format - Unix timestamp.
StartDate *int64 `json:"start_date,omitempty"`
// Your access token. See here.
Token string `json:"token"`
}
+
+func (s SourceMailjetSms) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMailjetSms) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMailjetSms) GetEndDate() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceMailjetSms) GetSourceType() MailjetSms {
+ return MailjetSmsMailjetSms
+}
+
+func (o *SourceMailjetSms) GetStartDate() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceMailjetSms) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemailjetsmscreaterequest.go b/internal/sdk/pkg/models/shared/sourcemailjetsmscreaterequest.go
old mode 100755
new mode 100644
index b9c87b4e3..2c1cecfae
--- a/internal/sdk/pkg/models/shared/sourcemailjetsmscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemailjetsmscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMailjetSmsCreateRequest struct {
Configuration SourceMailjetSms `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+	// Name of the source, e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMailjetSmsCreateRequest) GetConfiguration() SourceMailjetSms {
+ if o == nil {
+ return SourceMailjetSms{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMailjetSmsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMailjetSmsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMailjetSmsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMailjetSmsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemailjetsmsputrequest.go b/internal/sdk/pkg/models/shared/sourcemailjetsmsputrequest.go
old mode 100755
new mode 100644
index a6957e590..7c4b4c5e0
--- a/internal/sdk/pkg/models/shared/sourcemailjetsmsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemailjetsmsputrequest.go
@@ -7,3 +7,24 @@ type SourceMailjetSmsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMailjetSmsPutRequest) GetConfiguration() SourceMailjetSmsUpdate {
+ if o == nil {
+ return SourceMailjetSmsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMailjetSmsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMailjetSmsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemailjetsmsupdate.go b/internal/sdk/pkg/models/shared/sourcemailjetsmsupdate.go
old mode 100755
new mode 100644
index 8b3c0a2e5..ba935d210
--- a/internal/sdk/pkg/models/shared/sourcemailjetsmsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemailjetsmsupdate.go
@@ -10,3 +10,24 @@ type SourceMailjetSmsUpdate struct {
// Your access token. See here.
Token string `json:"token"`
}
+
+func (o *SourceMailjetSmsUpdate) GetEndDate() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceMailjetSmsUpdate) GetStartDate() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceMailjetSmsUpdate) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemarketo.go b/internal/sdk/pkg/models/shared/sourcemarketo.go
old mode 100755
new mode 100644
index 5cd15e264..95e7de1cd
--- a/internal/sdk/pkg/models/shared/sourcemarketo.go
+++ b/internal/sdk/pkg/models/shared/sourcemarketo.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceMarketoMarketo string
+type Marketo string
const (
- SourceMarketoMarketoMarketo SourceMarketoMarketo = "marketo"
+ MarketoMarketo Marketo = "marketo"
)
-func (e SourceMarketoMarketo) ToPointer() *SourceMarketoMarketo {
+func (e Marketo) ToPointer() *Marketo {
return &e
}
-func (e *SourceMarketoMarketo) UnmarshalJSON(data []byte) error {
+func (e *Marketo) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "marketo":
- *e = SourceMarketoMarketo(v)
+ *e = Marketo(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMarketoMarketo: %v", v)
+ return fmt.Errorf("invalid value for Marketo: %v", v)
}
}
@@ -38,8 +39,51 @@ type SourceMarketo struct {
// The Client Secret of your Marketo developer application. See the docs for info on how to obtain this.
ClientSecret string `json:"client_secret"`
// Your Marketo Base URL. See the docs for info on how to obtain this.
- DomainURL string `json:"domain_url"`
- SourceType SourceMarketoMarketo `json:"sourceType"`
+ DomainURL string `json:"domain_url"`
+ sourceType Marketo `const:"marketo" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceMarketo) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMarketo) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMarketo) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceMarketo) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceMarketo) GetDomainURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.DomainURL
+}
+
+func (o *SourceMarketo) GetSourceType() Marketo {
+ return MarketoMarketo
+}
+
+func (o *SourceMarketo) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemarketocreaterequest.go b/internal/sdk/pkg/models/shared/sourcemarketocreaterequest.go
old mode 100755
new mode 100644
index df46f074b..cb6fbf37d
--- a/internal/sdk/pkg/models/shared/sourcemarketocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemarketocreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMarketoCreateRequest struct {
Configuration SourceMarketo `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+	// Name of the source, e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMarketoCreateRequest) GetConfiguration() SourceMarketo {
+ if o == nil {
+ return SourceMarketo{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMarketoCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMarketoCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMarketoCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMarketoCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemarketoputrequest.go b/internal/sdk/pkg/models/shared/sourcemarketoputrequest.go
old mode 100755
new mode 100644
index eb8250dfa..25799c60c
--- a/internal/sdk/pkg/models/shared/sourcemarketoputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemarketoputrequest.go
@@ -7,3 +7,24 @@ type SourceMarketoPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMarketoPutRequest) GetConfiguration() SourceMarketoUpdate {
+ if o == nil {
+ return SourceMarketoUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMarketoPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMarketoPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemarketoupdate.go b/internal/sdk/pkg/models/shared/sourcemarketoupdate.go
old mode 100755
new mode 100644
index 8d1bb6d47..50c970cad
--- a/internal/sdk/pkg/models/shared/sourcemarketoupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemarketoupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -16,3 +17,42 @@ type SourceMarketoUpdate struct {
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceMarketoUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMarketoUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMarketoUpdate) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceMarketoUpdate) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceMarketoUpdate) GetDomainURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.DomainURL
+}
+
+func (o *SourceMarketoUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemetabase.go b/internal/sdk/pkg/models/shared/sourcemetabase.go
old mode 100755
new mode 100644
index 203448afe..20b52b187
--- a/internal/sdk/pkg/models/shared/sourcemetabase.go
+++ b/internal/sdk/pkg/models/shared/sourcemetabase.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMetabaseMetabase string
+type Metabase string
const (
- SourceMetabaseMetabaseMetabase SourceMetabaseMetabase = "metabase"
+ MetabaseMetabase Metabase = "metabase"
)
-func (e SourceMetabaseMetabase) ToPointer() *SourceMetabaseMetabase {
+func (e Metabase) ToPointer() *Metabase {
return &e
}
-func (e *SourceMetabaseMetabase) UnmarshalJSON(data []byte) error {
+func (e *Metabase) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "metabase":
- *e = SourceMetabaseMetabase(v)
+ *e = Metabase(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMetabaseMetabase: %v", v)
+ return fmt.Errorf("invalid value for Metabase: %v", v)
}
}
@@ -41,7 +42,50 @@ type SourceMetabase struct {
// http://localhost:3000/api/session
// ``` Then copy the value of the `id` field returned by a successful call to that API.
// Note that by default, sessions are good for 14 days and needs to be regenerated.
- SessionToken *string `json:"session_token,omitempty"`
- SourceType SourceMetabaseMetabase `json:"sourceType"`
- Username *string `json:"username,omitempty"`
+ SessionToken *string `json:"session_token,omitempty"`
+ sourceType Metabase `const:"metabase" json:"sourceType"`
+ Username *string `json:"username,omitempty"`
+}
+
+func (s SourceMetabase) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMetabase) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMetabase) GetInstanceAPIURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.InstanceAPIURL
+}
+
+func (o *SourceMetabase) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceMetabase) GetSessionToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SessionToken
+}
+
+func (o *SourceMetabase) GetSourceType() Metabase {
+ return MetabaseMetabase
+}
+
+func (o *SourceMetabase) GetUsername() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Username
}
diff --git a/internal/sdk/pkg/models/shared/sourcemetabasecreaterequest.go b/internal/sdk/pkg/models/shared/sourcemetabasecreaterequest.go
old mode 100755
new mode 100644
index cda3432b7..6d6db5cfc
--- a/internal/sdk/pkg/models/shared/sourcemetabasecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemetabasecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMetabaseCreateRequest struct {
Configuration SourceMetabase `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+	// Name of the source, e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMetabaseCreateRequest) GetConfiguration() SourceMetabase {
+ if o == nil {
+ return SourceMetabase{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMetabaseCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMetabaseCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMetabaseCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMetabaseCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemetabaseputrequest.go b/internal/sdk/pkg/models/shared/sourcemetabaseputrequest.go
old mode 100755
new mode 100644
index f6e9732a6..238a900c9
--- a/internal/sdk/pkg/models/shared/sourcemetabaseputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemetabaseputrequest.go
@@ -7,3 +7,24 @@ type SourceMetabasePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMetabasePutRequest) GetConfiguration() SourceMetabaseUpdate {
+ if o == nil {
+ return SourceMetabaseUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMetabasePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMetabasePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemetabaseupdate.go b/internal/sdk/pkg/models/shared/sourcemetabaseupdate.go
old mode 100755
new mode 100644
index a257e2a28..596366d81
--- a/internal/sdk/pkg/models/shared/sourcemetabaseupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemetabaseupdate.go
@@ -15,3 +15,31 @@ type SourceMetabaseUpdate struct {
SessionToken *string `json:"session_token,omitempty"`
Username *string `json:"username,omitempty"`
}
+
+func (o *SourceMetabaseUpdate) GetInstanceAPIURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.InstanceAPIURL
+}
+
+func (o *SourceMetabaseUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceMetabaseUpdate) GetSessionToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SessionToken
+}
+
+func (o *SourceMetabaseUpdate) GetUsername() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemicrosoftteams.go b/internal/sdk/pkg/models/shared/sourcemicrosoftteams.go
old mode 100755
new mode 100644
index 86ce9798c..e23f9be07
--- a/internal/sdk/pkg/models/shared/sourcemicrosoftteams.go
+++ b/internal/sdk/pkg/models/shared/sourcemicrosoftteams.go
@@ -3,39 +3,39 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftAuthType string
+type SourceMicrosoftTeamsSchemasAuthType string
const (
- SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftAuthTypeToken SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftAuthType = "Token"
+ SourceMicrosoftTeamsSchemasAuthTypeToken SourceMicrosoftTeamsSchemasAuthType = "Token"
)
-func (e SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftAuthType) ToPointer() *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftAuthType {
+func (e SourceMicrosoftTeamsSchemasAuthType) ToPointer() *SourceMicrosoftTeamsSchemasAuthType {
return &e
}
-func (e *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMicrosoftTeamsSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Token":
- *e = SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftAuthType(v)
+ *e = SourceMicrosoftTeamsSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMicrosoftTeamsSchemasAuthType: %v", v)
}
}
-// SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft - Choose how to authenticate to Microsoft
-type SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft struct {
- AuthType *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftAuthType `json:"auth_type,omitempty"`
+// SourceMicrosoftTeamsAuthenticateViaMicrosoft - Choose how to authenticate to Microsoft
+type SourceMicrosoftTeamsAuthenticateViaMicrosoft struct {
+ authType *SourceMicrosoftTeamsSchemasAuthType `const:"Token" json:"auth_type"`
// The Client ID of your Microsoft Teams developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Microsoft Teams developer application.
@@ -44,33 +44,69 @@ type SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft struct
TenantID string `json:"tenant_id"`
}
-type SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType string
+func (s SourceMicrosoftTeamsAuthenticateViaMicrosoft) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMicrosoftTeamsAuthenticateViaMicrosoft) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMicrosoftTeamsAuthenticateViaMicrosoft) GetAuthType() *SourceMicrosoftTeamsSchemasAuthType {
+ return SourceMicrosoftTeamsSchemasAuthTypeToken.ToPointer()
+}
+
+func (o *SourceMicrosoftTeamsAuthenticateViaMicrosoft) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceMicrosoftTeamsAuthenticateViaMicrosoft) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceMicrosoftTeamsAuthenticateViaMicrosoft) GetTenantID() string {
+ if o == nil {
+ return ""
+ }
+ return o.TenantID
+}
+
+type SourceMicrosoftTeamsAuthType string
const (
- SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthTypeClient SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType = "Client"
+ SourceMicrosoftTeamsAuthTypeClient SourceMicrosoftTeamsAuthType = "Client"
)
-func (e SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType) ToPointer() *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType {
+func (e SourceMicrosoftTeamsAuthType) ToPointer() *SourceMicrosoftTeamsAuthType {
return &e
}
-func (e *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMicrosoftTeamsAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType(v)
+ *e = SourceMicrosoftTeamsAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMicrosoftTeamsAuthType: %v", v)
}
}
-// SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 - Choose how to authenticate to Microsoft
-type SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 struct {
- AuthType *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType `json:"auth_type,omitempty"`
+// SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20 - Choose how to authenticate to Microsoft
+type SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20 struct {
+ authType *SourceMicrosoftTeamsAuthType `const:"Client" json:"auth_type"`
// The Client ID of your Microsoft Teams developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Microsoft Teams developer application.
@@ -81,56 +117,94 @@ type SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20
TenantID string `json:"tenant_id"`
}
+func (s SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20) GetAuthType() *SourceMicrosoftTeamsAuthType {
+ return SourceMicrosoftTeamsAuthTypeClient.ToPointer()
+}
+
+func (o *SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20) GetTenantID() string {
+ if o == nil {
+ return ""
+ }
+ return o.TenantID
+}
+
type SourceMicrosoftTeamsAuthenticationMechanismType string
const (
- SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 SourceMicrosoftTeamsAuthenticationMechanismType = "source-microsoft-teams_Authentication mechanism_Authenticate via Microsoft (OAuth 2.0)"
- SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft SourceMicrosoftTeamsAuthenticationMechanismType = "source-microsoft-teams_Authentication mechanism_Authenticate via Microsoft"
+ SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20 SourceMicrosoftTeamsAuthenticationMechanismType = "source-microsoft-teams_Authenticate via Microsoft (OAuth 2.0)"
+ SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticateViaMicrosoft SourceMicrosoftTeamsAuthenticationMechanismType = "source-microsoft-teams_Authenticate via Microsoft"
)
type SourceMicrosoftTeamsAuthenticationMechanism struct {
- SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20
- SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft *SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft
+ SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20 *SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20
+ SourceMicrosoftTeamsAuthenticateViaMicrosoft *SourceMicrosoftTeamsAuthenticateViaMicrosoft
Type SourceMicrosoftTeamsAuthenticationMechanismType
}
-func CreateSourceMicrosoftTeamsAuthenticationMechanismSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20(sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20) SourceMicrosoftTeamsAuthenticationMechanism {
- typ := SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20
+func CreateSourceMicrosoftTeamsAuthenticationMechanismSourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20(sourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20 SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20) SourceMicrosoftTeamsAuthenticationMechanism {
+ typ := SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20
return SourceMicrosoftTeamsAuthenticationMechanism{
- SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20: &sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20,
+ SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20: &sourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20,
Type: typ,
}
}
-func CreateSourceMicrosoftTeamsAuthenticationMechanismSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft(sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft) SourceMicrosoftTeamsAuthenticationMechanism {
- typ := SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft
+func CreateSourceMicrosoftTeamsAuthenticationMechanismSourceMicrosoftTeamsAuthenticateViaMicrosoft(sourceMicrosoftTeamsAuthenticateViaMicrosoft SourceMicrosoftTeamsAuthenticateViaMicrosoft) SourceMicrosoftTeamsAuthenticationMechanism {
+ typ := SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticateViaMicrosoft
return SourceMicrosoftTeamsAuthenticationMechanism{
- SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft: &sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft,
+ SourceMicrosoftTeamsAuthenticateViaMicrosoft: &sourceMicrosoftTeamsAuthenticateViaMicrosoft,
Type: typ,
}
}
func (u *SourceMicrosoftTeamsAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft := new(SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft); err == nil {
- u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft = sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft
- u.Type = SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft
+
+ sourceMicrosoftTeamsAuthenticateViaMicrosoft := new(SourceMicrosoftTeamsAuthenticateViaMicrosoft)
+ if err := utils.UnmarshalJSON(data, &sourceMicrosoftTeamsAuthenticateViaMicrosoft, "", true, true); err == nil {
+ u.SourceMicrosoftTeamsAuthenticateViaMicrosoft = sourceMicrosoftTeamsAuthenticateViaMicrosoft
+ u.Type = SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticateViaMicrosoft
return nil
}
- sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 := new(SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20); err == nil {
- u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 = sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20
- u.Type = SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20
+ sourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20 := new(SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20, "", true, true); err == nil {
+ u.SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20 = sourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20
+ u.Type = SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20
return nil
}
@@ -138,38 +212,38 @@ func (u *SourceMicrosoftTeamsAuthenticationMechanism) UnmarshalJSON(data []byte)
}
func (u SourceMicrosoftTeamsAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft != nil {
- return json.Marshal(u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft)
+ if u.SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceMicrosoftTeamsAuthenticateViaMicrosoftOAuth20, "", true)
}
- if u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 != nil {
- return json.Marshal(u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20)
+ if u.SourceMicrosoftTeamsAuthenticateViaMicrosoft != nil {
+ return utils.MarshalJSON(u.SourceMicrosoftTeamsAuthenticateViaMicrosoft, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceMicrosoftTeamsMicrosoftTeams string
+type MicrosoftTeams string
const (
- SourceMicrosoftTeamsMicrosoftTeamsMicrosoftTeams SourceMicrosoftTeamsMicrosoftTeams = "microsoft-teams"
+ MicrosoftTeamsMicrosoftTeams MicrosoftTeams = "microsoft-teams"
)
-func (e SourceMicrosoftTeamsMicrosoftTeams) ToPointer() *SourceMicrosoftTeamsMicrosoftTeams {
+func (e MicrosoftTeams) ToPointer() *MicrosoftTeams {
return &e
}
-func (e *SourceMicrosoftTeamsMicrosoftTeams) UnmarshalJSON(data []byte) error {
+func (e *MicrosoftTeams) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "microsoft-teams":
- *e = SourceMicrosoftTeamsMicrosoftTeams(v)
+ *e = MicrosoftTeams(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMicrosoftTeamsMicrosoftTeams: %v", v)
+ return fmt.Errorf("invalid value for MicrosoftTeams: %v", v)
}
}
@@ -177,6 +251,35 @@ type SourceMicrosoftTeams struct {
// Choose how to authenticate to Microsoft
Credentials *SourceMicrosoftTeamsAuthenticationMechanism `json:"credentials,omitempty"`
// Specifies the length of time over which the Team Device Report stream is aggregated. The supported values are: D7, D30, D90, and D180.
- Period string `json:"period"`
- SourceType SourceMicrosoftTeamsMicrosoftTeams `json:"sourceType"`
+ Period string `json:"period"`
+ sourceType MicrosoftTeams `const:"microsoft-teams" json:"sourceType"`
+}
+
+func (s SourceMicrosoftTeams) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMicrosoftTeams) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMicrosoftTeams) GetCredentials() *SourceMicrosoftTeamsAuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceMicrosoftTeams) GetPeriod() string {
+ if o == nil {
+ return ""
+ }
+ return o.Period
+}
+
+func (o *SourceMicrosoftTeams) GetSourceType() MicrosoftTeams {
+ return MicrosoftTeamsMicrosoftTeams
}
diff --git a/internal/sdk/pkg/models/shared/sourcemicrosoftteamscreaterequest.go b/internal/sdk/pkg/models/shared/sourcemicrosoftteamscreaterequest.go
old mode 100755
new mode 100644
index 87f7830bc..a237975dd
--- a/internal/sdk/pkg/models/shared/sourcemicrosoftteamscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemicrosoftteamscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMicrosoftTeamsCreateRequest struct {
Configuration SourceMicrosoftTeams `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMicrosoftTeamsCreateRequest) GetConfiguration() SourceMicrosoftTeams {
+ if o == nil {
+ return SourceMicrosoftTeams{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMicrosoftTeamsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMicrosoftTeamsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMicrosoftTeamsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMicrosoftTeamsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemicrosoftteamsputrequest.go b/internal/sdk/pkg/models/shared/sourcemicrosoftteamsputrequest.go
old mode 100755
new mode 100644
index 6a44cac9f..e6e527ad9
--- a/internal/sdk/pkg/models/shared/sourcemicrosoftteamsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemicrosoftteamsputrequest.go
@@ -7,3 +7,24 @@ type SourceMicrosoftTeamsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMicrosoftTeamsPutRequest) GetConfiguration() SourceMicrosoftTeamsUpdate {
+ if o == nil {
+ return SourceMicrosoftTeamsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMicrosoftTeamsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMicrosoftTeamsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemicrosoftteamsupdate.go b/internal/sdk/pkg/models/shared/sourcemicrosoftteamsupdate.go
old mode 100755
new mode 100644
index 510b700cd..8e54e4497
--- a/internal/sdk/pkg/models/shared/sourcemicrosoftteamsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemicrosoftteamsupdate.go
@@ -3,39 +3,39 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftAuthType string
+type SourceMicrosoftTeamsUpdateSchemasAuthType string
const (
- SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftAuthTypeToken SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftAuthType = "Token"
+ SourceMicrosoftTeamsUpdateSchemasAuthTypeToken SourceMicrosoftTeamsUpdateSchemasAuthType = "Token"
)
-func (e SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftAuthType) ToPointer() *SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftAuthType {
+func (e SourceMicrosoftTeamsUpdateSchemasAuthType) ToPointer() *SourceMicrosoftTeamsUpdateSchemasAuthType {
return &e
}
-func (e *SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMicrosoftTeamsUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Token":
- *e = SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftAuthType(v)
+ *e = SourceMicrosoftTeamsUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMicrosoftTeamsUpdateSchemasAuthType: %v", v)
}
}
-// SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft - Choose how to authenticate to Microsoft
-type SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft struct {
- AuthType *SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftAuthType `json:"auth_type,omitempty"`
+// AuthenticateViaMicrosoft - Choose how to authenticate to Microsoft
+type AuthenticateViaMicrosoft struct {
+ authType *SourceMicrosoftTeamsUpdateSchemasAuthType `const:"Token" json:"auth_type"`
// The Client ID of your Microsoft Teams developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Microsoft Teams developer application.
@@ -44,33 +44,69 @@ type SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft s
TenantID string `json:"tenant_id"`
}
-type SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType string
+func (a AuthenticateViaMicrosoft) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AuthenticateViaMicrosoft) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AuthenticateViaMicrosoft) GetAuthType() *SourceMicrosoftTeamsUpdateSchemasAuthType {
+ return SourceMicrosoftTeamsUpdateSchemasAuthTypeToken.ToPointer()
+}
+
+func (o *AuthenticateViaMicrosoft) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *AuthenticateViaMicrosoft) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *AuthenticateViaMicrosoft) GetTenantID() string {
+ if o == nil {
+ return ""
+ }
+ return o.TenantID
+}
+
+type SourceMicrosoftTeamsUpdateAuthType string
const (
- SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthTypeClient SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType = "Client"
+ SourceMicrosoftTeamsUpdateAuthTypeClient SourceMicrosoftTeamsUpdateAuthType = "Client"
)
-func (e SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType) ToPointer() *SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType {
+func (e SourceMicrosoftTeamsUpdateAuthType) ToPointer() *SourceMicrosoftTeamsUpdateAuthType {
return &e
}
-func (e *SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMicrosoftTeamsUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType(v)
+ *e = SourceMicrosoftTeamsUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMicrosoftTeamsUpdateAuthType: %v", v)
}
}
-// SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 - Choose how to authenticate to Microsoft
-type SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 struct {
- AuthType *SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20AuthType `json:"auth_type,omitempty"`
+// AuthenticateViaMicrosoftOAuth20 - Choose how to authenticate to Microsoft
+type AuthenticateViaMicrosoftOAuth20 struct {
+ authType *SourceMicrosoftTeamsUpdateAuthType `const:"Client" json:"auth_type"`
// The Client ID of your Microsoft Teams developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Microsoft Teams developer application.
@@ -81,56 +117,94 @@ type SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOA
TenantID string `json:"tenant_id"`
}
+func (a AuthenticateViaMicrosoftOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AuthenticateViaMicrosoftOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AuthenticateViaMicrosoftOAuth20) GetAuthType() *SourceMicrosoftTeamsUpdateAuthType {
+ return SourceMicrosoftTeamsUpdateAuthTypeClient.ToPointer()
+}
+
+func (o *AuthenticateViaMicrosoftOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *AuthenticateViaMicrosoftOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *AuthenticateViaMicrosoftOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *AuthenticateViaMicrosoftOAuth20) GetTenantID() string {
+ if o == nil {
+ return ""
+ }
+ return o.TenantID
+}
+
type SourceMicrosoftTeamsUpdateAuthenticationMechanismType string
const (
- SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 SourceMicrosoftTeamsUpdateAuthenticationMechanismType = "source-microsoft-teams-update_Authentication mechanism_Authenticate via Microsoft (OAuth 2.0)"
- SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft SourceMicrosoftTeamsUpdateAuthenticationMechanismType = "source-microsoft-teams-update_Authentication mechanism_Authenticate via Microsoft"
+ SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeAuthenticateViaMicrosoftOAuth20 SourceMicrosoftTeamsUpdateAuthenticationMechanismType = "Authenticate via Microsoft (OAuth 2.0)"
+ SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeAuthenticateViaMicrosoft SourceMicrosoftTeamsUpdateAuthenticationMechanismType = "Authenticate via Microsoft"
)
type SourceMicrosoftTeamsUpdateAuthenticationMechanism struct {
- SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 *SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20
- SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft *SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft
+ AuthenticateViaMicrosoftOAuth20 *AuthenticateViaMicrosoftOAuth20
+ AuthenticateViaMicrosoft *AuthenticateViaMicrosoft
Type SourceMicrosoftTeamsUpdateAuthenticationMechanismType
}
-func CreateSourceMicrosoftTeamsUpdateAuthenticationMechanismSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20(sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20) SourceMicrosoftTeamsUpdateAuthenticationMechanism {
- typ := SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20
+func CreateSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20(authenticateViaMicrosoftOAuth20 AuthenticateViaMicrosoftOAuth20) SourceMicrosoftTeamsUpdateAuthenticationMechanism {
+ typ := SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeAuthenticateViaMicrosoftOAuth20
return SourceMicrosoftTeamsUpdateAuthenticationMechanism{
- SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20: &sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20,
- Type: typ,
+ AuthenticateViaMicrosoftOAuth20: &authenticateViaMicrosoftOAuth20,
+ Type: typ,
}
}
-func CreateSourceMicrosoftTeamsUpdateAuthenticationMechanismSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft(sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft) SourceMicrosoftTeamsUpdateAuthenticationMechanism {
- typ := SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft
+func CreateSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft(authenticateViaMicrosoft AuthenticateViaMicrosoft) SourceMicrosoftTeamsUpdateAuthenticationMechanism {
+ typ := SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeAuthenticateViaMicrosoft
return SourceMicrosoftTeamsUpdateAuthenticationMechanism{
- SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft: &sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft,
- Type: typ,
+ AuthenticateViaMicrosoft: &authenticateViaMicrosoft,
+ Type: typ,
}
}
func (u *SourceMicrosoftTeamsUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft := new(SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft); err == nil {
- u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft = sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft
- u.Type = SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft
+
+ authenticateViaMicrosoft := new(AuthenticateViaMicrosoft)
+ if err := utils.UnmarshalJSON(data, &authenticateViaMicrosoft, "", true, true); err == nil {
+ u.AuthenticateViaMicrosoft = authenticateViaMicrosoft
+ u.Type = SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeAuthenticateViaMicrosoft
return nil
}
- sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 := new(SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20); err == nil {
- u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 = sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20
- u.Type = SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20
+ authenticateViaMicrosoftOAuth20 := new(AuthenticateViaMicrosoftOAuth20)
+ if err := utils.UnmarshalJSON(data, &authenticateViaMicrosoftOAuth20, "", true, true); err == nil {
+ u.AuthenticateViaMicrosoftOAuth20 = authenticateViaMicrosoftOAuth20
+ u.Type = SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeAuthenticateViaMicrosoftOAuth20
return nil
}
@@ -138,15 +212,15 @@ func (u *SourceMicrosoftTeamsUpdateAuthenticationMechanism) UnmarshalJSON(data [
}
func (u SourceMicrosoftTeamsUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft != nil {
- return json.Marshal(u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft)
+ if u.AuthenticateViaMicrosoftOAuth20 != nil {
+ return utils.MarshalJSON(u.AuthenticateViaMicrosoftOAuth20, "", true)
}
- if u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 != nil {
- return json.Marshal(u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20)
+ if u.AuthenticateViaMicrosoft != nil {
+ return utils.MarshalJSON(u.AuthenticateViaMicrosoft, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceMicrosoftTeamsUpdate struct {
@@ -155,3 +229,17 @@ type SourceMicrosoftTeamsUpdate struct {
// Specifies the length of time over which the Team Device Report stream is aggregated. The supported values are: D7, D30, D90, and D180.
Period string `json:"period"`
}
+
+func (o *SourceMicrosoftTeamsUpdate) GetCredentials() *SourceMicrosoftTeamsUpdateAuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceMicrosoftTeamsUpdate) GetPeriod() string {
+ if o == nil {
+ return ""
+ }
+ return o.Period
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemixpanel.go b/internal/sdk/pkg/models/shared/sourcemixpanel.go
old mode 100755
new mode 100644
index 6b993de7a..b5cf510b3
--- a/internal/sdk/pkg/models/shared/sourcemixpanel.go
+++ b/internal/sdk/pkg/models/shared/sourcemixpanel.go
@@ -3,127 +3,182 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMixpanelAuthenticationWildcardProjectSecretOptionTitle string
+type SourceMixpanelSchemasOptionTitle string
const (
- SourceMixpanelAuthenticationWildcardProjectSecretOptionTitleProjectSecret SourceMixpanelAuthenticationWildcardProjectSecretOptionTitle = "Project Secret"
+ SourceMixpanelSchemasOptionTitleProjectSecret SourceMixpanelSchemasOptionTitle = "Project Secret"
)
-func (e SourceMixpanelAuthenticationWildcardProjectSecretOptionTitle) ToPointer() *SourceMixpanelAuthenticationWildcardProjectSecretOptionTitle {
+func (e SourceMixpanelSchemasOptionTitle) ToPointer() *SourceMixpanelSchemasOptionTitle {
return &e
}
-func (e *SourceMixpanelAuthenticationWildcardProjectSecretOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceMixpanelSchemasOptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Project Secret":
- *e = SourceMixpanelAuthenticationWildcardProjectSecretOptionTitle(v)
+ *e = SourceMixpanelSchemasOptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMixpanelAuthenticationWildcardProjectSecretOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceMixpanelSchemasOptionTitle: %v", v)
}
}
-// SourceMixpanelAuthenticationWildcardProjectSecret - Choose how to authenticate to Mixpanel
-type SourceMixpanelAuthenticationWildcardProjectSecret struct {
+// SourceMixpanelProjectSecret - Choose how to authenticate to Mixpanel
+type SourceMixpanelProjectSecret struct {
// Mixpanel project secret. See the docs for more information on how to obtain this.
- APISecret string `json:"api_secret"`
- OptionTitle *SourceMixpanelAuthenticationWildcardProjectSecretOptionTitle `json:"option_title,omitempty"`
+ APISecret string `json:"api_secret"`
+ optionTitle *SourceMixpanelSchemasOptionTitle `const:"Project Secret" json:"option_title,omitempty"`
}
-type SourceMixpanelAuthenticationWildcardServiceAccountOptionTitle string
+func (s SourceMixpanelProjectSecret) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMixpanelProjectSecret) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMixpanelProjectSecret) GetAPISecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.APISecret
+}
+
+func (o *SourceMixpanelProjectSecret) GetOptionTitle() *SourceMixpanelSchemasOptionTitle {
+ return SourceMixpanelSchemasOptionTitleProjectSecret.ToPointer()
+}
+
+type SourceMixpanelOptionTitle string
const (
- SourceMixpanelAuthenticationWildcardServiceAccountOptionTitleServiceAccount SourceMixpanelAuthenticationWildcardServiceAccountOptionTitle = "Service Account"
+ SourceMixpanelOptionTitleServiceAccount SourceMixpanelOptionTitle = "Service Account"
)
-func (e SourceMixpanelAuthenticationWildcardServiceAccountOptionTitle) ToPointer() *SourceMixpanelAuthenticationWildcardServiceAccountOptionTitle {
+func (e SourceMixpanelOptionTitle) ToPointer() *SourceMixpanelOptionTitle {
return &e
}
-func (e *SourceMixpanelAuthenticationWildcardServiceAccountOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceMixpanelOptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Service Account":
- *e = SourceMixpanelAuthenticationWildcardServiceAccountOptionTitle(v)
+ *e = SourceMixpanelOptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMixpanelAuthenticationWildcardServiceAccountOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceMixpanelOptionTitle: %v", v)
}
}
-// SourceMixpanelAuthenticationWildcardServiceAccount - Choose how to authenticate to Mixpanel
-type SourceMixpanelAuthenticationWildcardServiceAccount struct {
- OptionTitle *SourceMixpanelAuthenticationWildcardServiceAccountOptionTitle `json:"option_title,omitempty"`
+// SourceMixpanelServiceAccount - Choose how to authenticate to Mixpanel
+type SourceMixpanelServiceAccount struct {
+ optionTitle *SourceMixpanelOptionTitle `const:"Service Account" json:"option_title,omitempty"`
+ // Your project ID number. See the docs for more information on how to obtain this.
+ ProjectID int64 `json:"project_id"`
// Mixpanel Service Account Secret. See the docs for more information on how to obtain this.
Secret string `json:"secret"`
// Mixpanel Service Account Username. See the docs for more information on how to obtain this.
Username string `json:"username"`
}
+func (s SourceMixpanelServiceAccount) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMixpanelServiceAccount) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMixpanelServiceAccount) GetOptionTitle() *SourceMixpanelOptionTitle {
+ return SourceMixpanelOptionTitleServiceAccount.ToPointer()
+}
+
+func (o *SourceMixpanelServiceAccount) GetProjectID() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.ProjectID
+}
+
+func (o *SourceMixpanelServiceAccount) GetSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.Secret
+}
+
+func (o *SourceMixpanelServiceAccount) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
type SourceMixpanelAuthenticationWildcardType string
const (
- SourceMixpanelAuthenticationWildcardTypeSourceMixpanelAuthenticationWildcardServiceAccount SourceMixpanelAuthenticationWildcardType = "source-mixpanel_Authentication *_Service Account"
- SourceMixpanelAuthenticationWildcardTypeSourceMixpanelAuthenticationWildcardProjectSecret SourceMixpanelAuthenticationWildcardType = "source-mixpanel_Authentication *_Project Secret"
+ SourceMixpanelAuthenticationWildcardTypeSourceMixpanelServiceAccount SourceMixpanelAuthenticationWildcardType = "source-mixpanel_Service Account"
+ SourceMixpanelAuthenticationWildcardTypeSourceMixpanelProjectSecret SourceMixpanelAuthenticationWildcardType = "source-mixpanel_Project Secret"
)
type SourceMixpanelAuthenticationWildcard struct {
- SourceMixpanelAuthenticationWildcardServiceAccount *SourceMixpanelAuthenticationWildcardServiceAccount
- SourceMixpanelAuthenticationWildcardProjectSecret *SourceMixpanelAuthenticationWildcardProjectSecret
+ SourceMixpanelServiceAccount *SourceMixpanelServiceAccount
+ SourceMixpanelProjectSecret *SourceMixpanelProjectSecret
Type SourceMixpanelAuthenticationWildcardType
}
-func CreateSourceMixpanelAuthenticationWildcardSourceMixpanelAuthenticationWildcardServiceAccount(sourceMixpanelAuthenticationWildcardServiceAccount SourceMixpanelAuthenticationWildcardServiceAccount) SourceMixpanelAuthenticationWildcard {
- typ := SourceMixpanelAuthenticationWildcardTypeSourceMixpanelAuthenticationWildcardServiceAccount
+func CreateSourceMixpanelAuthenticationWildcardSourceMixpanelServiceAccount(sourceMixpanelServiceAccount SourceMixpanelServiceAccount) SourceMixpanelAuthenticationWildcard {
+ typ := SourceMixpanelAuthenticationWildcardTypeSourceMixpanelServiceAccount
return SourceMixpanelAuthenticationWildcard{
- SourceMixpanelAuthenticationWildcardServiceAccount: &sourceMixpanelAuthenticationWildcardServiceAccount,
- Type: typ,
+ SourceMixpanelServiceAccount: &sourceMixpanelServiceAccount,
+ Type: typ,
}
}
-func CreateSourceMixpanelAuthenticationWildcardSourceMixpanelAuthenticationWildcardProjectSecret(sourceMixpanelAuthenticationWildcardProjectSecret SourceMixpanelAuthenticationWildcardProjectSecret) SourceMixpanelAuthenticationWildcard {
- typ := SourceMixpanelAuthenticationWildcardTypeSourceMixpanelAuthenticationWildcardProjectSecret
+func CreateSourceMixpanelAuthenticationWildcardSourceMixpanelProjectSecret(sourceMixpanelProjectSecret SourceMixpanelProjectSecret) SourceMixpanelAuthenticationWildcard {
+ typ := SourceMixpanelAuthenticationWildcardTypeSourceMixpanelProjectSecret
return SourceMixpanelAuthenticationWildcard{
- SourceMixpanelAuthenticationWildcardProjectSecret: &sourceMixpanelAuthenticationWildcardProjectSecret,
- Type: typ,
+ SourceMixpanelProjectSecret: &sourceMixpanelProjectSecret,
+ Type: typ,
}
}
func (u *SourceMixpanelAuthenticationWildcard) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMixpanelAuthenticationWildcardProjectSecret := new(SourceMixpanelAuthenticationWildcardProjectSecret)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMixpanelAuthenticationWildcardProjectSecret); err == nil {
- u.SourceMixpanelAuthenticationWildcardProjectSecret = sourceMixpanelAuthenticationWildcardProjectSecret
- u.Type = SourceMixpanelAuthenticationWildcardTypeSourceMixpanelAuthenticationWildcardProjectSecret
+
+ sourceMixpanelProjectSecret := new(SourceMixpanelProjectSecret)
+ if err := utils.UnmarshalJSON(data, &sourceMixpanelProjectSecret, "", true, true); err == nil {
+ u.SourceMixpanelProjectSecret = sourceMixpanelProjectSecret
+ u.Type = SourceMixpanelAuthenticationWildcardTypeSourceMixpanelProjectSecret
return nil
}
- sourceMixpanelAuthenticationWildcardServiceAccount := new(SourceMixpanelAuthenticationWildcardServiceAccount)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMixpanelAuthenticationWildcardServiceAccount); err == nil {
- u.SourceMixpanelAuthenticationWildcardServiceAccount = sourceMixpanelAuthenticationWildcardServiceAccount
- u.Type = SourceMixpanelAuthenticationWildcardTypeSourceMixpanelAuthenticationWildcardServiceAccount
+ sourceMixpanelServiceAccount := new(SourceMixpanelServiceAccount)
+ if err := utils.UnmarshalJSON(data, &sourceMixpanelServiceAccount, "", true, true); err == nil {
+ u.SourceMixpanelServiceAccount = sourceMixpanelServiceAccount
+ u.Type = SourceMixpanelAuthenticationWildcardTypeSourceMixpanelServiceAccount
return nil
}
@@ -131,15 +186,15 @@ func (u *SourceMixpanelAuthenticationWildcard) UnmarshalJSON(data []byte) error
}
func (u SourceMixpanelAuthenticationWildcard) MarshalJSON() ([]byte, error) {
- if u.SourceMixpanelAuthenticationWildcardProjectSecret != nil {
- return json.Marshal(u.SourceMixpanelAuthenticationWildcardProjectSecret)
+ if u.SourceMixpanelServiceAccount != nil {
+ return utils.MarshalJSON(u.SourceMixpanelServiceAccount, "", true)
}
- if u.SourceMixpanelAuthenticationWildcardServiceAccount != nil {
- return json.Marshal(u.SourceMixpanelAuthenticationWildcardServiceAccount)
+ if u.SourceMixpanelProjectSecret != nil {
+ return utils.MarshalJSON(u.SourceMixpanelProjectSecret, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// SourceMixpanelRegion - The region of mixpanel domain instance either US or EU.
@@ -170,48 +225,117 @@ func (e *SourceMixpanelRegion) UnmarshalJSON(data []byte) error {
}
}
-type SourceMixpanelMixpanel string
+type Mixpanel string
const (
- SourceMixpanelMixpanelMixpanel SourceMixpanelMixpanel = "mixpanel"
+ MixpanelMixpanel Mixpanel = "mixpanel"
)
-func (e SourceMixpanelMixpanel) ToPointer() *SourceMixpanelMixpanel {
+func (e Mixpanel) ToPointer() *Mixpanel {
return &e
}
-func (e *SourceMixpanelMixpanel) UnmarshalJSON(data []byte) error {
+func (e *Mixpanel) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "mixpanel":
- *e = SourceMixpanelMixpanel(v)
+ *e = Mixpanel(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMixpanelMixpanel: %v", v)
+ return fmt.Errorf("invalid value for Mixpanel: %v", v)
}
}
type SourceMixpanel struct {
- // A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days.
- AttributionWindow *int64 `json:"attribution_window,omitempty"`
+ // A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days. (This value should be non-negative integer)
+ AttributionWindow *int64 `default:"5" json:"attribution_window"`
// Choose how to authenticate to Mixpanel
- Credentials *SourceMixpanelAuthenticationWildcard `json:"credentials,omitempty"`
- // Defines window size in days, that used to slice through data. You can reduce it, if amount of data in each window is too big for your environment.
- DateWindowSize *int64 `json:"date_window_size,omitempty"`
+ Credentials SourceMixpanelAuthenticationWildcard `json:"credentials"`
+ // Defines window size in days, that used to slice through data. You can reduce it, if amount of data in each window is too big for your environment. (This value should be positive integer)
+ DateWindowSize *int64 `default:"30" json:"date_window_size"`
// The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date
EndDate *types.Date `json:"end_date,omitempty"`
- // Your project ID number. See the docs for more information on how to obtain this.
- ProjectID *int64 `json:"project_id,omitempty"`
// Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console.
- ProjectTimezone *string `json:"project_timezone,omitempty"`
+ ProjectTimezone *string `default:"US/Pacific" json:"project_timezone"`
// The region of mixpanel domain instance either US or EU.
- Region *SourceMixpanelRegion `json:"region,omitempty"`
+ Region *SourceMixpanelRegion `default:"US" json:"region"`
// Setting this config parameter to TRUE ensures that new properties on events and engage records are captured. Otherwise new properties will be ignored.
- SelectPropertiesByDefault *bool `json:"select_properties_by_default,omitempty"`
- SourceType *SourceMixpanelMixpanel `json:"sourceType,omitempty"`
+ SelectPropertiesByDefault *bool `default:"true" json:"select_properties_by_default"`
+ sourceType Mixpanel `const:"mixpanel" json:"sourceType"`
// The date in the format YYYY-MM-DD. Any data before this date will not be replicated. If this option is not set, the connector will replicate data from up to one year ago by default.
StartDate *types.Date `json:"start_date,omitempty"`
}
+
+func (s SourceMixpanel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMixpanel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMixpanel) GetAttributionWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.AttributionWindow
+}
+
+func (o *SourceMixpanel) GetCredentials() SourceMixpanelAuthenticationWildcard {
+ if o == nil {
+ return SourceMixpanelAuthenticationWildcard{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceMixpanel) GetDateWindowSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DateWindowSize
+}
+
+func (o *SourceMixpanel) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceMixpanel) GetProjectTimezone() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ProjectTimezone
+}
+
+func (o *SourceMixpanel) GetRegion() *SourceMixpanelRegion {
+ if o == nil {
+ return nil
+ }
+ return o.Region
+}
+
+func (o *SourceMixpanel) GetSelectPropertiesByDefault() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.SelectPropertiesByDefault
+}
+
+func (o *SourceMixpanel) GetSourceType() Mixpanel {
+ return MixpanelMixpanel
+}
+
+func (o *SourceMixpanel) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemixpanelcreaterequest.go b/internal/sdk/pkg/models/shared/sourcemixpanelcreaterequest.go
old mode 100755
new mode 100644
index 49b1f2e70..b14150e27
--- a/internal/sdk/pkg/models/shared/sourcemixpanelcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemixpanelcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMixpanelCreateRequest struct {
Configuration SourceMixpanel `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMixpanelCreateRequest) GetConfiguration() SourceMixpanel {
+ if o == nil {
+ return SourceMixpanel{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMixpanelCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMixpanelCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMixpanelCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMixpanelCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemixpanelputrequest.go b/internal/sdk/pkg/models/shared/sourcemixpanelputrequest.go
old mode 100755
new mode 100644
index cdd44616d..970bba921
--- a/internal/sdk/pkg/models/shared/sourcemixpanelputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemixpanelputrequest.go
@@ -7,3 +7,24 @@ type SourceMixpanelPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMixpanelPutRequest) GetConfiguration() SourceMixpanelUpdate {
+ if o == nil {
+ return SourceMixpanelUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMixpanelPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMixpanelPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemixpanelupdate.go b/internal/sdk/pkg/models/shared/sourcemixpanelupdate.go
old mode 100755
new mode 100644
index 7d1dc36bd..9c4570c79
--- a/internal/sdk/pkg/models/shared/sourcemixpanelupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemixpanelupdate.go
@@ -3,143 +3,198 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMixpanelUpdateAuthenticationWildcardProjectSecretOptionTitle string
+type SourceMixpanelUpdateSchemasOptionTitle string
const (
- SourceMixpanelUpdateAuthenticationWildcardProjectSecretOptionTitleProjectSecret SourceMixpanelUpdateAuthenticationWildcardProjectSecretOptionTitle = "Project Secret"
+ SourceMixpanelUpdateSchemasOptionTitleProjectSecret SourceMixpanelUpdateSchemasOptionTitle = "Project Secret"
)
-func (e SourceMixpanelUpdateAuthenticationWildcardProjectSecretOptionTitle) ToPointer() *SourceMixpanelUpdateAuthenticationWildcardProjectSecretOptionTitle {
+func (e SourceMixpanelUpdateSchemasOptionTitle) ToPointer() *SourceMixpanelUpdateSchemasOptionTitle {
return &e
}
-func (e *SourceMixpanelUpdateAuthenticationWildcardProjectSecretOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceMixpanelUpdateSchemasOptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Project Secret":
- *e = SourceMixpanelUpdateAuthenticationWildcardProjectSecretOptionTitle(v)
+ *e = SourceMixpanelUpdateSchemasOptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMixpanelUpdateAuthenticationWildcardProjectSecretOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceMixpanelUpdateSchemasOptionTitle: %v", v)
}
}
-// SourceMixpanelUpdateAuthenticationWildcardProjectSecret - Choose how to authenticate to Mixpanel
-type SourceMixpanelUpdateAuthenticationWildcardProjectSecret struct {
+// ProjectSecret - Choose how to authenticate to Mixpanel
+type ProjectSecret struct {
// Mixpanel project secret. See the docs for more information on how to obtain this.
- APISecret string `json:"api_secret"`
- OptionTitle *SourceMixpanelUpdateAuthenticationWildcardProjectSecretOptionTitle `json:"option_title,omitempty"`
+ APISecret string `json:"api_secret"`
+ optionTitle *SourceMixpanelUpdateSchemasOptionTitle `const:"Project Secret" json:"option_title,omitempty"`
}
-type SourceMixpanelUpdateAuthenticationWildcardServiceAccountOptionTitle string
+func (p ProjectSecret) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(p, "", false)
+}
+
+func (p *ProjectSecret) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &p, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ProjectSecret) GetAPISecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.APISecret
+}
+
+func (o *ProjectSecret) GetOptionTitle() *SourceMixpanelUpdateSchemasOptionTitle {
+ return SourceMixpanelUpdateSchemasOptionTitleProjectSecret.ToPointer()
+}
+
+type SourceMixpanelUpdateOptionTitle string
const (
- SourceMixpanelUpdateAuthenticationWildcardServiceAccountOptionTitleServiceAccount SourceMixpanelUpdateAuthenticationWildcardServiceAccountOptionTitle = "Service Account"
+ SourceMixpanelUpdateOptionTitleServiceAccount SourceMixpanelUpdateOptionTitle = "Service Account"
)
-func (e SourceMixpanelUpdateAuthenticationWildcardServiceAccountOptionTitle) ToPointer() *SourceMixpanelUpdateAuthenticationWildcardServiceAccountOptionTitle {
+func (e SourceMixpanelUpdateOptionTitle) ToPointer() *SourceMixpanelUpdateOptionTitle {
return &e
}
-func (e *SourceMixpanelUpdateAuthenticationWildcardServiceAccountOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceMixpanelUpdateOptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Service Account":
- *e = SourceMixpanelUpdateAuthenticationWildcardServiceAccountOptionTitle(v)
+ *e = SourceMixpanelUpdateOptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMixpanelUpdateAuthenticationWildcardServiceAccountOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceMixpanelUpdateOptionTitle: %v", v)
}
}
-// SourceMixpanelUpdateAuthenticationWildcardServiceAccount - Choose how to authenticate to Mixpanel
-type SourceMixpanelUpdateAuthenticationWildcardServiceAccount struct {
- OptionTitle *SourceMixpanelUpdateAuthenticationWildcardServiceAccountOptionTitle `json:"option_title,omitempty"`
+// ServiceAccount - Choose how to authenticate to Mixpanel
+type ServiceAccount struct {
+ optionTitle *SourceMixpanelUpdateOptionTitle `const:"Service Account" json:"option_title,omitempty"`
+ // Your project ID number. See the docs for more information on how to obtain this.
+ ProjectID int64 `json:"project_id"`
// Mixpanel Service Account Secret. See the docs for more information on how to obtain this.
Secret string `json:"secret"`
// Mixpanel Service Account Username. See the docs for more information on how to obtain this.
Username string `json:"username"`
}
-type SourceMixpanelUpdateAuthenticationWildcardType string
+func (s ServiceAccount) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *ServiceAccount) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ServiceAccount) GetOptionTitle() *SourceMixpanelUpdateOptionTitle {
+ return SourceMixpanelUpdateOptionTitleServiceAccount.ToPointer()
+}
+
+func (o *ServiceAccount) GetProjectID() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.ProjectID
+}
+
+func (o *ServiceAccount) GetSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.Secret
+}
+
+func (o *ServiceAccount) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type AuthenticationWildcardType string
const (
- SourceMixpanelUpdateAuthenticationWildcardTypeSourceMixpanelUpdateAuthenticationWildcardServiceAccount SourceMixpanelUpdateAuthenticationWildcardType = "source-mixpanel-update_Authentication *_Service Account"
- SourceMixpanelUpdateAuthenticationWildcardTypeSourceMixpanelUpdateAuthenticationWildcardProjectSecret SourceMixpanelUpdateAuthenticationWildcardType = "source-mixpanel-update_Authentication *_Project Secret"
+ AuthenticationWildcardTypeServiceAccount AuthenticationWildcardType = "Service Account"
+ AuthenticationWildcardTypeProjectSecret AuthenticationWildcardType = "Project Secret"
)
-type SourceMixpanelUpdateAuthenticationWildcard struct {
- SourceMixpanelUpdateAuthenticationWildcardServiceAccount *SourceMixpanelUpdateAuthenticationWildcardServiceAccount
- SourceMixpanelUpdateAuthenticationWildcardProjectSecret *SourceMixpanelUpdateAuthenticationWildcardProjectSecret
+type AuthenticationWildcard struct {
+ ServiceAccount *ServiceAccount
+ ProjectSecret *ProjectSecret
- Type SourceMixpanelUpdateAuthenticationWildcardType
+ Type AuthenticationWildcardType
}
-func CreateSourceMixpanelUpdateAuthenticationWildcardSourceMixpanelUpdateAuthenticationWildcardServiceAccount(sourceMixpanelUpdateAuthenticationWildcardServiceAccount SourceMixpanelUpdateAuthenticationWildcardServiceAccount) SourceMixpanelUpdateAuthenticationWildcard {
- typ := SourceMixpanelUpdateAuthenticationWildcardTypeSourceMixpanelUpdateAuthenticationWildcardServiceAccount
+func CreateAuthenticationWildcardServiceAccount(serviceAccount ServiceAccount) AuthenticationWildcard {
+ typ := AuthenticationWildcardTypeServiceAccount
- return SourceMixpanelUpdateAuthenticationWildcard{
- SourceMixpanelUpdateAuthenticationWildcardServiceAccount: &sourceMixpanelUpdateAuthenticationWildcardServiceAccount,
- Type: typ,
+ return AuthenticationWildcard{
+ ServiceAccount: &serviceAccount,
+ Type: typ,
}
}
-func CreateSourceMixpanelUpdateAuthenticationWildcardSourceMixpanelUpdateAuthenticationWildcardProjectSecret(sourceMixpanelUpdateAuthenticationWildcardProjectSecret SourceMixpanelUpdateAuthenticationWildcardProjectSecret) SourceMixpanelUpdateAuthenticationWildcard {
- typ := SourceMixpanelUpdateAuthenticationWildcardTypeSourceMixpanelUpdateAuthenticationWildcardProjectSecret
+func CreateAuthenticationWildcardProjectSecret(projectSecret ProjectSecret) AuthenticationWildcard {
+ typ := AuthenticationWildcardTypeProjectSecret
- return SourceMixpanelUpdateAuthenticationWildcard{
- SourceMixpanelUpdateAuthenticationWildcardProjectSecret: &sourceMixpanelUpdateAuthenticationWildcardProjectSecret,
- Type: typ,
+ return AuthenticationWildcard{
+ ProjectSecret: &projectSecret,
+ Type: typ,
}
}
-func (u *SourceMixpanelUpdateAuthenticationWildcard) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *AuthenticationWildcard) UnmarshalJSON(data []byte) error {
- sourceMixpanelUpdateAuthenticationWildcardProjectSecret := new(SourceMixpanelUpdateAuthenticationWildcardProjectSecret)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMixpanelUpdateAuthenticationWildcardProjectSecret); err == nil {
- u.SourceMixpanelUpdateAuthenticationWildcardProjectSecret = sourceMixpanelUpdateAuthenticationWildcardProjectSecret
- u.Type = SourceMixpanelUpdateAuthenticationWildcardTypeSourceMixpanelUpdateAuthenticationWildcardProjectSecret
+ projectSecret := new(ProjectSecret)
+ if err := utils.UnmarshalJSON(data, &projectSecret, "", true, true); err == nil {
+ u.ProjectSecret = projectSecret
+ u.Type = AuthenticationWildcardTypeProjectSecret
return nil
}
- sourceMixpanelUpdateAuthenticationWildcardServiceAccount := new(SourceMixpanelUpdateAuthenticationWildcardServiceAccount)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMixpanelUpdateAuthenticationWildcardServiceAccount); err == nil {
- u.SourceMixpanelUpdateAuthenticationWildcardServiceAccount = sourceMixpanelUpdateAuthenticationWildcardServiceAccount
- u.Type = SourceMixpanelUpdateAuthenticationWildcardTypeSourceMixpanelUpdateAuthenticationWildcardServiceAccount
+ serviceAccount := new(ServiceAccount)
+ if err := utils.UnmarshalJSON(data, &serviceAccount, "", true, true); err == nil {
+ u.ServiceAccount = serviceAccount
+ u.Type = AuthenticationWildcardTypeServiceAccount
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceMixpanelUpdateAuthenticationWildcard) MarshalJSON() ([]byte, error) {
- if u.SourceMixpanelUpdateAuthenticationWildcardProjectSecret != nil {
- return json.Marshal(u.SourceMixpanelUpdateAuthenticationWildcardProjectSecret)
+func (u AuthenticationWildcard) MarshalJSON() ([]byte, error) {
+ if u.ServiceAccount != nil {
+ return utils.MarshalJSON(u.ServiceAccount, "", true)
}
- if u.SourceMixpanelUpdateAuthenticationWildcardServiceAccount != nil {
- return json.Marshal(u.SourceMixpanelUpdateAuthenticationWildcardServiceAccount)
+ if u.ProjectSecret != nil {
+ return utils.MarshalJSON(u.ProjectSecret, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// SourceMixpanelUpdateRegion - The region of mixpanel domain instance either US or EU.
@@ -171,22 +226,87 @@ func (e *SourceMixpanelUpdateRegion) UnmarshalJSON(data []byte) error {
}
type SourceMixpanelUpdate struct {
- // A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. Default attribution window is 5 days.
- AttributionWindow *int64 `json:"attribution_window,omitempty"`
+ // A period of time for attributing results to ads and the lookback period after those actions occur during which ad results are counted. The default attribution window is 5 days. (This value should be a non-negative integer.)
+ AttributionWindow *int64 `default:"5" json:"attribution_window"`
// Choose how to authenticate to Mixpanel
- Credentials *SourceMixpanelUpdateAuthenticationWildcard `json:"credentials,omitempty"`
- // Defines window size in days, that used to slice through data. You can reduce it, if amount of data in each window is too big for your environment.
- DateWindowSize *int64 `json:"date_window_size,omitempty"`
+ Credentials AuthenticationWildcard `json:"credentials"`
+ // Defines the window size in days that is used to slice through data. You can reduce it if the amount of data in each window is too big for your environment. (This value should be a positive integer.)
+ DateWindowSize *int64 `default:"30" json:"date_window_size"`
// The date in the format YYYY-MM-DD. Any data after this date will not be replicated. Left empty to always sync to most recent date
EndDate *types.Date `json:"end_date,omitempty"`
- // Your project ID number. See the docs for more information on how to obtain this.
- ProjectID *int64 `json:"project_id,omitempty"`
// Time zone in which integer date times are stored. The project timezone may be found in the project settings in the Mixpanel console.
- ProjectTimezone *string `json:"project_timezone,omitempty"`
+ ProjectTimezone *string `default:"US/Pacific" json:"project_timezone"`
// The region of mixpanel domain instance either US or EU.
- Region *SourceMixpanelUpdateRegion `json:"region,omitempty"`
+ Region *SourceMixpanelUpdateRegion `default:"US" json:"region"`
// Setting this config parameter to TRUE ensures that new properties on events and engage records are captured. Otherwise new properties will be ignored.
- SelectPropertiesByDefault *bool `json:"select_properties_by_default,omitempty"`
+ SelectPropertiesByDefault *bool `default:"true" json:"select_properties_by_default"`
// The date in the format YYYY-MM-DD. Any data before this date will not be replicated. If this option is not set, the connector will replicate data from up to one year ago by default.
StartDate *types.Date `json:"start_date,omitempty"`
}
+
+func (s SourceMixpanelUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMixpanelUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMixpanelUpdate) GetAttributionWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.AttributionWindow
+}
+
+func (o *SourceMixpanelUpdate) GetCredentials() AuthenticationWildcard {
+ if o == nil {
+ return AuthenticationWildcard{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceMixpanelUpdate) GetDateWindowSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DateWindowSize
+}
+
+func (o *SourceMixpanelUpdate) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceMixpanelUpdate) GetProjectTimezone() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ProjectTimezone
+}
+
+func (o *SourceMixpanelUpdate) GetRegion() *SourceMixpanelUpdateRegion {
+ if o == nil {
+ return nil
+ }
+ return o.Region
+}
+
+func (o *SourceMixpanelUpdate) GetSelectPropertiesByDefault() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.SelectPropertiesByDefault
+}
+
+func (o *SourceMixpanelUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemonday.go b/internal/sdk/pkg/models/shared/sourcemonday.go
old mode 100755
new mode 100644
index aea4a1b89..442208961
--- a/internal/sdk/pkg/models/shared/sourcemonday.go
+++ b/internal/sdk/pkg/models/shared/sourcemonday.go
@@ -3,128 +3,188 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMondayAuthorizationMethodAPITokenAuthType string
+type SourceMondaySchemasAuthType string
const (
- SourceMondayAuthorizationMethodAPITokenAuthTypeAPIToken SourceMondayAuthorizationMethodAPITokenAuthType = "api_token"
+ SourceMondaySchemasAuthTypeAPIToken SourceMondaySchemasAuthType = "api_token"
)
-func (e SourceMondayAuthorizationMethodAPITokenAuthType) ToPointer() *SourceMondayAuthorizationMethodAPITokenAuthType {
+func (e SourceMondaySchemasAuthType) ToPointer() *SourceMondaySchemasAuthType {
return &e
}
-func (e *SourceMondayAuthorizationMethodAPITokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMondaySchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_token":
- *e = SourceMondayAuthorizationMethodAPITokenAuthType(v)
+ *e = SourceMondaySchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMondayAuthorizationMethodAPITokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMondaySchemasAuthType: %v", v)
}
}
-type SourceMondayAuthorizationMethodAPIToken struct {
+type SourceMondayAPIToken struct {
// API Token for making authenticated requests.
- APIToken string `json:"api_token"`
- AuthType SourceMondayAuthorizationMethodAPITokenAuthType `json:"auth_type"`
+ APIToken string `json:"api_token"`
+ authType SourceMondaySchemasAuthType `const:"api_token" json:"auth_type"`
}
-type SourceMondayAuthorizationMethodOAuth20AuthType string
+func (s SourceMondayAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMondayAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMondayAPIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceMondayAPIToken) GetAuthType() SourceMondaySchemasAuthType {
+ return SourceMondaySchemasAuthTypeAPIToken
+}
+
+type SourceMondayAuthType string
const (
- SourceMondayAuthorizationMethodOAuth20AuthTypeOauth20 SourceMondayAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceMondayAuthTypeOauth20 SourceMondayAuthType = "oauth2.0"
)
-func (e SourceMondayAuthorizationMethodOAuth20AuthType) ToPointer() *SourceMondayAuthorizationMethodOAuth20AuthType {
+func (e SourceMondayAuthType) ToPointer() *SourceMondayAuthType {
return &e
}
-func (e *SourceMondayAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMondayAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceMondayAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceMondayAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMondayAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMondayAuthType: %v", v)
}
}
-type SourceMondayAuthorizationMethodOAuth20 struct {
+type SourceMondayOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType SourceMondayAuthorizationMethodOAuth20AuthType `json:"auth_type"`
+ AccessToken string `json:"access_token"`
+ authType SourceMondayAuthType `const:"oauth2.0" json:"auth_type"`
// The Client ID of your OAuth application.
ClientID string `json:"client_id"`
// The Client Secret of your OAuth application.
ClientSecret string `json:"client_secret"`
// Slug/subdomain of the account, or the first part of the URL that comes before .monday.com
- Subdomain *string `json:"subdomain,omitempty"`
+ Subdomain *string `default:"" json:"subdomain"`
+}
+
+func (s SourceMondayOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMondayOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMondayOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceMondayOAuth20) GetAuthType() SourceMondayAuthType {
+ return SourceMondayAuthTypeOauth20
+}
+
+func (o *SourceMondayOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceMondayOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceMondayOAuth20) GetSubdomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Subdomain
}
type SourceMondayAuthorizationMethodType string
const (
- SourceMondayAuthorizationMethodTypeSourceMondayAuthorizationMethodOAuth20 SourceMondayAuthorizationMethodType = "source-monday_Authorization Method_OAuth2.0"
- SourceMondayAuthorizationMethodTypeSourceMondayAuthorizationMethodAPIToken SourceMondayAuthorizationMethodType = "source-monday_Authorization Method_API Token"
+ SourceMondayAuthorizationMethodTypeSourceMondayOAuth20 SourceMondayAuthorizationMethodType = "source-monday_OAuth2.0"
+ SourceMondayAuthorizationMethodTypeSourceMondayAPIToken SourceMondayAuthorizationMethodType = "source-monday_API Token"
)
type SourceMondayAuthorizationMethod struct {
- SourceMondayAuthorizationMethodOAuth20 *SourceMondayAuthorizationMethodOAuth20
- SourceMondayAuthorizationMethodAPIToken *SourceMondayAuthorizationMethodAPIToken
+ SourceMondayOAuth20 *SourceMondayOAuth20
+ SourceMondayAPIToken *SourceMondayAPIToken
Type SourceMondayAuthorizationMethodType
}
-func CreateSourceMondayAuthorizationMethodSourceMondayAuthorizationMethodOAuth20(sourceMondayAuthorizationMethodOAuth20 SourceMondayAuthorizationMethodOAuth20) SourceMondayAuthorizationMethod {
- typ := SourceMondayAuthorizationMethodTypeSourceMondayAuthorizationMethodOAuth20
+func CreateSourceMondayAuthorizationMethodSourceMondayOAuth20(sourceMondayOAuth20 SourceMondayOAuth20) SourceMondayAuthorizationMethod {
+ typ := SourceMondayAuthorizationMethodTypeSourceMondayOAuth20
return SourceMondayAuthorizationMethod{
- SourceMondayAuthorizationMethodOAuth20: &sourceMondayAuthorizationMethodOAuth20,
- Type: typ,
+ SourceMondayOAuth20: &sourceMondayOAuth20,
+ Type: typ,
}
}
-func CreateSourceMondayAuthorizationMethodSourceMondayAuthorizationMethodAPIToken(sourceMondayAuthorizationMethodAPIToken SourceMondayAuthorizationMethodAPIToken) SourceMondayAuthorizationMethod {
- typ := SourceMondayAuthorizationMethodTypeSourceMondayAuthorizationMethodAPIToken
+func CreateSourceMondayAuthorizationMethodSourceMondayAPIToken(sourceMondayAPIToken SourceMondayAPIToken) SourceMondayAuthorizationMethod {
+ typ := SourceMondayAuthorizationMethodTypeSourceMondayAPIToken
return SourceMondayAuthorizationMethod{
- SourceMondayAuthorizationMethodAPIToken: &sourceMondayAuthorizationMethodAPIToken,
- Type: typ,
+ SourceMondayAPIToken: &sourceMondayAPIToken,
+ Type: typ,
}
}
func (u *SourceMondayAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMondayAuthorizationMethodAPIToken := new(SourceMondayAuthorizationMethodAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMondayAuthorizationMethodAPIToken); err == nil {
- u.SourceMondayAuthorizationMethodAPIToken = sourceMondayAuthorizationMethodAPIToken
- u.Type = SourceMondayAuthorizationMethodTypeSourceMondayAuthorizationMethodAPIToken
+
+ sourceMondayAPIToken := new(SourceMondayAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceMondayAPIToken, "", true, true); err == nil {
+ u.SourceMondayAPIToken = sourceMondayAPIToken
+ u.Type = SourceMondayAuthorizationMethodTypeSourceMondayAPIToken
return nil
}
- sourceMondayAuthorizationMethodOAuth20 := new(SourceMondayAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMondayAuthorizationMethodOAuth20); err == nil {
- u.SourceMondayAuthorizationMethodOAuth20 = sourceMondayAuthorizationMethodOAuth20
- u.Type = SourceMondayAuthorizationMethodTypeSourceMondayAuthorizationMethodOAuth20
+ sourceMondayOAuth20 := new(SourceMondayOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceMondayOAuth20, "", true, true); err == nil {
+ u.SourceMondayOAuth20 = sourceMondayOAuth20
+ u.Type = SourceMondayAuthorizationMethodTypeSourceMondayOAuth20
return nil
}
@@ -132,42 +192,64 @@ func (u *SourceMondayAuthorizationMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceMondayAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMondayAuthorizationMethodAPIToken != nil {
- return json.Marshal(u.SourceMondayAuthorizationMethodAPIToken)
+ if u.SourceMondayOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceMondayOAuth20, "", true)
}
- if u.SourceMondayAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceMondayAuthorizationMethodOAuth20)
+ if u.SourceMondayAPIToken != nil {
+ return utils.MarshalJSON(u.SourceMondayAPIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceMondayMonday string
+type Monday string
const (
- SourceMondayMondayMonday SourceMondayMonday = "monday"
+ MondayMonday Monday = "monday"
)
-func (e SourceMondayMonday) ToPointer() *SourceMondayMonday {
+func (e Monday) ToPointer() *Monday {
return &e
}
-func (e *SourceMondayMonday) UnmarshalJSON(data []byte) error {
+func (e *Monday) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "monday":
- *e = SourceMondayMonday(v)
+ *e = Monday(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMondayMonday: %v", v)
+ return fmt.Errorf("invalid value for Monday: %v", v)
}
}
type SourceMonday struct {
Credentials *SourceMondayAuthorizationMethod `json:"credentials,omitempty"`
- SourceType SourceMondayMonday `json:"sourceType"`
+ sourceType Monday `const:"monday" json:"sourceType"`
+}
+
+func (s SourceMonday) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMonday) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMonday) GetCredentials() *SourceMondayAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceMonday) GetSourceType() Monday {
+ return MondayMonday
}
diff --git a/internal/sdk/pkg/models/shared/sourcemondaycreaterequest.go b/internal/sdk/pkg/models/shared/sourcemondaycreaterequest.go
old mode 100755
new mode 100644
index a0dcabf83..2e6cdb3b6
--- a/internal/sdk/pkg/models/shared/sourcemondaycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemondaycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMondayCreateRequest struct {
Configuration SourceMonday `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMondayCreateRequest) GetConfiguration() SourceMonday {
+ if o == nil {
+ return SourceMonday{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMondayCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMondayCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMondayCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMondayCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemondayputrequest.go b/internal/sdk/pkg/models/shared/sourcemondayputrequest.go
old mode 100755
new mode 100644
index ed2a47d3e..4d077a21d
--- a/internal/sdk/pkg/models/shared/sourcemondayputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemondayputrequest.go
@@ -7,3 +7,24 @@ type SourceMondayPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMondayPutRequest) GetConfiguration() SourceMondayUpdate {
+ if o == nil {
+ return SourceMondayUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMondayPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMondayPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemondayupdate.go b/internal/sdk/pkg/models/shared/sourcemondayupdate.go
old mode 100755
new mode 100644
index 5f2914db2..af6c00a91
--- a/internal/sdk/pkg/models/shared/sourcemondayupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemondayupdate.go
@@ -3,128 +3,188 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMondayUpdateAuthorizationMethodAPITokenAuthType string
+type SourceMondayUpdateSchemasAuthType string
const (
- SourceMondayUpdateAuthorizationMethodAPITokenAuthTypeAPIToken SourceMondayUpdateAuthorizationMethodAPITokenAuthType = "api_token"
+ SourceMondayUpdateSchemasAuthTypeAPIToken SourceMondayUpdateSchemasAuthType = "api_token"
)
-func (e SourceMondayUpdateAuthorizationMethodAPITokenAuthType) ToPointer() *SourceMondayUpdateAuthorizationMethodAPITokenAuthType {
+func (e SourceMondayUpdateSchemasAuthType) ToPointer() *SourceMondayUpdateSchemasAuthType {
return &e
}
-func (e *SourceMondayUpdateAuthorizationMethodAPITokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMondayUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_token":
- *e = SourceMondayUpdateAuthorizationMethodAPITokenAuthType(v)
+ *e = SourceMondayUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMondayUpdateAuthorizationMethodAPITokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMondayUpdateSchemasAuthType: %v", v)
}
}
-type SourceMondayUpdateAuthorizationMethodAPIToken struct {
+type APIToken struct {
// API Token for making authenticated requests.
- APIToken string `json:"api_token"`
- AuthType SourceMondayUpdateAuthorizationMethodAPITokenAuthType `json:"auth_type"`
+ APIToken string `json:"api_token"`
+ authType SourceMondayUpdateSchemasAuthType `const:"api_token" json:"auth_type"`
}
-type SourceMondayUpdateAuthorizationMethodOAuth20AuthType string
+func (a APIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *APIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *APIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *APIToken) GetAuthType() SourceMondayUpdateSchemasAuthType {
+ return SourceMondayUpdateSchemasAuthTypeAPIToken
+}
+
+type SourceMondayUpdateAuthType string
const (
- SourceMondayUpdateAuthorizationMethodOAuth20AuthTypeOauth20 SourceMondayUpdateAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceMondayUpdateAuthTypeOauth20 SourceMondayUpdateAuthType = "oauth2.0"
)
-func (e SourceMondayUpdateAuthorizationMethodOAuth20AuthType) ToPointer() *SourceMondayUpdateAuthorizationMethodOAuth20AuthType {
+func (e SourceMondayUpdateAuthType) ToPointer() *SourceMondayUpdateAuthType {
return &e
}
-func (e *SourceMondayUpdateAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceMondayUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceMondayUpdateAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceMondayUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMondayUpdateAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceMondayUpdateAuthType: %v", v)
}
}
-type SourceMondayUpdateAuthorizationMethodOAuth20 struct {
+type SourceMondayUpdateOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType SourceMondayUpdateAuthorizationMethodOAuth20AuthType `json:"auth_type"`
+ AccessToken string `json:"access_token"`
+ authType SourceMondayUpdateAuthType `const:"oauth2.0" json:"auth_type"`
// The Client ID of your OAuth application.
ClientID string `json:"client_id"`
// The Client Secret of your OAuth application.
ClientSecret string `json:"client_secret"`
// Slug/subdomain of the account, or the first part of the URL that comes before .monday.com
- Subdomain *string `json:"subdomain,omitempty"`
+ Subdomain *string `default:"" json:"subdomain"`
+}
+
+func (s SourceMondayUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMondayUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMondayUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceMondayUpdateOAuth20) GetAuthType() SourceMondayUpdateAuthType {
+ return SourceMondayUpdateAuthTypeOauth20
+}
+
+func (o *SourceMondayUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceMondayUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceMondayUpdateOAuth20) GetSubdomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Subdomain
}
type SourceMondayUpdateAuthorizationMethodType string
const (
- SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateAuthorizationMethodOAuth20 SourceMondayUpdateAuthorizationMethodType = "source-monday-update_Authorization Method_OAuth2.0"
- SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateAuthorizationMethodAPIToken SourceMondayUpdateAuthorizationMethodType = "source-monday-update_Authorization Method_API Token"
+ SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateOAuth20 SourceMondayUpdateAuthorizationMethodType = "source-monday-update_OAuth2.0"
+ SourceMondayUpdateAuthorizationMethodTypeAPIToken SourceMondayUpdateAuthorizationMethodType = "API Token"
)
type SourceMondayUpdateAuthorizationMethod struct {
- SourceMondayUpdateAuthorizationMethodOAuth20 *SourceMondayUpdateAuthorizationMethodOAuth20
- SourceMondayUpdateAuthorizationMethodAPIToken *SourceMondayUpdateAuthorizationMethodAPIToken
+ SourceMondayUpdateOAuth20 *SourceMondayUpdateOAuth20
+ APIToken *APIToken
Type SourceMondayUpdateAuthorizationMethodType
}
-func CreateSourceMondayUpdateAuthorizationMethodSourceMondayUpdateAuthorizationMethodOAuth20(sourceMondayUpdateAuthorizationMethodOAuth20 SourceMondayUpdateAuthorizationMethodOAuth20) SourceMondayUpdateAuthorizationMethod {
- typ := SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateAuthorizationMethodOAuth20
+func CreateSourceMondayUpdateAuthorizationMethodSourceMondayUpdateOAuth20(sourceMondayUpdateOAuth20 SourceMondayUpdateOAuth20) SourceMondayUpdateAuthorizationMethod {
+ typ := SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateOAuth20
return SourceMondayUpdateAuthorizationMethod{
- SourceMondayUpdateAuthorizationMethodOAuth20: &sourceMondayUpdateAuthorizationMethodOAuth20,
- Type: typ,
+ SourceMondayUpdateOAuth20: &sourceMondayUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceMondayUpdateAuthorizationMethodSourceMondayUpdateAuthorizationMethodAPIToken(sourceMondayUpdateAuthorizationMethodAPIToken SourceMondayUpdateAuthorizationMethodAPIToken) SourceMondayUpdateAuthorizationMethod {
- typ := SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateAuthorizationMethodAPIToken
+func CreateSourceMondayUpdateAuthorizationMethodAPIToken(apiToken APIToken) SourceMondayUpdateAuthorizationMethod {
+ typ := SourceMondayUpdateAuthorizationMethodTypeAPIToken
return SourceMondayUpdateAuthorizationMethod{
- SourceMondayUpdateAuthorizationMethodAPIToken: &sourceMondayUpdateAuthorizationMethodAPIToken,
- Type: typ,
+ APIToken: &apiToken,
+ Type: typ,
}
}
func (u *SourceMondayUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMondayUpdateAuthorizationMethodAPIToken := new(SourceMondayUpdateAuthorizationMethodAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMondayUpdateAuthorizationMethodAPIToken); err == nil {
- u.SourceMondayUpdateAuthorizationMethodAPIToken = sourceMondayUpdateAuthorizationMethodAPIToken
- u.Type = SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateAuthorizationMethodAPIToken
+
+ apiToken := new(APIToken)
+ if err := utils.UnmarshalJSON(data, &apiToken, "", true, true); err == nil {
+ u.APIToken = apiToken
+ u.Type = SourceMondayUpdateAuthorizationMethodTypeAPIToken
return nil
}
- sourceMondayUpdateAuthorizationMethodOAuth20 := new(SourceMondayUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMondayUpdateAuthorizationMethodOAuth20); err == nil {
- u.SourceMondayUpdateAuthorizationMethodOAuth20 = sourceMondayUpdateAuthorizationMethodOAuth20
- u.Type = SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateAuthorizationMethodOAuth20
+ sourceMondayUpdateOAuth20 := new(SourceMondayUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceMondayUpdateOAuth20, "", true, true); err == nil {
+ u.SourceMondayUpdateOAuth20 = sourceMondayUpdateOAuth20
+ u.Type = SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateOAuth20
return nil
}
@@ -132,17 +192,24 @@ func (u *SourceMondayUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error
}
func (u SourceMondayUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMondayUpdateAuthorizationMethodAPIToken != nil {
- return json.Marshal(u.SourceMondayUpdateAuthorizationMethodAPIToken)
+ if u.SourceMondayUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceMondayUpdateOAuth20, "", true)
}
- if u.SourceMondayUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceMondayUpdateAuthorizationMethodOAuth20)
+ if u.APIToken != nil {
+ return utils.MarshalJSON(u.APIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceMondayUpdate struct {
Credentials *SourceMondayUpdateAuthorizationMethod `json:"credentials,omitempty"`
}
+
+func (o *SourceMondayUpdate) GetCredentials() *SourceMondayUpdateAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodb.go b/internal/sdk/pkg/models/shared/sourcemongodb.go
deleted file mode 100755
index f24c34207..000000000
--- a/internal/sdk/pkg/models/shared/sourcemongodb.go
+++ /dev/null
@@ -1,284 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
-)
-
-type SourceMongodbMongoDBInstanceTypeMongoDBAtlasInstance string
-
-const (
- SourceMongodbMongoDBInstanceTypeMongoDBAtlasInstanceAtlas SourceMongodbMongoDBInstanceTypeMongoDBAtlasInstance = "atlas"
-)
-
-func (e SourceMongodbMongoDBInstanceTypeMongoDBAtlasInstance) ToPointer() *SourceMongodbMongoDBInstanceTypeMongoDBAtlasInstance {
- return &e
-}
-
-func (e *SourceMongodbMongoDBInstanceTypeMongoDBAtlasInstance) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "atlas":
- *e = SourceMongodbMongoDBInstanceTypeMongoDBAtlasInstance(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceMongodbMongoDBInstanceTypeMongoDBAtlasInstance: %v", v)
- }
-}
-
-// SourceMongodbMongoDBInstanceTypeMongoDBAtlas - The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type SourceMongodbMongoDBInstanceTypeMongoDBAtlas struct {
- // The URL of a cluster to connect to.
- ClusterURL string `json:"cluster_url"`
- Instance SourceMongodbMongoDBInstanceTypeMongoDBAtlasInstance `json:"instance"`
-
- AdditionalProperties interface{} `json:"-"`
-}
-type _SourceMongodbMongoDBInstanceTypeMongoDBAtlas SourceMongodbMongoDBInstanceTypeMongoDBAtlas
-
-func (c *SourceMongodbMongoDBInstanceTypeMongoDBAtlas) UnmarshalJSON(bs []byte) error {
- data := _SourceMongodbMongoDBInstanceTypeMongoDBAtlas{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceMongodbMongoDBInstanceTypeMongoDBAtlas(data)
-
- additionalFields := make(map[string]interface{})
-
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
- }
- delete(additionalFields, "cluster_url")
- delete(additionalFields, "instance")
-
- c.AdditionalProperties = additionalFields
-
- return nil
-}
-
-func (c SourceMongodbMongoDBInstanceTypeMongoDBAtlas) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceMongodbMongoDBInstanceTypeMongoDBAtlas(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- return json.Marshal(out)
-}
-
-type SourceMongodbMongoDbInstanceTypeReplicaSetInstance string
-
-const (
- SourceMongodbMongoDbInstanceTypeReplicaSetInstanceReplica SourceMongodbMongoDbInstanceTypeReplicaSetInstance = "replica"
-)
-
-func (e SourceMongodbMongoDbInstanceTypeReplicaSetInstance) ToPointer() *SourceMongodbMongoDbInstanceTypeReplicaSetInstance {
- return &e
-}
-
-func (e *SourceMongodbMongoDbInstanceTypeReplicaSetInstance) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "replica":
- *e = SourceMongodbMongoDbInstanceTypeReplicaSetInstance(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceMongodbMongoDbInstanceTypeReplicaSetInstance: %v", v)
- }
-}
-
-// SourceMongodbMongoDbInstanceTypeReplicaSet - The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type SourceMongodbMongoDbInstanceTypeReplicaSet struct {
- Instance SourceMongodbMongoDbInstanceTypeReplicaSetInstance `json:"instance"`
- // A replica set in MongoDB is a group of mongod processes that maintain the same data set.
- ReplicaSet *string `json:"replica_set,omitempty"`
- // The members of a replica set. Please specify `host`:`port` of each member separated by comma.
- ServerAddresses string `json:"server_addresses"`
-}
-
-type SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance string
-
-const (
- SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstanceStandalone SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance = "standalone"
-)
-
-func (e SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance) ToPointer() *SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance {
- return &e
-}
-
-func (e *SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "standalone":
- *e = SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance: %v", v)
- }
-}
-
-// SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance - The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance struct {
- // The host name of the Mongo database.
- Host string `json:"host"`
- Instance SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstanceInstance `json:"instance"`
- // The port of the Mongo database.
- Port int64 `json:"port"`
-}
-
-type SourceMongodbMongoDbInstanceTypeType string
-
-const (
- SourceMongodbMongoDbInstanceTypeTypeSourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance SourceMongodbMongoDbInstanceTypeType = "source-mongodb_MongoDb Instance Type_Standalone MongoDb Instance"
- SourceMongodbMongoDbInstanceTypeTypeSourceMongodbMongoDbInstanceTypeReplicaSet SourceMongodbMongoDbInstanceTypeType = "source-mongodb_MongoDb Instance Type_Replica Set"
- SourceMongodbMongoDbInstanceTypeTypeSourceMongodbMongoDBInstanceTypeMongoDBAtlas SourceMongodbMongoDbInstanceTypeType = "source-mongodb_MongoDb Instance Type_MongoDB Atlas"
-)
-
-type SourceMongodbMongoDbInstanceType struct {
- SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance *SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance
- SourceMongodbMongoDbInstanceTypeReplicaSet *SourceMongodbMongoDbInstanceTypeReplicaSet
- SourceMongodbMongoDBInstanceTypeMongoDBAtlas *SourceMongodbMongoDBInstanceTypeMongoDBAtlas
-
- Type SourceMongodbMongoDbInstanceTypeType
-}
-
-func CreateSourceMongodbMongoDbInstanceTypeSourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance(sourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance) SourceMongodbMongoDbInstanceType {
- typ := SourceMongodbMongoDbInstanceTypeTypeSourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance
-
- return SourceMongodbMongoDbInstanceType{
- SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance: &sourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance,
- Type: typ,
- }
-}
-
-func CreateSourceMongodbMongoDbInstanceTypeSourceMongodbMongoDbInstanceTypeReplicaSet(sourceMongodbMongoDbInstanceTypeReplicaSet SourceMongodbMongoDbInstanceTypeReplicaSet) SourceMongodbMongoDbInstanceType {
- typ := SourceMongodbMongoDbInstanceTypeTypeSourceMongodbMongoDbInstanceTypeReplicaSet
-
- return SourceMongodbMongoDbInstanceType{
- SourceMongodbMongoDbInstanceTypeReplicaSet: &sourceMongodbMongoDbInstanceTypeReplicaSet,
- Type: typ,
- }
-}
-
-func CreateSourceMongodbMongoDbInstanceTypeSourceMongodbMongoDBInstanceTypeMongoDBAtlas(sourceMongodbMongoDBInstanceTypeMongoDBAtlas SourceMongodbMongoDBInstanceTypeMongoDBAtlas) SourceMongodbMongoDbInstanceType {
- typ := SourceMongodbMongoDbInstanceTypeTypeSourceMongodbMongoDBInstanceTypeMongoDBAtlas
-
- return SourceMongodbMongoDbInstanceType{
- SourceMongodbMongoDBInstanceTypeMongoDBAtlas: &sourceMongodbMongoDBInstanceTypeMongoDBAtlas,
- Type: typ,
- }
-}
-
-func (u *SourceMongodbMongoDbInstanceType) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMongodbMongoDBInstanceTypeMongoDBAtlas := new(SourceMongodbMongoDBInstanceTypeMongoDBAtlas)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMongodbMongoDBInstanceTypeMongoDBAtlas); err == nil {
- u.SourceMongodbMongoDBInstanceTypeMongoDBAtlas = sourceMongodbMongoDBInstanceTypeMongoDBAtlas
- u.Type = SourceMongodbMongoDbInstanceTypeTypeSourceMongodbMongoDBInstanceTypeMongoDBAtlas
- return nil
- }
-
- sourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance := new(SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance); err == nil {
- u.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance = sourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance
- u.Type = SourceMongodbMongoDbInstanceTypeTypeSourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance
- return nil
- }
-
- sourceMongodbMongoDbInstanceTypeReplicaSet := new(SourceMongodbMongoDbInstanceTypeReplicaSet)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMongodbMongoDbInstanceTypeReplicaSet); err == nil {
- u.SourceMongodbMongoDbInstanceTypeReplicaSet = sourceMongodbMongoDbInstanceTypeReplicaSet
- u.Type = SourceMongodbMongoDbInstanceTypeTypeSourceMongodbMongoDbInstanceTypeReplicaSet
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u SourceMongodbMongoDbInstanceType) MarshalJSON() ([]byte, error) {
- if u.SourceMongodbMongoDBInstanceTypeMongoDBAtlas != nil {
- return json.Marshal(u.SourceMongodbMongoDBInstanceTypeMongoDBAtlas)
- }
-
- if u.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
- return json.Marshal(u.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance)
- }
-
- if u.SourceMongodbMongoDbInstanceTypeReplicaSet != nil {
- return json.Marshal(u.SourceMongodbMongoDbInstanceTypeReplicaSet)
- }
-
- return nil, nil
-}
-
-type SourceMongodbMongodb string
-
-const (
- SourceMongodbMongodbMongodb SourceMongodbMongodb = "mongodb"
-)
-
-func (e SourceMongodbMongodb) ToPointer() *SourceMongodbMongodb {
- return &e
-}
-
-func (e *SourceMongodbMongodb) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "mongodb":
- *e = SourceMongodbMongodb(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceMongodbMongodb: %v", v)
- }
-}
-
-type SourceMongodb struct {
- // The authentication source where the user information is stored.
- AuthSource *string `json:"auth_source,omitempty"`
- // The database you want to replicate.
- Database string `json:"database"`
- // The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
- InstanceType *SourceMongodbMongoDbInstanceType `json:"instance_type,omitempty"`
- // The password associated with this username.
- Password *string `json:"password,omitempty"`
- SourceType SourceMongodbMongodb `json:"sourceType"`
- // The username which is used to access the database.
- User *string `json:"user,omitempty"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodbcreaterequest.go b/internal/sdk/pkg/models/shared/sourcemongodbcreaterequest.go
deleted file mode 100755
index e6869e865..000000000
--- a/internal/sdk/pkg/models/shared/sourcemongodbcreaterequest.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceMongodbCreateRequest struct {
- Configuration SourceMongodb `json:"configuration"`
- Name string `json:"name"`
- // Optional secretID obtained through the public API OAuth redirect flow.
- SecretID *string `json:"secretId,omitempty"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodbinternalpoc.go b/internal/sdk/pkg/models/shared/sourcemongodbinternalpoc.go
old mode 100755
new mode 100644
index 4b01a115d..57fe6808b
--- a/internal/sdk/pkg/models/shared/sourcemongodbinternalpoc.go
+++ b/internal/sdk/pkg/models/shared/sourcemongodbinternalpoc.go
@@ -5,42 +5,93 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMongodbInternalPocMongodbInternalPoc string
+type MongodbInternalPoc string
const (
- SourceMongodbInternalPocMongodbInternalPocMongodbInternalPoc SourceMongodbInternalPocMongodbInternalPoc = "mongodb-internal-poc"
+ MongodbInternalPocMongodbInternalPoc MongodbInternalPoc = "mongodb-internal-poc"
)
-func (e SourceMongodbInternalPocMongodbInternalPoc) ToPointer() *SourceMongodbInternalPocMongodbInternalPoc {
+func (e MongodbInternalPoc) ToPointer() *MongodbInternalPoc {
return &e
}
-func (e *SourceMongodbInternalPocMongodbInternalPoc) UnmarshalJSON(data []byte) error {
+func (e *MongodbInternalPoc) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "mongodb-internal-poc":
- *e = SourceMongodbInternalPocMongodbInternalPoc(v)
+ *e = MongodbInternalPoc(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMongodbInternalPocMongodbInternalPoc: %v", v)
+ return fmt.Errorf("invalid value for MongodbInternalPoc: %v", v)
}
}
type SourceMongodbInternalPoc struct {
// The authentication source where the user information is stored.
- AuthSource *string `json:"auth_source,omitempty"`
+ AuthSource *string `default:"admin" json:"auth_source"`
// The connection string of the database that you want to replicate..
ConnectionString *string `json:"connection_string,omitempty"`
// The password associated with this username.
Password *string `json:"password,omitempty"`
// The name of the replica set to be replicated.
- ReplicaSet *string `json:"replica_set,omitempty"`
- SourceType SourceMongodbInternalPocMongodbInternalPoc `json:"sourceType"`
+ ReplicaSet *string `json:"replica_set,omitempty"`
+ sourceType MongodbInternalPoc `const:"mongodb-internal-poc" json:"sourceType"`
// The username which is used to access the database.
User *string `json:"user,omitempty"`
}
+
+func (s SourceMongodbInternalPoc) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMongodbInternalPoc) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMongodbInternalPoc) GetAuthSource() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AuthSource
+}
+
+func (o *SourceMongodbInternalPoc) GetConnectionString() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ConnectionString
+}
+
+func (o *SourceMongodbInternalPoc) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceMongodbInternalPoc) GetReplicaSet() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicaSet
+}
+
+func (o *SourceMongodbInternalPoc) GetSourceType() MongodbInternalPoc {
+ return MongodbInternalPocMongodbInternalPoc
+}
+
+func (o *SourceMongodbInternalPoc) GetUser() *string {
+ if o == nil {
+ return nil
+ }
+ return o.User
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodbinternalpoccreaterequest.go b/internal/sdk/pkg/models/shared/sourcemongodbinternalpoccreaterequest.go
old mode 100755
new mode 100644
index f9097a9c1..5251e50c6
--- a/internal/sdk/pkg/models/shared/sourcemongodbinternalpoccreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemongodbinternalpoccreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMongodbInternalPocCreateRequest struct {
Configuration SourceMongodbInternalPoc `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMongodbInternalPocCreateRequest) GetConfiguration() SourceMongodbInternalPoc {
+ if o == nil {
+ return SourceMongodbInternalPoc{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMongodbInternalPocCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMongodbInternalPocCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMongodbInternalPocCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMongodbInternalPocCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodbinternalpocputrequest.go b/internal/sdk/pkg/models/shared/sourcemongodbinternalpocputrequest.go
old mode 100755
new mode 100644
index 1ebfb8189..daf4fd08b
--- a/internal/sdk/pkg/models/shared/sourcemongodbinternalpocputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemongodbinternalpocputrequest.go
@@ -7,3 +7,24 @@ type SourceMongodbInternalPocPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMongodbInternalPocPutRequest) GetConfiguration() SourceMongodbInternalPocUpdate {
+ if o == nil {
+ return SourceMongodbInternalPocUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMongodbInternalPocPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMongodbInternalPocPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodbinternalpocupdate.go b/internal/sdk/pkg/models/shared/sourcemongodbinternalpocupdate.go
old mode 100755
new mode 100644
index bc422f34a..4874f7ae7
--- a/internal/sdk/pkg/models/shared/sourcemongodbinternalpocupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemongodbinternalpocupdate.go
@@ -2,9 +2,13 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceMongodbInternalPocUpdate struct {
// The authentication source where the user information is stored.
- AuthSource *string `json:"auth_source,omitempty"`
+ AuthSource *string `default:"admin" json:"auth_source"`
// The connection string of the database that you want to replicate..
ConnectionString *string `json:"connection_string,omitempty"`
// The password associated with this username.
@@ -14,3 +18,49 @@ type SourceMongodbInternalPocUpdate struct {
// The username which is used to access the database.
User *string `json:"user,omitempty"`
}
+
+func (s SourceMongodbInternalPocUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMongodbInternalPocUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMongodbInternalPocUpdate) GetAuthSource() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AuthSource
+}
+
+func (o *SourceMongodbInternalPocUpdate) GetConnectionString() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ConnectionString
+}
+
+func (o *SourceMongodbInternalPocUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceMongodbInternalPocUpdate) GetReplicaSet() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicaSet
+}
+
+func (o *SourceMongodbInternalPocUpdate) GetUser() *string {
+ if o == nil {
+ return nil
+ }
+ return o.User
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodbputrequest.go b/internal/sdk/pkg/models/shared/sourcemongodbputrequest.go
deleted file mode 100755
index 51a42ffa8..000000000
--- a/internal/sdk/pkg/models/shared/sourcemongodbputrequest.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceMongodbPutRequest struct {
- Configuration SourceMongodbUpdate `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodbupdate.go b/internal/sdk/pkg/models/shared/sourcemongodbupdate.go
deleted file mode 100755
index cff26e942..000000000
--- a/internal/sdk/pkg/models/shared/sourcemongodbupdate.go
+++ /dev/null
@@ -1,259 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
-)
-
-type SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance string
-
-const (
- SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstanceAtlas SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance = "atlas"
-)
-
-func (e SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance) ToPointer() *SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance {
- return &e
-}
-
-func (e *SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "atlas":
- *e = SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance: %v", v)
- }
-}
-
-// SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas - The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas struct {
- // The URL of a cluster to connect to.
- ClusterURL string `json:"cluster_url"`
- Instance SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlasInstance `json:"instance"`
-
- AdditionalProperties interface{} `json:"-"`
-}
-type _SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas
-
-func (c *SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) UnmarshalJSON(bs []byte) error {
- data := _SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas(data)
-
- additionalFields := make(map[string]interface{})
-
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
- }
- delete(additionalFields, "cluster_url")
- delete(additionalFields, "instance")
-
- c.AdditionalProperties = additionalFields
-
- return nil
-}
-
-func (c SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- return json.Marshal(out)
-}
-
-type SourceMongodbUpdateMongoDbInstanceTypeReplicaSetInstance string
-
-const (
- SourceMongodbUpdateMongoDbInstanceTypeReplicaSetInstanceReplica SourceMongodbUpdateMongoDbInstanceTypeReplicaSetInstance = "replica"
-)
-
-func (e SourceMongodbUpdateMongoDbInstanceTypeReplicaSetInstance) ToPointer() *SourceMongodbUpdateMongoDbInstanceTypeReplicaSetInstance {
- return &e
-}
-
-func (e *SourceMongodbUpdateMongoDbInstanceTypeReplicaSetInstance) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "replica":
- *e = SourceMongodbUpdateMongoDbInstanceTypeReplicaSetInstance(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceMongodbUpdateMongoDbInstanceTypeReplicaSetInstance: %v", v)
- }
-}
-
-// SourceMongodbUpdateMongoDbInstanceTypeReplicaSet - The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type SourceMongodbUpdateMongoDbInstanceTypeReplicaSet struct {
- Instance SourceMongodbUpdateMongoDbInstanceTypeReplicaSetInstance `json:"instance"`
- // A replica set in MongoDB is a group of mongod processes that maintain the same data set.
- ReplicaSet *string `json:"replica_set,omitempty"`
- // The members of a replica set. Please specify `host`:`port` of each member separated by comma.
- ServerAddresses string `json:"server_addresses"`
-}
-
-type SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance string
-
-const (
- SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstanceStandalone SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance = "standalone"
-)
-
-func (e SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance) ToPointer() *SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance {
- return &e
-}
-
-func (e *SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "standalone":
- *e = SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance: %v", v)
- }
-}
-
-// SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance - The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
-type SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance struct {
- // The host name of the Mongo database.
- Host string `json:"host"`
- Instance SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstanceInstance `json:"instance"`
- // The port of the Mongo database.
- Port int64 `json:"port"`
-}
-
-type SourceMongodbUpdateMongoDbInstanceTypeType string
-
-const (
- SourceMongodbUpdateMongoDbInstanceTypeTypeSourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance SourceMongodbUpdateMongoDbInstanceTypeType = "source-mongodb-update_MongoDb Instance Type_Standalone MongoDb Instance"
- SourceMongodbUpdateMongoDbInstanceTypeTypeSourceMongodbUpdateMongoDbInstanceTypeReplicaSet SourceMongodbUpdateMongoDbInstanceTypeType = "source-mongodb-update_MongoDb Instance Type_Replica Set"
- SourceMongodbUpdateMongoDbInstanceTypeTypeSourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas SourceMongodbUpdateMongoDbInstanceTypeType = "source-mongodb-update_MongoDb Instance Type_MongoDB Atlas"
-)
-
-type SourceMongodbUpdateMongoDbInstanceType struct {
- SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance *SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance
- SourceMongodbUpdateMongoDbInstanceTypeReplicaSet *SourceMongodbUpdateMongoDbInstanceTypeReplicaSet
- SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas *SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas
-
- Type SourceMongodbUpdateMongoDbInstanceTypeType
-}
-
-func CreateSourceMongodbUpdateMongoDbInstanceTypeSourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance(sourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance) SourceMongodbUpdateMongoDbInstanceType {
- typ := SourceMongodbUpdateMongoDbInstanceTypeTypeSourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance
-
- return SourceMongodbUpdateMongoDbInstanceType{
- SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance: &sourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance,
- Type: typ,
- }
-}
-
-func CreateSourceMongodbUpdateMongoDbInstanceTypeSourceMongodbUpdateMongoDbInstanceTypeReplicaSet(sourceMongodbUpdateMongoDbInstanceTypeReplicaSet SourceMongodbUpdateMongoDbInstanceTypeReplicaSet) SourceMongodbUpdateMongoDbInstanceType {
- typ := SourceMongodbUpdateMongoDbInstanceTypeTypeSourceMongodbUpdateMongoDbInstanceTypeReplicaSet
-
- return SourceMongodbUpdateMongoDbInstanceType{
- SourceMongodbUpdateMongoDbInstanceTypeReplicaSet: &sourceMongodbUpdateMongoDbInstanceTypeReplicaSet,
- Type: typ,
- }
-}
-
-func CreateSourceMongodbUpdateMongoDbInstanceTypeSourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas(sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) SourceMongodbUpdateMongoDbInstanceType {
- typ := SourceMongodbUpdateMongoDbInstanceTypeTypeSourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas
-
- return SourceMongodbUpdateMongoDbInstanceType{
- SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas: &sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas,
- Type: typ,
- }
-}
-
-func (u *SourceMongodbUpdateMongoDbInstanceType) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas := new(SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas); err == nil {
- u.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas = sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas
- u.Type = SourceMongodbUpdateMongoDbInstanceTypeTypeSourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas
- return nil
- }
-
- sourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance := new(SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance); err == nil {
- u.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance = sourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance
- u.Type = SourceMongodbUpdateMongoDbInstanceTypeTypeSourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance
- return nil
- }
-
- sourceMongodbUpdateMongoDbInstanceTypeReplicaSet := new(SourceMongodbUpdateMongoDbInstanceTypeReplicaSet)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMongodbUpdateMongoDbInstanceTypeReplicaSet); err == nil {
- u.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet = sourceMongodbUpdateMongoDbInstanceTypeReplicaSet
- u.Type = SourceMongodbUpdateMongoDbInstanceTypeTypeSourceMongodbUpdateMongoDbInstanceTypeReplicaSet
- return nil
- }
-
- return errors.New("could not unmarshal into supported union types")
-}
-
-func (u SourceMongodbUpdateMongoDbInstanceType) MarshalJSON() ([]byte, error) {
- if u.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas != nil {
- return json.Marshal(u.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas)
- }
-
- if u.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance != nil {
- return json.Marshal(u.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance)
- }
-
- if u.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet != nil {
- return json.Marshal(u.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet)
- }
-
- return nil, nil
-}
-
-type SourceMongodbUpdate struct {
- // The authentication source where the user information is stored.
- AuthSource *string `json:"auth_source,omitempty"`
- // The database you want to replicate.
- Database string `json:"database"`
- // The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.
- InstanceType *SourceMongodbUpdateMongoDbInstanceType `json:"instance_type,omitempty"`
- // The password associated with this username.
- Password *string `json:"password,omitempty"`
- // The username which is used to access the database.
- User *string `json:"user,omitempty"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodbv2.go b/internal/sdk/pkg/models/shared/sourcemongodbv2.go
new file mode 100644
index 000000000..2b55f694c
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcemongodbv2.go
@@ -0,0 +1,346 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+type SourceMongodbV2SchemasDatabaseConfigClusterType string
+
+const (
+ SourceMongodbV2SchemasDatabaseConfigClusterTypeSelfManagedReplicaSet SourceMongodbV2SchemasDatabaseConfigClusterType = "SELF_MANAGED_REPLICA_SET"
+)
+
+func (e SourceMongodbV2SchemasDatabaseConfigClusterType) ToPointer() *SourceMongodbV2SchemasDatabaseConfigClusterType {
+ return &e
+}
+
+func (e *SourceMongodbV2SchemasDatabaseConfigClusterType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "SELF_MANAGED_REPLICA_SET":
+ *e = SourceMongodbV2SchemasDatabaseConfigClusterType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceMongodbV2SchemasDatabaseConfigClusterType: %v", v)
+ }
+}
+
+// SourceMongodbV2SelfManagedReplicaSet - MongoDB self-hosted cluster configured as a replica set
+type SourceMongodbV2SelfManagedReplicaSet struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ // The authentication source where the user information is stored.
+ AuthSource *string `default:"admin" json:"auth_source"`
+ clusterType SourceMongodbV2SchemasDatabaseConfigClusterType `const:"SELF_MANAGED_REPLICA_SET" json:"cluster_type"`
+ // The connection string of the cluster that you want to replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string for more information.
+ ConnectionString string `json:"connection_string"`
+ // The name of the MongoDB database that contains the collection(s) to replicate.
+ Database string `json:"database"`
+ // The password associated with this username.
+ Password *string `json:"password,omitempty"`
+ // The username which is used to access the database.
+ Username *string `json:"username,omitempty"`
+}
+
+func (s SourceMongodbV2SelfManagedReplicaSet) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMongodbV2SelfManagedReplicaSet) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMongodbV2SelfManagedReplicaSet) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalProperties
+}
+
+func (o *SourceMongodbV2SelfManagedReplicaSet) GetAuthSource() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AuthSource
+}
+
+func (o *SourceMongodbV2SelfManagedReplicaSet) GetClusterType() SourceMongodbV2SchemasDatabaseConfigClusterType {
+ return SourceMongodbV2SchemasDatabaseConfigClusterTypeSelfManagedReplicaSet
+}
+
+func (o *SourceMongodbV2SelfManagedReplicaSet) GetConnectionString() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConnectionString
+}
+
+func (o *SourceMongodbV2SelfManagedReplicaSet) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceMongodbV2SelfManagedReplicaSet) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceMongodbV2SelfManagedReplicaSet) GetUsername() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Username
+}
+
+type SourceMongodbV2SchemasClusterType string
+
+const (
+ SourceMongodbV2SchemasClusterTypeAtlasReplicaSet SourceMongodbV2SchemasClusterType = "ATLAS_REPLICA_SET"
+)
+
+func (e SourceMongodbV2SchemasClusterType) ToPointer() *SourceMongodbV2SchemasClusterType {
+ return &e
+}
+
+func (e *SourceMongodbV2SchemasClusterType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "ATLAS_REPLICA_SET":
+ *e = SourceMongodbV2SchemasClusterType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceMongodbV2SchemasClusterType: %v", v)
+ }
+}
+
+// SourceMongodbV2MongoDBAtlasReplicaSet - MongoDB Atlas-hosted cluster configured as a replica set
+type SourceMongodbV2MongoDBAtlasReplicaSet struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ // The authentication source where the user information is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource for more details.
+ AuthSource *string `default:"admin" json:"auth_source"`
+ clusterType SourceMongodbV2SchemasClusterType `const:"ATLAS_REPLICA_SET" json:"cluster_type"`
+ // The connection string of the cluster that you want to replicate.
+ ConnectionString string `json:"connection_string"`
+ // The name of the MongoDB database that contains the collection(s) to replicate.
+ Database string `json:"database"`
+ // The password associated with this username.
+ Password string `json:"password"`
+ // The username which is used to access the database.
+ Username string `json:"username"`
+}
+
+func (s SourceMongodbV2MongoDBAtlasReplicaSet) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMongodbV2MongoDBAtlasReplicaSet) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMongodbV2MongoDBAtlasReplicaSet) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalProperties
+}
+
+func (o *SourceMongodbV2MongoDBAtlasReplicaSet) GetAuthSource() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AuthSource
+}
+
+func (o *SourceMongodbV2MongoDBAtlasReplicaSet) GetClusterType() SourceMongodbV2SchemasClusterType {
+ return SourceMongodbV2SchemasClusterTypeAtlasReplicaSet
+}
+
+func (o *SourceMongodbV2MongoDBAtlasReplicaSet) GetConnectionString() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConnectionString
+}
+
+func (o *SourceMongodbV2MongoDBAtlasReplicaSet) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceMongodbV2MongoDBAtlasReplicaSet) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceMongodbV2MongoDBAtlasReplicaSet) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type SourceMongodbV2ClusterTypeType string
+
+const (
+ SourceMongodbV2ClusterTypeTypeSourceMongodbV2MongoDBAtlasReplicaSet SourceMongodbV2ClusterTypeType = "source-mongodb-v2_MongoDB Atlas Replica Set"
+ SourceMongodbV2ClusterTypeTypeSourceMongodbV2SelfManagedReplicaSet SourceMongodbV2ClusterTypeType = "source-mongodb-v2_Self-Managed Replica Set"
+)
+
+type SourceMongodbV2ClusterType struct {
+ SourceMongodbV2MongoDBAtlasReplicaSet *SourceMongodbV2MongoDBAtlasReplicaSet
+ SourceMongodbV2SelfManagedReplicaSet *SourceMongodbV2SelfManagedReplicaSet
+
+ Type SourceMongodbV2ClusterTypeType
+}
+
+func CreateSourceMongodbV2ClusterTypeSourceMongodbV2MongoDBAtlasReplicaSet(sourceMongodbV2MongoDBAtlasReplicaSet SourceMongodbV2MongoDBAtlasReplicaSet) SourceMongodbV2ClusterType {
+ typ := SourceMongodbV2ClusterTypeTypeSourceMongodbV2MongoDBAtlasReplicaSet
+
+ return SourceMongodbV2ClusterType{
+ SourceMongodbV2MongoDBAtlasReplicaSet: &sourceMongodbV2MongoDBAtlasReplicaSet,
+ Type: typ,
+ }
+}
+
+func CreateSourceMongodbV2ClusterTypeSourceMongodbV2SelfManagedReplicaSet(sourceMongodbV2SelfManagedReplicaSet SourceMongodbV2SelfManagedReplicaSet) SourceMongodbV2ClusterType {
+ typ := SourceMongodbV2ClusterTypeTypeSourceMongodbV2SelfManagedReplicaSet
+
+ return SourceMongodbV2ClusterType{
+ SourceMongodbV2SelfManagedReplicaSet: &sourceMongodbV2SelfManagedReplicaSet,
+ Type: typ,
+ }
+}
+
+func (u *SourceMongodbV2ClusterType) UnmarshalJSON(data []byte) error {
+
+ sourceMongodbV2MongoDBAtlasReplicaSet := new(SourceMongodbV2MongoDBAtlasReplicaSet)
+ if err := utils.UnmarshalJSON(data, &sourceMongodbV2MongoDBAtlasReplicaSet, "", true, true); err == nil {
+ u.SourceMongodbV2MongoDBAtlasReplicaSet = sourceMongodbV2MongoDBAtlasReplicaSet
+ u.Type = SourceMongodbV2ClusterTypeTypeSourceMongodbV2MongoDBAtlasReplicaSet
+ return nil
+ }
+
+ sourceMongodbV2SelfManagedReplicaSet := new(SourceMongodbV2SelfManagedReplicaSet)
+ if err := utils.UnmarshalJSON(data, &sourceMongodbV2SelfManagedReplicaSet, "", true, true); err == nil {
+ u.SourceMongodbV2SelfManagedReplicaSet = sourceMongodbV2SelfManagedReplicaSet
+ u.Type = SourceMongodbV2ClusterTypeTypeSourceMongodbV2SelfManagedReplicaSet
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourceMongodbV2ClusterType) MarshalJSON() ([]byte, error) {
+ if u.SourceMongodbV2MongoDBAtlasReplicaSet != nil {
+ return utils.MarshalJSON(u.SourceMongodbV2MongoDBAtlasReplicaSet, "", true)
+ }
+
+ if u.SourceMongodbV2SelfManagedReplicaSet != nil {
+ return utils.MarshalJSON(u.SourceMongodbV2SelfManagedReplicaSet, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type MongodbV2 string
+
+const (
+ MongodbV2MongodbV2 MongodbV2 = "mongodb-v2"
+)
+
+func (e MongodbV2) ToPointer() *MongodbV2 {
+ return &e
+}
+
+func (e *MongodbV2) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "mongodb-v2":
+ *e = MongodbV2(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for MongodbV2: %v", v)
+ }
+}
+
+type SourceMongodbV2 struct {
+ // Configures the MongoDB cluster type.
+ DatabaseConfig SourceMongodbV2ClusterType `json:"database_config"`
+ // The maximum number of documents to sample when attempting to discover the unique fields for a collection.
+ DiscoverSampleSize *int64 `default:"10000" json:"discover_sample_size"`
+ // The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds.
+ InitialWaitingSeconds *int64 `default:"300" json:"initial_waiting_seconds"`
+ // The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
+ QueueSize *int64 `default:"10000" json:"queue_size"`
+ sourceType MongodbV2 `const:"mongodb-v2" json:"sourceType"`
+}
+
+func (s SourceMongodbV2) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMongodbV2) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMongodbV2) GetDatabaseConfig() SourceMongodbV2ClusterType {
+ if o == nil {
+ return SourceMongodbV2ClusterType{}
+ }
+ return o.DatabaseConfig
+}
+
+func (o *SourceMongodbV2) GetDiscoverSampleSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DiscoverSampleSize
+}
+
+func (o *SourceMongodbV2) GetInitialWaitingSeconds() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.InitialWaitingSeconds
+}
+
+func (o *SourceMongodbV2) GetQueueSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.QueueSize
+}
+
+func (o *SourceMongodbV2) GetSourceType() MongodbV2 {
+ return MongodbV2MongodbV2
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodbv2createrequest.go b/internal/sdk/pkg/models/shared/sourcemongodbv2createrequest.go
new file mode 100644
index 000000000..a021f9003
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcemongodbv2createrequest.go
@@ -0,0 +1,49 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SourceMongodbV2CreateRequest struct {
+ Configuration SourceMongodbV2 `json:"configuration"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ // Optional secretID obtained through the public API OAuth redirect flow.
+ SecretID *string `json:"secretId,omitempty"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *SourceMongodbV2CreateRequest) GetConfiguration() SourceMongodbV2 {
+ if o == nil {
+ return SourceMongodbV2{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMongodbV2CreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMongodbV2CreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMongodbV2CreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMongodbV2CreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodbv2putrequest.go b/internal/sdk/pkg/models/shared/sourcemongodbv2putrequest.go
new file mode 100644
index 000000000..8b25ab525
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcemongodbv2putrequest.go
@@ -0,0 +1,30 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SourceMongodbV2PutRequest struct {
+ Configuration SourceMongodbV2Update `json:"configuration"`
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *SourceMongodbV2PutRequest) GetConfiguration() SourceMongodbV2Update {
+ if o == nil {
+ return SourceMongodbV2Update{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMongodbV2PutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMongodbV2PutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemongodbv2update.go b/internal/sdk/pkg/models/shared/sourcemongodbv2update.go
new file mode 100644
index 000000000..6f43df505
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcemongodbv2update.go
@@ -0,0 +1,317 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+type SourceMongodbV2UpdateSchemasClusterType string
+
+const (
+ SourceMongodbV2UpdateSchemasClusterTypeSelfManagedReplicaSet SourceMongodbV2UpdateSchemasClusterType = "SELF_MANAGED_REPLICA_SET"
+)
+
+func (e SourceMongodbV2UpdateSchemasClusterType) ToPointer() *SourceMongodbV2UpdateSchemasClusterType {
+ return &e
+}
+
+func (e *SourceMongodbV2UpdateSchemasClusterType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "SELF_MANAGED_REPLICA_SET":
+ *e = SourceMongodbV2UpdateSchemasClusterType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceMongodbV2UpdateSchemasClusterType: %v", v)
+ }
+}
+
+// SelfManagedReplicaSet - MongoDB self-hosted cluster configured as a replica set
+type SelfManagedReplicaSet struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ // The authentication source where the user information is stored.
+ AuthSource *string `default:"admin" json:"auth_source"`
+ clusterType SourceMongodbV2UpdateSchemasClusterType `const:"SELF_MANAGED_REPLICA_SET" json:"cluster_type"`
+ // The connection string of the cluster that you want to replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string for more information.
+ ConnectionString string `json:"connection_string"`
+ // The name of the MongoDB database that contains the collection(s) to replicate.
+ Database string `json:"database"`
+ // The password associated with this username.
+ Password *string `json:"password,omitempty"`
+ // The username which is used to access the database.
+ Username *string `json:"username,omitempty"`
+}
+
+func (s SelfManagedReplicaSet) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SelfManagedReplicaSet) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SelfManagedReplicaSet) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalProperties
+}
+
+func (o *SelfManagedReplicaSet) GetAuthSource() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AuthSource
+}
+
+func (o *SelfManagedReplicaSet) GetClusterType() SourceMongodbV2UpdateSchemasClusterType {
+ return SourceMongodbV2UpdateSchemasClusterTypeSelfManagedReplicaSet
+}
+
+func (o *SelfManagedReplicaSet) GetConnectionString() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConnectionString
+}
+
+func (o *SelfManagedReplicaSet) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SelfManagedReplicaSet) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SelfManagedReplicaSet) GetUsername() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Username
+}
+
+type SourceMongodbV2UpdateClusterType string
+
+const (
+ SourceMongodbV2UpdateClusterTypeAtlasReplicaSet SourceMongodbV2UpdateClusterType = "ATLAS_REPLICA_SET"
+)
+
+func (e SourceMongodbV2UpdateClusterType) ToPointer() *SourceMongodbV2UpdateClusterType {
+ return &e
+}
+
+func (e *SourceMongodbV2UpdateClusterType) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "ATLAS_REPLICA_SET":
+ *e = SourceMongodbV2UpdateClusterType(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceMongodbV2UpdateClusterType: %v", v)
+ }
+}
+
+// MongoDBAtlasReplicaSet - MongoDB Atlas-hosted cluster configured as a replica set
+type MongoDBAtlasReplicaSet struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ // The authentication source where the user information is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource for more details.
+ AuthSource *string `default:"admin" json:"auth_source"`
+ clusterType SourceMongodbV2UpdateClusterType `const:"ATLAS_REPLICA_SET" json:"cluster_type"`
+ // The connection string of the cluster that you want to replicate.
+ ConnectionString string `json:"connection_string"`
+ // The name of the MongoDB database that contains the collection(s) to replicate.
+ Database string `json:"database"`
+ // The password associated with this username.
+ Password string `json:"password"`
+ // The username which is used to access the database.
+ Username string `json:"username"`
+}
+
+func (m MongoDBAtlasReplicaSet) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(m, "", false)
+}
+
+func (m *MongoDBAtlasReplicaSet) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &m, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *MongoDBAtlasReplicaSet) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalProperties
+}
+
+func (o *MongoDBAtlasReplicaSet) GetAuthSource() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AuthSource
+}
+
+func (o *MongoDBAtlasReplicaSet) GetClusterType() SourceMongodbV2UpdateClusterType {
+ return SourceMongodbV2UpdateClusterTypeAtlasReplicaSet
+}
+
+func (o *MongoDBAtlasReplicaSet) GetConnectionString() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConnectionString
+}
+
+func (o *MongoDBAtlasReplicaSet) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *MongoDBAtlasReplicaSet) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *MongoDBAtlasReplicaSet) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type ClusterTypeType string
+
+const (
+ ClusterTypeTypeMongoDBAtlasReplicaSet ClusterTypeType = "MongoDB Atlas Replica Set"
+ ClusterTypeTypeSelfManagedReplicaSet ClusterTypeType = "Self-Managed Replica Set"
+)
+
+type ClusterType struct {
+ MongoDBAtlasReplicaSet *MongoDBAtlasReplicaSet
+ SelfManagedReplicaSet *SelfManagedReplicaSet
+
+ Type ClusterTypeType
+}
+
+func CreateClusterTypeMongoDBAtlasReplicaSet(mongoDBAtlasReplicaSet MongoDBAtlasReplicaSet) ClusterType {
+ typ := ClusterTypeTypeMongoDBAtlasReplicaSet
+
+ return ClusterType{
+ MongoDBAtlasReplicaSet: &mongoDBAtlasReplicaSet,
+ Type: typ,
+ }
+}
+
+func CreateClusterTypeSelfManagedReplicaSet(selfManagedReplicaSet SelfManagedReplicaSet) ClusterType {
+ typ := ClusterTypeTypeSelfManagedReplicaSet
+
+ return ClusterType{
+ SelfManagedReplicaSet: &selfManagedReplicaSet,
+ Type: typ,
+ }
+}
+
+func (u *ClusterType) UnmarshalJSON(data []byte) error {
+
+ mongoDBAtlasReplicaSet := new(MongoDBAtlasReplicaSet)
+ if err := utils.UnmarshalJSON(data, &mongoDBAtlasReplicaSet, "", true, true); err == nil {
+ u.MongoDBAtlasReplicaSet = mongoDBAtlasReplicaSet
+ u.Type = ClusterTypeTypeMongoDBAtlasReplicaSet
+ return nil
+ }
+
+ selfManagedReplicaSet := new(SelfManagedReplicaSet)
+ if err := utils.UnmarshalJSON(data, &selfManagedReplicaSet, "", true, true); err == nil {
+ u.SelfManagedReplicaSet = selfManagedReplicaSet
+ u.Type = ClusterTypeTypeSelfManagedReplicaSet
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u ClusterType) MarshalJSON() ([]byte, error) {
+ if u.MongoDBAtlasReplicaSet != nil {
+ return utils.MarshalJSON(u.MongoDBAtlasReplicaSet, "", true)
+ }
+
+ if u.SelfManagedReplicaSet != nil {
+ return utils.MarshalJSON(u.SelfManagedReplicaSet, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+type SourceMongodbV2Update struct {
+ // Configures the MongoDB cluster type.
+ DatabaseConfig ClusterType `json:"database_config"`
+ // The maximum number of documents to sample when attempting to discover the unique fields for a collection.
+ DiscoverSampleSize *int64 `default:"10000" json:"discover_sample_size"`
+ // The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds.
+ InitialWaitingSeconds *int64 `default:"300" json:"initial_waiting_seconds"`
+ // The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
+ QueueSize *int64 `default:"10000" json:"queue_size"`
+}
+
+func (s SourceMongodbV2Update) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMongodbV2Update) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMongodbV2Update) GetDatabaseConfig() ClusterType {
+ if o == nil {
+ return ClusterType{}
+ }
+ return o.DatabaseConfig
+}
+
+func (o *SourceMongodbV2Update) GetDiscoverSampleSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DiscoverSampleSize
+}
+
+func (o *SourceMongodbV2Update) GetInitialWaitingSeconds() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.InitialWaitingSeconds
+}
+
+func (o *SourceMongodbV2Update) GetQueueSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.QueueSize
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemssql.go b/internal/sdk/pkg/models/shared/sourcemssql.go
old mode 100755
new mode 100644
index 0d6f28e3c..df9d16a8f
--- a/internal/sdk/pkg/models/shared/sourcemssql.go
+++ b/internal/sdk/pkg/models/shared/sourcemssql.go
@@ -3,54 +3,69 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod string
+type SourceMssqlSchemasMethod string
const (
- SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethodStandard SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod = "STANDARD"
+ SourceMssqlSchemasMethodStandard SourceMssqlSchemasMethod = "STANDARD"
)
-func (e SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod) ToPointer() *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod {
+func (e SourceMssqlSchemasMethod) ToPointer() *SourceMssqlSchemasMethod {
return &e
}
-func (e *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "STANDARD":
- *e = SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod(v)
+ *e = SourceMssqlSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlSchemasMethod: %v", v)
}
}
-// SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
-type SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor struct {
- Method SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod `json:"method"`
+// SourceMssqlScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
+type SourceMssqlScanChangesWithUserDefinedCursor struct {
+ method SourceMssqlSchemasMethod `const:"STANDARD" json:"method"`
}
-// SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync - What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.
-type SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync string
+func (s SourceMssqlScanChangesWithUserDefinedCursor) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlScanChangesWithUserDefinedCursor) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMssqlScanChangesWithUserDefinedCursor) GetMethod() SourceMssqlSchemasMethod {
+ return SourceMssqlSchemasMethodStandard
+}
+
+// SourceMssqlDataToSync - What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.
+type SourceMssqlDataToSync string
const (
- SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSyncExistingAndNew SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync = "Existing and New"
- SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSyncNewChangesOnly SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync = "New Changes Only"
+ SourceMssqlDataToSyncExistingAndNew SourceMssqlDataToSync = "Existing and New"
+ SourceMssqlDataToSyncNewChangesOnly SourceMssqlDataToSync = "New Changes Only"
)
-func (e SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync) ToPointer() *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync {
+func (e SourceMssqlDataToSync) ToPointer() *SourceMssqlDataToSync {
return &e
}
-func (e *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlDataToSync) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -59,50 +74,50 @@ func (e *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync)
case "Existing and New":
fallthrough
case "New Changes Only":
- *e = SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync(v)
+ *e = SourceMssqlDataToSync(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlDataToSync: %v", v)
}
}
-type SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod string
+type SourceMssqlMethod string
const (
- SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethodCdc SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod = "CDC"
+ SourceMssqlMethodCdc SourceMssqlMethod = "CDC"
)
-func (e SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod) ToPointer() *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod {
+func (e SourceMssqlMethod) ToPointer() *SourceMssqlMethod {
return &e
}
-func (e *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CDC":
- *e = SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod(v)
+ *e = SourceMssqlMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlMethod: %v", v)
}
}
-// SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel - Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.
-type SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel string
+// SourceMssqlInitialSnapshotIsolationLevel - Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.
+type SourceMssqlInitialSnapshotIsolationLevel string
const (
- SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevelSnapshot SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel = "Snapshot"
- SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevelReadCommitted SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel = "Read Committed"
+ SourceMssqlInitialSnapshotIsolationLevelSnapshot SourceMssqlInitialSnapshotIsolationLevel = "Snapshot"
+ SourceMssqlInitialSnapshotIsolationLevelReadCommitted SourceMssqlInitialSnapshotIsolationLevel = "Read Committed"
)
-func (e SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel) ToPointer() *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel {
+func (e SourceMssqlInitialSnapshotIsolationLevel) ToPointer() *SourceMssqlInitialSnapshotIsolationLevel {
return &e
}
-func (e *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlInitialSnapshotIsolationLevel) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -111,74 +126,105 @@ func (e *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnaps
case "Snapshot":
fallthrough
case "Read Committed":
- *e = SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel(v)
+ *e = SourceMssqlInitialSnapshotIsolationLevel(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlInitialSnapshotIsolationLevel: %v", v)
}
}
-// SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database.
-type SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC struct {
+// SourceMssqlReadChangesUsingChangeDataCaptureCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database.
+type SourceMssqlReadChangesUsingChangeDataCaptureCDC struct {
// What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.
- DataToSync *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync `json:"data_to_sync,omitempty"`
+ DataToSync *SourceMssqlDataToSync `default:"Existing and New" json:"data_to_sync"`
// The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
- InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"`
- Method SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod `json:"method"`
+ InitialWaitingSeconds *int64 `default:"300" json:"initial_waiting_seconds"`
+ method SourceMssqlMethod `const:"CDC" json:"method"`
// Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.
- SnapshotIsolation *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel `json:"snapshot_isolation,omitempty"`
+ SnapshotIsolation *SourceMssqlInitialSnapshotIsolationLevel `default:"Snapshot" json:"snapshot_isolation"`
+}
+
+func (s SourceMssqlReadChangesUsingChangeDataCaptureCDC) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlReadChangesUsingChangeDataCaptureCDC) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMssqlReadChangesUsingChangeDataCaptureCDC) GetDataToSync() *SourceMssqlDataToSync {
+ if o == nil {
+ return nil
+ }
+ return o.DataToSync
+}
+
+func (o *SourceMssqlReadChangesUsingChangeDataCaptureCDC) GetInitialWaitingSeconds() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.InitialWaitingSeconds
+}
+
+func (o *SourceMssqlReadChangesUsingChangeDataCaptureCDC) GetMethod() SourceMssqlMethod {
+ return SourceMssqlMethodCdc
+}
+
+func (o *SourceMssqlReadChangesUsingChangeDataCaptureCDC) GetSnapshotIsolation() *SourceMssqlInitialSnapshotIsolationLevel {
+ if o == nil {
+ return nil
+ }
+ return o.SnapshotIsolation
}
type SourceMssqlUpdateMethodType string
const (
- SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC SourceMssqlUpdateMethodType = "source-mssql_Update Method_Read Changes using Change Data Capture (CDC)"
- SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodScanChangesWithUserDefinedCursor SourceMssqlUpdateMethodType = "source-mssql_Update Method_Scan Changes with User Defined Cursor"
+ SourceMssqlUpdateMethodTypeSourceMssqlReadChangesUsingChangeDataCaptureCDC SourceMssqlUpdateMethodType = "source-mssql_Read Changes using Change Data Capture (CDC)"
+ SourceMssqlUpdateMethodTypeSourceMssqlScanChangesWithUserDefinedCursor SourceMssqlUpdateMethodType = "source-mssql_Scan Changes with User Defined Cursor"
)
type SourceMssqlUpdateMethod struct {
- SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC
- SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor
+ SourceMssqlReadChangesUsingChangeDataCaptureCDC *SourceMssqlReadChangesUsingChangeDataCaptureCDC
+ SourceMssqlScanChangesWithUserDefinedCursor *SourceMssqlScanChangesWithUserDefinedCursor
Type SourceMssqlUpdateMethodType
}
-func CreateSourceMssqlUpdateMethodSourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC(sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC) SourceMssqlUpdateMethod {
- typ := SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC
+func CreateSourceMssqlUpdateMethodSourceMssqlReadChangesUsingChangeDataCaptureCDC(sourceMssqlReadChangesUsingChangeDataCaptureCDC SourceMssqlReadChangesUsingChangeDataCaptureCDC) SourceMssqlUpdateMethod {
+ typ := SourceMssqlUpdateMethodTypeSourceMssqlReadChangesUsingChangeDataCaptureCDC
return SourceMssqlUpdateMethod{
- SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC: &sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC,
+ SourceMssqlReadChangesUsingChangeDataCaptureCDC: &sourceMssqlReadChangesUsingChangeDataCaptureCDC,
Type: typ,
}
}
-func CreateSourceMssqlUpdateMethodSourceMssqlUpdateMethodScanChangesWithUserDefinedCursor(sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor) SourceMssqlUpdateMethod {
- typ := SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodScanChangesWithUserDefinedCursor
+func CreateSourceMssqlUpdateMethodSourceMssqlScanChangesWithUserDefinedCursor(sourceMssqlScanChangesWithUserDefinedCursor SourceMssqlScanChangesWithUserDefinedCursor) SourceMssqlUpdateMethod {
+ typ := SourceMssqlUpdateMethodTypeSourceMssqlScanChangesWithUserDefinedCursor
return SourceMssqlUpdateMethod{
- SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor: &sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor,
+ SourceMssqlScanChangesWithUserDefinedCursor: &sourceMssqlScanChangesWithUserDefinedCursor,
Type: typ,
}
}
func (u *SourceMssqlUpdateMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor := new(SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor); err == nil {
- u.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor = sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor
- u.Type = SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodScanChangesWithUserDefinedCursor
+
+ sourceMssqlScanChangesWithUserDefinedCursor := new(SourceMssqlScanChangesWithUserDefinedCursor)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlScanChangesWithUserDefinedCursor, "", true, true); err == nil {
+ u.SourceMssqlScanChangesWithUserDefinedCursor = sourceMssqlScanChangesWithUserDefinedCursor
+ u.Type = SourceMssqlUpdateMethodTypeSourceMssqlScanChangesWithUserDefinedCursor
return nil
}
- sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC := new(SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC); err == nil {
- u.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC = sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC
- u.Type = SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC
+ sourceMssqlReadChangesUsingChangeDataCaptureCDC := new(SourceMssqlReadChangesUsingChangeDataCaptureCDC)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlReadChangesUsingChangeDataCaptureCDC, "", true, true); err == nil {
+ u.SourceMssqlReadChangesUsingChangeDataCaptureCDC = sourceMssqlReadChangesUsingChangeDataCaptureCDC
+ u.Type = SourceMssqlUpdateMethodTypeSourceMssqlReadChangesUsingChangeDataCaptureCDC
return nil
}
@@ -186,15 +232,15 @@ func (u *SourceMssqlUpdateMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceMssqlUpdateMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor != nil {
- return json.Marshal(u.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor)
+ if u.SourceMssqlReadChangesUsingChangeDataCaptureCDC != nil {
+ return utils.MarshalJSON(u.SourceMssqlReadChangesUsingChangeDataCaptureCDC, "", true)
}
- if u.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil {
- return json.Marshal(u.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC)
+ if u.SourceMssqlScanChangesWithUserDefinedCursor != nil {
+ return utils.MarshalJSON(u.SourceMssqlScanChangesWithUserDefinedCursor, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceMssqlMssql string
@@ -221,116 +267,148 @@ func (e *SourceMssqlMssql) UnmarshalJSON(data []byte) error {
}
}
-type SourceMssqlSSLMethodEncryptedVerifyCertificateSSLMethod string
+type SourceMssqlSchemasSslMethodSslMethod string
const (
- SourceMssqlSSLMethodEncryptedVerifyCertificateSSLMethodEncryptedVerifyCertificate SourceMssqlSSLMethodEncryptedVerifyCertificateSSLMethod = "encrypted_verify_certificate"
+ SourceMssqlSchemasSslMethodSslMethodEncryptedVerifyCertificate SourceMssqlSchemasSslMethodSslMethod = "encrypted_verify_certificate"
)
-func (e SourceMssqlSSLMethodEncryptedVerifyCertificateSSLMethod) ToPointer() *SourceMssqlSSLMethodEncryptedVerifyCertificateSSLMethod {
+func (e SourceMssqlSchemasSslMethodSslMethod) ToPointer() *SourceMssqlSchemasSslMethodSslMethod {
return &e
}
-func (e *SourceMssqlSSLMethodEncryptedVerifyCertificateSSLMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlSchemasSslMethodSslMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "encrypted_verify_certificate":
- *e = SourceMssqlSSLMethodEncryptedVerifyCertificateSSLMethod(v)
+ *e = SourceMssqlSchemasSslMethodSslMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlSSLMethodEncryptedVerifyCertificateSSLMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlSchemasSslMethodSslMethod: %v", v)
}
}
-// SourceMssqlSSLMethodEncryptedVerifyCertificate - Verify and use the certificate provided by the server.
-type SourceMssqlSSLMethodEncryptedVerifyCertificate struct {
+// SourceMssqlEncryptedVerifyCertificate - Verify and use the certificate provided by the server.
+type SourceMssqlEncryptedVerifyCertificate struct {
// Specifies the host name of the server. The value of this property must match the subject property of the certificate.
- HostNameInCertificate *string `json:"hostNameInCertificate,omitempty"`
- SslMethod SourceMssqlSSLMethodEncryptedVerifyCertificateSSLMethod `json:"ssl_method"`
+ HostNameInCertificate *string `json:"hostNameInCertificate,omitempty"`
+ sslMethod SourceMssqlSchemasSslMethodSslMethod `const:"encrypted_verify_certificate" json:"ssl_method"`
+}
+
+func (s SourceMssqlEncryptedVerifyCertificate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlEncryptedVerifyCertificate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMssqlEncryptedVerifyCertificate) GetHostNameInCertificate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.HostNameInCertificate
}
-type SourceMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod string
+func (o *SourceMssqlEncryptedVerifyCertificate) GetSslMethod() SourceMssqlSchemasSslMethodSslMethod {
+ return SourceMssqlSchemasSslMethodSslMethodEncryptedVerifyCertificate
+}
+
+type SourceMssqlSchemasSslMethod string
const (
- SourceMssqlSSLMethodEncryptedTrustServerCertificateSSLMethodEncryptedTrustServerCertificate SourceMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod = "encrypted_trust_server_certificate"
+ SourceMssqlSchemasSslMethodEncryptedTrustServerCertificate SourceMssqlSchemasSslMethod = "encrypted_trust_server_certificate"
)
-func (e SourceMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod) ToPointer() *SourceMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod {
+func (e SourceMssqlSchemasSslMethod) ToPointer() *SourceMssqlSchemasSslMethod {
return &e
}
-func (e *SourceMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlSchemasSslMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "encrypted_trust_server_certificate":
- *e = SourceMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod(v)
+ *e = SourceMssqlSchemasSslMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlSchemasSslMethod: %v", v)
}
}
-// SourceMssqlSSLMethodEncryptedTrustServerCertificate - Use the certificate provided by the server without verification. (For testing purposes only!)
-type SourceMssqlSSLMethodEncryptedTrustServerCertificate struct {
- SslMethod SourceMssqlSSLMethodEncryptedTrustServerCertificateSSLMethod `json:"ssl_method"`
+// SourceMssqlEncryptedTrustServerCertificate - Use the certificate provided by the server without verification. (For testing purposes only!)
+type SourceMssqlEncryptedTrustServerCertificate struct {
+ sslMethod SourceMssqlSchemasSslMethod `const:"encrypted_trust_server_certificate" json:"ssl_method"`
+}
+
+func (s SourceMssqlEncryptedTrustServerCertificate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlEncryptedTrustServerCertificate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMssqlEncryptedTrustServerCertificate) GetSslMethod() SourceMssqlSchemasSslMethod {
+ return SourceMssqlSchemasSslMethodEncryptedTrustServerCertificate
}
type SourceMssqlSSLMethodType string
const (
- SourceMssqlSSLMethodTypeSourceMssqlSSLMethodEncryptedTrustServerCertificate SourceMssqlSSLMethodType = "source-mssql_SSL Method_Encrypted (trust server certificate)"
- SourceMssqlSSLMethodTypeSourceMssqlSSLMethodEncryptedVerifyCertificate SourceMssqlSSLMethodType = "source-mssql_SSL Method_Encrypted (verify certificate)"
+ SourceMssqlSSLMethodTypeSourceMssqlEncryptedTrustServerCertificate SourceMssqlSSLMethodType = "source-mssql_Encrypted (trust server certificate)"
+ SourceMssqlSSLMethodTypeSourceMssqlEncryptedVerifyCertificate SourceMssqlSSLMethodType = "source-mssql_Encrypted (verify certificate)"
)
type SourceMssqlSSLMethod struct {
- SourceMssqlSSLMethodEncryptedTrustServerCertificate *SourceMssqlSSLMethodEncryptedTrustServerCertificate
- SourceMssqlSSLMethodEncryptedVerifyCertificate *SourceMssqlSSLMethodEncryptedVerifyCertificate
+ SourceMssqlEncryptedTrustServerCertificate *SourceMssqlEncryptedTrustServerCertificate
+ SourceMssqlEncryptedVerifyCertificate *SourceMssqlEncryptedVerifyCertificate
Type SourceMssqlSSLMethodType
}
-func CreateSourceMssqlSSLMethodSourceMssqlSSLMethodEncryptedTrustServerCertificate(sourceMssqlSSLMethodEncryptedTrustServerCertificate SourceMssqlSSLMethodEncryptedTrustServerCertificate) SourceMssqlSSLMethod {
- typ := SourceMssqlSSLMethodTypeSourceMssqlSSLMethodEncryptedTrustServerCertificate
+func CreateSourceMssqlSSLMethodSourceMssqlEncryptedTrustServerCertificate(sourceMssqlEncryptedTrustServerCertificate SourceMssqlEncryptedTrustServerCertificate) SourceMssqlSSLMethod {
+ typ := SourceMssqlSSLMethodTypeSourceMssqlEncryptedTrustServerCertificate
return SourceMssqlSSLMethod{
- SourceMssqlSSLMethodEncryptedTrustServerCertificate: &sourceMssqlSSLMethodEncryptedTrustServerCertificate,
+ SourceMssqlEncryptedTrustServerCertificate: &sourceMssqlEncryptedTrustServerCertificate,
Type: typ,
}
}
-func CreateSourceMssqlSSLMethodSourceMssqlSSLMethodEncryptedVerifyCertificate(sourceMssqlSSLMethodEncryptedVerifyCertificate SourceMssqlSSLMethodEncryptedVerifyCertificate) SourceMssqlSSLMethod {
- typ := SourceMssqlSSLMethodTypeSourceMssqlSSLMethodEncryptedVerifyCertificate
+func CreateSourceMssqlSSLMethodSourceMssqlEncryptedVerifyCertificate(sourceMssqlEncryptedVerifyCertificate SourceMssqlEncryptedVerifyCertificate) SourceMssqlSSLMethod {
+ typ := SourceMssqlSSLMethodTypeSourceMssqlEncryptedVerifyCertificate
return SourceMssqlSSLMethod{
- SourceMssqlSSLMethodEncryptedVerifyCertificate: &sourceMssqlSSLMethodEncryptedVerifyCertificate,
- Type: typ,
+ SourceMssqlEncryptedVerifyCertificate: &sourceMssqlEncryptedVerifyCertificate,
+ Type: typ,
}
}
func (u *SourceMssqlSSLMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMssqlSSLMethodEncryptedTrustServerCertificate := new(SourceMssqlSSLMethodEncryptedTrustServerCertificate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlSSLMethodEncryptedTrustServerCertificate); err == nil {
- u.SourceMssqlSSLMethodEncryptedTrustServerCertificate = sourceMssqlSSLMethodEncryptedTrustServerCertificate
- u.Type = SourceMssqlSSLMethodTypeSourceMssqlSSLMethodEncryptedTrustServerCertificate
+
+ sourceMssqlEncryptedTrustServerCertificate := new(SourceMssqlEncryptedTrustServerCertificate)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlEncryptedTrustServerCertificate, "", true, true); err == nil {
+ u.SourceMssqlEncryptedTrustServerCertificate = sourceMssqlEncryptedTrustServerCertificate
+ u.Type = SourceMssqlSSLMethodTypeSourceMssqlEncryptedTrustServerCertificate
return nil
}
- sourceMssqlSSLMethodEncryptedVerifyCertificate := new(SourceMssqlSSLMethodEncryptedVerifyCertificate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlSSLMethodEncryptedVerifyCertificate); err == nil {
- u.SourceMssqlSSLMethodEncryptedVerifyCertificate = sourceMssqlSSLMethodEncryptedVerifyCertificate
- u.Type = SourceMssqlSSLMethodTypeSourceMssqlSSLMethodEncryptedVerifyCertificate
+ sourceMssqlEncryptedVerifyCertificate := new(SourceMssqlEncryptedVerifyCertificate)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlEncryptedVerifyCertificate, "", true, true); err == nil {
+ u.SourceMssqlEncryptedVerifyCertificate = sourceMssqlEncryptedVerifyCertificate
+ u.Type = SourceMssqlSSLMethodTypeSourceMssqlEncryptedVerifyCertificate
return nil
}
@@ -338,196 +416,290 @@ func (u *SourceMssqlSSLMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceMssqlSSLMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMssqlSSLMethodEncryptedTrustServerCertificate != nil {
- return json.Marshal(u.SourceMssqlSSLMethodEncryptedTrustServerCertificate)
+ if u.SourceMssqlEncryptedTrustServerCertificate != nil {
+ return utils.MarshalJSON(u.SourceMssqlEncryptedTrustServerCertificate, "", true)
}
- if u.SourceMssqlSSLMethodEncryptedVerifyCertificate != nil {
- return json.Marshal(u.SourceMssqlSSLMethodEncryptedVerifyCertificate)
+ if u.SourceMssqlEncryptedVerifyCertificate != nil {
+ return utils.MarshalJSON(u.SourceMssqlEncryptedVerifyCertificate, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type SourceMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// SourceMssqlSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type SourceMssqlSchemasTunnelMethodTunnelMethod string
const (
- SourceMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth SourceMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ SourceMssqlSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourceMssqlSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourceMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *SourceMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e SourceMssqlSchemasTunnelMethodTunnelMethod) ToPointer() *SourceMssqlSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *SourceMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourceMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = SourceMssqlSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// SourceMssqlSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMssqlSSHTunnelMethodPasswordAuthentication struct {
+// SourceMssqlPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMssqlPasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourceMssqlSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMssqlSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourceMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourceMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourceMssqlPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMssqlPasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceMssqlPasswordAuthentication) GetTunnelMethod() SourceMssqlSchemasTunnelMethodTunnelMethod {
+ return SourceMssqlSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourceMssqlPasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceMssqlPasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourceMssqlPasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// SourceMssqlSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type SourceMssqlSchemasTunnelMethod string
const (
- SourceMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth SourceMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ SourceMssqlSchemasTunnelMethodSSHKeyAuth SourceMssqlSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e SourceMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *SourceMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e SourceMssqlSchemasTunnelMethod) ToPointer() *SourceMssqlSchemasTunnelMethod {
return &e
}
-func (e *SourceMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourceMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = SourceMssqlSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlSchemasTunnelMethod: %v", v)
}
}
-// SourceMssqlSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMssqlSSHTunnelMethodSSHKeyAuthentication struct {
+// SourceMssqlSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMssqlSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourceMssqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMssqlSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourceMssqlSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourceMssqlSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourceMssqlSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMssqlSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourceMssqlSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceMssqlSSHKeyAuthentication) GetTunnelMethod() SourceMssqlSchemasTunnelMethod {
+ return SourceMssqlSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourceMssqlSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceMssqlSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourceMssqlTunnelMethod - No ssh tunnel needed to connect to database
+type SourceMssqlTunnelMethod string
const (
- SourceMssqlSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourceMssqlSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourceMssqlTunnelMethodNoTunnel SourceMssqlTunnelMethod = "NO_TUNNEL"
)
-func (e SourceMssqlSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourceMssqlSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourceMssqlTunnelMethod) ToPointer() *SourceMssqlTunnelMethod {
return &e
}
-func (e *SourceMssqlSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourceMssqlSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourceMssqlTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlTunnelMethod: %v", v)
}
}
-// SourceMssqlSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMssqlSSHTunnelMethodNoTunnel struct {
+// SourceMssqlNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMssqlNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourceMssqlSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMssqlTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourceMssqlNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMssqlNoTunnel) GetTunnelMethod() SourceMssqlTunnelMethod {
+ return SourceMssqlTunnelMethodNoTunnel
}
type SourceMssqlSSHTunnelMethodType string
const (
- SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHTunnelMethodNoTunnel SourceMssqlSSHTunnelMethodType = "source-mssql_SSH Tunnel Method_No Tunnel"
- SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHTunnelMethodSSHKeyAuthentication SourceMssqlSSHTunnelMethodType = "source-mssql_SSH Tunnel Method_SSH Key Authentication"
- SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHTunnelMethodPasswordAuthentication SourceMssqlSSHTunnelMethodType = "source-mssql_SSH Tunnel Method_Password Authentication"
+ SourceMssqlSSHTunnelMethodTypeSourceMssqlNoTunnel SourceMssqlSSHTunnelMethodType = "source-mssql_No Tunnel"
+ SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHKeyAuthentication SourceMssqlSSHTunnelMethodType = "source-mssql_SSH Key Authentication"
+ SourceMssqlSSHTunnelMethodTypeSourceMssqlPasswordAuthentication SourceMssqlSSHTunnelMethodType = "source-mssql_Password Authentication"
)
type SourceMssqlSSHTunnelMethod struct {
- SourceMssqlSSHTunnelMethodNoTunnel *SourceMssqlSSHTunnelMethodNoTunnel
- SourceMssqlSSHTunnelMethodSSHKeyAuthentication *SourceMssqlSSHTunnelMethodSSHKeyAuthentication
- SourceMssqlSSHTunnelMethodPasswordAuthentication *SourceMssqlSSHTunnelMethodPasswordAuthentication
+ SourceMssqlNoTunnel *SourceMssqlNoTunnel
+ SourceMssqlSSHKeyAuthentication *SourceMssqlSSHKeyAuthentication
+ SourceMssqlPasswordAuthentication *SourceMssqlPasswordAuthentication
Type SourceMssqlSSHTunnelMethodType
}
-func CreateSourceMssqlSSHTunnelMethodSourceMssqlSSHTunnelMethodNoTunnel(sourceMssqlSSHTunnelMethodNoTunnel SourceMssqlSSHTunnelMethodNoTunnel) SourceMssqlSSHTunnelMethod {
- typ := SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHTunnelMethodNoTunnel
+func CreateSourceMssqlSSHTunnelMethodSourceMssqlNoTunnel(sourceMssqlNoTunnel SourceMssqlNoTunnel) SourceMssqlSSHTunnelMethod {
+ typ := SourceMssqlSSHTunnelMethodTypeSourceMssqlNoTunnel
return SourceMssqlSSHTunnelMethod{
- SourceMssqlSSHTunnelMethodNoTunnel: &sourceMssqlSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourceMssqlNoTunnel: &sourceMssqlNoTunnel,
+ Type: typ,
}
}
-func CreateSourceMssqlSSHTunnelMethodSourceMssqlSSHTunnelMethodSSHKeyAuthentication(sourceMssqlSSHTunnelMethodSSHKeyAuthentication SourceMssqlSSHTunnelMethodSSHKeyAuthentication) SourceMssqlSSHTunnelMethod {
- typ := SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHTunnelMethodSSHKeyAuthentication
+func CreateSourceMssqlSSHTunnelMethodSourceMssqlSSHKeyAuthentication(sourceMssqlSSHKeyAuthentication SourceMssqlSSHKeyAuthentication) SourceMssqlSSHTunnelMethod {
+ typ := SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHKeyAuthentication
return SourceMssqlSSHTunnelMethod{
- SourceMssqlSSHTunnelMethodSSHKeyAuthentication: &sourceMssqlSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ SourceMssqlSSHKeyAuthentication: &sourceMssqlSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateSourceMssqlSSHTunnelMethodSourceMssqlSSHTunnelMethodPasswordAuthentication(sourceMssqlSSHTunnelMethodPasswordAuthentication SourceMssqlSSHTunnelMethodPasswordAuthentication) SourceMssqlSSHTunnelMethod {
- typ := SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHTunnelMethodPasswordAuthentication
+func CreateSourceMssqlSSHTunnelMethodSourceMssqlPasswordAuthentication(sourceMssqlPasswordAuthentication SourceMssqlPasswordAuthentication) SourceMssqlSSHTunnelMethod {
+ typ := SourceMssqlSSHTunnelMethodTypeSourceMssqlPasswordAuthentication
return SourceMssqlSSHTunnelMethod{
- SourceMssqlSSHTunnelMethodPasswordAuthentication: &sourceMssqlSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ SourceMssqlPasswordAuthentication: &sourceMssqlPasswordAuthentication,
+ Type: typ,
}
}
func (u *SourceMssqlSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMssqlSSHTunnelMethodNoTunnel := new(SourceMssqlSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlSSHTunnelMethodNoTunnel); err == nil {
- u.SourceMssqlSSHTunnelMethodNoTunnel = sourceMssqlSSHTunnelMethodNoTunnel
- u.Type = SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHTunnelMethodNoTunnel
+
+ sourceMssqlNoTunnel := new(SourceMssqlNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlNoTunnel, "", true, true); err == nil {
+ u.SourceMssqlNoTunnel = sourceMssqlNoTunnel
+ u.Type = SourceMssqlSSHTunnelMethodTypeSourceMssqlNoTunnel
return nil
}
- sourceMssqlSSHTunnelMethodSSHKeyAuthentication := new(SourceMssqlSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourceMssqlSSHTunnelMethodSSHKeyAuthentication = sourceMssqlSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHTunnelMethodSSHKeyAuthentication
+ sourceMssqlSSHKeyAuthentication := new(SourceMssqlSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceMssqlSSHKeyAuthentication = sourceMssqlSSHKeyAuthentication
+ u.Type = SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHKeyAuthentication
return nil
}
- sourceMssqlSSHTunnelMethodPasswordAuthentication := new(SourceMssqlSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourceMssqlSSHTunnelMethodPasswordAuthentication = sourceMssqlSSHTunnelMethodPasswordAuthentication
- u.Type = SourceMssqlSSHTunnelMethodTypeSourceMssqlSSHTunnelMethodPasswordAuthentication
+ sourceMssqlPasswordAuthentication := new(SourceMssqlPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlPasswordAuthentication, "", true, true); err == nil {
+ u.SourceMssqlPasswordAuthentication = sourceMssqlPasswordAuthentication
+ u.Type = SourceMssqlSSHTunnelMethodTypeSourceMssqlPasswordAuthentication
return nil
}
@@ -535,19 +707,19 @@ func (u *SourceMssqlSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceMssqlSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMssqlSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourceMssqlSSHTunnelMethodNoTunnel)
+ if u.SourceMssqlNoTunnel != nil {
+ return utils.MarshalJSON(u.SourceMssqlNoTunnel, "", true)
}
- if u.SourceMssqlSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceMssqlSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourceMssqlSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceMssqlSSHKeyAuthentication, "", true)
}
- if u.SourceMssqlSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourceMssqlSSHTunnelMethodPasswordAuthentication)
+ if u.SourceMssqlPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceMssqlPasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceMssql struct {
@@ -565,7 +737,7 @@ type SourceMssql struct {
ReplicationMethod *SourceMssqlUpdateMethod `json:"replication_method,omitempty"`
// The list of schemas to sync from. Defaults to user. Case sensitive.
Schemas []string `json:"schemas,omitempty"`
- SourceType SourceMssqlMssql `json:"sourceType"`
+ sourceType SourceMssqlMssql `const:"mssql" json:"sourceType"`
// The encryption method which is used when communicating with the database.
SslMethod *SourceMssqlSSLMethod `json:"ssl_method,omitempty"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
@@ -573,3 +745,88 @@ type SourceMssql struct {
// The username which is used to access the database.
Username string `json:"username"`
}
+
+func (s SourceMssql) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssql) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMssql) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceMssql) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceMssql) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceMssql) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceMssql) GetPort() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Port
+}
+
+func (o *SourceMssql) GetReplicationMethod() *SourceMssqlUpdateMethod {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicationMethod
+}
+
+func (o *SourceMssql) GetSchemas() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Schemas
+}
+
+func (o *SourceMssql) GetSourceType() SourceMssqlMssql {
+ return SourceMssqlMssqlMssql
+}
+
+func (o *SourceMssql) GetSslMethod() *SourceMssqlSSLMethod {
+ if o == nil {
+ return nil
+ }
+ return o.SslMethod
+}
+
+func (o *SourceMssql) GetTunnelMethod() *SourceMssqlSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourceMssql) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemssqlcreaterequest.go b/internal/sdk/pkg/models/shared/sourcemssqlcreaterequest.go
old mode 100755
new mode 100644
index e82b8ef58..267fba69f
--- a/internal/sdk/pkg/models/shared/sourcemssqlcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemssqlcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMssqlCreateRequest struct {
Configuration SourceMssql `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMssqlCreateRequest) GetConfiguration() SourceMssql {
+ if o == nil {
+ return SourceMssql{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMssqlCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMssqlCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMssqlCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMssqlCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemssqlputrequest.go b/internal/sdk/pkg/models/shared/sourcemssqlputrequest.go
old mode 100755
new mode 100644
index 201711a24..e5eb44345
--- a/internal/sdk/pkg/models/shared/sourcemssqlputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemssqlputrequest.go
@@ -7,3 +7,24 @@ type SourceMssqlPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMssqlPutRequest) GetConfiguration() SourceMssqlUpdate {
+ if o == nil {
+ return SourceMssqlUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMssqlPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMssqlPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemssqlupdate.go b/internal/sdk/pkg/models/shared/sourcemssqlupdate.go
old mode 100755
new mode 100644
index f0d7ddf6f..1977e9dbf
--- a/internal/sdk/pkg/models/shared/sourcemssqlupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemssqlupdate.go
@@ -3,54 +3,69 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod string
+type SourceMssqlUpdateSchemasMethod string
const (
- SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethodStandard SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod = "STANDARD"
+ SourceMssqlUpdateSchemasMethodStandard SourceMssqlUpdateSchemasMethod = "STANDARD"
)
-func (e SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod) ToPointer() *SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod {
+func (e SourceMssqlUpdateSchemasMethod) ToPointer() *SourceMssqlUpdateSchemasMethod {
return &e
}
-func (e *SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlUpdateSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "STANDARD":
- *e = SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod(v)
+ *e = SourceMssqlUpdateSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlUpdateSchemasMethod: %v", v)
}
}
-// SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
-type SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor struct {
- Method SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod `json:"method"`
+// ScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
+type ScanChangesWithUserDefinedCursor struct {
+ method SourceMssqlUpdateSchemasMethod `const:"STANDARD" json:"method"`
}
-// SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync - What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.
-type SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync string
+func (s ScanChangesWithUserDefinedCursor) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *ScanChangesWithUserDefinedCursor) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ScanChangesWithUserDefinedCursor) GetMethod() SourceMssqlUpdateSchemasMethod {
+ return SourceMssqlUpdateSchemasMethodStandard
+}
+
+// DataToSync - What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.
+type DataToSync string
const (
- SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSyncExistingAndNew SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync = "Existing and New"
- SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSyncNewChangesOnly SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync = "New Changes Only"
+ DataToSyncExistingAndNew DataToSync = "Existing and New"
+ DataToSyncNewChangesOnly DataToSync = "New Changes Only"
)
-func (e SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync) ToPointer() *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync {
+func (e DataToSync) ToPointer() *DataToSync {
return &e
}
-func (e *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync) UnmarshalJSON(data []byte) error {
+func (e *DataToSync) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -59,50 +74,50 @@ func (e *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataTo
case "Existing and New":
fallthrough
case "New Changes Only":
- *e = SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync(v)
+ *e = DataToSync(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync: %v", v)
+ return fmt.Errorf("invalid value for DataToSync: %v", v)
}
}
-type SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod string
+type SourceMssqlUpdateSchemasReplicationMethodMethod string
const (
- SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethodCdc SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod = "CDC"
+ SourceMssqlUpdateSchemasReplicationMethodMethodCdc SourceMssqlUpdateSchemasReplicationMethodMethod = "CDC"
)
-func (e SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod) ToPointer() *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod {
+func (e SourceMssqlUpdateSchemasReplicationMethodMethod) ToPointer() *SourceMssqlUpdateSchemasReplicationMethodMethod {
return &e
}
-func (e *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlUpdateSchemasReplicationMethodMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CDC":
- *e = SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod(v)
+ *e = SourceMssqlUpdateSchemasReplicationMethodMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlUpdateSchemasReplicationMethodMethod: %v", v)
}
}
-// SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel - Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.
-type SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel string
+// InitialSnapshotIsolationLevel - Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.
+type InitialSnapshotIsolationLevel string
const (
- SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevelSnapshot SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel = "Snapshot"
- SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevelReadCommitted SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel = "Read Committed"
+ InitialSnapshotIsolationLevelSnapshot InitialSnapshotIsolationLevel = "Snapshot"
+ InitialSnapshotIsolationLevelReadCommitted InitialSnapshotIsolationLevel = "Read Committed"
)
-func (e SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel) ToPointer() *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel {
+func (e InitialSnapshotIsolationLevel) ToPointer() *InitialSnapshotIsolationLevel {
return &e
}
-func (e *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel) UnmarshalJSON(data []byte) error {
+func (e *InitialSnapshotIsolationLevel) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -111,202 +126,265 @@ func (e *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitia
case "Snapshot":
fallthrough
case "Read Committed":
- *e = SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel(v)
+ *e = InitialSnapshotIsolationLevel(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel: %v", v)
+ return fmt.Errorf("invalid value for InitialSnapshotIsolationLevel: %v", v)
}
}
-// SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database.
-type SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC struct {
+// ReadChangesUsingChangeDataCaptureCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database.
+type ReadChangesUsingChangeDataCaptureCDC struct {
// What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC.
- DataToSync *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync `json:"data_to_sync,omitempty"`
+ DataToSync *DataToSync `default:"Existing and New" json:"data_to_sync"`
// The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
- InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"`
- Method SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod `json:"method"`
+ InitialWaitingSeconds *int64 `default:"300" json:"initial_waiting_seconds"`
+ method SourceMssqlUpdateSchemasReplicationMethodMethod `const:"CDC" json:"method"`
// Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.
- SnapshotIsolation *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel `json:"snapshot_isolation,omitempty"`
+ SnapshotIsolation *InitialSnapshotIsolationLevel `default:"Snapshot" json:"snapshot_isolation"`
+}
+
+func (r ReadChangesUsingChangeDataCaptureCDC) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(r, "", false)
}
-type SourceMssqlUpdateUpdateMethodType string
+func (r *ReadChangesUsingChangeDataCaptureCDC) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &r, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ReadChangesUsingChangeDataCaptureCDC) GetDataToSync() *DataToSync {
+ if o == nil {
+ return nil
+ }
+ return o.DataToSync
+}
+
+func (o *ReadChangesUsingChangeDataCaptureCDC) GetInitialWaitingSeconds() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.InitialWaitingSeconds
+}
+
+func (o *ReadChangesUsingChangeDataCaptureCDC) GetMethod() SourceMssqlUpdateSchemasReplicationMethodMethod {
+ return SourceMssqlUpdateSchemasReplicationMethodMethodCdc
+}
+
+func (o *ReadChangesUsingChangeDataCaptureCDC) GetSnapshotIsolation() *InitialSnapshotIsolationLevel {
+ if o == nil {
+ return nil
+ }
+ return o.SnapshotIsolation
+}
+
+type UpdateMethodType string
const (
- SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC SourceMssqlUpdateUpdateMethodType = "source-mssql-update_Update Method_Read Changes using Change Data Capture (CDC)"
- SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor SourceMssqlUpdateUpdateMethodType = "source-mssql-update_Update Method_Scan Changes with User Defined Cursor"
+ UpdateMethodTypeReadChangesUsingChangeDataCaptureCDC UpdateMethodType = "Read Changes using Change Data Capture (CDC)"
+ UpdateMethodTypeScanChangesWithUserDefinedCursor UpdateMethodType = "Scan Changes with User Defined Cursor"
)
-type SourceMssqlUpdateUpdateMethod struct {
- SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC
- SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor
+type UpdateMethod struct {
+ ReadChangesUsingChangeDataCaptureCDC *ReadChangesUsingChangeDataCaptureCDC
+ ScanChangesWithUserDefinedCursor *ScanChangesWithUserDefinedCursor
- Type SourceMssqlUpdateUpdateMethodType
+ Type UpdateMethodType
}
-func CreateSourceMssqlUpdateUpdateMethodSourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC(sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC) SourceMssqlUpdateUpdateMethod {
- typ := SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC
+func CreateUpdateMethodReadChangesUsingChangeDataCaptureCDC(readChangesUsingChangeDataCaptureCDC ReadChangesUsingChangeDataCaptureCDC) UpdateMethod {
+ typ := UpdateMethodTypeReadChangesUsingChangeDataCaptureCDC
- return SourceMssqlUpdateUpdateMethod{
- SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC: &sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC,
- Type: typ,
+ return UpdateMethod{
+ ReadChangesUsingChangeDataCaptureCDC: &readChangesUsingChangeDataCaptureCDC,
+ Type: typ,
}
}
-func CreateSourceMssqlUpdateUpdateMethodSourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor(sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor) SourceMssqlUpdateUpdateMethod {
- typ := SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor
+func CreateUpdateMethodScanChangesWithUserDefinedCursor(scanChangesWithUserDefinedCursor ScanChangesWithUserDefinedCursor) UpdateMethod {
+ typ := UpdateMethodTypeScanChangesWithUserDefinedCursor
- return SourceMssqlUpdateUpdateMethod{
- SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor: &sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor,
- Type: typ,
+ return UpdateMethod{
+ ScanChangesWithUserDefinedCursor: &scanChangesWithUserDefinedCursor,
+ Type: typ,
}
}
-func (u *SourceMssqlUpdateUpdateMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *UpdateMethod) UnmarshalJSON(data []byte) error {
- sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor := new(SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor); err == nil {
- u.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor = sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor
- u.Type = SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor
+ scanChangesWithUserDefinedCursor := new(ScanChangesWithUserDefinedCursor)
+ if err := utils.UnmarshalJSON(data, &scanChangesWithUserDefinedCursor, "", true, true); err == nil {
+ u.ScanChangesWithUserDefinedCursor = scanChangesWithUserDefinedCursor
+ u.Type = UpdateMethodTypeScanChangesWithUserDefinedCursor
return nil
}
- sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC := new(SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC); err == nil {
- u.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC = sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC
- u.Type = SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC
+ readChangesUsingChangeDataCaptureCDC := new(ReadChangesUsingChangeDataCaptureCDC)
+ if err := utils.UnmarshalJSON(data, &readChangesUsingChangeDataCaptureCDC, "", true, true); err == nil {
+ u.ReadChangesUsingChangeDataCaptureCDC = readChangesUsingChangeDataCaptureCDC
+ u.Type = UpdateMethodTypeReadChangesUsingChangeDataCaptureCDC
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceMssqlUpdateUpdateMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil {
- return json.Marshal(u.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor)
+func (u UpdateMethod) MarshalJSON() ([]byte, error) {
+ if u.ReadChangesUsingChangeDataCaptureCDC != nil {
+ return utils.MarshalJSON(u.ReadChangesUsingChangeDataCaptureCDC, "", true)
}
- if u.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil {
- return json.Marshal(u.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC)
+ if u.ScanChangesWithUserDefinedCursor != nil {
+ return utils.MarshalJSON(u.ScanChangesWithUserDefinedCursor, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod string
+type SourceMssqlUpdateSchemasSslMethodSslMethod string
const (
- SourceMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethodEncryptedVerifyCertificate SourceMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod = "encrypted_verify_certificate"
+ SourceMssqlUpdateSchemasSslMethodSslMethodEncryptedVerifyCertificate SourceMssqlUpdateSchemasSslMethodSslMethod = "encrypted_verify_certificate"
)
-func (e SourceMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod) ToPointer() *SourceMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod {
+func (e SourceMssqlUpdateSchemasSslMethodSslMethod) ToPointer() *SourceMssqlUpdateSchemasSslMethodSslMethod {
return &e
}
-func (e *SourceMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlUpdateSchemasSslMethodSslMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "encrypted_verify_certificate":
- *e = SourceMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod(v)
+ *e = SourceMssqlUpdateSchemasSslMethodSslMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlUpdateSchemasSslMethodSslMethod: %v", v)
}
}
-// SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate - Verify and use the certificate provided by the server.
-type SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate struct {
+// SourceMssqlUpdateEncryptedVerifyCertificate - Verify and use the certificate provided by the server.
+type SourceMssqlUpdateEncryptedVerifyCertificate struct {
// Specifies the host name of the server. The value of this property must match the subject property of the certificate.
- HostNameInCertificate *string `json:"hostNameInCertificate,omitempty"`
- SslMethod SourceMssqlUpdateSSLMethodEncryptedVerifyCertificateSSLMethod `json:"ssl_method"`
+ HostNameInCertificate *string `json:"hostNameInCertificate,omitempty"`
+ sslMethod SourceMssqlUpdateSchemasSslMethodSslMethod `const:"encrypted_verify_certificate" json:"ssl_method"`
+}
+
+func (s SourceMssqlUpdateEncryptedVerifyCertificate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlUpdateEncryptedVerifyCertificate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
}
-type SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod string
+func (o *SourceMssqlUpdateEncryptedVerifyCertificate) GetHostNameInCertificate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.HostNameInCertificate
+}
+
+func (o *SourceMssqlUpdateEncryptedVerifyCertificate) GetSslMethod() SourceMssqlUpdateSchemasSslMethodSslMethod {
+ return SourceMssqlUpdateSchemasSslMethodSslMethodEncryptedVerifyCertificate
+}
+
+type SourceMssqlUpdateSchemasSslMethod string
const (
- SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethodEncryptedTrustServerCertificate SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod = "encrypted_trust_server_certificate"
+ SourceMssqlUpdateSchemasSslMethodEncryptedTrustServerCertificate SourceMssqlUpdateSchemasSslMethod = "encrypted_trust_server_certificate"
)
-func (e SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod) ToPointer() *SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod {
+func (e SourceMssqlUpdateSchemasSslMethod) ToPointer() *SourceMssqlUpdateSchemasSslMethod {
return &e
}
-func (e *SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlUpdateSchemasSslMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "encrypted_trust_server_certificate":
- *e = SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod(v)
+ *e = SourceMssqlUpdateSchemasSslMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlUpdateSchemasSslMethod: %v", v)
+ }
+}
+
+// SourceMssqlUpdateEncryptedTrustServerCertificate - Use the certificate provided by the server without verification. (For testing purposes only!)
+type SourceMssqlUpdateEncryptedTrustServerCertificate struct {
+ sslMethod SourceMssqlUpdateSchemasSslMethod `const:"encrypted_trust_server_certificate" json:"ssl_method"`
+}
+
+func (s SourceMssqlUpdateEncryptedTrustServerCertificate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlUpdateEncryptedTrustServerCertificate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate - Use the certificate provided by the server without verification. (For testing purposes only!)
-type SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate struct {
- SslMethod SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificateSSLMethod `json:"ssl_method"`
+func (o *SourceMssqlUpdateEncryptedTrustServerCertificate) GetSslMethod() SourceMssqlUpdateSchemasSslMethod {
+ return SourceMssqlUpdateSchemasSslMethodEncryptedTrustServerCertificate
}
type SourceMssqlUpdateSSLMethodType string
const (
- SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate SourceMssqlUpdateSSLMethodType = "source-mssql-update_SSL Method_Encrypted (trust server certificate)"
- SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateSSLMethodEncryptedVerifyCertificate SourceMssqlUpdateSSLMethodType = "source-mssql-update_SSL Method_Encrypted (verify certificate)"
+ SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateEncryptedTrustServerCertificate SourceMssqlUpdateSSLMethodType = "source-mssql-update_Encrypted (trust server certificate)"
+ SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateEncryptedVerifyCertificate SourceMssqlUpdateSSLMethodType = "source-mssql-update_Encrypted (verify certificate)"
)
type SourceMssqlUpdateSSLMethod struct {
- SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate *SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate
- SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate *SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate
+ SourceMssqlUpdateEncryptedTrustServerCertificate *SourceMssqlUpdateEncryptedTrustServerCertificate
+ SourceMssqlUpdateEncryptedVerifyCertificate *SourceMssqlUpdateEncryptedVerifyCertificate
Type SourceMssqlUpdateSSLMethodType
}
-func CreateSourceMssqlUpdateSSLMethodSourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate(sourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate) SourceMssqlUpdateSSLMethod {
- typ := SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate
+func CreateSourceMssqlUpdateSSLMethodSourceMssqlUpdateEncryptedTrustServerCertificate(sourceMssqlUpdateEncryptedTrustServerCertificate SourceMssqlUpdateEncryptedTrustServerCertificate) SourceMssqlUpdateSSLMethod {
+ typ := SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateEncryptedTrustServerCertificate
return SourceMssqlUpdateSSLMethod{
- SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate: &sourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate,
+ SourceMssqlUpdateEncryptedTrustServerCertificate: &sourceMssqlUpdateEncryptedTrustServerCertificate,
Type: typ,
}
}
-func CreateSourceMssqlUpdateSSLMethodSourceMssqlUpdateSSLMethodEncryptedVerifyCertificate(sourceMssqlUpdateSSLMethodEncryptedVerifyCertificate SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate) SourceMssqlUpdateSSLMethod {
- typ := SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateSSLMethodEncryptedVerifyCertificate
+func CreateSourceMssqlUpdateSSLMethodSourceMssqlUpdateEncryptedVerifyCertificate(sourceMssqlUpdateEncryptedVerifyCertificate SourceMssqlUpdateEncryptedVerifyCertificate) SourceMssqlUpdateSSLMethod {
+ typ := SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateEncryptedVerifyCertificate
return SourceMssqlUpdateSSLMethod{
- SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate: &sourceMssqlUpdateSSLMethodEncryptedVerifyCertificate,
+ SourceMssqlUpdateEncryptedVerifyCertificate: &sourceMssqlUpdateEncryptedVerifyCertificate,
Type: typ,
}
}
func (u *SourceMssqlUpdateSSLMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate := new(SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate); err == nil {
- u.SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate = sourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate
- u.Type = SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate
+
+ sourceMssqlUpdateEncryptedTrustServerCertificate := new(SourceMssqlUpdateEncryptedTrustServerCertificate)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlUpdateEncryptedTrustServerCertificate, "", true, true); err == nil {
+ u.SourceMssqlUpdateEncryptedTrustServerCertificate = sourceMssqlUpdateEncryptedTrustServerCertificate
+ u.Type = SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateEncryptedTrustServerCertificate
return nil
}
- sourceMssqlUpdateSSLMethodEncryptedVerifyCertificate := new(SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlUpdateSSLMethodEncryptedVerifyCertificate); err == nil {
- u.SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate = sourceMssqlUpdateSSLMethodEncryptedVerifyCertificate
- u.Type = SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateSSLMethodEncryptedVerifyCertificate
+ sourceMssqlUpdateEncryptedVerifyCertificate := new(SourceMssqlUpdateEncryptedVerifyCertificate)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlUpdateEncryptedVerifyCertificate, "", true, true); err == nil {
+ u.SourceMssqlUpdateEncryptedVerifyCertificate = sourceMssqlUpdateEncryptedVerifyCertificate
+ u.Type = SourceMssqlUpdateSSLMethodTypeSourceMssqlUpdateEncryptedVerifyCertificate
return nil
}
@@ -314,196 +392,290 @@ func (u *SourceMssqlUpdateSSLMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceMssqlUpdateSSLMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate != nil {
- return json.Marshal(u.SourceMssqlUpdateSSLMethodEncryptedTrustServerCertificate)
+ if u.SourceMssqlUpdateEncryptedTrustServerCertificate != nil {
+ return utils.MarshalJSON(u.SourceMssqlUpdateEncryptedTrustServerCertificate, "", true)
}
- if u.SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate != nil {
- return json.Marshal(u.SourceMssqlUpdateSSLMethodEncryptedVerifyCertificate)
+ if u.SourceMssqlUpdateEncryptedVerifyCertificate != nil {
+ return utils.MarshalJSON(u.SourceMssqlUpdateEncryptedVerifyCertificate, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type SourceMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// SourceMssqlUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type SourceMssqlUpdateSchemasTunnelMethodTunnelMethod string
const (
- SourceMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth SourceMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ SourceMssqlUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourceMssqlUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourceMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *SourceMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e SourceMssqlUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *SourceMssqlUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *SourceMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourceMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = SourceMssqlUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication struct {
+// SourceMssqlUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMssqlUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourceMssqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMssqlUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourceMssqlUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMssqlUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceMssqlUpdatePasswordAuthentication) GetTunnelMethod() SourceMssqlUpdateSchemasTunnelMethodTunnelMethod {
+ return SourceMssqlUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourceMssqlUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceMssqlUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourceMssqlUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// SourceMssqlUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type SourceMssqlUpdateSchemasTunnelMethod string
const (
- SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ SourceMssqlUpdateSchemasTunnelMethodSSHKeyAuth SourceMssqlUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e SourceMssqlUpdateSchemasTunnelMethod) ToPointer() *SourceMssqlUpdateSchemasTunnelMethod {
return &e
}
-func (e *SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = SourceMssqlUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlUpdateSchemasTunnelMethod: %v", v)
}
}
-// SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// SourceMssqlUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMssqlUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMssqlUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourceMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourceMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourceMssqlUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMssqlUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourceMssqlUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceMssqlUpdateSSHKeyAuthentication) GetTunnelMethod() SourceMssqlUpdateSchemasTunnelMethod {
+ return SourceMssqlUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourceMssqlUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceMssqlUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourceMssqlUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type SourceMssqlUpdateTunnelMethod string
const (
- SourceMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourceMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourceMssqlUpdateTunnelMethodNoTunnel SourceMssqlUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e SourceMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourceMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourceMssqlUpdateTunnelMethod) ToPointer() *SourceMssqlUpdateTunnelMethod {
return &e
}
-func (e *SourceMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMssqlUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourceMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourceMssqlUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMssqlUpdateTunnelMethod: %v", v)
}
}
-// SourceMssqlUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMssqlUpdateSSHTunnelMethodNoTunnel struct {
+// SourceMssqlUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMssqlUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourceMssqlUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMssqlUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourceMssqlUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMssqlUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMssqlUpdateNoTunnel) GetTunnelMethod() SourceMssqlUpdateTunnelMethod {
+ return SourceMssqlUpdateTunnelMethodNoTunnel
}
type SourceMssqlUpdateSSHTunnelMethodType string
const (
- SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHTunnelMethodNoTunnel SourceMssqlUpdateSSHTunnelMethodType = "source-mssql-update_SSH Tunnel Method_No Tunnel"
- SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication SourceMssqlUpdateSSHTunnelMethodType = "source-mssql-update_SSH Tunnel Method_SSH Key Authentication"
- SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHTunnelMethodPasswordAuthentication SourceMssqlUpdateSSHTunnelMethodType = "source-mssql-update_SSH Tunnel Method_Password Authentication"
+ SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateNoTunnel SourceMssqlUpdateSSHTunnelMethodType = "source-mssql-update_No Tunnel"
+ SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHKeyAuthentication SourceMssqlUpdateSSHTunnelMethodType = "source-mssql-update_SSH Key Authentication"
+ SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdatePasswordAuthentication SourceMssqlUpdateSSHTunnelMethodType = "source-mssql-update_Password Authentication"
)
type SourceMssqlUpdateSSHTunnelMethod struct {
- SourceMssqlUpdateSSHTunnelMethodNoTunnel *SourceMssqlUpdateSSHTunnelMethodNoTunnel
- SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication *SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication
- SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication *SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication
+ SourceMssqlUpdateNoTunnel *SourceMssqlUpdateNoTunnel
+ SourceMssqlUpdateSSHKeyAuthentication *SourceMssqlUpdateSSHKeyAuthentication
+ SourceMssqlUpdatePasswordAuthentication *SourceMssqlUpdatePasswordAuthentication
Type SourceMssqlUpdateSSHTunnelMethodType
}
-func CreateSourceMssqlUpdateSSHTunnelMethodSourceMssqlUpdateSSHTunnelMethodNoTunnel(sourceMssqlUpdateSSHTunnelMethodNoTunnel SourceMssqlUpdateSSHTunnelMethodNoTunnel) SourceMssqlUpdateSSHTunnelMethod {
- typ := SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHTunnelMethodNoTunnel
+func CreateSourceMssqlUpdateSSHTunnelMethodSourceMssqlUpdateNoTunnel(sourceMssqlUpdateNoTunnel SourceMssqlUpdateNoTunnel) SourceMssqlUpdateSSHTunnelMethod {
+ typ := SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateNoTunnel
return SourceMssqlUpdateSSHTunnelMethod{
- SourceMssqlUpdateSSHTunnelMethodNoTunnel: &sourceMssqlUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourceMssqlUpdateNoTunnel: &sourceMssqlUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateSourceMssqlUpdateSSHTunnelMethodSourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication(sourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication) SourceMssqlUpdateSSHTunnelMethod {
- typ := SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateSourceMssqlUpdateSSHTunnelMethodSourceMssqlUpdateSSHKeyAuthentication(sourceMssqlUpdateSSHKeyAuthentication SourceMssqlUpdateSSHKeyAuthentication) SourceMssqlUpdateSSHTunnelMethod {
+ typ := SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHKeyAuthentication
return SourceMssqlUpdateSSHTunnelMethod{
- SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication: &sourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ SourceMssqlUpdateSSHKeyAuthentication: &sourceMssqlUpdateSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateSourceMssqlUpdateSSHTunnelMethodSourceMssqlUpdateSSHTunnelMethodPasswordAuthentication(sourceMssqlUpdateSSHTunnelMethodPasswordAuthentication SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication) SourceMssqlUpdateSSHTunnelMethod {
- typ := SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHTunnelMethodPasswordAuthentication
+func CreateSourceMssqlUpdateSSHTunnelMethodSourceMssqlUpdatePasswordAuthentication(sourceMssqlUpdatePasswordAuthentication SourceMssqlUpdatePasswordAuthentication) SourceMssqlUpdateSSHTunnelMethod {
+ typ := SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdatePasswordAuthentication
return SourceMssqlUpdateSSHTunnelMethod{
- SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication: &sourceMssqlUpdateSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ SourceMssqlUpdatePasswordAuthentication: &sourceMssqlUpdatePasswordAuthentication,
+ Type: typ,
}
}
func (u *SourceMssqlUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceMssqlUpdateSSHTunnelMethodNoTunnel := new(SourceMssqlUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.SourceMssqlUpdateSSHTunnelMethodNoTunnel = sourceMssqlUpdateSSHTunnelMethodNoTunnel
- u.Type = SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHTunnelMethodNoTunnel
+
+ sourceMssqlUpdateNoTunnel := new(SourceMssqlUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlUpdateNoTunnel, "", true, true); err == nil {
+ u.SourceMssqlUpdateNoTunnel = sourceMssqlUpdateNoTunnel
+ u.Type = SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateNoTunnel
return nil
}
- sourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication := new(SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication = sourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication
+ sourceMssqlUpdateSSHKeyAuthentication := new(SourceMssqlUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceMssqlUpdateSSHKeyAuthentication = sourceMssqlUpdateSSHKeyAuthentication
+ u.Type = SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHKeyAuthentication
return nil
}
- sourceMssqlUpdateSSHTunnelMethodPasswordAuthentication := new(SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMssqlUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication = sourceMssqlUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdateSSHTunnelMethodPasswordAuthentication
+ sourceMssqlUpdatePasswordAuthentication := new(SourceMssqlUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceMssqlUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.SourceMssqlUpdatePasswordAuthentication = sourceMssqlUpdatePasswordAuthentication
+ u.Type = SourceMssqlUpdateSSHTunnelMethodTypeSourceMssqlUpdatePasswordAuthentication
return nil
}
@@ -511,19 +683,19 @@ func (u *SourceMssqlUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceMssqlUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMssqlUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourceMssqlUpdateSSHTunnelMethodNoTunnel)
+ if u.SourceMssqlUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.SourceMssqlUpdateNoTunnel, "", true)
}
- if u.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceMssqlUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourceMssqlUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceMssqlUpdateSSHKeyAuthentication, "", true)
}
- if u.SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourceMssqlUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.SourceMssqlUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceMssqlUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceMssqlUpdate struct {
@@ -538,7 +710,7 @@ type SourceMssqlUpdate struct {
// The port of the database.
Port int64 `json:"port"`
// Configures how data is extracted from the database.
- ReplicationMethod *SourceMssqlUpdateUpdateMethod `json:"replication_method,omitempty"`
+ ReplicationMethod *UpdateMethod `json:"replication_method,omitempty"`
// The list of schemas to sync from. Defaults to user. Case sensitive.
Schemas []string `json:"schemas,omitempty"`
// The encryption method which is used when communicating with the database.
@@ -548,3 +720,73 @@ type SourceMssqlUpdate struct {
// The username which is used to access the database.
Username string `json:"username"`
}
+
+func (o *SourceMssqlUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceMssqlUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceMssqlUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceMssqlUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceMssqlUpdate) GetPort() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Port
+}
+
+func (o *SourceMssqlUpdate) GetReplicationMethod() *UpdateMethod {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicationMethod
+}
+
+func (o *SourceMssqlUpdate) GetSchemas() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Schemas
+}
+
+func (o *SourceMssqlUpdate) GetSslMethod() *SourceMssqlUpdateSSLMethod {
+ if o == nil {
+ return nil
+ }
+ return o.SslMethod
+}
+
+func (o *SourceMssqlUpdate) GetTunnelMethod() *SourceMssqlUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourceMssqlUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemyhours.go b/internal/sdk/pkg/models/shared/sourcemyhours.go
old mode 100755
new mode 100644
index 928e6736a..903f4b22c
--- a/internal/sdk/pkg/models/shared/sourcemyhours.go
+++ b/internal/sdk/pkg/models/shared/sourcemyhours.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMyHoursMyHours string
+type MyHours string
const (
- SourceMyHoursMyHoursMyHours SourceMyHoursMyHours = "my-hours"
+ MyHoursMyHours MyHours = "my-hours"
)
-func (e SourceMyHoursMyHours) ToPointer() *SourceMyHoursMyHours {
+func (e MyHours) ToPointer() *MyHours {
return &e
}
-func (e *SourceMyHoursMyHours) UnmarshalJSON(data []byte) error {
+func (e *MyHours) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "my-hours":
- *e = SourceMyHoursMyHours(v)
+ *e = MyHours(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMyHoursMyHours: %v", v)
+ return fmt.Errorf("invalid value for MyHours: %v", v)
}
}
@@ -35,10 +36,53 @@ type SourceMyHours struct {
// Your My Hours username
Email string `json:"email"`
// Pagination size used for retrieving logs in days
- LogsBatchSize *int64 `json:"logs_batch_size,omitempty"`
+ LogsBatchSize *int64 `default:"30" json:"logs_batch_size"`
// The password associated to the username
- Password string `json:"password"`
- SourceType SourceMyHoursMyHours `json:"sourceType"`
+ Password string `json:"password"`
+ sourceType MyHours `const:"my-hours" json:"sourceType"`
// Start date for collecting time logs
StartDate string `json:"start_date"`
}
+
+func (s SourceMyHours) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMyHours) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMyHours) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+func (o *SourceMyHours) GetLogsBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LogsBatchSize
+}
+
+func (o *SourceMyHours) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceMyHours) GetSourceType() MyHours {
+ return MyHoursMyHours
+}
+
+func (o *SourceMyHours) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemyhourscreaterequest.go b/internal/sdk/pkg/models/shared/sourcemyhourscreaterequest.go
old mode 100755
new mode 100644
index 6acb52958..e701e28c8
--- a/internal/sdk/pkg/models/shared/sourcemyhourscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemyhourscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMyHoursCreateRequest struct {
Configuration SourceMyHours `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMyHoursCreateRequest) GetConfiguration() SourceMyHours {
+ if o == nil {
+ return SourceMyHours{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMyHoursCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMyHoursCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMyHoursCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMyHoursCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemyhoursputrequest.go b/internal/sdk/pkg/models/shared/sourcemyhoursputrequest.go
old mode 100755
new mode 100644
index 2650db500..64bf5bc59
--- a/internal/sdk/pkg/models/shared/sourcemyhoursputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemyhoursputrequest.go
@@ -7,3 +7,24 @@ type SourceMyHoursPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMyHoursPutRequest) GetConfiguration() SourceMyHoursUpdate {
+ if o == nil {
+ return SourceMyHoursUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMyHoursPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMyHoursPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemyhoursupdate.go b/internal/sdk/pkg/models/shared/sourcemyhoursupdate.go
old mode 100755
new mode 100644
index ca86742ad..22167381d
--- a/internal/sdk/pkg/models/shared/sourcemyhoursupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemyhoursupdate.go
@@ -2,13 +2,56 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceMyHoursUpdate struct {
// Your My Hours username
Email string `json:"email"`
// Pagination size used for retrieving logs in days
- LogsBatchSize *int64 `json:"logs_batch_size,omitempty"`
+ LogsBatchSize *int64 `default:"30" json:"logs_batch_size"`
// The password associated to the username
Password string `json:"password"`
// Start date for collecting time logs
StartDate string `json:"start_date"`
}
+
+func (s SourceMyHoursUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMyHoursUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMyHoursUpdate) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+func (o *SourceMyHoursUpdate) GetLogsBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LogsBatchSize
+}
+
+func (o *SourceMyHoursUpdate) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceMyHoursUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemysql.go b/internal/sdk/pkg/models/shared/sourcemysql.go
old mode 100755
new mode 100644
index 43b166b83..10da141c2
--- a/internal/sdk/pkg/models/shared/sourcemysql.go
+++ b/internal/sdk/pkg/models/shared/sourcemysql.go
@@ -3,124 +3,163 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMysqlUpdateMethodScanChangesWithUserDefinedCursorMethod string
+type SourceMysqlSchemasMethod string
const (
- SourceMysqlUpdateMethodScanChangesWithUserDefinedCursorMethodStandard SourceMysqlUpdateMethodScanChangesWithUserDefinedCursorMethod = "STANDARD"
+ SourceMysqlSchemasMethodStandard SourceMysqlSchemasMethod = "STANDARD"
)
-func (e SourceMysqlUpdateMethodScanChangesWithUserDefinedCursorMethod) ToPointer() *SourceMysqlUpdateMethodScanChangesWithUserDefinedCursorMethod {
+func (e SourceMysqlSchemasMethod) ToPointer() *SourceMysqlSchemasMethod {
return &e
}
-func (e *SourceMysqlUpdateMethodScanChangesWithUserDefinedCursorMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMysqlSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "STANDARD":
- *e = SourceMysqlUpdateMethodScanChangesWithUserDefinedCursorMethod(v)
+ *e = SourceMysqlSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMysqlUpdateMethodScanChangesWithUserDefinedCursorMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMysqlSchemasMethod: %v", v)
}
}
-// SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
-type SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor struct {
- Method SourceMysqlUpdateMethodScanChangesWithUserDefinedCursorMethod `json:"method"`
+// SourceMysqlScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
+type SourceMysqlScanChangesWithUserDefinedCursor struct {
+ method SourceMysqlSchemasMethod `const:"STANDARD" json:"method"`
}
-type SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDCMethod string
+func (s SourceMysqlScanChangesWithUserDefinedCursor) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlScanChangesWithUserDefinedCursor) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlScanChangesWithUserDefinedCursor) GetMethod() SourceMysqlSchemasMethod {
+ return SourceMysqlSchemasMethodStandard
+}
+
+type SourceMysqlMethod string
const (
- SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDCMethodCdc SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDCMethod = "CDC"
+ SourceMysqlMethodCdc SourceMysqlMethod = "CDC"
)
-func (e SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDCMethod) ToPointer() *SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDCMethod {
+func (e SourceMysqlMethod) ToPointer() *SourceMysqlMethod {
return &e
}
-func (e *SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDCMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMysqlMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CDC":
- *e = SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDCMethod(v)
+ *e = SourceMysqlMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDCMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMysqlMethod: %v", v)
}
}
-// SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database.
-type SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC struct {
+// SourceMysqlReadChangesUsingBinaryLogCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database.
+type SourceMysqlReadChangesUsingBinaryLogCDC struct {
// The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
- InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"`
- Method SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDCMethod `json:"method"`
+ InitialWaitingSeconds *int64 `default:"300" json:"initial_waiting_seconds"`
+ method SourceMysqlMethod `const:"CDC" json:"method"`
// Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.
ServerTimeZone *string `json:"server_time_zone,omitempty"`
}
+func (s SourceMysqlReadChangesUsingBinaryLogCDC) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlReadChangesUsingBinaryLogCDC) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlReadChangesUsingBinaryLogCDC) GetInitialWaitingSeconds() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.InitialWaitingSeconds
+}
+
+func (o *SourceMysqlReadChangesUsingBinaryLogCDC) GetMethod() SourceMysqlMethod {
+ return SourceMysqlMethodCdc
+}
+
+func (o *SourceMysqlReadChangesUsingBinaryLogCDC) GetServerTimeZone() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ServerTimeZone
+}
+
type SourceMysqlUpdateMethodType string
const (
- SourceMysqlUpdateMethodTypeSourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC SourceMysqlUpdateMethodType = "source-mysql_Update Method_Read Changes using Binary Log (CDC)"
- SourceMysqlUpdateMethodTypeSourceMysqlUpdateMethodScanChangesWithUserDefinedCursor SourceMysqlUpdateMethodType = "source-mysql_Update Method_Scan Changes with User Defined Cursor"
+ SourceMysqlUpdateMethodTypeSourceMysqlReadChangesUsingBinaryLogCDC SourceMysqlUpdateMethodType = "source-mysql_Read Changes using Binary Log (CDC)"
+ SourceMysqlUpdateMethodTypeSourceMysqlScanChangesWithUserDefinedCursor SourceMysqlUpdateMethodType = "source-mysql_Scan Changes with User Defined Cursor"
)
type SourceMysqlUpdateMethod struct {
- SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC *SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC
- SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor *SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor
+ SourceMysqlReadChangesUsingBinaryLogCDC *SourceMysqlReadChangesUsingBinaryLogCDC
+ SourceMysqlScanChangesWithUserDefinedCursor *SourceMysqlScanChangesWithUserDefinedCursor
Type SourceMysqlUpdateMethodType
}
-func CreateSourceMysqlUpdateMethodSourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC(sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC) SourceMysqlUpdateMethod {
- typ := SourceMysqlUpdateMethodTypeSourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC
+func CreateSourceMysqlUpdateMethodSourceMysqlReadChangesUsingBinaryLogCDC(sourceMysqlReadChangesUsingBinaryLogCDC SourceMysqlReadChangesUsingBinaryLogCDC) SourceMysqlUpdateMethod {
+ typ := SourceMysqlUpdateMethodTypeSourceMysqlReadChangesUsingBinaryLogCDC
return SourceMysqlUpdateMethod{
- SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC: &sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC,
- Type: typ,
+ SourceMysqlReadChangesUsingBinaryLogCDC: &sourceMysqlReadChangesUsingBinaryLogCDC,
+ Type: typ,
}
}
-func CreateSourceMysqlUpdateMethodSourceMysqlUpdateMethodScanChangesWithUserDefinedCursor(sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor) SourceMysqlUpdateMethod {
- typ := SourceMysqlUpdateMethodTypeSourceMysqlUpdateMethodScanChangesWithUserDefinedCursor
+func CreateSourceMysqlUpdateMethodSourceMysqlScanChangesWithUserDefinedCursor(sourceMysqlScanChangesWithUserDefinedCursor SourceMysqlScanChangesWithUserDefinedCursor) SourceMysqlUpdateMethod {
+ typ := SourceMysqlUpdateMethodTypeSourceMysqlScanChangesWithUserDefinedCursor
return SourceMysqlUpdateMethod{
- SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor: &sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor,
+ SourceMysqlScanChangesWithUserDefinedCursor: &sourceMysqlScanChangesWithUserDefinedCursor,
Type: typ,
}
}
func (u *SourceMysqlUpdateMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor := new(SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor); err == nil {
- u.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor = sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor
- u.Type = SourceMysqlUpdateMethodTypeSourceMysqlUpdateMethodScanChangesWithUserDefinedCursor
+ sourceMysqlScanChangesWithUserDefinedCursor := new(SourceMysqlScanChangesWithUserDefinedCursor)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlScanChangesWithUserDefinedCursor, "", true, true); err == nil {
+ u.SourceMysqlScanChangesWithUserDefinedCursor = sourceMysqlScanChangesWithUserDefinedCursor
+ u.Type = SourceMysqlUpdateMethodTypeSourceMysqlScanChangesWithUserDefinedCursor
return nil
}
- sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC := new(SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC); err == nil {
- u.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC = sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC
- u.Type = SourceMysqlUpdateMethodTypeSourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC
+ sourceMysqlReadChangesUsingBinaryLogCDC := new(SourceMysqlReadChangesUsingBinaryLogCDC)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlReadChangesUsingBinaryLogCDC, "", true, true); err == nil {
+ u.SourceMysqlReadChangesUsingBinaryLogCDC = sourceMysqlReadChangesUsingBinaryLogCDC
+ u.Type = SourceMysqlUpdateMethodTypeSourceMysqlReadChangesUsingBinaryLogCDC
return nil
}
@@ -128,15 +167,15 @@ func (u *SourceMysqlUpdateMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceMysqlUpdateMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor != nil {
- return json.Marshal(u.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor)
+ if u.SourceMysqlReadChangesUsingBinaryLogCDC != nil {
+ return utils.MarshalJSON(u.SourceMysqlReadChangesUsingBinaryLogCDC, "", true)
}
- if u.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC != nil {
- return json.Marshal(u.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC)
+ if u.SourceMysqlScanChangesWithUserDefinedCursor != nil {
+ return utils.MarshalJSON(u.SourceMysqlScanChangesWithUserDefinedCursor, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceMysqlMysql string
@@ -163,32 +202,32 @@ func (e *SourceMysqlMysql) UnmarshalJSON(data []byte) error {
}
}
-type SourceMysqlSSLModesVerifyIdentityMode string
+type SourceMysqlSchemasSSLModeSSLModesMode string
const (
- SourceMysqlSSLModesVerifyIdentityModeVerifyIdentity SourceMysqlSSLModesVerifyIdentityMode = "verify_identity"
+ SourceMysqlSchemasSSLModeSSLModesModeVerifyIdentity SourceMysqlSchemasSSLModeSSLModesMode = "verify_identity"
)
-func (e SourceMysqlSSLModesVerifyIdentityMode) ToPointer() *SourceMysqlSSLModesVerifyIdentityMode {
+func (e SourceMysqlSchemasSSLModeSSLModesMode) ToPointer() *SourceMysqlSchemasSSLModeSSLModesMode {
return &e
}
-func (e *SourceMysqlSSLModesVerifyIdentityMode) UnmarshalJSON(data []byte) error {
+func (e *SourceMysqlSchemasSSLModeSSLModesMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify_identity":
- *e = SourceMysqlSSLModesVerifyIdentityMode(v)
+ *e = SourceMysqlSchemasSSLModeSSLModesMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMysqlSSLModesVerifyIdentityMode: %v", v)
+ return fmt.Errorf("invalid value for SourceMysqlSchemasSSLModeSSLModesMode: %v", v)
}
}
-// SourceMysqlSSLModesVerifyIdentity - Always connect with SSL. Verify both CA and Hostname.
-type SourceMysqlSSLModesVerifyIdentity struct {
+// SourceMysqlVerifyIdentity - Always connect with SSL. Verify both CA and Hostname.
+type SourceMysqlVerifyIdentity struct {
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
@@ -197,35 +236,78 @@ type SourceMysqlSSLModesVerifyIdentity struct {
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourceMysqlSSLModesVerifyIdentityMode `json:"mode"`
+ mode SourceMysqlSchemasSSLModeSSLModesMode `const:"verify_identity" json:"mode"`
+}
+
+func (s SourceMysqlVerifyIdentity) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlVerifyIdentity) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
}
-type SourceMysqlSSLModesVerifyCAMode string
+func (o *SourceMysqlVerifyIdentity) GetCaCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.CaCertificate
+}
+
+func (o *SourceMysqlVerifyIdentity) GetClientCertificate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientCertificate
+}
+
+func (o *SourceMysqlVerifyIdentity) GetClientKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKey
+}
+
+func (o *SourceMysqlVerifyIdentity) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKeyPassword
+}
+
+func (o *SourceMysqlVerifyIdentity) GetMode() SourceMysqlSchemasSSLModeSSLModesMode {
+ return SourceMysqlSchemasSSLModeSSLModesModeVerifyIdentity
+}
+
+type SourceMysqlSchemasSslModeMode string
const (
- SourceMysqlSSLModesVerifyCAModeVerifyCa SourceMysqlSSLModesVerifyCAMode = "verify_ca"
+ SourceMysqlSchemasSslModeModeVerifyCa SourceMysqlSchemasSslModeMode = "verify_ca"
)
-func (e SourceMysqlSSLModesVerifyCAMode) ToPointer() *SourceMysqlSSLModesVerifyCAMode {
+func (e SourceMysqlSchemasSslModeMode) ToPointer() *SourceMysqlSchemasSslModeMode {
return &e
}
-func (e *SourceMysqlSSLModesVerifyCAMode) UnmarshalJSON(data []byte) error {
+func (e *SourceMysqlSchemasSslModeMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify_ca":
- *e = SourceMysqlSSLModesVerifyCAMode(v)
+ *e = SourceMysqlSchemasSslModeMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMysqlSSLModesVerifyCAMode: %v", v)
+ return fmt.Errorf("invalid value for SourceMysqlSchemasSslModeMode: %v", v)
}
}
-// SourceMysqlSSLModesVerifyCA - Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match.
-type SourceMysqlSSLModesVerifyCA struct {
+// SourceMysqlVerifyCA - Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match.
+type SourceMysqlVerifyCA struct {
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
@@ -233,158 +315,222 @@ type SourceMysqlSSLModesVerifyCA struct {
// Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourceMysqlSSLModesVerifyCAMode `json:"mode"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode SourceMysqlSchemasSslModeMode `const:"verify_ca" json:"mode"`
+}
+
+func (s SourceMysqlVerifyCA) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlVerifyCA) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
}
-type SourceMysqlSSLModesRequiredMode string
+func (o *SourceMysqlVerifyCA) GetCaCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.CaCertificate
+}
+
+func (o *SourceMysqlVerifyCA) GetClientCertificate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientCertificate
+}
+
+func (o *SourceMysqlVerifyCA) GetClientKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKey
+}
+
+func (o *SourceMysqlVerifyCA) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKeyPassword
+}
+
+func (o *SourceMysqlVerifyCA) GetMode() SourceMysqlSchemasSslModeMode {
+ return SourceMysqlSchemasSslModeModeVerifyCa
+}
+
+type SourceMysqlSchemasMode string
const (
- SourceMysqlSSLModesRequiredModeRequired SourceMysqlSSLModesRequiredMode = "required"
+ SourceMysqlSchemasModeRequired SourceMysqlSchemasMode = "required"
)
-func (e SourceMysqlSSLModesRequiredMode) ToPointer() *SourceMysqlSSLModesRequiredMode {
+func (e SourceMysqlSchemasMode) ToPointer() *SourceMysqlSchemasMode {
return &e
}
-func (e *SourceMysqlSSLModesRequiredMode) UnmarshalJSON(data []byte) error {
+func (e *SourceMysqlSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "required":
- *e = SourceMysqlSSLModesRequiredMode(v)
+ *e = SourceMysqlSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMysqlSSLModesRequiredMode: %v", v)
+ return fmt.Errorf("invalid value for SourceMysqlSchemasMode: %v", v)
}
}
-// SourceMysqlSSLModesRequired - Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified.
-type SourceMysqlSSLModesRequired struct {
- Mode SourceMysqlSSLModesRequiredMode `json:"mode"`
+// SourceMysqlRequired - Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified.
+type SourceMysqlRequired struct {
+ mode SourceMysqlSchemasMode `const:"required" json:"mode"`
}
-type SourceMysqlSSLModesPreferredMode string
+func (s SourceMysqlRequired) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlRequired) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlRequired) GetMode() SourceMysqlSchemasMode {
+ return SourceMysqlSchemasModeRequired
+}
+
+type SourceMysqlMode string
const (
- SourceMysqlSSLModesPreferredModePreferred SourceMysqlSSLModesPreferredMode = "preferred"
+ SourceMysqlModePreferred SourceMysqlMode = "preferred"
)
-func (e SourceMysqlSSLModesPreferredMode) ToPointer() *SourceMysqlSSLModesPreferredMode {
+func (e SourceMysqlMode) ToPointer() *SourceMysqlMode {
return &e
}
-func (e *SourceMysqlSSLModesPreferredMode) UnmarshalJSON(data []byte) error {
+func (e *SourceMysqlMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "preferred":
- *e = SourceMysqlSSLModesPreferredMode(v)
+ *e = SourceMysqlMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMysqlSSLModesPreferredMode: %v", v)
+ return fmt.Errorf("invalid value for SourceMysqlMode: %v", v)
+ }
+}
+
+// SourceMysqlPreferred - Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection.
+type SourceMysqlPreferred struct {
+ mode SourceMysqlMode `const:"preferred" json:"mode"`
+}
+
+func (s SourceMysqlPreferred) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlPreferred) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceMysqlSSLModesPreferred - Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection.
-type SourceMysqlSSLModesPreferred struct {
- Mode SourceMysqlSSLModesPreferredMode `json:"mode"`
+func (o *SourceMysqlPreferred) GetMode() SourceMysqlMode {
+ return SourceMysqlModePreferred
}
type SourceMysqlSSLModesType string
const (
- SourceMysqlSSLModesTypeSourceMysqlSSLModesPreferred SourceMysqlSSLModesType = "source-mysql_SSL modes_preferred"
- SourceMysqlSSLModesTypeSourceMysqlSSLModesRequired SourceMysqlSSLModesType = "source-mysql_SSL modes_required"
- SourceMysqlSSLModesTypeSourceMysqlSSLModesVerifyCA SourceMysqlSSLModesType = "source-mysql_SSL modes_Verify CA"
- SourceMysqlSSLModesTypeSourceMysqlSSLModesVerifyIdentity SourceMysqlSSLModesType = "source-mysql_SSL modes_Verify Identity"
+ SourceMysqlSSLModesTypeSourceMysqlPreferred SourceMysqlSSLModesType = "source-mysql_preferred"
+ SourceMysqlSSLModesTypeSourceMysqlRequired SourceMysqlSSLModesType = "source-mysql_required"
+ SourceMysqlSSLModesTypeSourceMysqlVerifyCA SourceMysqlSSLModesType = "source-mysql_Verify CA"
+ SourceMysqlSSLModesTypeSourceMysqlVerifyIdentity SourceMysqlSSLModesType = "source-mysql_Verify Identity"
)
type SourceMysqlSSLModes struct {
- SourceMysqlSSLModesPreferred *SourceMysqlSSLModesPreferred
- SourceMysqlSSLModesRequired *SourceMysqlSSLModesRequired
- SourceMysqlSSLModesVerifyCA *SourceMysqlSSLModesVerifyCA
- SourceMysqlSSLModesVerifyIdentity *SourceMysqlSSLModesVerifyIdentity
+ SourceMysqlPreferred *SourceMysqlPreferred
+ SourceMysqlRequired *SourceMysqlRequired
+ SourceMysqlVerifyCA *SourceMysqlVerifyCA
+ SourceMysqlVerifyIdentity *SourceMysqlVerifyIdentity
Type SourceMysqlSSLModesType
}
-func CreateSourceMysqlSSLModesSourceMysqlSSLModesPreferred(sourceMysqlSSLModesPreferred SourceMysqlSSLModesPreferred) SourceMysqlSSLModes {
- typ := SourceMysqlSSLModesTypeSourceMysqlSSLModesPreferred
+func CreateSourceMysqlSSLModesSourceMysqlPreferred(sourceMysqlPreferred SourceMysqlPreferred) SourceMysqlSSLModes {
+ typ := SourceMysqlSSLModesTypeSourceMysqlPreferred
return SourceMysqlSSLModes{
- SourceMysqlSSLModesPreferred: &sourceMysqlSSLModesPreferred,
- Type: typ,
+ SourceMysqlPreferred: &sourceMysqlPreferred,
+ Type: typ,
}
}
-func CreateSourceMysqlSSLModesSourceMysqlSSLModesRequired(sourceMysqlSSLModesRequired SourceMysqlSSLModesRequired) SourceMysqlSSLModes {
- typ := SourceMysqlSSLModesTypeSourceMysqlSSLModesRequired
+func CreateSourceMysqlSSLModesSourceMysqlRequired(sourceMysqlRequired SourceMysqlRequired) SourceMysqlSSLModes {
+ typ := SourceMysqlSSLModesTypeSourceMysqlRequired
return SourceMysqlSSLModes{
- SourceMysqlSSLModesRequired: &sourceMysqlSSLModesRequired,
- Type: typ,
+ SourceMysqlRequired: &sourceMysqlRequired,
+ Type: typ,
}
}
-func CreateSourceMysqlSSLModesSourceMysqlSSLModesVerifyCA(sourceMysqlSSLModesVerifyCA SourceMysqlSSLModesVerifyCA) SourceMysqlSSLModes {
- typ := SourceMysqlSSLModesTypeSourceMysqlSSLModesVerifyCA
+func CreateSourceMysqlSSLModesSourceMysqlVerifyCA(sourceMysqlVerifyCA SourceMysqlVerifyCA) SourceMysqlSSLModes {
+ typ := SourceMysqlSSLModesTypeSourceMysqlVerifyCA
return SourceMysqlSSLModes{
- SourceMysqlSSLModesVerifyCA: &sourceMysqlSSLModesVerifyCA,
- Type: typ,
+ SourceMysqlVerifyCA: &sourceMysqlVerifyCA,
+ Type: typ,
}
}
-func CreateSourceMysqlSSLModesSourceMysqlSSLModesVerifyIdentity(sourceMysqlSSLModesVerifyIdentity SourceMysqlSSLModesVerifyIdentity) SourceMysqlSSLModes {
- typ := SourceMysqlSSLModesTypeSourceMysqlSSLModesVerifyIdentity
+func CreateSourceMysqlSSLModesSourceMysqlVerifyIdentity(sourceMysqlVerifyIdentity SourceMysqlVerifyIdentity) SourceMysqlSSLModes {
+ typ := SourceMysqlSSLModesTypeSourceMysqlVerifyIdentity
return SourceMysqlSSLModes{
- SourceMysqlSSLModesVerifyIdentity: &sourceMysqlSSLModesVerifyIdentity,
- Type: typ,
+ SourceMysqlVerifyIdentity: &sourceMysqlVerifyIdentity,
+ Type: typ,
}
}
func (u *SourceMysqlSSLModes) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- sourceMysqlSSLModesPreferred := new(SourceMysqlSSLModesPreferred)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlSSLModesPreferred); err == nil {
- u.SourceMysqlSSLModesPreferred = sourceMysqlSSLModesPreferred
- u.Type = SourceMysqlSSLModesTypeSourceMysqlSSLModesPreferred
+ sourceMysqlPreferred := new(SourceMysqlPreferred)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlPreferred, "", true, true); err == nil {
+ u.SourceMysqlPreferred = sourceMysqlPreferred
+ u.Type = SourceMysqlSSLModesTypeSourceMysqlPreferred
return nil
}
- sourceMysqlSSLModesRequired := new(SourceMysqlSSLModesRequired)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlSSLModesRequired); err == nil {
- u.SourceMysqlSSLModesRequired = sourceMysqlSSLModesRequired
- u.Type = SourceMysqlSSLModesTypeSourceMysqlSSLModesRequired
+ sourceMysqlRequired := new(SourceMysqlRequired)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlRequired, "", true, true); err == nil {
+ u.SourceMysqlRequired = sourceMysqlRequired
+ u.Type = SourceMysqlSSLModesTypeSourceMysqlRequired
return nil
}
- sourceMysqlSSLModesVerifyCA := new(SourceMysqlSSLModesVerifyCA)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlSSLModesVerifyCA); err == nil {
- u.SourceMysqlSSLModesVerifyCA = sourceMysqlSSLModesVerifyCA
- u.Type = SourceMysqlSSLModesTypeSourceMysqlSSLModesVerifyCA
+ sourceMysqlVerifyCA := new(SourceMysqlVerifyCA)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlVerifyCA, "", true, true); err == nil {
+ u.SourceMysqlVerifyCA = sourceMysqlVerifyCA
+ u.Type = SourceMysqlSSLModesTypeSourceMysqlVerifyCA
return nil
}
- sourceMysqlSSLModesVerifyIdentity := new(SourceMysqlSSLModesVerifyIdentity)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlSSLModesVerifyIdentity); err == nil {
- u.SourceMysqlSSLModesVerifyIdentity = sourceMysqlSSLModesVerifyIdentity
- u.Type = SourceMysqlSSLModesTypeSourceMysqlSSLModesVerifyIdentity
+ sourceMysqlVerifyIdentity := new(SourceMysqlVerifyIdentity)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlVerifyIdentity, "", true, true); err == nil {
+ u.SourceMysqlVerifyIdentity = sourceMysqlVerifyIdentity
+ u.Type = SourceMysqlSSLModesTypeSourceMysqlVerifyIdentity
return nil
}
@@ -392,204 +538,298 @@ func (u *SourceMysqlSSLModes) UnmarshalJSON(data []byte) error {
}
func (u SourceMysqlSSLModes) MarshalJSON() ([]byte, error) {
- if u.SourceMysqlSSLModesPreferred != nil {
- return json.Marshal(u.SourceMysqlSSLModesPreferred)
+ if u.SourceMysqlPreferred != nil {
+ return utils.MarshalJSON(u.SourceMysqlPreferred, "", true)
}
- if u.SourceMysqlSSLModesRequired != nil {
- return json.Marshal(u.SourceMysqlSSLModesRequired)
+ if u.SourceMysqlRequired != nil {
+ return utils.MarshalJSON(u.SourceMysqlRequired, "", true)
}
- if u.SourceMysqlSSLModesVerifyCA != nil {
- return json.Marshal(u.SourceMysqlSSLModesVerifyCA)
+ if u.SourceMysqlVerifyCA != nil {
+ return utils.MarshalJSON(u.SourceMysqlVerifyCA, "", true)
}
- if u.SourceMysqlSSLModesVerifyIdentity != nil {
- return json.Marshal(u.SourceMysqlSSLModesVerifyIdentity)
+ if u.SourceMysqlVerifyIdentity != nil {
+ return utils.MarshalJSON(u.SourceMysqlVerifyIdentity, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type SourceMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// SourceMysqlSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type SourceMysqlSchemasTunnelMethodTunnelMethod string
const (
- SourceMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth SourceMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ SourceMysqlSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourceMysqlSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourceMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *SourceMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e SourceMysqlSchemasTunnelMethodTunnelMethod) ToPointer() *SourceMysqlSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *SourceMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMysqlSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourceMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = SourceMysqlSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMysqlSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// SourceMysqlSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMysqlSSHTunnelMethodPasswordAuthentication struct {
+// SourceMysqlPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMysqlPasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourceMysqlSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMysqlSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourceMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourceMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourceMysqlPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlPasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceMysqlPasswordAuthentication) GetTunnelMethod() SourceMysqlSchemasTunnelMethodTunnelMethod {
+ return SourceMysqlSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourceMysqlPasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceMysqlPasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourceMysqlPasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// SourceMysqlSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type SourceMysqlSchemasTunnelMethod string
const (
- SourceMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth SourceMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ SourceMysqlSchemasTunnelMethodSSHKeyAuth SourceMysqlSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e SourceMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *SourceMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e SourceMysqlSchemasTunnelMethod) ToPointer() *SourceMysqlSchemasTunnelMethod {
return &e
}
-func (e *SourceMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMysqlSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourceMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = SourceMysqlSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMysqlSchemasTunnelMethod: %v", v)
}
}
-// SourceMysqlSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMysqlSSHTunnelMethodSSHKeyAuthentication struct {
+// SourceMysqlSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMysqlSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourceMysqlSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMysqlSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourceMysqlSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourceMysqlSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourceMysqlSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourceMysqlSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceMysqlSSHKeyAuthentication) GetTunnelMethod() SourceMysqlSchemasTunnelMethod {
+ return SourceMysqlSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourceMysqlSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceMysqlSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourceMysqlTunnelMethod - No ssh tunnel needed to connect to database
+type SourceMysqlTunnelMethod string
const (
- SourceMysqlSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourceMysqlSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourceMysqlTunnelMethodNoTunnel SourceMysqlTunnelMethod = "NO_TUNNEL"
)
-func (e SourceMysqlSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourceMysqlSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourceMysqlTunnelMethod) ToPointer() *SourceMysqlTunnelMethod {
return &e
}
-func (e *SourceMysqlSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMysqlTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourceMysqlSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourceMysqlTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMysqlSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMysqlTunnelMethod: %v", v)
}
}
-// SourceMysqlSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMysqlSSHTunnelMethodNoTunnel struct {
+// SourceMysqlNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMysqlNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourceMysqlSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMysqlTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourceMysqlNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlNoTunnel) GetTunnelMethod() SourceMysqlTunnelMethod {
+ return SourceMysqlTunnelMethodNoTunnel
}
type SourceMysqlSSHTunnelMethodType string
const (
- SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHTunnelMethodNoTunnel SourceMysqlSSHTunnelMethodType = "source-mysql_SSH Tunnel Method_No Tunnel"
- SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHTunnelMethodSSHKeyAuthentication SourceMysqlSSHTunnelMethodType = "source-mysql_SSH Tunnel Method_SSH Key Authentication"
- SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHTunnelMethodPasswordAuthentication SourceMysqlSSHTunnelMethodType = "source-mysql_SSH Tunnel Method_Password Authentication"
+ SourceMysqlSSHTunnelMethodTypeSourceMysqlNoTunnel SourceMysqlSSHTunnelMethodType = "source-mysql_No Tunnel"
+ SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHKeyAuthentication SourceMysqlSSHTunnelMethodType = "source-mysql_SSH Key Authentication"
+ SourceMysqlSSHTunnelMethodTypeSourceMysqlPasswordAuthentication SourceMysqlSSHTunnelMethodType = "source-mysql_Password Authentication"
)
type SourceMysqlSSHTunnelMethod struct {
- SourceMysqlSSHTunnelMethodNoTunnel *SourceMysqlSSHTunnelMethodNoTunnel
- SourceMysqlSSHTunnelMethodSSHKeyAuthentication *SourceMysqlSSHTunnelMethodSSHKeyAuthentication
- SourceMysqlSSHTunnelMethodPasswordAuthentication *SourceMysqlSSHTunnelMethodPasswordAuthentication
+ SourceMysqlNoTunnel *SourceMysqlNoTunnel
+ SourceMysqlSSHKeyAuthentication *SourceMysqlSSHKeyAuthentication
+ SourceMysqlPasswordAuthentication *SourceMysqlPasswordAuthentication
Type SourceMysqlSSHTunnelMethodType
}
-func CreateSourceMysqlSSHTunnelMethodSourceMysqlSSHTunnelMethodNoTunnel(sourceMysqlSSHTunnelMethodNoTunnel SourceMysqlSSHTunnelMethodNoTunnel) SourceMysqlSSHTunnelMethod {
- typ := SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHTunnelMethodNoTunnel
+func CreateSourceMysqlSSHTunnelMethodSourceMysqlNoTunnel(sourceMysqlNoTunnel SourceMysqlNoTunnel) SourceMysqlSSHTunnelMethod {
+ typ := SourceMysqlSSHTunnelMethodTypeSourceMysqlNoTunnel
return SourceMysqlSSHTunnelMethod{
- SourceMysqlSSHTunnelMethodNoTunnel: &sourceMysqlSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourceMysqlNoTunnel: &sourceMysqlNoTunnel,
+ Type: typ,
}
}
-func CreateSourceMysqlSSHTunnelMethodSourceMysqlSSHTunnelMethodSSHKeyAuthentication(sourceMysqlSSHTunnelMethodSSHKeyAuthentication SourceMysqlSSHTunnelMethodSSHKeyAuthentication) SourceMysqlSSHTunnelMethod {
- typ := SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHTunnelMethodSSHKeyAuthentication
+func CreateSourceMysqlSSHTunnelMethodSourceMysqlSSHKeyAuthentication(sourceMysqlSSHKeyAuthentication SourceMysqlSSHKeyAuthentication) SourceMysqlSSHTunnelMethod {
+ typ := SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHKeyAuthentication
return SourceMysqlSSHTunnelMethod{
- SourceMysqlSSHTunnelMethodSSHKeyAuthentication: &sourceMysqlSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ SourceMysqlSSHKeyAuthentication: &sourceMysqlSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateSourceMysqlSSHTunnelMethodSourceMysqlSSHTunnelMethodPasswordAuthentication(sourceMysqlSSHTunnelMethodPasswordAuthentication SourceMysqlSSHTunnelMethodPasswordAuthentication) SourceMysqlSSHTunnelMethod {
- typ := SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHTunnelMethodPasswordAuthentication
+func CreateSourceMysqlSSHTunnelMethodSourceMysqlPasswordAuthentication(sourceMysqlPasswordAuthentication SourceMysqlPasswordAuthentication) SourceMysqlSSHTunnelMethod {
+ typ := SourceMysqlSSHTunnelMethodTypeSourceMysqlPasswordAuthentication
return SourceMysqlSSHTunnelMethod{
- SourceMysqlSSHTunnelMethodPasswordAuthentication: &sourceMysqlSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ SourceMysqlPasswordAuthentication: &sourceMysqlPasswordAuthentication,
+ Type: typ,
}
}
func (u *SourceMysqlSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- sourceMysqlSSHTunnelMethodNoTunnel := new(SourceMysqlSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlSSHTunnelMethodNoTunnel); err == nil {
- u.SourceMysqlSSHTunnelMethodNoTunnel = sourceMysqlSSHTunnelMethodNoTunnel
- u.Type = SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHTunnelMethodNoTunnel
+ sourceMysqlNoTunnel := new(SourceMysqlNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlNoTunnel, "", true, true); err == nil {
+ u.SourceMysqlNoTunnel = sourceMysqlNoTunnel
+ u.Type = SourceMysqlSSHTunnelMethodTypeSourceMysqlNoTunnel
return nil
}
- sourceMysqlSSHTunnelMethodSSHKeyAuthentication := new(SourceMysqlSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourceMysqlSSHTunnelMethodSSHKeyAuthentication = sourceMysqlSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHTunnelMethodSSHKeyAuthentication
+ sourceMysqlSSHKeyAuthentication := new(SourceMysqlSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceMysqlSSHKeyAuthentication = sourceMysqlSSHKeyAuthentication
+ u.Type = SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHKeyAuthentication
return nil
}
- sourceMysqlSSHTunnelMethodPasswordAuthentication := new(SourceMysqlSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourceMysqlSSHTunnelMethodPasswordAuthentication = sourceMysqlSSHTunnelMethodPasswordAuthentication
- u.Type = SourceMysqlSSHTunnelMethodTypeSourceMysqlSSHTunnelMethodPasswordAuthentication
+ sourceMysqlPasswordAuthentication := new(SourceMysqlPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlPasswordAuthentication, "", true, true); err == nil {
+ u.SourceMysqlPasswordAuthentication = sourceMysqlPasswordAuthentication
+ u.Type = SourceMysqlSSHTunnelMethodTypeSourceMysqlPasswordAuthentication
return nil
}
@@ -597,19 +837,19 @@ func (u *SourceMysqlSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceMysqlSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMysqlSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourceMysqlSSHTunnelMethodNoTunnel)
+ if u.SourceMysqlNoTunnel != nil {
+ return utils.MarshalJSON(u.SourceMysqlNoTunnel, "", true)
}
- if u.SourceMysqlSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceMysqlSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourceMysqlSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceMysqlSSHKeyAuthentication, "", true)
}
- if u.SourceMysqlSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourceMysqlSSHTunnelMethodPasswordAuthentication)
+ if u.SourceMysqlPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceMysqlPasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceMysql struct {
@@ -622,10 +862,10 @@ type SourceMysql struct {
// The password associated with the username.
Password *string `json:"password,omitempty"`
// The port to connect to.
- Port int64 `json:"port"`
+ Port *int64 `default:"3306" json:"port"`
// Configures how data is extracted from the database.
ReplicationMethod SourceMysqlUpdateMethod `json:"replication_method"`
- SourceType SourceMysqlMysql `json:"sourceType"`
+ sourceType SourceMysqlMysql `const:"mysql" json:"sourceType"`
// SSL connection modes. Read more in the docs.
SslMode *SourceMysqlSSLModes `json:"ssl_mode,omitempty"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
@@ -633,3 +873,81 @@ type SourceMysql struct {
// The username which is used to access the database.
Username string `json:"username"`
}
+
+func (s SourceMysql) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysql) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysql) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceMysql) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceMysql) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceMysql) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceMysql) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceMysql) GetReplicationMethod() SourceMysqlUpdateMethod {
+ if o == nil {
+ return SourceMysqlUpdateMethod{}
+ }
+ return o.ReplicationMethod
+}
+
+func (o *SourceMysql) GetSourceType() SourceMysqlMysql {
+ return SourceMysqlMysqlMysql
+}
+
+func (o *SourceMysql) GetSslMode() *SourceMysqlSSLModes {
+ if o == nil {
+ return nil
+ }
+ return o.SslMode
+}
+
+func (o *SourceMysql) GetTunnelMethod() *SourceMysqlSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourceMysql) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemysqlcreaterequest.go b/internal/sdk/pkg/models/shared/sourcemysqlcreaterequest.go
old mode 100755
new mode 100644
index f06779472..4aa5c910d
--- a/internal/sdk/pkg/models/shared/sourcemysqlcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemysqlcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceMysqlCreateRequest struct {
Configuration SourceMysql `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMysqlCreateRequest) GetConfiguration() SourceMysql {
+ if o == nil {
+ return SourceMysql{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMysqlCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceMysqlCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMysqlCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceMysqlCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemysqlputrequest.go b/internal/sdk/pkg/models/shared/sourcemysqlputrequest.go
old mode 100755
new mode 100644
index 37c9ee78c..58b638944
--- a/internal/sdk/pkg/models/shared/sourcemysqlputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcemysqlputrequest.go
@@ -7,3 +7,24 @@ type SourceMysqlPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceMysqlPutRequest) GetConfiguration() SourceMysqlUpdate {
+ if o == nil {
+ return SourceMysqlUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceMysqlPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceMysqlPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcemysqlupdate.go b/internal/sdk/pkg/models/shared/sourcemysqlupdate.go
old mode 100755
new mode 100644
index 4a2b8d84b..19c2d0f5c
--- a/internal/sdk/pkg/models/shared/sourcemysqlupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcemysqlupdate.go
@@ -3,124 +3,163 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod string
+type SourceMysqlUpdateSchemasMethod string
const (
- SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethodStandard SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod = "STANDARD"
+ SourceMysqlUpdateSchemasMethodStandard SourceMysqlUpdateSchemasMethod = "STANDARD"
)
-func (e SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod) ToPointer() *SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod {
+func (e SourceMysqlUpdateSchemasMethod) ToPointer() *SourceMysqlUpdateSchemasMethod {
return &e
}
-func (e *SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMysqlUpdateSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "STANDARD":
- *e = SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod(v)
+ *e = SourceMysqlUpdateSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMysqlUpdateSchemasMethod: %v", v)
}
}
-// SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
-type SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor struct {
- Method SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod `json:"method"`
+// SourceMysqlUpdateScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
+type SourceMysqlUpdateScanChangesWithUserDefinedCursor struct {
+ method SourceMysqlUpdateSchemasMethod `const:"STANDARD" json:"method"`
}
-type SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDCMethod string
+func (s SourceMysqlUpdateScanChangesWithUserDefinedCursor) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlUpdateScanChangesWithUserDefinedCursor) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlUpdateScanChangesWithUserDefinedCursor) GetMethod() SourceMysqlUpdateSchemasMethod {
+ return SourceMysqlUpdateSchemasMethodStandard
+}
+
// SourceMysqlUpdateSchemasReplicationMethodMethod is the const discriminator of
// the CDC replication variant.
type SourceMysqlUpdateSchemasReplicationMethodMethod string

const (
	SourceMysqlUpdateSchemasReplicationMethodMethodCdc SourceMysqlUpdateSchemasReplicationMethodMethod = "CDC"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceMysqlUpdateSchemasReplicationMethodMethod) ToPointer() *SourceMysqlUpdateSchemasReplicationMethodMethod {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only the "CDC" value.
func (e *SourceMysqlUpdateSchemasReplicationMethodMethod) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "CDC" {
		return fmt.Errorf("invalid value for SourceMysqlUpdateSchemasReplicationMethodMethod: %v", raw)
	}
	*e = SourceMysqlUpdateSchemasReplicationMethodMethod(raw)
	return nil
}
-// SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database.
-type SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC struct {
+// ReadChangesUsingBinaryLogCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the MySQL binary log. This must be enabled on your database.
+type ReadChangesUsingBinaryLogCDC struct {
// The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
- InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"`
- Method SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDCMethod `json:"method"`
+ InitialWaitingSeconds *int64 `default:"300" json:"initial_waiting_seconds"`
+ method SourceMysqlUpdateSchemasReplicationMethodMethod `const:"CDC" json:"method"`
// Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.
ServerTimeZone *string `json:"server_time_zone,omitempty"`
}
+func (r ReadChangesUsingBinaryLogCDC) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(r, "", false)
+}
+
+func (r *ReadChangesUsingBinaryLogCDC) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &r, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ReadChangesUsingBinaryLogCDC) GetInitialWaitingSeconds() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.InitialWaitingSeconds
+}
+
+func (o *ReadChangesUsingBinaryLogCDC) GetMethod() SourceMysqlUpdateSchemasReplicationMethodMethod {
+ return SourceMysqlUpdateSchemasReplicationMethodMethodCdc
+}
+
+func (o *ReadChangesUsingBinaryLogCDC) GetServerTimeZone() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ServerTimeZone
+}
+
type SourceMysqlUpdateUpdateMethodType string
const (
- SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC SourceMysqlUpdateUpdateMethodType = "source-mysql-update_Update Method_Read Changes using Binary Log (CDC)"
- SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor SourceMysqlUpdateUpdateMethodType = "source-mysql-update_Update Method_Scan Changes with User Defined Cursor"
+ SourceMysqlUpdateUpdateMethodTypeReadChangesUsingBinaryLogCDC SourceMysqlUpdateUpdateMethodType = "Read Changes using Binary Log (CDC)"
+ SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateScanChangesWithUserDefinedCursor SourceMysqlUpdateUpdateMethodType = "source-mysql-update_Scan Changes with User Defined Cursor"
)
type SourceMysqlUpdateUpdateMethod struct {
- SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC *SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC
- SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor
+ ReadChangesUsingBinaryLogCDC *ReadChangesUsingBinaryLogCDC
+ SourceMysqlUpdateScanChangesWithUserDefinedCursor *SourceMysqlUpdateScanChangesWithUserDefinedCursor
Type SourceMysqlUpdateUpdateMethodType
}
-func CreateSourceMysqlUpdateUpdateMethodSourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC(sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC) SourceMysqlUpdateUpdateMethod {
- typ := SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC
+func CreateSourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC(readChangesUsingBinaryLogCDC ReadChangesUsingBinaryLogCDC) SourceMysqlUpdateUpdateMethod {
+ typ := SourceMysqlUpdateUpdateMethodTypeReadChangesUsingBinaryLogCDC
return SourceMysqlUpdateUpdateMethod{
- SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC: &sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC,
- Type: typ,
+ ReadChangesUsingBinaryLogCDC: &readChangesUsingBinaryLogCDC,
+ Type: typ,
}
}
-func CreateSourceMysqlUpdateUpdateMethodSourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor(sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor) SourceMysqlUpdateUpdateMethod {
- typ := SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor
+func CreateSourceMysqlUpdateUpdateMethodSourceMysqlUpdateScanChangesWithUserDefinedCursor(sourceMysqlUpdateScanChangesWithUserDefinedCursor SourceMysqlUpdateScanChangesWithUserDefinedCursor) SourceMysqlUpdateUpdateMethod {
+ typ := SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateScanChangesWithUserDefinedCursor
return SourceMysqlUpdateUpdateMethod{
- SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor: &sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor,
+ SourceMysqlUpdateScanChangesWithUserDefinedCursor: &sourceMysqlUpdateScanChangesWithUserDefinedCursor,
Type: typ,
}
}
func (u *SourceMysqlUpdateUpdateMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor := new(SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor); err == nil {
- u.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor = sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor
- u.Type = SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor
+ sourceMysqlUpdateScanChangesWithUserDefinedCursor := new(SourceMysqlUpdateScanChangesWithUserDefinedCursor)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlUpdateScanChangesWithUserDefinedCursor, "", true, true); err == nil {
+ u.SourceMysqlUpdateScanChangesWithUserDefinedCursor = sourceMysqlUpdateScanChangesWithUserDefinedCursor
+ u.Type = SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateScanChangesWithUserDefinedCursor
return nil
}
- sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC := new(SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC); err == nil {
- u.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC = sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC
- u.Type = SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC
+ readChangesUsingBinaryLogCDC := new(ReadChangesUsingBinaryLogCDC)
+ if err := utils.UnmarshalJSON(data, &readChangesUsingBinaryLogCDC, "", true, true); err == nil {
+ u.ReadChangesUsingBinaryLogCDC = readChangesUsingBinaryLogCDC
+ u.Type = SourceMysqlUpdateUpdateMethodTypeReadChangesUsingBinaryLogCDC
return nil
}
@@ -128,43 +167,43 @@ func (u *SourceMysqlUpdateUpdateMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceMysqlUpdateUpdateMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil {
- return json.Marshal(u.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor)
+ if u.ReadChangesUsingBinaryLogCDC != nil {
+ return utils.MarshalJSON(u.ReadChangesUsingBinaryLogCDC, "", true)
}
- if u.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC != nil {
- return json.Marshal(u.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC)
+ if u.SourceMysqlUpdateScanChangesWithUserDefinedCursor != nil {
+ return utils.MarshalJSON(u.SourceMysqlUpdateScanChangesWithUserDefinedCursor, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// SourceMysqlUpdateSchemasSSLModeSSLModesMode is the const discriminator of the
// verify-identity SSL mode.
type SourceMysqlUpdateSchemasSSLModeSSLModesMode string

const (
	SourceMysqlUpdateSchemasSSLModeSSLModesModeVerifyIdentity SourceMysqlUpdateSchemasSSLModeSSLModesMode = "verify_identity"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceMysqlUpdateSchemasSSLModeSSLModesMode) ToPointer() *SourceMysqlUpdateSchemasSSLModeSSLModesMode {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "verify_identity".
func (e *SourceMysqlUpdateSchemasSSLModeSSLModesMode) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "verify_identity" {
		return fmt.Errorf("invalid value for SourceMysqlUpdateSchemasSSLModeSSLModesMode: %v", raw)
	}
	*e = SourceMysqlUpdateSchemasSSLModeSSLModesMode(raw)
	return nil
}
-// SourceMysqlUpdateSSLModesVerifyIdentity - Always connect with SSL. Verify both CA and Hostname.
-type SourceMysqlUpdateSSLModesVerifyIdentity struct {
+// VerifyIdentity - Always connect with SSL. Verify both CA and Hostname.
+type VerifyIdentity struct {
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
@@ -173,35 +212,78 @@ type SourceMysqlUpdateSSLModesVerifyIdentity struct {
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourceMysqlUpdateSSLModesVerifyIdentityMode `json:"mode"`
+ mode SourceMysqlUpdateSchemasSSLModeSSLModesMode `const:"verify_identity" json:"mode"`
+}
+
+func (v VerifyIdentity) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(v, "", false)
+}
+
+func (v *VerifyIdentity) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &v, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *VerifyIdentity) GetCaCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.CaCertificate
+}
+
+func (o *VerifyIdentity) GetClientCertificate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientCertificate
+}
+
+func (o *VerifyIdentity) GetClientKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKey
+}
+
+func (o *VerifyIdentity) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKeyPassword
+}
+
+func (o *VerifyIdentity) GetMode() SourceMysqlUpdateSchemasSSLModeSSLModesMode {
+ return SourceMysqlUpdateSchemasSSLModeSSLModesModeVerifyIdentity
}
// SourceMysqlUpdateSchemasSslModeMode is the const discriminator of the
// verify-ca SSL mode.
type SourceMysqlUpdateSchemasSslModeMode string

const (
	SourceMysqlUpdateSchemasSslModeModeVerifyCa SourceMysqlUpdateSchemasSslModeMode = "verify_ca"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceMysqlUpdateSchemasSslModeMode) ToPointer() *SourceMysqlUpdateSchemasSslModeMode {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "verify_ca".
func (e *SourceMysqlUpdateSchemasSslModeMode) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "verify_ca" {
		return fmt.Errorf("invalid value for SourceMysqlUpdateSchemasSslModeMode: %v", raw)
	}
	*e = SourceMysqlUpdateSchemasSslModeMode(raw)
	return nil
}
-// SourceMysqlUpdateSSLModesVerifyCA - Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match.
-type SourceMysqlUpdateSSLModesVerifyCA struct {
+// SourceMysqlUpdateVerifyCA - Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match.
+type SourceMysqlUpdateVerifyCA struct {
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)
@@ -209,158 +291,222 @@ type SourceMysqlUpdateSSLModesVerifyCA struct {
// Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourceMysqlUpdateSSLModesVerifyCAMode `json:"mode"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode SourceMysqlUpdateSchemasSslModeMode `const:"verify_ca" json:"mode"`
+}
+
+func (s SourceMysqlUpdateVerifyCA) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlUpdateVerifyCA) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlUpdateVerifyCA) GetCaCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.CaCertificate
}
-type SourceMysqlUpdateSSLModesRequiredMode string
+func (o *SourceMysqlUpdateVerifyCA) GetClientCertificate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientCertificate
+}
+
+func (o *SourceMysqlUpdateVerifyCA) GetClientKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKey
+}
+
+func (o *SourceMysqlUpdateVerifyCA) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientKeyPassword
+}
+
+func (o *SourceMysqlUpdateVerifyCA) GetMode() SourceMysqlUpdateSchemasSslModeMode {
+ return SourceMysqlUpdateSchemasSslModeModeVerifyCa
+}
+
// SourceMysqlUpdateSchemasMode is the const discriminator of the required SSL mode.
type SourceMysqlUpdateSchemasMode string

const (
	SourceMysqlUpdateSchemasModeRequired SourceMysqlUpdateSchemasMode = "required"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceMysqlUpdateSchemasMode) ToPointer() *SourceMysqlUpdateSchemasMode {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "required".
func (e *SourceMysqlUpdateSchemasMode) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "required" {
		return fmt.Errorf("invalid value for SourceMysqlUpdateSchemasMode: %v", raw)
	}
	*e = SourceMysqlUpdateSchemasMode(raw)
	return nil
}
+
+// Required - Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified.
+type Required struct {
+ mode SourceMysqlUpdateSchemasMode `const:"required" json:"mode"`
+}
+
+func (r Required) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(r, "", false)
+}
+
+func (r *Required) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &r, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceMysqlUpdateSSLModesRequired - Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified.
-type SourceMysqlUpdateSSLModesRequired struct {
- Mode SourceMysqlUpdateSSLModesRequiredMode `json:"mode"`
+func (o *Required) GetMode() SourceMysqlUpdateSchemasMode {
+ return SourceMysqlUpdateSchemasModeRequired
}
// SourceMysqlUpdateMode is the const discriminator of the preferred SSL mode.
type SourceMysqlUpdateMode string

const (
	SourceMysqlUpdateModePreferred SourceMysqlUpdateMode = "preferred"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceMysqlUpdateMode) ToPointer() *SourceMysqlUpdateMode {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "preferred".
func (e *SourceMysqlUpdateMode) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "preferred" {
		return fmt.Errorf("invalid value for SourceMysqlUpdateMode: %v", raw)
	}
	*e = SourceMysqlUpdateMode(raw)
	return nil
}
-// SourceMysqlUpdateSSLModesPreferred - Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection.
-type SourceMysqlUpdateSSLModesPreferred struct {
- Mode SourceMysqlUpdateSSLModesPreferredMode `json:"mode"`
+// Preferred - Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection.
+type Preferred struct {
+ mode SourceMysqlUpdateMode `const:"preferred" json:"mode"`
+}
+
+func (p Preferred) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(p, "", false)
+}
+
+func (p *Preferred) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &p, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Preferred) GetMode() SourceMysqlUpdateMode {
+ return SourceMysqlUpdateModePreferred
}
type SourceMysqlUpdateSSLModesType string
const (
- SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesPreferred SourceMysqlUpdateSSLModesType = "source-mysql-update_SSL modes_preferred"
- SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesRequired SourceMysqlUpdateSSLModesType = "source-mysql-update_SSL modes_required"
- SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesVerifyCA SourceMysqlUpdateSSLModesType = "source-mysql-update_SSL modes_Verify CA"
- SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesVerifyIdentity SourceMysqlUpdateSSLModesType = "source-mysql-update_SSL modes_Verify Identity"
+ SourceMysqlUpdateSSLModesTypePreferred SourceMysqlUpdateSSLModesType = "preferred"
+ SourceMysqlUpdateSSLModesTypeRequired SourceMysqlUpdateSSLModesType = "required"
+ SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateVerifyCA SourceMysqlUpdateSSLModesType = "source-mysql-update_Verify CA"
+ SourceMysqlUpdateSSLModesTypeVerifyIdentity SourceMysqlUpdateSSLModesType = "Verify Identity"
)
type SourceMysqlUpdateSSLModes struct {
- SourceMysqlUpdateSSLModesPreferred *SourceMysqlUpdateSSLModesPreferred
- SourceMysqlUpdateSSLModesRequired *SourceMysqlUpdateSSLModesRequired
- SourceMysqlUpdateSSLModesVerifyCA *SourceMysqlUpdateSSLModesVerifyCA
- SourceMysqlUpdateSSLModesVerifyIdentity *SourceMysqlUpdateSSLModesVerifyIdentity
+ Preferred *Preferred
+ Required *Required
+ SourceMysqlUpdateVerifyCA *SourceMysqlUpdateVerifyCA
+ VerifyIdentity *VerifyIdentity
Type SourceMysqlUpdateSSLModesType
}
-func CreateSourceMysqlUpdateSSLModesSourceMysqlUpdateSSLModesPreferred(sourceMysqlUpdateSSLModesPreferred SourceMysqlUpdateSSLModesPreferred) SourceMysqlUpdateSSLModes {
- typ := SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesPreferred
+func CreateSourceMysqlUpdateSSLModesPreferred(preferred Preferred) SourceMysqlUpdateSSLModes {
+ typ := SourceMysqlUpdateSSLModesTypePreferred
return SourceMysqlUpdateSSLModes{
- SourceMysqlUpdateSSLModesPreferred: &sourceMysqlUpdateSSLModesPreferred,
- Type: typ,
+ Preferred: &preferred,
+ Type: typ,
}
}
-func CreateSourceMysqlUpdateSSLModesSourceMysqlUpdateSSLModesRequired(sourceMysqlUpdateSSLModesRequired SourceMysqlUpdateSSLModesRequired) SourceMysqlUpdateSSLModes {
- typ := SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesRequired
+func CreateSourceMysqlUpdateSSLModesRequired(required Required) SourceMysqlUpdateSSLModes {
+ typ := SourceMysqlUpdateSSLModesTypeRequired
return SourceMysqlUpdateSSLModes{
- SourceMysqlUpdateSSLModesRequired: &sourceMysqlUpdateSSLModesRequired,
- Type: typ,
+ Required: &required,
+ Type: typ,
}
}
-func CreateSourceMysqlUpdateSSLModesSourceMysqlUpdateSSLModesVerifyCA(sourceMysqlUpdateSSLModesVerifyCA SourceMysqlUpdateSSLModesVerifyCA) SourceMysqlUpdateSSLModes {
- typ := SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesVerifyCA
+func CreateSourceMysqlUpdateSSLModesSourceMysqlUpdateVerifyCA(sourceMysqlUpdateVerifyCA SourceMysqlUpdateVerifyCA) SourceMysqlUpdateSSLModes {
+ typ := SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateVerifyCA
return SourceMysqlUpdateSSLModes{
- SourceMysqlUpdateSSLModesVerifyCA: &sourceMysqlUpdateSSLModesVerifyCA,
- Type: typ,
+ SourceMysqlUpdateVerifyCA: &sourceMysqlUpdateVerifyCA,
+ Type: typ,
}
}
-func CreateSourceMysqlUpdateSSLModesSourceMysqlUpdateSSLModesVerifyIdentity(sourceMysqlUpdateSSLModesVerifyIdentity SourceMysqlUpdateSSLModesVerifyIdentity) SourceMysqlUpdateSSLModes {
- typ := SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesVerifyIdentity
+func CreateSourceMysqlUpdateSSLModesVerifyIdentity(verifyIdentity VerifyIdentity) SourceMysqlUpdateSSLModes {
+ typ := SourceMysqlUpdateSSLModesTypeVerifyIdentity
return SourceMysqlUpdateSSLModes{
- SourceMysqlUpdateSSLModesVerifyIdentity: &sourceMysqlUpdateSSLModesVerifyIdentity,
- Type: typ,
+ VerifyIdentity: &verifyIdentity,
+ Type: typ,
}
}
func (u *SourceMysqlUpdateSSLModes) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- sourceMysqlUpdateSSLModesPreferred := new(SourceMysqlUpdateSSLModesPreferred)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlUpdateSSLModesPreferred); err == nil {
- u.SourceMysqlUpdateSSLModesPreferred = sourceMysqlUpdateSSLModesPreferred
- u.Type = SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesPreferred
+ preferred := new(Preferred)
+ if err := utils.UnmarshalJSON(data, &preferred, "", true, true); err == nil {
+ u.Preferred = preferred
+ u.Type = SourceMysqlUpdateSSLModesTypePreferred
return nil
}
- sourceMysqlUpdateSSLModesRequired := new(SourceMysqlUpdateSSLModesRequired)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlUpdateSSLModesRequired); err == nil {
- u.SourceMysqlUpdateSSLModesRequired = sourceMysqlUpdateSSLModesRequired
- u.Type = SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesRequired
+ required := new(Required)
+ if err := utils.UnmarshalJSON(data, &required, "", true, true); err == nil {
+ u.Required = required
+ u.Type = SourceMysqlUpdateSSLModesTypeRequired
return nil
}
- sourceMysqlUpdateSSLModesVerifyCA := new(SourceMysqlUpdateSSLModesVerifyCA)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlUpdateSSLModesVerifyCA); err == nil {
- u.SourceMysqlUpdateSSLModesVerifyCA = sourceMysqlUpdateSSLModesVerifyCA
- u.Type = SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesVerifyCA
+ sourceMysqlUpdateVerifyCA := new(SourceMysqlUpdateVerifyCA)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlUpdateVerifyCA, "", true, true); err == nil {
+ u.SourceMysqlUpdateVerifyCA = sourceMysqlUpdateVerifyCA
+ u.Type = SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateVerifyCA
return nil
}
- sourceMysqlUpdateSSLModesVerifyIdentity := new(SourceMysqlUpdateSSLModesVerifyIdentity)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlUpdateSSLModesVerifyIdentity); err == nil {
- u.SourceMysqlUpdateSSLModesVerifyIdentity = sourceMysqlUpdateSSLModesVerifyIdentity
- u.Type = SourceMysqlUpdateSSLModesTypeSourceMysqlUpdateSSLModesVerifyIdentity
+ verifyIdentity := new(VerifyIdentity)
+ if err := utils.UnmarshalJSON(data, &verifyIdentity, "", true, true); err == nil {
+ u.VerifyIdentity = verifyIdentity
+ u.Type = SourceMysqlUpdateSSLModesTypeVerifyIdentity
return nil
}
@@ -368,204 +514,298 @@ func (u *SourceMysqlUpdateSSLModes) UnmarshalJSON(data []byte) error {
}
func (u SourceMysqlUpdateSSLModes) MarshalJSON() ([]byte, error) {
- if u.SourceMysqlUpdateSSLModesPreferred != nil {
- return json.Marshal(u.SourceMysqlUpdateSSLModesPreferred)
+ if u.Preferred != nil {
+ return utils.MarshalJSON(u.Preferred, "", true)
}
- if u.SourceMysqlUpdateSSLModesRequired != nil {
- return json.Marshal(u.SourceMysqlUpdateSSLModesRequired)
+ if u.Required != nil {
+ return utils.MarshalJSON(u.Required, "", true)
}
- if u.SourceMysqlUpdateSSLModesVerifyCA != nil {
- return json.Marshal(u.SourceMysqlUpdateSSLModesVerifyCA)
+ if u.SourceMysqlUpdateVerifyCA != nil {
+ return utils.MarshalJSON(u.SourceMysqlUpdateVerifyCA, "", true)
}
- if u.SourceMysqlUpdateSSLModesVerifyIdentity != nil {
- return json.Marshal(u.SourceMysqlUpdateSSLModesVerifyIdentity)
+ if u.VerifyIdentity != nil {
+ return utils.MarshalJSON(u.VerifyIdentity, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// SourceMysqlUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
type SourceMysqlUpdateSchemasTunnelMethodTunnelMethod string

const (
	SourceMysqlUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourceMysqlUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceMysqlUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *SourceMysqlUpdateSchemasTunnelMethodTunnelMethod {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "SSH_PASSWORD_AUTH".
func (e *SourceMysqlUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "SSH_PASSWORD_AUTH" {
		return fmt.Errorf("invalid value for SourceMysqlUpdateSchemasTunnelMethodTunnelMethod: %v", raw)
	}
	*e = SourceMysqlUpdateSchemasTunnelMethodTunnelMethod(raw)
	return nil
}
-// SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication struct {
+// SourceMysqlUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMysqlUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourceMysqlUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMysqlUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourceMysqlUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceMysqlUpdatePasswordAuthentication) GetTunnelMethod() SourceMysqlUpdateSchemasTunnelMethodTunnelMethod {
+ return SourceMysqlUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourceMysqlUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceMysqlUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourceMysqlUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
// SourceMysqlUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
type SourceMysqlUpdateSchemasTunnelMethod string

const (
	SourceMysqlUpdateSchemasTunnelMethodSSHKeyAuth SourceMysqlUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)

// ToPointer returns a pointer to the enum value, convenient for optional fields.
func (e SourceMysqlUpdateSchemasTunnelMethod) ToPointer() *SourceMysqlUpdateSchemasTunnelMethod {
	return &e
}

// UnmarshalJSON decodes a JSON string and accepts only "SSH_KEY_AUTH".
func (e *SourceMysqlUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
	var raw string
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if raw != "SSH_KEY_AUTH" {
		return fmt.Errorf("invalid value for SourceMysqlUpdateSchemasTunnelMethod: %v", raw)
	}
	*e = SourceMysqlUpdateSchemasTunnelMethod(raw)
	return nil
}
-// SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// SourceMysqlUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMysqlUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMysqlUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourceMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourceMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourceMysqlUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourceMysqlUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceMysqlUpdateSSHKeyAuthentication) GetTunnelMethod() SourceMysqlUpdateSchemasTunnelMethod {
+ return SourceMysqlUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourceMysqlUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceMysqlUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourceMysqlUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type SourceMysqlUpdateTunnelMethod string
const (
- SourceMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourceMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourceMysqlUpdateTunnelMethodNoTunnel SourceMysqlUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e SourceMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourceMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourceMysqlUpdateTunnelMethod) ToPointer() *SourceMysqlUpdateTunnelMethod {
return &e
}
-func (e *SourceMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceMysqlUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourceMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourceMysqlUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceMysqlUpdateTunnelMethod: %v", v)
}
}
-// SourceMysqlUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceMysqlUpdateSSHTunnelMethodNoTunnel struct {
+// SourceMysqlUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceMysqlUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourceMysqlUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceMysqlUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourceMysqlUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlUpdateNoTunnel) GetTunnelMethod() SourceMysqlUpdateTunnelMethod {
+ return SourceMysqlUpdateTunnelMethodNoTunnel
}
type SourceMysqlUpdateSSHTunnelMethodType string
const (
- SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHTunnelMethodNoTunnel SourceMysqlUpdateSSHTunnelMethodType = "source-mysql-update_SSH Tunnel Method_No Tunnel"
- SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication SourceMysqlUpdateSSHTunnelMethodType = "source-mysql-update_SSH Tunnel Method_SSH Key Authentication"
- SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHTunnelMethodPasswordAuthentication SourceMysqlUpdateSSHTunnelMethodType = "source-mysql-update_SSH Tunnel Method_Password Authentication"
+ SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateNoTunnel SourceMysqlUpdateSSHTunnelMethodType = "source-mysql-update_No Tunnel"
+ SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHKeyAuthentication SourceMysqlUpdateSSHTunnelMethodType = "source-mysql-update_SSH Key Authentication"
+ SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdatePasswordAuthentication SourceMysqlUpdateSSHTunnelMethodType = "source-mysql-update_Password Authentication"
)
type SourceMysqlUpdateSSHTunnelMethod struct {
- SourceMysqlUpdateSSHTunnelMethodNoTunnel *SourceMysqlUpdateSSHTunnelMethodNoTunnel
- SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication *SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication
- SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication *SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication
+ SourceMysqlUpdateNoTunnel *SourceMysqlUpdateNoTunnel
+ SourceMysqlUpdateSSHKeyAuthentication *SourceMysqlUpdateSSHKeyAuthentication
+ SourceMysqlUpdatePasswordAuthentication *SourceMysqlUpdatePasswordAuthentication
Type SourceMysqlUpdateSSHTunnelMethodType
}
-func CreateSourceMysqlUpdateSSHTunnelMethodSourceMysqlUpdateSSHTunnelMethodNoTunnel(sourceMysqlUpdateSSHTunnelMethodNoTunnel SourceMysqlUpdateSSHTunnelMethodNoTunnel) SourceMysqlUpdateSSHTunnelMethod {
- typ := SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHTunnelMethodNoTunnel
+func CreateSourceMysqlUpdateSSHTunnelMethodSourceMysqlUpdateNoTunnel(sourceMysqlUpdateNoTunnel SourceMysqlUpdateNoTunnel) SourceMysqlUpdateSSHTunnelMethod {
+ typ := SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateNoTunnel
return SourceMysqlUpdateSSHTunnelMethod{
- SourceMysqlUpdateSSHTunnelMethodNoTunnel: &sourceMysqlUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourceMysqlUpdateNoTunnel: &sourceMysqlUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateSourceMysqlUpdateSSHTunnelMethodSourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication(sourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication) SourceMysqlUpdateSSHTunnelMethod {
- typ := SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateSourceMysqlUpdateSSHTunnelMethodSourceMysqlUpdateSSHKeyAuthentication(sourceMysqlUpdateSSHKeyAuthentication SourceMysqlUpdateSSHKeyAuthentication) SourceMysqlUpdateSSHTunnelMethod {
+ typ := SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHKeyAuthentication
return SourceMysqlUpdateSSHTunnelMethod{
- SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication: &sourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ SourceMysqlUpdateSSHKeyAuthentication: &sourceMysqlUpdateSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateSourceMysqlUpdateSSHTunnelMethodSourceMysqlUpdateSSHTunnelMethodPasswordAuthentication(sourceMysqlUpdateSSHTunnelMethodPasswordAuthentication SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication) SourceMysqlUpdateSSHTunnelMethod {
- typ := SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHTunnelMethodPasswordAuthentication
+func CreateSourceMysqlUpdateSSHTunnelMethodSourceMysqlUpdatePasswordAuthentication(sourceMysqlUpdatePasswordAuthentication SourceMysqlUpdatePasswordAuthentication) SourceMysqlUpdateSSHTunnelMethod {
+ typ := SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdatePasswordAuthentication
return SourceMysqlUpdateSSHTunnelMethod{
- SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication: &sourceMysqlUpdateSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ SourceMysqlUpdatePasswordAuthentication: &sourceMysqlUpdatePasswordAuthentication,
+ Type: typ,
}
}
func (u *SourceMysqlUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- sourceMysqlUpdateSSHTunnelMethodNoTunnel := new(SourceMysqlUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.SourceMysqlUpdateSSHTunnelMethodNoTunnel = sourceMysqlUpdateSSHTunnelMethodNoTunnel
- u.Type = SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHTunnelMethodNoTunnel
+ sourceMysqlUpdateNoTunnel := new(SourceMysqlUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlUpdateNoTunnel, "", true, true); err == nil {
+ u.SourceMysqlUpdateNoTunnel = sourceMysqlUpdateNoTunnel
+ u.Type = SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateNoTunnel
return nil
}
- sourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication := new(SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication = sourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication
+ sourceMysqlUpdateSSHKeyAuthentication := new(SourceMysqlUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceMysqlUpdateSSHKeyAuthentication = sourceMysqlUpdateSSHKeyAuthentication
+ u.Type = SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHKeyAuthentication
return nil
}
- sourceMysqlUpdateSSHTunnelMethodPasswordAuthentication := new(SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceMysqlUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication = sourceMysqlUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdateSSHTunnelMethodPasswordAuthentication
+ sourceMysqlUpdatePasswordAuthentication := new(SourceMysqlUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceMysqlUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.SourceMysqlUpdatePasswordAuthentication = sourceMysqlUpdatePasswordAuthentication
+ u.Type = SourceMysqlUpdateSSHTunnelMethodTypeSourceMysqlUpdatePasswordAuthentication
return nil
}
@@ -573,19 +813,19 @@ func (u *SourceMysqlUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceMysqlUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourceMysqlUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourceMysqlUpdateSSHTunnelMethodNoTunnel)
+ if u.SourceMysqlUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.SourceMysqlUpdateNoTunnel, "", true)
}
- if u.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceMysqlUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourceMysqlUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceMysqlUpdateSSHKeyAuthentication, "", true)
}
- if u.SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourceMysqlUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.SourceMysqlUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceMysqlUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceMysqlUpdate struct {
@@ -598,7 +838,7 @@ type SourceMysqlUpdate struct {
// The password associated with the username.
Password *string `json:"password,omitempty"`
// The port to connect to.
- Port int64 `json:"port"`
+ Port *int64 `default:"3306" json:"port"`
// Configures how data is extracted from the database.
ReplicationMethod SourceMysqlUpdateUpdateMethod `json:"replication_method"`
// SSL connection modes. Read more in the docs.
@@ -608,3 +848,77 @@ type SourceMysqlUpdate struct {
// The username which is used to access the database.
Username string `json:"username"`
}
+
+func (s SourceMysqlUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceMysqlUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceMysqlUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceMysqlUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceMysqlUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceMysqlUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceMysqlUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceMysqlUpdate) GetReplicationMethod() SourceMysqlUpdateUpdateMethod {
+ if o == nil {
+ return SourceMysqlUpdateUpdateMethod{}
+ }
+ return o.ReplicationMethod
+}
+
+func (o *SourceMysqlUpdate) GetSslMode() *SourceMysqlUpdateSSLModes {
+ if o == nil {
+ return nil
+ }
+ return o.SslMode
+}
+
+func (o *SourceMysqlUpdate) GetTunnelMethod() *SourceMysqlUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourceMysqlUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourcenetsuite.go b/internal/sdk/pkg/models/shared/sourcenetsuite.go
old mode 100755
new mode 100644
index 1446e1a4c..2a491ab30
--- a/internal/sdk/pkg/models/shared/sourcenetsuite.go
+++ b/internal/sdk/pkg/models/shared/sourcenetsuite.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceNetsuiteNetsuite string
+type Netsuite string
const (
- SourceNetsuiteNetsuiteNetsuite SourceNetsuiteNetsuite = "netsuite"
+ NetsuiteNetsuite Netsuite = "netsuite"
)
-func (e SourceNetsuiteNetsuite) ToPointer() *SourceNetsuiteNetsuite {
+func (e Netsuite) ToPointer() *Netsuite {
return &e
}
-func (e *SourceNetsuiteNetsuite) UnmarshalJSON(data []byte) error {
+func (e *Netsuite) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "netsuite":
- *e = SourceNetsuiteNetsuite(v)
+ *e = Netsuite(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceNetsuiteNetsuite: %v", v)
+ return fmt.Errorf("invalid value for Netsuite: %v", v)
}
}
@@ -39,8 +40,8 @@ type SourceNetsuite struct {
// The API names of the Netsuite objects you want to sync. Setting this speeds up the connection setup process by limiting the number of schemas that need to be retrieved from Netsuite.
ObjectTypes []string `json:"object_types,omitempty"`
// Netsuite realm e.g. 2344535, as for `production` or 2344535_SB1, as for the `sandbox`
- Realm string `json:"realm"`
- SourceType SourceNetsuiteNetsuite `json:"sourceType"`
+ Realm string `json:"realm"`
+ sourceType Netsuite `const:"netsuite" json:"sourceType"`
// Starting point for your data replication, in format of "YYYY-MM-DDTHH:mm:ssZ"
StartDatetime string `json:"start_datetime"`
// Access token key
@@ -48,5 +49,76 @@ type SourceNetsuite struct {
// Access token secret
TokenSecret string `json:"token_secret"`
// The amount of days used to query the data with date chunks. Set smaller value, if you have lots of data.
- WindowInDays *int64 `json:"window_in_days,omitempty"`
+ WindowInDays *int64 `default:"30" json:"window_in_days"`
+}
+
+func (s SourceNetsuite) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceNetsuite) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceNetsuite) GetConsumerKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConsumerKey
+}
+
+func (o *SourceNetsuite) GetConsumerSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConsumerSecret
+}
+
+func (o *SourceNetsuite) GetObjectTypes() []string {
+ if o == nil {
+ return nil
+ }
+ return o.ObjectTypes
+}
+
+func (o *SourceNetsuite) GetRealm() string {
+ if o == nil {
+ return ""
+ }
+ return o.Realm
+}
+
+func (o *SourceNetsuite) GetSourceType() Netsuite {
+ return NetsuiteNetsuite
+}
+
+func (o *SourceNetsuite) GetStartDatetime() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDatetime
+}
+
+func (o *SourceNetsuite) GetTokenKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.TokenKey
+}
+
+func (o *SourceNetsuite) GetTokenSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.TokenSecret
+}
+
+func (o *SourceNetsuite) GetWindowInDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.WindowInDays
}
diff --git a/internal/sdk/pkg/models/shared/sourcenetsuitecreaterequest.go b/internal/sdk/pkg/models/shared/sourcenetsuitecreaterequest.go
old mode 100755
new mode 100644
index 6a076cd85..1616b07c7
--- a/internal/sdk/pkg/models/shared/sourcenetsuitecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcenetsuitecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceNetsuiteCreateRequest struct {
Configuration SourceNetsuite `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceNetsuiteCreateRequest) GetConfiguration() SourceNetsuite {
+ if o == nil {
+ return SourceNetsuite{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceNetsuiteCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceNetsuiteCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceNetsuiteCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceNetsuiteCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcenetsuiteputrequest.go b/internal/sdk/pkg/models/shared/sourcenetsuiteputrequest.go
old mode 100755
new mode 100644
index d8849bb21..787a62ca9
--- a/internal/sdk/pkg/models/shared/sourcenetsuiteputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcenetsuiteputrequest.go
@@ -7,3 +7,24 @@ type SourceNetsuitePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceNetsuitePutRequest) GetConfiguration() SourceNetsuiteUpdate {
+ if o == nil {
+ return SourceNetsuiteUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceNetsuitePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceNetsuitePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcenetsuiteupdate.go b/internal/sdk/pkg/models/shared/sourcenetsuiteupdate.go
old mode 100755
new mode 100644
index 4f5a07190..7947b6ac8
--- a/internal/sdk/pkg/models/shared/sourcenetsuiteupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcenetsuiteupdate.go
@@ -2,6 +2,10 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceNetsuiteUpdate struct {
// Consumer key associated with your integration
ConsumerKey string `json:"consumer_key"`
@@ -18,5 +22,72 @@ type SourceNetsuiteUpdate struct {
// Access token secret
TokenSecret string `json:"token_secret"`
// The amount of days used to query the data with date chunks. Set smaller value, if you have lots of data.
- WindowInDays *int64 `json:"window_in_days,omitempty"`
+ WindowInDays *int64 `default:"30" json:"window_in_days"`
+}
+
+func (s SourceNetsuiteUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceNetsuiteUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceNetsuiteUpdate) GetConsumerKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConsumerKey
+}
+
+func (o *SourceNetsuiteUpdate) GetConsumerSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConsumerSecret
+}
+
+func (o *SourceNetsuiteUpdate) GetObjectTypes() []string {
+ if o == nil {
+ return nil
+ }
+ return o.ObjectTypes
+}
+
+func (o *SourceNetsuiteUpdate) GetRealm() string {
+ if o == nil {
+ return ""
+ }
+ return o.Realm
+}
+
+func (o *SourceNetsuiteUpdate) GetStartDatetime() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDatetime
+}
+
+func (o *SourceNetsuiteUpdate) GetTokenKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.TokenKey
+}
+
+func (o *SourceNetsuiteUpdate) GetTokenSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.TokenSecret
+}
+
+func (o *SourceNetsuiteUpdate) GetWindowInDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.WindowInDays
}
diff --git a/internal/sdk/pkg/models/shared/sourcenotion.go b/internal/sdk/pkg/models/shared/sourcenotion.go
old mode 100755
new mode 100644
index 1a8b43d86..e252f2806
--- a/internal/sdk/pkg/models/shared/sourcenotion.go
+++ b/internal/sdk/pkg/models/shared/sourcenotion.go
@@ -3,175 +3,257 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceNotionAuthenticateUsingAccessTokenAuthType string
+type SourceNotionSchemasAuthType string
const (
- SourceNotionAuthenticateUsingAccessTokenAuthTypeToken SourceNotionAuthenticateUsingAccessTokenAuthType = "token"
+ SourceNotionSchemasAuthTypeToken SourceNotionSchemasAuthType = "token"
)
-func (e SourceNotionAuthenticateUsingAccessTokenAuthType) ToPointer() *SourceNotionAuthenticateUsingAccessTokenAuthType {
+func (e SourceNotionSchemasAuthType) ToPointer() *SourceNotionSchemasAuthType {
return &e
}
-func (e *SourceNotionAuthenticateUsingAccessTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceNotionSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "token":
- *e = SourceNotionAuthenticateUsingAccessTokenAuthType(v)
+ *e = SourceNotionSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceNotionAuthenticateUsingAccessTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceNotionSchemasAuthType: %v", v)
}
}
-// SourceNotionAuthenticateUsingAccessToken - Pick an authentication method.
-type SourceNotionAuthenticateUsingAccessToken struct {
- AuthType SourceNotionAuthenticateUsingAccessTokenAuthType `json:"auth_type"`
- // Notion API access token, see the docs for more information on how to obtain this token.
+// SourceNotionAccessToken - Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information.
+type SourceNotionAccessToken struct {
+ authType SourceNotionSchemasAuthType `const:"token" json:"auth_type"`
+ // The Access Token for your private Notion integration. See the docs for more information on how to obtain this token.
Token string `json:"token"`
}
-type SourceNotionAuthenticateUsingOAuth20AuthType string
+func (s SourceNotionAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceNotionAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceNotionAccessToken) GetAuthType() SourceNotionSchemasAuthType {
+ return SourceNotionSchemasAuthTypeToken
+}
+
+func (o *SourceNotionAccessToken) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
+
+type SourceNotionAuthType string
const (
- SourceNotionAuthenticateUsingOAuth20AuthTypeOAuth20 SourceNotionAuthenticateUsingOAuth20AuthType = "OAuth2.0"
+ SourceNotionAuthTypeOAuth20 SourceNotionAuthType = "OAuth2.0"
)
-func (e SourceNotionAuthenticateUsingOAuth20AuthType) ToPointer() *SourceNotionAuthenticateUsingOAuth20AuthType {
+func (e SourceNotionAuthType) ToPointer() *SourceNotionAuthType {
return &e
}
-func (e *SourceNotionAuthenticateUsingOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceNotionAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth2.0":
- *e = SourceNotionAuthenticateUsingOAuth20AuthType(v)
+ *e = SourceNotionAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceNotionAuthenticateUsingOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceNotionAuthType: %v", v)
}
}
-// SourceNotionAuthenticateUsingOAuth20 - Pick an authentication method.
-type SourceNotionAuthenticateUsingOAuth20 struct {
- // Access Token is a token you received by complete the OauthWebFlow of Notion.
- AccessToken string `json:"access_token"`
- AuthType SourceNotionAuthenticateUsingOAuth20AuthType `json:"auth_type"`
- // The ClientID of your Notion integration.
+// SourceNotionOAuth20 - Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information.
+type SourceNotionOAuth20 struct {
+ // The Access Token received by completing the OAuth flow for your Notion integration. See our docs for more information.
+ AccessToken string `json:"access_token"`
+ authType SourceNotionAuthType `const:"OAuth2.0" json:"auth_type"`
+ // The Client ID of your Notion integration. See our docs for more information.
ClientID string `json:"client_id"`
- // The ClientSecret of your Notion integration.
+ // The Client Secret of your Notion integration. See our docs for more information.
ClientSecret string `json:"client_secret"`
}
-type SourceNotionAuthenticateUsingType string
+func (s SourceNotionOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceNotionOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceNotionOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceNotionOAuth20) GetAuthType() SourceNotionAuthType {
+ return SourceNotionAuthTypeOAuth20
+}
+
+func (o *SourceNotionOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceNotionOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+type SourceNotionAuthenticationMethodType string
const (
- SourceNotionAuthenticateUsingTypeSourceNotionAuthenticateUsingOAuth20 SourceNotionAuthenticateUsingType = "source-notion_Authenticate using_OAuth2.0"
- SourceNotionAuthenticateUsingTypeSourceNotionAuthenticateUsingAccessToken SourceNotionAuthenticateUsingType = "source-notion_Authenticate using_Access Token"
+ SourceNotionAuthenticationMethodTypeSourceNotionOAuth20 SourceNotionAuthenticationMethodType = "source-notion_OAuth2.0"
+ SourceNotionAuthenticationMethodTypeSourceNotionAccessToken SourceNotionAuthenticationMethodType = "source-notion_Access Token"
)
-type SourceNotionAuthenticateUsing struct {
- SourceNotionAuthenticateUsingOAuth20 *SourceNotionAuthenticateUsingOAuth20
- SourceNotionAuthenticateUsingAccessToken *SourceNotionAuthenticateUsingAccessToken
+type SourceNotionAuthenticationMethod struct {
+ SourceNotionOAuth20 *SourceNotionOAuth20
+ SourceNotionAccessToken *SourceNotionAccessToken
- Type SourceNotionAuthenticateUsingType
+ Type SourceNotionAuthenticationMethodType
}
-func CreateSourceNotionAuthenticateUsingSourceNotionAuthenticateUsingOAuth20(sourceNotionAuthenticateUsingOAuth20 SourceNotionAuthenticateUsingOAuth20) SourceNotionAuthenticateUsing {
- typ := SourceNotionAuthenticateUsingTypeSourceNotionAuthenticateUsingOAuth20
+func CreateSourceNotionAuthenticationMethodSourceNotionOAuth20(sourceNotionOAuth20 SourceNotionOAuth20) SourceNotionAuthenticationMethod {
+ typ := SourceNotionAuthenticationMethodTypeSourceNotionOAuth20
- return SourceNotionAuthenticateUsing{
- SourceNotionAuthenticateUsingOAuth20: &sourceNotionAuthenticateUsingOAuth20,
- Type: typ,
+ return SourceNotionAuthenticationMethod{
+ SourceNotionOAuth20: &sourceNotionOAuth20,
+ Type: typ,
}
}
-func CreateSourceNotionAuthenticateUsingSourceNotionAuthenticateUsingAccessToken(sourceNotionAuthenticateUsingAccessToken SourceNotionAuthenticateUsingAccessToken) SourceNotionAuthenticateUsing {
- typ := SourceNotionAuthenticateUsingTypeSourceNotionAuthenticateUsingAccessToken
+func CreateSourceNotionAuthenticationMethodSourceNotionAccessToken(sourceNotionAccessToken SourceNotionAccessToken) SourceNotionAuthenticationMethod {
+ typ := SourceNotionAuthenticationMethodTypeSourceNotionAccessToken
- return SourceNotionAuthenticateUsing{
- SourceNotionAuthenticateUsingAccessToken: &sourceNotionAuthenticateUsingAccessToken,
- Type: typ,
+ return SourceNotionAuthenticationMethod{
+ SourceNotionAccessToken: &sourceNotionAccessToken,
+ Type: typ,
}
}
-func (u *SourceNotionAuthenticateUsing) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *SourceNotionAuthenticationMethod) UnmarshalJSON(data []byte) error {
- sourceNotionAuthenticateUsingAccessToken := new(SourceNotionAuthenticateUsingAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceNotionAuthenticateUsingAccessToken); err == nil {
- u.SourceNotionAuthenticateUsingAccessToken = sourceNotionAuthenticateUsingAccessToken
- u.Type = SourceNotionAuthenticateUsingTypeSourceNotionAuthenticateUsingAccessToken
+ sourceNotionAccessToken := new(SourceNotionAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceNotionAccessToken, "", true, true); err == nil {
+ u.SourceNotionAccessToken = sourceNotionAccessToken
+ u.Type = SourceNotionAuthenticationMethodTypeSourceNotionAccessToken
return nil
}
- sourceNotionAuthenticateUsingOAuth20 := new(SourceNotionAuthenticateUsingOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceNotionAuthenticateUsingOAuth20); err == nil {
- u.SourceNotionAuthenticateUsingOAuth20 = sourceNotionAuthenticateUsingOAuth20
- u.Type = SourceNotionAuthenticateUsingTypeSourceNotionAuthenticateUsingOAuth20
+ sourceNotionOAuth20 := new(SourceNotionOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceNotionOAuth20, "", true, true); err == nil {
+ u.SourceNotionOAuth20 = sourceNotionOAuth20
+ u.Type = SourceNotionAuthenticationMethodTypeSourceNotionOAuth20
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceNotionAuthenticateUsing) MarshalJSON() ([]byte, error) {
- if u.SourceNotionAuthenticateUsingAccessToken != nil {
- return json.Marshal(u.SourceNotionAuthenticateUsingAccessToken)
+func (u SourceNotionAuthenticationMethod) MarshalJSON() ([]byte, error) {
+ if u.SourceNotionOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceNotionOAuth20, "", true)
}
- if u.SourceNotionAuthenticateUsingOAuth20 != nil {
- return json.Marshal(u.SourceNotionAuthenticateUsingOAuth20)
+ if u.SourceNotionAccessToken != nil {
+ return utils.MarshalJSON(u.SourceNotionAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceNotionNotion string
+type Notion string
const (
- SourceNotionNotionNotion SourceNotionNotion = "notion"
+ NotionNotion Notion = "notion"
)
-func (e SourceNotionNotion) ToPointer() *SourceNotionNotion {
+func (e Notion) ToPointer() *Notion {
return &e
}
-func (e *SourceNotionNotion) UnmarshalJSON(data []byte) error {
+func (e *Notion) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "notion":
- *e = SourceNotionNotion(v)
+ *e = Notion(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceNotionNotion: %v", v)
+ return fmt.Errorf("invalid value for Notion: %v", v)
}
}
type SourceNotion struct {
- // Pick an authentication method.
- Credentials *SourceNotionAuthenticateUsing `json:"credentials,omitempty"`
- SourceType SourceNotionNotion `json:"sourceType"`
- // UTC date and time in the format 2017-01-25T00:00:00.000Z. Any data before this date will not be replicated.
- StartDate time.Time `json:"start_date"`
+ // Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information.
+ Credentials SourceNotionAuthenticationMethod `json:"credentials"`
+ sourceType Notion `const:"notion" json:"sourceType"`
+ // UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z. During incremental sync, any data generated before this date will not be replicated. If left blank, the start date will be set to 2 years before the present date.
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceNotion) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceNotion) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceNotion) GetCredentials() SourceNotionAuthenticationMethod {
+ if o == nil {
+ return SourceNotionAuthenticationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceNotion) GetSourceType() Notion {
+ return NotionNotion
+}
+
+func (o *SourceNotion) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcenotioncreaterequest.go b/internal/sdk/pkg/models/shared/sourcenotioncreaterequest.go
old mode 100755
new mode 100644
index 7c1720427..d86cf4387
--- a/internal/sdk/pkg/models/shared/sourcenotioncreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcenotioncreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceNotionCreateRequest struct {
Configuration SourceNotion `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceNotionCreateRequest) GetConfiguration() SourceNotion {
+ if o == nil {
+ return SourceNotion{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceNotionCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceNotionCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceNotionCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceNotionCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcenotionputrequest.go b/internal/sdk/pkg/models/shared/sourcenotionputrequest.go
old mode 100755
new mode 100644
index 17745b126..4b13d33fa
--- a/internal/sdk/pkg/models/shared/sourcenotionputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcenotionputrequest.go
@@ -7,3 +7,24 @@ type SourceNotionPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceNotionPutRequest) GetConfiguration() SourceNotionUpdate {
+ if o == nil {
+ return SourceNotionUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceNotionPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceNotionPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcenotionupdate.go b/internal/sdk/pkg/models/shared/sourcenotionupdate.go
old mode 100755
new mode 100644
index f424fe1d5..52e1cf3c1
--- a/internal/sdk/pkg/models/shared/sourcenotionupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcenotionupdate.go
@@ -3,150 +3,228 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceNotionUpdateAuthenticateUsingAccessTokenAuthType string
+type SourceNotionUpdateSchemasAuthType string
const (
- SourceNotionUpdateAuthenticateUsingAccessTokenAuthTypeToken SourceNotionUpdateAuthenticateUsingAccessTokenAuthType = "token"
+ SourceNotionUpdateSchemasAuthTypeToken SourceNotionUpdateSchemasAuthType = "token"
)
-func (e SourceNotionUpdateAuthenticateUsingAccessTokenAuthType) ToPointer() *SourceNotionUpdateAuthenticateUsingAccessTokenAuthType {
+func (e SourceNotionUpdateSchemasAuthType) ToPointer() *SourceNotionUpdateSchemasAuthType {
return &e
}
-func (e *SourceNotionUpdateAuthenticateUsingAccessTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceNotionUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "token":
- *e = SourceNotionUpdateAuthenticateUsingAccessTokenAuthType(v)
+ *e = SourceNotionUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceNotionUpdateAuthenticateUsingAccessTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceNotionUpdateSchemasAuthType: %v", v)
}
}
-// SourceNotionUpdateAuthenticateUsingAccessToken - Pick an authentication method.
-type SourceNotionUpdateAuthenticateUsingAccessToken struct {
- AuthType SourceNotionUpdateAuthenticateUsingAccessTokenAuthType `json:"auth_type"`
- // Notion API access token, see the docs for more information on how to obtain this token.
+// SourceNotionUpdateAccessToken - Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information.
+type SourceNotionUpdateAccessToken struct {
+ authType SourceNotionUpdateSchemasAuthType `const:"token" json:"auth_type"`
+ // The Access Token for your private Notion integration. See the docs for more information on how to obtain this token.
Token string `json:"token"`
}
-type SourceNotionUpdateAuthenticateUsingOAuth20AuthType string
+func (s SourceNotionUpdateAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceNotionUpdateAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceNotionUpdateAccessToken) GetAuthType() SourceNotionUpdateSchemasAuthType {
+ return SourceNotionUpdateSchemasAuthTypeToken
+}
+
+func (o *SourceNotionUpdateAccessToken) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
+
+type SourceNotionUpdateAuthType string
const (
- SourceNotionUpdateAuthenticateUsingOAuth20AuthTypeOAuth20 SourceNotionUpdateAuthenticateUsingOAuth20AuthType = "OAuth2.0"
+ SourceNotionUpdateAuthTypeOAuth20 SourceNotionUpdateAuthType = "OAuth2.0"
)
-func (e SourceNotionUpdateAuthenticateUsingOAuth20AuthType) ToPointer() *SourceNotionUpdateAuthenticateUsingOAuth20AuthType {
+func (e SourceNotionUpdateAuthType) ToPointer() *SourceNotionUpdateAuthType {
return &e
}
-func (e *SourceNotionUpdateAuthenticateUsingOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceNotionUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth2.0":
- *e = SourceNotionUpdateAuthenticateUsingOAuth20AuthType(v)
+ *e = SourceNotionUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceNotionUpdateAuthenticateUsingOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceNotionUpdateAuthType: %v", v)
}
}
-// SourceNotionUpdateAuthenticateUsingOAuth20 - Pick an authentication method.
-type SourceNotionUpdateAuthenticateUsingOAuth20 struct {
- // Access Token is a token you received by complete the OauthWebFlow of Notion.
- AccessToken string `json:"access_token"`
- AuthType SourceNotionUpdateAuthenticateUsingOAuth20AuthType `json:"auth_type"`
- // The ClientID of your Notion integration.
+// SourceNotionUpdateOAuth20 - Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information.
+type SourceNotionUpdateOAuth20 struct {
+ // The Access Token received by completing the OAuth flow for your Notion integration. See our docs for more information.
+ AccessToken string `json:"access_token"`
+ authType SourceNotionUpdateAuthType `const:"OAuth2.0" json:"auth_type"`
+ // The Client ID of your Notion integration. See our docs for more information.
ClientID string `json:"client_id"`
- // The ClientSecret of your Notion integration.
+ // The Client Secret of your Notion integration. See our docs for more information.
ClientSecret string `json:"client_secret"`
}
-type SourceNotionUpdateAuthenticateUsingType string
+func (s SourceNotionUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceNotionUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceNotionUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceNotionUpdateOAuth20) GetAuthType() SourceNotionUpdateAuthType {
+ return SourceNotionUpdateAuthTypeOAuth20
+}
+
+func (o *SourceNotionUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceNotionUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+type SourceNotionUpdateAuthenticationMethodType string
const (
- SourceNotionUpdateAuthenticateUsingTypeSourceNotionUpdateAuthenticateUsingOAuth20 SourceNotionUpdateAuthenticateUsingType = "source-notion-update_Authenticate using_OAuth2.0"
- SourceNotionUpdateAuthenticateUsingTypeSourceNotionUpdateAuthenticateUsingAccessToken SourceNotionUpdateAuthenticateUsingType = "source-notion-update_Authenticate using_Access Token"
+ SourceNotionUpdateAuthenticationMethodTypeSourceNotionUpdateOAuth20 SourceNotionUpdateAuthenticationMethodType = "source-notion-update_OAuth2.0"
+ SourceNotionUpdateAuthenticationMethodTypeSourceNotionUpdateAccessToken SourceNotionUpdateAuthenticationMethodType = "source-notion-update_Access Token"
)
-type SourceNotionUpdateAuthenticateUsing struct {
- SourceNotionUpdateAuthenticateUsingOAuth20 *SourceNotionUpdateAuthenticateUsingOAuth20
- SourceNotionUpdateAuthenticateUsingAccessToken *SourceNotionUpdateAuthenticateUsingAccessToken
+type SourceNotionUpdateAuthenticationMethod struct {
+ SourceNotionUpdateOAuth20 *SourceNotionUpdateOAuth20
+ SourceNotionUpdateAccessToken *SourceNotionUpdateAccessToken
- Type SourceNotionUpdateAuthenticateUsingType
+ Type SourceNotionUpdateAuthenticationMethodType
}
-func CreateSourceNotionUpdateAuthenticateUsingSourceNotionUpdateAuthenticateUsingOAuth20(sourceNotionUpdateAuthenticateUsingOAuth20 SourceNotionUpdateAuthenticateUsingOAuth20) SourceNotionUpdateAuthenticateUsing {
- typ := SourceNotionUpdateAuthenticateUsingTypeSourceNotionUpdateAuthenticateUsingOAuth20
+func CreateSourceNotionUpdateAuthenticationMethodSourceNotionUpdateOAuth20(sourceNotionUpdateOAuth20 SourceNotionUpdateOAuth20) SourceNotionUpdateAuthenticationMethod {
+ typ := SourceNotionUpdateAuthenticationMethodTypeSourceNotionUpdateOAuth20
- return SourceNotionUpdateAuthenticateUsing{
- SourceNotionUpdateAuthenticateUsingOAuth20: &sourceNotionUpdateAuthenticateUsingOAuth20,
- Type: typ,
+ return SourceNotionUpdateAuthenticationMethod{
+ SourceNotionUpdateOAuth20: &sourceNotionUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceNotionUpdateAuthenticateUsingSourceNotionUpdateAuthenticateUsingAccessToken(sourceNotionUpdateAuthenticateUsingAccessToken SourceNotionUpdateAuthenticateUsingAccessToken) SourceNotionUpdateAuthenticateUsing {
- typ := SourceNotionUpdateAuthenticateUsingTypeSourceNotionUpdateAuthenticateUsingAccessToken
+func CreateSourceNotionUpdateAuthenticationMethodSourceNotionUpdateAccessToken(sourceNotionUpdateAccessToken SourceNotionUpdateAccessToken) SourceNotionUpdateAuthenticationMethod {
+ typ := SourceNotionUpdateAuthenticationMethodTypeSourceNotionUpdateAccessToken
- return SourceNotionUpdateAuthenticateUsing{
- SourceNotionUpdateAuthenticateUsingAccessToken: &sourceNotionUpdateAuthenticateUsingAccessToken,
- Type: typ,
+ return SourceNotionUpdateAuthenticationMethod{
+ SourceNotionUpdateAccessToken: &sourceNotionUpdateAccessToken,
+ Type: typ,
}
}
-func (u *SourceNotionUpdateAuthenticateUsing) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *SourceNotionUpdateAuthenticationMethod) UnmarshalJSON(data []byte) error {
- sourceNotionUpdateAuthenticateUsingAccessToken := new(SourceNotionUpdateAuthenticateUsingAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceNotionUpdateAuthenticateUsingAccessToken); err == nil {
- u.SourceNotionUpdateAuthenticateUsingAccessToken = sourceNotionUpdateAuthenticateUsingAccessToken
- u.Type = SourceNotionUpdateAuthenticateUsingTypeSourceNotionUpdateAuthenticateUsingAccessToken
+ sourceNotionUpdateAccessToken := new(SourceNotionUpdateAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceNotionUpdateAccessToken, "", true, true); err == nil {
+ u.SourceNotionUpdateAccessToken = sourceNotionUpdateAccessToken
+ u.Type = SourceNotionUpdateAuthenticationMethodTypeSourceNotionUpdateAccessToken
return nil
}
- sourceNotionUpdateAuthenticateUsingOAuth20 := new(SourceNotionUpdateAuthenticateUsingOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceNotionUpdateAuthenticateUsingOAuth20); err == nil {
- u.SourceNotionUpdateAuthenticateUsingOAuth20 = sourceNotionUpdateAuthenticateUsingOAuth20
- u.Type = SourceNotionUpdateAuthenticateUsingTypeSourceNotionUpdateAuthenticateUsingOAuth20
+ sourceNotionUpdateOAuth20 := new(SourceNotionUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceNotionUpdateOAuth20, "", true, true); err == nil {
+ u.SourceNotionUpdateOAuth20 = sourceNotionUpdateOAuth20
+ u.Type = SourceNotionUpdateAuthenticationMethodTypeSourceNotionUpdateOAuth20
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceNotionUpdateAuthenticateUsing) MarshalJSON() ([]byte, error) {
- if u.SourceNotionUpdateAuthenticateUsingAccessToken != nil {
- return json.Marshal(u.SourceNotionUpdateAuthenticateUsingAccessToken)
+func (u SourceNotionUpdateAuthenticationMethod) MarshalJSON() ([]byte, error) {
+ if u.SourceNotionUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceNotionUpdateOAuth20, "", true)
}
- if u.SourceNotionUpdateAuthenticateUsingOAuth20 != nil {
- return json.Marshal(u.SourceNotionUpdateAuthenticateUsingOAuth20)
+ if u.SourceNotionUpdateAccessToken != nil {
+ return utils.MarshalJSON(u.SourceNotionUpdateAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceNotionUpdate struct {
- // Pick an authentication method.
- Credentials *SourceNotionUpdateAuthenticateUsing `json:"credentials,omitempty"`
- // UTC date and time in the format 2017-01-25T00:00:00.000Z. Any data before this date will not be replicated.
- StartDate time.Time `json:"start_date"`
+ // Choose either OAuth (recommended for Airbyte Cloud) or Access Token. See our docs for more information.
+ Credentials SourceNotionUpdateAuthenticationMethod `json:"credentials"`
+ // UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z. During incremental sync, any data generated before this date will not be replicated. If left blank, the start date will be set to 2 years before the present date.
+ StartDate *time.Time `json:"start_date,omitempty"`
+}
+
+func (s SourceNotionUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceNotionUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceNotionUpdate) GetCredentials() SourceNotionUpdateAuthenticationMethod {
+ if o == nil {
+ return SourceNotionUpdateAuthenticationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceNotionUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcenytimes.go b/internal/sdk/pkg/models/shared/sourcenytimes.go
old mode 100755
new mode 100644
index 3174db216..3a3fd9d7e
--- a/internal/sdk/pkg/models/shared/sourcenytimes.go
+++ b/internal/sdk/pkg/models/shared/sourcenytimes.go
@@ -3,9 +3,10 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourceNytimesPeriodUsedForMostPopularStreams - Period of time (in days)
@@ -64,27 +65,27 @@ func (e *SourceNytimesShareTypeUsedForMostPopularSharedStream) UnmarshalJSON(dat
}
}
-type SourceNytimesNytimes string
+type Nytimes string
const (
- SourceNytimesNytimesNytimes SourceNytimesNytimes = "nytimes"
+ NytimesNytimes Nytimes = "nytimes"
)
-func (e SourceNytimesNytimes) ToPointer() *SourceNytimesNytimes {
+func (e Nytimes) ToPointer() *Nytimes {
return &e
}
-func (e *SourceNytimesNytimes) UnmarshalJSON(data []byte) error {
+func (e *Nytimes) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "nytimes":
- *e = SourceNytimesNytimes(v)
+ *e = Nytimes(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceNytimesNytimes: %v", v)
+ return fmt.Errorf("invalid value for Nytimes: %v", v)
}
}
@@ -97,7 +98,57 @@ type SourceNytimes struct {
Period SourceNytimesPeriodUsedForMostPopularStreams `json:"period"`
// Share Type
ShareType *SourceNytimesShareTypeUsedForMostPopularSharedStream `json:"share_type,omitempty"`
- SourceType SourceNytimesNytimes `json:"sourceType"`
+ sourceType Nytimes `const:"nytimes" json:"sourceType"`
// Start date to begin the article retrieval (format YYYY-MM)
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceNytimes) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceNytimes) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceNytimes) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceNytimes) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceNytimes) GetPeriod() SourceNytimesPeriodUsedForMostPopularStreams {
+ if o == nil {
+ return SourceNytimesPeriodUsedForMostPopularStreams(0)
+ }
+ return o.Period
+}
+
+func (o *SourceNytimes) GetShareType() *SourceNytimesShareTypeUsedForMostPopularSharedStream {
+ if o == nil {
+ return nil
+ }
+ return o.ShareType
+}
+
+func (o *SourceNytimes) GetSourceType() Nytimes {
+ return NytimesNytimes
+}
+
+func (o *SourceNytimes) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcenytimescreaterequest.go b/internal/sdk/pkg/models/shared/sourcenytimescreaterequest.go
old mode 100755
new mode 100644
index 312f801ae..ccd82927d
--- a/internal/sdk/pkg/models/shared/sourcenytimescreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcenytimescreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceNytimesCreateRequest struct {
Configuration SourceNytimes `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceNytimesCreateRequest) GetConfiguration() SourceNytimes {
+ if o == nil {
+ return SourceNytimes{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceNytimesCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceNytimesCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceNytimesCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceNytimesCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcenytimesputrequest.go b/internal/sdk/pkg/models/shared/sourcenytimesputrequest.go
old mode 100755
new mode 100644
index ee9a981ea..94c856939
--- a/internal/sdk/pkg/models/shared/sourcenytimesputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcenytimesputrequest.go
@@ -7,3 +7,24 @@ type SourceNytimesPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceNytimesPutRequest) GetConfiguration() SourceNytimesUpdate {
+ if o == nil {
+ return SourceNytimesUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceNytimesPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceNytimesPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcenytimesupdate.go b/internal/sdk/pkg/models/shared/sourcenytimesupdate.go
old mode 100755
new mode 100644
index 8c1b6179a..4368f44f0
--- a/internal/sdk/pkg/models/shared/sourcenytimesupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcenytimesupdate.go
@@ -3,25 +3,26 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceNytimesUpdatePeriodUsedForMostPopularStreams - Period of time (in days)
-type SourceNytimesUpdatePeriodUsedForMostPopularStreams int64
+// PeriodUsedForMostPopularStreams - Period of time (in days)
+type PeriodUsedForMostPopularStreams int64
const (
- SourceNytimesUpdatePeriodUsedForMostPopularStreamsOne SourceNytimesUpdatePeriodUsedForMostPopularStreams = 1
- SourceNytimesUpdatePeriodUsedForMostPopularStreamsSeven SourceNytimesUpdatePeriodUsedForMostPopularStreams = 7
- SourceNytimesUpdatePeriodUsedForMostPopularStreamsThirty SourceNytimesUpdatePeriodUsedForMostPopularStreams = 30
+ PeriodUsedForMostPopularStreamsOne PeriodUsedForMostPopularStreams = 1
+ PeriodUsedForMostPopularStreamsSeven PeriodUsedForMostPopularStreams = 7
+ PeriodUsedForMostPopularStreamsThirty PeriodUsedForMostPopularStreams = 30
)
-func (e SourceNytimesUpdatePeriodUsedForMostPopularStreams) ToPointer() *SourceNytimesUpdatePeriodUsedForMostPopularStreams {
+func (e PeriodUsedForMostPopularStreams) ToPointer() *PeriodUsedForMostPopularStreams {
return &e
}
-func (e *SourceNytimesUpdatePeriodUsedForMostPopularStreams) UnmarshalJSON(data []byte) error {
+func (e *PeriodUsedForMostPopularStreams) UnmarshalJSON(data []byte) error {
var v int64
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -32,35 +33,35 @@ func (e *SourceNytimesUpdatePeriodUsedForMostPopularStreams) UnmarshalJSON(data
case 7:
fallthrough
case 30:
- *e = SourceNytimesUpdatePeriodUsedForMostPopularStreams(v)
+ *e = PeriodUsedForMostPopularStreams(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceNytimesUpdatePeriodUsedForMostPopularStreams: %v", v)
+ return fmt.Errorf("invalid value for PeriodUsedForMostPopularStreams: %v", v)
}
}
-// SourceNytimesUpdateShareTypeUsedForMostPopularSharedStream - Share Type
-type SourceNytimesUpdateShareTypeUsedForMostPopularSharedStream string
+// ShareTypeUsedForMostPopularSharedStream - Share Type
+type ShareTypeUsedForMostPopularSharedStream string
const (
- SourceNytimesUpdateShareTypeUsedForMostPopularSharedStreamFacebook SourceNytimesUpdateShareTypeUsedForMostPopularSharedStream = "facebook"
+ ShareTypeUsedForMostPopularSharedStreamFacebook ShareTypeUsedForMostPopularSharedStream = "facebook"
)
-func (e SourceNytimesUpdateShareTypeUsedForMostPopularSharedStream) ToPointer() *SourceNytimesUpdateShareTypeUsedForMostPopularSharedStream {
+func (e ShareTypeUsedForMostPopularSharedStream) ToPointer() *ShareTypeUsedForMostPopularSharedStream {
return &e
}
-func (e *SourceNytimesUpdateShareTypeUsedForMostPopularSharedStream) UnmarshalJSON(data []byte) error {
+func (e *ShareTypeUsedForMostPopularSharedStream) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "facebook":
- *e = SourceNytimesUpdateShareTypeUsedForMostPopularSharedStream(v)
+ *e = ShareTypeUsedForMostPopularSharedStream(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceNytimesUpdateShareTypeUsedForMostPopularSharedStream: %v", v)
+ return fmt.Errorf("invalid value for ShareTypeUsedForMostPopularSharedStream: %v", v)
}
}
@@ -70,9 +71,55 @@ type SourceNytimesUpdate struct {
// End date to stop the article retrieval (format YYYY-MM)
EndDate *types.Date `json:"end_date,omitempty"`
// Period of time (in days)
- Period SourceNytimesUpdatePeriodUsedForMostPopularStreams `json:"period"`
+ Period PeriodUsedForMostPopularStreams `json:"period"`
// Share Type
- ShareType *SourceNytimesUpdateShareTypeUsedForMostPopularSharedStream `json:"share_type,omitempty"`
+ ShareType *ShareTypeUsedForMostPopularSharedStream `json:"share_type,omitempty"`
// Start date to begin the article retrieval (format YYYY-MM)
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceNytimesUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceNytimesUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceNytimesUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceNytimesUpdate) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceNytimesUpdate) GetPeriod() PeriodUsedForMostPopularStreams {
+ if o == nil {
+ return PeriodUsedForMostPopularStreams(0)
+ }
+ return o.Period
+}
+
+func (o *SourceNytimesUpdate) GetShareType() *ShareTypeUsedForMostPopularSharedStream {
+ if o == nil {
+ return nil
+ }
+ return o.ShareType
+}
+
+func (o *SourceNytimesUpdate) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceokta.go b/internal/sdk/pkg/models/shared/sourceokta.go
old mode 100755
new mode 100644
index b7ac00a18..02bff15fc
--- a/internal/sdk/pkg/models/shared/sourceokta.go
+++ b/internal/sdk/pkg/models/shared/sourceokta.go
@@ -3,68 +3,90 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceOktaAuthorizationMethodAPITokenAuthType string
+type SourceOktaSchemasAuthType string
const (
- SourceOktaAuthorizationMethodAPITokenAuthTypeAPIToken SourceOktaAuthorizationMethodAPITokenAuthType = "api_token"
+ SourceOktaSchemasAuthTypeAPIToken SourceOktaSchemasAuthType = "api_token"
)
-func (e SourceOktaAuthorizationMethodAPITokenAuthType) ToPointer() *SourceOktaAuthorizationMethodAPITokenAuthType {
+func (e SourceOktaSchemasAuthType) ToPointer() *SourceOktaSchemasAuthType {
return &e
}
-func (e *SourceOktaAuthorizationMethodAPITokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceOktaSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_token":
- *e = SourceOktaAuthorizationMethodAPITokenAuthType(v)
+ *e = SourceOktaSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOktaAuthorizationMethodAPITokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceOktaSchemasAuthType: %v", v)
}
}
-type SourceOktaAuthorizationMethodAPIToken struct {
+type SourceOktaAPIToken struct {
// An Okta token. See the docs for instructions on how to generate it.
- APIToken string `json:"api_token"`
- AuthType SourceOktaAuthorizationMethodAPITokenAuthType `json:"auth_type"`
+ APIToken string `json:"api_token"`
+ authType SourceOktaSchemasAuthType `const:"api_token" json:"auth_type"`
}
-type SourceOktaAuthorizationMethodOAuth20AuthType string
+func (s SourceOktaAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOktaAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOktaAPIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceOktaAPIToken) GetAuthType() SourceOktaSchemasAuthType {
+ return SourceOktaSchemasAuthTypeAPIToken
+}
+
+type SourceOktaAuthType string
const (
- SourceOktaAuthorizationMethodOAuth20AuthTypeOauth20 SourceOktaAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceOktaAuthTypeOauth20 SourceOktaAuthType = "oauth2.0"
)
-func (e SourceOktaAuthorizationMethodOAuth20AuthType) ToPointer() *SourceOktaAuthorizationMethodOAuth20AuthType {
+func (e SourceOktaAuthType) ToPointer() *SourceOktaAuthType {
return &e
}
-func (e *SourceOktaAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceOktaAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceOktaAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceOktaAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOktaAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceOktaAuthType: %v", v)
}
}
-type SourceOktaAuthorizationMethodOAuth20 struct {
- AuthType SourceOktaAuthorizationMethodOAuth20AuthType `json:"auth_type"`
+type SourceOktaOAuth20 struct {
+ authType SourceOktaAuthType `const:"oauth2.0" json:"auth_type"`
// The Client ID of your OAuth application.
ClientID string `json:"client_id"`
// The Client Secret of your OAuth application.
@@ -73,56 +95,87 @@ type SourceOktaAuthorizationMethodOAuth20 struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceOktaOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOktaOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOktaOAuth20) GetAuthType() SourceOktaAuthType {
+ return SourceOktaAuthTypeOauth20
+}
+
+func (o *SourceOktaOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceOktaOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceOktaOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceOktaAuthorizationMethodType string
const (
- SourceOktaAuthorizationMethodTypeSourceOktaAuthorizationMethodOAuth20 SourceOktaAuthorizationMethodType = "source-okta_Authorization Method_OAuth2.0"
- SourceOktaAuthorizationMethodTypeSourceOktaAuthorizationMethodAPIToken SourceOktaAuthorizationMethodType = "source-okta_Authorization Method_API Token"
+ SourceOktaAuthorizationMethodTypeSourceOktaOAuth20 SourceOktaAuthorizationMethodType = "source-okta_OAuth2.0"
+ SourceOktaAuthorizationMethodTypeSourceOktaAPIToken SourceOktaAuthorizationMethodType = "source-okta_API Token"
)
type SourceOktaAuthorizationMethod struct {
- SourceOktaAuthorizationMethodOAuth20 *SourceOktaAuthorizationMethodOAuth20
- SourceOktaAuthorizationMethodAPIToken *SourceOktaAuthorizationMethodAPIToken
+ SourceOktaOAuth20 *SourceOktaOAuth20
+ SourceOktaAPIToken *SourceOktaAPIToken
Type SourceOktaAuthorizationMethodType
}
-func CreateSourceOktaAuthorizationMethodSourceOktaAuthorizationMethodOAuth20(sourceOktaAuthorizationMethodOAuth20 SourceOktaAuthorizationMethodOAuth20) SourceOktaAuthorizationMethod {
- typ := SourceOktaAuthorizationMethodTypeSourceOktaAuthorizationMethodOAuth20
+func CreateSourceOktaAuthorizationMethodSourceOktaOAuth20(sourceOktaOAuth20 SourceOktaOAuth20) SourceOktaAuthorizationMethod {
+ typ := SourceOktaAuthorizationMethodTypeSourceOktaOAuth20
return SourceOktaAuthorizationMethod{
- SourceOktaAuthorizationMethodOAuth20: &sourceOktaAuthorizationMethodOAuth20,
- Type: typ,
+ SourceOktaOAuth20: &sourceOktaOAuth20,
+ Type: typ,
}
}
-func CreateSourceOktaAuthorizationMethodSourceOktaAuthorizationMethodAPIToken(sourceOktaAuthorizationMethodAPIToken SourceOktaAuthorizationMethodAPIToken) SourceOktaAuthorizationMethod {
- typ := SourceOktaAuthorizationMethodTypeSourceOktaAuthorizationMethodAPIToken
+func CreateSourceOktaAuthorizationMethodSourceOktaAPIToken(sourceOktaAPIToken SourceOktaAPIToken) SourceOktaAuthorizationMethod {
+ typ := SourceOktaAuthorizationMethodTypeSourceOktaAPIToken
return SourceOktaAuthorizationMethod{
- SourceOktaAuthorizationMethodAPIToken: &sourceOktaAuthorizationMethodAPIToken,
- Type: typ,
+ SourceOktaAPIToken: &sourceOktaAPIToken,
+ Type: typ,
}
}
func (u *SourceOktaAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceOktaAuthorizationMethodAPIToken := new(SourceOktaAuthorizationMethodAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOktaAuthorizationMethodAPIToken); err == nil {
- u.SourceOktaAuthorizationMethodAPIToken = sourceOktaAuthorizationMethodAPIToken
- u.Type = SourceOktaAuthorizationMethodTypeSourceOktaAuthorizationMethodAPIToken
+
+ sourceOktaAPIToken := new(SourceOktaAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceOktaAPIToken, "", true, true); err == nil {
+ u.SourceOktaAPIToken = sourceOktaAPIToken
+ u.Type = SourceOktaAuthorizationMethodTypeSourceOktaAPIToken
return nil
}
- sourceOktaAuthorizationMethodOAuth20 := new(SourceOktaAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOktaAuthorizationMethodOAuth20); err == nil {
- u.SourceOktaAuthorizationMethodOAuth20 = sourceOktaAuthorizationMethodOAuth20
- u.Type = SourceOktaAuthorizationMethodTypeSourceOktaAuthorizationMethodOAuth20
+ sourceOktaOAuth20 := new(SourceOktaOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceOktaOAuth20, "", true, true); err == nil {
+ u.SourceOktaOAuth20 = sourceOktaOAuth20
+ u.Type = SourceOktaAuthorizationMethodTypeSourceOktaOAuth20
return nil
}
@@ -130,46 +183,82 @@ func (u *SourceOktaAuthorizationMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceOktaAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceOktaAuthorizationMethodAPIToken != nil {
- return json.Marshal(u.SourceOktaAuthorizationMethodAPIToken)
+ if u.SourceOktaOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceOktaOAuth20, "", true)
}
- if u.SourceOktaAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceOktaAuthorizationMethodOAuth20)
+ if u.SourceOktaAPIToken != nil {
+ return utils.MarshalJSON(u.SourceOktaAPIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceOktaOkta string
+type Okta string
const (
- SourceOktaOktaOkta SourceOktaOkta = "okta"
+ OktaOkta Okta = "okta"
)
-func (e SourceOktaOkta) ToPointer() *SourceOktaOkta {
+func (e Okta) ToPointer() *Okta {
return &e
}
-func (e *SourceOktaOkta) UnmarshalJSON(data []byte) error {
+func (e *Okta) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "okta":
- *e = SourceOktaOkta(v)
+ *e = Okta(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOktaOkta: %v", v)
+ return fmt.Errorf("invalid value for Okta: %v", v)
}
}
type SourceOkta struct {
Credentials *SourceOktaAuthorizationMethod `json:"credentials,omitempty"`
// The Okta domain. See the docs for instructions on how to find it.
- Domain *string `json:"domain,omitempty"`
- SourceType SourceOktaOkta `json:"sourceType"`
+ Domain *string `json:"domain,omitempty"`
+ sourceType Okta `const:"okta" json:"sourceType"`
// UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. Any data before this date will not be replicated.
StartDate *string `json:"start_date,omitempty"`
}
+
+func (s SourceOkta) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOkta) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOkta) GetCredentials() *SourceOktaAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceOkta) GetDomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Domain
+}
+
+func (o *SourceOkta) GetSourceType() Okta {
+ return OktaOkta
+}
+
+func (o *SourceOkta) GetStartDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoktacreaterequest.go b/internal/sdk/pkg/models/shared/sourceoktacreaterequest.go
old mode 100755
new mode 100644
index e8ba74327..c2ed97323
--- a/internal/sdk/pkg/models/shared/sourceoktacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceoktacreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceOktaCreateRequest struct {
Configuration SourceOkta `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOktaCreateRequest) GetConfiguration() SourceOkta {
+ if o == nil {
+ return SourceOkta{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOktaCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceOktaCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOktaCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceOktaCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoktaputrequest.go b/internal/sdk/pkg/models/shared/sourceoktaputrequest.go
old mode 100755
new mode 100644
index 087c606b6..4739cde04
--- a/internal/sdk/pkg/models/shared/sourceoktaputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceoktaputrequest.go
@@ -7,3 +7,24 @@ type SourceOktaPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOktaPutRequest) GetConfiguration() SourceOktaUpdate {
+ if o == nil {
+ return SourceOktaUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOktaPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOktaPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoktaupdate.go b/internal/sdk/pkg/models/shared/sourceoktaupdate.go
old mode 100755
new mode 100644
index df93dccc7..2315dfd80
--- a/internal/sdk/pkg/models/shared/sourceoktaupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceoktaupdate.go
@@ -3,68 +3,90 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceOktaUpdateAuthorizationMethodAPITokenAuthType string
+type SourceOktaUpdateSchemasAuthType string
const (
- SourceOktaUpdateAuthorizationMethodAPITokenAuthTypeAPIToken SourceOktaUpdateAuthorizationMethodAPITokenAuthType = "api_token"
+ SourceOktaUpdateSchemasAuthTypeAPIToken SourceOktaUpdateSchemasAuthType = "api_token"
)
-func (e SourceOktaUpdateAuthorizationMethodAPITokenAuthType) ToPointer() *SourceOktaUpdateAuthorizationMethodAPITokenAuthType {
+func (e SourceOktaUpdateSchemasAuthType) ToPointer() *SourceOktaUpdateSchemasAuthType {
return &e
}
-func (e *SourceOktaUpdateAuthorizationMethodAPITokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceOktaUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_token":
- *e = SourceOktaUpdateAuthorizationMethodAPITokenAuthType(v)
+ *e = SourceOktaUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOktaUpdateAuthorizationMethodAPITokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceOktaUpdateSchemasAuthType: %v", v)
}
}
-type SourceOktaUpdateAuthorizationMethodAPIToken struct {
+type SourceOktaUpdateAPIToken struct {
// An Okta token. See the docs for instructions on how to generate it.
- APIToken string `json:"api_token"`
- AuthType SourceOktaUpdateAuthorizationMethodAPITokenAuthType `json:"auth_type"`
+ APIToken string `json:"api_token"`
+ authType SourceOktaUpdateSchemasAuthType `const:"api_token" json:"auth_type"`
}
-type SourceOktaUpdateAuthorizationMethodOAuth20AuthType string
+func (s SourceOktaUpdateAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOktaUpdateAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOktaUpdateAPIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceOktaUpdateAPIToken) GetAuthType() SourceOktaUpdateSchemasAuthType {
+ return SourceOktaUpdateSchemasAuthTypeAPIToken
+}
+
+type SourceOktaUpdateAuthType string
const (
- SourceOktaUpdateAuthorizationMethodOAuth20AuthTypeOauth20 SourceOktaUpdateAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceOktaUpdateAuthTypeOauth20 SourceOktaUpdateAuthType = "oauth2.0"
)
-func (e SourceOktaUpdateAuthorizationMethodOAuth20AuthType) ToPointer() *SourceOktaUpdateAuthorizationMethodOAuth20AuthType {
+func (e SourceOktaUpdateAuthType) ToPointer() *SourceOktaUpdateAuthType {
return &e
}
-func (e *SourceOktaUpdateAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceOktaUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceOktaUpdateAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceOktaUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOktaUpdateAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceOktaUpdateAuthType: %v", v)
}
}
-type SourceOktaUpdateAuthorizationMethodOAuth20 struct {
- AuthType SourceOktaUpdateAuthorizationMethodOAuth20AuthType `json:"auth_type"`
+type SourceOktaUpdateOAuth20 struct {
+ authType SourceOktaUpdateAuthType `const:"oauth2.0" json:"auth_type"`
// The Client ID of your OAuth application.
ClientID string `json:"client_id"`
// The Client Secret of your OAuth application.
@@ -73,56 +95,87 @@ type SourceOktaUpdateAuthorizationMethodOAuth20 struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceOktaUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOktaUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOktaUpdateOAuth20) GetAuthType() SourceOktaUpdateAuthType {
+ return SourceOktaUpdateAuthTypeOauth20
+}
+
+func (o *SourceOktaUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceOktaUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceOktaUpdateOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceOktaUpdateAuthorizationMethodType string
const (
- SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAuthorizationMethodOAuth20 SourceOktaUpdateAuthorizationMethodType = "source-okta-update_Authorization Method_OAuth2.0"
- SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAuthorizationMethodAPIToken SourceOktaUpdateAuthorizationMethodType = "source-okta-update_Authorization Method_API Token"
+ SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateOAuth20 SourceOktaUpdateAuthorizationMethodType = "source-okta-update_OAuth2.0"
+ SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAPIToken SourceOktaUpdateAuthorizationMethodType = "source-okta-update_API Token"
)
type SourceOktaUpdateAuthorizationMethod struct {
- SourceOktaUpdateAuthorizationMethodOAuth20 *SourceOktaUpdateAuthorizationMethodOAuth20
- SourceOktaUpdateAuthorizationMethodAPIToken *SourceOktaUpdateAuthorizationMethodAPIToken
+ SourceOktaUpdateOAuth20 *SourceOktaUpdateOAuth20
+ SourceOktaUpdateAPIToken *SourceOktaUpdateAPIToken
Type SourceOktaUpdateAuthorizationMethodType
}
-func CreateSourceOktaUpdateAuthorizationMethodSourceOktaUpdateAuthorizationMethodOAuth20(sourceOktaUpdateAuthorizationMethodOAuth20 SourceOktaUpdateAuthorizationMethodOAuth20) SourceOktaUpdateAuthorizationMethod {
- typ := SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAuthorizationMethodOAuth20
+func CreateSourceOktaUpdateAuthorizationMethodSourceOktaUpdateOAuth20(sourceOktaUpdateOAuth20 SourceOktaUpdateOAuth20) SourceOktaUpdateAuthorizationMethod {
+ typ := SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateOAuth20
return SourceOktaUpdateAuthorizationMethod{
- SourceOktaUpdateAuthorizationMethodOAuth20: &sourceOktaUpdateAuthorizationMethodOAuth20,
- Type: typ,
+ SourceOktaUpdateOAuth20: &sourceOktaUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceOktaUpdateAuthorizationMethodSourceOktaUpdateAuthorizationMethodAPIToken(sourceOktaUpdateAuthorizationMethodAPIToken SourceOktaUpdateAuthorizationMethodAPIToken) SourceOktaUpdateAuthorizationMethod {
- typ := SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAuthorizationMethodAPIToken
+func CreateSourceOktaUpdateAuthorizationMethodSourceOktaUpdateAPIToken(sourceOktaUpdateAPIToken SourceOktaUpdateAPIToken) SourceOktaUpdateAuthorizationMethod {
+ typ := SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAPIToken
return SourceOktaUpdateAuthorizationMethod{
- SourceOktaUpdateAuthorizationMethodAPIToken: &sourceOktaUpdateAuthorizationMethodAPIToken,
- Type: typ,
+ SourceOktaUpdateAPIToken: &sourceOktaUpdateAPIToken,
+ Type: typ,
}
}
func (u *SourceOktaUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceOktaUpdateAuthorizationMethodAPIToken := new(SourceOktaUpdateAuthorizationMethodAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOktaUpdateAuthorizationMethodAPIToken); err == nil {
- u.SourceOktaUpdateAuthorizationMethodAPIToken = sourceOktaUpdateAuthorizationMethodAPIToken
- u.Type = SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAuthorizationMethodAPIToken
+
+ sourceOktaUpdateAPIToken := new(SourceOktaUpdateAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceOktaUpdateAPIToken, "", true, true); err == nil {
+ u.SourceOktaUpdateAPIToken = sourceOktaUpdateAPIToken
+ u.Type = SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAPIToken
return nil
}
- sourceOktaUpdateAuthorizationMethodOAuth20 := new(SourceOktaUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOktaUpdateAuthorizationMethodOAuth20); err == nil {
- u.SourceOktaUpdateAuthorizationMethodOAuth20 = sourceOktaUpdateAuthorizationMethodOAuth20
- u.Type = SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAuthorizationMethodOAuth20
+ sourceOktaUpdateOAuth20 := new(SourceOktaUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceOktaUpdateOAuth20, "", true, true); err == nil {
+ u.SourceOktaUpdateOAuth20 = sourceOktaUpdateOAuth20
+ u.Type = SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateOAuth20
return nil
}
@@ -130,15 +183,15 @@ func (u *SourceOktaUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceOktaUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceOktaUpdateAuthorizationMethodAPIToken != nil {
- return json.Marshal(u.SourceOktaUpdateAuthorizationMethodAPIToken)
+ if u.SourceOktaUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceOktaUpdateOAuth20, "", true)
}
- if u.SourceOktaUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceOktaUpdateAuthorizationMethodOAuth20)
+ if u.SourceOktaUpdateAPIToken != nil {
+ return utils.MarshalJSON(u.SourceOktaUpdateAPIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceOktaUpdate struct {
@@ -148,3 +201,24 @@ type SourceOktaUpdate struct {
// UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. Any data before this date will not be replicated.
StartDate *string `json:"start_date,omitempty"`
}
+
+func (o *SourceOktaUpdate) GetCredentials() *SourceOktaUpdateAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceOktaUpdate) GetDomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Domain
+}
+
+func (o *SourceOktaUpdate) GetStartDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceomnisend.go b/internal/sdk/pkg/models/shared/sourceomnisend.go
old mode 100755
new mode 100644
index 6dce5e8ac..828244a75
--- a/internal/sdk/pkg/models/shared/sourceomnisend.go
+++ b/internal/sdk/pkg/models/shared/sourceomnisend.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceOmnisendOmnisend string
+type Omnisend string
const (
- SourceOmnisendOmnisendOmnisend SourceOmnisendOmnisend = "omnisend"
+ OmnisendOmnisend Omnisend = "omnisend"
)
-func (e SourceOmnisendOmnisend) ToPointer() *SourceOmnisendOmnisend {
+func (e Omnisend) ToPointer() *Omnisend {
return &e
}
-func (e *SourceOmnisendOmnisend) UnmarshalJSON(data []byte) error {
+func (e *Omnisend) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "omnisend":
- *e = SourceOmnisendOmnisend(v)
+ *e = Omnisend(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOmnisendOmnisend: %v", v)
+ return fmt.Errorf("invalid value for Omnisend: %v", v)
}
}
type SourceOmnisend struct {
// API Key
- APIKey string `json:"api_key"`
- SourceType SourceOmnisendOmnisend `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Omnisend `const:"omnisend" json:"sourceType"`
+}
+
+func (s SourceOmnisend) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOmnisend) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOmnisend) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceOmnisend) GetSourceType() Omnisend {
+ return OmnisendOmnisend
}
diff --git a/internal/sdk/pkg/models/shared/sourceomnisendcreaterequest.go b/internal/sdk/pkg/models/shared/sourceomnisendcreaterequest.go
old mode 100755
new mode 100644
index 18bfedad8..80c982cdf
--- a/internal/sdk/pkg/models/shared/sourceomnisendcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceomnisendcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceOmnisendCreateRequest struct {
Configuration SourceOmnisend `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOmnisendCreateRequest) GetConfiguration() SourceOmnisend {
+ if o == nil {
+ return SourceOmnisend{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOmnisendCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceOmnisendCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOmnisendCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceOmnisendCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceomnisendputrequest.go b/internal/sdk/pkg/models/shared/sourceomnisendputrequest.go
old mode 100755
new mode 100644
index 032a3069d..dce42d400
--- a/internal/sdk/pkg/models/shared/sourceomnisendputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceomnisendputrequest.go
@@ -7,3 +7,24 @@ type SourceOmnisendPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOmnisendPutRequest) GetConfiguration() SourceOmnisendUpdate {
+ if o == nil {
+ return SourceOmnisendUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOmnisendPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOmnisendPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceomnisendupdate.go b/internal/sdk/pkg/models/shared/sourceomnisendupdate.go
old mode 100755
new mode 100644
index 3bdd56d80..edfdb3e37
--- a/internal/sdk/pkg/models/shared/sourceomnisendupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceomnisendupdate.go
@@ -6,3 +6,10 @@ type SourceOmnisendUpdate struct {
// API Key
APIKey string `json:"api_key"`
}
+
+func (o *SourceOmnisendUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourceonesignal.go b/internal/sdk/pkg/models/shared/sourceonesignal.go
old mode 100755
new mode 100644
index 9dbb2d768..3b7c5a3aa
--- a/internal/sdk/pkg/models/shared/sourceonesignal.go
+++ b/internal/sdk/pkg/models/shared/sourceonesignal.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -14,27 +15,48 @@ type SourceOnesignalApplications struct {
AppName *string `json:"app_name,omitempty"`
}
-type SourceOnesignalOnesignal string
+func (o *SourceOnesignalApplications) GetAppAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AppAPIKey
+}
+
+func (o *SourceOnesignalApplications) GetAppID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AppID
+}
+
+func (o *SourceOnesignalApplications) GetAppName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AppName
+}
+
+type Onesignal string
const (
- SourceOnesignalOnesignalOnesignal SourceOnesignalOnesignal = "onesignal"
+ OnesignalOnesignal Onesignal = "onesignal"
)
-func (e SourceOnesignalOnesignal) ToPointer() *SourceOnesignalOnesignal {
+func (e Onesignal) ToPointer() *Onesignal {
return &e
}
-func (e *SourceOnesignalOnesignal) UnmarshalJSON(data []byte) error {
+func (e *Onesignal) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "onesignal":
- *e = SourceOnesignalOnesignal(v)
+ *e = Onesignal(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOnesignalOnesignal: %v", v)
+ return fmt.Errorf("invalid value for Onesignal: %v", v)
}
}
@@ -42,10 +64,53 @@ type SourceOnesignal struct {
// Applications keys, see the docs for more information on how to obtain this data
Applications []SourceOnesignalApplications `json:"applications"`
// Comma-separated list of names and the value (sum/count) for the returned outcome data. See the docs for more details
- OutcomeNames string `json:"outcome_names"`
- SourceType SourceOnesignalOnesignal `json:"sourceType"`
+ OutcomeNames string `json:"outcome_names"`
+ sourceType Onesignal `const:"onesignal" json:"sourceType"`
// The date from which you'd like to replicate data for OneSignal API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate time.Time `json:"start_date"`
// OneSignal User Auth Key, see the docs for more information on how to obtain this key.
UserAuthKey string `json:"user_auth_key"`
}
+
+func (s SourceOnesignal) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOnesignal) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOnesignal) GetApplications() []SourceOnesignalApplications {
+ if o == nil {
+ return []SourceOnesignalApplications{}
+ }
+ return o.Applications
+}
+
+func (o *SourceOnesignal) GetOutcomeNames() string {
+ if o == nil {
+ return ""
+ }
+ return o.OutcomeNames
+}
+
+func (o *SourceOnesignal) GetSourceType() Onesignal {
+ return OnesignalOnesignal
+}
+
+func (o *SourceOnesignal) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceOnesignal) GetUserAuthKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.UserAuthKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourceonesignalcreaterequest.go b/internal/sdk/pkg/models/shared/sourceonesignalcreaterequest.go
old mode 100755
new mode 100644
index dbf890dd0..67ec545e8
--- a/internal/sdk/pkg/models/shared/sourceonesignalcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceonesignalcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceOnesignalCreateRequest struct {
Configuration SourceOnesignal `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOnesignalCreateRequest) GetConfiguration() SourceOnesignal {
+ if o == nil {
+ return SourceOnesignal{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOnesignalCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceOnesignalCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOnesignalCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceOnesignalCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceonesignalputrequest.go b/internal/sdk/pkg/models/shared/sourceonesignalputrequest.go
old mode 100755
new mode 100644
index c389af7e5..764ed3b57
--- a/internal/sdk/pkg/models/shared/sourceonesignalputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceonesignalputrequest.go
@@ -7,3 +7,24 @@ type SourceOnesignalPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOnesignalPutRequest) GetConfiguration() SourceOnesignalUpdate {
+ if o == nil {
+ return SourceOnesignalUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOnesignalPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOnesignalPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceonesignalupdate.go b/internal/sdk/pkg/models/shared/sourceonesignalupdate.go
old mode 100755
new mode 100644
index f5cc326da..3b81eee52
--- a/internal/sdk/pkg/models/shared/sourceonesignalupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceonesignalupdate.go
@@ -3,18 +3,40 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceOnesignalUpdateApplications struct {
+type Applications struct {
AppAPIKey string `json:"app_api_key"`
AppID string `json:"app_id"`
AppName *string `json:"app_name,omitempty"`
}
+func (o *Applications) GetAppAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AppAPIKey
+}
+
+func (o *Applications) GetAppID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AppID
+}
+
+func (o *Applications) GetAppName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AppName
+}
+
type SourceOnesignalUpdate struct {
// Applications keys, see the docs for more information on how to obtain this data
- Applications []SourceOnesignalUpdateApplications `json:"applications"`
+ Applications []Applications `json:"applications"`
// Comma-separated list of names and the value (sum/count) for the returned outcome data. See the docs for more details
OutcomeNames string `json:"outcome_names"`
// The date from which you'd like to replicate data for OneSignal API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
@@ -22,3 +44,42 @@ type SourceOnesignalUpdate struct {
// OneSignal User Auth Key, see the docs for more information on how to obtain this key.
UserAuthKey string `json:"user_auth_key"`
}
+
+func (s SourceOnesignalUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOnesignalUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOnesignalUpdate) GetApplications() []Applications {
+ if o == nil {
+ return []Applications{}
+ }
+ return o.Applications
+}
+
+func (o *SourceOnesignalUpdate) GetOutcomeNames() string {
+ if o == nil {
+ return ""
+ }
+ return o.OutcomeNames
+}
+
+func (o *SourceOnesignalUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceOnesignalUpdate) GetUserAuthKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.UserAuthKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoracle.go b/internal/sdk/pkg/models/shared/sourceoracle.go
old mode 100755
new mode 100644
index 05988f067..944122851
--- a/internal/sdk/pkg/models/shared/sourceoracle.go
+++ b/internal/sdk/pkg/models/shared/sourceoracle.go
@@ -3,122 +3,161 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceOracleConnectBySystemIDSIDConnectionType string
+type SourceOracleSchemasConnectionType string
const (
- SourceOracleConnectBySystemIDSIDConnectionTypeSid SourceOracleConnectBySystemIDSIDConnectionType = "sid"
+ SourceOracleSchemasConnectionTypeSid SourceOracleSchemasConnectionType = "sid"
)
-func (e SourceOracleConnectBySystemIDSIDConnectionType) ToPointer() *SourceOracleConnectBySystemIDSIDConnectionType {
+func (e SourceOracleSchemasConnectionType) ToPointer() *SourceOracleSchemasConnectionType {
return &e
}
-func (e *SourceOracleConnectBySystemIDSIDConnectionType) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleSchemasConnectionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sid":
- *e = SourceOracleConnectBySystemIDSIDConnectionType(v)
+ *e = SourceOracleSchemasConnectionType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleConnectBySystemIDSIDConnectionType: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleSchemasConnectionType: %v", v)
}
}
-// SourceOracleConnectBySystemIDSID - Use SID (Oracle System Identifier)
-type SourceOracleConnectBySystemIDSID struct {
- ConnectionType *SourceOracleConnectBySystemIDSIDConnectionType `json:"connection_type,omitempty"`
- Sid string `json:"sid"`
+// SourceOracleSystemIDSID - Use SID (Oracle System Identifier)
+type SourceOracleSystemIDSID struct {
+ connectionType *SourceOracleSchemasConnectionType `const:"sid" json:"connection_type"`
+ Sid string `json:"sid"`
}
-type SourceOracleConnectByServiceNameConnectionType string
+func (s SourceOracleSystemIDSID) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOracleSystemIDSID) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOracleSystemIDSID) GetConnectionType() *SourceOracleSchemasConnectionType {
+ return SourceOracleSchemasConnectionTypeSid.ToPointer()
+}
+
+func (o *SourceOracleSystemIDSID) GetSid() string {
+ if o == nil {
+ return ""
+ }
+ return o.Sid
+}
+
+type SourceOracleConnectionType string
const (
- SourceOracleConnectByServiceNameConnectionTypeServiceName SourceOracleConnectByServiceNameConnectionType = "service_name"
+ SourceOracleConnectionTypeServiceName SourceOracleConnectionType = "service_name"
)
-func (e SourceOracleConnectByServiceNameConnectionType) ToPointer() *SourceOracleConnectByServiceNameConnectionType {
+func (e SourceOracleConnectionType) ToPointer() *SourceOracleConnectionType {
return &e
}
-func (e *SourceOracleConnectByServiceNameConnectionType) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleConnectionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "service_name":
- *e = SourceOracleConnectByServiceNameConnectionType(v)
+ *e = SourceOracleConnectionType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleConnectByServiceNameConnectionType: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleConnectionType: %v", v)
+ }
+}
+
+// SourceOracleServiceName - Use service name
+type SourceOracleServiceName struct {
+ connectionType *SourceOracleConnectionType `const:"service_name" json:"connection_type"`
+ ServiceName string `json:"service_name"`
+}
+
+func (s SourceOracleServiceName) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOracleServiceName) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceOracleConnectByServiceName - Use service name
-type SourceOracleConnectByServiceName struct {
- ConnectionType *SourceOracleConnectByServiceNameConnectionType `json:"connection_type,omitempty"`
- ServiceName string `json:"service_name"`
+func (o *SourceOracleServiceName) GetConnectionType() *SourceOracleConnectionType {
+ return SourceOracleConnectionTypeServiceName.ToPointer()
+}
+
+func (o *SourceOracleServiceName) GetServiceName() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServiceName
}
type SourceOracleConnectByType string
const (
- SourceOracleConnectByTypeSourceOracleConnectByServiceName SourceOracleConnectByType = "source-oracle_Connect by_Service name"
- SourceOracleConnectByTypeSourceOracleConnectBySystemIDSID SourceOracleConnectByType = "source-oracle_Connect by_System ID (SID)"
+ SourceOracleConnectByTypeSourceOracleServiceName SourceOracleConnectByType = "source-oracle_Service name"
+ SourceOracleConnectByTypeSourceOracleSystemIDSID SourceOracleConnectByType = "source-oracle_System ID (SID)"
)
type SourceOracleConnectBy struct {
- SourceOracleConnectByServiceName *SourceOracleConnectByServiceName
- SourceOracleConnectBySystemIDSID *SourceOracleConnectBySystemIDSID
+ SourceOracleServiceName *SourceOracleServiceName
+ SourceOracleSystemIDSID *SourceOracleSystemIDSID
Type SourceOracleConnectByType
}
-func CreateSourceOracleConnectBySourceOracleConnectByServiceName(sourceOracleConnectByServiceName SourceOracleConnectByServiceName) SourceOracleConnectBy {
- typ := SourceOracleConnectByTypeSourceOracleConnectByServiceName
+func CreateSourceOracleConnectBySourceOracleServiceName(sourceOracleServiceName SourceOracleServiceName) SourceOracleConnectBy {
+ typ := SourceOracleConnectByTypeSourceOracleServiceName
return SourceOracleConnectBy{
- SourceOracleConnectByServiceName: &sourceOracleConnectByServiceName,
- Type: typ,
+ SourceOracleServiceName: &sourceOracleServiceName,
+ Type: typ,
}
}
-func CreateSourceOracleConnectBySourceOracleConnectBySystemIDSID(sourceOracleConnectBySystemIDSID SourceOracleConnectBySystemIDSID) SourceOracleConnectBy {
- typ := SourceOracleConnectByTypeSourceOracleConnectBySystemIDSID
+func CreateSourceOracleConnectBySourceOracleSystemIDSID(sourceOracleSystemIDSID SourceOracleSystemIDSID) SourceOracleConnectBy {
+ typ := SourceOracleConnectByTypeSourceOracleSystemIDSID
return SourceOracleConnectBy{
- SourceOracleConnectBySystemIDSID: &sourceOracleConnectBySystemIDSID,
- Type: typ,
+ SourceOracleSystemIDSID: &sourceOracleSystemIDSID,
+ Type: typ,
}
}
func (u *SourceOracleConnectBy) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceOracleConnectByServiceName := new(SourceOracleConnectByServiceName)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleConnectByServiceName); err == nil {
- u.SourceOracleConnectByServiceName = sourceOracleConnectByServiceName
- u.Type = SourceOracleConnectByTypeSourceOracleConnectByServiceName
+
+ sourceOracleServiceName := new(SourceOracleServiceName)
+ if err := utils.UnmarshalJSON(data, &sourceOracleServiceName, "", true, true); err == nil {
+ u.SourceOracleServiceName = sourceOracleServiceName
+ u.Type = SourceOracleConnectByTypeSourceOracleServiceName
return nil
}
- sourceOracleConnectBySystemIDSID := new(SourceOracleConnectBySystemIDSID)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleConnectBySystemIDSID); err == nil {
- u.SourceOracleConnectBySystemIDSID = sourceOracleConnectBySystemIDSID
- u.Type = SourceOracleConnectByTypeSourceOracleConnectBySystemIDSID
+ sourceOracleSystemIDSID := new(SourceOracleSystemIDSID)
+ if err := utils.UnmarshalJSON(data, &sourceOracleSystemIDSID, "", true, true); err == nil {
+ u.SourceOracleSystemIDSID = sourceOracleSystemIDSID
+ u.Type = SourceOracleConnectByTypeSourceOracleSystemIDSID
return nil
}
@@ -126,62 +165,84 @@ func (u *SourceOracleConnectBy) UnmarshalJSON(data []byte) error {
}
func (u SourceOracleConnectBy) MarshalJSON() ([]byte, error) {
- if u.SourceOracleConnectByServiceName != nil {
- return json.Marshal(u.SourceOracleConnectByServiceName)
+ if u.SourceOracleServiceName != nil {
+ return utils.MarshalJSON(u.SourceOracleServiceName, "", true)
}
- if u.SourceOracleConnectBySystemIDSID != nil {
- return json.Marshal(u.SourceOracleConnectBySystemIDSID)
+ if u.SourceOracleSystemIDSID != nil {
+ return utils.MarshalJSON(u.SourceOracleSystemIDSID, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod string
+type SourceOracleSchemasEncryptionMethod string
const (
- SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethodEncryptedVerifyCertificate SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod = "encrypted_verify_certificate"
+ SourceOracleSchemasEncryptionMethodEncryptedVerifyCertificate SourceOracleSchemasEncryptionMethod = "encrypted_verify_certificate"
)
-func (e SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod) ToPointer() *SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod {
+func (e SourceOracleSchemasEncryptionMethod) ToPointer() *SourceOracleSchemasEncryptionMethod {
return &e
}
-func (e *SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleSchemasEncryptionMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "encrypted_verify_certificate":
- *e = SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod(v)
+ *e = SourceOracleSchemasEncryptionMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleSchemasEncryptionMethod: %v", v)
}
}
-// SourceOracleEncryptionTLSEncryptedVerifyCertificate - Verify and use the certificate provided by the server.
-type SourceOracleEncryptionTLSEncryptedVerifyCertificate struct {
- EncryptionMethod SourceOracleEncryptionTLSEncryptedVerifyCertificateEncryptionMethod `json:"encryption_method"`
+// SourceOracleTLSEncryptedVerifyCertificate - Verify and use the certificate provided by the server.
+type SourceOracleTLSEncryptedVerifyCertificate struct {
+ encryptionMethod *SourceOracleSchemasEncryptionMethod `const:"encrypted_verify_certificate" json:"encryption_method"`
// Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.
SslCertificate string `json:"ssl_certificate"`
}
-// SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm - This parameter defines what encryption algorithm is used.
-type SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm string
+func (s SourceOracleTLSEncryptedVerifyCertificate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOracleTLSEncryptedVerifyCertificate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOracleTLSEncryptedVerifyCertificate) GetEncryptionMethod() *SourceOracleSchemasEncryptionMethod {
+ return SourceOracleSchemasEncryptionMethodEncryptedVerifyCertificate.ToPointer()
+}
+
+func (o *SourceOracleTLSEncryptedVerifyCertificate) GetSslCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.SslCertificate
+}
+
+// SourceOracleEncryptionAlgorithm - This parameter defines what encryption algorithm is used.
+type SourceOracleEncryptionAlgorithm string
const (
- SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithmAes256 SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm = "AES256"
- SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithmRc456 SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm = "RC4_56"
- SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithmThreeDes168 SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm = "3DES168"
+ SourceOracleEncryptionAlgorithmAes256 SourceOracleEncryptionAlgorithm = "AES256"
+ SourceOracleEncryptionAlgorithmRc456 SourceOracleEncryptionAlgorithm = "RC4_56"
+ SourceOracleEncryptionAlgorithmThreeDes168 SourceOracleEncryptionAlgorithm = "3DES168"
)
-func (e SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm) ToPointer() *SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm {
+func (e SourceOracleEncryptionAlgorithm) ToPointer() *SourceOracleEncryptionAlgorithm {
return &e
}
-func (e *SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleEncryptionAlgorithm) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -192,94 +253,111 @@ func (e *SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm) Un
case "RC4_56":
fallthrough
case "3DES168":
- *e = SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm(v)
+ *e = SourceOracleEncryptionAlgorithm(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleEncryptionAlgorithm: %v", v)
}
}
-type SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod string
+type SourceOracleEncryptionMethod string
const (
- SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethodClientNne SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod = "client_nne"
+ SourceOracleEncryptionMethodClientNne SourceOracleEncryptionMethod = "client_nne"
)
-func (e SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod) ToPointer() *SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod {
+func (e SourceOracleEncryptionMethod) ToPointer() *SourceOracleEncryptionMethod {
return &e
}
-func (e *SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleEncryptionMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "client_nne":
- *e = SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod(v)
+ *e = SourceOracleEncryptionMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleEncryptionMethod: %v", v)
}
}
-// SourceOracleEncryptionNativeNetworkEncryptionNNE - The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.
-type SourceOracleEncryptionNativeNetworkEncryptionNNE struct {
+// SourceOracleNativeNetworkEncryptionNNE - The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.
+type SourceOracleNativeNetworkEncryptionNNE struct {
// This parameter defines what encryption algorithm is used.
- EncryptionAlgorithm *SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm `json:"encryption_algorithm,omitempty"`
- EncryptionMethod SourceOracleEncryptionNativeNetworkEncryptionNNEEncryptionMethod `json:"encryption_method"`
+ EncryptionAlgorithm *SourceOracleEncryptionAlgorithm `default:"AES256" json:"encryption_algorithm"`
+ encryptionMethod *SourceOracleEncryptionMethod `const:"client_nne" json:"encryption_method"`
+}
+
+func (s SourceOracleNativeNetworkEncryptionNNE) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOracleNativeNetworkEncryptionNNE) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOracleNativeNetworkEncryptionNNE) GetEncryptionAlgorithm() *SourceOracleEncryptionAlgorithm {
+ if o == nil {
+ return nil
+ }
+ return o.EncryptionAlgorithm
+}
+
+func (o *SourceOracleNativeNetworkEncryptionNNE) GetEncryptionMethod() *SourceOracleEncryptionMethod {
+ return SourceOracleEncryptionMethodClientNne.ToPointer()
}
type SourceOracleEncryptionType string
const (
- SourceOracleEncryptionTypeSourceOracleEncryptionNativeNetworkEncryptionNNE SourceOracleEncryptionType = "source-oracle_Encryption_Native Network Encryption (NNE)"
- SourceOracleEncryptionTypeSourceOracleEncryptionTLSEncryptedVerifyCertificate SourceOracleEncryptionType = "source-oracle_Encryption_TLS Encrypted (verify certificate)"
+ SourceOracleEncryptionTypeSourceOracleNativeNetworkEncryptionNNE SourceOracleEncryptionType = "source-oracle_Native Network Encryption (NNE)"
+ SourceOracleEncryptionTypeSourceOracleTLSEncryptedVerifyCertificate SourceOracleEncryptionType = "source-oracle_TLS Encrypted (verify certificate)"
)
type SourceOracleEncryption struct {
- SourceOracleEncryptionNativeNetworkEncryptionNNE *SourceOracleEncryptionNativeNetworkEncryptionNNE
- SourceOracleEncryptionTLSEncryptedVerifyCertificate *SourceOracleEncryptionTLSEncryptedVerifyCertificate
+ SourceOracleNativeNetworkEncryptionNNE *SourceOracleNativeNetworkEncryptionNNE
+ SourceOracleTLSEncryptedVerifyCertificate *SourceOracleTLSEncryptedVerifyCertificate
Type SourceOracleEncryptionType
}
-func CreateSourceOracleEncryptionSourceOracleEncryptionNativeNetworkEncryptionNNE(sourceOracleEncryptionNativeNetworkEncryptionNNE SourceOracleEncryptionNativeNetworkEncryptionNNE) SourceOracleEncryption {
- typ := SourceOracleEncryptionTypeSourceOracleEncryptionNativeNetworkEncryptionNNE
+func CreateSourceOracleEncryptionSourceOracleNativeNetworkEncryptionNNE(sourceOracleNativeNetworkEncryptionNNE SourceOracleNativeNetworkEncryptionNNE) SourceOracleEncryption {
+ typ := SourceOracleEncryptionTypeSourceOracleNativeNetworkEncryptionNNE
return SourceOracleEncryption{
- SourceOracleEncryptionNativeNetworkEncryptionNNE: &sourceOracleEncryptionNativeNetworkEncryptionNNE,
- Type: typ,
+ SourceOracleNativeNetworkEncryptionNNE: &sourceOracleNativeNetworkEncryptionNNE,
+ Type: typ,
}
}
-func CreateSourceOracleEncryptionSourceOracleEncryptionTLSEncryptedVerifyCertificate(sourceOracleEncryptionTLSEncryptedVerifyCertificate SourceOracleEncryptionTLSEncryptedVerifyCertificate) SourceOracleEncryption {
- typ := SourceOracleEncryptionTypeSourceOracleEncryptionTLSEncryptedVerifyCertificate
+func CreateSourceOracleEncryptionSourceOracleTLSEncryptedVerifyCertificate(sourceOracleTLSEncryptedVerifyCertificate SourceOracleTLSEncryptedVerifyCertificate) SourceOracleEncryption {
+ typ := SourceOracleEncryptionTypeSourceOracleTLSEncryptedVerifyCertificate
return SourceOracleEncryption{
- SourceOracleEncryptionTLSEncryptedVerifyCertificate: &sourceOracleEncryptionTLSEncryptedVerifyCertificate,
+ SourceOracleTLSEncryptedVerifyCertificate: &sourceOracleTLSEncryptedVerifyCertificate,
Type: typ,
}
}
func (u *SourceOracleEncryption) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceOracleEncryptionNativeNetworkEncryptionNNE := new(SourceOracleEncryptionNativeNetworkEncryptionNNE)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleEncryptionNativeNetworkEncryptionNNE); err == nil {
- u.SourceOracleEncryptionNativeNetworkEncryptionNNE = sourceOracleEncryptionNativeNetworkEncryptionNNE
- u.Type = SourceOracleEncryptionTypeSourceOracleEncryptionNativeNetworkEncryptionNNE
+
+ sourceOracleNativeNetworkEncryptionNNE := new(SourceOracleNativeNetworkEncryptionNNE)
+ if err := utils.UnmarshalJSON(data, &sourceOracleNativeNetworkEncryptionNNE, "", true, true); err == nil {
+ u.SourceOracleNativeNetworkEncryptionNNE = sourceOracleNativeNetworkEncryptionNNE
+ u.Type = SourceOracleEncryptionTypeSourceOracleNativeNetworkEncryptionNNE
return nil
}
- sourceOracleEncryptionTLSEncryptedVerifyCertificate := new(SourceOracleEncryptionTLSEncryptedVerifyCertificate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleEncryptionTLSEncryptedVerifyCertificate); err == nil {
- u.SourceOracleEncryptionTLSEncryptedVerifyCertificate = sourceOracleEncryptionTLSEncryptedVerifyCertificate
- u.Type = SourceOracleEncryptionTypeSourceOracleEncryptionTLSEncryptedVerifyCertificate
+ sourceOracleTLSEncryptedVerifyCertificate := new(SourceOracleTLSEncryptedVerifyCertificate)
+ if err := utils.UnmarshalJSON(data, &sourceOracleTLSEncryptedVerifyCertificate, "", true, true); err == nil {
+ u.SourceOracleTLSEncryptedVerifyCertificate = sourceOracleTLSEncryptedVerifyCertificate
+ u.Type = SourceOracleEncryptionTypeSourceOracleTLSEncryptedVerifyCertificate
return nil
}
@@ -287,15 +365,15 @@ func (u *SourceOracleEncryption) UnmarshalJSON(data []byte) error {
}
func (u SourceOracleEncryption) MarshalJSON() ([]byte, error) {
- if u.SourceOracleEncryptionNativeNetworkEncryptionNNE != nil {
- return json.Marshal(u.SourceOracleEncryptionNativeNetworkEncryptionNNE)
+ if u.SourceOracleNativeNetworkEncryptionNNE != nil {
+ return utils.MarshalJSON(u.SourceOracleNativeNetworkEncryptionNNE, "", true)
}
- if u.SourceOracleEncryptionTLSEncryptedVerifyCertificate != nil {
- return json.Marshal(u.SourceOracleEncryptionTLSEncryptedVerifyCertificate)
+ if u.SourceOracleTLSEncryptedVerifyCertificate != nil {
+ return utils.MarshalJSON(u.SourceOracleTLSEncryptedVerifyCertificate, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceOracleOracle string
@@ -322,185 +400,279 @@ func (e *SourceOracleOracle) UnmarshalJSON(data []byte) error {
}
}
-// SourceOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type SourceOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// SourceOracleSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type SourceOracleSchemasTunnelMethodTunnelMethod string
const (
- SourceOracleSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth SourceOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ SourceOracleSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourceOracleSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourceOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *SourceOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e SourceOracleSchemasTunnelMethodTunnelMethod) ToPointer() *SourceOracleSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *SourceOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourceOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = SourceOracleSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// SourceOracleSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceOracleSSHTunnelMethodPasswordAuthentication struct {
+// SourceOraclePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceOraclePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourceOracleSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceOracleSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourceOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourceOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourceOraclePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOraclePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOraclePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceOraclePasswordAuthentication) GetTunnelMethod() SourceOracleSchemasTunnelMethodTunnelMethod {
+ return SourceOracleSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourceOraclePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceOraclePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourceOraclePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// SourceOracleSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type SourceOracleSchemasTunnelMethod string
const (
- SourceOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth SourceOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ SourceOracleSchemasTunnelMethodSSHKeyAuth SourceOracleSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e SourceOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *SourceOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e SourceOracleSchemasTunnelMethod) ToPointer() *SourceOracleSchemasTunnelMethod {
return &e
}
-func (e *SourceOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourceOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = SourceOracleSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleSchemasTunnelMethod: %v", v)
}
}
-// SourceOracleSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceOracleSSHTunnelMethodSSHKeyAuthentication struct {
+// SourceOracleSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceOracleSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourceOracleSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceOracleSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourceOracleSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourceOracleSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourceOracleSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOracleSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOracleSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourceOracleSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceOracleSSHKeyAuthentication) GetTunnelMethod() SourceOracleSchemasTunnelMethod {
+ return SourceOracleSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourceOracleSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceOracleSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourceOracleTunnelMethod - No ssh tunnel needed to connect to database
+type SourceOracleTunnelMethod string
const (
- SourceOracleSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourceOracleSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourceOracleTunnelMethodNoTunnel SourceOracleTunnelMethod = "NO_TUNNEL"
)
-func (e SourceOracleSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourceOracleSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourceOracleTunnelMethod) ToPointer() *SourceOracleTunnelMethod {
return &e
}
-func (e *SourceOracleSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourceOracleSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourceOracleTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleTunnelMethod: %v", v)
}
}
-// SourceOracleSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceOracleSSHTunnelMethodNoTunnel struct {
+// SourceOracleNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceOracleNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourceOracleSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceOracleTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourceOracleNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOracleNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOracleNoTunnel) GetTunnelMethod() SourceOracleTunnelMethod {
+ return SourceOracleTunnelMethodNoTunnel
}
type SourceOracleSSHTunnelMethodType string
const (
- SourceOracleSSHTunnelMethodTypeSourceOracleSSHTunnelMethodNoTunnel SourceOracleSSHTunnelMethodType = "source-oracle_SSH Tunnel Method_No Tunnel"
- SourceOracleSSHTunnelMethodTypeSourceOracleSSHTunnelMethodSSHKeyAuthentication SourceOracleSSHTunnelMethodType = "source-oracle_SSH Tunnel Method_SSH Key Authentication"
- SourceOracleSSHTunnelMethodTypeSourceOracleSSHTunnelMethodPasswordAuthentication SourceOracleSSHTunnelMethodType = "source-oracle_SSH Tunnel Method_Password Authentication"
+ SourceOracleSSHTunnelMethodTypeSourceOracleNoTunnel SourceOracleSSHTunnelMethodType = "source-oracle_No Tunnel"
+ SourceOracleSSHTunnelMethodTypeSourceOracleSSHKeyAuthentication SourceOracleSSHTunnelMethodType = "source-oracle_SSH Key Authentication"
+ SourceOracleSSHTunnelMethodTypeSourceOraclePasswordAuthentication SourceOracleSSHTunnelMethodType = "source-oracle_Password Authentication"
)
type SourceOracleSSHTunnelMethod struct {
- SourceOracleSSHTunnelMethodNoTunnel *SourceOracleSSHTunnelMethodNoTunnel
- SourceOracleSSHTunnelMethodSSHKeyAuthentication *SourceOracleSSHTunnelMethodSSHKeyAuthentication
- SourceOracleSSHTunnelMethodPasswordAuthentication *SourceOracleSSHTunnelMethodPasswordAuthentication
+ SourceOracleNoTunnel *SourceOracleNoTunnel
+ SourceOracleSSHKeyAuthentication *SourceOracleSSHKeyAuthentication
+ SourceOraclePasswordAuthentication *SourceOraclePasswordAuthentication
Type SourceOracleSSHTunnelMethodType
}
-func CreateSourceOracleSSHTunnelMethodSourceOracleSSHTunnelMethodNoTunnel(sourceOracleSSHTunnelMethodNoTunnel SourceOracleSSHTunnelMethodNoTunnel) SourceOracleSSHTunnelMethod {
- typ := SourceOracleSSHTunnelMethodTypeSourceOracleSSHTunnelMethodNoTunnel
+func CreateSourceOracleSSHTunnelMethodSourceOracleNoTunnel(sourceOracleNoTunnel SourceOracleNoTunnel) SourceOracleSSHTunnelMethod {
+ typ := SourceOracleSSHTunnelMethodTypeSourceOracleNoTunnel
return SourceOracleSSHTunnelMethod{
- SourceOracleSSHTunnelMethodNoTunnel: &sourceOracleSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourceOracleNoTunnel: &sourceOracleNoTunnel,
+ Type: typ,
}
}
-func CreateSourceOracleSSHTunnelMethodSourceOracleSSHTunnelMethodSSHKeyAuthentication(sourceOracleSSHTunnelMethodSSHKeyAuthentication SourceOracleSSHTunnelMethodSSHKeyAuthentication) SourceOracleSSHTunnelMethod {
- typ := SourceOracleSSHTunnelMethodTypeSourceOracleSSHTunnelMethodSSHKeyAuthentication
+func CreateSourceOracleSSHTunnelMethodSourceOracleSSHKeyAuthentication(sourceOracleSSHKeyAuthentication SourceOracleSSHKeyAuthentication) SourceOracleSSHTunnelMethod {
+ typ := SourceOracleSSHTunnelMethodTypeSourceOracleSSHKeyAuthentication
return SourceOracleSSHTunnelMethod{
- SourceOracleSSHTunnelMethodSSHKeyAuthentication: &sourceOracleSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ SourceOracleSSHKeyAuthentication: &sourceOracleSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateSourceOracleSSHTunnelMethodSourceOracleSSHTunnelMethodPasswordAuthentication(sourceOracleSSHTunnelMethodPasswordAuthentication SourceOracleSSHTunnelMethodPasswordAuthentication) SourceOracleSSHTunnelMethod {
- typ := SourceOracleSSHTunnelMethodTypeSourceOracleSSHTunnelMethodPasswordAuthentication
+func CreateSourceOracleSSHTunnelMethodSourceOraclePasswordAuthentication(sourceOraclePasswordAuthentication SourceOraclePasswordAuthentication) SourceOracleSSHTunnelMethod {
+ typ := SourceOracleSSHTunnelMethodTypeSourceOraclePasswordAuthentication
return SourceOracleSSHTunnelMethod{
- SourceOracleSSHTunnelMethodPasswordAuthentication: &sourceOracleSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ SourceOraclePasswordAuthentication: &sourceOraclePasswordAuthentication,
+ Type: typ,
}
}
func (u *SourceOracleSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceOracleSSHTunnelMethodNoTunnel := new(SourceOracleSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleSSHTunnelMethodNoTunnel); err == nil {
- u.SourceOracleSSHTunnelMethodNoTunnel = sourceOracleSSHTunnelMethodNoTunnel
- u.Type = SourceOracleSSHTunnelMethodTypeSourceOracleSSHTunnelMethodNoTunnel
+
+ sourceOracleNoTunnel := new(SourceOracleNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourceOracleNoTunnel, "", true, true); err == nil {
+ u.SourceOracleNoTunnel = sourceOracleNoTunnel
+ u.Type = SourceOracleSSHTunnelMethodTypeSourceOracleNoTunnel
return nil
}
- sourceOracleSSHTunnelMethodSSHKeyAuthentication := new(SourceOracleSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourceOracleSSHTunnelMethodSSHKeyAuthentication = sourceOracleSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourceOracleSSHTunnelMethodTypeSourceOracleSSHTunnelMethodSSHKeyAuthentication
+ sourceOracleSSHKeyAuthentication := new(SourceOracleSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceOracleSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceOracleSSHKeyAuthentication = sourceOracleSSHKeyAuthentication
+ u.Type = SourceOracleSSHTunnelMethodTypeSourceOracleSSHKeyAuthentication
return nil
}
- sourceOracleSSHTunnelMethodPasswordAuthentication := new(SourceOracleSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourceOracleSSHTunnelMethodPasswordAuthentication = sourceOracleSSHTunnelMethodPasswordAuthentication
- u.Type = SourceOracleSSHTunnelMethodTypeSourceOracleSSHTunnelMethodPasswordAuthentication
+ sourceOraclePasswordAuthentication := new(SourceOraclePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceOraclePasswordAuthentication, "", true, true); err == nil {
+ u.SourceOraclePasswordAuthentication = sourceOraclePasswordAuthentication
+ u.Type = SourceOracleSSHTunnelMethodTypeSourceOraclePasswordAuthentication
return nil
}
@@ -508,19 +680,19 @@ func (u *SourceOracleSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceOracleSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourceOracleSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourceOracleSSHTunnelMethodNoTunnel)
+ if u.SourceOracleNoTunnel != nil {
+ return utils.MarshalJSON(u.SourceOracleNoTunnel, "", true)
}
- if u.SourceOracleSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceOracleSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourceOracleSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceOracleSSHKeyAuthentication, "", true)
}
- if u.SourceOracleSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourceOracleSSHTunnelMethodPasswordAuthentication)
+ if u.SourceOraclePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceOraclePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceOracle struct {
@@ -538,12 +710,90 @@ type SourceOracle struct {
// Oracle Corporations recommends the following port numbers:
// 1521 - Default listening port for client connections to the listener.
// 2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL
- Port int64 `json:"port"`
+ Port *int64 `default:"1521" json:"port"`
// The list of schemas to sync from. Defaults to user. Case sensitive.
Schemas []string `json:"schemas,omitempty"`
- SourceType SourceOracleOracle `json:"sourceType"`
+ sourceType SourceOracleOracle `const:"oracle" json:"sourceType"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
TunnelMethod *SourceOracleSSHTunnelMethod `json:"tunnel_method,omitempty"`
// The username which is used to access the database.
Username string `json:"username"`
}
+
+func (s SourceOracle) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOracle) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOracle) GetConnectionData() *SourceOracleConnectBy {
+ if o == nil {
+ return nil
+ }
+ return o.ConnectionData
+}
+
+func (o *SourceOracle) GetEncryption() SourceOracleEncryption {
+ if o == nil {
+ return SourceOracleEncryption{}
+ }
+ return o.Encryption
+}
+
+func (o *SourceOracle) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceOracle) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceOracle) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceOracle) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceOracle) GetSchemas() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Schemas
+}
+
+func (o *SourceOracle) GetSourceType() SourceOracleOracle {
+ return SourceOracleOracleOracle
+}
+
+func (o *SourceOracle) GetTunnelMethod() *SourceOracleSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourceOracle) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoraclecreaterequest.go b/internal/sdk/pkg/models/shared/sourceoraclecreaterequest.go
old mode 100755
new mode 100644
index 829d3b081..8564ccdfc
--- a/internal/sdk/pkg/models/shared/sourceoraclecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceoraclecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceOracleCreateRequest struct {
Configuration SourceOracle `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOracleCreateRequest) GetConfiguration() SourceOracle {
+ if o == nil {
+ return SourceOracle{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOracleCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceOracleCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOracleCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceOracleCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoracleputrequest.go b/internal/sdk/pkg/models/shared/sourceoracleputrequest.go
old mode 100755
new mode 100644
index 4ffe4292c..86816832b
--- a/internal/sdk/pkg/models/shared/sourceoracleputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceoracleputrequest.go
@@ -7,3 +7,24 @@ type SourceOraclePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOraclePutRequest) GetConfiguration() SourceOracleUpdate {
+ if o == nil {
+ return SourceOracleUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOraclePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOraclePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoracleupdate.go b/internal/sdk/pkg/models/shared/sourceoracleupdate.go
old mode 100755
new mode 100644
index 9ec541de4..92681d044
--- a/internal/sdk/pkg/models/shared/sourceoracleupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceoracleupdate.go
@@ -3,185 +3,246 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceOracleUpdateConnectBySystemIDSIDConnectionType string
+type SourceOracleUpdateConnectionType string
const (
- SourceOracleUpdateConnectBySystemIDSIDConnectionTypeSid SourceOracleUpdateConnectBySystemIDSIDConnectionType = "sid"
+ SourceOracleUpdateConnectionTypeSid SourceOracleUpdateConnectionType = "sid"
)
-func (e SourceOracleUpdateConnectBySystemIDSIDConnectionType) ToPointer() *SourceOracleUpdateConnectBySystemIDSIDConnectionType {
+func (e SourceOracleUpdateConnectionType) ToPointer() *SourceOracleUpdateConnectionType {
return &e
}
-func (e *SourceOracleUpdateConnectBySystemIDSIDConnectionType) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleUpdateConnectionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sid":
- *e = SourceOracleUpdateConnectBySystemIDSIDConnectionType(v)
+ *e = SourceOracleUpdateConnectionType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleUpdateConnectBySystemIDSIDConnectionType: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleUpdateConnectionType: %v", v)
}
}
-// SourceOracleUpdateConnectBySystemIDSID - Use SID (Oracle System Identifier)
-type SourceOracleUpdateConnectBySystemIDSID struct {
- ConnectionType *SourceOracleUpdateConnectBySystemIDSIDConnectionType `json:"connection_type,omitempty"`
- Sid string `json:"sid"`
+// SystemIDSID - Use SID (Oracle System Identifier)
+type SystemIDSID struct {
+ connectionType *SourceOracleUpdateConnectionType `const:"sid" json:"connection_type"`
+ Sid string `json:"sid"`
}
-type SourceOracleUpdateConnectByServiceNameConnectionType string
+func (s SystemIDSID) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SystemIDSID) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SystemIDSID) GetConnectionType() *SourceOracleUpdateConnectionType {
+ return SourceOracleUpdateConnectionTypeSid.ToPointer()
+}
+
+func (o *SystemIDSID) GetSid() string {
+ if o == nil {
+ return ""
+ }
+ return o.Sid
+}
+
+type ConnectionType string
const (
- SourceOracleUpdateConnectByServiceNameConnectionTypeServiceName SourceOracleUpdateConnectByServiceNameConnectionType = "service_name"
+ ConnectionTypeServiceName ConnectionType = "service_name"
)
-func (e SourceOracleUpdateConnectByServiceNameConnectionType) ToPointer() *SourceOracleUpdateConnectByServiceNameConnectionType {
+func (e ConnectionType) ToPointer() *ConnectionType {
return &e
}
-func (e *SourceOracleUpdateConnectByServiceNameConnectionType) UnmarshalJSON(data []byte) error {
+func (e *ConnectionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "service_name":
- *e = SourceOracleUpdateConnectByServiceNameConnectionType(v)
+ *e = ConnectionType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleUpdateConnectByServiceNameConnectionType: %v", v)
+ return fmt.Errorf("invalid value for ConnectionType: %v", v)
}
}
-// SourceOracleUpdateConnectByServiceName - Use service name
-type SourceOracleUpdateConnectByServiceName struct {
- ConnectionType *SourceOracleUpdateConnectByServiceNameConnectionType `json:"connection_type,omitempty"`
- ServiceName string `json:"service_name"`
+// ServiceName - Use service name
+type ServiceName struct {
+ connectionType *ConnectionType `const:"service_name" json:"connection_type"`
+ ServiceName string `json:"service_name"`
}
-type SourceOracleUpdateConnectByType string
+func (s ServiceName) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *ServiceName) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ServiceName) GetConnectionType() *ConnectionType {
+ return ConnectionTypeServiceName.ToPointer()
+}
+
+func (o *ServiceName) GetServiceName() string {
+ if o == nil {
+ return ""
+ }
+ return o.ServiceName
+}
+
+type ConnectByType string
const (
- SourceOracleUpdateConnectByTypeSourceOracleUpdateConnectByServiceName SourceOracleUpdateConnectByType = "source-oracle-update_Connect by_Service name"
- SourceOracleUpdateConnectByTypeSourceOracleUpdateConnectBySystemIDSID SourceOracleUpdateConnectByType = "source-oracle-update_Connect by_System ID (SID)"
+ ConnectByTypeServiceName ConnectByType = "Service name"
+ ConnectByTypeSystemIDSID ConnectByType = "System ID (SID)"
)
-type SourceOracleUpdateConnectBy struct {
- SourceOracleUpdateConnectByServiceName *SourceOracleUpdateConnectByServiceName
- SourceOracleUpdateConnectBySystemIDSID *SourceOracleUpdateConnectBySystemIDSID
+type ConnectBy struct {
+ ServiceName *ServiceName
+ SystemIDSID *SystemIDSID
- Type SourceOracleUpdateConnectByType
+ Type ConnectByType
}
-func CreateSourceOracleUpdateConnectBySourceOracleUpdateConnectByServiceName(sourceOracleUpdateConnectByServiceName SourceOracleUpdateConnectByServiceName) SourceOracleUpdateConnectBy {
- typ := SourceOracleUpdateConnectByTypeSourceOracleUpdateConnectByServiceName
+func CreateConnectByServiceName(serviceName ServiceName) ConnectBy {
+ typ := ConnectByTypeServiceName
- return SourceOracleUpdateConnectBy{
- SourceOracleUpdateConnectByServiceName: &sourceOracleUpdateConnectByServiceName,
- Type: typ,
+ return ConnectBy{
+ ServiceName: &serviceName,
+ Type: typ,
}
}
-func CreateSourceOracleUpdateConnectBySourceOracleUpdateConnectBySystemIDSID(sourceOracleUpdateConnectBySystemIDSID SourceOracleUpdateConnectBySystemIDSID) SourceOracleUpdateConnectBy {
- typ := SourceOracleUpdateConnectByTypeSourceOracleUpdateConnectBySystemIDSID
+func CreateConnectBySystemIDSID(systemIDSID SystemIDSID) ConnectBy {
+ typ := ConnectByTypeSystemIDSID
- return SourceOracleUpdateConnectBy{
- SourceOracleUpdateConnectBySystemIDSID: &sourceOracleUpdateConnectBySystemIDSID,
- Type: typ,
+ return ConnectBy{
+ SystemIDSID: &systemIDSID,
+ Type: typ,
}
}
-func (u *SourceOracleUpdateConnectBy) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *ConnectBy) UnmarshalJSON(data []byte) error {
- sourceOracleUpdateConnectByServiceName := new(SourceOracleUpdateConnectByServiceName)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleUpdateConnectByServiceName); err == nil {
- u.SourceOracleUpdateConnectByServiceName = sourceOracleUpdateConnectByServiceName
- u.Type = SourceOracleUpdateConnectByTypeSourceOracleUpdateConnectByServiceName
+ serviceName := new(ServiceName)
+ if err := utils.UnmarshalJSON(data, &serviceName, "", true, true); err == nil {
+ u.ServiceName = serviceName
+ u.Type = ConnectByTypeServiceName
return nil
}
- sourceOracleUpdateConnectBySystemIDSID := new(SourceOracleUpdateConnectBySystemIDSID)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleUpdateConnectBySystemIDSID); err == nil {
- u.SourceOracleUpdateConnectBySystemIDSID = sourceOracleUpdateConnectBySystemIDSID
- u.Type = SourceOracleUpdateConnectByTypeSourceOracleUpdateConnectBySystemIDSID
+ systemIDSID := new(SystemIDSID)
+ if err := utils.UnmarshalJSON(data, &systemIDSID, "", true, true); err == nil {
+ u.SystemIDSID = systemIDSID
+ u.Type = ConnectByTypeSystemIDSID
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceOracleUpdateConnectBy) MarshalJSON() ([]byte, error) {
- if u.SourceOracleUpdateConnectByServiceName != nil {
- return json.Marshal(u.SourceOracleUpdateConnectByServiceName)
+func (u ConnectBy) MarshalJSON() ([]byte, error) {
+ if u.ServiceName != nil {
+ return utils.MarshalJSON(u.ServiceName, "", true)
}
- if u.SourceOracleUpdateConnectBySystemIDSID != nil {
- return json.Marshal(u.SourceOracleUpdateConnectBySystemIDSID)
+ if u.SystemIDSID != nil {
+ return utils.MarshalJSON(u.SystemIDSID, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificateEncryptionMethod string
+type SourceOracleUpdateEncryptionMethod string
const (
- SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificateEncryptionMethodEncryptedVerifyCertificate SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificateEncryptionMethod = "encrypted_verify_certificate"
+ SourceOracleUpdateEncryptionMethodEncryptedVerifyCertificate SourceOracleUpdateEncryptionMethod = "encrypted_verify_certificate"
)
-func (e SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificateEncryptionMethod) ToPointer() *SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificateEncryptionMethod {
+func (e SourceOracleUpdateEncryptionMethod) ToPointer() *SourceOracleUpdateEncryptionMethod {
return &e
}
-func (e *SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificateEncryptionMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleUpdateEncryptionMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "encrypted_verify_certificate":
- *e = SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificateEncryptionMethod(v)
+ *e = SourceOracleUpdateEncryptionMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificateEncryptionMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleUpdateEncryptionMethod: %v", v)
}
}
-// SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate - Verify and use the certificate provided by the server.
-type SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate struct {
- EncryptionMethod SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificateEncryptionMethod `json:"encryption_method"`
+// TLSEncryptedVerifyCertificate - Verify and use the certificate provided by the server.
+type TLSEncryptedVerifyCertificate struct {
+ encryptionMethod *SourceOracleUpdateEncryptionMethod `const:"encrypted_verify_certificate" json:"encryption_method"`
// Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.
SslCertificate string `json:"ssl_certificate"`
}
-// SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm - This parameter defines what encryption algorithm is used.
-type SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm string
+func (t TLSEncryptedVerifyCertificate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(t, "", false)
+}
+
+func (t *TLSEncryptedVerifyCertificate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &t, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *TLSEncryptedVerifyCertificate) GetEncryptionMethod() *SourceOracleUpdateEncryptionMethod {
+ return SourceOracleUpdateEncryptionMethodEncryptedVerifyCertificate.ToPointer()
+}
+
+func (o *TLSEncryptedVerifyCertificate) GetSslCertificate() string {
+ if o == nil {
+ return ""
+ }
+ return o.SslCertificate
+}
+
+// EncryptionAlgorithm - This parameter defines what encryption algorithm is used.
+type EncryptionAlgorithm string
const (
- SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithmAes256 SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm = "AES256"
- SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithmRc456 SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm = "RC4_56"
- SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithmThreeDes168 SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm = "3DES168"
+ EncryptionAlgorithmAes256 EncryptionAlgorithm = "AES256"
+ EncryptionAlgorithmRc456 EncryptionAlgorithm = "RC4_56"
+ EncryptionAlgorithmThreeDes168 EncryptionAlgorithm = "3DES168"
)
-func (e SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm) ToPointer() *SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm {
+func (e EncryptionAlgorithm) ToPointer() *EncryptionAlgorithm {
return &e
}
-func (e *SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm) UnmarshalJSON(data []byte) error {
+func (e *EncryptionAlgorithm) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -192,291 +253,402 @@ func (e *SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorit
case "RC4_56":
fallthrough
case "3DES168":
- *e = SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm(v)
+ *e = EncryptionAlgorithm(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm: %v", v)
+ return fmt.Errorf("invalid value for EncryptionAlgorithm: %v", v)
}
}
-type SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionMethod string
+type EncryptionMethod string
const (
- SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionMethodClientNne SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionMethod = "client_nne"
+ EncryptionMethodClientNne EncryptionMethod = "client_nne"
)
-func (e SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionMethod) ToPointer() *SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionMethod {
+func (e EncryptionMethod) ToPointer() *EncryptionMethod {
return &e
}
-func (e *SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionMethod) UnmarshalJSON(data []byte) error {
+func (e *EncryptionMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "client_nne":
- *e = SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionMethod(v)
+ *e = EncryptionMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionMethod: %v", v)
+ return fmt.Errorf("invalid value for EncryptionMethod: %v", v)
}
}
-// SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE - The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.
-type SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE struct {
+// NativeNetworkEncryptionNNE - The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.
+type NativeNetworkEncryptionNNE struct {
// This parameter defines what encryption algorithm is used.
- EncryptionAlgorithm *SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionAlgorithm `json:"encryption_algorithm,omitempty"`
- EncryptionMethod SourceOracleUpdateEncryptionNativeNetworkEncryptionNNEEncryptionMethod `json:"encryption_method"`
+ EncryptionAlgorithm *EncryptionAlgorithm `default:"AES256" json:"encryption_algorithm"`
+ encryptionMethod *EncryptionMethod `const:"client_nne" json:"encryption_method"`
}
-type SourceOracleUpdateEncryptionType string
+func (n NativeNetworkEncryptionNNE) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(n, "", false)
+}
+
+func (n *NativeNetworkEncryptionNNE) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &n, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *NativeNetworkEncryptionNNE) GetEncryptionAlgorithm() *EncryptionAlgorithm {
+ if o == nil {
+ return nil
+ }
+ return o.EncryptionAlgorithm
+}
+
+func (o *NativeNetworkEncryptionNNE) GetEncryptionMethod() *EncryptionMethod {
+ return EncryptionMethodClientNne.ToPointer()
+}
+
+type EncryptionUnionType string
const (
- SourceOracleUpdateEncryptionTypeSourceOracleUpdateEncryptionNativeNetworkEncryptionNNE SourceOracleUpdateEncryptionType = "source-oracle-update_Encryption_Native Network Encryption (NNE)"
- SourceOracleUpdateEncryptionTypeSourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate SourceOracleUpdateEncryptionType = "source-oracle-update_Encryption_TLS Encrypted (verify certificate)"
+ EncryptionUnionTypeNativeNetworkEncryptionNNE EncryptionUnionType = "Native Network Encryption (NNE)"
+ EncryptionUnionTypeTLSEncryptedVerifyCertificate EncryptionUnionType = "TLS Encrypted (verify certificate)"
)
-type SourceOracleUpdateEncryption struct {
- SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE *SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE
- SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate *SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate
+type Encryption struct {
+ NativeNetworkEncryptionNNE *NativeNetworkEncryptionNNE
+ TLSEncryptedVerifyCertificate *TLSEncryptedVerifyCertificate
- Type SourceOracleUpdateEncryptionType
+ Type EncryptionUnionType
}
-func CreateSourceOracleUpdateEncryptionSourceOracleUpdateEncryptionNativeNetworkEncryptionNNE(sourceOracleUpdateEncryptionNativeNetworkEncryptionNNE SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE) SourceOracleUpdateEncryption {
- typ := SourceOracleUpdateEncryptionTypeSourceOracleUpdateEncryptionNativeNetworkEncryptionNNE
+func CreateEncryptionNativeNetworkEncryptionNNE(nativeNetworkEncryptionNNE NativeNetworkEncryptionNNE) Encryption {
+ typ := EncryptionUnionTypeNativeNetworkEncryptionNNE
- return SourceOracleUpdateEncryption{
- SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE: &sourceOracleUpdateEncryptionNativeNetworkEncryptionNNE,
- Type: typ,
+ return Encryption{
+ NativeNetworkEncryptionNNE: &nativeNetworkEncryptionNNE,
+ Type: typ,
}
}
-func CreateSourceOracleUpdateEncryptionSourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate(sourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate) SourceOracleUpdateEncryption {
- typ := SourceOracleUpdateEncryptionTypeSourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate
+func CreateEncryptionTLSEncryptedVerifyCertificate(tlsEncryptedVerifyCertificate TLSEncryptedVerifyCertificate) Encryption {
+ typ := EncryptionUnionTypeTLSEncryptedVerifyCertificate
- return SourceOracleUpdateEncryption{
- SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate: &sourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate,
- Type: typ,
+ return Encryption{
+ TLSEncryptedVerifyCertificate: &tlsEncryptedVerifyCertificate,
+ Type: typ,
}
}
-func (u *SourceOracleUpdateEncryption) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *Encryption) UnmarshalJSON(data []byte) error {
- sourceOracleUpdateEncryptionNativeNetworkEncryptionNNE := new(SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleUpdateEncryptionNativeNetworkEncryptionNNE); err == nil {
- u.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE = sourceOracleUpdateEncryptionNativeNetworkEncryptionNNE
- u.Type = SourceOracleUpdateEncryptionTypeSourceOracleUpdateEncryptionNativeNetworkEncryptionNNE
+ nativeNetworkEncryptionNNE := new(NativeNetworkEncryptionNNE)
+ if err := utils.UnmarshalJSON(data, &nativeNetworkEncryptionNNE, "", true, true); err == nil {
+ u.NativeNetworkEncryptionNNE = nativeNetworkEncryptionNNE
+ u.Type = EncryptionUnionTypeNativeNetworkEncryptionNNE
return nil
}
- sourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate := new(SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate); err == nil {
- u.SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate = sourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate
- u.Type = SourceOracleUpdateEncryptionTypeSourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate
+ tlsEncryptedVerifyCertificate := new(TLSEncryptedVerifyCertificate)
+ if err := utils.UnmarshalJSON(data, &tlsEncryptedVerifyCertificate, "", true, true); err == nil {
+ u.TLSEncryptedVerifyCertificate = tlsEncryptedVerifyCertificate
+ u.Type = EncryptionUnionTypeTLSEncryptedVerifyCertificate
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceOracleUpdateEncryption) MarshalJSON() ([]byte, error) {
- if u.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE != nil {
- return json.Marshal(u.SourceOracleUpdateEncryptionNativeNetworkEncryptionNNE)
+func (u Encryption) MarshalJSON() ([]byte, error) {
+ if u.NativeNetworkEncryptionNNE != nil {
+ return utils.MarshalJSON(u.NativeNetworkEncryptionNNE, "", true)
}
- if u.SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate != nil {
- return json.Marshal(u.SourceOracleUpdateEncryptionTLSEncryptedVerifyCertificate)
+ if u.TLSEncryptedVerifyCertificate != nil {
+ return utils.MarshalJSON(u.TLSEncryptedVerifyCertificate, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type SourceOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// SourceOracleUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type SourceOracleUpdateSchemasTunnelMethodTunnelMethod string
const (
- SourceOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth SourceOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ SourceOracleUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourceOracleUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourceOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *SourceOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e SourceOracleUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *SourceOracleUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *SourceOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourceOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = SourceOracleUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// SourceOracleUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceOracleUpdateSSHTunnelMethodPasswordAuthentication struct {
+// SourceOracleUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceOracleUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourceOracleUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceOracleUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourceOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourceOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourceOracleUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOracleUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOracleUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceOracleUpdatePasswordAuthentication) GetTunnelMethod() SourceOracleUpdateSchemasTunnelMethodTunnelMethod {
+ return SourceOracleUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourceOracleUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceOracleUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourceOracleUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// SourceOracleUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type SourceOracleUpdateSchemasTunnelMethod string
const (
- SourceOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth SourceOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ SourceOracleUpdateSchemasTunnelMethodSSHKeyAuth SourceOracleUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e SourceOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *SourceOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e SourceOracleUpdateSchemasTunnelMethod) ToPointer() *SourceOracleUpdateSchemasTunnelMethod {
return &e
}
-func (e *SourceOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourceOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = SourceOracleUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleUpdateSchemasTunnelMethod: %v", v)
}
}
-// SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// SourceOracleUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceOracleUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourceOracleUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceOracleUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourceOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourceOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourceOracleUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOracleUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOracleUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourceOracleUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourceOracleUpdateSSHKeyAuthentication) GetTunnelMethod() SourceOracleUpdateSchemasTunnelMethod {
+ return SourceOracleUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourceOracleUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourceOracleUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourceOracleUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type SourceOracleUpdateTunnelMethod string
const (
- SourceOracleUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourceOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourceOracleUpdateTunnelMethodNoTunnel SourceOracleUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e SourceOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourceOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourceOracleUpdateTunnelMethod) ToPointer() *SourceOracleUpdateTunnelMethod {
return &e
}
-func (e *SourceOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceOracleUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourceOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourceOracleUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceOracleUpdateTunnelMethod: %v", v)
}
}
-// SourceOracleUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourceOracleUpdateSSHTunnelMethodNoTunnel struct {
+// SourceOracleUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourceOracleUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourceOracleUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourceOracleUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourceOracleUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOracleUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOracleUpdateNoTunnel) GetTunnelMethod() SourceOracleUpdateTunnelMethod {
+ return SourceOracleUpdateTunnelMethodNoTunnel
}
type SourceOracleUpdateSSHTunnelMethodType string
const (
- SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHTunnelMethodNoTunnel SourceOracleUpdateSSHTunnelMethodType = "source-oracle-update_SSH Tunnel Method_No Tunnel"
- SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication SourceOracleUpdateSSHTunnelMethodType = "source-oracle-update_SSH Tunnel Method_SSH Key Authentication"
- SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHTunnelMethodPasswordAuthentication SourceOracleUpdateSSHTunnelMethodType = "source-oracle-update_SSH Tunnel Method_Password Authentication"
+ SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateNoTunnel SourceOracleUpdateSSHTunnelMethodType = "source-oracle-update_No Tunnel"
+ SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHKeyAuthentication SourceOracleUpdateSSHTunnelMethodType = "source-oracle-update_SSH Key Authentication"
+ SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdatePasswordAuthentication SourceOracleUpdateSSHTunnelMethodType = "source-oracle-update_Password Authentication"
)
type SourceOracleUpdateSSHTunnelMethod struct {
- SourceOracleUpdateSSHTunnelMethodNoTunnel *SourceOracleUpdateSSHTunnelMethodNoTunnel
- SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication *SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication
- SourceOracleUpdateSSHTunnelMethodPasswordAuthentication *SourceOracleUpdateSSHTunnelMethodPasswordAuthentication
+ SourceOracleUpdateNoTunnel *SourceOracleUpdateNoTunnel
+ SourceOracleUpdateSSHKeyAuthentication *SourceOracleUpdateSSHKeyAuthentication
+ SourceOracleUpdatePasswordAuthentication *SourceOracleUpdatePasswordAuthentication
Type SourceOracleUpdateSSHTunnelMethodType
}
-func CreateSourceOracleUpdateSSHTunnelMethodSourceOracleUpdateSSHTunnelMethodNoTunnel(sourceOracleUpdateSSHTunnelMethodNoTunnel SourceOracleUpdateSSHTunnelMethodNoTunnel) SourceOracleUpdateSSHTunnelMethod {
- typ := SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHTunnelMethodNoTunnel
+func CreateSourceOracleUpdateSSHTunnelMethodSourceOracleUpdateNoTunnel(sourceOracleUpdateNoTunnel SourceOracleUpdateNoTunnel) SourceOracleUpdateSSHTunnelMethod {
+ typ := SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateNoTunnel
return SourceOracleUpdateSSHTunnelMethod{
- SourceOracleUpdateSSHTunnelMethodNoTunnel: &sourceOracleUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourceOracleUpdateNoTunnel: &sourceOracleUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateSourceOracleUpdateSSHTunnelMethodSourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication(sourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication) SourceOracleUpdateSSHTunnelMethod {
- typ := SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateSourceOracleUpdateSSHTunnelMethodSourceOracleUpdateSSHKeyAuthentication(sourceOracleUpdateSSHKeyAuthentication SourceOracleUpdateSSHKeyAuthentication) SourceOracleUpdateSSHTunnelMethod {
+ typ := SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHKeyAuthentication
return SourceOracleUpdateSSHTunnelMethod{
- SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication: &sourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ SourceOracleUpdateSSHKeyAuthentication: &sourceOracleUpdateSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateSourceOracleUpdateSSHTunnelMethodSourceOracleUpdateSSHTunnelMethodPasswordAuthentication(sourceOracleUpdateSSHTunnelMethodPasswordAuthentication SourceOracleUpdateSSHTunnelMethodPasswordAuthentication) SourceOracleUpdateSSHTunnelMethod {
- typ := SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHTunnelMethodPasswordAuthentication
+func CreateSourceOracleUpdateSSHTunnelMethodSourceOracleUpdatePasswordAuthentication(sourceOracleUpdatePasswordAuthentication SourceOracleUpdatePasswordAuthentication) SourceOracleUpdateSSHTunnelMethod {
+ typ := SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdatePasswordAuthentication
return SourceOracleUpdateSSHTunnelMethod{
- SourceOracleUpdateSSHTunnelMethodPasswordAuthentication: &sourceOracleUpdateSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ SourceOracleUpdatePasswordAuthentication: &sourceOracleUpdatePasswordAuthentication,
+ Type: typ,
}
}
func (u *SourceOracleUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceOracleUpdateSSHTunnelMethodNoTunnel := new(SourceOracleUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.SourceOracleUpdateSSHTunnelMethodNoTunnel = sourceOracleUpdateSSHTunnelMethodNoTunnel
- u.Type = SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHTunnelMethodNoTunnel
+
+ sourceOracleUpdateNoTunnel := new(SourceOracleUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourceOracleUpdateNoTunnel, "", true, true); err == nil {
+ u.SourceOracleUpdateNoTunnel = sourceOracleUpdateNoTunnel
+ u.Type = SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateNoTunnel
return nil
}
- sourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication := new(SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication = sourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication
+ sourceOracleUpdateSSHKeyAuthentication := new(SourceOracleUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceOracleUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceOracleUpdateSSHKeyAuthentication = sourceOracleUpdateSSHKeyAuthentication
+ u.Type = SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHKeyAuthentication
return nil
}
- sourceOracleUpdateSSHTunnelMethodPasswordAuthentication := new(SourceOracleUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOracleUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourceOracleUpdateSSHTunnelMethodPasswordAuthentication = sourceOracleUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdateSSHTunnelMethodPasswordAuthentication
+ sourceOracleUpdatePasswordAuthentication := new(SourceOracleUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceOracleUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.SourceOracleUpdatePasswordAuthentication = sourceOracleUpdatePasswordAuthentication
+ u.Type = SourceOracleUpdateSSHTunnelMethodTypeSourceOracleUpdatePasswordAuthentication
return nil
}
@@ -484,26 +656,26 @@ func (u *SourceOracleUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceOracleUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourceOracleUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourceOracleUpdateSSHTunnelMethodNoTunnel)
+ if u.SourceOracleUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.SourceOracleUpdateNoTunnel, "", true)
}
- if u.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceOracleUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourceOracleUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceOracleUpdateSSHKeyAuthentication, "", true)
}
- if u.SourceOracleUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourceOracleUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.SourceOracleUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceOracleUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceOracleUpdate struct {
// Connect data that will be used for DB connection
- ConnectionData *SourceOracleUpdateConnectBy `json:"connection_data,omitempty"`
+ ConnectionData *ConnectBy `json:"connection_data,omitempty"`
// The encryption method with is used when communicating with the database.
- Encryption SourceOracleUpdateEncryption `json:"encryption"`
+ Encryption Encryption `json:"encryption"`
// Hostname of the database.
Host string `json:"host"`
// Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
@@ -514,7 +686,7 @@ type SourceOracleUpdate struct {
// Oracle Corporations recommends the following port numbers:
// 1521 - Default listening port for client connections to the listener.
// 2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL
- Port int64 `json:"port"`
+ Port *int64 `default:"1521" json:"port"`
// The list of schemas to sync from. Defaults to user. Case sensitive.
Schemas []string `json:"schemas,omitempty"`
// Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
@@ -522,3 +694,77 @@ type SourceOracleUpdate struct {
// The username which is used to access the database.
Username string `json:"username"`
}
+
+func (s SourceOracleUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOracleUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOracleUpdate) GetConnectionData() *ConnectBy {
+ if o == nil {
+ return nil
+ }
+ return o.ConnectionData
+}
+
+func (o *SourceOracleUpdate) GetEncryption() Encryption {
+ if o == nil {
+ return Encryption{}
+ }
+ return o.Encryption
+}
+
+func (o *SourceOracleUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceOracleUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceOracleUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceOracleUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceOracleUpdate) GetSchemas() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Schemas
+}
+
+func (o *SourceOracleUpdate) GetTunnelMethod() *SourceOracleUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourceOracleUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceorb.go b/internal/sdk/pkg/models/shared/sourceorb.go
old mode 100755
new mode 100644
index f05372685..ae7ef61e2
--- a/internal/sdk/pkg/models/shared/sourceorb.go
+++ b/internal/sdk/pkg/models/shared/sourceorb.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceOrbOrb string
+type Orb string
const (
- SourceOrbOrbOrb SourceOrbOrb = "orb"
+ OrbOrb Orb = "orb"
)
-func (e SourceOrbOrb) ToPointer() *SourceOrbOrb {
+func (e Orb) ToPointer() *Orb {
return &e
}
-func (e *SourceOrbOrb) UnmarshalJSON(data []byte) error {
+func (e *Orb) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "orb":
- *e = SourceOrbOrb(v)
+ *e = Orb(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOrbOrb: %v", v)
+ return fmt.Errorf("invalid value for Orb: %v", v)
}
}
@@ -35,12 +36,12 @@ type SourceOrb struct {
// Orb API Key, issued from the Orb admin console.
APIKey string `json:"api_key"`
// When set to N, the connector will always refresh resources created within the past N days. By default, updated objects that are not newly created are not incrementally synced.
- LookbackWindowDays *int64 `json:"lookback_window_days,omitempty"`
+ LookbackWindowDays *int64 `default:"0" json:"lookback_window_days"`
// Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.
NumericEventPropertiesKeys []string `json:"numeric_event_properties_keys,omitempty"`
// Orb Plan ID to filter subscriptions that should have usage fetched.
- PlanID *string `json:"plan_id,omitempty"`
- SourceType SourceOrbOrb `json:"sourceType"`
+ PlanID *string `json:"plan_id,omitempty"`
+ sourceType Orb `const:"orb" json:"sourceType"`
// UTC date and time in the format 2022-03-01T00:00:00Z. Any data with created_at before this data will not be synced. For Subscription Usage, this becomes the `timeframe_start` API parameter.
StartDate string `json:"start_date"`
// Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.
@@ -48,3 +49,67 @@ type SourceOrb struct {
// Property key name to group subscription usage by.
SubscriptionUsageGroupingKey *string `json:"subscription_usage_grouping_key,omitempty"`
}
+
+func (s SourceOrb) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOrb) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOrb) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceOrb) GetLookbackWindowDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindowDays
+}
+
+func (o *SourceOrb) GetNumericEventPropertiesKeys() []string {
+ if o == nil {
+ return nil
+ }
+ return o.NumericEventPropertiesKeys
+}
+
+func (o *SourceOrb) GetPlanID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PlanID
+}
+
+func (o *SourceOrb) GetSourceType() Orb {
+ return OrbOrb
+}
+
+func (o *SourceOrb) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceOrb) GetStringEventPropertiesKeys() []string {
+ if o == nil {
+ return nil
+ }
+ return o.StringEventPropertiesKeys
+}
+
+func (o *SourceOrb) GetSubscriptionUsageGroupingKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SubscriptionUsageGroupingKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourceorbcreaterequest.go b/internal/sdk/pkg/models/shared/sourceorbcreaterequest.go
old mode 100755
new mode 100644
index e732e9572..27e08fc08
--- a/internal/sdk/pkg/models/shared/sourceorbcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceorbcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceOrbCreateRequest struct {
Configuration SourceOrb `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOrbCreateRequest) GetConfiguration() SourceOrb {
+ if o == nil {
+ return SourceOrb{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOrbCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceOrbCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOrbCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceOrbCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceorbit.go b/internal/sdk/pkg/models/shared/sourceorbit.go
old mode 100755
new mode 100644
index ffb10c094..196c09000
--- a/internal/sdk/pkg/models/shared/sourceorbit.go
+++ b/internal/sdk/pkg/models/shared/sourceorbit.go
@@ -5,38 +5,75 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceOrbitOrbit string
+type Orbit string
const (
- SourceOrbitOrbitOrbit SourceOrbitOrbit = "orbit"
+ OrbitOrbit Orbit = "orbit"
)
-func (e SourceOrbitOrbit) ToPointer() *SourceOrbitOrbit {
+func (e Orbit) ToPointer() *Orbit {
return &e
}
-func (e *SourceOrbitOrbit) UnmarshalJSON(data []byte) error {
+func (e *Orbit) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "orbit":
- *e = SourceOrbitOrbit(v)
+ *e = Orbit(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOrbitOrbit: %v", v)
+ return fmt.Errorf("invalid value for Orbit: %v", v)
}
}
type SourceOrbit struct {
// Authorizes you to work with Orbit workspaces associated with the token.
- APIToken string `json:"api_token"`
- SourceType SourceOrbitOrbit `json:"sourceType"`
+ APIToken string `json:"api_token"`
+ sourceType Orbit `const:"orbit" json:"sourceType"`
// Date in the format 2022-06-26. Only load members whose last activities are after this date.
StartDate *string `json:"start_date,omitempty"`
// The unique name of the workspace that your API token is associated with.
Workspace string `json:"workspace"`
}
+
+func (s SourceOrbit) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOrbit) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOrbit) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceOrbit) GetSourceType() Orbit {
+ return OrbitOrbit
+}
+
+func (o *SourceOrbit) GetStartDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceOrbit) GetWorkspace() string {
+ if o == nil {
+ return ""
+ }
+ return o.Workspace
+}
diff --git a/internal/sdk/pkg/models/shared/sourceorbitcreaterequest.go b/internal/sdk/pkg/models/shared/sourceorbitcreaterequest.go
old mode 100755
new mode 100644
index 6e289339f..0a4c13484
--- a/internal/sdk/pkg/models/shared/sourceorbitcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceorbitcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceOrbitCreateRequest struct {
Configuration SourceOrbit `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOrbitCreateRequest) GetConfiguration() SourceOrbit {
+ if o == nil {
+ return SourceOrbit{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOrbitCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceOrbitCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOrbitCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceOrbitCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceorbitputrequest.go b/internal/sdk/pkg/models/shared/sourceorbitputrequest.go
old mode 100755
new mode 100644
index 2593a6bdd..fa0402061
--- a/internal/sdk/pkg/models/shared/sourceorbitputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceorbitputrequest.go
@@ -7,3 +7,24 @@ type SourceOrbitPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOrbitPutRequest) GetConfiguration() SourceOrbitUpdate {
+ if o == nil {
+ return SourceOrbitUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOrbitPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOrbitPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceorbitupdate.go b/internal/sdk/pkg/models/shared/sourceorbitupdate.go
old mode 100755
new mode 100644
index 2c6492733..7b8550f8f
--- a/internal/sdk/pkg/models/shared/sourceorbitupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceorbitupdate.go
@@ -10,3 +10,24 @@ type SourceOrbitUpdate struct {
// The unique name of the workspace that your API token is associated with.
Workspace string `json:"workspace"`
}
+
+func (o *SourceOrbitUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceOrbitUpdate) GetStartDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceOrbitUpdate) GetWorkspace() string {
+ if o == nil {
+ return ""
+ }
+ return o.Workspace
+}
diff --git a/internal/sdk/pkg/models/shared/sourceorbputrequest.go b/internal/sdk/pkg/models/shared/sourceorbputrequest.go
old mode 100755
new mode 100644
index 3674fb937..14446eabe
--- a/internal/sdk/pkg/models/shared/sourceorbputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceorbputrequest.go
@@ -7,3 +7,24 @@ type SourceOrbPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOrbPutRequest) GetConfiguration() SourceOrbUpdate {
+ if o == nil {
+ return SourceOrbUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOrbPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOrbPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceorbupdate.go b/internal/sdk/pkg/models/shared/sourceorbupdate.go
old mode 100755
new mode 100644
index df8f86065..c942b1391
--- a/internal/sdk/pkg/models/shared/sourceorbupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceorbupdate.go
@@ -2,11 +2,15 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceOrbUpdate struct {
// Orb API Key, issued from the Orb admin console.
APIKey string `json:"api_key"`
// When set to N, the connector will always refresh resources created within the past N days. By default, updated objects that are not newly created are not incrementally synced.
- LookbackWindowDays *int64 `json:"lookback_window_days,omitempty"`
+ LookbackWindowDays *int64 `default:"0" json:"lookback_window_days"`
// Property key names to extract from all events, in order to enrich ledger entries corresponding to an event deduction.
NumericEventPropertiesKeys []string `json:"numeric_event_properties_keys,omitempty"`
// Orb Plan ID to filter subscriptions that should have usage fetched.
@@ -18,3 +22,63 @@ type SourceOrbUpdate struct {
// Property key name to group subscription usage by.
SubscriptionUsageGroupingKey *string `json:"subscription_usage_grouping_key,omitempty"`
}
+
+func (s SourceOrbUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOrbUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOrbUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceOrbUpdate) GetLookbackWindowDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindowDays
+}
+
+func (o *SourceOrbUpdate) GetNumericEventPropertiesKeys() []string {
+ if o == nil {
+ return nil
+ }
+ return o.NumericEventPropertiesKeys
+}
+
+func (o *SourceOrbUpdate) GetPlanID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PlanID
+}
+
+func (o *SourceOrbUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceOrbUpdate) GetStringEventPropertiesKeys() []string {
+ if o == nil {
+ return nil
+ }
+ return o.StringEventPropertiesKeys
+}
+
+func (o *SourceOrbUpdate) GetSubscriptionUsageGroupingKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SubscriptionUsageGroupingKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoutbrainamplify.go b/internal/sdk/pkg/models/shared/sourceoutbrainamplify.go
old mode 100755
new mode 100644
index eefe375c3..80a85c807
--- a/internal/sdk/pkg/models/shared/sourceoutbrainamplify.go
+++ b/internal/sdk/pkg/models/shared/sourceoutbrainamplify.go
@@ -3,126 +3,172 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceOutbrainAmplifyAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest string
+type SourceOutbrainAmplifyBothUsernameAndPasswordIsRequiredForAuthenticationRequest string
const (
- SourceOutbrainAmplifyAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequestUsernamePassword SourceOutbrainAmplifyAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest = "username_password"
+ SourceOutbrainAmplifyBothUsernameAndPasswordIsRequiredForAuthenticationRequestUsernamePassword SourceOutbrainAmplifyBothUsernameAndPasswordIsRequiredForAuthenticationRequest = "username_password"
)
-func (e SourceOutbrainAmplifyAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest) ToPointer() *SourceOutbrainAmplifyAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest {
+func (e SourceOutbrainAmplifyBothUsernameAndPasswordIsRequiredForAuthenticationRequest) ToPointer() *SourceOutbrainAmplifyBothUsernameAndPasswordIsRequiredForAuthenticationRequest {
return &e
}
-func (e *SourceOutbrainAmplifyAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest) UnmarshalJSON(data []byte) error {
+func (e *SourceOutbrainAmplifyBothUsernameAndPasswordIsRequiredForAuthenticationRequest) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "username_password":
- *e = SourceOutbrainAmplifyAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest(v)
+ *e = SourceOutbrainAmplifyBothUsernameAndPasswordIsRequiredForAuthenticationRequest(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOutbrainAmplifyAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest: %v", v)
+ return fmt.Errorf("invalid value for SourceOutbrainAmplifyBothUsernameAndPasswordIsRequiredForAuthenticationRequest: %v", v)
}
}
-// SourceOutbrainAmplifyAuthenticationMethodUsernamePassword - Credentials for making authenticated requests requires either username/password or access_token.
-type SourceOutbrainAmplifyAuthenticationMethodUsernamePassword struct {
+// SourceOutbrainAmplifyUsernamePassword - Credentials for making authenticated requests requires either username/password or access_token.
+type SourceOutbrainAmplifyUsernamePassword struct {
// Add Password for authentication.
- Password string `json:"password"`
- Type SourceOutbrainAmplifyAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest `json:"type"`
+ Password string `json:"password"`
+ type_ SourceOutbrainAmplifyBothUsernameAndPasswordIsRequiredForAuthenticationRequest `const:"username_password" json:"type"`
// Add Username for authentication.
Username string `json:"username"`
}
-type SourceOutbrainAmplifyAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests string
+func (s SourceOutbrainAmplifyUsernamePassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOutbrainAmplifyUsernamePassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOutbrainAmplifyUsernamePassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceOutbrainAmplifyUsernamePassword) GetType() SourceOutbrainAmplifyBothUsernameAndPasswordIsRequiredForAuthenticationRequest {
+ return SourceOutbrainAmplifyBothUsernameAndPasswordIsRequiredForAuthenticationRequestUsernamePassword
+}
+
+func (o *SourceOutbrainAmplifyUsernamePassword) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type SourceOutbrainAmplifyAccessTokenIsRequiredForAuthenticationRequests string
const (
- SourceOutbrainAmplifyAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequestsAccessToken SourceOutbrainAmplifyAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests = "access_token"
+ SourceOutbrainAmplifyAccessTokenIsRequiredForAuthenticationRequestsAccessToken SourceOutbrainAmplifyAccessTokenIsRequiredForAuthenticationRequests = "access_token"
)
-func (e SourceOutbrainAmplifyAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests) ToPointer() *SourceOutbrainAmplifyAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests {
+func (e SourceOutbrainAmplifyAccessTokenIsRequiredForAuthenticationRequests) ToPointer() *SourceOutbrainAmplifyAccessTokenIsRequiredForAuthenticationRequests {
return &e
}
-func (e *SourceOutbrainAmplifyAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests) UnmarshalJSON(data []byte) error {
+func (e *SourceOutbrainAmplifyAccessTokenIsRequiredForAuthenticationRequests) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceOutbrainAmplifyAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests(v)
+ *e = SourceOutbrainAmplifyAccessTokenIsRequiredForAuthenticationRequests(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOutbrainAmplifyAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests: %v", v)
+ return fmt.Errorf("invalid value for SourceOutbrainAmplifyAccessTokenIsRequiredForAuthenticationRequests: %v", v)
}
}
-// SourceOutbrainAmplifyAuthenticationMethodAccessToken - Credentials for making authenticated requests requires either username/password or access_token.
-type SourceOutbrainAmplifyAuthenticationMethodAccessToken struct {
+// SourceOutbrainAmplifyAccessToken - Credentials for making authenticated requests requires either username/password or access_token.
+type SourceOutbrainAmplifyAccessToken struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- Type SourceOutbrainAmplifyAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests `json:"type"`
+ AccessToken string `json:"access_token"`
+ type_ SourceOutbrainAmplifyAccessTokenIsRequiredForAuthenticationRequests `const:"access_token" json:"type"`
+}
+
+func (s SourceOutbrainAmplifyAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOutbrainAmplifyAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOutbrainAmplifyAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceOutbrainAmplifyAccessToken) GetType() SourceOutbrainAmplifyAccessTokenIsRequiredForAuthenticationRequests {
+ return SourceOutbrainAmplifyAccessTokenIsRequiredForAuthenticationRequestsAccessToken
}
type SourceOutbrainAmplifyAuthenticationMethodType string
const (
- SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyAuthenticationMethodAccessToken SourceOutbrainAmplifyAuthenticationMethodType = "source-outbrain-amplify_Authentication Method_Access token"
- SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyAuthenticationMethodUsernamePassword SourceOutbrainAmplifyAuthenticationMethodType = "source-outbrain-amplify_Authentication Method_Username Password"
+ SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyAccessToken SourceOutbrainAmplifyAuthenticationMethodType = "source-outbrain-amplify_Access token"
+ SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyUsernamePassword SourceOutbrainAmplifyAuthenticationMethodType = "source-outbrain-amplify_Username Password"
)
type SourceOutbrainAmplifyAuthenticationMethod struct {
- SourceOutbrainAmplifyAuthenticationMethodAccessToken *SourceOutbrainAmplifyAuthenticationMethodAccessToken
- SourceOutbrainAmplifyAuthenticationMethodUsernamePassword *SourceOutbrainAmplifyAuthenticationMethodUsernamePassword
+ SourceOutbrainAmplifyAccessToken *SourceOutbrainAmplifyAccessToken
+ SourceOutbrainAmplifyUsernamePassword *SourceOutbrainAmplifyUsernamePassword
Type SourceOutbrainAmplifyAuthenticationMethodType
}
-func CreateSourceOutbrainAmplifyAuthenticationMethodSourceOutbrainAmplifyAuthenticationMethodAccessToken(sourceOutbrainAmplifyAuthenticationMethodAccessToken SourceOutbrainAmplifyAuthenticationMethodAccessToken) SourceOutbrainAmplifyAuthenticationMethod {
- typ := SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyAuthenticationMethodAccessToken
+func CreateSourceOutbrainAmplifyAuthenticationMethodSourceOutbrainAmplifyAccessToken(sourceOutbrainAmplifyAccessToken SourceOutbrainAmplifyAccessToken) SourceOutbrainAmplifyAuthenticationMethod {
+ typ := SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyAccessToken
return SourceOutbrainAmplifyAuthenticationMethod{
- SourceOutbrainAmplifyAuthenticationMethodAccessToken: &sourceOutbrainAmplifyAuthenticationMethodAccessToken,
- Type: typ,
+ SourceOutbrainAmplifyAccessToken: &sourceOutbrainAmplifyAccessToken,
+ Type: typ,
}
}
-func CreateSourceOutbrainAmplifyAuthenticationMethodSourceOutbrainAmplifyAuthenticationMethodUsernamePassword(sourceOutbrainAmplifyAuthenticationMethodUsernamePassword SourceOutbrainAmplifyAuthenticationMethodUsernamePassword) SourceOutbrainAmplifyAuthenticationMethod {
- typ := SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyAuthenticationMethodUsernamePassword
+func CreateSourceOutbrainAmplifyAuthenticationMethodSourceOutbrainAmplifyUsernamePassword(sourceOutbrainAmplifyUsernamePassword SourceOutbrainAmplifyUsernamePassword) SourceOutbrainAmplifyAuthenticationMethod {
+ typ := SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyUsernamePassword
return SourceOutbrainAmplifyAuthenticationMethod{
- SourceOutbrainAmplifyAuthenticationMethodUsernamePassword: &sourceOutbrainAmplifyAuthenticationMethodUsernamePassword,
- Type: typ,
+ SourceOutbrainAmplifyUsernamePassword: &sourceOutbrainAmplifyUsernamePassword,
+ Type: typ,
}
}
func (u *SourceOutbrainAmplifyAuthenticationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceOutbrainAmplifyAuthenticationMethodAccessToken := new(SourceOutbrainAmplifyAuthenticationMethodAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOutbrainAmplifyAuthenticationMethodAccessToken); err == nil {
- u.SourceOutbrainAmplifyAuthenticationMethodAccessToken = sourceOutbrainAmplifyAuthenticationMethodAccessToken
- u.Type = SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyAuthenticationMethodAccessToken
+
+ sourceOutbrainAmplifyAccessToken := new(SourceOutbrainAmplifyAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceOutbrainAmplifyAccessToken, "", true, true); err == nil {
+ u.SourceOutbrainAmplifyAccessToken = sourceOutbrainAmplifyAccessToken
+ u.Type = SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyAccessToken
return nil
}
- sourceOutbrainAmplifyAuthenticationMethodUsernamePassword := new(SourceOutbrainAmplifyAuthenticationMethodUsernamePassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOutbrainAmplifyAuthenticationMethodUsernamePassword); err == nil {
- u.SourceOutbrainAmplifyAuthenticationMethodUsernamePassword = sourceOutbrainAmplifyAuthenticationMethodUsernamePassword
- u.Type = SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyAuthenticationMethodUsernamePassword
+ sourceOutbrainAmplifyUsernamePassword := new(SourceOutbrainAmplifyUsernamePassword)
+ if err := utils.UnmarshalJSON(data, &sourceOutbrainAmplifyUsernamePassword, "", true, true); err == nil {
+ u.SourceOutbrainAmplifyUsernamePassword = sourceOutbrainAmplifyUsernamePassword
+ u.Type = SourceOutbrainAmplifyAuthenticationMethodTypeSourceOutbrainAmplifyUsernamePassword
return nil
}
@@ -130,15 +176,15 @@ func (u *SourceOutbrainAmplifyAuthenticationMethod) UnmarshalJSON(data []byte) e
}
func (u SourceOutbrainAmplifyAuthenticationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceOutbrainAmplifyAuthenticationMethodAccessToken != nil {
- return json.Marshal(u.SourceOutbrainAmplifyAuthenticationMethodAccessToken)
+ if u.SourceOutbrainAmplifyAccessToken != nil {
+ return utils.MarshalJSON(u.SourceOutbrainAmplifyAccessToken, "", true)
}
- if u.SourceOutbrainAmplifyAuthenticationMethodUsernamePassword != nil {
- return json.Marshal(u.SourceOutbrainAmplifyAuthenticationMethodUsernamePassword)
+ if u.SourceOutbrainAmplifyUsernamePassword != nil {
+ return utils.MarshalJSON(u.SourceOutbrainAmplifyUsernamePassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// SourceOutbrainAmplifyGranularityForGeoLocationRegion - The granularity used for geo location data in reports.
@@ -203,27 +249,27 @@ func (e *SourceOutbrainAmplifyGranularityForPeriodicReports) UnmarshalJSON(data
}
}
-type SourceOutbrainAmplifyOutbrainAmplify string
+type OutbrainAmplify string
const (
- SourceOutbrainAmplifyOutbrainAmplifyOutbrainAmplify SourceOutbrainAmplifyOutbrainAmplify = "outbrain-amplify"
+ OutbrainAmplifyOutbrainAmplify OutbrainAmplify = "outbrain-amplify"
)
-func (e SourceOutbrainAmplifyOutbrainAmplify) ToPointer() *SourceOutbrainAmplifyOutbrainAmplify {
+func (e OutbrainAmplify) ToPointer() *OutbrainAmplify {
return &e
}
-func (e *SourceOutbrainAmplifyOutbrainAmplify) UnmarshalJSON(data []byte) error {
+func (e *OutbrainAmplify) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "outbrain-amplify":
- *e = SourceOutbrainAmplifyOutbrainAmplify(v)
+ *e = OutbrainAmplify(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOutbrainAmplifyOutbrainAmplify: %v", v)
+ return fmt.Errorf("invalid value for OutbrainAmplify: %v", v)
}
}
@@ -236,7 +282,57 @@ type SourceOutbrainAmplify struct {
GeoLocationBreakdown *SourceOutbrainAmplifyGranularityForGeoLocationRegion `json:"geo_location_breakdown,omitempty"`
// The granularity used for periodic data in reports. See the docs.
ReportGranularity *SourceOutbrainAmplifyGranularityForPeriodicReports `json:"report_granularity,omitempty"`
- SourceType SourceOutbrainAmplifyOutbrainAmplify `json:"sourceType"`
+ sourceType OutbrainAmplify `const:"outbrain-amplify" json:"sourceType"`
// Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before this date will not be replicated.
StartDate string `json:"start_date"`
}
+
+func (s SourceOutbrainAmplify) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOutbrainAmplify) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOutbrainAmplify) GetCredentials() SourceOutbrainAmplifyAuthenticationMethod {
+ if o == nil {
+ return SourceOutbrainAmplifyAuthenticationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceOutbrainAmplify) GetEndDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceOutbrainAmplify) GetGeoLocationBreakdown() *SourceOutbrainAmplifyGranularityForGeoLocationRegion {
+ if o == nil {
+ return nil
+ }
+ return o.GeoLocationBreakdown
+}
+
+func (o *SourceOutbrainAmplify) GetReportGranularity() *SourceOutbrainAmplifyGranularityForPeriodicReports {
+ if o == nil {
+ return nil
+ }
+ return o.ReportGranularity
+}
+
+func (o *SourceOutbrainAmplify) GetSourceType() OutbrainAmplify {
+ return OutbrainAmplifyOutbrainAmplify
+}
+
+func (o *SourceOutbrainAmplify) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoutbrainamplifycreaterequest.go b/internal/sdk/pkg/models/shared/sourceoutbrainamplifycreaterequest.go
old mode 100755
new mode 100644
index c09b391fe..bf22f9846
--- a/internal/sdk/pkg/models/shared/sourceoutbrainamplifycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceoutbrainamplifycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceOutbrainAmplifyCreateRequest struct {
Configuration SourceOutbrainAmplify `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOutbrainAmplifyCreateRequest) GetConfiguration() SourceOutbrainAmplify {
+ if o == nil {
+ return SourceOutbrainAmplify{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOutbrainAmplifyCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceOutbrainAmplifyCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOutbrainAmplifyCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceOutbrainAmplifyCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoutbrainamplifyputrequest.go b/internal/sdk/pkg/models/shared/sourceoutbrainamplifyputrequest.go
old mode 100755
new mode 100644
index 9089d8dfe..07e38be55
--- a/internal/sdk/pkg/models/shared/sourceoutbrainamplifyputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceoutbrainamplifyputrequest.go
@@ -7,3 +7,24 @@ type SourceOutbrainAmplifyPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOutbrainAmplifyPutRequest) GetConfiguration() SourceOutbrainAmplifyUpdate {
+ if o == nil {
+ return SourceOutbrainAmplifyUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOutbrainAmplifyPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOutbrainAmplifyPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoutbrainamplifyupdate.go b/internal/sdk/pkg/models/shared/sourceoutbrainamplifyupdate.go
old mode 100755
new mode 100644
index a0e8c8441..07ef97bd5
--- a/internal/sdk/pkg/models/shared/sourceoutbrainamplifyupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceoutbrainamplifyupdate.go
@@ -3,126 +3,172 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest string
+type BothUsernameAndPasswordIsRequiredForAuthenticationRequest string
const (
- SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequestUsernamePassword SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest = "username_password"
+ BothUsernameAndPasswordIsRequiredForAuthenticationRequestUsernamePassword BothUsernameAndPasswordIsRequiredForAuthenticationRequest = "username_password"
)
-func (e SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest) ToPointer() *SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest {
+func (e BothUsernameAndPasswordIsRequiredForAuthenticationRequest) ToPointer() *BothUsernameAndPasswordIsRequiredForAuthenticationRequest {
return &e
}
-func (e *SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest) UnmarshalJSON(data []byte) error {
+func (e *BothUsernameAndPasswordIsRequiredForAuthenticationRequest) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "username_password":
- *e = SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest(v)
+ *e = BothUsernameAndPasswordIsRequiredForAuthenticationRequest(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest: %v", v)
+ return fmt.Errorf("invalid value for BothUsernameAndPasswordIsRequiredForAuthenticationRequest: %v", v)
}
}
-// SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword - Credentials for making authenticated requests requires either username/password or access_token.
-type SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword struct {
+// SourceOutbrainAmplifyUpdateUsernamePassword - Credentials for making authenticated requests requires either username/password or access_token.
+type SourceOutbrainAmplifyUpdateUsernamePassword struct {
// Add Password for authentication.
- Password string `json:"password"`
- Type SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePasswordBothUsernameAndPasswordIsRequiredForAuthenticationRequest `json:"type"`
+ Password string `json:"password"`
+ type_ BothUsernameAndPasswordIsRequiredForAuthenticationRequest `const:"username_password" json:"type"`
// Add Username for authentication.
Username string `json:"username"`
}
-type SourceOutbrainAmplifyUpdateAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests string
+func (s SourceOutbrainAmplifyUpdateUsernamePassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOutbrainAmplifyUpdateUsernamePassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOutbrainAmplifyUpdateUsernamePassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceOutbrainAmplifyUpdateUsernamePassword) GetType() BothUsernameAndPasswordIsRequiredForAuthenticationRequest {
+ return BothUsernameAndPasswordIsRequiredForAuthenticationRequestUsernamePassword
+}
+
+func (o *SourceOutbrainAmplifyUpdateUsernamePassword) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type AccessTokenIsRequiredForAuthenticationRequests string
const (
- SourceOutbrainAmplifyUpdateAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequestsAccessToken SourceOutbrainAmplifyUpdateAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests = "access_token"
+ AccessTokenIsRequiredForAuthenticationRequestsAccessToken AccessTokenIsRequiredForAuthenticationRequests = "access_token"
)
-func (e SourceOutbrainAmplifyUpdateAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests) ToPointer() *SourceOutbrainAmplifyUpdateAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests {
+func (e AccessTokenIsRequiredForAuthenticationRequests) ToPointer() *AccessTokenIsRequiredForAuthenticationRequests {
return &e
}
-func (e *SourceOutbrainAmplifyUpdateAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests) UnmarshalJSON(data []byte) error {
+func (e *AccessTokenIsRequiredForAuthenticationRequests) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceOutbrainAmplifyUpdateAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests(v)
+ *e = AccessTokenIsRequiredForAuthenticationRequests(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOutbrainAmplifyUpdateAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests: %v", v)
+ return fmt.Errorf("invalid value for AccessTokenIsRequiredForAuthenticationRequests: %v", v)
}
}
-// SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken - Credentials for making authenticated requests requires either username/password or access_token.
-type SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken struct {
+// SourceOutbrainAmplifyUpdateAccessToken - Credentials for making authenticated requests requires either username/password or access_token.
+type SourceOutbrainAmplifyUpdateAccessToken struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- Type SourceOutbrainAmplifyUpdateAuthenticationMethodAccessTokenAccessTokenIsRequiredForAuthenticationRequests `json:"type"`
+ AccessToken string `json:"access_token"`
+ type_ AccessTokenIsRequiredForAuthenticationRequests `const:"access_token" json:"type"`
+}
+
+func (s SourceOutbrainAmplifyUpdateAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOutbrainAmplifyUpdateAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOutbrainAmplifyUpdateAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceOutbrainAmplifyUpdateAccessToken) GetType() AccessTokenIsRequiredForAuthenticationRequests {
+ return AccessTokenIsRequiredForAuthenticationRequestsAccessToken
}
type SourceOutbrainAmplifyUpdateAuthenticationMethodType string
const (
- SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken SourceOutbrainAmplifyUpdateAuthenticationMethodType = "source-outbrain-amplify-update_Authentication Method_Access token"
- SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword SourceOutbrainAmplifyUpdateAuthenticationMethodType = "source-outbrain-amplify-update_Authentication Method_Username Password"
+ SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateAccessToken SourceOutbrainAmplifyUpdateAuthenticationMethodType = "source-outbrain-amplify-update_Access token"
+ SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateUsernamePassword SourceOutbrainAmplifyUpdateAuthenticationMethodType = "source-outbrain-amplify-update_Username Password"
)
type SourceOutbrainAmplifyUpdateAuthenticationMethod struct {
- SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken *SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken
- SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword *SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword
+ SourceOutbrainAmplifyUpdateAccessToken *SourceOutbrainAmplifyUpdateAccessToken
+ SourceOutbrainAmplifyUpdateUsernamePassword *SourceOutbrainAmplifyUpdateUsernamePassword
Type SourceOutbrainAmplifyUpdateAuthenticationMethodType
}
-func CreateSourceOutbrainAmplifyUpdateAuthenticationMethodSourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken(sourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken) SourceOutbrainAmplifyUpdateAuthenticationMethod {
- typ := SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken
+func CreateSourceOutbrainAmplifyUpdateAuthenticationMethodSourceOutbrainAmplifyUpdateAccessToken(sourceOutbrainAmplifyUpdateAccessToken SourceOutbrainAmplifyUpdateAccessToken) SourceOutbrainAmplifyUpdateAuthenticationMethod {
+ typ := SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateAccessToken
return SourceOutbrainAmplifyUpdateAuthenticationMethod{
- SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken: &sourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken,
- Type: typ,
+ SourceOutbrainAmplifyUpdateAccessToken: &sourceOutbrainAmplifyUpdateAccessToken,
+ Type: typ,
}
}
-func CreateSourceOutbrainAmplifyUpdateAuthenticationMethodSourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword(sourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword) SourceOutbrainAmplifyUpdateAuthenticationMethod {
- typ := SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword
+func CreateSourceOutbrainAmplifyUpdateAuthenticationMethodSourceOutbrainAmplifyUpdateUsernamePassword(sourceOutbrainAmplifyUpdateUsernamePassword SourceOutbrainAmplifyUpdateUsernamePassword) SourceOutbrainAmplifyUpdateAuthenticationMethod {
+ typ := SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateUsernamePassword
return SourceOutbrainAmplifyUpdateAuthenticationMethod{
- SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword: &sourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword,
+ SourceOutbrainAmplifyUpdateUsernamePassword: &sourceOutbrainAmplifyUpdateUsernamePassword,
Type: typ,
}
}
func (u *SourceOutbrainAmplifyUpdateAuthenticationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken := new(SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken); err == nil {
- u.SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken = sourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken
- u.Type = SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken
+
+ sourceOutbrainAmplifyUpdateAccessToken := new(SourceOutbrainAmplifyUpdateAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceOutbrainAmplifyUpdateAccessToken, "", true, true); err == nil {
+ u.SourceOutbrainAmplifyUpdateAccessToken = sourceOutbrainAmplifyUpdateAccessToken
+ u.Type = SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateAccessToken
return nil
}
- sourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword := new(SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword); err == nil {
- u.SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword = sourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword
- u.Type = SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword
+ sourceOutbrainAmplifyUpdateUsernamePassword := new(SourceOutbrainAmplifyUpdateUsernamePassword)
+ if err := utils.UnmarshalJSON(data, &sourceOutbrainAmplifyUpdateUsernamePassword, "", true, true); err == nil {
+ u.SourceOutbrainAmplifyUpdateUsernamePassword = sourceOutbrainAmplifyUpdateUsernamePassword
+ u.Type = SourceOutbrainAmplifyUpdateAuthenticationMethodTypeSourceOutbrainAmplifyUpdateUsernamePassword
return nil
}
@@ -130,31 +176,31 @@ func (u *SourceOutbrainAmplifyUpdateAuthenticationMethod) UnmarshalJSON(data []b
}
func (u SourceOutbrainAmplifyUpdateAuthenticationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken != nil {
- return json.Marshal(u.SourceOutbrainAmplifyUpdateAuthenticationMethodAccessToken)
+ if u.SourceOutbrainAmplifyUpdateAccessToken != nil {
+ return utils.MarshalJSON(u.SourceOutbrainAmplifyUpdateAccessToken, "", true)
}
- if u.SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword != nil {
- return json.Marshal(u.SourceOutbrainAmplifyUpdateAuthenticationMethodUsernamePassword)
+ if u.SourceOutbrainAmplifyUpdateUsernamePassword != nil {
+ return utils.MarshalJSON(u.SourceOutbrainAmplifyUpdateUsernamePassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion - The granularity used for geo location data in reports.
-type SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion string
+// GranularityForGeoLocationRegion - The granularity used for geo location data in reports.
+type GranularityForGeoLocationRegion string
const (
- SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegionCountry SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion = "country"
- SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegionRegion SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion = "region"
- SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegionSubregion SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion = "subregion"
+ GranularityForGeoLocationRegionCountry GranularityForGeoLocationRegion = "country"
+ GranularityForGeoLocationRegionRegion GranularityForGeoLocationRegion = "region"
+ GranularityForGeoLocationRegionSubregion GranularityForGeoLocationRegion = "subregion"
)
-func (e SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion) ToPointer() *SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion {
+func (e GranularityForGeoLocationRegion) ToPointer() *GranularityForGeoLocationRegion {
return &e
}
-func (e *SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion) UnmarshalJSON(data []byte) error {
+func (e *GranularityForGeoLocationRegion) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -165,27 +211,27 @@ func (e *SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion) UnmarshalJS
case "region":
fallthrough
case "subregion":
- *e = SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion(v)
+ *e = GranularityForGeoLocationRegion(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion: %v", v)
+ return fmt.Errorf("invalid value for GranularityForGeoLocationRegion: %v", v)
}
}
-// SourceOutbrainAmplifyUpdateGranularityForPeriodicReports - The granularity used for periodic data in reports. See the docs.
-type SourceOutbrainAmplifyUpdateGranularityForPeriodicReports string
+// GranularityForPeriodicReports - The granularity used for periodic data in reports. See the docs.
+type GranularityForPeriodicReports string
const (
- SourceOutbrainAmplifyUpdateGranularityForPeriodicReportsDaily SourceOutbrainAmplifyUpdateGranularityForPeriodicReports = "daily"
- SourceOutbrainAmplifyUpdateGranularityForPeriodicReportsWeekly SourceOutbrainAmplifyUpdateGranularityForPeriodicReports = "weekly"
- SourceOutbrainAmplifyUpdateGranularityForPeriodicReportsMonthly SourceOutbrainAmplifyUpdateGranularityForPeriodicReports = "monthly"
+ GranularityForPeriodicReportsDaily GranularityForPeriodicReports = "daily"
+ GranularityForPeriodicReportsWeekly GranularityForPeriodicReports = "weekly"
+ GranularityForPeriodicReportsMonthly GranularityForPeriodicReports = "monthly"
)
-func (e SourceOutbrainAmplifyUpdateGranularityForPeriodicReports) ToPointer() *SourceOutbrainAmplifyUpdateGranularityForPeriodicReports {
+func (e GranularityForPeriodicReports) ToPointer() *GranularityForPeriodicReports {
return &e
}
-func (e *SourceOutbrainAmplifyUpdateGranularityForPeriodicReports) UnmarshalJSON(data []byte) error {
+func (e *GranularityForPeriodicReports) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -196,10 +242,10 @@ func (e *SourceOutbrainAmplifyUpdateGranularityForPeriodicReports) UnmarshalJSON
case "weekly":
fallthrough
case "monthly":
- *e = SourceOutbrainAmplifyUpdateGranularityForPeriodicReports(v)
+ *e = GranularityForPeriodicReports(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOutbrainAmplifyUpdateGranularityForPeriodicReports: %v", v)
+ return fmt.Errorf("invalid value for GranularityForPeriodicReports: %v", v)
}
}
@@ -209,9 +255,44 @@ type SourceOutbrainAmplifyUpdate struct {
// Date in the format YYYY-MM-DD.
EndDate *string `json:"end_date,omitempty"`
// The granularity used for geo location data in reports.
- GeoLocationBreakdown *SourceOutbrainAmplifyUpdateGranularityForGeoLocationRegion `json:"geo_location_breakdown,omitempty"`
+ GeoLocationBreakdown *GranularityForGeoLocationRegion `json:"geo_location_breakdown,omitempty"`
// The granularity used for periodic data in reports. See the docs.
- ReportGranularity *SourceOutbrainAmplifyUpdateGranularityForPeriodicReports `json:"report_granularity,omitempty"`
+ ReportGranularity *GranularityForPeriodicReports `json:"report_granularity,omitempty"`
// Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before this date will not be replicated.
StartDate string `json:"start_date"`
}
+
+func (o *SourceOutbrainAmplifyUpdate) GetCredentials() SourceOutbrainAmplifyUpdateAuthenticationMethod {
+ if o == nil {
+ return SourceOutbrainAmplifyUpdateAuthenticationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceOutbrainAmplifyUpdate) GetEndDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceOutbrainAmplifyUpdate) GetGeoLocationBreakdown() *GranularityForGeoLocationRegion {
+ if o == nil {
+ return nil
+ }
+ return o.GeoLocationBreakdown
+}
+
+func (o *SourceOutbrainAmplifyUpdate) GetReportGranularity() *GranularityForPeriodicReports {
+ if o == nil {
+ return nil
+ }
+ return o.ReportGranularity
+}
+
+func (o *SourceOutbrainAmplifyUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoutreach.go b/internal/sdk/pkg/models/shared/sourceoutreach.go
old mode 100755
new mode 100644
index ca8dfd1e2..6a38db510
--- a/internal/sdk/pkg/models/shared/sourceoutreach.go
+++ b/internal/sdk/pkg/models/shared/sourceoutreach.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceOutreachOutreach string
+type Outreach string
const (
- SourceOutreachOutreachOutreach SourceOutreachOutreach = "outreach"
+ OutreachOutreach Outreach = "outreach"
)
-func (e SourceOutreachOutreach) ToPointer() *SourceOutreachOutreach {
+func (e Outreach) ToPointer() *Outreach {
return &e
}
-func (e *SourceOutreachOutreach) UnmarshalJSON(data []byte) error {
+func (e *Outreach) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "outreach":
- *e = SourceOutreachOutreach(v)
+ *e = Outreach(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceOutreachOutreach: %v", v)
+ return fmt.Errorf("invalid value for Outreach: %v", v)
}
}
@@ -39,8 +40,58 @@ type SourceOutreach struct {
// A Redirect URI is the location where the authorization server sends the user once the app has been successfully authorized and granted an authorization code or access token.
RedirectURI string `json:"redirect_uri"`
// The token for obtaining the new access token.
- RefreshToken string `json:"refresh_token"`
- SourceType SourceOutreachOutreach `json:"sourceType"`
+ RefreshToken string `json:"refresh_token"`
+ sourceType Outreach `const:"outreach" json:"sourceType"`
// The date from which you'd like to replicate data for Outreach API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate string `json:"start_date"`
}
+
+func (s SourceOutreach) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceOutreach) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceOutreach) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceOutreach) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceOutreach) GetRedirectURI() string {
+ if o == nil {
+ return ""
+ }
+ return o.RedirectURI
+}
+
+func (o *SourceOutreach) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceOutreach) GetSourceType() Outreach {
+ return OutreachOutreach
+}
+
+func (o *SourceOutreach) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoutreachcreaterequest.go b/internal/sdk/pkg/models/shared/sourceoutreachcreaterequest.go
old mode 100755
new mode 100644
index 87c2dfea2..b314112aa
--- a/internal/sdk/pkg/models/shared/sourceoutreachcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceoutreachcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceOutreachCreateRequest struct {
Configuration SourceOutreach `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOutreachCreateRequest) GetConfiguration() SourceOutreach {
+ if o == nil {
+ return SourceOutreach{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOutreachCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceOutreachCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOutreachCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceOutreachCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoutreachputrequest.go b/internal/sdk/pkg/models/shared/sourceoutreachputrequest.go
old mode 100755
new mode 100644
index b7ef5cb37..faac76a6e
--- a/internal/sdk/pkg/models/shared/sourceoutreachputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceoutreachputrequest.go
@@ -7,3 +7,24 @@ type SourceOutreachPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceOutreachPutRequest) GetConfiguration() SourceOutreachUpdate {
+ if o == nil {
+ return SourceOutreachUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceOutreachPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceOutreachPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceoutreachupdate.go b/internal/sdk/pkg/models/shared/sourceoutreachupdate.go
old mode 100755
new mode 100644
index 68cfdf643..54fa5a9d9
--- a/internal/sdk/pkg/models/shared/sourceoutreachupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceoutreachupdate.go
@@ -14,3 +14,38 @@ type SourceOutreachUpdate struct {
// The date from which you'd like to replicate data for Outreach API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate string `json:"start_date"`
}
+
+func (o *SourceOutreachUpdate) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceOutreachUpdate) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceOutreachUpdate) GetRedirectURI() string {
+ if o == nil {
+ return ""
+ }
+ return o.RedirectURI
+}
+
+func (o *SourceOutreachUpdate) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceOutreachUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepatchrequest.go b/internal/sdk/pkg/models/shared/sourcepatchrequest.go
old mode 100755
new mode 100644
index ebad0f887..91632db61
--- a/internal/sdk/pkg/models/shared/sourcepatchrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepatchrequest.go
@@ -10,3 +10,31 @@ type SourcePatchRequest struct {
SecretID *string `json:"secretId,omitempty"`
WorkspaceID *string `json:"workspaceId,omitempty"`
}
+
+func (o *SourcePatchRequest) GetConfiguration() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.Configuration
+}
+
+func (o *SourcePatchRequest) GetName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Name
+}
+
+func (o *SourcePatchRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePatchRequest) GetWorkspaceID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepaypaltransaction.go b/internal/sdk/pkg/models/shared/sourcepaypaltransaction.go
old mode 100755
new mode 100644
index cffabd1e3..8b43b54cb
--- a/internal/sdk/pkg/models/shared/sourcepaypaltransaction.go
+++ b/internal/sdk/pkg/models/shared/sourcepaypaltransaction.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourcePaypalTransactionPaypalTransaction string
+type PaypalTransaction string
const (
- SourcePaypalTransactionPaypalTransactionPaypalTransaction SourcePaypalTransactionPaypalTransaction = "paypal-transaction"
+ PaypalTransactionPaypalTransaction PaypalTransaction = "paypal-transaction"
)
-func (e SourcePaypalTransactionPaypalTransaction) ToPointer() *SourcePaypalTransactionPaypalTransaction {
+func (e PaypalTransaction) ToPointer() *PaypalTransaction {
return &e
}
-func (e *SourcePaypalTransactionPaypalTransaction) UnmarshalJSON(data []byte) error {
+func (e *PaypalTransaction) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "paypal-transaction":
- *e = SourcePaypalTransactionPaypalTransaction(v)
+ *e = PaypalTransaction(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePaypalTransactionPaypalTransaction: %v", v)
+ return fmt.Errorf("invalid value for PaypalTransaction: %v", v)
}
}
@@ -38,10 +39,69 @@ type SourcePaypalTransaction struct {
// The Client Secret of your Paypal developer application.
ClientSecret string `json:"client_secret"`
// Determines whether to use the sandbox or production environment.
- IsSandbox bool `json:"is_sandbox"`
+ IsSandbox *bool `default:"false" json:"is_sandbox"`
// The key to refresh the expired access token.
- RefreshToken *string `json:"refresh_token,omitempty"`
- SourceType SourcePaypalTransactionPaypalTransaction `json:"sourceType"`
- // Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before present time.
+ RefreshToken *string `json:"refresh_token,omitempty"`
+ sourceType PaypalTransaction `const:"paypal-transaction" json:"sourceType"`
+ // Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before present time.
StartDate time.Time `json:"start_date"`
+ // The number of days per request. Must be a number between 1 and 31.
+ TimeWindow *int64 `default:"7" json:"time_window"`
+}
+
+func (s SourcePaypalTransaction) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePaypalTransaction) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePaypalTransaction) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourcePaypalTransaction) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourcePaypalTransaction) GetIsSandbox() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IsSandbox
+}
+
+func (o *SourcePaypalTransaction) GetRefreshToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.RefreshToken
+}
+
+func (o *SourcePaypalTransaction) GetSourceType() PaypalTransaction {
+ return PaypalTransactionPaypalTransaction
+}
+
+func (o *SourcePaypalTransaction) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourcePaypalTransaction) GetTimeWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TimeWindow
}
diff --git a/internal/sdk/pkg/models/shared/sourcepaypaltransactioncreaterequest.go b/internal/sdk/pkg/models/shared/sourcepaypaltransactioncreaterequest.go
old mode 100755
new mode 100644
index d6bb310b0..b9bb6385a
--- a/internal/sdk/pkg/models/shared/sourcepaypaltransactioncreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepaypaltransactioncreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePaypalTransactionCreateRequest struct {
Configuration SourcePaypalTransaction `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePaypalTransactionCreateRequest) GetConfiguration() SourcePaypalTransaction {
+ if o == nil {
+ return SourcePaypalTransaction{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePaypalTransactionCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePaypalTransactionCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePaypalTransactionCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePaypalTransactionCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepaypaltransactionputrequest.go b/internal/sdk/pkg/models/shared/sourcepaypaltransactionputrequest.go
old mode 100755
new mode 100644
index 48ea11899..d5f5d4fcb
--- a/internal/sdk/pkg/models/shared/sourcepaypaltransactionputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepaypaltransactionputrequest.go
@@ -7,3 +7,24 @@ type SourcePaypalTransactionPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePaypalTransactionPutRequest) GetConfiguration() SourcePaypalTransactionUpdate {
+ if o == nil {
+ return SourcePaypalTransactionUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePaypalTransactionPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePaypalTransactionPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepaypaltransactionupdate.go b/internal/sdk/pkg/models/shared/sourcepaypaltransactionupdate.go
old mode 100755
new mode 100644
index c81d91a68..f5599979c
--- a/internal/sdk/pkg/models/shared/sourcepaypaltransactionupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepaypaltransactionupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -12,9 +13,64 @@ type SourcePaypalTransactionUpdate struct {
// The Client Secret of your Paypal developer application.
ClientSecret string `json:"client_secret"`
// Determines whether to use the sandbox or production environment.
- IsSandbox bool `json:"is_sandbox"`
+ IsSandbox *bool `default:"false" json:"is_sandbox"`
// The key to refresh the expired access token.
RefreshToken *string `json:"refresh_token,omitempty"`
- // Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before present time.
+ // Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before present time.
StartDate time.Time `json:"start_date"`
+ // The number of days per request. Must be a number between 1 and 31.
+ TimeWindow *int64 `default:"7" json:"time_window"`
+}
+
+func (s SourcePaypalTransactionUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePaypalTransactionUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePaypalTransactionUpdate) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourcePaypalTransactionUpdate) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourcePaypalTransactionUpdate) GetIsSandbox() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IsSandbox
+}
+
+func (o *SourcePaypalTransactionUpdate) GetRefreshToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.RefreshToken
+}
+
+func (o *SourcePaypalTransactionUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourcePaypalTransactionUpdate) GetTimeWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TimeWindow
}
diff --git a/internal/sdk/pkg/models/shared/sourcepaystack.go b/internal/sdk/pkg/models/shared/sourcepaystack.go
old mode 100755
new mode 100644
index 43eefe9f7..7eb64098b
--- a/internal/sdk/pkg/models/shared/sourcepaystack.go
+++ b/internal/sdk/pkg/models/shared/sourcepaystack.go
@@ -5,39 +5,76 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourcePaystackPaystack string
+type Paystack string
const (
- SourcePaystackPaystackPaystack SourcePaystackPaystack = "paystack"
+ PaystackPaystack Paystack = "paystack"
)
-func (e SourcePaystackPaystack) ToPointer() *SourcePaystackPaystack {
+func (e Paystack) ToPointer() *Paystack {
return &e
}
-func (e *SourcePaystackPaystack) UnmarshalJSON(data []byte) error {
+func (e *Paystack) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "paystack":
- *e = SourcePaystackPaystack(v)
+ *e = Paystack(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePaystackPaystack: %v", v)
+ return fmt.Errorf("invalid value for Paystack: %v", v)
}
}
type SourcePaystack struct {
// When set, the connector will always reload data from the past N days, where N is the value set here. This is useful if your data is updated after creation.
- LookbackWindowDays *int64 `json:"lookback_window_days,omitempty"`
+ LookbackWindowDays *int64 `default:"0" json:"lookback_window_days"`
// The Paystack API key (usually starts with 'sk_live_'; find yours here).
- SecretKey string `json:"secret_key"`
- SourceType SourcePaystackPaystack `json:"sourceType"`
+ SecretKey string `json:"secret_key"`
+ sourceType Paystack `const:"paystack" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourcePaystack) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePaystack) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePaystack) GetLookbackWindowDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindowDays
+}
+
+func (o *SourcePaystack) GetSecretKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretKey
+}
+
+func (o *SourcePaystack) GetSourceType() Paystack {
+ return PaystackPaystack
+}
+
+func (o *SourcePaystack) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepaystackcreaterequest.go b/internal/sdk/pkg/models/shared/sourcepaystackcreaterequest.go
old mode 100755
new mode 100644
index ed876fe50..05a2fcfb5
--- a/internal/sdk/pkg/models/shared/sourcepaystackcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepaystackcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePaystackCreateRequest struct {
Configuration SourcePaystack `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePaystackCreateRequest) GetConfiguration() SourcePaystack {
+ if o == nil {
+ return SourcePaystack{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePaystackCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePaystackCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePaystackCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePaystackCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepaystackputrequest.go b/internal/sdk/pkg/models/shared/sourcepaystackputrequest.go
old mode 100755
new mode 100644
index dfec568cc..3a87fb689
--- a/internal/sdk/pkg/models/shared/sourcepaystackputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepaystackputrequest.go
@@ -7,3 +7,24 @@ type SourcePaystackPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePaystackPutRequest) GetConfiguration() SourcePaystackUpdate {
+ if o == nil {
+ return SourcePaystackUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePaystackPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePaystackPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepaystackupdate.go b/internal/sdk/pkg/models/shared/sourcepaystackupdate.go
old mode 100755
new mode 100644
index 67cf8c4dd..67560ac5b
--- a/internal/sdk/pkg/models/shared/sourcepaystackupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepaystackupdate.go
@@ -3,14 +3,47 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
type SourcePaystackUpdate struct {
// When set, the connector will always reload data from the past N days, where N is the value set here. This is useful if your data is updated after creation.
- LookbackWindowDays *int64 `json:"lookback_window_days,omitempty"`
+ LookbackWindowDays *int64 `default:"0" json:"lookback_window_days"`
// The Paystack API key (usually starts with 'sk_live_'; find yours here).
SecretKey string `json:"secret_key"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourcePaystackUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePaystackUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePaystackUpdate) GetLookbackWindowDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindowDays
+}
+
+func (o *SourcePaystackUpdate) GetSecretKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretKey
+}
+
+func (o *SourcePaystackUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcependo.go b/internal/sdk/pkg/models/shared/sourcependo.go
old mode 100755
new mode 100644
index c375df671..ec377ad51
--- a/internal/sdk/pkg/models/shared/sourcependo.go
+++ b/internal/sdk/pkg/models/shared/sourcependo.go
@@ -5,33 +5,56 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePendoPendo string
+type Pendo string
const (
- SourcePendoPendoPendo SourcePendoPendo = "pendo"
+ PendoPendo Pendo = "pendo"
)
-func (e SourcePendoPendo) ToPointer() *SourcePendoPendo {
+func (e Pendo) ToPointer() *Pendo {
return &e
}
-func (e *SourcePendoPendo) UnmarshalJSON(data []byte) error {
+func (e *Pendo) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pendo":
- *e = SourcePendoPendo(v)
+ *e = Pendo(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePendoPendo: %v", v)
+ return fmt.Errorf("invalid value for Pendo: %v", v)
}
}
type SourcePendo struct {
- APIKey string `json:"api_key"`
- SourceType SourcePendoPendo `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Pendo `const:"pendo" json:"sourceType"`
+}
+
+func (s SourcePendo) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePendo) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePendo) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourcePendo) GetSourceType() Pendo {
+ return PendoPendo
}
diff --git a/internal/sdk/pkg/models/shared/sourcependocreaterequest.go b/internal/sdk/pkg/models/shared/sourcependocreaterequest.go
old mode 100755
new mode 100644
index 004705f01..4339c50b3
--- a/internal/sdk/pkg/models/shared/sourcependocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcependocreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePendoCreateRequest struct {
Configuration SourcePendo `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePendoCreateRequest) GetConfiguration() SourcePendo {
+ if o == nil {
+ return SourcePendo{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePendoCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePendoCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePendoCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePendoCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcependoputrequest.go b/internal/sdk/pkg/models/shared/sourcependoputrequest.go
old mode 100755
new mode 100644
index 175264dff..62b651f4c
--- a/internal/sdk/pkg/models/shared/sourcependoputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcependoputrequest.go
@@ -7,3 +7,24 @@ type SourcePendoPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePendoPutRequest) GetConfiguration() SourcePendoUpdate {
+ if o == nil {
+ return SourcePendoUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePendoPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePendoPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcependoupdate.go b/internal/sdk/pkg/models/shared/sourcependoupdate.go
old mode 100755
new mode 100644
index ba5b99d3f..5df25c698
--- a/internal/sdk/pkg/models/shared/sourcependoupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcependoupdate.go
@@ -5,3 +5,10 @@ package shared
type SourcePendoUpdate struct {
APIKey string `json:"api_key"`
}
+
+func (o *SourcePendoUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepersistiq.go b/internal/sdk/pkg/models/shared/sourcepersistiq.go
old mode 100755
new mode 100644
index 1032e62db..6f7059b77
--- a/internal/sdk/pkg/models/shared/sourcepersistiq.go
+++ b/internal/sdk/pkg/models/shared/sourcepersistiq.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePersistiqPersistiq string
+type Persistiq string
const (
- SourcePersistiqPersistiqPersistiq SourcePersistiqPersistiq = "persistiq"
+ PersistiqPersistiq Persistiq = "persistiq"
)
-func (e SourcePersistiqPersistiq) ToPointer() *SourcePersistiqPersistiq {
+func (e Persistiq) ToPointer() *Persistiq {
return &e
}
-func (e *SourcePersistiqPersistiq) UnmarshalJSON(data []byte) error {
+func (e *Persistiq) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "persistiq":
- *e = SourcePersistiqPersistiq(v)
+ *e = Persistiq(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePersistiqPersistiq: %v", v)
+ return fmt.Errorf("invalid value for Persistiq: %v", v)
}
}
type SourcePersistiq struct {
// PersistIq API Key. See the docs for more information on where to find that key.
- APIKey string `json:"api_key"`
- SourceType SourcePersistiqPersistiq `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Persistiq `const:"persistiq" json:"sourceType"`
+}
+
+func (s SourcePersistiq) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePersistiq) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePersistiq) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourcePersistiq) GetSourceType() Persistiq {
+ return PersistiqPersistiq
}
diff --git a/internal/sdk/pkg/models/shared/sourcepersistiqcreaterequest.go b/internal/sdk/pkg/models/shared/sourcepersistiqcreaterequest.go
old mode 100755
new mode 100644
index ff1000598..abda624cf
--- a/internal/sdk/pkg/models/shared/sourcepersistiqcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepersistiqcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePersistiqCreateRequest struct {
Configuration SourcePersistiq `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePersistiqCreateRequest) GetConfiguration() SourcePersistiq {
+ if o == nil {
+ return SourcePersistiq{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePersistiqCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePersistiqCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePersistiqCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePersistiqCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepersistiqputrequest.go b/internal/sdk/pkg/models/shared/sourcepersistiqputrequest.go
old mode 100755
new mode 100644
index 4ac71da92..a3b6b86dc
--- a/internal/sdk/pkg/models/shared/sourcepersistiqputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepersistiqputrequest.go
@@ -7,3 +7,24 @@ type SourcePersistiqPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePersistiqPutRequest) GetConfiguration() SourcePersistiqUpdate {
+ if o == nil {
+ return SourcePersistiqUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePersistiqPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePersistiqPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepersistiqupdate.go b/internal/sdk/pkg/models/shared/sourcepersistiqupdate.go
old mode 100755
new mode 100644
index c604b4d51..03c383b8c
--- a/internal/sdk/pkg/models/shared/sourcepersistiqupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepersistiqupdate.go
@@ -6,3 +6,10 @@ type SourcePersistiqUpdate struct {
// PersistIq API Key. See the docs for more information on where to find that key.
APIKey string `json:"api_key"`
}
+
+func (o *SourcePersistiqUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepexelsapi.go b/internal/sdk/pkg/models/shared/sourcepexelsapi.go
old mode 100755
new mode 100644
index 4ab5a1064..424ffa37a
--- a/internal/sdk/pkg/models/shared/sourcepexelsapi.go
+++ b/internal/sdk/pkg/models/shared/sourcepexelsapi.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePexelsAPIPexelsAPI string
+type PexelsAPI string
const (
- SourcePexelsAPIPexelsAPIPexelsAPI SourcePexelsAPIPexelsAPI = "pexels-api"
+ PexelsAPIPexelsAPI PexelsAPI = "pexels-api"
)
-func (e SourcePexelsAPIPexelsAPI) ToPointer() *SourcePexelsAPIPexelsAPI {
+func (e PexelsAPI) ToPointer() *PexelsAPI {
return &e
}
-func (e *SourcePexelsAPIPexelsAPI) UnmarshalJSON(data []byte) error {
+func (e *PexelsAPI) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pexels-api":
- *e = SourcePexelsAPIPexelsAPI(v)
+ *e = PexelsAPI(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePexelsAPIPexelsAPI: %v", v)
+ return fmt.Errorf("invalid value for PexelsAPI: %v", v)
}
}
@@ -43,6 +44,63 @@ type SourcePexelsAPI struct {
// Optional, the search query, Example Ocean, Tigers, Pears, etc.
Query string `json:"query"`
// Optional, Minimum photo size. The current supported sizes are large(24MP), medium(12MP) or small(4MP).
- Size *string `json:"size,omitempty"`
- SourceType SourcePexelsAPIPexelsAPI `json:"sourceType"`
+ Size *string `json:"size,omitempty"`
+ sourceType PexelsAPI `const:"pexels-api" json:"sourceType"`
+}
+
+func (s SourcePexelsAPI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePexelsAPI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePexelsAPI) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourcePexelsAPI) GetColor() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Color
+}
+
+func (o *SourcePexelsAPI) GetLocale() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Locale
+}
+
+func (o *SourcePexelsAPI) GetOrientation() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Orientation
+}
+
+func (o *SourcePexelsAPI) GetQuery() string {
+ if o == nil {
+ return ""
+ }
+ return o.Query
+}
+
+func (o *SourcePexelsAPI) GetSize() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Size
+}
+
+func (o *SourcePexelsAPI) GetSourceType() PexelsAPI {
+ return PexelsAPIPexelsAPI
}
diff --git a/internal/sdk/pkg/models/shared/sourcepexelsapicreaterequest.go b/internal/sdk/pkg/models/shared/sourcepexelsapicreaterequest.go
old mode 100755
new mode 100644
index 67c927df3..42233234f
--- a/internal/sdk/pkg/models/shared/sourcepexelsapicreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepexelsapicreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePexelsAPICreateRequest struct {
Configuration SourcePexelsAPI `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePexelsAPICreateRequest) GetConfiguration() SourcePexelsAPI {
+ if o == nil {
+ return SourcePexelsAPI{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePexelsAPICreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePexelsAPICreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePexelsAPICreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePexelsAPICreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepexelsapiputrequest.go b/internal/sdk/pkg/models/shared/sourcepexelsapiputrequest.go
old mode 100755
new mode 100644
index 6e46f1b4b..013ee83c4
--- a/internal/sdk/pkg/models/shared/sourcepexelsapiputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepexelsapiputrequest.go
@@ -7,3 +7,24 @@ type SourcePexelsAPIPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePexelsAPIPutRequest) GetConfiguration() SourcePexelsAPIUpdate {
+ if o == nil {
+ return SourcePexelsAPIUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePexelsAPIPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePexelsAPIPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepexelsapiupdate.go b/internal/sdk/pkg/models/shared/sourcepexelsapiupdate.go
old mode 100755
new mode 100644
index 57d245d44..a45402bb7
--- a/internal/sdk/pkg/models/shared/sourcepexelsapiupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepexelsapiupdate.go
@@ -16,3 +16,45 @@ type SourcePexelsAPIUpdate struct {
// Optional, Minimum photo size. The current supported sizes are large(24MP), medium(12MP) or small(4MP).
Size *string `json:"size,omitempty"`
}
+
+func (o *SourcePexelsAPIUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourcePexelsAPIUpdate) GetColor() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Color
+}
+
+func (o *SourcePexelsAPIUpdate) GetLocale() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Locale
+}
+
+func (o *SourcePexelsAPIUpdate) GetOrientation() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Orientation
+}
+
+func (o *SourcePexelsAPIUpdate) GetQuery() string {
+ if o == nil {
+ return ""
+ }
+ return o.Query
+}
+
+func (o *SourcePexelsAPIUpdate) GetSize() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Size
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepinterest.go b/internal/sdk/pkg/models/shared/sourcepinterest.go
old mode 100755
new mode 100644
index 840fe1737..8f86713bc
--- a/internal/sdk/pkg/models/shared/sourcepinterest.go
+++ b/internal/sdk/pkg/models/shared/sourcepinterest.go
@@ -3,166 +3,879 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePinterestAuthorizationMethodAccessTokenAuthMethod string
+type SourcePinterestAuthMethod string
const (
- SourcePinterestAuthorizationMethodAccessTokenAuthMethodAccessToken SourcePinterestAuthorizationMethodAccessTokenAuthMethod = "access_token"
+ SourcePinterestAuthMethodOauth20 SourcePinterestAuthMethod = "oauth2.0"
)
-func (e SourcePinterestAuthorizationMethodAccessTokenAuthMethod) ToPointer() *SourcePinterestAuthorizationMethodAccessTokenAuthMethod {
+func (e SourcePinterestAuthMethod) ToPointer() *SourcePinterestAuthMethod {
return &e
}
-func (e *SourcePinterestAuthorizationMethodAccessTokenAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePinterestAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
- case "access_token":
- *e = SourcePinterestAuthorizationMethodAccessTokenAuthMethod(v)
+ case "oauth2.0":
+ *e = SourcePinterestAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePinterestAuthorizationMethodAccessTokenAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePinterestAuthMethod: %v", v)
+ }
+}
+
+type SourcePinterestOAuth20 struct {
+ authMethod SourcePinterestAuthMethod `const:"oauth2.0" json:"auth_method"`
+ // The Client ID of your OAuth application
+ ClientID *string `json:"client_id,omitempty"`
+ // The Client Secret of your OAuth application.
+ ClientSecret *string `json:"client_secret,omitempty"`
+ // Refresh Token to obtain new Access Token, when it's expired.
+ RefreshToken string `json:"refresh_token"`
+}
+
+func (s SourcePinterestOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePinterestOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePinterestOAuth20) GetAuthMethod() SourcePinterestAuthMethod {
+ return SourcePinterestAuthMethodOauth20
+}
+
+func (o *SourcePinterestOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourcePinterestOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourcePinterestOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+type SourcePinterestAuthorizationMethodType string
+
+const (
+ SourcePinterestAuthorizationMethodTypeSourcePinterestOAuth20 SourcePinterestAuthorizationMethodType = "source-pinterest_OAuth2.0"
+)
+
+type SourcePinterestAuthorizationMethod struct {
+ SourcePinterestOAuth20 *SourcePinterestOAuth20
+
+ Type SourcePinterestAuthorizationMethodType
+}
+
+func CreateSourcePinterestAuthorizationMethodSourcePinterestOAuth20(sourcePinterestOAuth20 SourcePinterestOAuth20) SourcePinterestAuthorizationMethod {
+ typ := SourcePinterestAuthorizationMethodTypeSourcePinterestOAuth20
+
+ return SourcePinterestAuthorizationMethod{
+ SourcePinterestOAuth20: &sourcePinterestOAuth20,
+ Type: typ,
+ }
+}
+
+func (u *SourcePinterestAuthorizationMethod) UnmarshalJSON(data []byte) error {
+
+ sourcePinterestOAuth20 := new(SourcePinterestOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourcePinterestOAuth20, "", true, true); err == nil {
+ u.SourcePinterestOAuth20 = sourcePinterestOAuth20
+ u.Type = SourcePinterestAuthorizationMethodTypeSourcePinterestOAuth20
+ return nil
}
+
+ return errors.New("could not unmarshal into supported union types")
}
-type SourcePinterestAuthorizationMethodAccessToken struct {
- // The Access Token to make authenticated requests.
- AccessToken string `json:"access_token"`
- AuthMethod SourcePinterestAuthorizationMethodAccessTokenAuthMethod `json:"auth_method"`
+func (u SourcePinterestAuthorizationMethod) MarshalJSON() ([]byte, error) {
+ if u.SourcePinterestOAuth20 != nil {
+ return utils.MarshalJSON(u.SourcePinterestOAuth20, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourcePinterestAuthorizationMethodOAuth20AuthMethod string
+// SourcePinterestValidEnums - An enumeration.
+type SourcePinterestValidEnums string
const (
- SourcePinterestAuthorizationMethodOAuth20AuthMethodOauth20 SourcePinterestAuthorizationMethodOAuth20AuthMethod = "oauth2.0"
+ SourcePinterestValidEnumsIndividual SourcePinterestValidEnums = "INDIVIDUAL"
+ SourcePinterestValidEnumsHousehold SourcePinterestValidEnums = "HOUSEHOLD"
)
-func (e SourcePinterestAuthorizationMethodOAuth20AuthMethod) ToPointer() *SourcePinterestAuthorizationMethodOAuth20AuthMethod {
+func (e SourcePinterestValidEnums) ToPointer() *SourcePinterestValidEnums {
return &e
}
-func (e *SourcePinterestAuthorizationMethodOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePinterestValidEnums) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
- case "oauth2.0":
- *e = SourcePinterestAuthorizationMethodOAuth20AuthMethod(v)
+ case "INDIVIDUAL":
+ fallthrough
+ case "HOUSEHOLD":
+ *e = SourcePinterestValidEnums(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePinterestAuthorizationMethodOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePinterestValidEnums: %v", v)
}
}
-type SourcePinterestAuthorizationMethodOAuth20 struct {
- AuthMethod SourcePinterestAuthorizationMethodOAuth20AuthMethod `json:"auth_method"`
- // The Client ID of your OAuth application
- ClientID *string `json:"client_id,omitempty"`
- // The Client Secret of your OAuth application.
- ClientSecret *string `json:"client_secret,omitempty"`
- // Refresh Token to obtain new Access Token, when it's expired.
- RefreshToken string `json:"refresh_token"`
+// SourcePinterestClickWindowDays - Number of days to use as the conversion attribution window for a pin click action.
+type SourcePinterestClickWindowDays int64
+
+const (
+ SourcePinterestClickWindowDaysZero SourcePinterestClickWindowDays = 0
+ SourcePinterestClickWindowDaysOne SourcePinterestClickWindowDays = 1
+ SourcePinterestClickWindowDaysSeven SourcePinterestClickWindowDays = 7
+ SourcePinterestClickWindowDaysFourteen SourcePinterestClickWindowDays = 14
+ SourcePinterestClickWindowDaysThirty SourcePinterestClickWindowDays = 30
+ SourcePinterestClickWindowDaysSixty SourcePinterestClickWindowDays = 60
+)
+
+func (e SourcePinterestClickWindowDays) ToPointer() *SourcePinterestClickWindowDays {
+ return &e
}
-type SourcePinterestAuthorizationMethodType string
+func (e *SourcePinterestClickWindowDays) UnmarshalJSON(data []byte) error {
+ var v int64
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case 0:
+ fallthrough
+ case 1:
+ fallthrough
+ case 7:
+ fallthrough
+ case 14:
+ fallthrough
+ case 30:
+ fallthrough
+ case 60:
+ *e = SourcePinterestClickWindowDays(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourcePinterestClickWindowDays: %v", v)
+ }
+}
+
+// SourcePinterestSchemasValidEnums - An enumeration.
+type SourcePinterestSchemasValidEnums string
const (
- SourcePinterestAuthorizationMethodTypeSourcePinterestAuthorizationMethodOAuth20 SourcePinterestAuthorizationMethodType = "source-pinterest_Authorization Method_OAuth2.0"
- SourcePinterestAuthorizationMethodTypeSourcePinterestAuthorizationMethodAccessToken SourcePinterestAuthorizationMethodType = "source-pinterest_Authorization Method_Access Token"
+ SourcePinterestSchemasValidEnumsAdvertiserID SourcePinterestSchemasValidEnums = "ADVERTISER_ID"
+ SourcePinterestSchemasValidEnumsAdAccountID SourcePinterestSchemasValidEnums = "AD_ACCOUNT_ID"
+ SourcePinterestSchemasValidEnumsAdGroupEntityStatus SourcePinterestSchemasValidEnums = "AD_GROUP_ENTITY_STATUS"
+ SourcePinterestSchemasValidEnumsAdGroupID SourcePinterestSchemasValidEnums = "AD_GROUP_ID"
+ SourcePinterestSchemasValidEnumsAdID SourcePinterestSchemasValidEnums = "AD_ID"
+ SourcePinterestSchemasValidEnumsCampaignDailySpendCap SourcePinterestSchemasValidEnums = "CAMPAIGN_DAILY_SPEND_CAP"
+ SourcePinterestSchemasValidEnumsCampaignEntityStatus SourcePinterestSchemasValidEnums = "CAMPAIGN_ENTITY_STATUS"
+ SourcePinterestSchemasValidEnumsCampaignID SourcePinterestSchemasValidEnums = "CAMPAIGN_ID"
+ SourcePinterestSchemasValidEnumsCampaignLifetimeSpendCap SourcePinterestSchemasValidEnums = "CAMPAIGN_LIFETIME_SPEND_CAP"
+ SourcePinterestSchemasValidEnumsCampaignName SourcePinterestSchemasValidEnums = "CAMPAIGN_NAME"
+ SourcePinterestSchemasValidEnumsCheckoutRoas SourcePinterestSchemasValidEnums = "CHECKOUT_ROAS"
+ SourcePinterestSchemasValidEnumsClickthrough1 SourcePinterestSchemasValidEnums = "CLICKTHROUGH_1"
+ SourcePinterestSchemasValidEnumsClickthrough1Gross SourcePinterestSchemasValidEnums = "CLICKTHROUGH_1_GROSS"
+ SourcePinterestSchemasValidEnumsClickthrough2 SourcePinterestSchemasValidEnums = "CLICKTHROUGH_2"
+ SourcePinterestSchemasValidEnumsCpcInMicroDollar SourcePinterestSchemasValidEnums = "CPC_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsCpmInDollar SourcePinterestSchemasValidEnums = "CPM_IN_DOLLAR"
+ SourcePinterestSchemasValidEnumsCpmInMicroDollar SourcePinterestSchemasValidEnums = "CPM_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsCtr SourcePinterestSchemasValidEnums = "CTR"
+ SourcePinterestSchemasValidEnumsCtr2 SourcePinterestSchemasValidEnums = "CTR_2"
+ SourcePinterestSchemasValidEnumsEcpcvInDollar SourcePinterestSchemasValidEnums = "ECPCV_IN_DOLLAR"
+ SourcePinterestSchemasValidEnumsEcpcvP95InDollar SourcePinterestSchemasValidEnums = "ECPCV_P95_IN_DOLLAR"
+ SourcePinterestSchemasValidEnumsEcpcInDollar SourcePinterestSchemasValidEnums = "ECPC_IN_DOLLAR"
+ SourcePinterestSchemasValidEnumsEcpcInMicroDollar SourcePinterestSchemasValidEnums = "ECPC_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsEcpeInDollar SourcePinterestSchemasValidEnums = "ECPE_IN_DOLLAR"
+ SourcePinterestSchemasValidEnumsEcpmInMicroDollar SourcePinterestSchemasValidEnums = "ECPM_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsEcpvInDollar SourcePinterestSchemasValidEnums = "ECPV_IN_DOLLAR"
+ SourcePinterestSchemasValidEnumsEctr SourcePinterestSchemasValidEnums = "ECTR"
+ SourcePinterestSchemasValidEnumsEengagementRate SourcePinterestSchemasValidEnums = "EENGAGEMENT_RATE"
+ SourcePinterestSchemasValidEnumsEngagement1 SourcePinterestSchemasValidEnums = "ENGAGEMENT_1"
+ SourcePinterestSchemasValidEnumsEngagement2 SourcePinterestSchemasValidEnums = "ENGAGEMENT_2"
+ SourcePinterestSchemasValidEnumsEngagementRate SourcePinterestSchemasValidEnums = "ENGAGEMENT_RATE"
+ SourcePinterestSchemasValidEnumsIdeaPinProductTagVisit1 SourcePinterestSchemasValidEnums = "IDEA_PIN_PRODUCT_TAG_VISIT_1"
+ SourcePinterestSchemasValidEnumsIdeaPinProductTagVisit2 SourcePinterestSchemasValidEnums = "IDEA_PIN_PRODUCT_TAG_VISIT_2"
+ SourcePinterestSchemasValidEnumsImpression1 SourcePinterestSchemasValidEnums = "IMPRESSION_1"
+ SourcePinterestSchemasValidEnumsImpression1Gross SourcePinterestSchemasValidEnums = "IMPRESSION_1_GROSS"
+ SourcePinterestSchemasValidEnumsImpression2 SourcePinterestSchemasValidEnums = "IMPRESSION_2"
+ SourcePinterestSchemasValidEnumsInappCheckoutCostPerAction SourcePinterestSchemasValidEnums = "INAPP_CHECKOUT_COST_PER_ACTION"
+ SourcePinterestSchemasValidEnumsOutboundClick1 SourcePinterestSchemasValidEnums = "OUTBOUND_CLICK_1"
+ SourcePinterestSchemasValidEnumsOutboundClick2 SourcePinterestSchemasValidEnums = "OUTBOUND_CLICK_2"
+ SourcePinterestSchemasValidEnumsPageVisitCostPerAction SourcePinterestSchemasValidEnums = "PAGE_VISIT_COST_PER_ACTION"
+ SourcePinterestSchemasValidEnumsPageVisitRoas SourcePinterestSchemasValidEnums = "PAGE_VISIT_ROAS"
+ SourcePinterestSchemasValidEnumsPaidImpression SourcePinterestSchemasValidEnums = "PAID_IMPRESSION"
+ SourcePinterestSchemasValidEnumsPinID SourcePinterestSchemasValidEnums = "PIN_ID"
+ SourcePinterestSchemasValidEnumsPinPromotionID SourcePinterestSchemasValidEnums = "PIN_PROMOTION_ID"
+ SourcePinterestSchemasValidEnumsRepin1 SourcePinterestSchemasValidEnums = "REPIN_1"
+ SourcePinterestSchemasValidEnumsRepin2 SourcePinterestSchemasValidEnums = "REPIN_2"
+ SourcePinterestSchemasValidEnumsRepinRate SourcePinterestSchemasValidEnums = "REPIN_RATE"
+ SourcePinterestSchemasValidEnumsSpendInDollar SourcePinterestSchemasValidEnums = "SPEND_IN_DOLLAR"
+ SourcePinterestSchemasValidEnumsSpendInMicroDollar SourcePinterestSchemasValidEnums = "SPEND_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalCheckout SourcePinterestSchemasValidEnums = "TOTAL_CHECKOUT"
+ SourcePinterestSchemasValidEnumsTotalCheckoutValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalClickthrough SourcePinterestSchemasValidEnums = "TOTAL_CLICKTHROUGH"
+ SourcePinterestSchemasValidEnumsTotalClickAddToCart SourcePinterestSchemasValidEnums = "TOTAL_CLICK_ADD_TO_CART"
+ SourcePinterestSchemasValidEnumsTotalClickCheckout SourcePinterestSchemasValidEnums = "TOTAL_CLICK_CHECKOUT"
+ SourcePinterestSchemasValidEnumsTotalClickCheckoutValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalClickLead SourcePinterestSchemasValidEnums = "TOTAL_CLICK_LEAD"
+ SourcePinterestSchemasValidEnumsTotalClickSignup SourcePinterestSchemasValidEnums = "TOTAL_CLICK_SIGNUP"
+ SourcePinterestSchemasValidEnumsTotalClickSignupValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalConversions SourcePinterestSchemasValidEnums = "TOTAL_CONVERSIONS"
+ SourcePinterestSchemasValidEnumsTotalCustom SourcePinterestSchemasValidEnums = "TOTAL_CUSTOM"
+ SourcePinterestSchemasValidEnumsTotalEngagement SourcePinterestSchemasValidEnums = "TOTAL_ENGAGEMENT"
+ SourcePinterestSchemasValidEnumsTotalEngagementCheckout SourcePinterestSchemasValidEnums = "TOTAL_ENGAGEMENT_CHECKOUT"
+ SourcePinterestSchemasValidEnumsTotalEngagementCheckoutValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalEngagementLead SourcePinterestSchemasValidEnums = "TOTAL_ENGAGEMENT_LEAD"
+ SourcePinterestSchemasValidEnumsTotalEngagementSignup SourcePinterestSchemasValidEnums = "TOTAL_ENGAGEMENT_SIGNUP"
+ SourcePinterestSchemasValidEnumsTotalEngagementSignupValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalIdeaPinProductTagVisit SourcePinterestSchemasValidEnums = "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT"
+ SourcePinterestSchemasValidEnumsTotalImpressionFrequency SourcePinterestSchemasValidEnums = "TOTAL_IMPRESSION_FREQUENCY"
+ SourcePinterestSchemasValidEnumsTotalImpressionUser SourcePinterestSchemasValidEnums = "TOTAL_IMPRESSION_USER"
+ SourcePinterestSchemasValidEnumsTotalLead SourcePinterestSchemasValidEnums = "TOTAL_LEAD"
+ SourcePinterestSchemasValidEnumsTotalOfflineCheckout SourcePinterestSchemasValidEnums = "TOTAL_OFFLINE_CHECKOUT"
+ SourcePinterestSchemasValidEnumsTotalPageVisit SourcePinterestSchemasValidEnums = "TOTAL_PAGE_VISIT"
+ SourcePinterestSchemasValidEnumsTotalRepinRate SourcePinterestSchemasValidEnums = "TOTAL_REPIN_RATE"
+ SourcePinterestSchemasValidEnumsTotalSignup SourcePinterestSchemasValidEnums = "TOTAL_SIGNUP"
+ SourcePinterestSchemasValidEnumsTotalSignupValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalVideo3SecViews SourcePinterestSchemasValidEnums = "TOTAL_VIDEO_3SEC_VIEWS"
+ SourcePinterestSchemasValidEnumsTotalVideoAvgWatchtimeInSecond SourcePinterestSchemasValidEnums = "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND"
+ SourcePinterestSchemasValidEnumsTotalVideoMrcViews SourcePinterestSchemasValidEnums = "TOTAL_VIDEO_MRC_VIEWS"
+ SourcePinterestSchemasValidEnumsTotalVideoP0Combined SourcePinterestSchemasValidEnums = "TOTAL_VIDEO_P0_COMBINED"
+ SourcePinterestSchemasValidEnumsTotalVideoP100Complete SourcePinterestSchemasValidEnums = "TOTAL_VIDEO_P100_COMPLETE"
+ SourcePinterestSchemasValidEnumsTotalVideoP25Combined SourcePinterestSchemasValidEnums = "TOTAL_VIDEO_P25_COMBINED"
+ SourcePinterestSchemasValidEnumsTotalVideoP50Combined SourcePinterestSchemasValidEnums = "TOTAL_VIDEO_P50_COMBINED"
+ SourcePinterestSchemasValidEnumsTotalVideoP75Combined SourcePinterestSchemasValidEnums = "TOTAL_VIDEO_P75_COMBINED"
+ SourcePinterestSchemasValidEnumsTotalVideoP95Combined SourcePinterestSchemasValidEnums = "TOTAL_VIDEO_P95_COMBINED"
+ SourcePinterestSchemasValidEnumsTotalViewAddToCart SourcePinterestSchemasValidEnums = "TOTAL_VIEW_ADD_TO_CART"
+ SourcePinterestSchemasValidEnumsTotalViewCheckout SourcePinterestSchemasValidEnums = "TOTAL_VIEW_CHECKOUT"
+ SourcePinterestSchemasValidEnumsTotalViewCheckoutValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalViewLead SourcePinterestSchemasValidEnums = "TOTAL_VIEW_LEAD"
+ SourcePinterestSchemasValidEnumsTotalViewSignup SourcePinterestSchemasValidEnums = "TOTAL_VIEW_SIGNUP"
+ SourcePinterestSchemasValidEnumsTotalViewSignupValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalWebCheckout SourcePinterestSchemasValidEnums = "TOTAL_WEB_CHECKOUT"
+ SourcePinterestSchemasValidEnumsTotalWebCheckoutValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalWebClickCheckout SourcePinterestSchemasValidEnums = "TOTAL_WEB_CLICK_CHECKOUT"
+ SourcePinterestSchemasValidEnumsTotalWebClickCheckoutValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalWebEngagementCheckout SourcePinterestSchemasValidEnums = "TOTAL_WEB_ENGAGEMENT_CHECKOUT"
+ SourcePinterestSchemasValidEnumsTotalWebEngagementCheckoutValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsTotalWebSessions SourcePinterestSchemasValidEnums = "TOTAL_WEB_SESSIONS"
+ SourcePinterestSchemasValidEnumsTotalWebViewCheckout SourcePinterestSchemasValidEnums = "TOTAL_WEB_VIEW_CHECKOUT"
+ SourcePinterestSchemasValidEnumsTotalWebViewCheckoutValueInMicroDollar SourcePinterestSchemasValidEnums = "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestSchemasValidEnumsVideo3SecViews2 SourcePinterestSchemasValidEnums = "VIDEO_3SEC_VIEWS_2"
+ SourcePinterestSchemasValidEnumsVideoLength SourcePinterestSchemasValidEnums = "VIDEO_LENGTH"
+ SourcePinterestSchemasValidEnumsVideoMrcViews2 SourcePinterestSchemasValidEnums = "VIDEO_MRC_VIEWS_2"
+ SourcePinterestSchemasValidEnumsVideoP0Combined2 SourcePinterestSchemasValidEnums = "VIDEO_P0_COMBINED_2"
+ SourcePinterestSchemasValidEnumsVideoP100Complete2 SourcePinterestSchemasValidEnums = "VIDEO_P100_COMPLETE_2"
+ SourcePinterestSchemasValidEnumsVideoP25Combined2 SourcePinterestSchemasValidEnums = "VIDEO_P25_COMBINED_2"
+ SourcePinterestSchemasValidEnumsVideoP50Combined2 SourcePinterestSchemasValidEnums = "VIDEO_P50_COMBINED_2"
+ SourcePinterestSchemasValidEnumsVideoP75Combined2 SourcePinterestSchemasValidEnums = "VIDEO_P75_COMBINED_2"
+ SourcePinterestSchemasValidEnumsVideoP95Combined2 SourcePinterestSchemasValidEnums = "VIDEO_P95_COMBINED_2"
+ SourcePinterestSchemasValidEnumsWebCheckoutCostPerAction SourcePinterestSchemasValidEnums = "WEB_CHECKOUT_COST_PER_ACTION"
+ SourcePinterestSchemasValidEnumsWebCheckoutRoas SourcePinterestSchemasValidEnums = "WEB_CHECKOUT_ROAS"
+ SourcePinterestSchemasValidEnumsWebSessions1 SourcePinterestSchemasValidEnums = "WEB_SESSIONS_1"
+ SourcePinterestSchemasValidEnumsWebSessions2 SourcePinterestSchemasValidEnums = "WEB_SESSIONS_2"
)
-type SourcePinterestAuthorizationMethod struct {
- SourcePinterestAuthorizationMethodOAuth20 *SourcePinterestAuthorizationMethodOAuth20
- SourcePinterestAuthorizationMethodAccessToken *SourcePinterestAuthorizationMethodAccessToken
+func (e SourcePinterestSchemasValidEnums) ToPointer() *SourcePinterestSchemasValidEnums {
+ return &e
+}
- Type SourcePinterestAuthorizationMethodType
+func (e *SourcePinterestSchemasValidEnums) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "ADVERTISER_ID":
+ fallthrough
+ case "AD_ACCOUNT_ID":
+ fallthrough
+ case "AD_GROUP_ENTITY_STATUS":
+ fallthrough
+ case "AD_GROUP_ID":
+ fallthrough
+ case "AD_ID":
+ fallthrough
+ case "CAMPAIGN_DAILY_SPEND_CAP":
+ fallthrough
+ case "CAMPAIGN_ENTITY_STATUS":
+ fallthrough
+ case "CAMPAIGN_ID":
+ fallthrough
+ case "CAMPAIGN_LIFETIME_SPEND_CAP":
+ fallthrough
+ case "CAMPAIGN_NAME":
+ fallthrough
+ case "CHECKOUT_ROAS":
+ fallthrough
+ case "CLICKTHROUGH_1":
+ fallthrough
+ case "CLICKTHROUGH_1_GROSS":
+ fallthrough
+ case "CLICKTHROUGH_2":
+ fallthrough
+ case "CPC_IN_MICRO_DOLLAR":
+ fallthrough
+ case "CPM_IN_DOLLAR":
+ fallthrough
+ case "CPM_IN_MICRO_DOLLAR":
+ fallthrough
+ case "CTR":
+ fallthrough
+ case "CTR_2":
+ fallthrough
+ case "ECPCV_IN_DOLLAR":
+ fallthrough
+ case "ECPCV_P95_IN_DOLLAR":
+ fallthrough
+ case "ECPC_IN_DOLLAR":
+ fallthrough
+ case "ECPC_IN_MICRO_DOLLAR":
+ fallthrough
+ case "ECPE_IN_DOLLAR":
+ fallthrough
+ case "ECPM_IN_MICRO_DOLLAR":
+ fallthrough
+ case "ECPV_IN_DOLLAR":
+ fallthrough
+ case "ECTR":
+ fallthrough
+ case "EENGAGEMENT_RATE":
+ fallthrough
+ case "ENGAGEMENT_1":
+ fallthrough
+ case "ENGAGEMENT_2":
+ fallthrough
+ case "ENGAGEMENT_RATE":
+ fallthrough
+ case "IDEA_PIN_PRODUCT_TAG_VISIT_1":
+ fallthrough
+ case "IDEA_PIN_PRODUCT_TAG_VISIT_2":
+ fallthrough
+ case "IMPRESSION_1":
+ fallthrough
+ case "IMPRESSION_1_GROSS":
+ fallthrough
+ case "IMPRESSION_2":
+ fallthrough
+ case "INAPP_CHECKOUT_COST_PER_ACTION":
+ fallthrough
+ case "OUTBOUND_CLICK_1":
+ fallthrough
+ case "OUTBOUND_CLICK_2":
+ fallthrough
+ case "PAGE_VISIT_COST_PER_ACTION":
+ fallthrough
+ case "PAGE_VISIT_ROAS":
+ fallthrough
+ case "PAID_IMPRESSION":
+ fallthrough
+ case "PIN_ID":
+ fallthrough
+ case "PIN_PROMOTION_ID":
+ fallthrough
+ case "REPIN_1":
+ fallthrough
+ case "REPIN_2":
+ fallthrough
+ case "REPIN_RATE":
+ fallthrough
+ case "SPEND_IN_DOLLAR":
+ fallthrough
+ case "SPEND_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_CHECKOUT":
+ fallthrough
+ case "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_CLICKTHROUGH":
+ fallthrough
+ case "TOTAL_CLICK_ADD_TO_CART":
+ fallthrough
+ case "TOTAL_CLICK_CHECKOUT":
+ fallthrough
+ case "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_CLICK_LEAD":
+ fallthrough
+ case "TOTAL_CLICK_SIGNUP":
+ fallthrough
+ case "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_CONVERSIONS":
+ fallthrough
+ case "TOTAL_CUSTOM":
+ fallthrough
+ case "TOTAL_ENGAGEMENT":
+ fallthrough
+ case "TOTAL_ENGAGEMENT_CHECKOUT":
+ fallthrough
+ case "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_ENGAGEMENT_LEAD":
+ fallthrough
+ case "TOTAL_ENGAGEMENT_SIGNUP":
+ fallthrough
+ case "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT":
+ fallthrough
+ case "TOTAL_IMPRESSION_FREQUENCY":
+ fallthrough
+ case "TOTAL_IMPRESSION_USER":
+ fallthrough
+ case "TOTAL_LEAD":
+ fallthrough
+ case "TOTAL_OFFLINE_CHECKOUT":
+ fallthrough
+ case "TOTAL_PAGE_VISIT":
+ fallthrough
+ case "TOTAL_REPIN_RATE":
+ fallthrough
+ case "TOTAL_SIGNUP":
+ fallthrough
+ case "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_VIDEO_3SEC_VIEWS":
+ fallthrough
+ case "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND":
+ fallthrough
+ case "TOTAL_VIDEO_MRC_VIEWS":
+ fallthrough
+ case "TOTAL_VIDEO_P0_COMBINED":
+ fallthrough
+ case "TOTAL_VIDEO_P100_COMPLETE":
+ fallthrough
+ case "TOTAL_VIDEO_P25_COMBINED":
+ fallthrough
+ case "TOTAL_VIDEO_P50_COMBINED":
+ fallthrough
+ case "TOTAL_VIDEO_P75_COMBINED":
+ fallthrough
+ case "TOTAL_VIDEO_P95_COMBINED":
+ fallthrough
+ case "TOTAL_VIEW_ADD_TO_CART":
+ fallthrough
+ case "TOTAL_VIEW_CHECKOUT":
+ fallthrough
+ case "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_VIEW_LEAD":
+ fallthrough
+ case "TOTAL_VIEW_SIGNUP":
+ fallthrough
+ case "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_WEB_CHECKOUT":
+ fallthrough
+ case "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_WEB_CLICK_CHECKOUT":
+ fallthrough
+ case "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_WEB_ENGAGEMENT_CHECKOUT":
+ fallthrough
+ case "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_WEB_SESSIONS":
+ fallthrough
+ case "TOTAL_WEB_VIEW_CHECKOUT":
+ fallthrough
+ case "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "VIDEO_3SEC_VIEWS_2":
+ fallthrough
+ case "VIDEO_LENGTH":
+ fallthrough
+ case "VIDEO_MRC_VIEWS_2":
+ fallthrough
+ case "VIDEO_P0_COMBINED_2":
+ fallthrough
+ case "VIDEO_P100_COMPLETE_2":
+ fallthrough
+ case "VIDEO_P25_COMBINED_2":
+ fallthrough
+ case "VIDEO_P50_COMBINED_2":
+ fallthrough
+ case "VIDEO_P75_COMBINED_2":
+ fallthrough
+ case "VIDEO_P95_COMBINED_2":
+ fallthrough
+ case "WEB_CHECKOUT_COST_PER_ACTION":
+ fallthrough
+ case "WEB_CHECKOUT_ROAS":
+ fallthrough
+ case "WEB_SESSIONS_1":
+ fallthrough
+ case "WEB_SESSIONS_2":
+ *e = SourcePinterestSchemasValidEnums(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourcePinterestSchemasValidEnums: %v", v)
+ }
}
-func CreateSourcePinterestAuthorizationMethodSourcePinterestAuthorizationMethodOAuth20(sourcePinterestAuthorizationMethodOAuth20 SourcePinterestAuthorizationMethodOAuth20) SourcePinterestAuthorizationMethod {
- typ := SourcePinterestAuthorizationMethodTypeSourcePinterestAuthorizationMethodOAuth20
+// SourcePinterestConversionReportTime - The date by which the conversion metrics returned from this endpoint will be reported. There are two dates associated with a conversion event: the date that the user interacted with the ad, and the date that the user completed a conversion event.
+type SourcePinterestConversionReportTime string
- return SourcePinterestAuthorizationMethod{
- SourcePinterestAuthorizationMethodOAuth20: &sourcePinterestAuthorizationMethodOAuth20,
- Type: typ,
+const (
+ SourcePinterestConversionReportTimeTimeOfAdAction SourcePinterestConversionReportTime = "TIME_OF_AD_ACTION"
+ SourcePinterestConversionReportTimeTimeOfConversion SourcePinterestConversionReportTime = "TIME_OF_CONVERSION"
+)
+
+func (e SourcePinterestConversionReportTime) ToPointer() *SourcePinterestConversionReportTime {
+ return &e
+}
+
+func (e *SourcePinterestConversionReportTime) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "TIME_OF_AD_ACTION":
+ fallthrough
+ case "TIME_OF_CONVERSION":
+ *e = SourcePinterestConversionReportTime(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourcePinterestConversionReportTime: %v", v)
}
}
-func CreateSourcePinterestAuthorizationMethodSourcePinterestAuthorizationMethodAccessToken(sourcePinterestAuthorizationMethodAccessToken SourcePinterestAuthorizationMethodAccessToken) SourcePinterestAuthorizationMethod {
- typ := SourcePinterestAuthorizationMethodTypeSourcePinterestAuthorizationMethodAccessToken
+// SourcePinterestEngagementWindowDays - Number of days to use as the conversion attribution window for an engagement action.
+type SourcePinterestEngagementWindowDays int64
- return SourcePinterestAuthorizationMethod{
- SourcePinterestAuthorizationMethodAccessToken: &sourcePinterestAuthorizationMethodAccessToken,
- Type: typ,
+const (
+ SourcePinterestEngagementWindowDaysZero SourcePinterestEngagementWindowDays = 0
+ SourcePinterestEngagementWindowDaysOne SourcePinterestEngagementWindowDays = 1
+ SourcePinterestEngagementWindowDaysSeven SourcePinterestEngagementWindowDays = 7
+ SourcePinterestEngagementWindowDaysFourteen SourcePinterestEngagementWindowDays = 14
+ SourcePinterestEngagementWindowDaysThirty SourcePinterestEngagementWindowDays = 30
+ SourcePinterestEngagementWindowDaysSixty SourcePinterestEngagementWindowDays = 60
+)
+
+func (e SourcePinterestEngagementWindowDays) ToPointer() *SourcePinterestEngagementWindowDays {
+ return &e
+}
+
+func (e *SourcePinterestEngagementWindowDays) UnmarshalJSON(data []byte) error {
+ var v int64
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case 0:
+ fallthrough
+ case 1:
+ fallthrough
+ case 7:
+ fallthrough
+ case 14:
+ fallthrough
+ case 30:
+ fallthrough
+ case 60:
+ *e = SourcePinterestEngagementWindowDays(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourcePinterestEngagementWindowDays: %v", v)
}
}
-func (u *SourcePinterestAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+// SourcePinterestGranularity - Chosen granularity for API
+type SourcePinterestGranularity string
- sourcePinterestAuthorizationMethodAccessToken := new(SourcePinterestAuthorizationMethodAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePinterestAuthorizationMethodAccessToken); err == nil {
- u.SourcePinterestAuthorizationMethodAccessToken = sourcePinterestAuthorizationMethodAccessToken
- u.Type = SourcePinterestAuthorizationMethodTypeSourcePinterestAuthorizationMethodAccessToken
+const (
+ SourcePinterestGranularityTotal SourcePinterestGranularity = "TOTAL"
+ SourcePinterestGranularityDay SourcePinterestGranularity = "DAY"
+ SourcePinterestGranularityHour SourcePinterestGranularity = "HOUR"
+ SourcePinterestGranularityWeek SourcePinterestGranularity = "WEEK"
+ SourcePinterestGranularityMonth SourcePinterestGranularity = "MONTH"
+)
+
+func (e SourcePinterestGranularity) ToPointer() *SourcePinterestGranularity {
+ return &e
+}
+
+func (e *SourcePinterestGranularity) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "TOTAL":
+ fallthrough
+ case "DAY":
+ fallthrough
+ case "HOUR":
+ fallthrough
+ case "WEEK":
+ fallthrough
+ case "MONTH":
+ *e = SourcePinterestGranularity(v)
return nil
+ default:
+ return fmt.Errorf("invalid value for SourcePinterestGranularity: %v", v)
}
+}
+
+// SourcePinterestLevel - Chosen level for API
+type SourcePinterestLevel string
+
+const (
+ SourcePinterestLevelAdvertiser SourcePinterestLevel = "ADVERTISER"
+ SourcePinterestLevelAdvertiserTargeting SourcePinterestLevel = "ADVERTISER_TARGETING"
+ SourcePinterestLevelCampaign SourcePinterestLevel = "CAMPAIGN"
+ SourcePinterestLevelCampaignTargeting SourcePinterestLevel = "CAMPAIGN_TARGETING"
+ SourcePinterestLevelAdGroup SourcePinterestLevel = "AD_GROUP"
+ SourcePinterestLevelAdGroupTargeting SourcePinterestLevel = "AD_GROUP_TARGETING"
+ SourcePinterestLevelPinPromotion SourcePinterestLevel = "PIN_PROMOTION"
+ SourcePinterestLevelPinPromotionTargeting SourcePinterestLevel = "PIN_PROMOTION_TARGETING"
+ SourcePinterestLevelKeyword SourcePinterestLevel = "KEYWORD"
+ SourcePinterestLevelProductGroup SourcePinterestLevel = "PRODUCT_GROUP"
+ SourcePinterestLevelProductGroupTargeting SourcePinterestLevel = "PRODUCT_GROUP_TARGETING"
+ SourcePinterestLevelProductItem SourcePinterestLevel = "PRODUCT_ITEM"
+)
+
+func (e SourcePinterestLevel) ToPointer() *SourcePinterestLevel {
+ return &e
+}
- sourcePinterestAuthorizationMethodOAuth20 := new(SourcePinterestAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePinterestAuthorizationMethodOAuth20); err == nil {
- u.SourcePinterestAuthorizationMethodOAuth20 = sourcePinterestAuthorizationMethodOAuth20
- u.Type = SourcePinterestAuthorizationMethodTypeSourcePinterestAuthorizationMethodOAuth20
+func (e *SourcePinterestLevel) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "ADVERTISER":
+ fallthrough
+ case "ADVERTISER_TARGETING":
+ fallthrough
+ case "CAMPAIGN":
+ fallthrough
+ case "CAMPAIGN_TARGETING":
+ fallthrough
+ case "AD_GROUP":
+ fallthrough
+ case "AD_GROUP_TARGETING":
+ fallthrough
+ case "PIN_PROMOTION":
+ fallthrough
+ case "PIN_PROMOTION_TARGETING":
+ fallthrough
+ case "KEYWORD":
+ fallthrough
+ case "PRODUCT_GROUP":
+ fallthrough
+ case "PRODUCT_GROUP_TARGETING":
+ fallthrough
+ case "PRODUCT_ITEM":
+ *e = SourcePinterestLevel(v)
return nil
+ default:
+ return fmt.Errorf("invalid value for SourcePinterestLevel: %v", v)
}
+}
- return errors.New("could not unmarshal into supported union types")
+// SourcePinterestViewWindowDays - Number of days to use as the conversion attribution window for a view action.
+type SourcePinterestViewWindowDays int64
+
+const (
+ SourcePinterestViewWindowDaysZero SourcePinterestViewWindowDays = 0
+ SourcePinterestViewWindowDaysOne SourcePinterestViewWindowDays = 1
+ SourcePinterestViewWindowDaysSeven SourcePinterestViewWindowDays = 7
+ SourcePinterestViewWindowDaysFourteen SourcePinterestViewWindowDays = 14
+ SourcePinterestViewWindowDaysThirty SourcePinterestViewWindowDays = 30
+ SourcePinterestViewWindowDaysSixty SourcePinterestViewWindowDays = 60
+)
+
+func (e SourcePinterestViewWindowDays) ToPointer() *SourcePinterestViewWindowDays {
+ return &e
}
-func (u SourcePinterestAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourcePinterestAuthorizationMethodAccessToken != nil {
- return json.Marshal(u.SourcePinterestAuthorizationMethodAccessToken)
+func (e *SourcePinterestViewWindowDays) UnmarshalJSON(data []byte) error {
+ var v int64
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case 0:
+ fallthrough
+ case 1:
+ fallthrough
+ case 7:
+ fallthrough
+ case 14:
+ fallthrough
+ case 30:
+ fallthrough
+ case 60:
+ *e = SourcePinterestViewWindowDays(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourcePinterestViewWindowDays: %v", v)
}
+}
+
+// SourcePinterestReportConfig - Config for custom report
+type SourcePinterestReportConfig struct {
+ // List of types of attribution for the conversion report
+ AttributionTypes []SourcePinterestValidEnums `json:"attribution_types,omitempty"`
+ // Number of days to use as the conversion attribution window for a pin click action.
+ ClickWindowDays *SourcePinterestClickWindowDays `default:"30" json:"click_window_days"`
+ // A list of chosen columns
+ Columns []SourcePinterestSchemasValidEnums `json:"columns"`
+ // The date by which the conversion metrics returned from this endpoint will be reported. There are two dates associated with a conversion event: the date that the user interacted with the ad, and the date that the user completed a conversion event.
+ ConversionReportTime *SourcePinterestConversionReportTime `default:"TIME_OF_AD_ACTION" json:"conversion_report_time"`
+ // Number of days to use as the conversion attribution window for an engagement action.
+ EngagementWindowDays *SourcePinterestEngagementWindowDays `default:"30" json:"engagement_window_days"`
+ // Chosen granularity for API
+ Granularity *SourcePinterestGranularity `default:"TOTAL" json:"granularity"`
+ // Chosen level for API
+ Level *SourcePinterestLevel `default:"ADVERTISER" json:"level"`
+ // The name value of report
+ Name string `json:"name"`
+ // A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by report api (913 days from today).
+ StartDate *types.Date `json:"start_date,omitempty"`
+ // Number of days to use as the conversion attribution window for a view action.
+ ViewWindowDays *SourcePinterestViewWindowDays `default:"30" json:"view_window_days"`
+}
+
+func (s SourcePinterestReportConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if u.SourcePinterestAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourcePinterestAuthorizationMethodOAuth20)
+func (s *SourcePinterestReportConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
}
+ return nil
+}
- return nil, nil
+func (o *SourcePinterestReportConfig) GetAttributionTypes() []SourcePinterestValidEnums {
+ if o == nil {
+ return nil
+ }
+ return o.AttributionTypes
}
-type SourcePinterestPinterest string
+func (o *SourcePinterestReportConfig) GetClickWindowDays() *SourcePinterestClickWindowDays {
+ if o == nil {
+ return nil
+ }
+ return o.ClickWindowDays
+}
+
+func (o *SourcePinterestReportConfig) GetColumns() []SourcePinterestSchemasValidEnums {
+ if o == nil {
+ return []SourcePinterestSchemasValidEnums{}
+ }
+ return o.Columns
+}
+
+func (o *SourcePinterestReportConfig) GetConversionReportTime() *SourcePinterestConversionReportTime {
+ if o == nil {
+ return nil
+ }
+ return o.ConversionReportTime
+}
+
+func (o *SourcePinterestReportConfig) GetEngagementWindowDays() *SourcePinterestEngagementWindowDays {
+ if o == nil {
+ return nil
+ }
+ return o.EngagementWindowDays
+}
+
+func (o *SourcePinterestReportConfig) GetGranularity() *SourcePinterestGranularity {
+ if o == nil {
+ return nil
+ }
+ return o.Granularity
+}
+
+func (o *SourcePinterestReportConfig) GetLevel() *SourcePinterestLevel {
+ if o == nil {
+ return nil
+ }
+ return o.Level
+}
+
+func (o *SourcePinterestReportConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePinterestReportConfig) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourcePinterestReportConfig) GetViewWindowDays() *SourcePinterestViewWindowDays {
+ if o == nil {
+ return nil
+ }
+ return o.ViewWindowDays
+}
+
+type Pinterest string
const (
- SourcePinterestPinterestPinterest SourcePinterestPinterest = "pinterest"
+ PinterestPinterest Pinterest = "pinterest"
)
-func (e SourcePinterestPinterest) ToPointer() *SourcePinterestPinterest {
+func (e Pinterest) ToPointer() *Pinterest {
return &e
}
-func (e *SourcePinterestPinterest) UnmarshalJSON(data []byte) error {
+func (e *Pinterest) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pinterest":
- *e = SourcePinterestPinterest(v)
+ *e = Pinterest(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePinterestPinterest: %v", v)
+ return fmt.Errorf("invalid value for Pinterest: %v", v)
}
}
@@ -198,9 +911,54 @@ func (e *SourcePinterestStatus) UnmarshalJSON(data []byte) error {
type SourcePinterest struct {
Credentials *SourcePinterestAuthorizationMethod `json:"credentials,omitempty"`
- SourceType SourcePinterestPinterest `json:"sourceType"`
+ // A list which contains ad statistics entries; each entry must have a name and can contain fields, breakdowns or action_breakdowns. Click on "add" to fill this field.
+ CustomReports []SourcePinterestReportConfig `json:"custom_reports,omitempty"`
+ sourceType *Pinterest `const:"pinterest" json:"sourceType,omitempty"`
// A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by api (89 days from today).
- StartDate types.Date `json:"start_date"`
+ StartDate *types.Date `json:"start_date,omitempty"`
// Entity statuses based off of campaigns, ad_groups, and ads. If you do not have a status set, it will be ignored completely.
Status []SourcePinterestStatus `json:"status,omitempty"`
}
+
+func (s SourcePinterest) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePinterest) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePinterest) GetCredentials() *SourcePinterestAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourcePinterest) GetCustomReports() []SourcePinterestReportConfig {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReports
+}
+
+func (o *SourcePinterest) GetSourceType() *Pinterest {
+ return PinterestPinterest.ToPointer()
+}
+
+func (o *SourcePinterest) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourcePinterest) GetStatus() []SourcePinterestStatus {
+ if o == nil {
+ return nil
+ }
+ return o.Status
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepinterestcreaterequest.go b/internal/sdk/pkg/models/shared/sourcepinterestcreaterequest.go
old mode 100755
new mode 100644
index 9e6a28aab..077886984
--- a/internal/sdk/pkg/models/shared/sourcepinterestcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepinterestcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePinterestCreateRequest struct {
Configuration SourcePinterest `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePinterestCreateRequest) GetConfiguration() SourcePinterest {
+ if o == nil {
+ return SourcePinterest{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePinterestCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePinterestCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePinterestCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePinterestCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepinterestputrequest.go b/internal/sdk/pkg/models/shared/sourcepinterestputrequest.go
old mode 100755
new mode 100644
index 12802a7cc..9b5aad42a
--- a/internal/sdk/pkg/models/shared/sourcepinterestputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepinterestputrequest.go
@@ -7,3 +7,24 @@ type SourcePinterestPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePinterestPutRequest) GetConfiguration() SourcePinterestUpdate {
+ if o == nil {
+ return SourcePinterestUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePinterestPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePinterestPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepinterestupdate.go b/internal/sdk/pkg/models/shared/sourcepinterestupdate.go
old mode 100755
new mode 100644
index 50d3ed725..741c3260e
--- a/internal/sdk/pkg/models/shared/sourcepinterestupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepinterestupdate.go
@@ -3,158 +3,871 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePinterestUpdateAuthorizationMethodAccessTokenAuthMethod string
+type SourcePinterestUpdateAuthMethod string
const (
- SourcePinterestUpdateAuthorizationMethodAccessTokenAuthMethodAccessToken SourcePinterestUpdateAuthorizationMethodAccessTokenAuthMethod = "access_token"
+ SourcePinterestUpdateAuthMethodOauth20 SourcePinterestUpdateAuthMethod = "oauth2.0"
)
-func (e SourcePinterestUpdateAuthorizationMethodAccessTokenAuthMethod) ToPointer() *SourcePinterestUpdateAuthorizationMethodAccessTokenAuthMethod {
+func (e SourcePinterestUpdateAuthMethod) ToPointer() *SourcePinterestUpdateAuthMethod {
return &e
}
-func (e *SourcePinterestUpdateAuthorizationMethodAccessTokenAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePinterestUpdateAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
- case "access_token":
- *e = SourcePinterestUpdateAuthorizationMethodAccessTokenAuthMethod(v)
+ case "oauth2.0":
+ *e = SourcePinterestUpdateAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePinterestUpdateAuthorizationMethodAccessTokenAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePinterestUpdateAuthMethod: %v", v)
+ }
+}
+
+type SourcePinterestUpdateOAuth20 struct {
+ authMethod SourcePinterestUpdateAuthMethod `const:"oauth2.0" json:"auth_method"`
+ // The Client ID of your OAuth application
+ ClientID *string `json:"client_id,omitempty"`
+ // The Client Secret of your OAuth application.
+ ClientSecret *string `json:"client_secret,omitempty"`
+ // Refresh Token to obtain new Access Token, when it's expired.
+ RefreshToken string `json:"refresh_token"`
+}
+
+func (s SourcePinterestUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePinterestUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePinterestUpdateOAuth20) GetAuthMethod() SourcePinterestUpdateAuthMethod {
+ return SourcePinterestUpdateAuthMethodOauth20
+}
+
+func (o *SourcePinterestUpdateOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourcePinterestUpdateOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientSecret
}
-type SourcePinterestUpdateAuthorizationMethodAccessToken struct {
- // The Access Token to make authenticated requests.
- AccessToken string `json:"access_token"`
- AuthMethod SourcePinterestUpdateAuthorizationMethodAccessTokenAuthMethod `json:"auth_method"`
+func (o *SourcePinterestUpdateOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
}
-type SourcePinterestUpdateAuthorizationMethodOAuth20AuthMethod string
+type SourcePinterestUpdateAuthorizationMethodType string
const (
- SourcePinterestUpdateAuthorizationMethodOAuth20AuthMethodOauth20 SourcePinterestUpdateAuthorizationMethodOAuth20AuthMethod = "oauth2.0"
+ SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateOAuth20 SourcePinterestUpdateAuthorizationMethodType = "source-pinterest-update_OAuth2.0"
)
-func (e SourcePinterestUpdateAuthorizationMethodOAuth20AuthMethod) ToPointer() *SourcePinterestUpdateAuthorizationMethodOAuth20AuthMethod {
+type SourcePinterestUpdateAuthorizationMethod struct {
+ SourcePinterestUpdateOAuth20 *SourcePinterestUpdateOAuth20
+
+ Type SourcePinterestUpdateAuthorizationMethodType
+}
+
+func CreateSourcePinterestUpdateAuthorizationMethodSourcePinterestUpdateOAuth20(sourcePinterestUpdateOAuth20 SourcePinterestUpdateOAuth20) SourcePinterestUpdateAuthorizationMethod {
+ typ := SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateOAuth20
+
+ return SourcePinterestUpdateAuthorizationMethod{
+ SourcePinterestUpdateOAuth20: &sourcePinterestUpdateOAuth20,
+ Type: typ,
+ }
+}
+
+func (u *SourcePinterestUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
+
+ sourcePinterestUpdateOAuth20 := new(SourcePinterestUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourcePinterestUpdateOAuth20, "", true, true); err == nil {
+ u.SourcePinterestUpdateOAuth20 = sourcePinterestUpdateOAuth20
+ u.Type = SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateOAuth20
+ return nil
+ }
+
+ return errors.New("could not unmarshal into supported union types")
+}
+
+func (u SourcePinterestUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
+ if u.SourcePinterestUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourcePinterestUpdateOAuth20, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
+}
+
+// SourcePinterestUpdateValidEnums - An enumeration.
+type SourcePinterestUpdateValidEnums string
+
+const (
+ SourcePinterestUpdateValidEnumsIndividual SourcePinterestUpdateValidEnums = "INDIVIDUAL"
+ SourcePinterestUpdateValidEnumsHousehold SourcePinterestUpdateValidEnums = "HOUSEHOLD"
+)
+
+func (e SourcePinterestUpdateValidEnums) ToPointer() *SourcePinterestUpdateValidEnums {
return &e
}
-func (e *SourcePinterestUpdateAuthorizationMethodOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePinterestUpdateValidEnums) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
- case "oauth2.0":
- *e = SourcePinterestUpdateAuthorizationMethodOAuth20AuthMethod(v)
+ case "INDIVIDUAL":
+ fallthrough
+ case "HOUSEHOLD":
+ *e = SourcePinterestUpdateValidEnums(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePinterestUpdateAuthorizationMethodOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePinterestUpdateValidEnums: %v", v)
}
}
-type SourcePinterestUpdateAuthorizationMethodOAuth20 struct {
- AuthMethod SourcePinterestUpdateAuthorizationMethodOAuth20AuthMethod `json:"auth_method"`
- // The Client ID of your OAuth application
- ClientID *string `json:"client_id,omitempty"`
- // The Client Secret of your OAuth application.
- ClientSecret *string `json:"client_secret,omitempty"`
- // Refresh Token to obtain new Access Token, when it's expired.
- RefreshToken string `json:"refresh_token"`
+// ClickWindowDays - Number of days to use as the conversion attribution window for a pin click action.
+type ClickWindowDays int64
+
+const (
+ ClickWindowDaysZero ClickWindowDays = 0
+ ClickWindowDaysOne ClickWindowDays = 1
+ ClickWindowDaysSeven ClickWindowDays = 7
+ ClickWindowDaysFourteen ClickWindowDays = 14
+ ClickWindowDaysThirty ClickWindowDays = 30
+ ClickWindowDaysSixty ClickWindowDays = 60
+)
+
+func (e ClickWindowDays) ToPointer() *ClickWindowDays {
+ return &e
}
-type SourcePinterestUpdateAuthorizationMethodType string
+func (e *ClickWindowDays) UnmarshalJSON(data []byte) error {
+ var v int64
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case 0:
+ fallthrough
+ case 1:
+ fallthrough
+ case 7:
+ fallthrough
+ case 14:
+ fallthrough
+ case 30:
+ fallthrough
+ case 60:
+ *e = ClickWindowDays(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for ClickWindowDays: %v", v)
+ }
+}
+
+// SourcePinterestUpdateSchemasValidEnums - An enumeration.
+type SourcePinterestUpdateSchemasValidEnums string
const (
- SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateAuthorizationMethodOAuth20 SourcePinterestUpdateAuthorizationMethodType = "source-pinterest-update_Authorization Method_OAuth2.0"
- SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateAuthorizationMethodAccessToken SourcePinterestUpdateAuthorizationMethodType = "source-pinterest-update_Authorization Method_Access Token"
+ SourcePinterestUpdateSchemasValidEnumsAdvertiserID SourcePinterestUpdateSchemasValidEnums = "ADVERTISER_ID"
+ SourcePinterestUpdateSchemasValidEnumsAdAccountID SourcePinterestUpdateSchemasValidEnums = "AD_ACCOUNT_ID"
+ SourcePinterestUpdateSchemasValidEnumsAdGroupEntityStatus SourcePinterestUpdateSchemasValidEnums = "AD_GROUP_ENTITY_STATUS"
+ SourcePinterestUpdateSchemasValidEnumsAdGroupID SourcePinterestUpdateSchemasValidEnums = "AD_GROUP_ID"
+ SourcePinterestUpdateSchemasValidEnumsAdID SourcePinterestUpdateSchemasValidEnums = "AD_ID"
+ SourcePinterestUpdateSchemasValidEnumsCampaignDailySpendCap SourcePinterestUpdateSchemasValidEnums = "CAMPAIGN_DAILY_SPEND_CAP"
+ SourcePinterestUpdateSchemasValidEnumsCampaignEntityStatus SourcePinterestUpdateSchemasValidEnums = "CAMPAIGN_ENTITY_STATUS"
+ SourcePinterestUpdateSchemasValidEnumsCampaignID SourcePinterestUpdateSchemasValidEnums = "CAMPAIGN_ID"
+ SourcePinterestUpdateSchemasValidEnumsCampaignLifetimeSpendCap SourcePinterestUpdateSchemasValidEnums = "CAMPAIGN_LIFETIME_SPEND_CAP"
+ SourcePinterestUpdateSchemasValidEnumsCampaignName SourcePinterestUpdateSchemasValidEnums = "CAMPAIGN_NAME"
+ SourcePinterestUpdateSchemasValidEnumsCheckoutRoas SourcePinterestUpdateSchemasValidEnums = "CHECKOUT_ROAS"
+ SourcePinterestUpdateSchemasValidEnumsClickthrough1 SourcePinterestUpdateSchemasValidEnums = "CLICKTHROUGH_1"
+ SourcePinterestUpdateSchemasValidEnumsClickthrough1Gross SourcePinterestUpdateSchemasValidEnums = "CLICKTHROUGH_1_GROSS"
+ SourcePinterestUpdateSchemasValidEnumsClickthrough2 SourcePinterestUpdateSchemasValidEnums = "CLICKTHROUGH_2"
+ SourcePinterestUpdateSchemasValidEnumsCpcInMicroDollar SourcePinterestUpdateSchemasValidEnums = "CPC_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsCpmInDollar SourcePinterestUpdateSchemasValidEnums = "CPM_IN_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsCpmInMicroDollar SourcePinterestUpdateSchemasValidEnums = "CPM_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsCtr SourcePinterestUpdateSchemasValidEnums = "CTR"
+ SourcePinterestUpdateSchemasValidEnumsCtr2 SourcePinterestUpdateSchemasValidEnums = "CTR_2"
+ SourcePinterestUpdateSchemasValidEnumsEcpcvInDollar SourcePinterestUpdateSchemasValidEnums = "ECPCV_IN_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsEcpcvP95InDollar SourcePinterestUpdateSchemasValidEnums = "ECPCV_P95_IN_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsEcpcInDollar SourcePinterestUpdateSchemasValidEnums = "ECPC_IN_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsEcpcInMicroDollar SourcePinterestUpdateSchemasValidEnums = "ECPC_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsEcpeInDollar SourcePinterestUpdateSchemasValidEnums = "ECPE_IN_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsEcpmInMicroDollar SourcePinterestUpdateSchemasValidEnums = "ECPM_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsEcpvInDollar SourcePinterestUpdateSchemasValidEnums = "ECPV_IN_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsEctr SourcePinterestUpdateSchemasValidEnums = "ECTR"
+ SourcePinterestUpdateSchemasValidEnumsEengagementRate SourcePinterestUpdateSchemasValidEnums = "EENGAGEMENT_RATE"
+ SourcePinterestUpdateSchemasValidEnumsEngagement1 SourcePinterestUpdateSchemasValidEnums = "ENGAGEMENT_1"
+ SourcePinterestUpdateSchemasValidEnumsEngagement2 SourcePinterestUpdateSchemasValidEnums = "ENGAGEMENT_2"
+ SourcePinterestUpdateSchemasValidEnumsEngagementRate SourcePinterestUpdateSchemasValidEnums = "ENGAGEMENT_RATE"
+ SourcePinterestUpdateSchemasValidEnumsIdeaPinProductTagVisit1 SourcePinterestUpdateSchemasValidEnums = "IDEA_PIN_PRODUCT_TAG_VISIT_1"
+ SourcePinterestUpdateSchemasValidEnumsIdeaPinProductTagVisit2 SourcePinterestUpdateSchemasValidEnums = "IDEA_PIN_PRODUCT_TAG_VISIT_2"
+ SourcePinterestUpdateSchemasValidEnumsImpression1 SourcePinterestUpdateSchemasValidEnums = "IMPRESSION_1"
+ SourcePinterestUpdateSchemasValidEnumsImpression1Gross SourcePinterestUpdateSchemasValidEnums = "IMPRESSION_1_GROSS"
+ SourcePinterestUpdateSchemasValidEnumsImpression2 SourcePinterestUpdateSchemasValidEnums = "IMPRESSION_2"
+ SourcePinterestUpdateSchemasValidEnumsInappCheckoutCostPerAction SourcePinterestUpdateSchemasValidEnums = "INAPP_CHECKOUT_COST_PER_ACTION"
+ SourcePinterestUpdateSchemasValidEnumsOutboundClick1 SourcePinterestUpdateSchemasValidEnums = "OUTBOUND_CLICK_1"
+ SourcePinterestUpdateSchemasValidEnumsOutboundClick2 SourcePinterestUpdateSchemasValidEnums = "OUTBOUND_CLICK_2"
+ SourcePinterestUpdateSchemasValidEnumsPageVisitCostPerAction SourcePinterestUpdateSchemasValidEnums = "PAGE_VISIT_COST_PER_ACTION"
+ SourcePinterestUpdateSchemasValidEnumsPageVisitRoas SourcePinterestUpdateSchemasValidEnums = "PAGE_VISIT_ROAS"
+ SourcePinterestUpdateSchemasValidEnumsPaidImpression SourcePinterestUpdateSchemasValidEnums = "PAID_IMPRESSION"
+ SourcePinterestUpdateSchemasValidEnumsPinID SourcePinterestUpdateSchemasValidEnums = "PIN_ID"
+ SourcePinterestUpdateSchemasValidEnumsPinPromotionID SourcePinterestUpdateSchemasValidEnums = "PIN_PROMOTION_ID"
+ SourcePinterestUpdateSchemasValidEnumsRepin1 SourcePinterestUpdateSchemasValidEnums = "REPIN_1"
+ SourcePinterestUpdateSchemasValidEnumsRepin2 SourcePinterestUpdateSchemasValidEnums = "REPIN_2"
+ SourcePinterestUpdateSchemasValidEnumsRepinRate SourcePinterestUpdateSchemasValidEnums = "REPIN_RATE"
+ SourcePinterestUpdateSchemasValidEnumsSpendInDollar SourcePinterestUpdateSchemasValidEnums = "SPEND_IN_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsSpendInMicroDollar SourcePinterestUpdateSchemasValidEnums = "SPEND_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalCheckout SourcePinterestUpdateSchemasValidEnums = "TOTAL_CHECKOUT"
+ SourcePinterestUpdateSchemasValidEnumsTotalCheckoutValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalClickthrough SourcePinterestUpdateSchemasValidEnums = "TOTAL_CLICKTHROUGH"
+ SourcePinterestUpdateSchemasValidEnumsTotalClickAddToCart SourcePinterestUpdateSchemasValidEnums = "TOTAL_CLICK_ADD_TO_CART"
+ SourcePinterestUpdateSchemasValidEnumsTotalClickCheckout SourcePinterestUpdateSchemasValidEnums = "TOTAL_CLICK_CHECKOUT"
+ SourcePinterestUpdateSchemasValidEnumsTotalClickCheckoutValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalClickLead SourcePinterestUpdateSchemasValidEnums = "TOTAL_CLICK_LEAD"
+ SourcePinterestUpdateSchemasValidEnumsTotalClickSignup SourcePinterestUpdateSchemasValidEnums = "TOTAL_CLICK_SIGNUP"
+ SourcePinterestUpdateSchemasValidEnumsTotalClickSignupValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalConversions SourcePinterestUpdateSchemasValidEnums = "TOTAL_CONVERSIONS"
+ SourcePinterestUpdateSchemasValidEnumsTotalCustom SourcePinterestUpdateSchemasValidEnums = "TOTAL_CUSTOM"
+ SourcePinterestUpdateSchemasValidEnumsTotalEngagement SourcePinterestUpdateSchemasValidEnums = "TOTAL_ENGAGEMENT"
+ SourcePinterestUpdateSchemasValidEnumsTotalEngagementCheckout SourcePinterestUpdateSchemasValidEnums = "TOTAL_ENGAGEMENT_CHECKOUT"
+ SourcePinterestUpdateSchemasValidEnumsTotalEngagementCheckoutValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalEngagementLead SourcePinterestUpdateSchemasValidEnums = "TOTAL_ENGAGEMENT_LEAD"
+ SourcePinterestUpdateSchemasValidEnumsTotalEngagementSignup SourcePinterestUpdateSchemasValidEnums = "TOTAL_ENGAGEMENT_SIGNUP"
+ SourcePinterestUpdateSchemasValidEnumsTotalEngagementSignupValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalIdeaPinProductTagVisit SourcePinterestUpdateSchemasValidEnums = "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT"
+ SourcePinterestUpdateSchemasValidEnumsTotalImpressionFrequency SourcePinterestUpdateSchemasValidEnums = "TOTAL_IMPRESSION_FREQUENCY"
+ SourcePinterestUpdateSchemasValidEnumsTotalImpressionUser SourcePinterestUpdateSchemasValidEnums = "TOTAL_IMPRESSION_USER"
+ SourcePinterestUpdateSchemasValidEnumsTotalLead SourcePinterestUpdateSchemasValidEnums = "TOTAL_LEAD"
+ SourcePinterestUpdateSchemasValidEnumsTotalOfflineCheckout SourcePinterestUpdateSchemasValidEnums = "TOTAL_OFFLINE_CHECKOUT"
+ SourcePinterestUpdateSchemasValidEnumsTotalPageVisit SourcePinterestUpdateSchemasValidEnums = "TOTAL_PAGE_VISIT"
+ SourcePinterestUpdateSchemasValidEnumsTotalRepinRate SourcePinterestUpdateSchemasValidEnums = "TOTAL_REPIN_RATE"
+ SourcePinterestUpdateSchemasValidEnumsTotalSignup SourcePinterestUpdateSchemasValidEnums = "TOTAL_SIGNUP"
+ SourcePinterestUpdateSchemasValidEnumsTotalSignupValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalVideo3SecViews SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIDEO_3SEC_VIEWS"
+ SourcePinterestUpdateSchemasValidEnumsTotalVideoAvgWatchtimeInSecond SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND"
+ SourcePinterestUpdateSchemasValidEnumsTotalVideoMrcViews SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIDEO_MRC_VIEWS"
+ SourcePinterestUpdateSchemasValidEnumsTotalVideoP0Combined SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIDEO_P0_COMBINED"
+ SourcePinterestUpdateSchemasValidEnumsTotalVideoP100Complete SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIDEO_P100_COMPLETE"
+ SourcePinterestUpdateSchemasValidEnumsTotalVideoP25Combined SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIDEO_P25_COMBINED"
+ SourcePinterestUpdateSchemasValidEnumsTotalVideoP50Combined SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIDEO_P50_COMBINED"
+ SourcePinterestUpdateSchemasValidEnumsTotalVideoP75Combined SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIDEO_P75_COMBINED"
+ SourcePinterestUpdateSchemasValidEnumsTotalVideoP95Combined SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIDEO_P95_COMBINED"
+ SourcePinterestUpdateSchemasValidEnumsTotalViewAddToCart SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIEW_ADD_TO_CART"
+ SourcePinterestUpdateSchemasValidEnumsTotalViewCheckout SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIEW_CHECKOUT"
+ SourcePinterestUpdateSchemasValidEnumsTotalViewCheckoutValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalViewLead SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIEW_LEAD"
+ SourcePinterestUpdateSchemasValidEnumsTotalViewSignup SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIEW_SIGNUP"
+ SourcePinterestUpdateSchemasValidEnumsTotalViewSignupValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalWebCheckout SourcePinterestUpdateSchemasValidEnums = "TOTAL_WEB_CHECKOUT"
+ SourcePinterestUpdateSchemasValidEnumsTotalWebCheckoutValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalWebClickCheckout SourcePinterestUpdateSchemasValidEnums = "TOTAL_WEB_CLICK_CHECKOUT"
+ SourcePinterestUpdateSchemasValidEnumsTotalWebClickCheckoutValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalWebEngagementCheckout SourcePinterestUpdateSchemasValidEnums = "TOTAL_WEB_ENGAGEMENT_CHECKOUT"
+ SourcePinterestUpdateSchemasValidEnumsTotalWebEngagementCheckoutValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsTotalWebSessions SourcePinterestUpdateSchemasValidEnums = "TOTAL_WEB_SESSIONS"
+ SourcePinterestUpdateSchemasValidEnumsTotalWebViewCheckout SourcePinterestUpdateSchemasValidEnums = "TOTAL_WEB_VIEW_CHECKOUT"
+ SourcePinterestUpdateSchemasValidEnumsTotalWebViewCheckoutValueInMicroDollar SourcePinterestUpdateSchemasValidEnums = "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR"
+ SourcePinterestUpdateSchemasValidEnumsVideo3SecViews2 SourcePinterestUpdateSchemasValidEnums = "VIDEO_3SEC_VIEWS_2"
+ SourcePinterestUpdateSchemasValidEnumsVideoLength SourcePinterestUpdateSchemasValidEnums = "VIDEO_LENGTH"
+ SourcePinterestUpdateSchemasValidEnumsVideoMrcViews2 SourcePinterestUpdateSchemasValidEnums = "VIDEO_MRC_VIEWS_2"
+ SourcePinterestUpdateSchemasValidEnumsVideoP0Combined2 SourcePinterestUpdateSchemasValidEnums = "VIDEO_P0_COMBINED_2"
+ SourcePinterestUpdateSchemasValidEnumsVideoP100Complete2 SourcePinterestUpdateSchemasValidEnums = "VIDEO_P100_COMPLETE_2"
+ SourcePinterestUpdateSchemasValidEnumsVideoP25Combined2 SourcePinterestUpdateSchemasValidEnums = "VIDEO_P25_COMBINED_2"
+ SourcePinterestUpdateSchemasValidEnumsVideoP50Combined2 SourcePinterestUpdateSchemasValidEnums = "VIDEO_P50_COMBINED_2"
+ SourcePinterestUpdateSchemasValidEnumsVideoP75Combined2 SourcePinterestUpdateSchemasValidEnums = "VIDEO_P75_COMBINED_2"
+ SourcePinterestUpdateSchemasValidEnumsVideoP95Combined2 SourcePinterestUpdateSchemasValidEnums = "VIDEO_P95_COMBINED_2"
+ SourcePinterestUpdateSchemasValidEnumsWebCheckoutCostPerAction SourcePinterestUpdateSchemasValidEnums = "WEB_CHECKOUT_COST_PER_ACTION"
+ SourcePinterestUpdateSchemasValidEnumsWebCheckoutRoas SourcePinterestUpdateSchemasValidEnums = "WEB_CHECKOUT_ROAS"
+ SourcePinterestUpdateSchemasValidEnumsWebSessions1 SourcePinterestUpdateSchemasValidEnums = "WEB_SESSIONS_1"
+ SourcePinterestUpdateSchemasValidEnumsWebSessions2 SourcePinterestUpdateSchemasValidEnums = "WEB_SESSIONS_2"
)
-type SourcePinterestUpdateAuthorizationMethod struct {
- SourcePinterestUpdateAuthorizationMethodOAuth20 *SourcePinterestUpdateAuthorizationMethodOAuth20
- SourcePinterestUpdateAuthorizationMethodAccessToken *SourcePinterestUpdateAuthorizationMethodAccessToken
+func (e SourcePinterestUpdateSchemasValidEnums) ToPointer() *SourcePinterestUpdateSchemasValidEnums {
+ return &e
+}
- Type SourcePinterestUpdateAuthorizationMethodType
+func (e *SourcePinterestUpdateSchemasValidEnums) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "ADVERTISER_ID":
+ fallthrough
+ case "AD_ACCOUNT_ID":
+ fallthrough
+ case "AD_GROUP_ENTITY_STATUS":
+ fallthrough
+ case "AD_GROUP_ID":
+ fallthrough
+ case "AD_ID":
+ fallthrough
+ case "CAMPAIGN_DAILY_SPEND_CAP":
+ fallthrough
+ case "CAMPAIGN_ENTITY_STATUS":
+ fallthrough
+ case "CAMPAIGN_ID":
+ fallthrough
+ case "CAMPAIGN_LIFETIME_SPEND_CAP":
+ fallthrough
+ case "CAMPAIGN_NAME":
+ fallthrough
+ case "CHECKOUT_ROAS":
+ fallthrough
+ case "CLICKTHROUGH_1":
+ fallthrough
+ case "CLICKTHROUGH_1_GROSS":
+ fallthrough
+ case "CLICKTHROUGH_2":
+ fallthrough
+ case "CPC_IN_MICRO_DOLLAR":
+ fallthrough
+ case "CPM_IN_DOLLAR":
+ fallthrough
+ case "CPM_IN_MICRO_DOLLAR":
+ fallthrough
+ case "CTR":
+ fallthrough
+ case "CTR_2":
+ fallthrough
+ case "ECPCV_IN_DOLLAR":
+ fallthrough
+ case "ECPCV_P95_IN_DOLLAR":
+ fallthrough
+ case "ECPC_IN_DOLLAR":
+ fallthrough
+ case "ECPC_IN_MICRO_DOLLAR":
+ fallthrough
+ case "ECPE_IN_DOLLAR":
+ fallthrough
+ case "ECPM_IN_MICRO_DOLLAR":
+ fallthrough
+ case "ECPV_IN_DOLLAR":
+ fallthrough
+ case "ECTR":
+ fallthrough
+ case "EENGAGEMENT_RATE":
+ fallthrough
+ case "ENGAGEMENT_1":
+ fallthrough
+ case "ENGAGEMENT_2":
+ fallthrough
+ case "ENGAGEMENT_RATE":
+ fallthrough
+ case "IDEA_PIN_PRODUCT_TAG_VISIT_1":
+ fallthrough
+ case "IDEA_PIN_PRODUCT_TAG_VISIT_2":
+ fallthrough
+ case "IMPRESSION_1":
+ fallthrough
+ case "IMPRESSION_1_GROSS":
+ fallthrough
+ case "IMPRESSION_2":
+ fallthrough
+ case "INAPP_CHECKOUT_COST_PER_ACTION":
+ fallthrough
+ case "OUTBOUND_CLICK_1":
+ fallthrough
+ case "OUTBOUND_CLICK_2":
+ fallthrough
+ case "PAGE_VISIT_COST_PER_ACTION":
+ fallthrough
+ case "PAGE_VISIT_ROAS":
+ fallthrough
+ case "PAID_IMPRESSION":
+ fallthrough
+ case "PIN_ID":
+ fallthrough
+ case "PIN_PROMOTION_ID":
+ fallthrough
+ case "REPIN_1":
+ fallthrough
+ case "REPIN_2":
+ fallthrough
+ case "REPIN_RATE":
+ fallthrough
+ case "SPEND_IN_DOLLAR":
+ fallthrough
+ case "SPEND_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_CHECKOUT":
+ fallthrough
+ case "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_CLICKTHROUGH":
+ fallthrough
+ case "TOTAL_CLICK_ADD_TO_CART":
+ fallthrough
+ case "TOTAL_CLICK_CHECKOUT":
+ fallthrough
+ case "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_CLICK_LEAD":
+ fallthrough
+ case "TOTAL_CLICK_SIGNUP":
+ fallthrough
+ case "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_CONVERSIONS":
+ fallthrough
+ case "TOTAL_CUSTOM":
+ fallthrough
+ case "TOTAL_ENGAGEMENT":
+ fallthrough
+ case "TOTAL_ENGAGEMENT_CHECKOUT":
+ fallthrough
+ case "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_ENGAGEMENT_LEAD":
+ fallthrough
+ case "TOTAL_ENGAGEMENT_SIGNUP":
+ fallthrough
+ case "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT":
+ fallthrough
+ case "TOTAL_IMPRESSION_FREQUENCY":
+ fallthrough
+ case "TOTAL_IMPRESSION_USER":
+ fallthrough
+ case "TOTAL_LEAD":
+ fallthrough
+ case "TOTAL_OFFLINE_CHECKOUT":
+ fallthrough
+ case "TOTAL_PAGE_VISIT":
+ fallthrough
+ case "TOTAL_REPIN_RATE":
+ fallthrough
+ case "TOTAL_SIGNUP":
+ fallthrough
+ case "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_VIDEO_3SEC_VIEWS":
+ fallthrough
+ case "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND":
+ fallthrough
+ case "TOTAL_VIDEO_MRC_VIEWS":
+ fallthrough
+ case "TOTAL_VIDEO_P0_COMBINED":
+ fallthrough
+ case "TOTAL_VIDEO_P100_COMPLETE":
+ fallthrough
+ case "TOTAL_VIDEO_P25_COMBINED":
+ fallthrough
+ case "TOTAL_VIDEO_P50_COMBINED":
+ fallthrough
+ case "TOTAL_VIDEO_P75_COMBINED":
+ fallthrough
+ case "TOTAL_VIDEO_P95_COMBINED":
+ fallthrough
+ case "TOTAL_VIEW_ADD_TO_CART":
+ fallthrough
+ case "TOTAL_VIEW_CHECKOUT":
+ fallthrough
+ case "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_VIEW_LEAD":
+ fallthrough
+ case "TOTAL_VIEW_SIGNUP":
+ fallthrough
+ case "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_WEB_CHECKOUT":
+ fallthrough
+ case "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_WEB_CLICK_CHECKOUT":
+ fallthrough
+ case "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_WEB_ENGAGEMENT_CHECKOUT":
+ fallthrough
+ case "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "TOTAL_WEB_SESSIONS":
+ fallthrough
+ case "TOTAL_WEB_VIEW_CHECKOUT":
+ fallthrough
+ case "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR":
+ fallthrough
+ case "VIDEO_3SEC_VIEWS_2":
+ fallthrough
+ case "VIDEO_LENGTH":
+ fallthrough
+ case "VIDEO_MRC_VIEWS_2":
+ fallthrough
+ case "VIDEO_P0_COMBINED_2":
+ fallthrough
+ case "VIDEO_P100_COMPLETE_2":
+ fallthrough
+ case "VIDEO_P25_COMBINED_2":
+ fallthrough
+ case "VIDEO_P50_COMBINED_2":
+ fallthrough
+ case "VIDEO_P75_COMBINED_2":
+ fallthrough
+ case "VIDEO_P95_COMBINED_2":
+ fallthrough
+ case "WEB_CHECKOUT_COST_PER_ACTION":
+ fallthrough
+ case "WEB_CHECKOUT_ROAS":
+ fallthrough
+ case "WEB_SESSIONS_1":
+ fallthrough
+ case "WEB_SESSIONS_2":
+ *e = SourcePinterestUpdateSchemasValidEnums(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourcePinterestUpdateSchemasValidEnums: %v", v)
+ }
}
-func CreateSourcePinterestUpdateAuthorizationMethodSourcePinterestUpdateAuthorizationMethodOAuth20(sourcePinterestUpdateAuthorizationMethodOAuth20 SourcePinterestUpdateAuthorizationMethodOAuth20) SourcePinterestUpdateAuthorizationMethod {
- typ := SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateAuthorizationMethodOAuth20
+// ConversionReportTime - The date by which the conversion metrics returned from this endpoint will be reported. There are two dates associated with a conversion event: the date that the user interacted with the ad, and the date that the user completed a conversion event.
+type ConversionReportTime string
- return SourcePinterestUpdateAuthorizationMethod{
- SourcePinterestUpdateAuthorizationMethodOAuth20: &sourcePinterestUpdateAuthorizationMethodOAuth20,
- Type: typ,
+const (
+ ConversionReportTimeTimeOfAdAction ConversionReportTime = "TIME_OF_AD_ACTION"
+ ConversionReportTimeTimeOfConversion ConversionReportTime = "TIME_OF_CONVERSION"
+)
+
+func (e ConversionReportTime) ToPointer() *ConversionReportTime {
+ return &e
+}
+
+func (e *ConversionReportTime) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "TIME_OF_AD_ACTION":
+ fallthrough
+ case "TIME_OF_CONVERSION":
+ *e = ConversionReportTime(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for ConversionReportTime: %v", v)
}
}
-func CreateSourcePinterestUpdateAuthorizationMethodSourcePinterestUpdateAuthorizationMethodAccessToken(sourcePinterestUpdateAuthorizationMethodAccessToken SourcePinterestUpdateAuthorizationMethodAccessToken) SourcePinterestUpdateAuthorizationMethod {
- typ := SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateAuthorizationMethodAccessToken
+// EngagementWindowDays - Number of days to use as the conversion attribution window for an engagement action.
+type EngagementWindowDays int64
- return SourcePinterestUpdateAuthorizationMethod{
- SourcePinterestUpdateAuthorizationMethodAccessToken: &sourcePinterestUpdateAuthorizationMethodAccessToken,
- Type: typ,
+const (
+ EngagementWindowDaysZero EngagementWindowDays = 0
+ EngagementWindowDaysOne EngagementWindowDays = 1
+ EngagementWindowDaysSeven EngagementWindowDays = 7
+ EngagementWindowDaysFourteen EngagementWindowDays = 14
+ EngagementWindowDaysThirty EngagementWindowDays = 30
+ EngagementWindowDaysSixty EngagementWindowDays = 60
+)
+
+func (e EngagementWindowDays) ToPointer() *EngagementWindowDays {
+ return &e
+}
+
+func (e *EngagementWindowDays) UnmarshalJSON(data []byte) error {
+ var v int64
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case 0:
+ fallthrough
+ case 1:
+ fallthrough
+ case 7:
+ fallthrough
+ case 14:
+ fallthrough
+ case 30:
+ fallthrough
+ case 60:
+ *e = EngagementWindowDays(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for EngagementWindowDays: %v", v)
}
}
-func (u *SourcePinterestUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+// Granularity - Chosen granularity for API
+type Granularity string
+
+const (
+ GranularityTotal Granularity = "TOTAL"
+ GranularityDay Granularity = "DAY"
+ GranularityHour Granularity = "HOUR"
+ GranularityWeek Granularity = "WEEK"
+ GranularityMonth Granularity = "MONTH"
+)
+
+func (e Granularity) ToPointer() *Granularity {
+ return &e
+}
+
+func (e *Granularity) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "TOTAL":
+ fallthrough
+ case "DAY":
+ fallthrough
+ case "HOUR":
+ fallthrough
+ case "WEEK":
+ fallthrough
+ case "MONTH":
+ *e = Granularity(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for Granularity: %v", v)
+ }
+}
+
+// SourcePinterestUpdateLevel - Chosen level for API
+type SourcePinterestUpdateLevel string
+
+const (
+ SourcePinterestUpdateLevelAdvertiser SourcePinterestUpdateLevel = "ADVERTISER"
+ SourcePinterestUpdateLevelAdvertiserTargeting SourcePinterestUpdateLevel = "ADVERTISER_TARGETING"
+ SourcePinterestUpdateLevelCampaign SourcePinterestUpdateLevel = "CAMPAIGN"
+ SourcePinterestUpdateLevelCampaignTargeting SourcePinterestUpdateLevel = "CAMPAIGN_TARGETING"
+ SourcePinterestUpdateLevelAdGroup SourcePinterestUpdateLevel = "AD_GROUP"
+ SourcePinterestUpdateLevelAdGroupTargeting SourcePinterestUpdateLevel = "AD_GROUP_TARGETING"
+ SourcePinterestUpdateLevelPinPromotion SourcePinterestUpdateLevel = "PIN_PROMOTION"
+ SourcePinterestUpdateLevelPinPromotionTargeting SourcePinterestUpdateLevel = "PIN_PROMOTION_TARGETING"
+ SourcePinterestUpdateLevelKeyword SourcePinterestUpdateLevel = "KEYWORD"
+ SourcePinterestUpdateLevelProductGroup SourcePinterestUpdateLevel = "PRODUCT_GROUP"
+ SourcePinterestUpdateLevelProductGroupTargeting SourcePinterestUpdateLevel = "PRODUCT_GROUP_TARGETING"
+ SourcePinterestUpdateLevelProductItem SourcePinterestUpdateLevel = "PRODUCT_ITEM"
+)
+
+func (e SourcePinterestUpdateLevel) ToPointer() *SourcePinterestUpdateLevel {
+ return &e
+}
+
+func (e *SourcePinterestUpdateLevel) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "ADVERTISER":
+ fallthrough
+ case "ADVERTISER_TARGETING":
+ fallthrough
+ case "CAMPAIGN":
+ fallthrough
+ case "CAMPAIGN_TARGETING":
+ fallthrough
+ case "AD_GROUP":
+ fallthrough
+ case "AD_GROUP_TARGETING":
+ fallthrough
+ case "PIN_PROMOTION":
+ fallthrough
+ case "PIN_PROMOTION_TARGETING":
+ fallthrough
+ case "KEYWORD":
+ fallthrough
+ case "PRODUCT_GROUP":
+ fallthrough
+ case "PRODUCT_GROUP_TARGETING":
+ fallthrough
+ case "PRODUCT_ITEM":
+ *e = SourcePinterestUpdateLevel(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourcePinterestUpdateLevel: %v", v)
+ }
+}
+
+// ViewWindowDays - Number of days to use as the conversion attribution window for a view action.
+type ViewWindowDays int64
+
+const (
+ ViewWindowDaysZero ViewWindowDays = 0
+ ViewWindowDaysOne ViewWindowDays = 1
+ ViewWindowDaysSeven ViewWindowDays = 7
+ ViewWindowDaysFourteen ViewWindowDays = 14
+ ViewWindowDaysThirty ViewWindowDays = 30
+ ViewWindowDaysSixty ViewWindowDays = 60
+)
- sourcePinterestUpdateAuthorizationMethodAccessToken := new(SourcePinterestUpdateAuthorizationMethodAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePinterestUpdateAuthorizationMethodAccessToken); err == nil {
- u.SourcePinterestUpdateAuthorizationMethodAccessToken = sourcePinterestUpdateAuthorizationMethodAccessToken
- u.Type = SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateAuthorizationMethodAccessToken
+func (e ViewWindowDays) ToPointer() *ViewWindowDays {
+ return &e
+}
+
+func (e *ViewWindowDays) UnmarshalJSON(data []byte) error {
+ var v int64
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case 0:
+ fallthrough
+ case 1:
+ fallthrough
+ case 7:
+ fallthrough
+ case 14:
+ fallthrough
+ case 30:
+ fallthrough
+ case 60:
+ *e = ViewWindowDays(v)
return nil
+ default:
+ return fmt.Errorf("invalid value for ViewWindowDays: %v", v)
}
+}
- sourcePinterestUpdateAuthorizationMethodOAuth20 := new(SourcePinterestUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePinterestUpdateAuthorizationMethodOAuth20); err == nil {
- u.SourcePinterestUpdateAuthorizationMethodOAuth20 = sourcePinterestUpdateAuthorizationMethodOAuth20
- u.Type = SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateAuthorizationMethodOAuth20
+// ReportConfig - Config for custom report
+type ReportConfig struct {
+ // List of types of attribution for the conversion report
+ AttributionTypes []SourcePinterestUpdateValidEnums `json:"attribution_types,omitempty"`
+ // Number of days to use as the conversion attribution window for a pin click action.
+ ClickWindowDays *ClickWindowDays `default:"30" json:"click_window_days"`
+ // A list of chosen columns
+ Columns []SourcePinterestUpdateSchemasValidEnums `json:"columns"`
+	// The date by which the conversion metrics returned from this endpoint will be reported. There are two dates associated with a conversion event: the date that the user interacted with the ad, and the date that the user completed a conversion event.
+ ConversionReportTime *ConversionReportTime `default:"TIME_OF_AD_ACTION" json:"conversion_report_time"`
+ // Number of days to use as the conversion attribution window for an engagement action.
+ EngagementWindowDays *EngagementWindowDays `default:"30" json:"engagement_window_days"`
+ // Chosen granularity for API
+ Granularity *Granularity `default:"TOTAL" json:"granularity"`
+ // Chosen level for API
+ Level *SourcePinterestUpdateLevel `default:"ADVERTISER" json:"level"`
+ // The name value of report
+ Name string `json:"name"`
+ // A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by report api (913 days from today).
+ StartDate *types.Date `json:"start_date,omitempty"`
+ // Number of days to use as the conversion attribution window for a view action.
+ ViewWindowDays *ViewWindowDays `default:"30" json:"view_window_days"`
+}
+
+func (r ReportConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(r, "", false)
+}
+
+func (r *ReportConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &r, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *ReportConfig) GetAttributionTypes() []SourcePinterestUpdateValidEnums {
+ if o == nil {
return nil
}
+ return o.AttributionTypes
+}
- return errors.New("could not unmarshal into supported union types")
+func (o *ReportConfig) GetClickWindowDays() *ClickWindowDays {
+ if o == nil {
+ return nil
+ }
+ return o.ClickWindowDays
}
-func (u SourcePinterestUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourcePinterestUpdateAuthorizationMethodAccessToken != nil {
- return json.Marshal(u.SourcePinterestUpdateAuthorizationMethodAccessToken)
+func (o *ReportConfig) GetColumns() []SourcePinterestUpdateSchemasValidEnums {
+ if o == nil {
+ return []SourcePinterestUpdateSchemasValidEnums{}
}
+ return o.Columns
+}
- if u.SourcePinterestUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourcePinterestUpdateAuthorizationMethodOAuth20)
+func (o *ReportConfig) GetConversionReportTime() *ConversionReportTime {
+ if o == nil {
+ return nil
+ }
+ return o.ConversionReportTime
+}
+
+func (o *ReportConfig) GetEngagementWindowDays() *EngagementWindowDays {
+ if o == nil {
+ return nil
+ }
+ return o.EngagementWindowDays
+}
+
+func (o *ReportConfig) GetGranularity() *Granularity {
+ if o == nil {
+ return nil
}
+ return o.Granularity
+}
- return nil, nil
+func (o *ReportConfig) GetLevel() *SourcePinterestUpdateLevel {
+ if o == nil {
+ return nil
+ }
+ return o.Level
+}
+
+func (o *ReportConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *ReportConfig) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *ReportConfig) GetViewWindowDays() *ViewWindowDays {
+ if o == nil {
+ return nil
+ }
+ return o.ViewWindowDays
}
-type SourcePinterestUpdateStatus string
+type Status string
const (
- SourcePinterestUpdateStatusActive SourcePinterestUpdateStatus = "ACTIVE"
- SourcePinterestUpdateStatusPaused SourcePinterestUpdateStatus = "PAUSED"
- SourcePinterestUpdateStatusArchived SourcePinterestUpdateStatus = "ARCHIVED"
+ StatusActive Status = "ACTIVE"
+ StatusPaused Status = "PAUSED"
+ StatusArchived Status = "ARCHIVED"
)
-func (e SourcePinterestUpdateStatus) ToPointer() *SourcePinterestUpdateStatus {
+func (e Status) ToPointer() *Status {
return &e
}
-func (e *SourcePinterestUpdateStatus) UnmarshalJSON(data []byte) error {
+func (e *Status) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -165,17 +878,58 @@ func (e *SourcePinterestUpdateStatus) UnmarshalJSON(data []byte) error {
case "PAUSED":
fallthrough
case "ARCHIVED":
- *e = SourcePinterestUpdateStatus(v)
+ *e = Status(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePinterestUpdateStatus: %v", v)
+ return fmt.Errorf("invalid value for Status: %v", v)
}
}
type SourcePinterestUpdate struct {
Credentials *SourcePinterestUpdateAuthorizationMethod `json:"credentials,omitempty"`
+	// A list which contains ad statistics entries; each entry must have a name and can contain fields, breakdowns or action_breakdowns. Click on "add" to fill this field.
+ CustomReports []ReportConfig `json:"custom_reports,omitempty"`
// A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by api (89 days from today).
- StartDate types.Date `json:"start_date"`
+ StartDate *types.Date `json:"start_date,omitempty"`
// Entity statuses based off of campaigns, ad_groups, and ads. If you do not have a status set, it will be ignored completely.
- Status []SourcePinterestUpdateStatus `json:"status,omitempty"`
+ Status []Status `json:"status,omitempty"`
+}
+
+func (s SourcePinterestUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePinterestUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePinterestUpdate) GetCredentials() *SourcePinterestUpdateAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourcePinterestUpdate) GetCustomReports() []ReportConfig {
+ if o == nil {
+ return nil
+ }
+ return o.CustomReports
+}
+
+func (o *SourcePinterestUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourcePinterestUpdate) GetStatus() []Status {
+ if o == nil {
+ return nil
+ }
+ return o.Status
}
diff --git a/internal/sdk/pkg/models/shared/sourcepipedrive.go b/internal/sdk/pkg/models/shared/sourcepipedrive.go
old mode 100755
new mode 100644
index dc1492737..165a14c4d
--- a/internal/sdk/pkg/models/shared/sourcepipedrive.go
+++ b/internal/sdk/pkg/models/shared/sourcepipedrive.go
@@ -5,66 +5,66 @@ package shared
import (
"encoding/json"
"fmt"
- "time"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePipedriveAPIKeyAuthenticationAuthType string
+type Pipedrive string
const (
- SourcePipedriveAPIKeyAuthenticationAuthTypeToken SourcePipedriveAPIKeyAuthenticationAuthType = "Token"
+ PipedrivePipedrive Pipedrive = "pipedrive"
)
-func (e SourcePipedriveAPIKeyAuthenticationAuthType) ToPointer() *SourcePipedriveAPIKeyAuthenticationAuthType {
+func (e Pipedrive) ToPointer() *Pipedrive {
return &e
}
-func (e *SourcePipedriveAPIKeyAuthenticationAuthType) UnmarshalJSON(data []byte) error {
+func (e *Pipedrive) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
- case "Token":
- *e = SourcePipedriveAPIKeyAuthenticationAuthType(v)
+ case "pipedrive":
+ *e = Pipedrive(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePipedriveAPIKeyAuthenticationAuthType: %v", v)
+ return fmt.Errorf("invalid value for Pipedrive: %v", v)
}
}
-type SourcePipedriveAPIKeyAuthentication struct {
+type SourcePipedrive struct {
// The Pipedrive API Token.
- APIToken string `json:"api_token"`
- AuthType SourcePipedriveAPIKeyAuthenticationAuthType `json:"auth_type"`
+ APIToken string `json:"api_token"`
+ // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. When specified and not None, then stream will behave as incremental
+ ReplicationStartDate string `json:"replication_start_date"`
+ sourceType Pipedrive `const:"pipedrive" json:"sourceType"`
}
-type SourcePipedrivePipedrive string
-
-const (
- SourcePipedrivePipedrivePipedrive SourcePipedrivePipedrive = "pipedrive"
-)
-
-func (e SourcePipedrivePipedrive) ToPointer() *SourcePipedrivePipedrive {
- return &e
+func (s SourcePipedrive) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
}
-func (e *SourcePipedrivePipedrive) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
+func (s *SourcePipedrive) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
return err
}
- switch v {
- case "pipedrive":
- *e = SourcePipedrivePipedrive(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourcePipedrivePipedrive: %v", v)
+ return nil
+}
+
+func (o *SourcePipedrive) GetAPIToken() string {
+ if o == nil {
+ return ""
}
+ return o.APIToken
}
-type SourcePipedrive struct {
- Authorization *SourcePipedriveAPIKeyAuthentication `json:"authorization,omitempty"`
- // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. When specified and not None, then stream will behave as incremental
- ReplicationStartDate time.Time `json:"replication_start_date"`
- SourceType SourcePipedrivePipedrive `json:"sourceType"`
+func (o *SourcePipedrive) GetReplicationStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.ReplicationStartDate
+}
+
+func (o *SourcePipedrive) GetSourceType() Pipedrive {
+ return PipedrivePipedrive
}
diff --git a/internal/sdk/pkg/models/shared/sourcepipedrivecreaterequest.go b/internal/sdk/pkg/models/shared/sourcepipedrivecreaterequest.go
old mode 100755
new mode 100644
index 56525669a..4c357b167
--- a/internal/sdk/pkg/models/shared/sourcepipedrivecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepipedrivecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePipedriveCreateRequest struct {
Configuration SourcePipedrive `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePipedriveCreateRequest) GetConfiguration() SourcePipedrive {
+ if o == nil {
+ return SourcePipedrive{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePipedriveCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePipedriveCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePipedriveCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePipedriveCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepipedriveputrequest.go b/internal/sdk/pkg/models/shared/sourcepipedriveputrequest.go
old mode 100755
new mode 100644
index e1d83f0cc..17dd458d3
--- a/internal/sdk/pkg/models/shared/sourcepipedriveputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepipedriveputrequest.go
@@ -7,3 +7,24 @@ type SourcePipedrivePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePipedrivePutRequest) GetConfiguration() SourcePipedriveUpdate {
+ if o == nil {
+ return SourcePipedriveUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePipedrivePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePipedrivePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepipedriveupdate.go b/internal/sdk/pkg/models/shared/sourcepipedriveupdate.go
old mode 100755
new mode 100644
index dea5d664b..e3198b543
--- a/internal/sdk/pkg/models/shared/sourcepipedriveupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepipedriveupdate.go
@@ -2,44 +2,23 @@
package shared
-import (
- "encoding/json"
- "fmt"
- "time"
-)
-
-type SourcePipedriveUpdateAPIKeyAuthenticationAuthType string
-
-const (
- SourcePipedriveUpdateAPIKeyAuthenticationAuthTypeToken SourcePipedriveUpdateAPIKeyAuthenticationAuthType = "Token"
-)
-
-func (e SourcePipedriveUpdateAPIKeyAuthenticationAuthType) ToPointer() *SourcePipedriveUpdateAPIKeyAuthenticationAuthType {
- return &e
+type SourcePipedriveUpdate struct {
+ // The Pipedrive API Token.
+ APIToken string `json:"api_token"`
+ // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. When specified and not None, then stream will behave as incremental
+ ReplicationStartDate string `json:"replication_start_date"`
}
-func (e *SourcePipedriveUpdateAPIKeyAuthenticationAuthType) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "Token":
- *e = SourcePipedriveUpdateAPIKeyAuthenticationAuthType(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourcePipedriveUpdateAPIKeyAuthenticationAuthType: %v", v)
+func (o *SourcePipedriveUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
}
+ return o.APIToken
}
-type SourcePipedriveUpdateAPIKeyAuthentication struct {
- // The Pipedrive API Token.
- APIToken string `json:"api_token"`
- AuthType SourcePipedriveUpdateAPIKeyAuthenticationAuthType `json:"auth_type"`
-}
-
-type SourcePipedriveUpdate struct {
- Authorization *SourcePipedriveUpdateAPIKeyAuthentication `json:"authorization,omitempty"`
- // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. When specified and not None, then stream will behave as incremental
- ReplicationStartDate time.Time `json:"replication_start_date"`
+func (o *SourcePipedriveUpdate) GetReplicationStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.ReplicationStartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcepocket.go b/internal/sdk/pkg/models/shared/sourcepocket.go
old mode 100755
new mode 100644
index d0a084172..1f82bf7c8
--- a/internal/sdk/pkg/models/shared/sourcepocket.go
+++ b/internal/sdk/pkg/models/shared/sourcepocket.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourcePocketContentType - Select the content type of the items to retrieve.
@@ -100,27 +101,27 @@ func (e *SourcePocketSortBy) UnmarshalJSON(data []byte) error {
}
}
-type SourcePocketPocket string
+type Pocket string
const (
- SourcePocketPocketPocket SourcePocketPocket = "pocket"
+ PocketPocket Pocket = "pocket"
)
-func (e SourcePocketPocket) ToPointer() *SourcePocketPocket {
+func (e Pocket) ToPointer() *Pocket {
return &e
}
-func (e *SourcePocketPocket) UnmarshalJSON(data []byte) error {
+func (e *Pocket) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pocket":
- *e = SourcePocketPocket(v)
+ *e = Pocket(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePocketPocket: %v", v)
+ return fmt.Errorf("invalid value for Pocket: %v", v)
}
}
@@ -167,16 +168,108 @@ type SourcePocket struct {
// Only return items from a particular `domain`.
Domain *string `json:"domain,omitempty"`
// Retrieve only favorited items.
- Favorite *bool `json:"favorite,omitempty"`
+ Favorite *bool `default:"false" json:"favorite"`
// Only return items whose title or url contain the `search` string.
Search *string `json:"search,omitempty"`
// Only return items modified since the given timestamp.
Since *string `json:"since,omitempty"`
// Sort retrieved items by the given criteria.
Sort *SourcePocketSortBy `json:"sort,omitempty"`
- SourceType SourcePocketPocket `json:"sourceType"`
+ sourceType Pocket `const:"pocket" json:"sourceType"`
// Select the state of the items to retrieve.
State *SourcePocketState `json:"state,omitempty"`
// Return only items tagged with this tag name. Use _untagged_ for retrieving only untagged items.
Tag *string `json:"tag,omitempty"`
}
+
+func (s SourcePocket) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePocket) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePocket) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourcePocket) GetConsumerKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConsumerKey
+}
+
+func (o *SourcePocket) GetContentType() *SourcePocketContentType {
+ if o == nil {
+ return nil
+ }
+ return o.ContentType
+}
+
+func (o *SourcePocket) GetDetailType() *SourcePocketDetailType {
+ if o == nil {
+ return nil
+ }
+ return o.DetailType
+}
+
+func (o *SourcePocket) GetDomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Domain
+}
+
+func (o *SourcePocket) GetFavorite() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Favorite
+}
+
+func (o *SourcePocket) GetSearch() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Search
+}
+
+func (o *SourcePocket) GetSince() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Since
+}
+
+func (o *SourcePocket) GetSort() *SourcePocketSortBy {
+ if o == nil {
+ return nil
+ }
+ return o.Sort
+}
+
+func (o *SourcePocket) GetSourceType() Pocket {
+ return PocketPocket
+}
+
+func (o *SourcePocket) GetState() *SourcePocketState {
+ if o == nil {
+ return nil
+ }
+ return o.State
+}
+
+func (o *SourcePocket) GetTag() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Tag
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepocketcreaterequest.go b/internal/sdk/pkg/models/shared/sourcepocketcreaterequest.go
old mode 100755
new mode 100644
index 2ab4ba5dc..2258d13ab
--- a/internal/sdk/pkg/models/shared/sourcepocketcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepocketcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePocketCreateRequest struct {
Configuration SourcePocket `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePocketCreateRequest) GetConfiguration() SourcePocket {
+ if o == nil {
+ return SourcePocket{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePocketCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePocketCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePocketCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePocketCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepocketputrequest.go b/internal/sdk/pkg/models/shared/sourcepocketputrequest.go
old mode 100755
new mode 100644
index 746abded0..b4fb44a6f
--- a/internal/sdk/pkg/models/shared/sourcepocketputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepocketputrequest.go
@@ -7,3 +7,24 @@ type SourcePocketPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePocketPutRequest) GetConfiguration() SourcePocketUpdate {
+ if o == nil {
+ return SourcePocketUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePocketPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePocketPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepocketupdate.go b/internal/sdk/pkg/models/shared/sourcepocketupdate.go
old mode 100755
new mode 100644
index f7ea995e7..bd92c043e
--- a/internal/sdk/pkg/models/shared/sourcepocketupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepocketupdate.go
@@ -5,22 +5,23 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourcePocketUpdateContentType - Select the content type of the items to retrieve.
-type SourcePocketUpdateContentType string
+// ContentType - Select the content type of the items to retrieve.
+type ContentType string
const (
- SourcePocketUpdateContentTypeArticle SourcePocketUpdateContentType = "article"
- SourcePocketUpdateContentTypeVideo SourcePocketUpdateContentType = "video"
- SourcePocketUpdateContentTypeImage SourcePocketUpdateContentType = "image"
+ ContentTypeArticle ContentType = "article"
+ ContentTypeVideo ContentType = "video"
+ ContentTypeImage ContentType = "image"
)
-func (e SourcePocketUpdateContentType) ToPointer() *SourcePocketUpdateContentType {
+func (e ContentType) ToPointer() *ContentType {
return &e
}
-func (e *SourcePocketUpdateContentType) UnmarshalJSON(data []byte) error {
+func (e *ContentType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -31,26 +32,26 @@ func (e *SourcePocketUpdateContentType) UnmarshalJSON(data []byte) error {
case "video":
fallthrough
case "image":
- *e = SourcePocketUpdateContentType(v)
+ *e = ContentType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePocketUpdateContentType: %v", v)
+ return fmt.Errorf("invalid value for ContentType: %v", v)
}
}
-// SourcePocketUpdateDetailType - Select the granularity of the information about each item.
-type SourcePocketUpdateDetailType string
+// DetailType - Select the granularity of the information about each item.
+type DetailType string
const (
- SourcePocketUpdateDetailTypeSimple SourcePocketUpdateDetailType = "simple"
- SourcePocketUpdateDetailTypeComplete SourcePocketUpdateDetailType = "complete"
+ DetailTypeSimple DetailType = "simple"
+ DetailTypeComplete DetailType = "complete"
)
-func (e SourcePocketUpdateDetailType) ToPointer() *SourcePocketUpdateDetailType {
+func (e DetailType) ToPointer() *DetailType {
return &e
}
-func (e *SourcePocketUpdateDetailType) UnmarshalJSON(data []byte) error {
+func (e *DetailType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -59,10 +60,10 @@ func (e *SourcePocketUpdateDetailType) UnmarshalJSON(data []byte) error {
case "simple":
fallthrough
case "complete":
- *e = SourcePocketUpdateDetailType(v)
+ *e = DetailType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePocketUpdateDetailType: %v", v)
+ return fmt.Errorf("invalid value for DetailType: %v", v)
}
}
@@ -100,20 +101,20 @@ func (e *SourcePocketUpdateSortBy) UnmarshalJSON(data []byte) error {
}
}
-// SourcePocketUpdateState - Select the state of the items to retrieve.
-type SourcePocketUpdateState string
+// State - Select the state of the items to retrieve.
+type State string
const (
- SourcePocketUpdateStateUnread SourcePocketUpdateState = "unread"
- SourcePocketUpdateStateArchive SourcePocketUpdateState = "archive"
- SourcePocketUpdateStateAll SourcePocketUpdateState = "all"
+ StateUnread State = "unread"
+ StateArchive State = "archive"
+ StateAll State = "all"
)
-func (e SourcePocketUpdateState) ToPointer() *SourcePocketUpdateState {
+func (e State) ToPointer() *State {
return &e
}
-func (e *SourcePocketUpdateState) UnmarshalJSON(data []byte) error {
+func (e *State) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -124,10 +125,10 @@ func (e *SourcePocketUpdateState) UnmarshalJSON(data []byte) error {
case "archive":
fallthrough
case "all":
- *e = SourcePocketUpdateState(v)
+ *e = State(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePocketUpdateState: %v", v)
+ return fmt.Errorf("invalid value for State: %v", v)
}
}
@@ -137,13 +138,13 @@ type SourcePocketUpdate struct {
// Your application's Consumer Key.
ConsumerKey string `json:"consumer_key"`
// Select the content type of the items to retrieve.
- ContentType *SourcePocketUpdateContentType `json:"content_type,omitempty"`
+ ContentType *ContentType `json:"content_type,omitempty"`
// Select the granularity of the information about each item.
- DetailType *SourcePocketUpdateDetailType `json:"detail_type,omitempty"`
+ DetailType *DetailType `json:"detail_type,omitempty"`
// Only return items from a particular `domain`.
Domain *string `json:"domain,omitempty"`
// Retrieve only favorited items.
- Favorite *bool `json:"favorite,omitempty"`
+ Favorite *bool `default:"false" json:"favorite"`
// Only return items whose title or url contain the `search` string.
Search *string `json:"search,omitempty"`
// Only return items modified since the given timestamp.
@@ -151,7 +152,95 @@ type SourcePocketUpdate struct {
// Sort retrieved items by the given criteria.
Sort *SourcePocketUpdateSortBy `json:"sort,omitempty"`
// Select the state of the items to retrieve.
- State *SourcePocketUpdateState `json:"state,omitempty"`
+ State *State `json:"state,omitempty"`
// Return only items tagged with this tag name. Use _untagged_ for retrieving only untagged items.
Tag *string `json:"tag,omitempty"`
}
+
+func (s SourcePocketUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePocketUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePocketUpdate) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourcePocketUpdate) GetConsumerKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.ConsumerKey
+}
+
+func (o *SourcePocketUpdate) GetContentType() *ContentType {
+ if o == nil {
+ return nil
+ }
+ return o.ContentType
+}
+
+func (o *SourcePocketUpdate) GetDetailType() *DetailType {
+ if o == nil {
+ return nil
+ }
+ return o.DetailType
+}
+
+func (o *SourcePocketUpdate) GetDomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Domain
+}
+
+func (o *SourcePocketUpdate) GetFavorite() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Favorite
+}
+
+func (o *SourcePocketUpdate) GetSearch() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Search
+}
+
+func (o *SourcePocketUpdate) GetSince() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Since
+}
+
+func (o *SourcePocketUpdate) GetSort() *SourcePocketUpdateSortBy {
+ if o == nil {
+ return nil
+ }
+ return o.Sort
+}
+
+func (o *SourcePocketUpdate) GetState() *State {
+ if o == nil {
+ return nil
+ }
+ return o.State
+}
+
+func (o *SourcePocketUpdate) GetTag() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Tag
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepokeapi.go b/internal/sdk/pkg/models/shared/sourcepokeapi.go
old mode 100755
new mode 100644
index 6162b3c6f..ae2db0282
--- a/internal/sdk/pkg/models/shared/sourcepokeapi.go
+++ b/internal/sdk/pkg/models/shared/sourcepokeapi.go
@@ -5,34 +5,2773 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePokeapiPokeapi string
+// SourcePokeapiPokemonName - Pokemon requested from the API.
+type SourcePokeapiPokemonName string
const (
- SourcePokeapiPokeapiPokeapi SourcePokeapiPokeapi = "pokeapi"
+ SourcePokeapiPokemonNameBulbasaur SourcePokeapiPokemonName = "bulbasaur"
+ SourcePokeapiPokemonNameIvysaur SourcePokeapiPokemonName = "ivysaur"
+ SourcePokeapiPokemonNameVenusaur SourcePokeapiPokemonName = "venusaur"
+ SourcePokeapiPokemonNameCharmander SourcePokeapiPokemonName = "charmander"
+ SourcePokeapiPokemonNameCharmeleon SourcePokeapiPokemonName = "charmeleon"
+ SourcePokeapiPokemonNameCharizard SourcePokeapiPokemonName = "charizard"
+ SourcePokeapiPokemonNameSquirtle SourcePokeapiPokemonName = "squirtle"
+ SourcePokeapiPokemonNameWartortle SourcePokeapiPokemonName = "wartortle"
+ SourcePokeapiPokemonNameBlastoise SourcePokeapiPokemonName = "blastoise"
+ SourcePokeapiPokemonNameCaterpie SourcePokeapiPokemonName = "caterpie"
+ SourcePokeapiPokemonNameMetapod SourcePokeapiPokemonName = "metapod"
+ SourcePokeapiPokemonNameButterfree SourcePokeapiPokemonName = "butterfree"
+ SourcePokeapiPokemonNameWeedle SourcePokeapiPokemonName = "weedle"
+ SourcePokeapiPokemonNameKakuna SourcePokeapiPokemonName = "kakuna"
+ SourcePokeapiPokemonNameBeedrill SourcePokeapiPokemonName = "beedrill"
+ SourcePokeapiPokemonNamePidgey SourcePokeapiPokemonName = "pidgey"
+ SourcePokeapiPokemonNamePidgeotto SourcePokeapiPokemonName = "pidgeotto"
+ SourcePokeapiPokemonNamePidgeot SourcePokeapiPokemonName = "pidgeot"
+ SourcePokeapiPokemonNameRattata SourcePokeapiPokemonName = "rattata"
+ SourcePokeapiPokemonNameRaticate SourcePokeapiPokemonName = "raticate"
+ SourcePokeapiPokemonNameSpearow SourcePokeapiPokemonName = "spearow"
+ SourcePokeapiPokemonNameFearow SourcePokeapiPokemonName = "fearow"
+ SourcePokeapiPokemonNameEkans SourcePokeapiPokemonName = "ekans"
+ SourcePokeapiPokemonNameArbok SourcePokeapiPokemonName = "arbok"
+ SourcePokeapiPokemonNamePikachu SourcePokeapiPokemonName = "pikachu"
+ SourcePokeapiPokemonNameRaichu SourcePokeapiPokemonName = "raichu"
+ SourcePokeapiPokemonNameSandshrew SourcePokeapiPokemonName = "sandshrew"
+ SourcePokeapiPokemonNameSandslash SourcePokeapiPokemonName = "sandslash"
+ SourcePokeapiPokemonNameNidoranf SourcePokeapiPokemonName = "nidoranf"
+ SourcePokeapiPokemonNameNidorina SourcePokeapiPokemonName = "nidorina"
+ SourcePokeapiPokemonNameNidoqueen SourcePokeapiPokemonName = "nidoqueen"
+ SourcePokeapiPokemonNameNidoranm SourcePokeapiPokemonName = "nidoranm"
+ SourcePokeapiPokemonNameNidorino SourcePokeapiPokemonName = "nidorino"
+ SourcePokeapiPokemonNameNidoking SourcePokeapiPokemonName = "nidoking"
+ SourcePokeapiPokemonNameClefairy SourcePokeapiPokemonName = "clefairy"
+ SourcePokeapiPokemonNameClefable SourcePokeapiPokemonName = "clefable"
+ SourcePokeapiPokemonNameVulpix SourcePokeapiPokemonName = "vulpix"
+ SourcePokeapiPokemonNameNinetales SourcePokeapiPokemonName = "ninetales"
+ SourcePokeapiPokemonNameJigglypuff SourcePokeapiPokemonName = "jigglypuff"
+ SourcePokeapiPokemonNameWigglytuff SourcePokeapiPokemonName = "wigglytuff"
+ SourcePokeapiPokemonNameZubat SourcePokeapiPokemonName = "zubat"
+ SourcePokeapiPokemonNameGolbat SourcePokeapiPokemonName = "golbat"
+ SourcePokeapiPokemonNameOddish SourcePokeapiPokemonName = "oddish"
+ SourcePokeapiPokemonNameGloom SourcePokeapiPokemonName = "gloom"
+ SourcePokeapiPokemonNameVileplume SourcePokeapiPokemonName = "vileplume"
+ SourcePokeapiPokemonNameParas SourcePokeapiPokemonName = "paras"
+ SourcePokeapiPokemonNameParasect SourcePokeapiPokemonName = "parasect"
+ SourcePokeapiPokemonNameVenonat SourcePokeapiPokemonName = "venonat"
+ SourcePokeapiPokemonNameVenomoth SourcePokeapiPokemonName = "venomoth"
+ SourcePokeapiPokemonNameDiglett SourcePokeapiPokemonName = "diglett"
+ SourcePokeapiPokemonNameDugtrio SourcePokeapiPokemonName = "dugtrio"
+ SourcePokeapiPokemonNameMeowth SourcePokeapiPokemonName = "meowth"
+ SourcePokeapiPokemonNamePersian SourcePokeapiPokemonName = "persian"
+ SourcePokeapiPokemonNamePsyduck SourcePokeapiPokemonName = "psyduck"
+ SourcePokeapiPokemonNameGolduck SourcePokeapiPokemonName = "golduck"
+ SourcePokeapiPokemonNameMankey SourcePokeapiPokemonName = "mankey"
+ SourcePokeapiPokemonNamePrimeape SourcePokeapiPokemonName = "primeape"
+ SourcePokeapiPokemonNameGrowlithe SourcePokeapiPokemonName = "growlithe"
+ SourcePokeapiPokemonNameArcanine SourcePokeapiPokemonName = "arcanine"
+ SourcePokeapiPokemonNamePoliwag SourcePokeapiPokemonName = "poliwag"
+ SourcePokeapiPokemonNamePoliwhirl SourcePokeapiPokemonName = "poliwhirl"
+ SourcePokeapiPokemonNamePoliwrath SourcePokeapiPokemonName = "poliwrath"
+ SourcePokeapiPokemonNameAbra SourcePokeapiPokemonName = "abra"
+ SourcePokeapiPokemonNameKadabra SourcePokeapiPokemonName = "kadabra"
+ SourcePokeapiPokemonNameAlakazam SourcePokeapiPokemonName = "alakazam"
+ SourcePokeapiPokemonNameMachop SourcePokeapiPokemonName = "machop"
+ SourcePokeapiPokemonNameMachoke SourcePokeapiPokemonName = "machoke"
+ SourcePokeapiPokemonNameMachamp SourcePokeapiPokemonName = "machamp"
+ SourcePokeapiPokemonNameBellsprout SourcePokeapiPokemonName = "bellsprout"
+ SourcePokeapiPokemonNameWeepinbell SourcePokeapiPokemonName = "weepinbell"
+ SourcePokeapiPokemonNameVictreebel SourcePokeapiPokemonName = "victreebel"
+ SourcePokeapiPokemonNameTentacool SourcePokeapiPokemonName = "tentacool"
+ SourcePokeapiPokemonNameTentacruel SourcePokeapiPokemonName = "tentacruel"
+ SourcePokeapiPokemonNameGeodude SourcePokeapiPokemonName = "geodude"
+ SourcePokeapiPokemonNameGraveler SourcePokeapiPokemonName = "graveler"
+ SourcePokeapiPokemonNameGolem SourcePokeapiPokemonName = "golem"
+ SourcePokeapiPokemonNamePonyta SourcePokeapiPokemonName = "ponyta"
+ SourcePokeapiPokemonNameRapidash SourcePokeapiPokemonName = "rapidash"
+ SourcePokeapiPokemonNameSlowpoke SourcePokeapiPokemonName = "slowpoke"
+ SourcePokeapiPokemonNameSlowbro SourcePokeapiPokemonName = "slowbro"
+ SourcePokeapiPokemonNameMagnemite SourcePokeapiPokemonName = "magnemite"
+ SourcePokeapiPokemonNameMagneton SourcePokeapiPokemonName = "magneton"
+ SourcePokeapiPokemonNameFarfetchd SourcePokeapiPokemonName = "farfetchd"
+ SourcePokeapiPokemonNameDoduo SourcePokeapiPokemonName = "doduo"
+ SourcePokeapiPokemonNameDodrio SourcePokeapiPokemonName = "dodrio"
+ SourcePokeapiPokemonNameSeel SourcePokeapiPokemonName = "seel"
+ SourcePokeapiPokemonNameDewgong SourcePokeapiPokemonName = "dewgong"
+ SourcePokeapiPokemonNameGrimer SourcePokeapiPokemonName = "grimer"
+ SourcePokeapiPokemonNameMuk SourcePokeapiPokemonName = "muk"
+ SourcePokeapiPokemonNameShellder SourcePokeapiPokemonName = "shellder"
+ SourcePokeapiPokemonNameCloyster SourcePokeapiPokemonName = "cloyster"
+ SourcePokeapiPokemonNameGastly SourcePokeapiPokemonName = "gastly"
+ SourcePokeapiPokemonNameHaunter SourcePokeapiPokemonName = "haunter"
+ SourcePokeapiPokemonNameGengar SourcePokeapiPokemonName = "gengar"
+ SourcePokeapiPokemonNameOnix SourcePokeapiPokemonName = "onix"
+ SourcePokeapiPokemonNameDrowzee SourcePokeapiPokemonName = "drowzee"
+ SourcePokeapiPokemonNameHypno SourcePokeapiPokemonName = "hypno"
+ SourcePokeapiPokemonNameKrabby SourcePokeapiPokemonName = "krabby"
+ SourcePokeapiPokemonNameKingler SourcePokeapiPokemonName = "kingler"
+ SourcePokeapiPokemonNameVoltorb SourcePokeapiPokemonName = "voltorb"
+ SourcePokeapiPokemonNameElectrode SourcePokeapiPokemonName = "electrode"
+ SourcePokeapiPokemonNameExeggcute SourcePokeapiPokemonName = "exeggcute"
+ SourcePokeapiPokemonNameExeggutor SourcePokeapiPokemonName = "exeggutor"
+ SourcePokeapiPokemonNameCubone SourcePokeapiPokemonName = "cubone"
+ SourcePokeapiPokemonNameMarowak SourcePokeapiPokemonName = "marowak"
+ SourcePokeapiPokemonNameHitmonlee SourcePokeapiPokemonName = "hitmonlee"
+ SourcePokeapiPokemonNameHitmonchan SourcePokeapiPokemonName = "hitmonchan"
+ SourcePokeapiPokemonNameLickitung SourcePokeapiPokemonName = "lickitung"
+ SourcePokeapiPokemonNameKoffing SourcePokeapiPokemonName = "koffing"
+ SourcePokeapiPokemonNameWeezing SourcePokeapiPokemonName = "weezing"
+ SourcePokeapiPokemonNameRhyhorn SourcePokeapiPokemonName = "rhyhorn"
+ SourcePokeapiPokemonNameRhydon SourcePokeapiPokemonName = "rhydon"
+ SourcePokeapiPokemonNameChansey SourcePokeapiPokemonName = "chansey"
+ SourcePokeapiPokemonNameTangela SourcePokeapiPokemonName = "tangela"
+ SourcePokeapiPokemonNameKangaskhan SourcePokeapiPokemonName = "kangaskhan"
+ SourcePokeapiPokemonNameHorsea SourcePokeapiPokemonName = "horsea"
+ SourcePokeapiPokemonNameSeadra SourcePokeapiPokemonName = "seadra"
+ SourcePokeapiPokemonNameGoldeen SourcePokeapiPokemonName = "goldeen"
+ SourcePokeapiPokemonNameSeaking SourcePokeapiPokemonName = "seaking"
+ SourcePokeapiPokemonNameStaryu SourcePokeapiPokemonName = "staryu"
+ SourcePokeapiPokemonNameStarmie SourcePokeapiPokemonName = "starmie"
+ SourcePokeapiPokemonNameMrmime SourcePokeapiPokemonName = "mrmime"
+ SourcePokeapiPokemonNameScyther SourcePokeapiPokemonName = "scyther"
+ SourcePokeapiPokemonNameJynx SourcePokeapiPokemonName = "jynx"
+ SourcePokeapiPokemonNameElectabuzz SourcePokeapiPokemonName = "electabuzz"
+ SourcePokeapiPokemonNameMagmar SourcePokeapiPokemonName = "magmar"
+ SourcePokeapiPokemonNamePinsir SourcePokeapiPokemonName = "pinsir"
+ SourcePokeapiPokemonNameTauros SourcePokeapiPokemonName = "tauros"
+ SourcePokeapiPokemonNameMagikarp SourcePokeapiPokemonName = "magikarp"
+ SourcePokeapiPokemonNameGyarados SourcePokeapiPokemonName = "gyarados"
+ SourcePokeapiPokemonNameLapras SourcePokeapiPokemonName = "lapras"
+ SourcePokeapiPokemonNameDitto SourcePokeapiPokemonName = "ditto"
+ SourcePokeapiPokemonNameEevee SourcePokeapiPokemonName = "eevee"
+ SourcePokeapiPokemonNameVaporeon SourcePokeapiPokemonName = "vaporeon"
+ SourcePokeapiPokemonNameJolteon SourcePokeapiPokemonName = "jolteon"
+ SourcePokeapiPokemonNameFlareon SourcePokeapiPokemonName = "flareon"
+ SourcePokeapiPokemonNamePorygon SourcePokeapiPokemonName = "porygon"
+ SourcePokeapiPokemonNameOmanyte SourcePokeapiPokemonName = "omanyte"
+ SourcePokeapiPokemonNameOmastar SourcePokeapiPokemonName = "omastar"
+ SourcePokeapiPokemonNameKabuto SourcePokeapiPokemonName = "kabuto"
+ SourcePokeapiPokemonNameKabutops SourcePokeapiPokemonName = "kabutops"
+ SourcePokeapiPokemonNameAerodactyl SourcePokeapiPokemonName = "aerodactyl"
+ SourcePokeapiPokemonNameSnorlax SourcePokeapiPokemonName = "snorlax"
+ SourcePokeapiPokemonNameArticuno SourcePokeapiPokemonName = "articuno"
+ SourcePokeapiPokemonNameZapdos SourcePokeapiPokemonName = "zapdos"
+ SourcePokeapiPokemonNameMoltres SourcePokeapiPokemonName = "moltres"
+ SourcePokeapiPokemonNameDratini SourcePokeapiPokemonName = "dratini"
+ SourcePokeapiPokemonNameDragonair SourcePokeapiPokemonName = "dragonair"
+ SourcePokeapiPokemonNameDragonite SourcePokeapiPokemonName = "dragonite"
+ SourcePokeapiPokemonNameMewtwo SourcePokeapiPokemonName = "mewtwo"
+ SourcePokeapiPokemonNameMew SourcePokeapiPokemonName = "mew"
+ SourcePokeapiPokemonNameChikorita SourcePokeapiPokemonName = "chikorita"
+ SourcePokeapiPokemonNameBayleef SourcePokeapiPokemonName = "bayleef"
+ SourcePokeapiPokemonNameMeganium SourcePokeapiPokemonName = "meganium"
+ SourcePokeapiPokemonNameCyndaquil SourcePokeapiPokemonName = "cyndaquil"
+ SourcePokeapiPokemonNameQuilava SourcePokeapiPokemonName = "quilava"
+ SourcePokeapiPokemonNameTyphlosion SourcePokeapiPokemonName = "typhlosion"
+ SourcePokeapiPokemonNameTotodile SourcePokeapiPokemonName = "totodile"
+ SourcePokeapiPokemonNameCroconaw SourcePokeapiPokemonName = "croconaw"
+ SourcePokeapiPokemonNameFeraligatr SourcePokeapiPokemonName = "feraligatr"
+ SourcePokeapiPokemonNameSentret SourcePokeapiPokemonName = "sentret"
+ SourcePokeapiPokemonNameFurret SourcePokeapiPokemonName = "furret"
+ SourcePokeapiPokemonNameHoothoot SourcePokeapiPokemonName = "hoothoot"
+ SourcePokeapiPokemonNameNoctowl SourcePokeapiPokemonName = "noctowl"
+ SourcePokeapiPokemonNameLedyba SourcePokeapiPokemonName = "ledyba"
+ SourcePokeapiPokemonNameLedian SourcePokeapiPokemonName = "ledian"
+ SourcePokeapiPokemonNameSpinarak SourcePokeapiPokemonName = "spinarak"
+ SourcePokeapiPokemonNameAriados SourcePokeapiPokemonName = "ariados"
+ SourcePokeapiPokemonNameCrobat SourcePokeapiPokemonName = "crobat"
+ SourcePokeapiPokemonNameChinchou SourcePokeapiPokemonName = "chinchou"
+ SourcePokeapiPokemonNameLanturn SourcePokeapiPokemonName = "lanturn"
+ SourcePokeapiPokemonNamePichu SourcePokeapiPokemonName = "pichu"
+ SourcePokeapiPokemonNameCleffa SourcePokeapiPokemonName = "cleffa"
+ SourcePokeapiPokemonNameIgglybuff SourcePokeapiPokemonName = "igglybuff"
+ SourcePokeapiPokemonNameTogepi SourcePokeapiPokemonName = "togepi"
+ SourcePokeapiPokemonNameTogetic SourcePokeapiPokemonName = "togetic"
+ SourcePokeapiPokemonNameNatu SourcePokeapiPokemonName = "natu"
+ SourcePokeapiPokemonNameXatu SourcePokeapiPokemonName = "xatu"
+ SourcePokeapiPokemonNameMareep SourcePokeapiPokemonName = "mareep"
+ SourcePokeapiPokemonNameFlaaffy SourcePokeapiPokemonName = "flaaffy"
+ SourcePokeapiPokemonNameAmpharos SourcePokeapiPokemonName = "ampharos"
+ SourcePokeapiPokemonNameBellossom SourcePokeapiPokemonName = "bellossom"
+ SourcePokeapiPokemonNameMarill SourcePokeapiPokemonName = "marill"
+ SourcePokeapiPokemonNameAzumarill SourcePokeapiPokemonName = "azumarill"
+ SourcePokeapiPokemonNameSudowoodo SourcePokeapiPokemonName = "sudowoodo"
+ SourcePokeapiPokemonNamePolitoed SourcePokeapiPokemonName = "politoed"
+ SourcePokeapiPokemonNameHoppip SourcePokeapiPokemonName = "hoppip"
+ SourcePokeapiPokemonNameSkiploom SourcePokeapiPokemonName = "skiploom"
+ SourcePokeapiPokemonNameJumpluff SourcePokeapiPokemonName = "jumpluff"
+ SourcePokeapiPokemonNameAipom SourcePokeapiPokemonName = "aipom"
+ SourcePokeapiPokemonNameSunkern SourcePokeapiPokemonName = "sunkern"
+ SourcePokeapiPokemonNameSunflora SourcePokeapiPokemonName = "sunflora"
+ SourcePokeapiPokemonNameYanma SourcePokeapiPokemonName = "yanma"
+ SourcePokeapiPokemonNameWooper SourcePokeapiPokemonName = "wooper"
+ SourcePokeapiPokemonNameQuagsire SourcePokeapiPokemonName = "quagsire"
+ SourcePokeapiPokemonNameEspeon SourcePokeapiPokemonName = "espeon"
+ SourcePokeapiPokemonNameUmbreon SourcePokeapiPokemonName = "umbreon"
+ SourcePokeapiPokemonNameMurkrow SourcePokeapiPokemonName = "murkrow"
+ SourcePokeapiPokemonNameSlowking SourcePokeapiPokemonName = "slowking"
+ SourcePokeapiPokemonNameMisdreavus SourcePokeapiPokemonName = "misdreavus"
+ SourcePokeapiPokemonNameUnown SourcePokeapiPokemonName = "unown"
+ SourcePokeapiPokemonNameWobbuffet SourcePokeapiPokemonName = "wobbuffet"
+ SourcePokeapiPokemonNameGirafarig SourcePokeapiPokemonName = "girafarig"
+ SourcePokeapiPokemonNamePineco SourcePokeapiPokemonName = "pineco"
+ SourcePokeapiPokemonNameForretress SourcePokeapiPokemonName = "forretress"
+ SourcePokeapiPokemonNameDunsparce SourcePokeapiPokemonName = "dunsparce"
+ SourcePokeapiPokemonNameGligar SourcePokeapiPokemonName = "gligar"
+ SourcePokeapiPokemonNameSteelix SourcePokeapiPokemonName = "steelix"
+ SourcePokeapiPokemonNameSnubbull SourcePokeapiPokemonName = "snubbull"
+ SourcePokeapiPokemonNameGranbull SourcePokeapiPokemonName = "granbull"
+ SourcePokeapiPokemonNameQwilfish SourcePokeapiPokemonName = "qwilfish"
+ SourcePokeapiPokemonNameScizor SourcePokeapiPokemonName = "scizor"
+ SourcePokeapiPokemonNameShuckle SourcePokeapiPokemonName = "shuckle"
+ SourcePokeapiPokemonNameHeracross SourcePokeapiPokemonName = "heracross"
+ SourcePokeapiPokemonNameSneasel SourcePokeapiPokemonName = "sneasel"
+ SourcePokeapiPokemonNameTeddiursa SourcePokeapiPokemonName = "teddiursa"
+ SourcePokeapiPokemonNameUrsaring SourcePokeapiPokemonName = "ursaring"
+ SourcePokeapiPokemonNameSlugma SourcePokeapiPokemonName = "slugma"
+ SourcePokeapiPokemonNameMagcargo SourcePokeapiPokemonName = "magcargo"
+ SourcePokeapiPokemonNameSwinub SourcePokeapiPokemonName = "swinub"
+ SourcePokeapiPokemonNamePiloswine SourcePokeapiPokemonName = "piloswine"
+ SourcePokeapiPokemonNameCorsola SourcePokeapiPokemonName = "corsola"
+ SourcePokeapiPokemonNameRemoraid SourcePokeapiPokemonName = "remoraid"
+ SourcePokeapiPokemonNameOctillery SourcePokeapiPokemonName = "octillery"
+ SourcePokeapiPokemonNameDelibird SourcePokeapiPokemonName = "delibird"
+ SourcePokeapiPokemonNameMantine SourcePokeapiPokemonName = "mantine"
+ SourcePokeapiPokemonNameSkarmory SourcePokeapiPokemonName = "skarmory"
+ SourcePokeapiPokemonNameHoundour SourcePokeapiPokemonName = "houndour"
+ SourcePokeapiPokemonNameHoundoom SourcePokeapiPokemonName = "houndoom"
+ SourcePokeapiPokemonNameKingdra SourcePokeapiPokemonName = "kingdra"
+ SourcePokeapiPokemonNamePhanpy SourcePokeapiPokemonName = "phanpy"
+ SourcePokeapiPokemonNameDonphan SourcePokeapiPokemonName = "donphan"
+ SourcePokeapiPokemonNamePorygon2 SourcePokeapiPokemonName = "porygon2"
+ SourcePokeapiPokemonNameStantler SourcePokeapiPokemonName = "stantler"
+ SourcePokeapiPokemonNameSmeargle SourcePokeapiPokemonName = "smeargle"
+ SourcePokeapiPokemonNameTyrogue SourcePokeapiPokemonName = "tyrogue"
+ SourcePokeapiPokemonNameHitmontop SourcePokeapiPokemonName = "hitmontop"
+ SourcePokeapiPokemonNameSmoochum SourcePokeapiPokemonName = "smoochum"
+ SourcePokeapiPokemonNameElekid SourcePokeapiPokemonName = "elekid"
+ SourcePokeapiPokemonNameMagby SourcePokeapiPokemonName = "magby"
+ SourcePokeapiPokemonNameMiltank SourcePokeapiPokemonName = "miltank"
+ SourcePokeapiPokemonNameBlissey SourcePokeapiPokemonName = "blissey"
+ SourcePokeapiPokemonNameRaikou SourcePokeapiPokemonName = "raikou"
+ SourcePokeapiPokemonNameEntei SourcePokeapiPokemonName = "entei"
+ SourcePokeapiPokemonNameSuicune SourcePokeapiPokemonName = "suicune"
+ SourcePokeapiPokemonNameLarvitar SourcePokeapiPokemonName = "larvitar"
+ SourcePokeapiPokemonNamePupitar SourcePokeapiPokemonName = "pupitar"
+ SourcePokeapiPokemonNameTyranitar SourcePokeapiPokemonName = "tyranitar"
+ SourcePokeapiPokemonNameLugia SourcePokeapiPokemonName = "lugia"
+ SourcePokeapiPokemonNameHoOh SourcePokeapiPokemonName = "ho-oh"
+ SourcePokeapiPokemonNameCelebi SourcePokeapiPokemonName = "celebi"
+ SourcePokeapiPokemonNameTreecko SourcePokeapiPokemonName = "treecko"
+ SourcePokeapiPokemonNameGrovyle SourcePokeapiPokemonName = "grovyle"
+ SourcePokeapiPokemonNameSceptile SourcePokeapiPokemonName = "sceptile"
+ SourcePokeapiPokemonNameTorchic SourcePokeapiPokemonName = "torchic"
+ SourcePokeapiPokemonNameCombusken SourcePokeapiPokemonName = "combusken"
+ SourcePokeapiPokemonNameBlaziken SourcePokeapiPokemonName = "blaziken"
+ SourcePokeapiPokemonNameMudkip SourcePokeapiPokemonName = "mudkip"
+ SourcePokeapiPokemonNameMarshtomp SourcePokeapiPokemonName = "marshtomp"
+ SourcePokeapiPokemonNameSwampert SourcePokeapiPokemonName = "swampert"
+ SourcePokeapiPokemonNamePoochyena SourcePokeapiPokemonName = "poochyena"
+ SourcePokeapiPokemonNameMightyena SourcePokeapiPokemonName = "mightyena"
+ SourcePokeapiPokemonNameZigzagoon SourcePokeapiPokemonName = "zigzagoon"
+ SourcePokeapiPokemonNameLinoone SourcePokeapiPokemonName = "linoone"
+ SourcePokeapiPokemonNameWurmple SourcePokeapiPokemonName = "wurmple"
+ SourcePokeapiPokemonNameSilcoon SourcePokeapiPokemonName = "silcoon"
+ SourcePokeapiPokemonNameBeautifly SourcePokeapiPokemonName = "beautifly"
+ SourcePokeapiPokemonNameCascoon SourcePokeapiPokemonName = "cascoon"
+ SourcePokeapiPokemonNameDustox SourcePokeapiPokemonName = "dustox"
+ SourcePokeapiPokemonNameLotad SourcePokeapiPokemonName = "lotad"
+ SourcePokeapiPokemonNameLombre SourcePokeapiPokemonName = "lombre"
+ SourcePokeapiPokemonNameLudicolo SourcePokeapiPokemonName = "ludicolo"
+ SourcePokeapiPokemonNameSeedot SourcePokeapiPokemonName = "seedot"
+ SourcePokeapiPokemonNameNuzleaf SourcePokeapiPokemonName = "nuzleaf"
+ SourcePokeapiPokemonNameShiftry SourcePokeapiPokemonName = "shiftry"
+ SourcePokeapiPokemonNameTaillow SourcePokeapiPokemonName = "taillow"
+ SourcePokeapiPokemonNameSwellow SourcePokeapiPokemonName = "swellow"
+ SourcePokeapiPokemonNameWingull SourcePokeapiPokemonName = "wingull"
+ SourcePokeapiPokemonNamePelipper SourcePokeapiPokemonName = "pelipper"
+ SourcePokeapiPokemonNameRalts SourcePokeapiPokemonName = "ralts"
+ SourcePokeapiPokemonNameKirlia SourcePokeapiPokemonName = "kirlia"
+ SourcePokeapiPokemonNameGardevoir SourcePokeapiPokemonName = "gardevoir"
+ SourcePokeapiPokemonNameSurskit SourcePokeapiPokemonName = "surskit"
+ SourcePokeapiPokemonNameMasquerain SourcePokeapiPokemonName = "masquerain"
+ SourcePokeapiPokemonNameShroomish SourcePokeapiPokemonName = "shroomish"
+ SourcePokeapiPokemonNameBreloom SourcePokeapiPokemonName = "breloom"
+ SourcePokeapiPokemonNameSlakoth SourcePokeapiPokemonName = "slakoth"
+ SourcePokeapiPokemonNameVigoroth SourcePokeapiPokemonName = "vigoroth"
+ SourcePokeapiPokemonNameSlaking SourcePokeapiPokemonName = "slaking"
+ SourcePokeapiPokemonNameNincada SourcePokeapiPokemonName = "nincada"
+ SourcePokeapiPokemonNameNinjask SourcePokeapiPokemonName = "ninjask"
+ SourcePokeapiPokemonNameShedinja SourcePokeapiPokemonName = "shedinja"
+ SourcePokeapiPokemonNameWhismur SourcePokeapiPokemonName = "whismur"
+ SourcePokeapiPokemonNameLoudred SourcePokeapiPokemonName = "loudred"
+ SourcePokeapiPokemonNameExploud SourcePokeapiPokemonName = "exploud"
+ SourcePokeapiPokemonNameMakuhita SourcePokeapiPokemonName = "makuhita"
+ SourcePokeapiPokemonNameHariyama SourcePokeapiPokemonName = "hariyama"
+ SourcePokeapiPokemonNameAzurill SourcePokeapiPokemonName = "azurill"
+ SourcePokeapiPokemonNameNosepass SourcePokeapiPokemonName = "nosepass"
+ SourcePokeapiPokemonNameSkitty SourcePokeapiPokemonName = "skitty"
+ SourcePokeapiPokemonNameDelcatty SourcePokeapiPokemonName = "delcatty"
+ SourcePokeapiPokemonNameSableye SourcePokeapiPokemonName = "sableye"
+ SourcePokeapiPokemonNameMawile SourcePokeapiPokemonName = "mawile"
+ SourcePokeapiPokemonNameAron SourcePokeapiPokemonName = "aron"
+ SourcePokeapiPokemonNameLairon SourcePokeapiPokemonName = "lairon"
+ SourcePokeapiPokemonNameAggron SourcePokeapiPokemonName = "aggron"
+ SourcePokeapiPokemonNameMeditite SourcePokeapiPokemonName = "meditite"
+ SourcePokeapiPokemonNameMedicham SourcePokeapiPokemonName = "medicham"
+ SourcePokeapiPokemonNameElectrike SourcePokeapiPokemonName = "electrike"
+ SourcePokeapiPokemonNameManectric SourcePokeapiPokemonName = "manectric"
+ SourcePokeapiPokemonNamePlusle SourcePokeapiPokemonName = "plusle"
+ SourcePokeapiPokemonNameMinun SourcePokeapiPokemonName = "minun"
+ SourcePokeapiPokemonNameVolbeat SourcePokeapiPokemonName = "volbeat"
+ SourcePokeapiPokemonNameIllumise SourcePokeapiPokemonName = "illumise"
+ SourcePokeapiPokemonNameRoselia SourcePokeapiPokemonName = "roselia"
+ SourcePokeapiPokemonNameGulpin SourcePokeapiPokemonName = "gulpin"
+ SourcePokeapiPokemonNameSwalot SourcePokeapiPokemonName = "swalot"
+ SourcePokeapiPokemonNameCarvanha SourcePokeapiPokemonName = "carvanha"
+ SourcePokeapiPokemonNameSharpedo SourcePokeapiPokemonName = "sharpedo"
+ SourcePokeapiPokemonNameWailmer SourcePokeapiPokemonName = "wailmer"
+ SourcePokeapiPokemonNameWailord SourcePokeapiPokemonName = "wailord"
+ SourcePokeapiPokemonNameNumel SourcePokeapiPokemonName = "numel"
+ SourcePokeapiPokemonNameCamerupt SourcePokeapiPokemonName = "camerupt"
+ SourcePokeapiPokemonNameTorkoal SourcePokeapiPokemonName = "torkoal"
+ SourcePokeapiPokemonNameSpoink SourcePokeapiPokemonName = "spoink"
+ SourcePokeapiPokemonNameGrumpig SourcePokeapiPokemonName = "grumpig"
+ SourcePokeapiPokemonNameSpinda SourcePokeapiPokemonName = "spinda"
+ SourcePokeapiPokemonNameTrapinch SourcePokeapiPokemonName = "trapinch"
+ SourcePokeapiPokemonNameVibrava SourcePokeapiPokemonName = "vibrava"
+ SourcePokeapiPokemonNameFlygon SourcePokeapiPokemonName = "flygon"
+ SourcePokeapiPokemonNameCacnea SourcePokeapiPokemonName = "cacnea"
+ SourcePokeapiPokemonNameCacturne SourcePokeapiPokemonName = "cacturne"
+ SourcePokeapiPokemonNameSwablu SourcePokeapiPokemonName = "swablu"
+ SourcePokeapiPokemonNameAltaria SourcePokeapiPokemonName = "altaria"
+ SourcePokeapiPokemonNameZangoose SourcePokeapiPokemonName = "zangoose"
+ SourcePokeapiPokemonNameSeviper SourcePokeapiPokemonName = "seviper"
+ SourcePokeapiPokemonNameLunatone SourcePokeapiPokemonName = "lunatone"
+ SourcePokeapiPokemonNameSolrock SourcePokeapiPokemonName = "solrock"
+ SourcePokeapiPokemonNameBarboach SourcePokeapiPokemonName = "barboach"
+ SourcePokeapiPokemonNameWhiscash SourcePokeapiPokemonName = "whiscash"
+ SourcePokeapiPokemonNameCorphish SourcePokeapiPokemonName = "corphish"
+ SourcePokeapiPokemonNameCrawdaunt SourcePokeapiPokemonName = "crawdaunt"
+ SourcePokeapiPokemonNameBaltoy SourcePokeapiPokemonName = "baltoy"
+ SourcePokeapiPokemonNameClaydol SourcePokeapiPokemonName = "claydol"
+ SourcePokeapiPokemonNameLileep SourcePokeapiPokemonName = "lileep"
+ SourcePokeapiPokemonNameCradily SourcePokeapiPokemonName = "cradily"
+ SourcePokeapiPokemonNameAnorith SourcePokeapiPokemonName = "anorith"
+ SourcePokeapiPokemonNameArmaldo SourcePokeapiPokemonName = "armaldo"
+ SourcePokeapiPokemonNameFeebas SourcePokeapiPokemonName = "feebas"
+ SourcePokeapiPokemonNameMilotic SourcePokeapiPokemonName = "milotic"
+ SourcePokeapiPokemonNameCastform SourcePokeapiPokemonName = "castform"
+ SourcePokeapiPokemonNameKecleon SourcePokeapiPokemonName = "kecleon"
+ SourcePokeapiPokemonNameShuppet SourcePokeapiPokemonName = "shuppet"
+ SourcePokeapiPokemonNameBanette SourcePokeapiPokemonName = "banette"
+ SourcePokeapiPokemonNameDuskull SourcePokeapiPokemonName = "duskull"
+ SourcePokeapiPokemonNameDusclops SourcePokeapiPokemonName = "dusclops"
+ SourcePokeapiPokemonNameTropius SourcePokeapiPokemonName = "tropius"
+ SourcePokeapiPokemonNameChimecho SourcePokeapiPokemonName = "chimecho"
+ SourcePokeapiPokemonNameAbsol SourcePokeapiPokemonName = "absol"
+ SourcePokeapiPokemonNameWynaut SourcePokeapiPokemonName = "wynaut"
+ SourcePokeapiPokemonNameSnorunt SourcePokeapiPokemonName = "snorunt"
+ SourcePokeapiPokemonNameGlalie SourcePokeapiPokemonName = "glalie"
+ SourcePokeapiPokemonNameSpheal SourcePokeapiPokemonName = "spheal"
+ SourcePokeapiPokemonNameSealeo SourcePokeapiPokemonName = "sealeo"
+ SourcePokeapiPokemonNameWalrein SourcePokeapiPokemonName = "walrein"
+ SourcePokeapiPokemonNameClamperl SourcePokeapiPokemonName = "clamperl"
+ SourcePokeapiPokemonNameHuntail SourcePokeapiPokemonName = "huntail"
+ SourcePokeapiPokemonNameGorebyss SourcePokeapiPokemonName = "gorebyss"
+ SourcePokeapiPokemonNameRelicanth SourcePokeapiPokemonName = "relicanth"
+ SourcePokeapiPokemonNameLuvdisc SourcePokeapiPokemonName = "luvdisc"
+ SourcePokeapiPokemonNameBagon SourcePokeapiPokemonName = "bagon"
+ SourcePokeapiPokemonNameShelgon SourcePokeapiPokemonName = "shelgon"
+ SourcePokeapiPokemonNameSalamence SourcePokeapiPokemonName = "salamence"
+ SourcePokeapiPokemonNameBeldum SourcePokeapiPokemonName = "beldum"
+ SourcePokeapiPokemonNameMetang SourcePokeapiPokemonName = "metang"
+ SourcePokeapiPokemonNameMetagross SourcePokeapiPokemonName = "metagross"
+ SourcePokeapiPokemonNameRegirock SourcePokeapiPokemonName = "regirock"
+ SourcePokeapiPokemonNameRegice SourcePokeapiPokemonName = "regice"
+ SourcePokeapiPokemonNameRegisteel SourcePokeapiPokemonName = "registeel"
+ SourcePokeapiPokemonNameLatias SourcePokeapiPokemonName = "latias"
+ SourcePokeapiPokemonNameLatios SourcePokeapiPokemonName = "latios"
+ SourcePokeapiPokemonNameKyogre SourcePokeapiPokemonName = "kyogre"
+ SourcePokeapiPokemonNameGroudon SourcePokeapiPokemonName = "groudon"
+ SourcePokeapiPokemonNameRayquaza SourcePokeapiPokemonName = "rayquaza"
+ SourcePokeapiPokemonNameJirachi SourcePokeapiPokemonName = "jirachi"
+ SourcePokeapiPokemonNameDeoxys SourcePokeapiPokemonName = "deoxys"
+ SourcePokeapiPokemonNameTurtwig SourcePokeapiPokemonName = "turtwig"
+ SourcePokeapiPokemonNameGrotle SourcePokeapiPokemonName = "grotle"
+ SourcePokeapiPokemonNameTorterra SourcePokeapiPokemonName = "torterra"
+ SourcePokeapiPokemonNameChimchar SourcePokeapiPokemonName = "chimchar"
+ SourcePokeapiPokemonNameMonferno SourcePokeapiPokemonName = "monferno"
+ SourcePokeapiPokemonNameInfernape SourcePokeapiPokemonName = "infernape"
+ SourcePokeapiPokemonNamePiplup SourcePokeapiPokemonName = "piplup"
+ SourcePokeapiPokemonNamePrinplup SourcePokeapiPokemonName = "prinplup"
+ SourcePokeapiPokemonNameEmpoleon SourcePokeapiPokemonName = "empoleon"
+ SourcePokeapiPokemonNameStarly SourcePokeapiPokemonName = "starly"
+ SourcePokeapiPokemonNameStaravia SourcePokeapiPokemonName = "staravia"
+ SourcePokeapiPokemonNameStaraptor SourcePokeapiPokemonName = "staraptor"
+ SourcePokeapiPokemonNameBidoof SourcePokeapiPokemonName = "bidoof"
+ SourcePokeapiPokemonNameBibarel SourcePokeapiPokemonName = "bibarel"
+ SourcePokeapiPokemonNameKricketot SourcePokeapiPokemonName = "kricketot"
+ SourcePokeapiPokemonNameKricketune SourcePokeapiPokemonName = "kricketune"
+ SourcePokeapiPokemonNameShinx SourcePokeapiPokemonName = "shinx"
+ SourcePokeapiPokemonNameLuxio SourcePokeapiPokemonName = "luxio"
+ SourcePokeapiPokemonNameLuxray SourcePokeapiPokemonName = "luxray"
+ SourcePokeapiPokemonNameBudew SourcePokeapiPokemonName = "budew"
+ SourcePokeapiPokemonNameRoserade SourcePokeapiPokemonName = "roserade"
+ SourcePokeapiPokemonNameCranidos SourcePokeapiPokemonName = "cranidos"
+ SourcePokeapiPokemonNameRampardos SourcePokeapiPokemonName = "rampardos"
+ SourcePokeapiPokemonNameShieldon SourcePokeapiPokemonName = "shieldon"
+ SourcePokeapiPokemonNameBastiodon SourcePokeapiPokemonName = "bastiodon"
+ SourcePokeapiPokemonNameBurmy SourcePokeapiPokemonName = "burmy"
+ SourcePokeapiPokemonNameWormadam SourcePokeapiPokemonName = "wormadam"
+ SourcePokeapiPokemonNameMothim SourcePokeapiPokemonName = "mothim"
+ SourcePokeapiPokemonNameCombee SourcePokeapiPokemonName = "combee"
+ SourcePokeapiPokemonNameVespiquen SourcePokeapiPokemonName = "vespiquen"
+ SourcePokeapiPokemonNamePachirisu SourcePokeapiPokemonName = "pachirisu"
+ SourcePokeapiPokemonNameBuizel SourcePokeapiPokemonName = "buizel"
+ SourcePokeapiPokemonNameFloatzel SourcePokeapiPokemonName = "floatzel"
+ SourcePokeapiPokemonNameCherubi SourcePokeapiPokemonName = "cherubi"
+ SourcePokeapiPokemonNameCherrim SourcePokeapiPokemonName = "cherrim"
+ SourcePokeapiPokemonNameShellos SourcePokeapiPokemonName = "shellos"
+ SourcePokeapiPokemonNameGastrodon SourcePokeapiPokemonName = "gastrodon"
+ SourcePokeapiPokemonNameAmbipom SourcePokeapiPokemonName = "ambipom"
+ SourcePokeapiPokemonNameDrifloon SourcePokeapiPokemonName = "drifloon"
+ SourcePokeapiPokemonNameDrifblim SourcePokeapiPokemonName = "drifblim"
+ SourcePokeapiPokemonNameBuneary SourcePokeapiPokemonName = "buneary"
+ SourcePokeapiPokemonNameLopunny SourcePokeapiPokemonName = "lopunny"
+ SourcePokeapiPokemonNameMismagius SourcePokeapiPokemonName = "mismagius"
+ SourcePokeapiPokemonNameHonchkrow SourcePokeapiPokemonName = "honchkrow"
+ SourcePokeapiPokemonNameGlameow SourcePokeapiPokemonName = "glameow"
+ SourcePokeapiPokemonNamePurugly SourcePokeapiPokemonName = "purugly"
+ SourcePokeapiPokemonNameChingling SourcePokeapiPokemonName = "chingling"
+ SourcePokeapiPokemonNameStunky SourcePokeapiPokemonName = "stunky"
+ SourcePokeapiPokemonNameSkuntank SourcePokeapiPokemonName = "skuntank"
+ SourcePokeapiPokemonNameBronzor SourcePokeapiPokemonName = "bronzor"
+ SourcePokeapiPokemonNameBronzong SourcePokeapiPokemonName = "bronzong"
+ SourcePokeapiPokemonNameBonsly SourcePokeapiPokemonName = "bonsly"
+ SourcePokeapiPokemonNameMimejr SourcePokeapiPokemonName = "mimejr"
+ SourcePokeapiPokemonNameHappiny SourcePokeapiPokemonName = "happiny"
+ SourcePokeapiPokemonNameChatot SourcePokeapiPokemonName = "chatot"
+ SourcePokeapiPokemonNameSpiritomb SourcePokeapiPokemonName = "spiritomb"
+ SourcePokeapiPokemonNameGible SourcePokeapiPokemonName = "gible"
+ SourcePokeapiPokemonNameGabite SourcePokeapiPokemonName = "gabite"
+ SourcePokeapiPokemonNameGarchomp SourcePokeapiPokemonName = "garchomp"
+ SourcePokeapiPokemonNameMunchlax SourcePokeapiPokemonName = "munchlax"
+ SourcePokeapiPokemonNameRiolu SourcePokeapiPokemonName = "riolu"
+ SourcePokeapiPokemonNameLucario SourcePokeapiPokemonName = "lucario"
+ SourcePokeapiPokemonNameHippopotas SourcePokeapiPokemonName = "hippopotas"
+ SourcePokeapiPokemonNameHippowdon SourcePokeapiPokemonName = "hippowdon"
+ SourcePokeapiPokemonNameSkorupi SourcePokeapiPokemonName = "skorupi"
+ SourcePokeapiPokemonNameDrapion SourcePokeapiPokemonName = "drapion"
+ SourcePokeapiPokemonNameCroagunk SourcePokeapiPokemonName = "croagunk"
+ SourcePokeapiPokemonNameToxicroak SourcePokeapiPokemonName = "toxicroak"
+ SourcePokeapiPokemonNameCarnivine SourcePokeapiPokemonName = "carnivine"
+ SourcePokeapiPokemonNameFinneon SourcePokeapiPokemonName = "finneon"
+ SourcePokeapiPokemonNameLumineon SourcePokeapiPokemonName = "lumineon"
+ SourcePokeapiPokemonNameMantyke SourcePokeapiPokemonName = "mantyke"
+ SourcePokeapiPokemonNameSnover SourcePokeapiPokemonName = "snover"
+ SourcePokeapiPokemonNameAbomasnow SourcePokeapiPokemonName = "abomasnow"
+ SourcePokeapiPokemonNameWeavile SourcePokeapiPokemonName = "weavile"
+ SourcePokeapiPokemonNameMagnezone SourcePokeapiPokemonName = "magnezone"
+ SourcePokeapiPokemonNameLickilicky SourcePokeapiPokemonName = "lickilicky"
+ SourcePokeapiPokemonNameRhyperior SourcePokeapiPokemonName = "rhyperior"
+ SourcePokeapiPokemonNameTangrowth SourcePokeapiPokemonName = "tangrowth"
+ SourcePokeapiPokemonNameElectivire SourcePokeapiPokemonName = "electivire"
+ SourcePokeapiPokemonNameMagmortar SourcePokeapiPokemonName = "magmortar"
+ SourcePokeapiPokemonNameTogekiss SourcePokeapiPokemonName = "togekiss"
+ SourcePokeapiPokemonNameYanmega SourcePokeapiPokemonName = "yanmega"
+ SourcePokeapiPokemonNameLeafeon SourcePokeapiPokemonName = "leafeon"
+ SourcePokeapiPokemonNameGlaceon SourcePokeapiPokemonName = "glaceon"
+ SourcePokeapiPokemonNameGliscor SourcePokeapiPokemonName = "gliscor"
+ SourcePokeapiPokemonNameMamoswine SourcePokeapiPokemonName = "mamoswine"
+ SourcePokeapiPokemonNamePorygonZ SourcePokeapiPokemonName = "porygon-z"
+ SourcePokeapiPokemonNameGallade SourcePokeapiPokemonName = "gallade"
+ SourcePokeapiPokemonNameProbopass SourcePokeapiPokemonName = "probopass"
+ SourcePokeapiPokemonNameDusknoir SourcePokeapiPokemonName = "dusknoir"
+ SourcePokeapiPokemonNameFroslass SourcePokeapiPokemonName = "froslass"
+ SourcePokeapiPokemonNameRotom SourcePokeapiPokemonName = "rotom"
+ SourcePokeapiPokemonNameUxie SourcePokeapiPokemonName = "uxie"
+ SourcePokeapiPokemonNameMesprit SourcePokeapiPokemonName = "mesprit"
+ SourcePokeapiPokemonNameAzelf SourcePokeapiPokemonName = "azelf"
+ SourcePokeapiPokemonNameDialga SourcePokeapiPokemonName = "dialga"
+ SourcePokeapiPokemonNamePalkia SourcePokeapiPokemonName = "palkia"
+ SourcePokeapiPokemonNameHeatran SourcePokeapiPokemonName = "heatran"
+ SourcePokeapiPokemonNameRegigigas SourcePokeapiPokemonName = "regigigas"
+ SourcePokeapiPokemonNameGiratina SourcePokeapiPokemonName = "giratina"
+ SourcePokeapiPokemonNameCresselia SourcePokeapiPokemonName = "cresselia"
+ SourcePokeapiPokemonNamePhione SourcePokeapiPokemonName = "phione"
+ SourcePokeapiPokemonNameManaphy SourcePokeapiPokemonName = "manaphy"
+ SourcePokeapiPokemonNameDarkrai SourcePokeapiPokemonName = "darkrai"
+ SourcePokeapiPokemonNameShaymin SourcePokeapiPokemonName = "shaymin"
+ SourcePokeapiPokemonNameArceus SourcePokeapiPokemonName = "arceus"
+ SourcePokeapiPokemonNameVictini SourcePokeapiPokemonName = "victini"
+ SourcePokeapiPokemonNameSnivy SourcePokeapiPokemonName = "snivy"
+ SourcePokeapiPokemonNameServine SourcePokeapiPokemonName = "servine"
+ SourcePokeapiPokemonNameSerperior SourcePokeapiPokemonName = "serperior"
+ SourcePokeapiPokemonNameTepig SourcePokeapiPokemonName = "tepig"
+ SourcePokeapiPokemonNamePignite SourcePokeapiPokemonName = "pignite"
+ SourcePokeapiPokemonNameEmboar SourcePokeapiPokemonName = "emboar"
+ SourcePokeapiPokemonNameOshawott SourcePokeapiPokemonName = "oshawott"
+ SourcePokeapiPokemonNameDewott SourcePokeapiPokemonName = "dewott"
+ SourcePokeapiPokemonNameSamurott SourcePokeapiPokemonName = "samurott"
+ SourcePokeapiPokemonNamePatrat SourcePokeapiPokemonName = "patrat"
+ SourcePokeapiPokemonNameWatchog SourcePokeapiPokemonName = "watchog"
+ SourcePokeapiPokemonNameLillipup SourcePokeapiPokemonName = "lillipup"
+ SourcePokeapiPokemonNameHerdier SourcePokeapiPokemonName = "herdier"
+ SourcePokeapiPokemonNameStoutland SourcePokeapiPokemonName = "stoutland"
+ SourcePokeapiPokemonNamePurrloin SourcePokeapiPokemonName = "purrloin"
+ SourcePokeapiPokemonNameLiepard SourcePokeapiPokemonName = "liepard"
+ SourcePokeapiPokemonNamePansage SourcePokeapiPokemonName = "pansage"
+ SourcePokeapiPokemonNameSimisage SourcePokeapiPokemonName = "simisage"
+ SourcePokeapiPokemonNamePansear SourcePokeapiPokemonName = "pansear"
+ SourcePokeapiPokemonNameSimisear SourcePokeapiPokemonName = "simisear"
+ SourcePokeapiPokemonNamePanpour SourcePokeapiPokemonName = "panpour"
+ SourcePokeapiPokemonNameSimipour SourcePokeapiPokemonName = "simipour"
+ SourcePokeapiPokemonNameMunna SourcePokeapiPokemonName = "munna"
+ SourcePokeapiPokemonNameMusharna SourcePokeapiPokemonName = "musharna"
+ SourcePokeapiPokemonNamePidove SourcePokeapiPokemonName = "pidove"
+ SourcePokeapiPokemonNameTranquill SourcePokeapiPokemonName = "tranquill"
+ SourcePokeapiPokemonNameUnfezant SourcePokeapiPokemonName = "unfezant"
+ SourcePokeapiPokemonNameBlitzle SourcePokeapiPokemonName = "blitzle"
+ SourcePokeapiPokemonNameZebstrika SourcePokeapiPokemonName = "zebstrika"
+ SourcePokeapiPokemonNameRoggenrola SourcePokeapiPokemonName = "roggenrola"
+ SourcePokeapiPokemonNameBoldore SourcePokeapiPokemonName = "boldore"
+ SourcePokeapiPokemonNameGigalith SourcePokeapiPokemonName = "gigalith"
+ SourcePokeapiPokemonNameWoobat SourcePokeapiPokemonName = "woobat"
+ SourcePokeapiPokemonNameSwoobat SourcePokeapiPokemonName = "swoobat"
+ SourcePokeapiPokemonNameDrilbur SourcePokeapiPokemonName = "drilbur"
+ SourcePokeapiPokemonNameExcadrill SourcePokeapiPokemonName = "excadrill"
+ SourcePokeapiPokemonNameAudino SourcePokeapiPokemonName = "audino"
+ SourcePokeapiPokemonNameTimburr SourcePokeapiPokemonName = "timburr"
+ SourcePokeapiPokemonNameGurdurr SourcePokeapiPokemonName = "gurdurr"
+ SourcePokeapiPokemonNameConkeldurr SourcePokeapiPokemonName = "conkeldurr"
+ SourcePokeapiPokemonNameTympole SourcePokeapiPokemonName = "tympole"
+ SourcePokeapiPokemonNamePalpitoad SourcePokeapiPokemonName = "palpitoad"
+ SourcePokeapiPokemonNameSeismitoad SourcePokeapiPokemonName = "seismitoad"
+ SourcePokeapiPokemonNameThroh SourcePokeapiPokemonName = "throh"
+ SourcePokeapiPokemonNameSawk SourcePokeapiPokemonName = "sawk"
+ SourcePokeapiPokemonNameSewaddle SourcePokeapiPokemonName = "sewaddle"
+ SourcePokeapiPokemonNameSwadloon SourcePokeapiPokemonName = "swadloon"
+ SourcePokeapiPokemonNameLeavanny SourcePokeapiPokemonName = "leavanny"
+ SourcePokeapiPokemonNameVenipede SourcePokeapiPokemonName = "venipede"
+ SourcePokeapiPokemonNameWhirlipede SourcePokeapiPokemonName = "whirlipede"
+ SourcePokeapiPokemonNameScolipede SourcePokeapiPokemonName = "scolipede"
+ SourcePokeapiPokemonNameCottonee SourcePokeapiPokemonName = "cottonee"
+ SourcePokeapiPokemonNameWhimsicott SourcePokeapiPokemonName = "whimsicott"
+ SourcePokeapiPokemonNamePetilil SourcePokeapiPokemonName = "petilil"
+ SourcePokeapiPokemonNameLilligant SourcePokeapiPokemonName = "lilligant"
+ SourcePokeapiPokemonNameBasculin SourcePokeapiPokemonName = "basculin"
+ SourcePokeapiPokemonNameSandile SourcePokeapiPokemonName = "sandile"
+ SourcePokeapiPokemonNameKrokorok SourcePokeapiPokemonName = "krokorok"
+ SourcePokeapiPokemonNameKrookodile SourcePokeapiPokemonName = "krookodile"
+ SourcePokeapiPokemonNameDarumaka SourcePokeapiPokemonName = "darumaka"
+ SourcePokeapiPokemonNameDarmanitan SourcePokeapiPokemonName = "darmanitan"
+ SourcePokeapiPokemonNameMaractus SourcePokeapiPokemonName = "maractus"
+ SourcePokeapiPokemonNameDwebble SourcePokeapiPokemonName = "dwebble"
+ SourcePokeapiPokemonNameCrustle SourcePokeapiPokemonName = "crustle"
+ SourcePokeapiPokemonNameScraggy SourcePokeapiPokemonName = "scraggy"
+ SourcePokeapiPokemonNameScrafty SourcePokeapiPokemonName = "scrafty"
+ SourcePokeapiPokemonNameSigilyph SourcePokeapiPokemonName = "sigilyph"
+ SourcePokeapiPokemonNameYamask SourcePokeapiPokemonName = "yamask"
+ SourcePokeapiPokemonNameCofagrigus SourcePokeapiPokemonName = "cofagrigus"
+ SourcePokeapiPokemonNameTirtouga SourcePokeapiPokemonName = "tirtouga"
+ SourcePokeapiPokemonNameCarracosta SourcePokeapiPokemonName = "carracosta"
+ SourcePokeapiPokemonNameArchen SourcePokeapiPokemonName = "archen"
+ SourcePokeapiPokemonNameArcheops SourcePokeapiPokemonName = "archeops"
+ SourcePokeapiPokemonNameTrubbish SourcePokeapiPokemonName = "trubbish"
+ SourcePokeapiPokemonNameGarbodor SourcePokeapiPokemonName = "garbodor"
+ SourcePokeapiPokemonNameZorua SourcePokeapiPokemonName = "zorua"
+ SourcePokeapiPokemonNameZoroark SourcePokeapiPokemonName = "zoroark"
+ SourcePokeapiPokemonNameMinccino SourcePokeapiPokemonName = "minccino"
+ SourcePokeapiPokemonNameCinccino SourcePokeapiPokemonName = "cinccino"
+ SourcePokeapiPokemonNameGothita SourcePokeapiPokemonName = "gothita"
+ SourcePokeapiPokemonNameGothorita SourcePokeapiPokemonName = "gothorita"
+ SourcePokeapiPokemonNameGothitelle SourcePokeapiPokemonName = "gothitelle"
+ SourcePokeapiPokemonNameSolosis SourcePokeapiPokemonName = "solosis"
+ SourcePokeapiPokemonNameDuosion SourcePokeapiPokemonName = "duosion"
+ SourcePokeapiPokemonNameReuniclus SourcePokeapiPokemonName = "reuniclus"
+ SourcePokeapiPokemonNameDucklett SourcePokeapiPokemonName = "ducklett"
+ SourcePokeapiPokemonNameSwanna SourcePokeapiPokemonName = "swanna"
+ SourcePokeapiPokemonNameVanillite SourcePokeapiPokemonName = "vanillite"
+ SourcePokeapiPokemonNameVanillish SourcePokeapiPokemonName = "vanillish"
+ SourcePokeapiPokemonNameVanilluxe SourcePokeapiPokemonName = "vanilluxe"
+ SourcePokeapiPokemonNameDeerling SourcePokeapiPokemonName = "deerling"
+ SourcePokeapiPokemonNameSawsbuck SourcePokeapiPokemonName = "sawsbuck"
+ SourcePokeapiPokemonNameEmolga SourcePokeapiPokemonName = "emolga"
+ SourcePokeapiPokemonNameKarrablast SourcePokeapiPokemonName = "karrablast"
+ SourcePokeapiPokemonNameEscavalier SourcePokeapiPokemonName = "escavalier"
+ SourcePokeapiPokemonNameFoongus SourcePokeapiPokemonName = "foongus"
+ SourcePokeapiPokemonNameAmoonguss SourcePokeapiPokemonName = "amoonguss"
+ SourcePokeapiPokemonNameFrillish SourcePokeapiPokemonName = "frillish"
+ SourcePokeapiPokemonNameJellicent SourcePokeapiPokemonName = "jellicent"
+ SourcePokeapiPokemonNameAlomomola SourcePokeapiPokemonName = "alomomola"
+ SourcePokeapiPokemonNameJoltik SourcePokeapiPokemonName = "joltik"
+ SourcePokeapiPokemonNameGalvantula SourcePokeapiPokemonName = "galvantula"
+ SourcePokeapiPokemonNameFerroseed SourcePokeapiPokemonName = "ferroseed"
+ SourcePokeapiPokemonNameFerrothorn SourcePokeapiPokemonName = "ferrothorn"
+ SourcePokeapiPokemonNameKlink SourcePokeapiPokemonName = "klink"
+ SourcePokeapiPokemonNameKlang SourcePokeapiPokemonName = "klang"
+ SourcePokeapiPokemonNameKlinklang SourcePokeapiPokemonName = "klinklang"
+ SourcePokeapiPokemonNameTynamo SourcePokeapiPokemonName = "tynamo"
+ SourcePokeapiPokemonNameEelektrik SourcePokeapiPokemonName = "eelektrik"
+ SourcePokeapiPokemonNameEelektross SourcePokeapiPokemonName = "eelektross"
+ SourcePokeapiPokemonNameElgyem SourcePokeapiPokemonName = "elgyem"
+ SourcePokeapiPokemonNameBeheeyem SourcePokeapiPokemonName = "beheeyem"
+ SourcePokeapiPokemonNameLitwick SourcePokeapiPokemonName = "litwick"
+ SourcePokeapiPokemonNameLampent SourcePokeapiPokemonName = "lampent"
+ SourcePokeapiPokemonNameChandelure SourcePokeapiPokemonName = "chandelure"
+ SourcePokeapiPokemonNameAxew SourcePokeapiPokemonName = "axew"
+ SourcePokeapiPokemonNameFraxure SourcePokeapiPokemonName = "fraxure"
+ SourcePokeapiPokemonNameHaxorus SourcePokeapiPokemonName = "haxorus"
+ SourcePokeapiPokemonNameCubchoo SourcePokeapiPokemonName = "cubchoo"
+ SourcePokeapiPokemonNameBeartic SourcePokeapiPokemonName = "beartic"
+ SourcePokeapiPokemonNameCryogonal SourcePokeapiPokemonName = "cryogonal"
+ SourcePokeapiPokemonNameShelmet SourcePokeapiPokemonName = "shelmet"
+ SourcePokeapiPokemonNameAccelgor SourcePokeapiPokemonName = "accelgor"
+ SourcePokeapiPokemonNameStunfisk SourcePokeapiPokemonName = "stunfisk"
+ SourcePokeapiPokemonNameMienfoo SourcePokeapiPokemonName = "mienfoo"
+ SourcePokeapiPokemonNameMienshao SourcePokeapiPokemonName = "mienshao"
+ SourcePokeapiPokemonNameDruddigon SourcePokeapiPokemonName = "druddigon"
+ SourcePokeapiPokemonNameGolett SourcePokeapiPokemonName = "golett"
+ SourcePokeapiPokemonNameGolurk SourcePokeapiPokemonName = "golurk"
+ SourcePokeapiPokemonNamePawniard SourcePokeapiPokemonName = "pawniard"
+ SourcePokeapiPokemonNameBisharp SourcePokeapiPokemonName = "bisharp"
+ SourcePokeapiPokemonNameBouffalant SourcePokeapiPokemonName = "bouffalant"
+ SourcePokeapiPokemonNameRufflet SourcePokeapiPokemonName = "rufflet"
+ SourcePokeapiPokemonNameBraviary SourcePokeapiPokemonName = "braviary"
+ SourcePokeapiPokemonNameVullaby SourcePokeapiPokemonName = "vullaby"
+ SourcePokeapiPokemonNameMandibuzz SourcePokeapiPokemonName = "mandibuzz"
+ SourcePokeapiPokemonNameHeatmor SourcePokeapiPokemonName = "heatmor"
+ SourcePokeapiPokemonNameDurant SourcePokeapiPokemonName = "durant"
+ SourcePokeapiPokemonNameDeino SourcePokeapiPokemonName = "deino"
+ SourcePokeapiPokemonNameZweilous SourcePokeapiPokemonName = "zweilous"
+ SourcePokeapiPokemonNameHydreigon SourcePokeapiPokemonName = "hydreigon"
+ SourcePokeapiPokemonNameLarvesta SourcePokeapiPokemonName = "larvesta"
+ SourcePokeapiPokemonNameVolcarona SourcePokeapiPokemonName = "volcarona"
+ SourcePokeapiPokemonNameCobalion SourcePokeapiPokemonName = "cobalion"
+ SourcePokeapiPokemonNameTerrakion SourcePokeapiPokemonName = "terrakion"
+ SourcePokeapiPokemonNameVirizion SourcePokeapiPokemonName = "virizion"
+ SourcePokeapiPokemonNameTornadus SourcePokeapiPokemonName = "tornadus"
+ SourcePokeapiPokemonNameThundurus SourcePokeapiPokemonName = "thundurus"
+ SourcePokeapiPokemonNameReshiram SourcePokeapiPokemonName = "reshiram"
+ SourcePokeapiPokemonNameZekrom SourcePokeapiPokemonName = "zekrom"
+ SourcePokeapiPokemonNameLandorus SourcePokeapiPokemonName = "landorus"
+ SourcePokeapiPokemonNameKyurem SourcePokeapiPokemonName = "kyurem"
+ SourcePokeapiPokemonNameKeldeo SourcePokeapiPokemonName = "keldeo"
+ SourcePokeapiPokemonNameMeloetta SourcePokeapiPokemonName = "meloetta"
+ SourcePokeapiPokemonNameGenesect SourcePokeapiPokemonName = "genesect"
+ SourcePokeapiPokemonNameChespin SourcePokeapiPokemonName = "chespin"
+ SourcePokeapiPokemonNameQuilladin SourcePokeapiPokemonName = "quilladin"
+ SourcePokeapiPokemonNameChesnaught SourcePokeapiPokemonName = "chesnaught"
+ SourcePokeapiPokemonNameFennekin SourcePokeapiPokemonName = "fennekin"
+ SourcePokeapiPokemonNameBraixen SourcePokeapiPokemonName = "braixen"
+ SourcePokeapiPokemonNameDelphox SourcePokeapiPokemonName = "delphox"
+ SourcePokeapiPokemonNameFroakie SourcePokeapiPokemonName = "froakie"
+ SourcePokeapiPokemonNameFrogadier SourcePokeapiPokemonName = "frogadier"
+ SourcePokeapiPokemonNameGreninja SourcePokeapiPokemonName = "greninja"
+ SourcePokeapiPokemonNameBunnelby SourcePokeapiPokemonName = "bunnelby"
+ SourcePokeapiPokemonNameDiggersby SourcePokeapiPokemonName = "diggersby"
+ SourcePokeapiPokemonNameFletchling SourcePokeapiPokemonName = "fletchling"
+ SourcePokeapiPokemonNameFletchinder SourcePokeapiPokemonName = "fletchinder"
+ SourcePokeapiPokemonNameTalonflame SourcePokeapiPokemonName = "talonflame"
+ SourcePokeapiPokemonNameScatterbug SourcePokeapiPokemonName = "scatterbug"
+ SourcePokeapiPokemonNameSpewpa SourcePokeapiPokemonName = "spewpa"
+ SourcePokeapiPokemonNameVivillon SourcePokeapiPokemonName = "vivillon"
+ SourcePokeapiPokemonNameLitleo SourcePokeapiPokemonName = "litleo"
+ SourcePokeapiPokemonNamePyroar SourcePokeapiPokemonName = "pyroar"
+ SourcePokeapiPokemonNameFlabebe SourcePokeapiPokemonName = "flabebe"
+ SourcePokeapiPokemonNameFloette SourcePokeapiPokemonName = "floette"
+ SourcePokeapiPokemonNameFlorges SourcePokeapiPokemonName = "florges"
+ SourcePokeapiPokemonNameSkiddo SourcePokeapiPokemonName = "skiddo"
+ SourcePokeapiPokemonNameGogoat SourcePokeapiPokemonName = "gogoat"
+ SourcePokeapiPokemonNamePancham SourcePokeapiPokemonName = "pancham"
+ SourcePokeapiPokemonNamePangoro SourcePokeapiPokemonName = "pangoro"
+ SourcePokeapiPokemonNameFurfrou SourcePokeapiPokemonName = "furfrou"
+ SourcePokeapiPokemonNameEspurr SourcePokeapiPokemonName = "espurr"
+ SourcePokeapiPokemonNameMeowstic SourcePokeapiPokemonName = "meowstic"
+ SourcePokeapiPokemonNameHonedge SourcePokeapiPokemonName = "honedge"
+ SourcePokeapiPokemonNameDoublade SourcePokeapiPokemonName = "doublade"
+ SourcePokeapiPokemonNameAegislash SourcePokeapiPokemonName = "aegislash"
+ SourcePokeapiPokemonNameSpritzee SourcePokeapiPokemonName = "spritzee"
+ SourcePokeapiPokemonNameAromatisse SourcePokeapiPokemonName = "aromatisse"
+ SourcePokeapiPokemonNameSwirlix SourcePokeapiPokemonName = "swirlix"
+ SourcePokeapiPokemonNameSlurpuff SourcePokeapiPokemonName = "slurpuff"
+ SourcePokeapiPokemonNameInkay SourcePokeapiPokemonName = "inkay"
+ SourcePokeapiPokemonNameMalamar SourcePokeapiPokemonName = "malamar"
+ SourcePokeapiPokemonNameBinacle SourcePokeapiPokemonName = "binacle"
+ SourcePokeapiPokemonNameBarbaracle SourcePokeapiPokemonName = "barbaracle"
+ SourcePokeapiPokemonNameSkrelp SourcePokeapiPokemonName = "skrelp"
+ SourcePokeapiPokemonNameDragalge SourcePokeapiPokemonName = "dragalge"
+ SourcePokeapiPokemonNameClauncher SourcePokeapiPokemonName = "clauncher"
+ SourcePokeapiPokemonNameClawitzer SourcePokeapiPokemonName = "clawitzer"
+ SourcePokeapiPokemonNameHelioptile SourcePokeapiPokemonName = "helioptile"
+ SourcePokeapiPokemonNameHeliolisk SourcePokeapiPokemonName = "heliolisk"
+ SourcePokeapiPokemonNameTyrunt SourcePokeapiPokemonName = "tyrunt"
+ SourcePokeapiPokemonNameTyrantrum SourcePokeapiPokemonName = "tyrantrum"
+ SourcePokeapiPokemonNameAmaura SourcePokeapiPokemonName = "amaura"
+ SourcePokeapiPokemonNameAurorus SourcePokeapiPokemonName = "aurorus"
+ SourcePokeapiPokemonNameSylveon SourcePokeapiPokemonName = "sylveon"
+ SourcePokeapiPokemonNameHawlucha SourcePokeapiPokemonName = "hawlucha"
+ SourcePokeapiPokemonNameDedenne SourcePokeapiPokemonName = "dedenne"
+ SourcePokeapiPokemonNameCarbink SourcePokeapiPokemonName = "carbink"
+ SourcePokeapiPokemonNameGoomy SourcePokeapiPokemonName = "goomy"
+ SourcePokeapiPokemonNameSliggoo SourcePokeapiPokemonName = "sliggoo"
+ SourcePokeapiPokemonNameGoodra SourcePokeapiPokemonName = "goodra"
+ SourcePokeapiPokemonNameKlefki SourcePokeapiPokemonName = "klefki"
+ SourcePokeapiPokemonNamePhantump SourcePokeapiPokemonName = "phantump"
+ SourcePokeapiPokemonNameTrevenant SourcePokeapiPokemonName = "trevenant"
+ SourcePokeapiPokemonNamePumpkaboo SourcePokeapiPokemonName = "pumpkaboo"
+ SourcePokeapiPokemonNameGourgeist SourcePokeapiPokemonName = "gourgeist"
+ SourcePokeapiPokemonNameBergmite SourcePokeapiPokemonName = "bergmite"
+ SourcePokeapiPokemonNameAvalugg SourcePokeapiPokemonName = "avalugg"
+ SourcePokeapiPokemonNameNoibat SourcePokeapiPokemonName = "noibat"
+ SourcePokeapiPokemonNameNoivern SourcePokeapiPokemonName = "noivern"
+ SourcePokeapiPokemonNameXerneas SourcePokeapiPokemonName = "xerneas"
+ SourcePokeapiPokemonNameYveltal SourcePokeapiPokemonName = "yveltal"
+ SourcePokeapiPokemonNameZygarde SourcePokeapiPokemonName = "zygarde"
+ SourcePokeapiPokemonNameDiancie SourcePokeapiPokemonName = "diancie"
+ SourcePokeapiPokemonNameHoopa SourcePokeapiPokemonName = "hoopa"
+ SourcePokeapiPokemonNameVolcanion SourcePokeapiPokemonName = "volcanion"
+ SourcePokeapiPokemonNameRowlet SourcePokeapiPokemonName = "rowlet"
+ SourcePokeapiPokemonNameDartrix SourcePokeapiPokemonName = "dartrix"
+ SourcePokeapiPokemonNameDecidueye SourcePokeapiPokemonName = "decidueye"
+ SourcePokeapiPokemonNameLitten SourcePokeapiPokemonName = "litten"
+ SourcePokeapiPokemonNameTorracat SourcePokeapiPokemonName = "torracat"
+ SourcePokeapiPokemonNameIncineroar SourcePokeapiPokemonName = "incineroar"
+ SourcePokeapiPokemonNamePopplio SourcePokeapiPokemonName = "popplio"
+ SourcePokeapiPokemonNameBrionne SourcePokeapiPokemonName = "brionne"
+ SourcePokeapiPokemonNamePrimarina SourcePokeapiPokemonName = "primarina"
+ SourcePokeapiPokemonNamePikipek SourcePokeapiPokemonName = "pikipek"
+ SourcePokeapiPokemonNameTrumbeak SourcePokeapiPokemonName = "trumbeak"
+ SourcePokeapiPokemonNameToucannon SourcePokeapiPokemonName = "toucannon"
+ SourcePokeapiPokemonNameYungoos SourcePokeapiPokemonName = "yungoos"
+ SourcePokeapiPokemonNameGumshoos SourcePokeapiPokemonName = "gumshoos"
+ SourcePokeapiPokemonNameGrubbin SourcePokeapiPokemonName = "grubbin"
+ SourcePokeapiPokemonNameCharjabug SourcePokeapiPokemonName = "charjabug"
+ SourcePokeapiPokemonNameVikavolt SourcePokeapiPokemonName = "vikavolt"
+ SourcePokeapiPokemonNameCrabrawler SourcePokeapiPokemonName = "crabrawler"
+ SourcePokeapiPokemonNameCrabominable SourcePokeapiPokemonName = "crabominable"
+ SourcePokeapiPokemonNameOricorio SourcePokeapiPokemonName = "oricorio"
+ SourcePokeapiPokemonNameCutiefly SourcePokeapiPokemonName = "cutiefly"
+ SourcePokeapiPokemonNameRibombee SourcePokeapiPokemonName = "ribombee"
+ SourcePokeapiPokemonNameRockruff SourcePokeapiPokemonName = "rockruff"
+ SourcePokeapiPokemonNameLycanroc SourcePokeapiPokemonName = "lycanroc"
+ SourcePokeapiPokemonNameWishiwashi SourcePokeapiPokemonName = "wishiwashi"
+ SourcePokeapiPokemonNameMareanie SourcePokeapiPokemonName = "mareanie"
+ SourcePokeapiPokemonNameToxapex SourcePokeapiPokemonName = "toxapex"
+ SourcePokeapiPokemonNameMudbray SourcePokeapiPokemonName = "mudbray"
+ SourcePokeapiPokemonNameMudsdale SourcePokeapiPokemonName = "mudsdale"
+ SourcePokeapiPokemonNameDewpider SourcePokeapiPokemonName = "dewpider"
+ SourcePokeapiPokemonNameAraquanid SourcePokeapiPokemonName = "araquanid"
+ SourcePokeapiPokemonNameFomantis SourcePokeapiPokemonName = "fomantis"
+ SourcePokeapiPokemonNameLurantis SourcePokeapiPokemonName = "lurantis"
+ SourcePokeapiPokemonNameMorelull SourcePokeapiPokemonName = "morelull"
+ SourcePokeapiPokemonNameShiinotic SourcePokeapiPokemonName = "shiinotic"
+ SourcePokeapiPokemonNameSalandit SourcePokeapiPokemonName = "salandit"
+ SourcePokeapiPokemonNameSalazzle SourcePokeapiPokemonName = "salazzle"
+ SourcePokeapiPokemonNameStufful SourcePokeapiPokemonName = "stufful"
+ SourcePokeapiPokemonNameBewear SourcePokeapiPokemonName = "bewear"
+ SourcePokeapiPokemonNameBounsweet SourcePokeapiPokemonName = "bounsweet"
+ SourcePokeapiPokemonNameSteenee SourcePokeapiPokemonName = "steenee"
+ SourcePokeapiPokemonNameTsareena SourcePokeapiPokemonName = "tsareena"
+ SourcePokeapiPokemonNameComfey SourcePokeapiPokemonName = "comfey"
+ SourcePokeapiPokemonNameOranguru SourcePokeapiPokemonName = "oranguru"
+ SourcePokeapiPokemonNamePassimian SourcePokeapiPokemonName = "passimian"
+ SourcePokeapiPokemonNameWimpod SourcePokeapiPokemonName = "wimpod"
+ SourcePokeapiPokemonNameGolisopod SourcePokeapiPokemonName = "golisopod"
+ SourcePokeapiPokemonNameSandygast SourcePokeapiPokemonName = "sandygast"
+ SourcePokeapiPokemonNamePalossand SourcePokeapiPokemonName = "palossand"
+ SourcePokeapiPokemonNamePyukumuku SourcePokeapiPokemonName = "pyukumuku"
+ SourcePokeapiPokemonNameTypenull SourcePokeapiPokemonName = "typenull"
+ SourcePokeapiPokemonNameSilvally SourcePokeapiPokemonName = "silvally"
+ SourcePokeapiPokemonNameMinior SourcePokeapiPokemonName = "minior"
+ SourcePokeapiPokemonNameKomala SourcePokeapiPokemonName = "komala"
+ SourcePokeapiPokemonNameTurtonator SourcePokeapiPokemonName = "turtonator"
+ SourcePokeapiPokemonNameTogedemaru SourcePokeapiPokemonName = "togedemaru"
+ SourcePokeapiPokemonNameMimikyu SourcePokeapiPokemonName = "mimikyu"
+ SourcePokeapiPokemonNameBruxish SourcePokeapiPokemonName = "bruxish"
+ SourcePokeapiPokemonNameDrampa SourcePokeapiPokemonName = "drampa"
+ SourcePokeapiPokemonNameDhelmise SourcePokeapiPokemonName = "dhelmise"
+ SourcePokeapiPokemonNameJangmoO SourcePokeapiPokemonName = "jangmo-o"
+ SourcePokeapiPokemonNameHakamoO SourcePokeapiPokemonName = "hakamo-o"
+ SourcePokeapiPokemonNameKommoO SourcePokeapiPokemonName = "kommo-o"
+ SourcePokeapiPokemonNameTapukoko SourcePokeapiPokemonName = "tapukoko"
+ SourcePokeapiPokemonNameTapulele SourcePokeapiPokemonName = "tapulele"
+ SourcePokeapiPokemonNameTapubulu SourcePokeapiPokemonName = "tapubulu"
+ SourcePokeapiPokemonNameTapufini SourcePokeapiPokemonName = "tapufini"
+ SourcePokeapiPokemonNameCosmog SourcePokeapiPokemonName = "cosmog"
+ SourcePokeapiPokemonNameCosmoem SourcePokeapiPokemonName = "cosmoem"
+ SourcePokeapiPokemonNameSolgaleo SourcePokeapiPokemonName = "solgaleo"
+ SourcePokeapiPokemonNameLunala SourcePokeapiPokemonName = "lunala"
+ SourcePokeapiPokemonNameNihilego SourcePokeapiPokemonName = "nihilego"
+ SourcePokeapiPokemonNameBuzzwole SourcePokeapiPokemonName = "buzzwole"
+ SourcePokeapiPokemonNamePheromosa SourcePokeapiPokemonName = "pheromosa"
+ SourcePokeapiPokemonNameXurkitree SourcePokeapiPokemonName = "xurkitree"
+ SourcePokeapiPokemonNameCelesteela SourcePokeapiPokemonName = "celesteela"
+ SourcePokeapiPokemonNameKartana SourcePokeapiPokemonName = "kartana"
+ SourcePokeapiPokemonNameGuzzlord SourcePokeapiPokemonName = "guzzlord"
+ SourcePokeapiPokemonNameNecrozma SourcePokeapiPokemonName = "necrozma"
+ SourcePokeapiPokemonNameMagearna SourcePokeapiPokemonName = "magearna"
+ SourcePokeapiPokemonNameMarshadow SourcePokeapiPokemonName = "marshadow"
+ SourcePokeapiPokemonNamePoipole SourcePokeapiPokemonName = "poipole"
+ SourcePokeapiPokemonNameNaganadel SourcePokeapiPokemonName = "naganadel"
+ SourcePokeapiPokemonNameStakataka SourcePokeapiPokemonName = "stakataka"
+ SourcePokeapiPokemonNameBlacephalon SourcePokeapiPokemonName = "blacephalon"
+ SourcePokeapiPokemonNameZeraora SourcePokeapiPokemonName = "zeraora"
+ SourcePokeapiPokemonNameMeltan SourcePokeapiPokemonName = "meltan"
+ SourcePokeapiPokemonNameMelmetal SourcePokeapiPokemonName = "melmetal"
+ SourcePokeapiPokemonNameGrookey SourcePokeapiPokemonName = "grookey"
+ SourcePokeapiPokemonNameThwackey SourcePokeapiPokemonName = "thwackey"
+ SourcePokeapiPokemonNameRillaboom SourcePokeapiPokemonName = "rillaboom"
+ SourcePokeapiPokemonNameScorbunny SourcePokeapiPokemonName = "scorbunny"
+ SourcePokeapiPokemonNameRaboot SourcePokeapiPokemonName = "raboot"
+ SourcePokeapiPokemonNameCinderace SourcePokeapiPokemonName = "cinderace"
+ SourcePokeapiPokemonNameSobble SourcePokeapiPokemonName = "sobble"
+ SourcePokeapiPokemonNameDrizzile SourcePokeapiPokemonName = "drizzile"
+ SourcePokeapiPokemonNameInteleon SourcePokeapiPokemonName = "inteleon"
+ SourcePokeapiPokemonNameSkwovet SourcePokeapiPokemonName = "skwovet"
+ SourcePokeapiPokemonNameGreedent SourcePokeapiPokemonName = "greedent"
+ SourcePokeapiPokemonNameRookidee SourcePokeapiPokemonName = "rookidee"
+ SourcePokeapiPokemonNameCorvisquire SourcePokeapiPokemonName = "corvisquire"
+ SourcePokeapiPokemonNameCorviknight SourcePokeapiPokemonName = "corviknight"
+ SourcePokeapiPokemonNameBlipbug SourcePokeapiPokemonName = "blipbug"
+ SourcePokeapiPokemonNameDottler SourcePokeapiPokemonName = "dottler"
+ SourcePokeapiPokemonNameOrbeetle SourcePokeapiPokemonName = "orbeetle"
+ SourcePokeapiPokemonNameNickit SourcePokeapiPokemonName = "nickit"
+ SourcePokeapiPokemonNameThievul SourcePokeapiPokemonName = "thievul"
+ SourcePokeapiPokemonNameGossifleur SourcePokeapiPokemonName = "gossifleur"
+ SourcePokeapiPokemonNameEldegoss SourcePokeapiPokemonName = "eldegoss"
+ SourcePokeapiPokemonNameWooloo SourcePokeapiPokemonName = "wooloo"
+ SourcePokeapiPokemonNameDubwool SourcePokeapiPokemonName = "dubwool"
+ SourcePokeapiPokemonNameChewtle SourcePokeapiPokemonName = "chewtle"
+ SourcePokeapiPokemonNameDrednaw SourcePokeapiPokemonName = "drednaw"
+ SourcePokeapiPokemonNameYamper SourcePokeapiPokemonName = "yamper"
+ SourcePokeapiPokemonNameBoltund SourcePokeapiPokemonName = "boltund"
+ SourcePokeapiPokemonNameRolycoly SourcePokeapiPokemonName = "rolycoly"
+ SourcePokeapiPokemonNameCarkol SourcePokeapiPokemonName = "carkol"
+ SourcePokeapiPokemonNameCoalossal SourcePokeapiPokemonName = "coalossal"
+ SourcePokeapiPokemonNameApplin SourcePokeapiPokemonName = "applin"
+ SourcePokeapiPokemonNameFlapple SourcePokeapiPokemonName = "flapple"
+ SourcePokeapiPokemonNameAppletun SourcePokeapiPokemonName = "appletun"
+ SourcePokeapiPokemonNameSilicobra SourcePokeapiPokemonName = "silicobra"
+ SourcePokeapiPokemonNameSandaconda SourcePokeapiPokemonName = "sandaconda"
+ SourcePokeapiPokemonNameCramorant SourcePokeapiPokemonName = "cramorant"
+ SourcePokeapiPokemonNameArrokuda SourcePokeapiPokemonName = "arrokuda"
+ SourcePokeapiPokemonNameBarraskewda SourcePokeapiPokemonName = "barraskewda"
+ SourcePokeapiPokemonNameToxel SourcePokeapiPokemonName = "toxel"
+ SourcePokeapiPokemonNameToxtricity SourcePokeapiPokemonName = "toxtricity"
+ SourcePokeapiPokemonNameSizzlipede SourcePokeapiPokemonName = "sizzlipede"
+ SourcePokeapiPokemonNameCentiskorch SourcePokeapiPokemonName = "centiskorch"
+ SourcePokeapiPokemonNameClobbopus SourcePokeapiPokemonName = "clobbopus"
+ SourcePokeapiPokemonNameGrapploct SourcePokeapiPokemonName = "grapploct"
+ SourcePokeapiPokemonNameSinistea SourcePokeapiPokemonName = "sinistea"
+ SourcePokeapiPokemonNamePolteageist SourcePokeapiPokemonName = "polteageist"
+ SourcePokeapiPokemonNameHatenna SourcePokeapiPokemonName = "hatenna"
+ SourcePokeapiPokemonNameHattrem SourcePokeapiPokemonName = "hattrem"
+ SourcePokeapiPokemonNameHatterene SourcePokeapiPokemonName = "hatterene"
+ SourcePokeapiPokemonNameImpidimp SourcePokeapiPokemonName = "impidimp"
+ SourcePokeapiPokemonNameMorgrem SourcePokeapiPokemonName = "morgrem"
+ SourcePokeapiPokemonNameGrimmsnarl SourcePokeapiPokemonName = "grimmsnarl"
+ SourcePokeapiPokemonNameObstagoon SourcePokeapiPokemonName = "obstagoon"
+ SourcePokeapiPokemonNamePerrserker SourcePokeapiPokemonName = "perrserker"
+ SourcePokeapiPokemonNameCursola SourcePokeapiPokemonName = "cursola"
+ SourcePokeapiPokemonNameSirfetchd SourcePokeapiPokemonName = "sirfetchd"
+ SourcePokeapiPokemonNameMrrime SourcePokeapiPokemonName = "mrrime"
+ SourcePokeapiPokemonNameRunerigus SourcePokeapiPokemonName = "runerigus"
+ SourcePokeapiPokemonNameMilcery SourcePokeapiPokemonName = "milcery"
+ SourcePokeapiPokemonNameAlcremie SourcePokeapiPokemonName = "alcremie"
+ SourcePokeapiPokemonNameFalinks SourcePokeapiPokemonName = "falinks"
+ SourcePokeapiPokemonNamePincurchin SourcePokeapiPokemonName = "pincurchin"
+ SourcePokeapiPokemonNameSnom SourcePokeapiPokemonName = "snom"
+ SourcePokeapiPokemonNameFrosmoth SourcePokeapiPokemonName = "frosmoth"
+ SourcePokeapiPokemonNameStonjourner SourcePokeapiPokemonName = "stonjourner"
+ SourcePokeapiPokemonNameEiscue SourcePokeapiPokemonName = "eiscue"
+ SourcePokeapiPokemonNameIndeedee SourcePokeapiPokemonName = "indeedee"
+ SourcePokeapiPokemonNameMorpeko SourcePokeapiPokemonName = "morpeko"
+ SourcePokeapiPokemonNameCufant SourcePokeapiPokemonName = "cufant"
+ SourcePokeapiPokemonNameCopperajah SourcePokeapiPokemonName = "copperajah"
+ SourcePokeapiPokemonNameDracozolt SourcePokeapiPokemonName = "dracozolt"
+ SourcePokeapiPokemonNameArctozolt SourcePokeapiPokemonName = "arctozolt"
+ SourcePokeapiPokemonNameDracovish SourcePokeapiPokemonName = "dracovish"
+ SourcePokeapiPokemonNameArctovish SourcePokeapiPokemonName = "arctovish"
+ SourcePokeapiPokemonNameDuraludon SourcePokeapiPokemonName = "duraludon"
+ SourcePokeapiPokemonNameDreepy SourcePokeapiPokemonName = "dreepy"
+ SourcePokeapiPokemonNameDrakloak SourcePokeapiPokemonName = "drakloak"
+ SourcePokeapiPokemonNameDragapult SourcePokeapiPokemonName = "dragapult"
+ SourcePokeapiPokemonNameZacian SourcePokeapiPokemonName = "zacian"
+ SourcePokeapiPokemonNameZamazenta SourcePokeapiPokemonName = "zamazenta"
+ SourcePokeapiPokemonNameEternatus SourcePokeapiPokemonName = "eternatus"
+ SourcePokeapiPokemonNameKubfu SourcePokeapiPokemonName = "kubfu"
+ SourcePokeapiPokemonNameUrshifu SourcePokeapiPokemonName = "urshifu"
+ SourcePokeapiPokemonNameZarude SourcePokeapiPokemonName = "zarude"
+ SourcePokeapiPokemonNameRegieleki SourcePokeapiPokemonName = "regieleki"
+ SourcePokeapiPokemonNameRegidrago SourcePokeapiPokemonName = "regidrago"
+ SourcePokeapiPokemonNameGlastrier SourcePokeapiPokemonName = "glastrier"
+ SourcePokeapiPokemonNameSpectrier SourcePokeapiPokemonName = "spectrier"
+ SourcePokeapiPokemonNameCalyrex SourcePokeapiPokemonName = "calyrex"
)
-func (e SourcePokeapiPokeapi) ToPointer() *SourcePokeapiPokeapi {
+func (e SourcePokeapiPokemonName) ToPointer() *SourcePokeapiPokemonName {
return &e
}
-func (e *SourcePokeapiPokeapi) UnmarshalJSON(data []byte) error {
+func (e *SourcePokeapiPokemonName) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "bulbasaur":
+ fallthrough
+ case "ivysaur":
+ fallthrough
+ case "venusaur":
+ fallthrough
+ case "charmander":
+ fallthrough
+ case "charmeleon":
+ fallthrough
+ case "charizard":
+ fallthrough
+ case "squirtle":
+ fallthrough
+ case "wartortle":
+ fallthrough
+ case "blastoise":
+ fallthrough
+ case "caterpie":
+ fallthrough
+ case "metapod":
+ fallthrough
+ case "butterfree":
+ fallthrough
+ case "weedle":
+ fallthrough
+ case "kakuna":
+ fallthrough
+ case "beedrill":
+ fallthrough
+ case "pidgey":
+ fallthrough
+ case "pidgeotto":
+ fallthrough
+ case "pidgeot":
+ fallthrough
+ case "rattata":
+ fallthrough
+ case "raticate":
+ fallthrough
+ case "spearow":
+ fallthrough
+ case "fearow":
+ fallthrough
+ case "ekans":
+ fallthrough
+ case "arbok":
+ fallthrough
+ case "pikachu":
+ fallthrough
+ case "raichu":
+ fallthrough
+ case "sandshrew":
+ fallthrough
+ case "sandslash":
+ fallthrough
+ case "nidoranf":
+ fallthrough
+ case "nidorina":
+ fallthrough
+ case "nidoqueen":
+ fallthrough
+ case "nidoranm":
+ fallthrough
+ case "nidorino":
+ fallthrough
+ case "nidoking":
+ fallthrough
+ case "clefairy":
+ fallthrough
+ case "clefable":
+ fallthrough
+ case "vulpix":
+ fallthrough
+ case "ninetales":
+ fallthrough
+ case "jigglypuff":
+ fallthrough
+ case "wigglytuff":
+ fallthrough
+ case "zubat":
+ fallthrough
+ case "golbat":
+ fallthrough
+ case "oddish":
+ fallthrough
+ case "gloom":
+ fallthrough
+ case "vileplume":
+ fallthrough
+ case "paras":
+ fallthrough
+ case "parasect":
+ fallthrough
+ case "venonat":
+ fallthrough
+ case "venomoth":
+ fallthrough
+ case "diglett":
+ fallthrough
+ case "dugtrio":
+ fallthrough
+ case "meowth":
+ fallthrough
+ case "persian":
+ fallthrough
+ case "psyduck":
+ fallthrough
+ case "golduck":
+ fallthrough
+ case "mankey":
+ fallthrough
+ case "primeape":
+ fallthrough
+ case "growlithe":
+ fallthrough
+ case "arcanine":
+ fallthrough
+ case "poliwag":
+ fallthrough
+ case "poliwhirl":
+ fallthrough
+ case "poliwrath":
+ fallthrough
+ case "abra":
+ fallthrough
+ case "kadabra":
+ fallthrough
+ case "alakazam":
+ fallthrough
+ case "machop":
+ fallthrough
+ case "machoke":
+ fallthrough
+ case "machamp":
+ fallthrough
+ case "bellsprout":
+ fallthrough
+ case "weepinbell":
+ fallthrough
+ case "victreebel":
+ fallthrough
+ case "tentacool":
+ fallthrough
+ case "tentacruel":
+ fallthrough
+ case "geodude":
+ fallthrough
+ case "graveler":
+ fallthrough
+ case "golem":
+ fallthrough
+ case "ponyta":
+ fallthrough
+ case "rapidash":
+ fallthrough
+ case "slowpoke":
+ fallthrough
+ case "slowbro":
+ fallthrough
+ case "magnemite":
+ fallthrough
+ case "magneton":
+ fallthrough
+ case "farfetchd":
+ fallthrough
+ case "doduo":
+ fallthrough
+ case "dodrio":
+ fallthrough
+ case "seel":
+ fallthrough
+ case "dewgong":
+ fallthrough
+ case "grimer":
+ fallthrough
+ case "muk":
+ fallthrough
+ case "shellder":
+ fallthrough
+ case "cloyster":
+ fallthrough
+ case "gastly":
+ fallthrough
+ case "haunter":
+ fallthrough
+ case "gengar":
+ fallthrough
+ case "onix":
+ fallthrough
+ case "drowzee":
+ fallthrough
+ case "hypno":
+ fallthrough
+ case "krabby":
+ fallthrough
+ case "kingler":
+ fallthrough
+ case "voltorb":
+ fallthrough
+ case "electrode":
+ fallthrough
+ case "exeggcute":
+ fallthrough
+ case "exeggutor":
+ fallthrough
+ case "cubone":
+ fallthrough
+ case "marowak":
+ fallthrough
+ case "hitmonlee":
+ fallthrough
+ case "hitmonchan":
+ fallthrough
+ case "lickitung":
+ fallthrough
+ case "koffing":
+ fallthrough
+ case "weezing":
+ fallthrough
+ case "rhyhorn":
+ fallthrough
+ case "rhydon":
+ fallthrough
+ case "chansey":
+ fallthrough
+ case "tangela":
+ fallthrough
+ case "kangaskhan":
+ fallthrough
+ case "horsea":
+ fallthrough
+ case "seadra":
+ fallthrough
+ case "goldeen":
+ fallthrough
+ case "seaking":
+ fallthrough
+ case "staryu":
+ fallthrough
+ case "starmie":
+ fallthrough
+ case "mrmime":
+ fallthrough
+ case "scyther":
+ fallthrough
+ case "jynx":
+ fallthrough
+ case "electabuzz":
+ fallthrough
+ case "magmar":
+ fallthrough
+ case "pinsir":
+ fallthrough
+ case "tauros":
+ fallthrough
+ case "magikarp":
+ fallthrough
+ case "gyarados":
+ fallthrough
+ case "lapras":
+ fallthrough
+ case "ditto":
+ fallthrough
+ case "eevee":
+ fallthrough
+ case "vaporeon":
+ fallthrough
+ case "jolteon":
+ fallthrough
+ case "flareon":
+ fallthrough
+ case "porygon":
+ fallthrough
+ case "omanyte":
+ fallthrough
+ case "omastar":
+ fallthrough
+ case "kabuto":
+ fallthrough
+ case "kabutops":
+ fallthrough
+ case "aerodactyl":
+ fallthrough
+ case "snorlax":
+ fallthrough
+ case "articuno":
+ fallthrough
+ case "zapdos":
+ fallthrough
+ case "moltres":
+ fallthrough
+ case "dratini":
+ fallthrough
+ case "dragonair":
+ fallthrough
+ case "dragonite":
+ fallthrough
+ case "mewtwo":
+ fallthrough
+ case "mew":
+ fallthrough
+ case "chikorita":
+ fallthrough
+ case "bayleef":
+ fallthrough
+ case "meganium":
+ fallthrough
+ case "cyndaquil":
+ fallthrough
+ case "quilava":
+ fallthrough
+ case "typhlosion":
+ fallthrough
+ case "totodile":
+ fallthrough
+ case "croconaw":
+ fallthrough
+ case "feraligatr":
+ fallthrough
+ case "sentret":
+ fallthrough
+ case "furret":
+ fallthrough
+ case "hoothoot":
+ fallthrough
+ case "noctowl":
+ fallthrough
+ case "ledyba":
+ fallthrough
+ case "ledian":
+ fallthrough
+ case "spinarak":
+ fallthrough
+ case "ariados":
+ fallthrough
+ case "crobat":
+ fallthrough
+ case "chinchou":
+ fallthrough
+ case "lanturn":
+ fallthrough
+ case "pichu":
+ fallthrough
+ case "cleffa":
+ fallthrough
+ case "igglybuff":
+ fallthrough
+ case "togepi":
+ fallthrough
+ case "togetic":
+ fallthrough
+ case "natu":
+ fallthrough
+ case "xatu":
+ fallthrough
+ case "mareep":
+ fallthrough
+ case "flaaffy":
+ fallthrough
+ case "ampharos":
+ fallthrough
+ case "bellossom":
+ fallthrough
+ case "marill":
+ fallthrough
+ case "azumarill":
+ fallthrough
+ case "sudowoodo":
+ fallthrough
+ case "politoed":
+ fallthrough
+ case "hoppip":
+ fallthrough
+ case "skiploom":
+ fallthrough
+ case "jumpluff":
+ fallthrough
+ case "aipom":
+ fallthrough
+ case "sunkern":
+ fallthrough
+ case "sunflora":
+ fallthrough
+ case "yanma":
+ fallthrough
+ case "wooper":
+ fallthrough
+ case "quagsire":
+ fallthrough
+ case "espeon":
+ fallthrough
+ case "umbreon":
+ fallthrough
+ case "murkrow":
+ fallthrough
+ case "slowking":
+ fallthrough
+ case "misdreavus":
+ fallthrough
+ case "unown":
+ fallthrough
+ case "wobbuffet":
+ fallthrough
+ case "girafarig":
+ fallthrough
+ case "pineco":
+ fallthrough
+ case "forretress":
+ fallthrough
+ case "dunsparce":
+ fallthrough
+ case "gligar":
+ fallthrough
+ case "steelix":
+ fallthrough
+ case "snubbull":
+ fallthrough
+ case "granbull":
+ fallthrough
+ case "qwilfish":
+ fallthrough
+ case "scizor":
+ fallthrough
+ case "shuckle":
+ fallthrough
+ case "heracross":
+ fallthrough
+ case "sneasel":
+ fallthrough
+ case "teddiursa":
+ fallthrough
+ case "ursaring":
+ fallthrough
+ case "slugma":
+ fallthrough
+ case "magcargo":
+ fallthrough
+ case "swinub":
+ fallthrough
+ case "piloswine":
+ fallthrough
+ case "corsola":
+ fallthrough
+ case "remoraid":
+ fallthrough
+ case "octillery":
+ fallthrough
+ case "delibird":
+ fallthrough
+ case "mantine":
+ fallthrough
+ case "skarmory":
+ fallthrough
+ case "houndour":
+ fallthrough
+ case "houndoom":
+ fallthrough
+ case "kingdra":
+ fallthrough
+ case "phanpy":
+ fallthrough
+ case "donphan":
+ fallthrough
+ case "porygon2":
+ fallthrough
+ case "stantler":
+ fallthrough
+ case "smeargle":
+ fallthrough
+ case "tyrogue":
+ fallthrough
+ case "hitmontop":
+ fallthrough
+ case "smoochum":
+ fallthrough
+ case "elekid":
+ fallthrough
+ case "magby":
+ fallthrough
+ case "miltank":
+ fallthrough
+ case "blissey":
+ fallthrough
+ case "raikou":
+ fallthrough
+ case "entei":
+ fallthrough
+ case "suicune":
+ fallthrough
+ case "larvitar":
+ fallthrough
+ case "pupitar":
+ fallthrough
+ case "tyranitar":
+ fallthrough
+ case "lugia":
+ fallthrough
+ case "ho-oh":
+ fallthrough
+ case "celebi":
+ fallthrough
+ case "treecko":
+ fallthrough
+ case "grovyle":
+ fallthrough
+ case "sceptile":
+ fallthrough
+ case "torchic":
+ fallthrough
+ case "combusken":
+ fallthrough
+ case "blaziken":
+ fallthrough
+ case "mudkip":
+ fallthrough
+ case "marshtomp":
+ fallthrough
+ case "swampert":
+ fallthrough
+ case "poochyena":
+ fallthrough
+ case "mightyena":
+ fallthrough
+ case "zigzagoon":
+ fallthrough
+ case "linoone":
+ fallthrough
+ case "wurmple":
+ fallthrough
+ case "silcoon":
+ fallthrough
+ case "beautifly":
+ fallthrough
+ case "cascoon":
+ fallthrough
+ case "dustox":
+ fallthrough
+ case "lotad":
+ fallthrough
+ case "lombre":
+ fallthrough
+ case "ludicolo":
+ fallthrough
+ case "seedot":
+ fallthrough
+ case "nuzleaf":
+ fallthrough
+ case "shiftry":
+ fallthrough
+ case "taillow":
+ fallthrough
+ case "swellow":
+ fallthrough
+ case "wingull":
+ fallthrough
+ case "pelipper":
+ fallthrough
+ case "ralts":
+ fallthrough
+ case "kirlia":
+ fallthrough
+ case "gardevoir":
+ fallthrough
+ case "surskit":
+ fallthrough
+ case "masquerain":
+ fallthrough
+ case "shroomish":
+ fallthrough
+ case "breloom":
+ fallthrough
+ case "slakoth":
+ fallthrough
+ case "vigoroth":
+ fallthrough
+ case "slaking":
+ fallthrough
+ case "nincada":
+ fallthrough
+ case "ninjask":
+ fallthrough
+ case "shedinja":
+ fallthrough
+ case "whismur":
+ fallthrough
+ case "loudred":
+ fallthrough
+ case "exploud":
+ fallthrough
+ case "makuhita":
+ fallthrough
+ case "hariyama":
+ fallthrough
+ case "azurill":
+ fallthrough
+ case "nosepass":
+ fallthrough
+ case "skitty":
+ fallthrough
+ case "delcatty":
+ fallthrough
+ case "sableye":
+ fallthrough
+ case "mawile":
+ fallthrough
+ case "aron":
+ fallthrough
+ case "lairon":
+ fallthrough
+ case "aggron":
+ fallthrough
+ case "meditite":
+ fallthrough
+ case "medicham":
+ fallthrough
+ case "electrike":
+ fallthrough
+ case "manectric":
+ fallthrough
+ case "plusle":
+ fallthrough
+ case "minun":
+ fallthrough
+ case "volbeat":
+ fallthrough
+ case "illumise":
+ fallthrough
+ case "roselia":
+ fallthrough
+ case "gulpin":
+ fallthrough
+ case "swalot":
+ fallthrough
+ case "carvanha":
+ fallthrough
+ case "sharpedo":
+ fallthrough
+ case "wailmer":
+ fallthrough
+ case "wailord":
+ fallthrough
+ case "numel":
+ fallthrough
+ case "camerupt":
+ fallthrough
+ case "torkoal":
+ fallthrough
+ case "spoink":
+ fallthrough
+ case "grumpig":
+ fallthrough
+ case "spinda":
+ fallthrough
+ case "trapinch":
+ fallthrough
+ case "vibrava":
+ fallthrough
+ case "flygon":
+ fallthrough
+ case "cacnea":
+ fallthrough
+ case "cacturne":
+ fallthrough
+ case "swablu":
+ fallthrough
+ case "altaria":
+ fallthrough
+ case "zangoose":
+ fallthrough
+ case "seviper":
+ fallthrough
+ case "lunatone":
+ fallthrough
+ case "solrock":
+ fallthrough
+ case "barboach":
+ fallthrough
+ case "whiscash":
+ fallthrough
+ case "corphish":
+ fallthrough
+ case "crawdaunt":
+ fallthrough
+ case "baltoy":
+ fallthrough
+ case "claydol":
+ fallthrough
+ case "lileep":
+ fallthrough
+ case "cradily":
+ fallthrough
+ case "anorith":
+ fallthrough
+ case "armaldo":
+ fallthrough
+ case "feebas":
+ fallthrough
+ case "milotic":
+ fallthrough
+ case "castform":
+ fallthrough
+ case "kecleon":
+ fallthrough
+ case "shuppet":
+ fallthrough
+ case "banette":
+ fallthrough
+ case "duskull":
+ fallthrough
+ case "dusclops":
+ fallthrough
+ case "tropius":
+ fallthrough
+ case "chimecho":
+ fallthrough
+ case "absol":
+ fallthrough
+ case "wynaut":
+ fallthrough
+ case "snorunt":
+ fallthrough
+ case "glalie":
+ fallthrough
+ case "spheal":
+ fallthrough
+ case "sealeo":
+ fallthrough
+ case "walrein":
+ fallthrough
+ case "clamperl":
+ fallthrough
+ case "huntail":
+ fallthrough
+ case "gorebyss":
+ fallthrough
+ case "relicanth":
+ fallthrough
+ case "luvdisc":
+ fallthrough
+ case "bagon":
+ fallthrough
+ case "shelgon":
+ fallthrough
+ case "salamence":
+ fallthrough
+ case "beldum":
+ fallthrough
+ case "metang":
+ fallthrough
+ case "metagross":
+ fallthrough
+ case "regirock":
+ fallthrough
+ case "regice":
+ fallthrough
+ case "registeel":
+ fallthrough
+ case "latias":
+ fallthrough
+ case "latios":
+ fallthrough
+ case "kyogre":
+ fallthrough
+ case "groudon":
+ fallthrough
+ case "rayquaza":
+ fallthrough
+ case "jirachi":
+ fallthrough
+ case "deoxys":
+ fallthrough
+ case "turtwig":
+ fallthrough
+ case "grotle":
+ fallthrough
+ case "torterra":
+ fallthrough
+ case "chimchar":
+ fallthrough
+ case "monferno":
+ fallthrough
+ case "infernape":
+ fallthrough
+ case "piplup":
+ fallthrough
+ case "prinplup":
+ fallthrough
+ case "empoleon":
+ fallthrough
+ case "starly":
+ fallthrough
+ case "staravia":
+ fallthrough
+ case "staraptor":
+ fallthrough
+ case "bidoof":
+ fallthrough
+ case "bibarel":
+ fallthrough
+ case "kricketot":
+ fallthrough
+ case "kricketune":
+ fallthrough
+ case "shinx":
+ fallthrough
+ case "luxio":
+ fallthrough
+ case "luxray":
+ fallthrough
+ case "budew":
+ fallthrough
+ case "roserade":
+ fallthrough
+ case "cranidos":
+ fallthrough
+ case "rampardos":
+ fallthrough
+ case "shieldon":
+ fallthrough
+ case "bastiodon":
+ fallthrough
+ case "burmy":
+ fallthrough
+ case "wormadam":
+ fallthrough
+ case "mothim":
+ fallthrough
+ case "combee":
+ fallthrough
+ case "vespiquen":
+ fallthrough
+ case "pachirisu":
+ fallthrough
+ case "buizel":
+ fallthrough
+ case "floatzel":
+ fallthrough
+ case "cherubi":
+ fallthrough
+ case "cherrim":
+ fallthrough
+ case "shellos":
+ fallthrough
+ case "gastrodon":
+ fallthrough
+ case "ambipom":
+ fallthrough
+ case "drifloon":
+ fallthrough
+ case "drifblim":
+ fallthrough
+ case "buneary":
+ fallthrough
+ case "lopunny":
+ fallthrough
+ case "mismagius":
+ fallthrough
+ case "honchkrow":
+ fallthrough
+ case "glameow":
+ fallthrough
+ case "purugly":
+ fallthrough
+ case "chingling":
+ fallthrough
+ case "stunky":
+ fallthrough
+ case "skuntank":
+ fallthrough
+ case "bronzor":
+ fallthrough
+ case "bronzong":
+ fallthrough
+ case "bonsly":
+ fallthrough
+ case "mimejr":
+ fallthrough
+ case "happiny":
+ fallthrough
+ case "chatot":
+ fallthrough
+ case "spiritomb":
+ fallthrough
+ case "gible":
+ fallthrough
+ case "gabite":
+ fallthrough
+ case "garchomp":
+ fallthrough
+ case "munchlax":
+ fallthrough
+ case "riolu":
+ fallthrough
+ case "lucario":
+ fallthrough
+ case "hippopotas":
+ fallthrough
+ case "hippowdon":
+ fallthrough
+ case "skorupi":
+ fallthrough
+ case "drapion":
+ fallthrough
+ case "croagunk":
+ fallthrough
+ case "toxicroak":
+ fallthrough
+ case "carnivine":
+ fallthrough
+ case "finneon":
+ fallthrough
+ case "lumineon":
+ fallthrough
+ case "mantyke":
+ fallthrough
+ case "snover":
+ fallthrough
+ case "abomasnow":
+ fallthrough
+ case "weavile":
+ fallthrough
+ case "magnezone":
+ fallthrough
+ case "lickilicky":
+ fallthrough
+ case "rhyperior":
+ fallthrough
+ case "tangrowth":
+ fallthrough
+ case "electivire":
+ fallthrough
+ case "magmortar":
+ fallthrough
+ case "togekiss":
+ fallthrough
+ case "yanmega":
+ fallthrough
+ case "leafeon":
+ fallthrough
+ case "glaceon":
+ fallthrough
+ case "gliscor":
+ fallthrough
+ case "mamoswine":
+ fallthrough
+ case "porygon-z":
+ fallthrough
+ case "gallade":
+ fallthrough
+ case "probopass":
+ fallthrough
+ case "dusknoir":
+ fallthrough
+ case "froslass":
+ fallthrough
+ case "rotom":
+ fallthrough
+ case "uxie":
+ fallthrough
+ case "mesprit":
+ fallthrough
+ case "azelf":
+ fallthrough
+ case "dialga":
+ fallthrough
+ case "palkia":
+ fallthrough
+ case "heatran":
+ fallthrough
+ case "regigigas":
+ fallthrough
+ case "giratina":
+ fallthrough
+ case "cresselia":
+ fallthrough
+ case "phione":
+ fallthrough
+ case "manaphy":
+ fallthrough
+ case "darkrai":
+ fallthrough
+ case "shaymin":
+ fallthrough
+ case "arceus":
+ fallthrough
+ case "victini":
+ fallthrough
+ case "snivy":
+ fallthrough
+ case "servine":
+ fallthrough
+ case "serperior":
+ fallthrough
+ case "tepig":
+ fallthrough
+ case "pignite":
+ fallthrough
+ case "emboar":
+ fallthrough
+ case "oshawott":
+ fallthrough
+ case "dewott":
+ fallthrough
+ case "samurott":
+ fallthrough
+ case "patrat":
+ fallthrough
+ case "watchog":
+ fallthrough
+ case "lillipup":
+ fallthrough
+ case "herdier":
+ fallthrough
+ case "stoutland":
+ fallthrough
+ case "purrloin":
+ fallthrough
+ case "liepard":
+ fallthrough
+ case "pansage":
+ fallthrough
+ case "simisage":
+ fallthrough
+ case "pansear":
+ fallthrough
+ case "simisear":
+ fallthrough
+ case "panpour":
+ fallthrough
+ case "simipour":
+ fallthrough
+ case "munna":
+ fallthrough
+ case "musharna":
+ fallthrough
+ case "pidove":
+ fallthrough
+ case "tranquill":
+ fallthrough
+ case "unfezant":
+ fallthrough
+ case "blitzle":
+ fallthrough
+ case "zebstrika":
+ fallthrough
+ case "roggenrola":
+ fallthrough
+ case "boldore":
+ fallthrough
+ case "gigalith":
+ fallthrough
+ case "woobat":
+ fallthrough
+ case "swoobat":
+ fallthrough
+ case "drilbur":
+ fallthrough
+ case "excadrill":
+ fallthrough
+ case "audino":
+ fallthrough
+ case "timburr":
+ fallthrough
+ case "gurdurr":
+ fallthrough
+ case "conkeldurr":
+ fallthrough
+ case "tympole":
+ fallthrough
+ case "palpitoad":
+ fallthrough
+ case "seismitoad":
+ fallthrough
+ case "throh":
+ fallthrough
+ case "sawk":
+ fallthrough
+ case "sewaddle":
+ fallthrough
+ case "swadloon":
+ fallthrough
+ case "leavanny":
+ fallthrough
+ case "venipede":
+ fallthrough
+ case "whirlipede":
+ fallthrough
+ case "scolipede":
+ fallthrough
+ case "cottonee":
+ fallthrough
+ case "whimsicott":
+ fallthrough
+ case "petilil":
+ fallthrough
+ case "lilligant":
+ fallthrough
+ case "basculin":
+ fallthrough
+ case "sandile":
+ fallthrough
+ case "krokorok":
+ fallthrough
+ case "krookodile":
+ fallthrough
+ case "darumaka":
+ fallthrough
+ case "darmanitan":
+ fallthrough
+ case "maractus":
+ fallthrough
+ case "dwebble":
+ fallthrough
+ case "crustle":
+ fallthrough
+ case "scraggy":
+ fallthrough
+ case "scrafty":
+ fallthrough
+ case "sigilyph":
+ fallthrough
+ case "yamask":
+ fallthrough
+ case "cofagrigus":
+ fallthrough
+ case "tirtouga":
+ fallthrough
+ case "carracosta":
+ fallthrough
+ case "archen":
+ fallthrough
+ case "archeops":
+ fallthrough
+ case "trubbish":
+ fallthrough
+ case "garbodor":
+ fallthrough
+ case "zorua":
+ fallthrough
+ case "zoroark":
+ fallthrough
+ case "minccino":
+ fallthrough
+ case "cinccino":
+ fallthrough
+ case "gothita":
+ fallthrough
+ case "gothorita":
+ fallthrough
+ case "gothitelle":
+ fallthrough
+ case "solosis":
+ fallthrough
+ case "duosion":
+ fallthrough
+ case "reuniclus":
+ fallthrough
+ case "ducklett":
+ fallthrough
+ case "swanna":
+ fallthrough
+ case "vanillite":
+ fallthrough
+ case "vanillish":
+ fallthrough
+ case "vanilluxe":
+ fallthrough
+ case "deerling":
+ fallthrough
+ case "sawsbuck":
+ fallthrough
+ case "emolga":
+ fallthrough
+ case "karrablast":
+ fallthrough
+ case "escavalier":
+ fallthrough
+ case "foongus":
+ fallthrough
+ case "amoonguss":
+ fallthrough
+ case "frillish":
+ fallthrough
+ case "jellicent":
+ fallthrough
+ case "alomomola":
+ fallthrough
+ case "joltik":
+ fallthrough
+ case "galvantula":
+ fallthrough
+ case "ferroseed":
+ fallthrough
+ case "ferrothorn":
+ fallthrough
+ case "klink":
+ fallthrough
+ case "klang":
+ fallthrough
+ case "klinklang":
+ fallthrough
+ case "tynamo":
+ fallthrough
+ case "eelektrik":
+ fallthrough
+ case "eelektross":
+ fallthrough
+ case "elgyem":
+ fallthrough
+ case "beheeyem":
+ fallthrough
+ case "litwick":
+ fallthrough
+ case "lampent":
+ fallthrough
+ case "chandelure":
+ fallthrough
+ case "axew":
+ fallthrough
+ case "fraxure":
+ fallthrough
+ case "haxorus":
+ fallthrough
+ case "cubchoo":
+ fallthrough
+ case "beartic":
+ fallthrough
+ case "cryogonal":
+ fallthrough
+ case "shelmet":
+ fallthrough
+ case "accelgor":
+ fallthrough
+ case "stunfisk":
+ fallthrough
+ case "mienfoo":
+ fallthrough
+ case "mienshao":
+ fallthrough
+ case "druddigon":
+ fallthrough
+ case "golett":
+ fallthrough
+ case "golurk":
+ fallthrough
+ case "pawniard":
+ fallthrough
+ case "bisharp":
+ fallthrough
+ case "bouffalant":
+ fallthrough
+ case "rufflet":
+ fallthrough
+ case "braviary":
+ fallthrough
+ case "vullaby":
+ fallthrough
+ case "mandibuzz":
+ fallthrough
+ case "heatmor":
+ fallthrough
+ case "durant":
+ fallthrough
+ case "deino":
+ fallthrough
+ case "zweilous":
+ fallthrough
+ case "hydreigon":
+ fallthrough
+ case "larvesta":
+ fallthrough
+ case "volcarona":
+ fallthrough
+ case "cobalion":
+ fallthrough
+ case "terrakion":
+ fallthrough
+ case "virizion":
+ fallthrough
+ case "tornadus":
+ fallthrough
+ case "thundurus":
+ fallthrough
+ case "reshiram":
+ fallthrough
+ case "zekrom":
+ fallthrough
+ case "landorus":
+ fallthrough
+ case "kyurem":
+ fallthrough
+ case "keldeo":
+ fallthrough
+ case "meloetta":
+ fallthrough
+ case "genesect":
+ fallthrough
+ case "chespin":
+ fallthrough
+ case "quilladin":
+ fallthrough
+ case "chesnaught":
+ fallthrough
+ case "fennekin":
+ fallthrough
+ case "braixen":
+ fallthrough
+ case "delphox":
+ fallthrough
+ case "froakie":
+ fallthrough
+ case "frogadier":
+ fallthrough
+ case "greninja":
+ fallthrough
+ case "bunnelby":
+ fallthrough
+ case "diggersby":
+ fallthrough
+ case "fletchling":
+ fallthrough
+ case "fletchinder":
+ fallthrough
+ case "talonflame":
+ fallthrough
+ case "scatterbug":
+ fallthrough
+ case "spewpa":
+ fallthrough
+ case "vivillon":
+ fallthrough
+ case "litleo":
+ fallthrough
+ case "pyroar":
+ fallthrough
+ case "flabebe":
+ fallthrough
+ case "floette":
+ fallthrough
+ case "florges":
+ fallthrough
+ case "skiddo":
+ fallthrough
+ case "gogoat":
+ fallthrough
+ case "pancham":
+ fallthrough
+ case "pangoro":
+ fallthrough
+ case "furfrou":
+ fallthrough
+ case "espurr":
+ fallthrough
+ case "meowstic":
+ fallthrough
+ case "honedge":
+ fallthrough
+ case "doublade":
+ fallthrough
+ case "aegislash":
+ fallthrough
+ case "spritzee":
+ fallthrough
+ case "aromatisse":
+ fallthrough
+ case "swirlix":
+ fallthrough
+ case "slurpuff":
+ fallthrough
+ case "inkay":
+ fallthrough
+ case "malamar":
+ fallthrough
+ case "binacle":
+ fallthrough
+ case "barbaracle":
+ fallthrough
+ case "skrelp":
+ fallthrough
+ case "dragalge":
+ fallthrough
+ case "clauncher":
+ fallthrough
+ case "clawitzer":
+ fallthrough
+ case "helioptile":
+ fallthrough
+ case "heliolisk":
+ fallthrough
+ case "tyrunt":
+ fallthrough
+ case "tyrantrum":
+ fallthrough
+ case "amaura":
+ fallthrough
+ case "aurorus":
+ fallthrough
+ case "sylveon":
+ fallthrough
+ case "hawlucha":
+ fallthrough
+ case "dedenne":
+ fallthrough
+ case "carbink":
+ fallthrough
+ case "goomy":
+ fallthrough
+ case "sliggoo":
+ fallthrough
+ case "goodra":
+ fallthrough
+ case "klefki":
+ fallthrough
+ case "phantump":
+ fallthrough
+ case "trevenant":
+ fallthrough
+ case "pumpkaboo":
+ fallthrough
+ case "gourgeist":
+ fallthrough
+ case "bergmite":
+ fallthrough
+ case "avalugg":
+ fallthrough
+ case "noibat":
+ fallthrough
+ case "noivern":
+ fallthrough
+ case "xerneas":
+ fallthrough
+ case "yveltal":
+ fallthrough
+ case "zygarde":
+ fallthrough
+ case "diancie":
+ fallthrough
+ case "hoopa":
+ fallthrough
+ case "volcanion":
+ fallthrough
+ case "rowlet":
+ fallthrough
+ case "dartrix":
+ fallthrough
+ case "decidueye":
+ fallthrough
+ case "litten":
+ fallthrough
+ case "torracat":
+ fallthrough
+ case "incineroar":
+ fallthrough
+ case "popplio":
+ fallthrough
+ case "brionne":
+ fallthrough
+ case "primarina":
+ fallthrough
+ case "pikipek":
+ fallthrough
+ case "trumbeak":
+ fallthrough
+ case "toucannon":
+ fallthrough
+ case "yungoos":
+ fallthrough
+ case "gumshoos":
+ fallthrough
+ case "grubbin":
+ fallthrough
+ case "charjabug":
+ fallthrough
+ case "vikavolt":
+ fallthrough
+ case "crabrawler":
+ fallthrough
+ case "crabominable":
+ fallthrough
+ case "oricorio":
+ fallthrough
+ case "cutiefly":
+ fallthrough
+ case "ribombee":
+ fallthrough
+ case "rockruff":
+ fallthrough
+ case "lycanroc":
+ fallthrough
+ case "wishiwashi":
+ fallthrough
+ case "mareanie":
+ fallthrough
+ case "toxapex":
+ fallthrough
+ case "mudbray":
+ fallthrough
+ case "mudsdale":
+ fallthrough
+ case "dewpider":
+ fallthrough
+ case "araquanid":
+ fallthrough
+ case "fomantis":
+ fallthrough
+ case "lurantis":
+ fallthrough
+ case "morelull":
+ fallthrough
+ case "shiinotic":
+ fallthrough
+ case "salandit":
+ fallthrough
+ case "salazzle":
+ fallthrough
+ case "stufful":
+ fallthrough
+ case "bewear":
+ fallthrough
+ case "bounsweet":
+ fallthrough
+ case "steenee":
+ fallthrough
+ case "tsareena":
+ fallthrough
+ case "comfey":
+ fallthrough
+ case "oranguru":
+ fallthrough
+ case "passimian":
+ fallthrough
+ case "wimpod":
+ fallthrough
+ case "golisopod":
+ fallthrough
+ case "sandygast":
+ fallthrough
+ case "palossand":
+ fallthrough
+ case "pyukumuku":
+ fallthrough
+ case "typenull":
+ fallthrough
+ case "silvally":
+ fallthrough
+ case "minior":
+ fallthrough
+ case "komala":
+ fallthrough
+ case "turtonator":
+ fallthrough
+ case "togedemaru":
+ fallthrough
+ case "mimikyu":
+ fallthrough
+ case "bruxish":
+ fallthrough
+ case "drampa":
+ fallthrough
+ case "dhelmise":
+ fallthrough
+ case "jangmo-o":
+ fallthrough
+ case "hakamo-o":
+ fallthrough
+ case "kommo-o":
+ fallthrough
+ case "tapukoko":
+ fallthrough
+ case "tapulele":
+ fallthrough
+ case "tapubulu":
+ fallthrough
+ case "tapufini":
+ fallthrough
+ case "cosmog":
+ fallthrough
+ case "cosmoem":
+ fallthrough
+ case "solgaleo":
+ fallthrough
+ case "lunala":
+ fallthrough
+ case "nihilego":
+ fallthrough
+ case "buzzwole":
+ fallthrough
+ case "pheromosa":
+ fallthrough
+ case "xurkitree":
+ fallthrough
+ case "celesteela":
+ fallthrough
+ case "kartana":
+ fallthrough
+ case "guzzlord":
+ fallthrough
+ case "necrozma":
+ fallthrough
+ case "magearna":
+ fallthrough
+ case "marshadow":
+ fallthrough
+ case "poipole":
+ fallthrough
+ case "naganadel":
+ fallthrough
+ case "stakataka":
+ fallthrough
+ case "blacephalon":
+ fallthrough
+ case "zeraora":
+ fallthrough
+ case "meltan":
+ fallthrough
+ case "melmetal":
+ fallthrough
+ case "grookey":
+ fallthrough
+ case "thwackey":
+ fallthrough
+ case "rillaboom":
+ fallthrough
+ case "scorbunny":
+ fallthrough
+ case "raboot":
+ fallthrough
+ case "cinderace":
+ fallthrough
+ case "sobble":
+ fallthrough
+ case "drizzile":
+ fallthrough
+ case "inteleon":
+ fallthrough
+ case "skwovet":
+ fallthrough
+ case "greedent":
+ fallthrough
+ case "rookidee":
+ fallthrough
+ case "corvisquire":
+ fallthrough
+ case "corviknight":
+ fallthrough
+ case "blipbug":
+ fallthrough
+ case "dottler":
+ fallthrough
+ case "orbeetle":
+ fallthrough
+ case "nickit":
+ fallthrough
+ case "thievul":
+ fallthrough
+ case "gossifleur":
+ fallthrough
+ case "eldegoss":
+ fallthrough
+ case "wooloo":
+ fallthrough
+ case "dubwool":
+ fallthrough
+ case "chewtle":
+ fallthrough
+ case "drednaw":
+ fallthrough
+ case "yamper":
+ fallthrough
+ case "boltund":
+ fallthrough
+ case "rolycoly":
+ fallthrough
+ case "carkol":
+ fallthrough
+ case "coalossal":
+ fallthrough
+ case "applin":
+ fallthrough
+ case "flapple":
+ fallthrough
+ case "appletun":
+ fallthrough
+ case "silicobra":
+ fallthrough
+ case "sandaconda":
+ fallthrough
+ case "cramorant":
+ fallthrough
+ case "arrokuda":
+ fallthrough
+ case "barraskewda":
+ fallthrough
+ case "toxel":
+ fallthrough
+ case "toxtricity":
+ fallthrough
+ case "sizzlipede":
+ fallthrough
+ case "centiskorch":
+ fallthrough
+ case "clobbopus":
+ fallthrough
+ case "grapploct":
+ fallthrough
+ case "sinistea":
+ fallthrough
+ case "polteageist":
+ fallthrough
+ case "hatenna":
+ fallthrough
+ case "hattrem":
+ fallthrough
+ case "hatterene":
+ fallthrough
+ case "impidimp":
+ fallthrough
+ case "morgrem":
+ fallthrough
+ case "grimmsnarl":
+ fallthrough
+ case "obstagoon":
+ fallthrough
+ case "perrserker":
+ fallthrough
+ case "cursola":
+ fallthrough
+ case "sirfetchd":
+ fallthrough
+ case "mrrime":
+ fallthrough
+ case "runerigus":
+ fallthrough
+ case "milcery":
+ fallthrough
+ case "alcremie":
+ fallthrough
+ case "falinks":
+ fallthrough
+ case "pincurchin":
+ fallthrough
+ case "snom":
+ fallthrough
+ case "frosmoth":
+ fallthrough
+ case "stonjourner":
+ fallthrough
+ case "eiscue":
+ fallthrough
+ case "indeedee":
+ fallthrough
+ case "morpeko":
+ fallthrough
+ case "cufant":
+ fallthrough
+ case "copperajah":
+ fallthrough
+ case "dracozolt":
+ fallthrough
+ case "arctozolt":
+ fallthrough
+ case "dracovish":
+ fallthrough
+ case "arctovish":
+ fallthrough
+ case "duraludon":
+ fallthrough
+ case "dreepy":
+ fallthrough
+ case "drakloak":
+ fallthrough
+ case "dragapult":
+ fallthrough
+ case "zacian":
+ fallthrough
+ case "zamazenta":
+ fallthrough
+ case "eternatus":
+ fallthrough
+ case "kubfu":
+ fallthrough
+ case "urshifu":
+ fallthrough
+ case "zarude":
+ fallthrough
+ case "regieleki":
+ fallthrough
+ case "regidrago":
+ fallthrough
+ case "glastrier":
+ fallthrough
+ case "spectrier":
+ fallthrough
+ case "calyrex":
+ *e = SourcePokeapiPokemonName(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourcePokeapiPokemonName: %v", v)
+ }
+}
+
+type Pokeapi string
+
+const (
+ PokeapiPokeapi Pokeapi = "pokeapi"
+)
+
+func (e Pokeapi) ToPointer() *Pokeapi {
+ return &e
+}
+
+func (e *Pokeapi) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pokeapi":
- *e = SourcePokeapiPokeapi(v)
+ *e = Pokeapi(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePokeapiPokeapi: %v", v)
+ return fmt.Errorf("invalid value for Pokeapi: %v", v)
}
}
type SourcePokeapi struct {
// Pokemon requested from the API.
- PokemonName string `json:"pokemon_name"`
- SourceType SourcePokeapiPokeapi `json:"sourceType"`
+ PokemonName SourcePokeapiPokemonName `json:"pokemon_name"`
+ sourceType Pokeapi `const:"pokeapi" json:"sourceType"`
+}
+
+func (s SourcePokeapi) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePokeapi) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePokeapi) GetPokemonName() SourcePokeapiPokemonName {
+ if o == nil {
+ return SourcePokeapiPokemonName("")
+ }
+ return o.PokemonName
+}
+
+func (o *SourcePokeapi) GetSourceType() Pokeapi {
+ return PokeapiPokeapi
}
diff --git a/internal/sdk/pkg/models/shared/sourcepokeapicreaterequest.go b/internal/sdk/pkg/models/shared/sourcepokeapicreaterequest.go
old mode 100755
new mode 100644
index 237a7d774..647848e39
--- a/internal/sdk/pkg/models/shared/sourcepokeapicreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepokeapicreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePokeapiCreateRequest struct {
Configuration SourcePokeapi `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePokeapiCreateRequest) GetConfiguration() SourcePokeapi {
+ if o == nil {
+ return SourcePokeapi{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePokeapiCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePokeapiCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePokeapiCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePokeapiCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepokeapiputrequest.go b/internal/sdk/pkg/models/shared/sourcepokeapiputrequest.go
old mode 100755
new mode 100644
index da456e002..12ab195ec
--- a/internal/sdk/pkg/models/shared/sourcepokeapiputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepokeapiputrequest.go
@@ -7,3 +7,24 @@ type SourcePokeapiPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePokeapiPutRequest) GetConfiguration() SourcePokeapiUpdate {
+ if o == nil {
+ return SourcePokeapiUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePokeapiPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePokeapiPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepokeapiupdate.go b/internal/sdk/pkg/models/shared/sourcepokeapiupdate.go
old mode 100755
new mode 100644
index 099347fe4..9471828fb
--- a/internal/sdk/pkg/models/shared/sourcepokeapiupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepokeapiupdate.go
@@ -2,7 +2,2735 @@
package shared
+import (
+ "encoding/json"
+ "fmt"
+)
+
+// PokemonName - Pokemon requested from the API.
+type PokemonName string
+
+const (
+ PokemonNameBulbasaur PokemonName = "bulbasaur"
+ PokemonNameIvysaur PokemonName = "ivysaur"
+ PokemonNameVenusaur PokemonName = "venusaur"
+ PokemonNameCharmander PokemonName = "charmander"
+ PokemonNameCharmeleon PokemonName = "charmeleon"
+ PokemonNameCharizard PokemonName = "charizard"
+ PokemonNameSquirtle PokemonName = "squirtle"
+ PokemonNameWartortle PokemonName = "wartortle"
+ PokemonNameBlastoise PokemonName = "blastoise"
+ PokemonNameCaterpie PokemonName = "caterpie"
+ PokemonNameMetapod PokemonName = "metapod"
+ PokemonNameButterfree PokemonName = "butterfree"
+ PokemonNameWeedle PokemonName = "weedle"
+ PokemonNameKakuna PokemonName = "kakuna"
+ PokemonNameBeedrill PokemonName = "beedrill"
+ PokemonNamePidgey PokemonName = "pidgey"
+ PokemonNamePidgeotto PokemonName = "pidgeotto"
+ PokemonNamePidgeot PokemonName = "pidgeot"
+ PokemonNameRattata PokemonName = "rattata"
+ PokemonNameRaticate PokemonName = "raticate"
+ PokemonNameSpearow PokemonName = "spearow"
+ PokemonNameFearow PokemonName = "fearow"
+ PokemonNameEkans PokemonName = "ekans"
+ PokemonNameArbok PokemonName = "arbok"
+ PokemonNamePikachu PokemonName = "pikachu"
+ PokemonNameRaichu PokemonName = "raichu"
+ PokemonNameSandshrew PokemonName = "sandshrew"
+ PokemonNameSandslash PokemonName = "sandslash"
+ PokemonNameNidoranf PokemonName = "nidoranf"
+ PokemonNameNidorina PokemonName = "nidorina"
+ PokemonNameNidoqueen PokemonName = "nidoqueen"
+ PokemonNameNidoranm PokemonName = "nidoranm"
+ PokemonNameNidorino PokemonName = "nidorino"
+ PokemonNameNidoking PokemonName = "nidoking"
+ PokemonNameClefairy PokemonName = "clefairy"
+ PokemonNameClefable PokemonName = "clefable"
+ PokemonNameVulpix PokemonName = "vulpix"
+ PokemonNameNinetales PokemonName = "ninetales"
+ PokemonNameJigglypuff PokemonName = "jigglypuff"
+ PokemonNameWigglytuff PokemonName = "wigglytuff"
+ PokemonNameZubat PokemonName = "zubat"
+ PokemonNameGolbat PokemonName = "golbat"
+ PokemonNameOddish PokemonName = "oddish"
+ PokemonNameGloom PokemonName = "gloom"
+ PokemonNameVileplume PokemonName = "vileplume"
+ PokemonNameParas PokemonName = "paras"
+ PokemonNameParasect PokemonName = "parasect"
+ PokemonNameVenonat PokemonName = "venonat"
+ PokemonNameVenomoth PokemonName = "venomoth"
+ PokemonNameDiglett PokemonName = "diglett"
+ PokemonNameDugtrio PokemonName = "dugtrio"
+ PokemonNameMeowth PokemonName = "meowth"
+ PokemonNamePersian PokemonName = "persian"
+ PokemonNamePsyduck PokemonName = "psyduck"
+ PokemonNameGolduck PokemonName = "golduck"
+ PokemonNameMankey PokemonName = "mankey"
+ PokemonNamePrimeape PokemonName = "primeape"
+ PokemonNameGrowlithe PokemonName = "growlithe"
+ PokemonNameArcanine PokemonName = "arcanine"
+ PokemonNamePoliwag PokemonName = "poliwag"
+ PokemonNamePoliwhirl PokemonName = "poliwhirl"
+ PokemonNamePoliwrath PokemonName = "poliwrath"
+ PokemonNameAbra PokemonName = "abra"
+ PokemonNameKadabra PokemonName = "kadabra"
+ PokemonNameAlakazam PokemonName = "alakazam"
+ PokemonNameMachop PokemonName = "machop"
+ PokemonNameMachoke PokemonName = "machoke"
+ PokemonNameMachamp PokemonName = "machamp"
+ PokemonNameBellsprout PokemonName = "bellsprout"
+ PokemonNameWeepinbell PokemonName = "weepinbell"
+ PokemonNameVictreebel PokemonName = "victreebel"
+ PokemonNameTentacool PokemonName = "tentacool"
+ PokemonNameTentacruel PokemonName = "tentacruel"
+ PokemonNameGeodude PokemonName = "geodude"
+ PokemonNameGraveler PokemonName = "graveler"
+ PokemonNameGolem PokemonName = "golem"
+ PokemonNamePonyta PokemonName = "ponyta"
+ PokemonNameRapidash PokemonName = "rapidash"
+ PokemonNameSlowpoke PokemonName = "slowpoke"
+ PokemonNameSlowbro PokemonName = "slowbro"
+ PokemonNameMagnemite PokemonName = "magnemite"
+ PokemonNameMagneton PokemonName = "magneton"
+ PokemonNameFarfetchd PokemonName = "farfetchd"
+ PokemonNameDoduo PokemonName = "doduo"
+ PokemonNameDodrio PokemonName = "dodrio"
+ PokemonNameSeel PokemonName = "seel"
+ PokemonNameDewgong PokemonName = "dewgong"
+ PokemonNameGrimer PokemonName = "grimer"
+ PokemonNameMuk PokemonName = "muk"
+ PokemonNameShellder PokemonName = "shellder"
+ PokemonNameCloyster PokemonName = "cloyster"
+ PokemonNameGastly PokemonName = "gastly"
+ PokemonNameHaunter PokemonName = "haunter"
+ PokemonNameGengar PokemonName = "gengar"
+ PokemonNameOnix PokemonName = "onix"
+ PokemonNameDrowzee PokemonName = "drowzee"
+ PokemonNameHypno PokemonName = "hypno"
+ PokemonNameKrabby PokemonName = "krabby"
+ PokemonNameKingler PokemonName = "kingler"
+ PokemonNameVoltorb PokemonName = "voltorb"
+ PokemonNameElectrode PokemonName = "electrode"
+ PokemonNameExeggcute PokemonName = "exeggcute"
+ PokemonNameExeggutor PokemonName = "exeggutor"
+ PokemonNameCubone PokemonName = "cubone"
+ PokemonNameMarowak PokemonName = "marowak"
+ PokemonNameHitmonlee PokemonName = "hitmonlee"
+ PokemonNameHitmonchan PokemonName = "hitmonchan"
+ PokemonNameLickitung PokemonName = "lickitung"
+ PokemonNameKoffing PokemonName = "koffing"
+ PokemonNameWeezing PokemonName = "weezing"
+ PokemonNameRhyhorn PokemonName = "rhyhorn"
+ PokemonNameRhydon PokemonName = "rhydon"
+ PokemonNameChansey PokemonName = "chansey"
+ PokemonNameTangela PokemonName = "tangela"
+ PokemonNameKangaskhan PokemonName = "kangaskhan"
+ PokemonNameHorsea PokemonName = "horsea"
+ PokemonNameSeadra PokemonName = "seadra"
+ PokemonNameGoldeen PokemonName = "goldeen"
+ PokemonNameSeaking PokemonName = "seaking"
+ PokemonNameStaryu PokemonName = "staryu"
+ PokemonNameStarmie PokemonName = "starmie"
+ PokemonNameMrmime PokemonName = "mrmime"
+ PokemonNameScyther PokemonName = "scyther"
+ PokemonNameJynx PokemonName = "jynx"
+ PokemonNameElectabuzz PokemonName = "electabuzz"
+ PokemonNameMagmar PokemonName = "magmar"
+ PokemonNamePinsir PokemonName = "pinsir"
+ PokemonNameTauros PokemonName = "tauros"
+ PokemonNameMagikarp PokemonName = "magikarp"
+ PokemonNameGyarados PokemonName = "gyarados"
+ PokemonNameLapras PokemonName = "lapras"
+ PokemonNameDitto PokemonName = "ditto"
+ PokemonNameEevee PokemonName = "eevee"
+ PokemonNameVaporeon PokemonName = "vaporeon"
+ PokemonNameJolteon PokemonName = "jolteon"
+ PokemonNameFlareon PokemonName = "flareon"
+ PokemonNamePorygon PokemonName = "porygon"
+ PokemonNameOmanyte PokemonName = "omanyte"
+ PokemonNameOmastar PokemonName = "omastar"
+ PokemonNameKabuto PokemonName = "kabuto"
+ PokemonNameKabutops PokemonName = "kabutops"
+ PokemonNameAerodactyl PokemonName = "aerodactyl"
+ PokemonNameSnorlax PokemonName = "snorlax"
+ PokemonNameArticuno PokemonName = "articuno"
+ PokemonNameZapdos PokemonName = "zapdos"
+ PokemonNameMoltres PokemonName = "moltres"
+ PokemonNameDratini PokemonName = "dratini"
+ PokemonNameDragonair PokemonName = "dragonair"
+ PokemonNameDragonite PokemonName = "dragonite"
+ PokemonNameMewtwo PokemonName = "mewtwo"
+ PokemonNameMew PokemonName = "mew"
+ PokemonNameChikorita PokemonName = "chikorita"
+ PokemonNameBayleef PokemonName = "bayleef"
+ PokemonNameMeganium PokemonName = "meganium"
+ PokemonNameCyndaquil PokemonName = "cyndaquil"
+ PokemonNameQuilava PokemonName = "quilava"
+ PokemonNameTyphlosion PokemonName = "typhlosion"
+ PokemonNameTotodile PokemonName = "totodile"
+ PokemonNameCroconaw PokemonName = "croconaw"
+ PokemonNameFeraligatr PokemonName = "feraligatr"
+ PokemonNameSentret PokemonName = "sentret"
+ PokemonNameFurret PokemonName = "furret"
+ PokemonNameHoothoot PokemonName = "hoothoot"
+ PokemonNameNoctowl PokemonName = "noctowl"
+ PokemonNameLedyba PokemonName = "ledyba"
+ PokemonNameLedian PokemonName = "ledian"
+ PokemonNameSpinarak PokemonName = "spinarak"
+ PokemonNameAriados PokemonName = "ariados"
+ PokemonNameCrobat PokemonName = "crobat"
+ PokemonNameChinchou PokemonName = "chinchou"
+ PokemonNameLanturn PokemonName = "lanturn"
+ PokemonNamePichu PokemonName = "pichu"
+ PokemonNameCleffa PokemonName = "cleffa"
+ PokemonNameIgglybuff PokemonName = "igglybuff"
+ PokemonNameTogepi PokemonName = "togepi"
+ PokemonNameTogetic PokemonName = "togetic"
+ PokemonNameNatu PokemonName = "natu"
+ PokemonNameXatu PokemonName = "xatu"
+ PokemonNameMareep PokemonName = "mareep"
+ PokemonNameFlaaffy PokemonName = "flaaffy"
+ PokemonNameAmpharos PokemonName = "ampharos"
+ PokemonNameBellossom PokemonName = "bellossom"
+ PokemonNameMarill PokemonName = "marill"
+ PokemonNameAzumarill PokemonName = "azumarill"
+ PokemonNameSudowoodo PokemonName = "sudowoodo"
+ PokemonNamePolitoed PokemonName = "politoed"
+ PokemonNameHoppip PokemonName = "hoppip"
+ PokemonNameSkiploom PokemonName = "skiploom"
+ PokemonNameJumpluff PokemonName = "jumpluff"
+ PokemonNameAipom PokemonName = "aipom"
+ PokemonNameSunkern PokemonName = "sunkern"
+ PokemonNameSunflora PokemonName = "sunflora"
+ PokemonNameYanma PokemonName = "yanma"
+ PokemonNameWooper PokemonName = "wooper"
+ PokemonNameQuagsire PokemonName = "quagsire"
+ PokemonNameEspeon PokemonName = "espeon"
+ PokemonNameUmbreon PokemonName = "umbreon"
+ PokemonNameMurkrow PokemonName = "murkrow"
+ PokemonNameSlowking PokemonName = "slowking"
+ PokemonNameMisdreavus PokemonName = "misdreavus"
+ PokemonNameUnown PokemonName = "unown"
+ PokemonNameWobbuffet PokemonName = "wobbuffet"
+ PokemonNameGirafarig PokemonName = "girafarig"
+ PokemonNamePineco PokemonName = "pineco"
+ PokemonNameForretress PokemonName = "forretress"
+ PokemonNameDunsparce PokemonName = "dunsparce"
+ PokemonNameGligar PokemonName = "gligar"
+ PokemonNameSteelix PokemonName = "steelix"
+ PokemonNameSnubbull PokemonName = "snubbull"
+ PokemonNameGranbull PokemonName = "granbull"
+ PokemonNameQwilfish PokemonName = "qwilfish"
+ PokemonNameScizor PokemonName = "scizor"
+ PokemonNameShuckle PokemonName = "shuckle"
+ PokemonNameHeracross PokemonName = "heracross"
+ PokemonNameSneasel PokemonName = "sneasel"
+ PokemonNameTeddiursa PokemonName = "teddiursa"
+ PokemonNameUrsaring PokemonName = "ursaring"
+ PokemonNameSlugma PokemonName = "slugma"
+ PokemonNameMagcargo PokemonName = "magcargo"
+ PokemonNameSwinub PokemonName = "swinub"
+ PokemonNamePiloswine PokemonName = "piloswine"
+ PokemonNameCorsola PokemonName = "corsola"
+ PokemonNameRemoraid PokemonName = "remoraid"
+ PokemonNameOctillery PokemonName = "octillery"
+ PokemonNameDelibird PokemonName = "delibird"
+ PokemonNameMantine PokemonName = "mantine"
+ PokemonNameSkarmory PokemonName = "skarmory"
+ PokemonNameHoundour PokemonName = "houndour"
+ PokemonNameHoundoom PokemonName = "houndoom"
+ PokemonNameKingdra PokemonName = "kingdra"
+ PokemonNamePhanpy PokemonName = "phanpy"
+ PokemonNameDonphan PokemonName = "donphan"
+ PokemonNamePorygon2 PokemonName = "porygon2"
+ PokemonNameStantler PokemonName = "stantler"
+ PokemonNameSmeargle PokemonName = "smeargle"
+ PokemonNameTyrogue PokemonName = "tyrogue"
+ PokemonNameHitmontop PokemonName = "hitmontop"
+ PokemonNameSmoochum PokemonName = "smoochum"
+ PokemonNameElekid PokemonName = "elekid"
+ PokemonNameMagby PokemonName = "magby"
+ PokemonNameMiltank PokemonName = "miltank"
+ PokemonNameBlissey PokemonName = "blissey"
+ PokemonNameRaikou PokemonName = "raikou"
+ PokemonNameEntei PokemonName = "entei"
+ PokemonNameSuicune PokemonName = "suicune"
+ PokemonNameLarvitar PokemonName = "larvitar"
+ PokemonNamePupitar PokemonName = "pupitar"
+ PokemonNameTyranitar PokemonName = "tyranitar"
+ PokemonNameLugia PokemonName = "lugia"
+ PokemonNameHoOh PokemonName = "ho-oh"
+ PokemonNameCelebi PokemonName = "celebi"
+ PokemonNameTreecko PokemonName = "treecko"
+ PokemonNameGrovyle PokemonName = "grovyle"
+ PokemonNameSceptile PokemonName = "sceptile"
+ PokemonNameTorchic PokemonName = "torchic"
+ PokemonNameCombusken PokemonName = "combusken"
+ PokemonNameBlaziken PokemonName = "blaziken"
+ PokemonNameMudkip PokemonName = "mudkip"
+ PokemonNameMarshtomp PokemonName = "marshtomp"
+ PokemonNameSwampert PokemonName = "swampert"
+ PokemonNamePoochyena PokemonName = "poochyena"
+ PokemonNameMightyena PokemonName = "mightyena"
+ PokemonNameZigzagoon PokemonName = "zigzagoon"
+ PokemonNameLinoone PokemonName = "linoone"
+ PokemonNameWurmple PokemonName = "wurmple"
+ PokemonNameSilcoon PokemonName = "silcoon"
+ PokemonNameBeautifly PokemonName = "beautifly"
+ PokemonNameCascoon PokemonName = "cascoon"
+ PokemonNameDustox PokemonName = "dustox"
+ PokemonNameLotad PokemonName = "lotad"
+ PokemonNameLombre PokemonName = "lombre"
+ PokemonNameLudicolo PokemonName = "ludicolo"
+ PokemonNameSeedot PokemonName = "seedot"
+ PokemonNameNuzleaf PokemonName = "nuzleaf"
+ PokemonNameShiftry PokemonName = "shiftry"
+ PokemonNameTaillow PokemonName = "taillow"
+ PokemonNameSwellow PokemonName = "swellow"
+ PokemonNameWingull PokemonName = "wingull"
+ PokemonNamePelipper PokemonName = "pelipper"
+ PokemonNameRalts PokemonName = "ralts"
+ PokemonNameKirlia PokemonName = "kirlia"
+ PokemonNameGardevoir PokemonName = "gardevoir"
+ PokemonNameSurskit PokemonName = "surskit"
+ PokemonNameMasquerain PokemonName = "masquerain"
+ PokemonNameShroomish PokemonName = "shroomish"
+ PokemonNameBreloom PokemonName = "breloom"
+ PokemonNameSlakoth PokemonName = "slakoth"
+ PokemonNameVigoroth PokemonName = "vigoroth"
+ PokemonNameSlaking PokemonName = "slaking"
+ PokemonNameNincada PokemonName = "nincada"
+ PokemonNameNinjask PokemonName = "ninjask"
+ PokemonNameShedinja PokemonName = "shedinja"
+ PokemonNameWhismur PokemonName = "whismur"
+ PokemonNameLoudred PokemonName = "loudred"
+ PokemonNameExploud PokemonName = "exploud"
+ PokemonNameMakuhita PokemonName = "makuhita"
+ PokemonNameHariyama PokemonName = "hariyama"
+ PokemonNameAzurill PokemonName = "azurill"
+ PokemonNameNosepass PokemonName = "nosepass"
+ PokemonNameSkitty PokemonName = "skitty"
+ PokemonNameDelcatty PokemonName = "delcatty"
+ PokemonNameSableye PokemonName = "sableye"
+ PokemonNameMawile PokemonName = "mawile"
+ PokemonNameAron PokemonName = "aron"
+ PokemonNameLairon PokemonName = "lairon"
+ PokemonNameAggron PokemonName = "aggron"
+ PokemonNameMeditite PokemonName = "meditite"
+ PokemonNameMedicham PokemonName = "medicham"
+ PokemonNameElectrike PokemonName = "electrike"
+ PokemonNameManectric PokemonName = "manectric"
+ PokemonNamePlusle PokemonName = "plusle"
+ PokemonNameMinun PokemonName = "minun"
+ PokemonNameVolbeat PokemonName = "volbeat"
+ PokemonNameIllumise PokemonName = "illumise"
+ PokemonNameRoselia PokemonName = "roselia"
+ PokemonNameGulpin PokemonName = "gulpin"
+ PokemonNameSwalot PokemonName = "swalot"
+ PokemonNameCarvanha PokemonName = "carvanha"
+ PokemonNameSharpedo PokemonName = "sharpedo"
+ PokemonNameWailmer PokemonName = "wailmer"
+ PokemonNameWailord PokemonName = "wailord"
+ PokemonNameNumel PokemonName = "numel"
+ PokemonNameCamerupt PokemonName = "camerupt"
+ PokemonNameTorkoal PokemonName = "torkoal"
+ PokemonNameSpoink PokemonName = "spoink"
+ PokemonNameGrumpig PokemonName = "grumpig"
+ PokemonNameSpinda PokemonName = "spinda"
+ PokemonNameTrapinch PokemonName = "trapinch"
+ PokemonNameVibrava PokemonName = "vibrava"
+ PokemonNameFlygon PokemonName = "flygon"
+ PokemonNameCacnea PokemonName = "cacnea"
+ PokemonNameCacturne PokemonName = "cacturne"
+ PokemonNameSwablu PokemonName = "swablu"
+ PokemonNameAltaria PokemonName = "altaria"
+ PokemonNameZangoose PokemonName = "zangoose"
+ PokemonNameSeviper PokemonName = "seviper"
+ PokemonNameLunatone PokemonName = "lunatone"
+ PokemonNameSolrock PokemonName = "solrock"
+ PokemonNameBarboach PokemonName = "barboach"
+ PokemonNameWhiscash PokemonName = "whiscash"
+ PokemonNameCorphish PokemonName = "corphish"
+ PokemonNameCrawdaunt PokemonName = "crawdaunt"
+ PokemonNameBaltoy PokemonName = "baltoy"
+ PokemonNameClaydol PokemonName = "claydol"
+ PokemonNameLileep PokemonName = "lileep"
+ PokemonNameCradily PokemonName = "cradily"
+ PokemonNameAnorith PokemonName = "anorith"
+ PokemonNameArmaldo PokemonName = "armaldo"
+ PokemonNameFeebas PokemonName = "feebas"
+ PokemonNameMilotic PokemonName = "milotic"
+ PokemonNameCastform PokemonName = "castform"
+ PokemonNameKecleon PokemonName = "kecleon"
+ PokemonNameShuppet PokemonName = "shuppet"
+ PokemonNameBanette PokemonName = "banette"
+ PokemonNameDuskull PokemonName = "duskull"
+ PokemonNameDusclops PokemonName = "dusclops"
+ PokemonNameTropius PokemonName = "tropius"
+ PokemonNameChimecho PokemonName = "chimecho"
+ PokemonNameAbsol PokemonName = "absol"
+ PokemonNameWynaut PokemonName = "wynaut"
+ PokemonNameSnorunt PokemonName = "snorunt"
+ PokemonNameGlalie PokemonName = "glalie"
+ PokemonNameSpheal PokemonName = "spheal"
+ PokemonNameSealeo PokemonName = "sealeo"
+ PokemonNameWalrein PokemonName = "walrein"
+ PokemonNameClamperl PokemonName = "clamperl"
+ PokemonNameHuntail PokemonName = "huntail"
+ PokemonNameGorebyss PokemonName = "gorebyss"
+ PokemonNameRelicanth PokemonName = "relicanth"
+ PokemonNameLuvdisc PokemonName = "luvdisc"
+ PokemonNameBagon PokemonName = "bagon"
+ PokemonNameShelgon PokemonName = "shelgon"
+ PokemonNameSalamence PokemonName = "salamence"
+ PokemonNameBeldum PokemonName = "beldum"
+ PokemonNameMetang PokemonName = "metang"
+ PokemonNameMetagross PokemonName = "metagross"
+ PokemonNameRegirock PokemonName = "regirock"
+ PokemonNameRegice PokemonName = "regice"
+ PokemonNameRegisteel PokemonName = "registeel"
+ PokemonNameLatias PokemonName = "latias"
+ PokemonNameLatios PokemonName = "latios"
+ PokemonNameKyogre PokemonName = "kyogre"
+ PokemonNameGroudon PokemonName = "groudon"
+ PokemonNameRayquaza PokemonName = "rayquaza"
+ PokemonNameJirachi PokemonName = "jirachi"
+ PokemonNameDeoxys PokemonName = "deoxys"
+ PokemonNameTurtwig PokemonName = "turtwig"
+ PokemonNameGrotle PokemonName = "grotle"
+ PokemonNameTorterra PokemonName = "torterra"
+ PokemonNameChimchar PokemonName = "chimchar"
+ PokemonNameMonferno PokemonName = "monferno"
+ PokemonNameInfernape PokemonName = "infernape"
+ PokemonNamePiplup PokemonName = "piplup"
+ PokemonNamePrinplup PokemonName = "prinplup"
+ PokemonNameEmpoleon PokemonName = "empoleon"
+ PokemonNameStarly PokemonName = "starly"
+ PokemonNameStaravia PokemonName = "staravia"
+ PokemonNameStaraptor PokemonName = "staraptor"
+ PokemonNameBidoof PokemonName = "bidoof"
+ PokemonNameBibarel PokemonName = "bibarel"
+ PokemonNameKricketot PokemonName = "kricketot"
+ PokemonNameKricketune PokemonName = "kricketune"
+ PokemonNameShinx PokemonName = "shinx"
+ PokemonNameLuxio PokemonName = "luxio"
+ PokemonNameLuxray PokemonName = "luxray"
+ PokemonNameBudew PokemonName = "budew"
+ PokemonNameRoserade PokemonName = "roserade"
+ PokemonNameCranidos PokemonName = "cranidos"
+ PokemonNameRampardos PokemonName = "rampardos"
+ PokemonNameShieldon PokemonName = "shieldon"
+ PokemonNameBastiodon PokemonName = "bastiodon"
+ PokemonNameBurmy PokemonName = "burmy"
+ PokemonNameWormadam PokemonName = "wormadam"
+ PokemonNameMothim PokemonName = "mothim"
+ PokemonNameCombee PokemonName = "combee"
+ PokemonNameVespiquen PokemonName = "vespiquen"
+ PokemonNamePachirisu PokemonName = "pachirisu"
+ PokemonNameBuizel PokemonName = "buizel"
+ PokemonNameFloatzel PokemonName = "floatzel"
+ PokemonNameCherubi PokemonName = "cherubi"
+ PokemonNameCherrim PokemonName = "cherrim"
+ PokemonNameShellos PokemonName = "shellos"
+ PokemonNameGastrodon PokemonName = "gastrodon"
+ PokemonNameAmbipom PokemonName = "ambipom"
+ PokemonNameDrifloon PokemonName = "drifloon"
+ PokemonNameDrifblim PokemonName = "drifblim"
+ PokemonNameBuneary PokemonName = "buneary"
+ PokemonNameLopunny PokemonName = "lopunny"
+ PokemonNameMismagius PokemonName = "mismagius"
+ PokemonNameHonchkrow PokemonName = "honchkrow"
+ PokemonNameGlameow PokemonName = "glameow"
+ PokemonNamePurugly PokemonName = "purugly"
+ PokemonNameChingling PokemonName = "chingling"
+ PokemonNameStunky PokemonName = "stunky"
+ PokemonNameSkuntank PokemonName = "skuntank"
+ PokemonNameBronzor PokemonName = "bronzor"
+ PokemonNameBronzong PokemonName = "bronzong"
+ PokemonNameBonsly PokemonName = "bonsly"
+ PokemonNameMimejr PokemonName = "mimejr"
+ PokemonNameHappiny PokemonName = "happiny"
+ PokemonNameChatot PokemonName = "chatot"
+ PokemonNameSpiritomb PokemonName = "spiritomb"
+ PokemonNameGible PokemonName = "gible"
+ PokemonNameGabite PokemonName = "gabite"
+ PokemonNameGarchomp PokemonName = "garchomp"
+ PokemonNameMunchlax PokemonName = "munchlax"
+ PokemonNameRiolu PokemonName = "riolu"
+ PokemonNameLucario PokemonName = "lucario"
+ PokemonNameHippopotas PokemonName = "hippopotas"
+ PokemonNameHippowdon PokemonName = "hippowdon"
+ PokemonNameSkorupi PokemonName = "skorupi"
+ PokemonNameDrapion PokemonName = "drapion"
+ PokemonNameCroagunk PokemonName = "croagunk"
+ PokemonNameToxicroak PokemonName = "toxicroak"
+ PokemonNameCarnivine PokemonName = "carnivine"
+ PokemonNameFinneon PokemonName = "finneon"
+ PokemonNameLumineon PokemonName = "lumineon"
+ PokemonNameMantyke PokemonName = "mantyke"
+ PokemonNameSnover PokemonName = "snover"
+ PokemonNameAbomasnow PokemonName = "abomasnow"
+ PokemonNameWeavile PokemonName = "weavile"
+ PokemonNameMagnezone PokemonName = "magnezone"
+ PokemonNameLickilicky PokemonName = "lickilicky"
+ PokemonNameRhyperior PokemonName = "rhyperior"
+ PokemonNameTangrowth PokemonName = "tangrowth"
+ PokemonNameElectivire PokemonName = "electivire"
+ PokemonNameMagmortar PokemonName = "magmortar"
+ PokemonNameTogekiss PokemonName = "togekiss"
+ PokemonNameYanmega PokemonName = "yanmega"
+ PokemonNameLeafeon PokemonName = "leafeon"
+ PokemonNameGlaceon PokemonName = "glaceon"
+ PokemonNameGliscor PokemonName = "gliscor"
+ PokemonNameMamoswine PokemonName = "mamoswine"
+ PokemonNamePorygonZ PokemonName = "porygon-z"
+ PokemonNameGallade PokemonName = "gallade"
+ PokemonNameProbopass PokemonName = "probopass"
+ PokemonNameDusknoir PokemonName = "dusknoir"
+ PokemonNameFroslass PokemonName = "froslass"
+ PokemonNameRotom PokemonName = "rotom"
+ PokemonNameUxie PokemonName = "uxie"
+ PokemonNameMesprit PokemonName = "mesprit"
+ PokemonNameAzelf PokemonName = "azelf"
+ PokemonNameDialga PokemonName = "dialga"
+ PokemonNamePalkia PokemonName = "palkia"
+ PokemonNameHeatran PokemonName = "heatran"
+ PokemonNameRegigigas PokemonName = "regigigas"
+ PokemonNameGiratina PokemonName = "giratina"
+ PokemonNameCresselia PokemonName = "cresselia"
+ PokemonNamePhione PokemonName = "phione"
+ PokemonNameManaphy PokemonName = "manaphy"
+ PokemonNameDarkrai PokemonName = "darkrai"
+ PokemonNameShaymin PokemonName = "shaymin"
+ PokemonNameArceus PokemonName = "arceus"
+ PokemonNameVictini PokemonName = "victini"
+ PokemonNameSnivy PokemonName = "snivy"
+ PokemonNameServine PokemonName = "servine"
+ PokemonNameSerperior PokemonName = "serperior"
+ PokemonNameTepig PokemonName = "tepig"
+ PokemonNamePignite PokemonName = "pignite"
+ PokemonNameEmboar PokemonName = "emboar"
+ PokemonNameOshawott PokemonName = "oshawott"
+ PokemonNameDewott PokemonName = "dewott"
+ PokemonNameSamurott PokemonName = "samurott"
+ PokemonNamePatrat PokemonName = "patrat"
+ PokemonNameWatchog PokemonName = "watchog"
+ PokemonNameLillipup PokemonName = "lillipup"
+ PokemonNameHerdier PokemonName = "herdier"
+ PokemonNameStoutland PokemonName = "stoutland"
+ PokemonNamePurrloin PokemonName = "purrloin"
+ PokemonNameLiepard PokemonName = "liepard"
+ PokemonNamePansage PokemonName = "pansage"
+ PokemonNameSimisage PokemonName = "simisage"
+ PokemonNamePansear PokemonName = "pansear"
+ PokemonNameSimisear PokemonName = "simisear"
+ PokemonNamePanpour PokemonName = "panpour"
+ PokemonNameSimipour PokemonName = "simipour"
+ PokemonNameMunna PokemonName = "munna"
+ PokemonNameMusharna PokemonName = "musharna"
+ PokemonNamePidove PokemonName = "pidove"
+ PokemonNameTranquill PokemonName = "tranquill"
+ PokemonNameUnfezant PokemonName = "unfezant"
+ PokemonNameBlitzle PokemonName = "blitzle"
+ PokemonNameZebstrika PokemonName = "zebstrika"
+ PokemonNameRoggenrola PokemonName = "roggenrola"
+ PokemonNameBoldore PokemonName = "boldore"
+ PokemonNameGigalith PokemonName = "gigalith"
+ PokemonNameWoobat PokemonName = "woobat"
+ PokemonNameSwoobat PokemonName = "swoobat"
+ PokemonNameDrilbur PokemonName = "drilbur"
+ PokemonNameExcadrill PokemonName = "excadrill"
+ PokemonNameAudino PokemonName = "audino"
+ PokemonNameTimburr PokemonName = "timburr"
+ PokemonNameGurdurr PokemonName = "gurdurr"
+ PokemonNameConkeldurr PokemonName = "conkeldurr"
+ PokemonNameTympole PokemonName = "tympole"
+ PokemonNamePalpitoad PokemonName = "palpitoad"
+ PokemonNameSeismitoad PokemonName = "seismitoad"
+ PokemonNameThroh PokemonName = "throh"
+ PokemonNameSawk PokemonName = "sawk"
+ PokemonNameSewaddle PokemonName = "sewaddle"
+ PokemonNameSwadloon PokemonName = "swadloon"
+ PokemonNameLeavanny PokemonName = "leavanny"
+ PokemonNameVenipede PokemonName = "venipede"
+ PokemonNameWhirlipede PokemonName = "whirlipede"
+ PokemonNameScolipede PokemonName = "scolipede"
+ PokemonNameCottonee PokemonName = "cottonee"
+ PokemonNameWhimsicott PokemonName = "whimsicott"
+ PokemonNamePetilil PokemonName = "petilil"
+ PokemonNameLilligant PokemonName = "lilligant"
+ PokemonNameBasculin PokemonName = "basculin"
+ PokemonNameSandile PokemonName = "sandile"
+ PokemonNameKrokorok PokemonName = "krokorok"
+ PokemonNameKrookodile PokemonName = "krookodile"
+ PokemonNameDarumaka PokemonName = "darumaka"
+ PokemonNameDarmanitan PokemonName = "darmanitan"
+ PokemonNameMaractus PokemonName = "maractus"
+ PokemonNameDwebble PokemonName = "dwebble"
+ PokemonNameCrustle PokemonName = "crustle"
+ PokemonNameScraggy PokemonName = "scraggy"
+ PokemonNameScrafty PokemonName = "scrafty"
+ PokemonNameSigilyph PokemonName = "sigilyph"
+ PokemonNameYamask PokemonName = "yamask"
+ PokemonNameCofagrigus PokemonName = "cofagrigus"
+ PokemonNameTirtouga PokemonName = "tirtouga"
+ PokemonNameCarracosta PokemonName = "carracosta"
+ PokemonNameArchen PokemonName = "archen"
+ PokemonNameArcheops PokemonName = "archeops"
+ PokemonNameTrubbish PokemonName = "trubbish"
+ PokemonNameGarbodor PokemonName = "garbodor"
+ PokemonNameZorua PokemonName = "zorua"
+ PokemonNameZoroark PokemonName = "zoroark"
+ PokemonNameMinccino PokemonName = "minccino"
+ PokemonNameCinccino PokemonName = "cinccino"
+ PokemonNameGothita PokemonName = "gothita"
+ PokemonNameGothorita PokemonName = "gothorita"
+ PokemonNameGothitelle PokemonName = "gothitelle"
+ PokemonNameSolosis PokemonName = "solosis"
+ PokemonNameDuosion PokemonName = "duosion"
+ PokemonNameReuniclus PokemonName = "reuniclus"
+ PokemonNameDucklett PokemonName = "ducklett"
+ PokemonNameSwanna PokemonName = "swanna"
+ PokemonNameVanillite PokemonName = "vanillite"
+ PokemonNameVanillish PokemonName = "vanillish"
+ PokemonNameVanilluxe PokemonName = "vanilluxe"
+ PokemonNameDeerling PokemonName = "deerling"
+ PokemonNameSawsbuck PokemonName = "sawsbuck"
+ PokemonNameEmolga PokemonName = "emolga"
+ PokemonNameKarrablast PokemonName = "karrablast"
+ PokemonNameEscavalier PokemonName = "escavalier"
+ PokemonNameFoongus PokemonName = "foongus"
+ PokemonNameAmoonguss PokemonName = "amoonguss"
+ PokemonNameFrillish PokemonName = "frillish"
+ PokemonNameJellicent PokemonName = "jellicent"
+ PokemonNameAlomomola PokemonName = "alomomola"
+ PokemonNameJoltik PokemonName = "joltik"
+ PokemonNameGalvantula PokemonName = "galvantula"
+ PokemonNameFerroseed PokemonName = "ferroseed"
+ PokemonNameFerrothorn PokemonName = "ferrothorn"
+ PokemonNameKlink PokemonName = "klink"
+ PokemonNameKlang PokemonName = "klang"
+ PokemonNameKlinklang PokemonName = "klinklang"
+ PokemonNameTynamo PokemonName = "tynamo"
+ PokemonNameEelektrik PokemonName = "eelektrik"
+ PokemonNameEelektross PokemonName = "eelektross"
+ PokemonNameElgyem PokemonName = "elgyem"
+ PokemonNameBeheeyem PokemonName = "beheeyem"
+ PokemonNameLitwick PokemonName = "litwick"
+ PokemonNameLampent PokemonName = "lampent"
+ PokemonNameChandelure PokemonName = "chandelure"
+ PokemonNameAxew PokemonName = "axew"
+ PokemonNameFraxure PokemonName = "fraxure"
+ PokemonNameHaxorus PokemonName = "haxorus"
+ PokemonNameCubchoo PokemonName = "cubchoo"
+ PokemonNameBeartic PokemonName = "beartic"
+ PokemonNameCryogonal PokemonName = "cryogonal"
+ PokemonNameShelmet PokemonName = "shelmet"
+ PokemonNameAccelgor PokemonName = "accelgor"
+ PokemonNameStunfisk PokemonName = "stunfisk"
+ PokemonNameMienfoo PokemonName = "mienfoo"
+ PokemonNameMienshao PokemonName = "mienshao"
+ PokemonNameDruddigon PokemonName = "druddigon"
+ PokemonNameGolett PokemonName = "golett"
+ PokemonNameGolurk PokemonName = "golurk"
+ PokemonNamePawniard PokemonName = "pawniard"
+ PokemonNameBisharp PokemonName = "bisharp"
+ PokemonNameBouffalant PokemonName = "bouffalant"
+ PokemonNameRufflet PokemonName = "rufflet"
+ PokemonNameBraviary PokemonName = "braviary"
+ PokemonNameVullaby PokemonName = "vullaby"
+ PokemonNameMandibuzz PokemonName = "mandibuzz"
+ PokemonNameHeatmor PokemonName = "heatmor"
+ PokemonNameDurant PokemonName = "durant"
+ PokemonNameDeino PokemonName = "deino"
+ PokemonNameZweilous PokemonName = "zweilous"
+ PokemonNameHydreigon PokemonName = "hydreigon"
+ PokemonNameLarvesta PokemonName = "larvesta"
+ PokemonNameVolcarona PokemonName = "volcarona"
+ PokemonNameCobalion PokemonName = "cobalion"
+ PokemonNameTerrakion PokemonName = "terrakion"
+ PokemonNameVirizion PokemonName = "virizion"
+ PokemonNameTornadus PokemonName = "tornadus"
+ PokemonNameThundurus PokemonName = "thundurus"
+ PokemonNameReshiram PokemonName = "reshiram"
+ PokemonNameZekrom PokemonName = "zekrom"
+ PokemonNameLandorus PokemonName = "landorus"
+ PokemonNameKyurem PokemonName = "kyurem"
+ PokemonNameKeldeo PokemonName = "keldeo"
+ PokemonNameMeloetta PokemonName = "meloetta"
+ PokemonNameGenesect PokemonName = "genesect"
+ PokemonNameChespin PokemonName = "chespin"
+ PokemonNameQuilladin PokemonName = "quilladin"
+ PokemonNameChesnaught PokemonName = "chesnaught"
+ PokemonNameFennekin PokemonName = "fennekin"
+ PokemonNameBraixen PokemonName = "braixen"
+ PokemonNameDelphox PokemonName = "delphox"
+ PokemonNameFroakie PokemonName = "froakie"
+ PokemonNameFrogadier PokemonName = "frogadier"
+ PokemonNameGreninja PokemonName = "greninja"
+ PokemonNameBunnelby PokemonName = "bunnelby"
+ PokemonNameDiggersby PokemonName = "diggersby"
+ PokemonNameFletchling PokemonName = "fletchling"
+ PokemonNameFletchinder PokemonName = "fletchinder"
+ PokemonNameTalonflame PokemonName = "talonflame"
+ PokemonNameScatterbug PokemonName = "scatterbug"
+ PokemonNameSpewpa PokemonName = "spewpa"
+ PokemonNameVivillon PokemonName = "vivillon"
+ PokemonNameLitleo PokemonName = "litleo"
+ PokemonNamePyroar PokemonName = "pyroar"
+ PokemonNameFlabebe PokemonName = "flabebe"
+ PokemonNameFloette PokemonName = "floette"
+ PokemonNameFlorges PokemonName = "florges"
+ PokemonNameSkiddo PokemonName = "skiddo"
+ PokemonNameGogoat PokemonName = "gogoat"
+ PokemonNamePancham PokemonName = "pancham"
+ PokemonNamePangoro PokemonName = "pangoro"
+ PokemonNameFurfrou PokemonName = "furfrou"
+ PokemonNameEspurr PokemonName = "espurr"
+ PokemonNameMeowstic PokemonName = "meowstic"
+ PokemonNameHonedge PokemonName = "honedge"
+ PokemonNameDoublade PokemonName = "doublade"
+ PokemonNameAegislash PokemonName = "aegislash"
+ PokemonNameSpritzee PokemonName = "spritzee"
+ PokemonNameAromatisse PokemonName = "aromatisse"
+ PokemonNameSwirlix PokemonName = "swirlix"
+ PokemonNameSlurpuff PokemonName = "slurpuff"
+ PokemonNameInkay PokemonName = "inkay"
+ PokemonNameMalamar PokemonName = "malamar"
+ PokemonNameBinacle PokemonName = "binacle"
+ PokemonNameBarbaracle PokemonName = "barbaracle"
+ PokemonNameSkrelp PokemonName = "skrelp"
+ PokemonNameDragalge PokemonName = "dragalge"
+ PokemonNameClauncher PokemonName = "clauncher"
+ PokemonNameClawitzer PokemonName = "clawitzer"
+ PokemonNameHelioptile PokemonName = "helioptile"
+ PokemonNameHeliolisk PokemonName = "heliolisk"
+ PokemonNameTyrunt PokemonName = "tyrunt"
+ PokemonNameTyrantrum PokemonName = "tyrantrum"
+ PokemonNameAmaura PokemonName = "amaura"
+ PokemonNameAurorus PokemonName = "aurorus"
+ PokemonNameSylveon PokemonName = "sylveon"
+ PokemonNameHawlucha PokemonName = "hawlucha"
+ PokemonNameDedenne PokemonName = "dedenne"
+ PokemonNameCarbink PokemonName = "carbink"
+ PokemonNameGoomy PokemonName = "goomy"
+ PokemonNameSliggoo PokemonName = "sliggoo"
+ PokemonNameGoodra PokemonName = "goodra"
+ PokemonNameKlefki PokemonName = "klefki"
+ PokemonNamePhantump PokemonName = "phantump"
+ PokemonNameTrevenant PokemonName = "trevenant"
+ PokemonNamePumpkaboo PokemonName = "pumpkaboo"
+ PokemonNameGourgeist PokemonName = "gourgeist"
+ PokemonNameBergmite PokemonName = "bergmite"
+ PokemonNameAvalugg PokemonName = "avalugg"
+ PokemonNameNoibat PokemonName = "noibat"
+ PokemonNameNoivern PokemonName = "noivern"
+ PokemonNameXerneas PokemonName = "xerneas"
+ PokemonNameYveltal PokemonName = "yveltal"
+ PokemonNameZygarde PokemonName = "zygarde"
+ PokemonNameDiancie PokemonName = "diancie"
+ PokemonNameHoopa PokemonName = "hoopa"
+ PokemonNameVolcanion PokemonName = "volcanion"
+ PokemonNameRowlet PokemonName = "rowlet"
+ PokemonNameDartrix PokemonName = "dartrix"
+ PokemonNameDecidueye PokemonName = "decidueye"
+ PokemonNameLitten PokemonName = "litten"
+ PokemonNameTorracat PokemonName = "torracat"
+ PokemonNameIncineroar PokemonName = "incineroar"
+ PokemonNamePopplio PokemonName = "popplio"
+ PokemonNameBrionne PokemonName = "brionne"
+ PokemonNamePrimarina PokemonName = "primarina"
+ PokemonNamePikipek PokemonName = "pikipek"
+ PokemonNameTrumbeak PokemonName = "trumbeak"
+ PokemonNameToucannon PokemonName = "toucannon"
+ PokemonNameYungoos PokemonName = "yungoos"
+ PokemonNameGumshoos PokemonName = "gumshoos"
+ PokemonNameGrubbin PokemonName = "grubbin"
+ PokemonNameCharjabug PokemonName = "charjabug"
+ PokemonNameVikavolt PokemonName = "vikavolt"
+ PokemonNameCrabrawler PokemonName = "crabrawler"
+ PokemonNameCrabominable PokemonName = "crabominable"
+ PokemonNameOricorio PokemonName = "oricorio"
+ PokemonNameCutiefly PokemonName = "cutiefly"
+ PokemonNameRibombee PokemonName = "ribombee"
+ PokemonNameRockruff PokemonName = "rockruff"
+ PokemonNameLycanroc PokemonName = "lycanroc"
+ PokemonNameWishiwashi PokemonName = "wishiwashi"
+ PokemonNameMareanie PokemonName = "mareanie"
+ PokemonNameToxapex PokemonName = "toxapex"
+ PokemonNameMudbray PokemonName = "mudbray"
+ PokemonNameMudsdale PokemonName = "mudsdale"
+ PokemonNameDewpider PokemonName = "dewpider"
+ PokemonNameAraquanid PokemonName = "araquanid"
+ PokemonNameFomantis PokemonName = "fomantis"
+ PokemonNameLurantis PokemonName = "lurantis"
+ PokemonNameMorelull PokemonName = "morelull"
+ PokemonNameShiinotic PokemonName = "shiinotic"
+ PokemonNameSalandit PokemonName = "salandit"
+ PokemonNameSalazzle PokemonName = "salazzle"
+ PokemonNameStufful PokemonName = "stufful"
+ PokemonNameBewear PokemonName = "bewear"
+ PokemonNameBounsweet PokemonName = "bounsweet"
+ PokemonNameSteenee PokemonName = "steenee"
+ PokemonNameTsareena PokemonName = "tsareena"
+ PokemonNameComfey PokemonName = "comfey"
+ PokemonNameOranguru PokemonName = "oranguru"
+ PokemonNamePassimian PokemonName = "passimian"
+ PokemonNameWimpod PokemonName = "wimpod"
+ PokemonNameGolisopod PokemonName = "golisopod"
+ PokemonNameSandygast PokemonName = "sandygast"
+ PokemonNamePalossand PokemonName = "palossand"
+ PokemonNamePyukumuku PokemonName = "pyukumuku"
+ PokemonNameTypenull PokemonName = "typenull"
+ PokemonNameSilvally PokemonName = "silvally"
+ PokemonNameMinior PokemonName = "minior"
+ PokemonNameKomala PokemonName = "komala"
+ PokemonNameTurtonator PokemonName = "turtonator"
+ PokemonNameTogedemaru PokemonName = "togedemaru"
+ PokemonNameMimikyu PokemonName = "mimikyu"
+ PokemonNameBruxish PokemonName = "bruxish"
+ PokemonNameDrampa PokemonName = "drampa"
+ PokemonNameDhelmise PokemonName = "dhelmise"
+ PokemonNameJangmoO PokemonName = "jangmo-o"
+ PokemonNameHakamoO PokemonName = "hakamo-o"
+ PokemonNameKommoO PokemonName = "kommo-o"
+ PokemonNameTapukoko PokemonName = "tapukoko"
+ PokemonNameTapulele PokemonName = "tapulele"
+ PokemonNameTapubulu PokemonName = "tapubulu"
+ PokemonNameTapufini PokemonName = "tapufini"
+ PokemonNameCosmog PokemonName = "cosmog"
+ PokemonNameCosmoem PokemonName = "cosmoem"
+ PokemonNameSolgaleo PokemonName = "solgaleo"
+ PokemonNameLunala PokemonName = "lunala"
+ PokemonNameNihilego PokemonName = "nihilego"
+ PokemonNameBuzzwole PokemonName = "buzzwole"
+ PokemonNamePheromosa PokemonName = "pheromosa"
+ PokemonNameXurkitree PokemonName = "xurkitree"
+ PokemonNameCelesteela PokemonName = "celesteela"
+ PokemonNameKartana PokemonName = "kartana"
+ PokemonNameGuzzlord PokemonName = "guzzlord"
+ PokemonNameNecrozma PokemonName = "necrozma"
+ PokemonNameMagearna PokemonName = "magearna"
+ PokemonNameMarshadow PokemonName = "marshadow"
+ PokemonNamePoipole PokemonName = "poipole"
+ PokemonNameNaganadel PokemonName = "naganadel"
+ PokemonNameStakataka PokemonName = "stakataka"
+ PokemonNameBlacephalon PokemonName = "blacephalon"
+ PokemonNameZeraora PokemonName = "zeraora"
+ PokemonNameMeltan PokemonName = "meltan"
+ PokemonNameMelmetal PokemonName = "melmetal"
+ PokemonNameGrookey PokemonName = "grookey"
+ PokemonNameThwackey PokemonName = "thwackey"
+ PokemonNameRillaboom PokemonName = "rillaboom"
+ PokemonNameScorbunny PokemonName = "scorbunny"
+ PokemonNameRaboot PokemonName = "raboot"
+ PokemonNameCinderace PokemonName = "cinderace"
+ PokemonNameSobble PokemonName = "sobble"
+ PokemonNameDrizzile PokemonName = "drizzile"
+ PokemonNameInteleon PokemonName = "inteleon"
+ PokemonNameSkwovet PokemonName = "skwovet"
+ PokemonNameGreedent PokemonName = "greedent"
+ PokemonNameRookidee PokemonName = "rookidee"
+ PokemonNameCorvisquire PokemonName = "corvisquire"
+ PokemonNameCorviknight PokemonName = "corviknight"
+ PokemonNameBlipbug PokemonName = "blipbug"
+ PokemonNameDottler PokemonName = "dottler"
+ PokemonNameOrbeetle PokemonName = "orbeetle"
+ PokemonNameNickit PokemonName = "nickit"
+ PokemonNameThievul PokemonName = "thievul"
+ PokemonNameGossifleur PokemonName = "gossifleur"
+ PokemonNameEldegoss PokemonName = "eldegoss"
+ PokemonNameWooloo PokemonName = "wooloo"
+ PokemonNameDubwool PokemonName = "dubwool"
+ PokemonNameChewtle PokemonName = "chewtle"
+ PokemonNameDrednaw PokemonName = "drednaw"
+ PokemonNameYamper PokemonName = "yamper"
+ PokemonNameBoltund PokemonName = "boltund"
+ PokemonNameRolycoly PokemonName = "rolycoly"
+ PokemonNameCarkol PokemonName = "carkol"
+ PokemonNameCoalossal PokemonName = "coalossal"
+ PokemonNameApplin PokemonName = "applin"
+ PokemonNameFlapple PokemonName = "flapple"
+ PokemonNameAppletun PokemonName = "appletun"
+ PokemonNameSilicobra PokemonName = "silicobra"
+ PokemonNameSandaconda PokemonName = "sandaconda"
+ PokemonNameCramorant PokemonName = "cramorant"
+ PokemonNameArrokuda PokemonName = "arrokuda"
+ PokemonNameBarraskewda PokemonName = "barraskewda"
+ PokemonNameToxel PokemonName = "toxel"
+ PokemonNameToxtricity PokemonName = "toxtricity"
+ PokemonNameSizzlipede PokemonName = "sizzlipede"
+ PokemonNameCentiskorch PokemonName = "centiskorch"
+ PokemonNameClobbopus PokemonName = "clobbopus"
+ PokemonNameGrapploct PokemonName = "grapploct"
+ PokemonNameSinistea PokemonName = "sinistea"
+ PokemonNamePolteageist PokemonName = "polteageist"
+ PokemonNameHatenna PokemonName = "hatenna"
+ PokemonNameHattrem PokemonName = "hattrem"
+ PokemonNameHatterene PokemonName = "hatterene"
+ PokemonNameImpidimp PokemonName = "impidimp"
+ PokemonNameMorgrem PokemonName = "morgrem"
+ PokemonNameGrimmsnarl PokemonName = "grimmsnarl"
+ PokemonNameObstagoon PokemonName = "obstagoon"
+ PokemonNamePerrserker PokemonName = "perrserker"
+ PokemonNameCursola PokemonName = "cursola"
+ PokemonNameSirfetchd PokemonName = "sirfetchd"
+ PokemonNameMrrime PokemonName = "mrrime"
+ PokemonNameRunerigus PokemonName = "runerigus"
+ PokemonNameMilcery PokemonName = "milcery"
+ PokemonNameAlcremie PokemonName = "alcremie"
+ PokemonNameFalinks PokemonName = "falinks"
+ PokemonNamePincurchin PokemonName = "pincurchin"
+ PokemonNameSnom PokemonName = "snom"
+ PokemonNameFrosmoth PokemonName = "frosmoth"
+ PokemonNameStonjourner PokemonName = "stonjourner"
+ PokemonNameEiscue PokemonName = "eiscue"
+ PokemonNameIndeedee PokemonName = "indeedee"
+ PokemonNameMorpeko PokemonName = "morpeko"
+ PokemonNameCufant PokemonName = "cufant"
+ PokemonNameCopperajah PokemonName = "copperajah"
+ PokemonNameDracozolt PokemonName = "dracozolt"
+ PokemonNameArctozolt PokemonName = "arctozolt"
+ PokemonNameDracovish PokemonName = "dracovish"
+ PokemonNameArctovish PokemonName = "arctovish"
+ PokemonNameDuraludon PokemonName = "duraludon"
+ PokemonNameDreepy PokemonName = "dreepy"
+ PokemonNameDrakloak PokemonName = "drakloak"
+ PokemonNameDragapult PokemonName = "dragapult"
+ PokemonNameZacian PokemonName = "zacian"
+ PokemonNameZamazenta PokemonName = "zamazenta"
+ PokemonNameEternatus PokemonName = "eternatus"
+ PokemonNameKubfu PokemonName = "kubfu"
+ PokemonNameUrshifu PokemonName = "urshifu"
+ PokemonNameZarude PokemonName = "zarude"
+ PokemonNameRegieleki PokemonName = "regieleki"
+ PokemonNameRegidrago PokemonName = "regidrago"
+ PokemonNameGlastrier PokemonName = "glastrier"
+ PokemonNameSpectrier PokemonName = "spectrier"
+ PokemonNameCalyrex PokemonName = "calyrex"
+)
+
+// ToPointer returns the address of a copy of the PokemonName value.
+// The value receiver means e is already a per-call copy; binding it to a
+// local before taking its address makes that copy explicit to the reader.
+func (e PokemonName) ToPointer() *PokemonName {
+	v := e
+	return &v
+}
+
+func (e *PokemonName) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "bulbasaur":
+ fallthrough
+ case "ivysaur":
+ fallthrough
+ case "venusaur":
+ fallthrough
+ case "charmander":
+ fallthrough
+ case "charmeleon":
+ fallthrough
+ case "charizard":
+ fallthrough
+ case "squirtle":
+ fallthrough
+ case "wartortle":
+ fallthrough
+ case "blastoise":
+ fallthrough
+ case "caterpie":
+ fallthrough
+ case "metapod":
+ fallthrough
+ case "butterfree":
+ fallthrough
+ case "weedle":
+ fallthrough
+ case "kakuna":
+ fallthrough
+ case "beedrill":
+ fallthrough
+ case "pidgey":
+ fallthrough
+ case "pidgeotto":
+ fallthrough
+ case "pidgeot":
+ fallthrough
+ case "rattata":
+ fallthrough
+ case "raticate":
+ fallthrough
+ case "spearow":
+ fallthrough
+ case "fearow":
+ fallthrough
+ case "ekans":
+ fallthrough
+ case "arbok":
+ fallthrough
+ case "pikachu":
+ fallthrough
+ case "raichu":
+ fallthrough
+ case "sandshrew":
+ fallthrough
+ case "sandslash":
+ fallthrough
+ case "nidoranf":
+ fallthrough
+ case "nidorina":
+ fallthrough
+ case "nidoqueen":
+ fallthrough
+ case "nidoranm":
+ fallthrough
+ case "nidorino":
+ fallthrough
+ case "nidoking":
+ fallthrough
+ case "clefairy":
+ fallthrough
+ case "clefable":
+ fallthrough
+ case "vulpix":
+ fallthrough
+ case "ninetales":
+ fallthrough
+ case "jigglypuff":
+ fallthrough
+ case "wigglytuff":
+ fallthrough
+ case "zubat":
+ fallthrough
+ case "golbat":
+ fallthrough
+ case "oddish":
+ fallthrough
+ case "gloom":
+ fallthrough
+ case "vileplume":
+ fallthrough
+ case "paras":
+ fallthrough
+ case "parasect":
+ fallthrough
+ case "venonat":
+ fallthrough
+ case "venomoth":
+ fallthrough
+ case "diglett":
+ fallthrough
+ case "dugtrio":
+ fallthrough
+ case "meowth":
+ fallthrough
+ case "persian":
+ fallthrough
+ case "psyduck":
+ fallthrough
+ case "golduck":
+ fallthrough
+ case "mankey":
+ fallthrough
+ case "primeape":
+ fallthrough
+ case "growlithe":
+ fallthrough
+ case "arcanine":
+ fallthrough
+ case "poliwag":
+ fallthrough
+ case "poliwhirl":
+ fallthrough
+ case "poliwrath":
+ fallthrough
+ case "abra":
+ fallthrough
+ case "kadabra":
+ fallthrough
+ case "alakazam":
+ fallthrough
+ case "machop":
+ fallthrough
+ case "machoke":
+ fallthrough
+ case "machamp":
+ fallthrough
+ case "bellsprout":
+ fallthrough
+ case "weepinbell":
+ fallthrough
+ case "victreebel":
+ fallthrough
+ case "tentacool":
+ fallthrough
+ case "tentacruel":
+ fallthrough
+ case "geodude":
+ fallthrough
+ case "graveler":
+ fallthrough
+ case "golem":
+ fallthrough
+ case "ponyta":
+ fallthrough
+ case "rapidash":
+ fallthrough
+ case "slowpoke":
+ fallthrough
+ case "slowbro":
+ fallthrough
+ case "magnemite":
+ fallthrough
+ case "magneton":
+ fallthrough
+ case "farfetchd":
+ fallthrough
+ case "doduo":
+ fallthrough
+ case "dodrio":
+ fallthrough
+ case "seel":
+ fallthrough
+ case "dewgong":
+ fallthrough
+ case "grimer":
+ fallthrough
+ case "muk":
+ fallthrough
+ case "shellder":
+ fallthrough
+ case "cloyster":
+ fallthrough
+ case "gastly":
+ fallthrough
+ case "haunter":
+ fallthrough
+ case "gengar":
+ fallthrough
+ case "onix":
+ fallthrough
+ case "drowzee":
+ fallthrough
+ case "hypno":
+ fallthrough
+ case "krabby":
+ fallthrough
+ case "kingler":
+ fallthrough
+ case "voltorb":
+ fallthrough
+ case "electrode":
+ fallthrough
+ case "exeggcute":
+ fallthrough
+ case "exeggutor":
+ fallthrough
+ case "cubone":
+ fallthrough
+ case "marowak":
+ fallthrough
+ case "hitmonlee":
+ fallthrough
+ case "hitmonchan":
+ fallthrough
+ case "lickitung":
+ fallthrough
+ case "koffing":
+ fallthrough
+ case "weezing":
+ fallthrough
+ case "rhyhorn":
+ fallthrough
+ case "rhydon":
+ fallthrough
+ case "chansey":
+ fallthrough
+ case "tangela":
+ fallthrough
+ case "kangaskhan":
+ fallthrough
+ case "horsea":
+ fallthrough
+ case "seadra":
+ fallthrough
+ case "goldeen":
+ fallthrough
+ case "seaking":
+ fallthrough
+ case "staryu":
+ fallthrough
+ case "starmie":
+ fallthrough
+ case "mrmime":
+ fallthrough
+ case "scyther":
+ fallthrough
+ case "jynx":
+ fallthrough
+ case "electabuzz":
+ fallthrough
+ case "magmar":
+ fallthrough
+ case "pinsir":
+ fallthrough
+ case "tauros":
+ fallthrough
+ case "magikarp":
+ fallthrough
+ case "gyarados":
+ fallthrough
+ case "lapras":
+ fallthrough
+ case "ditto":
+ fallthrough
+ case "eevee":
+ fallthrough
+ case "vaporeon":
+ fallthrough
+ case "jolteon":
+ fallthrough
+ case "flareon":
+ fallthrough
+ case "porygon":
+ fallthrough
+ case "omanyte":
+ fallthrough
+ case "omastar":
+ fallthrough
+ case "kabuto":
+ fallthrough
+ case "kabutops":
+ fallthrough
+ case "aerodactyl":
+ fallthrough
+ case "snorlax":
+ fallthrough
+ case "articuno":
+ fallthrough
+ case "zapdos":
+ fallthrough
+ case "moltres":
+ fallthrough
+ case "dratini":
+ fallthrough
+ case "dragonair":
+ fallthrough
+ case "dragonite":
+ fallthrough
+ case "mewtwo":
+ fallthrough
+ case "mew":
+ fallthrough
+ case "chikorita":
+ fallthrough
+ case "bayleef":
+ fallthrough
+ case "meganium":
+ fallthrough
+ case "cyndaquil":
+ fallthrough
+ case "quilava":
+ fallthrough
+ case "typhlosion":
+ fallthrough
+ case "totodile":
+ fallthrough
+ case "croconaw":
+ fallthrough
+ case "feraligatr":
+ fallthrough
+ case "sentret":
+ fallthrough
+ case "furret":
+ fallthrough
+ case "hoothoot":
+ fallthrough
+ case "noctowl":
+ fallthrough
+ case "ledyba":
+ fallthrough
+ case "ledian":
+ fallthrough
+ case "spinarak":
+ fallthrough
+ case "ariados":
+ fallthrough
+ case "crobat":
+ fallthrough
+ case "chinchou":
+ fallthrough
+ case "lanturn":
+ fallthrough
+ case "pichu":
+ fallthrough
+ case "cleffa":
+ fallthrough
+ case "igglybuff":
+ fallthrough
+ case "togepi":
+ fallthrough
+ case "togetic":
+ fallthrough
+ case "natu":
+ fallthrough
+ case "xatu":
+ fallthrough
+ case "mareep":
+ fallthrough
+ case "flaaffy":
+ fallthrough
+ case "ampharos":
+ fallthrough
+ case "bellossom":
+ fallthrough
+ case "marill":
+ fallthrough
+ case "azumarill":
+ fallthrough
+ case "sudowoodo":
+ fallthrough
+ case "politoed":
+ fallthrough
+ case "hoppip":
+ fallthrough
+ case "skiploom":
+ fallthrough
+ case "jumpluff":
+ fallthrough
+ case "aipom":
+ fallthrough
+ case "sunkern":
+ fallthrough
+ case "sunflora":
+ fallthrough
+ case "yanma":
+ fallthrough
+ case "wooper":
+ fallthrough
+ case "quagsire":
+ fallthrough
+ case "espeon":
+ fallthrough
+ case "umbreon":
+ fallthrough
+ case "murkrow":
+ fallthrough
+ case "slowking":
+ fallthrough
+ case "misdreavus":
+ fallthrough
+ case "unown":
+ fallthrough
+ case "wobbuffet":
+ fallthrough
+ case "girafarig":
+ fallthrough
+ case "pineco":
+ fallthrough
+ case "forretress":
+ fallthrough
+ case "dunsparce":
+ fallthrough
+ case "gligar":
+ fallthrough
+ case "steelix":
+ fallthrough
+ case "snubbull":
+ fallthrough
+ case "granbull":
+ fallthrough
+ case "qwilfish":
+ fallthrough
+ case "scizor":
+ fallthrough
+ case "shuckle":
+ fallthrough
+ case "heracross":
+ fallthrough
+ case "sneasel":
+ fallthrough
+ case "teddiursa":
+ fallthrough
+ case "ursaring":
+ fallthrough
+ case "slugma":
+ fallthrough
+ case "magcargo":
+ fallthrough
+ case "swinub":
+ fallthrough
+ case "piloswine":
+ fallthrough
+ case "corsola":
+ fallthrough
+ case "remoraid":
+ fallthrough
+ case "octillery":
+ fallthrough
+ case "delibird":
+ fallthrough
+ case "mantine":
+ fallthrough
+ case "skarmory":
+ fallthrough
+ case "houndour":
+ fallthrough
+ case "houndoom":
+ fallthrough
+ case "kingdra":
+ fallthrough
+ case "phanpy":
+ fallthrough
+ case "donphan":
+ fallthrough
+ case "porygon2":
+ fallthrough
+ case "stantler":
+ fallthrough
+ case "smeargle":
+ fallthrough
+ case "tyrogue":
+ fallthrough
+ case "hitmontop":
+ fallthrough
+ case "smoochum":
+ fallthrough
+ case "elekid":
+ fallthrough
+ case "magby":
+ fallthrough
+ case "miltank":
+ fallthrough
+ case "blissey":
+ fallthrough
+ case "raikou":
+ fallthrough
+ case "entei":
+ fallthrough
+ case "suicune":
+ fallthrough
+ case "larvitar":
+ fallthrough
+ case "pupitar":
+ fallthrough
+ case "tyranitar":
+ fallthrough
+ case "lugia":
+ fallthrough
+ case "ho-oh":
+ fallthrough
+ case "celebi":
+ fallthrough
+ case "treecko":
+ fallthrough
+ case "grovyle":
+ fallthrough
+ case "sceptile":
+ fallthrough
+ case "torchic":
+ fallthrough
+ case "combusken":
+ fallthrough
+ case "blaziken":
+ fallthrough
+ case "mudkip":
+ fallthrough
+ case "marshtomp":
+ fallthrough
+ case "swampert":
+ fallthrough
+ case "poochyena":
+ fallthrough
+ case "mightyena":
+ fallthrough
+ case "zigzagoon":
+ fallthrough
+ case "linoone":
+ fallthrough
+ case "wurmple":
+ fallthrough
+ case "silcoon":
+ fallthrough
+ case "beautifly":
+ fallthrough
+ case "cascoon":
+ fallthrough
+ case "dustox":
+ fallthrough
+ case "lotad":
+ fallthrough
+ case "lombre":
+ fallthrough
+ case "ludicolo":
+ fallthrough
+ case "seedot":
+ fallthrough
+ case "nuzleaf":
+ fallthrough
+ case "shiftry":
+ fallthrough
+ case "taillow":
+ fallthrough
+ case "swellow":
+ fallthrough
+ case "wingull":
+ fallthrough
+ case "pelipper":
+ fallthrough
+ case "ralts":
+ fallthrough
+ case "kirlia":
+ fallthrough
+ case "gardevoir":
+ fallthrough
+ case "surskit":
+ fallthrough
+ case "masquerain":
+ fallthrough
+ case "shroomish":
+ fallthrough
+ case "breloom":
+ fallthrough
+ case "slakoth":
+ fallthrough
+ case "vigoroth":
+ fallthrough
+ case "slaking":
+ fallthrough
+ case "nincada":
+ fallthrough
+ case "ninjask":
+ fallthrough
+ case "shedinja":
+ fallthrough
+ case "whismur":
+ fallthrough
+ case "loudred":
+ fallthrough
+ case "exploud":
+ fallthrough
+ case "makuhita":
+ fallthrough
+ case "hariyama":
+ fallthrough
+ case "azurill":
+ fallthrough
+ case "nosepass":
+ fallthrough
+ case "skitty":
+ fallthrough
+ case "delcatty":
+ fallthrough
+ case "sableye":
+ fallthrough
+ case "mawile":
+ fallthrough
+ case "aron":
+ fallthrough
+ case "lairon":
+ fallthrough
+ case "aggron":
+ fallthrough
+ case "meditite":
+ fallthrough
+ case "medicham":
+ fallthrough
+ case "electrike":
+ fallthrough
+ case "manectric":
+ fallthrough
+ case "plusle":
+ fallthrough
+ case "minun":
+ fallthrough
+ case "volbeat":
+ fallthrough
+ case "illumise":
+ fallthrough
+ case "roselia":
+ fallthrough
+ case "gulpin":
+ fallthrough
+ case "swalot":
+ fallthrough
+ case "carvanha":
+ fallthrough
+ case "sharpedo":
+ fallthrough
+ case "wailmer":
+ fallthrough
+ case "wailord":
+ fallthrough
+ case "numel":
+ fallthrough
+ case "camerupt":
+ fallthrough
+ case "torkoal":
+ fallthrough
+ case "spoink":
+ fallthrough
+ case "grumpig":
+ fallthrough
+ case "spinda":
+ fallthrough
+ case "trapinch":
+ fallthrough
+ case "vibrava":
+ fallthrough
+ case "flygon":
+ fallthrough
+ case "cacnea":
+ fallthrough
+ case "cacturne":
+ fallthrough
+ case "swablu":
+ fallthrough
+ case "altaria":
+ fallthrough
+ case "zangoose":
+ fallthrough
+ case "seviper":
+ fallthrough
+ case "lunatone":
+ fallthrough
+ case "solrock":
+ fallthrough
+ case "barboach":
+ fallthrough
+ case "whiscash":
+ fallthrough
+ case "corphish":
+ fallthrough
+ case "crawdaunt":
+ fallthrough
+ case "baltoy":
+ fallthrough
+ case "claydol":
+ fallthrough
+ case "lileep":
+ fallthrough
+ case "cradily":
+ fallthrough
+ case "anorith":
+ fallthrough
+ case "armaldo":
+ fallthrough
+ case "feebas":
+ fallthrough
+ case "milotic":
+ fallthrough
+ case "castform":
+ fallthrough
+ case "kecleon":
+ fallthrough
+ case "shuppet":
+ fallthrough
+ case "banette":
+ fallthrough
+ case "duskull":
+ fallthrough
+ case "dusclops":
+ fallthrough
+ case "tropius":
+ fallthrough
+ case "chimecho":
+ fallthrough
+ case "absol":
+ fallthrough
+ case "wynaut":
+ fallthrough
+ case "snorunt":
+ fallthrough
+ case "glalie":
+ fallthrough
+ case "spheal":
+ fallthrough
+ case "sealeo":
+ fallthrough
+ case "walrein":
+ fallthrough
+ case "clamperl":
+ fallthrough
+ case "huntail":
+ fallthrough
+ case "gorebyss":
+ fallthrough
+ case "relicanth":
+ fallthrough
+ case "luvdisc":
+ fallthrough
+ case "bagon":
+ fallthrough
+ case "shelgon":
+ fallthrough
+ case "salamence":
+ fallthrough
+ case "beldum":
+ fallthrough
+ case "metang":
+ fallthrough
+ case "metagross":
+ fallthrough
+ case "regirock":
+ fallthrough
+ case "regice":
+ fallthrough
+ case "registeel":
+ fallthrough
+ case "latias":
+ fallthrough
+ case "latios":
+ fallthrough
+ case "kyogre":
+ fallthrough
+ case "groudon":
+ fallthrough
+ case "rayquaza":
+ fallthrough
+ case "jirachi":
+ fallthrough
+ case "deoxys":
+ fallthrough
+ case "turtwig":
+ fallthrough
+ case "grotle":
+ fallthrough
+ case "torterra":
+ fallthrough
+ case "chimchar":
+ fallthrough
+ case "monferno":
+ fallthrough
+ case "infernape":
+ fallthrough
+ case "piplup":
+ fallthrough
+ case "prinplup":
+ fallthrough
+ case "empoleon":
+ fallthrough
+ case "starly":
+ fallthrough
+ case "staravia":
+ fallthrough
+ case "staraptor":
+ fallthrough
+ case "bidoof":
+ fallthrough
+ case "bibarel":
+ fallthrough
+ case "kricketot":
+ fallthrough
+ case "kricketune":
+ fallthrough
+ case "shinx":
+ fallthrough
+ case "luxio":
+ fallthrough
+ case "luxray":
+ fallthrough
+ case "budew":
+ fallthrough
+ case "roserade":
+ fallthrough
+ case "cranidos":
+ fallthrough
+ case "rampardos":
+ fallthrough
+ case "shieldon":
+ fallthrough
+ case "bastiodon":
+ fallthrough
+ case "burmy":
+ fallthrough
+ case "wormadam":
+ fallthrough
+ case "mothim":
+ fallthrough
+ case "combee":
+ fallthrough
+ case "vespiquen":
+ fallthrough
+ case "pachirisu":
+ fallthrough
+ case "buizel":
+ fallthrough
+ case "floatzel":
+ fallthrough
+ case "cherubi":
+ fallthrough
+ case "cherrim":
+ fallthrough
+ case "shellos":
+ fallthrough
+ case "gastrodon":
+ fallthrough
+ case "ambipom":
+ fallthrough
+ case "drifloon":
+ fallthrough
+ case "drifblim":
+ fallthrough
+ case "buneary":
+ fallthrough
+ case "lopunny":
+ fallthrough
+ case "mismagius":
+ fallthrough
+ case "honchkrow":
+ fallthrough
+ case "glameow":
+ fallthrough
+ case "purugly":
+ fallthrough
+ case "chingling":
+ fallthrough
+ case "stunky":
+ fallthrough
+ case "skuntank":
+ fallthrough
+ case "bronzor":
+ fallthrough
+ case "bronzong":
+ fallthrough
+ case "bonsly":
+ fallthrough
+ case "mimejr":
+ fallthrough
+ case "happiny":
+ fallthrough
+ case "chatot":
+ fallthrough
+ case "spiritomb":
+ fallthrough
+ case "gible":
+ fallthrough
+ case "gabite":
+ fallthrough
+ case "garchomp":
+ fallthrough
+ case "munchlax":
+ fallthrough
+ case "riolu":
+ fallthrough
+ case "lucario":
+ fallthrough
+ case "hippopotas":
+ fallthrough
+ case "hippowdon":
+ fallthrough
+ case "skorupi":
+ fallthrough
+ case "drapion":
+ fallthrough
+ case "croagunk":
+ fallthrough
+ case "toxicroak":
+ fallthrough
+ case "carnivine":
+ fallthrough
+ case "finneon":
+ fallthrough
+ case "lumineon":
+ fallthrough
+ case "mantyke":
+ fallthrough
+ case "snover":
+ fallthrough
+ case "abomasnow":
+ fallthrough
+ case "weavile":
+ fallthrough
+ case "magnezone":
+ fallthrough
+ case "lickilicky":
+ fallthrough
+ case "rhyperior":
+ fallthrough
+ case "tangrowth":
+ fallthrough
+ case "electivire":
+ fallthrough
+ case "magmortar":
+ fallthrough
+ case "togekiss":
+ fallthrough
+ case "yanmega":
+ fallthrough
+ case "leafeon":
+ fallthrough
+ case "glaceon":
+ fallthrough
+ case "gliscor":
+ fallthrough
+ case "mamoswine":
+ fallthrough
+ case "porygon-z":
+ fallthrough
+ case "gallade":
+ fallthrough
+ case "probopass":
+ fallthrough
+ case "dusknoir":
+ fallthrough
+ case "froslass":
+ fallthrough
+ case "rotom":
+ fallthrough
+ case "uxie":
+ fallthrough
+ case "mesprit":
+ fallthrough
+ case "azelf":
+ fallthrough
+ case "dialga":
+ fallthrough
+ case "palkia":
+ fallthrough
+ case "heatran":
+ fallthrough
+ case "regigigas":
+ fallthrough
+ case "giratina":
+ fallthrough
+ case "cresselia":
+ fallthrough
+ case "phione":
+ fallthrough
+ case "manaphy":
+ fallthrough
+ case "darkrai":
+ fallthrough
+ case "shaymin":
+ fallthrough
+ case "arceus":
+ fallthrough
+ case "victini":
+ fallthrough
+ case "snivy":
+ fallthrough
+ case "servine":
+ fallthrough
+ case "serperior":
+ fallthrough
+ case "tepig":
+ fallthrough
+ case "pignite":
+ fallthrough
+ case "emboar":
+ fallthrough
+ case "oshawott":
+ fallthrough
+ case "dewott":
+ fallthrough
+ case "samurott":
+ fallthrough
+ case "patrat":
+ fallthrough
+ case "watchog":
+ fallthrough
+ case "lillipup":
+ fallthrough
+ case "herdier":
+ fallthrough
+ case "stoutland":
+ fallthrough
+ case "purrloin":
+ fallthrough
+ case "liepard":
+ fallthrough
+ case "pansage":
+ fallthrough
+ case "simisage":
+ fallthrough
+ case "pansear":
+ fallthrough
+ case "simisear":
+ fallthrough
+ case "panpour":
+ fallthrough
+ case "simipour":
+ fallthrough
+ case "munna":
+ fallthrough
+ case "musharna":
+ fallthrough
+ case "pidove":
+ fallthrough
+ case "tranquill":
+ fallthrough
+ case "unfezant":
+ fallthrough
+ case "blitzle":
+ fallthrough
+ case "zebstrika":
+ fallthrough
+ case "roggenrola":
+ fallthrough
+ case "boldore":
+ fallthrough
+ case "gigalith":
+ fallthrough
+ case "woobat":
+ fallthrough
+ case "swoobat":
+ fallthrough
+ case "drilbur":
+ fallthrough
+ case "excadrill":
+ fallthrough
+ case "audino":
+ fallthrough
+ case "timburr":
+ fallthrough
+ case "gurdurr":
+ fallthrough
+ case "conkeldurr":
+ fallthrough
+ case "tympole":
+ fallthrough
+ case "palpitoad":
+ fallthrough
+ case "seismitoad":
+ fallthrough
+ case "throh":
+ fallthrough
+ case "sawk":
+ fallthrough
+ case "sewaddle":
+ fallthrough
+ case "swadloon":
+ fallthrough
+ case "leavanny":
+ fallthrough
+ case "venipede":
+ fallthrough
+ case "whirlipede":
+ fallthrough
+ case "scolipede":
+ fallthrough
+ case "cottonee":
+ fallthrough
+ case "whimsicott":
+ fallthrough
+ case "petilil":
+ fallthrough
+ case "lilligant":
+ fallthrough
+ case "basculin":
+ fallthrough
+ case "sandile":
+ fallthrough
+ case "krokorok":
+ fallthrough
+ case "krookodile":
+ fallthrough
+ case "darumaka":
+ fallthrough
+ case "darmanitan":
+ fallthrough
+ case "maractus":
+ fallthrough
+ case "dwebble":
+ fallthrough
+ case "crustle":
+ fallthrough
+ case "scraggy":
+ fallthrough
+ case "scrafty":
+ fallthrough
+ case "sigilyph":
+ fallthrough
+ case "yamask":
+ fallthrough
+ case "cofagrigus":
+ fallthrough
+ case "tirtouga":
+ fallthrough
+ case "carracosta":
+ fallthrough
+ case "archen":
+ fallthrough
+ case "archeops":
+ fallthrough
+ case "trubbish":
+ fallthrough
+ case "garbodor":
+ fallthrough
+ case "zorua":
+ fallthrough
+ case "zoroark":
+ fallthrough
+ case "minccino":
+ fallthrough
+ case "cinccino":
+ fallthrough
+ case "gothita":
+ fallthrough
+ case "gothorita":
+ fallthrough
+ case "gothitelle":
+ fallthrough
+ case "solosis":
+ fallthrough
+ case "duosion":
+ fallthrough
+ case "reuniclus":
+ fallthrough
+ case "ducklett":
+ fallthrough
+ case "swanna":
+ fallthrough
+ case "vanillite":
+ fallthrough
+ case "vanillish":
+ fallthrough
+ case "vanilluxe":
+ fallthrough
+ case "deerling":
+ fallthrough
+ case "sawsbuck":
+ fallthrough
+ case "emolga":
+ fallthrough
+ case "karrablast":
+ fallthrough
+ case "escavalier":
+ fallthrough
+ case "foongus":
+ fallthrough
+ case "amoonguss":
+ fallthrough
+ case "frillish":
+ fallthrough
+ case "jellicent":
+ fallthrough
+ case "alomomola":
+ fallthrough
+ case "joltik":
+ fallthrough
+ case "galvantula":
+ fallthrough
+ case "ferroseed":
+ fallthrough
+ case "ferrothorn":
+ fallthrough
+ case "klink":
+ fallthrough
+ case "klang":
+ fallthrough
+ case "klinklang":
+ fallthrough
+ case "tynamo":
+ fallthrough
+ case "eelektrik":
+ fallthrough
+ case "eelektross":
+ fallthrough
+ case "elgyem":
+ fallthrough
+ case "beheeyem":
+ fallthrough
+ case "litwick":
+ fallthrough
+ case "lampent":
+ fallthrough
+ case "chandelure":
+ fallthrough
+ case "axew":
+ fallthrough
+ case "fraxure":
+ fallthrough
+ case "haxorus":
+ fallthrough
+ case "cubchoo":
+ fallthrough
+ case "beartic":
+ fallthrough
+ case "cryogonal":
+ fallthrough
+ case "shelmet":
+ fallthrough
+ case "accelgor":
+ fallthrough
+ case "stunfisk":
+ fallthrough
+ case "mienfoo":
+ fallthrough
+ case "mienshao":
+ fallthrough
+ case "druddigon":
+ fallthrough
+ case "golett":
+ fallthrough
+ case "golurk":
+ fallthrough
+ case "pawniard":
+ fallthrough
+ case "bisharp":
+ fallthrough
+ case "bouffalant":
+ fallthrough
+ case "rufflet":
+ fallthrough
+ case "braviary":
+ fallthrough
+ case "vullaby":
+ fallthrough
+ case "mandibuzz":
+ fallthrough
+ case "heatmor":
+ fallthrough
+ case "durant":
+ fallthrough
+ case "deino":
+ fallthrough
+ case "zweilous":
+ fallthrough
+ case "hydreigon":
+ fallthrough
+ case "larvesta":
+ fallthrough
+ case "volcarona":
+ fallthrough
+ case "cobalion":
+ fallthrough
+ case "terrakion":
+ fallthrough
+ case "virizion":
+ fallthrough
+ case "tornadus":
+ fallthrough
+ case "thundurus":
+ fallthrough
+ case "reshiram":
+ fallthrough
+ case "zekrom":
+ fallthrough
+ case "landorus":
+ fallthrough
+ case "kyurem":
+ fallthrough
+ case "keldeo":
+ fallthrough
+ case "meloetta":
+ fallthrough
+ case "genesect":
+ fallthrough
+ case "chespin":
+ fallthrough
+ case "quilladin":
+ fallthrough
+ case "chesnaught":
+ fallthrough
+ case "fennekin":
+ fallthrough
+ case "braixen":
+ fallthrough
+ case "delphox":
+ fallthrough
+ case "froakie":
+ fallthrough
+ case "frogadier":
+ fallthrough
+ case "greninja":
+ fallthrough
+ case "bunnelby":
+ fallthrough
+ case "diggersby":
+ fallthrough
+ case "fletchling":
+ fallthrough
+ case "fletchinder":
+ fallthrough
+ case "talonflame":
+ fallthrough
+ case "scatterbug":
+ fallthrough
+ case "spewpa":
+ fallthrough
+ case "vivillon":
+ fallthrough
+ case "litleo":
+ fallthrough
+ case "pyroar":
+ fallthrough
+ case "flabebe":
+ fallthrough
+ case "floette":
+ fallthrough
+ case "florges":
+ fallthrough
+ case "skiddo":
+ fallthrough
+ case "gogoat":
+ fallthrough
+ case "pancham":
+ fallthrough
+ case "pangoro":
+ fallthrough
+ case "furfrou":
+ fallthrough
+ case "espurr":
+ fallthrough
+ case "meowstic":
+ fallthrough
+ case "honedge":
+ fallthrough
+ case "doublade":
+ fallthrough
+ case "aegislash":
+ fallthrough
+ case "spritzee":
+ fallthrough
+ case "aromatisse":
+ fallthrough
+ case "swirlix":
+ fallthrough
+ case "slurpuff":
+ fallthrough
+ case "inkay":
+ fallthrough
+ case "malamar":
+ fallthrough
+ case "binacle":
+ fallthrough
+ case "barbaracle":
+ fallthrough
+ case "skrelp":
+ fallthrough
+ case "dragalge":
+ fallthrough
+ case "clauncher":
+ fallthrough
+ case "clawitzer":
+ fallthrough
+ case "helioptile":
+ fallthrough
+ case "heliolisk":
+ fallthrough
+ case "tyrunt":
+ fallthrough
+ case "tyrantrum":
+ fallthrough
+ case "amaura":
+ fallthrough
+ case "aurorus":
+ fallthrough
+ case "sylveon":
+ fallthrough
+ case "hawlucha":
+ fallthrough
+ case "dedenne":
+ fallthrough
+ case "carbink":
+ fallthrough
+ case "goomy":
+ fallthrough
+ case "sliggoo":
+ fallthrough
+ case "goodra":
+ fallthrough
+ case "klefki":
+ fallthrough
+ case "phantump":
+ fallthrough
+ case "trevenant":
+ fallthrough
+ case "pumpkaboo":
+ fallthrough
+ case "gourgeist":
+ fallthrough
+ case "bergmite":
+ fallthrough
+ case "avalugg":
+ fallthrough
+ case "noibat":
+ fallthrough
+ case "noivern":
+ fallthrough
+ case "xerneas":
+ fallthrough
+ case "yveltal":
+ fallthrough
+ case "zygarde":
+ fallthrough
+ case "diancie":
+ fallthrough
+ case "hoopa":
+ fallthrough
+ case "volcanion":
+ fallthrough
+ case "rowlet":
+ fallthrough
+ case "dartrix":
+ fallthrough
+ case "decidueye":
+ fallthrough
+ case "litten":
+ fallthrough
+ case "torracat":
+ fallthrough
+ case "incineroar":
+ fallthrough
+ case "popplio":
+ fallthrough
+ case "brionne":
+ fallthrough
+ case "primarina":
+ fallthrough
+ case "pikipek":
+ fallthrough
+ case "trumbeak":
+ fallthrough
+ case "toucannon":
+ fallthrough
+ case "yungoos":
+ fallthrough
+ case "gumshoos":
+ fallthrough
+ case "grubbin":
+ fallthrough
+ case "charjabug":
+ fallthrough
+ case "vikavolt":
+ fallthrough
+ case "crabrawler":
+ fallthrough
+ case "crabominable":
+ fallthrough
+ case "oricorio":
+ fallthrough
+ case "cutiefly":
+ fallthrough
+ case "ribombee":
+ fallthrough
+ case "rockruff":
+ fallthrough
+ case "lycanroc":
+ fallthrough
+ case "wishiwashi":
+ fallthrough
+ case "mareanie":
+ fallthrough
+ case "toxapex":
+ fallthrough
+ case "mudbray":
+ fallthrough
+ case "mudsdale":
+ fallthrough
+ case "dewpider":
+ fallthrough
+ case "araquanid":
+ fallthrough
+ case "fomantis":
+ fallthrough
+ case "lurantis":
+ fallthrough
+ case "morelull":
+ fallthrough
+ case "shiinotic":
+ fallthrough
+ case "salandit":
+ fallthrough
+ case "salazzle":
+ fallthrough
+ case "stufful":
+ fallthrough
+ case "bewear":
+ fallthrough
+ case "bounsweet":
+ fallthrough
+ case "steenee":
+ fallthrough
+ case "tsareena":
+ fallthrough
+ case "comfey":
+ fallthrough
+ case "oranguru":
+ fallthrough
+ case "passimian":
+ fallthrough
+ case "wimpod":
+ fallthrough
+ case "golisopod":
+ fallthrough
+ case "sandygast":
+ fallthrough
+ case "palossand":
+ fallthrough
+ case "pyukumuku":
+ fallthrough
+ case "typenull":
+ fallthrough
+ case "silvally":
+ fallthrough
+ case "minior":
+ fallthrough
+ case "komala":
+ fallthrough
+ case "turtonator":
+ fallthrough
+ case "togedemaru":
+ fallthrough
+ case "mimikyu":
+ fallthrough
+ case "bruxish":
+ fallthrough
+ case "drampa":
+ fallthrough
+ case "dhelmise":
+ fallthrough
+ case "jangmo-o":
+ fallthrough
+ case "hakamo-o":
+ fallthrough
+ case "kommo-o":
+ fallthrough
+ case "tapukoko":
+ fallthrough
+ case "tapulele":
+ fallthrough
+ case "tapubulu":
+ fallthrough
+ case "tapufini":
+ fallthrough
+ case "cosmog":
+ fallthrough
+ case "cosmoem":
+ fallthrough
+ case "solgaleo":
+ fallthrough
+ case "lunala":
+ fallthrough
+ case "nihilego":
+ fallthrough
+ case "buzzwole":
+ fallthrough
+ case "pheromosa":
+ fallthrough
+ case "xurkitree":
+ fallthrough
+ case "celesteela":
+ fallthrough
+ case "kartana":
+ fallthrough
+ case "guzzlord":
+ fallthrough
+ case "necrozma":
+ fallthrough
+ case "magearna":
+ fallthrough
+ case "marshadow":
+ fallthrough
+ case "poipole":
+ fallthrough
+ case "naganadel":
+ fallthrough
+ case "stakataka":
+ fallthrough
+ case "blacephalon":
+ fallthrough
+ case "zeraora":
+ fallthrough
+ case "meltan":
+ fallthrough
+ case "melmetal":
+ fallthrough
+ case "grookey":
+ fallthrough
+ case "thwackey":
+ fallthrough
+ case "rillaboom":
+ fallthrough
+ case "scorbunny":
+ fallthrough
+ case "raboot":
+ fallthrough
+ case "cinderace":
+ fallthrough
+ case "sobble":
+ fallthrough
+ case "drizzile":
+ fallthrough
+ case "inteleon":
+ fallthrough
+ case "skwovet":
+ fallthrough
+ case "greedent":
+ fallthrough
+ case "rookidee":
+ fallthrough
+ case "corvisquire":
+ fallthrough
+ case "corviknight":
+ fallthrough
+ case "blipbug":
+ fallthrough
+ case "dottler":
+ fallthrough
+ case "orbeetle":
+ fallthrough
+ case "nickit":
+ fallthrough
+ case "thievul":
+ fallthrough
+ case "gossifleur":
+ fallthrough
+ case "eldegoss":
+ fallthrough
+ case "wooloo":
+ fallthrough
+ case "dubwool":
+ fallthrough
+ case "chewtle":
+ fallthrough
+ case "drednaw":
+ fallthrough
+ case "yamper":
+ fallthrough
+ case "boltund":
+ fallthrough
+ case "rolycoly":
+ fallthrough
+ case "carkol":
+ fallthrough
+ case "coalossal":
+ fallthrough
+ case "applin":
+ fallthrough
+ case "flapple":
+ fallthrough
+ case "appletun":
+ fallthrough
+ case "silicobra":
+ fallthrough
+ case "sandaconda":
+ fallthrough
+ case "cramorant":
+ fallthrough
+ case "arrokuda":
+ fallthrough
+ case "barraskewda":
+ fallthrough
+ case "toxel":
+ fallthrough
+ case "toxtricity":
+ fallthrough
+ case "sizzlipede":
+ fallthrough
+ case "centiskorch":
+ fallthrough
+ case "clobbopus":
+ fallthrough
+ case "grapploct":
+ fallthrough
+ case "sinistea":
+ fallthrough
+ case "polteageist":
+ fallthrough
+ case "hatenna":
+ fallthrough
+ case "hattrem":
+ fallthrough
+ case "hatterene":
+ fallthrough
+ case "impidimp":
+ fallthrough
+ case "morgrem":
+ fallthrough
+ case "grimmsnarl":
+ fallthrough
+ case "obstagoon":
+ fallthrough
+ case "perrserker":
+ fallthrough
+ case "cursola":
+ fallthrough
+ case "sirfetchd":
+ fallthrough
+ case "mrrime":
+ fallthrough
+ case "runerigus":
+ fallthrough
+ case "milcery":
+ fallthrough
+ case "alcremie":
+ fallthrough
+ case "falinks":
+ fallthrough
+ case "pincurchin":
+ fallthrough
+ case "snom":
+ fallthrough
+ case "frosmoth":
+ fallthrough
+ case "stonjourner":
+ fallthrough
+ case "eiscue":
+ fallthrough
+ case "indeedee":
+ fallthrough
+ case "morpeko":
+ fallthrough
+ case "cufant":
+ fallthrough
+ case "copperajah":
+ fallthrough
+ case "dracozolt":
+ fallthrough
+ case "arctozolt":
+ fallthrough
+ case "dracovish":
+ fallthrough
+ case "arctovish":
+ fallthrough
+ case "duraludon":
+ fallthrough
+ case "dreepy":
+ fallthrough
+ case "drakloak":
+ fallthrough
+ case "dragapult":
+ fallthrough
+ case "zacian":
+ fallthrough
+ case "zamazenta":
+ fallthrough
+ case "eternatus":
+ fallthrough
+ case "kubfu":
+ fallthrough
+ case "urshifu":
+ fallthrough
+ case "zarude":
+ fallthrough
+ case "regieleki":
+ fallthrough
+ case "regidrago":
+ fallthrough
+ case "glastrier":
+ fallthrough
+ case "spectrier":
+ fallthrough
+ case "calyrex":
+ *e = PokemonName(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for PokemonName: %v", v)
+ }
+}
+
type SourcePokeapiUpdate struct {
// Pokemon requested from the API.
- PokemonName string `json:"pokemon_name"`
+ PokemonName PokemonName `json:"pokemon_name"`
+}
+
+func (o *SourcePokeapiUpdate) GetPokemonName() PokemonName {
+ if o == nil {
+ return PokemonName("")
+ }
+ return o.PokemonName
}
diff --git a/internal/sdk/pkg/models/shared/sourcepolygonstockapi.go b/internal/sdk/pkg/models/shared/sourcepolygonstockapi.go
old mode 100755
new mode 100644
index e34a99f25..a6b1ae873
--- a/internal/sdk/pkg/models/shared/sourcepolygonstockapi.go
+++ b/internal/sdk/pkg/models/shared/sourcepolygonstockapi.go
@@ -3,32 +3,33 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePolygonStockAPIPolygonStockAPI string
+type PolygonStockAPI string
const (
- SourcePolygonStockAPIPolygonStockAPIPolygonStockAPI SourcePolygonStockAPIPolygonStockAPI = "polygon-stock-api"
+ PolygonStockAPIPolygonStockAPI PolygonStockAPI = "polygon-stock-api"
)
-func (e SourcePolygonStockAPIPolygonStockAPI) ToPointer() *SourcePolygonStockAPIPolygonStockAPI {
+func (e PolygonStockAPI) ToPointer() *PolygonStockAPI {
return &e
}
-func (e *SourcePolygonStockAPIPolygonStockAPI) UnmarshalJSON(data []byte) error {
+func (e *PolygonStockAPI) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "polygon-stock-api":
- *e = SourcePolygonStockAPIPolygonStockAPI(v)
+ *e = PolygonStockAPI(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePolygonStockAPIPolygonStockAPI: %v", v)
+ return fmt.Errorf("invalid value for PolygonStockAPI: %v", v)
}
}
@@ -44,8 +45,8 @@ type SourcePolygonStockAPI struct {
// The size of the timespan multiplier.
Multiplier int64 `json:"multiplier"`
// Sort the results by timestamp. asc will return results in ascending order (oldest at the top), desc will return results in descending order (newest at the top).
- Sort *string `json:"sort,omitempty"`
- SourceType SourcePolygonStockAPIPolygonStockAPI `json:"sourceType"`
+ Sort *string `json:"sort,omitempty"`
+ sourceType PolygonStockAPI `const:"polygon-stock-api" json:"sourceType"`
// The beginning date for the aggregate window.
StartDate types.Date `json:"start_date"`
// The exchange symbol that this item is traded under.
@@ -53,3 +54,81 @@ type SourcePolygonStockAPI struct {
// The size of the time window.
Timespan string `json:"timespan"`
}
+
+func (s SourcePolygonStockAPI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePolygonStockAPI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePolygonStockAPI) GetAdjusted() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Adjusted
+}
+
+func (o *SourcePolygonStockAPI) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourcePolygonStockAPI) GetEndDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.EndDate
+}
+
+func (o *SourcePolygonStockAPI) GetLimit() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Limit
+}
+
+func (o *SourcePolygonStockAPI) GetMultiplier() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Multiplier
+}
+
+func (o *SourcePolygonStockAPI) GetSort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Sort
+}
+
+func (o *SourcePolygonStockAPI) GetSourceType() PolygonStockAPI {
+ return PolygonStockAPIPolygonStockAPI
+}
+
+func (o *SourcePolygonStockAPI) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
+
+func (o *SourcePolygonStockAPI) GetStocksTicker() string {
+ if o == nil {
+ return ""
+ }
+ return o.StocksTicker
+}
+
+func (o *SourcePolygonStockAPI) GetTimespan() string {
+ if o == nil {
+ return ""
+ }
+ return o.Timespan
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepolygonstockapicreaterequest.go b/internal/sdk/pkg/models/shared/sourcepolygonstockapicreaterequest.go
old mode 100755
new mode 100644
index 263a4f147..4f68d3dec
--- a/internal/sdk/pkg/models/shared/sourcepolygonstockapicreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepolygonstockapicreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePolygonStockAPICreateRequest struct {
Configuration SourcePolygonStockAPI `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePolygonStockAPICreateRequest) GetConfiguration() SourcePolygonStockAPI {
+ if o == nil {
+ return SourcePolygonStockAPI{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePolygonStockAPICreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePolygonStockAPICreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePolygonStockAPICreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePolygonStockAPICreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepolygonstockapiputrequest.go b/internal/sdk/pkg/models/shared/sourcepolygonstockapiputrequest.go
old mode 100755
new mode 100644
index e51fab723..ff80c490a
--- a/internal/sdk/pkg/models/shared/sourcepolygonstockapiputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepolygonstockapiputrequest.go
@@ -7,3 +7,24 @@ type SourcePolygonStockAPIPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePolygonStockAPIPutRequest) GetConfiguration() SourcePolygonStockAPIUpdate {
+ if o == nil {
+ return SourcePolygonStockAPIUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePolygonStockAPIPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePolygonStockAPIPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepolygonstockapiupdate.go b/internal/sdk/pkg/models/shared/sourcepolygonstockapiupdate.go
old mode 100755
new mode 100644
index 1181094ae..31837b682
--- a/internal/sdk/pkg/models/shared/sourcepolygonstockapiupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepolygonstockapiupdate.go
@@ -3,7 +3,8 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourcePolygonStockAPIUpdate struct {
@@ -26,3 +27,77 @@ type SourcePolygonStockAPIUpdate struct {
// The size of the time window.
Timespan string `json:"timespan"`
}
+
+func (s SourcePolygonStockAPIUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePolygonStockAPIUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePolygonStockAPIUpdate) GetAdjusted() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Adjusted
+}
+
+func (o *SourcePolygonStockAPIUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourcePolygonStockAPIUpdate) GetEndDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.EndDate
+}
+
+func (o *SourcePolygonStockAPIUpdate) GetLimit() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Limit
+}
+
+func (o *SourcePolygonStockAPIUpdate) GetMultiplier() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.Multiplier
+}
+
+func (o *SourcePolygonStockAPIUpdate) GetSort() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Sort
+}
+
+func (o *SourcePolygonStockAPIUpdate) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
+
+func (o *SourcePolygonStockAPIUpdate) GetStocksTicker() string {
+ if o == nil {
+ return ""
+ }
+ return o.StocksTicker
+}
+
+func (o *SourcePolygonStockAPIUpdate) GetTimespan() string {
+ if o == nil {
+ return ""
+ }
+ return o.Timespan
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepostgres.go b/internal/sdk/pkg/models/shared/sourcepostgres.go
old mode 100755
new mode 100644
index 6b4574ec4..ebcff6905
--- a/internal/sdk/pkg/models/shared/sourcepostgres.go
+++ b/internal/sdk/pkg/models/shared/sourcepostgres.go
@@ -3,83 +3,113 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod string
+type SourcePostgresSchemasReplicationMethodMethod string
const (
- SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethodStandard SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod = "Standard"
+ SourcePostgresSchemasReplicationMethodMethodStandard SourcePostgresSchemasReplicationMethodMethod = "Standard"
)
-func (e SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod) ToPointer() *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod {
+func (e SourcePostgresSchemasReplicationMethodMethod) ToPointer() *SourcePostgresSchemasReplicationMethodMethod {
return &e
}
-func (e *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresSchemasReplicationMethodMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Standard":
- *e = SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod(v)
+ *e = SourcePostgresSchemasReplicationMethodMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresSchemasReplicationMethodMethod: %v", v)
}
}
-// SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
-type SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor struct {
- Method SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod `json:"method"`
+// SourcePostgresScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
+type SourcePostgresScanChangesWithUserDefinedCursor struct {
+ method SourcePostgresSchemasReplicationMethodMethod `const:"Standard" json:"method"`
}
-type SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod string
+func (s SourcePostgresScanChangesWithUserDefinedCursor) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostgresScanChangesWithUserDefinedCursor) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePostgresScanChangesWithUserDefinedCursor) GetMethod() SourcePostgresSchemasReplicationMethodMethod {
+ return SourcePostgresSchemasReplicationMethodMethodStandard
+}
+
+type SourcePostgresSchemasMethod string
const (
- SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethodXmin SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod = "Xmin"
+ SourcePostgresSchemasMethodXmin SourcePostgresSchemasMethod = "Xmin"
)
-func (e SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod) ToPointer() *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod {
+func (e SourcePostgresSchemasMethod) ToPointer() *SourcePostgresSchemasMethod {
return &e
}
-func (e *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Xmin":
- *e = SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod(v)
+ *e = SourcePostgresSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresSchemasMethod: %v", v)
+ }
+}
+
+// SourcePostgresDetectChangesWithXminSystemColumn - Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB.
+type SourcePostgresDetectChangesWithXminSystemColumn struct {
+ method SourcePostgresSchemasMethod `const:"Xmin" json:"method"`
+}
+
+func (s SourcePostgresDetectChangesWithXminSystemColumn) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostgresDetectChangesWithXminSystemColumn) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn - Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB.
-type SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn struct {
- Method SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod `json:"method"`
+func (o *SourcePostgresDetectChangesWithXminSystemColumn) GetMethod() SourcePostgresSchemasMethod {
+ return SourcePostgresSchemasMethodXmin
}
-// SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour - Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour string
+// SourcePostgresLSNCommitBehaviour - Determines when Airbyte should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
+type SourcePostgresLSNCommitBehaviour string
const (
- SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviourWhileReadingData SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour = "While reading Data"
- SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviourAfterLoadingDataInTheDestination SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour = "After loading Data in the destination"
+ SourcePostgresLSNCommitBehaviourWhileReadingData SourcePostgresLSNCommitBehaviour = "While reading Data"
+ SourcePostgresLSNCommitBehaviourAfterLoadingDataInTheDestination SourcePostgresLSNCommitBehaviour = "After loading Data in the destination"
)
-func (e SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour) ToPointer() *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour {
+func (e SourcePostgresLSNCommitBehaviour) ToPointer() *SourcePostgresLSNCommitBehaviour {
return &e
}
-func (e *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresLSNCommitBehaviour) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -88,201 +118,207 @@ func (e *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBeha
case "While reading Data":
fallthrough
case "After loading Data in the destination":
- *e = SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour(v)
+ *e = SourcePostgresLSNCommitBehaviour(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresLSNCommitBehaviour: %v", v)
}
}
-type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod string
+type SourcePostgresMethod string
const (
- SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethodCdc SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod = "CDC"
+ SourcePostgresMethodCdc SourcePostgresMethod = "CDC"
)
-func (e SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod) ToPointer() *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod {
+func (e SourcePostgresMethod) ToPointer() *SourcePostgresMethod {
return &e
}
-func (e *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CDC":
- *e = SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod(v)
+ *e = SourcePostgresMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresMethod: %v", v)
}
}
-// SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin - A logical decoding plugin installed on the PostgreSQL server.
-type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin string
+// SourcePostgresPlugin - A logical decoding plugin installed on the PostgreSQL server.
+type SourcePostgresPlugin string
const (
- SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPluginPgoutput SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin = "pgoutput"
+ SourcePostgresPluginPgoutput SourcePostgresPlugin = "pgoutput"
)
-func (e SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin) ToPointer() *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin {
+func (e SourcePostgresPlugin) ToPointer() *SourcePostgresPlugin {
return &e
}
-func (e *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresPlugin) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pgoutput":
- *e = SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin(v)
+ *e = SourcePostgresPlugin(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresPlugin: %v", v)
}
}
-// SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.
-type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC struct {
+// SourcePostgresReadChangesUsingWriteAheadLogCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.
+type SourcePostgresReadChangesUsingWriteAheadLogCDC struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
- InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"`
- // Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
- LsnCommitBehaviour *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour `json:"lsn_commit_behaviour,omitempty"`
- Method SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod `json:"method"`
+ InitialWaitingSeconds *int64 `default:"300" json:"initial_waiting_seconds"`
+ // Determines when Airbyte should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
+ LsnCommitBehaviour *SourcePostgresLSNCommitBehaviour `default:"After loading Data in the destination" json:"lsn_commit_behaviour"`
+ method SourcePostgresMethod `const:"CDC" json:"method"`
// A logical decoding plugin installed on the PostgreSQL server.
- Plugin *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin `json:"plugin,omitempty"`
+ Plugin *SourcePostgresPlugin `default:"pgoutput" json:"plugin"`
// A Postgres publication used for consuming changes. Read about publications and replication identities.
Publication string `json:"publication"`
// The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
- QueueSize *int64 `json:"queue_size,omitempty"`
+ QueueSize *int64 `default:"10000" json:"queue_size"`
// A plugin logical replication slot. Read about replication slots.
ReplicationSlot string `json:"replication_slot"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC
-func (c *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC{}
+func (s SourcePostgresReadChangesUsingWriteAheadLogCDC) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (s *SourcePostgresReadChangesUsingWriteAheadLogCDC) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- *c = SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC(data)
+ return nil
+}
- additionalFields := make(map[string]interface{})
+func (o *SourcePostgresReadChangesUsingWriteAheadLogCDC) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *SourcePostgresReadChangesUsingWriteAheadLogCDC) GetInitialWaitingSeconds() *int64 {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "initial_waiting_seconds")
- delete(additionalFields, "lsn_commit_behaviour")
- delete(additionalFields, "method")
- delete(additionalFields, "plugin")
- delete(additionalFields, "publication")
- delete(additionalFields, "queue_size")
- delete(additionalFields, "replication_slot")
+ return o.InitialWaitingSeconds
+}
- c.AdditionalProperties = additionalFields
+func (o *SourcePostgresReadChangesUsingWriteAheadLogCDC) GetLsnCommitBehaviour() *SourcePostgresLSNCommitBehaviour {
+ if o == nil {
+ return nil
+ }
+ return o.LsnCommitBehaviour
+}
- return nil
+func (o *SourcePostgresReadChangesUsingWriteAheadLogCDC) GetMethod() SourcePostgresMethod {
+ return SourcePostgresMethodCdc
}
-func (c SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC(c))
- if err != nil {
- return nil, err
+func (o *SourcePostgresReadChangesUsingWriteAheadLogCDC) GetPlugin() *SourcePostgresPlugin {
+ if o == nil {
+ return nil
}
+ return o.Plugin
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresReadChangesUsingWriteAheadLogCDC) GetPublication() string {
+ if o == nil {
+ return ""
}
+ return o.Publication
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourcePostgresReadChangesUsingWriteAheadLogCDC) GetQueueSize() *int64 {
+ if o == nil {
+ return nil
}
+ return o.QueueSize
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresReadChangesUsingWriteAheadLogCDC) GetReplicationSlot() string {
+ if o == nil {
+ return ""
}
-
- return json.Marshal(out)
+ return o.ReplicationSlot
}
type SourcePostgresUpdateMethodType string
const (
- SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateMethodType = "source-postgres_Update Method_Read Changes using Write-Ahead Log (CDC)"
- SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodDetectChangesWithXminSystemColumn SourcePostgresUpdateMethodType = "source-postgres_Update Method_Detect Changes with Xmin System Column"
- SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodScanChangesWithUserDefinedCursor SourcePostgresUpdateMethodType = "source-postgres_Update Method_Scan Changes with User Defined Cursor"
+ SourcePostgresUpdateMethodTypeSourcePostgresReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateMethodType = "source-postgres_Read Changes using Write-Ahead Log (CDC)"
+ SourcePostgresUpdateMethodTypeSourcePostgresDetectChangesWithXminSystemColumn SourcePostgresUpdateMethodType = "source-postgres_Detect Changes with Xmin System Column"
+ SourcePostgresUpdateMethodTypeSourcePostgresScanChangesWithUserDefinedCursor SourcePostgresUpdateMethodType = "source-postgres_Scan Changes with User Defined Cursor"
)
type SourcePostgresUpdateMethod struct {
- SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC
- SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn
- SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor
+ SourcePostgresReadChangesUsingWriteAheadLogCDC *SourcePostgresReadChangesUsingWriteAheadLogCDC
+ SourcePostgresDetectChangesWithXminSystemColumn *SourcePostgresDetectChangesWithXminSystemColumn
+ SourcePostgresScanChangesWithUserDefinedCursor *SourcePostgresScanChangesWithUserDefinedCursor
Type SourcePostgresUpdateMethodType
}
-func CreateSourcePostgresUpdateMethodSourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC(sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC) SourcePostgresUpdateMethod {
- typ := SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC
+func CreateSourcePostgresUpdateMethodSourcePostgresReadChangesUsingWriteAheadLogCDC(sourcePostgresReadChangesUsingWriteAheadLogCDC SourcePostgresReadChangesUsingWriteAheadLogCDC) SourcePostgresUpdateMethod {
+ typ := SourcePostgresUpdateMethodTypeSourcePostgresReadChangesUsingWriteAheadLogCDC
return SourcePostgresUpdateMethod{
- SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC: &sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC,
+ SourcePostgresReadChangesUsingWriteAheadLogCDC: &sourcePostgresReadChangesUsingWriteAheadLogCDC,
Type: typ,
}
}
-func CreateSourcePostgresUpdateMethodSourcePostgresUpdateMethodDetectChangesWithXminSystemColumn(sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn) SourcePostgresUpdateMethod {
- typ := SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodDetectChangesWithXminSystemColumn
+func CreateSourcePostgresUpdateMethodSourcePostgresDetectChangesWithXminSystemColumn(sourcePostgresDetectChangesWithXminSystemColumn SourcePostgresDetectChangesWithXminSystemColumn) SourcePostgresUpdateMethod {
+ typ := SourcePostgresUpdateMethodTypeSourcePostgresDetectChangesWithXminSystemColumn
return SourcePostgresUpdateMethod{
- SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn: &sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn,
+ SourcePostgresDetectChangesWithXminSystemColumn: &sourcePostgresDetectChangesWithXminSystemColumn,
Type: typ,
}
}
-func CreateSourcePostgresUpdateMethodSourcePostgresUpdateMethodScanChangesWithUserDefinedCursor(sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor) SourcePostgresUpdateMethod {
- typ := SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodScanChangesWithUserDefinedCursor
+func CreateSourcePostgresUpdateMethodSourcePostgresScanChangesWithUserDefinedCursor(sourcePostgresScanChangesWithUserDefinedCursor SourcePostgresScanChangesWithUserDefinedCursor) SourcePostgresUpdateMethod {
+ typ := SourcePostgresUpdateMethodTypeSourcePostgresScanChangesWithUserDefinedCursor
return SourcePostgresUpdateMethod{
- SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor: &sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor,
+ SourcePostgresScanChangesWithUserDefinedCursor: &sourcePostgresScanChangesWithUserDefinedCursor,
Type: typ,
}
}
func (u *SourcePostgresUpdateMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn := new(SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn); err == nil {
- u.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn = sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn
- u.Type = SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodDetectChangesWithXminSystemColumn
+
+ sourcePostgresDetectChangesWithXminSystemColumn := new(SourcePostgresDetectChangesWithXminSystemColumn)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresDetectChangesWithXminSystemColumn, "", true, true); err == nil {
+ u.SourcePostgresDetectChangesWithXminSystemColumn = sourcePostgresDetectChangesWithXminSystemColumn
+ u.Type = SourcePostgresUpdateMethodTypeSourcePostgresDetectChangesWithXminSystemColumn
return nil
}
- sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor := new(SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor); err == nil {
- u.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor = sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor
- u.Type = SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodScanChangesWithUserDefinedCursor
+ sourcePostgresScanChangesWithUserDefinedCursor := new(SourcePostgresScanChangesWithUserDefinedCursor)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresScanChangesWithUserDefinedCursor, "", true, true); err == nil {
+ u.SourcePostgresScanChangesWithUserDefinedCursor = sourcePostgresScanChangesWithUserDefinedCursor
+ u.Type = SourcePostgresUpdateMethodTypeSourcePostgresScanChangesWithUserDefinedCursor
return nil
}
- sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC := new(SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC); err == nil {
- u.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC = sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC
- u.Type = SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC
+ sourcePostgresReadChangesUsingWriteAheadLogCDC := new(SourcePostgresReadChangesUsingWriteAheadLogCDC)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresReadChangesUsingWriteAheadLogCDC, "", true, true); err == nil {
+ u.SourcePostgresReadChangesUsingWriteAheadLogCDC = sourcePostgresReadChangesUsingWriteAheadLogCDC
+ u.Type = SourcePostgresUpdateMethodTypeSourcePostgresReadChangesUsingWriteAheadLogCDC
return nil
}
@@ -290,19 +326,19 @@ func (u *SourcePostgresUpdateMethod) UnmarshalJSON(data []byte) error {
}
func (u SourcePostgresUpdateMethod) MarshalJSON() ([]byte, error) {
- if u.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn != nil {
- return json.Marshal(u.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn)
+ if u.SourcePostgresReadChangesUsingWriteAheadLogCDC != nil {
+ return utils.MarshalJSON(u.SourcePostgresReadChangesUsingWriteAheadLogCDC, "", true)
}
- if u.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor != nil {
- return json.Marshal(u.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor)
+ if u.SourcePostgresDetectChangesWithXminSystemColumn != nil {
+ return utils.MarshalJSON(u.SourcePostgresDetectChangesWithXminSystemColumn, "", true)
}
- if u.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC != nil {
- return json.Marshal(u.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC)
+ if u.SourcePostgresScanChangesWithUserDefinedCursor != nil {
+ return utils.MarshalJSON(u.SourcePostgresScanChangesWithUserDefinedCursor, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourcePostgresPostgres string
@@ -329,32 +365,33 @@ func (e *SourcePostgresPostgres) UnmarshalJSON(data []byte) error {
}
}
-type SourcePostgresSSLModesVerifyFullMode string
+type SourcePostgresSchemasSSLModeSSLModes6Mode string
const (
- SourcePostgresSSLModesVerifyFullModeVerifyFull SourcePostgresSSLModesVerifyFullMode = "verify-full"
+ SourcePostgresSchemasSSLModeSSLModes6ModeVerifyFull SourcePostgresSchemasSSLModeSSLModes6Mode = "verify-full"
)
-func (e SourcePostgresSSLModesVerifyFullMode) ToPointer() *SourcePostgresSSLModesVerifyFullMode {
+func (e SourcePostgresSchemasSSLModeSSLModes6Mode) ToPointer() *SourcePostgresSchemasSSLModeSSLModes6Mode {
return &e
}
-func (e *SourcePostgresSSLModesVerifyFullMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresSchemasSSLModeSSLModes6Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-full":
- *e = SourcePostgresSSLModesVerifyFullMode(v)
+ *e = SourcePostgresSchemasSSLModeSSLModes6Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresSSLModesVerifyFullMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresSchemasSSLModeSSLModes6Mode: %v", v)
}
}
-// SourcePostgresSSLModesVerifyFull - This is the most secure mode. Always require encryption and verifies the identity of the source database server.
-type SourcePostgresSSLModesVerifyFull struct {
+// SourcePostgresVerifyFull - This is the most secure mode. Always require encryption and verifies the identity of the source database server.
+type SourcePostgresVerifyFull struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -362,86 +399,87 @@ type SourcePostgresSSLModesVerifyFull struct {
// Client key
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourcePostgresSSLModesVerifyFullMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode SourcePostgresSchemasSSLModeSSLModes6Mode `const:"verify-full" json:"mode"`
}
-type _SourcePostgresSSLModesVerifyFull SourcePostgresSSLModesVerifyFull
-func (c *SourcePostgresSSLModesVerifyFull) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresSSLModesVerifyFull{}
+func (s SourcePostgresVerifyFull) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (s *SourcePostgresVerifyFull) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- *c = SourcePostgresSSLModesVerifyFull(data)
-
- additionalFields := make(map[string]interface{})
+ return nil
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *SourcePostgresVerifyFull) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "ca_certificate")
- delete(additionalFields, "client_certificate")
- delete(additionalFields, "client_key")
- delete(additionalFields, "client_key_password")
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
- return nil
+ return o.AdditionalProperties
}
-func (c SourcePostgresSSLModesVerifyFull) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresSSLModesVerifyFull(c))
- if err != nil {
- return nil, err
+func (o *SourcePostgresVerifyFull) GetCaCertificate() string {
+ if o == nil {
+ return ""
}
+ return o.CaCertificate
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresVerifyFull) GetClientCertificate() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientCertificate
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourcePostgresVerifyFull) GetClientKey() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKey
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresVerifyFull) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKeyPassword
+}
- return json.Marshal(out)
+func (o *SourcePostgresVerifyFull) GetMode() SourcePostgresSchemasSSLModeSSLModes6Mode {
+ return SourcePostgresSchemasSSLModeSSLModes6ModeVerifyFull
}
-type SourcePostgresSSLModesVerifyCaMode string
+type SourcePostgresSchemasSSLModeSSLModes5Mode string
const (
- SourcePostgresSSLModesVerifyCaModeVerifyCa SourcePostgresSSLModesVerifyCaMode = "verify-ca"
+ SourcePostgresSchemasSSLModeSSLModes5ModeVerifyCa SourcePostgresSchemasSSLModeSSLModes5Mode = "verify-ca"
)
-func (e SourcePostgresSSLModesVerifyCaMode) ToPointer() *SourcePostgresSSLModesVerifyCaMode {
+func (e SourcePostgresSchemasSSLModeSSLModes5Mode) ToPointer() *SourcePostgresSchemasSSLModeSSLModes5Mode {
return &e
}
-func (e *SourcePostgresSSLModesVerifyCaMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresSchemasSSLModeSSLModes5Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-ca":
- *e = SourcePostgresSSLModesVerifyCaMode(v)
+ *e = SourcePostgresSchemasSSLModeSSLModes5Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresSSLModesVerifyCaMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresSchemasSSLModeSSLModes5Mode: %v", v)
}
}
-// SourcePostgresSSLModesVerifyCa - Always require encryption and verifies that the source database server has a valid SSL certificate.
-type SourcePostgresSSLModesVerifyCa struct {
+// SourcePostgresVerifyCa - Always require encryption and verifies that the source database server has a valid SSL certificate.
+type SourcePostgresVerifyCa struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -449,490 +487,385 @@ type SourcePostgresSSLModesVerifyCa struct {
// Client key
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourcePostgresSSLModesVerifyCaMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode SourcePostgresSchemasSSLModeSSLModes5Mode `const:"verify-ca" json:"mode"`
}
-type _SourcePostgresSSLModesVerifyCa SourcePostgresSSLModesVerifyCa
-func (c *SourcePostgresSSLModesVerifyCa) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresSSLModesVerifyCa{}
+func (s SourcePostgresVerifyCa) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (s *SourcePostgresVerifyCa) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- *c = SourcePostgresSSLModesVerifyCa(data)
-
- additionalFields := make(map[string]interface{})
+ return nil
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *SourcePostgresVerifyCa) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "ca_certificate")
- delete(additionalFields, "client_certificate")
- delete(additionalFields, "client_key")
- delete(additionalFields, "client_key_password")
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
- return nil
+ return o.AdditionalProperties
}
-func (c SourcePostgresSSLModesVerifyCa) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresSSLModesVerifyCa(c))
- if err != nil {
- return nil, err
+func (o *SourcePostgresVerifyCa) GetCaCertificate() string {
+ if o == nil {
+ return ""
}
+ return o.CaCertificate
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresVerifyCa) GetClientCertificate() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientCertificate
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourcePostgresVerifyCa) GetClientKey() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKey
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresVerifyCa) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKeyPassword
+}
- return json.Marshal(out)
+func (o *SourcePostgresVerifyCa) GetMode() SourcePostgresSchemasSSLModeSSLModes5Mode {
+ return SourcePostgresSchemasSSLModeSSLModes5ModeVerifyCa
}
-type SourcePostgresSSLModesRequireMode string
+type SourcePostgresSchemasSSLModeSSLModesMode string
const (
- SourcePostgresSSLModesRequireModeRequire SourcePostgresSSLModesRequireMode = "require"
+ SourcePostgresSchemasSSLModeSSLModesModeRequire SourcePostgresSchemasSSLModeSSLModesMode = "require"
)
-func (e SourcePostgresSSLModesRequireMode) ToPointer() *SourcePostgresSSLModesRequireMode {
+func (e SourcePostgresSchemasSSLModeSSLModesMode) ToPointer() *SourcePostgresSchemasSSLModeSSLModesMode {
return &e
}
-func (e *SourcePostgresSSLModesRequireMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresSchemasSSLModeSSLModesMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "require":
- *e = SourcePostgresSSLModesRequireMode(v)
+ *e = SourcePostgresSchemasSSLModeSSLModesMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresSSLModesRequireMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresSchemasSSLModeSSLModesMode: %v", v)
}
}
-// SourcePostgresSSLModesRequire - Always require encryption. If the source database server does not support encryption, connection will fail.
-type SourcePostgresSSLModesRequire struct {
- Mode SourcePostgresSSLModesRequireMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourcePostgresRequire - Always require encryption. If the source database server does not support encryption, connection will fail.
+type SourcePostgresRequire struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourcePostgresSchemasSSLModeSSLModesMode `const:"require" json:"mode"`
}
-type _SourcePostgresSSLModesRequire SourcePostgresSSLModesRequire
-
-func (c *SourcePostgresSSLModesRequire) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresSSLModesRequire{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourcePostgresSSLModesRequire(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourcePostgresRequire) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourcePostgresRequire) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourcePostgresSSLModesRequire) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresSSLModesRequire(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresRequire) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourcePostgresRequire) GetMode() SourcePostgresSchemasSSLModeSSLModesMode {
+ return SourcePostgresSchemasSSLModeSSLModesModeRequire
}
-type SourcePostgresSSLModesPreferMode string
+type SourcePostgresSchemasSslModeMode string
const (
- SourcePostgresSSLModesPreferModePrefer SourcePostgresSSLModesPreferMode = "prefer"
+ SourcePostgresSchemasSslModeModePrefer SourcePostgresSchemasSslModeMode = "prefer"
)
-func (e SourcePostgresSSLModesPreferMode) ToPointer() *SourcePostgresSSLModesPreferMode {
+func (e SourcePostgresSchemasSslModeMode) ToPointer() *SourcePostgresSchemasSslModeMode {
return &e
}
-func (e *SourcePostgresSSLModesPreferMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresSchemasSslModeMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "prefer":
- *e = SourcePostgresSSLModesPreferMode(v)
+ *e = SourcePostgresSchemasSslModeMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresSSLModesPreferMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresSchemasSslModeMode: %v", v)
}
}
-// SourcePostgresSSLModesPrefer - Allows unencrypted connection only if the source database does not support encryption.
-type SourcePostgresSSLModesPrefer struct {
- Mode SourcePostgresSSLModesPreferMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourcePostgresPrefer - Allows unencrypted connection only if the source database does not support encryption.
+type SourcePostgresPrefer struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourcePostgresSchemasSslModeMode `const:"prefer" json:"mode"`
}
-type _SourcePostgresSSLModesPrefer SourcePostgresSSLModesPrefer
-func (c *SourcePostgresSSLModesPrefer) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresSSLModesPrefer{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourcePostgresSSLModesPrefer(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourcePostgresPrefer) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourcePostgresPrefer) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourcePostgresSSLModesPrefer) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresSSLModesPrefer(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresPrefer) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourcePostgresPrefer) GetMode() SourcePostgresSchemasSslModeMode {
+ return SourcePostgresSchemasSslModeModePrefer
}
-type SourcePostgresSSLModesAllowMode string
+type SourcePostgresSchemasMode string
const (
- SourcePostgresSSLModesAllowModeAllow SourcePostgresSSLModesAllowMode = "allow"
+ SourcePostgresSchemasModeAllow SourcePostgresSchemasMode = "allow"
)
-func (e SourcePostgresSSLModesAllowMode) ToPointer() *SourcePostgresSSLModesAllowMode {
+func (e SourcePostgresSchemasMode) ToPointer() *SourcePostgresSchemasMode {
return &e
}
-func (e *SourcePostgresSSLModesAllowMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "allow":
- *e = SourcePostgresSSLModesAllowMode(v)
+ *e = SourcePostgresSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresSSLModesAllowMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresSchemasMode: %v", v)
}
}
-// SourcePostgresSSLModesAllow - Enables encryption only when required by the source database.
-type SourcePostgresSSLModesAllow struct {
- Mode SourcePostgresSSLModesAllowMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourcePostgresAllow - Enables encryption only when required by the source database.
+type SourcePostgresAllow struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourcePostgresSchemasMode `const:"allow" json:"mode"`
}
-type _SourcePostgresSSLModesAllow SourcePostgresSSLModesAllow
-
-func (c *SourcePostgresSSLModesAllow) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresSSLModesAllow{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourcePostgresSSLModesAllow(data)
- additionalFields := make(map[string]interface{})
+func (s SourcePostgresAllow) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourcePostgresAllow) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourcePostgresSSLModesAllow) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresSSLModesAllow(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresAllow) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourcePostgresAllow) GetMode() SourcePostgresSchemasMode {
+ return SourcePostgresSchemasModeAllow
}
-type SourcePostgresSSLModesDisableMode string
+type SourcePostgresMode string
const (
- SourcePostgresSSLModesDisableModeDisable SourcePostgresSSLModesDisableMode = "disable"
+ SourcePostgresModeDisable SourcePostgresMode = "disable"
)
-func (e SourcePostgresSSLModesDisableMode) ToPointer() *SourcePostgresSSLModesDisableMode {
+func (e SourcePostgresMode) ToPointer() *SourcePostgresMode {
return &e
}
-func (e *SourcePostgresSSLModesDisableMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "disable":
- *e = SourcePostgresSSLModesDisableMode(v)
+ *e = SourcePostgresMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresSSLModesDisableMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresMode: %v", v)
}
}
-// SourcePostgresSSLModesDisable - Disables encryption of communication between Airbyte and source database.
-type SourcePostgresSSLModesDisable struct {
- Mode SourcePostgresSSLModesDisableMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourcePostgresDisable - Disables encryption of communication between Airbyte and source database.
+type SourcePostgresDisable struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourcePostgresMode `const:"disable" json:"mode"`
}
-type _SourcePostgresSSLModesDisable SourcePostgresSSLModesDisable
-
-func (c *SourcePostgresSSLModesDisable) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresSSLModesDisable{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourcePostgresSSLModesDisable(data)
- additionalFields := make(map[string]interface{})
+func (s SourcePostgresDisable) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourcePostgresDisable) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourcePostgresSSLModesDisable) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresSSLModesDisable(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresDisable) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourcePostgresDisable) GetMode() SourcePostgresMode {
+ return SourcePostgresModeDisable
}
type SourcePostgresSSLModesType string
const (
- SourcePostgresSSLModesTypeSourcePostgresSSLModesDisable SourcePostgresSSLModesType = "source-postgres_SSL Modes_disable"
- SourcePostgresSSLModesTypeSourcePostgresSSLModesAllow SourcePostgresSSLModesType = "source-postgres_SSL Modes_allow"
- SourcePostgresSSLModesTypeSourcePostgresSSLModesPrefer SourcePostgresSSLModesType = "source-postgres_SSL Modes_prefer"
- SourcePostgresSSLModesTypeSourcePostgresSSLModesRequire SourcePostgresSSLModesType = "source-postgres_SSL Modes_require"
- SourcePostgresSSLModesTypeSourcePostgresSSLModesVerifyCa SourcePostgresSSLModesType = "source-postgres_SSL Modes_verify-ca"
- SourcePostgresSSLModesTypeSourcePostgresSSLModesVerifyFull SourcePostgresSSLModesType = "source-postgres_SSL Modes_verify-full"
+ SourcePostgresSSLModesTypeSourcePostgresDisable SourcePostgresSSLModesType = "source-postgres_disable"
+ SourcePostgresSSLModesTypeSourcePostgresAllow SourcePostgresSSLModesType = "source-postgres_allow"
+ SourcePostgresSSLModesTypeSourcePostgresPrefer SourcePostgresSSLModesType = "source-postgres_prefer"
+ SourcePostgresSSLModesTypeSourcePostgresRequire SourcePostgresSSLModesType = "source-postgres_require"
+ SourcePostgresSSLModesTypeSourcePostgresVerifyCa SourcePostgresSSLModesType = "source-postgres_verify-ca"
+ SourcePostgresSSLModesTypeSourcePostgresVerifyFull SourcePostgresSSLModesType = "source-postgres_verify-full"
)
type SourcePostgresSSLModes struct {
- SourcePostgresSSLModesDisable *SourcePostgresSSLModesDisable
- SourcePostgresSSLModesAllow *SourcePostgresSSLModesAllow
- SourcePostgresSSLModesPrefer *SourcePostgresSSLModesPrefer
- SourcePostgresSSLModesRequire *SourcePostgresSSLModesRequire
- SourcePostgresSSLModesVerifyCa *SourcePostgresSSLModesVerifyCa
- SourcePostgresSSLModesVerifyFull *SourcePostgresSSLModesVerifyFull
+ SourcePostgresDisable *SourcePostgresDisable
+ SourcePostgresAllow *SourcePostgresAllow
+ SourcePostgresPrefer *SourcePostgresPrefer
+ SourcePostgresRequire *SourcePostgresRequire
+ SourcePostgresVerifyCa *SourcePostgresVerifyCa
+ SourcePostgresVerifyFull *SourcePostgresVerifyFull
Type SourcePostgresSSLModesType
}
-func CreateSourcePostgresSSLModesSourcePostgresSSLModesDisable(sourcePostgresSSLModesDisable SourcePostgresSSLModesDisable) SourcePostgresSSLModes {
- typ := SourcePostgresSSLModesTypeSourcePostgresSSLModesDisable
+func CreateSourcePostgresSSLModesSourcePostgresDisable(sourcePostgresDisable SourcePostgresDisable) SourcePostgresSSLModes {
+ typ := SourcePostgresSSLModesTypeSourcePostgresDisable
return SourcePostgresSSLModes{
- SourcePostgresSSLModesDisable: &sourcePostgresSSLModesDisable,
- Type: typ,
+ SourcePostgresDisable: &sourcePostgresDisable,
+ Type: typ,
}
}
-func CreateSourcePostgresSSLModesSourcePostgresSSLModesAllow(sourcePostgresSSLModesAllow SourcePostgresSSLModesAllow) SourcePostgresSSLModes {
- typ := SourcePostgresSSLModesTypeSourcePostgresSSLModesAllow
+func CreateSourcePostgresSSLModesSourcePostgresAllow(sourcePostgresAllow SourcePostgresAllow) SourcePostgresSSLModes {
+ typ := SourcePostgresSSLModesTypeSourcePostgresAllow
return SourcePostgresSSLModes{
- SourcePostgresSSLModesAllow: &sourcePostgresSSLModesAllow,
- Type: typ,
+ SourcePostgresAllow: &sourcePostgresAllow,
+ Type: typ,
}
}
-func CreateSourcePostgresSSLModesSourcePostgresSSLModesPrefer(sourcePostgresSSLModesPrefer SourcePostgresSSLModesPrefer) SourcePostgresSSLModes {
- typ := SourcePostgresSSLModesTypeSourcePostgresSSLModesPrefer
+func CreateSourcePostgresSSLModesSourcePostgresPrefer(sourcePostgresPrefer SourcePostgresPrefer) SourcePostgresSSLModes {
+ typ := SourcePostgresSSLModesTypeSourcePostgresPrefer
return SourcePostgresSSLModes{
- SourcePostgresSSLModesPrefer: &sourcePostgresSSLModesPrefer,
- Type: typ,
+ SourcePostgresPrefer: &sourcePostgresPrefer,
+ Type: typ,
}
}
-func CreateSourcePostgresSSLModesSourcePostgresSSLModesRequire(sourcePostgresSSLModesRequire SourcePostgresSSLModesRequire) SourcePostgresSSLModes {
- typ := SourcePostgresSSLModesTypeSourcePostgresSSLModesRequire
+func CreateSourcePostgresSSLModesSourcePostgresRequire(sourcePostgresRequire SourcePostgresRequire) SourcePostgresSSLModes {
+ typ := SourcePostgresSSLModesTypeSourcePostgresRequire
return SourcePostgresSSLModes{
- SourcePostgresSSLModesRequire: &sourcePostgresSSLModesRequire,
- Type: typ,
+ SourcePostgresRequire: &sourcePostgresRequire,
+ Type: typ,
}
}
-func CreateSourcePostgresSSLModesSourcePostgresSSLModesVerifyCa(sourcePostgresSSLModesVerifyCa SourcePostgresSSLModesVerifyCa) SourcePostgresSSLModes {
- typ := SourcePostgresSSLModesTypeSourcePostgresSSLModesVerifyCa
+func CreateSourcePostgresSSLModesSourcePostgresVerifyCa(sourcePostgresVerifyCa SourcePostgresVerifyCa) SourcePostgresSSLModes {
+ typ := SourcePostgresSSLModesTypeSourcePostgresVerifyCa
return SourcePostgresSSLModes{
- SourcePostgresSSLModesVerifyCa: &sourcePostgresSSLModesVerifyCa,
- Type: typ,
+ SourcePostgresVerifyCa: &sourcePostgresVerifyCa,
+ Type: typ,
}
}
-func CreateSourcePostgresSSLModesSourcePostgresSSLModesVerifyFull(sourcePostgresSSLModesVerifyFull SourcePostgresSSLModesVerifyFull) SourcePostgresSSLModes {
- typ := SourcePostgresSSLModesTypeSourcePostgresSSLModesVerifyFull
+func CreateSourcePostgresSSLModesSourcePostgresVerifyFull(sourcePostgresVerifyFull SourcePostgresVerifyFull) SourcePostgresSSLModes {
+ typ := SourcePostgresSSLModesTypeSourcePostgresVerifyFull
return SourcePostgresSSLModes{
- SourcePostgresSSLModesVerifyFull: &sourcePostgresSSLModesVerifyFull,
- Type: typ,
+ SourcePostgresVerifyFull: &sourcePostgresVerifyFull,
+ Type: typ,
}
}
func (u *SourcePostgresSSLModes) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourcePostgresSSLModesDisable := new(SourcePostgresSSLModesDisable)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresSSLModesDisable); err == nil {
- u.SourcePostgresSSLModesDisable = sourcePostgresSSLModesDisable
- u.Type = SourcePostgresSSLModesTypeSourcePostgresSSLModesDisable
+
+ sourcePostgresDisable := new(SourcePostgresDisable)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresDisable, "", true, true); err == nil {
+ u.SourcePostgresDisable = sourcePostgresDisable
+ u.Type = SourcePostgresSSLModesTypeSourcePostgresDisable
return nil
}
- sourcePostgresSSLModesAllow := new(SourcePostgresSSLModesAllow)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresSSLModesAllow); err == nil {
- u.SourcePostgresSSLModesAllow = sourcePostgresSSLModesAllow
- u.Type = SourcePostgresSSLModesTypeSourcePostgresSSLModesAllow
+ sourcePostgresAllow := new(SourcePostgresAllow)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresAllow, "", true, true); err == nil {
+ u.SourcePostgresAllow = sourcePostgresAllow
+ u.Type = SourcePostgresSSLModesTypeSourcePostgresAllow
return nil
}
- sourcePostgresSSLModesPrefer := new(SourcePostgresSSLModesPrefer)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresSSLModesPrefer); err == nil {
- u.SourcePostgresSSLModesPrefer = sourcePostgresSSLModesPrefer
- u.Type = SourcePostgresSSLModesTypeSourcePostgresSSLModesPrefer
+ sourcePostgresPrefer := new(SourcePostgresPrefer)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresPrefer, "", true, true); err == nil {
+ u.SourcePostgresPrefer = sourcePostgresPrefer
+ u.Type = SourcePostgresSSLModesTypeSourcePostgresPrefer
return nil
}
- sourcePostgresSSLModesRequire := new(SourcePostgresSSLModesRequire)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresSSLModesRequire); err == nil {
- u.SourcePostgresSSLModesRequire = sourcePostgresSSLModesRequire
- u.Type = SourcePostgresSSLModesTypeSourcePostgresSSLModesRequire
+ sourcePostgresRequire := new(SourcePostgresRequire)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresRequire, "", true, true); err == nil {
+ u.SourcePostgresRequire = sourcePostgresRequire
+ u.Type = SourcePostgresSSLModesTypeSourcePostgresRequire
return nil
}
- sourcePostgresSSLModesVerifyCa := new(SourcePostgresSSLModesVerifyCa)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresSSLModesVerifyCa); err == nil {
- u.SourcePostgresSSLModesVerifyCa = sourcePostgresSSLModesVerifyCa
- u.Type = SourcePostgresSSLModesTypeSourcePostgresSSLModesVerifyCa
+ sourcePostgresVerifyCa := new(SourcePostgresVerifyCa)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresVerifyCa, "", true, true); err == nil {
+ u.SourcePostgresVerifyCa = sourcePostgresVerifyCa
+ u.Type = SourcePostgresSSLModesTypeSourcePostgresVerifyCa
return nil
}
- sourcePostgresSSLModesVerifyFull := new(SourcePostgresSSLModesVerifyFull)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresSSLModesVerifyFull); err == nil {
- u.SourcePostgresSSLModesVerifyFull = sourcePostgresSSLModesVerifyFull
- u.Type = SourcePostgresSSLModesTypeSourcePostgresSSLModesVerifyFull
+ sourcePostgresVerifyFull := new(SourcePostgresVerifyFull)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresVerifyFull, "", true, true); err == nil {
+ u.SourcePostgresVerifyFull = sourcePostgresVerifyFull
+ u.Type = SourcePostgresSSLModesTypeSourcePostgresVerifyFull
return nil
}
@@ -940,212 +873,306 @@ func (u *SourcePostgresSSLModes) UnmarshalJSON(data []byte) error {
}
func (u SourcePostgresSSLModes) MarshalJSON() ([]byte, error) {
- if u.SourcePostgresSSLModesDisable != nil {
- return json.Marshal(u.SourcePostgresSSLModesDisable)
+ if u.SourcePostgresDisable != nil {
+ return utils.MarshalJSON(u.SourcePostgresDisable, "", true)
}
- if u.SourcePostgresSSLModesAllow != nil {
- return json.Marshal(u.SourcePostgresSSLModesAllow)
+ if u.SourcePostgresAllow != nil {
+ return utils.MarshalJSON(u.SourcePostgresAllow, "", true)
}
- if u.SourcePostgresSSLModesPrefer != nil {
- return json.Marshal(u.SourcePostgresSSLModesPrefer)
+ if u.SourcePostgresPrefer != nil {
+ return utils.MarshalJSON(u.SourcePostgresPrefer, "", true)
}
- if u.SourcePostgresSSLModesRequire != nil {
- return json.Marshal(u.SourcePostgresSSLModesRequire)
+ if u.SourcePostgresRequire != nil {
+ return utils.MarshalJSON(u.SourcePostgresRequire, "", true)
}
- if u.SourcePostgresSSLModesVerifyCa != nil {
- return json.Marshal(u.SourcePostgresSSLModesVerifyCa)
+ if u.SourcePostgresVerifyCa != nil {
+ return utils.MarshalJSON(u.SourcePostgresVerifyCa, "", true)
}
- if u.SourcePostgresSSLModesVerifyFull != nil {
- return json.Marshal(u.SourcePostgresSSLModesVerifyFull)
+ if u.SourcePostgresVerifyFull != nil {
+ return utils.MarshalJSON(u.SourcePostgresVerifyFull, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourcePostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type SourcePostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// SourcePostgresSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type SourcePostgresSchemasTunnelMethodTunnelMethod string
const (
- SourcePostgresSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth SourcePostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ SourcePostgresSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourcePostgresSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourcePostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *SourcePostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e SourcePostgresSchemasTunnelMethodTunnelMethod) ToPointer() *SourcePostgresSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *SourcePostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourcePostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = SourcePostgresSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// SourcePostgresSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourcePostgresSSHTunnelMethodPasswordAuthentication struct {
+// SourcePostgresPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourcePostgresPasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourcePostgresSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourcePostgresSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourcePostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourcePostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourcePostgresPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostgresPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePostgresPasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourcePostgresPasswordAuthentication) GetTunnelMethod() SourcePostgresSchemasTunnelMethodTunnelMethod {
+ return SourcePostgresSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourcePostgresPasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourcePostgresPasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourcePostgresPasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// SourcePostgresSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type SourcePostgresSchemasTunnelMethod string
const (
- SourcePostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth SourcePostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ SourcePostgresSchemasTunnelMethodSSHKeyAuth SourcePostgresSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e SourcePostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *SourcePostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e SourcePostgresSchemasTunnelMethod) ToPointer() *SourcePostgresSchemasTunnelMethod {
return &e
}
-func (e *SourcePostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourcePostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = SourcePostgresSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresSchemasTunnelMethod: %v", v)
}
}
-// SourcePostgresSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourcePostgresSSHTunnelMethodSSHKeyAuthentication struct {
+// SourcePostgresSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourcePostgresSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourcePostgresSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourcePostgresSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourcePostgresSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourcePostgresSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourcePostgresSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostgresSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePostgresSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourcePostgresSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourcePostgresSSHKeyAuthentication) GetTunnelMethod() SourcePostgresSchemasTunnelMethod {
+ return SourcePostgresSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourcePostgresSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourcePostgresSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourcePostgresTunnelMethod - No ssh tunnel needed to connect to database
+type SourcePostgresTunnelMethod string
const (
- SourcePostgresSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourcePostgresSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourcePostgresTunnelMethodNoTunnel SourcePostgresTunnelMethod = "NO_TUNNEL"
)
-func (e SourcePostgresSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourcePostgresSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourcePostgresTunnelMethod) ToPointer() *SourcePostgresTunnelMethod {
return &e
}
-func (e *SourcePostgresSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourcePostgresSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourcePostgresTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresTunnelMethod: %v", v)
}
}
-// SourcePostgresSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourcePostgresSSHTunnelMethodNoTunnel struct {
+// SourcePostgresNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourcePostgresNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourcePostgresSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourcePostgresTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourcePostgresNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostgresNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePostgresNoTunnel) GetTunnelMethod() SourcePostgresTunnelMethod {
+ return SourcePostgresTunnelMethodNoTunnel
}
type SourcePostgresSSHTunnelMethodType string
const (
- SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHTunnelMethodNoTunnel SourcePostgresSSHTunnelMethodType = "source-postgres_SSH Tunnel Method_No Tunnel"
- SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHTunnelMethodSSHKeyAuthentication SourcePostgresSSHTunnelMethodType = "source-postgres_SSH Tunnel Method_SSH Key Authentication"
- SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHTunnelMethodPasswordAuthentication SourcePostgresSSHTunnelMethodType = "source-postgres_SSH Tunnel Method_Password Authentication"
+ SourcePostgresSSHTunnelMethodTypeSourcePostgresNoTunnel SourcePostgresSSHTunnelMethodType = "source-postgres_No Tunnel"
+ SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHKeyAuthentication SourcePostgresSSHTunnelMethodType = "source-postgres_SSH Key Authentication"
+ SourcePostgresSSHTunnelMethodTypeSourcePostgresPasswordAuthentication SourcePostgresSSHTunnelMethodType = "source-postgres_Password Authentication"
)
type SourcePostgresSSHTunnelMethod struct {
- SourcePostgresSSHTunnelMethodNoTunnel *SourcePostgresSSHTunnelMethodNoTunnel
- SourcePostgresSSHTunnelMethodSSHKeyAuthentication *SourcePostgresSSHTunnelMethodSSHKeyAuthentication
- SourcePostgresSSHTunnelMethodPasswordAuthentication *SourcePostgresSSHTunnelMethodPasswordAuthentication
+ SourcePostgresNoTunnel *SourcePostgresNoTunnel
+ SourcePostgresSSHKeyAuthentication *SourcePostgresSSHKeyAuthentication
+ SourcePostgresPasswordAuthentication *SourcePostgresPasswordAuthentication
Type SourcePostgresSSHTunnelMethodType
}
-func CreateSourcePostgresSSHTunnelMethodSourcePostgresSSHTunnelMethodNoTunnel(sourcePostgresSSHTunnelMethodNoTunnel SourcePostgresSSHTunnelMethodNoTunnel) SourcePostgresSSHTunnelMethod {
- typ := SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHTunnelMethodNoTunnel
+func CreateSourcePostgresSSHTunnelMethodSourcePostgresNoTunnel(sourcePostgresNoTunnel SourcePostgresNoTunnel) SourcePostgresSSHTunnelMethod {
+ typ := SourcePostgresSSHTunnelMethodTypeSourcePostgresNoTunnel
return SourcePostgresSSHTunnelMethod{
- SourcePostgresSSHTunnelMethodNoTunnel: &sourcePostgresSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourcePostgresNoTunnel: &sourcePostgresNoTunnel,
+ Type: typ,
}
}
-func CreateSourcePostgresSSHTunnelMethodSourcePostgresSSHTunnelMethodSSHKeyAuthentication(sourcePostgresSSHTunnelMethodSSHKeyAuthentication SourcePostgresSSHTunnelMethodSSHKeyAuthentication) SourcePostgresSSHTunnelMethod {
- typ := SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHTunnelMethodSSHKeyAuthentication
+func CreateSourcePostgresSSHTunnelMethodSourcePostgresSSHKeyAuthentication(sourcePostgresSSHKeyAuthentication SourcePostgresSSHKeyAuthentication) SourcePostgresSSHTunnelMethod {
+ typ := SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHKeyAuthentication
return SourcePostgresSSHTunnelMethod{
- SourcePostgresSSHTunnelMethodSSHKeyAuthentication: &sourcePostgresSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ SourcePostgresSSHKeyAuthentication: &sourcePostgresSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateSourcePostgresSSHTunnelMethodSourcePostgresSSHTunnelMethodPasswordAuthentication(sourcePostgresSSHTunnelMethodPasswordAuthentication SourcePostgresSSHTunnelMethodPasswordAuthentication) SourcePostgresSSHTunnelMethod {
- typ := SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHTunnelMethodPasswordAuthentication
+func CreateSourcePostgresSSHTunnelMethodSourcePostgresPasswordAuthentication(sourcePostgresPasswordAuthentication SourcePostgresPasswordAuthentication) SourcePostgresSSHTunnelMethod {
+ typ := SourcePostgresSSHTunnelMethodTypeSourcePostgresPasswordAuthentication
return SourcePostgresSSHTunnelMethod{
- SourcePostgresSSHTunnelMethodPasswordAuthentication: &sourcePostgresSSHTunnelMethodPasswordAuthentication,
- Type: typ,
+ SourcePostgresPasswordAuthentication: &sourcePostgresPasswordAuthentication,
+ Type: typ,
}
}
func (u *SourcePostgresSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourcePostgresSSHTunnelMethodNoTunnel := new(SourcePostgresSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresSSHTunnelMethodNoTunnel); err == nil {
- u.SourcePostgresSSHTunnelMethodNoTunnel = sourcePostgresSSHTunnelMethodNoTunnel
- u.Type = SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHTunnelMethodNoTunnel
+
+ sourcePostgresNoTunnel := new(SourcePostgresNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresNoTunnel, "", true, true); err == nil {
+ u.SourcePostgresNoTunnel = sourcePostgresNoTunnel
+ u.Type = SourcePostgresSSHTunnelMethodTypeSourcePostgresNoTunnel
return nil
}
- sourcePostgresSSHTunnelMethodSSHKeyAuthentication := new(SourcePostgresSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourcePostgresSSHTunnelMethodSSHKeyAuthentication = sourcePostgresSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHTunnelMethodSSHKeyAuthentication
+ sourcePostgresSSHKeyAuthentication := new(SourcePostgresSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourcePostgresSSHKeyAuthentication = sourcePostgresSSHKeyAuthentication
+ u.Type = SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHKeyAuthentication
return nil
}
- sourcePostgresSSHTunnelMethodPasswordAuthentication := new(SourcePostgresSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourcePostgresSSHTunnelMethodPasswordAuthentication = sourcePostgresSSHTunnelMethodPasswordAuthentication
- u.Type = SourcePostgresSSHTunnelMethodTypeSourcePostgresSSHTunnelMethodPasswordAuthentication
+ sourcePostgresPasswordAuthentication := new(SourcePostgresPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresPasswordAuthentication, "", true, true); err == nil {
+ u.SourcePostgresPasswordAuthentication = sourcePostgresPasswordAuthentication
+ u.Type = SourcePostgresSSHTunnelMethodTypeSourcePostgresPasswordAuthentication
return nil
}
@@ -1153,19 +1180,19 @@ func (u *SourcePostgresSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u SourcePostgresSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourcePostgresSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourcePostgresSSHTunnelMethodNoTunnel)
+ if u.SourcePostgresNoTunnel != nil {
+ return utils.MarshalJSON(u.SourcePostgresNoTunnel, "", true)
}
- if u.SourcePostgresSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourcePostgresSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourcePostgresSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourcePostgresSSHKeyAuthentication, "", true)
}
- if u.SourcePostgresSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourcePostgresSSHTunnelMethodPasswordAuthentication)
+ if u.SourcePostgresPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourcePostgresPasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourcePostgres struct {
@@ -1178,12 +1205,12 @@ type SourcePostgres struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5432" json:"port"`
// Configures how data is extracted from the database.
ReplicationMethod *SourcePostgresUpdateMethod `json:"replication_method,omitempty"`
// The list of schemas (case sensitive) to sync from. Defaults to public.
Schemas []string `json:"schemas,omitempty"`
- SourceType SourcePostgresPostgres `json:"sourceType"`
+ sourceType SourcePostgresPostgres `const:"postgres" json:"sourceType"`
// SSL connection modes.
// Read more in the docs.
SslMode *SourcePostgresSSLModes `json:"ssl_mode,omitempty"`
@@ -1192,3 +1219,88 @@ type SourcePostgres struct {
// Username to access the database.
Username string `json:"username"`
}
+
+func (s SourcePostgres) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostgres) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePostgres) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourcePostgres) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourcePostgres) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourcePostgres) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourcePostgres) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourcePostgres) GetReplicationMethod() *SourcePostgresUpdateMethod {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicationMethod
+}
+
+func (o *SourcePostgres) GetSchemas() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Schemas
+}
+
+func (o *SourcePostgres) GetSourceType() SourcePostgresPostgres {
+ return SourcePostgresPostgresPostgres
+}
+
+func (o *SourcePostgres) GetSslMode() *SourcePostgresSSLModes {
+ if o == nil {
+ return nil
+ }
+ return o.SslMode
+}
+
+func (o *SourcePostgres) GetTunnelMethod() *SourcePostgresSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourcePostgres) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepostgrescreaterequest.go b/internal/sdk/pkg/models/shared/sourcepostgrescreaterequest.go
old mode 100755
new mode 100644
index 66652e543..a2e0e1c4d
--- a/internal/sdk/pkg/models/shared/sourcepostgrescreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepostgrescreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePostgresCreateRequest struct {
Configuration SourcePostgres `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePostgresCreateRequest) GetConfiguration() SourcePostgres {
+ if o == nil {
+ return SourcePostgres{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePostgresCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePostgresCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePostgresCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePostgresCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepostgresputrequest.go b/internal/sdk/pkg/models/shared/sourcepostgresputrequest.go
old mode 100755
new mode 100644
index da9770541..19debc1b4
--- a/internal/sdk/pkg/models/shared/sourcepostgresputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepostgresputrequest.go
@@ -7,3 +7,24 @@ type SourcePostgresPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePostgresPutRequest) GetConfiguration() SourcePostgresUpdate {
+ if o == nil {
+ return SourcePostgresUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePostgresPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePostgresPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepostgresupdate.go b/internal/sdk/pkg/models/shared/sourcepostgresupdate.go
old mode 100755
new mode 100644
index e7f92d960..c4ad716d2
--- a/internal/sdk/pkg/models/shared/sourcepostgresupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepostgresupdate.go
@@ -3,83 +3,113 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod string
+type SourcePostgresUpdateSchemasReplicationMethodMethod string
const (
- SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethodStandard SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod = "Standard"
+ SourcePostgresUpdateSchemasReplicationMethodMethodStandard SourcePostgresUpdateSchemasReplicationMethodMethod = "Standard"
)
-func (e SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod) ToPointer() *SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod {
+func (e SourcePostgresUpdateSchemasReplicationMethodMethod) ToPointer() *SourcePostgresUpdateSchemasReplicationMethodMethod {
return &e
}
-func (e *SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateSchemasReplicationMethodMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Standard":
- *e = SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod(v)
+ *e = SourcePostgresUpdateSchemasReplicationMethodMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateSchemasReplicationMethodMethod: %v", v)
}
}
-// SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
-type SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor struct {
- Method SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod `json:"method"`
+// SourcePostgresUpdateScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).
+type SourcePostgresUpdateScanChangesWithUserDefinedCursor struct {
+ method SourcePostgresUpdateSchemasReplicationMethodMethod `const:"Standard" json:"method"`
}
-type SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod string
+func (s SourcePostgresUpdateScanChangesWithUserDefinedCursor) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostgresUpdateScanChangesWithUserDefinedCursor) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePostgresUpdateScanChangesWithUserDefinedCursor) GetMethod() SourcePostgresUpdateSchemasReplicationMethodMethod {
+ return SourcePostgresUpdateSchemasReplicationMethodMethodStandard
+}
+
+type SourcePostgresUpdateSchemasMethod string
const (
- SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethodXmin SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod = "Xmin"
+ SourcePostgresUpdateSchemasMethodXmin SourcePostgresUpdateSchemasMethod = "Xmin"
)
-func (e SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod) ToPointer() *SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod {
+func (e SourcePostgresUpdateSchemasMethod) ToPointer() *SourcePostgresUpdateSchemasMethod {
return &e
}
-func (e *SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateSchemasMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Xmin":
- *e = SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod(v)
+ *e = SourcePostgresUpdateSchemasMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateSchemasMethod: %v", v)
}
}
-// SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn - Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB.
-type SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn struct {
- Method SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod `json:"method"`
+// DetectChangesWithXminSystemColumn - Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB.
+type DetectChangesWithXminSystemColumn struct {
+ method SourcePostgresUpdateSchemasMethod `const:"Xmin" json:"method"`
+}
+
+func (d DetectChangesWithXminSystemColumn) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(d, "", false)
}
-// SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour - Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
-type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour string
+func (d *DetectChangesWithXminSystemColumn) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &d, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *DetectChangesWithXminSystemColumn) GetMethod() SourcePostgresUpdateSchemasMethod {
+ return SourcePostgresUpdateSchemasMethodXmin
+}
+
+// SourcePostgresUpdateLSNCommitBehaviour - Determines when Airbyte should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
+type SourcePostgresUpdateLSNCommitBehaviour string
const (
- SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviourWhileReadingData SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour = "While reading Data"
- SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviourAfterLoadingDataInTheDestination SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour = "After loading Data in the destination"
+ SourcePostgresUpdateLSNCommitBehaviourWhileReadingData SourcePostgresUpdateLSNCommitBehaviour = "While reading Data"
+ SourcePostgresUpdateLSNCommitBehaviourAfterLoadingDataInTheDestination SourcePostgresUpdateLSNCommitBehaviour = "After loading Data in the destination"
)
-func (e SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour) ToPointer() *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour {
+func (e SourcePostgresUpdateLSNCommitBehaviour) ToPointer() *SourcePostgresUpdateLSNCommitBehaviour {
return &e
}
-func (e *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateLSNCommitBehaviour) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -88,201 +118,207 @@ func (e *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNComm
case "While reading Data":
fallthrough
case "After loading Data in the destination":
- *e = SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour(v)
+ *e = SourcePostgresUpdateLSNCommitBehaviour(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateLSNCommitBehaviour: %v", v)
}
}
-type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod string
+type SourcePostgresUpdateSchemasReplicationMethodUpdateMethodMethod string
const (
- SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethodCdc SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod = "CDC"
+ SourcePostgresUpdateSchemasReplicationMethodUpdateMethodMethodCdc SourcePostgresUpdateSchemasReplicationMethodUpdateMethodMethod = "CDC"
)
-func (e SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod) ToPointer() *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod {
+func (e SourcePostgresUpdateSchemasReplicationMethodUpdateMethodMethod) ToPointer() *SourcePostgresUpdateSchemasReplicationMethodUpdateMethodMethod {
return &e
}
-func (e *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateSchemasReplicationMethodUpdateMethodMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "CDC":
- *e = SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod(v)
+ *e = SourcePostgresUpdateSchemasReplicationMethodUpdateMethodMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateSchemasReplicationMethodUpdateMethodMethod: %v", v)
}
}
-// SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin - A logical decoding plugin installed on the PostgreSQL server.
-type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin string
+// SourcePostgresUpdatePlugin - A logical decoding plugin installed on the PostgreSQL server.
+type SourcePostgresUpdatePlugin string
const (
- SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPluginPgoutput SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin = "pgoutput"
+ SourcePostgresUpdatePluginPgoutput SourcePostgresUpdatePlugin = "pgoutput"
)
-func (e SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin) ToPointer() *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin {
+func (e SourcePostgresUpdatePlugin) ToPointer() *SourcePostgresUpdatePlugin {
return &e
}
-func (e *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdatePlugin) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pgoutput":
- *e = SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin(v)
+ *e = SourcePostgresUpdatePlugin(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdatePlugin: %v", v)
}
}
-// SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.
-type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC struct {
+// ReadChangesUsingWriteAheadLogCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.
+type ReadChangesUsingWriteAheadLogCDC struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.
- InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"`
- // Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
- LsnCommitBehaviour *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour `json:"lsn_commit_behaviour,omitempty"`
- Method SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod `json:"method"`
+ InitialWaitingSeconds *int64 `default:"300" json:"initial_waiting_seconds"`
+ // Determines when Airbyte should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync.
+ LsnCommitBehaviour *SourcePostgresUpdateLSNCommitBehaviour `default:"After loading Data in the destination" json:"lsn_commit_behaviour"`
+ method SourcePostgresUpdateSchemasReplicationMethodUpdateMethodMethod `const:"CDC" json:"method"`
// A logical decoding plugin installed on the PostgreSQL server.
- Plugin *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin `json:"plugin,omitempty"`
+ Plugin *SourcePostgresUpdatePlugin `default:"pgoutput" json:"plugin"`
// A Postgres publication used for consuming changes. Read about publications and replication identities.
Publication string `json:"publication"`
// The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful.
- QueueSize *int64 `json:"queue_size,omitempty"`
+ QueueSize *int64 `default:"10000" json:"queue_size"`
// A plugin logical replication slot. Read about replication slots.
ReplicationSlot string `json:"replication_slot"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC
-func (c *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC{}
+func (r ReadChangesUsingWriteAheadLogCDC) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(r, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (r *ReadChangesUsingWriteAheadLogCDC) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &r, "", false, true); err != nil {
return err
}
- *c = SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC(data)
+ return nil
+}
- additionalFields := make(map[string]interface{})
+func (o *ReadChangesUsingWriteAheadLogCDC) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *ReadChangesUsingWriteAheadLogCDC) GetInitialWaitingSeconds() *int64 {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "initial_waiting_seconds")
- delete(additionalFields, "lsn_commit_behaviour")
- delete(additionalFields, "method")
- delete(additionalFields, "plugin")
- delete(additionalFields, "publication")
- delete(additionalFields, "queue_size")
- delete(additionalFields, "replication_slot")
+ return o.InitialWaitingSeconds
+}
- c.AdditionalProperties = additionalFields
+func (o *ReadChangesUsingWriteAheadLogCDC) GetLsnCommitBehaviour() *SourcePostgresUpdateLSNCommitBehaviour {
+ if o == nil {
+ return nil
+ }
+ return o.LsnCommitBehaviour
+}
- return nil
+func (o *ReadChangesUsingWriteAheadLogCDC) GetMethod() SourcePostgresUpdateSchemasReplicationMethodUpdateMethodMethod {
+ return SourcePostgresUpdateSchemasReplicationMethodUpdateMethodMethodCdc
}
-func (c SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC(c))
- if err != nil {
- return nil, err
+func (o *ReadChangesUsingWriteAheadLogCDC) GetPlugin() *SourcePostgresUpdatePlugin {
+ if o == nil {
+ return nil
}
+ return o.Plugin
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *ReadChangesUsingWriteAheadLogCDC) GetPublication() string {
+ if o == nil {
+ return ""
}
+ return o.Publication
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *ReadChangesUsingWriteAheadLogCDC) GetQueueSize() *int64 {
+ if o == nil {
+ return nil
}
+ return o.QueueSize
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *ReadChangesUsingWriteAheadLogCDC) GetReplicationSlot() string {
+ if o == nil {
+ return ""
}
-
- return json.Marshal(out)
+ return o.ReplicationSlot
}
type SourcePostgresUpdateUpdateMethodType string
const (
- SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateUpdateMethodType = "source-postgres-update_Update Method_Read Changes using Write-Ahead Log (CDC)"
- SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn SourcePostgresUpdateUpdateMethodType = "source-postgres-update_Update Method_Detect Changes with Xmin System Column"
- SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor SourcePostgresUpdateUpdateMethodType = "source-postgres-update_Update Method_Scan Changes with User Defined Cursor"
+ SourcePostgresUpdateUpdateMethodTypeReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateUpdateMethodType = "Read Changes using Write-Ahead Log (CDC)"
+ SourcePostgresUpdateUpdateMethodTypeDetectChangesWithXminSystemColumn SourcePostgresUpdateUpdateMethodType = "Detect Changes with Xmin System Column"
+ SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateScanChangesWithUserDefinedCursor SourcePostgresUpdateUpdateMethodType = "source-postgres-update_Scan Changes with User Defined Cursor"
)
type SourcePostgresUpdateUpdateMethod struct {
- SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC
- SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn
- SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor
+ ReadChangesUsingWriteAheadLogCDC *ReadChangesUsingWriteAheadLogCDC
+ DetectChangesWithXminSystemColumn *DetectChangesWithXminSystemColumn
+ SourcePostgresUpdateScanChangesWithUserDefinedCursor *SourcePostgresUpdateScanChangesWithUserDefinedCursor
Type SourcePostgresUpdateUpdateMethodType
}
-func CreateSourcePostgresUpdateUpdateMethodSourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC(sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC) SourcePostgresUpdateUpdateMethod {
- typ := SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC
+func CreateSourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC(readChangesUsingWriteAheadLogCDC ReadChangesUsingWriteAheadLogCDC) SourcePostgresUpdateUpdateMethod {
+ typ := SourcePostgresUpdateUpdateMethodTypeReadChangesUsingWriteAheadLogCDC
return SourcePostgresUpdateUpdateMethod{
- SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC: &sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC,
- Type: typ,
+ ReadChangesUsingWriteAheadLogCDC: &readChangesUsingWriteAheadLogCDC,
+ Type: typ,
}
}
-func CreateSourcePostgresUpdateUpdateMethodSourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn(sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn) SourcePostgresUpdateUpdateMethod {
- typ := SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn
+func CreateSourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn(detectChangesWithXminSystemColumn DetectChangesWithXminSystemColumn) SourcePostgresUpdateUpdateMethod {
+ typ := SourcePostgresUpdateUpdateMethodTypeDetectChangesWithXminSystemColumn
return SourcePostgresUpdateUpdateMethod{
- SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn: &sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn,
- Type: typ,
+ DetectChangesWithXminSystemColumn: &detectChangesWithXminSystemColumn,
+ Type: typ,
}
}
-func CreateSourcePostgresUpdateUpdateMethodSourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor(sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor) SourcePostgresUpdateUpdateMethod {
- typ := SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor
+func CreateSourcePostgresUpdateUpdateMethodSourcePostgresUpdateScanChangesWithUserDefinedCursor(sourcePostgresUpdateScanChangesWithUserDefinedCursor SourcePostgresUpdateScanChangesWithUserDefinedCursor) SourcePostgresUpdateUpdateMethod {
+ typ := SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateScanChangesWithUserDefinedCursor
return SourcePostgresUpdateUpdateMethod{
- SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor: &sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor,
+ SourcePostgresUpdateScanChangesWithUserDefinedCursor: &sourcePostgresUpdateScanChangesWithUserDefinedCursor,
Type: typ,
}
}
func (u *SourcePostgresUpdateUpdateMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn := new(SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn); err == nil {
- u.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn = sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn
- u.Type = SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn
+
+ detectChangesWithXminSystemColumn := new(DetectChangesWithXminSystemColumn)
+ if err := utils.UnmarshalJSON(data, &detectChangesWithXminSystemColumn, "", true, true); err == nil {
+ u.DetectChangesWithXminSystemColumn = detectChangesWithXminSystemColumn
+ u.Type = SourcePostgresUpdateUpdateMethodTypeDetectChangesWithXminSystemColumn
return nil
}
- sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor := new(SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor); err == nil {
- u.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor = sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor
- u.Type = SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor
+ sourcePostgresUpdateScanChangesWithUserDefinedCursor := new(SourcePostgresUpdateScanChangesWithUserDefinedCursor)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresUpdateScanChangesWithUserDefinedCursor, "", true, true); err == nil {
+ u.SourcePostgresUpdateScanChangesWithUserDefinedCursor = sourcePostgresUpdateScanChangesWithUserDefinedCursor
+ u.Type = SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateScanChangesWithUserDefinedCursor
return nil
}
- sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC := new(SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC); err == nil {
- u.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC = sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC
- u.Type = SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC
+ readChangesUsingWriteAheadLogCDC := new(ReadChangesUsingWriteAheadLogCDC)
+ if err := utils.UnmarshalJSON(data, &readChangesUsingWriteAheadLogCDC, "", true, true); err == nil {
+ u.ReadChangesUsingWriteAheadLogCDC = readChangesUsingWriteAheadLogCDC
+ u.Type = SourcePostgresUpdateUpdateMethodTypeReadChangesUsingWriteAheadLogCDC
return nil
}
@@ -290,47 +326,48 @@ func (u *SourcePostgresUpdateUpdateMethod) UnmarshalJSON(data []byte) error {
}
func (u SourcePostgresUpdateUpdateMethod) MarshalJSON() ([]byte, error) {
- if u.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn != nil {
- return json.Marshal(u.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn)
+ if u.ReadChangesUsingWriteAheadLogCDC != nil {
+ return utils.MarshalJSON(u.ReadChangesUsingWriteAheadLogCDC, "", true)
}
- if u.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil {
- return json.Marshal(u.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor)
+ if u.DetectChangesWithXminSystemColumn != nil {
+ return utils.MarshalJSON(u.DetectChangesWithXminSystemColumn, "", true)
}
- if u.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC != nil {
- return json.Marshal(u.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC)
+ if u.SourcePostgresUpdateScanChangesWithUserDefinedCursor != nil {
+ return utils.MarshalJSON(u.SourcePostgresUpdateScanChangesWithUserDefinedCursor, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourcePostgresUpdateSSLModesVerifyFullMode string
+type SourcePostgresUpdateSchemasSSLModeSSLModes6Mode string
const (
- SourcePostgresUpdateSSLModesVerifyFullModeVerifyFull SourcePostgresUpdateSSLModesVerifyFullMode = "verify-full"
+ SourcePostgresUpdateSchemasSSLModeSSLModes6ModeVerifyFull SourcePostgresUpdateSchemasSSLModeSSLModes6Mode = "verify-full"
)
-func (e SourcePostgresUpdateSSLModesVerifyFullMode) ToPointer() *SourcePostgresUpdateSSLModesVerifyFullMode {
+func (e SourcePostgresUpdateSchemasSSLModeSSLModes6Mode) ToPointer() *SourcePostgresUpdateSchemasSSLModeSSLModes6Mode {
return &e
}
-func (e *SourcePostgresUpdateSSLModesVerifyFullMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateSchemasSSLModeSSLModes6Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-full":
- *e = SourcePostgresUpdateSSLModesVerifyFullMode(v)
+ *e = SourcePostgresUpdateSchemasSSLModeSSLModes6Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateSSLModesVerifyFullMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateSchemasSSLModeSSLModes6Mode: %v", v)
}
}
-// SourcePostgresUpdateSSLModesVerifyFull - This is the most secure mode. Always require encryption and verifies the identity of the source database server.
-type SourcePostgresUpdateSSLModesVerifyFull struct {
+// SourcePostgresUpdateVerifyFull - This is the most secure mode. Always require encryption and verifies the identity of the source database server.
+type SourcePostgresUpdateVerifyFull struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -338,86 +375,87 @@ type SourcePostgresUpdateSSLModesVerifyFull struct {
// Client key
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourcePostgresUpdateSSLModesVerifyFullMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode SourcePostgresUpdateSchemasSSLModeSSLModes6Mode `const:"verify-full" json:"mode"`
}
-type _SourcePostgresUpdateSSLModesVerifyFull SourcePostgresUpdateSSLModesVerifyFull
-func (c *SourcePostgresUpdateSSLModesVerifyFull) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresUpdateSSLModesVerifyFull{}
+func (s SourcePostgresUpdateVerifyFull) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (s *SourcePostgresUpdateVerifyFull) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- *c = SourcePostgresUpdateSSLModesVerifyFull(data)
-
- additionalFields := make(map[string]interface{})
+ return nil
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *SourcePostgresUpdateVerifyFull) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "ca_certificate")
- delete(additionalFields, "client_certificate")
- delete(additionalFields, "client_key")
- delete(additionalFields, "client_key_password")
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
- return nil
+ return o.AdditionalProperties
}
-func (c SourcePostgresUpdateSSLModesVerifyFull) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresUpdateSSLModesVerifyFull(c))
- if err != nil {
- return nil, err
+func (o *SourcePostgresUpdateVerifyFull) GetCaCertificate() string {
+ if o == nil {
+ return ""
}
+ return o.CaCertificate
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresUpdateVerifyFull) GetClientCertificate() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientCertificate
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourcePostgresUpdateVerifyFull) GetClientKey() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKey
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresUpdateVerifyFull) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKeyPassword
+}
- return json.Marshal(out)
+func (o *SourcePostgresUpdateVerifyFull) GetMode() SourcePostgresUpdateSchemasSSLModeSSLModes6Mode {
+ return SourcePostgresUpdateSchemasSSLModeSSLModes6ModeVerifyFull
}
-type SourcePostgresUpdateSSLModesVerifyCaMode string
+type SourcePostgresUpdateSchemasSSLModeSSLModes5Mode string
const (
- SourcePostgresUpdateSSLModesVerifyCaModeVerifyCa SourcePostgresUpdateSSLModesVerifyCaMode = "verify-ca"
+ SourcePostgresUpdateSchemasSSLModeSSLModes5ModeVerifyCa SourcePostgresUpdateSchemasSSLModeSSLModes5Mode = "verify-ca"
)
-func (e SourcePostgresUpdateSSLModesVerifyCaMode) ToPointer() *SourcePostgresUpdateSSLModesVerifyCaMode {
+func (e SourcePostgresUpdateSchemasSSLModeSSLModes5Mode) ToPointer() *SourcePostgresUpdateSchemasSSLModeSSLModes5Mode {
return &e
}
-func (e *SourcePostgresUpdateSSLModesVerifyCaMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateSchemasSSLModeSSLModes5Mode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "verify-ca":
- *e = SourcePostgresUpdateSSLModesVerifyCaMode(v)
+ *e = SourcePostgresUpdateSchemasSSLModeSSLModes5Mode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateSSLModesVerifyCaMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateSchemasSSLModeSSLModes5Mode: %v", v)
}
}
-// SourcePostgresUpdateSSLModesVerifyCa - Always require encryption and verifies that the source database server has a valid SSL certificate.
-type SourcePostgresUpdateSSLModesVerifyCa struct {
+// SourcePostgresUpdateVerifyCa - Always require encryption and verifies that the source database server has a valid SSL certificate.
+type SourcePostgresUpdateVerifyCa struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// CA certificate
CaCertificate string `json:"ca_certificate"`
// Client certificate
@@ -425,490 +463,385 @@ type SourcePostgresUpdateSSLModesVerifyCa struct {
// Client key
ClientKey *string `json:"client_key,omitempty"`
// Password for keystorage. If you do not add it - the password will be generated automatically.
- ClientKeyPassword *string `json:"client_key_password,omitempty"`
- Mode SourcePostgresUpdateSSLModesVerifyCaMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+ ClientKeyPassword *string `json:"client_key_password,omitempty"`
+ mode SourcePostgresUpdateSchemasSSLModeSSLModes5Mode `const:"verify-ca" json:"mode"`
}
-type _SourcePostgresUpdateSSLModesVerifyCa SourcePostgresUpdateSSLModesVerifyCa
-func (c *SourcePostgresUpdateSSLModesVerifyCa) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresUpdateSSLModesVerifyCa{}
+func (s SourcePostgresUpdateVerifyCa) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &data); err != nil {
+func (s *SourcePostgresUpdateVerifyCa) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- *c = SourcePostgresUpdateSSLModesVerifyCa(data)
-
- additionalFields := make(map[string]interface{})
+ return nil
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
- return err
+func (o *SourcePostgresUpdateVerifyCa) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
- delete(additionalFields, "ca_certificate")
- delete(additionalFields, "client_certificate")
- delete(additionalFields, "client_key")
- delete(additionalFields, "client_key_password")
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
- return nil
+ return o.AdditionalProperties
}
-func (c SourcePostgresUpdateSSLModesVerifyCa) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresUpdateSSLModesVerifyCa(c))
- if err != nil {
- return nil, err
+func (o *SourcePostgresUpdateVerifyCa) GetCaCertificate() string {
+ if o == nil {
+ return ""
}
+ return o.CaCertificate
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresUpdateVerifyCa) GetClientCertificate() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientCertificate
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourcePostgresUpdateVerifyCa) GetClientKey() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKey
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresUpdateVerifyCa) GetClientKeyPassword() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientKeyPassword
+}
- return json.Marshal(out)
+func (o *SourcePostgresUpdateVerifyCa) GetMode() SourcePostgresUpdateSchemasSSLModeSSLModes5Mode {
+ return SourcePostgresUpdateSchemasSSLModeSSLModes5ModeVerifyCa
}
-type SourcePostgresUpdateSSLModesRequireMode string
+type SourcePostgresUpdateSchemasSSLModeSSLModesMode string
const (
- SourcePostgresUpdateSSLModesRequireModeRequire SourcePostgresUpdateSSLModesRequireMode = "require"
+ SourcePostgresUpdateSchemasSSLModeSSLModesModeRequire SourcePostgresUpdateSchemasSSLModeSSLModesMode = "require"
)
-func (e SourcePostgresUpdateSSLModesRequireMode) ToPointer() *SourcePostgresUpdateSSLModesRequireMode {
+func (e SourcePostgresUpdateSchemasSSLModeSSLModesMode) ToPointer() *SourcePostgresUpdateSchemasSSLModeSSLModesMode {
return &e
}
-func (e *SourcePostgresUpdateSSLModesRequireMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateSchemasSSLModeSSLModesMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "require":
- *e = SourcePostgresUpdateSSLModesRequireMode(v)
+ *e = SourcePostgresUpdateSchemasSSLModeSSLModesMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateSSLModesRequireMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateSchemasSSLModeSSLModesMode: %v", v)
}
}
-// SourcePostgresUpdateSSLModesRequire - Always require encryption. If the source database server does not support encryption, connection will fail.
-type SourcePostgresUpdateSSLModesRequire struct {
- Mode SourcePostgresUpdateSSLModesRequireMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourcePostgresUpdateRequire - Always require encryption. If the source database server does not support encryption, connection will fail.
+type SourcePostgresUpdateRequire struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourcePostgresUpdateSchemasSSLModeSSLModesMode `const:"require" json:"mode"`
}
-type _SourcePostgresUpdateSSLModesRequire SourcePostgresUpdateSSLModesRequire
-
-func (c *SourcePostgresUpdateSSLModesRequire) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresUpdateSSLModesRequire{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourcePostgresUpdateSSLModesRequire(data)
- additionalFields := make(map[string]interface{})
+func (s SourcePostgresUpdateRequire) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourcePostgresUpdateRequire) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourcePostgresUpdateSSLModesRequire) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresUpdateSSLModesRequire(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresUpdateRequire) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourcePostgresUpdateRequire) GetMode() SourcePostgresUpdateSchemasSSLModeSSLModesMode {
+ return SourcePostgresUpdateSchemasSSLModeSSLModesModeRequire
}
-type SourcePostgresUpdateSSLModesPreferMode string
+type SourcePostgresUpdateSchemasSslModeMode string
const (
- SourcePostgresUpdateSSLModesPreferModePrefer SourcePostgresUpdateSSLModesPreferMode = "prefer"
+ SourcePostgresUpdateSchemasSslModeModePrefer SourcePostgresUpdateSchemasSslModeMode = "prefer"
)
-func (e SourcePostgresUpdateSSLModesPreferMode) ToPointer() *SourcePostgresUpdateSSLModesPreferMode {
+func (e SourcePostgresUpdateSchemasSslModeMode) ToPointer() *SourcePostgresUpdateSchemasSslModeMode {
return &e
}
-func (e *SourcePostgresUpdateSSLModesPreferMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateSchemasSslModeMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "prefer":
- *e = SourcePostgresUpdateSSLModesPreferMode(v)
+ *e = SourcePostgresUpdateSchemasSslModeMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateSSLModesPreferMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateSchemasSslModeMode: %v", v)
}
}
-// SourcePostgresUpdateSSLModesPrefer - Allows unencrypted connection only if the source database does not support encryption.
-type SourcePostgresUpdateSSLModesPrefer struct {
- Mode SourcePostgresUpdateSSLModesPreferMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourcePostgresUpdatePrefer - Allows unencrypted connection only if the source database does not support encryption.
+type SourcePostgresUpdatePrefer struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourcePostgresUpdateSchemasSslModeMode `const:"prefer" json:"mode"`
}
-type _SourcePostgresUpdateSSLModesPrefer SourcePostgresUpdateSSLModesPrefer
-
-func (c *SourcePostgresUpdateSSLModesPrefer) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresUpdateSSLModesPrefer{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourcePostgresUpdateSSLModesPrefer(data)
- additionalFields := make(map[string]interface{})
+func (s SourcePostgresUpdatePrefer) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourcePostgresUpdatePrefer) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourcePostgresUpdateSSLModesPrefer) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresUpdateSSLModesPrefer(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresUpdatePrefer) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourcePostgresUpdatePrefer) GetMode() SourcePostgresUpdateSchemasSslModeMode {
+ return SourcePostgresUpdateSchemasSslModeModePrefer
}
-type SourcePostgresUpdateSSLModesAllowMode string
+type SourcePostgresUpdateSchemasMode string
const (
- SourcePostgresUpdateSSLModesAllowModeAllow SourcePostgresUpdateSSLModesAllowMode = "allow"
+ SourcePostgresUpdateSchemasModeAllow SourcePostgresUpdateSchemasMode = "allow"
)
-func (e SourcePostgresUpdateSSLModesAllowMode) ToPointer() *SourcePostgresUpdateSSLModesAllowMode {
+func (e SourcePostgresUpdateSchemasMode) ToPointer() *SourcePostgresUpdateSchemasMode {
return &e
}
-func (e *SourcePostgresUpdateSSLModesAllowMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateSchemasMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "allow":
- *e = SourcePostgresUpdateSSLModesAllowMode(v)
+ *e = SourcePostgresUpdateSchemasMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateSSLModesAllowMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateSchemasMode: %v", v)
}
}
-// SourcePostgresUpdateSSLModesAllow - Enables encryption only when required by the source database.
-type SourcePostgresUpdateSSLModesAllow struct {
- Mode SourcePostgresUpdateSSLModesAllowMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourcePostgresUpdateAllow - Enables encryption only when required by the source database.
+type SourcePostgresUpdateAllow struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourcePostgresUpdateSchemasMode `const:"allow" json:"mode"`
}
-type _SourcePostgresUpdateSSLModesAllow SourcePostgresUpdateSSLModesAllow
-
-func (c *SourcePostgresUpdateSSLModesAllow) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresUpdateSSLModesAllow{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourcePostgresUpdateSSLModesAllow(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourcePostgresUpdateAllow) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourcePostgresUpdateAllow) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourcePostgresUpdateSSLModesAllow) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresUpdateSSLModesAllow(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresUpdateAllow) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourcePostgresUpdateAllow) GetMode() SourcePostgresUpdateSchemasMode {
+ return SourcePostgresUpdateSchemasModeAllow
}
-type SourcePostgresUpdateSSLModesDisableMode string
+type SourcePostgresUpdateMode string
const (
- SourcePostgresUpdateSSLModesDisableModeDisable SourcePostgresUpdateSSLModesDisableMode = "disable"
+ SourcePostgresUpdateModeDisable SourcePostgresUpdateMode = "disable"
)
-func (e SourcePostgresUpdateSSLModesDisableMode) ToPointer() *SourcePostgresUpdateSSLModesDisableMode {
+func (e SourcePostgresUpdateMode) ToPointer() *SourcePostgresUpdateMode {
return &e
}
-func (e *SourcePostgresUpdateSSLModesDisableMode) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateMode) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "disable":
- *e = SourcePostgresUpdateSSLModesDisableMode(v)
+ *e = SourcePostgresUpdateMode(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateSSLModesDisableMode: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateMode: %v", v)
}
}
-// SourcePostgresUpdateSSLModesDisable - Disables encryption of communication between Airbyte and source database.
-type SourcePostgresUpdateSSLModesDisable struct {
- Mode SourcePostgresUpdateSSLModesDisableMode `json:"mode"`
-
- AdditionalProperties interface{} `json:"-"`
+// SourcePostgresUpdateDisable - Disables encryption of communication between Airbyte and source database.
+type SourcePostgresUpdateDisable struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ mode SourcePostgresUpdateMode `const:"disable" json:"mode"`
}
-type _SourcePostgresUpdateSSLModesDisable SourcePostgresUpdateSSLModesDisable
-
-func (c *SourcePostgresUpdateSSLModesDisable) UnmarshalJSON(bs []byte) error {
- data := _SourcePostgresUpdateSSLModesDisable{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourcePostgresUpdateSSLModesDisable(data)
- additionalFields := make(map[string]interface{})
+func (s SourcePostgresUpdateDisable) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourcePostgresUpdateDisable) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "mode")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourcePostgresUpdateSSLModesDisable) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourcePostgresUpdateSSLModesDisable(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourcePostgresUpdateDisable) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- return json.Marshal(out)
+func (o *SourcePostgresUpdateDisable) GetMode() SourcePostgresUpdateMode {
+ return SourcePostgresUpdateModeDisable
}
type SourcePostgresUpdateSSLModesType string
const (
- SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesDisable SourcePostgresUpdateSSLModesType = "source-postgres-update_SSL Modes_disable"
- SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesAllow SourcePostgresUpdateSSLModesType = "source-postgres-update_SSL Modes_allow"
- SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesPrefer SourcePostgresUpdateSSLModesType = "source-postgres-update_SSL Modes_prefer"
- SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesRequire SourcePostgresUpdateSSLModesType = "source-postgres-update_SSL Modes_require"
- SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesVerifyCa SourcePostgresUpdateSSLModesType = "source-postgres-update_SSL Modes_verify-ca"
- SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesVerifyFull SourcePostgresUpdateSSLModesType = "source-postgres-update_SSL Modes_verify-full"
+ SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateDisable SourcePostgresUpdateSSLModesType = "source-postgres-update_disable"
+ SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateAllow SourcePostgresUpdateSSLModesType = "source-postgres-update_allow"
+ SourcePostgresUpdateSSLModesTypeSourcePostgresUpdatePrefer SourcePostgresUpdateSSLModesType = "source-postgres-update_prefer"
+ SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateRequire SourcePostgresUpdateSSLModesType = "source-postgres-update_require"
+ SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateVerifyCa SourcePostgresUpdateSSLModesType = "source-postgres-update_verify-ca"
+ SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateVerifyFull SourcePostgresUpdateSSLModesType = "source-postgres-update_verify-full"
)
type SourcePostgresUpdateSSLModes struct {
- SourcePostgresUpdateSSLModesDisable *SourcePostgresUpdateSSLModesDisable
- SourcePostgresUpdateSSLModesAllow *SourcePostgresUpdateSSLModesAllow
- SourcePostgresUpdateSSLModesPrefer *SourcePostgresUpdateSSLModesPrefer
- SourcePostgresUpdateSSLModesRequire *SourcePostgresUpdateSSLModesRequire
- SourcePostgresUpdateSSLModesVerifyCa *SourcePostgresUpdateSSLModesVerifyCa
- SourcePostgresUpdateSSLModesVerifyFull *SourcePostgresUpdateSSLModesVerifyFull
+ SourcePostgresUpdateDisable *SourcePostgresUpdateDisable
+ SourcePostgresUpdateAllow *SourcePostgresUpdateAllow
+ SourcePostgresUpdatePrefer *SourcePostgresUpdatePrefer
+ SourcePostgresUpdateRequire *SourcePostgresUpdateRequire
+ SourcePostgresUpdateVerifyCa *SourcePostgresUpdateVerifyCa
+ SourcePostgresUpdateVerifyFull *SourcePostgresUpdateVerifyFull
Type SourcePostgresUpdateSSLModesType
}
-func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdateSSLModesDisable(sourcePostgresUpdateSSLModesDisable SourcePostgresUpdateSSLModesDisable) SourcePostgresUpdateSSLModes {
- typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesDisable
+func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdateDisable(sourcePostgresUpdateDisable SourcePostgresUpdateDisable) SourcePostgresUpdateSSLModes {
+ typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateDisable
return SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesDisable: &sourcePostgresUpdateSSLModesDisable,
- Type: typ,
+ SourcePostgresUpdateDisable: &sourcePostgresUpdateDisable,
+ Type: typ,
}
}
-func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdateSSLModesAllow(sourcePostgresUpdateSSLModesAllow SourcePostgresUpdateSSLModesAllow) SourcePostgresUpdateSSLModes {
- typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesAllow
+func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdateAllow(sourcePostgresUpdateAllow SourcePostgresUpdateAllow) SourcePostgresUpdateSSLModes {
+ typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateAllow
return SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesAllow: &sourcePostgresUpdateSSLModesAllow,
- Type: typ,
+ SourcePostgresUpdateAllow: &sourcePostgresUpdateAllow,
+ Type: typ,
}
}
-func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdateSSLModesPrefer(sourcePostgresUpdateSSLModesPrefer SourcePostgresUpdateSSLModesPrefer) SourcePostgresUpdateSSLModes {
- typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesPrefer
+func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdatePrefer(sourcePostgresUpdatePrefer SourcePostgresUpdatePrefer) SourcePostgresUpdateSSLModes {
+ typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdatePrefer
return SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesPrefer: &sourcePostgresUpdateSSLModesPrefer,
- Type: typ,
+ SourcePostgresUpdatePrefer: &sourcePostgresUpdatePrefer,
+ Type: typ,
}
}
-func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdateSSLModesRequire(sourcePostgresUpdateSSLModesRequire SourcePostgresUpdateSSLModesRequire) SourcePostgresUpdateSSLModes {
- typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesRequire
+func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdateRequire(sourcePostgresUpdateRequire SourcePostgresUpdateRequire) SourcePostgresUpdateSSLModes {
+ typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateRequire
return SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesRequire: &sourcePostgresUpdateSSLModesRequire,
- Type: typ,
+ SourcePostgresUpdateRequire: &sourcePostgresUpdateRequire,
+ Type: typ,
}
}
-func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdateSSLModesVerifyCa(sourcePostgresUpdateSSLModesVerifyCa SourcePostgresUpdateSSLModesVerifyCa) SourcePostgresUpdateSSLModes {
- typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesVerifyCa
+func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdateVerifyCa(sourcePostgresUpdateVerifyCa SourcePostgresUpdateVerifyCa) SourcePostgresUpdateSSLModes {
+ typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateVerifyCa
return SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesVerifyCa: &sourcePostgresUpdateSSLModesVerifyCa,
- Type: typ,
+ SourcePostgresUpdateVerifyCa: &sourcePostgresUpdateVerifyCa,
+ Type: typ,
}
}
-func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdateSSLModesVerifyFull(sourcePostgresUpdateSSLModesVerifyFull SourcePostgresUpdateSSLModesVerifyFull) SourcePostgresUpdateSSLModes {
- typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesVerifyFull
+func CreateSourcePostgresUpdateSSLModesSourcePostgresUpdateVerifyFull(sourcePostgresUpdateVerifyFull SourcePostgresUpdateVerifyFull) SourcePostgresUpdateSSLModes {
+ typ := SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateVerifyFull
return SourcePostgresUpdateSSLModes{
- SourcePostgresUpdateSSLModesVerifyFull: &sourcePostgresUpdateSSLModesVerifyFull,
- Type: typ,
+ SourcePostgresUpdateVerifyFull: &sourcePostgresUpdateVerifyFull,
+ Type: typ,
}
}
func (u *SourcePostgresUpdateSSLModes) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourcePostgresUpdateSSLModesDisable := new(SourcePostgresUpdateSSLModesDisable)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateSSLModesDisable); err == nil {
- u.SourcePostgresUpdateSSLModesDisable = sourcePostgresUpdateSSLModesDisable
- u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesDisable
+
+ sourcePostgresUpdateDisable := new(SourcePostgresUpdateDisable)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresUpdateDisable, "", true, true); err == nil {
+ u.SourcePostgresUpdateDisable = sourcePostgresUpdateDisable
+ u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateDisable
return nil
}
- sourcePostgresUpdateSSLModesAllow := new(SourcePostgresUpdateSSLModesAllow)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateSSLModesAllow); err == nil {
- u.SourcePostgresUpdateSSLModesAllow = sourcePostgresUpdateSSLModesAllow
- u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesAllow
+ sourcePostgresUpdateAllow := new(SourcePostgresUpdateAllow)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresUpdateAllow, "", true, true); err == nil {
+ u.SourcePostgresUpdateAllow = sourcePostgresUpdateAllow
+ u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateAllow
return nil
}
- sourcePostgresUpdateSSLModesPrefer := new(SourcePostgresUpdateSSLModesPrefer)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateSSLModesPrefer); err == nil {
- u.SourcePostgresUpdateSSLModesPrefer = sourcePostgresUpdateSSLModesPrefer
- u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesPrefer
+ sourcePostgresUpdatePrefer := new(SourcePostgresUpdatePrefer)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresUpdatePrefer, "", true, true); err == nil {
+ u.SourcePostgresUpdatePrefer = sourcePostgresUpdatePrefer
+ u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdatePrefer
return nil
}
- sourcePostgresUpdateSSLModesRequire := new(SourcePostgresUpdateSSLModesRequire)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateSSLModesRequire); err == nil {
- u.SourcePostgresUpdateSSLModesRequire = sourcePostgresUpdateSSLModesRequire
- u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesRequire
+ sourcePostgresUpdateRequire := new(SourcePostgresUpdateRequire)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresUpdateRequire, "", true, true); err == nil {
+ u.SourcePostgresUpdateRequire = sourcePostgresUpdateRequire
+ u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateRequire
return nil
}
- sourcePostgresUpdateSSLModesVerifyCa := new(SourcePostgresUpdateSSLModesVerifyCa)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateSSLModesVerifyCa); err == nil {
- u.SourcePostgresUpdateSSLModesVerifyCa = sourcePostgresUpdateSSLModesVerifyCa
- u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesVerifyCa
+ sourcePostgresUpdateVerifyCa := new(SourcePostgresUpdateVerifyCa)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresUpdateVerifyCa, "", true, true); err == nil {
+ u.SourcePostgresUpdateVerifyCa = sourcePostgresUpdateVerifyCa
+ u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateVerifyCa
return nil
}
- sourcePostgresUpdateSSLModesVerifyFull := new(SourcePostgresUpdateSSLModesVerifyFull)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateSSLModesVerifyFull); err == nil {
- u.SourcePostgresUpdateSSLModesVerifyFull = sourcePostgresUpdateSSLModesVerifyFull
- u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateSSLModesVerifyFull
+ sourcePostgresUpdateVerifyFull := new(SourcePostgresUpdateVerifyFull)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresUpdateVerifyFull, "", true, true); err == nil {
+ u.SourcePostgresUpdateVerifyFull = sourcePostgresUpdateVerifyFull
+ u.Type = SourcePostgresUpdateSSLModesTypeSourcePostgresUpdateVerifyFull
return nil
}
@@ -916,212 +849,306 @@ func (u *SourcePostgresUpdateSSLModes) UnmarshalJSON(data []byte) error {
}
func (u SourcePostgresUpdateSSLModes) MarshalJSON() ([]byte, error) {
- if u.SourcePostgresUpdateSSLModesDisable != nil {
- return json.Marshal(u.SourcePostgresUpdateSSLModesDisable)
+ if u.SourcePostgresUpdateDisable != nil {
+ return utils.MarshalJSON(u.SourcePostgresUpdateDisable, "", true)
}
- if u.SourcePostgresUpdateSSLModesAllow != nil {
- return json.Marshal(u.SourcePostgresUpdateSSLModesAllow)
+ if u.SourcePostgresUpdateAllow != nil {
+ return utils.MarshalJSON(u.SourcePostgresUpdateAllow, "", true)
}
- if u.SourcePostgresUpdateSSLModesPrefer != nil {
- return json.Marshal(u.SourcePostgresUpdateSSLModesPrefer)
+ if u.SourcePostgresUpdatePrefer != nil {
+ return utils.MarshalJSON(u.SourcePostgresUpdatePrefer, "", true)
}
- if u.SourcePostgresUpdateSSLModesRequire != nil {
- return json.Marshal(u.SourcePostgresUpdateSSLModesRequire)
+ if u.SourcePostgresUpdateRequire != nil {
+ return utils.MarshalJSON(u.SourcePostgresUpdateRequire, "", true)
}
- if u.SourcePostgresUpdateSSLModesVerifyCa != nil {
- return json.Marshal(u.SourcePostgresUpdateSSLModesVerifyCa)
+ if u.SourcePostgresUpdateVerifyCa != nil {
+ return utils.MarshalJSON(u.SourcePostgresUpdateVerifyCa, "", true)
}
- if u.SourcePostgresUpdateSSLModesVerifyFull != nil {
- return json.Marshal(u.SourcePostgresUpdateSSLModesVerifyFull)
+ if u.SourcePostgresUpdateVerifyFull != nil {
+ return utils.MarshalJSON(u.SourcePostgresUpdateVerifyFull, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourcePostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and password authentication
-type SourcePostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod string
+// SourcePostgresUpdateSchemasTunnelMethodTunnelMethod - Connect through a jump server tunnel host using username and password authentication
+type SourcePostgresUpdateSchemasTunnelMethodTunnelMethod string
const (
- SourcePostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethodSSHPasswordAuth SourcePostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod = "SSH_PASSWORD_AUTH"
+ SourcePostgresUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth SourcePostgresUpdateSchemasTunnelMethodTunnelMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourcePostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) ToPointer() *SourcePostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod {
+func (e SourcePostgresUpdateSchemasTunnelMethodTunnelMethod) ToPointer() *SourcePostgresUpdateSchemasTunnelMethodTunnelMethod {
return &e
}
-func (e *SourcePostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateSchemasTunnelMethodTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourcePostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod(v)
+ *e = SourcePostgresUpdateSchemasTunnelMethodTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateSchemasTunnelMethodTunnelMethod: %v", v)
}
}
-// SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication struct {
+// SourcePostgresUpdatePasswordAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourcePostgresUpdatePasswordAuthentication struct {
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and password authentication
- TunnelMethod SourcePostgresUpdateSSHTunnelMethodPasswordAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourcePostgresUpdateSchemasTunnelMethodTunnelMethod `const:"SSH_PASSWORD_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host
TunnelUser string `json:"tunnel_user"`
// OS-level password for logging into the jump server host
TunnelUserPassword string `json:"tunnel_user_password"`
}
-// SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod - Connect through a jump server tunnel host using username and ssh key
-type SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod string
+func (s SourcePostgresUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostgresUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePostgresUpdatePasswordAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourcePostgresUpdatePasswordAuthentication) GetTunnelMethod() SourcePostgresUpdateSchemasTunnelMethodTunnelMethod {
+ return SourcePostgresUpdateSchemasTunnelMethodTunnelMethodSSHPasswordAuth
+}
+
+func (o *SourcePostgresUpdatePasswordAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourcePostgresUpdatePasswordAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+func (o *SourcePostgresUpdatePasswordAuthentication) GetTunnelUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUserPassword
+}
+
+// SourcePostgresUpdateSchemasTunnelMethod - Connect through a jump server tunnel host using username and ssh key
+type SourcePostgresUpdateSchemasTunnelMethod string
const (
- SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethodSSHKeyAuth SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod = "SSH_KEY_AUTH"
+ SourcePostgresUpdateSchemasTunnelMethodSSHKeyAuth SourcePostgresUpdateSchemasTunnelMethod = "SSH_KEY_AUTH"
)
-func (e SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) ToPointer() *SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod {
+func (e SourcePostgresUpdateSchemasTunnelMethod) ToPointer() *SourcePostgresUpdateSchemasTunnelMethod {
return &e
}
-func (e *SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateSchemasTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod(v)
+ *e = SourcePostgresUpdateSchemasTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateSchemasTunnelMethod: %v", v)
}
}
-// SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication struct {
+// SourcePostgresUpdateSSHKeyAuthentication - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourcePostgresUpdateSSHKeyAuthentication struct {
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
SSHKey string `json:"ssh_key"`
// Hostname of the jump server host that allows inbound ssh tunnel.
TunnelHost string `json:"tunnel_host"`
// Connect through a jump server tunnel host using username and ssh key
- TunnelMethod SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthenticationTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourcePostgresUpdateSchemasTunnelMethod `const:"SSH_KEY_AUTH" json:"tunnel_method"`
// Port on the proxy/jump server that accepts inbound ssh connections.
- TunnelPort int64 `json:"tunnel_port"`
+ TunnelPort *int64 `default:"22" json:"tunnel_port"`
// OS-level username for logging into the jump server host.
TunnelUser string `json:"tunnel_user"`
}
-// SourcePostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod - No ssh tunnel needed to connect to database
-type SourcePostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod string
+func (s SourcePostgresUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostgresUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePostgresUpdateSSHKeyAuthentication) GetSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SSHKey
+}
+
+func (o *SourcePostgresUpdateSSHKeyAuthentication) GetTunnelHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelHost
+}
+
+func (o *SourcePostgresUpdateSSHKeyAuthentication) GetTunnelMethod() SourcePostgresUpdateSchemasTunnelMethod {
+ return SourcePostgresUpdateSchemasTunnelMethodSSHKeyAuth
+}
+
+func (o *SourcePostgresUpdateSSHKeyAuthentication) GetTunnelPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelPort
+}
+
+func (o *SourcePostgresUpdateSSHKeyAuthentication) GetTunnelUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.TunnelUser
+}
+
+// SourcePostgresUpdateTunnelMethod - No ssh tunnel needed to connect to database
+type SourcePostgresUpdateTunnelMethod string
const (
- SourcePostgresUpdateSSHTunnelMethodNoTunnelTunnelMethodNoTunnel SourcePostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod = "NO_TUNNEL"
+ SourcePostgresUpdateTunnelMethodNoTunnel SourcePostgresUpdateTunnelMethod = "NO_TUNNEL"
)
-func (e SourcePostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod) ToPointer() *SourcePostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod {
+func (e SourcePostgresUpdateTunnelMethod) ToPointer() *SourcePostgresUpdateTunnelMethod {
return &e
}
-func (e *SourcePostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod) UnmarshalJSON(data []byte) error {
+func (e *SourcePostgresUpdateTunnelMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "NO_TUNNEL":
- *e = SourcePostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod(v)
+ *e = SourcePostgresUpdateTunnelMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod: %v", v)
+ return fmt.Errorf("invalid value for SourcePostgresUpdateTunnelMethod: %v", v)
}
}
-// SourcePostgresUpdateSSHTunnelMethodNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
-type SourcePostgresUpdateSSHTunnelMethodNoTunnel struct {
+// SourcePostgresUpdateNoTunnel - Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
+type SourcePostgresUpdateNoTunnel struct {
// No ssh tunnel needed to connect to database
- TunnelMethod SourcePostgresUpdateSSHTunnelMethodNoTunnelTunnelMethod `json:"tunnel_method"`
+ tunnelMethod SourcePostgresUpdateTunnelMethod `const:"NO_TUNNEL" json:"tunnel_method"`
+}
+
+func (s SourcePostgresUpdateNoTunnel) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostgresUpdateNoTunnel) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePostgresUpdateNoTunnel) GetTunnelMethod() SourcePostgresUpdateTunnelMethod {
+ return SourcePostgresUpdateTunnelMethodNoTunnel
}
type SourcePostgresUpdateSSHTunnelMethodType string
const (
- SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHTunnelMethodNoTunnel SourcePostgresUpdateSSHTunnelMethodType = "source-postgres-update_SSH Tunnel Method_No Tunnel"
- SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication SourcePostgresUpdateSSHTunnelMethodType = "source-postgres-update_SSH Tunnel Method_SSH Key Authentication"
- SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHTunnelMethodPasswordAuthentication SourcePostgresUpdateSSHTunnelMethodType = "source-postgres-update_SSH Tunnel Method_Password Authentication"
+ SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateNoTunnel SourcePostgresUpdateSSHTunnelMethodType = "source-postgres-update_No Tunnel"
+ SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHKeyAuthentication SourcePostgresUpdateSSHTunnelMethodType = "source-postgres-update_SSH Key Authentication"
+ SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdatePasswordAuthentication SourcePostgresUpdateSSHTunnelMethodType = "source-postgres-update_Password Authentication"
)
type SourcePostgresUpdateSSHTunnelMethod struct {
- SourcePostgresUpdateSSHTunnelMethodNoTunnel *SourcePostgresUpdateSSHTunnelMethodNoTunnel
- SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication *SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication
- SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication *SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication
+ SourcePostgresUpdateNoTunnel *SourcePostgresUpdateNoTunnel
+ SourcePostgresUpdateSSHKeyAuthentication *SourcePostgresUpdateSSHKeyAuthentication
+ SourcePostgresUpdatePasswordAuthentication *SourcePostgresUpdatePasswordAuthentication
Type SourcePostgresUpdateSSHTunnelMethodType
}
-func CreateSourcePostgresUpdateSSHTunnelMethodSourcePostgresUpdateSSHTunnelMethodNoTunnel(sourcePostgresUpdateSSHTunnelMethodNoTunnel SourcePostgresUpdateSSHTunnelMethodNoTunnel) SourcePostgresUpdateSSHTunnelMethod {
- typ := SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHTunnelMethodNoTunnel
+func CreateSourcePostgresUpdateSSHTunnelMethodSourcePostgresUpdateNoTunnel(sourcePostgresUpdateNoTunnel SourcePostgresUpdateNoTunnel) SourcePostgresUpdateSSHTunnelMethod {
+ typ := SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateNoTunnel
return SourcePostgresUpdateSSHTunnelMethod{
- SourcePostgresUpdateSSHTunnelMethodNoTunnel: &sourcePostgresUpdateSSHTunnelMethodNoTunnel,
- Type: typ,
+ SourcePostgresUpdateNoTunnel: &sourcePostgresUpdateNoTunnel,
+ Type: typ,
}
}
-func CreateSourcePostgresUpdateSSHTunnelMethodSourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication(sourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication) SourcePostgresUpdateSSHTunnelMethod {
- typ := SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication
+func CreateSourcePostgresUpdateSSHTunnelMethodSourcePostgresUpdateSSHKeyAuthentication(sourcePostgresUpdateSSHKeyAuthentication SourcePostgresUpdateSSHKeyAuthentication) SourcePostgresUpdateSSHTunnelMethod {
+ typ := SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHKeyAuthentication
return SourcePostgresUpdateSSHTunnelMethod{
- SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication: &sourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication,
- Type: typ,
+ SourcePostgresUpdateSSHKeyAuthentication: &sourcePostgresUpdateSSHKeyAuthentication,
+ Type: typ,
}
}
-func CreateSourcePostgresUpdateSSHTunnelMethodSourcePostgresUpdateSSHTunnelMethodPasswordAuthentication(sourcePostgresUpdateSSHTunnelMethodPasswordAuthentication SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication) SourcePostgresUpdateSSHTunnelMethod {
- typ := SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHTunnelMethodPasswordAuthentication
+func CreateSourcePostgresUpdateSSHTunnelMethodSourcePostgresUpdatePasswordAuthentication(sourcePostgresUpdatePasswordAuthentication SourcePostgresUpdatePasswordAuthentication) SourcePostgresUpdateSSHTunnelMethod {
+ typ := SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdatePasswordAuthentication
return SourcePostgresUpdateSSHTunnelMethod{
- SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication: &sourcePostgresUpdateSSHTunnelMethodPasswordAuthentication,
+ SourcePostgresUpdatePasswordAuthentication: &sourcePostgresUpdatePasswordAuthentication,
Type: typ,
}
}
func (u *SourcePostgresUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourcePostgresUpdateSSHTunnelMethodNoTunnel := new(SourcePostgresUpdateSSHTunnelMethodNoTunnel)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateSSHTunnelMethodNoTunnel); err == nil {
- u.SourcePostgresUpdateSSHTunnelMethodNoTunnel = sourcePostgresUpdateSSHTunnelMethodNoTunnel
- u.Type = SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHTunnelMethodNoTunnel
+
+ sourcePostgresUpdateNoTunnel := new(SourcePostgresUpdateNoTunnel)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresUpdateNoTunnel, "", true, true); err == nil {
+ u.SourcePostgresUpdateNoTunnel = sourcePostgresUpdateNoTunnel
+ u.Type = SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateNoTunnel
return nil
}
- sourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication := new(SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication); err == nil {
- u.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication = sourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication
- u.Type = SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication
+ sourcePostgresUpdateSSHKeyAuthentication := new(SourcePostgresUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourcePostgresUpdateSSHKeyAuthentication = sourcePostgresUpdateSSHKeyAuthentication
+ u.Type = SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHKeyAuthentication
return nil
}
- sourcePostgresUpdateSSHTunnelMethodPasswordAuthentication := new(SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourcePostgresUpdateSSHTunnelMethodPasswordAuthentication); err == nil {
- u.SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication = sourcePostgresUpdateSSHTunnelMethodPasswordAuthentication
- u.Type = SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdateSSHTunnelMethodPasswordAuthentication
+ sourcePostgresUpdatePasswordAuthentication := new(SourcePostgresUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourcePostgresUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.SourcePostgresUpdatePasswordAuthentication = sourcePostgresUpdatePasswordAuthentication
+ u.Type = SourcePostgresUpdateSSHTunnelMethodTypeSourcePostgresUpdatePasswordAuthentication
return nil
}
@@ -1129,19 +1156,19 @@ func (u *SourcePostgresUpdateSSHTunnelMethod) UnmarshalJSON(data []byte) error {
}
func (u SourcePostgresUpdateSSHTunnelMethod) MarshalJSON() ([]byte, error) {
- if u.SourcePostgresUpdateSSHTunnelMethodNoTunnel != nil {
- return json.Marshal(u.SourcePostgresUpdateSSHTunnelMethodNoTunnel)
+ if u.SourcePostgresUpdateNoTunnel != nil {
+ return utils.MarshalJSON(u.SourcePostgresUpdateNoTunnel, "", true)
}
- if u.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication != nil {
- return json.Marshal(u.SourcePostgresUpdateSSHTunnelMethodSSHKeyAuthentication)
+ if u.SourcePostgresUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourcePostgresUpdateSSHKeyAuthentication, "", true)
}
- if u.SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication != nil {
- return json.Marshal(u.SourcePostgresUpdateSSHTunnelMethodPasswordAuthentication)
+ if u.SourcePostgresUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourcePostgresUpdatePasswordAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourcePostgresUpdate struct {
@@ -1154,7 +1181,7 @@ type SourcePostgresUpdate struct {
// Password associated with the username.
Password *string `json:"password,omitempty"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5432" json:"port"`
// Configures how data is extracted from the database.
ReplicationMethod *SourcePostgresUpdateUpdateMethod `json:"replication_method,omitempty"`
// The list of schemas (case sensitive) to sync from. Defaults to public.
@@ -1167,3 +1194,84 @@ type SourcePostgresUpdate struct {
// Username to access the database.
Username string `json:"username"`
}
+
+func (s SourcePostgresUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostgresUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePostgresUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourcePostgresUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourcePostgresUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourcePostgresUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourcePostgresUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourcePostgresUpdate) GetReplicationMethod() *SourcePostgresUpdateUpdateMethod {
+ if o == nil {
+ return nil
+ }
+ return o.ReplicationMethod
+}
+
+func (o *SourcePostgresUpdate) GetSchemas() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Schemas
+}
+
+func (o *SourcePostgresUpdate) GetSslMode() *SourcePostgresUpdateSSLModes {
+ if o == nil {
+ return nil
+ }
+ return o.SslMode
+}
+
+func (o *SourcePostgresUpdate) GetTunnelMethod() *SourcePostgresUpdateSSHTunnelMethod {
+ if o == nil {
+ return nil
+ }
+ return o.TunnelMethod
+}
+
+func (o *SourcePostgresUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceposthog.go b/internal/sdk/pkg/models/shared/sourceposthog.go
old mode 100755
new mode 100644
index 046b777cb..ebe177efb
--- a/internal/sdk/pkg/models/shared/sourceposthog.go
+++ b/internal/sdk/pkg/models/shared/sourceposthog.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourcePosthogPosthog string
+type Posthog string
const (
- SourcePosthogPosthogPosthog SourcePosthogPosthog = "posthog"
+ PosthogPosthog Posthog = "posthog"
)
-func (e SourcePosthogPosthog) ToPointer() *SourcePosthogPosthog {
+func (e Posthog) ToPointer() *Posthog {
return &e
}
-func (e *SourcePosthogPosthog) UnmarshalJSON(data []byte) error {
+func (e *Posthog) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "posthog":
- *e = SourcePosthogPosthog(v)
+ *e = Posthog(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePosthogPosthog: %v", v)
+ return fmt.Errorf("invalid value for Posthog: %v", v)
}
}
@@ -36,10 +37,53 @@ type SourcePosthog struct {
// API Key. See the docs for information on how to generate this key.
APIKey string `json:"api_key"`
// Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com).
- BaseURL *string `json:"base_url,omitempty"`
+ BaseURL *string `default:"https://app.posthog.com" json:"base_url"`
// Set lower value in case of failing long running sync of events stream.
- EventsTimeStep *int64 `json:"events_time_step,omitempty"`
- SourceType SourcePosthogPosthog `json:"sourceType"`
+ EventsTimeStep *int64 `default:"30" json:"events_time_step"`
+ sourceType Posthog `const:"posthog" json:"sourceType"`
// The date from which you'd like to replicate the data. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourcePosthog) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePosthog) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePosthog) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourcePosthog) GetBaseURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.BaseURL
+}
+
+func (o *SourcePosthog) GetEventsTimeStep() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.EventsTimeStep
+}
+
+func (o *SourcePosthog) GetSourceType() Posthog {
+ return PosthogPosthog
+}
+
+func (o *SourcePosthog) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceposthogcreaterequest.go b/internal/sdk/pkg/models/shared/sourceposthogcreaterequest.go
old mode 100755
new mode 100644
index f81350aa4..c72218a86
--- a/internal/sdk/pkg/models/shared/sourceposthogcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceposthogcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePosthogCreateRequest struct {
Configuration SourcePosthog `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePosthogCreateRequest) GetConfiguration() SourcePosthog {
+ if o == nil {
+ return SourcePosthog{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePosthogCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePosthogCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePosthogCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePosthogCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceposthogputrequest.go b/internal/sdk/pkg/models/shared/sourceposthogputrequest.go
old mode 100755
new mode 100644
index 15e138a6d..6aea283a0
--- a/internal/sdk/pkg/models/shared/sourceposthogputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceposthogputrequest.go
@@ -7,3 +7,24 @@ type SourcePosthogPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePosthogPutRequest) GetConfiguration() SourcePosthogUpdate {
+ if o == nil {
+ return SourcePosthogUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePosthogPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePosthogPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceposthogupdate.go b/internal/sdk/pkg/models/shared/sourceposthogupdate.go
old mode 100755
new mode 100644
index e39684f56..6886ccd87
--- a/internal/sdk/pkg/models/shared/sourceposthogupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceposthogupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -10,9 +11,48 @@ type SourcePosthogUpdate struct {
// API Key. See the docs for information on how to generate this key.
APIKey string `json:"api_key"`
// Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com).
- BaseURL *string `json:"base_url,omitempty"`
+ BaseURL *string `default:"https://app.posthog.com" json:"base_url"`
// Set lower value in case of failing long running sync of events stream.
- EventsTimeStep *int64 `json:"events_time_step,omitempty"`
+ EventsTimeStep *int64 `default:"30" json:"events_time_step"`
// The date from which you'd like to replicate the data. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourcePosthogUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePosthogUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePosthogUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourcePosthogUpdate) GetBaseURL() *string {
+ if o == nil {
+ return nil
+ }
+ return o.BaseURL
+}
+
+func (o *SourcePosthogUpdate) GetEventsTimeStep() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.EventsTimeStep
+}
+
+func (o *SourcePosthogUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepostmarkapp.go b/internal/sdk/pkg/models/shared/sourcepostmarkapp.go
old mode 100755
new mode 100644
index 0da75d7cf..590b807ab
--- a/internal/sdk/pkg/models/shared/sourcepostmarkapp.go
+++ b/internal/sdk/pkg/models/shared/sourcepostmarkapp.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePostmarkappPostmarkapp string
+type Postmarkapp string
const (
- SourcePostmarkappPostmarkappPostmarkapp SourcePostmarkappPostmarkapp = "postmarkapp"
+ PostmarkappPostmarkapp Postmarkapp = "postmarkapp"
)
-func (e SourcePostmarkappPostmarkapp) ToPointer() *SourcePostmarkappPostmarkapp {
+func (e Postmarkapp) ToPointer() *Postmarkapp {
return &e
}
-func (e *SourcePostmarkappPostmarkapp) UnmarshalJSON(data []byte) error {
+func (e *Postmarkapp) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "postmarkapp":
- *e = SourcePostmarkappPostmarkapp(v)
+ *e = Postmarkapp(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePostmarkappPostmarkapp: %v", v)
+ return fmt.Errorf("invalid value for Postmarkapp: %v", v)
}
}
@@ -35,6 +36,35 @@ type SourcePostmarkapp struct {
// API Key for account
XPostmarkAccountToken string `json:"X-Postmark-Account-Token"`
// API Key for server
- XPostmarkServerToken string `json:"X-Postmark-Server-Token"`
- SourceType SourcePostmarkappPostmarkapp `json:"sourceType"`
+ XPostmarkServerToken string `json:"X-Postmark-Server-Token"`
+ sourceType Postmarkapp `const:"postmarkapp" json:"sourceType"`
+}
+
+func (s SourcePostmarkapp) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePostmarkapp) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePostmarkapp) GetXPostmarkAccountToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.XPostmarkAccountToken
+}
+
+func (o *SourcePostmarkapp) GetXPostmarkServerToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.XPostmarkServerToken
+}
+
+func (o *SourcePostmarkapp) GetSourceType() Postmarkapp {
+ return PostmarkappPostmarkapp
}
diff --git a/internal/sdk/pkg/models/shared/sourcepostmarkappcreaterequest.go b/internal/sdk/pkg/models/shared/sourcepostmarkappcreaterequest.go
old mode 100755
new mode 100644
index ca07e6d63..0cc658e6a
--- a/internal/sdk/pkg/models/shared/sourcepostmarkappcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepostmarkappcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePostmarkappCreateRequest struct {
Configuration SourcePostmarkapp `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePostmarkappCreateRequest) GetConfiguration() SourcePostmarkapp {
+ if o == nil {
+ return SourcePostmarkapp{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePostmarkappCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePostmarkappCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePostmarkappCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePostmarkappCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepostmarkappputrequest.go b/internal/sdk/pkg/models/shared/sourcepostmarkappputrequest.go
old mode 100755
new mode 100644
index 64231efc3..44d083fe0
--- a/internal/sdk/pkg/models/shared/sourcepostmarkappputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepostmarkappputrequest.go
@@ -7,3 +7,24 @@ type SourcePostmarkappPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePostmarkappPutRequest) GetConfiguration() SourcePostmarkappUpdate {
+ if o == nil {
+ return SourcePostmarkappUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePostmarkappPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePostmarkappPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepostmarkappupdate.go b/internal/sdk/pkg/models/shared/sourcepostmarkappupdate.go
old mode 100755
new mode 100644
index 7ac0c52ec..f3ad6390e
--- a/internal/sdk/pkg/models/shared/sourcepostmarkappupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepostmarkappupdate.go
@@ -8,3 +8,17 @@ type SourcePostmarkappUpdate struct {
// API Key for server
XPostmarkServerToken string `json:"X-Postmark-Server-Token"`
}
+
+func (o *SourcePostmarkappUpdate) GetXPostmarkAccountToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.XPostmarkAccountToken
+}
+
+func (o *SourcePostmarkappUpdate) GetXPostmarkServerToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.XPostmarkServerToken
+}
diff --git a/internal/sdk/pkg/models/shared/sourceprestashop.go b/internal/sdk/pkg/models/shared/sourceprestashop.go
old mode 100755
new mode 100644
index ad12a5922..5072127a2
--- a/internal/sdk/pkg/models/shared/sourceprestashop.go
+++ b/internal/sdk/pkg/models/shared/sourceprestashop.go
@@ -3,41 +3,78 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePrestashopPrestashop string
+type Prestashop string
const (
- SourcePrestashopPrestashopPrestashop SourcePrestashopPrestashop = "prestashop"
+ PrestashopPrestashop Prestashop = "prestashop"
)
-func (e SourcePrestashopPrestashop) ToPointer() *SourcePrestashopPrestashop {
+func (e Prestashop) ToPointer() *Prestashop {
return &e
}
-func (e *SourcePrestashopPrestashop) UnmarshalJSON(data []byte) error {
+func (e *Prestashop) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "prestashop":
- *e = SourcePrestashopPrestashop(v)
+ *e = Prestashop(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePrestashopPrestashop: %v", v)
+ return fmt.Errorf("invalid value for Prestashop: %v", v)
}
}
type SourcePrestashop struct {
// Your PrestaShop access key. See the docs for info on how to obtain this.
- AccessKey string `json:"access_key"`
- SourceType SourcePrestashopPrestashop `json:"sourceType"`
+ AccessKey string `json:"access_key"`
+ sourceType Prestashop `const:"prestashop" json:"sourceType"`
// The Start date in the format YYYY-MM-DD.
StartDate types.Date `json:"start_date"`
// Shop URL without trailing slash.
URL string `json:"url"`
}
+
+func (s SourcePrestashop) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePrestashop) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePrestashop) GetAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKey
+}
+
+func (o *SourcePrestashop) GetSourceType() Prestashop {
+ return PrestashopPrestashop
+}
+
+func (o *SourcePrestashop) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
+
+func (o *SourcePrestashop) GetURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.URL
+}
diff --git a/internal/sdk/pkg/models/shared/sourceprestashopcreaterequest.go b/internal/sdk/pkg/models/shared/sourceprestashopcreaterequest.go
old mode 100755
new mode 100644
index 01d769504..89be569b9
--- a/internal/sdk/pkg/models/shared/sourceprestashopcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceprestashopcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePrestashopCreateRequest struct {
Configuration SourcePrestashop `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePrestashopCreateRequest) GetConfiguration() SourcePrestashop {
+ if o == nil {
+ return SourcePrestashop{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePrestashopCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePrestashopCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePrestashopCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePrestashopCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceprestashopputrequest.go b/internal/sdk/pkg/models/shared/sourceprestashopputrequest.go
old mode 100755
new mode 100644
index 7a6f21106..97ed8d75c
--- a/internal/sdk/pkg/models/shared/sourceprestashopputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceprestashopputrequest.go
@@ -7,3 +7,24 @@ type SourcePrestashopPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePrestashopPutRequest) GetConfiguration() SourcePrestashopUpdate {
+ if o == nil {
+ return SourcePrestashopUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePrestashopPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePrestashopPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceprestashopupdate.go b/internal/sdk/pkg/models/shared/sourceprestashopupdate.go
old mode 100755
new mode 100644
index 3595f9806..f9daad3e3
--- a/internal/sdk/pkg/models/shared/sourceprestashopupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceprestashopupdate.go
@@ -3,7 +3,8 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourcePrestashopUpdate struct {
@@ -14,3 +15,35 @@ type SourcePrestashopUpdate struct {
// Shop URL without trailing slash.
URL string `json:"url"`
}
+
+func (s SourcePrestashopUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePrestashopUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePrestashopUpdate) GetAccessKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessKey
+}
+
+func (o *SourcePrestashopUpdate) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
+
+func (o *SourcePrestashopUpdate) GetURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.URL
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepunkapi.go b/internal/sdk/pkg/models/shared/sourcepunkapi.go
old mode 100755
new mode 100644
index 84114ad5f..3eb14c7ea
--- a/internal/sdk/pkg/models/shared/sourcepunkapi.go
+++ b/internal/sdk/pkg/models/shared/sourcepunkapi.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePunkAPIPunkAPI string
+type PunkAPI string
const (
- SourcePunkAPIPunkAPIPunkAPI SourcePunkAPIPunkAPI = "punk-api"
+ PunkAPIPunkAPI PunkAPI = "punk-api"
)
-func (e SourcePunkAPIPunkAPI) ToPointer() *SourcePunkAPIPunkAPI {
+func (e PunkAPI) ToPointer() *PunkAPI {
return &e
}
-func (e *SourcePunkAPIPunkAPI) UnmarshalJSON(data []byte) error {
+func (e *PunkAPI) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "punk-api":
- *e = SourcePunkAPIPunkAPI(v)
+ *e = PunkAPI(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePunkAPIPunkAPI: %v", v)
+ return fmt.Errorf("invalid value for PunkAPI: %v", v)
}
}
@@ -37,6 +38,42 @@ type SourcePunkAPI struct {
// To extract specific data with Unique ID
BrewedBefore string `json:"brewed_before"`
// To extract specific data with Unique ID
- ID *string `json:"id,omitempty"`
- SourceType SourcePunkAPIPunkAPI `json:"sourceType"`
+ ID *string `json:"id,omitempty"`
+ sourceType PunkAPI `const:"punk-api" json:"sourceType"`
+}
+
+func (s SourcePunkAPI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePunkAPI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePunkAPI) GetBrewedAfter() string {
+ if o == nil {
+ return ""
+ }
+ return o.BrewedAfter
+}
+
+func (o *SourcePunkAPI) GetBrewedBefore() string {
+ if o == nil {
+ return ""
+ }
+ return o.BrewedBefore
+}
+
+func (o *SourcePunkAPI) GetID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ID
+}
+
+func (o *SourcePunkAPI) GetSourceType() PunkAPI {
+ return PunkAPIPunkAPI
}
diff --git a/internal/sdk/pkg/models/shared/sourcepunkapicreaterequest.go b/internal/sdk/pkg/models/shared/sourcepunkapicreaterequest.go
old mode 100755
new mode 100644
index ed80da0ec..585e3b724
--- a/internal/sdk/pkg/models/shared/sourcepunkapicreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepunkapicreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePunkAPICreateRequest struct {
Configuration SourcePunkAPI `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePunkAPICreateRequest) GetConfiguration() SourcePunkAPI {
+ if o == nil {
+ return SourcePunkAPI{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePunkAPICreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePunkAPICreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePunkAPICreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePunkAPICreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepunkapiputrequest.go b/internal/sdk/pkg/models/shared/sourcepunkapiputrequest.go
old mode 100755
new mode 100644
index b5154dbbf..bd53af290
--- a/internal/sdk/pkg/models/shared/sourcepunkapiputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepunkapiputrequest.go
@@ -7,3 +7,24 @@ type SourcePunkAPIPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePunkAPIPutRequest) GetConfiguration() SourcePunkAPIUpdate {
+ if o == nil {
+ return SourcePunkAPIUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePunkAPIPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePunkAPIPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepunkapiupdate.go b/internal/sdk/pkg/models/shared/sourcepunkapiupdate.go
old mode 100755
new mode 100644
index a14bdcef2..53f27ff93
--- a/internal/sdk/pkg/models/shared/sourcepunkapiupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepunkapiupdate.go
@@ -10,3 +10,24 @@ type SourcePunkAPIUpdate struct {
// To extract specific data with Unique ID
ID *string `json:"id,omitempty"`
}
+
+func (o *SourcePunkAPIUpdate) GetBrewedAfter() string {
+ if o == nil {
+ return ""
+ }
+ return o.BrewedAfter
+}
+
+func (o *SourcePunkAPIUpdate) GetBrewedBefore() string {
+ if o == nil {
+ return ""
+ }
+ return o.BrewedBefore
+}
+
+func (o *SourcePunkAPIUpdate) GetID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceputrequest.go b/internal/sdk/pkg/models/shared/sourceputrequest.go
old mode 100755
new mode 100644
index 54a4812bb..2dea9aba0
--- a/internal/sdk/pkg/models/shared/sourceputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceputrequest.go
@@ -7,3 +7,17 @@ type SourcePutRequest struct {
Configuration interface{} `json:"configuration"`
Name string `json:"name"`
}
+
+func (o *SourcePutRequest) GetConfiguration() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.Configuration
+}
+
+func (o *SourcePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepypi.go b/internal/sdk/pkg/models/shared/sourcepypi.go
old mode 100755
new mode 100644
index af1806d02..7ff965567
--- a/internal/sdk/pkg/models/shared/sourcepypi.go
+++ b/internal/sdk/pkg/models/shared/sourcepypi.go
@@ -5,36 +5,66 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourcePypiPypi string
+type Pypi string
const (
- SourcePypiPypiPypi SourcePypiPypi = "pypi"
+ PypiPypi Pypi = "pypi"
)
-func (e SourcePypiPypi) ToPointer() *SourcePypiPypi {
+func (e Pypi) ToPointer() *Pypi {
return &e
}
-func (e *SourcePypiPypi) UnmarshalJSON(data []byte) error {
+func (e *Pypi) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "pypi":
- *e = SourcePypiPypi(v)
+ *e = Pypi(v)
return nil
default:
- return fmt.Errorf("invalid value for SourcePypiPypi: %v", v)
+ return fmt.Errorf("invalid value for Pypi: %v", v)
}
}
type SourcePypi struct {
// Name of the project/package. Can only be in lowercase with hyphen. This is the name used using pip command for installing the package.
- ProjectName string `json:"project_name"`
- SourceType SourcePypiPypi `json:"sourceType"`
+ ProjectName string `json:"project_name"`
+ sourceType Pypi `const:"pypi" json:"sourceType"`
// Version of the project/package. Use it to find a particular release instead of all releases.
Version *string `json:"version,omitempty"`
}
+
+func (s SourcePypi) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourcePypi) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourcePypi) GetProjectName() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectName
+}
+
+func (o *SourcePypi) GetSourceType() Pypi {
+ return PypiPypi
+}
+
+func (o *SourcePypi) GetVersion() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Version
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepypicreaterequest.go b/internal/sdk/pkg/models/shared/sourcepypicreaterequest.go
old mode 100755
new mode 100644
index 8837bcef8..ffdc95cef
--- a/internal/sdk/pkg/models/shared/sourcepypicreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepypicreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourcePypiCreateRequest struct {
Configuration SourcePypi `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePypiCreateRequest) GetConfiguration() SourcePypi {
+ if o == nil {
+ return SourcePypi{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePypiCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourcePypiCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePypiCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourcePypiCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepypiputrequest.go b/internal/sdk/pkg/models/shared/sourcepypiputrequest.go
old mode 100755
new mode 100644
index 840b09d13..8a2d50277
--- a/internal/sdk/pkg/models/shared/sourcepypiputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcepypiputrequest.go
@@ -7,3 +7,24 @@ type SourcePypiPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourcePypiPutRequest) GetConfiguration() SourcePypiUpdate {
+ if o == nil {
+ return SourcePypiUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourcePypiPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourcePypiPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcepypiupdate.go b/internal/sdk/pkg/models/shared/sourcepypiupdate.go
old mode 100755
new mode 100644
index 45eb75656..38e72a9b4
--- a/internal/sdk/pkg/models/shared/sourcepypiupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcepypiupdate.go
@@ -8,3 +8,17 @@ type SourcePypiUpdate struct {
// Version of the project/package. Use it to find a particular release instead of all releases.
Version *string `json:"version,omitempty"`
}
+
+func (o *SourcePypiUpdate) GetProjectName() string {
+ if o == nil {
+ return ""
+ }
+ return o.ProjectName
+}
+
+func (o *SourcePypiUpdate) GetVersion() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Version
+}
diff --git a/internal/sdk/pkg/models/shared/sourcequalaroo.go b/internal/sdk/pkg/models/shared/sourcequalaroo.go
old mode 100755
new mode 100644
index 36ca0e94d..6137815c4
--- a/internal/sdk/pkg/models/shared/sourcequalaroo.go
+++ b/internal/sdk/pkg/models/shared/sourcequalaroo.go
@@ -5,36 +5,37 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceQualarooQualaroo string
+type Qualaroo string
const (
- SourceQualarooQualarooQualaroo SourceQualarooQualaroo = "qualaroo"
+ QualarooQualaroo Qualaroo = "qualaroo"
)
-func (e SourceQualarooQualaroo) ToPointer() *SourceQualarooQualaroo {
+func (e Qualaroo) ToPointer() *Qualaroo {
return &e
}
-func (e *SourceQualarooQualaroo) UnmarshalJSON(data []byte) error {
+func (e *Qualaroo) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "qualaroo":
- *e = SourceQualarooQualaroo(v)
+ *e = Qualaroo(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceQualarooQualaroo: %v", v)
+ return fmt.Errorf("invalid value for Qualaroo: %v", v)
}
}
type SourceQualaroo struct {
// A Qualaroo token. See the docs for instructions on how to generate it.
- Key string `json:"key"`
- SourceType SourceQualarooQualaroo `json:"sourceType"`
+ Key string `json:"key"`
+ sourceType Qualaroo `const:"qualaroo" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate string `json:"start_date"`
// IDs of the surveys from which you'd like to replicate data. If left empty, data from all surveys to which you have access will be replicated.
@@ -42,3 +43,46 @@ type SourceQualaroo struct {
// A Qualaroo token. See the docs for instructions on how to generate it.
Token string `json:"token"`
}
+
+func (s SourceQualaroo) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceQualaroo) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceQualaroo) GetKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Key
+}
+
+func (o *SourceQualaroo) GetSourceType() Qualaroo {
+ return QualarooQualaroo
+}
+
+func (o *SourceQualaroo) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceQualaroo) GetSurveyIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.SurveyIds
+}
+
+func (o *SourceQualaroo) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourcequalaroocreaterequest.go b/internal/sdk/pkg/models/shared/sourcequalaroocreaterequest.go
old mode 100755
new mode 100644
index 658b2634d..e1bce5251
--- a/internal/sdk/pkg/models/shared/sourcequalaroocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcequalaroocreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceQualarooCreateRequest struct {
Configuration SourceQualaroo `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceQualarooCreateRequest) GetConfiguration() SourceQualaroo {
+ if o == nil {
+ return SourceQualaroo{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceQualarooCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceQualarooCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceQualarooCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceQualarooCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcequalarooputrequest.go b/internal/sdk/pkg/models/shared/sourcequalarooputrequest.go
old mode 100755
new mode 100644
index 344885d2f..e677639db
--- a/internal/sdk/pkg/models/shared/sourcequalarooputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcequalarooputrequest.go
@@ -7,3 +7,24 @@ type SourceQualarooPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceQualarooPutRequest) GetConfiguration() SourceQualarooUpdate {
+ if o == nil {
+ return SourceQualarooUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceQualarooPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceQualarooPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcequalarooupdate.go b/internal/sdk/pkg/models/shared/sourcequalarooupdate.go
old mode 100755
new mode 100644
index 62f548ad8..1e7d97fc0
--- a/internal/sdk/pkg/models/shared/sourcequalarooupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcequalarooupdate.go
@@ -12,3 +12,31 @@ type SourceQualarooUpdate struct {
// A Qualaroo token. See the docs for instructions on how to generate it.
Token string `json:"token"`
}
+
+func (o *SourceQualarooUpdate) GetKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Key
+}
+
+func (o *SourceQualarooUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceQualarooUpdate) GetSurveyIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.SurveyIds
+}
+
+func (o *SourceQualarooUpdate) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourcequickbooks.go b/internal/sdk/pkg/models/shared/sourcequickbooks.go
old mode 100755
new mode 100644
index 9f82c3624..dcaf50d5a
--- a/internal/sdk/pkg/models/shared/sourcequickbooks.go
+++ b/internal/sdk/pkg/models/shared/sourcequickbooks.go
@@ -3,41 +3,41 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceQuickbooksAuthorizationMethodOAuth20AuthType string
+type SourceQuickbooksAuthType string
const (
- SourceQuickbooksAuthorizationMethodOAuth20AuthTypeOauth20 SourceQuickbooksAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceQuickbooksAuthTypeOauth20 SourceQuickbooksAuthType = "oauth2.0"
)
-func (e SourceQuickbooksAuthorizationMethodOAuth20AuthType) ToPointer() *SourceQuickbooksAuthorizationMethodOAuth20AuthType {
+func (e SourceQuickbooksAuthType) ToPointer() *SourceQuickbooksAuthType {
return &e
}
-func (e *SourceQuickbooksAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceQuickbooksAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceQuickbooksAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceQuickbooksAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceQuickbooksAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceQuickbooksAuthType: %v", v)
}
}
-type SourceQuickbooksAuthorizationMethodOAuth20 struct {
+type SourceQuickbooksOAuth20 struct {
// Access token fot making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType *SourceQuickbooksAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceQuickbooksAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
ClientID string `json:"client_id"`
// Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
@@ -50,36 +50,90 @@ type SourceQuickbooksAuthorizationMethodOAuth20 struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (s SourceQuickbooksOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceQuickbooksOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceQuickbooksOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceQuickbooksOAuth20) GetAuthType() *SourceQuickbooksAuthType {
+ return SourceQuickbooksAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceQuickbooksOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceQuickbooksOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceQuickbooksOAuth20) GetRealmID() string {
+ if o == nil {
+ return ""
+ }
+ return o.RealmID
+}
+
+func (o *SourceQuickbooksOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceQuickbooksOAuth20) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceQuickbooksAuthorizationMethodType string
const (
- SourceQuickbooksAuthorizationMethodTypeSourceQuickbooksAuthorizationMethodOAuth20 SourceQuickbooksAuthorizationMethodType = "source-quickbooks_Authorization Method_OAuth2.0"
+ SourceQuickbooksAuthorizationMethodTypeSourceQuickbooksOAuth20 SourceQuickbooksAuthorizationMethodType = "source-quickbooks_OAuth2.0"
)
type SourceQuickbooksAuthorizationMethod struct {
- SourceQuickbooksAuthorizationMethodOAuth20 *SourceQuickbooksAuthorizationMethodOAuth20
+ SourceQuickbooksOAuth20 *SourceQuickbooksOAuth20
Type SourceQuickbooksAuthorizationMethodType
}
-func CreateSourceQuickbooksAuthorizationMethodSourceQuickbooksAuthorizationMethodOAuth20(sourceQuickbooksAuthorizationMethodOAuth20 SourceQuickbooksAuthorizationMethodOAuth20) SourceQuickbooksAuthorizationMethod {
- typ := SourceQuickbooksAuthorizationMethodTypeSourceQuickbooksAuthorizationMethodOAuth20
+func CreateSourceQuickbooksAuthorizationMethodSourceQuickbooksOAuth20(sourceQuickbooksOAuth20 SourceQuickbooksOAuth20) SourceQuickbooksAuthorizationMethod {
+ typ := SourceQuickbooksAuthorizationMethodTypeSourceQuickbooksOAuth20
return SourceQuickbooksAuthorizationMethod{
- SourceQuickbooksAuthorizationMethodOAuth20: &sourceQuickbooksAuthorizationMethodOAuth20,
- Type: typ,
+ SourceQuickbooksOAuth20: &sourceQuickbooksOAuth20,
+ Type: typ,
}
}
func (u *SourceQuickbooksAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceQuickbooksAuthorizationMethodOAuth20 := new(SourceQuickbooksAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceQuickbooksAuthorizationMethodOAuth20); err == nil {
- u.SourceQuickbooksAuthorizationMethodOAuth20 = sourceQuickbooksAuthorizationMethodOAuth20
- u.Type = SourceQuickbooksAuthorizationMethodTypeSourceQuickbooksAuthorizationMethodOAuth20
+
+ sourceQuickbooksOAuth20 := new(SourceQuickbooksOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceQuickbooksOAuth20, "", true, true); err == nil {
+ u.SourceQuickbooksOAuth20 = sourceQuickbooksOAuth20
+ u.Type = SourceQuickbooksAuthorizationMethodTypeSourceQuickbooksOAuth20
return nil
}
@@ -87,42 +141,78 @@ func (u *SourceQuickbooksAuthorizationMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceQuickbooksAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceQuickbooksAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceQuickbooksAuthorizationMethodOAuth20)
+ if u.SourceQuickbooksOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceQuickbooksOAuth20, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceQuickbooksQuickbooks string
+type Quickbooks string
const (
- SourceQuickbooksQuickbooksQuickbooks SourceQuickbooksQuickbooks = "quickbooks"
+ QuickbooksQuickbooks Quickbooks = "quickbooks"
)
-func (e SourceQuickbooksQuickbooks) ToPointer() *SourceQuickbooksQuickbooks {
+func (e Quickbooks) ToPointer() *Quickbooks {
return &e
}
-func (e *SourceQuickbooksQuickbooks) UnmarshalJSON(data []byte) error {
+func (e *Quickbooks) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "quickbooks":
- *e = SourceQuickbooksQuickbooks(v)
+ *e = Quickbooks(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceQuickbooksQuickbooks: %v", v)
+ return fmt.Errorf("invalid value for Quickbooks: %v", v)
}
}
type SourceQuickbooks struct {
Credentials SourceQuickbooksAuthorizationMethod `json:"credentials"`
// Determines whether to use the sandbox or production environment.
- Sandbox bool `json:"sandbox"`
- SourceType SourceQuickbooksQuickbooks `json:"sourceType"`
+ Sandbox *bool `default:"false" json:"sandbox"`
+ sourceType Quickbooks `const:"quickbooks" json:"sourceType"`
// The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceQuickbooks) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceQuickbooks) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceQuickbooks) GetCredentials() SourceQuickbooksAuthorizationMethod {
+ if o == nil {
+ return SourceQuickbooksAuthorizationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceQuickbooks) GetSandbox() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Sandbox
+}
+
+func (o *SourceQuickbooks) GetSourceType() Quickbooks {
+ return QuickbooksQuickbooks
+}
+
+func (o *SourceQuickbooks) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcequickbookscreaterequest.go b/internal/sdk/pkg/models/shared/sourcequickbookscreaterequest.go
old mode 100755
new mode 100644
index 2c35859fb..9da6f2bca
--- a/internal/sdk/pkg/models/shared/sourcequickbookscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcequickbookscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceQuickbooksCreateRequest struct {
Configuration SourceQuickbooks `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceQuickbooksCreateRequest) GetConfiguration() SourceQuickbooks {
+ if o == nil {
+ return SourceQuickbooks{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceQuickbooksCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceQuickbooksCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceQuickbooksCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceQuickbooksCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcequickbooksputrequest.go b/internal/sdk/pkg/models/shared/sourcequickbooksputrequest.go
old mode 100755
new mode 100644
index 9ec6dbcb4..a08467075
--- a/internal/sdk/pkg/models/shared/sourcequickbooksputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcequickbooksputrequest.go
@@ -7,3 +7,24 @@ type SourceQuickbooksPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceQuickbooksPutRequest) GetConfiguration() SourceQuickbooksUpdate {
+ if o == nil {
+ return SourceQuickbooksUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceQuickbooksPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceQuickbooksPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcequickbooksupdate.go b/internal/sdk/pkg/models/shared/sourcequickbooksupdate.go
old mode 100755
new mode 100644
index 8b33845fb..e927b2168
--- a/internal/sdk/pkg/models/shared/sourcequickbooksupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcequickbooksupdate.go
@@ -3,41 +3,41 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceQuickbooksUpdateAuthorizationMethodOAuth20AuthType string
+type SourceQuickbooksUpdateAuthType string
const (
- SourceQuickbooksUpdateAuthorizationMethodOAuth20AuthTypeOauth20 SourceQuickbooksUpdateAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceQuickbooksUpdateAuthTypeOauth20 SourceQuickbooksUpdateAuthType = "oauth2.0"
)
-func (e SourceQuickbooksUpdateAuthorizationMethodOAuth20AuthType) ToPointer() *SourceQuickbooksUpdateAuthorizationMethodOAuth20AuthType {
+func (e SourceQuickbooksUpdateAuthType) ToPointer() *SourceQuickbooksUpdateAuthType {
return &e
}
-func (e *SourceQuickbooksUpdateAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceQuickbooksUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceQuickbooksUpdateAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceQuickbooksUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceQuickbooksUpdateAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceQuickbooksUpdateAuthType: %v", v)
}
}
-type SourceQuickbooksUpdateAuthorizationMethodOAuth20 struct {
+type SourceQuickbooksUpdateOAuth20 struct {
// Access token fot making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType *SourceQuickbooksUpdateAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceQuickbooksUpdateAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// Identifies which app is making the request. Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
ClientID string `json:"client_id"`
// Obtain this value from the Keys tab on the app profile via My Apps on the developer site. There are two versions of this key: development and production.
@@ -50,36 +50,90 @@ type SourceQuickbooksUpdateAuthorizationMethodOAuth20 struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (s SourceQuickbooksUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceQuickbooksUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceQuickbooksUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceQuickbooksUpdateOAuth20) GetAuthType() *SourceQuickbooksUpdateAuthType {
+ return SourceQuickbooksUpdateAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceQuickbooksUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceQuickbooksUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceQuickbooksUpdateOAuth20) GetRealmID() string {
+ if o == nil {
+ return ""
+ }
+ return o.RealmID
+}
+
+func (o *SourceQuickbooksUpdateOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceQuickbooksUpdateOAuth20) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceQuickbooksUpdateAuthorizationMethodType string
const (
- SourceQuickbooksUpdateAuthorizationMethodTypeSourceQuickbooksUpdateAuthorizationMethodOAuth20 SourceQuickbooksUpdateAuthorizationMethodType = "source-quickbooks-update_Authorization Method_OAuth2.0"
+ SourceQuickbooksUpdateAuthorizationMethodTypeSourceQuickbooksUpdateOAuth20 SourceQuickbooksUpdateAuthorizationMethodType = "source-quickbooks-update_OAuth2.0"
)
type SourceQuickbooksUpdateAuthorizationMethod struct {
- SourceQuickbooksUpdateAuthorizationMethodOAuth20 *SourceQuickbooksUpdateAuthorizationMethodOAuth20
+ SourceQuickbooksUpdateOAuth20 *SourceQuickbooksUpdateOAuth20
Type SourceQuickbooksUpdateAuthorizationMethodType
}
-func CreateSourceQuickbooksUpdateAuthorizationMethodSourceQuickbooksUpdateAuthorizationMethodOAuth20(sourceQuickbooksUpdateAuthorizationMethodOAuth20 SourceQuickbooksUpdateAuthorizationMethodOAuth20) SourceQuickbooksUpdateAuthorizationMethod {
- typ := SourceQuickbooksUpdateAuthorizationMethodTypeSourceQuickbooksUpdateAuthorizationMethodOAuth20
+func CreateSourceQuickbooksUpdateAuthorizationMethodSourceQuickbooksUpdateOAuth20(sourceQuickbooksUpdateOAuth20 SourceQuickbooksUpdateOAuth20) SourceQuickbooksUpdateAuthorizationMethod {
+ typ := SourceQuickbooksUpdateAuthorizationMethodTypeSourceQuickbooksUpdateOAuth20
return SourceQuickbooksUpdateAuthorizationMethod{
- SourceQuickbooksUpdateAuthorizationMethodOAuth20: &sourceQuickbooksUpdateAuthorizationMethodOAuth20,
- Type: typ,
+ SourceQuickbooksUpdateOAuth20: &sourceQuickbooksUpdateOAuth20,
+ Type: typ,
}
}
func (u *SourceQuickbooksUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceQuickbooksUpdateAuthorizationMethodOAuth20 := new(SourceQuickbooksUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceQuickbooksUpdateAuthorizationMethodOAuth20); err == nil {
- u.SourceQuickbooksUpdateAuthorizationMethodOAuth20 = sourceQuickbooksUpdateAuthorizationMethodOAuth20
- u.Type = SourceQuickbooksUpdateAuthorizationMethodTypeSourceQuickbooksUpdateAuthorizationMethodOAuth20
+
+ sourceQuickbooksUpdateOAuth20 := new(SourceQuickbooksUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceQuickbooksUpdateOAuth20, "", true, true); err == nil {
+ u.SourceQuickbooksUpdateOAuth20 = sourceQuickbooksUpdateOAuth20
+ u.Type = SourceQuickbooksUpdateAuthorizationMethodTypeSourceQuickbooksUpdateOAuth20
return nil
}
@@ -87,17 +141,49 @@ func (u *SourceQuickbooksUpdateAuthorizationMethod) UnmarshalJSON(data []byte) e
}
func (u SourceQuickbooksUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceQuickbooksUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceQuickbooksUpdateAuthorizationMethodOAuth20)
+ if u.SourceQuickbooksUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceQuickbooksUpdateOAuth20, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceQuickbooksUpdate struct {
Credentials SourceQuickbooksUpdateAuthorizationMethod `json:"credentials"`
// Determines whether to use the sandbox or production environment.
- Sandbox bool `json:"sandbox"`
+ Sandbox *bool `default:"false" json:"sandbox"`
// The default value to use if no bookmark exists for an endpoint (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceQuickbooksUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceQuickbooksUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceQuickbooksUpdate) GetCredentials() SourceQuickbooksUpdateAuthorizationMethod {
+ if o == nil {
+ return SourceQuickbooksUpdateAuthorizationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceQuickbooksUpdate) GetSandbox() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Sandbox
+}
+
+func (o *SourceQuickbooksUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerailz.go b/internal/sdk/pkg/models/shared/sourcerailz.go
old mode 100755
new mode 100644
index a8f54aa97..698a83a3b
--- a/internal/sdk/pkg/models/shared/sourcerailz.go
+++ b/internal/sdk/pkg/models/shared/sourcerailz.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceRailzRailz string
+type Railz string
const (
- SourceRailzRailzRailz SourceRailzRailz = "railz"
+ RailzRailz Railz = "railz"
)
-func (e SourceRailzRailz) ToPointer() *SourceRailzRailz {
+func (e Railz) ToPointer() *Railz {
return &e
}
-func (e *SourceRailzRailz) UnmarshalJSON(data []byte) error {
+func (e *Railz) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "railz":
- *e = SourceRailzRailz(v)
+ *e = Railz(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRailzRailz: %v", v)
+ return fmt.Errorf("invalid value for Railz: %v", v)
}
}
@@ -35,8 +36,44 @@ type SourceRailz struct {
// Client ID (client_id)
ClientID string `json:"client_id"`
// Secret key (secret_key)
- SecretKey string `json:"secret_key"`
- SourceType SourceRailzRailz `json:"sourceType"`
+ SecretKey string `json:"secret_key"`
+ sourceType Railz `const:"railz" json:"sourceType"`
// Start date
StartDate string `json:"start_date"`
}
+
+func (s SourceRailz) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceRailz) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceRailz) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceRailz) GetSecretKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretKey
+}
+
+func (o *SourceRailz) GetSourceType() Railz {
+ return RailzRailz
+}
+
+func (o *SourceRailz) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerailzcreaterequest.go b/internal/sdk/pkg/models/shared/sourcerailzcreaterequest.go
old mode 100755
new mode 100644
index 09895d3d7..556b908ee
--- a/internal/sdk/pkg/models/shared/sourcerailzcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerailzcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceRailzCreateRequest struct {
Configuration SourceRailz `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRailzCreateRequest) GetConfiguration() SourceRailz {
+ if o == nil {
+ return SourceRailz{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRailzCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceRailzCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRailzCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceRailzCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerailzputrequest.go b/internal/sdk/pkg/models/shared/sourcerailzputrequest.go
old mode 100755
new mode 100644
index 562a15683..2b5b02039
--- a/internal/sdk/pkg/models/shared/sourcerailzputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerailzputrequest.go
@@ -7,3 +7,24 @@ type SourceRailzPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRailzPutRequest) GetConfiguration() SourceRailzUpdate {
+ if o == nil {
+ return SourceRailzUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRailzPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRailzPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerailzupdate.go b/internal/sdk/pkg/models/shared/sourcerailzupdate.go
old mode 100755
new mode 100644
index 71d424db8..d8aef7640
--- a/internal/sdk/pkg/models/shared/sourcerailzupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcerailzupdate.go
@@ -10,3 +10,24 @@ type SourceRailzUpdate struct {
// Start date
StartDate string `json:"start_date"`
}
+
+func (o *SourceRailzUpdate) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceRailzUpdate) GetSecretKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretKey
+}
+
+func (o *SourceRailzUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerecharge.go b/internal/sdk/pkg/models/shared/sourcerecharge.go
old mode 100755
new mode 100644
index 20a2db716..b767fd755
--- a/internal/sdk/pkg/models/shared/sourcerecharge.go
+++ b/internal/sdk/pkg/models/shared/sourcerecharge.go
@@ -5,37 +5,67 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceRechargeRecharge string
+type Recharge string
const (
- SourceRechargeRechargeRecharge SourceRechargeRecharge = "recharge"
+ RechargeRecharge Recharge = "recharge"
)
-func (e SourceRechargeRecharge) ToPointer() *SourceRechargeRecharge {
+func (e Recharge) ToPointer() *Recharge {
return &e
}
-func (e *SourceRechargeRecharge) UnmarshalJSON(data []byte) error {
+func (e *Recharge) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "recharge":
- *e = SourceRechargeRecharge(v)
+ *e = Recharge(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRechargeRecharge: %v", v)
+ return fmt.Errorf("invalid value for Recharge: %v", v)
}
}
type SourceRecharge struct {
// The value of the Access Token generated. See the docs for more information.
- AccessToken string `json:"access_token"`
- SourceType SourceRechargeRecharge `json:"sourceType"`
+ AccessToken string `json:"access_token"`
+ sourceType Recharge `const:"recharge" json:"sourceType"`
// The date from which you'd like to replicate data for Recharge API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceRecharge) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceRecharge) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceRecharge) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceRecharge) GetSourceType() Recharge {
+ return RechargeRecharge
+}
+
+func (o *SourceRecharge) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerechargecreaterequest.go b/internal/sdk/pkg/models/shared/sourcerechargecreaterequest.go
old mode 100755
new mode 100644
index f1ad8c125..0f6a0b4e6
--- a/internal/sdk/pkg/models/shared/sourcerechargecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerechargecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceRechargeCreateRequest struct {
Configuration SourceRecharge `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRechargeCreateRequest) GetConfiguration() SourceRecharge {
+ if o == nil {
+ return SourceRecharge{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRechargeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceRechargeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRechargeCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceRechargeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerechargeputrequest.go b/internal/sdk/pkg/models/shared/sourcerechargeputrequest.go
old mode 100755
new mode 100644
index 938d0e25f..21a20fdfa
--- a/internal/sdk/pkg/models/shared/sourcerechargeputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerechargeputrequest.go
@@ -7,3 +7,24 @@ type SourceRechargePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRechargePutRequest) GetConfiguration() SourceRechargeUpdate {
+ if o == nil {
+ return SourceRechargeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRechargePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRechargePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerechargeupdate.go b/internal/sdk/pkg/models/shared/sourcerechargeupdate.go
old mode 100755
new mode 100644
index 9a000377e..b85f21a40
--- a/internal/sdk/pkg/models/shared/sourcerechargeupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcerechargeupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -12,3 +13,28 @@ type SourceRechargeUpdate struct {
// The date from which you'd like to replicate data for Recharge API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceRechargeUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceRechargeUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceRechargeUpdate) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceRechargeUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerecreation.go b/internal/sdk/pkg/models/shared/sourcerecreation.go
old mode 100755
new mode 100644
index e49200990..f47699491
--- a/internal/sdk/pkg/models/shared/sourcerecreation.go
+++ b/internal/sdk/pkg/models/shared/sourcerecreation.go
@@ -5,35 +5,65 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceRecreationRecreation string
+type Recreation string
const (
- SourceRecreationRecreationRecreation SourceRecreationRecreation = "recreation"
+ RecreationRecreation Recreation = "recreation"
)
-func (e SourceRecreationRecreation) ToPointer() *SourceRecreationRecreation {
+func (e Recreation) ToPointer() *Recreation {
return &e
}
-func (e *SourceRecreationRecreation) UnmarshalJSON(data []byte) error {
+func (e *Recreation) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "recreation":
- *e = SourceRecreationRecreation(v)
+ *e = Recreation(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRecreationRecreation: %v", v)
+ return fmt.Errorf("invalid value for Recreation: %v", v)
}
}
type SourceRecreation struct {
// API Key
- Apikey string `json:"apikey"`
- QueryCampsites *string `json:"query_campsites,omitempty"`
- SourceType SourceRecreationRecreation `json:"sourceType"`
+ Apikey string `json:"apikey"`
+ QueryCampsites *string `json:"query_campsites,omitempty"`
+ sourceType Recreation `const:"recreation" json:"sourceType"`
+}
+
+func (s SourceRecreation) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceRecreation) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceRecreation) GetApikey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Apikey
+}
+
+func (o *SourceRecreation) GetQueryCampsites() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QueryCampsites
+}
+
+func (o *SourceRecreation) GetSourceType() Recreation {
+ return RecreationRecreation
}
diff --git a/internal/sdk/pkg/models/shared/sourcerecreationcreaterequest.go b/internal/sdk/pkg/models/shared/sourcerecreationcreaterequest.go
old mode 100755
new mode 100644
index dd70f87cd..2da9b7590
--- a/internal/sdk/pkg/models/shared/sourcerecreationcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerecreationcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceRecreationCreateRequest struct {
Configuration SourceRecreation `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRecreationCreateRequest) GetConfiguration() SourceRecreation {
+ if o == nil {
+ return SourceRecreation{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRecreationCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceRecreationCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRecreationCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceRecreationCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerecreationputrequest.go b/internal/sdk/pkg/models/shared/sourcerecreationputrequest.go
old mode 100755
new mode 100644
index 86ea86a02..1b9c17eb8
--- a/internal/sdk/pkg/models/shared/sourcerecreationputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerecreationputrequest.go
@@ -7,3 +7,24 @@ type SourceRecreationPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRecreationPutRequest) GetConfiguration() SourceRecreationUpdate {
+ if o == nil {
+ return SourceRecreationUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRecreationPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRecreationPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerecreationupdate.go b/internal/sdk/pkg/models/shared/sourcerecreationupdate.go
old mode 100755
new mode 100644
index 753a9364e..c84b0b29e
--- a/internal/sdk/pkg/models/shared/sourcerecreationupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcerecreationupdate.go
@@ -7,3 +7,17 @@ type SourceRecreationUpdate struct {
Apikey string `json:"apikey"`
QueryCampsites *string `json:"query_campsites,omitempty"`
}
+
+func (o *SourceRecreationUpdate) GetApikey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Apikey
+}
+
+func (o *SourceRecreationUpdate) GetQueryCampsites() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QueryCampsites
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerecruitee.go b/internal/sdk/pkg/models/shared/sourcerecruitee.go
old mode 100755
new mode 100644
index fca183e30..77c72870d
--- a/internal/sdk/pkg/models/shared/sourcerecruitee.go
+++ b/internal/sdk/pkg/models/shared/sourcerecruitee.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceRecruiteeRecruitee string
+type Recruitee string
const (
- SourceRecruiteeRecruiteeRecruitee SourceRecruiteeRecruitee = "recruitee"
+ RecruiteeRecruitee Recruitee = "recruitee"
)
-func (e SourceRecruiteeRecruitee) ToPointer() *SourceRecruiteeRecruitee {
+func (e Recruitee) ToPointer() *Recruitee {
return &e
}
-func (e *SourceRecruiteeRecruitee) UnmarshalJSON(data []byte) error {
+func (e *Recruitee) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "recruitee":
- *e = SourceRecruiteeRecruitee(v)
+ *e = Recruitee(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRecruiteeRecruitee: %v", v)
+ return fmt.Errorf("invalid value for Recruitee: %v", v)
}
}
@@ -35,6 +36,35 @@ type SourceRecruitee struct {
// Recruitee API Key. See here.
APIKey string `json:"api_key"`
// Recruitee Company ID. You can also find this ID on the Recruitee API tokens page.
- CompanyID int64 `json:"company_id"`
- SourceType SourceRecruiteeRecruitee `json:"sourceType"`
+ CompanyID int64 `json:"company_id"`
+ sourceType Recruitee `const:"recruitee" json:"sourceType"`
+}
+
+func (s SourceRecruitee) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceRecruitee) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceRecruitee) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceRecruitee) GetCompanyID() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.CompanyID
+}
+
+func (o *SourceRecruitee) GetSourceType() Recruitee {
+ return RecruiteeRecruitee
}
diff --git a/internal/sdk/pkg/models/shared/sourcerecruiteecreaterequest.go b/internal/sdk/pkg/models/shared/sourcerecruiteecreaterequest.go
old mode 100755
new mode 100644
index 9233616ba..8272e65a6
--- a/internal/sdk/pkg/models/shared/sourcerecruiteecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerecruiteecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceRecruiteeCreateRequest struct {
Configuration SourceRecruitee `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRecruiteeCreateRequest) GetConfiguration() SourceRecruitee {
+ if o == nil {
+ return SourceRecruitee{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRecruiteeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceRecruiteeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRecruiteeCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceRecruiteeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerecruiteeputrequest.go b/internal/sdk/pkg/models/shared/sourcerecruiteeputrequest.go
old mode 100755
new mode 100644
index c5bb5310a..9032f41b9
--- a/internal/sdk/pkg/models/shared/sourcerecruiteeputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerecruiteeputrequest.go
@@ -7,3 +7,24 @@ type SourceRecruiteePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRecruiteePutRequest) GetConfiguration() SourceRecruiteeUpdate {
+ if o == nil {
+ return SourceRecruiteeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRecruiteePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRecruiteePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerecruiteeupdate.go b/internal/sdk/pkg/models/shared/sourcerecruiteeupdate.go
old mode 100755
new mode 100644
index 6ce14fd0f..f03325ca3
--- a/internal/sdk/pkg/models/shared/sourcerecruiteeupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcerecruiteeupdate.go
@@ -8,3 +8,17 @@ type SourceRecruiteeUpdate struct {
// Recruitee Company ID. You can also find this ID on the Recruitee API tokens page.
CompanyID int64 `json:"company_id"`
}
+
+func (o *SourceRecruiteeUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceRecruiteeUpdate) GetCompanyID() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.CompanyID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerecurly.go b/internal/sdk/pkg/models/shared/sourcerecurly.go
old mode 100755
new mode 100644
index e9d2f3d06..2e93dd18f
--- a/internal/sdk/pkg/models/shared/sourcerecurly.go
+++ b/internal/sdk/pkg/models/shared/sourcerecurly.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceRecurlyRecurly string
+type Recurly string
const (
- SourceRecurlyRecurlyRecurly SourceRecurlyRecurly = "recurly"
+ RecurlyRecurly Recurly = "recurly"
)
-func (e SourceRecurlyRecurly) ToPointer() *SourceRecurlyRecurly {
+func (e Recurly) ToPointer() *Recurly {
return &e
}
-func (e *SourceRecurlyRecurly) UnmarshalJSON(data []byte) error {
+func (e *Recurly) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "recurly":
- *e = SourceRecurlyRecurly(v)
+ *e = Recurly(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRecurlyRecurly: %v", v)
+ return fmt.Errorf("invalid value for Recurly: %v", v)
}
}
@@ -37,6 +38,42 @@ type SourceRecurly struct {
// ISO8601 timestamp from which the replication from Recurly API will start from.
BeginTime *string `json:"begin_time,omitempty"`
// ISO8601 timestamp to which the replication from Recurly API will stop. Records after that date won't be imported.
- EndTime *string `json:"end_time,omitempty"`
- SourceType SourceRecurlyRecurly `json:"sourceType"`
+ EndTime *string `json:"end_time,omitempty"`
+ sourceType Recurly `const:"recurly" json:"sourceType"`
+}
+
+func (s SourceRecurly) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceRecurly) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceRecurly) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceRecurly) GetBeginTime() *string {
+ if o == nil {
+ return nil
+ }
+ return o.BeginTime
+}
+
+func (o *SourceRecurly) GetEndTime() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndTime
+}
+
+func (o *SourceRecurly) GetSourceType() Recurly {
+ return RecurlyRecurly
}
diff --git a/internal/sdk/pkg/models/shared/sourcerecurlycreaterequest.go b/internal/sdk/pkg/models/shared/sourcerecurlycreaterequest.go
old mode 100755
new mode 100644
index 68705e1ef..205098cff
--- a/internal/sdk/pkg/models/shared/sourcerecurlycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerecurlycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceRecurlyCreateRequest struct {
Configuration SourceRecurly `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRecurlyCreateRequest) GetConfiguration() SourceRecurly {
+ if o == nil {
+ return SourceRecurly{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRecurlyCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceRecurlyCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRecurlyCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceRecurlyCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerecurlyputrequest.go b/internal/sdk/pkg/models/shared/sourcerecurlyputrequest.go
old mode 100755
new mode 100644
index 1d21dd7b0..f33b95695
--- a/internal/sdk/pkg/models/shared/sourcerecurlyputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerecurlyputrequest.go
@@ -7,3 +7,24 @@ type SourceRecurlyPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRecurlyPutRequest) GetConfiguration() SourceRecurlyUpdate {
+ if o == nil {
+ return SourceRecurlyUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRecurlyPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRecurlyPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerecurlyupdate.go b/internal/sdk/pkg/models/shared/sourcerecurlyupdate.go
old mode 100755
new mode 100644
index 4d315a869..1c8df9352
--- a/internal/sdk/pkg/models/shared/sourcerecurlyupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcerecurlyupdate.go
@@ -10,3 +10,24 @@ type SourceRecurlyUpdate struct {
// ISO8601 timestamp to which the replication from Recurly API will stop. Records after that date won't be imported.
EndTime *string `json:"end_time,omitempty"`
}
+
+func (o *SourceRecurlyUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceRecurlyUpdate) GetBeginTime() *string {
+ if o == nil {
+ return nil
+ }
+ return o.BeginTime
+}
+
+func (o *SourceRecurlyUpdate) GetEndTime() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndTime
+}
diff --git a/internal/sdk/pkg/models/shared/sourceredshift.go b/internal/sdk/pkg/models/shared/sourceredshift.go
old mode 100755
new mode 100644
index ecc32b4ba..7ea1696af
--- a/internal/sdk/pkg/models/shared/sourceredshift.go
+++ b/internal/sdk/pkg/models/shared/sourceredshift.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceRedshiftRedshift string
@@ -41,10 +42,74 @@ type SourceRedshift struct {
// Password associated with the username.
Password string `json:"password"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5439" json:"port"`
// The list of schemas to sync from. Specify one or more explicitly or keep empty to process all schemas. Schema names are case sensitive.
Schemas []string `json:"schemas,omitempty"`
- SourceType SourceRedshiftRedshift `json:"sourceType"`
+ sourceType SourceRedshiftRedshift `const:"redshift" json:"sourceType"`
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (s SourceRedshift) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceRedshift) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceRedshift) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceRedshift) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceRedshift) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceRedshift) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceRedshift) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceRedshift) GetSchemas() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Schemas
+}
+
+func (o *SourceRedshift) GetSourceType() SourceRedshiftRedshift {
+ return SourceRedshiftRedshiftRedshift
+}
+
+func (o *SourceRedshift) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceredshiftcreaterequest.go b/internal/sdk/pkg/models/shared/sourceredshiftcreaterequest.go
old mode 100755
new mode 100644
index 7b0681cbe..f78c2db9b
--- a/internal/sdk/pkg/models/shared/sourceredshiftcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceredshiftcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceRedshiftCreateRequest struct {
Configuration SourceRedshift `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRedshiftCreateRequest) GetConfiguration() SourceRedshift {
+ if o == nil {
+ return SourceRedshift{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRedshiftCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceRedshiftCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRedshiftCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceRedshiftCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceredshiftputrequest.go b/internal/sdk/pkg/models/shared/sourceredshiftputrequest.go
old mode 100755
new mode 100644
index 947a3320c..b2c652b06
--- a/internal/sdk/pkg/models/shared/sourceredshiftputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceredshiftputrequest.go
@@ -7,3 +7,24 @@ type SourceRedshiftPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRedshiftPutRequest) GetConfiguration() SourceRedshiftUpdate {
+ if o == nil {
+ return SourceRedshiftUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRedshiftPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRedshiftPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceredshiftupdate.go b/internal/sdk/pkg/models/shared/sourceredshiftupdate.go
old mode 100755
new mode 100644
index c14eac658..3cf8cfaee
--- a/internal/sdk/pkg/models/shared/sourceredshiftupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceredshiftupdate.go
@@ -2,6 +2,10 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceRedshiftUpdate struct {
// Name of the database.
Database string `json:"database"`
@@ -12,9 +16,69 @@ type SourceRedshiftUpdate struct {
// Password associated with the username.
Password string `json:"password"`
// Port of the database.
- Port int64 `json:"port"`
+ Port *int64 `default:"5439" json:"port"`
// The list of schemas to sync from. Specify one or more explicitly or keep empty to process all schemas. Schema names are case sensitive.
Schemas []string `json:"schemas,omitempty"`
// Username to use to access the database.
Username string `json:"username"`
}
+
+func (s SourceRedshiftUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceRedshiftUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceRedshiftUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceRedshiftUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceRedshiftUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceRedshiftUpdate) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceRedshiftUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceRedshiftUpdate) GetSchemas() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Schemas
+}
+
+func (o *SourceRedshiftUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourceresponse.go b/internal/sdk/pkg/models/shared/sourceresponse.go
old mode 100755
new mode 100644
index 5a2c3fba2..4d4cbeeec
--- a/internal/sdk/pkg/models/shared/sourceresponse.go
+++ b/internal/sdk/pkg/models/shared/sourceresponse.go
@@ -11,3 +11,38 @@ type SourceResponse struct {
SourceType string `json:"sourceType"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceResponse) GetConfiguration() interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.Configuration
+}
+
+func (o *SourceResponse) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceResponse) GetSourceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceID
+}
+
+func (o *SourceResponse) GetSourceType() string {
+ if o == nil {
+ return ""
+ }
+ return o.SourceType
+}
+
+func (o *SourceResponse) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceretently.go b/internal/sdk/pkg/models/shared/sourceretently.go
old mode 100755
new mode 100644
index 1f262efd5..68b9bc0e8
--- a/internal/sdk/pkg/models/shared/sourceretently.go
+++ b/internal/sdk/pkg/models/shared/sourceretently.go
@@ -3,224 +3,197 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceRetentlyAuthenticationMechanismAuthenticateWithAPITokenAuthType string
+type SourceRetentlySchemasAuthType string
const (
- SourceRetentlyAuthenticationMechanismAuthenticateWithAPITokenAuthTypeToken SourceRetentlyAuthenticationMechanismAuthenticateWithAPITokenAuthType = "Token"
+ SourceRetentlySchemasAuthTypeToken SourceRetentlySchemasAuthType = "Token"
)
-func (e SourceRetentlyAuthenticationMechanismAuthenticateWithAPITokenAuthType) ToPointer() *SourceRetentlyAuthenticationMechanismAuthenticateWithAPITokenAuthType {
+func (e SourceRetentlySchemasAuthType) ToPointer() *SourceRetentlySchemasAuthType {
return &e
}
-func (e *SourceRetentlyAuthenticationMechanismAuthenticateWithAPITokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceRetentlySchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Token":
- *e = SourceRetentlyAuthenticationMechanismAuthenticateWithAPITokenAuthType(v)
+ *e = SourceRetentlySchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRetentlyAuthenticationMechanismAuthenticateWithAPITokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceRetentlySchemasAuthType: %v", v)
}
}
-// SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken - Choose how to authenticate to Retently
-type SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken struct {
+// SourceRetentlyAuthenticateWithAPIToken - Choose how to authenticate to Retently
+type SourceRetentlyAuthenticateWithAPIToken struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// Retently API Token. See the docs for more information on how to obtain this key.
- APIKey string `json:"api_key"`
- AuthType *SourceRetentlyAuthenticationMechanismAuthenticateWithAPITokenAuthType `json:"auth_type,omitempty"`
-
- AdditionalProperties interface{} `json:"-"`
+ APIKey string `json:"api_key"`
+ authType *SourceRetentlySchemasAuthType `const:"Token" json:"auth_type,omitempty"`
}
-type _SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken
-
-func (c *SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken) UnmarshalJSON(bs []byte) error {
- data := _SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken(data)
- additionalFields := make(map[string]interface{})
+func (s SourceRetentlyAuthenticateWithAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceRetentlyAuthenticateWithAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "api_key")
- delete(additionalFields, "auth_type")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceRetentlyAuthenticateWithAPIToken) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceRetentlyAuthenticateWithAPIToken) GetAPIKey() string {
+ if o == nil {
+ return ""
}
+ return o.APIKey
+}
- return json.Marshal(out)
+func (o *SourceRetentlyAuthenticateWithAPIToken) GetAuthType() *SourceRetentlySchemasAuthType {
+ return SourceRetentlySchemasAuthTypeToken.ToPointer()
}
-type SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType string
+type SourceRetentlyAuthType string
const (
- SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthTypeClient SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType = "Client"
+ SourceRetentlyAuthTypeClient SourceRetentlyAuthType = "Client"
)
-func (e SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType) ToPointer() *SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType {
+func (e SourceRetentlyAuthType) ToPointer() *SourceRetentlyAuthType {
return &e
}
-func (e *SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceRetentlyAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType(v)
+ *e = SourceRetentlyAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceRetentlyAuthType: %v", v)
}
}
-// SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth - Choose how to authenticate to Retently
-type SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth struct {
- AuthType *SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType `json:"auth_type,omitempty"`
+// SourceRetentlyAuthenticateViaRetentlyOAuth - Choose how to authenticate to Retently
+type SourceRetentlyAuthenticateViaRetentlyOAuth struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ authType *SourceRetentlyAuthType `const:"Client" json:"auth_type,omitempty"`
// The Client ID of your Retently developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Retently developer application.
ClientSecret string `json:"client_secret"`
// Retently Refresh Token which can be used to fetch new Bearer Tokens when the current one expires.
RefreshToken string `json:"refresh_token"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth
-
-func (c *SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth) UnmarshalJSON(bs []byte) error {
- data := _SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceRetentlyAuthenticateViaRetentlyOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceRetentlyAuthenticateViaRetentlyOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "auth_type")
- delete(additionalFields, "client_id")
- delete(additionalFields, "client_secret")
- delete(additionalFields, "refresh_token")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth(c))
- if err != nil {
- return nil, err
+func (o *SourceRetentlyAuthenticateViaRetentlyOAuth) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
+func (o *SourceRetentlyAuthenticateViaRetentlyOAuth) GetAuthType() *SourceRetentlyAuthType {
+ return SourceRetentlyAuthTypeClient.ToPointer()
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceRetentlyAuthenticateViaRetentlyOAuth) GetClientID() string {
+ if o == nil {
+ return ""
}
+ return o.ClientID
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceRetentlyAuthenticateViaRetentlyOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
}
+ return o.ClientSecret
+}
- return json.Marshal(out)
+func (o *SourceRetentlyAuthenticateViaRetentlyOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
}
type SourceRetentlyAuthenticationMechanismType string
const (
- SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth SourceRetentlyAuthenticationMechanismType = "source-retently_Authentication Mechanism_Authenticate via Retently (OAuth)"
- SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken SourceRetentlyAuthenticationMechanismType = "source-retently_Authentication Mechanism_Authenticate with API Token"
+ SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticateViaRetentlyOAuth SourceRetentlyAuthenticationMechanismType = "source-retently_Authenticate via Retently (OAuth)"
+ SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticateWithAPIToken SourceRetentlyAuthenticationMechanismType = "source-retently_Authenticate with API Token"
)
type SourceRetentlyAuthenticationMechanism struct {
- SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth *SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth
- SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken *SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken
+ SourceRetentlyAuthenticateViaRetentlyOAuth *SourceRetentlyAuthenticateViaRetentlyOAuth
+ SourceRetentlyAuthenticateWithAPIToken *SourceRetentlyAuthenticateWithAPIToken
Type SourceRetentlyAuthenticationMechanismType
}
-func CreateSourceRetentlyAuthenticationMechanismSourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth(sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth) SourceRetentlyAuthenticationMechanism {
- typ := SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth
+func CreateSourceRetentlyAuthenticationMechanismSourceRetentlyAuthenticateViaRetentlyOAuth(sourceRetentlyAuthenticateViaRetentlyOAuth SourceRetentlyAuthenticateViaRetentlyOAuth) SourceRetentlyAuthenticationMechanism {
+ typ := SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticateViaRetentlyOAuth
return SourceRetentlyAuthenticationMechanism{
- SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth: &sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth,
+ SourceRetentlyAuthenticateViaRetentlyOAuth: &sourceRetentlyAuthenticateViaRetentlyOAuth,
Type: typ,
}
}
-func CreateSourceRetentlyAuthenticationMechanismSourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken(sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken) SourceRetentlyAuthenticationMechanism {
- typ := SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken
+func CreateSourceRetentlyAuthenticationMechanismSourceRetentlyAuthenticateWithAPIToken(sourceRetentlyAuthenticateWithAPIToken SourceRetentlyAuthenticateWithAPIToken) SourceRetentlyAuthenticationMechanism {
+ typ := SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticateWithAPIToken
return SourceRetentlyAuthenticationMechanism{
- SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken: &sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken,
- Type: typ,
+ SourceRetentlyAuthenticateWithAPIToken: &sourceRetentlyAuthenticateWithAPIToken,
+ Type: typ,
}
}
func (u *SourceRetentlyAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken := new(SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken); err == nil {
- u.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken = sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken
- u.Type = SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken
+
+ sourceRetentlyAuthenticateWithAPIToken := new(SourceRetentlyAuthenticateWithAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceRetentlyAuthenticateWithAPIToken, "", true, true); err == nil {
+ u.SourceRetentlyAuthenticateWithAPIToken = sourceRetentlyAuthenticateWithAPIToken
+ u.Type = SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticateWithAPIToken
return nil
}
- sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth := new(SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth); err == nil {
- u.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth = sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth
- u.Type = SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth
+ sourceRetentlyAuthenticateViaRetentlyOAuth := new(SourceRetentlyAuthenticateViaRetentlyOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceRetentlyAuthenticateViaRetentlyOAuth, "", true, true); err == nil {
+ u.SourceRetentlyAuthenticateViaRetentlyOAuth = sourceRetentlyAuthenticateViaRetentlyOAuth
+ u.Type = SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticateViaRetentlyOAuth
return nil
}
@@ -228,43 +201,65 @@ func (u *SourceRetentlyAuthenticationMechanism) UnmarshalJSON(data []byte) error
}
func (u SourceRetentlyAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken != nil {
- return json.Marshal(u.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken)
+ if u.SourceRetentlyAuthenticateViaRetentlyOAuth != nil {
+ return utils.MarshalJSON(u.SourceRetentlyAuthenticateViaRetentlyOAuth, "", true)
}
- if u.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth != nil {
- return json.Marshal(u.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth)
+ if u.SourceRetentlyAuthenticateWithAPIToken != nil {
+ return utils.MarshalJSON(u.SourceRetentlyAuthenticateWithAPIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceRetentlyRetently string
+type Retently string
const (
- SourceRetentlyRetentlyRetently SourceRetentlyRetently = "retently"
+ RetentlyRetently Retently = "retently"
)
-func (e SourceRetentlyRetently) ToPointer() *SourceRetentlyRetently {
+func (e Retently) ToPointer() *Retently {
return &e
}
-func (e *SourceRetentlyRetently) UnmarshalJSON(data []byte) error {
+func (e *Retently) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "retently":
- *e = SourceRetentlyRetently(v)
+ *e = Retently(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRetentlyRetently: %v", v)
+ return fmt.Errorf("invalid value for Retently: %v", v)
}
}
type SourceRetently struct {
// Choose how to authenticate to Retently
Credentials *SourceRetentlyAuthenticationMechanism `json:"credentials,omitempty"`
- SourceType *SourceRetentlyRetently `json:"sourceType,omitempty"`
+ sourceType *Retently `const:"retently" json:"sourceType,omitempty"`
+}
+
+func (s SourceRetently) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceRetently) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceRetently) GetCredentials() *SourceRetentlyAuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceRetently) GetSourceType() *Retently {
+ return RetentlyRetently.ToPointer()
}
diff --git a/internal/sdk/pkg/models/shared/sourceretentlycreaterequest.go b/internal/sdk/pkg/models/shared/sourceretentlycreaterequest.go
old mode 100755
new mode 100644
index f5f84a5d6..7dd07eaa7
--- a/internal/sdk/pkg/models/shared/sourceretentlycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceretentlycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceRetentlyCreateRequest struct {
Configuration SourceRetently `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRetentlyCreateRequest) GetConfiguration() SourceRetently {
+ if o == nil {
+ return SourceRetently{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRetentlyCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceRetentlyCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRetentlyCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceRetentlyCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceretentlyputrequest.go b/internal/sdk/pkg/models/shared/sourceretentlyputrequest.go
old mode 100755
new mode 100644
index 7b0b11e9f..38cff5f95
--- a/internal/sdk/pkg/models/shared/sourceretentlyputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceretentlyputrequest.go
@@ -7,3 +7,24 @@ type SourceRetentlyPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRetentlyPutRequest) GetConfiguration() SourceRetentlyUpdate {
+ if o == nil {
+ return SourceRetentlyUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRetentlyPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRetentlyPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceretentlyupdate.go b/internal/sdk/pkg/models/shared/sourceretentlyupdate.go
old mode 100755
new mode 100644
index 2b092a7fc..da1a6540b
--- a/internal/sdk/pkg/models/shared/sourceretentlyupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceretentlyupdate.go
@@ -3,224 +3,197 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPITokenAuthType string
+type SourceRetentlyUpdateSchemasAuthType string
const (
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPITokenAuthTypeToken SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPITokenAuthType = "Token"
+ SourceRetentlyUpdateSchemasAuthTypeToken SourceRetentlyUpdateSchemasAuthType = "Token"
)
-func (e SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPITokenAuthType) ToPointer() *SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPITokenAuthType {
+func (e SourceRetentlyUpdateSchemasAuthType) ToPointer() *SourceRetentlyUpdateSchemasAuthType {
return &e
}
-func (e *SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPITokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceRetentlyUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Token":
- *e = SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPITokenAuthType(v)
+ *e = SourceRetentlyUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPITokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceRetentlyUpdateSchemasAuthType: %v", v)
}
}
-// SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken - Choose how to authenticate to Retently
-type SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken struct {
+// AuthenticateWithAPIToken - Choose how to authenticate to Retently
+type AuthenticateWithAPIToken struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// Retently API Token. See the docs for more information on how to obtain this key.
- APIKey string `json:"api_key"`
- AuthType *SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPITokenAuthType `json:"auth_type,omitempty"`
-
- AdditionalProperties interface{} `json:"-"`
+ APIKey string `json:"api_key"`
+ authType *SourceRetentlyUpdateSchemasAuthType `const:"Token" json:"auth_type,omitempty"`
}
-type _SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken
-
-func (c *SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken) UnmarshalJSON(bs []byte) error {
- data := _SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken(data)
- additionalFields := make(map[string]interface{})
+func (a AuthenticateWithAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (a *AuthenticateWithAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
return err
}
- delete(additionalFields, "api_key")
- delete(additionalFields, "auth_type")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken(c))
- if err != nil {
- return nil, err
- }
-
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
-
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *AuthenticateWithAPIToken) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *AuthenticateWithAPIToken) GetAPIKey() string {
+ if o == nil {
+ return ""
}
+ return o.APIKey
+}
- return json.Marshal(out)
+func (o *AuthenticateWithAPIToken) GetAuthType() *SourceRetentlyUpdateSchemasAuthType {
+ return SourceRetentlyUpdateSchemasAuthTypeToken.ToPointer()
}
-type SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType string
+type SourceRetentlyUpdateAuthType string
const (
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthTypeClient SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType = "Client"
+ SourceRetentlyUpdateAuthTypeClient SourceRetentlyUpdateAuthType = "Client"
)
-func (e SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType) ToPointer() *SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType {
+func (e SourceRetentlyUpdateAuthType) ToPointer() *SourceRetentlyUpdateAuthType {
return &e
}
-func (e *SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceRetentlyUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType(v)
+ *e = SourceRetentlyUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceRetentlyUpdateAuthType: %v", v)
}
}
-// SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth - Choose how to authenticate to Retently
-type SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth struct {
- AuthType *SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuthAuthType `json:"auth_type,omitempty"`
+// AuthenticateViaRetentlyOAuth - Choose how to authenticate to Retently
+type AuthenticateViaRetentlyOAuth struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
+ authType *SourceRetentlyUpdateAuthType `const:"Client" json:"auth_type,omitempty"`
// The Client ID of your Retently developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Retently developer application.
ClientSecret string `json:"client_secret"`
// Retently Refresh Token which can be used to fetch new Bearer Tokens when the current one expires.
RefreshToken string `json:"refresh_token"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth
-func (c *SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth) UnmarshalJSON(bs []byte) error {
- data := _SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth(data)
-
- additionalFields := make(map[string]interface{})
+func (a AuthenticateViaRetentlyOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (a *AuthenticateViaRetentlyOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
return err
}
- delete(additionalFields, "auth_type")
- delete(additionalFields, "client_id")
- delete(additionalFields, "client_secret")
- delete(additionalFields, "refresh_token")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth(c))
- if err != nil {
- return nil, err
+func (o *AuthenticateViaRetentlyOAuth) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
- }
+func (o *AuthenticateViaRetentlyOAuth) GetAuthType() *SourceRetentlyUpdateAuthType {
+ return SourceRetentlyUpdateAuthTypeClient.ToPointer()
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *AuthenticateViaRetentlyOAuth) GetClientID() string {
+ if o == nil {
+ return ""
}
+ return o.ClientID
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *AuthenticateViaRetentlyOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
}
+ return o.ClientSecret
+}
- return json.Marshal(out)
+func (o *AuthenticateViaRetentlyOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
}
type SourceRetentlyUpdateAuthenticationMechanismType string
const (
- SourceRetentlyUpdateAuthenticationMechanismTypeSourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth SourceRetentlyUpdateAuthenticationMechanismType = "source-retently-update_Authentication Mechanism_Authenticate via Retently (OAuth)"
- SourceRetentlyUpdateAuthenticationMechanismTypeSourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken SourceRetentlyUpdateAuthenticationMechanismType = "source-retently-update_Authentication Mechanism_Authenticate with API Token"
+ SourceRetentlyUpdateAuthenticationMechanismTypeAuthenticateViaRetentlyOAuth SourceRetentlyUpdateAuthenticationMechanismType = "Authenticate via Retently (OAuth)"
+ SourceRetentlyUpdateAuthenticationMechanismTypeAuthenticateWithAPIToken SourceRetentlyUpdateAuthenticationMechanismType = "Authenticate with API Token"
)
type SourceRetentlyUpdateAuthenticationMechanism struct {
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth *SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken *SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken
+ AuthenticateViaRetentlyOAuth *AuthenticateViaRetentlyOAuth
+ AuthenticateWithAPIToken *AuthenticateWithAPIToken
Type SourceRetentlyUpdateAuthenticationMechanismType
}
-func CreateSourceRetentlyUpdateAuthenticationMechanismSourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth(sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth) SourceRetentlyUpdateAuthenticationMechanism {
- typ := SourceRetentlyUpdateAuthenticationMechanismTypeSourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth
+func CreateSourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth(authenticateViaRetentlyOAuth AuthenticateViaRetentlyOAuth) SourceRetentlyUpdateAuthenticationMechanism {
+ typ := SourceRetentlyUpdateAuthenticationMechanismTypeAuthenticateViaRetentlyOAuth
return SourceRetentlyUpdateAuthenticationMechanism{
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth: &sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth,
- Type: typ,
+ AuthenticateViaRetentlyOAuth: &authenticateViaRetentlyOAuth,
+ Type: typ,
}
}
-func CreateSourceRetentlyUpdateAuthenticationMechanismSourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken(sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken) SourceRetentlyUpdateAuthenticationMechanism {
- typ := SourceRetentlyUpdateAuthenticationMechanismTypeSourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken
+func CreateSourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken(authenticateWithAPIToken AuthenticateWithAPIToken) SourceRetentlyUpdateAuthenticationMechanism {
+ typ := SourceRetentlyUpdateAuthenticationMechanismTypeAuthenticateWithAPIToken
return SourceRetentlyUpdateAuthenticationMechanism{
- SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken: &sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken,
- Type: typ,
+ AuthenticateWithAPIToken: &authenticateWithAPIToken,
+ Type: typ,
}
}
func (u *SourceRetentlyUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken := new(SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken); err == nil {
- u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken = sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken
- u.Type = SourceRetentlyUpdateAuthenticationMechanismTypeSourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken
+
+ authenticateWithAPIToken := new(AuthenticateWithAPIToken)
+ if err := utils.UnmarshalJSON(data, &authenticateWithAPIToken, "", true, true); err == nil {
+ u.AuthenticateWithAPIToken = authenticateWithAPIToken
+ u.Type = SourceRetentlyUpdateAuthenticationMechanismTypeAuthenticateWithAPIToken
return nil
}
- sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth := new(SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth); err == nil {
- u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth = sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth
- u.Type = SourceRetentlyUpdateAuthenticationMechanismTypeSourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth
+ authenticateViaRetentlyOAuth := new(AuthenticateViaRetentlyOAuth)
+ if err := utils.UnmarshalJSON(data, &authenticateViaRetentlyOAuth, "", true, true); err == nil {
+ u.AuthenticateViaRetentlyOAuth = authenticateViaRetentlyOAuth
+ u.Type = SourceRetentlyUpdateAuthenticationMechanismTypeAuthenticateViaRetentlyOAuth
return nil
}
@@ -228,18 +201,25 @@ func (u *SourceRetentlyUpdateAuthenticationMechanism) UnmarshalJSON(data []byte)
}
func (u SourceRetentlyUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken != nil {
- return json.Marshal(u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken)
+ if u.AuthenticateViaRetentlyOAuth != nil {
+ return utils.MarshalJSON(u.AuthenticateViaRetentlyOAuth, "", true)
}
- if u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth != nil {
- return json.Marshal(u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth)
+ if u.AuthenticateWithAPIToken != nil {
+ return utils.MarshalJSON(u.AuthenticateWithAPIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceRetentlyUpdate struct {
// Choose how to authenticate to Retently
Credentials *SourceRetentlyUpdateAuthenticationMechanism `json:"credentials,omitempty"`
}
+
+func (o *SourceRetentlyUpdate) GetCredentials() *SourceRetentlyUpdateAuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerkicovid.go b/internal/sdk/pkg/models/shared/sourcerkicovid.go
old mode 100755
new mode 100644
index 2db017cf9..77d938d26
--- a/internal/sdk/pkg/models/shared/sourcerkicovid.go
+++ b/internal/sdk/pkg/models/shared/sourcerkicovid.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceRkiCovidRkiCovid string
+type RkiCovid string
const (
- SourceRkiCovidRkiCovidRkiCovid SourceRkiCovidRkiCovid = "rki-covid"
+ RkiCovidRkiCovid RkiCovid = "rki-covid"
)
-func (e SourceRkiCovidRkiCovid) ToPointer() *SourceRkiCovidRkiCovid {
+func (e RkiCovid) ToPointer() *RkiCovid {
return &e
}
-func (e *SourceRkiCovidRkiCovid) UnmarshalJSON(data []byte) error {
+func (e *RkiCovid) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "rki-covid":
- *e = SourceRkiCovidRkiCovid(v)
+ *e = RkiCovid(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRkiCovidRkiCovid: %v", v)
+ return fmt.Errorf("invalid value for RkiCovid: %v", v)
}
}
type SourceRkiCovid struct {
- SourceType SourceRkiCovidRkiCovid `json:"sourceType"`
+ sourceType RkiCovid `const:"rki-covid" json:"sourceType"`
// UTC date in the format 2017-01-25. Any data before this date will not be replicated.
StartDate string `json:"start_date"`
}
+
+func (s SourceRkiCovid) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceRkiCovid) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceRkiCovid) GetSourceType() RkiCovid {
+ return RkiCovidRkiCovid
+}
+
+func (o *SourceRkiCovid) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerkicovidcreaterequest.go b/internal/sdk/pkg/models/shared/sourcerkicovidcreaterequest.go
old mode 100755
new mode 100644
index 7d28e528b..e642e7103
--- a/internal/sdk/pkg/models/shared/sourcerkicovidcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerkicovidcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceRkiCovidCreateRequest struct {
Configuration SourceRkiCovid `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRkiCovidCreateRequest) GetConfiguration() SourceRkiCovid {
+ if o == nil {
+ return SourceRkiCovid{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRkiCovidCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceRkiCovidCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRkiCovidCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceRkiCovidCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerkicovidputrequest.go b/internal/sdk/pkg/models/shared/sourcerkicovidputrequest.go
old mode 100755
new mode 100644
index 50e8af066..63a346b73
--- a/internal/sdk/pkg/models/shared/sourcerkicovidputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerkicovidputrequest.go
@@ -7,3 +7,24 @@ type SourceRkiCovidPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRkiCovidPutRequest) GetConfiguration() SourceRkiCovidUpdate {
+ if o == nil {
+ return SourceRkiCovidUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRkiCovidPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRkiCovidPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerkicovidupdate.go b/internal/sdk/pkg/models/shared/sourcerkicovidupdate.go
old mode 100755
new mode 100644
index 9ad8b656f..fa7f68580
--- a/internal/sdk/pkg/models/shared/sourcerkicovidupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcerkicovidupdate.go
@@ -6,3 +6,10 @@ type SourceRkiCovidUpdate struct {
// UTC date in the format 2017-01-25. Any data before this date will not be replicated.
StartDate string `json:"start_date"`
}
+
+func (o *SourceRkiCovidUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerss.go b/internal/sdk/pkg/models/shared/sourcerss.go
old mode 100755
new mode 100644
index b8795c4e8..15538ec7f
--- a/internal/sdk/pkg/models/shared/sourcerss.go
+++ b/internal/sdk/pkg/models/shared/sourcerss.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceRssRss string
+type Rss string
const (
- SourceRssRssRss SourceRssRss = "rss"
+ RssRss Rss = "rss"
)
-func (e SourceRssRss) ToPointer() *SourceRssRss {
+func (e Rss) ToPointer() *Rss {
return &e
}
-func (e *SourceRssRss) UnmarshalJSON(data []byte) error {
+func (e *Rss) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "rss":
- *e = SourceRssRss(v)
+ *e = Rss(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceRssRss: %v", v)
+ return fmt.Errorf("invalid value for Rss: %v", v)
}
}
type SourceRss struct {
- SourceType SourceRssRss `json:"sourceType"`
+ sourceType Rss `const:"rss" json:"sourceType"`
// RSS Feed URL
URL string `json:"url"`
}
+
+func (s SourceRss) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceRss) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceRss) GetSourceType() Rss {
+ return RssRss
+}
+
+func (o *SourceRss) GetURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.URL
+}
diff --git a/internal/sdk/pkg/models/shared/sourcersscreaterequest.go b/internal/sdk/pkg/models/shared/sourcersscreaterequest.go
old mode 100755
new mode 100644
index 50a20d49a..28434e488
--- a/internal/sdk/pkg/models/shared/sourcersscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcersscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceRssCreateRequest struct {
Configuration SourceRss `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRssCreateRequest) GetConfiguration() SourceRss {
+ if o == nil {
+ return SourceRss{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRssCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceRssCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRssCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceRssCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerssputrequest.go b/internal/sdk/pkg/models/shared/sourcerssputrequest.go
old mode 100755
new mode 100644
index 99508a9d4..da3a4d8f3
--- a/internal/sdk/pkg/models/shared/sourcerssputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcerssputrequest.go
@@ -7,3 +7,24 @@ type SourceRssPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceRssPutRequest) GetConfiguration() SourceRssUpdate {
+ if o == nil {
+ return SourceRssUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceRssPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceRssPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcerssupdate.go b/internal/sdk/pkg/models/shared/sourcerssupdate.go
old mode 100755
new mode 100644
index 6d7acaa88..335889ab4
--- a/internal/sdk/pkg/models/shared/sourcerssupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcerssupdate.go
@@ -6,3 +6,10 @@ type SourceRssUpdate struct {
// RSS Feed URL
URL string `json:"url"`
}
+
+func (o *SourceRssUpdate) GetURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.URL
+}
diff --git a/internal/sdk/pkg/models/shared/sources3.go b/internal/sdk/pkg/models/shared/sources3.go
old mode 100755
new mode 100644
index 7fb39df61..80620299e
--- a/internal/sdk/pkg/models/shared/sources3.go
+++ b/internal/sdk/pkg/models/shared/sources3.go
@@ -3,51 +3,51 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceS3FileFormatJsonlFiletype string
+type SourceS3SchemasFormatFileFormatFiletype string
const (
- SourceS3FileFormatJsonlFiletypeJsonl SourceS3FileFormatJsonlFiletype = "jsonl"
+ SourceS3SchemasFormatFileFormatFiletypeJsonl SourceS3SchemasFormatFileFormatFiletype = "jsonl"
)
-func (e SourceS3FileFormatJsonlFiletype) ToPointer() *SourceS3FileFormatJsonlFiletype {
+func (e SourceS3SchemasFormatFileFormatFiletype) ToPointer() *SourceS3SchemasFormatFileFormatFiletype {
return &e
}
-func (e *SourceS3FileFormatJsonlFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3SchemasFormatFileFormatFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "jsonl":
- *e = SourceS3FileFormatJsonlFiletype(v)
+ *e = SourceS3SchemasFormatFileFormatFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileFormatJsonlFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3SchemasFormatFileFormatFiletype: %v", v)
}
}
-// SourceS3FileFormatJsonlUnexpectedFieldBehavior - How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details
-type SourceS3FileFormatJsonlUnexpectedFieldBehavior string
+// SourceS3UnexpectedFieldBehavior - How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details
+type SourceS3UnexpectedFieldBehavior string
const (
- SourceS3FileFormatJsonlUnexpectedFieldBehaviorIgnore SourceS3FileFormatJsonlUnexpectedFieldBehavior = "ignore"
- SourceS3FileFormatJsonlUnexpectedFieldBehaviorInfer SourceS3FileFormatJsonlUnexpectedFieldBehavior = "infer"
- SourceS3FileFormatJsonlUnexpectedFieldBehaviorError SourceS3FileFormatJsonlUnexpectedFieldBehavior = "error"
+ SourceS3UnexpectedFieldBehaviorIgnore SourceS3UnexpectedFieldBehavior = "ignore"
+ SourceS3UnexpectedFieldBehaviorInfer SourceS3UnexpectedFieldBehavior = "infer"
+ SourceS3UnexpectedFieldBehaviorError SourceS3UnexpectedFieldBehavior = "error"
)
-func (e SourceS3FileFormatJsonlUnexpectedFieldBehavior) ToPointer() *SourceS3FileFormatJsonlUnexpectedFieldBehavior {
+func (e SourceS3UnexpectedFieldBehavior) ToPointer() *SourceS3UnexpectedFieldBehavior {
return &e
}
-func (e *SourceS3FileFormatJsonlUnexpectedFieldBehavior) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UnexpectedFieldBehavior) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -58,227 +58,390 @@ func (e *SourceS3FileFormatJsonlUnexpectedFieldBehavior) UnmarshalJSON(data []by
case "infer":
fallthrough
case "error":
- *e = SourceS3FileFormatJsonlUnexpectedFieldBehavior(v)
+ *e = SourceS3UnexpectedFieldBehavior(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileFormatJsonlUnexpectedFieldBehavior: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UnexpectedFieldBehavior: %v", v)
}
}
-// SourceS3FileFormatJsonl - This connector uses PyArrow for JSON Lines (jsonl) file parsing.
-type SourceS3FileFormatJsonl struct {
+// SourceS3Jsonl - This connector uses PyArrow for JSON Lines (jsonl) file parsing.
+type SourceS3Jsonl struct {
// The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
- BlockSize *int64 `json:"block_size,omitempty"`
- Filetype *SourceS3FileFormatJsonlFiletype `json:"filetype,omitempty"`
+ BlockSize *int64 `default:"0" json:"block_size"`
+ filetype *SourceS3SchemasFormatFileFormatFiletype `const:"jsonl" json:"filetype"`
// Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.
- NewlinesInValues *bool `json:"newlines_in_values,omitempty"`
+ NewlinesInValues *bool `default:"false" json:"newlines_in_values"`
// How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details
- UnexpectedFieldBehavior *SourceS3FileFormatJsonlUnexpectedFieldBehavior `json:"unexpected_field_behavior,omitempty"`
+ UnexpectedFieldBehavior *SourceS3UnexpectedFieldBehavior `default:"infer" json:"unexpected_field_behavior"`
}
-type SourceS3FileFormatAvroFiletype string
+func (s SourceS3Jsonl) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3Jsonl) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3Jsonl) GetBlockSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BlockSize
+}
+
+func (o *SourceS3Jsonl) GetFiletype() *SourceS3SchemasFormatFileFormatFiletype {
+ return SourceS3SchemasFormatFileFormatFiletypeJsonl.ToPointer()
+}
+
+func (o *SourceS3Jsonl) GetNewlinesInValues() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.NewlinesInValues
+}
+
+func (o *SourceS3Jsonl) GetUnexpectedFieldBehavior() *SourceS3UnexpectedFieldBehavior {
+ if o == nil {
+ return nil
+ }
+ return o.UnexpectedFieldBehavior
+}
+
+type SourceS3SchemasFormatFiletype string
const (
- SourceS3FileFormatAvroFiletypeAvro SourceS3FileFormatAvroFiletype = "avro"
+ SourceS3SchemasFormatFiletypeAvro SourceS3SchemasFormatFiletype = "avro"
)
-func (e SourceS3FileFormatAvroFiletype) ToPointer() *SourceS3FileFormatAvroFiletype {
+func (e SourceS3SchemasFormatFiletype) ToPointer() *SourceS3SchemasFormatFiletype {
return &e
}
-func (e *SourceS3FileFormatAvroFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3SchemasFormatFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "avro":
- *e = SourceS3FileFormatAvroFiletype(v)
+ *e = SourceS3SchemasFormatFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileFormatAvroFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3SchemasFormatFiletype: %v", v)
}
}
-// SourceS3FileFormatAvro - This connector utilises fastavro for Avro parsing.
-type SourceS3FileFormatAvro struct {
- Filetype *SourceS3FileFormatAvroFiletype `json:"filetype,omitempty"`
+// SourceS3Avro - This connector utilises fastavro for Avro parsing.
+type SourceS3Avro struct {
+ filetype *SourceS3SchemasFormatFiletype `const:"avro" json:"filetype"`
+}
+
+func (s SourceS3Avro) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
}
-type SourceS3FileFormatParquetFiletype string
+func (s *SourceS3Avro) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3Avro) GetFiletype() *SourceS3SchemasFormatFiletype {
+ return SourceS3SchemasFormatFiletypeAvro.ToPointer()
+}
+
+type SourceS3SchemasFiletype string
const (
- SourceS3FileFormatParquetFiletypeParquet SourceS3FileFormatParquetFiletype = "parquet"
+ SourceS3SchemasFiletypeParquet SourceS3SchemasFiletype = "parquet"
)
-func (e SourceS3FileFormatParquetFiletype) ToPointer() *SourceS3FileFormatParquetFiletype {
+func (e SourceS3SchemasFiletype) ToPointer() *SourceS3SchemasFiletype {
return &e
}
-func (e *SourceS3FileFormatParquetFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3SchemasFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "parquet":
- *e = SourceS3FileFormatParquetFiletype(v)
+ *e = SourceS3SchemasFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileFormatParquetFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3SchemasFiletype: %v", v)
}
}
-// SourceS3FileFormatParquet - This connector utilises PyArrow (Apache Arrow) for Parquet parsing.
-type SourceS3FileFormatParquet struct {
+// SourceS3Parquet - This connector utilises PyArrow (Apache Arrow) for Parquet parsing.
+type SourceS3Parquet struct {
// Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.
- BatchSize *int64 `json:"batch_size,omitempty"`
+ BatchSize *int64 `default:"65536" json:"batch_size"`
// Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.
- BufferSize *int64 `json:"buffer_size,omitempty"`
+ BufferSize *int64 `default:"2" json:"buffer_size"`
// If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns.
- Columns []string `json:"columns,omitempty"`
- Filetype *SourceS3FileFormatParquetFiletype `json:"filetype,omitempty"`
+ Columns []string `json:"columns,omitempty"`
+ filetype *SourceS3SchemasFiletype `const:"parquet" json:"filetype"`
+}
+
+func (s SourceS3Parquet) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3Parquet) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3Parquet) GetBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchSize
+}
+
+func (o *SourceS3Parquet) GetBufferSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BufferSize
+}
+
+func (o *SourceS3Parquet) GetColumns() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Columns
}
-type SourceS3FileFormatCSVFiletype string
+func (o *SourceS3Parquet) GetFiletype() *SourceS3SchemasFiletype {
+ return SourceS3SchemasFiletypeParquet.ToPointer()
+}
+
+type SourceS3Filetype string
const (
- SourceS3FileFormatCSVFiletypeCsv SourceS3FileFormatCSVFiletype = "csv"
+ SourceS3FiletypeCsv SourceS3Filetype = "csv"
)
-func (e SourceS3FileFormatCSVFiletype) ToPointer() *SourceS3FileFormatCSVFiletype {
+func (e SourceS3Filetype) ToPointer() *SourceS3Filetype {
return &e
}
-func (e *SourceS3FileFormatCSVFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3Filetype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "csv":
- *e = SourceS3FileFormatCSVFiletype(v)
+ *e = SourceS3Filetype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileFormatCSVFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3Filetype: %v", v)
}
}
-// SourceS3FileFormatCSV - This connector utilises PyArrow (Apache Arrow) for CSV parsing.
-type SourceS3FileFormatCSV struct {
+// SourceS3CSV - This connector utilises PyArrow (Apache Arrow) for CSV parsing.
+type SourceS3CSV struct {
// Optionally add a valid JSON string here to provide additional options to the csv reader. Mappings must correspond to options detailed here. 'column_types' is used internally to handle schema so overriding that would likely cause problems.
AdditionalReaderOptions *string `json:"additional_reader_options,omitempty"`
// Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above.
AdvancedOptions *string `json:"advanced_options,omitempty"`
// The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
- BlockSize *int64 `json:"block_size,omitempty"`
+ BlockSize *int64 `default:"10000" json:"block_size"`
// The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
- Delimiter *string `json:"delimiter,omitempty"`
+ Delimiter *string `default:"," json:"delimiter"`
// Whether two quotes in a quoted CSV value denote a single quote in the data.
- DoubleQuote *bool `json:"double_quote,omitempty"`
+ DoubleQuote *bool `default:"true" json:"double_quote"`
// The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
- Encoding *string `json:"encoding,omitempty"`
+ Encoding *string `default:"utf8" json:"encoding"`
// The character used for escaping special characters. To disallow escaping, leave this field blank.
- EscapeChar *string `json:"escape_char,omitempty"`
- Filetype *SourceS3FileFormatCSVFiletype `json:"filetype,omitempty"`
+ EscapeChar *string `json:"escape_char,omitempty"`
+ filetype *SourceS3Filetype `const:"csv" json:"filetype"`
// Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings
- InferDatatypes *bool `json:"infer_datatypes,omitempty"`
+ InferDatatypes *bool `default:"true" json:"infer_datatypes"`
// Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.
- NewlinesInValues *bool `json:"newlines_in_values,omitempty"`
+ NewlinesInValues *bool `default:"false" json:"newlines_in_values"`
// The character used for quoting CSV values. To disallow quoting, make this field blank.
- QuoteChar *string `json:"quote_char,omitempty"`
+ QuoteChar *string `default:""" json:"quote_char"`
+}
+
+func (s SourceS3CSV) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3CSV) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3CSV) GetAdditionalReaderOptions() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalReaderOptions
+}
+
+func (o *SourceS3CSV) GetAdvancedOptions() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AdvancedOptions
+}
+
+func (o *SourceS3CSV) GetBlockSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BlockSize
+}
+
+func (o *SourceS3CSV) GetDelimiter() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Delimiter
+}
+
+func (o *SourceS3CSV) GetDoubleQuote() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleQuote
+}
+
+func (o *SourceS3CSV) GetEncoding() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Encoding
+}
+
+func (o *SourceS3CSV) GetEscapeChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EscapeChar
+}
+
+func (o *SourceS3CSV) GetFiletype() *SourceS3Filetype {
+ return SourceS3FiletypeCsv.ToPointer()
+}
+
+func (o *SourceS3CSV) GetInferDatatypes() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.InferDatatypes
+}
+
+func (o *SourceS3CSV) GetNewlinesInValues() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.NewlinesInValues
+}
+
+func (o *SourceS3CSV) GetQuoteChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QuoteChar
}
type SourceS3FileFormatType string
const (
- SourceS3FileFormatTypeSourceS3FileFormatCSV SourceS3FileFormatType = "source-s3_File Format_CSV"
- SourceS3FileFormatTypeSourceS3FileFormatParquet SourceS3FileFormatType = "source-s3_File Format_Parquet"
- SourceS3FileFormatTypeSourceS3FileFormatAvro SourceS3FileFormatType = "source-s3_File Format_Avro"
- SourceS3FileFormatTypeSourceS3FileFormatJsonl SourceS3FileFormatType = "source-s3_File Format_Jsonl"
+ SourceS3FileFormatTypeSourceS3CSV SourceS3FileFormatType = "source-s3_CSV"
+ SourceS3FileFormatTypeSourceS3Parquet SourceS3FileFormatType = "source-s3_Parquet"
+ SourceS3FileFormatTypeSourceS3Avro SourceS3FileFormatType = "source-s3_Avro"
+ SourceS3FileFormatTypeSourceS3Jsonl SourceS3FileFormatType = "source-s3_Jsonl"
)
type SourceS3FileFormat struct {
- SourceS3FileFormatCSV *SourceS3FileFormatCSV
- SourceS3FileFormatParquet *SourceS3FileFormatParquet
- SourceS3FileFormatAvro *SourceS3FileFormatAvro
- SourceS3FileFormatJsonl *SourceS3FileFormatJsonl
+ SourceS3CSV *SourceS3CSV
+ SourceS3Parquet *SourceS3Parquet
+ SourceS3Avro *SourceS3Avro
+ SourceS3Jsonl *SourceS3Jsonl
Type SourceS3FileFormatType
}
-func CreateSourceS3FileFormatSourceS3FileFormatCSV(sourceS3FileFormatCSV SourceS3FileFormatCSV) SourceS3FileFormat {
- typ := SourceS3FileFormatTypeSourceS3FileFormatCSV
+func CreateSourceS3FileFormatSourceS3CSV(sourceS3CSV SourceS3CSV) SourceS3FileFormat {
+ typ := SourceS3FileFormatTypeSourceS3CSV
return SourceS3FileFormat{
- SourceS3FileFormatCSV: &sourceS3FileFormatCSV,
- Type: typ,
+ SourceS3CSV: &sourceS3CSV,
+ Type: typ,
}
}
-func CreateSourceS3FileFormatSourceS3FileFormatParquet(sourceS3FileFormatParquet SourceS3FileFormatParquet) SourceS3FileFormat {
- typ := SourceS3FileFormatTypeSourceS3FileFormatParquet
+func CreateSourceS3FileFormatSourceS3Parquet(sourceS3Parquet SourceS3Parquet) SourceS3FileFormat {
+ typ := SourceS3FileFormatTypeSourceS3Parquet
return SourceS3FileFormat{
- SourceS3FileFormatParquet: &sourceS3FileFormatParquet,
- Type: typ,
+ SourceS3Parquet: &sourceS3Parquet,
+ Type: typ,
}
}
-func CreateSourceS3FileFormatSourceS3FileFormatAvro(sourceS3FileFormatAvro SourceS3FileFormatAvro) SourceS3FileFormat {
- typ := SourceS3FileFormatTypeSourceS3FileFormatAvro
+func CreateSourceS3FileFormatSourceS3Avro(sourceS3Avro SourceS3Avro) SourceS3FileFormat {
+ typ := SourceS3FileFormatTypeSourceS3Avro
return SourceS3FileFormat{
- SourceS3FileFormatAvro: &sourceS3FileFormatAvro,
- Type: typ,
+ SourceS3Avro: &sourceS3Avro,
+ Type: typ,
}
}
-func CreateSourceS3FileFormatSourceS3FileFormatJsonl(sourceS3FileFormatJsonl SourceS3FileFormatJsonl) SourceS3FileFormat {
- typ := SourceS3FileFormatTypeSourceS3FileFormatJsonl
+func CreateSourceS3FileFormatSourceS3Jsonl(sourceS3Jsonl SourceS3Jsonl) SourceS3FileFormat {
+ typ := SourceS3FileFormatTypeSourceS3Jsonl
return SourceS3FileFormat{
- SourceS3FileFormatJsonl: &sourceS3FileFormatJsonl,
- Type: typ,
+ SourceS3Jsonl: &sourceS3Jsonl,
+ Type: typ,
}
}
func (u *SourceS3FileFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- sourceS3FileFormatAvro := new(SourceS3FileFormatAvro)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3FileFormatAvro); err == nil {
- u.SourceS3FileFormatAvro = sourceS3FileFormatAvro
- u.Type = SourceS3FileFormatTypeSourceS3FileFormatAvro
+ sourceS3Avro := new(SourceS3Avro)
+ if err := utils.UnmarshalJSON(data, &sourceS3Avro, "", true, true); err == nil {
+ u.SourceS3Avro = sourceS3Avro
+ u.Type = SourceS3FileFormatTypeSourceS3Avro
return nil
}
- sourceS3FileFormatParquet := new(SourceS3FileFormatParquet)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3FileFormatParquet); err == nil {
- u.SourceS3FileFormatParquet = sourceS3FileFormatParquet
- u.Type = SourceS3FileFormatTypeSourceS3FileFormatParquet
+ sourceS3Parquet := new(SourceS3Parquet)
+ if err := utils.UnmarshalJSON(data, &sourceS3Parquet, "", true, true); err == nil {
+ u.SourceS3Parquet = sourceS3Parquet
+ u.Type = SourceS3FileFormatTypeSourceS3Parquet
return nil
}
- sourceS3FileFormatJsonl := new(SourceS3FileFormatJsonl)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3FileFormatJsonl); err == nil {
- u.SourceS3FileFormatJsonl = sourceS3FileFormatJsonl
- u.Type = SourceS3FileFormatTypeSourceS3FileFormatJsonl
+ sourceS3Jsonl := new(SourceS3Jsonl)
+ if err := utils.UnmarshalJSON(data, &sourceS3Jsonl, "", true, true); err == nil {
+ u.SourceS3Jsonl = sourceS3Jsonl
+ u.Type = SourceS3FileFormatTypeSourceS3Jsonl
return nil
}
- sourceS3FileFormatCSV := new(SourceS3FileFormatCSV)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3FileFormatCSV); err == nil {
- u.SourceS3FileFormatCSV = sourceS3FileFormatCSV
- u.Type = SourceS3FileFormatTypeSourceS3FileFormatCSV
+ sourceS3CSV := new(SourceS3CSV)
+ if err := utils.UnmarshalJSON(data, &sourceS3CSV, "", true, true); err == nil {
+ u.SourceS3CSV = sourceS3CSV
+ u.Type = SourceS3FileFormatTypeSourceS3CSV
return nil
}
@@ -286,23 +449,23 @@ func (u *SourceS3FileFormat) UnmarshalJSON(data []byte) error {
}
func (u SourceS3FileFormat) MarshalJSON() ([]byte, error) {
- if u.SourceS3FileFormatAvro != nil {
- return json.Marshal(u.SourceS3FileFormatAvro)
+ if u.SourceS3CSV != nil {
+ return utils.MarshalJSON(u.SourceS3CSV, "", true)
}
- if u.SourceS3FileFormatParquet != nil {
- return json.Marshal(u.SourceS3FileFormatParquet)
+ if u.SourceS3Parquet != nil {
+ return utils.MarshalJSON(u.SourceS3Parquet, "", true)
}
- if u.SourceS3FileFormatJsonl != nil {
- return json.Marshal(u.SourceS3FileFormatJsonl)
+ if u.SourceS3Avro != nil {
+ return utils.MarshalJSON(u.SourceS3Avro, "", true)
}
- if u.SourceS3FileFormatCSV != nil {
- return json.Marshal(u.SourceS3FileFormatCSV)
+ if u.SourceS3Jsonl != nil {
+ return utils.MarshalJSON(u.SourceS3Jsonl, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
// SourceS3S3AmazonWebServices - Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services
@@ -314,13 +477,66 @@ type SourceS3S3AmazonWebServices struct {
// Name of the S3 bucket where the file(s) exist.
Bucket *string `json:"bucket,omitempty"`
// Endpoint to an S3 compatible service. Leave empty to use AWS.
- Endpoint *string `json:"endpoint,omitempty"`
+ Endpoint *string `default:"" json:"endpoint"`
// By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate.
- PathPrefix *string `json:"path_prefix,omitempty"`
+ PathPrefix *string `default:"" json:"path_prefix"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
}
+func (s SourceS3S3AmazonWebServices) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3S3AmazonWebServices) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3S3AmazonWebServices) GetAwsAccessKeyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsAccessKeyID
+}
+
+func (o *SourceS3S3AmazonWebServices) GetAwsSecretAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsSecretAccessKey
+}
+
+func (o *SourceS3S3AmazonWebServices) GetBucket() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Bucket
+}
+
+func (o *SourceS3S3AmazonWebServices) GetEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Endpoint
+}
+
+func (o *SourceS3S3AmazonWebServices) GetPathPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PathPrefix
+}
+
+func (o *SourceS3S3AmazonWebServices) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
type SourceS3S3 string
const (
@@ -345,284 +561,419 @@ func (e *SourceS3S3) UnmarshalJSON(data []byte) error {
}
}
-type SourceS3FileBasedStreamConfigFormatParquetFormatFiletype string
+type SourceS3SchemasStreamsFormatFormat5Filetype string
+
+const (
+ SourceS3SchemasStreamsFormatFormat5FiletypeUnstructured SourceS3SchemasStreamsFormatFormat5Filetype = "unstructured"
+)
+
+func (e SourceS3SchemasStreamsFormatFormat5Filetype) ToPointer() *SourceS3SchemasStreamsFormatFormat5Filetype {
+ return &e
+}
+
+func (e *SourceS3SchemasStreamsFormatFormat5Filetype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "unstructured":
+ *e = SourceS3SchemasStreamsFormatFormat5Filetype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceS3SchemasStreamsFormatFormat5Filetype: %v", v)
+ }
+}
+
+// SourceS3DocumentFileTypeFormatExperimental - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
+type SourceS3DocumentFileTypeFormatExperimental struct {
+ filetype *SourceS3SchemasStreamsFormatFormat5Filetype `const:"unstructured" json:"filetype"`
+ // If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.
+ SkipUnprocessableFileTypes *bool `default:"true" json:"skip_unprocessable_file_types"`
+}
+
+func (s SourceS3DocumentFileTypeFormatExperimental) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3DocumentFileTypeFormatExperimental) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3DocumentFileTypeFormatExperimental) GetFiletype() *SourceS3SchemasStreamsFormatFormat5Filetype {
+ return SourceS3SchemasStreamsFormatFormat5FiletypeUnstructured.ToPointer()
+}
+
+func (o *SourceS3DocumentFileTypeFormatExperimental) GetSkipUnprocessableFileTypes() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.SkipUnprocessableFileTypes
+}
+
+type SourceS3SchemasStreamsFormatFormat4Filetype string
const (
- SourceS3FileBasedStreamConfigFormatParquetFormatFiletypeParquet SourceS3FileBasedStreamConfigFormatParquetFormatFiletype = "parquet"
+ SourceS3SchemasStreamsFormatFormat4FiletypeParquet SourceS3SchemasStreamsFormatFormat4Filetype = "parquet"
)
-func (e SourceS3FileBasedStreamConfigFormatParquetFormatFiletype) ToPointer() *SourceS3FileBasedStreamConfigFormatParquetFormatFiletype {
+func (e SourceS3SchemasStreamsFormatFormat4Filetype) ToPointer() *SourceS3SchemasStreamsFormatFormat4Filetype {
return &e
}
-func (e *SourceS3FileBasedStreamConfigFormatParquetFormatFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3SchemasStreamsFormatFormat4Filetype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "parquet":
- *e = SourceS3FileBasedStreamConfigFormatParquetFormatFiletype(v)
+ *e = SourceS3SchemasStreamsFormatFormat4Filetype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatParquetFormatFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3SchemasStreamsFormatFormat4Filetype: %v", v)
}
}
-// SourceS3FileBasedStreamConfigFormatParquetFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
-type SourceS3FileBasedStreamConfigFormatParquetFormat struct {
+// SourceS3ParquetFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceS3ParquetFormat struct {
// Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
- DecimalAsFloat *bool `json:"decimal_as_float,omitempty"`
- Filetype *SourceS3FileBasedStreamConfigFormatParquetFormatFiletype `json:"filetype,omitempty"`
+ DecimalAsFloat *bool `default:"false" json:"decimal_as_float"`
+ filetype *SourceS3SchemasStreamsFormatFormat4Filetype `const:"parquet" json:"filetype"`
+}
+
+func (s SourceS3ParquetFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3ParquetFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3ParquetFormat) GetDecimalAsFloat() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DecimalAsFloat
+}
+
+func (o *SourceS3ParquetFormat) GetFiletype() *SourceS3SchemasStreamsFormatFormat4Filetype {
+ return SourceS3SchemasStreamsFormatFormat4FiletypeParquet.ToPointer()
}
-type SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype string
+type SourceS3SchemasStreamsFormatFormatFiletype string
const (
- SourceS3FileBasedStreamConfigFormatJsonlFormatFiletypeJsonl SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype = "jsonl"
+ SourceS3SchemasStreamsFormatFormatFiletypeJsonl SourceS3SchemasStreamsFormatFormatFiletype = "jsonl"
)
-func (e SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype) ToPointer() *SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype {
+func (e SourceS3SchemasStreamsFormatFormatFiletype) ToPointer() *SourceS3SchemasStreamsFormatFormatFiletype {
return &e
}
-func (e *SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3SchemasStreamsFormatFormatFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "jsonl":
- *e = SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype(v)
+ *e = SourceS3SchemasStreamsFormatFormatFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3SchemasStreamsFormatFormatFiletype: %v", v)
+ }
+}
+
+// SourceS3JsonlFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceS3JsonlFormat struct {
+ filetype *SourceS3SchemasStreamsFormatFormatFiletype `const:"jsonl" json:"filetype"`
+}
+
+func (s SourceS3JsonlFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3JsonlFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceS3FileBasedStreamConfigFormatJsonlFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
-type SourceS3FileBasedStreamConfigFormatJsonlFormat struct {
- Filetype *SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype `json:"filetype,omitempty"`
+func (o *SourceS3JsonlFormat) GetFiletype() *SourceS3SchemasStreamsFormatFormatFiletype {
+ return SourceS3SchemasStreamsFormatFormatFiletypeJsonl.ToPointer()
}
-type SourceS3FileBasedStreamConfigFormatCSVFormatFiletype string
+type SourceS3SchemasStreamsFormatFiletype string
const (
- SourceS3FileBasedStreamConfigFormatCSVFormatFiletypeCsv SourceS3FileBasedStreamConfigFormatCSVFormatFiletype = "csv"
+ SourceS3SchemasStreamsFormatFiletypeCsv SourceS3SchemasStreamsFormatFiletype = "csv"
)
-func (e SourceS3FileBasedStreamConfigFormatCSVFormatFiletype) ToPointer() *SourceS3FileBasedStreamConfigFormatCSVFormatFiletype {
+func (e SourceS3SchemasStreamsFormatFiletype) ToPointer() *SourceS3SchemasStreamsFormatFiletype {
return &e
}
-func (e *SourceS3FileBasedStreamConfigFormatCSVFormatFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3SchemasStreamsFormatFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "csv":
- *e = SourceS3FileBasedStreamConfigFormatCSVFormatFiletype(v)
+ *e = SourceS3SchemasStreamsFormatFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatCSVFormatFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3SchemasStreamsFormatFiletype: %v", v)
}
}
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType string
+type SourceS3SchemasStreamsHeaderDefinitionType string
const (
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionTypeUserProvided SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType = "User Provided"
+ SourceS3SchemasStreamsHeaderDefinitionTypeUserProvided SourceS3SchemasStreamsHeaderDefinitionType = "User Provided"
)
-func (e SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType) ToPointer() *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType {
+func (e SourceS3SchemasStreamsHeaderDefinitionType) ToPointer() *SourceS3SchemasStreamsHeaderDefinitionType {
return &e
}
-func (e *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+func (e *SourceS3SchemasStreamsHeaderDefinitionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "User Provided":
- *e = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType(v)
+ *e = SourceS3SchemasStreamsHeaderDefinitionType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType: %v", v)
+ return fmt.Errorf("invalid value for SourceS3SchemasStreamsHeaderDefinitionType: %v", v)
}
}
-// SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided struct {
+// SourceS3UserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceS3UserProvided struct {
// The column names that will be used while emitting the CSV records
- ColumnNames []string `json:"column_names"`
- HeaderDefinitionType *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType `json:"header_definition_type,omitempty"`
+ ColumnNames []string `json:"column_names"`
+ headerDefinitionType *SourceS3SchemasStreamsHeaderDefinitionType `const:"User Provided" json:"header_definition_type"`
+}
+
+func (s SourceS3UserProvided) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3UserProvided) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3UserProvided) GetColumnNames() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.ColumnNames
}
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType string
+func (o *SourceS3UserProvided) GetHeaderDefinitionType() *SourceS3SchemasStreamsHeaderDefinitionType {
+ return SourceS3SchemasStreamsHeaderDefinitionTypeUserProvided.ToPointer()
+}
+
+type SourceS3SchemasHeaderDefinitionType string
const (
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionTypeAutogenerated SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType = "Autogenerated"
+ SourceS3SchemasHeaderDefinitionTypeAutogenerated SourceS3SchemasHeaderDefinitionType = "Autogenerated"
)
-func (e SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType) ToPointer() *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType {
+func (e SourceS3SchemasHeaderDefinitionType) ToPointer() *SourceS3SchemasHeaderDefinitionType {
return &e
}
-func (e *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+func (e *SourceS3SchemasHeaderDefinitionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Autogenerated":
- *e = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType(v)
+ *e = SourceS3SchemasHeaderDefinitionType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType: %v", v)
+ return fmt.Errorf("invalid value for SourceS3SchemasHeaderDefinitionType: %v", v)
}
}
-// SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated struct {
- HeaderDefinitionType *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType `json:"header_definition_type,omitempty"`
+// SourceS3Autogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceS3Autogenerated struct {
+ headerDefinitionType *SourceS3SchemasHeaderDefinitionType `const:"Autogenerated" json:"header_definition_type"`
}
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType string
+func (s SourceS3Autogenerated) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3Autogenerated) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3Autogenerated) GetHeaderDefinitionType() *SourceS3SchemasHeaderDefinitionType {
+ return SourceS3SchemasHeaderDefinitionTypeAutogenerated.ToPointer()
+}
+
+type SourceS3HeaderDefinitionType string
const (
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionTypeFromCsv SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType = "From CSV"
+ SourceS3HeaderDefinitionTypeFromCsv SourceS3HeaderDefinitionType = "From CSV"
)
-func (e SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType) ToPointer() *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType {
+func (e SourceS3HeaderDefinitionType) ToPointer() *SourceS3HeaderDefinitionType {
return &e
}
-func (e *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+func (e *SourceS3HeaderDefinitionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "From CSV":
- *e = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType(v)
+ *e = SourceS3HeaderDefinitionType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType: %v", v)
+ return fmt.Errorf("invalid value for SourceS3HeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceS3FromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceS3FromCSV struct {
+ headerDefinitionType *SourceS3HeaderDefinitionType `const:"From CSV" json:"header_definition_type"`
+}
+
+func (s SourceS3FromCSV) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3FromCSV) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV struct {
- HeaderDefinitionType *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType `json:"header_definition_type,omitempty"`
+func (o *SourceS3FromCSV) GetHeaderDefinitionType() *SourceS3HeaderDefinitionType {
+ return SourceS3HeaderDefinitionTypeFromCsv.ToPointer()
}
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType string
+type SourceS3CSVHeaderDefinitionType string
const (
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_From CSV"
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_Autogenerated"
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_User Provided"
+ SourceS3CSVHeaderDefinitionTypeSourceS3FromCSV SourceS3CSVHeaderDefinitionType = "source-s3_From CSV"
+ SourceS3CSVHeaderDefinitionTypeSourceS3Autogenerated SourceS3CSVHeaderDefinitionType = "source-s3_Autogenerated"
+ SourceS3CSVHeaderDefinitionTypeSourceS3UserProvided SourceS3CSVHeaderDefinitionType = "source-s3_User Provided"
)
-type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition struct {
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided
+type SourceS3CSVHeaderDefinition struct {
+ SourceS3FromCSV *SourceS3FromCSV
+ SourceS3Autogenerated *SourceS3Autogenerated
+ SourceS3UserProvided *SourceS3UserProvided
- Type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType
+ Type SourceS3CSVHeaderDefinitionType
}
-func CreateSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV(sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV) SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition {
- typ := SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV
+func CreateSourceS3CSVHeaderDefinitionSourceS3FromCSV(sourceS3FromCSV SourceS3FromCSV) SourceS3CSVHeaderDefinition {
+ typ := SourceS3CSVHeaderDefinitionTypeSourceS3FromCSV
- return SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV: &sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV,
- Type: typ,
+ return SourceS3CSVHeaderDefinition{
+ SourceS3FromCSV: &sourceS3FromCSV,
+ Type: typ,
}
}
-func CreateSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated(sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated) SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition {
- typ := SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated
+func CreateSourceS3CSVHeaderDefinitionSourceS3Autogenerated(sourceS3Autogenerated SourceS3Autogenerated) SourceS3CSVHeaderDefinition {
+ typ := SourceS3CSVHeaderDefinitionTypeSourceS3Autogenerated
- return SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated: &sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated,
- Type: typ,
+ return SourceS3CSVHeaderDefinition{
+ SourceS3Autogenerated: &sourceS3Autogenerated,
+ Type: typ,
}
}
-func CreateSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided(sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided) SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition {
- typ := SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided
+func CreateSourceS3CSVHeaderDefinitionSourceS3UserProvided(sourceS3UserProvided SourceS3UserProvided) SourceS3CSVHeaderDefinition {
+ typ := SourceS3CSVHeaderDefinitionTypeSourceS3UserProvided
- return SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided: &sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided,
- Type: typ,
+ return SourceS3CSVHeaderDefinition{
+ SourceS3UserProvided: &sourceS3UserProvided,
+ Type: typ,
}
}
-func (u *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *SourceS3CSVHeaderDefinition) UnmarshalJSON(data []byte) error {
- sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV := new(SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV); err == nil {
- u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV = sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV
- u.Type = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV
+ sourceS3FromCSV := new(SourceS3FromCSV)
+ if err := utils.UnmarshalJSON(data, &sourceS3FromCSV, "", true, true); err == nil {
+ u.SourceS3FromCSV = sourceS3FromCSV
+ u.Type = SourceS3CSVHeaderDefinitionTypeSourceS3FromCSV
return nil
}
- sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated := new(SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated); err == nil {
- u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated = sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated
- u.Type = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated
+ sourceS3Autogenerated := new(SourceS3Autogenerated)
+ if err := utils.UnmarshalJSON(data, &sourceS3Autogenerated, "", true, true); err == nil {
+ u.SourceS3Autogenerated = sourceS3Autogenerated
+ u.Type = SourceS3CSVHeaderDefinitionTypeSourceS3Autogenerated
return nil
}
- sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided := new(SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided); err == nil {
- u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided = sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided
- u.Type = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided
+ sourceS3UserProvided := new(SourceS3UserProvided)
+ if err := utils.UnmarshalJSON(data, &sourceS3UserProvided, "", true, true); err == nil {
+ u.SourceS3UserProvided = sourceS3UserProvided
+ u.Type = SourceS3CSVHeaderDefinitionTypeSourceS3UserProvided
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition) MarshalJSON() ([]byte, error) {
- if u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil {
- return json.Marshal(u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV)
+func (u SourceS3CSVHeaderDefinition) MarshalJSON() ([]byte, error) {
+ if u.SourceS3FromCSV != nil {
+ return utils.MarshalJSON(u.SourceS3FromCSV, "", true)
}
- if u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil {
- return json.Marshal(u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated)
+ if u.SourceS3Autogenerated != nil {
+ return utils.MarshalJSON(u.SourceS3Autogenerated, "", true)
}
- if u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil {
- return json.Marshal(u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided)
+ if u.SourceS3UserProvided != nil {
+ return utils.MarshalJSON(u.SourceS3UserProvided, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType - How to infer the types of the columns. If none, inference default to strings.
-type SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType string
+// SourceS3InferenceType - How to infer the types of the columns. If none, inference default to strings.
+type SourceS3InferenceType string
const (
- SourceS3FileBasedStreamConfigFormatCSVFormatInferenceTypeNone SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType = "None"
- SourceS3FileBasedStreamConfigFormatCSVFormatInferenceTypePrimitiveTypesOnly SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType = "Primitive Types Only"
+ SourceS3InferenceTypeNone SourceS3InferenceType = "None"
+ SourceS3InferenceTypePrimitiveTypesOnly SourceS3InferenceType = "Primitive Types Only"
)
-func (e SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType) ToPointer() *SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType {
+func (e SourceS3InferenceType) ToPointer() *SourceS3InferenceType {
return &e
}
-func (e *SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType) UnmarshalJSON(data []byte) error {
+func (e *SourceS3InferenceType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -631,205 +982,346 @@ func (e *SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType) UnmarshalJSO
case "None":
fallthrough
case "Primitive Types Only":
- *e = SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType(v)
+ *e = SourceS3InferenceType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType: %v", v)
+ return fmt.Errorf("invalid value for SourceS3InferenceType: %v", v)
}
}
-// SourceS3FileBasedStreamConfigFormatCSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
-type SourceS3FileBasedStreamConfigFormatCSVFormat struct {
+// SourceS3CSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceS3CSVFormat struct {
// The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
- Delimiter *string `json:"delimiter,omitempty"`
+ Delimiter *string `default:"," json:"delimiter"`
// Whether two quotes in a quoted CSV value denote a single quote in the data.
- DoubleQuote *bool `json:"double_quote,omitempty"`
+ DoubleQuote *bool `default:"true" json:"double_quote"`
// The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
- Encoding *string `json:"encoding,omitempty"`
+ Encoding *string `default:"utf8" json:"encoding"`
// The character used for escaping special characters. To disallow escaping, leave this field blank.
EscapeChar *string `json:"escape_char,omitempty"`
// A set of case-sensitive strings that should be interpreted as false values.
- FalseValues []string `json:"false_values,omitempty"`
- Filetype *SourceS3FileBasedStreamConfigFormatCSVFormatFiletype `json:"filetype,omitempty"`
+ FalseValues []string `json:"false_values,omitempty"`
+ filetype *SourceS3SchemasStreamsFormatFiletype `const:"csv" json:"filetype"`
// How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
- HeaderDefinition *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition `json:"header_definition,omitempty"`
+ HeaderDefinition *SourceS3CSVHeaderDefinition `json:"header_definition,omitempty"`
// How to infer the types of the columns. If none, inference default to strings.
- InferenceType *SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType `json:"inference_type,omitempty"`
+ InferenceType *SourceS3InferenceType `default:"None" json:"inference_type"`
// A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
NullValues []string `json:"null_values,omitempty"`
// The character used for quoting CSV values. To disallow quoting, make this field blank.
- QuoteChar *string `json:"quote_char,omitempty"`
+	QuoteChar *string `default:"\"" json:"quote_char"`
// The number of rows to skip after the header row.
- SkipRowsAfterHeader *int64 `json:"skip_rows_after_header,omitempty"`
+ SkipRowsAfterHeader *int64 `default:"0" json:"skip_rows_after_header"`
// The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
- SkipRowsBeforeHeader *int64 `json:"skip_rows_before_header,omitempty"`
+ SkipRowsBeforeHeader *int64 `default:"0" json:"skip_rows_before_header"`
// Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
- StringsCanBeNull *bool `json:"strings_can_be_null,omitempty"`
+ StringsCanBeNull *bool `default:"true" json:"strings_can_be_null"`
// A set of case-sensitive strings that should be interpreted as true values.
TrueValues []string `json:"true_values,omitempty"`
}
-type SourceS3FileBasedStreamConfigFormatAvroFormatFiletype string
+func (s SourceS3CSVFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3CSVFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3CSVFormat) GetDelimiter() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Delimiter
+}
+
+func (o *SourceS3CSVFormat) GetDoubleQuote() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleQuote
+}
+
+func (o *SourceS3CSVFormat) GetEncoding() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Encoding
+}
+
+func (o *SourceS3CSVFormat) GetEscapeChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EscapeChar
+}
+
+func (o *SourceS3CSVFormat) GetFalseValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.FalseValues
+}
+
+func (o *SourceS3CSVFormat) GetFiletype() *SourceS3SchemasStreamsFormatFiletype {
+ return SourceS3SchemasStreamsFormatFiletypeCsv.ToPointer()
+}
+
+func (o *SourceS3CSVFormat) GetHeaderDefinition() *SourceS3CSVHeaderDefinition {
+ if o == nil {
+ return nil
+ }
+ return o.HeaderDefinition
+}
+
+func (o *SourceS3CSVFormat) GetInferenceType() *SourceS3InferenceType {
+ if o == nil {
+ return nil
+ }
+ return o.InferenceType
+}
+
+func (o *SourceS3CSVFormat) GetNullValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.NullValues
+}
+
+func (o *SourceS3CSVFormat) GetQuoteChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QuoteChar
+}
+
+func (o *SourceS3CSVFormat) GetSkipRowsAfterHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsAfterHeader
+}
+
+func (o *SourceS3CSVFormat) GetSkipRowsBeforeHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsBeforeHeader
+}
+
+func (o *SourceS3CSVFormat) GetStringsCanBeNull() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.StringsCanBeNull
+}
+
+func (o *SourceS3CSVFormat) GetTrueValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TrueValues
+}
+
+type SourceS3SchemasStreamsFiletype string
const (
- SourceS3FileBasedStreamConfigFormatAvroFormatFiletypeAvro SourceS3FileBasedStreamConfigFormatAvroFormatFiletype = "avro"
+ SourceS3SchemasStreamsFiletypeAvro SourceS3SchemasStreamsFiletype = "avro"
)
-func (e SourceS3FileBasedStreamConfigFormatAvroFormatFiletype) ToPointer() *SourceS3FileBasedStreamConfigFormatAvroFormatFiletype {
+func (e SourceS3SchemasStreamsFiletype) ToPointer() *SourceS3SchemasStreamsFiletype {
return &e
}
-func (e *SourceS3FileBasedStreamConfigFormatAvroFormatFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3SchemasStreamsFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "avro":
- *e = SourceS3FileBasedStreamConfigFormatAvroFormatFiletype(v)
+ *e = SourceS3SchemasStreamsFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatAvroFormatFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3SchemasStreamsFiletype: %v", v)
}
}
-// SourceS3FileBasedStreamConfigFormatAvroFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
-type SourceS3FileBasedStreamConfigFormatAvroFormat struct {
+// SourceS3AvroFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceS3AvroFormat struct {
// Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.
- DoubleAsString *bool `json:"double_as_string,omitempty"`
- Filetype *SourceS3FileBasedStreamConfigFormatAvroFormatFiletype `json:"filetype,omitempty"`
+ DoubleAsString *bool `default:"false" json:"double_as_string"`
+ filetype *SourceS3SchemasStreamsFiletype `const:"avro" json:"filetype"`
+}
+
+func (s SourceS3AvroFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3AvroFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3AvroFormat) GetDoubleAsString() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleAsString
+}
+
+func (o *SourceS3AvroFormat) GetFiletype() *SourceS3SchemasStreamsFiletype {
+ return SourceS3SchemasStreamsFiletypeAvro.ToPointer()
}
-type SourceS3FileBasedStreamConfigFormatType string
+type SourceS3FormatType string
const (
- SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatAvroFormat SourceS3FileBasedStreamConfigFormatType = "source-s3_FileBasedStreamConfig_Format_Avro Format"
- SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatCSVFormat SourceS3FileBasedStreamConfigFormatType = "source-s3_FileBasedStreamConfig_Format_CSV Format"
- SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatJsonlFormat SourceS3FileBasedStreamConfigFormatType = "source-s3_FileBasedStreamConfig_Format_Jsonl Format"
- SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatParquetFormat SourceS3FileBasedStreamConfigFormatType = "source-s3_FileBasedStreamConfig_Format_Parquet Format"
+ SourceS3FormatTypeSourceS3AvroFormat SourceS3FormatType = "source-s3_Avro Format"
+ SourceS3FormatTypeSourceS3CSVFormat SourceS3FormatType = "source-s3_CSV Format"
+ SourceS3FormatTypeSourceS3JsonlFormat SourceS3FormatType = "source-s3_Jsonl Format"
+ SourceS3FormatTypeSourceS3ParquetFormat SourceS3FormatType = "source-s3_Parquet Format"
+ SourceS3FormatTypeSourceS3DocumentFileTypeFormatExperimental SourceS3FormatType = "source-s3_Document File Type Format (Experimental)"
)
-type SourceS3FileBasedStreamConfigFormat struct {
- SourceS3FileBasedStreamConfigFormatAvroFormat *SourceS3FileBasedStreamConfigFormatAvroFormat
- SourceS3FileBasedStreamConfigFormatCSVFormat *SourceS3FileBasedStreamConfigFormatCSVFormat
- SourceS3FileBasedStreamConfigFormatJsonlFormat *SourceS3FileBasedStreamConfigFormatJsonlFormat
- SourceS3FileBasedStreamConfigFormatParquetFormat *SourceS3FileBasedStreamConfigFormatParquetFormat
+type SourceS3Format struct {
+ SourceS3AvroFormat *SourceS3AvroFormat
+ SourceS3CSVFormat *SourceS3CSVFormat
+ SourceS3JsonlFormat *SourceS3JsonlFormat
+ SourceS3ParquetFormat *SourceS3ParquetFormat
+ SourceS3DocumentFileTypeFormatExperimental *SourceS3DocumentFileTypeFormatExperimental
- Type SourceS3FileBasedStreamConfigFormatType
+ Type SourceS3FormatType
}
-func CreateSourceS3FileBasedStreamConfigFormatSourceS3FileBasedStreamConfigFormatAvroFormat(sourceS3FileBasedStreamConfigFormatAvroFormat SourceS3FileBasedStreamConfigFormatAvroFormat) SourceS3FileBasedStreamConfigFormat {
- typ := SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatAvroFormat
+func CreateSourceS3FormatSourceS3AvroFormat(sourceS3AvroFormat SourceS3AvroFormat) SourceS3Format {
+ typ := SourceS3FormatTypeSourceS3AvroFormat
- return SourceS3FileBasedStreamConfigFormat{
- SourceS3FileBasedStreamConfigFormatAvroFormat: &sourceS3FileBasedStreamConfigFormatAvroFormat,
- Type: typ,
+ return SourceS3Format{
+ SourceS3AvroFormat: &sourceS3AvroFormat,
+ Type: typ,
}
}
-func CreateSourceS3FileBasedStreamConfigFormatSourceS3FileBasedStreamConfigFormatCSVFormat(sourceS3FileBasedStreamConfigFormatCSVFormat SourceS3FileBasedStreamConfigFormatCSVFormat) SourceS3FileBasedStreamConfigFormat {
- typ := SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatCSVFormat
+func CreateSourceS3FormatSourceS3CSVFormat(sourceS3CSVFormat SourceS3CSVFormat) SourceS3Format {
+ typ := SourceS3FormatTypeSourceS3CSVFormat
- return SourceS3FileBasedStreamConfigFormat{
- SourceS3FileBasedStreamConfigFormatCSVFormat: &sourceS3FileBasedStreamConfigFormatCSVFormat,
- Type: typ,
+ return SourceS3Format{
+ SourceS3CSVFormat: &sourceS3CSVFormat,
+ Type: typ,
}
}
-func CreateSourceS3FileBasedStreamConfigFormatSourceS3FileBasedStreamConfigFormatJsonlFormat(sourceS3FileBasedStreamConfigFormatJsonlFormat SourceS3FileBasedStreamConfigFormatJsonlFormat) SourceS3FileBasedStreamConfigFormat {
- typ := SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatJsonlFormat
+func CreateSourceS3FormatSourceS3JsonlFormat(sourceS3JsonlFormat SourceS3JsonlFormat) SourceS3Format {
+ typ := SourceS3FormatTypeSourceS3JsonlFormat
- return SourceS3FileBasedStreamConfigFormat{
- SourceS3FileBasedStreamConfigFormatJsonlFormat: &sourceS3FileBasedStreamConfigFormatJsonlFormat,
- Type: typ,
+ return SourceS3Format{
+ SourceS3JsonlFormat: &sourceS3JsonlFormat,
+ Type: typ,
+ }
+}
+
+func CreateSourceS3FormatSourceS3ParquetFormat(sourceS3ParquetFormat SourceS3ParquetFormat) SourceS3Format {
+ typ := SourceS3FormatTypeSourceS3ParquetFormat
+
+ return SourceS3Format{
+ SourceS3ParquetFormat: &sourceS3ParquetFormat,
+ Type: typ,
}
}
-func CreateSourceS3FileBasedStreamConfigFormatSourceS3FileBasedStreamConfigFormatParquetFormat(sourceS3FileBasedStreamConfigFormatParquetFormat SourceS3FileBasedStreamConfigFormatParquetFormat) SourceS3FileBasedStreamConfigFormat {
- typ := SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatParquetFormat
+func CreateSourceS3FormatSourceS3DocumentFileTypeFormatExperimental(sourceS3DocumentFileTypeFormatExperimental SourceS3DocumentFileTypeFormatExperimental) SourceS3Format {
+ typ := SourceS3FormatTypeSourceS3DocumentFileTypeFormatExperimental
- return SourceS3FileBasedStreamConfigFormat{
- SourceS3FileBasedStreamConfigFormatParquetFormat: &sourceS3FileBasedStreamConfigFormatParquetFormat,
+ return SourceS3Format{
+ SourceS3DocumentFileTypeFormatExperimental: &sourceS3DocumentFileTypeFormatExperimental,
Type: typ,
}
}
-func (u *SourceS3FileBasedStreamConfigFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *SourceS3Format) UnmarshalJSON(data []byte) error {
- sourceS3FileBasedStreamConfigFormatJsonlFormat := new(SourceS3FileBasedStreamConfigFormatJsonlFormat)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3FileBasedStreamConfigFormatJsonlFormat); err == nil {
- u.SourceS3FileBasedStreamConfigFormatJsonlFormat = sourceS3FileBasedStreamConfigFormatJsonlFormat
- u.Type = SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatJsonlFormat
+ sourceS3JsonlFormat := new(SourceS3JsonlFormat)
+ if err := utils.UnmarshalJSON(data, &sourceS3JsonlFormat, "", true, true); err == nil {
+ u.SourceS3JsonlFormat = sourceS3JsonlFormat
+ u.Type = SourceS3FormatTypeSourceS3JsonlFormat
return nil
}
- sourceS3FileBasedStreamConfigFormatAvroFormat := new(SourceS3FileBasedStreamConfigFormatAvroFormat)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3FileBasedStreamConfigFormatAvroFormat); err == nil {
- u.SourceS3FileBasedStreamConfigFormatAvroFormat = sourceS3FileBasedStreamConfigFormatAvroFormat
- u.Type = SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatAvroFormat
+ sourceS3AvroFormat := new(SourceS3AvroFormat)
+ if err := utils.UnmarshalJSON(data, &sourceS3AvroFormat, "", true, true); err == nil {
+ u.SourceS3AvroFormat = sourceS3AvroFormat
+ u.Type = SourceS3FormatTypeSourceS3AvroFormat
return nil
}
- sourceS3FileBasedStreamConfigFormatParquetFormat := new(SourceS3FileBasedStreamConfigFormatParquetFormat)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3FileBasedStreamConfigFormatParquetFormat); err == nil {
- u.SourceS3FileBasedStreamConfigFormatParquetFormat = sourceS3FileBasedStreamConfigFormatParquetFormat
- u.Type = SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatParquetFormat
+ sourceS3ParquetFormat := new(SourceS3ParquetFormat)
+ if err := utils.UnmarshalJSON(data, &sourceS3ParquetFormat, "", true, true); err == nil {
+ u.SourceS3ParquetFormat = sourceS3ParquetFormat
+ u.Type = SourceS3FormatTypeSourceS3ParquetFormat
return nil
}
- sourceS3FileBasedStreamConfigFormatCSVFormat := new(SourceS3FileBasedStreamConfigFormatCSVFormat)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3FileBasedStreamConfigFormatCSVFormat); err == nil {
- u.SourceS3FileBasedStreamConfigFormatCSVFormat = sourceS3FileBasedStreamConfigFormatCSVFormat
- u.Type = SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatCSVFormat
+ sourceS3DocumentFileTypeFormatExperimental := new(SourceS3DocumentFileTypeFormatExperimental)
+ if err := utils.UnmarshalJSON(data, &sourceS3DocumentFileTypeFormatExperimental, "", true, true); err == nil {
+ u.SourceS3DocumentFileTypeFormatExperimental = sourceS3DocumentFileTypeFormatExperimental
+ u.Type = SourceS3FormatTypeSourceS3DocumentFileTypeFormatExperimental
+ return nil
+ }
+
+ sourceS3CSVFormat := new(SourceS3CSVFormat)
+ if err := utils.UnmarshalJSON(data, &sourceS3CSVFormat, "", true, true); err == nil {
+ u.SourceS3CSVFormat = sourceS3CSVFormat
+ u.Type = SourceS3FormatTypeSourceS3CSVFormat
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceS3FileBasedStreamConfigFormat) MarshalJSON() ([]byte, error) {
- if u.SourceS3FileBasedStreamConfigFormatJsonlFormat != nil {
- return json.Marshal(u.SourceS3FileBasedStreamConfigFormatJsonlFormat)
+func (u SourceS3Format) MarshalJSON() ([]byte, error) {
+ if u.SourceS3AvroFormat != nil {
+ return utils.MarshalJSON(u.SourceS3AvroFormat, "", true)
}
- if u.SourceS3FileBasedStreamConfigFormatAvroFormat != nil {
- return json.Marshal(u.SourceS3FileBasedStreamConfigFormatAvroFormat)
+ if u.SourceS3CSVFormat != nil {
+ return utils.MarshalJSON(u.SourceS3CSVFormat, "", true)
}
- if u.SourceS3FileBasedStreamConfigFormatParquetFormat != nil {
- return json.Marshal(u.SourceS3FileBasedStreamConfigFormatParquetFormat)
+ if u.SourceS3JsonlFormat != nil {
+ return utils.MarshalJSON(u.SourceS3JsonlFormat, "", true)
}
- if u.SourceS3FileBasedStreamConfigFormatCSVFormat != nil {
- return json.Marshal(u.SourceS3FileBasedStreamConfigFormatCSVFormat)
+ if u.SourceS3ParquetFormat != nil {
+ return utils.MarshalJSON(u.SourceS3ParquetFormat, "", true)
}
- return nil, nil
+ if u.SourceS3DocumentFileTypeFormatExperimental != nil {
+ return utils.MarshalJSON(u.SourceS3DocumentFileTypeFormatExperimental, "", true)
+ }
+
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceS3FileBasedStreamConfigValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
-type SourceS3FileBasedStreamConfigValidationPolicy string
+// SourceS3ValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+type SourceS3ValidationPolicy string
const (
- SourceS3FileBasedStreamConfigValidationPolicyEmitRecord SourceS3FileBasedStreamConfigValidationPolicy = "Emit Record"
- SourceS3FileBasedStreamConfigValidationPolicySkipRecord SourceS3FileBasedStreamConfigValidationPolicy = "Skip Record"
- SourceS3FileBasedStreamConfigValidationPolicyWaitForDiscover SourceS3FileBasedStreamConfigValidationPolicy = "Wait for Discover"
+ SourceS3ValidationPolicyEmitRecord SourceS3ValidationPolicy = "Emit Record"
+ SourceS3ValidationPolicySkipRecord SourceS3ValidationPolicy = "Skip Record"
+ SourceS3ValidationPolicyWaitForDiscover SourceS3ValidationPolicy = "Wait for Discover"
)
-func (e SourceS3FileBasedStreamConfigValidationPolicy) ToPointer() *SourceS3FileBasedStreamConfigValidationPolicy {
+func (e SourceS3ValidationPolicy) ToPointer() *SourceS3ValidationPolicy {
return &e
}
-func (e *SourceS3FileBasedStreamConfigValidationPolicy) UnmarshalJSON(data []byte) error {
+func (e *SourceS3ValidationPolicy) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -840,20 +1332,18 @@ func (e *SourceS3FileBasedStreamConfigValidationPolicy) UnmarshalJSON(data []byt
case "Skip Record":
fallthrough
case "Wait for Discover":
- *e = SourceS3FileBasedStreamConfigValidationPolicy(v)
+ *e = SourceS3ValidationPolicy(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigValidationPolicy: %v", v)
+ return fmt.Errorf("invalid value for SourceS3ValidationPolicy: %v", v)
}
}
type SourceS3FileBasedStreamConfig struct {
// When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
- DaysToSyncIfHistoryIsFull *int64 `json:"days_to_sync_if_history_is_full,omitempty"`
- // The data file type that is being extracted for a stream.
- FileType string `json:"file_type"`
+ DaysToSyncIfHistoryIsFull *int64 `default:"3" json:"days_to_sync_if_history_is_full"`
// The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
- Format *SourceS3FileBasedStreamConfigFormat `json:"format,omitempty"`
+ Format SourceS3Format `json:"format"`
// The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
Globs []string `json:"globs,omitempty"`
// The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
@@ -865,9 +1355,83 @@ type SourceS3FileBasedStreamConfig struct {
// The column or columns (for a composite key) that serves as the unique identifier of a record.
PrimaryKey *string `json:"primary_key,omitempty"`
// When enabled, syncs will not validate or structure records against the stream's schema.
- Schemaless *bool `json:"schemaless,omitempty"`
+ Schemaless *bool `default:"false" json:"schemaless"`
// The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
- ValidationPolicy *SourceS3FileBasedStreamConfigValidationPolicy `json:"validation_policy,omitempty"`
+ ValidationPolicy *SourceS3ValidationPolicy `default:"Emit Record" json:"validation_policy"`
+}
+
+func (s SourceS3FileBasedStreamConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3FileBasedStreamConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3FileBasedStreamConfig) GetDaysToSyncIfHistoryIsFull() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DaysToSyncIfHistoryIsFull
+}
+
+func (o *SourceS3FileBasedStreamConfig) GetFormat() SourceS3Format {
+ if o == nil {
+ return SourceS3Format{}
+ }
+ return o.Format
+}
+
+func (o *SourceS3FileBasedStreamConfig) GetGlobs() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Globs
+}
+
+func (o *SourceS3FileBasedStreamConfig) GetInputSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.InputSchema
+}
+
+func (o *SourceS3FileBasedStreamConfig) GetLegacyPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LegacyPrefix
+}
+
+func (o *SourceS3FileBasedStreamConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceS3FileBasedStreamConfig) GetPrimaryKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrimaryKey
+}
+
+func (o *SourceS3FileBasedStreamConfig) GetSchemaless() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Schemaless
+}
+
+func (o *SourceS3FileBasedStreamConfig) GetValidationPolicy() *SourceS3ValidationPolicy {
+ if o == nil {
+ return nil
+ }
+ return o.ValidationPolicy
}
// SourceS3 - NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes
@@ -881,8 +1445,8 @@ type SourceS3 struct {
Bucket string `json:"bucket"`
// Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores.
Dataset *string `json:"dataset,omitempty"`
- // Endpoint to an S3 compatible service. Leave empty to use AWS.
- Endpoint *string `json:"endpoint,omitempty"`
+ // Endpoint to an S3 compatible service. Leave empty to use AWS. The custom endpoint must be secure, but the 'https' prefix is not required.
+ Endpoint *string `default:"" json:"endpoint"`
// Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate
Format *SourceS3FileFormat `json:"format,omitempty"`
// Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files.
@@ -890,10 +1454,102 @@ type SourceS3 struct {
// Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services
Provider *SourceS3S3AmazonWebServices `json:"provider,omitempty"`
// Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.
- Schema *string `json:"schema,omitempty"`
- SourceType SourceS3S3 `json:"sourceType"`
+ Schema *string `default:"{}" json:"schema"`
+ sourceType SourceS3S3 `const:"s3" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
// Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
Streams []SourceS3FileBasedStreamConfig `json:"streams"`
}
+
+func (s SourceS3) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3) GetAwsAccessKeyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsAccessKeyID
+}
+
+func (o *SourceS3) GetAwsSecretAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsSecretAccessKey
+}
+
+func (o *SourceS3) GetBucket() string {
+ if o == nil {
+ return ""
+ }
+ return o.Bucket
+}
+
+func (o *SourceS3) GetDataset() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Dataset
+}
+
+func (o *SourceS3) GetEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Endpoint
+}
+
+func (o *SourceS3) GetFormat() *SourceS3FileFormat {
+ if o == nil {
+ return nil
+ }
+ return o.Format
+}
+
+func (o *SourceS3) GetPathPattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PathPattern
+}
+
+func (o *SourceS3) GetProvider() *SourceS3S3AmazonWebServices {
+ if o == nil {
+ return nil
+ }
+ return o.Provider
+}
+
+func (o *SourceS3) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *SourceS3) GetSourceType() SourceS3S3 {
+ return SourceS3S3S3
+}
+
+func (o *SourceS3) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceS3) GetStreams() []SourceS3FileBasedStreamConfig {
+ if o == nil {
+ return []SourceS3FileBasedStreamConfig{}
+ }
+ return o.Streams
+}
diff --git a/internal/sdk/pkg/models/shared/sources3createrequest.go b/internal/sdk/pkg/models/shared/sources3createrequest.go
old mode 100755
new mode 100644
index 1bec6ba14..fde2307c9
--- a/internal/sdk/pkg/models/shared/sources3createrequest.go
+++ b/internal/sdk/pkg/models/shared/sources3createrequest.go
@@ -6,8 +6,46 @@ type SourceS3CreateRequest struct {
// NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes
// because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK.
Configuration SourceS3 `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceS3CreateRequest) GetConfiguration() SourceS3 {
+ if o == nil {
+ return SourceS3{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceS3CreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceS3CreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceS3CreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceS3CreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sources3putrequest.go b/internal/sdk/pkg/models/shared/sources3putrequest.go
old mode 100755
new mode 100644
index e622dbcfb..af44ecdd3
--- a/internal/sdk/pkg/models/shared/sources3putrequest.go
+++ b/internal/sdk/pkg/models/shared/sources3putrequest.go
@@ -9,3 +9,24 @@ type SourceS3PutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceS3PutRequest) GetConfiguration() SourceS3Update {
+ if o == nil {
+ return SourceS3Update{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceS3PutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceS3PutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sources3update.go b/internal/sdk/pkg/models/shared/sources3update.go
old mode 100755
new mode 100644
index 322258501..03940d9f5
--- a/internal/sdk/pkg/models/shared/sources3update.go
+++ b/internal/sdk/pkg/models/shared/sources3update.go
@@ -3,51 +3,51 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceS3UpdateFileFormatJsonlFiletype string
+type SourceS3UpdateSchemasFormatFiletype string
const (
- SourceS3UpdateFileFormatJsonlFiletypeJsonl SourceS3UpdateFileFormatJsonlFiletype = "jsonl"
+ SourceS3UpdateSchemasFormatFiletypeJsonl SourceS3UpdateSchemasFormatFiletype = "jsonl"
)
-func (e SourceS3UpdateFileFormatJsonlFiletype) ToPointer() *SourceS3UpdateFileFormatJsonlFiletype {
+func (e SourceS3UpdateSchemasFormatFiletype) ToPointer() *SourceS3UpdateSchemasFormatFiletype {
return &e
}
-func (e *SourceS3UpdateFileFormatJsonlFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateSchemasFormatFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "jsonl":
- *e = SourceS3UpdateFileFormatJsonlFiletype(v)
+ *e = SourceS3UpdateSchemasFormatFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileFormatJsonlFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateSchemasFormatFiletype: %v", v)
}
}
-// SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior - How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details
-type SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior string
+// UnexpectedFieldBehavior - How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details
+type UnexpectedFieldBehavior string
const (
- SourceS3UpdateFileFormatJsonlUnexpectedFieldBehaviorIgnore SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior = "ignore"
- SourceS3UpdateFileFormatJsonlUnexpectedFieldBehaviorInfer SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior = "infer"
- SourceS3UpdateFileFormatJsonlUnexpectedFieldBehaviorError SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior = "error"
+ UnexpectedFieldBehaviorIgnore UnexpectedFieldBehavior = "ignore"
+ UnexpectedFieldBehaviorInfer UnexpectedFieldBehavior = "infer"
+ UnexpectedFieldBehaviorError UnexpectedFieldBehavior = "error"
)
-func (e SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior) ToPointer() *SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior {
+func (e UnexpectedFieldBehavior) ToPointer() *UnexpectedFieldBehavior {
return &e
}
-func (e *SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior) UnmarshalJSON(data []byte) error {
+func (e *UnexpectedFieldBehavior) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -58,227 +58,390 @@ func (e *SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior) UnmarshalJSON(dat
case "infer":
fallthrough
case "error":
- *e = SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior(v)
+ *e = UnexpectedFieldBehavior(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior: %v", v)
+ return fmt.Errorf("invalid value for UnexpectedFieldBehavior: %v", v)
}
}
-// SourceS3UpdateFileFormatJsonl - This connector uses PyArrow for JSON Lines (jsonl) file parsing.
-type SourceS3UpdateFileFormatJsonl struct {
+// Jsonl - This connector uses PyArrow for JSON Lines (jsonl) file parsing.
+type Jsonl struct {
// The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
- BlockSize *int64 `json:"block_size,omitempty"`
- Filetype *SourceS3UpdateFileFormatJsonlFiletype `json:"filetype,omitempty"`
+ BlockSize *int64 `default:"0" json:"block_size"`
+ filetype *SourceS3UpdateSchemasFormatFiletype `const:"jsonl" json:"filetype"`
// Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.
- NewlinesInValues *bool `json:"newlines_in_values,omitempty"`
+ NewlinesInValues *bool `default:"false" json:"newlines_in_values"`
// How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details
- UnexpectedFieldBehavior *SourceS3UpdateFileFormatJsonlUnexpectedFieldBehavior `json:"unexpected_field_behavior,omitempty"`
+ UnexpectedFieldBehavior *UnexpectedFieldBehavior `default:"infer" json:"unexpected_field_behavior"`
}
-type SourceS3UpdateFileFormatAvroFiletype string
+func (j Jsonl) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(j, "", false)
+}
+
+func (j *Jsonl) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &j, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Jsonl) GetBlockSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BlockSize
+}
+
+func (o *Jsonl) GetFiletype() *SourceS3UpdateSchemasFormatFiletype {
+ return SourceS3UpdateSchemasFormatFiletypeJsonl.ToPointer()
+}
+
+func (o *Jsonl) GetNewlinesInValues() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.NewlinesInValues
+}
+
+func (o *Jsonl) GetUnexpectedFieldBehavior() *UnexpectedFieldBehavior {
+ if o == nil {
+ return nil
+ }
+ return o.UnexpectedFieldBehavior
+}
+
+type SourceS3UpdateSchemasFiletype string
const (
- SourceS3UpdateFileFormatAvroFiletypeAvro SourceS3UpdateFileFormatAvroFiletype = "avro"
+ SourceS3UpdateSchemasFiletypeAvro SourceS3UpdateSchemasFiletype = "avro"
)
-func (e SourceS3UpdateFileFormatAvroFiletype) ToPointer() *SourceS3UpdateFileFormatAvroFiletype {
+func (e SourceS3UpdateSchemasFiletype) ToPointer() *SourceS3UpdateSchemasFiletype {
return &e
}
-func (e *SourceS3UpdateFileFormatAvroFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateSchemasFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "avro":
- *e = SourceS3UpdateFileFormatAvroFiletype(v)
+ *e = SourceS3UpdateSchemasFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileFormatAvroFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateSchemasFiletype: %v", v)
}
}
-// SourceS3UpdateFileFormatAvro - This connector utilises fastavro for Avro parsing.
-type SourceS3UpdateFileFormatAvro struct {
- Filetype *SourceS3UpdateFileFormatAvroFiletype `json:"filetype,omitempty"`
+// Avro - This connector utilises fastavro for Avro parsing.
+type Avro struct {
+ filetype *SourceS3UpdateSchemasFiletype `const:"avro" json:"filetype"`
+}
+
+func (a Avro) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
}
-type SourceS3UpdateFileFormatParquetFiletype string
+func (a *Avro) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Avro) GetFiletype() *SourceS3UpdateSchemasFiletype {
+ return SourceS3UpdateSchemasFiletypeAvro.ToPointer()
+}
+
+type SourceS3UpdateFiletype string
const (
- SourceS3UpdateFileFormatParquetFiletypeParquet SourceS3UpdateFileFormatParquetFiletype = "parquet"
+ SourceS3UpdateFiletypeParquet SourceS3UpdateFiletype = "parquet"
)
-func (e SourceS3UpdateFileFormatParquetFiletype) ToPointer() *SourceS3UpdateFileFormatParquetFiletype {
+func (e SourceS3UpdateFiletype) ToPointer() *SourceS3UpdateFiletype {
return &e
}
-func (e *SourceS3UpdateFileFormatParquetFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "parquet":
- *e = SourceS3UpdateFileFormatParquetFiletype(v)
+ *e = SourceS3UpdateFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileFormatParquetFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateFiletype: %v", v)
}
}
-// SourceS3UpdateFileFormatParquet - This connector utilises PyArrow (Apache Arrow) for Parquet parsing.
-type SourceS3UpdateFileFormatParquet struct {
+// Parquet - This connector utilises PyArrow (Apache Arrow) for Parquet parsing.
+type Parquet struct {
// Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.
- BatchSize *int64 `json:"batch_size,omitempty"`
+ BatchSize *int64 `default:"65536" json:"batch_size"`
// Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.
- BufferSize *int64 `json:"buffer_size,omitempty"`
+ BufferSize *int64 `default:"2" json:"buffer_size"`
// If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns.
- Columns []string `json:"columns,omitempty"`
- Filetype *SourceS3UpdateFileFormatParquetFiletype `json:"filetype,omitempty"`
+ Columns []string `json:"columns,omitempty"`
+ filetype *SourceS3UpdateFiletype `const:"parquet" json:"filetype"`
+}
+
+func (p Parquet) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(p, "", false)
+}
+
+func (p *Parquet) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &p, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Parquet) GetBatchSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BatchSize
+}
+
+func (o *Parquet) GetBufferSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BufferSize
+}
+
+func (o *Parquet) GetColumns() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Columns
}
-type SourceS3UpdateFileFormatCSVFiletype string
+func (o *Parquet) GetFiletype() *SourceS3UpdateFiletype {
+ return SourceS3UpdateFiletypeParquet.ToPointer()
+}
+
+type SourceS3UpdateSchemasFormatFileFormatFiletype string
const (
- SourceS3UpdateFileFormatCSVFiletypeCsv SourceS3UpdateFileFormatCSVFiletype = "csv"
+ SourceS3UpdateSchemasFormatFileFormatFiletypeCsv SourceS3UpdateSchemasFormatFileFormatFiletype = "csv"
)
-func (e SourceS3UpdateFileFormatCSVFiletype) ToPointer() *SourceS3UpdateFileFormatCSVFiletype {
+func (e SourceS3UpdateSchemasFormatFileFormatFiletype) ToPointer() *SourceS3UpdateSchemasFormatFileFormatFiletype {
return &e
}
-func (e *SourceS3UpdateFileFormatCSVFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateSchemasFormatFileFormatFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "csv":
- *e = SourceS3UpdateFileFormatCSVFiletype(v)
+ *e = SourceS3UpdateSchemasFormatFileFormatFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileFormatCSVFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateSchemasFormatFileFormatFiletype: %v", v)
}
}
-// SourceS3UpdateFileFormatCSV - This connector utilises PyArrow (Apache Arrow) for CSV parsing.
-type SourceS3UpdateFileFormatCSV struct {
+// Csv - This connector utilises PyArrow (Apache Arrow) for CSV parsing.
+type Csv struct {
// Optionally add a valid JSON string here to provide additional options to the csv reader. Mappings must correspond to options detailed here. 'column_types' is used internally to handle schema so overriding that would likely cause problems.
AdditionalReaderOptions *string `json:"additional_reader_options,omitempty"`
// Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above.
AdvancedOptions *string `json:"advanced_options,omitempty"`
// The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.
- BlockSize *int64 `json:"block_size,omitempty"`
+ BlockSize *int64 `default:"10000" json:"block_size"`
// The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
- Delimiter *string `json:"delimiter,omitempty"`
+ Delimiter *string `default:"," json:"delimiter"`
// Whether two quotes in a quoted CSV value denote a single quote in the data.
- DoubleQuote *bool `json:"double_quote,omitempty"`
+ DoubleQuote *bool `default:"true" json:"double_quote"`
// The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
- Encoding *string `json:"encoding,omitempty"`
+ Encoding *string `default:"utf8" json:"encoding"`
// The character used for escaping special characters. To disallow escaping, leave this field blank.
- EscapeChar *string `json:"escape_char,omitempty"`
- Filetype *SourceS3UpdateFileFormatCSVFiletype `json:"filetype,omitempty"`
+ EscapeChar *string `json:"escape_char,omitempty"`
+ filetype *SourceS3UpdateSchemasFormatFileFormatFiletype `const:"csv" json:"filetype"`
// Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings
- InferDatatypes *bool `json:"infer_datatypes,omitempty"`
+ InferDatatypes *bool `default:"true" json:"infer_datatypes"`
// Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.
- NewlinesInValues *bool `json:"newlines_in_values,omitempty"`
+ NewlinesInValues *bool `default:"false" json:"newlines_in_values"`
// The character used for quoting CSV values. To disallow quoting, make this field blank.
- QuoteChar *string `json:"quote_char,omitempty"`
+ QuoteChar *string `default:"\"" json:"quote_char"`
+}
+
+func (c Csv) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(c, "", false)
+}
+
+func (c *Csv) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &c, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *Csv) GetAdditionalReaderOptions() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AdditionalReaderOptions
+}
+
+func (o *Csv) GetAdvancedOptions() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AdvancedOptions
+}
+
+func (o *Csv) GetBlockSize() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.BlockSize
+}
+
+func (o *Csv) GetDelimiter() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Delimiter
+}
+
+func (o *Csv) GetDoubleQuote() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleQuote
+}
+
+func (o *Csv) GetEncoding() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Encoding
+}
+
+func (o *Csv) GetEscapeChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EscapeChar
+}
+
+func (o *Csv) GetFiletype() *SourceS3UpdateSchemasFormatFileFormatFiletype {
+ return SourceS3UpdateSchemasFormatFileFormatFiletypeCsv.ToPointer()
+}
+
+func (o *Csv) GetInferDatatypes() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.InferDatatypes
+}
+
+func (o *Csv) GetNewlinesInValues() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.NewlinesInValues
+}
+
+func (o *Csv) GetQuoteChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QuoteChar
}
type SourceS3UpdateFileFormatType string
const (
- SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatCSV SourceS3UpdateFileFormatType = "source-s3-update_File Format_CSV"
- SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatParquet SourceS3UpdateFileFormatType = "source-s3-update_File Format_Parquet"
- SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatAvro SourceS3UpdateFileFormatType = "source-s3-update_File Format_Avro"
- SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatJsonl SourceS3UpdateFileFormatType = "source-s3-update_File Format_Jsonl"
+ SourceS3UpdateFileFormatTypeCsv SourceS3UpdateFileFormatType = "CSV"
+ SourceS3UpdateFileFormatTypeParquet SourceS3UpdateFileFormatType = "Parquet"
+ SourceS3UpdateFileFormatTypeAvro SourceS3UpdateFileFormatType = "Avro"
+ SourceS3UpdateFileFormatTypeJsonl SourceS3UpdateFileFormatType = "Jsonl"
)
type SourceS3UpdateFileFormat struct {
- SourceS3UpdateFileFormatCSV *SourceS3UpdateFileFormatCSV
- SourceS3UpdateFileFormatParquet *SourceS3UpdateFileFormatParquet
- SourceS3UpdateFileFormatAvro *SourceS3UpdateFileFormatAvro
- SourceS3UpdateFileFormatJsonl *SourceS3UpdateFileFormatJsonl
+ Csv *Csv
+ Parquet *Parquet
+ Avro *Avro
+ Jsonl *Jsonl
Type SourceS3UpdateFileFormatType
}
-func CreateSourceS3UpdateFileFormatSourceS3UpdateFileFormatCSV(sourceS3UpdateFileFormatCSV SourceS3UpdateFileFormatCSV) SourceS3UpdateFileFormat {
- typ := SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatCSV
+func CreateSourceS3UpdateFileFormatCsv(csv Csv) SourceS3UpdateFileFormat {
+ typ := SourceS3UpdateFileFormatTypeCsv
return SourceS3UpdateFileFormat{
- SourceS3UpdateFileFormatCSV: &sourceS3UpdateFileFormatCSV,
- Type: typ,
+ Csv: &csv,
+ Type: typ,
}
}
-func CreateSourceS3UpdateFileFormatSourceS3UpdateFileFormatParquet(sourceS3UpdateFileFormatParquet SourceS3UpdateFileFormatParquet) SourceS3UpdateFileFormat {
- typ := SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatParquet
+func CreateSourceS3UpdateFileFormatParquet(parquet Parquet) SourceS3UpdateFileFormat {
+ typ := SourceS3UpdateFileFormatTypeParquet
return SourceS3UpdateFileFormat{
- SourceS3UpdateFileFormatParquet: &sourceS3UpdateFileFormatParquet,
- Type: typ,
+ Parquet: &parquet,
+ Type: typ,
}
}
-func CreateSourceS3UpdateFileFormatSourceS3UpdateFileFormatAvro(sourceS3UpdateFileFormatAvro SourceS3UpdateFileFormatAvro) SourceS3UpdateFileFormat {
- typ := SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatAvro
+func CreateSourceS3UpdateFileFormatAvro(avro Avro) SourceS3UpdateFileFormat {
+ typ := SourceS3UpdateFileFormatTypeAvro
return SourceS3UpdateFileFormat{
- SourceS3UpdateFileFormatAvro: &sourceS3UpdateFileFormatAvro,
- Type: typ,
+ Avro: &avro,
+ Type: typ,
}
}
-func CreateSourceS3UpdateFileFormatSourceS3UpdateFileFormatJsonl(sourceS3UpdateFileFormatJsonl SourceS3UpdateFileFormatJsonl) SourceS3UpdateFileFormat {
- typ := SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatJsonl
+func CreateSourceS3UpdateFileFormatJsonl(jsonl Jsonl) SourceS3UpdateFileFormat {
+ typ := SourceS3UpdateFileFormatTypeJsonl
return SourceS3UpdateFileFormat{
- SourceS3UpdateFileFormatJsonl: &sourceS3UpdateFileFormatJsonl,
- Type: typ,
+ Jsonl: &jsonl,
+ Type: typ,
}
}
func (u *SourceS3UpdateFileFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
- sourceS3UpdateFileFormatAvro := new(SourceS3UpdateFileFormatAvro)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3UpdateFileFormatAvro); err == nil {
- u.SourceS3UpdateFileFormatAvro = sourceS3UpdateFileFormatAvro
- u.Type = SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatAvro
+ avro := new(Avro)
+ if err := utils.UnmarshalJSON(data, &avro, "", true, true); err == nil {
+ u.Avro = avro
+ u.Type = SourceS3UpdateFileFormatTypeAvro
return nil
}
- sourceS3UpdateFileFormatParquet := new(SourceS3UpdateFileFormatParquet)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3UpdateFileFormatParquet); err == nil {
- u.SourceS3UpdateFileFormatParquet = sourceS3UpdateFileFormatParquet
- u.Type = SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatParquet
+ parquet := new(Parquet)
+ if err := utils.UnmarshalJSON(data, &parquet, "", true, true); err == nil {
+ u.Parquet = parquet
+ u.Type = SourceS3UpdateFileFormatTypeParquet
return nil
}
- sourceS3UpdateFileFormatJsonl := new(SourceS3UpdateFileFormatJsonl)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3UpdateFileFormatJsonl); err == nil {
- u.SourceS3UpdateFileFormatJsonl = sourceS3UpdateFileFormatJsonl
- u.Type = SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatJsonl
+ jsonl := new(Jsonl)
+ if err := utils.UnmarshalJSON(data, &jsonl, "", true, true); err == nil {
+ u.Jsonl = jsonl
+ u.Type = SourceS3UpdateFileFormatTypeJsonl
return nil
}
- sourceS3UpdateFileFormatCSV := new(SourceS3UpdateFileFormatCSV)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3UpdateFileFormatCSV); err == nil {
- u.SourceS3UpdateFileFormatCSV = sourceS3UpdateFileFormatCSV
- u.Type = SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatCSV
+ csv := new(Csv)
+ if err := utils.UnmarshalJSON(data, &csv, "", true, true); err == nil {
+ u.Csv = csv
+ u.Type = SourceS3UpdateFileFormatTypeCsv
return nil
}
@@ -286,27 +449,27 @@ func (u *SourceS3UpdateFileFormat) UnmarshalJSON(data []byte) error {
}
func (u SourceS3UpdateFileFormat) MarshalJSON() ([]byte, error) {
- if u.SourceS3UpdateFileFormatAvro != nil {
- return json.Marshal(u.SourceS3UpdateFileFormatAvro)
+ if u.Csv != nil {
+ return utils.MarshalJSON(u.Csv, "", true)
}
- if u.SourceS3UpdateFileFormatParquet != nil {
- return json.Marshal(u.SourceS3UpdateFileFormatParquet)
+ if u.Parquet != nil {
+ return utils.MarshalJSON(u.Parquet, "", true)
}
- if u.SourceS3UpdateFileFormatJsonl != nil {
- return json.Marshal(u.SourceS3UpdateFileFormatJsonl)
+ if u.Avro != nil {
+ return utils.MarshalJSON(u.Avro, "", true)
}
- if u.SourceS3UpdateFileFormatCSV != nil {
- return json.Marshal(u.SourceS3UpdateFileFormatCSV)
+ if u.Jsonl != nil {
+ return utils.MarshalJSON(u.Jsonl, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceS3UpdateS3AmazonWebServices - Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services
-type SourceS3UpdateS3AmazonWebServices struct {
+// S3AmazonWebServices - Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services
+type S3AmazonWebServices struct {
// In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
AwsAccessKeyID *string `json:"aws_access_key_id,omitempty"`
// In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
@@ -314,291 +477,479 @@ type SourceS3UpdateS3AmazonWebServices struct {
// Name of the S3 bucket where the file(s) exist.
Bucket *string `json:"bucket,omitempty"`
// Endpoint to an S3 compatible service. Leave empty to use AWS.
- Endpoint *string `json:"endpoint,omitempty"`
+ Endpoint *string `default:"" json:"endpoint"`
// By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate.
- PathPrefix *string `json:"path_prefix,omitempty"`
+ PathPrefix *string `default:"" json:"path_prefix"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
}
-type SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype string
+func (s S3AmazonWebServices) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *S3AmazonWebServices) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *S3AmazonWebServices) GetAwsAccessKeyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsAccessKeyID
+}
+
+func (o *S3AmazonWebServices) GetAwsSecretAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsSecretAccessKey
+}
+
+func (o *S3AmazonWebServices) GetBucket() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Bucket
+}
+
+func (o *S3AmazonWebServices) GetEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Endpoint
+}
+
+func (o *S3AmazonWebServices) GetPathPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PathPrefix
+}
+
+func (o *S3AmazonWebServices) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+type SourceS3UpdateSchemasStreamsFormatFormat5Filetype string
+
+const (
+ SourceS3UpdateSchemasStreamsFormatFormat5FiletypeUnstructured SourceS3UpdateSchemasStreamsFormatFormat5Filetype = "unstructured"
+)
+
+func (e SourceS3UpdateSchemasStreamsFormatFormat5Filetype) ToPointer() *SourceS3UpdateSchemasStreamsFormatFormat5Filetype {
+ return &e
+}
+
+func (e *SourceS3UpdateSchemasStreamsFormatFormat5Filetype) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "unstructured":
+ *e = SourceS3UpdateSchemasStreamsFormatFormat5Filetype(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for SourceS3UpdateSchemasStreamsFormatFormat5Filetype: %v", v)
+ }
+}
+
+// SourceS3UpdateDocumentFileTypeFormatExperimental - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
+type SourceS3UpdateDocumentFileTypeFormatExperimental struct {
+ filetype *SourceS3UpdateSchemasStreamsFormatFormat5Filetype `const:"unstructured" json:"filetype"`
+ // If true, skip files that cannot be parsed because of their file type and log a warning. If false, fail the sync. Corrupted files with valid file types will still result in a failed sync.
+ SkipUnprocessableFileTypes *bool `default:"true" json:"skip_unprocessable_file_types"`
+}
+
+func (s SourceS3UpdateDocumentFileTypeFormatExperimental) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3UpdateDocumentFileTypeFormatExperimental) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3UpdateDocumentFileTypeFormatExperimental) GetFiletype() *SourceS3UpdateSchemasStreamsFormatFormat5Filetype {
+ return SourceS3UpdateSchemasStreamsFormatFormat5FiletypeUnstructured.ToPointer()
+}
+
+func (o *SourceS3UpdateDocumentFileTypeFormatExperimental) GetSkipUnprocessableFileTypes() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.SkipUnprocessableFileTypes
+}
+
+type SourceS3UpdateSchemasStreamsFormatFormat4Filetype string
const (
- SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletypeParquet SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype = "parquet"
+ SourceS3UpdateSchemasStreamsFormatFormat4FiletypeParquet SourceS3UpdateSchemasStreamsFormatFormat4Filetype = "parquet"
)
-func (e SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype {
+func (e SourceS3UpdateSchemasStreamsFormatFormat4Filetype) ToPointer() *SourceS3UpdateSchemasStreamsFormatFormat4Filetype {
return &e
}
-func (e *SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateSchemasStreamsFormatFormat4Filetype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "parquet":
- *e = SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype(v)
+ *e = SourceS3UpdateSchemasStreamsFormatFormat4Filetype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateSchemasStreamsFormatFormat4Filetype: %v", v)
}
}
-// SourceS3UpdateFileBasedStreamConfigFormatParquetFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
-type SourceS3UpdateFileBasedStreamConfigFormatParquetFormat struct {
+// SourceS3UpdateParquetFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceS3UpdateParquetFormat struct {
// Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.
- DecimalAsFloat *bool `json:"decimal_as_float,omitempty"`
- Filetype *SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype `json:"filetype,omitempty"`
+ DecimalAsFloat *bool `default:"false" json:"decimal_as_float"`
+ filetype *SourceS3UpdateSchemasStreamsFormatFormat4Filetype `const:"parquet" json:"filetype"`
+}
+
+func (s SourceS3UpdateParquetFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3UpdateParquetFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3UpdateParquetFormat) GetDecimalAsFloat() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DecimalAsFloat
+}
+
+func (o *SourceS3UpdateParquetFormat) GetFiletype() *SourceS3UpdateSchemasStreamsFormatFormat4Filetype {
+ return SourceS3UpdateSchemasStreamsFormatFormat4FiletypeParquet.ToPointer()
}
-type SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype string
+type SourceS3UpdateSchemasStreamsFormatFormatFiletype string
const (
- SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletypeJsonl SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype = "jsonl"
+ SourceS3UpdateSchemasStreamsFormatFormatFiletypeJsonl SourceS3UpdateSchemasStreamsFormatFormatFiletype = "jsonl"
)
-func (e SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype {
+func (e SourceS3UpdateSchemasStreamsFormatFormatFiletype) ToPointer() *SourceS3UpdateSchemasStreamsFormatFormatFiletype {
return &e
}
-func (e *SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateSchemasStreamsFormatFormatFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "jsonl":
- *e = SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype(v)
+ *e = SourceS3UpdateSchemasStreamsFormatFormatFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateSchemasStreamsFormatFormatFiletype: %v", v)
+ }
+}
+
+// SourceS3UpdateJsonlFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceS3UpdateJsonlFormat struct {
+ filetype *SourceS3UpdateSchemasStreamsFormatFormatFiletype `const:"jsonl" json:"filetype"`
+}
+
+func (s SourceS3UpdateJsonlFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3UpdateJsonlFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
-type SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat struct {
- Filetype *SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype `json:"filetype,omitempty"`
+func (o *SourceS3UpdateJsonlFormat) GetFiletype() *SourceS3UpdateSchemasStreamsFormatFormatFiletype {
+ return SourceS3UpdateSchemasStreamsFormatFormatFiletypeJsonl.ToPointer()
}
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype string
+type SourceS3UpdateSchemasStreamsFormatFiletype string
const (
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletypeCsv SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype = "csv"
+ SourceS3UpdateSchemasStreamsFormatFiletypeCsv SourceS3UpdateSchemasStreamsFormatFiletype = "csv"
)
-func (e SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype {
+func (e SourceS3UpdateSchemasStreamsFormatFiletype) ToPointer() *SourceS3UpdateSchemasStreamsFormatFiletype {
return &e
}
-func (e *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateSchemasStreamsFormatFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "csv":
- *e = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype(v)
+ *e = SourceS3UpdateSchemasStreamsFormatFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateSchemasStreamsFormatFiletype: %v", v)
}
}
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType string
+type SourceS3UpdateSchemasStreamsHeaderDefinitionType string
const (
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionTypeUserProvided SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType = "User Provided"
+ SourceS3UpdateSchemasStreamsHeaderDefinitionTypeUserProvided SourceS3UpdateSchemasStreamsHeaderDefinitionType = "User Provided"
)
-func (e SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType {
+func (e SourceS3UpdateSchemasStreamsHeaderDefinitionType) ToPointer() *SourceS3UpdateSchemasStreamsHeaderDefinitionType {
return &e
}
-func (e *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateSchemasStreamsHeaderDefinitionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "User Provided":
- *e = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType(v)
+ *e = SourceS3UpdateSchemasStreamsHeaderDefinitionType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateSchemasStreamsHeaderDefinitionType: %v", v)
}
}
-// SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided struct {
+// SourceS3UpdateUserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceS3UpdateUserProvided struct {
// The column names that will be used while emitting the CSV records
- ColumnNames []string `json:"column_names"`
- HeaderDefinitionType *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType `json:"header_definition_type,omitempty"`
+ ColumnNames []string `json:"column_names"`
+ headerDefinitionType *SourceS3UpdateSchemasStreamsHeaderDefinitionType `const:"User Provided" json:"header_definition_type"`
+}
+
+func (s SourceS3UpdateUserProvided) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3UpdateUserProvided) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3UpdateUserProvided) GetColumnNames() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.ColumnNames
+}
+
+func (o *SourceS3UpdateUserProvided) GetHeaderDefinitionType() *SourceS3UpdateSchemasStreamsHeaderDefinitionType {
+ return SourceS3UpdateSchemasStreamsHeaderDefinitionTypeUserProvided.ToPointer()
}
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType string
+type SourceS3UpdateSchemasHeaderDefinitionType string
const (
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionTypeAutogenerated SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType = "Autogenerated"
+ SourceS3UpdateSchemasHeaderDefinitionTypeAutogenerated SourceS3UpdateSchemasHeaderDefinitionType = "Autogenerated"
)
-func (e SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType {
+func (e SourceS3UpdateSchemasHeaderDefinitionType) ToPointer() *SourceS3UpdateSchemasHeaderDefinitionType {
return &e
}
-func (e *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateSchemasHeaderDefinitionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Autogenerated":
- *e = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType(v)
+ *e = SourceS3UpdateSchemasHeaderDefinitionType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateSchemasHeaderDefinitionType: %v", v)
}
}
-// SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated struct {
- HeaderDefinitionType *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType `json:"header_definition_type,omitempty"`
+// SourceS3UpdateAutogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceS3UpdateAutogenerated struct {
+ headerDefinitionType *SourceS3UpdateSchemasHeaderDefinitionType `const:"Autogenerated" json:"header_definition_type"`
}
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType string
+func (s SourceS3UpdateAutogenerated) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3UpdateAutogenerated) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3UpdateAutogenerated) GetHeaderDefinitionType() *SourceS3UpdateSchemasHeaderDefinitionType {
+ return SourceS3UpdateSchemasHeaderDefinitionTypeAutogenerated.ToPointer()
+}
+
+type SourceS3UpdateHeaderDefinitionType string
const (
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionTypeFromCsv SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType = "From CSV"
+ SourceS3UpdateHeaderDefinitionTypeFromCsv SourceS3UpdateHeaderDefinitionType = "From CSV"
)
-func (e SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType {
+func (e SourceS3UpdateHeaderDefinitionType) ToPointer() *SourceS3UpdateHeaderDefinitionType {
return &e
}
-func (e *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateHeaderDefinitionType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "From CSV":
- *e = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType(v)
+ *e = SourceS3UpdateHeaderDefinitionType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateHeaderDefinitionType: %v", v)
+ }
+}
+
+// SourceS3UpdateFromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
+type SourceS3UpdateFromCSV struct {
+ headerDefinitionType *SourceS3UpdateHeaderDefinitionType `const:"From CSV" json:"header_definition_type"`
+}
+
+func (s SourceS3UpdateFromCSV) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3UpdateFromCSV) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV struct {
- HeaderDefinitionType *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType `json:"header_definition_type,omitempty"`
+func (o *SourceS3UpdateFromCSV) GetHeaderDefinitionType() *SourceS3UpdateHeaderDefinitionType {
+ return SourceS3UpdateHeaderDefinitionTypeFromCsv.ToPointer()
}
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType string
+type SourceS3UpdateCSVHeaderDefinitionType string
const (
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3-update_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_From CSV"
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3-update_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_Autogenerated"
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3-update_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_User Provided"
+ SourceS3UpdateCSVHeaderDefinitionTypeSourceS3UpdateFromCSV SourceS3UpdateCSVHeaderDefinitionType = "source-s3-update_From CSV"
+ SourceS3UpdateCSVHeaderDefinitionTypeSourceS3UpdateAutogenerated SourceS3UpdateCSVHeaderDefinitionType = "source-s3-update_Autogenerated"
+ SourceS3UpdateCSVHeaderDefinitionTypeSourceS3UpdateUserProvided SourceS3UpdateCSVHeaderDefinitionType = "source-s3-update_User Provided"
)
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition struct {
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided
+type SourceS3UpdateCSVHeaderDefinition struct {
+ SourceS3UpdateFromCSV *SourceS3UpdateFromCSV
+ SourceS3UpdateAutogenerated *SourceS3UpdateAutogenerated
+ SourceS3UpdateUserProvided *SourceS3UpdateUserProvided
- Type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType
+ Type SourceS3UpdateCSVHeaderDefinitionType
}
-func CreateSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV(sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV) SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition {
- typ := SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV
+func CreateSourceS3UpdateCSVHeaderDefinitionSourceS3UpdateFromCSV(sourceS3UpdateFromCSV SourceS3UpdateFromCSV) SourceS3UpdateCSVHeaderDefinition {
+ typ := SourceS3UpdateCSVHeaderDefinitionTypeSourceS3UpdateFromCSV
- return SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV: &sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV,
- Type: typ,
+ return SourceS3UpdateCSVHeaderDefinition{
+ SourceS3UpdateFromCSV: &sourceS3UpdateFromCSV,
+ Type: typ,
}
}
-func CreateSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated(sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated) SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition {
- typ := SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated
+func CreateSourceS3UpdateCSVHeaderDefinitionSourceS3UpdateAutogenerated(sourceS3UpdateAutogenerated SourceS3UpdateAutogenerated) SourceS3UpdateCSVHeaderDefinition {
+ typ := SourceS3UpdateCSVHeaderDefinitionTypeSourceS3UpdateAutogenerated
- return SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated: &sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated,
- Type: typ,
+ return SourceS3UpdateCSVHeaderDefinition{
+ SourceS3UpdateAutogenerated: &sourceS3UpdateAutogenerated,
+ Type: typ,
}
}
-func CreateSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided(sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided) SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition {
- typ := SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided
+func CreateSourceS3UpdateCSVHeaderDefinitionSourceS3UpdateUserProvided(sourceS3UpdateUserProvided SourceS3UpdateUserProvided) SourceS3UpdateCSVHeaderDefinition {
+ typ := SourceS3UpdateCSVHeaderDefinitionTypeSourceS3UpdateUserProvided
- return SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided: &sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided,
- Type: typ,
+ return SourceS3UpdateCSVHeaderDefinition{
+ SourceS3UpdateUserProvided: &sourceS3UpdateUserProvided,
+ Type: typ,
}
}
-func (u *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *SourceS3UpdateCSVHeaderDefinition) UnmarshalJSON(data []byte) error {
- sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV := new(SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV); err == nil {
- u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV = sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV
- u.Type = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV
+ sourceS3UpdateFromCSV := new(SourceS3UpdateFromCSV)
+ if err := utils.UnmarshalJSON(data, &sourceS3UpdateFromCSV, "", true, true); err == nil {
+ u.SourceS3UpdateFromCSV = sourceS3UpdateFromCSV
+ u.Type = SourceS3UpdateCSVHeaderDefinitionTypeSourceS3UpdateFromCSV
return nil
}
- sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated := new(SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated); err == nil {
- u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated = sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated
- u.Type = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated
+ sourceS3UpdateAutogenerated := new(SourceS3UpdateAutogenerated)
+ if err := utils.UnmarshalJSON(data, &sourceS3UpdateAutogenerated, "", true, true); err == nil {
+ u.SourceS3UpdateAutogenerated = sourceS3UpdateAutogenerated
+ u.Type = SourceS3UpdateCSVHeaderDefinitionTypeSourceS3UpdateAutogenerated
return nil
}
- sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided := new(SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided); err == nil {
- u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided = sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided
- u.Type = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided
+ sourceS3UpdateUserProvided := new(SourceS3UpdateUserProvided)
+ if err := utils.UnmarshalJSON(data, &sourceS3UpdateUserProvided, "", true, true); err == nil {
+ u.SourceS3UpdateUserProvided = sourceS3UpdateUserProvided
+ u.Type = SourceS3UpdateCSVHeaderDefinitionTypeSourceS3UpdateUserProvided
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition) MarshalJSON() ([]byte, error) {
- if u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil {
- return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV)
+func (u SourceS3UpdateCSVHeaderDefinition) MarshalJSON() ([]byte, error) {
+ if u.SourceS3UpdateFromCSV != nil {
+ return utils.MarshalJSON(u.SourceS3UpdateFromCSV, "", true)
}
- if u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil {
- return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated)
+ if u.SourceS3UpdateAutogenerated != nil {
+ return utils.MarshalJSON(u.SourceS3UpdateAutogenerated, "", true)
}
- if u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil {
- return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided)
+ if u.SourceS3UpdateUserProvided != nil {
+ return utils.MarshalJSON(u.SourceS3UpdateUserProvided, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType - How to infer the types of the columns. If none, inference default to strings.
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType string
+// SourceS3UpdateInferenceType - How to infer the types of the columns. If none, inference default to strings.
+type SourceS3UpdateInferenceType string
const (
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceTypeNone SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType = "None"
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceTypePrimitiveTypesOnly SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType = "Primitive Types Only"
+ SourceS3UpdateInferenceTypeNone SourceS3UpdateInferenceType = "None"
+ SourceS3UpdateInferenceTypePrimitiveTypesOnly SourceS3UpdateInferenceType = "Primitive Types Only"
)
-func (e SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType {
+func (e SourceS3UpdateInferenceType) ToPointer() *SourceS3UpdateInferenceType {
return &e
}
-func (e *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateInferenceType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -607,205 +958,346 @@ func (e *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType) Unmars
case "None":
fallthrough
case "Primitive Types Only":
- *e = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType(v)
+ *e = SourceS3UpdateInferenceType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateInferenceType: %v", v)
}
}
-// SourceS3UpdateFileBasedStreamConfigFormatCSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
-type SourceS3UpdateFileBasedStreamConfigFormatCSVFormat struct {
+// SourceS3UpdateCSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceS3UpdateCSVFormat struct {
// The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.
- Delimiter *string `json:"delimiter,omitempty"`
+ Delimiter *string `default:"," json:"delimiter"`
// Whether two quotes in a quoted CSV value denote a single quote in the data.
- DoubleQuote *bool `json:"double_quote,omitempty"`
+ DoubleQuote *bool `default:"true" json:"double_quote"`
// The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.
- Encoding *string `json:"encoding,omitempty"`
+ Encoding *string `default:"utf8" json:"encoding"`
// The character used for escaping special characters. To disallow escaping, leave this field blank.
EscapeChar *string `json:"escape_char,omitempty"`
// A set of case-sensitive strings that should be interpreted as false values.
- FalseValues []string `json:"false_values,omitempty"`
- Filetype *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype `json:"filetype,omitempty"`
+ FalseValues []string `json:"false_values,omitempty"`
+ filetype *SourceS3UpdateSchemasStreamsFormatFiletype `const:"csv" json:"filetype"`
// How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.
- HeaderDefinition *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition `json:"header_definition,omitempty"`
+ HeaderDefinition *SourceS3UpdateCSVHeaderDefinition `json:"header_definition,omitempty"`
// How to infer the types of the columns. If none, inference default to strings.
- InferenceType *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType `json:"inference_type,omitempty"`
+ InferenceType *SourceS3UpdateInferenceType `default:"None" json:"inference_type"`
// A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
NullValues []string `json:"null_values,omitempty"`
// The character used for quoting CSV values. To disallow quoting, make this field blank.
- QuoteChar *string `json:"quote_char,omitempty"`
+ QuoteChar *string `default:""" json:"quote_char"`
// The number of rows to skip after the header row.
- SkipRowsAfterHeader *int64 `json:"skip_rows_after_header,omitempty"`
+ SkipRowsAfterHeader *int64 `default:"0" json:"skip_rows_after_header"`
// The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.
- SkipRowsBeforeHeader *int64 `json:"skip_rows_before_header,omitempty"`
+ SkipRowsBeforeHeader *int64 `default:"0" json:"skip_rows_before_header"`
// Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.
- StringsCanBeNull *bool `json:"strings_can_be_null,omitempty"`
+ StringsCanBeNull *bool `default:"true" json:"strings_can_be_null"`
// A set of case-sensitive strings that should be interpreted as true values.
TrueValues []string `json:"true_values,omitempty"`
}
-type SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype string
+func (s SourceS3UpdateCSVFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3UpdateCSVFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3UpdateCSVFormat) GetDelimiter() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Delimiter
+}
+
+func (o *SourceS3UpdateCSVFormat) GetDoubleQuote() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleQuote
+}
+
+func (o *SourceS3UpdateCSVFormat) GetEncoding() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Encoding
+}
+
+func (o *SourceS3UpdateCSVFormat) GetEscapeChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EscapeChar
+}
+
+func (o *SourceS3UpdateCSVFormat) GetFalseValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.FalseValues
+}
+
+func (o *SourceS3UpdateCSVFormat) GetFiletype() *SourceS3UpdateSchemasStreamsFormatFiletype {
+ return SourceS3UpdateSchemasStreamsFormatFiletypeCsv.ToPointer()
+}
+
+func (o *SourceS3UpdateCSVFormat) GetHeaderDefinition() *SourceS3UpdateCSVHeaderDefinition {
+ if o == nil {
+ return nil
+ }
+ return o.HeaderDefinition
+}
+
+func (o *SourceS3UpdateCSVFormat) GetInferenceType() *SourceS3UpdateInferenceType {
+ if o == nil {
+ return nil
+ }
+ return o.InferenceType
+}
+
+func (o *SourceS3UpdateCSVFormat) GetNullValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.NullValues
+}
+
+func (o *SourceS3UpdateCSVFormat) GetQuoteChar() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QuoteChar
+}
+
+func (o *SourceS3UpdateCSVFormat) GetSkipRowsAfterHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsAfterHeader
+}
+
+func (o *SourceS3UpdateCSVFormat) GetSkipRowsBeforeHeader() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SkipRowsBeforeHeader
+}
+
+func (o *SourceS3UpdateCSVFormat) GetStringsCanBeNull() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.StringsCanBeNull
+}
+
+func (o *SourceS3UpdateCSVFormat) GetTrueValues() []string {
+ if o == nil {
+ return nil
+ }
+ return o.TrueValues
+}
+
+type SourceS3UpdateSchemasStreamsFiletype string
const (
- SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletypeAvro SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype = "avro"
+ SourceS3UpdateSchemasStreamsFiletypeAvro SourceS3UpdateSchemasStreamsFiletype = "avro"
)
-func (e SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype {
+func (e SourceS3UpdateSchemasStreamsFiletype) ToPointer() *SourceS3UpdateSchemasStreamsFiletype {
return &e
}
-func (e *SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateSchemasStreamsFiletype) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "avro":
- *e = SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype(v)
+ *e = SourceS3UpdateSchemasStreamsFiletype(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateSchemasStreamsFiletype: %v", v)
}
}
-// SourceS3UpdateFileBasedStreamConfigFormatAvroFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
-type SourceS3UpdateFileBasedStreamConfigFormatAvroFormat struct {
+// SourceS3UpdateAvroFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
+type SourceS3UpdateAvroFormat struct {
// Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.
- DoubleAsString *bool `json:"double_as_string,omitempty"`
- Filetype *SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype `json:"filetype,omitempty"`
+ DoubleAsString *bool `default:"false" json:"double_as_string"`
+ filetype *SourceS3UpdateSchemasStreamsFiletype `const:"avro" json:"filetype"`
+}
+
+func (s SourceS3UpdateAvroFormat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3UpdateAvroFormat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
}
-type SourceS3UpdateFileBasedStreamConfigFormatType string
+func (o *SourceS3UpdateAvroFormat) GetDoubleAsString() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.DoubleAsString
+}
+
+func (o *SourceS3UpdateAvroFormat) GetFiletype() *SourceS3UpdateSchemasStreamsFiletype {
+ return SourceS3UpdateSchemasStreamsFiletypeAvro.ToPointer()
+}
+
+type SourceS3UpdateFormatType string
const (
- SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatAvroFormat SourceS3UpdateFileBasedStreamConfigFormatType = "source-s3-update_FileBasedStreamConfig_Format_Avro Format"
- SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormat SourceS3UpdateFileBasedStreamConfigFormatType = "source-s3-update_FileBasedStreamConfig_Format_CSV Format"
- SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatJsonlFormat SourceS3UpdateFileBasedStreamConfigFormatType = "source-s3-update_FileBasedStreamConfig_Format_Jsonl Format"
- SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatParquetFormat SourceS3UpdateFileBasedStreamConfigFormatType = "source-s3-update_FileBasedStreamConfig_Format_Parquet Format"
+ SourceS3UpdateFormatTypeSourceS3UpdateAvroFormat SourceS3UpdateFormatType = "source-s3-update_Avro Format"
+ SourceS3UpdateFormatTypeSourceS3UpdateCSVFormat SourceS3UpdateFormatType = "source-s3-update_CSV Format"
+ SourceS3UpdateFormatTypeSourceS3UpdateJsonlFormat SourceS3UpdateFormatType = "source-s3-update_Jsonl Format"
+ SourceS3UpdateFormatTypeSourceS3UpdateParquetFormat SourceS3UpdateFormatType = "source-s3-update_Parquet Format"
+ SourceS3UpdateFormatTypeSourceS3UpdateDocumentFileTypeFormatExperimental SourceS3UpdateFormatType = "source-s3-update_Document File Type Format (Experimental)"
)
-type SourceS3UpdateFileBasedStreamConfigFormat struct {
- SourceS3UpdateFileBasedStreamConfigFormatAvroFormat *SourceS3UpdateFileBasedStreamConfigFormatAvroFormat
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormat *SourceS3UpdateFileBasedStreamConfigFormatCSVFormat
- SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat *SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat
- SourceS3UpdateFileBasedStreamConfigFormatParquetFormat *SourceS3UpdateFileBasedStreamConfigFormatParquetFormat
+type SourceS3UpdateFormat struct {
+ SourceS3UpdateAvroFormat *SourceS3UpdateAvroFormat
+ SourceS3UpdateCSVFormat *SourceS3UpdateCSVFormat
+ SourceS3UpdateJsonlFormat *SourceS3UpdateJsonlFormat
+ SourceS3UpdateParquetFormat *SourceS3UpdateParquetFormat
+ SourceS3UpdateDocumentFileTypeFormatExperimental *SourceS3UpdateDocumentFileTypeFormatExperimental
- Type SourceS3UpdateFileBasedStreamConfigFormatType
+ Type SourceS3UpdateFormatType
}
-func CreateSourceS3UpdateFileBasedStreamConfigFormatSourceS3UpdateFileBasedStreamConfigFormatAvroFormat(sourceS3UpdateFileBasedStreamConfigFormatAvroFormat SourceS3UpdateFileBasedStreamConfigFormatAvroFormat) SourceS3UpdateFileBasedStreamConfigFormat {
- typ := SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatAvroFormat
+func CreateSourceS3UpdateFormatSourceS3UpdateAvroFormat(sourceS3UpdateAvroFormat SourceS3UpdateAvroFormat) SourceS3UpdateFormat {
+ typ := SourceS3UpdateFormatTypeSourceS3UpdateAvroFormat
- return SourceS3UpdateFileBasedStreamConfigFormat{
- SourceS3UpdateFileBasedStreamConfigFormatAvroFormat: &sourceS3UpdateFileBasedStreamConfigFormatAvroFormat,
- Type: typ,
+ return SourceS3UpdateFormat{
+ SourceS3UpdateAvroFormat: &sourceS3UpdateAvroFormat,
+ Type: typ,
}
}
-func CreateSourceS3UpdateFileBasedStreamConfigFormatSourceS3UpdateFileBasedStreamConfigFormatCSVFormat(sourceS3UpdateFileBasedStreamConfigFormatCSVFormat SourceS3UpdateFileBasedStreamConfigFormatCSVFormat) SourceS3UpdateFileBasedStreamConfigFormat {
- typ := SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormat
+func CreateSourceS3UpdateFormatSourceS3UpdateCSVFormat(sourceS3UpdateCSVFormat SourceS3UpdateCSVFormat) SourceS3UpdateFormat {
+ typ := SourceS3UpdateFormatTypeSourceS3UpdateCSVFormat
- return SourceS3UpdateFileBasedStreamConfigFormat{
- SourceS3UpdateFileBasedStreamConfigFormatCSVFormat: &sourceS3UpdateFileBasedStreamConfigFormatCSVFormat,
- Type: typ,
+ return SourceS3UpdateFormat{
+ SourceS3UpdateCSVFormat: &sourceS3UpdateCSVFormat,
+ Type: typ,
}
}
-func CreateSourceS3UpdateFileBasedStreamConfigFormatSourceS3UpdateFileBasedStreamConfigFormatJsonlFormat(sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat) SourceS3UpdateFileBasedStreamConfigFormat {
- typ := SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatJsonlFormat
+func CreateSourceS3UpdateFormatSourceS3UpdateJsonlFormat(sourceS3UpdateJsonlFormat SourceS3UpdateJsonlFormat) SourceS3UpdateFormat {
+ typ := SourceS3UpdateFormatTypeSourceS3UpdateJsonlFormat
- return SourceS3UpdateFileBasedStreamConfigFormat{
- SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat: &sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat,
- Type: typ,
+ return SourceS3UpdateFormat{
+ SourceS3UpdateJsonlFormat: &sourceS3UpdateJsonlFormat,
+ Type: typ,
+ }
+}
+
+func CreateSourceS3UpdateFormatSourceS3UpdateParquetFormat(sourceS3UpdateParquetFormat SourceS3UpdateParquetFormat) SourceS3UpdateFormat {
+ typ := SourceS3UpdateFormatTypeSourceS3UpdateParquetFormat
+
+ return SourceS3UpdateFormat{
+ SourceS3UpdateParquetFormat: &sourceS3UpdateParquetFormat,
+ Type: typ,
}
}
-func CreateSourceS3UpdateFileBasedStreamConfigFormatSourceS3UpdateFileBasedStreamConfigFormatParquetFormat(sourceS3UpdateFileBasedStreamConfigFormatParquetFormat SourceS3UpdateFileBasedStreamConfigFormatParquetFormat) SourceS3UpdateFileBasedStreamConfigFormat {
- typ := SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatParquetFormat
+func CreateSourceS3UpdateFormatSourceS3UpdateDocumentFileTypeFormatExperimental(sourceS3UpdateDocumentFileTypeFormatExperimental SourceS3UpdateDocumentFileTypeFormatExperimental) SourceS3UpdateFormat {
+ typ := SourceS3UpdateFormatTypeSourceS3UpdateDocumentFileTypeFormatExperimental
- return SourceS3UpdateFileBasedStreamConfigFormat{
- SourceS3UpdateFileBasedStreamConfigFormatParquetFormat: &sourceS3UpdateFileBasedStreamConfigFormatParquetFormat,
+ return SourceS3UpdateFormat{
+ SourceS3UpdateDocumentFileTypeFormatExperimental: &sourceS3UpdateDocumentFileTypeFormatExperimental,
Type: typ,
}
}
-func (u *SourceS3UpdateFileBasedStreamConfigFormat) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *SourceS3UpdateFormat) UnmarshalJSON(data []byte) error {
+
+ sourceS3UpdateJsonlFormat := new(SourceS3UpdateJsonlFormat)
+ if err := utils.UnmarshalJSON(data, &sourceS3UpdateJsonlFormat, "", true, true); err == nil {
+ u.SourceS3UpdateJsonlFormat = sourceS3UpdateJsonlFormat
+ u.Type = SourceS3UpdateFormatTypeSourceS3UpdateJsonlFormat
+ return nil
+ }
- sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat := new(SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat); err == nil {
- u.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat = sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat
- u.Type = SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatJsonlFormat
+ sourceS3UpdateAvroFormat := new(SourceS3UpdateAvroFormat)
+ if err := utils.UnmarshalJSON(data, &sourceS3UpdateAvroFormat, "", true, true); err == nil {
+ u.SourceS3UpdateAvroFormat = sourceS3UpdateAvroFormat
+ u.Type = SourceS3UpdateFormatTypeSourceS3UpdateAvroFormat
return nil
}
- sourceS3UpdateFileBasedStreamConfigFormatAvroFormat := new(SourceS3UpdateFileBasedStreamConfigFormatAvroFormat)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatAvroFormat); err == nil {
- u.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat = sourceS3UpdateFileBasedStreamConfigFormatAvroFormat
- u.Type = SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatAvroFormat
+ sourceS3UpdateParquetFormat := new(SourceS3UpdateParquetFormat)
+ if err := utils.UnmarshalJSON(data, &sourceS3UpdateParquetFormat, "", true, true); err == nil {
+ u.SourceS3UpdateParquetFormat = sourceS3UpdateParquetFormat
+ u.Type = SourceS3UpdateFormatTypeSourceS3UpdateParquetFormat
return nil
}
- sourceS3UpdateFileBasedStreamConfigFormatParquetFormat := new(SourceS3UpdateFileBasedStreamConfigFormatParquetFormat)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatParquetFormat); err == nil {
- u.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat = sourceS3UpdateFileBasedStreamConfigFormatParquetFormat
- u.Type = SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatParquetFormat
+ sourceS3UpdateDocumentFileTypeFormatExperimental := new(SourceS3UpdateDocumentFileTypeFormatExperimental)
+ if err := utils.UnmarshalJSON(data, &sourceS3UpdateDocumentFileTypeFormatExperimental, "", true, true); err == nil {
+ u.SourceS3UpdateDocumentFileTypeFormatExperimental = sourceS3UpdateDocumentFileTypeFormatExperimental
+ u.Type = SourceS3UpdateFormatTypeSourceS3UpdateDocumentFileTypeFormatExperimental
return nil
}
- sourceS3UpdateFileBasedStreamConfigFormatCSVFormat := new(SourceS3UpdateFileBasedStreamConfigFormatCSVFormat)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatCSVFormat); err == nil {
- u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat = sourceS3UpdateFileBasedStreamConfigFormatCSVFormat
- u.Type = SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormat
+ sourceS3UpdateCSVFormat := new(SourceS3UpdateCSVFormat)
+ if err := utils.UnmarshalJSON(data, &sourceS3UpdateCSVFormat, "", true, true); err == nil {
+ u.SourceS3UpdateCSVFormat = sourceS3UpdateCSVFormat
+ u.Type = SourceS3UpdateFormatTypeSourceS3UpdateCSVFormat
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceS3UpdateFileBasedStreamConfigFormat) MarshalJSON() ([]byte, error) {
- if u.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat != nil {
- return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat)
+func (u SourceS3UpdateFormat) MarshalJSON() ([]byte, error) {
+ if u.SourceS3UpdateAvroFormat != nil {
+ return utils.MarshalJSON(u.SourceS3UpdateAvroFormat, "", true)
+ }
+
+ if u.SourceS3UpdateCSVFormat != nil {
+ return utils.MarshalJSON(u.SourceS3UpdateCSVFormat, "", true)
}
- if u.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat != nil {
- return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat)
+ if u.SourceS3UpdateJsonlFormat != nil {
+ return utils.MarshalJSON(u.SourceS3UpdateJsonlFormat, "", true)
}
- if u.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat != nil {
- return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat)
+ if u.SourceS3UpdateParquetFormat != nil {
+ return utils.MarshalJSON(u.SourceS3UpdateParquetFormat, "", true)
}
- if u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat != nil {
- return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat)
+ if u.SourceS3UpdateDocumentFileTypeFormatExperimental != nil {
+ return utils.MarshalJSON(u.SourceS3UpdateDocumentFileTypeFormatExperimental, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-// SourceS3UpdateFileBasedStreamConfigValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
-type SourceS3UpdateFileBasedStreamConfigValidationPolicy string
+// SourceS3UpdateValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
+type SourceS3UpdateValidationPolicy string
const (
- SourceS3UpdateFileBasedStreamConfigValidationPolicyEmitRecord SourceS3UpdateFileBasedStreamConfigValidationPolicy = "Emit Record"
- SourceS3UpdateFileBasedStreamConfigValidationPolicySkipRecord SourceS3UpdateFileBasedStreamConfigValidationPolicy = "Skip Record"
- SourceS3UpdateFileBasedStreamConfigValidationPolicyWaitForDiscover SourceS3UpdateFileBasedStreamConfigValidationPolicy = "Wait for Discover"
+ SourceS3UpdateValidationPolicyEmitRecord SourceS3UpdateValidationPolicy = "Emit Record"
+ SourceS3UpdateValidationPolicySkipRecord SourceS3UpdateValidationPolicy = "Skip Record"
+ SourceS3UpdateValidationPolicyWaitForDiscover SourceS3UpdateValidationPolicy = "Wait for Discover"
)
-func (e SourceS3UpdateFileBasedStreamConfigValidationPolicy) ToPointer() *SourceS3UpdateFileBasedStreamConfigValidationPolicy {
+func (e SourceS3UpdateValidationPolicy) ToPointer() *SourceS3UpdateValidationPolicy {
return &e
}
-func (e *SourceS3UpdateFileBasedStreamConfigValidationPolicy) UnmarshalJSON(data []byte) error {
+func (e *SourceS3UpdateValidationPolicy) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -816,20 +1308,18 @@ func (e *SourceS3UpdateFileBasedStreamConfigValidationPolicy) UnmarshalJSON(data
case "Skip Record":
fallthrough
case "Wait for Discover":
- *e = SourceS3UpdateFileBasedStreamConfigValidationPolicy(v)
+ *e = SourceS3UpdateValidationPolicy(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigValidationPolicy: %v", v)
+ return fmt.Errorf("invalid value for SourceS3UpdateValidationPolicy: %v", v)
}
}
type SourceS3UpdateFileBasedStreamConfig struct {
// When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.
- DaysToSyncIfHistoryIsFull *int64 `json:"days_to_sync_if_history_is_full,omitempty"`
- // The data file type that is being extracted for a stream.
- FileType string `json:"file_type"`
+ DaysToSyncIfHistoryIsFull *int64 `default:"3" json:"days_to_sync_if_history_is_full"`
// The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
- Format *SourceS3UpdateFileBasedStreamConfigFormat `json:"format,omitempty"`
+ Format SourceS3UpdateFormat `json:"format"`
// The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.
Globs []string `json:"globs,omitempty"`
// The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
@@ -841,9 +1331,83 @@ type SourceS3UpdateFileBasedStreamConfig struct {
// The column or columns (for a composite key) that serves as the unique identifier of a record.
PrimaryKey *string `json:"primary_key,omitempty"`
// When enabled, syncs will not validate or structure records against the stream's schema.
- Schemaless *bool `json:"schemaless,omitempty"`
+ Schemaless *bool `default:"false" json:"schemaless"`
// The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
- ValidationPolicy *SourceS3UpdateFileBasedStreamConfigValidationPolicy `json:"validation_policy,omitempty"`
+ ValidationPolicy *SourceS3UpdateValidationPolicy `default:"Emit Record" json:"validation_policy"`
+}
+
+func (s SourceS3UpdateFileBasedStreamConfig) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3UpdateFileBasedStreamConfig) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3UpdateFileBasedStreamConfig) GetDaysToSyncIfHistoryIsFull() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.DaysToSyncIfHistoryIsFull
+}
+
+func (o *SourceS3UpdateFileBasedStreamConfig) GetFormat() SourceS3UpdateFormat {
+ if o == nil {
+ return SourceS3UpdateFormat{}
+ }
+ return o.Format
+}
+
+func (o *SourceS3UpdateFileBasedStreamConfig) GetGlobs() []string {
+ if o == nil {
+ return nil
+ }
+ return o.Globs
+}
+
+func (o *SourceS3UpdateFileBasedStreamConfig) GetInputSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.InputSchema
+}
+
+func (o *SourceS3UpdateFileBasedStreamConfig) GetLegacyPrefix() *string {
+ if o == nil {
+ return nil
+ }
+ return o.LegacyPrefix
+}
+
+func (o *SourceS3UpdateFileBasedStreamConfig) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceS3UpdateFileBasedStreamConfig) GetPrimaryKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrimaryKey
+}
+
+func (o *SourceS3UpdateFileBasedStreamConfig) GetSchemaless() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.Schemaless
+}
+
+func (o *SourceS3UpdateFileBasedStreamConfig) GetValidationPolicy() *SourceS3UpdateValidationPolicy {
+ if o == nil {
+ return nil
+ }
+ return o.ValidationPolicy
}
// SourceS3Update - NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes
@@ -857,18 +1421,106 @@ type SourceS3Update struct {
Bucket string `json:"bucket"`
// Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores.
Dataset *string `json:"dataset,omitempty"`
- // Endpoint to an S3 compatible service. Leave empty to use AWS.
- Endpoint *string `json:"endpoint,omitempty"`
+ // Endpoint to an S3 compatible service. Leave empty to use AWS. The custom endpoint must be secure, but the 'https' prefix is not required.
+ Endpoint *string `default:"" json:"endpoint"`
// Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate
Format *SourceS3UpdateFileFormat `json:"format,omitempty"`
// Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files.
PathPattern *string `json:"path_pattern,omitempty"`
// Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services
- Provider *SourceS3UpdateS3AmazonWebServices `json:"provider,omitempty"`
+ Provider *S3AmazonWebServices `json:"provider,omitempty"`
// Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.
- Schema *string `json:"schema,omitempty"`
+ Schema *string `default:"{}" json:"schema"`
// UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
// Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
Streams []SourceS3UpdateFileBasedStreamConfig `json:"streams"`
}
+
+func (s SourceS3Update) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceS3Update) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceS3Update) GetAwsAccessKeyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsAccessKeyID
+}
+
+func (o *SourceS3Update) GetAwsSecretAccessKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AwsSecretAccessKey
+}
+
+func (o *SourceS3Update) GetBucket() string {
+ if o == nil {
+ return ""
+ }
+ return o.Bucket
+}
+
+func (o *SourceS3Update) GetDataset() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Dataset
+}
+
+func (o *SourceS3Update) GetEndpoint() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Endpoint
+}
+
+func (o *SourceS3Update) GetFormat() *SourceS3UpdateFileFormat {
+ if o == nil {
+ return nil
+ }
+ return o.Format
+}
+
+func (o *SourceS3Update) GetPathPattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PathPattern
+}
+
+func (o *SourceS3Update) GetProvider() *S3AmazonWebServices {
+ if o == nil {
+ return nil
+ }
+ return o.Provider
+}
+
+func (o *SourceS3Update) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *SourceS3Update) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceS3Update) GetStreams() []SourceS3UpdateFileBasedStreamConfig {
+ if o == nil {
+ return []SourceS3UpdateFileBasedStreamConfig{}
+ }
+ return o.Streams
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesalesforce.go b/internal/sdk/pkg/models/shared/sourcesalesforce.go
old mode 100755
new mode 100644
index 4d87fae13..27c57d75d
--- a/internal/sdk/pkg/models/shared/sourcesalesforce.go
+++ b/internal/sdk/pkg/models/shared/sourcesalesforce.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -32,48 +33,48 @@ func (e *SourceSalesforceAuthType) UnmarshalJSON(data []byte) error {
}
}
-type SourceSalesforceSalesforce string
+type Salesforce string
const (
- SourceSalesforceSalesforceSalesforce SourceSalesforceSalesforce = "salesforce"
+ SalesforceSalesforce Salesforce = "salesforce"
)
-func (e SourceSalesforceSalesforce) ToPointer() *SourceSalesforceSalesforce {
+func (e Salesforce) ToPointer() *Salesforce {
return &e
}
-func (e *SourceSalesforceSalesforce) UnmarshalJSON(data []byte) error {
+func (e *Salesforce) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "salesforce":
- *e = SourceSalesforceSalesforce(v)
+ *e = Salesforce(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSalesforceSalesforce: %v", v)
+ return fmt.Errorf("invalid value for Salesforce: %v", v)
}
}
-type SourceSalesforceStreamsCriteriaSearchCriteria string
+type SourceSalesforceSearchCriteria string
const (
- SourceSalesforceStreamsCriteriaSearchCriteriaStartsWith SourceSalesforceStreamsCriteriaSearchCriteria = "starts with"
- SourceSalesforceStreamsCriteriaSearchCriteriaEndsWith SourceSalesforceStreamsCriteriaSearchCriteria = "ends with"
- SourceSalesforceStreamsCriteriaSearchCriteriaContains SourceSalesforceStreamsCriteriaSearchCriteria = "contains"
- SourceSalesforceStreamsCriteriaSearchCriteriaExacts SourceSalesforceStreamsCriteriaSearchCriteria = "exacts"
- SourceSalesforceStreamsCriteriaSearchCriteriaStartsNotWith SourceSalesforceStreamsCriteriaSearchCriteria = "starts not with"
- SourceSalesforceStreamsCriteriaSearchCriteriaEndsNotWith SourceSalesforceStreamsCriteriaSearchCriteria = "ends not with"
- SourceSalesforceStreamsCriteriaSearchCriteriaNotContains SourceSalesforceStreamsCriteriaSearchCriteria = "not contains"
- SourceSalesforceStreamsCriteriaSearchCriteriaNotExacts SourceSalesforceStreamsCriteriaSearchCriteria = "not exacts"
+ SourceSalesforceSearchCriteriaStartsWith SourceSalesforceSearchCriteria = "starts with"
+ SourceSalesforceSearchCriteriaEndsWith SourceSalesforceSearchCriteria = "ends with"
+ SourceSalesforceSearchCriteriaContains SourceSalesforceSearchCriteria = "contains"
+ SourceSalesforceSearchCriteriaExacts SourceSalesforceSearchCriteria = "exacts"
+ SourceSalesforceSearchCriteriaStartsNotWith SourceSalesforceSearchCriteria = "starts not with"
+ SourceSalesforceSearchCriteriaEndsNotWith SourceSalesforceSearchCriteria = "ends not with"
+ SourceSalesforceSearchCriteriaNotContains SourceSalesforceSearchCriteria = "not contains"
+ SourceSalesforceSearchCriteriaNotExacts SourceSalesforceSearchCriteria = "not exacts"
)
-func (e SourceSalesforceStreamsCriteriaSearchCriteria) ToPointer() *SourceSalesforceStreamsCriteriaSearchCriteria {
+func (e SourceSalesforceSearchCriteria) ToPointer() *SourceSalesforceSearchCriteria {
return &e
}
-func (e *SourceSalesforceStreamsCriteriaSearchCriteria) UnmarshalJSON(data []byte) error {
+func (e *SourceSalesforceSearchCriteria) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -94,33 +95,126 @@ func (e *SourceSalesforceStreamsCriteriaSearchCriteria) UnmarshalJSON(data []byt
case "not contains":
fallthrough
case "not exacts":
- *e = SourceSalesforceStreamsCriteriaSearchCriteria(v)
+ *e = SourceSalesforceSearchCriteria(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSalesforceStreamsCriteriaSearchCriteria: %v", v)
+ return fmt.Errorf("invalid value for SourceSalesforceSearchCriteria: %v", v)
}
}
type SourceSalesforceStreamsCriteria struct {
- Criteria SourceSalesforceStreamsCriteriaSearchCriteria `json:"criteria"`
- Value string `json:"value"`
+ Criteria *SourceSalesforceSearchCriteria `default:"contains" json:"criteria"`
+ Value string `json:"value"`
+}
+
+func (s SourceSalesforceStreamsCriteria) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSalesforceStreamsCriteria) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSalesforceStreamsCriteria) GetCriteria() *SourceSalesforceSearchCriteria {
+ if o == nil {
+ return nil
+ }
+ return o.Criteria
+}
+
+func (o *SourceSalesforceStreamsCriteria) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
}
type SourceSalesforce struct {
- AuthType *SourceSalesforceAuthType `json:"auth_type,omitempty"`
+ authType *SourceSalesforceAuthType `const:"Client" json:"auth_type,omitempty"`
// Enter your Salesforce developer application's Client ID
ClientID string `json:"client_id"`
// Enter your Salesforce developer application's Client secret
ClientSecret string `json:"client_secret"`
// Toggle to use Bulk API (this might cause empty fields for some streams)
- ForceUseBulkAPI *bool `json:"force_use_bulk_api,omitempty"`
+ ForceUseBulkAPI *bool `default:"false" json:"force_use_bulk_api"`
// Toggle if you're using a Salesforce Sandbox
- IsSandbox *bool `json:"is_sandbox,omitempty"`
+ IsSandbox *bool `default:"false" json:"is_sandbox"`
// Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account.
- RefreshToken string `json:"refresh_token"`
- SourceType SourceSalesforceSalesforce `json:"sourceType"`
+ RefreshToken string `json:"refresh_token"`
+ sourceType Salesforce `const:"salesforce" json:"sourceType"`
// Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ format. Airbyte will replicate the data updated on and after this date. If this field is blank, Airbyte will replicate the data for last two years.
StartDate *time.Time `json:"start_date,omitempty"`
// Add filters to select only required stream based on `SObject` name. Use this field to filter which tables are displayed by this connector. This is useful if your Salesforce account has a large number of tables (>1000), in which case you may find it easier to navigate the UI and speed up the connector's performance if you restrict the tables displayed by this connector.
StreamsCriteria []SourceSalesforceStreamsCriteria `json:"streams_criteria,omitempty"`
}
+
+func (s SourceSalesforce) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSalesforce) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSalesforce) GetAuthType() *SourceSalesforceAuthType {
+ return SourceSalesforceAuthTypeClient.ToPointer()
+}
+
+func (o *SourceSalesforce) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceSalesforce) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceSalesforce) GetForceUseBulkAPI() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.ForceUseBulkAPI
+}
+
+func (o *SourceSalesforce) GetIsSandbox() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IsSandbox
+}
+
+func (o *SourceSalesforce) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceSalesforce) GetSourceType() Salesforce {
+ return SalesforceSalesforce
+}
+
+func (o *SourceSalesforce) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceSalesforce) GetStreamsCriteria() []SourceSalesforceStreamsCriteria {
+ if o == nil {
+ return nil
+ }
+ return o.StreamsCriteria
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesalesforcecreaterequest.go b/internal/sdk/pkg/models/shared/sourcesalesforcecreaterequest.go
old mode 100755
new mode 100644
index e3b855232..de1083732
--- a/internal/sdk/pkg/models/shared/sourcesalesforcecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesalesforcecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSalesforceCreateRequest struct {
Configuration SourceSalesforce `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSalesforceCreateRequest) GetConfiguration() SourceSalesforce {
+ if o == nil {
+ return SourceSalesforce{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSalesforceCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSalesforceCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSalesforceCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSalesforceCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesalesforceputrequest.go b/internal/sdk/pkg/models/shared/sourcesalesforceputrequest.go
old mode 100755
new mode 100644
index a6e023e00..3288e59ff
--- a/internal/sdk/pkg/models/shared/sourcesalesforceputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesalesforceputrequest.go
@@ -7,3 +7,24 @@ type SourceSalesforcePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSalesforcePutRequest) GetConfiguration() SourceSalesforceUpdate {
+ if o == nil {
+ return SourceSalesforceUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSalesforcePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSalesforcePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesalesforceupdate.go b/internal/sdk/pkg/models/shared/sourcesalesforceupdate.go
old mode 100755
new mode 100644
index 7348e0150..cda863b15
--- a/internal/sdk/pkg/models/shared/sourcesalesforceupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesalesforceupdate.go
@@ -5,51 +5,52 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceSalesforceUpdateAuthType string
+type AuthType string
const (
- SourceSalesforceUpdateAuthTypeClient SourceSalesforceUpdateAuthType = "Client"
+ AuthTypeClient AuthType = "Client"
)
-func (e SourceSalesforceUpdateAuthType) ToPointer() *SourceSalesforceUpdateAuthType {
+func (e AuthType) ToPointer() *AuthType {
return &e
}
-func (e *SourceSalesforceUpdateAuthType) UnmarshalJSON(data []byte) error {
+func (e *AuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Client":
- *e = SourceSalesforceUpdateAuthType(v)
+ *e = AuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSalesforceUpdateAuthType: %v", v)
+ return fmt.Errorf("invalid value for AuthType: %v", v)
}
}
-type SourceSalesforceUpdateStreamsCriteriaSearchCriteria string
+type SearchCriteria string
const (
- SourceSalesforceUpdateStreamsCriteriaSearchCriteriaStartsWith SourceSalesforceUpdateStreamsCriteriaSearchCriteria = "starts with"
- SourceSalesforceUpdateStreamsCriteriaSearchCriteriaEndsWith SourceSalesforceUpdateStreamsCriteriaSearchCriteria = "ends with"
- SourceSalesforceUpdateStreamsCriteriaSearchCriteriaContains SourceSalesforceUpdateStreamsCriteriaSearchCriteria = "contains"
- SourceSalesforceUpdateStreamsCriteriaSearchCriteriaExacts SourceSalesforceUpdateStreamsCriteriaSearchCriteria = "exacts"
- SourceSalesforceUpdateStreamsCriteriaSearchCriteriaStartsNotWith SourceSalesforceUpdateStreamsCriteriaSearchCriteria = "starts not with"
- SourceSalesforceUpdateStreamsCriteriaSearchCriteriaEndsNotWith SourceSalesforceUpdateStreamsCriteriaSearchCriteria = "ends not with"
- SourceSalesforceUpdateStreamsCriteriaSearchCriteriaNotContains SourceSalesforceUpdateStreamsCriteriaSearchCriteria = "not contains"
- SourceSalesforceUpdateStreamsCriteriaSearchCriteriaNotExacts SourceSalesforceUpdateStreamsCriteriaSearchCriteria = "not exacts"
+ SearchCriteriaStartsWith SearchCriteria = "starts with"
+ SearchCriteriaEndsWith SearchCriteria = "ends with"
+ SearchCriteriaContains SearchCriteria = "contains"
+ SearchCriteriaExacts SearchCriteria = "exacts"
+ SearchCriteriaStartsNotWith SearchCriteria = "starts not with"
+ SearchCriteriaEndsNotWith SearchCriteria = "ends not with"
+ SearchCriteriaNotContains SearchCriteria = "not contains"
+ SearchCriteriaNotExacts SearchCriteria = "not exacts"
)
-func (e SourceSalesforceUpdateStreamsCriteriaSearchCriteria) ToPointer() *SourceSalesforceUpdateStreamsCriteriaSearchCriteria {
+func (e SearchCriteria) ToPointer() *SearchCriteria {
return &e
}
-func (e *SourceSalesforceUpdateStreamsCriteriaSearchCriteria) UnmarshalJSON(data []byte) error {
+func (e *SearchCriteria) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -70,32 +71,121 @@ func (e *SourceSalesforceUpdateStreamsCriteriaSearchCriteria) UnmarshalJSON(data
case "not contains":
fallthrough
case "not exacts":
- *e = SourceSalesforceUpdateStreamsCriteriaSearchCriteria(v)
+ *e = SearchCriteria(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSalesforceUpdateStreamsCriteriaSearchCriteria: %v", v)
+ return fmt.Errorf("invalid value for SearchCriteria: %v", v)
}
}
-type SourceSalesforceUpdateStreamsCriteria struct {
- Criteria SourceSalesforceUpdateStreamsCriteriaSearchCriteria `json:"criteria"`
- Value string `json:"value"`
+type StreamsCriteria struct {
+ Criteria *SearchCriteria `default:"contains" json:"criteria"`
+ Value string `json:"value"`
+}
+
+func (s StreamsCriteria) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *StreamsCriteria) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *StreamsCriteria) GetCriteria() *SearchCriteria {
+ if o == nil {
+ return nil
+ }
+ return o.Criteria
+}
+
+func (o *StreamsCriteria) GetValue() string {
+ if o == nil {
+ return ""
+ }
+ return o.Value
}
type SourceSalesforceUpdate struct {
- AuthType *SourceSalesforceUpdateAuthType `json:"auth_type,omitempty"`
+ authType *AuthType `const:"Client" json:"auth_type,omitempty"`
// Enter your Salesforce developer application's Client ID
ClientID string `json:"client_id"`
// Enter your Salesforce developer application's Client secret
ClientSecret string `json:"client_secret"`
// Toggle to use Bulk API (this might cause empty fields for some streams)
- ForceUseBulkAPI *bool `json:"force_use_bulk_api,omitempty"`
+ ForceUseBulkAPI *bool `default:"false" json:"force_use_bulk_api"`
// Toggle if you're using a Salesforce Sandbox
- IsSandbox *bool `json:"is_sandbox,omitempty"`
+ IsSandbox *bool `default:"false" json:"is_sandbox"`
// Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce account.
RefreshToken string `json:"refresh_token"`
// Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ format. Airbyte will replicate the data updated on and after this date. If this field is blank, Airbyte will replicate the data for last two years.
StartDate *time.Time `json:"start_date,omitempty"`
// Add filters to select only required stream based on `SObject` name. Use this field to filter which tables are displayed by this connector. This is useful if your Salesforce account has a large number of tables (>1000), in which case you may find it easier to navigate the UI and speed up the connector's performance if you restrict the tables displayed by this connector.
- StreamsCriteria []SourceSalesforceUpdateStreamsCriteria `json:"streams_criteria,omitempty"`
+ StreamsCriteria []StreamsCriteria `json:"streams_criteria,omitempty"`
+}
+
+func (s SourceSalesforceUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSalesforceUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSalesforceUpdate) GetAuthType() *AuthType {
+ return AuthTypeClient.ToPointer()
+}
+
+func (o *SourceSalesforceUpdate) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceSalesforceUpdate) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceSalesforceUpdate) GetForceUseBulkAPI() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.ForceUseBulkAPI
+}
+
+func (o *SourceSalesforceUpdate) GetIsSandbox() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IsSandbox
+}
+
+func (o *SourceSalesforceUpdate) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceSalesforceUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceSalesforceUpdate) GetStreamsCriteria() []StreamsCriteria {
+ if o == nil {
+ return nil
+ }
+ return o.StreamsCriteria
}
diff --git a/internal/sdk/pkg/models/shared/sourcesalesloft.go b/internal/sdk/pkg/models/shared/sourcesalesloft.go
old mode 100755
new mode 100644
index af5a5cf5c..4309fd429
--- a/internal/sdk/pkg/models/shared/sourcesalesloft.go
+++ b/internal/sdk/pkg/models/shared/sourcesalesloft.go
@@ -3,71 +3,93 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType string
+type SourceSalesloftSchemasAuthType string
const (
- SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthTypeAPIKey SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType = "api_key"
+ SourceSalesloftSchemasAuthTypeAPIKey SourceSalesloftSchemasAuthType = "api_key"
)
-func (e SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType) ToPointer() *SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType {
+func (e SourceSalesloftSchemasAuthType) ToPointer() *SourceSalesloftSchemasAuthType {
return &e
}
-func (e *SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSalesloftSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_key":
- *e = SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType(v)
+ *e = SourceSalesloftSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSalesloftSchemasAuthType: %v", v)
}
}
-type SourceSalesloftCredentialsAuthenticateViaAPIKey struct {
+type SourceSalesloftAuthenticateViaAPIKey struct {
// API Key for making authenticated requests. More instruction on how to find this value in our docs
- APIKey string `json:"api_key"`
- AuthType SourceSalesloftCredentialsAuthenticateViaAPIKeyAuthType `json:"auth_type"`
+ APIKey string `json:"api_key"`
+ authType SourceSalesloftSchemasAuthType `const:"api_key" json:"auth_type"`
}
-type SourceSalesloftCredentialsAuthenticateViaOAuthAuthType string
+func (s SourceSalesloftAuthenticateViaAPIKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSalesloftAuthenticateViaAPIKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSalesloftAuthenticateViaAPIKey) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceSalesloftAuthenticateViaAPIKey) GetAuthType() SourceSalesloftSchemasAuthType {
+ return SourceSalesloftSchemasAuthTypeAPIKey
+}
+
+type SourceSalesloftAuthType string
const (
- SourceSalesloftCredentialsAuthenticateViaOAuthAuthTypeOauth20 SourceSalesloftCredentialsAuthenticateViaOAuthAuthType = "oauth2.0"
+ SourceSalesloftAuthTypeOauth20 SourceSalesloftAuthType = "oauth2.0"
)
-func (e SourceSalesloftCredentialsAuthenticateViaOAuthAuthType) ToPointer() *SourceSalesloftCredentialsAuthenticateViaOAuthAuthType {
+func (e SourceSalesloftAuthType) ToPointer() *SourceSalesloftAuthType {
return &e
}
-func (e *SourceSalesloftCredentialsAuthenticateViaOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSalesloftAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceSalesloftCredentialsAuthenticateViaOAuthAuthType(v)
+ *e = SourceSalesloftAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSalesloftCredentialsAuthenticateViaOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSalesloftAuthType: %v", v)
}
}
-type SourceSalesloftCredentialsAuthenticateViaOAuth struct {
+type SourceSalesloftAuthenticateViaOAuth struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType SourceSalesloftCredentialsAuthenticateViaOAuthAuthType `json:"auth_type"`
+ AccessToken string `json:"access_token"`
+ authType SourceSalesloftAuthType `const:"oauth2.0" json:"auth_type"`
// The Client ID of your Salesloft developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Salesloft developer application.
@@ -78,56 +100,101 @@ type SourceSalesloftCredentialsAuthenticateViaOAuth struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (s SourceSalesloftAuthenticateViaOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSalesloftAuthenticateViaOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSalesloftAuthenticateViaOAuth) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSalesloftAuthenticateViaOAuth) GetAuthType() SourceSalesloftAuthType {
+ return SourceSalesloftAuthTypeOauth20
+}
+
+func (o *SourceSalesloftAuthenticateViaOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceSalesloftAuthenticateViaOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceSalesloftAuthenticateViaOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceSalesloftAuthenticateViaOAuth) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceSalesloftCredentialsType string
const (
- SourceSalesloftCredentialsTypeSourceSalesloftCredentialsAuthenticateViaOAuth SourceSalesloftCredentialsType = "source-salesloft_Credentials_Authenticate via OAuth"
- SourceSalesloftCredentialsTypeSourceSalesloftCredentialsAuthenticateViaAPIKey SourceSalesloftCredentialsType = "source-salesloft_Credentials_Authenticate via API Key"
+ SourceSalesloftCredentialsTypeSourceSalesloftAuthenticateViaOAuth SourceSalesloftCredentialsType = "source-salesloft_Authenticate via OAuth"
+ SourceSalesloftCredentialsTypeSourceSalesloftAuthenticateViaAPIKey SourceSalesloftCredentialsType = "source-salesloft_Authenticate via API Key"
)
type SourceSalesloftCredentials struct {
- SourceSalesloftCredentialsAuthenticateViaOAuth *SourceSalesloftCredentialsAuthenticateViaOAuth
- SourceSalesloftCredentialsAuthenticateViaAPIKey *SourceSalesloftCredentialsAuthenticateViaAPIKey
+ SourceSalesloftAuthenticateViaOAuth *SourceSalesloftAuthenticateViaOAuth
+ SourceSalesloftAuthenticateViaAPIKey *SourceSalesloftAuthenticateViaAPIKey
Type SourceSalesloftCredentialsType
}
-func CreateSourceSalesloftCredentialsSourceSalesloftCredentialsAuthenticateViaOAuth(sourceSalesloftCredentialsAuthenticateViaOAuth SourceSalesloftCredentialsAuthenticateViaOAuth) SourceSalesloftCredentials {
- typ := SourceSalesloftCredentialsTypeSourceSalesloftCredentialsAuthenticateViaOAuth
+func CreateSourceSalesloftCredentialsSourceSalesloftAuthenticateViaOAuth(sourceSalesloftAuthenticateViaOAuth SourceSalesloftAuthenticateViaOAuth) SourceSalesloftCredentials {
+ typ := SourceSalesloftCredentialsTypeSourceSalesloftAuthenticateViaOAuth
return SourceSalesloftCredentials{
- SourceSalesloftCredentialsAuthenticateViaOAuth: &sourceSalesloftCredentialsAuthenticateViaOAuth,
- Type: typ,
+ SourceSalesloftAuthenticateViaOAuth: &sourceSalesloftAuthenticateViaOAuth,
+ Type: typ,
}
}
-func CreateSourceSalesloftCredentialsSourceSalesloftCredentialsAuthenticateViaAPIKey(sourceSalesloftCredentialsAuthenticateViaAPIKey SourceSalesloftCredentialsAuthenticateViaAPIKey) SourceSalesloftCredentials {
- typ := SourceSalesloftCredentialsTypeSourceSalesloftCredentialsAuthenticateViaAPIKey
+func CreateSourceSalesloftCredentialsSourceSalesloftAuthenticateViaAPIKey(sourceSalesloftAuthenticateViaAPIKey SourceSalesloftAuthenticateViaAPIKey) SourceSalesloftCredentials {
+ typ := SourceSalesloftCredentialsTypeSourceSalesloftAuthenticateViaAPIKey
return SourceSalesloftCredentials{
- SourceSalesloftCredentialsAuthenticateViaAPIKey: &sourceSalesloftCredentialsAuthenticateViaAPIKey,
- Type: typ,
+ SourceSalesloftAuthenticateViaAPIKey: &sourceSalesloftAuthenticateViaAPIKey,
+ Type: typ,
}
}
func (u *SourceSalesloftCredentials) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSalesloftCredentialsAuthenticateViaAPIKey := new(SourceSalesloftCredentialsAuthenticateViaAPIKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSalesloftCredentialsAuthenticateViaAPIKey); err == nil {
- u.SourceSalesloftCredentialsAuthenticateViaAPIKey = sourceSalesloftCredentialsAuthenticateViaAPIKey
- u.Type = SourceSalesloftCredentialsTypeSourceSalesloftCredentialsAuthenticateViaAPIKey
+
+ sourceSalesloftAuthenticateViaAPIKey := new(SourceSalesloftAuthenticateViaAPIKey)
+ if err := utils.UnmarshalJSON(data, &sourceSalesloftAuthenticateViaAPIKey, "", true, true); err == nil {
+ u.SourceSalesloftAuthenticateViaAPIKey = sourceSalesloftAuthenticateViaAPIKey
+ u.Type = SourceSalesloftCredentialsTypeSourceSalesloftAuthenticateViaAPIKey
return nil
}
- sourceSalesloftCredentialsAuthenticateViaOAuth := new(SourceSalesloftCredentialsAuthenticateViaOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSalesloftCredentialsAuthenticateViaOAuth); err == nil {
- u.SourceSalesloftCredentialsAuthenticateViaOAuth = sourceSalesloftCredentialsAuthenticateViaOAuth
- u.Type = SourceSalesloftCredentialsTypeSourceSalesloftCredentialsAuthenticateViaOAuth
+ sourceSalesloftAuthenticateViaOAuth := new(SourceSalesloftAuthenticateViaOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceSalesloftAuthenticateViaOAuth, "", true, true); err == nil {
+ u.SourceSalesloftAuthenticateViaOAuth = sourceSalesloftAuthenticateViaOAuth
+ u.Type = SourceSalesloftCredentialsTypeSourceSalesloftAuthenticateViaOAuth
return nil
}
@@ -135,44 +202,73 @@ func (u *SourceSalesloftCredentials) UnmarshalJSON(data []byte) error {
}
func (u SourceSalesloftCredentials) MarshalJSON() ([]byte, error) {
- if u.SourceSalesloftCredentialsAuthenticateViaAPIKey != nil {
- return json.Marshal(u.SourceSalesloftCredentialsAuthenticateViaAPIKey)
+ if u.SourceSalesloftAuthenticateViaOAuth != nil {
+ return utils.MarshalJSON(u.SourceSalesloftAuthenticateViaOAuth, "", true)
}
- if u.SourceSalesloftCredentialsAuthenticateViaOAuth != nil {
- return json.Marshal(u.SourceSalesloftCredentialsAuthenticateViaOAuth)
+ if u.SourceSalesloftAuthenticateViaAPIKey != nil {
+ return utils.MarshalJSON(u.SourceSalesloftAuthenticateViaAPIKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceSalesloftSalesloft string
+type Salesloft string
const (
- SourceSalesloftSalesloftSalesloft SourceSalesloftSalesloft = "salesloft"
+ SalesloftSalesloft Salesloft = "salesloft"
)
-func (e SourceSalesloftSalesloft) ToPointer() *SourceSalesloftSalesloft {
+func (e Salesloft) ToPointer() *Salesloft {
return &e
}
-func (e *SourceSalesloftSalesloft) UnmarshalJSON(data []byte) error {
+func (e *Salesloft) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "salesloft":
- *e = SourceSalesloftSalesloft(v)
+ *e = Salesloft(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSalesloftSalesloft: %v", v)
+ return fmt.Errorf("invalid value for Salesloft: %v", v)
}
}
type SourceSalesloft struct {
Credentials SourceSalesloftCredentials `json:"credentials"`
- SourceType SourceSalesloftSalesloft `json:"sourceType"`
+ sourceType Salesloft `const:"salesloft" json:"sourceType"`
// The date from which you'd like to replicate data for Salesloft API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceSalesloft) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSalesloft) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSalesloft) GetCredentials() SourceSalesloftCredentials {
+ if o == nil {
+ return SourceSalesloftCredentials{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceSalesloft) GetSourceType() Salesloft {
+ return SalesloftSalesloft
+}
+
+func (o *SourceSalesloft) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesalesloftcreaterequest.go b/internal/sdk/pkg/models/shared/sourcesalesloftcreaterequest.go
old mode 100755
new mode 100644
index dcc8da1bd..cef09f42a
--- a/internal/sdk/pkg/models/shared/sourcesalesloftcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesalesloftcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSalesloftCreateRequest struct {
Configuration SourceSalesloft `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSalesloftCreateRequest) GetConfiguration() SourceSalesloft {
+ if o == nil {
+ return SourceSalesloft{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSalesloftCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSalesloftCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSalesloftCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSalesloftCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesalesloftputrequest.go b/internal/sdk/pkg/models/shared/sourcesalesloftputrequest.go
old mode 100755
new mode 100644
index 082f24c73..dc3a19db3
--- a/internal/sdk/pkg/models/shared/sourcesalesloftputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesalesloftputrequest.go
@@ -7,3 +7,24 @@ type SourceSalesloftPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSalesloftPutRequest) GetConfiguration() SourceSalesloftUpdate {
+ if o == nil {
+ return SourceSalesloftUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSalesloftPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSalesloftPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesalesloftupdate.go b/internal/sdk/pkg/models/shared/sourcesalesloftupdate.go
old mode 100755
new mode 100644
index 3eac04137..01b67fdb0
--- a/internal/sdk/pkg/models/shared/sourcesalesloftupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesalesloftupdate.go
@@ -3,71 +3,93 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceSalesloftUpdateCredentialsAuthenticateViaAPIKeyAuthType string
+type SourceSalesloftUpdateSchemasAuthType string
const (
- SourceSalesloftUpdateCredentialsAuthenticateViaAPIKeyAuthTypeAPIKey SourceSalesloftUpdateCredentialsAuthenticateViaAPIKeyAuthType = "api_key"
+ SourceSalesloftUpdateSchemasAuthTypeAPIKey SourceSalesloftUpdateSchemasAuthType = "api_key"
)
-func (e SourceSalesloftUpdateCredentialsAuthenticateViaAPIKeyAuthType) ToPointer() *SourceSalesloftUpdateCredentialsAuthenticateViaAPIKeyAuthType {
+func (e SourceSalesloftUpdateSchemasAuthType) ToPointer() *SourceSalesloftUpdateSchemasAuthType {
return &e
}
-func (e *SourceSalesloftUpdateCredentialsAuthenticateViaAPIKeyAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSalesloftUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_key":
- *e = SourceSalesloftUpdateCredentialsAuthenticateViaAPIKeyAuthType(v)
+ *e = SourceSalesloftUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSalesloftUpdateCredentialsAuthenticateViaAPIKeyAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSalesloftUpdateSchemasAuthType: %v", v)
}
}
-type SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey struct {
+type AuthenticateViaAPIKey struct {
// API Key for making authenticated requests. More instruction on how to find this value in our docs
- APIKey string `json:"api_key"`
- AuthType SourceSalesloftUpdateCredentialsAuthenticateViaAPIKeyAuthType `json:"auth_type"`
+ APIKey string `json:"api_key"`
+ authType SourceSalesloftUpdateSchemasAuthType `const:"api_key" json:"auth_type"`
}
-type SourceSalesloftUpdateCredentialsAuthenticateViaOAuthAuthType string
+func (a AuthenticateViaAPIKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AuthenticateViaAPIKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AuthenticateViaAPIKey) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *AuthenticateViaAPIKey) GetAuthType() SourceSalesloftUpdateSchemasAuthType {
+ return SourceSalesloftUpdateSchemasAuthTypeAPIKey
+}
+
+type SourceSalesloftUpdateAuthType string
const (
- SourceSalesloftUpdateCredentialsAuthenticateViaOAuthAuthTypeOauth20 SourceSalesloftUpdateCredentialsAuthenticateViaOAuthAuthType = "oauth2.0"
+ SourceSalesloftUpdateAuthTypeOauth20 SourceSalesloftUpdateAuthType = "oauth2.0"
)
-func (e SourceSalesloftUpdateCredentialsAuthenticateViaOAuthAuthType) ToPointer() *SourceSalesloftUpdateCredentialsAuthenticateViaOAuthAuthType {
+func (e SourceSalesloftUpdateAuthType) ToPointer() *SourceSalesloftUpdateAuthType {
return &e
}
-func (e *SourceSalesloftUpdateCredentialsAuthenticateViaOAuthAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSalesloftUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceSalesloftUpdateCredentialsAuthenticateViaOAuthAuthType(v)
+ *e = SourceSalesloftUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSalesloftUpdateCredentialsAuthenticateViaOAuthAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSalesloftUpdateAuthType: %v", v)
}
}
-type SourceSalesloftUpdateCredentialsAuthenticateViaOAuth struct {
+type AuthenticateViaOAuth struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType SourceSalesloftUpdateCredentialsAuthenticateViaOAuthAuthType `json:"auth_type"`
+ AccessToken string `json:"access_token"`
+ authType SourceSalesloftUpdateAuthType `const:"oauth2.0" json:"auth_type"`
// The Client ID of your Salesloft developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Salesloft developer application.
@@ -78,56 +100,101 @@ type SourceSalesloftUpdateCredentialsAuthenticateViaOAuth struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (a AuthenticateViaOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *AuthenticateViaOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *AuthenticateViaOAuth) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *AuthenticateViaOAuth) GetAuthType() SourceSalesloftUpdateAuthType {
+ return SourceSalesloftUpdateAuthTypeOauth20
+}
+
+func (o *AuthenticateViaOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *AuthenticateViaOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *AuthenticateViaOAuth) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *AuthenticateViaOAuth) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceSalesloftUpdateCredentialsType string
const (
- SourceSalesloftUpdateCredentialsTypeSourceSalesloftUpdateCredentialsAuthenticateViaOAuth SourceSalesloftUpdateCredentialsType = "source-salesloft-update_Credentials_Authenticate via OAuth"
- SourceSalesloftUpdateCredentialsTypeSourceSalesloftUpdateCredentialsAuthenticateViaAPIKey SourceSalesloftUpdateCredentialsType = "source-salesloft-update_Credentials_Authenticate via API Key"
+ SourceSalesloftUpdateCredentialsTypeAuthenticateViaOAuth SourceSalesloftUpdateCredentialsType = "Authenticate via OAuth"
+ SourceSalesloftUpdateCredentialsTypeAuthenticateViaAPIKey SourceSalesloftUpdateCredentialsType = "Authenticate via API Key"
)
type SourceSalesloftUpdateCredentials struct {
- SourceSalesloftUpdateCredentialsAuthenticateViaOAuth *SourceSalesloftUpdateCredentialsAuthenticateViaOAuth
- SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey *SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey
+ AuthenticateViaOAuth *AuthenticateViaOAuth
+ AuthenticateViaAPIKey *AuthenticateViaAPIKey
Type SourceSalesloftUpdateCredentialsType
}
-func CreateSourceSalesloftUpdateCredentialsSourceSalesloftUpdateCredentialsAuthenticateViaOAuth(sourceSalesloftUpdateCredentialsAuthenticateViaOAuth SourceSalesloftUpdateCredentialsAuthenticateViaOAuth) SourceSalesloftUpdateCredentials {
- typ := SourceSalesloftUpdateCredentialsTypeSourceSalesloftUpdateCredentialsAuthenticateViaOAuth
+func CreateSourceSalesloftUpdateCredentialsAuthenticateViaOAuth(authenticateViaOAuth AuthenticateViaOAuth) SourceSalesloftUpdateCredentials {
+ typ := SourceSalesloftUpdateCredentialsTypeAuthenticateViaOAuth
return SourceSalesloftUpdateCredentials{
- SourceSalesloftUpdateCredentialsAuthenticateViaOAuth: &sourceSalesloftUpdateCredentialsAuthenticateViaOAuth,
- Type: typ,
+ AuthenticateViaOAuth: &authenticateViaOAuth,
+ Type: typ,
}
}
-func CreateSourceSalesloftUpdateCredentialsSourceSalesloftUpdateCredentialsAuthenticateViaAPIKey(sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey) SourceSalesloftUpdateCredentials {
- typ := SourceSalesloftUpdateCredentialsTypeSourceSalesloftUpdateCredentialsAuthenticateViaAPIKey
+func CreateSourceSalesloftUpdateCredentialsAuthenticateViaAPIKey(authenticateViaAPIKey AuthenticateViaAPIKey) SourceSalesloftUpdateCredentials {
+ typ := SourceSalesloftUpdateCredentialsTypeAuthenticateViaAPIKey
return SourceSalesloftUpdateCredentials{
- SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey: &sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey,
- Type: typ,
+ AuthenticateViaAPIKey: &authenticateViaAPIKey,
+ Type: typ,
}
}
func (u *SourceSalesloftUpdateCredentials) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey := new(SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey); err == nil {
- u.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey = sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey
- u.Type = SourceSalesloftUpdateCredentialsTypeSourceSalesloftUpdateCredentialsAuthenticateViaAPIKey
+
+ authenticateViaAPIKey := new(AuthenticateViaAPIKey)
+ if err := utils.UnmarshalJSON(data, &authenticateViaAPIKey, "", true, true); err == nil {
+ u.AuthenticateViaAPIKey = authenticateViaAPIKey
+ u.Type = SourceSalesloftUpdateCredentialsTypeAuthenticateViaAPIKey
return nil
}
- sourceSalesloftUpdateCredentialsAuthenticateViaOAuth := new(SourceSalesloftUpdateCredentialsAuthenticateViaOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSalesloftUpdateCredentialsAuthenticateViaOAuth); err == nil {
- u.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth = sourceSalesloftUpdateCredentialsAuthenticateViaOAuth
- u.Type = SourceSalesloftUpdateCredentialsTypeSourceSalesloftUpdateCredentialsAuthenticateViaOAuth
+ authenticateViaOAuth := new(AuthenticateViaOAuth)
+ if err := utils.UnmarshalJSON(data, &authenticateViaOAuth, "", true, true); err == nil {
+ u.AuthenticateViaOAuth = authenticateViaOAuth
+ u.Type = SourceSalesloftUpdateCredentialsTypeAuthenticateViaOAuth
return nil
}
@@ -135,15 +202,15 @@ func (u *SourceSalesloftUpdateCredentials) UnmarshalJSON(data []byte) error {
}
func (u SourceSalesloftUpdateCredentials) MarshalJSON() ([]byte, error) {
- if u.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey != nil {
- return json.Marshal(u.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey)
+ if u.AuthenticateViaOAuth != nil {
+ return utils.MarshalJSON(u.AuthenticateViaOAuth, "", true)
}
- if u.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth != nil {
- return json.Marshal(u.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth)
+ if u.AuthenticateViaAPIKey != nil {
+ return utils.MarshalJSON(u.AuthenticateViaAPIKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceSalesloftUpdate struct {
@@ -151,3 +218,28 @@ type SourceSalesloftUpdate struct {
// The date from which you'd like to replicate data for Salesloft API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceSalesloftUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSalesloftUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSalesloftUpdate) GetCredentials() SourceSalesloftUpdateCredentials {
+ if o == nil {
+ return SourceSalesloftUpdateCredentials{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceSalesloftUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesapfieldglass.go b/internal/sdk/pkg/models/shared/sourcesapfieldglass.go
old mode 100755
new mode 100644
index dc4446bfb..6438572e2
--- a/internal/sdk/pkg/models/shared/sourcesapfieldglass.go
+++ b/internal/sdk/pkg/models/shared/sourcesapfieldglass.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSapFieldglassSapFieldglass string
+type SapFieldglass string
const (
- SourceSapFieldglassSapFieldglassSapFieldglass SourceSapFieldglassSapFieldglass = "sap-fieldglass"
+ SapFieldglassSapFieldglass SapFieldglass = "sap-fieldglass"
)
-func (e SourceSapFieldglassSapFieldglass) ToPointer() *SourceSapFieldglassSapFieldglass {
+func (e SapFieldglass) ToPointer() *SapFieldglass {
return &e
}
-func (e *SourceSapFieldglassSapFieldglass) UnmarshalJSON(data []byte) error {
+func (e *SapFieldglass) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sap-fieldglass":
- *e = SourceSapFieldglassSapFieldglass(v)
+ *e = SapFieldglass(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSapFieldglassSapFieldglass: %v", v)
+ return fmt.Errorf("invalid value for SapFieldglass: %v", v)
}
}
type SourceSapFieldglass struct {
// API Key
- APIKey string `json:"api_key"`
- SourceType SourceSapFieldglassSapFieldglass `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType SapFieldglass `const:"sap-fieldglass" json:"sourceType"`
+}
+
+func (s SourceSapFieldglass) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSapFieldglass) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSapFieldglass) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceSapFieldglass) GetSourceType() SapFieldglass {
+ return SapFieldglassSapFieldglass
}
diff --git a/internal/sdk/pkg/models/shared/sourcesapfieldglasscreaterequest.go b/internal/sdk/pkg/models/shared/sourcesapfieldglasscreaterequest.go
old mode 100755
new mode 100644
index 09f071cea..36d84c30e
--- a/internal/sdk/pkg/models/shared/sourcesapfieldglasscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesapfieldglasscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSapFieldglassCreateRequest struct {
Configuration SourceSapFieldglass `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSapFieldglassCreateRequest) GetConfiguration() SourceSapFieldglass {
+ if o == nil {
+ return SourceSapFieldglass{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSapFieldglassCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSapFieldglassCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSapFieldglassCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSapFieldglassCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesapfieldglassputrequest.go b/internal/sdk/pkg/models/shared/sourcesapfieldglassputrequest.go
old mode 100755
new mode 100644
index db746c23d..7c48f766b
--- a/internal/sdk/pkg/models/shared/sourcesapfieldglassputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesapfieldglassputrequest.go
@@ -7,3 +7,24 @@ type SourceSapFieldglassPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSapFieldglassPutRequest) GetConfiguration() SourceSapFieldglassUpdate {
+ if o == nil {
+ return SourceSapFieldglassUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSapFieldglassPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSapFieldglassPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesapfieldglassupdate.go b/internal/sdk/pkg/models/shared/sourcesapfieldglassupdate.go
old mode 100755
new mode 100644
index 4e346c87a..35c15b770
--- a/internal/sdk/pkg/models/shared/sourcesapfieldglassupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesapfieldglassupdate.go
@@ -6,3 +6,10 @@ type SourceSapFieldglassUpdate struct {
// API Key
APIKey string `json:"api_key"`
}
+
+func (o *SourceSapFieldglassUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesecoda.go b/internal/sdk/pkg/models/shared/sourcesecoda.go
old mode 100755
new mode 100644
index b8dfa49f4..62ae2ee6f
--- a/internal/sdk/pkg/models/shared/sourcesecoda.go
+++ b/internal/sdk/pkg/models/shared/sourcesecoda.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSecodaSecoda string
+type Secoda string
const (
- SourceSecodaSecodaSecoda SourceSecodaSecoda = "secoda"
+ SecodaSecoda Secoda = "secoda"
)
-func (e SourceSecodaSecoda) ToPointer() *SourceSecodaSecoda {
+func (e Secoda) ToPointer() *Secoda {
return &e
}
-func (e *SourceSecodaSecoda) UnmarshalJSON(data []byte) error {
+func (e *Secoda) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "secoda":
- *e = SourceSecodaSecoda(v)
+ *e = Secoda(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSecodaSecoda: %v", v)
+ return fmt.Errorf("invalid value for Secoda: %v", v)
}
}
type SourceSecoda struct {
// Your API Access Key. See here. The key is case sensitive.
- APIKey string `json:"api_key"`
- SourceType SourceSecodaSecoda `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Secoda `const:"secoda" json:"sourceType"`
+}
+
+func (s SourceSecoda) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSecoda) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSecoda) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceSecoda) GetSourceType() Secoda {
+ return SecodaSecoda
}
diff --git a/internal/sdk/pkg/models/shared/sourcesecodacreaterequest.go b/internal/sdk/pkg/models/shared/sourcesecodacreaterequest.go
old mode 100755
new mode 100644
index ddfa2d065..8338c2adf
--- a/internal/sdk/pkg/models/shared/sourcesecodacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesecodacreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSecodaCreateRequest struct {
Configuration SourceSecoda `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSecodaCreateRequest) GetConfiguration() SourceSecoda {
+ if o == nil {
+ return SourceSecoda{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSecodaCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSecodaCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSecodaCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSecodaCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesecodaputrequest.go b/internal/sdk/pkg/models/shared/sourcesecodaputrequest.go
old mode 100755
new mode 100644
index 1475c0534..527d3c881
--- a/internal/sdk/pkg/models/shared/sourcesecodaputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesecodaputrequest.go
@@ -7,3 +7,24 @@ type SourceSecodaPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSecodaPutRequest) GetConfiguration() SourceSecodaUpdate {
+ if o == nil {
+ return SourceSecodaUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSecodaPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSecodaPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesecodaupdate.go b/internal/sdk/pkg/models/shared/sourcesecodaupdate.go
old mode 100755
new mode 100644
index 67cbea04f..35b904026
--- a/internal/sdk/pkg/models/shared/sourcesecodaupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesecodaupdate.go
@@ -6,3 +6,10 @@ type SourceSecodaUpdate struct {
// Your API Access Key. See here. The key is case sensitive.
APIKey string `json:"api_key"`
}
+
+func (o *SourceSecodaUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesendgrid.go b/internal/sdk/pkg/models/shared/sourcesendgrid.go
old mode 100755
new mode 100644
index 3850e9e0f..46e661c22
--- a/internal/sdk/pkg/models/shared/sourcesendgrid.go
+++ b/internal/sdk/pkg/models/shared/sourcesendgrid.go
@@ -5,37 +5,67 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceSendgridSendgrid string
+type Sendgrid string
const (
- SourceSendgridSendgridSendgrid SourceSendgridSendgrid = "sendgrid"
+ SendgridSendgrid Sendgrid = "sendgrid"
)
-func (e SourceSendgridSendgrid) ToPointer() *SourceSendgridSendgrid {
+func (e Sendgrid) ToPointer() *Sendgrid {
return &e
}
-func (e *SourceSendgridSendgrid) UnmarshalJSON(data []byte) error {
+func (e *Sendgrid) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sendgrid":
- *e = SourceSendgridSendgrid(v)
+ *e = Sendgrid(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSendgridSendgrid: %v", v)
+ return fmt.Errorf("invalid value for Sendgrid: %v", v)
}
}
type SourceSendgrid struct {
// API Key, use admin to generate this key.
- Apikey string `json:"apikey"`
- SourceType SourceSendgridSendgrid `json:"sourceType"`
+ Apikey string `json:"apikey"`
+ sourceType Sendgrid `const:"sendgrid" json:"sourceType"`
// Start time in ISO8601 format. Any data before this time point will not be replicated.
StartTime *time.Time `json:"start_time,omitempty"`
}
+
+func (s SourceSendgrid) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSendgrid) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSendgrid) GetApikey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Apikey
+}
+
+func (o *SourceSendgrid) GetSourceType() Sendgrid {
+ return SendgridSendgrid
+}
+
+func (o *SourceSendgrid) GetStartTime() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartTime
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesendgridcreaterequest.go b/internal/sdk/pkg/models/shared/sourcesendgridcreaterequest.go
old mode 100755
new mode 100644
index a411466c0..6d115f7e5
--- a/internal/sdk/pkg/models/shared/sourcesendgridcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesendgridcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSendgridCreateRequest struct {
Configuration SourceSendgrid `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSendgridCreateRequest) GetConfiguration() SourceSendgrid {
+ if o == nil {
+ return SourceSendgrid{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSendgridCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSendgridCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSendgridCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSendgridCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesendgridputrequest.go b/internal/sdk/pkg/models/shared/sourcesendgridputrequest.go
old mode 100755
new mode 100644
index 376d5f4b8..0a5d90375
--- a/internal/sdk/pkg/models/shared/sourcesendgridputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesendgridputrequest.go
@@ -7,3 +7,24 @@ type SourceSendgridPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSendgridPutRequest) GetConfiguration() SourceSendgridUpdate {
+ if o == nil {
+ return SourceSendgridUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSendgridPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSendgridPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesendgridupdate.go b/internal/sdk/pkg/models/shared/sourcesendgridupdate.go
old mode 100755
new mode 100644
index 148718b55..4905b0f7e
--- a/internal/sdk/pkg/models/shared/sourcesendgridupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesendgridupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -12,3 +13,28 @@ type SourceSendgridUpdate struct {
// Start time in ISO8601 format. Any data before this time point will not be replicated.
StartTime *time.Time `json:"start_time,omitempty"`
}
+
+func (s SourceSendgridUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSendgridUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSendgridUpdate) GetApikey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Apikey
+}
+
+func (o *SourceSendgridUpdate) GetStartTime() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartTime
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesendinblue.go b/internal/sdk/pkg/models/shared/sourcesendinblue.go
old mode 100755
new mode 100644
index 8ffcf5871..99288a574
--- a/internal/sdk/pkg/models/shared/sourcesendinblue.go
+++ b/internal/sdk/pkg/models/shared/sourcesendinblue.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSendinblueSendinblue string
+type Sendinblue string
const (
- SourceSendinblueSendinblueSendinblue SourceSendinblueSendinblue = "sendinblue"
+ SendinblueSendinblue Sendinblue = "sendinblue"
)
-func (e SourceSendinblueSendinblue) ToPointer() *SourceSendinblueSendinblue {
+func (e Sendinblue) ToPointer() *Sendinblue {
return &e
}
-func (e *SourceSendinblueSendinblue) UnmarshalJSON(data []byte) error {
+func (e *Sendinblue) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sendinblue":
- *e = SourceSendinblueSendinblue(v)
+ *e = Sendinblue(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSendinblueSendinblue: %v", v)
+ return fmt.Errorf("invalid value for Sendinblue: %v", v)
}
}
type SourceSendinblue struct {
// Your API Key. See here.
- APIKey string `json:"api_key"`
- SourceType SourceSendinblueSendinblue `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Sendinblue `const:"sendinblue" json:"sourceType"`
+}
+
+func (s SourceSendinblue) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSendinblue) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSendinblue) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceSendinblue) GetSourceType() Sendinblue {
+ return SendinblueSendinblue
}
diff --git a/internal/sdk/pkg/models/shared/sourcesendinbluecreaterequest.go b/internal/sdk/pkg/models/shared/sourcesendinbluecreaterequest.go
old mode 100755
new mode 100644
index ece74b25d..5be7dfb13
--- a/internal/sdk/pkg/models/shared/sourcesendinbluecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesendinbluecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSendinblueCreateRequest struct {
Configuration SourceSendinblue `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSendinblueCreateRequest) GetConfiguration() SourceSendinblue {
+ if o == nil {
+ return SourceSendinblue{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSendinblueCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSendinblueCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSendinblueCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSendinblueCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesendinblueputrequest.go b/internal/sdk/pkg/models/shared/sourcesendinblueputrequest.go
old mode 100755
new mode 100644
index 683a60e0b..10c12283d
--- a/internal/sdk/pkg/models/shared/sourcesendinblueputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesendinblueputrequest.go
@@ -7,3 +7,24 @@ type SourceSendinbluePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSendinbluePutRequest) GetConfiguration() SourceSendinblueUpdate {
+ if o == nil {
+ return SourceSendinblueUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSendinbluePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSendinbluePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesendinblueupdate.go b/internal/sdk/pkg/models/shared/sourcesendinblueupdate.go
old mode 100755
new mode 100644
index c0449a8d3..a985d5733
--- a/internal/sdk/pkg/models/shared/sourcesendinblueupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesendinblueupdate.go
@@ -6,3 +6,10 @@ type SourceSendinblueUpdate struct {
// Your API Key. See here.
APIKey string `json:"api_key"`
}
+
+func (o *SourceSendinblueUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesenseforce.go b/internal/sdk/pkg/models/shared/sourcesenseforce.go
old mode 100755
new mode 100644
index 25cc1a02a..bbe0124c3
--- a/internal/sdk/pkg/models/shared/sourcesenseforce.go
+++ b/internal/sdk/pkg/models/shared/sourcesenseforce.go
@@ -3,32 +3,33 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSenseforceSenseforce string
+type Senseforce string
const (
- SourceSenseforceSenseforceSenseforce SourceSenseforceSenseforce = "senseforce"
+ SenseforceSenseforce Senseforce = "senseforce"
)
-func (e SourceSenseforceSenseforce) ToPointer() *SourceSenseforceSenseforce {
+func (e Senseforce) ToPointer() *Senseforce {
return &e
}
-func (e *SourceSenseforceSenseforce) UnmarshalJSON(data []byte) error {
+func (e *Senseforce) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "senseforce":
- *e = SourceSenseforceSenseforce(v)
+ *e = Senseforce(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSenseforceSenseforce: %v", v)
+ return fmt.Errorf("invalid value for Senseforce: %v", v)
}
}
@@ -40,8 +41,58 @@ type SourceSenseforce struct {
// The ID of the dataset you want to synchronize. The ID can be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to synchronize a specific dataset, each dataset you want to synchronize needs to be implemented as a separate airbyte source).
DatasetID string `json:"dataset_id"`
// The time increment used by the connector when requesting data from the Senseforce API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted and the more likely one could run into rate limites. Furthermore, consider that large chunks of time might take a long time for the Senseforce query to return data - meaning it could take in effect longer than with more smaller time slices. If there are a lot of data per day, set this setting to 1. If there is only very little data per day, you might change the setting to 10 or more.
- SliceRange *int64 `json:"slice_range,omitempty"`
- SourceType SourceSenseforceSenseforce `json:"sourceType"`
+ SliceRange *int64 `default:"10" json:"slice_range"`
+ sourceType Senseforce `const:"senseforce" json:"sourceType"`
// UTC date and time in the format 2017-01-25. Only data with "Timestamp" after this date will be replicated. Important note: This start date must be set to the first day of where your dataset provides data. If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceSenseforce) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSenseforce) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSenseforce) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSenseforce) GetBackendURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.BackendURL
+}
+
+func (o *SourceSenseforce) GetDatasetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatasetID
+}
+
+func (o *SourceSenseforce) GetSliceRange() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SliceRange
+}
+
+func (o *SourceSenseforce) GetSourceType() Senseforce {
+ return SenseforceSenseforce
+}
+
+func (o *SourceSenseforce) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesenseforcecreaterequest.go b/internal/sdk/pkg/models/shared/sourcesenseforcecreaterequest.go
old mode 100755
new mode 100644
index ac6d2b74d..13f741194
--- a/internal/sdk/pkg/models/shared/sourcesenseforcecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesenseforcecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSenseforceCreateRequest struct {
Configuration SourceSenseforce `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSenseforceCreateRequest) GetConfiguration() SourceSenseforce {
+ if o == nil {
+ return SourceSenseforce{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSenseforceCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSenseforceCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSenseforceCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSenseforceCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesenseforceputrequest.go b/internal/sdk/pkg/models/shared/sourcesenseforceputrequest.go
old mode 100755
new mode 100644
index a49703eb2..b245d8be5
--- a/internal/sdk/pkg/models/shared/sourcesenseforceputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesenseforceputrequest.go
@@ -7,3 +7,24 @@ type SourceSenseforcePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSenseforcePutRequest) GetConfiguration() SourceSenseforceUpdate {
+ if o == nil {
+ return SourceSenseforceUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSenseforcePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSenseforcePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesenseforceupdate.go b/internal/sdk/pkg/models/shared/sourcesenseforceupdate.go
old mode 100755
new mode 100644
index 7eea8f113..58ad5472b
--- a/internal/sdk/pkg/models/shared/sourcesenseforceupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesenseforceupdate.go
@@ -3,7 +3,8 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceSenseforceUpdate struct {
@@ -14,7 +15,53 @@ type SourceSenseforceUpdate struct {
// The ID of the dataset you want to synchronize. The ID can be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to synchronize a specific dataset, each dataset you want to synchronize needs to be implemented as a separate airbyte source).
DatasetID string `json:"dataset_id"`
// The time increment used by the connector when requesting data from the Senseforce API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted and the more likely one could run into rate limites. Furthermore, consider that large chunks of time might take a long time for the Senseforce query to return data - meaning it could take in effect longer than with more smaller time slices. If there are a lot of data per day, set this setting to 1. If there is only very little data per day, you might change the setting to 10 or more.
- SliceRange *int64 `json:"slice_range,omitempty"`
+ SliceRange *int64 `default:"10" json:"slice_range"`
// UTC date and time in the format 2017-01-25. Only data with "Timestamp" after this date will be replicated. Important note: This start date must be set to the first day of where your dataset provides data. If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceSenseforceUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSenseforceUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSenseforceUpdate) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSenseforceUpdate) GetBackendURL() string {
+ if o == nil {
+ return ""
+ }
+ return o.BackendURL
+}
+
+func (o *SourceSenseforceUpdate) GetDatasetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DatasetID
+}
+
+func (o *SourceSenseforceUpdate) GetSliceRange() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SliceRange
+}
+
+func (o *SourceSenseforceUpdate) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesentry.go b/internal/sdk/pkg/models/shared/sourcesentry.go
old mode 100755
new mode 100644
index 5bebe4d27..49c7aca87
--- a/internal/sdk/pkg/models/shared/sourcesentry.go
+++ b/internal/sdk/pkg/models/shared/sourcesentry.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSentrySentry string
+type Sentry string
const (
- SourceSentrySentrySentry SourceSentrySentry = "sentry"
+ SentrySentry Sentry = "sentry"
)
-func (e SourceSentrySentry) ToPointer() *SourceSentrySentry {
+func (e Sentry) ToPointer() *Sentry {
return &e
}
-func (e *SourceSentrySentry) UnmarshalJSON(data []byte) error {
+func (e *Sentry) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sentry":
- *e = SourceSentrySentry(v)
+ *e = Sentry(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSentrySentry: %v", v)
+ return fmt.Errorf("invalid value for Sentry: %v", v)
}
}
@@ -37,10 +38,60 @@ type SourceSentry struct {
// Fields to retrieve when fetching discover events
DiscoverFields []interface{} `json:"discover_fields,omitempty"`
// Host name of Sentry API server.For self-hosted, specify your host name here. Otherwise, leave it empty.
- Hostname *string `json:"hostname,omitempty"`
+ Hostname *string `default:"sentry.io" json:"hostname"`
// The slug of the organization the groups belong to.
Organization string `json:"organization"`
// The name (slug) of the Project you want to sync.
- Project string `json:"project"`
- SourceType SourceSentrySentry `json:"sourceType"`
+ Project string `json:"project"`
+ sourceType Sentry `const:"sentry" json:"sourceType"`
+}
+
+func (s SourceSentry) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSentry) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSentry) GetAuthToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthToken
+}
+
+func (o *SourceSentry) GetDiscoverFields() []interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.DiscoverFields
+}
+
+func (o *SourceSentry) GetHostname() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Hostname
+}
+
+func (o *SourceSentry) GetOrganization() string {
+ if o == nil {
+ return ""
+ }
+ return o.Organization
+}
+
+func (o *SourceSentry) GetProject() string {
+ if o == nil {
+ return ""
+ }
+ return o.Project
+}
+
+func (o *SourceSentry) GetSourceType() Sentry {
+ return SentrySentry
}
diff --git a/internal/sdk/pkg/models/shared/sourcesentrycreaterequest.go b/internal/sdk/pkg/models/shared/sourcesentrycreaterequest.go
old mode 100755
new mode 100644
index 10a21ced8..2400da507
--- a/internal/sdk/pkg/models/shared/sourcesentrycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesentrycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSentryCreateRequest struct {
Configuration SourceSentry `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSentryCreateRequest) GetConfiguration() SourceSentry {
+ if o == nil {
+ return SourceSentry{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSentryCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSentryCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSentryCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSentryCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesentryputrequest.go b/internal/sdk/pkg/models/shared/sourcesentryputrequest.go
old mode 100755
new mode 100644
index 957945e98..377d44ad5
--- a/internal/sdk/pkg/models/shared/sourcesentryputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesentryputrequest.go
@@ -7,3 +7,24 @@ type SourceSentryPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSentryPutRequest) GetConfiguration() SourceSentryUpdate {
+ if o == nil {
+ return SourceSentryUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSentryPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSentryPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesentryupdate.go b/internal/sdk/pkg/models/shared/sourcesentryupdate.go
old mode 100755
new mode 100644
index 4d91fb634..a2c088343
--- a/internal/sdk/pkg/models/shared/sourcesentryupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesentryupdate.go
@@ -2,15 +2,65 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
type SourceSentryUpdate struct {
// Log into Sentry and then create authentication tokens.For self-hosted, you can find or create authentication tokens by visiting "{instance_url_prefix}/settings/account/api/auth-tokens/"
AuthToken string `json:"auth_token"`
// Fields to retrieve when fetching discover events
DiscoverFields []interface{} `json:"discover_fields,omitempty"`
// Host name of Sentry API server.For self-hosted, specify your host name here. Otherwise, leave it empty.
- Hostname *string `json:"hostname,omitempty"`
+ Hostname *string `default:"sentry.io" json:"hostname"`
// The slug of the organization the groups belong to.
Organization string `json:"organization"`
// The name (slug) of the Project you want to sync.
Project string `json:"project"`
}
+
+func (s SourceSentryUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSentryUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSentryUpdate) GetAuthToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthToken
+}
+
+func (o *SourceSentryUpdate) GetDiscoverFields() []interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.DiscoverFields
+}
+
+func (o *SourceSentryUpdate) GetHostname() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Hostname
+}
+
+func (o *SourceSentryUpdate) GetOrganization() string {
+ if o == nil {
+ return ""
+ }
+ return o.Organization
+}
+
+func (o *SourceSentryUpdate) GetProject() string {
+ if o == nil {
+ return ""
+ }
+ return o.Project
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesftp.go b/internal/sdk/pkg/models/shared/sourcesftp.go
old mode 100755
new mode 100644
index a29715945..a6e9b3d58
--- a/internal/sdk/pkg/models/shared/sourcesftp.go
+++ b/internal/sdk/pkg/models/shared/sourcesftp.go
@@ -3,128 +3,167 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceSftpAuthenticationWildcardSSHKeyAuthenticationAuthMethod - Connect through ssh key
-type SourceSftpAuthenticationWildcardSSHKeyAuthenticationAuthMethod string
+// SourceSftpSchemasAuthMethod - Connect through ssh key
+type SourceSftpSchemasAuthMethod string
const (
- SourceSftpAuthenticationWildcardSSHKeyAuthenticationAuthMethodSSHKeyAuth SourceSftpAuthenticationWildcardSSHKeyAuthenticationAuthMethod = "SSH_KEY_AUTH"
+ SourceSftpSchemasAuthMethodSSHKeyAuth SourceSftpSchemasAuthMethod = "SSH_KEY_AUTH"
)
-func (e SourceSftpAuthenticationWildcardSSHKeyAuthenticationAuthMethod) ToPointer() *SourceSftpAuthenticationWildcardSSHKeyAuthenticationAuthMethod {
+func (e SourceSftpSchemasAuthMethod) ToPointer() *SourceSftpSchemasAuthMethod {
return &e
}
-func (e *SourceSftpAuthenticationWildcardSSHKeyAuthenticationAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceSftpSchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourceSftpAuthenticationWildcardSSHKeyAuthenticationAuthMethod(v)
+ *e = SourceSftpSchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSftpAuthenticationWildcardSSHKeyAuthenticationAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceSftpSchemasAuthMethod: %v", v)
}
}
-// SourceSftpAuthenticationWildcardSSHKeyAuthentication - The server authentication method
-type SourceSftpAuthenticationWildcardSSHKeyAuthentication struct {
+// SourceSftpSSHKeyAuthentication - The server authentication method
+type SourceSftpSSHKeyAuthentication struct {
// Connect through ssh key
- AuthMethod SourceSftpAuthenticationWildcardSSHKeyAuthenticationAuthMethod `json:"auth_method"`
+ authMethod SourceSftpSchemasAuthMethod `const:"SSH_KEY_AUTH" json:"auth_method"`
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
AuthSSHKey string `json:"auth_ssh_key"`
}
-// SourceSftpAuthenticationWildcardPasswordAuthenticationAuthMethod - Connect through password authentication
-type SourceSftpAuthenticationWildcardPasswordAuthenticationAuthMethod string
+func (s SourceSftpSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSftpSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSftpSSHKeyAuthentication) GetAuthMethod() SourceSftpSchemasAuthMethod {
+ return SourceSftpSchemasAuthMethodSSHKeyAuth
+}
+
+func (o *SourceSftpSSHKeyAuthentication) GetAuthSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthSSHKey
+}
+
+// SourceSftpAuthMethod - Connect through password authentication
+type SourceSftpAuthMethod string
const (
- SourceSftpAuthenticationWildcardPasswordAuthenticationAuthMethodSSHPasswordAuth SourceSftpAuthenticationWildcardPasswordAuthenticationAuthMethod = "SSH_PASSWORD_AUTH"
+ SourceSftpAuthMethodSSHPasswordAuth SourceSftpAuthMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourceSftpAuthenticationWildcardPasswordAuthenticationAuthMethod) ToPointer() *SourceSftpAuthenticationWildcardPasswordAuthenticationAuthMethod {
+func (e SourceSftpAuthMethod) ToPointer() *SourceSftpAuthMethod {
return &e
}
-func (e *SourceSftpAuthenticationWildcardPasswordAuthenticationAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceSftpAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourceSftpAuthenticationWildcardPasswordAuthenticationAuthMethod(v)
+ *e = SourceSftpAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSftpAuthenticationWildcardPasswordAuthenticationAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceSftpAuthMethod: %v", v)
}
}
-// SourceSftpAuthenticationWildcardPasswordAuthentication - The server authentication method
-type SourceSftpAuthenticationWildcardPasswordAuthentication struct {
+// SourceSftpPasswordAuthentication - The server authentication method
+type SourceSftpPasswordAuthentication struct {
// Connect through password authentication
- AuthMethod SourceSftpAuthenticationWildcardPasswordAuthenticationAuthMethod `json:"auth_method"`
+ authMethod SourceSftpAuthMethod `const:"SSH_PASSWORD_AUTH" json:"auth_method"`
// OS-level password for logging into the jump server host
AuthUserPassword string `json:"auth_user_password"`
}
+func (s SourceSftpPasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSftpPasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSftpPasswordAuthentication) GetAuthMethod() SourceSftpAuthMethod {
+ return SourceSftpAuthMethodSSHPasswordAuth
+}
+
+func (o *SourceSftpPasswordAuthentication) GetAuthUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthUserPassword
+}
+
type SourceSftpAuthenticationWildcardType string
const (
- SourceSftpAuthenticationWildcardTypeSourceSftpAuthenticationWildcardPasswordAuthentication SourceSftpAuthenticationWildcardType = "source-sftp_Authentication *_Password Authentication"
- SourceSftpAuthenticationWildcardTypeSourceSftpAuthenticationWildcardSSHKeyAuthentication SourceSftpAuthenticationWildcardType = "source-sftp_Authentication *_SSH Key Authentication"
+ SourceSftpAuthenticationWildcardTypeSourceSftpPasswordAuthentication SourceSftpAuthenticationWildcardType = "source-sftp_Password Authentication"
+ SourceSftpAuthenticationWildcardTypeSourceSftpSSHKeyAuthentication SourceSftpAuthenticationWildcardType = "source-sftp_SSH Key Authentication"
)
type SourceSftpAuthenticationWildcard struct {
- SourceSftpAuthenticationWildcardPasswordAuthentication *SourceSftpAuthenticationWildcardPasswordAuthentication
- SourceSftpAuthenticationWildcardSSHKeyAuthentication *SourceSftpAuthenticationWildcardSSHKeyAuthentication
+ SourceSftpPasswordAuthentication *SourceSftpPasswordAuthentication
+ SourceSftpSSHKeyAuthentication *SourceSftpSSHKeyAuthentication
Type SourceSftpAuthenticationWildcardType
}
-func CreateSourceSftpAuthenticationWildcardSourceSftpAuthenticationWildcardPasswordAuthentication(sourceSftpAuthenticationWildcardPasswordAuthentication SourceSftpAuthenticationWildcardPasswordAuthentication) SourceSftpAuthenticationWildcard {
- typ := SourceSftpAuthenticationWildcardTypeSourceSftpAuthenticationWildcardPasswordAuthentication
+func CreateSourceSftpAuthenticationWildcardSourceSftpPasswordAuthentication(sourceSftpPasswordAuthentication SourceSftpPasswordAuthentication) SourceSftpAuthenticationWildcard {
+ typ := SourceSftpAuthenticationWildcardTypeSourceSftpPasswordAuthentication
return SourceSftpAuthenticationWildcard{
- SourceSftpAuthenticationWildcardPasswordAuthentication: &sourceSftpAuthenticationWildcardPasswordAuthentication,
- Type: typ,
+ SourceSftpPasswordAuthentication: &sourceSftpPasswordAuthentication,
+ Type: typ,
}
}
-func CreateSourceSftpAuthenticationWildcardSourceSftpAuthenticationWildcardSSHKeyAuthentication(sourceSftpAuthenticationWildcardSSHKeyAuthentication SourceSftpAuthenticationWildcardSSHKeyAuthentication) SourceSftpAuthenticationWildcard {
- typ := SourceSftpAuthenticationWildcardTypeSourceSftpAuthenticationWildcardSSHKeyAuthentication
+func CreateSourceSftpAuthenticationWildcardSourceSftpSSHKeyAuthentication(sourceSftpSSHKeyAuthentication SourceSftpSSHKeyAuthentication) SourceSftpAuthenticationWildcard {
+ typ := SourceSftpAuthenticationWildcardTypeSourceSftpSSHKeyAuthentication
return SourceSftpAuthenticationWildcard{
- SourceSftpAuthenticationWildcardSSHKeyAuthentication: &sourceSftpAuthenticationWildcardSSHKeyAuthentication,
- Type: typ,
+ SourceSftpSSHKeyAuthentication: &sourceSftpSSHKeyAuthentication,
+ Type: typ,
}
}
func (u *SourceSftpAuthenticationWildcard) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSftpAuthenticationWildcardPasswordAuthentication := new(SourceSftpAuthenticationWildcardPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSftpAuthenticationWildcardPasswordAuthentication); err == nil {
- u.SourceSftpAuthenticationWildcardPasswordAuthentication = sourceSftpAuthenticationWildcardPasswordAuthentication
- u.Type = SourceSftpAuthenticationWildcardTypeSourceSftpAuthenticationWildcardPasswordAuthentication
+
+ sourceSftpPasswordAuthentication := new(SourceSftpPasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceSftpPasswordAuthentication, "", true, true); err == nil {
+ u.SourceSftpPasswordAuthentication = sourceSftpPasswordAuthentication
+ u.Type = SourceSftpAuthenticationWildcardTypeSourceSftpPasswordAuthentication
return nil
}
- sourceSftpAuthenticationWildcardSSHKeyAuthentication := new(SourceSftpAuthenticationWildcardSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSftpAuthenticationWildcardSSHKeyAuthentication); err == nil {
- u.SourceSftpAuthenticationWildcardSSHKeyAuthentication = sourceSftpAuthenticationWildcardSSHKeyAuthentication
- u.Type = SourceSftpAuthenticationWildcardTypeSourceSftpAuthenticationWildcardSSHKeyAuthentication
+ sourceSftpSSHKeyAuthentication := new(SourceSftpSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceSftpSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceSftpSSHKeyAuthentication = sourceSftpSSHKeyAuthentication
+ u.Type = SourceSftpAuthenticationWildcardTypeSourceSftpSSHKeyAuthentication
return nil
}
@@ -132,38 +171,38 @@ func (u *SourceSftpAuthenticationWildcard) UnmarshalJSON(data []byte) error {
}
func (u SourceSftpAuthenticationWildcard) MarshalJSON() ([]byte, error) {
- if u.SourceSftpAuthenticationWildcardPasswordAuthentication != nil {
- return json.Marshal(u.SourceSftpAuthenticationWildcardPasswordAuthentication)
+ if u.SourceSftpPasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceSftpPasswordAuthentication, "", true)
}
- if u.SourceSftpAuthenticationWildcardSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceSftpAuthenticationWildcardSSHKeyAuthentication)
+ if u.SourceSftpSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceSftpSSHKeyAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceSftpSftp string
+type Sftp string
const (
- SourceSftpSftpSftp SourceSftpSftp = "sftp"
+ SftpSftp Sftp = "sftp"
)
-func (e SourceSftpSftp) ToPointer() *SourceSftpSftp {
+func (e Sftp) ToPointer() *Sftp {
return &e
}
-func (e *SourceSftpSftp) UnmarshalJSON(data []byte) error {
+func (e *Sftp) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sftp":
- *e = SourceSftpSftp(v)
+ *e = Sftp(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSftpSftp: %v", v)
+ return fmt.Errorf("invalid value for Sftp: %v", v)
}
}
@@ -171,16 +210,80 @@ type SourceSftp struct {
// The server authentication method
Credentials *SourceSftpAuthenticationWildcard `json:"credentials,omitempty"`
// The regular expression to specify files for sync in a chosen Folder Path
- FilePattern *string `json:"file_pattern,omitempty"`
+ FilePattern *string `default:"" json:"file_pattern"`
// Coma separated file types. Currently only 'csv' and 'json' types are supported.
- FileTypes *string `json:"file_types,omitempty"`
+ FileTypes *string `default:"csv,json" json:"file_types"`
// The directory to search files for sync
- FolderPath *string `json:"folder_path,omitempty"`
+ FolderPath *string `default:"" json:"folder_path"`
// The server host address
Host string `json:"host"`
// The server port
- Port int64 `json:"port"`
- SourceType SourceSftpSftp `json:"sourceType"`
+ Port *int64 `default:"22" json:"port"`
+ sourceType Sftp `const:"sftp" json:"sourceType"`
// The server user
User string `json:"user"`
}
+
+func (s SourceSftp) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSftp) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSftp) GetCredentials() *SourceSftpAuthenticationWildcard {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceSftp) GetFilePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FilePattern
+}
+
+func (o *SourceSftp) GetFileTypes() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FileTypes
+}
+
+func (o *SourceSftp) GetFolderPath() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FolderPath
+}
+
+func (o *SourceSftp) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceSftp) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceSftp) GetSourceType() Sftp {
+ return SftpSftp
+}
+
+func (o *SourceSftp) GetUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.User
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesftpbulk.go b/internal/sdk/pkg/models/shared/sourcesftpbulk.go
old mode 100755
new mode 100644
index 232d9ac36..3bde3e1de
--- a/internal/sdk/pkg/models/shared/sourcesftpbulk.go
+++ b/internal/sdk/pkg/models/shared/sourcesftpbulk.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -36,50 +37,50 @@ func (e *SourceSftpBulkFileType) UnmarshalJSON(data []byte) error {
}
}
-type SourceSftpBulkSftpBulk string
+type SftpBulk string
const (
- SourceSftpBulkSftpBulkSftpBulk SourceSftpBulkSftpBulk = "sftp-bulk"
+ SftpBulkSftpBulk SftpBulk = "sftp-bulk"
)
-func (e SourceSftpBulkSftpBulk) ToPointer() *SourceSftpBulkSftpBulk {
+func (e SftpBulk) ToPointer() *SftpBulk {
return &e
}
-func (e *SourceSftpBulkSftpBulk) UnmarshalJSON(data []byte) error {
+func (e *SftpBulk) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sftp-bulk":
- *e = SourceSftpBulkSftpBulk(v)
+ *e = SftpBulk(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSftpBulkSftpBulk: %v", v)
+ return fmt.Errorf("invalid value for SftpBulk: %v", v)
}
}
type SourceSftpBulk struct {
// Sync only the most recent file for the configured folder path and file pattern
- FileMostRecent *bool `json:"file_most_recent,omitempty"`
+ FileMostRecent *bool `default:"false" json:"file_most_recent"`
// The regular expression to specify files for sync in a chosen Folder Path
- FilePattern *string `json:"file_pattern,omitempty"`
+ FilePattern *string `default:"" json:"file_pattern"`
// The file type you want to sync. Currently only 'csv' and 'json' files are supported.
- FileType *SourceSftpBulkFileType `json:"file_type,omitempty"`
+ FileType *SourceSftpBulkFileType `default:"csv" json:"file_type"`
// The directory to search files for sync
- FolderPath string `json:"folder_path"`
+ FolderPath *string `default:"" json:"folder_path"`
// The server host address
Host string `json:"host"`
// OS-level password for logging into the jump server host
Password *string `json:"password,omitempty"`
// The server port
- Port int64 `json:"port"`
+ Port *int64 `default:"22" json:"port"`
// The private key
PrivateKey *string `json:"private_key,omitempty"`
// The separator used in the CSV files. Define None if you want to use the Sniffer functionality
- Separator *string `json:"separator,omitempty"`
- SourceType SourceSftpBulkSftpBulk `json:"sourceType"`
+ Separator *string `default:"," json:"separator"`
+ sourceType SftpBulk `const:"sftp-bulk" json:"sourceType"`
// The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate time.Time `json:"start_date"`
// The name of the stream or table you want to create
@@ -87,3 +88,102 @@ type SourceSftpBulk struct {
// The server user
Username string `json:"username"`
}
+
+func (s SourceSftpBulk) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSftpBulk) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSftpBulk) GetFileMostRecent() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.FileMostRecent
+}
+
+func (o *SourceSftpBulk) GetFilePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FilePattern
+}
+
+func (o *SourceSftpBulk) GetFileType() *SourceSftpBulkFileType {
+ if o == nil {
+ return nil
+ }
+ return o.FileType
+}
+
+func (o *SourceSftpBulk) GetFolderPath() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FolderPath
+}
+
+func (o *SourceSftpBulk) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceSftpBulk) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceSftpBulk) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceSftpBulk) GetPrivateKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrivateKey
+}
+
+func (o *SourceSftpBulk) GetSeparator() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Separator
+}
+
+func (o *SourceSftpBulk) GetSourceType() SftpBulk {
+ return SftpBulkSftpBulk
+}
+
+func (o *SourceSftpBulk) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceSftpBulk) GetStreamName() string {
+ if o == nil {
+ return ""
+ }
+ return o.StreamName
+}
+
+func (o *SourceSftpBulk) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesftpbulkcreaterequest.go b/internal/sdk/pkg/models/shared/sourcesftpbulkcreaterequest.go
old mode 100755
new mode 100644
index a9d546aeb..ed8b0805e
--- a/internal/sdk/pkg/models/shared/sourcesftpbulkcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesftpbulkcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSftpBulkCreateRequest struct {
Configuration SourceSftpBulk `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSftpBulkCreateRequest) GetConfiguration() SourceSftpBulk {
+ if o == nil {
+ return SourceSftpBulk{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSftpBulkCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSftpBulkCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSftpBulkCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSftpBulkCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesftpbulkputrequest.go b/internal/sdk/pkg/models/shared/sourcesftpbulkputrequest.go
old mode 100755
new mode 100644
index a2b319622..a5885b029
--- a/internal/sdk/pkg/models/shared/sourcesftpbulkputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesftpbulkputrequest.go
@@ -7,3 +7,24 @@ type SourceSftpBulkPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSftpBulkPutRequest) GetConfiguration() SourceSftpBulkUpdate {
+ if o == nil {
+ return SourceSftpBulkUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSftpBulkPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSftpBulkPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesftpbulkupdate.go b/internal/sdk/pkg/models/shared/sourcesftpbulkupdate.go
old mode 100755
new mode 100644
index f8c6fd403..3d190e2f9
--- a/internal/sdk/pkg/models/shared/sourcesftpbulkupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesftpbulkupdate.go
@@ -5,22 +5,23 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-// SourceSftpBulkUpdateFileType - The file type you want to sync. Currently only 'csv' and 'json' files are supported.
-type SourceSftpBulkUpdateFileType string
+// FileType - The file type you want to sync. Currently only 'csv' and 'json' files are supported.
+type FileType string
const (
- SourceSftpBulkUpdateFileTypeCsv SourceSftpBulkUpdateFileType = "csv"
- SourceSftpBulkUpdateFileTypeJSON SourceSftpBulkUpdateFileType = "json"
+ FileTypeCsv FileType = "csv"
+ FileTypeJSON FileType = "json"
)
-func (e SourceSftpBulkUpdateFileType) ToPointer() *SourceSftpBulkUpdateFileType {
+func (e FileType) ToPointer() *FileType {
return &e
}
-func (e *SourceSftpBulkUpdateFileType) UnmarshalJSON(data []byte) error {
+func (e *FileType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -29,32 +30,32 @@ func (e *SourceSftpBulkUpdateFileType) UnmarshalJSON(data []byte) error {
case "csv":
fallthrough
case "json":
- *e = SourceSftpBulkUpdateFileType(v)
+ *e = FileType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSftpBulkUpdateFileType: %v", v)
+ return fmt.Errorf("invalid value for FileType: %v", v)
}
}
type SourceSftpBulkUpdate struct {
// Sync only the most recent file for the configured folder path and file pattern
- FileMostRecent *bool `json:"file_most_recent,omitempty"`
+ FileMostRecent *bool `default:"false" json:"file_most_recent"`
// The regular expression to specify files for sync in a chosen Folder Path
- FilePattern *string `json:"file_pattern,omitempty"`
+ FilePattern *string `default:"" json:"file_pattern"`
// The file type you want to sync. Currently only 'csv' and 'json' files are supported.
- FileType *SourceSftpBulkUpdateFileType `json:"file_type,omitempty"`
+ FileType *FileType `default:"csv" json:"file_type"`
// The directory to search files for sync
- FolderPath string `json:"folder_path"`
+ FolderPath *string `default:"" json:"folder_path"`
// The server host address
Host string `json:"host"`
// OS-level password for logging into the jump server host
Password *string `json:"password,omitempty"`
// The server port
- Port int64 `json:"port"`
+ Port *int64 `default:"22" json:"port"`
// The private key
PrivateKey *string `json:"private_key,omitempty"`
// The separator used in the CSV files. Define None if you want to use the Sniffer functionality
- Separator *string `json:"separator,omitempty"`
+ Separator *string `default:"," json:"separator"`
// The date from which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate time.Time `json:"start_date"`
// The name of the stream or table you want to create
@@ -62,3 +63,98 @@ type SourceSftpBulkUpdate struct {
// The server user
Username string `json:"username"`
}
+
+func (s SourceSftpBulkUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSftpBulkUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSftpBulkUpdate) GetFileMostRecent() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.FileMostRecent
+}
+
+func (o *SourceSftpBulkUpdate) GetFilePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FilePattern
+}
+
+func (o *SourceSftpBulkUpdate) GetFileType() *FileType {
+ if o == nil {
+ return nil
+ }
+ return o.FileType
+}
+
+func (o *SourceSftpBulkUpdate) GetFolderPath() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FolderPath
+}
+
+func (o *SourceSftpBulkUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceSftpBulkUpdate) GetPassword() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Password
+}
+
+func (o *SourceSftpBulkUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceSftpBulkUpdate) GetPrivateKey() *string {
+ if o == nil {
+ return nil
+ }
+ return o.PrivateKey
+}
+
+func (o *SourceSftpBulkUpdate) GetSeparator() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Separator
+}
+
+func (o *SourceSftpBulkUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceSftpBulkUpdate) GetStreamName() string {
+ if o == nil {
+ return ""
+ }
+ return o.StreamName
+}
+
+func (o *SourceSftpBulkUpdate) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesftpcreaterequest.go b/internal/sdk/pkg/models/shared/sourcesftpcreaterequest.go
old mode 100755
new mode 100644
index a84c7ad50..b09ccecd7
--- a/internal/sdk/pkg/models/shared/sourcesftpcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesftpcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSftpCreateRequest struct {
Configuration SourceSftp `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSftpCreateRequest) GetConfiguration() SourceSftp {
+ if o == nil {
+ return SourceSftp{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSftpCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSftpCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSftpCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSftpCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesftpputrequest.go b/internal/sdk/pkg/models/shared/sourcesftpputrequest.go
old mode 100755
new mode 100644
index fb99243b1..196713f4c
--- a/internal/sdk/pkg/models/shared/sourcesftpputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesftpputrequest.go
@@ -7,3 +7,24 @@ type SourceSftpPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSftpPutRequest) GetConfiguration() SourceSftpUpdate {
+ if o == nil {
+ return SourceSftpUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSftpPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSftpPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesftpupdate.go b/internal/sdk/pkg/models/shared/sourcesftpupdate.go
old mode 100755
new mode 100644
index bf10367e4..15917b71c
--- a/internal/sdk/pkg/models/shared/sourcesftpupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesftpupdate.go
@@ -3,128 +3,167 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceSftpUpdateAuthenticationWildcardSSHKeyAuthenticationAuthMethod - Connect through ssh key
-type SourceSftpUpdateAuthenticationWildcardSSHKeyAuthenticationAuthMethod string
+// SourceSftpUpdateSchemasAuthMethod - Connect through ssh key
+type SourceSftpUpdateSchemasAuthMethod string
const (
- SourceSftpUpdateAuthenticationWildcardSSHKeyAuthenticationAuthMethodSSHKeyAuth SourceSftpUpdateAuthenticationWildcardSSHKeyAuthenticationAuthMethod = "SSH_KEY_AUTH"
+ SourceSftpUpdateSchemasAuthMethodSSHKeyAuth SourceSftpUpdateSchemasAuthMethod = "SSH_KEY_AUTH"
)
-func (e SourceSftpUpdateAuthenticationWildcardSSHKeyAuthenticationAuthMethod) ToPointer() *SourceSftpUpdateAuthenticationWildcardSSHKeyAuthenticationAuthMethod {
+func (e SourceSftpUpdateSchemasAuthMethod) ToPointer() *SourceSftpUpdateSchemasAuthMethod {
return &e
}
-func (e *SourceSftpUpdateAuthenticationWildcardSSHKeyAuthenticationAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceSftpUpdateSchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_KEY_AUTH":
- *e = SourceSftpUpdateAuthenticationWildcardSSHKeyAuthenticationAuthMethod(v)
+ *e = SourceSftpUpdateSchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSftpUpdateAuthenticationWildcardSSHKeyAuthenticationAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceSftpUpdateSchemasAuthMethod: %v", v)
}
}
-// SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication - The server authentication method
-type SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication struct {
+// SourceSftpUpdateSSHKeyAuthentication - The server authentication method
+type SourceSftpUpdateSSHKeyAuthentication struct {
// Connect through ssh key
- AuthMethod SourceSftpUpdateAuthenticationWildcardSSHKeyAuthenticationAuthMethod `json:"auth_method"`
+ authMethod SourceSftpUpdateSchemasAuthMethod `const:"SSH_KEY_AUTH" json:"auth_method"`
// OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
AuthSSHKey string `json:"auth_ssh_key"`
}
-// SourceSftpUpdateAuthenticationWildcardPasswordAuthenticationAuthMethod - Connect through password authentication
-type SourceSftpUpdateAuthenticationWildcardPasswordAuthenticationAuthMethod string
+func (s SourceSftpUpdateSSHKeyAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSftpUpdateSSHKeyAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSftpUpdateSSHKeyAuthentication) GetAuthMethod() SourceSftpUpdateSchemasAuthMethod {
+ return SourceSftpUpdateSchemasAuthMethodSSHKeyAuth
+}
+
+func (o *SourceSftpUpdateSSHKeyAuthentication) GetAuthSSHKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthSSHKey
+}
+
+// SourceSftpUpdateAuthMethod - Connect through password authentication
+type SourceSftpUpdateAuthMethod string
const (
- SourceSftpUpdateAuthenticationWildcardPasswordAuthenticationAuthMethodSSHPasswordAuth SourceSftpUpdateAuthenticationWildcardPasswordAuthenticationAuthMethod = "SSH_PASSWORD_AUTH"
+ SourceSftpUpdateAuthMethodSSHPasswordAuth SourceSftpUpdateAuthMethod = "SSH_PASSWORD_AUTH"
)
-func (e SourceSftpUpdateAuthenticationWildcardPasswordAuthenticationAuthMethod) ToPointer() *SourceSftpUpdateAuthenticationWildcardPasswordAuthenticationAuthMethod {
+func (e SourceSftpUpdateAuthMethod) ToPointer() *SourceSftpUpdateAuthMethod {
return &e
}
-func (e *SourceSftpUpdateAuthenticationWildcardPasswordAuthenticationAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceSftpUpdateAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "SSH_PASSWORD_AUTH":
- *e = SourceSftpUpdateAuthenticationWildcardPasswordAuthenticationAuthMethod(v)
+ *e = SourceSftpUpdateAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSftpUpdateAuthenticationWildcardPasswordAuthenticationAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceSftpUpdateAuthMethod: %v", v)
}
}
-// SourceSftpUpdateAuthenticationWildcardPasswordAuthentication - The server authentication method
-type SourceSftpUpdateAuthenticationWildcardPasswordAuthentication struct {
+// SourceSftpUpdatePasswordAuthentication - The server authentication method
+type SourceSftpUpdatePasswordAuthentication struct {
// Connect through password authentication
- AuthMethod SourceSftpUpdateAuthenticationWildcardPasswordAuthenticationAuthMethod `json:"auth_method"`
+ authMethod SourceSftpUpdateAuthMethod `const:"SSH_PASSWORD_AUTH" json:"auth_method"`
// OS-level password for logging into the jump server host
AuthUserPassword string `json:"auth_user_password"`
}
+func (s SourceSftpUpdatePasswordAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSftpUpdatePasswordAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSftpUpdatePasswordAuthentication) GetAuthMethod() SourceSftpUpdateAuthMethod {
+ return SourceSftpUpdateAuthMethodSSHPasswordAuth
+}
+
+func (o *SourceSftpUpdatePasswordAuthentication) GetAuthUserPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthUserPassword
+}
+
type SourceSftpUpdateAuthenticationWildcardType string
const (
- SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdateAuthenticationWildcardPasswordAuthentication SourceSftpUpdateAuthenticationWildcardType = "source-sftp-update_Authentication *_Password Authentication"
- SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication SourceSftpUpdateAuthenticationWildcardType = "source-sftp-update_Authentication *_SSH Key Authentication"
+ SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdatePasswordAuthentication SourceSftpUpdateAuthenticationWildcardType = "source-sftp-update_Password Authentication"
+ SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdateSSHKeyAuthentication SourceSftpUpdateAuthenticationWildcardType = "source-sftp-update_SSH Key Authentication"
)
type SourceSftpUpdateAuthenticationWildcard struct {
- SourceSftpUpdateAuthenticationWildcardPasswordAuthentication *SourceSftpUpdateAuthenticationWildcardPasswordAuthentication
- SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication *SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication
+ SourceSftpUpdatePasswordAuthentication *SourceSftpUpdatePasswordAuthentication
+ SourceSftpUpdateSSHKeyAuthentication *SourceSftpUpdateSSHKeyAuthentication
Type SourceSftpUpdateAuthenticationWildcardType
}
-func CreateSourceSftpUpdateAuthenticationWildcardSourceSftpUpdateAuthenticationWildcardPasswordAuthentication(sourceSftpUpdateAuthenticationWildcardPasswordAuthentication SourceSftpUpdateAuthenticationWildcardPasswordAuthentication) SourceSftpUpdateAuthenticationWildcard {
- typ := SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdateAuthenticationWildcardPasswordAuthentication
+func CreateSourceSftpUpdateAuthenticationWildcardSourceSftpUpdatePasswordAuthentication(sourceSftpUpdatePasswordAuthentication SourceSftpUpdatePasswordAuthentication) SourceSftpUpdateAuthenticationWildcard {
+ typ := SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdatePasswordAuthentication
return SourceSftpUpdateAuthenticationWildcard{
- SourceSftpUpdateAuthenticationWildcardPasswordAuthentication: &sourceSftpUpdateAuthenticationWildcardPasswordAuthentication,
- Type: typ,
+ SourceSftpUpdatePasswordAuthentication: &sourceSftpUpdatePasswordAuthentication,
+ Type: typ,
}
}
-func CreateSourceSftpUpdateAuthenticationWildcardSourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication(sourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication) SourceSftpUpdateAuthenticationWildcard {
- typ := SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication
+func CreateSourceSftpUpdateAuthenticationWildcardSourceSftpUpdateSSHKeyAuthentication(sourceSftpUpdateSSHKeyAuthentication SourceSftpUpdateSSHKeyAuthentication) SourceSftpUpdateAuthenticationWildcard {
+ typ := SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdateSSHKeyAuthentication
return SourceSftpUpdateAuthenticationWildcard{
- SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication: &sourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication,
- Type: typ,
+ SourceSftpUpdateSSHKeyAuthentication: &sourceSftpUpdateSSHKeyAuthentication,
+ Type: typ,
}
}
func (u *SourceSftpUpdateAuthenticationWildcard) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSftpUpdateAuthenticationWildcardPasswordAuthentication := new(SourceSftpUpdateAuthenticationWildcardPasswordAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSftpUpdateAuthenticationWildcardPasswordAuthentication); err == nil {
- u.SourceSftpUpdateAuthenticationWildcardPasswordAuthentication = sourceSftpUpdateAuthenticationWildcardPasswordAuthentication
- u.Type = SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdateAuthenticationWildcardPasswordAuthentication
+
+ sourceSftpUpdatePasswordAuthentication := new(SourceSftpUpdatePasswordAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceSftpUpdatePasswordAuthentication, "", true, true); err == nil {
+ u.SourceSftpUpdatePasswordAuthentication = sourceSftpUpdatePasswordAuthentication
+ u.Type = SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdatePasswordAuthentication
return nil
}
- sourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication := new(SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication); err == nil {
- u.SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication = sourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication
- u.Type = SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication
+ sourceSftpUpdateSSHKeyAuthentication := new(SourceSftpUpdateSSHKeyAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceSftpUpdateSSHKeyAuthentication, "", true, true); err == nil {
+ u.SourceSftpUpdateSSHKeyAuthentication = sourceSftpUpdateSSHKeyAuthentication
+ u.Type = SourceSftpUpdateAuthenticationWildcardTypeSourceSftpUpdateSSHKeyAuthentication
return nil
}
@@ -132,30 +171,90 @@ func (u *SourceSftpUpdateAuthenticationWildcard) UnmarshalJSON(data []byte) erro
}
func (u SourceSftpUpdateAuthenticationWildcard) MarshalJSON() ([]byte, error) {
- if u.SourceSftpUpdateAuthenticationWildcardPasswordAuthentication != nil {
- return json.Marshal(u.SourceSftpUpdateAuthenticationWildcardPasswordAuthentication)
+ if u.SourceSftpUpdatePasswordAuthentication != nil {
+ return utils.MarshalJSON(u.SourceSftpUpdatePasswordAuthentication, "", true)
}
- if u.SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication != nil {
- return json.Marshal(u.SourceSftpUpdateAuthenticationWildcardSSHKeyAuthentication)
+ if u.SourceSftpUpdateSSHKeyAuthentication != nil {
+ return utils.MarshalJSON(u.SourceSftpUpdateSSHKeyAuthentication, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceSftpUpdate struct {
// The server authentication method
Credentials *SourceSftpUpdateAuthenticationWildcard `json:"credentials,omitempty"`
// The regular expression to specify files for sync in a chosen Folder Path
- FilePattern *string `json:"file_pattern,omitempty"`
+ FilePattern *string `default:"" json:"file_pattern"`
// Coma separated file types. Currently only 'csv' and 'json' types are supported.
- FileTypes *string `json:"file_types,omitempty"`
+ FileTypes *string `default:"csv,json" json:"file_types"`
// The directory to search files for sync
- FolderPath *string `json:"folder_path,omitempty"`
+ FolderPath *string `default:"" json:"folder_path"`
// The server host address
Host string `json:"host"`
// The server port
- Port int64 `json:"port"`
+ Port *int64 `default:"22" json:"port"`
// The server user
User string `json:"user"`
}
+
+func (s SourceSftpUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSftpUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSftpUpdate) GetCredentials() *SourceSftpUpdateAuthenticationWildcard {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceSftpUpdate) GetFilePattern() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FilePattern
+}
+
+func (o *SourceSftpUpdate) GetFileTypes() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FileTypes
+}
+
+func (o *SourceSftpUpdate) GetFolderPath() *string {
+ if o == nil {
+ return nil
+ }
+ return o.FolderPath
+}
+
+func (o *SourceSftpUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceSftpUpdate) GetPort() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.Port
+}
+
+func (o *SourceSftpUpdate) GetUser() string {
+ if o == nil {
+ return ""
+ }
+ return o.User
+}
diff --git a/internal/sdk/pkg/models/shared/sourceshopify.go b/internal/sdk/pkg/models/shared/sourceshopify.go
old mode 100755
new mode 100644
index 4655cca84..d39fc83a6
--- a/internal/sdk/pkg/models/shared/sourceshopify.go
+++ b/internal/sdk/pkg/models/shared/sourceshopify.go
@@ -3,129 +3,182 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod string
+type SourceShopifySchemasAuthMethod string
const (
- SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethodAPIPassword SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod = "api_password"
+ SourceShopifySchemasAuthMethodAPIPassword SourceShopifySchemasAuthMethod = "api_password"
)
-func (e SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod) ToPointer() *SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod {
+func (e SourceShopifySchemasAuthMethod) ToPointer() *SourceShopifySchemasAuthMethod {
return &e
}
-func (e *SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceShopifySchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_password":
- *e = SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod(v)
+ *e = SourceShopifySchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceShopifySchemasAuthMethod: %v", v)
}
}
-// SourceShopifyShopifyAuthorizationMethodAPIPassword - API Password Auth
-type SourceShopifyShopifyAuthorizationMethodAPIPassword struct {
+// SourceShopifyAPIPassword - API Password Auth
+type SourceShopifyAPIPassword struct {
// The API Password for your private application in the `Shopify` store.
- APIPassword string `json:"api_password"`
- AuthMethod SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod `json:"auth_method"`
+ APIPassword string `json:"api_password"`
+ authMethod SourceShopifySchemasAuthMethod `const:"api_password" json:"auth_method"`
}
-type SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod string
+func (s SourceShopifyAPIPassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceShopifyAPIPassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceShopifyAPIPassword) GetAPIPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIPassword
+}
+
+func (o *SourceShopifyAPIPassword) GetAuthMethod() SourceShopifySchemasAuthMethod {
+ return SourceShopifySchemasAuthMethodAPIPassword
+}
+
+type SourceShopifyAuthMethod string
const (
- SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethodOauth20 SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod = "oauth2.0"
+ SourceShopifyAuthMethodOauth20 SourceShopifyAuthMethod = "oauth2.0"
)
-func (e SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod) ToPointer() *SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod {
+func (e SourceShopifyAuthMethod) ToPointer() *SourceShopifyAuthMethod {
return &e
}
-func (e *SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceShopifyAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod(v)
+ *e = SourceShopifyAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceShopifyAuthMethod: %v", v)
}
}
-// SourceShopifyShopifyAuthorizationMethodOAuth20 - OAuth2.0
-type SourceShopifyShopifyAuthorizationMethodOAuth20 struct {
+// SourceShopifyOAuth20 - OAuth2.0
+type SourceShopifyOAuth20 struct {
// The Access Token for making authenticated requests.
- AccessToken *string `json:"access_token,omitempty"`
- AuthMethod SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod `json:"auth_method"`
+ AccessToken *string `json:"access_token,omitempty"`
+ authMethod SourceShopifyAuthMethod `const:"oauth2.0" json:"auth_method"`
// The Client ID of the Shopify developer application.
ClientID *string `json:"client_id,omitempty"`
// The Client Secret of the Shopify developer application.
ClientSecret *string `json:"client_secret,omitempty"`
}
+func (s SourceShopifyOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceShopifyOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceShopifyOAuth20) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceShopifyOAuth20) GetAuthMethod() SourceShopifyAuthMethod {
+ return SourceShopifyAuthMethodOauth20
+}
+
+func (o *SourceShopifyOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceShopifyOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
type SourceShopifyShopifyAuthorizationMethodType string
const (
- SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodOAuth20 SourceShopifyShopifyAuthorizationMethodType = "source-shopify_Shopify Authorization Method_OAuth2.0"
- SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodAPIPassword SourceShopifyShopifyAuthorizationMethodType = "source-shopify_Shopify Authorization Method_API Password"
+ SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyOAuth20 SourceShopifyShopifyAuthorizationMethodType = "source-shopify_OAuth2.0"
+ SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyAPIPassword SourceShopifyShopifyAuthorizationMethodType = "source-shopify_API Password"
)
type SourceShopifyShopifyAuthorizationMethod struct {
- SourceShopifyShopifyAuthorizationMethodOAuth20 *SourceShopifyShopifyAuthorizationMethodOAuth20
- SourceShopifyShopifyAuthorizationMethodAPIPassword *SourceShopifyShopifyAuthorizationMethodAPIPassword
+ SourceShopifyOAuth20 *SourceShopifyOAuth20
+ SourceShopifyAPIPassword *SourceShopifyAPIPassword
Type SourceShopifyShopifyAuthorizationMethodType
}
-func CreateSourceShopifyShopifyAuthorizationMethodSourceShopifyShopifyAuthorizationMethodOAuth20(sourceShopifyShopifyAuthorizationMethodOAuth20 SourceShopifyShopifyAuthorizationMethodOAuth20) SourceShopifyShopifyAuthorizationMethod {
- typ := SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodOAuth20
+func CreateSourceShopifyShopifyAuthorizationMethodSourceShopifyOAuth20(sourceShopifyOAuth20 SourceShopifyOAuth20) SourceShopifyShopifyAuthorizationMethod {
+ typ := SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyOAuth20
return SourceShopifyShopifyAuthorizationMethod{
- SourceShopifyShopifyAuthorizationMethodOAuth20: &sourceShopifyShopifyAuthorizationMethodOAuth20,
- Type: typ,
+ SourceShopifyOAuth20: &sourceShopifyOAuth20,
+ Type: typ,
}
}
-func CreateSourceShopifyShopifyAuthorizationMethodSourceShopifyShopifyAuthorizationMethodAPIPassword(sourceShopifyShopifyAuthorizationMethodAPIPassword SourceShopifyShopifyAuthorizationMethodAPIPassword) SourceShopifyShopifyAuthorizationMethod {
- typ := SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodAPIPassword
+func CreateSourceShopifyShopifyAuthorizationMethodSourceShopifyAPIPassword(sourceShopifyAPIPassword SourceShopifyAPIPassword) SourceShopifyShopifyAuthorizationMethod {
+ typ := SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyAPIPassword
return SourceShopifyShopifyAuthorizationMethod{
- SourceShopifyShopifyAuthorizationMethodAPIPassword: &sourceShopifyShopifyAuthorizationMethodAPIPassword,
- Type: typ,
+ SourceShopifyAPIPassword: &sourceShopifyAPIPassword,
+ Type: typ,
}
}
func (u *SourceShopifyShopifyAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceShopifyShopifyAuthorizationMethodAPIPassword := new(SourceShopifyShopifyAuthorizationMethodAPIPassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceShopifyShopifyAuthorizationMethodAPIPassword); err == nil {
- u.SourceShopifyShopifyAuthorizationMethodAPIPassword = sourceShopifyShopifyAuthorizationMethodAPIPassword
- u.Type = SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodAPIPassword
+
+ sourceShopifyAPIPassword := new(SourceShopifyAPIPassword)
+ if err := utils.UnmarshalJSON(data, &sourceShopifyAPIPassword, "", true, true); err == nil {
+ u.SourceShopifyAPIPassword = sourceShopifyAPIPassword
+ u.Type = SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyAPIPassword
return nil
}
- sourceShopifyShopifyAuthorizationMethodOAuth20 := new(SourceShopifyShopifyAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceShopifyShopifyAuthorizationMethodOAuth20); err == nil {
- u.SourceShopifyShopifyAuthorizationMethodOAuth20 = sourceShopifyShopifyAuthorizationMethodOAuth20
- u.Type = SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodOAuth20
+ sourceShopifyOAuth20 := new(SourceShopifyOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceShopifyOAuth20, "", true, true); err == nil {
+ u.SourceShopifyOAuth20 = sourceShopifyOAuth20
+ u.Type = SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyOAuth20
return nil
}
@@ -133,38 +186,38 @@ func (u *SourceShopifyShopifyAuthorizationMethod) UnmarshalJSON(data []byte) err
}
func (u SourceShopifyShopifyAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceShopifyShopifyAuthorizationMethodAPIPassword != nil {
- return json.Marshal(u.SourceShopifyShopifyAuthorizationMethodAPIPassword)
+ if u.SourceShopifyOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceShopifyOAuth20, "", true)
}
- if u.SourceShopifyShopifyAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceShopifyShopifyAuthorizationMethodOAuth20)
+ if u.SourceShopifyAPIPassword != nil {
+ return utils.MarshalJSON(u.SourceShopifyAPIPassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceShopifyShopify string
+type Shopify string
const (
- SourceShopifyShopifyShopify SourceShopifyShopify = "shopify"
+ ShopifyShopify Shopify = "shopify"
)
-func (e SourceShopifyShopify) ToPointer() *SourceShopifyShopify {
+func (e Shopify) ToPointer() *Shopify {
return &e
}
-func (e *SourceShopifyShopify) UnmarshalJSON(data []byte) error {
+func (e *Shopify) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "shopify":
- *e = SourceShopifyShopify(v)
+ *e = Shopify(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceShopifyShopify: %v", v)
+ return fmt.Errorf("invalid value for Shopify: %v", v)
}
}
@@ -172,8 +225,44 @@ type SourceShopify struct {
// The authorization method to use to retrieve data from Shopify
Credentials *SourceShopifyShopifyAuthorizationMethod `json:"credentials,omitempty"`
// The name of your Shopify store found in the URL. For example, if your URL was https://NAME.myshopify.com, then the name would be 'NAME' or 'NAME.myshopify.com'.
- Shop string `json:"shop"`
- SourceType SourceShopifyShopify `json:"sourceType"`
+ Shop string `json:"shop"`
+ sourceType Shopify `const:"shopify" json:"sourceType"`
// The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2020-01-01" json:"start_date"`
+}
+
+func (s SourceShopify) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceShopify) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceShopify) GetCredentials() *SourceShopifyShopifyAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceShopify) GetShop() string {
+ if o == nil {
+ return ""
+ }
+ return o.Shop
+}
+
+func (o *SourceShopify) GetSourceType() Shopify {
+ return ShopifyShopify
+}
+
+func (o *SourceShopify) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceshopifycreaterequest.go b/internal/sdk/pkg/models/shared/sourceshopifycreaterequest.go
old mode 100755
new mode 100644
index 3e37c68da..b46055be0
--- a/internal/sdk/pkg/models/shared/sourceshopifycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceshopifycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceShopifyCreateRequest struct {
Configuration SourceShopify `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceShopifyCreateRequest) GetConfiguration() SourceShopify {
+ if o == nil {
+ return SourceShopify{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceShopifyCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceShopifyCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceShopifyCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceShopifyCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceshopifyputrequest.go b/internal/sdk/pkg/models/shared/sourceshopifyputrequest.go
old mode 100755
new mode 100644
index 15cc16f72..c79070035
--- a/internal/sdk/pkg/models/shared/sourceshopifyputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceshopifyputrequest.go
@@ -7,3 +7,24 @@ type SourceShopifyPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceShopifyPutRequest) GetConfiguration() SourceShopifyUpdate {
+ if o == nil {
+ return SourceShopifyUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceShopifyPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceShopifyPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceshopifyupdate.go b/internal/sdk/pkg/models/shared/sourceshopifyupdate.go
old mode 100755
new mode 100644
index 29285ffe9..95573daf9
--- a/internal/sdk/pkg/models/shared/sourceshopifyupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceshopifyupdate.go
@@ -3,152 +3,237 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod string
+type SourceShopifyUpdateSchemasAuthMethod string
const (
- SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethodAPIPassword SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod = "api_password"
+ SourceShopifyUpdateSchemasAuthMethodAPIPassword SourceShopifyUpdateSchemasAuthMethod = "api_password"
)
-func (e SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod) ToPointer() *SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod {
+func (e SourceShopifyUpdateSchemasAuthMethod) ToPointer() *SourceShopifyUpdateSchemasAuthMethod {
return &e
}
-func (e *SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceShopifyUpdateSchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_password":
- *e = SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod(v)
+ *e = SourceShopifyUpdateSchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceShopifyUpdateSchemasAuthMethod: %v", v)
}
}
-// SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword - API Password Auth
-type SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword struct {
+// APIPassword - API Password Auth
+type APIPassword struct {
// The API Password for your private application in the `Shopify` store.
- APIPassword string `json:"api_password"`
- AuthMethod SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod `json:"auth_method"`
+ APIPassword string `json:"api_password"`
+ authMethod SourceShopifyUpdateSchemasAuthMethod `const:"api_password" json:"auth_method"`
}
-type SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod string
+func (a APIPassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *APIPassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *APIPassword) GetAPIPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIPassword
+}
+
+func (o *APIPassword) GetAuthMethod() SourceShopifyUpdateSchemasAuthMethod {
+ return SourceShopifyUpdateSchemasAuthMethodAPIPassword
+}
+
+type SourceShopifyUpdateAuthMethod string
const (
- SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethodOauth20 SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod = "oauth2.0"
+ SourceShopifyUpdateAuthMethodOauth20 SourceShopifyUpdateAuthMethod = "oauth2.0"
)
-func (e SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod) ToPointer() *SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod {
+func (e SourceShopifyUpdateAuthMethod) ToPointer() *SourceShopifyUpdateAuthMethod {
return &e
}
-func (e *SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceShopifyUpdateAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod(v)
+ *e = SourceShopifyUpdateAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceShopifyUpdateAuthMethod: %v", v)
}
}
-// SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 - OAuth2.0
-type SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 struct {
+// SourceShopifyUpdateOAuth20 - OAuth2.0
+type SourceShopifyUpdateOAuth20 struct {
// The Access Token for making authenticated requests.
- AccessToken *string `json:"access_token,omitempty"`
- AuthMethod SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod `json:"auth_method"`
+ AccessToken *string `json:"access_token,omitempty"`
+ authMethod SourceShopifyUpdateAuthMethod `const:"oauth2.0" json:"auth_method"`
// The Client ID of the Shopify developer application.
ClientID *string `json:"client_id,omitempty"`
// The Client Secret of the Shopify developer application.
ClientSecret *string `json:"client_secret,omitempty"`
}
-type SourceShopifyUpdateShopifyAuthorizationMethodType string
+func (s SourceShopifyUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceShopifyUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceShopifyUpdateOAuth20) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceShopifyUpdateOAuth20) GetAuthMethod() SourceShopifyUpdateAuthMethod {
+ return SourceShopifyUpdateAuthMethodOauth20
+}
+
+func (o *SourceShopifyUpdateOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceShopifyUpdateOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+type ShopifyAuthorizationMethodType string
const (
- SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodOAuth20 SourceShopifyUpdateShopifyAuthorizationMethodType = "source-shopify-update_Shopify Authorization Method_OAuth2.0"
- SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodAPIPassword SourceShopifyUpdateShopifyAuthorizationMethodType = "source-shopify-update_Shopify Authorization Method_API Password"
+ ShopifyAuthorizationMethodTypeSourceShopifyUpdateOAuth20 ShopifyAuthorizationMethodType = "source-shopify-update_OAuth2.0"
+ ShopifyAuthorizationMethodTypeAPIPassword ShopifyAuthorizationMethodType = "API Password"
)
-type SourceShopifyUpdateShopifyAuthorizationMethod struct {
- SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 *SourceShopifyUpdateShopifyAuthorizationMethodOAuth20
- SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword *SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword
+type ShopifyAuthorizationMethod struct {
+ SourceShopifyUpdateOAuth20 *SourceShopifyUpdateOAuth20
+ APIPassword *APIPassword
- Type SourceShopifyUpdateShopifyAuthorizationMethodType
+ Type ShopifyAuthorizationMethodType
}
-func CreateSourceShopifyUpdateShopifyAuthorizationMethodSourceShopifyUpdateShopifyAuthorizationMethodOAuth20(sourceShopifyUpdateShopifyAuthorizationMethodOAuth20 SourceShopifyUpdateShopifyAuthorizationMethodOAuth20) SourceShopifyUpdateShopifyAuthorizationMethod {
- typ := SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodOAuth20
+func CreateShopifyAuthorizationMethodSourceShopifyUpdateOAuth20(sourceShopifyUpdateOAuth20 SourceShopifyUpdateOAuth20) ShopifyAuthorizationMethod {
+ typ := ShopifyAuthorizationMethodTypeSourceShopifyUpdateOAuth20
- return SourceShopifyUpdateShopifyAuthorizationMethod{
- SourceShopifyUpdateShopifyAuthorizationMethodOAuth20: &sourceShopifyUpdateShopifyAuthorizationMethodOAuth20,
- Type: typ,
+ return ShopifyAuthorizationMethod{
+ SourceShopifyUpdateOAuth20: &sourceShopifyUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceShopifyUpdateShopifyAuthorizationMethodSourceShopifyUpdateShopifyAuthorizationMethodAPIPassword(sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword) SourceShopifyUpdateShopifyAuthorizationMethod {
- typ := SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodAPIPassword
+func CreateShopifyAuthorizationMethodAPIPassword(apiPassword APIPassword) ShopifyAuthorizationMethod {
+ typ := ShopifyAuthorizationMethodTypeAPIPassword
- return SourceShopifyUpdateShopifyAuthorizationMethod{
- SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword: &sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword,
- Type: typ,
+ return ShopifyAuthorizationMethod{
+ APIPassword: &apiPassword,
+ Type: typ,
}
}
-func (u *SourceShopifyUpdateShopifyAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *ShopifyAuthorizationMethod) UnmarshalJSON(data []byte) error {
- sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword := new(SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword); err == nil {
- u.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword = sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword
- u.Type = SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodAPIPassword
+ apiPassword := new(APIPassword)
+ if err := utils.UnmarshalJSON(data, &apiPassword, "", true, true); err == nil {
+ u.APIPassword = apiPassword
+ u.Type = ShopifyAuthorizationMethodTypeAPIPassword
return nil
}
- sourceShopifyUpdateShopifyAuthorizationMethodOAuth20 := new(SourceShopifyUpdateShopifyAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceShopifyUpdateShopifyAuthorizationMethodOAuth20); err == nil {
- u.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 = sourceShopifyUpdateShopifyAuthorizationMethodOAuth20
- u.Type = SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodOAuth20
+ sourceShopifyUpdateOAuth20 := new(SourceShopifyUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceShopifyUpdateOAuth20, "", true, true); err == nil {
+ u.SourceShopifyUpdateOAuth20 = sourceShopifyUpdateOAuth20
+ u.Type = ShopifyAuthorizationMethodTypeSourceShopifyUpdateOAuth20
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceShopifyUpdateShopifyAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword != nil {
- return json.Marshal(u.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword)
+func (u ShopifyAuthorizationMethod) MarshalJSON() ([]byte, error) {
+ if u.SourceShopifyUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceShopifyUpdateOAuth20, "", true)
}
- if u.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20)
+ if u.APIPassword != nil {
+ return utils.MarshalJSON(u.APIPassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceShopifyUpdate struct {
// The authorization method to use to retrieve data from Shopify
- Credentials *SourceShopifyUpdateShopifyAuthorizationMethod `json:"credentials,omitempty"`
+ Credentials *ShopifyAuthorizationMethod `json:"credentials,omitempty"`
// The name of your Shopify store found in the URL. For example, if your URL was https://NAME.myshopify.com, then the name would be 'NAME' or 'NAME.myshopify.com'.
Shop string `json:"shop"`
// The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2020-01-01" json:"start_date"`
+}
+
+func (s SourceShopifyUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceShopifyUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceShopifyUpdate) GetCredentials() *ShopifyAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceShopifyUpdate) GetShop() string {
+ if o == nil {
+ return ""
+ }
+ return o.Shop
+}
+
+func (o *SourceShopifyUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourceshortio.go b/internal/sdk/pkg/models/shared/sourceshortio.go
old mode 100755
new mode 100644
index 27ca24498..e3481e7c5
--- a/internal/sdk/pkg/models/shared/sourceshortio.go
+++ b/internal/sdk/pkg/models/shared/sourceshortio.go
@@ -5,37 +5,74 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceShortioShortio string
+type Shortio string
const (
- SourceShortioShortioShortio SourceShortioShortio = "shortio"
+ ShortioShortio Shortio = "shortio"
)
-func (e SourceShortioShortio) ToPointer() *SourceShortioShortio {
+func (e Shortio) ToPointer() *Shortio {
return &e
}
-func (e *SourceShortioShortio) UnmarshalJSON(data []byte) error {
+func (e *Shortio) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "shortio":
- *e = SourceShortioShortio(v)
+ *e = Shortio(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceShortioShortio: %v", v)
+ return fmt.Errorf("invalid value for Shortio: %v", v)
}
}
type SourceShortio struct {
DomainID string `json:"domain_id"`
// Short.io Secret Key
- SecretKey string `json:"secret_key"`
- SourceType SourceShortioShortio `json:"sourceType"`
+ SecretKey string `json:"secret_key"`
+ sourceType Shortio `const:"shortio" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate string `json:"start_date"`
}
+
+func (s SourceShortio) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceShortio) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceShortio) GetDomainID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DomainID
+}
+
+func (o *SourceShortio) GetSecretKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretKey
+}
+
+func (o *SourceShortio) GetSourceType() Shortio {
+ return ShortioShortio
+}
+
+func (o *SourceShortio) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceshortiocreaterequest.go b/internal/sdk/pkg/models/shared/sourceshortiocreaterequest.go
old mode 100755
new mode 100644
index 34c443c31..6c3b7493b
--- a/internal/sdk/pkg/models/shared/sourceshortiocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceshortiocreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceShortioCreateRequest struct {
Configuration SourceShortio `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceShortioCreateRequest) GetConfiguration() SourceShortio {
+ if o == nil {
+ return SourceShortio{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceShortioCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceShortioCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceShortioCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceShortioCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceshortioputrequest.go b/internal/sdk/pkg/models/shared/sourceshortioputrequest.go
old mode 100755
new mode 100644
index 2bf56b6c6..30a3c7227
--- a/internal/sdk/pkg/models/shared/sourceshortioputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceshortioputrequest.go
@@ -7,3 +7,24 @@ type SourceShortioPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceShortioPutRequest) GetConfiguration() SourceShortioUpdate {
+ if o == nil {
+ return SourceShortioUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceShortioPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceShortioPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceshortioupdate.go b/internal/sdk/pkg/models/shared/sourceshortioupdate.go
old mode 100755
new mode 100644
index b78384096..cf9a5b081
--- a/internal/sdk/pkg/models/shared/sourceshortioupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceshortioupdate.go
@@ -9,3 +9,24 @@ type SourceShortioUpdate struct {
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate string `json:"start_date"`
}
+
+func (o *SourceShortioUpdate) GetDomainID() string {
+ if o == nil {
+ return ""
+ }
+ return o.DomainID
+}
+
+func (o *SourceShortioUpdate) GetSecretKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.SecretKey
+}
+
+func (o *SourceShortioUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceslack.go b/internal/sdk/pkg/models/shared/sourceslack.go
old mode 100755
new mode 100644
index f852d03f8..e4965caee
--- a/internal/sdk/pkg/models/shared/sourceslack.go
+++ b/internal/sdk/pkg/models/shared/sourceslack.go
@@ -3,129 +3,182 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceSlackAuthenticationMechanismAPITokenOptionTitle string
+type SourceSlackSchemasOptionTitle string
const (
- SourceSlackAuthenticationMechanismAPITokenOptionTitleAPITokenCredentials SourceSlackAuthenticationMechanismAPITokenOptionTitle = "API Token Credentials"
+ SourceSlackSchemasOptionTitleAPITokenCredentials SourceSlackSchemasOptionTitle = "API Token Credentials"
)
-func (e SourceSlackAuthenticationMechanismAPITokenOptionTitle) ToPointer() *SourceSlackAuthenticationMechanismAPITokenOptionTitle {
+func (e SourceSlackSchemasOptionTitle) ToPointer() *SourceSlackSchemasOptionTitle {
return &e
}
-func (e *SourceSlackAuthenticationMechanismAPITokenOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceSlackSchemasOptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "API Token Credentials":
- *e = SourceSlackAuthenticationMechanismAPITokenOptionTitle(v)
+ *e = SourceSlackSchemasOptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSlackAuthenticationMechanismAPITokenOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceSlackSchemasOptionTitle: %v", v)
}
}
-// SourceSlackAuthenticationMechanismAPIToken - Choose how to authenticate into Slack
-type SourceSlackAuthenticationMechanismAPIToken struct {
+// SourceSlackAPIToken - Choose how to authenticate into Slack
+type SourceSlackAPIToken struct {
// A Slack bot token. See the docs for instructions on how to generate it.
- APIToken string `json:"api_token"`
- OptionTitle SourceSlackAuthenticationMechanismAPITokenOptionTitle `json:"option_title"`
+ APIToken string `json:"api_token"`
+ optionTitle SourceSlackSchemasOptionTitle `const:"API Token Credentials" json:"option_title"`
}
-type SourceSlackAuthenticationMechanismSignInViaSlackOAuthOptionTitle string
+func (s SourceSlackAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSlackAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSlackAPIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceSlackAPIToken) GetOptionTitle() SourceSlackSchemasOptionTitle {
+ return SourceSlackSchemasOptionTitleAPITokenCredentials
+}
+
+type SourceSlackOptionTitle string
const (
- SourceSlackAuthenticationMechanismSignInViaSlackOAuthOptionTitleDefaultOAuth20Authorization SourceSlackAuthenticationMechanismSignInViaSlackOAuthOptionTitle = "Default OAuth2.0 authorization"
+ SourceSlackOptionTitleDefaultOAuth20Authorization SourceSlackOptionTitle = "Default OAuth2.0 authorization"
)
-func (e SourceSlackAuthenticationMechanismSignInViaSlackOAuthOptionTitle) ToPointer() *SourceSlackAuthenticationMechanismSignInViaSlackOAuthOptionTitle {
+func (e SourceSlackOptionTitle) ToPointer() *SourceSlackOptionTitle {
return &e
}
-func (e *SourceSlackAuthenticationMechanismSignInViaSlackOAuthOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceSlackOptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Default OAuth2.0 authorization":
- *e = SourceSlackAuthenticationMechanismSignInViaSlackOAuthOptionTitle(v)
+ *e = SourceSlackOptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSlackAuthenticationMechanismSignInViaSlackOAuthOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceSlackOptionTitle: %v", v)
}
}
-// SourceSlackAuthenticationMechanismSignInViaSlackOAuth - Choose how to authenticate into Slack
-type SourceSlackAuthenticationMechanismSignInViaSlackOAuth struct {
+// SourceSlackSignInViaSlackOAuth - Choose how to authenticate into Slack
+type SourceSlackSignInViaSlackOAuth struct {
// Slack access_token. See our docs if you need help generating the token.
AccessToken string `json:"access_token"`
// Slack client_id. See our docs if you need help finding this id.
ClientID string `json:"client_id"`
// Slack client_secret. See our docs if you need help finding this secret.
- ClientSecret string `json:"client_secret"`
- OptionTitle SourceSlackAuthenticationMechanismSignInViaSlackOAuthOptionTitle `json:"option_title"`
+ ClientSecret string `json:"client_secret"`
+ optionTitle SourceSlackOptionTitle `const:"Default OAuth2.0 authorization" json:"option_title"`
+}
+
+func (s SourceSlackSignInViaSlackOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSlackSignInViaSlackOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSlackSignInViaSlackOAuth) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSlackSignInViaSlackOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceSlackSignInViaSlackOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceSlackSignInViaSlackOAuth) GetOptionTitle() SourceSlackOptionTitle {
+ return SourceSlackOptionTitleDefaultOAuth20Authorization
}
type SourceSlackAuthenticationMechanismType string
const (
- SourceSlackAuthenticationMechanismTypeSourceSlackAuthenticationMechanismSignInViaSlackOAuth SourceSlackAuthenticationMechanismType = "source-slack_Authentication mechanism_Sign in via Slack (OAuth)"
- SourceSlackAuthenticationMechanismTypeSourceSlackAuthenticationMechanismAPIToken SourceSlackAuthenticationMechanismType = "source-slack_Authentication mechanism_API Token"
+ SourceSlackAuthenticationMechanismTypeSourceSlackSignInViaSlackOAuth SourceSlackAuthenticationMechanismType = "source-slack_Sign in via Slack (OAuth)"
+ SourceSlackAuthenticationMechanismTypeSourceSlackAPIToken SourceSlackAuthenticationMechanismType = "source-slack_API Token"
)
type SourceSlackAuthenticationMechanism struct {
- SourceSlackAuthenticationMechanismSignInViaSlackOAuth *SourceSlackAuthenticationMechanismSignInViaSlackOAuth
- SourceSlackAuthenticationMechanismAPIToken *SourceSlackAuthenticationMechanismAPIToken
+ SourceSlackSignInViaSlackOAuth *SourceSlackSignInViaSlackOAuth
+ SourceSlackAPIToken *SourceSlackAPIToken
Type SourceSlackAuthenticationMechanismType
}
-func CreateSourceSlackAuthenticationMechanismSourceSlackAuthenticationMechanismSignInViaSlackOAuth(sourceSlackAuthenticationMechanismSignInViaSlackOAuth SourceSlackAuthenticationMechanismSignInViaSlackOAuth) SourceSlackAuthenticationMechanism {
- typ := SourceSlackAuthenticationMechanismTypeSourceSlackAuthenticationMechanismSignInViaSlackOAuth
+func CreateSourceSlackAuthenticationMechanismSourceSlackSignInViaSlackOAuth(sourceSlackSignInViaSlackOAuth SourceSlackSignInViaSlackOAuth) SourceSlackAuthenticationMechanism {
+ typ := SourceSlackAuthenticationMechanismTypeSourceSlackSignInViaSlackOAuth
return SourceSlackAuthenticationMechanism{
- SourceSlackAuthenticationMechanismSignInViaSlackOAuth: &sourceSlackAuthenticationMechanismSignInViaSlackOAuth,
- Type: typ,
+ SourceSlackSignInViaSlackOAuth: &sourceSlackSignInViaSlackOAuth,
+ Type: typ,
}
}
-func CreateSourceSlackAuthenticationMechanismSourceSlackAuthenticationMechanismAPIToken(sourceSlackAuthenticationMechanismAPIToken SourceSlackAuthenticationMechanismAPIToken) SourceSlackAuthenticationMechanism {
- typ := SourceSlackAuthenticationMechanismTypeSourceSlackAuthenticationMechanismAPIToken
+func CreateSourceSlackAuthenticationMechanismSourceSlackAPIToken(sourceSlackAPIToken SourceSlackAPIToken) SourceSlackAuthenticationMechanism {
+ typ := SourceSlackAuthenticationMechanismTypeSourceSlackAPIToken
return SourceSlackAuthenticationMechanism{
- SourceSlackAuthenticationMechanismAPIToken: &sourceSlackAuthenticationMechanismAPIToken,
- Type: typ,
+ SourceSlackAPIToken: &sourceSlackAPIToken,
+ Type: typ,
}
}
func (u *SourceSlackAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSlackAuthenticationMechanismAPIToken := new(SourceSlackAuthenticationMechanismAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSlackAuthenticationMechanismAPIToken); err == nil {
- u.SourceSlackAuthenticationMechanismAPIToken = sourceSlackAuthenticationMechanismAPIToken
- u.Type = SourceSlackAuthenticationMechanismTypeSourceSlackAuthenticationMechanismAPIToken
+
+ sourceSlackAPIToken := new(SourceSlackAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceSlackAPIToken, "", true, true); err == nil {
+ u.SourceSlackAPIToken = sourceSlackAPIToken
+ u.Type = SourceSlackAuthenticationMechanismTypeSourceSlackAPIToken
return nil
}
- sourceSlackAuthenticationMechanismSignInViaSlackOAuth := new(SourceSlackAuthenticationMechanismSignInViaSlackOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSlackAuthenticationMechanismSignInViaSlackOAuth); err == nil {
- u.SourceSlackAuthenticationMechanismSignInViaSlackOAuth = sourceSlackAuthenticationMechanismSignInViaSlackOAuth
- u.Type = SourceSlackAuthenticationMechanismTypeSourceSlackAuthenticationMechanismSignInViaSlackOAuth
+ sourceSlackSignInViaSlackOAuth := new(SourceSlackSignInViaSlackOAuth)
+ if err := utils.UnmarshalJSON(data, &sourceSlackSignInViaSlackOAuth, "", true, true); err == nil {
+ u.SourceSlackSignInViaSlackOAuth = sourceSlackSignInViaSlackOAuth
+ u.Type = SourceSlackAuthenticationMechanismTypeSourceSlackSignInViaSlackOAuth
return nil
}
@@ -133,38 +186,38 @@ func (u *SourceSlackAuthenticationMechanism) UnmarshalJSON(data []byte) error {
}
func (u SourceSlackAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceSlackAuthenticationMechanismAPIToken != nil {
- return json.Marshal(u.SourceSlackAuthenticationMechanismAPIToken)
+ if u.SourceSlackSignInViaSlackOAuth != nil {
+ return utils.MarshalJSON(u.SourceSlackSignInViaSlackOAuth, "", true)
}
- if u.SourceSlackAuthenticationMechanismSignInViaSlackOAuth != nil {
- return json.Marshal(u.SourceSlackAuthenticationMechanismSignInViaSlackOAuth)
+ if u.SourceSlackAPIToken != nil {
+ return utils.MarshalJSON(u.SourceSlackAPIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceSlackSlack string
+type Slack string
const (
- SourceSlackSlackSlack SourceSlackSlack = "slack"
+ SlackSlack Slack = "slack"
)
-func (e SourceSlackSlack) ToPointer() *SourceSlackSlack {
+func (e Slack) ToPointer() *Slack {
return &e
}
-func (e *SourceSlackSlack) UnmarshalJSON(data []byte) error {
+func (e *Slack) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "slack":
- *e = SourceSlackSlack(v)
+ *e = Slack(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSlackSlack: %v", v)
+ return fmt.Errorf("invalid value for Slack: %v", v)
}
}
@@ -174,10 +227,60 @@ type SourceSlack struct {
// Choose how to authenticate into Slack
Credentials *SourceSlackAuthenticationMechanism `json:"credentials,omitempty"`
// Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages.
- JoinChannels bool `json:"join_channels"`
+ JoinChannels *bool `default:"true" json:"join_channels"`
// How far into the past to look for messages in threads, default is 0 days
- LookbackWindow int64 `json:"lookback_window"`
- SourceType SourceSlackSlack `json:"sourceType"`
+ LookbackWindow *int64 `default:"0" json:"lookback_window"`
+ sourceType Slack `const:"slack" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceSlack) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSlack) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSlack) GetChannelFilter() []string {
+ if o == nil {
+ return nil
+ }
+ return o.ChannelFilter
+}
+
+func (o *SourceSlack) GetCredentials() *SourceSlackAuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceSlack) GetJoinChannels() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.JoinChannels
+}
+
+func (o *SourceSlack) GetLookbackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindow
+}
+
+func (o *SourceSlack) GetSourceType() Slack {
+ return SlackSlack
+}
+
+func (o *SourceSlack) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceslackcreaterequest.go b/internal/sdk/pkg/models/shared/sourceslackcreaterequest.go
old mode 100755
new mode 100644
index c2cb41b73..2d7961b09
--- a/internal/sdk/pkg/models/shared/sourceslackcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceslackcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSlackCreateRequest struct {
Configuration SourceSlack `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSlackCreateRequest) GetConfiguration() SourceSlack {
+ if o == nil {
+ return SourceSlack{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSlackCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSlackCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSlackCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSlackCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceslackputrequest.go b/internal/sdk/pkg/models/shared/sourceslackputrequest.go
old mode 100755
new mode 100644
index 8a3240ced..74dd4ca80
--- a/internal/sdk/pkg/models/shared/sourceslackputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceslackputrequest.go
@@ -7,3 +7,24 @@ type SourceSlackPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSlackPutRequest) GetConfiguration() SourceSlackUpdate {
+ if o == nil {
+ return SourceSlackUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSlackPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSlackPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceslackupdate.go b/internal/sdk/pkg/models/shared/sourceslackupdate.go
old mode 100755
new mode 100644
index 4809b8586..2c56b4953
--- a/internal/sdk/pkg/models/shared/sourceslackupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceslackupdate.go
@@ -3,129 +3,182 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceSlackUpdateAuthenticationMechanismAPITokenOptionTitle string
+type SourceSlackUpdateSchemasOptionTitle string
const (
- SourceSlackUpdateAuthenticationMechanismAPITokenOptionTitleAPITokenCredentials SourceSlackUpdateAuthenticationMechanismAPITokenOptionTitle = "API Token Credentials"
+ SourceSlackUpdateSchemasOptionTitleAPITokenCredentials SourceSlackUpdateSchemasOptionTitle = "API Token Credentials"
)
-func (e SourceSlackUpdateAuthenticationMechanismAPITokenOptionTitle) ToPointer() *SourceSlackUpdateAuthenticationMechanismAPITokenOptionTitle {
+func (e SourceSlackUpdateSchemasOptionTitle) ToPointer() *SourceSlackUpdateSchemasOptionTitle {
return &e
}
-func (e *SourceSlackUpdateAuthenticationMechanismAPITokenOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceSlackUpdateSchemasOptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "API Token Credentials":
- *e = SourceSlackUpdateAuthenticationMechanismAPITokenOptionTitle(v)
+ *e = SourceSlackUpdateSchemasOptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSlackUpdateAuthenticationMechanismAPITokenOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceSlackUpdateSchemasOptionTitle: %v", v)
}
}
-// SourceSlackUpdateAuthenticationMechanismAPIToken - Choose how to authenticate into Slack
-type SourceSlackUpdateAuthenticationMechanismAPIToken struct {
+// SourceSlackUpdateAPIToken - Choose how to authenticate into Slack
+type SourceSlackUpdateAPIToken struct {
// A Slack bot token. See the docs for instructions on how to generate it.
- APIToken string `json:"api_token"`
- OptionTitle SourceSlackUpdateAuthenticationMechanismAPITokenOptionTitle `json:"option_title"`
+ APIToken string `json:"api_token"`
+ optionTitle SourceSlackUpdateSchemasOptionTitle `const:"API Token Credentials" json:"option_title"`
}
-type SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuthOptionTitle string
+func (s SourceSlackUpdateAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSlackUpdateAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSlackUpdateAPIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceSlackUpdateAPIToken) GetOptionTitle() SourceSlackUpdateSchemasOptionTitle {
+ return SourceSlackUpdateSchemasOptionTitleAPITokenCredentials
+}
+
+type SourceSlackUpdateOptionTitle string
const (
- SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuthOptionTitleDefaultOAuth20Authorization SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuthOptionTitle = "Default OAuth2.0 authorization"
+ SourceSlackUpdateOptionTitleDefaultOAuth20Authorization SourceSlackUpdateOptionTitle = "Default OAuth2.0 authorization"
)
-func (e SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuthOptionTitle) ToPointer() *SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuthOptionTitle {
+func (e SourceSlackUpdateOptionTitle) ToPointer() *SourceSlackUpdateOptionTitle {
return &e
}
-func (e *SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuthOptionTitle) UnmarshalJSON(data []byte) error {
+func (e *SourceSlackUpdateOptionTitle) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "Default OAuth2.0 authorization":
- *e = SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuthOptionTitle(v)
+ *e = SourceSlackUpdateOptionTitle(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuthOptionTitle: %v", v)
+ return fmt.Errorf("invalid value for SourceSlackUpdateOptionTitle: %v", v)
}
}
-// SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth - Choose how to authenticate into Slack
-type SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth struct {
+// SignInViaSlackOAuth - Choose how to authenticate into Slack
+type SignInViaSlackOAuth struct {
// Slack access_token. See our docs if you need help generating the token.
AccessToken string `json:"access_token"`
// Slack client_id. See our docs if you need help finding this id.
ClientID string `json:"client_id"`
// Slack client_secret. See our docs if you need help finding this secret.
- ClientSecret string `json:"client_secret"`
- OptionTitle SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuthOptionTitle `json:"option_title"`
+ ClientSecret string `json:"client_secret"`
+ optionTitle SourceSlackUpdateOptionTitle `const:"Default OAuth2.0 authorization" json:"option_title"`
+}
+
+func (s SignInViaSlackOAuth) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SignInViaSlackOAuth) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SignInViaSlackOAuth) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SignInViaSlackOAuth) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SignInViaSlackOAuth) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SignInViaSlackOAuth) GetOptionTitle() SourceSlackUpdateOptionTitle {
+ return SourceSlackUpdateOptionTitleDefaultOAuth20Authorization
}
type SourceSlackUpdateAuthenticationMechanismType string
const (
- SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth SourceSlackUpdateAuthenticationMechanismType = "source-slack-update_Authentication mechanism_Sign in via Slack (OAuth)"
- SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAuthenticationMechanismAPIToken SourceSlackUpdateAuthenticationMechanismType = "source-slack-update_Authentication mechanism_API Token"
+ SourceSlackUpdateAuthenticationMechanismTypeSignInViaSlackOAuth SourceSlackUpdateAuthenticationMechanismType = "Sign in via Slack (OAuth)"
+ SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAPIToken SourceSlackUpdateAuthenticationMechanismType = "source-slack-update_API Token"
)
type SourceSlackUpdateAuthenticationMechanism struct {
- SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth *SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth
- SourceSlackUpdateAuthenticationMechanismAPIToken *SourceSlackUpdateAuthenticationMechanismAPIToken
+ SignInViaSlackOAuth *SignInViaSlackOAuth
+ SourceSlackUpdateAPIToken *SourceSlackUpdateAPIToken
Type SourceSlackUpdateAuthenticationMechanismType
}
-func CreateSourceSlackUpdateAuthenticationMechanismSourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth(sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth) SourceSlackUpdateAuthenticationMechanism {
- typ := SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth
+func CreateSourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth(signInViaSlackOAuth SignInViaSlackOAuth) SourceSlackUpdateAuthenticationMechanism {
+ typ := SourceSlackUpdateAuthenticationMechanismTypeSignInViaSlackOAuth
return SourceSlackUpdateAuthenticationMechanism{
- SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth: &sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth,
- Type: typ,
+ SignInViaSlackOAuth: &signInViaSlackOAuth,
+ Type: typ,
}
}
-func CreateSourceSlackUpdateAuthenticationMechanismSourceSlackUpdateAuthenticationMechanismAPIToken(sourceSlackUpdateAuthenticationMechanismAPIToken SourceSlackUpdateAuthenticationMechanismAPIToken) SourceSlackUpdateAuthenticationMechanism {
- typ := SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAuthenticationMechanismAPIToken
+func CreateSourceSlackUpdateAuthenticationMechanismSourceSlackUpdateAPIToken(sourceSlackUpdateAPIToken SourceSlackUpdateAPIToken) SourceSlackUpdateAuthenticationMechanism {
+ typ := SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAPIToken
return SourceSlackUpdateAuthenticationMechanism{
- SourceSlackUpdateAuthenticationMechanismAPIToken: &sourceSlackUpdateAuthenticationMechanismAPIToken,
- Type: typ,
+ SourceSlackUpdateAPIToken: &sourceSlackUpdateAPIToken,
+ Type: typ,
}
}
func (u *SourceSlackUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSlackUpdateAuthenticationMechanismAPIToken := new(SourceSlackUpdateAuthenticationMechanismAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSlackUpdateAuthenticationMechanismAPIToken); err == nil {
- u.SourceSlackUpdateAuthenticationMechanismAPIToken = sourceSlackUpdateAuthenticationMechanismAPIToken
- u.Type = SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAuthenticationMechanismAPIToken
+
+ sourceSlackUpdateAPIToken := new(SourceSlackUpdateAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceSlackUpdateAPIToken, "", true, true); err == nil {
+ u.SourceSlackUpdateAPIToken = sourceSlackUpdateAPIToken
+ u.Type = SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAPIToken
return nil
}
- sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth := new(SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth); err == nil {
- u.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth = sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth
- u.Type = SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth
+ signInViaSlackOAuth := new(SignInViaSlackOAuth)
+ if err := utils.UnmarshalJSON(data, &signInViaSlackOAuth, "", true, true); err == nil {
+ u.SignInViaSlackOAuth = signInViaSlackOAuth
+ u.Type = SourceSlackUpdateAuthenticationMechanismTypeSignInViaSlackOAuth
return nil
}
@@ -133,15 +186,15 @@ func (u *SourceSlackUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) er
}
func (u SourceSlackUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) {
- if u.SourceSlackUpdateAuthenticationMechanismAPIToken != nil {
- return json.Marshal(u.SourceSlackUpdateAuthenticationMechanismAPIToken)
+ if u.SignInViaSlackOAuth != nil {
+ return utils.MarshalJSON(u.SignInViaSlackOAuth, "", true)
}
- if u.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth != nil {
- return json.Marshal(u.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth)
+ if u.SourceSlackUpdateAPIToken != nil {
+ return utils.MarshalJSON(u.SourceSlackUpdateAPIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceSlackUpdate struct {
@@ -150,9 +203,55 @@ type SourceSlackUpdate struct {
// Choose how to authenticate into Slack
Credentials *SourceSlackUpdateAuthenticationMechanism `json:"credentials,omitempty"`
// Whether to join all channels or to sync data only from channels the bot is already in. If false, you'll need to manually add the bot to all the channels from which you'd like to sync messages.
- JoinChannels bool `json:"join_channels"`
+ JoinChannels *bool `default:"true" json:"join_channels"`
// How far into the past to look for messages in threads, default is 0 days
- LookbackWindow int64 `json:"lookback_window"`
+ LookbackWindow *int64 `default:"0" json:"lookback_window"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceSlackUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSlackUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSlackUpdate) GetChannelFilter() []string {
+ if o == nil {
+ return nil
+ }
+ return o.ChannelFilter
+}
+
+func (o *SourceSlackUpdate) GetCredentials() *SourceSlackUpdateAuthenticationMechanism {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceSlackUpdate) GetJoinChannels() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.JoinChannels
+}
+
+func (o *SourceSlackUpdate) GetLookbackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindow
+}
+
+func (o *SourceSlackUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesmaily.go b/internal/sdk/pkg/models/shared/sourcesmaily.go
old mode 100755
new mode 100644
index ff5a512af..ed1931d71
--- a/internal/sdk/pkg/models/shared/sourcesmaily.go
+++ b/internal/sdk/pkg/models/shared/sourcesmaily.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSmailySmaily string
+type Smaily string
const (
- SourceSmailySmailySmaily SourceSmailySmaily = "smaily"
+ SmailySmaily Smaily = "smaily"
)
-func (e SourceSmailySmaily) ToPointer() *SourceSmailySmaily {
+func (e Smaily) ToPointer() *Smaily {
return &e
}
-func (e *SourceSmailySmaily) UnmarshalJSON(data []byte) error {
+func (e *Smaily) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "smaily":
- *e = SourceSmailySmaily(v)
+ *e = Smaily(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSmailySmaily: %v", v)
+ return fmt.Errorf("invalid value for Smaily: %v", v)
}
}
@@ -37,6 +38,42 @@ type SourceSmaily struct {
// API Subdomain. See https://smaily.com/help/api/general/create-api-user/
APISubdomain string `json:"api_subdomain"`
// API user username. See https://smaily.com/help/api/general/create-api-user/
- APIUsername string `json:"api_username"`
- SourceType SourceSmailySmaily `json:"sourceType"`
+ APIUsername string `json:"api_username"`
+ sourceType Smaily `const:"smaily" json:"sourceType"`
+}
+
+func (s SourceSmaily) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSmaily) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSmaily) GetAPIPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIPassword
+}
+
+func (o *SourceSmaily) GetAPISubdomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.APISubdomain
+}
+
+func (o *SourceSmaily) GetAPIUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIUsername
+}
+
+func (o *SourceSmaily) GetSourceType() Smaily {
+ return SmailySmaily
}
diff --git a/internal/sdk/pkg/models/shared/sourcesmailycreaterequest.go b/internal/sdk/pkg/models/shared/sourcesmailycreaterequest.go
old mode 100755
new mode 100644
index 22035852f..918e52f2c
--- a/internal/sdk/pkg/models/shared/sourcesmailycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesmailycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSmailyCreateRequest struct {
Configuration SourceSmaily `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSmailyCreateRequest) GetConfiguration() SourceSmaily {
+ if o == nil {
+ return SourceSmaily{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSmailyCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSmailyCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSmailyCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSmailyCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesmailyputrequest.go b/internal/sdk/pkg/models/shared/sourcesmailyputrequest.go
old mode 100755
new mode 100644
index 945e0037f..b1576318e
--- a/internal/sdk/pkg/models/shared/sourcesmailyputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesmailyputrequest.go
@@ -7,3 +7,24 @@ type SourceSmailyPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSmailyPutRequest) GetConfiguration() SourceSmailyUpdate {
+ if o == nil {
+ return SourceSmailyUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSmailyPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSmailyPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesmailyupdate.go b/internal/sdk/pkg/models/shared/sourcesmailyupdate.go
old mode 100755
new mode 100644
index 65be48891..870707bd9
--- a/internal/sdk/pkg/models/shared/sourcesmailyupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesmailyupdate.go
@@ -10,3 +10,24 @@ type SourceSmailyUpdate struct {
// API user username. See https://smaily.com/help/api/general/create-api-user/
APIUsername string `json:"api_username"`
}
+
+func (o *SourceSmailyUpdate) GetAPIPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIPassword
+}
+
+func (o *SourceSmailyUpdate) GetAPISubdomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.APISubdomain
+}
+
+func (o *SourceSmailyUpdate) GetAPIUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIUsername
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesmartengage.go b/internal/sdk/pkg/models/shared/sourcesmartengage.go
old mode 100755
new mode 100644
index 93e12aab9..85f76ea42
--- a/internal/sdk/pkg/models/shared/sourcesmartengage.go
+++ b/internal/sdk/pkg/models/shared/sourcesmartengage.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSmartengageSmartengage string
+type Smartengage string
const (
- SourceSmartengageSmartengageSmartengage SourceSmartengageSmartengage = "smartengage"
+ SmartengageSmartengage Smartengage = "smartengage"
)
-func (e SourceSmartengageSmartengage) ToPointer() *SourceSmartengageSmartengage {
+func (e Smartengage) ToPointer() *Smartengage {
return &e
}
-func (e *SourceSmartengageSmartengage) UnmarshalJSON(data []byte) error {
+func (e *Smartengage) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "smartengage":
- *e = SourceSmartengageSmartengage(v)
+ *e = Smartengage(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSmartengageSmartengage: %v", v)
+ return fmt.Errorf("invalid value for Smartengage: %v", v)
}
}
type SourceSmartengage struct {
// API Key
- APIKey string `json:"api_key"`
- SourceType SourceSmartengageSmartengage `json:"sourceType"`
+ APIKey string `json:"api_key"`
+ sourceType Smartengage `const:"smartengage" json:"sourceType"`
+}
+
+func (s SourceSmartengage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSmartengage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSmartengage) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceSmartengage) GetSourceType() Smartengage {
+ return SmartengageSmartengage
}
diff --git a/internal/sdk/pkg/models/shared/sourcesmartengagecreaterequest.go b/internal/sdk/pkg/models/shared/sourcesmartengagecreaterequest.go
old mode 100755
new mode 100644
index 2be4072c7..981112f0a
--- a/internal/sdk/pkg/models/shared/sourcesmartengagecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesmartengagecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSmartengageCreateRequest struct {
Configuration SourceSmartengage `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSmartengageCreateRequest) GetConfiguration() SourceSmartengage {
+ if o == nil {
+ return SourceSmartengage{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSmartengageCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSmartengageCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSmartengageCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSmartengageCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesmartengageputrequest.go b/internal/sdk/pkg/models/shared/sourcesmartengageputrequest.go
old mode 100755
new mode 100644
index c98b8167a..8cc023b6a
--- a/internal/sdk/pkg/models/shared/sourcesmartengageputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesmartengageputrequest.go
@@ -7,3 +7,24 @@ type SourceSmartengagePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSmartengagePutRequest) GetConfiguration() SourceSmartengageUpdate {
+ if o == nil {
+ return SourceSmartengageUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSmartengagePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSmartengagePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesmartengageupdate.go b/internal/sdk/pkg/models/shared/sourcesmartengageupdate.go
old mode 100755
new mode 100644
index 38e707809..ea63939c3
--- a/internal/sdk/pkg/models/shared/sourcesmartengageupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesmartengageupdate.go
@@ -6,3 +6,10 @@ type SourceSmartengageUpdate struct {
// API Key
APIKey string `json:"api_key"`
}
+
+func (o *SourceSmartengageUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesmartsheets.go b/internal/sdk/pkg/models/shared/sourcesmartsheets.go
old mode 100755
new mode 100644
index 59cf3943b..6cc63482a
--- a/internal/sdk/pkg/models/shared/sourcesmartsheets.go
+++ b/internal/sdk/pkg/models/shared/sourcesmartsheets.go
@@ -3,71 +3,93 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceSmartsheetsAuthorizationMethodAPIAccessTokenAuthType string
+type SourceSmartsheetsSchemasAuthType string
const (
- SourceSmartsheetsAuthorizationMethodAPIAccessTokenAuthTypeAccessToken SourceSmartsheetsAuthorizationMethodAPIAccessTokenAuthType = "access_token"
+ SourceSmartsheetsSchemasAuthTypeAccessToken SourceSmartsheetsSchemasAuthType = "access_token"
)
-func (e SourceSmartsheetsAuthorizationMethodAPIAccessTokenAuthType) ToPointer() *SourceSmartsheetsAuthorizationMethodAPIAccessTokenAuthType {
+func (e SourceSmartsheetsSchemasAuthType) ToPointer() *SourceSmartsheetsSchemasAuthType {
return &e
}
-func (e *SourceSmartsheetsAuthorizationMethodAPIAccessTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSmartsheetsSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceSmartsheetsAuthorizationMethodAPIAccessTokenAuthType(v)
+ *e = SourceSmartsheetsSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSmartsheetsAuthorizationMethodAPIAccessTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSmartsheetsSchemasAuthType: %v", v)
}
}
-type SourceSmartsheetsAuthorizationMethodAPIAccessToken struct {
+type SourceSmartsheetsAPIAccessToken struct {
// The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token.
- AccessToken string `json:"access_token"`
- AuthType *SourceSmartsheetsAuthorizationMethodAPIAccessTokenAuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceSmartsheetsSchemasAuthType `const:"access_token" json:"auth_type,omitempty"`
}
-type SourceSmartsheetsAuthorizationMethodOAuth20AuthType string
+func (s SourceSmartsheetsAPIAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSmartsheetsAPIAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSmartsheetsAPIAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSmartsheetsAPIAccessToken) GetAuthType() *SourceSmartsheetsSchemasAuthType {
+ return SourceSmartsheetsSchemasAuthTypeAccessToken.ToPointer()
+}
+
+type SourceSmartsheetsAuthType string
const (
- SourceSmartsheetsAuthorizationMethodOAuth20AuthTypeOauth20 SourceSmartsheetsAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceSmartsheetsAuthTypeOauth20 SourceSmartsheetsAuthType = "oauth2.0"
)
-func (e SourceSmartsheetsAuthorizationMethodOAuth20AuthType) ToPointer() *SourceSmartsheetsAuthorizationMethodOAuth20AuthType {
+func (e SourceSmartsheetsAuthType) ToPointer() *SourceSmartsheetsAuthType {
return &e
}
-func (e *SourceSmartsheetsAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSmartsheetsAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceSmartsheetsAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceSmartsheetsAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSmartsheetsAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSmartsheetsAuthType: %v", v)
}
}
-type SourceSmartsheetsAuthorizationMethodOAuth20 struct {
+type SourceSmartsheetsOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType *SourceSmartsheetsAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceSmartsheetsAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The API ID of the SmartSheets developer application.
ClientID string `json:"client_id"`
// The API Secret the SmartSheets developer application.
@@ -78,56 +100,101 @@ type SourceSmartsheetsAuthorizationMethodOAuth20 struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (s SourceSmartsheetsOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSmartsheetsOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSmartsheetsOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSmartsheetsOAuth20) GetAuthType() *SourceSmartsheetsAuthType {
+ return SourceSmartsheetsAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceSmartsheetsOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceSmartsheetsOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceSmartsheetsOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceSmartsheetsOAuth20) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceSmartsheetsAuthorizationMethodType string
const (
- SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAuthorizationMethodOAuth20 SourceSmartsheetsAuthorizationMethodType = "source-smartsheets_Authorization Method_OAuth2.0"
- SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAuthorizationMethodAPIAccessToken SourceSmartsheetsAuthorizationMethodType = "source-smartsheets_Authorization Method_API Access Token"
+ SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsOAuth20 SourceSmartsheetsAuthorizationMethodType = "source-smartsheets_OAuth2.0"
+ SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAPIAccessToken SourceSmartsheetsAuthorizationMethodType = "source-smartsheets_API Access Token"
)
type SourceSmartsheetsAuthorizationMethod struct {
- SourceSmartsheetsAuthorizationMethodOAuth20 *SourceSmartsheetsAuthorizationMethodOAuth20
- SourceSmartsheetsAuthorizationMethodAPIAccessToken *SourceSmartsheetsAuthorizationMethodAPIAccessToken
+ SourceSmartsheetsOAuth20 *SourceSmartsheetsOAuth20
+ SourceSmartsheetsAPIAccessToken *SourceSmartsheetsAPIAccessToken
Type SourceSmartsheetsAuthorizationMethodType
}
-func CreateSourceSmartsheetsAuthorizationMethodSourceSmartsheetsAuthorizationMethodOAuth20(sourceSmartsheetsAuthorizationMethodOAuth20 SourceSmartsheetsAuthorizationMethodOAuth20) SourceSmartsheetsAuthorizationMethod {
- typ := SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAuthorizationMethodOAuth20
+func CreateSourceSmartsheetsAuthorizationMethodSourceSmartsheetsOAuth20(sourceSmartsheetsOAuth20 SourceSmartsheetsOAuth20) SourceSmartsheetsAuthorizationMethod {
+ typ := SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsOAuth20
return SourceSmartsheetsAuthorizationMethod{
- SourceSmartsheetsAuthorizationMethodOAuth20: &sourceSmartsheetsAuthorizationMethodOAuth20,
- Type: typ,
+ SourceSmartsheetsOAuth20: &sourceSmartsheetsOAuth20,
+ Type: typ,
}
}
-func CreateSourceSmartsheetsAuthorizationMethodSourceSmartsheetsAuthorizationMethodAPIAccessToken(sourceSmartsheetsAuthorizationMethodAPIAccessToken SourceSmartsheetsAuthorizationMethodAPIAccessToken) SourceSmartsheetsAuthorizationMethod {
- typ := SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAuthorizationMethodAPIAccessToken
+func CreateSourceSmartsheetsAuthorizationMethodSourceSmartsheetsAPIAccessToken(sourceSmartsheetsAPIAccessToken SourceSmartsheetsAPIAccessToken) SourceSmartsheetsAuthorizationMethod {
+ typ := SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAPIAccessToken
return SourceSmartsheetsAuthorizationMethod{
- SourceSmartsheetsAuthorizationMethodAPIAccessToken: &sourceSmartsheetsAuthorizationMethodAPIAccessToken,
- Type: typ,
+ SourceSmartsheetsAPIAccessToken: &sourceSmartsheetsAPIAccessToken,
+ Type: typ,
}
}
func (u *SourceSmartsheetsAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSmartsheetsAuthorizationMethodAPIAccessToken := new(SourceSmartsheetsAuthorizationMethodAPIAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSmartsheetsAuthorizationMethodAPIAccessToken); err == nil {
- u.SourceSmartsheetsAuthorizationMethodAPIAccessToken = sourceSmartsheetsAuthorizationMethodAPIAccessToken
- u.Type = SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAuthorizationMethodAPIAccessToken
+
+ sourceSmartsheetsAPIAccessToken := new(SourceSmartsheetsAPIAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceSmartsheetsAPIAccessToken, "", true, true); err == nil {
+ u.SourceSmartsheetsAPIAccessToken = sourceSmartsheetsAPIAccessToken
+ u.Type = SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAPIAccessToken
return nil
}
- sourceSmartsheetsAuthorizationMethodOAuth20 := new(SourceSmartsheetsAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSmartsheetsAuthorizationMethodOAuth20); err == nil {
- u.SourceSmartsheetsAuthorizationMethodOAuth20 = sourceSmartsheetsAuthorizationMethodOAuth20
- u.Type = SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAuthorizationMethodOAuth20
+ sourceSmartsheetsOAuth20 := new(SourceSmartsheetsOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceSmartsheetsOAuth20, "", true, true); err == nil {
+ u.SourceSmartsheetsOAuth20 = sourceSmartsheetsOAuth20
+ u.Type = SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsOAuth20
return nil
}
@@ -135,15 +202,15 @@ func (u *SourceSmartsheetsAuthorizationMethod) UnmarshalJSON(data []byte) error
}
func (u SourceSmartsheetsAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceSmartsheetsAuthorizationMethodAPIAccessToken != nil {
- return json.Marshal(u.SourceSmartsheetsAuthorizationMethodAPIAccessToken)
+ if u.SourceSmartsheetsOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceSmartsheetsOAuth20, "", true)
}
- if u.SourceSmartsheetsAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceSmartsheetsAuthorizationMethodOAuth20)
+ if u.SourceSmartsheetsAPIAccessToken != nil {
+ return utils.MarshalJSON(u.SourceSmartsheetsAPIAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceSmartsheetsValidenums string
@@ -218,27 +285,27 @@ func (e *SourceSmartsheetsValidenums) UnmarshalJSON(data []byte) error {
}
}
-type SourceSmartsheetsSmartsheets string
+type Smartsheets string
const (
- SourceSmartsheetsSmartsheetsSmartsheets SourceSmartsheetsSmartsheets = "smartsheets"
+ SmartsheetsSmartsheets Smartsheets = "smartsheets"
)
-func (e SourceSmartsheetsSmartsheets) ToPointer() *SourceSmartsheetsSmartsheets {
+func (e Smartsheets) ToPointer() *Smartsheets {
return &e
}
-func (e *SourceSmartsheetsSmartsheets) UnmarshalJSON(data []byte) error {
+func (e *Smartsheets) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "smartsheets":
- *e = SourceSmartsheetsSmartsheets(v)
+ *e = Smartsheets(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSmartsheetsSmartsheets: %v", v)
+ return fmt.Errorf("invalid value for Smartsheets: %v", v)
}
}
@@ -246,9 +313,52 @@ type SourceSmartsheets struct {
Credentials SourceSmartsheetsAuthorizationMethod `json:"credentials"`
// A List of available columns which metadata can be pulled from.
MetadataFields []SourceSmartsheetsValidenums `json:"metadata_fields,omitempty"`
- SourceType SourceSmartsheetsSmartsheets `json:"sourceType"`
+ sourceType Smartsheets `const:"smartsheets" json:"sourceType"`
// The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties
SpreadsheetID string `json:"spreadsheet_id"`
// Only rows modified after this date/time will be replicated. This should be an ISO 8601 string, for instance: `2000-01-01T13:00:00`
- StartDatetime *time.Time `json:"start_datetime,omitempty"`
+ StartDatetime *time.Time `default:"2020-01-01T00:00:00+00:00" json:"start_datetime"`
+}
+
+func (s SourceSmartsheets) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSmartsheets) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSmartsheets) GetCredentials() SourceSmartsheetsAuthorizationMethod {
+ if o == nil {
+ return SourceSmartsheetsAuthorizationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceSmartsheets) GetMetadataFields() []SourceSmartsheetsValidenums {
+ if o == nil {
+ return nil
+ }
+ return o.MetadataFields
+}
+
+func (o *SourceSmartsheets) GetSourceType() Smartsheets {
+ return SmartsheetsSmartsheets
+}
+
+func (o *SourceSmartsheets) GetSpreadsheetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SpreadsheetID
+}
+
+func (o *SourceSmartsheets) GetStartDatetime() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDatetime
}
diff --git a/internal/sdk/pkg/models/shared/sourcesmartsheetscreaterequest.go b/internal/sdk/pkg/models/shared/sourcesmartsheetscreaterequest.go
old mode 100755
new mode 100644
index ebb759b8c..266ebf385
--- a/internal/sdk/pkg/models/shared/sourcesmartsheetscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesmartsheetscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSmartsheetsCreateRequest struct {
Configuration SourceSmartsheets `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSmartsheetsCreateRequest) GetConfiguration() SourceSmartsheets {
+ if o == nil {
+ return SourceSmartsheets{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSmartsheetsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSmartsheetsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSmartsheetsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSmartsheetsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesmartsheetsputrequest.go b/internal/sdk/pkg/models/shared/sourcesmartsheetsputrequest.go
old mode 100755
new mode 100644
index 8fa5f23a5..a0d1ef253
--- a/internal/sdk/pkg/models/shared/sourcesmartsheetsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesmartsheetsputrequest.go
@@ -7,3 +7,24 @@ type SourceSmartsheetsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSmartsheetsPutRequest) GetConfiguration() SourceSmartsheetsUpdate {
+ if o == nil {
+ return SourceSmartsheetsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSmartsheetsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSmartsheetsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesmartsheetsupdate.go b/internal/sdk/pkg/models/shared/sourcesmartsheetsupdate.go
old mode 100755
new mode 100644
index bc026d94f..534497d7e
--- a/internal/sdk/pkg/models/shared/sourcesmartsheetsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesmartsheetsupdate.go
@@ -3,71 +3,93 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceSmartsheetsUpdateAuthorizationMethodAPIAccessTokenAuthType string
+type SourceSmartsheetsUpdateSchemasAuthType string
const (
- SourceSmartsheetsUpdateAuthorizationMethodAPIAccessTokenAuthTypeAccessToken SourceSmartsheetsUpdateAuthorizationMethodAPIAccessTokenAuthType = "access_token"
+ SourceSmartsheetsUpdateSchemasAuthTypeAccessToken SourceSmartsheetsUpdateSchemasAuthType = "access_token"
)
-func (e SourceSmartsheetsUpdateAuthorizationMethodAPIAccessTokenAuthType) ToPointer() *SourceSmartsheetsUpdateAuthorizationMethodAPIAccessTokenAuthType {
+func (e SourceSmartsheetsUpdateSchemasAuthType) ToPointer() *SourceSmartsheetsUpdateSchemasAuthType {
return &e
}
-func (e *SourceSmartsheetsUpdateAuthorizationMethodAPIAccessTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSmartsheetsUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceSmartsheetsUpdateAuthorizationMethodAPIAccessTokenAuthType(v)
+ *e = SourceSmartsheetsUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSmartsheetsUpdateAuthorizationMethodAPIAccessTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSmartsheetsUpdateSchemasAuthType: %v", v)
}
}
-type SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken struct {
+type APIAccessToken struct {
// The access token to use for accessing your data from Smartsheets. This access token must be generated by a user with at least read access to the data you'd like to replicate. Generate an access token in the Smartsheets main menu by clicking Account > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token.
- AccessToken string `json:"access_token"`
- AuthType *SourceSmartsheetsUpdateAuthorizationMethodAPIAccessTokenAuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceSmartsheetsUpdateSchemasAuthType `const:"access_token" json:"auth_type,omitempty"`
}
-type SourceSmartsheetsUpdateAuthorizationMethodOAuth20AuthType string
+func (a APIAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
+
+func (a *APIAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *APIAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *APIAccessToken) GetAuthType() *SourceSmartsheetsUpdateSchemasAuthType {
+ return SourceSmartsheetsUpdateSchemasAuthTypeAccessToken.ToPointer()
+}
+
+type SourceSmartsheetsUpdateAuthType string
const (
- SourceSmartsheetsUpdateAuthorizationMethodOAuth20AuthTypeOauth20 SourceSmartsheetsUpdateAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceSmartsheetsUpdateAuthTypeOauth20 SourceSmartsheetsUpdateAuthType = "oauth2.0"
)
-func (e SourceSmartsheetsUpdateAuthorizationMethodOAuth20AuthType) ToPointer() *SourceSmartsheetsUpdateAuthorizationMethodOAuth20AuthType {
+func (e SourceSmartsheetsUpdateAuthType) ToPointer() *SourceSmartsheetsUpdateAuthType {
return &e
}
-func (e *SourceSmartsheetsUpdateAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSmartsheetsUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceSmartsheetsUpdateAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceSmartsheetsUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSmartsheetsUpdateAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSmartsheetsUpdateAuthType: %v", v)
}
}
-type SourceSmartsheetsUpdateAuthorizationMethodOAuth20 struct {
+type SourceSmartsheetsUpdateOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType *SourceSmartsheetsUpdateAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceSmartsheetsUpdateAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The API ID of the SmartSheets developer application.
ClientID string `json:"client_id"`
// The API Secret the SmartSheets developer application.
@@ -78,56 +100,101 @@ type SourceSmartsheetsUpdateAuthorizationMethodOAuth20 struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (s SourceSmartsheetsUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSmartsheetsUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSmartsheetsUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSmartsheetsUpdateOAuth20) GetAuthType() *SourceSmartsheetsUpdateAuthType {
+ return SourceSmartsheetsUpdateAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceSmartsheetsUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceSmartsheetsUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceSmartsheetsUpdateOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceSmartsheetsUpdateOAuth20) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceSmartsheetsUpdateAuthorizationMethodType string
const (
- SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateAuthorizationMethodOAuth20 SourceSmartsheetsUpdateAuthorizationMethodType = "source-smartsheets-update_Authorization Method_OAuth2.0"
- SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken SourceSmartsheetsUpdateAuthorizationMethodType = "source-smartsheets-update_Authorization Method_API Access Token"
+ SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateOAuth20 SourceSmartsheetsUpdateAuthorizationMethodType = "source-smartsheets-update_OAuth2.0"
+ SourceSmartsheetsUpdateAuthorizationMethodTypeAPIAccessToken SourceSmartsheetsUpdateAuthorizationMethodType = "API Access Token"
)
type SourceSmartsheetsUpdateAuthorizationMethod struct {
- SourceSmartsheetsUpdateAuthorizationMethodOAuth20 *SourceSmartsheetsUpdateAuthorizationMethodOAuth20
- SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken *SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken
+ SourceSmartsheetsUpdateOAuth20 *SourceSmartsheetsUpdateOAuth20
+ APIAccessToken *APIAccessToken
Type SourceSmartsheetsUpdateAuthorizationMethodType
}
-func CreateSourceSmartsheetsUpdateAuthorizationMethodSourceSmartsheetsUpdateAuthorizationMethodOAuth20(sourceSmartsheetsUpdateAuthorizationMethodOAuth20 SourceSmartsheetsUpdateAuthorizationMethodOAuth20) SourceSmartsheetsUpdateAuthorizationMethod {
- typ := SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateAuthorizationMethodOAuth20
+func CreateSourceSmartsheetsUpdateAuthorizationMethodSourceSmartsheetsUpdateOAuth20(sourceSmartsheetsUpdateOAuth20 SourceSmartsheetsUpdateOAuth20) SourceSmartsheetsUpdateAuthorizationMethod {
+ typ := SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateOAuth20
return SourceSmartsheetsUpdateAuthorizationMethod{
- SourceSmartsheetsUpdateAuthorizationMethodOAuth20: &sourceSmartsheetsUpdateAuthorizationMethodOAuth20,
- Type: typ,
+ SourceSmartsheetsUpdateOAuth20: &sourceSmartsheetsUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceSmartsheetsUpdateAuthorizationMethodSourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken(sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken) SourceSmartsheetsUpdateAuthorizationMethod {
- typ := SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken
+func CreateSourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken(apiAccessToken APIAccessToken) SourceSmartsheetsUpdateAuthorizationMethod {
+ typ := SourceSmartsheetsUpdateAuthorizationMethodTypeAPIAccessToken
return SourceSmartsheetsUpdateAuthorizationMethod{
- SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken: &sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken,
- Type: typ,
+ APIAccessToken: &apiAccessToken,
+ Type: typ,
}
}
func (u *SourceSmartsheetsUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken := new(SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken); err == nil {
- u.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken = sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken
- u.Type = SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken
+
+ apiAccessToken := new(APIAccessToken)
+ if err := utils.UnmarshalJSON(data, &apiAccessToken, "", true, true); err == nil {
+ u.APIAccessToken = apiAccessToken
+ u.Type = SourceSmartsheetsUpdateAuthorizationMethodTypeAPIAccessToken
return nil
}
- sourceSmartsheetsUpdateAuthorizationMethodOAuth20 := new(SourceSmartsheetsUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSmartsheetsUpdateAuthorizationMethodOAuth20); err == nil {
- u.SourceSmartsheetsUpdateAuthorizationMethodOAuth20 = sourceSmartsheetsUpdateAuthorizationMethodOAuth20
- u.Type = SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateAuthorizationMethodOAuth20
+ sourceSmartsheetsUpdateOAuth20 := new(SourceSmartsheetsUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceSmartsheetsUpdateOAuth20, "", true, true); err == nil {
+ u.SourceSmartsheetsUpdateOAuth20 = sourceSmartsheetsUpdateOAuth20
+ u.Type = SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateOAuth20
return nil
}
@@ -135,44 +202,44 @@ func (u *SourceSmartsheetsUpdateAuthorizationMethod) UnmarshalJSON(data []byte)
}
func (u SourceSmartsheetsUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken != nil {
- return json.Marshal(u.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken)
+ if u.SourceSmartsheetsUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceSmartsheetsUpdateOAuth20, "", true)
}
- if u.SourceSmartsheetsUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceSmartsheetsUpdateAuthorizationMethodOAuth20)
+ if u.APIAccessToken != nil {
+ return utils.MarshalJSON(u.APIAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceSmartsheetsUpdateValidenums string
+type Validenums string
const (
- SourceSmartsheetsUpdateValidenumsSheetcreatedAt SourceSmartsheetsUpdateValidenums = "sheetcreatedAt"
- SourceSmartsheetsUpdateValidenumsSheetid SourceSmartsheetsUpdateValidenums = "sheetid"
- SourceSmartsheetsUpdateValidenumsSheetmodifiedAt SourceSmartsheetsUpdateValidenums = "sheetmodifiedAt"
- SourceSmartsheetsUpdateValidenumsSheetname SourceSmartsheetsUpdateValidenums = "sheetname"
- SourceSmartsheetsUpdateValidenumsSheetpermalink SourceSmartsheetsUpdateValidenums = "sheetpermalink"
- SourceSmartsheetsUpdateValidenumsSheetversion SourceSmartsheetsUpdateValidenums = "sheetversion"
- SourceSmartsheetsUpdateValidenumsSheetaccessLevel SourceSmartsheetsUpdateValidenums = "sheetaccess_level"
- SourceSmartsheetsUpdateValidenumsRowID SourceSmartsheetsUpdateValidenums = "row_id"
- SourceSmartsheetsUpdateValidenumsRowAccessLevel SourceSmartsheetsUpdateValidenums = "row_access_level"
- SourceSmartsheetsUpdateValidenumsRowCreatedAt SourceSmartsheetsUpdateValidenums = "row_created_at"
- SourceSmartsheetsUpdateValidenumsRowCreatedBy SourceSmartsheetsUpdateValidenums = "row_created_by"
- SourceSmartsheetsUpdateValidenumsRowExpanded SourceSmartsheetsUpdateValidenums = "row_expanded"
- SourceSmartsheetsUpdateValidenumsRowModifiedBy SourceSmartsheetsUpdateValidenums = "row_modified_by"
- SourceSmartsheetsUpdateValidenumsRowParentID SourceSmartsheetsUpdateValidenums = "row_parent_id"
- SourceSmartsheetsUpdateValidenumsRowPermalink SourceSmartsheetsUpdateValidenums = "row_permalink"
- SourceSmartsheetsUpdateValidenumsRowNumber SourceSmartsheetsUpdateValidenums = "row_number"
- SourceSmartsheetsUpdateValidenumsRowVersion SourceSmartsheetsUpdateValidenums = "row_version"
+ ValidenumsSheetcreatedAt Validenums = "sheetcreatedAt"
+ ValidenumsSheetid Validenums = "sheetid"
+ ValidenumsSheetmodifiedAt Validenums = "sheetmodifiedAt"
+ ValidenumsSheetname Validenums = "sheetname"
+ ValidenumsSheetpermalink Validenums = "sheetpermalink"
+ ValidenumsSheetversion Validenums = "sheetversion"
+ ValidenumsSheetaccessLevel Validenums = "sheetaccess_level"
+ ValidenumsRowID Validenums = "row_id"
+ ValidenumsRowAccessLevel Validenums = "row_access_level"
+ ValidenumsRowCreatedAt Validenums = "row_created_at"
+ ValidenumsRowCreatedBy Validenums = "row_created_by"
+ ValidenumsRowExpanded Validenums = "row_expanded"
+ ValidenumsRowModifiedBy Validenums = "row_modified_by"
+ ValidenumsRowParentID Validenums = "row_parent_id"
+ ValidenumsRowPermalink Validenums = "row_permalink"
+ ValidenumsRowNumber Validenums = "row_number"
+ ValidenumsRowVersion Validenums = "row_version"
)
-func (e SourceSmartsheetsUpdateValidenums) ToPointer() *SourceSmartsheetsUpdateValidenums {
+func (e Validenums) ToPointer() *Validenums {
return &e
}
-func (e *SourceSmartsheetsUpdateValidenums) UnmarshalJSON(data []byte) error {
+func (e *Validenums) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -211,19 +278,58 @@ func (e *SourceSmartsheetsUpdateValidenums) UnmarshalJSON(data []byte) error {
case "row_number":
fallthrough
case "row_version":
- *e = SourceSmartsheetsUpdateValidenums(v)
+ *e = Validenums(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSmartsheetsUpdateValidenums: %v", v)
+ return fmt.Errorf("invalid value for Validenums: %v", v)
}
}
type SourceSmartsheetsUpdate struct {
Credentials SourceSmartsheetsUpdateAuthorizationMethod `json:"credentials"`
// A List of available columns which metadata can be pulled from.
- MetadataFields []SourceSmartsheetsUpdateValidenums `json:"metadata_fields,omitempty"`
+ MetadataFields []Validenums `json:"metadata_fields,omitempty"`
// The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties
SpreadsheetID string `json:"spreadsheet_id"`
// Only rows modified after this date/time will be replicated. This should be an ISO 8601 string, for instance: `2000-01-01T13:00:00`
- StartDatetime *time.Time `json:"start_datetime,omitempty"`
+ StartDatetime *time.Time `default:"2020-01-01T00:00:00+00:00" json:"start_datetime"`
+}
+
+func (s SourceSmartsheetsUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSmartsheetsUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSmartsheetsUpdate) GetCredentials() SourceSmartsheetsUpdateAuthorizationMethod {
+ if o == nil {
+ return SourceSmartsheetsUpdateAuthorizationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceSmartsheetsUpdate) GetMetadataFields() []Validenums {
+ if o == nil {
+ return nil
+ }
+ return o.MetadataFields
+}
+
+func (o *SourceSmartsheetsUpdate) GetSpreadsheetID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SpreadsheetID
+}
+
+func (o *SourceSmartsheetsUpdate) GetStartDatetime() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDatetime
}
diff --git a/internal/sdk/pkg/models/shared/sourcesnapchatmarketing.go b/internal/sdk/pkg/models/shared/sourcesnapchatmarketing.go
old mode 100755
new mode 100644
index 02941c6b3..a542fb0e5
--- a/internal/sdk/pkg/models/shared/sourcesnapchatmarketing.go
+++ b/internal/sdk/pkg/models/shared/sourcesnapchatmarketing.go
@@ -3,32 +3,33 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSnapchatMarketingSnapchatMarketing string
+type SnapchatMarketing string
const (
- SourceSnapchatMarketingSnapchatMarketingSnapchatMarketing SourceSnapchatMarketingSnapchatMarketing = "snapchat-marketing"
+ SnapchatMarketingSnapchatMarketing SnapchatMarketing = "snapchat-marketing"
)
-func (e SourceSnapchatMarketingSnapchatMarketing) ToPointer() *SourceSnapchatMarketingSnapchatMarketing {
+func (e SnapchatMarketing) ToPointer() *SnapchatMarketing {
return &e
}
-func (e *SourceSnapchatMarketingSnapchatMarketing) UnmarshalJSON(data []byte) error {
+func (e *SnapchatMarketing) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "snapchat-marketing":
- *e = SourceSnapchatMarketingSnapchatMarketing(v)
+ *e = SnapchatMarketing(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSnapchatMarketingSnapchatMarketing: %v", v)
+ return fmt.Errorf("invalid value for SnapchatMarketing: %v", v)
}
}
@@ -40,8 +41,58 @@ type SourceSnapchatMarketing struct {
// Date in the format 2017-01-25. Any data after this date will not be replicated.
EndDate *types.Date `json:"end_date,omitempty"`
// Refresh Token to renew the expired Access Token.
- RefreshToken string `json:"refresh_token"`
- SourceType SourceSnapchatMarketingSnapchatMarketing `json:"sourceType"`
+ RefreshToken string `json:"refresh_token"`
+ sourceType SnapchatMarketing `const:"snapchat-marketing" json:"sourceType"`
// Date in the format 2022-01-01. Any data before this date will not be replicated.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2022-01-01" json:"start_date"`
+}
+
+func (s SourceSnapchatMarketing) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSnapchatMarketing) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSnapchatMarketing) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceSnapchatMarketing) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceSnapchatMarketing) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceSnapchatMarketing) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceSnapchatMarketing) GetSourceType() SnapchatMarketing {
+ return SnapchatMarketingSnapchatMarketing
+}
+
+func (o *SourceSnapchatMarketing) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcesnapchatmarketingcreaterequest.go b/internal/sdk/pkg/models/shared/sourcesnapchatmarketingcreaterequest.go
old mode 100755
new mode 100644
index 8d2e3f544..f3a115aef
--- a/internal/sdk/pkg/models/shared/sourcesnapchatmarketingcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesnapchatmarketingcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSnapchatMarketingCreateRequest struct {
Configuration SourceSnapchatMarketing `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSnapchatMarketingCreateRequest) GetConfiguration() SourceSnapchatMarketing {
+ if o == nil {
+ return SourceSnapchatMarketing{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSnapchatMarketingCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSnapchatMarketingCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSnapchatMarketingCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSnapchatMarketingCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesnapchatmarketingputrequest.go b/internal/sdk/pkg/models/shared/sourcesnapchatmarketingputrequest.go
old mode 100755
new mode 100644
index c12f881a6..96b869099
--- a/internal/sdk/pkg/models/shared/sourcesnapchatmarketingputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesnapchatmarketingputrequest.go
@@ -7,3 +7,24 @@ type SourceSnapchatMarketingPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSnapchatMarketingPutRequest) GetConfiguration() SourceSnapchatMarketingUpdate {
+ if o == nil {
+ return SourceSnapchatMarketingUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSnapchatMarketingPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSnapchatMarketingPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesnapchatmarketingupdate.go b/internal/sdk/pkg/models/shared/sourcesnapchatmarketingupdate.go
old mode 100755
new mode 100644
index 319436d5b..a11f9d1b8
--- a/internal/sdk/pkg/models/shared/sourcesnapchatmarketingupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesnapchatmarketingupdate.go
@@ -3,7 +3,8 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceSnapchatMarketingUpdate struct {
@@ -16,5 +17,51 @@ type SourceSnapchatMarketingUpdate struct {
// Refresh Token to renew the expired Access Token.
RefreshToken string `json:"refresh_token"`
// Date in the format 2022-01-01. Any data before this date will not be replicated.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2022-01-01" json:"start_date"`
+}
+
+func (s SourceSnapchatMarketingUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSnapchatMarketingUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSnapchatMarketingUpdate) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceSnapchatMarketingUpdate) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceSnapchatMarketingUpdate) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceSnapchatMarketingUpdate) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceSnapchatMarketingUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcesnowflake.go b/internal/sdk/pkg/models/shared/sourcesnowflake.go
old mode 100755
new mode 100644
index aa6aac316..7501f6922
--- a/internal/sdk/pkg/models/shared/sourcesnowflake.go
+++ b/internal/sdk/pkg/models/shared/sourcesnowflake.go
@@ -3,72 +3,101 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSnowflakeAuthorizationMethodUsernameAndPasswordAuthType string
+type SourceSnowflakeSchemasAuthType string
const (
- SourceSnowflakeAuthorizationMethodUsernameAndPasswordAuthTypeUsernamePassword SourceSnowflakeAuthorizationMethodUsernameAndPasswordAuthType = "username/password"
+ SourceSnowflakeSchemasAuthTypeUsernamePassword SourceSnowflakeSchemasAuthType = "username/password"
)
-func (e SourceSnowflakeAuthorizationMethodUsernameAndPasswordAuthType) ToPointer() *SourceSnowflakeAuthorizationMethodUsernameAndPasswordAuthType {
+func (e SourceSnowflakeSchemasAuthType) ToPointer() *SourceSnowflakeSchemasAuthType {
return &e
}
-func (e *SourceSnowflakeAuthorizationMethodUsernameAndPasswordAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSnowflakeSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "username/password":
- *e = SourceSnowflakeAuthorizationMethodUsernameAndPasswordAuthType(v)
+ *e = SourceSnowflakeSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSnowflakeAuthorizationMethodUsernameAndPasswordAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSnowflakeSchemasAuthType: %v", v)
}
}
-type SourceSnowflakeAuthorizationMethodUsernameAndPassword struct {
- AuthType SourceSnowflakeAuthorizationMethodUsernameAndPasswordAuthType `json:"auth_type"`
+type SourceSnowflakeUsernameAndPassword struct {
+ authType SourceSnowflakeSchemasAuthType `const:"username/password" json:"auth_type"`
// The password associated with the username.
Password string `json:"password"`
// The username you created to allow Airbyte to access the database.
Username string `json:"username"`
}
-type SourceSnowflakeAuthorizationMethodOAuth20AuthType string
+func (s SourceSnowflakeUsernameAndPassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSnowflakeUsernameAndPassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSnowflakeUsernameAndPassword) GetAuthType() SourceSnowflakeSchemasAuthType {
+ return SourceSnowflakeSchemasAuthTypeUsernamePassword
+}
+
+func (o *SourceSnowflakeUsernameAndPassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceSnowflakeUsernameAndPassword) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type SourceSnowflakeAuthType string
const (
- SourceSnowflakeAuthorizationMethodOAuth20AuthTypeOAuth SourceSnowflakeAuthorizationMethodOAuth20AuthType = "OAuth"
+ SourceSnowflakeAuthTypeOAuth SourceSnowflakeAuthType = "OAuth"
)
-func (e SourceSnowflakeAuthorizationMethodOAuth20AuthType) ToPointer() *SourceSnowflakeAuthorizationMethodOAuth20AuthType {
+func (e SourceSnowflakeAuthType) ToPointer() *SourceSnowflakeAuthType {
return &e
}
-func (e *SourceSnowflakeAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSnowflakeAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth":
- *e = SourceSnowflakeAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceSnowflakeAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSnowflakeAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSnowflakeAuthType: %v", v)
}
}
-type SourceSnowflakeAuthorizationMethodOAuth20 struct {
+type SourceSnowflakeOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken *string `json:"access_token,omitempty"`
- AuthType SourceSnowflakeAuthorizationMethodOAuth20AuthType `json:"auth_type"`
+ AccessToken *string `json:"access_token,omitempty"`
+ authType SourceSnowflakeAuthType `const:"OAuth" json:"auth_type"`
// The Client ID of your Snowflake developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Snowflake developer application.
@@ -77,56 +106,94 @@ type SourceSnowflakeAuthorizationMethodOAuth20 struct {
RefreshToken *string `json:"refresh_token,omitempty"`
}
+func (s SourceSnowflakeOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSnowflakeOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSnowflakeOAuth20) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSnowflakeOAuth20) GetAuthType() SourceSnowflakeAuthType {
+ return SourceSnowflakeAuthTypeOAuth
+}
+
+func (o *SourceSnowflakeOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceSnowflakeOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceSnowflakeOAuth20) GetRefreshToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.RefreshToken
+}
+
type SourceSnowflakeAuthorizationMethodType string
const (
- SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeAuthorizationMethodOAuth20 SourceSnowflakeAuthorizationMethodType = "source-snowflake_Authorization Method_OAuth2.0"
- SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeAuthorizationMethodUsernameAndPassword SourceSnowflakeAuthorizationMethodType = "source-snowflake_Authorization Method_Username and Password"
+ SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeOAuth20 SourceSnowflakeAuthorizationMethodType = "source-snowflake_OAuth2.0"
+ SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeUsernameAndPassword SourceSnowflakeAuthorizationMethodType = "source-snowflake_Username and Password"
)
type SourceSnowflakeAuthorizationMethod struct {
- SourceSnowflakeAuthorizationMethodOAuth20 *SourceSnowflakeAuthorizationMethodOAuth20
- SourceSnowflakeAuthorizationMethodUsernameAndPassword *SourceSnowflakeAuthorizationMethodUsernameAndPassword
+ SourceSnowflakeOAuth20 *SourceSnowflakeOAuth20
+ SourceSnowflakeUsernameAndPassword *SourceSnowflakeUsernameAndPassword
Type SourceSnowflakeAuthorizationMethodType
}
-func CreateSourceSnowflakeAuthorizationMethodSourceSnowflakeAuthorizationMethodOAuth20(sourceSnowflakeAuthorizationMethodOAuth20 SourceSnowflakeAuthorizationMethodOAuth20) SourceSnowflakeAuthorizationMethod {
- typ := SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeAuthorizationMethodOAuth20
+func CreateSourceSnowflakeAuthorizationMethodSourceSnowflakeOAuth20(sourceSnowflakeOAuth20 SourceSnowflakeOAuth20) SourceSnowflakeAuthorizationMethod {
+ typ := SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeOAuth20
return SourceSnowflakeAuthorizationMethod{
- SourceSnowflakeAuthorizationMethodOAuth20: &sourceSnowflakeAuthorizationMethodOAuth20,
- Type: typ,
+ SourceSnowflakeOAuth20: &sourceSnowflakeOAuth20,
+ Type: typ,
}
}
-func CreateSourceSnowflakeAuthorizationMethodSourceSnowflakeAuthorizationMethodUsernameAndPassword(sourceSnowflakeAuthorizationMethodUsernameAndPassword SourceSnowflakeAuthorizationMethodUsernameAndPassword) SourceSnowflakeAuthorizationMethod {
- typ := SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeAuthorizationMethodUsernameAndPassword
+func CreateSourceSnowflakeAuthorizationMethodSourceSnowflakeUsernameAndPassword(sourceSnowflakeUsernameAndPassword SourceSnowflakeUsernameAndPassword) SourceSnowflakeAuthorizationMethod {
+ typ := SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeUsernameAndPassword
return SourceSnowflakeAuthorizationMethod{
- SourceSnowflakeAuthorizationMethodUsernameAndPassword: &sourceSnowflakeAuthorizationMethodUsernameAndPassword,
- Type: typ,
+ SourceSnowflakeUsernameAndPassword: &sourceSnowflakeUsernameAndPassword,
+ Type: typ,
}
}
func (u *SourceSnowflakeAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSnowflakeAuthorizationMethodUsernameAndPassword := new(SourceSnowflakeAuthorizationMethodUsernameAndPassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSnowflakeAuthorizationMethodUsernameAndPassword); err == nil {
- u.SourceSnowflakeAuthorizationMethodUsernameAndPassword = sourceSnowflakeAuthorizationMethodUsernameAndPassword
- u.Type = SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeAuthorizationMethodUsernameAndPassword
+
+ sourceSnowflakeUsernameAndPassword := new(SourceSnowflakeUsernameAndPassword)
+ if err := utils.UnmarshalJSON(data, &sourceSnowflakeUsernameAndPassword, "", true, true); err == nil {
+ u.SourceSnowflakeUsernameAndPassword = sourceSnowflakeUsernameAndPassword
+ u.Type = SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeUsernameAndPassword
return nil
}
- sourceSnowflakeAuthorizationMethodOAuth20 := new(SourceSnowflakeAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSnowflakeAuthorizationMethodOAuth20); err == nil {
- u.SourceSnowflakeAuthorizationMethodOAuth20 = sourceSnowflakeAuthorizationMethodOAuth20
- u.Type = SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeAuthorizationMethodOAuth20
+ sourceSnowflakeOAuth20 := new(SourceSnowflakeOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceSnowflakeOAuth20, "", true, true); err == nil {
+ u.SourceSnowflakeOAuth20 = sourceSnowflakeOAuth20
+ u.Type = SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeOAuth20
return nil
}
@@ -134,15 +201,15 @@ func (u *SourceSnowflakeAuthorizationMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceSnowflakeAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceSnowflakeAuthorizationMethodUsernameAndPassword != nil {
- return json.Marshal(u.SourceSnowflakeAuthorizationMethodUsernameAndPassword)
+ if u.SourceSnowflakeOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceSnowflakeOAuth20, "", true)
}
- if u.SourceSnowflakeAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceSnowflakeAuthorizationMethodOAuth20)
+ if u.SourceSnowflakeUsernameAndPassword != nil {
+ return utils.MarshalJSON(u.SourceSnowflakeUsernameAndPassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceSnowflakeSnowflake string
@@ -181,7 +248,71 @@ type SourceSnowflake struct {
Role string `json:"role"`
// The source Snowflake schema tables. Leave empty to access tables from multiple schemas.
Schema *string `json:"schema,omitempty"`
- SourceType SourceSnowflakeSnowflake `json:"sourceType"`
+ sourceType SourceSnowflakeSnowflake `const:"snowflake" json:"sourceType"`
// The warehouse you created for Airbyte to access data.
Warehouse string `json:"warehouse"`
}
+
+func (s SourceSnowflake) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSnowflake) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSnowflake) GetCredentials() *SourceSnowflakeAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceSnowflake) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceSnowflake) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceSnowflake) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceSnowflake) GetRole() string {
+ if o == nil {
+ return ""
+ }
+ return o.Role
+}
+
+func (o *SourceSnowflake) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *SourceSnowflake) GetSourceType() SourceSnowflakeSnowflake {
+ return SourceSnowflakeSnowflakeSnowflake
+}
+
+func (o *SourceSnowflake) GetWarehouse() string {
+ if o == nil {
+ return ""
+ }
+ return o.Warehouse
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesnowflakecreaterequest.go b/internal/sdk/pkg/models/shared/sourcesnowflakecreaterequest.go
old mode 100755
new mode 100644
index ccb09dbf4..cbc40f1d6
--- a/internal/sdk/pkg/models/shared/sourcesnowflakecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesnowflakecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSnowflakeCreateRequest struct {
Configuration SourceSnowflake `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSnowflakeCreateRequest) GetConfiguration() SourceSnowflake {
+ if o == nil {
+ return SourceSnowflake{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSnowflakeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSnowflakeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSnowflakeCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSnowflakeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesnowflakeputrequest.go b/internal/sdk/pkg/models/shared/sourcesnowflakeputrequest.go
old mode 100755
new mode 100644
index 6a6631f62..d7fd86a21
--- a/internal/sdk/pkg/models/shared/sourcesnowflakeputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesnowflakeputrequest.go
@@ -7,3 +7,24 @@ type SourceSnowflakePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSnowflakePutRequest) GetConfiguration() SourceSnowflakeUpdate {
+ if o == nil {
+ return SourceSnowflakeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSnowflakePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSnowflakePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesnowflakeupdate.go b/internal/sdk/pkg/models/shared/sourcesnowflakeupdate.go
old mode 100755
new mode 100644
index 41673aec4..064b3f1f6
--- a/internal/sdk/pkg/models/shared/sourcesnowflakeupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesnowflakeupdate.go
@@ -3,72 +3,101 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType string
+type SourceSnowflakeUpdateSchemasAuthType string
const (
- SourceSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthTypeUsernamePassword SourceSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType = "username/password"
+ SourceSnowflakeUpdateSchemasAuthTypeUsernamePassword SourceSnowflakeUpdateSchemasAuthType = "username/password"
)
-func (e SourceSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType) ToPointer() *SourceSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType {
+func (e SourceSnowflakeUpdateSchemasAuthType) ToPointer() *SourceSnowflakeUpdateSchemasAuthType {
return &e
}
-func (e *SourceSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSnowflakeUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "username/password":
- *e = SourceSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType(v)
+ *e = SourceSnowflakeUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSnowflakeUpdateSchemasAuthType: %v", v)
}
}
-type SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword struct {
- AuthType SourceSnowflakeUpdateAuthorizationMethodUsernameAndPasswordAuthType `json:"auth_type"`
+type SourceSnowflakeUpdateUsernameAndPassword struct {
+ authType SourceSnowflakeUpdateSchemasAuthType `const:"username/password" json:"auth_type"`
// The password associated with the username.
Password string `json:"password"`
// The username you created to allow Airbyte to access the database.
Username string `json:"username"`
}
-type SourceSnowflakeUpdateAuthorizationMethodOAuth20AuthType string
+func (s SourceSnowflakeUpdateUsernameAndPassword) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSnowflakeUpdateUsernameAndPassword) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSnowflakeUpdateUsernameAndPassword) GetAuthType() SourceSnowflakeUpdateSchemasAuthType {
+ return SourceSnowflakeUpdateSchemasAuthTypeUsernamePassword
+}
+
+func (o *SourceSnowflakeUpdateUsernameAndPassword) GetPassword() string {
+ if o == nil {
+ return ""
+ }
+ return o.Password
+}
+
+func (o *SourceSnowflakeUpdateUsernameAndPassword) GetUsername() string {
+ if o == nil {
+ return ""
+ }
+ return o.Username
+}
+
+type SourceSnowflakeUpdateAuthType string
const (
- SourceSnowflakeUpdateAuthorizationMethodOAuth20AuthTypeOAuth SourceSnowflakeUpdateAuthorizationMethodOAuth20AuthType = "OAuth"
+ SourceSnowflakeUpdateAuthTypeOAuth SourceSnowflakeUpdateAuthType = "OAuth"
)
-func (e SourceSnowflakeUpdateAuthorizationMethodOAuth20AuthType) ToPointer() *SourceSnowflakeUpdateAuthorizationMethodOAuth20AuthType {
+func (e SourceSnowflakeUpdateAuthType) ToPointer() *SourceSnowflakeUpdateAuthType {
return &e
}
-func (e *SourceSnowflakeUpdateAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSnowflakeUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth":
- *e = SourceSnowflakeUpdateAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceSnowflakeUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSnowflakeUpdateAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSnowflakeUpdateAuthType: %v", v)
}
}
-type SourceSnowflakeUpdateAuthorizationMethodOAuth20 struct {
+type SourceSnowflakeUpdateOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken *string `json:"access_token,omitempty"`
- AuthType SourceSnowflakeUpdateAuthorizationMethodOAuth20AuthType `json:"auth_type"`
+ AccessToken *string `json:"access_token,omitempty"`
+ authType SourceSnowflakeUpdateAuthType `const:"OAuth" json:"auth_type"`
// The Client ID of your Snowflake developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Snowflake developer application.
@@ -77,56 +106,94 @@ type SourceSnowflakeUpdateAuthorizationMethodOAuth20 struct {
RefreshToken *string `json:"refresh_token,omitempty"`
}
+func (s SourceSnowflakeUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSnowflakeUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSnowflakeUpdateOAuth20) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSnowflakeUpdateOAuth20) GetAuthType() SourceSnowflakeUpdateAuthType {
+ return SourceSnowflakeUpdateAuthTypeOAuth
+}
+
+func (o *SourceSnowflakeUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceSnowflakeUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceSnowflakeUpdateOAuth20) GetRefreshToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.RefreshToken
+}
+
type SourceSnowflakeUpdateAuthorizationMethodType string
const (
- SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateAuthorizationMethodOAuth20 SourceSnowflakeUpdateAuthorizationMethodType = "source-snowflake-update_Authorization Method_OAuth2.0"
- SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword SourceSnowflakeUpdateAuthorizationMethodType = "source-snowflake-update_Authorization Method_Username and Password"
+ SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateOAuth20 SourceSnowflakeUpdateAuthorizationMethodType = "source-snowflake-update_OAuth2.0"
+ SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateUsernameAndPassword SourceSnowflakeUpdateAuthorizationMethodType = "source-snowflake-update_Username and Password"
)
type SourceSnowflakeUpdateAuthorizationMethod struct {
- SourceSnowflakeUpdateAuthorizationMethodOAuth20 *SourceSnowflakeUpdateAuthorizationMethodOAuth20
- SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword *SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword
+ SourceSnowflakeUpdateOAuth20 *SourceSnowflakeUpdateOAuth20
+ SourceSnowflakeUpdateUsernameAndPassword *SourceSnowflakeUpdateUsernameAndPassword
Type SourceSnowflakeUpdateAuthorizationMethodType
}
-func CreateSourceSnowflakeUpdateAuthorizationMethodSourceSnowflakeUpdateAuthorizationMethodOAuth20(sourceSnowflakeUpdateAuthorizationMethodOAuth20 SourceSnowflakeUpdateAuthorizationMethodOAuth20) SourceSnowflakeUpdateAuthorizationMethod {
- typ := SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateAuthorizationMethodOAuth20
+func CreateSourceSnowflakeUpdateAuthorizationMethodSourceSnowflakeUpdateOAuth20(sourceSnowflakeUpdateOAuth20 SourceSnowflakeUpdateOAuth20) SourceSnowflakeUpdateAuthorizationMethod {
+ typ := SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateOAuth20
return SourceSnowflakeUpdateAuthorizationMethod{
- SourceSnowflakeUpdateAuthorizationMethodOAuth20: &sourceSnowflakeUpdateAuthorizationMethodOAuth20,
- Type: typ,
+ SourceSnowflakeUpdateOAuth20: &sourceSnowflakeUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceSnowflakeUpdateAuthorizationMethodSourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword(sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword) SourceSnowflakeUpdateAuthorizationMethod {
- typ := SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword
+func CreateSourceSnowflakeUpdateAuthorizationMethodSourceSnowflakeUpdateUsernameAndPassword(sourceSnowflakeUpdateUsernameAndPassword SourceSnowflakeUpdateUsernameAndPassword) SourceSnowflakeUpdateAuthorizationMethod {
+ typ := SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateUsernameAndPassword
return SourceSnowflakeUpdateAuthorizationMethod{
- SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword: &sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword,
- Type: typ,
+ SourceSnowflakeUpdateUsernameAndPassword: &sourceSnowflakeUpdateUsernameAndPassword,
+ Type: typ,
}
}
func (u *SourceSnowflakeUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword := new(SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword); err == nil {
- u.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword = sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword
- u.Type = SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword
+
+ sourceSnowflakeUpdateUsernameAndPassword := new(SourceSnowflakeUpdateUsernameAndPassword)
+ if err := utils.UnmarshalJSON(data, &sourceSnowflakeUpdateUsernameAndPassword, "", true, true); err == nil {
+ u.SourceSnowflakeUpdateUsernameAndPassword = sourceSnowflakeUpdateUsernameAndPassword
+ u.Type = SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateUsernameAndPassword
return nil
}
- sourceSnowflakeUpdateAuthorizationMethodOAuth20 := new(SourceSnowflakeUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSnowflakeUpdateAuthorizationMethodOAuth20); err == nil {
- u.SourceSnowflakeUpdateAuthorizationMethodOAuth20 = sourceSnowflakeUpdateAuthorizationMethodOAuth20
- u.Type = SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateAuthorizationMethodOAuth20
+ sourceSnowflakeUpdateOAuth20 := new(SourceSnowflakeUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceSnowflakeUpdateOAuth20, "", true, true); err == nil {
+ u.SourceSnowflakeUpdateOAuth20 = sourceSnowflakeUpdateOAuth20
+ u.Type = SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateOAuth20
return nil
}
@@ -134,15 +201,15 @@ func (u *SourceSnowflakeUpdateAuthorizationMethod) UnmarshalJSON(data []byte) er
}
func (u SourceSnowflakeUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword != nil {
- return json.Marshal(u.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword)
+ if u.SourceSnowflakeUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceSnowflakeUpdateOAuth20, "", true)
}
- if u.SourceSnowflakeUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceSnowflakeUpdateAuthorizationMethodOAuth20)
+ if u.SourceSnowflakeUpdateUsernameAndPassword != nil {
+ return utils.MarshalJSON(u.SourceSnowflakeUpdateUsernameAndPassword, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceSnowflakeUpdate struct {
@@ -160,3 +227,52 @@ type SourceSnowflakeUpdate struct {
// The warehouse you created for Airbyte to access data.
Warehouse string `json:"warehouse"`
}
+
+func (o *SourceSnowflakeUpdate) GetCredentials() *SourceSnowflakeUpdateAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceSnowflakeUpdate) GetDatabase() string {
+ if o == nil {
+ return ""
+ }
+ return o.Database
+}
+
+func (o *SourceSnowflakeUpdate) GetHost() string {
+ if o == nil {
+ return ""
+ }
+ return o.Host
+}
+
+func (o *SourceSnowflakeUpdate) GetJdbcURLParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.JdbcURLParams
+}
+
+func (o *SourceSnowflakeUpdate) GetRole() string {
+ if o == nil {
+ return ""
+ }
+ return o.Role
+}
+
+func (o *SourceSnowflakeUpdate) GetSchema() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Schema
+}
+
+func (o *SourceSnowflakeUpdate) GetWarehouse() string {
+ if o == nil {
+ return ""
+ }
+ return o.Warehouse
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesonarcloud.go b/internal/sdk/pkg/models/shared/sourcesonarcloud.go
old mode 100755
new mode 100644
index 8482f3150..e36cd5ca7
--- a/internal/sdk/pkg/models/shared/sourcesonarcloud.go
+++ b/internal/sdk/pkg/models/shared/sourcesonarcloud.go
@@ -3,32 +3,33 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSonarCloudSonarCloud string
+type SonarCloud string
const (
- SourceSonarCloudSonarCloudSonarCloud SourceSonarCloudSonarCloud = "sonar-cloud"
+ SonarCloudSonarCloud SonarCloud = "sonar-cloud"
)
-func (e SourceSonarCloudSonarCloud) ToPointer() *SourceSonarCloudSonarCloud {
+func (e SonarCloud) ToPointer() *SonarCloud {
return &e
}
-func (e *SourceSonarCloudSonarCloud) UnmarshalJSON(data []byte) error {
+func (e *SonarCloud) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sonar-cloud":
- *e = SourceSonarCloudSonarCloud(v)
+ *e = SonarCloud(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSonarCloudSonarCloud: %v", v)
+ return fmt.Errorf("invalid value for SonarCloud: %v", v)
}
}
@@ -38,10 +39,60 @@ type SourceSonarCloud struct {
// To retrieve issues created before the given date (inclusive).
EndDate *types.Date `json:"end_date,omitempty"`
// Organization key. See here.
- Organization string `json:"organization"`
- SourceType SourceSonarCloudSonarCloud `json:"sourceType"`
+ Organization string `json:"organization"`
+ sourceType SonarCloud `const:"sonar-cloud" json:"sourceType"`
// To retrieve issues created after the given date (inclusive).
StartDate *types.Date `json:"start_date,omitempty"`
// Your User Token. See here. The token is case sensitive.
UserToken string `json:"user_token"`
}
+
+func (s SourceSonarCloud) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSonarCloud) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSonarCloud) GetComponentKeys() []interface{} {
+ if o == nil {
+ return []interface{}{}
+ }
+ return o.ComponentKeys
+}
+
+func (o *SourceSonarCloud) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceSonarCloud) GetOrganization() string {
+ if o == nil {
+ return ""
+ }
+ return o.Organization
+}
+
+func (o *SourceSonarCloud) GetSourceType() SonarCloud {
+ return SonarCloudSonarCloud
+}
+
+func (o *SourceSonarCloud) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceSonarCloud) GetUserToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.UserToken
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesonarcloudcreaterequest.go b/internal/sdk/pkg/models/shared/sourcesonarcloudcreaterequest.go
old mode 100755
new mode 100644
index ac254aa48..1ff33cee2
--- a/internal/sdk/pkg/models/shared/sourcesonarcloudcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesonarcloudcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSonarCloudCreateRequest struct {
Configuration SourceSonarCloud `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSonarCloudCreateRequest) GetConfiguration() SourceSonarCloud {
+ if o == nil {
+ return SourceSonarCloud{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSonarCloudCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSonarCloudCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSonarCloudCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSonarCloudCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesonarcloudputrequest.go b/internal/sdk/pkg/models/shared/sourcesonarcloudputrequest.go
old mode 100755
new mode 100644
index 547b95d6a..f9a77477f
--- a/internal/sdk/pkg/models/shared/sourcesonarcloudputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesonarcloudputrequest.go
@@ -7,3 +7,24 @@ type SourceSonarCloudPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSonarCloudPutRequest) GetConfiguration() SourceSonarCloudUpdate {
+ if o == nil {
+ return SourceSonarCloudUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSonarCloudPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSonarCloudPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesonarcloudupdate.go b/internal/sdk/pkg/models/shared/sourcesonarcloudupdate.go
old mode 100755
new mode 100644
index 0d2266086..043fcbb1f
--- a/internal/sdk/pkg/models/shared/sourcesonarcloudupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesonarcloudupdate.go
@@ -3,7 +3,8 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceSonarCloudUpdate struct {
@@ -18,3 +19,49 @@ type SourceSonarCloudUpdate struct {
// Your User Token. See here. The token is case sensitive.
UserToken string `json:"user_token"`
}
+
+func (s SourceSonarCloudUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSonarCloudUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSonarCloudUpdate) GetComponentKeys() []interface{} {
+ if o == nil {
+ return []interface{}{}
+ }
+ return o.ComponentKeys
+}
+
+func (o *SourceSonarCloudUpdate) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceSonarCloudUpdate) GetOrganization() string {
+ if o == nil {
+ return ""
+ }
+ return o.Organization
+}
+
+func (o *SourceSonarCloudUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceSonarCloudUpdate) GetUserToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.UserToken
+}
diff --git a/internal/sdk/pkg/models/shared/sourcespacexapi.go b/internal/sdk/pkg/models/shared/sourcespacexapi.go
old mode 100755
new mode 100644
index 06b864407..0c2dfdea9
--- a/internal/sdk/pkg/models/shared/sourcespacexapi.go
+++ b/internal/sdk/pkg/models/shared/sourcespacexapi.go
@@ -5,34 +5,64 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSpacexAPISpacexAPI string
+type SpacexAPI string
const (
- SourceSpacexAPISpacexAPISpacexAPI SourceSpacexAPISpacexAPI = "spacex-api"
+ SpacexAPISpacexAPI SpacexAPI = "spacex-api"
)
-func (e SourceSpacexAPISpacexAPI) ToPointer() *SourceSpacexAPISpacexAPI {
+func (e SpacexAPI) ToPointer() *SpacexAPI {
return &e
}
-func (e *SourceSpacexAPISpacexAPI) UnmarshalJSON(data []byte) error {
+func (e *SpacexAPI) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "spacex-api":
- *e = SourceSpacexAPISpacexAPI(v)
+ *e = SpacexAPI(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSpacexAPISpacexAPI: %v", v)
+ return fmt.Errorf("invalid value for SpacexAPI: %v", v)
}
}
type SourceSpacexAPI struct {
- ID *string `json:"id,omitempty"`
- Options *string `json:"options,omitempty"`
- SourceType *SourceSpacexAPISpacexAPI `json:"sourceType,omitempty"`
+ ID *string `json:"id,omitempty"`
+ Options *string `json:"options,omitempty"`
+ sourceType SpacexAPI `const:"spacex-api" json:"sourceType"`
+}
+
+func (s SourceSpacexAPI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSpacexAPI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSpacexAPI) GetID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ID
+}
+
+func (o *SourceSpacexAPI) GetOptions() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Options
+}
+
+func (o *SourceSpacexAPI) GetSourceType() SpacexAPI {
+ return SpacexAPISpacexAPI
}
diff --git a/internal/sdk/pkg/models/shared/sourcespacexapicreaterequest.go b/internal/sdk/pkg/models/shared/sourcespacexapicreaterequest.go
old mode 100755
new mode 100644
index 43c970bde..93f5af89d
--- a/internal/sdk/pkg/models/shared/sourcespacexapicreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcespacexapicreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSpacexAPICreateRequest struct {
Configuration SourceSpacexAPI `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSpacexAPICreateRequest) GetConfiguration() SourceSpacexAPI {
+ if o == nil {
+ return SourceSpacexAPI{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSpacexAPICreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSpacexAPICreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSpacexAPICreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSpacexAPICreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcespacexapiputrequest.go b/internal/sdk/pkg/models/shared/sourcespacexapiputrequest.go
old mode 100755
new mode 100644
index d561743f3..cbe4f1f30
--- a/internal/sdk/pkg/models/shared/sourcespacexapiputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcespacexapiputrequest.go
@@ -7,3 +7,24 @@ type SourceSpacexAPIPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSpacexAPIPutRequest) GetConfiguration() SourceSpacexAPIUpdate {
+ if o == nil {
+ return SourceSpacexAPIUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSpacexAPIPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSpacexAPIPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcespacexapiupdate.go b/internal/sdk/pkg/models/shared/sourcespacexapiupdate.go
old mode 100755
new mode 100644
index 7806c9003..62d2057ba
--- a/internal/sdk/pkg/models/shared/sourcespacexapiupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcespacexapiupdate.go
@@ -6,3 +6,17 @@ type SourceSpacexAPIUpdate struct {
ID *string `json:"id,omitempty"`
Options *string `json:"options,omitempty"`
}
+
+func (o *SourceSpacexAPIUpdate) GetID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ID
+}
+
+func (o *SourceSpacexAPIUpdate) GetOptions() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Options
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesquare.go b/internal/sdk/pkg/models/shared/sourcesquare.go
old mode 100755
new mode 100644
index 8cd8d96ad..b988b6cd7
--- a/internal/sdk/pkg/models/shared/sourcesquare.go
+++ b/internal/sdk/pkg/models/shared/sourcesquare.go
@@ -3,71 +3,93 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSquareAuthenticationAPIKeyAuthType string
+type SourceSquareSchemasAuthType string
const (
- SourceSquareAuthenticationAPIKeyAuthTypeAPIKey SourceSquareAuthenticationAPIKeyAuthType = "API Key"
+ SourceSquareSchemasAuthTypeAPIKey SourceSquareSchemasAuthType = "API Key"
)
-func (e SourceSquareAuthenticationAPIKeyAuthType) ToPointer() *SourceSquareAuthenticationAPIKeyAuthType {
+func (e SourceSquareSchemasAuthType) ToPointer() *SourceSquareSchemasAuthType {
return &e
}
-func (e *SourceSquareAuthenticationAPIKeyAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSquareSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "API Key":
- *e = SourceSquareAuthenticationAPIKeyAuthType(v)
+ *e = SourceSquareSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSquareAuthenticationAPIKeyAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSquareSchemasAuthType: %v", v)
}
}
-// SourceSquareAuthenticationAPIKey - Choose how to authenticate to Square.
-type SourceSquareAuthenticationAPIKey struct {
+// SourceSquareAPIKey - Choose how to authenticate to Square.
+type SourceSquareAPIKey struct {
// The API key for a Square application
- APIKey string `json:"api_key"`
- AuthType SourceSquareAuthenticationAPIKeyAuthType `json:"auth_type"`
+ APIKey string `json:"api_key"`
+ authType SourceSquareSchemasAuthType `const:"API Key" json:"auth_type"`
}
-type SourceSquareAuthenticationOauthAuthenticationAuthType string
+func (s SourceSquareAPIKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSquareAPIKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSquareAPIKey) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceSquareAPIKey) GetAuthType() SourceSquareSchemasAuthType {
+ return SourceSquareSchemasAuthTypeAPIKey
+}
+
+type SourceSquareAuthType string
const (
- SourceSquareAuthenticationOauthAuthenticationAuthTypeOAuth SourceSquareAuthenticationOauthAuthenticationAuthType = "OAuth"
+ SourceSquareAuthTypeOAuth SourceSquareAuthType = "OAuth"
)
-func (e SourceSquareAuthenticationOauthAuthenticationAuthType) ToPointer() *SourceSquareAuthenticationOauthAuthenticationAuthType {
+func (e SourceSquareAuthType) ToPointer() *SourceSquareAuthType {
return &e
}
-func (e *SourceSquareAuthenticationOauthAuthenticationAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSquareAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth":
- *e = SourceSquareAuthenticationOauthAuthenticationAuthType(v)
+ *e = SourceSquareAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSquareAuthenticationOauthAuthenticationAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSquareAuthType: %v", v)
}
}
-// SourceSquareAuthenticationOauthAuthentication - Choose how to authenticate to Square.
-type SourceSquareAuthenticationOauthAuthentication struct {
- AuthType SourceSquareAuthenticationOauthAuthenticationAuthType `json:"auth_type"`
+// SourceSquareOauthAuthentication - Choose how to authenticate to Square.
+type SourceSquareOauthAuthentication struct {
+ authType SourceSquareAuthType `const:"OAuth" json:"auth_type"`
// The Square-issued ID of your application
ClientID string `json:"client_id"`
// The Square-issued application secret for your application
@@ -76,56 +98,87 @@ type SourceSquareAuthenticationOauthAuthentication struct {
RefreshToken string `json:"refresh_token"`
}
+func (s SourceSquareOauthAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSquareOauthAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSquareOauthAuthentication) GetAuthType() SourceSquareAuthType {
+ return SourceSquareAuthTypeOAuth
+}
+
+func (o *SourceSquareOauthAuthentication) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceSquareOauthAuthentication) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceSquareOauthAuthentication) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceSquareAuthenticationType string
const (
- SourceSquareAuthenticationTypeSourceSquareAuthenticationOauthAuthentication SourceSquareAuthenticationType = "source-square_Authentication_Oauth authentication"
- SourceSquareAuthenticationTypeSourceSquareAuthenticationAPIKey SourceSquareAuthenticationType = "source-square_Authentication_API key"
+ SourceSquareAuthenticationTypeSourceSquareOauthAuthentication SourceSquareAuthenticationType = "source-square_Oauth authentication"
+ SourceSquareAuthenticationTypeSourceSquareAPIKey SourceSquareAuthenticationType = "source-square_API key"
)
type SourceSquareAuthentication struct {
- SourceSquareAuthenticationOauthAuthentication *SourceSquareAuthenticationOauthAuthentication
- SourceSquareAuthenticationAPIKey *SourceSquareAuthenticationAPIKey
+ SourceSquareOauthAuthentication *SourceSquareOauthAuthentication
+ SourceSquareAPIKey *SourceSquareAPIKey
Type SourceSquareAuthenticationType
}
-func CreateSourceSquareAuthenticationSourceSquareAuthenticationOauthAuthentication(sourceSquareAuthenticationOauthAuthentication SourceSquareAuthenticationOauthAuthentication) SourceSquareAuthentication {
- typ := SourceSquareAuthenticationTypeSourceSquareAuthenticationOauthAuthentication
+func CreateSourceSquareAuthenticationSourceSquareOauthAuthentication(sourceSquareOauthAuthentication SourceSquareOauthAuthentication) SourceSquareAuthentication {
+ typ := SourceSquareAuthenticationTypeSourceSquareOauthAuthentication
return SourceSquareAuthentication{
- SourceSquareAuthenticationOauthAuthentication: &sourceSquareAuthenticationOauthAuthentication,
- Type: typ,
+ SourceSquareOauthAuthentication: &sourceSquareOauthAuthentication,
+ Type: typ,
}
}
-func CreateSourceSquareAuthenticationSourceSquareAuthenticationAPIKey(sourceSquareAuthenticationAPIKey SourceSquareAuthenticationAPIKey) SourceSquareAuthentication {
- typ := SourceSquareAuthenticationTypeSourceSquareAuthenticationAPIKey
+func CreateSourceSquareAuthenticationSourceSquareAPIKey(sourceSquareAPIKey SourceSquareAPIKey) SourceSquareAuthentication {
+ typ := SourceSquareAuthenticationTypeSourceSquareAPIKey
return SourceSquareAuthentication{
- SourceSquareAuthenticationAPIKey: &sourceSquareAuthenticationAPIKey,
- Type: typ,
+ SourceSquareAPIKey: &sourceSquareAPIKey,
+ Type: typ,
}
}
func (u *SourceSquareAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSquareAuthenticationAPIKey := new(SourceSquareAuthenticationAPIKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSquareAuthenticationAPIKey); err == nil {
- u.SourceSquareAuthenticationAPIKey = sourceSquareAuthenticationAPIKey
- u.Type = SourceSquareAuthenticationTypeSourceSquareAuthenticationAPIKey
+
+ sourceSquareAPIKey := new(SourceSquareAPIKey)
+ if err := utils.UnmarshalJSON(data, &sourceSquareAPIKey, "", true, true); err == nil {
+ u.SourceSquareAPIKey = sourceSquareAPIKey
+ u.Type = SourceSquareAuthenticationTypeSourceSquareAPIKey
return nil
}
- sourceSquareAuthenticationOauthAuthentication := new(SourceSquareAuthenticationOauthAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSquareAuthenticationOauthAuthentication); err == nil {
- u.SourceSquareAuthenticationOauthAuthentication = sourceSquareAuthenticationOauthAuthentication
- u.Type = SourceSquareAuthenticationTypeSourceSquareAuthenticationOauthAuthentication
+ sourceSquareOauthAuthentication := new(SourceSquareOauthAuthentication)
+ if err := utils.UnmarshalJSON(data, &sourceSquareOauthAuthentication, "", true, true); err == nil {
+ u.SourceSquareOauthAuthentication = sourceSquareOauthAuthentication
+ u.Type = SourceSquareAuthenticationTypeSourceSquareOauthAuthentication
return nil
}
@@ -133,38 +186,38 @@ func (u *SourceSquareAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceSquareAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceSquareAuthenticationAPIKey != nil {
- return json.Marshal(u.SourceSquareAuthenticationAPIKey)
+ if u.SourceSquareOauthAuthentication != nil {
+ return utils.MarshalJSON(u.SourceSquareOauthAuthentication, "", true)
}
- if u.SourceSquareAuthenticationOauthAuthentication != nil {
- return json.Marshal(u.SourceSquareAuthenticationOauthAuthentication)
+ if u.SourceSquareAPIKey != nil {
+ return utils.MarshalJSON(u.SourceSquareAPIKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceSquareSquare string
+type Square string
const (
- SourceSquareSquareSquare SourceSquareSquare = "square"
+ SquareSquare Square = "square"
)
-func (e SourceSquareSquare) ToPointer() *SourceSquareSquare {
+func (e Square) ToPointer() *Square {
return &e
}
-func (e *SourceSquareSquare) UnmarshalJSON(data []byte) error {
+func (e *Square) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "square":
- *e = SourceSquareSquare(v)
+ *e = Square(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSquareSquare: %v", v)
+ return fmt.Errorf("invalid value for Square: %v", v)
}
}
@@ -172,10 +225,53 @@ type SourceSquare struct {
// Choose how to authenticate to Square.
Credentials *SourceSquareAuthentication `json:"credentials,omitempty"`
// In some streams there is an option to include deleted objects (Items, Categories, Discounts, Taxes)
- IncludeDeletedObjects *bool `json:"include_deleted_objects,omitempty"`
+ IncludeDeletedObjects *bool `default:"false" json:"include_deleted_objects"`
// Determines whether to use the sandbox or production environment.
- IsSandbox bool `json:"is_sandbox"`
- SourceType SourceSquareSquare `json:"sourceType"`
+ IsSandbox *bool `default:"false" json:"is_sandbox"`
+ sourceType Square `const:"square" json:"sourceType"`
// UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. If not set, all data will be replicated.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2021-01-01" json:"start_date"`
+}
+
+func (s SourceSquare) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSquare) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSquare) GetCredentials() *SourceSquareAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceSquare) GetIncludeDeletedObjects() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeDeletedObjects
+}
+
+func (o *SourceSquare) GetIsSandbox() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IsSandbox
+}
+
+func (o *SourceSquare) GetSourceType() Square {
+ return SquareSquare
+}
+
+func (o *SourceSquare) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcesquarecreaterequest.go b/internal/sdk/pkg/models/shared/sourcesquarecreaterequest.go
old mode 100755
new mode 100644
index f9ebb5068..854ee6270
--- a/internal/sdk/pkg/models/shared/sourcesquarecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesquarecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSquareCreateRequest struct {
Configuration SourceSquare `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSquareCreateRequest) GetConfiguration() SourceSquare {
+ if o == nil {
+ return SourceSquare{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSquareCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSquareCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSquareCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSquareCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesquareputrequest.go b/internal/sdk/pkg/models/shared/sourcesquareputrequest.go
old mode 100755
new mode 100644
index 25d6c4e60..5f1ac4577
--- a/internal/sdk/pkg/models/shared/sourcesquareputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesquareputrequest.go
@@ -7,3 +7,24 @@ type SourceSquarePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSquarePutRequest) GetConfiguration() SourceSquareUpdate {
+ if o == nil {
+ return SourceSquareUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSquarePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSquarePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesquareupdate.go b/internal/sdk/pkg/models/shared/sourcesquareupdate.go
old mode 100755
new mode 100644
index 192eaf7e6..a2af97059
--- a/internal/sdk/pkg/models/shared/sourcesquareupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesquareupdate.go
@@ -3,71 +3,93 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSquareUpdateAuthenticationAPIKeyAuthType string
+type SourceSquareUpdateSchemasAuthType string
const (
- SourceSquareUpdateAuthenticationAPIKeyAuthTypeAPIKey SourceSquareUpdateAuthenticationAPIKeyAuthType = "API Key"
+ SourceSquareUpdateSchemasAuthTypeAPIKey SourceSquareUpdateSchemasAuthType = "API Key"
)
-func (e SourceSquareUpdateAuthenticationAPIKeyAuthType) ToPointer() *SourceSquareUpdateAuthenticationAPIKeyAuthType {
+func (e SourceSquareUpdateSchemasAuthType) ToPointer() *SourceSquareUpdateSchemasAuthType {
return &e
}
-func (e *SourceSquareUpdateAuthenticationAPIKeyAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSquareUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "API Key":
- *e = SourceSquareUpdateAuthenticationAPIKeyAuthType(v)
+ *e = SourceSquareUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSquareUpdateAuthenticationAPIKeyAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSquareUpdateSchemasAuthType: %v", v)
}
}
-// SourceSquareUpdateAuthenticationAPIKey - Choose how to authenticate to Square.
-type SourceSquareUpdateAuthenticationAPIKey struct {
+// SourceSquareUpdateAPIKey - Choose how to authenticate to Square.
+type SourceSquareUpdateAPIKey struct {
// The API key for a Square application
- APIKey string `json:"api_key"`
- AuthType SourceSquareUpdateAuthenticationAPIKeyAuthType `json:"auth_type"`
+ APIKey string `json:"api_key"`
+ authType SourceSquareUpdateSchemasAuthType `const:"API Key" json:"auth_type"`
}
-type SourceSquareUpdateAuthenticationOauthAuthenticationAuthType string
+func (s SourceSquareUpdateAPIKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSquareUpdateAPIKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSquareUpdateAPIKey) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceSquareUpdateAPIKey) GetAuthType() SourceSquareUpdateSchemasAuthType {
+ return SourceSquareUpdateSchemasAuthTypeAPIKey
+}
+
+type SourceSquareUpdateAuthType string
const (
- SourceSquareUpdateAuthenticationOauthAuthenticationAuthTypeOAuth SourceSquareUpdateAuthenticationOauthAuthenticationAuthType = "OAuth"
+ SourceSquareUpdateAuthTypeOAuth SourceSquareUpdateAuthType = "OAuth"
)
-func (e SourceSquareUpdateAuthenticationOauthAuthenticationAuthType) ToPointer() *SourceSquareUpdateAuthenticationOauthAuthenticationAuthType {
+func (e SourceSquareUpdateAuthType) ToPointer() *SourceSquareUpdateAuthType {
return &e
}
-func (e *SourceSquareUpdateAuthenticationOauthAuthenticationAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceSquareUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "OAuth":
- *e = SourceSquareUpdateAuthenticationOauthAuthenticationAuthType(v)
+ *e = SourceSquareUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSquareUpdateAuthenticationOauthAuthenticationAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceSquareUpdateAuthType: %v", v)
}
}
-// SourceSquareUpdateAuthenticationOauthAuthentication - Choose how to authenticate to Square.
-type SourceSquareUpdateAuthenticationOauthAuthentication struct {
- AuthType SourceSquareUpdateAuthenticationOauthAuthenticationAuthType `json:"auth_type"`
+// OauthAuthentication - Choose how to authenticate to Square.
+type OauthAuthentication struct {
+ authType SourceSquareUpdateAuthType `const:"OAuth" json:"auth_type"`
// The Square-issued ID of your application
ClientID string `json:"client_id"`
// The Square-issued application secret for your application
@@ -76,56 +98,87 @@ type SourceSquareUpdateAuthenticationOauthAuthentication struct {
RefreshToken string `json:"refresh_token"`
}
+func (o OauthAuthentication) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(o, "", false)
+}
+
+func (o *OauthAuthentication) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &o, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *OauthAuthentication) GetAuthType() SourceSquareUpdateAuthType {
+ return SourceSquareUpdateAuthTypeOAuth
+}
+
+func (o *OauthAuthentication) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *OauthAuthentication) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *OauthAuthentication) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
type SourceSquareUpdateAuthenticationType string
const (
- SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAuthenticationOauthAuthentication SourceSquareUpdateAuthenticationType = "source-square-update_Authentication_Oauth authentication"
- SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAuthenticationAPIKey SourceSquareUpdateAuthenticationType = "source-square-update_Authentication_API key"
+ SourceSquareUpdateAuthenticationTypeOauthAuthentication SourceSquareUpdateAuthenticationType = "Oauth authentication"
+ SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAPIKey SourceSquareUpdateAuthenticationType = "source-square-update_API key"
)
type SourceSquareUpdateAuthentication struct {
- SourceSquareUpdateAuthenticationOauthAuthentication *SourceSquareUpdateAuthenticationOauthAuthentication
- SourceSquareUpdateAuthenticationAPIKey *SourceSquareUpdateAuthenticationAPIKey
+ OauthAuthentication *OauthAuthentication
+ SourceSquareUpdateAPIKey *SourceSquareUpdateAPIKey
Type SourceSquareUpdateAuthenticationType
}
-func CreateSourceSquareUpdateAuthenticationSourceSquareUpdateAuthenticationOauthAuthentication(sourceSquareUpdateAuthenticationOauthAuthentication SourceSquareUpdateAuthenticationOauthAuthentication) SourceSquareUpdateAuthentication {
- typ := SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAuthenticationOauthAuthentication
+func CreateSourceSquareUpdateAuthenticationOauthAuthentication(oauthAuthentication OauthAuthentication) SourceSquareUpdateAuthentication {
+ typ := SourceSquareUpdateAuthenticationTypeOauthAuthentication
return SourceSquareUpdateAuthentication{
- SourceSquareUpdateAuthenticationOauthAuthentication: &sourceSquareUpdateAuthenticationOauthAuthentication,
- Type: typ,
+ OauthAuthentication: &oauthAuthentication,
+ Type: typ,
}
}
-func CreateSourceSquareUpdateAuthenticationSourceSquareUpdateAuthenticationAPIKey(sourceSquareUpdateAuthenticationAPIKey SourceSquareUpdateAuthenticationAPIKey) SourceSquareUpdateAuthentication {
- typ := SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAuthenticationAPIKey
+func CreateSourceSquareUpdateAuthenticationSourceSquareUpdateAPIKey(sourceSquareUpdateAPIKey SourceSquareUpdateAPIKey) SourceSquareUpdateAuthentication {
+ typ := SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAPIKey
return SourceSquareUpdateAuthentication{
- SourceSquareUpdateAuthenticationAPIKey: &sourceSquareUpdateAuthenticationAPIKey,
- Type: typ,
+ SourceSquareUpdateAPIKey: &sourceSquareUpdateAPIKey,
+ Type: typ,
}
}
func (u *SourceSquareUpdateAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSquareUpdateAuthenticationAPIKey := new(SourceSquareUpdateAuthenticationAPIKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSquareUpdateAuthenticationAPIKey); err == nil {
- u.SourceSquareUpdateAuthenticationAPIKey = sourceSquareUpdateAuthenticationAPIKey
- u.Type = SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAuthenticationAPIKey
+
+ sourceSquareUpdateAPIKey := new(SourceSquareUpdateAPIKey)
+ if err := utils.UnmarshalJSON(data, &sourceSquareUpdateAPIKey, "", true, true); err == nil {
+ u.SourceSquareUpdateAPIKey = sourceSquareUpdateAPIKey
+ u.Type = SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAPIKey
return nil
}
- sourceSquareUpdateAuthenticationOauthAuthentication := new(SourceSquareUpdateAuthenticationOauthAuthentication)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSquareUpdateAuthenticationOauthAuthentication); err == nil {
- u.SourceSquareUpdateAuthenticationOauthAuthentication = sourceSquareUpdateAuthenticationOauthAuthentication
- u.Type = SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAuthenticationOauthAuthentication
+ oauthAuthentication := new(OauthAuthentication)
+ if err := utils.UnmarshalJSON(data, &oauthAuthentication, "", true, true); err == nil {
+ u.OauthAuthentication = oauthAuthentication
+ u.Type = SourceSquareUpdateAuthenticationTypeOauthAuthentication
return nil
}
@@ -133,24 +186,63 @@ func (u *SourceSquareUpdateAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceSquareUpdateAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceSquareUpdateAuthenticationAPIKey != nil {
- return json.Marshal(u.SourceSquareUpdateAuthenticationAPIKey)
+ if u.OauthAuthentication != nil {
+ return utils.MarshalJSON(u.OauthAuthentication, "", true)
}
- if u.SourceSquareUpdateAuthenticationOauthAuthentication != nil {
- return json.Marshal(u.SourceSquareUpdateAuthenticationOauthAuthentication)
+ if u.SourceSquareUpdateAPIKey != nil {
+ return utils.MarshalJSON(u.SourceSquareUpdateAPIKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceSquareUpdate struct {
// Choose how to authenticate to Square.
Credentials *SourceSquareUpdateAuthentication `json:"credentials,omitempty"`
// In some streams there is an option to include deleted objects (Items, Categories, Discounts, Taxes)
- IncludeDeletedObjects *bool `json:"include_deleted_objects,omitempty"`
+ IncludeDeletedObjects *bool `default:"false" json:"include_deleted_objects"`
// Determines whether to use the sandbox or production environment.
- IsSandbox bool `json:"is_sandbox"`
+ IsSandbox *bool `default:"false" json:"is_sandbox"`
// UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. If not set, all data will be replicated.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2021-01-01" json:"start_date"`
+}
+
+func (s SourceSquareUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSquareUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSquareUpdate) GetCredentials() *SourceSquareUpdateAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceSquareUpdate) GetIncludeDeletedObjects() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeDeletedObjects
+}
+
+func (o *SourceSquareUpdate) GetIsSandbox() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IsSandbox
+}
+
+func (o *SourceSquareUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcesresponse.go b/internal/sdk/pkg/models/shared/sourcesresponse.go
old mode 100755
new mode 100644
index 897114977..295f6aec3
--- a/internal/sdk/pkg/models/shared/sourcesresponse.go
+++ b/internal/sdk/pkg/models/shared/sourcesresponse.go
@@ -7,3 +7,24 @@ type SourcesResponse struct {
Next *string `json:"next,omitempty"`
Previous *string `json:"previous,omitempty"`
}
+
+func (o *SourcesResponse) GetData() []SourceResponse {
+ if o == nil {
+ return []SourceResponse{}
+ }
+ return o.Data
+}
+
+func (o *SourcesResponse) GetNext() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Next
+}
+
+func (o *SourcesResponse) GetPrevious() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Previous
+}
diff --git a/internal/sdk/pkg/models/shared/sourcestrava.go b/internal/sdk/pkg/models/shared/sourcestrava.go
old mode 100755
new mode 100644
index d85532fc0..df0e3cfaf
--- a/internal/sdk/pkg/models/shared/sourcestrava.go
+++ b/internal/sdk/pkg/models/shared/sourcestrava.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -32,41 +33,95 @@ func (e *SourceStravaAuthType) UnmarshalJSON(data []byte) error {
}
}
-type SourceStravaStrava string
+type Strava string
const (
- SourceStravaStravaStrava SourceStravaStrava = "strava"
+ StravaStrava Strava = "strava"
)
-func (e SourceStravaStrava) ToPointer() *SourceStravaStrava {
+func (e Strava) ToPointer() *Strava {
return &e
}
-func (e *SourceStravaStrava) UnmarshalJSON(data []byte) error {
+func (e *Strava) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "strava":
- *e = SourceStravaStrava(v)
+ *e = Strava(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceStravaStrava: %v", v)
+ return fmt.Errorf("invalid value for Strava: %v", v)
}
}
type SourceStrava struct {
// The Athlete ID of your Strava developer application.
AthleteID int64 `json:"athlete_id"`
- AuthType *SourceStravaAuthType `json:"auth_type,omitempty"`
+ authType *SourceStravaAuthType `const:"Client" json:"auth_type"`
// The Client ID of your Strava developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Strava developer application.
ClientSecret string `json:"client_secret"`
// The Refresh Token with the activity: read_all permissions.
- RefreshToken string `json:"refresh_token"`
- SourceType SourceStravaStrava `json:"sourceType"`
+ RefreshToken string `json:"refresh_token"`
+ sourceType Strava `const:"strava" json:"sourceType"`
// UTC date and time. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceStrava) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceStrava) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceStrava) GetAthleteID() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.AthleteID
+}
+
+func (o *SourceStrava) GetAuthType() *SourceStravaAuthType {
+ return SourceStravaAuthTypeClient.ToPointer()
+}
+
+func (o *SourceStrava) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceStrava) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceStrava) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceStrava) GetSourceType() Strava {
+ return StravaStrava
+}
+
+func (o *SourceStrava) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcestravacreaterequest.go b/internal/sdk/pkg/models/shared/sourcestravacreaterequest.go
old mode 100755
new mode 100644
index e6a72f1a6..11aad712e
--- a/internal/sdk/pkg/models/shared/sourcestravacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcestravacreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceStravaCreateRequest struct {
Configuration SourceStrava `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceStravaCreateRequest) GetConfiguration() SourceStrava {
+ if o == nil {
+ return SourceStrava{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceStravaCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceStravaCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceStravaCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceStravaCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcestravaputrequest.go b/internal/sdk/pkg/models/shared/sourcestravaputrequest.go
old mode 100755
new mode 100644
index 33914b022..8388a552a
--- a/internal/sdk/pkg/models/shared/sourcestravaputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcestravaputrequest.go
@@ -7,3 +7,24 @@ type SourceStravaPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceStravaPutRequest) GetConfiguration() SourceStravaUpdate {
+ if o == nil {
+ return SourceStravaUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceStravaPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceStravaPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcestravaupdate.go b/internal/sdk/pkg/models/shared/sourcestravaupdate.go
old mode 100755
new mode 100644
index 6c42a8bde..39840658a
--- a/internal/sdk/pkg/models/shared/sourcestravaupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcestravaupdate.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -35,7 +36,7 @@ func (e *SourceStravaUpdateAuthType) UnmarshalJSON(data []byte) error {
type SourceStravaUpdate struct {
// The Athlete ID of your Strava developer application.
AthleteID int64 `json:"athlete_id"`
- AuthType *SourceStravaUpdateAuthType `json:"auth_type,omitempty"`
+ authType *SourceStravaUpdateAuthType `const:"Client" json:"auth_type"`
// The Client ID of your Strava developer application.
ClientID string `json:"client_id"`
// The Client Secret of your Strava developer application.
@@ -45,3 +46,53 @@ type SourceStravaUpdate struct {
// UTC date and time. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceStravaUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceStravaUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceStravaUpdate) GetAthleteID() int64 {
+ if o == nil {
+ return 0
+ }
+ return o.AthleteID
+}
+
+func (o *SourceStravaUpdate) GetAuthType() *SourceStravaUpdateAuthType {
+ return SourceStravaUpdateAuthTypeClient.ToPointer()
+}
+
+func (o *SourceStravaUpdate) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceStravaUpdate) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceStravaUpdate) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceStravaUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcestripe.go b/internal/sdk/pkg/models/shared/sourcestripe.go
old mode 100755
new mode 100644
index ec167d261..567363496
--- a/internal/sdk/pkg/models/shared/sourcestripe.go
+++ b/internal/sdk/pkg/models/shared/sourcestripe.go
@@ -5,43 +5,112 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceStripeStripe string
+type Stripe string
const (
- SourceStripeStripeStripe SourceStripeStripe = "stripe"
+ StripeStripe Stripe = "stripe"
)
-func (e SourceStripeStripe) ToPointer() *SourceStripeStripe {
+func (e Stripe) ToPointer() *Stripe {
return &e
}
-func (e *SourceStripeStripe) UnmarshalJSON(data []byte) error {
+func (e *Stripe) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "stripe":
- *e = SourceStripeStripe(v)
+ *e = Stripe(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceStripeStripe: %v", v)
+ return fmt.Errorf("invalid value for Stripe: %v", v)
}
}
type SourceStripe struct {
// Your Stripe account ID (starts with 'acct_', find yours here).
AccountID string `json:"account_id"`
+ // The number of API calls per second that you allow the connector to make. This value cannot be bigger than the real API call rate limit (https://stripe.com/docs/rate-limits). If not specified the default maximum is 25 and 100 calls per second for test and production tokens respectively.
+ CallRateLimit *int64 `json:"call_rate_limit,omitempty"`
// Stripe API key (usually starts with 'sk_live_'; find yours here).
ClientSecret string `json:"client_secret"`
- // When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. More info here
- LookbackWindowDays *int64 `json:"lookback_window_days,omitempty"`
+ // When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. The Lookback Window only applies to streams that do not support event-based incremental syncs: Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks, Refunds. More info here
+ LookbackWindowDays *int64 `default:"0" json:"lookback_window_days"`
+ // The number of worker threads to use for the sync. The performance upper boundary depends on the call_rate_limit setting and the type of account.
+ NumWorkers *int64 `default:"10" json:"num_workers"`
// The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted.
- SliceRange *int64 `json:"slice_range,omitempty"`
- SourceType SourceStripeStripe `json:"sourceType"`
+ SliceRange *int64 `default:"365" json:"slice_range"`
+ sourceType Stripe `const:"stripe" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated.
- StartDate *time.Time `json:"start_date,omitempty"`
+ StartDate *time.Time `default:"2017-01-25T00:00:00Z" json:"start_date"`
+}
+
+func (s SourceStripe) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceStripe) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceStripe) GetAccountID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccountID
+}
+
+func (o *SourceStripe) GetCallRateLimit() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CallRateLimit
+}
+
+func (o *SourceStripe) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceStripe) GetLookbackWindowDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindowDays
+}
+
+func (o *SourceStripe) GetNumWorkers() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.NumWorkers
+}
+
+func (o *SourceStripe) GetSliceRange() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SliceRange
+}
+
+func (o *SourceStripe) GetSourceType() Stripe {
+ return StripeStripe
+}
+
+func (o *SourceStripe) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcestripecreaterequest.go b/internal/sdk/pkg/models/shared/sourcestripecreaterequest.go
old mode 100755
new mode 100644
index b5bb25614..66fb2bfb5
--- a/internal/sdk/pkg/models/shared/sourcestripecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcestripecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceStripeCreateRequest struct {
Configuration SourceStripe `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceStripeCreateRequest) GetConfiguration() SourceStripe {
+ if o == nil {
+ return SourceStripe{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceStripeCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceStripeCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceStripeCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceStripeCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcestripeputrequest.go b/internal/sdk/pkg/models/shared/sourcestripeputrequest.go
old mode 100755
new mode 100644
index 2a121b998..fe2ad05e8
--- a/internal/sdk/pkg/models/shared/sourcestripeputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcestripeputrequest.go
@@ -7,3 +7,24 @@ type SourceStripePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceStripePutRequest) GetConfiguration() SourceStripeUpdate {
+ if o == nil {
+ return SourceStripeUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceStripePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceStripePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcestripeupdate.go b/internal/sdk/pkg/models/shared/sourcestripeupdate.go
old mode 100755
new mode 100644
index b4103b482..02603c3a2
--- a/internal/sdk/pkg/models/shared/sourcestripeupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcestripeupdate.go
@@ -3,18 +3,83 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
type SourceStripeUpdate struct {
// Your Stripe account ID (starts with 'acct_', find yours here).
AccountID string `json:"account_id"`
+ // The number of API calls per second that you allow connector to make. This value can not be bigger than real API call rate limit (https://stripe.com/docs/rate-limits). If not specified the default maximum is 25 and 100 calls per second for test and production tokens respectively.
+ CallRateLimit *int64 `json:"call_rate_limit,omitempty"`
// Stripe API key (usually starts with 'sk_live_'; find yours here).
ClientSecret string `json:"client_secret"`
- // When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. More info here
- LookbackWindowDays *int64 `json:"lookback_window_days,omitempty"`
+ // When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. The Lookback Window only applies to streams that do not support event-based incremental syncs: Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks, Refunds. More info here
+ LookbackWindowDays *int64 `default:"0" json:"lookback_window_days"`
+ // The number of worker threads to use for the sync. The performance upper boundary depends on the call_rate_limit setting and the type of account.
+ NumWorkers *int64 `default:"10" json:"num_workers"`
// The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted.
- SliceRange *int64 `json:"slice_range,omitempty"`
+ SliceRange *int64 `default:"365" json:"slice_range"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated.
- StartDate *time.Time `json:"start_date,omitempty"`
+ StartDate *time.Time `default:"2017-01-25T00:00:00Z" json:"start_date"`
+}
+
+func (s SourceStripeUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceStripeUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceStripeUpdate) GetAccountID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccountID
+}
+
+func (o *SourceStripeUpdate) GetCallRateLimit() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.CallRateLimit
+}
+
+func (o *SourceStripeUpdate) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceStripeUpdate) GetLookbackWindowDays() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindowDays
+}
+
+func (o *SourceStripeUpdate) GetNumWorkers() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.NumWorkers
+}
+
+func (o *SourceStripeUpdate) GetSliceRange() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.SliceRange
+}
+
+func (o *SourceStripeUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcesurveymonkey.go b/internal/sdk/pkg/models/shared/sourcesurveymonkey.go
old mode 100755
new mode 100644
index fa711d416..a80e095e6
--- a/internal/sdk/pkg/models/shared/sourcesurveymonkey.go
+++ b/internal/sdk/pkg/models/shared/sourcesurveymonkey.go
@@ -5,44 +5,81 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod string
+type SourceSurveymonkeyAuthMethod string
const (
- SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethodOauth20 SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod = "oauth2.0"
+ SourceSurveymonkeyAuthMethodOauth20 SourceSurveymonkeyAuthMethod = "oauth2.0"
)
-func (e SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod) ToPointer() *SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod {
+func (e SourceSurveymonkeyAuthMethod) ToPointer() *SourceSurveymonkeyAuthMethod {
return &e
}
-func (e *SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceSurveymonkeyAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod(v)
+ *e = SourceSurveymonkeyAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceSurveymonkeyAuthMethod: %v", v)
}
}
// SourceSurveymonkeySurveyMonkeyAuthorizationMethod - The authorization method to use to retrieve data from SurveyMonkey
type SourceSurveymonkeySurveyMonkeyAuthorizationMethod struct {
// Access Token for making authenticated requests. See the docs for information on how to generate this key.
- AccessToken string `json:"access_token"`
- AuthMethod SourceSurveymonkeySurveyMonkeyAuthorizationMethodAuthMethod `json:"auth_method"`
+ AccessToken string `json:"access_token"`
+ authMethod SourceSurveymonkeyAuthMethod `const:"oauth2.0" json:"auth_method"`
// The Client ID of the SurveyMonkey developer application.
ClientID *string `json:"client_id,omitempty"`
// The Client Secret of the SurveyMonkey developer application.
ClientSecret *string `json:"client_secret,omitempty"`
}
+func (s SourceSurveymonkeySurveyMonkeyAuthorizationMethod) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSurveymonkeySurveyMonkeyAuthorizationMethod) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSurveymonkeySurveyMonkeyAuthorizationMethod) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSurveymonkeySurveyMonkeyAuthorizationMethod) GetAuthMethod() SourceSurveymonkeyAuthMethod {
+ return SourceSurveymonkeyAuthMethodOauth20
+}
+
+func (o *SourceSurveymonkeySurveyMonkeyAuthorizationMethod) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceSurveymonkeySurveyMonkeyAuthorizationMethod) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
// SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccount - Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different.
type SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccount string
@@ -74,27 +111,27 @@ func (e *SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccount) UnmarshalJS
}
}
-type SourceSurveymonkeySurveymonkey string
+type Surveymonkey string
const (
- SourceSurveymonkeySurveymonkeySurveymonkey SourceSurveymonkeySurveymonkey = "surveymonkey"
+ SurveymonkeySurveymonkey Surveymonkey = "surveymonkey"
)
-func (e SourceSurveymonkeySurveymonkey) ToPointer() *SourceSurveymonkeySurveymonkey {
+func (e Surveymonkey) ToPointer() *Surveymonkey {
return &e
}
-func (e *SourceSurveymonkeySurveymonkey) UnmarshalJSON(data []byte) error {
+func (e *Surveymonkey) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "surveymonkey":
- *e = SourceSurveymonkeySurveymonkey(v)
+ *e = Surveymonkey(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSurveymonkeySurveymonkey: %v", v)
+ return fmt.Errorf("invalid value for Surveymonkey: %v", v)
}
}
@@ -102,10 +139,53 @@ type SourceSurveymonkey struct {
// The authorization method to use to retrieve data from SurveyMonkey
Credentials *SourceSurveymonkeySurveyMonkeyAuthorizationMethod `json:"credentials,omitempty"`
// Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different.
- Origin *SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccount `json:"origin,omitempty"`
- SourceType SourceSurveymonkeySurveymonkey `json:"sourceType"`
+ Origin *SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccount `default:"USA" json:"origin"`
+ sourceType Surveymonkey `const:"surveymonkey" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
// IDs of the surveys from which you'd like to replicate data. If left empty, data from all boards to which you have access will be replicated.
SurveyIds []string `json:"survey_ids,omitempty"`
}
+
+func (s SourceSurveymonkey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSurveymonkey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSurveymonkey) GetCredentials() *SourceSurveymonkeySurveyMonkeyAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceSurveymonkey) GetOrigin() *SourceSurveymonkeyOriginDatacenterOfTheSurveyMonkeyAccount {
+ if o == nil {
+ return nil
+ }
+ return o.Origin
+}
+
+func (o *SourceSurveymonkey) GetSourceType() Surveymonkey {
+ return SurveymonkeySurveymonkey
+}
+
+func (o *SourceSurveymonkey) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceSurveymonkey) GetSurveyIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.SurveyIds
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesurveymonkeycreaterequest.go b/internal/sdk/pkg/models/shared/sourcesurveymonkeycreaterequest.go
old mode 100755
new mode 100644
index 1192e807f..2913f7773
--- a/internal/sdk/pkg/models/shared/sourcesurveymonkeycreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesurveymonkeycreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSurveymonkeyCreateRequest struct {
Configuration SourceSurveymonkey `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSurveymonkeyCreateRequest) GetConfiguration() SourceSurveymonkey {
+ if o == nil {
+ return SourceSurveymonkey{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSurveymonkeyCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSurveymonkeyCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSurveymonkeyCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSurveymonkeyCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesurveymonkeyputrequest.go b/internal/sdk/pkg/models/shared/sourcesurveymonkeyputrequest.go
old mode 100755
new mode 100644
index 9f2c7a8b9..af3def0b3
--- a/internal/sdk/pkg/models/shared/sourcesurveymonkeyputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesurveymonkeyputrequest.go
@@ -7,3 +7,24 @@ type SourceSurveymonkeyPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSurveymonkeyPutRequest) GetConfiguration() SourceSurveymonkeyUpdate {
+ if o == nil {
+ return SourceSurveymonkeyUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSurveymonkeyPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSurveymonkeyPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesurveymonkeyupdate.go b/internal/sdk/pkg/models/shared/sourcesurveymonkeyupdate.go
old mode 100755
new mode 100644
index bd3bbf9e6..5ead76524
--- a/internal/sdk/pkg/models/shared/sourcesurveymonkeyupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesurveymonkeyupdate.go
@@ -5,58 +5,95 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethodAuthMethod string
+type SourceSurveymonkeyUpdateAuthMethod string
const (
- SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethodAuthMethodOauth20 SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethodAuthMethod = "oauth2.0"
+ SourceSurveymonkeyUpdateAuthMethodOauth20 SourceSurveymonkeyUpdateAuthMethod = "oauth2.0"
)
-func (e SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethodAuthMethod) ToPointer() *SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethodAuthMethod {
+func (e SourceSurveymonkeyUpdateAuthMethod) ToPointer() *SourceSurveymonkeyUpdateAuthMethod {
return &e
}
-func (e *SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethodAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceSurveymonkeyUpdateAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethodAuthMethod(v)
+ *e = SourceSurveymonkeyUpdateAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethodAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceSurveymonkeyUpdateAuthMethod: %v", v)
}
}
-// SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethod - The authorization method to use to retrieve data from SurveyMonkey
-type SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethod struct {
+// SurveyMonkeyAuthorizationMethod - The authorization method to use to retrieve data from SurveyMonkey
+type SurveyMonkeyAuthorizationMethod struct {
// Access Token for making authenticated requests. See the docs for information on how to generate this key.
- AccessToken string `json:"access_token"`
- AuthMethod SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethodAuthMethod `json:"auth_method"`
+ AccessToken string `json:"access_token"`
+ authMethod SourceSurveymonkeyUpdateAuthMethod `const:"oauth2.0" json:"auth_method"`
// The Client ID of the SurveyMonkey developer application.
ClientID *string `json:"client_id,omitempty"`
// The Client Secret of the SurveyMonkey developer application.
ClientSecret *string `json:"client_secret,omitempty"`
}
-// SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount - Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different.
-type SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount string
+func (s SurveyMonkeyAuthorizationMethod) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SurveyMonkeyAuthorizationMethod) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SurveyMonkeyAuthorizationMethod) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SurveyMonkeyAuthorizationMethod) GetAuthMethod() SourceSurveymonkeyUpdateAuthMethod {
+ return SourceSurveymonkeyUpdateAuthMethodOauth20
+}
+
+func (o *SurveyMonkeyAuthorizationMethod) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SurveyMonkeyAuthorizationMethod) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+// OriginDatacenterOfTheSurveyMonkeyAccount - Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different.
+type OriginDatacenterOfTheSurveyMonkeyAccount string
const (
- SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccountUsa SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount = "USA"
- SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccountEurope SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount = "Europe"
- SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccountCanada SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount = "Canada"
+ OriginDatacenterOfTheSurveyMonkeyAccountUsa OriginDatacenterOfTheSurveyMonkeyAccount = "USA"
+ OriginDatacenterOfTheSurveyMonkeyAccountEurope OriginDatacenterOfTheSurveyMonkeyAccount = "Europe"
+ OriginDatacenterOfTheSurveyMonkeyAccountCanada OriginDatacenterOfTheSurveyMonkeyAccount = "Canada"
)
-func (e SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount) ToPointer() *SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount {
+func (e OriginDatacenterOfTheSurveyMonkeyAccount) ToPointer() *OriginDatacenterOfTheSurveyMonkeyAccount {
return &e
}
-func (e *SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount) UnmarshalJSON(data []byte) error {
+func (e *OriginDatacenterOfTheSurveyMonkeyAccount) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -67,20 +104,59 @@ func (e *SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount) Unmar
case "Europe":
fallthrough
case "Canada":
- *e = SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount(v)
+ *e = OriginDatacenterOfTheSurveyMonkeyAccount(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount: %v", v)
+ return fmt.Errorf("invalid value for OriginDatacenterOfTheSurveyMonkeyAccount: %v", v)
}
}
type SourceSurveymonkeyUpdate struct {
// The authorization method to use to retrieve data from SurveyMonkey
- Credentials *SourceSurveymonkeyUpdateSurveyMonkeyAuthorizationMethod `json:"credentials,omitempty"`
+ Credentials *SurveyMonkeyAuthorizationMethod `json:"credentials,omitempty"`
// Depending on the originating datacenter of the SurveyMonkey account, the API access URL may be different.
- Origin *SourceSurveymonkeyUpdateOriginDatacenterOfTheSurveyMonkeyAccount `json:"origin,omitempty"`
+ Origin *OriginDatacenterOfTheSurveyMonkeyAccount `default:"USA" json:"origin"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
// IDs of the surveys from which you'd like to replicate data. If left empty, data from all boards to which you have access will be replicated.
SurveyIds []string `json:"survey_ids,omitempty"`
}
+
+func (s SourceSurveymonkeyUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSurveymonkeyUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSurveymonkeyUpdate) GetCredentials() *SurveyMonkeyAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceSurveymonkeyUpdate) GetOrigin() *OriginDatacenterOfTheSurveyMonkeyAccount {
+ if o == nil {
+ return nil
+ }
+ return o.Origin
+}
+
+func (o *SourceSurveymonkeyUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceSurveymonkeyUpdate) GetSurveyIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.SurveyIds
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesurveysparrow.go b/internal/sdk/pkg/models/shared/sourcesurveysparrow.go
old mode 100755
new mode 100644
index 83f9284b2..018974790
--- a/internal/sdk/pkg/models/shared/sourcesurveysparrow.go
+++ b/internal/sdk/pkg/models/shared/sourcesurveysparrow.go
@@ -3,120 +3,145 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSurveySparrowBaseURLGlobalAccountURLBase string
+type SourceSurveySparrowSchemasURLBase string
const (
- SourceSurveySparrowBaseURLGlobalAccountURLBaseHTTPSAPISurveysparrowComV3 SourceSurveySparrowBaseURLGlobalAccountURLBase = "https://api.surveysparrow.com/v3"
+ SourceSurveySparrowSchemasURLBaseHTTPSAPISurveysparrowComV3 SourceSurveySparrowSchemasURLBase = "https://api.surveysparrow.com/v3"
)
-func (e SourceSurveySparrowBaseURLGlobalAccountURLBase) ToPointer() *SourceSurveySparrowBaseURLGlobalAccountURLBase {
+func (e SourceSurveySparrowSchemasURLBase) ToPointer() *SourceSurveySparrowSchemasURLBase {
return &e
}
-func (e *SourceSurveySparrowBaseURLGlobalAccountURLBase) UnmarshalJSON(data []byte) error {
+func (e *SourceSurveySparrowSchemasURLBase) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "https://api.surveysparrow.com/v3":
- *e = SourceSurveySparrowBaseURLGlobalAccountURLBase(v)
+ *e = SourceSurveySparrowSchemasURLBase(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSurveySparrowBaseURLGlobalAccountURLBase: %v", v)
+ return fmt.Errorf("invalid value for SourceSurveySparrowSchemasURLBase: %v", v)
}
}
-// SourceSurveySparrowBaseURLGlobalAccount - Is your account location is EU based? If yes, the base url to retrieve data will be different.
-type SourceSurveySparrowBaseURLGlobalAccount struct {
- URLBase *SourceSurveySparrowBaseURLGlobalAccountURLBase `json:"url_base,omitempty"`
+// SourceSurveySparrowGlobalAccount - Is your account location EU based? If yes, the base url to retrieve data will be different.
+type SourceSurveySparrowGlobalAccount struct {
+ urlBase *SourceSurveySparrowSchemasURLBase `const:"https://api.surveysparrow.com/v3" json:"url_base,omitempty"`
}
-type SourceSurveySparrowBaseURLEUBasedAccountURLBase string
+func (s SourceSurveySparrowGlobalAccount) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSurveySparrowGlobalAccount) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSurveySparrowGlobalAccount) GetURLBase() *SourceSurveySparrowSchemasURLBase {
+ return SourceSurveySparrowSchemasURLBaseHTTPSAPISurveysparrowComV3.ToPointer()
+}
+
+type SourceSurveySparrowURLBase string
const (
- SourceSurveySparrowBaseURLEUBasedAccountURLBaseHTTPSEuAPISurveysparrowComV3 SourceSurveySparrowBaseURLEUBasedAccountURLBase = "https://eu-api.surveysparrow.com/v3"
+ SourceSurveySparrowURLBaseHTTPSEuAPISurveysparrowComV3 SourceSurveySparrowURLBase = "https://eu-api.surveysparrow.com/v3"
)
-func (e SourceSurveySparrowBaseURLEUBasedAccountURLBase) ToPointer() *SourceSurveySparrowBaseURLEUBasedAccountURLBase {
+func (e SourceSurveySparrowURLBase) ToPointer() *SourceSurveySparrowURLBase {
return &e
}
-func (e *SourceSurveySparrowBaseURLEUBasedAccountURLBase) UnmarshalJSON(data []byte) error {
+func (e *SourceSurveySparrowURLBase) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "https://eu-api.surveysparrow.com/v3":
- *e = SourceSurveySparrowBaseURLEUBasedAccountURLBase(v)
+ *e = SourceSurveySparrowURLBase(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSurveySparrowBaseURLEUBasedAccountURLBase: %v", v)
+ return fmt.Errorf("invalid value for SourceSurveySparrowURLBase: %v", v)
+ }
+}
+
+// SourceSurveySparrowEUBasedAccount - Is your account location is EU based? If yes, the base url to retrieve data will be different.
+type SourceSurveySparrowEUBasedAccount struct {
+ urlBase *SourceSurveySparrowURLBase `const:"https://eu-api.surveysparrow.com/v3" json:"url_base,omitempty"`
+}
+
+func (s SourceSurveySparrowEUBasedAccount) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSurveySparrowEUBasedAccount) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
}
+ return nil
}
-// SourceSurveySparrowBaseURLEUBasedAccount - Is your account location is EU based? If yes, the base url to retrieve data will be different.
-type SourceSurveySparrowBaseURLEUBasedAccount struct {
- URLBase *SourceSurveySparrowBaseURLEUBasedAccountURLBase `json:"url_base,omitempty"`
+func (o *SourceSurveySparrowEUBasedAccount) GetURLBase() *SourceSurveySparrowURLBase {
+ return SourceSurveySparrowURLBaseHTTPSEuAPISurveysparrowComV3.ToPointer()
}
type SourceSurveySparrowBaseURLType string
const (
- SourceSurveySparrowBaseURLTypeSourceSurveySparrowBaseURLEUBasedAccount SourceSurveySparrowBaseURLType = "source-survey-sparrow_Base URL_EU-based account"
- SourceSurveySparrowBaseURLTypeSourceSurveySparrowBaseURLGlobalAccount SourceSurveySparrowBaseURLType = "source-survey-sparrow_Base URL_Global account"
+ SourceSurveySparrowBaseURLTypeSourceSurveySparrowEUBasedAccount SourceSurveySparrowBaseURLType = "source-survey-sparrow_EU-based account"
+ SourceSurveySparrowBaseURLTypeSourceSurveySparrowGlobalAccount SourceSurveySparrowBaseURLType = "source-survey-sparrow_Global account"
)
type SourceSurveySparrowBaseURL struct {
- SourceSurveySparrowBaseURLEUBasedAccount *SourceSurveySparrowBaseURLEUBasedAccount
- SourceSurveySparrowBaseURLGlobalAccount *SourceSurveySparrowBaseURLGlobalAccount
+ SourceSurveySparrowEUBasedAccount *SourceSurveySparrowEUBasedAccount
+ SourceSurveySparrowGlobalAccount *SourceSurveySparrowGlobalAccount
Type SourceSurveySparrowBaseURLType
}
-func CreateSourceSurveySparrowBaseURLSourceSurveySparrowBaseURLEUBasedAccount(sourceSurveySparrowBaseURLEUBasedAccount SourceSurveySparrowBaseURLEUBasedAccount) SourceSurveySparrowBaseURL {
- typ := SourceSurveySparrowBaseURLTypeSourceSurveySparrowBaseURLEUBasedAccount
+func CreateSourceSurveySparrowBaseURLSourceSurveySparrowEUBasedAccount(sourceSurveySparrowEUBasedAccount SourceSurveySparrowEUBasedAccount) SourceSurveySparrowBaseURL {
+ typ := SourceSurveySparrowBaseURLTypeSourceSurveySparrowEUBasedAccount
return SourceSurveySparrowBaseURL{
- SourceSurveySparrowBaseURLEUBasedAccount: &sourceSurveySparrowBaseURLEUBasedAccount,
- Type: typ,
+ SourceSurveySparrowEUBasedAccount: &sourceSurveySparrowEUBasedAccount,
+ Type: typ,
}
}
-func CreateSourceSurveySparrowBaseURLSourceSurveySparrowBaseURLGlobalAccount(sourceSurveySparrowBaseURLGlobalAccount SourceSurveySparrowBaseURLGlobalAccount) SourceSurveySparrowBaseURL {
- typ := SourceSurveySparrowBaseURLTypeSourceSurveySparrowBaseURLGlobalAccount
+func CreateSourceSurveySparrowBaseURLSourceSurveySparrowGlobalAccount(sourceSurveySparrowGlobalAccount SourceSurveySparrowGlobalAccount) SourceSurveySparrowBaseURL {
+ typ := SourceSurveySparrowBaseURLTypeSourceSurveySparrowGlobalAccount
return SourceSurveySparrowBaseURL{
- SourceSurveySparrowBaseURLGlobalAccount: &sourceSurveySparrowBaseURLGlobalAccount,
- Type: typ,
+ SourceSurveySparrowGlobalAccount: &sourceSurveySparrowGlobalAccount,
+ Type: typ,
}
}
func (u *SourceSurveySparrowBaseURL) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceSurveySparrowBaseURLEUBasedAccount := new(SourceSurveySparrowBaseURLEUBasedAccount)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSurveySparrowBaseURLEUBasedAccount); err == nil {
- u.SourceSurveySparrowBaseURLEUBasedAccount = sourceSurveySparrowBaseURLEUBasedAccount
- u.Type = SourceSurveySparrowBaseURLTypeSourceSurveySparrowBaseURLEUBasedAccount
+
+ sourceSurveySparrowEUBasedAccount := new(SourceSurveySparrowEUBasedAccount)
+ if err := utils.UnmarshalJSON(data, &sourceSurveySparrowEUBasedAccount, "", true, true); err == nil {
+ u.SourceSurveySparrowEUBasedAccount = sourceSurveySparrowEUBasedAccount
+ u.Type = SourceSurveySparrowBaseURLTypeSourceSurveySparrowEUBasedAccount
return nil
}
- sourceSurveySparrowBaseURLGlobalAccount := new(SourceSurveySparrowBaseURLGlobalAccount)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSurveySparrowBaseURLGlobalAccount); err == nil {
- u.SourceSurveySparrowBaseURLGlobalAccount = sourceSurveySparrowBaseURLGlobalAccount
- u.Type = SourceSurveySparrowBaseURLTypeSourceSurveySparrowBaseURLGlobalAccount
+ sourceSurveySparrowGlobalAccount := new(SourceSurveySparrowGlobalAccount)
+ if err := utils.UnmarshalJSON(data, &sourceSurveySparrowGlobalAccount, "", true, true); err == nil {
+ u.SourceSurveySparrowGlobalAccount = sourceSurveySparrowGlobalAccount
+ u.Type = SourceSurveySparrowBaseURLTypeSourceSurveySparrowGlobalAccount
return nil
}
@@ -124,38 +149,38 @@ func (u *SourceSurveySparrowBaseURL) UnmarshalJSON(data []byte) error {
}
func (u SourceSurveySparrowBaseURL) MarshalJSON() ([]byte, error) {
- if u.SourceSurveySparrowBaseURLEUBasedAccount != nil {
- return json.Marshal(u.SourceSurveySparrowBaseURLEUBasedAccount)
+ if u.SourceSurveySparrowEUBasedAccount != nil {
+ return utils.MarshalJSON(u.SourceSurveySparrowEUBasedAccount, "", true)
}
- if u.SourceSurveySparrowBaseURLGlobalAccount != nil {
- return json.Marshal(u.SourceSurveySparrowBaseURLGlobalAccount)
+ if u.SourceSurveySparrowGlobalAccount != nil {
+ return utils.MarshalJSON(u.SourceSurveySparrowGlobalAccount, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceSurveySparrowSurveySparrow string
+type SurveySparrow string
const (
- SourceSurveySparrowSurveySparrowSurveySparrow SourceSurveySparrowSurveySparrow = "survey-sparrow"
+ SurveySparrowSurveySparrow SurveySparrow = "survey-sparrow"
)
-func (e SourceSurveySparrowSurveySparrow) ToPointer() *SourceSurveySparrowSurveySparrow {
+func (e SurveySparrow) ToPointer() *SurveySparrow {
return &e
}
-func (e *SourceSurveySparrowSurveySparrow) UnmarshalJSON(data []byte) error {
+func (e *SurveySparrow) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "survey-sparrow":
- *e = SourceSurveySparrowSurveySparrow(v)
+ *e = SurveySparrow(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSurveySparrowSurveySparrow: %v", v)
+ return fmt.Errorf("invalid value for SurveySparrow: %v", v)
}
}
@@ -163,8 +188,44 @@ type SourceSurveySparrow struct {
// Your access token. See here. The key is case sensitive.
AccessToken string `json:"access_token"`
// Is your account location is EU based? If yes, the base url to retrieve data will be different.
- Region *SourceSurveySparrowBaseURL `json:"region,omitempty"`
- SourceType SourceSurveySparrowSurveySparrow `json:"sourceType"`
+ Region *SourceSurveySparrowBaseURL `json:"region,omitempty"`
+ sourceType SurveySparrow `const:"survey-sparrow" json:"sourceType"`
// A List of your survey ids for survey-specific stream
SurveyID []interface{} `json:"survey_id,omitempty"`
}
+
+func (s SourceSurveySparrow) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceSurveySparrow) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceSurveySparrow) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSurveySparrow) GetRegion() *SourceSurveySparrowBaseURL {
+ if o == nil {
+ return nil
+ }
+ return o.Region
+}
+
+func (o *SourceSurveySparrow) GetSourceType() SurveySparrow {
+ return SurveySparrowSurveySparrow
+}
+
+func (o *SourceSurveySparrow) GetSurveyID() []interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.SurveyID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesurveysparrowcreaterequest.go b/internal/sdk/pkg/models/shared/sourcesurveysparrowcreaterequest.go
old mode 100755
new mode 100644
index 03d92cbcf..3f3d24bcc
--- a/internal/sdk/pkg/models/shared/sourcesurveysparrowcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesurveysparrowcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceSurveySparrowCreateRequest struct {
Configuration SourceSurveySparrow `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSurveySparrowCreateRequest) GetConfiguration() SourceSurveySparrow {
+ if o == nil {
+ return SourceSurveySparrow{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSurveySparrowCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceSurveySparrowCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSurveySparrowCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceSurveySparrowCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesurveysparrowputrequest.go b/internal/sdk/pkg/models/shared/sourcesurveysparrowputrequest.go
old mode 100755
new mode 100644
index c10e24eb7..eb12fa858
--- a/internal/sdk/pkg/models/shared/sourcesurveysparrowputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcesurveysparrowputrequest.go
@@ -7,3 +7,24 @@ type SourceSurveySparrowPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceSurveySparrowPutRequest) GetConfiguration() SourceSurveySparrowUpdate {
+ if o == nil {
+ return SourceSurveySparrowUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceSurveySparrowPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceSurveySparrowPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcesurveysparrowupdate.go b/internal/sdk/pkg/models/shared/sourcesurveysparrowupdate.go
old mode 100755
new mode 100644
index cb829ca15..90feac8d7
--- a/internal/sdk/pkg/models/shared/sourcesurveysparrowupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcesurveysparrowupdate.go
@@ -3,143 +3,189 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceSurveySparrowUpdateBaseURLGlobalAccountURLBase string
+type SourceSurveySparrowUpdateURLBase string
const (
- SourceSurveySparrowUpdateBaseURLGlobalAccountURLBaseHTTPSAPISurveysparrowComV3 SourceSurveySparrowUpdateBaseURLGlobalAccountURLBase = "https://api.surveysparrow.com/v3"
+ SourceSurveySparrowUpdateURLBaseHTTPSAPISurveysparrowComV3 SourceSurveySparrowUpdateURLBase = "https://api.surveysparrow.com/v3"
)
-func (e SourceSurveySparrowUpdateBaseURLGlobalAccountURLBase) ToPointer() *SourceSurveySparrowUpdateBaseURLGlobalAccountURLBase {
+func (e SourceSurveySparrowUpdateURLBase) ToPointer() *SourceSurveySparrowUpdateURLBase {
return &e
}
-func (e *SourceSurveySparrowUpdateBaseURLGlobalAccountURLBase) UnmarshalJSON(data []byte) error {
+func (e *SourceSurveySparrowUpdateURLBase) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "https://api.surveysparrow.com/v3":
- *e = SourceSurveySparrowUpdateBaseURLGlobalAccountURLBase(v)
+ *e = SourceSurveySparrowUpdateURLBase(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSurveySparrowUpdateBaseURLGlobalAccountURLBase: %v", v)
+ return fmt.Errorf("invalid value for SourceSurveySparrowUpdateURLBase: %v", v)
}
}
-// SourceSurveySparrowUpdateBaseURLGlobalAccount - Is your account location is EU based? If yes, the base url to retrieve data will be different.
-type SourceSurveySparrowUpdateBaseURLGlobalAccount struct {
- URLBase *SourceSurveySparrowUpdateBaseURLGlobalAccountURLBase `json:"url_base,omitempty"`
+// GlobalAccount - Is your account location is EU based? If yes, the base url to retrieve data will be different.
+type GlobalAccount struct {
+ urlBase *SourceSurveySparrowUpdateURLBase `const:"https://api.surveysparrow.com/v3" json:"url_base,omitempty"`
}
-type SourceSurveySparrowUpdateBaseURLEUBasedAccountURLBase string
+func (g GlobalAccount) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(g, "", false)
+}
+
+func (g *GlobalAccount) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &g, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *GlobalAccount) GetURLBase() *SourceSurveySparrowUpdateURLBase {
+ return SourceSurveySparrowUpdateURLBaseHTTPSAPISurveysparrowComV3.ToPointer()
+}
+
+type URLBase string
const (
- SourceSurveySparrowUpdateBaseURLEUBasedAccountURLBaseHTTPSEuAPISurveysparrowComV3 SourceSurveySparrowUpdateBaseURLEUBasedAccountURLBase = "https://eu-api.surveysparrow.com/v3"
+ URLBaseHTTPSEuAPISurveysparrowComV3 URLBase = "https://eu-api.surveysparrow.com/v3"
)
-func (e SourceSurveySparrowUpdateBaseURLEUBasedAccountURLBase) ToPointer() *SourceSurveySparrowUpdateBaseURLEUBasedAccountURLBase {
+func (e URLBase) ToPointer() *URLBase {
return &e
}
-func (e *SourceSurveySparrowUpdateBaseURLEUBasedAccountURLBase) UnmarshalJSON(data []byte) error {
+func (e *URLBase) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "https://eu-api.surveysparrow.com/v3":
- *e = SourceSurveySparrowUpdateBaseURLEUBasedAccountURLBase(v)
+ *e = URLBase(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceSurveySparrowUpdateBaseURLEUBasedAccountURLBase: %v", v)
+ return fmt.Errorf("invalid value for URLBase: %v", v)
}
}
-// SourceSurveySparrowUpdateBaseURLEUBasedAccount - Is your account location is EU based? If yes, the base url to retrieve data will be different.
-type SourceSurveySparrowUpdateBaseURLEUBasedAccount struct {
- URLBase *SourceSurveySparrowUpdateBaseURLEUBasedAccountURLBase `json:"url_base,omitempty"`
+// EUBasedAccount - Is your account location is EU based? If yes, the base url to retrieve data will be different.
+type EUBasedAccount struct {
+ urlBase *URLBase `const:"https://eu-api.surveysparrow.com/v3" json:"url_base,omitempty"`
}
-type SourceSurveySparrowUpdateBaseURLType string
+func (e EUBasedAccount) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(e, "", false)
+}
+
+func (e *EUBasedAccount) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &e, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *EUBasedAccount) GetURLBase() *URLBase {
+ return URLBaseHTTPSEuAPISurveysparrowComV3.ToPointer()
+}
+
+type BaseURLType string
const (
- SourceSurveySparrowUpdateBaseURLTypeSourceSurveySparrowUpdateBaseURLEUBasedAccount SourceSurveySparrowUpdateBaseURLType = "source-survey-sparrow-update_Base URL_EU-based account"
- SourceSurveySparrowUpdateBaseURLTypeSourceSurveySparrowUpdateBaseURLGlobalAccount SourceSurveySparrowUpdateBaseURLType = "source-survey-sparrow-update_Base URL_Global account"
+ BaseURLTypeEUBasedAccount BaseURLType = "EU-based account"
+ BaseURLTypeGlobalAccount BaseURLType = "Global account"
)
-type SourceSurveySparrowUpdateBaseURL struct {
- SourceSurveySparrowUpdateBaseURLEUBasedAccount *SourceSurveySparrowUpdateBaseURLEUBasedAccount
- SourceSurveySparrowUpdateBaseURLGlobalAccount *SourceSurveySparrowUpdateBaseURLGlobalAccount
+type BaseURL struct {
+ EUBasedAccount *EUBasedAccount
+ GlobalAccount *GlobalAccount
- Type SourceSurveySparrowUpdateBaseURLType
+ Type BaseURLType
}
-func CreateSourceSurveySparrowUpdateBaseURLSourceSurveySparrowUpdateBaseURLEUBasedAccount(sourceSurveySparrowUpdateBaseURLEUBasedAccount SourceSurveySparrowUpdateBaseURLEUBasedAccount) SourceSurveySparrowUpdateBaseURL {
- typ := SourceSurveySparrowUpdateBaseURLTypeSourceSurveySparrowUpdateBaseURLEUBasedAccount
+func CreateBaseURLEUBasedAccount(euBasedAccount EUBasedAccount) BaseURL {
+ typ := BaseURLTypeEUBasedAccount
- return SourceSurveySparrowUpdateBaseURL{
- SourceSurveySparrowUpdateBaseURLEUBasedAccount: &sourceSurveySparrowUpdateBaseURLEUBasedAccount,
- Type: typ,
+ return BaseURL{
+ EUBasedAccount: &euBasedAccount,
+ Type: typ,
}
}
-func CreateSourceSurveySparrowUpdateBaseURLSourceSurveySparrowUpdateBaseURLGlobalAccount(sourceSurveySparrowUpdateBaseURLGlobalAccount SourceSurveySparrowUpdateBaseURLGlobalAccount) SourceSurveySparrowUpdateBaseURL {
- typ := SourceSurveySparrowUpdateBaseURLTypeSourceSurveySparrowUpdateBaseURLGlobalAccount
+func CreateBaseURLGlobalAccount(globalAccount GlobalAccount) BaseURL {
+ typ := BaseURLTypeGlobalAccount
- return SourceSurveySparrowUpdateBaseURL{
- SourceSurveySparrowUpdateBaseURLGlobalAccount: &sourceSurveySparrowUpdateBaseURLGlobalAccount,
- Type: typ,
+ return BaseURL{
+ GlobalAccount: &globalAccount,
+ Type: typ,
}
}
-func (u *SourceSurveySparrowUpdateBaseURL) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
+func (u *BaseURL) UnmarshalJSON(data []byte) error {
- sourceSurveySparrowUpdateBaseURLEUBasedAccount := new(SourceSurveySparrowUpdateBaseURLEUBasedAccount)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSurveySparrowUpdateBaseURLEUBasedAccount); err == nil {
- u.SourceSurveySparrowUpdateBaseURLEUBasedAccount = sourceSurveySparrowUpdateBaseURLEUBasedAccount
- u.Type = SourceSurveySparrowUpdateBaseURLTypeSourceSurveySparrowUpdateBaseURLEUBasedAccount
+ euBasedAccount := new(EUBasedAccount)
+ if err := utils.UnmarshalJSON(data, &euBasedAccount, "", true, true); err == nil {
+ u.EUBasedAccount = euBasedAccount
+ u.Type = BaseURLTypeEUBasedAccount
return nil
}
- sourceSurveySparrowUpdateBaseURLGlobalAccount := new(SourceSurveySparrowUpdateBaseURLGlobalAccount)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceSurveySparrowUpdateBaseURLGlobalAccount); err == nil {
- u.SourceSurveySparrowUpdateBaseURLGlobalAccount = sourceSurveySparrowUpdateBaseURLGlobalAccount
- u.Type = SourceSurveySparrowUpdateBaseURLTypeSourceSurveySparrowUpdateBaseURLGlobalAccount
+ globalAccount := new(GlobalAccount)
+ if err := utils.UnmarshalJSON(data, &globalAccount, "", true, true); err == nil {
+ u.GlobalAccount = globalAccount
+ u.Type = BaseURLTypeGlobalAccount
return nil
}
return errors.New("could not unmarshal into supported union types")
}
-func (u SourceSurveySparrowUpdateBaseURL) MarshalJSON() ([]byte, error) {
- if u.SourceSurveySparrowUpdateBaseURLEUBasedAccount != nil {
- return json.Marshal(u.SourceSurveySparrowUpdateBaseURLEUBasedAccount)
+func (u BaseURL) MarshalJSON() ([]byte, error) {
+ if u.EUBasedAccount != nil {
+ return utils.MarshalJSON(u.EUBasedAccount, "", true)
}
- if u.SourceSurveySparrowUpdateBaseURLGlobalAccount != nil {
- return json.Marshal(u.SourceSurveySparrowUpdateBaseURLGlobalAccount)
+ if u.GlobalAccount != nil {
+ return utils.MarshalJSON(u.GlobalAccount, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceSurveySparrowUpdate struct {
// Your access token. See here. The key is case sensitive.
AccessToken string `json:"access_token"`
// Is your account location is EU based? If yes, the base url to retrieve data will be different.
- Region *SourceSurveySparrowUpdateBaseURL `json:"region,omitempty"`
+ Region *BaseURL `json:"region,omitempty"`
// A List of your survey ids for survey-specific stream
SurveyID []interface{} `json:"survey_id,omitempty"`
}
+
+func (o *SourceSurveySparrowUpdate) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceSurveySparrowUpdate) GetRegion() *BaseURL {
+ if o == nil {
+ return nil
+ }
+ return o.Region
+}
+
+func (o *SourceSurveySparrowUpdate) GetSurveyID() []interface{} {
+ if o == nil {
+ return nil
+ }
+ return o.SurveyID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetempo.go b/internal/sdk/pkg/models/shared/sourcetempo.go
old mode 100755
new mode 100644
index b52fcc6dd..ce62c335f
--- a/internal/sdk/pkg/models/shared/sourcetempo.go
+++ b/internal/sdk/pkg/models/shared/sourcetempo.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceTempoTempo string
+type Tempo string
const (
- SourceTempoTempoTempo SourceTempoTempo = "tempo"
+ TempoTempo Tempo = "tempo"
)
-func (e SourceTempoTempo) ToPointer() *SourceTempoTempo {
+func (e Tempo) ToPointer() *Tempo {
return &e
}
-func (e *SourceTempoTempo) UnmarshalJSON(data []byte) error {
+func (e *Tempo) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "tempo":
- *e = SourceTempoTempo(v)
+ *e = Tempo(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTempoTempo: %v", v)
+ return fmt.Errorf("invalid value for Tempo: %v", v)
}
}
type SourceTempo struct {
// Tempo API Token. Go to Tempo>Settings, scroll down to Data Access and select API integration.
- APIToken string `json:"api_token"`
- SourceType SourceTempoTempo `json:"sourceType"`
+ APIToken string `json:"api_token"`
+ sourceType Tempo `const:"tempo" json:"sourceType"`
+}
+
+func (s SourceTempo) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTempo) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTempo) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceTempo) GetSourceType() Tempo {
+ return TempoTempo
}
diff --git a/internal/sdk/pkg/models/shared/sourcetempocreaterequest.go b/internal/sdk/pkg/models/shared/sourcetempocreaterequest.go
old mode 100755
new mode 100644
index 4859d9ea1..510a2ef41
--- a/internal/sdk/pkg/models/shared/sourcetempocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetempocreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceTempoCreateRequest struct {
Configuration SourceTempo `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTempoCreateRequest) GetConfiguration() SourceTempo {
+ if o == nil {
+ return SourceTempo{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTempoCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceTempoCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTempoCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceTempoCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetempoputrequest.go b/internal/sdk/pkg/models/shared/sourcetempoputrequest.go
old mode 100755
new mode 100644
index 8ab692d58..7789a9a7a
--- a/internal/sdk/pkg/models/shared/sourcetempoputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetempoputrequest.go
@@ -7,3 +7,24 @@ type SourceTempoPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTempoPutRequest) GetConfiguration() SourceTempoUpdate {
+ if o == nil {
+ return SourceTempoUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTempoPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTempoPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetempoupdate.go b/internal/sdk/pkg/models/shared/sourcetempoupdate.go
old mode 100755
new mode 100644
index ed660abbb..b60661e87
--- a/internal/sdk/pkg/models/shared/sourcetempoupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcetempoupdate.go
@@ -6,3 +6,10 @@ type SourceTempoUpdate struct {
// Tempo API Token. Go to Tempo>Settings, scroll down to Data Access and select API integration.
APIToken string `json:"api_token"`
}
+
+func (o *SourceTempoUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetheguardianapi.go b/internal/sdk/pkg/models/shared/sourcetheguardianapi.go
old mode 100755
new mode 100644
index 924b6de3b..08b8526cb
--- a/internal/sdk/pkg/models/shared/sourcetheguardianapi.go
+++ b/internal/sdk/pkg/models/shared/sourcetheguardianapi.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceTheGuardianAPITheGuardianAPI string
+type TheGuardianAPI string
const (
- SourceTheGuardianAPITheGuardianAPITheGuardianAPI SourceTheGuardianAPITheGuardianAPI = "the-guardian-api"
+ TheGuardianAPITheGuardianAPI TheGuardianAPI = "the-guardian-api"
)
-func (e SourceTheGuardianAPITheGuardianAPI) ToPointer() *SourceTheGuardianAPITheGuardianAPI {
+func (e TheGuardianAPI) ToPointer() *TheGuardianAPI {
return &e
}
-func (e *SourceTheGuardianAPITheGuardianAPI) UnmarshalJSON(data []byte) error {
+func (e *TheGuardianAPI) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "the-guardian-api":
- *e = SourceTheGuardianAPITheGuardianAPI(v)
+ *e = TheGuardianAPI(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTheGuardianAPITheGuardianAPI: %v", v)
+ return fmt.Errorf("invalid value for TheGuardianAPI: %v", v)
}
}
@@ -39,10 +40,67 @@ type SourceTheGuardianAPI struct {
// (Optional) The query (q) parameter filters the results to only those that include that search term. The q parameter supports AND, OR and NOT operators.
Query *string `json:"query,omitempty"`
// (Optional) Use this to filter the results by a particular section. See here for a list of all sections, and here for the sections endpoint documentation.
- Section *string `json:"section,omitempty"`
- SourceType SourceTheGuardianAPITheGuardianAPI `json:"sourceType"`
+ Section *string `json:"section,omitempty"`
+ sourceType TheGuardianAPI `const:"the-guardian-api" json:"sourceType"`
// Use this to set the minimum date (YYYY-MM-DD) of the results. Results older than the start_date will not be shown.
StartDate string `json:"start_date"`
// (Optional) A tag is a piece of data that is used by The Guardian to categorise content. Use this parameter to filter results by showing only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation.
Tag *string `json:"tag,omitempty"`
}
+
+func (s SourceTheGuardianAPI) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTheGuardianAPI) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTheGuardianAPI) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceTheGuardianAPI) GetEndDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceTheGuardianAPI) GetQuery() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Query
+}
+
+func (o *SourceTheGuardianAPI) GetSection() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Section
+}
+
+func (o *SourceTheGuardianAPI) GetSourceType() TheGuardianAPI {
+ return TheGuardianAPITheGuardianAPI
+}
+
+func (o *SourceTheGuardianAPI) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceTheGuardianAPI) GetTag() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Tag
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetheguardianapicreaterequest.go b/internal/sdk/pkg/models/shared/sourcetheguardianapicreaterequest.go
old mode 100755
new mode 100644
index 271ac9548..1029ff198
--- a/internal/sdk/pkg/models/shared/sourcetheguardianapicreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetheguardianapicreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceTheGuardianAPICreateRequest struct {
Configuration SourceTheGuardianAPI `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTheGuardianAPICreateRequest) GetConfiguration() SourceTheGuardianAPI {
+ if o == nil {
+ return SourceTheGuardianAPI{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTheGuardianAPICreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceTheGuardianAPICreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTheGuardianAPICreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceTheGuardianAPICreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetheguardianapiputrequest.go b/internal/sdk/pkg/models/shared/sourcetheguardianapiputrequest.go
old mode 100755
new mode 100644
index 7a31d5eac..9906d86a8
--- a/internal/sdk/pkg/models/shared/sourcetheguardianapiputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetheguardianapiputrequest.go
@@ -7,3 +7,24 @@ type SourceTheGuardianAPIPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTheGuardianAPIPutRequest) GetConfiguration() SourceTheGuardianAPIUpdate {
+ if o == nil {
+ return SourceTheGuardianAPIUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTheGuardianAPIPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTheGuardianAPIPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetheguardianapiupdate.go b/internal/sdk/pkg/models/shared/sourcetheguardianapiupdate.go
old mode 100755
new mode 100644
index 2bd303549..b13596eed
--- a/internal/sdk/pkg/models/shared/sourcetheguardianapiupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcetheguardianapiupdate.go
@@ -16,3 +16,45 @@ type SourceTheGuardianAPIUpdate struct {
// (Optional) A tag is a piece of data that is used by The Guardian to categorise content. Use this parameter to filter results by showing only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation.
Tag *string `json:"tag,omitempty"`
}
+
+func (o *SourceTheGuardianAPIUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceTheGuardianAPIUpdate) GetEndDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceTheGuardianAPIUpdate) GetQuery() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Query
+}
+
+func (o *SourceTheGuardianAPIUpdate) GetSection() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Section
+}
+
+func (o *SourceTheGuardianAPIUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceTheGuardianAPIUpdate) GetTag() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Tag
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetiktokmarketing.go b/internal/sdk/pkg/models/shared/sourcetiktokmarketing.go
old mode 100755
new mode 100644
index 87cf9ff32..96f6acb81
--- a/internal/sdk/pkg/models/shared/sourcetiktokmarketing.go
+++ b/internal/sdk/pkg/models/shared/sourcetiktokmarketing.go
@@ -3,133 +3,200 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceTiktokMarketingAuthenticationMethodSandboxAccessTokenAuthType string
+type SourceTiktokMarketingSchemasAuthType string
const (
- SourceTiktokMarketingAuthenticationMethodSandboxAccessTokenAuthTypeSandboxAccessToken SourceTiktokMarketingAuthenticationMethodSandboxAccessTokenAuthType = "sandbox_access_token"
+ SourceTiktokMarketingSchemasAuthTypeSandboxAccessToken SourceTiktokMarketingSchemasAuthType = "sandbox_access_token"
)
-func (e SourceTiktokMarketingAuthenticationMethodSandboxAccessTokenAuthType) ToPointer() *SourceTiktokMarketingAuthenticationMethodSandboxAccessTokenAuthType {
+func (e SourceTiktokMarketingSchemasAuthType) ToPointer() *SourceTiktokMarketingSchemasAuthType {
return &e
}
-func (e *SourceTiktokMarketingAuthenticationMethodSandboxAccessTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTiktokMarketingSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sandbox_access_token":
- *e = SourceTiktokMarketingAuthenticationMethodSandboxAccessTokenAuthType(v)
+ *e = SourceTiktokMarketingSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTiktokMarketingAuthenticationMethodSandboxAccessTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTiktokMarketingSchemasAuthType: %v", v)
}
}
-// SourceTiktokMarketingAuthenticationMethodSandboxAccessToken - Authentication method
-type SourceTiktokMarketingAuthenticationMethodSandboxAccessToken struct {
+// SourceTiktokMarketingSandboxAccessToken - Authentication method
+type SourceTiktokMarketingSandboxAccessToken struct {
// The long-term authorized access token.
AccessToken string `json:"access_token"`
// The Advertiser ID which generated for the developer's Sandbox application.
- AdvertiserID string `json:"advertiser_id"`
- AuthType *SourceTiktokMarketingAuthenticationMethodSandboxAccessTokenAuthType `json:"auth_type,omitempty"`
+ AdvertiserID string `json:"advertiser_id"`
+ authType *SourceTiktokMarketingSchemasAuthType `const:"sandbox_access_token" json:"auth_type,omitempty"`
}
-type SourceTiktokMarketingAuthenticationMethodOAuth20AuthType string
+func (s SourceTiktokMarketingSandboxAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTiktokMarketingSandboxAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTiktokMarketingSandboxAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceTiktokMarketingSandboxAccessToken) GetAdvertiserID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AdvertiserID
+}
+
+func (o *SourceTiktokMarketingSandboxAccessToken) GetAuthType() *SourceTiktokMarketingSchemasAuthType {
+ return SourceTiktokMarketingSchemasAuthTypeSandboxAccessToken.ToPointer()
+}
+
+type SourceTiktokMarketingAuthType string
const (
- SourceTiktokMarketingAuthenticationMethodOAuth20AuthTypeOauth20 SourceTiktokMarketingAuthenticationMethodOAuth20AuthType = "oauth2.0"
+ SourceTiktokMarketingAuthTypeOauth20 SourceTiktokMarketingAuthType = "oauth2.0"
)
-func (e SourceTiktokMarketingAuthenticationMethodOAuth20AuthType) ToPointer() *SourceTiktokMarketingAuthenticationMethodOAuth20AuthType {
+func (e SourceTiktokMarketingAuthType) ToPointer() *SourceTiktokMarketingAuthType {
return &e
}
-func (e *SourceTiktokMarketingAuthenticationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTiktokMarketingAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceTiktokMarketingAuthenticationMethodOAuth20AuthType(v)
+ *e = SourceTiktokMarketingAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTiktokMarketingAuthenticationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTiktokMarketingAuthType: %v", v)
}
}
-// SourceTiktokMarketingAuthenticationMethodOAuth20 - Authentication method
-type SourceTiktokMarketingAuthenticationMethodOAuth20 struct {
+// SourceTiktokMarketingOAuth20 - Authentication method
+type SourceTiktokMarketingOAuth20 struct {
// Long-term Authorized Access Token.
AccessToken string `json:"access_token"`
// The Advertiser ID to filter reports and streams. Let this empty to retrieve all.
AdvertiserID *string `json:"advertiser_id,omitempty"`
// The Developer Application App ID.
- AppID string `json:"app_id"`
- AuthType *SourceTiktokMarketingAuthenticationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AppID string `json:"app_id"`
+ authType *SourceTiktokMarketingAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The Developer Application Secret.
Secret string `json:"secret"`
}
+func (s SourceTiktokMarketingOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTiktokMarketingOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTiktokMarketingOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceTiktokMarketingOAuth20) GetAdvertiserID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AdvertiserID
+}
+
+func (o *SourceTiktokMarketingOAuth20) GetAppID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AppID
+}
+
+func (o *SourceTiktokMarketingOAuth20) GetAuthType() *SourceTiktokMarketingAuthType {
+ return SourceTiktokMarketingAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceTiktokMarketingOAuth20) GetSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.Secret
+}
+
type SourceTiktokMarketingAuthenticationMethodType string
const (
- SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingAuthenticationMethodOAuth20 SourceTiktokMarketingAuthenticationMethodType = "source-tiktok-marketing_Authentication Method_OAuth2.0"
- SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingAuthenticationMethodSandboxAccessToken SourceTiktokMarketingAuthenticationMethodType = "source-tiktok-marketing_Authentication Method_Sandbox Access Token"
+ SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingOAuth20 SourceTiktokMarketingAuthenticationMethodType = "source-tiktok-marketing_OAuth2.0"
+ SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingSandboxAccessToken SourceTiktokMarketingAuthenticationMethodType = "source-tiktok-marketing_Sandbox Access Token"
)
type SourceTiktokMarketingAuthenticationMethod struct {
- SourceTiktokMarketingAuthenticationMethodOAuth20 *SourceTiktokMarketingAuthenticationMethodOAuth20
- SourceTiktokMarketingAuthenticationMethodSandboxAccessToken *SourceTiktokMarketingAuthenticationMethodSandboxAccessToken
+ SourceTiktokMarketingOAuth20 *SourceTiktokMarketingOAuth20
+ SourceTiktokMarketingSandboxAccessToken *SourceTiktokMarketingSandboxAccessToken
Type SourceTiktokMarketingAuthenticationMethodType
}
-func CreateSourceTiktokMarketingAuthenticationMethodSourceTiktokMarketingAuthenticationMethodOAuth20(sourceTiktokMarketingAuthenticationMethodOAuth20 SourceTiktokMarketingAuthenticationMethodOAuth20) SourceTiktokMarketingAuthenticationMethod {
- typ := SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingAuthenticationMethodOAuth20
+func CreateSourceTiktokMarketingAuthenticationMethodSourceTiktokMarketingOAuth20(sourceTiktokMarketingOAuth20 SourceTiktokMarketingOAuth20) SourceTiktokMarketingAuthenticationMethod {
+ typ := SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingOAuth20
return SourceTiktokMarketingAuthenticationMethod{
- SourceTiktokMarketingAuthenticationMethodOAuth20: &sourceTiktokMarketingAuthenticationMethodOAuth20,
- Type: typ,
+ SourceTiktokMarketingOAuth20: &sourceTiktokMarketingOAuth20,
+ Type: typ,
}
}
-func CreateSourceTiktokMarketingAuthenticationMethodSourceTiktokMarketingAuthenticationMethodSandboxAccessToken(sourceTiktokMarketingAuthenticationMethodSandboxAccessToken SourceTiktokMarketingAuthenticationMethodSandboxAccessToken) SourceTiktokMarketingAuthenticationMethod {
- typ := SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingAuthenticationMethodSandboxAccessToken
+func CreateSourceTiktokMarketingAuthenticationMethodSourceTiktokMarketingSandboxAccessToken(sourceTiktokMarketingSandboxAccessToken SourceTiktokMarketingSandboxAccessToken) SourceTiktokMarketingAuthenticationMethod {
+ typ := SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingSandboxAccessToken
return SourceTiktokMarketingAuthenticationMethod{
- SourceTiktokMarketingAuthenticationMethodSandboxAccessToken: &sourceTiktokMarketingAuthenticationMethodSandboxAccessToken,
- Type: typ,
+ SourceTiktokMarketingSandboxAccessToken: &sourceTiktokMarketingSandboxAccessToken,
+ Type: typ,
}
}
func (u *SourceTiktokMarketingAuthenticationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceTiktokMarketingAuthenticationMethodSandboxAccessToken := new(SourceTiktokMarketingAuthenticationMethodSandboxAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTiktokMarketingAuthenticationMethodSandboxAccessToken); err == nil {
- u.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken = sourceTiktokMarketingAuthenticationMethodSandboxAccessToken
- u.Type = SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingAuthenticationMethodSandboxAccessToken
+
+ sourceTiktokMarketingSandboxAccessToken := new(SourceTiktokMarketingSandboxAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceTiktokMarketingSandboxAccessToken, "", true, true); err == nil {
+ u.SourceTiktokMarketingSandboxAccessToken = sourceTiktokMarketingSandboxAccessToken
+ u.Type = SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingSandboxAccessToken
return nil
}
- sourceTiktokMarketingAuthenticationMethodOAuth20 := new(SourceTiktokMarketingAuthenticationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTiktokMarketingAuthenticationMethodOAuth20); err == nil {
- u.SourceTiktokMarketingAuthenticationMethodOAuth20 = sourceTiktokMarketingAuthenticationMethodOAuth20
- u.Type = SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingAuthenticationMethodOAuth20
+ sourceTiktokMarketingOAuth20 := new(SourceTiktokMarketingOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceTiktokMarketingOAuth20, "", true, true); err == nil {
+ u.SourceTiktokMarketingOAuth20 = sourceTiktokMarketingOAuth20
+ u.Type = SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingOAuth20
return nil
}
@@ -137,51 +204,101 @@ func (u *SourceTiktokMarketingAuthenticationMethod) UnmarshalJSON(data []byte) e
}
func (u SourceTiktokMarketingAuthenticationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken != nil {
- return json.Marshal(u.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken)
+ if u.SourceTiktokMarketingOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceTiktokMarketingOAuth20, "", true)
}
- if u.SourceTiktokMarketingAuthenticationMethodOAuth20 != nil {
- return json.Marshal(u.SourceTiktokMarketingAuthenticationMethodOAuth20)
+ if u.SourceTiktokMarketingSandboxAccessToken != nil {
+ return utils.MarshalJSON(u.SourceTiktokMarketingSandboxAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceTiktokMarketingTiktokMarketing string
+type TiktokMarketing string
const (
- SourceTiktokMarketingTiktokMarketingTiktokMarketing SourceTiktokMarketingTiktokMarketing = "tiktok-marketing"
+ TiktokMarketingTiktokMarketing TiktokMarketing = "tiktok-marketing"
)
-func (e SourceTiktokMarketingTiktokMarketing) ToPointer() *SourceTiktokMarketingTiktokMarketing {
+func (e TiktokMarketing) ToPointer() *TiktokMarketing {
return &e
}
-func (e *SourceTiktokMarketingTiktokMarketing) UnmarshalJSON(data []byte) error {
+func (e *TiktokMarketing) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "tiktok-marketing":
- *e = SourceTiktokMarketingTiktokMarketing(v)
+ *e = TiktokMarketing(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTiktokMarketingTiktokMarketing: %v", v)
+ return fmt.Errorf("invalid value for TiktokMarketing: %v", v)
}
}
type SourceTiktokMarketing struct {
// The attribution window in days.
- AttributionWindow *int64 `json:"attribution_window,omitempty"`
+ AttributionWindow *int64 `default:"3" json:"attribution_window"`
// Authentication method
Credentials *SourceTiktokMarketingAuthenticationMethod `json:"credentials,omitempty"`
// The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DD. All data generated between start_date and this date will be replicated. Not setting this option will result in always syncing the data till the current date.
EndDate *types.Date `json:"end_date,omitempty"`
// Set to active if you want to include deleted data in reports.
- IncludeDeleted *bool `json:"include_deleted,omitempty"`
- SourceType *SourceTiktokMarketingTiktokMarketing `json:"sourceType,omitempty"`
+ IncludeDeleted *bool `default:"false" json:"include_deleted"`
+ sourceType *TiktokMarketing `const:"tiktok-marketing" json:"sourceType,omitempty"`
// The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2016-09-01" json:"start_date"`
+}
+
+func (s SourceTiktokMarketing) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTiktokMarketing) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTiktokMarketing) GetAttributionWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.AttributionWindow
+}
+
+func (o *SourceTiktokMarketing) GetCredentials() *SourceTiktokMarketingAuthenticationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceTiktokMarketing) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceTiktokMarketing) GetIncludeDeleted() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeDeleted
+}
+
+func (o *SourceTiktokMarketing) GetSourceType() *TiktokMarketing {
+ return TiktokMarketingTiktokMarketing.ToPointer()
+}
+
+func (o *SourceTiktokMarketing) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcetiktokmarketingcreaterequest.go b/internal/sdk/pkg/models/shared/sourcetiktokmarketingcreaterequest.go
old mode 100755
new mode 100644
index 1319e6afa..3a5bf111c
--- a/internal/sdk/pkg/models/shared/sourcetiktokmarketingcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetiktokmarketingcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceTiktokMarketingCreateRequest struct {
Configuration SourceTiktokMarketing `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTiktokMarketingCreateRequest) GetConfiguration() SourceTiktokMarketing {
+ if o == nil {
+ return SourceTiktokMarketing{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTiktokMarketingCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceTiktokMarketingCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTiktokMarketingCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceTiktokMarketingCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetiktokmarketingputrequest.go b/internal/sdk/pkg/models/shared/sourcetiktokmarketingputrequest.go
old mode 100755
new mode 100644
index 103aff832..e93c355cf
--- a/internal/sdk/pkg/models/shared/sourcetiktokmarketingputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetiktokmarketingputrequest.go
@@ -7,3 +7,24 @@ type SourceTiktokMarketingPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTiktokMarketingPutRequest) GetConfiguration() SourceTiktokMarketingUpdate {
+ if o == nil {
+ return SourceTiktokMarketingUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTiktokMarketingPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTiktokMarketingPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetiktokmarketingupdate.go b/internal/sdk/pkg/models/shared/sourcetiktokmarketingupdate.go
old mode 100755
new mode 100644
index 721c98586..62f711b44
--- a/internal/sdk/pkg/models/shared/sourcetiktokmarketingupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcetiktokmarketingupdate.go
@@ -3,133 +3,200 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessTokenAuthType string
+type SourceTiktokMarketingUpdateSchemasAuthType string
const (
- SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessTokenAuthTypeSandboxAccessToken SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessTokenAuthType = "sandbox_access_token"
+ SourceTiktokMarketingUpdateSchemasAuthTypeSandboxAccessToken SourceTiktokMarketingUpdateSchemasAuthType = "sandbox_access_token"
)
-func (e SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessTokenAuthType) ToPointer() *SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessTokenAuthType {
+func (e SourceTiktokMarketingUpdateSchemasAuthType) ToPointer() *SourceTiktokMarketingUpdateSchemasAuthType {
return &e
}
-func (e *SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTiktokMarketingUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "sandbox_access_token":
- *e = SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessTokenAuthType(v)
+ *e = SourceTiktokMarketingUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTiktokMarketingUpdateSchemasAuthType: %v", v)
}
}
-// SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken - Authentication method
-type SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken struct {
+// SandboxAccessToken - Authentication method
+type SandboxAccessToken struct {
// The long-term authorized access token.
AccessToken string `json:"access_token"`
// The Advertiser ID which generated for the developer's Sandbox application.
- AdvertiserID string `json:"advertiser_id"`
- AuthType *SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessTokenAuthType `json:"auth_type,omitempty"`
+ AdvertiserID string `json:"advertiser_id"`
+ authType *SourceTiktokMarketingUpdateSchemasAuthType `const:"sandbox_access_token" json:"auth_type,omitempty"`
}
-type SourceTiktokMarketingUpdateAuthenticationMethodOAuth20AuthType string
+func (s SandboxAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SandboxAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SandboxAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SandboxAccessToken) GetAdvertiserID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AdvertiserID
+}
+
+func (o *SandboxAccessToken) GetAuthType() *SourceTiktokMarketingUpdateSchemasAuthType {
+ return SourceTiktokMarketingUpdateSchemasAuthTypeSandboxAccessToken.ToPointer()
+}
+
+type SourceTiktokMarketingUpdateAuthType string
const (
- SourceTiktokMarketingUpdateAuthenticationMethodOAuth20AuthTypeOauth20 SourceTiktokMarketingUpdateAuthenticationMethodOAuth20AuthType = "oauth2.0"
+ SourceTiktokMarketingUpdateAuthTypeOauth20 SourceTiktokMarketingUpdateAuthType = "oauth2.0"
)
-func (e SourceTiktokMarketingUpdateAuthenticationMethodOAuth20AuthType) ToPointer() *SourceTiktokMarketingUpdateAuthenticationMethodOAuth20AuthType {
+func (e SourceTiktokMarketingUpdateAuthType) ToPointer() *SourceTiktokMarketingUpdateAuthType {
return &e
}
-func (e *SourceTiktokMarketingUpdateAuthenticationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTiktokMarketingUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceTiktokMarketingUpdateAuthenticationMethodOAuth20AuthType(v)
+ *e = SourceTiktokMarketingUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTiktokMarketingUpdateAuthenticationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTiktokMarketingUpdateAuthType: %v", v)
}
}
-// SourceTiktokMarketingUpdateAuthenticationMethodOAuth20 - Authentication method
-type SourceTiktokMarketingUpdateAuthenticationMethodOAuth20 struct {
+// SourceTiktokMarketingUpdateOAuth20 - Authentication method
+type SourceTiktokMarketingUpdateOAuth20 struct {
// Long-term Authorized Access Token.
AccessToken string `json:"access_token"`
// The Advertiser ID to filter reports and streams. Let this empty to retrieve all.
AdvertiserID *string `json:"advertiser_id,omitempty"`
// The Developer Application App ID.
- AppID string `json:"app_id"`
- AuthType *SourceTiktokMarketingUpdateAuthenticationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AppID string `json:"app_id"`
+ authType *SourceTiktokMarketingUpdateAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The Developer Application Secret.
Secret string `json:"secret"`
}
+func (s SourceTiktokMarketingUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTiktokMarketingUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTiktokMarketingUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceTiktokMarketingUpdateOAuth20) GetAdvertiserID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AdvertiserID
+}
+
+func (o *SourceTiktokMarketingUpdateOAuth20) GetAppID() string {
+ if o == nil {
+ return ""
+ }
+ return o.AppID
+}
+
+func (o *SourceTiktokMarketingUpdateOAuth20) GetAuthType() *SourceTiktokMarketingUpdateAuthType {
+ return SourceTiktokMarketingUpdateAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceTiktokMarketingUpdateOAuth20) GetSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.Secret
+}
+
type SourceTiktokMarketingUpdateAuthenticationMethodType string
const (
- SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateAuthenticationMethodOAuth20 SourceTiktokMarketingUpdateAuthenticationMethodType = "source-tiktok-marketing-update_Authentication Method_OAuth2.0"
- SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken SourceTiktokMarketingUpdateAuthenticationMethodType = "source-tiktok-marketing-update_Authentication Method_Sandbox Access Token"
+ SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateOAuth20 SourceTiktokMarketingUpdateAuthenticationMethodType = "source-tiktok-marketing-update_OAuth2.0"
+ SourceTiktokMarketingUpdateAuthenticationMethodTypeSandboxAccessToken SourceTiktokMarketingUpdateAuthenticationMethodType = "Sandbox Access Token"
)
type SourceTiktokMarketingUpdateAuthenticationMethod struct {
- SourceTiktokMarketingUpdateAuthenticationMethodOAuth20 *SourceTiktokMarketingUpdateAuthenticationMethodOAuth20
- SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken *SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken
+ SourceTiktokMarketingUpdateOAuth20 *SourceTiktokMarketingUpdateOAuth20
+ SandboxAccessToken *SandboxAccessToken
Type SourceTiktokMarketingUpdateAuthenticationMethodType
}
-func CreateSourceTiktokMarketingUpdateAuthenticationMethodSourceTiktokMarketingUpdateAuthenticationMethodOAuth20(sourceTiktokMarketingUpdateAuthenticationMethodOAuth20 SourceTiktokMarketingUpdateAuthenticationMethodOAuth20) SourceTiktokMarketingUpdateAuthenticationMethod {
- typ := SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateAuthenticationMethodOAuth20
+func CreateSourceTiktokMarketingUpdateAuthenticationMethodSourceTiktokMarketingUpdateOAuth20(sourceTiktokMarketingUpdateOAuth20 SourceTiktokMarketingUpdateOAuth20) SourceTiktokMarketingUpdateAuthenticationMethod {
+ typ := SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateOAuth20
return SourceTiktokMarketingUpdateAuthenticationMethod{
- SourceTiktokMarketingUpdateAuthenticationMethodOAuth20: &sourceTiktokMarketingUpdateAuthenticationMethodOAuth20,
- Type: typ,
+ SourceTiktokMarketingUpdateOAuth20: &sourceTiktokMarketingUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceTiktokMarketingUpdateAuthenticationMethodSourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken(sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken) SourceTiktokMarketingUpdateAuthenticationMethod {
- typ := SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken
+func CreateSourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken(sandboxAccessToken SandboxAccessToken) SourceTiktokMarketingUpdateAuthenticationMethod {
+ typ := SourceTiktokMarketingUpdateAuthenticationMethodTypeSandboxAccessToken
return SourceTiktokMarketingUpdateAuthenticationMethod{
- SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken: &sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken,
- Type: typ,
+ SandboxAccessToken: &sandboxAccessToken,
+ Type: typ,
}
}
func (u *SourceTiktokMarketingUpdateAuthenticationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken := new(SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken); err == nil {
- u.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken = sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken
- u.Type = SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken
+
+ sandboxAccessToken := new(SandboxAccessToken)
+ if err := utils.UnmarshalJSON(data, &sandboxAccessToken, "", true, true); err == nil {
+ u.SandboxAccessToken = sandboxAccessToken
+ u.Type = SourceTiktokMarketingUpdateAuthenticationMethodTypeSandboxAccessToken
return nil
}
- sourceTiktokMarketingUpdateAuthenticationMethodOAuth20 := new(SourceTiktokMarketingUpdateAuthenticationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTiktokMarketingUpdateAuthenticationMethodOAuth20); err == nil {
- u.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20 = sourceTiktokMarketingUpdateAuthenticationMethodOAuth20
- u.Type = SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateAuthenticationMethodOAuth20
+ sourceTiktokMarketingUpdateOAuth20 := new(SourceTiktokMarketingUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceTiktokMarketingUpdateOAuth20, "", true, true); err == nil {
+ u.SourceTiktokMarketingUpdateOAuth20 = sourceTiktokMarketingUpdateOAuth20
+ u.Type = SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateOAuth20
return nil
}
@@ -137,26 +204,72 @@ func (u *SourceTiktokMarketingUpdateAuthenticationMethod) UnmarshalJSON(data []b
}
func (u SourceTiktokMarketingUpdateAuthenticationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken != nil {
- return json.Marshal(u.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken)
+ if u.SourceTiktokMarketingUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceTiktokMarketingUpdateOAuth20, "", true)
}
- if u.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20 != nil {
- return json.Marshal(u.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20)
+ if u.SandboxAccessToken != nil {
+ return utils.MarshalJSON(u.SandboxAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceTiktokMarketingUpdate struct {
// The attribution window in days.
- AttributionWindow *int64 `json:"attribution_window,omitempty"`
+ AttributionWindow *int64 `default:"3" json:"attribution_window"`
// Authentication method
Credentials *SourceTiktokMarketingUpdateAuthenticationMethod `json:"credentials,omitempty"`
// The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DD. All data generated between start_date and this date will be replicated. Not setting this option will result in always syncing the data till the current date.
EndDate *types.Date `json:"end_date,omitempty"`
// Set to active if you want to include deleted data in reports.
- IncludeDeleted *bool `json:"include_deleted,omitempty"`
+ IncludeDeleted *bool `default:"false" json:"include_deleted"`
// The Start Date in format: YYYY-MM-DD. Any data before this date will not be replicated. If this parameter is not set, all data will be replicated.
- StartDate *types.Date `json:"start_date,omitempty"`
+ StartDate *types.Date `default:"2016-09-01" json:"start_date"`
+}
+
+func (s SourceTiktokMarketingUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTiktokMarketingUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTiktokMarketingUpdate) GetAttributionWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.AttributionWindow
+}
+
+func (o *SourceTiktokMarketingUpdate) GetCredentials() *SourceTiktokMarketingUpdateAuthenticationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceTiktokMarketingUpdate) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceTiktokMarketingUpdate) GetIncludeDeleted() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IncludeDeleted
+}
+
+func (o *SourceTiktokMarketingUpdate) GetStartDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
}
diff --git a/internal/sdk/pkg/models/shared/sourcetodoist.go b/internal/sdk/pkg/models/shared/sourcetodoist.go
old mode 100755
new mode 100644
index 365d78a5e..3205c0e0e
--- a/internal/sdk/pkg/models/shared/sourcetodoist.go
+++ b/internal/sdk/pkg/models/shared/sourcetodoist.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceTodoistTodoist string
+type Todoist string
const (
- SourceTodoistTodoistTodoist SourceTodoistTodoist = "todoist"
+ TodoistTodoist Todoist = "todoist"
)
-func (e SourceTodoistTodoist) ToPointer() *SourceTodoistTodoist {
+func (e Todoist) ToPointer() *Todoist {
return &e
}
-func (e *SourceTodoistTodoist) UnmarshalJSON(data []byte) error {
+func (e *Todoist) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "todoist":
- *e = SourceTodoistTodoist(v)
+ *e = Todoist(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTodoistTodoist: %v", v)
+ return fmt.Errorf("invalid value for Todoist: %v", v)
}
}
type SourceTodoist struct {
- SourceType SourceTodoistTodoist `json:"sourceType"`
+ sourceType Todoist `const:"todoist" json:"sourceType"`
// Your API Token. See here. The token is case sensitive.
Token string `json:"token"`
}
+
+func (s SourceTodoist) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTodoist) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTodoist) GetSourceType() Todoist {
+ return TodoistTodoist
+}
+
+func (o *SourceTodoist) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetodoistcreaterequest.go b/internal/sdk/pkg/models/shared/sourcetodoistcreaterequest.go
old mode 100755
new mode 100644
index b85d68c63..c48332cae
--- a/internal/sdk/pkg/models/shared/sourcetodoistcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetodoistcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceTodoistCreateRequest struct {
Configuration SourceTodoist `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTodoistCreateRequest) GetConfiguration() SourceTodoist {
+ if o == nil {
+ return SourceTodoist{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTodoistCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceTodoistCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTodoistCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceTodoistCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetodoistputrequest.go b/internal/sdk/pkg/models/shared/sourcetodoistputrequest.go
old mode 100755
new mode 100644
index 4689b29aa..e5a56db9c
--- a/internal/sdk/pkg/models/shared/sourcetodoistputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetodoistputrequest.go
@@ -7,3 +7,24 @@ type SourceTodoistPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTodoistPutRequest) GetConfiguration() SourceTodoistUpdate {
+ if o == nil {
+ return SourceTodoistUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTodoistPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTodoistPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetodoistupdate.go b/internal/sdk/pkg/models/shared/sourcetodoistupdate.go
old mode 100755
new mode 100644
index 22db76dbc..f09962cff
--- a/internal/sdk/pkg/models/shared/sourcetodoistupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcetodoistupdate.go
@@ -6,3 +6,10 @@ type SourceTodoistUpdate struct {
// Your API Token. See here. The token is case sensitive.
Token string `json:"token"`
}
+
+func (o *SourceTodoistUpdate) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetrello.go b/internal/sdk/pkg/models/shared/sourcetrello.go
old mode 100755
new mode 100644
index c30257373..91f5305f5
--- a/internal/sdk/pkg/models/shared/sourcetrello.go
+++ b/internal/sdk/pkg/models/shared/sourcetrello.go
@@ -5,41 +5,85 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceTrelloTrello string
+type Trello string
const (
- SourceTrelloTrelloTrello SourceTrelloTrello = "trello"
+ TrelloTrello Trello = "trello"
)
-func (e SourceTrelloTrello) ToPointer() *SourceTrelloTrello {
+func (e Trello) ToPointer() *Trello {
return &e
}
-func (e *SourceTrelloTrello) UnmarshalJSON(data []byte) error {
+func (e *Trello) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "trello":
- *e = SourceTrelloTrello(v)
+ *e = Trello(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTrelloTrello: %v", v)
+ return fmt.Errorf("invalid value for Trello: %v", v)
}
}
type SourceTrello struct {
- // IDs of the boards to replicate data from. If left empty, data from all boards to which you have access will be replicated.
+ // IDs of the boards to replicate data from. If left empty, data from all boards to which you have access will be replicated. Please note that this is not the 8-character ID in the board's shortLink (URL of the board). Rather, what is required here is the 24-character ID usually returned by the API
BoardIds []string `json:"board_ids,omitempty"`
// Trello API key. See the docs for instructions on how to generate it.
- Key string `json:"key"`
- SourceType SourceTrelloTrello `json:"sourceType"`
+ Key string `json:"key"`
+ sourceType Trello `const:"trello" json:"sourceType"`
// UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
// Trello API token. See the docs for instructions on how to generate it.
Token string `json:"token"`
}
+
+func (s SourceTrello) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTrello) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTrello) GetBoardIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.BoardIds
+}
+
+func (o *SourceTrello) GetKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Key
+}
+
+func (o *SourceTrello) GetSourceType() Trello {
+ return TrelloTrello
+}
+
+func (o *SourceTrello) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceTrello) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetrellocreaterequest.go b/internal/sdk/pkg/models/shared/sourcetrellocreaterequest.go
old mode 100755
new mode 100644
index c24dc64eb..7d15845d8
--- a/internal/sdk/pkg/models/shared/sourcetrellocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetrellocreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceTrelloCreateRequest struct {
Configuration SourceTrello `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTrelloCreateRequest) GetConfiguration() SourceTrello {
+ if o == nil {
+ return SourceTrello{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTrelloCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceTrelloCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTrelloCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceTrelloCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetrelloputrequest.go b/internal/sdk/pkg/models/shared/sourcetrelloputrequest.go
old mode 100755
new mode 100644
index 718491c5b..fe2472556
--- a/internal/sdk/pkg/models/shared/sourcetrelloputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetrelloputrequest.go
@@ -7,3 +7,24 @@ type SourceTrelloPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTrelloPutRequest) GetConfiguration() SourceTrelloUpdate {
+ if o == nil {
+ return SourceTrelloUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTrelloPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTrelloPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetrelloupdate.go b/internal/sdk/pkg/models/shared/sourcetrelloupdate.go
old mode 100755
new mode 100644
index 259dfe316..fea9c3126
--- a/internal/sdk/pkg/models/shared/sourcetrelloupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcetrelloupdate.go
@@ -3,11 +3,12 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
type SourceTrelloUpdate struct {
- // IDs of the boards to replicate data from. If left empty, data from all boards to which you have access will be replicated.
+ // IDs of the boards to replicate data from. If left empty, data from all boards to which you have access will be replicated. Please note that this is not the 8-character ID in the board's shortLink (URL of the board). Rather, what is required here is the 24-character ID usually returned by the API
BoardIds []string `json:"board_ids,omitempty"`
// Trello API key. See the docs for instructions on how to generate it.
Key string `json:"key"`
@@ -16,3 +17,42 @@ type SourceTrelloUpdate struct {
// Trello API token. See the docs for instructions on how to generate it.
Token string `json:"token"`
}
+
+func (s SourceTrelloUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTrelloUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTrelloUpdate) GetBoardIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.BoardIds
+}
+
+func (o *SourceTrelloUpdate) GetKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.Key
+}
+
+func (o *SourceTrelloUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceTrelloUpdate) GetToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.Token
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetrustpilot.go b/internal/sdk/pkg/models/shared/sourcetrustpilot.go
old mode 100755
new mode 100644
index dbe483587..7430482b3
--- a/internal/sdk/pkg/models/shared/sourcetrustpilot.go
+++ b/internal/sdk/pkg/models/shared/sourcetrustpilot.go
@@ -3,72 +3,94 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceTrustpilotAuthorizationMethodAPIKeyAuthType string
+type SourceTrustpilotSchemasAuthType string
const (
- SourceTrustpilotAuthorizationMethodAPIKeyAuthTypeApikey SourceTrustpilotAuthorizationMethodAPIKeyAuthType = "apikey"
+ SourceTrustpilotSchemasAuthTypeApikey SourceTrustpilotSchemasAuthType = "apikey"
)
-func (e SourceTrustpilotAuthorizationMethodAPIKeyAuthType) ToPointer() *SourceTrustpilotAuthorizationMethodAPIKeyAuthType {
+func (e SourceTrustpilotSchemasAuthType) ToPointer() *SourceTrustpilotSchemasAuthType {
return &e
}
-func (e *SourceTrustpilotAuthorizationMethodAPIKeyAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTrustpilotSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "apikey":
- *e = SourceTrustpilotAuthorizationMethodAPIKeyAuthType(v)
+ *e = SourceTrustpilotSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTrustpilotAuthorizationMethodAPIKeyAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTrustpilotSchemasAuthType: %v", v)
}
}
-// SourceTrustpilotAuthorizationMethodAPIKey - The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0.
-type SourceTrustpilotAuthorizationMethodAPIKey struct {
- AuthType *SourceTrustpilotAuthorizationMethodAPIKeyAuthType `json:"auth_type,omitempty"`
+// SourceTrustpilotAPIKey - The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0.
+type SourceTrustpilotAPIKey struct {
+ authType *SourceTrustpilotSchemasAuthType `const:"apikey" json:"auth_type,omitempty"`
// The API key of the Trustpilot API application.
ClientID string `json:"client_id"`
}
-type SourceTrustpilotAuthorizationMethodOAuth20AuthType string
+func (s SourceTrustpilotAPIKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTrustpilotAPIKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTrustpilotAPIKey) GetAuthType() *SourceTrustpilotSchemasAuthType {
+ return SourceTrustpilotSchemasAuthTypeApikey.ToPointer()
+}
+
+func (o *SourceTrustpilotAPIKey) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+type SourceTrustpilotAuthType string
const (
- SourceTrustpilotAuthorizationMethodOAuth20AuthTypeOauth20 SourceTrustpilotAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceTrustpilotAuthTypeOauth20 SourceTrustpilotAuthType = "oauth2.0"
)
-func (e SourceTrustpilotAuthorizationMethodOAuth20AuthType) ToPointer() *SourceTrustpilotAuthorizationMethodOAuth20AuthType {
+func (e SourceTrustpilotAuthType) ToPointer() *SourceTrustpilotAuthType {
return &e
}
-func (e *SourceTrustpilotAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTrustpilotAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceTrustpilotAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceTrustpilotAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTrustpilotAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTrustpilotAuthType: %v", v)
}
}
-type SourceTrustpilotAuthorizationMethodOAuth20 struct {
+type SourceTrustpilotOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType *SourceTrustpilotAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceTrustpilotAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The API key of the Trustpilot API application. (represents the OAuth Client ID)
ClientID string `json:"client_id"`
// The Secret of the Trustpilot API application. (represents the OAuth Client Secret)
@@ -79,56 +101,101 @@ type SourceTrustpilotAuthorizationMethodOAuth20 struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (s SourceTrustpilotOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTrustpilotOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTrustpilotOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceTrustpilotOAuth20) GetAuthType() *SourceTrustpilotAuthType {
+ return SourceTrustpilotAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceTrustpilotOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceTrustpilotOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceTrustpilotOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceTrustpilotOAuth20) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceTrustpilotAuthorizationMethodType string
const (
- SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAuthorizationMethodOAuth20 SourceTrustpilotAuthorizationMethodType = "source-trustpilot_Authorization Method_OAuth 2.0"
- SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAuthorizationMethodAPIKey SourceTrustpilotAuthorizationMethodType = "source-trustpilot_Authorization Method_API Key"
+ SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotOAuth20 SourceTrustpilotAuthorizationMethodType = "source-trustpilot_OAuth 2.0"
+ SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAPIKey SourceTrustpilotAuthorizationMethodType = "source-trustpilot_API Key"
)
type SourceTrustpilotAuthorizationMethod struct {
- SourceTrustpilotAuthorizationMethodOAuth20 *SourceTrustpilotAuthorizationMethodOAuth20
- SourceTrustpilotAuthorizationMethodAPIKey *SourceTrustpilotAuthorizationMethodAPIKey
+ SourceTrustpilotOAuth20 *SourceTrustpilotOAuth20
+ SourceTrustpilotAPIKey *SourceTrustpilotAPIKey
Type SourceTrustpilotAuthorizationMethodType
}
-func CreateSourceTrustpilotAuthorizationMethodSourceTrustpilotAuthorizationMethodOAuth20(sourceTrustpilotAuthorizationMethodOAuth20 SourceTrustpilotAuthorizationMethodOAuth20) SourceTrustpilotAuthorizationMethod {
- typ := SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAuthorizationMethodOAuth20
+func CreateSourceTrustpilotAuthorizationMethodSourceTrustpilotOAuth20(sourceTrustpilotOAuth20 SourceTrustpilotOAuth20) SourceTrustpilotAuthorizationMethod {
+ typ := SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotOAuth20
return SourceTrustpilotAuthorizationMethod{
- SourceTrustpilotAuthorizationMethodOAuth20: &sourceTrustpilotAuthorizationMethodOAuth20,
- Type: typ,
+ SourceTrustpilotOAuth20: &sourceTrustpilotOAuth20,
+ Type: typ,
}
}
-func CreateSourceTrustpilotAuthorizationMethodSourceTrustpilotAuthorizationMethodAPIKey(sourceTrustpilotAuthorizationMethodAPIKey SourceTrustpilotAuthorizationMethodAPIKey) SourceTrustpilotAuthorizationMethod {
- typ := SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAuthorizationMethodAPIKey
+func CreateSourceTrustpilotAuthorizationMethodSourceTrustpilotAPIKey(sourceTrustpilotAPIKey SourceTrustpilotAPIKey) SourceTrustpilotAuthorizationMethod {
+ typ := SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAPIKey
return SourceTrustpilotAuthorizationMethod{
- SourceTrustpilotAuthorizationMethodAPIKey: &sourceTrustpilotAuthorizationMethodAPIKey,
- Type: typ,
+ SourceTrustpilotAPIKey: &sourceTrustpilotAPIKey,
+ Type: typ,
}
}
func (u *SourceTrustpilotAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceTrustpilotAuthorizationMethodAPIKey := new(SourceTrustpilotAuthorizationMethodAPIKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTrustpilotAuthorizationMethodAPIKey); err == nil {
- u.SourceTrustpilotAuthorizationMethodAPIKey = sourceTrustpilotAuthorizationMethodAPIKey
- u.Type = SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAuthorizationMethodAPIKey
+
+ sourceTrustpilotAPIKey := new(SourceTrustpilotAPIKey)
+ if err := utils.UnmarshalJSON(data, &sourceTrustpilotAPIKey, "", true, true); err == nil {
+ u.SourceTrustpilotAPIKey = sourceTrustpilotAPIKey
+ u.Type = SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAPIKey
return nil
}
- sourceTrustpilotAuthorizationMethodOAuth20 := new(SourceTrustpilotAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTrustpilotAuthorizationMethodOAuth20); err == nil {
- u.SourceTrustpilotAuthorizationMethodOAuth20 = sourceTrustpilotAuthorizationMethodOAuth20
- u.Type = SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAuthorizationMethodOAuth20
+ sourceTrustpilotOAuth20 := new(SourceTrustpilotOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceTrustpilotOAuth20, "", true, true); err == nil {
+ u.SourceTrustpilotOAuth20 = sourceTrustpilotOAuth20
+ u.Type = SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotOAuth20
return nil
}
@@ -136,38 +203,38 @@ func (u *SourceTrustpilotAuthorizationMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceTrustpilotAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceTrustpilotAuthorizationMethodAPIKey != nil {
- return json.Marshal(u.SourceTrustpilotAuthorizationMethodAPIKey)
+ if u.SourceTrustpilotOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceTrustpilotOAuth20, "", true)
}
- if u.SourceTrustpilotAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceTrustpilotAuthorizationMethodOAuth20)
+ if u.SourceTrustpilotAPIKey != nil {
+ return utils.MarshalJSON(u.SourceTrustpilotAPIKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceTrustpilotTrustpilot string
+type Trustpilot string
const (
- SourceTrustpilotTrustpilotTrustpilot SourceTrustpilotTrustpilot = "trustpilot"
+ TrustpilotTrustpilot Trustpilot = "trustpilot"
)
-func (e SourceTrustpilotTrustpilot) ToPointer() *SourceTrustpilotTrustpilot {
+func (e Trustpilot) ToPointer() *Trustpilot {
return &e
}
-func (e *SourceTrustpilotTrustpilot) UnmarshalJSON(data []byte) error {
+func (e *Trustpilot) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "trustpilot":
- *e = SourceTrustpilotTrustpilot(v)
+ *e = Trustpilot(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTrustpilotTrustpilot: %v", v)
+ return fmt.Errorf("invalid value for Trustpilot: %v", v)
}
}
@@ -175,7 +242,43 @@ type SourceTrustpilot struct {
// The names of business units which shall be synchronized. Some streams e.g. configured_business_units or private_reviews use this configuration.
BusinessUnits []string `json:"business_units"`
Credentials SourceTrustpilotAuthorizationMethod `json:"credentials"`
- SourceType SourceTrustpilotTrustpilot `json:"sourceType"`
+ sourceType Trustpilot `const:"trustpilot" json:"sourceType"`
// For streams with sync. method incremental the start date time to be used
StartDate string `json:"start_date"`
}
+
+func (s SourceTrustpilot) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTrustpilot) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTrustpilot) GetBusinessUnits() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.BusinessUnits
+}
+
+func (o *SourceTrustpilot) GetCredentials() SourceTrustpilotAuthorizationMethod {
+ if o == nil {
+ return SourceTrustpilotAuthorizationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceTrustpilot) GetSourceType() Trustpilot {
+ return TrustpilotTrustpilot
+}
+
+func (o *SourceTrustpilot) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetrustpilotcreaterequest.go b/internal/sdk/pkg/models/shared/sourcetrustpilotcreaterequest.go
old mode 100755
new mode 100644
index a8affd920..f7d959431
--- a/internal/sdk/pkg/models/shared/sourcetrustpilotcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetrustpilotcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceTrustpilotCreateRequest struct {
Configuration SourceTrustpilot `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTrustpilotCreateRequest) GetConfiguration() SourceTrustpilot {
+ if o == nil {
+ return SourceTrustpilot{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTrustpilotCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceTrustpilotCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTrustpilotCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceTrustpilotCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetrustpilotputrequest.go b/internal/sdk/pkg/models/shared/sourcetrustpilotputrequest.go
old mode 100755
new mode 100644
index 68c1f35e0..c8006a69e
--- a/internal/sdk/pkg/models/shared/sourcetrustpilotputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetrustpilotputrequest.go
@@ -7,3 +7,24 @@ type SourceTrustpilotPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTrustpilotPutRequest) GetConfiguration() SourceTrustpilotUpdate {
+ if o == nil {
+ return SourceTrustpilotUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTrustpilotPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTrustpilotPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetrustpilotupdate.go b/internal/sdk/pkg/models/shared/sourcetrustpilotupdate.go
old mode 100755
new mode 100644
index 15bf17aad..472d1b218
--- a/internal/sdk/pkg/models/shared/sourcetrustpilotupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcetrustpilotupdate.go
@@ -3,72 +3,94 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceTrustpilotUpdateAuthorizationMethodAPIKeyAuthType string
+type SourceTrustpilotUpdateSchemasAuthType string
const (
- SourceTrustpilotUpdateAuthorizationMethodAPIKeyAuthTypeApikey SourceTrustpilotUpdateAuthorizationMethodAPIKeyAuthType = "apikey"
+ SourceTrustpilotUpdateSchemasAuthTypeApikey SourceTrustpilotUpdateSchemasAuthType = "apikey"
)
-func (e SourceTrustpilotUpdateAuthorizationMethodAPIKeyAuthType) ToPointer() *SourceTrustpilotUpdateAuthorizationMethodAPIKeyAuthType {
+func (e SourceTrustpilotUpdateSchemasAuthType) ToPointer() *SourceTrustpilotUpdateSchemasAuthType {
return &e
}
-func (e *SourceTrustpilotUpdateAuthorizationMethodAPIKeyAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTrustpilotUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "apikey":
- *e = SourceTrustpilotUpdateAuthorizationMethodAPIKeyAuthType(v)
+ *e = SourceTrustpilotUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTrustpilotUpdateAuthorizationMethodAPIKeyAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTrustpilotUpdateSchemasAuthType: %v", v)
}
}
-// SourceTrustpilotUpdateAuthorizationMethodAPIKey - The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0.
-type SourceTrustpilotUpdateAuthorizationMethodAPIKey struct {
- AuthType *SourceTrustpilotUpdateAuthorizationMethodAPIKeyAuthType `json:"auth_type,omitempty"`
+// SourceTrustpilotUpdateAPIKey - The API key authentication method gives you access to only the streams which are part of the Public API. When you want to get streams available via the Consumer API (e.g. the private reviews) you need to use authentication method OAuth 2.0.
+type SourceTrustpilotUpdateAPIKey struct {
+ authType *SourceTrustpilotUpdateSchemasAuthType `const:"apikey" json:"auth_type,omitempty"`
// The API key of the Trustpilot API application.
ClientID string `json:"client_id"`
}
-type SourceTrustpilotUpdateAuthorizationMethodOAuth20AuthType string
+func (s SourceTrustpilotUpdateAPIKey) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTrustpilotUpdateAPIKey) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTrustpilotUpdateAPIKey) GetAuthType() *SourceTrustpilotUpdateSchemasAuthType {
+ return SourceTrustpilotUpdateSchemasAuthTypeApikey.ToPointer()
+}
+
+func (o *SourceTrustpilotUpdateAPIKey) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+type SourceTrustpilotUpdateAuthType string
const (
- SourceTrustpilotUpdateAuthorizationMethodOAuth20AuthTypeOauth20 SourceTrustpilotUpdateAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceTrustpilotUpdateAuthTypeOauth20 SourceTrustpilotUpdateAuthType = "oauth2.0"
)
-func (e SourceTrustpilotUpdateAuthorizationMethodOAuth20AuthType) ToPointer() *SourceTrustpilotUpdateAuthorizationMethodOAuth20AuthType {
+func (e SourceTrustpilotUpdateAuthType) ToPointer() *SourceTrustpilotUpdateAuthType {
return &e
}
-func (e *SourceTrustpilotUpdateAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTrustpilotUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceTrustpilotUpdateAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceTrustpilotUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTrustpilotUpdateAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTrustpilotUpdateAuthType: %v", v)
}
}
-type SourceTrustpilotUpdateAuthorizationMethodOAuth20 struct {
+type SourceTrustpilotUpdateOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType *SourceTrustpilotUpdateAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceTrustpilotUpdateAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The API key of the Trustpilot API application. (represents the OAuth Client ID)
ClientID string `json:"client_id"`
// The Secret of the Trustpilot API application. (represents the OAuth Client Secret)
@@ -79,56 +101,101 @@ type SourceTrustpilotUpdateAuthorizationMethodOAuth20 struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (s SourceTrustpilotUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTrustpilotUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTrustpilotUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceTrustpilotUpdateOAuth20) GetAuthType() *SourceTrustpilotUpdateAuthType {
+ return SourceTrustpilotUpdateAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceTrustpilotUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceTrustpilotUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceTrustpilotUpdateOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceTrustpilotUpdateOAuth20) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceTrustpilotUpdateAuthorizationMethodType string
const (
- SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAuthorizationMethodOAuth20 SourceTrustpilotUpdateAuthorizationMethodType = "source-trustpilot-update_Authorization Method_OAuth 2.0"
- SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAuthorizationMethodAPIKey SourceTrustpilotUpdateAuthorizationMethodType = "source-trustpilot-update_Authorization Method_API Key"
+ SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateOAuth20 SourceTrustpilotUpdateAuthorizationMethodType = "source-trustpilot-update_OAuth 2.0"
+ SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAPIKey SourceTrustpilotUpdateAuthorizationMethodType = "source-trustpilot-update_API Key"
)
type SourceTrustpilotUpdateAuthorizationMethod struct {
- SourceTrustpilotUpdateAuthorizationMethodOAuth20 *SourceTrustpilotUpdateAuthorizationMethodOAuth20
- SourceTrustpilotUpdateAuthorizationMethodAPIKey *SourceTrustpilotUpdateAuthorizationMethodAPIKey
+ SourceTrustpilotUpdateOAuth20 *SourceTrustpilotUpdateOAuth20
+ SourceTrustpilotUpdateAPIKey *SourceTrustpilotUpdateAPIKey
Type SourceTrustpilotUpdateAuthorizationMethodType
}
-func CreateSourceTrustpilotUpdateAuthorizationMethodSourceTrustpilotUpdateAuthorizationMethodOAuth20(sourceTrustpilotUpdateAuthorizationMethodOAuth20 SourceTrustpilotUpdateAuthorizationMethodOAuth20) SourceTrustpilotUpdateAuthorizationMethod {
- typ := SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAuthorizationMethodOAuth20
+func CreateSourceTrustpilotUpdateAuthorizationMethodSourceTrustpilotUpdateOAuth20(sourceTrustpilotUpdateOAuth20 SourceTrustpilotUpdateOAuth20) SourceTrustpilotUpdateAuthorizationMethod {
+ typ := SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateOAuth20
return SourceTrustpilotUpdateAuthorizationMethod{
- SourceTrustpilotUpdateAuthorizationMethodOAuth20: &sourceTrustpilotUpdateAuthorizationMethodOAuth20,
- Type: typ,
+ SourceTrustpilotUpdateOAuth20: &sourceTrustpilotUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceTrustpilotUpdateAuthorizationMethodSourceTrustpilotUpdateAuthorizationMethodAPIKey(sourceTrustpilotUpdateAuthorizationMethodAPIKey SourceTrustpilotUpdateAuthorizationMethodAPIKey) SourceTrustpilotUpdateAuthorizationMethod {
- typ := SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAuthorizationMethodAPIKey
+func CreateSourceTrustpilotUpdateAuthorizationMethodSourceTrustpilotUpdateAPIKey(sourceTrustpilotUpdateAPIKey SourceTrustpilotUpdateAPIKey) SourceTrustpilotUpdateAuthorizationMethod {
+ typ := SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAPIKey
return SourceTrustpilotUpdateAuthorizationMethod{
- SourceTrustpilotUpdateAuthorizationMethodAPIKey: &sourceTrustpilotUpdateAuthorizationMethodAPIKey,
- Type: typ,
+ SourceTrustpilotUpdateAPIKey: &sourceTrustpilotUpdateAPIKey,
+ Type: typ,
}
}
func (u *SourceTrustpilotUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceTrustpilotUpdateAuthorizationMethodAPIKey := new(SourceTrustpilotUpdateAuthorizationMethodAPIKey)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTrustpilotUpdateAuthorizationMethodAPIKey); err == nil {
- u.SourceTrustpilotUpdateAuthorizationMethodAPIKey = sourceTrustpilotUpdateAuthorizationMethodAPIKey
- u.Type = SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAuthorizationMethodAPIKey
+
+ sourceTrustpilotUpdateAPIKey := new(SourceTrustpilotUpdateAPIKey)
+ if err := utils.UnmarshalJSON(data, &sourceTrustpilotUpdateAPIKey, "", true, true); err == nil {
+ u.SourceTrustpilotUpdateAPIKey = sourceTrustpilotUpdateAPIKey
+ u.Type = SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAPIKey
return nil
}
- sourceTrustpilotUpdateAuthorizationMethodOAuth20 := new(SourceTrustpilotUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTrustpilotUpdateAuthorizationMethodOAuth20); err == nil {
- u.SourceTrustpilotUpdateAuthorizationMethodOAuth20 = sourceTrustpilotUpdateAuthorizationMethodOAuth20
- u.Type = SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAuthorizationMethodOAuth20
+ sourceTrustpilotUpdateOAuth20 := new(SourceTrustpilotUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceTrustpilotUpdateOAuth20, "", true, true); err == nil {
+ u.SourceTrustpilotUpdateOAuth20 = sourceTrustpilotUpdateOAuth20
+ u.Type = SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateOAuth20
return nil
}
@@ -136,15 +203,15 @@ func (u *SourceTrustpilotUpdateAuthorizationMethod) UnmarshalJSON(data []byte) e
}
func (u SourceTrustpilotUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceTrustpilotUpdateAuthorizationMethodAPIKey != nil {
- return json.Marshal(u.SourceTrustpilotUpdateAuthorizationMethodAPIKey)
+ if u.SourceTrustpilotUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceTrustpilotUpdateOAuth20, "", true)
}
- if u.SourceTrustpilotUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceTrustpilotUpdateAuthorizationMethodOAuth20)
+ if u.SourceTrustpilotUpdateAPIKey != nil {
+ return utils.MarshalJSON(u.SourceTrustpilotUpdateAPIKey, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceTrustpilotUpdate struct {
@@ -154,3 +221,24 @@ type SourceTrustpilotUpdate struct {
// For streams with sync. method incremental the start date time to be used
StartDate string `json:"start_date"`
}
+
+func (o *SourceTrustpilotUpdate) GetBusinessUnits() []string {
+ if o == nil {
+ return []string{}
+ }
+ return o.BusinessUnits
+}
+
+func (o *SourceTrustpilotUpdate) GetCredentials() SourceTrustpilotUpdateAuthorizationMethod {
+ if o == nil {
+ return SourceTrustpilotUpdateAuthorizationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceTrustpilotUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetvmazeschedule.go b/internal/sdk/pkg/models/shared/sourcetvmazeschedule.go
old mode 100755
new mode 100644
index 2a4fe9f53..b3a2c8397
--- a/internal/sdk/pkg/models/shared/sourcetvmazeschedule.go
+++ b/internal/sdk/pkg/models/shared/sourcetvmazeschedule.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceTvmazeScheduleTvmazeSchedule string
+type TvmazeSchedule string
const (
- SourceTvmazeScheduleTvmazeScheduleTvmazeSchedule SourceTvmazeScheduleTvmazeSchedule = "tvmaze-schedule"
+ TvmazeScheduleTvmazeSchedule TvmazeSchedule = "tvmaze-schedule"
)
-func (e SourceTvmazeScheduleTvmazeSchedule) ToPointer() *SourceTvmazeScheduleTvmazeSchedule {
+func (e TvmazeSchedule) ToPointer() *TvmazeSchedule {
return &e
}
-func (e *SourceTvmazeScheduleTvmazeSchedule) UnmarshalJSON(data []byte) error {
+func (e *TvmazeSchedule) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "tvmaze-schedule":
- *e = SourceTvmazeScheduleTvmazeSchedule(v)
+ *e = TvmazeSchedule(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTvmazeScheduleTvmazeSchedule: %v", v)
+ return fmt.Errorf("invalid value for TvmazeSchedule: %v", v)
}
}
@@ -36,8 +37,8 @@ type SourceTvmazeSchedule struct {
DomesticScheduleCountryCode string `json:"domestic_schedule_country_code"`
// End date for TV schedule retrieval. May be in the future. Optional.
//
- EndDate *string `json:"end_date,omitempty"`
- SourceType SourceTvmazeScheduleTvmazeSchedule `json:"sourceType"`
+ EndDate *string `json:"end_date,omitempty"`
+ sourceType TvmazeSchedule `const:"tvmaze-schedule" json:"sourceType"`
// Start date for TV schedule retrieval. May be in the future.
StartDate string `json:"start_date"`
// ISO 3166-1 country code for web TV schedule retrieval. Leave blank for
@@ -46,3 +47,46 @@ type SourceTvmazeSchedule struct {
//
WebScheduleCountryCode *string `json:"web_schedule_country_code,omitempty"`
}
+
+func (s SourceTvmazeSchedule) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTvmazeSchedule) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTvmazeSchedule) GetDomesticScheduleCountryCode() string {
+ if o == nil {
+ return ""
+ }
+ return o.DomesticScheduleCountryCode
+}
+
+func (o *SourceTvmazeSchedule) GetEndDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceTvmazeSchedule) GetSourceType() TvmazeSchedule {
+ return TvmazeScheduleTvmazeSchedule
+}
+
+func (o *SourceTvmazeSchedule) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceTvmazeSchedule) GetWebScheduleCountryCode() *string {
+ if o == nil {
+ return nil
+ }
+ return o.WebScheduleCountryCode
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetvmazeschedulecreaterequest.go b/internal/sdk/pkg/models/shared/sourcetvmazeschedulecreaterequest.go
old mode 100755
new mode 100644
index 1a67adf4b..367fe6622
--- a/internal/sdk/pkg/models/shared/sourcetvmazeschedulecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetvmazeschedulecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceTvmazeScheduleCreateRequest struct {
Configuration SourceTvmazeSchedule `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTvmazeScheduleCreateRequest) GetConfiguration() SourceTvmazeSchedule {
+ if o == nil {
+ return SourceTvmazeSchedule{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTvmazeScheduleCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceTvmazeScheduleCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTvmazeScheduleCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceTvmazeScheduleCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetvmazescheduleputrequest.go b/internal/sdk/pkg/models/shared/sourcetvmazescheduleputrequest.go
old mode 100755
new mode 100644
index 882f84884..6aaed8a85
--- a/internal/sdk/pkg/models/shared/sourcetvmazescheduleputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetvmazescheduleputrequest.go
@@ -7,3 +7,24 @@ type SourceTvmazeSchedulePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTvmazeSchedulePutRequest) GetConfiguration() SourceTvmazeScheduleUpdate {
+ if o == nil {
+ return SourceTvmazeScheduleUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTvmazeSchedulePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTvmazeSchedulePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetvmazescheduleupdate.go b/internal/sdk/pkg/models/shared/sourcetvmazescheduleupdate.go
old mode 100755
new mode 100644
index 4191c57b3..6caacb44e
--- a/internal/sdk/pkg/models/shared/sourcetvmazescheduleupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcetvmazescheduleupdate.go
@@ -16,3 +16,31 @@ type SourceTvmazeScheduleUpdate struct {
//
WebScheduleCountryCode *string `json:"web_schedule_country_code,omitempty"`
}
+
+func (o *SourceTvmazeScheduleUpdate) GetDomesticScheduleCountryCode() string {
+ if o == nil {
+ return ""
+ }
+ return o.DomesticScheduleCountryCode
+}
+
+func (o *SourceTvmazeScheduleUpdate) GetEndDate() *string {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceTvmazeScheduleUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceTvmazeScheduleUpdate) GetWebScheduleCountryCode() *string {
+ if o == nil {
+ return nil
+ }
+ return o.WebScheduleCountryCode
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetwilio.go b/internal/sdk/pkg/models/shared/sourcetwilio.go
old mode 100755
new mode 100644
index 433b544fd..959550185
--- a/internal/sdk/pkg/models/shared/sourcetwilio.go
+++ b/internal/sdk/pkg/models/shared/sourcetwilio.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceTwilioTwilio string
+type Twilio string
const (
- SourceTwilioTwilioTwilio SourceTwilioTwilio = "twilio"
+ TwilioTwilio Twilio = "twilio"
)
-func (e SourceTwilioTwilio) ToPointer() *SourceTwilioTwilio {
+func (e Twilio) ToPointer() *Twilio {
return &e
}
-func (e *SourceTwilioTwilio) UnmarshalJSON(data []byte) error {
+func (e *Twilio) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "twilio":
- *e = SourceTwilioTwilio(v)
+ *e = Twilio(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTwilioTwilio: %v", v)
+ return fmt.Errorf("invalid value for Twilio: %v", v)
}
}
@@ -38,8 +39,51 @@ type SourceTwilio struct {
// Twilio Auth Token.
AuthToken string `json:"auth_token"`
// How far into the past to look for records. (in minutes)
- LookbackWindow *int64 `json:"lookback_window,omitempty"`
- SourceType SourceTwilioTwilio `json:"sourceType"`
+ LookbackWindow *int64 `default:"0" json:"lookback_window"`
+ sourceType Twilio `const:"twilio" json:"sourceType"`
// UTC date and time in the format 2020-10-01T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceTwilio) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTwilio) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTwilio) GetAccountSid() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccountSid
+}
+
+func (o *SourceTwilio) GetAuthToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthToken
+}
+
+func (o *SourceTwilio) GetLookbackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindow
+}
+
+func (o *SourceTwilio) GetSourceType() Twilio {
+ return TwilioTwilio
+}
+
+func (o *SourceTwilio) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetwiliocreaterequest.go b/internal/sdk/pkg/models/shared/sourcetwiliocreaterequest.go
old mode 100755
new mode 100644
index 8e6170383..8279e7706
--- a/internal/sdk/pkg/models/shared/sourcetwiliocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetwiliocreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceTwilioCreateRequest struct {
Configuration SourceTwilio `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTwilioCreateRequest) GetConfiguration() SourceTwilio {
+ if o == nil {
+ return SourceTwilio{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTwilioCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceTwilioCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTwilioCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceTwilioCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetwilioputrequest.go b/internal/sdk/pkg/models/shared/sourcetwilioputrequest.go
old mode 100755
new mode 100644
index 63fb7a042..c10923fec
--- a/internal/sdk/pkg/models/shared/sourcetwilioputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetwilioputrequest.go
@@ -7,3 +7,24 @@ type SourceTwilioPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTwilioPutRequest) GetConfiguration() SourceTwilioUpdate {
+ if o == nil {
+ return SourceTwilioUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTwilioPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTwilioPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetwiliotaskrouter.go b/internal/sdk/pkg/models/shared/sourcetwiliotaskrouter.go
old mode 100755
new mode 100644
index a0f698911..cf5ce5deb
--- a/internal/sdk/pkg/models/shared/sourcetwiliotaskrouter.go
+++ b/internal/sdk/pkg/models/shared/sourcetwiliotaskrouter.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceTwilioTaskrouterTwilioTaskrouter string
+type TwilioTaskrouter string
const (
- SourceTwilioTaskrouterTwilioTaskrouterTwilioTaskrouter SourceTwilioTaskrouterTwilioTaskrouter = "twilio-taskrouter"
+ TwilioTaskrouterTwilioTaskrouter TwilioTaskrouter = "twilio-taskrouter"
)
-func (e SourceTwilioTaskrouterTwilioTaskrouter) ToPointer() *SourceTwilioTaskrouterTwilioTaskrouter {
+func (e TwilioTaskrouter) ToPointer() *TwilioTaskrouter {
return &e
}
-func (e *SourceTwilioTaskrouterTwilioTaskrouter) UnmarshalJSON(data []byte) error {
+func (e *TwilioTaskrouter) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "twilio-taskrouter":
- *e = SourceTwilioTaskrouterTwilioTaskrouter(v)
+ *e = TwilioTaskrouter(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTwilioTaskrouterTwilioTaskrouter: %v", v)
+ return fmt.Errorf("invalid value for TwilioTaskrouter: %v", v)
}
}
@@ -35,6 +36,35 @@ type SourceTwilioTaskrouter struct {
// Twilio Account ID
AccountSid string `json:"account_sid"`
// Twilio Auth Token
- AuthToken string `json:"auth_token"`
- SourceType SourceTwilioTaskrouterTwilioTaskrouter `json:"sourceType"`
+ AuthToken string `json:"auth_token"`
+ sourceType TwilioTaskrouter `const:"twilio-taskrouter" json:"sourceType"`
+}
+
+func (s SourceTwilioTaskrouter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTwilioTaskrouter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTwilioTaskrouter) GetAccountSid() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccountSid
+}
+
+func (o *SourceTwilioTaskrouter) GetAuthToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthToken
+}
+
+func (o *SourceTwilioTaskrouter) GetSourceType() TwilioTaskrouter {
+ return TwilioTaskrouterTwilioTaskrouter
}
diff --git a/internal/sdk/pkg/models/shared/sourcetwiliotaskroutercreaterequest.go b/internal/sdk/pkg/models/shared/sourcetwiliotaskroutercreaterequest.go
old mode 100755
new mode 100644
index b76edc67a..7e9c038f5
--- a/internal/sdk/pkg/models/shared/sourcetwiliotaskroutercreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetwiliotaskroutercreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceTwilioTaskrouterCreateRequest struct {
Configuration SourceTwilioTaskrouter `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTwilioTaskrouterCreateRequest) GetConfiguration() SourceTwilioTaskrouter {
+ if o == nil {
+ return SourceTwilioTaskrouter{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTwilioTaskrouterCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceTwilioTaskrouterCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTwilioTaskrouterCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceTwilioTaskrouterCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetwiliotaskrouterputrequest.go b/internal/sdk/pkg/models/shared/sourcetwiliotaskrouterputrequest.go
old mode 100755
new mode 100644
index 2834afd35..543e0733b
--- a/internal/sdk/pkg/models/shared/sourcetwiliotaskrouterputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetwiliotaskrouterputrequest.go
@@ -7,3 +7,24 @@ type SourceTwilioTaskrouterPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTwilioTaskrouterPutRequest) GetConfiguration() SourceTwilioTaskrouterUpdate {
+ if o == nil {
+ return SourceTwilioTaskrouterUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTwilioTaskrouterPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTwilioTaskrouterPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetwiliotaskrouterupdate.go b/internal/sdk/pkg/models/shared/sourcetwiliotaskrouterupdate.go
old mode 100755
new mode 100644
index fd3358d4e..8a117d671
--- a/internal/sdk/pkg/models/shared/sourcetwiliotaskrouterupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcetwiliotaskrouterupdate.go
@@ -8,3 +8,17 @@ type SourceTwilioTaskrouterUpdate struct {
// Twilio Auth Token
AuthToken string `json:"auth_token"`
}
+
+func (o *SourceTwilioTaskrouterUpdate) GetAccountSid() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccountSid
+}
+
+func (o *SourceTwilioTaskrouterUpdate) GetAuthToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthToken
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetwilioupdate.go b/internal/sdk/pkg/models/shared/sourcetwilioupdate.go
old mode 100755
new mode 100644
index ca81407f0..2b869ed99
--- a/internal/sdk/pkg/models/shared/sourcetwilioupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcetwilioupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -12,7 +13,46 @@ type SourceTwilioUpdate struct {
// Twilio Auth Token.
AuthToken string `json:"auth_token"`
// How far into the past to look for records. (in minutes)
- LookbackWindow *int64 `json:"lookback_window,omitempty"`
+ LookbackWindow *int64 `default:"0" json:"lookback_window"`
// UTC date and time in the format 2020-10-01T00:00:00Z. Any data before this date will not be replicated.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceTwilioUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTwilioUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTwilioUpdate) GetAccountSid() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccountSid
+}
+
+func (o *SourceTwilioUpdate) GetAuthToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthToken
+}
+
+func (o *SourceTwilioUpdate) GetLookbackWindow() *int64 {
+ if o == nil {
+ return nil
+ }
+ return o.LookbackWindow
+}
+
+func (o *SourceTwilioUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetwitter.go b/internal/sdk/pkg/models/shared/sourcetwitter.go
old mode 100755
new mode 100644
index c8f482674..e974a0ffd
--- a/internal/sdk/pkg/models/shared/sourcetwitter.go
+++ b/internal/sdk/pkg/models/shared/sourcetwitter.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceTwitterTwitter string
+type Twitter string
const (
- SourceTwitterTwitterTwitter SourceTwitterTwitter = "twitter"
+ TwitterTwitter Twitter = "twitter"
)
-func (e SourceTwitterTwitter) ToPointer() *SourceTwitterTwitter {
+func (e Twitter) ToPointer() *Twitter {
return &e
}
-func (e *SourceTwitterTwitter) UnmarshalJSON(data []byte) error {
+func (e *Twitter) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "twitter":
- *e = SourceTwitterTwitter(v)
+ *e = Twitter(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTwitterTwitter: %v", v)
+ return fmt.Errorf("invalid value for Twitter: %v", v)
}
}
@@ -38,8 +39,51 @@ type SourceTwitter struct {
// The end date for retrieving tweets must be a minimum of 10 seconds prior to the request time.
EndDate *time.Time `json:"end_date,omitempty"`
// Query for matching Tweets. You can learn how to build this query by reading build a query guide .
- Query string `json:"query"`
- SourceType SourceTwitterTwitter `json:"sourceType"`
+ Query string `json:"query"`
+ sourceType Twitter `const:"twitter" json:"sourceType"`
// The start date for retrieving tweets cannot be more than 7 days in the past.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceTwitter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTwitter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTwitter) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceTwitter) GetEndDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceTwitter) GetQuery() string {
+ if o == nil {
+ return ""
+ }
+ return o.Query
+}
+
+func (o *SourceTwitter) GetSourceType() Twitter {
+ return TwitterTwitter
+}
+
+func (o *SourceTwitter) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetwittercreaterequest.go b/internal/sdk/pkg/models/shared/sourcetwittercreaterequest.go
old mode 100755
new mode 100644
index d53ada26e..82311bb7f
--- a/internal/sdk/pkg/models/shared/sourcetwittercreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetwittercreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceTwitterCreateRequest struct {
Configuration SourceTwitter `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTwitterCreateRequest) GetConfiguration() SourceTwitter {
+ if o == nil {
+ return SourceTwitter{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTwitterCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceTwitterCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTwitterCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceTwitterCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetwitterputrequest.go b/internal/sdk/pkg/models/shared/sourcetwitterputrequest.go
old mode 100755
new mode 100644
index c6a5216be..f24f5bb20
--- a/internal/sdk/pkg/models/shared/sourcetwitterputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetwitterputrequest.go
@@ -7,3 +7,24 @@ type SourceTwitterPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTwitterPutRequest) GetConfiguration() SourceTwitterUpdate {
+ if o == nil {
+ return SourceTwitterUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTwitterPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTwitterPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetwitterupdate.go b/internal/sdk/pkg/models/shared/sourcetwitterupdate.go
old mode 100755
new mode 100644
index 8f75918d7..9071b1d40
--- a/internal/sdk/pkg/models/shared/sourcetwitterupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcetwitterupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -16,3 +17,42 @@ type SourceTwitterUpdate struct {
// The start date for retrieving tweets cannot be more than 7 days in the past.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceTwitterUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTwitterUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTwitterUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceTwitterUpdate) GetEndDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceTwitterUpdate) GetQuery() string {
+ if o == nil {
+ return ""
+ }
+ return o.Query
+}
+
+func (o *SourceTwitterUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetypeform.go b/internal/sdk/pkg/models/shared/sourcetypeform.go
old mode 100755
new mode 100644
index ee27b7786..87a59d961
--- a/internal/sdk/pkg/models/shared/sourcetypeform.go
+++ b/internal/sdk/pkg/models/shared/sourcetypeform.go
@@ -3,71 +3,93 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceTypeformAuthorizationMethodPrivateTokenAuthType string
+type SourceTypeformSchemasAuthType string
const (
- SourceTypeformAuthorizationMethodPrivateTokenAuthTypeAccessToken SourceTypeformAuthorizationMethodPrivateTokenAuthType = "access_token"
+ SourceTypeformSchemasAuthTypeAccessToken SourceTypeformSchemasAuthType = "access_token"
)
-func (e SourceTypeformAuthorizationMethodPrivateTokenAuthType) ToPointer() *SourceTypeformAuthorizationMethodPrivateTokenAuthType {
+func (e SourceTypeformSchemasAuthType) ToPointer() *SourceTypeformSchemasAuthType {
return &e
}
-func (e *SourceTypeformAuthorizationMethodPrivateTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTypeformSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceTypeformAuthorizationMethodPrivateTokenAuthType(v)
+ *e = SourceTypeformSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTypeformAuthorizationMethodPrivateTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTypeformSchemasAuthType: %v", v)
}
}
-type SourceTypeformAuthorizationMethodPrivateToken struct {
+type SourceTypeformPrivateToken struct {
// Log into your Typeform account and then generate a personal Access Token.
- AccessToken string `json:"access_token"`
- AuthType *SourceTypeformAuthorizationMethodPrivateTokenAuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceTypeformSchemasAuthType `const:"access_token" json:"auth_type,omitempty"`
}
-type SourceTypeformAuthorizationMethodOAuth20AuthType string
+func (s SourceTypeformPrivateToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTypeformPrivateToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTypeformPrivateToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceTypeformPrivateToken) GetAuthType() *SourceTypeformSchemasAuthType {
+ return SourceTypeformSchemasAuthTypeAccessToken.ToPointer()
+}
+
+type SourceTypeformAuthType string
const (
- SourceTypeformAuthorizationMethodOAuth20AuthTypeOauth20 SourceTypeformAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceTypeformAuthTypeOauth20 SourceTypeformAuthType = "oauth2.0"
)
-func (e SourceTypeformAuthorizationMethodOAuth20AuthType) ToPointer() *SourceTypeformAuthorizationMethodOAuth20AuthType {
+func (e SourceTypeformAuthType) ToPointer() *SourceTypeformAuthType {
return &e
}
-func (e *SourceTypeformAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTypeformAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceTypeformAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceTypeformAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTypeformAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTypeformAuthType: %v", v)
}
}
-type SourceTypeformAuthorizationMethodOAuth20 struct {
+type SourceTypeformOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType *SourceTypeformAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceTypeformAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The Client ID of the Typeform developer application.
ClientID string `json:"client_id"`
// The Client Secret the Typeform developer application.
@@ -78,56 +100,101 @@ type SourceTypeformAuthorizationMethodOAuth20 struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (s SourceTypeformOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTypeformOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTypeformOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceTypeformOAuth20) GetAuthType() *SourceTypeformAuthType {
+ return SourceTypeformAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceTypeformOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceTypeformOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceTypeformOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceTypeformOAuth20) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceTypeformAuthorizationMethodType string
const (
- SourceTypeformAuthorizationMethodTypeSourceTypeformAuthorizationMethodOAuth20 SourceTypeformAuthorizationMethodType = "source-typeform_Authorization Method_OAuth2.0"
- SourceTypeformAuthorizationMethodTypeSourceTypeformAuthorizationMethodPrivateToken SourceTypeformAuthorizationMethodType = "source-typeform_Authorization Method_Private Token"
+ SourceTypeformAuthorizationMethodTypeSourceTypeformOAuth20 SourceTypeformAuthorizationMethodType = "source-typeform_OAuth2.0"
+ SourceTypeformAuthorizationMethodTypeSourceTypeformPrivateToken SourceTypeformAuthorizationMethodType = "source-typeform_Private Token"
)
type SourceTypeformAuthorizationMethod struct {
- SourceTypeformAuthorizationMethodOAuth20 *SourceTypeformAuthorizationMethodOAuth20
- SourceTypeformAuthorizationMethodPrivateToken *SourceTypeformAuthorizationMethodPrivateToken
+ SourceTypeformOAuth20 *SourceTypeformOAuth20
+ SourceTypeformPrivateToken *SourceTypeformPrivateToken
Type SourceTypeformAuthorizationMethodType
}
-func CreateSourceTypeformAuthorizationMethodSourceTypeformAuthorizationMethodOAuth20(sourceTypeformAuthorizationMethodOAuth20 SourceTypeformAuthorizationMethodOAuth20) SourceTypeformAuthorizationMethod {
- typ := SourceTypeformAuthorizationMethodTypeSourceTypeformAuthorizationMethodOAuth20
+func CreateSourceTypeformAuthorizationMethodSourceTypeformOAuth20(sourceTypeformOAuth20 SourceTypeformOAuth20) SourceTypeformAuthorizationMethod {
+ typ := SourceTypeformAuthorizationMethodTypeSourceTypeformOAuth20
return SourceTypeformAuthorizationMethod{
- SourceTypeformAuthorizationMethodOAuth20: &sourceTypeformAuthorizationMethodOAuth20,
- Type: typ,
+ SourceTypeformOAuth20: &sourceTypeformOAuth20,
+ Type: typ,
}
}
-func CreateSourceTypeformAuthorizationMethodSourceTypeformAuthorizationMethodPrivateToken(sourceTypeformAuthorizationMethodPrivateToken SourceTypeformAuthorizationMethodPrivateToken) SourceTypeformAuthorizationMethod {
- typ := SourceTypeformAuthorizationMethodTypeSourceTypeformAuthorizationMethodPrivateToken
+func CreateSourceTypeformAuthorizationMethodSourceTypeformPrivateToken(sourceTypeformPrivateToken SourceTypeformPrivateToken) SourceTypeformAuthorizationMethod {
+ typ := SourceTypeformAuthorizationMethodTypeSourceTypeformPrivateToken
return SourceTypeformAuthorizationMethod{
- SourceTypeformAuthorizationMethodPrivateToken: &sourceTypeformAuthorizationMethodPrivateToken,
- Type: typ,
+ SourceTypeformPrivateToken: &sourceTypeformPrivateToken,
+ Type: typ,
}
}
func (u *SourceTypeformAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceTypeformAuthorizationMethodPrivateToken := new(SourceTypeformAuthorizationMethodPrivateToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTypeformAuthorizationMethodPrivateToken); err == nil {
- u.SourceTypeformAuthorizationMethodPrivateToken = sourceTypeformAuthorizationMethodPrivateToken
- u.Type = SourceTypeformAuthorizationMethodTypeSourceTypeformAuthorizationMethodPrivateToken
+
+ sourceTypeformPrivateToken := new(SourceTypeformPrivateToken)
+ if err := utils.UnmarshalJSON(data, &sourceTypeformPrivateToken, "", true, true); err == nil {
+ u.SourceTypeformPrivateToken = sourceTypeformPrivateToken
+ u.Type = SourceTypeformAuthorizationMethodTypeSourceTypeformPrivateToken
return nil
}
- sourceTypeformAuthorizationMethodOAuth20 := new(SourceTypeformAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTypeformAuthorizationMethodOAuth20); err == nil {
- u.SourceTypeformAuthorizationMethodOAuth20 = sourceTypeformAuthorizationMethodOAuth20
- u.Type = SourceTypeformAuthorizationMethodTypeSourceTypeformAuthorizationMethodOAuth20
+ sourceTypeformOAuth20 := new(SourceTypeformOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceTypeformOAuth20, "", true, true); err == nil {
+ u.SourceTypeformOAuth20 = sourceTypeformOAuth20
+ u.Type = SourceTypeformAuthorizationMethodTypeSourceTypeformOAuth20
return nil
}
@@ -135,46 +202,82 @@ func (u *SourceTypeformAuthorizationMethod) UnmarshalJSON(data []byte) error {
}
func (u SourceTypeformAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceTypeformAuthorizationMethodPrivateToken != nil {
- return json.Marshal(u.SourceTypeformAuthorizationMethodPrivateToken)
+ if u.SourceTypeformOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceTypeformOAuth20, "", true)
}
- if u.SourceTypeformAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceTypeformAuthorizationMethodOAuth20)
+ if u.SourceTypeformPrivateToken != nil {
+ return utils.MarshalJSON(u.SourceTypeformPrivateToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceTypeformTypeform string
+type Typeform string
const (
- SourceTypeformTypeformTypeform SourceTypeformTypeform = "typeform"
+ TypeformTypeform Typeform = "typeform"
)
-func (e SourceTypeformTypeform) ToPointer() *SourceTypeformTypeform {
+func (e Typeform) ToPointer() *Typeform {
return &e
}
-func (e *SourceTypeformTypeform) UnmarshalJSON(data []byte) error {
+func (e *Typeform) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "typeform":
- *e = SourceTypeformTypeform(v)
+ *e = Typeform(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTypeformTypeform: %v", v)
+ return fmt.Errorf("invalid value for Typeform: %v", v)
}
}
type SourceTypeform struct {
Credentials SourceTypeformAuthorizationMethod `json:"credentials"`
// When this parameter is set, the connector will replicate data only from the input forms. Otherwise, all forms in your Typeform account will be replicated. You can find form IDs in your form URLs. For example, in the URL "https://mysite.typeform.com/to/u6nXL7" the form_id is u6nXL7. You can find form URLs on Share panel
- FormIds []string `json:"form_ids,omitempty"`
- SourceType SourceTypeformTypeform `json:"sourceType"`
+ FormIds []string `json:"form_ids,omitempty"`
+ sourceType Typeform `const:"typeform" json:"sourceType"`
// The date from which you'd like to replicate data for Typeform API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceTypeform) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTypeform) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTypeform) GetCredentials() SourceTypeformAuthorizationMethod {
+ if o == nil {
+ return SourceTypeformAuthorizationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceTypeform) GetFormIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.FormIds
+}
+
+func (o *SourceTypeform) GetSourceType() Typeform {
+ return TypeformTypeform
+}
+
+func (o *SourceTypeform) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetypeformcreaterequest.go b/internal/sdk/pkg/models/shared/sourcetypeformcreaterequest.go
old mode 100755
new mode 100644
index 525a973c9..6ddbae166
--- a/internal/sdk/pkg/models/shared/sourcetypeformcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetypeformcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceTypeformCreateRequest struct {
Configuration SourceTypeform `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTypeformCreateRequest) GetConfiguration() SourceTypeform {
+ if o == nil {
+ return SourceTypeform{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTypeformCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceTypeformCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTypeformCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceTypeformCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetypeformputrequest.go b/internal/sdk/pkg/models/shared/sourcetypeformputrequest.go
old mode 100755
new mode 100644
index 28801dfce..16a927351
--- a/internal/sdk/pkg/models/shared/sourcetypeformputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcetypeformputrequest.go
@@ -7,3 +7,24 @@ type SourceTypeformPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceTypeformPutRequest) GetConfiguration() SourceTypeformUpdate {
+ if o == nil {
+ return SourceTypeformUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceTypeformPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceTypeformPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcetypeformupdate.go b/internal/sdk/pkg/models/shared/sourcetypeformupdate.go
old mode 100755
new mode 100644
index b1eac3762..6be91647e
--- a/internal/sdk/pkg/models/shared/sourcetypeformupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcetypeformupdate.go
@@ -3,71 +3,93 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceTypeformUpdateAuthorizationMethodPrivateTokenAuthType string
+type SourceTypeformUpdateSchemasAuthType string
const (
- SourceTypeformUpdateAuthorizationMethodPrivateTokenAuthTypeAccessToken SourceTypeformUpdateAuthorizationMethodPrivateTokenAuthType = "access_token"
+ SourceTypeformUpdateSchemasAuthTypeAccessToken SourceTypeformUpdateSchemasAuthType = "access_token"
)
-func (e SourceTypeformUpdateAuthorizationMethodPrivateTokenAuthType) ToPointer() *SourceTypeformUpdateAuthorizationMethodPrivateTokenAuthType {
+func (e SourceTypeformUpdateSchemasAuthType) ToPointer() *SourceTypeformUpdateSchemasAuthType {
return &e
}
-func (e *SourceTypeformUpdateAuthorizationMethodPrivateTokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTypeformUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceTypeformUpdateAuthorizationMethodPrivateTokenAuthType(v)
+ *e = SourceTypeformUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTypeformUpdateAuthorizationMethodPrivateTokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTypeformUpdateSchemasAuthType: %v", v)
}
}
-type SourceTypeformUpdateAuthorizationMethodPrivateToken struct {
+type SourceTypeformUpdatePrivateToken struct {
// Log into your Typeform account and then generate a personal Access Token.
- AccessToken string `json:"access_token"`
- AuthType *SourceTypeformUpdateAuthorizationMethodPrivateTokenAuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceTypeformUpdateSchemasAuthType `const:"access_token" json:"auth_type,omitempty"`
}
-type SourceTypeformUpdateAuthorizationMethodOAuth20AuthType string
+func (s SourceTypeformUpdatePrivateToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTypeformUpdatePrivateToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTypeformUpdatePrivateToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceTypeformUpdatePrivateToken) GetAuthType() *SourceTypeformUpdateSchemasAuthType {
+ return SourceTypeformUpdateSchemasAuthTypeAccessToken.ToPointer()
+}
+
+type SourceTypeformUpdateAuthType string
const (
- SourceTypeformUpdateAuthorizationMethodOAuth20AuthTypeOauth20 SourceTypeformUpdateAuthorizationMethodOAuth20AuthType = "oauth2.0"
+ SourceTypeformUpdateAuthTypeOauth20 SourceTypeformUpdateAuthType = "oauth2.0"
)
-func (e SourceTypeformUpdateAuthorizationMethodOAuth20AuthType) ToPointer() *SourceTypeformUpdateAuthorizationMethodOAuth20AuthType {
+func (e SourceTypeformUpdateAuthType) ToPointer() *SourceTypeformUpdateAuthType {
return &e
}
-func (e *SourceTypeformUpdateAuthorizationMethodOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceTypeformUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceTypeformUpdateAuthorizationMethodOAuth20AuthType(v)
+ *e = SourceTypeformUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceTypeformUpdateAuthorizationMethodOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceTypeformUpdateAuthType: %v", v)
}
}
-type SourceTypeformUpdateAuthorizationMethodOAuth20 struct {
+type SourceTypeformUpdateOAuth20 struct {
// Access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthType *SourceTypeformUpdateAuthorizationMethodOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceTypeformUpdateAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// The Client ID of the Typeform developer application.
ClientID string `json:"client_id"`
// The Client Secret the Typeform developer application.
@@ -78,56 +100,101 @@ type SourceTypeformUpdateAuthorizationMethodOAuth20 struct {
TokenExpiryDate time.Time `json:"token_expiry_date"`
}
+func (s SourceTypeformUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTypeformUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTypeformUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceTypeformUpdateOAuth20) GetAuthType() *SourceTypeformUpdateAuthType {
+ return SourceTypeformUpdateAuthTypeOauth20.ToPointer()
+}
+
+func (o *SourceTypeformUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceTypeformUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceTypeformUpdateOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceTypeformUpdateOAuth20) GetTokenExpiryDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.TokenExpiryDate
+}
+
type SourceTypeformUpdateAuthorizationMethodType string
const (
- SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateAuthorizationMethodOAuth20 SourceTypeformUpdateAuthorizationMethodType = "source-typeform-update_Authorization Method_OAuth2.0"
- SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateAuthorizationMethodPrivateToken SourceTypeformUpdateAuthorizationMethodType = "source-typeform-update_Authorization Method_Private Token"
+ SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateOAuth20 SourceTypeformUpdateAuthorizationMethodType = "source-typeform-update_OAuth2.0"
+ SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdatePrivateToken SourceTypeformUpdateAuthorizationMethodType = "source-typeform-update_Private Token"
)
type SourceTypeformUpdateAuthorizationMethod struct {
- SourceTypeformUpdateAuthorizationMethodOAuth20 *SourceTypeformUpdateAuthorizationMethodOAuth20
- SourceTypeformUpdateAuthorizationMethodPrivateToken *SourceTypeformUpdateAuthorizationMethodPrivateToken
+ SourceTypeformUpdateOAuth20 *SourceTypeformUpdateOAuth20
+ SourceTypeformUpdatePrivateToken *SourceTypeformUpdatePrivateToken
Type SourceTypeformUpdateAuthorizationMethodType
}
-func CreateSourceTypeformUpdateAuthorizationMethodSourceTypeformUpdateAuthorizationMethodOAuth20(sourceTypeformUpdateAuthorizationMethodOAuth20 SourceTypeformUpdateAuthorizationMethodOAuth20) SourceTypeformUpdateAuthorizationMethod {
- typ := SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateAuthorizationMethodOAuth20
+func CreateSourceTypeformUpdateAuthorizationMethodSourceTypeformUpdateOAuth20(sourceTypeformUpdateOAuth20 SourceTypeformUpdateOAuth20) SourceTypeformUpdateAuthorizationMethod {
+ typ := SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateOAuth20
return SourceTypeformUpdateAuthorizationMethod{
- SourceTypeformUpdateAuthorizationMethodOAuth20: &sourceTypeformUpdateAuthorizationMethodOAuth20,
- Type: typ,
+ SourceTypeformUpdateOAuth20: &sourceTypeformUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceTypeformUpdateAuthorizationMethodSourceTypeformUpdateAuthorizationMethodPrivateToken(sourceTypeformUpdateAuthorizationMethodPrivateToken SourceTypeformUpdateAuthorizationMethodPrivateToken) SourceTypeformUpdateAuthorizationMethod {
- typ := SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateAuthorizationMethodPrivateToken
+func CreateSourceTypeformUpdateAuthorizationMethodSourceTypeformUpdatePrivateToken(sourceTypeformUpdatePrivateToken SourceTypeformUpdatePrivateToken) SourceTypeformUpdateAuthorizationMethod {
+ typ := SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdatePrivateToken
return SourceTypeformUpdateAuthorizationMethod{
- SourceTypeformUpdateAuthorizationMethodPrivateToken: &sourceTypeformUpdateAuthorizationMethodPrivateToken,
- Type: typ,
+ SourceTypeformUpdatePrivateToken: &sourceTypeformUpdatePrivateToken,
+ Type: typ,
}
}
func (u *SourceTypeformUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceTypeformUpdateAuthorizationMethodPrivateToken := new(SourceTypeformUpdateAuthorizationMethodPrivateToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTypeformUpdateAuthorizationMethodPrivateToken); err == nil {
- u.SourceTypeformUpdateAuthorizationMethodPrivateToken = sourceTypeformUpdateAuthorizationMethodPrivateToken
- u.Type = SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateAuthorizationMethodPrivateToken
+
+ sourceTypeformUpdatePrivateToken := new(SourceTypeformUpdatePrivateToken)
+ if err := utils.UnmarshalJSON(data, &sourceTypeformUpdatePrivateToken, "", true, true); err == nil {
+ u.SourceTypeformUpdatePrivateToken = sourceTypeformUpdatePrivateToken
+ u.Type = SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdatePrivateToken
return nil
}
- sourceTypeformUpdateAuthorizationMethodOAuth20 := new(SourceTypeformUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceTypeformUpdateAuthorizationMethodOAuth20); err == nil {
- u.SourceTypeformUpdateAuthorizationMethodOAuth20 = sourceTypeformUpdateAuthorizationMethodOAuth20
- u.Type = SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateAuthorizationMethodOAuth20
+ sourceTypeformUpdateOAuth20 := new(SourceTypeformUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceTypeformUpdateOAuth20, "", true, true); err == nil {
+ u.SourceTypeformUpdateOAuth20 = sourceTypeformUpdateOAuth20
+ u.Type = SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateOAuth20
return nil
}
@@ -135,15 +202,15 @@ func (u *SourceTypeformUpdateAuthorizationMethod) UnmarshalJSON(data []byte) err
}
func (u SourceTypeformUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceTypeformUpdateAuthorizationMethodPrivateToken != nil {
- return json.Marshal(u.SourceTypeformUpdateAuthorizationMethodPrivateToken)
+ if u.SourceTypeformUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceTypeformUpdateOAuth20, "", true)
}
- if u.SourceTypeformUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceTypeformUpdateAuthorizationMethodOAuth20)
+ if u.SourceTypeformUpdatePrivateToken != nil {
+ return utils.MarshalJSON(u.SourceTypeformUpdatePrivateToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceTypeformUpdate struct {
@@ -153,3 +220,35 @@ type SourceTypeformUpdate struct {
// The date from which you'd like to replicate data for Typeform API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
}
+
+func (s SourceTypeformUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceTypeformUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceTypeformUpdate) GetCredentials() SourceTypeformUpdateAuthorizationMethod {
+ if o == nil {
+ return SourceTypeformUpdateAuthorizationMethod{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceTypeformUpdate) GetFormIds() []string {
+ if o == nil {
+ return nil
+ }
+ return o.FormIds
+}
+
+func (o *SourceTypeformUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceuscensus.go b/internal/sdk/pkg/models/shared/sourceuscensus.go
old mode 100755
new mode 100644
index f2c8bfcf2..ce166736d
--- a/internal/sdk/pkg/models/shared/sourceuscensus.go
+++ b/internal/sdk/pkg/models/shared/sourceuscensus.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceUsCensusUsCensus string
+type UsCensus string
const (
- SourceUsCensusUsCensusUsCensus SourceUsCensusUsCensus = "us-census"
+ UsCensusUsCensus UsCensus = "us-census"
)
-func (e SourceUsCensusUsCensus) ToPointer() *SourceUsCensusUsCensus {
+func (e UsCensus) ToPointer() *UsCensus {
return &e
}
-func (e *SourceUsCensusUsCensus) UnmarshalJSON(data []byte) error {
+func (e *UsCensus) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "us-census":
- *e = SourceUsCensusUsCensus(v)
+ *e = UsCensus(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceUsCensusUsCensus: %v", v)
+ return fmt.Errorf("invalid value for UsCensus: %v", v)
}
}
@@ -37,6 +38,42 @@ type SourceUsCensus struct {
// The query parameters portion of the GET request, without the api key
QueryParams *string `json:"query_params,omitempty"`
// The path portion of the GET request
- QueryPath string `json:"query_path"`
- SourceType SourceUsCensusUsCensus `json:"sourceType"`
+ QueryPath string `json:"query_path"`
+ sourceType UsCensus `const:"us-census" json:"sourceType"`
+}
+
+func (s SourceUsCensus) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceUsCensus) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceUsCensus) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceUsCensus) GetQueryParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QueryParams
+}
+
+func (o *SourceUsCensus) GetQueryPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.QueryPath
+}
+
+func (o *SourceUsCensus) GetSourceType() UsCensus {
+ return UsCensusUsCensus
}
diff --git a/internal/sdk/pkg/models/shared/sourceuscensuscreaterequest.go b/internal/sdk/pkg/models/shared/sourceuscensuscreaterequest.go
old mode 100755
new mode 100644
index 0141948b4..770e43b48
--- a/internal/sdk/pkg/models/shared/sourceuscensuscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceuscensuscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceUsCensusCreateRequest struct {
Configuration SourceUsCensus `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceUsCensusCreateRequest) GetConfiguration() SourceUsCensus {
+ if o == nil {
+ return SourceUsCensus{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceUsCensusCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceUsCensusCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceUsCensusCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceUsCensusCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceuscensusputrequest.go b/internal/sdk/pkg/models/shared/sourceuscensusputrequest.go
old mode 100755
new mode 100644
index 0858eb357..ea076d60f
--- a/internal/sdk/pkg/models/shared/sourceuscensusputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceuscensusputrequest.go
@@ -7,3 +7,24 @@ type SourceUsCensusPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceUsCensusPutRequest) GetConfiguration() SourceUsCensusUpdate {
+ if o == nil {
+ return SourceUsCensusUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceUsCensusPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceUsCensusPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceuscensusupdate.go b/internal/sdk/pkg/models/shared/sourceuscensusupdate.go
old mode 100755
new mode 100644
index 8daa6b80d..14bee65eb
--- a/internal/sdk/pkg/models/shared/sourceuscensusupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceuscensusupdate.go
@@ -10,3 +10,24 @@ type SourceUsCensusUpdate struct {
// The path portion of the GET request
QueryPath string `json:"query_path"`
}
+
+func (o *SourceUsCensusUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceUsCensusUpdate) GetQueryParams() *string {
+ if o == nil {
+ return nil
+ }
+ return o.QueryParams
+}
+
+func (o *SourceUsCensusUpdate) GetQueryPath() string {
+ if o == nil {
+ return ""
+ }
+ return o.QueryPath
+}
diff --git a/internal/sdk/pkg/models/shared/sourcevantage.go b/internal/sdk/pkg/models/shared/sourcevantage.go
old mode 100755
new mode 100644
index 32f6c6302..01d3111c6
--- a/internal/sdk/pkg/models/shared/sourcevantage.go
+++ b/internal/sdk/pkg/models/shared/sourcevantage.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceVantageVantage string
+type Vantage string
const (
- SourceVantageVantageVantage SourceVantageVantage = "vantage"
+ VantageVantage Vantage = "vantage"
)
-func (e SourceVantageVantage) ToPointer() *SourceVantageVantage {
+func (e Vantage) ToPointer() *Vantage {
return &e
}
-func (e *SourceVantageVantage) UnmarshalJSON(data []byte) error {
+func (e *Vantage) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "vantage":
- *e = SourceVantageVantage(v)
+ *e = Vantage(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceVantageVantage: %v", v)
+ return fmt.Errorf("invalid value for Vantage: %v", v)
}
}
type SourceVantage struct {
// Your API Access token. See here.
- AccessToken string `json:"access_token"`
- SourceType SourceVantageVantage `json:"sourceType"`
+ AccessToken string `json:"access_token"`
+ sourceType Vantage `const:"vantage" json:"sourceType"`
+}
+
+func (s SourceVantage) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceVantage) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceVantage) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceVantage) GetSourceType() Vantage {
+ return VantageVantage
}
diff --git a/internal/sdk/pkg/models/shared/sourcevantagecreaterequest.go b/internal/sdk/pkg/models/shared/sourcevantagecreaterequest.go
old mode 100755
new mode 100644
index 7b498cc7b..9b4ddc3fa
--- a/internal/sdk/pkg/models/shared/sourcevantagecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcevantagecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceVantageCreateRequest struct {
Configuration SourceVantage `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceVantageCreateRequest) GetConfiguration() SourceVantage {
+ if o == nil {
+ return SourceVantage{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceVantageCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceVantageCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceVantageCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceVantageCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcevantageputrequest.go b/internal/sdk/pkg/models/shared/sourcevantageputrequest.go
old mode 100755
new mode 100644
index 27e664b23..401b23371
--- a/internal/sdk/pkg/models/shared/sourcevantageputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcevantageputrequest.go
@@ -7,3 +7,24 @@ type SourceVantagePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceVantagePutRequest) GetConfiguration() SourceVantageUpdate {
+ if o == nil {
+ return SourceVantageUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceVantagePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceVantagePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcevantageupdate.go b/internal/sdk/pkg/models/shared/sourcevantageupdate.go
old mode 100755
new mode 100644
index d08b4a56f..1bbb87107
--- a/internal/sdk/pkg/models/shared/sourcevantageupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcevantageupdate.go
@@ -6,3 +6,10 @@ type SourceVantageUpdate struct {
// Your API Access token. See here.
AccessToken string `json:"access_token"`
}
+
+func (o *SourceVantageUpdate) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewebflow.go b/internal/sdk/pkg/models/shared/sourcewebflow.go
old mode 100755
new mode 100644
index 4eaa0c9b4..f4afa6d70
--- a/internal/sdk/pkg/models/shared/sourcewebflow.go
+++ b/internal/sdk/pkg/models/shared/sourcewebflow.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceWebflowWebflow string
+type Webflow string
const (
- SourceWebflowWebflowWebflow SourceWebflowWebflow = "webflow"
+ WebflowWebflow Webflow = "webflow"
)
-func (e SourceWebflowWebflow) ToPointer() *SourceWebflowWebflow {
+func (e Webflow) ToPointer() *Webflow {
return &e
}
-func (e *SourceWebflowWebflow) UnmarshalJSON(data []byte) error {
+func (e *Webflow) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "webflow":
- *e = SourceWebflowWebflow(v)
+ *e = Webflow(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceWebflowWebflow: %v", v)
+ return fmt.Errorf("invalid value for Webflow: %v", v)
}
}
@@ -35,6 +36,35 @@ type SourceWebflow struct {
// The API token for authenticating to Webflow. See https://university.webflow.com/lesson/intro-to-the-webflow-api
APIKey string `json:"api_key"`
// The id of the Webflow site you are requesting data from. See https://developers.webflow.com/#sites
- SiteID string `json:"site_id"`
- SourceType SourceWebflowWebflow `json:"sourceType"`
+ SiteID string `json:"site_id"`
+ sourceType Webflow `const:"webflow" json:"sourceType"`
+}
+
+func (s SourceWebflow) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceWebflow) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceWebflow) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceWebflow) GetSiteID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SiteID
+}
+
+func (o *SourceWebflow) GetSourceType() Webflow {
+ return WebflowWebflow
}
diff --git a/internal/sdk/pkg/models/shared/sourcewebflowcreaterequest.go b/internal/sdk/pkg/models/shared/sourcewebflowcreaterequest.go
old mode 100755
new mode 100644
index 47b87c346..549136c02
--- a/internal/sdk/pkg/models/shared/sourcewebflowcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcewebflowcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceWebflowCreateRequest struct {
Configuration SourceWebflow `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceWebflowCreateRequest) GetConfiguration() SourceWebflow {
+ if o == nil {
+ return SourceWebflow{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceWebflowCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceWebflowCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceWebflowCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceWebflowCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewebflowputrequest.go b/internal/sdk/pkg/models/shared/sourcewebflowputrequest.go
old mode 100755
new mode 100644
index b00c9bddb..9c1984aae
--- a/internal/sdk/pkg/models/shared/sourcewebflowputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcewebflowputrequest.go
@@ -7,3 +7,24 @@ type SourceWebflowPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceWebflowPutRequest) GetConfiguration() SourceWebflowUpdate {
+ if o == nil {
+ return SourceWebflowUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceWebflowPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceWebflowPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewebflowupdate.go b/internal/sdk/pkg/models/shared/sourcewebflowupdate.go
old mode 100755
new mode 100644
index da3f7a401..8ca0658ce
--- a/internal/sdk/pkg/models/shared/sourcewebflowupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcewebflowupdate.go
@@ -8,3 +8,17 @@ type SourceWebflowUpdate struct {
// The id of the Webflow site you are requesting data from. See https://developers.webflow.com/#sites
SiteID string `json:"site_id"`
}
+
+func (o *SourceWebflowUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceWebflowUpdate) GetSiteID() string {
+ if o == nil {
+ return ""
+ }
+ return o.SiteID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewhiskyhunter.go b/internal/sdk/pkg/models/shared/sourcewhiskyhunter.go
old mode 100755
new mode 100644
index 0048d6180..e0e81ec93
--- a/internal/sdk/pkg/models/shared/sourcewhiskyhunter.go
+++ b/internal/sdk/pkg/models/shared/sourcewhiskyhunter.go
@@ -5,32 +5,48 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceWhiskyHunterWhiskyHunter string
+type WhiskyHunter string
const (
- SourceWhiskyHunterWhiskyHunterWhiskyHunter SourceWhiskyHunterWhiskyHunter = "whisky-hunter"
+ WhiskyHunterWhiskyHunter WhiskyHunter = "whisky-hunter"
)
-func (e SourceWhiskyHunterWhiskyHunter) ToPointer() *SourceWhiskyHunterWhiskyHunter {
+func (e WhiskyHunter) ToPointer() *WhiskyHunter {
return &e
}
-func (e *SourceWhiskyHunterWhiskyHunter) UnmarshalJSON(data []byte) error {
+func (e *WhiskyHunter) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "whisky-hunter":
- *e = SourceWhiskyHunterWhiskyHunter(v)
+ *e = WhiskyHunter(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceWhiskyHunterWhiskyHunter: %v", v)
+ return fmt.Errorf("invalid value for WhiskyHunter: %v", v)
}
}
type SourceWhiskyHunter struct {
- SourceType *SourceWhiskyHunterWhiskyHunter `json:"sourceType,omitempty"`
+ sourceType *WhiskyHunter `const:"whisky-hunter" json:"sourceType,omitempty"`
+}
+
+func (s SourceWhiskyHunter) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceWhiskyHunter) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceWhiskyHunter) GetSourceType() *WhiskyHunter {
+ return WhiskyHunterWhiskyHunter.ToPointer()
}
diff --git a/internal/sdk/pkg/models/shared/sourcewhiskyhuntercreaterequest.go b/internal/sdk/pkg/models/shared/sourcewhiskyhuntercreaterequest.go
old mode 100755
new mode 100644
index 36589e681..575e2a353
--- a/internal/sdk/pkg/models/shared/sourcewhiskyhuntercreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcewhiskyhuntercreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceWhiskyHunterCreateRequest struct {
Configuration SourceWhiskyHunter `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceWhiskyHunterCreateRequest) GetConfiguration() SourceWhiskyHunter {
+ if o == nil {
+ return SourceWhiskyHunter{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceWhiskyHunterCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceWhiskyHunterCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceWhiskyHunterCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceWhiskyHunterCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewhiskyhunterputrequest.go b/internal/sdk/pkg/models/shared/sourcewhiskyhunterputrequest.go
old mode 100755
new mode 100644
index 9433e3b78..c52199aaf
--- a/internal/sdk/pkg/models/shared/sourcewhiskyhunterputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcewhiskyhunterputrequest.go
@@ -7,3 +7,24 @@ type SourceWhiskyHunterPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceWhiskyHunterPutRequest) GetConfiguration() SourceWhiskyHunterUpdate {
+ if o == nil {
+ return SourceWhiskyHunterUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceWhiskyHunterPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceWhiskyHunterPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewhiskyhunterupdate.go b/internal/sdk/pkg/models/shared/sourcewhiskyhunterupdate.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/sourcewikipediapageviews.go b/internal/sdk/pkg/models/shared/sourcewikipediapageviews.go
old mode 100755
new mode 100644
index 2536d6051..ae175c7f3
--- a/internal/sdk/pkg/models/shared/sourcewikipediapageviews.go
+++ b/internal/sdk/pkg/models/shared/sourcewikipediapageviews.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceWikipediaPageviewsWikipediaPageviews string
+type WikipediaPageviews string
const (
- SourceWikipediaPageviewsWikipediaPageviewsWikipediaPageviews SourceWikipediaPageviewsWikipediaPageviews = "wikipedia-pageviews"
+ WikipediaPageviewsWikipediaPageviews WikipediaPageviews = "wikipedia-pageviews"
)
-func (e SourceWikipediaPageviewsWikipediaPageviews) ToPointer() *SourceWikipediaPageviewsWikipediaPageviews {
+func (e WikipediaPageviews) ToPointer() *WikipediaPageviews {
return &e
}
-func (e *SourceWikipediaPageviewsWikipediaPageviews) UnmarshalJSON(data []byte) error {
+func (e *WikipediaPageviews) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "wikipedia-pageviews":
- *e = SourceWikipediaPageviewsWikipediaPageviews(v)
+ *e = WikipediaPageviews(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceWikipediaPageviewsWikipediaPageviews: %v", v)
+ return fmt.Errorf("invalid value for WikipediaPageviews: %v", v)
}
}
@@ -43,8 +44,72 @@ type SourceWikipediaPageviews struct {
// The date of the last day to include, in YYYYMMDD or YYYYMMDDHH format.
End string `json:"end"`
// If you want to filter by project, use the domain of any Wikimedia project.
- Project string `json:"project"`
- SourceType SourceWikipediaPageviewsWikipediaPageviews `json:"sourceType"`
+ Project string `json:"project"`
+ sourceType WikipediaPageviews `const:"wikipedia-pageviews" json:"sourceType"`
// The date of the first day to include, in YYYYMMDD or YYYYMMDDHH format.
Start string `json:"start"`
}
+
+func (s SourceWikipediaPageviews) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceWikipediaPageviews) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceWikipediaPageviews) GetAccess() string {
+ if o == nil {
+ return ""
+ }
+ return o.Access
+}
+
+func (o *SourceWikipediaPageviews) GetAgent() string {
+ if o == nil {
+ return ""
+ }
+ return o.Agent
+}
+
+func (o *SourceWikipediaPageviews) GetArticle() string {
+ if o == nil {
+ return ""
+ }
+ return o.Article
+}
+
+func (o *SourceWikipediaPageviews) GetCountry() string {
+ if o == nil {
+ return ""
+ }
+ return o.Country
+}
+
+func (o *SourceWikipediaPageviews) GetEnd() string {
+ if o == nil {
+ return ""
+ }
+ return o.End
+}
+
+func (o *SourceWikipediaPageviews) GetProject() string {
+ if o == nil {
+ return ""
+ }
+ return o.Project
+}
+
+func (o *SourceWikipediaPageviews) GetSourceType() WikipediaPageviews {
+ return WikipediaPageviewsWikipediaPageviews
+}
+
+func (o *SourceWikipediaPageviews) GetStart() string {
+ if o == nil {
+ return ""
+ }
+ return o.Start
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewikipediapageviewscreaterequest.go b/internal/sdk/pkg/models/shared/sourcewikipediapageviewscreaterequest.go
old mode 100755
new mode 100644
index f587002bf..52d2384b4
--- a/internal/sdk/pkg/models/shared/sourcewikipediapageviewscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcewikipediapageviewscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceWikipediaPageviewsCreateRequest struct {
Configuration SourceWikipediaPageviews `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceWikipediaPageviewsCreateRequest) GetConfiguration() SourceWikipediaPageviews {
+ if o == nil {
+ return SourceWikipediaPageviews{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceWikipediaPageviewsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceWikipediaPageviewsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceWikipediaPageviewsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceWikipediaPageviewsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewikipediapageviewsputrequest.go b/internal/sdk/pkg/models/shared/sourcewikipediapageviewsputrequest.go
old mode 100755
new mode 100644
index 78e160400..c797d146c
--- a/internal/sdk/pkg/models/shared/sourcewikipediapageviewsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcewikipediapageviewsputrequest.go
@@ -7,3 +7,24 @@ type SourceWikipediaPageviewsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceWikipediaPageviewsPutRequest) GetConfiguration() SourceWikipediaPageviewsUpdate {
+ if o == nil {
+ return SourceWikipediaPageviewsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceWikipediaPageviewsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceWikipediaPageviewsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewikipediapageviewsupdate.go b/internal/sdk/pkg/models/shared/sourcewikipediapageviewsupdate.go
old mode 100755
new mode 100644
index 18fd15ffc..41d4ef2d8
--- a/internal/sdk/pkg/models/shared/sourcewikipediapageviewsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcewikipediapageviewsupdate.go
@@ -18,3 +18,52 @@ type SourceWikipediaPageviewsUpdate struct {
// The date of the first day to include, in YYYYMMDD or YYYYMMDDHH format.
Start string `json:"start"`
}
+
+func (o *SourceWikipediaPageviewsUpdate) GetAccess() string {
+ if o == nil {
+ return ""
+ }
+ return o.Access
+}
+
+func (o *SourceWikipediaPageviewsUpdate) GetAgent() string {
+ if o == nil {
+ return ""
+ }
+ return o.Agent
+}
+
+func (o *SourceWikipediaPageviewsUpdate) GetArticle() string {
+ if o == nil {
+ return ""
+ }
+ return o.Article
+}
+
+func (o *SourceWikipediaPageviewsUpdate) GetCountry() string {
+ if o == nil {
+ return ""
+ }
+ return o.Country
+}
+
+func (o *SourceWikipediaPageviewsUpdate) GetEnd() string {
+ if o == nil {
+ return ""
+ }
+ return o.End
+}
+
+func (o *SourceWikipediaPageviewsUpdate) GetProject() string {
+ if o == nil {
+ return ""
+ }
+ return o.Project
+}
+
+func (o *SourceWikipediaPageviewsUpdate) GetStart() string {
+ if o == nil {
+ return ""
+ }
+ return o.Start
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewoocommerce.go b/internal/sdk/pkg/models/shared/sourcewoocommerce.go
old mode 100755
new mode 100644
index 600f8671f..5030e0389
--- a/internal/sdk/pkg/models/shared/sourcewoocommerce.go
+++ b/internal/sdk/pkg/models/shared/sourcewoocommerce.go
@@ -3,32 +3,33 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceWoocommerceWoocommerce string
+type Woocommerce string
const (
- SourceWoocommerceWoocommerceWoocommerce SourceWoocommerceWoocommerce = "woocommerce"
+ WoocommerceWoocommerce Woocommerce = "woocommerce"
)
-func (e SourceWoocommerceWoocommerce) ToPointer() *SourceWoocommerceWoocommerce {
+func (e Woocommerce) ToPointer() *Woocommerce {
return &e
}
-func (e *SourceWoocommerceWoocommerce) UnmarshalJSON(data []byte) error {
+func (e *Woocommerce) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "woocommerce":
- *e = SourceWoocommerceWoocommerce(v)
+ *e = Woocommerce(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceWoocommerceWoocommerce: %v", v)
+ return fmt.Errorf("invalid value for Woocommerce: %v", v)
}
}
@@ -38,8 +39,51 @@ type SourceWoocommerce struct {
// Customer Secret for API in WooCommerce shop
APISecret string `json:"api_secret"`
// The name of the store. For https://EXAMPLE.com, the shop name is 'EXAMPLE.com'.
- Shop string `json:"shop"`
- SourceType SourceWoocommerceWoocommerce `json:"sourceType"`
+ Shop string `json:"shop"`
+ sourceType Woocommerce `const:"woocommerce" json:"sourceType"`
// The date you would like to replicate data from. Format: YYYY-MM-DD
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceWoocommerce) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceWoocommerce) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceWoocommerce) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceWoocommerce) GetAPISecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.APISecret
+}
+
+func (o *SourceWoocommerce) GetShop() string {
+ if o == nil {
+ return ""
+ }
+ return o.Shop
+}
+
+func (o *SourceWoocommerce) GetSourceType() Woocommerce {
+ return WoocommerceWoocommerce
+}
+
+func (o *SourceWoocommerce) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewoocommercecreaterequest.go b/internal/sdk/pkg/models/shared/sourcewoocommercecreaterequest.go
old mode 100755
new mode 100644
index 47384fbe4..65a9b821f
--- a/internal/sdk/pkg/models/shared/sourcewoocommercecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcewoocommercecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceWoocommerceCreateRequest struct {
Configuration SourceWoocommerce `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceWoocommerceCreateRequest) GetConfiguration() SourceWoocommerce {
+ if o == nil {
+ return SourceWoocommerce{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceWoocommerceCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceWoocommerceCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceWoocommerceCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceWoocommerceCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewoocommerceputrequest.go b/internal/sdk/pkg/models/shared/sourcewoocommerceputrequest.go
old mode 100755
new mode 100644
index 3184143e6..fe2e48126
--- a/internal/sdk/pkg/models/shared/sourcewoocommerceputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcewoocommerceputrequest.go
@@ -7,3 +7,24 @@ type SourceWoocommercePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceWoocommercePutRequest) GetConfiguration() SourceWoocommerceUpdate {
+ if o == nil {
+ return SourceWoocommerceUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceWoocommercePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceWoocommercePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcewoocommerceupdate.go b/internal/sdk/pkg/models/shared/sourcewoocommerceupdate.go
old mode 100755
new mode 100644
index 95e974a3c..4998446aa
--- a/internal/sdk/pkg/models/shared/sourcewoocommerceupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcewoocommerceupdate.go
@@ -3,7 +3,8 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceWoocommerceUpdate struct {
@@ -16,3 +17,42 @@ type SourceWoocommerceUpdate struct {
// The date you would like to replicate data from. Format: YYYY-MM-DD
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceWoocommerceUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceWoocommerceUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceWoocommerceUpdate) GetAPIKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIKey
+}
+
+func (o *SourceWoocommerceUpdate) GetAPISecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.APISecret
+}
+
+func (o *SourceWoocommerceUpdate) GetShop() string {
+ if o == nil {
+ return ""
+ }
+ return o.Shop
+}
+
+func (o *SourceWoocommerceUpdate) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourcexero.go b/internal/sdk/pkg/models/shared/sourcexero.go
deleted file mode 100755
index ec97d3bd0..000000000
--- a/internal/sdk/pkg/models/shared/sourcexero.go
+++ /dev/null
@@ -1,55 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "encoding/json"
- "fmt"
- "time"
-)
-
-type SourceXeroAuthenticateViaXeroOAuth struct {
- // Enter your Xero application's access token
- AccessToken string `json:"access_token"`
- // Enter your Xero application's Client ID
- ClientID string `json:"client_id"`
- // Enter your Xero application's Client Secret
- ClientSecret string `json:"client_secret"`
- // Enter your Xero application's refresh token
- RefreshToken string `json:"refresh_token"`
- // The date-time when the access token should be refreshed
- TokenExpiryDate string `json:"token_expiry_date"`
-}
-
-type SourceXeroXero string
-
-const (
- SourceXeroXeroXero SourceXeroXero = "xero"
-)
-
-func (e SourceXeroXero) ToPointer() *SourceXeroXero {
- return &e
-}
-
-func (e *SourceXeroXero) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "xero":
- *e = SourceXeroXero(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceXeroXero: %v", v)
- }
-}
-
-type SourceXero struct {
- Authentication SourceXeroAuthenticateViaXeroOAuth `json:"authentication"`
- SourceType SourceXeroXero `json:"sourceType"`
- // UTC date and time in the format YYYY-MM-DDTHH:mm:ssZ. Any data with created_at before this data will not be synced.
- StartDate time.Time `json:"start_date"`
- // Enter your Xero organization's Tenant ID
- TenantID string `json:"tenant_id"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcexerocreaterequest.go b/internal/sdk/pkg/models/shared/sourcexerocreaterequest.go
deleted file mode 100755
index 373448520..000000000
--- a/internal/sdk/pkg/models/shared/sourcexerocreaterequest.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceXeroCreateRequest struct {
- Configuration SourceXero `json:"configuration"`
- Name string `json:"name"`
- // Optional secretID obtained through the public API OAuth redirect flow.
- SecretID *string `json:"secretId,omitempty"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcexeroputrequest.go b/internal/sdk/pkg/models/shared/sourcexeroputrequest.go
deleted file mode 100755
index 1eb615235..000000000
--- a/internal/sdk/pkg/models/shared/sourcexeroputrequest.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceXeroPutRequest struct {
- Configuration SourceXeroUpdate `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcexeroupdate.go b/internal/sdk/pkg/models/shared/sourcexeroupdate.go
deleted file mode 100755
index 168ad1257..000000000
--- a/internal/sdk/pkg/models/shared/sourcexeroupdate.go
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "time"
-)
-
-type SourceXeroUpdateAuthenticateViaXeroOAuth struct {
- // Enter your Xero application's access token
- AccessToken string `json:"access_token"`
- // Enter your Xero application's Client ID
- ClientID string `json:"client_id"`
- // Enter your Xero application's Client Secret
- ClientSecret string `json:"client_secret"`
- // Enter your Xero application's refresh token
- RefreshToken string `json:"refresh_token"`
- // The date-time when the access token should be refreshed
- TokenExpiryDate string `json:"token_expiry_date"`
-}
-
-type SourceXeroUpdate struct {
- Authentication SourceXeroUpdateAuthenticateViaXeroOAuth `json:"authentication"`
- // UTC date and time in the format YYYY-MM-DDTHH:mm:ssZ. Any data with created_at before this data will not be synced.
- StartDate time.Time `json:"start_date"`
- // Enter your Xero organization's Tenant ID
- TenantID string `json:"tenant_id"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourcexkcd.go b/internal/sdk/pkg/models/shared/sourcexkcd.go
old mode 100755
new mode 100644
index af0bc4cba..553cd487c
--- a/internal/sdk/pkg/models/shared/sourcexkcd.go
+++ b/internal/sdk/pkg/models/shared/sourcexkcd.go
@@ -5,32 +5,48 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceXkcdXkcd string
+type Xkcd string
const (
- SourceXkcdXkcdXkcd SourceXkcdXkcd = "xkcd"
+ XkcdXkcd Xkcd = "xkcd"
)
-func (e SourceXkcdXkcd) ToPointer() *SourceXkcdXkcd {
+func (e Xkcd) ToPointer() *Xkcd {
return &e
}
-func (e *SourceXkcdXkcd) UnmarshalJSON(data []byte) error {
+func (e *Xkcd) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "xkcd":
- *e = SourceXkcdXkcd(v)
+ *e = Xkcd(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceXkcdXkcd: %v", v)
+ return fmt.Errorf("invalid value for Xkcd: %v", v)
}
}
type SourceXkcd struct {
- SourceType *SourceXkcdXkcd `json:"sourceType,omitempty"`
+ sourceType *Xkcd `const:"xkcd" json:"sourceType,omitempty"`
+}
+
+func (s SourceXkcd) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceXkcd) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceXkcd) GetSourceType() *Xkcd {
+ return XkcdXkcd.ToPointer()
}
diff --git a/internal/sdk/pkg/models/shared/sourcexkcdcreaterequest.go b/internal/sdk/pkg/models/shared/sourcexkcdcreaterequest.go
old mode 100755
new mode 100644
index 40d897e0c..eb441a122
--- a/internal/sdk/pkg/models/shared/sourcexkcdcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcexkcdcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceXkcdCreateRequest struct {
Configuration SourceXkcd `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceXkcdCreateRequest) GetConfiguration() SourceXkcd {
+ if o == nil {
+ return SourceXkcd{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceXkcdCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceXkcdCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceXkcdCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceXkcdCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcexkcdputrequest.go b/internal/sdk/pkg/models/shared/sourcexkcdputrequest.go
old mode 100755
new mode 100644
index 4dda9839a..820cc22ab
--- a/internal/sdk/pkg/models/shared/sourcexkcdputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcexkcdputrequest.go
@@ -7,3 +7,24 @@ type SourceXkcdPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceXkcdPutRequest) GetConfiguration() SourceXkcdUpdate {
+ if o == nil {
+ return SourceXkcdUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceXkcdPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceXkcdPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcexkcdupdate.go b/internal/sdk/pkg/models/shared/sourcexkcdupdate.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/models/shared/sourceyandexmetrica.go b/internal/sdk/pkg/models/shared/sourceyandexmetrica.go
old mode 100755
new mode 100644
index 6aa639bbb..b79863a81
--- a/internal/sdk/pkg/models/shared/sourceyandexmetrica.go
+++ b/internal/sdk/pkg/models/shared/sourceyandexmetrica.go
@@ -3,32 +3,33 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceYandexMetricaYandexMetrica string
+type YandexMetrica string
const (
- SourceYandexMetricaYandexMetricaYandexMetrica SourceYandexMetricaYandexMetrica = "yandex-metrica"
+ YandexMetricaYandexMetrica YandexMetrica = "yandex-metrica"
)
-func (e SourceYandexMetricaYandexMetrica) ToPointer() *SourceYandexMetricaYandexMetrica {
+func (e YandexMetrica) ToPointer() *YandexMetrica {
return &e
}
-func (e *SourceYandexMetricaYandexMetrica) UnmarshalJSON(data []byte) error {
+func (e *YandexMetrica) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "yandex-metrica":
- *e = SourceYandexMetricaYandexMetrica(v)
+ *e = YandexMetrica(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceYandexMetricaYandexMetrica: %v", v)
+ return fmt.Errorf("invalid value for YandexMetrica: %v", v)
}
}
@@ -38,8 +39,51 @@ type SourceYandexMetrica struct {
// Counter ID
CounterID string `json:"counter_id"`
// Starting point for your data replication, in format of "YYYY-MM-DD". If not provided will sync till most recent date.
- EndDate *types.Date `json:"end_date,omitempty"`
- SourceType SourceYandexMetricaYandexMetrica `json:"sourceType"`
+ EndDate *types.Date `json:"end_date,omitempty"`
+ sourceType YandexMetrica `const:"yandex-metrica" json:"sourceType"`
// Starting point for your data replication, in format of "YYYY-MM-DD".
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceYandexMetrica) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceYandexMetrica) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceYandexMetrica) GetAuthToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthToken
+}
+
+func (o *SourceYandexMetrica) GetCounterID() string {
+ if o == nil {
+ return ""
+ }
+ return o.CounterID
+}
+
+func (o *SourceYandexMetrica) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceYandexMetrica) GetSourceType() YandexMetrica {
+ return YandexMetricaYandexMetrica
+}
+
+func (o *SourceYandexMetrica) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceyandexmetricacreaterequest.go b/internal/sdk/pkg/models/shared/sourceyandexmetricacreaterequest.go
old mode 100755
new mode 100644
index 73244243c..6a012a2d3
--- a/internal/sdk/pkg/models/shared/sourceyandexmetricacreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceyandexmetricacreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceYandexMetricaCreateRequest struct {
Configuration SourceYandexMetrica `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceYandexMetricaCreateRequest) GetConfiguration() SourceYandexMetrica {
+ if o == nil {
+ return SourceYandexMetrica{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceYandexMetricaCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceYandexMetricaCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceYandexMetricaCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceYandexMetricaCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceyandexmetricaputrequest.go b/internal/sdk/pkg/models/shared/sourceyandexmetricaputrequest.go
old mode 100755
new mode 100644
index a77de3b99..c2738ebd8
--- a/internal/sdk/pkg/models/shared/sourceyandexmetricaputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceyandexmetricaputrequest.go
@@ -7,3 +7,24 @@ type SourceYandexMetricaPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceYandexMetricaPutRequest) GetConfiguration() SourceYandexMetricaUpdate {
+ if o == nil {
+ return SourceYandexMetricaUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceYandexMetricaPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceYandexMetricaPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceyandexmetricaupdate.go b/internal/sdk/pkg/models/shared/sourceyandexmetricaupdate.go
old mode 100755
new mode 100644
index 73215e315..462e00f19
--- a/internal/sdk/pkg/models/shared/sourceyandexmetricaupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceyandexmetricaupdate.go
@@ -3,7 +3,8 @@
package shared
import (
- "airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceYandexMetricaUpdate struct {
@@ -16,3 +17,42 @@ type SourceYandexMetricaUpdate struct {
// Starting point for your data replication, in format of "YYYY-MM-DD".
StartDate types.Date `json:"start_date"`
}
+
+func (s SourceYandexMetricaUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceYandexMetricaUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceYandexMetricaUpdate) GetAuthToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AuthToken
+}
+
+func (o *SourceYandexMetricaUpdate) GetCounterID() string {
+ if o == nil {
+ return ""
+ }
+ return o.CounterID
+}
+
+func (o *SourceYandexMetricaUpdate) GetEndDate() *types.Date {
+ if o == nil {
+ return nil
+ }
+ return o.EndDate
+}
+
+func (o *SourceYandexMetricaUpdate) GetStartDate() types.Date {
+ if o == nil {
+ return types.Date{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceyotpo.go b/internal/sdk/pkg/models/shared/sourceyotpo.go
old mode 100755
new mode 100644
index c1b66c1f7..2c533a3ce
--- a/internal/sdk/pkg/models/shared/sourceyotpo.go
+++ b/internal/sdk/pkg/models/shared/sourceyotpo.go
@@ -5,30 +5,31 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceYotpoYotpo string
+type Yotpo string
const (
- SourceYotpoYotpoYotpo SourceYotpoYotpo = "yotpo"
+ YotpoYotpo Yotpo = "yotpo"
)
-func (e SourceYotpoYotpo) ToPointer() *SourceYotpoYotpo {
+func (e Yotpo) ToPointer() *Yotpo {
return &e
}
-func (e *SourceYotpoYotpo) UnmarshalJSON(data []byte) error {
+func (e *Yotpo) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "yotpo":
- *e = SourceYotpoYotpo(v)
+ *e = Yotpo(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceYotpoYotpo: %v", v)
+ return fmt.Errorf("invalid value for Yotpo: %v", v)
}
}
@@ -38,8 +39,51 @@ type SourceYotpo struct {
// App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)
AppKey string `json:"app_key"`
// Email address registered with yotpo.
- Email string `json:"email"`
- SourceType SourceYotpoYotpo `json:"sourceType"`
+ Email *string `default:"example@gmail.com" json:"email"`
+ sourceType Yotpo `const:"yotpo" json:"sourceType"`
// Date time filter for incremental filter, Specify which date to extract from.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceYotpo) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceYotpo) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceYotpo) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceYotpo) GetAppKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AppKey
+}
+
+func (o *SourceYotpo) GetEmail() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Email
+}
+
+func (o *SourceYotpo) GetSourceType() Yotpo {
+ return YotpoYotpo
+}
+
+func (o *SourceYotpo) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceyotpocreaterequest.go b/internal/sdk/pkg/models/shared/sourceyotpocreaterequest.go
old mode 100755
new mode 100644
index 5cda5576a..4dba4dedd
--- a/internal/sdk/pkg/models/shared/sourceyotpocreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceyotpocreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceYotpoCreateRequest struct {
Configuration SourceYotpo `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceYotpoCreateRequest) GetConfiguration() SourceYotpo {
+ if o == nil {
+ return SourceYotpo{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceYotpoCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceYotpoCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceYotpoCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceYotpoCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceyotpoputrequest.go b/internal/sdk/pkg/models/shared/sourceyotpoputrequest.go
old mode 100755
new mode 100644
index 513758ba0..91015e893
--- a/internal/sdk/pkg/models/shared/sourceyotpoputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceyotpoputrequest.go
@@ -7,3 +7,24 @@ type SourceYotpoPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceYotpoPutRequest) GetConfiguration() SourceYotpoUpdate {
+ if o == nil {
+ return SourceYotpoUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceYotpoPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceYotpoPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceyotpoupdate.go b/internal/sdk/pkg/models/shared/sourceyotpoupdate.go
old mode 100755
new mode 100644
index 8f0691d95..56036c74b
--- a/internal/sdk/pkg/models/shared/sourceyotpoupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceyotpoupdate.go
@@ -3,6 +3,7 @@
package shared
import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -12,7 +13,46 @@ type SourceYotpoUpdate struct {
// App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)
AppKey string `json:"app_key"`
// Email address registered with yotpo.
- Email string `json:"email"`
+ Email *string `default:"example@gmail.com" json:"email"`
// Date time filter for incremental filter, Specify which date to extract from.
StartDate time.Time `json:"start_date"`
}
+
+func (s SourceYotpoUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceYotpoUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceYotpoUpdate) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceYotpoUpdate) GetAppKey() string {
+ if o == nil {
+ return ""
+ }
+ return o.AppKey
+}
+
+func (o *SourceYotpoUpdate) GetEmail() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Email
+}
+
+func (o *SourceYotpoUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
diff --git a/internal/sdk/pkg/models/shared/sourceyounium.go b/internal/sdk/pkg/models/shared/sourceyounium.go
deleted file mode 100755
index 0b45c8f6e..000000000
--- a/internal/sdk/pkg/models/shared/sourceyounium.go
+++ /dev/null
@@ -1,44 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-import (
- "encoding/json"
- "fmt"
-)
-
-type SourceYouniumYounium string
-
-const (
- SourceYouniumYouniumYounium SourceYouniumYounium = "younium"
-)
-
-func (e SourceYouniumYounium) ToPointer() *SourceYouniumYounium {
- return &e
-}
-
-func (e *SourceYouniumYounium) UnmarshalJSON(data []byte) error {
- var v string
- if err := json.Unmarshal(data, &v); err != nil {
- return err
- }
- switch v {
- case "younium":
- *e = SourceYouniumYounium(v)
- return nil
- default:
- return fmt.Errorf("invalid value for SourceYouniumYounium: %v", v)
- }
-}
-
-type SourceYounium struct {
- // Legal Entity that data should be pulled from
- LegalEntity string `json:"legal_entity"`
- // Account password for younium account API key
- Password string `json:"password"`
- // Property defining if connector is used against playground or production environment
- Playground *bool `json:"playground,omitempty"`
- SourceType SourceYouniumYounium `json:"sourceType"`
- // Username for Younium account
- Username string `json:"username"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourceyouniumcreaterequest.go b/internal/sdk/pkg/models/shared/sourceyouniumcreaterequest.go
deleted file mode 100755
index 4f8b75153..000000000
--- a/internal/sdk/pkg/models/shared/sourceyouniumcreaterequest.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceYouniumCreateRequest struct {
- Configuration SourceYounium `json:"configuration"`
- Name string `json:"name"`
- // Optional secretID obtained through the public API OAuth redirect flow.
- SecretID *string `json:"secretId,omitempty"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourceyouniumputrequest.go b/internal/sdk/pkg/models/shared/sourceyouniumputrequest.go
deleted file mode 100755
index 592c5577f..000000000
--- a/internal/sdk/pkg/models/shared/sourceyouniumputrequest.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceYouniumPutRequest struct {
- Configuration SourceYouniumUpdate `json:"configuration"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourceyouniumupdate.go b/internal/sdk/pkg/models/shared/sourceyouniumupdate.go
deleted file mode 100755
index ee4d51332..000000000
--- a/internal/sdk/pkg/models/shared/sourceyouniumupdate.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
-
-package shared
-
-type SourceYouniumUpdate struct {
- // Legal Entity that data should be pulled from
- LegalEntity string `json:"legal_entity"`
- // Account password for younium account API key
- Password string `json:"password"`
- // Property defining if connector is used against playground or production environment
- Playground *bool `json:"playground,omitempty"`
- // Username for Younium account
- Username string `json:"username"`
-}
diff --git a/internal/sdk/pkg/models/shared/sourceyoutubeanalytics.go b/internal/sdk/pkg/models/shared/sourceyoutubeanalytics.go
old mode 100755
new mode 100644
index 4b71d5e04..4c048abc9
--- a/internal/sdk/pkg/models/shared/sourceyoutubeanalytics.go
+++ b/internal/sdk/pkg/models/shared/sourceyoutubeanalytics.go
@@ -5,90 +5,105 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
type SourceYoutubeAnalyticsAuthenticateViaOAuth20 struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The Client ID of your developer application
ClientID string `json:"client_id"`
// The client secret of your developer application
ClientSecret string `json:"client_secret"`
// A refresh token generated using the above client ID and secret
RefreshToken string `json:"refresh_token"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceYoutubeAnalyticsAuthenticateViaOAuth20 SourceYoutubeAnalyticsAuthenticateViaOAuth20
-
-func (c *SourceYoutubeAnalyticsAuthenticateViaOAuth20) UnmarshalJSON(bs []byte) error {
- data := _SourceYoutubeAnalyticsAuthenticateViaOAuth20{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceYoutubeAnalyticsAuthenticateViaOAuth20(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceYoutubeAnalyticsAuthenticateViaOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceYoutubeAnalyticsAuthenticateViaOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
return err
}
- delete(additionalFields, "client_id")
- delete(additionalFields, "client_secret")
- delete(additionalFields, "refresh_token")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceYoutubeAnalyticsAuthenticateViaOAuth20) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceYoutubeAnalyticsAuthenticateViaOAuth20(c))
- if err != nil {
- return nil, err
+func (o *SourceYoutubeAnalyticsAuthenticateViaOAuth20) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceYoutubeAnalyticsAuthenticateViaOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
}
+ return o.ClientID
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceYoutubeAnalyticsAuthenticateViaOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
}
+ return o.ClientSecret
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceYoutubeAnalyticsAuthenticateViaOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
}
-
- return json.Marshal(out)
+ return o.RefreshToken
}
-type SourceYoutubeAnalyticsYoutubeAnalytics string
+type YoutubeAnalytics string
const (
- SourceYoutubeAnalyticsYoutubeAnalyticsYoutubeAnalytics SourceYoutubeAnalyticsYoutubeAnalytics = "youtube-analytics"
+ YoutubeAnalyticsYoutubeAnalytics YoutubeAnalytics = "youtube-analytics"
)
-func (e SourceYoutubeAnalyticsYoutubeAnalytics) ToPointer() *SourceYoutubeAnalyticsYoutubeAnalytics {
+func (e YoutubeAnalytics) ToPointer() *YoutubeAnalytics {
return &e
}
-func (e *SourceYoutubeAnalyticsYoutubeAnalytics) UnmarshalJSON(data []byte) error {
+func (e *YoutubeAnalytics) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "youtube-analytics":
- *e = SourceYoutubeAnalyticsYoutubeAnalytics(v)
+ *e = YoutubeAnalytics(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceYoutubeAnalyticsYoutubeAnalytics: %v", v)
+ return fmt.Errorf("invalid value for YoutubeAnalytics: %v", v)
}
}
type SourceYoutubeAnalytics struct {
Credentials SourceYoutubeAnalyticsAuthenticateViaOAuth20 `json:"credentials"`
- SourceType SourceYoutubeAnalyticsYoutubeAnalytics `json:"sourceType"`
+ sourceType YoutubeAnalytics `const:"youtube-analytics" json:"sourceType"`
+}
+
+func (s SourceYoutubeAnalytics) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceYoutubeAnalytics) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceYoutubeAnalytics) GetCredentials() SourceYoutubeAnalyticsAuthenticateViaOAuth20 {
+ if o == nil {
+ return SourceYoutubeAnalyticsAuthenticateViaOAuth20{}
+ }
+ return o.Credentials
+}
+
+func (o *SourceYoutubeAnalytics) GetSourceType() YoutubeAnalytics {
+ return YoutubeAnalyticsYoutubeAnalytics
}
diff --git a/internal/sdk/pkg/models/shared/sourceyoutubeanalyticscreaterequest.go b/internal/sdk/pkg/models/shared/sourceyoutubeanalyticscreaterequest.go
old mode 100755
new mode 100644
index 33b351062..dd0bc16e3
--- a/internal/sdk/pkg/models/shared/sourceyoutubeanalyticscreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourceyoutubeanalyticscreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceYoutubeAnalyticsCreateRequest struct {
Configuration SourceYoutubeAnalytics `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceYoutubeAnalyticsCreateRequest) GetConfiguration() SourceYoutubeAnalytics {
+ if o == nil {
+ return SourceYoutubeAnalytics{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceYoutubeAnalyticsCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceYoutubeAnalyticsCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceYoutubeAnalyticsCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceYoutubeAnalyticsCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceyoutubeanalyticsputrequest.go b/internal/sdk/pkg/models/shared/sourceyoutubeanalyticsputrequest.go
old mode 100755
new mode 100644
index 31640e626..1a3ef5d8c
--- a/internal/sdk/pkg/models/shared/sourceyoutubeanalyticsputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourceyoutubeanalyticsputrequest.go
@@ -7,3 +7,24 @@ type SourceYoutubeAnalyticsPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceYoutubeAnalyticsPutRequest) GetConfiguration() SourceYoutubeAnalyticsUpdate {
+ if o == nil {
+ return SourceYoutubeAnalyticsUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceYoutubeAnalyticsPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceYoutubeAnalyticsPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourceyoutubeanalyticsupdate.go b/internal/sdk/pkg/models/shared/sourceyoutubeanalyticsupdate.go
old mode 100755
new mode 100644
index e287fea81..b77b711d6
--- a/internal/sdk/pkg/models/shared/sourceyoutubeanalyticsupdate.go
+++ b/internal/sdk/pkg/models/shared/sourceyoutubeanalyticsupdate.go
@@ -3,66 +3,65 @@
package shared
import (
- "encoding/json"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceYoutubeAnalyticsUpdateAuthenticateViaOAuth20 struct {
+type AuthenticateViaOAuth20 struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The Client ID of your developer application
ClientID string `json:"client_id"`
// The client secret of your developer application
ClientSecret string `json:"client_secret"`
// A refresh token generated using the above client ID and secret
RefreshToken string `json:"refresh_token"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceYoutubeAnalyticsUpdateAuthenticateViaOAuth20 SourceYoutubeAnalyticsUpdateAuthenticateViaOAuth20
-
-func (c *SourceYoutubeAnalyticsUpdateAuthenticateViaOAuth20) UnmarshalJSON(bs []byte) error {
- data := _SourceYoutubeAnalyticsUpdateAuthenticateViaOAuth20{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceYoutubeAnalyticsUpdateAuthenticateViaOAuth20(data)
- additionalFields := make(map[string]interface{})
+func (a AuthenticateViaOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(a, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (a *AuthenticateViaOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &a, "", false, false); err != nil {
return err
}
- delete(additionalFields, "client_id")
- delete(additionalFields, "client_secret")
- delete(additionalFields, "refresh_token")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceYoutubeAnalyticsUpdateAuthenticateViaOAuth20) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceYoutubeAnalyticsUpdateAuthenticateViaOAuth20(c))
- if err != nil {
- return nil, err
+func (o *AuthenticateViaOAuth20) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *AuthenticateViaOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
}
+ return o.ClientID
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *AuthenticateViaOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
}
+ return o.ClientSecret
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *AuthenticateViaOAuth20) GetRefreshToken() string {
+ if o == nil {
+ return ""
}
-
- return json.Marshal(out)
+ return o.RefreshToken
}
type SourceYoutubeAnalyticsUpdate struct {
- Credentials SourceYoutubeAnalyticsUpdateAuthenticateViaOAuth20 `json:"credentials"`
+ Credentials AuthenticateViaOAuth20 `json:"credentials"`
+}
+
+func (o *SourceYoutubeAnalyticsUpdate) GetCredentials() AuthenticateViaOAuth20 {
+ if o == nil {
+ return AuthenticateViaOAuth20{}
+ }
+ return o.Credentials
}
diff --git a/internal/sdk/pkg/models/shared/sourcezendeskchat.go b/internal/sdk/pkg/models/shared/sourcezendeskchat.go
old mode 100755
new mode 100644
index dd193b085..a95076c39
--- a/internal/sdk/pkg/models/shared/sourcezendeskchat.go
+++ b/internal/sdk/pkg/models/shared/sourcezendeskchat.go
@@ -3,129 +3,189 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceZendeskChatAuthorizationMethodAccessTokenCredentials string
+type SourceZendeskChatSchemasCredentials string
const (
- SourceZendeskChatAuthorizationMethodAccessTokenCredentialsAccessToken SourceZendeskChatAuthorizationMethodAccessTokenCredentials = "access_token"
+ SourceZendeskChatSchemasCredentialsAccessToken SourceZendeskChatSchemasCredentials = "access_token"
)
-func (e SourceZendeskChatAuthorizationMethodAccessTokenCredentials) ToPointer() *SourceZendeskChatAuthorizationMethodAccessTokenCredentials {
+func (e SourceZendeskChatSchemasCredentials) ToPointer() *SourceZendeskChatSchemasCredentials {
return &e
}
-func (e *SourceZendeskChatAuthorizationMethodAccessTokenCredentials) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskChatSchemasCredentials) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceZendeskChatAuthorizationMethodAccessTokenCredentials(v)
+ *e = SourceZendeskChatSchemasCredentials(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskChatAuthorizationMethodAccessTokenCredentials: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskChatSchemasCredentials: %v", v)
}
}
-type SourceZendeskChatAuthorizationMethodAccessToken struct {
+type SourceZendeskChatAccessToken struct {
// The Access Token to make authenticated requests.
- AccessToken string `json:"access_token"`
- Credentials SourceZendeskChatAuthorizationMethodAccessTokenCredentials `json:"credentials"`
+ AccessToken string `json:"access_token"`
+ credentials SourceZendeskChatSchemasCredentials `const:"access_token" json:"credentials"`
}
-type SourceZendeskChatAuthorizationMethodOAuth20Credentials string
+func (s SourceZendeskChatAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskChatAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskChatAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceZendeskChatAccessToken) GetCredentials() SourceZendeskChatSchemasCredentials {
+ return SourceZendeskChatSchemasCredentialsAccessToken
+}
+
+type SourceZendeskChatCredentials string
const (
- SourceZendeskChatAuthorizationMethodOAuth20CredentialsOauth20 SourceZendeskChatAuthorizationMethodOAuth20Credentials = "oauth2.0"
+ SourceZendeskChatCredentialsOauth20 SourceZendeskChatCredentials = "oauth2.0"
)
-func (e SourceZendeskChatAuthorizationMethodOAuth20Credentials) ToPointer() *SourceZendeskChatAuthorizationMethodOAuth20Credentials {
+func (e SourceZendeskChatCredentials) ToPointer() *SourceZendeskChatCredentials {
return &e
}
-func (e *SourceZendeskChatAuthorizationMethodOAuth20Credentials) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskChatCredentials) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceZendeskChatAuthorizationMethodOAuth20Credentials(v)
+ *e = SourceZendeskChatCredentials(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskChatAuthorizationMethodOAuth20Credentials: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskChatCredentials: %v", v)
}
}
-type SourceZendeskChatAuthorizationMethodOAuth20 struct {
+type SourceZendeskChatOAuth20 struct {
// Access Token for making authenticated requests.
AccessToken *string `json:"access_token,omitempty"`
// The Client ID of your OAuth application
ClientID *string `json:"client_id,omitempty"`
// The Client Secret of your OAuth application.
- ClientSecret *string `json:"client_secret,omitempty"`
- Credentials SourceZendeskChatAuthorizationMethodOAuth20Credentials `json:"credentials"`
+ ClientSecret *string `json:"client_secret,omitempty"`
+ credentials SourceZendeskChatCredentials `const:"oauth2.0" json:"credentials"`
// Refresh Token to obtain new Access Token, when it's expired.
RefreshToken *string `json:"refresh_token,omitempty"`
}
+func (s SourceZendeskChatOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskChatOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskChatOAuth20) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceZendeskChatOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceZendeskChatOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceZendeskChatOAuth20) GetCredentials() SourceZendeskChatCredentials {
+ return SourceZendeskChatCredentialsOauth20
+}
+
+func (o *SourceZendeskChatOAuth20) GetRefreshToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.RefreshToken
+}
+
type SourceZendeskChatAuthorizationMethodType string
const (
- SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAuthorizationMethodOAuth20 SourceZendeskChatAuthorizationMethodType = "source-zendesk-chat_Authorization Method_OAuth2.0"
- SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAuthorizationMethodAccessToken SourceZendeskChatAuthorizationMethodType = "source-zendesk-chat_Authorization Method_Access Token"
+ SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatOAuth20 SourceZendeskChatAuthorizationMethodType = "source-zendesk-chat_OAuth2.0"
+ SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAccessToken SourceZendeskChatAuthorizationMethodType = "source-zendesk-chat_Access Token"
)
type SourceZendeskChatAuthorizationMethod struct {
- SourceZendeskChatAuthorizationMethodOAuth20 *SourceZendeskChatAuthorizationMethodOAuth20
- SourceZendeskChatAuthorizationMethodAccessToken *SourceZendeskChatAuthorizationMethodAccessToken
+ SourceZendeskChatOAuth20 *SourceZendeskChatOAuth20
+ SourceZendeskChatAccessToken *SourceZendeskChatAccessToken
Type SourceZendeskChatAuthorizationMethodType
}
-func CreateSourceZendeskChatAuthorizationMethodSourceZendeskChatAuthorizationMethodOAuth20(sourceZendeskChatAuthorizationMethodOAuth20 SourceZendeskChatAuthorizationMethodOAuth20) SourceZendeskChatAuthorizationMethod {
- typ := SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAuthorizationMethodOAuth20
+func CreateSourceZendeskChatAuthorizationMethodSourceZendeskChatOAuth20(sourceZendeskChatOAuth20 SourceZendeskChatOAuth20) SourceZendeskChatAuthorizationMethod {
+ typ := SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatOAuth20
return SourceZendeskChatAuthorizationMethod{
- SourceZendeskChatAuthorizationMethodOAuth20: &sourceZendeskChatAuthorizationMethodOAuth20,
- Type: typ,
+ SourceZendeskChatOAuth20: &sourceZendeskChatOAuth20,
+ Type: typ,
}
}
-func CreateSourceZendeskChatAuthorizationMethodSourceZendeskChatAuthorizationMethodAccessToken(sourceZendeskChatAuthorizationMethodAccessToken SourceZendeskChatAuthorizationMethodAccessToken) SourceZendeskChatAuthorizationMethod {
- typ := SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAuthorizationMethodAccessToken
+func CreateSourceZendeskChatAuthorizationMethodSourceZendeskChatAccessToken(sourceZendeskChatAccessToken SourceZendeskChatAccessToken) SourceZendeskChatAuthorizationMethod {
+ typ := SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAccessToken
return SourceZendeskChatAuthorizationMethod{
- SourceZendeskChatAuthorizationMethodAccessToken: &sourceZendeskChatAuthorizationMethodAccessToken,
- Type: typ,
+ SourceZendeskChatAccessToken: &sourceZendeskChatAccessToken,
+ Type: typ,
}
}
func (u *SourceZendeskChatAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceZendeskChatAuthorizationMethodAccessToken := new(SourceZendeskChatAuthorizationMethodAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskChatAuthorizationMethodAccessToken); err == nil {
- u.SourceZendeskChatAuthorizationMethodAccessToken = sourceZendeskChatAuthorizationMethodAccessToken
- u.Type = SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAuthorizationMethodAccessToken
+
+ sourceZendeskChatAccessToken := new(SourceZendeskChatAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskChatAccessToken, "", true, true); err == nil {
+ u.SourceZendeskChatAccessToken = sourceZendeskChatAccessToken
+ u.Type = SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAccessToken
return nil
}
- sourceZendeskChatAuthorizationMethodOAuth20 := new(SourceZendeskChatAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskChatAuthorizationMethodOAuth20); err == nil {
- u.SourceZendeskChatAuthorizationMethodOAuth20 = sourceZendeskChatAuthorizationMethodOAuth20
- u.Type = SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAuthorizationMethodOAuth20
+ sourceZendeskChatOAuth20 := new(SourceZendeskChatOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskChatOAuth20, "", true, true); err == nil {
+ u.SourceZendeskChatOAuth20 = sourceZendeskChatOAuth20
+ u.Type = SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatOAuth20
return nil
}
@@ -133,46 +193,82 @@ func (u *SourceZendeskChatAuthorizationMethod) UnmarshalJSON(data []byte) error
}
func (u SourceZendeskChatAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceZendeskChatAuthorizationMethodAccessToken != nil {
- return json.Marshal(u.SourceZendeskChatAuthorizationMethodAccessToken)
+ if u.SourceZendeskChatOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceZendeskChatOAuth20, "", true)
}
- if u.SourceZendeskChatAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceZendeskChatAuthorizationMethodOAuth20)
+ if u.SourceZendeskChatAccessToken != nil {
+ return utils.MarshalJSON(u.SourceZendeskChatAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceZendeskChatZendeskChat string
+type ZendeskChat string
const (
- SourceZendeskChatZendeskChatZendeskChat SourceZendeskChatZendeskChat = "zendesk-chat"
+ ZendeskChatZendeskChat ZendeskChat = "zendesk-chat"
)
-func (e SourceZendeskChatZendeskChat) ToPointer() *SourceZendeskChatZendeskChat {
+func (e ZendeskChat) ToPointer() *ZendeskChat {
return &e
}
-func (e *SourceZendeskChatZendeskChat) UnmarshalJSON(data []byte) error {
+func (e *ZendeskChat) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zendesk-chat":
- *e = SourceZendeskChatZendeskChat(v)
+ *e = ZendeskChat(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskChatZendeskChat: %v", v)
+ return fmt.Errorf("invalid value for ZendeskChat: %v", v)
}
}
type SourceZendeskChat struct {
Credentials *SourceZendeskChatAuthorizationMethod `json:"credentials,omitempty"`
- SourceType SourceZendeskChatZendeskChat `json:"sourceType"`
+ sourceType ZendeskChat `const:"zendesk-chat" json:"sourceType"`
// The date from which you'd like to replicate data for Zendesk Chat API, in the format YYYY-MM-DDT00:00:00Z.
StartDate time.Time `json:"start_date"`
// Required if you access Zendesk Chat from a Zendesk Support subdomain.
- Subdomain *string `json:"subdomain,omitempty"`
+ Subdomain *string `default:"" json:"subdomain"`
+}
+
+func (s SourceZendeskChat) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskChat) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskChat) GetCredentials() *SourceZendeskChatAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceZendeskChat) GetSourceType() ZendeskChat {
+ return ZendeskChatZendeskChat
+}
+
+func (o *SourceZendeskChat) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceZendeskChat) GetSubdomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Subdomain
}
diff --git a/internal/sdk/pkg/models/shared/sourcezendeskchatcreaterequest.go b/internal/sdk/pkg/models/shared/sourcezendeskchatcreaterequest.go
old mode 100755
new mode 100644
index 02ba08e69..531e00bc2
--- a/internal/sdk/pkg/models/shared/sourcezendeskchatcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezendeskchatcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceZendeskChatCreateRequest struct {
Configuration SourceZendeskChat `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZendeskChatCreateRequest) GetConfiguration() SourceZendeskChat {
+ if o == nil {
+ return SourceZendeskChat{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZendeskChatCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceZendeskChatCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZendeskChatCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceZendeskChatCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendeskchatputrequest.go b/internal/sdk/pkg/models/shared/sourcezendeskchatputrequest.go
old mode 100755
new mode 100644
index 75dfdffbd..8e8ddcc1d
--- a/internal/sdk/pkg/models/shared/sourcezendeskchatputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezendeskchatputrequest.go
@@ -7,3 +7,24 @@ type SourceZendeskChatPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZendeskChatPutRequest) GetConfiguration() SourceZendeskChatUpdate {
+ if o == nil {
+ return SourceZendeskChatUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZendeskChatPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZendeskChatPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendeskchatupdate.go b/internal/sdk/pkg/models/shared/sourcezendeskchatupdate.go
old mode 100755
new mode 100644
index 0aadd5f86..81bff5bcb
--- a/internal/sdk/pkg/models/shared/sourcezendeskchatupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcezendeskchatupdate.go
@@ -3,129 +3,189 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceZendeskChatUpdateAuthorizationMethodAccessTokenCredentials string
+type SourceZendeskChatUpdateSchemasCredentials string
const (
- SourceZendeskChatUpdateAuthorizationMethodAccessTokenCredentialsAccessToken SourceZendeskChatUpdateAuthorizationMethodAccessTokenCredentials = "access_token"
+ SourceZendeskChatUpdateSchemasCredentialsAccessToken SourceZendeskChatUpdateSchemasCredentials = "access_token"
)
-func (e SourceZendeskChatUpdateAuthorizationMethodAccessTokenCredentials) ToPointer() *SourceZendeskChatUpdateAuthorizationMethodAccessTokenCredentials {
+func (e SourceZendeskChatUpdateSchemasCredentials) ToPointer() *SourceZendeskChatUpdateSchemasCredentials {
return &e
}
-func (e *SourceZendeskChatUpdateAuthorizationMethodAccessTokenCredentials) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskChatUpdateSchemasCredentials) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "access_token":
- *e = SourceZendeskChatUpdateAuthorizationMethodAccessTokenCredentials(v)
+ *e = SourceZendeskChatUpdateSchemasCredentials(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskChatUpdateAuthorizationMethodAccessTokenCredentials: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskChatUpdateSchemasCredentials: %v", v)
}
}
-type SourceZendeskChatUpdateAuthorizationMethodAccessToken struct {
+type SourceZendeskChatUpdateAccessToken struct {
// The Access Token to make authenticated requests.
- AccessToken string `json:"access_token"`
- Credentials SourceZendeskChatUpdateAuthorizationMethodAccessTokenCredentials `json:"credentials"`
+ AccessToken string `json:"access_token"`
+ credentials SourceZendeskChatUpdateSchemasCredentials `const:"access_token" json:"credentials"`
}
-type SourceZendeskChatUpdateAuthorizationMethodOAuth20Credentials string
+func (s SourceZendeskChatUpdateAccessToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskChatUpdateAccessToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskChatUpdateAccessToken) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceZendeskChatUpdateAccessToken) GetCredentials() SourceZendeskChatUpdateSchemasCredentials {
+ return SourceZendeskChatUpdateSchemasCredentialsAccessToken
+}
+
+type SourceZendeskChatUpdateCredentials string
const (
- SourceZendeskChatUpdateAuthorizationMethodOAuth20CredentialsOauth20 SourceZendeskChatUpdateAuthorizationMethodOAuth20Credentials = "oauth2.0"
+ SourceZendeskChatUpdateCredentialsOauth20 SourceZendeskChatUpdateCredentials = "oauth2.0"
)
-func (e SourceZendeskChatUpdateAuthorizationMethodOAuth20Credentials) ToPointer() *SourceZendeskChatUpdateAuthorizationMethodOAuth20Credentials {
+func (e SourceZendeskChatUpdateCredentials) ToPointer() *SourceZendeskChatUpdateCredentials {
return &e
}
-func (e *SourceZendeskChatUpdateAuthorizationMethodOAuth20Credentials) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskChatUpdateCredentials) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceZendeskChatUpdateAuthorizationMethodOAuth20Credentials(v)
+ *e = SourceZendeskChatUpdateCredentials(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskChatUpdateAuthorizationMethodOAuth20Credentials: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskChatUpdateCredentials: %v", v)
}
}
-type SourceZendeskChatUpdateAuthorizationMethodOAuth20 struct {
+type SourceZendeskChatUpdateOAuth20 struct {
// Access Token for making authenticated requests.
AccessToken *string `json:"access_token,omitempty"`
// The Client ID of your OAuth application
ClientID *string `json:"client_id,omitempty"`
// The Client Secret of your OAuth application.
- ClientSecret *string `json:"client_secret,omitempty"`
- Credentials SourceZendeskChatUpdateAuthorizationMethodOAuth20Credentials `json:"credentials"`
+ ClientSecret *string `json:"client_secret,omitempty"`
+ credentials SourceZendeskChatUpdateCredentials `const:"oauth2.0" json:"credentials"`
// Refresh Token to obtain new Access Token, when it's expired.
RefreshToken *string `json:"refresh_token,omitempty"`
}
+func (s SourceZendeskChatUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskChatUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskChatUpdateOAuth20) GetAccessToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.AccessToken
+}
+
+func (o *SourceZendeskChatUpdateOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientID
+}
+
+func (o *SourceZendeskChatUpdateOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceZendeskChatUpdateOAuth20) GetCredentials() SourceZendeskChatUpdateCredentials {
+ return SourceZendeskChatUpdateCredentialsOauth20
+}
+
+func (o *SourceZendeskChatUpdateOAuth20) GetRefreshToken() *string {
+ if o == nil {
+ return nil
+ }
+ return o.RefreshToken
+}
+
type SourceZendeskChatUpdateAuthorizationMethodType string
const (
- SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAuthorizationMethodOAuth20 SourceZendeskChatUpdateAuthorizationMethodType = "source-zendesk-chat-update_Authorization Method_OAuth2.0"
- SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAuthorizationMethodAccessToken SourceZendeskChatUpdateAuthorizationMethodType = "source-zendesk-chat-update_Authorization Method_Access Token"
+ SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateOAuth20 SourceZendeskChatUpdateAuthorizationMethodType = "source-zendesk-chat-update_OAuth2.0"
+ SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAccessToken SourceZendeskChatUpdateAuthorizationMethodType = "source-zendesk-chat-update_Access Token"
)
type SourceZendeskChatUpdateAuthorizationMethod struct {
- SourceZendeskChatUpdateAuthorizationMethodOAuth20 *SourceZendeskChatUpdateAuthorizationMethodOAuth20
- SourceZendeskChatUpdateAuthorizationMethodAccessToken *SourceZendeskChatUpdateAuthorizationMethodAccessToken
+ SourceZendeskChatUpdateOAuth20 *SourceZendeskChatUpdateOAuth20
+ SourceZendeskChatUpdateAccessToken *SourceZendeskChatUpdateAccessToken
Type SourceZendeskChatUpdateAuthorizationMethodType
}
-func CreateSourceZendeskChatUpdateAuthorizationMethodSourceZendeskChatUpdateAuthorizationMethodOAuth20(sourceZendeskChatUpdateAuthorizationMethodOAuth20 SourceZendeskChatUpdateAuthorizationMethodOAuth20) SourceZendeskChatUpdateAuthorizationMethod {
- typ := SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAuthorizationMethodOAuth20
+func CreateSourceZendeskChatUpdateAuthorizationMethodSourceZendeskChatUpdateOAuth20(sourceZendeskChatUpdateOAuth20 SourceZendeskChatUpdateOAuth20) SourceZendeskChatUpdateAuthorizationMethod {
+ typ := SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateOAuth20
return SourceZendeskChatUpdateAuthorizationMethod{
- SourceZendeskChatUpdateAuthorizationMethodOAuth20: &sourceZendeskChatUpdateAuthorizationMethodOAuth20,
- Type: typ,
+ SourceZendeskChatUpdateOAuth20: &sourceZendeskChatUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceZendeskChatUpdateAuthorizationMethodSourceZendeskChatUpdateAuthorizationMethodAccessToken(sourceZendeskChatUpdateAuthorizationMethodAccessToken SourceZendeskChatUpdateAuthorizationMethodAccessToken) SourceZendeskChatUpdateAuthorizationMethod {
- typ := SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAuthorizationMethodAccessToken
+func CreateSourceZendeskChatUpdateAuthorizationMethodSourceZendeskChatUpdateAccessToken(sourceZendeskChatUpdateAccessToken SourceZendeskChatUpdateAccessToken) SourceZendeskChatUpdateAuthorizationMethod {
+ typ := SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAccessToken
return SourceZendeskChatUpdateAuthorizationMethod{
- SourceZendeskChatUpdateAuthorizationMethodAccessToken: &sourceZendeskChatUpdateAuthorizationMethodAccessToken,
- Type: typ,
+ SourceZendeskChatUpdateAccessToken: &sourceZendeskChatUpdateAccessToken,
+ Type: typ,
}
}
func (u *SourceZendeskChatUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceZendeskChatUpdateAuthorizationMethodAccessToken := new(SourceZendeskChatUpdateAuthorizationMethodAccessToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskChatUpdateAuthorizationMethodAccessToken); err == nil {
- u.SourceZendeskChatUpdateAuthorizationMethodAccessToken = sourceZendeskChatUpdateAuthorizationMethodAccessToken
- u.Type = SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAuthorizationMethodAccessToken
+
+ sourceZendeskChatUpdateAccessToken := new(SourceZendeskChatUpdateAccessToken)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskChatUpdateAccessToken, "", true, true); err == nil {
+ u.SourceZendeskChatUpdateAccessToken = sourceZendeskChatUpdateAccessToken
+ u.Type = SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAccessToken
return nil
}
- sourceZendeskChatUpdateAuthorizationMethodOAuth20 := new(SourceZendeskChatUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskChatUpdateAuthorizationMethodOAuth20); err == nil {
- u.SourceZendeskChatUpdateAuthorizationMethodOAuth20 = sourceZendeskChatUpdateAuthorizationMethodOAuth20
- u.Type = SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAuthorizationMethodOAuth20
+ sourceZendeskChatUpdateOAuth20 := new(SourceZendeskChatUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskChatUpdateOAuth20, "", true, true); err == nil {
+ u.SourceZendeskChatUpdateOAuth20 = sourceZendeskChatUpdateOAuth20
+ u.Type = SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateOAuth20
return nil
}
@@ -133,15 +193,15 @@ func (u *SourceZendeskChatUpdateAuthorizationMethod) UnmarshalJSON(data []byte)
}
func (u SourceZendeskChatUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceZendeskChatUpdateAuthorizationMethodAccessToken != nil {
- return json.Marshal(u.SourceZendeskChatUpdateAuthorizationMethodAccessToken)
+ if u.SourceZendeskChatUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceZendeskChatUpdateOAuth20, "", true)
}
- if u.SourceZendeskChatUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceZendeskChatUpdateAuthorizationMethodOAuth20)
+ if u.SourceZendeskChatUpdateAccessToken != nil {
+ return utils.MarshalJSON(u.SourceZendeskChatUpdateAccessToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceZendeskChatUpdate struct {
@@ -149,5 +209,37 @@ type SourceZendeskChatUpdate struct {
// The date from which you'd like to replicate data for Zendesk Chat API, in the format YYYY-MM-DDT00:00:00Z.
StartDate time.Time `json:"start_date"`
// Required if you access Zendesk Chat from a Zendesk Support subdomain.
- Subdomain *string `json:"subdomain,omitempty"`
+ Subdomain *string `default:"" json:"subdomain"`
+}
+
+func (s SourceZendeskChatUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskChatUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskChatUpdate) GetCredentials() *SourceZendeskChatUpdateAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceZendeskChatUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceZendeskChatUpdate) GetSubdomain() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Subdomain
}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksell.go b/internal/sdk/pkg/models/shared/sourcezendesksell.go
new file mode 100644
index 000000000..ffd5b3dec
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcezendesksell.go
@@ -0,0 +1,61 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+import (
+ "encoding/json"
+ "fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
+type ZendeskSell string
+
+const (
+ ZendeskSellZendeskSell ZendeskSell = "zendesk-sell"
+)
+
+func (e ZendeskSell) ToPointer() *ZendeskSell {
+ return &e
+}
+
+func (e *ZendeskSell) UnmarshalJSON(data []byte) error {
+ var v string
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ switch v {
+ case "zendesk-sell":
+ *e = ZendeskSell(v)
+ return nil
+ default:
+ return fmt.Errorf("invalid value for ZendeskSell: %v", v)
+ }
+}
+
+type SourceZendeskSell struct {
+ // The API token for authenticating to Zendesk Sell
+ APIToken string `json:"api_token"`
+ sourceType ZendeskSell `const:"zendesk-sell" json:"sourceType"`
+}
+
+func (s SourceZendeskSell) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskSell) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskSell) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceZendeskSell) GetSourceType() ZendeskSell {
+ return ZendeskSellZendeskSell
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksellcreaterequest.go b/internal/sdk/pkg/models/shared/sourcezendesksellcreaterequest.go
new file mode 100644
index 000000000..721c0191a
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcezendesksellcreaterequest.go
@@ -0,0 +1,49 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SourceZendeskSellCreateRequest struct {
+ Configuration SourceZendeskSell `json:"configuration"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
+ // Optional secretID obtained through the public API OAuth redirect flow.
+ SecretID *string `json:"secretId,omitempty"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *SourceZendeskSellCreateRequest) GetConfiguration() SourceZendeskSell {
+ if o == nil {
+ return SourceZendeskSell{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZendeskSellCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceZendeskSellCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZendeskSellCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceZendeskSellCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksellputrequest.go b/internal/sdk/pkg/models/shared/sourcezendesksellputrequest.go
new file mode 100644
index 000000000..f92da9c36
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcezendesksellputrequest.go
@@ -0,0 +1,30 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SourceZendeskSellPutRequest struct {
+ Configuration SourceZendeskSellUpdate `json:"configuration"`
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (o *SourceZendeskSellPutRequest) GetConfiguration() SourceZendeskSellUpdate {
+ if o == nil {
+ return SourceZendeskSellUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZendeskSellPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZendeskSellPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksellupdate.go b/internal/sdk/pkg/models/shared/sourcezendesksellupdate.go
new file mode 100644
index 000000000..a3585241a
--- /dev/null
+++ b/internal/sdk/pkg/models/shared/sourcezendesksellupdate.go
@@ -0,0 +1,15 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package shared
+
+type SourceZendeskSellUpdate struct {
+ // The API token for authenticating to Zendesk Sell
+ APIToken string `json:"api_token"`
+}
+
+func (o *SourceZendeskSellUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksunshine.go b/internal/sdk/pkg/models/shared/sourcezendesksunshine.go
old mode 100755
new mode 100644
index 5a06d7458..258e987b4
--- a/internal/sdk/pkg/models/shared/sourcezendesksunshine.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesksunshine.go
@@ -3,129 +3,189 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod string
+type SourceZendeskSunshineSchemasAuthMethod string
const (
- SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethodAPIToken SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod = "api_token"
+ SourceZendeskSunshineSchemasAuthMethodAPIToken SourceZendeskSunshineSchemasAuthMethod = "api_token"
)
-func (e SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod) ToPointer() *SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod {
+func (e SourceZendeskSunshineSchemasAuthMethod) ToPointer() *SourceZendeskSunshineSchemasAuthMethod {
return &e
}
-func (e *SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskSunshineSchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_token":
- *e = SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod(v)
+ *e = SourceZendeskSunshineSchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskSunshineSchemasAuthMethod: %v", v)
}
}
-type SourceZendeskSunshineAuthorizationMethodAPIToken struct {
+type SourceZendeskSunshineAPIToken struct {
// API Token. See the docs for information on how to generate this key.
- APIToken string `json:"api_token"`
- AuthMethod SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod `json:"auth_method"`
+ APIToken string `json:"api_token"`
+ authMethod *SourceZendeskSunshineSchemasAuthMethod `const:"api_token" json:"auth_method"`
// The user email for your Zendesk account
Email string `json:"email"`
}
-type SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethod string
+func (s SourceZendeskSunshineAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskSunshineAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskSunshineAPIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceZendeskSunshineAPIToken) GetAuthMethod() *SourceZendeskSunshineSchemasAuthMethod {
+ return SourceZendeskSunshineSchemasAuthMethodAPIToken.ToPointer()
+}
+
+func (o *SourceZendeskSunshineAPIToken) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+type SourceZendeskSunshineAuthMethod string
const (
- SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethodOauth20 SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethod = "oauth2.0"
+ SourceZendeskSunshineAuthMethodOauth20 SourceZendeskSunshineAuthMethod = "oauth2.0"
)
-func (e SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethod) ToPointer() *SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethod {
+func (e SourceZendeskSunshineAuthMethod) ToPointer() *SourceZendeskSunshineAuthMethod {
return &e
}
-func (e *SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskSunshineAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethod(v)
+ *e = SourceZendeskSunshineAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskSunshineAuthMethod: %v", v)
}
}
-type SourceZendeskSunshineAuthorizationMethodOAuth20 struct {
+type SourceZendeskSunshineOAuth20 struct {
// Long-term access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthMethod SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethod `json:"auth_method"`
+ AccessToken string `json:"access_token"`
+ authMethod *SourceZendeskSunshineAuthMethod `const:"oauth2.0" json:"auth_method"`
// The Client ID of your OAuth application.
ClientID string `json:"client_id"`
// The Client Secret of your OAuth application.
ClientSecret string `json:"client_secret"`
}
+func (s SourceZendeskSunshineOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskSunshineOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskSunshineOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceZendeskSunshineOAuth20) GetAuthMethod() *SourceZendeskSunshineAuthMethod {
+ return SourceZendeskSunshineAuthMethodOauth20.ToPointer()
+}
+
+func (o *SourceZendeskSunshineOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceZendeskSunshineOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
type SourceZendeskSunshineAuthorizationMethodType string
const (
- SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAuthorizationMethodOAuth20 SourceZendeskSunshineAuthorizationMethodType = "source-zendesk-sunshine_Authorization Method_OAuth2.0"
- SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAuthorizationMethodAPIToken SourceZendeskSunshineAuthorizationMethodType = "source-zendesk-sunshine_Authorization Method_API Token"
+ SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineOAuth20 SourceZendeskSunshineAuthorizationMethodType = "source-zendesk-sunshine_OAuth2.0"
+ SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAPIToken SourceZendeskSunshineAuthorizationMethodType = "source-zendesk-sunshine_API Token"
)
type SourceZendeskSunshineAuthorizationMethod struct {
- SourceZendeskSunshineAuthorizationMethodOAuth20 *SourceZendeskSunshineAuthorizationMethodOAuth20
- SourceZendeskSunshineAuthorizationMethodAPIToken *SourceZendeskSunshineAuthorizationMethodAPIToken
+ SourceZendeskSunshineOAuth20 *SourceZendeskSunshineOAuth20
+ SourceZendeskSunshineAPIToken *SourceZendeskSunshineAPIToken
Type SourceZendeskSunshineAuthorizationMethodType
}
-func CreateSourceZendeskSunshineAuthorizationMethodSourceZendeskSunshineAuthorizationMethodOAuth20(sourceZendeskSunshineAuthorizationMethodOAuth20 SourceZendeskSunshineAuthorizationMethodOAuth20) SourceZendeskSunshineAuthorizationMethod {
- typ := SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAuthorizationMethodOAuth20
+func CreateSourceZendeskSunshineAuthorizationMethodSourceZendeskSunshineOAuth20(sourceZendeskSunshineOAuth20 SourceZendeskSunshineOAuth20) SourceZendeskSunshineAuthorizationMethod {
+ typ := SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineOAuth20
return SourceZendeskSunshineAuthorizationMethod{
- SourceZendeskSunshineAuthorizationMethodOAuth20: &sourceZendeskSunshineAuthorizationMethodOAuth20,
- Type: typ,
+ SourceZendeskSunshineOAuth20: &sourceZendeskSunshineOAuth20,
+ Type: typ,
}
}
-func CreateSourceZendeskSunshineAuthorizationMethodSourceZendeskSunshineAuthorizationMethodAPIToken(sourceZendeskSunshineAuthorizationMethodAPIToken SourceZendeskSunshineAuthorizationMethodAPIToken) SourceZendeskSunshineAuthorizationMethod {
- typ := SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAuthorizationMethodAPIToken
+func CreateSourceZendeskSunshineAuthorizationMethodSourceZendeskSunshineAPIToken(sourceZendeskSunshineAPIToken SourceZendeskSunshineAPIToken) SourceZendeskSunshineAuthorizationMethod {
+ typ := SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAPIToken
return SourceZendeskSunshineAuthorizationMethod{
- SourceZendeskSunshineAuthorizationMethodAPIToken: &sourceZendeskSunshineAuthorizationMethodAPIToken,
- Type: typ,
+ SourceZendeskSunshineAPIToken: &sourceZendeskSunshineAPIToken,
+ Type: typ,
}
}
func (u *SourceZendeskSunshineAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceZendeskSunshineAuthorizationMethodAPIToken := new(SourceZendeskSunshineAuthorizationMethodAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskSunshineAuthorizationMethodAPIToken); err == nil {
- u.SourceZendeskSunshineAuthorizationMethodAPIToken = sourceZendeskSunshineAuthorizationMethodAPIToken
- u.Type = SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAuthorizationMethodAPIToken
+
+ sourceZendeskSunshineAPIToken := new(SourceZendeskSunshineAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskSunshineAPIToken, "", true, true); err == nil {
+ u.SourceZendeskSunshineAPIToken = sourceZendeskSunshineAPIToken
+ u.Type = SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAPIToken
return nil
}
- sourceZendeskSunshineAuthorizationMethodOAuth20 := new(SourceZendeskSunshineAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskSunshineAuthorizationMethodOAuth20); err == nil {
- u.SourceZendeskSunshineAuthorizationMethodOAuth20 = sourceZendeskSunshineAuthorizationMethodOAuth20
- u.Type = SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAuthorizationMethodOAuth20
+ sourceZendeskSunshineOAuth20 := new(SourceZendeskSunshineOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskSunshineOAuth20, "", true, true); err == nil {
+ u.SourceZendeskSunshineOAuth20 = sourceZendeskSunshineOAuth20
+ u.Type = SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineOAuth20
return nil
}
@@ -133,46 +193,82 @@ func (u *SourceZendeskSunshineAuthorizationMethod) UnmarshalJSON(data []byte) er
}
func (u SourceZendeskSunshineAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceZendeskSunshineAuthorizationMethodAPIToken != nil {
- return json.Marshal(u.SourceZendeskSunshineAuthorizationMethodAPIToken)
+ if u.SourceZendeskSunshineOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceZendeskSunshineOAuth20, "", true)
}
- if u.SourceZendeskSunshineAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceZendeskSunshineAuthorizationMethodOAuth20)
+ if u.SourceZendeskSunshineAPIToken != nil {
+ return utils.MarshalJSON(u.SourceZendeskSunshineAPIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceZendeskSunshineZendeskSunshine string
+type ZendeskSunshine string
const (
- SourceZendeskSunshineZendeskSunshineZendeskSunshine SourceZendeskSunshineZendeskSunshine = "zendesk-sunshine"
+ ZendeskSunshineZendeskSunshine ZendeskSunshine = "zendesk-sunshine"
)
-func (e SourceZendeskSunshineZendeskSunshine) ToPointer() *SourceZendeskSunshineZendeskSunshine {
+func (e ZendeskSunshine) ToPointer() *ZendeskSunshine {
return &e
}
-func (e *SourceZendeskSunshineZendeskSunshine) UnmarshalJSON(data []byte) error {
+func (e *ZendeskSunshine) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zendesk-sunshine":
- *e = SourceZendeskSunshineZendeskSunshine(v)
+ *e = ZendeskSunshine(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskSunshineZendeskSunshine: %v", v)
+ return fmt.Errorf("invalid value for ZendeskSunshine: %v", v)
}
}
type SourceZendeskSunshine struct {
Credentials *SourceZendeskSunshineAuthorizationMethod `json:"credentials,omitempty"`
- SourceType SourceZendeskSunshineZendeskSunshine `json:"sourceType"`
+ sourceType ZendeskSunshine `const:"zendesk-sunshine" json:"sourceType"`
// The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z.
StartDate time.Time `json:"start_date"`
// The subdomain for your Zendesk Account.
Subdomain string `json:"subdomain"`
}
+
+func (s SourceZendeskSunshine) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskSunshine) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskSunshine) GetCredentials() *SourceZendeskSunshineAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceZendeskSunshine) GetSourceType() ZendeskSunshine {
+ return ZendeskSunshineZendeskSunshine
+}
+
+func (o *SourceZendeskSunshine) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceZendeskSunshine) GetSubdomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Subdomain
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksunshinecreaterequest.go b/internal/sdk/pkg/models/shared/sourcezendesksunshinecreaterequest.go
old mode 100755
new mode 100644
index 4c095f044..ce89b1890
--- a/internal/sdk/pkg/models/shared/sourcezendesksunshinecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesksunshinecreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceZendeskSunshineCreateRequest struct {
Configuration SourceZendeskSunshine `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZendeskSunshineCreateRequest) GetConfiguration() SourceZendeskSunshine {
+ if o == nil {
+ return SourceZendeskSunshine{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZendeskSunshineCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceZendeskSunshineCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZendeskSunshineCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceZendeskSunshineCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksunshineputrequest.go b/internal/sdk/pkg/models/shared/sourcezendesksunshineputrequest.go
old mode 100755
new mode 100644
index 37e32c281..198d83ee6
--- a/internal/sdk/pkg/models/shared/sourcezendesksunshineputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesksunshineputrequest.go
@@ -7,3 +7,24 @@ type SourceZendeskSunshinePutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZendeskSunshinePutRequest) GetConfiguration() SourceZendeskSunshineUpdate {
+ if o == nil {
+ return SourceZendeskSunshineUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZendeskSunshinePutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZendeskSunshinePutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksunshineupdate.go b/internal/sdk/pkg/models/shared/sourcezendesksunshineupdate.go
old mode 100755
new mode 100644
index 1866a08d2..f6c8d28cb
--- a/internal/sdk/pkg/models/shared/sourcezendesksunshineupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesksunshineupdate.go
@@ -3,129 +3,189 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod string
+type SourceZendeskSunshineUpdateSchemasAuthMethod string
const (
- SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethodAPIToken SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod = "api_token"
+ SourceZendeskSunshineUpdateSchemasAuthMethodAPIToken SourceZendeskSunshineUpdateSchemasAuthMethod = "api_token"
)
-func (e SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod) ToPointer() *SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod {
+func (e SourceZendeskSunshineUpdateSchemasAuthMethod) ToPointer() *SourceZendeskSunshineUpdateSchemasAuthMethod {
return &e
}
-func (e *SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskSunshineUpdateSchemasAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_token":
- *e = SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod(v)
+ *e = SourceZendeskSunshineUpdateSchemasAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskSunshineUpdateSchemasAuthMethod: %v", v)
}
}
-type SourceZendeskSunshineUpdateAuthorizationMethodAPIToken struct {
+type SourceZendeskSunshineUpdateAPIToken struct {
// API Token. See the docs for information on how to generate this key.
- APIToken string `json:"api_token"`
- AuthMethod SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod `json:"auth_method"`
+ APIToken string `json:"api_token"`
+ authMethod *SourceZendeskSunshineUpdateSchemasAuthMethod `const:"api_token" json:"auth_method"`
// The user email for your Zendesk account
Email string `json:"email"`
}
-type SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethod string
+func (s SourceZendeskSunshineUpdateAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskSunshineUpdateAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskSunshineUpdateAPIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceZendeskSunshineUpdateAPIToken) GetAuthMethod() *SourceZendeskSunshineUpdateSchemasAuthMethod {
+ return SourceZendeskSunshineUpdateSchemasAuthMethodAPIToken.ToPointer()
+}
+
+func (o *SourceZendeskSunshineUpdateAPIToken) GetEmail() string {
+ if o == nil {
+ return ""
+ }
+ return o.Email
+}
+
+type SourceZendeskSunshineUpdateAuthMethod string
const (
- SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethodOauth20 SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethod = "oauth2.0"
+ SourceZendeskSunshineUpdateAuthMethodOauth20 SourceZendeskSunshineUpdateAuthMethod = "oauth2.0"
)
-func (e SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethod) ToPointer() *SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethod {
+func (e SourceZendeskSunshineUpdateAuthMethod) ToPointer() *SourceZendeskSunshineUpdateAuthMethod {
return &e
}
-func (e *SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethod) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskSunshineUpdateAuthMethod) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethod(v)
+ *e = SourceZendeskSunshineUpdateAuthMethod(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethod: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskSunshineUpdateAuthMethod: %v", v)
}
}
-type SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 struct {
+type SourceZendeskSunshineUpdateOAuth20 struct {
// Long-term access Token for making authenticated requests.
- AccessToken string `json:"access_token"`
- AuthMethod SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethod `json:"auth_method"`
+ AccessToken string `json:"access_token"`
+ authMethod *SourceZendeskSunshineUpdateAuthMethod `const:"oauth2.0" json:"auth_method"`
// The Client ID of your OAuth application.
ClientID string `json:"client_id"`
// The Client Secret of your OAuth application.
ClientSecret string `json:"client_secret"`
}
+func (s SourceZendeskSunshineUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskSunshineUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskSunshineUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.AccessToken
+}
+
+func (o *SourceZendeskSunshineUpdateOAuth20) GetAuthMethod() *SourceZendeskSunshineUpdateAuthMethod {
+ return SourceZendeskSunshineUpdateAuthMethodOauth20.ToPointer()
+}
+
+func (o *SourceZendeskSunshineUpdateOAuth20) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceZendeskSunshineUpdateOAuth20) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
type SourceZendeskSunshineUpdateAuthorizationMethodType string
const (
- SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAuthorizationMethodOAuth20 SourceZendeskSunshineUpdateAuthorizationMethodType = "source-zendesk-sunshine-update_Authorization Method_OAuth2.0"
- SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAuthorizationMethodAPIToken SourceZendeskSunshineUpdateAuthorizationMethodType = "source-zendesk-sunshine-update_Authorization Method_API Token"
+ SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateOAuth20 SourceZendeskSunshineUpdateAuthorizationMethodType = "source-zendesk-sunshine-update_OAuth2.0"
+ SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAPIToken SourceZendeskSunshineUpdateAuthorizationMethodType = "source-zendesk-sunshine-update_API Token"
)
type SourceZendeskSunshineUpdateAuthorizationMethod struct {
- SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 *SourceZendeskSunshineUpdateAuthorizationMethodOAuth20
- SourceZendeskSunshineUpdateAuthorizationMethodAPIToken *SourceZendeskSunshineUpdateAuthorizationMethodAPIToken
+ SourceZendeskSunshineUpdateOAuth20 *SourceZendeskSunshineUpdateOAuth20
+ SourceZendeskSunshineUpdateAPIToken *SourceZendeskSunshineUpdateAPIToken
Type SourceZendeskSunshineUpdateAuthorizationMethodType
}
-func CreateSourceZendeskSunshineUpdateAuthorizationMethodSourceZendeskSunshineUpdateAuthorizationMethodOAuth20(sourceZendeskSunshineUpdateAuthorizationMethodOAuth20 SourceZendeskSunshineUpdateAuthorizationMethodOAuth20) SourceZendeskSunshineUpdateAuthorizationMethod {
- typ := SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAuthorizationMethodOAuth20
+func CreateSourceZendeskSunshineUpdateAuthorizationMethodSourceZendeskSunshineUpdateOAuth20(sourceZendeskSunshineUpdateOAuth20 SourceZendeskSunshineUpdateOAuth20) SourceZendeskSunshineUpdateAuthorizationMethod {
+ typ := SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateOAuth20
return SourceZendeskSunshineUpdateAuthorizationMethod{
- SourceZendeskSunshineUpdateAuthorizationMethodOAuth20: &sourceZendeskSunshineUpdateAuthorizationMethodOAuth20,
- Type: typ,
+ SourceZendeskSunshineUpdateOAuth20: &sourceZendeskSunshineUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceZendeskSunshineUpdateAuthorizationMethodSourceZendeskSunshineUpdateAuthorizationMethodAPIToken(sourceZendeskSunshineUpdateAuthorizationMethodAPIToken SourceZendeskSunshineUpdateAuthorizationMethodAPIToken) SourceZendeskSunshineUpdateAuthorizationMethod {
- typ := SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAuthorizationMethodAPIToken
+func CreateSourceZendeskSunshineUpdateAuthorizationMethodSourceZendeskSunshineUpdateAPIToken(sourceZendeskSunshineUpdateAPIToken SourceZendeskSunshineUpdateAPIToken) SourceZendeskSunshineUpdateAuthorizationMethod {
+ typ := SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAPIToken
return SourceZendeskSunshineUpdateAuthorizationMethod{
- SourceZendeskSunshineUpdateAuthorizationMethodAPIToken: &sourceZendeskSunshineUpdateAuthorizationMethodAPIToken,
- Type: typ,
+ SourceZendeskSunshineUpdateAPIToken: &sourceZendeskSunshineUpdateAPIToken,
+ Type: typ,
}
}
func (u *SourceZendeskSunshineUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceZendeskSunshineUpdateAuthorizationMethodAPIToken := new(SourceZendeskSunshineUpdateAuthorizationMethodAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskSunshineUpdateAuthorizationMethodAPIToken); err == nil {
- u.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken = sourceZendeskSunshineUpdateAuthorizationMethodAPIToken
- u.Type = SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAuthorizationMethodAPIToken
+
+ sourceZendeskSunshineUpdateAPIToken := new(SourceZendeskSunshineUpdateAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskSunshineUpdateAPIToken, "", true, true); err == nil {
+ u.SourceZendeskSunshineUpdateAPIToken = sourceZendeskSunshineUpdateAPIToken
+ u.Type = SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAPIToken
return nil
}
- sourceZendeskSunshineUpdateAuthorizationMethodOAuth20 := new(SourceZendeskSunshineUpdateAuthorizationMethodOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskSunshineUpdateAuthorizationMethodOAuth20); err == nil {
- u.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 = sourceZendeskSunshineUpdateAuthorizationMethodOAuth20
- u.Type = SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAuthorizationMethodOAuth20
+ sourceZendeskSunshineUpdateOAuth20 := new(SourceZendeskSunshineUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskSunshineUpdateOAuth20, "", true, true); err == nil {
+ u.SourceZendeskSunshineUpdateOAuth20 = sourceZendeskSunshineUpdateOAuth20
+ u.Type = SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateOAuth20
return nil
}
@@ -133,15 +193,15 @@ func (u *SourceZendeskSunshineUpdateAuthorizationMethod) UnmarshalJSON(data []by
}
func (u SourceZendeskSunshineUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) {
- if u.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken != nil {
- return json.Marshal(u.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken)
+ if u.SourceZendeskSunshineUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceZendeskSunshineUpdateOAuth20, "", true)
}
- if u.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 != nil {
- return json.Marshal(u.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20)
+ if u.SourceZendeskSunshineUpdateAPIToken != nil {
+ return utils.MarshalJSON(u.SourceZendeskSunshineUpdateAPIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceZendeskSunshineUpdate struct {
@@ -151,3 +211,35 @@ type SourceZendeskSunshineUpdate struct {
// The subdomain for your Zendesk Account.
Subdomain string `json:"subdomain"`
}
+
+func (s SourceZendeskSunshineUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskSunshineUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskSunshineUpdate) GetCredentials() *SourceZendeskSunshineUpdateAuthorizationMethod {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceZendeskSunshineUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceZendeskSunshineUpdate) GetSubdomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Subdomain
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksupport.go b/internal/sdk/pkg/models/shared/sourcezendesksupport.go
old mode 100755
new mode 100644
index e6712241c..a66e429e7
--- a/internal/sdk/pkg/models/shared/sourcezendesksupport.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesksupport.go
@@ -3,228 +3,207 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceZendeskSupportAuthenticationAPITokenCredentials string
+type SourceZendeskSupportSchemasCredentials string
const (
- SourceZendeskSupportAuthenticationAPITokenCredentialsAPIToken SourceZendeskSupportAuthenticationAPITokenCredentials = "api_token"
+ SourceZendeskSupportSchemasCredentialsAPIToken SourceZendeskSupportSchemasCredentials = "api_token"
)
-func (e SourceZendeskSupportAuthenticationAPITokenCredentials) ToPointer() *SourceZendeskSupportAuthenticationAPITokenCredentials {
+func (e SourceZendeskSupportSchemasCredentials) ToPointer() *SourceZendeskSupportSchemasCredentials {
return &e
}
-func (e *SourceZendeskSupportAuthenticationAPITokenCredentials) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskSupportSchemasCredentials) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_token":
- *e = SourceZendeskSupportAuthenticationAPITokenCredentials(v)
+ *e = SourceZendeskSupportSchemasCredentials(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskSupportAuthenticationAPITokenCredentials: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskSupportSchemasCredentials: %v", v)
}
}
-// SourceZendeskSupportAuthenticationAPIToken - Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users.
-type SourceZendeskSupportAuthenticationAPIToken struct {
+// SourceZendeskSupportAPIToken - Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users.
+type SourceZendeskSupportAPIToken struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The value of the API token generated. See our full documentation for more information on generating this token.
- APIToken string `json:"api_token"`
- Credentials *SourceZendeskSupportAuthenticationAPITokenCredentials `json:"credentials,omitempty"`
+ APIToken string `json:"api_token"`
+ credentials *SourceZendeskSupportSchemasCredentials `const:"api_token" json:"credentials,omitempty"`
// The user email for your Zendesk account.
Email string `json:"email"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceZendeskSupportAuthenticationAPIToken SourceZendeskSupportAuthenticationAPIToken
-
-func (c *SourceZendeskSupportAuthenticationAPIToken) UnmarshalJSON(bs []byte) error {
- data := _SourceZendeskSupportAuthenticationAPIToken{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceZendeskSupportAuthenticationAPIToken(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceZendeskSupportAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceZendeskSupportAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "api_token")
- delete(additionalFields, "credentials")
- delete(additionalFields, "email")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceZendeskSupportAuthenticationAPIToken) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceZendeskSupportAuthenticationAPIToken(c))
- if err != nil {
- return nil, err
+func (o *SourceZendeskSupportAPIToken) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskSupportAPIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
}
+ return o.APIToken
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
+func (o *SourceZendeskSupportAPIToken) GetCredentials() *SourceZendeskSupportSchemasCredentials {
+ return SourceZendeskSupportSchemasCredentialsAPIToken.ToPointer()
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskSupportAPIToken) GetEmail() string {
+ if o == nil {
+ return ""
}
-
- return json.Marshal(out)
+ return o.Email
}
-type SourceZendeskSupportAuthenticationOAuth20Credentials string
+type SourceZendeskSupportCredentials string
const (
- SourceZendeskSupportAuthenticationOAuth20CredentialsOauth20 SourceZendeskSupportAuthenticationOAuth20Credentials = "oauth2.0"
+ SourceZendeskSupportCredentialsOauth20 SourceZendeskSupportCredentials = "oauth2.0"
)
-func (e SourceZendeskSupportAuthenticationOAuth20Credentials) ToPointer() *SourceZendeskSupportAuthenticationOAuth20Credentials {
+func (e SourceZendeskSupportCredentials) ToPointer() *SourceZendeskSupportCredentials {
return &e
}
-func (e *SourceZendeskSupportAuthenticationOAuth20Credentials) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskSupportCredentials) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceZendeskSupportAuthenticationOAuth20Credentials(v)
+ *e = SourceZendeskSupportCredentials(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskSupportAuthenticationOAuth20Credentials: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskSupportCredentials: %v", v)
}
}
-// SourceZendeskSupportAuthenticationOAuth20 - Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users.
-type SourceZendeskSupportAuthenticationOAuth20 struct {
+// SourceZendeskSupportOAuth20 - Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users.
+type SourceZendeskSupportOAuth20 struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The OAuth access token. See the Zendesk docs for more information on generating this token.
AccessToken string `json:"access_token"`
// The OAuth client's ID. See this guide for more information.
ClientID *string `json:"client_id,omitempty"`
// The OAuth client secret. See this guide for more information.
- ClientSecret *string `json:"client_secret,omitempty"`
- Credentials *SourceZendeskSupportAuthenticationOAuth20Credentials `json:"credentials,omitempty"`
-
- AdditionalProperties interface{} `json:"-"`
+ ClientSecret *string `json:"client_secret,omitempty"`
+ credentials *SourceZendeskSupportCredentials `const:"oauth2.0" json:"credentials,omitempty"`
}
-type _SourceZendeskSupportAuthenticationOAuth20 SourceZendeskSupportAuthenticationOAuth20
-
-func (c *SourceZendeskSupportAuthenticationOAuth20) UnmarshalJSON(bs []byte) error {
- data := _SourceZendeskSupportAuthenticationOAuth20{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceZendeskSupportAuthenticationOAuth20(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceZendeskSupportOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceZendeskSupportOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "access_token")
- delete(additionalFields, "client_id")
- delete(additionalFields, "client_secret")
- delete(additionalFields, "credentials")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceZendeskSupportAuthenticationOAuth20) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceZendeskSupportAuthenticationOAuth20(c))
- if err != nil {
- return nil, err
+func (o *SourceZendeskSupportOAuth20) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskSupportOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
}
+ return o.AccessToken
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceZendeskSupportOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientID
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskSupportOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientSecret
+}
- return json.Marshal(out)
+func (o *SourceZendeskSupportOAuth20) GetCredentials() *SourceZendeskSupportCredentials {
+ return SourceZendeskSupportCredentialsOauth20.ToPointer()
}
type SourceZendeskSupportAuthenticationType string
const (
- SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAuthenticationOAuth20 SourceZendeskSupportAuthenticationType = "source-zendesk-support_Authentication_OAuth2.0"
- SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAuthenticationAPIToken SourceZendeskSupportAuthenticationType = "source-zendesk-support_Authentication_API Token"
+ SourceZendeskSupportAuthenticationTypeSourceZendeskSupportOAuth20 SourceZendeskSupportAuthenticationType = "source-zendesk-support_OAuth2.0"
+ SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAPIToken SourceZendeskSupportAuthenticationType = "source-zendesk-support_API Token"
)
type SourceZendeskSupportAuthentication struct {
- SourceZendeskSupportAuthenticationOAuth20 *SourceZendeskSupportAuthenticationOAuth20
- SourceZendeskSupportAuthenticationAPIToken *SourceZendeskSupportAuthenticationAPIToken
+ SourceZendeskSupportOAuth20 *SourceZendeskSupportOAuth20
+ SourceZendeskSupportAPIToken *SourceZendeskSupportAPIToken
Type SourceZendeskSupportAuthenticationType
}
-func CreateSourceZendeskSupportAuthenticationSourceZendeskSupportAuthenticationOAuth20(sourceZendeskSupportAuthenticationOAuth20 SourceZendeskSupportAuthenticationOAuth20) SourceZendeskSupportAuthentication {
- typ := SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAuthenticationOAuth20
+func CreateSourceZendeskSupportAuthenticationSourceZendeskSupportOAuth20(sourceZendeskSupportOAuth20 SourceZendeskSupportOAuth20) SourceZendeskSupportAuthentication {
+ typ := SourceZendeskSupportAuthenticationTypeSourceZendeskSupportOAuth20
return SourceZendeskSupportAuthentication{
- SourceZendeskSupportAuthenticationOAuth20: &sourceZendeskSupportAuthenticationOAuth20,
- Type: typ,
+ SourceZendeskSupportOAuth20: &sourceZendeskSupportOAuth20,
+ Type: typ,
}
}
-func CreateSourceZendeskSupportAuthenticationSourceZendeskSupportAuthenticationAPIToken(sourceZendeskSupportAuthenticationAPIToken SourceZendeskSupportAuthenticationAPIToken) SourceZendeskSupportAuthentication {
- typ := SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAuthenticationAPIToken
+func CreateSourceZendeskSupportAuthenticationSourceZendeskSupportAPIToken(sourceZendeskSupportAPIToken SourceZendeskSupportAPIToken) SourceZendeskSupportAuthentication {
+ typ := SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAPIToken
return SourceZendeskSupportAuthentication{
- SourceZendeskSupportAuthenticationAPIToken: &sourceZendeskSupportAuthenticationAPIToken,
- Type: typ,
+ SourceZendeskSupportAPIToken: &sourceZendeskSupportAPIToken,
+ Type: typ,
}
}
func (u *SourceZendeskSupportAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceZendeskSupportAuthenticationAPIToken := new(SourceZendeskSupportAuthenticationAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskSupportAuthenticationAPIToken); err == nil {
- u.SourceZendeskSupportAuthenticationAPIToken = sourceZendeskSupportAuthenticationAPIToken
- u.Type = SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAuthenticationAPIToken
+
+ sourceZendeskSupportAPIToken := new(SourceZendeskSupportAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskSupportAPIToken, "", true, true); err == nil {
+ u.SourceZendeskSupportAPIToken = sourceZendeskSupportAPIToken
+ u.Type = SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAPIToken
return nil
}
- sourceZendeskSupportAuthenticationOAuth20 := new(SourceZendeskSupportAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskSupportAuthenticationOAuth20); err == nil {
- u.SourceZendeskSupportAuthenticationOAuth20 = sourceZendeskSupportAuthenticationOAuth20
- u.Type = SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAuthenticationOAuth20
+ sourceZendeskSupportOAuth20 := new(SourceZendeskSupportOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskSupportOAuth20, "", true, true); err == nil {
+ u.SourceZendeskSupportOAuth20 = sourceZendeskSupportOAuth20
+ u.Type = SourceZendeskSupportAuthenticationTypeSourceZendeskSupportOAuth20
return nil
}
@@ -232,38 +211,38 @@ func (u *SourceZendeskSupportAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceZendeskSupportAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceZendeskSupportAuthenticationAPIToken != nil {
- return json.Marshal(u.SourceZendeskSupportAuthenticationAPIToken)
+ if u.SourceZendeskSupportOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceZendeskSupportOAuth20, "", true)
}
- if u.SourceZendeskSupportAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceZendeskSupportAuthenticationOAuth20)
+ if u.SourceZendeskSupportAPIToken != nil {
+ return utils.MarshalJSON(u.SourceZendeskSupportAPIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceZendeskSupportZendeskSupport string
+type ZendeskSupport string
const (
- SourceZendeskSupportZendeskSupportZendeskSupport SourceZendeskSupportZendeskSupport = "zendesk-support"
+ ZendeskSupportZendeskSupport ZendeskSupport = "zendesk-support"
)
-func (e SourceZendeskSupportZendeskSupport) ToPointer() *SourceZendeskSupportZendeskSupport {
+func (e ZendeskSupport) ToPointer() *ZendeskSupport {
return &e
}
-func (e *SourceZendeskSupportZendeskSupport) UnmarshalJSON(data []byte) error {
+func (e *ZendeskSupport) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zendesk-support":
- *e = SourceZendeskSupportZendeskSupport(v)
+ *e = ZendeskSupport(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskSupportZendeskSupport: %v", v)
+ return fmt.Errorf("invalid value for ZendeskSupport: %v", v)
}
}
@@ -271,10 +250,53 @@ type SourceZendeskSupport struct {
// Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users.
Credentials *SourceZendeskSupportAuthentication `json:"credentials,omitempty"`
// Makes each stream read a single page of data.
- IgnorePagination *bool `json:"ignore_pagination,omitempty"`
- SourceType SourceZendeskSupportZendeskSupport `json:"sourceType"`
+ IgnorePagination *bool `default:"false" json:"ignore_pagination"`
+ sourceType ZendeskSupport `const:"zendesk-support" json:"sourceType"`
// The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
// This is your unique Zendesk subdomain that can be found in your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/, MY_SUBDOMAIN is the value of your subdomain.
Subdomain string `json:"subdomain"`
}
+
+func (s SourceZendeskSupport) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskSupport) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskSupport) GetCredentials() *SourceZendeskSupportAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceZendeskSupport) GetIgnorePagination() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IgnorePagination
+}
+
+func (o *SourceZendeskSupport) GetSourceType() ZendeskSupport {
+ return ZendeskSupportZendeskSupport
+}
+
+func (o *SourceZendeskSupport) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceZendeskSupport) GetSubdomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Subdomain
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksupportcreaterequest.go b/internal/sdk/pkg/models/shared/sourcezendesksupportcreaterequest.go
old mode 100755
new mode 100644
index 56e4857ae..3f4acf380
--- a/internal/sdk/pkg/models/shared/sourcezendesksupportcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesksupportcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceZendeskSupportCreateRequest struct {
Configuration SourceZendeskSupport `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZendeskSupportCreateRequest) GetConfiguration() SourceZendeskSupport {
+ if o == nil {
+ return SourceZendeskSupport{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZendeskSupportCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceZendeskSupportCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZendeskSupportCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceZendeskSupportCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksupportputrequest.go b/internal/sdk/pkg/models/shared/sourcezendesksupportputrequest.go
old mode 100755
new mode 100644
index c0962a378..9fe434312
--- a/internal/sdk/pkg/models/shared/sourcezendesksupportputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesksupportputrequest.go
@@ -7,3 +7,24 @@ type SourceZendeskSupportPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZendeskSupportPutRequest) GetConfiguration() SourceZendeskSupportUpdate {
+ if o == nil {
+ return SourceZendeskSupportUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZendeskSupportPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZendeskSupportPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesksupportupdate.go b/internal/sdk/pkg/models/shared/sourcezendesksupportupdate.go
old mode 100755
new mode 100644
index 0651cb035..1e29497c6
--- a/internal/sdk/pkg/models/shared/sourcezendesksupportupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesksupportupdate.go
@@ -3,228 +3,207 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceZendeskSupportUpdateAuthenticationAPITokenCredentials string
+type SourceZendeskSupportUpdateSchemasCredentials string
const (
- SourceZendeskSupportUpdateAuthenticationAPITokenCredentialsAPIToken SourceZendeskSupportUpdateAuthenticationAPITokenCredentials = "api_token"
+ SourceZendeskSupportUpdateSchemasCredentialsAPIToken SourceZendeskSupportUpdateSchemasCredentials = "api_token"
)
-func (e SourceZendeskSupportUpdateAuthenticationAPITokenCredentials) ToPointer() *SourceZendeskSupportUpdateAuthenticationAPITokenCredentials {
+func (e SourceZendeskSupportUpdateSchemasCredentials) ToPointer() *SourceZendeskSupportUpdateSchemasCredentials {
return &e
}
-func (e *SourceZendeskSupportUpdateAuthenticationAPITokenCredentials) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskSupportUpdateSchemasCredentials) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_token":
- *e = SourceZendeskSupportUpdateAuthenticationAPITokenCredentials(v)
+ *e = SourceZendeskSupportUpdateSchemasCredentials(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskSupportUpdateAuthenticationAPITokenCredentials: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskSupportUpdateSchemasCredentials: %v", v)
}
}
-// SourceZendeskSupportUpdateAuthenticationAPIToken - Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users.
-type SourceZendeskSupportUpdateAuthenticationAPIToken struct {
+// SourceZendeskSupportUpdateAPIToken - Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users.
+type SourceZendeskSupportUpdateAPIToken struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The value of the API token generated. See our full documentation for more information on generating this token.
- APIToken string `json:"api_token"`
- Credentials *SourceZendeskSupportUpdateAuthenticationAPITokenCredentials `json:"credentials,omitempty"`
+ APIToken string `json:"api_token"`
+ credentials *SourceZendeskSupportUpdateSchemasCredentials `const:"api_token" json:"credentials,omitempty"`
// The user email for your Zendesk account.
Email string `json:"email"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceZendeskSupportUpdateAuthenticationAPIToken SourceZendeskSupportUpdateAuthenticationAPIToken
-
-func (c *SourceZendeskSupportUpdateAuthenticationAPIToken) UnmarshalJSON(bs []byte) error {
- data := _SourceZendeskSupportUpdateAuthenticationAPIToken{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceZendeskSupportUpdateAuthenticationAPIToken(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceZendeskSupportUpdateAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceZendeskSupportUpdateAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "api_token")
- delete(additionalFields, "credentials")
- delete(additionalFields, "email")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceZendeskSupportUpdateAuthenticationAPIToken) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceZendeskSupportUpdateAuthenticationAPIToken(c))
- if err != nil {
- return nil, err
+func (o *SourceZendeskSupportUpdateAPIToken) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskSupportUpdateAPIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
}
+ return o.APIToken
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
+func (o *SourceZendeskSupportUpdateAPIToken) GetCredentials() *SourceZendeskSupportUpdateSchemasCredentials {
+ return SourceZendeskSupportUpdateSchemasCredentialsAPIToken.ToPointer()
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskSupportUpdateAPIToken) GetEmail() string {
+ if o == nil {
+ return ""
}
-
- return json.Marshal(out)
+ return o.Email
}
-type SourceZendeskSupportUpdateAuthenticationOAuth20Credentials string
+type SourceZendeskSupportUpdateCredentials string
const (
- SourceZendeskSupportUpdateAuthenticationOAuth20CredentialsOauth20 SourceZendeskSupportUpdateAuthenticationOAuth20Credentials = "oauth2.0"
+ SourceZendeskSupportUpdateCredentialsOauth20 SourceZendeskSupportUpdateCredentials = "oauth2.0"
)
-func (e SourceZendeskSupportUpdateAuthenticationOAuth20Credentials) ToPointer() *SourceZendeskSupportUpdateAuthenticationOAuth20Credentials {
+func (e SourceZendeskSupportUpdateCredentials) ToPointer() *SourceZendeskSupportUpdateCredentials {
return &e
}
-func (e *SourceZendeskSupportUpdateAuthenticationOAuth20Credentials) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskSupportUpdateCredentials) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceZendeskSupportUpdateAuthenticationOAuth20Credentials(v)
+ *e = SourceZendeskSupportUpdateCredentials(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskSupportUpdateAuthenticationOAuth20Credentials: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskSupportUpdateCredentials: %v", v)
}
}
-// SourceZendeskSupportUpdateAuthenticationOAuth20 - Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users.
-type SourceZendeskSupportUpdateAuthenticationOAuth20 struct {
+// SourceZendeskSupportUpdateOAuth20 - Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users.
+type SourceZendeskSupportUpdateOAuth20 struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The OAuth access token. See the Zendesk docs for more information on generating this token.
AccessToken string `json:"access_token"`
// The OAuth client's ID. See this guide for more information.
ClientID *string `json:"client_id,omitempty"`
// The OAuth client secret. See this guide for more information.
- ClientSecret *string `json:"client_secret,omitempty"`
- Credentials *SourceZendeskSupportUpdateAuthenticationOAuth20Credentials `json:"credentials,omitempty"`
-
- AdditionalProperties interface{} `json:"-"`
+ ClientSecret *string `json:"client_secret,omitempty"`
+ credentials *SourceZendeskSupportUpdateCredentials `const:"oauth2.0" json:"credentials,omitempty"`
}
-type _SourceZendeskSupportUpdateAuthenticationOAuth20 SourceZendeskSupportUpdateAuthenticationOAuth20
-func (c *SourceZendeskSupportUpdateAuthenticationOAuth20) UnmarshalJSON(bs []byte) error {
- data := _SourceZendeskSupportUpdateAuthenticationOAuth20{}
-
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceZendeskSupportUpdateAuthenticationOAuth20(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceZendeskSupportUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceZendeskSupportUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "access_token")
- delete(additionalFields, "client_id")
- delete(additionalFields, "client_secret")
- delete(additionalFields, "credentials")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceZendeskSupportUpdateAuthenticationOAuth20) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceZendeskSupportUpdateAuthenticationOAuth20(c))
- if err != nil {
- return nil, err
+func (o *SourceZendeskSupportUpdateOAuth20) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskSupportUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
}
+ return o.AccessToken
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
+func (o *SourceZendeskSupportUpdateOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientID
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskSupportUpdateOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientSecret
+}
- return json.Marshal(out)
+func (o *SourceZendeskSupportUpdateOAuth20) GetCredentials() *SourceZendeskSupportUpdateCredentials {
+ return SourceZendeskSupportUpdateCredentialsOauth20.ToPointer()
}
type SourceZendeskSupportUpdateAuthenticationType string
const (
- SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAuthenticationOAuth20 SourceZendeskSupportUpdateAuthenticationType = "source-zendesk-support-update_Authentication_OAuth2.0"
- SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAuthenticationAPIToken SourceZendeskSupportUpdateAuthenticationType = "source-zendesk-support-update_Authentication_API Token"
+ SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateOAuth20 SourceZendeskSupportUpdateAuthenticationType = "source-zendesk-support-update_OAuth2.0"
+ SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAPIToken SourceZendeskSupportUpdateAuthenticationType = "source-zendesk-support-update_API Token"
)
type SourceZendeskSupportUpdateAuthentication struct {
- SourceZendeskSupportUpdateAuthenticationOAuth20 *SourceZendeskSupportUpdateAuthenticationOAuth20
- SourceZendeskSupportUpdateAuthenticationAPIToken *SourceZendeskSupportUpdateAuthenticationAPIToken
+ SourceZendeskSupportUpdateOAuth20 *SourceZendeskSupportUpdateOAuth20
+ SourceZendeskSupportUpdateAPIToken *SourceZendeskSupportUpdateAPIToken
Type SourceZendeskSupportUpdateAuthenticationType
}
-func CreateSourceZendeskSupportUpdateAuthenticationSourceZendeskSupportUpdateAuthenticationOAuth20(sourceZendeskSupportUpdateAuthenticationOAuth20 SourceZendeskSupportUpdateAuthenticationOAuth20) SourceZendeskSupportUpdateAuthentication {
- typ := SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAuthenticationOAuth20
+func CreateSourceZendeskSupportUpdateAuthenticationSourceZendeskSupportUpdateOAuth20(sourceZendeskSupportUpdateOAuth20 SourceZendeskSupportUpdateOAuth20) SourceZendeskSupportUpdateAuthentication {
+ typ := SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateOAuth20
return SourceZendeskSupportUpdateAuthentication{
- SourceZendeskSupportUpdateAuthenticationOAuth20: &sourceZendeskSupportUpdateAuthenticationOAuth20,
- Type: typ,
+ SourceZendeskSupportUpdateOAuth20: &sourceZendeskSupportUpdateOAuth20,
+ Type: typ,
}
}
-func CreateSourceZendeskSupportUpdateAuthenticationSourceZendeskSupportUpdateAuthenticationAPIToken(sourceZendeskSupportUpdateAuthenticationAPIToken SourceZendeskSupportUpdateAuthenticationAPIToken) SourceZendeskSupportUpdateAuthentication {
- typ := SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAuthenticationAPIToken
+func CreateSourceZendeskSupportUpdateAuthenticationSourceZendeskSupportUpdateAPIToken(sourceZendeskSupportUpdateAPIToken SourceZendeskSupportUpdateAPIToken) SourceZendeskSupportUpdateAuthentication {
+ typ := SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAPIToken
return SourceZendeskSupportUpdateAuthentication{
- SourceZendeskSupportUpdateAuthenticationAPIToken: &sourceZendeskSupportUpdateAuthenticationAPIToken,
- Type: typ,
+ SourceZendeskSupportUpdateAPIToken: &sourceZendeskSupportUpdateAPIToken,
+ Type: typ,
}
}
func (u *SourceZendeskSupportUpdateAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceZendeskSupportUpdateAuthenticationAPIToken := new(SourceZendeskSupportUpdateAuthenticationAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskSupportUpdateAuthenticationAPIToken); err == nil {
- u.SourceZendeskSupportUpdateAuthenticationAPIToken = sourceZendeskSupportUpdateAuthenticationAPIToken
- u.Type = SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAuthenticationAPIToken
+
+ sourceZendeskSupportUpdateAPIToken := new(SourceZendeskSupportUpdateAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskSupportUpdateAPIToken, "", true, true); err == nil {
+ u.SourceZendeskSupportUpdateAPIToken = sourceZendeskSupportUpdateAPIToken
+ u.Type = SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAPIToken
return nil
}
- sourceZendeskSupportUpdateAuthenticationOAuth20 := new(SourceZendeskSupportUpdateAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskSupportUpdateAuthenticationOAuth20); err == nil {
- u.SourceZendeskSupportUpdateAuthenticationOAuth20 = sourceZendeskSupportUpdateAuthenticationOAuth20
- u.Type = SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAuthenticationOAuth20
+ sourceZendeskSupportUpdateOAuth20 := new(SourceZendeskSupportUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskSupportUpdateOAuth20, "", true, true); err == nil {
+ u.SourceZendeskSupportUpdateOAuth20 = sourceZendeskSupportUpdateOAuth20
+ u.Type = SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateOAuth20
return nil
}
@@ -232,24 +211,63 @@ func (u *SourceZendeskSupportUpdateAuthentication) UnmarshalJSON(data []byte) er
}
func (u SourceZendeskSupportUpdateAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceZendeskSupportUpdateAuthenticationAPIToken != nil {
- return json.Marshal(u.SourceZendeskSupportUpdateAuthenticationAPIToken)
+ if u.SourceZendeskSupportUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceZendeskSupportUpdateOAuth20, "", true)
}
- if u.SourceZendeskSupportUpdateAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceZendeskSupportUpdateAuthenticationOAuth20)
+ if u.SourceZendeskSupportUpdateAPIToken != nil {
+ return utils.MarshalJSON(u.SourceZendeskSupportUpdateAPIToken, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceZendeskSupportUpdate struct {
// Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users.
Credentials *SourceZendeskSupportUpdateAuthentication `json:"credentials,omitempty"`
// Makes each stream read a single page of data.
- IgnorePagination *bool `json:"ignore_pagination,omitempty"`
+ IgnorePagination *bool `default:"false" json:"ignore_pagination"`
// The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate *time.Time `json:"start_date,omitempty"`
// This is your unique Zendesk subdomain that can be found in your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/, MY_SUBDOMAIN is the value of your subdomain.
Subdomain string `json:"subdomain"`
}
+
+func (s SourceZendeskSupportUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskSupportUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskSupportUpdate) GetCredentials() *SourceZendeskSupportUpdateAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceZendeskSupportUpdate) GetIgnorePagination() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.IgnorePagination
+}
+
+func (o *SourceZendeskSupportUpdate) GetStartDate() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDate
+}
+
+func (o *SourceZendeskSupportUpdate) GetSubdomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Subdomain
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesktalk.go b/internal/sdk/pkg/models/shared/sourcezendesktalk.go
old mode 100755
new mode 100644
index 149275ce1..2789030df
--- a/internal/sdk/pkg/models/shared/sourcezendesktalk.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesktalk.go
@@ -3,228 +3,207 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceZendeskTalkAuthenticationOAuth20AuthType string
+type SourceZendeskTalkSchemasAuthType string
const (
- SourceZendeskTalkAuthenticationOAuth20AuthTypeOauth20 SourceZendeskTalkAuthenticationOAuth20AuthType = "oauth2.0"
+ SourceZendeskTalkSchemasAuthTypeOauth20 SourceZendeskTalkSchemasAuthType = "oauth2.0"
)
-func (e SourceZendeskTalkAuthenticationOAuth20AuthType) ToPointer() *SourceZendeskTalkAuthenticationOAuth20AuthType {
+func (e SourceZendeskTalkSchemasAuthType) ToPointer() *SourceZendeskTalkSchemasAuthType {
return &e
}
-func (e *SourceZendeskTalkAuthenticationOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskTalkSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceZendeskTalkAuthenticationOAuth20AuthType(v)
+ *e = SourceZendeskTalkSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskTalkAuthenticationOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskTalkSchemasAuthType: %v", v)
}
}
-// SourceZendeskTalkAuthenticationOAuth20 - Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.
-type SourceZendeskTalkAuthenticationOAuth20 struct {
+// SourceZendeskTalkOAuth20 - Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.
+type SourceZendeskTalkOAuth20 struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The value of the API token generated. See the docs for more information.
- AccessToken string `json:"access_token"`
- AuthType *SourceZendeskTalkAuthenticationOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceZendeskTalkSchemasAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// Client ID
ClientID *string `json:"client_id,omitempty"`
// Client Secret
ClientSecret *string `json:"client_secret,omitempty"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceZendeskTalkAuthenticationOAuth20 SourceZendeskTalkAuthenticationOAuth20
-
-func (c *SourceZendeskTalkAuthenticationOAuth20) UnmarshalJSON(bs []byte) error {
- data := _SourceZendeskTalkAuthenticationOAuth20{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceZendeskTalkAuthenticationOAuth20(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceZendeskTalkOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceZendeskTalkOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "access_token")
- delete(additionalFields, "auth_type")
- delete(additionalFields, "client_id")
- delete(additionalFields, "client_secret")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceZendeskTalkAuthenticationOAuth20) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceZendeskTalkAuthenticationOAuth20(c))
- if err != nil {
- return nil, err
+func (o *SourceZendeskTalkOAuth20) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskTalkOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
}
+ return o.AccessToken
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
+func (o *SourceZendeskTalkOAuth20) GetAuthType() *SourceZendeskTalkSchemasAuthType {
+ return SourceZendeskTalkSchemasAuthTypeOauth20.ToPointer()
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskTalkOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientID
+}
- return json.Marshal(out)
+func (o *SourceZendeskTalkOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
}
-type SourceZendeskTalkAuthenticationAPITokenAuthType string
+type SourceZendeskTalkAuthType string
const (
- SourceZendeskTalkAuthenticationAPITokenAuthTypeAPIToken SourceZendeskTalkAuthenticationAPITokenAuthType = "api_token"
+ SourceZendeskTalkAuthTypeAPIToken SourceZendeskTalkAuthType = "api_token"
)
-func (e SourceZendeskTalkAuthenticationAPITokenAuthType) ToPointer() *SourceZendeskTalkAuthenticationAPITokenAuthType {
+func (e SourceZendeskTalkAuthType) ToPointer() *SourceZendeskTalkAuthType {
return &e
}
-func (e *SourceZendeskTalkAuthenticationAPITokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskTalkAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_token":
- *e = SourceZendeskTalkAuthenticationAPITokenAuthType(v)
+ *e = SourceZendeskTalkAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskTalkAuthenticationAPITokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskTalkAuthType: %v", v)
}
}
-// SourceZendeskTalkAuthenticationAPIToken - Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.
-type SourceZendeskTalkAuthenticationAPIToken struct {
+// SourceZendeskTalkAPIToken - Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.
+type SourceZendeskTalkAPIToken struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The value of the API token generated. See the docs for more information.
- APIToken string `json:"api_token"`
- AuthType *SourceZendeskTalkAuthenticationAPITokenAuthType `json:"auth_type,omitempty"`
+ APIToken string `json:"api_token"`
+ authType *SourceZendeskTalkAuthType `const:"api_token" json:"auth_type,omitempty"`
// The user email for your Zendesk account.
Email string `json:"email"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceZendeskTalkAuthenticationAPIToken SourceZendeskTalkAuthenticationAPIToken
-
-func (c *SourceZendeskTalkAuthenticationAPIToken) UnmarshalJSON(bs []byte) error {
- data := _SourceZendeskTalkAuthenticationAPIToken{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceZendeskTalkAuthenticationAPIToken(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceZendeskTalkAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceZendeskTalkAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "api_token")
- delete(additionalFields, "auth_type")
- delete(additionalFields, "email")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceZendeskTalkAuthenticationAPIToken) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceZendeskTalkAuthenticationAPIToken(c))
- if err != nil {
- return nil, err
+func (o *SourceZendeskTalkAPIToken) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskTalkAPIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
}
+ return o.APIToken
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
+func (o *SourceZendeskTalkAPIToken) GetAuthType() *SourceZendeskTalkAuthType {
+ return SourceZendeskTalkAuthTypeAPIToken.ToPointer()
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskTalkAPIToken) GetEmail() string {
+ if o == nil {
+ return ""
}
-
- return json.Marshal(out)
+ return o.Email
}
type SourceZendeskTalkAuthenticationType string
const (
- SourceZendeskTalkAuthenticationTypeSourceZendeskTalkAuthenticationAPIToken SourceZendeskTalkAuthenticationType = "source-zendesk-talk_Authentication_API Token"
- SourceZendeskTalkAuthenticationTypeSourceZendeskTalkAuthenticationOAuth20 SourceZendeskTalkAuthenticationType = "source-zendesk-talk_Authentication_OAuth2.0"
+ SourceZendeskTalkAuthenticationTypeSourceZendeskTalkAPIToken SourceZendeskTalkAuthenticationType = "source-zendesk-talk_API Token"
+ SourceZendeskTalkAuthenticationTypeSourceZendeskTalkOAuth20 SourceZendeskTalkAuthenticationType = "source-zendesk-talk_OAuth2.0"
)
type SourceZendeskTalkAuthentication struct {
- SourceZendeskTalkAuthenticationAPIToken *SourceZendeskTalkAuthenticationAPIToken
- SourceZendeskTalkAuthenticationOAuth20 *SourceZendeskTalkAuthenticationOAuth20
+ SourceZendeskTalkAPIToken *SourceZendeskTalkAPIToken
+ SourceZendeskTalkOAuth20 *SourceZendeskTalkOAuth20
Type SourceZendeskTalkAuthenticationType
}
-func CreateSourceZendeskTalkAuthenticationSourceZendeskTalkAuthenticationAPIToken(sourceZendeskTalkAuthenticationAPIToken SourceZendeskTalkAuthenticationAPIToken) SourceZendeskTalkAuthentication {
- typ := SourceZendeskTalkAuthenticationTypeSourceZendeskTalkAuthenticationAPIToken
+func CreateSourceZendeskTalkAuthenticationSourceZendeskTalkAPIToken(sourceZendeskTalkAPIToken SourceZendeskTalkAPIToken) SourceZendeskTalkAuthentication {
+ typ := SourceZendeskTalkAuthenticationTypeSourceZendeskTalkAPIToken
return SourceZendeskTalkAuthentication{
- SourceZendeskTalkAuthenticationAPIToken: &sourceZendeskTalkAuthenticationAPIToken,
- Type: typ,
+ SourceZendeskTalkAPIToken: &sourceZendeskTalkAPIToken,
+ Type: typ,
}
}
-func CreateSourceZendeskTalkAuthenticationSourceZendeskTalkAuthenticationOAuth20(sourceZendeskTalkAuthenticationOAuth20 SourceZendeskTalkAuthenticationOAuth20) SourceZendeskTalkAuthentication {
- typ := SourceZendeskTalkAuthenticationTypeSourceZendeskTalkAuthenticationOAuth20
+func CreateSourceZendeskTalkAuthenticationSourceZendeskTalkOAuth20(sourceZendeskTalkOAuth20 SourceZendeskTalkOAuth20) SourceZendeskTalkAuthentication {
+ typ := SourceZendeskTalkAuthenticationTypeSourceZendeskTalkOAuth20
return SourceZendeskTalkAuthentication{
- SourceZendeskTalkAuthenticationOAuth20: &sourceZendeskTalkAuthenticationOAuth20,
- Type: typ,
+ SourceZendeskTalkOAuth20: &sourceZendeskTalkOAuth20,
+ Type: typ,
}
}
func (u *SourceZendeskTalkAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceZendeskTalkAuthenticationAPIToken := new(SourceZendeskTalkAuthenticationAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskTalkAuthenticationAPIToken); err == nil {
- u.SourceZendeskTalkAuthenticationAPIToken = sourceZendeskTalkAuthenticationAPIToken
- u.Type = SourceZendeskTalkAuthenticationTypeSourceZendeskTalkAuthenticationAPIToken
+
+ sourceZendeskTalkAPIToken := new(SourceZendeskTalkAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskTalkAPIToken, "", true, true); err == nil {
+ u.SourceZendeskTalkAPIToken = sourceZendeskTalkAPIToken
+ u.Type = SourceZendeskTalkAuthenticationTypeSourceZendeskTalkAPIToken
return nil
}
- sourceZendeskTalkAuthenticationOAuth20 := new(SourceZendeskTalkAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskTalkAuthenticationOAuth20); err == nil {
- u.SourceZendeskTalkAuthenticationOAuth20 = sourceZendeskTalkAuthenticationOAuth20
- u.Type = SourceZendeskTalkAuthenticationTypeSourceZendeskTalkAuthenticationOAuth20
+ sourceZendeskTalkOAuth20 := new(SourceZendeskTalkOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskTalkOAuth20, "", true, true); err == nil {
+ u.SourceZendeskTalkOAuth20 = sourceZendeskTalkOAuth20
+ u.Type = SourceZendeskTalkAuthenticationTypeSourceZendeskTalkOAuth20
return nil
}
@@ -232,47 +211,83 @@ func (u *SourceZendeskTalkAuthentication) UnmarshalJSON(data []byte) error {
}
func (u SourceZendeskTalkAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceZendeskTalkAuthenticationAPIToken != nil {
- return json.Marshal(u.SourceZendeskTalkAuthenticationAPIToken)
+ if u.SourceZendeskTalkAPIToken != nil {
+ return utils.MarshalJSON(u.SourceZendeskTalkAPIToken, "", true)
}
- if u.SourceZendeskTalkAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceZendeskTalkAuthenticationOAuth20)
+ if u.SourceZendeskTalkOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceZendeskTalkOAuth20, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
-type SourceZendeskTalkZendeskTalk string
+type ZendeskTalk string
const (
- SourceZendeskTalkZendeskTalkZendeskTalk SourceZendeskTalkZendeskTalk = "zendesk-talk"
+ ZendeskTalkZendeskTalk ZendeskTalk = "zendesk-talk"
)
-func (e SourceZendeskTalkZendeskTalk) ToPointer() *SourceZendeskTalkZendeskTalk {
+func (e ZendeskTalk) ToPointer() *ZendeskTalk {
return &e
}
-func (e *SourceZendeskTalkZendeskTalk) UnmarshalJSON(data []byte) error {
+func (e *ZendeskTalk) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zendesk-talk":
- *e = SourceZendeskTalkZendeskTalk(v)
+ *e = ZendeskTalk(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskTalkZendeskTalk: %v", v)
+ return fmt.Errorf("invalid value for ZendeskTalk: %v", v)
}
}
type SourceZendeskTalk struct {
// Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.
Credentials *SourceZendeskTalkAuthentication `json:"credentials,omitempty"`
- SourceType SourceZendeskTalkZendeskTalk `json:"sourceType"`
+ sourceType ZendeskTalk `const:"zendesk-talk" json:"sourceType"`
// The date from which you'd like to replicate data for Zendesk Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.
StartDate time.Time `json:"start_date"`
// This is your Zendesk subdomain that can be found in your account URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN is the value of your subdomain.
Subdomain string `json:"subdomain"`
}
+
+func (s SourceZendeskTalk) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskTalk) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskTalk) GetCredentials() *SourceZendeskTalkAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceZendeskTalk) GetSourceType() ZendeskTalk {
+ return ZendeskTalkZendeskTalk
+}
+
+func (o *SourceZendeskTalk) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceZendeskTalk) GetSubdomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Subdomain
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesktalkcreaterequest.go b/internal/sdk/pkg/models/shared/sourcezendesktalkcreaterequest.go
old mode 100755
new mode 100644
index 68278d9d4..5a9838a32
--- a/internal/sdk/pkg/models/shared/sourcezendesktalkcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesktalkcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceZendeskTalkCreateRequest struct {
Configuration SourceZendeskTalk `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZendeskTalkCreateRequest) GetConfiguration() SourceZendeskTalk {
+ if o == nil {
+ return SourceZendeskTalk{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZendeskTalkCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceZendeskTalkCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZendeskTalkCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceZendeskTalkCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesktalkputrequest.go b/internal/sdk/pkg/models/shared/sourcezendesktalkputrequest.go
old mode 100755
new mode 100644
index 83c370cd7..f9280b7c9
--- a/internal/sdk/pkg/models/shared/sourcezendesktalkputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesktalkputrequest.go
@@ -7,3 +7,24 @@ type SourceZendeskTalkPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZendeskTalkPutRequest) GetConfiguration() SourceZendeskTalkUpdate {
+ if o == nil {
+ return SourceZendeskTalkUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZendeskTalkPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZendeskTalkPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezendesktalkupdate.go b/internal/sdk/pkg/models/shared/sourcezendesktalkupdate.go
old mode 100755
new mode 100644
index 9a61c80b7..639068901
--- a/internal/sdk/pkg/models/shared/sourcezendesktalkupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcezendesktalkupdate.go
@@ -3,228 +3,207 @@
package shared
import (
- "bytes"
"encoding/json"
"errors"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-type SourceZendeskTalkUpdateAuthenticationOAuth20AuthType string
+type SourceZendeskTalkUpdateSchemasAuthType string
const (
- SourceZendeskTalkUpdateAuthenticationOAuth20AuthTypeOauth20 SourceZendeskTalkUpdateAuthenticationOAuth20AuthType = "oauth2.0"
+ SourceZendeskTalkUpdateSchemasAuthTypeOauth20 SourceZendeskTalkUpdateSchemasAuthType = "oauth2.0"
)
-func (e SourceZendeskTalkUpdateAuthenticationOAuth20AuthType) ToPointer() *SourceZendeskTalkUpdateAuthenticationOAuth20AuthType {
+func (e SourceZendeskTalkUpdateSchemasAuthType) ToPointer() *SourceZendeskTalkUpdateSchemasAuthType {
return &e
}
-func (e *SourceZendeskTalkUpdateAuthenticationOAuth20AuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskTalkUpdateSchemasAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "oauth2.0":
- *e = SourceZendeskTalkUpdateAuthenticationOAuth20AuthType(v)
+ *e = SourceZendeskTalkUpdateSchemasAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskTalkUpdateAuthenticationOAuth20AuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskTalkUpdateSchemasAuthType: %v", v)
}
}
-// SourceZendeskTalkUpdateAuthenticationOAuth20 - Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.
-type SourceZendeskTalkUpdateAuthenticationOAuth20 struct {
+// SourceZendeskTalkUpdateOAuth20 - Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.
+type SourceZendeskTalkUpdateOAuth20 struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The value of the API token generated. See the docs for more information.
- AccessToken string `json:"access_token"`
- AuthType *SourceZendeskTalkUpdateAuthenticationOAuth20AuthType `json:"auth_type,omitempty"`
+ AccessToken string `json:"access_token"`
+ authType *SourceZendeskTalkUpdateSchemasAuthType `const:"oauth2.0" json:"auth_type,omitempty"`
// Client ID
ClientID *string `json:"client_id,omitempty"`
// Client Secret
ClientSecret *string `json:"client_secret,omitempty"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceZendeskTalkUpdateAuthenticationOAuth20 SourceZendeskTalkUpdateAuthenticationOAuth20
-
-func (c *SourceZendeskTalkUpdateAuthenticationOAuth20) UnmarshalJSON(bs []byte) error {
- data := _SourceZendeskTalkUpdateAuthenticationOAuth20{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceZendeskTalkUpdateAuthenticationOAuth20(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceZendeskTalkUpdateOAuth20) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceZendeskTalkUpdateOAuth20) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "access_token")
- delete(additionalFields, "auth_type")
- delete(additionalFields, "client_id")
- delete(additionalFields, "client_secret")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceZendeskTalkUpdateAuthenticationOAuth20) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceZendeskTalkUpdateAuthenticationOAuth20(c))
- if err != nil {
- return nil, err
+func (o *SourceZendeskTalkUpdateOAuth20) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskTalkUpdateOAuth20) GetAccessToken() string {
+ if o == nil {
+ return ""
}
+ return o.AccessToken
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
+func (o *SourceZendeskTalkUpdateOAuth20) GetAuthType() *SourceZendeskTalkUpdateSchemasAuthType {
+ return SourceZendeskTalkUpdateSchemasAuthTypeOauth20.ToPointer()
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskTalkUpdateOAuth20) GetClientID() *string {
+ if o == nil {
+ return nil
}
+ return o.ClientID
+}
- return json.Marshal(out)
+func (o *SourceZendeskTalkUpdateOAuth20) GetClientSecret() *string {
+ if o == nil {
+ return nil
+ }
+ return o.ClientSecret
}
-type SourceZendeskTalkUpdateAuthenticationAPITokenAuthType string
+type SourceZendeskTalkUpdateAuthType string
const (
- SourceZendeskTalkUpdateAuthenticationAPITokenAuthTypeAPIToken SourceZendeskTalkUpdateAuthenticationAPITokenAuthType = "api_token"
+ SourceZendeskTalkUpdateAuthTypeAPIToken SourceZendeskTalkUpdateAuthType = "api_token"
)
-func (e SourceZendeskTalkUpdateAuthenticationAPITokenAuthType) ToPointer() *SourceZendeskTalkUpdateAuthenticationAPITokenAuthType {
+func (e SourceZendeskTalkUpdateAuthType) ToPointer() *SourceZendeskTalkUpdateAuthType {
return &e
}
-func (e *SourceZendeskTalkUpdateAuthenticationAPITokenAuthType) UnmarshalJSON(data []byte) error {
+func (e *SourceZendeskTalkUpdateAuthType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "api_token":
- *e = SourceZendeskTalkUpdateAuthenticationAPITokenAuthType(v)
+ *e = SourceZendeskTalkUpdateAuthType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZendeskTalkUpdateAuthenticationAPITokenAuthType: %v", v)
+ return fmt.Errorf("invalid value for SourceZendeskTalkUpdateAuthType: %v", v)
}
}
-// SourceZendeskTalkUpdateAuthenticationAPIToken - Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.
-type SourceZendeskTalkUpdateAuthenticationAPIToken struct {
+// SourceZendeskTalkUpdateAPIToken - Zendesk service provides two authentication methods. Choose between: `OAuth2.0` or `API token`.
+type SourceZendeskTalkUpdateAPIToken struct {
+ AdditionalProperties interface{} `additionalProperties:"true" json:"-"`
// The value of the API token generated. See the docs for more information.
- APIToken string `json:"api_token"`
- AuthType *SourceZendeskTalkUpdateAuthenticationAPITokenAuthType `json:"auth_type,omitempty"`
+ APIToken string `json:"api_token"`
+ authType *SourceZendeskTalkUpdateAuthType `const:"api_token" json:"auth_type,omitempty"`
// The user email for your Zendesk account.
Email string `json:"email"`
-
- AdditionalProperties interface{} `json:"-"`
}
-type _SourceZendeskTalkUpdateAuthenticationAPIToken SourceZendeskTalkUpdateAuthenticationAPIToken
-
-func (c *SourceZendeskTalkUpdateAuthenticationAPIToken) UnmarshalJSON(bs []byte) error {
- data := _SourceZendeskTalkUpdateAuthenticationAPIToken{}
- if err := json.Unmarshal(bs, &data); err != nil {
- return err
- }
- *c = SourceZendeskTalkUpdateAuthenticationAPIToken(data)
-
- additionalFields := make(map[string]interface{})
+func (s SourceZendeskTalkUpdateAPIToken) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
- if err := json.Unmarshal(bs, &additionalFields); err != nil {
+func (s *SourceZendeskTalkUpdateAPIToken) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, true); err != nil {
return err
}
- delete(additionalFields, "api_token")
- delete(additionalFields, "auth_type")
- delete(additionalFields, "email")
-
- c.AdditionalProperties = additionalFields
-
return nil
}
-func (c SourceZendeskTalkUpdateAuthenticationAPIToken) MarshalJSON() ([]byte, error) {
- out := map[string]interface{}{}
- bs, err := json.Marshal(_SourceZendeskTalkUpdateAuthenticationAPIToken(c))
- if err != nil {
- return nil, err
+func (o *SourceZendeskTalkUpdateAPIToken) GetAdditionalProperties() interface{} {
+ if o == nil {
+ return nil
}
+ return o.AdditionalProperties
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskTalkUpdateAPIToken) GetAPIToken() string {
+ if o == nil {
+ return ""
}
+ return o.APIToken
+}
- bs, err = json.Marshal(c.AdditionalProperties)
- if err != nil {
- return nil, err
- }
+func (o *SourceZendeskTalkUpdateAPIToken) GetAuthType() *SourceZendeskTalkUpdateAuthType {
+ return SourceZendeskTalkUpdateAuthTypeAPIToken.ToPointer()
+}
- if err := json.Unmarshal([]byte(bs), &out); err != nil {
- return nil, err
+func (o *SourceZendeskTalkUpdateAPIToken) GetEmail() string {
+ if o == nil {
+ return ""
}
-
- return json.Marshal(out)
+ return o.Email
}
type SourceZendeskTalkUpdateAuthenticationType string
const (
- SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateAuthenticationAPIToken SourceZendeskTalkUpdateAuthenticationType = "source-zendesk-talk-update_Authentication_API Token"
- SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateAuthenticationOAuth20 SourceZendeskTalkUpdateAuthenticationType = "source-zendesk-talk-update_Authentication_OAuth2.0"
+ SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateAPIToken SourceZendeskTalkUpdateAuthenticationType = "source-zendesk-talk-update_API Token"
+ SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateOAuth20 SourceZendeskTalkUpdateAuthenticationType = "source-zendesk-talk-update_OAuth2.0"
)
type SourceZendeskTalkUpdateAuthentication struct {
- SourceZendeskTalkUpdateAuthenticationAPIToken *SourceZendeskTalkUpdateAuthenticationAPIToken
- SourceZendeskTalkUpdateAuthenticationOAuth20 *SourceZendeskTalkUpdateAuthenticationOAuth20
+ SourceZendeskTalkUpdateAPIToken *SourceZendeskTalkUpdateAPIToken
+ SourceZendeskTalkUpdateOAuth20 *SourceZendeskTalkUpdateOAuth20
Type SourceZendeskTalkUpdateAuthenticationType
}
-func CreateSourceZendeskTalkUpdateAuthenticationSourceZendeskTalkUpdateAuthenticationAPIToken(sourceZendeskTalkUpdateAuthenticationAPIToken SourceZendeskTalkUpdateAuthenticationAPIToken) SourceZendeskTalkUpdateAuthentication {
- typ := SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateAuthenticationAPIToken
+func CreateSourceZendeskTalkUpdateAuthenticationSourceZendeskTalkUpdateAPIToken(sourceZendeskTalkUpdateAPIToken SourceZendeskTalkUpdateAPIToken) SourceZendeskTalkUpdateAuthentication {
+ typ := SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateAPIToken
return SourceZendeskTalkUpdateAuthentication{
- SourceZendeskTalkUpdateAuthenticationAPIToken: &sourceZendeskTalkUpdateAuthenticationAPIToken,
- Type: typ,
+ SourceZendeskTalkUpdateAPIToken: &sourceZendeskTalkUpdateAPIToken,
+ Type: typ,
}
}
-func CreateSourceZendeskTalkUpdateAuthenticationSourceZendeskTalkUpdateAuthenticationOAuth20(sourceZendeskTalkUpdateAuthenticationOAuth20 SourceZendeskTalkUpdateAuthenticationOAuth20) SourceZendeskTalkUpdateAuthentication {
- typ := SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateAuthenticationOAuth20
+func CreateSourceZendeskTalkUpdateAuthenticationSourceZendeskTalkUpdateOAuth20(sourceZendeskTalkUpdateOAuth20 SourceZendeskTalkUpdateOAuth20) SourceZendeskTalkUpdateAuthentication {
+ typ := SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateOAuth20
return SourceZendeskTalkUpdateAuthentication{
- SourceZendeskTalkUpdateAuthenticationOAuth20: &sourceZendeskTalkUpdateAuthenticationOAuth20,
- Type: typ,
+ SourceZendeskTalkUpdateOAuth20: &sourceZendeskTalkUpdateOAuth20,
+ Type: typ,
}
}
func (u *SourceZendeskTalkUpdateAuthentication) UnmarshalJSON(data []byte) error {
- var d *json.Decoder
-
- sourceZendeskTalkUpdateAuthenticationAPIToken := new(SourceZendeskTalkUpdateAuthenticationAPIToken)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskTalkUpdateAuthenticationAPIToken); err == nil {
- u.SourceZendeskTalkUpdateAuthenticationAPIToken = sourceZendeskTalkUpdateAuthenticationAPIToken
- u.Type = SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateAuthenticationAPIToken
+
+ sourceZendeskTalkUpdateAPIToken := new(SourceZendeskTalkUpdateAPIToken)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskTalkUpdateAPIToken, "", true, true); err == nil {
+ u.SourceZendeskTalkUpdateAPIToken = sourceZendeskTalkUpdateAPIToken
+ u.Type = SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateAPIToken
return nil
}
- sourceZendeskTalkUpdateAuthenticationOAuth20 := new(SourceZendeskTalkUpdateAuthenticationOAuth20)
- d = json.NewDecoder(bytes.NewReader(data))
- d.DisallowUnknownFields()
- if err := d.Decode(&sourceZendeskTalkUpdateAuthenticationOAuth20); err == nil {
- u.SourceZendeskTalkUpdateAuthenticationOAuth20 = sourceZendeskTalkUpdateAuthenticationOAuth20
- u.Type = SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateAuthenticationOAuth20
+ sourceZendeskTalkUpdateOAuth20 := new(SourceZendeskTalkUpdateOAuth20)
+ if err := utils.UnmarshalJSON(data, &sourceZendeskTalkUpdateOAuth20, "", true, true); err == nil {
+ u.SourceZendeskTalkUpdateOAuth20 = sourceZendeskTalkUpdateOAuth20
+ u.Type = SourceZendeskTalkUpdateAuthenticationTypeSourceZendeskTalkUpdateOAuth20
return nil
}
@@ -232,15 +211,15 @@ func (u *SourceZendeskTalkUpdateAuthentication) UnmarshalJSON(data []byte) error
}
func (u SourceZendeskTalkUpdateAuthentication) MarshalJSON() ([]byte, error) {
- if u.SourceZendeskTalkUpdateAuthenticationAPIToken != nil {
- return json.Marshal(u.SourceZendeskTalkUpdateAuthenticationAPIToken)
+ if u.SourceZendeskTalkUpdateAPIToken != nil {
+ return utils.MarshalJSON(u.SourceZendeskTalkUpdateAPIToken, "", true)
}
- if u.SourceZendeskTalkUpdateAuthenticationOAuth20 != nil {
- return json.Marshal(u.SourceZendeskTalkUpdateAuthenticationOAuth20)
+ if u.SourceZendeskTalkUpdateOAuth20 != nil {
+ return utils.MarshalJSON(u.SourceZendeskTalkUpdateOAuth20, "", true)
}
- return nil, nil
+ return nil, errors.New("could not marshal union type: all fields are null")
}
type SourceZendeskTalkUpdate struct {
@@ -251,3 +230,35 @@ type SourceZendeskTalkUpdate struct {
// This is your Zendesk subdomain that can be found in your account URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN is the value of your subdomain.
Subdomain string `json:"subdomain"`
}
+
+func (s SourceZendeskTalkUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZendeskTalkUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZendeskTalkUpdate) GetCredentials() *SourceZendeskTalkUpdateAuthentication {
+ if o == nil {
+ return nil
+ }
+ return o.Credentials
+}
+
+func (o *SourceZendeskTalkUpdate) GetStartDate() time.Time {
+ if o == nil {
+ return time.Time{}
+ }
+ return o.StartDate
+}
+
+func (o *SourceZendeskTalkUpdate) GetSubdomain() string {
+ if o == nil {
+ return ""
+ }
+ return o.Subdomain
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezenloop.go b/internal/sdk/pkg/models/shared/sourcezenloop.go
old mode 100755
new mode 100644
index ca9d890a7..38e7f600a
--- a/internal/sdk/pkg/models/shared/sourcezenloop.go
+++ b/internal/sdk/pkg/models/shared/sourcezenloop.go
@@ -5,29 +5,30 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceZenloopZenloop string
+type Zenloop string
const (
- SourceZenloopZenloopZenloop SourceZenloopZenloop = "zenloop"
+ ZenloopZenloop Zenloop = "zenloop"
)
-func (e SourceZenloopZenloop) ToPointer() *SourceZenloopZenloop {
+func (e Zenloop) ToPointer() *Zenloop {
return &e
}
-func (e *SourceZenloopZenloop) UnmarshalJSON(data []byte) error {
+func (e *Zenloop) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zenloop":
- *e = SourceZenloopZenloop(v)
+ *e = Zenloop(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZenloopZenloop: %v", v)
+ return fmt.Errorf("invalid value for Zenloop: %v", v)
}
}
@@ -35,10 +36,53 @@ type SourceZenloop struct {
// Zenloop API Token. You can get the API token in settings page here
APIToken string `json:"api_token"`
// Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24. Leave empty if only data from current data should be synced
- DateFrom *string `json:"date_from,omitempty"`
- SourceType SourceZenloopZenloop `json:"sourceType"`
+ DateFrom *string `json:"date_from,omitempty"`
+ sourceType Zenloop `const:"zenloop" json:"sourceType"`
// Zenloop Survey Group ID. Can be found by pulling All Survey Groups via SurveyGroups stream. Leave empty to pull answers from all survey groups
SurveyGroupID *string `json:"survey_group_id,omitempty"`
// Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys
SurveyID *string `json:"survey_id,omitempty"`
}
+
+func (s SourceZenloop) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZenloop) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZenloop) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceZenloop) GetDateFrom() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DateFrom
+}
+
+func (o *SourceZenloop) GetSourceType() Zenloop {
+ return ZenloopZenloop
+}
+
+func (o *SourceZenloop) GetSurveyGroupID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SurveyGroupID
+}
+
+func (o *SourceZenloop) GetSurveyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SurveyID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezenloopcreaterequest.go b/internal/sdk/pkg/models/shared/sourcezenloopcreaterequest.go
old mode 100755
new mode 100644
index a498a7f9f..3218e1b97
--- a/internal/sdk/pkg/models/shared/sourcezenloopcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezenloopcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceZenloopCreateRequest struct {
Configuration SourceZenloop `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZenloopCreateRequest) GetConfiguration() SourceZenloop {
+ if o == nil {
+ return SourceZenloop{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZenloopCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceZenloopCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZenloopCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceZenloopCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezenloopputrequest.go b/internal/sdk/pkg/models/shared/sourcezenloopputrequest.go
old mode 100755
new mode 100644
index 27d558cf3..b6e529f2e
--- a/internal/sdk/pkg/models/shared/sourcezenloopputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezenloopputrequest.go
@@ -7,3 +7,24 @@ type SourceZenloopPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZenloopPutRequest) GetConfiguration() SourceZenloopUpdate {
+ if o == nil {
+ return SourceZenloopUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZenloopPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZenloopPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezenloopupdate.go b/internal/sdk/pkg/models/shared/sourcezenloopupdate.go
old mode 100755
new mode 100644
index b7e780e0d..86c3fb736
--- a/internal/sdk/pkg/models/shared/sourcezenloopupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcezenloopupdate.go
@@ -12,3 +12,31 @@ type SourceZenloopUpdate struct {
// Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys
SurveyID *string `json:"survey_id,omitempty"`
}
+
+func (o *SourceZenloopUpdate) GetAPIToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.APIToken
+}
+
+func (o *SourceZenloopUpdate) GetDateFrom() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DateFrom
+}
+
+func (o *SourceZenloopUpdate) GetSurveyGroupID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SurveyGroupID
+}
+
+func (o *SourceZenloopUpdate) GetSurveyID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SurveyID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezohocrm.go b/internal/sdk/pkg/models/shared/sourcezohocrm.go
old mode 100755
new mode 100644
index 27cba7f77..bcad6bcdf
--- a/internal/sdk/pkg/models/shared/sourcezohocrm.go
+++ b/internal/sdk/pkg/models/shared/sourcezohocrm.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
@@ -116,27 +117,27 @@ func (e *SourceZohoCrmEnvironment) UnmarshalJSON(data []byte) error {
}
}
-type SourceZohoCrmZohoCrm string
+type ZohoCrm string
const (
- SourceZohoCrmZohoCrmZohoCrm SourceZohoCrmZohoCrm = "zoho-crm"
+ ZohoCrmZohoCrm ZohoCrm = "zoho-crm"
)
-func (e SourceZohoCrmZohoCrm) ToPointer() *SourceZohoCrmZohoCrm {
+func (e ZohoCrm) ToPointer() *ZohoCrm {
return &e
}
-func (e *SourceZohoCrmZohoCrm) UnmarshalJSON(data []byte) error {
+func (e *ZohoCrm) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zoho-crm":
- *e = SourceZohoCrmZohoCrm(v)
+ *e = ZohoCrm(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZohoCrmZohoCrm: %v", v)
+ return fmt.Errorf("invalid value for ZohoCrm: %v", v)
}
}
@@ -148,12 +149,76 @@ type SourceZohoCrm struct {
// Please choose the region of your Data Center location. More info by this Link
DcRegion SourceZohoCrmDataCenterLocation `json:"dc_region"`
// Choose your Edition of Zoho CRM to determine API Concurrency Limits
- Edition SourceZohoCRMZohoCRMEdition `json:"edition"`
+ Edition *SourceZohoCRMZohoCRMEdition `default:"Free" json:"edition"`
// Please choose the environment
Environment SourceZohoCrmEnvironment `json:"environment"`
// OAuth2.0 Refresh Token
- RefreshToken string `json:"refresh_token"`
- SourceType SourceZohoCrmZohoCrm `json:"sourceType"`
+ RefreshToken string `json:"refresh_token"`
+ sourceType ZohoCrm `const:"zoho-crm" json:"sourceType"`
// ISO 8601, for instance: `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS+HH:MM`
StartDatetime *time.Time `json:"start_datetime,omitempty"`
}
+
+func (s SourceZohoCrm) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZohoCrm) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZohoCrm) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceZohoCrm) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceZohoCrm) GetDcRegion() SourceZohoCrmDataCenterLocation {
+ if o == nil {
+ return SourceZohoCrmDataCenterLocation("")
+ }
+ return o.DcRegion
+}
+
+func (o *SourceZohoCrm) GetEdition() *SourceZohoCRMZohoCRMEdition {
+ if o == nil {
+ return nil
+ }
+ return o.Edition
+}
+
+func (o *SourceZohoCrm) GetEnvironment() SourceZohoCrmEnvironment {
+ if o == nil {
+ return SourceZohoCrmEnvironment("")
+ }
+ return o.Environment
+}
+
+func (o *SourceZohoCrm) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceZohoCrm) GetSourceType() ZohoCrm {
+ return ZohoCrmZohoCrm
+}
+
+func (o *SourceZohoCrm) GetStartDatetime() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDatetime
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezohocrmcreaterequest.go b/internal/sdk/pkg/models/shared/sourcezohocrmcreaterequest.go
old mode 100755
new mode 100644
index d53cafd85..43a4f8c8c
--- a/internal/sdk/pkg/models/shared/sourcezohocrmcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezohocrmcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceZohoCrmCreateRequest struct {
Configuration SourceZohoCrm `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZohoCrmCreateRequest) GetConfiguration() SourceZohoCrm {
+ if o == nil {
+ return SourceZohoCrm{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZohoCrmCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceZohoCrmCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZohoCrmCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceZohoCrmCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezohocrmputrequest.go b/internal/sdk/pkg/models/shared/sourcezohocrmputrequest.go
old mode 100755
new mode 100644
index 1cbadc1fd..e283ab7dd
--- a/internal/sdk/pkg/models/shared/sourcezohocrmputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezohocrmputrequest.go
@@ -7,3 +7,24 @@ type SourceZohoCrmPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZohoCrmPutRequest) GetConfiguration() SourceZohoCrmUpdate {
+ if o == nil {
+ return SourceZohoCrmUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZohoCrmPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZohoCrmPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezohocrmupdate.go b/internal/sdk/pkg/models/shared/sourcezohocrmupdate.go
old mode 100755
new mode 100644
index 468492159..bbb571aa2
--- a/internal/sdk/pkg/models/shared/sourcezohocrmupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcezohocrmupdate.go
@@ -5,26 +5,27 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"time"
)
-// SourceZohoCrmUpdateDataCenterLocation - Please choose the region of your Data Center location. More info by this Link
-type SourceZohoCrmUpdateDataCenterLocation string
+// DataCenterLocation - Please choose the region of your Data Center location. More info by this Link
+type DataCenterLocation string
const (
- SourceZohoCrmUpdateDataCenterLocationUs SourceZohoCrmUpdateDataCenterLocation = "US"
- SourceZohoCrmUpdateDataCenterLocationAu SourceZohoCrmUpdateDataCenterLocation = "AU"
- SourceZohoCrmUpdateDataCenterLocationEu SourceZohoCrmUpdateDataCenterLocation = "EU"
- SourceZohoCrmUpdateDataCenterLocationIn SourceZohoCrmUpdateDataCenterLocation = "IN"
- SourceZohoCrmUpdateDataCenterLocationCn SourceZohoCrmUpdateDataCenterLocation = "CN"
- SourceZohoCrmUpdateDataCenterLocationJp SourceZohoCrmUpdateDataCenterLocation = "JP"
+ DataCenterLocationUs DataCenterLocation = "US"
+ DataCenterLocationAu DataCenterLocation = "AU"
+ DataCenterLocationEu DataCenterLocation = "EU"
+ DataCenterLocationIn DataCenterLocation = "IN"
+ DataCenterLocationCn DataCenterLocation = "CN"
+ DataCenterLocationJp DataCenterLocation = "JP"
)
-func (e SourceZohoCrmUpdateDataCenterLocation) ToPointer() *SourceZohoCrmUpdateDataCenterLocation {
+func (e DataCenterLocation) ToPointer() *DataCenterLocation {
return &e
}
-func (e *SourceZohoCrmUpdateDataCenterLocation) UnmarshalJSON(data []byte) error {
+func (e *DataCenterLocation) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -41,29 +42,29 @@ func (e *SourceZohoCrmUpdateDataCenterLocation) UnmarshalJSON(data []byte) error
case "CN":
fallthrough
case "JP":
- *e = SourceZohoCrmUpdateDataCenterLocation(v)
+ *e = DataCenterLocation(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZohoCrmUpdateDataCenterLocation: %v", v)
+ return fmt.Errorf("invalid value for DataCenterLocation: %v", v)
}
}
-// SourceZohoCRMUpdateZohoCRMEdition - Choose your Edition of Zoho CRM to determine API Concurrency Limits
-type SourceZohoCRMUpdateZohoCRMEdition string
+// ZohoCRMEdition - Choose your Edition of Zoho CRM to determine API Concurrency Limits
+type ZohoCRMEdition string
const (
- SourceZohoCRMUpdateZohoCRMEditionFree SourceZohoCRMUpdateZohoCRMEdition = "Free"
- SourceZohoCRMUpdateZohoCRMEditionStandard SourceZohoCRMUpdateZohoCRMEdition = "Standard"
- SourceZohoCRMUpdateZohoCRMEditionProfessional SourceZohoCRMUpdateZohoCRMEdition = "Professional"
- SourceZohoCRMUpdateZohoCRMEditionEnterprise SourceZohoCRMUpdateZohoCRMEdition = "Enterprise"
- SourceZohoCRMUpdateZohoCRMEditionUltimate SourceZohoCRMUpdateZohoCRMEdition = "Ultimate"
+ ZohoCRMEditionFree ZohoCRMEdition = "Free"
+ ZohoCRMEditionStandard ZohoCRMEdition = "Standard"
+ ZohoCRMEditionProfessional ZohoCRMEdition = "Professional"
+ ZohoCRMEditionEnterprise ZohoCRMEdition = "Enterprise"
+ ZohoCRMEditionUltimate ZohoCRMEdition = "Ultimate"
)
-func (e SourceZohoCRMUpdateZohoCRMEdition) ToPointer() *SourceZohoCRMUpdateZohoCRMEdition {
+func (e ZohoCRMEdition) ToPointer() *ZohoCRMEdition {
return &e
}
-func (e *SourceZohoCRMUpdateZohoCRMEdition) UnmarshalJSON(data []byte) error {
+func (e *ZohoCRMEdition) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -78,10 +79,10 @@ func (e *SourceZohoCRMUpdateZohoCRMEdition) UnmarshalJSON(data []byte) error {
case "Enterprise":
fallthrough
case "Ultimate":
- *e = SourceZohoCRMUpdateZohoCRMEdition(v)
+ *e = ZohoCRMEdition(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZohoCRMUpdateZohoCRMEdition: %v", v)
+ return fmt.Errorf("invalid value for ZohoCRMEdition: %v", v)
}
}
@@ -122,9 +123,9 @@ type SourceZohoCrmUpdate struct {
// OAuth2.0 Client Secret
ClientSecret string `json:"client_secret"`
// Please choose the region of your Data Center location. More info by this Link
- DcRegion SourceZohoCrmUpdateDataCenterLocation `json:"dc_region"`
+ DcRegion DataCenterLocation `json:"dc_region"`
// Choose your Edition of Zoho CRM to determine API Concurrency Limits
- Edition SourceZohoCRMUpdateZohoCRMEdition `json:"edition"`
+ Edition *ZohoCRMEdition `default:"Free" json:"edition"`
// Please choose the environment
Environment SourceZohoCrmUpdateEnvironment `json:"environment"`
// OAuth2.0 Refresh Token
@@ -132,3 +133,63 @@ type SourceZohoCrmUpdate struct {
// ISO 8601, for instance: `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS+HH:MM`
StartDatetime *time.Time `json:"start_datetime,omitempty"`
}
+
+func (s SourceZohoCrmUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZohoCrmUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZohoCrmUpdate) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceZohoCrmUpdate) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceZohoCrmUpdate) GetDcRegion() DataCenterLocation {
+ if o == nil {
+ return DataCenterLocation("")
+ }
+ return o.DcRegion
+}
+
+func (o *SourceZohoCrmUpdate) GetEdition() *ZohoCRMEdition {
+ if o == nil {
+ return nil
+ }
+ return o.Edition
+}
+
+func (o *SourceZohoCrmUpdate) GetEnvironment() SourceZohoCrmUpdateEnvironment {
+ if o == nil {
+ return SourceZohoCrmUpdateEnvironment("")
+ }
+ return o.Environment
+}
+
+func (o *SourceZohoCrmUpdate) GetRefreshToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.RefreshToken
+}
+
+func (o *SourceZohoCrmUpdate) GetStartDatetime() *time.Time {
+ if o == nil {
+ return nil
+ }
+ return o.StartDatetime
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezoom.go b/internal/sdk/pkg/models/shared/sourcezoom.go
old mode 100755
new mode 100644
index 7a6ec9c19..72d5de107
--- a/internal/sdk/pkg/models/shared/sourcezoom.go
+++ b/internal/sdk/pkg/models/shared/sourcezoom.go
@@ -5,34 +5,57 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-type SourceZoomZoom string
+type Zoom string
const (
- SourceZoomZoomZoom SourceZoomZoom = "zoom"
+ ZoomZoom Zoom = "zoom"
)
-func (e SourceZoomZoom) ToPointer() *SourceZoomZoom {
+func (e Zoom) ToPointer() *Zoom {
return &e
}
-func (e *SourceZoomZoom) UnmarshalJSON(data []byte) error {
+func (e *Zoom) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zoom":
- *e = SourceZoomZoom(v)
+ *e = Zoom(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZoomZoom: %v", v)
+ return fmt.Errorf("invalid value for Zoom: %v", v)
}
}
type SourceZoom struct {
// JWT Token
- JwtToken string `json:"jwt_token"`
- SourceType SourceZoomZoom `json:"sourceType"`
+ JwtToken string `json:"jwt_token"`
+ sourceType Zoom `const:"zoom" json:"sourceType"`
+}
+
+func (s SourceZoom) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZoom) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZoom) GetJwtToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.JwtToken
+}
+
+func (o *SourceZoom) GetSourceType() Zoom {
+ return ZoomZoom
}
diff --git a/internal/sdk/pkg/models/shared/sourcezoomcreaterequest.go b/internal/sdk/pkg/models/shared/sourcezoomcreaterequest.go
old mode 100755
new mode 100644
index 7b8eb0843..c6121ac7c
--- a/internal/sdk/pkg/models/shared/sourcezoomcreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezoomcreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceZoomCreateRequest struct {
Configuration SourceZoom `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZoomCreateRequest) GetConfiguration() SourceZoom {
+ if o == nil {
+ return SourceZoom{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZoomCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceZoomCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZoomCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceZoomCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezoomputrequest.go b/internal/sdk/pkg/models/shared/sourcezoomputrequest.go
old mode 100755
new mode 100644
index 70c689a81..03af6cb7b
--- a/internal/sdk/pkg/models/shared/sourcezoomputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezoomputrequest.go
@@ -7,3 +7,24 @@ type SourceZoomPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZoomPutRequest) GetConfiguration() SourceZoomUpdate {
+ if o == nil {
+ return SourceZoomUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZoomPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZoomPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezoomupdate.go b/internal/sdk/pkg/models/shared/sourcezoomupdate.go
old mode 100755
new mode 100644
index 9a96197bf..a363112d5
--- a/internal/sdk/pkg/models/shared/sourcezoomupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcezoomupdate.go
@@ -6,3 +6,10 @@ type SourceZoomUpdate struct {
// JWT Token
JwtToken string `json:"jwt_token"`
}
+
+func (o *SourceZoomUpdate) GetJwtToken() string {
+ if o == nil {
+ return ""
+ }
+ return o.JwtToken
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezuora.go b/internal/sdk/pkg/models/shared/sourcezuora.go
old mode 100755
new mode 100644
index 364ba8bb6..d284327da
--- a/internal/sdk/pkg/models/shared/sourcezuora.go
+++ b/internal/sdk/pkg/models/shared/sourcezuora.go
@@ -5,6 +5,7 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
// SourceZuoraDataQueryType - Choose between `Live`, or `Unlimited` - the optimized, replicated database at 12 hours freshness for high volume extraction Link
@@ -35,27 +36,27 @@ func (e *SourceZuoraDataQueryType) UnmarshalJSON(data []byte) error {
}
}
-type SourceZuoraZuora string
+type Zuora string
const (
- SourceZuoraZuoraZuora SourceZuoraZuora = "zuora"
+ ZuoraZuora Zuora = "zuora"
)
-func (e SourceZuoraZuora) ToPointer() *SourceZuoraZuora {
+func (e Zuora) ToPointer() *Zuora {
return &e
}
-func (e *SourceZuoraZuora) UnmarshalJSON(data []byte) error {
+func (e *Zuora) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
}
switch v {
case "zuora":
- *e = SourceZuoraZuora(v)
+ *e = Zuora(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZuoraZuora: %v", v)
+ return fmt.Errorf("invalid value for Zuora: %v", v)
}
}
@@ -114,12 +115,69 @@ type SourceZuora struct {
// Your OAuth user Client Secret
ClientSecret string `json:"client_secret"`
// Choose between `Live`, or `Unlimited` - the optimized, replicated database at 12 hours freshness for high volume extraction Link
- DataQuery SourceZuoraDataQueryType `json:"data_query"`
- SourceType SourceZuoraZuora `json:"sourceType"`
+ DataQuery *SourceZuoraDataQueryType `default:"Live" json:"data_query"`
+ sourceType Zuora `const:"zuora" json:"sourceType"`
// Start Date in format: YYYY-MM-DD
StartDate string `json:"start_date"`
// Please choose the right endpoint where your Tenant is located. More info by this Link
TenantEndpoint SourceZuoraTenantEndpointLocation `json:"tenant_endpoint"`
// The amount of days for each data-chunk begining from start_date. Bigger the value - faster the fetch. (0.1 - as for couple of hours, 1 - as for a Day; 364 - as for a Year).
- WindowInDays *string `json:"window_in_days,omitempty"`
+ WindowInDays *string `default:"90" json:"window_in_days"`
+}
+
+func (s SourceZuora) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZuora) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZuora) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceZuora) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceZuora) GetDataQuery() *SourceZuoraDataQueryType {
+ if o == nil {
+ return nil
+ }
+ return o.DataQuery
+}
+
+func (o *SourceZuora) GetSourceType() Zuora {
+ return ZuoraZuora
+}
+
+func (o *SourceZuora) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceZuora) GetTenantEndpoint() SourceZuoraTenantEndpointLocation {
+ if o == nil {
+ return SourceZuoraTenantEndpointLocation("")
+ }
+ return o.TenantEndpoint
+}
+
+func (o *SourceZuora) GetWindowInDays() *string {
+ if o == nil {
+ return nil
+ }
+ return o.WindowInDays
}
diff --git a/internal/sdk/pkg/models/shared/sourcezuoracreaterequest.go b/internal/sdk/pkg/models/shared/sourcezuoracreaterequest.go
old mode 100755
new mode 100644
index 72605450b..b74453b2c
--- a/internal/sdk/pkg/models/shared/sourcezuoracreaterequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezuoracreaterequest.go
@@ -4,8 +4,46 @@ package shared
type SourceZuoraCreateRequest struct {
Configuration SourceZuora `json:"configuration"`
- Name string `json:"name"`
+ // The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided.
+ DefinitionID *string `json:"definitionId,omitempty"`
+ // Name of the source e.g. dev-mysql-instance.
+ Name string `json:"name"`
// Optional secretID obtained through the public API OAuth redirect flow.
SecretID *string `json:"secretId,omitempty"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZuoraCreateRequest) GetConfiguration() SourceZuora {
+ if o == nil {
+ return SourceZuora{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZuoraCreateRequest) GetDefinitionID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.DefinitionID
+}
+
+func (o *SourceZuoraCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZuoraCreateRequest) GetSecretID() *string {
+ if o == nil {
+ return nil
+ }
+ return o.SecretID
+}
+
+func (o *SourceZuoraCreateRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezuoraputrequest.go b/internal/sdk/pkg/models/shared/sourcezuoraputrequest.go
old mode 100755
new mode 100644
index a595e0bb1..9107f2a82
--- a/internal/sdk/pkg/models/shared/sourcezuoraputrequest.go
+++ b/internal/sdk/pkg/models/shared/sourcezuoraputrequest.go
@@ -7,3 +7,24 @@ type SourceZuoraPutRequest struct {
Name string `json:"name"`
WorkspaceID string `json:"workspaceId"`
}
+
+func (o *SourceZuoraPutRequest) GetConfiguration() SourceZuoraUpdate {
+ if o == nil {
+ return SourceZuoraUpdate{}
+ }
+ return o.Configuration
+}
+
+func (o *SourceZuoraPutRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *SourceZuoraPutRequest) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
+}
diff --git a/internal/sdk/pkg/models/shared/sourcezuoraupdate.go b/internal/sdk/pkg/models/shared/sourcezuoraupdate.go
old mode 100755
new mode 100644
index b9f92c3df..661f54412
--- a/internal/sdk/pkg/models/shared/sourcezuoraupdate.go
+++ b/internal/sdk/pkg/models/shared/sourcezuoraupdate.go
@@ -5,21 +5,22 @@ package shared
import (
"encoding/json"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
)
-// SourceZuoraUpdateDataQueryType - Choose between `Live`, or `Unlimited` - the optimized, replicated database at 12 hours freshness for high volume extraction Link
-type SourceZuoraUpdateDataQueryType string
+// DataQueryType - Choose between `Live`, or `Unlimited` - the optimized, replicated database at 12 hours freshness for high volume extraction Link
+type DataQueryType string
const (
- SourceZuoraUpdateDataQueryTypeLive SourceZuoraUpdateDataQueryType = "Live"
- SourceZuoraUpdateDataQueryTypeUnlimited SourceZuoraUpdateDataQueryType = "Unlimited"
+ DataQueryTypeLive DataQueryType = "Live"
+ DataQueryTypeUnlimited DataQueryType = "Unlimited"
)
-func (e SourceZuoraUpdateDataQueryType) ToPointer() *SourceZuoraUpdateDataQueryType {
+func (e DataQueryType) ToPointer() *DataQueryType {
return &e
}
-func (e *SourceZuoraUpdateDataQueryType) UnmarshalJSON(data []byte) error {
+func (e *DataQueryType) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -28,33 +29,33 @@ func (e *SourceZuoraUpdateDataQueryType) UnmarshalJSON(data []byte) error {
case "Live":
fallthrough
case "Unlimited":
- *e = SourceZuoraUpdateDataQueryType(v)
+ *e = DataQueryType(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZuoraUpdateDataQueryType: %v", v)
+ return fmt.Errorf("invalid value for DataQueryType: %v", v)
}
}
-// SourceZuoraUpdateTenantEndpointLocation - Please choose the right endpoint where your Tenant is located. More info by this Link
-type SourceZuoraUpdateTenantEndpointLocation string
+// TenantEndpointLocation - Please choose the right endpoint where your Tenant is located. More info by this Link
+type TenantEndpointLocation string
const (
- SourceZuoraUpdateTenantEndpointLocationUsProduction SourceZuoraUpdateTenantEndpointLocation = "US Production"
- SourceZuoraUpdateTenantEndpointLocationUsCloudProduction SourceZuoraUpdateTenantEndpointLocation = "US Cloud Production"
- SourceZuoraUpdateTenantEndpointLocationUsAPISandbox SourceZuoraUpdateTenantEndpointLocation = "US API Sandbox"
- SourceZuoraUpdateTenantEndpointLocationUsCloudAPISandbox SourceZuoraUpdateTenantEndpointLocation = "US Cloud API Sandbox"
- SourceZuoraUpdateTenantEndpointLocationUsCentralSandbox SourceZuoraUpdateTenantEndpointLocation = "US Central Sandbox"
- SourceZuoraUpdateTenantEndpointLocationUsPerformanceTest SourceZuoraUpdateTenantEndpointLocation = "US Performance Test"
- SourceZuoraUpdateTenantEndpointLocationEuProduction SourceZuoraUpdateTenantEndpointLocation = "EU Production"
- SourceZuoraUpdateTenantEndpointLocationEuAPISandbox SourceZuoraUpdateTenantEndpointLocation = "EU API Sandbox"
- SourceZuoraUpdateTenantEndpointLocationEuCentralSandbox SourceZuoraUpdateTenantEndpointLocation = "EU Central Sandbox"
+ TenantEndpointLocationUsProduction TenantEndpointLocation = "US Production"
+ TenantEndpointLocationUsCloudProduction TenantEndpointLocation = "US Cloud Production"
+ TenantEndpointLocationUsAPISandbox TenantEndpointLocation = "US API Sandbox"
+ TenantEndpointLocationUsCloudAPISandbox TenantEndpointLocation = "US Cloud API Sandbox"
+ TenantEndpointLocationUsCentralSandbox TenantEndpointLocation = "US Central Sandbox"
+ TenantEndpointLocationUsPerformanceTest TenantEndpointLocation = "US Performance Test"
+ TenantEndpointLocationEuProduction TenantEndpointLocation = "EU Production"
+ TenantEndpointLocationEuAPISandbox TenantEndpointLocation = "EU API Sandbox"
+ TenantEndpointLocationEuCentralSandbox TenantEndpointLocation = "EU Central Sandbox"
)
-func (e SourceZuoraUpdateTenantEndpointLocation) ToPointer() *SourceZuoraUpdateTenantEndpointLocation {
+func (e TenantEndpointLocation) ToPointer() *TenantEndpointLocation {
return &e
}
-func (e *SourceZuoraUpdateTenantEndpointLocation) UnmarshalJSON(data []byte) error {
+func (e *TenantEndpointLocation) UnmarshalJSON(data []byte) error {
var v string
if err := json.Unmarshal(data, &v); err != nil {
return err
@@ -77,10 +78,10 @@ func (e *SourceZuoraUpdateTenantEndpointLocation) UnmarshalJSON(data []byte) err
case "EU API Sandbox":
fallthrough
case "EU Central Sandbox":
- *e = SourceZuoraUpdateTenantEndpointLocation(v)
+ *e = TenantEndpointLocation(v)
return nil
default:
- return fmt.Errorf("invalid value for SourceZuoraUpdateTenantEndpointLocation: %v", v)
+ return fmt.Errorf("invalid value for TenantEndpointLocation: %v", v)
}
}
@@ -90,11 +91,64 @@ type SourceZuoraUpdate struct {
// Your OAuth user Client Secret
ClientSecret string `json:"client_secret"`
// Choose between `Live`, or `Unlimited` - the optimized, replicated database at 12 hours freshness for high volume extraction Link
- DataQuery SourceZuoraUpdateDataQueryType `json:"data_query"`
+ DataQuery *DataQueryType `default:"Live" json:"data_query"`
// Start Date in format: YYYY-MM-DD
StartDate string `json:"start_date"`
// Please choose the right endpoint where your Tenant is located. More info by this Link
- TenantEndpoint SourceZuoraUpdateTenantEndpointLocation `json:"tenant_endpoint"`
+ TenantEndpoint TenantEndpointLocation `json:"tenant_endpoint"`
// The amount of days for each data-chunk begining from start_date. Bigger the value - faster the fetch. (0.1 - as for couple of hours, 1 - as for a Day; 364 - as for a Year).
- WindowInDays *string `json:"window_in_days,omitempty"`
+ WindowInDays *string `default:"90" json:"window_in_days"`
+}
+
+func (s SourceZuoraUpdate) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(s, "", false)
+}
+
+func (s *SourceZuoraUpdate) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &s, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *SourceZuoraUpdate) GetClientID() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientID
+}
+
+func (o *SourceZuoraUpdate) GetClientSecret() string {
+ if o == nil {
+ return ""
+ }
+ return o.ClientSecret
+}
+
+func (o *SourceZuoraUpdate) GetDataQuery() *DataQueryType {
+ if o == nil {
+ return nil
+ }
+ return o.DataQuery
+}
+
+func (o *SourceZuoraUpdate) GetStartDate() string {
+ if o == nil {
+ return ""
+ }
+ return o.StartDate
+}
+
+func (o *SourceZuoraUpdate) GetTenantEndpoint() TenantEndpointLocation {
+ if o == nil {
+ return TenantEndpointLocation("")
+ }
+ return o.TenantEndpoint
+}
+
+func (o *SourceZuoraUpdate) GetWindowInDays() *string {
+ if o == nil {
+ return nil
+ }
+ return o.WindowInDays
}
diff --git a/internal/sdk/pkg/models/shared/streamconfiguration.go b/internal/sdk/pkg/models/shared/streamconfiguration.go
old mode 100755
new mode 100644
index 5b64ec4e6..c9b91a3bd
--- a/internal/sdk/pkg/models/shared/streamconfiguration.go
+++ b/internal/sdk/pkg/models/shared/streamconfiguration.go
@@ -11,3 +11,31 @@ type StreamConfiguration struct {
PrimaryKey [][]string `json:"primaryKey,omitempty"`
SyncMode *ConnectionSyncModeEnum `json:"syncMode,omitempty"`
}
+
+func (o *StreamConfiguration) GetCursorField() []string {
+ if o == nil {
+ return nil
+ }
+ return o.CursorField
+}
+
+func (o *StreamConfiguration) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *StreamConfiguration) GetPrimaryKey() [][]string {
+ if o == nil {
+ return nil
+ }
+ return o.PrimaryKey
+}
+
+func (o *StreamConfiguration) GetSyncMode() *ConnectionSyncModeEnum {
+ if o == nil {
+ return nil
+ }
+ return o.SyncMode
+}
diff --git a/internal/sdk/pkg/models/shared/streamconfigurations.go b/internal/sdk/pkg/models/shared/streamconfigurations.go
old mode 100755
new mode 100644
index 76d2ad439..307fd5d38
--- a/internal/sdk/pkg/models/shared/streamconfigurations.go
+++ b/internal/sdk/pkg/models/shared/streamconfigurations.go
@@ -6,3 +6,10 @@ package shared
type StreamConfigurations struct {
Streams []StreamConfiguration `json:"streams,omitempty"`
}
+
+func (o *StreamConfigurations) GetStreams() []StreamConfiguration {
+ if o == nil {
+ return nil
+ }
+ return o.Streams
+}
diff --git a/internal/sdk/pkg/models/shared/streamproperties.go b/internal/sdk/pkg/models/shared/streamproperties.go
old mode 100755
new mode 100644
index 6fad8f040..e5ac76682
--- a/internal/sdk/pkg/models/shared/streamproperties.go
+++ b/internal/sdk/pkg/models/shared/streamproperties.go
@@ -11,3 +11,45 @@ type StreamProperties struct {
StreamName *string `json:"streamName,omitempty"`
SyncModes []ConnectionSyncModeEnum `json:"syncModes,omitempty"`
}
+
+func (o *StreamProperties) GetDefaultCursorField() []string {
+ if o == nil {
+ return nil
+ }
+ return o.DefaultCursorField
+}
+
+func (o *StreamProperties) GetPropertyFields() [][]string {
+ if o == nil {
+ return nil
+ }
+ return o.PropertyFields
+}
+
+func (o *StreamProperties) GetSourceDefinedCursorField() *bool {
+ if o == nil {
+ return nil
+ }
+ return o.SourceDefinedCursorField
+}
+
+func (o *StreamProperties) GetSourceDefinedPrimaryKey() [][]string {
+ if o == nil {
+ return nil
+ }
+ return o.SourceDefinedPrimaryKey
+}
+
+func (o *StreamProperties) GetStreamName() *string {
+ if o == nil {
+ return nil
+ }
+ return o.StreamName
+}
+
+func (o *StreamProperties) GetSyncModes() []ConnectionSyncModeEnum {
+ if o == nil {
+ return nil
+ }
+ return o.SyncModes
+}
diff --git a/internal/sdk/pkg/models/shared/streampropertiesresponse.go b/internal/sdk/pkg/models/shared/streampropertiesresponse.go
old mode 100755
new mode 100644
index 5661ca882..e41b1c4c5
--- a/internal/sdk/pkg/models/shared/streampropertiesresponse.go
+++ b/internal/sdk/pkg/models/shared/streampropertiesresponse.go
@@ -6,3 +6,10 @@ package shared
type StreamPropertiesResponse struct {
Streams []StreamProperties `json:"streams,omitempty"`
}
+
+func (o *StreamPropertiesResponse) GetStreams() []StreamProperties {
+ if o == nil {
+ return nil
+ }
+ return o.Streams
+}
diff --git a/internal/sdk/pkg/models/shared/workspacecreaterequest.go b/internal/sdk/pkg/models/shared/workspacecreaterequest.go
old mode 100755
new mode 100644
index 3fae9ae53..8640a3ae8
--- a/internal/sdk/pkg/models/shared/workspacecreaterequest.go
+++ b/internal/sdk/pkg/models/shared/workspacecreaterequest.go
@@ -6,3 +6,10 @@ type WorkspaceCreateRequest struct {
// Name of the workspace
Name string `json:"name"`
}
+
+func (o *WorkspaceCreateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
diff --git a/internal/sdk/pkg/models/shared/workspaceoauthcredentialsrequest.go b/internal/sdk/pkg/models/shared/workspaceoauthcredentialsrequest.go
old mode 100755
new mode 100644
index 03f2bab53..d036eb643
--- a/internal/sdk/pkg/models/shared/workspaceoauthcredentialsrequest.go
+++ b/internal/sdk/pkg/models/shared/workspaceoauthcredentialsrequest.go
@@ -11,3 +11,24 @@ type WorkspaceOAuthCredentialsRequest struct {
// The name of the source i.e. google-ads
Name string `json:"name"`
}
+
+func (o *WorkspaceOAuthCredentialsRequest) GetActorType() ActorTypeEnum {
+ if o == nil {
+ return ActorTypeEnum("")
+ }
+ return o.ActorType
+}
+
+func (o *WorkspaceOAuthCredentialsRequest) GetConfiguration() OAuthCredentialsConfiguration {
+ if o == nil {
+ return OAuthCredentialsConfiguration{}
+ }
+ return o.Configuration
+}
+
+func (o *WorkspaceOAuthCredentialsRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
diff --git a/internal/sdk/pkg/models/shared/workspaceresponse.go b/internal/sdk/pkg/models/shared/workspaceresponse.go
old mode 100755
new mode 100644
index d549a480c..0eb048455
--- a/internal/sdk/pkg/models/shared/workspaceresponse.go
+++ b/internal/sdk/pkg/models/shared/workspaceresponse.go
@@ -2,9 +2,45 @@
package shared
+import (
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
+)
+
// WorkspaceResponse - Provides details of a single workspace.
type WorkspaceResponse struct {
- DataResidency GeographyEnum `json:"dataResidency"`
- Name string `json:"name"`
- WorkspaceID string `json:"workspaceId"`
+ DataResidency *GeographyEnum `default:"auto" json:"dataResidency"`
+ Name string `json:"name"`
+ WorkspaceID string `json:"workspaceId"`
+}
+
+func (w WorkspaceResponse) MarshalJSON() ([]byte, error) {
+ return utils.MarshalJSON(w, "", false)
+}
+
+func (w *WorkspaceResponse) UnmarshalJSON(data []byte) error {
+ if err := utils.UnmarshalJSON(data, &w, "", false, false); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (o *WorkspaceResponse) GetDataResidency() *GeographyEnum {
+ if o == nil {
+ return nil
+ }
+ return o.DataResidency
+}
+
+func (o *WorkspaceResponse) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
+
+func (o *WorkspaceResponse) GetWorkspaceID() string {
+ if o == nil {
+ return ""
+ }
+ return o.WorkspaceID
}
diff --git a/internal/sdk/pkg/models/shared/workspacesresponse.go b/internal/sdk/pkg/models/shared/workspacesresponse.go
old mode 100755
new mode 100644
index b35d3295d..1ffa3c0ef
--- a/internal/sdk/pkg/models/shared/workspacesresponse.go
+++ b/internal/sdk/pkg/models/shared/workspacesresponse.go
@@ -7,3 +7,24 @@ type WorkspacesResponse struct {
Next *string `json:"next,omitempty"`
Previous *string `json:"previous,omitempty"`
}
+
+func (o *WorkspacesResponse) GetData() []WorkspaceResponse {
+ if o == nil {
+ return []WorkspaceResponse{}
+ }
+ return o.Data
+}
+
+func (o *WorkspacesResponse) GetNext() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Next
+}
+
+func (o *WorkspacesResponse) GetPrevious() *string {
+ if o == nil {
+ return nil
+ }
+ return o.Previous
+}
diff --git a/internal/sdk/pkg/models/shared/workspaceupdaterequest.go b/internal/sdk/pkg/models/shared/workspaceupdaterequest.go
old mode 100755
new mode 100644
index a5b945625..f53d5af0d
--- a/internal/sdk/pkg/models/shared/workspaceupdaterequest.go
+++ b/internal/sdk/pkg/models/shared/workspaceupdaterequest.go
@@ -6,3 +6,10 @@ type WorkspaceUpdateRequest struct {
// Name of the workspace
Name string `json:"name"`
}
+
+func (o *WorkspaceUpdateRequest) GetName() string {
+ if o == nil {
+ return ""
+ }
+ return o.Name
+}
diff --git a/internal/sdk/pkg/types/bigint.go b/internal/sdk/pkg/types/bigint.go
old mode 100755
new mode 100644
index b37a41537..afd0cd2b8
--- a/internal/sdk/pkg/types/bigint.go
+++ b/internal/sdk/pkg/types/bigint.go
@@ -7,42 +7,15 @@ import (
"math/big"
)
-type BigInt struct {
- big.Int
-}
-
-func (b BigInt) MarshalJSON() ([]byte, error) {
- return []byte(`"` + b.String() + `"`), nil
-}
-
-func (b *BigInt) UnmarshalJSON(p []byte) error {
- if string(p) == "null" {
- return nil
- }
-
- stringVal := string(p)
- if len(stringVal) > 2 && stringVal[0] == '"' && stringVal[len(stringVal)-1] == '"' {
- stringVal = stringVal[1 : len(stringVal)-1]
- }
-
- var z big.Int
- _, ok := z.SetString(string(stringVal), 10)
- if !ok {
- return fmt.Errorf("not a valid big integer: %s", p)
- }
- b.Int = z
- return nil
-}
-
-// MustBigIntFromString provides a helper function to return a big.Int from a string
+// MustNewBigIntFromString returns an instance of big.Int from a string
// The string is assumed to be base 10 and if it is not a valid big.Int
-// then the function will return nil
-func MustBigIntFromString(s string) *BigInt {
+// then the function panics.
+// Avoid using this function in production code.
+func MustNewBigIntFromString(s string) *big.Int {
i, ok := new(big.Int).SetString(s, 10)
if !ok {
- return nil
- }
- return &BigInt{
- Int: *i,
+ panic(fmt.Errorf("failed to parse string as big.Int"))
}
+
+ return i
}
diff --git a/internal/sdk/pkg/types/date.go b/internal/sdk/pkg/types/date.go
old mode 100755
new mode 100644
index 01c69b7ac..c4648fa96
--- a/internal/sdk/pkg/types/date.go
+++ b/internal/sdk/pkg/types/date.go
@@ -67,6 +67,10 @@ func MustDateFromString(str string) Date {
return d
}
+func (d Date) GetTime() time.Time {
+ return d.Time
+}
+
func (d Date) MarshalJSON() ([]byte, error) {
return []byte(fmt.Sprintf(`"%s"`, d.Time.Format("2006-01-02"))), nil
}
diff --git a/internal/sdk/pkg/types/datetime.go b/internal/sdk/pkg/types/datetime.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/types/decimal.go b/internal/sdk/pkg/types/decimal.go
new file mode 100644
index 000000000..a42284b92
--- /dev/null
+++ b/internal/sdk/pkg/types/decimal.go
@@ -0,0 +1,20 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package types
+
+import (
+ "fmt"
+
+ "github.com/ericlagergren/decimal"
+)
+
+// MustNewDecimalFromString returns an instance of decimal.Big from a string.
+// Avoid using this function in production code.
+func MustNewDecimalFromString(s string) *decimal.Big {
+ d, ok := new(decimal.Big).SetString(s)
+ if !ok {
+ panic(fmt.Errorf("failed to parse string as decimal.Big"))
+ }
+
+ return d
+}
diff --git a/internal/sdk/pkg/types/pointers.go b/internal/sdk/pkg/types/pointers.go
new file mode 100644
index 000000000..4f15e99d4
--- /dev/null
+++ b/internal/sdk/pkg/types/pointers.go
@@ -0,0 +1,10 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package types
+
+func String(s string) *string { return &s }
+func Bool(b bool) *bool { return &b }
+func Int(i int) *int { return &i }
+func Int64(i int64) *int64 { return &i }
+func Float32(f float32) *float32 { return &f }
+func Float64(f float64) *float64 { return &f }
diff --git a/internal/sdk/pkg/utils/contenttype.go b/internal/sdk/pkg/utils/contenttype.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/utils/form.go b/internal/sdk/pkg/utils/form.go
old mode 100755
new mode 100644
index 3fea5fd07..1a3946938
--- a/internal/sdk/pkg/utils/form.go
+++ b/internal/sdk/pkg/utils/form.go
@@ -10,7 +10,9 @@ import (
"strings"
"time"
- "airbyte/internal/sdk/pkg/types"
+ "github.com/ericlagergren/decimal"
+
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
)
func populateForm(paramName string, explode bool, objType reflect.Type, objValue reflect.Value, delimiter string, getFieldName func(reflect.StructField) string) url.Values {
@@ -33,10 +35,10 @@ func populateForm(paramName string, explode bool, objType reflect.Type, objValue
formValues.Add(paramName, valToString(objValue.Interface()))
case types.Date:
formValues.Add(paramName, valToString(objValue.Interface()))
- case types.BigInt:
- formValues.Add(paramName, valToString(objValue.Interface()))
case big.Int:
formValues.Add(paramName, valToString(objValue.Interface()))
+ case decimal.Big:
+ formValues.Add(paramName, valToString(objValue.Interface()))
default:
var items []string
diff --git a/internal/sdk/pkg/utils/headers.go b/internal/sdk/pkg/utils/headers.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/utils/json.go b/internal/sdk/pkg/utils/json.go
new file mode 100644
index 000000000..6dce06639
--- /dev/null
+++ b/internal/sdk/pkg/utils/json.go
@@ -0,0 +1,579 @@
+// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.
+
+package utils
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "math/big"
+ "reflect"
+ "strings"
+ "time"
+ "unsafe"
+
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
+
+ "github.com/ericlagergren/decimal"
+)
+
+func MarshalJSON(v interface{}, tag reflect.StructTag, topLevel bool) ([]byte, error) {
+ typ, val := dereferencePointers(reflect.TypeOf(v), reflect.ValueOf(v))
+
+ switch {
+ case isModelType(typ):
+ if topLevel {
+ return json.Marshal(v)
+ }
+
+ if isNil(typ, val) {
+ return []byte("null"), nil
+ }
+
+ out := map[string]json.RawMessage{}
+
+ for i := 0; i < typ.NumField(); i++ {
+ field := typ.Field(i)
+ fieldVal := val.Field(i)
+
+ fieldName := field.Name
+
+ omitEmpty := false
+ jsonTag := field.Tag.Get("json")
+ if jsonTag != "" {
+ for _, tag := range strings.Split(jsonTag, ",") {
+ if tag == "omitempty" {
+ omitEmpty = true
+ } else {
+ fieldName = tag
+ }
+ }
+ }
+
+ if isNil(field.Type, fieldVal) {
+ if omitEmpty {
+ continue
+ }
+ }
+
+ if !field.IsExported() && field.Tag.Get("const") == "" {
+ continue
+ }
+
+ additionalProperties := field.Tag.Get("additionalProperties")
+ if fieldName == "-" && additionalProperties == "" {
+ continue
+ }
+
+ if additionalProperties == "true" {
+ if field.Type.Kind() != reflect.Map {
+ return nil, fmt.Errorf("additionalProperties must be a map")
+ }
+
+ for _, key := range fieldVal.MapKeys() {
+ r, err := marshalValue(fieldVal.MapIndex(key).Interface(), field.Tag)
+ if err != nil {
+ return nil, err
+ }
+
+ out[key.String()] = r
+ }
+
+ continue
+ }
+
+ var fv interface{}
+
+ if field.IsExported() {
+ fv = fieldVal.Interface()
+ } else {
+ pt := reflect.New(typ).Elem()
+ pt.Set(val)
+
+ pf := pt.Field(i)
+
+ fv = reflect.NewAt(pf.Type(), unsafe.Pointer(pf.UnsafeAddr())).Elem().Interface()
+ }
+
+ r, err := marshalValue(fv, field.Tag)
+ if err != nil {
+ return nil, err
+ }
+
+ out[fieldName] = r
+ }
+
+ return json.Marshal(out)
+ default:
+ return marshalValue(v, tag)
+ }
+}
+
+func UnmarshalJSON(b []byte, v interface{}, tag reflect.StructTag, topLevel bool, disallowUnknownFields bool) error {
+ if reflect.TypeOf(v).Kind() != reflect.Ptr {
+ return fmt.Errorf("v must be a pointer")
+ }
+
+ typ, val := dereferencePointers(reflect.TypeOf(v), reflect.ValueOf(v))
+
+ switch {
+ case isModelType(typ):
+ if topLevel || bytes.Equal(b, []byte("null")) {
+ d := json.NewDecoder(bytes.NewReader(b))
+ if disallowUnknownFields {
+ d.DisallowUnknownFields()
+ }
+ return d.Decode(v)
+ }
+
+ var unmarhsaled map[string]json.RawMessage
+
+ if err := json.Unmarshal(b, &unmarhsaled); err != nil {
+ return err
+ }
+
+ var additionalPropertiesField *reflect.StructField
+ var additionalPropertiesValue *reflect.Value
+
+ for i := 0; i < typ.NumField(); i++ {
+ field := typ.Field(i)
+ fieldVal := val.Field(i)
+
+ fieldName := field.Name
+
+ jsonTag := field.Tag.Get("json")
+ if jsonTag != "" {
+ for _, tag := range strings.Split(jsonTag, ",") {
+ if tag != "omitempty" {
+ fieldName = tag
+ }
+ }
+ }
+
+ if field.Tag.Get("additionalProperties") == "true" {
+ additionalPropertiesField = &field
+ additionalPropertiesValue = &fieldVal
+ continue
+ }
+
+ // If we receive a value for a const field, ignore it but mark it as unmarshaled
+ if field.Tag.Get("const") != "" {
+ if r, ok := unmarhsaled[fieldName]; ok {
+ val := string(r)
+ if strings.HasPrefix(val, `"`) && strings.HasSuffix(val, `"`) {
+ val = val[1 : len(val)-1]
+ }
+ if val != field.Tag.Get("const") {
+ return fmt.Errorf("const field %s does not match expected value %s", fieldName, field.Tag.Get("const"))
+ }
+
+ delete(unmarhsaled, fieldName)
+ }
+ } else if !field.IsExported() {
+ continue
+ }
+
+ value, ok := unmarhsaled[fieldName]
+ if !ok {
+ defaultTag := field.Tag.Get("default")
+ if defaultTag != "" {
+ value = handleDefaultConstValue(defaultTag, fieldVal.Interface(), field.Tag)
+ ok = true
+ }
+ } else {
+ delete(unmarhsaled, fieldName)
+ }
+
+ if ok {
+ if err := unmarshalValue(value, fieldVal, field.Tag, disallowUnknownFields); err != nil {
+ return err
+ }
+ }
+ }
+
+ keys := make([]string, 0, len(unmarhsaled))
+ for k := range unmarhsaled {
+ keys = append(keys, k)
+ }
+
+ if len(keys) > 0 {
+ if disallowUnknownFields && (additionalPropertiesField == nil || additionalPropertiesValue == nil) {
+ return fmt.Errorf("unknown fields: %v", keys)
+ }
+
+ if additionalPropertiesField != nil && additionalPropertiesValue != nil {
+ if additionalPropertiesValue.Kind() != reflect.Map {
+ return fmt.Errorf("additionalProperties must be a map")
+ }
+
+ additionalPropertiesValue.Set(reflect.MakeMap(additionalPropertiesField.Type))
+
+ for key, value := range unmarhsaled {
+ val := reflect.New(additionalPropertiesField.Type.Elem())
+
+ if err := unmarshalValue(value, val, additionalPropertiesField.Tag, disallowUnknownFields); err != nil {
+ return err
+ }
+
+ additionalPropertiesValue.SetMapIndex(reflect.ValueOf(key), val.Elem())
+ }
+ }
+ }
+ default:
+ return unmarshalValue(b, reflect.ValueOf(v), tag, disallowUnknownFields)
+ }
+
+ return nil
+}
+
+func marshalValue(v interface{}, tag reflect.StructTag) (json.RawMessage, error) {
+ constTag := tag.Get("const")
+ if constTag != "" {
+ return handleDefaultConstValue(constTag, v, tag), nil
+ }
+
+ if isNil(reflect.TypeOf(v), reflect.ValueOf(v)) {
+ defaultTag := tag.Get("default")
+ if defaultTag != "" {
+ return handleDefaultConstValue(defaultTag, v, tag), nil
+ }
+
+ return []byte("null"), nil
+ }
+
+ typ, val := dereferencePointers(reflect.TypeOf(v), reflect.ValueOf(v))
+ switch typ.Kind() {
+ case reflect.Map:
+ if isNil(typ, val) {
+ return []byte("null"), nil
+ }
+
+ out := map[string]json.RawMessage{}
+
+ for _, key := range val.MapKeys() {
+ itemVal := val.MapIndex(key)
+
+ if isNil(itemVal.Type(), itemVal) {
+ out[key.String()] = []byte("null")
+ continue
+ }
+
+ r, err := marshalValue(itemVal.Interface(), tag)
+ if err != nil {
+ return nil, err
+ }
+
+ out[key.String()] = r
+ }
+
+ return json.Marshal(out)
+ case reflect.Slice, reflect.Array:
+ if isNil(typ, val) {
+ return []byte("null"), nil
+ }
+
+ out := []json.RawMessage{}
+
+ for i := 0; i < val.Len(); i++ {
+ itemVal := val.Index(i)
+
+ if isNil(itemVal.Type(), itemVal) {
+ out = append(out, []byte("null"))
+ continue
+ }
+
+ r, err := marshalValue(itemVal.Interface(), tag)
+ if err != nil {
+ return nil, err
+ }
+
+ out = append(out, r)
+ }
+
+ return json.Marshal(out)
+ case reflect.Struct:
+ switch typ {
+ case reflect.TypeOf(time.Time{}):
+ return []byte(fmt.Sprintf(`"%s"`, val.Interface().(time.Time).Format(time.RFC3339Nano))), nil
+ case reflect.TypeOf(big.Int{}):
+ format := tag.Get("bigint")
+ if format == "string" {
+ b := val.Interface().(big.Int)
+ return []byte(fmt.Sprintf(`"%s"`, (&b).String())), nil
+ }
+ case reflect.TypeOf(decimal.Big{}):
+ format := tag.Get("decimal")
+ if format == "number" {
+ b := val.Interface().(decimal.Big)
+ f, ok := (&b).Float64()
+ if ok {
+ return []byte(b.String()), nil
+ }
+
+ return []byte(fmt.Sprintf(`%f`, f)), nil
+ }
+ }
+ }
+
+ return json.Marshal(v)
+}
+
+func handleDefaultConstValue(tagValue string, val interface{}, tag reflect.StructTag) json.RawMessage {
+ if tagValue == "null" {
+ return []byte("null")
+ }
+
+ typ := dereferenceTypePointer(reflect.TypeOf(val))
+ switch typ {
+ case reflect.TypeOf(time.Time{}):
+ return []byte(fmt.Sprintf(`"%s"`, tagValue))
+ case reflect.TypeOf(big.Int{}):
+ bigIntTag := tag.Get("bigint")
+ if bigIntTag == "string" {
+ return []byte(fmt.Sprintf(`"%s"`, tagValue))
+ }
+ case reflect.TypeOf(decimal.Big{}):
+ decimalTag := tag.Get("decimal")
+ if decimalTag != "number" {
+ return []byte(fmt.Sprintf(`"%s"`, tagValue))
+ }
+ case reflect.TypeOf(types.Date{}):
+ return []byte(fmt.Sprintf(`"%s"`, tagValue))
+ default:
+ if typ.Kind() == reflect.String {
+ return []byte(fmt.Sprintf(`"%s"`, tagValue))
+ }
+ }
+
+ return []byte(tagValue)
+}
+
+func unmarshalValue(value json.RawMessage, v reflect.Value, tag reflect.StructTag, disallowUnknownFields bool) error {
+ if bytes.Equal(value, []byte("null")) {
+ if v.CanAddr() {
+ return json.Unmarshal(value, v.Addr().Interface())
+ } else {
+ return json.Unmarshal(value, v.Interface())
+ }
+ }
+
+ typ := dereferenceTypePointer(v.Type())
+
+ switch typ.Kind() {
+ case reflect.Map:
+ if bytes.Equal(value, []byte("null")) || !isComplexValueType(dereferenceTypePointer(typ.Elem())) {
+ if v.CanAddr() {
+ return json.Unmarshal(value, v.Addr().Interface())
+ } else {
+ return json.Unmarshal(value, v.Interface())
+ }
+ }
+
+ var unmarhsaled map[string]json.RawMessage
+
+ if err := json.Unmarshal(value, &unmarhsaled); err != nil {
+ return err
+ }
+
+ m := reflect.MakeMap(typ)
+
+ for k, value := range unmarhsaled {
+ itemVal := reflect.New(typ.Elem())
+
+ if err := unmarshalValue(value, itemVal, tag, disallowUnknownFields); err != nil {
+ return err
+ }
+
+ m.SetMapIndex(reflect.ValueOf(k), itemVal.Elem())
+ }
+
+ v.Set(m)
+ return nil
+ case reflect.Slice, reflect.Array:
+ if bytes.Equal(value, []byte("null")) || !isComplexValueType(dereferenceTypePointer(typ.Elem())) {
+ if v.CanAddr() {
+ return json.Unmarshal(value, v.Addr().Interface())
+ } else {
+ return json.Unmarshal(value, v.Interface())
+ }
+ }
+
+ var unmarhsaled []json.RawMessage
+
+ if err := json.Unmarshal(value, &unmarhsaled); err != nil {
+ return err
+ }
+
+ arrVal := v
+
+ for _, value := range unmarhsaled {
+ itemVal := reflect.New(typ.Elem())
+
+ if err := unmarshalValue(value, itemVal, tag, disallowUnknownFields); err != nil {
+ return err
+ }
+
+ arrVal = reflect.Append(arrVal, itemVal.Elem())
+ }
+
+ v.Set(arrVal)
+ return nil
+ case reflect.Struct:
+ switch typ {
+ case reflect.TypeOf(time.Time{}):
+ var s string
+ if err := json.Unmarshal(value, &s); err != nil {
+ return err
+ }
+
+ t, err := time.Parse(time.RFC3339Nano, s)
+ if err != nil {
+ return fmt.Errorf("failed to parse string as time.Time: %w", err)
+ }
+
+ if v.Kind() == reflect.Ptr {
+ if v.IsNil() {
+ v.Set(reflect.New(typ))
+ }
+ v = v.Elem()
+ }
+
+ v.Set(reflect.ValueOf(t))
+ return nil
+ case reflect.TypeOf(big.Int{}):
+ var b *big.Int
+
+ format := tag.Get("bigint")
+ if format == "string" {
+ var s string
+ if err := json.Unmarshal(value, &s); err != nil {
+ return err
+ }
+
+ var ok bool
+ b, ok = new(big.Int).SetString(s, 10)
+ if !ok {
+ return fmt.Errorf("failed to parse string as big.Int")
+ }
+ } else {
+ if err := json.Unmarshal(value, &b); err != nil {
+ return err
+ }
+ }
+
+ if v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Ptr {
+ v = v.Elem()
+ }
+
+ v.Set(reflect.ValueOf(b))
+ return nil
+ case reflect.TypeOf(decimal.Big{}):
+ var d *decimal.Big
+ format := tag.Get("decimal")
+ if format == "number" {
+ var ok bool
+ d, ok = new(decimal.Big).SetString(string(value))
+ if !ok {
+ return fmt.Errorf("failed to parse number as decimal.Big")
+ }
+ } else {
+ if err := json.Unmarshal(value, &d); err != nil {
+ return err
+ }
+ }
+
+ if v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Ptr {
+ v = v.Elem()
+ }
+
+ v.Set(reflect.ValueOf(d))
+ return nil
+ case reflect.TypeOf(types.Date{}):
+ var s string
+
+ if err := json.Unmarshal(value, &s); err != nil {
+ return err
+ }
+
+ d, err := types.DateFromString(s)
+ if err != nil {
+ return fmt.Errorf("failed to parse string as types.Date: %w", err)
+ }
+
+ if v.Kind() == reflect.Ptr {
+ if v.IsNil() {
+ v.Set(reflect.New(typ))
+ }
+ v = v.Elem()
+ }
+
+ v.Set(reflect.ValueOf(d))
+ return nil
+ }
+ }
+
+ var val interface{}
+
+ if v.CanAddr() {
+ val = v.Addr().Interface()
+ } else {
+ val = v.Interface()
+ }
+
+ d := json.NewDecoder(bytes.NewReader(value))
+ if disallowUnknownFields {
+ d.DisallowUnknownFields()
+ }
+ return d.Decode(val)
+}
+
+func dereferencePointers(typ reflect.Type, val reflect.Value) (reflect.Type, reflect.Value) {
+ if typ.Kind() == reflect.Ptr {
+ typ = typ.Elem()
+ val = val.Elem()
+ } else {
+ return typ, val
+ }
+
+ return dereferencePointers(typ, val)
+}
+
+func dereferenceTypePointer(typ reflect.Type) reflect.Type {
+ if typ.Kind() == reflect.Ptr {
+ typ = typ.Elem()
+ } else {
+ return typ
+ }
+
+ return dereferenceTypePointer(typ)
+}
+
+func isComplexValueType(typ reflect.Type) bool {
+ switch typ.Kind() {
+ case reflect.Struct:
+ switch typ {
+ case reflect.TypeOf(time.Time{}):
+ fallthrough
+ case reflect.TypeOf(big.Int{}):
+ fallthrough
+ case reflect.TypeOf(decimal.Big{}):
+ fallthrough
+ case reflect.TypeOf(types.Date{}):
+ return true
+ }
+ }
+
+ return false
+}
+
+func isModelType(typ reflect.Type) bool {
+ if isComplexValueType(typ) {
+ return false
+ }
+
+ if typ.Kind() == reflect.Struct {
+ return true
+ }
+
+ return false
+}
diff --git a/internal/sdk/pkg/utils/pathparams.go b/internal/sdk/pkg/utils/pathparams.go
old mode 100755
new mode 100644
index 1e0dcac84..0489af80d
--- a/internal/sdk/pkg/utils/pathparams.go
+++ b/internal/sdk/pkg/utils/pathparams.go
@@ -5,9 +5,15 @@ package utils
import (
"context"
"fmt"
+ "math/big"
"net/url"
"reflect"
"strings"
+ "time"
+
+ "github.com/ericlagergren/decimal"
+
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
)
func GenerateURL(ctx context.Context, serverURL, path string, pathParams interface{}, globals map[string]map[string]map[string]interface{}) (string, error) {
@@ -95,31 +101,42 @@ func getSimplePathParams(ctx context.Context, parentName string, objType reflect
}
pathParams[parentName] = strings.Join(ppVals, ",")
case reflect.Struct:
- var ppVals []string
- for i := 0; i < objType.NumField(); i++ {
- fieldType := objType.Field(i)
- valType := objValue.Field(i)
-
- ppTag := parseParamTag(pathParamTagKey, fieldType, "simple", explode)
- if ppTag == nil {
- continue
- }
+ switch objValue.Interface().(type) {
+ case time.Time:
+ pathParams[parentName] = valToString(objValue.Interface())
+ case types.Date:
+ pathParams[parentName] = valToString(objValue.Interface())
+ case big.Int:
+ pathParams[parentName] = valToString(objValue.Interface())
+ case decimal.Big:
+ pathParams[parentName] = valToString(objValue.Interface())
+ default:
+ var ppVals []string
+ for i := 0; i < objType.NumField(); i++ {
+ fieldType := objType.Field(i)
+ valType := objValue.Field(i)
+
+ ppTag := parseParamTag(pathParamTagKey, fieldType, "simple", explode)
+ if ppTag == nil {
+ continue
+ }
- if isNil(fieldType.Type, valType) {
- continue
- }
+ if isNil(fieldType.Type, valType) {
+ continue
+ }
- if fieldType.Type.Kind() == reflect.Pointer {
- valType = valType.Elem()
- }
+ if fieldType.Type.Kind() == reflect.Pointer {
+ valType = valType.Elem()
+ }
- if explode {
- ppVals = append(ppVals, fmt.Sprintf("%s=%s", ppTag.ParamName, valToString(valType.Interface())))
- } else {
- ppVals = append(ppVals, fmt.Sprintf("%s,%s", ppTag.ParamName, valToString(valType.Interface())))
+ if explode {
+ ppVals = append(ppVals, fmt.Sprintf("%s=%s", ppTag.ParamName, valToString(valType.Interface())))
+ } else {
+ ppVals = append(ppVals, fmt.Sprintf("%s,%s", ppTag.ParamName, valToString(valType.Interface())))
+ }
}
+ pathParams[parentName] = strings.Join(ppVals, ",")
}
- pathParams[parentName] = strings.Join(ppVals, ",")
default:
pathParams[parentName] = valToString(objValue.Interface())
}
diff --git a/internal/sdk/pkg/utils/queryparams.go b/internal/sdk/pkg/utils/queryparams.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/utils/requestbody.go b/internal/sdk/pkg/utils/requestbody.go
old mode 100755
new mode 100644
index d9fcbdaab..950f39a31
--- a/internal/sdk/pkg/utils/requestbody.go
+++ b/internal/sdk/pkg/utils/requestbody.go
@@ -5,7 +5,6 @@ package utils
import (
"bytes"
"context"
- "encoding/json"
"fmt"
"io"
"mime/multipart"
@@ -26,12 +25,16 @@ var (
urlEncodedEncodingRegex = regexp.MustCompile(`application\/x-www-form-urlencoded.*`)
)
-func SerializeRequestBody(ctx context.Context, request interface{}, requestFieldName string, serializationMethod string) (io.Reader, string, error) {
+func SerializeRequestBody(ctx context.Context, request interface{}, nullable, optional bool, requestFieldName, serializationMethod, tag string) (io.Reader, string, error) {
requestStructType := reflect.TypeOf(request)
requestValType := reflect.ValueOf(request)
if isNil(requestStructType, requestValType) {
- return nil, "", nil
+ if !nullable && optional {
+ return nil, "", nil
+ }
+
+ return serializeContentType(requestFieldName, SerializationMethodToContentType[serializationMethod], requestValType, tag)
}
if requestStructType.Kind() == reflect.Pointer {
@@ -40,7 +43,7 @@ func SerializeRequestBody(ctx context.Context, request interface{}, requestField
}
if requestStructType.Kind() != reflect.Struct {
- return serializeContentType(requestFieldName, SerializationMethodToContentType[serializationMethod], requestValType)
+ return serializeContentType(requestFieldName, SerializationMethodToContentType[serializationMethod], requestValType, tag)
}
requestField, ok := requestStructType.FieldByName(requestFieldName)
@@ -51,23 +54,43 @@ func SerializeRequestBody(ctx context.Context, request interface{}, requestField
// request object (non-flattened)
requestVal := requestValType.FieldByName(requestFieldName)
if isNil(requestField.Type, requestVal) {
- return nil, "", nil
+ if !nullable && optional {
+ return nil, "", nil
+ }
+
+ return serializeContentType(requestFieldName, tag.MediaType, requestVal, string(requestField.Tag))
}
- return serializeContentType(requestFieldName, tag.MediaType, requestVal)
+ return serializeContentType(requestFieldName, tag.MediaType, requestVal, string(requestField.Tag))
}
}
// flattened request object
- return serializeContentType(requestFieldName, SerializationMethodToContentType[serializationMethod], requestValType)
+ return serializeContentType(requestFieldName, SerializationMethodToContentType[serializationMethod], reflect.ValueOf(request), tag)
}
-func serializeContentType(fieldName string, mediaType string, val reflect.Value) (*bytes.Buffer, string, error) {
+func serializeContentType(fieldName string, mediaType string, val reflect.Value, tag string) (*bytes.Buffer, string, error) {
buf := &bytes.Buffer{}
+ if isNil(val.Type(), val) {
+ // TODO: what does a null mean for other content types? Just returning an empty buffer for now
+ if jsonEncodingRegex.MatchString(mediaType) {
+ if _, err := buf.Write([]byte("null")); err != nil {
+ return nil, "", err
+ }
+ }
+
+ return buf, mediaType, nil
+ }
+
switch {
case jsonEncodingRegex.MatchString(mediaType):
- if err := json.NewEncoder(buf).Encode(val.Interface()); err != nil {
+ data, err := MarshalJSON(val.Interface(), reflect.StructTag(tag), true)
+ if err != nil {
+ return nil, "", err
+ }
+
+ if _, err := buf.Write(data); err != nil {
return nil, "", err
}
case multipartEncodingRegex.MatchString(mediaType):
@@ -137,7 +160,7 @@ func encodeMultipartFormData(w io.Writer, data interface{}) (string, error) {
writer.Close()
return "", err
}
- d, err := json.Marshal(valType.Interface())
+ d, err := MarshalJSON(valType.Interface(), field.Tag, true)
if err != nil {
writer.Close()
return "", err
@@ -242,7 +265,7 @@ func encodeFormData(fieldName string, w io.Writer, data interface{}) error {
tag := parseFormTag(field)
if tag.JSON {
- data, err := json.Marshal(valType.Interface())
+ data, err := MarshalJSON(valType.Interface(), field.Tag, true)
if err != nil {
return err
}
diff --git a/internal/sdk/pkg/utils/retries.go b/internal/sdk/pkg/utils/retries.go
old mode 100755
new mode 100644
diff --git a/internal/sdk/pkg/utils/security.go b/internal/sdk/pkg/utils/security.go
old mode 100755
new mode 100644
index 675cd5758..fa5eff7df
--- a/internal/sdk/pkg/utils/security.go
+++ b/internal/sdk/pkg/utils/security.go
@@ -3,6 +3,7 @@
package utils
import (
+ "context"
"encoding/base64"
"fmt"
"net/http"
@@ -26,51 +27,73 @@ type securityTag struct {
SubType string
}
-type SecurityClient struct {
- client HTTPClient
+type securityConfig struct {
headers map[string]string
queryParams map[string]string
}
-func newSecurityClient(client HTTPClient) *SecurityClient {
+type SecurityClient struct {
+ HTTPClient
+ security func(ctx context.Context) (interface{}, error)
+}
+
+func newSecurityClient(client HTTPClient, security func(ctx context.Context) (interface{}, error)) *SecurityClient {
return &SecurityClient{
- client: client,
- headers: make(map[string]string),
- queryParams: make(map[string]string),
+ HTTPClient: client,
+ security: security,
}
}
func (c *SecurityClient) Do(req *http.Request) (*http.Response, error) {
- for k, v := range c.headers {
+ securityCtx, err := c.security(req.Context())
+ if err != nil {
+ return nil, err
+ }
+
+ ctx := securityConfig{
+ headers: make(map[string]string),
+ queryParams: make(map[string]string),
+ }
+ parseSecurityStruct(&ctx, securityCtx)
+
+ for k, v := range ctx.headers {
req.Header.Set(k, v)
}
queryParams := req.URL.Query()
- for k, v := range c.queryParams {
- queryParams.Set(k, v)
+ for k, v := range ctx.queryParams {
+ queryParams.Add(k, v)
}
req.URL.RawQuery = queryParams.Encode()
- return c.client.Do(req)
+ return c.HTTPClient.Do(req)
}
-func ConfigureSecurityClient(c HTTPClient, security interface{}) *SecurityClient {
- client := parseSecurityStruct(c, security)
- if client != nil {
- return client
- }
+func ConfigureSecurityClient(c HTTPClient, security func(ctx context.Context) (interface{}, error)) *SecurityClient {
+ return newSecurityClient(c, security)
+}
- return newSecurityClient(c)
+func trueReflectValue(val reflect.Value) reflect.Value {
+ kind := val.Type().Kind()
+ for kind == reflect.Interface || kind == reflect.Ptr {
+ innerVal := val.Elem()
+ if !innerVal.IsValid() {
+ break
+ }
+ val = innerVal
+ kind = val.Type().Kind()
+ }
+ return val
}
-func parseSecurityStruct(c HTTPClient, security interface{}) *SecurityClient {
- securityStructType := reflect.TypeOf(security)
- securityValType := reflect.ValueOf(security)
+func parseSecurityStruct(c *securityConfig, security interface{}) {
+ securityValType := trueReflectValue(reflect.ValueOf(security))
+ securityStructType := securityValType.Type()
if isNil(securityStructType, securityValType) {
- return nil
+ return
}
if securityStructType.Kind() == reflect.Ptr {
@@ -78,8 +101,6 @@ func parseSecurityStruct(c HTTPClient, security interface{}) *SecurityClient {
securityValType = securityValType.Elem()
}
- client := newSecurityClient(c)
-
for i := 0; i < securityStructType.NumField(); i++ {
fieldType := securityStructType.Field(i)
valType := securityValType.Field(i)
@@ -97,66 +118,51 @@ func parseSecurityStruct(c HTTPClient, security interface{}) *SecurityClient {
secTag := parseSecurityTag(fieldType)
if secTag != nil {
if secTag.Option {
- return parseSecurityOption(c, valType.Interface())
+ handleSecurityOption(c, valType.Interface())
} else if secTag.Scheme {
// Special case for basic auth which could be a flattened struct
if secTag.SubType == "basic" && kind != reflect.Struct {
- parseSecurityScheme(client, secTag, security)
- return client
+ parseSecurityScheme(c, secTag, security)
} else {
- parseSecurityScheme(client, secTag, valType.Interface())
+ parseSecurityScheme(c, secTag, valType.Interface())
}
}
}
}
-
- return client
}
-func parseSecurityOption(c HTTPClient, option interface{}) *SecurityClient {
- optionStructType := reflect.TypeOf(option)
- optionValType := reflect.ValueOf(option)
+func handleSecurityOption(c *securityConfig, option interface{}) error {
+ optionValType := trueReflectValue(reflect.ValueOf(option))
+ optionStructType := optionValType.Type()
if isNil(optionStructType, optionValType) {
return nil
}
- if optionStructType.Kind() == reflect.Ptr {
- optionStructType = optionStructType.Elem()
- optionValType = optionValType.Elem()
- }
-
- client := newSecurityClient(c)
-
for i := 0; i < optionStructType.NumField(); i++ {
fieldType := optionStructType.Field(i)
valType := optionValType.Field(i)
secTag := parseSecurityTag(fieldType)
if secTag != nil && secTag.Scheme {
- parseSecurityScheme(client, secTag, valType.Interface())
+ parseSecurityScheme(c, secTag, valType.Interface())
}
}
- return client
+ return nil
}
-func parseSecurityScheme(client *SecurityClient, schemeTag *securityTag, scheme interface{}) {
- schemeType := reflect.TypeOf(scheme)
- schemeVal := reflect.ValueOf(scheme)
+func parseSecurityScheme(client *securityConfig, schemeTag *securityTag, scheme interface{}) {
+ schemeVal := trueReflectValue(reflect.ValueOf(scheme))
+ schemeType := schemeVal.Type()
if isNil(schemeType, schemeVal) {
return
}
- if schemeType.Kind() == reflect.Ptr {
- schemeType = schemeType.Elem()
- schemeVal = schemeVal.Elem()
- }
-
if schemeType.Kind() == reflect.Struct {
if schemeTag.Type == "http" && schemeTag.SubType == "basic" {
- parseBasicAuthScheme(client, schemeVal.Interface())
+ handleBasicAuthScheme(client, schemeVal.Interface())
return
}
@@ -184,7 +190,7 @@ func parseSecurityScheme(client *SecurityClient, schemeTag *securityTag, scheme
}
}
-func parseSecuritySchemeValue(client *SecurityClient, schemeTag *securityTag, secTag *securityTag, val interface{}) {
+func parseSecuritySchemeValue(client *securityConfig, schemeTag *securityTag, secTag *securityTag, val interface{}) {
switch schemeTag.Type {
case "apiKey":
switch schemeTag.SubType {
@@ -221,7 +227,7 @@ func prefixBearer(authHeaderValue string) string {
return fmt.Sprintf("Bearer %s", authHeaderValue)
}
-func parseBasicAuthScheme(client *SecurityClient, scheme interface{}) {
+func handleBasicAuthScheme(client *securityConfig, scheme interface{}) {
schemeStructType := reflect.TypeOf(scheme)
schemeValType := reflect.ValueOf(scheme)
diff --git a/internal/sdk/pkg/utils/utils.go b/internal/sdk/pkg/utils/utils.go
old mode 100755
new mode 100644
index 7accc1699..8c8161a08
--- a/internal/sdk/pkg/utils/utils.go
+++ b/internal/sdk/pkg/utils/utils.go
@@ -3,7 +3,6 @@
package utils
import (
- "encoding/json"
"fmt"
"io"
"math/big"
@@ -12,7 +11,7 @@ import (
"strings"
"time"
- "airbyte/internal/sdk/pkg/types"
+ "github.com/ericlagergren/decimal"
)
const (
@@ -32,12 +31,12 @@ var (
}
)
-func UnmarshalJsonFromResponseBody(body io.Reader, out interface{}) error {
+func UnmarshalJsonFromResponseBody(body io.Reader, out interface{}, tag string) error {
data, err := io.ReadAll(body)
if err != nil {
return fmt.Errorf("error reading response body: %w", err)
}
- if err := json.Unmarshal(data, &out); err != nil {
+ if err := UnmarshalJSON(data, out, reflect.StructTag(tag), true, false); err != nil {
return fmt.Errorf("error unmarshalling json response body: %w", err)
}
@@ -82,7 +81,6 @@ func parseStructTag(tagKey string, field reflect.StructField) map[string]string
parts = append(parts, "true")
case 2:
// key=value option
- break
default:
// invalid option
continue
@@ -127,10 +125,10 @@ func valToString(val interface{}) string {
switch v := val.(type) {
case time.Time:
return v.Format(time.RFC3339Nano)
- case types.BigInt:
- return v.String()
case big.Int:
return v.String()
+ case decimal.Big:
+ return v.String()
default:
return fmt.Sprintf("%v", v)
}
diff --git a/internal/sdk/sdk.go b/internal/sdk/sdk.go
old mode 100755
new mode 100644
index 3f1332596..f699103c8
--- a/internal/sdk/sdk.go
+++ b/internal/sdk/sdk.go
@@ -3,9 +3,10 @@
package sdk
import (
- "airbyte/internal/sdk/pkg/models/shared"
- "airbyte/internal/sdk/pkg/utils"
+ "context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"net/http"
"time"
)
@@ -42,13 +43,15 @@ func Float64(f float64) *float64 { return &f }
type sdkConfiguration struct {
DefaultClient HTTPClient
SecurityClient HTTPClient
- Security *shared.Security
+ Security func(context.Context) (interface{}, error)
ServerURL string
ServerIndex int
Language string
OpenAPIDocVersion string
SDKVersion string
GenVersion string
+ UserAgent string
+ RetryConfig *utils.RetryConfig
}
func (c *sdkConfiguration) GetServerDetails() (string, map[string]string) {
@@ -61,12 +64,12 @@ func (c *sdkConfiguration) GetServerDetails() (string, map[string]string) {
// SDK - airbyte-api: Programatically control Airbyte Cloud, OSS & Enterprise.
type SDK struct {
- Connections *connections
- Destinations *destinations
- Jobs *jobs
- Sources *sources
- Streams *streams
- Workspaces *workspaces
+ Connections *Connections
+ Destinations *Destinations
+ Jobs *Jobs
+ Sources *Sources
+ Streams *Streams
+ Workspaces *Workspaces
sdkConfiguration sdkConfiguration
}
@@ -109,10 +112,31 @@ func WithClient(client HTTPClient) SDKOption {
}
}
+func withSecurity(security interface{}) func(context.Context) (interface{}, error) {
+ return func(context.Context) (interface{}, error) {
+ return &security, nil
+ }
+}
+
// WithSecurity configures the SDK to use the provided security details
func WithSecurity(security shared.Security) SDKOption {
return func(sdk *SDK) {
- sdk.sdkConfiguration.Security = &security
+ sdk.sdkConfiguration.Security = withSecurity(security)
+ }
+}
+
+// WithSecuritySource configures the SDK to invoke the Security Source function on each method call to determine authentication
+func WithSecuritySource(security func(context.Context) (shared.Security, error)) SDKOption {
+ return func(sdk *SDK) {
+ sdk.sdkConfiguration.Security = func(ctx context.Context) (interface{}, error) {
+ return security(ctx)
+ }
+ }
+}
+
+func WithRetryConfig(retryConfig utils.RetryConfig) SDKOption {
+ return func(sdk *SDK) {
+ sdk.sdkConfiguration.RetryConfig = &retryConfig
}
}
@@ -120,10 +144,11 @@ func WithSecurity(security shared.Security) SDKOption {
func New(opts ...SDKOption) *SDK {
sdk := &SDK{
sdkConfiguration: sdkConfiguration{
- Language: "terraform",
+ Language: "go",
OpenAPIDocVersion: "1.0.0",
- SDKVersion: "0.3.4",
- GenVersion: "2.108.3",
+ SDKVersion: "0.3.5",
+ GenVersion: "2.195.2",
+ UserAgent: "speakeasy-sdk/go 0.3.5 2.195.2 1.0.0 airbyte",
},
}
for _, opt := range opts {
diff --git a/internal/sdk/sources.go b/internal/sdk/sources.go
old mode 100755
new mode 100644
index 632fff7ce..1a3641ba9
--- a/internal/sdk/sources.go
+++ b/internal/sdk/sources.go
@@ -3,38 +3,38 @@
package sdk
import (
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/sdk/pkg/models/shared"
- "airbyte/internal/sdk/pkg/utils"
"bytes"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/sdkerrors"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"io"
"net/http"
"strings"
)
-type sources struct {
+type Sources struct {
sdkConfiguration sdkConfiguration
}
-func newSources(sdkConfig sdkConfiguration) *sources {
- return &sources{
+func newSources(sdkConfig sdkConfiguration) *Sources {
+ return &Sources{
sdkConfiguration: sdkConfig,
}
}
// CreateSource - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSource(ctx context.Context, request shared.SourceCreateRequest) (*operations.CreateSourceResponse, error) {
+func (s *Sources) CreateSource(ctx context.Context, request *shared.SourceCreateRequest) (*operations.CreateSourceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43,7 +43,7 @@ func (s *sources) CreateSource(ctx context.Context, request shared.SourceCreateR
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -76,12 +76,14 @@ func (s *sources) CreateSource(ctx context.Context, request shared.SourceCreateR
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -93,15 +95,14 @@ func (s *sources) CreateSource(ctx context.Context, request shared.SourceCreateR
// CreateSourceAha - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAha(ctx context.Context, request shared.SourceAhaCreateRequest) (*operations.CreateSourceAhaResponse, error) {
+func (s *Sources) CreateSourceAha(ctx context.Context, request *shared.SourceAhaCreateRequest) (*operations.CreateSourceAhaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Aha"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -110,7 +111,7 @@ func (s *sources) CreateSourceAha(ctx context.Context, request shared.SourceAhaC
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -143,12 +144,14 @@ func (s *sources) CreateSourceAha(ctx context.Context, request shared.SourceAhaC
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -160,15 +163,14 @@ func (s *sources) CreateSourceAha(ctx context.Context, request shared.SourceAhaC
// CreateSourceAircall - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAircall(ctx context.Context, request shared.SourceAircallCreateRequest) (*operations.CreateSourceAircallResponse, error) {
+func (s *Sources) CreateSourceAircall(ctx context.Context, request *shared.SourceAircallCreateRequest) (*operations.CreateSourceAircallResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Aircall"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -177,7 +179,7 @@ func (s *sources) CreateSourceAircall(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -210,12 +212,14 @@ func (s *sources) CreateSourceAircall(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -227,15 +231,14 @@ func (s *sources) CreateSourceAircall(ctx context.Context, request shared.Source
// CreateSourceAirtable - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAirtable(ctx context.Context, request shared.SourceAirtableCreateRequest) (*operations.CreateSourceAirtableResponse, error) {
+func (s *Sources) CreateSourceAirtable(ctx context.Context, request *shared.SourceAirtableCreateRequest) (*operations.CreateSourceAirtableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Airtable"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -244,7 +247,7 @@ func (s *sources) CreateSourceAirtable(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -277,12 +280,14 @@ func (s *sources) CreateSourceAirtable(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -294,15 +299,14 @@ func (s *sources) CreateSourceAirtable(ctx context.Context, request shared.Sourc
// CreateSourceAlloydb - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAlloydb(ctx context.Context, request shared.SourceAlloydbCreateRequest) (*operations.CreateSourceAlloydbResponse, error) {
+func (s *Sources) CreateSourceAlloydb(ctx context.Context, request *shared.SourceAlloydbCreateRequest) (*operations.CreateSourceAlloydbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Alloydb"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -311,7 +315,7 @@ func (s *sources) CreateSourceAlloydb(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -344,12 +348,14 @@ func (s *sources) CreateSourceAlloydb(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -361,15 +367,14 @@ func (s *sources) CreateSourceAlloydb(ctx context.Context, request shared.Source
// CreateSourceAmazonAds - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAmazonAds(ctx context.Context, request shared.SourceAmazonAdsCreateRequest) (*operations.CreateSourceAmazonAdsResponse, error) {
+func (s *Sources) CreateSourceAmazonAds(ctx context.Context, request *shared.SourceAmazonAdsCreateRequest) (*operations.CreateSourceAmazonAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#AmazonAds"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -378,7 +383,7 @@ func (s *sources) CreateSourceAmazonAds(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -411,12 +416,14 @@ func (s *sources) CreateSourceAmazonAds(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -428,15 +435,14 @@ func (s *sources) CreateSourceAmazonAds(ctx context.Context, request shared.Sour
// CreateSourceAmazonSellerPartner - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAmazonSellerPartner(ctx context.Context, request shared.SourceAmazonSellerPartnerCreateRequest) (*operations.CreateSourceAmazonSellerPartnerResponse, error) {
+func (s *Sources) CreateSourceAmazonSellerPartner(ctx context.Context, request *shared.SourceAmazonSellerPartnerCreateRequest) (*operations.CreateSourceAmazonSellerPartnerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#AmazonSellerPartner"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -445,7 +451,7 @@ func (s *sources) CreateSourceAmazonSellerPartner(ctx context.Context, request s
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -478,12 +484,14 @@ func (s *sources) CreateSourceAmazonSellerPartner(ctx context.Context, request s
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -495,15 +503,14 @@ func (s *sources) CreateSourceAmazonSellerPartner(ctx context.Context, request s
// CreateSourceAmazonSqs - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAmazonSqs(ctx context.Context, request shared.SourceAmazonSqsCreateRequest) (*operations.CreateSourceAmazonSqsResponse, error) {
+func (s *Sources) CreateSourceAmazonSqs(ctx context.Context, request *shared.SourceAmazonSqsCreateRequest) (*operations.CreateSourceAmazonSqsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#AmazonSqs"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -512,7 +519,7 @@ func (s *sources) CreateSourceAmazonSqs(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -545,12 +552,14 @@ func (s *sources) CreateSourceAmazonSqs(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -562,15 +571,14 @@ func (s *sources) CreateSourceAmazonSqs(ctx context.Context, request shared.Sour
// CreateSourceAmplitude - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAmplitude(ctx context.Context, request shared.SourceAmplitudeCreateRequest) (*operations.CreateSourceAmplitudeResponse, error) {
+func (s *Sources) CreateSourceAmplitude(ctx context.Context, request *shared.SourceAmplitudeCreateRequest) (*operations.CreateSourceAmplitudeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Amplitude"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -579,7 +587,7 @@ func (s *sources) CreateSourceAmplitude(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -612,12 +620,14 @@ func (s *sources) CreateSourceAmplitude(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -629,15 +639,14 @@ func (s *sources) CreateSourceAmplitude(ctx context.Context, request shared.Sour
// CreateSourceApifyDataset - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceApifyDataset(ctx context.Context, request shared.SourceApifyDatasetCreateRequest) (*operations.CreateSourceApifyDatasetResponse, error) {
+func (s *Sources) CreateSourceApifyDataset(ctx context.Context, request *shared.SourceApifyDatasetCreateRequest) (*operations.CreateSourceApifyDatasetResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#ApifyDataset"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -646,7 +655,7 @@ func (s *sources) CreateSourceApifyDataset(ctx context.Context, request shared.S
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -679,12 +688,14 @@ func (s *sources) CreateSourceApifyDataset(ctx context.Context, request shared.S
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -696,15 +707,14 @@ func (s *sources) CreateSourceApifyDataset(ctx context.Context, request shared.S
// CreateSourceAppfollow - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAppfollow(ctx context.Context, request shared.SourceAppfollowCreateRequest) (*operations.CreateSourceAppfollowResponse, error) {
+func (s *Sources) CreateSourceAppfollow(ctx context.Context, request *shared.SourceAppfollowCreateRequest) (*operations.CreateSourceAppfollowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Appfollow"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -713,7 +723,7 @@ func (s *sources) CreateSourceAppfollow(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -746,12 +756,14 @@ func (s *sources) CreateSourceAppfollow(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -763,15 +775,14 @@ func (s *sources) CreateSourceAppfollow(ctx context.Context, request shared.Sour
// CreateSourceAsana - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAsana(ctx context.Context, request shared.SourceAsanaCreateRequest) (*operations.CreateSourceAsanaResponse, error) {
+func (s *Sources) CreateSourceAsana(ctx context.Context, request *shared.SourceAsanaCreateRequest) (*operations.CreateSourceAsanaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Asana"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -780,7 +791,7 @@ func (s *sources) CreateSourceAsana(ctx context.Context, request shared.SourceAs
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -813,12 +824,14 @@ func (s *sources) CreateSourceAsana(ctx context.Context, request shared.SourceAs
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -830,15 +843,14 @@ func (s *sources) CreateSourceAsana(ctx context.Context, request shared.SourceAs
// CreateSourceAuth0 - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAuth0(ctx context.Context, request shared.SourceAuth0CreateRequest) (*operations.CreateSourceAuth0Response, error) {
+func (s *Sources) CreateSourceAuth0(ctx context.Context, request *shared.SourceAuth0CreateRequest) (*operations.CreateSourceAuth0Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Auth0"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -847,7 +859,7 @@ func (s *sources) CreateSourceAuth0(ctx context.Context, request shared.SourceAu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -880,12 +892,14 @@ func (s *sources) CreateSourceAuth0(ctx context.Context, request shared.SourceAu
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -897,15 +911,14 @@ func (s *sources) CreateSourceAuth0(ctx context.Context, request shared.SourceAu
// CreateSourceAwsCloudtrail - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAwsCloudtrail(ctx context.Context, request shared.SourceAwsCloudtrailCreateRequest) (*operations.CreateSourceAwsCloudtrailResponse, error) {
+func (s *Sources) CreateSourceAwsCloudtrail(ctx context.Context, request *shared.SourceAwsCloudtrailCreateRequest) (*operations.CreateSourceAwsCloudtrailResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#AwsCloudtrail"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -914,7 +927,7 @@ func (s *sources) CreateSourceAwsCloudtrail(ctx context.Context, request shared.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -947,12 +960,14 @@ func (s *sources) CreateSourceAwsCloudtrail(ctx context.Context, request shared.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -964,15 +979,14 @@ func (s *sources) CreateSourceAwsCloudtrail(ctx context.Context, request shared.
// CreateSourceAzureBlobStorage - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAzureBlobStorage(ctx context.Context, request shared.SourceAzureBlobStorageCreateRequest) (*operations.CreateSourceAzureBlobStorageResponse, error) {
+func (s *Sources) CreateSourceAzureBlobStorage(ctx context.Context, request *shared.SourceAzureBlobStorageCreateRequest) (*operations.CreateSourceAzureBlobStorageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#AzureBlobStorage"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -981,7 +995,7 @@ func (s *sources) CreateSourceAzureBlobStorage(ctx context.Context, request shar
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1014,12 +1028,14 @@ func (s *sources) CreateSourceAzureBlobStorage(ctx context.Context, request shar
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1031,15 +1047,14 @@ func (s *sources) CreateSourceAzureBlobStorage(ctx context.Context, request shar
// CreateSourceAzureTable - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceAzureTable(ctx context.Context, request shared.SourceAzureTableCreateRequest) (*operations.CreateSourceAzureTableResponse, error) {
+func (s *Sources) CreateSourceAzureTable(ctx context.Context, request *shared.SourceAzureTableCreateRequest) (*operations.CreateSourceAzureTableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#AzureTable"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1048,7 +1063,7 @@ func (s *sources) CreateSourceAzureTable(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1081,12 +1096,14 @@ func (s *sources) CreateSourceAzureTable(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1098,15 +1115,14 @@ func (s *sources) CreateSourceAzureTable(ctx context.Context, request shared.Sou
// CreateSourceBambooHr - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceBambooHr(ctx context.Context, request shared.SourceBambooHrCreateRequest) (*operations.CreateSourceBambooHrResponse, error) {
+func (s *Sources) CreateSourceBambooHr(ctx context.Context, request *shared.SourceBambooHrCreateRequest) (*operations.CreateSourceBambooHrResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#BambooHr"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1115,7 +1131,7 @@ func (s *sources) CreateSourceBambooHr(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1148,12 +1164,14 @@ func (s *sources) CreateSourceBambooHr(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1163,17 +1181,16 @@ func (s *sources) CreateSourceBambooHr(ctx context.Context, request shared.Sourc
return res, nil
}
-// CreateSourceBigcommerce - Create a source
+// CreateSourceBigquery - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceBigcommerce(ctx context.Context, request shared.SourceBigcommerceCreateRequest) (*operations.CreateSourceBigcommerceResponse, error) {
+func (s *Sources) CreateSourceBigquery(ctx context.Context, request *shared.SourceBigqueryCreateRequest) (*operations.CreateSourceBigqueryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#Bigcommerce"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#Bigquery"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1182,7 +1199,7 @@ func (s *sources) CreateSourceBigcommerce(ctx context.Context, request shared.So
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1206,7 +1223,7 @@ func (s *sources) CreateSourceBigcommerce(ctx context.Context, request shared.So
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceBigcommerceResponse{
+ res := &operations.CreateSourceBigqueryResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -1215,12 +1232,14 @@ func (s *sources) CreateSourceBigcommerce(ctx context.Context, request shared.So
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1230,17 +1249,16 @@ func (s *sources) CreateSourceBigcommerce(ctx context.Context, request shared.So
return res, nil
}
-// CreateSourceBigquery - Create a source
+// CreateSourceBingAds - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceBigquery(ctx context.Context, request shared.SourceBigqueryCreateRequest) (*operations.CreateSourceBigqueryResponse, error) {
+func (s *Sources) CreateSourceBingAds(ctx context.Context, request *shared.SourceBingAdsCreateRequest) (*operations.CreateSourceBingAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#Bigquery"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#BingAds"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1249,7 +1267,7 @@ func (s *sources) CreateSourceBigquery(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1273,7 +1291,7 @@ func (s *sources) CreateSourceBigquery(ctx context.Context, request shared.Sourc
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceBigqueryResponse{
+ res := &operations.CreateSourceBingAdsResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -1282,12 +1300,14 @@ func (s *sources) CreateSourceBigquery(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1297,17 +1317,16 @@ func (s *sources) CreateSourceBigquery(ctx context.Context, request shared.Sourc
return res, nil
}
-// CreateSourceBingAds - Create a source
+// CreateSourceBraintree - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceBingAds(ctx context.Context, request shared.SourceBingAdsCreateRequest) (*operations.CreateSourceBingAdsResponse, error) {
+func (s *Sources) CreateSourceBraintree(ctx context.Context, request *shared.SourceBraintreeCreateRequest) (*operations.CreateSourceBraintreeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#BingAds"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#Braintree"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1316,7 +1335,7 @@ func (s *sources) CreateSourceBingAds(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1340,7 +1359,7 @@ func (s *sources) CreateSourceBingAds(ctx context.Context, request shared.Source
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceBingAdsResponse{
+ res := &operations.CreateSourceBraintreeResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -1349,12 +1368,14 @@ func (s *sources) CreateSourceBingAds(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1364,17 +1385,16 @@ func (s *sources) CreateSourceBingAds(ctx context.Context, request shared.Source
return res, nil
}
-// CreateSourceBraintree - Create a source
+// CreateSourceBraze - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceBraintree(ctx context.Context, request shared.SourceBraintreeCreateRequest) (*operations.CreateSourceBraintreeResponse, error) {
+func (s *Sources) CreateSourceBraze(ctx context.Context, request *shared.SourceBrazeCreateRequest) (*operations.CreateSourceBrazeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#Braintree"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#Braze"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1383,7 +1403,7 @@ func (s *sources) CreateSourceBraintree(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1407,7 +1427,7 @@ func (s *sources) CreateSourceBraintree(ctx context.Context, request shared.Sour
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceBraintreeResponse{
+ res := &operations.CreateSourceBrazeResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -1416,12 +1436,14 @@ func (s *sources) CreateSourceBraintree(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1431,17 +1453,16 @@ func (s *sources) CreateSourceBraintree(ctx context.Context, request shared.Sour
return res, nil
}
-// CreateSourceBraze - Create a source
+// CreateSourceCart - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceBraze(ctx context.Context, request shared.SourceBrazeCreateRequest) (*operations.CreateSourceBrazeResponse, error) {
+func (s *Sources) CreateSourceCart(ctx context.Context, request *shared.SourceCartCreateRequest) (*operations.CreateSourceCartResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#Braze"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#Cart"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1450,7 +1471,7 @@ func (s *sources) CreateSourceBraze(ctx context.Context, request shared.SourceBr
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1474,7 +1495,7 @@ func (s *sources) CreateSourceBraze(ctx context.Context, request shared.SourceBr
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceBrazeResponse{
+ res := &operations.CreateSourceCartResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -1483,12 +1504,14 @@ func (s *sources) CreateSourceBraze(ctx context.Context, request shared.SourceBr
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1500,15 +1523,14 @@ func (s *sources) CreateSourceBraze(ctx context.Context, request shared.SourceBr
// CreateSourceChargebee - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceChargebee(ctx context.Context, request shared.SourceChargebeeCreateRequest) (*operations.CreateSourceChargebeeResponse, error) {
+func (s *Sources) CreateSourceChargebee(ctx context.Context, request *shared.SourceChargebeeCreateRequest) (*operations.CreateSourceChargebeeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Chargebee"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1517,7 +1539,7 @@ func (s *sources) CreateSourceChargebee(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1550,12 +1572,14 @@ func (s *sources) CreateSourceChargebee(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1567,15 +1591,14 @@ func (s *sources) CreateSourceChargebee(ctx context.Context, request shared.Sour
// CreateSourceChartmogul - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceChartmogul(ctx context.Context, request shared.SourceChartmogulCreateRequest) (*operations.CreateSourceChartmogulResponse, error) {
+func (s *Sources) CreateSourceChartmogul(ctx context.Context, request *shared.SourceChartmogulCreateRequest) (*operations.CreateSourceChartmogulResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Chartmogul"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1584,7 +1607,7 @@ func (s *sources) CreateSourceChartmogul(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1617,12 +1640,14 @@ func (s *sources) CreateSourceChartmogul(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1634,15 +1659,14 @@ func (s *sources) CreateSourceChartmogul(ctx context.Context, request shared.Sou
// CreateSourceClickhouse - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceClickhouse(ctx context.Context, request shared.SourceClickhouseCreateRequest) (*operations.CreateSourceClickhouseResponse, error) {
+func (s *Sources) CreateSourceClickhouse(ctx context.Context, request *shared.SourceClickhouseCreateRequest) (*operations.CreateSourceClickhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Clickhouse"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1651,7 +1675,7 @@ func (s *sources) CreateSourceClickhouse(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1684,12 +1708,14 @@ func (s *sources) CreateSourceClickhouse(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1701,15 +1727,14 @@ func (s *sources) CreateSourceClickhouse(ctx context.Context, request shared.Sou
// CreateSourceClickupAPI - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceClickupAPI(ctx context.Context, request shared.SourceClickupAPICreateRequest) (*operations.CreateSourceClickupAPIResponse, error) {
+func (s *Sources) CreateSourceClickupAPI(ctx context.Context, request *shared.SourceClickupAPICreateRequest) (*operations.CreateSourceClickupAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#ClickupApi"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1718,7 +1743,7 @@ func (s *sources) CreateSourceClickupAPI(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1751,12 +1776,14 @@ func (s *sources) CreateSourceClickupAPI(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1768,15 +1795,14 @@ func (s *sources) CreateSourceClickupAPI(ctx context.Context, request shared.Sou
// CreateSourceClockify - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceClockify(ctx context.Context, request shared.SourceClockifyCreateRequest) (*operations.CreateSourceClockifyResponse, error) {
+func (s *Sources) CreateSourceClockify(ctx context.Context, request *shared.SourceClockifyCreateRequest) (*operations.CreateSourceClockifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Clockify"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1785,7 +1811,7 @@ func (s *sources) CreateSourceClockify(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1818,12 +1844,14 @@ func (s *sources) CreateSourceClockify(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1835,15 +1863,14 @@ func (s *sources) CreateSourceClockify(ctx context.Context, request shared.Sourc
// CreateSourceCloseCom - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceCloseCom(ctx context.Context, request shared.SourceCloseComCreateRequest) (*operations.CreateSourceCloseComResponse, error) {
+func (s *Sources) CreateSourceCloseCom(ctx context.Context, request *shared.SourceCloseComCreateRequest) (*operations.CreateSourceCloseComResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#CloseCom"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1852,7 +1879,7 @@ func (s *sources) CreateSourceCloseCom(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1885,12 +1912,14 @@ func (s *sources) CreateSourceCloseCom(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1902,15 +1931,14 @@ func (s *sources) CreateSourceCloseCom(ctx context.Context, request shared.Sourc
// CreateSourceCoda - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceCoda(ctx context.Context, request shared.SourceCodaCreateRequest) (*operations.CreateSourceCodaResponse, error) {
+func (s *Sources) CreateSourceCoda(ctx context.Context, request *shared.SourceCodaCreateRequest) (*operations.CreateSourceCodaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Coda"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1919,7 +1947,7 @@ func (s *sources) CreateSourceCoda(ctx context.Context, request shared.SourceCod
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -1952,12 +1980,14 @@ func (s *sources) CreateSourceCoda(ctx context.Context, request shared.SourceCod
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -1969,15 +1999,14 @@ func (s *sources) CreateSourceCoda(ctx context.Context, request shared.SourceCod
// CreateSourceCoinAPI - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceCoinAPI(ctx context.Context, request shared.SourceCoinAPICreateRequest) (*operations.CreateSourceCoinAPIResponse, error) {
+func (s *Sources) CreateSourceCoinAPI(ctx context.Context, request *shared.SourceCoinAPICreateRequest) (*operations.CreateSourceCoinAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#CoinApi"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -1986,7 +2015,7 @@ func (s *sources) CreateSourceCoinAPI(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2019,12 +2048,14 @@ func (s *sources) CreateSourceCoinAPI(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2036,15 +2067,14 @@ func (s *sources) CreateSourceCoinAPI(ctx context.Context, request shared.Source
// CreateSourceCoinmarketcap - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceCoinmarketcap(ctx context.Context, request shared.SourceCoinmarketcapCreateRequest) (*operations.CreateSourceCoinmarketcapResponse, error) {
+func (s *Sources) CreateSourceCoinmarketcap(ctx context.Context, request *shared.SourceCoinmarketcapCreateRequest) (*operations.CreateSourceCoinmarketcapResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Coinmarketcap"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2053,7 +2083,7 @@ func (s *sources) CreateSourceCoinmarketcap(ctx context.Context, request shared.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2086,12 +2116,14 @@ func (s *sources) CreateSourceCoinmarketcap(ctx context.Context, request shared.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2103,15 +2135,14 @@ func (s *sources) CreateSourceCoinmarketcap(ctx context.Context, request shared.
// CreateSourceConfigcat - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceConfigcat(ctx context.Context, request shared.SourceConfigcatCreateRequest) (*operations.CreateSourceConfigcatResponse, error) {
+func (s *Sources) CreateSourceConfigcat(ctx context.Context, request *shared.SourceConfigcatCreateRequest) (*operations.CreateSourceConfigcatResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Configcat"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2120,7 +2151,7 @@ func (s *sources) CreateSourceConfigcat(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2153,12 +2184,14 @@ func (s *sources) CreateSourceConfigcat(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2170,15 +2203,14 @@ func (s *sources) CreateSourceConfigcat(ctx context.Context, request shared.Sour
// CreateSourceConfluence - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceConfluence(ctx context.Context, request shared.SourceConfluenceCreateRequest) (*operations.CreateSourceConfluenceResponse, error) {
+func (s *Sources) CreateSourceConfluence(ctx context.Context, request *shared.SourceConfluenceCreateRequest) (*operations.CreateSourceConfluenceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Confluence"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2187,7 +2219,7 @@ func (s *sources) CreateSourceConfluence(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2220,12 +2252,14 @@ func (s *sources) CreateSourceConfluence(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2237,15 +2271,14 @@ func (s *sources) CreateSourceConfluence(ctx context.Context, request shared.Sou
// CreateSourceConvex - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceConvex(ctx context.Context, request shared.SourceConvexCreateRequest) (*operations.CreateSourceConvexResponse, error) {
+func (s *Sources) CreateSourceConvex(ctx context.Context, request *shared.SourceConvexCreateRequest) (*operations.CreateSourceConvexResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Convex"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2254,7 +2287,7 @@ func (s *sources) CreateSourceConvex(ctx context.Context, request shared.SourceC
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2287,12 +2320,14 @@ func (s *sources) CreateSourceConvex(ctx context.Context, request shared.SourceC
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2304,15 +2339,14 @@ func (s *sources) CreateSourceConvex(ctx context.Context, request shared.SourceC
// CreateSourceDatascope - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceDatascope(ctx context.Context, request shared.SourceDatascopeCreateRequest) (*operations.CreateSourceDatascopeResponse, error) {
+func (s *Sources) CreateSourceDatascope(ctx context.Context, request *shared.SourceDatascopeCreateRequest) (*operations.CreateSourceDatascopeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Datascope"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2321,7 +2355,7 @@ func (s *sources) CreateSourceDatascope(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2354,12 +2388,14 @@ func (s *sources) CreateSourceDatascope(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2371,15 +2407,14 @@ func (s *sources) CreateSourceDatascope(ctx context.Context, request shared.Sour
// CreateSourceDelighted - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceDelighted(ctx context.Context, request shared.SourceDelightedCreateRequest) (*operations.CreateSourceDelightedResponse, error) {
+func (s *Sources) CreateSourceDelighted(ctx context.Context, request *shared.SourceDelightedCreateRequest) (*operations.CreateSourceDelightedResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Delighted"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2388,7 +2423,7 @@ func (s *sources) CreateSourceDelighted(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2421,12 +2456,14 @@ func (s *sources) CreateSourceDelighted(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2438,15 +2475,14 @@ func (s *sources) CreateSourceDelighted(ctx context.Context, request shared.Sour
// CreateSourceDixa - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceDixa(ctx context.Context, request shared.SourceDixaCreateRequest) (*operations.CreateSourceDixaResponse, error) {
+func (s *Sources) CreateSourceDixa(ctx context.Context, request *shared.SourceDixaCreateRequest) (*operations.CreateSourceDixaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Dixa"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2455,7 +2491,7 @@ func (s *sources) CreateSourceDixa(ctx context.Context, request shared.SourceDix
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2488,12 +2524,14 @@ func (s *sources) CreateSourceDixa(ctx context.Context, request shared.SourceDix
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2505,15 +2543,14 @@ func (s *sources) CreateSourceDixa(ctx context.Context, request shared.SourceDix
// CreateSourceDockerhub - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceDockerhub(ctx context.Context, request shared.SourceDockerhubCreateRequest) (*operations.CreateSourceDockerhubResponse, error) {
+func (s *Sources) CreateSourceDockerhub(ctx context.Context, request *shared.SourceDockerhubCreateRequest) (*operations.CreateSourceDockerhubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Dockerhub"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2522,7 +2559,7 @@ func (s *sources) CreateSourceDockerhub(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2555,12 +2592,14 @@ func (s *sources) CreateSourceDockerhub(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2572,15 +2611,14 @@ func (s *sources) CreateSourceDockerhub(ctx context.Context, request shared.Sour
// CreateSourceDremio - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceDremio(ctx context.Context, request shared.SourceDremioCreateRequest) (*operations.CreateSourceDremioResponse, error) {
+func (s *Sources) CreateSourceDremio(ctx context.Context, request *shared.SourceDremioCreateRequest) (*operations.CreateSourceDremioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Dremio"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2589,7 +2627,7 @@ func (s *sources) CreateSourceDremio(ctx context.Context, request shared.SourceD
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2622,12 +2660,14 @@ func (s *sources) CreateSourceDremio(ctx context.Context, request shared.SourceD
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2639,15 +2679,14 @@ func (s *sources) CreateSourceDremio(ctx context.Context, request shared.SourceD
// CreateSourceDynamodb - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceDynamodb(ctx context.Context, request shared.SourceDynamodbCreateRequest) (*operations.CreateSourceDynamodbResponse, error) {
+func (s *Sources) CreateSourceDynamodb(ctx context.Context, request *shared.SourceDynamodbCreateRequest) (*operations.CreateSourceDynamodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Dynamodb"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2656,7 +2695,7 @@ func (s *sources) CreateSourceDynamodb(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2689,79 +2728,14 @@ func (s *sources) CreateSourceDynamodb(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
- }
- case httpRes.StatusCode == 400:
- fallthrough
- case httpRes.StatusCode == 403:
- }
-
- return res, nil
-}
-
-// CreateSourceE2eTestCloud - Create a source
-// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceE2eTestCloud(ctx context.Context, request shared.SourceE2eTestCloudCreateRequest) (*operations.CreateSourceE2eTestCloudResponse, error) {
- baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#E2eTestCloud"
-
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
- if err != nil {
- return nil, fmt.Errorf("error serializing request body: %w", err)
- }
-
- debugBody := bytes.NewBuffer([]byte{})
- debugReader := io.TeeReader(bodyReader, debugBody)
-
- req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader)
- if err != nil {
- return nil, fmt.Errorf("error creating request: %w", err)
- }
- req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
-
- req.Header.Set("Content-Type", reqContentType)
-
- client := s.sdkConfiguration.SecurityClient
-
- httpRes, err := client.Do(req)
- if err != nil {
- return nil, fmt.Errorf("error sending request: %w", err)
- }
- if httpRes == nil {
- return nil, fmt.Errorf("error sending request: no response")
- }
-
- rawBody, err := io.ReadAll(httpRes.Body)
- if err != nil {
- return nil, fmt.Errorf("error reading response body: %w", err)
- }
- httpRes.Request.Body = io.NopCloser(debugBody)
- httpRes.Body.Close()
- httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
-
- contentType := httpRes.Header.Get("Content-Type")
-
- res := &operations.CreateSourceE2eTestCloudResponse{
- StatusCode: httpRes.StatusCode,
- ContentType: contentType,
- RawResponse: httpRes,
- }
- switch {
- case httpRes.StatusCode == 200:
- switch {
- case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
- }
-
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2773,15 +2747,14 @@ func (s *sources) CreateSourceE2eTestCloud(ctx context.Context, request shared.S
// CreateSourceEmailoctopus - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceEmailoctopus(ctx context.Context, request shared.SourceEmailoctopusCreateRequest) (*operations.CreateSourceEmailoctopusResponse, error) {
+func (s *Sources) CreateSourceEmailoctopus(ctx context.Context, request *shared.SourceEmailoctopusCreateRequest) (*operations.CreateSourceEmailoctopusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Emailoctopus"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2790,7 +2763,7 @@ func (s *sources) CreateSourceEmailoctopus(ctx context.Context, request shared.S
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2823,12 +2796,14 @@ func (s *sources) CreateSourceEmailoctopus(ctx context.Context, request shared.S
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2840,15 +2815,14 @@ func (s *sources) CreateSourceEmailoctopus(ctx context.Context, request shared.S
// CreateSourceExchangeRates - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceExchangeRates(ctx context.Context, request shared.SourceExchangeRatesCreateRequest) (*operations.CreateSourceExchangeRatesResponse, error) {
+func (s *Sources) CreateSourceExchangeRates(ctx context.Context, request *shared.SourceExchangeRatesCreateRequest) (*operations.CreateSourceExchangeRatesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#ExchangeRates"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2857,7 +2831,7 @@ func (s *sources) CreateSourceExchangeRates(ctx context.Context, request shared.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2890,12 +2864,14 @@ func (s *sources) CreateSourceExchangeRates(ctx context.Context, request shared.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2907,15 +2883,14 @@ func (s *sources) CreateSourceExchangeRates(ctx context.Context, request shared.
// CreateSourceFacebookMarketing - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceFacebookMarketing(ctx context.Context, request shared.SourceFacebookMarketingCreateRequest) (*operations.CreateSourceFacebookMarketingResponse, error) {
+func (s *Sources) CreateSourceFacebookMarketing(ctx context.Context, request *shared.SourceFacebookMarketingCreateRequest) (*operations.CreateSourceFacebookMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#FacebookMarketing"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2924,7 +2899,7 @@ func (s *sources) CreateSourceFacebookMarketing(ctx context.Context, request sha
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -2957,12 +2932,14 @@ func (s *sources) CreateSourceFacebookMarketing(ctx context.Context, request sha
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -2974,15 +2951,14 @@ func (s *sources) CreateSourceFacebookMarketing(ctx context.Context, request sha
// CreateSourceFacebookPages - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceFacebookPages(ctx context.Context, request shared.SourceFacebookPagesCreateRequest) (*operations.CreateSourceFacebookPagesResponse, error) {
+func (s *Sources) CreateSourceFacebookPages(ctx context.Context, request *shared.SourceFacebookPagesCreateRequest) (*operations.CreateSourceFacebookPagesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#FacebookPages"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -2991,7 +2967,7 @@ func (s *sources) CreateSourceFacebookPages(ctx context.Context, request shared.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3024,12 +3000,14 @@ func (s *sources) CreateSourceFacebookPages(ctx context.Context, request shared.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3041,15 +3019,14 @@ func (s *sources) CreateSourceFacebookPages(ctx context.Context, request shared.
// CreateSourceFaker - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceFaker(ctx context.Context, request shared.SourceFakerCreateRequest) (*operations.CreateSourceFakerResponse, error) {
+func (s *Sources) CreateSourceFaker(ctx context.Context, request *shared.SourceFakerCreateRequest) (*operations.CreateSourceFakerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Faker"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3058,7 +3035,7 @@ func (s *sources) CreateSourceFaker(ctx context.Context, request shared.SourceFa
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3091,12 +3068,14 @@ func (s *sources) CreateSourceFaker(ctx context.Context, request shared.SourceFa
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3108,15 +3087,14 @@ func (s *sources) CreateSourceFaker(ctx context.Context, request shared.SourceFa
// CreateSourceFauna - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceFauna(ctx context.Context, request shared.SourceFaunaCreateRequest) (*operations.CreateSourceFaunaResponse, error) {
+func (s *Sources) CreateSourceFauna(ctx context.Context, request *shared.SourceFaunaCreateRequest) (*operations.CreateSourceFaunaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Fauna"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3125,7 +3103,7 @@ func (s *sources) CreateSourceFauna(ctx context.Context, request shared.SourceFa
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3158,12 +3136,14 @@ func (s *sources) CreateSourceFauna(ctx context.Context, request shared.SourceFa
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3173,17 +3153,16 @@ func (s *sources) CreateSourceFauna(ctx context.Context, request shared.SourceFa
return res, nil
}
-// CreateSourceFileSecure - Create a source
+// CreateSourceFile - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceFileSecure(ctx context.Context, request shared.SourceFileSecureCreateRequest) (*operations.CreateSourceFileSecureResponse, error) {
+func (s *Sources) CreateSourceFile(ctx context.Context, request *shared.SourceFileCreateRequest) (*operations.CreateSourceFileResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#FileSecure"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#File"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3192,7 +3171,7 @@ func (s *sources) CreateSourceFileSecure(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3216,7 +3195,7 @@ func (s *sources) CreateSourceFileSecure(ctx context.Context, request shared.Sou
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceFileSecureResponse{
+ res := &operations.CreateSourceFileResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -3225,12 +3204,14 @@ func (s *sources) CreateSourceFileSecure(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3242,15 +3223,14 @@ func (s *sources) CreateSourceFileSecure(ctx context.Context, request shared.Sou
// CreateSourceFirebolt - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceFirebolt(ctx context.Context, request shared.SourceFireboltCreateRequest) (*operations.CreateSourceFireboltResponse, error) {
+func (s *Sources) CreateSourceFirebolt(ctx context.Context, request *shared.SourceFireboltCreateRequest) (*operations.CreateSourceFireboltResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Firebolt"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3259,7 +3239,7 @@ func (s *sources) CreateSourceFirebolt(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3292,12 +3272,14 @@ func (s *sources) CreateSourceFirebolt(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3309,15 +3291,14 @@ func (s *sources) CreateSourceFirebolt(ctx context.Context, request shared.Sourc
// CreateSourceFreshcaller - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceFreshcaller(ctx context.Context, request shared.SourceFreshcallerCreateRequest) (*operations.CreateSourceFreshcallerResponse, error) {
+func (s *Sources) CreateSourceFreshcaller(ctx context.Context, request *shared.SourceFreshcallerCreateRequest) (*operations.CreateSourceFreshcallerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Freshcaller"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3326,7 +3307,7 @@ func (s *sources) CreateSourceFreshcaller(ctx context.Context, request shared.So
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3359,12 +3340,14 @@ func (s *sources) CreateSourceFreshcaller(ctx context.Context, request shared.So
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3376,15 +3359,14 @@ func (s *sources) CreateSourceFreshcaller(ctx context.Context, request shared.So
// CreateSourceFreshdesk - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceFreshdesk(ctx context.Context, request shared.SourceFreshdeskCreateRequest) (*operations.CreateSourceFreshdeskResponse, error) {
+func (s *Sources) CreateSourceFreshdesk(ctx context.Context, request *shared.SourceFreshdeskCreateRequest) (*operations.CreateSourceFreshdeskResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Freshdesk"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3393,7 +3375,7 @@ func (s *sources) CreateSourceFreshdesk(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3426,12 +3408,14 @@ func (s *sources) CreateSourceFreshdesk(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3443,15 +3427,14 @@ func (s *sources) CreateSourceFreshdesk(ctx context.Context, request shared.Sour
// CreateSourceFreshsales - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceFreshsales(ctx context.Context, request shared.SourceFreshsalesCreateRequest) (*operations.CreateSourceFreshsalesResponse, error) {
+func (s *Sources) CreateSourceFreshsales(ctx context.Context, request *shared.SourceFreshsalesCreateRequest) (*operations.CreateSourceFreshsalesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Freshsales"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3460,7 +3443,7 @@ func (s *sources) CreateSourceFreshsales(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3493,12 +3476,14 @@ func (s *sources) CreateSourceFreshsales(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3510,15 +3495,14 @@ func (s *sources) CreateSourceFreshsales(ctx context.Context, request shared.Sou
// CreateSourceGainsightPx - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGainsightPx(ctx context.Context, request shared.SourceGainsightPxCreateRequest) (*operations.CreateSourceGainsightPxResponse, error) {
+func (s *Sources) CreateSourceGainsightPx(ctx context.Context, request *shared.SourceGainsightPxCreateRequest) (*operations.CreateSourceGainsightPxResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#GainsightPx"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3527,7 +3511,7 @@ func (s *sources) CreateSourceGainsightPx(ctx context.Context, request shared.So
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3560,12 +3544,14 @@ func (s *sources) CreateSourceGainsightPx(ctx context.Context, request shared.So
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3577,15 +3563,14 @@ func (s *sources) CreateSourceGainsightPx(ctx context.Context, request shared.So
// CreateSourceGcs - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGcs(ctx context.Context, request shared.SourceGcsCreateRequest) (*operations.CreateSourceGcsResponse, error) {
+func (s *Sources) CreateSourceGcs(ctx context.Context, request *shared.SourceGcsCreateRequest) (*operations.CreateSourceGcsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Gcs"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3594,7 +3579,7 @@ func (s *sources) CreateSourceGcs(ctx context.Context, request shared.SourceGcsC
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3627,12 +3612,14 @@ func (s *sources) CreateSourceGcs(ctx context.Context, request shared.SourceGcsC
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3644,15 +3631,14 @@ func (s *sources) CreateSourceGcs(ctx context.Context, request shared.SourceGcsC
// CreateSourceGetlago - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGetlago(ctx context.Context, request shared.SourceGetlagoCreateRequest) (*operations.CreateSourceGetlagoResponse, error) {
+func (s *Sources) CreateSourceGetlago(ctx context.Context, request *shared.SourceGetlagoCreateRequest) (*operations.CreateSourceGetlagoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Getlago"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3661,7 +3647,7 @@ func (s *sources) CreateSourceGetlago(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3694,12 +3680,14 @@ func (s *sources) CreateSourceGetlago(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3711,15 +3699,14 @@ func (s *sources) CreateSourceGetlago(ctx context.Context, request shared.Source
// CreateSourceGithub - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGithub(ctx context.Context, request shared.SourceGithubCreateRequest) (*operations.CreateSourceGithubResponse, error) {
+func (s *Sources) CreateSourceGithub(ctx context.Context, request *shared.SourceGithubCreateRequest) (*operations.CreateSourceGithubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Github"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3728,7 +3715,7 @@ func (s *sources) CreateSourceGithub(ctx context.Context, request shared.SourceG
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3761,12 +3748,14 @@ func (s *sources) CreateSourceGithub(ctx context.Context, request shared.SourceG
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3778,15 +3767,14 @@ func (s *sources) CreateSourceGithub(ctx context.Context, request shared.SourceG
// CreateSourceGitlab - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGitlab(ctx context.Context, request shared.SourceGitlabCreateRequest) (*operations.CreateSourceGitlabResponse, error) {
+func (s *Sources) CreateSourceGitlab(ctx context.Context, request *shared.SourceGitlabCreateRequest) (*operations.CreateSourceGitlabResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Gitlab"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3795,7 +3783,7 @@ func (s *sources) CreateSourceGitlab(ctx context.Context, request shared.SourceG
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3828,12 +3816,14 @@ func (s *sources) CreateSourceGitlab(ctx context.Context, request shared.SourceG
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3845,15 +3835,14 @@ func (s *sources) CreateSourceGitlab(ctx context.Context, request shared.SourceG
// CreateSourceGlassfrog - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGlassfrog(ctx context.Context, request shared.SourceGlassfrogCreateRequest) (*operations.CreateSourceGlassfrogResponse, error) {
+func (s *Sources) CreateSourceGlassfrog(ctx context.Context, request *shared.SourceGlassfrogCreateRequest) (*operations.CreateSourceGlassfrogResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Glassfrog"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3862,7 +3851,7 @@ func (s *sources) CreateSourceGlassfrog(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3895,12 +3884,14 @@ func (s *sources) CreateSourceGlassfrog(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3912,15 +3903,14 @@ func (s *sources) CreateSourceGlassfrog(ctx context.Context, request shared.Sour
// CreateSourceGnews - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGnews(ctx context.Context, request shared.SourceGnewsCreateRequest) (*operations.CreateSourceGnewsResponse, error) {
+func (s *Sources) CreateSourceGnews(ctx context.Context, request *shared.SourceGnewsCreateRequest) (*operations.CreateSourceGnewsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Gnews"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3929,7 +3919,7 @@ func (s *sources) CreateSourceGnews(ctx context.Context, request shared.SourceGn
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -3962,12 +3952,14 @@ func (s *sources) CreateSourceGnews(ctx context.Context, request shared.SourceGn
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -3979,15 +3971,14 @@ func (s *sources) CreateSourceGnews(ctx context.Context, request shared.SourceGn
// CreateSourceGoogleAds - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGoogleAds(ctx context.Context, request shared.SourceGoogleAdsCreateRequest) (*operations.CreateSourceGoogleAdsResponse, error) {
+func (s *Sources) CreateSourceGoogleAds(ctx context.Context, request *shared.SourceGoogleAdsCreateRequest) (*operations.CreateSourceGoogleAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#GoogleAds"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -3996,7 +3987,7 @@ func (s *sources) CreateSourceGoogleAds(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4029,12 +4020,14 @@ func (s *sources) CreateSourceGoogleAds(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4046,15 +4039,14 @@ func (s *sources) CreateSourceGoogleAds(ctx context.Context, request shared.Sour
// CreateSourceGoogleAnalyticsDataAPI - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGoogleAnalyticsDataAPI(ctx context.Context, request shared.SourceGoogleAnalyticsDataAPICreateRequest) (*operations.CreateSourceGoogleAnalyticsDataAPIResponse, error) {
+func (s *Sources) CreateSourceGoogleAnalyticsDataAPI(ctx context.Context, request *shared.SourceGoogleAnalyticsDataAPICreateRequest) (*operations.CreateSourceGoogleAnalyticsDataAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#GoogleAnalyticsDataApi"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4063,7 +4055,7 @@ func (s *sources) CreateSourceGoogleAnalyticsDataAPI(ctx context.Context, reques
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4096,12 +4088,14 @@ func (s *sources) CreateSourceGoogleAnalyticsDataAPI(ctx context.Context, reques
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4111,17 +4105,16 @@ func (s *sources) CreateSourceGoogleAnalyticsDataAPI(ctx context.Context, reques
return res, nil
}
-// CreateSourceGoogleAnalyticsV4 - Create a source
+// CreateSourceGoogleDirectory - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGoogleAnalyticsV4(ctx context.Context, request shared.SourceGoogleAnalyticsV4CreateRequest) (*operations.CreateSourceGoogleAnalyticsV4Response, error) {
+func (s *Sources) CreateSourceGoogleDirectory(ctx context.Context, request *shared.SourceGoogleDirectoryCreateRequest) (*operations.CreateSourceGoogleDirectoryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#GoogleAnalyticsV4"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#GoogleDirectory"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4130,7 +4123,7 @@ func (s *sources) CreateSourceGoogleAnalyticsV4(ctx context.Context, request sha
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4154,7 +4147,7 @@ func (s *sources) CreateSourceGoogleAnalyticsV4(ctx context.Context, request sha
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceGoogleAnalyticsV4Response{
+ res := &operations.CreateSourceGoogleDirectoryResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4163,12 +4156,14 @@ func (s *sources) CreateSourceGoogleAnalyticsV4(ctx context.Context, request sha
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4178,17 +4173,16 @@ func (s *sources) CreateSourceGoogleAnalyticsV4(ctx context.Context, request sha
return res, nil
}
-// CreateSourceGoogleDirectory - Create a source
+// CreateSourceGoogleDrive - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGoogleDirectory(ctx context.Context, request shared.SourceGoogleDirectoryCreateRequest) (*operations.CreateSourceGoogleDirectoryResponse, error) {
+func (s *Sources) CreateSourceGoogleDrive(ctx context.Context, request *shared.SourceGoogleDriveCreateRequest) (*operations.CreateSourceGoogleDriveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#GoogleDirectory"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#GoogleDrive"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4197,7 +4191,7 @@ func (s *sources) CreateSourceGoogleDirectory(ctx context.Context, request share
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4221,7 +4215,7 @@ func (s *sources) CreateSourceGoogleDirectory(ctx context.Context, request share
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceGoogleDirectoryResponse{
+ res := &operations.CreateSourceGoogleDriveResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -4230,12 +4224,14 @@ func (s *sources) CreateSourceGoogleDirectory(ctx context.Context, request share
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4247,15 +4243,14 @@ func (s *sources) CreateSourceGoogleDirectory(ctx context.Context, request share
// CreateSourceGooglePagespeedInsights - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGooglePagespeedInsights(ctx context.Context, request shared.SourceGooglePagespeedInsightsCreateRequest) (*operations.CreateSourceGooglePagespeedInsightsResponse, error) {
+func (s *Sources) CreateSourceGooglePagespeedInsights(ctx context.Context, request *shared.SourceGooglePagespeedInsightsCreateRequest) (*operations.CreateSourceGooglePagespeedInsightsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#GooglePagespeedInsights"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4264,7 +4259,7 @@ func (s *sources) CreateSourceGooglePagespeedInsights(ctx context.Context, reque
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4297,12 +4292,14 @@ func (s *sources) CreateSourceGooglePagespeedInsights(ctx context.Context, reque
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4314,15 +4311,14 @@ func (s *sources) CreateSourceGooglePagespeedInsights(ctx context.Context, reque
// CreateSourceGoogleSearchConsole - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGoogleSearchConsole(ctx context.Context, request shared.SourceGoogleSearchConsoleCreateRequest) (*operations.CreateSourceGoogleSearchConsoleResponse, error) {
+func (s *Sources) CreateSourceGoogleSearchConsole(ctx context.Context, request *shared.SourceGoogleSearchConsoleCreateRequest) (*operations.CreateSourceGoogleSearchConsoleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#GoogleSearchConsole"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4331,7 +4327,7 @@ func (s *sources) CreateSourceGoogleSearchConsole(ctx context.Context, request s
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4364,12 +4360,14 @@ func (s *sources) CreateSourceGoogleSearchConsole(ctx context.Context, request s
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4381,15 +4379,14 @@ func (s *sources) CreateSourceGoogleSearchConsole(ctx context.Context, request s
// CreateSourceGoogleSheets - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGoogleSheets(ctx context.Context, request shared.SourceGoogleSheetsCreateRequest) (*operations.CreateSourceGoogleSheetsResponse, error) {
+func (s *Sources) CreateSourceGoogleSheets(ctx context.Context, request *shared.SourceGoogleSheetsCreateRequest) (*operations.CreateSourceGoogleSheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#GoogleSheets"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4398,7 +4395,7 @@ func (s *sources) CreateSourceGoogleSheets(ctx context.Context, request shared.S
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4431,12 +4428,14 @@ func (s *sources) CreateSourceGoogleSheets(ctx context.Context, request shared.S
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4448,15 +4447,14 @@ func (s *sources) CreateSourceGoogleSheets(ctx context.Context, request shared.S
// CreateSourceGoogleWebfonts - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGoogleWebfonts(ctx context.Context, request shared.SourceGoogleWebfontsCreateRequest) (*operations.CreateSourceGoogleWebfontsResponse, error) {
+func (s *Sources) CreateSourceGoogleWebfonts(ctx context.Context, request *shared.SourceGoogleWebfontsCreateRequest) (*operations.CreateSourceGoogleWebfontsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#GoogleWebfonts"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4465,7 +4463,7 @@ func (s *sources) CreateSourceGoogleWebfonts(ctx context.Context, request shared
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4498,12 +4496,14 @@ func (s *sources) CreateSourceGoogleWebfonts(ctx context.Context, request shared
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4515,15 +4515,14 @@ func (s *sources) CreateSourceGoogleWebfonts(ctx context.Context, request shared
// CreateSourceGoogleWorkspaceAdminReports - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGoogleWorkspaceAdminReports(ctx context.Context, request shared.SourceGoogleWorkspaceAdminReportsCreateRequest) (*operations.CreateSourceGoogleWorkspaceAdminReportsResponse, error) {
+func (s *Sources) CreateSourceGoogleWorkspaceAdminReports(ctx context.Context, request *shared.SourceGoogleWorkspaceAdminReportsCreateRequest) (*operations.CreateSourceGoogleWorkspaceAdminReportsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#GoogleWorkspaceAdminReports"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4532,7 +4531,7 @@ func (s *sources) CreateSourceGoogleWorkspaceAdminReports(ctx context.Context, r
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4565,12 +4564,14 @@ func (s *sources) CreateSourceGoogleWorkspaceAdminReports(ctx context.Context, r
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4582,15 +4583,14 @@ func (s *sources) CreateSourceGoogleWorkspaceAdminReports(ctx context.Context, r
// CreateSourceGreenhouse - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGreenhouse(ctx context.Context, request shared.SourceGreenhouseCreateRequest) (*operations.CreateSourceGreenhouseResponse, error) {
+func (s *Sources) CreateSourceGreenhouse(ctx context.Context, request *shared.SourceGreenhouseCreateRequest) (*operations.CreateSourceGreenhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Greenhouse"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4599,7 +4599,7 @@ func (s *sources) CreateSourceGreenhouse(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4632,12 +4632,14 @@ func (s *sources) CreateSourceGreenhouse(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4649,15 +4651,14 @@ func (s *sources) CreateSourceGreenhouse(ctx context.Context, request shared.Sou
// CreateSourceGridly - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceGridly(ctx context.Context, request shared.SourceGridlyCreateRequest) (*operations.CreateSourceGridlyResponse, error) {
+func (s *Sources) CreateSourceGridly(ctx context.Context, request *shared.SourceGridlyCreateRequest) (*operations.CreateSourceGridlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Gridly"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4666,7 +4667,7 @@ func (s *sources) CreateSourceGridly(ctx context.Context, request shared.SourceG
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4699,12 +4700,14 @@ func (s *sources) CreateSourceGridly(ctx context.Context, request shared.SourceG
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4716,15 +4719,14 @@ func (s *sources) CreateSourceGridly(ctx context.Context, request shared.SourceG
// CreateSourceHarvest - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceHarvest(ctx context.Context, request shared.SourceHarvestCreateRequest) (*operations.CreateSourceHarvestResponse, error) {
+func (s *Sources) CreateSourceHarvest(ctx context.Context, request *shared.SourceHarvestCreateRequest) (*operations.CreateSourceHarvestResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Harvest"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4733,7 +4735,7 @@ func (s *sources) CreateSourceHarvest(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4766,12 +4768,14 @@ func (s *sources) CreateSourceHarvest(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4783,15 +4787,14 @@ func (s *sources) CreateSourceHarvest(ctx context.Context, request shared.Source
// CreateSourceHubplanner - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceHubplanner(ctx context.Context, request shared.SourceHubplannerCreateRequest) (*operations.CreateSourceHubplannerResponse, error) {
+func (s *Sources) CreateSourceHubplanner(ctx context.Context, request *shared.SourceHubplannerCreateRequest) (*operations.CreateSourceHubplannerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Hubplanner"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4800,7 +4803,7 @@ func (s *sources) CreateSourceHubplanner(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4833,12 +4836,14 @@ func (s *sources) CreateSourceHubplanner(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4850,15 +4855,14 @@ func (s *sources) CreateSourceHubplanner(ctx context.Context, request shared.Sou
// CreateSourceHubspot - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceHubspot(ctx context.Context, request shared.SourceHubspotCreateRequest) (*operations.CreateSourceHubspotResponse, error) {
+func (s *Sources) CreateSourceHubspot(ctx context.Context, request *shared.SourceHubspotCreateRequest) (*operations.CreateSourceHubspotResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Hubspot"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4867,7 +4871,7 @@ func (s *sources) CreateSourceHubspot(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4900,12 +4904,14 @@ func (s *sources) CreateSourceHubspot(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4917,15 +4923,14 @@ func (s *sources) CreateSourceHubspot(ctx context.Context, request shared.Source
// CreateSourceInsightly - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceInsightly(ctx context.Context, request shared.SourceInsightlyCreateRequest) (*operations.CreateSourceInsightlyResponse, error) {
+func (s *Sources) CreateSourceInsightly(ctx context.Context, request *shared.SourceInsightlyCreateRequest) (*operations.CreateSourceInsightlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Insightly"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -4934,7 +4939,7 @@ func (s *sources) CreateSourceInsightly(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -4967,12 +4972,14 @@ func (s *sources) CreateSourceInsightly(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -4984,15 +4991,14 @@ func (s *sources) CreateSourceInsightly(ctx context.Context, request shared.Sour
// CreateSourceInstagram - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceInstagram(ctx context.Context, request shared.SourceInstagramCreateRequest) (*operations.CreateSourceInstagramResponse, error) {
+func (s *Sources) CreateSourceInstagram(ctx context.Context, request *shared.SourceInstagramCreateRequest) (*operations.CreateSourceInstagramResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Instagram"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5001,7 +5007,7 @@ func (s *sources) CreateSourceInstagram(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5034,12 +5040,14 @@ func (s *sources) CreateSourceInstagram(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5051,15 +5059,14 @@ func (s *sources) CreateSourceInstagram(ctx context.Context, request shared.Sour
// CreateSourceInstatus - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceInstatus(ctx context.Context, request shared.SourceInstatusCreateRequest) (*operations.CreateSourceInstatusResponse, error) {
+func (s *Sources) CreateSourceInstatus(ctx context.Context, request *shared.SourceInstatusCreateRequest) (*operations.CreateSourceInstatusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Instatus"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5068,7 +5075,7 @@ func (s *sources) CreateSourceInstatus(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5101,12 +5108,14 @@ func (s *sources) CreateSourceInstatus(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5118,15 +5127,14 @@ func (s *sources) CreateSourceInstatus(ctx context.Context, request shared.Sourc
// CreateSourceIntercom - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceIntercom(ctx context.Context, request shared.SourceIntercomCreateRequest) (*operations.CreateSourceIntercomResponse, error) {
+func (s *Sources) CreateSourceIntercom(ctx context.Context, request *shared.SourceIntercomCreateRequest) (*operations.CreateSourceIntercomResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Intercom"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5135,7 +5143,7 @@ func (s *sources) CreateSourceIntercom(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5168,12 +5176,14 @@ func (s *sources) CreateSourceIntercom(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5185,15 +5195,14 @@ func (s *sources) CreateSourceIntercom(ctx context.Context, request shared.Sourc
// CreateSourceIp2whois - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceIp2whois(ctx context.Context, request shared.SourceIp2whoisCreateRequest) (*operations.CreateSourceIp2whoisResponse, error) {
+func (s *Sources) CreateSourceIp2whois(ctx context.Context, request *shared.SourceIp2whoisCreateRequest) (*operations.CreateSourceIp2whoisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Ip2whois"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5202,7 +5211,7 @@ func (s *sources) CreateSourceIp2whois(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5235,12 +5244,14 @@ func (s *sources) CreateSourceIp2whois(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5252,15 +5263,14 @@ func (s *sources) CreateSourceIp2whois(ctx context.Context, request shared.Sourc
// CreateSourceIterable - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceIterable(ctx context.Context, request shared.SourceIterableCreateRequest) (*operations.CreateSourceIterableResponse, error) {
+func (s *Sources) CreateSourceIterable(ctx context.Context, request *shared.SourceIterableCreateRequest) (*operations.CreateSourceIterableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Iterable"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5269,7 +5279,7 @@ func (s *sources) CreateSourceIterable(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5302,12 +5312,14 @@ func (s *sources) CreateSourceIterable(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5319,15 +5331,14 @@ func (s *sources) CreateSourceIterable(ctx context.Context, request shared.Sourc
// CreateSourceJira - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceJira(ctx context.Context, request shared.SourceJiraCreateRequest) (*operations.CreateSourceJiraResponse, error) {
+func (s *Sources) CreateSourceJira(ctx context.Context, request *shared.SourceJiraCreateRequest) (*operations.CreateSourceJiraResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Jira"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5336,7 +5347,7 @@ func (s *sources) CreateSourceJira(ctx context.Context, request shared.SourceJir
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5369,12 +5380,14 @@ func (s *sources) CreateSourceJira(ctx context.Context, request shared.SourceJir
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5386,15 +5399,14 @@ func (s *sources) CreateSourceJira(ctx context.Context, request shared.SourceJir
// CreateSourceK6Cloud - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceK6Cloud(ctx context.Context, request shared.SourceK6CloudCreateRequest) (*operations.CreateSourceK6CloudResponse, error) {
+func (s *Sources) CreateSourceK6Cloud(ctx context.Context, request *shared.SourceK6CloudCreateRequest) (*operations.CreateSourceK6CloudResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#K6Cloud"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5403,7 +5415,7 @@ func (s *sources) CreateSourceK6Cloud(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5436,12 +5448,14 @@ func (s *sources) CreateSourceK6Cloud(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5453,15 +5467,14 @@ func (s *sources) CreateSourceK6Cloud(ctx context.Context, request shared.Source
// CreateSourceKlarna - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceKlarna(ctx context.Context, request shared.SourceKlarnaCreateRequest) (*operations.CreateSourceKlarnaResponse, error) {
+func (s *Sources) CreateSourceKlarna(ctx context.Context, request *shared.SourceKlarnaCreateRequest) (*operations.CreateSourceKlarnaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Klarna"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5470,7 +5483,7 @@ func (s *sources) CreateSourceKlarna(ctx context.Context, request shared.SourceK
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5503,12 +5516,14 @@ func (s *sources) CreateSourceKlarna(ctx context.Context, request shared.SourceK
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5520,15 +5535,14 @@ func (s *sources) CreateSourceKlarna(ctx context.Context, request shared.SourceK
// CreateSourceKlaviyo - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceKlaviyo(ctx context.Context, request shared.SourceKlaviyoCreateRequest) (*operations.CreateSourceKlaviyoResponse, error) {
+func (s *Sources) CreateSourceKlaviyo(ctx context.Context, request *shared.SourceKlaviyoCreateRequest) (*operations.CreateSourceKlaviyoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Klaviyo"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5537,7 +5551,7 @@ func (s *sources) CreateSourceKlaviyo(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5570,12 +5584,14 @@ func (s *sources) CreateSourceKlaviyo(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5587,15 +5603,14 @@ func (s *sources) CreateSourceKlaviyo(ctx context.Context, request shared.Source
// CreateSourceKustomerSinger - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceKustomerSinger(ctx context.Context, request shared.SourceKustomerSingerCreateRequest) (*operations.CreateSourceKustomerSingerResponse, error) {
+func (s *Sources) CreateSourceKustomerSinger(ctx context.Context, request *shared.SourceKustomerSingerCreateRequest) (*operations.CreateSourceKustomerSingerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#KustomerSinger"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5604,7 +5619,7 @@ func (s *sources) CreateSourceKustomerSinger(ctx context.Context, request shared
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5637,12 +5652,14 @@ func (s *sources) CreateSourceKustomerSinger(ctx context.Context, request shared
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5654,15 +5671,14 @@ func (s *sources) CreateSourceKustomerSinger(ctx context.Context, request shared
// CreateSourceKyve - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceKyve(ctx context.Context, request shared.SourceKyveCreateRequest) (*operations.CreateSourceKyveResponse, error) {
+func (s *Sources) CreateSourceKyve(ctx context.Context, request *shared.SourceKyveCreateRequest) (*operations.CreateSourceKyveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Kyve"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5671,7 +5687,7 @@ func (s *sources) CreateSourceKyve(ctx context.Context, request shared.SourceKyv
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5704,12 +5720,14 @@ func (s *sources) CreateSourceKyve(ctx context.Context, request shared.SourceKyv
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5721,15 +5739,14 @@ func (s *sources) CreateSourceKyve(ctx context.Context, request shared.SourceKyv
// CreateSourceLaunchdarkly - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceLaunchdarkly(ctx context.Context, request shared.SourceLaunchdarklyCreateRequest) (*operations.CreateSourceLaunchdarklyResponse, error) {
+func (s *Sources) CreateSourceLaunchdarkly(ctx context.Context, request *shared.SourceLaunchdarklyCreateRequest) (*operations.CreateSourceLaunchdarklyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Launchdarkly"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5738,7 +5755,7 @@ func (s *sources) CreateSourceLaunchdarkly(ctx context.Context, request shared.S
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5771,12 +5788,14 @@ func (s *sources) CreateSourceLaunchdarkly(ctx context.Context, request shared.S
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5788,15 +5807,14 @@ func (s *sources) CreateSourceLaunchdarkly(ctx context.Context, request shared.S
// CreateSourceLemlist - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceLemlist(ctx context.Context, request shared.SourceLemlistCreateRequest) (*operations.CreateSourceLemlistResponse, error) {
+func (s *Sources) CreateSourceLemlist(ctx context.Context, request *shared.SourceLemlistCreateRequest) (*operations.CreateSourceLemlistResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Lemlist"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5805,7 +5823,7 @@ func (s *sources) CreateSourceLemlist(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5838,12 +5856,14 @@ func (s *sources) CreateSourceLemlist(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5855,15 +5875,14 @@ func (s *sources) CreateSourceLemlist(ctx context.Context, request shared.Source
// CreateSourceLeverHiring - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceLeverHiring(ctx context.Context, request shared.SourceLeverHiringCreateRequest) (*operations.CreateSourceLeverHiringResponse, error) {
+func (s *Sources) CreateSourceLeverHiring(ctx context.Context, request *shared.SourceLeverHiringCreateRequest) (*operations.CreateSourceLeverHiringResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#LeverHiring"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5872,7 +5891,7 @@ func (s *sources) CreateSourceLeverHiring(ctx context.Context, request shared.So
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5905,12 +5924,14 @@ func (s *sources) CreateSourceLeverHiring(ctx context.Context, request shared.So
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5922,15 +5943,14 @@ func (s *sources) CreateSourceLeverHiring(ctx context.Context, request shared.So
// CreateSourceLinkedinAds - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceLinkedinAds(ctx context.Context, request shared.SourceLinkedinAdsCreateRequest) (*operations.CreateSourceLinkedinAdsResponse, error) {
+func (s *Sources) CreateSourceLinkedinAds(ctx context.Context, request *shared.SourceLinkedinAdsCreateRequest) (*operations.CreateSourceLinkedinAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#LinkedinAds"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -5939,7 +5959,7 @@ func (s *sources) CreateSourceLinkedinAds(ctx context.Context, request shared.So
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -5972,12 +5992,14 @@ func (s *sources) CreateSourceLinkedinAds(ctx context.Context, request shared.So
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -5989,15 +6011,14 @@ func (s *sources) CreateSourceLinkedinAds(ctx context.Context, request shared.So
// CreateSourceLinkedinPages - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceLinkedinPages(ctx context.Context, request shared.SourceLinkedinPagesCreateRequest) (*operations.CreateSourceLinkedinPagesResponse, error) {
+func (s *Sources) CreateSourceLinkedinPages(ctx context.Context, request *shared.SourceLinkedinPagesCreateRequest) (*operations.CreateSourceLinkedinPagesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#LinkedinPages"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6006,7 +6027,7 @@ func (s *sources) CreateSourceLinkedinPages(ctx context.Context, request shared.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6039,12 +6060,14 @@ func (s *sources) CreateSourceLinkedinPages(ctx context.Context, request shared.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6056,15 +6079,14 @@ func (s *sources) CreateSourceLinkedinPages(ctx context.Context, request shared.
// CreateSourceLinnworks - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceLinnworks(ctx context.Context, request shared.SourceLinnworksCreateRequest) (*operations.CreateSourceLinnworksResponse, error) {
+func (s *Sources) CreateSourceLinnworks(ctx context.Context, request *shared.SourceLinnworksCreateRequest) (*operations.CreateSourceLinnworksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Linnworks"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6073,7 +6095,7 @@ func (s *sources) CreateSourceLinnworks(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6106,12 +6128,14 @@ func (s *sources) CreateSourceLinnworks(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6123,15 +6147,14 @@ func (s *sources) CreateSourceLinnworks(ctx context.Context, request shared.Sour
// CreateSourceLokalise - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceLokalise(ctx context.Context, request shared.SourceLokaliseCreateRequest) (*operations.CreateSourceLokaliseResponse, error) {
+func (s *Sources) CreateSourceLokalise(ctx context.Context, request *shared.SourceLokaliseCreateRequest) (*operations.CreateSourceLokaliseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Lokalise"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6140,7 +6163,7 @@ func (s *sources) CreateSourceLokalise(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6173,12 +6196,14 @@ func (s *sources) CreateSourceLokalise(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6190,15 +6215,14 @@ func (s *sources) CreateSourceLokalise(ctx context.Context, request shared.Sourc
// CreateSourceMailchimp - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMailchimp(ctx context.Context, request shared.SourceMailchimpCreateRequest) (*operations.CreateSourceMailchimpResponse, error) {
+func (s *Sources) CreateSourceMailchimp(ctx context.Context, request *shared.SourceMailchimpCreateRequest) (*operations.CreateSourceMailchimpResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Mailchimp"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6207,7 +6231,7 @@ func (s *sources) CreateSourceMailchimp(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6240,12 +6264,14 @@ func (s *sources) CreateSourceMailchimp(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6257,15 +6283,14 @@ func (s *sources) CreateSourceMailchimp(ctx context.Context, request shared.Sour
// CreateSourceMailgun - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMailgun(ctx context.Context, request shared.SourceMailgunCreateRequest) (*operations.CreateSourceMailgunResponse, error) {
+func (s *Sources) CreateSourceMailgun(ctx context.Context, request *shared.SourceMailgunCreateRequest) (*operations.CreateSourceMailgunResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Mailgun"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6274,7 +6299,7 @@ func (s *sources) CreateSourceMailgun(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6307,12 +6332,14 @@ func (s *sources) CreateSourceMailgun(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6324,15 +6351,14 @@ func (s *sources) CreateSourceMailgun(ctx context.Context, request shared.Source
// CreateSourceMailjetSms - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMailjetSms(ctx context.Context, request shared.SourceMailjetSmsCreateRequest) (*operations.CreateSourceMailjetSmsResponse, error) {
+func (s *Sources) CreateSourceMailjetSms(ctx context.Context, request *shared.SourceMailjetSmsCreateRequest) (*operations.CreateSourceMailjetSmsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#MailjetSms"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6341,7 +6367,7 @@ func (s *sources) CreateSourceMailjetSms(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6374,12 +6400,14 @@ func (s *sources) CreateSourceMailjetSms(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6391,15 +6419,14 @@ func (s *sources) CreateSourceMailjetSms(ctx context.Context, request shared.Sou
// CreateSourceMarketo - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMarketo(ctx context.Context, request shared.SourceMarketoCreateRequest) (*operations.CreateSourceMarketoResponse, error) {
+func (s *Sources) CreateSourceMarketo(ctx context.Context, request *shared.SourceMarketoCreateRequest) (*operations.CreateSourceMarketoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Marketo"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6408,7 +6435,7 @@ func (s *sources) CreateSourceMarketo(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6441,12 +6468,14 @@ func (s *sources) CreateSourceMarketo(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6458,15 +6487,14 @@ func (s *sources) CreateSourceMarketo(ctx context.Context, request shared.Source
// CreateSourceMetabase - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMetabase(ctx context.Context, request shared.SourceMetabaseCreateRequest) (*operations.CreateSourceMetabaseResponse, error) {
+func (s *Sources) CreateSourceMetabase(ctx context.Context, request *shared.SourceMetabaseCreateRequest) (*operations.CreateSourceMetabaseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Metabase"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6475,7 +6503,7 @@ func (s *sources) CreateSourceMetabase(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6508,12 +6536,14 @@ func (s *sources) CreateSourceMetabase(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6525,15 +6555,14 @@ func (s *sources) CreateSourceMetabase(ctx context.Context, request shared.Sourc
// CreateSourceMicrosoftTeams - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMicrosoftTeams(ctx context.Context, request shared.SourceMicrosoftTeamsCreateRequest) (*operations.CreateSourceMicrosoftTeamsResponse, error) {
+func (s *Sources) CreateSourceMicrosoftTeams(ctx context.Context, request *shared.SourceMicrosoftTeamsCreateRequest) (*operations.CreateSourceMicrosoftTeamsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#MicrosoftTeams"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6542,7 +6571,7 @@ func (s *sources) CreateSourceMicrosoftTeams(ctx context.Context, request shared
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6575,12 +6604,14 @@ func (s *sources) CreateSourceMicrosoftTeams(ctx context.Context, request shared
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6592,15 +6623,14 @@ func (s *sources) CreateSourceMicrosoftTeams(ctx context.Context, request shared
// CreateSourceMixpanel - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMixpanel(ctx context.Context, request shared.SourceMixpanelCreateRequest) (*operations.CreateSourceMixpanelResponse, error) {
+func (s *Sources) CreateSourceMixpanel(ctx context.Context, request *shared.SourceMixpanelCreateRequest) (*operations.CreateSourceMixpanelResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Mixpanel"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6609,7 +6639,7 @@ func (s *sources) CreateSourceMixpanel(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6642,12 +6672,14 @@ func (s *sources) CreateSourceMixpanel(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6659,15 +6691,14 @@ func (s *sources) CreateSourceMixpanel(ctx context.Context, request shared.Sourc
// CreateSourceMonday - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMonday(ctx context.Context, request shared.SourceMondayCreateRequest) (*operations.CreateSourceMondayResponse, error) {
+func (s *Sources) CreateSourceMonday(ctx context.Context, request *shared.SourceMondayCreateRequest) (*operations.CreateSourceMondayResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Monday"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6676,7 +6707,7 @@ func (s *sources) CreateSourceMonday(ctx context.Context, request shared.SourceM
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6709,12 +6740,14 @@ func (s *sources) CreateSourceMonday(ctx context.Context, request shared.SourceM
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6724,17 +6757,16 @@ func (s *sources) CreateSourceMonday(ctx context.Context, request shared.SourceM
return res, nil
}
-// CreateSourceMongodb - Create a source
+// CreateSourceMongodbInternalPoc - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMongodb(ctx context.Context, request shared.SourceMongodbCreateRequest) (*operations.CreateSourceMongodbResponse, error) {
+func (s *Sources) CreateSourceMongodbInternalPoc(ctx context.Context, request *shared.SourceMongodbInternalPocCreateRequest) (*operations.CreateSourceMongodbInternalPocResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#Mongodb"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#MongodbInternalPoc"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6743,7 +6775,7 @@ func (s *sources) CreateSourceMongodb(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6767,7 +6799,7 @@ func (s *sources) CreateSourceMongodb(ctx context.Context, request shared.Source
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceMongodbResponse{
+ res := &operations.CreateSourceMongodbInternalPocResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -6776,12 +6808,14 @@ func (s *sources) CreateSourceMongodb(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6791,17 +6825,16 @@ func (s *sources) CreateSourceMongodb(ctx context.Context, request shared.Source
return res, nil
}
-// CreateSourceMongodbInternalPoc - Create a source
+// CreateSourceMongodbV2 - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMongodbInternalPoc(ctx context.Context, request shared.SourceMongodbInternalPocCreateRequest) (*operations.CreateSourceMongodbInternalPocResponse, error) {
+func (s *Sources) CreateSourceMongodbV2(ctx context.Context, request *shared.SourceMongodbV2CreateRequest) (*operations.CreateSourceMongodbV2Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#MongodbInternalPoc"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#MongodbV2"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6810,7 +6843,7 @@ func (s *sources) CreateSourceMongodbInternalPoc(ctx context.Context, request sh
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6834,7 +6867,7 @@ func (s *sources) CreateSourceMongodbInternalPoc(ctx context.Context, request sh
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceMongodbInternalPocResponse{
+ res := &operations.CreateSourceMongodbV2Response{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -6843,12 +6876,14 @@ func (s *sources) CreateSourceMongodbInternalPoc(ctx context.Context, request sh
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6860,15 +6895,14 @@ func (s *sources) CreateSourceMongodbInternalPoc(ctx context.Context, request sh
// CreateSourceMssql - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMssql(ctx context.Context, request shared.SourceMssqlCreateRequest) (*operations.CreateSourceMssqlResponse, error) {
+func (s *Sources) CreateSourceMssql(ctx context.Context, request *shared.SourceMssqlCreateRequest) (*operations.CreateSourceMssqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Mssql"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6877,7 +6911,7 @@ func (s *sources) CreateSourceMssql(ctx context.Context, request shared.SourceMs
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6910,12 +6944,14 @@ func (s *sources) CreateSourceMssql(ctx context.Context, request shared.SourceMs
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6927,15 +6963,14 @@ func (s *sources) CreateSourceMssql(ctx context.Context, request shared.SourceMs
// CreateSourceMyHours - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMyHours(ctx context.Context, request shared.SourceMyHoursCreateRequest) (*operations.CreateSourceMyHoursResponse, error) {
+func (s *Sources) CreateSourceMyHours(ctx context.Context, request *shared.SourceMyHoursCreateRequest) (*operations.CreateSourceMyHoursResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#MyHours"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -6944,7 +6979,7 @@ func (s *sources) CreateSourceMyHours(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -6977,12 +7012,14 @@ func (s *sources) CreateSourceMyHours(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -6994,15 +7031,14 @@ func (s *sources) CreateSourceMyHours(ctx context.Context, request shared.Source
// CreateSourceMysql - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceMysql(ctx context.Context, request shared.SourceMysqlCreateRequest) (*operations.CreateSourceMysqlResponse, error) {
+func (s *Sources) CreateSourceMysql(ctx context.Context, request *shared.SourceMysqlCreateRequest) (*operations.CreateSourceMysqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Mysql"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7011,7 +7047,7 @@ func (s *sources) CreateSourceMysql(ctx context.Context, request shared.SourceMy
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7044,12 +7080,14 @@ func (s *sources) CreateSourceMysql(ctx context.Context, request shared.SourceMy
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7061,15 +7099,14 @@ func (s *sources) CreateSourceMysql(ctx context.Context, request shared.SourceMy
// CreateSourceNetsuite - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceNetsuite(ctx context.Context, request shared.SourceNetsuiteCreateRequest) (*operations.CreateSourceNetsuiteResponse, error) {
+func (s *Sources) CreateSourceNetsuite(ctx context.Context, request *shared.SourceNetsuiteCreateRequest) (*operations.CreateSourceNetsuiteResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Netsuite"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7078,7 +7115,7 @@ func (s *sources) CreateSourceNetsuite(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7111,12 +7148,14 @@ func (s *sources) CreateSourceNetsuite(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7128,15 +7167,14 @@ func (s *sources) CreateSourceNetsuite(ctx context.Context, request shared.Sourc
// CreateSourceNotion - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceNotion(ctx context.Context, request shared.SourceNotionCreateRequest) (*operations.CreateSourceNotionResponse, error) {
+func (s *Sources) CreateSourceNotion(ctx context.Context, request *shared.SourceNotionCreateRequest) (*operations.CreateSourceNotionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Notion"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7145,7 +7183,7 @@ func (s *sources) CreateSourceNotion(ctx context.Context, request shared.SourceN
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7178,12 +7216,14 @@ func (s *sources) CreateSourceNotion(ctx context.Context, request shared.SourceN
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7195,15 +7235,14 @@ func (s *sources) CreateSourceNotion(ctx context.Context, request shared.SourceN
// CreateSourceNytimes - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceNytimes(ctx context.Context, request shared.SourceNytimesCreateRequest) (*operations.CreateSourceNytimesResponse, error) {
+func (s *Sources) CreateSourceNytimes(ctx context.Context, request *shared.SourceNytimesCreateRequest) (*operations.CreateSourceNytimesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Nytimes"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7212,7 +7251,7 @@ func (s *sources) CreateSourceNytimes(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7245,12 +7284,14 @@ func (s *sources) CreateSourceNytimes(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7262,15 +7303,14 @@ func (s *sources) CreateSourceNytimes(ctx context.Context, request shared.Source
// CreateSourceOkta - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceOkta(ctx context.Context, request shared.SourceOktaCreateRequest) (*operations.CreateSourceOktaResponse, error) {
+func (s *Sources) CreateSourceOkta(ctx context.Context, request *shared.SourceOktaCreateRequest) (*operations.CreateSourceOktaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Okta"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7279,7 +7319,7 @@ func (s *sources) CreateSourceOkta(ctx context.Context, request shared.SourceOkt
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7312,12 +7352,14 @@ func (s *sources) CreateSourceOkta(ctx context.Context, request shared.SourceOkt
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7329,15 +7371,14 @@ func (s *sources) CreateSourceOkta(ctx context.Context, request shared.SourceOkt
// CreateSourceOmnisend - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceOmnisend(ctx context.Context, request shared.SourceOmnisendCreateRequest) (*operations.CreateSourceOmnisendResponse, error) {
+func (s *Sources) CreateSourceOmnisend(ctx context.Context, request *shared.SourceOmnisendCreateRequest) (*operations.CreateSourceOmnisendResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Omnisend"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7346,7 +7387,7 @@ func (s *sources) CreateSourceOmnisend(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7379,12 +7420,14 @@ func (s *sources) CreateSourceOmnisend(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7396,15 +7439,14 @@ func (s *sources) CreateSourceOmnisend(ctx context.Context, request shared.Sourc
// CreateSourceOnesignal - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceOnesignal(ctx context.Context, request shared.SourceOnesignalCreateRequest) (*operations.CreateSourceOnesignalResponse, error) {
+func (s *Sources) CreateSourceOnesignal(ctx context.Context, request *shared.SourceOnesignalCreateRequest) (*operations.CreateSourceOnesignalResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Onesignal"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7413,7 +7455,7 @@ func (s *sources) CreateSourceOnesignal(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7446,12 +7488,14 @@ func (s *sources) CreateSourceOnesignal(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7463,15 +7507,14 @@ func (s *sources) CreateSourceOnesignal(ctx context.Context, request shared.Sour
// CreateSourceOracle - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceOracle(ctx context.Context, request shared.SourceOracleCreateRequest) (*operations.CreateSourceOracleResponse, error) {
+func (s *Sources) CreateSourceOracle(ctx context.Context, request *shared.SourceOracleCreateRequest) (*operations.CreateSourceOracleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Oracle"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7480,7 +7523,7 @@ func (s *sources) CreateSourceOracle(ctx context.Context, request shared.SourceO
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7513,12 +7556,14 @@ func (s *sources) CreateSourceOracle(ctx context.Context, request shared.SourceO
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7530,15 +7575,14 @@ func (s *sources) CreateSourceOracle(ctx context.Context, request shared.SourceO
// CreateSourceOrb - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceOrb(ctx context.Context, request shared.SourceOrbCreateRequest) (*operations.CreateSourceOrbResponse, error) {
+func (s *Sources) CreateSourceOrb(ctx context.Context, request *shared.SourceOrbCreateRequest) (*operations.CreateSourceOrbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Orb"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7547,7 +7591,7 @@ func (s *sources) CreateSourceOrb(ctx context.Context, request shared.SourceOrbC
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7580,12 +7624,14 @@ func (s *sources) CreateSourceOrb(ctx context.Context, request shared.SourceOrbC
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7597,15 +7643,14 @@ func (s *sources) CreateSourceOrb(ctx context.Context, request shared.SourceOrbC
// CreateSourceOrbit - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceOrbit(ctx context.Context, request shared.SourceOrbitCreateRequest) (*operations.CreateSourceOrbitResponse, error) {
+func (s *Sources) CreateSourceOrbit(ctx context.Context, request *shared.SourceOrbitCreateRequest) (*operations.CreateSourceOrbitResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Orbit"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7614,7 +7659,7 @@ func (s *sources) CreateSourceOrbit(ctx context.Context, request shared.SourceOr
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7647,12 +7692,14 @@ func (s *sources) CreateSourceOrbit(ctx context.Context, request shared.SourceOr
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7664,15 +7711,14 @@ func (s *sources) CreateSourceOrbit(ctx context.Context, request shared.SourceOr
// CreateSourceOutbrainAmplify - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceOutbrainAmplify(ctx context.Context, request shared.SourceOutbrainAmplifyCreateRequest) (*operations.CreateSourceOutbrainAmplifyResponse, error) {
+func (s *Sources) CreateSourceOutbrainAmplify(ctx context.Context, request *shared.SourceOutbrainAmplifyCreateRequest) (*operations.CreateSourceOutbrainAmplifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#OutbrainAmplify"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7681,7 +7727,7 @@ func (s *sources) CreateSourceOutbrainAmplify(ctx context.Context, request share
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7714,12 +7760,14 @@ func (s *sources) CreateSourceOutbrainAmplify(ctx context.Context, request share
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7731,15 +7779,14 @@ func (s *sources) CreateSourceOutbrainAmplify(ctx context.Context, request share
// CreateSourceOutreach - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceOutreach(ctx context.Context, request shared.SourceOutreachCreateRequest) (*operations.CreateSourceOutreachResponse, error) {
+func (s *Sources) CreateSourceOutreach(ctx context.Context, request *shared.SourceOutreachCreateRequest) (*operations.CreateSourceOutreachResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Outreach"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7748,7 +7795,7 @@ func (s *sources) CreateSourceOutreach(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7781,12 +7828,14 @@ func (s *sources) CreateSourceOutreach(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7798,15 +7847,14 @@ func (s *sources) CreateSourceOutreach(ctx context.Context, request shared.Sourc
// CreateSourcePaypalTransaction - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePaypalTransaction(ctx context.Context, request shared.SourcePaypalTransactionCreateRequest) (*operations.CreateSourcePaypalTransactionResponse, error) {
+func (s *Sources) CreateSourcePaypalTransaction(ctx context.Context, request *shared.SourcePaypalTransactionCreateRequest) (*operations.CreateSourcePaypalTransactionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#PaypalTransaction"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7815,7 +7863,7 @@ func (s *sources) CreateSourcePaypalTransaction(ctx context.Context, request sha
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7848,12 +7896,14 @@ func (s *sources) CreateSourcePaypalTransaction(ctx context.Context, request sha
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7865,15 +7915,14 @@ func (s *sources) CreateSourcePaypalTransaction(ctx context.Context, request sha
// CreateSourcePaystack - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePaystack(ctx context.Context, request shared.SourcePaystackCreateRequest) (*operations.CreateSourcePaystackResponse, error) {
+func (s *Sources) CreateSourcePaystack(ctx context.Context, request *shared.SourcePaystackCreateRequest) (*operations.CreateSourcePaystackResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Paystack"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7882,7 +7931,7 @@ func (s *sources) CreateSourcePaystack(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7915,12 +7964,14 @@ func (s *sources) CreateSourcePaystack(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7932,15 +7983,14 @@ func (s *sources) CreateSourcePaystack(ctx context.Context, request shared.Sourc
// CreateSourcePendo - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePendo(ctx context.Context, request shared.SourcePendoCreateRequest) (*operations.CreateSourcePendoResponse, error) {
+func (s *Sources) CreateSourcePendo(ctx context.Context, request *shared.SourcePendoCreateRequest) (*operations.CreateSourcePendoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Pendo"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -7949,7 +7999,7 @@ func (s *sources) CreateSourcePendo(ctx context.Context, request shared.SourcePe
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -7982,12 +8032,14 @@ func (s *sources) CreateSourcePendo(ctx context.Context, request shared.SourcePe
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -7999,15 +8051,14 @@ func (s *sources) CreateSourcePendo(ctx context.Context, request shared.SourcePe
// CreateSourcePersistiq - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePersistiq(ctx context.Context, request shared.SourcePersistiqCreateRequest) (*operations.CreateSourcePersistiqResponse, error) {
+func (s *Sources) CreateSourcePersistiq(ctx context.Context, request *shared.SourcePersistiqCreateRequest) (*operations.CreateSourcePersistiqResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Persistiq"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8016,7 +8067,7 @@ func (s *sources) CreateSourcePersistiq(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8049,12 +8100,14 @@ func (s *sources) CreateSourcePersistiq(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8066,15 +8119,14 @@ func (s *sources) CreateSourcePersistiq(ctx context.Context, request shared.Sour
// CreateSourcePexelsAPI - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePexelsAPI(ctx context.Context, request shared.SourcePexelsAPICreateRequest) (*operations.CreateSourcePexelsAPIResponse, error) {
+func (s *Sources) CreateSourcePexelsAPI(ctx context.Context, request *shared.SourcePexelsAPICreateRequest) (*operations.CreateSourcePexelsAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#PexelsApi"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8083,7 +8135,7 @@ func (s *sources) CreateSourcePexelsAPI(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8116,12 +8168,14 @@ func (s *sources) CreateSourcePexelsAPI(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8133,15 +8187,14 @@ func (s *sources) CreateSourcePexelsAPI(ctx context.Context, request shared.Sour
// CreateSourcePinterest - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePinterest(ctx context.Context, request shared.SourcePinterestCreateRequest) (*operations.CreateSourcePinterestResponse, error) {
+func (s *Sources) CreateSourcePinterest(ctx context.Context, request *shared.SourcePinterestCreateRequest) (*operations.CreateSourcePinterestResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Pinterest"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8150,7 +8203,7 @@ func (s *sources) CreateSourcePinterest(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8183,12 +8236,14 @@ func (s *sources) CreateSourcePinterest(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8200,15 +8255,14 @@ func (s *sources) CreateSourcePinterest(ctx context.Context, request shared.Sour
// CreateSourcePipedrive - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePipedrive(ctx context.Context, request shared.SourcePipedriveCreateRequest) (*operations.CreateSourcePipedriveResponse, error) {
+func (s *Sources) CreateSourcePipedrive(ctx context.Context, request *shared.SourcePipedriveCreateRequest) (*operations.CreateSourcePipedriveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Pipedrive"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8217,7 +8271,7 @@ func (s *sources) CreateSourcePipedrive(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8250,12 +8304,14 @@ func (s *sources) CreateSourcePipedrive(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8267,15 +8323,14 @@ func (s *sources) CreateSourcePipedrive(ctx context.Context, request shared.Sour
// CreateSourcePocket - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePocket(ctx context.Context, request shared.SourcePocketCreateRequest) (*operations.CreateSourcePocketResponse, error) {
+func (s *Sources) CreateSourcePocket(ctx context.Context, request *shared.SourcePocketCreateRequest) (*operations.CreateSourcePocketResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Pocket"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8284,7 +8339,7 @@ func (s *sources) CreateSourcePocket(ctx context.Context, request shared.SourceP
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8317,12 +8372,14 @@ func (s *sources) CreateSourcePocket(ctx context.Context, request shared.SourceP
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8334,15 +8391,14 @@ func (s *sources) CreateSourcePocket(ctx context.Context, request shared.SourceP
// CreateSourcePokeapi - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePokeapi(ctx context.Context, request shared.SourcePokeapiCreateRequest) (*operations.CreateSourcePokeapiResponse, error) {
+func (s *Sources) CreateSourcePokeapi(ctx context.Context, request *shared.SourcePokeapiCreateRequest) (*operations.CreateSourcePokeapiResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Pokeapi"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8351,7 +8407,7 @@ func (s *sources) CreateSourcePokeapi(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8384,12 +8440,14 @@ func (s *sources) CreateSourcePokeapi(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8401,15 +8459,14 @@ func (s *sources) CreateSourcePokeapi(ctx context.Context, request shared.Source
// CreateSourcePolygonStockAPI - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePolygonStockAPI(ctx context.Context, request shared.SourcePolygonStockAPICreateRequest) (*operations.CreateSourcePolygonStockAPIResponse, error) {
+func (s *Sources) CreateSourcePolygonStockAPI(ctx context.Context, request *shared.SourcePolygonStockAPICreateRequest) (*operations.CreateSourcePolygonStockAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#PolygonStockApi"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8418,7 +8475,7 @@ func (s *sources) CreateSourcePolygonStockAPI(ctx context.Context, request share
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8451,12 +8508,14 @@ func (s *sources) CreateSourcePolygonStockAPI(ctx context.Context, request share
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8468,15 +8527,14 @@ func (s *sources) CreateSourcePolygonStockAPI(ctx context.Context, request share
// CreateSourcePostgres - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePostgres(ctx context.Context, request shared.SourcePostgresCreateRequest) (*operations.CreateSourcePostgresResponse, error) {
+func (s *Sources) CreateSourcePostgres(ctx context.Context, request *shared.SourcePostgresCreateRequest) (*operations.CreateSourcePostgresResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Postgres"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8485,7 +8543,7 @@ func (s *sources) CreateSourcePostgres(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8518,12 +8576,14 @@ func (s *sources) CreateSourcePostgres(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8535,15 +8595,14 @@ func (s *sources) CreateSourcePostgres(ctx context.Context, request shared.Sourc
// CreateSourcePosthog - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePosthog(ctx context.Context, request shared.SourcePosthogCreateRequest) (*operations.CreateSourcePosthogResponse, error) {
+func (s *Sources) CreateSourcePosthog(ctx context.Context, request *shared.SourcePosthogCreateRequest) (*operations.CreateSourcePosthogResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Posthog"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8552,7 +8611,7 @@ func (s *sources) CreateSourcePosthog(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8585,12 +8644,14 @@ func (s *sources) CreateSourcePosthog(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8602,15 +8663,14 @@ func (s *sources) CreateSourcePosthog(ctx context.Context, request shared.Source
// CreateSourcePostmarkapp - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePostmarkapp(ctx context.Context, request shared.SourcePostmarkappCreateRequest) (*operations.CreateSourcePostmarkappResponse, error) {
+func (s *Sources) CreateSourcePostmarkapp(ctx context.Context, request *shared.SourcePostmarkappCreateRequest) (*operations.CreateSourcePostmarkappResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Postmarkapp"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8619,7 +8679,7 @@ func (s *sources) CreateSourcePostmarkapp(ctx context.Context, request shared.So
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8652,12 +8712,14 @@ func (s *sources) CreateSourcePostmarkapp(ctx context.Context, request shared.So
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8669,15 +8731,14 @@ func (s *sources) CreateSourcePostmarkapp(ctx context.Context, request shared.So
// CreateSourcePrestashop - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePrestashop(ctx context.Context, request shared.SourcePrestashopCreateRequest) (*operations.CreateSourcePrestashopResponse, error) {
+func (s *Sources) CreateSourcePrestashop(ctx context.Context, request *shared.SourcePrestashopCreateRequest) (*operations.CreateSourcePrestashopResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Prestashop"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8686,7 +8747,7 @@ func (s *sources) CreateSourcePrestashop(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8719,12 +8780,14 @@ func (s *sources) CreateSourcePrestashop(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8736,15 +8799,14 @@ func (s *sources) CreateSourcePrestashop(ctx context.Context, request shared.Sou
// CreateSourcePunkAPI - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePunkAPI(ctx context.Context, request shared.SourcePunkAPICreateRequest) (*operations.CreateSourcePunkAPIResponse, error) {
+func (s *Sources) CreateSourcePunkAPI(ctx context.Context, request *shared.SourcePunkAPICreateRequest) (*operations.CreateSourcePunkAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#PunkApi"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8753,7 +8815,7 @@ func (s *sources) CreateSourcePunkAPI(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8786,12 +8848,14 @@ func (s *sources) CreateSourcePunkAPI(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8803,15 +8867,14 @@ func (s *sources) CreateSourcePunkAPI(ctx context.Context, request shared.Source
// CreateSourcePypi - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourcePypi(ctx context.Context, request shared.SourcePypiCreateRequest) (*operations.CreateSourcePypiResponse, error) {
+func (s *Sources) CreateSourcePypi(ctx context.Context, request *shared.SourcePypiCreateRequest) (*operations.CreateSourcePypiResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Pypi"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8820,7 +8883,7 @@ func (s *sources) CreateSourcePypi(ctx context.Context, request shared.SourcePyp
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8853,12 +8916,14 @@ func (s *sources) CreateSourcePypi(ctx context.Context, request shared.SourcePyp
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8870,15 +8935,14 @@ func (s *sources) CreateSourcePypi(ctx context.Context, request shared.SourcePyp
// CreateSourceQualaroo - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceQualaroo(ctx context.Context, request shared.SourceQualarooCreateRequest) (*operations.CreateSourceQualarooResponse, error) {
+func (s *Sources) CreateSourceQualaroo(ctx context.Context, request *shared.SourceQualarooCreateRequest) (*operations.CreateSourceQualarooResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Qualaroo"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8887,7 +8951,7 @@ func (s *sources) CreateSourceQualaroo(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8920,12 +8984,14 @@ func (s *sources) CreateSourceQualaroo(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -8937,15 +9003,14 @@ func (s *sources) CreateSourceQualaroo(ctx context.Context, request shared.Sourc
// CreateSourceQuickbooks - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceQuickbooks(ctx context.Context, request shared.SourceQuickbooksCreateRequest) (*operations.CreateSourceQuickbooksResponse, error) {
+func (s *Sources) CreateSourceQuickbooks(ctx context.Context, request *shared.SourceQuickbooksCreateRequest) (*operations.CreateSourceQuickbooksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Quickbooks"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -8954,7 +9019,7 @@ func (s *sources) CreateSourceQuickbooks(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -8987,12 +9052,14 @@ func (s *sources) CreateSourceQuickbooks(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9004,15 +9071,14 @@ func (s *sources) CreateSourceQuickbooks(ctx context.Context, request shared.Sou
// CreateSourceRailz - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceRailz(ctx context.Context, request shared.SourceRailzCreateRequest) (*operations.CreateSourceRailzResponse, error) {
+func (s *Sources) CreateSourceRailz(ctx context.Context, request *shared.SourceRailzCreateRequest) (*operations.CreateSourceRailzResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Railz"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9021,7 +9087,7 @@ func (s *sources) CreateSourceRailz(ctx context.Context, request shared.SourceRa
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9054,12 +9120,14 @@ func (s *sources) CreateSourceRailz(ctx context.Context, request shared.SourceRa
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9071,15 +9139,14 @@ func (s *sources) CreateSourceRailz(ctx context.Context, request shared.SourceRa
// CreateSourceRecharge - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceRecharge(ctx context.Context, request shared.SourceRechargeCreateRequest) (*operations.CreateSourceRechargeResponse, error) {
+func (s *Sources) CreateSourceRecharge(ctx context.Context, request *shared.SourceRechargeCreateRequest) (*operations.CreateSourceRechargeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Recharge"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9088,7 +9155,7 @@ func (s *sources) CreateSourceRecharge(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9121,12 +9188,14 @@ func (s *sources) CreateSourceRecharge(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9138,15 +9207,14 @@ func (s *sources) CreateSourceRecharge(ctx context.Context, request shared.Sourc
// CreateSourceRecreation - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceRecreation(ctx context.Context, request shared.SourceRecreationCreateRequest) (*operations.CreateSourceRecreationResponse, error) {
+func (s *Sources) CreateSourceRecreation(ctx context.Context, request *shared.SourceRecreationCreateRequest) (*operations.CreateSourceRecreationResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Recreation"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9155,7 +9223,7 @@ func (s *sources) CreateSourceRecreation(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9188,12 +9256,14 @@ func (s *sources) CreateSourceRecreation(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9205,15 +9275,14 @@ func (s *sources) CreateSourceRecreation(ctx context.Context, request shared.Sou
// CreateSourceRecruitee - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceRecruitee(ctx context.Context, request shared.SourceRecruiteeCreateRequest) (*operations.CreateSourceRecruiteeResponse, error) {
+func (s *Sources) CreateSourceRecruitee(ctx context.Context, request *shared.SourceRecruiteeCreateRequest) (*operations.CreateSourceRecruiteeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Recruitee"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9222,7 +9291,7 @@ func (s *sources) CreateSourceRecruitee(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9255,12 +9324,14 @@ func (s *sources) CreateSourceRecruitee(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9272,15 +9343,14 @@ func (s *sources) CreateSourceRecruitee(ctx context.Context, request shared.Sour
// CreateSourceRecurly - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceRecurly(ctx context.Context, request shared.SourceRecurlyCreateRequest) (*operations.CreateSourceRecurlyResponse, error) {
+func (s *Sources) CreateSourceRecurly(ctx context.Context, request *shared.SourceRecurlyCreateRequest) (*operations.CreateSourceRecurlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Recurly"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9289,7 +9359,7 @@ func (s *sources) CreateSourceRecurly(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9322,12 +9392,14 @@ func (s *sources) CreateSourceRecurly(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9339,15 +9411,14 @@ func (s *sources) CreateSourceRecurly(ctx context.Context, request shared.Source
// CreateSourceRedshift - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceRedshift(ctx context.Context, request shared.SourceRedshiftCreateRequest) (*operations.CreateSourceRedshiftResponse, error) {
+func (s *Sources) CreateSourceRedshift(ctx context.Context, request *shared.SourceRedshiftCreateRequest) (*operations.CreateSourceRedshiftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Redshift"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9356,7 +9427,7 @@ func (s *sources) CreateSourceRedshift(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9389,12 +9460,14 @@ func (s *sources) CreateSourceRedshift(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9406,15 +9479,14 @@ func (s *sources) CreateSourceRedshift(ctx context.Context, request shared.Sourc
// CreateSourceRetently - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceRetently(ctx context.Context, request shared.SourceRetentlyCreateRequest) (*operations.CreateSourceRetentlyResponse, error) {
+func (s *Sources) CreateSourceRetently(ctx context.Context, request *shared.SourceRetentlyCreateRequest) (*operations.CreateSourceRetentlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Retently"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9423,7 +9495,7 @@ func (s *sources) CreateSourceRetently(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9456,12 +9528,14 @@ func (s *sources) CreateSourceRetently(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9473,15 +9547,14 @@ func (s *sources) CreateSourceRetently(ctx context.Context, request shared.Sourc
// CreateSourceRkiCovid - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceRkiCovid(ctx context.Context, request shared.SourceRkiCovidCreateRequest) (*operations.CreateSourceRkiCovidResponse, error) {
+func (s *Sources) CreateSourceRkiCovid(ctx context.Context, request *shared.SourceRkiCovidCreateRequest) (*operations.CreateSourceRkiCovidResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#RkiCovid"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9490,7 +9563,7 @@ func (s *sources) CreateSourceRkiCovid(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9523,12 +9596,14 @@ func (s *sources) CreateSourceRkiCovid(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9540,15 +9615,14 @@ func (s *sources) CreateSourceRkiCovid(ctx context.Context, request shared.Sourc
// CreateSourceRss - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceRss(ctx context.Context, request shared.SourceRssCreateRequest) (*operations.CreateSourceRssResponse, error) {
+func (s *Sources) CreateSourceRss(ctx context.Context, request *shared.SourceRssCreateRequest) (*operations.CreateSourceRssResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Rss"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9557,7 +9631,7 @@ func (s *sources) CreateSourceRss(ctx context.Context, request shared.SourceRssC
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9590,12 +9664,14 @@ func (s *sources) CreateSourceRss(ctx context.Context, request shared.SourceRssC
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9607,15 +9683,14 @@ func (s *sources) CreateSourceRss(ctx context.Context, request shared.SourceRssC
// CreateSourceS3 - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceS3(ctx context.Context, request shared.SourceS3CreateRequest) (*operations.CreateSourceS3Response, error) {
+func (s *Sources) CreateSourceS3(ctx context.Context, request *shared.SourceS3CreateRequest) (*operations.CreateSourceS3Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#S3"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9624,7 +9699,7 @@ func (s *sources) CreateSourceS3(ctx context.Context, request shared.SourceS3Cre
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9657,12 +9732,14 @@ func (s *sources) CreateSourceS3(ctx context.Context, request shared.SourceS3Cre
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9674,15 +9751,14 @@ func (s *sources) CreateSourceS3(ctx context.Context, request shared.SourceS3Cre
// CreateSourceSalesforce - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSalesforce(ctx context.Context, request shared.SourceSalesforceCreateRequest) (*operations.CreateSourceSalesforceResponse, error) {
+func (s *Sources) CreateSourceSalesforce(ctx context.Context, request *shared.SourceSalesforceCreateRequest) (*operations.CreateSourceSalesforceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Salesforce"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9691,7 +9767,7 @@ func (s *sources) CreateSourceSalesforce(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9724,12 +9800,14 @@ func (s *sources) CreateSourceSalesforce(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9741,15 +9819,14 @@ func (s *sources) CreateSourceSalesforce(ctx context.Context, request shared.Sou
// CreateSourceSalesloft - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSalesloft(ctx context.Context, request shared.SourceSalesloftCreateRequest) (*operations.CreateSourceSalesloftResponse, error) {
+func (s *Sources) CreateSourceSalesloft(ctx context.Context, request *shared.SourceSalesloftCreateRequest) (*operations.CreateSourceSalesloftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Salesloft"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9758,7 +9835,7 @@ func (s *sources) CreateSourceSalesloft(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9791,12 +9868,14 @@ func (s *sources) CreateSourceSalesloft(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9808,15 +9887,14 @@ func (s *sources) CreateSourceSalesloft(ctx context.Context, request shared.Sour
// CreateSourceSapFieldglass - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSapFieldglass(ctx context.Context, request shared.SourceSapFieldglassCreateRequest) (*operations.CreateSourceSapFieldglassResponse, error) {
+func (s *Sources) CreateSourceSapFieldglass(ctx context.Context, request *shared.SourceSapFieldglassCreateRequest) (*operations.CreateSourceSapFieldglassResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#SapFieldglass"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9825,7 +9903,7 @@ func (s *sources) CreateSourceSapFieldglass(ctx context.Context, request shared.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9858,12 +9936,14 @@ func (s *sources) CreateSourceSapFieldglass(ctx context.Context, request shared.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9875,15 +9955,14 @@ func (s *sources) CreateSourceSapFieldglass(ctx context.Context, request shared.
// CreateSourceSecoda - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSecoda(ctx context.Context, request shared.SourceSecodaCreateRequest) (*operations.CreateSourceSecodaResponse, error) {
+func (s *Sources) CreateSourceSecoda(ctx context.Context, request *shared.SourceSecodaCreateRequest) (*operations.CreateSourceSecodaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Secoda"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9892,7 +9971,7 @@ func (s *sources) CreateSourceSecoda(ctx context.Context, request shared.SourceS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9925,12 +10004,14 @@ func (s *sources) CreateSourceSecoda(ctx context.Context, request shared.SourceS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -9942,15 +10023,14 @@ func (s *sources) CreateSourceSecoda(ctx context.Context, request shared.SourceS
// CreateSourceSendgrid - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSendgrid(ctx context.Context, request shared.SourceSendgridCreateRequest) (*operations.CreateSourceSendgridResponse, error) {
+func (s *Sources) CreateSourceSendgrid(ctx context.Context, request *shared.SourceSendgridCreateRequest) (*operations.CreateSourceSendgridResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Sendgrid"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -9959,7 +10039,7 @@ func (s *sources) CreateSourceSendgrid(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -9992,12 +10072,14 @@ func (s *sources) CreateSourceSendgrid(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10009,15 +10091,14 @@ func (s *sources) CreateSourceSendgrid(ctx context.Context, request shared.Sourc
// CreateSourceSendinblue - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSendinblue(ctx context.Context, request shared.SourceSendinblueCreateRequest) (*operations.CreateSourceSendinblueResponse, error) {
+func (s *Sources) CreateSourceSendinblue(ctx context.Context, request *shared.SourceSendinblueCreateRequest) (*operations.CreateSourceSendinblueResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Sendinblue"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10026,7 +10107,7 @@ func (s *sources) CreateSourceSendinblue(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10059,12 +10140,14 @@ func (s *sources) CreateSourceSendinblue(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10076,15 +10159,14 @@ func (s *sources) CreateSourceSendinblue(ctx context.Context, request shared.Sou
// CreateSourceSenseforce - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSenseforce(ctx context.Context, request shared.SourceSenseforceCreateRequest) (*operations.CreateSourceSenseforceResponse, error) {
+func (s *Sources) CreateSourceSenseforce(ctx context.Context, request *shared.SourceSenseforceCreateRequest) (*operations.CreateSourceSenseforceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Senseforce"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10093,7 +10175,7 @@ func (s *sources) CreateSourceSenseforce(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10126,12 +10208,14 @@ func (s *sources) CreateSourceSenseforce(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10143,15 +10227,14 @@ func (s *sources) CreateSourceSenseforce(ctx context.Context, request shared.Sou
// CreateSourceSentry - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSentry(ctx context.Context, request shared.SourceSentryCreateRequest) (*operations.CreateSourceSentryResponse, error) {
+func (s *Sources) CreateSourceSentry(ctx context.Context, request *shared.SourceSentryCreateRequest) (*operations.CreateSourceSentryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Sentry"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10160,7 +10243,7 @@ func (s *sources) CreateSourceSentry(ctx context.Context, request shared.SourceS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10193,12 +10276,14 @@ func (s *sources) CreateSourceSentry(ctx context.Context, request shared.SourceS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10210,15 +10295,14 @@ func (s *sources) CreateSourceSentry(ctx context.Context, request shared.SourceS
// CreateSourceSftp - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSftp(ctx context.Context, request shared.SourceSftpCreateRequest) (*operations.CreateSourceSftpResponse, error) {
+func (s *Sources) CreateSourceSftp(ctx context.Context, request *shared.SourceSftpCreateRequest) (*operations.CreateSourceSftpResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Sftp"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10227,7 +10311,7 @@ func (s *sources) CreateSourceSftp(ctx context.Context, request shared.SourceSft
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10260,12 +10344,14 @@ func (s *sources) CreateSourceSftp(ctx context.Context, request shared.SourceSft
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10277,15 +10363,14 @@ func (s *sources) CreateSourceSftp(ctx context.Context, request shared.SourceSft
// CreateSourceSftpBulk - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSftpBulk(ctx context.Context, request shared.SourceSftpBulkCreateRequest) (*operations.CreateSourceSftpBulkResponse, error) {
+func (s *Sources) CreateSourceSftpBulk(ctx context.Context, request *shared.SourceSftpBulkCreateRequest) (*operations.CreateSourceSftpBulkResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#SftpBulk"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10294,7 +10379,7 @@ func (s *sources) CreateSourceSftpBulk(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10327,12 +10412,14 @@ func (s *sources) CreateSourceSftpBulk(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10344,15 +10431,14 @@ func (s *sources) CreateSourceSftpBulk(ctx context.Context, request shared.Sourc
// CreateSourceShopify - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceShopify(ctx context.Context, request shared.SourceShopifyCreateRequest) (*operations.CreateSourceShopifyResponse, error) {
+func (s *Sources) CreateSourceShopify(ctx context.Context, request *shared.SourceShopifyCreateRequest) (*operations.CreateSourceShopifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Shopify"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10361,7 +10447,7 @@ func (s *sources) CreateSourceShopify(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10394,12 +10480,14 @@ func (s *sources) CreateSourceShopify(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10411,15 +10499,14 @@ func (s *sources) CreateSourceShopify(ctx context.Context, request shared.Source
// CreateSourceShortio - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceShortio(ctx context.Context, request shared.SourceShortioCreateRequest) (*operations.CreateSourceShortioResponse, error) {
+func (s *Sources) CreateSourceShortio(ctx context.Context, request *shared.SourceShortioCreateRequest) (*operations.CreateSourceShortioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Shortio"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10428,7 +10515,7 @@ func (s *sources) CreateSourceShortio(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10461,12 +10548,14 @@ func (s *sources) CreateSourceShortio(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10478,15 +10567,14 @@ func (s *sources) CreateSourceShortio(ctx context.Context, request shared.Source
// CreateSourceSlack - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSlack(ctx context.Context, request shared.SourceSlackCreateRequest) (*operations.CreateSourceSlackResponse, error) {
+func (s *Sources) CreateSourceSlack(ctx context.Context, request *shared.SourceSlackCreateRequest) (*operations.CreateSourceSlackResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Slack"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10495,7 +10583,7 @@ func (s *sources) CreateSourceSlack(ctx context.Context, request shared.SourceSl
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10528,12 +10616,14 @@ func (s *sources) CreateSourceSlack(ctx context.Context, request shared.SourceSl
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10545,15 +10635,14 @@ func (s *sources) CreateSourceSlack(ctx context.Context, request shared.SourceSl
// CreateSourceSmaily - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSmaily(ctx context.Context, request shared.SourceSmailyCreateRequest) (*operations.CreateSourceSmailyResponse, error) {
+func (s *Sources) CreateSourceSmaily(ctx context.Context, request *shared.SourceSmailyCreateRequest) (*operations.CreateSourceSmailyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Smaily"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10562,7 +10651,7 @@ func (s *sources) CreateSourceSmaily(ctx context.Context, request shared.SourceS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10595,12 +10684,14 @@ func (s *sources) CreateSourceSmaily(ctx context.Context, request shared.SourceS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10612,15 +10703,14 @@ func (s *sources) CreateSourceSmaily(ctx context.Context, request shared.SourceS
// CreateSourceSmartengage - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSmartengage(ctx context.Context, request shared.SourceSmartengageCreateRequest) (*operations.CreateSourceSmartengageResponse, error) {
+func (s *Sources) CreateSourceSmartengage(ctx context.Context, request *shared.SourceSmartengageCreateRequest) (*operations.CreateSourceSmartengageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Smartengage"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10629,7 +10719,7 @@ func (s *sources) CreateSourceSmartengage(ctx context.Context, request shared.So
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10662,12 +10752,14 @@ func (s *sources) CreateSourceSmartengage(ctx context.Context, request shared.So
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10679,15 +10771,14 @@ func (s *sources) CreateSourceSmartengage(ctx context.Context, request shared.So
// CreateSourceSmartsheets - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSmartsheets(ctx context.Context, request shared.SourceSmartsheetsCreateRequest) (*operations.CreateSourceSmartsheetsResponse, error) {
+func (s *Sources) CreateSourceSmartsheets(ctx context.Context, request *shared.SourceSmartsheetsCreateRequest) (*operations.CreateSourceSmartsheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Smartsheets"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10696,7 +10787,7 @@ func (s *sources) CreateSourceSmartsheets(ctx context.Context, request shared.So
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10729,12 +10820,14 @@ func (s *sources) CreateSourceSmartsheets(ctx context.Context, request shared.So
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10746,15 +10839,14 @@ func (s *sources) CreateSourceSmartsheets(ctx context.Context, request shared.So
// CreateSourceSnapchatMarketing - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSnapchatMarketing(ctx context.Context, request shared.SourceSnapchatMarketingCreateRequest) (*operations.CreateSourceSnapchatMarketingResponse, error) {
+func (s *Sources) CreateSourceSnapchatMarketing(ctx context.Context, request *shared.SourceSnapchatMarketingCreateRequest) (*operations.CreateSourceSnapchatMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#SnapchatMarketing"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10763,7 +10855,7 @@ func (s *sources) CreateSourceSnapchatMarketing(ctx context.Context, request sha
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10796,12 +10888,14 @@ func (s *sources) CreateSourceSnapchatMarketing(ctx context.Context, request sha
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10813,15 +10907,14 @@ func (s *sources) CreateSourceSnapchatMarketing(ctx context.Context, request sha
// CreateSourceSnowflake - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSnowflake(ctx context.Context, request shared.SourceSnowflakeCreateRequest) (*operations.CreateSourceSnowflakeResponse, error) {
+func (s *Sources) CreateSourceSnowflake(ctx context.Context, request *shared.SourceSnowflakeCreateRequest) (*operations.CreateSourceSnowflakeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Snowflake"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10830,7 +10923,7 @@ func (s *sources) CreateSourceSnowflake(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10863,12 +10956,14 @@ func (s *sources) CreateSourceSnowflake(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10880,15 +10975,14 @@ func (s *sources) CreateSourceSnowflake(ctx context.Context, request shared.Sour
// CreateSourceSonarCloud - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSonarCloud(ctx context.Context, request shared.SourceSonarCloudCreateRequest) (*operations.CreateSourceSonarCloudResponse, error) {
+func (s *Sources) CreateSourceSonarCloud(ctx context.Context, request *shared.SourceSonarCloudCreateRequest) (*operations.CreateSourceSonarCloudResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#SonarCloud"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10897,7 +10991,7 @@ func (s *sources) CreateSourceSonarCloud(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10930,12 +11024,14 @@ func (s *sources) CreateSourceSonarCloud(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -10947,15 +11043,14 @@ func (s *sources) CreateSourceSonarCloud(ctx context.Context, request shared.Sou
// CreateSourceSpacexAPI - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSpacexAPI(ctx context.Context, request shared.SourceSpacexAPICreateRequest) (*operations.CreateSourceSpacexAPIResponse, error) {
+func (s *Sources) CreateSourceSpacexAPI(ctx context.Context, request *shared.SourceSpacexAPICreateRequest) (*operations.CreateSourceSpacexAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#SpacexApi"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -10964,7 +11059,7 @@ func (s *sources) CreateSourceSpacexAPI(ctx context.Context, request shared.Sour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -10997,12 +11092,14 @@ func (s *sources) CreateSourceSpacexAPI(ctx context.Context, request shared.Sour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11014,15 +11111,14 @@ func (s *sources) CreateSourceSpacexAPI(ctx context.Context, request shared.Sour
// CreateSourceSquare - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSquare(ctx context.Context, request shared.SourceSquareCreateRequest) (*operations.CreateSourceSquareResponse, error) {
+func (s *Sources) CreateSourceSquare(ctx context.Context, request *shared.SourceSquareCreateRequest) (*operations.CreateSourceSquareResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Square"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11031,7 +11127,7 @@ func (s *sources) CreateSourceSquare(ctx context.Context, request shared.SourceS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11064,12 +11160,14 @@ func (s *sources) CreateSourceSquare(ctx context.Context, request shared.SourceS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11081,15 +11179,14 @@ func (s *sources) CreateSourceSquare(ctx context.Context, request shared.SourceS
// CreateSourceStrava - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceStrava(ctx context.Context, request shared.SourceStravaCreateRequest) (*operations.CreateSourceStravaResponse, error) {
+func (s *Sources) CreateSourceStrava(ctx context.Context, request *shared.SourceStravaCreateRequest) (*operations.CreateSourceStravaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Strava"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11098,7 +11195,7 @@ func (s *sources) CreateSourceStrava(ctx context.Context, request shared.SourceS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11131,12 +11228,14 @@ func (s *sources) CreateSourceStrava(ctx context.Context, request shared.SourceS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11148,15 +11247,14 @@ func (s *sources) CreateSourceStrava(ctx context.Context, request shared.SourceS
// CreateSourceStripe - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceStripe(ctx context.Context, request shared.SourceStripeCreateRequest) (*operations.CreateSourceStripeResponse, error) {
+func (s *Sources) CreateSourceStripe(ctx context.Context, request *shared.SourceStripeCreateRequest) (*operations.CreateSourceStripeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Stripe"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11165,7 +11263,7 @@ func (s *sources) CreateSourceStripe(ctx context.Context, request shared.SourceS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11198,12 +11296,14 @@ func (s *sources) CreateSourceStripe(ctx context.Context, request shared.SourceS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11215,15 +11315,14 @@ func (s *sources) CreateSourceStripe(ctx context.Context, request shared.SourceS
// CreateSourceSurveySparrow - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSurveySparrow(ctx context.Context, request shared.SourceSurveySparrowCreateRequest) (*operations.CreateSourceSurveySparrowResponse, error) {
+func (s *Sources) CreateSourceSurveySparrow(ctx context.Context, request *shared.SourceSurveySparrowCreateRequest) (*operations.CreateSourceSurveySparrowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#SurveySparrow"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11232,7 +11331,7 @@ func (s *sources) CreateSourceSurveySparrow(ctx context.Context, request shared.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11265,12 +11364,14 @@ func (s *sources) CreateSourceSurveySparrow(ctx context.Context, request shared.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11282,15 +11383,14 @@ func (s *sources) CreateSourceSurveySparrow(ctx context.Context, request shared.
// CreateSourceSurveymonkey - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceSurveymonkey(ctx context.Context, request shared.SourceSurveymonkeyCreateRequest) (*operations.CreateSourceSurveymonkeyResponse, error) {
+func (s *Sources) CreateSourceSurveymonkey(ctx context.Context, request *shared.SourceSurveymonkeyCreateRequest) (*operations.CreateSourceSurveymonkeyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Surveymonkey"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11299,7 +11399,7 @@ func (s *sources) CreateSourceSurveymonkey(ctx context.Context, request shared.S
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11332,12 +11432,14 @@ func (s *sources) CreateSourceSurveymonkey(ctx context.Context, request shared.S
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11349,15 +11451,14 @@ func (s *sources) CreateSourceSurveymonkey(ctx context.Context, request shared.S
// CreateSourceTempo - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceTempo(ctx context.Context, request shared.SourceTempoCreateRequest) (*operations.CreateSourceTempoResponse, error) {
+func (s *Sources) CreateSourceTempo(ctx context.Context, request *shared.SourceTempoCreateRequest) (*operations.CreateSourceTempoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Tempo"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11366,7 +11467,7 @@ func (s *sources) CreateSourceTempo(ctx context.Context, request shared.SourceTe
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11399,12 +11500,14 @@ func (s *sources) CreateSourceTempo(ctx context.Context, request shared.SourceTe
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11416,15 +11519,14 @@ func (s *sources) CreateSourceTempo(ctx context.Context, request shared.SourceTe
// CreateSourceTheGuardianAPI - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceTheGuardianAPI(ctx context.Context, request shared.SourceTheGuardianAPICreateRequest) (*operations.CreateSourceTheGuardianAPIResponse, error) {
+func (s *Sources) CreateSourceTheGuardianAPI(ctx context.Context, request *shared.SourceTheGuardianAPICreateRequest) (*operations.CreateSourceTheGuardianAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#TheGuardianApi"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11433,7 +11535,7 @@ func (s *sources) CreateSourceTheGuardianAPI(ctx context.Context, request shared
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11466,12 +11568,14 @@ func (s *sources) CreateSourceTheGuardianAPI(ctx context.Context, request shared
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11483,15 +11587,14 @@ func (s *sources) CreateSourceTheGuardianAPI(ctx context.Context, request shared
// CreateSourceTiktokMarketing - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceTiktokMarketing(ctx context.Context, request shared.SourceTiktokMarketingCreateRequest) (*operations.CreateSourceTiktokMarketingResponse, error) {
+func (s *Sources) CreateSourceTiktokMarketing(ctx context.Context, request *shared.SourceTiktokMarketingCreateRequest) (*operations.CreateSourceTiktokMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#TiktokMarketing"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11500,7 +11603,7 @@ func (s *sources) CreateSourceTiktokMarketing(ctx context.Context, request share
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11533,12 +11636,14 @@ func (s *sources) CreateSourceTiktokMarketing(ctx context.Context, request share
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11550,15 +11655,14 @@ func (s *sources) CreateSourceTiktokMarketing(ctx context.Context, request share
// CreateSourceTodoist - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceTodoist(ctx context.Context, request shared.SourceTodoistCreateRequest) (*operations.CreateSourceTodoistResponse, error) {
+func (s *Sources) CreateSourceTodoist(ctx context.Context, request *shared.SourceTodoistCreateRequest) (*operations.CreateSourceTodoistResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Todoist"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11567,7 +11671,7 @@ func (s *sources) CreateSourceTodoist(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11600,12 +11704,14 @@ func (s *sources) CreateSourceTodoist(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11617,15 +11723,14 @@ func (s *sources) CreateSourceTodoist(ctx context.Context, request shared.Source
// CreateSourceTrello - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceTrello(ctx context.Context, request shared.SourceTrelloCreateRequest) (*operations.CreateSourceTrelloResponse, error) {
+func (s *Sources) CreateSourceTrello(ctx context.Context, request *shared.SourceTrelloCreateRequest) (*operations.CreateSourceTrelloResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Trello"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11634,7 +11739,7 @@ func (s *sources) CreateSourceTrello(ctx context.Context, request shared.SourceT
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11667,12 +11772,14 @@ func (s *sources) CreateSourceTrello(ctx context.Context, request shared.SourceT
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11684,15 +11791,14 @@ func (s *sources) CreateSourceTrello(ctx context.Context, request shared.SourceT
// CreateSourceTrustpilot - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceTrustpilot(ctx context.Context, request shared.SourceTrustpilotCreateRequest) (*operations.CreateSourceTrustpilotResponse, error) {
+func (s *Sources) CreateSourceTrustpilot(ctx context.Context, request *shared.SourceTrustpilotCreateRequest) (*operations.CreateSourceTrustpilotResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Trustpilot"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11701,7 +11807,7 @@ func (s *sources) CreateSourceTrustpilot(ctx context.Context, request shared.Sou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11734,12 +11840,14 @@ func (s *sources) CreateSourceTrustpilot(ctx context.Context, request shared.Sou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11751,15 +11859,14 @@ func (s *sources) CreateSourceTrustpilot(ctx context.Context, request shared.Sou
// CreateSourceTvmazeSchedule - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceTvmazeSchedule(ctx context.Context, request shared.SourceTvmazeScheduleCreateRequest) (*operations.CreateSourceTvmazeScheduleResponse, error) {
+func (s *Sources) CreateSourceTvmazeSchedule(ctx context.Context, request *shared.SourceTvmazeScheduleCreateRequest) (*operations.CreateSourceTvmazeScheduleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#TvmazeSchedule"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11768,7 +11875,7 @@ func (s *sources) CreateSourceTvmazeSchedule(ctx context.Context, request shared
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11801,12 +11908,14 @@ func (s *sources) CreateSourceTvmazeSchedule(ctx context.Context, request shared
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11818,15 +11927,14 @@ func (s *sources) CreateSourceTvmazeSchedule(ctx context.Context, request shared
// CreateSourceTwilio - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceTwilio(ctx context.Context, request shared.SourceTwilioCreateRequest) (*operations.CreateSourceTwilioResponse, error) {
+func (s *Sources) CreateSourceTwilio(ctx context.Context, request *shared.SourceTwilioCreateRequest) (*operations.CreateSourceTwilioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Twilio"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11835,7 +11943,7 @@ func (s *sources) CreateSourceTwilio(ctx context.Context, request shared.SourceT
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11868,12 +11976,14 @@ func (s *sources) CreateSourceTwilio(ctx context.Context, request shared.SourceT
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11885,15 +11995,14 @@ func (s *sources) CreateSourceTwilio(ctx context.Context, request shared.SourceT
// CreateSourceTwilioTaskrouter - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceTwilioTaskrouter(ctx context.Context, request shared.SourceTwilioTaskrouterCreateRequest) (*operations.CreateSourceTwilioTaskrouterResponse, error) {
+func (s *Sources) CreateSourceTwilioTaskrouter(ctx context.Context, request *shared.SourceTwilioTaskrouterCreateRequest) (*operations.CreateSourceTwilioTaskrouterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#TwilioTaskrouter"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11902,7 +12011,7 @@ func (s *sources) CreateSourceTwilioTaskrouter(ctx context.Context, request shar
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -11935,12 +12044,14 @@ func (s *sources) CreateSourceTwilioTaskrouter(ctx context.Context, request shar
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -11952,15 +12063,14 @@ func (s *sources) CreateSourceTwilioTaskrouter(ctx context.Context, request shar
// CreateSourceTwitter - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceTwitter(ctx context.Context, request shared.SourceTwitterCreateRequest) (*operations.CreateSourceTwitterResponse, error) {
+func (s *Sources) CreateSourceTwitter(ctx context.Context, request *shared.SourceTwitterCreateRequest) (*operations.CreateSourceTwitterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Twitter"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -11969,7 +12079,7 @@ func (s *sources) CreateSourceTwitter(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12002,12 +12112,14 @@ func (s *sources) CreateSourceTwitter(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12019,15 +12131,14 @@ func (s *sources) CreateSourceTwitter(ctx context.Context, request shared.Source
// CreateSourceTypeform - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceTypeform(ctx context.Context, request shared.SourceTypeformCreateRequest) (*operations.CreateSourceTypeformResponse, error) {
+func (s *Sources) CreateSourceTypeform(ctx context.Context, request *shared.SourceTypeformCreateRequest) (*operations.CreateSourceTypeformResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Typeform"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12036,7 +12147,7 @@ func (s *sources) CreateSourceTypeform(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12069,12 +12180,14 @@ func (s *sources) CreateSourceTypeform(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12086,15 +12199,14 @@ func (s *sources) CreateSourceTypeform(ctx context.Context, request shared.Sourc
// CreateSourceUsCensus - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceUsCensus(ctx context.Context, request shared.SourceUsCensusCreateRequest) (*operations.CreateSourceUsCensusResponse, error) {
+func (s *Sources) CreateSourceUsCensus(ctx context.Context, request *shared.SourceUsCensusCreateRequest) (*operations.CreateSourceUsCensusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#UsCensus"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12103,7 +12215,7 @@ func (s *sources) CreateSourceUsCensus(ctx context.Context, request shared.Sourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12136,12 +12248,14 @@ func (s *sources) CreateSourceUsCensus(ctx context.Context, request shared.Sourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12153,15 +12267,14 @@ func (s *sources) CreateSourceUsCensus(ctx context.Context, request shared.Sourc
// CreateSourceVantage - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceVantage(ctx context.Context, request shared.SourceVantageCreateRequest) (*operations.CreateSourceVantageResponse, error) {
+func (s *Sources) CreateSourceVantage(ctx context.Context, request *shared.SourceVantageCreateRequest) (*operations.CreateSourceVantageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Vantage"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12170,7 +12283,7 @@ func (s *sources) CreateSourceVantage(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12203,12 +12316,14 @@ func (s *sources) CreateSourceVantage(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12220,15 +12335,14 @@ func (s *sources) CreateSourceVantage(ctx context.Context, request shared.Source
// CreateSourceWebflow - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceWebflow(ctx context.Context, request shared.SourceWebflowCreateRequest) (*operations.CreateSourceWebflowResponse, error) {
+func (s *Sources) CreateSourceWebflow(ctx context.Context, request *shared.SourceWebflowCreateRequest) (*operations.CreateSourceWebflowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Webflow"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12237,7 +12351,7 @@ func (s *sources) CreateSourceWebflow(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12270,12 +12384,14 @@ func (s *sources) CreateSourceWebflow(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12287,15 +12403,14 @@ func (s *sources) CreateSourceWebflow(ctx context.Context, request shared.Source
// CreateSourceWhiskyHunter - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceWhiskyHunter(ctx context.Context, request shared.SourceWhiskyHunterCreateRequest) (*operations.CreateSourceWhiskyHunterResponse, error) {
+func (s *Sources) CreateSourceWhiskyHunter(ctx context.Context, request *shared.SourceWhiskyHunterCreateRequest) (*operations.CreateSourceWhiskyHunterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#WhiskyHunter"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12304,7 +12419,7 @@ func (s *sources) CreateSourceWhiskyHunter(ctx context.Context, request shared.S
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12337,12 +12452,14 @@ func (s *sources) CreateSourceWhiskyHunter(ctx context.Context, request shared.S
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12354,15 +12471,14 @@ func (s *sources) CreateSourceWhiskyHunter(ctx context.Context, request shared.S
// CreateSourceWikipediaPageviews - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceWikipediaPageviews(ctx context.Context, request shared.SourceWikipediaPageviewsCreateRequest) (*operations.CreateSourceWikipediaPageviewsResponse, error) {
+func (s *Sources) CreateSourceWikipediaPageviews(ctx context.Context, request *shared.SourceWikipediaPageviewsCreateRequest) (*operations.CreateSourceWikipediaPageviewsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#WikipediaPageviews"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12371,7 +12487,7 @@ func (s *sources) CreateSourceWikipediaPageviews(ctx context.Context, request sh
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12404,12 +12520,14 @@ func (s *sources) CreateSourceWikipediaPageviews(ctx context.Context, request sh
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12421,15 +12539,14 @@ func (s *sources) CreateSourceWikipediaPageviews(ctx context.Context, request sh
// CreateSourceWoocommerce - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceWoocommerce(ctx context.Context, request shared.SourceWoocommerceCreateRequest) (*operations.CreateSourceWoocommerceResponse, error) {
+func (s *Sources) CreateSourceWoocommerce(ctx context.Context, request *shared.SourceWoocommerceCreateRequest) (*operations.CreateSourceWoocommerceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Woocommerce"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12438,7 +12555,7 @@ func (s *sources) CreateSourceWoocommerce(ctx context.Context, request shared.So
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12471,79 +12588,14 @@ func (s *sources) CreateSourceWoocommerce(ctx context.Context, request shared.So
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
- }
- case httpRes.StatusCode == 400:
- fallthrough
- case httpRes.StatusCode == 403:
- }
-
- return res, nil
-}
-
-// CreateSourceXero - Create a source
-// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceXero(ctx context.Context, request shared.SourceXeroCreateRequest) (*operations.CreateSourceXeroResponse, error) {
- baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#Xero"
-
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
- if err != nil {
- return nil, fmt.Errorf("error serializing request body: %w", err)
- }
-
- debugBody := bytes.NewBuffer([]byte{})
- debugReader := io.TeeReader(bodyReader, debugBody)
-
- req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader)
- if err != nil {
- return nil, fmt.Errorf("error creating request: %w", err)
- }
- req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
-
- req.Header.Set("Content-Type", reqContentType)
-
- client := s.sdkConfiguration.SecurityClient
-
- httpRes, err := client.Do(req)
- if err != nil {
- return nil, fmt.Errorf("error sending request: %w", err)
- }
- if httpRes == nil {
- return nil, fmt.Errorf("error sending request: no response")
- }
-
- rawBody, err := io.ReadAll(httpRes.Body)
- if err != nil {
- return nil, fmt.Errorf("error reading response body: %w", err)
- }
- httpRes.Request.Body = io.NopCloser(debugBody)
- httpRes.Body.Close()
- httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
-
- contentType := httpRes.Header.Get("Content-Type")
-
- res := &operations.CreateSourceXeroResponse{
- StatusCode: httpRes.StatusCode,
- ContentType: contentType,
- RawResponse: httpRes,
- }
- switch {
- case httpRes.StatusCode == 200:
- switch {
- case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
- }
-
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12555,15 +12607,14 @@ func (s *sources) CreateSourceXero(ctx context.Context, request shared.SourceXer
// CreateSourceXkcd - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceXkcd(ctx context.Context, request shared.SourceXkcdCreateRequest) (*operations.CreateSourceXkcdResponse, error) {
+func (s *Sources) CreateSourceXkcd(ctx context.Context, request *shared.SourceXkcdCreateRequest) (*operations.CreateSourceXkcdResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Xkcd"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12572,7 +12623,7 @@ func (s *sources) CreateSourceXkcd(ctx context.Context, request shared.SourceXkc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12605,12 +12656,14 @@ func (s *sources) CreateSourceXkcd(ctx context.Context, request shared.SourceXkc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12622,15 +12675,14 @@ func (s *sources) CreateSourceXkcd(ctx context.Context, request shared.SourceXkc
// CreateSourceYandexMetrica - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceYandexMetrica(ctx context.Context, request shared.SourceYandexMetricaCreateRequest) (*operations.CreateSourceYandexMetricaResponse, error) {
+func (s *Sources) CreateSourceYandexMetrica(ctx context.Context, request *shared.SourceYandexMetricaCreateRequest) (*operations.CreateSourceYandexMetricaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#YandexMetrica"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12639,7 +12691,7 @@ func (s *sources) CreateSourceYandexMetrica(ctx context.Context, request shared.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12672,12 +12724,14 @@ func (s *sources) CreateSourceYandexMetrica(ctx context.Context, request shared.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12689,15 +12743,14 @@ func (s *sources) CreateSourceYandexMetrica(ctx context.Context, request shared.
// CreateSourceYotpo - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceYotpo(ctx context.Context, request shared.SourceYotpoCreateRequest) (*operations.CreateSourceYotpoResponse, error) {
+func (s *Sources) CreateSourceYotpo(ctx context.Context, request *shared.SourceYotpoCreateRequest) (*operations.CreateSourceYotpoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Yotpo"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12706,7 +12759,7 @@ func (s *sources) CreateSourceYotpo(ctx context.Context, request shared.SourceYo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12739,12 +12792,14 @@ func (s *sources) CreateSourceYotpo(ctx context.Context, request shared.SourceYo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12754,17 +12809,16 @@ func (s *sources) CreateSourceYotpo(ctx context.Context, request shared.SourceYo
return res, nil
}
-// CreateSourceYounium - Create a source
+// CreateSourceYoutubeAnalytics - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceYounium(ctx context.Context, request shared.SourceYouniumCreateRequest) (*operations.CreateSourceYouniumResponse, error) {
+func (s *Sources) CreateSourceYoutubeAnalytics(ctx context.Context, request *shared.SourceYoutubeAnalyticsCreateRequest) (*operations.CreateSourceYoutubeAnalyticsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#Younium"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#YoutubeAnalytics"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12773,7 +12827,7 @@ func (s *sources) CreateSourceYounium(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12797,7 +12851,7 @@ func (s *sources) CreateSourceYounium(ctx context.Context, request shared.Source
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceYouniumResponse{
+ res := &operations.CreateSourceYoutubeAnalyticsResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -12806,12 +12860,14 @@ func (s *sources) CreateSourceYounium(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12821,17 +12877,16 @@ func (s *sources) CreateSourceYounium(ctx context.Context, request shared.Source
return res, nil
}
-// CreateSourceYoutubeAnalytics - Create a source
+// CreateSourceZendeskChat - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceYoutubeAnalytics(ctx context.Context, request shared.SourceYoutubeAnalyticsCreateRequest) (*operations.CreateSourceYoutubeAnalyticsResponse, error) {
+func (s *Sources) CreateSourceZendeskChat(ctx context.Context, request *shared.SourceZendeskChatCreateRequest) (*operations.CreateSourceZendeskChatResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#YoutubeAnalytics"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#ZendeskChat"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12840,7 +12895,7 @@ func (s *sources) CreateSourceYoutubeAnalytics(ctx context.Context, request shar
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12864,7 +12919,7 @@ func (s *sources) CreateSourceYoutubeAnalytics(ctx context.Context, request shar
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceYoutubeAnalyticsResponse{
+ res := &operations.CreateSourceZendeskChatResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -12873,12 +12928,14 @@ func (s *sources) CreateSourceYoutubeAnalytics(ctx context.Context, request shar
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12888,17 +12945,16 @@ func (s *sources) CreateSourceYoutubeAnalytics(ctx context.Context, request shar
return res, nil
}
-// CreateSourceZendeskChat - Create a source
+// CreateSourceZendeskSell - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceZendeskChat(ctx context.Context, request shared.SourceZendeskChatCreateRequest) (*operations.CreateSourceZendeskChatResponse, error) {
+func (s *Sources) CreateSourceZendeskSell(ctx context.Context, request *shared.SourceZendeskSellCreateRequest) (*operations.CreateSourceZendeskSellResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url := strings.TrimSuffix(baseURL, "/") + "/sources#ZendeskChat"
+ url := strings.TrimSuffix(baseURL, "/") + "/sources#ZendeskSell"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12907,7 +12963,7 @@ func (s *sources) CreateSourceZendeskChat(ctx context.Context, request shared.So
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -12931,7 +12987,7 @@ func (s *sources) CreateSourceZendeskChat(ctx context.Context, request shared.So
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.CreateSourceZendeskChatResponse{
+ res := &operations.CreateSourceZendeskSellResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -12940,12 +12996,14 @@ func (s *sources) CreateSourceZendeskChat(ctx context.Context, request shared.So
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -12957,15 +13015,14 @@ func (s *sources) CreateSourceZendeskChat(ctx context.Context, request shared.So
// CreateSourceZendeskSunshine - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceZendeskSunshine(ctx context.Context, request shared.SourceZendeskSunshineCreateRequest) (*operations.CreateSourceZendeskSunshineResponse, error) {
+func (s *Sources) CreateSourceZendeskSunshine(ctx context.Context, request *shared.SourceZendeskSunshineCreateRequest) (*operations.CreateSourceZendeskSunshineResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#ZendeskSunshine"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -12974,7 +13031,7 @@ func (s *sources) CreateSourceZendeskSunshine(ctx context.Context, request share
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -13007,12 +13064,14 @@ func (s *sources) CreateSourceZendeskSunshine(ctx context.Context, request share
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -13024,15 +13083,14 @@ func (s *sources) CreateSourceZendeskSunshine(ctx context.Context, request share
// CreateSourceZendeskSupport - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceZendeskSupport(ctx context.Context, request shared.SourceZendeskSupportCreateRequest) (*operations.CreateSourceZendeskSupportResponse, error) {
+func (s *Sources) CreateSourceZendeskSupport(ctx context.Context, request *shared.SourceZendeskSupportCreateRequest) (*operations.CreateSourceZendeskSupportResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#ZendeskSupport"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -13041,7 +13099,7 @@ func (s *sources) CreateSourceZendeskSupport(ctx context.Context, request shared
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -13074,12 +13132,14 @@ func (s *sources) CreateSourceZendeskSupport(ctx context.Context, request shared
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -13091,15 +13151,14 @@ func (s *sources) CreateSourceZendeskSupport(ctx context.Context, request shared
// CreateSourceZendeskTalk - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceZendeskTalk(ctx context.Context, request shared.SourceZendeskTalkCreateRequest) (*operations.CreateSourceZendeskTalkResponse, error) {
+func (s *Sources) CreateSourceZendeskTalk(ctx context.Context, request *shared.SourceZendeskTalkCreateRequest) (*operations.CreateSourceZendeskTalkResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#ZendeskTalk"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -13108,7 +13167,7 @@ func (s *sources) CreateSourceZendeskTalk(ctx context.Context, request shared.So
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -13141,12 +13200,14 @@ func (s *sources) CreateSourceZendeskTalk(ctx context.Context, request shared.So
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -13158,15 +13219,14 @@ func (s *sources) CreateSourceZendeskTalk(ctx context.Context, request shared.So
// CreateSourceZenloop - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceZenloop(ctx context.Context, request shared.SourceZenloopCreateRequest) (*operations.CreateSourceZenloopResponse, error) {
+func (s *Sources) CreateSourceZenloop(ctx context.Context, request *shared.SourceZenloopCreateRequest) (*operations.CreateSourceZenloopResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Zenloop"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -13175,7 +13235,7 @@ func (s *sources) CreateSourceZenloop(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -13208,12 +13268,14 @@ func (s *sources) CreateSourceZenloop(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -13225,15 +13287,14 @@ func (s *sources) CreateSourceZenloop(ctx context.Context, request shared.Source
// CreateSourceZohoCrm - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceZohoCrm(ctx context.Context, request shared.SourceZohoCrmCreateRequest) (*operations.CreateSourceZohoCrmResponse, error) {
+func (s *Sources) CreateSourceZohoCrm(ctx context.Context, request *shared.SourceZohoCrmCreateRequest) (*operations.CreateSourceZohoCrmResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#ZohoCrm"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -13242,7 +13303,7 @@ func (s *sources) CreateSourceZohoCrm(ctx context.Context, request shared.Source
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -13275,12 +13336,14 @@ func (s *sources) CreateSourceZohoCrm(ctx context.Context, request shared.Source
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -13292,15 +13355,14 @@ func (s *sources) CreateSourceZohoCrm(ctx context.Context, request shared.Source
// CreateSourceZoom - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceZoom(ctx context.Context, request shared.SourceZoomCreateRequest) (*operations.CreateSourceZoomResponse, error) {
+func (s *Sources) CreateSourceZoom(ctx context.Context, request *shared.SourceZoomCreateRequest) (*operations.CreateSourceZoomResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Zoom"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -13309,7 +13371,7 @@ func (s *sources) CreateSourceZoom(ctx context.Context, request shared.SourceZoo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -13342,12 +13404,14 @@ func (s *sources) CreateSourceZoom(ctx context.Context, request shared.SourceZoo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -13359,15 +13423,14 @@ func (s *sources) CreateSourceZoom(ctx context.Context, request shared.SourceZoo
// CreateSourceZuora - Create a source
// Creates a source given a name, workspace id, and a json blob containing the configuration for the source.
-func (s *sources) CreateSourceZuora(ctx context.Context, request shared.SourceZuoraCreateRequest) (*operations.CreateSourceZuoraResponse, error) {
+func (s *Sources) CreateSourceZuora(ctx context.Context, request *shared.SourceZuoraCreateRequest) (*operations.CreateSourceZuoraResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources#Zuora"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -13376,7 +13439,7 @@ func (s *sources) CreateSourceZuora(ctx context.Context, request shared.SourceZu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -13409,12 +13472,14 @@ func (s *sources) CreateSourceZuora(ctx context.Context, request shared.SourceZu
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -13425,7 +13490,7 @@ func (s *sources) CreateSourceZuora(ctx context.Context, request shared.SourceZu
}
// DeleteSource - Delete a Source
-func (s *sources) DeleteSource(ctx context.Context, request operations.DeleteSourceRequest) (*operations.DeleteSourceResponse, error) {
+func (s *Sources) DeleteSource(ctx context.Context, request operations.DeleteSourceRequest) (*operations.DeleteSourceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}", request, nil)
if err != nil {
@@ -13437,7 +13502,7 @@ func (s *sources) DeleteSource(ctx context.Context, request operations.DeleteSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -13475,7 +13540,7 @@ func (s *sources) DeleteSource(ctx context.Context, request operations.DeleteSou
}
// DeleteSourceAha - Delete a Source
-func (s *sources) DeleteSourceAha(ctx context.Context, request operations.DeleteSourceAhaRequest) (*operations.DeleteSourceAhaResponse, error) {
+func (s *Sources) DeleteSourceAha(ctx context.Context, request operations.DeleteSourceAhaRequest) (*operations.DeleteSourceAhaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Aha", request, nil)
if err != nil {
@@ -13487,7 +13552,7 @@ func (s *sources) DeleteSourceAha(ctx context.Context, request operations.Delete
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -13525,7 +13590,7 @@ func (s *sources) DeleteSourceAha(ctx context.Context, request operations.Delete
}
// DeleteSourceAircall - Delete a Source
-func (s *sources) DeleteSourceAircall(ctx context.Context, request operations.DeleteSourceAircallRequest) (*operations.DeleteSourceAircallResponse, error) {
+func (s *Sources) DeleteSourceAircall(ctx context.Context, request operations.DeleteSourceAircallRequest) (*operations.DeleteSourceAircallResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Aircall", request, nil)
if err != nil {
@@ -13537,7 +13602,7 @@ func (s *sources) DeleteSourceAircall(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -13575,7 +13640,7 @@ func (s *sources) DeleteSourceAircall(ctx context.Context, request operations.De
}
// DeleteSourceAirtable - Delete a Source
-func (s *sources) DeleteSourceAirtable(ctx context.Context, request operations.DeleteSourceAirtableRequest) (*operations.DeleteSourceAirtableResponse, error) {
+func (s *Sources) DeleteSourceAirtable(ctx context.Context, request operations.DeleteSourceAirtableRequest) (*operations.DeleteSourceAirtableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Airtable", request, nil)
if err != nil {
@@ -13587,7 +13652,7 @@ func (s *sources) DeleteSourceAirtable(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -13625,7 +13690,7 @@ func (s *sources) DeleteSourceAirtable(ctx context.Context, request operations.D
}
// DeleteSourceAlloydb - Delete a Source
-func (s *sources) DeleteSourceAlloydb(ctx context.Context, request operations.DeleteSourceAlloydbRequest) (*operations.DeleteSourceAlloydbResponse, error) {
+func (s *Sources) DeleteSourceAlloydb(ctx context.Context, request operations.DeleteSourceAlloydbRequest) (*operations.DeleteSourceAlloydbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Alloydb", request, nil)
if err != nil {
@@ -13637,7 +13702,7 @@ func (s *sources) DeleteSourceAlloydb(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -13675,7 +13740,7 @@ func (s *sources) DeleteSourceAlloydb(ctx context.Context, request operations.De
}
// DeleteSourceAmazonAds - Delete a Source
-func (s *sources) DeleteSourceAmazonAds(ctx context.Context, request operations.DeleteSourceAmazonAdsRequest) (*operations.DeleteSourceAmazonAdsResponse, error) {
+func (s *Sources) DeleteSourceAmazonAds(ctx context.Context, request operations.DeleteSourceAmazonAdsRequest) (*operations.DeleteSourceAmazonAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AmazonAds", request, nil)
if err != nil {
@@ -13687,7 +13752,7 @@ func (s *sources) DeleteSourceAmazonAds(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -13725,7 +13790,7 @@ func (s *sources) DeleteSourceAmazonAds(ctx context.Context, request operations.
}
// DeleteSourceAmazonSellerPartner - Delete a Source
-func (s *sources) DeleteSourceAmazonSellerPartner(ctx context.Context, request operations.DeleteSourceAmazonSellerPartnerRequest) (*operations.DeleteSourceAmazonSellerPartnerResponse, error) {
+func (s *Sources) DeleteSourceAmazonSellerPartner(ctx context.Context, request operations.DeleteSourceAmazonSellerPartnerRequest) (*operations.DeleteSourceAmazonSellerPartnerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AmazonSellerPartner", request, nil)
if err != nil {
@@ -13737,7 +13802,7 @@ func (s *sources) DeleteSourceAmazonSellerPartner(ctx context.Context, request o
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -13775,7 +13840,7 @@ func (s *sources) DeleteSourceAmazonSellerPartner(ctx context.Context, request o
}
// DeleteSourceAmazonSqs - Delete a Source
-func (s *sources) DeleteSourceAmazonSqs(ctx context.Context, request operations.DeleteSourceAmazonSqsRequest) (*operations.DeleteSourceAmazonSqsResponse, error) {
+func (s *Sources) DeleteSourceAmazonSqs(ctx context.Context, request operations.DeleteSourceAmazonSqsRequest) (*operations.DeleteSourceAmazonSqsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AmazonSqs", request, nil)
if err != nil {
@@ -13787,7 +13852,7 @@ func (s *sources) DeleteSourceAmazonSqs(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -13825,7 +13890,7 @@ func (s *sources) DeleteSourceAmazonSqs(ctx context.Context, request operations.
}
// DeleteSourceAmplitude - Delete a Source
-func (s *sources) DeleteSourceAmplitude(ctx context.Context, request operations.DeleteSourceAmplitudeRequest) (*operations.DeleteSourceAmplitudeResponse, error) {
+func (s *Sources) DeleteSourceAmplitude(ctx context.Context, request operations.DeleteSourceAmplitudeRequest) (*operations.DeleteSourceAmplitudeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Amplitude", request, nil)
if err != nil {
@@ -13837,7 +13902,7 @@ func (s *sources) DeleteSourceAmplitude(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -13875,7 +13940,7 @@ func (s *sources) DeleteSourceAmplitude(ctx context.Context, request operations.
}
// DeleteSourceApifyDataset - Delete a Source
-func (s *sources) DeleteSourceApifyDataset(ctx context.Context, request operations.DeleteSourceApifyDatasetRequest) (*operations.DeleteSourceApifyDatasetResponse, error) {
+func (s *Sources) DeleteSourceApifyDataset(ctx context.Context, request operations.DeleteSourceApifyDatasetRequest) (*operations.DeleteSourceApifyDatasetResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ApifyDataset", request, nil)
if err != nil {
@@ -13887,7 +13952,7 @@ func (s *sources) DeleteSourceApifyDataset(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -13925,7 +13990,7 @@ func (s *sources) DeleteSourceApifyDataset(ctx context.Context, request operatio
}
// DeleteSourceAppfollow - Delete a Source
-func (s *sources) DeleteSourceAppfollow(ctx context.Context, request operations.DeleteSourceAppfollowRequest) (*operations.DeleteSourceAppfollowResponse, error) {
+func (s *Sources) DeleteSourceAppfollow(ctx context.Context, request operations.DeleteSourceAppfollowRequest) (*operations.DeleteSourceAppfollowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Appfollow", request, nil)
if err != nil {
@@ -13937,7 +14002,7 @@ func (s *sources) DeleteSourceAppfollow(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -13975,7 +14040,7 @@ func (s *sources) DeleteSourceAppfollow(ctx context.Context, request operations.
}
// DeleteSourceAsana - Delete a Source
-func (s *sources) DeleteSourceAsana(ctx context.Context, request operations.DeleteSourceAsanaRequest) (*operations.DeleteSourceAsanaResponse, error) {
+func (s *Sources) DeleteSourceAsana(ctx context.Context, request operations.DeleteSourceAsanaRequest) (*operations.DeleteSourceAsanaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Asana", request, nil)
if err != nil {
@@ -13987,7 +14052,7 @@ func (s *sources) DeleteSourceAsana(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14025,7 +14090,7 @@ func (s *sources) DeleteSourceAsana(ctx context.Context, request operations.Dele
}
// DeleteSourceAuth0 - Delete a Source
-func (s *sources) DeleteSourceAuth0(ctx context.Context, request operations.DeleteSourceAuth0Request) (*operations.DeleteSourceAuth0Response, error) {
+func (s *Sources) DeleteSourceAuth0(ctx context.Context, request operations.DeleteSourceAuth0Request) (*operations.DeleteSourceAuth0Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Auth0", request, nil)
if err != nil {
@@ -14037,7 +14102,7 @@ func (s *sources) DeleteSourceAuth0(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14075,7 +14140,7 @@ func (s *sources) DeleteSourceAuth0(ctx context.Context, request operations.Dele
}
// DeleteSourceAwsCloudtrail - Delete a Source
-func (s *sources) DeleteSourceAwsCloudtrail(ctx context.Context, request operations.DeleteSourceAwsCloudtrailRequest) (*operations.DeleteSourceAwsCloudtrailResponse, error) {
+func (s *Sources) DeleteSourceAwsCloudtrail(ctx context.Context, request operations.DeleteSourceAwsCloudtrailRequest) (*operations.DeleteSourceAwsCloudtrailResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AwsCloudtrail", request, nil)
if err != nil {
@@ -14087,7 +14152,7 @@ func (s *sources) DeleteSourceAwsCloudtrail(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14125,7 +14190,7 @@ func (s *sources) DeleteSourceAwsCloudtrail(ctx context.Context, request operati
}
// DeleteSourceAzureBlobStorage - Delete a Source
-func (s *sources) DeleteSourceAzureBlobStorage(ctx context.Context, request operations.DeleteSourceAzureBlobStorageRequest) (*operations.DeleteSourceAzureBlobStorageResponse, error) {
+func (s *Sources) DeleteSourceAzureBlobStorage(ctx context.Context, request operations.DeleteSourceAzureBlobStorageRequest) (*operations.DeleteSourceAzureBlobStorageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AzureBlobStorage", request, nil)
if err != nil {
@@ -14137,7 +14202,7 @@ func (s *sources) DeleteSourceAzureBlobStorage(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14175,7 +14240,7 @@ func (s *sources) DeleteSourceAzureBlobStorage(ctx context.Context, request oper
}
// DeleteSourceAzureTable - Delete a Source
-func (s *sources) DeleteSourceAzureTable(ctx context.Context, request operations.DeleteSourceAzureTableRequest) (*operations.DeleteSourceAzureTableResponse, error) {
+func (s *Sources) DeleteSourceAzureTable(ctx context.Context, request operations.DeleteSourceAzureTableRequest) (*operations.DeleteSourceAzureTableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AzureTable", request, nil)
if err != nil {
@@ -14187,7 +14252,7 @@ func (s *sources) DeleteSourceAzureTable(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14225,7 +14290,7 @@ func (s *sources) DeleteSourceAzureTable(ctx context.Context, request operations
}
// DeleteSourceBambooHr - Delete a Source
-func (s *sources) DeleteSourceBambooHr(ctx context.Context, request operations.DeleteSourceBambooHrRequest) (*operations.DeleteSourceBambooHrResponse, error) {
+func (s *Sources) DeleteSourceBambooHr(ctx context.Context, request operations.DeleteSourceBambooHrRequest) (*operations.DeleteSourceBambooHrResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#BambooHr", request, nil)
if err != nil {
@@ -14237,7 +14302,7 @@ func (s *sources) DeleteSourceBambooHr(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14274,10 +14339,10 @@ func (s *sources) DeleteSourceBambooHr(ctx context.Context, request operations.D
return res, nil
}
-// DeleteSourceBigcommerce - Delete a Source
-func (s *sources) DeleteSourceBigcommerce(ctx context.Context, request operations.DeleteSourceBigcommerceRequest) (*operations.DeleteSourceBigcommerceResponse, error) {
+// DeleteSourceBigquery - Delete a Source
+func (s *Sources) DeleteSourceBigquery(ctx context.Context, request operations.DeleteSourceBigqueryRequest) (*operations.DeleteSourceBigqueryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Bigcommerce", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Bigquery", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -14287,7 +14352,7 @@ func (s *sources) DeleteSourceBigcommerce(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14308,7 +14373,7 @@ func (s *sources) DeleteSourceBigcommerce(ctx context.Context, request operation
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceBigcommerceResponse{
+ res := &operations.DeleteSourceBigqueryResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -14324,10 +14389,10 @@ func (s *sources) DeleteSourceBigcommerce(ctx context.Context, request operation
return res, nil
}
-// DeleteSourceBigquery - Delete a Source
-func (s *sources) DeleteSourceBigquery(ctx context.Context, request operations.DeleteSourceBigqueryRequest) (*operations.DeleteSourceBigqueryResponse, error) {
+// DeleteSourceBingAds - Delete a Source
+func (s *Sources) DeleteSourceBingAds(ctx context.Context, request operations.DeleteSourceBingAdsRequest) (*operations.DeleteSourceBingAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Bigquery", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#BingAds", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -14337,7 +14402,7 @@ func (s *sources) DeleteSourceBigquery(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14358,7 +14423,7 @@ func (s *sources) DeleteSourceBigquery(ctx context.Context, request operations.D
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceBigqueryResponse{
+ res := &operations.DeleteSourceBingAdsResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -14374,10 +14439,10 @@ func (s *sources) DeleteSourceBigquery(ctx context.Context, request operations.D
return res, nil
}
-// DeleteSourceBingAds - Delete a Source
-func (s *sources) DeleteSourceBingAds(ctx context.Context, request operations.DeleteSourceBingAdsRequest) (*operations.DeleteSourceBingAdsResponse, error) {
+// DeleteSourceBraintree - Delete a Source
+func (s *Sources) DeleteSourceBraintree(ctx context.Context, request operations.DeleteSourceBraintreeRequest) (*operations.DeleteSourceBraintreeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#BingAds", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braintree", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -14387,7 +14452,7 @@ func (s *sources) DeleteSourceBingAds(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14408,7 +14473,7 @@ func (s *sources) DeleteSourceBingAds(ctx context.Context, request operations.De
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceBingAdsResponse{
+ res := &operations.DeleteSourceBraintreeResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -14424,10 +14489,10 @@ func (s *sources) DeleteSourceBingAds(ctx context.Context, request operations.De
return res, nil
}
-// DeleteSourceBraintree - Delete a Source
-func (s *sources) DeleteSourceBraintree(ctx context.Context, request operations.DeleteSourceBraintreeRequest) (*operations.DeleteSourceBraintreeResponse, error) {
+// DeleteSourceBraze - Delete a Source
+func (s *Sources) DeleteSourceBraze(ctx context.Context, request operations.DeleteSourceBrazeRequest) (*operations.DeleteSourceBrazeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braintree", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braze", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -14437,7 +14502,7 @@ func (s *sources) DeleteSourceBraintree(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14458,7 +14523,7 @@ func (s *sources) DeleteSourceBraintree(ctx context.Context, request operations.
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceBraintreeResponse{
+ res := &operations.DeleteSourceBrazeResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -14474,10 +14539,10 @@ func (s *sources) DeleteSourceBraintree(ctx context.Context, request operations.
return res, nil
}
-// DeleteSourceBraze - Delete a Source
-func (s *sources) DeleteSourceBraze(ctx context.Context, request operations.DeleteSourceBrazeRequest) (*operations.DeleteSourceBrazeResponse, error) {
+// DeleteSourceCart - Delete a Source
+func (s *Sources) DeleteSourceCart(ctx context.Context, request operations.DeleteSourceCartRequest) (*operations.DeleteSourceCartResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braze", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Cart", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -14487,7 +14552,7 @@ func (s *sources) DeleteSourceBraze(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14508,7 +14573,7 @@ func (s *sources) DeleteSourceBraze(ctx context.Context, request operations.Dele
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceBrazeResponse{
+ res := &operations.DeleteSourceCartResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -14525,7 +14590,7 @@ func (s *sources) DeleteSourceBraze(ctx context.Context, request operations.Dele
}
// DeleteSourceChargebee - Delete a Source
-func (s *sources) DeleteSourceChargebee(ctx context.Context, request operations.DeleteSourceChargebeeRequest) (*operations.DeleteSourceChargebeeResponse, error) {
+func (s *Sources) DeleteSourceChargebee(ctx context.Context, request operations.DeleteSourceChargebeeRequest) (*operations.DeleteSourceChargebeeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Chargebee", request, nil)
if err != nil {
@@ -14537,7 +14602,7 @@ func (s *sources) DeleteSourceChargebee(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14575,7 +14640,7 @@ func (s *sources) DeleteSourceChargebee(ctx context.Context, request operations.
}
// DeleteSourceChartmogul - Delete a Source
-func (s *sources) DeleteSourceChartmogul(ctx context.Context, request operations.DeleteSourceChartmogulRequest) (*operations.DeleteSourceChartmogulResponse, error) {
+func (s *Sources) DeleteSourceChartmogul(ctx context.Context, request operations.DeleteSourceChartmogulRequest) (*operations.DeleteSourceChartmogulResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Chartmogul", request, nil)
if err != nil {
@@ -14587,7 +14652,7 @@ func (s *sources) DeleteSourceChartmogul(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14625,7 +14690,7 @@ func (s *sources) DeleteSourceChartmogul(ctx context.Context, request operations
}
// DeleteSourceClickhouse - Delete a Source
-func (s *sources) DeleteSourceClickhouse(ctx context.Context, request operations.DeleteSourceClickhouseRequest) (*operations.DeleteSourceClickhouseResponse, error) {
+func (s *Sources) DeleteSourceClickhouse(ctx context.Context, request operations.DeleteSourceClickhouseRequest) (*operations.DeleteSourceClickhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Clickhouse", request, nil)
if err != nil {
@@ -14637,7 +14702,7 @@ func (s *sources) DeleteSourceClickhouse(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14675,7 +14740,7 @@ func (s *sources) DeleteSourceClickhouse(ctx context.Context, request operations
}
// DeleteSourceClickupAPI - Delete a Source
-func (s *sources) DeleteSourceClickupAPI(ctx context.Context, request operations.DeleteSourceClickupAPIRequest) (*operations.DeleteSourceClickupAPIResponse, error) {
+func (s *Sources) DeleteSourceClickupAPI(ctx context.Context, request operations.DeleteSourceClickupAPIRequest) (*operations.DeleteSourceClickupAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ClickupApi", request, nil)
if err != nil {
@@ -14687,7 +14752,7 @@ func (s *sources) DeleteSourceClickupAPI(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14725,7 +14790,7 @@ func (s *sources) DeleteSourceClickupAPI(ctx context.Context, request operations
}
// DeleteSourceClockify - Delete a Source
-func (s *sources) DeleteSourceClockify(ctx context.Context, request operations.DeleteSourceClockifyRequest) (*operations.DeleteSourceClockifyResponse, error) {
+func (s *Sources) DeleteSourceClockify(ctx context.Context, request operations.DeleteSourceClockifyRequest) (*operations.DeleteSourceClockifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Clockify", request, nil)
if err != nil {
@@ -14737,7 +14802,7 @@ func (s *sources) DeleteSourceClockify(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14775,7 +14840,7 @@ func (s *sources) DeleteSourceClockify(ctx context.Context, request operations.D
}
// DeleteSourceCloseCom - Delete a Source
-func (s *sources) DeleteSourceCloseCom(ctx context.Context, request operations.DeleteSourceCloseComRequest) (*operations.DeleteSourceCloseComResponse, error) {
+func (s *Sources) DeleteSourceCloseCom(ctx context.Context, request operations.DeleteSourceCloseComRequest) (*operations.DeleteSourceCloseComResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#CloseCom", request, nil)
if err != nil {
@@ -14787,7 +14852,7 @@ func (s *sources) DeleteSourceCloseCom(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14825,7 +14890,7 @@ func (s *sources) DeleteSourceCloseCom(ctx context.Context, request operations.D
}
// DeleteSourceCoda - Delete a Source
-func (s *sources) DeleteSourceCoda(ctx context.Context, request operations.DeleteSourceCodaRequest) (*operations.DeleteSourceCodaResponse, error) {
+func (s *Sources) DeleteSourceCoda(ctx context.Context, request operations.DeleteSourceCodaRequest) (*operations.DeleteSourceCodaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Coda", request, nil)
if err != nil {
@@ -14837,7 +14902,7 @@ func (s *sources) DeleteSourceCoda(ctx context.Context, request operations.Delet
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14875,7 +14940,7 @@ func (s *sources) DeleteSourceCoda(ctx context.Context, request operations.Delet
}
// DeleteSourceCoinAPI - Delete a Source
-func (s *sources) DeleteSourceCoinAPI(ctx context.Context, request operations.DeleteSourceCoinAPIRequest) (*operations.DeleteSourceCoinAPIResponse, error) {
+func (s *Sources) DeleteSourceCoinAPI(ctx context.Context, request operations.DeleteSourceCoinAPIRequest) (*operations.DeleteSourceCoinAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#CoinApi", request, nil)
if err != nil {
@@ -14887,7 +14952,7 @@ func (s *sources) DeleteSourceCoinAPI(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14925,7 +14990,7 @@ func (s *sources) DeleteSourceCoinAPI(ctx context.Context, request operations.De
}
// DeleteSourceCoinmarketcap - Delete a Source
-func (s *sources) DeleteSourceCoinmarketcap(ctx context.Context, request operations.DeleteSourceCoinmarketcapRequest) (*operations.DeleteSourceCoinmarketcapResponse, error) {
+func (s *Sources) DeleteSourceCoinmarketcap(ctx context.Context, request operations.DeleteSourceCoinmarketcapRequest) (*operations.DeleteSourceCoinmarketcapResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Coinmarketcap", request, nil)
if err != nil {
@@ -14937,7 +15002,7 @@ func (s *sources) DeleteSourceCoinmarketcap(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -14975,7 +15040,7 @@ func (s *sources) DeleteSourceCoinmarketcap(ctx context.Context, request operati
}
// DeleteSourceConfigcat - Delete a Source
-func (s *sources) DeleteSourceConfigcat(ctx context.Context, request operations.DeleteSourceConfigcatRequest) (*operations.DeleteSourceConfigcatResponse, error) {
+func (s *Sources) DeleteSourceConfigcat(ctx context.Context, request operations.DeleteSourceConfigcatRequest) (*operations.DeleteSourceConfigcatResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Configcat", request, nil)
if err != nil {
@@ -14987,7 +15052,7 @@ func (s *sources) DeleteSourceConfigcat(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15025,7 +15090,7 @@ func (s *sources) DeleteSourceConfigcat(ctx context.Context, request operations.
}
// DeleteSourceConfluence - Delete a Source
-func (s *sources) DeleteSourceConfluence(ctx context.Context, request operations.DeleteSourceConfluenceRequest) (*operations.DeleteSourceConfluenceResponse, error) {
+func (s *Sources) DeleteSourceConfluence(ctx context.Context, request operations.DeleteSourceConfluenceRequest) (*operations.DeleteSourceConfluenceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Confluence", request, nil)
if err != nil {
@@ -15037,7 +15102,7 @@ func (s *sources) DeleteSourceConfluence(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15075,7 +15140,7 @@ func (s *sources) DeleteSourceConfluence(ctx context.Context, request operations
}
// DeleteSourceConvex - Delete a Source
-func (s *sources) DeleteSourceConvex(ctx context.Context, request operations.DeleteSourceConvexRequest) (*operations.DeleteSourceConvexResponse, error) {
+func (s *Sources) DeleteSourceConvex(ctx context.Context, request operations.DeleteSourceConvexRequest) (*operations.DeleteSourceConvexResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Convex", request, nil)
if err != nil {
@@ -15087,7 +15152,7 @@ func (s *sources) DeleteSourceConvex(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15125,7 +15190,7 @@ func (s *sources) DeleteSourceConvex(ctx context.Context, request operations.Del
}
// DeleteSourceDatascope - Delete a Source
-func (s *sources) DeleteSourceDatascope(ctx context.Context, request operations.DeleteSourceDatascopeRequest) (*operations.DeleteSourceDatascopeResponse, error) {
+func (s *Sources) DeleteSourceDatascope(ctx context.Context, request operations.DeleteSourceDatascopeRequest) (*operations.DeleteSourceDatascopeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Datascope", request, nil)
if err != nil {
@@ -15137,7 +15202,7 @@ func (s *sources) DeleteSourceDatascope(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15175,7 +15240,7 @@ func (s *sources) DeleteSourceDatascope(ctx context.Context, request operations.
}
// DeleteSourceDelighted - Delete a Source
-func (s *sources) DeleteSourceDelighted(ctx context.Context, request operations.DeleteSourceDelightedRequest) (*operations.DeleteSourceDelightedResponse, error) {
+func (s *Sources) DeleteSourceDelighted(ctx context.Context, request operations.DeleteSourceDelightedRequest) (*operations.DeleteSourceDelightedResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Delighted", request, nil)
if err != nil {
@@ -15187,7 +15252,7 @@ func (s *sources) DeleteSourceDelighted(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15225,7 +15290,7 @@ func (s *sources) DeleteSourceDelighted(ctx context.Context, request operations.
}
// DeleteSourceDixa - Delete a Source
-func (s *sources) DeleteSourceDixa(ctx context.Context, request operations.DeleteSourceDixaRequest) (*operations.DeleteSourceDixaResponse, error) {
+func (s *Sources) DeleteSourceDixa(ctx context.Context, request operations.DeleteSourceDixaRequest) (*operations.DeleteSourceDixaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dixa", request, nil)
if err != nil {
@@ -15237,7 +15302,7 @@ func (s *sources) DeleteSourceDixa(ctx context.Context, request operations.Delet
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15275,7 +15340,7 @@ func (s *sources) DeleteSourceDixa(ctx context.Context, request operations.Delet
}
// DeleteSourceDockerhub - Delete a Source
-func (s *sources) DeleteSourceDockerhub(ctx context.Context, request operations.DeleteSourceDockerhubRequest) (*operations.DeleteSourceDockerhubResponse, error) {
+func (s *Sources) DeleteSourceDockerhub(ctx context.Context, request operations.DeleteSourceDockerhubRequest) (*operations.DeleteSourceDockerhubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dockerhub", request, nil)
if err != nil {
@@ -15287,7 +15352,7 @@ func (s *sources) DeleteSourceDockerhub(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15325,7 +15390,7 @@ func (s *sources) DeleteSourceDockerhub(ctx context.Context, request operations.
}
// DeleteSourceDremio - Delete a Source
-func (s *sources) DeleteSourceDremio(ctx context.Context, request operations.DeleteSourceDremioRequest) (*operations.DeleteSourceDremioResponse, error) {
+func (s *Sources) DeleteSourceDremio(ctx context.Context, request operations.DeleteSourceDremioRequest) (*operations.DeleteSourceDremioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dremio", request, nil)
if err != nil {
@@ -15337,7 +15402,7 @@ func (s *sources) DeleteSourceDremio(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15375,7 +15440,7 @@ func (s *sources) DeleteSourceDremio(ctx context.Context, request operations.Del
}
// DeleteSourceDynamodb - Delete a Source
-func (s *sources) DeleteSourceDynamodb(ctx context.Context, request operations.DeleteSourceDynamodbRequest) (*operations.DeleteSourceDynamodbResponse, error) {
+func (s *Sources) DeleteSourceDynamodb(ctx context.Context, request operations.DeleteSourceDynamodbRequest) (*operations.DeleteSourceDynamodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dynamodb", request, nil)
if err != nil {
@@ -15387,7 +15452,7 @@ func (s *sources) DeleteSourceDynamodb(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15424,58 +15489,8 @@ func (s *sources) DeleteSourceDynamodb(ctx context.Context, request operations.D
return res, nil
}
-// DeleteSourceE2eTestCloud - Delete a Source
-func (s *sources) DeleteSourceE2eTestCloud(ctx context.Context, request operations.DeleteSourceE2eTestCloudRequest) (*operations.DeleteSourceE2eTestCloudResponse, error) {
- baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#E2eTestCloud", request, nil)
- if err != nil {
- return nil, fmt.Errorf("error generating URL: %w", err)
- }
-
- req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil)
- if err != nil {
- return nil, fmt.Errorf("error creating request: %w", err)
- }
- req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
-
- client := s.sdkConfiguration.SecurityClient
-
- httpRes, err := client.Do(req)
- if err != nil {
- return nil, fmt.Errorf("error sending request: %w", err)
- }
- if httpRes == nil {
- return nil, fmt.Errorf("error sending request: no response")
- }
-
- rawBody, err := io.ReadAll(httpRes.Body)
- if err != nil {
- return nil, fmt.Errorf("error reading response body: %w", err)
- }
- httpRes.Body.Close()
- httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
-
- contentType := httpRes.Header.Get("Content-Type")
-
- res := &operations.DeleteSourceE2eTestCloudResponse{
- StatusCode: httpRes.StatusCode,
- ContentType: contentType,
- RawResponse: httpRes,
- }
- switch {
- case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300:
- fallthrough
- case httpRes.StatusCode == 403:
- fallthrough
- case httpRes.StatusCode == 404:
- }
-
- return res, nil
-}
-
// DeleteSourceEmailoctopus - Delete a Source
-func (s *sources) DeleteSourceEmailoctopus(ctx context.Context, request operations.DeleteSourceEmailoctopusRequest) (*operations.DeleteSourceEmailoctopusResponse, error) {
+func (s *Sources) DeleteSourceEmailoctopus(ctx context.Context, request operations.DeleteSourceEmailoctopusRequest) (*operations.DeleteSourceEmailoctopusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Emailoctopus", request, nil)
if err != nil {
@@ -15487,7 +15502,7 @@ func (s *sources) DeleteSourceEmailoctopus(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15525,7 +15540,7 @@ func (s *sources) DeleteSourceEmailoctopus(ctx context.Context, request operatio
}
// DeleteSourceExchangeRates - Delete a Source
-func (s *sources) DeleteSourceExchangeRates(ctx context.Context, request operations.DeleteSourceExchangeRatesRequest) (*operations.DeleteSourceExchangeRatesResponse, error) {
+func (s *Sources) DeleteSourceExchangeRates(ctx context.Context, request operations.DeleteSourceExchangeRatesRequest) (*operations.DeleteSourceExchangeRatesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ExchangeRates", request, nil)
if err != nil {
@@ -15537,7 +15552,7 @@ func (s *sources) DeleteSourceExchangeRates(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15575,7 +15590,7 @@ func (s *sources) DeleteSourceExchangeRates(ctx context.Context, request operati
}
// DeleteSourceFacebookMarketing - Delete a Source
-func (s *sources) DeleteSourceFacebookMarketing(ctx context.Context, request operations.DeleteSourceFacebookMarketingRequest) (*operations.DeleteSourceFacebookMarketingResponse, error) {
+func (s *Sources) DeleteSourceFacebookMarketing(ctx context.Context, request operations.DeleteSourceFacebookMarketingRequest) (*operations.DeleteSourceFacebookMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#FacebookMarketing", request, nil)
if err != nil {
@@ -15587,7 +15602,7 @@ func (s *sources) DeleteSourceFacebookMarketing(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15625,7 +15640,7 @@ func (s *sources) DeleteSourceFacebookMarketing(ctx context.Context, request ope
}
// DeleteSourceFacebookPages - Delete a Source
-func (s *sources) DeleteSourceFacebookPages(ctx context.Context, request operations.DeleteSourceFacebookPagesRequest) (*operations.DeleteSourceFacebookPagesResponse, error) {
+func (s *Sources) DeleteSourceFacebookPages(ctx context.Context, request operations.DeleteSourceFacebookPagesRequest) (*operations.DeleteSourceFacebookPagesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#FacebookPages", request, nil)
if err != nil {
@@ -15637,7 +15652,7 @@ func (s *sources) DeleteSourceFacebookPages(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15675,7 +15690,7 @@ func (s *sources) DeleteSourceFacebookPages(ctx context.Context, request operati
}
// DeleteSourceFaker - Delete a Source
-func (s *sources) DeleteSourceFaker(ctx context.Context, request operations.DeleteSourceFakerRequest) (*operations.DeleteSourceFakerResponse, error) {
+func (s *Sources) DeleteSourceFaker(ctx context.Context, request operations.DeleteSourceFakerRequest) (*operations.DeleteSourceFakerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Faker", request, nil)
if err != nil {
@@ -15687,7 +15702,7 @@ func (s *sources) DeleteSourceFaker(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15725,7 +15740,7 @@ func (s *sources) DeleteSourceFaker(ctx context.Context, request operations.Dele
}
// DeleteSourceFauna - Delete a Source
-func (s *sources) DeleteSourceFauna(ctx context.Context, request operations.DeleteSourceFaunaRequest) (*operations.DeleteSourceFaunaResponse, error) {
+func (s *Sources) DeleteSourceFauna(ctx context.Context, request operations.DeleteSourceFaunaRequest) (*operations.DeleteSourceFaunaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Fauna", request, nil)
if err != nil {
@@ -15737,7 +15752,7 @@ func (s *sources) DeleteSourceFauna(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15774,10 +15789,10 @@ func (s *sources) DeleteSourceFauna(ctx context.Context, request operations.Dele
return res, nil
}
-// DeleteSourceFileSecure - Delete a Source
-func (s *sources) DeleteSourceFileSecure(ctx context.Context, request operations.DeleteSourceFileSecureRequest) (*operations.DeleteSourceFileSecureResponse, error) {
+// DeleteSourceFile - Delete a Source
+func (s *Sources) DeleteSourceFile(ctx context.Context, request operations.DeleteSourceFileRequest) (*operations.DeleteSourceFileResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#FileSecure", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#File", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -15787,7 +15802,7 @@ func (s *sources) DeleteSourceFileSecure(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15808,7 +15823,7 @@ func (s *sources) DeleteSourceFileSecure(ctx context.Context, request operations
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceFileSecureResponse{
+ res := &operations.DeleteSourceFileResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -15825,7 +15840,7 @@ func (s *sources) DeleteSourceFileSecure(ctx context.Context, request operations
}
// DeleteSourceFirebolt - Delete a Source
-func (s *sources) DeleteSourceFirebolt(ctx context.Context, request operations.DeleteSourceFireboltRequest) (*operations.DeleteSourceFireboltResponse, error) {
+func (s *Sources) DeleteSourceFirebolt(ctx context.Context, request operations.DeleteSourceFireboltRequest) (*operations.DeleteSourceFireboltResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Firebolt", request, nil)
if err != nil {
@@ -15837,7 +15852,7 @@ func (s *sources) DeleteSourceFirebolt(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15875,7 +15890,7 @@ func (s *sources) DeleteSourceFirebolt(ctx context.Context, request operations.D
}
// DeleteSourceFreshcaller - Delete a Source
-func (s *sources) DeleteSourceFreshcaller(ctx context.Context, request operations.DeleteSourceFreshcallerRequest) (*operations.DeleteSourceFreshcallerResponse, error) {
+func (s *Sources) DeleteSourceFreshcaller(ctx context.Context, request operations.DeleteSourceFreshcallerRequest) (*operations.DeleteSourceFreshcallerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Freshcaller", request, nil)
if err != nil {
@@ -15887,7 +15902,7 @@ func (s *sources) DeleteSourceFreshcaller(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15925,7 +15940,7 @@ func (s *sources) DeleteSourceFreshcaller(ctx context.Context, request operation
}
// DeleteSourceFreshdesk - Delete a Source
-func (s *sources) DeleteSourceFreshdesk(ctx context.Context, request operations.DeleteSourceFreshdeskRequest) (*operations.DeleteSourceFreshdeskResponse, error) {
+func (s *Sources) DeleteSourceFreshdesk(ctx context.Context, request operations.DeleteSourceFreshdeskRequest) (*operations.DeleteSourceFreshdeskResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Freshdesk", request, nil)
if err != nil {
@@ -15937,7 +15952,7 @@ func (s *sources) DeleteSourceFreshdesk(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -15975,7 +15990,7 @@ func (s *sources) DeleteSourceFreshdesk(ctx context.Context, request operations.
}
// DeleteSourceFreshsales - Delete a Source
-func (s *sources) DeleteSourceFreshsales(ctx context.Context, request operations.DeleteSourceFreshsalesRequest) (*operations.DeleteSourceFreshsalesResponse, error) {
+func (s *Sources) DeleteSourceFreshsales(ctx context.Context, request operations.DeleteSourceFreshsalesRequest) (*operations.DeleteSourceFreshsalesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Freshsales", request, nil)
if err != nil {
@@ -15987,7 +16002,7 @@ func (s *sources) DeleteSourceFreshsales(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16025,7 +16040,7 @@ func (s *sources) DeleteSourceFreshsales(ctx context.Context, request operations
}
// DeleteSourceGainsightPx - Delete a Source
-func (s *sources) DeleteSourceGainsightPx(ctx context.Context, request operations.DeleteSourceGainsightPxRequest) (*operations.DeleteSourceGainsightPxResponse, error) {
+func (s *Sources) DeleteSourceGainsightPx(ctx context.Context, request operations.DeleteSourceGainsightPxRequest) (*operations.DeleteSourceGainsightPxResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GainsightPx", request, nil)
if err != nil {
@@ -16037,7 +16052,7 @@ func (s *sources) DeleteSourceGainsightPx(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16075,7 +16090,7 @@ func (s *sources) DeleteSourceGainsightPx(ctx context.Context, request operation
}
// DeleteSourceGcs - Delete a Source
-func (s *sources) DeleteSourceGcs(ctx context.Context, request operations.DeleteSourceGcsRequest) (*operations.DeleteSourceGcsResponse, error) {
+func (s *Sources) DeleteSourceGcs(ctx context.Context, request operations.DeleteSourceGcsRequest) (*operations.DeleteSourceGcsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gcs", request, nil)
if err != nil {
@@ -16087,7 +16102,7 @@ func (s *sources) DeleteSourceGcs(ctx context.Context, request operations.Delete
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16125,7 +16140,7 @@ func (s *sources) DeleteSourceGcs(ctx context.Context, request operations.Delete
}
// DeleteSourceGetlago - Delete a Source
-func (s *sources) DeleteSourceGetlago(ctx context.Context, request operations.DeleteSourceGetlagoRequest) (*operations.DeleteSourceGetlagoResponse, error) {
+func (s *Sources) DeleteSourceGetlago(ctx context.Context, request operations.DeleteSourceGetlagoRequest) (*operations.DeleteSourceGetlagoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Getlago", request, nil)
if err != nil {
@@ -16137,7 +16152,7 @@ func (s *sources) DeleteSourceGetlago(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16175,7 +16190,7 @@ func (s *sources) DeleteSourceGetlago(ctx context.Context, request operations.De
}
// DeleteSourceGithub - Delete a Source
-func (s *sources) DeleteSourceGithub(ctx context.Context, request operations.DeleteSourceGithubRequest) (*operations.DeleteSourceGithubResponse, error) {
+func (s *Sources) DeleteSourceGithub(ctx context.Context, request operations.DeleteSourceGithubRequest) (*operations.DeleteSourceGithubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Github", request, nil)
if err != nil {
@@ -16187,7 +16202,7 @@ func (s *sources) DeleteSourceGithub(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16225,7 +16240,7 @@ func (s *sources) DeleteSourceGithub(ctx context.Context, request operations.Del
}
// DeleteSourceGitlab - Delete a Source
-func (s *sources) DeleteSourceGitlab(ctx context.Context, request operations.DeleteSourceGitlabRequest) (*operations.DeleteSourceGitlabResponse, error) {
+func (s *Sources) DeleteSourceGitlab(ctx context.Context, request operations.DeleteSourceGitlabRequest) (*operations.DeleteSourceGitlabResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gitlab", request, nil)
if err != nil {
@@ -16237,7 +16252,7 @@ func (s *sources) DeleteSourceGitlab(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16275,7 +16290,7 @@ func (s *sources) DeleteSourceGitlab(ctx context.Context, request operations.Del
}
// DeleteSourceGlassfrog - Delete a Source
-func (s *sources) DeleteSourceGlassfrog(ctx context.Context, request operations.DeleteSourceGlassfrogRequest) (*operations.DeleteSourceGlassfrogResponse, error) {
+func (s *Sources) DeleteSourceGlassfrog(ctx context.Context, request operations.DeleteSourceGlassfrogRequest) (*operations.DeleteSourceGlassfrogResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Glassfrog", request, nil)
if err != nil {
@@ -16287,7 +16302,7 @@ func (s *sources) DeleteSourceGlassfrog(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16325,7 +16340,7 @@ func (s *sources) DeleteSourceGlassfrog(ctx context.Context, request operations.
}
// DeleteSourceGnews - Delete a Source
-func (s *sources) DeleteSourceGnews(ctx context.Context, request operations.DeleteSourceGnewsRequest) (*operations.DeleteSourceGnewsResponse, error) {
+func (s *Sources) DeleteSourceGnews(ctx context.Context, request operations.DeleteSourceGnewsRequest) (*operations.DeleteSourceGnewsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gnews", request, nil)
if err != nil {
@@ -16337,7 +16352,7 @@ func (s *sources) DeleteSourceGnews(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16375,7 +16390,7 @@ func (s *sources) DeleteSourceGnews(ctx context.Context, request operations.Dele
}
// DeleteSourceGoogleAds - Delete a Source
-func (s *sources) DeleteSourceGoogleAds(ctx context.Context, request operations.DeleteSourceGoogleAdsRequest) (*operations.DeleteSourceGoogleAdsResponse, error) {
+func (s *Sources) DeleteSourceGoogleAds(ctx context.Context, request operations.DeleteSourceGoogleAdsRequest) (*operations.DeleteSourceGoogleAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleAds", request, nil)
if err != nil {
@@ -16387,7 +16402,7 @@ func (s *sources) DeleteSourceGoogleAds(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16425,7 +16440,7 @@ func (s *sources) DeleteSourceGoogleAds(ctx context.Context, request operations.
}
// DeleteSourceGoogleAnalyticsDataAPI - Delete a Source
-func (s *sources) DeleteSourceGoogleAnalyticsDataAPI(ctx context.Context, request operations.DeleteSourceGoogleAnalyticsDataAPIRequest) (*operations.DeleteSourceGoogleAnalyticsDataAPIResponse, error) {
+func (s *Sources) DeleteSourceGoogleAnalyticsDataAPI(ctx context.Context, request operations.DeleteSourceGoogleAnalyticsDataAPIRequest) (*operations.DeleteSourceGoogleAnalyticsDataAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleAnalyticsDataApi", request, nil)
if err != nil {
@@ -16437,7 +16452,7 @@ func (s *sources) DeleteSourceGoogleAnalyticsDataAPI(ctx context.Context, reques
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16474,10 +16489,10 @@ func (s *sources) DeleteSourceGoogleAnalyticsDataAPI(ctx context.Context, reques
return res, nil
}
-// DeleteSourceGoogleAnalyticsV4 - Delete a Source
-func (s *sources) DeleteSourceGoogleAnalyticsV4(ctx context.Context, request operations.DeleteSourceGoogleAnalyticsV4Request) (*operations.DeleteSourceGoogleAnalyticsV4Response, error) {
+// DeleteSourceGoogleDirectory - Delete a Source
+func (s *Sources) DeleteSourceGoogleDirectory(ctx context.Context, request operations.DeleteSourceGoogleDirectoryRequest) (*operations.DeleteSourceGoogleDirectoryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleAnalyticsV4", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleDirectory", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -16487,7 +16502,7 @@ func (s *sources) DeleteSourceGoogleAnalyticsV4(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16508,7 +16523,7 @@ func (s *sources) DeleteSourceGoogleAnalyticsV4(ctx context.Context, request ope
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceGoogleAnalyticsV4Response{
+ res := &operations.DeleteSourceGoogleDirectoryResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -16524,10 +16539,10 @@ func (s *sources) DeleteSourceGoogleAnalyticsV4(ctx context.Context, request ope
return res, nil
}
-// DeleteSourceGoogleDirectory - Delete a Source
-func (s *sources) DeleteSourceGoogleDirectory(ctx context.Context, request operations.DeleteSourceGoogleDirectoryRequest) (*operations.DeleteSourceGoogleDirectoryResponse, error) {
+// DeleteSourceGoogleDrive - Delete a Source
+func (s *Sources) DeleteSourceGoogleDrive(ctx context.Context, request operations.DeleteSourceGoogleDriveRequest) (*operations.DeleteSourceGoogleDriveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleDirectory", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleDrive", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -16537,7 +16552,7 @@ func (s *sources) DeleteSourceGoogleDirectory(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16558,7 +16573,7 @@ func (s *sources) DeleteSourceGoogleDirectory(ctx context.Context, request opera
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceGoogleDirectoryResponse{
+ res := &operations.DeleteSourceGoogleDriveResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -16575,7 +16590,7 @@ func (s *sources) DeleteSourceGoogleDirectory(ctx context.Context, request opera
}
// DeleteSourceGooglePagespeedInsights - Delete a Source
-func (s *sources) DeleteSourceGooglePagespeedInsights(ctx context.Context, request operations.DeleteSourceGooglePagespeedInsightsRequest) (*operations.DeleteSourceGooglePagespeedInsightsResponse, error) {
+func (s *Sources) DeleteSourceGooglePagespeedInsights(ctx context.Context, request operations.DeleteSourceGooglePagespeedInsightsRequest) (*operations.DeleteSourceGooglePagespeedInsightsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GooglePagespeedInsights", request, nil)
if err != nil {
@@ -16587,7 +16602,7 @@ func (s *sources) DeleteSourceGooglePagespeedInsights(ctx context.Context, reque
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16625,7 +16640,7 @@ func (s *sources) DeleteSourceGooglePagespeedInsights(ctx context.Context, reque
}
// DeleteSourceGoogleSearchConsole - Delete a Source
-func (s *sources) DeleteSourceGoogleSearchConsole(ctx context.Context, request operations.DeleteSourceGoogleSearchConsoleRequest) (*operations.DeleteSourceGoogleSearchConsoleResponse, error) {
+func (s *Sources) DeleteSourceGoogleSearchConsole(ctx context.Context, request operations.DeleteSourceGoogleSearchConsoleRequest) (*operations.DeleteSourceGoogleSearchConsoleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleSearchConsole", request, nil)
if err != nil {
@@ -16637,7 +16652,7 @@ func (s *sources) DeleteSourceGoogleSearchConsole(ctx context.Context, request o
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16675,7 +16690,7 @@ func (s *sources) DeleteSourceGoogleSearchConsole(ctx context.Context, request o
}
// DeleteSourceGoogleSheets - Delete a Source
-func (s *sources) DeleteSourceGoogleSheets(ctx context.Context, request operations.DeleteSourceGoogleSheetsRequest) (*operations.DeleteSourceGoogleSheetsResponse, error) {
+func (s *Sources) DeleteSourceGoogleSheets(ctx context.Context, request operations.DeleteSourceGoogleSheetsRequest) (*operations.DeleteSourceGoogleSheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleSheets", request, nil)
if err != nil {
@@ -16687,7 +16702,7 @@ func (s *sources) DeleteSourceGoogleSheets(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16725,7 +16740,7 @@ func (s *sources) DeleteSourceGoogleSheets(ctx context.Context, request operatio
}
// DeleteSourceGoogleWebfonts - Delete a Source
-func (s *sources) DeleteSourceGoogleWebfonts(ctx context.Context, request operations.DeleteSourceGoogleWebfontsRequest) (*operations.DeleteSourceGoogleWebfontsResponse, error) {
+func (s *Sources) DeleteSourceGoogleWebfonts(ctx context.Context, request operations.DeleteSourceGoogleWebfontsRequest) (*operations.DeleteSourceGoogleWebfontsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleWebfonts", request, nil)
if err != nil {
@@ -16737,7 +16752,7 @@ func (s *sources) DeleteSourceGoogleWebfonts(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16775,7 +16790,7 @@ func (s *sources) DeleteSourceGoogleWebfonts(ctx context.Context, request operat
}
// DeleteSourceGoogleWorkspaceAdminReports - Delete a Source
-func (s *sources) DeleteSourceGoogleWorkspaceAdminReports(ctx context.Context, request operations.DeleteSourceGoogleWorkspaceAdminReportsRequest) (*operations.DeleteSourceGoogleWorkspaceAdminReportsResponse, error) {
+func (s *Sources) DeleteSourceGoogleWorkspaceAdminReports(ctx context.Context, request operations.DeleteSourceGoogleWorkspaceAdminReportsRequest) (*operations.DeleteSourceGoogleWorkspaceAdminReportsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleWorkspaceAdminReports", request, nil)
if err != nil {
@@ -16787,7 +16802,7 @@ func (s *sources) DeleteSourceGoogleWorkspaceAdminReports(ctx context.Context, r
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16825,7 +16840,7 @@ func (s *sources) DeleteSourceGoogleWorkspaceAdminReports(ctx context.Context, r
}
// DeleteSourceGreenhouse - Delete a Source
-func (s *sources) DeleteSourceGreenhouse(ctx context.Context, request operations.DeleteSourceGreenhouseRequest) (*operations.DeleteSourceGreenhouseResponse, error) {
+func (s *Sources) DeleteSourceGreenhouse(ctx context.Context, request operations.DeleteSourceGreenhouseRequest) (*operations.DeleteSourceGreenhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Greenhouse", request, nil)
if err != nil {
@@ -16837,7 +16852,7 @@ func (s *sources) DeleteSourceGreenhouse(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16875,7 +16890,7 @@ func (s *sources) DeleteSourceGreenhouse(ctx context.Context, request operations
}
// DeleteSourceGridly - Delete a Source
-func (s *sources) DeleteSourceGridly(ctx context.Context, request operations.DeleteSourceGridlyRequest) (*operations.DeleteSourceGridlyResponse, error) {
+func (s *Sources) DeleteSourceGridly(ctx context.Context, request operations.DeleteSourceGridlyRequest) (*operations.DeleteSourceGridlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gridly", request, nil)
if err != nil {
@@ -16887,7 +16902,7 @@ func (s *sources) DeleteSourceGridly(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16925,7 +16940,7 @@ func (s *sources) DeleteSourceGridly(ctx context.Context, request operations.Del
}
// DeleteSourceHarvest - Delete a Source
-func (s *sources) DeleteSourceHarvest(ctx context.Context, request operations.DeleteSourceHarvestRequest) (*operations.DeleteSourceHarvestResponse, error) {
+func (s *Sources) DeleteSourceHarvest(ctx context.Context, request operations.DeleteSourceHarvestRequest) (*operations.DeleteSourceHarvestResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Harvest", request, nil)
if err != nil {
@@ -16937,7 +16952,7 @@ func (s *sources) DeleteSourceHarvest(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -16975,7 +16990,7 @@ func (s *sources) DeleteSourceHarvest(ctx context.Context, request operations.De
}
// DeleteSourceHubplanner - Delete a Source
-func (s *sources) DeleteSourceHubplanner(ctx context.Context, request operations.DeleteSourceHubplannerRequest) (*operations.DeleteSourceHubplannerResponse, error) {
+func (s *Sources) DeleteSourceHubplanner(ctx context.Context, request operations.DeleteSourceHubplannerRequest) (*operations.DeleteSourceHubplannerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Hubplanner", request, nil)
if err != nil {
@@ -16987,7 +17002,7 @@ func (s *sources) DeleteSourceHubplanner(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17025,7 +17040,7 @@ func (s *sources) DeleteSourceHubplanner(ctx context.Context, request operations
}
// DeleteSourceHubspot - Delete a Source
-func (s *sources) DeleteSourceHubspot(ctx context.Context, request operations.DeleteSourceHubspotRequest) (*operations.DeleteSourceHubspotResponse, error) {
+func (s *Sources) DeleteSourceHubspot(ctx context.Context, request operations.DeleteSourceHubspotRequest) (*operations.DeleteSourceHubspotResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Hubspot", request, nil)
if err != nil {
@@ -17037,7 +17052,7 @@ func (s *sources) DeleteSourceHubspot(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17075,7 +17090,7 @@ func (s *sources) DeleteSourceHubspot(ctx context.Context, request operations.De
}
// DeleteSourceInsightly - Delete a Source
-func (s *sources) DeleteSourceInsightly(ctx context.Context, request operations.DeleteSourceInsightlyRequest) (*operations.DeleteSourceInsightlyResponse, error) {
+func (s *Sources) DeleteSourceInsightly(ctx context.Context, request operations.DeleteSourceInsightlyRequest) (*operations.DeleteSourceInsightlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Insightly", request, nil)
if err != nil {
@@ -17087,7 +17102,7 @@ func (s *sources) DeleteSourceInsightly(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17125,7 +17140,7 @@ func (s *sources) DeleteSourceInsightly(ctx context.Context, request operations.
}
// DeleteSourceInstagram - Delete a Source
-func (s *sources) DeleteSourceInstagram(ctx context.Context, request operations.DeleteSourceInstagramRequest) (*operations.DeleteSourceInstagramResponse, error) {
+func (s *Sources) DeleteSourceInstagram(ctx context.Context, request operations.DeleteSourceInstagramRequest) (*operations.DeleteSourceInstagramResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Instagram", request, nil)
if err != nil {
@@ -17137,7 +17152,7 @@ func (s *sources) DeleteSourceInstagram(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17175,7 +17190,7 @@ func (s *sources) DeleteSourceInstagram(ctx context.Context, request operations.
}
// DeleteSourceInstatus - Delete a Source
-func (s *sources) DeleteSourceInstatus(ctx context.Context, request operations.DeleteSourceInstatusRequest) (*operations.DeleteSourceInstatusResponse, error) {
+func (s *Sources) DeleteSourceInstatus(ctx context.Context, request operations.DeleteSourceInstatusRequest) (*operations.DeleteSourceInstatusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Instatus", request, nil)
if err != nil {
@@ -17187,7 +17202,7 @@ func (s *sources) DeleteSourceInstatus(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17225,7 +17240,7 @@ func (s *sources) DeleteSourceInstatus(ctx context.Context, request operations.D
}
// DeleteSourceIntercom - Delete a Source
-func (s *sources) DeleteSourceIntercom(ctx context.Context, request operations.DeleteSourceIntercomRequest) (*operations.DeleteSourceIntercomResponse, error) {
+func (s *Sources) DeleteSourceIntercom(ctx context.Context, request operations.DeleteSourceIntercomRequest) (*operations.DeleteSourceIntercomResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Intercom", request, nil)
if err != nil {
@@ -17237,7 +17252,7 @@ func (s *sources) DeleteSourceIntercom(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17275,7 +17290,7 @@ func (s *sources) DeleteSourceIntercom(ctx context.Context, request operations.D
}
// DeleteSourceIp2whois - Delete a Source
-func (s *sources) DeleteSourceIp2whois(ctx context.Context, request operations.DeleteSourceIp2whoisRequest) (*operations.DeleteSourceIp2whoisResponse, error) {
+func (s *Sources) DeleteSourceIp2whois(ctx context.Context, request operations.DeleteSourceIp2whoisRequest) (*operations.DeleteSourceIp2whoisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Ip2whois", request, nil)
if err != nil {
@@ -17287,7 +17302,7 @@ func (s *sources) DeleteSourceIp2whois(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17325,7 +17340,7 @@ func (s *sources) DeleteSourceIp2whois(ctx context.Context, request operations.D
}
// DeleteSourceIterable - Delete a Source
-func (s *sources) DeleteSourceIterable(ctx context.Context, request operations.DeleteSourceIterableRequest) (*operations.DeleteSourceIterableResponse, error) {
+func (s *Sources) DeleteSourceIterable(ctx context.Context, request operations.DeleteSourceIterableRequest) (*operations.DeleteSourceIterableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Iterable", request, nil)
if err != nil {
@@ -17337,7 +17352,7 @@ func (s *sources) DeleteSourceIterable(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17375,7 +17390,7 @@ func (s *sources) DeleteSourceIterable(ctx context.Context, request operations.D
}
// DeleteSourceJira - Delete a Source
-func (s *sources) DeleteSourceJira(ctx context.Context, request operations.DeleteSourceJiraRequest) (*operations.DeleteSourceJiraResponse, error) {
+func (s *Sources) DeleteSourceJira(ctx context.Context, request operations.DeleteSourceJiraRequest) (*operations.DeleteSourceJiraResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Jira", request, nil)
if err != nil {
@@ -17387,7 +17402,7 @@ func (s *sources) DeleteSourceJira(ctx context.Context, request operations.Delet
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17425,7 +17440,7 @@ func (s *sources) DeleteSourceJira(ctx context.Context, request operations.Delet
}
// DeleteSourceK6Cloud - Delete a Source
-func (s *sources) DeleteSourceK6Cloud(ctx context.Context, request operations.DeleteSourceK6CloudRequest) (*operations.DeleteSourceK6CloudResponse, error) {
+func (s *Sources) DeleteSourceK6Cloud(ctx context.Context, request operations.DeleteSourceK6CloudRequest) (*operations.DeleteSourceK6CloudResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#K6Cloud", request, nil)
if err != nil {
@@ -17437,7 +17452,7 @@ func (s *sources) DeleteSourceK6Cloud(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17475,7 +17490,7 @@ func (s *sources) DeleteSourceK6Cloud(ctx context.Context, request operations.De
}
// DeleteSourceKlarna - Delete a Source
-func (s *sources) DeleteSourceKlarna(ctx context.Context, request operations.DeleteSourceKlarnaRequest) (*operations.DeleteSourceKlarnaResponse, error) {
+func (s *Sources) DeleteSourceKlarna(ctx context.Context, request operations.DeleteSourceKlarnaRequest) (*operations.DeleteSourceKlarnaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Klarna", request, nil)
if err != nil {
@@ -17487,7 +17502,7 @@ func (s *sources) DeleteSourceKlarna(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17525,7 +17540,7 @@ func (s *sources) DeleteSourceKlarna(ctx context.Context, request operations.Del
}
// DeleteSourceKlaviyo - Delete a Source
-func (s *sources) DeleteSourceKlaviyo(ctx context.Context, request operations.DeleteSourceKlaviyoRequest) (*operations.DeleteSourceKlaviyoResponse, error) {
+func (s *Sources) DeleteSourceKlaviyo(ctx context.Context, request operations.DeleteSourceKlaviyoRequest) (*operations.DeleteSourceKlaviyoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Klaviyo", request, nil)
if err != nil {
@@ -17537,7 +17552,7 @@ func (s *sources) DeleteSourceKlaviyo(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17575,7 +17590,7 @@ func (s *sources) DeleteSourceKlaviyo(ctx context.Context, request operations.De
}
// DeleteSourceKustomerSinger - Delete a Source
-func (s *sources) DeleteSourceKustomerSinger(ctx context.Context, request operations.DeleteSourceKustomerSingerRequest) (*operations.DeleteSourceKustomerSingerResponse, error) {
+func (s *Sources) DeleteSourceKustomerSinger(ctx context.Context, request operations.DeleteSourceKustomerSingerRequest) (*operations.DeleteSourceKustomerSingerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#KustomerSinger", request, nil)
if err != nil {
@@ -17587,7 +17602,7 @@ func (s *sources) DeleteSourceKustomerSinger(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17625,7 +17640,7 @@ func (s *sources) DeleteSourceKustomerSinger(ctx context.Context, request operat
}
// DeleteSourceKyve - Delete a Source
-func (s *sources) DeleteSourceKyve(ctx context.Context, request operations.DeleteSourceKyveRequest) (*operations.DeleteSourceKyveResponse, error) {
+func (s *Sources) DeleteSourceKyve(ctx context.Context, request operations.DeleteSourceKyveRequest) (*operations.DeleteSourceKyveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Kyve", request, nil)
if err != nil {
@@ -17637,7 +17652,7 @@ func (s *sources) DeleteSourceKyve(ctx context.Context, request operations.Delet
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17675,7 +17690,7 @@ func (s *sources) DeleteSourceKyve(ctx context.Context, request operations.Delet
}
// DeleteSourceLaunchdarkly - Delete a Source
-func (s *sources) DeleteSourceLaunchdarkly(ctx context.Context, request operations.DeleteSourceLaunchdarklyRequest) (*operations.DeleteSourceLaunchdarklyResponse, error) {
+func (s *Sources) DeleteSourceLaunchdarkly(ctx context.Context, request operations.DeleteSourceLaunchdarklyRequest) (*operations.DeleteSourceLaunchdarklyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Launchdarkly", request, nil)
if err != nil {
@@ -17687,7 +17702,7 @@ func (s *sources) DeleteSourceLaunchdarkly(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17725,7 +17740,7 @@ func (s *sources) DeleteSourceLaunchdarkly(ctx context.Context, request operatio
}
// DeleteSourceLemlist - Delete a Source
-func (s *sources) DeleteSourceLemlist(ctx context.Context, request operations.DeleteSourceLemlistRequest) (*operations.DeleteSourceLemlistResponse, error) {
+func (s *Sources) DeleteSourceLemlist(ctx context.Context, request operations.DeleteSourceLemlistRequest) (*operations.DeleteSourceLemlistResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Lemlist", request, nil)
if err != nil {
@@ -17737,7 +17752,7 @@ func (s *sources) DeleteSourceLemlist(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17775,7 +17790,7 @@ func (s *sources) DeleteSourceLemlist(ctx context.Context, request operations.De
}
// DeleteSourceLeverHiring - Delete a Source
-func (s *sources) DeleteSourceLeverHiring(ctx context.Context, request operations.DeleteSourceLeverHiringRequest) (*operations.DeleteSourceLeverHiringResponse, error) {
+func (s *Sources) DeleteSourceLeverHiring(ctx context.Context, request operations.DeleteSourceLeverHiringRequest) (*operations.DeleteSourceLeverHiringResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#LeverHiring", request, nil)
if err != nil {
@@ -17787,7 +17802,7 @@ func (s *sources) DeleteSourceLeverHiring(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17825,7 +17840,7 @@ func (s *sources) DeleteSourceLeverHiring(ctx context.Context, request operation
}
// DeleteSourceLinkedinAds - Delete a Source
-func (s *sources) DeleteSourceLinkedinAds(ctx context.Context, request operations.DeleteSourceLinkedinAdsRequest) (*operations.DeleteSourceLinkedinAdsResponse, error) {
+func (s *Sources) DeleteSourceLinkedinAds(ctx context.Context, request operations.DeleteSourceLinkedinAdsRequest) (*operations.DeleteSourceLinkedinAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#LinkedinAds", request, nil)
if err != nil {
@@ -17837,7 +17852,7 @@ func (s *sources) DeleteSourceLinkedinAds(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17875,7 +17890,7 @@ func (s *sources) DeleteSourceLinkedinAds(ctx context.Context, request operation
}
// DeleteSourceLinkedinPages - Delete a Source
-func (s *sources) DeleteSourceLinkedinPages(ctx context.Context, request operations.DeleteSourceLinkedinPagesRequest) (*operations.DeleteSourceLinkedinPagesResponse, error) {
+func (s *Sources) DeleteSourceLinkedinPages(ctx context.Context, request operations.DeleteSourceLinkedinPagesRequest) (*operations.DeleteSourceLinkedinPagesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#LinkedinPages", request, nil)
if err != nil {
@@ -17887,7 +17902,7 @@ func (s *sources) DeleteSourceLinkedinPages(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17925,7 +17940,7 @@ func (s *sources) DeleteSourceLinkedinPages(ctx context.Context, request operati
}
// DeleteSourceLinnworks - Delete a Source
-func (s *sources) DeleteSourceLinnworks(ctx context.Context, request operations.DeleteSourceLinnworksRequest) (*operations.DeleteSourceLinnworksResponse, error) {
+func (s *Sources) DeleteSourceLinnworks(ctx context.Context, request operations.DeleteSourceLinnworksRequest) (*operations.DeleteSourceLinnworksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Linnworks", request, nil)
if err != nil {
@@ -17937,7 +17952,7 @@ func (s *sources) DeleteSourceLinnworks(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -17975,7 +17990,7 @@ func (s *sources) DeleteSourceLinnworks(ctx context.Context, request operations.
}
// DeleteSourceLokalise - Delete a Source
-func (s *sources) DeleteSourceLokalise(ctx context.Context, request operations.DeleteSourceLokaliseRequest) (*operations.DeleteSourceLokaliseResponse, error) {
+func (s *Sources) DeleteSourceLokalise(ctx context.Context, request operations.DeleteSourceLokaliseRequest) (*operations.DeleteSourceLokaliseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Lokalise", request, nil)
if err != nil {
@@ -17987,7 +18002,7 @@ func (s *sources) DeleteSourceLokalise(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18025,7 +18040,7 @@ func (s *sources) DeleteSourceLokalise(ctx context.Context, request operations.D
}
// DeleteSourceMailchimp - Delete a Source
-func (s *sources) DeleteSourceMailchimp(ctx context.Context, request operations.DeleteSourceMailchimpRequest) (*operations.DeleteSourceMailchimpResponse, error) {
+func (s *Sources) DeleteSourceMailchimp(ctx context.Context, request operations.DeleteSourceMailchimpRequest) (*operations.DeleteSourceMailchimpResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mailchimp", request, nil)
if err != nil {
@@ -18037,7 +18052,7 @@ func (s *sources) DeleteSourceMailchimp(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18075,7 +18090,7 @@ func (s *sources) DeleteSourceMailchimp(ctx context.Context, request operations.
}
// DeleteSourceMailgun - Delete a Source
-func (s *sources) DeleteSourceMailgun(ctx context.Context, request operations.DeleteSourceMailgunRequest) (*operations.DeleteSourceMailgunResponse, error) {
+func (s *Sources) DeleteSourceMailgun(ctx context.Context, request operations.DeleteSourceMailgunRequest) (*operations.DeleteSourceMailgunResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mailgun", request, nil)
if err != nil {
@@ -18087,7 +18102,7 @@ func (s *sources) DeleteSourceMailgun(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18125,7 +18140,7 @@ func (s *sources) DeleteSourceMailgun(ctx context.Context, request operations.De
}
// DeleteSourceMailjetSms - Delete a Source
-func (s *sources) DeleteSourceMailjetSms(ctx context.Context, request operations.DeleteSourceMailjetSmsRequest) (*operations.DeleteSourceMailjetSmsResponse, error) {
+func (s *Sources) DeleteSourceMailjetSms(ctx context.Context, request operations.DeleteSourceMailjetSmsRequest) (*operations.DeleteSourceMailjetSmsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MailjetSms", request, nil)
if err != nil {
@@ -18137,7 +18152,7 @@ func (s *sources) DeleteSourceMailjetSms(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18175,7 +18190,7 @@ func (s *sources) DeleteSourceMailjetSms(ctx context.Context, request operations
}
// DeleteSourceMarketo - Delete a Source
-func (s *sources) DeleteSourceMarketo(ctx context.Context, request operations.DeleteSourceMarketoRequest) (*operations.DeleteSourceMarketoResponse, error) {
+func (s *Sources) DeleteSourceMarketo(ctx context.Context, request operations.DeleteSourceMarketoRequest) (*operations.DeleteSourceMarketoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Marketo", request, nil)
if err != nil {
@@ -18187,7 +18202,7 @@ func (s *sources) DeleteSourceMarketo(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18225,7 +18240,7 @@ func (s *sources) DeleteSourceMarketo(ctx context.Context, request operations.De
}
// DeleteSourceMetabase - Delete a Source
-func (s *sources) DeleteSourceMetabase(ctx context.Context, request operations.DeleteSourceMetabaseRequest) (*operations.DeleteSourceMetabaseResponse, error) {
+func (s *Sources) DeleteSourceMetabase(ctx context.Context, request operations.DeleteSourceMetabaseRequest) (*operations.DeleteSourceMetabaseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Metabase", request, nil)
if err != nil {
@@ -18237,7 +18252,7 @@ func (s *sources) DeleteSourceMetabase(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18275,7 +18290,7 @@ func (s *sources) DeleteSourceMetabase(ctx context.Context, request operations.D
}
// DeleteSourceMicrosoftTeams - Delete a Source
-func (s *sources) DeleteSourceMicrosoftTeams(ctx context.Context, request operations.DeleteSourceMicrosoftTeamsRequest) (*operations.DeleteSourceMicrosoftTeamsResponse, error) {
+func (s *Sources) DeleteSourceMicrosoftTeams(ctx context.Context, request operations.DeleteSourceMicrosoftTeamsRequest) (*operations.DeleteSourceMicrosoftTeamsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MicrosoftTeams", request, nil)
if err != nil {
@@ -18287,7 +18302,7 @@ func (s *sources) DeleteSourceMicrosoftTeams(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18325,7 +18340,7 @@ func (s *sources) DeleteSourceMicrosoftTeams(ctx context.Context, request operat
}
// DeleteSourceMixpanel - Delete a Source
-func (s *sources) DeleteSourceMixpanel(ctx context.Context, request operations.DeleteSourceMixpanelRequest) (*operations.DeleteSourceMixpanelResponse, error) {
+func (s *Sources) DeleteSourceMixpanel(ctx context.Context, request operations.DeleteSourceMixpanelRequest) (*operations.DeleteSourceMixpanelResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mixpanel", request, nil)
if err != nil {
@@ -18337,7 +18352,7 @@ func (s *sources) DeleteSourceMixpanel(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18375,7 +18390,7 @@ func (s *sources) DeleteSourceMixpanel(ctx context.Context, request operations.D
}
// DeleteSourceMonday - Delete a Source
-func (s *sources) DeleteSourceMonday(ctx context.Context, request operations.DeleteSourceMondayRequest) (*operations.DeleteSourceMondayResponse, error) {
+func (s *Sources) DeleteSourceMonday(ctx context.Context, request operations.DeleteSourceMondayRequest) (*operations.DeleteSourceMondayResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Monday", request, nil)
if err != nil {
@@ -18387,7 +18402,7 @@ func (s *sources) DeleteSourceMonday(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18424,10 +18439,10 @@ func (s *sources) DeleteSourceMonday(ctx context.Context, request operations.Del
return res, nil
}
-// DeleteSourceMongodb - Delete a Source
-func (s *sources) DeleteSourceMongodb(ctx context.Context, request operations.DeleteSourceMongodbRequest) (*operations.DeleteSourceMongodbResponse, error) {
+// DeleteSourceMongodbInternalPoc - Delete a Source
+func (s *Sources) DeleteSourceMongodbInternalPoc(ctx context.Context, request operations.DeleteSourceMongodbInternalPocRequest) (*operations.DeleteSourceMongodbInternalPocResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mongodb", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MongodbInternalPoc", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -18437,7 +18452,7 @@ func (s *sources) DeleteSourceMongodb(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18458,7 +18473,7 @@ func (s *sources) DeleteSourceMongodb(ctx context.Context, request operations.De
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceMongodbResponse{
+ res := &operations.DeleteSourceMongodbInternalPocResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -18474,10 +18489,10 @@ func (s *sources) DeleteSourceMongodb(ctx context.Context, request operations.De
return res, nil
}
-// DeleteSourceMongodbInternalPoc - Delete a Source
-func (s *sources) DeleteSourceMongodbInternalPoc(ctx context.Context, request operations.DeleteSourceMongodbInternalPocRequest) (*operations.DeleteSourceMongodbInternalPocResponse, error) {
+// DeleteSourceMongodbV2 - Delete a Source
+func (s *Sources) DeleteSourceMongodbV2(ctx context.Context, request operations.DeleteSourceMongodbV2Request) (*operations.DeleteSourceMongodbV2Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MongodbInternalPoc", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MongodbV2", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -18487,7 +18502,7 @@ func (s *sources) DeleteSourceMongodbInternalPoc(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18508,7 +18523,7 @@ func (s *sources) DeleteSourceMongodbInternalPoc(ctx context.Context, request op
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceMongodbInternalPocResponse{
+ res := &operations.DeleteSourceMongodbV2Response{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -18525,7 +18540,7 @@ func (s *sources) DeleteSourceMongodbInternalPoc(ctx context.Context, request op
}
// DeleteSourceMssql - Delete a Source
-func (s *sources) DeleteSourceMssql(ctx context.Context, request operations.DeleteSourceMssqlRequest) (*operations.DeleteSourceMssqlResponse, error) {
+func (s *Sources) DeleteSourceMssql(ctx context.Context, request operations.DeleteSourceMssqlRequest) (*operations.DeleteSourceMssqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mssql", request, nil)
if err != nil {
@@ -18537,7 +18552,7 @@ func (s *sources) DeleteSourceMssql(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18575,7 +18590,7 @@ func (s *sources) DeleteSourceMssql(ctx context.Context, request operations.Dele
}
// DeleteSourceMyHours - Delete a Source
-func (s *sources) DeleteSourceMyHours(ctx context.Context, request operations.DeleteSourceMyHoursRequest) (*operations.DeleteSourceMyHoursResponse, error) {
+func (s *Sources) DeleteSourceMyHours(ctx context.Context, request operations.DeleteSourceMyHoursRequest) (*operations.DeleteSourceMyHoursResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MyHours", request, nil)
if err != nil {
@@ -18587,7 +18602,7 @@ func (s *sources) DeleteSourceMyHours(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18625,7 +18640,7 @@ func (s *sources) DeleteSourceMyHours(ctx context.Context, request operations.De
}
// DeleteSourceMysql - Delete a Source
-func (s *sources) DeleteSourceMysql(ctx context.Context, request operations.DeleteSourceMysqlRequest) (*operations.DeleteSourceMysqlResponse, error) {
+func (s *Sources) DeleteSourceMysql(ctx context.Context, request operations.DeleteSourceMysqlRequest) (*operations.DeleteSourceMysqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mysql", request, nil)
if err != nil {
@@ -18637,7 +18652,7 @@ func (s *sources) DeleteSourceMysql(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18675,7 +18690,7 @@ func (s *sources) DeleteSourceMysql(ctx context.Context, request operations.Dele
}
// DeleteSourceNetsuite - Delete a Source
-func (s *sources) DeleteSourceNetsuite(ctx context.Context, request operations.DeleteSourceNetsuiteRequest) (*operations.DeleteSourceNetsuiteResponse, error) {
+func (s *Sources) DeleteSourceNetsuite(ctx context.Context, request operations.DeleteSourceNetsuiteRequest) (*operations.DeleteSourceNetsuiteResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Netsuite", request, nil)
if err != nil {
@@ -18687,7 +18702,7 @@ func (s *sources) DeleteSourceNetsuite(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18725,7 +18740,7 @@ func (s *sources) DeleteSourceNetsuite(ctx context.Context, request operations.D
}
// DeleteSourceNotion - Delete a Source
-func (s *sources) DeleteSourceNotion(ctx context.Context, request operations.DeleteSourceNotionRequest) (*operations.DeleteSourceNotionResponse, error) {
+func (s *Sources) DeleteSourceNotion(ctx context.Context, request operations.DeleteSourceNotionRequest) (*operations.DeleteSourceNotionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Notion", request, nil)
if err != nil {
@@ -18737,7 +18752,7 @@ func (s *sources) DeleteSourceNotion(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18775,7 +18790,7 @@ func (s *sources) DeleteSourceNotion(ctx context.Context, request operations.Del
}
// DeleteSourceNytimes - Delete a Source
-func (s *sources) DeleteSourceNytimes(ctx context.Context, request operations.DeleteSourceNytimesRequest) (*operations.DeleteSourceNytimesResponse, error) {
+func (s *Sources) DeleteSourceNytimes(ctx context.Context, request operations.DeleteSourceNytimesRequest) (*operations.DeleteSourceNytimesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Nytimes", request, nil)
if err != nil {
@@ -18787,7 +18802,7 @@ func (s *sources) DeleteSourceNytimes(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18825,7 +18840,7 @@ func (s *sources) DeleteSourceNytimes(ctx context.Context, request operations.De
}
// DeleteSourceOkta - Delete a Source
-func (s *sources) DeleteSourceOkta(ctx context.Context, request operations.DeleteSourceOktaRequest) (*operations.DeleteSourceOktaResponse, error) {
+func (s *Sources) DeleteSourceOkta(ctx context.Context, request operations.DeleteSourceOktaRequest) (*operations.DeleteSourceOktaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Okta", request, nil)
if err != nil {
@@ -18837,7 +18852,7 @@ func (s *sources) DeleteSourceOkta(ctx context.Context, request operations.Delet
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18875,7 +18890,7 @@ func (s *sources) DeleteSourceOkta(ctx context.Context, request operations.Delet
}
// DeleteSourceOmnisend - Delete a Source
-func (s *sources) DeleteSourceOmnisend(ctx context.Context, request operations.DeleteSourceOmnisendRequest) (*operations.DeleteSourceOmnisendResponse, error) {
+func (s *Sources) DeleteSourceOmnisend(ctx context.Context, request operations.DeleteSourceOmnisendRequest) (*operations.DeleteSourceOmnisendResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Omnisend", request, nil)
if err != nil {
@@ -18887,7 +18902,7 @@ func (s *sources) DeleteSourceOmnisend(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18925,7 +18940,7 @@ func (s *sources) DeleteSourceOmnisend(ctx context.Context, request operations.D
}
// DeleteSourceOnesignal - Delete a Source
-func (s *sources) DeleteSourceOnesignal(ctx context.Context, request operations.DeleteSourceOnesignalRequest) (*operations.DeleteSourceOnesignalResponse, error) {
+func (s *Sources) DeleteSourceOnesignal(ctx context.Context, request operations.DeleteSourceOnesignalRequest) (*operations.DeleteSourceOnesignalResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Onesignal", request, nil)
if err != nil {
@@ -18937,7 +18952,7 @@ func (s *sources) DeleteSourceOnesignal(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -18975,7 +18990,7 @@ func (s *sources) DeleteSourceOnesignal(ctx context.Context, request operations.
}
// DeleteSourceOracle - Delete a Source
-func (s *sources) DeleteSourceOracle(ctx context.Context, request operations.DeleteSourceOracleRequest) (*operations.DeleteSourceOracleResponse, error) {
+func (s *Sources) DeleteSourceOracle(ctx context.Context, request operations.DeleteSourceOracleRequest) (*operations.DeleteSourceOracleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Oracle", request, nil)
if err != nil {
@@ -18987,7 +19002,7 @@ func (s *sources) DeleteSourceOracle(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19025,7 +19040,7 @@ func (s *sources) DeleteSourceOracle(ctx context.Context, request operations.Del
}
// DeleteSourceOrb - Delete a Source
-func (s *sources) DeleteSourceOrb(ctx context.Context, request operations.DeleteSourceOrbRequest) (*operations.DeleteSourceOrbResponse, error) {
+func (s *Sources) DeleteSourceOrb(ctx context.Context, request operations.DeleteSourceOrbRequest) (*operations.DeleteSourceOrbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Orb", request, nil)
if err != nil {
@@ -19037,7 +19052,7 @@ func (s *sources) DeleteSourceOrb(ctx context.Context, request operations.Delete
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19075,7 +19090,7 @@ func (s *sources) DeleteSourceOrb(ctx context.Context, request operations.Delete
}
// DeleteSourceOrbit - Delete a Source
-func (s *sources) DeleteSourceOrbit(ctx context.Context, request operations.DeleteSourceOrbitRequest) (*operations.DeleteSourceOrbitResponse, error) {
+func (s *Sources) DeleteSourceOrbit(ctx context.Context, request operations.DeleteSourceOrbitRequest) (*operations.DeleteSourceOrbitResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Orbit", request, nil)
if err != nil {
@@ -19087,7 +19102,7 @@ func (s *sources) DeleteSourceOrbit(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19125,7 +19140,7 @@ func (s *sources) DeleteSourceOrbit(ctx context.Context, request operations.Dele
}
// DeleteSourceOutbrainAmplify - Delete a Source
-func (s *sources) DeleteSourceOutbrainAmplify(ctx context.Context, request operations.DeleteSourceOutbrainAmplifyRequest) (*operations.DeleteSourceOutbrainAmplifyResponse, error) {
+func (s *Sources) DeleteSourceOutbrainAmplify(ctx context.Context, request operations.DeleteSourceOutbrainAmplifyRequest) (*operations.DeleteSourceOutbrainAmplifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#OutbrainAmplify", request, nil)
if err != nil {
@@ -19137,7 +19152,7 @@ func (s *sources) DeleteSourceOutbrainAmplify(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19175,7 +19190,7 @@ func (s *sources) DeleteSourceOutbrainAmplify(ctx context.Context, request opera
}
// DeleteSourceOutreach - Delete a Source
-func (s *sources) DeleteSourceOutreach(ctx context.Context, request operations.DeleteSourceOutreachRequest) (*operations.DeleteSourceOutreachResponse, error) {
+func (s *Sources) DeleteSourceOutreach(ctx context.Context, request operations.DeleteSourceOutreachRequest) (*operations.DeleteSourceOutreachResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Outreach", request, nil)
if err != nil {
@@ -19187,7 +19202,7 @@ func (s *sources) DeleteSourceOutreach(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19225,7 +19240,7 @@ func (s *sources) DeleteSourceOutreach(ctx context.Context, request operations.D
}
// DeleteSourcePaypalTransaction - Delete a Source
-func (s *sources) DeleteSourcePaypalTransaction(ctx context.Context, request operations.DeleteSourcePaypalTransactionRequest) (*operations.DeleteSourcePaypalTransactionResponse, error) {
+func (s *Sources) DeleteSourcePaypalTransaction(ctx context.Context, request operations.DeleteSourcePaypalTransactionRequest) (*operations.DeleteSourcePaypalTransactionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PaypalTransaction", request, nil)
if err != nil {
@@ -19237,7 +19252,7 @@ func (s *sources) DeleteSourcePaypalTransaction(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19275,7 +19290,7 @@ func (s *sources) DeleteSourcePaypalTransaction(ctx context.Context, request ope
}
// DeleteSourcePaystack - Delete a Source
-func (s *sources) DeleteSourcePaystack(ctx context.Context, request operations.DeleteSourcePaystackRequest) (*operations.DeleteSourcePaystackResponse, error) {
+func (s *Sources) DeleteSourcePaystack(ctx context.Context, request operations.DeleteSourcePaystackRequest) (*operations.DeleteSourcePaystackResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Paystack", request, nil)
if err != nil {
@@ -19287,7 +19302,7 @@ func (s *sources) DeleteSourcePaystack(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19325,7 +19340,7 @@ func (s *sources) DeleteSourcePaystack(ctx context.Context, request operations.D
}
// DeleteSourcePendo - Delete a Source
-func (s *sources) DeleteSourcePendo(ctx context.Context, request operations.DeleteSourcePendoRequest) (*operations.DeleteSourcePendoResponse, error) {
+func (s *Sources) DeleteSourcePendo(ctx context.Context, request operations.DeleteSourcePendoRequest) (*operations.DeleteSourcePendoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pendo", request, nil)
if err != nil {
@@ -19337,7 +19352,7 @@ func (s *sources) DeleteSourcePendo(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19375,7 +19390,7 @@ func (s *sources) DeleteSourcePendo(ctx context.Context, request operations.Dele
}
// DeleteSourcePersistiq - Delete a Source
-func (s *sources) DeleteSourcePersistiq(ctx context.Context, request operations.DeleteSourcePersistiqRequest) (*operations.DeleteSourcePersistiqResponse, error) {
+func (s *Sources) DeleteSourcePersistiq(ctx context.Context, request operations.DeleteSourcePersistiqRequest) (*operations.DeleteSourcePersistiqResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Persistiq", request, nil)
if err != nil {
@@ -19387,7 +19402,7 @@ func (s *sources) DeleteSourcePersistiq(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19425,7 +19440,7 @@ func (s *sources) DeleteSourcePersistiq(ctx context.Context, request operations.
}
// DeleteSourcePexelsAPI - Delete a Source
-func (s *sources) DeleteSourcePexelsAPI(ctx context.Context, request operations.DeleteSourcePexelsAPIRequest) (*operations.DeleteSourcePexelsAPIResponse, error) {
+func (s *Sources) DeleteSourcePexelsAPI(ctx context.Context, request operations.DeleteSourcePexelsAPIRequest) (*operations.DeleteSourcePexelsAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PexelsApi", request, nil)
if err != nil {
@@ -19437,7 +19452,7 @@ func (s *sources) DeleteSourcePexelsAPI(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19475,7 +19490,7 @@ func (s *sources) DeleteSourcePexelsAPI(ctx context.Context, request operations.
}
// DeleteSourcePinterest - Delete a Source
-func (s *sources) DeleteSourcePinterest(ctx context.Context, request operations.DeleteSourcePinterestRequest) (*operations.DeleteSourcePinterestResponse, error) {
+func (s *Sources) DeleteSourcePinterest(ctx context.Context, request operations.DeleteSourcePinterestRequest) (*operations.DeleteSourcePinterestResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pinterest", request, nil)
if err != nil {
@@ -19487,7 +19502,7 @@ func (s *sources) DeleteSourcePinterest(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19525,7 +19540,7 @@ func (s *sources) DeleteSourcePinterest(ctx context.Context, request operations.
}
// DeleteSourcePipedrive - Delete a Source
-func (s *sources) DeleteSourcePipedrive(ctx context.Context, request operations.DeleteSourcePipedriveRequest) (*operations.DeleteSourcePipedriveResponse, error) {
+func (s *Sources) DeleteSourcePipedrive(ctx context.Context, request operations.DeleteSourcePipedriveRequest) (*operations.DeleteSourcePipedriveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pipedrive", request, nil)
if err != nil {
@@ -19537,7 +19552,7 @@ func (s *sources) DeleteSourcePipedrive(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19575,7 +19590,7 @@ func (s *sources) DeleteSourcePipedrive(ctx context.Context, request operations.
}
// DeleteSourcePocket - Delete a Source
-func (s *sources) DeleteSourcePocket(ctx context.Context, request operations.DeleteSourcePocketRequest) (*operations.DeleteSourcePocketResponse, error) {
+func (s *Sources) DeleteSourcePocket(ctx context.Context, request operations.DeleteSourcePocketRequest) (*operations.DeleteSourcePocketResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pocket", request, nil)
if err != nil {
@@ -19587,7 +19602,7 @@ func (s *sources) DeleteSourcePocket(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19625,7 +19640,7 @@ func (s *sources) DeleteSourcePocket(ctx context.Context, request operations.Del
}
// DeleteSourcePokeapi - Delete a Source
-func (s *sources) DeleteSourcePokeapi(ctx context.Context, request operations.DeleteSourcePokeapiRequest) (*operations.DeleteSourcePokeapiResponse, error) {
+func (s *Sources) DeleteSourcePokeapi(ctx context.Context, request operations.DeleteSourcePokeapiRequest) (*operations.DeleteSourcePokeapiResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pokeapi", request, nil)
if err != nil {
@@ -19637,7 +19652,7 @@ func (s *sources) DeleteSourcePokeapi(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19675,7 +19690,7 @@ func (s *sources) DeleteSourcePokeapi(ctx context.Context, request operations.De
}
// DeleteSourcePolygonStockAPI - Delete a Source
-func (s *sources) DeleteSourcePolygonStockAPI(ctx context.Context, request operations.DeleteSourcePolygonStockAPIRequest) (*operations.DeleteSourcePolygonStockAPIResponse, error) {
+func (s *Sources) DeleteSourcePolygonStockAPI(ctx context.Context, request operations.DeleteSourcePolygonStockAPIRequest) (*operations.DeleteSourcePolygonStockAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PolygonStockApi", request, nil)
if err != nil {
@@ -19687,7 +19702,7 @@ func (s *sources) DeleteSourcePolygonStockAPI(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19725,7 +19740,7 @@ func (s *sources) DeleteSourcePolygonStockAPI(ctx context.Context, request opera
}
// DeleteSourcePostgres - Delete a Source
-func (s *sources) DeleteSourcePostgres(ctx context.Context, request operations.DeleteSourcePostgresRequest) (*operations.DeleteSourcePostgresResponse, error) {
+func (s *Sources) DeleteSourcePostgres(ctx context.Context, request operations.DeleteSourcePostgresRequest) (*operations.DeleteSourcePostgresResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Postgres", request, nil)
if err != nil {
@@ -19737,7 +19752,7 @@ func (s *sources) DeleteSourcePostgres(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19775,7 +19790,7 @@ func (s *sources) DeleteSourcePostgres(ctx context.Context, request operations.D
}
// DeleteSourcePosthog - Delete a Source
-func (s *sources) DeleteSourcePosthog(ctx context.Context, request operations.DeleteSourcePosthogRequest) (*operations.DeleteSourcePosthogResponse, error) {
+func (s *Sources) DeleteSourcePosthog(ctx context.Context, request operations.DeleteSourcePosthogRequest) (*operations.DeleteSourcePosthogResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Posthog", request, nil)
if err != nil {
@@ -19787,7 +19802,7 @@ func (s *sources) DeleteSourcePosthog(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19825,7 +19840,7 @@ func (s *sources) DeleteSourcePosthog(ctx context.Context, request operations.De
}
// DeleteSourcePostmarkapp - Delete a Source
-func (s *sources) DeleteSourcePostmarkapp(ctx context.Context, request operations.DeleteSourcePostmarkappRequest) (*operations.DeleteSourcePostmarkappResponse, error) {
+func (s *Sources) DeleteSourcePostmarkapp(ctx context.Context, request operations.DeleteSourcePostmarkappRequest) (*operations.DeleteSourcePostmarkappResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Postmarkapp", request, nil)
if err != nil {
@@ -19837,7 +19852,7 @@ func (s *sources) DeleteSourcePostmarkapp(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19875,7 +19890,7 @@ func (s *sources) DeleteSourcePostmarkapp(ctx context.Context, request operation
}
// DeleteSourcePrestashop - Delete a Source
-func (s *sources) DeleteSourcePrestashop(ctx context.Context, request operations.DeleteSourcePrestashopRequest) (*operations.DeleteSourcePrestashopResponse, error) {
+func (s *Sources) DeleteSourcePrestashop(ctx context.Context, request operations.DeleteSourcePrestashopRequest) (*operations.DeleteSourcePrestashopResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Prestashop", request, nil)
if err != nil {
@@ -19887,7 +19902,7 @@ func (s *sources) DeleteSourcePrestashop(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19925,7 +19940,7 @@ func (s *sources) DeleteSourcePrestashop(ctx context.Context, request operations
}
// DeleteSourcePunkAPI - Delete a Source
-func (s *sources) DeleteSourcePunkAPI(ctx context.Context, request operations.DeleteSourcePunkAPIRequest) (*operations.DeleteSourcePunkAPIResponse, error) {
+func (s *Sources) DeleteSourcePunkAPI(ctx context.Context, request operations.DeleteSourcePunkAPIRequest) (*operations.DeleteSourcePunkAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PunkApi", request, nil)
if err != nil {
@@ -19937,7 +19952,7 @@ func (s *sources) DeleteSourcePunkAPI(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -19975,7 +19990,7 @@ func (s *sources) DeleteSourcePunkAPI(ctx context.Context, request operations.De
}
// DeleteSourcePypi - Delete a Source
-func (s *sources) DeleteSourcePypi(ctx context.Context, request operations.DeleteSourcePypiRequest) (*operations.DeleteSourcePypiResponse, error) {
+func (s *Sources) DeleteSourcePypi(ctx context.Context, request operations.DeleteSourcePypiRequest) (*operations.DeleteSourcePypiResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pypi", request, nil)
if err != nil {
@@ -19987,7 +20002,7 @@ func (s *sources) DeleteSourcePypi(ctx context.Context, request operations.Delet
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20025,7 +20040,7 @@ func (s *sources) DeleteSourcePypi(ctx context.Context, request operations.Delet
}
// DeleteSourceQualaroo - Delete a Source
-func (s *sources) DeleteSourceQualaroo(ctx context.Context, request operations.DeleteSourceQualarooRequest) (*operations.DeleteSourceQualarooResponse, error) {
+func (s *Sources) DeleteSourceQualaroo(ctx context.Context, request operations.DeleteSourceQualarooRequest) (*operations.DeleteSourceQualarooResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Qualaroo", request, nil)
if err != nil {
@@ -20037,7 +20052,7 @@ func (s *sources) DeleteSourceQualaroo(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20075,7 +20090,7 @@ func (s *sources) DeleteSourceQualaroo(ctx context.Context, request operations.D
}
// DeleteSourceQuickbooks - Delete a Source
-func (s *sources) DeleteSourceQuickbooks(ctx context.Context, request operations.DeleteSourceQuickbooksRequest) (*operations.DeleteSourceQuickbooksResponse, error) {
+func (s *Sources) DeleteSourceQuickbooks(ctx context.Context, request operations.DeleteSourceQuickbooksRequest) (*operations.DeleteSourceQuickbooksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Quickbooks", request, nil)
if err != nil {
@@ -20087,7 +20102,7 @@ func (s *sources) DeleteSourceQuickbooks(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20125,7 +20140,7 @@ func (s *sources) DeleteSourceQuickbooks(ctx context.Context, request operations
}
// DeleteSourceRailz - Delete a Source
-func (s *sources) DeleteSourceRailz(ctx context.Context, request operations.DeleteSourceRailzRequest) (*operations.DeleteSourceRailzResponse, error) {
+func (s *Sources) DeleteSourceRailz(ctx context.Context, request operations.DeleteSourceRailzRequest) (*operations.DeleteSourceRailzResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Railz", request, nil)
if err != nil {
@@ -20137,7 +20152,7 @@ func (s *sources) DeleteSourceRailz(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20175,7 +20190,7 @@ func (s *sources) DeleteSourceRailz(ctx context.Context, request operations.Dele
}
// DeleteSourceRecharge - Delete a Source
-func (s *sources) DeleteSourceRecharge(ctx context.Context, request operations.DeleteSourceRechargeRequest) (*operations.DeleteSourceRechargeResponse, error) {
+func (s *Sources) DeleteSourceRecharge(ctx context.Context, request operations.DeleteSourceRechargeRequest) (*operations.DeleteSourceRechargeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recharge", request, nil)
if err != nil {
@@ -20187,7 +20202,7 @@ func (s *sources) DeleteSourceRecharge(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20225,7 +20240,7 @@ func (s *sources) DeleteSourceRecharge(ctx context.Context, request operations.D
}
// DeleteSourceRecreation - Delete a Source
-func (s *sources) DeleteSourceRecreation(ctx context.Context, request operations.DeleteSourceRecreationRequest) (*operations.DeleteSourceRecreationResponse, error) {
+func (s *Sources) DeleteSourceRecreation(ctx context.Context, request operations.DeleteSourceRecreationRequest) (*operations.DeleteSourceRecreationResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recreation", request, nil)
if err != nil {
@@ -20237,7 +20252,7 @@ func (s *sources) DeleteSourceRecreation(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20275,7 +20290,7 @@ func (s *sources) DeleteSourceRecreation(ctx context.Context, request operations
}
// DeleteSourceRecruitee - Delete a Source
-func (s *sources) DeleteSourceRecruitee(ctx context.Context, request operations.DeleteSourceRecruiteeRequest) (*operations.DeleteSourceRecruiteeResponse, error) {
+func (s *Sources) DeleteSourceRecruitee(ctx context.Context, request operations.DeleteSourceRecruiteeRequest) (*operations.DeleteSourceRecruiteeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recruitee", request, nil)
if err != nil {
@@ -20287,7 +20302,7 @@ func (s *sources) DeleteSourceRecruitee(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20325,7 +20340,7 @@ func (s *sources) DeleteSourceRecruitee(ctx context.Context, request operations.
}
// DeleteSourceRecurly - Delete a Source
-func (s *sources) DeleteSourceRecurly(ctx context.Context, request operations.DeleteSourceRecurlyRequest) (*operations.DeleteSourceRecurlyResponse, error) {
+func (s *Sources) DeleteSourceRecurly(ctx context.Context, request operations.DeleteSourceRecurlyRequest) (*operations.DeleteSourceRecurlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recurly", request, nil)
if err != nil {
@@ -20337,7 +20352,7 @@ func (s *sources) DeleteSourceRecurly(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20375,7 +20390,7 @@ func (s *sources) DeleteSourceRecurly(ctx context.Context, request operations.De
}
// DeleteSourceRedshift - Delete a Source
-func (s *sources) DeleteSourceRedshift(ctx context.Context, request operations.DeleteSourceRedshiftRequest) (*operations.DeleteSourceRedshiftResponse, error) {
+func (s *Sources) DeleteSourceRedshift(ctx context.Context, request operations.DeleteSourceRedshiftRequest) (*operations.DeleteSourceRedshiftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Redshift", request, nil)
if err != nil {
@@ -20387,7 +20402,7 @@ func (s *sources) DeleteSourceRedshift(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20425,7 +20440,7 @@ func (s *sources) DeleteSourceRedshift(ctx context.Context, request operations.D
}
// DeleteSourceRetently - Delete a Source
-func (s *sources) DeleteSourceRetently(ctx context.Context, request operations.DeleteSourceRetentlyRequest) (*operations.DeleteSourceRetentlyResponse, error) {
+func (s *Sources) DeleteSourceRetently(ctx context.Context, request operations.DeleteSourceRetentlyRequest) (*operations.DeleteSourceRetentlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Retently", request, nil)
if err != nil {
@@ -20437,7 +20452,7 @@ func (s *sources) DeleteSourceRetently(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20475,7 +20490,7 @@ func (s *sources) DeleteSourceRetently(ctx context.Context, request operations.D
}
// DeleteSourceRkiCovid - Delete a Source
-func (s *sources) DeleteSourceRkiCovid(ctx context.Context, request operations.DeleteSourceRkiCovidRequest) (*operations.DeleteSourceRkiCovidResponse, error) {
+func (s *Sources) DeleteSourceRkiCovid(ctx context.Context, request operations.DeleteSourceRkiCovidRequest) (*operations.DeleteSourceRkiCovidResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#RkiCovid", request, nil)
if err != nil {
@@ -20487,7 +20502,7 @@ func (s *sources) DeleteSourceRkiCovid(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20525,7 +20540,7 @@ func (s *sources) DeleteSourceRkiCovid(ctx context.Context, request operations.D
}
// DeleteSourceRss - Delete a Source
-func (s *sources) DeleteSourceRss(ctx context.Context, request operations.DeleteSourceRssRequest) (*operations.DeleteSourceRssResponse, error) {
+func (s *Sources) DeleteSourceRss(ctx context.Context, request operations.DeleteSourceRssRequest) (*operations.DeleteSourceRssResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Rss", request, nil)
if err != nil {
@@ -20537,7 +20552,7 @@ func (s *sources) DeleteSourceRss(ctx context.Context, request operations.Delete
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20575,7 +20590,7 @@ func (s *sources) DeleteSourceRss(ctx context.Context, request operations.Delete
}
// DeleteSourceS3 - Delete a Source
-func (s *sources) DeleteSourceS3(ctx context.Context, request operations.DeleteSourceS3Request) (*operations.DeleteSourceS3Response, error) {
+func (s *Sources) DeleteSourceS3(ctx context.Context, request operations.DeleteSourceS3Request) (*operations.DeleteSourceS3Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#S3", request, nil)
if err != nil {
@@ -20587,7 +20602,7 @@ func (s *sources) DeleteSourceS3(ctx context.Context, request operations.DeleteS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20625,7 +20640,7 @@ func (s *sources) DeleteSourceS3(ctx context.Context, request operations.DeleteS
}
// DeleteSourceSalesforce - Delete a Source
-func (s *sources) DeleteSourceSalesforce(ctx context.Context, request operations.DeleteSourceSalesforceRequest) (*operations.DeleteSourceSalesforceResponse, error) {
+func (s *Sources) DeleteSourceSalesforce(ctx context.Context, request operations.DeleteSourceSalesforceRequest) (*operations.DeleteSourceSalesforceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Salesforce", request, nil)
if err != nil {
@@ -20637,7 +20652,7 @@ func (s *sources) DeleteSourceSalesforce(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20675,7 +20690,7 @@ func (s *sources) DeleteSourceSalesforce(ctx context.Context, request operations
}
// DeleteSourceSalesloft - Delete a Source
-func (s *sources) DeleteSourceSalesloft(ctx context.Context, request operations.DeleteSourceSalesloftRequest) (*operations.DeleteSourceSalesloftResponse, error) {
+func (s *Sources) DeleteSourceSalesloft(ctx context.Context, request operations.DeleteSourceSalesloftRequest) (*operations.DeleteSourceSalesloftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Salesloft", request, nil)
if err != nil {
@@ -20687,7 +20702,7 @@ func (s *sources) DeleteSourceSalesloft(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20725,7 +20740,7 @@ func (s *sources) DeleteSourceSalesloft(ctx context.Context, request operations.
}
// DeleteSourceSapFieldglass - Delete a Source
-func (s *sources) DeleteSourceSapFieldglass(ctx context.Context, request operations.DeleteSourceSapFieldglassRequest) (*operations.DeleteSourceSapFieldglassResponse, error) {
+func (s *Sources) DeleteSourceSapFieldglass(ctx context.Context, request operations.DeleteSourceSapFieldglassRequest) (*operations.DeleteSourceSapFieldglassResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SapFieldglass", request, nil)
if err != nil {
@@ -20737,7 +20752,7 @@ func (s *sources) DeleteSourceSapFieldglass(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20775,7 +20790,7 @@ func (s *sources) DeleteSourceSapFieldglass(ctx context.Context, request operati
}
// DeleteSourceSecoda - Delete a Source
-func (s *sources) DeleteSourceSecoda(ctx context.Context, request operations.DeleteSourceSecodaRequest) (*operations.DeleteSourceSecodaResponse, error) {
+func (s *Sources) DeleteSourceSecoda(ctx context.Context, request operations.DeleteSourceSecodaRequest) (*operations.DeleteSourceSecodaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Secoda", request, nil)
if err != nil {
@@ -20787,7 +20802,7 @@ func (s *sources) DeleteSourceSecoda(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20825,7 +20840,7 @@ func (s *sources) DeleteSourceSecoda(ctx context.Context, request operations.Del
}
// DeleteSourceSendgrid - Delete a Source
-func (s *sources) DeleteSourceSendgrid(ctx context.Context, request operations.DeleteSourceSendgridRequest) (*operations.DeleteSourceSendgridResponse, error) {
+func (s *Sources) DeleteSourceSendgrid(ctx context.Context, request operations.DeleteSourceSendgridRequest) (*operations.DeleteSourceSendgridResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sendgrid", request, nil)
if err != nil {
@@ -20837,7 +20852,7 @@ func (s *sources) DeleteSourceSendgrid(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20875,7 +20890,7 @@ func (s *sources) DeleteSourceSendgrid(ctx context.Context, request operations.D
}
// DeleteSourceSendinblue - Delete a Source
-func (s *sources) DeleteSourceSendinblue(ctx context.Context, request operations.DeleteSourceSendinblueRequest) (*operations.DeleteSourceSendinblueResponse, error) {
+func (s *Sources) DeleteSourceSendinblue(ctx context.Context, request operations.DeleteSourceSendinblueRequest) (*operations.DeleteSourceSendinblueResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sendinblue", request, nil)
if err != nil {
@@ -20887,7 +20902,7 @@ func (s *sources) DeleteSourceSendinblue(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20925,7 +20940,7 @@ func (s *sources) DeleteSourceSendinblue(ctx context.Context, request operations
}
// DeleteSourceSenseforce - Delete a Source
-func (s *sources) DeleteSourceSenseforce(ctx context.Context, request operations.DeleteSourceSenseforceRequest) (*operations.DeleteSourceSenseforceResponse, error) {
+func (s *Sources) DeleteSourceSenseforce(ctx context.Context, request operations.DeleteSourceSenseforceRequest) (*operations.DeleteSourceSenseforceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Senseforce", request, nil)
if err != nil {
@@ -20937,7 +20952,7 @@ func (s *sources) DeleteSourceSenseforce(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -20975,7 +20990,7 @@ func (s *sources) DeleteSourceSenseforce(ctx context.Context, request operations
}
// DeleteSourceSentry - Delete a Source
-func (s *sources) DeleteSourceSentry(ctx context.Context, request operations.DeleteSourceSentryRequest) (*operations.DeleteSourceSentryResponse, error) {
+func (s *Sources) DeleteSourceSentry(ctx context.Context, request operations.DeleteSourceSentryRequest) (*operations.DeleteSourceSentryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sentry", request, nil)
if err != nil {
@@ -20987,7 +21002,7 @@ func (s *sources) DeleteSourceSentry(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21025,7 +21040,7 @@ func (s *sources) DeleteSourceSentry(ctx context.Context, request operations.Del
}
// DeleteSourceSftp - Delete a Source
-func (s *sources) DeleteSourceSftp(ctx context.Context, request operations.DeleteSourceSftpRequest) (*operations.DeleteSourceSftpResponse, error) {
+func (s *Sources) DeleteSourceSftp(ctx context.Context, request operations.DeleteSourceSftpRequest) (*operations.DeleteSourceSftpResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sftp", request, nil)
if err != nil {
@@ -21037,7 +21052,7 @@ func (s *sources) DeleteSourceSftp(ctx context.Context, request operations.Delet
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21075,7 +21090,7 @@ func (s *sources) DeleteSourceSftp(ctx context.Context, request operations.Delet
}
// DeleteSourceSftpBulk - Delete a Source
-func (s *sources) DeleteSourceSftpBulk(ctx context.Context, request operations.DeleteSourceSftpBulkRequest) (*operations.DeleteSourceSftpBulkResponse, error) {
+func (s *Sources) DeleteSourceSftpBulk(ctx context.Context, request operations.DeleteSourceSftpBulkRequest) (*operations.DeleteSourceSftpBulkResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SftpBulk", request, nil)
if err != nil {
@@ -21087,7 +21102,7 @@ func (s *sources) DeleteSourceSftpBulk(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21125,7 +21140,7 @@ func (s *sources) DeleteSourceSftpBulk(ctx context.Context, request operations.D
}
// DeleteSourceShopify - Delete a Source
-func (s *sources) DeleteSourceShopify(ctx context.Context, request operations.DeleteSourceShopifyRequest) (*operations.DeleteSourceShopifyResponse, error) {
+func (s *Sources) DeleteSourceShopify(ctx context.Context, request operations.DeleteSourceShopifyRequest) (*operations.DeleteSourceShopifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Shopify", request, nil)
if err != nil {
@@ -21137,7 +21152,7 @@ func (s *sources) DeleteSourceShopify(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21175,7 +21190,7 @@ func (s *sources) DeleteSourceShopify(ctx context.Context, request operations.De
}
// DeleteSourceShortio - Delete a Source
-func (s *sources) DeleteSourceShortio(ctx context.Context, request operations.DeleteSourceShortioRequest) (*operations.DeleteSourceShortioResponse, error) {
+func (s *Sources) DeleteSourceShortio(ctx context.Context, request operations.DeleteSourceShortioRequest) (*operations.DeleteSourceShortioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Shortio", request, nil)
if err != nil {
@@ -21187,7 +21202,7 @@ func (s *sources) DeleteSourceShortio(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21225,7 +21240,7 @@ func (s *sources) DeleteSourceShortio(ctx context.Context, request operations.De
}
// DeleteSourceSlack - Delete a Source
-func (s *sources) DeleteSourceSlack(ctx context.Context, request operations.DeleteSourceSlackRequest) (*operations.DeleteSourceSlackResponse, error) {
+func (s *Sources) DeleteSourceSlack(ctx context.Context, request operations.DeleteSourceSlackRequest) (*operations.DeleteSourceSlackResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Slack", request, nil)
if err != nil {
@@ -21237,7 +21252,7 @@ func (s *sources) DeleteSourceSlack(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21275,7 +21290,7 @@ func (s *sources) DeleteSourceSlack(ctx context.Context, request operations.Dele
}
// DeleteSourceSmaily - Delete a Source
-func (s *sources) DeleteSourceSmaily(ctx context.Context, request operations.DeleteSourceSmailyRequest) (*operations.DeleteSourceSmailyResponse, error) {
+func (s *Sources) DeleteSourceSmaily(ctx context.Context, request operations.DeleteSourceSmailyRequest) (*operations.DeleteSourceSmailyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Smaily", request, nil)
if err != nil {
@@ -21287,7 +21302,7 @@ func (s *sources) DeleteSourceSmaily(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21325,7 +21340,7 @@ func (s *sources) DeleteSourceSmaily(ctx context.Context, request operations.Del
}
// DeleteSourceSmartengage - Delete a Source
-func (s *sources) DeleteSourceSmartengage(ctx context.Context, request operations.DeleteSourceSmartengageRequest) (*operations.DeleteSourceSmartengageResponse, error) {
+func (s *Sources) DeleteSourceSmartengage(ctx context.Context, request operations.DeleteSourceSmartengageRequest) (*operations.DeleteSourceSmartengageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Smartengage", request, nil)
if err != nil {
@@ -21337,7 +21352,7 @@ func (s *sources) DeleteSourceSmartengage(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21375,7 +21390,7 @@ func (s *sources) DeleteSourceSmartengage(ctx context.Context, request operation
}
// DeleteSourceSmartsheets - Delete a Source
-func (s *sources) DeleteSourceSmartsheets(ctx context.Context, request operations.DeleteSourceSmartsheetsRequest) (*operations.DeleteSourceSmartsheetsResponse, error) {
+func (s *Sources) DeleteSourceSmartsheets(ctx context.Context, request operations.DeleteSourceSmartsheetsRequest) (*operations.DeleteSourceSmartsheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Smartsheets", request, nil)
if err != nil {
@@ -21387,7 +21402,7 @@ func (s *sources) DeleteSourceSmartsheets(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21425,7 +21440,7 @@ func (s *sources) DeleteSourceSmartsheets(ctx context.Context, request operation
}
// DeleteSourceSnapchatMarketing - Delete a Source
-func (s *sources) DeleteSourceSnapchatMarketing(ctx context.Context, request operations.DeleteSourceSnapchatMarketingRequest) (*operations.DeleteSourceSnapchatMarketingResponse, error) {
+func (s *Sources) DeleteSourceSnapchatMarketing(ctx context.Context, request operations.DeleteSourceSnapchatMarketingRequest) (*operations.DeleteSourceSnapchatMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SnapchatMarketing", request, nil)
if err != nil {
@@ -21437,7 +21452,7 @@ func (s *sources) DeleteSourceSnapchatMarketing(ctx context.Context, request ope
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21475,7 +21490,7 @@ func (s *sources) DeleteSourceSnapchatMarketing(ctx context.Context, request ope
}
// DeleteSourceSnowflake - Delete a Source
-func (s *sources) DeleteSourceSnowflake(ctx context.Context, request operations.DeleteSourceSnowflakeRequest) (*operations.DeleteSourceSnowflakeResponse, error) {
+func (s *Sources) DeleteSourceSnowflake(ctx context.Context, request operations.DeleteSourceSnowflakeRequest) (*operations.DeleteSourceSnowflakeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Snowflake", request, nil)
if err != nil {
@@ -21487,7 +21502,7 @@ func (s *sources) DeleteSourceSnowflake(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21525,7 +21540,7 @@ func (s *sources) DeleteSourceSnowflake(ctx context.Context, request operations.
}
// DeleteSourceSonarCloud - Delete a Source
-func (s *sources) DeleteSourceSonarCloud(ctx context.Context, request operations.DeleteSourceSonarCloudRequest) (*operations.DeleteSourceSonarCloudResponse, error) {
+func (s *Sources) DeleteSourceSonarCloud(ctx context.Context, request operations.DeleteSourceSonarCloudRequest) (*operations.DeleteSourceSonarCloudResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SonarCloud", request, nil)
if err != nil {
@@ -21537,7 +21552,7 @@ func (s *sources) DeleteSourceSonarCloud(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21575,7 +21590,7 @@ func (s *sources) DeleteSourceSonarCloud(ctx context.Context, request operations
}
// DeleteSourceSpacexAPI - Delete a Source
-func (s *sources) DeleteSourceSpacexAPI(ctx context.Context, request operations.DeleteSourceSpacexAPIRequest) (*operations.DeleteSourceSpacexAPIResponse, error) {
+func (s *Sources) DeleteSourceSpacexAPI(ctx context.Context, request operations.DeleteSourceSpacexAPIRequest) (*operations.DeleteSourceSpacexAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SpacexApi", request, nil)
if err != nil {
@@ -21587,7 +21602,7 @@ func (s *sources) DeleteSourceSpacexAPI(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21625,7 +21640,7 @@ func (s *sources) DeleteSourceSpacexAPI(ctx context.Context, request operations.
}
// DeleteSourceSquare - Delete a Source
-func (s *sources) DeleteSourceSquare(ctx context.Context, request operations.DeleteSourceSquareRequest) (*operations.DeleteSourceSquareResponse, error) {
+func (s *Sources) DeleteSourceSquare(ctx context.Context, request operations.DeleteSourceSquareRequest) (*operations.DeleteSourceSquareResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Square", request, nil)
if err != nil {
@@ -21637,7 +21652,7 @@ func (s *sources) DeleteSourceSquare(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21675,7 +21690,7 @@ func (s *sources) DeleteSourceSquare(ctx context.Context, request operations.Del
}
// DeleteSourceStrava - Delete a Source
-func (s *sources) DeleteSourceStrava(ctx context.Context, request operations.DeleteSourceStravaRequest) (*operations.DeleteSourceStravaResponse, error) {
+func (s *Sources) DeleteSourceStrava(ctx context.Context, request operations.DeleteSourceStravaRequest) (*operations.DeleteSourceStravaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Strava", request, nil)
if err != nil {
@@ -21687,7 +21702,7 @@ func (s *sources) DeleteSourceStrava(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21725,7 +21740,7 @@ func (s *sources) DeleteSourceStrava(ctx context.Context, request operations.Del
}
// DeleteSourceStripe - Delete a Source
-func (s *sources) DeleteSourceStripe(ctx context.Context, request operations.DeleteSourceStripeRequest) (*operations.DeleteSourceStripeResponse, error) {
+func (s *Sources) DeleteSourceStripe(ctx context.Context, request operations.DeleteSourceStripeRequest) (*operations.DeleteSourceStripeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Stripe", request, nil)
if err != nil {
@@ -21737,7 +21752,7 @@ func (s *sources) DeleteSourceStripe(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21775,7 +21790,7 @@ func (s *sources) DeleteSourceStripe(ctx context.Context, request operations.Del
}
// DeleteSourceSurveySparrow - Delete a Source
-func (s *sources) DeleteSourceSurveySparrow(ctx context.Context, request operations.DeleteSourceSurveySparrowRequest) (*operations.DeleteSourceSurveySparrowResponse, error) {
+func (s *Sources) DeleteSourceSurveySparrow(ctx context.Context, request operations.DeleteSourceSurveySparrowRequest) (*operations.DeleteSourceSurveySparrowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SurveySparrow", request, nil)
if err != nil {
@@ -21787,7 +21802,7 @@ func (s *sources) DeleteSourceSurveySparrow(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21825,7 +21840,7 @@ func (s *sources) DeleteSourceSurveySparrow(ctx context.Context, request operati
}
// DeleteSourceSurveymonkey - Delete a Source
-func (s *sources) DeleteSourceSurveymonkey(ctx context.Context, request operations.DeleteSourceSurveymonkeyRequest) (*operations.DeleteSourceSurveymonkeyResponse, error) {
+func (s *Sources) DeleteSourceSurveymonkey(ctx context.Context, request operations.DeleteSourceSurveymonkeyRequest) (*operations.DeleteSourceSurveymonkeyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Surveymonkey", request, nil)
if err != nil {
@@ -21837,7 +21852,7 @@ func (s *sources) DeleteSourceSurveymonkey(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21875,7 +21890,7 @@ func (s *sources) DeleteSourceSurveymonkey(ctx context.Context, request operatio
}
// DeleteSourceTempo - Delete a Source
-func (s *sources) DeleteSourceTempo(ctx context.Context, request operations.DeleteSourceTempoRequest) (*operations.DeleteSourceTempoResponse, error) {
+func (s *Sources) DeleteSourceTempo(ctx context.Context, request operations.DeleteSourceTempoRequest) (*operations.DeleteSourceTempoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Tempo", request, nil)
if err != nil {
@@ -21887,7 +21902,7 @@ func (s *sources) DeleteSourceTempo(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21925,7 +21940,7 @@ func (s *sources) DeleteSourceTempo(ctx context.Context, request operations.Dele
}
// DeleteSourceTheGuardianAPI - Delete a Source
-func (s *sources) DeleteSourceTheGuardianAPI(ctx context.Context, request operations.DeleteSourceTheGuardianAPIRequest) (*operations.DeleteSourceTheGuardianAPIResponse, error) {
+func (s *Sources) DeleteSourceTheGuardianAPI(ctx context.Context, request operations.DeleteSourceTheGuardianAPIRequest) (*operations.DeleteSourceTheGuardianAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TheGuardianApi", request, nil)
if err != nil {
@@ -21937,7 +21952,7 @@ func (s *sources) DeleteSourceTheGuardianAPI(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -21975,7 +21990,7 @@ func (s *sources) DeleteSourceTheGuardianAPI(ctx context.Context, request operat
}
// DeleteSourceTiktokMarketing - Delete a Source
-func (s *sources) DeleteSourceTiktokMarketing(ctx context.Context, request operations.DeleteSourceTiktokMarketingRequest) (*operations.DeleteSourceTiktokMarketingResponse, error) {
+func (s *Sources) DeleteSourceTiktokMarketing(ctx context.Context, request operations.DeleteSourceTiktokMarketingRequest) (*operations.DeleteSourceTiktokMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TiktokMarketing", request, nil)
if err != nil {
@@ -21987,7 +22002,7 @@ func (s *sources) DeleteSourceTiktokMarketing(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22025,7 +22040,7 @@ func (s *sources) DeleteSourceTiktokMarketing(ctx context.Context, request opera
}
// DeleteSourceTodoist - Delete a Source
-func (s *sources) DeleteSourceTodoist(ctx context.Context, request operations.DeleteSourceTodoistRequest) (*operations.DeleteSourceTodoistResponse, error) {
+func (s *Sources) DeleteSourceTodoist(ctx context.Context, request operations.DeleteSourceTodoistRequest) (*operations.DeleteSourceTodoistResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Todoist", request, nil)
if err != nil {
@@ -22037,7 +22052,7 @@ func (s *sources) DeleteSourceTodoist(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22075,7 +22090,7 @@ func (s *sources) DeleteSourceTodoist(ctx context.Context, request operations.De
}
// DeleteSourceTrello - Delete a Source
-func (s *sources) DeleteSourceTrello(ctx context.Context, request operations.DeleteSourceTrelloRequest) (*operations.DeleteSourceTrelloResponse, error) {
+func (s *Sources) DeleteSourceTrello(ctx context.Context, request operations.DeleteSourceTrelloRequest) (*operations.DeleteSourceTrelloResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Trello", request, nil)
if err != nil {
@@ -22087,7 +22102,7 @@ func (s *sources) DeleteSourceTrello(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22125,7 +22140,7 @@ func (s *sources) DeleteSourceTrello(ctx context.Context, request operations.Del
}
// DeleteSourceTrustpilot - Delete a Source
-func (s *sources) DeleteSourceTrustpilot(ctx context.Context, request operations.DeleteSourceTrustpilotRequest) (*operations.DeleteSourceTrustpilotResponse, error) {
+func (s *Sources) DeleteSourceTrustpilot(ctx context.Context, request operations.DeleteSourceTrustpilotRequest) (*operations.DeleteSourceTrustpilotResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Trustpilot", request, nil)
if err != nil {
@@ -22137,7 +22152,7 @@ func (s *sources) DeleteSourceTrustpilot(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22175,7 +22190,7 @@ func (s *sources) DeleteSourceTrustpilot(ctx context.Context, request operations
}
// DeleteSourceTvmazeSchedule - Delete a Source
-func (s *sources) DeleteSourceTvmazeSchedule(ctx context.Context, request operations.DeleteSourceTvmazeScheduleRequest) (*operations.DeleteSourceTvmazeScheduleResponse, error) {
+func (s *Sources) DeleteSourceTvmazeSchedule(ctx context.Context, request operations.DeleteSourceTvmazeScheduleRequest) (*operations.DeleteSourceTvmazeScheduleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TvmazeSchedule", request, nil)
if err != nil {
@@ -22187,7 +22202,7 @@ func (s *sources) DeleteSourceTvmazeSchedule(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22225,7 +22240,7 @@ func (s *sources) DeleteSourceTvmazeSchedule(ctx context.Context, request operat
}
// DeleteSourceTwilio - Delete a Source
-func (s *sources) DeleteSourceTwilio(ctx context.Context, request operations.DeleteSourceTwilioRequest) (*operations.DeleteSourceTwilioResponse, error) {
+func (s *Sources) DeleteSourceTwilio(ctx context.Context, request operations.DeleteSourceTwilioRequest) (*operations.DeleteSourceTwilioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Twilio", request, nil)
if err != nil {
@@ -22237,7 +22252,7 @@ func (s *sources) DeleteSourceTwilio(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22275,7 +22290,7 @@ func (s *sources) DeleteSourceTwilio(ctx context.Context, request operations.Del
}
// DeleteSourceTwilioTaskrouter - Delete a Source
-func (s *sources) DeleteSourceTwilioTaskrouter(ctx context.Context, request operations.DeleteSourceTwilioTaskrouterRequest) (*operations.DeleteSourceTwilioTaskrouterResponse, error) {
+func (s *Sources) DeleteSourceTwilioTaskrouter(ctx context.Context, request operations.DeleteSourceTwilioTaskrouterRequest) (*operations.DeleteSourceTwilioTaskrouterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TwilioTaskrouter", request, nil)
if err != nil {
@@ -22287,7 +22302,7 @@ func (s *sources) DeleteSourceTwilioTaskrouter(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22325,7 +22340,7 @@ func (s *sources) DeleteSourceTwilioTaskrouter(ctx context.Context, request oper
}
// DeleteSourceTwitter - Delete a Source
-func (s *sources) DeleteSourceTwitter(ctx context.Context, request operations.DeleteSourceTwitterRequest) (*operations.DeleteSourceTwitterResponse, error) {
+func (s *Sources) DeleteSourceTwitter(ctx context.Context, request operations.DeleteSourceTwitterRequest) (*operations.DeleteSourceTwitterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Twitter", request, nil)
if err != nil {
@@ -22337,7 +22352,7 @@ func (s *sources) DeleteSourceTwitter(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22375,7 +22390,7 @@ func (s *sources) DeleteSourceTwitter(ctx context.Context, request operations.De
}
// DeleteSourceTypeform - Delete a Source
-func (s *sources) DeleteSourceTypeform(ctx context.Context, request operations.DeleteSourceTypeformRequest) (*operations.DeleteSourceTypeformResponse, error) {
+func (s *Sources) DeleteSourceTypeform(ctx context.Context, request operations.DeleteSourceTypeformRequest) (*operations.DeleteSourceTypeformResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Typeform", request, nil)
if err != nil {
@@ -22387,7 +22402,7 @@ func (s *sources) DeleteSourceTypeform(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22425,7 +22440,7 @@ func (s *sources) DeleteSourceTypeform(ctx context.Context, request operations.D
}
// DeleteSourceUsCensus - Delete a Source
-func (s *sources) DeleteSourceUsCensus(ctx context.Context, request operations.DeleteSourceUsCensusRequest) (*operations.DeleteSourceUsCensusResponse, error) {
+func (s *Sources) DeleteSourceUsCensus(ctx context.Context, request operations.DeleteSourceUsCensusRequest) (*operations.DeleteSourceUsCensusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#UsCensus", request, nil)
if err != nil {
@@ -22437,7 +22452,7 @@ func (s *sources) DeleteSourceUsCensus(ctx context.Context, request operations.D
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22475,7 +22490,7 @@ func (s *sources) DeleteSourceUsCensus(ctx context.Context, request operations.D
}
// DeleteSourceVantage - Delete a Source
-func (s *sources) DeleteSourceVantage(ctx context.Context, request operations.DeleteSourceVantageRequest) (*operations.DeleteSourceVantageResponse, error) {
+func (s *Sources) DeleteSourceVantage(ctx context.Context, request operations.DeleteSourceVantageRequest) (*operations.DeleteSourceVantageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Vantage", request, nil)
if err != nil {
@@ -22487,7 +22502,7 @@ func (s *sources) DeleteSourceVantage(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22525,7 +22540,7 @@ func (s *sources) DeleteSourceVantage(ctx context.Context, request operations.De
}
// DeleteSourceWebflow - Delete a Source
-func (s *sources) DeleteSourceWebflow(ctx context.Context, request operations.DeleteSourceWebflowRequest) (*operations.DeleteSourceWebflowResponse, error) {
+func (s *Sources) DeleteSourceWebflow(ctx context.Context, request operations.DeleteSourceWebflowRequest) (*operations.DeleteSourceWebflowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Webflow", request, nil)
if err != nil {
@@ -22537,7 +22552,7 @@ func (s *sources) DeleteSourceWebflow(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22575,7 +22590,7 @@ func (s *sources) DeleteSourceWebflow(ctx context.Context, request operations.De
}
// DeleteSourceWhiskyHunter - Delete a Source
-func (s *sources) DeleteSourceWhiskyHunter(ctx context.Context, request operations.DeleteSourceWhiskyHunterRequest) (*operations.DeleteSourceWhiskyHunterResponse, error) {
+func (s *Sources) DeleteSourceWhiskyHunter(ctx context.Context, request operations.DeleteSourceWhiskyHunterRequest) (*operations.DeleteSourceWhiskyHunterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#WhiskyHunter", request, nil)
if err != nil {
@@ -22587,7 +22602,7 @@ func (s *sources) DeleteSourceWhiskyHunter(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22625,7 +22640,7 @@ func (s *sources) DeleteSourceWhiskyHunter(ctx context.Context, request operatio
}
// DeleteSourceWikipediaPageviews - Delete a Source
-func (s *sources) DeleteSourceWikipediaPageviews(ctx context.Context, request operations.DeleteSourceWikipediaPageviewsRequest) (*operations.DeleteSourceWikipediaPageviewsResponse, error) {
+func (s *Sources) DeleteSourceWikipediaPageviews(ctx context.Context, request operations.DeleteSourceWikipediaPageviewsRequest) (*operations.DeleteSourceWikipediaPageviewsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#WikipediaPageviews", request, nil)
if err != nil {
@@ -22637,7 +22652,7 @@ func (s *sources) DeleteSourceWikipediaPageviews(ctx context.Context, request op
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22675,7 +22690,7 @@ func (s *sources) DeleteSourceWikipediaPageviews(ctx context.Context, request op
}
// DeleteSourceWoocommerce - Delete a Source
-func (s *sources) DeleteSourceWoocommerce(ctx context.Context, request operations.DeleteSourceWoocommerceRequest) (*operations.DeleteSourceWoocommerceResponse, error) {
+func (s *Sources) DeleteSourceWoocommerce(ctx context.Context, request operations.DeleteSourceWoocommerceRequest) (*operations.DeleteSourceWoocommerceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Woocommerce", request, nil)
if err != nil {
@@ -22687,7 +22702,7 @@ func (s *sources) DeleteSourceWoocommerce(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22724,10 +22739,10 @@ func (s *sources) DeleteSourceWoocommerce(ctx context.Context, request operation
return res, nil
}
-// DeleteSourceXero - Delete a Source
-func (s *sources) DeleteSourceXero(ctx context.Context, request operations.DeleteSourceXeroRequest) (*operations.DeleteSourceXeroResponse, error) {
+// DeleteSourceXkcd - Delete a Source
+func (s *Sources) DeleteSourceXkcd(ctx context.Context, request operations.DeleteSourceXkcdRequest) (*operations.DeleteSourceXkcdResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Xero", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Xkcd", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -22737,7 +22752,7 @@ func (s *sources) DeleteSourceXero(ctx context.Context, request operations.Delet
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22758,7 +22773,7 @@ func (s *sources) DeleteSourceXero(ctx context.Context, request operations.Delet
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceXeroResponse{
+ res := &operations.DeleteSourceXkcdResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -22774,10 +22789,10 @@ func (s *sources) DeleteSourceXero(ctx context.Context, request operations.Delet
return res, nil
}
-// DeleteSourceXkcd - Delete a Source
-func (s *sources) DeleteSourceXkcd(ctx context.Context, request operations.DeleteSourceXkcdRequest) (*operations.DeleteSourceXkcdResponse, error) {
+// DeleteSourceYandexMetrica - Delete a Source
+func (s *Sources) DeleteSourceYandexMetrica(ctx context.Context, request operations.DeleteSourceYandexMetricaRequest) (*operations.DeleteSourceYandexMetricaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Xkcd", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#YandexMetrica", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -22787,7 +22802,7 @@ func (s *sources) DeleteSourceXkcd(ctx context.Context, request operations.Delet
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22808,7 +22823,7 @@ func (s *sources) DeleteSourceXkcd(ctx context.Context, request operations.Delet
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceXkcdResponse{
+ res := &operations.DeleteSourceYandexMetricaResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -22824,10 +22839,10 @@ func (s *sources) DeleteSourceXkcd(ctx context.Context, request operations.Delet
return res, nil
}
-// DeleteSourceYandexMetrica - Delete a Source
-func (s *sources) DeleteSourceYandexMetrica(ctx context.Context, request operations.DeleteSourceYandexMetricaRequest) (*operations.DeleteSourceYandexMetricaResponse, error) {
+// DeleteSourceYotpo - Delete a Source
+func (s *Sources) DeleteSourceYotpo(ctx context.Context, request operations.DeleteSourceYotpoRequest) (*operations.DeleteSourceYotpoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#YandexMetrica", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Yotpo", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -22837,7 +22852,7 @@ func (s *sources) DeleteSourceYandexMetrica(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22858,7 +22873,7 @@ func (s *sources) DeleteSourceYandexMetrica(ctx context.Context, request operati
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceYandexMetricaResponse{
+ res := &operations.DeleteSourceYotpoResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -22874,10 +22889,10 @@ func (s *sources) DeleteSourceYandexMetrica(ctx context.Context, request operati
return res, nil
}
-// DeleteSourceYotpo - Delete a Source
-func (s *sources) DeleteSourceYotpo(ctx context.Context, request operations.DeleteSourceYotpoRequest) (*operations.DeleteSourceYotpoResponse, error) {
+// DeleteSourceYoutubeAnalytics - Delete a Source
+func (s *Sources) DeleteSourceYoutubeAnalytics(ctx context.Context, request operations.DeleteSourceYoutubeAnalyticsRequest) (*operations.DeleteSourceYoutubeAnalyticsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Yotpo", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#YoutubeAnalytics", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -22887,7 +22902,7 @@ func (s *sources) DeleteSourceYotpo(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22908,7 +22923,7 @@ func (s *sources) DeleteSourceYotpo(ctx context.Context, request operations.Dele
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceYotpoResponse{
+ res := &operations.DeleteSourceYoutubeAnalyticsResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -22924,10 +22939,10 @@ func (s *sources) DeleteSourceYotpo(ctx context.Context, request operations.Dele
return res, nil
}
-// DeleteSourceYounium - Delete a Source
-func (s *sources) DeleteSourceYounium(ctx context.Context, request operations.DeleteSourceYouniumRequest) (*operations.DeleteSourceYouniumResponse, error) {
+// DeleteSourceZendeskChat - Delete a Source
+func (s *Sources) DeleteSourceZendeskChat(ctx context.Context, request operations.DeleteSourceZendeskChatRequest) (*operations.DeleteSourceZendeskChatResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Younium", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskChat", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -22937,7 +22952,7 @@ func (s *sources) DeleteSourceYounium(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -22958,7 +22973,7 @@ func (s *sources) DeleteSourceYounium(ctx context.Context, request operations.De
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.DeleteSourceYouniumResponse{
+ res := &operations.DeleteSourceZendeskChatResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -22974,10 +22989,60 @@ func (s *sources) DeleteSourceYounium(ctx context.Context, request operations.De
return res, nil
}
-// DeleteSourceYoutubeAnalytics - Delete a Source
-func (s *sources) DeleteSourceYoutubeAnalytics(ctx context.Context, request operations.DeleteSourceYoutubeAnalyticsRequest) (*operations.DeleteSourceYoutubeAnalyticsResponse, error) {
+// DeleteSourceZendeskSell - Delete a Source
+func (s *Sources) DeleteSourceZendeskSell(ctx context.Context, request operations.DeleteSourceZendeskSellRequest) (*operations.DeleteSourceZendeskSellResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#YoutubeAnalytics", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskSell", request, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error generating URL: %w", err)
+ }
+
+ req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil)
+ if err != nil {
+ return nil, fmt.Errorf("error creating request: %w", err)
+ }
+ req.Header.Set("Accept", "*/*")
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
+
+ client := s.sdkConfiguration.SecurityClient
+
+ httpRes, err := client.Do(req)
+ if err != nil {
+ return nil, fmt.Errorf("error sending request: %w", err)
+ }
+ if httpRes == nil {
+ return nil, fmt.Errorf("error sending request: no response")
+ }
+
+ rawBody, err := io.ReadAll(httpRes.Body)
+ if err != nil {
+ return nil, fmt.Errorf("error reading response body: %w", err)
+ }
+ httpRes.Body.Close()
+ httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
+
+ contentType := httpRes.Header.Get("Content-Type")
+
+ res := &operations.DeleteSourceZendeskSellResponse{
+ StatusCode: httpRes.StatusCode,
+ ContentType: contentType,
+ RawResponse: httpRes,
+ }
+ switch {
+ case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300:
+ fallthrough
+ case httpRes.StatusCode == 403:
+ fallthrough
+ case httpRes.StatusCode == 404:
+ }
+
+ return res, nil
+}
+
+// DeleteSourceZendeskSunshine - Delete a Source
+func (s *Sources) DeleteSourceZendeskSunshine(ctx context.Context, request operations.DeleteSourceZendeskSunshineRequest) (*operations.DeleteSourceZendeskSunshineResponse, error) {
+ baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskSunshine", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -22987,107 +23052,7 @@ func (s *sources) DeleteSourceYoutubeAnalytics(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
-
- client := s.sdkConfiguration.SecurityClient
-
- httpRes, err := client.Do(req)
- if err != nil {
- return nil, fmt.Errorf("error sending request: %w", err)
- }
- if httpRes == nil {
- return nil, fmt.Errorf("error sending request: no response")
- }
-
- rawBody, err := io.ReadAll(httpRes.Body)
- if err != nil {
- return nil, fmt.Errorf("error reading response body: %w", err)
- }
- httpRes.Body.Close()
- httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
-
- contentType := httpRes.Header.Get("Content-Type")
-
- res := &operations.DeleteSourceYoutubeAnalyticsResponse{
- StatusCode: httpRes.StatusCode,
- ContentType: contentType,
- RawResponse: httpRes,
- }
- switch {
- case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300:
- fallthrough
- case httpRes.StatusCode == 403:
- fallthrough
- case httpRes.StatusCode == 404:
- }
-
- return res, nil
-}
-
-// DeleteSourceZendeskChat - Delete a Source
-func (s *sources) DeleteSourceZendeskChat(ctx context.Context, request operations.DeleteSourceZendeskChatRequest) (*operations.DeleteSourceZendeskChatResponse, error) {
- baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskChat", request, nil)
- if err != nil {
- return nil, fmt.Errorf("error generating URL: %w", err)
- }
-
- req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil)
- if err != nil {
- return nil, fmt.Errorf("error creating request: %w", err)
- }
- req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
-
- client := s.sdkConfiguration.SecurityClient
-
- httpRes, err := client.Do(req)
- if err != nil {
- return nil, fmt.Errorf("error sending request: %w", err)
- }
- if httpRes == nil {
- return nil, fmt.Errorf("error sending request: no response")
- }
-
- rawBody, err := io.ReadAll(httpRes.Body)
- if err != nil {
- return nil, fmt.Errorf("error reading response body: %w", err)
- }
- httpRes.Body.Close()
- httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
-
- contentType := httpRes.Header.Get("Content-Type")
-
- res := &operations.DeleteSourceZendeskChatResponse{
- StatusCode: httpRes.StatusCode,
- ContentType: contentType,
- RawResponse: httpRes,
- }
- switch {
- case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300:
- fallthrough
- case httpRes.StatusCode == 403:
- fallthrough
- case httpRes.StatusCode == 404:
- }
-
- return res, nil
-}
-
-// DeleteSourceZendeskSunshine - Delete a Source
-func (s *sources) DeleteSourceZendeskSunshine(ctx context.Context, request operations.DeleteSourceZendeskSunshineRequest) (*operations.DeleteSourceZendeskSunshineResponse, error) {
- baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskSunshine", request, nil)
- if err != nil {
- return nil, fmt.Errorf("error generating URL: %w", err)
- }
-
- req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil)
- if err != nil {
- return nil, fmt.Errorf("error creating request: %w", err)
- }
- req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23125,7 +23090,7 @@ func (s *sources) DeleteSourceZendeskSunshine(ctx context.Context, request opera
}
// DeleteSourceZendeskSupport - Delete a Source
-func (s *sources) DeleteSourceZendeskSupport(ctx context.Context, request operations.DeleteSourceZendeskSupportRequest) (*operations.DeleteSourceZendeskSupportResponse, error) {
+func (s *Sources) DeleteSourceZendeskSupport(ctx context.Context, request operations.DeleteSourceZendeskSupportRequest) (*operations.DeleteSourceZendeskSupportResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskSupport", request, nil)
if err != nil {
@@ -23137,7 +23102,7 @@ func (s *sources) DeleteSourceZendeskSupport(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23175,7 +23140,7 @@ func (s *sources) DeleteSourceZendeskSupport(ctx context.Context, request operat
}
// DeleteSourceZendeskTalk - Delete a Source
-func (s *sources) DeleteSourceZendeskTalk(ctx context.Context, request operations.DeleteSourceZendeskTalkRequest) (*operations.DeleteSourceZendeskTalkResponse, error) {
+func (s *Sources) DeleteSourceZendeskTalk(ctx context.Context, request operations.DeleteSourceZendeskTalkRequest) (*operations.DeleteSourceZendeskTalkResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskTalk", request, nil)
if err != nil {
@@ -23187,7 +23152,7 @@ func (s *sources) DeleteSourceZendeskTalk(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23225,7 +23190,7 @@ func (s *sources) DeleteSourceZendeskTalk(ctx context.Context, request operation
}
// DeleteSourceZenloop - Delete a Source
-func (s *sources) DeleteSourceZenloop(ctx context.Context, request operations.DeleteSourceZenloopRequest) (*operations.DeleteSourceZenloopResponse, error) {
+func (s *Sources) DeleteSourceZenloop(ctx context.Context, request operations.DeleteSourceZenloopRequest) (*operations.DeleteSourceZenloopResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Zenloop", request, nil)
if err != nil {
@@ -23237,7 +23202,7 @@ func (s *sources) DeleteSourceZenloop(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23275,7 +23240,7 @@ func (s *sources) DeleteSourceZenloop(ctx context.Context, request operations.De
}
// DeleteSourceZohoCrm - Delete a Source
-func (s *sources) DeleteSourceZohoCrm(ctx context.Context, request operations.DeleteSourceZohoCrmRequest) (*operations.DeleteSourceZohoCrmResponse, error) {
+func (s *Sources) DeleteSourceZohoCrm(ctx context.Context, request operations.DeleteSourceZohoCrmRequest) (*operations.DeleteSourceZohoCrmResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZohoCrm", request, nil)
if err != nil {
@@ -23287,7 +23252,7 @@ func (s *sources) DeleteSourceZohoCrm(ctx context.Context, request operations.De
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23325,7 +23290,7 @@ func (s *sources) DeleteSourceZohoCrm(ctx context.Context, request operations.De
}
// DeleteSourceZoom - Delete a Source
-func (s *sources) DeleteSourceZoom(ctx context.Context, request operations.DeleteSourceZoomRequest) (*operations.DeleteSourceZoomResponse, error) {
+func (s *Sources) DeleteSourceZoom(ctx context.Context, request operations.DeleteSourceZoomRequest) (*operations.DeleteSourceZoomResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Zoom", request, nil)
if err != nil {
@@ -23337,7 +23302,7 @@ func (s *sources) DeleteSourceZoom(ctx context.Context, request operations.Delet
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23375,7 +23340,7 @@ func (s *sources) DeleteSourceZoom(ctx context.Context, request operations.Delet
}
// DeleteSourceZuora - Delete a Source
-func (s *sources) DeleteSourceZuora(ctx context.Context, request operations.DeleteSourceZuoraRequest) (*operations.DeleteSourceZuoraResponse, error) {
+func (s *Sources) DeleteSourceZuora(ctx context.Context, request operations.DeleteSourceZuoraRequest) (*operations.DeleteSourceZuoraResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Zuora", request, nil)
if err != nil {
@@ -23387,7 +23352,7 @@ func (s *sources) DeleteSourceZuora(ctx context.Context, request operations.Dele
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23425,7 +23390,7 @@ func (s *sources) DeleteSourceZuora(ctx context.Context, request operations.Dele
}
// GetSource - Get Source details
-func (s *sources) GetSource(ctx context.Context, request operations.GetSourceRequest) (*operations.GetSourceResponse, error) {
+func (s *Sources) GetSource(ctx context.Context, request operations.GetSourceRequest) (*operations.GetSourceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}", request, nil)
if err != nil {
@@ -23437,7 +23402,7 @@ func (s *sources) GetSource(ctx context.Context, request operations.GetSourceReq
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23467,12 +23432,14 @@ func (s *sources) GetSource(ctx context.Context, request operations.GetSourceReq
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -23483,7 +23450,7 @@ func (s *sources) GetSource(ctx context.Context, request operations.GetSourceReq
}
// GetSourceAha - Get Source details
-func (s *sources) GetSourceAha(ctx context.Context, request operations.GetSourceAhaRequest) (*operations.GetSourceAhaResponse, error) {
+func (s *Sources) GetSourceAha(ctx context.Context, request operations.GetSourceAhaRequest) (*operations.GetSourceAhaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Aha", request, nil)
if err != nil {
@@ -23495,7 +23462,7 @@ func (s *sources) GetSourceAha(ctx context.Context, request operations.GetSource
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23525,12 +23492,14 @@ func (s *sources) GetSourceAha(ctx context.Context, request operations.GetSource
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -23541,7 +23510,7 @@ func (s *sources) GetSourceAha(ctx context.Context, request operations.GetSource
}
// GetSourceAircall - Get Source details
-func (s *sources) GetSourceAircall(ctx context.Context, request operations.GetSourceAircallRequest) (*operations.GetSourceAircallResponse, error) {
+func (s *Sources) GetSourceAircall(ctx context.Context, request operations.GetSourceAircallRequest) (*operations.GetSourceAircallResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Aircall", request, nil)
if err != nil {
@@ -23553,7 +23522,7 @@ func (s *sources) GetSourceAircall(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23583,12 +23552,14 @@ func (s *sources) GetSourceAircall(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -23599,7 +23570,7 @@ func (s *sources) GetSourceAircall(ctx context.Context, request operations.GetSo
}
// GetSourceAirtable - Get Source details
-func (s *sources) GetSourceAirtable(ctx context.Context, request operations.GetSourceAirtableRequest) (*operations.GetSourceAirtableResponse, error) {
+func (s *Sources) GetSourceAirtable(ctx context.Context, request operations.GetSourceAirtableRequest) (*operations.GetSourceAirtableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Airtable", request, nil)
if err != nil {
@@ -23611,7 +23582,7 @@ func (s *sources) GetSourceAirtable(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23641,12 +23612,14 @@ func (s *sources) GetSourceAirtable(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -23657,7 +23630,7 @@ func (s *sources) GetSourceAirtable(ctx context.Context, request operations.GetS
}
// GetSourceAlloydb - Get Source details
-func (s *sources) GetSourceAlloydb(ctx context.Context, request operations.GetSourceAlloydbRequest) (*operations.GetSourceAlloydbResponse, error) {
+func (s *Sources) GetSourceAlloydb(ctx context.Context, request operations.GetSourceAlloydbRequest) (*operations.GetSourceAlloydbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Alloydb", request, nil)
if err != nil {
@@ -23669,7 +23642,7 @@ func (s *sources) GetSourceAlloydb(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23699,12 +23672,14 @@ func (s *sources) GetSourceAlloydb(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -23715,7 +23690,7 @@ func (s *sources) GetSourceAlloydb(ctx context.Context, request operations.GetSo
}
// GetSourceAmazonAds - Get Source details
-func (s *sources) GetSourceAmazonAds(ctx context.Context, request operations.GetSourceAmazonAdsRequest) (*operations.GetSourceAmazonAdsResponse, error) {
+func (s *Sources) GetSourceAmazonAds(ctx context.Context, request operations.GetSourceAmazonAdsRequest) (*operations.GetSourceAmazonAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AmazonAds", request, nil)
if err != nil {
@@ -23727,7 +23702,7 @@ func (s *sources) GetSourceAmazonAds(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23757,12 +23732,14 @@ func (s *sources) GetSourceAmazonAds(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -23773,7 +23750,7 @@ func (s *sources) GetSourceAmazonAds(ctx context.Context, request operations.Get
}
// GetSourceAmazonSellerPartner - Get Source details
-func (s *sources) GetSourceAmazonSellerPartner(ctx context.Context, request operations.GetSourceAmazonSellerPartnerRequest) (*operations.GetSourceAmazonSellerPartnerResponse, error) {
+func (s *Sources) GetSourceAmazonSellerPartner(ctx context.Context, request operations.GetSourceAmazonSellerPartnerRequest) (*operations.GetSourceAmazonSellerPartnerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AmazonSellerPartner", request, nil)
if err != nil {
@@ -23785,7 +23762,7 @@ func (s *sources) GetSourceAmazonSellerPartner(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23815,12 +23792,14 @@ func (s *sources) GetSourceAmazonSellerPartner(ctx context.Context, request oper
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -23831,7 +23810,7 @@ func (s *sources) GetSourceAmazonSellerPartner(ctx context.Context, request oper
}
// GetSourceAmazonSqs - Get Source details
-func (s *sources) GetSourceAmazonSqs(ctx context.Context, request operations.GetSourceAmazonSqsRequest) (*operations.GetSourceAmazonSqsResponse, error) {
+func (s *Sources) GetSourceAmazonSqs(ctx context.Context, request operations.GetSourceAmazonSqsRequest) (*operations.GetSourceAmazonSqsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AmazonSqs", request, nil)
if err != nil {
@@ -23843,7 +23822,7 @@ func (s *sources) GetSourceAmazonSqs(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23873,12 +23852,14 @@ func (s *sources) GetSourceAmazonSqs(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -23889,7 +23870,7 @@ func (s *sources) GetSourceAmazonSqs(ctx context.Context, request operations.Get
}
// GetSourceAmplitude - Get Source details
-func (s *sources) GetSourceAmplitude(ctx context.Context, request operations.GetSourceAmplitudeRequest) (*operations.GetSourceAmplitudeResponse, error) {
+func (s *Sources) GetSourceAmplitude(ctx context.Context, request operations.GetSourceAmplitudeRequest) (*operations.GetSourceAmplitudeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Amplitude", request, nil)
if err != nil {
@@ -23901,7 +23882,7 @@ func (s *sources) GetSourceAmplitude(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23931,12 +23912,14 @@ func (s *sources) GetSourceAmplitude(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -23947,7 +23930,7 @@ func (s *sources) GetSourceAmplitude(ctx context.Context, request operations.Get
}
// GetSourceApifyDataset - Get Source details
-func (s *sources) GetSourceApifyDataset(ctx context.Context, request operations.GetSourceApifyDatasetRequest) (*operations.GetSourceApifyDatasetResponse, error) {
+func (s *Sources) GetSourceApifyDataset(ctx context.Context, request operations.GetSourceApifyDatasetRequest) (*operations.GetSourceApifyDatasetResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ApifyDataset", request, nil)
if err != nil {
@@ -23959,7 +23942,7 @@ func (s *sources) GetSourceApifyDataset(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -23989,12 +23972,14 @@ func (s *sources) GetSourceApifyDataset(ctx context.Context, request operations.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24005,7 +23990,7 @@ func (s *sources) GetSourceApifyDataset(ctx context.Context, request operations.
}
// GetSourceAppfollow - Get Source details
-func (s *sources) GetSourceAppfollow(ctx context.Context, request operations.GetSourceAppfollowRequest) (*operations.GetSourceAppfollowResponse, error) {
+func (s *Sources) GetSourceAppfollow(ctx context.Context, request operations.GetSourceAppfollowRequest) (*operations.GetSourceAppfollowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Appfollow", request, nil)
if err != nil {
@@ -24017,7 +24002,7 @@ func (s *sources) GetSourceAppfollow(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24047,12 +24032,14 @@ func (s *sources) GetSourceAppfollow(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24063,7 +24050,7 @@ func (s *sources) GetSourceAppfollow(ctx context.Context, request operations.Get
}
// GetSourceAsana - Get Source details
-func (s *sources) GetSourceAsana(ctx context.Context, request operations.GetSourceAsanaRequest) (*operations.GetSourceAsanaResponse, error) {
+func (s *Sources) GetSourceAsana(ctx context.Context, request operations.GetSourceAsanaRequest) (*operations.GetSourceAsanaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Asana", request, nil)
if err != nil {
@@ -24075,7 +24062,7 @@ func (s *sources) GetSourceAsana(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24105,12 +24092,14 @@ func (s *sources) GetSourceAsana(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24121,7 +24110,7 @@ func (s *sources) GetSourceAsana(ctx context.Context, request operations.GetSour
}
// GetSourceAuth0 - Get Source details
-func (s *sources) GetSourceAuth0(ctx context.Context, request operations.GetSourceAuth0Request) (*operations.GetSourceAuth0Response, error) {
+func (s *Sources) GetSourceAuth0(ctx context.Context, request operations.GetSourceAuth0Request) (*operations.GetSourceAuth0Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Auth0", request, nil)
if err != nil {
@@ -24133,7 +24122,7 @@ func (s *sources) GetSourceAuth0(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24163,12 +24152,14 @@ func (s *sources) GetSourceAuth0(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24179,7 +24170,7 @@ func (s *sources) GetSourceAuth0(ctx context.Context, request operations.GetSour
}
// GetSourceAwsCloudtrail - Get Source details
-func (s *sources) GetSourceAwsCloudtrail(ctx context.Context, request operations.GetSourceAwsCloudtrailRequest) (*operations.GetSourceAwsCloudtrailResponse, error) {
+func (s *Sources) GetSourceAwsCloudtrail(ctx context.Context, request operations.GetSourceAwsCloudtrailRequest) (*operations.GetSourceAwsCloudtrailResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AwsCloudtrail", request, nil)
if err != nil {
@@ -24191,7 +24182,7 @@ func (s *sources) GetSourceAwsCloudtrail(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24221,12 +24212,14 @@ func (s *sources) GetSourceAwsCloudtrail(ctx context.Context, request operations
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24237,7 +24230,7 @@ func (s *sources) GetSourceAwsCloudtrail(ctx context.Context, request operations
}
// GetSourceAzureBlobStorage - Get Source details
-func (s *sources) GetSourceAzureBlobStorage(ctx context.Context, request operations.GetSourceAzureBlobStorageRequest) (*operations.GetSourceAzureBlobStorageResponse, error) {
+func (s *Sources) GetSourceAzureBlobStorage(ctx context.Context, request operations.GetSourceAzureBlobStorageRequest) (*operations.GetSourceAzureBlobStorageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AzureBlobStorage", request, nil)
if err != nil {
@@ -24249,7 +24242,7 @@ func (s *sources) GetSourceAzureBlobStorage(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24279,12 +24272,14 @@ func (s *sources) GetSourceAzureBlobStorage(ctx context.Context, request operati
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24295,7 +24290,7 @@ func (s *sources) GetSourceAzureBlobStorage(ctx context.Context, request operati
}
// GetSourceAzureTable - Get Source details
-func (s *sources) GetSourceAzureTable(ctx context.Context, request operations.GetSourceAzureTableRequest) (*operations.GetSourceAzureTableResponse, error) {
+func (s *Sources) GetSourceAzureTable(ctx context.Context, request operations.GetSourceAzureTableRequest) (*operations.GetSourceAzureTableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AzureTable", request, nil)
if err != nil {
@@ -24307,7 +24302,7 @@ func (s *sources) GetSourceAzureTable(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24337,12 +24332,14 @@ func (s *sources) GetSourceAzureTable(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24353,7 +24350,7 @@ func (s *sources) GetSourceAzureTable(ctx context.Context, request operations.Ge
}
// GetSourceBambooHr - Get Source details
-func (s *sources) GetSourceBambooHr(ctx context.Context, request operations.GetSourceBambooHrRequest) (*operations.GetSourceBambooHrResponse, error) {
+func (s *Sources) GetSourceBambooHr(ctx context.Context, request operations.GetSourceBambooHrRequest) (*operations.GetSourceBambooHrResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#BambooHr", request, nil)
if err != nil {
@@ -24365,7 +24362,7 @@ func (s *sources) GetSourceBambooHr(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24395,12 +24392,14 @@ func (s *sources) GetSourceBambooHr(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24410,10 +24409,10 @@ func (s *sources) GetSourceBambooHr(ctx context.Context, request operations.GetS
return res, nil
}
-// GetSourceBigcommerce - Get Source details
-func (s *sources) GetSourceBigcommerce(ctx context.Context, request operations.GetSourceBigcommerceRequest) (*operations.GetSourceBigcommerceResponse, error) {
+// GetSourceBigquery - Get Source details
+func (s *Sources) GetSourceBigquery(ctx context.Context, request operations.GetSourceBigqueryRequest) (*operations.GetSourceBigqueryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Bigcommerce", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Bigquery", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -24423,7 +24422,7 @@ func (s *sources) GetSourceBigcommerce(ctx context.Context, request operations.G
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24444,7 +24443,7 @@ func (s *sources) GetSourceBigcommerce(ctx context.Context, request operations.G
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceBigcommerceResponse{
+ res := &operations.GetSourceBigqueryResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -24453,12 +24452,14 @@ func (s *sources) GetSourceBigcommerce(ctx context.Context, request operations.G
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24468,10 +24469,10 @@ func (s *sources) GetSourceBigcommerce(ctx context.Context, request operations.G
return res, nil
}
-// GetSourceBigquery - Get Source details
-func (s *sources) GetSourceBigquery(ctx context.Context, request operations.GetSourceBigqueryRequest) (*operations.GetSourceBigqueryResponse, error) {
+// GetSourceBingAds - Get Source details
+func (s *Sources) GetSourceBingAds(ctx context.Context, request operations.GetSourceBingAdsRequest) (*operations.GetSourceBingAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Bigquery", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#BingAds", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -24481,7 +24482,7 @@ func (s *sources) GetSourceBigquery(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24502,7 +24503,7 @@ func (s *sources) GetSourceBigquery(ctx context.Context, request operations.GetS
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceBigqueryResponse{
+ res := &operations.GetSourceBingAdsResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -24511,12 +24512,14 @@ func (s *sources) GetSourceBigquery(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24526,10 +24529,10 @@ func (s *sources) GetSourceBigquery(ctx context.Context, request operations.GetS
return res, nil
}
-// GetSourceBingAds - Get Source details
-func (s *sources) GetSourceBingAds(ctx context.Context, request operations.GetSourceBingAdsRequest) (*operations.GetSourceBingAdsResponse, error) {
+// GetSourceBraintree - Get Source details
+func (s *Sources) GetSourceBraintree(ctx context.Context, request operations.GetSourceBraintreeRequest) (*operations.GetSourceBraintreeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#BingAds", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braintree", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -24539,7 +24542,7 @@ func (s *sources) GetSourceBingAds(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24560,7 +24563,7 @@ func (s *sources) GetSourceBingAds(ctx context.Context, request operations.GetSo
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceBingAdsResponse{
+ res := &operations.GetSourceBraintreeResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -24569,12 +24572,14 @@ func (s *sources) GetSourceBingAds(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24584,10 +24589,10 @@ func (s *sources) GetSourceBingAds(ctx context.Context, request operations.GetSo
return res, nil
}
-// GetSourceBraintree - Get Source details
-func (s *sources) GetSourceBraintree(ctx context.Context, request operations.GetSourceBraintreeRequest) (*operations.GetSourceBraintreeResponse, error) {
+// GetSourceBraze - Get Source details
+func (s *Sources) GetSourceBraze(ctx context.Context, request operations.GetSourceBrazeRequest) (*operations.GetSourceBrazeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braintree", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braze", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -24597,7 +24602,7 @@ func (s *sources) GetSourceBraintree(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24618,7 +24623,7 @@ func (s *sources) GetSourceBraintree(ctx context.Context, request operations.Get
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceBraintreeResponse{
+ res := &operations.GetSourceBrazeResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -24627,12 +24632,14 @@ func (s *sources) GetSourceBraintree(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24642,10 +24649,10 @@ func (s *sources) GetSourceBraintree(ctx context.Context, request operations.Get
return res, nil
}
-// GetSourceBraze - Get Source details
-func (s *sources) GetSourceBraze(ctx context.Context, request operations.GetSourceBrazeRequest) (*operations.GetSourceBrazeResponse, error) {
+// GetSourceCart - Get Source details
+func (s *Sources) GetSourceCart(ctx context.Context, request operations.GetSourceCartRequest) (*operations.GetSourceCartResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braze", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Cart", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -24655,7 +24662,7 @@ func (s *sources) GetSourceBraze(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24676,7 +24683,7 @@ func (s *sources) GetSourceBraze(ctx context.Context, request operations.GetSour
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceBrazeResponse{
+ res := &operations.GetSourceCartResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -24685,12 +24692,14 @@ func (s *sources) GetSourceBraze(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24701,7 +24710,7 @@ func (s *sources) GetSourceBraze(ctx context.Context, request operations.GetSour
}
// GetSourceChargebee - Get Source details
-func (s *sources) GetSourceChargebee(ctx context.Context, request operations.GetSourceChargebeeRequest) (*operations.GetSourceChargebeeResponse, error) {
+func (s *Sources) GetSourceChargebee(ctx context.Context, request operations.GetSourceChargebeeRequest) (*operations.GetSourceChargebeeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Chargebee", request, nil)
if err != nil {
@@ -24713,7 +24722,7 @@ func (s *sources) GetSourceChargebee(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24743,12 +24752,14 @@ func (s *sources) GetSourceChargebee(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24759,7 +24770,7 @@ func (s *sources) GetSourceChargebee(ctx context.Context, request operations.Get
}
// GetSourceChartmogul - Get Source details
-func (s *sources) GetSourceChartmogul(ctx context.Context, request operations.GetSourceChartmogulRequest) (*operations.GetSourceChartmogulResponse, error) {
+func (s *Sources) GetSourceChartmogul(ctx context.Context, request operations.GetSourceChartmogulRequest) (*operations.GetSourceChartmogulResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Chartmogul", request, nil)
if err != nil {
@@ -24771,7 +24782,7 @@ func (s *sources) GetSourceChartmogul(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24801,12 +24812,14 @@ func (s *sources) GetSourceChartmogul(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24817,7 +24830,7 @@ func (s *sources) GetSourceChartmogul(ctx context.Context, request operations.Ge
}
// GetSourceClickhouse - Get Source details
-func (s *sources) GetSourceClickhouse(ctx context.Context, request operations.GetSourceClickhouseRequest) (*operations.GetSourceClickhouseResponse, error) {
+func (s *Sources) GetSourceClickhouse(ctx context.Context, request operations.GetSourceClickhouseRequest) (*operations.GetSourceClickhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Clickhouse", request, nil)
if err != nil {
@@ -24829,7 +24842,7 @@ func (s *sources) GetSourceClickhouse(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24859,12 +24872,14 @@ func (s *sources) GetSourceClickhouse(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24875,7 +24890,7 @@ func (s *sources) GetSourceClickhouse(ctx context.Context, request operations.Ge
}
// GetSourceClickupAPI - Get Source details
-func (s *sources) GetSourceClickupAPI(ctx context.Context, request operations.GetSourceClickupAPIRequest) (*operations.GetSourceClickupAPIResponse, error) {
+func (s *Sources) GetSourceClickupAPI(ctx context.Context, request operations.GetSourceClickupAPIRequest) (*operations.GetSourceClickupAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ClickupApi", request, nil)
if err != nil {
@@ -24887,7 +24902,7 @@ func (s *sources) GetSourceClickupAPI(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24917,12 +24932,14 @@ func (s *sources) GetSourceClickupAPI(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24933,7 +24950,7 @@ func (s *sources) GetSourceClickupAPI(ctx context.Context, request operations.Ge
}
// GetSourceClockify - Get Source details
-func (s *sources) GetSourceClockify(ctx context.Context, request operations.GetSourceClockifyRequest) (*operations.GetSourceClockifyResponse, error) {
+func (s *Sources) GetSourceClockify(ctx context.Context, request operations.GetSourceClockifyRequest) (*operations.GetSourceClockifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Clockify", request, nil)
if err != nil {
@@ -24945,7 +24962,7 @@ func (s *sources) GetSourceClockify(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -24975,12 +24992,14 @@ func (s *sources) GetSourceClockify(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -24991,7 +25010,7 @@ func (s *sources) GetSourceClockify(ctx context.Context, request operations.GetS
}
// GetSourceCloseCom - Get Source details
-func (s *sources) GetSourceCloseCom(ctx context.Context, request operations.GetSourceCloseComRequest) (*operations.GetSourceCloseComResponse, error) {
+func (s *Sources) GetSourceCloseCom(ctx context.Context, request operations.GetSourceCloseComRequest) (*operations.GetSourceCloseComResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#CloseCom", request, nil)
if err != nil {
@@ -25003,7 +25022,7 @@ func (s *sources) GetSourceCloseCom(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25033,12 +25052,14 @@ func (s *sources) GetSourceCloseCom(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25049,7 +25070,7 @@ func (s *sources) GetSourceCloseCom(ctx context.Context, request operations.GetS
}
// GetSourceCoda - Get Source details
-func (s *sources) GetSourceCoda(ctx context.Context, request operations.GetSourceCodaRequest) (*operations.GetSourceCodaResponse, error) {
+func (s *Sources) GetSourceCoda(ctx context.Context, request operations.GetSourceCodaRequest) (*operations.GetSourceCodaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Coda", request, nil)
if err != nil {
@@ -25061,7 +25082,7 @@ func (s *sources) GetSourceCoda(ctx context.Context, request operations.GetSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25091,12 +25112,14 @@ func (s *sources) GetSourceCoda(ctx context.Context, request operations.GetSourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25107,7 +25130,7 @@ func (s *sources) GetSourceCoda(ctx context.Context, request operations.GetSourc
}
// GetSourceCoinAPI - Get Source details
-func (s *sources) GetSourceCoinAPI(ctx context.Context, request operations.GetSourceCoinAPIRequest) (*operations.GetSourceCoinAPIResponse, error) {
+func (s *Sources) GetSourceCoinAPI(ctx context.Context, request operations.GetSourceCoinAPIRequest) (*operations.GetSourceCoinAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#CoinApi", request, nil)
if err != nil {
@@ -25119,7 +25142,7 @@ func (s *sources) GetSourceCoinAPI(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25149,12 +25172,14 @@ func (s *sources) GetSourceCoinAPI(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25165,7 +25190,7 @@ func (s *sources) GetSourceCoinAPI(ctx context.Context, request operations.GetSo
}
// GetSourceCoinmarketcap - Get Source details
-func (s *sources) GetSourceCoinmarketcap(ctx context.Context, request operations.GetSourceCoinmarketcapRequest) (*operations.GetSourceCoinmarketcapResponse, error) {
+func (s *Sources) GetSourceCoinmarketcap(ctx context.Context, request operations.GetSourceCoinmarketcapRequest) (*operations.GetSourceCoinmarketcapResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Coinmarketcap", request, nil)
if err != nil {
@@ -25177,7 +25202,7 @@ func (s *sources) GetSourceCoinmarketcap(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25207,12 +25232,14 @@ func (s *sources) GetSourceCoinmarketcap(ctx context.Context, request operations
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25223,7 +25250,7 @@ func (s *sources) GetSourceCoinmarketcap(ctx context.Context, request operations
}
// GetSourceConfigcat - Get Source details
-func (s *sources) GetSourceConfigcat(ctx context.Context, request operations.GetSourceConfigcatRequest) (*operations.GetSourceConfigcatResponse, error) {
+func (s *Sources) GetSourceConfigcat(ctx context.Context, request operations.GetSourceConfigcatRequest) (*operations.GetSourceConfigcatResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Configcat", request, nil)
if err != nil {
@@ -25235,7 +25262,7 @@ func (s *sources) GetSourceConfigcat(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25265,12 +25292,14 @@ func (s *sources) GetSourceConfigcat(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25281,7 +25310,7 @@ func (s *sources) GetSourceConfigcat(ctx context.Context, request operations.Get
}
// GetSourceConfluence - Get Source details
-func (s *sources) GetSourceConfluence(ctx context.Context, request operations.GetSourceConfluenceRequest) (*operations.GetSourceConfluenceResponse, error) {
+func (s *Sources) GetSourceConfluence(ctx context.Context, request operations.GetSourceConfluenceRequest) (*operations.GetSourceConfluenceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Confluence", request, nil)
if err != nil {
@@ -25293,7 +25322,7 @@ func (s *sources) GetSourceConfluence(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25323,12 +25352,14 @@ func (s *sources) GetSourceConfluence(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25339,7 +25370,7 @@ func (s *sources) GetSourceConfluence(ctx context.Context, request operations.Ge
}
// GetSourceConvex - Get Source details
-func (s *sources) GetSourceConvex(ctx context.Context, request operations.GetSourceConvexRequest) (*operations.GetSourceConvexResponse, error) {
+func (s *Sources) GetSourceConvex(ctx context.Context, request operations.GetSourceConvexRequest) (*operations.GetSourceConvexResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Convex", request, nil)
if err != nil {
@@ -25351,7 +25382,7 @@ func (s *sources) GetSourceConvex(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25381,12 +25412,14 @@ func (s *sources) GetSourceConvex(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25397,7 +25430,7 @@ func (s *sources) GetSourceConvex(ctx context.Context, request operations.GetSou
}
// GetSourceDatascope - Get Source details
-func (s *sources) GetSourceDatascope(ctx context.Context, request operations.GetSourceDatascopeRequest) (*operations.GetSourceDatascopeResponse, error) {
+func (s *Sources) GetSourceDatascope(ctx context.Context, request operations.GetSourceDatascopeRequest) (*operations.GetSourceDatascopeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Datascope", request, nil)
if err != nil {
@@ -25409,7 +25442,7 @@ func (s *sources) GetSourceDatascope(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25439,12 +25472,14 @@ func (s *sources) GetSourceDatascope(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25455,7 +25490,7 @@ func (s *sources) GetSourceDatascope(ctx context.Context, request operations.Get
}
// GetSourceDelighted - Get Source details
-func (s *sources) GetSourceDelighted(ctx context.Context, request operations.GetSourceDelightedRequest) (*operations.GetSourceDelightedResponse, error) {
+func (s *Sources) GetSourceDelighted(ctx context.Context, request operations.GetSourceDelightedRequest) (*operations.GetSourceDelightedResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Delighted", request, nil)
if err != nil {
@@ -25467,7 +25502,7 @@ func (s *sources) GetSourceDelighted(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25497,12 +25532,14 @@ func (s *sources) GetSourceDelighted(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25513,7 +25550,7 @@ func (s *sources) GetSourceDelighted(ctx context.Context, request operations.Get
}
// GetSourceDixa - Get Source details
-func (s *sources) GetSourceDixa(ctx context.Context, request operations.GetSourceDixaRequest) (*operations.GetSourceDixaResponse, error) {
+func (s *Sources) GetSourceDixa(ctx context.Context, request operations.GetSourceDixaRequest) (*operations.GetSourceDixaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dixa", request, nil)
if err != nil {
@@ -25525,7 +25562,7 @@ func (s *sources) GetSourceDixa(ctx context.Context, request operations.GetSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25555,12 +25592,14 @@ func (s *sources) GetSourceDixa(ctx context.Context, request operations.GetSourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25571,7 +25610,7 @@ func (s *sources) GetSourceDixa(ctx context.Context, request operations.GetSourc
}
// GetSourceDockerhub - Get Source details
-func (s *sources) GetSourceDockerhub(ctx context.Context, request operations.GetSourceDockerhubRequest) (*operations.GetSourceDockerhubResponse, error) {
+func (s *Sources) GetSourceDockerhub(ctx context.Context, request operations.GetSourceDockerhubRequest) (*operations.GetSourceDockerhubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dockerhub", request, nil)
if err != nil {
@@ -25583,7 +25622,7 @@ func (s *sources) GetSourceDockerhub(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25613,12 +25652,14 @@ func (s *sources) GetSourceDockerhub(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25629,7 +25670,7 @@ func (s *sources) GetSourceDockerhub(ctx context.Context, request operations.Get
}
// GetSourceDremio - Get Source details
-func (s *sources) GetSourceDremio(ctx context.Context, request operations.GetSourceDremioRequest) (*operations.GetSourceDremioResponse, error) {
+func (s *Sources) GetSourceDremio(ctx context.Context, request operations.GetSourceDremioRequest) (*operations.GetSourceDremioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dremio", request, nil)
if err != nil {
@@ -25641,7 +25682,7 @@ func (s *sources) GetSourceDremio(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25671,12 +25712,14 @@ func (s *sources) GetSourceDremio(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25687,7 +25730,7 @@ func (s *sources) GetSourceDremio(ctx context.Context, request operations.GetSou
}
// GetSourceDynamodb - Get Source details
-func (s *sources) GetSourceDynamodb(ctx context.Context, request operations.GetSourceDynamodbRequest) (*operations.GetSourceDynamodbResponse, error) {
+func (s *Sources) GetSourceDynamodb(ctx context.Context, request operations.GetSourceDynamodbRequest) (*operations.GetSourceDynamodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dynamodb", request, nil)
if err != nil {
@@ -25699,7 +25742,7 @@ func (s *sources) GetSourceDynamodb(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25729,70 +25772,14 @@ func (s *sources) GetSourceDynamodb(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
- }
- case httpRes.StatusCode == 403:
- fallthrough
- case httpRes.StatusCode == 404:
- }
-
- return res, nil
-}
-
-// GetSourceE2eTestCloud - Get Source details
-func (s *sources) GetSourceE2eTestCloud(ctx context.Context, request operations.GetSourceE2eTestCloudRequest) (*operations.GetSourceE2eTestCloudResponse, error) {
- baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#E2eTestCloud", request, nil)
- if err != nil {
- return nil, fmt.Errorf("error generating URL: %w", err)
- }
-
- req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
- if err != nil {
- return nil, fmt.Errorf("error creating request: %w", err)
- }
- req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
-
- client := s.sdkConfiguration.SecurityClient
-
- httpRes, err := client.Do(req)
- if err != nil {
- return nil, fmt.Errorf("error sending request: %w", err)
- }
- if httpRes == nil {
- return nil, fmt.Errorf("error sending request: no response")
- }
-
- rawBody, err := io.ReadAll(httpRes.Body)
- if err != nil {
- return nil, fmt.Errorf("error reading response body: %w", err)
- }
- httpRes.Body.Close()
- httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
-
- contentType := httpRes.Header.Get("Content-Type")
-
- res := &operations.GetSourceE2eTestCloudResponse{
- StatusCode: httpRes.StatusCode,
- ContentType: contentType,
- RawResponse: httpRes,
- }
- switch {
- case httpRes.StatusCode == 200:
- switch {
- case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
- }
-
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25803,7 +25790,7 @@ func (s *sources) GetSourceE2eTestCloud(ctx context.Context, request operations.
}
// GetSourceEmailoctopus - Get Source details
-func (s *sources) GetSourceEmailoctopus(ctx context.Context, request operations.GetSourceEmailoctopusRequest) (*operations.GetSourceEmailoctopusResponse, error) {
+func (s *Sources) GetSourceEmailoctopus(ctx context.Context, request operations.GetSourceEmailoctopusRequest) (*operations.GetSourceEmailoctopusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Emailoctopus", request, nil)
if err != nil {
@@ -25815,7 +25802,7 @@ func (s *sources) GetSourceEmailoctopus(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25845,12 +25832,14 @@ func (s *sources) GetSourceEmailoctopus(ctx context.Context, request operations.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25861,7 +25850,7 @@ func (s *sources) GetSourceEmailoctopus(ctx context.Context, request operations.
}
// GetSourceExchangeRates - Get Source details
-func (s *sources) GetSourceExchangeRates(ctx context.Context, request operations.GetSourceExchangeRatesRequest) (*operations.GetSourceExchangeRatesResponse, error) {
+func (s *Sources) GetSourceExchangeRates(ctx context.Context, request operations.GetSourceExchangeRatesRequest) (*operations.GetSourceExchangeRatesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ExchangeRates", request, nil)
if err != nil {
@@ -25873,7 +25862,7 @@ func (s *sources) GetSourceExchangeRates(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25903,12 +25892,14 @@ func (s *sources) GetSourceExchangeRates(ctx context.Context, request operations
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25919,7 +25910,7 @@ func (s *sources) GetSourceExchangeRates(ctx context.Context, request operations
}
// GetSourceFacebookMarketing - Get Source details
-func (s *sources) GetSourceFacebookMarketing(ctx context.Context, request operations.GetSourceFacebookMarketingRequest) (*operations.GetSourceFacebookMarketingResponse, error) {
+func (s *Sources) GetSourceFacebookMarketing(ctx context.Context, request operations.GetSourceFacebookMarketingRequest) (*operations.GetSourceFacebookMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#FacebookMarketing", request, nil)
if err != nil {
@@ -25931,7 +25922,7 @@ func (s *sources) GetSourceFacebookMarketing(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -25961,12 +25952,14 @@ func (s *sources) GetSourceFacebookMarketing(ctx context.Context, request operat
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -25977,7 +25970,7 @@ func (s *sources) GetSourceFacebookMarketing(ctx context.Context, request operat
}
// GetSourceFacebookPages - Get Source details
-func (s *sources) GetSourceFacebookPages(ctx context.Context, request operations.GetSourceFacebookPagesRequest) (*operations.GetSourceFacebookPagesResponse, error) {
+func (s *Sources) GetSourceFacebookPages(ctx context.Context, request operations.GetSourceFacebookPagesRequest) (*operations.GetSourceFacebookPagesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#FacebookPages", request, nil)
if err != nil {
@@ -25989,7 +25982,7 @@ func (s *sources) GetSourceFacebookPages(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26019,12 +26012,14 @@ func (s *sources) GetSourceFacebookPages(ctx context.Context, request operations
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26035,7 +26030,7 @@ func (s *sources) GetSourceFacebookPages(ctx context.Context, request operations
}
// GetSourceFaker - Get Source details
-func (s *sources) GetSourceFaker(ctx context.Context, request operations.GetSourceFakerRequest) (*operations.GetSourceFakerResponse, error) {
+func (s *Sources) GetSourceFaker(ctx context.Context, request operations.GetSourceFakerRequest) (*operations.GetSourceFakerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Faker", request, nil)
if err != nil {
@@ -26047,7 +26042,7 @@ func (s *sources) GetSourceFaker(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26077,12 +26072,14 @@ func (s *sources) GetSourceFaker(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26093,7 +26090,7 @@ func (s *sources) GetSourceFaker(ctx context.Context, request operations.GetSour
}
// GetSourceFauna - Get Source details
-func (s *sources) GetSourceFauna(ctx context.Context, request operations.GetSourceFaunaRequest) (*operations.GetSourceFaunaResponse, error) {
+func (s *Sources) GetSourceFauna(ctx context.Context, request operations.GetSourceFaunaRequest) (*operations.GetSourceFaunaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Fauna", request, nil)
if err != nil {
@@ -26105,7 +26102,7 @@ func (s *sources) GetSourceFauna(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26135,12 +26132,14 @@ func (s *sources) GetSourceFauna(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26150,10 +26149,10 @@ func (s *sources) GetSourceFauna(ctx context.Context, request operations.GetSour
return res, nil
}
-// GetSourceFileSecure - Get Source details
-func (s *sources) GetSourceFileSecure(ctx context.Context, request operations.GetSourceFileSecureRequest) (*operations.GetSourceFileSecureResponse, error) {
+// GetSourceFile - Get Source details
+func (s *Sources) GetSourceFile(ctx context.Context, request operations.GetSourceFileRequest) (*operations.GetSourceFileResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#FileSecure", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#File", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -26163,7 +26162,7 @@ func (s *sources) GetSourceFileSecure(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26184,7 +26183,7 @@ func (s *sources) GetSourceFileSecure(ctx context.Context, request operations.Ge
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceFileSecureResponse{
+ res := &operations.GetSourceFileResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -26193,12 +26192,14 @@ func (s *sources) GetSourceFileSecure(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26209,7 +26210,7 @@ func (s *sources) GetSourceFileSecure(ctx context.Context, request operations.Ge
}
// GetSourceFirebolt - Get Source details
-func (s *sources) GetSourceFirebolt(ctx context.Context, request operations.GetSourceFireboltRequest) (*operations.GetSourceFireboltResponse, error) {
+func (s *Sources) GetSourceFirebolt(ctx context.Context, request operations.GetSourceFireboltRequest) (*operations.GetSourceFireboltResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Firebolt", request, nil)
if err != nil {
@@ -26221,7 +26222,7 @@ func (s *sources) GetSourceFirebolt(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26251,12 +26252,14 @@ func (s *sources) GetSourceFirebolt(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26267,7 +26270,7 @@ func (s *sources) GetSourceFirebolt(ctx context.Context, request operations.GetS
}
// GetSourceFreshcaller - Get Source details
-func (s *sources) GetSourceFreshcaller(ctx context.Context, request operations.GetSourceFreshcallerRequest) (*operations.GetSourceFreshcallerResponse, error) {
+func (s *Sources) GetSourceFreshcaller(ctx context.Context, request operations.GetSourceFreshcallerRequest) (*operations.GetSourceFreshcallerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Freshcaller", request, nil)
if err != nil {
@@ -26279,7 +26282,7 @@ func (s *sources) GetSourceFreshcaller(ctx context.Context, request operations.G
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26309,12 +26312,14 @@ func (s *sources) GetSourceFreshcaller(ctx context.Context, request operations.G
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26325,7 +26330,7 @@ func (s *sources) GetSourceFreshcaller(ctx context.Context, request operations.G
}
// GetSourceFreshdesk - Get Source details
-func (s *sources) GetSourceFreshdesk(ctx context.Context, request operations.GetSourceFreshdeskRequest) (*operations.GetSourceFreshdeskResponse, error) {
+func (s *Sources) GetSourceFreshdesk(ctx context.Context, request operations.GetSourceFreshdeskRequest) (*operations.GetSourceFreshdeskResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Freshdesk", request, nil)
if err != nil {
@@ -26337,7 +26342,7 @@ func (s *sources) GetSourceFreshdesk(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26367,12 +26372,14 @@ func (s *sources) GetSourceFreshdesk(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26383,7 +26390,7 @@ func (s *sources) GetSourceFreshdesk(ctx context.Context, request operations.Get
}
// GetSourceFreshsales - Get Source details
-func (s *sources) GetSourceFreshsales(ctx context.Context, request operations.GetSourceFreshsalesRequest) (*operations.GetSourceFreshsalesResponse, error) {
+func (s *Sources) GetSourceFreshsales(ctx context.Context, request operations.GetSourceFreshsalesRequest) (*operations.GetSourceFreshsalesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Freshsales", request, nil)
if err != nil {
@@ -26395,7 +26402,7 @@ func (s *sources) GetSourceFreshsales(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26425,12 +26432,14 @@ func (s *sources) GetSourceFreshsales(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26441,7 +26450,7 @@ func (s *sources) GetSourceFreshsales(ctx context.Context, request operations.Ge
}
// GetSourceGainsightPx - Get Source details
-func (s *sources) GetSourceGainsightPx(ctx context.Context, request operations.GetSourceGainsightPxRequest) (*operations.GetSourceGainsightPxResponse, error) {
+func (s *Sources) GetSourceGainsightPx(ctx context.Context, request operations.GetSourceGainsightPxRequest) (*operations.GetSourceGainsightPxResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GainsightPx", request, nil)
if err != nil {
@@ -26453,7 +26462,7 @@ func (s *sources) GetSourceGainsightPx(ctx context.Context, request operations.G
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26483,12 +26492,14 @@ func (s *sources) GetSourceGainsightPx(ctx context.Context, request operations.G
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26499,7 +26510,7 @@ func (s *sources) GetSourceGainsightPx(ctx context.Context, request operations.G
}
// GetSourceGcs - Get Source details
-func (s *sources) GetSourceGcs(ctx context.Context, request operations.GetSourceGcsRequest) (*operations.GetSourceGcsResponse, error) {
+func (s *Sources) GetSourceGcs(ctx context.Context, request operations.GetSourceGcsRequest) (*operations.GetSourceGcsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gcs", request, nil)
if err != nil {
@@ -26511,7 +26522,7 @@ func (s *sources) GetSourceGcs(ctx context.Context, request operations.GetSource
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26541,12 +26552,14 @@ func (s *sources) GetSourceGcs(ctx context.Context, request operations.GetSource
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26557,7 +26570,7 @@ func (s *sources) GetSourceGcs(ctx context.Context, request operations.GetSource
}
// GetSourceGetlago - Get Source details
-func (s *sources) GetSourceGetlago(ctx context.Context, request operations.GetSourceGetlagoRequest) (*operations.GetSourceGetlagoResponse, error) {
+func (s *Sources) GetSourceGetlago(ctx context.Context, request operations.GetSourceGetlagoRequest) (*operations.GetSourceGetlagoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Getlago", request, nil)
if err != nil {
@@ -26569,7 +26582,7 @@ func (s *sources) GetSourceGetlago(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26599,12 +26612,14 @@ func (s *sources) GetSourceGetlago(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26615,7 +26630,7 @@ func (s *sources) GetSourceGetlago(ctx context.Context, request operations.GetSo
}
// GetSourceGithub - Get Source details
-func (s *sources) GetSourceGithub(ctx context.Context, request operations.GetSourceGithubRequest) (*operations.GetSourceGithubResponse, error) {
+func (s *Sources) GetSourceGithub(ctx context.Context, request operations.GetSourceGithubRequest) (*operations.GetSourceGithubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Github", request, nil)
if err != nil {
@@ -26627,7 +26642,7 @@ func (s *sources) GetSourceGithub(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26657,12 +26672,14 @@ func (s *sources) GetSourceGithub(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26673,7 +26690,7 @@ func (s *sources) GetSourceGithub(ctx context.Context, request operations.GetSou
}
// GetSourceGitlab - Get Source details
-func (s *sources) GetSourceGitlab(ctx context.Context, request operations.GetSourceGitlabRequest) (*operations.GetSourceGitlabResponse, error) {
+func (s *Sources) GetSourceGitlab(ctx context.Context, request operations.GetSourceGitlabRequest) (*operations.GetSourceGitlabResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gitlab", request, nil)
if err != nil {
@@ -26685,7 +26702,7 @@ func (s *sources) GetSourceGitlab(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26715,12 +26732,14 @@ func (s *sources) GetSourceGitlab(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26731,7 +26750,7 @@ func (s *sources) GetSourceGitlab(ctx context.Context, request operations.GetSou
}
// GetSourceGlassfrog - Get Source details
-func (s *sources) GetSourceGlassfrog(ctx context.Context, request operations.GetSourceGlassfrogRequest) (*operations.GetSourceGlassfrogResponse, error) {
+func (s *Sources) GetSourceGlassfrog(ctx context.Context, request operations.GetSourceGlassfrogRequest) (*operations.GetSourceGlassfrogResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Glassfrog", request, nil)
if err != nil {
@@ -26743,7 +26762,7 @@ func (s *sources) GetSourceGlassfrog(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26773,12 +26792,14 @@ func (s *sources) GetSourceGlassfrog(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26789,7 +26810,7 @@ func (s *sources) GetSourceGlassfrog(ctx context.Context, request operations.Get
}
// GetSourceGnews - Get Source details
-func (s *sources) GetSourceGnews(ctx context.Context, request operations.GetSourceGnewsRequest) (*operations.GetSourceGnewsResponse, error) {
+func (s *Sources) GetSourceGnews(ctx context.Context, request operations.GetSourceGnewsRequest) (*operations.GetSourceGnewsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gnews", request, nil)
if err != nil {
@@ -26801,7 +26822,7 @@ func (s *sources) GetSourceGnews(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26831,12 +26852,14 @@ func (s *sources) GetSourceGnews(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26847,7 +26870,7 @@ func (s *sources) GetSourceGnews(ctx context.Context, request operations.GetSour
}
// GetSourceGoogleAds - Get Source details
-func (s *sources) GetSourceGoogleAds(ctx context.Context, request operations.GetSourceGoogleAdsRequest) (*operations.GetSourceGoogleAdsResponse, error) {
+func (s *Sources) GetSourceGoogleAds(ctx context.Context, request operations.GetSourceGoogleAdsRequest) (*operations.GetSourceGoogleAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleAds", request, nil)
if err != nil {
@@ -26859,7 +26882,7 @@ func (s *sources) GetSourceGoogleAds(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26889,12 +26912,14 @@ func (s *sources) GetSourceGoogleAds(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26905,7 +26930,7 @@ func (s *sources) GetSourceGoogleAds(ctx context.Context, request operations.Get
}
// GetSourceGoogleAnalyticsDataAPI - Get Source details
-func (s *sources) GetSourceGoogleAnalyticsDataAPI(ctx context.Context, request operations.GetSourceGoogleAnalyticsDataAPIRequest) (*operations.GetSourceGoogleAnalyticsDataAPIResponse, error) {
+func (s *Sources) GetSourceGoogleAnalyticsDataAPI(ctx context.Context, request operations.GetSourceGoogleAnalyticsDataAPIRequest) (*operations.GetSourceGoogleAnalyticsDataAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleAnalyticsDataApi", request, nil)
if err != nil {
@@ -26917,7 +26942,7 @@ func (s *sources) GetSourceGoogleAnalyticsDataAPI(ctx context.Context, request o
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26947,12 +26972,14 @@ func (s *sources) GetSourceGoogleAnalyticsDataAPI(ctx context.Context, request o
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -26962,10 +26989,10 @@ func (s *sources) GetSourceGoogleAnalyticsDataAPI(ctx context.Context, request o
return res, nil
}
-// GetSourceGoogleAnalyticsV4 - Get Source details
-func (s *sources) GetSourceGoogleAnalyticsV4(ctx context.Context, request operations.GetSourceGoogleAnalyticsV4Request) (*operations.GetSourceGoogleAnalyticsV4Response, error) {
+// GetSourceGoogleDirectory - Get Source details
+func (s *Sources) GetSourceGoogleDirectory(ctx context.Context, request operations.GetSourceGoogleDirectoryRequest) (*operations.GetSourceGoogleDirectoryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleAnalyticsV4", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleDirectory", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -26975,7 +27002,7 @@ func (s *sources) GetSourceGoogleAnalyticsV4(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -26996,7 +27023,7 @@ func (s *sources) GetSourceGoogleAnalyticsV4(ctx context.Context, request operat
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceGoogleAnalyticsV4Response{
+ res := &operations.GetSourceGoogleDirectoryResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -27005,12 +27032,14 @@ func (s *sources) GetSourceGoogleAnalyticsV4(ctx context.Context, request operat
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27020,10 +27049,10 @@ func (s *sources) GetSourceGoogleAnalyticsV4(ctx context.Context, request operat
return res, nil
}
-// GetSourceGoogleDirectory - Get Source details
-func (s *sources) GetSourceGoogleDirectory(ctx context.Context, request operations.GetSourceGoogleDirectoryRequest) (*operations.GetSourceGoogleDirectoryResponse, error) {
+// GetSourceGoogleDrive - Get Source details
+func (s *Sources) GetSourceGoogleDrive(ctx context.Context, request operations.GetSourceGoogleDriveRequest) (*operations.GetSourceGoogleDriveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleDirectory", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleDrive", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -27033,7 +27062,7 @@ func (s *sources) GetSourceGoogleDirectory(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27054,7 +27083,7 @@ func (s *sources) GetSourceGoogleDirectory(ctx context.Context, request operatio
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceGoogleDirectoryResponse{
+ res := &operations.GetSourceGoogleDriveResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -27063,12 +27092,14 @@ func (s *sources) GetSourceGoogleDirectory(ctx context.Context, request operatio
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27079,7 +27110,7 @@ func (s *sources) GetSourceGoogleDirectory(ctx context.Context, request operatio
}
// GetSourceGooglePagespeedInsights - Get Source details
-func (s *sources) GetSourceGooglePagespeedInsights(ctx context.Context, request operations.GetSourceGooglePagespeedInsightsRequest) (*operations.GetSourceGooglePagespeedInsightsResponse, error) {
+func (s *Sources) GetSourceGooglePagespeedInsights(ctx context.Context, request operations.GetSourceGooglePagespeedInsightsRequest) (*operations.GetSourceGooglePagespeedInsightsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GooglePagespeedInsights", request, nil)
if err != nil {
@@ -27091,7 +27122,7 @@ func (s *sources) GetSourceGooglePagespeedInsights(ctx context.Context, request
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27121,12 +27152,14 @@ func (s *sources) GetSourceGooglePagespeedInsights(ctx context.Context, request
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27137,7 +27170,7 @@ func (s *sources) GetSourceGooglePagespeedInsights(ctx context.Context, request
}
// GetSourceGoogleSearchConsole - Get Source details
-func (s *sources) GetSourceGoogleSearchConsole(ctx context.Context, request operations.GetSourceGoogleSearchConsoleRequest) (*operations.GetSourceGoogleSearchConsoleResponse, error) {
+func (s *Sources) GetSourceGoogleSearchConsole(ctx context.Context, request operations.GetSourceGoogleSearchConsoleRequest) (*operations.GetSourceGoogleSearchConsoleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleSearchConsole", request, nil)
if err != nil {
@@ -27149,7 +27182,7 @@ func (s *sources) GetSourceGoogleSearchConsole(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27179,12 +27212,14 @@ func (s *sources) GetSourceGoogleSearchConsole(ctx context.Context, request oper
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27195,7 +27230,7 @@ func (s *sources) GetSourceGoogleSearchConsole(ctx context.Context, request oper
}
// GetSourceGoogleSheets - Get Source details
-func (s *sources) GetSourceGoogleSheets(ctx context.Context, request operations.GetSourceGoogleSheetsRequest) (*operations.GetSourceGoogleSheetsResponse, error) {
+func (s *Sources) GetSourceGoogleSheets(ctx context.Context, request operations.GetSourceGoogleSheetsRequest) (*operations.GetSourceGoogleSheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleSheets", request, nil)
if err != nil {
@@ -27207,7 +27242,7 @@ func (s *sources) GetSourceGoogleSheets(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27237,12 +27272,14 @@ func (s *sources) GetSourceGoogleSheets(ctx context.Context, request operations.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27253,7 +27290,7 @@ func (s *sources) GetSourceGoogleSheets(ctx context.Context, request operations.
}
// GetSourceGoogleWebfonts - Get Source details
-func (s *sources) GetSourceGoogleWebfonts(ctx context.Context, request operations.GetSourceGoogleWebfontsRequest) (*operations.GetSourceGoogleWebfontsResponse, error) {
+func (s *Sources) GetSourceGoogleWebfonts(ctx context.Context, request operations.GetSourceGoogleWebfontsRequest) (*operations.GetSourceGoogleWebfontsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleWebfonts", request, nil)
if err != nil {
@@ -27265,7 +27302,7 @@ func (s *sources) GetSourceGoogleWebfonts(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27295,12 +27332,14 @@ func (s *sources) GetSourceGoogleWebfonts(ctx context.Context, request operation
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27311,7 +27350,7 @@ func (s *sources) GetSourceGoogleWebfonts(ctx context.Context, request operation
}
// GetSourceGoogleWorkspaceAdminReports - Get Source details
-func (s *sources) GetSourceGoogleWorkspaceAdminReports(ctx context.Context, request operations.GetSourceGoogleWorkspaceAdminReportsRequest) (*operations.GetSourceGoogleWorkspaceAdminReportsResponse, error) {
+func (s *Sources) GetSourceGoogleWorkspaceAdminReports(ctx context.Context, request operations.GetSourceGoogleWorkspaceAdminReportsRequest) (*operations.GetSourceGoogleWorkspaceAdminReportsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleWorkspaceAdminReports", request, nil)
if err != nil {
@@ -27323,7 +27362,7 @@ func (s *sources) GetSourceGoogleWorkspaceAdminReports(ctx context.Context, requ
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27353,12 +27392,14 @@ func (s *sources) GetSourceGoogleWorkspaceAdminReports(ctx context.Context, requ
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27369,7 +27410,7 @@ func (s *sources) GetSourceGoogleWorkspaceAdminReports(ctx context.Context, requ
}
// GetSourceGreenhouse - Get Source details
-func (s *sources) GetSourceGreenhouse(ctx context.Context, request operations.GetSourceGreenhouseRequest) (*operations.GetSourceGreenhouseResponse, error) {
+func (s *Sources) GetSourceGreenhouse(ctx context.Context, request operations.GetSourceGreenhouseRequest) (*operations.GetSourceGreenhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Greenhouse", request, nil)
if err != nil {
@@ -27381,7 +27422,7 @@ func (s *sources) GetSourceGreenhouse(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27411,12 +27452,14 @@ func (s *sources) GetSourceGreenhouse(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27427,7 +27470,7 @@ func (s *sources) GetSourceGreenhouse(ctx context.Context, request operations.Ge
}
// GetSourceGridly - Get Source details
-func (s *sources) GetSourceGridly(ctx context.Context, request operations.GetSourceGridlyRequest) (*operations.GetSourceGridlyResponse, error) {
+func (s *Sources) GetSourceGridly(ctx context.Context, request operations.GetSourceGridlyRequest) (*operations.GetSourceGridlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gridly", request, nil)
if err != nil {
@@ -27439,7 +27482,7 @@ func (s *sources) GetSourceGridly(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27469,12 +27512,14 @@ func (s *sources) GetSourceGridly(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27485,7 +27530,7 @@ func (s *sources) GetSourceGridly(ctx context.Context, request operations.GetSou
}
// GetSourceHarvest - Get Source details
-func (s *sources) GetSourceHarvest(ctx context.Context, request operations.GetSourceHarvestRequest) (*operations.GetSourceHarvestResponse, error) {
+func (s *Sources) GetSourceHarvest(ctx context.Context, request operations.GetSourceHarvestRequest) (*operations.GetSourceHarvestResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Harvest", request, nil)
if err != nil {
@@ -27497,7 +27542,7 @@ func (s *sources) GetSourceHarvest(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27527,12 +27572,14 @@ func (s *sources) GetSourceHarvest(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27543,7 +27590,7 @@ func (s *sources) GetSourceHarvest(ctx context.Context, request operations.GetSo
}
// GetSourceHubplanner - Get Source details
-func (s *sources) GetSourceHubplanner(ctx context.Context, request operations.GetSourceHubplannerRequest) (*operations.GetSourceHubplannerResponse, error) {
+func (s *Sources) GetSourceHubplanner(ctx context.Context, request operations.GetSourceHubplannerRequest) (*operations.GetSourceHubplannerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Hubplanner", request, nil)
if err != nil {
@@ -27555,7 +27602,7 @@ func (s *sources) GetSourceHubplanner(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27585,12 +27632,14 @@ func (s *sources) GetSourceHubplanner(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27601,7 +27650,7 @@ func (s *sources) GetSourceHubplanner(ctx context.Context, request operations.Ge
}
// GetSourceHubspot - Get Source details
-func (s *sources) GetSourceHubspot(ctx context.Context, request operations.GetSourceHubspotRequest) (*operations.GetSourceHubspotResponse, error) {
+func (s *Sources) GetSourceHubspot(ctx context.Context, request operations.GetSourceHubspotRequest) (*operations.GetSourceHubspotResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Hubspot", request, nil)
if err != nil {
@@ -27613,7 +27662,7 @@ func (s *sources) GetSourceHubspot(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27643,12 +27692,14 @@ func (s *sources) GetSourceHubspot(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27659,7 +27710,7 @@ func (s *sources) GetSourceHubspot(ctx context.Context, request operations.GetSo
}
// GetSourceInsightly - Get Source details
-func (s *sources) GetSourceInsightly(ctx context.Context, request operations.GetSourceInsightlyRequest) (*operations.GetSourceInsightlyResponse, error) {
+func (s *Sources) GetSourceInsightly(ctx context.Context, request operations.GetSourceInsightlyRequest) (*operations.GetSourceInsightlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Insightly", request, nil)
if err != nil {
@@ -27671,7 +27722,7 @@ func (s *sources) GetSourceInsightly(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27701,12 +27752,14 @@ func (s *sources) GetSourceInsightly(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27717,7 +27770,7 @@ func (s *sources) GetSourceInsightly(ctx context.Context, request operations.Get
}
// GetSourceInstagram - Get Source details
-func (s *sources) GetSourceInstagram(ctx context.Context, request operations.GetSourceInstagramRequest) (*operations.GetSourceInstagramResponse, error) {
+func (s *Sources) GetSourceInstagram(ctx context.Context, request operations.GetSourceInstagramRequest) (*operations.GetSourceInstagramResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Instagram", request, nil)
if err != nil {
@@ -27729,7 +27782,7 @@ func (s *sources) GetSourceInstagram(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27759,12 +27812,14 @@ func (s *sources) GetSourceInstagram(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27775,7 +27830,7 @@ func (s *sources) GetSourceInstagram(ctx context.Context, request operations.Get
}
// GetSourceInstatus - Get Source details
-func (s *sources) GetSourceInstatus(ctx context.Context, request operations.GetSourceInstatusRequest) (*operations.GetSourceInstatusResponse, error) {
+func (s *Sources) GetSourceInstatus(ctx context.Context, request operations.GetSourceInstatusRequest) (*operations.GetSourceInstatusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Instatus", request, nil)
if err != nil {
@@ -27787,7 +27842,7 @@ func (s *sources) GetSourceInstatus(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27817,12 +27872,14 @@ func (s *sources) GetSourceInstatus(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27833,7 +27890,7 @@ func (s *sources) GetSourceInstatus(ctx context.Context, request operations.GetS
}
// GetSourceIntercom - Get Source details
-func (s *sources) GetSourceIntercom(ctx context.Context, request operations.GetSourceIntercomRequest) (*operations.GetSourceIntercomResponse, error) {
+func (s *Sources) GetSourceIntercom(ctx context.Context, request operations.GetSourceIntercomRequest) (*operations.GetSourceIntercomResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Intercom", request, nil)
if err != nil {
@@ -27845,7 +27902,7 @@ func (s *sources) GetSourceIntercom(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27875,12 +27932,14 @@ func (s *sources) GetSourceIntercom(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27891,7 +27950,7 @@ func (s *sources) GetSourceIntercom(ctx context.Context, request operations.GetS
}
// GetSourceIp2whois - Get Source details
-func (s *sources) GetSourceIp2whois(ctx context.Context, request operations.GetSourceIp2whoisRequest) (*operations.GetSourceIp2whoisResponse, error) {
+func (s *Sources) GetSourceIp2whois(ctx context.Context, request operations.GetSourceIp2whoisRequest) (*operations.GetSourceIp2whoisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Ip2whois", request, nil)
if err != nil {
@@ -27903,7 +27962,7 @@ func (s *sources) GetSourceIp2whois(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27933,12 +27992,14 @@ func (s *sources) GetSourceIp2whois(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -27949,7 +28010,7 @@ func (s *sources) GetSourceIp2whois(ctx context.Context, request operations.GetS
}
// GetSourceIterable - Get Source details
-func (s *sources) GetSourceIterable(ctx context.Context, request operations.GetSourceIterableRequest) (*operations.GetSourceIterableResponse, error) {
+func (s *Sources) GetSourceIterable(ctx context.Context, request operations.GetSourceIterableRequest) (*operations.GetSourceIterableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Iterable", request, nil)
if err != nil {
@@ -27961,7 +28022,7 @@ func (s *sources) GetSourceIterable(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -27991,12 +28052,14 @@ func (s *sources) GetSourceIterable(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28007,7 +28070,7 @@ func (s *sources) GetSourceIterable(ctx context.Context, request operations.GetS
}
// GetSourceJira - Get Source details
-func (s *sources) GetSourceJira(ctx context.Context, request operations.GetSourceJiraRequest) (*operations.GetSourceJiraResponse, error) {
+func (s *Sources) GetSourceJira(ctx context.Context, request operations.GetSourceJiraRequest) (*operations.GetSourceJiraResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Jira", request, nil)
if err != nil {
@@ -28019,7 +28082,7 @@ func (s *sources) GetSourceJira(ctx context.Context, request operations.GetSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28049,12 +28112,14 @@ func (s *sources) GetSourceJira(ctx context.Context, request operations.GetSourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28065,7 +28130,7 @@ func (s *sources) GetSourceJira(ctx context.Context, request operations.GetSourc
}
// GetSourceK6Cloud - Get Source details
-func (s *sources) GetSourceK6Cloud(ctx context.Context, request operations.GetSourceK6CloudRequest) (*operations.GetSourceK6CloudResponse, error) {
+func (s *Sources) GetSourceK6Cloud(ctx context.Context, request operations.GetSourceK6CloudRequest) (*operations.GetSourceK6CloudResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#K6Cloud", request, nil)
if err != nil {
@@ -28077,7 +28142,7 @@ func (s *sources) GetSourceK6Cloud(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28107,12 +28172,14 @@ func (s *sources) GetSourceK6Cloud(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28123,7 +28190,7 @@ func (s *sources) GetSourceK6Cloud(ctx context.Context, request operations.GetSo
}
// GetSourceKlarna - Get Source details
-func (s *sources) GetSourceKlarna(ctx context.Context, request operations.GetSourceKlarnaRequest) (*operations.GetSourceKlarnaResponse, error) {
+func (s *Sources) GetSourceKlarna(ctx context.Context, request operations.GetSourceKlarnaRequest) (*operations.GetSourceKlarnaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Klarna", request, nil)
if err != nil {
@@ -28135,7 +28202,7 @@ func (s *sources) GetSourceKlarna(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28165,12 +28232,14 @@ func (s *sources) GetSourceKlarna(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28181,7 +28250,7 @@ func (s *sources) GetSourceKlarna(ctx context.Context, request operations.GetSou
}
// GetSourceKlaviyo - Get Source details
-func (s *sources) GetSourceKlaviyo(ctx context.Context, request operations.GetSourceKlaviyoRequest) (*operations.GetSourceKlaviyoResponse, error) {
+func (s *Sources) GetSourceKlaviyo(ctx context.Context, request operations.GetSourceKlaviyoRequest) (*operations.GetSourceKlaviyoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Klaviyo", request, nil)
if err != nil {
@@ -28193,7 +28262,7 @@ func (s *sources) GetSourceKlaviyo(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28223,12 +28292,14 @@ func (s *sources) GetSourceKlaviyo(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28239,7 +28310,7 @@ func (s *sources) GetSourceKlaviyo(ctx context.Context, request operations.GetSo
}
// GetSourceKustomerSinger - Get Source details
-func (s *sources) GetSourceKustomerSinger(ctx context.Context, request operations.GetSourceKustomerSingerRequest) (*operations.GetSourceKustomerSingerResponse, error) {
+func (s *Sources) GetSourceKustomerSinger(ctx context.Context, request operations.GetSourceKustomerSingerRequest) (*operations.GetSourceKustomerSingerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#KustomerSinger", request, nil)
if err != nil {
@@ -28251,7 +28322,7 @@ func (s *sources) GetSourceKustomerSinger(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28281,12 +28352,14 @@ func (s *sources) GetSourceKustomerSinger(ctx context.Context, request operation
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28297,7 +28370,7 @@ func (s *sources) GetSourceKustomerSinger(ctx context.Context, request operation
}
// GetSourceKyve - Get Source details
-func (s *sources) GetSourceKyve(ctx context.Context, request operations.GetSourceKyveRequest) (*operations.GetSourceKyveResponse, error) {
+func (s *Sources) GetSourceKyve(ctx context.Context, request operations.GetSourceKyveRequest) (*operations.GetSourceKyveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Kyve", request, nil)
if err != nil {
@@ -28309,7 +28382,7 @@ func (s *sources) GetSourceKyve(ctx context.Context, request operations.GetSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28339,12 +28412,14 @@ func (s *sources) GetSourceKyve(ctx context.Context, request operations.GetSourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28355,7 +28430,7 @@ func (s *sources) GetSourceKyve(ctx context.Context, request operations.GetSourc
}
// GetSourceLaunchdarkly - Get Source details
-func (s *sources) GetSourceLaunchdarkly(ctx context.Context, request operations.GetSourceLaunchdarklyRequest) (*operations.GetSourceLaunchdarklyResponse, error) {
+func (s *Sources) GetSourceLaunchdarkly(ctx context.Context, request operations.GetSourceLaunchdarklyRequest) (*operations.GetSourceLaunchdarklyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Launchdarkly", request, nil)
if err != nil {
@@ -28367,7 +28442,7 @@ func (s *sources) GetSourceLaunchdarkly(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28397,12 +28472,14 @@ func (s *sources) GetSourceLaunchdarkly(ctx context.Context, request operations.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28413,7 +28490,7 @@ func (s *sources) GetSourceLaunchdarkly(ctx context.Context, request operations.
}
// GetSourceLemlist - Get Source details
-func (s *sources) GetSourceLemlist(ctx context.Context, request operations.GetSourceLemlistRequest) (*operations.GetSourceLemlistResponse, error) {
+func (s *Sources) GetSourceLemlist(ctx context.Context, request operations.GetSourceLemlistRequest) (*operations.GetSourceLemlistResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Lemlist", request, nil)
if err != nil {
@@ -28425,7 +28502,7 @@ func (s *sources) GetSourceLemlist(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28455,12 +28532,14 @@ func (s *sources) GetSourceLemlist(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28471,7 +28550,7 @@ func (s *sources) GetSourceLemlist(ctx context.Context, request operations.GetSo
}
// GetSourceLeverHiring - Get Source details
-func (s *sources) GetSourceLeverHiring(ctx context.Context, request operations.GetSourceLeverHiringRequest) (*operations.GetSourceLeverHiringResponse, error) {
+func (s *Sources) GetSourceLeverHiring(ctx context.Context, request operations.GetSourceLeverHiringRequest) (*operations.GetSourceLeverHiringResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#LeverHiring", request, nil)
if err != nil {
@@ -28483,7 +28562,7 @@ func (s *sources) GetSourceLeverHiring(ctx context.Context, request operations.G
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28513,12 +28592,14 @@ func (s *sources) GetSourceLeverHiring(ctx context.Context, request operations.G
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28529,7 +28610,7 @@ func (s *sources) GetSourceLeverHiring(ctx context.Context, request operations.G
}
// GetSourceLinkedinAds - Get Source details
-func (s *sources) GetSourceLinkedinAds(ctx context.Context, request operations.GetSourceLinkedinAdsRequest) (*operations.GetSourceLinkedinAdsResponse, error) {
+func (s *Sources) GetSourceLinkedinAds(ctx context.Context, request operations.GetSourceLinkedinAdsRequest) (*operations.GetSourceLinkedinAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#LinkedinAds", request, nil)
if err != nil {
@@ -28541,7 +28622,7 @@ func (s *sources) GetSourceLinkedinAds(ctx context.Context, request operations.G
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28571,12 +28652,14 @@ func (s *sources) GetSourceLinkedinAds(ctx context.Context, request operations.G
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28587,7 +28670,7 @@ func (s *sources) GetSourceLinkedinAds(ctx context.Context, request operations.G
}
// GetSourceLinkedinPages - Get Source details
-func (s *sources) GetSourceLinkedinPages(ctx context.Context, request operations.GetSourceLinkedinPagesRequest) (*operations.GetSourceLinkedinPagesResponse, error) {
+func (s *Sources) GetSourceLinkedinPages(ctx context.Context, request operations.GetSourceLinkedinPagesRequest) (*operations.GetSourceLinkedinPagesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#LinkedinPages", request, nil)
if err != nil {
@@ -28599,7 +28682,7 @@ func (s *sources) GetSourceLinkedinPages(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28629,12 +28712,14 @@ func (s *sources) GetSourceLinkedinPages(ctx context.Context, request operations
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28645,7 +28730,7 @@ func (s *sources) GetSourceLinkedinPages(ctx context.Context, request operations
}
// GetSourceLinnworks - Get Source details
-func (s *sources) GetSourceLinnworks(ctx context.Context, request operations.GetSourceLinnworksRequest) (*operations.GetSourceLinnworksResponse, error) {
+func (s *Sources) GetSourceLinnworks(ctx context.Context, request operations.GetSourceLinnworksRequest) (*operations.GetSourceLinnworksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Linnworks", request, nil)
if err != nil {
@@ -28657,7 +28742,7 @@ func (s *sources) GetSourceLinnworks(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28687,12 +28772,14 @@ func (s *sources) GetSourceLinnworks(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28703,7 +28790,7 @@ func (s *sources) GetSourceLinnworks(ctx context.Context, request operations.Get
}
// GetSourceLokalise - Get Source details
-func (s *sources) GetSourceLokalise(ctx context.Context, request operations.GetSourceLokaliseRequest) (*operations.GetSourceLokaliseResponse, error) {
+func (s *Sources) GetSourceLokalise(ctx context.Context, request operations.GetSourceLokaliseRequest) (*operations.GetSourceLokaliseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Lokalise", request, nil)
if err != nil {
@@ -28715,7 +28802,7 @@ func (s *sources) GetSourceLokalise(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28745,12 +28832,14 @@ func (s *sources) GetSourceLokalise(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28761,7 +28850,7 @@ func (s *sources) GetSourceLokalise(ctx context.Context, request operations.GetS
}
// GetSourceMailchimp - Get Source details
-func (s *sources) GetSourceMailchimp(ctx context.Context, request operations.GetSourceMailchimpRequest) (*operations.GetSourceMailchimpResponse, error) {
+func (s *Sources) GetSourceMailchimp(ctx context.Context, request operations.GetSourceMailchimpRequest) (*operations.GetSourceMailchimpResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mailchimp", request, nil)
if err != nil {
@@ -28773,7 +28862,7 @@ func (s *sources) GetSourceMailchimp(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28803,12 +28892,14 @@ func (s *sources) GetSourceMailchimp(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28819,7 +28910,7 @@ func (s *sources) GetSourceMailchimp(ctx context.Context, request operations.Get
}
// GetSourceMailgun - Get Source details
-func (s *sources) GetSourceMailgun(ctx context.Context, request operations.GetSourceMailgunRequest) (*operations.GetSourceMailgunResponse, error) {
+func (s *Sources) GetSourceMailgun(ctx context.Context, request operations.GetSourceMailgunRequest) (*operations.GetSourceMailgunResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mailgun", request, nil)
if err != nil {
@@ -28831,7 +28922,7 @@ func (s *sources) GetSourceMailgun(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28861,12 +28952,14 @@ func (s *sources) GetSourceMailgun(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28877,7 +28970,7 @@ func (s *sources) GetSourceMailgun(ctx context.Context, request operations.GetSo
}
// GetSourceMailjetSms - Get Source details
-func (s *sources) GetSourceMailjetSms(ctx context.Context, request operations.GetSourceMailjetSmsRequest) (*operations.GetSourceMailjetSmsResponse, error) {
+func (s *Sources) GetSourceMailjetSms(ctx context.Context, request operations.GetSourceMailjetSmsRequest) (*operations.GetSourceMailjetSmsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MailjetSms", request, nil)
if err != nil {
@@ -28889,7 +28982,7 @@ func (s *sources) GetSourceMailjetSms(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28919,12 +29012,14 @@ func (s *sources) GetSourceMailjetSms(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28935,7 +29030,7 @@ func (s *sources) GetSourceMailjetSms(ctx context.Context, request operations.Ge
}
// GetSourceMarketo - Get Source details
-func (s *sources) GetSourceMarketo(ctx context.Context, request operations.GetSourceMarketoRequest) (*operations.GetSourceMarketoResponse, error) {
+func (s *Sources) GetSourceMarketo(ctx context.Context, request operations.GetSourceMarketoRequest) (*operations.GetSourceMarketoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Marketo", request, nil)
if err != nil {
@@ -28947,7 +29042,7 @@ func (s *sources) GetSourceMarketo(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -28977,12 +29072,14 @@ func (s *sources) GetSourceMarketo(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -28993,7 +29090,7 @@ func (s *sources) GetSourceMarketo(ctx context.Context, request operations.GetSo
}
// GetSourceMetabase - Get Source details
-func (s *sources) GetSourceMetabase(ctx context.Context, request operations.GetSourceMetabaseRequest) (*operations.GetSourceMetabaseResponse, error) {
+func (s *Sources) GetSourceMetabase(ctx context.Context, request operations.GetSourceMetabaseRequest) (*operations.GetSourceMetabaseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Metabase", request, nil)
if err != nil {
@@ -29005,7 +29102,7 @@ func (s *sources) GetSourceMetabase(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29035,12 +29132,14 @@ func (s *sources) GetSourceMetabase(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29051,7 +29150,7 @@ func (s *sources) GetSourceMetabase(ctx context.Context, request operations.GetS
}
// GetSourceMicrosoftTeams - Get Source details
-func (s *sources) GetSourceMicrosoftTeams(ctx context.Context, request operations.GetSourceMicrosoftTeamsRequest) (*operations.GetSourceMicrosoftTeamsResponse, error) {
+func (s *Sources) GetSourceMicrosoftTeams(ctx context.Context, request operations.GetSourceMicrosoftTeamsRequest) (*operations.GetSourceMicrosoftTeamsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MicrosoftTeams", request, nil)
if err != nil {
@@ -29063,7 +29162,7 @@ func (s *sources) GetSourceMicrosoftTeams(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29093,12 +29192,14 @@ func (s *sources) GetSourceMicrosoftTeams(ctx context.Context, request operation
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29109,7 +29210,7 @@ func (s *sources) GetSourceMicrosoftTeams(ctx context.Context, request operation
}
// GetSourceMixpanel - Get Source details
-func (s *sources) GetSourceMixpanel(ctx context.Context, request operations.GetSourceMixpanelRequest) (*operations.GetSourceMixpanelResponse, error) {
+func (s *Sources) GetSourceMixpanel(ctx context.Context, request operations.GetSourceMixpanelRequest) (*operations.GetSourceMixpanelResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mixpanel", request, nil)
if err != nil {
@@ -29121,7 +29222,7 @@ func (s *sources) GetSourceMixpanel(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29151,12 +29252,14 @@ func (s *sources) GetSourceMixpanel(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29167,7 +29270,7 @@ func (s *sources) GetSourceMixpanel(ctx context.Context, request operations.GetS
}
// GetSourceMonday - Get Source details
-func (s *sources) GetSourceMonday(ctx context.Context, request operations.GetSourceMondayRequest) (*operations.GetSourceMondayResponse, error) {
+func (s *Sources) GetSourceMonday(ctx context.Context, request operations.GetSourceMondayRequest) (*operations.GetSourceMondayResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Monday", request, nil)
if err != nil {
@@ -29179,7 +29282,7 @@ func (s *sources) GetSourceMonday(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29209,12 +29312,14 @@ func (s *sources) GetSourceMonday(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29224,10 +29329,10 @@ func (s *sources) GetSourceMonday(ctx context.Context, request operations.GetSou
return res, nil
}
-// GetSourceMongodb - Get Source details
-func (s *sources) GetSourceMongodb(ctx context.Context, request operations.GetSourceMongodbRequest) (*operations.GetSourceMongodbResponse, error) {
+// GetSourceMongodbInternalPoc - Get Source details
+func (s *Sources) GetSourceMongodbInternalPoc(ctx context.Context, request operations.GetSourceMongodbInternalPocRequest) (*operations.GetSourceMongodbInternalPocResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mongodb", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MongodbInternalPoc", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -29237,7 +29342,7 @@ func (s *sources) GetSourceMongodb(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29258,7 +29363,7 @@ func (s *sources) GetSourceMongodb(ctx context.Context, request operations.GetSo
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceMongodbResponse{
+ res := &operations.GetSourceMongodbInternalPocResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -29267,12 +29372,14 @@ func (s *sources) GetSourceMongodb(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29282,10 +29389,10 @@ func (s *sources) GetSourceMongodb(ctx context.Context, request operations.GetSo
return res, nil
}
-// GetSourceMongodbInternalPoc - Get Source details
-func (s *sources) GetSourceMongodbInternalPoc(ctx context.Context, request operations.GetSourceMongodbInternalPocRequest) (*operations.GetSourceMongodbInternalPocResponse, error) {
+// GetSourceMongodbV2 - Get Source details
+func (s *Sources) GetSourceMongodbV2(ctx context.Context, request operations.GetSourceMongodbV2Request) (*operations.GetSourceMongodbV2Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MongodbInternalPoc", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MongodbV2", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -29295,7 +29402,7 @@ func (s *sources) GetSourceMongodbInternalPoc(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29316,7 +29423,7 @@ func (s *sources) GetSourceMongodbInternalPoc(ctx context.Context, request opera
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceMongodbInternalPocResponse{
+ res := &operations.GetSourceMongodbV2Response{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -29325,12 +29432,14 @@ func (s *sources) GetSourceMongodbInternalPoc(ctx context.Context, request opera
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29341,7 +29450,7 @@ func (s *sources) GetSourceMongodbInternalPoc(ctx context.Context, request opera
}
// GetSourceMssql - Get Source details
-func (s *sources) GetSourceMssql(ctx context.Context, request operations.GetSourceMssqlRequest) (*operations.GetSourceMssqlResponse, error) {
+func (s *Sources) GetSourceMssql(ctx context.Context, request operations.GetSourceMssqlRequest) (*operations.GetSourceMssqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mssql", request, nil)
if err != nil {
@@ -29353,7 +29462,7 @@ func (s *sources) GetSourceMssql(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29383,12 +29492,14 @@ func (s *sources) GetSourceMssql(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29399,7 +29510,7 @@ func (s *sources) GetSourceMssql(ctx context.Context, request operations.GetSour
}
// GetSourceMyHours - Get Source details
-func (s *sources) GetSourceMyHours(ctx context.Context, request operations.GetSourceMyHoursRequest) (*operations.GetSourceMyHoursResponse, error) {
+func (s *Sources) GetSourceMyHours(ctx context.Context, request operations.GetSourceMyHoursRequest) (*operations.GetSourceMyHoursResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MyHours", request, nil)
if err != nil {
@@ -29411,7 +29522,7 @@ func (s *sources) GetSourceMyHours(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29441,12 +29552,14 @@ func (s *sources) GetSourceMyHours(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29457,7 +29570,7 @@ func (s *sources) GetSourceMyHours(ctx context.Context, request operations.GetSo
}
// GetSourceMysql - Get Source details
-func (s *sources) GetSourceMysql(ctx context.Context, request operations.GetSourceMysqlRequest) (*operations.GetSourceMysqlResponse, error) {
+func (s *Sources) GetSourceMysql(ctx context.Context, request operations.GetSourceMysqlRequest) (*operations.GetSourceMysqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mysql", request, nil)
if err != nil {
@@ -29469,7 +29582,7 @@ func (s *sources) GetSourceMysql(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29499,12 +29612,14 @@ func (s *sources) GetSourceMysql(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29515,7 +29630,7 @@ func (s *sources) GetSourceMysql(ctx context.Context, request operations.GetSour
}
// GetSourceNetsuite - Get Source details
-func (s *sources) GetSourceNetsuite(ctx context.Context, request operations.GetSourceNetsuiteRequest) (*operations.GetSourceNetsuiteResponse, error) {
+func (s *Sources) GetSourceNetsuite(ctx context.Context, request operations.GetSourceNetsuiteRequest) (*operations.GetSourceNetsuiteResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Netsuite", request, nil)
if err != nil {
@@ -29527,7 +29642,7 @@ func (s *sources) GetSourceNetsuite(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29557,12 +29672,14 @@ func (s *sources) GetSourceNetsuite(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29573,7 +29690,7 @@ func (s *sources) GetSourceNetsuite(ctx context.Context, request operations.GetS
}
// GetSourceNotion - Get Source details
-func (s *sources) GetSourceNotion(ctx context.Context, request operations.GetSourceNotionRequest) (*operations.GetSourceNotionResponse, error) {
+func (s *Sources) GetSourceNotion(ctx context.Context, request operations.GetSourceNotionRequest) (*operations.GetSourceNotionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Notion", request, nil)
if err != nil {
@@ -29585,7 +29702,7 @@ func (s *sources) GetSourceNotion(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29615,12 +29732,14 @@ func (s *sources) GetSourceNotion(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29631,7 +29750,7 @@ func (s *sources) GetSourceNotion(ctx context.Context, request operations.GetSou
}
// GetSourceNytimes - Get Source details
-func (s *sources) GetSourceNytimes(ctx context.Context, request operations.GetSourceNytimesRequest) (*operations.GetSourceNytimesResponse, error) {
+func (s *Sources) GetSourceNytimes(ctx context.Context, request operations.GetSourceNytimesRequest) (*operations.GetSourceNytimesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Nytimes", request, nil)
if err != nil {
@@ -29643,7 +29762,7 @@ func (s *sources) GetSourceNytimes(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29673,12 +29792,14 @@ func (s *sources) GetSourceNytimes(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29689,7 +29810,7 @@ func (s *sources) GetSourceNytimes(ctx context.Context, request operations.GetSo
}
// GetSourceOkta - Get Source details
-func (s *sources) GetSourceOkta(ctx context.Context, request operations.GetSourceOktaRequest) (*operations.GetSourceOktaResponse, error) {
+func (s *Sources) GetSourceOkta(ctx context.Context, request operations.GetSourceOktaRequest) (*operations.GetSourceOktaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Okta", request, nil)
if err != nil {
@@ -29701,7 +29822,7 @@ func (s *sources) GetSourceOkta(ctx context.Context, request operations.GetSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29731,12 +29852,14 @@ func (s *sources) GetSourceOkta(ctx context.Context, request operations.GetSourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29747,7 +29870,7 @@ func (s *sources) GetSourceOkta(ctx context.Context, request operations.GetSourc
}
// GetSourceOmnisend - Get Source details
-func (s *sources) GetSourceOmnisend(ctx context.Context, request operations.GetSourceOmnisendRequest) (*operations.GetSourceOmnisendResponse, error) {
+func (s *Sources) GetSourceOmnisend(ctx context.Context, request operations.GetSourceOmnisendRequest) (*operations.GetSourceOmnisendResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Omnisend", request, nil)
if err != nil {
@@ -29759,7 +29882,7 @@ func (s *sources) GetSourceOmnisend(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29789,12 +29912,14 @@ func (s *sources) GetSourceOmnisend(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29805,7 +29930,7 @@ func (s *sources) GetSourceOmnisend(ctx context.Context, request operations.GetS
}
// GetSourceOnesignal - Get Source details
-func (s *sources) GetSourceOnesignal(ctx context.Context, request operations.GetSourceOnesignalRequest) (*operations.GetSourceOnesignalResponse, error) {
+func (s *Sources) GetSourceOnesignal(ctx context.Context, request operations.GetSourceOnesignalRequest) (*operations.GetSourceOnesignalResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Onesignal", request, nil)
if err != nil {
@@ -29817,7 +29942,7 @@ func (s *sources) GetSourceOnesignal(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29847,12 +29972,14 @@ func (s *sources) GetSourceOnesignal(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29863,7 +29990,7 @@ func (s *sources) GetSourceOnesignal(ctx context.Context, request operations.Get
}
// GetSourceOracle - Get Source details
-func (s *sources) GetSourceOracle(ctx context.Context, request operations.GetSourceOracleRequest) (*operations.GetSourceOracleResponse, error) {
+func (s *Sources) GetSourceOracle(ctx context.Context, request operations.GetSourceOracleRequest) (*operations.GetSourceOracleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Oracle", request, nil)
if err != nil {
@@ -29875,7 +30002,7 @@ func (s *sources) GetSourceOracle(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29905,12 +30032,14 @@ func (s *sources) GetSourceOracle(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29921,7 +30050,7 @@ func (s *sources) GetSourceOracle(ctx context.Context, request operations.GetSou
}
// GetSourceOrb - Get Source details
-func (s *sources) GetSourceOrb(ctx context.Context, request operations.GetSourceOrbRequest) (*operations.GetSourceOrbResponse, error) {
+func (s *Sources) GetSourceOrb(ctx context.Context, request operations.GetSourceOrbRequest) (*operations.GetSourceOrbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Orb", request, nil)
if err != nil {
@@ -29933,7 +30062,7 @@ func (s *sources) GetSourceOrb(ctx context.Context, request operations.GetSource
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -29963,12 +30092,14 @@ func (s *sources) GetSourceOrb(ctx context.Context, request operations.GetSource
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -29979,7 +30110,7 @@ func (s *sources) GetSourceOrb(ctx context.Context, request operations.GetSource
}
// GetSourceOrbit - Get Source details
-func (s *sources) GetSourceOrbit(ctx context.Context, request operations.GetSourceOrbitRequest) (*operations.GetSourceOrbitResponse, error) {
+func (s *Sources) GetSourceOrbit(ctx context.Context, request operations.GetSourceOrbitRequest) (*operations.GetSourceOrbitResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Orbit", request, nil)
if err != nil {
@@ -29991,7 +30122,7 @@ func (s *sources) GetSourceOrbit(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30021,12 +30152,14 @@ func (s *sources) GetSourceOrbit(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30037,7 +30170,7 @@ func (s *sources) GetSourceOrbit(ctx context.Context, request operations.GetSour
}
// GetSourceOutbrainAmplify - Get Source details
-func (s *sources) GetSourceOutbrainAmplify(ctx context.Context, request operations.GetSourceOutbrainAmplifyRequest) (*operations.GetSourceOutbrainAmplifyResponse, error) {
+func (s *Sources) GetSourceOutbrainAmplify(ctx context.Context, request operations.GetSourceOutbrainAmplifyRequest) (*operations.GetSourceOutbrainAmplifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#OutbrainAmplify", request, nil)
if err != nil {
@@ -30049,7 +30182,7 @@ func (s *sources) GetSourceOutbrainAmplify(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30079,12 +30212,14 @@ func (s *sources) GetSourceOutbrainAmplify(ctx context.Context, request operatio
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30095,7 +30230,7 @@ func (s *sources) GetSourceOutbrainAmplify(ctx context.Context, request operatio
}
// GetSourceOutreach - Get Source details
-func (s *sources) GetSourceOutreach(ctx context.Context, request operations.GetSourceOutreachRequest) (*operations.GetSourceOutreachResponse, error) {
+func (s *Sources) GetSourceOutreach(ctx context.Context, request operations.GetSourceOutreachRequest) (*operations.GetSourceOutreachResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Outreach", request, nil)
if err != nil {
@@ -30107,7 +30242,7 @@ func (s *sources) GetSourceOutreach(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30137,12 +30272,14 @@ func (s *sources) GetSourceOutreach(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30153,7 +30290,7 @@ func (s *sources) GetSourceOutreach(ctx context.Context, request operations.GetS
}
// GetSourcePaypalTransaction - Get Source details
-func (s *sources) GetSourcePaypalTransaction(ctx context.Context, request operations.GetSourcePaypalTransactionRequest) (*operations.GetSourcePaypalTransactionResponse, error) {
+func (s *Sources) GetSourcePaypalTransaction(ctx context.Context, request operations.GetSourcePaypalTransactionRequest) (*operations.GetSourcePaypalTransactionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PaypalTransaction", request, nil)
if err != nil {
@@ -30165,7 +30302,7 @@ func (s *sources) GetSourcePaypalTransaction(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30195,12 +30332,14 @@ func (s *sources) GetSourcePaypalTransaction(ctx context.Context, request operat
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30211,7 +30350,7 @@ func (s *sources) GetSourcePaypalTransaction(ctx context.Context, request operat
}
// GetSourcePaystack - Get Source details
-func (s *sources) GetSourcePaystack(ctx context.Context, request operations.GetSourcePaystackRequest) (*operations.GetSourcePaystackResponse, error) {
+func (s *Sources) GetSourcePaystack(ctx context.Context, request operations.GetSourcePaystackRequest) (*operations.GetSourcePaystackResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Paystack", request, nil)
if err != nil {
@@ -30223,7 +30362,7 @@ func (s *sources) GetSourcePaystack(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30253,12 +30392,14 @@ func (s *sources) GetSourcePaystack(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30269,7 +30410,7 @@ func (s *sources) GetSourcePaystack(ctx context.Context, request operations.GetS
}
// GetSourcePendo - Get Source details
-func (s *sources) GetSourcePendo(ctx context.Context, request operations.GetSourcePendoRequest) (*operations.GetSourcePendoResponse, error) {
+func (s *Sources) GetSourcePendo(ctx context.Context, request operations.GetSourcePendoRequest) (*operations.GetSourcePendoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pendo", request, nil)
if err != nil {
@@ -30281,7 +30422,7 @@ func (s *sources) GetSourcePendo(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30311,12 +30452,14 @@ func (s *sources) GetSourcePendo(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30327,7 +30470,7 @@ func (s *sources) GetSourcePendo(ctx context.Context, request operations.GetSour
}
// GetSourcePersistiq - Get Source details
-func (s *sources) GetSourcePersistiq(ctx context.Context, request operations.GetSourcePersistiqRequest) (*operations.GetSourcePersistiqResponse, error) {
+func (s *Sources) GetSourcePersistiq(ctx context.Context, request operations.GetSourcePersistiqRequest) (*operations.GetSourcePersistiqResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Persistiq", request, nil)
if err != nil {
@@ -30339,7 +30482,7 @@ func (s *sources) GetSourcePersistiq(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30369,12 +30512,14 @@ func (s *sources) GetSourcePersistiq(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30385,7 +30530,7 @@ func (s *sources) GetSourcePersistiq(ctx context.Context, request operations.Get
}
// GetSourcePexelsAPI - Get Source details
-func (s *sources) GetSourcePexelsAPI(ctx context.Context, request operations.GetSourcePexelsAPIRequest) (*operations.GetSourcePexelsAPIResponse, error) {
+func (s *Sources) GetSourcePexelsAPI(ctx context.Context, request operations.GetSourcePexelsAPIRequest) (*operations.GetSourcePexelsAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PexelsApi", request, nil)
if err != nil {
@@ -30397,7 +30542,7 @@ func (s *sources) GetSourcePexelsAPI(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30427,12 +30572,14 @@ func (s *sources) GetSourcePexelsAPI(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30443,7 +30590,7 @@ func (s *sources) GetSourcePexelsAPI(ctx context.Context, request operations.Get
}
// GetSourcePinterest - Get Source details
-func (s *sources) GetSourcePinterest(ctx context.Context, request operations.GetSourcePinterestRequest) (*operations.GetSourcePinterestResponse, error) {
+func (s *Sources) GetSourcePinterest(ctx context.Context, request operations.GetSourcePinterestRequest) (*operations.GetSourcePinterestResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pinterest", request, nil)
if err != nil {
@@ -30455,7 +30602,7 @@ func (s *sources) GetSourcePinterest(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30485,12 +30632,14 @@ func (s *sources) GetSourcePinterest(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30501,7 +30650,7 @@ func (s *sources) GetSourcePinterest(ctx context.Context, request operations.Get
}
// GetSourcePipedrive - Get Source details
-func (s *sources) GetSourcePipedrive(ctx context.Context, request operations.GetSourcePipedriveRequest) (*operations.GetSourcePipedriveResponse, error) {
+func (s *Sources) GetSourcePipedrive(ctx context.Context, request operations.GetSourcePipedriveRequest) (*operations.GetSourcePipedriveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pipedrive", request, nil)
if err != nil {
@@ -30513,7 +30662,7 @@ func (s *sources) GetSourcePipedrive(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30543,12 +30692,14 @@ func (s *sources) GetSourcePipedrive(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30559,7 +30710,7 @@ func (s *sources) GetSourcePipedrive(ctx context.Context, request operations.Get
}
// GetSourcePocket - Get Source details
-func (s *sources) GetSourcePocket(ctx context.Context, request operations.GetSourcePocketRequest) (*operations.GetSourcePocketResponse, error) {
+func (s *Sources) GetSourcePocket(ctx context.Context, request operations.GetSourcePocketRequest) (*operations.GetSourcePocketResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pocket", request, nil)
if err != nil {
@@ -30571,7 +30722,7 @@ func (s *sources) GetSourcePocket(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30601,12 +30752,14 @@ func (s *sources) GetSourcePocket(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30617,7 +30770,7 @@ func (s *sources) GetSourcePocket(ctx context.Context, request operations.GetSou
}
// GetSourcePokeapi - Get Source details
-func (s *sources) GetSourcePokeapi(ctx context.Context, request operations.GetSourcePokeapiRequest) (*operations.GetSourcePokeapiResponse, error) {
+func (s *Sources) GetSourcePokeapi(ctx context.Context, request operations.GetSourcePokeapiRequest) (*operations.GetSourcePokeapiResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pokeapi", request, nil)
if err != nil {
@@ -30629,7 +30782,7 @@ func (s *sources) GetSourcePokeapi(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30659,12 +30812,14 @@ func (s *sources) GetSourcePokeapi(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30675,7 +30830,7 @@ func (s *sources) GetSourcePokeapi(ctx context.Context, request operations.GetSo
}
// GetSourcePolygonStockAPI - Get Source details
-func (s *sources) GetSourcePolygonStockAPI(ctx context.Context, request operations.GetSourcePolygonStockAPIRequest) (*operations.GetSourcePolygonStockAPIResponse, error) {
+func (s *Sources) GetSourcePolygonStockAPI(ctx context.Context, request operations.GetSourcePolygonStockAPIRequest) (*operations.GetSourcePolygonStockAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PolygonStockApi", request, nil)
if err != nil {
@@ -30687,7 +30842,7 @@ func (s *sources) GetSourcePolygonStockAPI(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30717,12 +30872,14 @@ func (s *sources) GetSourcePolygonStockAPI(ctx context.Context, request operatio
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30733,7 +30890,7 @@ func (s *sources) GetSourcePolygonStockAPI(ctx context.Context, request operatio
}
// GetSourcePostgres - Get Source details
-func (s *sources) GetSourcePostgres(ctx context.Context, request operations.GetSourcePostgresRequest) (*operations.GetSourcePostgresResponse, error) {
+func (s *Sources) GetSourcePostgres(ctx context.Context, request operations.GetSourcePostgresRequest) (*operations.GetSourcePostgresResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Postgres", request, nil)
if err != nil {
@@ -30745,7 +30902,7 @@ func (s *sources) GetSourcePostgres(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30775,12 +30932,14 @@ func (s *sources) GetSourcePostgres(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30791,7 +30950,7 @@ func (s *sources) GetSourcePostgres(ctx context.Context, request operations.GetS
}
// GetSourcePosthog - Get Source details
-func (s *sources) GetSourcePosthog(ctx context.Context, request operations.GetSourcePosthogRequest) (*operations.GetSourcePosthogResponse, error) {
+func (s *Sources) GetSourcePosthog(ctx context.Context, request operations.GetSourcePosthogRequest) (*operations.GetSourcePosthogResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Posthog", request, nil)
if err != nil {
@@ -30803,7 +30962,7 @@ func (s *sources) GetSourcePosthog(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30833,12 +30992,14 @@ func (s *sources) GetSourcePosthog(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30849,7 +31010,7 @@ func (s *sources) GetSourcePosthog(ctx context.Context, request operations.GetSo
}
// GetSourcePostmarkapp - Get Source details
-func (s *sources) GetSourcePostmarkapp(ctx context.Context, request operations.GetSourcePostmarkappRequest) (*operations.GetSourcePostmarkappResponse, error) {
+func (s *Sources) GetSourcePostmarkapp(ctx context.Context, request operations.GetSourcePostmarkappRequest) (*operations.GetSourcePostmarkappResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Postmarkapp", request, nil)
if err != nil {
@@ -30861,7 +31022,7 @@ func (s *sources) GetSourcePostmarkapp(ctx context.Context, request operations.G
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30891,12 +31052,14 @@ func (s *sources) GetSourcePostmarkapp(ctx context.Context, request operations.G
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30907,7 +31070,7 @@ func (s *sources) GetSourcePostmarkapp(ctx context.Context, request operations.G
}
// GetSourcePrestashop - Get Source details
-func (s *sources) GetSourcePrestashop(ctx context.Context, request operations.GetSourcePrestashopRequest) (*operations.GetSourcePrestashopResponse, error) {
+func (s *Sources) GetSourcePrestashop(ctx context.Context, request operations.GetSourcePrestashopRequest) (*operations.GetSourcePrestashopResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Prestashop", request, nil)
if err != nil {
@@ -30919,7 +31082,7 @@ func (s *sources) GetSourcePrestashop(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -30949,12 +31112,14 @@ func (s *sources) GetSourcePrestashop(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -30965,7 +31130,7 @@ func (s *sources) GetSourcePrestashop(ctx context.Context, request operations.Ge
}
// GetSourcePunkAPI - Get Source details
-func (s *sources) GetSourcePunkAPI(ctx context.Context, request operations.GetSourcePunkAPIRequest) (*operations.GetSourcePunkAPIResponse, error) {
+func (s *Sources) GetSourcePunkAPI(ctx context.Context, request operations.GetSourcePunkAPIRequest) (*operations.GetSourcePunkAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PunkApi", request, nil)
if err != nil {
@@ -30977,7 +31142,7 @@ func (s *sources) GetSourcePunkAPI(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31007,12 +31172,14 @@ func (s *sources) GetSourcePunkAPI(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31023,7 +31190,7 @@ func (s *sources) GetSourcePunkAPI(ctx context.Context, request operations.GetSo
}
// GetSourcePypi - Get Source details
-func (s *sources) GetSourcePypi(ctx context.Context, request operations.GetSourcePypiRequest) (*operations.GetSourcePypiResponse, error) {
+func (s *Sources) GetSourcePypi(ctx context.Context, request operations.GetSourcePypiRequest) (*operations.GetSourcePypiResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pypi", request, nil)
if err != nil {
@@ -31035,7 +31202,7 @@ func (s *sources) GetSourcePypi(ctx context.Context, request operations.GetSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31065,12 +31232,14 @@ func (s *sources) GetSourcePypi(ctx context.Context, request operations.GetSourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31081,7 +31250,7 @@ func (s *sources) GetSourcePypi(ctx context.Context, request operations.GetSourc
}
// GetSourceQualaroo - Get Source details
-func (s *sources) GetSourceQualaroo(ctx context.Context, request operations.GetSourceQualarooRequest) (*operations.GetSourceQualarooResponse, error) {
+func (s *Sources) GetSourceQualaroo(ctx context.Context, request operations.GetSourceQualarooRequest) (*operations.GetSourceQualarooResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Qualaroo", request, nil)
if err != nil {
@@ -31093,7 +31262,7 @@ func (s *sources) GetSourceQualaroo(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31123,12 +31292,14 @@ func (s *sources) GetSourceQualaroo(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31139,7 +31310,7 @@ func (s *sources) GetSourceQualaroo(ctx context.Context, request operations.GetS
}
// GetSourceQuickbooks - Get Source details
-func (s *sources) GetSourceQuickbooks(ctx context.Context, request operations.GetSourceQuickbooksRequest) (*operations.GetSourceQuickbooksResponse, error) {
+func (s *Sources) GetSourceQuickbooks(ctx context.Context, request operations.GetSourceQuickbooksRequest) (*operations.GetSourceQuickbooksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Quickbooks", request, nil)
if err != nil {
@@ -31151,7 +31322,7 @@ func (s *sources) GetSourceQuickbooks(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31181,12 +31352,14 @@ func (s *sources) GetSourceQuickbooks(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31197,7 +31370,7 @@ func (s *sources) GetSourceQuickbooks(ctx context.Context, request operations.Ge
}
// GetSourceRailz - Get Source details
-func (s *sources) GetSourceRailz(ctx context.Context, request operations.GetSourceRailzRequest) (*operations.GetSourceRailzResponse, error) {
+func (s *Sources) GetSourceRailz(ctx context.Context, request operations.GetSourceRailzRequest) (*operations.GetSourceRailzResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Railz", request, nil)
if err != nil {
@@ -31209,7 +31382,7 @@ func (s *sources) GetSourceRailz(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31239,12 +31412,14 @@ func (s *sources) GetSourceRailz(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31255,7 +31430,7 @@ func (s *sources) GetSourceRailz(ctx context.Context, request operations.GetSour
}
// GetSourceRecharge - Get Source details
-func (s *sources) GetSourceRecharge(ctx context.Context, request operations.GetSourceRechargeRequest) (*operations.GetSourceRechargeResponse, error) {
+func (s *Sources) GetSourceRecharge(ctx context.Context, request operations.GetSourceRechargeRequest) (*operations.GetSourceRechargeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recharge", request, nil)
if err != nil {
@@ -31267,7 +31442,7 @@ func (s *sources) GetSourceRecharge(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31297,12 +31472,14 @@ func (s *sources) GetSourceRecharge(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31313,7 +31490,7 @@ func (s *sources) GetSourceRecharge(ctx context.Context, request operations.GetS
}
// GetSourceRecreation - Get Source details
-func (s *sources) GetSourceRecreation(ctx context.Context, request operations.GetSourceRecreationRequest) (*operations.GetSourceRecreationResponse, error) {
+func (s *Sources) GetSourceRecreation(ctx context.Context, request operations.GetSourceRecreationRequest) (*operations.GetSourceRecreationResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recreation", request, nil)
if err != nil {
@@ -31325,7 +31502,7 @@ func (s *sources) GetSourceRecreation(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31355,12 +31532,14 @@ func (s *sources) GetSourceRecreation(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31371,7 +31550,7 @@ func (s *sources) GetSourceRecreation(ctx context.Context, request operations.Ge
}
// GetSourceRecruitee - Get Source details
-func (s *sources) GetSourceRecruitee(ctx context.Context, request operations.GetSourceRecruiteeRequest) (*operations.GetSourceRecruiteeResponse, error) {
+func (s *Sources) GetSourceRecruitee(ctx context.Context, request operations.GetSourceRecruiteeRequest) (*operations.GetSourceRecruiteeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recruitee", request, nil)
if err != nil {
@@ -31383,7 +31562,7 @@ func (s *sources) GetSourceRecruitee(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31413,12 +31592,14 @@ func (s *sources) GetSourceRecruitee(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31429,7 +31610,7 @@ func (s *sources) GetSourceRecruitee(ctx context.Context, request operations.Get
}
// GetSourceRecurly - Get Source details
-func (s *sources) GetSourceRecurly(ctx context.Context, request operations.GetSourceRecurlyRequest) (*operations.GetSourceRecurlyResponse, error) {
+func (s *Sources) GetSourceRecurly(ctx context.Context, request operations.GetSourceRecurlyRequest) (*operations.GetSourceRecurlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recurly", request, nil)
if err != nil {
@@ -31441,7 +31622,7 @@ func (s *sources) GetSourceRecurly(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31471,12 +31652,14 @@ func (s *sources) GetSourceRecurly(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31487,7 +31670,7 @@ func (s *sources) GetSourceRecurly(ctx context.Context, request operations.GetSo
}
// GetSourceRedshift - Get Source details
-func (s *sources) GetSourceRedshift(ctx context.Context, request operations.GetSourceRedshiftRequest) (*operations.GetSourceRedshiftResponse, error) {
+func (s *Sources) GetSourceRedshift(ctx context.Context, request operations.GetSourceRedshiftRequest) (*operations.GetSourceRedshiftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Redshift", request, nil)
if err != nil {
@@ -31499,7 +31682,7 @@ func (s *sources) GetSourceRedshift(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31529,12 +31712,14 @@ func (s *sources) GetSourceRedshift(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31545,7 +31730,7 @@ func (s *sources) GetSourceRedshift(ctx context.Context, request operations.GetS
}
// GetSourceRetently - Get Source details
-func (s *sources) GetSourceRetently(ctx context.Context, request operations.GetSourceRetentlyRequest) (*operations.GetSourceRetentlyResponse, error) {
+func (s *Sources) GetSourceRetently(ctx context.Context, request operations.GetSourceRetentlyRequest) (*operations.GetSourceRetentlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Retently", request, nil)
if err != nil {
@@ -31557,7 +31742,7 @@ func (s *sources) GetSourceRetently(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31587,12 +31772,14 @@ func (s *sources) GetSourceRetently(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31603,7 +31790,7 @@ func (s *sources) GetSourceRetently(ctx context.Context, request operations.GetS
}
// GetSourceRkiCovid - Get Source details
-func (s *sources) GetSourceRkiCovid(ctx context.Context, request operations.GetSourceRkiCovidRequest) (*operations.GetSourceRkiCovidResponse, error) {
+func (s *Sources) GetSourceRkiCovid(ctx context.Context, request operations.GetSourceRkiCovidRequest) (*operations.GetSourceRkiCovidResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#RkiCovid", request, nil)
if err != nil {
@@ -31615,7 +31802,7 @@ func (s *sources) GetSourceRkiCovid(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31645,12 +31832,14 @@ func (s *sources) GetSourceRkiCovid(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31661,7 +31850,7 @@ func (s *sources) GetSourceRkiCovid(ctx context.Context, request operations.GetS
}
// GetSourceRss - Get Source details
-func (s *sources) GetSourceRss(ctx context.Context, request operations.GetSourceRssRequest) (*operations.GetSourceRssResponse, error) {
+func (s *Sources) GetSourceRss(ctx context.Context, request operations.GetSourceRssRequest) (*operations.GetSourceRssResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Rss", request, nil)
if err != nil {
@@ -31673,7 +31862,7 @@ func (s *sources) GetSourceRss(ctx context.Context, request operations.GetSource
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31703,12 +31892,14 @@ func (s *sources) GetSourceRss(ctx context.Context, request operations.GetSource
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31719,7 +31910,7 @@ func (s *sources) GetSourceRss(ctx context.Context, request operations.GetSource
}
// GetSourceS3 - Get Source details
-func (s *sources) GetSourceS3(ctx context.Context, request operations.GetSourceS3Request) (*operations.GetSourceS3Response, error) {
+func (s *Sources) GetSourceS3(ctx context.Context, request operations.GetSourceS3Request) (*operations.GetSourceS3Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#S3", request, nil)
if err != nil {
@@ -31731,7 +31922,7 @@ func (s *sources) GetSourceS3(ctx context.Context, request operations.GetSourceS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31761,12 +31952,14 @@ func (s *sources) GetSourceS3(ctx context.Context, request operations.GetSourceS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31777,7 +31970,7 @@ func (s *sources) GetSourceS3(ctx context.Context, request operations.GetSourceS
}
// GetSourceSalesforce - Get Source details
-func (s *sources) GetSourceSalesforce(ctx context.Context, request operations.GetSourceSalesforceRequest) (*operations.GetSourceSalesforceResponse, error) {
+func (s *Sources) GetSourceSalesforce(ctx context.Context, request operations.GetSourceSalesforceRequest) (*operations.GetSourceSalesforceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Salesforce", request, nil)
if err != nil {
@@ -31789,7 +31982,7 @@ func (s *sources) GetSourceSalesforce(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31819,12 +32012,14 @@ func (s *sources) GetSourceSalesforce(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31835,7 +32030,7 @@ func (s *sources) GetSourceSalesforce(ctx context.Context, request operations.Ge
}
// GetSourceSalesloft - Get Source details
-func (s *sources) GetSourceSalesloft(ctx context.Context, request operations.GetSourceSalesloftRequest) (*operations.GetSourceSalesloftResponse, error) {
+func (s *Sources) GetSourceSalesloft(ctx context.Context, request operations.GetSourceSalesloftRequest) (*operations.GetSourceSalesloftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Salesloft", request, nil)
if err != nil {
@@ -31847,7 +32042,7 @@ func (s *sources) GetSourceSalesloft(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31877,12 +32072,14 @@ func (s *sources) GetSourceSalesloft(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31893,7 +32090,7 @@ func (s *sources) GetSourceSalesloft(ctx context.Context, request operations.Get
}
// GetSourceSapFieldglass - Get Source details
-func (s *sources) GetSourceSapFieldglass(ctx context.Context, request operations.GetSourceSapFieldglassRequest) (*operations.GetSourceSapFieldglassResponse, error) {
+func (s *Sources) GetSourceSapFieldglass(ctx context.Context, request operations.GetSourceSapFieldglassRequest) (*operations.GetSourceSapFieldglassResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SapFieldglass", request, nil)
if err != nil {
@@ -31905,7 +32102,7 @@ func (s *sources) GetSourceSapFieldglass(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31935,12 +32132,14 @@ func (s *sources) GetSourceSapFieldglass(ctx context.Context, request operations
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -31951,7 +32150,7 @@ func (s *sources) GetSourceSapFieldglass(ctx context.Context, request operations
}
// GetSourceSecoda - Get Source details
-func (s *sources) GetSourceSecoda(ctx context.Context, request operations.GetSourceSecodaRequest) (*operations.GetSourceSecodaResponse, error) {
+func (s *Sources) GetSourceSecoda(ctx context.Context, request operations.GetSourceSecodaRequest) (*operations.GetSourceSecodaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Secoda", request, nil)
if err != nil {
@@ -31963,7 +32162,7 @@ func (s *sources) GetSourceSecoda(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -31993,12 +32192,14 @@ func (s *sources) GetSourceSecoda(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32009,7 +32210,7 @@ func (s *sources) GetSourceSecoda(ctx context.Context, request operations.GetSou
}
// GetSourceSendgrid - Get Source details
-func (s *sources) GetSourceSendgrid(ctx context.Context, request operations.GetSourceSendgridRequest) (*operations.GetSourceSendgridResponse, error) {
+func (s *Sources) GetSourceSendgrid(ctx context.Context, request operations.GetSourceSendgridRequest) (*operations.GetSourceSendgridResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sendgrid", request, nil)
if err != nil {
@@ -32021,7 +32222,7 @@ func (s *sources) GetSourceSendgrid(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32051,12 +32252,14 @@ func (s *sources) GetSourceSendgrid(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32067,7 +32270,7 @@ func (s *sources) GetSourceSendgrid(ctx context.Context, request operations.GetS
}
// GetSourceSendinblue - Get Source details
-func (s *sources) GetSourceSendinblue(ctx context.Context, request operations.GetSourceSendinblueRequest) (*operations.GetSourceSendinblueResponse, error) {
+func (s *Sources) GetSourceSendinblue(ctx context.Context, request operations.GetSourceSendinblueRequest) (*operations.GetSourceSendinblueResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sendinblue", request, nil)
if err != nil {
@@ -32079,7 +32282,7 @@ func (s *sources) GetSourceSendinblue(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32109,12 +32312,14 @@ func (s *sources) GetSourceSendinblue(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32125,7 +32330,7 @@ func (s *sources) GetSourceSendinblue(ctx context.Context, request operations.Ge
}
// GetSourceSenseforce - Get Source details
-func (s *sources) GetSourceSenseforce(ctx context.Context, request operations.GetSourceSenseforceRequest) (*operations.GetSourceSenseforceResponse, error) {
+func (s *Sources) GetSourceSenseforce(ctx context.Context, request operations.GetSourceSenseforceRequest) (*operations.GetSourceSenseforceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Senseforce", request, nil)
if err != nil {
@@ -32137,7 +32342,7 @@ func (s *sources) GetSourceSenseforce(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32167,12 +32372,14 @@ func (s *sources) GetSourceSenseforce(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32183,7 +32390,7 @@ func (s *sources) GetSourceSenseforce(ctx context.Context, request operations.Ge
}
// GetSourceSentry - Get Source details
-func (s *sources) GetSourceSentry(ctx context.Context, request operations.GetSourceSentryRequest) (*operations.GetSourceSentryResponse, error) {
+func (s *Sources) GetSourceSentry(ctx context.Context, request operations.GetSourceSentryRequest) (*operations.GetSourceSentryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sentry", request, nil)
if err != nil {
@@ -32195,7 +32402,7 @@ func (s *sources) GetSourceSentry(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32225,12 +32432,14 @@ func (s *sources) GetSourceSentry(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32241,7 +32450,7 @@ func (s *sources) GetSourceSentry(ctx context.Context, request operations.GetSou
}
// GetSourceSftp - Get Source details
-func (s *sources) GetSourceSftp(ctx context.Context, request operations.GetSourceSftpRequest) (*operations.GetSourceSftpResponse, error) {
+func (s *Sources) GetSourceSftp(ctx context.Context, request operations.GetSourceSftpRequest) (*operations.GetSourceSftpResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sftp", request, nil)
if err != nil {
@@ -32253,7 +32462,7 @@ func (s *sources) GetSourceSftp(ctx context.Context, request operations.GetSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32283,12 +32492,14 @@ func (s *sources) GetSourceSftp(ctx context.Context, request operations.GetSourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32299,7 +32510,7 @@ func (s *sources) GetSourceSftp(ctx context.Context, request operations.GetSourc
}
// GetSourceSftpBulk - Get Source details
-func (s *sources) GetSourceSftpBulk(ctx context.Context, request operations.GetSourceSftpBulkRequest) (*operations.GetSourceSftpBulkResponse, error) {
+func (s *Sources) GetSourceSftpBulk(ctx context.Context, request operations.GetSourceSftpBulkRequest) (*operations.GetSourceSftpBulkResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SftpBulk", request, nil)
if err != nil {
@@ -32311,7 +32522,7 @@ func (s *sources) GetSourceSftpBulk(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32341,12 +32552,14 @@ func (s *sources) GetSourceSftpBulk(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32357,7 +32570,7 @@ func (s *sources) GetSourceSftpBulk(ctx context.Context, request operations.GetS
}
// GetSourceShopify - Get Source details
-func (s *sources) GetSourceShopify(ctx context.Context, request operations.GetSourceShopifyRequest) (*operations.GetSourceShopifyResponse, error) {
+func (s *Sources) GetSourceShopify(ctx context.Context, request operations.GetSourceShopifyRequest) (*operations.GetSourceShopifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Shopify", request, nil)
if err != nil {
@@ -32369,7 +32582,7 @@ func (s *sources) GetSourceShopify(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32399,12 +32612,14 @@ func (s *sources) GetSourceShopify(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32415,7 +32630,7 @@ func (s *sources) GetSourceShopify(ctx context.Context, request operations.GetSo
}
// GetSourceShortio - Get Source details
-func (s *sources) GetSourceShortio(ctx context.Context, request operations.GetSourceShortioRequest) (*operations.GetSourceShortioResponse, error) {
+func (s *Sources) GetSourceShortio(ctx context.Context, request operations.GetSourceShortioRequest) (*operations.GetSourceShortioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Shortio", request, nil)
if err != nil {
@@ -32427,7 +32642,7 @@ func (s *sources) GetSourceShortio(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32457,12 +32672,14 @@ func (s *sources) GetSourceShortio(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32473,7 +32690,7 @@ func (s *sources) GetSourceShortio(ctx context.Context, request operations.GetSo
}
// GetSourceSlack - Get Source details
-func (s *sources) GetSourceSlack(ctx context.Context, request operations.GetSourceSlackRequest) (*operations.GetSourceSlackResponse, error) {
+func (s *Sources) GetSourceSlack(ctx context.Context, request operations.GetSourceSlackRequest) (*operations.GetSourceSlackResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Slack", request, nil)
if err != nil {
@@ -32485,7 +32702,7 @@ func (s *sources) GetSourceSlack(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32515,12 +32732,14 @@ func (s *sources) GetSourceSlack(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32531,7 +32750,7 @@ func (s *sources) GetSourceSlack(ctx context.Context, request operations.GetSour
}
// GetSourceSmaily - Get Source details
-func (s *sources) GetSourceSmaily(ctx context.Context, request operations.GetSourceSmailyRequest) (*operations.GetSourceSmailyResponse, error) {
+func (s *Sources) GetSourceSmaily(ctx context.Context, request operations.GetSourceSmailyRequest) (*operations.GetSourceSmailyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Smaily", request, nil)
if err != nil {
@@ -32543,7 +32762,7 @@ func (s *sources) GetSourceSmaily(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32573,12 +32792,14 @@ func (s *sources) GetSourceSmaily(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32589,7 +32810,7 @@ func (s *sources) GetSourceSmaily(ctx context.Context, request operations.GetSou
}
// GetSourceSmartengage - Get Source details
-func (s *sources) GetSourceSmartengage(ctx context.Context, request operations.GetSourceSmartengageRequest) (*operations.GetSourceSmartengageResponse, error) {
+func (s *Sources) GetSourceSmartengage(ctx context.Context, request operations.GetSourceSmartengageRequest) (*operations.GetSourceSmartengageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Smartengage", request, nil)
if err != nil {
@@ -32601,7 +32822,7 @@ func (s *sources) GetSourceSmartengage(ctx context.Context, request operations.G
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32631,12 +32852,14 @@ func (s *sources) GetSourceSmartengage(ctx context.Context, request operations.G
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32647,7 +32870,7 @@ func (s *sources) GetSourceSmartengage(ctx context.Context, request operations.G
}
// GetSourceSmartsheets - Get Source details
-func (s *sources) GetSourceSmartsheets(ctx context.Context, request operations.GetSourceSmartsheetsRequest) (*operations.GetSourceSmartsheetsResponse, error) {
+func (s *Sources) GetSourceSmartsheets(ctx context.Context, request operations.GetSourceSmartsheetsRequest) (*operations.GetSourceSmartsheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Smartsheets", request, nil)
if err != nil {
@@ -32659,7 +32882,7 @@ func (s *sources) GetSourceSmartsheets(ctx context.Context, request operations.G
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32689,12 +32912,14 @@ func (s *sources) GetSourceSmartsheets(ctx context.Context, request operations.G
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32705,7 +32930,7 @@ func (s *sources) GetSourceSmartsheets(ctx context.Context, request operations.G
}
// GetSourceSnapchatMarketing - Get Source details
-func (s *sources) GetSourceSnapchatMarketing(ctx context.Context, request operations.GetSourceSnapchatMarketingRequest) (*operations.GetSourceSnapchatMarketingResponse, error) {
+func (s *Sources) GetSourceSnapchatMarketing(ctx context.Context, request operations.GetSourceSnapchatMarketingRequest) (*operations.GetSourceSnapchatMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SnapchatMarketing", request, nil)
if err != nil {
@@ -32717,7 +32942,7 @@ func (s *sources) GetSourceSnapchatMarketing(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32747,12 +32972,14 @@ func (s *sources) GetSourceSnapchatMarketing(ctx context.Context, request operat
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32763,7 +32990,7 @@ func (s *sources) GetSourceSnapchatMarketing(ctx context.Context, request operat
}
// GetSourceSnowflake - Get Source details
-func (s *sources) GetSourceSnowflake(ctx context.Context, request operations.GetSourceSnowflakeRequest) (*operations.GetSourceSnowflakeResponse, error) {
+func (s *Sources) GetSourceSnowflake(ctx context.Context, request operations.GetSourceSnowflakeRequest) (*operations.GetSourceSnowflakeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Snowflake", request, nil)
if err != nil {
@@ -32775,7 +33002,7 @@ func (s *sources) GetSourceSnowflake(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32805,12 +33032,14 @@ func (s *sources) GetSourceSnowflake(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32821,7 +33050,7 @@ func (s *sources) GetSourceSnowflake(ctx context.Context, request operations.Get
}
// GetSourceSonarCloud - Get Source details
-func (s *sources) GetSourceSonarCloud(ctx context.Context, request operations.GetSourceSonarCloudRequest) (*operations.GetSourceSonarCloudResponse, error) {
+func (s *Sources) GetSourceSonarCloud(ctx context.Context, request operations.GetSourceSonarCloudRequest) (*operations.GetSourceSonarCloudResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SonarCloud", request, nil)
if err != nil {
@@ -32833,7 +33062,7 @@ func (s *sources) GetSourceSonarCloud(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32863,12 +33092,14 @@ func (s *sources) GetSourceSonarCloud(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32879,7 +33110,7 @@ func (s *sources) GetSourceSonarCloud(ctx context.Context, request operations.Ge
}
// GetSourceSpacexAPI - Get Source details
-func (s *sources) GetSourceSpacexAPI(ctx context.Context, request operations.GetSourceSpacexAPIRequest) (*operations.GetSourceSpacexAPIResponse, error) {
+func (s *Sources) GetSourceSpacexAPI(ctx context.Context, request operations.GetSourceSpacexAPIRequest) (*operations.GetSourceSpacexAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SpacexApi", request, nil)
if err != nil {
@@ -32891,7 +33122,7 @@ func (s *sources) GetSourceSpacexAPI(ctx context.Context, request operations.Get
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32921,12 +33152,14 @@ func (s *sources) GetSourceSpacexAPI(ctx context.Context, request operations.Get
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32937,7 +33170,7 @@ func (s *sources) GetSourceSpacexAPI(ctx context.Context, request operations.Get
}
// GetSourceSquare - Get Source details
-func (s *sources) GetSourceSquare(ctx context.Context, request operations.GetSourceSquareRequest) (*operations.GetSourceSquareResponse, error) {
+func (s *Sources) GetSourceSquare(ctx context.Context, request operations.GetSourceSquareRequest) (*operations.GetSourceSquareResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Square", request, nil)
if err != nil {
@@ -32949,7 +33182,7 @@ func (s *sources) GetSourceSquare(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -32979,12 +33212,14 @@ func (s *sources) GetSourceSquare(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -32995,7 +33230,7 @@ func (s *sources) GetSourceSquare(ctx context.Context, request operations.GetSou
}
// GetSourceStrava - Get Source details
-func (s *sources) GetSourceStrava(ctx context.Context, request operations.GetSourceStravaRequest) (*operations.GetSourceStravaResponse, error) {
+func (s *Sources) GetSourceStrava(ctx context.Context, request operations.GetSourceStravaRequest) (*operations.GetSourceStravaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Strava", request, nil)
if err != nil {
@@ -33007,7 +33242,7 @@ func (s *sources) GetSourceStrava(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33037,12 +33272,14 @@ func (s *sources) GetSourceStrava(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33053,7 +33290,7 @@ func (s *sources) GetSourceStrava(ctx context.Context, request operations.GetSou
}
// GetSourceStripe - Get Source details
-func (s *sources) GetSourceStripe(ctx context.Context, request operations.GetSourceStripeRequest) (*operations.GetSourceStripeResponse, error) {
+func (s *Sources) GetSourceStripe(ctx context.Context, request operations.GetSourceStripeRequest) (*operations.GetSourceStripeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Stripe", request, nil)
if err != nil {
@@ -33065,7 +33302,7 @@ func (s *sources) GetSourceStripe(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33095,12 +33332,14 @@ func (s *sources) GetSourceStripe(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33111,7 +33350,7 @@ func (s *sources) GetSourceStripe(ctx context.Context, request operations.GetSou
}
// GetSourceSurveySparrow - Get Source details
-func (s *sources) GetSourceSurveySparrow(ctx context.Context, request operations.GetSourceSurveySparrowRequest) (*operations.GetSourceSurveySparrowResponse, error) {
+func (s *Sources) GetSourceSurveySparrow(ctx context.Context, request operations.GetSourceSurveySparrowRequest) (*operations.GetSourceSurveySparrowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SurveySparrow", request, nil)
if err != nil {
@@ -33123,7 +33362,7 @@ func (s *sources) GetSourceSurveySparrow(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33153,12 +33392,14 @@ func (s *sources) GetSourceSurveySparrow(ctx context.Context, request operations
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33169,7 +33410,7 @@ func (s *sources) GetSourceSurveySparrow(ctx context.Context, request operations
}
// GetSourceSurveymonkey - Get Source details
-func (s *sources) GetSourceSurveymonkey(ctx context.Context, request operations.GetSourceSurveymonkeyRequest) (*operations.GetSourceSurveymonkeyResponse, error) {
+func (s *Sources) GetSourceSurveymonkey(ctx context.Context, request operations.GetSourceSurveymonkeyRequest) (*operations.GetSourceSurveymonkeyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Surveymonkey", request, nil)
if err != nil {
@@ -33181,7 +33422,7 @@ func (s *sources) GetSourceSurveymonkey(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33211,12 +33452,14 @@ func (s *sources) GetSourceSurveymonkey(ctx context.Context, request operations.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33227,7 +33470,7 @@ func (s *sources) GetSourceSurveymonkey(ctx context.Context, request operations.
}
// GetSourceTempo - Get Source details
-func (s *sources) GetSourceTempo(ctx context.Context, request operations.GetSourceTempoRequest) (*operations.GetSourceTempoResponse, error) {
+func (s *Sources) GetSourceTempo(ctx context.Context, request operations.GetSourceTempoRequest) (*operations.GetSourceTempoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Tempo", request, nil)
if err != nil {
@@ -33239,7 +33482,7 @@ func (s *sources) GetSourceTempo(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33269,12 +33512,14 @@ func (s *sources) GetSourceTempo(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33285,7 +33530,7 @@ func (s *sources) GetSourceTempo(ctx context.Context, request operations.GetSour
}
// GetSourceTheGuardianAPI - Get Source details
-func (s *sources) GetSourceTheGuardianAPI(ctx context.Context, request operations.GetSourceTheGuardianAPIRequest) (*operations.GetSourceTheGuardianAPIResponse, error) {
+func (s *Sources) GetSourceTheGuardianAPI(ctx context.Context, request operations.GetSourceTheGuardianAPIRequest) (*operations.GetSourceTheGuardianAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TheGuardianApi", request, nil)
if err != nil {
@@ -33297,7 +33542,7 @@ func (s *sources) GetSourceTheGuardianAPI(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33327,12 +33572,14 @@ func (s *sources) GetSourceTheGuardianAPI(ctx context.Context, request operation
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33343,7 +33590,7 @@ func (s *sources) GetSourceTheGuardianAPI(ctx context.Context, request operation
}
// GetSourceTiktokMarketing - Get Source details
-func (s *sources) GetSourceTiktokMarketing(ctx context.Context, request operations.GetSourceTiktokMarketingRequest) (*operations.GetSourceTiktokMarketingResponse, error) {
+func (s *Sources) GetSourceTiktokMarketing(ctx context.Context, request operations.GetSourceTiktokMarketingRequest) (*operations.GetSourceTiktokMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TiktokMarketing", request, nil)
if err != nil {
@@ -33355,7 +33602,7 @@ func (s *sources) GetSourceTiktokMarketing(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33385,12 +33632,14 @@ func (s *sources) GetSourceTiktokMarketing(ctx context.Context, request operatio
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33401,7 +33650,7 @@ func (s *sources) GetSourceTiktokMarketing(ctx context.Context, request operatio
}
// GetSourceTodoist - Get Source details
-func (s *sources) GetSourceTodoist(ctx context.Context, request operations.GetSourceTodoistRequest) (*operations.GetSourceTodoistResponse, error) {
+func (s *Sources) GetSourceTodoist(ctx context.Context, request operations.GetSourceTodoistRequest) (*operations.GetSourceTodoistResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Todoist", request, nil)
if err != nil {
@@ -33413,7 +33662,7 @@ func (s *sources) GetSourceTodoist(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33443,12 +33692,14 @@ func (s *sources) GetSourceTodoist(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33459,7 +33710,7 @@ func (s *sources) GetSourceTodoist(ctx context.Context, request operations.GetSo
}
// GetSourceTrello - Get Source details
-func (s *sources) GetSourceTrello(ctx context.Context, request operations.GetSourceTrelloRequest) (*operations.GetSourceTrelloResponse, error) {
+func (s *Sources) GetSourceTrello(ctx context.Context, request operations.GetSourceTrelloRequest) (*operations.GetSourceTrelloResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Trello", request, nil)
if err != nil {
@@ -33471,7 +33722,7 @@ func (s *sources) GetSourceTrello(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33501,12 +33752,14 @@ func (s *sources) GetSourceTrello(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33517,7 +33770,7 @@ func (s *sources) GetSourceTrello(ctx context.Context, request operations.GetSou
}
// GetSourceTrustpilot - Get Source details
-func (s *sources) GetSourceTrustpilot(ctx context.Context, request operations.GetSourceTrustpilotRequest) (*operations.GetSourceTrustpilotResponse, error) {
+func (s *Sources) GetSourceTrustpilot(ctx context.Context, request operations.GetSourceTrustpilotRequest) (*operations.GetSourceTrustpilotResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Trustpilot", request, nil)
if err != nil {
@@ -33529,7 +33782,7 @@ func (s *sources) GetSourceTrustpilot(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33559,12 +33812,14 @@ func (s *sources) GetSourceTrustpilot(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33575,7 +33830,7 @@ func (s *sources) GetSourceTrustpilot(ctx context.Context, request operations.Ge
}
// GetSourceTvmazeSchedule - Get Source details
-func (s *sources) GetSourceTvmazeSchedule(ctx context.Context, request operations.GetSourceTvmazeScheduleRequest) (*operations.GetSourceTvmazeScheduleResponse, error) {
+func (s *Sources) GetSourceTvmazeSchedule(ctx context.Context, request operations.GetSourceTvmazeScheduleRequest) (*operations.GetSourceTvmazeScheduleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TvmazeSchedule", request, nil)
if err != nil {
@@ -33587,7 +33842,7 @@ func (s *sources) GetSourceTvmazeSchedule(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33617,12 +33872,14 @@ func (s *sources) GetSourceTvmazeSchedule(ctx context.Context, request operation
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33633,7 +33890,7 @@ func (s *sources) GetSourceTvmazeSchedule(ctx context.Context, request operation
}
// GetSourceTwilio - Get Source details
-func (s *sources) GetSourceTwilio(ctx context.Context, request operations.GetSourceTwilioRequest) (*operations.GetSourceTwilioResponse, error) {
+func (s *Sources) GetSourceTwilio(ctx context.Context, request operations.GetSourceTwilioRequest) (*operations.GetSourceTwilioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Twilio", request, nil)
if err != nil {
@@ -33645,7 +33902,7 @@ func (s *sources) GetSourceTwilio(ctx context.Context, request operations.GetSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33675,12 +33932,14 @@ func (s *sources) GetSourceTwilio(ctx context.Context, request operations.GetSou
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33691,7 +33950,7 @@ func (s *sources) GetSourceTwilio(ctx context.Context, request operations.GetSou
}
// GetSourceTwilioTaskrouter - Get Source details
-func (s *sources) GetSourceTwilioTaskrouter(ctx context.Context, request operations.GetSourceTwilioTaskrouterRequest) (*operations.GetSourceTwilioTaskrouterResponse, error) {
+func (s *Sources) GetSourceTwilioTaskrouter(ctx context.Context, request operations.GetSourceTwilioTaskrouterRequest) (*operations.GetSourceTwilioTaskrouterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TwilioTaskrouter", request, nil)
if err != nil {
@@ -33703,7 +33962,7 @@ func (s *sources) GetSourceTwilioTaskrouter(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33733,12 +33992,14 @@ func (s *sources) GetSourceTwilioTaskrouter(ctx context.Context, request operati
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33749,7 +34010,7 @@ func (s *sources) GetSourceTwilioTaskrouter(ctx context.Context, request operati
}
// GetSourceTwitter - Get Source details
-func (s *sources) GetSourceTwitter(ctx context.Context, request operations.GetSourceTwitterRequest) (*operations.GetSourceTwitterResponse, error) {
+func (s *Sources) GetSourceTwitter(ctx context.Context, request operations.GetSourceTwitterRequest) (*operations.GetSourceTwitterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Twitter", request, nil)
if err != nil {
@@ -33761,7 +34022,7 @@ func (s *sources) GetSourceTwitter(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33791,12 +34052,14 @@ func (s *sources) GetSourceTwitter(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33807,7 +34070,7 @@ func (s *sources) GetSourceTwitter(ctx context.Context, request operations.GetSo
}
// GetSourceTypeform - Get Source details
-func (s *sources) GetSourceTypeform(ctx context.Context, request operations.GetSourceTypeformRequest) (*operations.GetSourceTypeformResponse, error) {
+func (s *Sources) GetSourceTypeform(ctx context.Context, request operations.GetSourceTypeformRequest) (*operations.GetSourceTypeformResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Typeform", request, nil)
if err != nil {
@@ -33819,7 +34082,7 @@ func (s *sources) GetSourceTypeform(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33849,12 +34112,14 @@ func (s *sources) GetSourceTypeform(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33865,7 +34130,7 @@ func (s *sources) GetSourceTypeform(ctx context.Context, request operations.GetS
}
// GetSourceUsCensus - Get Source details
-func (s *sources) GetSourceUsCensus(ctx context.Context, request operations.GetSourceUsCensusRequest) (*operations.GetSourceUsCensusResponse, error) {
+func (s *Sources) GetSourceUsCensus(ctx context.Context, request operations.GetSourceUsCensusRequest) (*operations.GetSourceUsCensusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#UsCensus", request, nil)
if err != nil {
@@ -33877,7 +34142,7 @@ func (s *sources) GetSourceUsCensus(ctx context.Context, request operations.GetS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33907,12 +34172,14 @@ func (s *sources) GetSourceUsCensus(ctx context.Context, request operations.GetS
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33923,7 +34190,7 @@ func (s *sources) GetSourceUsCensus(ctx context.Context, request operations.GetS
}
// GetSourceVantage - Get Source details
-func (s *sources) GetSourceVantage(ctx context.Context, request operations.GetSourceVantageRequest) (*operations.GetSourceVantageResponse, error) {
+func (s *Sources) GetSourceVantage(ctx context.Context, request operations.GetSourceVantageRequest) (*operations.GetSourceVantageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Vantage", request, nil)
if err != nil {
@@ -33935,7 +34202,7 @@ func (s *sources) GetSourceVantage(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -33965,12 +34232,14 @@ func (s *sources) GetSourceVantage(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -33981,7 +34250,7 @@ func (s *sources) GetSourceVantage(ctx context.Context, request operations.GetSo
}
// GetSourceWebflow - Get Source details
-func (s *sources) GetSourceWebflow(ctx context.Context, request operations.GetSourceWebflowRequest) (*operations.GetSourceWebflowResponse, error) {
+func (s *Sources) GetSourceWebflow(ctx context.Context, request operations.GetSourceWebflowRequest) (*operations.GetSourceWebflowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Webflow", request, nil)
if err != nil {
@@ -33993,7 +34262,7 @@ func (s *sources) GetSourceWebflow(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34023,12 +34292,14 @@ func (s *sources) GetSourceWebflow(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34039,7 +34310,7 @@ func (s *sources) GetSourceWebflow(ctx context.Context, request operations.GetSo
}
// GetSourceWhiskyHunter - Get Source details
-func (s *sources) GetSourceWhiskyHunter(ctx context.Context, request operations.GetSourceWhiskyHunterRequest) (*operations.GetSourceWhiskyHunterResponse, error) {
+func (s *Sources) GetSourceWhiskyHunter(ctx context.Context, request operations.GetSourceWhiskyHunterRequest) (*operations.GetSourceWhiskyHunterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#WhiskyHunter", request, nil)
if err != nil {
@@ -34051,7 +34322,7 @@ func (s *sources) GetSourceWhiskyHunter(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34081,12 +34352,14 @@ func (s *sources) GetSourceWhiskyHunter(ctx context.Context, request operations.
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34097,7 +34370,7 @@ func (s *sources) GetSourceWhiskyHunter(ctx context.Context, request operations.
}
// GetSourceWikipediaPageviews - Get Source details
-func (s *sources) GetSourceWikipediaPageviews(ctx context.Context, request operations.GetSourceWikipediaPageviewsRequest) (*operations.GetSourceWikipediaPageviewsResponse, error) {
+func (s *Sources) GetSourceWikipediaPageviews(ctx context.Context, request operations.GetSourceWikipediaPageviewsRequest) (*operations.GetSourceWikipediaPageviewsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#WikipediaPageviews", request, nil)
if err != nil {
@@ -34109,7 +34382,7 @@ func (s *sources) GetSourceWikipediaPageviews(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34139,12 +34412,14 @@ func (s *sources) GetSourceWikipediaPageviews(ctx context.Context, request opera
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34155,7 +34430,7 @@ func (s *sources) GetSourceWikipediaPageviews(ctx context.Context, request opera
}
// GetSourceWoocommerce - Get Source details
-func (s *sources) GetSourceWoocommerce(ctx context.Context, request operations.GetSourceWoocommerceRequest) (*operations.GetSourceWoocommerceResponse, error) {
+func (s *Sources) GetSourceWoocommerce(ctx context.Context, request operations.GetSourceWoocommerceRequest) (*operations.GetSourceWoocommerceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Woocommerce", request, nil)
if err != nil {
@@ -34167,7 +34442,7 @@ func (s *sources) GetSourceWoocommerce(ctx context.Context, request operations.G
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34197,70 +34472,14 @@ func (s *sources) GetSourceWoocommerce(ctx context.Context, request operations.G
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
- }
-
- res.SourceResponse = out
- }
- case httpRes.StatusCode == 403:
- fallthrough
- case httpRes.StatusCode == 404:
- }
-
- return res, nil
-}
-
-// GetSourceXero - Get Source details
-func (s *sources) GetSourceXero(ctx context.Context, request operations.GetSourceXeroRequest) (*operations.GetSourceXeroResponse, error) {
- baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Xero", request, nil)
- if err != nil {
- return nil, fmt.Errorf("error generating URL: %w", err)
- }
-
- req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
- if err != nil {
- return nil, fmt.Errorf("error creating request: %w", err)
- }
- req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
-
- client := s.sdkConfiguration.SecurityClient
-
- httpRes, err := client.Do(req)
- if err != nil {
- return nil, fmt.Errorf("error sending request: %w", err)
- }
- if httpRes == nil {
- return nil, fmt.Errorf("error sending request: no response")
- }
-
- rawBody, err := io.ReadAll(httpRes.Body)
- if err != nil {
- return nil, fmt.Errorf("error reading response body: %w", err)
- }
- httpRes.Body.Close()
- httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
-
- contentType := httpRes.Header.Get("Content-Type")
-
- res := &operations.GetSourceXeroResponse{
- StatusCode: httpRes.StatusCode,
- ContentType: contentType,
- RawResponse: httpRes,
- }
- switch {
- case httpRes.StatusCode == 200:
- switch {
- case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34271,7 +34490,7 @@ func (s *sources) GetSourceXero(ctx context.Context, request operations.GetSourc
}
// GetSourceXkcd - Get Source details
-func (s *sources) GetSourceXkcd(ctx context.Context, request operations.GetSourceXkcdRequest) (*operations.GetSourceXkcdResponse, error) {
+func (s *Sources) GetSourceXkcd(ctx context.Context, request operations.GetSourceXkcdRequest) (*operations.GetSourceXkcdResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Xkcd", request, nil)
if err != nil {
@@ -34283,7 +34502,7 @@ func (s *sources) GetSourceXkcd(ctx context.Context, request operations.GetSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34313,12 +34532,14 @@ func (s *sources) GetSourceXkcd(ctx context.Context, request operations.GetSourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34329,7 +34550,7 @@ func (s *sources) GetSourceXkcd(ctx context.Context, request operations.GetSourc
}
// GetSourceYandexMetrica - Get Source details
-func (s *sources) GetSourceYandexMetrica(ctx context.Context, request operations.GetSourceYandexMetricaRequest) (*operations.GetSourceYandexMetricaResponse, error) {
+func (s *Sources) GetSourceYandexMetrica(ctx context.Context, request operations.GetSourceYandexMetricaRequest) (*operations.GetSourceYandexMetricaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#YandexMetrica", request, nil)
if err != nil {
@@ -34341,7 +34562,7 @@ func (s *sources) GetSourceYandexMetrica(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34371,12 +34592,14 @@ func (s *sources) GetSourceYandexMetrica(ctx context.Context, request operations
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34387,7 +34610,7 @@ func (s *sources) GetSourceYandexMetrica(ctx context.Context, request operations
}
// GetSourceYotpo - Get Source details
-func (s *sources) GetSourceYotpo(ctx context.Context, request operations.GetSourceYotpoRequest) (*operations.GetSourceYotpoResponse, error) {
+func (s *Sources) GetSourceYotpo(ctx context.Context, request operations.GetSourceYotpoRequest) (*operations.GetSourceYotpoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Yotpo", request, nil)
if err != nil {
@@ -34399,7 +34622,7 @@ func (s *sources) GetSourceYotpo(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34429,12 +34652,14 @@ func (s *sources) GetSourceYotpo(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34444,10 +34669,10 @@ func (s *sources) GetSourceYotpo(ctx context.Context, request operations.GetSour
return res, nil
}
-// GetSourceYounium - Get Source details
-func (s *sources) GetSourceYounium(ctx context.Context, request operations.GetSourceYouniumRequest) (*operations.GetSourceYouniumResponse, error) {
+// GetSourceYoutubeAnalytics - Get Source details
+func (s *Sources) GetSourceYoutubeAnalytics(ctx context.Context, request operations.GetSourceYoutubeAnalyticsRequest) (*operations.GetSourceYoutubeAnalyticsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Younium", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#YoutubeAnalytics", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -34457,7 +34682,7 @@ func (s *sources) GetSourceYounium(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34478,7 +34703,7 @@ func (s *sources) GetSourceYounium(ctx context.Context, request operations.GetSo
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceYouniumResponse{
+ res := &operations.GetSourceYoutubeAnalyticsResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -34487,12 +34712,14 @@ func (s *sources) GetSourceYounium(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34502,10 +34729,10 @@ func (s *sources) GetSourceYounium(ctx context.Context, request operations.GetSo
return res, nil
}
-// GetSourceYoutubeAnalytics - Get Source details
-func (s *sources) GetSourceYoutubeAnalytics(ctx context.Context, request operations.GetSourceYoutubeAnalyticsRequest) (*operations.GetSourceYoutubeAnalyticsResponse, error) {
+// GetSourceZendeskChat - Get Source details
+func (s *Sources) GetSourceZendeskChat(ctx context.Context, request operations.GetSourceZendeskChatRequest) (*operations.GetSourceZendeskChatResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#YoutubeAnalytics", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskChat", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -34515,7 +34742,7 @@ func (s *sources) GetSourceYoutubeAnalytics(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34536,7 +34763,7 @@ func (s *sources) GetSourceYoutubeAnalytics(ctx context.Context, request operati
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceYoutubeAnalyticsResponse{
+ res := &operations.GetSourceZendeskChatResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -34545,12 +34772,14 @@ func (s *sources) GetSourceYoutubeAnalytics(ctx context.Context, request operati
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34560,10 +34789,10 @@ func (s *sources) GetSourceYoutubeAnalytics(ctx context.Context, request operati
return res, nil
}
-// GetSourceZendeskChat - Get Source details
-func (s *sources) GetSourceZendeskChat(ctx context.Context, request operations.GetSourceZendeskChatRequest) (*operations.GetSourceZendeskChatResponse, error) {
+// GetSourceZendeskSell - Get Source details
+func (s *Sources) GetSourceZendeskSell(ctx context.Context, request operations.GetSourceZendeskSellRequest) (*operations.GetSourceZendeskSellResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskChat", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskSell", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
@@ -34573,7 +34802,7 @@ func (s *sources) GetSourceZendeskChat(ctx context.Context, request operations.G
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34594,7 +34823,7 @@ func (s *sources) GetSourceZendeskChat(ctx context.Context, request operations.G
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.GetSourceZendeskChatResponse{
+ res := &operations.GetSourceZendeskSellResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -34603,12 +34832,14 @@ func (s *sources) GetSourceZendeskChat(ctx context.Context, request operations.G
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34619,7 +34850,7 @@ func (s *sources) GetSourceZendeskChat(ctx context.Context, request operations.G
}
// GetSourceZendeskSunshine - Get Source details
-func (s *sources) GetSourceZendeskSunshine(ctx context.Context, request operations.GetSourceZendeskSunshineRequest) (*operations.GetSourceZendeskSunshineResponse, error) {
+func (s *Sources) GetSourceZendeskSunshine(ctx context.Context, request operations.GetSourceZendeskSunshineRequest) (*operations.GetSourceZendeskSunshineResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskSunshine", request, nil)
if err != nil {
@@ -34631,7 +34862,7 @@ func (s *sources) GetSourceZendeskSunshine(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34661,12 +34892,14 @@ func (s *sources) GetSourceZendeskSunshine(ctx context.Context, request operatio
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34677,7 +34910,7 @@ func (s *sources) GetSourceZendeskSunshine(ctx context.Context, request operatio
}
// GetSourceZendeskSupport - Get Source details
-func (s *sources) GetSourceZendeskSupport(ctx context.Context, request operations.GetSourceZendeskSupportRequest) (*operations.GetSourceZendeskSupportResponse, error) {
+func (s *Sources) GetSourceZendeskSupport(ctx context.Context, request operations.GetSourceZendeskSupportRequest) (*operations.GetSourceZendeskSupportResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskSupport", request, nil)
if err != nil {
@@ -34689,7 +34922,7 @@ func (s *sources) GetSourceZendeskSupport(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34719,12 +34952,14 @@ func (s *sources) GetSourceZendeskSupport(ctx context.Context, request operation
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34735,7 +34970,7 @@ func (s *sources) GetSourceZendeskSupport(ctx context.Context, request operation
}
// GetSourceZendeskTalk - Get Source details
-func (s *sources) GetSourceZendeskTalk(ctx context.Context, request operations.GetSourceZendeskTalkRequest) (*operations.GetSourceZendeskTalkResponse, error) {
+func (s *Sources) GetSourceZendeskTalk(ctx context.Context, request operations.GetSourceZendeskTalkRequest) (*operations.GetSourceZendeskTalkResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskTalk", request, nil)
if err != nil {
@@ -34747,7 +34982,7 @@ func (s *sources) GetSourceZendeskTalk(ctx context.Context, request operations.G
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34777,12 +35012,14 @@ func (s *sources) GetSourceZendeskTalk(ctx context.Context, request operations.G
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34793,7 +35030,7 @@ func (s *sources) GetSourceZendeskTalk(ctx context.Context, request operations.G
}
// GetSourceZenloop - Get Source details
-func (s *sources) GetSourceZenloop(ctx context.Context, request operations.GetSourceZenloopRequest) (*operations.GetSourceZenloopResponse, error) {
+func (s *Sources) GetSourceZenloop(ctx context.Context, request operations.GetSourceZenloopRequest) (*operations.GetSourceZenloopResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Zenloop", request, nil)
if err != nil {
@@ -34805,7 +35042,7 @@ func (s *sources) GetSourceZenloop(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34835,12 +35072,14 @@ func (s *sources) GetSourceZenloop(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34851,7 +35090,7 @@ func (s *sources) GetSourceZenloop(ctx context.Context, request operations.GetSo
}
// GetSourceZohoCrm - Get Source details
-func (s *sources) GetSourceZohoCrm(ctx context.Context, request operations.GetSourceZohoCrmRequest) (*operations.GetSourceZohoCrmResponse, error) {
+func (s *Sources) GetSourceZohoCrm(ctx context.Context, request operations.GetSourceZohoCrmRequest) (*operations.GetSourceZohoCrmResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZohoCrm", request, nil)
if err != nil {
@@ -34863,7 +35102,7 @@ func (s *sources) GetSourceZohoCrm(ctx context.Context, request operations.GetSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34893,12 +35132,14 @@ func (s *sources) GetSourceZohoCrm(ctx context.Context, request operations.GetSo
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34909,7 +35150,7 @@ func (s *sources) GetSourceZohoCrm(ctx context.Context, request operations.GetSo
}
// GetSourceZoom - Get Source details
-func (s *sources) GetSourceZoom(ctx context.Context, request operations.GetSourceZoomRequest) (*operations.GetSourceZoomResponse, error) {
+func (s *Sources) GetSourceZoom(ctx context.Context, request operations.GetSourceZoomRequest) (*operations.GetSourceZoomResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Zoom", request, nil)
if err != nil {
@@ -34921,7 +35162,7 @@ func (s *sources) GetSourceZoom(ctx context.Context, request operations.GetSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -34951,12 +35192,14 @@ func (s *sources) GetSourceZoom(ctx context.Context, request operations.GetSourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -34967,7 +35210,7 @@ func (s *sources) GetSourceZoom(ctx context.Context, request operations.GetSourc
}
// GetSourceZuora - Get Source details
-func (s *sources) GetSourceZuora(ctx context.Context, request operations.GetSourceZuoraRequest) (*operations.GetSourceZuoraResponse, error) {
+func (s *Sources) GetSourceZuora(ctx context.Context, request operations.GetSourceZuoraRequest) (*operations.GetSourceZuoraResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Zuora", request, nil)
if err != nil {
@@ -34979,7 +35222,7 @@ func (s *sources) GetSourceZuora(ctx context.Context, request operations.GetSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -35009,12 +35252,14 @@ func (s *sources) GetSourceZuora(ctx context.Context, request operations.GetSour
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -35030,11 +35275,11 @@ func (s *sources) GetSourceZuora(ctx context.Context, request operations.GetSour
// This returns a fully formed URL for performing user authentication against the relevant source identity provider (IdP). Once authentication has been completed, the IdP will redirect to an Airbyte endpoint which will save the access and refresh tokens off as a secret and return the secret ID to the redirect URL specified in the `secret_id` query string parameter.
//
// That secret ID can be used to create a source with credentials in place of actual tokens.
-func (s *sources) InitiateOAuth(ctx context.Context, request shared.InitiateOauthRequest) (*operations.InitiateOAuthResponse, error) {
+func (s *Sources) InitiateOAuth(ctx context.Context, request shared.InitiateOauthRequest) (*operations.InitiateOAuthResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources/initiateOAuth"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, false, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
@@ -35050,7 +35295,7 @@ func (s *sources) InitiateOAuth(ctx context.Context, request shared.InitiateOaut
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35091,7 +35336,7 @@ func (s *sources) InitiateOAuth(ctx context.Context, request shared.InitiateOaut
}
// ListSources - List sources
-func (s *sources) ListSources(ctx context.Context, request operations.ListSourcesRequest) (*operations.ListSourcesResponse, error) {
+func (s *Sources) ListSources(ctx context.Context, request operations.ListSourcesRequest) (*operations.ListSourcesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/sources"
@@ -35100,7 +35345,7 @@ func (s *sources) ListSources(ctx context.Context, request operations.ListSource
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
if err := utils.PopulateQueryParams(ctx, req, request, nil); err != nil {
return nil, fmt.Errorf("error populating query params: %w", err)
@@ -35134,12 +35379,14 @@ func (s *sources) ListSources(ctx context.Context, request operations.ListSource
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourcesResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourcesResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourcesResponse = out
+ res.SourcesResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -35150,18 +35397,17 @@ func (s *sources) ListSources(ctx context.Context, request operations.ListSource
}
// PatchSource - Update a Source
-func (s *sources) PatchSource(ctx context.Context, request operations.PatchSourceRequest) (*operations.PatchSourceResponse, error) {
+func (s *Sources) PatchSource(ctx context.Context, request operations.PatchSourceRequest) (*operations.PatchSourceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePatchRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePatchRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35170,7 +35416,7 @@ func (s *sources) PatchSource(ctx context.Context, request operations.PatchSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35203,12 +35449,14 @@ func (s *sources) PatchSource(ctx context.Context, request operations.PatchSourc
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -35219,18 +35467,17 @@ func (s *sources) PatchSource(ctx context.Context, request operations.PatchSourc
}
// PutSource - Update a Source and fully overwrite it
-func (s *sources) PutSource(ctx context.Context, request operations.PutSourceRequest) (*operations.PutSourceResponse, error) {
+func (s *Sources) PutSource(ctx context.Context, request operations.PutSourceRequest) (*operations.PutSourceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35239,7 +35486,7 @@ func (s *sources) PutSource(ctx context.Context, request operations.PutSourceReq
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35272,12 +35519,14 @@ func (s *sources) PutSource(ctx context.Context, request operations.PutSourceReq
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.SourceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.SourceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.SourceResponse = out
+ res.SourceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -35288,18 +35537,17 @@ func (s *sources) PutSource(ctx context.Context, request operations.PutSourceReq
}
// PutSourceAha - Update a Source fully
-func (s *sources) PutSourceAha(ctx context.Context, request operations.PutSourceAhaRequest) (*operations.PutSourceAhaResponse, error) {
+func (s *Sources) PutSourceAha(ctx context.Context, request operations.PutSourceAhaRequest) (*operations.PutSourceAhaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Aha", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAhaPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAhaPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35308,7 +35556,7 @@ func (s *sources) PutSourceAha(ctx context.Context, request operations.PutSource
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35349,18 +35597,17 @@ func (s *sources) PutSourceAha(ctx context.Context, request operations.PutSource
}
// PutSourceAircall - Update a Source fully
-func (s *sources) PutSourceAircall(ctx context.Context, request operations.PutSourceAircallRequest) (*operations.PutSourceAircallResponse, error) {
+func (s *Sources) PutSourceAircall(ctx context.Context, request operations.PutSourceAircallRequest) (*operations.PutSourceAircallResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Aircall", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAircallPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAircallPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35369,7 +35616,7 @@ func (s *sources) PutSourceAircall(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35410,18 +35657,17 @@ func (s *sources) PutSourceAircall(ctx context.Context, request operations.PutSo
}
// PutSourceAirtable - Update a Source fully
-func (s *sources) PutSourceAirtable(ctx context.Context, request operations.PutSourceAirtableRequest) (*operations.PutSourceAirtableResponse, error) {
+func (s *Sources) PutSourceAirtable(ctx context.Context, request operations.PutSourceAirtableRequest) (*operations.PutSourceAirtableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Airtable", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAirtablePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAirtablePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35430,7 +35676,7 @@ func (s *sources) PutSourceAirtable(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35471,18 +35717,17 @@ func (s *sources) PutSourceAirtable(ctx context.Context, request operations.PutS
}
// PutSourceAlloydb - Update a Source fully
-func (s *sources) PutSourceAlloydb(ctx context.Context, request operations.PutSourceAlloydbRequest) (*operations.PutSourceAlloydbResponse, error) {
+func (s *Sources) PutSourceAlloydb(ctx context.Context, request operations.PutSourceAlloydbRequest) (*operations.PutSourceAlloydbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Alloydb", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAlloydbPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAlloydbPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35491,7 +35736,7 @@ func (s *sources) PutSourceAlloydb(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35532,18 +35777,17 @@ func (s *sources) PutSourceAlloydb(ctx context.Context, request operations.PutSo
}
// PutSourceAmazonAds - Update a Source fully
-func (s *sources) PutSourceAmazonAds(ctx context.Context, request operations.PutSourceAmazonAdsRequest) (*operations.PutSourceAmazonAdsResponse, error) {
+func (s *Sources) PutSourceAmazonAds(ctx context.Context, request operations.PutSourceAmazonAdsRequest) (*operations.PutSourceAmazonAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AmazonAds", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAmazonAdsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAmazonAdsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35552,7 +35796,7 @@ func (s *sources) PutSourceAmazonAds(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35593,18 +35837,17 @@ func (s *sources) PutSourceAmazonAds(ctx context.Context, request operations.Put
}
// PutSourceAmazonSellerPartner - Update a Source fully
-func (s *sources) PutSourceAmazonSellerPartner(ctx context.Context, request operations.PutSourceAmazonSellerPartnerRequest) (*operations.PutSourceAmazonSellerPartnerResponse, error) {
+func (s *Sources) PutSourceAmazonSellerPartner(ctx context.Context, request operations.PutSourceAmazonSellerPartnerRequest) (*operations.PutSourceAmazonSellerPartnerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AmazonSellerPartner", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAmazonSellerPartnerPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAmazonSellerPartnerPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35613,7 +35856,7 @@ func (s *sources) PutSourceAmazonSellerPartner(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35654,18 +35897,17 @@ func (s *sources) PutSourceAmazonSellerPartner(ctx context.Context, request oper
}
// PutSourceAmazonSqs - Update a Source fully
-func (s *sources) PutSourceAmazonSqs(ctx context.Context, request operations.PutSourceAmazonSqsRequest) (*operations.PutSourceAmazonSqsResponse, error) {
+func (s *Sources) PutSourceAmazonSqs(ctx context.Context, request operations.PutSourceAmazonSqsRequest) (*operations.PutSourceAmazonSqsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AmazonSqs", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAmazonSqsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAmazonSqsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35674,7 +35916,7 @@ func (s *sources) PutSourceAmazonSqs(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35715,18 +35957,17 @@ func (s *sources) PutSourceAmazonSqs(ctx context.Context, request operations.Put
}
// PutSourceAmplitude - Update a Source fully
-func (s *sources) PutSourceAmplitude(ctx context.Context, request operations.PutSourceAmplitudeRequest) (*operations.PutSourceAmplitudeResponse, error) {
+func (s *Sources) PutSourceAmplitude(ctx context.Context, request operations.PutSourceAmplitudeRequest) (*operations.PutSourceAmplitudeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Amplitude", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAmplitudePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAmplitudePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35735,7 +35976,7 @@ func (s *sources) PutSourceAmplitude(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35776,18 +36017,17 @@ func (s *sources) PutSourceAmplitude(ctx context.Context, request operations.Put
}
// PutSourceApifyDataset - Update a Source fully
-func (s *sources) PutSourceApifyDataset(ctx context.Context, request operations.PutSourceApifyDatasetRequest) (*operations.PutSourceApifyDatasetResponse, error) {
+func (s *Sources) PutSourceApifyDataset(ctx context.Context, request operations.PutSourceApifyDatasetRequest) (*operations.PutSourceApifyDatasetResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ApifyDataset", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceApifyDatasetPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceApifyDatasetPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35796,7 +36036,7 @@ func (s *sources) PutSourceApifyDataset(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35837,18 +36077,17 @@ func (s *sources) PutSourceApifyDataset(ctx context.Context, request operations.
}
// PutSourceAppfollow - Update a Source fully
-func (s *sources) PutSourceAppfollow(ctx context.Context, request operations.PutSourceAppfollowRequest) (*operations.PutSourceAppfollowResponse, error) {
+func (s *Sources) PutSourceAppfollow(ctx context.Context, request operations.PutSourceAppfollowRequest) (*operations.PutSourceAppfollowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Appfollow", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAppfollowPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAppfollowPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35857,7 +36096,7 @@ func (s *sources) PutSourceAppfollow(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35898,18 +36137,17 @@ func (s *sources) PutSourceAppfollow(ctx context.Context, request operations.Put
}
// PutSourceAsana - Update a Source fully
-func (s *sources) PutSourceAsana(ctx context.Context, request operations.PutSourceAsanaRequest) (*operations.PutSourceAsanaResponse, error) {
+func (s *Sources) PutSourceAsana(ctx context.Context, request operations.PutSourceAsanaRequest) (*operations.PutSourceAsanaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Asana", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAsanaPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAsanaPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35918,7 +36156,7 @@ func (s *sources) PutSourceAsana(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -35959,18 +36197,17 @@ func (s *sources) PutSourceAsana(ctx context.Context, request operations.PutSour
}
// PutSourceAuth0 - Update a Source fully
-func (s *sources) PutSourceAuth0(ctx context.Context, request operations.PutSourceAuth0Request) (*operations.PutSourceAuth0Response, error) {
+func (s *Sources) PutSourceAuth0(ctx context.Context, request operations.PutSourceAuth0Request) (*operations.PutSourceAuth0Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Auth0", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAuth0PutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAuth0PutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -35979,7 +36216,7 @@ func (s *sources) PutSourceAuth0(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36020,18 +36257,17 @@ func (s *sources) PutSourceAuth0(ctx context.Context, request operations.PutSour
}
// PutSourceAwsCloudtrail - Update a Source fully
-func (s *sources) PutSourceAwsCloudtrail(ctx context.Context, request operations.PutSourceAwsCloudtrailRequest) (*operations.PutSourceAwsCloudtrailResponse, error) {
+func (s *Sources) PutSourceAwsCloudtrail(ctx context.Context, request operations.PutSourceAwsCloudtrailRequest) (*operations.PutSourceAwsCloudtrailResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AwsCloudtrail", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAwsCloudtrailPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAwsCloudtrailPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36040,7 +36276,7 @@ func (s *sources) PutSourceAwsCloudtrail(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36081,18 +36317,17 @@ func (s *sources) PutSourceAwsCloudtrail(ctx context.Context, request operations
}
// PutSourceAzureBlobStorage - Update a Source fully
-func (s *sources) PutSourceAzureBlobStorage(ctx context.Context, request operations.PutSourceAzureBlobStorageRequest) (*operations.PutSourceAzureBlobStorageResponse, error) {
+func (s *Sources) PutSourceAzureBlobStorage(ctx context.Context, request operations.PutSourceAzureBlobStorageRequest) (*operations.PutSourceAzureBlobStorageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AzureBlobStorage", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAzureBlobStoragePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAzureBlobStoragePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36101,7 +36336,7 @@ func (s *sources) PutSourceAzureBlobStorage(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36142,18 +36377,17 @@ func (s *sources) PutSourceAzureBlobStorage(ctx context.Context, request operati
}
// PutSourceAzureTable - Update a Source fully
-func (s *sources) PutSourceAzureTable(ctx context.Context, request operations.PutSourceAzureTableRequest) (*operations.PutSourceAzureTableResponse, error) {
+func (s *Sources) PutSourceAzureTable(ctx context.Context, request operations.PutSourceAzureTableRequest) (*operations.PutSourceAzureTableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#AzureTable", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceAzureTablePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceAzureTablePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36162,7 +36396,7 @@ func (s *sources) PutSourceAzureTable(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36203,18 +36437,17 @@ func (s *sources) PutSourceAzureTable(ctx context.Context, request operations.Pu
}
// PutSourceBambooHr - Update a Source fully
-func (s *sources) PutSourceBambooHr(ctx context.Context, request operations.PutSourceBambooHrRequest) (*operations.PutSourceBambooHrResponse, error) {
+func (s *Sources) PutSourceBambooHr(ctx context.Context, request operations.PutSourceBambooHrRequest) (*operations.PutSourceBambooHrResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#BambooHr", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceBambooHrPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceBambooHrPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36223,7 +36456,7 @@ func (s *sources) PutSourceBambooHr(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36263,19 +36496,18 @@ func (s *sources) PutSourceBambooHr(ctx context.Context, request operations.PutS
return res, nil
}
-// PutSourceBigcommerce - Update a Source fully
-func (s *sources) PutSourceBigcommerce(ctx context.Context, request operations.PutSourceBigcommerceRequest) (*operations.PutSourceBigcommerceResponse, error) {
+// PutSourceBigquery - Update a Source fully
+func (s *Sources) PutSourceBigquery(ctx context.Context, request operations.PutSourceBigqueryRequest) (*operations.PutSourceBigqueryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Bigcommerce", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Bigquery", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceBigcommercePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceBigqueryPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36284,7 +36516,7 @@ func (s *sources) PutSourceBigcommerce(ctx context.Context, request operations.P
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36308,7 +36540,7 @@ func (s *sources) PutSourceBigcommerce(ctx context.Context, request operations.P
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceBigcommerceResponse{
+ res := &operations.PutSourceBigqueryResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -36324,19 +36556,18 @@ func (s *sources) PutSourceBigcommerce(ctx context.Context, request operations.P
return res, nil
}
-// PutSourceBigquery - Update a Source fully
-func (s *sources) PutSourceBigquery(ctx context.Context, request operations.PutSourceBigqueryRequest) (*operations.PutSourceBigqueryResponse, error) {
+// PutSourceBingAds - Update a Source fully
+func (s *Sources) PutSourceBingAds(ctx context.Context, request operations.PutSourceBingAdsRequest) (*operations.PutSourceBingAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Bigquery", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#BingAds", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceBigqueryPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceBingAdsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36345,7 +36576,7 @@ func (s *sources) PutSourceBigquery(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36369,7 +36600,7 @@ func (s *sources) PutSourceBigquery(ctx context.Context, request operations.PutS
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceBigqueryResponse{
+ res := &operations.PutSourceBingAdsResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -36385,19 +36616,18 @@ func (s *sources) PutSourceBigquery(ctx context.Context, request operations.PutS
return res, nil
}
-// PutSourceBingAds - Update a Source fully
-func (s *sources) PutSourceBingAds(ctx context.Context, request operations.PutSourceBingAdsRequest) (*operations.PutSourceBingAdsResponse, error) {
+// PutSourceBraintree - Update a Source fully
+func (s *Sources) PutSourceBraintree(ctx context.Context, request operations.PutSourceBraintreeRequest) (*operations.PutSourceBraintreeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#BingAds", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braintree", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceBingAdsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceBraintreePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36406,7 +36636,7 @@ func (s *sources) PutSourceBingAds(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36430,7 +36660,7 @@ func (s *sources) PutSourceBingAds(ctx context.Context, request operations.PutSo
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceBingAdsResponse{
+ res := &operations.PutSourceBraintreeResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -36446,19 +36676,18 @@ func (s *sources) PutSourceBingAds(ctx context.Context, request operations.PutSo
return res, nil
}
-// PutSourceBraintree - Update a Source fully
-func (s *sources) PutSourceBraintree(ctx context.Context, request operations.PutSourceBraintreeRequest) (*operations.PutSourceBraintreeResponse, error) {
+// PutSourceBraze - Update a Source fully
+func (s *Sources) PutSourceBraze(ctx context.Context, request operations.PutSourceBrazeRequest) (*operations.PutSourceBrazeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braintree", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braze", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceBraintreePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceBrazePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36467,7 +36696,7 @@ func (s *sources) PutSourceBraintree(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36491,7 +36720,7 @@ func (s *sources) PutSourceBraintree(ctx context.Context, request operations.Put
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceBraintreeResponse{
+ res := &operations.PutSourceBrazeResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -36507,19 +36736,18 @@ func (s *sources) PutSourceBraintree(ctx context.Context, request operations.Put
return res, nil
}
-// PutSourceBraze - Update a Source fully
-func (s *sources) PutSourceBraze(ctx context.Context, request operations.PutSourceBrazeRequest) (*operations.PutSourceBrazeResponse, error) {
+// PutSourceCart - Update a Source fully
+func (s *Sources) PutSourceCart(ctx context.Context, request operations.PutSourceCartRequest) (*operations.PutSourceCartResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Braze", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Cart", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceBrazePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceCartPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36528,7 +36756,7 @@ func (s *sources) PutSourceBraze(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36552,7 +36780,7 @@ func (s *sources) PutSourceBraze(ctx context.Context, request operations.PutSour
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceBrazeResponse{
+ res := &operations.PutSourceCartResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -36569,18 +36797,17 @@ func (s *sources) PutSourceBraze(ctx context.Context, request operations.PutSour
}
// PutSourceChargebee - Update a Source fully
-func (s *sources) PutSourceChargebee(ctx context.Context, request operations.PutSourceChargebeeRequest) (*operations.PutSourceChargebeeResponse, error) {
+func (s *Sources) PutSourceChargebee(ctx context.Context, request operations.PutSourceChargebeeRequest) (*operations.PutSourceChargebeeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Chargebee", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceChargebeePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceChargebeePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36589,7 +36816,7 @@ func (s *sources) PutSourceChargebee(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36630,18 +36857,17 @@ func (s *sources) PutSourceChargebee(ctx context.Context, request operations.Put
}
// PutSourceChartmogul - Update a Source fully
-func (s *sources) PutSourceChartmogul(ctx context.Context, request operations.PutSourceChartmogulRequest) (*operations.PutSourceChartmogulResponse, error) {
+func (s *Sources) PutSourceChartmogul(ctx context.Context, request operations.PutSourceChartmogulRequest) (*operations.PutSourceChartmogulResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Chartmogul", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceChartmogulPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceChartmogulPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36650,7 +36876,7 @@ func (s *sources) PutSourceChartmogul(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36691,18 +36917,17 @@ func (s *sources) PutSourceChartmogul(ctx context.Context, request operations.Pu
}
// PutSourceClickhouse - Update a Source fully
-func (s *sources) PutSourceClickhouse(ctx context.Context, request operations.PutSourceClickhouseRequest) (*operations.PutSourceClickhouseResponse, error) {
+func (s *Sources) PutSourceClickhouse(ctx context.Context, request operations.PutSourceClickhouseRequest) (*operations.PutSourceClickhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Clickhouse", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceClickhousePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceClickhousePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36711,7 +36936,7 @@ func (s *sources) PutSourceClickhouse(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36752,18 +36977,17 @@ func (s *sources) PutSourceClickhouse(ctx context.Context, request operations.Pu
}
// PutSourceClickupAPI - Update a Source fully
-func (s *sources) PutSourceClickupAPI(ctx context.Context, request operations.PutSourceClickupAPIRequest) (*operations.PutSourceClickupAPIResponse, error) {
+func (s *Sources) PutSourceClickupAPI(ctx context.Context, request operations.PutSourceClickupAPIRequest) (*operations.PutSourceClickupAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ClickupApi", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceClickupAPIPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceClickupAPIPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36772,7 +36996,7 @@ func (s *sources) PutSourceClickupAPI(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36813,18 +37037,17 @@ func (s *sources) PutSourceClickupAPI(ctx context.Context, request operations.Pu
}
// PutSourceClockify - Update a Source fully
-func (s *sources) PutSourceClockify(ctx context.Context, request operations.PutSourceClockifyRequest) (*operations.PutSourceClockifyResponse, error) {
+func (s *Sources) PutSourceClockify(ctx context.Context, request operations.PutSourceClockifyRequest) (*operations.PutSourceClockifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Clockify", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceClockifyPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceClockifyPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36833,7 +37056,7 @@ func (s *sources) PutSourceClockify(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36874,18 +37097,17 @@ func (s *sources) PutSourceClockify(ctx context.Context, request operations.PutS
}
// PutSourceCloseCom - Update a Source fully
-func (s *sources) PutSourceCloseCom(ctx context.Context, request operations.PutSourceCloseComRequest) (*operations.PutSourceCloseComResponse, error) {
+func (s *Sources) PutSourceCloseCom(ctx context.Context, request operations.PutSourceCloseComRequest) (*operations.PutSourceCloseComResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#CloseCom", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceCloseComPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceCloseComPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36894,7 +37116,7 @@ func (s *sources) PutSourceCloseCom(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36935,18 +37157,17 @@ func (s *sources) PutSourceCloseCom(ctx context.Context, request operations.PutS
}
// PutSourceCoda - Update a Source fully
-func (s *sources) PutSourceCoda(ctx context.Context, request operations.PutSourceCodaRequest) (*operations.PutSourceCodaResponse, error) {
+func (s *Sources) PutSourceCoda(ctx context.Context, request operations.PutSourceCodaRequest) (*operations.PutSourceCodaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Coda", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceCodaPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceCodaPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -36955,7 +37176,7 @@ func (s *sources) PutSourceCoda(ctx context.Context, request operations.PutSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -36996,18 +37217,17 @@ func (s *sources) PutSourceCoda(ctx context.Context, request operations.PutSourc
}
// PutSourceCoinAPI - Update a Source fully
-func (s *sources) PutSourceCoinAPI(ctx context.Context, request operations.PutSourceCoinAPIRequest) (*operations.PutSourceCoinAPIResponse, error) {
+func (s *Sources) PutSourceCoinAPI(ctx context.Context, request operations.PutSourceCoinAPIRequest) (*operations.PutSourceCoinAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#CoinApi", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceCoinAPIPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceCoinAPIPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37016,7 +37236,7 @@ func (s *sources) PutSourceCoinAPI(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37057,18 +37277,17 @@ func (s *sources) PutSourceCoinAPI(ctx context.Context, request operations.PutSo
}
// PutSourceCoinmarketcap - Update a Source fully
-func (s *sources) PutSourceCoinmarketcap(ctx context.Context, request operations.PutSourceCoinmarketcapRequest) (*operations.PutSourceCoinmarketcapResponse, error) {
+func (s *Sources) PutSourceCoinmarketcap(ctx context.Context, request operations.PutSourceCoinmarketcapRequest) (*operations.PutSourceCoinmarketcapResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Coinmarketcap", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceCoinmarketcapPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceCoinmarketcapPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37077,7 +37296,7 @@ func (s *sources) PutSourceCoinmarketcap(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37118,18 +37337,17 @@ func (s *sources) PutSourceCoinmarketcap(ctx context.Context, request operations
}
// PutSourceConfigcat - Update a Source fully
-func (s *sources) PutSourceConfigcat(ctx context.Context, request operations.PutSourceConfigcatRequest) (*operations.PutSourceConfigcatResponse, error) {
+func (s *Sources) PutSourceConfigcat(ctx context.Context, request operations.PutSourceConfigcatRequest) (*operations.PutSourceConfigcatResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Configcat", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceConfigcatPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceConfigcatPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37138,7 +37356,7 @@ func (s *sources) PutSourceConfigcat(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37179,18 +37397,17 @@ func (s *sources) PutSourceConfigcat(ctx context.Context, request operations.Put
}
// PutSourceConfluence - Update a Source fully
-func (s *sources) PutSourceConfluence(ctx context.Context, request operations.PutSourceConfluenceRequest) (*operations.PutSourceConfluenceResponse, error) {
+func (s *Sources) PutSourceConfluence(ctx context.Context, request operations.PutSourceConfluenceRequest) (*operations.PutSourceConfluenceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Confluence", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceConfluencePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceConfluencePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37199,7 +37416,7 @@ func (s *sources) PutSourceConfluence(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37240,18 +37457,17 @@ func (s *sources) PutSourceConfluence(ctx context.Context, request operations.Pu
}
// PutSourceConvex - Update a Source fully
-func (s *sources) PutSourceConvex(ctx context.Context, request operations.PutSourceConvexRequest) (*operations.PutSourceConvexResponse, error) {
+func (s *Sources) PutSourceConvex(ctx context.Context, request operations.PutSourceConvexRequest) (*operations.PutSourceConvexResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Convex", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceConvexPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceConvexPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37260,7 +37476,7 @@ func (s *sources) PutSourceConvex(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37301,18 +37517,17 @@ func (s *sources) PutSourceConvex(ctx context.Context, request operations.PutSou
}
// PutSourceDatascope - Update a Source fully
-func (s *sources) PutSourceDatascope(ctx context.Context, request operations.PutSourceDatascopeRequest) (*operations.PutSourceDatascopeResponse, error) {
+func (s *Sources) PutSourceDatascope(ctx context.Context, request operations.PutSourceDatascopeRequest) (*operations.PutSourceDatascopeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Datascope", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceDatascopePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceDatascopePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37321,7 +37536,7 @@ func (s *sources) PutSourceDatascope(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37362,18 +37577,17 @@ func (s *sources) PutSourceDatascope(ctx context.Context, request operations.Put
}
// PutSourceDelighted - Update a Source fully
-func (s *sources) PutSourceDelighted(ctx context.Context, request operations.PutSourceDelightedRequest) (*operations.PutSourceDelightedResponse, error) {
+func (s *Sources) PutSourceDelighted(ctx context.Context, request operations.PutSourceDelightedRequest) (*operations.PutSourceDelightedResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Delighted", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceDelightedPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceDelightedPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37382,7 +37596,7 @@ func (s *sources) PutSourceDelighted(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37423,18 +37637,17 @@ func (s *sources) PutSourceDelighted(ctx context.Context, request operations.Put
}
// PutSourceDixa - Update a Source fully
-func (s *sources) PutSourceDixa(ctx context.Context, request operations.PutSourceDixaRequest) (*operations.PutSourceDixaResponse, error) {
+func (s *Sources) PutSourceDixa(ctx context.Context, request operations.PutSourceDixaRequest) (*operations.PutSourceDixaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dixa", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceDixaPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceDixaPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37443,7 +37656,7 @@ func (s *sources) PutSourceDixa(ctx context.Context, request operations.PutSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37484,18 +37697,17 @@ func (s *sources) PutSourceDixa(ctx context.Context, request operations.PutSourc
}
// PutSourceDockerhub - Update a Source fully
-func (s *sources) PutSourceDockerhub(ctx context.Context, request operations.PutSourceDockerhubRequest) (*operations.PutSourceDockerhubResponse, error) {
+func (s *Sources) PutSourceDockerhub(ctx context.Context, request operations.PutSourceDockerhubRequest) (*operations.PutSourceDockerhubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dockerhub", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceDockerhubPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceDockerhubPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37504,7 +37716,7 @@ func (s *sources) PutSourceDockerhub(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37545,18 +37757,17 @@ func (s *sources) PutSourceDockerhub(ctx context.Context, request operations.Put
}
// PutSourceDremio - Update a Source fully
-func (s *sources) PutSourceDremio(ctx context.Context, request operations.PutSourceDremioRequest) (*operations.PutSourceDremioResponse, error) {
+func (s *Sources) PutSourceDremio(ctx context.Context, request operations.PutSourceDremioRequest) (*operations.PutSourceDremioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dremio", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceDremioPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceDremioPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37565,7 +37776,7 @@ func (s *sources) PutSourceDremio(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37606,18 +37817,17 @@ func (s *sources) PutSourceDremio(ctx context.Context, request operations.PutSou
}
// PutSourceDynamodb - Update a Source fully
-func (s *sources) PutSourceDynamodb(ctx context.Context, request operations.PutSourceDynamodbRequest) (*operations.PutSourceDynamodbResponse, error) {
+func (s *Sources) PutSourceDynamodb(ctx context.Context, request operations.PutSourceDynamodbRequest) (*operations.PutSourceDynamodbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Dynamodb", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceDynamodbPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceDynamodbPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37626,7 +37836,7 @@ func (s *sources) PutSourceDynamodb(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37666,80 +37876,18 @@ func (s *sources) PutSourceDynamodb(ctx context.Context, request operations.PutS
return res, nil
}
-// PutSourceE2eTestCloud - Update a Source fully
-func (s *sources) PutSourceE2eTestCloud(ctx context.Context, request operations.PutSourceE2eTestCloudRequest) (*operations.PutSourceE2eTestCloudResponse, error) {
- baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#E2eTestCloud", request, nil)
- if err != nil {
- return nil, fmt.Errorf("error generating URL: %w", err)
- }
-
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceE2eTestCloudPutRequest", "json")
- if err != nil {
- return nil, fmt.Errorf("error serializing request body: %w", err)
- }
-
- debugBody := bytes.NewBuffer([]byte{})
- debugReader := io.TeeReader(bodyReader, debugBody)
-
- req, err := http.NewRequestWithContext(ctx, "PUT", url, debugReader)
- if err != nil {
- return nil, fmt.Errorf("error creating request: %w", err)
- }
- req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
-
- req.Header.Set("Content-Type", reqContentType)
-
- client := s.sdkConfiguration.SecurityClient
-
- httpRes, err := client.Do(req)
- if err != nil {
- return nil, fmt.Errorf("error sending request: %w", err)
- }
- if httpRes == nil {
- return nil, fmt.Errorf("error sending request: no response")
- }
-
- rawBody, err := io.ReadAll(httpRes.Body)
- if err != nil {
- return nil, fmt.Errorf("error reading response body: %w", err)
- }
- httpRes.Request.Body = io.NopCloser(debugBody)
- httpRes.Body.Close()
- httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
-
- contentType := httpRes.Header.Get("Content-Type")
-
- res := &operations.PutSourceE2eTestCloudResponse{
- StatusCode: httpRes.StatusCode,
- ContentType: contentType,
- RawResponse: httpRes,
- }
- switch {
- case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300:
- fallthrough
- case httpRes.StatusCode == 403:
- fallthrough
- case httpRes.StatusCode == 404:
- }
-
- return res, nil
-}
-
// PutSourceEmailoctopus - Update a Source fully
-func (s *sources) PutSourceEmailoctopus(ctx context.Context, request operations.PutSourceEmailoctopusRequest) (*operations.PutSourceEmailoctopusResponse, error) {
+func (s *Sources) PutSourceEmailoctopus(ctx context.Context, request operations.PutSourceEmailoctopusRequest) (*operations.PutSourceEmailoctopusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Emailoctopus", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceEmailoctopusPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceEmailoctopusPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37748,7 +37896,7 @@ func (s *sources) PutSourceEmailoctopus(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37789,18 +37937,17 @@ func (s *sources) PutSourceEmailoctopus(ctx context.Context, request operations.
}
// PutSourceExchangeRates - Update a Source fully
-func (s *sources) PutSourceExchangeRates(ctx context.Context, request operations.PutSourceExchangeRatesRequest) (*operations.PutSourceExchangeRatesResponse, error) {
+func (s *Sources) PutSourceExchangeRates(ctx context.Context, request operations.PutSourceExchangeRatesRequest) (*operations.PutSourceExchangeRatesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ExchangeRates", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceExchangeRatesPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceExchangeRatesPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37809,7 +37956,7 @@ func (s *sources) PutSourceExchangeRates(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37850,18 +37997,17 @@ func (s *sources) PutSourceExchangeRates(ctx context.Context, request operations
}
// PutSourceFacebookMarketing - Update a Source fully
-func (s *sources) PutSourceFacebookMarketing(ctx context.Context, request operations.PutSourceFacebookMarketingRequest) (*operations.PutSourceFacebookMarketingResponse, error) {
+func (s *Sources) PutSourceFacebookMarketing(ctx context.Context, request operations.PutSourceFacebookMarketingRequest) (*operations.PutSourceFacebookMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#FacebookMarketing", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceFacebookMarketingPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceFacebookMarketingPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37870,7 +38016,7 @@ func (s *sources) PutSourceFacebookMarketing(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37911,18 +38057,17 @@ func (s *sources) PutSourceFacebookMarketing(ctx context.Context, request operat
}
// PutSourceFacebookPages - Update a Source fully
-func (s *sources) PutSourceFacebookPages(ctx context.Context, request operations.PutSourceFacebookPagesRequest) (*operations.PutSourceFacebookPagesResponse, error) {
+func (s *Sources) PutSourceFacebookPages(ctx context.Context, request operations.PutSourceFacebookPagesRequest) (*operations.PutSourceFacebookPagesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#FacebookPages", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceFacebookPagesPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceFacebookPagesPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37931,7 +38076,7 @@ func (s *sources) PutSourceFacebookPages(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -37972,18 +38117,17 @@ func (s *sources) PutSourceFacebookPages(ctx context.Context, request operations
}
// PutSourceFaker - Update a Source fully
-func (s *sources) PutSourceFaker(ctx context.Context, request operations.PutSourceFakerRequest) (*operations.PutSourceFakerResponse, error) {
+func (s *Sources) PutSourceFaker(ctx context.Context, request operations.PutSourceFakerRequest) (*operations.PutSourceFakerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Faker", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceFakerPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceFakerPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -37992,7 +38136,7 @@ func (s *sources) PutSourceFaker(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38033,18 +38177,17 @@ func (s *sources) PutSourceFaker(ctx context.Context, request operations.PutSour
}
// PutSourceFauna - Update a Source fully
-func (s *sources) PutSourceFauna(ctx context.Context, request operations.PutSourceFaunaRequest) (*operations.PutSourceFaunaResponse, error) {
+func (s *Sources) PutSourceFauna(ctx context.Context, request operations.PutSourceFaunaRequest) (*operations.PutSourceFaunaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Fauna", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceFaunaPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceFaunaPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38053,7 +38196,7 @@ func (s *sources) PutSourceFauna(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38093,19 +38236,18 @@ func (s *sources) PutSourceFauna(ctx context.Context, request operations.PutSour
return res, nil
}
-// PutSourceFileSecure - Update a Source fully
-func (s *sources) PutSourceFileSecure(ctx context.Context, request operations.PutSourceFileSecureRequest) (*operations.PutSourceFileSecureResponse, error) {
+// PutSourceFile - Update a Source fully
+func (s *Sources) PutSourceFile(ctx context.Context, request operations.PutSourceFileRequest) (*operations.PutSourceFileResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#FileSecure", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#File", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceFileSecurePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceFilePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38114,7 +38256,7 @@ func (s *sources) PutSourceFileSecure(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38138,7 +38280,7 @@ func (s *sources) PutSourceFileSecure(ctx context.Context, request operations.Pu
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceFileSecureResponse{
+ res := &operations.PutSourceFileResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -38155,18 +38297,17 @@ func (s *sources) PutSourceFileSecure(ctx context.Context, request operations.Pu
}
// PutSourceFirebolt - Update a Source fully
-func (s *sources) PutSourceFirebolt(ctx context.Context, request operations.PutSourceFireboltRequest) (*operations.PutSourceFireboltResponse, error) {
+func (s *Sources) PutSourceFirebolt(ctx context.Context, request operations.PutSourceFireboltRequest) (*operations.PutSourceFireboltResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Firebolt", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceFireboltPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceFireboltPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38175,7 +38316,7 @@ func (s *sources) PutSourceFirebolt(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38216,18 +38357,17 @@ func (s *sources) PutSourceFirebolt(ctx context.Context, request operations.PutS
}
// PutSourceFreshcaller - Update a Source fully
-func (s *sources) PutSourceFreshcaller(ctx context.Context, request operations.PutSourceFreshcallerRequest) (*operations.PutSourceFreshcallerResponse, error) {
+func (s *Sources) PutSourceFreshcaller(ctx context.Context, request operations.PutSourceFreshcallerRequest) (*operations.PutSourceFreshcallerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Freshcaller", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceFreshcallerPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceFreshcallerPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38236,7 +38376,7 @@ func (s *sources) PutSourceFreshcaller(ctx context.Context, request operations.P
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38277,18 +38417,17 @@ func (s *sources) PutSourceFreshcaller(ctx context.Context, request operations.P
}
// PutSourceFreshdesk - Update a Source fully
-func (s *sources) PutSourceFreshdesk(ctx context.Context, request operations.PutSourceFreshdeskRequest) (*operations.PutSourceFreshdeskResponse, error) {
+func (s *Sources) PutSourceFreshdesk(ctx context.Context, request operations.PutSourceFreshdeskRequest) (*operations.PutSourceFreshdeskResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Freshdesk", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceFreshdeskPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceFreshdeskPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38297,7 +38436,7 @@ func (s *sources) PutSourceFreshdesk(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38338,18 +38477,17 @@ func (s *sources) PutSourceFreshdesk(ctx context.Context, request operations.Put
}
// PutSourceFreshsales - Update a Source fully
-func (s *sources) PutSourceFreshsales(ctx context.Context, request operations.PutSourceFreshsalesRequest) (*operations.PutSourceFreshsalesResponse, error) {
+func (s *Sources) PutSourceFreshsales(ctx context.Context, request operations.PutSourceFreshsalesRequest) (*operations.PutSourceFreshsalesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Freshsales", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceFreshsalesPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceFreshsalesPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38358,7 +38496,7 @@ func (s *sources) PutSourceFreshsales(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38399,18 +38537,17 @@ func (s *sources) PutSourceFreshsales(ctx context.Context, request operations.Pu
}
// PutSourceGainsightPx - Update a Source fully
-func (s *sources) PutSourceGainsightPx(ctx context.Context, request operations.PutSourceGainsightPxRequest) (*operations.PutSourceGainsightPxResponse, error) {
+func (s *Sources) PutSourceGainsightPx(ctx context.Context, request operations.PutSourceGainsightPxRequest) (*operations.PutSourceGainsightPxResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GainsightPx", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGainsightPxPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGainsightPxPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38419,7 +38556,7 @@ func (s *sources) PutSourceGainsightPx(ctx context.Context, request operations.P
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38460,18 +38597,17 @@ func (s *sources) PutSourceGainsightPx(ctx context.Context, request operations.P
}
// PutSourceGcs - Update a Source fully
-func (s *sources) PutSourceGcs(ctx context.Context, request operations.PutSourceGcsRequest) (*operations.PutSourceGcsResponse, error) {
+func (s *Sources) PutSourceGcs(ctx context.Context, request operations.PutSourceGcsRequest) (*operations.PutSourceGcsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gcs", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGcsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGcsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38480,7 +38616,7 @@ func (s *sources) PutSourceGcs(ctx context.Context, request operations.PutSource
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38521,18 +38657,17 @@ func (s *sources) PutSourceGcs(ctx context.Context, request operations.PutSource
}
// PutSourceGetlago - Update a Source fully
-func (s *sources) PutSourceGetlago(ctx context.Context, request operations.PutSourceGetlagoRequest) (*operations.PutSourceGetlagoResponse, error) {
+func (s *Sources) PutSourceGetlago(ctx context.Context, request operations.PutSourceGetlagoRequest) (*operations.PutSourceGetlagoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Getlago", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGetlagoPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGetlagoPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38541,7 +38676,7 @@ func (s *sources) PutSourceGetlago(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38582,18 +38717,17 @@ func (s *sources) PutSourceGetlago(ctx context.Context, request operations.PutSo
}
// PutSourceGithub - Update a Source fully
-func (s *sources) PutSourceGithub(ctx context.Context, request operations.PutSourceGithubRequest) (*operations.PutSourceGithubResponse, error) {
+func (s *Sources) PutSourceGithub(ctx context.Context, request operations.PutSourceGithubRequest) (*operations.PutSourceGithubResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Github", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGithubPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGithubPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38602,7 +38736,7 @@ func (s *sources) PutSourceGithub(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38643,18 +38777,17 @@ func (s *sources) PutSourceGithub(ctx context.Context, request operations.PutSou
}
// PutSourceGitlab - Update a Source fully
-func (s *sources) PutSourceGitlab(ctx context.Context, request operations.PutSourceGitlabRequest) (*operations.PutSourceGitlabResponse, error) {
+func (s *Sources) PutSourceGitlab(ctx context.Context, request operations.PutSourceGitlabRequest) (*operations.PutSourceGitlabResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gitlab", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGitlabPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGitlabPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38663,7 +38796,7 @@ func (s *sources) PutSourceGitlab(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38704,18 +38837,17 @@ func (s *sources) PutSourceGitlab(ctx context.Context, request operations.PutSou
}
// PutSourceGlassfrog - Update a Source fully
-func (s *sources) PutSourceGlassfrog(ctx context.Context, request operations.PutSourceGlassfrogRequest) (*operations.PutSourceGlassfrogResponse, error) {
+func (s *Sources) PutSourceGlassfrog(ctx context.Context, request operations.PutSourceGlassfrogRequest) (*operations.PutSourceGlassfrogResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Glassfrog", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGlassfrogPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGlassfrogPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38724,7 +38856,7 @@ func (s *sources) PutSourceGlassfrog(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38765,18 +38897,17 @@ func (s *sources) PutSourceGlassfrog(ctx context.Context, request operations.Put
}
// PutSourceGnews - Update a Source fully
-func (s *sources) PutSourceGnews(ctx context.Context, request operations.PutSourceGnewsRequest) (*operations.PutSourceGnewsResponse, error) {
+func (s *Sources) PutSourceGnews(ctx context.Context, request operations.PutSourceGnewsRequest) (*operations.PutSourceGnewsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gnews", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGnewsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGnewsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38785,7 +38916,7 @@ func (s *sources) PutSourceGnews(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38826,18 +38957,17 @@ func (s *sources) PutSourceGnews(ctx context.Context, request operations.PutSour
}
// PutSourceGoogleAds - Update a Source fully
-func (s *sources) PutSourceGoogleAds(ctx context.Context, request operations.PutSourceGoogleAdsRequest) (*operations.PutSourceGoogleAdsResponse, error) {
+func (s *Sources) PutSourceGoogleAds(ctx context.Context, request operations.PutSourceGoogleAdsRequest) (*operations.PutSourceGoogleAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleAds", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGoogleAdsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGoogleAdsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38846,7 +38976,7 @@ func (s *sources) PutSourceGoogleAds(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38887,18 +39017,17 @@ func (s *sources) PutSourceGoogleAds(ctx context.Context, request operations.Put
}
// PutSourceGoogleAnalyticsDataAPI - Update a Source fully
-func (s *sources) PutSourceGoogleAnalyticsDataAPI(ctx context.Context, request operations.PutSourceGoogleAnalyticsDataAPIRequest) (*operations.PutSourceGoogleAnalyticsDataAPIResponse, error) {
+func (s *Sources) PutSourceGoogleAnalyticsDataAPI(ctx context.Context, request operations.PutSourceGoogleAnalyticsDataAPIRequest) (*operations.PutSourceGoogleAnalyticsDataAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleAnalyticsDataApi", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGoogleAnalyticsDataAPIPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGoogleAnalyticsDataAPIPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38907,7 +39036,7 @@ func (s *sources) PutSourceGoogleAnalyticsDataAPI(ctx context.Context, request o
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38947,19 +39076,18 @@ func (s *sources) PutSourceGoogleAnalyticsDataAPI(ctx context.Context, request o
return res, nil
}
-// PutSourceGoogleAnalyticsV4 - Update a Source fully
-func (s *sources) PutSourceGoogleAnalyticsV4(ctx context.Context, request operations.PutSourceGoogleAnalyticsV4Request) (*operations.PutSourceGoogleAnalyticsV4Response, error) {
+// PutSourceGoogleDirectory - Update a Source fully
+func (s *Sources) PutSourceGoogleDirectory(ctx context.Context, request operations.PutSourceGoogleDirectoryRequest) (*operations.PutSourceGoogleDirectoryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleAnalyticsV4", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleDirectory", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGoogleAnalyticsV4PutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGoogleDirectoryPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -38968,7 +39096,7 @@ func (s *sources) PutSourceGoogleAnalyticsV4(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -38992,7 +39120,7 @@ func (s *sources) PutSourceGoogleAnalyticsV4(ctx context.Context, request operat
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceGoogleAnalyticsV4Response{
+ res := &operations.PutSourceGoogleDirectoryResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -39008,19 +39136,18 @@ func (s *sources) PutSourceGoogleAnalyticsV4(ctx context.Context, request operat
return res, nil
}
-// PutSourceGoogleDirectory - Update a Source fully
-func (s *sources) PutSourceGoogleDirectory(ctx context.Context, request operations.PutSourceGoogleDirectoryRequest) (*operations.PutSourceGoogleDirectoryResponse, error) {
+// PutSourceGoogleDrive - Update a Source fully
+func (s *Sources) PutSourceGoogleDrive(ctx context.Context, request operations.PutSourceGoogleDriveRequest) (*operations.PutSourceGoogleDriveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleDirectory", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleDrive", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGoogleDirectoryPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGoogleDrivePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39029,7 +39156,7 @@ func (s *sources) PutSourceGoogleDirectory(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39053,7 +39180,7 @@ func (s *sources) PutSourceGoogleDirectory(ctx context.Context, request operatio
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceGoogleDirectoryResponse{
+ res := &operations.PutSourceGoogleDriveResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -39070,18 +39197,17 @@ func (s *sources) PutSourceGoogleDirectory(ctx context.Context, request operatio
}
// PutSourceGooglePagespeedInsights - Update a Source fully
-func (s *sources) PutSourceGooglePagespeedInsights(ctx context.Context, request operations.PutSourceGooglePagespeedInsightsRequest) (*operations.PutSourceGooglePagespeedInsightsResponse, error) {
+func (s *Sources) PutSourceGooglePagespeedInsights(ctx context.Context, request operations.PutSourceGooglePagespeedInsightsRequest) (*operations.PutSourceGooglePagespeedInsightsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GooglePagespeedInsights", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGooglePagespeedInsightsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGooglePagespeedInsightsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39090,7 +39216,7 @@ func (s *sources) PutSourceGooglePagespeedInsights(ctx context.Context, request
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39131,18 +39257,17 @@ func (s *sources) PutSourceGooglePagespeedInsights(ctx context.Context, request
}
// PutSourceGoogleSearchConsole - Update a Source fully
-func (s *sources) PutSourceGoogleSearchConsole(ctx context.Context, request operations.PutSourceGoogleSearchConsoleRequest) (*operations.PutSourceGoogleSearchConsoleResponse, error) {
+func (s *Sources) PutSourceGoogleSearchConsole(ctx context.Context, request operations.PutSourceGoogleSearchConsoleRequest) (*operations.PutSourceGoogleSearchConsoleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleSearchConsole", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGoogleSearchConsolePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGoogleSearchConsolePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39151,7 +39276,7 @@ func (s *sources) PutSourceGoogleSearchConsole(ctx context.Context, request oper
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39192,18 +39317,17 @@ func (s *sources) PutSourceGoogleSearchConsole(ctx context.Context, request oper
}
// PutSourceGoogleSheets - Update a Source fully
-func (s *sources) PutSourceGoogleSheets(ctx context.Context, request operations.PutSourceGoogleSheetsRequest) (*operations.PutSourceGoogleSheetsResponse, error) {
+func (s *Sources) PutSourceGoogleSheets(ctx context.Context, request operations.PutSourceGoogleSheetsRequest) (*operations.PutSourceGoogleSheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleSheets", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGoogleSheetsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGoogleSheetsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39212,7 +39336,7 @@ func (s *sources) PutSourceGoogleSheets(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39253,18 +39377,17 @@ func (s *sources) PutSourceGoogleSheets(ctx context.Context, request operations.
}
// PutSourceGoogleWebfonts - Update a Source fully
-func (s *sources) PutSourceGoogleWebfonts(ctx context.Context, request operations.PutSourceGoogleWebfontsRequest) (*operations.PutSourceGoogleWebfontsResponse, error) {
+func (s *Sources) PutSourceGoogleWebfonts(ctx context.Context, request operations.PutSourceGoogleWebfontsRequest) (*operations.PutSourceGoogleWebfontsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleWebfonts", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGoogleWebfontsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGoogleWebfontsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39273,7 +39396,7 @@ func (s *sources) PutSourceGoogleWebfonts(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39314,18 +39437,17 @@ func (s *sources) PutSourceGoogleWebfonts(ctx context.Context, request operation
}
// PutSourceGoogleWorkspaceAdminReports - Update a Source fully
-func (s *sources) PutSourceGoogleWorkspaceAdminReports(ctx context.Context, request operations.PutSourceGoogleWorkspaceAdminReportsRequest) (*operations.PutSourceGoogleWorkspaceAdminReportsResponse, error) {
+func (s *Sources) PutSourceGoogleWorkspaceAdminReports(ctx context.Context, request operations.PutSourceGoogleWorkspaceAdminReportsRequest) (*operations.PutSourceGoogleWorkspaceAdminReportsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#GoogleWorkspaceAdminReports", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGoogleWorkspaceAdminReportsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGoogleWorkspaceAdminReportsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39334,7 +39456,7 @@ func (s *sources) PutSourceGoogleWorkspaceAdminReports(ctx context.Context, requ
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39375,18 +39497,17 @@ func (s *sources) PutSourceGoogleWorkspaceAdminReports(ctx context.Context, requ
}
// PutSourceGreenhouse - Update a Source fully
-func (s *sources) PutSourceGreenhouse(ctx context.Context, request operations.PutSourceGreenhouseRequest) (*operations.PutSourceGreenhouseResponse, error) {
+func (s *Sources) PutSourceGreenhouse(ctx context.Context, request operations.PutSourceGreenhouseRequest) (*operations.PutSourceGreenhouseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Greenhouse", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGreenhousePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGreenhousePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39395,7 +39516,7 @@ func (s *sources) PutSourceGreenhouse(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39436,18 +39557,17 @@ func (s *sources) PutSourceGreenhouse(ctx context.Context, request operations.Pu
}
// PutSourceGridly - Update a Source fully
-func (s *sources) PutSourceGridly(ctx context.Context, request operations.PutSourceGridlyRequest) (*operations.PutSourceGridlyResponse, error) {
+func (s *Sources) PutSourceGridly(ctx context.Context, request operations.PutSourceGridlyRequest) (*operations.PutSourceGridlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Gridly", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceGridlyPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceGridlyPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39456,7 +39576,7 @@ func (s *sources) PutSourceGridly(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39497,18 +39617,17 @@ func (s *sources) PutSourceGridly(ctx context.Context, request operations.PutSou
}
// PutSourceHarvest - Update a Source fully
-func (s *sources) PutSourceHarvest(ctx context.Context, request operations.PutSourceHarvestRequest) (*operations.PutSourceHarvestResponse, error) {
+func (s *Sources) PutSourceHarvest(ctx context.Context, request operations.PutSourceHarvestRequest) (*operations.PutSourceHarvestResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Harvest", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceHarvestPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceHarvestPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39517,7 +39636,7 @@ func (s *sources) PutSourceHarvest(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39558,18 +39677,17 @@ func (s *sources) PutSourceHarvest(ctx context.Context, request operations.PutSo
}
// PutSourceHubplanner - Update a Source fully
-func (s *sources) PutSourceHubplanner(ctx context.Context, request operations.PutSourceHubplannerRequest) (*operations.PutSourceHubplannerResponse, error) {
+func (s *Sources) PutSourceHubplanner(ctx context.Context, request operations.PutSourceHubplannerRequest) (*operations.PutSourceHubplannerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Hubplanner", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceHubplannerPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceHubplannerPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39578,7 +39696,7 @@ func (s *sources) PutSourceHubplanner(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39619,18 +39737,17 @@ func (s *sources) PutSourceHubplanner(ctx context.Context, request operations.Pu
}
// PutSourceHubspot - Update a Source fully
-func (s *sources) PutSourceHubspot(ctx context.Context, request operations.PutSourceHubspotRequest) (*operations.PutSourceHubspotResponse, error) {
+func (s *Sources) PutSourceHubspot(ctx context.Context, request operations.PutSourceHubspotRequest) (*operations.PutSourceHubspotResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Hubspot", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceHubspotPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceHubspotPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39639,7 +39756,7 @@ func (s *sources) PutSourceHubspot(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39680,18 +39797,17 @@ func (s *sources) PutSourceHubspot(ctx context.Context, request operations.PutSo
}
// PutSourceInsightly - Update a Source fully
-func (s *sources) PutSourceInsightly(ctx context.Context, request operations.PutSourceInsightlyRequest) (*operations.PutSourceInsightlyResponse, error) {
+func (s *Sources) PutSourceInsightly(ctx context.Context, request operations.PutSourceInsightlyRequest) (*operations.PutSourceInsightlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Insightly", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceInsightlyPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceInsightlyPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39700,7 +39816,7 @@ func (s *sources) PutSourceInsightly(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39741,18 +39857,17 @@ func (s *sources) PutSourceInsightly(ctx context.Context, request operations.Put
}
// PutSourceInstagram - Update a Source fully
-func (s *sources) PutSourceInstagram(ctx context.Context, request operations.PutSourceInstagramRequest) (*operations.PutSourceInstagramResponse, error) {
+func (s *Sources) PutSourceInstagram(ctx context.Context, request operations.PutSourceInstagramRequest) (*operations.PutSourceInstagramResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Instagram", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceInstagramPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceInstagramPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39761,7 +39876,7 @@ func (s *sources) PutSourceInstagram(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39802,18 +39917,17 @@ func (s *sources) PutSourceInstagram(ctx context.Context, request operations.Put
}
// PutSourceInstatus - Update a Source fully
-func (s *sources) PutSourceInstatus(ctx context.Context, request operations.PutSourceInstatusRequest) (*operations.PutSourceInstatusResponse, error) {
+func (s *Sources) PutSourceInstatus(ctx context.Context, request operations.PutSourceInstatusRequest) (*operations.PutSourceInstatusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Instatus", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceInstatusPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceInstatusPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39822,7 +39936,7 @@ func (s *sources) PutSourceInstatus(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39863,18 +39977,17 @@ func (s *sources) PutSourceInstatus(ctx context.Context, request operations.PutS
}
// PutSourceIntercom - Update a Source fully
-func (s *sources) PutSourceIntercom(ctx context.Context, request operations.PutSourceIntercomRequest) (*operations.PutSourceIntercomResponse, error) {
+func (s *Sources) PutSourceIntercom(ctx context.Context, request operations.PutSourceIntercomRequest) (*operations.PutSourceIntercomResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Intercom", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceIntercomPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceIntercomPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39883,7 +39996,7 @@ func (s *sources) PutSourceIntercom(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39924,18 +40037,17 @@ func (s *sources) PutSourceIntercom(ctx context.Context, request operations.PutS
}
// PutSourceIp2whois - Update a Source fully
-func (s *sources) PutSourceIp2whois(ctx context.Context, request operations.PutSourceIp2whoisRequest) (*operations.PutSourceIp2whoisResponse, error) {
+func (s *Sources) PutSourceIp2whois(ctx context.Context, request operations.PutSourceIp2whoisRequest) (*operations.PutSourceIp2whoisResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Ip2whois", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceIp2whoisPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceIp2whoisPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -39944,7 +40056,7 @@ func (s *sources) PutSourceIp2whois(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -39985,18 +40097,17 @@ func (s *sources) PutSourceIp2whois(ctx context.Context, request operations.PutS
}
// PutSourceIterable - Update a Source fully
-func (s *sources) PutSourceIterable(ctx context.Context, request operations.PutSourceIterableRequest) (*operations.PutSourceIterableResponse, error) {
+func (s *Sources) PutSourceIterable(ctx context.Context, request operations.PutSourceIterableRequest) (*operations.PutSourceIterableResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Iterable", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceIterablePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceIterablePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40005,7 +40116,7 @@ func (s *sources) PutSourceIterable(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40046,18 +40157,17 @@ func (s *sources) PutSourceIterable(ctx context.Context, request operations.PutS
}
// PutSourceJira - Update a Source fully
-func (s *sources) PutSourceJira(ctx context.Context, request operations.PutSourceJiraRequest) (*operations.PutSourceJiraResponse, error) {
+func (s *Sources) PutSourceJira(ctx context.Context, request operations.PutSourceJiraRequest) (*operations.PutSourceJiraResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Jira", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceJiraPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceJiraPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40066,7 +40176,7 @@ func (s *sources) PutSourceJira(ctx context.Context, request operations.PutSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40107,18 +40217,17 @@ func (s *sources) PutSourceJira(ctx context.Context, request operations.PutSourc
}
// PutSourceK6Cloud - Update a Source fully
-func (s *sources) PutSourceK6Cloud(ctx context.Context, request operations.PutSourceK6CloudRequest) (*operations.PutSourceK6CloudResponse, error) {
+func (s *Sources) PutSourceK6Cloud(ctx context.Context, request operations.PutSourceK6CloudRequest) (*operations.PutSourceK6CloudResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#K6Cloud", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceK6CloudPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceK6CloudPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40127,7 +40236,7 @@ func (s *sources) PutSourceK6Cloud(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40168,18 +40277,17 @@ func (s *sources) PutSourceK6Cloud(ctx context.Context, request operations.PutSo
}
// PutSourceKlarna - Update a Source fully
-func (s *sources) PutSourceKlarna(ctx context.Context, request operations.PutSourceKlarnaRequest) (*operations.PutSourceKlarnaResponse, error) {
+func (s *Sources) PutSourceKlarna(ctx context.Context, request operations.PutSourceKlarnaRequest) (*operations.PutSourceKlarnaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Klarna", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceKlarnaPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceKlarnaPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40188,7 +40296,7 @@ func (s *sources) PutSourceKlarna(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40229,18 +40337,17 @@ func (s *sources) PutSourceKlarna(ctx context.Context, request operations.PutSou
}
// PutSourceKlaviyo - Update a Source fully
-func (s *sources) PutSourceKlaviyo(ctx context.Context, request operations.PutSourceKlaviyoRequest) (*operations.PutSourceKlaviyoResponse, error) {
+func (s *Sources) PutSourceKlaviyo(ctx context.Context, request operations.PutSourceKlaviyoRequest) (*operations.PutSourceKlaviyoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Klaviyo", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceKlaviyoPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceKlaviyoPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40249,7 +40356,7 @@ func (s *sources) PutSourceKlaviyo(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40290,18 +40397,17 @@ func (s *sources) PutSourceKlaviyo(ctx context.Context, request operations.PutSo
}
// PutSourceKustomerSinger - Update a Source fully
-func (s *sources) PutSourceKustomerSinger(ctx context.Context, request operations.PutSourceKustomerSingerRequest) (*operations.PutSourceKustomerSingerResponse, error) {
+func (s *Sources) PutSourceKustomerSinger(ctx context.Context, request operations.PutSourceKustomerSingerRequest) (*operations.PutSourceKustomerSingerResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#KustomerSinger", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceKustomerSingerPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceKustomerSingerPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40310,7 +40416,7 @@ func (s *sources) PutSourceKustomerSinger(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40351,18 +40457,17 @@ func (s *sources) PutSourceKustomerSinger(ctx context.Context, request operation
}
// PutSourceKyve - Update a Source fully
-func (s *sources) PutSourceKyve(ctx context.Context, request operations.PutSourceKyveRequest) (*operations.PutSourceKyveResponse, error) {
+func (s *Sources) PutSourceKyve(ctx context.Context, request operations.PutSourceKyveRequest) (*operations.PutSourceKyveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Kyve", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceKyvePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceKyvePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40371,7 +40476,7 @@ func (s *sources) PutSourceKyve(ctx context.Context, request operations.PutSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40412,18 +40517,17 @@ func (s *sources) PutSourceKyve(ctx context.Context, request operations.PutSourc
}
// PutSourceLaunchdarkly - Update a Source fully
-func (s *sources) PutSourceLaunchdarkly(ctx context.Context, request operations.PutSourceLaunchdarklyRequest) (*operations.PutSourceLaunchdarklyResponse, error) {
+func (s *Sources) PutSourceLaunchdarkly(ctx context.Context, request operations.PutSourceLaunchdarklyRequest) (*operations.PutSourceLaunchdarklyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Launchdarkly", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceLaunchdarklyPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceLaunchdarklyPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40432,7 +40536,7 @@ func (s *sources) PutSourceLaunchdarkly(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40473,18 +40577,17 @@ func (s *sources) PutSourceLaunchdarkly(ctx context.Context, request operations.
}
// PutSourceLemlist - Update a Source fully
-func (s *sources) PutSourceLemlist(ctx context.Context, request operations.PutSourceLemlistRequest) (*operations.PutSourceLemlistResponse, error) {
+func (s *Sources) PutSourceLemlist(ctx context.Context, request operations.PutSourceLemlistRequest) (*operations.PutSourceLemlistResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Lemlist", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceLemlistPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceLemlistPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40493,7 +40596,7 @@ func (s *sources) PutSourceLemlist(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40534,18 +40637,17 @@ func (s *sources) PutSourceLemlist(ctx context.Context, request operations.PutSo
}
// PutSourceLeverHiring - Update a Source fully
-func (s *sources) PutSourceLeverHiring(ctx context.Context, request operations.PutSourceLeverHiringRequest) (*operations.PutSourceLeverHiringResponse, error) {
+func (s *Sources) PutSourceLeverHiring(ctx context.Context, request operations.PutSourceLeverHiringRequest) (*operations.PutSourceLeverHiringResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#LeverHiring", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceLeverHiringPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceLeverHiringPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40554,7 +40656,7 @@ func (s *sources) PutSourceLeverHiring(ctx context.Context, request operations.P
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40595,18 +40697,17 @@ func (s *sources) PutSourceLeverHiring(ctx context.Context, request operations.P
}
// PutSourceLinkedinAds - Update a Source fully
-func (s *sources) PutSourceLinkedinAds(ctx context.Context, request operations.PutSourceLinkedinAdsRequest) (*operations.PutSourceLinkedinAdsResponse, error) {
+func (s *Sources) PutSourceLinkedinAds(ctx context.Context, request operations.PutSourceLinkedinAdsRequest) (*operations.PutSourceLinkedinAdsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#LinkedinAds", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceLinkedinAdsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceLinkedinAdsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40615,7 +40716,7 @@ func (s *sources) PutSourceLinkedinAds(ctx context.Context, request operations.P
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40656,18 +40757,17 @@ func (s *sources) PutSourceLinkedinAds(ctx context.Context, request operations.P
}
// PutSourceLinkedinPages - Update a Source fully
-func (s *sources) PutSourceLinkedinPages(ctx context.Context, request operations.PutSourceLinkedinPagesRequest) (*operations.PutSourceLinkedinPagesResponse, error) {
+func (s *Sources) PutSourceLinkedinPages(ctx context.Context, request operations.PutSourceLinkedinPagesRequest) (*operations.PutSourceLinkedinPagesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#LinkedinPages", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceLinkedinPagesPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceLinkedinPagesPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40676,7 +40776,7 @@ func (s *sources) PutSourceLinkedinPages(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40717,18 +40817,17 @@ func (s *sources) PutSourceLinkedinPages(ctx context.Context, request operations
}
// PutSourceLinnworks - Update a Source fully
-func (s *sources) PutSourceLinnworks(ctx context.Context, request operations.PutSourceLinnworksRequest) (*operations.PutSourceLinnworksResponse, error) {
+func (s *Sources) PutSourceLinnworks(ctx context.Context, request operations.PutSourceLinnworksRequest) (*operations.PutSourceLinnworksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Linnworks", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceLinnworksPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceLinnworksPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40737,7 +40836,7 @@ func (s *sources) PutSourceLinnworks(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40778,18 +40877,17 @@ func (s *sources) PutSourceLinnworks(ctx context.Context, request operations.Put
}
// PutSourceLokalise - Update a Source fully
-func (s *sources) PutSourceLokalise(ctx context.Context, request operations.PutSourceLokaliseRequest) (*operations.PutSourceLokaliseResponse, error) {
+func (s *Sources) PutSourceLokalise(ctx context.Context, request operations.PutSourceLokaliseRequest) (*operations.PutSourceLokaliseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Lokalise", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceLokalisePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceLokalisePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40798,7 +40896,7 @@ func (s *sources) PutSourceLokalise(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40839,18 +40937,17 @@ func (s *sources) PutSourceLokalise(ctx context.Context, request operations.PutS
}
// PutSourceMailchimp - Update a Source fully
-func (s *sources) PutSourceMailchimp(ctx context.Context, request operations.PutSourceMailchimpRequest) (*operations.PutSourceMailchimpResponse, error) {
+func (s *Sources) PutSourceMailchimp(ctx context.Context, request operations.PutSourceMailchimpRequest) (*operations.PutSourceMailchimpResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mailchimp", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMailchimpPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMailchimpPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40859,7 +40956,7 @@ func (s *sources) PutSourceMailchimp(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40900,18 +40997,17 @@ func (s *sources) PutSourceMailchimp(ctx context.Context, request operations.Put
}
// PutSourceMailgun - Update a Source fully
-func (s *sources) PutSourceMailgun(ctx context.Context, request operations.PutSourceMailgunRequest) (*operations.PutSourceMailgunResponse, error) {
+func (s *Sources) PutSourceMailgun(ctx context.Context, request operations.PutSourceMailgunRequest) (*operations.PutSourceMailgunResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mailgun", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMailgunPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMailgunPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40920,7 +41016,7 @@ func (s *sources) PutSourceMailgun(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -40961,18 +41057,17 @@ func (s *sources) PutSourceMailgun(ctx context.Context, request operations.PutSo
}
// PutSourceMailjetSms - Update a Source fully
-func (s *sources) PutSourceMailjetSms(ctx context.Context, request operations.PutSourceMailjetSmsRequest) (*operations.PutSourceMailjetSmsResponse, error) {
+func (s *Sources) PutSourceMailjetSms(ctx context.Context, request operations.PutSourceMailjetSmsRequest) (*operations.PutSourceMailjetSmsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MailjetSms", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMailjetSmsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMailjetSmsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -40981,7 +41076,7 @@ func (s *sources) PutSourceMailjetSms(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41022,18 +41117,17 @@ func (s *sources) PutSourceMailjetSms(ctx context.Context, request operations.Pu
}
// PutSourceMarketo - Update a Source fully
-func (s *sources) PutSourceMarketo(ctx context.Context, request operations.PutSourceMarketoRequest) (*operations.PutSourceMarketoResponse, error) {
+func (s *Sources) PutSourceMarketo(ctx context.Context, request operations.PutSourceMarketoRequest) (*operations.PutSourceMarketoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Marketo", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMarketoPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMarketoPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41042,7 +41136,7 @@ func (s *sources) PutSourceMarketo(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41083,18 +41177,17 @@ func (s *sources) PutSourceMarketo(ctx context.Context, request operations.PutSo
}
// PutSourceMetabase - Update a Source fully
-func (s *sources) PutSourceMetabase(ctx context.Context, request operations.PutSourceMetabaseRequest) (*operations.PutSourceMetabaseResponse, error) {
+func (s *Sources) PutSourceMetabase(ctx context.Context, request operations.PutSourceMetabaseRequest) (*operations.PutSourceMetabaseResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Metabase", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMetabasePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMetabasePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41103,7 +41196,7 @@ func (s *sources) PutSourceMetabase(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41144,18 +41237,17 @@ func (s *sources) PutSourceMetabase(ctx context.Context, request operations.PutS
}
// PutSourceMicrosoftTeams - Update a Source fully
-func (s *sources) PutSourceMicrosoftTeams(ctx context.Context, request operations.PutSourceMicrosoftTeamsRequest) (*operations.PutSourceMicrosoftTeamsResponse, error) {
+func (s *Sources) PutSourceMicrosoftTeams(ctx context.Context, request operations.PutSourceMicrosoftTeamsRequest) (*operations.PutSourceMicrosoftTeamsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MicrosoftTeams", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMicrosoftTeamsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMicrosoftTeamsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41164,7 +41256,7 @@ func (s *sources) PutSourceMicrosoftTeams(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41205,18 +41297,17 @@ func (s *sources) PutSourceMicrosoftTeams(ctx context.Context, request operation
}
// PutSourceMixpanel - Update a Source fully
-func (s *sources) PutSourceMixpanel(ctx context.Context, request operations.PutSourceMixpanelRequest) (*operations.PutSourceMixpanelResponse, error) {
+func (s *Sources) PutSourceMixpanel(ctx context.Context, request operations.PutSourceMixpanelRequest) (*operations.PutSourceMixpanelResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mixpanel", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMixpanelPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMixpanelPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41225,7 +41316,7 @@ func (s *sources) PutSourceMixpanel(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41266,18 +41357,17 @@ func (s *sources) PutSourceMixpanel(ctx context.Context, request operations.PutS
}
// PutSourceMonday - Update a Source fully
-func (s *sources) PutSourceMonday(ctx context.Context, request operations.PutSourceMondayRequest) (*operations.PutSourceMondayResponse, error) {
+func (s *Sources) PutSourceMonday(ctx context.Context, request operations.PutSourceMondayRequest) (*operations.PutSourceMondayResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Monday", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMondayPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMondayPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41286,7 +41376,7 @@ func (s *sources) PutSourceMonday(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41326,19 +41416,18 @@ func (s *sources) PutSourceMonday(ctx context.Context, request operations.PutSou
return res, nil
}
-// PutSourceMongodb - Update a Source fully
-func (s *sources) PutSourceMongodb(ctx context.Context, request operations.PutSourceMongodbRequest) (*operations.PutSourceMongodbResponse, error) {
+// PutSourceMongodbInternalPoc - Update a Source fully
+func (s *Sources) PutSourceMongodbInternalPoc(ctx context.Context, request operations.PutSourceMongodbInternalPocRequest) (*operations.PutSourceMongodbInternalPocResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mongodb", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MongodbInternalPoc", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMongodbPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMongodbInternalPocPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41347,7 +41436,7 @@ func (s *sources) PutSourceMongodb(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41371,7 +41460,7 @@ func (s *sources) PutSourceMongodb(ctx context.Context, request operations.PutSo
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceMongodbResponse{
+ res := &operations.PutSourceMongodbInternalPocResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -41387,19 +41476,18 @@ func (s *sources) PutSourceMongodb(ctx context.Context, request operations.PutSo
return res, nil
}
-// PutSourceMongodbInternalPoc - Update a Source fully
-func (s *sources) PutSourceMongodbInternalPoc(ctx context.Context, request operations.PutSourceMongodbInternalPocRequest) (*operations.PutSourceMongodbInternalPocResponse, error) {
+// PutSourceMongodbV2 - Update a Source fully
+func (s *Sources) PutSourceMongodbV2(ctx context.Context, request operations.PutSourceMongodbV2Request) (*operations.PutSourceMongodbV2Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MongodbInternalPoc", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MongodbV2", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMongodbInternalPocPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMongodbV2PutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41408,7 +41496,7 @@ func (s *sources) PutSourceMongodbInternalPoc(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41432,7 +41520,7 @@ func (s *sources) PutSourceMongodbInternalPoc(ctx context.Context, request opera
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceMongodbInternalPocResponse{
+ res := &operations.PutSourceMongodbV2Response{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -41449,18 +41537,17 @@ func (s *sources) PutSourceMongodbInternalPoc(ctx context.Context, request opera
}
// PutSourceMssql - Update a Source fully
-func (s *sources) PutSourceMssql(ctx context.Context, request operations.PutSourceMssqlRequest) (*operations.PutSourceMssqlResponse, error) {
+func (s *Sources) PutSourceMssql(ctx context.Context, request operations.PutSourceMssqlRequest) (*operations.PutSourceMssqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mssql", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMssqlPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMssqlPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41469,7 +41556,7 @@ func (s *sources) PutSourceMssql(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41510,18 +41597,17 @@ func (s *sources) PutSourceMssql(ctx context.Context, request operations.PutSour
}
// PutSourceMyHours - Update a Source fully
-func (s *sources) PutSourceMyHours(ctx context.Context, request operations.PutSourceMyHoursRequest) (*operations.PutSourceMyHoursResponse, error) {
+func (s *Sources) PutSourceMyHours(ctx context.Context, request operations.PutSourceMyHoursRequest) (*operations.PutSourceMyHoursResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#MyHours", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMyHoursPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMyHoursPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41530,7 +41616,7 @@ func (s *sources) PutSourceMyHours(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41571,18 +41657,17 @@ func (s *sources) PutSourceMyHours(ctx context.Context, request operations.PutSo
}
// PutSourceMysql - Update a Source fully
-func (s *sources) PutSourceMysql(ctx context.Context, request operations.PutSourceMysqlRequest) (*operations.PutSourceMysqlResponse, error) {
+func (s *Sources) PutSourceMysql(ctx context.Context, request operations.PutSourceMysqlRequest) (*operations.PutSourceMysqlResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Mysql", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceMysqlPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceMysqlPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41591,7 +41676,7 @@ func (s *sources) PutSourceMysql(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41632,18 +41717,17 @@ func (s *sources) PutSourceMysql(ctx context.Context, request operations.PutSour
}
// PutSourceNetsuite - Update a Source fully
-func (s *sources) PutSourceNetsuite(ctx context.Context, request operations.PutSourceNetsuiteRequest) (*operations.PutSourceNetsuiteResponse, error) {
+func (s *Sources) PutSourceNetsuite(ctx context.Context, request operations.PutSourceNetsuiteRequest) (*operations.PutSourceNetsuiteResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Netsuite", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceNetsuitePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceNetsuitePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41652,7 +41736,7 @@ func (s *sources) PutSourceNetsuite(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41693,18 +41777,17 @@ func (s *sources) PutSourceNetsuite(ctx context.Context, request operations.PutS
}
// PutSourceNotion - Update a Source fully
-func (s *sources) PutSourceNotion(ctx context.Context, request operations.PutSourceNotionRequest) (*operations.PutSourceNotionResponse, error) {
+func (s *Sources) PutSourceNotion(ctx context.Context, request operations.PutSourceNotionRequest) (*operations.PutSourceNotionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Notion", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceNotionPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceNotionPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41713,7 +41796,7 @@ func (s *sources) PutSourceNotion(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41754,18 +41837,17 @@ func (s *sources) PutSourceNotion(ctx context.Context, request operations.PutSou
}
// PutSourceNytimes - Update a Source fully
-func (s *sources) PutSourceNytimes(ctx context.Context, request operations.PutSourceNytimesRequest) (*operations.PutSourceNytimesResponse, error) {
+func (s *Sources) PutSourceNytimes(ctx context.Context, request operations.PutSourceNytimesRequest) (*operations.PutSourceNytimesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Nytimes", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceNytimesPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceNytimesPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41774,7 +41856,7 @@ func (s *sources) PutSourceNytimes(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41815,18 +41897,17 @@ func (s *sources) PutSourceNytimes(ctx context.Context, request operations.PutSo
}
// PutSourceOkta - Update a Source fully
-func (s *sources) PutSourceOkta(ctx context.Context, request operations.PutSourceOktaRequest) (*operations.PutSourceOktaResponse, error) {
+func (s *Sources) PutSourceOkta(ctx context.Context, request operations.PutSourceOktaRequest) (*operations.PutSourceOktaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Okta", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceOktaPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceOktaPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41835,7 +41916,7 @@ func (s *sources) PutSourceOkta(ctx context.Context, request operations.PutSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41876,18 +41957,17 @@ func (s *sources) PutSourceOkta(ctx context.Context, request operations.PutSourc
}
// PutSourceOmnisend - Update a Source fully
-func (s *sources) PutSourceOmnisend(ctx context.Context, request operations.PutSourceOmnisendRequest) (*operations.PutSourceOmnisendResponse, error) {
+func (s *Sources) PutSourceOmnisend(ctx context.Context, request operations.PutSourceOmnisendRequest) (*operations.PutSourceOmnisendResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Omnisend", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceOmnisendPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceOmnisendPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41896,7 +41976,7 @@ func (s *sources) PutSourceOmnisend(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41937,18 +42017,17 @@ func (s *sources) PutSourceOmnisend(ctx context.Context, request operations.PutS
}
// PutSourceOnesignal - Update a Source fully
-func (s *sources) PutSourceOnesignal(ctx context.Context, request operations.PutSourceOnesignalRequest) (*operations.PutSourceOnesignalResponse, error) {
+func (s *Sources) PutSourceOnesignal(ctx context.Context, request operations.PutSourceOnesignalRequest) (*operations.PutSourceOnesignalResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Onesignal", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceOnesignalPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceOnesignalPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -41957,7 +42036,7 @@ func (s *sources) PutSourceOnesignal(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -41998,18 +42077,17 @@ func (s *sources) PutSourceOnesignal(ctx context.Context, request operations.Put
}
// PutSourceOracle - Update a Source fully
-func (s *sources) PutSourceOracle(ctx context.Context, request operations.PutSourceOracleRequest) (*operations.PutSourceOracleResponse, error) {
+func (s *Sources) PutSourceOracle(ctx context.Context, request operations.PutSourceOracleRequest) (*operations.PutSourceOracleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Oracle", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceOraclePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceOraclePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42018,7 +42096,7 @@ func (s *sources) PutSourceOracle(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42059,18 +42137,17 @@ func (s *sources) PutSourceOracle(ctx context.Context, request operations.PutSou
}
// PutSourceOrb - Update a Source fully
-func (s *sources) PutSourceOrb(ctx context.Context, request operations.PutSourceOrbRequest) (*operations.PutSourceOrbResponse, error) {
+func (s *Sources) PutSourceOrb(ctx context.Context, request operations.PutSourceOrbRequest) (*operations.PutSourceOrbResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Orb", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceOrbPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceOrbPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42079,7 +42156,7 @@ func (s *sources) PutSourceOrb(ctx context.Context, request operations.PutSource
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42120,18 +42197,17 @@ func (s *sources) PutSourceOrb(ctx context.Context, request operations.PutSource
}
// PutSourceOrbit - Update a Source fully
-func (s *sources) PutSourceOrbit(ctx context.Context, request operations.PutSourceOrbitRequest) (*operations.PutSourceOrbitResponse, error) {
+func (s *Sources) PutSourceOrbit(ctx context.Context, request operations.PutSourceOrbitRequest) (*operations.PutSourceOrbitResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Orbit", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceOrbitPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceOrbitPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42140,7 +42216,7 @@ func (s *sources) PutSourceOrbit(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42181,18 +42257,17 @@ func (s *sources) PutSourceOrbit(ctx context.Context, request operations.PutSour
}
// PutSourceOutbrainAmplify - Update a Source fully
-func (s *sources) PutSourceOutbrainAmplify(ctx context.Context, request operations.PutSourceOutbrainAmplifyRequest) (*operations.PutSourceOutbrainAmplifyResponse, error) {
+func (s *Sources) PutSourceOutbrainAmplify(ctx context.Context, request operations.PutSourceOutbrainAmplifyRequest) (*operations.PutSourceOutbrainAmplifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#OutbrainAmplify", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceOutbrainAmplifyPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceOutbrainAmplifyPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42201,7 +42276,7 @@ func (s *sources) PutSourceOutbrainAmplify(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42242,18 +42317,17 @@ func (s *sources) PutSourceOutbrainAmplify(ctx context.Context, request operatio
}
// PutSourceOutreach - Update a Source fully
-func (s *sources) PutSourceOutreach(ctx context.Context, request operations.PutSourceOutreachRequest) (*operations.PutSourceOutreachResponse, error) {
+func (s *Sources) PutSourceOutreach(ctx context.Context, request operations.PutSourceOutreachRequest) (*operations.PutSourceOutreachResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Outreach", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceOutreachPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceOutreachPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42262,7 +42336,7 @@ func (s *sources) PutSourceOutreach(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42303,18 +42377,17 @@ func (s *sources) PutSourceOutreach(ctx context.Context, request operations.PutS
}
// PutSourcePaypalTransaction - Update a Source fully
-func (s *sources) PutSourcePaypalTransaction(ctx context.Context, request operations.PutSourcePaypalTransactionRequest) (*operations.PutSourcePaypalTransactionResponse, error) {
+func (s *Sources) PutSourcePaypalTransaction(ctx context.Context, request operations.PutSourcePaypalTransactionRequest) (*operations.PutSourcePaypalTransactionResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PaypalTransaction", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePaypalTransactionPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePaypalTransactionPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42323,7 +42396,7 @@ func (s *sources) PutSourcePaypalTransaction(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42364,18 +42437,17 @@ func (s *sources) PutSourcePaypalTransaction(ctx context.Context, request operat
}
// PutSourcePaystack - Update a Source fully
-func (s *sources) PutSourcePaystack(ctx context.Context, request operations.PutSourcePaystackRequest) (*operations.PutSourcePaystackResponse, error) {
+func (s *Sources) PutSourcePaystack(ctx context.Context, request operations.PutSourcePaystackRequest) (*operations.PutSourcePaystackResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Paystack", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePaystackPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePaystackPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42384,7 +42456,7 @@ func (s *sources) PutSourcePaystack(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42425,18 +42497,17 @@ func (s *sources) PutSourcePaystack(ctx context.Context, request operations.PutS
}
// PutSourcePendo - Update a Source fully
-func (s *sources) PutSourcePendo(ctx context.Context, request operations.PutSourcePendoRequest) (*operations.PutSourcePendoResponse, error) {
+func (s *Sources) PutSourcePendo(ctx context.Context, request operations.PutSourcePendoRequest) (*operations.PutSourcePendoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pendo", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePendoPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePendoPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42445,7 +42516,7 @@ func (s *sources) PutSourcePendo(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42486,18 +42557,17 @@ func (s *sources) PutSourcePendo(ctx context.Context, request operations.PutSour
}
// PutSourcePersistiq - Update a Source fully
-func (s *sources) PutSourcePersistiq(ctx context.Context, request operations.PutSourcePersistiqRequest) (*operations.PutSourcePersistiqResponse, error) {
+func (s *Sources) PutSourcePersistiq(ctx context.Context, request operations.PutSourcePersistiqRequest) (*operations.PutSourcePersistiqResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Persistiq", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePersistiqPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePersistiqPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42506,7 +42576,7 @@ func (s *sources) PutSourcePersistiq(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42547,18 +42617,17 @@ func (s *sources) PutSourcePersistiq(ctx context.Context, request operations.Put
}
// PutSourcePexelsAPI - Update a Source fully
-func (s *sources) PutSourcePexelsAPI(ctx context.Context, request operations.PutSourcePexelsAPIRequest) (*operations.PutSourcePexelsAPIResponse, error) {
+func (s *Sources) PutSourcePexelsAPI(ctx context.Context, request operations.PutSourcePexelsAPIRequest) (*operations.PutSourcePexelsAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PexelsApi", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePexelsAPIPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePexelsAPIPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42567,7 +42636,7 @@ func (s *sources) PutSourcePexelsAPI(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42608,18 +42677,17 @@ func (s *sources) PutSourcePexelsAPI(ctx context.Context, request operations.Put
}
// PutSourcePinterest - Update a Source fully
-func (s *sources) PutSourcePinterest(ctx context.Context, request operations.PutSourcePinterestRequest) (*operations.PutSourcePinterestResponse, error) {
+func (s *Sources) PutSourcePinterest(ctx context.Context, request operations.PutSourcePinterestRequest) (*operations.PutSourcePinterestResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pinterest", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePinterestPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePinterestPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42628,7 +42696,7 @@ func (s *sources) PutSourcePinterest(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42669,18 +42737,17 @@ func (s *sources) PutSourcePinterest(ctx context.Context, request operations.Put
}
// PutSourcePipedrive - Update a Source fully
-func (s *sources) PutSourcePipedrive(ctx context.Context, request operations.PutSourcePipedriveRequest) (*operations.PutSourcePipedriveResponse, error) {
+func (s *Sources) PutSourcePipedrive(ctx context.Context, request operations.PutSourcePipedriveRequest) (*operations.PutSourcePipedriveResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pipedrive", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePipedrivePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePipedrivePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42689,7 +42756,7 @@ func (s *sources) PutSourcePipedrive(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42730,18 +42797,17 @@ func (s *sources) PutSourcePipedrive(ctx context.Context, request operations.Put
}
// PutSourcePocket - Update a Source fully
-func (s *sources) PutSourcePocket(ctx context.Context, request operations.PutSourcePocketRequest) (*operations.PutSourcePocketResponse, error) {
+func (s *Sources) PutSourcePocket(ctx context.Context, request operations.PutSourcePocketRequest) (*operations.PutSourcePocketResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pocket", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePocketPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePocketPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42750,7 +42816,7 @@ func (s *sources) PutSourcePocket(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42791,18 +42857,17 @@ func (s *sources) PutSourcePocket(ctx context.Context, request operations.PutSou
}
// PutSourcePokeapi - Update a Source fully
-func (s *sources) PutSourcePokeapi(ctx context.Context, request operations.PutSourcePokeapiRequest) (*operations.PutSourcePokeapiResponse, error) {
+func (s *Sources) PutSourcePokeapi(ctx context.Context, request operations.PutSourcePokeapiRequest) (*operations.PutSourcePokeapiResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pokeapi", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePokeapiPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePokeapiPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42811,7 +42876,7 @@ func (s *sources) PutSourcePokeapi(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42852,18 +42917,17 @@ func (s *sources) PutSourcePokeapi(ctx context.Context, request operations.PutSo
}
// PutSourcePolygonStockAPI - Update a Source fully
-func (s *sources) PutSourcePolygonStockAPI(ctx context.Context, request operations.PutSourcePolygonStockAPIRequest) (*operations.PutSourcePolygonStockAPIResponse, error) {
+func (s *Sources) PutSourcePolygonStockAPI(ctx context.Context, request operations.PutSourcePolygonStockAPIRequest) (*operations.PutSourcePolygonStockAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PolygonStockApi", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePolygonStockAPIPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePolygonStockAPIPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42872,7 +42936,7 @@ func (s *sources) PutSourcePolygonStockAPI(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42913,18 +42977,17 @@ func (s *sources) PutSourcePolygonStockAPI(ctx context.Context, request operatio
}
// PutSourcePostgres - Update a Source fully
-func (s *sources) PutSourcePostgres(ctx context.Context, request operations.PutSourcePostgresRequest) (*operations.PutSourcePostgresResponse, error) {
+func (s *Sources) PutSourcePostgres(ctx context.Context, request operations.PutSourcePostgresRequest) (*operations.PutSourcePostgresResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Postgres", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePostgresPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePostgresPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42933,7 +42996,7 @@ func (s *sources) PutSourcePostgres(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -42974,18 +43037,17 @@ func (s *sources) PutSourcePostgres(ctx context.Context, request operations.PutS
}
// PutSourcePosthog - Update a Source fully
-func (s *sources) PutSourcePosthog(ctx context.Context, request operations.PutSourcePosthogRequest) (*operations.PutSourcePosthogResponse, error) {
+func (s *Sources) PutSourcePosthog(ctx context.Context, request operations.PutSourcePosthogRequest) (*operations.PutSourcePosthogResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Posthog", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePosthogPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePosthogPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -42994,7 +43056,7 @@ func (s *sources) PutSourcePosthog(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43035,18 +43097,17 @@ func (s *sources) PutSourcePosthog(ctx context.Context, request operations.PutSo
}
// PutSourcePostmarkapp - Update a Source fully
-func (s *sources) PutSourcePostmarkapp(ctx context.Context, request operations.PutSourcePostmarkappRequest) (*operations.PutSourcePostmarkappResponse, error) {
+func (s *Sources) PutSourcePostmarkapp(ctx context.Context, request operations.PutSourcePostmarkappRequest) (*operations.PutSourcePostmarkappResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Postmarkapp", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePostmarkappPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePostmarkappPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43055,7 +43116,7 @@ func (s *sources) PutSourcePostmarkapp(ctx context.Context, request operations.P
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43096,18 +43157,17 @@ func (s *sources) PutSourcePostmarkapp(ctx context.Context, request operations.P
}
// PutSourcePrestashop - Update a Source fully
-func (s *sources) PutSourcePrestashop(ctx context.Context, request operations.PutSourcePrestashopRequest) (*operations.PutSourcePrestashopResponse, error) {
+func (s *Sources) PutSourcePrestashop(ctx context.Context, request operations.PutSourcePrestashopRequest) (*operations.PutSourcePrestashopResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Prestashop", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePrestashopPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePrestashopPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43116,7 +43176,7 @@ func (s *sources) PutSourcePrestashop(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43157,18 +43217,17 @@ func (s *sources) PutSourcePrestashop(ctx context.Context, request operations.Pu
}
// PutSourcePunkAPI - Update a Source fully
-func (s *sources) PutSourcePunkAPI(ctx context.Context, request operations.PutSourcePunkAPIRequest) (*operations.PutSourcePunkAPIResponse, error) {
+func (s *Sources) PutSourcePunkAPI(ctx context.Context, request operations.PutSourcePunkAPIRequest) (*operations.PutSourcePunkAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PunkApi", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePunkAPIPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePunkAPIPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43177,7 +43236,7 @@ func (s *sources) PutSourcePunkAPI(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43218,18 +43277,17 @@ func (s *sources) PutSourcePunkAPI(ctx context.Context, request operations.PutSo
}
// PutSourcePypi - Update a Source fully
-func (s *sources) PutSourcePypi(ctx context.Context, request operations.PutSourcePypiRequest) (*operations.PutSourcePypiResponse, error) {
+func (s *Sources) PutSourcePypi(ctx context.Context, request operations.PutSourcePypiRequest) (*operations.PutSourcePypiResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Pypi", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePypiPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourcePypiPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43238,7 +43296,7 @@ func (s *sources) PutSourcePypi(ctx context.Context, request operations.PutSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43279,18 +43337,17 @@ func (s *sources) PutSourcePypi(ctx context.Context, request operations.PutSourc
}
// PutSourceQualaroo - Update a Source fully
-func (s *sources) PutSourceQualaroo(ctx context.Context, request operations.PutSourceQualarooRequest) (*operations.PutSourceQualarooResponse, error) {
+func (s *Sources) PutSourceQualaroo(ctx context.Context, request operations.PutSourceQualarooRequest) (*operations.PutSourceQualarooResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Qualaroo", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceQualarooPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceQualarooPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43299,7 +43356,7 @@ func (s *sources) PutSourceQualaroo(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43340,18 +43397,17 @@ func (s *sources) PutSourceQualaroo(ctx context.Context, request operations.PutS
}
// PutSourceQuickbooks - Update a Source fully
-func (s *sources) PutSourceQuickbooks(ctx context.Context, request operations.PutSourceQuickbooksRequest) (*operations.PutSourceQuickbooksResponse, error) {
+func (s *Sources) PutSourceQuickbooks(ctx context.Context, request operations.PutSourceQuickbooksRequest) (*operations.PutSourceQuickbooksResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Quickbooks", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceQuickbooksPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceQuickbooksPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43360,7 +43416,7 @@ func (s *sources) PutSourceQuickbooks(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43401,18 +43457,17 @@ func (s *sources) PutSourceQuickbooks(ctx context.Context, request operations.Pu
}
// PutSourceRailz - Update a Source fully
-func (s *sources) PutSourceRailz(ctx context.Context, request operations.PutSourceRailzRequest) (*operations.PutSourceRailzResponse, error) {
+func (s *Sources) PutSourceRailz(ctx context.Context, request operations.PutSourceRailzRequest) (*operations.PutSourceRailzResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Railz", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceRailzPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceRailzPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43421,7 +43476,7 @@ func (s *sources) PutSourceRailz(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43462,18 +43517,17 @@ func (s *sources) PutSourceRailz(ctx context.Context, request operations.PutSour
}
// PutSourceRecharge - Update a Source fully
-func (s *sources) PutSourceRecharge(ctx context.Context, request operations.PutSourceRechargeRequest) (*operations.PutSourceRechargeResponse, error) {
+func (s *Sources) PutSourceRecharge(ctx context.Context, request operations.PutSourceRechargeRequest) (*operations.PutSourceRechargeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recharge", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceRechargePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceRechargePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43482,7 +43536,7 @@ func (s *sources) PutSourceRecharge(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43523,18 +43577,17 @@ func (s *sources) PutSourceRecharge(ctx context.Context, request operations.PutS
}
// PutSourceRecreation - Update a Source fully
-func (s *sources) PutSourceRecreation(ctx context.Context, request operations.PutSourceRecreationRequest) (*operations.PutSourceRecreationResponse, error) {
+func (s *Sources) PutSourceRecreation(ctx context.Context, request operations.PutSourceRecreationRequest) (*operations.PutSourceRecreationResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recreation", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceRecreationPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceRecreationPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43543,7 +43596,7 @@ func (s *sources) PutSourceRecreation(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43584,18 +43637,17 @@ func (s *sources) PutSourceRecreation(ctx context.Context, request operations.Pu
}
// PutSourceRecruitee - Update a Source fully
-func (s *sources) PutSourceRecruitee(ctx context.Context, request operations.PutSourceRecruiteeRequest) (*operations.PutSourceRecruiteeResponse, error) {
+func (s *Sources) PutSourceRecruitee(ctx context.Context, request operations.PutSourceRecruiteeRequest) (*operations.PutSourceRecruiteeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recruitee", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceRecruiteePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceRecruiteePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43604,7 +43656,7 @@ func (s *sources) PutSourceRecruitee(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43645,18 +43697,17 @@ func (s *sources) PutSourceRecruitee(ctx context.Context, request operations.Put
}
// PutSourceRecurly - Update a Source fully
-func (s *sources) PutSourceRecurly(ctx context.Context, request operations.PutSourceRecurlyRequest) (*operations.PutSourceRecurlyResponse, error) {
+func (s *Sources) PutSourceRecurly(ctx context.Context, request operations.PutSourceRecurlyRequest) (*operations.PutSourceRecurlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Recurly", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceRecurlyPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceRecurlyPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43665,7 +43716,7 @@ func (s *sources) PutSourceRecurly(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43706,18 +43757,17 @@ func (s *sources) PutSourceRecurly(ctx context.Context, request operations.PutSo
}
// PutSourceRedshift - Update a Source fully
-func (s *sources) PutSourceRedshift(ctx context.Context, request operations.PutSourceRedshiftRequest) (*operations.PutSourceRedshiftResponse, error) {
+func (s *Sources) PutSourceRedshift(ctx context.Context, request operations.PutSourceRedshiftRequest) (*operations.PutSourceRedshiftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Redshift", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceRedshiftPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceRedshiftPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43726,7 +43776,7 @@ func (s *sources) PutSourceRedshift(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43767,18 +43817,17 @@ func (s *sources) PutSourceRedshift(ctx context.Context, request operations.PutS
}
// PutSourceRetently - Update a Source fully
-func (s *sources) PutSourceRetently(ctx context.Context, request operations.PutSourceRetentlyRequest) (*operations.PutSourceRetentlyResponse, error) {
+func (s *Sources) PutSourceRetently(ctx context.Context, request operations.PutSourceRetentlyRequest) (*operations.PutSourceRetentlyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Retently", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceRetentlyPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceRetentlyPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43787,7 +43836,7 @@ func (s *sources) PutSourceRetently(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43828,18 +43877,17 @@ func (s *sources) PutSourceRetently(ctx context.Context, request operations.PutS
}
// PutSourceRkiCovid - Update a Source fully
-func (s *sources) PutSourceRkiCovid(ctx context.Context, request operations.PutSourceRkiCovidRequest) (*operations.PutSourceRkiCovidResponse, error) {
+func (s *Sources) PutSourceRkiCovid(ctx context.Context, request operations.PutSourceRkiCovidRequest) (*operations.PutSourceRkiCovidResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#RkiCovid", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceRkiCovidPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceRkiCovidPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43848,7 +43896,7 @@ func (s *sources) PutSourceRkiCovid(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43889,18 +43937,17 @@ func (s *sources) PutSourceRkiCovid(ctx context.Context, request operations.PutS
}
// PutSourceRss - Update a Source fully
-func (s *sources) PutSourceRss(ctx context.Context, request operations.PutSourceRssRequest) (*operations.PutSourceRssResponse, error) {
+func (s *Sources) PutSourceRss(ctx context.Context, request operations.PutSourceRssRequest) (*operations.PutSourceRssResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Rss", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceRssPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceRssPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43909,7 +43956,7 @@ func (s *sources) PutSourceRss(ctx context.Context, request operations.PutSource
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -43950,18 +43997,17 @@ func (s *sources) PutSourceRss(ctx context.Context, request operations.PutSource
}
// PutSourceS3 - Update a Source fully
-func (s *sources) PutSourceS3(ctx context.Context, request operations.PutSourceS3Request) (*operations.PutSourceS3Response, error) {
+func (s *Sources) PutSourceS3(ctx context.Context, request operations.PutSourceS3Request) (*operations.PutSourceS3Response, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#S3", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceS3PutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceS3PutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -43970,7 +44016,7 @@ func (s *sources) PutSourceS3(ctx context.Context, request operations.PutSourceS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44011,18 +44057,17 @@ func (s *sources) PutSourceS3(ctx context.Context, request operations.PutSourceS
}
// PutSourceSalesforce - Update a Source fully
-func (s *sources) PutSourceSalesforce(ctx context.Context, request operations.PutSourceSalesforceRequest) (*operations.PutSourceSalesforceResponse, error) {
+func (s *Sources) PutSourceSalesforce(ctx context.Context, request operations.PutSourceSalesforceRequest) (*operations.PutSourceSalesforceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Salesforce", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSalesforcePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSalesforcePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44031,7 +44076,7 @@ func (s *sources) PutSourceSalesforce(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44072,18 +44117,17 @@ func (s *sources) PutSourceSalesforce(ctx context.Context, request operations.Pu
}
// PutSourceSalesloft - Update a Source fully
-func (s *sources) PutSourceSalesloft(ctx context.Context, request operations.PutSourceSalesloftRequest) (*operations.PutSourceSalesloftResponse, error) {
+func (s *Sources) PutSourceSalesloft(ctx context.Context, request operations.PutSourceSalesloftRequest) (*operations.PutSourceSalesloftResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Salesloft", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSalesloftPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSalesloftPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44092,7 +44136,7 @@ func (s *sources) PutSourceSalesloft(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44133,18 +44177,17 @@ func (s *sources) PutSourceSalesloft(ctx context.Context, request operations.Put
}
// PutSourceSapFieldglass - Update a Source fully
-func (s *sources) PutSourceSapFieldglass(ctx context.Context, request operations.PutSourceSapFieldglassRequest) (*operations.PutSourceSapFieldglassResponse, error) {
+func (s *Sources) PutSourceSapFieldglass(ctx context.Context, request operations.PutSourceSapFieldglassRequest) (*operations.PutSourceSapFieldglassResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SapFieldglass", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSapFieldglassPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSapFieldglassPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44153,7 +44196,7 @@ func (s *sources) PutSourceSapFieldglass(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44194,18 +44237,17 @@ func (s *sources) PutSourceSapFieldglass(ctx context.Context, request operations
}
// PutSourceSecoda - Update a Source fully
-func (s *sources) PutSourceSecoda(ctx context.Context, request operations.PutSourceSecodaRequest) (*operations.PutSourceSecodaResponse, error) {
+func (s *Sources) PutSourceSecoda(ctx context.Context, request operations.PutSourceSecodaRequest) (*operations.PutSourceSecodaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Secoda", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSecodaPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSecodaPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44214,7 +44256,7 @@ func (s *sources) PutSourceSecoda(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44255,18 +44297,17 @@ func (s *sources) PutSourceSecoda(ctx context.Context, request operations.PutSou
}
// PutSourceSendgrid - Update a Source fully
-func (s *sources) PutSourceSendgrid(ctx context.Context, request operations.PutSourceSendgridRequest) (*operations.PutSourceSendgridResponse, error) {
+func (s *Sources) PutSourceSendgrid(ctx context.Context, request operations.PutSourceSendgridRequest) (*operations.PutSourceSendgridResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sendgrid", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSendgridPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSendgridPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44275,7 +44316,7 @@ func (s *sources) PutSourceSendgrid(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44316,18 +44357,17 @@ func (s *sources) PutSourceSendgrid(ctx context.Context, request operations.PutS
}
// PutSourceSendinblue - Update a Source fully
-func (s *sources) PutSourceSendinblue(ctx context.Context, request operations.PutSourceSendinblueRequest) (*operations.PutSourceSendinblueResponse, error) {
+func (s *Sources) PutSourceSendinblue(ctx context.Context, request operations.PutSourceSendinblueRequest) (*operations.PutSourceSendinblueResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sendinblue", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSendinbluePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSendinbluePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44336,7 +44376,7 @@ func (s *sources) PutSourceSendinblue(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44377,18 +44417,17 @@ func (s *sources) PutSourceSendinblue(ctx context.Context, request operations.Pu
}
// PutSourceSenseforce - Update a Source fully
-func (s *sources) PutSourceSenseforce(ctx context.Context, request operations.PutSourceSenseforceRequest) (*operations.PutSourceSenseforceResponse, error) {
+func (s *Sources) PutSourceSenseforce(ctx context.Context, request operations.PutSourceSenseforceRequest) (*operations.PutSourceSenseforceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Senseforce", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSenseforcePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSenseforcePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44397,7 +44436,7 @@ func (s *sources) PutSourceSenseforce(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44438,18 +44477,17 @@ func (s *sources) PutSourceSenseforce(ctx context.Context, request operations.Pu
}
// PutSourceSentry - Update a Source fully
-func (s *sources) PutSourceSentry(ctx context.Context, request operations.PutSourceSentryRequest) (*operations.PutSourceSentryResponse, error) {
+func (s *Sources) PutSourceSentry(ctx context.Context, request operations.PutSourceSentryRequest) (*operations.PutSourceSentryResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sentry", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSentryPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSentryPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44458,7 +44496,7 @@ func (s *sources) PutSourceSentry(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44499,18 +44537,17 @@ func (s *sources) PutSourceSentry(ctx context.Context, request operations.PutSou
}
// PutSourceSftp - Update a Source fully
-func (s *sources) PutSourceSftp(ctx context.Context, request operations.PutSourceSftpRequest) (*operations.PutSourceSftpResponse, error) {
+func (s *Sources) PutSourceSftp(ctx context.Context, request operations.PutSourceSftpRequest) (*operations.PutSourceSftpResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Sftp", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSftpPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSftpPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44519,7 +44556,7 @@ func (s *sources) PutSourceSftp(ctx context.Context, request operations.PutSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44560,18 +44597,17 @@ func (s *sources) PutSourceSftp(ctx context.Context, request operations.PutSourc
}
// PutSourceSftpBulk - Update a Source fully
-func (s *sources) PutSourceSftpBulk(ctx context.Context, request operations.PutSourceSftpBulkRequest) (*operations.PutSourceSftpBulkResponse, error) {
+func (s *Sources) PutSourceSftpBulk(ctx context.Context, request operations.PutSourceSftpBulkRequest) (*operations.PutSourceSftpBulkResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SftpBulk", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSftpBulkPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSftpBulkPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44580,7 +44616,7 @@ func (s *sources) PutSourceSftpBulk(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44621,18 +44657,17 @@ func (s *sources) PutSourceSftpBulk(ctx context.Context, request operations.PutS
}
// PutSourceShopify - Update a Source fully
-func (s *sources) PutSourceShopify(ctx context.Context, request operations.PutSourceShopifyRequest) (*operations.PutSourceShopifyResponse, error) {
+func (s *Sources) PutSourceShopify(ctx context.Context, request operations.PutSourceShopifyRequest) (*operations.PutSourceShopifyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Shopify", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceShopifyPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceShopifyPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44641,7 +44676,7 @@ func (s *sources) PutSourceShopify(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44682,18 +44717,17 @@ func (s *sources) PutSourceShopify(ctx context.Context, request operations.PutSo
}
// PutSourceShortio - Update a Source fully
-func (s *sources) PutSourceShortio(ctx context.Context, request operations.PutSourceShortioRequest) (*operations.PutSourceShortioResponse, error) {
+func (s *Sources) PutSourceShortio(ctx context.Context, request operations.PutSourceShortioRequest) (*operations.PutSourceShortioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Shortio", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceShortioPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceShortioPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44702,7 +44736,7 @@ func (s *sources) PutSourceShortio(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44743,18 +44777,17 @@ func (s *sources) PutSourceShortio(ctx context.Context, request operations.PutSo
}
// PutSourceSlack - Update a Source fully
-func (s *sources) PutSourceSlack(ctx context.Context, request operations.PutSourceSlackRequest) (*operations.PutSourceSlackResponse, error) {
+func (s *Sources) PutSourceSlack(ctx context.Context, request operations.PutSourceSlackRequest) (*operations.PutSourceSlackResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Slack", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSlackPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSlackPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44763,7 +44796,7 @@ func (s *sources) PutSourceSlack(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44804,18 +44837,17 @@ func (s *sources) PutSourceSlack(ctx context.Context, request operations.PutSour
}
// PutSourceSmaily - Update a Source fully
-func (s *sources) PutSourceSmaily(ctx context.Context, request operations.PutSourceSmailyRequest) (*operations.PutSourceSmailyResponse, error) {
+func (s *Sources) PutSourceSmaily(ctx context.Context, request operations.PutSourceSmailyRequest) (*operations.PutSourceSmailyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Smaily", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSmailyPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSmailyPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44824,7 +44856,7 @@ func (s *sources) PutSourceSmaily(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44865,18 +44897,17 @@ func (s *sources) PutSourceSmaily(ctx context.Context, request operations.PutSou
}
// PutSourceSmartengage - Update a Source fully
-func (s *sources) PutSourceSmartengage(ctx context.Context, request operations.PutSourceSmartengageRequest) (*operations.PutSourceSmartengageResponse, error) {
+func (s *Sources) PutSourceSmartengage(ctx context.Context, request operations.PutSourceSmartengageRequest) (*operations.PutSourceSmartengageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Smartengage", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSmartengagePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSmartengagePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44885,7 +44916,7 @@ func (s *sources) PutSourceSmartengage(ctx context.Context, request operations.P
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44926,18 +44957,17 @@ func (s *sources) PutSourceSmartengage(ctx context.Context, request operations.P
}
// PutSourceSmartsheets - Update a Source fully
-func (s *sources) PutSourceSmartsheets(ctx context.Context, request operations.PutSourceSmartsheetsRequest) (*operations.PutSourceSmartsheetsResponse, error) {
+func (s *Sources) PutSourceSmartsheets(ctx context.Context, request operations.PutSourceSmartsheetsRequest) (*operations.PutSourceSmartsheetsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Smartsheets", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSmartsheetsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSmartsheetsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -44946,7 +44976,7 @@ func (s *sources) PutSourceSmartsheets(ctx context.Context, request operations.P
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -44987,18 +45017,17 @@ func (s *sources) PutSourceSmartsheets(ctx context.Context, request operations.P
}
// PutSourceSnapchatMarketing - Update a Source fully
-func (s *sources) PutSourceSnapchatMarketing(ctx context.Context, request operations.PutSourceSnapchatMarketingRequest) (*operations.PutSourceSnapchatMarketingResponse, error) {
+func (s *Sources) PutSourceSnapchatMarketing(ctx context.Context, request operations.PutSourceSnapchatMarketingRequest) (*operations.PutSourceSnapchatMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SnapchatMarketing", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSnapchatMarketingPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSnapchatMarketingPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45007,7 +45036,7 @@ func (s *sources) PutSourceSnapchatMarketing(ctx context.Context, request operat
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45048,18 +45077,17 @@ func (s *sources) PutSourceSnapchatMarketing(ctx context.Context, request operat
}
// PutSourceSnowflake - Update a Source fully
-func (s *sources) PutSourceSnowflake(ctx context.Context, request operations.PutSourceSnowflakeRequest) (*operations.PutSourceSnowflakeResponse, error) {
+func (s *Sources) PutSourceSnowflake(ctx context.Context, request operations.PutSourceSnowflakeRequest) (*operations.PutSourceSnowflakeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Snowflake", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSnowflakePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSnowflakePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45068,7 +45096,7 @@ func (s *sources) PutSourceSnowflake(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45109,18 +45137,17 @@ func (s *sources) PutSourceSnowflake(ctx context.Context, request operations.Put
}
// PutSourceSonarCloud - Update a Source fully
-func (s *sources) PutSourceSonarCloud(ctx context.Context, request operations.PutSourceSonarCloudRequest) (*operations.PutSourceSonarCloudResponse, error) {
+func (s *Sources) PutSourceSonarCloud(ctx context.Context, request operations.PutSourceSonarCloudRequest) (*operations.PutSourceSonarCloudResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SonarCloud", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSonarCloudPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSonarCloudPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45129,7 +45156,7 @@ func (s *sources) PutSourceSonarCloud(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45170,18 +45197,17 @@ func (s *sources) PutSourceSonarCloud(ctx context.Context, request operations.Pu
}
// PutSourceSpacexAPI - Update a Source fully
-func (s *sources) PutSourceSpacexAPI(ctx context.Context, request operations.PutSourceSpacexAPIRequest) (*operations.PutSourceSpacexAPIResponse, error) {
+func (s *Sources) PutSourceSpacexAPI(ctx context.Context, request operations.PutSourceSpacexAPIRequest) (*operations.PutSourceSpacexAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SpacexApi", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSpacexAPIPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSpacexAPIPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45190,7 +45216,7 @@ func (s *sources) PutSourceSpacexAPI(ctx context.Context, request operations.Put
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45231,18 +45257,17 @@ func (s *sources) PutSourceSpacexAPI(ctx context.Context, request operations.Put
}
// PutSourceSquare - Update a Source fully
-func (s *sources) PutSourceSquare(ctx context.Context, request operations.PutSourceSquareRequest) (*operations.PutSourceSquareResponse, error) {
+func (s *Sources) PutSourceSquare(ctx context.Context, request operations.PutSourceSquareRequest) (*operations.PutSourceSquareResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Square", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSquarePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSquarePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45251,7 +45276,7 @@ func (s *sources) PutSourceSquare(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45292,18 +45317,17 @@ func (s *sources) PutSourceSquare(ctx context.Context, request operations.PutSou
}
// PutSourceStrava - Update a Source fully
-func (s *sources) PutSourceStrava(ctx context.Context, request operations.PutSourceStravaRequest) (*operations.PutSourceStravaResponse, error) {
+func (s *Sources) PutSourceStrava(ctx context.Context, request operations.PutSourceStravaRequest) (*operations.PutSourceStravaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Strava", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceStravaPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceStravaPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45312,7 +45336,7 @@ func (s *sources) PutSourceStrava(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45353,18 +45377,17 @@ func (s *sources) PutSourceStrava(ctx context.Context, request operations.PutSou
}
// PutSourceStripe - Update a Source fully
-func (s *sources) PutSourceStripe(ctx context.Context, request operations.PutSourceStripeRequest) (*operations.PutSourceStripeResponse, error) {
+func (s *Sources) PutSourceStripe(ctx context.Context, request operations.PutSourceStripeRequest) (*operations.PutSourceStripeResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Stripe", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceStripePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceStripePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45373,7 +45396,7 @@ func (s *sources) PutSourceStripe(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45414,18 +45437,17 @@ func (s *sources) PutSourceStripe(ctx context.Context, request operations.PutSou
}
// PutSourceSurveySparrow - Update a Source fully
-func (s *sources) PutSourceSurveySparrow(ctx context.Context, request operations.PutSourceSurveySparrowRequest) (*operations.PutSourceSurveySparrowResponse, error) {
+func (s *Sources) PutSourceSurveySparrow(ctx context.Context, request operations.PutSourceSurveySparrowRequest) (*operations.PutSourceSurveySparrowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#SurveySparrow", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSurveySparrowPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSurveySparrowPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45434,7 +45456,7 @@ func (s *sources) PutSourceSurveySparrow(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45475,18 +45497,17 @@ func (s *sources) PutSourceSurveySparrow(ctx context.Context, request operations
}
// PutSourceSurveymonkey - Update a Source fully
-func (s *sources) PutSourceSurveymonkey(ctx context.Context, request operations.PutSourceSurveymonkeyRequest) (*operations.PutSourceSurveymonkeyResponse, error) {
+func (s *Sources) PutSourceSurveymonkey(ctx context.Context, request operations.PutSourceSurveymonkeyRequest) (*operations.PutSourceSurveymonkeyResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Surveymonkey", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceSurveymonkeyPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceSurveymonkeyPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45495,7 +45516,7 @@ func (s *sources) PutSourceSurveymonkey(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45536,18 +45557,17 @@ func (s *sources) PutSourceSurveymonkey(ctx context.Context, request operations.
}
// PutSourceTempo - Update a Source fully
-func (s *sources) PutSourceTempo(ctx context.Context, request operations.PutSourceTempoRequest) (*operations.PutSourceTempoResponse, error) {
+func (s *Sources) PutSourceTempo(ctx context.Context, request operations.PutSourceTempoRequest) (*operations.PutSourceTempoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Tempo", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceTempoPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceTempoPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45556,7 +45576,7 @@ func (s *sources) PutSourceTempo(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45597,18 +45617,17 @@ func (s *sources) PutSourceTempo(ctx context.Context, request operations.PutSour
}
// PutSourceTheGuardianAPI - Update a Source fully
-func (s *sources) PutSourceTheGuardianAPI(ctx context.Context, request operations.PutSourceTheGuardianAPIRequest) (*operations.PutSourceTheGuardianAPIResponse, error) {
+func (s *Sources) PutSourceTheGuardianAPI(ctx context.Context, request operations.PutSourceTheGuardianAPIRequest) (*operations.PutSourceTheGuardianAPIResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TheGuardianApi", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceTheGuardianAPIPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceTheGuardianAPIPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45617,7 +45636,7 @@ func (s *sources) PutSourceTheGuardianAPI(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45658,18 +45677,17 @@ func (s *sources) PutSourceTheGuardianAPI(ctx context.Context, request operation
}
// PutSourceTiktokMarketing - Update a Source fully
-func (s *sources) PutSourceTiktokMarketing(ctx context.Context, request operations.PutSourceTiktokMarketingRequest) (*operations.PutSourceTiktokMarketingResponse, error) {
+func (s *Sources) PutSourceTiktokMarketing(ctx context.Context, request operations.PutSourceTiktokMarketingRequest) (*operations.PutSourceTiktokMarketingResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TiktokMarketing", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceTiktokMarketingPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceTiktokMarketingPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45678,7 +45696,7 @@ func (s *sources) PutSourceTiktokMarketing(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45719,18 +45737,17 @@ func (s *sources) PutSourceTiktokMarketing(ctx context.Context, request operatio
}
// PutSourceTodoist - Update a Source fully
-func (s *sources) PutSourceTodoist(ctx context.Context, request operations.PutSourceTodoistRequest) (*operations.PutSourceTodoistResponse, error) {
+func (s *Sources) PutSourceTodoist(ctx context.Context, request operations.PutSourceTodoistRequest) (*operations.PutSourceTodoistResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Todoist", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceTodoistPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceTodoistPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45739,7 +45756,7 @@ func (s *sources) PutSourceTodoist(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45780,18 +45797,17 @@ func (s *sources) PutSourceTodoist(ctx context.Context, request operations.PutSo
}
// PutSourceTrello - Update a Source fully
-func (s *sources) PutSourceTrello(ctx context.Context, request operations.PutSourceTrelloRequest) (*operations.PutSourceTrelloResponse, error) {
+func (s *Sources) PutSourceTrello(ctx context.Context, request operations.PutSourceTrelloRequest) (*operations.PutSourceTrelloResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Trello", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceTrelloPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceTrelloPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45800,7 +45816,7 @@ func (s *sources) PutSourceTrello(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45841,18 +45857,17 @@ func (s *sources) PutSourceTrello(ctx context.Context, request operations.PutSou
}
// PutSourceTrustpilot - Update a Source fully
-func (s *sources) PutSourceTrustpilot(ctx context.Context, request operations.PutSourceTrustpilotRequest) (*operations.PutSourceTrustpilotResponse, error) {
+func (s *Sources) PutSourceTrustpilot(ctx context.Context, request operations.PutSourceTrustpilotRequest) (*operations.PutSourceTrustpilotResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Trustpilot", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceTrustpilotPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceTrustpilotPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45861,7 +45876,7 @@ func (s *sources) PutSourceTrustpilot(ctx context.Context, request operations.Pu
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45902,18 +45917,17 @@ func (s *sources) PutSourceTrustpilot(ctx context.Context, request operations.Pu
}
// PutSourceTvmazeSchedule - Update a Source fully
-func (s *sources) PutSourceTvmazeSchedule(ctx context.Context, request operations.PutSourceTvmazeScheduleRequest) (*operations.PutSourceTvmazeScheduleResponse, error) {
+func (s *Sources) PutSourceTvmazeSchedule(ctx context.Context, request operations.PutSourceTvmazeScheduleRequest) (*operations.PutSourceTvmazeScheduleResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TvmazeSchedule", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceTvmazeSchedulePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceTvmazeSchedulePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45922,7 +45936,7 @@ func (s *sources) PutSourceTvmazeSchedule(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -45963,18 +45977,17 @@ func (s *sources) PutSourceTvmazeSchedule(ctx context.Context, request operation
}
// PutSourceTwilio - Update a Source fully
-func (s *sources) PutSourceTwilio(ctx context.Context, request operations.PutSourceTwilioRequest) (*operations.PutSourceTwilioResponse, error) {
+func (s *Sources) PutSourceTwilio(ctx context.Context, request operations.PutSourceTwilioRequest) (*operations.PutSourceTwilioResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Twilio", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceTwilioPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceTwilioPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -45983,7 +45996,7 @@ func (s *sources) PutSourceTwilio(ctx context.Context, request operations.PutSou
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46024,18 +46037,17 @@ func (s *sources) PutSourceTwilio(ctx context.Context, request operations.PutSou
}
// PutSourceTwilioTaskrouter - Update a Source fully
-func (s *sources) PutSourceTwilioTaskrouter(ctx context.Context, request operations.PutSourceTwilioTaskrouterRequest) (*operations.PutSourceTwilioTaskrouterResponse, error) {
+func (s *Sources) PutSourceTwilioTaskrouter(ctx context.Context, request operations.PutSourceTwilioTaskrouterRequest) (*operations.PutSourceTwilioTaskrouterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#TwilioTaskrouter", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceTwilioTaskrouterPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceTwilioTaskrouterPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46044,7 +46056,7 @@ func (s *sources) PutSourceTwilioTaskrouter(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46085,18 +46097,17 @@ func (s *sources) PutSourceTwilioTaskrouter(ctx context.Context, request operati
}
// PutSourceTwitter - Update a Source fully
-func (s *sources) PutSourceTwitter(ctx context.Context, request operations.PutSourceTwitterRequest) (*operations.PutSourceTwitterResponse, error) {
+func (s *Sources) PutSourceTwitter(ctx context.Context, request operations.PutSourceTwitterRequest) (*operations.PutSourceTwitterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Twitter", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceTwitterPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceTwitterPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46105,7 +46116,7 @@ func (s *sources) PutSourceTwitter(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46146,18 +46157,17 @@ func (s *sources) PutSourceTwitter(ctx context.Context, request operations.PutSo
}
// PutSourceTypeform - Update a Source fully
-func (s *sources) PutSourceTypeform(ctx context.Context, request operations.PutSourceTypeformRequest) (*operations.PutSourceTypeformResponse, error) {
+func (s *Sources) PutSourceTypeform(ctx context.Context, request operations.PutSourceTypeformRequest) (*operations.PutSourceTypeformResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Typeform", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceTypeformPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceTypeformPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46166,7 +46176,7 @@ func (s *sources) PutSourceTypeform(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46207,18 +46217,17 @@ func (s *sources) PutSourceTypeform(ctx context.Context, request operations.PutS
}
// PutSourceUsCensus - Update a Source fully
-func (s *sources) PutSourceUsCensus(ctx context.Context, request operations.PutSourceUsCensusRequest) (*operations.PutSourceUsCensusResponse, error) {
+func (s *Sources) PutSourceUsCensus(ctx context.Context, request operations.PutSourceUsCensusRequest) (*operations.PutSourceUsCensusResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#UsCensus", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceUsCensusPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceUsCensusPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46227,7 +46236,7 @@ func (s *sources) PutSourceUsCensus(ctx context.Context, request operations.PutS
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46268,18 +46277,17 @@ func (s *sources) PutSourceUsCensus(ctx context.Context, request operations.PutS
}
// PutSourceVantage - Update a Source fully
-func (s *sources) PutSourceVantage(ctx context.Context, request operations.PutSourceVantageRequest) (*operations.PutSourceVantageResponse, error) {
+func (s *Sources) PutSourceVantage(ctx context.Context, request operations.PutSourceVantageRequest) (*operations.PutSourceVantageResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Vantage", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceVantagePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceVantagePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46288,7 +46296,7 @@ func (s *sources) PutSourceVantage(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46329,18 +46337,17 @@ func (s *sources) PutSourceVantage(ctx context.Context, request operations.PutSo
}
// PutSourceWebflow - Update a Source fully
-func (s *sources) PutSourceWebflow(ctx context.Context, request operations.PutSourceWebflowRequest) (*operations.PutSourceWebflowResponse, error) {
+func (s *Sources) PutSourceWebflow(ctx context.Context, request operations.PutSourceWebflowRequest) (*operations.PutSourceWebflowResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Webflow", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceWebflowPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceWebflowPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46349,7 +46356,7 @@ func (s *sources) PutSourceWebflow(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46390,18 +46397,17 @@ func (s *sources) PutSourceWebflow(ctx context.Context, request operations.PutSo
}
// PutSourceWhiskyHunter - Update a Source fully
-func (s *sources) PutSourceWhiskyHunter(ctx context.Context, request operations.PutSourceWhiskyHunterRequest) (*operations.PutSourceWhiskyHunterResponse, error) {
+func (s *Sources) PutSourceWhiskyHunter(ctx context.Context, request operations.PutSourceWhiskyHunterRequest) (*operations.PutSourceWhiskyHunterResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#WhiskyHunter", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceWhiskyHunterPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceWhiskyHunterPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46410,7 +46416,7 @@ func (s *sources) PutSourceWhiskyHunter(ctx context.Context, request operations.
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46451,18 +46457,17 @@ func (s *sources) PutSourceWhiskyHunter(ctx context.Context, request operations.
}
// PutSourceWikipediaPageviews - Update a Source fully
-func (s *sources) PutSourceWikipediaPageviews(ctx context.Context, request operations.PutSourceWikipediaPageviewsRequest) (*operations.PutSourceWikipediaPageviewsResponse, error) {
+func (s *Sources) PutSourceWikipediaPageviews(ctx context.Context, request operations.PutSourceWikipediaPageviewsRequest) (*operations.PutSourceWikipediaPageviewsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#WikipediaPageviews", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceWikipediaPageviewsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceWikipediaPageviewsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46471,7 +46476,7 @@ func (s *sources) PutSourceWikipediaPageviews(ctx context.Context, request opera
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46512,18 +46517,17 @@ func (s *sources) PutSourceWikipediaPageviews(ctx context.Context, request opera
}
// PutSourceWoocommerce - Update a Source fully
-func (s *sources) PutSourceWoocommerce(ctx context.Context, request operations.PutSourceWoocommerceRequest) (*operations.PutSourceWoocommerceResponse, error) {
+func (s *Sources) PutSourceWoocommerce(ctx context.Context, request operations.PutSourceWoocommerceRequest) (*operations.PutSourceWoocommerceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Woocommerce", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceWoocommercePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceWoocommercePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46532,7 +46536,7 @@ func (s *sources) PutSourceWoocommerce(ctx context.Context, request operations.P
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46572,80 +46576,18 @@ func (s *sources) PutSourceWoocommerce(ctx context.Context, request operations.P
return res, nil
}
-// PutSourceXero - Update a Source fully
-func (s *sources) PutSourceXero(ctx context.Context, request operations.PutSourceXeroRequest) (*operations.PutSourceXeroResponse, error) {
- baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Xero", request, nil)
- if err != nil {
- return nil, fmt.Errorf("error generating URL: %w", err)
- }
-
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceXeroPutRequest", "json")
- if err != nil {
- return nil, fmt.Errorf("error serializing request body: %w", err)
- }
-
- debugBody := bytes.NewBuffer([]byte{})
- debugReader := io.TeeReader(bodyReader, debugBody)
-
- req, err := http.NewRequestWithContext(ctx, "PUT", url, debugReader)
- if err != nil {
- return nil, fmt.Errorf("error creating request: %w", err)
- }
- req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
-
- req.Header.Set("Content-Type", reqContentType)
-
- client := s.sdkConfiguration.SecurityClient
-
- httpRes, err := client.Do(req)
- if err != nil {
- return nil, fmt.Errorf("error sending request: %w", err)
- }
- if httpRes == nil {
- return nil, fmt.Errorf("error sending request: no response")
- }
-
- rawBody, err := io.ReadAll(httpRes.Body)
- if err != nil {
- return nil, fmt.Errorf("error reading response body: %w", err)
- }
- httpRes.Request.Body = io.NopCloser(debugBody)
- httpRes.Body.Close()
- httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))
-
- contentType := httpRes.Header.Get("Content-Type")
-
- res := &operations.PutSourceXeroResponse{
- StatusCode: httpRes.StatusCode,
- ContentType: contentType,
- RawResponse: httpRes,
- }
- switch {
- case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300:
- fallthrough
- case httpRes.StatusCode == 403:
- fallthrough
- case httpRes.StatusCode == 404:
- }
-
- return res, nil
-}
-
// PutSourceXkcd - Update a Source fully
-func (s *sources) PutSourceXkcd(ctx context.Context, request operations.PutSourceXkcdRequest) (*operations.PutSourceXkcdResponse, error) {
+func (s *Sources) PutSourceXkcd(ctx context.Context, request operations.PutSourceXkcdRequest) (*operations.PutSourceXkcdResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Xkcd", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceXkcdPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceXkcdPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46654,7 +46596,7 @@ func (s *sources) PutSourceXkcd(ctx context.Context, request operations.PutSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46695,18 +46637,17 @@ func (s *sources) PutSourceXkcd(ctx context.Context, request operations.PutSourc
}
// PutSourceYandexMetrica - Update a Source fully
-func (s *sources) PutSourceYandexMetrica(ctx context.Context, request operations.PutSourceYandexMetricaRequest) (*operations.PutSourceYandexMetricaResponse, error) {
+func (s *Sources) PutSourceYandexMetrica(ctx context.Context, request operations.PutSourceYandexMetricaRequest) (*operations.PutSourceYandexMetricaResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#YandexMetrica", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceYandexMetricaPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceYandexMetricaPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46715,7 +46656,7 @@ func (s *sources) PutSourceYandexMetrica(ctx context.Context, request operations
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46756,18 +46697,17 @@ func (s *sources) PutSourceYandexMetrica(ctx context.Context, request operations
}
// PutSourceYotpo - Update a Source fully
-func (s *sources) PutSourceYotpo(ctx context.Context, request operations.PutSourceYotpoRequest) (*operations.PutSourceYotpoResponse, error) {
+func (s *Sources) PutSourceYotpo(ctx context.Context, request operations.PutSourceYotpoRequest) (*operations.PutSourceYotpoResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Yotpo", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceYotpoPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceYotpoPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46776,7 +46716,7 @@ func (s *sources) PutSourceYotpo(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46816,19 +46756,18 @@ func (s *sources) PutSourceYotpo(ctx context.Context, request operations.PutSour
return res, nil
}
-// PutSourceYounium - Update a Source fully
-func (s *sources) PutSourceYounium(ctx context.Context, request operations.PutSourceYouniumRequest) (*operations.PutSourceYouniumResponse, error) {
+// PutSourceYoutubeAnalytics - Update a Source fully
+func (s *Sources) PutSourceYoutubeAnalytics(ctx context.Context, request operations.PutSourceYoutubeAnalyticsRequest) (*operations.PutSourceYoutubeAnalyticsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Younium", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#YoutubeAnalytics", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceYouniumPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceYoutubeAnalyticsPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46837,7 +46776,7 @@ func (s *sources) PutSourceYounium(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46861,7 +46800,7 @@ func (s *sources) PutSourceYounium(ctx context.Context, request operations.PutSo
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceYouniumResponse{
+ res := &operations.PutSourceYoutubeAnalyticsResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -46877,19 +46816,18 @@ func (s *sources) PutSourceYounium(ctx context.Context, request operations.PutSo
return res, nil
}
-// PutSourceYoutubeAnalytics - Update a Source fully
-func (s *sources) PutSourceYoutubeAnalytics(ctx context.Context, request operations.PutSourceYoutubeAnalyticsRequest) (*operations.PutSourceYoutubeAnalyticsResponse, error) {
+// PutSourceZendeskChat - Update a Source fully
+func (s *Sources) PutSourceZendeskChat(ctx context.Context, request operations.PutSourceZendeskChatRequest) (*operations.PutSourceZendeskChatResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#YoutubeAnalytics", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskChat", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceYoutubeAnalyticsPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceZendeskChatPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46898,7 +46836,7 @@ func (s *sources) PutSourceYoutubeAnalytics(ctx context.Context, request operati
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46922,7 +46860,7 @@ func (s *sources) PutSourceYoutubeAnalytics(ctx context.Context, request operati
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceYoutubeAnalyticsResponse{
+ res := &operations.PutSourceZendeskChatResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -46938,19 +46876,18 @@ func (s *sources) PutSourceYoutubeAnalytics(ctx context.Context, request operati
return res, nil
}
-// PutSourceZendeskChat - Update a Source fully
-func (s *sources) PutSourceZendeskChat(ctx context.Context, request operations.PutSourceZendeskChatRequest) (*operations.PutSourceZendeskChatResponse, error) {
+// PutSourceZendeskSell - Update a Source fully
+func (s *Sources) PutSourceZendeskSell(ctx context.Context, request operations.PutSourceZendeskSellRequest) (*operations.PutSourceZendeskSellResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
- url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskChat", request, nil)
+ url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskSell", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceZendeskChatPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceZendeskSellPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -46959,7 +46896,7 @@ func (s *sources) PutSourceZendeskChat(ctx context.Context, request operations.P
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -46983,7 +46920,7 @@ func (s *sources) PutSourceZendeskChat(ctx context.Context, request operations.P
contentType := httpRes.Header.Get("Content-Type")
- res := &operations.PutSourceZendeskChatResponse{
+ res := &operations.PutSourceZendeskSellResponse{
StatusCode: httpRes.StatusCode,
ContentType: contentType,
RawResponse: httpRes,
@@ -47000,18 +46937,17 @@ func (s *sources) PutSourceZendeskChat(ctx context.Context, request operations.P
}
// PutSourceZendeskSunshine - Update a Source fully
-func (s *sources) PutSourceZendeskSunshine(ctx context.Context, request operations.PutSourceZendeskSunshineRequest) (*operations.PutSourceZendeskSunshineResponse, error) {
+func (s *Sources) PutSourceZendeskSunshine(ctx context.Context, request operations.PutSourceZendeskSunshineRequest) (*operations.PutSourceZendeskSunshineResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskSunshine", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceZendeskSunshinePutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceZendeskSunshinePutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -47020,7 +46956,7 @@ func (s *sources) PutSourceZendeskSunshine(ctx context.Context, request operatio
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -47061,18 +46997,17 @@ func (s *sources) PutSourceZendeskSunshine(ctx context.Context, request operatio
}
// PutSourceZendeskSupport - Update a Source fully
-func (s *sources) PutSourceZendeskSupport(ctx context.Context, request operations.PutSourceZendeskSupportRequest) (*operations.PutSourceZendeskSupportResponse, error) {
+func (s *Sources) PutSourceZendeskSupport(ctx context.Context, request operations.PutSourceZendeskSupportRequest) (*operations.PutSourceZendeskSupportResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskSupport", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceZendeskSupportPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceZendeskSupportPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -47081,7 +47016,7 @@ func (s *sources) PutSourceZendeskSupport(ctx context.Context, request operation
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -47122,18 +47057,17 @@ func (s *sources) PutSourceZendeskSupport(ctx context.Context, request operation
}
// PutSourceZendeskTalk - Update a Source fully
-func (s *sources) PutSourceZendeskTalk(ctx context.Context, request operations.PutSourceZendeskTalkRequest) (*operations.PutSourceZendeskTalkResponse, error) {
+func (s *Sources) PutSourceZendeskTalk(ctx context.Context, request operations.PutSourceZendeskTalkRequest) (*operations.PutSourceZendeskTalkResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZendeskTalk", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceZendeskTalkPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceZendeskTalkPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -47142,7 +47076,7 @@ func (s *sources) PutSourceZendeskTalk(ctx context.Context, request operations.P
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -47183,18 +47117,17 @@ func (s *sources) PutSourceZendeskTalk(ctx context.Context, request operations.P
}
// PutSourceZenloop - Update a Source fully
-func (s *sources) PutSourceZenloop(ctx context.Context, request operations.PutSourceZenloopRequest) (*operations.PutSourceZenloopResponse, error) {
+func (s *Sources) PutSourceZenloop(ctx context.Context, request operations.PutSourceZenloopRequest) (*operations.PutSourceZenloopResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Zenloop", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceZenloopPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceZenloopPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -47203,7 +47136,7 @@ func (s *sources) PutSourceZenloop(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -47244,18 +47177,17 @@ func (s *sources) PutSourceZenloop(ctx context.Context, request operations.PutSo
}
// PutSourceZohoCrm - Update a Source fully
-func (s *sources) PutSourceZohoCrm(ctx context.Context, request operations.PutSourceZohoCrmRequest) (*operations.PutSourceZohoCrmResponse, error) {
+func (s *Sources) PutSourceZohoCrm(ctx context.Context, request operations.PutSourceZohoCrmRequest) (*operations.PutSourceZohoCrmResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#ZohoCrm", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceZohoCrmPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceZohoCrmPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -47264,7 +47196,7 @@ func (s *sources) PutSourceZohoCrm(ctx context.Context, request operations.PutSo
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -47305,18 +47237,17 @@ func (s *sources) PutSourceZohoCrm(ctx context.Context, request operations.PutSo
}
// PutSourceZoom - Update a Source fully
-func (s *sources) PutSourceZoom(ctx context.Context, request operations.PutSourceZoomRequest) (*operations.PutSourceZoomResponse, error) {
+func (s *Sources) PutSourceZoom(ctx context.Context, request operations.PutSourceZoomRequest) (*operations.PutSourceZoomResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Zoom", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceZoomPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceZoomPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -47325,7 +47256,7 @@ func (s *sources) PutSourceZoom(ctx context.Context, request operations.PutSourc
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -47366,18 +47297,17 @@ func (s *sources) PutSourceZoom(ctx context.Context, request operations.PutSourc
}
// PutSourceZuora - Update a Source fully
-func (s *sources) PutSourceZuora(ctx context.Context, request operations.PutSourceZuoraRequest) (*operations.PutSourceZuoraResponse, error) {
+func (s *Sources) PutSourceZuora(ctx context.Context, request operations.PutSourceZuoraRequest) (*operations.PutSourceZuoraResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Zuora", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceZuoraPutRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, true, "SourceZuoraPutRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
-
debugBody := bytes.NewBuffer([]byte{})
debugReader := io.TeeReader(bodyReader, debugBody)
@@ -47386,7 +47316,7 @@ func (s *sources) PutSourceZuora(ctx context.Context, request operations.PutSour
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
diff --git a/internal/sdk/streams.go b/internal/sdk/streams.go
old mode 100755
new mode 100644
index 4e561fda5..d15e2a5b1
--- a/internal/sdk/streams.go
+++ b/internal/sdk/streams.go
@@ -3,29 +3,30 @@
package sdk
import (
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/sdk/pkg/models/shared"
- "airbyte/internal/sdk/pkg/utils"
"bytes"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/sdkerrors"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"io"
"net/http"
"strings"
)
-type streams struct {
+type Streams struct {
sdkConfiguration sdkConfiguration
}
-func newStreams(sdkConfig sdkConfiguration) *streams {
- return &streams{
+func newStreams(sdkConfig sdkConfiguration) *Streams {
+ return &Streams{
sdkConfiguration: sdkConfig,
}
}
// GetStreamProperties - Get stream properties
-func (s *streams) GetStreamProperties(ctx context.Context, request operations.GetStreamPropertiesRequest) (*operations.GetStreamPropertiesResponse, error) {
+func (s *Streams) GetStreamProperties(ctx context.Context, request operations.GetStreamPropertiesRequest) (*operations.GetStreamPropertiesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/streams"
@@ -34,7 +35,7 @@ func (s *streams) GetStreamProperties(ctx context.Context, request operations.Ge
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
if err := utils.PopulateQueryParams(ctx, req, request, nil); err != nil {
return nil, fmt.Errorf("error populating query params: %w", err)
@@ -68,12 +69,14 @@ func (s *streams) GetStreamProperties(ctx context.Context, request operations.Ge
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.StreamPropertiesResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.StreamPropertiesResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.StreamPropertiesResponse = out
+ res.StreamPropertiesResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
diff --git a/internal/sdk/workspaces.go b/internal/sdk/workspaces.go
old mode 100755
new mode 100644
index 33fbae82d..2c9ee08e4
--- a/internal/sdk/workspaces.go
+++ b/internal/sdk/workspaces.go
@@ -3,23 +3,24 @@
package sdk
import (
- "airbyte/internal/sdk/pkg/models/operations"
- "airbyte/internal/sdk/pkg/models/shared"
- "airbyte/internal/sdk/pkg/utils"
"bytes"
"context"
"fmt"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/operations"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/sdkerrors"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/utils"
"io"
"net/http"
"strings"
)
-type workspaces struct {
+type Workspaces struct {
sdkConfiguration sdkConfiguration
}
-func newWorkspaces(sdkConfig sdkConfiguration) *workspaces {
- return &workspaces{
+func newWorkspaces(sdkConfig sdkConfiguration) *Workspaces {
+ return &Workspaces{
sdkConfiguration: sdkConfig,
}
}
@@ -27,14 +28,14 @@ func newWorkspaces(sdkConfig sdkConfiguration) *workspaces {
// CreateOrUpdateWorkspaceOAuthCredentials - Create OAuth override credentials for a workspace and source type.
// Create/update a set of OAuth credentials to override the Airbyte-provided OAuth credentials used for source/destination OAuth.
// In order to determine what the credential configuration needs to be, please see the connector specification of the relevant source/destination.
-func (s *workspaces) CreateOrUpdateWorkspaceOAuthCredentials(ctx context.Context, request operations.CreateOrUpdateWorkspaceOAuthCredentialsRequest) (*operations.CreateOrUpdateWorkspaceOAuthCredentialsResponse, error) {
+func (s *Workspaces) CreateOrUpdateWorkspaceOAuthCredentials(ctx context.Context, request operations.CreateOrUpdateWorkspaceOAuthCredentialsRequest) (*operations.CreateOrUpdateWorkspaceOAuthCredentialsResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/workspaces/{workspaceId}/oauthCredentials", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "WorkspaceOAuthCredentialsRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, false, "WorkspaceOAuthCredentialsRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
@@ -50,7 +51,7 @@ func (s *workspaces) CreateOrUpdateWorkspaceOAuthCredentials(ctx context.Context
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -91,11 +92,11 @@ func (s *workspaces) CreateOrUpdateWorkspaceOAuthCredentials(ctx context.Context
}
// CreateWorkspace - Create a workspace
-func (s *workspaces) CreateWorkspace(ctx context.Context, request shared.WorkspaceCreateRequest) (*operations.CreateWorkspaceResponse, error) {
+func (s *Workspaces) CreateWorkspace(ctx context.Context, request shared.WorkspaceCreateRequest) (*operations.CreateWorkspaceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/workspaces"
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, false, "Request", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
@@ -111,7 +112,7 @@ func (s *workspaces) CreateWorkspace(ctx context.Context, request shared.Workspa
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -144,12 +145,14 @@ func (s *workspaces) CreateWorkspace(ctx context.Context, request shared.Workspa
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.WorkspaceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.WorkspaceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.WorkspaceResponse = out
+ res.WorkspaceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
@@ -160,7 +163,7 @@ func (s *workspaces) CreateWorkspace(ctx context.Context, request shared.Workspa
}
// DeleteWorkspace - Delete a Workspace
-func (s *workspaces) DeleteWorkspace(ctx context.Context, request operations.DeleteWorkspaceRequest) (*operations.DeleteWorkspaceResponse, error) {
+func (s *Workspaces) DeleteWorkspace(ctx context.Context, request operations.DeleteWorkspaceRequest) (*operations.DeleteWorkspaceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/workspaces/{workspaceId}", request, nil)
if err != nil {
@@ -172,7 +175,7 @@ func (s *workspaces) DeleteWorkspace(ctx context.Context, request operations.Del
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "*/*")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -210,7 +213,7 @@ func (s *workspaces) DeleteWorkspace(ctx context.Context, request operations.Del
}
// GetWorkspace - Get Workspace details
-func (s *workspaces) GetWorkspace(ctx context.Context, request operations.GetWorkspaceRequest) (*operations.GetWorkspaceResponse, error) {
+func (s *Workspaces) GetWorkspace(ctx context.Context, request operations.GetWorkspaceRequest) (*operations.GetWorkspaceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/workspaces/{workspaceId}", request, nil)
if err != nil {
@@ -222,7 +225,7 @@ func (s *workspaces) GetWorkspace(ctx context.Context, request operations.GetWor
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
client := s.sdkConfiguration.SecurityClient
@@ -252,12 +255,14 @@ func (s *workspaces) GetWorkspace(ctx context.Context, request operations.GetWor
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.WorkspaceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.WorkspaceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.WorkspaceResponse = out
+ res.WorkspaceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -268,7 +273,7 @@ func (s *workspaces) GetWorkspace(ctx context.Context, request operations.GetWor
}
// ListWorkspaces - List workspaces
-func (s *workspaces) ListWorkspaces(ctx context.Context, request operations.ListWorkspacesRequest) (*operations.ListWorkspacesResponse, error) {
+func (s *Workspaces) ListWorkspaces(ctx context.Context, request operations.ListWorkspacesRequest) (*operations.ListWorkspacesResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url := strings.TrimSuffix(baseURL, "/") + "/workspaces"
@@ -277,7 +282,7 @@ func (s *workspaces) ListWorkspaces(ctx context.Context, request operations.List
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
if err := utils.PopulateQueryParams(ctx, req, request, nil); err != nil {
return nil, fmt.Errorf("error populating query params: %w", err)
@@ -311,12 +316,14 @@ func (s *workspaces) ListWorkspaces(ctx context.Context, request operations.List
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.WorkspacesResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.WorkspacesResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.WorkspacesResponse = out
+ res.WorkspacesResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 403:
fallthrough
@@ -327,14 +334,14 @@ func (s *workspaces) ListWorkspaces(ctx context.Context, request operations.List
}
// UpdateWorkspace - Update a workspace
-func (s *workspaces) UpdateWorkspace(ctx context.Context, request operations.UpdateWorkspaceRequest) (*operations.UpdateWorkspaceResponse, error) {
+func (s *Workspaces) UpdateWorkspace(ctx context.Context, request operations.UpdateWorkspaceRequest) (*operations.UpdateWorkspaceResponse, error) {
baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
url, err := utils.GenerateURL(ctx, baseURL, "/workspaces/{workspaceId}", request, nil)
if err != nil {
return nil, fmt.Errorf("error generating URL: %w", err)
}
- bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "WorkspaceUpdateRequest", "json")
+ bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, false, false, "WorkspaceUpdateRequest", "json", `request:"mediaType=application/json"`)
if err != nil {
return nil, fmt.Errorf("error serializing request body: %w", err)
}
@@ -350,7 +357,7 @@ func (s *workspaces) UpdateWorkspace(ctx context.Context, request operations.Upd
return nil, fmt.Errorf("error creating request: %w", err)
}
req.Header.Set("Accept", "application/json")
- req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion))
+ req.Header.Set("user-agent", s.sdkConfiguration.UserAgent)
req.Header.Set("Content-Type", reqContentType)
@@ -383,12 +390,14 @@ func (s *workspaces) UpdateWorkspace(ctx context.Context, request operations.Upd
case httpRes.StatusCode == 200:
switch {
case utils.MatchContentType(contentType, `application/json`):
- var out *shared.WorkspaceResponse
- if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil {
- return res, err
+ var out shared.WorkspaceResponse
+ if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out, ""); err != nil {
+ return nil, err
}
- res.WorkspaceResponse = out
+ res.WorkspaceResponse = &out
+ default:
+ return nil, sdkerrors.NewSDKError(fmt.Sprintf("unknown content-type received: %s", contentType), httpRes.StatusCode, string(rawBody), httpRes)
}
case httpRes.StatusCode == 400:
fallthrough
diff --git a/internal/validators/DateValidator.go b/internal/validators/DateValidator.go
old mode 100755
new mode 100644
index bc102fc6b..bc2db8c85
--- a/internal/validators/DateValidator.go
+++ b/internal/validators/DateValidator.go
@@ -3,8 +3,8 @@
package validators
import (
- "airbyte/internal/sdk/pkg/types"
"context"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/types"
"github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
)
diff --git a/internal/validators/ExactlyOneChild.go b/internal/validators/ExactlyOneChild.go
old mode 100755
new mode 100644
diff --git a/internal/validators/JSONParseValidator.go b/internal/validators/JSONParseValidator.go
old mode 100755
new mode 100644
diff --git a/internal/validators/RFC3339Validator.go b/internal/validators/RFC3339Validator.go
old mode 100755
new mode 100644
index 42c90dc6d..ad3d71502
--- a/internal/validators/RFC3339Validator.go
+++ b/internal/validators/RFC3339Validator.go
@@ -4,15 +4,15 @@ package validators
import (
"context"
+ "time"
+
"github.com/hashicorp/terraform-plugin-framework-validators/helpers/validatordiag"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
- "time"
)
var _ validator.String = RFC3339TimeValidator{}
-type RFC3339TimeValidator struct {
-}
+type RFC3339TimeValidator struct{}
func (validator RFC3339TimeValidator) Description(ctx context.Context) string {
return "value must be a string in RFC3339 format"
@@ -28,7 +28,7 @@ func (validator RFC3339TimeValidator) ValidateString(ctx context.Context, req va
return
}
- if _, err := time.Parse(time.RFC3339, req.ConfigValue.ValueString()); err != nil {
+ if _, err := time.Parse(time.RFC3339Nano, req.ConfigValue.ValueString()); err != nil {
resp.Diagnostics.Append(validatordiag.InvalidAttributeTypeDiagnostic(
req.Path,
validator.MarkdownDescription(ctx),
@@ -42,7 +42,7 @@ func (validator RFC3339TimeValidator) ValidateString(ctx context.Context, req va
// attribute value:
//
// - Is a String.
-// - Is in RFC3339 Format.
+// - Is in RFC3339Nano Format.
//
// Null (unconfigured) and unknown (known after apply) values are skipped.
func IsRFC3339() validator.String {
diff --git a/main.go b/main.go
old mode 100755
new mode 100644
index 39450770c..d28d66faf
--- a/main.go
+++ b/main.go
@@ -7,15 +7,11 @@ import (
"flag"
"log"
- "airbyte/internal/provider"
+ "github.com/airbytehq/terraform-provider-airbyte/internal/provider"
"github.com/hashicorp/terraform-plugin-framework/providerserver"
)
-// Run "go generate" to format example terraform files and generate the docs for the registry/website
-
-// If you do not have terraform installed, you can remove the formatting command, but its suggested to
-// ensure the documentation is formatted properly.
-//go:generate terraform fmt -recursive ./examples/
+// Run "go generate" to generate the docs for the registry/website on each regeneration of the provider.
// Run the docs generation tool, check its repository for more information on how it works and how docs
// can be customized.
diff --git a/terraform-registry-manifest.json b/terraform-registry-manifest.json
old mode 100755
new mode 100644
diff --git a/tools/tools.go b/tools/tools.go
old mode 100755
new mode 100644